feat(solc): optimize output selection in cache mode (#1029)

* feat(solc): add mock project generator

* feat: mock project

* refactor: rename to mock

* feat(solc): mock project support

* refactor: move output selection to new type

* refactor: revert filter implementation

* feat: filter sources

* feat: support filtering

* refactor: update function signatures

* feat: optimize output

* chore: export helper macros

* fix(deps): add required features

* style: allow unused

* feat: also generate artifactless files

* feat: add create function

* chore(clippy): make clippy happy

* feat: add helper function

* test: add state machine test

* test: add more mock tests
Matthias Seitz, 2022-03-15 16:58:33 +01:00 (committed by GitHub)
parent 49ed78d685
commit ff29e96380
10 changed files with 969 additions and 256 deletions

ethers-solc/src/artifacts/mod.rs

@@ -24,7 +24,10 @@ use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
 pub mod output_selection;
 pub mod serde_helpers;
-use crate::artifacts::output_selection::ContractOutputSelection;
+use crate::{
+    artifacts::output_selection::{ContractOutputSelection, OutputSelection},
+    cache::FilteredSources,
+};
 pub use serde_helpers::{deserialize_bytes, deserialize_opt_bytes};

 /// Solidity files are made up of multiple `source units`, a solidity contract is such a `source

@@ -40,8 +43,12 @@ pub type Contracts = FileToContractsMap<Contract>;
 /// An ordered list of files and their source
 pub type Sources = BTreeMap<PathBuf, Source>;

+/// A set of different Solc installations with their version and the sources to be compiled
 pub type VersionedSources = BTreeMap<Solc, (Version, Sources)>;
+
+/// A set of different Solc installations with their version and the sources to be compiled
+pub type VersionedFilteredSources = BTreeMap<Solc, (Version, FilteredSources)>;

 /// Input type `solc` expects
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct CompilerInput {

@@ -56,7 +63,10 @@ impl CompilerInput {
         Source::read_all_from(path.as_ref()).map(Self::with_sources)
     }

-    /// Creates a new Compiler input with default settings and the given sources
+    /// Creates a new [CompilerInput]s with default settings and the given sources
+    ///
+    /// A [CompilerInput] expects a language setting; the languages solc supports are Solidity and Yul.
+    /// In case the `sources` are a mix of Solidity and Yul files, two `CompilerInput`s are returned.
     pub fn with_sources(sources: Sources) -> Vec<Self> {
         let mut solidity_sources = BTreeMap::new();
         let mut yul_sources = BTreeMap::new();

@@ -140,69 +150,8 @@ pub struct Settings {
     /// on file and contract names.
     /// If this field is omitted, then the compiler loads and does type
     /// checking, but will not generate any outputs apart from errors.
-    /// The first level key is the file name and the second level key is the
-    /// contract name. An empty contract name is used for outputs that are
-    /// not tied to a contract but to the whole source file like the AST.
-    /// A star as contract name refers to all contracts in the file.
-    /// Similarly, a star as a file name matches all files.
-    /// To select all outputs the compiler can possibly generate, use
-    /// "outputSelection: { "*": { "*": [ "*" ], "": [ "*" ] } }"
-    /// but note that this might slow down the compilation process needlessly.
-    ///
-    /// The available output types are as follows:
-    ///
-    /// File level (needs empty string as contract name):
-    ///   ast - AST of all source files
-    ///
-    /// Contract level (needs the contract name or "*"):
-    ///   abi - ABI
-    ///   devdoc - Developer documentation (natspec)
-    ///   userdoc - User documentation (natspec)
-    ///   metadata - Metadata
-    ///   ir - Yul intermediate representation of the code before optimization
-    ///   irOptimized - Intermediate representation after optimization
-    ///   storageLayout - Slots, offsets and types of the contract's state
-    ///     variables.
-    ///   evm.assembly - New assembly format
-    ///   evm.legacyAssembly - Old-style assembly format in JSON
-    ///   evm.bytecode.functionDebugData - Debugging information at function level
-    ///   evm.bytecode.object - Bytecode object
-    ///   evm.bytecode.opcodes - Opcodes list
-    ///   evm.bytecode.sourceMap - Source mapping (useful for debugging)
-    ///   evm.bytecode.linkReferences - Link references (if unlinked object)
-    ///   evm.bytecode.generatedSources - Sources generated by the compiler
-    ///   evm.deployedBytecode* - Deployed bytecode (has all the options that
-    ///     evm.bytecode has)
-    ///   evm.deployedBytecode.immutableReferences - Map from AST ids to
-    ///     bytecode ranges that reference immutables
-    ///   evm.methodIdentifiers - The list of function hashes
-    ///   evm.gasEstimates - Function gas estimates
-    ///   ewasm.wast - Ewasm in WebAssembly S-expressions format
-    ///   ewasm.wasm - Ewasm in WebAssembly binary format
-    ///
-    /// Note that using a using `evm`, `evm.bytecode`, `ewasm`, etc. will select
-    /// every target part of that output. Additionally, `*` can be used as a
-    /// wildcard to request everything.
-    ///
-    /// The default output selection is
-    ///
-    /// ```json
-    /// {
-    ///    "*": {
-    ///      "*": [
-    ///        "abi",
-    ///        "evm.bytecode",
-    ///        "evm.deployedBytecode",
-    ///        "evm.methodIdentifiers"
-    ///      ],
-    ///      "": [
-    ///        "ast"
-    ///      ]
-    ///    }
-    /// }
-    /// ```
     #[serde(default)]
-    pub output_selection: BTreeMap<String, BTreeMap<String, Vec<String>>>,
+    pub output_selection: OutputSelection,
     #[serde(
         default,
         with = "serde_helpers::display_from_str_opt",

@@ -215,37 +164,8 @@ pub struct Settings {
 impl Settings {
     /// Creates a new `Settings` instance with the given `output_selection`
-    pub fn new(output_selection: BTreeMap<String, BTreeMap<String, Vec<String>>>) -> Self {
-        Self { output_selection, ..Default::default() }
+    pub fn new(output_selection: impl Into<OutputSelection>) -> Self {
+        Self { output_selection: output_selection.into(), ..Default::default() }
     }
-
-    /// select all outputs the compiler can possibly generate, use
-    /// `{ "*": { "*": [ "*" ], "": [ "*" ] } }`
-    /// but note that this might slow down the compilation process needlessly.
-    pub fn complete_output_selection() -> BTreeMap<String, BTreeMap<String, Vec<String>>> {
-        BTreeMap::from([(
-            "*".to_string(),
-            BTreeMap::from([
-                ("*".to_string(), vec!["*".to_string()]),
-                ("".to_string(), vec!["*".to_string()]),
-            ]),
-        )])
-    }
-
-    /// Default output selection for compiler output
-    pub fn default_output_selection() -> BTreeMap<String, BTreeMap<String, Vec<String>>> {
-        BTreeMap::from([(
-            "*".to_string(),
-            BTreeMap::from([(
-                "*".to_string(),
-                vec![
-                    "abi".to_string(),
-                    "evm.bytecode".to_string(),
-                    "evm.deployedBytecode".to_string(),
-                    "evm.methodIdentifiers".to_string(),
-                ],
-            )]),
-        )])
-    }

     /// Inserts a set of `ContractOutputSelection`

@@ -290,6 +210,7 @@ impl Settings {
         let value = value.to_string();
         let values = self
             .output_selection
+            .as_mut()
             .entry("*".to_string())
             .or_default()
             .entry(contracts.into())

@@ -313,6 +234,7 @@ impl Settings {
         values: impl IntoIterator<Item = impl ToString>,
     ) {
         self.output_selection
+            .as_mut()
             .entry("*".to_string())
             .or_default()
             .insert(key.into(), values.into_iter().map(|s| s.to_string()).collect());

@@ -321,7 +243,8 @@ impl Settings {
     /// Adds `ast` to output
     #[must_use]
     pub fn with_ast(mut self) -> Self {
-        let output = self.output_selection.entry("*".to_string()).or_insert_with(BTreeMap::default);
+        let output =
+            self.output_selection.as_mut().entry("*".to_string()).or_insert_with(BTreeMap::default);
         output.insert("".to_string(), vec!["ast".to_string()]);
         self
     }

@@ -333,7 +256,7 @@ impl Default for Settings {
             stop_after: None,
             optimizer: Default::default(),
             metadata: None,
-            output_selection: Self::default_output_selection(),
+            output_selection: OutputSelection::default_output_selection(),
             evm_version: Some(EvmVersion::default()),
             libraries: Default::default(),
             remappings: Default::default(),

@@ -2087,6 +2010,7 @@ pub struct SecondarySourceLocation {
 #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
 pub struct SourceFile {
     pub id: u32,
+    #[serde(default)]
     pub ast: serde_json::Value,
 }
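With `output_selection` now wrapped in a dedicated type, the `Settings` API reads as follows. A minimal sketch, not part of the commit, assuming `ethers-solc` (this crate) and `serde_json` as dependencies; everything else comes from the diff above:

```rust
use ethers_solc::artifacts::{output_selection::OutputSelection, Settings};

fn main() {
    // `Settings::default()` now carries an `OutputSelection` instead of raw nested maps.
    let default_settings = Settings::default();

    // `Settings::new` accepts anything convertible into an `OutputSelection`,
    // e.g. the "select everything" preset that moved onto the new type.
    let everything = Settings::new(OutputSelection::complete_output_selection());

    // The wrapper is `#[serde(transparent)]`, so it still serializes to the exact
    // JSON shape solc expects under `settings.outputSelection`.
    println!("{}", serde_json::to_string_pretty(&everything.output_selection).unwrap());
    drop(default_settings);
}
```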

ethers-solc/src/artifacts/output_selection.rs

@@ -1,7 +1,143 @@
 //! bindings for standard json output selection

 use serde::{Deserialize, Deserializer, Serialize, Serializer};
-use std::{fmt, str::FromStr};
+use std::{collections::BTreeMap, fmt, str::FromStr};
+
+/// Represents the desired outputs based on a File `(file -> (contract -> [outputs]))`
+pub type FileOutputSelection = BTreeMap<String, Vec<String>>;
+
+/// Represents the selected output of files and contracts
+/// The first level key is the file name and the second level key is the
+/// contract name. An empty contract name is used for outputs that are
+/// not tied to a contract but to the whole source file like the AST.
+/// A star as contract name refers to all contracts in the file.
+/// Similarly, a star as a file name matches all files.
+/// To select all outputs the compiler can possibly generate, use
+/// "outputSelection: { "*": { "*": [ "*" ], "": [ "*" ] } }"
+/// but note that this might slow down the compilation process needlessly.
+///
+/// The available output types are as follows:
+///
+/// File level (needs empty string as contract name):
+///   ast - AST of all source files
+///
+/// Contract level (needs the contract name or "*"):
+///   abi - ABI
+///   devdoc - Developer documentation (natspec)
+///   userdoc - User documentation (natspec)
+///   metadata - Metadata
+///   ir - Yul intermediate representation of the code before optimization
+///   irOptimized - Intermediate representation after optimization
+///   storageLayout - Slots, offsets and types of the contract's state
+///     variables.
+///   evm.assembly - New assembly format
+///   evm.legacyAssembly - Old-style assembly format in JSON
+///   evm.bytecode.functionDebugData - Debugging information at function level
+///   evm.bytecode.object - Bytecode object
+///   evm.bytecode.opcodes - Opcodes list
+///   evm.bytecode.sourceMap - Source mapping (useful for debugging)
+///   evm.bytecode.linkReferences - Link references (if unlinked object)
+///   evm.bytecode.generatedSources - Sources generated by the compiler
+///   evm.deployedBytecode* - Deployed bytecode (has all the options that
+///     evm.bytecode has)
+///   evm.deployedBytecode.immutableReferences - Map from AST ids to
+///     bytecode ranges that reference immutables
+///   evm.methodIdentifiers - The list of function hashes
+///   evm.gasEstimates - Function gas estimates
+///   ewasm.wast - Ewasm in WebAssembly S-expressions format
+///   ewasm.wasm - Ewasm in WebAssembly binary format
+///
+/// Note that using `evm`, `evm.bytecode`, `ewasm`, etc. will select
+/// every target part of that output. Additionally, `*` can be used as a
+/// wildcard to request everything.
+///
+/// The default output selection is
+///
+/// ```json
+/// {
+///    "*": {
+///      "*": [
+///        "abi",
+///        "evm.bytecode",
+///        "evm.deployedBytecode",
+///        "evm.methodIdentifiers"
+///      ],
+///      "": [
+///        "ast"
+///      ]
+///    }
+/// }
+/// ```
+#[derive(Debug, Clone, Eq, PartialEq, Default, Serialize, Deserialize)]
+#[serde(transparent)]
+pub struct OutputSelection(pub BTreeMap<String, FileOutputSelection>);
+
+impl OutputSelection {
+    /// Selects all outputs the compiler can possibly generate, use
+    /// `{ "*": { "*": [ "*" ], "": [ "*" ] } }`
+    /// but note that this might slow down the compilation process needlessly.
+    pub fn complete_output_selection() -> Self {
+        BTreeMap::from([(
+            "*".to_string(),
+            BTreeMap::from([
+                ("*".to_string(), vec!["*".to_string()]),
+                ("".to_string(), vec!["*".to_string()]),
+            ]),
+        )])
+        .into()
+    }
+
+    /// Default output selection for compiler output:
+    ///
+    /// `{ "*": { "*": [ "*" ], "": [
+    /// "abi","evm.bytecode","evm.deployedBytecode","evm.methodIdentifiers"] } }`
+    ///
+    /// Which enables it for all files and all their contracts ("*" wildcard)
+    pub fn default_output_selection() -> Self {
+        BTreeMap::from([("*".to_string(), Self::default_file_output_selection())]).into()
+    }
+
+    /// Default output selection for a single file:
+    ///
+    /// `{ "*": [ "*" ], "": [
+    /// "abi","evm.bytecode","evm.deployedBytecode","evm.methodIdentifiers"] }`
+    ///
+    /// Which enables it for all the contracts in the file ("*" wildcard)
+    pub fn default_file_output_selection() -> FileOutputSelection {
+        BTreeMap::from([(
+            "*".to_string(),
+            vec![
+                "abi".to_string(),
+                "evm.bytecode".to_string(),
+                "evm.deployedBytecode".to_string(),
+                "evm.methodIdentifiers".to_string(),
+            ],
+        )])
+    }
+
+    /// Returns an empty output selection which corresponds to an empty map `{}`
+    pub fn empty_file_output_select() -> FileOutputSelection {
+        Default::default()
+    }
+}
+
+impl AsRef<BTreeMap<String, FileOutputSelection>> for OutputSelection {
+    fn as_ref(&self) -> &BTreeMap<String, FileOutputSelection> {
+        &self.0
+    }
+}
+
+impl AsMut<BTreeMap<String, FileOutputSelection>> for OutputSelection {
+    fn as_mut(&mut self) -> &mut BTreeMap<String, FileOutputSelection> {
+        &mut self.0
+    }
+}
+
+impl From<BTreeMap<String, FileOutputSelection>> for OutputSelection {
+    fn from(s: BTreeMap<String, FileOutputSelection>) -> Self {
+        OutputSelection(s)
+    }
+}

 /// Contract level output selection
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
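For reference, the wrapper stays interchangeable with the plain maps it replaces. A small usage sketch based only on the items introduced above:

```rust
use std::collections::BTreeMap;

use ethers_solc::artifacts::output_selection::{FileOutputSelection, OutputSelection};

fn main() {
    // Request only the ABI for every contract in every file.
    let mut file_selection: FileOutputSelection = BTreeMap::new();
    file_selection.insert("*".to_string(), vec!["abi".to_string()]);

    let mut selection: OutputSelection =
        BTreeMap::from([("*".to_string(), file_selection)]).into();

    // `AsMut` exposes the inner map, which is how `Settings` mutates entries.
    selection
        .as_mut()
        .entry("*".to_string())
        .or_default()
        .insert("".to_string(), vec!["ast".to_string()]);

    assert_eq!(selection.as_ref().len(), 1);
}
```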

ethers-solc/src/cache.rs

@@ -1,6 +1,6 @@
 //! Support for compiling contracts
 use crate::{
-    artifacts::Sources,
+    artifacts::{output_selection::OutputSelection, Settings, Sources},
     config::SolcConfig,
     error::{Result, SolcError},
     resolver::GraphEdges,

@@ -14,6 +14,7 @@ use std::{
         btree_map::{BTreeMap, Entry},
         hash_map, BTreeSet, HashMap, HashSet,
     },
+    fmt,
     fs::{self},
     path::{Path, PathBuf},
    time::{Duration, UNIX_EPOCH},

@@ -599,35 +600,78 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
         }
     }

-    /// Returns only those sources that
+    /// Returns the set of [Source]s that need to be included in the [CompilerInput] in order to
+    /// recompile the project.
+    ///
+    /// We define _dirty_ sources as files that:
     /// - are new
     /// - were changed
     /// - their imports were changed
     /// - their artifact is missing
-    fn filter(&mut self, sources: Sources, version: &Version) -> Sources {
+    ///
+    /// A _dirty_ file is always included in the [CompilerInput].
+    /// Compiling a _dirty_ file can also require clean files - files that do not match any of the
+    /// above criteria - because solc needs them in order to compile the dirty file.
+    ///
+    /// Therefore, these files will also be included in the filtered output but not marked as dirty,
+    /// so that their [OutputSelection] can be optimized in the [CompilerOutput] and their (empty)
+    /// artifacts ignored.
+    fn filter(&mut self, sources: Sources, version: &Version) -> FilteredSources {
         self.fill_hashes(&sources);
-        sources
+
+        // all files that are not dirty themselves, but are pulled in by a dirty file
+        let mut imports_of_dirty = HashSet::new();
+
+        // separates all source files that fit the criteria (dirty) from those that don't (clean)
+        let (mut filtered_sources, clean_sources) = sources
             .into_iter()
-            .filter_map(|(file, source)| self.requires_solc(file, source, version))
-            .collect()
-    }
+            .map(|(file, source)| self.filter_source(file, source, version))
+            .fold(
+                (BTreeMap::default(), Vec::new()),
+                |(mut dirty_sources, mut clean_sources), source| {
+                    if source.dirty {
+                        // mark all files that are imported by a dirty file
+                        imports_of_dirty.extend(self.edges.all_imported_nodes(source.idx));
+                        dirty_sources.insert(source.file, FilteredSource::Dirty(source.source));
+                    } else {
+                        clean_sources.push(source);
+                    }

-    /// Returns `Some` if the file _needs_ to be compiled and `None` if the artifact can be reu-used
-    fn requires_solc(
-        &mut self,
+                    (dirty_sources, clean_sources)
+                },
+            );
+
+        // track new cache entries for dirty files
+        for (file, filtered) in filtered_sources.iter() {
+            self.insert_new_cache_entry(file, filtered.source(), version.clone());
+        }
+
+        for clean_source in clean_sources {
+            let FilteredSourceInfo { file, source, idx, .. } = clean_source;
+            if imports_of_dirty.contains(&idx) {
+                // file is pulled in by a dirty file
+                filtered_sources.insert(file.clone(), FilteredSource::Clean(source.clone()));
+            }
+            self.insert_filtered_source(file, source, version.clone());
+        }
+
+        filtered_sources.into()
+    }
+
+    /// Returns the state of the given source file.
+    fn filter_source(
+        &self,
         file: PathBuf,
         source: Source,
         version: &Version,
-    ) -> Option<(PathBuf, Source)> {
+    ) -> FilteredSourceInfo {
+        let idx = self.edges.node_id(&file);
         if !self.is_dirty(&file, version) &&
             self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version))
         {
-            self.insert_filtered_source(file, source, version.clone());
-            None
+            FilteredSourceInfo { file, source, idx, dirty: false }
         } else {
-            self.insert_new_cache_entry(&file, &source, version.clone());
-            Some((file, source))
+            FilteredSourceInfo { file, source, idx, dirty: true }
         }
     }

@@ -685,6 +729,157 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
     }
 }

+/// Container type for a set of [FilteredSource]
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct FilteredSources(pub BTreeMap<PathBuf, FilteredSource>);
+
+impl FilteredSources {
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    /// Returns `true` if all files are dirty
+    pub fn all_dirty(&self) -> bool {
+        self.0.values().all(|s| s.is_dirty())
+    }
+
+    /// Returns all entries that are dirty
+    pub fn dirty(&self) -> impl Iterator<Item = (&PathBuf, &FilteredSource)> + '_ {
+        self.0.iter().filter(|(_, s)| s.is_dirty())
+    }
+
+    /// Returns all entries that are clean
+    pub fn clean(&self) -> impl Iterator<Item = (&PathBuf, &FilteredSource)> + '_ {
+        self.0.iter().filter(|(_, s)| !s.is_dirty())
+    }
+
+    /// Returns all dirty files
+    pub fn dirty_files(&self) -> impl Iterator<Item = &PathBuf> + fmt::Debug + '_ {
+        self.0.iter().filter_map(|(k, s)| s.is_dirty().then(|| k))
+    }
+
+    /// While solc needs all the files to compile the actual _dirty_ files, we can tell solc to
+    /// output everything for those dirty files as currently configured in the settings, but output
+    /// nothing for the other files that are _not_ dirty.
+    ///
+    /// This will modify the [OutputSelection] of the [Settings] so that we explicitly select the
+    /// files' output based on their state.
+    pub fn into_sources(self, settings: &mut Settings) -> Sources {
+        if !self.all_dirty() {
+            // settings can be optimized
+            tracing::trace!(
+                "Optimizing output selection for {}/{} sources",
+                self.clean().count(),
+                self.len()
+            );
+
+            let selection = settings
+                .output_selection
+                .as_mut()
+                .remove("*")
+                .unwrap_or_else(OutputSelection::default_file_output_selection);
+
+            for (file, source) in self.0.iter() {
+                if source.is_dirty() {
+                    settings
+                        .output_selection
+                        .as_mut()
+                        .insert(format!("{}", file.display()), selection.clone());
+                } else {
+                    tracing::trace!("Optimizing output for {}", file.display());
+                    settings.output_selection.as_mut().insert(
+                        format!("{}", file.display()),
+                        OutputSelection::empty_file_output_select(),
+                    );
+                }
+            }
+        }
+
+        self.into()
+    }
+}
+
+impl From<FilteredSources> for Sources {
+    fn from(sources: FilteredSources) -> Self {
+        sources.0.into_iter().map(|(k, v)| (k, v.into_source())).collect()
+    }
+}
+
+impl From<Sources> for FilteredSources {
+    fn from(s: Sources) -> Self {
+        FilteredSources(s.into_iter().map(|(key, val)| (key, FilteredSource::Dirty(val))).collect())
+    }
+}
+
+impl From<BTreeMap<PathBuf, FilteredSource>> for FilteredSources {
+    fn from(s: BTreeMap<PathBuf, FilteredSource>) -> Self {
+        FilteredSources(s)
+    }
+}
+
+impl AsRef<BTreeMap<PathBuf, FilteredSource>> for FilteredSources {
+    fn as_ref(&self) -> &BTreeMap<PathBuf, FilteredSource> {
+        &self.0
+    }
+}
+
+impl AsMut<BTreeMap<PathBuf, FilteredSource>> for FilteredSources {
+    fn as_mut(&mut self) -> &mut BTreeMap<PathBuf, FilteredSource> {
+        &mut self.0
+    }
+}
+
+/// Represents the state of a filtered [Source]
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum FilteredSource {
+    /// A source that fits the _dirty_ criteria
+    Dirty(Source),
+    /// A source that does _not_ fit the _dirty_ criteria but is included in the filtered set
+    /// because a _dirty_ file pulls it in, either directly or indirectly.
+    Clean(Source),
+}
+
+impl FilteredSource {
+    /// Returns the underlying source
+    pub fn source(&self) -> &Source {
+        match self {
+            FilteredSource::Dirty(s) => s,
+            FilteredSource::Clean(s) => s,
+        }
+    }
+
+    /// Consumes the type and returns the underlying source
+    pub fn into_source(self) -> Source {
+        match self {
+            FilteredSource::Dirty(s) => s,
+            FilteredSource::Clean(s) => s,
+        }
+    }
+
+    /// Whether this file is actually dirty
+    pub fn is_dirty(&self) -> bool {
+        matches!(self, FilteredSource::Dirty(_))
+    }
+}
+
+/// Helper type that determines the state of a source file
+struct FilteredSourceInfo {
+    /// path to the source file
+    file: PathBuf,
+    /// contents of the file
+    source: Source,
+    /// idx in the [GraphEdges]
+    idx: usize,
+    /// whether this file is actually dirty
+    ///
+    /// See also [ArtifactsCacheInner::is_dirty()]
+    dirty: bool,
+}

 /// Abstraction over configured caching which can be either non-existent or an already loaded cache
 #[allow(clippy::large_enum_variant)]
 #[derive(Debug)]

@@ -756,9 +951,9 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> {
     }

     /// Filters out those sources that don't need to be compiled
-    pub fn filter(&mut self, sources: Sources, version: &Version) -> Sources {
+    pub fn filter(&mut self, sources: Sources, version: &Version) -> FilteredSources {
         match self {
-            ArtifactsCache::Ephemeral(_, _) => sources,
+            ArtifactsCache::Ephemeral(_, _) => sources.into(),
             ArtifactsCache::Cached(cache) => cache.filter(sources, version),
         }
     }
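A sketch of how the new container drives the optimization. This is not part of the commit; it assumes `Source` can still be built from its public `content` field, which was the case at the time:

```rust
use std::{collections::BTreeMap, path::PathBuf};

use ethers_solc::{
    artifacts::{Settings, Source},
    cache::{FilteredSource, FilteredSources},
};

fn main() {
    // Hypothetical contents; `Source` had a public `content: String` field.
    let dirty = Source { content: "contract A {}".to_string() };
    let clean = Source { content: "contract B {}".to_string() };

    let filtered = FilteredSources::from(BTreeMap::from([
        (PathBuf::from("A.sol"), FilteredSource::Dirty(dirty)),
        (PathBuf::from("B.sol"), FilteredSource::Clean(clean)),
    ]));
    assert_eq!(filtered.dirty().count(), 1);
    assert!(!filtered.all_dirty());

    // `into_sources` rewrites the output selection: the full selection is kept
    // for dirty A.sol, while clean B.sol gets an empty selection (`{}`).
    let mut settings = Settings::default();
    let _plain_sources = filtered.into_sources(&mut settings);
    assert!(settings.output_selection.as_ref().contains_key("A.sol"));
    assert!(settings.output_selection.as_ref()["B.sol"].is_empty());
}
```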

ethers-solc/src/compile/project.rs

@@ -103,7 +103,7 @@
 use crate::{
     artifact_output::Artifacts,
-    artifacts::{Settings, VersionedSources},
+    artifacts::{Settings, VersionedFilteredSources, VersionedSources},
     cache::ArtifactsCache,
     error::Result,
     output::AggregatedCompilerOutput,

@@ -114,7 +114,7 @@ use crate::{
 };
 use rayon::prelude::*;
-use std::collections::btree_map::BTreeMap;
+use std::{collections::btree_map::BTreeMap, path::PathBuf};

 #[derive(Debug)]
 pub struct ProjectCompiler<'a, T: ArtifactOutput> {

@@ -203,11 +203,11 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
     /// - sets proper source unit names
     /// - check cache
     fn preprocess(self) -> Result<PreprocessedState<'a, T>> {
-        let Self { edges, project, mut sources } = self;
+        let Self { edges, project, sources } = self;
         let mut cache = ArtifactsCache::new(project, edges)?;
         // retain and compile only dirty sources and all their imports
-        sources = sources.filtered(&mut cache);
+        let sources = sources.filtered(&mut cache);

         Ok(PreprocessedState { sources, cache })
     }

@@ -218,7 +218,9 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
 /// The main reason is to debug all states individually
 #[derive(Debug)]
 struct PreprocessedState<'a, T: ArtifactOutput> {
-    sources: CompilerSources,
+    /// contains all sources to compile
+    sources: FilteredCompilerSources,
+    /// cache that holds [CacheEntry] objects if caching is enabled and the project is recompiled
     cache: ArtifactsCache<'a, T>,
 }

@@ -302,11 +304,11 @@ enum CompilerSources {
 impl CompilerSources {
     /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`]
-    fn filtered<T: ArtifactOutput>(self, cache: &mut ArtifactsCache<T>) -> Self {
+    fn filtered<T: ArtifactOutput>(self, cache: &mut ArtifactsCache<T>) -> FilteredCompilerSources {
         fn filtered_sources<T: ArtifactOutput>(
             sources: VersionedSources,
             cache: &mut ArtifactsCache<T>,
-        ) -> VersionedSources {
+        ) -> VersionedFilteredSources {
             sources
                 .into_iter()
                 .map(|(solc, (version, sources))| {

@@ -314,8 +316,8 @@ impl CompilerSources {
                     let sources = cache.filter(sources, &version);
                     tracing::trace!(
                         "Detected {} dirty sources {:?}",
-                        sources.len(),
-                        sources.keys()
+                        sources.dirty().count(),
+                        sources.dirty_files()
                     );
                     (solc, (version, sources))
                 })

@@ -324,14 +326,26 @@
         match self {
             CompilerSources::Sequential(s) => {
-                CompilerSources::Sequential(filtered_sources(s, cache))
+                FilteredCompilerSources::Sequential(filtered_sources(s, cache))
             }
             CompilerSources::Parallel(s, j) => {
-                CompilerSources::Parallel(filtered_sources(s, cache), j)
+                FilteredCompilerSources::Parallel(filtered_sources(s, cache), j)
             }
         }
     }
+}
+
+/// Determines how the `solc <-> sources` pairs are executed
+#[derive(Debug, Clone)]
+#[allow(dead_code)]
+enum FilteredCompilerSources {
+    /// Compile all these sequentially
+    Sequential(VersionedFilteredSources),
+    /// Compile all these in parallel using a certain amount of jobs
+    Parallel(VersionedFilteredSources, usize),
+}

+impl FilteredCompilerSources {
     /// Compiles all the files with `Solc`
     fn compile(
         self,
@@ -339,53 +353,88 @@
         paths: &ProjectPathsConfig,
     ) -> Result<AggregatedCompilerOutput> {
         match self {
-            CompilerSources::Sequential(input) => compile_sequential(input, settings, paths),
-            CompilerSources::Parallel(input, j) => compile_parallel(input, j, settings, paths),
+            FilteredCompilerSources::Sequential(input) => {
+                compile_sequential(input, settings, paths)
+            }
+            FilteredCompilerSources::Parallel(input, j) => {
+                compile_parallel(input, j, settings, paths)
+            }
         }
     }

     #[cfg(test)]
     #[allow(unused)]
-    fn sources(&self) -> &VersionedSources {
+    fn sources(&self) -> &VersionedFilteredSources {
         match self {
-            CompilerSources::Sequential(v) => v,
-            CompilerSources::Parallel(v, _) => v,
+            FilteredCompilerSources::Sequential(v) => v,
+            FilteredCompilerSources::Parallel(v, _) => v,
         }
     }
 }

 /// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s
 fn compile_sequential(
-    input: VersionedSources,
+    input: VersionedFilteredSources,
     settings: &Settings,
     paths: &ProjectPathsConfig,
 ) -> Result<AggregatedCompilerOutput> {
     let mut aggregated = AggregatedCompilerOutput::default();
     tracing::trace!("compiling {} jobs sequentially", input.len());
-    for (solc, (version, sources)) in input {
-        if sources.is_empty() {
+    for (solc, (version, filtered_sources)) in input {
+        if filtered_sources.is_empty() {
             // nothing to compile
+            tracing::trace!(
+                "skip solc {} {} for empty sources set",
+                solc.as_ref().display(),
+                version
+            );
             continue
         }
         tracing::trace!(
             "compiling {} sources with solc \"{}\" {:?}",
-            sources.len(),
+            filtered_sources.len(),
             solc.as_ref().display(),
             solc.args
         );
+
+        let dirty_files: Vec<PathBuf> = filtered_sources.dirty_files().cloned().collect();
+
+        // depending on the composition of the filtered sources, the output selection can be
+        // optimized
+        let mut opt_settings = settings.clone();
+        let sources = filtered_sources.into_sources(&mut opt_settings);
+
         for input in CompilerInput::with_sources(sources) {
+            let actually_dirty = input
+                .sources
+                .keys()
+                .filter(|f| dirty_files.contains(f))
+                .cloned()
+                .collect::<Vec<_>>();
+            if actually_dirty.is_empty() {
+                // nothing to compile for this particular language, all dirty files are in the other
+                // language set
+                tracing::trace!(
+                    "skip solc {} {} compilation of {} compiler input due to empty source set",
+                    solc.as_ref().display(),
+                    version,
+                    input.language
+                );
+                continue
+            }
             let input = input
-                .settings(settings.clone())
+                .settings(opt_settings.clone())
                 .normalize_evm_version(&version)
                 .with_remappings(paths.remappings.clone());
             tracing::trace!(
                 "calling solc `{}` with {} sources {:?}",
                 version,
                 input.sources.len(),
                 input.sources.keys()
             );
-            report::solc_spawn(&solc, &version, &input);
+            report::solc_spawn(&solc, &version, &input, &actually_dirty);
             let output = solc.compile_exact(&input)?;
             report::solc_success(&solc, &version, &output);
             tracing::trace!("compiled input, output has error: {}", output.has_error());

@@ -397,7 +446,7 @@ fn compile_sequential(
 /// compiles the input set using `num_jobs` threads
 fn compile_parallel(
-    input: VersionedSources,
+    input: VersionedFilteredSources,
     num_jobs: usize,
     settings: &Settings,
     paths: &ProjectPathsConfig,

@@ -410,18 +459,49 @@ fn compile_parallel(
     );

     let mut jobs = Vec::with_capacity(input.len());
-    for (solc, (version, sources)) in input {
-        if sources.is_empty() {
+    for (solc, (version, filtered_sources)) in input {
+        if filtered_sources.is_empty() {
             // nothing to compile
+            tracing::trace!(
+                "skip solc {} {} for empty sources set",
+                solc.as_ref().display(),
+                version
+            );
             continue
         }
+
+        let dirty_files: Vec<PathBuf> = filtered_sources.dirty_files().cloned().collect();
+
+        // depending on the composition of the filtered sources, the output selection can be
+        // optimized
+        let mut opt_settings = settings.clone();
+        let sources = filtered_sources.into_sources(&mut opt_settings);
+
         for input in CompilerInput::with_sources(sources) {
+            let actually_dirty = input
+                .sources
+                .keys()
+                .filter(|f| dirty_files.contains(f))
+                .cloned()
+                .collect::<Vec<_>>();
+            if actually_dirty.is_empty() {
+                // nothing to compile for this particular language, all dirty files are in the other
+                // language set
+                tracing::trace!(
+                    "skip solc {} {} compilation of {} compiler input due to empty source set",
+                    solc.as_ref().display(),
+                    version,
+                    input.language
+                );
+                continue
+            }
             let job = input
                 .settings(settings.clone())
                 .normalize_evm_version(&version)
                 .with_remappings(paths.remappings.clone());

-            jobs.push((solc.clone(), version.clone(), job))
+            jobs.push((solc.clone(), version.clone(), job, actually_dirty))
         }
     }

@@ -429,7 +509,7 @@ fn compile_parallel(
     let pool = rayon::ThreadPoolBuilder::new().num_threads(num_jobs).build().unwrap();
     let outputs = pool.install(move || {
         jobs.into_par_iter()
-            .map(|(solc, version, input)| {
+            .map(|(solc, version, input, actually_dirty)| {
                 tracing::trace!(
                     "calling solc `{}` {:?} with {} sources: {:?}",
                     version,

@@ -437,7 +517,7 @@
                     input.sources.len(),
                     input.sources.keys()
                 );
-                report::solc_spawn(&solc, &version, &input);
+                report::solc_spawn(&solc, &version, &input, &actually_dirty);
                 solc.compile(&input).map(move |output| {
                     report::solc_success(&solc, &version, &output);
                     (version, output)

@@ -501,6 +581,105 @@ mod tests {
         assert!(prep.cache.as_cached().unwrap().dirty_source_files.is_empty())
     }

+    #[test]
+    fn can_recompile_with_optimized_output() {
+        let tmp = TempProject::dapptools().unwrap();
+
+        tmp.add_source(
+            "A",
+            r#"
+    pragma solidity ^0.8.10;
+    import "./B.sol";
+    contract A {}
+   "#,
+        )
+        .unwrap();
+
+        tmp.add_source(
+            "B",
+            r#"
+    pragma solidity ^0.8.10;
+    contract B {
+        function hello() public {}
+    }
+    import "./C.sol";
+   "#,
+        )
+        .unwrap();
+
+        tmp.add_source(
+            "C",
+            r#"
+    pragma solidity ^0.8.10;
+    contract C {
+        function hello() public {}
+    }
+   "#,
+        )
+        .unwrap();
+
+        let compiled = tmp.compile().unwrap();
+        assert!(!compiled.has_compiler_errors());
+        tmp.artifacts_snapshot().unwrap().assert_artifacts_essentials_present();
+
+        // modify A.sol
+        tmp.add_source(
+            "A",
+            r#"
+    pragma solidity ^0.8.10;
+    import "./B.sol";
+    contract A {
+        function testExample() public {}
+    }
+   "#,
+        )
+        .unwrap();
+
+        let compiler = ProjectCompiler::new(tmp.project()).unwrap();
+        let state = compiler.preprocess().unwrap();
+        let sources = state.sources.sources();
+
+        // single solc
+        assert_eq!(sources.len(), 1);
+
+        let (_, filtered) = sources.values().next().unwrap();
+
+        // 3 contracts total
+        assert_eq!(filtered.0.len(), 3);
+        // A is modified
+        assert_eq!(filtered.dirty().count(), 1);
+        assert!(filtered.dirty_files().next().unwrap().ends_with("A.sol"));
+
+        let state = state.compile().unwrap();
+        assert_eq!(state.output.sources.len(), 3);
+        for (f, source) in &state.output.sources {
+            if f.ends_with("A.sol") {
+                assert!(source.ast.is_object());
+            } else {
+                assert!(source.ast.is_null());
+            }
+        }
+        assert_eq!(state.output.contracts.len(), 1);
+        let (a, c) = state.output.contracts_iter().next().unwrap();
+        assert_eq!(a, "A");
+        assert!(c.abi.is_some() && c.evm.is_some());
+
+        let state = state.write_artifacts().unwrap();
+        assert_eq!(state.compiled_artifacts.as_ref().len(), 1);
+
+        let out = state.write_cache().unwrap();
+
+        let artifacts: Vec<_> = out.into_artifacts().collect();
+        assert_eq!(artifacts.len(), 3);
+        for (_, artifact) in artifacts {
+            let c = artifact.into_contract_bytecode();
+            assert!(c.abi.is_some() && c.bytecode.is_some() && c.deployed_bytecode.is_some());
+        }
+
+        tmp.artifacts_snapshot().unwrap().assert_artifacts_essentials_present();
+    }
+
     #[test]
     #[ignore]
     fn can_compile_real_project() {
ethers-solc/src/lib.rs

@@ -31,6 +31,7 @@ pub mod utils;
 use crate::{
     artifacts::{Contract, Sources},
+    cache::SolFilesCache,
     contracts::VersionedContracts,
     error::{SolcError, SolcIoError},
 };

@@ -125,6 +126,12 @@ impl<T: ArtifactOutput> Project<T> {
         &self.artifacts
     }

+    /// Convenience function to read the cache file.
+    /// See also [SolFilesCache::read_joined()]
+    pub fn read_cache_file(&self) -> Result<SolFilesCache> {
+        SolFilesCache::read_joined(&self.paths)
+    }
+
     /// Applies the configured arguments to the given `Solc`
     ///
     /// This will set the `--allow-paths` to the paths configured for the `Project`, if any.
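Usage is straightforward. This sketch assumes a project in the current directory that has been compiled before, so a cache file exists, and that `SolFilesCache` exposes its `files` map as it did at the time:

```rust
use ethers_solc::Project;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Assumes a dapptools/foundry-style layout in the current directory that
    // has been compiled at least once, so a cache file exists.
    let project = Project::builder().build()?;
    let cache = project.read_cache_file()?;

    // `files` maps each cached source file to its cache entry.
    println!("cache tracks {} files", cache.files.len());
    Ok(())
}
```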

ethers-solc/src/project_util/mock.rs

@@ -1,9 +1,29 @@
 //! Helpers to generate mock projects

-use crate::{error::Result, remappings::Remapping, ProjectPathsConfig};
-use rand::{self, seq::SliceRandom, Rng};
+use crate::{
+    error::Result, remappings::Remapping, resolver::GraphEdges, Graph, ProjectPathsConfig,
+    SolcError,
+};
+use rand::{
+    self,
+    distributions::{Distribution, Uniform},
+    seq::SliceRandom,
+    Rng,
+};
 use serde::{Deserialize, Serialize};
-use std::{collections::BTreeSet, path::Path};
+use std::{
+    collections::{BTreeSet, HashMap},
+    path::{Path, PathBuf},
+};
+
+/// Represents the layout of a project
+#[derive(Serialize, Deserialize, Default)]
+pub struct MockProjectSkeleton {
+    /// all files for the project
+    pub files: Vec<MockFile>,
+    /// all libraries
+    pub libraries: Vec<MockLib>,
+}

 /// Represents a virtual project
 #[derive(Serialize)]

@@ -11,77 +31,109 @@ pub struct MockProjectGenerator {
     /// how to name things
     #[serde(skip)]
     name_strategy: Box<dyn NamingStrategy + 'static>,
-    /// id counter for a file
-    next_file_id: usize,
-    /// id counter for a file
-    next_lib_id: usize,
-    /// all files for the project
-    files: Vec<MockFile>,
-    /// all libraries
-    libraries: Vec<MockLib>,
-}
-
-impl Default for MockProjectGenerator {
-    fn default() -> Self {
-        Self {
-            name_strategy: Box::new(SimpleNamingStrategy::default()),
-            next_file_id: 0,
-            next_lib_id: 0,
-            files: Default::default(),
-            libraries: Default::default(),
-        }
-    }
+    #[serde(flatten)]
+    inner: MockProjectSkeleton,
 }

 impl MockProjectGenerator {
+    /// Create a new project and populate it using the given settings
+    pub fn new(settings: &MockProjectSettings) -> Self {
+        let mut mock = Self::default();
+        mock.populate(settings);
+        mock
+    }
+
+    /// Create a skeleton of a real project
+    pub fn create(paths: &ProjectPathsConfig) -> Result<Self> {
+        fn get_libs(edges: &GraphEdges, lib_folder: &Path) -> Option<HashMap<PathBuf, Vec<usize>>> {
+            let mut libs: HashMap<_, Vec<_>> = HashMap::new();
+            for lib_file in edges.library_files() {
+                let component =
+                    edges.node_path(lib_file).strip_prefix(lib_folder).ok()?.components().next()?;
+                libs.entry(lib_folder.join(component)).or_default().push(lib_file);
+            }
+            Some(libs)
+        }
+
+        let graph = Graph::resolve(paths)?;
+        let mut gen = MockProjectGenerator::default();
+        let (_, edges) = graph.into_sources();
+
+        // add all files as source files
+        gen.add_sources(edges.files().count());
+
+        // stores libs and their files
+        let libs = get_libs(
+            &edges,
+            &paths.libraries.get(0).cloned().unwrap_or_else(|| paths.root.join("lib")),
+        )
+        .ok_or_else(|| SolcError::msg("Failed to detect libs"))?;
+
+        // mark all files as libs
+        for (lib_id, lib_files) in libs.into_values().enumerate() {
+            let lib_name = gen.name_strategy.new_lib_name(lib_id);
+            let offset = gen.inner.files.len();
+            let lib = MockLib { name: lib_name, id: lib_id, num_files: lib_files.len(), offset };
+            for lib_file in lib_files {
+                let file = &mut gen.inner.files[lib_file];
+                file.lib_id = Some(lib_id);
+                file.name = gen.name_strategy.new_lib_name(file.id);
+            }
+            gen.inner.libraries.push(lib);
+        }
+
+        for id in edges.files() {
+            for import in edges.imported_nodes(id).iter().copied() {
+                let import = gen.get_import(import);
+                gen.inner.files[id].imports.insert(import);
+            }
+        }
+
+        Ok(gen)
+    }
+
+    /// Consumes the type and returns the underlying skeleton
+    pub fn into_inner(self) -> MockProjectSkeleton {
+        self.inner
+    }
+
     /// Generate all solidity files and write under the paths config
     pub fn write_to(&self, paths: &ProjectPathsConfig, version: impl AsRef<str>) -> Result<()> {
         let version = version.as_ref();
-        for file in self.files.iter() {
-            let mut imports = Vec::with_capacity(file.imports.len());
-
-            for import in file.imports.iter() {
-                match *import {
-                    MockImport::Internal(f) => {
-                        imports.push(format!("import \"./{}.sol\";", self.files[f].name));
-                    }
-                    MockImport::External(lib, f) => {
-                        imports.push(format!(
-                            "import \"{}/{}.sol\";",
-                            self.libraries[lib].name, self.files[f].name
-                        ));
-                    }
-                }
-            }
-
-            let content = format!(
-                r#"
-// SPDX-License-Identifier: UNLICENSED
-pragma solidity {};
-{}
-contract {} {{}}
-            "#,
-                version,
-                imports.join("\n"),
-                file.name
-            );
-
-            let mut target = if let Some(lib) = file.lib_id {
-                paths.root.join("lib").join(&self.libraries[lib].name).join("src").join(&file.name)
-            } else {
-                paths.sources.join(&file.name)
-            };
-            target.set_extension("sol");
-
-            super::create_contract_file(target, content)?;
+        for file in self.inner.files.iter() {
+            let imports = self.get_imports(file.id);
+            let content = file.mock_content(version, imports.join("\n").as_str());
+            super::create_contract_file(file.target_path(self, paths), content)?;
         }

         Ok(())
     }
+
+    fn get_imports(&self, file: usize) -> Vec<String> {
+        let file = &self.inner.files[file];
+        let mut imports = Vec::with_capacity(file.imports.len());
+
+        for import in file.imports.iter() {
+            match *import {
+                MockImport::Internal(f) => {
+                    imports.push(format!("import \"./{}.sol\";", self.inner.files[f].name));
+                }
+                MockImport::External(lib, f) => {
+                    imports.push(format!(
+                        "import \"{}/{}.sol\";",
+                        self.inner.libraries[lib].name, self.inner.files[f].name
+                    ));
+                }
+            }
+        }
+        imports
+    }

     /// Returns all the remappings for the project for the given root path
     pub fn remappings_at(&self, root: &Path) -> Vec<Remapping> {
-        self.libraries
+        self.inner
+            .libraries
             .iter()
             .map(|lib| {
                 let path = root.join("lib").join(&lib.name).join("src");

@@ -92,19 +144,13 @@ contract {} {{}}
     /// Returns all the remappings for the project
     pub fn remappings(&self) -> Vec<Remapping> {
-        self.libraries
+        self.inner
+            .libraries
             .iter()
             .map(|lib| format!("{0}/=lib/{0}/src/", lib.name).parse().unwrap())
             .collect()
     }

-    /// Create a new project and populate it using the given settings
-    pub fn new(settings: &MockProjectSettings) -> Self {
-        let mut mock = Self::default();
-        mock.populate(settings);
-        mock
-    }
-
     /// Generates a random project with random settings
     pub fn random() -> Self {
         let settings = MockProjectSettings::random();

@@ -122,24 +168,21 @@ contract {} {{}}
         self.populate_imports(settings)
     }

-    fn next_file_id(&mut self) -> usize {
-        let next = self.next_file_id;
-        self.next_file_id += 1;
-        next
+    fn next_file_id(&self) -> usize {
+        self.inner.files.len()
     }

-    fn next_lib_id(&mut self) -> usize {
-        let next = self.next_lib_id;
-        self.next_lib_id += 1;
-        next
+    fn next_lib_id(&self) -> usize {
+        self.inner.libraries.len()
     }

     /// Adds a new source file
     pub fn add_source(&mut self) -> &mut Self {
         let id = self.next_file_id();
         let name = self.name_strategy.new_source_file_name(id);
-        let file = MockFile { id, name, imports: Default::default(), lib_id: None };
-        self.files.push(file);
+        let file =
+            MockFile { id, name, imports: Default::default(), lib_id: None, emit_artifacts: true };
+        self.inner.files.push(file);
         self
     }

@@ -151,22 +194,50 @@ contract {} {{}}
         self
     }

-    /// Adds a new lib with the number of lib files
-    pub fn add_lib(&mut self, num_files: usize) -> &mut Self {
-        let lib_id = self.next_lib_id();
-        let lib_name = self.name_strategy.new_lib_name(lib_id);
-        let offset = self.files.len();
-        for _ in 0..num_files {
-            let id = self.next_file_id();
-            let name = self.name_strategy.new_lib_file_name(id);
-            self.files.push(MockFile {
-                id,
-                name,
-                imports: Default::default(),
-                lib_id: Some(lib_id),
-            });
+    /// Adds a new lib file
+    pub fn add_lib_file(&mut self, lib_id: usize) -> &mut Self {
+        let id = self.next_file_id();
+        let name = self.name_strategy.new_source_file_name(id);
+        let file = MockFile {
+            id,
+            name,
+            imports: Default::default(),
+            lib_id: Some(lib_id),
+            emit_artifacts: true,
+        };
+        self.inner.files.push(file);
+        self
+    }
+
+    /// Adds `num` new lib files
+    pub fn add_lib_files(&mut self, num: usize, lib_id: usize) -> &mut Self {
+        for _ in 0..num {
+            self.add_lib_file(lib_id);
+        }
+        self
+    }
+
+    /// Adds a new lib with the number of lib files
+    pub fn add_lib(&mut self, num_files: usize) -> &mut Self {
+        let lib_id = self.next_lib_id();
+        let lib_name = self.name_strategy.new_lib_name(lib_id);
+        let offset = self.inner.files.len();
+        self.add_lib_files(num_files, lib_id);
+        self.inner.libraries.push(MockLib { name: lib_name, id: lib_id, num_files, offset });
+        self
+    }
+
+    /// randomly assign empty file status so that mocked files don't emit artifacts
+    pub fn assign_empty_files(&mut self) -> &mut Self {
+        let mut rng = rand::thread_rng();
+        let die = Uniform::from(0..self.inner.files.len());
+        for file in self.inner.files.iter_mut() {
+            let throw = die.sample(&mut rng);
+            if throw == 0 {
+                // give it a 1 in num(files) chance that the file will be empty
+                file.emit_artifacts = false;
+            }
         }
-        self.libraries.push(MockLib { name: lib_name, id: lib_id, num_files, offset });
         self
     }

@@ -175,26 +246,26 @@ contract {} {{}}
         let mut rng = rand::thread_rng();

         // populate imports
-        for id in 0..self.files.len() {
-            let imports = if let Some(lib) = self.files[id].lib_id {
+        for id in 0..self.inner.files.len() {
+            let imports = if let Some(lib) = self.inner.files[id].lib_id {
                 let num_imports = rng
                     .gen_range(settings.min_imports..=settings.max_imports)
-                    .min(self.libraries[lib].num_files.saturating_sub(1));
+                    .min(self.inner.libraries[lib].num_files.saturating_sub(1));
                 self.unique_imports_for_lib(&mut rng, lib, id, num_imports)
             } else {
                 let num_imports = rng
                     .gen_range(settings.min_imports..=settings.max_imports)
-                    .min(self.files.len().saturating_sub(1));
+                    .min(self.inner.files.len().saturating_sub(1));
                 self.unique_imports_for_source(&mut rng, id, num_imports)
             };
-            self.files[id].imports = imports;
+            self.inner.files[id].imports = imports;
         }
         self
     }

     fn get_import(&self, id: usize) -> MockImport {
-        if let Some(lib) = self.files[id].lib_id {
+        if let Some(lib) = self.inner.files[id].lib_id {
             MockImport::External(lib, id)
         } else {
             MockImport::Internal(id)

@@ -203,17 +274,17 @@ contract {} {{}}
     /// All file ids
     pub fn file_ids(&self) -> impl Iterator<Item = usize> + '_ {
-        self.files.iter().map(|f| f.id)
+        self.inner.files.iter().map(|f| f.id)
     }

     /// All ids of internal files
     pub fn internal_file_ids(&self) -> impl Iterator<Item = usize> + '_ {
-        self.files.iter().filter(|f| !f.is_external()).map(|f| f.id)
+        self.inner.files.iter().filter(|f| !f.is_external()).map(|f| f.id)
     }

     /// All ids of external files
     pub fn external_file_ids(&self) -> impl Iterator<Item = usize> + '_ {
-        self.files.iter().filter(|f| f.is_external()).map(|f| f.id)
+        self.inner.files.iter().filter(|f| f.is_external()).map(|f| f.id)
     }

     /// generates exactly `num` unique imports in the range of all files

@@ -227,12 +298,27 @@ contract {} {{}}
         id: usize,
         num: usize,
     ) -> BTreeSet<MockImport> {
-        assert!(self.files.len() > num);
-        let mut imports: Vec<_> = (0..self.files.len()).collect();
+        assert!(self.inner.files.len() > num);
+        let mut imports: Vec<_> = (0..self.inner.files.len()).collect();
         imports.shuffle(rng);
         imports.into_iter().filter(|i| *i != id).map(|id| self.get_import(id)).take(num).collect()
     }

+    /// Modifies the content of the given file
+    pub fn modify_file(
+        &self,
+        id: usize,
+        paths: &ProjectPathsConfig,
+        version: impl AsRef<str>,
+    ) -> Result<PathBuf> {
+        let file = &self.inner.files[id];
+        let target = file.target_path(self, paths);
+        let content = file.modified_content(version, self.get_imports(id).join("\n").as_str());
+        super::create_contract_file(target.clone(), content)?;
+
+        Ok(target)
+    }
+
     /// generates exactly `num` unique imports in the range of a lib's files
     ///
     /// # Panics

@@ -245,7 +331,7 @@ contract {} {{}}
         id: usize,
         num: usize,
     ) -> BTreeSet<MockImport> {
-        let lib = &self.libraries[lib_id];
+        let lib = &self.inner.libraries[lib_id];
         assert!(lib.num_files > num);
         let mut imports: Vec<_> = (lib.offset..(lib.offset + lib.len())).collect();
         imports.shuffle(rng);

@@ -253,6 +339,18 @@ contract {} {{}}
     }
 }

+impl From<MockProjectSkeleton> for MockProjectGenerator {
+    fn from(inner: MockProjectSkeleton) -> Self {
+        Self { inner, ..Default::default() }
+    }
+}
+
+impl Default for MockProjectGenerator {
+    fn default() -> Self {
+        Self { name_strategy: Box::new(SimpleNamingStrategy::default()), inner: Default::default() }
+    }
+}
+
 /// Used to determine the names for elements
 trait NamingStrategy {
     /// Return a new name for the given source file id

@@ -296,6 +394,8 @@ pub struct MockFile {
     pub imports: BTreeSet<MockImport>,
     /// lib id if this file is part of a lib
     pub lib_id: Option<usize>,
+    /// whether this file should emit artifacts
+    pub emit_artifacts: bool,
 }

 impl MockFile {

@@ -303,6 +403,61 @@ impl MockFile {
     pub fn is_external(&self) -> bool {
         self.lib_id.is_some()
     }
+
+    pub fn target_path(&self, gen: &MockProjectGenerator, paths: &ProjectPathsConfig) -> PathBuf {
+        let mut target = if let Some(lib) = self.lib_id {
+            paths.root.join("lib").join(&gen.inner.libraries[lib].name).join("src").join(&self.name)
+        } else {
+            paths.sources.join(&self.name)
+        };
+        target.set_extension("sol");
+
+        target
+    }
+
+    /// Returns the content to use for a modified file
+    ///
+    /// The content here is arbitrary, it should only differ from the mocked content
+    pub fn modified_content(&self, version: impl AsRef<str>, imports: &str) -> String {
+        format!(
+            r#"
+// SPDX-License-Identifier: UNLICENSED
+pragma solidity {};
+{}
+contract {} {{
+    function hello() public {{}}
+}}
+            "#,
+            version.as_ref(),
+            imports,
+            self.name
+        )
+    }
+
+    /// Returns a mocked content for the file
+    pub fn mock_content(&self, version: impl AsRef<str>, imports: &str) -> String {
+        let version = version.as_ref();
+        if self.emit_artifacts {
+            format!(
+                r#"
+// SPDX-License-Identifier: UNLICENSED
+pragma solidity {};
+{}
+contract {} {{}}
+            "#,
+                version, imports, self.name
+            )
+        } else {
+            format!(
+                r#"
+// SPDX-License-Identifier: UNLICENSED
+pragma solidity {};
+{}
+            "#,
+                version, imports,
+            )
+        }
+    }
 }

 #[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Serialize, Deserialize)]

@@ -350,6 +505,8 @@ pub struct MockProjectSettings {
     pub min_imports: usize,
     /// max amount of import statements a file can use
     pub max_imports: usize,
+    /// whether to also use files that don't emit artifacts
+    pub allow_no_artifacts_files: bool,
 }

 impl MockProjectSettings {

@@ -363,6 +520,7 @@ impl MockProjectSettings {
             num_lib_files: rng.gen_range(1..10),
             min_imports: rng.gen_range(0..3),
             max_imports: rng.gen_range(4..10),
+            allow_no_artifacts_files: true,
         }
     }

@@ -375,6 +533,7 @@ impl MockProjectSettings {
             num_lib_files: 15,
             min_imports: 3,
             max_imports: 12,
+            allow_no_artifacts_files: true,
         }
     }
 }

@@ -382,7 +541,14 @@ impl MockProjectSettings {
 impl Default for MockProjectSettings {
     fn default() -> Self {
         // these are arbitrary
-        Self { num_sources: 20, num_libs: 2, num_lib_files: 10, min_imports: 0, max_imports: 5 }
+        Self {
+            num_sources: 20,
+            num_libs: 2,
+            num_lib_files: 10,
+            min_imports: 0,
+            max_imports: 5,
+            allow_no_artifacts_files: true,
+        }
     }
 }
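A minimal sketch of driving the generator from a test. Library generation is turned off here so no remappings need to be configured; the field values are arbitrary:

```rust
use ethers_solc::project_util::{
    mock::{MockProjectGenerator, MockProjectSettings},
    TempProject,
};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Arbitrary settings; libraries are disabled so no remappings are needed.
    let settings = MockProjectSettings {
        num_sources: 5,
        num_libs: 0,
        num_lib_files: 0,
        min_imports: 0,
        max_imports: 2,
        allow_no_artifacts_files: false,
    };

    // Generate a random project layout and write it into a temp project.
    let gen = MockProjectGenerator::new(&settings);
    let tmp = TempProject::dapptools()?;
    tmp.mock(&gen, "^0.8.10")?;

    let compiled = tmp.compile()?;
    assert!(!compiled.has_compiler_errors());
    Ok(())
}
```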

ethers-solc/src/project_util/mod.rs

@ -6,8 +6,8 @@ use crate::{
hh::HardhatArtifacts, hh::HardhatArtifacts,
project_util::mock::{MockProjectGenerator, MockProjectSettings}, project_util::mock::{MockProjectGenerator, MockProjectSettings},
utils::tempdir, utils::tempdir,
ArtifactOutput, ConfigurableArtifacts, PathStyle, Project, ProjectCompileOutput, Artifact, ArtifactOutput, Artifacts, ConfigurableArtifacts, ConfigurableContractArtifact,
ProjectPathsConfig, SolcIoError, PathStyle, Project, ProjectCompileOutput, ProjectPathsConfig, SolFilesCache, SolcIoError,
}; };
use fs_extra::{dir, file}; use fs_extra::{dir, file};
use std::{ use std::{
@ -214,6 +214,13 @@ contract {} {{}}
create_contract_file(source, content) create_contract_file(source, content)
} }
/// Returns a snapshot of all cached artifacts
pub fn artifacts_snapshot(&self) -> Result<ArtifactsSnapshot<T::Artifact>> {
let cache = self.project().read_cache_file()?;
let artifacts = cache.read_artifacts::<T::Artifact>()?;
Ok(ArtifactsSnapshot { cache, artifacts })
}
/// Populate the project with mock files /// Populate the project with mock files
pub fn mock(&self, gen: &MockProjectGenerator, version: impl AsRef<str>) -> Result<()> { pub fn mock(&self, gen: &MockProjectGenerator, version: impl AsRef<str>) -> Result<()> {
gen.write_to(self.paths(), version) gen.write_to(self.paths(), version)
@ -407,6 +414,25 @@ impl<T: ArtifactOutput> AsRef<Project<T>> for TempProject<T> {
} }
} }
/// The cache file and all the artifacts it references
#[derive(Debug, Clone)]
pub struct ArtifactsSnapshot<T> {
pub cache: SolFilesCache,
pub artifacts: Artifacts<T>,
}
impl ArtifactsSnapshot<ConfigurableContractArtifact> {
/// Ensures that all artifacts have an ABI, bytecode and deployed bytecode
pub fn assert_artifacts_essentials_present(&self) {
for artifact in self.artifacts.artifact_files() {
let c = artifact.artifact.clone().into_compact_contract();
assert!(c.abi.is_some());
assert!(c.bin.is_some());
assert!(c.bin_runtime.is_some());
}
}
}
/// commonly used options for copying entire folders
fn dir_copy_options() -> dir::CopyOptions {
dir::CopyOptions {
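As a usage sketch, a test can take a snapshot right after compiling to verify the cached artifacts are intact; the `files` map on the cache is an assumption here, used only for the emptiness check:

let snapshot = project.artifacts_snapshot()?;
// every cached artifact must still carry its ABI, bytecode and deployed bytecode
snapshot.assert_artifacts_essentials_present();
// the raw cache is available as well, e.g. to inspect what it tracks
assert!(!snapshot.cache.files.is_empty());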
@@ -21,7 +21,7 @@ use std::{
cell::RefCell,
error::Error,
fmt,
path::{Path, PathBuf},
ptr::NonNull,
sync::{
atomic::{AtomicBool, AtomicUsize, Ordering},
@@ -91,7 +91,25 @@ where
/// A `Reporter` is entirely passive and only listens to incoming "events".
pub trait Reporter: 'static {
/// Callback invoked right before [`Solc::compile()`] is called
///
/// This callback receives the [Solc] instance, its [Version], the complete [CompilerInput]
/// and all files that triggered the compile job. The dirty files are provided to give better
/// feedback about what is actually being compiled.
///
/// If caching is enabled and there was a previous successful solc run, the dirty files set
/// contains the files that must be recompiled, while the [CompilerInput] contains all files:
/// the dirty files plus all of their dependencies.
///
/// If this is a fresh compile, then the [Sources] set of the [CompilerInput] matches the
/// dirty files set.
fn on_solc_spawn(
    &self,
    _solc: &Solc,
    _version: &Version,
    _input: &CompilerInput,
    _dirty_files: &[PathBuf],
) {
}
/// Invoked with the `CompilerOutput` if [`Solc::compile()`] was successful
fn on_solc_success(&self, _solc: &Solc, _version: &Version, _output: &CompilerOutput) {}
@@ -151,8 +169,13 @@ impl dyn Reporter {
}
}
pub(crate) fn solc_spawn(
    solc: &Solc,
    version: &Version,
    input: &CompilerInput,
    dirty_files: &[PathBuf],
) {
get_default(|r| r.reporter.on_solc_spawn(solc, version, input, dirty_files));
}
pub(crate) fn solc_success(solc: &Solc, version: &Version, output: &CompilerOutput) {
@@ -296,10 +319,16 @@ pub struct BasicStdoutReporter(());
impl Reporter for BasicStdoutReporter {
/// Callback invoked right before [`Solc::compile()`] is called
fn on_solc_spawn(
    &self,
    _solc: &Solc,
    version: &Version,
    _input: &CompilerInput,
    dirty_files: &[PathBuf],
) {
println!(
"Compiling {} files with {}.{}.{}",
dirty_files.len(),
version.major,
version.minor,
version.patch
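For illustration, a minimal custom reporter built on the extended callback could look like the sketch below; only `on_solc_spawn` is overridden, all other trait methods keep their default no-op bodies, and registering the reporter with the report module is omitted:

struct DirtyFilesReporter;

impl Reporter for DirtyFilesReporter {
    fn on_solc_spawn(
        &self,
        _solc: &Solc,
        version: &Version,
        input: &CompilerInput,
        dirty_files: &[PathBuf],
    ) {
        // `dirty_files` is what actually triggered this run, while
        // `input.sources` also holds every dependency needed to compile them
        println!(
            "solc {}: {} dirty of {} total sources",
            version,
            dirty_files.len(),
            input.sources.len()
        );
    }
}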
@@ -86,6 +86,26 @@ pub struct GraphEdges {
}
impl GraphEdges {
/// Returns the number of source files, i.e. the originally included input files
pub fn num_source_files(&self) -> usize {
self.num_input_files
}
/// Returns an iterator over all file indices
pub fn files(&self) -> impl Iterator<Item = usize> + '_ {
0..self.edges.len()
}
/// Returns an iterator over all source file indices
pub fn source_files(&self) -> impl Iterator<Item = usize> + '_ {
0..self.num_input_files
}
/// Returns an iterator over all library file indices, i.e. files that were added while resolving imports
pub fn library_files(&self) -> impl Iterator<Item = usize> + '_ {
self.files().skip(self.num_input_files)
}
/// Returns a list of nodes the given node index points to for the given kind.
pub fn imported_nodes(&self, from: usize) -> &[usize] {
&self.edges[from]
@@ -110,6 +130,11 @@ impl GraphEdges {
self.indices[file.as_ref()]
}
/// Returns the path of the given node
pub fn node_path(&self, id: usize) -> &PathBuf {
&self.rev_indices[&id]
}
/// Returns true if the `file` was originally included when the graph was first created and not
/// added when all `imports` were resolved
pub fn is_input_file(&self, file: impl AsRef<Path>) -> bool {
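A short sketch of how the new accessors compose, assuming a resolved graph's `edges: GraphEdges`; source nodes occupy the first `num_source_files()` indices and library nodes follow:

for id in edges.source_files() {
    println!("source:  {}", edges.node_path(id).display());
}
for id in edges.library_files() {
    println!("library: {}", edges.node_path(id).display());
}
// every node is either a source file or a library file
assert_eq!(
    edges.files().count(),
    edges.num_source_files() + edges.library_files().count()
);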
@@ -30,7 +30,7 @@ impl From<(MockProjectSettings, &'static str)> for MockSettings {
/// Helper function to run a test and report the used generator if the closure failed.
fn run_mock(
settings: impl Into<MockSettings>,
f: impl FnOnce(&mut TempProject, &MockProjectGenerator) -> Result<()>,
) -> TempProject {
let MockSettings { settings, version } = settings.into();
let gen = MockProjectGenerator::new(&settings);
@@ -39,7 +39,7 @@ fn run_mock(
project.paths_mut().remappings.extend(remappings);
project.mock(&gen, version).unwrap();
if let Err(err) = f(&mut project, &gen) {
panic!(
"mock failed: `{}` with mock settings:\n {}",
err,
@@ -54,7 +54,7 @@ fn run_mock(
fn run_basic(settings: impl Into<MockSettings>) {
let settings = settings.into();
let version = settings.version;
run_mock(settings, |project, _| {
project.ensure_no_errors_recompile_unchanged()?;
project.add_basic_source("Dummy", version)?;
project.ensure_changed()?;
@@ -79,3 +79,29 @@ fn can_compile_mocked_multi() {
fn can_compile_mocked_large() {
run_basic(MockProjectSettings::large())
}
#[test]
fn can_compile_mocked_modified() {
run_mock(MockProjectSettings::random(), |project, gen| {
project.ensure_no_errors_recompile_unchanged()?;
// modify a single file in the middle of the generated set
gen.modify_file(gen.file_ids().count() / 2, project.paths(), DEFAULT_VERSION)?;
project.ensure_changed()?;
project.artifacts_snapshot()?.assert_artifacts_essentials_present();
Ok(())
});
}
#[test]
fn can_compile_mocked_modified_all() {
run_mock(MockProjectSettings::random(), |project, gen| {
project.ensure_no_errors_recompile_unchanged()?;
// modify every file, one at a time, recompiling and checking artifacts after each change
for id in gen.file_ids() {
gen.modify_file(id, project.paths(), DEFAULT_VERSION)?;
project.ensure_changed()?;
project.artifacts_snapshot()?.assert_artifacts_essentials_present();
}
Ok(())
});
}
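Together these tests walk the cache through its interesting states: an unchanged recompile must be a no-op, a single dirty file must trigger a real recompile, and touching every file must still leave a complete artifact set behind. A natural extra assertion, assuming the cache exposes a `files` map keyed by source path, would be:

// after a full rebuild the cache should track one entry per generated file
let snapshot = project.artifacts_snapshot()?;
assert_eq!(snapshot.cache.files.len(), gen.file_ids().count());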