diff --git a/ethers-solc/src/artifacts/mod.rs b/ethers-solc/src/artifacts/mod.rs index b35f3cdc..61077e8f 100644 --- a/ethers-solc/src/artifacts/mod.rs +++ b/ethers-solc/src/artifacts/mod.rs @@ -24,7 +24,10 @@ use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; pub mod output_selection; pub mod serde_helpers; -use crate::artifacts::output_selection::ContractOutputSelection; +use crate::{ + artifacts::output_selection::{ContractOutputSelection, OutputSelection}, + cache::FilteredSources, +}; pub use serde_helpers::{deserialize_bytes, deserialize_opt_bytes}; /// Solidity files are made up of multiple `source units`, a solidity contract is such a `source @@ -40,8 +43,12 @@ pub type Contracts = FileToContractsMap; /// An ordered list of files and their source pub type Sources = BTreeMap; +/// A set of different Solc installations with their version and the sources to be compiled pub type VersionedSources = BTreeMap; +/// A set of different Solc installations with their version and the sources to be compiled +pub type VersionedFilteredSources = BTreeMap; + /// Input type `solc` expects #[derive(Clone, Debug, Serialize, Deserialize)] pub struct CompilerInput { @@ -56,7 +63,10 @@ impl CompilerInput { Source::read_all_from(path.as_ref()).map(Self::with_sources) } - /// Creates a new Compiler input with default settings and the given sources + /// Creates a new [CompilerInput](s) with default settings and the given sources + /// + /// A [CompilerInput] expects a language setting, supported by solc are solidity or yul. + /// In case the `sources` is a mix of solidity and yul files, 2 CompilerInputs are returned pub fn with_sources(sources: Sources) -> Vec { let mut solidity_sources = BTreeMap::new(); let mut yul_sources = BTreeMap::new(); @@ -140,69 +150,8 @@ pub struct Settings { /// on file and contract names. /// If this field is omitted, then the compiler loads and does type /// checking, but will not generate any outputs apart from errors. 
- /// The first level key is the file name and the second level key is the - /// contract name. An empty contract name is used for outputs that are - /// not tied to a contract but to the whole source file like the AST. - /// A star as contract name refers to all contracts in the file. - /// Similarly, a star as a file name matches all files. - /// To select all outputs the compiler can possibly generate, use - /// "outputSelection: { "*": { "*": [ "*" ], "": [ "*" ] } }" - /// but note that this might slow down the compilation process needlessly. - /// - /// The available output types are as follows: - /// - /// File level (needs empty string as contract name): - /// ast - AST of all source files - /// - /// Contract level (needs the contract name or "*"): - /// abi - ABI - /// devdoc - Developer documentation (natspec) - /// userdoc - User documentation (natspec) - /// metadata - Metadata - /// ir - Yul intermediate representation of the code before optimization - /// irOptimized - Intermediate representation after optimization - /// storageLayout - Slots, offsets and types of the contract's state - /// variables. 
- /// evm.assembly - New assembly format - /// evm.legacyAssembly - Old-style assembly format in JSON - /// evm.bytecode.functionDebugData - Debugging information at function level - /// evm.bytecode.object - Bytecode object - /// evm.bytecode.opcodes - Opcodes list - /// evm.bytecode.sourceMap - Source mapping (useful for debugging) - /// evm.bytecode.linkReferences - Link references (if unlinked object) - /// evm.bytecode.generatedSources - Sources generated by the compiler - /// evm.deployedBytecode* - Deployed bytecode (has all the options that - /// evm.bytecode has) - /// evm.deployedBytecode.immutableReferences - Map from AST ids to - /// bytecode ranges that reference immutables - /// evm.methodIdentifiers - The list of function hashes - /// evm.gasEstimates - Function gas estimates - /// ewasm.wast - Ewasm in WebAssembly S-expressions format - /// ewasm.wasm - Ewasm in WebAssembly binary format - /// - /// Note that using a using `evm`, `evm.bytecode`, `ewasm`, etc. will select - /// every target part of that output. Additionally, `*` can be used as a - /// wildcard to request everything. - /// - /// The default output selection is - /// - /// ```json - /// { - /// "*": { - /// "*": [ - /// "abi", - /// "evm.bytecode", - /// "evm.deployedBytecode", - /// "evm.methodIdentifiers" - /// ], - /// "": [ - /// "ast" - /// ] - /// } - /// } - /// ``` #[serde(default)] - pub output_selection: BTreeMap>>, + pub output_selection: OutputSelection, #[serde( default, with = "serde_helpers::display_from_str_opt", @@ -215,37 +164,8 @@ pub struct Settings { impl Settings { /// Creates a new `Settings` instance with the given `output_selection` - pub fn new(output_selection: BTreeMap>>) -> Self { - Self { output_selection, ..Default::default() } - } - - /// select all outputs the compiler can possibly generate, use - /// `{ "*": { "*": [ "*" ], "": [ "*" ] } }` - /// but note that this might slow down the compilation process needlessly. 
- pub fn complete_output_selection() -> BTreeMap>> { - BTreeMap::from([( - "*".to_string(), - BTreeMap::from([ - ("*".to_string(), vec!["*".to_string()]), - ("".to_string(), vec!["*".to_string()]), - ]), - )]) - } - - /// Default output selection for compiler output - pub fn default_output_selection() -> BTreeMap>> { - BTreeMap::from([( - "*".to_string(), - BTreeMap::from([( - "*".to_string(), - vec![ - "abi".to_string(), - "evm.bytecode".to_string(), - "evm.deployedBytecode".to_string(), - "evm.methodIdentifiers".to_string(), - ], - )]), - )]) + pub fn new(output_selection: impl Into) -> Self { + Self { output_selection: output_selection.into(), ..Default::default() } } /// Inserts a set of `ContractOutputSelection` @@ -290,6 +210,7 @@ impl Settings { let value = value.to_string(); let values = self .output_selection + .as_mut() .entry("*".to_string()) .or_default() .entry(contracts.into()) @@ -313,6 +234,7 @@ impl Settings { values: impl IntoIterator, ) { self.output_selection + .as_mut() .entry("*".to_string()) .or_default() .insert(key.into(), values.into_iter().map(|s| s.to_string()).collect()); @@ -321,7 +243,8 @@ impl Settings { /// Adds `ast` to output #[must_use] pub fn with_ast(mut self) -> Self { - let output = self.output_selection.entry("*".to_string()).or_insert_with(BTreeMap::default); + let output = + self.output_selection.as_mut().entry("*".to_string()).or_insert_with(BTreeMap::default); output.insert("".to_string(), vec!["ast".to_string()]); self } @@ -333,7 +256,7 @@ impl Default for Settings { stop_after: None, optimizer: Default::default(), metadata: None, - output_selection: Self::default_output_selection(), + output_selection: OutputSelection::default_output_selection(), evm_version: Some(EvmVersion::default()), libraries: Default::default(), remappings: Default::default(), @@ -2087,6 +2010,7 @@ pub struct SecondarySourceLocation { #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] pub struct SourceFile { pub id: u32, + 
#[serde(default)] pub ast: serde_json::Value, } diff --git a/ethers-solc/src/artifacts/output_selection.rs b/ethers-solc/src/artifacts/output_selection.rs index 375e485f..21a169b6 100644 --- a/ethers-solc/src/artifacts/output_selection.rs +++ b/ethers-solc/src/artifacts/output_selection.rs @@ -1,7 +1,143 @@ //! bindings for standard json output selection use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use std::{fmt, str::FromStr}; +use std::{collections::BTreeMap, fmt, str::FromStr}; + +/// Represents the desired outputs based on a File `(file -> (contract -> [outputs]))` +pub type FileOutputSelection = BTreeMap>; + +/// Represents the selected output of files and contracts +/// The first level key is the file name and the second level key is the +/// contract name. An empty contract name is used for outputs that are +/// not tied to a contract but to the whole source file like the AST. +/// A star as contract name refers to all contracts in the file. +/// Similarly, a star as a file name matches all files. +/// To select all outputs the compiler can possibly generate, use +/// "outputSelection: { "*": { "*": [ "*" ], "": [ "*" ] } }" +/// but note that this might slow down the compilation process needlessly. +/// +/// The available output types are as follows: +/// +/// File level (needs empty string as contract name): +/// ast - AST of all source files +/// +/// Contract level (needs the contract name or "*"): +/// abi - ABI +/// devdoc - Developer documentation (natspec) +/// userdoc - User documentation (natspec) +/// metadata - Metadata +/// ir - Yul intermediate representation of the code before optimization +/// irOptimized - Intermediate representation after optimization +/// storageLayout - Slots, offsets and types of the contract's state +/// variables. 
+/// evm.assembly - New assembly format
+/// evm.legacyAssembly - Old-style assembly format in JSON
+/// evm.bytecode.functionDebugData - Debugging information at function level
+/// evm.bytecode.object - Bytecode object
+/// evm.bytecode.opcodes - Opcodes list
+/// evm.bytecode.sourceMap - Source mapping (useful for debugging)
+/// evm.bytecode.linkReferences - Link references (if unlinked object)
+/// evm.bytecode.generatedSources - Sources generated by the compiler
+/// evm.deployedBytecode* - Deployed bytecode (has all the options that
+/// evm.bytecode has)
+/// evm.deployedBytecode.immutableReferences - Map from AST ids to
+/// bytecode ranges that reference immutables
+/// evm.methodIdentifiers - The list of function hashes
+/// evm.gasEstimates - Function gas estimates
+/// ewasm.wast - Ewasm in WebAssembly S-expressions format
+/// ewasm.wasm - Ewasm in WebAssembly binary format
+///
+/// Note that using `evm`, `evm.bytecode`, `ewasm`, etc. will select
+/// every target part of that output. Additionally, `*` can be used as a
+/// wildcard to request everything.
+///
+/// The default output selection is
+///
+/// ```json
+/// {
+///   "*": {
+///     "*": [
+///       "abi",
+///       "evm.bytecode",
+///       "evm.deployedBytecode",
+///       "evm.methodIdentifiers"
+///     ],
+///     "": [
+///       "ast"
+///     ]
+///   }
+/// }
+/// ```
+#[derive(Debug, Clone, Eq, PartialEq, Default, Serialize, Deserialize)]
+#[serde(transparent)]
+pub struct OutputSelection(pub BTreeMap<String, FileOutputSelection>);
+
+impl OutputSelection {
+    /// select all outputs the compiler can possibly generate, use
+    /// `{ "*": { "*": [ "*" ], "": [ "*" ] } }`
+    /// but note that this might slow down the compilation process needlessly.
+ pub fn complete_output_selection() -> Self { + BTreeMap::from([( + "*".to_string(), + BTreeMap::from([ + ("*".to_string(), vec!["*".to_string()]), + ("".to_string(), vec!["*".to_string()]), + ]), + )]) + .into() + } + + /// Default output selection for compiler output: + /// + /// `{ "*": { "*": [ "*" ], "": [ + /// "abi","evm.bytecode","evm.deployedBytecode","evm.methodIdentifiers"] } }` + /// + /// Which enables it for all files and all their contracts ("*" wildcard) + pub fn default_output_selection() -> Self { + BTreeMap::from([("*".to_string(), Self::default_file_output_selection())]).into() + } + + /// Default output selection for a single file: + /// + /// `{ "*": [ "*" ], "": [ + /// "abi","evm.bytecode","evm.deployedBytecode","evm.methodIdentifiers"] }` + /// + /// Which enables it for all the contracts in the file ("*" wildcard) + pub fn default_file_output_selection() -> FileOutputSelection { + BTreeMap::from([( + "*".to_string(), + vec![ + "abi".to_string(), + "evm.bytecode".to_string(), + "evm.deployedBytecode".to_string(), + "evm.methodIdentifiers".to_string(), + ], + )]) + } + + /// Returns an empty output selection which corresponds to an empty map `{}` + pub fn empty_file_output_select() -> FileOutputSelection { + Default::default() + } +} + +impl AsRef> for OutputSelection { + fn as_ref(&self) -> &BTreeMap { + &self.0 + } +} + +impl AsMut> for OutputSelection { + fn as_mut(&mut self) -> &mut BTreeMap { + &mut self.0 + } +} + +impl From> for OutputSelection { + fn from(s: BTreeMap) -> Self { + OutputSelection(s) + } +} /// Contract level output selection #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index edf3b271..30e46bfe 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -1,6 +1,6 @@ //! 
Support for compiling contracts use crate::{ - artifacts::Sources, + artifacts::{output_selection::OutputSelection, Settings, Sources}, config::SolcConfig, error::{Result, SolcError}, resolver::GraphEdges, @@ -14,6 +14,7 @@ use std::{ btree_map::{BTreeMap, Entry}, hash_map, BTreeSet, HashMap, HashSet, }, + fmt, fs::{self}, path::{Path, PathBuf}, time::{Duration, UNIX_EPOCH}, @@ -599,35 +600,78 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { } } - /// Returns only those sources that + /// Returns the set of [Source]s that need to be included in the [CompilerOutput] in order to + /// recompile the project. + /// + /// We define _dirty_ sources as files that: /// - are new /// - were changed /// - their imports were changed /// - their artifact is missing - fn filter(&mut self, sources: Sources, version: &Version) -> Sources { + /// + /// A _dirty_ file is always included in the [CompilerInput]. + /// A _dirty_ file can also include clean files - files that do not match any of the above + /// criteria - which solc also requires in order to compile a dirty file. + /// + /// Therefore, these files will also be included in the filtered output but not marked as dirty, + /// so that their [OutputSelection] can be optimized in the [CompilerOutput] and their (empty) + /// artifacts ignored. 
+ fn filter(&mut self, sources: Sources, version: &Version) -> FilteredSources { self.fill_hashes(&sources); - sources + + // all files that are not dirty themselves, but are pulled from a dirty file + let mut imports_of_dirty = HashSet::new(); + + // separates all source files that fit the criteria (dirty) from those that don't (clean) + let (mut filtered_sources, clean_sources) = sources .into_iter() - .filter_map(|(file, source)| self.requires_solc(file, source, version)) - .collect() + .map(|(file, source)| self.filter_source(file, source, version)) + .fold( + (BTreeMap::default(), Vec::new()), + |(mut dirty_sources, mut clean_sources), source| { + if source.dirty { + // mark all files that are imported by a dirty file + imports_of_dirty.extend(self.edges.all_imported_nodes(source.idx)); + dirty_sources.insert(source.file, FilteredSource::Dirty(source.source)); + } else { + clean_sources.push(source); + } + + (dirty_sources, clean_sources) + }, + ); + + // track new cache entries for dirty files + for (file, filtered) in filtered_sources.iter() { + self.insert_new_cache_entry(file, filtered.source(), version.clone()); + } + + for clean_source in clean_sources { + let FilteredSourceInfo { file, source, idx, .. } = clean_source; + if imports_of_dirty.contains(&idx) { + // file is pulled in by a dirty file + filtered_sources.insert(file.clone(), FilteredSource::Clean(source.clone())); + } + self.insert_filtered_source(file, source, version.clone()); + } + + filtered_sources.into() } - /// Returns `Some` if the file _needs_ to be compiled and `None` if the artifact can be reu-used - fn requires_solc( - &mut self, + /// Returns the state of the given source file. 
+ fn filter_source( + &self, file: PathBuf, source: Source, version: &Version, - ) -> Option<(PathBuf, Source)> { + ) -> FilteredSourceInfo { + let idx = self.edges.node_id(&file); if !self.is_dirty(&file, version) && self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version)) { - self.insert_filtered_source(file, source, version.clone()); - None + FilteredSourceInfo { file, source, idx, dirty: false } } else { - self.insert_new_cache_entry(&file, &source, version.clone()); - - Some((file, source)) + FilteredSourceInfo { file, source, idx, dirty: true } } } @@ -685,6 +729,157 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { } } +/// Container type for a set of [FilteredSource] +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct FilteredSources(pub BTreeMap); + +impl FilteredSources { + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns `true` if all files are dirty + pub fn all_dirty(&self) -> bool { + self.0.values().all(|s| s.is_dirty()) + } + + /// Returns all entries that are dirty + pub fn dirty(&self) -> impl Iterator + '_ { + self.0.iter().filter(|(_, s)| s.is_dirty()) + } + + /// Returns all entries that are clean + pub fn clean(&self) -> impl Iterator + '_ { + self.0.iter().filter(|(_, s)| !s.is_dirty()) + } + + /// Returns all dirty files + pub fn dirty_files(&self) -> impl Iterator + fmt::Debug + '_ { + self.0.iter().filter_map(|(k, s)| s.is_dirty().then(|| k)) + } + + /// While solc needs all the files to compile the actual _dirty_ files, we can tell solc to + /// output everything for those dirty files as currently configured in the settings, but output + /// nothing for the other files that are _not_ dirty. + /// + /// This will modify the [OutputSelection] of the [Settings] so that we explicitly select the + /// files' output based on their state. 
+    pub fn into_sources(self, settings: &mut Settings) -> Sources {
+        if !self.all_dirty() {
+            // settings can be optimized
+
+            tracing::trace!(
+                "Optimizing output selection for {}/{} sources",
+                self.clean().count(),
+                self.len()
+            );
+
+            let selection = settings
+                .output_selection
+                .as_mut()
+                .remove("*")
+                .unwrap_or_else(OutputSelection::default_file_output_selection);
+
+            for (file, source) in self.0.iter() {
+                if source.is_dirty() {
+                    settings
+                        .output_selection
+                        .as_mut()
+                        .insert(format!("{}", file.display()), selection.clone());
+                } else {
+                    tracing::trace!("Optimizing output for {}", file.display());
+                    settings.output_selection.as_mut().insert(
+                        format!("{}", file.display()),
+                        OutputSelection::empty_file_output_select(),
+                    );
+                }
+            }
+        }
+        self.into()
+    }
+}
+
+impl From<FilteredSources> for Sources {
+    fn from(sources: FilteredSources) -> Self {
+        sources.0.into_iter().map(|(k, v)| (k, v.into_source())).collect()
+    }
+}
+
+impl From<Sources> for FilteredSources {
+    fn from(s: Sources) -> Self {
+        FilteredSources(s.into_iter().map(|(key, val)| (key, FilteredSource::Dirty(val))).collect())
+    }
+}
+
+impl From<BTreeMap<PathBuf, FilteredSource>> for FilteredSources {
+    fn from(s: BTreeMap<PathBuf, FilteredSource>) -> Self {
+        FilteredSources(s)
+    }
+}
+
+impl AsRef<BTreeMap<PathBuf, FilteredSource>> for FilteredSources {
+    fn as_ref(&self) -> &BTreeMap<PathBuf, FilteredSource> {
+        &self.0
+    }
+}
+
+impl AsMut<BTreeMap<PathBuf, FilteredSource>> for FilteredSources {
+    fn as_mut(&mut self) -> &mut BTreeMap<PathBuf, FilteredSource> {
+        &mut self.0
+    }
+}
+
+/// Represents the state of a filtered [Source]
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum FilteredSource {
+    /// A source that fits the _dirty_ criteria
+    Dirty(Source),
+    /// A source that does _not_ fit the _dirty_ criteria but is included in the filtered set
+    /// because a _dirty_ file pulls it in, either directly or indirectly.
+    Clean(Source),
+}
+
+impl FilteredSource {
+    /// Returns the underlying source
+    pub fn source(&self) -> &Source {
+        match self {
+            FilteredSource::Dirty(s) => s,
+            FilteredSource::Clean(s) => s,
+        }
+    }
+
+    /// Consumes the type and returns the underlying source
+    pub fn into_source(self) -> Source {
+        match self {
+            FilteredSource::Dirty(s) => s,
+            FilteredSource::Clean(s) => s,
+        }
+    }
+
+    /// Whether this file is actually dirty
+    pub fn is_dirty(&self) -> bool {
+        matches!(self, FilteredSource::Dirty(_))
+    }
+}
+
+/// Helper type that determines the state of a source file
+struct FilteredSourceInfo {
+    /// path to the source file
+    file: PathBuf,
+    /// contents of the file
+    source: Source,
+    /// idx in the [GraphEdges]
+    idx: usize,
+    /// whether this file is actually dirty
+    ///
+    /// See also [ArtifactsCacheInner::is_dirty()]
+    dirty: bool,
+}
+
 /// Abstraction over configured caching which can be either non-existent or an already loaded cache
 #[allow(clippy::large_enum_variant)]
 #[derive(Debug)]
@@ -756,9 +951,9 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> {
     }
 
     /// Filters out those sources that don't need to be compiled
-    pub fn filter(&mut self, sources: Sources, version: &Version) -> Sources {
+    pub fn filter(&mut self, sources: Sources, version: &Version) -> FilteredSources {
         match self {
-            ArtifactsCache::Ephemeral(_, _) => sources,
+            ArtifactsCache::Ephemeral(_, _) => sources.into(),
             ArtifactsCache::Cached(cache) => cache.filter(sources, version),
         }
     }
diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs
index 31174ccb..b4c46a0f 100644
--- a/ethers-solc/src/compile/project.rs
+++ b/ethers-solc/src/compile/project.rs
@@ -103,7 +103,7 @@
 use crate::{
     artifact_output::Artifacts,
-    artifacts::{Settings, VersionedSources},
+    artifacts::{Settings, VersionedFilteredSources, VersionedSources},
     cache::ArtifactsCache,
     error::Result,
     output::AggregatedCompilerOutput,
@@ -114,7 +114,7 @@ use crate::{
 };
 use
rayon::prelude::*; -use std::collections::btree_map::BTreeMap; +use std::{collections::btree_map::BTreeMap, path::PathBuf}; #[derive(Debug)] pub struct ProjectCompiler<'a, T: ArtifactOutput> { @@ -203,11 +203,11 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// - sets proper source unit names /// - check cache fn preprocess(self) -> Result> { - let Self { edges, project, mut sources } = self; + let Self { edges, project, sources } = self; let mut cache = ArtifactsCache::new(project, edges)?; // retain and compile only dirty sources and all their imports - sources = sources.filtered(&mut cache); + let sources = sources.filtered(&mut cache); Ok(PreprocessedState { sources, cache }) } @@ -218,7 +218,9 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// The main reason is to debug all states individually #[derive(Debug)] struct PreprocessedState<'a, T: ArtifactOutput> { - sources: CompilerSources, + /// contains all sources to compile + sources: FilteredCompilerSources, + /// cache that holds [CacheEntry] object if caching is enabled and the project is recompiled cache: ArtifactsCache<'a, T>, } @@ -302,11 +304,11 @@ enum CompilerSources { impl CompilerSources { /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`] - fn filtered(self, cache: &mut ArtifactsCache) -> Self { + fn filtered(self, cache: &mut ArtifactsCache) -> FilteredCompilerSources { fn filtered_sources( sources: VersionedSources, cache: &mut ArtifactsCache, - ) -> VersionedSources { + ) -> VersionedFilteredSources { sources .into_iter() .map(|(solc, (version, sources))| { @@ -314,8 +316,8 @@ impl CompilerSources { let sources = cache.filter(sources, &version); tracing::trace!( "Detected {} dirty sources {:?}", - sources.len(), - sources.keys() + sources.dirty().count(), + sources.dirty_files() ); (solc, (version, sources)) }) @@ -324,14 +326,26 @@ impl CompilerSources { match self { CompilerSources::Sequential(s) => { - 
CompilerSources::Sequential(filtered_sources(s, cache)) + FilteredCompilerSources::Sequential(filtered_sources(s, cache)) } CompilerSources::Parallel(s, j) => { - CompilerSources::Parallel(filtered_sources(s, cache), j) + FilteredCompilerSources::Parallel(filtered_sources(s, cache), j) } } } +} +/// Determines how the `solc <-> sources` pairs are executed +#[derive(Debug, Clone)] +#[allow(dead_code)] +enum FilteredCompilerSources { + /// Compile all these sequentially + Sequential(VersionedFilteredSources), + /// Compile all these in parallel using a certain amount of jobs + Parallel(VersionedFilteredSources, usize), +} + +impl FilteredCompilerSources { /// Compiles all the files with `Solc` fn compile( self, @@ -339,53 +353,88 @@ impl CompilerSources { paths: &ProjectPathsConfig, ) -> Result { match self { - CompilerSources::Sequential(input) => compile_sequential(input, settings, paths), - CompilerSources::Parallel(input, j) => compile_parallel(input, j, settings, paths), + FilteredCompilerSources::Sequential(input) => { + compile_sequential(input, settings, paths) + } + FilteredCompilerSources::Parallel(input, j) => { + compile_parallel(input, j, settings, paths) + } } } #[cfg(test)] #[allow(unused)] - fn sources(&self) -> &VersionedSources { + fn sources(&self) -> &VersionedFilteredSources { match self { - CompilerSources::Sequential(v) => v, - CompilerSources::Parallel(v, _) => v, + FilteredCompilerSources::Sequential(v) => v, + FilteredCompilerSources::Parallel(v, _) => v, } } } /// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s fn compile_sequential( - input: VersionedSources, + input: VersionedFilteredSources, settings: &Settings, paths: &ProjectPathsConfig, ) -> Result { let mut aggregated = AggregatedCompilerOutput::default(); tracing::trace!("compiling {} jobs sequentially", input.len()); - for (solc, (version, sources)) in input { - if sources.is_empty() { + for (solc, (version, filtered_sources)) in 
input { + if filtered_sources.is_empty() { // nothing to compile + tracing::trace!( + "skip solc {} {} for empty sources set", + solc.as_ref().display(), + version + ); continue } tracing::trace!( "compiling {} sources with solc \"{}\" {:?}", - sources.len(), + filtered_sources.len(), solc.as_ref().display(), solc.args ); + let dirty_files: Vec = filtered_sources.dirty_files().cloned().collect(); + + // depending on the composition of the filtered sources, the output selection can be + // optimized + let mut opt_settings = settings.clone(); + let sources = filtered_sources.into_sources(&mut opt_settings); + for input in CompilerInput::with_sources(sources) { + let actually_dirty = input + .sources + .keys() + .filter(|f| dirty_files.contains(f)) + .cloned() + .collect::>(); + if actually_dirty.is_empty() { + // nothing to compile for this particular language, all dirty files are in the other + // language set + tracing::trace!( + "skip solc {} {} compilation of {} compiler input due to empty source set", + solc.as_ref().display(), + version, + input.language + ); + continue + } let input = input - .settings(settings.clone()) + .settings(opt_settings.clone()) .normalize_evm_version(&version) .with_remappings(paths.remappings.clone()); + tracing::trace!( "calling solc `{}` with {} sources {:?}", version, input.sources.len(), input.sources.keys() ); - report::solc_spawn(&solc, &version, &input); + + report::solc_spawn(&solc, &version, &input, &actually_dirty); let output = solc.compile_exact(&input)?; report::solc_success(&solc, &version, &output); tracing::trace!("compiled input, output has error: {}", output.has_error()); @@ -397,7 +446,7 @@ fn compile_sequential( /// compiles the input set using `num_jobs` threads fn compile_parallel( - input: VersionedSources, + input: VersionedFilteredSources, num_jobs: usize, settings: &Settings, paths: &ProjectPathsConfig, @@ -410,18 +459,49 @@ fn compile_parallel( ); let mut jobs = Vec::with_capacity(input.len()); - for (solc, 
(version, sources)) in input { - if sources.is_empty() { + for (solc, (version, filtered_sources)) in input { + if filtered_sources.is_empty() { // nothing to compile + tracing::trace!( + "skip solc {} {} for empty sources set", + solc.as_ref().display(), + version + ); continue } + + let dirty_files: Vec = filtered_sources.dirty_files().cloned().collect(); + + // depending on the composition of the filtered sources, the output selection can be + // optimized + let mut opt_settings = settings.clone(); + let sources = filtered_sources.into_sources(&mut opt_settings); + for input in CompilerInput::with_sources(sources) { + let actually_dirty = input + .sources + .keys() + .filter(|f| dirty_files.contains(f)) + .cloned() + .collect::>(); + if actually_dirty.is_empty() { + // nothing to compile for this particular language, all dirty files are in the other + // language set + tracing::trace!( + "skip solc {} {} compilation of {} compiler input due to empty source set", + solc.as_ref().display(), + version, + input.language + ); + continue + } + let job = input .settings(settings.clone()) .normalize_evm_version(&version) .with_remappings(paths.remappings.clone()); - jobs.push((solc.clone(), version.clone(), job)) + jobs.push((solc.clone(), version.clone(), job, actually_dirty)) } } @@ -429,7 +509,7 @@ fn compile_parallel( let pool = rayon::ThreadPoolBuilder::new().num_threads(num_jobs).build().unwrap(); let outputs = pool.install(move || { jobs.into_par_iter() - .map(|(solc, version, input)| { + .map(|(solc, version, input, actually_dirty)| { tracing::trace!( "calling solc `{}` {:?} with {} sources: {:?}", version, @@ -437,7 +517,7 @@ fn compile_parallel( input.sources.len(), input.sources.keys() ); - report::solc_spawn(&solc, &version, &input); + report::solc_spawn(&solc, &version, &input, &actually_dirty); solc.compile(&input).map(move |output| { report::solc_success(&solc, &version, &output); (version, output) @@ -501,6 +581,105 @@ mod tests { 
assert!(prep.cache.as_cached().unwrap().dirty_source_files.is_empty()) } + #[test] + fn can_recompile_with_optimized_output() { + let tmp = TempProject::dapptools().unwrap(); + + tmp.add_source( + "A", + r#" + pragma solidity ^0.8.10; + import "./B.sol"; + contract A {} + "#, + ) + .unwrap(); + + tmp.add_source( + "B", + r#" + pragma solidity ^0.8.10; + contract B { + function hello() public {} + } + import "./C.sol"; + "#, + ) + .unwrap(); + + tmp.add_source( + "C", + r#" + pragma solidity ^0.8.10; + contract C { + function hello() public {} + } + "#, + ) + .unwrap(); + let compiled = tmp.compile().unwrap(); + assert!(!compiled.has_compiler_errors()); + + tmp.artifacts_snapshot().unwrap().assert_artifacts_essentials_present(); + + // modify A.sol + tmp.add_source( + "A", + r#" + pragma solidity ^0.8.10; + import "./B.sol"; + contract A { + function testExample() public {} + } + "#, + ) + .unwrap(); + + let compiler = ProjectCompiler::new(tmp.project()).unwrap(); + let state = compiler.preprocess().unwrap(); + let sources = state.sources.sources(); + + // single solc + assert_eq!(sources.len(), 1); + + let (_, filtered) = sources.values().next().unwrap(); + + // 3 contracts total + assert_eq!(filtered.0.len(), 3); + // A is modified + assert_eq!(filtered.dirty().count(), 1); + assert!(filtered.dirty_files().next().unwrap().ends_with("A.sol")); + + let state = state.compile().unwrap(); + assert_eq!(state.output.sources.len(), 3); + for (f, source) in &state.output.sources { + if f.ends_with("A.sol") { + assert!(source.ast.is_object()); + } else { + assert!(source.ast.is_null()); + } + } + + assert_eq!(state.output.contracts.len(), 1); + let (a, c) = state.output.contracts_iter().next().unwrap(); + assert_eq!(a, "A"); + assert!(c.abi.is_some() && c.evm.is_some()); + + let state = state.write_artifacts().unwrap(); + assert_eq!(state.compiled_artifacts.as_ref().len(), 1); + + let out = state.write_cache().unwrap(); + + let artifacts: Vec<_> = 
out.into_artifacts().collect(); + assert_eq!(artifacts.len(), 3); + for (_, artifact) in artifacts { + let c = artifact.into_contract_bytecode(); + assert!(c.abi.is_some() && c.bytecode.is_some() && c.deployed_bytecode.is_some()); + } + + tmp.artifacts_snapshot().unwrap().assert_artifacts_essentials_present(); + } + #[test] #[ignore] fn can_compile_real_project() { diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index f6cdef08..6224f205 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -31,6 +31,7 @@ pub mod utils; use crate::{ artifacts::{Contract, Sources}, + cache::SolFilesCache, contracts::VersionedContracts, error::{SolcError, SolcIoError}, }; @@ -125,6 +126,12 @@ impl Project { &self.artifacts } + /// Convenience function to read the cache file. + /// See also [SolFilesCache::read_joined()] + pub fn read_cache_file(&self) -> Result { + SolFilesCache::read_joined(&self.paths) + } + /// Applies the configured arguments to the given `Solc` /// /// This will set the `--allow-paths` to the paths configured for the `Project`, if any. diff --git a/ethers-solc/src/project_util/mock.rs b/ethers-solc/src/project_util/mock.rs index 07fee0ea..78fb81a9 100644 --- a/ethers-solc/src/project_util/mock.rs +++ b/ethers-solc/src/project_util/mock.rs @@ -1,9 +1,29 @@ //! 
Helpers to generate mock projects -use crate::{error::Result, remappings::Remapping, ProjectPathsConfig}; -use rand::{self, seq::SliceRandom, Rng}; +use crate::{ + error::Result, remappings::Remapping, resolver::GraphEdges, Graph, ProjectPathsConfig, + SolcError, +}; +use rand::{ + self, + distributions::{Distribution, Uniform}, + seq::SliceRandom, + Rng, +}; use serde::{Deserialize, Serialize}; -use std::{collections::BTreeSet, path::Path}; +use std::{ + collections::{BTreeSet, HashMap}, + path::{Path, PathBuf}, +}; + +/// Represents the layout of a project +#[derive(Serialize, Deserialize, Default)] +pub struct MockProjectSkeleton { + /// all files for the project + pub files: Vec, + /// all libraries + pub libraries: Vec, +} /// Represents a virtual project #[derive(Serialize)] @@ -11,77 +31,109 @@ pub struct MockProjectGenerator { /// how to name things #[serde(skip)] name_strategy: Box, - /// id counter for a file - next_file_id: usize, - /// id counter for a file - next_lib_id: usize, - /// all files for the project - files: Vec, - /// all libraries - libraries: Vec, -} -impl Default for MockProjectGenerator { - fn default() -> Self { - Self { - name_strategy: Box::new(SimpleNamingStrategy::default()), - next_file_id: 0, - next_lib_id: 0, - files: Default::default(), - libraries: Default::default(), - } - } + #[serde(flatten)] + inner: MockProjectSkeleton, } impl MockProjectGenerator { + /// Create a new project and populate it using the given settings + pub fn new(settings: &MockProjectSettings) -> Self { + let mut mock = Self::default(); + mock.populate(settings); + mock + } + + /// Create a skeleton of a real project + pub fn create(paths: &ProjectPathsConfig) -> Result { + fn get_libs(edges: &GraphEdges, lib_folder: &Path) -> Option>> { + let mut libs: HashMap<_, Vec<_>> = HashMap::new(); + for lib_file in edges.library_files() { + let component = + edges.node_path(lib_file).strip_prefix(lib_folder).ok()?.components().next()?; + 
libs.entry(lib_folder.join(component)).or_default().push(lib_file); + } + Some(libs) + } + + let graph = Graph::resolve(paths)?; + let mut gen = MockProjectGenerator::default(); + let (_, edges) = graph.into_sources(); + + // add all files as source files + gen.add_sources(edges.files().count()); + + // stores libs and their files + let libs = get_libs( + &edges, + &paths.libraries.get(0).cloned().unwrap_or_else(|| paths.root.join("lib")), + ) + .ok_or_else(|| SolcError::msg("Failed to detect libs"))?; + + // mark all files as libs + for (lib_id, lib_files) in libs.into_values().enumerate() { + let lib_name = gen.name_strategy.new_lib_name(lib_id); + let offset = gen.inner.files.len(); + let lib = MockLib { name: lib_name, id: lib_id, num_files: lib_files.len(), offset }; + for lib_file in lib_files { + let file = &mut gen.inner.files[lib_file]; + file.lib_id = Some(lib_id); + file.name = gen.name_strategy.new_lib_name(file.id); + } + gen.inner.libraries.push(lib); + } + + for id in edges.files() { + for import in edges.imported_nodes(id).iter().copied() { + let import = gen.get_import(import); + gen.inner.files[id].imports.insert(import); + } + } + + Ok(gen) + } + + /// Consumes the type and returns the underlying skeleton + pub fn into_inner(self) -> MockProjectSkeleton { + self.inner + } + /// Generate all solidity files and write under the paths config pub fn write_to(&self, paths: &ProjectPathsConfig, version: impl AsRef) -> Result<()> { let version = version.as_ref(); - for file in self.files.iter() { - let mut imports = Vec::with_capacity(file.imports.len()); - - for import in file.imports.iter() { - match *import { - MockImport::Internal(f) => { - imports.push(format!("import \"./{}.sol\";", self.files[f].name)); - } - MockImport::External(lib, f) => { - imports.push(format!( - "import \"{}/{}.sol\";", - self.libraries[lib].name, self.files[f].name - )); - } - } - } - - let content = format!( - r#" -// SPDX-License-Identifier: UNLICENSED -pragma solidity 
{}; -{} -contract {} {{}} - "#, - version, - imports.join("\n"), - file.name - ); - - let mut target = if let Some(lib) = file.lib_id { - paths.root.join("lib").join(&self.libraries[lib].name).join("src").join(&file.name) - } else { - paths.sources.join(&file.name) - }; - target.set_extension("sol"); - - super::create_contract_file(target, content)?; + for file in self.inner.files.iter() { + let imports = self.get_imports(file.id); + let content = file.mock_content(version, imports.join("\n").as_str()); + super::create_contract_file(file.target_path(self, paths), content)?; } Ok(()) } + fn get_imports(&self, file: usize) -> Vec { + let file = &self.inner.files[file]; + let mut imports = Vec::with_capacity(file.imports.len()); + + for import in file.imports.iter() { + match *import { + MockImport::Internal(f) => { + imports.push(format!("import \"./{}.sol\";", self.inner.files[f].name)); + } + MockImport::External(lib, f) => { + imports.push(format!( + "import \"{}/{}.sol\";", + self.inner.libraries[lib].name, self.inner.files[f].name + )); + } + } + } + imports + } + /// Returns all the remappings for the project for the given root path pub fn remappings_at(&self, root: &Path) -> Vec { - self.libraries + self.inner + .libraries .iter() .map(|lib| { let path = root.join("lib").join(&lib.name).join("src"); @@ -92,19 +144,13 @@ contract {} {{}} /// Returns all the remappings for the project pub fn remappings(&self) -> Vec { - self.libraries + self.inner + .libraries .iter() .map(|lib| format!("{0}/=lib/{0}/src/", lib.name).parse().unwrap()) .collect() } - /// Create a new project and populate it using the given settings - pub fn new(settings: &MockProjectSettings) -> Self { - let mut mock = Self::default(); - mock.populate(settings); - mock - } - /// Generates a random project with random settings pub fn random() -> Self { let settings = MockProjectSettings::random(); @@ -122,24 +168,21 @@ contract {} {{}} self.populate_imports(settings) } - fn next_file_id(&mut self) 
-> usize { - let next = self.next_file_id; - self.next_file_id += 1; - next + fn next_file_id(&self) -> usize { + self.inner.files.len() } - fn next_lib_id(&mut self) -> usize { - let next = self.next_lib_id; - self.next_lib_id += 1; - next + fn next_lib_id(&self) -> usize { + self.inner.libraries.len() } /// Adds a new source file pub fn add_source(&mut self) -> &mut Self { let id = self.next_file_id(); let name = self.name_strategy.new_source_file_name(id); - let file = MockFile { id, name, imports: Default::default(), lib_id: None }; - self.files.push(file); + let file = + MockFile { id, name, imports: Default::default(), lib_id: None, emit_artifacts: true }; + self.inner.files.push(file); self } @@ -151,22 +194,50 @@ contract {} {{}} self } + /// Adds a new lib file + pub fn add_lib_file(&mut self, lib_id: usize) -> &mut Self { + let id = self.next_file_id(); + let name = self.name_strategy.new_source_file_name(id); + let file = MockFile { + id, + name, + imports: Default::default(), + lib_id: Some(lib_id), + emit_artifacts: true, + }; + self.inner.files.push(file); + self + } + + /// Adds `num` new source files + pub fn add_lib_files(&mut self, num: usize, lib_id: usize) -> &mut Self { + for _ in 0..num { + self.add_lib_file(lib_id); + } + self + } + /// Adds a new lib with the number of lib files pub fn add_lib(&mut self, num_files: usize) -> &mut Self { let lib_id = self.next_lib_id(); let lib_name = self.name_strategy.new_lib_name(lib_id); - let offset = self.files.len(); - for _ in 0..num_files { - let id = self.next_file_id(); - let name = self.name_strategy.new_lib_file_name(id); - self.files.push(MockFile { - id, - name, - imports: Default::default(), - lib_id: Some(lib_id), - }); + let offset = self.inner.files.len(); + self.add_lib_files(num_files, lib_id); + self.inner.libraries.push(MockLib { name: lib_name, id: lib_id, num_files, offset }); + self + } + + /// randomly assign empty file status so that mocked files don't emit artifacts + pub fn 
assign_empty_files(&mut self) -> &mut Self { + let mut rng = rand::thread_rng(); + let die = Uniform::from(0..self.inner.files.len()); + for file in self.inner.files.iter_mut() { + let throw = die.sample(&mut rng); + if throw == 0 { + // give it a 1 in num(files) chance that the file will be empty + file.emit_artifacts = false; + } } - self.libraries.push(MockLib { name: lib_name, id: lib_id, num_files, offset }); self } @@ -175,26 +246,26 @@ contract {} {{}} let mut rng = rand::thread_rng(); // populate imports - for id in 0..self.files.len() { - let imports = if let Some(lib) = self.files[id].lib_id { + for id in 0..self.inner.files.len() { + let imports = if let Some(lib) = self.inner.files[id].lib_id { let num_imports = rng .gen_range(settings.min_imports..=settings.max_imports) - .min(self.libraries[lib].num_files.saturating_sub(1)); + .min(self.inner.libraries[lib].num_files.saturating_sub(1)); self.unique_imports_for_lib(&mut rng, lib, id, num_imports) } else { let num_imports = rng .gen_range(settings.min_imports..=settings.max_imports) - .min(self.files.len().saturating_sub(1)); + .min(self.inner.files.len().saturating_sub(1)); self.unique_imports_for_source(&mut rng, id, num_imports) }; - self.files[id].imports = imports; + self.inner.files[id].imports = imports; } self } fn get_import(&self, id: usize) -> MockImport { - if let Some(lib) = self.files[id].lib_id { + if let Some(lib) = self.inner.files[id].lib_id { MockImport::External(lib, id) } else { MockImport::Internal(id) @@ -203,17 +274,17 @@ contract {} {{}} /// All file ids pub fn file_ids(&self) -> impl Iterator + '_ { - self.files.iter().map(|f| f.id) + self.inner.files.iter().map(|f| f.id) } /// All ids of internal files pub fn internal_file_ids(&self) -> impl Iterator + '_ { - self.files.iter().filter(|f| !f.is_external()).map(|f| f.id) + self.inner.files.iter().filter(|f| !f.is_external()).map(|f| f.id) } /// All ids of external files pub fn external_file_ids(&self) -> impl Iterator + '_ { - 
self.files.iter().filter(|f| f.is_external()).map(|f| f.id) + self.inner.files.iter().filter(|f| f.is_external()).map(|f| f.id) } /// generates exactly `num` unique imports in the range of all files @@ -227,12 +298,27 @@ contract {} {{}} id: usize, num: usize, ) -> BTreeSet { - assert!(self.files.len() > num); - let mut imports: Vec<_> = (0..self.files.len()).collect(); + assert!(self.inner.files.len() > num); + let mut imports: Vec<_> = (0..self.inner.files.len()).collect(); imports.shuffle(rng); imports.into_iter().filter(|i| *i != id).map(|id| self.get_import(id)).take(num).collect() } + /// Modifies the content of the given file + pub fn modify_file( + &self, + id: usize, + paths: &ProjectPathsConfig, + version: impl AsRef, + ) -> Result { + let file = &self.inner.files[id]; + let target = file.target_path(self, paths); + let content = file.modified_content(version, self.get_imports(id).join("\n").as_str()); + super::create_contract_file(target.clone(), content)?; + + Ok(target) + } + /// generates exactly `num` unique imports in the range of a lib's files /// /// # Panics @@ -245,7 +331,7 @@ contract {} {{}} id: usize, num: usize, ) -> BTreeSet { - let lib = &self.libraries[lib_id]; + let lib = &self.inner.libraries[lib_id]; assert!(lib.num_files > num); let mut imports: Vec<_> = (lib.offset..(lib.offset + lib.len())).collect(); imports.shuffle(rng); @@ -253,6 +339,18 @@ contract {} {{}} } } +impl From for MockProjectGenerator { + fn from(inner: MockProjectSkeleton) -> Self { + Self { inner, ..Default::default() } + } +} + +impl Default for MockProjectGenerator { + fn default() -> Self { + Self { name_strategy: Box::new(SimpleNamingStrategy::default()), inner: Default::default() } + } +} + /// Used to determine the names for elements trait NamingStrategy { /// Return a new name for the given source file id @@ -296,6 +394,8 @@ pub struct MockFile { pub imports: BTreeSet, /// lib id if this file is part of a lib pub lib_id: Option, + /// whether this file should 
emit artifacts + pub emit_artifacts: bool, } impl MockFile { @@ -303,6 +403,61 @@ impl MockFile { pub fn is_external(&self) -> bool { self.lib_id.is_some() } + + pub fn target_path(&self, gen: &MockProjectGenerator, paths: &ProjectPathsConfig) -> PathBuf { + let mut target = if let Some(lib) = self.lib_id { + paths.root.join("lib").join(&gen.inner.libraries[lib].name).join("src").join(&self.name) + } else { + paths.sources.join(&self.name) + }; + target.set_extension("sol"); + + target + } + + /// Returns the content to use for a modified file + /// + /// The content here is arbitrary, it should only differ from the mocked content + pub fn modified_content(&self, version: impl AsRef, imports: &str) -> String { + format!( + r#" +// SPDX-License-Identifier: UNLICENSED +pragma solidity {}; +{} +contract {} {{ + function hello() public {{}} +}} + "#, + version.as_ref(), + imports, + self.name + ) + } + + /// Returns a mocked content for the file + pub fn mock_content(&self, version: impl AsRef, imports: &str) -> String { + let version = version.as_ref(); + if self.emit_artifacts { + format!( + r#" +// SPDX-License-Identifier: UNLICENSED +pragma solidity {}; +{} +contract {} {{}} + "#, + version, imports, self.name + ) + } else { + format!( + r#" +// SPDX-License-Identifier: UNLICENSED +pragma solidity {}; +{} + "#, + version, imports, + ) + } + } } #[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Serialize, Deserialize)] @@ -350,6 +505,8 @@ pub struct MockProjectSettings { pub min_imports: usize, /// max amount of import statements a file can use pub max_imports: usize, + /// whether to also use files that don't emit artifacts + pub allow_no_artifacts_files: bool, } impl MockProjectSettings { @@ -363,6 +520,7 @@ impl MockProjectSettings { num_lib_files: rng.gen_range(1..10), min_imports: rng.gen_range(0..3), max_imports: rng.gen_range(4..10), + allow_no_artifacts_files: true, } } @@ -375,6 +533,7 @@ impl MockProjectSettings { num_lib_files: 15, min_imports: 
3, max_imports: 12, + allow_no_artifacts_files: true, } } } @@ -382,7 +541,14 @@ impl MockProjectSettings { impl Default for MockProjectSettings { fn default() -> Self { // these are arbitrary - Self { num_sources: 20, num_libs: 2, num_lib_files: 10, min_imports: 0, max_imports: 5 } + Self { + num_sources: 20, + num_libs: 2, + num_lib_files: 10, + min_imports: 0, + max_imports: 5, + allow_no_artifacts_files: true, + } } } diff --git a/ethers-solc/src/project_util/mod.rs b/ethers-solc/src/project_util/mod.rs index 1f7ae59c..04ce0651 100644 --- a/ethers-solc/src/project_util/mod.rs +++ b/ethers-solc/src/project_util/mod.rs @@ -6,8 +6,8 @@ use crate::{ hh::HardhatArtifacts, project_util::mock::{MockProjectGenerator, MockProjectSettings}, utils::tempdir, - ArtifactOutput, ConfigurableArtifacts, PathStyle, Project, ProjectCompileOutput, - ProjectPathsConfig, SolcIoError, + Artifact, ArtifactOutput, Artifacts, ConfigurableArtifacts, ConfigurableContractArtifact, + PathStyle, Project, ProjectCompileOutput, ProjectPathsConfig, SolFilesCache, SolcIoError, }; use fs_extra::{dir, file}; use std::{ @@ -214,6 +214,13 @@ contract {} {{}} create_contract_file(source, content) } + /// Returns a snapshot of all cached artifacts + pub fn artifacts_snapshot(&self) -> Result> { + let cache = self.project().read_cache_file()?; + let artifacts = cache.read_artifacts::()?; + Ok(ArtifactsSnapshot { cache, artifacts }) + } + /// Populate the project with mock files pub fn mock(&self, gen: &MockProjectGenerator, version: impl AsRef) -> Result<()> { gen.write_to(self.paths(), version) @@ -407,6 +414,25 @@ impl AsRef> for TempProject { } } +/// The cache file and all the artifacts it references +#[derive(Debug, Clone)] +pub struct ArtifactsSnapshot { + pub cache: SolFilesCache, + pub artifacts: Artifacts, +} + +impl ArtifactsSnapshot { + /// Ensures that all artifacts have abi, bytecode, deployedbytecode + pub fn assert_artifacts_essentials_present(&self) { + for artifact in 
self.artifacts.artifact_files() { + let c = artifact.artifact.clone().into_compact_contract(); + assert!(c.abi.is_some()); + assert!(c.bin.is_some()); + assert!(c.bin_runtime.is_some()); + } + } +} + /// commonly used options for copying entire folders fn dir_copy_options() -> dir::CopyOptions { dir::CopyOptions { diff --git a/ethers-solc/src/report.rs b/ethers-solc/src/report.rs index d7806ca8..15413012 100644 --- a/ethers-solc/src/report.rs +++ b/ethers-solc/src/report.rs @@ -21,7 +21,7 @@ use std::{ cell::RefCell, error::Error, fmt, - path::Path, + path::{Path, PathBuf}, ptr::NonNull, sync::{ atomic::{AtomicBool, AtomicUsize, Ordering}, @@ -91,7 +91,25 @@ where /// A `Reporter` is entirely passive and only listens to incoming "events". pub trait Reporter: 'static { /// Callback invoked right before [`Solc::compile()`] is called - fn on_solc_spawn(&self, _solc: &Solc, _version: &Version, _input: &CompilerInput) {} + /// + /// This contains the [Solc] its [Version] the complete [CompilerInput] and all files that + /// triggered the compile job. The dirty files are only provided to give a better feedback what + /// was actually compiled. + /// + /// If caching is enabled and there has been a previous successful solc run, the dirty files set + /// contains the files that absolutely must be recompiled, while the [CompilerInput] contains + /// all files, the dirty files and all their dependencies. + /// + /// If this is a fresh compile then the [Sources] set of the [CompilerInput] matches the dirty + /// files set. 
+ fn on_solc_spawn( + &self, + _solc: &Solc, + _version: &Version, + _input: &CompilerInput, + _dirty_files: &[PathBuf], + ) { + } /// Invoked with the `CompilerOutput` if [`Solc::compiled()`] was successful fn on_solc_success(&self, _solc: &Solc, _version: &Version, _output: &CompilerOutput) {} @@ -151,8 +169,13 @@ impl dyn Reporter { } } -pub(crate) fn solc_spawn(solc: &Solc, version: &Version, input: &CompilerInput) { - get_default(|r| r.reporter.on_solc_spawn(solc, version, input)); +pub(crate) fn solc_spawn( + solc: &Solc, + version: &Version, + input: &CompilerInput, + dirty_files: &[PathBuf], +) { + get_default(|r| r.reporter.on_solc_spawn(solc, version, input, dirty_files)); } pub(crate) fn solc_success(solc: &Solc, version: &Version, output: &CompilerOutput) { @@ -296,10 +319,16 @@ pub struct BasicStdoutReporter(()); impl Reporter for BasicStdoutReporter { /// Callback invoked right before [`Solc::compile()`] is called - fn on_solc_spawn(&self, _solc: &Solc, version: &Version, input: &CompilerInput) { + fn on_solc_spawn( + &self, + _solc: &Solc, + version: &Version, + _input: &CompilerInput, + dirty_files: &[PathBuf], + ) { println!( "Compiling {} files with {}.{}.{}", - input.sources.len(), + dirty_files.len(), version.major, version.minor, version.patch diff --git a/ethers-solc/src/resolver/mod.rs b/ethers-solc/src/resolver/mod.rs index 7f02d6c5..43dc9e75 100644 --- a/ethers-solc/src/resolver/mod.rs +++ b/ethers-solc/src/resolver/mod.rs @@ -86,6 +86,26 @@ pub struct GraphEdges { } impl GraphEdges { + /// How many files are source files + pub fn num_source_files(&self) -> usize { + self.num_input_files + } + + /// Returns an iterator over all file indices + pub fn files(&self) -> impl Iterator + '_ { + 0..self.edges.len() + } + + /// Returns an iterator over all source file indices + pub fn source_files(&self) -> impl Iterator + '_ { + 0..self.num_input_files + } + + /// Returns an iterator over all library files + pub fn library_files(&self) -> impl 
Iterator + '_ { + self.files().skip(self.num_input_files) + } + /// Returns a list of nodes the given node index points to for the given kind. pub fn imported_nodes(&self, from: usize) -> &[usize] { &self.edges[from] @@ -110,6 +130,11 @@ impl GraphEdges { self.indices[file.as_ref()] } + /// Returns the path of the given node + pub fn node_path(&self, id: usize) -> &PathBuf { + &self.rev_indices[&id] + } + /// Returns true if the `file` was originally included when the graph was first created and not /// added when all `imports` were resolved pub fn is_input_file(&self, file: impl AsRef) -> bool { diff --git a/ethers-solc/tests/mocked.rs b/ethers-solc/tests/mocked.rs index 946ae132..4bd717b2 100644 --- a/ethers-solc/tests/mocked.rs +++ b/ethers-solc/tests/mocked.rs @@ -30,7 +30,7 @@ impl From<(MockProjectSettings, &'static str)> for MockSettings { /// Helper function to run a test and report the used generator if the closure failed. fn run_mock( settings: impl Into, - f: impl FnOnce(&mut TempProject) -> Result<()>, + f: impl FnOnce(&mut TempProject, &MockProjectGenerator) -> Result<()>, ) -> TempProject { let MockSettings { settings, version } = settings.into(); let gen = MockProjectGenerator::new(&settings); @@ -39,7 +39,7 @@ fn run_mock( project.paths_mut().remappings.extend(remappings); project.mock(&gen, version).unwrap(); - if let Err(err) = f(&mut project) { + if let Err(err) = f(&mut project, &gen) { panic!( "mock failed: `{}` with mock settings:\n {}", err, @@ -54,7 +54,7 @@ fn run_mock( fn run_basic(settings: impl Into) { let settings = settings.into(); let version = settings.version; - run_mock(settings, |project| { + run_mock(settings, |project, _| { project.ensure_no_errors_recompile_unchanged()?; project.add_basic_source("Dummy", version)?; project.ensure_changed()?; @@ -79,3 +79,29 @@ fn can_compile_mocked_multi() { fn can_compile_mocked_large() { run_basic(MockProjectSettings::large()) } + +#[test] +fn can_compile_mocked_modified() { + 
run_mock(MockProjectSettings::random(), |project, gen| { + project.ensure_no_errors_recompile_unchanged()?; + // modify a random file + gen.modify_file(gen.file_ids().count() / 2, project.paths(), DEFAULT_VERSION)?; + project.ensure_changed()?; + project.artifacts_snapshot()?.assert_artifacts_essentials_present(); + Ok(()) + }); +} + +#[test] +fn can_compile_mocked_modified_all() { + run_mock(MockProjectSettings::random(), |project, gen| { + project.ensure_no_errors_recompile_unchanged()?; + // modify a random file + for id in gen.file_ids() { + gen.modify_file(id, project.paths(), DEFAULT_VERSION)?; + project.ensure_changed()?; + project.artifacts_snapshot()?.assert_artifacts_essentials_present(); + } + Ok(()) + }); +}