From b295d73c4a65b4d807b7c6134ee3f213fab768d4 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 4 Feb 2022 17:20:24 +0100 Subject: [PATCH] refactor(solc): rewrite compiler passes and cache change detection (#802) * chore: clippy * refactor: rewrite compiler passes and cache * feat: more work on compile pipeline * feat: add cache constructor * add artifact filtering * fine tune api * feat: prepare version integration * docs: more docs * feat: add cacheentry2 * replace cacheentry types * integrate new api * docs: more docs * feat: implement new output handler * feat: integrate cached files in new compile pipeline * refactor: more cache refactor * docs: more docs * feat: add source name mapping * feat: implement new parallel solc * refactor: do a little cleanup * refactor: even more cleanup * even more cleanup * chore: make it compile * chore: make it compile with all features * chore: clippy fix * feat: integrate new compiler pipeline * docs: more docs * refactor: move stuff around * refactor: start deprecating output type * chore: make it compile again * chore(deps): bump solc version 0.2.0 * feat: unify output types * cargo fix * refactor: add contracts wrapper * chore: replace ProjectCompileOutput * docs: add more docs * feat: add offline mode * feat: more artifact helpers * chore: cleanup cache * chore: streamline types * fix: better artifacts mapping * chore: some cleanup * chore: change artifact * chore: add configure solc fn * feat: add artifact reading * feat: implement retain and extend * feat: add cache extending * feat: write to disk * chore: make clippy happy * feat: implement path mapping * chore: nits * feat: introduce states * feat: add compiler state machine * chore: move cache types to cache mod * chore: make clippy happy * feat: add debug derives * fix: use resolved import source unit names * fix: failing tests * test: test multiple libs properly * chore: make clippy happy * chore: update CHANGELOG * fix: doc tests * fix: set offline mode correctly * chore: make it compile again * Update ethers-solc/src/artifacts.rs Co-authored-by: Georgios Konstantopoulos * feat: find remappings by default * typos * add eth_syncing RPC (#848) * add eth_syncing RPC * Changelo updated * small comments * Intermediate SyncingStatus * fix(core): adjust Ganache for new cli output (#851) * fix: review comments * fix: cache relative path bug * chore: add cache example * chore: use absolute paths * fix: remove overwritten files from cache * fix: rustfmt * chore: more helper functions * chore: export AggregatedOutput * feat: implement helper functions * feat: even more helpers * fix: failing doc tests * refactor: remove source name map tracking * fix: determine artifacts in ephemeral mode * refactor: allowed paths should not fail Co-authored-by: Georgios Konstantopoulos Co-authored-by: rakita Co-authored-by: wolflo <33909953+wolflo@users.noreply.github.com> --- CHANGELOG.md | 4 + Cargo.lock | 40 +- Cargo.toml | 2 +- ethers-contract/Cargo.toml | 2 +- ethers-middleware/Cargo.toml | 2 +- ethers-solc/Cargo.toml | 7 +- ethers-solc/src/artifact_output.rs | 567 +++++++++ ethers-solc/src/artifacts.rs | 155 +-- ethers-solc/src/cache.rs | 1036 +++++++++++------ ethers-solc/src/compile/contracts.rs | 146 +++ ethers-solc/src/compile/many.rs | 40 + .../src/{compile.rs => compile/mod.rs} | 51 +- ethers-solc/src/compile/output.rs | 363 ++++++ ethers-solc/src/compile/project.rs | 440 +++++++ ethers-solc/src/config.rs | 304 +---- ethers-solc/src/error.rs | 3 + ethers-solc/src/hh.rs | 29 +- 
ethers-solc/src/lib.rs | 623 ++-------- ethers-solc/src/project_util.rs | 15 + ethers-solc/src/resolver.rs | 144 ++- ethers-solc/src/utils.rs | 40 + .../test-data/solidity-files-cache.json | 137 ++- .../lib1/{ => bar/src}/Bar.sol | 0 .../lib2/{ => baz/src}/Baz.sol | 0 .../test-data/test-contract-libs/src/Foo.sol | 4 +- ethers-solc/tests/project.rs | 52 +- 26 files changed, 2777 insertions(+), 1429 deletions(-) create mode 100644 ethers-solc/src/artifact_output.rs create mode 100644 ethers-solc/src/compile/contracts.rs create mode 100644 ethers-solc/src/compile/many.rs rename ethers-solc/src/{compile.rs => compile/mod.rs} (95%) create mode 100644 ethers-solc/src/compile/output.rs create mode 100644 ethers-solc/src/compile/project.rs rename ethers-solc/test-data/test-contract-libs/lib1/{ => bar/src}/Bar.sol (100%) rename ethers-solc/test-data/test-contract-libs/lib2/{ => baz/src}/Baz.sol (100%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 85885018..190a2885 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,6 +44,10 @@ ### Unreleased +- Total revamp of the `Project::compile` pipeline + [#802](https://github.com/gakonst/ethers-rs/pull/802) + - Support multiple versions of compiled contracts + - Breaking: deprecate hardhat cache file compatibility, cache file now tracks artifact paths and their versions - Fix flatten replacement target location [#846](https://github.com/gakonst/ethers-rs/pull/846) - Fix duplicate files during flattening diff --git a/Cargo.lock b/Cargo.lock index c8e3491d..82db5b05 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1056,6 +1056,19 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "env_logger" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + [[package]] name = "eth-keystore" version = "0.3.0" @@ -1370,11 +1383,12 @@ dependencies = [ [[package]] name = "ethers-solc" -version = "0.1.0" +version = "0.2.0" dependencies = [ "colored", "criterion", "dunce", + "env_logger", "ethers-core", "fs_extra", "futures-util", @@ -1386,6 +1400,7 @@ dependencies = [ "num_cpus", "once_cell", "pretty_assertions", + "rand 0.8.4", "rayon", "regex", "semver", @@ -1399,6 +1414,7 @@ dependencies = [ "tiny-keccak", "tokio", "tracing", + "tracing-subscriber", "walkdir", ] @@ -1793,6 +1809,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + [[package]] name = "hyper" version = "0.14.16" @@ -2062,6 +2084,15 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + [[package]] name = "matches" version = "0.1.9" @@ -2848,6 +2879,9 @@ name = "regex-automata" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] [[package]] name = "regex-syntax" @@ -3848,9 +3882,13 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5312f325fe3588e277415f5a6cca1f4ccad0f248c4cd5a4bd33032d7286abc22" dependencies = [ "ansi_term", + "lazy_static", + "matchers", + "regex", "sharded-slab", "smallvec", "thread_local", + "tracing", "tracing-core", "tracing-log", ] diff --git a/Cargo.toml b/Cargo.toml index 852e86c9..48933976 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -88,7 +88,7 @@ ethers-core = { version = "^0.6.0", default-features = false, path = "./ethers-c ethers-providers = { version = "^0.6.0", default-features = false, path = "./ethers-providers" } ethers-signers = { version = "^0.6.0", default-features = false, path = "./ethers-signers" } ethers-middleware = { version = "^0.6.0", default-features = false, path = "./ethers-middleware" } -ethers-solc = { version = "^0.1.0", default-features = false, path = "./ethers-solc" } +ethers-solc = { version = "^0.2.0", default-features = false, path = "./ethers-solc" } ethers-etherscan = { version = "^0.2.0", default-features = false, path = "./ethers-etherscan" } [dev-dependencies] diff --git a/ethers-contract/Cargo.toml b/ethers-contract/Cargo.toml index dc29519b..d3ac7f09 100644 --- a/ethers-contract/Cargo.toml +++ b/ethers-contract/Cargo.toml @@ -32,7 +32,7 @@ ethers-contract-abigen = { version = "^0.6.0", path = "ethers-contract-abigen" } ethers-contract-derive = { version = "^0.6.0", path = "ethers-contract-derive" } ethers-core = { version = "^0.6.0", path = "../ethers-core", default-features = false, features = ["eip712"]} ethers-derive-eip712 = { version = "^0.2.0", path = "../ethers-core/ethers-derive-eip712"} -ethers-solc = { version = "^0.1.0", path = "../ethers-solc", default-features = false } +ethers-solc = { version = "^0.2.0", path = "../ethers-solc", default-features = false } [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies] tokio = { version = "1.5", default-features = false, features = ["macros"] } diff --git a/ethers-middleware/Cargo.toml b/ethers-middleware/Cargo.toml index 82c5fabb..283633dc 100644 --- a/ethers-middleware/Cargo.toml +++ b/ethers-middleware/Cargo.toml @@ -42,7 +42,7 @@ hex = { version = "0.4.3", default-features = false, features = ["std"] } rand = { version = "0.8.4", default-features = false } ethers-providers = { version = "^0.6.0", path = "../ethers-providers", default-features = false, features = ["ws", "rustls"] } once_cell = "1.8.0" -ethers-solc = { version = "^0.1.0", path = "../ethers-solc", default-features = false } +ethers-solc = { version = "^0.2.0", path = "../ethers-solc", default-features = false } serial_test = "0.5.1" [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies] diff --git a/ethers-solc/Cargo.toml b/ethers-solc/Cargo.toml index 8fcac386..47a34f00 100644 --- a/ethers-solc/Cargo.toml +++ b/ethers-solc/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ethers-solc" -version = "0.1.0" +version = "0.2.0" authors = ["Matthias Seitz ", "Georgios Konstantopoulos "] license = "MIT OR Apache-2.0" edition = "2018" @@ -17,7 +17,7 @@ keywords = ["ethereum", "web3", "solc", "solidity", "ethers"] ethers-core = { version = "^0.6.0", path = "../ethers-core", default-features = false } serde_json = "1.0.68" serde = { version = "1.0.130", features = ["derive"] } -semver = "1.0.4" +semver = { version = "1.0.4", features = ["serde"] } walkdir = "2.3.2" tokio = { version = "1.15.0", default-features = false, features = ["process", "io-util", "fs", "time"], optional = true } futures-util = { version = "^0.3", optional = true } @@ -50,6 +50,9 @@ getrandom 
= { version = "0.2", features = ["js"] }
 
 [dev-dependencies]
 criterion = { version = "0.3", features = ["async_tokio"] }
+env_logger = "*"
+tracing-subscriber = {version = "0.3", default-features = false, features = ["env-filter", "fmt"]}
+rand = "0.8.4"
 pretty_assertions = "1.1.0"
 tempfile = "3.3.0"
 tokio = { version = "1.15.0", features = ["full"] }
diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs
new file mode 100644
index 00000000..899f6c0e
--- /dev/null
+++ b/ethers-solc/src/artifact_output.rs
@@ -0,0 +1,567 @@
+//! Output artifact handling
+
+use crate::{
+    artifacts::{CompactContract, CompactContractBytecode, Contract, FileToContractsMap},
+    contracts::VersionedContracts,
+    error::Result,
+    utils, HardhatArtifact, ProjectPathsConfig, SolcError,
+};
+use ethers_core::{abi::Abi, types::Bytes};
+use semver::Version;
+use serde::{de::DeserializeOwned, Serialize};
+use std::{
+    collections::btree_map::BTreeMap,
+    fmt, fs, io,
+    path::{Path, PathBuf},
+};
+
+/// Represents an artifact file representing a [`crate::Contract`]
+#[derive(Debug, Clone, PartialEq)]
+pub struct ArtifactFile<T> {
+    /// The Artifact that was written
+    pub artifact: T,
+    /// path to the file where the `artifact` was written to
+    pub file: PathBuf,
+    /// `solc` version that produced this artifact
+    pub version: Version,
+}
+
+impl<T: Serialize> ArtifactFile<T> {
+    /// Writes the given contract to the `out` path creating all parent directories
+    pub fn write(&self) -> Result<()> {
+        utils::create_parent_dir_all(&self.file)?;
+        fs::write(&self.file, serde_json::to_vec_pretty(&self.artifact)?)
+            .map_err(|err| SolcError::io(err, &self.file))?;
+        Ok(())
+    }
+}
+
+impl<T> ArtifactFile<T> {
+    /// Sets the file to `root` adjoined to `self.file`.
+    pub fn join(&mut self, root: impl AsRef<Path>) {
+        self.file = root.as_ref().join(&self.file);
+    }
+
+    /// Removes `base` from the artifact's path
+    pub fn strip_prefix(&mut self, base: impl AsRef<Path>) {
+        if let Ok(prefix) = self.file.strip_prefix(base) {
+            self.file = prefix.to_path_buf();
+        }
+    }
+}
+
+/// local helper type alias `file name -> (contract name -> Vec<..>)`
+pub(crate) type ArtifactsMap<T> = FileToContractsMap<Vec<ArtifactFile<T>>>;
+
+/// Represents a set of Artifacts
+#[derive(Debug, Clone, PartialEq)]
+pub struct Artifacts<T>(pub ArtifactsMap<T>);
+
+impl<T> From<ArtifactsMap<T>> for Artifacts<T> {
+    fn from(m: ArtifactsMap<T>) -> Self {
+        Self(m)
+    }
+}
+
+impl<'a, T> IntoIterator for &'a Artifacts<T> {
+    type Item = (&'a String, &'a BTreeMap<String, Vec<ArtifactFile<T>>>);
+    type IntoIter =
+        std::collections::btree_map::Iter<'a, String, BTreeMap<String, Vec<ArtifactFile<T>>>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.iter()
+    }
+}
+
+impl<T> IntoIterator for Artifacts<T> {
+    type Item = (String, BTreeMap<String, Vec<ArtifactFile<T>>>);
+    type IntoIter =
+        std::collections::btree_map::IntoIter<String, BTreeMap<String, Vec<ArtifactFile<T>>>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
+impl<T> Default for Artifacts<T> {
+    fn default() -> Self {
+        Self(Default::default())
+    }
+}
+
+impl<T> AsRef<ArtifactsMap<T>> for Artifacts<T> {
+    fn as_ref(&self) -> &ArtifactsMap<T> {
+        &self.0
+    }
+}
+
+impl<T> AsMut<ArtifactsMap<T>> for Artifacts<T> {
+    fn as_mut(&mut self) -> &mut ArtifactsMap<T> {
+        &mut self.0
+    }
+}
+
+impl<T: Serialize> Artifacts<T> {
+    /// Writes all artifacts into the given `artifacts_root` folder
+    pub fn write_all(&self) -> Result<()> {
+        for artifact in self.artifact_files() {
+            artifact.write()?;
+        }
+        Ok(())
+    }
+}
+
+impl<T> Artifacts<T> {
+    pub fn into_inner(self) -> ArtifactsMap<T> {
+        self.0
+    }
+
+    /// Sets the artifact files location to `root` adjoined to `self.file`.
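+    ///
+    /// A minimal usage sketch (the artifact type and base path here are
+    /// hypothetical, only for illustration):
+    ///
+    /// ```no_run
+    /// use ethers_solc::{artifacts::CompactContractBytecode, Artifacts};
+    /// # fn t(mut artifacts: Artifacts<CompactContractBytecode>) {
+    /// // prepend the project's artifacts dir to every artifact file path
+    /// artifacts.join_all("/Users/me/project/artifacts");
+    /// # }
+    /// ```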
+    pub fn join_all(&mut self, root: impl AsRef<Path>) -> &mut Self {
+        let root = root.as_ref();
+        self.artifact_files_mut().for_each(|artifact| artifact.join(root));
+        self
+    }
+
+    /// Removes `base` from all artifacts
+    pub fn strip_prefix_all(&mut self, base: impl AsRef<Path>) -> &mut Self {
+        let base = base.as_ref();
+        self.artifact_files_mut().for_each(|artifact| artifact.strip_prefix(base));
+        self
+    }
+
+    /// Returns all `ArtifactFile`s for the contract with the matching name
+    fn get_contract_artifact_files(&self, contract_name: &str) -> Option<&Vec<ArtifactFile<T>>> {
+        self.0.values().find_map(|all| all.get(contract_name))
+    }
+
+    /// Returns true if this type contains an artifact with the given path for the given contract
+    pub fn has_contract_artifact(&self, contract_name: &str, artifact_path: &Path) -> bool {
+        self.get_contract_artifact_files(contract_name)
+            .map(|artifacts| artifacts.iter().any(|artifact| artifact.file == artifact_path))
+            .unwrap_or_default()
+    }
+
+    /// Returns true if this type contains an artifact with the given path
+    pub fn has_artifact(&self, artifact_path: &Path) -> bool {
+        self.artifact_files().any(|artifact| artifact.file == artifact_path)
+    }
+
+    /// Iterate over all artifact files
+    pub fn artifact_files(&self) -> impl Iterator<Item = &ArtifactFile<T>> {
+        self.0.values().flat_map(|c| c.values().flat_map(|artifacts| artifacts.iter()))
+    }
+
+    /// Iterate over all artifact files, mutably
+    pub fn artifact_files_mut(&mut self) -> impl Iterator<Item = &mut ArtifactFile<T>> {
+        self.0.values_mut().flat_map(|c| c.values_mut().flat_map(|artifacts| artifacts.iter_mut()))
+    }
+
+    /// Returns an iterator over _all_ artifacts and `<file name>:<contract name>`
+    pub fn into_artifacts<O: ArtifactOutput<Artifact = T>>(
+        self,
+    ) -> impl Iterator<Item = (String, T)> {
+        self.0.into_values().flat_map(|contract_artifacts| {
+            contract_artifacts.into_iter().flat_map(|(_contract_name, artifacts)| {
+                artifacts.into_iter().filter_map(|artifact| {
+                    O::contract_name(&artifact.file).map(|name| {
+                        (
+                            format!(
+                                "{}:{}",
+                                artifact.file.file_name().unwrap().to_string_lossy(),
+                                name
+                            ),
+                            artifact.artifact,
+                        )
+                    })
+                })
+            })
+        })
+    }
+
+    /// Returns an iterator that yields the tuple `(file, contract name, artifact)`
+    ///
+    /// **NOTE** this returns the path as is
+    pub fn into_artifacts_with_files(self) -> impl Iterator<Item = (String, String, T)> {
+        self.0.into_iter().flat_map(|(f, contract_artifacts)| {
+            contract_artifacts.into_iter().flat_map(move |(name, artifacts)| {
+                let contract_name = name;
+                let file = f.clone();
+                artifacts
+                    .into_iter()
+                    .map(move |artifact| (file.clone(), contract_name.clone(), artifact.artifact))
+            })
+        })
+    }
+
+    /// Strips the given prefix from all artifact file paths to make them relative to the given
+    /// `base` argument
+    pub fn into_stripped_file_prefixes(self, base: impl AsRef<Path>) -> Self {
+        let base = base.as_ref();
+        let artifacts = self
+            .0
+            .into_iter()
+            .map(|(file, c)| {
+                let file_path = Path::new(&file);
+                if let Ok(p) = file_path.strip_prefix(base) {
+                    (p.to_string_lossy().to_string(), c)
+                } else {
+                    (file, c)
+                }
+            })
+            .collect();
+
+        Artifacts(artifacts)
+    }
+
+    /// Finds the first artifact `T` with a matching contract name
+    pub fn find(&self, contract_name: impl AsRef<str>) -> Option<&T> {
+        let contract_name = contract_name.as_ref();
+        self.0.iter().find_map(|(_file, contracts)| {
+            contracts.get(contract_name).and_then(|c| c.get(0).map(|a| &a.artifact))
+        })
+    }
+
+    /// Removes the first artifact `T` with a matching contract name
+    ///
+    /// *Note:* if there are multiple artifacts (contract compiled with different solc) then this
+    /// returns the first artifact in that set
+    pub fn remove(&mut self, contract_name: impl AsRef<str>) -> Option<T> {
+        let contract_name = contract_name.as_ref();
+        self.0.iter_mut().find_map(|(_file, contracts)| {
+            let mut artifact = None;
+            if let Some((c, mut artifacts)) = contracts.remove_entry(contract_name) {
+                if !artifacts.is_empty() {
+                    artifact = Some(artifacts.remove(0).artifact);
+                }
+                if !artifacts.is_empty() {
+                    contracts.insert(c, artifacts);
+                }
+            }
+            artifact
+        })
+    }
+}
+
+/// A trait representation for a [`crate::Contract`] artifact
+pub trait Artifact {
+    /// Returns the artifact's `Abi` and bytecode
+    fn into_inner(self) -> (Option<Abi>, Option<Bytes>);
+
+    /// Turns the artifact into a container type for abi, compact bytecode and deployed bytecode
+    fn into_compact_contract(self) -> CompactContract;
+
+    /// Turns the artifact into a container type for abi, full bytecode and deployed bytecode
+    fn into_contract_bytecode(self) -> CompactContractBytecode;
+
+    /// Returns the contents of this type as a single tuple of abi, bytecode and deployed bytecode
+    fn into_parts(self) -> (Option<Abi>, Option<Bytes>, Option<Bytes>);
+
+    /// Same as [`Self::into_parts()`] but returns `Err` if an element is `None`
+    fn try_into_parts(self) -> Result<(Abi, Bytes, Bytes)>
+    where
+        Self: Sized,
+    {
+        let (abi, bytecode, deployed_bytecode) = self.into_parts();
+
+        Ok((
+            abi.ok_or_else(|| SolcError::msg("abi missing"))?,
+            bytecode.ok_or_else(|| SolcError::msg("bytecode missing"))?,
+            deployed_bytecode.ok_or_else(|| SolcError::msg("deployed bytecode missing"))?,
+        ))
+    }
+}
+
+impl<T> Artifact for T
+where
+    T: Into<CompactContractBytecode> + Into<CompactContract>,
+{
+    fn into_inner(self) -> (Option<Abi>, Option<Bytes>) {
+        let artifact = self.into_compact_contract();
+        (artifact.abi, artifact.bin.and_then(|bin| bin.into_bytes()))
+    }
+
+    fn into_compact_contract(self) -> CompactContract {
+        self.into()
+    }
+
+    fn into_contract_bytecode(self) -> CompactContractBytecode {
+        self.into()
+    }
+
+    fn into_parts(self) -> (Option<Abi>, Option<Bytes>, Option<Bytes>) {
+        self.into_compact_contract().into_parts()
+    }
+}
+
+/// Handler invoked with the output of `solc`
+///
+/// Implementers of this trait are expected to take care of [`crate::Contract`] to
+/// [`crate::ArtifactOutput::Artifact`] conversion and how that `Artifact` type is stored on disk,
+/// this includes artifact file location and naming.
+///
+/// Depending on the [`crate::Project`] contracts and their compatible versions,
+/// [`crate::ProjectCompiler::compile()`] may invoke different `solc` executables on the same
+/// solidity file leading to multiple [`crate::CompilerOutput`]s for the same `.sol` file.
+/// In addition to the `solidity file` to `contract` relationship (1-N*),
+/// [`crate::VersionedContracts`] also tracks the `contract` to (`artifact` + `solc version`)
+/// relationship (1-N+).
+pub trait ArtifactOutput {
+    /// Represents the artifact that will be stored for a `Contract`
+    type Artifact: Artifact + DeserializeOwned + Serialize + fmt::Debug;
+
+    /// Handle the aggregated set of compiled contracts from the solc [`crate::CompilerOutput`].
+    ///
+    /// This will be invoked with all aggregated contracts from (multiple) solc `CompilerOutput`.
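+    ///
+    /// For illustration: a `Greeter.sol` compiled with two different `solc`
+    /// versions typically yields one artifact per version (a sketch of the
+    /// default layout, the paths shown are hypothetical):
+    ///
+    /// ```text
+    /// artifacts/Greeter.sol/Greeter.0.8.10.json
+    /// artifacts/Greeter.sol/Greeter.0.8.11.json
+    /// ```
+    ///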
+    /// See [`crate::AggregatedCompilerOutput`]
+    fn on_output(
+        contracts: &VersionedContracts,
+        layout: &ProjectPathsConfig,
+    ) -> Result<Artifacts<Self::Artifact>> {
+        let mut artifacts = Self::output_to_artifacts(contracts);
+        artifacts.join_all(&layout.artifacts);
+        artifacts.write_all()?;
+
+        Self::write_extras(contracts, layout)?;
+
+        Ok(artifacts)
+    }
+
+    /// Writes additional files for the contracts if they are included in the `Contract`, such as
+    /// `ir`, `ewasm`, `iropt`.
+    ///
+    /// By default, these fields are _not_ enabled in the [`crate::Settings`], see
+    /// [`crate::Settings::default_output_selection()`], and the respective fields of the
+    /// [`Contract`] will be `None`. If they are manually added to the `output_selection`, then
+    /// individual files are also created for this output, such as `Greeter.iropt`,
+    /// `Greeter.ewasm`
+    fn write_extras(contracts: &VersionedContracts, layout: &ProjectPathsConfig) -> Result<()> {
+        for (file, contracts) in contracts.as_ref().iter() {
+            for (name, versioned_contracts) in contracts {
+                for c in versioned_contracts {
+                    let artifact_path = if versioned_contracts.len() > 1 {
+                        Self::output_file_versioned(file, name, &c.version)
+                    } else {
+                        Self::output_file(file, name)
+                    };
+
+                    let file = layout.artifacts.join(artifact_path);
+                    utils::create_parent_dir_all(&file)?;
+
+                    if let Some(iropt) = &c.contract.ir_optimized {
+                        fs::write(&file.with_extension("iropt"), iropt)
+                            .map_err(|err| SolcError::io(err, file.with_extension("iropt")))?
+                    }
+
+                    if let Some(ir) = &c.contract.ir {
+                        fs::write(&file.with_extension("ir"), ir)
+                            .map_err(|err| SolcError::io(err, file.with_extension("ir")))?
+                    }
+
+                    if let Some(ewasm) = &c.contract.ewasm {
+                        fs::write(
+                            &file.with_extension("ewasm"),
+                            serde_json::to_vec_pretty(&ewasm)?,
+                        )
+                        .map_err(|err| SolcError::io(err, file.with_extension("ewasm")))?;
+                    }
+
+                    if let Some(evm) = &c.contract.evm {
+                        if let Some(asm) = &evm.assembly {
+                            fs::write(&file.with_extension("asm"), asm)
+                                .map_err(|err| SolcError::io(err, file.with_extension("asm")))?
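+                            // additional compiler outputs enabled via
+                            // `output_selection` would follow the same
+                            // `with_extension` pattern used above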
+                        }
+                    }
+                }
+            }
+        }
+
+        Ok(())
+    }
+
+    /// Returns the file name for the contract's artifact
+    /// `Greeter.json`
+    fn output_file_name(name: impl AsRef<str>) -> PathBuf {
+        format!("{}.json", name.as_ref()).into()
+    }
+
+    /// Returns the file name for the contract's artifact and the given version
+    /// `Greeter.0.8.11.json`
+    fn output_file_name_versioned(name: impl AsRef<str>, version: &Version) -> PathBuf {
+        format!("{}.{}.{}.{}.json", name.as_ref(), version.major, version.minor, version.patch)
+            .into()
+    }
+
+    /// Returns the path to the contract's artifact location based on the contract's file and name
+    ///
+    /// This returns `contract.sol/contract.json` by default
+    fn output_file(contract_file: impl AsRef<Path>, name: impl AsRef<str>) -> PathBuf {
+        let name = name.as_ref();
+        contract_file
+            .as_ref()
+            .file_name()
+            .map(Path::new)
+            .map(|p| p.join(Self::output_file_name(name)))
+            .unwrap_or_else(|| Self::output_file_name(name))
+    }
+
+    /// Returns the path to the contract's artifact location based on the contract's file, name and
+    /// version
+    ///
+    /// This returns `contract.sol/contract.0.8.11.json` by default
+    fn output_file_versioned(
+        contract_file: impl AsRef<Path>,
+        name: impl AsRef<str>,
+        version: &Version,
+    ) -> PathBuf {
+        let name = name.as_ref();
+        contract_file
+            .as_ref()
+            .file_name()
+            .map(Path::new)
+            .map(|p| p.join(Self::output_file_name_versioned(name, version)))
+            .unwrap_or_else(|| Self::output_file_name_versioned(name, version))
+    }
+
+    /// The inverse of `contract_file_name`
+    ///
+    /// Expected to return the solidity contract's name derived from the file path
+    /// `sources/Greeter.sol` -> `Greeter`
+    fn contract_name(file: impl AsRef<Path>) -> Option<String> {
+        file.as_ref().file_stem().and_then(|s| s.to_str().map(|s| s.to_string()))
+    }
+
+    /// Whether the corresponding artifact of the given contract file and name exists
+    fn output_exists(
+        contract_file: impl AsRef<Path>,
+        name: impl AsRef<str>,
+        root: impl AsRef<Path>,
+    ) -> bool {
+        root.as_ref().join(Self::output_file(contract_file, name)).exists()
+    }
+
+    /// Read the artifact that's stored at the given path
+    ///
+    /// # Errors
+    ///
+    /// Returns an error if
+    /// - The file does not exist
+    /// - The file's content couldn't be deserialized into the `Artifact` type
+    fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
+        let path = path.as_ref();
+        let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?;
+        let file = io::BufReader::new(file);
+        Ok(serde_json::from_reader(file)?)
+    }
+
+    /// Read the cached artifacts that are located at the paths the iterator yields
+    ///
+    /// See [`Self::read_cached_artifact()`]
+    fn read_cached_artifacts<T, I>(files: I) -> Result<BTreeMap<PathBuf, Self::Artifact>>
+    where
+        I: IntoIterator<Item = T>,
+        T: Into<PathBuf>,
+    {
+        let mut artifacts = BTreeMap::default();
+        for path in files.into_iter() {
+            let path = path.into();
+            let artifact = Self::read_cached_artifact(&path)?;
+            artifacts.insert(path, artifact);
+        }
+        Ok(artifacts)
+    }
+
+    /// Convert a contract to the artifact type
+    ///
+    /// This is the core conversion function that takes care of converting a `Contract` into the
+    /// associated `Artifact` type
+    fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact;
+
+    /// Convert the compiler output into a set of artifacts
+    ///
+    /// **Note:** This only converts, but does _NOT_ write the artifacts to disk, see
+    /// [`Self::on_output()`]
+    fn output_to_artifacts(contracts: &VersionedContracts) -> Artifacts<Self::Artifact> {
+        let mut artifacts = ArtifactsMap::new();
+        for (file, contracts) in contracts.as_ref().iter() {
+            let mut entries = BTreeMap::new();
+            for (name, versioned_contracts) in contracts {
+                let mut contracts = Vec::with_capacity(versioned_contracts.len());
+                // check if the same contract compiled with multiple solc versions
+                for contract in versioned_contracts {
+                    let artifact_path = if versioned_contracts.len() > 1 {
+                        Self::output_file_versioned(file, name, &contract.version)
+                    } else {
+                        Self::output_file(file, name)
+                    };
+                    let artifact =
+                        Self::contract_to_artifact(file, name, contract.contract.clone());
+
+                    contracts.push(ArtifactFile {
+                        artifact,
+                        file: artifact_path,
+                        version: contract.version.clone(),
+                    });
+                }
+                entries.insert(name.to_string(), contracts);
+            }
+            artifacts.insert(file.to_string(), entries);
+        }
+
+        Artifacts(artifacts)
+    }
+}
+
+/// An Artifacts implementation that uses a compact representation
+///
+/// Creates a single json artifact with
+/// ```json
+/// {
+///    "abi": [],
+///    "bin": "...",
+///    "runtime-bin": "..."
+/// }
+/// ```
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub struct MinimalCombinedArtifacts;
+
+impl ArtifactOutput for MinimalCombinedArtifacts {
+    type Artifact = CompactContractBytecode;
+
+    fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact {
+        Self::Artifact::from(contract)
+    }
+}
+
+/// An Artifacts handler implementation that works the same as `MinimalCombinedArtifacts` but also
+/// supports reading hardhat artifacts if an initial attempt to deserialize an artifact failed
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub struct MinimalCombinedArtifactsHardhatFallback;
+
+impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback {
+    type Artifact = CompactContractBytecode;
+
+    fn on_output(
+        output: &VersionedContracts,
+        layout: &ProjectPathsConfig,
+    ) -> Result<Artifacts<Self::Artifact>> {
+        MinimalCombinedArtifacts::on_output(output, layout)
+    }
+
+    fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
+        let path = path.as_ref();
+        let content = fs::read_to_string(path).map_err(|err| SolcError::io(err, path))?;
+        if let Ok(a) = serde_json::from_str(&content) {
+            Ok(a)
+        } else {
+            tracing::error!("Failed to deserialize compact artifact");
+            tracing::trace!("Fallback to hardhat artifact deserialization");
+            let artifact = serde_json::from_str::<HardhatArtifact>(&content)?;
+            tracing::trace!("successfully deserialized hardhat artifact");
+            Ok(artifact.into_contract_bytecode())
+        }
+    }
+
+    fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact {
+        MinimalCombinedArtifacts::contract_to_artifact(file, name, contract)
+    }
+}
diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs
index 5c48602f..e0281d74 100644
--- a/ethers-solc/src/artifacts.rs
+++ b/ethers-solc/src/artifacts.rs
@@ -22,10 +22,20 @@ use crate::{
 use ethers_core::abi::Address;
 use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
 
+/// Solidity files are made up of multiple `source units`, a solidity contract is such a `source
+/// unit`, therefore a solidity file can contain multiple contracts: (1-N*) relationship.
+///
+/// This type represents this mapping as `file name -> (contract name -> T)`, where the generic is
+/// intended to represent contract specific information, like [`Contract`] itself, see [`Contracts`]
+pub type FileToContractsMap<T> = BTreeMap<String, BTreeMap<String, T>>;
+
+/// file -> (contract name -> Contract)
+pub type Contracts = FileToContractsMap<Contract>;
+
 /// An ordered list of files and their source
 pub type Sources = BTreeMap<PathBuf, Source>;
 
-pub type Contracts = BTreeMap<String, BTreeMap<String, Contract>>;
+pub type VersionedSources = BTreeMap<Solc, (Version, Sources)>;
 
 /// Input type `solc` expects
 #[derive(Clone, Debug, Serialize, Deserialize)]
@@ -547,22 +557,7 @@ impl CompilerOutput {
         })
     }
 
-    pub fn diagnostics<'a>(&'a self, ignored_error_codes: &'a [u64]) -> OutputDiagnostics {
-        OutputDiagnostics { compiler_output: self, ignored_error_codes }
-    }
-
     /// Finds the _first_ contract with the given name
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use ethers_solc::Project;
-    /// use ethers_solc::artifacts::*;
-    /// # fn demo(project: Project) {
-    /// let output = project.compile().unwrap().output();
-    /// let contract = output.find("Greeter").unwrap();
-    /// # }
-    /// ```
     pub fn find(&self, contract: impl AsRef<str>) -> Option<CompactContractRef> {
         let contract_name = contract.as_ref();
         self.contracts_iter().find_map(|(name, contract)| {
 
     /// Finds the first contract with the given name and removes it from the set
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use ethers_solc::Project;
-    /// use ethers_solc::artifacts::*;
-    /// # fn demo(project: Project) {
-    /// let mut output = project.compile().unwrap().output();
-    /// let contract = output.remove("Greeter").unwrap();
-    /// # }
-    /// ```
     pub fn remove(&mut self, contract: impl AsRef<str>) -> Option<Contract> {
         let contract_name = contract.as_ref();
         self.contracts.values_mut().find_map(|c| c.remove(contract_name))
@@ -608,16 +592,6 @@ impl CompilerOutput {
 
     /// Returns the output's source files and contracts separately, wrapped in helper types that
     /// provide several helper methods
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use ethers_solc::Project;
-    /// # fn demo(project: Project) {
-    /// let output = project.compile().unwrap().output();
-    /// let (sources, contracts) = output.split();
-    /// # }
-    /// ```
     pub fn split(self) -> (SourceFiles, OutputContracts) {
         (SourceFiles(self.sources), OutputContracts(self.contracts))
     }
@@ -629,17 +603,6 @@ pub struct OutputContracts(pub Contracts);
 
 impl OutputContracts {
     /// Returns an iterator over all contracts and their source names.
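+    ///
+    /// A minimal sketch of collecting all contracts into their compact form
+    /// (assumes the [`Artifact`] trait is in scope):
+    ///
+    /// ```no_run
+    /// use std::collections::BTreeMap;
+    /// use ethers_solc::{artifacts::*, Artifact};
+    /// # fn demo(contracts: OutputContracts) {
+    /// let contracts: BTreeMap<String, CompactContract> = contracts
+    ///     .into_contracts()
+    ///     .map(|(k, c)| (k, c.into_compact_contract()))
+    ///     .collect();
+    /// # }
+    /// ```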
- /// - /// ``` - /// use std::collections::BTreeMap; - /// use ethers_solc::{ artifacts::*, Artifact }; - /// # fn demo(contracts: OutputContracts) { - /// let contracts: BTreeMap = contracts - /// .into_contracts() - /// .map(|(k, c)| (k, c.into_compact_contract().unwrap())) - /// .collect(); - /// # } - /// ``` pub fn into_contracts(self) -> impl Iterator { self.0.into_values().flatten() } @@ -650,17 +613,6 @@ impl OutputContracts { } /// Finds the _first_ contract with the given name - /// - /// # Example - /// - /// ``` - /// use ethers_solc::Project; - /// use ethers_solc::artifacts::*; - /// # fn demo(project: Project) { - /// let output = project.compile().unwrap().output(); - /// let contract = output.find("Greeter").unwrap(); - /// # } - /// ``` pub fn find(&self, contract: impl AsRef) -> Option { let contract_name = contract.as_ref(); self.contracts_iter().find_map(|(name, contract)| { @@ -669,87 +621,12 @@ impl OutputContracts { } /// Finds the first contract with the given name and removes it from the set - /// - /// # Example - /// - /// ``` - /// use ethers_solc::Project; - /// use ethers_solc::artifacts::*; - /// # fn demo(project: Project) { - /// let (_, mut contracts) = project.compile().unwrap().output().split(); - /// let contract = contracts.remove("Greeter").unwrap(); - /// # } - /// ``` pub fn remove(&mut self, contract: impl AsRef) -> Option { let contract_name = contract.as_ref(); self.0.values_mut().find_map(|c| c.remove(contract_name)) } } -/// Helper type to implement display for solc errors -#[derive(Clone, Debug)] -pub struct OutputDiagnostics<'a> { - compiler_output: &'a CompilerOutput, - ignored_error_codes: &'a [u64], -} - -impl<'a> OutputDiagnostics<'a> { - /// Returns true if there is at least one error of high severity - pub fn has_error(&self) -> bool { - self.compiler_output.has_error() - } - - /// Returns true if there is at least one warning - pub fn has_warning(&self) -> bool { - self.compiler_output.has_warning(self.ignored_error_codes) - } - - fn is_test>(&self, contract_path: T) -> bool { - if contract_path.as_ref().ends_with(".t.sol") { - return true - } - - self.compiler_output.find(&contract_path).map_or(false, |contract| { - contract.abi.map_or(false, |abi| abi.functions.contains_key("IS_TEST")) - }) - } -} - -impl<'a> fmt::Display for OutputDiagnostics<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.has_error() { - f.write_str("Compiler run failed")?; - } else if self.has_warning() { - f.write_str("Compiler run successful (with warnings)")?; - } else { - f.write_str("Compiler run successful")?; - } - for err in &self.compiler_output.errors { - if err.severity.is_warning() { - let is_ignored = err.error_code.as_ref().map_or(false, |code| { - if let Some(source_location) = &err.source_location { - // we ignore spdx and contract size warnings in test - // files. 
if we are looking at one of these warnings - // from a test file we skip - if self.is_test(&source_location.file) && (*code == 1878 || *code == 5574) { - return true - } - } - - self.ignored_error_codes.contains(code) - }); - - if !is_ignored { - writeln!(f, "\n{}", err)?; - } - } else { - writeln!(f, "\n{}", err)?; - } - } - Ok(()) - } -} - /// Represents a compiled solidity contract #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] #[serde(rename_all = "camelCase")] @@ -1727,7 +1604,7 @@ pub struct StorageType { pub number_of_bytes: String, } -#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] +#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)] #[serde(rename_all = "camelCase")] pub struct Error { #[serde(default, skip_serializing_if = "Option::is_none")] @@ -1757,7 +1634,7 @@ impl fmt::Display for Error { } } -#[derive(Clone, Debug, Eq, PartialEq)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub enum Severity { Error, Warning, @@ -1840,14 +1717,14 @@ impl<'de> Deserialize<'de> for Severity { } } -#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] +#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)] pub struct SourceLocation { pub file: String, pub start: i32, pub end: i32, } -#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] +#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)] pub struct SecondarySourceLocation { pub file: Option, pub start: Option, @@ -1867,7 +1744,7 @@ pub struct SourceFile { pub struct SourceFiles(pub BTreeMap); impl SourceFiles { - /// Returns an iterator over the the source files' ids and path + /// Returns an iterator over the source files' ids and path /// /// ``` /// use std::collections::BTreeMap; diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 0fa81b8b..bf1ec42b 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -1,27 +1,30 @@ //! 
Support for compiling contracts
 
 use crate::{
-    artifacts::{Contracts, Sources},
+    artifacts::Sources,
     config::SolcConfig,
     error::{Result, SolcError},
-    utils, ArtifactOutput, ProjectPathsConfig,
+    resolver::GraphEdges,
+    utils, ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Project, ProjectPathsConfig,
+    Source,
 };
-use serde::{Deserialize, Serialize};
+use semver::Version;
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
 use std::{
-    collections::{BTreeMap, HashMap, HashSet},
-    fs::{self, File},
+    collections::{
+        btree_map::{BTreeMap, Entry},
+        hash_map, BTreeSet, HashMap, HashSet,
+    },
+    fs::{self},
     path::{Path, PathBuf},
     time::{Duration, UNIX_EPOCH},
 };
 
-/// Hardhat format version
-const HH_FORMAT_VERSION: &str = "hh-sol-cache-2";
-
 /// ethers-rs format version
 ///
 /// `ethers-solc` uses a different format version id, but the actual format is consistent with
 /// hardhat. This allows ethers-solc to detect if the cache file was written by hardhat or
 /// `ethers-solc`
-const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-1";
+const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-2";
 
 /// The file name of the default cache file
 pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json";
@@ -35,217 +38,276 @@ pub struct SolFilesCache {
 }
 
 impl SolFilesCache {
-    /// # Example
-    ///
-    /// Autodetect solc version and default settings
-    ///
-    /// ```no_run
-    /// use ethers_solc::artifacts::Source;
-    /// use ethers_solc::cache::SolFilesCache;
-    /// let files = Source::read_all_from("./sources").unwrap();
-    /// let config = SolFilesCache::builder().insert_files(files, None).unwrap();
-    /// ```
-    pub fn builder() -> SolFilesCacheBuilder {
-        SolFilesCacheBuilder::default()
+    /// Create a new cache instance with the given files
+    pub fn new(files: BTreeMap<PathBuf, CacheEntry>) -> Self {
+        Self { format: ETHERS_FORMAT_VERSION.to_string(), files }
     }
 
-    /// Whether this cache's format is the hardhat format identifier
-    pub fn is_hardhat_format(&self) -> bool {
-        self.format == HH_FORMAT_VERSION
+    pub fn is_empty(&self) -> bool {
+        self.files.is_empty()
     }
 
-    /// Whether this cache's format is our custom format identifier
-    pub fn is_ethers_format(&self) -> bool {
-        self.format == ETHERS_FORMAT_VERSION
+    pub fn len(&self) -> usize {
+        self.files.len()
+    }
+
+    /// Returns the corresponding `CacheEntry` for the file if it exists
+    pub fn entry(&self, file: impl AsRef<Path>) -> Option<&CacheEntry> {
+        self.files.get(file.as_ref())
+    }
+
+    /// Returns the corresponding `CacheEntry` for the file if it exists
+    pub fn entry_mut(&mut self, file: impl AsRef<Path>) -> Option<&mut CacheEntry> {
+        self.files.get_mut(file.as_ref())
     }
 
     /// Reads the cache json file from the given path
+    ///
+    /// See also [`Self::read_joined()`]
+    ///
+    /// # Errors
+    ///
+    /// If the cache file does not exist
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # fn t() {
+    /// use ethers_solc::cache::SolFilesCache;
+    /// use ethers_solc::Project;
+    ///
+    /// let project = Project::builder().build().unwrap();
+    /// let mut cache = SolFilesCache::read(project.cache_path()).unwrap();
+    /// cache.join_artifacts_files(project.artifacts_path());
+    /// # }
+    /// ```
     #[tracing::instrument(skip_all, name = "sol-files-cache::read")]
     pub fn read(path: impl AsRef<Path>) -> Result<Self> {
         let path = path.as_ref();
         tracing::trace!("reading solfiles cache at {}", path.display());
-        let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?;
-        let file = std::io::BufReader::new(file);
-        let cache: Self = serde_json::from_reader(file)?;
+        let cache: SolFilesCache = utils::read_json_file(path)?;
         tracing::trace!("read cache \"{}\" with {} entries", cache.format, cache.files.len());
         Ok(cache)
     }
 
-    /// Write the cache to json file
+    /// Reads the cache json file from the given path and returns the cache with modified paths
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # fn t() {
+    /// use ethers_solc::cache::SolFilesCache;
+    /// use ethers_solc::Project;
+    ///
+    /// let project = Project::builder().build().unwrap();
+    /// let cache = SolFilesCache::read_joined(&project.paths).unwrap();
+    /// # }
+    /// ```
+    pub fn read_joined(paths: &ProjectPathsConfig) -> Result<Self> {
+        let mut cache = SolFilesCache::read(&paths.cache)?;
+        cache.join_artifacts_files(&paths.artifacts);
+        Ok(cache)
+    }
+
+    /// Write the cache as json file to the given path
     pub fn write(&self, path: impl AsRef<Path>) -> Result<()> {
         let path = path.as_ref();
+        utils::create_parent_dir_all(path)?;
         let file = fs::File::create(path).map_err(|err| SolcError::io(err, path))?;
-        tracing::trace!("writing cache to json file: \"{}\"", path.display());
+        tracing::trace!(
+            "writing cache with {} entries to json file: \"{}\"",
+            self.files.len(),
+            path.display()
+        );
        serde_json::to_writer_pretty(file, self)?;
         tracing::trace!("cache file located: \"{}\"", path.display());
         Ok(())
     }
 
+    /// Sets the artifact files location to `base` adjoined to the `CacheEntry` artifacts.
+    pub fn join_artifacts_files(&mut self, base: impl AsRef<Path>) -> &mut Self {
+        let base = base.as_ref();
+        self.files.values_mut().for_each(|entry| entry.join_artifacts_files(base));
+        self
+    }
+
+    /// Removes `base` from all artifact file paths
+    pub fn strip_artifact_files_prefixes(&mut self, base: impl AsRef<Path>) -> &mut Self {
+        let base = base.as_ref();
+        self.files.values_mut().for_each(|entry| entry.strip_artifact_files_prefixes(base));
+        self
+    }
+
+    /// Removes all `CacheEntry` whose source files don't exist on disk
+    ///
+    /// **NOTE:** this assumes the `files` are absolute
     pub fn remove_missing_files(&mut self) {
         tracing::trace!("remove non existing files from cache");
-        self.files.retain(|file, _| Path::new(file).exists())
-    }
-
-    pub fn remove_changed_files(&mut self, changed_files: &Sources) {
-        tracing::trace!("remove changed files from cache");
-        self.files.retain(|file, _| !changed_files.contains_key(file))
-    }
-
-    /// Returns only the files that were changed from the provided sources, to save time
-    /// when compiling.
-    pub fn get_changed_files<'a>(
-        &'a self,
-        sources: Sources,
-        config: Option<&'a SolcConfig>,
-    ) -> Sources {
-        sources
-            .into_iter()
-            .filter(move |(file, source)| self.has_changed(file, source.content_hash(), config))
-            .collect()
-    }
-
-    /// Returns true if the given content hash or config differs from the file's
-    /// or the file does not exist
-    pub fn has_changed(
-        &self,
-        file: impl AsRef<Path>,
-        hash: impl AsRef<[u8]>,
-        config: Option<&SolcConfig>,
-    ) -> bool {
-        if let Some(entry) = self.files.get(file.as_ref()) {
-            if entry.content_hash.as_bytes() != hash.as_ref() {
-                return true
+        self.files.retain(|file, _| {
+            let exists = file.exists();
+            if !exists {
+                tracing::trace!("remove {} from cache", file.display());
             }
-            if let Some(config) = config {
-                if config != &entry.solc_config {
-                    return true
-                }
-            }
-            false
-        } else {
-            true
-        }
-    }
-
-    /// Returns only the files that were changed or are missing artifacts compared to previous
-    /// compiler execution, to save time when compiling.
-    pub fn get_changed_or_missing_artifacts_files<'a, T: ArtifactOutput>(
-        &'a self,
-        sources: Sources,
-        config: Option<&'a SolcConfig>,
-        paths: &ProjectPathsConfig,
-    ) -> Sources {
-        // all file hashes
-        let content_hashes: HashMap<_, _> =
-            sources.iter().map(|(file, source)| (file.clone(), source.content_hash())).collect();
-        sources
-            .into_iter()
-            .filter(move |(file, _)| {
-                self.has_changed_or_missing_artifact::<T>(file, &content_hashes, config, paths)
-            })
-            .collect()
-    }
-
-    /// Returns true if the given content hash or config differs from the file's
-    /// or the file does not exist or the files' artifacts are missing
-    pub fn has_changed_or_missing_artifact<T: ArtifactOutput>(
-        &self,
-        file: &Path,
-        hashes: &HashMap<PathBuf, String>,
-        config: Option<&SolcConfig>,
-        paths: &ProjectPathsConfig,
-    ) -> bool {
-        let hash = hashes.get(file).unwrap().as_bytes();
-        if let Some(entry) = self.files.get(file) {
-            if entry.content_hash.as_bytes() != hash {
-                tracing::trace!("changed content hash for cached artifact \"{}\"", file.display());
-                return true
-            }
-            if let Some(config) = config {
-                if config != &entry.solc_config {
-                    tracing::trace!(
-                        "changed solc config for cached artifact \"{}\"",
-                        file.display()
-                    );
-                    return true
-                }
-            }
-
-            // checks whether an artifact this file depends on was removed
-            if entry.artifacts.iter().any(|name| !T::output_exists(file, name, &paths.artifacts)) {
-                tracing::trace!(
-                    "missing linked artifacts for cached artifact \"{}\"",
-                    file.display()
-                );
-                return true
-            }
-
-            // check if any of the file's imported files changed
-            self.has_changed_imports(file, entry, hashes, paths, &mut HashSet::new())
-        } else {
-            tracing::trace!("missing cached artifact for \"{}\"", file.display());
-            true
-        }
-    }
-
-    /// Returns true if the entry has any imports that were changed
-    fn has_changed_imports(
-        &self,
-        path: &Path,
-        entry: &CacheEntry,
-        hashes: &HashMap<PathBuf, String>,
-        paths: &ProjectPathsConfig,
-        traversed: &mut HashSet<PathBuf>,
-    ) -> bool {
-        let cwd = match path.parent() {
-            Some(inner) => inner,
-            None => return true,
-        };
-        if !traversed.insert(path.to_path_buf()) {
-            // skip already traversed files, this prevents SO for circular imports
-            return false
-        }
-
-        for import in entry.imports.iter() {
-            if let Some((import, import_path)) = paths
-                .resolve_import(cwd, Path::new(import.as_str()))
-                .ok()
-                .and_then(|import| self.files.get(&import).map(|e| (e, import)))
-            {
-                if let Some(hash) = hashes.get(&import_path) {
-                    if import.content_hash == hash.as_str() &&
-                        !self.has_changed_imports(&import_path, import, hashes, paths, traversed)
-                    {
-                        return false
-                    }
-                }
-            }
-        }
-
-        !entry.imports.is_empty()
-    }
-
-    /// Checks if all artifact files exist
-    pub fn all_artifacts_exist<T: ArtifactOutput>(&self, artifacts_root: &Path) -> bool {
-        self.files.iter().all(|(file, entry)| {
-            entry.artifacts.iter().all(|name| T::output_exists(file, name, artifacts_root))
+            exists
         })
     }
 
-    /// Reads all cached artifacts from disk using the given ArtifactOutput handler
-    pub fn read_artifacts<T: ArtifactOutput>(
-        &self,
-        artifacts_root: &Path,
-    ) -> Result<BTreeMap<PathBuf, T::Artifact>> {
-        let mut artifacts = BTreeMap::default();
-        for (file, entry) in &self.files {
-            for artifact in &entry.artifacts {
-                let artifact_file = artifacts_root.join(T::output_file(file, artifact));
-                let artifact = T::read_cached_artifact(&artifact_file)?;
-                artifacts.insert(artifact_file, artifact);
-            }
-        }
-        Ok(artifacts)
-    }
+    /// Checks if all artifact files exist
+    pub fn all_artifacts_exist(&self) -> bool {
+        self.files.values().all(|entry| entry.all_artifacts_exist())
+    }
+
+    /// Strips the given prefix from all `file` paths that identify a `CacheEntry` to make them
+    /// relative to the given `base` argument
+    ///
+    /// In other words this sets the keys (the file path of a solidity file) relative to the `base`
+    /// argument, so that the key `/Users/me/project/src/Greeter.sol` will be changed to
+    /// `src/Greeter.sol` if `base` is `/Users/me/project`
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # fn t() {
+    /// use ethers_solc::artifacts::CompactContract;
+    /// use ethers_solc::cache::SolFilesCache;
+    /// use ethers_solc::Project;
+    /// let project = Project::builder().build().unwrap();
+    /// let cache = SolFilesCache::read(project.cache_path())
+    ///     .unwrap()
+    ///     .with_stripped_file_prefixes(project.root());
+    /// let artifact: CompactContract = cache.read_artifact("src/Greeter.sol", "Greeter").unwrap();
+    /// # }
+    /// ```
+    ///
+    /// **Note:** this only affects the source files, see [`Self::strip_artifact_files_prefixes()`]
+    pub fn with_stripped_file_prefixes(mut self, base: impl AsRef<Path>) -> Self {
+        let base = base.as_ref();
+        self.files = self
+            .files
+            .into_iter()
+            .map(|(f, e)| (utils::source_name(&f, base).to_path_buf(), e))
+            .collect();
+        self
+    }
+
+    /// Returns the path to the artifact of the given `(file, contract)` pair
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # fn t() {
+    /// use ethers_solc::cache::SolFilesCache;
+    /// use ethers_solc::Project;
+    ///
+    /// let project = Project::builder().build().unwrap();
+    /// let cache = SolFilesCache::read_joined(&project.paths).unwrap();
+    /// cache.find_artifact_path("/Users/git/myproject/src/Greeter.sol", "Greeter");
+    /// # }
+    /// ```
+    pub fn find_artifact_path(
+        &self,
+        contract_file: impl AsRef<Path>,
+        contract_name: impl AsRef<str>,
+    ) -> Option<&PathBuf> {
+        let entry = self.entry(contract_file)?;
+        entry.find_artifact_path(contract_name)
+    }
+
+    /// Finds the path to the artifact of the given `(file, contract)` pair, see
+    /// [`Self::find_artifact_path()`], and reads the artifact as json file
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # fn t() {
+    /// use ethers_solc::cache::SolFilesCache;
+    /// use ethers_solc::Project;
+    /// use ethers_solc::artifacts::CompactContract;
+    ///
+    /// let project = Project::builder().build().unwrap();
+    /// let cache = SolFilesCache::read_joined(&project.paths).unwrap();
+    /// let artifact: CompactContract = cache.read_artifact("/Users/git/myproject/src/Greeter.sol", "Greeter").unwrap();
+    /// # }
+    /// ```
+    ///
+    /// **NOTE**: unless the cache's `files` keys were modified `contract_file` is expected to be
+    /// absolute, see [`Self::with_stripped_file_prefixes()`]
+    pub fn read_artifact<Artifact: DeserializeOwned>(
+        &self,
+        contract_file: impl AsRef<Path>,
+        contract_name: impl AsRef<str>,
+    ) -> Result<Artifact> {
+        let contract_file = contract_file.as_ref();
+        let contract_name = contract_name.as_ref();
+
+        let artifact_path =
+            self.find_artifact_path(contract_file, contract_name).ok_or_else(|| {
+                SolcError::ArtifactNotFound(contract_file.to_path_buf(), contract_name.to_string())
+            })?;
+
+        utils::read_json_file(artifact_path)
+    }
+
+    /// Reads all cached artifacts from disk using the given ArtifactOutput handler
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use ethers_solc::cache::SolFilesCache;
+    /// use ethers_solc::Project;
+    /// use ethers_solc::artifacts::CompactContractBytecode;
+    /// # fn t() {
+    /// let project = Project::builder().build().unwrap();
+    /// let cache = SolFilesCache::read_joined(&project.paths).unwrap();
+    /// let artifacts = cache.read_artifacts::<CompactContractBytecode>().unwrap();
+    /// # }
+    /// ```
+    pub fn read_artifacts<Artifact: DeserializeOwned>(&self) -> Result<Artifacts<Artifact>> {
+        let mut artifacts = ArtifactsMap::new();
+        for (file, entry) in self.files.iter() {
+            let file_name = format!("{}", file.display());
+            artifacts.insert(file_name, entry.read_artifact_files()?);
+        }
+        Ok(Artifacts(artifacts))
+    }
+
+    /// Retains only the `CacheEntry` specified by the file + version combination.
+    ///
+    /// In other words, only keep those cache entries with the paths (keys) that the iterator
+    /// yields and only keep the versions in the cache entry that the version iterator yields.
+    pub fn retain<'a, I, V>(&mut self, files: I)
+    where
+        I: IntoIterator<Item = (&'a Path, V)>,
+        V: IntoIterator<Item = &'a Version>,
+    {
+        let mut files: HashMap<_, _> = files.into_iter().map(|(p, v)| (p, v)).collect();
+
+        self.files.retain(|file, entry| {
+            if let Some(versions) = files.remove(file.as_path()) {
+                entry.retain_versions(versions);
+            } else {
+                return false
+            }
+            !entry.artifacts.is_empty()
+        });
+    }
+
+    /// Inserts the provided cache entries, if there is an existing `CacheEntry` it will be updated
+    /// but versions will be merged.
+    pub fn extend<I>(&mut self, entries: I)
+    where
+        I: IntoIterator<Item = (PathBuf, CacheEntry)>,
+    {
+        for (file, entry) in entries.into_iter() {
+            match self.files.entry(file) {
+                Entry::Vacant(e) => {
+                    e.insert(entry);
+                }
+                Entry::Occupied(mut other) => {
+                    other.get_mut().merge_artifacts(entry);
+                }
+            }
+        }
+    }
 }
 
@@ -272,182 +334,480 @@ impl Default for SolFilesCache {
 }
 
-#[derive(Debug, Clone, Default)]
-pub struct SolFilesCacheBuilder {
-    format: Option<String>,
-    solc_config: Option<SolcConfig>,
-    root: Option<PathBuf>,
-}
-
-impl SolFilesCacheBuilder {
-    #[must_use]
-    pub fn format(mut self, format: impl Into<String>) -> Self {
-        self.format = Some(format.into());
-        self
-    }
-
-    #[must_use]
-    pub fn solc_config(mut self, solc_config: SolcConfig) -> Self {
-        self.solc_config = Some(solc_config);
-        self
-    }
-
-    #[must_use]
-    pub fn root(mut self, root: impl Into<PathBuf>) -> Self {
-        self.root = Some(root.into());
-        self
-    }
-
-    /// Creates a new `SolFilesCache` instance
-    ///
-    /// If a `cache_file` path was provided it's used as base.
-    pub fn insert_files(
-        self,
-        sources: Sources,
-        cache_file: Option<PathBuf>,
-    ) -> Result<SolFilesCache> {
-        let format = self.format.unwrap_or_else(|| ETHERS_FORMAT_VERSION.to_string());
-        let solc_config = self.solc_config.unwrap_or_else(|| SolcConfig::builder().build());
-
-        let root = self
-            .root
-            .map(Ok)
-            .unwrap_or_else(std::env::current_dir)
-            .map_err(|err| SolcError::io(err, "."))?;
-
-        let mut files = BTreeMap::new();
-        for (file, source) in sources {
-            let last_modification_date = fs::metadata(&file)
-                .map_err(|err| SolcError::io(err, file.clone()))?
-                .modified()
-                .map_err(|err| SolcError::io(err, file.clone()))?
-                .duration_since(UNIX_EPOCH)
-                .map_err(|err| SolcError::solc(err.to_string()))?
-                .as_millis() as u64;
-            let imports =
-                utils::find_import_paths(source.as_ref()).map(|m| m.as_str().to_owned()).collect();
-
-            let version_pragmas = utils::find_version_pragma(source.as_ref())
-                .map(|v| vec![v.as_str().to_string()])
-                .unwrap_or_default();
-
-            let entry = CacheEntry {
-                last_modification_date,
-                content_hash: source.content_hash(),
-                source_name: utils::source_name(&file, &root).into(),
-                solc_config: solc_config.clone(),
-                imports,
-                version_pragmas,
-                artifacts: vec![],
-            };
-            files.insert(file, entry);
-        }
-
-        let cache = if let Some(dest) = cache_file.as_ref().filter(|dest| dest.exists()) {
-            // read the existing cache and extend it by the files that changed
-            // (if we just wrote to the cache file, we'd overwrite the existing data)
-            let reader =
-                std::io::BufReader::new(File::open(dest).map_err(|err| SolcError::io(err, dest))?);
-            if let Ok(mut cache) = serde_json::from_reader::<_, SolFilesCache>(reader) {
-                cache.files.extend(files);
-                cache
-            } else {
-                tracing::error!("Failed to read existing cache file {}", dest.display());
-                SolFilesCache { format, files }
-            }
-        } else {
-            SolFilesCache { format, files }
-        };
-
-        Ok(cache)
-    }
-}
-
+/// A `CacheEntry` in the cache file represents a solidity file
+///
+/// A solidity file can contain several contracts, for every contract a separate `Artifact` is
+/// emitted, so the `CacheEntry` tracks the artifacts by name. A file can be compiled with multiple
+/// `solc` versions generating version specific artifacts.
 #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct CacheEntry {
     /// the last modification time of this file
     pub last_modification_date: u64,
+    /// hash to identify whether the content of the file changed
     pub content_hash: String,
+    /// identifier name see [`crate::utils::source_name()`]
     pub source_name: PathBuf,
+    /// what config was set when compiling this file
     pub solc_config: SolcConfig,
-    pub imports: Vec<String>,
-    pub version_pragmas: Vec<String>,
-    pub artifacts: Vec<String>,
+    /// fully resolved imports of the file
+    ///
+    /// all paths start relative from the project's root: `src/importedFile.sol`
+    pub imports: BTreeSet<PathBuf>,
+    /// The solidity version pragma
+    pub version_requirement: Option<String>,
+    /// all artifacts produced for this file
+    ///
+    /// In theory a file can be compiled by different solc versions:
+    /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)`
+    /// file `C` would be compiled twice, with `0.8.10` and `0.8.11`, producing two different
+    /// artifacts.
+    ///
+    /// This map tracks the artifacts by `name -> (Version -> PathBuf)`.
+    /// This mimics the default artifacts directory structure
+    pub artifacts: BTreeMap<String, BTreeMap<Version, PathBuf>>,
 }
 
 impl CacheEntry {
-    /// Returns the time
+    /// Returns the last modified timestamp `Duration`
     pub fn last_modified(&self) -> Duration {
         Duration::from_millis(self.last_modification_date)
     }
+
+    /// Returns the artifact path for the contract name
+    /// ```
+    /// use ethers_solc::cache::CacheEntry;
+    /// # fn t(entry: CacheEntry) {
+    /// entry.find_artifact_path("Greeter");
+    /// # }
+    /// ```
+    pub fn find_artifact_path(&self, contract_name: impl AsRef<str>) -> Option<&PathBuf> {
+        self.artifacts.get(contract_name.as_ref())?.iter().next().map(|(_, p)| p)
+    }
+
+    /// Reads the last modification date from the file's metadata
+    pub fn read_last_modification_date(file: impl AsRef<Path>) -> Result<u64> {
+        let file = file.as_ref();
+        let last_modification_date = fs::metadata(file)
+            .map_err(|err| SolcError::io(err, file.to_path_buf()))?
+ .modified()
+ .map_err(|err| SolcError::io(err, file.to_path_buf()))?
+ .duration_since(UNIX_EPOCH)
+ .map_err(|err| SolcError::solc(err.to_string()))?
+ .as_millis() as u64;
+ Ok(last_modification_date)
+ }
+
+ /// Reads all artifact files associated with the `CacheEntry`
+ ///
+ /// **Note:** all artifact file paths should be absolute, see [`Self::join`]
+ fn read_artifact_files<Artifact: DeserializeOwned>(
+ &self,
+ ) -> Result<BTreeMap<String, Vec<ArtifactFile<Artifact>>>> {
+ let mut artifacts = BTreeMap::new();
+ for (artifact_name, versioned_files) in self.artifacts.iter() {
+ let mut files = Vec::with_capacity(versioned_files.len());
+ for (version, file) in versioned_files {
+ let artifact: Artifact = utils::read_json_file(file)?;
+ files.push(ArtifactFile { artifact, file: file.clone(), version: version.clone() });
+ }
+ artifacts.insert(artifact_name.clone(), files);
+ }
+ Ok(artifacts)
+ }
+
+ pub(crate) fn insert_artifacts<'a, I, T: 'a>(&mut self, artifacts: I)
+ where
+ I: IntoIterator<Item = (&'a String, Vec<&'a ArtifactFile<T>>)>,
+ {
+ for (name, artifacts) in artifacts.into_iter().filter(|(_, a)| !a.is_empty()) {
+ let entries: BTreeMap<_, _> = artifacts
+ .into_iter()
+ .map(|artifact| (artifact.version.clone(), artifact.file.clone()))
+ .collect();
+ self.artifacts.insert(name.clone(), entries);
+ }
+ }
+
+ /// Merges another `CacheEntry`'s artifacts into the existing set
+ fn merge_artifacts(&mut self, other: CacheEntry) {
+ for (name, artifacts) in other.artifacts {
+ match self.artifacts.entry(name) {
+ Entry::Vacant(entry) => {
+ entry.insert(artifacts);
+ }
+ Entry::Occupied(mut entry) => {
+ entry.get_mut().extend(artifacts.into_iter());
+ }
+ }
+ }
+ }
+
+ /// Retains only those artifacts that match the provided versions.
+ pub fn retain_versions<'a, I>(&mut self, versions: I)
+ where
+ I: IntoIterator<Item = &'a Version>,
+ {
+ let versions = versions.into_iter().collect::<HashSet<_>>();
+ self.artifacts.retain(|_, artifacts| {
+ artifacts.retain(|version, _| versions.contains(version));
+ !artifacts.is_empty()
+ })
+ }
+
+ /// Returns `true` if the artifacts set contains the given version
+ pub fn contains_version(&self, version: &Version) -> bool {
+ self.artifacts_versions().any(|(v, _)| v == version)
+ }
+
+ /// Iterator that yields all artifact files and their version
+ pub fn artifacts_versions(&self) -> impl Iterator<Item = (&Version, &PathBuf)> {
+ self.artifacts.values().flat_map(|artifacts| artifacts.iter())
+ }
+
+ /// Iterator that yields all artifact files for the given version
+ pub fn artifacts_for_version<'a>(
+ &'a self,
+ version: &'a Version,
+ ) -> impl Iterator<Item = &'a PathBuf> + 'a {
+ self.artifacts_versions().filter_map(move |(ver, file)| (ver == version).then(|| file))
+ }
+
+ /// Iterator that yields all artifact files
+ pub fn artifacts(&self) -> impl Iterator<Item = &PathBuf> {
+ self.artifacts.values().flat_map(|artifacts| artifacts.values())
+ }
+
+ /// Mutable iterator over all artifact files
+ pub fn artifacts_mut(&mut self) -> impl Iterator<Item = &mut PathBuf> {
+ self.artifacts.values_mut().flat_map(|artifacts| artifacts.values_mut())
+ }
+
+ /// Checks if all artifact files exist
+ pub fn all_artifacts_exist(&self) -> bool {
+ self.artifacts().all(|p| p.exists())
+ }
+
+ /// Sets the artifact's paths to `base` adjoined to the artifact's `path`.
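The two helpers that follow, `join_artifacts_files` and `strip_artifact_files_prefixes`, exist because the cache file stores artifact paths relative to the artifacts directory while consumers need absolute paths. A plain-`std` sketch of that join/strip round-trip, assuming Unix-style paths:

```rust
use std::path::{Path, PathBuf};

fn main() {
    let base = Path::new("/project/artifacts");
    let relative = PathBuf::from("Greeter.sol/Greeter.json");

    // joining turns the cache-file form into an absolute on-disk path...
    let absolute = base.join(&relative);
    assert_eq!(absolute, PathBuf::from("/project/artifacts/Greeter.sol/Greeter.json"));

    // ...and stripping the prefix restores the relative cache-file form
    let stripped = absolute.strip_prefix(base).unwrap();
    assert_eq!(stripped, relative.as_path());
}
```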
+ pub fn join_artifacts_files(&mut self, base: impl AsRef) { + let base = base.as_ref(); + self.artifacts_mut().for_each(|p| *p = base.join(&*p)) + } + + /// Removes `base` from the artifact's path + pub fn strip_artifact_files_prefixes(&mut self, base: impl AsRef) { + let base = base.as_ref(); + self.artifacts_mut().for_each(|p| { + if let Ok(rem) = p.strip_prefix(base) { + *p = rem.to_path_buf(); + } + }) + } } -/// A helper type to handle source name/full disk mappings -/// -/// The disk path is the actual path where a file can be found on disk. -/// A source name is the internal identifier and is the remaining part of the disk path starting -/// with the configured source directory, (`contracts/contract.sol`) -#[derive(Debug, Default)] -pub struct PathMap { - /// all libraries to the source set while keeping track of their actual disk path - /// (`contracts/contract.sol` -> `/Users/.../contracts.sol`) - pub source_name_to_path: HashMap, - /// inverse of `source_name_to_path` : (`/Users/.../contracts.sol` -> `contracts/contract.sol`) - pub path_to_source_name: HashMap, - /* /// All paths, source names and actual file paths - * paths: Vec */ +/// A helper abstraction over the [`SolFilesCache`] used to determine what files need to compiled +/// and which `Artifacts` can be reused. +#[derive(Debug)] +pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput> { + /// preexisting cache file + pub cache: SolFilesCache, + /// all already existing artifacts + pub cached_artifacts: Artifacts, + /// relationship between all the files + pub edges: GraphEdges, + /// the project + pub project: &'a Project, + /// all files that were filtered because they haven't changed + pub filtered: HashMap)>, + /// the corresponding cache entries for all sources that were deemed to be dirty + /// + /// `CacheEntry` are grouped by their solidity file. + /// During preprocessing the `artifacts` field of a new `CacheEntry` is left blank, because in + /// order to determine the artifacts of the solidity file, the file needs to be compiled first. 
+ /// Only after the `CompilerOutput` is received and all compiled contracts are handled (see
+ /// [`crate::ArtifactOutput::on_output()`]) are all artifacts and their disk paths determined;
+ /// they can be populated before the updated [`crate::SolFilesCache`] is finally written to
+ /// disk, see [`Cache::finish()`]
+ pub dirty_entries: HashMap<PathBuf, (CacheEntry, HashSet<Version>)>,
+ /// the file hashes
+ pub content_hashes: HashMap<PathBuf, String>,
+}
 
-impl PathMap {
- fn apply_mappings(sources: Sources, mappings: &HashMap<PathBuf, PathBuf>) -> Sources {
+impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
+ /// Creates a new cache entry for the file
+ fn create_cache_entry(&self, file: &Path, source: &Source) -> CacheEntry {
+ let imports = self
+ .edges
+ .imports(file)
+ .into_iter()
+ .map(|import| utils::source_name(import, self.project.root()).to_path_buf())
+ .collect();
+
+ let entry = CacheEntry {
+ last_modification_date: CacheEntry::read_last_modification_date(&file)
+ .unwrap_or_default(),
+ content_hash: source.content_hash(),
+ source_name: utils::source_name(file, self.project.root()).into(),
+ solc_config: self.project.solc_config.clone(),
+ imports,
+ version_requirement: self.edges.version_requirement(file).map(|v| v.to_string()),
+ // artifacts remain empty until we receive the compiler output
+ artifacts: Default::default(),
+ };
+
+ entry
+ }
+
+ /// Inserts a new cache entry for the given file
+ ///
+ /// If there is already an entry available for the file, the given version is added to the set
+ fn insert_new_cache_entry(&mut self, file: &Path, source: &Source, version: Version) {
+ if let Some((_, versions)) = self.dirty_entries.get_mut(file) {
+ versions.insert(version);
+ } else {
+ let entry = self.create_cache_entry(file, source);
+ self.dirty_entries.insert(file.to_path_buf(), (entry, HashSet::from([version])));
+ }
+ }
+
+ /// Inserts the filtered source with the given version
+ fn insert_filtered_source(&mut self, file: PathBuf, source: Source, version: Version) {
+ match self.filtered.entry(file) {
+ hash_map::Entry::Occupied(mut entry) => {
+ entry.get_mut().1.insert(version);
+ }
+ hash_map::Entry::Vacant(entry) => {
+ entry.insert((source, HashSet::from([version])));
+ }
+ }
+ }
+
+ /// Returns only those sources that
+ /// - are new
+ /// - were changed
+ /// - their imports were changed
+ /// - their artifact is missing
+ fn filter(&mut self, sources: Sources, version: &Version) -> Sources {
+ self.fill_hashes(&sources);
 sources
 .into_iter()
- .map(|(import, source)| {
- if let Some(path) = mappings.get(&import).cloned() {
- (path, source)
- } else {
- (import, source)
+ .filter_map(|(file, source)| self.requires_solc(file, source, version))
+ .collect()
+ }
+
+ /// Returns `Some` if the file _needs_ to be compiled and `None` if the artifact can be reused
+ fn requires_solc(
+ &mut self,
+ file: PathBuf,
+ source: Source,
+ version: &Version,
+ ) -> Option<(PathBuf, Source)> {
+ if !self.is_dirty(&file, version) &&
+ self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version))
+ {
+ self.insert_filtered_source(file, source, version.clone());
+ None
+ } else {
+ self.insert_new_cache_entry(&file, &source, version.clone());
+
+ Some((file, source))
+ }
+ }
+
+ /// Returns `false` if the corresponding cache entry remained unchanged, otherwise `true`
+ fn is_dirty(&self, file: &Path, version: &Version) -> bool {
+ if let Some(hash) = self.content_hashes.get(file) {
+ if let Some(entry) = self.cache.entry(&file) {
+ if entry.content_hash.as_bytes() != hash.as_bytes() {
+ tracing::trace!(
+ "changed content
hash for cached artifact \"{}\"", + file.display() + ); + return true } - }) - .collect() + if self.project.solc_config != entry.solc_config { + tracing::trace!( + "changed solc config for cached artifact \"{}\"", + file.display() + ); + return true + } + + if !entry.contains_version(version) { + tracing::trace!("missing linked artifacts for version \"{}\"", version); + return true + } + + if entry.artifacts_for_version(version).any(|artifact_path| { + let missing_artifact = !self.cached_artifacts.has_artifact(artifact_path); + if missing_artifact { + tracing::trace!("missing artifact \"{}\"", artifact_path.display()); + } + missing_artifact + }) { + return true + } + // all things match, can be reused + return false + } + tracing::trace!("Missing cache entry for {}", file.display()); + } + true } - /// Returns all contract names of the files mapped with the disk path - pub fn get_artifacts(&self, contracts: &Contracts) -> Vec<(PathBuf, Vec)> { - contracts - .iter() - .map(|(path, contracts)| { - let path = PathBuf::from(path); - let file = self.source_name_to_path.get(&path).cloned().unwrap_or(path); - (file, contracts.keys().cloned().collect::>()) - }) - .collect() - } - - pub fn extend(&mut self, other: PathMap) { - self.source_name_to_path.extend(other.source_name_to_path); - self.path_to_source_name.extend(other.path_to_source_name); - } - - /// Returns a new map with the source names as keys - pub fn set_source_names(&self, sources: Sources) -> Sources { - Self::apply_mappings(sources, &self.path_to_source_name) - } - - /// Returns a new map with the disk paths as keys - pub fn set_disk_paths(&self, sources: Sources) -> Sources { - Self::apply_mappings(sources, &self.source_name_to_path) + /// Adds the file's hashes to the set if not set yet + fn fill_hashes(&mut self, sources: &Sources) { + for (file, source) in sources { + if let hash_map::Entry::Vacant(entry) = self.content_hashes.entry(file.clone()) { + entry.insert(source.content_hash()); + } + } } } -#[cfg(test)] -mod tests { - use super::*; +/// Abstraction over configured caching which can be either non-existent or an already loaded cache +#[allow(clippy::large_enum_variant)] +#[derive(Debug)] +pub(crate) enum ArtifactsCache<'a, T: ArtifactOutput> { + /// Cache nothing on disk + Ephemeral(GraphEdges, &'a Project), + /// Handles the actual cached artifacts, detects artifacts that can be reused + Cached(ArtifactsCacheInner<'a, T>), +} - #[test] - fn can_parse_solidity_files_cache() { - let input = include_str!("../test-data/solidity-files-cache.json"); - let _ = serde_json::from_str::(input).unwrap(); +impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { + pub fn new(project: &'a Project, edges: GraphEdges) -> Result { + let cache = if project.cached { + // read the cache file if it already exists + let mut cache = if project.cache_path().exists() { + SolFilesCache::read_joined(&project.paths).unwrap_or_default() + } else { + SolFilesCache::default() + }; + + cache.remove_missing_files(); + + // read all artifacts + let cached_artifacts = if project.paths.artifacts.exists() { + tracing::trace!("reading artifacts from cache.."); + // if we failed to read the whole set of artifacts we use an empty set + let artifacts = cache.read_artifacts::().unwrap_or_default(); + tracing::trace!("read {} artifacts from cache", artifacts.artifact_files().count()); + artifacts + } else { + Default::default() + }; + + let cache = ArtifactsCacheInner { + cache, + cached_artifacts, + edges, + project, + filtered: Default::default(), + dirty_entries: 
Default::default(),
+ content_hashes: Default::default(),
+ };
+
+ ArtifactsCache::Cached(cache)
+ } else {
+ // nothing to cache
+ ArtifactsCache::Ephemeral(edges, project)
+ };
+
+ Ok(cache)
+ }
+
+ #[cfg(test)]
+ pub fn as_cached(&self) -> Option<&ArtifactsCacheInner<'a, T>> {
+ match self {
+ ArtifactsCache::Ephemeral(_, _) => None,
+ ArtifactsCache::Cached(cached) => Some(cached),
+ }
+ }
+
+ pub fn project(&self) -> &'a Project<T> {
+ match self {
+ ArtifactsCache::Ephemeral(_, project) => project,
+ ArtifactsCache::Cached(cache) => cache.project,
+ }
+ }
+
+ /// Filters out those sources that don't need to be compiled
+ pub fn filter(&mut self, sources: Sources, version: &Version) -> Sources {
+ match self {
+ ArtifactsCache::Ephemeral(_, _) => sources,
+ ArtifactsCache::Cached(cache) => cache.filter(sources, version),
+ }
+ }
+
+ /// Consumes the `Cache`, rebuilds the [`SolFilesCache`] by merging all artifacts that were
+ /// filtered out in the previous step (`Cache::filtered`) and the artifacts that were just
+ /// compiled and written to disk `written_artifacts`.
+ ///
+ /// Returns all the _cached_ artifacts.
+ pub fn write_cache(
+ self,
+ written_artifacts: &Artifacts<T::Artifact>,
+ ) -> Result<Artifacts<T::Artifact>> {
+ match self {
+ ArtifactsCache::Ephemeral(_, _) => Ok(Default::default()),
+ ArtifactsCache::Cached(cache) => {
+ let ArtifactsCacheInner {
+ mut cache,
+ mut cached_artifacts,
+ mut dirty_entries,
+ filtered,
+ project,
+ ..
+ } = cache;
+
+ // keep only those files that were previously filtered (not dirty, reused)
+ cache.retain(filtered.iter().map(|(p, (_, v))| (p.as_path(), v)));
+
+ // add the artifacts to the cache entries, this way we can keep a mapping from
+ // solidity file to its artifacts
+ // this step is necessary because the concrete artifacts are only known after solc
+ // was invoked and received as output, before that we merely know the file and
+ // the versions, so we add the artifacts on a file by file basis
+ for (file, artifacts) in written_artifacts.as_ref() {
+ let file_path = Path::new(&file);
+ if let Some((entry, versions)) = dirty_entries.get_mut(file_path) {
+ entry.insert_artifacts(artifacts.iter().map(|(name, artifacts)| {
+ let artifacts = artifacts
+ .iter()
+ .filter(|artifact| versions.contains(&artifact.version))
+ .collect::<Vec<_>>();
+ (name, artifacts)
+ }));
+ }
+
+ // cached artifacts that were overwritten also need to be removed from the
+ // `cached_artifacts` set
+ if let Some((f, mut cached)) = cached_artifacts.0.remove_entry(file) {
+ cached.retain(|name, files| {
+ if let Some(written_files) = artifacts.get(name) {
+ files.retain(|f| {
+ written_files.iter().all(|other| other.version != f.version)
+ });
+ return !files.is_empty()
+ }
+ false
+ });
+ if !cached.is_empty() {
+ cached_artifacts.0.insert(f, cached);
+ }
+ }
+ }
+
+ // add the new cache entries to the cache file
+ cache.extend(dirty_entries.into_iter().map(|(file, (entry, _))| (file, entry)));
+
+ cache.strip_artifact_files_prefixes(project.artifacts_path());
+ // write to disk
+ cache.write(project.cache_path())?;
+
+ Ok(cached_artifacts)
+ }
+ }
 }
}
diff --git a/ethers-solc/src/compile/contracts.rs b/ethers-solc/src/compile/contracts.rs
new file mode 100644
index 00000000..47e6db53
--- /dev/null
+++ b/ethers-solc/src/compile/contracts.rs
@@ -0,0 +1,146 @@
+use crate::artifacts::{CompactContractRef, Contract, FileToContractsMap};
+use semver::Version;
+use serde::{Deserialize, Serialize};
+use std::collections::BTreeMap;
+
+/// file -> [(contract name -> Contract + solc version)]
+#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] +#[serde(transparent)] +pub struct VersionedContracts(pub FileToContractsMap>); + +impl VersionedContracts { + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns an iterator over all files + pub fn files(&self) -> impl Iterator + '_ { + self.0.keys() + } + + /// Finds the _first_ contract with the given name + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// use ethers_solc::artifacts::*; + /// # fn demo(project: Project) { + /// let output = project.compile().unwrap().output(); + /// let contract = output.find("Greeter").unwrap(); + /// # } + /// ``` + pub fn find(&self, contract: impl AsRef) -> Option { + let contract_name = contract.as_ref(); + self.contracts().find_map(|(name, contract)| { + (name == contract_name).then(|| CompactContractRef::from(contract)) + }) + } + + /// Removes the _first_ contract with the given name from the set + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// use ethers_solc::artifacts::*; + /// # fn demo(project: Project) { + /// let (_, mut contracts) = project.compile().unwrap().output().split(); + /// let contract = contracts.remove("Greeter").unwrap(); + /// # } + /// ``` + pub fn remove(&mut self, contract: impl AsRef) -> Option { + let contract_name = contract.as_ref(); + self.0.values_mut().find_map(|all_contracts| { + let mut contract = None; + if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) { + if !contracts.is_empty() { + contract = Some(contracts.remove(0).contract); + } + if !contracts.is_empty() { + all_contracts.insert(c, contracts); + } + } + contract + }) + } + + /// Given the contract file's path and the contract's name, tries to return the contract's + /// bytecode, runtime bytecode, and abi + pub fn get(&self, path: &str, contract: &str) -> Option { + self.0 + .get(path) + .and_then(|contracts| { + contracts.get(contract).and_then(|c| c.get(0).map(|c| &c.contract)) + }) + .map(CompactContractRef::from) + } + + /// Iterate over all contracts and their names + pub fn contracts(&self) -> impl Iterator { + self.0 + .values() + .flat_map(|c| c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract)))) + } + + /// Returns an iterator over (`file`, `name`, `Contract`) + pub fn contracts_with_files(&self) -> impl Iterator { + self.0.iter().flat_map(|(file, contracts)| { + contracts + .iter() + .flat_map(move |(name, c)| c.iter().map(move |c| (file, name, &c.contract))) + }) + } + + /// Returns an iterator over all contracts and their source names. 
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use ethers_solc::{ artifacts::*, Artifact };
+ /// # fn demo(contracts: VersionedContracts) {
+ /// let contracts: BTreeMap<_, _> = contracts
+ /// .into_contracts()
+ /// .map(|(k, c)| (k, c.into_compact_contract().unwrap()))
+ /// .collect();
+ /// # }
+ /// ```
+ pub fn into_contracts(self) -> impl Iterator<Item = (String, Contract)> {
+ self.0.into_values().flat_map(|c| {
+ c.into_iter()
+ .flat_map(|(name, c)| c.into_iter().map(move |c| (name.clone(), c.contract)))
+ })
+ }
+}
+
+impl AsRef<FileToContractsMap<Vec<VersionedContract>>> for VersionedContracts {
+ fn as_ref(&self) -> &FileToContractsMap<Vec<VersionedContract>> {
+ &self.0
+ }
+}
+
+impl AsMut<FileToContractsMap<Vec<VersionedContract>>> for VersionedContracts {
+ fn as_mut(&mut self) -> &mut FileToContractsMap<Vec<VersionedContract>> {
+ &mut self.0
+ }
+}
+
+impl IntoIterator for VersionedContracts {
+ type Item = (String, BTreeMap<String, Vec<VersionedContract>>);
+ type IntoIter =
+ std::collections::btree_map::IntoIter<String, BTreeMap<String, Vec<VersionedContract>>>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.0.into_iter()
+ }
+}
+
+/// A contract and the compiler version used to compile it
+#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
+pub struct VersionedContract {
+ pub contract: Contract,
+ pub version: Version,
+}
diff --git a/ethers-solc/src/compile/many.rs b/ethers-solc/src/compile/many.rs
new file mode 100644
index 00000000..109c72e0
--- /dev/null
+++ b/ethers-solc/src/compile/many.rs
@@ -0,0 +1,40 @@
+use crate::{error::Result, CompilerInput, CompilerOutput, Solc};
+
+/// The result of a `solc` process bundled with its `Solc` and `CompilerInput`
+type CompileElement = (Result<CompilerOutput>, Solc, CompilerInput);
+
+/// The bundled output of multiple `solc` processes.
+#[derive(Debug)]
+pub struct CompiledMany {
+ outputs: Vec<CompileElement>,
+}
+
+impl CompiledMany {
+ pub fn new(outputs: Vec<CompileElement>) -> Self {
+ Self { outputs }
+ }
+
+ /// Returns an iterator over all output elements
+ pub fn outputs(&self) -> impl Iterator<Item = &CompileElement> {
+ self.outputs.iter()
+ }
+
+ /// Returns an iterator over all output elements
+ pub fn into_outputs(self) -> impl Iterator<Item = CompileElement> {
+ self.outputs.into_iter()
+ }
+
+ /// Returns all `CompilerOutput` or the first error that occurred
+ pub fn flattened(self) -> Result<Vec<CompilerOutput>> {
+ self.into_iter().collect()
+ }
+}
+
+impl IntoIterator for CompiledMany {
+ type Item = Result<CompilerOutput>;
+ type IntoIter = std::vec::IntoIter<Result<CompilerOutput>>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.outputs.into_iter().map(|(res, _, _)| res).collect::<Vec<_>>().into_iter()
+ }
+}
diff --git a/ethers-solc/src/compile.rs b/ethers-solc/src/compile/mod.rs
similarity index 95%
rename from ethers-solc/src/compile.rs
rename to ethers-solc/src/compile/mod.rs
index 5153591b..a26f0fa7 100644
--- a/ethers-solc/src/compile.rs
+++ b/ethers-solc/src/compile/mod.rs
@@ -15,6 +15,11 @@ use std::{
 str::FromStr,
};
 
+pub mod contracts;
+pub mod many;
+pub mod output;
+pub mod project;
+
/// The name of the `solc` binary on the system
pub const SOLC: &str = "solc";
 
@@ -496,7 +501,7 @@ impl Solc {
 .stdout(Stdio::piped())
 .spawn()
 .map_err(|err| SolcError::io(err, &self.solc))?;
- let stdin = child.stdin.take().unwrap();
+ let stdin = child.stdin.take().expect("Stdin exists.");
 serde_json::to_writer(stdin, input)?;
 compile_output(child.wait_with_output().map_err(|err| SolcError::io(err, &self.solc))?)
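The `compile` body above writes the `CompilerInput` JSON to the child's stdin and parses what comes back on stdout. For illustration, the same stdio round-trip with nothing but `std::process`, assuming a `solc` binary on `PATH` (error handling collapsed into a boxed error; not the crate's actual wrapper):

```rust
use std::io::Write;
use std::process::{Command, Stdio};

fn compile_standard_json(input_json: &str) -> Result<String, Box<dyn std::error::Error>> {
    let mut child = Command::new("solc")
        .arg("--standard-json")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;

    // write the input, then drop stdin so the child sees EOF
    child.stdin.take().expect("stdin was piped").write_all(input_json.as_bytes())?;

    // collect stdout; solc prints the CompilerOutput JSON there
    let output = child.wait_with_output()?;
    Ok(String::from_utf8(output.stdout)?)
}
```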
} @@ -602,7 +607,7 @@ impl Solc { /// let outputs = Solc::compile_many([(solc1, input1), (solc2, input2)], 2).await.flattened().unwrap(); /// # } /// ``` - pub async fn compile_many(jobs: I, n: usize) -> CompiledMany + pub async fn compile_many(jobs: I, n: usize) -> crate::many::CompiledMany where I: IntoIterator, { @@ -615,42 +620,8 @@ impl Solc { .buffer_unordered(n) .collect::>() .await; - CompiledMany { outputs } - } -} -/// The result of a `solc` process bundled with its `Solc` and `CompilerInput` -type CompileElement = (Result, Solc, CompilerInput); - -/// The output of multiple `solc` processes. -#[derive(Debug)] -pub struct CompiledMany { - outputs: Vec, -} - -impl CompiledMany { - /// Returns an iterator over all output elements - pub fn outputs(&self) -> impl Iterator { - self.outputs.iter() - } - - /// Returns an iterator over all output elements - pub fn into_outputs(self) -> impl Iterator { - self.outputs.into_iter() - } - - /// Returns all `CompilerOutput` or the first error that occurred - pub fn flattened(self) -> Result> { - self.into_iter().collect() - } -} - -impl IntoIterator for CompiledMany { - type Item = Result; - type IntoIter = std::vec::IntoIter>; - - fn into_iter(self) -> Self::IntoIter { - self.outputs.into_iter().map(|(res, _, _)| res).collect::>().into_iter() + crate::many::CompiledMany::new(outputs) } } @@ -716,7 +687,7 @@ mod tests { #[test] fn solc_compile_works() { - let input = include_str!("../test-data/in/compiler-in-1.json"); + let input = include_str!("../../test-data/in/compiler-in-1.json"); let input: CompilerInput = serde_json::from_str(input).unwrap(); let out = solc().compile(&input).unwrap(); let other = solc().compile(&serde_json::json!(input)).unwrap(); @@ -726,7 +697,7 @@ mod tests { #[cfg(feature = "async")] #[tokio::test] async fn async_solc_compile_works() { - let input = include_str!("../test-data/in/compiler-in-1.json"); + let input = include_str!("../../test-data/in/compiler-in-1.json"); let input: CompilerInput = serde_json::from_str(input).unwrap(); let out = solc().async_compile(&input).await.unwrap(); let other = solc().async_compile(&serde_json::json!(input)).await.unwrap(); @@ -735,7 +706,7 @@ mod tests { #[cfg(feature = "async")] #[tokio::test] async fn async_solc_compile_works2() { - let input = include_str!("../test-data/in/compiler-in-2.json"); + let input = include_str!("../../test-data/in/compiler-in-2.json"); let input: CompilerInput = serde_json::from_str(input).unwrap(); let out = solc().async_compile(&input).await.unwrap(); let other = solc().async_compile(&serde_json::json!(input)).await.unwrap(); diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs new file mode 100644 index 00000000..3cd0a9dc --- /dev/null +++ b/ethers-solc/src/compile/output.rs @@ -0,0 +1,363 @@ +//! The output of a compiled project + +use crate::{ + artifacts::{CompactContractRef, Contract, Error, SourceFile, SourceFiles}, + contracts::{VersionedContract, VersionedContracts}, + ArtifactOutput, Artifacts, CompilerOutput, +}; +use semver::Version; +use std::{collections::BTreeMap, fmt, path::Path}; + +/// Contains a mixture of already compiled/cached artifacts and the input set of sources that still +/// need to be compiled. 
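`CompiledMany::flattened()` above leans on the standard `FromIterator` impl for `Result`: collecting an iterator of `Result<T, E>` into `Result<Vec<T>, E>` yields either all outputs or the first error encountered. A self-contained illustration of that semantics:

```rust
fn main() {
    let all_ok: Vec<Result<u32, String>> = vec![Ok(1), Ok(2)];
    let with_err: Vec<Result<u32, String>> = vec![Ok(1), Err("boom".into()), Err("later".into())];

    // all elements are Ok: the collected Result is Ok with every value
    let flat: Result<Vec<u32>, String> = all_ok.into_iter().collect();
    assert_eq!(flat, Ok(vec![1, 2]));

    // the first Err short-circuits the collection
    let flat: Result<Vec<u32>, String> = with_err.into_iter().collect();
    assert_eq!(flat, Err("boom".to_string()));
}
```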
+#[derive(Debug, Clone, PartialEq, Default)] +pub struct ProjectCompileOutput { + /// contains the aggregated `CompilerOutput` + /// + /// See [`CompilerSources::compile`] + pub(crate) compiler_output: AggregatedCompilerOutput, + /// all artifact files from `output` that were freshly compiled and written + pub(crate) compiled_artifacts: Artifacts, + /// All artifacts that were read from cache + pub(crate) cached_artifacts: Artifacts, + /// errors that should be omitted + pub(crate) ignored_error_codes: Vec, +} + +impl ProjectCompileOutput { + /// All artifacts together with their contract file name and name `:` + /// + /// This returns a chained iterator of both cached and recompiled contract artifacts + /// + /// # Example + /// + /// ```no_run + /// use std::collections::btree_map::BTreeMap; + /// use ethers_solc::artifacts::CompactContractBytecode; + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let contracts: BTreeMap = project.compile().unwrap().into_artifacts().collect(); + /// ``` + pub fn into_artifacts(self) -> impl Iterator { + let Self { cached_artifacts, compiled_artifacts, .. } = self; + cached_artifacts.into_artifacts::().chain(compiled_artifacts.into_artifacts::()) + } + + /// All artifacts together with their contract file and name as tuple `(file, contract + /// name, artifact)` + /// + /// This returns a chained iterator of both cached and recompiled contract artifacts + /// + /// # Example + /// + /// ```no_run + /// use std::collections::btree_map::BTreeMap; + /// use ethers_solc::artifacts::CompactContractBytecode; + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let contracts: Vec<(String, String, CompactContractBytecode)> = project.compile().unwrap().into_artifacts_with_files().collect(); + /// ``` + /// + /// **NOTE** the `file` will be returned as is, see also [`Self::with_stripped_file_prefixes()`] + pub fn into_artifacts_with_files(self) -> impl Iterator { + let Self { cached_artifacts, compiled_artifacts, .. 
} = self;
+ cached_artifacts
+ .into_artifacts_with_files()
+ .chain(compiled_artifacts.into_artifacts_with_files())
+ }
+
+ /// Strips the given prefix from all artifact file paths to make them relative to the given
+ /// `base` argument
+ ///
+ /// # Example
+ ///
+ /// Make all artifact files relative to the project's root directory
+ ///
+ /// ```no_run
+ /// use ethers_solc::artifacts::CompactContractBytecode;
+ /// use ethers_solc::Project;
+ ///
+ /// let project = Project::builder().build().unwrap();
+ /// let output = project.compile().unwrap().with_stripped_file_prefixes(project.root());
+ /// ```
+ pub fn with_stripped_file_prefixes(mut self, base: impl AsRef<Path>) -> Self {
+ let base = base.as_ref();
+ self.cached_artifacts = self.cached_artifacts.into_stripped_file_prefixes(base);
+ self.compiled_artifacts = self.compiled_artifacts.into_stripped_file_prefixes(base);
+ self
+ }
+
+ /// Get the (merged) solc compiler output
+ /// ```no_run
+ /// use std::collections::btree_map::BTreeMap;
+ /// use ethers_solc::artifacts::Contract;
+ /// use ethers_solc::Project;
+ ///
+ /// let project = Project::builder().build().unwrap();
+ /// let contracts: BTreeMap<String, Contract> =
+ /// project.compile().unwrap().output().contracts_into_iter().collect();
+ /// ```
+ pub fn output(self) -> AggregatedCompilerOutput {
+ self.compiler_output
+ }
+
+ /// Whether this type has a compiler output
+ pub fn has_compiled_contracts(&self) -> bool {
+ !self.compiler_output.is_empty()
+ }
+
+ /// Whether this output does not contain any freshly compiled contracts
+ pub fn is_unchanged(&self) -> bool {
+ self.compiler_output.is_unchanged()
+ }
+
+ /// Whether there were errors
+ pub fn has_compiler_errors(&self) -> bool {
+ self.compiler_output.has_error()
+ }
+
+ /// Whether there were warnings
+ pub fn has_compiler_warnings(&self) -> bool {
+ self.compiler_output.has_warning(&self.ignored_error_codes)
+ }
+
+ /// Finds the first contract with the given name and removes it from the set
+ pub fn remove(&mut self, contract_name: impl AsRef<str>) -> Option<T::Artifact> {
+ let contract_name = contract_name.as_ref();
+ if let artifact @ Some(_) = self.compiled_artifacts.remove(contract_name) {
+ return artifact
+ }
+ self.cached_artifacts.remove(contract_name)
+ }
+
+ /// Returns the set of `Artifacts` that were cached and got reused during [`Project::compile()`]
+ pub fn cached_artifacts(&self) -> &Artifacts<T::Artifact> {
+ &self.cached_artifacts
+ }
+
+ /// Returns the set of `Artifacts` that were compiled with `solc` in [`Project::compile()`]
+ pub fn compiled_artifacts(&self) -> &Artifacts<T::Artifact> {
+ &self.compiled_artifacts
+ }
+}
+
+impl<T: ArtifactOutput> ProjectCompileOutput<T>
+where
+ T::Artifact: Clone,
+{
+ /// Finds the first contract with the given name
+ pub fn find(&self, contract_name: impl AsRef<str>) -> Option<&T::Artifact> {
+ let contract_name = contract_name.as_ref();
+ if let artifact @ Some(_) = self.compiled_artifacts.find(contract_name) {
+ return artifact
+ }
+ self.cached_artifacts.find(contract_name)
+ }
+}
+
+impl<T: ArtifactOutput> fmt::Display for ProjectCompileOutput<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.compiler_output.is_unchanged() {
+ f.write_str("Nothing to compile")
+ } else {
+ self.compiler_output.diagnostics(&self.ignored_error_codes).fmt(f)
+ }
+ }
+}
+
+/// The aggregated output of (multiple) compile jobs
+///
+/// This is effectively a solc version aware `CompilerOutput`
+#[derive(Clone, Debug, Default, PartialEq)]
+pub struct AggregatedCompilerOutput {
+ /// all errors from all `CompilerOutput`
+ pub errors: Vec<Error>,
+ /// All source files
+ pub
sources: BTreeMap, + /// All compiled contracts combined with the solc version used to compile them + pub contracts: VersionedContracts, +} + +impl AggregatedCompilerOutput { + /// Whether the output contains a compiler error + pub fn has_error(&self) -> bool { + self.errors.iter().any(|err| err.severity.is_error()) + } + + /// Whether the output contains a compiler warning + pub fn has_warning(&self, ignored_error_codes: &[u64]) -> bool { + self.errors.iter().any(|err| { + if err.severity.is_warning() { + err.error_code.as_ref().map_or(false, |code| !ignored_error_codes.contains(code)) + } else { + false + } + }) + } + + pub fn diagnostics<'a>(&'a self, ignored_error_codes: &'a [u64]) -> OutputDiagnostics { + OutputDiagnostics { compiler_output: self, ignored_error_codes } + } + + pub fn is_empty(&self) -> bool { + self.contracts.is_empty() + } + + pub fn is_unchanged(&self) -> bool { + self.contracts.is_empty() && self.errors.is_empty() + } + + pub fn extend_all(&mut self, out: I) + where + I: IntoIterator, + { + for (v, o) in out { + self.extend(v, o) + } + } + + /// adds a new `CompilerOutput` to the aggregated output + pub fn extend(&mut self, version: Version, output: CompilerOutput) { + self.errors.extend(output.errors); + self.sources.extend(output.sources); + + for (file_name, new_contracts) in output.contracts { + let contracts = self.contracts.as_mut().entry(file_name).or_default(); + for (contract_name, contract) in new_contracts { + let versioned = contracts.entry(contract_name).or_default(); + versioned.push(VersionedContract { contract, version: version.clone() }); + } + } + } + + /// Finds the _first_ contract with the given name + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// use ethers_solc::artifacts::*; + /// # fn demo(project: Project) { + /// let output = project.compile().unwrap().output(); + /// let contract = output.find("Greeter").unwrap(); + /// # } + /// ``` + pub fn find(&self, contract: impl AsRef) -> Option { + self.contracts.find(contract) + } + + /// Removes the _first_ contract with the given name from the set + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// use ethers_solc::artifacts::*; + /// # fn demo(project: Project) { + /// let mut output = project.compile().unwrap().output(); + /// let contract = output.remove("Greeter").unwrap(); + /// # } + /// ``` + pub fn remove(&mut self, contract: impl AsRef) -> Option { + self.contracts.remove(contract) + } + + /// Iterate over all contracts and their names + pub fn contracts_iter(&self) -> impl Iterator { + self.contracts.contracts() + } + + /// Iterate over all contracts and their names + pub fn contracts_into_iter(self) -> impl Iterator { + self.contracts.into_contracts() + } + + /// Given the contract file's path and the contract's name, tries to return the contract's + /// bytecode, runtime bytecode, and abi + pub fn get(&self, path: &str, contract: &str) -> Option { + self.contracts.get(path, contract) + } + + /// Returns the output's source files and contracts separately, wrapped in helper types that + /// provide several helper methods + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// # fn demo(project: Project) { + /// let output = project.compile().unwrap().output(); + /// let (sources, contracts) = output.split(); + /// # } + /// ``` + pub fn split(self) -> (SourceFiles, VersionedContracts) { + (SourceFiles(self.sources), self.contracts) + } +} + +/// Helper type to implement display for solc errors +#[derive(Clone, 
Debug)]
+pub struct OutputDiagnostics<'a> {
+ compiler_output: &'a AggregatedCompilerOutput,
+ ignored_error_codes: &'a [u64],
+}
+
+impl<'a> OutputDiagnostics<'a> {
+ /// Returns true if there is at least one error of high severity
+ pub fn has_error(&self) -> bool {
+ self.compiler_output.has_error()
+ }
+
+ /// Returns true if there is at least one warning
+ pub fn has_warning(&self) -> bool {
+ self.compiler_output.has_warning(self.ignored_error_codes)
+ }
+
+ /// Returns true if the contract is expected to be a test
+ fn is_test<T: AsRef<str>>(&self, contract_path: T) -> bool {
+ if contract_path.as_ref().ends_with(".t.sol") {
+ return true
+ }
+
+ self.compiler_output.find(&contract_path).map_or(false, |contract| {
+ contract.abi.map_or(false, |abi| abi.functions.contains_key("IS_TEST"))
+ })
+ }
+}
+
+impl<'a> fmt::Display for OutputDiagnostics<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.has_error() {
+ f.write_str("Compiler run failed")?;
+ } else if self.has_warning() {
+ f.write_str("Compiler run successful (with warnings)")?;
+ } else {
+ f.write_str("Compiler run successful")?;
+ }
+ for err in &self.compiler_output.errors {
+ if err.severity.is_warning() {
+ let is_ignored = err.error_code.as_ref().map_or(false, |code| {
+ if let Some(source_location) = &err.source_location {
+ // we ignore spdx and contract size warnings in test
+ // files. if we are looking at one of these warnings
+ // from a test file we skip
+ if self.is_test(&source_location.file) && (*code == 1878 || *code == 5574) {
+ return true
+ }
+ }
+
+ self.ignored_error_codes.contains(code)
+ });
+
+ if !is_ignored {
+ writeln!(f, "\n{}", err)?;
+ }
+ } else {
+ writeln!(f, "\n{}", err)?;
+ }
+ }
+ Ok(())
+ }
+}
diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs
new file mode 100644
index 00000000..5708494d
--- /dev/null
+++ b/ethers-solc/src/compile/project.rs
@@ -0,0 +1,440 @@
+//! Manages compiling of a `Project`
+//!
+//! The compilation of a project is performed in several steps.
+//!
+//! First the project's dependency graph [`crate::Graph`] is constructed and all imported
+//! dependencies are resolved. The graph holds all the relationships between the files and their
+//! versions. From there the appropriate version set is derived
+//! [`crate::Graph::into_sources_by_version()`], which needs to be compiled with different
+//! [`crate::Solc`] versions.
+//!
+//! At this point we check if we need to compile a source file or whether we can reuse an _existing_
+//! `Artifact`. We don't need to compile if:
+//! - caching is enabled
+//! - the file is **not** dirty [`Cache::is_dirty()`]
+//! - the artifact for that file exists
+//!
+//! This concludes the preprocessing, and we now have either
+//! - only `Source` files that need to be compiled
+//! - only cached `Artifacts`, compilation can be skipped. This is considered an unchanged,
+//! cached project
+//! - Mix of both `Source` and `Artifacts`, only the `Source` files need to be compiled, the
+//! `Artifacts` can be reused.
+//!
+//! The final step is invoking `Solc` via the standard JSON format.
+//!
+//! ### Notes on [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution)
+//!
+//! In order to be able to support reproducible builds on all platforms, the Solidity compiler has
+//! to abstract away the details of the filesystem where source files are stored. Paths used in
imports must work the same way everywhere while the command-line interface must be able to work
+//! with platform-specific paths to provide good user experience. This section aims to explain in
+//! detail how Solidity reconciles these requirements.
+//!
+//! The compiler maintains an internal database (virtual filesystem or VFS for short) where each
+//! source unit is assigned a unique source unit name which is an opaque and unstructured
+//! identifier. When you use the import statement, you specify an import path that references a
+//! source unit name. If the compiler does not find any source unit name matching the import path in
+//! the VFS, it invokes the callback, which is responsible for obtaining the source code to be
+//! placed under that name.
+//!
+//! This becomes relevant when dealing with resolved imports
+//!
+//! #### Relative Imports
+//!
+//! ```solidity
+//! import "./math/math.sol";
+//! import "contracts/tokens/token.sol";
+//! ```
+//! In the above `./math/math.sol` and `contracts/tokens/token.sol` are import paths while the
+//! source unit names they translate to are `contracts/math/math.sol` and
+//! `contracts/tokens/token.sol` respectively.
+//!
+//! #### Direct Imports
+//!
+//! An import that does not start with `./` or `../` is a direct import.
+//!
+//! ```solidity
+//! import "/project/lib/util.sol"; // source unit name: /project/lib/util.sol
+//! import "lib/util.sol"; // source unit name: lib/util.sol
+//! import "@openzeppelin/address.sol"; // source unit name: @openzeppelin/address.sol
+//! import "https://example.com/token.sol"; // source unit name: https://example.com/token.sol
+//! ```
+//!
+//! After applying any import remappings the import path simply becomes the source unit name.
+//!
+//! ##### Import Remapping
+//!
+//! ```solidity
+//! import "github.com/ethereum/dapp-bin/library/math.sol"; // source unit name: dapp-bin/library/math.sol
+//! ```
+//!
+//! If compiled with `solc github.com/ethereum/dapp-bin/=dapp-bin/` the compiler will look for the
+//! file in the VFS under `dapp-bin/library/math.sol`. If the file is not available there, the
+//! source unit name will be passed to the Host Filesystem Loader, which will then look in
+//! `/project/dapp-bin/library/math.sol`

+use crate::{
+ artifact_output::Artifacts,
+ artifacts::{Settings, VersionedSources},
+ cache::ArtifactsCache,
+ error::Result,
+ output::AggregatedCompilerOutput,
+ resolver::GraphEdges,
+ ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, Solc,
+ Sources,
+};
+use rayon::prelude::*;
+
+use std::collections::btree_map::BTreeMap;
+
+#[derive(Debug)]
+pub struct ProjectCompiler<'a, T: ArtifactOutput> {
+ /// Contains the relationship of the source files and their imports
+ edges: GraphEdges,
+ project: &'a Project<T>,
+ /// how to compile all the sources
+ sources: CompilerSources,
+}
+
+impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
+ /// Create a new `ProjectCompiler` to bootstrap the compilation process of the project's
+ /// sources.
+ ///
+ /// # Example
+ ///
+ /// ```no_run
+ /// use ethers_solc::Project;
+ ///
+ /// let project = Project::builder().build().unwrap();
+ /// let output = project.compile().unwrap();
+ /// ```
+ #[cfg(all(feature = "svm", feature = "async"))]
+ pub fn new(project: &'a Project<T>) -> Result<Self> {
+ Self::with_sources(project, project.paths.read_input_files()?)
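A hypothetical helper illustrating how a single `prefix=target` remapping, as described in the module docs above, rewrites an import path into a source unit name (this is not the crate's actual `Remapping` implementation):

```rust
// Sketch only: apply one `prefix=target` remapping to an import path.
fn apply_remapping(import: &str, prefix: &str, target: &str) -> String {
    match import.strip_prefix(prefix) {
        Some(rest) => format!("{target}{rest}"),
        None => import.to_string(),
    }
}

fn main() {
    let source_unit_name = apply_remapping(
        "github.com/ethereum/dapp-bin/library/math.sol",
        "github.com/ethereum/dapp-bin/",
        "dapp-bin/",
    );
    assert_eq!(source_unit_name, "dapp-bin/library/math.sol");
}
```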
+ } + + /// Bootstraps the compilation process by resolving the dependency graph of all sources and the + /// appropriate `Solc` -> `Sources` set as well as the compile mode to use (parallel, + /// sequential) + /// + /// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows + /// multiple `jobs`, see [`crate::Project::set_solc_jobs()`]. + #[cfg(all(feature = "svm", feature = "async"))] + pub fn with_sources(project: &'a Project, sources: Sources) -> Result { + let graph = Graph::resolve_sources(&project.paths, sources)?; + let (versions, edges) = graph.into_sources_by_version(project.offline)?; + let sources_by_version = versions.get(&project.allowed_lib_paths)?; + + let sources = if project.solc_jobs > 1 && sources_by_version.len() > 1 { + // if there are multiple different versions, and we can use multiple jobs we can compile + // them in parallel + CompilerSources::Parallel(sources_by_version, project.solc_jobs) + } else { + CompilerSources::Sequential(sources_by_version) + }; + + Ok(Self { edges, project, sources }) + } + + /// Compiles the sources with a pinned `Solc` instance + pub fn with_sources_and_solc( + project: &'a Project, + sources: Sources, + solc: Solc, + ) -> Result { + let version = solc.version()?; + let (sources, edges) = Graph::resolve_sources(&project.paths, sources)?.into_sources(); + let sources_by_version = BTreeMap::from([(solc, (version, sources))]); + let sources = CompilerSources::Sequential(sources_by_version); + + Ok(Self { edges, project, sources }) + } + + /// Compiles all the sources of the `Project` in the appropriate mode + /// + /// If caching is enabled, the sources are filtered and only _dirty_ sources are recompiled. + /// + /// The output of the compile process can be a mix of reused artifacts and freshly compiled + /// `Contract`s + /// + /// # Example + /// + /// ```no_run + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let output = project.compile().unwrap(); + /// ``` + pub fn compile(self) -> Result> { + // drive the compiler statemachine to completion + self.preprocess()?.compile()?.write_artifacts()?.write_cache() + } + + /// Does basic preprocessing + /// - sets proper source unit names + /// - check cache + fn preprocess(self) -> Result> { + let Self { edges, project, mut sources } = self; + + let mut cache = ArtifactsCache::new(project, edges)?; + // retain and compile only dirty sources + sources = sources.filtered(&mut cache); + + Ok(PreprocessedState { sources, cache }) + } +} + +/// A series of states that comprise the [`ProjectCompiler::compile()`] state machine +/// +/// The main reason is to debug all states individually +#[derive(Debug)] +struct PreprocessedState<'a, T: ArtifactOutput> { + sources: CompilerSources, + cache: ArtifactsCache<'a, T>, +} + +impl<'a, T: ArtifactOutput> PreprocessedState<'a, T> { + /// advance to the next state by compiling all sources + fn compile(self) -> Result> { + let PreprocessedState { sources, cache } = self; + let output = + sources.compile(&cache.project().solc_config.settings, &cache.project().paths)?; + + Ok(CompiledState { output, cache }) + } +} + +/// Represents the state after `solc` was successfully invoked +#[derive(Debug)] +struct CompiledState<'a, T: ArtifactOutput> { + output: AggregatedCompilerOutput, + cache: ArtifactsCache<'a, T>, +} + +impl<'a, T: ArtifactOutput> CompiledState<'a, T> { + /// advance to the next state by handling all artifacts + /// + /// Writes all output contracts to disk if 
enabled in the `Project` + fn write_artifacts(self) -> Result> { + let CompiledState { output, cache } = self; + // write all artifacts + let compiled_artifacts = if !cache.project().no_artifacts { + T::on_output(&output.contracts, &cache.project().paths)? + } else { + T::output_to_artifacts(&output.contracts) + }; + + Ok(ArtifactsState { output, cache, compiled_artifacts }) + } +} + +/// Represents the state after all artifacts were written to disk +#[derive(Debug)] +struct ArtifactsState<'a, T: ArtifactOutput> { + output: AggregatedCompilerOutput, + cache: ArtifactsCache<'a, T>, + compiled_artifacts: Artifacts, +} + +impl<'a, T: ArtifactOutput> ArtifactsState<'a, T> { + /// Writes the cache file + /// + /// this concludes the [`Project::compile()`] statemachine + fn write_cache(self) -> Result> { + let ArtifactsState { output, cache, compiled_artifacts } = self; + let ignored_error_codes = cache.project().ignored_error_codes.clone(); + let cached_artifacts = cache.write_cache(&compiled_artifacts)?; + Ok(ProjectCompileOutput { + compiler_output: output, + compiled_artifacts, + cached_artifacts, + ignored_error_codes, + }) + } +} + +/// Determines how the `solc <-> sources` pairs are executed +#[derive(Debug, Clone)] +#[allow(dead_code)] +enum CompilerSources { + /// Compile all these sequentially + Sequential(VersionedSources), + /// Compile all these in parallel using a certain amount of jobs + Parallel(VersionedSources, usize), +} + +impl CompilerSources { + /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`] + fn filtered(self, cache: &mut ArtifactsCache) -> Self { + fn filtered_sources( + sources: VersionedSources, + cache: &mut ArtifactsCache, + ) -> VersionedSources { + sources + .into_iter() + .map(|(solc, (version, sources))| { + let sources = cache.filter(sources, &version); + (solc, (version, sources)) + }) + .collect() + } + + match self { + CompilerSources::Sequential(s) => { + CompilerSources::Sequential(filtered_sources(s, cache)) + } + CompilerSources::Parallel(s, j) => { + CompilerSources::Parallel(filtered_sources(s, cache), j) + } + } + } + + /// Compiles all the files with `Solc` + fn compile( + self, + settings: &Settings, + paths: &ProjectPathsConfig, + ) -> Result { + match self { + CompilerSources::Sequential(input) => compile_sequential(input, settings, paths), + CompilerSources::Parallel(input, j) => compile_parallel(input, j, settings, paths), + } + } +} + +/// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s +fn compile_sequential( + input: VersionedSources, + settings: &Settings, + paths: &ProjectPathsConfig, +) -> Result { + let mut aggregated = AggregatedCompilerOutput::default(); + tracing::trace!("compiling {} jobs sequentially", input.len()); + for (solc, (version, sources)) in input { + if sources.is_empty() { + // nothing to compile + continue + } + tracing::trace!( + "compiling {} sources with solc \"{}\" {:?}", + sources.len(), + solc.as_ref().display(), + solc.args + ); + + let input = CompilerInput::with_sources(sources) + .settings(settings.clone()) + .normalize_evm_version(&version) + .with_remappings(paths.remappings.clone()); + + tracing::trace!( + "calling solc `{}` with {} sources {:?}", + version, + input.sources.len(), + input.sources.keys() + ); + let output = solc.compile(&input)?; + tracing::trace!("compiled input, output has error: {}", output.has_error()); + + aggregated.extend(version, output); + } + Ok(aggregated) +} + +/// compiles the input set 
using `num_jobs` threads +fn compile_parallel( + input: VersionedSources, + num_jobs: usize, + settings: &Settings, + paths: &ProjectPathsConfig, +) -> Result { + debug_assert!(num_jobs > 1); + tracing::trace!("compile sources in parallel using up to {} solc jobs", num_jobs); + + let mut jobs = Vec::with_capacity(input.len()); + for (solc, (version, sources)) in input { + if sources.is_empty() { + // nothing to compile + continue + } + + let job = CompilerInput::with_sources(sources) + .settings(settings.clone()) + .normalize_evm_version(&version) + .with_remappings(paths.remappings.clone()); + + jobs.push((solc, version, job)) + } + + // start a rayon threadpool that will execute all `Solc::compile()` processes + let pool = rayon::ThreadPoolBuilder::new().num_threads(num_jobs).build().unwrap(); + let outputs = pool.install(move || { + jobs.into_par_iter() + .map(|(solc, version, input)| { + tracing::trace!( + "calling solc `{}` {:?} with {} sources: {:?}", + version, + solc.args, + input.sources.len(), + input.sources.keys() + ); + solc.compile(&input).map(|output| (version, output)) + }) + .collect::>>() + })?; + + let mut aggregated = AggregatedCompilerOutput::default(); + aggregated.extend_all(outputs); + + Ok(aggregated) +} + +#[cfg(test)] +#[cfg(feature = "project-util")] +mod tests { + use super::*; + use crate::{project_util::TempProject, MinimalCombinedArtifacts}; + use std::path::PathBuf; + + #[allow(unused)] + fn init_tracing() { + let _ = tracing_subscriber::fmt() + .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) + .try_init() + .ok(); + } + + #[test] + fn can_preprocess() { + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); + let project = + Project::builder().paths(ProjectPathsConfig::dapptools(root).unwrap()).build().unwrap(); + + let compiler = ProjectCompiler::new(&project).unwrap(); + let prep = compiler.preprocess().unwrap(); + let cache = prep.cache.as_cached().unwrap(); + // 3 contracts + assert_eq!(cache.dirty_entries.len(), 3); + assert!(cache.filtered.is_empty()); + assert!(cache.cache.is_empty()); + + let compiled = prep.compile().unwrap(); + assert_eq!(compiled.output.contracts.files().count(), 3); + } + + #[test] + fn can_detect_cached_files() { + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); + let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); + let project = TempProject::::new(paths).unwrap(); + + let compiled = project.compile().unwrap(); + assert!(!compiled.has_compiler_errors()); + + let inner = project.project(); + let compiler = ProjectCompiler::new(inner).unwrap(); + let prep = compiler.preprocess().unwrap(); + assert!(prep.cache.as_cached().unwrap().dirty_entries.is_empty()) + } +} diff --git a/ethers-solc/src/config.rs b/ethers-solc/src/config.rs index 80208792..26e29e68 100644 --- a/ethers-solc/src/config.rs +++ b/ethers-solc/src/config.rs @@ -1,19 +1,16 @@ use crate::{ - artifacts::{CompactContract, CompactContractBytecode, Contract, Settings}, + artifacts::Settings, cache::SOLIDITY_FILES_CACHE_FILENAME, error::{Result, SolcError, SolcIoError}, - hh::HardhatArtifact, remappings::Remapping, resolver::Graph, - utils, CompilerOutput, Source, Sources, + utils, Source, Sources, }; -use ethers_core::{abi::Abi, types::Bytes}; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; + +use serde::{Deserialize, Serialize}; use std::{ - collections::BTreeMap, - convert::TryFrom, fmt::{self, Formatter}, - fs, io, + fs, 
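`compile_parallel` above bounds parallelism by building a dedicated rayon pool and running all jobs inside `pool.install`. A minimal sketch of that pattern, with the `Solc` invocations replaced by a toy computation:

```rust
use rayon::prelude::*;

fn main() {
    // dedicated pool so the job count is bounded independently of the
    // global rayon pool, just like `num_jobs` above
    let pool = rayon::ThreadPoolBuilder::new().num_threads(2).build().unwrap();

    let outputs: Vec<i32> = pool.install(|| {
        // stand-in for `jobs.into_par_iter().map(|(solc, ..)| solc.compile(..))`
        (0..4).into_par_iter().map(|job| job * 2).collect()
    });

    assert_eq!(outputs, vec![0, 2, 4, 6]);
}
```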
path::{Component, Path, PathBuf}, }; @@ -358,27 +355,27 @@ pub struct ProjectPathsConfigBuilder { impl ProjectPathsConfigBuilder { pub fn root(mut self, root: impl Into) -> Self { - self.root = Some(canonicalized(root)); + self.root = Some(utils::canonicalized(root)); self } pub fn cache(mut self, cache: impl Into) -> Self { - self.cache = Some(canonicalized(cache)); + self.cache = Some(utils::canonicalized(cache)); self } pub fn artifacts(mut self, artifacts: impl Into) -> Self { - self.artifacts = Some(canonicalized(artifacts)); + self.artifacts = Some(utils::canonicalized(artifacts)); self } pub fn sources(mut self, sources: impl Into) -> Self { - self.sources = Some(canonicalized(sources)); + self.sources = Some(utils::canonicalized(sources)); self } pub fn tests(mut self, tests: impl Into) -> Self { - self.tests = Some(canonicalized(tests)); + self.tests = Some(utils::canonicalized(tests)); self } @@ -389,14 +386,14 @@ impl ProjectPathsConfigBuilder { } pub fn lib(mut self, lib: impl Into) -> Self { - self.libraries.get_or_insert_with(Vec::new).push(canonicalized(lib)); + self.libraries.get_or_insert_with(Vec::new).push(utils::canonicalized(lib)); self } pub fn libs(mut self, libs: impl IntoIterator>) -> Self { let libraries = self.libraries.get_or_insert_with(Vec::new); for lib in libs.into_iter() { - libraries.push(canonicalized(lib)); + libraries.push(utils::canonicalized(lib)); } self } @@ -415,7 +412,10 @@ impl ProjectPathsConfigBuilder { } pub fn build_with_root(self, root: impl Into) -> ProjectPathsConfig { - let root = canonicalized(root); + let root = utils::canonicalized(root); + + let libraries = self.libraries.unwrap_or_else(|| ProjectPathsConfig::find_libs(&root)); + ProjectPathsConfig { cache: self .cache @@ -425,8 +425,10 @@ impl ProjectPathsConfigBuilder { .unwrap_or_else(|| ProjectPathsConfig::find_artifacts_dir(&root)), sources: self.sources.unwrap_or_else(|| ProjectPathsConfig::find_source_dir(&root)), tests: self.tests.unwrap_or_else(|| root.join("tests")), - libraries: self.libraries.unwrap_or_else(|| ProjectPathsConfig::find_libs(&root)), - remappings: self.remappings.unwrap_or_default(), + remappings: self + .remappings + .unwrap_or_else(|| libraries.iter().flat_map(Remapping::find_many).collect()), + libraries, root, } } @@ -442,20 +444,6 @@ impl ProjectPathsConfigBuilder { } } -/// Returns the same path config but with canonicalized paths. -/// -/// This will take care of potential symbolic linked directories. -/// For example, the tempdir library is creating directories hosted under `/var/`, which in OS X -/// is a symbolic link to `/private/var/`. 
So if when we try to resolve imports and a path is -/// rooted in a symbolic directory we might end up with different paths for the same file, like -/// `private/var/.../Dapp.sol` and `/var/.../Dapp.sol` -/// -/// This canonicalizes all the paths but does not treat non existing dirs as an error -fn canonicalized(path: impl Into) -> PathBuf { - let path = path.into(); - utils::canonicalize(&path).unwrap_or(path) -} - /// The config to use when compiling the contracts #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] pub struct SolcConfig { @@ -497,229 +485,6 @@ impl SolcConfigBuilder { } } -pub type Artifacts = BTreeMap>; - -pub trait Artifact { - /// Returns the artifact's `Abi` and bytecode - fn into_inner(self) -> (Option, Option); - - /// Turns the artifact into a container type for abi, compact bytecode and deployed bytecode - fn into_compact_contract(self) -> CompactContract; - - /// Turns the artifact into a container type for abi, full bytecode and deployed bytecode - fn into_contract_bytecode(self) -> CompactContractBytecode; - - /// Returns the contents of this type as a single tuple of abi, bytecode and deployed bytecode - fn into_parts(self) -> (Option, Option, Option); -} - -impl Artifact for T -where - T: Into + Into, -{ - fn into_inner(self) -> (Option, Option) { - let artifact = self.into_compact_contract(); - (artifact.abi, artifact.bin.and_then(|bin| bin.into_bytes())) - } - - fn into_compact_contract(self) -> CompactContract { - self.into() - } - - fn into_contract_bytecode(self) -> CompactContractBytecode { - self.into() - } - - fn into_parts(self) -> (Option, Option, Option) { - self.into_compact_contract().into_parts() - } -} - -pub trait ArtifactOutput { - /// How Artifacts are stored - type Artifact: Artifact + DeserializeOwned; - - /// Handle the compiler output. - fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()>; - - /// Returns the file name for the contract's artifact - fn output_file_name(name: impl AsRef) -> PathBuf { - format!("{}.json", name.as_ref()).into() - } - - /// Returns the path to the contract's artifact location based on the contract's file and name - /// - /// This returns `contract.sol/contract.json` by default - fn output_file(contract_file: impl AsRef, name: impl AsRef) -> PathBuf { - let name = name.as_ref(); - contract_file - .as_ref() - .file_name() - .map(Path::new) - .map(|p| p.join(Self::output_file_name(name))) - .unwrap_or_else(|| Self::output_file_name(name)) - } - - /// The inverse of `contract_file_name` - /// - /// Expected to return the solidity contract's name derived from the file path - /// `sources/Greeter.sol` -> `Greeter` - fn contract_name(file: impl AsRef) -> Option { - file.as_ref().file_stem().and_then(|s| s.to_str().map(|s| s.to_string())) - } - - /// Whether the corresponding artifact of the given contract file and name exists - fn output_exists( - contract_file: impl AsRef, - name: impl AsRef, - root: impl AsRef, - ) -> bool { - root.as_ref().join(Self::output_file(contract_file, name)).exists() - } - - fn read_cached_artifact(path: impl AsRef) -> Result { - let path = path.as_ref(); - let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?; - let file = io::BufReader::new(file); - Ok(serde_json::from_reader(file)?) 
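The `output_file` helper deleted here (it moves to the new `artifact_output.rs`) maps a contract file plus contract name to the artifact location, e.g. `sources/Greeter.sol` + `Greeter` -> `Greeter.sol/Greeter.json`. A standalone sketch of that mapping:

```rust
use std::path::{Path, PathBuf};

fn output_file(contract_file: &Path, name: &str) -> PathBuf {
    let file_name = format!("{name}.json");
    contract_file
        .file_name()
        .map(Path::new)
        // nest the json under a directory named after the solidity file
        .map(|p| p.join(&file_name))
        .unwrap_or_else(|| PathBuf::from(file_name))
}

fn main() {
    assert_eq!(
        output_file(Path::new("sources/Greeter.sol"), "Greeter"),
        PathBuf::from("Greeter.sol/Greeter.json")
    );
}
```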
- } - - /// Read the cached artifacts from disk - fn read_cached_artifacts(files: I) -> Result> - where - I: IntoIterator, - T: Into, - { - let mut artifacts = BTreeMap::default(); - for path in files.into_iter() { - let path = path.into(); - let artifact = Self::read_cached_artifact(&path)?; - artifacts.insert(path, artifact); - } - Ok(artifacts) - } - - /// Convert a contract to the artifact type - fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact; - - /// Convert the compiler output into a set of artifacts - fn output_to_artifacts(output: CompilerOutput) -> Artifacts { - output - .contracts - .into_iter() - .map(|(file, contracts)| { - let contracts = contracts - .into_iter() - .map(|(name, c)| { - let contract = Self::contract_to_artifact(&file, &name, c); - (name, contract) - }) - .collect(); - (file, contracts) - }) - .collect() - } -} - -/// An Artifacts implementation that uses a compact representation -/// -/// Creates a single json artifact with -/// ```json -/// { -/// "abi": [], -/// "bin": "...", -/// "runtime-bin": "..." -/// } -/// ``` -#[derive(Debug, Copy, Clone, Eq, PartialEq)] -pub struct MinimalCombinedArtifacts; - -impl ArtifactOutput for MinimalCombinedArtifacts { - type Artifact = CompactContractBytecode; - - fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> { - fs::create_dir_all(&layout.artifacts) - .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; - for (file, contracts) in output.contracts.iter() { - for (name, contract) in contracts { - let artifact = Self::output_file(file, name); - let file = layout.artifacts.join(artifact); - if let Some(parent) = file.parent() { - fs::create_dir_all(parent).map_err(|err| { - SolcError::msg(format!( - "Failed to create artifact parent folder \"{}\": {}", - parent.display(), - err - )) - })?; - } - - if let Some(iropt) = &contract.ir_optimized { - fs::write(&file.with_extension("iropt"), iropt) - .map_err(|err| SolcError::io(err, file.with_extension("iropt")))? - } - - if let Some(ir) = &contract.ir { - fs::write(&file.with_extension("ir"), ir) - .map_err(|err| SolcError::io(err, file.with_extension("ir")))? - } - - if let Some(ewasm) = &contract.ewasm { - fs::write(&file.with_extension("ewasm"), serde_json::to_vec_pretty(&ewasm)?) - .map_err(|err| SolcError::io(err, file.with_extension("ewasm")))?; - } - - if let Some(evm) = &contract.evm { - if let Some(asm) = &evm.assembly { - fs::write(&file.with_extension("asm"), asm) - .map_err(|err| SolcError::io(err, file.with_extension("asm")))? - } - } - - let min = CompactContractBytecode::from(contract.clone()); - fs::write(&file, serde_json::to_vec_pretty(&min)?) - .map_err(|err| SolcError::io(err, file))? 
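For orientation while reading this pass rewrite: the artifact layout that both the removed code above and the new `artifact_output` module implement maps a contract's source file plus contract name to `contract.sol/contract.json`. A minimal standalone sketch of that mapping (illustrative names only, not the crate's API):

```rust
use std::path::{Path, PathBuf};

/// Mirrors the default `ArtifactOutput::output_file` layout:
/// `sources/Greeter.sol` + `Greeter` -> `Greeter.sol/Greeter.json`.
fn output_file(contract_file: &Path, name: &str) -> PathBuf {
    let json = PathBuf::from(format!("{}.json", name));
    contract_file
        .file_name()
        .map(|f| Path::new(f).join(&json))
        .unwrap_or(json)
}

fn main() {
    assert_eq!(
        output_file(Path::new("sources/Greeter.sol"), "Greeter"),
        PathBuf::from("Greeter.sol/Greeter.json")
    );
}
```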
- } - } - Ok(()) - } - - fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact { - Self::Artifact::from(contract) - } -} - -/// An Artifacts handler implementation that works the same as `MinimalCombinedArtifacts` but also -/// supports reading hardhat artifacts if an initial attempt to deserialize an artifact failed -#[derive(Debug, Copy, Clone, Eq, PartialEq)] -pub struct MinimalCombinedArtifactsHardhatFallback; - -impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback { - type Artifact = CompactContractBytecode; - - fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> { - MinimalCombinedArtifacts::on_output(output, layout) - } - - fn read_cached_artifact(path: impl AsRef) -> Result { - let path = path.as_ref(); - let content = fs::read_to_string(path).map_err(|err| SolcError::io(err, path))?; - if let Ok(a) = serde_json::from_str(&content) { - Ok(a) - } else { - tracing::error!("Failed to deserialize compact artifact"); - tracing::trace!("Fallback to hardhat artifact deserialization"); - let artifact = serde_json::from_str::(&content)?; - tracing::trace!("successfully deserialized hardhat artifact"); - Ok(artifact.into_contract_bytecode()) - } - } - - fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact { - MinimalCombinedArtifacts::contract_to_artifact(file, name, contract) - } -} - /// Helper struct for serializing `--allow-paths` arguments to Solc /// /// From the [Solc docs](https://docs.soliditylang.org/en/v0.8.9/using-the-compiler.html#base-path-and-import-remapping): @@ -751,19 +516,10 @@ impl fmt::Display for AllowedLibPaths { } } -impl> TryFrom> for AllowedLibPaths { - type Error = SolcIoError; - - fn try_from(libs: Vec) -> std::result::Result { - let libs = libs - .into_iter() - .map(|lib| { - let path: PathBuf = lib.into(); - let lib = utils::canonicalize(&path)?; - Ok(lib) - }) - .collect::, _>>()?; - Ok(AllowedLibPaths(libs)) +impl> From> for AllowedLibPaths { + fn from(libs: Vec) -> Self { + let libs = libs.into_iter().map(utils::canonicalized).collect(); + AllowedLibPaths(libs) } } @@ -787,13 +543,13 @@ mod tests { assert_eq!(ProjectPathsConfig::find_source_dir(root), contracts,); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).sources, - canonicalized(contracts), + utils::canonicalized(contracts), ); std::fs::File::create(&src).unwrap(); assert_eq!(ProjectPathsConfig::find_source_dir(root), src,); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).sources, - canonicalized(src), + utils::canonicalized(src), ); assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,); @@ -801,13 +557,13 @@ mod tests { assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), artifacts,); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).artifacts, - canonicalized(artifacts), + utils::canonicalized(artifacts), ); std::fs::File::create(&out).unwrap(); assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).artifacts, - canonicalized(out), + utils::canonicalized(out), ); assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],); @@ -815,13 +571,13 @@ mod tests { assert_eq!(ProjectPathsConfig::find_libs(root), vec![node_modules.clone()],); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).libraries, - vec![canonicalized(node_modules)], + vec![utils::canonicalized(node_modules)], ); std::fs::File::create(&lib).unwrap(); 
assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).libraries, - vec![canonicalized(lib)], + vec![utils::canonicalized(lib)], ); } } diff --git a/ethers-solc/src/error.rs b/ethers-solc/src/error.rs index 72f8d940..2180aa9a 100644 --- a/ethers-solc/src/error.rs +++ b/ethers-solc/src/error.rs @@ -34,6 +34,9 @@ pub enum SolcError { #[error("{0}")] Message(String), + #[error("No artifact found for `{}:{}`", .0.display(), .1)] + ArtifactNotFound(PathBuf, String), + #[cfg(feature = "project-util")] #[error(transparent)] FsExtra(#[from] fs_extra::error::Error), diff --git a/ethers-solc/src/hh.rs b/ethers-solc/src/hh.rs index 8a7e3d78..d6edb664 100644 --- a/ethers-solc/src/hh.rs +++ b/ethers-solc/src/hh.rs @@ -5,12 +5,11 @@ use crate::{ Bytecode, BytecodeObject, CompactContract, CompactContractBytecode, Contract, ContractBytecode, DeployedBytecode, Offsets, }, - error::{Result, SolcError}, - ArtifactOutput, CompilerOutput, ProjectPathsConfig, + ArtifactOutput, }; use ethers_core::abi::Abi; use serde::{Deserialize, Serialize}; -use std::{collections::BTreeMap, fs}; +use std::collections::btree_map::BTreeMap; const HH_ARTIFACT_VERSION: &str = "hh-sol-artifact-1"; @@ -85,30 +84,6 @@ pub struct HardhatArtifacts; impl ArtifactOutput for HardhatArtifacts { type Artifact = HardhatArtifact; - fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> { - fs::create_dir_all(&layout.artifacts) - .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; - for (file, contracts) in output.contracts.iter() { - for (name, contract) in contracts { - let artifact = Self::output_file(file, name); - let artifact_file = layout.artifacts.join(artifact); - if let Some(parent) = artifact_file.parent() { - fs::create_dir_all(parent).map_err(|err| { - SolcError::msg(format!( - "Failed to create artifact parent folder \"{}\": {}", - parent.display(), - err - )) - })?; - } - let artifact = Self::contract_to_artifact(file, name, contract.clone()); - fs::write(&artifact_file, serde_json::to_vec_pretty(&artifact)?) - .map_err(|err| SolcError::io(err, artifact_file))? - } - } - Ok(()) - } - fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact { let (bytecode, link_references, deployed_bytecode, deployed_link_references) = if let Some(evm) = contract.evm { diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 92e4eb83..b45df550 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -1,46 +1,38 @@ -#![doc = include_str ! 
("../README.md")] - pub mod artifacts; pub mod sourcemap; pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion}; -use std::collections::btree_map::Entry; +mod artifact_output; pub mod cache; pub mod hh; +pub use artifact_output::*; + mod resolver; pub use hh::{HardhatArtifact, HardhatArtifacts}; pub use resolver::Graph; mod compile; - -pub use compile::*; - -mod config; - -pub use config::{ - AllowedLibPaths, Artifact, ArtifactOutput, MinimalCombinedArtifacts, PathStyle, - ProjectPathsConfig, SolcConfig, +pub use compile::{ + output::{AggregatedCompilerOutput, ProjectCompileOutput}, + *, }; -pub mod remappings; +mod config; +pub use config::{AllowedLibPaths, PathStyle, ProjectPathsConfig, SolcConfig}; -use crate::{artifacts::Source, cache::SolFilesCache}; +pub mod remappings; +use crate::artifacts::Source; pub mod error; pub mod utils; use crate::{ - artifacts::Sources, - cache::PathMap, + artifacts::{Contract, Sources}, error::{SolcError, SolcIoError}, }; use error::Result; use std::{ - borrow::Cow, - collections::BTreeMap, - convert::TryInto, - fmt, fs, marker::PhantomData, path::{Path, PathBuf}, }; @@ -72,6 +64,8 @@ pub struct Project { pub allowed_lib_paths: AllowedLibPaths, /// Maximum number of `solc` processes to run simultaneously. solc_jobs: usize, + /// Offline mode, if set, network access (download solc) is disallowed + pub offline: bool, } impl Project { @@ -120,43 +114,29 @@ impl Project { &self.paths.cache } + /// Returns the root directory of the project + pub fn root(&self) -> &PathBuf { + &self.paths.root + } + + /// Applies the configured settings to the given `Solc` + fn configure_solc(&self, mut solc: Solc) -> Solc { + if self.allowed_lib_paths.0.is_empty() { + solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string()); + } + solc + } + /// Sets the maximum number of parallel `solc` processes to run simultaneously. + /// + /// # Panics + /// + /// if `jobs == 0` pub fn set_solc_jobs(&mut self, jobs: usize) { assert!(jobs > 0); self.solc_jobs = jobs; } - #[tracing::instrument(skip_all, name = "Project::write_cache_file")] - fn write_cache_file( - &self, - sources: Sources, - artifacts: Vec<(PathBuf, Vec)>, - ) -> Result<()> { - tracing::trace!("inserting {} sources in file cache", sources.len()); - let mut cache = SolFilesCache::builder() - .root(&self.paths.root) - .solc_config(self.solc_config.clone()) - .insert_files(sources, Some(self.paths.cache.clone()))?; - tracing::trace!("source files inserted"); - - // add the artifacts for each file to the cache entry - for (file, artifacts) in artifacts { - if let Some(entry) = cache.files.get_mut(&file) { - entry.artifacts = artifacts; - } - } - - if let Some(cache_dir) = self.paths.cache.parent() { - tracing::trace!("creating cache file parent directory \"{}\"", cache_dir.display()); - fs::create_dir_all(cache_dir).map_err(|err| SolcError::io(err, cache_dir))? 
- } - - tracing::trace!("writing cache file to \"{}\"", self.paths.cache.display()); - cache.write(&self.paths.cache)?; - - Ok(()) - } - /// Returns all sources found under the project's configured sources path #[tracing::instrument(skip_all, fields(name = "sources"))] pub fn sources(&self) -> Result { @@ -187,36 +167,14 @@ impl Project { println!("cargo:rerun-if-changed={}", self.paths.sources.display()) } - /// Attempts to read all unique libraries that are used as imports like "hardhat/console.sol" - fn resolved_libraries( - &self, - sources: &Sources, - ) -> Result> { - let mut libs = BTreeMap::default(); - for source in sources.values() { - for import in source.parse_imports() { - if let Some(lib) = utils::resolve_library(&self.paths.libraries, import) { - if let Entry::Vacant(entry) = libs.entry(import.into()) { - tracing::trace!( - "resolved library import \"{}\" at \"{}\"", - import, - lib.display() - ); - entry.insert((Source::read(&lib)?, lib)); - } - } - } - } - Ok(libs) - } - /// Attempts to compile the contracts found at the configured source location, see /// `ProjectPathsConfig::sources`. /// /// NOTE: this does not check if the contracts were successfully compiled, see /// `CompilerOutput::has_error` instead. + /// /// NB: If the `svm` feature is enabled, this function will automatically detect - /// solc versions across files. + /// solc versions across files, see [`Self::svm_compile()`] /// /// # Example /// @@ -238,122 +196,37 @@ impl Project { return self.svm_compile(sources) } - let mut solc = self.solc.clone(); - if !self.allowed_lib_paths.0.is_empty() { - solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string()); - } + let solc = self.configure_solc(self.solc.clone()); - let sources = Graph::resolve_sources(&self.paths, sources)?.into_sources(); self.compile_with_version(&solc, sources) } - #[cfg(all(feature = "svm", feature = "async"))] - #[tracing::instrument(skip(self, sources))] - pub fn svm_compile(&self, sources: Sources) -> Result> { - let graph = Graph::resolve_sources(&self.paths, sources)?; - let sources_by_version = - graph.into_sources_by_version(!self.auto_detect)?.get(&self.allowed_lib_paths)?; - - // run the compilation step for each version - let compiled = if self.solc_jobs > 1 && sources_by_version.len() > 1 { - self.compile_many(sources_by_version)? - } else { - self.compile_sources(sources_by_version)? - }; - tracing::trace!("compiled all sources"); - - Ok(compiled) - } - - /// Compiles all sources with their intended `Solc` version sequentially. - #[cfg(all(feature = "svm", feature = "async"))] - fn compile_sources( - &self, - sources_by_version: BTreeMap>, - ) -> Result> { - tracing::trace!("compiling sources using a single solc job"); - let mut compiled = - ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone()); - for (solc, sources) in sources_by_version { - tracing::trace!( - "compiling {} sources with solc \"{}\"", - sources.len(), - solc.as_ref().display() - ); - compiled.extend(self.compile_with_version(&solc, sources)?); - } - Ok(compiled) - } - - /// Compiles all sources with their intended `Solc` version in parallel. + /// Compiles a set of contracts using `svm` managed solc installs /// - /// This runs `Self::solc_jobs` parallel `solc` jobs at most. + /// This will autodetect the appropriate `Solc` version(s) to use when compiling the provided + /// `Sources`. 
Solc auto-detection follows semver rules, see also + /// [`crate::resolver::Graph::get_input_node_versions()`] + /// + /// # Errors + /// + /// This returns an error if contracts in the `Sources` set are incompatible (violate semver + /// rules) with their imports, for example source contract `A(=0.8.11)` imports dependency + /// `C(<0.8.0)`, which are incompatible. + /// + /// # Example + /// + /// ``` + /// use ethers_solc::{artifacts::Source, Project, utils}; + /// # fn demo(project: Project) { + /// let project = Project::builder().build().unwrap(); + /// let files = utils::source_files("./src"); + /// let sources = Source::read_all(files).unwrap(); + /// let output = project.svm_compile(sources).unwrap(); + /// # } + /// ``` #[cfg(all(feature = "svm", feature = "async"))] - fn compile_many( - &self, - sources_by_version: BTreeMap>, - ) -> Result> { - tracing::trace!("compile sources in parallel using {} solc jobs", self.solc_jobs); - let mut compiled = - ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone()); - let mut paths = PathMap::default(); - let mut jobs = Vec::with_capacity(sources_by_version.len()); - - let mut all_sources = BTreeMap::default(); - let mut all_artifacts = Vec::with_capacity(sources_by_version.len()); - - // preprocess all sources - for (solc, sources) in sources_by_version { - match self.preprocess_sources(sources)? { - PreprocessedJob::Unchanged(artifacts) => { - compiled.extend(ProjectCompileOutput::from_unchanged(artifacts)); - } - PreprocessedJob::Items(sources, map, cached_artifacts) => { - tracing::trace!("cached artifacts: \"{:?}\"", cached_artifacts.keys()); - tracing::trace!("compile sources: \"{:?}\"", sources.keys()); - - compiled.extend_artifacts(cached_artifacts); - // replace absolute path with source name to make solc happy - let sources = map.set_source_names(sources); - paths.extend(map); - - let input = CompilerInput::with_sources(sources) - .settings(self.solc_config.settings.clone()) - .normalize_evm_version(&solc.version()?) - .with_remappings(self.paths.remappings.clone()); - - jobs.push((solc, input)) - } - }; - } - tracing::trace!("execute {} compile jobs in parallel", jobs.len()); - - let outputs = tokio::runtime::Runtime::new() - .unwrap() - .block_on(Solc::compile_many(jobs, self.solc_jobs)); - - for (res, _, input) in outputs.into_outputs() { - let output = res?; - if !output.has_error() { - if self.cached { - // get all contract names of the files and map them to the disk file - all_sources.extend(paths.set_disk_paths(input.sources)); - all_artifacts.extend(paths.get_artifacts(&output.contracts)); - } - - if !self.no_artifacts { - Artifacts::on_output(&output, &self.paths)?; - } - } - compiled.extend_output(output); - } - - // write the cache file - if self.cached { - self.write_cache_file(all_sources, all_artifacts)?; - } - - Ok(compiled) + pub fn svm_compile(&self, sources: Sources) -> Result> { + project::ProjectCompiler::with_sources(self, sources)?.compile() } /// Compiles the given source files with the exact `Solc` executable @@ -384,121 +257,33 @@ impl Project { solc: &Solc, sources: Sources, ) -> Result> { - let (sources, paths, cached_artifacts) = match self.preprocess_sources(sources)? { - PreprocessedJob::Unchanged(artifacts) => { - return Ok(ProjectCompileOutput::from_unchanged(artifacts)) - } - PreprocessedJob::Items(a, b, c) => (a, b, c), - }; - - let version = solc.version()?; - tracing::trace!( - "compiling {} files with {}. 
Using {} cached files", - sources.len(), - version, - cached_artifacts.len() - ); - tracing::trace!("cached artifacts: \"{:?}\"", cached_artifacts.keys()); - tracing::trace!("compile sources: \"{:?}\"", sources.keys()); - - // replace absolute path with source name to make solc happy - let sources = paths.set_source_names(sources); - - let input = CompilerInput::with_sources(sources) - .settings(self.solc_config.settings.clone()) - .normalize_evm_version(&version) - .with_remappings(self.paths.remappings.clone()); - - tracing::trace!("calling solc with {} sources", input.sources.len()); - let output = solc.compile(&input)?; - tracing::trace!("compiled input, output has error: {}", output.has_error()); - - if output.has_error() { - return Ok(ProjectCompileOutput::from_compiler_output( - output, - self.ignored_error_codes.clone(), - )) - } - - if self.cached { - // get all contract names of the files and map them to the disk file - let artifacts = paths.get_artifacts(&output.contracts); - // reapply to disk paths - let sources = paths.set_disk_paths(input.sources); - // create cache file - self.write_cache_file(sources, artifacts)?; - } - - // TODO: There seems to be some type redundancy here, c.f. discussion with @mattsse - if !self.no_artifacts { - Artifacts::on_output(&output, &self.paths)?; - } - - Ok(ProjectCompileOutput::from_compiler_output_and_cache( - output, - cached_artifacts, - self.ignored_error_codes.clone(), - )) - } - - /// Preprocesses the given source files by resolving their libs and check against cache if - /// configured - fn preprocess_sources(&self, mut sources: Sources) -> Result> { - tracing::trace!("start preprocessing {} sources files", sources.len()); - - // keeps track of source names / disk paths - let mut paths = PathMap::default(); - - tracing::trace!("start resolving libraries"); - for (import, (source, path)) in self.resolved_libraries(&sources)? 
{ - // inserting with absolute path here and keep track of the source name <-> path mappings - sources.insert(path.clone(), source); - paths.path_to_source_name.insert(path.clone(), import.clone()); - paths.source_name_to_path.insert(import, path); - } - tracing::trace!("resolved all libraries"); - - // If there's a cache set, filter to only re-compile the files which were changed - let (sources, cached_artifacts) = if self.cached && self.paths.cache.exists() { - tracing::trace!("start reading solfiles cache for incremental compilation"); - let mut cache = SolFilesCache::read(&self.paths.cache)?; - cache.remove_missing_files(); - let changed_files = cache.get_changed_or_missing_artifacts_files::<Artifacts>( - sources, - Some(&self.solc_config), - &self.paths, - ); - tracing::trace!("detected {} changed files", changed_files.len()); - cache.remove_changed_files(&changed_files); - - let cached_artifacts = if self.paths.artifacts.exists() { - tracing::trace!("reading artifacts from cache.."); - let artifacts = cache.read_artifacts::<Artifacts>(&self.paths.artifacts)?; - tracing::trace!("read {} artifacts from cache", artifacts.len()); - artifacts - } else { - BTreeMap::default() - }; - - // if nothing changed and all artifacts still exist - if changed_files.is_empty() { - tracing::trace!( - "unchanged source files, reusing artifacts {:?}", - cached_artifacts.keys() - ); - return Ok(PreprocessedJob::Unchanged(cached_artifacts)) - } - // There are changed files and maybe some cached files - (changed_files, cached_artifacts) - } else { - (sources, BTreeMap::default()) - }; - Ok(PreprocessedJob::Items(sources, paths, cached_artifacts)) + project::ProjectCompiler::with_sources_and_solc( + self, + sources, + self.configure_solc(solc.clone()), + )? + .compile() } /// Removes the project's artifacts and cache file /// /// If the cache file was the only file in the folder, this also removes the empty folder. + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// # fn demo(project: Project) { + /// let project = Project::builder().build().unwrap(); + /// let _ = project.compile().unwrap(); + /// assert!(project.artifacts_path().exists()); + /// assert!(project.cache_path().exists()); + /// + /// project.cleanup(); + /// assert!(!project.artifacts_path().exists()); + /// assert!(!project.cache_path().exists()); + /// # } + /// ``` pub fn cleanup(&self) -> std::result::Result<(), SolcIoError> { tracing::trace!("clean up project"); if self.cache_path().exists() { @@ -526,24 +311,19 @@ impl Project { Ok(()) } - /// Flattens the target file into a single string suitable for verification + /// Flattens the target Solidity file into a single string suitable for verification. /// /// This method uses a dependency graph to resolve imported files and substitute /// import directives with the contents of target files. It will strip the pragma - /// version directives and SDPX license identifiers from imported files. + /// version directives and SPDX license identifiers from all imported files. /// - /// NOTE: the SDPX license identifier will be removed from the imported file + /// NB: the SPDX license identifier will be removed from the imported file /// only if it is found at the beginning of the file. pub fn flatten(&self, target: &Path) -> Result<String> { self.paths.flatten(target) } }
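Putting the new entry points together, a typical lifecycle of a project built with the APIs in this patch might look like the following sketch; the `./my-project` root and `Dapp.sol` target are hypothetical, and the `dapptools` layout helper on `ProjectPathsConfig` is assumed:

```rust
use std::path::Path;
use ethers_solc::{Project, ProjectPathsConfig};

fn lifecycle() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical dapptools-style project root (src/, lib/, out/).
    let paths = ProjectPathsConfig::dapptools("./my-project")?;
    let mut project = Project::builder()
        .paths(paths)
        .offline() // new in this PR: never download or check solc installs
        .build()?;
    project.set_solc_jobs(4); // cap parallel solc processes; panics on 0

    let compiled = project.compile()?;
    assert!(!compiled.has_compiler_errors());

    // One verification-ready string; SPDX ids and pragmas of imports stripped.
    let flattened = project.flatten(Path::new("./my-project/src/Dapp.sol"))?;
    println!("flattened source is {} bytes", flattened.len());

    // Removes the artifacts dir and the cache file again.
    project.cleanup()?;
    Ok(())
}
```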
-enum PreprocessedJob<T: ArtifactOutput> { - Unchanged(BTreeMap<PathBuf, T::Artifact>), - Items(Sources, PathMap, BTreeMap<PathBuf, T::Artifact>), -} - pub struct ProjectBuilder<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> { /// The layout of the paths: Option<ProjectPathsConfig>, @@ -557,6 +337,8 @@ pub struct ProjectBuilder no_artifacts: bool, /// Whether automatic solc version detection is enabled auto_detect: bool, + /// Use offline mode + offline: bool, artifacts: PhantomData<Artifacts>, /// Which error codes to ignore pub ignored_error_codes: Vec<u64>, @@ -611,6 +393,21 @@ impl ProjectBuilder { self } + /// Activates offline mode + /// + /// Prevents possible network access to download/check solc installs + #[must_use] + pub fn offline(self) -> Self { + self.set_offline(true) + } + + /// Sets the offline status + #[must_use] + pub fn set_offline(mut self, offline: bool) -> Self { + self.offline = offline; + self + } + /// Disables writing artifacts to disk #[must_use] pub fn no_artifacts(self) -> Self { @@ -667,6 +464,7 @@ impl ProjectBuilder { ignored_error_codes, allowed_paths, solc_jobs, + offline, .. } = self; ProjectBuilder { @@ -676,6 +474,7 @@ impl ProjectBuilder { cached, no_artifacts, auto_detect, + offline, artifacts: PhantomData::default(), ignored_error_codes, allowed_paths, @@ -715,13 +514,14 @@ impl ProjectBuilder { ignored_error_codes, mut allowed_paths, solc_jobs, + offline, } = self; + let paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?; + let solc = solc.unwrap_or_default(); let solc_config = solc_config.unwrap_or_else(|| SolcConfig::builder().build()); - let paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?; - if allowed_paths.is_empty() { // allow every contract under root by default allowed_paths.push(paths.root.clone()) @@ -736,8 +536,9 @@ impl ProjectBuilder { auto_detect, artifacts, ignored_error_codes, - allowed_lib_paths: allowed_paths.try_into()?, + allowed_lib_paths: allowed_paths.into(), solc_jobs: solc_jobs.unwrap_or_else(::num_cpus::get), + offline, }) } } @@ -751,6 +552,7 @@ impl Default for ProjectBuilder { cached: true, no_artifacts: false, auto_detect: true, + offline: false, artifacts: PhantomData::default(), ignored_error_codes: Vec::new(), allowed_paths: vec![], @@ -759,205 +561,18 @@ impl Default for ProjectBuilder { } } -/// The outcome of `Project::compile` -#[derive(Debug, Clone, PartialEq, Default)] -pub struct ProjectCompileOutput<T: ArtifactOutput = MinimalCombinedArtifacts> { - /// If solc was invoked multiple times in `Project::compile` then this contains a merged - /// version of all `CompilerOutput`s. If solc was called only once then `compiler_output` - /// holds the `CompilerOutput` of that call.
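The removed type here is superseded by `compile::output::ProjectCompileOutput`, re-exported near the top of this file. A hedged sketch of consuming the unified output, restricted to methods that the tests in this patch exercise (`has_compiler_errors`, `find`, `is_unchanged`, `into_artifacts`); the contract name is hypothetical:

```rust
use ethers_solc::Project;

fn consume_output() -> Result<(), Box<dyn std::error::Error>> {
    let project = Project::builder().build()?;
    let compiled = project.compile()?;
    assert!(!compiled.has_compiler_errors());
    // Look up a contract by name, whether freshly compiled or cached.
    assert!(compiled.find("Dapp").is_some());
    // True if all artifacts were reused from cache and nothing was recompiled.
    let _unchanged = compiled.is_unchanged();
    // Consume into ("<file>:<name>", artifact) pairs.
    for (id, _artifact) in compiled.into_artifacts() {
        println!("compiled {}", id);
    }
    Ok(())
}
```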
- compiler_output: Option, - /// All artifacts that were read from cache - artifacts: BTreeMap, - ignored_error_codes: Vec, -} +impl ArtifactOutput for Project { + type Artifact = Artifacts::Artifact; -impl ProjectCompileOutput { - pub fn with_ignored_errors(ignored_errors: Vec) -> Self { - Self { - compiler_output: None, - artifacts: Default::default(), - ignored_error_codes: ignored_errors, - } - } - - pub fn from_unchanged(artifacts: BTreeMap) -> Self { - Self { compiler_output: None, artifacts, ignored_error_codes: vec![] } - } - - pub fn from_compiler_output( - compiler_output: CompilerOutput, - ignored_error_codes: Vec, - ) -> Self { - Self { - compiler_output: Some(compiler_output), - artifacts: Default::default(), - ignored_error_codes, - } - } - - pub fn from_compiler_output_and_cache( - compiler_output: CompilerOutput, - cache: BTreeMap, - ignored_error_codes: Vec, - ) -> Self { - Self { compiler_output: Some(compiler_output), artifacts: cache, ignored_error_codes } - } - - /// Get the (merged) solc compiler output - /// ```no_run - /// use std::collections::BTreeMap; - /// use ethers_solc::artifacts::Contract; - /// use ethers_solc::Project; - /// - /// let project = Project::builder().build().unwrap(); - /// let contracts: BTreeMap = - /// project.compile().unwrap().output().contracts_into_iter().collect(); - /// ``` - pub fn output(self) -> CompilerOutput { - self.compiler_output.unwrap_or_default() - } - - /// Combine two outputs - pub fn extend(&mut self, compiled: ProjectCompileOutput) { - let ProjectCompileOutput { compiler_output, artifacts, .. } = compiled; - self.artifacts.extend(artifacts); - if let Some(output) = compiler_output { - self.extend_output(output); - } - } - - pub fn extend_output(&mut self, compiled: CompilerOutput) { - if let Some(output) = self.compiler_output.as_mut() { - output.errors.extend(compiled.errors); - output.sources.extend(compiled.sources); - output.contracts.extend(compiled.contracts); - } else { - self.compiler_output = Some(compiled); - } - } - - pub fn extend_artifacts(&mut self, artifacts: BTreeMap) { - self.artifacts.extend(artifacts); - } - - /// Whether this type does not contain compiled contracts - pub fn is_unchanged(&self) -> bool { - !self.has_compiled_contracts() - } - - /// Whether this type has a compiler output - pub fn has_compiled_contracts(&self) -> bool { - if let Some(output) = self.compiler_output.as_ref() { - !output.contracts.is_empty() - } else { - false - } - } - - /// Whether there were errors - pub fn has_compiler_errors(&self) -> bool { - self.compiler_output.as_ref().map(|o| o.has_error()).unwrap_or_default() - } - - /// Whether there were warnings - pub fn has_compiler_warnings(&self) -> bool { - self.compiler_output - .as_ref() - .map(|o| o.has_warning(&self.ignored_error_codes)) - .unwrap_or_default() - } - - /// Finds the first contract with the given name and removes it from the set - pub fn remove(&mut self, contract_name: impl AsRef) -> Option { - let contract_name = contract_name.as_ref(); - if let Some(output) = self.compiler_output.as_mut() { - if let contract @ Some(_) = output.contracts.iter_mut().find_map(|(file, c)| { - c.remove(contract_name).map(|c| T::contract_to_artifact(file, contract_name, c)) - }) { - return contract - } - } - let key = self - .artifacts - .iter() - .find_map(|(path, _)| { - T::contract_name(path).filter(|name| name == contract_name).map(|_| path) - })? 
- .clone(); - self.artifacts.remove(&key) - } -} - -impl ProjectCompileOutput -where - T::Artifact: Clone, -{ - /// Finds the first contract with the given name - pub fn find(&self, contract_name: impl AsRef) -> Option> { - let contract_name = contract_name.as_ref(); - if let Some(output) = self.compiler_output.as_ref() { - if let contract @ Some(_) = output.contracts.iter().find_map(|(file, contracts)| { - contracts - .get(contract_name) - .map(|c| T::contract_to_artifact(file, contract_name, c.clone())) - .map(Cow::Owned) - }) { - return contract - } - } - self.artifacts.iter().find_map(|(path, art)| { - T::contract_name(path).filter(|name| name == contract_name).map(|_| Cow::Borrowed(art)) - }) - } -} - -impl ProjectCompileOutput { - /// All artifacts together with their contract file name and name `:` - /// - /// # Example - /// - /// ```no_run - /// use std::collections::BTreeMap; - /// use ethers_solc::artifacts::CompactContractBytecode; - /// use ethers_solc::Project; - /// - /// let project = Project::builder().build().unwrap(); - /// let contracts: BTreeMap = project.compile().unwrap().into_artifacts().collect(); - /// ``` - pub fn into_artifacts(mut self) -> Box> { - let artifacts = self.artifacts.into_iter().filter_map(|(path, art)| { - T::contract_name(&path).map(|name| { - (format!("{}:{}", path.file_name().unwrap().to_string_lossy(), name), art) - }) - }); - - let artifacts: Box> = if let Some(output) = - self.compiler_output.take() - { - Box::new(artifacts.chain(T::output_to_artifacts(output).into_values().flatten().map( - |(name, artifact)| { - (format!("{}:{}", T::output_file_name(&name).display(), name), artifact) - }, - ))) - } else { - Box::new(artifacts) - }; - artifacts - } -} - -impl fmt::Display for ProjectCompileOutput { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Some(output) = self.compiler_output.as_ref() { - output.diagnostics(&self.ignored_error_codes).fmt(f) - } else { - f.write_str("Nothing to compile") - } + fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact { + Artifacts::contract_to_artifact(file, name, contract) } } #[cfg(test)] mod tests { + use crate::remappings::Remapping; + #[test] #[cfg(all(feature = "svm", feature = "async"))] fn test_build_all_versions() { @@ -973,7 +588,7 @@ mod tests { assert!(!compiled.has_compiler_errors()); let contracts = compiled.output().contracts; // Contracts A to F - assert_eq!(contracts.keys().count(), 5); + assert_eq!(contracts.contracts().count(), 5); } #[test] @@ -988,6 +603,11 @@ mod tests { .sources(root.join("src")) .lib(root.join("lib1")) .lib(root.join("lib2")) + .remappings( + Remapping::find_many(&root.join("lib1")) + .into_iter() + .chain(Remapping::find_many(&root.join("lib2"))), + ) .build() .unwrap(); let project = Project::builder() @@ -1000,7 +620,7 @@ mod tests { let compiled = project.compile().unwrap(); assert!(!compiled.has_compiler_errors()); let contracts = compiled.output().contracts; - assert_eq!(contracts.keys().count(), 3); + assert_eq!(contracts.contracts().count(), 3); } #[test] @@ -1013,12 +633,13 @@ mod tests { .root(&root) .sources(root.join("src")) .lib(root.join("lib")) + .remappings(Remapping::find_many(&root.join("lib"))) .build() .unwrap(); let project = Project::builder().no_artifacts().paths(paths).ephemeral().build().unwrap(); let compiled = project.compile().unwrap(); assert!(!compiled.has_compiler_errors()); let contracts = compiled.output().contracts; - assert_eq!(contracts.keys().count(), 2); + 
assert_eq!(contracts.contracts().count(), 2); } } diff --git a/ethers-solc/src/project_util.rs b/ethers-solc/src/project_util.rs index aee3f2e8..f09ba432 100644 --- a/ethers-solc/src/project_util.rs +++ b/ethers-solc/src/project_util.rs @@ -77,6 +77,21 @@ impl TempProject { &mut self.project_mut().paths } + /// Returns the path to the artifacts directory + pub fn artifacts_path(&self) -> &PathBuf { + &self.paths().artifacts + } + + /// Returns the path to the sources directory + pub fn sources_path(&self) -> &PathBuf { + &self.paths().sources + } + + /// Returns the path to the cache file + pub fn cache_path(&self) -> &PathBuf { + &self.paths().cache + } + /// The root path of the temporary workspace pub fn root(&self) -> &Path { self.project().paths.root.as_path() diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index 0bc10f86..df46fcb4 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -38,20 +38,71 @@ use solang_parser::pt::{Import, Loc, SourceUnitPart}; use crate::{error::Result, utils, ProjectPathsConfig, Solc, Source, Sources}; +/// The underlying edges of the graph, which contain only the raw relationship data. +/// +/// This is kept separate from the `Graph` as the `Node`s get consumed when the `Solc` to `Sources` +/// set is determined. +#[derive(Debug, Clone)] +pub struct GraphEdges { + /// The indices of `edges` correspond to the `nodes`. That is, `edges[0]` + /// is the set of outgoing edges for `nodes[0]`. + edges: Vec<Vec<usize>>, + /// maps a solidity file to an index, for fast lookup. + indices: HashMap<PathBuf, usize>, + /// reverse of `indices` for reverse lookup + rev_indices: HashMap<usize, PathBuf>, + /// the identified version requirement of a file + versions: HashMap<usize, Option<VersionReq>>, + /// how many input files we started with; corresponds to `let input_files = + /// nodes[..num_input_files]`. + /// + /// Combined with the `indices`, this lets us determine whether a file was originally added to + /// the graph as input or was added as a resolved import, see [`Self::is_input_file()`] + num_input_files: usize, +} + +impl GraphEdges { + /// Returns a list of nodes the given node index points to for the given kind. + pub fn imported_nodes(&self, from: usize) -> &[usize] { + &self.edges[from] + } + + /// Returns all files imported by the given file + pub fn imports(&self, file: impl AsRef<Path>) -> HashSet<&PathBuf> { + if let Some(start) = self.indices.get(file.as_ref()).copied() { + NodesIter::new(start, self).skip(1).map(move |idx| &self.rev_indices[&idx]).collect() + } else { + HashSet::new() + } + } + + /// Returns true if the `file` was originally included when the graph was first created and not + /// added when all `imports` were resolved + pub fn is_input_file(&self, file: impl AsRef<Path>) -> bool { + if let Some(idx) = self.indices.get(file.as_ref()).copied() { + idx < self.num_input_files + } else { + false + } + } + + /// Returns the `VersionReq` for the given file + pub fn version_requirement(&self, file: impl AsRef<Path>) -> Option<&VersionReq> { + self.indices + .get(file.as_ref()) + .and_then(|idx| self.versions.get(idx)) + .and_then(|v| v.as_ref()) + } +} + /// Represents a fully-resolved solidity dependency graph. Each node in the graph /// is a file and edges represent dependencies between them. /// See also https://docs.soliditylang.org/en/latest/layout-of-source-files.html?highlight=import#importing-other-source-files #[derive(Debug)] pub struct Graph { nodes: Vec<Node>, - /// The indices of `edges` correspond to the `nodes`.
That is, `edges[0]` - /// is the set of outgoing edges for `nodes[0]`. - edges: Vec>, - /// index maps for a solidity file to an index, for fast lookup. - indices: HashMap, - /// with how many input files we started with, corresponds to `let input_files = - /// nodes[..num_input_files]`. - num_input_files: usize, + /// relationship of the nodes + edges: GraphEdges, /// the root of the project this graph represents #[allow(unused)] root: PathBuf, @@ -60,15 +111,19 @@ pub struct Graph { impl Graph { /// Returns a list of nodes the given node index points to for the given kind. pub fn imported_nodes(&self, from: usize) -> &[usize] { - &self.edges[from] + self.edges.imported_nodes(from) } /// Returns all the resolved files and their index in the graph pub fn files(&self) -> &HashMap { - &self.indices + &self.edges.indices } /// Gets a node by index. + /// + /// # Panics + /// + /// if the `index` node id is not included in the graph pub fn node(&self, index: usize) -> &Node { &self.nodes[index] } @@ -80,7 +135,7 @@ impl Graph { /// /// if the `start` node id is not included in the graph pub fn node_ids(&self, start: usize) -> impl Iterator + '_ { - NodesIter::new(start, self) + NodesIter::new(start, &self.edges) } /// Same as `Self::node_ids` but returns the actual `Node` @@ -88,16 +143,22 @@ impl Graph { self.node_ids(start).map(move |idx| self.node(idx)) } - /// Returns all files together with their paths - pub fn into_sources(self) -> Sources { - self.nodes.into_iter().map(|node| (node.path, node.source)).collect() + /// Consumes the `Graph`, effectively splitting the `nodes` and the `GraphEdges` off and + /// returning the `nodes` converted to `Sources` + pub fn into_sources(self) -> (Sources, GraphEdges) { + let Graph { nodes, edges, .. } = self; + (nodes.into_iter().map(|node| (node.path, node.source)).collect(), edges) } /// Returns an iterator that yields only those nodes that represent input files. /// See `Self::resolve_sources` /// This won't yield any resolved library nodes pub fn input_nodes(&self) -> impl Iterator { - self.nodes.iter().take(self.num_input_files) + self.nodes.iter().take(self.edges.num_input_files) + } + + pub fn imports(&self, path: impl AsRef) -> HashSet<&PathBuf> { + self.edges.imports(path) } /// Resolves a number of sources within the given config @@ -164,8 +225,18 @@ impl Graph { nodes.push(node); edges.push(resolved_imports); } - - Ok(Graph { nodes, edges, indices: index, num_input_files, root: paths.root.clone() }) + let edges = GraphEdges { + edges, + rev_indices: index.iter().map(|(k, v)| (*v, k.clone())).collect(), + indices: index, + num_input_files, + versions: nodes + .iter() + .enumerate() + .map(|(idx, node)| (idx, node.data.version_req.clone())) + .collect(), + }; + Ok(Graph { nodes, edges, root: paths.root.clone() }) } /// Resolves the dependencies of a project's source contracts @@ -176,11 +247,12 @@ impl Graph { #[cfg(all(feature = "svm", feature = "async"))] impl Graph { - /// Returns all input files together with their appropriate version. + /// Consumes the nodes of the graph and returns all input files together with their appropriate + /// version and the edges of the graph /// /// First we determine the compatible version for each input file (from sources and test folder, /// see `Self::resolve`) and then we add all resolved library imports. 
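Before the per-version grouping below, the resolved graph can already be queried through the new `GraphEdges` API; a small usage sketch (hypothetical paths, `dapptools` layout helper assumed):

```rust
use ethers_solc::{Graph, ProjectPathsConfig};

fn inspect_graph() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical dapptools-style project root.
    let paths = ProjectPathsConfig::dapptools("./my-project")?;
    let graph = Graph::resolve(&paths)?;
    // Input files plus everything reachable through their imports.
    println!("{} resolved files", graph.files().len());
    // New in this PR: transitive imports of a single file, via the edges.
    for import in graph.imports("./my-project/src/Dapp.t.sol") {
        println!("imports {}", import.display());
    }
    // The compile pipeline splits the sources off from the relationship data.
    let (_sources, edges) = graph.into_sources();
    let _is_input = edges.is_input_file("./my-project/src/Dapp.t.sol");
    Ok(())
}
```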
- pub fn into_sources_by_version(self, offline: bool) -> Result { + pub fn into_sources_by_version(self, offline: bool) -> Result<(VersionedSources, GraphEdges)> { /// insert the imports of the given node into the sources map /// There can be following graph: /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)` @@ -209,7 +281,7 @@ impl Graph { } let versioned_nodes = self.get_input_node_versions(offline)?; - let Self { nodes, edges, num_input_files, .. } = self; + let Self { nodes, edges, .. } = self; let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len()); let mut all_nodes = nodes.into_iter().enumerate().collect::>(); @@ -221,11 +293,17 @@ impl Graph { // insert the input node in the sources set and remove it from the available set let node = all_nodes.remove(&idx).expect("node is preset. qed"); sources.insert(node.path, node.source); - insert_imports(idx, &mut all_nodes, &mut sources, &edges, num_input_files); + insert_imports( + idx, + &mut all_nodes, + &mut sources, + &edges.edges, + edges.num_input_files, + ); } versioned_sources.insert(version, sources); } - Ok(VersionedSources { inner: versioned_sources, offline }) + Ok((VersionedSources { inner: versioned_sources, offline }, edges)) } /// Writes the list of imported files into the given formatter: @@ -294,7 +372,8 @@ impl Graph { // on first error, instead gather all the errors and return a bundled error message instead let mut errors = Vec::new(); // we also don't want duplicate error diagnostic - let mut erroneous_nodes = std::collections::HashSet::with_capacity(self.num_input_files); + let mut erroneous_nodes = + std::collections::HashSet::with_capacity(self.edges.num_input_files); let all_versions = if offline { Solc::installed_versions() } else { Solc::all_versions() }; @@ -302,7 +381,7 @@ impl Graph { let mut versioned_nodes = HashMap::new(); // walking through the node's dep tree and filtering the versions along the way - for idx in 0..self.num_input_files { + for idx in 0..self.edges.num_input_files { let mut candidates = all_versions.iter().collect::>(); self.retain_compatible_versions(idx, &mut candidates); @@ -346,12 +425,12 @@ pub struct NodesIter<'a> { /// stack of nodes stack: VecDeque, visited: HashSet, - graph: &'a Graph, + graph: &'a GraphEdges, } impl<'a> NodesIter<'a> { - fn new(start: usize, graph: &'a Graph) -> Self { - Self { stack: VecDeque::from([start]), visited: Default::default(), graph } + fn new(start: usize, graph: &'a GraphEdges) -> Self { + Self { stack: VecDeque::from([start]), visited: HashSet::new(), graph } } } @@ -382,7 +461,7 @@ impl VersionedSources { pub fn get( self, allowed_lib_paths: &crate::AllowedLibPaths, - ) -> Result> { + ) -> Result> { use crate::SolcError; // we take the installer lock here to ensure installation checking is done in sync @@ -411,8 +490,9 @@ impl VersionedSources { Solc::blocking_install(version.as_ref())?; tracing::trace!("reinstalled solc: \"{}\"", version); } - sources_by_version - .insert(solc.arg("--allow-paths").arg(allowed_lib_paths.to_string()), sources); + let solc = solc.arg("--allow-paths").arg(allowed_lib_paths.to_string()); + let version = solc.version()?; + sources_by_version.insert(solc, (version, sources)); } Ok(sources_by_version) } @@ -596,7 +676,7 @@ mod tests { let graph = Graph::resolve(&paths).unwrap(); - assert_eq!(graph.num_input_files, 1); + assert_eq!(graph.edges.num_input_files, 1); assert_eq!(graph.files().len(), 2); assert_eq!( @@ -615,7 +695,7 @@ mod tests { let graph = Graph::resolve(&paths).unwrap(); 
- assert_eq!(graph.num_input_files, 2); + assert_eq!(graph.edges.num_input_files, 2); assert_eq!(graph.files().len(), 3); assert_eq!( graph.files().clone(), diff --git a/ethers-solc/src/utils.rs b/ethers-solc/src/utils.rs index f6672a5f..cdb36af7 100644 --- a/ethers-solc/src/utils.rs +++ b/ethers-solc/src/utils.rs @@ -6,6 +6,7 @@ use crate::{error::SolcError, SolcIoError}; use once_cell::sync::Lazy; use regex::{Match, Regex}; use semver::Version; +use serde::de::DeserializeOwned; use tiny_keccak::{Hasher, Keccak}; use walkdir::WalkDir; @@ -82,6 +83,20 @@ pub fn canonicalize(path: impl AsRef<Path>) -> Result<PathBuf, SolcIoError> { dunce::canonicalize(&path).map_err(|err| SolcIoError::new(err, path)) } +/// Returns the same path but with all its components canonicalized. +/// +/// This will take care of potential symbolically linked directories. +/// For example, the tempdir library creates directories hosted under `/var/`, which on OS X +/// is a symbolic link to `/private/var/`. So if, when we try to resolve imports, a path is +/// rooted in a symlinked directory, we might end up with different paths for the same file, like +/// `/private/var/.../Dapp.sol` and `/var/.../Dapp.sol` +/// +/// This canonicalizes all the paths but does not treat non-existing dirs as an error +pub fn canonicalized(path: impl Into<PathBuf>) -> PathBuf { + let path = path.into(); + canonicalize(&path).unwrap_or(path) +} + /// Returns the path to the library if the source path is in fact determined to be a library path, /// and it exists. /// Note: this does not handle relative imports or remappings. @@ -252,6 +267,31 @@ pub(crate) fn tempdir(name: &str) -> Result<tempfile::TempDir, SolcIoError> { tempfile::Builder::new().prefix(name).tempdir().map_err(|err| SolcIoError::new(err, name)) } +/// Reads the json file and deserializes it into the provided type +pub fn read_json_file<T: DeserializeOwned>(path: impl AsRef<Path>) -> Result<T, SolcError> { + let path = path.as_ref(); + let file = std::fs::File::open(path).map_err(|err| SolcError::io(err, path))?; + let file = std::io::BufReader::new(file); + let val: T = serde_json::from_reader(file)?; + Ok(val) +} + +/// Creates the parent directory of the `file` and all its ancestors if they do not exist +/// See [`std::fs::create_dir_all()`] +pub fn create_parent_dir_all(file: impl AsRef<Path>) -> Result<(), SolcError> { + let file = file.as_ref(); + if let Some(parent) = file.parent() { + std::fs::create_dir_all(parent).map_err(|err| { + SolcError::msg(format!( + "Failed to create artifact parent folder \"{}\": {}", + parent.display(), + err + )) + })?; + } + Ok(()) +} + #[cfg(test)] mod tests { use super::*; diff --git a/ethers-solc/test-data/solidity-files-cache.json b/ethers-solc/test-data/solidity-files-cache.json index 2572b13e..0d5ca672 100644 --- a/ethers-solc/test-data/solidity-files-cache.json +++ b/ethers-solc/test-data/solidity-files-cache.json @@ -1,12 +1,11 @@ { - "_format": "hh-sol-cache-2", + "_format": "ethers-rs-sol-cache-2", "files": { - "Greeter.sol": { - "lastModificationDate": 1634246369587, - "contentHash": "483b7f4f64b06a04a24bd0af7c3bf8b7", - "sourceName": "contracts/Greeter.sol", + "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/lib/ds-test/src/test.sol": { + "lastModificationDate": 1638218667720, + "contentHash": "5d45a46528eaf8a26f0a8d93669f3148", + "sourceName": "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/lib/ds-test/src/test.sol", + "solcConfig": { - "version": "0.8.4", "settings": { "optimizer": { "enabled": false, @@ -14,62 +13,98 @@ }, "outputSelection": { "*": { + "": [ + "ast" + ], "*": [ "abi", "evm.bytecode",
"evm.deployedBytecode", "evm.methodIdentifiers" - ], - "": [ - "ast" ] } - } - } - }, - "imports": [ - "hardhat/console.sol" - ], - "versionPragmas": [ - "^0.8.0" - ], - "artifacts": [ - "Greeter" - ] - }, - "console.sol": { - "lastModificationDate": 1634245289287, - "contentHash": "cc4777addd464ea56fa35b1c45df0591", - "sourceName": "hardhat/console.sol", - "solcConfig": { - "version": "0.8.4", - "settings": { - "optimizer": { - "enabled": false, - "runs": 200 }, - "outputSelection": { - "*": { - "*": [ - "abi", - "evm.bytecode", - "evm.deployedBytecode", - "evm.methodIdentifiers" - ], - "": [ - "ast" - ] - } - } + "evmVersion": "london" } }, "imports": [], - "versionPragmas": [ - ">=0.4.22 <0.9.0" + "versionRequirement": ">=0.4.23", + "artifacts": { + "DSTest": { + "0.8.11+commit.d7f03943.Darwin.appleclang": "test.sol/DSTest.json" + } + } + }, + "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.sol": { + "lastModificationDate": 1638193396942, + "contentHash": "a41ddb3b99ae6b72b59341eabf948542", + "sourceName": "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.sol", + "solcConfig": { + "settings": { + "optimizer": { + "enabled": false, + "runs": 200 + }, + "outputSelection": { + "*": { + "": [ + "ast" + ], + "*": [ + "abi", + "evm.bytecode", + "evm.deployedBytecode", + "evm.methodIdentifiers" + ] + } + }, + "evmVersion": "london" + } + }, + "imports": [], + "versionRequirement": ">=0.6.6", + "artifacts": { + "Dapp": { + "0.8.11+commit.d7f03943.Darwin.appleclang": "Dapp.sol/Dapp.json" + } + } + }, + "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.t.sol": { + "lastModificationDate": 1638193396942, + "contentHash": "5f5038d89f69269d0734659efaa2ec52", + "sourceName": "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.t.sol", + "solcConfig": { + "settings": { + "optimizer": { + "enabled": false, + "runs": 200 + }, + "outputSelection": { + "*": { + "": [ + "ast" + ], + "*": [ + "abi", + "evm.bytecode", + "evm.deployedBytecode", + "evm.methodIdentifiers" + ] + } + }, + "evmVersion": "london" + } + }, + "imports": [ + "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/lib/ds-test/src/test.sol", + "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.sol" ], - "artifacts": [ - "console" - ] + "versionRequirement": ">=0.6.6", + "artifacts": { + "DappTest": { + "0.8.11+commit.d7f03943.Darwin.appleclang": "Dapp.t.sol/DappTest.json" + } + } } } -} +} \ No newline at end of file diff --git a/ethers-solc/test-data/test-contract-libs/lib1/Bar.sol b/ethers-solc/test-data/test-contract-libs/lib1/bar/src/Bar.sol similarity index 100% rename from ethers-solc/test-data/test-contract-libs/lib1/Bar.sol rename to ethers-solc/test-data/test-contract-libs/lib1/bar/src/Bar.sol diff --git a/ethers-solc/test-data/test-contract-libs/lib2/Baz.sol b/ethers-solc/test-data/test-contract-libs/lib2/baz/src/Baz.sol similarity index 100% rename from ethers-solc/test-data/test-contract-libs/lib2/Baz.sol rename to ethers-solc/test-data/test-contract-libs/lib2/baz/src/Baz.sol diff --git a/ethers-solc/test-data/test-contract-libs/src/Foo.sol b/ethers-solc/test-data/test-contract-libs/src/Foo.sol index ab9eba94..9e96fe3c 100644 --- a/ethers-solc/test-data/test-contract-libs/src/Foo.sol +++ b/ethers-solc/test-data/test-contract-libs/src/Foo.sol @@ -1,6 +1,6 @@ pragma solidity 0.8.6; -import "../lib1/Bar.sol"; -import "../lib2/Baz.sol"; +import "bar/Bar.sol"; +import 
"baz/Baz.sol"; contract Foo is Bar, Baz {} diff --git a/ethers-solc/tests/project.rs b/ethers-solc/tests/project.rs index e095b01e..137e45d1 100644 --- a/ethers-solc/tests/project.rs +++ b/ethers-solc/tests/project.rs @@ -1,7 +1,7 @@ //! project tests use std::{ - collections::HashMap, + collections::{HashMap, HashSet}, io, path::{Path, PathBuf}, str::FromStr, @@ -11,8 +11,16 @@ use ethers_solc::{ cache::{SolFilesCache, SOLIDITY_FILES_CACHE_FILENAME}, project_util::*, remappings::Remapping, - Graph, MinimalCombinedArtifacts, Project, ProjectPathsConfig, + Graph, MinimalCombinedArtifacts, Project, ProjectCompileOutput, ProjectPathsConfig, }; +use pretty_assertions::assert_eq; + +#[allow(unused)] +fn init_tracing() { + tracing_subscriber::fmt() + .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) + .init(); +} #[test] fn can_compile_hardhat_sample() { @@ -56,11 +64,16 @@ fn can_compile_dapp_sample() { assert!(compiled.find("Dapp").is_some()); assert!(compiled.is_unchanged()); + let cache = SolFilesCache::read(project.cache_path()).unwrap(); + // delete artifacts std::fs::remove_dir_all(&project.paths().artifacts).unwrap(); let compiled = project.compile().unwrap(); assert!(compiled.find("Dapp").is_some()); assert!(!compiled.is_unchanged()); + + let updated_cache = SolFilesCache::read(project.cache_path()).unwrap(); + assert_eq!(cache, updated_cache); } #[test] @@ -72,7 +85,6 @@ fn can_compile_dapp_detect_changes_in_libs() { .paths_mut() .remappings .push(Remapping::from_str(&format!("remapping={}/", remapping.display())).unwrap()); - project.project_mut().auto_detect = false; let src = project .add_source( @@ -139,6 +151,7 @@ fn can_compile_dapp_detect_changes_in_libs() { #[test] fn can_compile_dapp_detect_changes_in_sources() { + init_tracing(); let project = TempProject::::dapptools().unwrap(); let src = project @@ -214,6 +227,7 @@ fn can_compile_dapp_detect_changes_in_sources() { assert!(compiled.find("DssSpellTestBase").is_some()); // ensure change is detected assert!(!compiled.is_unchanged()); + // and all recompiled artifacts are different for (p, artifact) in compiled.into_artifacts() { let other = artifacts.remove(&p).unwrap(); @@ -266,31 +280,31 @@ fn can_compile_dapp_sample_with_cache() { assert!(compiled.find("NewContract").is_some()); assert!(!compiled.is_unchanged()); assert_eq!( - compiled.into_artifacts().map(|(name, _)| name).collect::>(), - vec![ - "Dapp.json:Dapp", - "DappTest.json:DappTest", - "DSTest.json:DSTest", - "NewContract.json:NewContract" - ] + compiled.into_artifacts().map(|(name, _)| name).collect::>(), + HashSet::from([ + "Dapp.json:Dapp".to_string(), + "DappTest.json:DappTest".to_string(), + "DSTest.json:DSTest".to_string(), + "NewContract.json:NewContract".to_string(), + ]) ); // old cached artifact is not taken from the cache std::fs::copy(cache_testdata_dir.join("Dapp.sol"), root.join("src/Dapp.sol")).unwrap(); let compiled = project.compile().unwrap(); assert_eq!( - compiled.into_artifacts().map(|(name, _)| name).collect::>(), - vec![ - "DappTest.json:DappTest", - "NewContract.json:NewContract", - "DSTest.json:DSTest", - "Dapp.json:Dapp" - ] + compiled.into_artifacts().map(|(name, _)| name).collect::>(), + HashSet::from([ + "DappTest.json:DappTest".to_string(), + "NewContract.json:NewContract".to_string(), + "DSTest.json:DSTest".to_string(), + "Dapp.json:Dapp".to_string(), + ]) ); // deleted artifact is not taken from the cache std::fs::remove_file(&project.paths.sources.join("Dapp.sol")).unwrap(); - let compiled = 
project.compile().unwrap(); + let compiled: ProjectCompileOutput<_> = project.compile().unwrap(); assert!(compiled.find("Dapp").is_none()); } @@ -376,7 +390,7 @@ fn can_flatten_file_with_duplicates() { assert_eq!(result.matches("contract Foo {").count(), 1); assert_eq!(result.matches("contract Bar {").count(), 1); assert_eq!(result.matches("contract FooBar {").count(), 1); - assert_eq!(result.matches(";").count(), 1); + assert_eq!(result.matches(';').count(), 1); } #[test]