refactor(solc): rewrite compiler passes and cache change detection (#802)

* chore: clippy

* refactor: rewrite compiler passes and cache

* feat: more work on compile pipeline

* feat: add cache constructor

* add artifact filtering

* fine tune api

* feat: prepare version integration

* docs: more docs

* feat: add cacheentry2

* replace cacheentry types

* integrate new api

* docs: more docs

* feat: implement new output handler

* feat: integrate cached files in new compile pipeline

* refactor: more cache refactor

* docs: more docs

* feat: add source name mapping

* feat: implement new parallel solc

* refactor: do a little cleanup

* refactor: even more cleanup

* even more cleanup

* chore: make it compile

* chore: make it compile with all features

* chore: clippy fix

* feat: integrate new compiler pipeline

* docs: more docs

* refactor: move stuff around

* refactor: start deprecating output type

* chore: make it compile again

* chore(deps): bump solc version 0.2.0

* feat: unify output types

* cargo fix

* refactor: add contracts wrapper

* chore: replace ProjectCompileOutput

* docs: add more docs

* feat: add offline mode

* feat: more artifact helpers

* chore: cleanup cache

* chore: streamline types

* fix: better artifacts mapping

* chore: some cleanup

* chore: change artifact

* chore: add configure solc fn

* feat: add artifact reading

* feat: implement retain and extend

* feat: add cache extending

* feat: write to disk

* chore: make clippy happy

* feat: implement path mapping

* chore: nits

* feat: introduce states

* feat: add compiler state machine

* chore: move cache types to cache mod

* chore: make clippy happy

* feat: add debug derives

* fix: use resolved import source unit names

* fix: failing tests

* test: test multiple libs properly

* chore: make clippy happy

* chore: update CHANGELOG

* fix: doc tests

* fix: set offline mode correctly

* chore: make it compile again

* Update ethers-solc/src/artifacts.rs

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>

* feat: find remappings by default

* typos

* add eth_syncing RPC (#848)

* add eth_syncing RPC

* Changelog updated

* small comments

* Intermediate SyncingStatus

* fix(core): adjust Ganache for new cli output (#851)

* fix: review comments

* fix: cache relative path bug

* chore: add cache example

* chore: use absolute paths

* fix: remove overwritten files from cache

* fix: rustfmt

* chore: more helper functions

* chore: export AggregatedOutput

* feat: implement helper functions

* feat: even more helpers

* fix: failing doc tests

* refactor: remove source name map tracking

* fix: determine artifacts in ephemeral mode

* refactor: allowed paths should not fail

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>
Co-authored-by: rakita <rakita@users.noreply.github.com>
Co-authored-by: wolflo <33909953+wolflo@users.noreply.github.com>
Commit b295d73c4a (parent 5005a3621a), authored by Matthias Seitz on 2022-02-04 17:20:24 +01:00 and committed via GitHub.
26 changed files with 2777 additions and 1429 deletions.


@@ -44,6 +44,10 @@
 ### Unreleased
+- Total revamp of the `Project::compile` pipeline
+  [#802](https://github.com/gakonst/ethers-rs/pull/802)
+  - Support multiple versions of compiled contracts
+  - Breaking: deprecate hardhat cache file compatibility, cache file now tracks artifact paths and their versions
 - Fix flatten replacement target location
   [#846](https://github.com/gakonst/ethers-rs/pull/846)
 - Fix duplicate files during flattening
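
For orientation, the revamped pipeline is driven the same way as before; a minimal sketch assembled from the doc examples later in this diff (the project layout and the default artifact type are assumptions):

```rust
use std::collections::BTreeMap;
use ethers_solc::{artifacts::CompactContractBytecode, Project};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // resolves paths and remappings for the current directory by default
    let project = Project::builder().build()?;
    // compile() now merges cached artifacts with freshly compiled ones,
    // which may have been produced by multiple solc versions
    let output = project.compile()?;
    let artifacts: BTreeMap<String, CompactContractBytecode> =
        output.into_artifacts().collect();
    println!("{} artifacts", artifacts.len());
    Ok(())
}
```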

Cargo.lock (generated)

@@ -1056,6 +1056,19 @@ dependencies = [
  "cfg-if 1.0.0",
 ]
 
+[[package]]
+name = "env_logger"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3"
+dependencies = [
+ "atty",
+ "humantime",
+ "log",
+ "regex",
+ "termcolor",
+]
+
 [[package]]
 name = "eth-keystore"
 version = "0.3.0"
@@ -1370,11 +1383,12 @@ dependencies = [
 [[package]]
 name = "ethers-solc"
-version = "0.1.0"
+version = "0.2.0"
 dependencies = [
  "colored",
  "criterion",
  "dunce",
+ "env_logger",
  "ethers-core",
  "fs_extra",
  "futures-util",
@@ -1386,6 +1400,7 @@ dependencies = [
  "num_cpus",
  "once_cell",
  "pretty_assertions",
+ "rand 0.8.4",
  "rayon",
  "regex",
  "semver",
@@ -1399,6 +1414,7 @@ dependencies = [
  "tiny-keccak",
  "tokio",
  "tracing",
+ "tracing-subscriber",
 "walkdir",
 ]
@@ -1793,6 +1809,12 @@ version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
 
+[[package]]
+name = "humantime"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
+
 [[package]]
 name = "hyper"
 version = "0.14.16"
@@ -2062,6 +2084,15 @@ dependencies = [
  "cfg-if 1.0.0",
 ]
 
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata",
+]
+
 [[package]]
 name = "matches"
 version = "0.1.9"
@@ -2848,6 +2879,9 @@ name = "regex-automata"
 version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax",
+]
 
 [[package]]
 name = "regex-syntax"
@@ -3848,9 +3882,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5312f325fe3588e277415f5a6cca1f4ccad0f248c4cd5a4bd33032d7286abc22"
 dependencies = [
  "ansi_term",
+ "lazy_static",
+ "matchers",
+ "regex",
  "sharded-slab",
  "smallvec",
  "thread_local",
+ "tracing",
  "tracing-core",
  "tracing-log",
 ]


@@ -88,7 +88,7 @@ ethers-core = { version = "^0.6.0", default-features = false, path = "./ethers-c
 ethers-providers = { version = "^0.6.0", default-features = false, path = "./ethers-providers" }
 ethers-signers = { version = "^0.6.0", default-features = false, path = "./ethers-signers" }
 ethers-middleware = { version = "^0.6.0", default-features = false, path = "./ethers-middleware" }
-ethers-solc = { version = "^0.1.0", default-features = false, path = "./ethers-solc" }
+ethers-solc = { version = "^0.2.0", default-features = false, path = "./ethers-solc" }
 ethers-etherscan = { version = "^0.2.0", default-features = false, path = "./ethers-etherscan" }
 
 [dev-dependencies]


@@ -32,7 +32,7 @@ ethers-contract-abigen = { version = "^0.6.0", path = "ethers-contract-abigen" }
 ethers-contract-derive = { version = "^0.6.0", path = "ethers-contract-derive" }
 ethers-core = { version = "^0.6.0", path = "../ethers-core", default-features = false, features = ["eip712"]}
 ethers-derive-eip712 = { version = "^0.2.0", path = "../ethers-core/ethers-derive-eip712"}
-ethers-solc = { version = "^0.1.0", path = "../ethers-solc", default-features = false }
+ethers-solc = { version = "^0.2.0", path = "../ethers-solc", default-features = false }
 
 [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies]
 tokio = { version = "1.5", default-features = false, features = ["macros"] }


@@ -42,7 +42,7 @@ hex = { version = "0.4.3", default-features = false, features = ["std"] }
 rand = { version = "0.8.4", default-features = false }
 ethers-providers = { version = "^0.6.0", path = "../ethers-providers", default-features = false, features = ["ws", "rustls"] }
 once_cell = "1.8.0"
-ethers-solc = { version = "^0.1.0", path = "../ethers-solc", default-features = false }
+ethers-solc = { version = "^0.2.0", path = "../ethers-solc", default-features = false }
 serial_test = "0.5.1"
 
 [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies]


@@ -1,6 +1,6 @@
 [package]
 name = "ethers-solc"
-version = "0.1.0"
+version = "0.2.0"
 authors = ["Matthias Seitz <matthias.seitz@outlook.de>", "Georgios Konstantopoulos <me@gakonst.com>"]
 license = "MIT OR Apache-2.0"
 edition = "2018"
@@ -17,7 +17,7 @@ keywords = ["ethereum", "web3", "solc", "solidity", "ethers"]
 ethers-core = { version = "^0.6.0", path = "../ethers-core", default-features = false }
 serde_json = "1.0.68"
 serde = { version = "1.0.130", features = ["derive"] }
-semver = "1.0.4"
+semver = { version = "1.0.4", features = ["serde"] }
 walkdir = "2.3.2"
 tokio = { version = "1.15.0", default-features = false, features = ["process", "io-util", "fs", "time"], optional = true }
 futures-util = { version = "^0.3", optional = true }
@@ -50,6 +50,9 @@ getrandom = { version = "0.2", features = ["js"] }
 [dev-dependencies]
 criterion = { version = "0.3", features = ["async_tokio"] }
+env_logger = "*"
+tracing-subscriber = {version = "0.3", default-features = false, features = ["env-filter", "fmt"]}
+rand = "0.8.4"
 pretty_assertions = "1.1.0"
 tempfile = "3.3.0"
 tokio = { version = "1.15.0", features = ["full"] }
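
The new dev-dependencies hint at how tests can enable compiler tracing; a minimal init sketch, assuming the usual `RUST_LOG`-style filter (this helper is not part of the diff):

```rust
use tracing_subscriber::EnvFilter;

/// Install a fmt subscriber filtered by the RUST_LOG env var,
/// e.g. `RUST_LOG=ethers_solc=trace cargo test`
fn init_tracing() {
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .init();
}
```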


@@ -0,0 +1,567 @@
//! Output artifact handling
use crate::{
artifacts::{CompactContract, CompactContractBytecode, Contract, FileToContractsMap},
contracts::VersionedContracts,
error::Result,
utils, HardhatArtifact, ProjectPathsConfig, SolcError,
};
use ethers_core::{abi::Abi, types::Bytes};
use semver::Version;
use serde::{de::DeserializeOwned, Serialize};
use std::{
collections::btree_map::BTreeMap,
fmt, fs, io,
path::{Path, PathBuf},
};
/// Represents an artifact file for a [`crate::Contract`]
#[derive(Debug, Clone, PartialEq)]
pub struct ArtifactFile<T> {
/// The Artifact that was written
pub artifact: T,
/// path to the file where the `artifact` was written to
pub file: PathBuf,
/// `solc` version that produced this artifact
pub version: Version,
}
impl<T: Serialize> ArtifactFile<T> {
/// Writes the artifact to `self.file`, creating all parent directories
pub fn write(&self) -> Result<()> {
utils::create_parent_dir_all(&self.file)?;
fs::write(&self.file, serde_json::to_vec_pretty(&self.artifact)?)
.map_err(|err| SolcError::io(err, &self.file))?;
Ok(())
}
}
impl<T> ArtifactFile<T> {
/// Sets the file to `root` adjoined to `self.file`.
pub fn join(&mut self, root: impl AsRef<Path>) {
self.file = root.as_ref().join(&self.file);
}
/// Removes `base` from the artifact's path
pub fn strip_prefix(&mut self, base: impl AsRef<Path>) {
if let Ok(prefix) = self.file.strip_prefix(base) {
self.file = prefix.to_path_buf();
}
}
}
/// local helper type alias `file name -> (contract name -> Vec<..>)`
pub(crate) type ArtifactsMap<T> = FileToContractsMap<Vec<ArtifactFile<T>>>;
/// Represents a set of Artifacts
#[derive(Debug, Clone, PartialEq)]
pub struct Artifacts<T>(pub ArtifactsMap<T>);
impl<T> From<ArtifactsMap<T>> for Artifacts<T> {
fn from(m: ArtifactsMap<T>) -> Self {
Self(m)
}
}
impl<'a, T> IntoIterator for &'a Artifacts<T> {
type Item = (&'a String, &'a BTreeMap<String, Vec<ArtifactFile<T>>>);
type IntoIter =
std::collections::btree_map::Iter<'a, String, BTreeMap<String, Vec<ArtifactFile<T>>>>;
fn into_iter(self) -> Self::IntoIter {
self.0.iter()
}
}
impl<T> IntoIterator for Artifacts<T> {
type Item = (String, BTreeMap<String, Vec<ArtifactFile<T>>>);
type IntoIter =
std::collections::btree_map::IntoIter<String, BTreeMap<String, Vec<ArtifactFile<T>>>>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl<T> Default for Artifacts<T> {
fn default() -> Self {
Self(Default::default())
}
}
impl<T> AsRef<ArtifactsMap<T>> for Artifacts<T> {
fn as_ref(&self) -> &ArtifactsMap<T> {
&self.0
}
}
impl<T> AsMut<ArtifactsMap<T>> for Artifacts<T> {
fn as_mut(&mut self) -> &mut ArtifactsMap<T> {
&mut self.0
}
}
impl<T: Serialize> Artifacts<T> {
/// Writes all artifacts into the given `artifacts_root` folder
pub fn write_all(&self) -> Result<()> {
for artifact in self.artifact_files() {
artifact.write()?;
}
Ok(())
}
}
impl<T> Artifacts<T> {
pub fn into_inner(self) -> ArtifactsMap<T> {
self.0
}
/// Sets each artifact file's location to `root` adjoined to its current `file` path.
pub fn join_all(&mut self, root: impl AsRef<Path>) -> &mut Self {
let root = root.as_ref();
self.artifact_files_mut().for_each(|artifact| artifact.join(root));
self
}
/// Removes `base` from all artifacts
pub fn strip_prefix_all(&mut self, base: impl AsRef<Path>) -> &mut Self {
let base = base.as_ref();
self.artifact_files_mut().for_each(|artifact| artifact.strip_prefix(base));
self
}
/// Returns all `ArtifactFile`s for the contract with the matching name
fn get_contract_artifact_files(&self, contract_name: &str) -> Option<&Vec<ArtifactFile<T>>> {
self.0.values().find_map(|all| all.get(contract_name))
}
/// Returns true if this type contains an artifact with the given path for the given contract
pub fn has_contract_artifact(&self, contract_name: &str, artifact_path: &Path) -> bool {
self.get_contract_artifact_files(contract_name)
.map(|artifacts| artifacts.iter().any(|artifact| artifact.file == artifact_path))
.unwrap_or_default()
}
/// Returns true if this type contains an artifact with the given path
pub fn has_artifact(&self, artifact_path: &Path) -> bool {
self.artifact_files().any(|artifact| artifact.file == artifact_path)
}
/// Iterate over all artifact files
pub fn artifact_files(&self) -> impl Iterator<Item = &ArtifactFile<T>> {
self.0.values().flat_map(|c| c.values().flat_map(|artifacts| artifacts.iter()))
}
/// Iterate mutably over all artifact files
pub fn artifact_files_mut(&mut self) -> impl Iterator<Item = &mut ArtifactFile<T>> {
self.0.values_mut().flat_map(|c| c.values_mut().flat_map(|artifacts| artifacts.iter_mut()))
}
/// Returns an iterator over _all_ artifacts and their `<file name>:<contract name>` keys
pub fn into_artifacts<O: ArtifactOutput<Artifact = T>>(
self,
) -> impl Iterator<Item = (String, T)> {
self.0.into_values().flat_map(|contract_artifacts| {
contract_artifacts.into_iter().flat_map(|(_contract_name, artifacts)| {
artifacts.into_iter().filter_map(|artifact| {
O::contract_name(&artifact.file).map(|name| {
(
format!(
"{}:{}",
artifact.file.file_name().unwrap().to_string_lossy(),
name
),
artifact.artifact,
)
})
})
})
})
}
/// Returns an iterator that yields the tuple `(file, contract name, artifact)`
///
/// **NOTE** this returns the path as is
pub fn into_artifacts_with_files(self) -> impl Iterator<Item = (String, String, T)> {
self.0.into_iter().flat_map(|(f, contract_artifacts)| {
contract_artifacts.into_iter().flat_map(move |(name, artifacts)| {
let contract_name = name;
let file = f.clone();
artifacts
.into_iter()
.map(move |artifact| (file.clone(), contract_name.clone(), artifact.artifact))
})
})
}
/// Strips the given prefix from all artifact file paths to make them relative to the given
/// `base` argument
pub fn into_stripped_file_prefixes(self, base: impl AsRef<Path>) -> Self {
let base = base.as_ref();
let artifacts = self
.0
.into_iter()
.map(|(file, c)| {
let file_path = Path::new(&file);
if let Ok(p) = file_path.strip_prefix(base) {
(p.to_string_lossy().to_string(), c)
} else {
(file, c)
}
})
.collect();
Artifacts(artifacts)
}
/// Finds the first artifact `T` with a matching contract name
pub fn find(&self, contract_name: impl AsRef<str>) -> Option<&T> {
let contract_name = contract_name.as_ref();
self.0.iter().find_map(|(_file, contracts)| {
contracts.get(contract_name).and_then(|c| c.get(0).map(|a| &a.artifact))
})
}
/// Removes the first artifact `T` with a matching contract name
///
/// *Note:* if there are multiple artifacts (the contract was compiled with different solc
/// versions) then this returns the first artifact in that set
pub fn remove(&mut self, contract_name: impl AsRef<str>) -> Option<T> {
let contract_name = contract_name.as_ref();
self.0.iter_mut().find_map(|(_file, contracts)| {
let mut artifact = None;
if let Some((c, mut artifacts)) = contracts.remove_entry(contract_name) {
if !artifacts.is_empty() {
artifact = Some(artifacts.remove(0).artifact);
}
if !artifacts.is_empty() {
contracts.insert(c, artifacts);
}
}
artifact
})
}
}
/// A trait representation for a [`crate::Contract`] artifact
pub trait Artifact {
/// Returns the artifact's `Abi` and bytecode
fn into_inner(self) -> (Option<Abi>, Option<Bytes>);
/// Turns the artifact into a container type for abi, compact bytecode and deployed bytecode
fn into_compact_contract(self) -> CompactContract;
/// Turns the artifact into a container type for abi, full bytecode and deployed bytecode
fn into_contract_bytecode(self) -> CompactContractBytecode;
/// Returns the contents of this type as a single tuple of abi, bytecode and deployed bytecode
fn into_parts(self) -> (Option<Abi>, Option<Bytes>, Option<Bytes>);
/// Same as [`Self::into_parts()`] but returns `Err` if an element is `None`
fn try_into_parts(self) -> Result<(Abi, Bytes, Bytes)>
where
Self: Sized,
{
let (abi, bytecode, deployed_bytecode) = self.into_parts();
Ok((
abi.ok_or_else(|| SolcError::msg("abi missing"))?,
bytecode.ok_or_else(|| SolcError::msg("bytecode missing"))?,
deployed_bytecode.ok_or_else(|| SolcError::msg("deployed bytecode missing"))?,
))
}
}
impl<T> Artifact for T
where
T: Into<CompactContractBytecode> + Into<CompactContract>,
{
fn into_inner(self) -> (Option<Abi>, Option<Bytes>) {
let artifact = self.into_compact_contract();
(artifact.abi, artifact.bin.and_then(|bin| bin.into_bytes()))
}
fn into_compact_contract(self) -> CompactContract {
self.into()
}
fn into_contract_bytecode(self) -> CompactContractBytecode {
self.into()
}
fn into_parts(self) -> (Option<Abi>, Option<Bytes>, Option<Bytes>) {
self.into_compact_contract().into_parts()
}
}
/// Handler invoked with the output of `solc`
///
/// Implementers of this trait are expected to take care of [`crate::Contract`] to
/// [`crate::ArtifactOutput::Artifact`] conversion and how that `Artifact` type is stored on disk;
/// this includes artifact file location and naming.
///
/// Depending on the [`crate::Project`] contracts and their compatible versions,
/// [`crate::ProjectCompiler::compile()`] may invoke different `solc` executables on the same
/// solidity file leading to multiple [`crate::CompilerOutput`]s for the same `.sol` file.
/// In addition to the `solidity file` to `contract` relationship (1-N*)
/// [`crate::VersionedContracts`] also tracks the `contract` to (`artifact` + `solc version`)
/// relationship (1-N+).
pub trait ArtifactOutput {
/// Represents the artifact that will be stored for a `Contract`
type Artifact: Artifact + DeserializeOwned + Serialize + fmt::Debug;
/// Handle the aggregated set of compiled contracts from the solc [`crate::CompilerOutput`].
///
/// This will be invoked with all aggregated contracts from (multiple) solc `CompilerOutput`.
/// See [`crate::AggregatedCompilerOutput`]
fn on_output(
contracts: &VersionedContracts,
layout: &ProjectPathsConfig,
) -> Result<Artifacts<Self::Artifact>> {
let mut artifacts = Self::output_to_artifacts(contracts);
artifacts.join_all(&layout.artifacts);
artifacts.write_all()?;
Self::write_extras(contracts, layout)?;
Ok(artifacts)
}
/// Writes additional files for the contracts if they are included in the `Contract`, such as
/// `ir`, `ewasm`, `iropt`.
///
/// By default, these fields are _not_ enabled in the [`crate::Settings`], see
/// [`crate::Settings::default_output_selection()`], and the respective fields of the
/// [`Contract`] will be `None`. If they are manually added to the `output_selection`, then
/// individual files are also created for this output, such as `Greeter.iropt`,
/// `Greeter.ewasm`
fn write_extras(contracts: &VersionedContracts, layout: &ProjectPathsConfig) -> Result<()> {
for (file, contracts) in contracts.as_ref().iter() {
for (name, versioned_contracts) in contracts {
for c in versioned_contracts {
let artifact_path = if versioned_contracts.len() > 1 {
Self::output_file_versioned(file, name, &c.version)
} else {
Self::output_file(file, name)
};
let file = layout.artifacts.join(artifact_path);
utils::create_parent_dir_all(&file)?;
if let Some(iropt) = &c.contract.ir_optimized {
fs::write(&file.with_extension("iropt"), iropt)
.map_err(|err| SolcError::io(err, file.with_extension("iropt")))?
}
if let Some(ir) = &c.contract.ir {
fs::write(&file.with_extension("ir"), ir)
.map_err(|err| SolcError::io(err, file.with_extension("ir")))?
}
if let Some(ewasm) = &c.contract.ewasm {
fs::write(
&file.with_extension("ewasm"),
serde_json::to_vec_pretty(&ewasm)?,
)
.map_err(|err| SolcError::io(err, file.with_extension("ewasm")))?;
}
if let Some(evm) = &c.contract.evm {
if let Some(asm) = &evm.assembly {
fs::write(&file.with_extension("asm"), asm)
.map_err(|err| SolcError::io(err, file.with_extension("asm")))?
}
}
}
}
}
Ok(())
}
/// Returns the file name for the contract's artifact
/// `Greeter.json`
fn output_file_name(name: impl AsRef<str>) -> PathBuf {
format!("{}.json", name.as_ref()).into()
}
/// Returns the file name for the contract's artifact and the given version
/// `Greeter.0.8.11.json`
fn output_file_name_versioned(name: impl AsRef<str>, version: &Version) -> PathBuf {
format!("{}.{}.{}.{}.json", name.as_ref(), version.major, version.minor, version.patch)
.into()
}
/// Returns the path to the contract's artifact location based on the contract's file and name
///
/// This returns `contract.sol/contract.json` by default
fn output_file(contract_file: impl AsRef<Path>, name: impl AsRef<str>) -> PathBuf {
let name = name.as_ref();
contract_file
.as_ref()
.file_name()
.map(Path::new)
.map(|p| p.join(Self::output_file_name(name)))
.unwrap_or_else(|| Self::output_file_name(name))
}
/// Returns the path to the contract's artifact location based on the contract's file, name and
/// version
///
/// This returns `contract.sol/contract.0.8.11.json` by default
fn output_file_versioned(
contract_file: impl AsRef<Path>,
name: impl AsRef<str>,
version: &Version,
) -> PathBuf {
let name = name.as_ref();
contract_file
.as_ref()
.file_name()
.map(Path::new)
.map(|p| p.join(Self::output_file_name_versioned(name, version)))
.unwrap_or_else(|| Self::output_file_name_versioned(name, version))
}
/// The inverse of `output_file_name`
///
/// Expected to return the solidity contract's name derived from the file path
/// `sources/Greeter.sol` -> `Greeter`
fn contract_name(file: impl AsRef<Path>) -> Option<String> {
file.as_ref().file_stem().and_then(|s| s.to_str().map(|s| s.to_string()))
}
/// Whether the corresponding artifact of the given contract file and name exists
fn output_exists(
contract_file: impl AsRef<Path>,
name: impl AsRef<str>,
root: impl AsRef<Path>,
) -> bool {
root.as_ref().join(Self::output_file(contract_file, name)).exists()
}
/// Read the artifact that's stored at the given path
///
/// # Errors
///
/// Returns an error if
/// - The file does not exist
/// - The file's content couldn't be deserialized into the `Artifact` type
fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
let path = path.as_ref();
let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?;
let file = io::BufReader::new(file);
Ok(serde_json::from_reader(file)?)
}
/// Read the cached artifacts that are located at the paths the iterator yields
///
/// See [`Self::read_cached_artifact()`]
fn read_cached_artifacts<T, I>(files: I) -> Result<BTreeMap<PathBuf, Self::Artifact>>
where
I: IntoIterator<Item = T>,
T: Into<PathBuf>,
{
let mut artifacts = BTreeMap::default();
for path in files.into_iter() {
let path = path.into();
let artifact = Self::read_cached_artifact(&path)?;
artifacts.insert(path, artifact);
}
Ok(artifacts)
}
/// Convert a contract to the artifact type
///
/// This is the core conversion function that takes care of converting a `Contract` into the
/// associated `Artifact` type
fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact;
/// Convert the compiler output into a set of artifacts
///
/// **Note:** This only converts, but does _not_ write the artifacts to disk; see
/// [`Self::on_output()`]
fn output_to_artifacts(contracts: &VersionedContracts) -> Artifacts<Self::Artifact> {
let mut artifacts = ArtifactsMap::new();
for (file, contracts) in contracts.as_ref().iter() {
let mut entries = BTreeMap::new();
for (name, versioned_contracts) in contracts {
let mut contracts = Vec::with_capacity(versioned_contracts.len());
// check if the same contract compiled with multiple solc versions
for contract in versioned_contracts {
let artifact_path = if versioned_contracts.len() > 1 {
Self::output_file_versioned(file, name, &contract.version)
} else {
Self::output_file(file, name)
};
let artifact =
Self::contract_to_artifact(file, name, contract.contract.clone());
contracts.push(ArtifactFile {
artifact,
file: artifact_path,
version: contract.version.clone(),
});
}
entries.insert(name.to_string(), contracts);
}
artifacts.insert(file.to_string(), entries);
}
Artifacts(artifacts)
}
}
/// An Artifacts implementation that uses a compact representation
///
/// Creates a single json artifact with
/// ```json
/// {
/// "abi": [],
/// "bin": "...",
/// "runtime-bin": "..."
/// }
/// ```
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct MinimalCombinedArtifacts;
impl ArtifactOutput for MinimalCombinedArtifacts {
type Artifact = CompactContractBytecode;
fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact {
Self::Artifact::from(contract)
}
}
/// An Artifacts handler implementation that works the same as `MinimalCombinedArtifacts` but also
/// supports reading hardhat artifacts if an initial attempt to deserialize an artifact failed
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct MinimalCombinedArtifactsHardhatFallback;
impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback {
type Artifact = CompactContractBytecode;
fn on_output(
output: &VersionedContracts,
layout: &ProjectPathsConfig,
) -> Result<Artifacts<Self::Artifact>> {
MinimalCombinedArtifacts::on_output(output, layout)
}
fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
let path = path.as_ref();
let content = fs::read_to_string(path).map_err(|err| SolcError::io(err, path))?;
if let Ok(a) = serde_json::from_str(&content) {
Ok(a)
} else {
tracing::error!("Failed to deserialize compact artifact");
tracing::trace!("Fallback to hardhat artifact deserialization");
let artifact = serde_json::from_str::<HardhatArtifact>(&content)?;
tracing::trace!("successfully deserialized hardhat artifact");
Ok(artifact.into_contract_bytecode())
}
}
fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact {
MinimalCombinedArtifacts::contract_to_artifact(file, name, contract)
}
}
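
Because the trait ships defaults for everything except the conversion, a custom handler stays small; a sketch of a handler equivalent to `MinimalCombinedArtifacts` (the re-export paths are assumptions based on this diff):

```rust
use ethers_solc::{
    artifacts::{CompactContractBytecode, Contract},
    ArtifactOutput,
};

/// Stores the compact abi + bytecode representation, one file per contract.
#[derive(Debug, Copy, Clone)]
pub struct MyArtifacts;

impl ArtifactOutput for MyArtifacts {
    type Artifact = CompactContractBytecode;

    fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact {
        // the `From<Contract>` conversion keeps only abi/bytecode, as
        // `MinimalCombinedArtifacts` does
        Self::Artifact::from(contract)
    }
}
```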

View File

@@ -22,10 +22,20 @@ use crate::{
 use ethers_core::abi::Address;
 use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
 
+/// Solidity files are made up of multiple `source units`, a solidity contract is such a `source
+/// unit`, therefore a solidity file can contain multiple contracts: (1-N*) relationship.
+///
+/// This type represents this mapping as `file name -> (contract name -> T)`, where the generic is
+/// intended to represent contract specific information, like [`Contract`] itself, see [`Contracts`]
+pub type FileToContractsMap<T> = BTreeMap<String, BTreeMap<String, T>>;
+
+/// file -> (contract name -> Contract)
+pub type Contracts = FileToContractsMap<Contract>;
+
 /// An ordered list of files and their source
 pub type Sources = BTreeMap<PathBuf, Source>;
-pub type Contracts = BTreeMap<String, BTreeMap<String, Contract>>;
+pub type VersionedSources = BTreeMap<Solc, (Version, Sources)>;
 
 /// Input type `solc` expects
 #[derive(Clone, Debug, Serialize, Deserialize)]
@@ -547,22 +557,7 @@ impl CompilerOutput {
         })
     }
 
-    pub fn diagnostics<'a>(&'a self, ignored_error_codes: &'a [u64]) -> OutputDiagnostics {
-        OutputDiagnostics { compiler_output: self, ignored_error_codes }
-    }
-
     /// Finds the _first_ contract with the given name
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use ethers_solc::Project;
-    /// use ethers_solc::artifacts::*;
-    /// # fn demo(project: Project) {
-    /// let output = project.compile().unwrap().output();
-    /// let contract = output.find("Greeter").unwrap();
-    /// # }
-    /// ```
     pub fn find(&self, contract: impl AsRef<str>) -> Option<CompactContractRef> {
         let contract_name = contract.as_ref();
         self.contracts_iter().find_map(|(name, contract)| {
@@ -571,17 +566,6 @@ impl CompilerOutput {
     }
 
     /// Finds the first contract with the given name and removes it from the set
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use ethers_solc::Project;
-    /// use ethers_solc::artifacts::*;
-    /// # fn demo(project: Project) {
-    /// let mut output = project.compile().unwrap().output();
-    /// let contract = output.remove("Greeter").unwrap();
-    /// # }
-    /// ```
     pub fn remove(&mut self, contract: impl AsRef<str>) -> Option<Contract> {
         let contract_name = contract.as_ref();
         self.contracts.values_mut().find_map(|c| c.remove(contract_name))
@@ -608,16 +592,6 @@ impl CompilerOutput {
     /// Returns the output's source files and contracts separately, wrapped in helper types that
     /// provide several helper methods
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use ethers_solc::Project;
-    /// # fn demo(project: Project) {
-    /// let output = project.compile().unwrap().output();
-    /// let (sources, contracts) = output.split();
-    /// # }
-    /// ```
     pub fn split(self) -> (SourceFiles, OutputContracts) {
         (SourceFiles(self.sources), OutputContracts(self.contracts))
     }
@@ -629,17 +603,6 @@ pub struct OutputContracts(pub Contracts);
 impl OutputContracts {
     /// Returns an iterator over all contracts and their source names.
-    ///
-    /// ```
-    /// use std::collections::BTreeMap;
-    /// use ethers_solc::{ artifacts::*, Artifact };
-    /// # fn demo(contracts: OutputContracts) {
-    /// let contracts: BTreeMap<String, CompactContractSome> = contracts
-    ///     .into_contracts()
-    ///     .map(|(k, c)| (k, c.into_compact_contract().unwrap()))
-    ///     .collect();
-    /// # }
-    /// ```
     pub fn into_contracts(self) -> impl Iterator<Item = (String, Contract)> {
         self.0.into_values().flatten()
     }
@@ -650,17 +613,6 @@ impl OutputContracts {
     }
 
     /// Finds the _first_ contract with the given name
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use ethers_solc::Project;
-    /// use ethers_solc::artifacts::*;
-    /// # fn demo(project: Project) {
-    /// let output = project.compile().unwrap().output();
-    /// let contract = output.find("Greeter").unwrap();
-    /// # }
-    /// ```
     pub fn find(&self, contract: impl AsRef<str>) -> Option<CompactContractRef> {
         let contract_name = contract.as_ref();
         self.contracts_iter().find_map(|(name, contract)| {
@@ -669,87 +621,12 @@ impl OutputContracts {
     }
 
     /// Finds the first contract with the given name and removes it from the set
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use ethers_solc::Project;
-    /// use ethers_solc::artifacts::*;
-    /// # fn demo(project: Project) {
-    /// let (_, mut contracts) = project.compile().unwrap().output().split();
-    /// let contract = contracts.remove("Greeter").unwrap();
-    /// # }
-    /// ```
     pub fn remove(&mut self, contract: impl AsRef<str>) -> Option<Contract> {
         let contract_name = contract.as_ref();
         self.0.values_mut().find_map(|c| c.remove(contract_name))
     }
 }
 
-/// Helper type to implement display for solc errors
-#[derive(Clone, Debug)]
-pub struct OutputDiagnostics<'a> {
-    compiler_output: &'a CompilerOutput,
-    ignored_error_codes: &'a [u64],
-}
-
-impl<'a> OutputDiagnostics<'a> {
-    /// Returns true if there is at least one error of high severity
-    pub fn has_error(&self) -> bool {
-        self.compiler_output.has_error()
-    }
-
-    /// Returns true if there is at least one warning
-    pub fn has_warning(&self) -> bool {
-        self.compiler_output.has_warning(self.ignored_error_codes)
-    }
-
-    fn is_test<T: AsRef<str>>(&self, contract_path: T) -> bool {
-        if contract_path.as_ref().ends_with(".t.sol") {
-            return true
-        }
-        self.compiler_output.find(&contract_path).map_or(false, |contract| {
-            contract.abi.map_or(false, |abi| abi.functions.contains_key("IS_TEST"))
-        })
-    }
-}
-
-impl<'a> fmt::Display for OutputDiagnostics<'a> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        if self.has_error() {
-            f.write_str("Compiler run failed")?;
-        } else if self.has_warning() {
-            f.write_str("Compiler run successful (with warnings)")?;
-        } else {
-            f.write_str("Compiler run successful")?;
-        }
-        for err in &self.compiler_output.errors {
-            if err.severity.is_warning() {
-                let is_ignored = err.error_code.as_ref().map_or(false, |code| {
-                    if let Some(source_location) = &err.source_location {
-                        // we ignore spdx and contract size warnings in test
-                        // files. if we are looking at one of these warnings
-                        // from a test file we skip
-                        if self.is_test(&source_location.file) && (*code == 1878 || *code == 5574) {
-                            return true
-                        }
-                    }
-                    self.ignored_error_codes.contains(code)
-                });
-                if !is_ignored {
-                    writeln!(f, "\n{}", err)?;
-                }
-            } else {
-                writeln!(f, "\n{}", err)?;
-            }
-        }
-        Ok(())
-    }
-}
-
 /// Represents a compiled solidity contract
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
 #[serde(rename_all = "camelCase")]
@@ -1727,7 +1604,7 @@ pub struct StorageType {
     pub number_of_bytes: String,
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
+#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
 #[serde(rename_all = "camelCase")]
 pub struct Error {
     #[serde(default, skip_serializing_if = "Option::is_none")]
@@ -1757,7 +1634,7 @@ impl fmt::Display for Error {
     }
 }
 
-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, Eq, PartialEq, Hash)]
 pub enum Severity {
     Error,
     Warning,
@@ -1840,14 +1717,14 @@ impl<'de> Deserialize<'de> for Severity {
     }
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
+#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
 pub struct SourceLocation {
     pub file: String,
     pub start: i32,
     pub end: i32,
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
+#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
 pub struct SecondarySourceLocation {
     pub file: Option<String>,
     pub start: Option<i32>,
@@ -1867,7 +1744,7 @@ pub struct SourceFile {
 pub struct SourceFiles(pub BTreeMap<String, SourceFile>);
 
 impl SourceFiles {
-    /// Returns an iterator over the the source files' ids and path
+    /// Returns an iterator over the source files' ids and path
     ///
     /// ```
     /// use std::collections::BTreeMap;

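The `FileToContractsMap` alias added above is just two nested `BTreeMap`s; a toy sketch of the shape it encodes (the values are made up):

```rust
use std::collections::BTreeMap;

/// file name -> (contract name -> T), mirroring the alias in artifacts.rs
type FileToContractsMap<T> = BTreeMap<String, BTreeMap<String, T>>;

fn main() {
    let mut map: FileToContractsMap<&str> = BTreeMap::new();
    // a single solidity file can define several contracts (1-N)
    map.entry("Greeter.sol".to_string()).or_default().insert("Greeter".to_string(), "data");
    map.entry("Greeter.sol".to_string()).or_default().insert("GreeterLib".to_string(), "data");
    assert_eq!(map["Greeter.sol"].len(), 2);
}
```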
(File diff suppressed because it is too large.)


@@ -0,0 +1,146 @@
use crate::artifacts::{CompactContractRef, Contract, FileToContractsMap};
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
/// file -> [(contract name -> Contract + solc version)]
#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)]
#[serde(transparent)]
pub struct VersionedContracts(pub FileToContractsMap<Vec<VersionedContract>>);
impl VersionedContracts {
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub fn len(&self) -> usize {
self.0.len()
}
/// Returns an iterator over all files
pub fn files(&self) -> impl Iterator<Item = &String> + '_ {
self.0.keys()
}
/// Finds the _first_ contract with the given name
///
/// # Example
///
/// ```
/// use ethers_solc::Project;
/// use ethers_solc::artifacts::*;
/// # fn demo(project: Project) {
/// let output = project.compile().unwrap().output();
/// let contract = output.find("Greeter").unwrap();
/// # }
/// ```
pub fn find(&self, contract: impl AsRef<str>) -> Option<CompactContractRef> {
let contract_name = contract.as_ref();
self.contracts().find_map(|(name, contract)| {
(name == contract_name).then(|| CompactContractRef::from(contract))
})
}
/// Removes the _first_ contract with the given name from the set
///
/// # Example
///
/// ```
/// use ethers_solc::Project;
/// use ethers_solc::artifacts::*;
/// # fn demo(project: Project) {
/// let (_, mut contracts) = project.compile().unwrap().output().split();
/// let contract = contracts.remove("Greeter").unwrap();
/// # }
/// ```
pub fn remove(&mut self, contract: impl AsRef<str>) -> Option<Contract> {
let contract_name = contract.as_ref();
self.0.values_mut().find_map(|all_contracts| {
let mut contract = None;
if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) {
if !contracts.is_empty() {
contract = Some(contracts.remove(0).contract);
}
if !contracts.is_empty() {
all_contracts.insert(c, contracts);
}
}
contract
})
}
/// Given the contract file's path and the contract's name, tries to return the contract's
/// bytecode, runtime bytecode, and abi
pub fn get(&self, path: &str, contract: &str) -> Option<CompactContractRef> {
self.0
.get(path)
.and_then(|contracts| {
contracts.get(contract).and_then(|c| c.get(0).map(|c| &c.contract))
})
.map(CompactContractRef::from)
}
/// Iterate over all contracts and their names
pub fn contracts(&self) -> impl Iterator<Item = (&String, &Contract)> {
self.0
.values()
.flat_map(|c| c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract))))
}
/// Returns an iterator over (`file`, `name`, `Contract`)
pub fn contracts_with_files(&self) -> impl Iterator<Item = (&String, &String, &Contract)> {
self.0.iter().flat_map(|(file, contracts)| {
contracts
.iter()
.flat_map(move |(name, c)| c.iter().map(move |c| (file, name, &c.contract)))
})
}
/// Returns an iterator over all contracts and their source names.
///
/// ```
/// use std::collections::BTreeMap;
/// use ethers_solc::{ artifacts::*, Artifact };
/// # fn demo(contracts: VersionedContracts) {
/// let contracts: BTreeMap<String, CompactContractSome> = contracts
/// .into_contracts()
/// .map(|(k, c)| (k, c.into_compact_contract().unwrap()))
/// .collect();
/// # }
/// ```
pub fn into_contracts(self) -> impl Iterator<Item = (String, Contract)> {
self.0.into_values().flat_map(|c| {
c.into_iter()
.flat_map(|(name, c)| c.into_iter().map(move |c| (name.clone(), c.contract)))
})
}
}
impl AsRef<FileToContractsMap<Vec<VersionedContract>>> for VersionedContracts {
fn as_ref(&self) -> &FileToContractsMap<Vec<VersionedContract>> {
&self.0
}
}
impl AsMut<FileToContractsMap<Vec<VersionedContract>>> for VersionedContracts {
fn as_mut(&mut self) -> &mut FileToContractsMap<Vec<VersionedContract>> {
&mut self.0
}
}
impl IntoIterator for VersionedContracts {
type Item = (String, BTreeMap<String, Vec<VersionedContract>>);
type IntoIter =
std::collections::btree_map::IntoIter<String, BTreeMap<String, Vec<VersionedContract>>>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
/// A contract and the compiler version used to compile it
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VersionedContract {
pub contract: Contract,
pub version: Version,
}
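
A usage sketch for `VersionedContracts`, obtained from `AggregatedCompilerOutput::split()` as in the doc examples above (the project layout and the `src/Greeter.sol` path are assumptions):

```rust
use ethers_solc::Project;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let project = Project::builder().build()?;
    let (_sources, contracts) = project.compile()?.output().split();

    // look up a contract by file path and contract name
    if let Some(compact) = contracts.get("src/Greeter.sol", "Greeter") {
        println!("abi present: {}", compact.abi.is_some());
    }
    // iterate (file, name, contract) across all compiled solc versions
    for (file, name, _contract) in contracts.contracts_with_files() {
        println!("{}:{}", file, name);
    }
    Ok(())
}
```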

View File

@@ -0,0 +1,40 @@
use crate::{error::Result, CompilerInput, CompilerOutput, Solc};
/// The result of a `solc` process bundled with its `Solc` and `CompilerInput`
type CompileElement = (Result<CompilerOutput>, Solc, CompilerInput);
/// The bundled output of multiple `solc` processes.
#[derive(Debug)]
pub struct CompiledMany {
outputs: Vec<CompileElement>,
}
impl CompiledMany {
pub fn new(outputs: Vec<CompileElement>) -> Self {
Self { outputs }
}
/// Returns an iterator over all output elements
pub fn outputs(&self) -> impl Iterator<Item = &CompileElement> {
self.outputs.iter()
}
/// Returns an iterator over all output elements
pub fn into_outputs(self) -> impl Iterator<Item = CompileElement> {
self.outputs.into_iter()
}
/// Returns all `CompilerOutput` or the first error that occurred
pub fn flattened(self) -> Result<Vec<CompilerOutput>> {
self.into_iter().collect()
}
}
impl IntoIterator for CompiledMany {
type Item = Result<CompilerOutput>;
type IntoIter = std::vec::IntoIter<Result<CompilerOutput>>;
fn into_iter(self) -> Self::IntoIter {
self.outputs.into_iter().map(|(res, _, _)| res).collect::<Vec<_>>().into_iter()
}
}
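
`CompiledMany` is what `Solc::compile_many` (presumably behind the crate's `async` feature) returns; a sketch following the doc example in the `solc` module below, with how the `Solc` instances and inputs are obtained left open:

```rust
use ethers_solc::{CompilerInput, Solc};

async fn compile_both(
    solc1: Solc,
    solc2: Solc,
    input1: CompilerInput,
    input2: CompilerInput,
) -> Result<(), Box<dyn std::error::Error>> {
    // run at most two solc processes concurrently
    let outputs = Solc::compile_many([(solc1, input1), (solc2, input2)], 2)
        .await
        .flattened()?; // all CompilerOutputs, or the first error
    println!("{} outputs", outputs.len());
    Ok(())
}
```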


@@ -15,6 +15,11 @@ use std::{
     str::FromStr,
 };
 
+pub mod contracts;
+pub mod many;
+pub mod output;
+pub mod project;
+
 /// The name of the `solc` binary on the system
 pub const SOLC: &str = "solc";
 
@@ -496,7 +501,7 @@
             .stdout(Stdio::piped())
             .spawn()
             .map_err(|err| SolcError::io(err, &self.solc))?;
-        let stdin = child.stdin.take().unwrap();
+        let stdin = child.stdin.take().expect("Stdin exists.");
         serde_json::to_writer(stdin, input)?;
         compile_output(child.wait_with_output().map_err(|err| SolcError::io(err, &self.solc))?)
     }
@@ -602,7 +607,7 @@
     /// let outputs = Solc::compile_many([(solc1, input1), (solc2, input2)], 2).await.flattened().unwrap();
     /// # }
     /// ```
-    pub async fn compile_many<I>(jobs: I, n: usize) -> CompiledMany
+    pub async fn compile_many<I>(jobs: I, n: usize) -> crate::many::CompiledMany
     where
         I: IntoIterator<Item = (Solc, CompilerInput)>,
     {
@@ -615,42 +620,8 @@
             .buffer_unordered(n)
             .collect::<Vec<_>>()
             .await;
-        CompiledMany { outputs }
-    }
-}
-
-/// The result of a `solc` process bundled with its `Solc` and `CompilerInput`
-type CompileElement = (Result<CompilerOutput>, Solc, CompilerInput);
-
-/// The output of multiple `solc` processes.
-#[derive(Debug)]
-pub struct CompiledMany {
-    outputs: Vec<CompileElement>,
-}
-
-impl CompiledMany {
-    /// Returns an iterator over all output elements
-    pub fn outputs(&self) -> impl Iterator<Item = &CompileElement> {
-        self.outputs.iter()
-    }
-
-    /// Returns an iterator over all output elements
-    pub fn into_outputs(self) -> impl Iterator<Item = CompileElement> {
-        self.outputs.into_iter()
-    }
-
-    /// Returns all `CompilerOutput` or the first error that occurred
-    pub fn flattened(self) -> Result<Vec<CompilerOutput>> {
-        self.into_iter().collect()
-    }
-}
-
-impl IntoIterator for CompiledMany {
-    type Item = Result<CompilerOutput>;
-    type IntoIter = std::vec::IntoIter<Result<CompilerOutput>>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.outputs.into_iter().map(|(res, _, _)| res).collect::<Vec<_>>().into_iter()
-    }
-}
+        crate::many::CompiledMany::new(outputs)
     }
 }
 
@@ -716,7 +687,7 @@ mod tests {
     #[test]
     fn solc_compile_works() {
-        let input = include_str!("../test-data/in/compiler-in-1.json");
+        let input = include_str!("../../test-data/in/compiler-in-1.json");
         let input: CompilerInput = serde_json::from_str(input).unwrap();
         let out = solc().compile(&input).unwrap();
         let other = solc().compile(&serde_json::json!(input)).unwrap();
@@ -726,7 +697,7 @@
     #[cfg(feature = "async")]
     #[tokio::test]
     async fn async_solc_compile_works() {
-        let input = include_str!("../test-data/in/compiler-in-1.json");
+        let input = include_str!("../../test-data/in/compiler-in-1.json");
         let input: CompilerInput = serde_json::from_str(input).unwrap();
         let out = solc().async_compile(&input).await.unwrap();
         let other = solc().async_compile(&serde_json::json!(input)).await.unwrap();
@@ -735,7 +706,7 @@
     #[cfg(feature = "async")]
     #[tokio::test]
     async fn async_solc_compile_works2() {
-        let input = include_str!("../test-data/in/compiler-in-2.json");
+        let input = include_str!("../../test-data/in/compiler-in-2.json");
         let input: CompilerInput = serde_json::from_str(input).unwrap();
         let out = solc().async_compile(&input).await.unwrap();
         let other = solc().async_compile(&serde_json::json!(input)).await.unwrap();
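
For reference, the blocking entry point the tests above exercise, as a standalone sketch (assumes a `solc` binary on `PATH` and a standard-json input string):

```rust
use ethers_solc::{CompilerInput, CompilerOutput, Solc};

fn compile_standard_json(json: &str) -> Result<CompilerOutput, Box<dyn std::error::Error>> {
    let input: CompilerInput = serde_json::from_str(json)?;
    // spawns the solc process and writes `input` to its stdin, as shown above
    let solc = Solc::new("solc");
    Ok(solc.compile(&input)?)
}
```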


@@ -0,0 +1,363 @@
//! The output of a compiled project
use crate::{
artifacts::{CompactContractRef, Contract, Error, SourceFile, SourceFiles},
contracts::{VersionedContract, VersionedContracts},
ArtifactOutput, Artifacts, CompilerOutput,
};
use semver::Version;
use std::{collections::BTreeMap, fmt, path::Path};
/// Contains a mixture of already compiled/cached artifacts and the input set of sources that still
/// need to be compiled.
#[derive(Debug, Clone, PartialEq, Default)]
pub struct ProjectCompileOutput<T: ArtifactOutput> {
/// contains the aggregated `CompilerOutput`
///
/// See [`CompilerSources::compile`]
pub(crate) compiler_output: AggregatedCompilerOutput,
/// all artifact files from `output` that were freshly compiled and written
pub(crate) compiled_artifacts: Artifacts<T::Artifact>,
/// All artifacts that were read from cache
pub(crate) cached_artifacts: Artifacts<T::Artifact>,
/// error codes that should be ignored
pub(crate) ignored_error_codes: Vec<u64>,
}
impl<T: ArtifactOutput> ProjectCompileOutput<T> {
/// All artifacts together with their contract file name and name `<file name>:<name>`
///
/// This returns a chained iterator of both cached and recompiled contract artifacts
///
/// # Example
///
/// ```no_run
/// use std::collections::btree_map::BTreeMap;
/// use ethers_solc::artifacts::CompactContractBytecode;
/// use ethers_solc::Project;
///
/// let project = Project::builder().build().unwrap();
/// let contracts: BTreeMap<String, CompactContractBytecode> = project.compile().unwrap().into_artifacts().collect();
/// ```
pub fn into_artifacts(self) -> impl Iterator<Item = (String, T::Artifact)> {
let Self { cached_artifacts, compiled_artifacts, .. } = self;
cached_artifacts.into_artifacts::<T>().chain(compiled_artifacts.into_artifacts::<T>())
}
/// All artifacts together with their contract file and name as tuple `(file, contract
/// name, artifact)`
///
/// This returns a chained iterator of both cached and recompiled contract artifacts
///
/// # Example
///
/// ```no_run
/// use std::collections::btree_map::BTreeMap;
/// use ethers_solc::artifacts::CompactContractBytecode;
/// use ethers_solc::Project;
///
/// let project = Project::builder().build().unwrap();
/// let contracts: Vec<(String, String, CompactContractBytecode)> = project.compile().unwrap().into_artifacts_with_files().collect();
/// ```
///
/// **NOTE** the `file` will be returned as is, see also [`Self::with_stripped_file_prefixes()`]
pub fn into_artifacts_with_files(self) -> impl Iterator<Item = (String, String, T::Artifact)> {
let Self { cached_artifacts, compiled_artifacts, .. } = self;
cached_artifacts
.into_artifacts_with_files()
.chain(compiled_artifacts.into_artifacts_with_files())
}
/// Strips the given prefix from all artifact file paths to make them relative to the given
/// `base` argument
///
/// # Example
///
/// Make all artifact files relative to the project's root directory
///
/// ```no_run
/// use ethers_solc::artifacts::CompactContractBytecode;
/// use ethers_solc::Project;
///
/// let project = Project::builder().build().unwrap();
/// let output = project.compile().unwrap().with_stripped_file_prefixes(project.root());
/// ```
pub fn with_stripped_file_prefixes(mut self, base: impl AsRef<Path>) -> Self {
let base = base.as_ref();
self.cached_artifacts = self.cached_artifacts.into_stripped_file_prefixes(base);
self.compiled_artifacts = self.compiled_artifacts.into_stripped_file_prefixes(base);
self
}
/// Get the (merged) solc compiler output
/// ```no_run
/// use std::collections::btree_map::BTreeMap;
/// use ethers_solc::artifacts::Contract;
/// use ethers_solc::Project;
///
/// let project = Project::builder().build().unwrap();
/// let contracts: BTreeMap<String, Contract> =
/// project.compile().unwrap().output().contracts_into_iter().collect();
/// ```
pub fn output(self) -> AggregatedCompilerOutput {
self.compiler_output
}
/// Whether this type has a compiler output
pub fn has_compiled_contracts(&self) -> bool {
!self.compiler_output.is_empty()
}
/// Whether this type does not contain compiled contracts
pub fn is_unchanged(&self) -> bool {
self.compiler_output.is_unchanged()
}
/// Whether there were errors
pub fn has_compiler_errors(&self) -> bool {
self.compiler_output.has_error()
}
/// Whether there were warnings
pub fn has_compiler_warnings(&self) -> bool {
self.compiler_output.has_warning(&self.ignored_error_codes)
}
/// Finds the first contract with the given name and removes it from the set
pub fn remove(&mut self, contract_name: impl AsRef<str>) -> Option<T::Artifact> {
let contract_name = contract_name.as_ref();
if let artifact @ Some(_) = self.compiled_artifacts.remove(contract_name) {
return artifact
}
self.cached_artifacts.remove(contract_name)
}
/// Returns the set of `Artifacts` that were cached and got reused during [`Project::compile()`]
pub fn cached_artifacts(&self) -> &Artifacts<T::Artifact> {
&self.cached_artifacts
}
/// Returns the set of `Artifacts` that were compiled with `solc` in [`Project::compile()`]
pub fn compiled_artifacts(&self) -> &Artifacts<T::Artifact> {
&self.compiled_artifacts
}
}
impl<T: ArtifactOutput> ProjectCompileOutput<T>
where
T::Artifact: Clone,
{
/// Finds the first contract with the given name
pub fn find(&self, contract_name: impl AsRef<str>) -> Option<&T::Artifact> {
let contract_name = contract_name.as_ref();
if let artifact @ Some(_) = self.compiled_artifacts.find(contract_name) {
return artifact
}
self.cached_artifacts.find(contract_name)
}
}
impl<T: ArtifactOutput> fmt::Display for ProjectCompileOutput<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.compiler_output.is_unchanged() {
f.write_str("Nothing to compile")
} else {
self.compiler_output.diagnostics(&self.ignored_error_codes).fmt(f)
}
}
}
/// The aggregated output of (multiple) compile jobs
///
/// This is effectively a solc version aware `CompilerOutput`
#[derive(Clone, Debug, Default, PartialEq)]
pub struct AggregatedCompilerOutput {
/// all errors from all `CompilerOutput`
pub errors: Vec<Error>,
/// All source files
pub sources: BTreeMap<String, SourceFile>,
/// All compiled contracts combined with the solc version used to compile them
pub contracts: VersionedContracts,
}
impl AggregatedCompilerOutput {
/// Whether the output contains a compiler error
pub fn has_error(&self) -> bool {
self.errors.iter().any(|err| err.severity.is_error())
}
/// Whether the output contains a compiler warning
pub fn has_warning(&self, ignored_error_codes: &[u64]) -> bool {
self.errors.iter().any(|err| {
if err.severity.is_warning() {
err.error_code.as_ref().map_or(false, |code| !ignored_error_codes.contains(code))
} else {
false
}
})
}
pub fn diagnostics<'a>(&'a self, ignored_error_codes: &'a [u64]) -> OutputDiagnostics {
OutputDiagnostics { compiler_output: self, ignored_error_codes }
}
pub fn is_empty(&self) -> bool {
self.contracts.is_empty()
}
pub fn is_unchanged(&self) -> bool {
self.contracts.is_empty() && self.errors.is_empty()
}
pub fn extend_all<I>(&mut self, out: I)
where
I: IntoIterator<Item = (Version, CompilerOutput)>,
{
for (v, o) in out {
self.extend(v, o)
}
}
/// adds a new `CompilerOutput` to the aggregated output
pub fn extend(&mut self, version: Version, output: CompilerOutput) {
self.errors.extend(output.errors);
self.sources.extend(output.sources);
for (file_name, new_contracts) in output.contracts {
let contracts = self.contracts.as_mut().entry(file_name).or_default();
for (contract_name, contract) in new_contracts {
let versioned = contracts.entry(contract_name).or_default();
versioned.push(VersionedContract { contract, version: version.clone() });
}
}
}
/// Finds the _first_ contract with the given name
///
/// # Example
///
/// ```
/// use ethers_solc::Project;
/// use ethers_solc::artifacts::*;
/// # fn demo(project: Project) {
/// let output = project.compile().unwrap().output();
/// let contract = output.find("Greeter").unwrap();
/// # }
/// ```
pub fn find(&self, contract: impl AsRef<str>) -> Option<CompactContractRef> {
self.contracts.find(contract)
}
/// Removes the _first_ contract with the given name from the set
///
/// # Example
///
/// ```
/// use ethers_solc::Project;
/// use ethers_solc::artifacts::*;
/// # fn demo(project: Project) {
/// let mut output = project.compile().unwrap().output();
/// let contract = output.remove("Greeter").unwrap();
/// # }
/// ```
pub fn remove(&mut self, contract: impl AsRef<str>) -> Option<Contract> {
self.contracts.remove(contract)
}
/// Iterate over all contracts and their names
pub fn contracts_iter(&self) -> impl Iterator<Item = (&String, &Contract)> {
self.contracts.contracts()
}
/// Iterate over all contracts and their names, consuming the output
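///
/// # Example
///
/// ```no_run
/// use std::collections::BTreeMap;
/// use ethers_solc::artifacts::Contract;
/// use ethers_solc::Project;
///
/// let project = Project::builder().build().unwrap();
/// let contracts: BTreeMap<String, Contract> =
///     project.compile().unwrap().output().contracts_into_iter().collect();
/// ```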
pub fn contracts_into_iter(self) -> impl Iterator<Item = (String, Contract)> {
self.contracts.into_contracts()
}
/// Given the contract file's path and the contract's name, tries to return the contract's
/// bytecode, runtime bytecode, and abi
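///
/// # Example
///
/// A sketch, assuming the project compiled a `Greeter` contract from `src/Greeter.sol`:
///
/// ```
/// use ethers_solc::Project;
/// # fn demo(project: Project) {
/// let output = project.compile().unwrap().output();
/// let contract = output.get("src/Greeter.sol", "Greeter").unwrap();
/// # }
/// ```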
pub fn get(&self, path: &str, contract: &str) -> Option<CompactContractRef> {
self.contracts.get(path, contract)
}
/// Returns the output's source files and contracts separately, wrapped in helper types that
/// provide several helper methods
///
/// # Example
///
/// ```
/// use ethers_solc::Project;
/// # fn demo(project: Project) {
/// let output = project.compile().unwrap().output();
/// let (sources, contracts) = output.split();
/// # }
/// ```
pub fn split(self) -> (SourceFiles, VersionedContracts) {
(SourceFiles(self.sources), self.contracts)
}
}
/// Helper type to implement display for solc errors
#[derive(Clone, Debug)]
pub struct OutputDiagnostics<'a> {
compiler_output: &'a AggregatedCompilerOutput,
ignored_error_codes: &'a [u64],
}
impl<'a> OutputDiagnostics<'a> {
/// Returns true if there is at least one error of high severity
pub fn has_error(&self) -> bool {
self.compiler_output.has_error()
}
/// Returns true if there is at least one warning
pub fn has_warning(&self) -> bool {
self.compiler_output.has_warning(self.ignored_error_codes)
}
/// Returns true if the contract is expected to be a test
fn is_test<T: AsRef<str>>(&self, contract_path: T) -> bool {
if contract_path.as_ref().ends_with(".t.sol") {
return true
}
self.compiler_output.find(&contract_path).map_or(false, |contract| {
contract.abi.map_or(false, |abi| abi.functions.contains_key("IS_TEST"))
})
}
}
impl<'a> fmt::Display for OutputDiagnostics<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.has_error() {
f.write_str("Compiler run failed")?;
} else if self.has_warning() {
f.write_str("Compiler run successful (with warnings)")?;
} else {
f.write_str("Compiler run successful")?;
}
for err in &self.compiler_output.errors {
if err.severity.is_warning() {
let is_ignored = err.error_code.as_ref().map_or(false, |code| {
if let Some(source_location) = &err.source_location {
// we ignore SPDX and contract size warnings in test
// files; if we are looking at one of these warnings
// from a test file we skip it
if self.is_test(&source_location.file) && (*code == 1878 || *code == 5574) {
return true
}
}
self.ignored_error_codes.contains(code)
});
if !is_ignored {
writeln!(f, "\n{}", err)?;
}
} else {
writeln!(f, "\n{}", err)?;
}
}
Ok(())
}
}

View File

@@ -0,0 +1,440 @@
//! Manages compiling of a `Project`
//!
//! The compilation of a project is performed in several steps.
//!
//! First the project's dependency graph [`crate::Graph`] is constructed and all imported
//! dependencies are resolved. The graph holds all the relationships between the files and their
//! versions. From there the appropriate version sets are derived
//! [`crate::Graph::into_sources_by_version()`], which then need to be compiled with different
//! [`crate::Solc`] versions.
//!
//! At this point we check if we need to compile a source file or whether we can reuse an _existing_
//! `Artifact`. We don't need to compile if:
//! - caching is enabled
//! - the file is **not** dirty [`Cache::is_dirty()`]
//! - the artifact for that file exists
//!
//! This concludes the preprocessing, and we now have either
//! - only `Source` files that need to be compiled
//! - only cached `Artifacts`, compilation can be skipped. This is considered an unchanged,
//! cached project
//! - a mix of both `Source` and `Artifacts`; only the `Source` files need to be compiled, the
//! `Artifacts` can be reused.
//!
//! The final step is invoking `Solc` via the standard JSON format.
//!
//! ### Notes on [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution)
//!
//! In order to be able to support reproducible builds on all platforms, the Solidity compiler has
//! to abstract away the details of the filesystem where source files are stored. Paths used in
//! imports must work the same way everywhere while the command-line interface must be able to work
//! with platform-specific paths to provide good user experience. This section aims to explain in
//! detail how Solidity reconciles these requirements.
//!
//! The compiler maintains an internal database (virtual filesystem or VFS for short) where each
//! source unit is assigned a unique source unit name which is an opaque and unstructured
//! identifier. When you use the import statement, you specify an import path that references a
//! source unit name. If the compiler does not find any source unit name matching the import path in
//! the VFS, it invokes the callback, which is responsible for obtaining the source code to be
//! placed under that name.
//!
//! This becomes relevant when dealing with resolved imports.
//!
//! #### Relative Imports
//!
//! ```solidity
//! import "./math/math.sol";
//! import "contracts/tokens/token.sol";
//! ```
//! In the above `./math/math.sol` and `contracts/tokens/token.sol` are import paths while the
//! source unit names they translate to are `contracts/math/math.sol` and
//! `contracts/tokens/token.sol` respectively.
//!
//! #### Direct Imports
//!
//! An import that does not start with `./` or `../` is a direct import.
//!
//! ```solidity
//! import "/project/lib/util.sol"; // source unit name: /project/lib/util.sol
//! import "lib/util.sol"; // source unit name: lib/util.sol
//! import "@openzeppelin/address.sol"; // source unit name: @openzeppelin/address.sol
//! import "https://example.com/token.sol"; // source unit name: https://example.com/token.sol
//! ```
//!
//! After applying any import remappings the import path simply becomes the source unit name.
//!
//! ##### Import Remapping
//!
//! ```solidity
//! import "github.com/ethereum/dapp-bin/library/math.sol"; // source unit name: dapp-bin/library/math.sol
//! ```
//!
//! If compiled with `solc github.com/ethereum/dapp-bin/=dapp-bin/` the compiler will look for the
//! file in the VFS under `dapp-bin/library/math.sol`. If the file is not available there, the
//! source unit name will be passed to the Host Filesystem Loader, which will then look in
//! `/project/dapp-bin/library/math.sol`.
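//!
//! The remapping above can also be expressed with this crate's [`crate::remappings::Remapping`]
//! type; a sketch assuming its `FromStr` impl, which parses the usual `<prefix>=<target>` format:
//!
//! ```no_run
//! use ethers_solc::remappings::Remapping;
//!
//! // "<prefix>=<target>": imports starting with the prefix are looked up under the target
//! let remapping: Remapping = "github.com/ethereum/dapp-bin/=dapp-bin/".parse().unwrap();
//! ```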
use crate::{
artifact_output::Artifacts,
artifacts::{Settings, VersionedSources},
cache::ArtifactsCache,
error::Result,
output::AggregatedCompilerOutput,
resolver::GraphEdges,
ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, Solc,
Sources,
};
use rayon::prelude::*;
use std::collections::btree_map::BTreeMap;
#[derive(Debug)]
pub struct ProjectCompiler<'a, T: ArtifactOutput> {
/// Contains the relationship of the source files and their imports
edges: GraphEdges,
project: &'a Project<T>,
/// how to compile all the sources
sources: CompilerSources,
}
impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
/// Create a new `ProjectCompiler` to bootstrap the compilation process of the project's
/// sources.
///
/// # Example
///
/// ```no_run
/// use ethers_solc::Project;
///
/// let project = Project::builder().build().unwrap();
/// let output = project.compile().unwrap();
/// ```
#[cfg(all(feature = "svm", feature = "async"))]
pub fn new(project: &'a Project<T>) -> Result<Self> {
Self::with_sources(project, project.paths.read_input_files()?)
}
/// Bootstraps the compilation process by resolving the dependency graph of all sources and the
/// appropriate `Solc` -> `Sources` set as well as the compile mode to use (parallel,
/// sequential)
///
/// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows
/// multiple `jobs`, see [`crate::Project::set_solc_jobs()`].
#[cfg(all(feature = "svm", feature = "async"))]
pub fn with_sources(project: &'a Project<T>, sources: Sources) -> Result<Self> {
let graph = Graph::resolve_sources(&project.paths, sources)?;
let (versions, edges) = graph.into_sources_by_version(project.offline)?;
let sources_by_version = versions.get(&project.allowed_lib_paths)?;
let sources = if project.solc_jobs > 1 && sources_by_version.len() > 1 {
// if there are multiple different versions, and we can use multiple jobs we can compile
// them in parallel
CompilerSources::Parallel(sources_by_version, project.solc_jobs)
} else {
CompilerSources::Sequential(sources_by_version)
};
Ok(Self { edges, project, sources })
}
/// Compiles the sources with a pinned `Solc` instance
pub fn with_sources_and_solc(
project: &'a Project<T>,
sources: Sources,
solc: Solc,
) -> Result<Self> {
let version = solc.version()?;
let (sources, edges) = Graph::resolve_sources(&project.paths, sources)?.into_sources();
let sources_by_version = BTreeMap::from([(solc, (version, sources))]);
let sources = CompilerSources::Sequential(sources_by_version);
Ok(Self { edges, project, sources })
}
/// Compiles all the sources of the `Project` in the appropriate mode
///
/// If caching is enabled, the sources are filtered and only _dirty_ sources are recompiled.
///
/// The output of the compile process can be a mix of reused artifacts and freshly compiled
/// `Contract`s
///
/// # Example
///
/// ```no_run
/// use ethers_solc::Project;
///
/// let project = Project::builder().build().unwrap();
/// let output = project.compile().unwrap();
/// ```
pub fn compile(self) -> Result<ProjectCompileOutput<T>> {
// drive the compiler state machine to completion
self.preprocess()?.compile()?.write_artifacts()?.write_cache()
}
/// Does basic preprocessing
/// - sets proper source unit names
/// - checks the cache
fn preprocess(self) -> Result<PreprocessedState<'a, T>> {
let Self { edges, project, mut sources } = self;
let mut cache = ArtifactsCache::new(project, edges)?;
// retain and compile only dirty sources
sources = sources.filtered(&mut cache);
Ok(PreprocessedState { sources, cache })
}
}
/// A series of states that comprise the [`ProjectCompiler::compile()`] state machine
///
/// The main reason for this is to make the individual states easy to debug.
#[derive(Debug)]
struct PreprocessedState<'a, T: ArtifactOutput> {
sources: CompilerSources,
cache: ArtifactsCache<'a, T>,
}
impl<'a, T: ArtifactOutput> PreprocessedState<'a, T> {
/// advance to the next state by compiling all sources
fn compile(self) -> Result<CompiledState<'a, T>> {
let PreprocessedState { sources, cache } = self;
let output =
sources.compile(&cache.project().solc_config.settings, &cache.project().paths)?;
Ok(CompiledState { output, cache })
}
}
/// Represents the state after `solc` was successfully invoked
#[derive(Debug)]
struct CompiledState<'a, T: ArtifactOutput> {
output: AggregatedCompilerOutput,
cache: ArtifactsCache<'a, T>,
}
impl<'a, T: ArtifactOutput> CompiledState<'a, T> {
/// advance to the next state by handling all artifacts
///
/// Writes all output contracts to disk if enabled in the `Project`
fn write_artifacts(self) -> Result<ArtifactsState<'a, T>> {
let CompiledState { output, cache } = self;
// write all artifacts
let compiled_artifacts = if !cache.project().no_artifacts {
T::on_output(&output.contracts, &cache.project().paths)?
} else {
T::output_to_artifacts(&output.contracts)
};
Ok(ArtifactsState { output, cache, compiled_artifacts })
}
}
/// Represents the state after all artifacts were written to disk
#[derive(Debug)]
struct ArtifactsState<'a, T: ArtifactOutput> {
output: AggregatedCompilerOutput,
cache: ArtifactsCache<'a, T>,
compiled_artifacts: Artifacts<T::Artifact>,
}
impl<'a, T: ArtifactOutput> ArtifactsState<'a, T> {
/// Writes the cache file
///
/// This concludes the [`Project::compile()`] state machine
fn write_cache(self) -> Result<ProjectCompileOutput<T>> {
let ArtifactsState { output, cache, compiled_artifacts } = self;
let ignored_error_codes = cache.project().ignored_error_codes.clone();
let cached_artifacts = cache.write_cache(&compiled_artifacts)?;
Ok(ProjectCompileOutput {
compiler_output: output,
compiled_artifacts,
cached_artifacts,
ignored_error_codes,
})
}
}
/// Determines how the `solc <-> sources` pairs are executed
#[derive(Debug, Clone)]
#[allow(dead_code)]
enum CompilerSources {
/// Compile all these sequentially
Sequential(VersionedSources),
/// Compile all these in parallel using a certain number of jobs
Parallel(VersionedSources, usize),
}
impl CompilerSources {
/// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`]
fn filtered<T: ArtifactOutput>(self, cache: &mut ArtifactsCache<T>) -> Self {
fn filtered_sources<T: ArtifactOutput>(
sources: VersionedSources,
cache: &mut ArtifactsCache<T>,
) -> VersionedSources {
sources
.into_iter()
.map(|(solc, (version, sources))| {
let sources = cache.filter(sources, &version);
(solc, (version, sources))
})
.collect()
}
match self {
CompilerSources::Sequential(s) => {
CompilerSources::Sequential(filtered_sources(s, cache))
}
CompilerSources::Parallel(s, j) => {
CompilerSources::Parallel(filtered_sources(s, cache), j)
}
}
}
/// Compiles all the files with `Solc`
fn compile(
self,
settings: &Settings,
paths: &ProjectPathsConfig,
) -> Result<AggregatedCompilerOutput> {
match self {
CompilerSources::Sequential(input) => compile_sequential(input, settings, paths),
CompilerSources::Parallel(input, j) => compile_parallel(input, j, settings, paths),
}
}
}
/// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s
fn compile_sequential(
input: VersionedSources,
settings: &Settings,
paths: &ProjectPathsConfig,
) -> Result<AggregatedCompilerOutput> {
let mut aggregated = AggregatedCompilerOutput::default();
tracing::trace!("compiling {} jobs sequentially", input.len());
for (solc, (version, sources)) in input {
if sources.is_empty() {
// nothing to compile
continue
}
tracing::trace!(
"compiling {} sources with solc \"{}\" {:?}",
sources.len(),
solc.as_ref().display(),
solc.args
);
let input = CompilerInput::with_sources(sources)
.settings(settings.clone())
.normalize_evm_version(&version)
.with_remappings(paths.remappings.clone());
tracing::trace!(
"calling solc `{}` with {} sources {:?}",
version,
input.sources.len(),
input.sources.keys()
);
let output = solc.compile(&input)?;
tracing::trace!("compiled input, output has error: {}", output.has_error());
aggregated.extend(version, output);
}
Ok(aggregated)
}
/// compiles the input set using `num_jobs` threads
fn compile_parallel(
input: VersionedSources,
num_jobs: usize,
settings: &Settings,
paths: &ProjectPathsConfig,
) -> Result<AggregatedCompilerOutput> {
debug_assert!(num_jobs > 1);
tracing::trace!("compile sources in parallel using up to {} solc jobs", num_jobs);
let mut jobs = Vec::with_capacity(input.len());
for (solc, (version, sources)) in input {
if sources.is_empty() {
// nothing to compile
continue
}
let job = CompilerInput::with_sources(sources)
.settings(settings.clone())
.normalize_evm_version(&version)
.with_remappings(paths.remappings.clone());
jobs.push((solc, version, job))
}
// start a rayon threadpool that will execute all `Solc::compile()` processes
let pool = rayon::ThreadPoolBuilder::new().num_threads(num_jobs).build().unwrap();
let outputs = pool.install(move || {
jobs.into_par_iter()
.map(|(solc, version, input)| {
tracing::trace!(
"calling solc `{}` {:?} with {} sources: {:?}",
version,
solc.args,
input.sources.len(),
input.sources.keys()
);
solc.compile(&input).map(|output| (version, output))
})
.collect::<Result<Vec<_>>>()
})?;
let mut aggregated = AggregatedCompilerOutput::default();
aggregated.extend_all(outputs);
Ok(aggregated)
}
#[cfg(test)]
#[cfg(feature = "project-util")]
mod tests {
use super::*;
use crate::{project_util::TempProject, MinimalCombinedArtifacts};
use std::path::PathBuf;
#[allow(unused)]
fn init_tracing() {
let _ = tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.try_init()
.ok();
}
#[test]
fn can_preprocess() {
let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
let project =
Project::builder().paths(ProjectPathsConfig::dapptools(root).unwrap()).build().unwrap();
let compiler = ProjectCompiler::new(&project).unwrap();
let prep = compiler.preprocess().unwrap();
let cache = prep.cache.as_cached().unwrap();
// 3 contracts
assert_eq!(cache.dirty_entries.len(), 3);
assert!(cache.filtered.is_empty());
assert!(cache.cache.is_empty());
let compiled = prep.compile().unwrap();
assert_eq!(compiled.output.contracts.files().count(), 3);
}
#[test]
fn can_detect_cached_files() {
let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib"));
let project = TempProject::<MinimalCombinedArtifacts>::new(paths).unwrap();
let compiled = project.compile().unwrap();
assert!(!compiled.has_compiler_errors());
let inner = project.project();
let compiler = ProjectCompiler::new(inner).unwrap();
let prep = compiler.preprocess().unwrap();
assert!(prep.cache.as_cached().unwrap().dirty_entries.is_empty())
}
}

View File

@@ -1,19 +1,16 @@
 use crate::{
-    artifacts::{CompactContract, CompactContractBytecode, Contract, Settings},
+    artifacts::Settings,
     cache::SOLIDITY_FILES_CACHE_FILENAME,
     error::{Result, SolcError, SolcIoError},
-    hh::HardhatArtifact,
     remappings::Remapping,
     resolver::Graph,
-    utils, CompilerOutput, Source, Sources,
+    utils, Source, Sources,
 };
-use ethers_core::{abi::Abi, types::Bytes};
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use serde::{Deserialize, Serialize};
 use std::{
-    collections::BTreeMap,
-    convert::TryFrom,
     fmt::{self, Formatter},
-    fs, io,
+    fs,
     path::{Component, Path, PathBuf},
 };
@@ -358,27 +355,27 @@ pub struct ProjectPathsConfigBuilder {
 impl ProjectPathsConfigBuilder {
     pub fn root(mut self, root: impl Into<PathBuf>) -> Self {
-        self.root = Some(canonicalized(root));
+        self.root = Some(utils::canonicalized(root));
         self
     }
 
     pub fn cache(mut self, cache: impl Into<PathBuf>) -> Self {
-        self.cache = Some(canonicalized(cache));
+        self.cache = Some(utils::canonicalized(cache));
         self
     }
 
     pub fn artifacts(mut self, artifacts: impl Into<PathBuf>) -> Self {
-        self.artifacts = Some(canonicalized(artifacts));
+        self.artifacts = Some(utils::canonicalized(artifacts));
         self
     }
 
     pub fn sources(mut self, sources: impl Into<PathBuf>) -> Self {
-        self.sources = Some(canonicalized(sources));
+        self.sources = Some(utils::canonicalized(sources));
         self
     }
 
     pub fn tests(mut self, tests: impl Into<PathBuf>) -> Self {
-        self.tests = Some(canonicalized(tests));
+        self.tests = Some(utils::canonicalized(tests));
         self
     }
@@ -389,14 +386,14 @@ impl ProjectPathsConfigBuilder {
     }
 
     pub fn lib(mut self, lib: impl Into<PathBuf>) -> Self {
-        self.libraries.get_or_insert_with(Vec::new).push(canonicalized(lib));
+        self.libraries.get_or_insert_with(Vec::new).push(utils::canonicalized(lib));
         self
     }
 
     pub fn libs(mut self, libs: impl IntoIterator<Item = impl Into<PathBuf>>) -> Self {
         let libraries = self.libraries.get_or_insert_with(Vec::new);
         for lib in libs.into_iter() {
-            libraries.push(canonicalized(lib));
+            libraries.push(utils::canonicalized(lib));
         }
         self
     }
@@ -415,7 +412,10 @@ impl ProjectPathsConfigBuilder {
     }
 
     pub fn build_with_root(self, root: impl Into<PathBuf>) -> ProjectPathsConfig {
-        let root = canonicalized(root);
+        let root = utils::canonicalized(root);
+
+        let libraries = self.libraries.unwrap_or_else(|| ProjectPathsConfig::find_libs(&root));
+
         ProjectPathsConfig {
             cache: self
                 .cache
@@ -425,8 +425,10 @@ impl ProjectPathsConfigBuilder {
                 .unwrap_or_else(|| ProjectPathsConfig::find_artifacts_dir(&root)),
             sources: self.sources.unwrap_or_else(|| ProjectPathsConfig::find_source_dir(&root)),
             tests: self.tests.unwrap_or_else(|| root.join("tests")),
-            libraries: self.libraries.unwrap_or_else(|| ProjectPathsConfig::find_libs(&root)),
-            remappings: self.remappings.unwrap_or_default(),
+            remappings: self
+                .remappings
+                .unwrap_or_else(|| libraries.iter().flat_map(Remapping::find_many).collect()),
+            libraries,
             root,
         }
     }
@@ -442,20 +444,6 @@ impl ProjectPathsConfigBuilder {
     }
 }
 
-/// Returns the same path config but with canonicalized paths.
-///
-/// This will take care of potential symbolic linked directories.
-/// For example, the tempdir library is creating directories hosted under `/var/`, which in OS X
-/// is a symbolic link to `/private/var/`. So when we try to resolve imports and a path is
-/// rooted in a symbolic directory we might end up with different paths for the same file, like
-/// `private/var/.../Dapp.sol` and `/var/.../Dapp.sol`
-///
-/// This canonicalizes all the paths but does not treat non existing dirs as an error
-fn canonicalized(path: impl Into<PathBuf>) -> PathBuf {
-    let path = path.into();
-    utils::canonicalize(&path).unwrap_or(path)
-}
-
 /// The config to use when compiling the contracts
 #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
 pub struct SolcConfig {
@@ -497,229 +485,6 @@ impl SolcConfigBuilder {
     }
 }
 
-pub type Artifacts<T> = BTreeMap<String, BTreeMap<String, T>>;
-
-pub trait Artifact {
-    /// Returns the artifact's `Abi` and bytecode
-    fn into_inner(self) -> (Option<Abi>, Option<Bytes>);
-
-    /// Turns the artifact into a container type for abi, compact bytecode and deployed bytecode
-    fn into_compact_contract(self) -> CompactContract;
-
-    /// Turns the artifact into a container type for abi, full bytecode and deployed bytecode
-    fn into_contract_bytecode(self) -> CompactContractBytecode;
-
-    /// Returns the contents of this type as a single tuple of abi, bytecode and deployed bytecode
-    fn into_parts(self) -> (Option<Abi>, Option<Bytes>, Option<Bytes>);
-}
-
-impl<T> Artifact for T
-where
-    T: Into<CompactContractBytecode> + Into<CompactContract>,
-{
-    fn into_inner(self) -> (Option<Abi>, Option<Bytes>) {
-        let artifact = self.into_compact_contract();
-        (artifact.abi, artifact.bin.and_then(|bin| bin.into_bytes()))
-    }
-
-    fn into_compact_contract(self) -> CompactContract {
-        self.into()
-    }
-
-    fn into_contract_bytecode(self) -> CompactContractBytecode {
-        self.into()
-    }
-
-    fn into_parts(self) -> (Option<Abi>, Option<Bytes>, Option<Bytes>) {
-        self.into_compact_contract().into_parts()
-    }
-}
-
-pub trait ArtifactOutput {
-    /// How Artifacts are stored
-    type Artifact: Artifact + DeserializeOwned;
-
-    /// Handle the compiler output.
-    fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()>;
-
-    /// Returns the file name for the contract's artifact
-    fn output_file_name(name: impl AsRef<str>) -> PathBuf {
-        format!("{}.json", name.as_ref()).into()
-    }
-
-    /// Returns the path to the contract's artifact location based on the contract's file and name
-    ///
-    /// This returns `contract.sol/contract.json` by default
-    fn output_file(contract_file: impl AsRef<Path>, name: impl AsRef<str>) -> PathBuf {
-        let name = name.as_ref();
-        contract_file
-            .as_ref()
-            .file_name()
-            .map(Path::new)
-            .map(|p| p.join(Self::output_file_name(name)))
-            .unwrap_or_else(|| Self::output_file_name(name))
-    }
-
-    /// The inverse of `contract_file_name`
-    ///
-    /// Expected to return the solidity contract's name derived from the file path
-    /// `sources/Greeter.sol` -> `Greeter`
-    fn contract_name(file: impl AsRef<Path>) -> Option<String> {
-        file.as_ref().file_stem().and_then(|s| s.to_str().map(|s| s.to_string()))
-    }
-
-    /// Whether the corresponding artifact of the given contract file and name exists
-    fn output_exists(
-        contract_file: impl AsRef<Path>,
-        name: impl AsRef<str>,
-        root: impl AsRef<Path>,
-    ) -> bool {
-        root.as_ref().join(Self::output_file(contract_file, name)).exists()
-    }
-
-    fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
-        let path = path.as_ref();
-        let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?;
-        let file = io::BufReader::new(file);
-        Ok(serde_json::from_reader(file)?)
-    }
-
-    /// Read the cached artifacts from disk
-    fn read_cached_artifacts<T, I>(files: I) -> Result<BTreeMap<PathBuf, Self::Artifact>>
-    where
-        I: IntoIterator<Item = T>,
-        T: Into<PathBuf>,
-    {
-        let mut artifacts = BTreeMap::default();
-        for path in files.into_iter() {
-            let path = path.into();
-            let artifact = Self::read_cached_artifact(&path)?;
-            artifacts.insert(path, artifact);
-        }
-        Ok(artifacts)
-    }
-
-    /// Convert a contract to the artifact type
-    fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact;
-
-    /// Convert the compiler output into a set of artifacts
-    fn output_to_artifacts(output: CompilerOutput) -> Artifacts<Self::Artifact> {
-        output
-            .contracts
-            .into_iter()
-            .map(|(file, contracts)| {
-                let contracts = contracts
-                    .into_iter()
-                    .map(|(name, c)| {
-                        let contract = Self::contract_to_artifact(&file, &name, c);
-                        (name, contract)
-                    })
-                    .collect();
-                (file, contracts)
-            })
-            .collect()
-    }
-}
-
-/// An Artifacts implementation that uses a compact representation
-///
-/// Creates a single json artifact with
-/// ```json
-/// {
-///   "abi": [],
-///   "bin": "...",
-///   "runtime-bin": "..."
-/// }
-/// ```
-#[derive(Debug, Copy, Clone, Eq, PartialEq)]
-pub struct MinimalCombinedArtifacts;
-
-impl ArtifactOutput for MinimalCombinedArtifacts {
-    type Artifact = CompactContractBytecode;
-
-    fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> {
-        fs::create_dir_all(&layout.artifacts)
-            .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?;
-        for (file, contracts) in output.contracts.iter() {
-            for (name, contract) in contracts {
-                let artifact = Self::output_file(file, name);
-                let file = layout.artifacts.join(artifact);
-                if let Some(parent) = file.parent() {
-                    fs::create_dir_all(parent).map_err(|err| {
-                        SolcError::msg(format!(
-                            "Failed to create artifact parent folder \"{}\": {}",
-                            parent.display(),
-                            err
-                        ))
-                    })?;
-                }
-                if let Some(iropt) = &contract.ir_optimized {
-                    fs::write(&file.with_extension("iropt"), iropt)
-                        .map_err(|err| SolcError::io(err, file.with_extension("iropt")))?
-                }
-                if let Some(ir) = &contract.ir {
-                    fs::write(&file.with_extension("ir"), ir)
-                        .map_err(|err| SolcError::io(err, file.with_extension("ir")))?
-                }
-                if let Some(ewasm) = &contract.ewasm {
-                    fs::write(&file.with_extension("ewasm"), serde_json::to_vec_pretty(&ewasm)?)
-                        .map_err(|err| SolcError::io(err, file.with_extension("ewasm")))?;
-                }
-                if let Some(evm) = &contract.evm {
-                    if let Some(asm) = &evm.assembly {
-                        fs::write(&file.with_extension("asm"), asm)
-                            .map_err(|err| SolcError::io(err, file.with_extension("asm")))?
-                    }
-                }
-                let min = CompactContractBytecode::from(contract.clone());
-                fs::write(&file, serde_json::to_vec_pretty(&min)?)
-                    .map_err(|err| SolcError::io(err, file))?
-            }
-        }
-        Ok(())
-    }
-
-    fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact {
-        Self::Artifact::from(contract)
-    }
-}
-
-/// An Artifacts handler implementation that works the same as `MinimalCombinedArtifacts` but also
-/// supports reading hardhat artifacts if an initial attempt to deserialize an artifact failed
-#[derive(Debug, Copy, Clone, Eq, PartialEq)]
-pub struct MinimalCombinedArtifactsHardhatFallback;
-
-impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback {
-    type Artifact = CompactContractBytecode;
-
-    fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> {
-        MinimalCombinedArtifacts::on_output(output, layout)
-    }
-
-    fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
-        let path = path.as_ref();
-        let content = fs::read_to_string(path).map_err(|err| SolcError::io(err, path))?;
-        if let Ok(a) = serde_json::from_str(&content) {
-            Ok(a)
-        } else {
-            tracing::error!("Failed to deserialize compact artifact");
-            tracing::trace!("Fallback to hardhat artifact deserialization");
-            let artifact = serde_json::from_str::<HardhatArtifact>(&content)?;
-            tracing::trace!("successfully deserialized hardhat artifact");
-            Ok(artifact.into_contract_bytecode())
-        }
-    }
-
-    fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact {
-        MinimalCombinedArtifacts::contract_to_artifact(file, name, contract)
-    }
-}
-
 /// Helper struct for serializing `--allow-paths` arguments to Solc
 ///
 /// From the [Solc docs](https://docs.soliditylang.org/en/v0.8.9/using-the-compiler.html#base-path-and-import-remapping):
@@ -751,19 +516,10 @@ impl fmt::Display for AllowedLibPaths {
     }
 }
 
-impl<T: Into<PathBuf>> TryFrom<Vec<T>> for AllowedLibPaths {
-    type Error = SolcIoError;
-
-    fn try_from(libs: Vec<T>) -> std::result::Result<Self, Self::Error> {
-        let libs = libs
-            .into_iter()
-            .map(|lib| {
-                let path: PathBuf = lib.into();
-                let lib = utils::canonicalize(&path)?;
-                Ok(lib)
-            })
-            .collect::<std::result::Result<Vec<_>, _>>()?;
-        Ok(AllowedLibPaths(libs))
+impl<T: Into<PathBuf>> From<Vec<T>> for AllowedLibPaths {
+    fn from(libs: Vec<T>) -> Self {
+        let libs = libs.into_iter().map(utils::canonicalized).collect();
+        AllowedLibPaths(libs)
     }
 }
@@ -787,13 +543,13 @@ mod tests {
         assert_eq!(ProjectPathsConfig::find_source_dir(root), contracts,);
         assert_eq!(
             ProjectPathsConfig::builder().build_with_root(&root).sources,
-            canonicalized(contracts),
+            utils::canonicalized(contracts),
         );
         std::fs::File::create(&src).unwrap();
         assert_eq!(ProjectPathsConfig::find_source_dir(root), src,);
         assert_eq!(
             ProjectPathsConfig::builder().build_with_root(&root).sources,
-            canonicalized(src),
+            utils::canonicalized(src),
         );
 
         assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,);
@@ -801,13 +557,13 @@ mod tests {
         assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), artifacts,);
         assert_eq!(
             ProjectPathsConfig::builder().build_with_root(&root).artifacts,
-            canonicalized(artifacts),
+            utils::canonicalized(artifacts),
         );
         std::fs::File::create(&out).unwrap();
         assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,);
         assert_eq!(
             ProjectPathsConfig::builder().build_with_root(&root).artifacts,
-            canonicalized(out),
+            utils::canonicalized(out),
         );
 
         assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],);
@@ -815,13 +571,13 @@ mod tests {
         assert_eq!(ProjectPathsConfig::find_libs(root), vec![node_modules.clone()],);
         assert_eq!(
             ProjectPathsConfig::builder().build_with_root(&root).libraries,
-            vec![canonicalized(node_modules)],
+            vec![utils::canonicalized(node_modules)],
        );
         std::fs::File::create(&lib).unwrap();
         assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],);
         assert_eq!(
             ProjectPathsConfig::builder().build_with_root(&root).libraries,
-            vec![canonicalized(lib)],
+            vec![utils::canonicalized(lib)],
         );
     }
 }

View File

@@ -34,6 +34,9 @@ pub enum SolcError {
     #[error("{0}")]
     Message(String),
 
+    #[error("No artifact found for `{}:{}`", .0.display(), .1)]
+    ArtifactNotFound(PathBuf, String),
+
     #[cfg(feature = "project-util")]
     #[error(transparent)]
     FsExtra(#[from] fs_extra::error::Error),

View File

@@ -5,12 +5,11 @@ use crate::{
         Bytecode, BytecodeObject, CompactContract, CompactContractBytecode, Contract,
         ContractBytecode, DeployedBytecode, Offsets,
     },
-    error::{Result, SolcError},
-    ArtifactOutput, CompilerOutput, ProjectPathsConfig,
+    ArtifactOutput,
 };
 use ethers_core::abi::Abi;
 use serde::{Deserialize, Serialize};
-use std::{collections::BTreeMap, fs};
+use std::collections::btree_map::BTreeMap;
 
 const HH_ARTIFACT_VERSION: &str = "hh-sol-artifact-1";
@@ -85,30 +84,6 @@ pub struct HardhatArtifacts;
 impl ArtifactOutput for HardhatArtifacts {
     type Artifact = HardhatArtifact;
 
-    fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> {
-        fs::create_dir_all(&layout.artifacts)
-            .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?;
-        for (file, contracts) in output.contracts.iter() {
-            for (name, contract) in contracts {
-                let artifact = Self::output_file(file, name);
-                let artifact_file = layout.artifacts.join(artifact);
-                if let Some(parent) = artifact_file.parent() {
-                    fs::create_dir_all(parent).map_err(|err| {
-                        SolcError::msg(format!(
-                            "Failed to create artifact parent folder \"{}\": {}",
-                            parent.display(),
-                            err
-                        ))
-                    })?;
-                }
-                let artifact = Self::contract_to_artifact(file, name, contract.clone());
-                fs::write(&artifact_file, serde_json::to_vec_pretty(&artifact)?)
-                    .map_err(|err| SolcError::io(err, artifact_file))?
-            }
-        }
-        Ok(())
-    }
-
     fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact {
         let (bytecode, link_references, deployed_bytecode, deployed_link_references) =
             if let Some(evm) = contract.evm {

View File

@@ -1,46 +1,38 @@
-#![doc = include_str ! ("../README.md")]
 pub mod artifacts;
 pub mod sourcemap;
 
 pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion};
 
-use std::collections::btree_map::Entry;
+mod artifact_output;
 pub mod cache;
 pub mod hh;
+pub use artifact_output::*;
 
 mod resolver;
 pub use hh::{HardhatArtifact, HardhatArtifacts};
 pub use resolver::Graph;
 
 mod compile;
-pub use compile::*;
+pub use compile::{
+    output::{AggregatedCompilerOutput, ProjectCompileOutput},
+    *,
+};
 
 mod config;
-pub use config::{
-    AllowedLibPaths, Artifact, ArtifactOutput, MinimalCombinedArtifacts, PathStyle,
-    ProjectPathsConfig, SolcConfig,
-};
+pub use config::{AllowedLibPaths, PathStyle, ProjectPathsConfig, SolcConfig};
 
 pub mod remappings;
-
-use crate::{artifacts::Source, cache::SolFilesCache};
+use crate::artifacts::Source;
 
 pub mod error;
 pub mod utils;
 
 use crate::{
-    artifacts::Sources,
-    cache::PathMap,
+    artifacts::{Contract, Sources},
     error::{SolcError, SolcIoError},
 };
 use error::Result;
 use std::{
-    borrow::Cow,
-    collections::BTreeMap,
-    convert::TryInto,
-    fmt, fs,
     marker::PhantomData,
     path::{Path, PathBuf},
 };
@@ -72,6 +64,8 @@ pub struct Project<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> {
     pub allowed_lib_paths: AllowedLibPaths,
     /// Maximum number of `solc` processes to run simultaneously.
     solc_jobs: usize,
+    /// Offline mode, if set, network access (download solc) is disallowed
+    pub offline: bool,
 }
 
 impl Project {
@@ -120,43 +114,29 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
         &self.paths.cache
     }
 
+    /// Returns the root directory of the project
+    pub fn root(&self) -> &PathBuf {
+        &self.paths.root
+    }
+
+    /// Applies the configured settings to the given `Solc`
+    fn configure_solc(&self, mut solc: Solc) -> Solc {
+        if self.allowed_lib_paths.0.is_empty() {
+            solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string());
+        }
+        solc
+    }
+
     /// Sets the maximum number of parallel `solc` processes to run simultaneously.
+    ///
+    /// # Panics
+    ///
+    /// if `jobs == 0`
     pub fn set_solc_jobs(&mut self, jobs: usize) {
         assert!(jobs > 0);
         self.solc_jobs = jobs;
     }
 
-    #[tracing::instrument(skip_all, name = "Project::write_cache_file")]
-    fn write_cache_file(
-        &self,
-        sources: Sources,
-        artifacts: Vec<(PathBuf, Vec<String>)>,
-    ) -> Result<()> {
-        tracing::trace!("inserting {} sources in file cache", sources.len());
-        let mut cache = SolFilesCache::builder()
-            .root(&self.paths.root)
-            .solc_config(self.solc_config.clone())
-            .insert_files(sources, Some(self.paths.cache.clone()))?;
-        tracing::trace!("source files inserted");
-
-        // add the artifacts for each file to the cache entry
-        for (file, artifacts) in artifacts {
-            if let Some(entry) = cache.files.get_mut(&file) {
-                entry.artifacts = artifacts;
-            }
-        }
-
-        if let Some(cache_dir) = self.paths.cache.parent() {
-            tracing::trace!("creating cache file parent directory \"{}\"", cache_dir.display());
-            fs::create_dir_all(cache_dir).map_err(|err| SolcError::io(err, cache_dir))?
-        }
-
-        tracing::trace!("writing cache file to \"{}\"", self.paths.cache.display());
-        cache.write(&self.paths.cache)?;
-
-        Ok(())
-    }
-
     /// Returns all sources found under the project's configured sources path
     #[tracing::instrument(skip_all, fields(name = "sources"))]
     pub fn sources(&self) -> Result<Sources> {
@@ -187,36 +167,14 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
         println!("cargo:rerun-if-changed={}", self.paths.sources.display())
     }
 
-    /// Attempts to read all unique libraries that are used as imports like "hardhat/console.sol"
-    fn resolved_libraries(
-        &self,
-        sources: &Sources,
-    ) -> Result<BTreeMap<PathBuf, (Source, PathBuf)>> {
-        let mut libs = BTreeMap::default();
-        for source in sources.values() {
-            for import in source.parse_imports() {
-                if let Some(lib) = utils::resolve_library(&self.paths.libraries, import) {
-                    if let Entry::Vacant(entry) = libs.entry(import.into()) {
-                        tracing::trace!(
-                            "resolved library import \"{}\" at \"{}\"",
-                            import,
-                            lib.display()
-                        );
-                        entry.insert((Source::read(&lib)?, lib));
-                    }
-                }
-            }
-        }
-        Ok(libs)
-    }
-
     /// Attempts to compile the contracts found at the configured source location, see
     /// `ProjectPathsConfig::sources`.
     ///
     /// NOTE: this does not check if the contracts were successfully compiled, see
     /// `CompilerOutput::has_error` instead.
+    ///
     /// NB: If the `svm` feature is enabled, this function will automatically detect
-    /// solc versions across files.
+    /// solc versions across files, see [`Self::svm_compile()`]
     ///
     /// # Example
     ///
@@ -238,122 +196,37 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
             return self.svm_compile(sources)
         }
 
-        let mut solc = self.solc.clone();
-        if !self.allowed_lib_paths.0.is_empty() {
-            solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string());
-        }
-
-        let sources = Graph::resolve_sources(&self.paths, sources)?.into_sources();
+        let solc = self.configure_solc(self.solc.clone());
 
         self.compile_with_version(&solc, sources)
     }
 
-    #[cfg(all(feature = "svm", feature = "async"))]
-    #[tracing::instrument(skip(self, sources))]
-    pub fn svm_compile(&self, sources: Sources) -> Result<ProjectCompileOutput<Artifacts>> {
-        let graph = Graph::resolve_sources(&self.paths, sources)?;
-        let sources_by_version =
-            graph.into_sources_by_version(!self.auto_detect)?.get(&self.allowed_lib_paths)?;
-
-        // run the compilation step for each version
-        let compiled = if self.solc_jobs > 1 && sources_by_version.len() > 1 {
-            self.compile_many(sources_by_version)?
-        } else {
-            self.compile_sources(sources_by_version)?
-        };
-        tracing::trace!("compiled all sources");
-
-        Ok(compiled)
-    }
-
-    /// Compiles all sources with their intended `Solc` version sequentially.
-    #[cfg(all(feature = "svm", feature = "async"))]
-    fn compile_sources(
-        &self,
-        sources_by_version: BTreeMap<Solc, BTreeMap<PathBuf, Source>>,
-    ) -> Result<ProjectCompileOutput<Artifacts>> {
-        tracing::trace!("compiling sources using a single solc job");
-        let mut compiled =
-            ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone());
-        for (solc, sources) in sources_by_version {
-            tracing::trace!(
-                "compiling {} sources with solc \"{}\"",
-                sources.len(),
-                solc.as_ref().display()
-            );
-            compiled.extend(self.compile_with_version(&solc, sources)?);
-        }
-        Ok(compiled)
-    }
-
-    /// Compiles all sources with their intended `Solc` version in parallel.
-    ///
-    /// This runs `Self::solc_jobs` parallel `solc` jobs at most.
+    /// Compiles a set of contracts using `svm` managed solc installs
+    ///
+    /// This will autodetect the appropriate `Solc` version(s) to use when compiling the provided
+    /// `Sources`. Solc auto-detection follows semver rules, see also
+    /// [`crate::resolver::Graph::get_input_node_versions()`]
+    ///
+    /// # Errors
+    ///
+    /// This returns an error if contracts in the `Sources` set are incompatible (violate semver
+    /// rules) with their imports, for example source contract `A(=0.8.11)` imports dependency
+    /// `C(<0.8.0)`, which are incompatible.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use ethers_solc::{artifacts::Source, Project, utils};
+    /// # fn demo(project: Project) {
+    /// let project = Project::builder().build().unwrap();
+    /// let files = utils::source_files("./src");
+    /// let sources = Source::read_all(files).unwrap();
+    /// let output = project.svm_compile(sources).unwrap();
+    /// # }
+    /// ```
     #[cfg(all(feature = "svm", feature = "async"))]
-    fn compile_many(
-        &self,
-        sources_by_version: BTreeMap<Solc, BTreeMap<PathBuf, Source>>,
-    ) -> Result<ProjectCompileOutput<Artifacts>> {
-        tracing::trace!("compile sources in parallel using {} solc jobs", self.solc_jobs);
-        let mut compiled =
-            ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone());
-        let mut paths = PathMap::default();
-        let mut jobs = Vec::with_capacity(sources_by_version.len());
-
-        let mut all_sources = BTreeMap::default();
-        let mut all_artifacts = Vec::with_capacity(sources_by_version.len());
-
-        // preprocess all sources
-        for (solc, sources) in sources_by_version {
-            match self.preprocess_sources(sources)? {
-                PreprocessedJob::Unchanged(artifacts) => {
-                    compiled.extend(ProjectCompileOutput::from_unchanged(artifacts));
-                }
-                PreprocessedJob::Items(sources, map, cached_artifacts) => {
-                    tracing::trace!("cached artifacts: \"{:?}\"", cached_artifacts.keys());
-                    tracing::trace!("compile sources: \"{:?}\"", sources.keys());
-
-                    compiled.extend_artifacts(cached_artifacts);
-                    // replace absolute path with source name to make solc happy
-                    let sources = map.set_source_names(sources);
-                    paths.extend(map);
-
-                    let input = CompilerInput::with_sources(sources)
-                        .settings(self.solc_config.settings.clone())
-                        .normalize_evm_version(&solc.version()?)
-                        .with_remappings(self.paths.remappings.clone());
-
-                    jobs.push((solc, input))
-                }
-            };
-        }
-
-        tracing::trace!("execute {} compile jobs in parallel", jobs.len());
-
-        let outputs = tokio::runtime::Runtime::new()
-            .unwrap()
-            .block_on(Solc::compile_many(jobs, self.solc_jobs));
-
-        for (res, _, input) in outputs.into_outputs() {
-            let output = res?;
-
-            if !output.has_error() {
-                if self.cached {
-                    // get all contract names of the files and map them to the disk file
-                    all_sources.extend(paths.set_disk_paths(input.sources));
-                    all_artifacts.extend(paths.get_artifacts(&output.contracts));
-                }
-
-                if !self.no_artifacts {
-                    Artifacts::on_output(&output, &self.paths)?;
-                }
-            }
-            compiled.extend_output(output);
-        }
-
-        // write the cache file
-        if self.cached {
-            self.write_cache_file(all_sources, all_artifacts)?;
-        }
-
-        Ok(compiled)
+    pub fn svm_compile(&self, sources: Sources) -> Result<ProjectCompileOutput<Artifacts>> {
+        project::ProjectCompiler::with_sources(self, sources)?.compile()
     }
 
     /// Compiles the given source files with the exact `Solc` executable
@@ -384,121 +257,33 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
         solc: &Solc,
         sources: Sources,
     ) -> Result<ProjectCompileOutput<Artifacts>> {
-        let (sources, paths, cached_artifacts) = match self.preprocess_sources(sources)? {
-            PreprocessedJob::Unchanged(artifacts) => {
-                return Ok(ProjectCompileOutput::from_unchanged(artifacts))
-            }
-            PreprocessedJob::Items(a, b, c) => (a, b, c),
-        };
-
-        let version = solc.version()?;
-        tracing::trace!(
-            "compiling {} files with {}. Using {} cached files",
-            sources.len(),
-            version,
-            cached_artifacts.len()
-        );
-        tracing::trace!("cached artifacts: \"{:?}\"", cached_artifacts.keys());
-        tracing::trace!("compile sources: \"{:?}\"", sources.keys());
-
-        // replace absolute path with source name to make solc happy
-        let sources = paths.set_source_names(sources);
-
-        let input = CompilerInput::with_sources(sources)
-            .settings(self.solc_config.settings.clone())
-            .normalize_evm_version(&version)
-            .with_remappings(self.paths.remappings.clone());
-
-        tracing::trace!("calling solc with {} sources", input.sources.len());
-        let output = solc.compile(&input)?;
-        tracing::trace!("compiled input, output has error: {}", output.has_error());
-
-        if output.has_error() {
-            return Ok(ProjectCompileOutput::from_compiler_output(
-                output,
-                self.ignored_error_codes.clone(),
-            ))
-        }
-
-        if self.cached {
-            // get all contract names of the files and map them to the disk file
-            let artifacts = paths.get_artifacts(&output.contracts);
-            // reapply to disk paths
-            let sources = paths.set_disk_paths(input.sources);
-            // create cache file
-            self.write_cache_file(sources, artifacts)?;
-        }
-
-        // TODO: There seems to be some type redundancy here, c.f. discussion with @mattsse
-        if !self.no_artifacts {
-            Artifacts::on_output(&output, &self.paths)?;
-        }
-
-        Ok(ProjectCompileOutput::from_compiler_output_and_cache(
-            output,
-            cached_artifacts,
-            self.ignored_error_codes.clone(),
-        ))
-    }
-
-    /// Preprocesses the given source files by resolving their libs and check against cache if
-    /// configured
-    fn preprocess_sources(&self, mut sources: Sources) -> Result<PreprocessedJob<Artifacts>> {
-        tracing::trace!("start preprocessing {} sources files", sources.len());
-
-        // keeps track of source names / disk paths
-        let mut paths = PathMap::default();
-
-        tracing::trace!("start resolving libraries");
-        for (import, (source, path)) in self.resolved_libraries(&sources)? {
-            // inserting with absolute path here and keep track of the source name <-> path mappings
-            sources.insert(path.clone(), source);
-            paths.path_to_source_name.insert(path.clone(), import.clone());
-            paths.source_name_to_path.insert(import, path);
-        }
-        tracing::trace!("resolved all libraries");
-
-        // If there's a cache set, filter to only re-compile the files which were changed
-        let (sources, cached_artifacts) = if self.cached && self.paths.cache.exists() {
-            tracing::trace!("start reading solfiles cache for incremental compilation");
-            let mut cache = SolFilesCache::read(&self.paths.cache)?;
-            cache.remove_missing_files();
-            let changed_files = cache.get_changed_or_missing_artifacts_files::<Artifacts>(
-                sources,
-                Some(&self.solc_config),
-                &self.paths,
-            );
-            tracing::trace!("detected {} changed files", changed_files.len());
-            cache.remove_changed_files(&changed_files);
-
-            let cached_artifacts = if self.paths.artifacts.exists() {
-                tracing::trace!("reading artifacts from cache..");
-                let artifacts = cache.read_artifacts::<Artifacts>(&self.paths.artifacts)?;
-                tracing::trace!("read {} artifacts from cache", artifacts.len());
-                artifacts
-            } else {
-                BTreeMap::default()
-            };
-
-            // if nothing changed and all artifacts still exist
-            if changed_files.is_empty() {
-                tracing::trace!(
-                    "unchanged source files, reusing artifacts {:?}",
-                    cached_artifacts.keys()
-                );
-                return Ok(PreprocessedJob::Unchanged(cached_artifacts))
-            }
-            // There are changed files and maybe some cached files
-            (changed_files, cached_artifacts)
-        } else {
-            (sources, BTreeMap::default())
-        };
-
-        Ok(PreprocessedJob::Items(sources, paths, cached_artifacts))
+        project::ProjectCompiler::with_sources_and_solc(
+            self,
+            sources,
+            self.configure_solc(solc.clone()),
+        )?
+        .compile()
     }
 
     /// Removes the project's artifacts and cache file
     ///
     /// If the cache file was the only file in the folder, this also removes the empty folder.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use ethers_solc::Project;
+    /// # fn demo(project: Project) {
+    /// let project = Project::builder().build().unwrap();
+    /// let _ = project.compile().unwrap();
+    /// assert!(project.artifacts_path().exists());
+    /// assert!(project.cache_path().exists());
+    ///
+    /// project.cleanup();
+    /// assert!(!project.artifacts_path().exists());
+    /// assert!(!project.cache_path().exists());
+    /// # }
+    /// ```
     pub fn cleanup(&self) -> std::result::Result<(), SolcIoError> {
         tracing::trace!("clean up project");
         if self.cache_path().exists() {
@@ -526,24 +311,19 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
         Ok(())
     }
 
-    /// Flattens the target file into a single string suitable for verification
+    /// Flattens the target solidity file into a single string suitable for verification.
     ///
     /// This method uses a dependency graph to resolve imported files and substitute
     /// import directives with the contents of target files. It will strip the pragma
-    /// version directives and SPDX license identifiers from imported files.
+    /// version directives and SPDX license identifiers from all imported files.
     ///
-    /// NOTE: the SPDX license identifier will be removed from the imported file
+    /// NB: the SPDX license identifier will be removed from the imported file
    /// only if it is found at the beginning of the file.
     pub fn flatten(&self, target: &Path) -> Result<String> {
         self.paths.flatten(target)
     }
 }
 
-enum PreprocessedJob<T: ArtifactOutput> {
-    Unchanged(BTreeMap<PathBuf, T::Artifact>),
-    Items(Sources, PathMap, BTreeMap<PathBuf, T::Artifact>),
-}
-
 pub struct ProjectBuilder<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> {
     /// The layout of the
     paths: Option<ProjectPathsConfig>,
@@ -557,6 +337,8 @@ pub struct ProjectBuilder<Artifacts: ArtifactOutput = MinimalCombinedArtifacts>
     no_artifacts: bool,
     /// Whether automatic solc version detection is enabled
     auto_detect: bool,
+    /// Use offline mode
+    offline: bool,
     artifacts: PhantomData<Artifacts>,
     /// Which error codes to ignore
     pub ignored_error_codes: Vec<u64>,
@@ -611,6 +393,21 @@ impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
         self
     }
 
+    /// Activates offline mode
+    ///
+    /// Prevents possible network access to download/check solc installs
+    #[must_use]
+    pub fn offline(self) -> Self {
+        self.set_offline(true)
+    }
+
+    /// Sets the offline status
+    #[must_use]
+    pub fn set_offline(mut self, offline: bool) -> Self {
+        self.offline = offline;
+        self
+    }
+
     /// Disables writing artifacts to disk
     #[must_use]
     pub fn no_artifacts(self) -> Self {
@@ -667,6 +464,7 @@ impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
             ignored_error_codes,
             allowed_paths,
             solc_jobs,
+            offline,
             ..
         } = self;
         ProjectBuilder {
@@ -676,6 +474,7 @@ impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
             cached,
             no_artifacts,
             auto_detect,
+            offline,
             artifacts: PhantomData::default(),
             ignored_error_codes,
             allowed_paths,
@@ -715,13 +514,14 @@ impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
             ignored_error_codes,
             mut allowed_paths,
             solc_jobs,
+            offline,
         } = self;
-        let paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?;
 
         let solc = solc.unwrap_or_default();
         let solc_config = solc_config.unwrap_or_else(|| SolcConfig::builder().build());
+        let paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?;
 
         if allowed_paths.is_empty() {
             // allow every contract under root by default
             allowed_paths.push(paths.root.clone())
@@ -736,8 +536,9 @@ impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
             auto_detect,
             artifacts,
             ignored_error_codes,
-            allowed_lib_paths: allowed_paths.try_into()?,
+            allowed_lib_paths: allowed_paths.into(),
             solc_jobs: solc_jobs.unwrap_or_else(::num_cpus::get),
+            offline,
         })
     }
 }
@@ -751,6 +552,7 @@ impl<Artifacts: ArtifactOutput> Default for ProjectBuilder<Artifacts> {
             cached: true,
             no_artifacts: false,
             auto_detect: true,
+            offline: false,
             artifacts: PhantomData::default(),
             ignored_error_codes: Vec::new(),
             allowed_paths: vec![],
@@ -759,205 +561,18 @@ impl<Artifacts: ArtifactOutput> Default for ProjectBuilder<Artifacts> {
}
}
-/// The outcome of `Project::compile`
-#[derive(Debug, Clone, PartialEq, Default)]
-pub struct ProjectCompileOutput<T: ArtifactOutput> {
-/// If solc was invoked multiple times in `Project::compile` then this contains a merged
-/// version of all `CompilerOutput`s. If solc was called only once then `compiler_output`
-/// holds the `CompilerOutput` of that call.
-compiler_output: Option<CompilerOutput>,
-/// All artifacts that were read from cache
-artifacts: BTreeMap<PathBuf, T::Artifact>,
-ignored_error_codes: Vec<u64>,
-}
-impl<T: ArtifactOutput> ProjectCompileOutput<T> {
-pub fn with_ignored_errors(ignored_errors: Vec<u64>) -> Self {
-Self {
-compiler_output: None,
-artifacts: Default::default(),
-ignored_error_codes: ignored_errors,
-}
-}
-pub fn from_unchanged(artifacts: BTreeMap<PathBuf, T::Artifact>) -> Self {
-Self { compiler_output: None, artifacts, ignored_error_codes: vec![] }
-}
-pub fn from_compiler_output(
-compiler_output: CompilerOutput,
-ignored_error_codes: Vec<u64>,
-) -> Self {
-Self {
-compiler_output: Some(compiler_output),
-artifacts: Default::default(),
-ignored_error_codes,
-}
-}
-pub fn from_compiler_output_and_cache(
-compiler_output: CompilerOutput,
-cache: BTreeMap<PathBuf, T::Artifact>,
-ignored_error_codes: Vec<u64>,
-) -> Self {
-Self { compiler_output: Some(compiler_output), artifacts: cache, ignored_error_codes }
-}
-/// Get the (merged) solc compiler output
-/// ```no_run
-/// use std::collections::BTreeMap;
-/// use ethers_solc::artifacts::Contract;
-/// use ethers_solc::Project;
-///
-/// let project = Project::builder().build().unwrap();
-/// let contracts: BTreeMap<String, Contract> =
-/// project.compile().unwrap().output().contracts_into_iter().collect();
-/// ```
-pub fn output(self) -> CompilerOutput {
-self.compiler_output.unwrap_or_default()
-}
-/// Combine two outputs
-pub fn extend(&mut self, compiled: ProjectCompileOutput<T>) {
-let ProjectCompileOutput { compiler_output, artifacts, .. } = compiled;
-self.artifacts.extend(artifacts);
-if let Some(output) = compiler_output {
-self.extend_output(output);
-}
-}
-pub fn extend_output(&mut self, compiled: CompilerOutput) {
-if let Some(output) = self.compiler_output.as_mut() {
-output.errors.extend(compiled.errors);
-output.sources.extend(compiled.sources);
-output.contracts.extend(compiled.contracts);
-} else {
-self.compiler_output = Some(compiled);
-}
-}
-pub fn extend_artifacts(&mut self, artifacts: BTreeMap<PathBuf, T::Artifact>) {
-self.artifacts.extend(artifacts);
-}
-/// Whether this type does not contain compiled contracts
-pub fn is_unchanged(&self) -> bool {
-!self.has_compiled_contracts()
-}
-/// Whether this type has a compiler output
-pub fn has_compiled_contracts(&self) -> bool {
-if let Some(output) = self.compiler_output.as_ref() {
-!output.contracts.is_empty()
-} else {
-false
-}
-}
-/// Whether there were errors
-pub fn has_compiler_errors(&self) -> bool {
-self.compiler_output.as_ref().map(|o| o.has_error()).unwrap_or_default()
-}
-/// Whether there were warnings
-pub fn has_compiler_warnings(&self) -> bool {
-self.compiler_output
-.as_ref()
-.map(|o| o.has_warning(&self.ignored_error_codes))
-.unwrap_or_default()
-}
-/// Finds the first contract with the given name and removes it from the set
-pub fn remove(&mut self, contract_name: impl AsRef<str>) -> Option<T::Artifact> {
-let contract_name = contract_name.as_ref();
-if let Some(output) = self.compiler_output.as_mut() {
-if let contract @ Some(_) = output.contracts.iter_mut().find_map(|(file, c)| {
-c.remove(contract_name).map(|c| T::contract_to_artifact(file, contract_name, c))
-}) {
-return contract
-}
-}
-let key = self
-.artifacts
-.iter()
-.find_map(|(path, _)| {
-T::contract_name(path).filter(|name| name == contract_name).map(|_| path)
-})?
-.clone();
-self.artifacts.remove(&key)
-}
-}
-impl<T: ArtifactOutput> ProjectCompileOutput<T>
-where
-T::Artifact: Clone,
-{
-/// Finds the first contract with the given name
-pub fn find(&self, contract_name: impl AsRef<str>) -> Option<Cow<T::Artifact>> {
-let contract_name = contract_name.as_ref();
-if let Some(output) = self.compiler_output.as_ref() {
-if let contract @ Some(_) = output.contracts.iter().find_map(|(file, contracts)| {
-contracts
-.get(contract_name)
-.map(|c| T::contract_to_artifact(file, contract_name, c.clone()))
-.map(Cow::Owned)
-}) {
-return contract
-}
-}
-self.artifacts.iter().find_map(|(path, art)| {
-T::contract_name(path).filter(|name| name == contract_name).map(|_| Cow::Borrowed(art))
-})
-}
-}
-impl<T: ArtifactOutput + 'static> ProjectCompileOutput<T> {
-/// All artifacts together with their contract file name and name `<file name>:<name>`
-///
-/// # Example
-///
-/// ```no_run
-/// use std::collections::BTreeMap;
-/// use ethers_solc::artifacts::CompactContractBytecode;
-/// use ethers_solc::Project;
-///
-/// let project = Project::builder().build().unwrap();
-/// let contracts: BTreeMap<String, CompactContractBytecode> = project.compile().unwrap().into_artifacts().collect();
-/// ```
-pub fn into_artifacts(mut self) -> Box<dyn Iterator<Item = (String, T::Artifact)>> {
-let artifacts = self.artifacts.into_iter().filter_map(|(path, art)| {
-T::contract_name(&path).map(|name| {
-(format!("{}:{}", path.file_name().unwrap().to_string_lossy(), name), art)
-})
-});
-let artifacts: Box<dyn Iterator<Item = (String, T::Artifact)>> = if let Some(output) =
-self.compiler_output.take()
-{
-Box::new(artifacts.chain(T::output_to_artifacts(output).into_values().flatten().map(
-|(name, artifact)| {
-(format!("{}:{}", T::output_file_name(&name).display(), name), artifact)
-},
-)))
-} else {
-Box::new(artifacts)
-};
-artifacts
-}
-}
-impl<T: ArtifactOutput> fmt::Display for ProjectCompileOutput<T> {
-fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-if let Some(output) = self.compiler_output.as_ref() {
-output.diagnostics(&self.ignored_error_codes).fmt(f)
-} else {
-f.write_str("Nothing to compile")
-}
-}
-}
+impl<Artifacts: ArtifactOutput> ArtifactOutput for Project<Artifacts> {
+type Artifact = Artifacts::Artifact;
+
+fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact {
+Artifacts::contract_to_artifact(file, name, contract)
+}
+}
#[cfg(test)]
mod tests {
use crate::remappings::Remapping;
#[test]
#[cfg(all(feature = "svm", feature = "async"))]
fn test_build_all_versions() {
@@ -973,7 +588,7 @@ mod tests {
assert!(!compiled.has_compiler_errors());
let contracts = compiled.output().contracts;
// Contracts A to F
-assert_eq!(contracts.keys().count(), 5);
+assert_eq!(contracts.contracts().count(), 5);
}
#[test]
@@ -988,6 +603,11 @@ mod tests {
.sources(root.join("src"))
.lib(root.join("lib1"))
.lib(root.join("lib2"))
.remappings(
Remapping::find_many(&root.join("lib1"))
.into_iter()
.chain(Remapping::find_many(&root.join("lib2"))),
)
.build()
.unwrap();
let project = Project::builder()
@@ -1000,7 +620,7 @@ mod tests {
let compiled = project.compile().unwrap();
assert!(!compiled.has_compiler_errors());
let contracts = compiled.output().contracts;
-assert_eq!(contracts.keys().count(), 3);
+assert_eq!(contracts.contracts().count(), 3);
}
#[test]
@@ -1013,12 +633,13 @@ mod tests {
.root(&root)
.sources(root.join("src"))
.lib(root.join("lib"))
.remappings(Remapping::find_many(&root.join("lib")))
.build()
.unwrap();
let project = Project::builder().no_artifacts().paths(paths).ephemeral().build().unwrap();
let compiled = project.compile().unwrap();
assert!(!compiled.has_compiler_errors());
let contracts = compiled.output().contracts;
-assert_eq!(contracts.keys().count(), 2);
+assert_eq!(contracts.contracts().count(), 2);
}
}


@@ -77,6 +77,21 @@ impl<T: ArtifactOutput> TempProject<T> {
&mut self.project_mut().paths
}
/// Returns the path to the artifacts directory
pub fn artifacts_path(&self) -> &PathBuf {
&self.paths().artifacts
}
/// Returns the path to the sources directory
pub fn sources_path(&self) -> &PathBuf {
&self.paths().sources
}
/// Returns the path to the cache file
pub fn cache_path(&self) -> &PathBuf {
&self.paths().cache
}
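These accessors replace repeated `&project.paths().artifacts`-style chains in the tests further below. A hedged sketch of the intended flow (assuming `TempProject::compile` forwards to the inner `Project`):

use ethers_solc::{cache::SolFilesCache, project_util::TempProject, MinimalCombinedArtifacts};

fn cache_roundtrip() -> Result<(), Box<dyn std::error::Error>> {
    let project = TempProject::<MinimalCombinedArtifacts>::dapptools()?;
    project.compile()?;
    // the three accessors replace `&project.paths().{artifacts,sources,cache}` chains
    let _cache = SolFilesCache::read(project.cache_path())?;
    println!("artifacts in {}", project.artifacts_path().display());
    Ok(())
}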
/// The root path of the temporary workspace
pub fn root(&self) -> &Path {
self.project().paths.root.as_path()


@@ -38,20 +38,71 @@
use solang_parser::pt::{Import, Loc, SourceUnitPart};
use crate::{error::Result, utils, ProjectPathsConfig, Solc, Source, Sources};
/// The underlying edges of the graph, which only contain the raw relationship data.
///
/// This is kept separate from the `Graph` as the `Node`s get consumed when the `Solc` to `Sources`
/// set is determined.
#[derive(Debug, Clone)]
pub struct GraphEdges {
/// The indices of `edges` correspond to the `nodes`. That is, `edges[0]`
/// is the set of outgoing edges for `nodes[0]`.
edges: Vec<Vec<usize>>,
/// maps a solidity file to an index, for fast lookup.
indices: HashMap<PathBuf, usize>,
/// reverse of `indices` for reverse lookup
rev_indices: HashMap<usize, PathBuf>,
/// the identified version requirement of a file
versions: HashMap<usize, Option<VersionReq>>,
/// The number of input files we started with; corresponds to `let input_files =
/// nodes[..num_input_files]`.
///
/// Combined with the `indices`, this lets us determine whether a file was originally added to
/// the graph as an input or was added as a resolved import, see [`Self::is_input_file()`]
num_input_files: usize,
}
impl GraphEdges {
/// Returns a list of nodes the given node index points to for the given kind.
pub fn imported_nodes(&self, from: usize) -> &[usize] {
&self.edges[from]
}
/// Returns all files imported by the given file
pub fn imports(&self, file: impl AsRef<Path>) -> HashSet<&PathBuf> {
if let Some(start) = self.indices.get(file.as_ref()).copied() {
NodesIter::new(start, self).skip(1).map(move |idx| &self.rev_indices[&idx]).collect()
} else {
HashSet::new()
}
}
/// Returns true if the `file` was originally included when the graph was first created and not
/// added when all `imports` were resolved
pub fn is_input_file(&self, file: impl AsRef<Path>) -> bool {
if let Some(idx) = self.indices.get(file.as_ref()).copied() {
idx < self.num_input_files
} else {
false
}
}
/// Returns the `VersionReq` for the given file
pub fn version_requirement(&self, file: impl AsRef<Path>) -> Option<&VersionReq> {
self.indices
.get(file.as_ref())
.and_then(|idx| self.versions.get(idx))
.and_then(|v| v.as_ref())
}
}
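Because `GraphEdges` is kept separate, import relations stay queryable after the nodes have been consumed. A sketch under the assumption that `Graph` is re-exported at the crate root (paths illustrative):

use ethers_solc::{Graph, ProjectPathsConfig};

fn inspect_edges() -> Result<(), Box<dyn std::error::Error>> {
    let paths = ProjectPathsConfig::dapptools("/path/to/project")?;
    let graph = Graph::resolve(&paths)?;
    // splitting off the edges consumes the nodes but keeps the relationship data
    let (sources, edges) = graph.into_sources();
    for file in sources.keys() {
        if edges.is_input_file(file) {
            println!("{} pulls in {} imports", file.display(), edges.imports(file).len());
        }
    }
    Ok(())
}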
/// Represents a fully-resolved solidity dependency graph. Each node in the graph
/// is a file and edges represent dependencies between them.
/// See also https://docs.soliditylang.org/en/latest/layout-of-source-files.html?highlight=import#importing-other-source-files
#[derive(Debug)]
pub struct Graph {
nodes: Vec<Node>,
-/// The indices of `edges` correspond to the `nodes`. That is, `edges[0]`
-/// is the set of outgoing edges for `nodes[0]`.
-edges: Vec<Vec<usize>>,
-/// index maps for a solidity file to an index, for fast lookup.
-indices: HashMap<PathBuf, usize>,
-/// with how many input files we started with, corresponds to `let input_files =
-/// nodes[..num_input_files]`.
-num_input_files: usize,
+/// relationship of the nodes
+edges: GraphEdges,
/// the root of the project this graph represents
#[allow(unused)]
root: PathBuf,
@@ -60,15 +111,19 @@ pub struct Graph {
impl Graph {
/// Returns a list of nodes the given node index points to for the given kind.
pub fn imported_nodes(&self, from: usize) -> &[usize] {
-&self.edges[from]
+self.edges.imported_nodes(from)
}
/// Returns all the resolved files and their index in the graph
pub fn files(&self) -> &HashMap<PathBuf, usize> {
-&self.indices
+&self.edges.indices
}
/// Gets a node by index.
///
/// # Panics
///
/// if the `index` node id is not included in the graph
pub fn node(&self, index: usize) -> &Node {
&self.nodes[index]
}
@@ -80,7 +135,7 @@ impl Graph {
///
/// if the `start` node id is not included in the graph
pub fn node_ids(&self, start: usize) -> impl Iterator<Item = usize> + '_ {
-NodesIter::new(start, self)
+NodesIter::new(start, &self.edges)
}
/// Same as `Self::node_ids` but returns the actual `Node`
@@ -88,16 +143,22 @@ impl Graph {
self.node_ids(start).map(move |idx| self.node(idx))
}
-/// Returns all files together with their paths
-pub fn into_sources(self) -> Sources {
-self.nodes.into_iter().map(|node| (node.path, node.source)).collect()
-}
+/// Consumes the `Graph`, effectively splitting the `nodes` and the `GraphEdges` off and
+/// returning the `nodes` converted to `Sources`
+pub fn into_sources(self) -> (Sources, GraphEdges) {
+let Graph { nodes, edges, .. } = self;
+(nodes.into_iter().map(|node| (node.path, node.source)).collect(), edges)
+}
/// Returns an iterator that yields only those nodes that represent input files.
/// See `Self::resolve_sources`
/// This won't yield any resolved library nodes
pub fn input_nodes(&self) -> impl Iterator<Item = &Node> {
-self.nodes.iter().take(self.num_input_files)
+self.nodes.iter().take(self.edges.num_input_files)
}
+pub fn imports(&self, path: impl AsRef<Path>) -> HashSet<&PathBuf> {
+self.edges.imports(path)
+}
/// Resolves a number of sources within the given config
@@ -164,8 +225,18 @@ impl Graph {
nodes.push(node);
edges.push(resolved_imports);
}
-Ok(Graph { nodes, edges, indices: index, num_input_files, root: paths.root.clone() })
+let edges = GraphEdges {
+edges,
+rev_indices: index.iter().map(|(k, v)| (*v, k.clone())).collect(),
+indices: index,
+num_input_files,
+versions: nodes
+.iter()
+.enumerate()
+.map(|(idx, node)| (idx, node.data.version_req.clone()))
+.collect(),
+};
+Ok(Graph { nodes, edges, root: paths.root.clone() })
}
/// Resolves the dependencies of a project's source contracts
@@ -176,11 +247,12 @@
#[cfg(all(feature = "svm", feature = "async"))]
impl Graph {
-/// Returns all input files together with their appropriate version.
+/// Consumes the nodes of the graph and returns all input files together with their appropriate
+/// version and the edges of the graph
///
/// First we determine the compatible version for each input file (from sources and test folder,
/// see `Self::resolve`) and then we add all resolved library imports.
-pub fn into_sources_by_version(self, offline: bool) -> Result<VersionedSources> {
+pub fn into_sources_by_version(self, offline: bool) -> Result<(VersionedSources, GraphEdges)> {
/// insert the imports of the given node into the sources map
/// There can be the following graph:
/// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)`
@@ -209,7 +281,7 @@
}
let versioned_nodes = self.get_input_node_versions(offline)?;
-let Self { nodes, edges, num_input_files, .. } = self;
+let Self { nodes, edges, .. } = self;
let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len());
let mut all_nodes = nodes.into_iter().enumerate().collect::<HashMap<_, _>>();
@@ -221,11 +293,17 @@
// insert the input node in the sources set and remove it from the available set
let node = all_nodes.remove(&idx).expect("node is present. qed");
sources.insert(node.path, node.source);
-insert_imports(idx, &mut all_nodes, &mut sources, &edges, num_input_files);
+insert_imports(
+idx,
+&mut all_nodes,
+&mut sources,
+&edges.edges,
+edges.num_input_files,
+);
}
versioned_sources.insert(version, sources);
}
-Ok(VersionedSources { inner: versioned_sources, offline })
+Ok((VersionedSources { inner: versioned_sources, offline }, edges))
}
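The tuple return is the point of this change: version resolution consumes the nodes but hands the edges back for later cache work. A hedged sketch (requires the `svm` and `async` features; the root path is illustrative):

use ethers_solc::{Graph, ProjectPathsConfig};

fn split_by_version() -> Result<(), Box<dyn std::error::Error>> {
    let paths = ProjectPathsConfig::dapptools("/path/to/project")?;
    let graph = Graph::resolve(&paths)?;
    // offline = false: missing solc versions may still be installed on demand
    let (_versioned_sources, edges) = graph.into_sources_by_version(false)?;
    // the edges stay available for cache change detection afterwards
    if let Some(req) = edges.version_requirement("/path/to/project/src/Dapp.sol") {
        println!("Dapp.sol requires solc {}", req);
    }
    Ok(())
}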
/// Writes the list of imported files into the given formatter:
@@ -294,7 +372,8 @@
// on first error, instead gather all the errors and return a bundled error message instead
let mut errors = Vec::new();
// we also don't want duplicate error diagnostics
-let mut erroneous_nodes = std::collections::HashSet::with_capacity(self.num_input_files);
+let mut erroneous_nodes =
+std::collections::HashSet::with_capacity(self.edges.num_input_files);
let all_versions = if offline { Solc::installed_versions() } else { Solc::all_versions() };
@@ -302,7 +381,7 @@
let mut versioned_nodes = HashMap::new();
// walking through the node's dep tree and filtering the versions along the way
-for idx in 0..self.num_input_files {
+for idx in 0..self.edges.num_input_files {
let mut candidates = all_versions.iter().collect::<Vec<_>>();
self.retain_compatible_versions(idx, &mut candidates);
@@ -346,12 +425,12 @@ pub struct NodesIter<'a> {
/// stack of nodes
stack: VecDeque<usize>,
visited: HashSet<usize>,
-graph: &'a Graph,
+graph: &'a GraphEdges,
}
impl<'a> NodesIter<'a> {
-fn new(start: usize, graph: &'a Graph) -> Self {
-Self { stack: VecDeque::from([start]), visited: Default::default(), graph }
+fn new(start: usize, graph: &'a GraphEdges) -> Self {
+Self { stack: VecDeque::from([start]), visited: HashSet::new(), graph }
}
}
@@ -382,7 +461,7 @@ impl VersionedSources {
pub fn get(
self,
allowed_lib_paths: &crate::AllowedLibPaths,
-) -> Result<std::collections::BTreeMap<Solc, Sources>> {
+) -> Result<std::collections::BTreeMap<Solc, (semver::Version, Sources)>> {
use crate::SolcError;
// we take the installer lock here to ensure installation checking is done in sync
@@ -411,8 +490,9 @@
Solc::blocking_install(version.as_ref())?;
tracing::trace!("reinstalled solc: \"{}\"", version);
}
-sources_by_version
-.insert(solc.arg("--allow-paths").arg(allowed_lib_paths.to_string()), sources);
+let solc = solc.arg("--allow-paths").arg(allowed_lib_paths.to_string());
+let version = solc.version()?;
+sources_by_version.insert(solc, (version, sources));
}
Ok(sources_by_version)
}
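Each map key now carries the concrete resolved version next to the configured `Solc`, which is what keys artifacts in the new cache. A consumption sketch (assumes `Solc` and `Sources` are crate-root exports and `semver` is a direct dependency):

use std::collections::BTreeMap;
use ethers_solc::{Solc, Sources};
use semver::Version;

fn walk_batches(batches: BTreeMap<Solc, (Version, Sources)>) {
    for (_solc, (version, sources)) in batches {
        // the long form with commit hash is what ends up in the cache's artifact map
        println!("would compile {} sources with solc {}", sources.len(), version);
    }
}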
@@ -596,7 +676,7 @@ mod tests {
let graph = Graph::resolve(&paths).unwrap();
-assert_eq!(graph.num_input_files, 1);
+assert_eq!(graph.edges.num_input_files, 1);
assert_eq!(graph.files().len(), 2);
assert_eq!(
@@ -615,7 +695,7 @@ mod tests {
let graph = Graph::resolve(&paths).unwrap();
-assert_eq!(graph.num_input_files, 2);
+assert_eq!(graph.edges.num_input_files, 2);
assert_eq!(graph.files().len(), 3);
assert_eq!(
graph.files().clone(),


@@ -6,6 +6,7 @@ use crate::{error::SolcError, SolcIoError};
use once_cell::sync::Lazy;
use regex::{Match, Regex};
use semver::Version;
use serde::de::DeserializeOwned;
use tiny_keccak::{Hasher, Keccak};
use walkdir::WalkDir;
@@ -82,6 +83,20 @@ pub fn canonicalize(path: impl AsRef<Path>) -> Result<PathBuf, SolcIoError> {
dunce::canonicalize(&path).map_err(|err| SolcIoError::new(err, path))
}
/// Returns the same path config but with canonicalized paths.
///
/// This will take care of potential symbolically linked directories.
/// For example, the tempdir library creates directories hosted under `/var/`, which on OS X
/// is a symbolic link to `/private/var/`. So when we try to resolve imports and a path is
/// rooted in a symbolic directory we might end up with different paths for the same file, like
/// `/private/var/.../Dapp.sol` and `/var/.../Dapp.sol`
///
/// This canonicalizes all the paths but does not treat non-existing dirs as an error
pub fn canonicalized(path: impl Into<PathBuf>) -> PathBuf {
let path = path.into();
canonicalize(&path).unwrap_or(path)
}
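The useful property over plain `canonicalize` is the fallback: unresolvable paths come back unchanged instead of erroring. A small sketch (the path is intentionally non-existing):

use std::path::PathBuf;

fn main() {
    // the file does not exist, so canonicalization fails and the
    // original path is returned unchanged instead of erroring
    let p = ethers_solc::utils::canonicalized("/var/tmp/nope/Dapp.sol");
    assert_eq!(p, PathBuf::from("/var/tmp/nope/Dapp.sol"));
}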
/// Returns the path to the library if the source path is in fact determined to be a library path,
/// and it exists.
/// Note: this does not handle relative imports or remappings.
@@ -252,6 +267,31 @@ pub(crate) fn tempdir(name: &str) -> Result<tempfile::TempDir, SolcIoError> {
tempfile::Builder::new().prefix(name).tempdir().map_err(|err| SolcIoError::new(err, name))
}
/// Reads the json file and deserializes it into the provided type
pub fn read_json_file<T: DeserializeOwned>(path: impl AsRef<Path>) -> Result<T, SolcError> {
let path = path.as_ref();
let file = std::fs::File::open(path).map_err(|err| SolcError::io(err, path))?;
let file = std::io::BufReader::new(file);
let val: T = serde_json::from_reader(file)?;
Ok(val)
}
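A sketch of reading a typed value back; the struct here is hypothetical and stands in for the real cache types (assumes the `error` module is public, as used elsewhere in the crate):

use serde::Deserialize;

#[derive(Deserialize)]
struct FormatMarker {
    // matches the `_format` field of the cache file shown further below
    _format: String,
}

fn read_marker() -> Result<FormatMarker, ethers_solc::error::SolcError> {
    ethers_solc::utils::read_json_file("cache/solidity-files-cache.json")
}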
/// Creates the parent directory of the `file` and all its ancestors if it does not exist
/// See [`std::fs::create_dir_all()`]
pub fn create_parent_dir_all(file: impl AsRef<Path>) -> Result<(), SolcError> {
let file = file.as_ref();
if let Some(parent) = file.parent() {
std::fs::create_dir_all(parent).map_err(|err| {
SolcError::msg(format!(
"Failed to create artifact parent folder \"{}\": {}",
parent.display(),
err
))
})?;
}
Ok(())
}
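This is what lets artifact writing assume its parent directory exists. A sketch of the pattern (paths illustrative; `SolcError::io` as used above):

use std::path::Path;
use ethers_solc::error::SolcError;

fn write_artifact(out: &Path, json: &str) -> Result<(), SolcError> {
    // creates e.g. `out/Dapp.sol/` before writing `out/Dapp.sol/Dapp.json`
    ethers_solc::utils::create_parent_dir_all(out)?;
    std::fs::write(out, json).map_err(|err| SolcError::io(err, out))?;
    Ok(())
}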
#[cfg(test)]
mod tests {
use super::*;


@@ -1,12 +1,11 @@
{
-"_format": "hh-sol-cache-2",
+"_format": "ethers-rs-sol-cache-2",
"files": {
-"Greeter.sol": {
-"lastModificationDate": 1634246369587,
-"contentHash": "483b7f4f64b06a04a24bd0af7c3bf8b7",
-"sourceName": "contracts/Greeter.sol",
+"/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/lib/ds-test/src/test.sol": {
+"lastModificationDate": 1638218667720,
+"contentHash": "5d45a46528eaf8a26f0a8d93669f3148",
+"sourceName": "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/lib/ds-test/src/test.sol",
"solcConfig": {
-"version": "0.8.4",
"settings": {
"optimizer": {
"enabled": false,
@@ -14,62 +13,98 @@
},
"outputSelection": {
"*": {
+"": [
+"ast"
+],
"*": [
"abi",
"evm.bytecode",
"evm.deployedBytecode",
"evm.methodIdentifiers"
-],
-"": [
-"ast"
]
}
-}
-}
-},
-"imports": [
-"hardhat/console.sol"
-],
-"versionPragmas": [
-"^0.8.0"
-],
-"artifacts": [
-"Greeter"
-]
-},
-"console.sol": {
-"lastModificationDate": 1634245289287,
-"contentHash": "cc4777addd464ea56fa35b1c45df0591",
-"sourceName": "hardhat/console.sol",
-"solcConfig": {
-"version": "0.8.4",
-"settings": {
-"optimizer": {
-"enabled": false,
-"runs": 200
-},
-"outputSelection": {
-"*": {
-"*": [
-"abi",
-"evm.bytecode",
-"evm.deployedBytecode",
-"evm.methodIdentifiers"
-],
-"": [
-"ast"
-]
-}
-}
-}
-},
-"imports": [],
-"versionPragmas": [
->=0.4.22 <0.9.0"
-],
-"artifacts": [
-"console"
-]
+},
+"evmVersion": "london"
+}
+},
+"imports": [],
+"versionRequirement": ">=0.4.23",
+"artifacts": {
+"DSTest": {
+"0.8.11+commit.d7f03943.Darwin.appleclang": "test.sol/DSTest.json"
+}
+}
+},
+"/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.sol": {
+"lastModificationDate": 1638193396942,
+"contentHash": "a41ddb3b99ae6b72b59341eabf948542",
+"sourceName": "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.sol",
+"solcConfig": {
+"settings": {
+"optimizer": {
+"enabled": false,
+"runs": 200
+},
+"outputSelection": {
+"*": {
+"": [
+"ast"
+],
+"*": [
+"abi",
+"evm.bytecode",
+"evm.deployedBytecode",
+"evm.methodIdentifiers"
+]
+}
+},
+"evmVersion": "london"
+}
+},
+"imports": [],
+"versionRequirement": ">=0.6.6",
+"artifacts": {
+"Dapp": {
+"0.8.11+commit.d7f03943.Darwin.appleclang": "Dapp.sol/Dapp.json"
+}
+}
+},
+"/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.t.sol": {
+"lastModificationDate": 1638193396942,
+"contentHash": "5f5038d89f69269d0734659efaa2ec52",
+"sourceName": "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.t.sol",
+"solcConfig": {
+"settings": {
+"optimizer": {
+"enabled": false,
+"runs": 200
+},
+"outputSelection": {
+"*": {
+"": [
+"ast"
+],
+"*": [
+"abi",
+"evm.bytecode",
+"evm.deployedBytecode",
+"evm.methodIdentifiers"
+]
+}
+},
+"evmVersion": "london"
+}
+},
+"imports": [
+"/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/lib/ds-test/src/test.sol",
+"/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.sol"
+],
+"versionRequirement": ">=0.6.6",
+"artifacts": {
+"DappTest": {
+"0.8.11+commit.d7f03943.Darwin.appleclang": "Dapp.t.sol/DappTest.json"
+}
+}
}
}
}
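The `artifacts` object is the breaking part of the new format: instead of a flat list of contract names it maps contract name to compiler version to artifact path, which is what enables caching multiple compiled versions side by side. A hedged sketch of reading it back (public `files`/`artifacts` fields are an assumption based on the test usage of `SolFilesCache::read`):

use ethers_solc::cache::SolFilesCache;

fn dump_cache() -> Result<(), Box<dyn std::error::Error>> {
    let cache = SolFilesCache::read("cache/solidity-files-cache.json")?;
    for (file, entry) in &cache.files {
        // one artifact path per (contract, solc version) pair; assumes Debug is derived
        println!("{} -> {:?}", file.display(), entry.artifacts);
    }
    Ok(())
}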


@@ -1,6 +1,6 @@
pragma solidity 0.8.6;
-import "../lib1/Bar.sol";
-import "../lib2/Baz.sol";
+import "bar/Bar.sol";
+import "baz/Baz.sol";
contract Foo is Bar, Baz {}


@@ -1,7 +1,7 @@
//! project tests
use std::{
-collections::HashMap,
+collections::{HashMap, HashSet},
io,
path::{Path, PathBuf},
str::FromStr,
@@ -11,8 +11,16 @@ use ethers_solc::{
cache::{SolFilesCache, SOLIDITY_FILES_CACHE_FILENAME},
project_util::*,
remappings::Remapping,
-Graph, MinimalCombinedArtifacts, Project, ProjectPathsConfig,
+Graph, MinimalCombinedArtifacts, Project, ProjectCompileOutput, ProjectPathsConfig,
};
use pretty_assertions::assert_eq;
#[allow(unused)]
fn init_tracing() {
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.init();
}
#[test]
fn can_compile_hardhat_sample() {
@@ -56,11 +64,16 @@ fn can_compile_dapp_sample() {
assert!(compiled.find("Dapp").is_some());
assert!(compiled.is_unchanged());
let cache = SolFilesCache::read(project.cache_path()).unwrap();
// delete artifacts
std::fs::remove_dir_all(&project.paths().artifacts).unwrap();
let compiled = project.compile().unwrap();
assert!(compiled.find("Dapp").is_some());
assert!(!compiled.is_unchanged());
let updated_cache = SolFilesCache::read(project.cache_path()).unwrap();
assert_eq!(cache, updated_cache);
}
#[test]
@@ -72,7 +85,6 @@ fn can_compile_dapp_detect_changes_in_libs() {
.paths_mut()
.remappings
.push(Remapping::from_str(&format!("remapping={}/", remapping.display())).unwrap());
-project.project_mut().auto_detect = false;
let src = project
.add_source(
@@ -139,6 +151,7 @@ fn can_compile_dapp_detect_changes_in_libs() {
#[test]
fn can_compile_dapp_detect_changes_in_sources() {
init_tracing();
let project = TempProject::<MinimalCombinedArtifacts>::dapptools().unwrap();
let src = project
@@ -214,6 +227,7 @@ fn can_compile_dapp_detect_changes_in_sources() {
assert!(compiled.find("DssSpellTestBase").is_some());
// ensure change is detected
assert!(!compiled.is_unchanged());

// and all recompiled artifacts are different
for (p, artifact) in compiled.into_artifacts() {
let other = artifacts.remove(&p).unwrap();
@@ -266,31 +280,31 @@ fn can_compile_dapp_sample_with_cache() {
assert!(compiled.find("NewContract").is_some());
assert!(!compiled.is_unchanged());
assert_eq!(
-compiled.into_artifacts().map(|(name, _)| name).collect::<Vec<_>>(),
-vec![
-"Dapp.json:Dapp",
-"DappTest.json:DappTest",
-"DSTest.json:DSTest",
-"NewContract.json:NewContract"
-]
+compiled.into_artifacts().map(|(name, _)| name).collect::<HashSet<_>>(),
+HashSet::from([
+"Dapp.json:Dapp".to_string(),
+"DappTest.json:DappTest".to_string(),
+"DSTest.json:DSTest".to_string(),
+"NewContract.json:NewContract".to_string(),
+])
);
// old cached artifact is not taken from the cache
std::fs::copy(cache_testdata_dir.join("Dapp.sol"), root.join("src/Dapp.sol")).unwrap();
let compiled = project.compile().unwrap();
assert_eq!(
-compiled.into_artifacts().map(|(name, _)| name).collect::<Vec<_>>(),
-vec![
-"DappTest.json:DappTest",
-"NewContract.json:NewContract",
-"DSTest.json:DSTest",
-"Dapp.json:Dapp"
-]
+compiled.into_artifacts().map(|(name, _)| name).collect::<HashSet<_>>(),
+HashSet::from([
+"DappTest.json:DappTest".to_string(),
+"NewContract.json:NewContract".to_string(),
+"DSTest.json:DSTest".to_string(),
+"Dapp.json:Dapp".to_string(),
+])
);
// deleted artifact is not taken from the cache
std::fs::remove_file(&project.paths.sources.join("Dapp.sol")).unwrap();
-let compiled = project.compile().unwrap();
+let compiled: ProjectCompileOutput<_> = project.compile().unwrap();
assert!(compiled.find("Dapp").is_none());
}
@@ -376,7 +390,7 @@ fn can_flatten_file_with_duplicates() {
assert_eq!(result.matches("contract Foo {").count(), 1);
assert_eq!(result.matches("contract Bar {").count(), 1);
assert_eq!(result.matches("contract FooBar {").count(), 1);
-assert_eq!(result.matches(";").count(), 1);
+assert_eq!(result.matches(';').count(), 1);
}

#[test]