feat(solc): add dependency graph implementation (#750)

* docs: document public functions

* add mod

* feat(solc): add dependency graph

* detect version

* add into sources

* fix: canonicalize temp paths

* test: add graph tests

* chore(clippy): make clippy happy

* more wasm compat

* chore: update changelog

* wasm compat

* unused

* fix get source fill function

* Update ethers-solc/src/resolver.rs

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>

* perf: use solang_parser to trim down deps

* resolve graph in compile

* refactor add node function

* docs: clear up comment

* docs: typos

* fix: make all versions unique based on their major.minor.patch version

* prepare test

* docs: add more resolve docs

* test: add lib change detection test

* test: update tests

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>
Matthias Seitz 2022-01-05 22:46:57 +01:00, committed by GitHub
parent 3da5a419fe
commit fc9f66c916
12 changed files with 1193 additions and 104 deletions


@ -34,6 +34,8 @@
### Unreleased
- Add dependency graph and resolve all imported library files
[#750](https://github.com/gakonst/ethers-rs/pull/750)
- `Remapping::find_many` does not return a `Result` anymore
[#707](https://github.com/gakonst/ethers-rs/pull/707)
- Add support for hardhat artifacts

Cargo.lock

@ -76,6 +76,15 @@ version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
[[package]]
name = "ascii-canvas"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6"
dependencies = [
"term",
]
[[package]]
name = "async-trait"
version = "0.1.52"
@ -176,6 +185,21 @@ dependencies = [
"serde",
]
[[package]]
name = "bit-set"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de"
dependencies = [
"bit-vec",
]
[[package]]
name = "bit-vec"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
[[package]]
name = "bitflags"
version = "1.3.2"
@ -971,6 +995,15 @@ dependencies = [
"zeroize",
]
[[package]]
name = "ena"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3"
dependencies = [
"log",
]
[[package]]
name = "encode_unicode"
version = "0.3.6"
@ -1300,11 +1333,13 @@ dependencies = [
"num_cpus",
"once_cell",
"pretty_assertions",
"rayon",
"regex",
"semver",
"serde",
"serde_json",
"sha2 0.9.8",
"solang-parser",
"svm-rs",
"tempdir",
"thiserror",
@ -1359,6 +1394,12 @@ dependencies = [
"static_assertions",
]
[[package]]
name = "fixedbitset"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
[[package]]
name = "fnv"
version = "1.0.7"
@ -1878,6 +1919,38 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67c21572b4949434e4fc1e1978b99c5f77064153c59d998bf13ecd96fb5ecba7"
[[package]]
name = "lalrpop"
version = "0.19.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15174f1c529af5bf1283c3bc0058266b483a67156f79589fab2a25e23cf8988"
dependencies = [
"ascii-canvas",
"atty",
"bit-set",
"diff",
"ena",
"itertools",
"lalrpop-util",
"petgraph",
"pico-args",
"regex",
"regex-syntax",
"string_cache",
"term",
"tiny-keccak",
"unicode-xid",
]
[[package]]
name = "lalrpop-util"
version = "0.19.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3e58cce361efcc90ba8a0a5f982c741ff86b603495bb15a998412e957dcd278"
dependencies = [
"regex",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
@ -2013,6 +2086,12 @@ dependencies = [
"tempfile",
]
[[package]]
name = "new_debug_unreachable"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
[[package]]
name = "nix"
version = "0.13.1"
@ -2035,6 +2114,17 @@ dependencies = [
"winapi",
]
[[package]]
name = "num-bigint"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-integer"
version = "0.1.44"
@ -2045,6 +2135,18 @@ dependencies = [
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a"
dependencies = [
"autocfg",
"num-bigint",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.14"
@ -2249,6 +2351,16 @@ version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
[[package]]
name = "petgraph"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
dependencies = [
"fixedbitset",
"indexmap",
]
[[package]]
name = "pharos"
version = "0.5.3"
@ -2259,6 +2371,65 @@ dependencies = [
"rustc_version",
]
[[package]]
name = "phf"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259"
dependencies = [
"phf_macros",
"phf_shared 0.10.0",
"proc-macro-hack",
]
[[package]]
name = "phf_generator"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6"
dependencies = [
"phf_shared 0.10.0",
"rand 0.8.4",
]
[[package]]
name = "phf_macros"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58fdf3184dd560f160dd73922bea2d5cd6e8f064bf4b13110abd81b03697b4e0"
dependencies = [
"phf_generator",
"phf_shared 0.10.0",
"proc-macro-hack",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "phf_shared"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7"
dependencies = [
"siphasher",
]
[[package]]
name = "phf_shared"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096"
dependencies = [
"siphasher",
]
[[package]]
name = "pico-args"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468"
[[package]]
name = "pin-project"
version = "1.0.8"
@ -2342,6 +2513,12 @@ version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba"
[[package]]
name = "precomputed-hash"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]]
name = "pretty_assertions"
version = "1.0.0"
@ -2401,6 +2578,12 @@ dependencies = [
"version_check",
]
[[package]]
name = "proc-macro-hack"
version = "0.5.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]]
name = "proc-macro2"
version = "1.0.36"
@ -2858,6 +3041,12 @@ dependencies = [
"base64 0.13.0",
]
[[package]]
name = "rustversion"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
[[package]]
name = "ryu"
version = "1.0.9"
@ -3171,6 +3360,12 @@ dependencies = [
"synstructure",
]
[[package]]
name = "siphasher"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "533494a8f9b724d33625ab53c6c4800f7cc445895924a8ef649222dcb76e938b"
[[package]]
name = "slab"
version = "0.4.5"
@ -3193,6 +3388,20 @@ dependencies = [
"winapi",
]
[[package]]
name = "solang-parser"
version = "0.1.0"
source = "git+https://github.com/hyperledger-labs/solang#d92c43a7257009022016d6611255a5ce572852df"
dependencies = [
"lalrpop",
"lalrpop-util",
"num-bigint",
"num-rational",
"num-traits",
"phf",
"unicode-xid",
]
[[package]]
name = "spin"
version = "0.5.2"
@ -3215,6 +3424,19 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "string_cache"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "923f0f39b6267d37d23ce71ae7235602134b250ace715dd2c90421998ddac0c6"
dependencies = [
"lazy_static",
"new_debug_unreachable",
"parking_lot",
"phf_shared 0.8.0",
"precomputed-hash",
]
[[package]]
name = "structopt"
version = "0.3.25"
@ -3325,6 +3547,17 @@ dependencies = [
"winapi",
]
[[package]]
name = "term"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f"
dependencies = [
"dirs-next",
"rustversion",
"winapi",
]
[[package]]
name = "terminal_size"
version = "0.1.17"


@ -35,6 +35,8 @@ tempdir = { version = "0.3.7", optional = true }
fs_extra = { version = "1.2.0", optional = true }
sha2 = { version = "0.9.8", default-features = false }
dunce = "1.0.2"
solang-parser = { git = "https://github.com/hyperledger-labs/solang", default-features = false }
rayon = "1.5.1"
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
home = "0.5.3"
@ -56,6 +58,10 @@ tokio = { version = "1.15.0", features = ["full"] }
name = "compile_many"
harness = false
[[bench]]
name = "read_all"
harness = false
[[test]]
name = "project"
path = "tests/project.rs"


@ -0,0 +1,53 @@
//! Benchmarks reading many source files, sequentially vs. in parallel
#[macro_use]
extern crate criterion;
use criterion::Criterion;
use ethers_core::rand;
use ethers_solc::artifacts::Source;
use rand::{distributions::Alphanumeric, Rng};
use std::{
fs::File,
io::{BufWriter, Write},
path::{Path, PathBuf},
};
fn read_all_benchmark(c: &mut Criterion) {
let root = tempdir::TempDir::new("bench_read_many").unwrap();
let inputs = prepare_contracts(root.path(), 8);
let mut group = c.benchmark_group("read many");
group.sample_size(10);
group.bench_function("sequential", |b| {
b.iter(|| {
Source::read_all(&inputs).unwrap();
});
});
group.bench_function("parallel", |b| {
b.iter(|| {
Source::par_read_all(&inputs).unwrap();
});
});
}
fn prepare_contracts(root: &Path, num: usize) -> Vec<PathBuf> {
let mut files = Vec::with_capacity(num);
for i in 0..num {
let path = root.join(format!("file{}.sol", i));
let f = File::create(&path).unwrap();
let mut writer = BufWriter::new(f);
let mut rng = rand::thread_rng();
// let's assume a solidity file is between 2kb and 16kb
let n: usize = rng.gen_range(2..17);
let s: String = rng.sample_iter(&Alphanumeric).take(n * 1024).map(char::from).collect();
writer.write_all(s.as_bytes()).unwrap();
writer.flush().unwrap();
files.push(path)
}
files
}
criterion_group!(benches, read_all_benchmark);
criterion_main!(benches);


@ -393,15 +393,36 @@ pub struct Source {
}
impl Source {
/// This is a heuristically measured threshold at which we can generally expect a speedup by
/// using rayon's `par_iter`, see `Self::read_all_files`.
pub const NUM_READ_PAR: usize = 8;
/// Reads the file content
pub fn read(file: impl AsRef<Path>) -> Result<Self, SolcIoError> {
let file = file.as_ref();
Ok(Self { content: fs::read_to_string(file).map_err(|err| SolcIoError::new(err, file))? })
}
/// Finds all source files under the given dir path and reads them all
/// Recursively finds all source files under the given dir path and reads them all
pub fn read_all_from(dir: impl AsRef<Path>) -> Result<Sources, SolcIoError> {
Self::read_all(utils::source_files(dir))
Self::read_all_files(utils::source_files(dir))
}
/// Reads all source files of the given vec
///
/// Depending on the length of the vec, it will read the files in parallel
pub fn read_all_files(files: Vec<PathBuf>) -> Result<Sources, SolcIoError> {
use rayon::prelude::*;
if files.len() < Self::NUM_READ_PAR {
Self::read_all(files)
} else {
files
.par_iter()
.map(Into::into)
.map(|file| Self::read(&file).map(|source| (file, source)))
.collect()
}
}
/// Reads all files
@ -417,6 +438,25 @@ impl Source {
.collect()
}
/// Parallelized version of `Self::read_all` that reads all files using a parallel iterator
///
/// NOTE: this is only expected to be faster than `Self::read_all` if the given iterator
/// contains at least several paths; see also `Self::read_all_files`.
pub fn par_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
where
I: IntoIterator<Item = T>,
<I as IntoIterator>::IntoIter: Send,
T: Into<PathBuf> + Send,
{
use rayon::{iter::ParallelBridge, prelude::ParallelIterator};
files
.into_iter()
.par_bridge()
.map(Into::into)
.map(|file| Self::read(&file).map(|source| (file, source)))
.collect()
}
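Below is a minimal usage sketch (hypothetical file list) of the two reading strategies; `read_all_files` dispatches on `Source::NUM_READ_PAR`, so callers don't have to choose a path themselves:
use ethers_solc::{artifacts::Source, SolcIoError};
use std::path::PathBuf;

fn read_project(files: Vec<PathBuf>) -> Result<(), SolcIoError> {
    // fewer than `NUM_READ_PAR` (8) files: sequential `read_all`,
    // otherwise rayon's parallel iterator takes over
    let sources = Source::read_all_files(files)?;
    for (path, source) in &sources {
        println!("{} -> {}", path.display(), source.content_hash());
    }
    Ok(())
}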
/// Generate a non-cryptographically secure checksum of the file's content
pub fn content_hash(&self) -> String {
let mut hasher = md5::Md5::new();


@ -6,6 +6,8 @@ use crate::{
use semver::{Version, VersionReq};
use serde::{de::DeserializeOwned, Serialize};
use std::{
fmt,
fmt::Formatter,
io::BufRead,
path::{Path, PathBuf},
process::{Command, Output, Stdio},
@ -78,6 +80,43 @@ pub static RELEASES: Lazy<(svm::Releases, Vec<Version>, bool)> = Lazy::new(|| {
}
});
/// A `Solc` version is either installed (available locally) or can be downloaded from the
/// remote endpoint
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum SolcVersion {
Installed(Version),
Remote(Version),
}
impl SolcVersion {
/// Whether this version is installed
pub fn is_installed(&self) -> bool {
matches!(self, SolcVersion::Installed(_))
}
}
impl AsRef<Version> for SolcVersion {
fn as_ref(&self) -> &Version {
match self {
SolcVersion::Installed(v) | SolcVersion::Remote(v) => v,
}
}
}
impl From<SolcVersion> for Version {
fn from(s: SolcVersion) -> Version {
match s {
SolcVersion::Installed(v) | SolcVersion::Remote(v) => v,
}
}
}
impl fmt::Display for SolcVersion {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.as_ref())
}
}
/// Abstraction over `solc` command line utility
///
/// Supports sync and async functions.
@ -159,6 +198,44 @@ impl Solc {
Version::parse(&version).ok()
}
/// Returns the list of all solc instances installed at `SVM_HOME`
#[cfg(not(target_arch = "wasm32"))]
pub fn installed_versions() -> Vec<SolcVersion> {
if let Some(home) = Self::svm_home() {
utils::installed_versions(home)
.unwrap_or_default()
.into_iter()
.map(SolcVersion::Installed)
.collect()
} else {
Vec::new()
}
}
/// Returns the list of all versions that are available to download, marking those that are
/// already installed.
#[cfg(all(feature = "svm", feature = "async"))]
pub fn all_versions() -> Vec<SolcVersion> {
let mut all_versions = Self::installed_versions();
let mut uniques = all_versions
.iter()
.map(|v| {
let v = v.as_ref();
(v.major, v.minor, v.patch)
})
.collect::<std::collections::HashSet<_>>();
all_versions.extend(
RELEASES
.1
.clone()
.into_iter()
.filter(|v| uniques.insert((v.major, v.minor, v.patch)))
.map(SolcVersion::Remote),
);
all_versions.sort_unstable();
all_versions
}
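A hedged sketch of how the deduplicated, ascending-sorted list can be consumed, e.g. picking the newest release that satisfies a pragma (assumes the `svm` and `async` features and that `SolcVersion` is exported at the crate root):
use ethers_solc::{Solc, SolcVersion};
use semver::VersionReq;
use std::str::FromStr;

fn newest_matching(pragma: &str) -> Option<SolcVersion> {
    let req = VersionReq::from_str(pragma).ok()?;
    Solc::all_versions()
        .into_iter()
        .rev() // newest first; `all_versions` sorts ascending
        .find(|v| req.matches(v.as_ref()))
}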
/// Returns the path for a [svm](https://github.com/roynalnaruto/svm-rs) installed version.
///
/// # Example
@ -201,7 +278,7 @@ impl Solc {
#[cfg(all(feature = "svm", feature = "async"))]
pub fn detect_version(source: &Source) -> Result<Version> {
// detects the required solc version
let sol_version = Self::version_req(source)?;
let sol_version = Self::source_version_req(source)?;
Self::ensure_installed(&sol_version)
}
@ -243,10 +320,15 @@ impl Solc {
/// Parses the given source looking for the `pragma` definition and
/// returns the corresponding SemVer version requirement.
pub fn version_req(source: &Source) -> Result<VersionReq> {
let version = utils::find_version_pragma(&source.content)
.ok_or(SolcError::PragmaNotFound)?
.replace(' ', ",");
pub fn source_version_req(source: &Source) -> Result<VersionReq> {
let version =
utils::find_version_pragma(&source.content).ok_or(SolcError::PragmaNotFound)?;
Self::version_req(version)
}
/// Returns the corresponding SemVer version requirement for the solidity version
pub fn version_req(version: &str) -> Result<VersionReq> {
let version = version.replace(' ', ",");
// Somehow, Solidity semver without an operator is considered to be "exact",
// but lack of operator automatically marks the operator as Caret, so we need
@ -611,7 +693,7 @@ mod tests {
let sources = versions.iter().map(|version| source(version));
sources.zip(versions).for_each(|(source, version)| {
let version_req = Solc::version_req(&source).unwrap();
let version_req = Solc::source_version_req(&source).unwrap();
assert_eq!(version_req, VersionReq::from_str(version).unwrap());
});
@ -619,7 +701,7 @@ mod tests {
// requires them to be separated with a comma
let version_range = ">=0.8.0 <0.9.0";
let source = source(version_range);
let version_req = Solc::version_req(&source).unwrap();
let version_req = Solc::source_version_req(&source).unwrap();
assert_eq!(version_req, VersionReq::from_str(">=0.8.0,<0.9.0").unwrap());
}


@ -4,7 +4,7 @@ use crate::{
error::{Result, SolcError, SolcIoError},
hh::HardhatArtifact,
remappings::Remapping,
utils, CompilerOutput,
utils, CompilerOutput, Source, Sources,
};
use ethers_core::{abi::Abi, types::Bytes};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
@ -76,6 +76,47 @@ impl ProjectPathsConfig {
Ok(())
}
/// Returns all sources found under the project's configured `sources` path
pub fn read_sources(&self) -> Result<Sources> {
tracing::trace!("reading all sources from \"{}\"", self.sources.display());
Ok(Source::read_all_from(&self.sources)?)
}
/// Returns all sources found under the project's configured `test` path
pub fn read_tests(&self) -> Result<Sources> {
tracing::trace!("reading all tests from \"{}\"", self.tests.display());
Ok(Source::read_all_from(&self.tests)?)
}
/// Returns the combined set of solidity file paths for `Self::sources` and `Self::tests`
pub fn input_files(&self) -> Vec<PathBuf> {
utils::source_files(&self.sources)
.into_iter()
.chain(utils::source_files(&self.tests))
.collect()
}
/// Returns the combined set of `Self::read_sources` + `Self::read_tests`
pub fn read_input_files(&self) -> Result<Sources> {
Ok(Source::read_all_files(self.input_files())?)
}
/// Attempts to find the path to the real solidity file that's imported via the given `import`
/// path by applying the configured remappings and checking the library dirs
pub fn resolve_library_import(&self, import: &Path) -> Option<PathBuf> {
// if the import path starts with the name of the remapping then we get the resolved path by
// removing the name and adding the remainder to the path of the remapping
if let Some(path) = self
.remappings
.iter()
.find_map(|r| import.strip_prefix(&r.name).ok().map(|p| Path::new(&r.path).join(p)))
{
Some(self.root.join(path))
} else {
utils::resolve_library(&self.libraries, import)
}
}
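A sketch (hypothetical root and remapping) of the resolution order this function implements: remappings are tried first, then the configured library dirs via `utils::resolve_library`:
use ethers_solc::{remappings::Remapping, ProjectPathsConfig};
use std::{
    path::{Path, PathBuf},
    str::FromStr,
};

fn resolve_ds_test(root: &Path) -> Option<PathBuf> {
    let mut paths = ProjectPathsConfig::builder().build_with_root(root);
    paths.remappings.push(Remapping::from_str("ds-test/=lib/ds-test/src/").unwrap());
    // "ds-test/test.sol" starts with the remapping name, so the remainder is
    // joined onto the remapping's path: <root>/lib/ds-test/src/test.sol
    paths.resolve_library_import(Path::new("ds-test/test.sol"))
}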
/// Attempts to autodetect the artifacts directory based on the given root path
///
/// Dapptools layout takes precedence over hardhat style.
@ -132,9 +173,10 @@ pub enum PathStyle {
}
impl PathStyle {
/// Converts into a `ProjectPathsConfig` given the root path and based on the style
pub fn paths(&self, root: impl AsRef<Path>) -> Result<ProjectPathsConfig> {
let root = root.as_ref();
let root = dunce::canonicalize(root).map_err(|err| SolcError::io(err, root))?;
let root = utils::canonicalize(root)?;
Ok(match self {
PathStyle::Dapptools => ProjectPathsConfig::builder()
@ -167,27 +209,27 @@ pub struct ProjectPathsConfigBuilder {
impl ProjectPathsConfigBuilder {
pub fn root(mut self, root: impl Into<PathBuf>) -> Self {
self.root = Some(root.into());
self.root = Some(canonicalized(root));
self
}
pub fn cache(mut self, cache: impl Into<PathBuf>) -> Self {
self.cache = Some(cache.into());
self.cache = Some(canonicalized(cache));
self
}
pub fn artifacts(mut self, artifacts: impl Into<PathBuf>) -> Self {
self.artifacts = Some(artifacts.into());
self.artifacts = Some(canonicalized(artifacts));
self
}
pub fn sources(mut self, sources: impl Into<PathBuf>) -> Self {
self.sources = Some(sources.into());
self.sources = Some(canonicalized(sources));
self
}
pub fn tests(mut self, tests: impl Into<PathBuf>) -> Self {
self.tests = Some(tests.into());
self.tests = Some(canonicalized(tests));
self
}
@ -198,14 +240,14 @@ impl ProjectPathsConfigBuilder {
}
pub fn lib(mut self, lib: impl Into<PathBuf>) -> Self {
self.libraries.get_or_insert_with(Vec::new).push(lib.into());
self.libraries.get_or_insert_with(Vec::new).push(canonicalized(lib));
self
}
pub fn libs(mut self, libs: impl IntoIterator<Item = impl Into<PathBuf>>) -> Self {
let libraries = self.libraries.get_or_insert_with(Vec::new);
for lib in libs.into_iter() {
libraries.push(lib.into());
libraries.push(canonicalized(lib));
}
self
}
@ -224,8 +266,7 @@ impl ProjectPathsConfigBuilder {
}
pub fn build_with_root(self, root: impl Into<PathBuf>) -> ProjectPathsConfig {
let root = root.into();
let root = canonicalized(root);
ProjectPathsConfig {
cache: self
.cache
@ -248,11 +289,24 @@ impl ProjectPathsConfigBuilder {
.map(Ok)
.unwrap_or_else(std::env::current_dir)
.map_err(|err| SolcIoError::new(err, "."))?;
let root = dunce::canonicalize(&root).map_err(|err| SolcIoError::new(err, &root))?;
Ok(self.build_with_root(root))
}
}
/// Returns the same path but canonicalized.
///
/// This takes care of potential symbolically linked directories.
/// For example, the tempdir library creates directories hosted under `/var/`, which on OS X
/// is a symbolic link to `/private/var/`. So when we try to resolve imports and a path is
/// rooted in a symlinked directory, we might end up with different paths for the same file,
/// like `/private/var/.../Dapp.sol` and `/var/.../Dapp.sol`.
///
/// This canonicalizes all the paths but does not treat non-existing dirs as an error.
fn canonicalized(path: impl Into<PathBuf>) -> PathBuf {
let path = path.into();
utils::canonicalize(&path).unwrap_or(path)
}
/// The config to use when compiling the contracts
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct SolcConfig {
@ -517,7 +571,7 @@ impl<T: Into<PathBuf>> TryFrom<Vec<T>> for AllowedLibPaths {
.into_iter()
.map(|lib| {
let path: PathBuf = lib.into();
let lib = dunce::canonicalize(&path).map_err(|err| SolcIoError::new(err, path))?;
let lib = utils::canonicalize(&path)?;
Ok(lib)
})
.collect::<std::result::Result<Vec<_>, _>>()?;
@ -541,39 +595,45 @@ mod tests {
let root = root.path();
assert_eq!(ProjectPathsConfig::find_source_dir(root), src,);
assert_eq!(ProjectPathsConfig::builder().build_with_root(&root).sources, src,);
std::fs::File::create(&contracts).unwrap();
assert_eq!(ProjectPathsConfig::find_source_dir(root), contracts,);
assert_eq!(ProjectPathsConfig::builder().build_with_root(&root).sources, contracts,);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).sources,
canonicalized(contracts),
);
std::fs::File::create(&src).unwrap();
assert_eq!(ProjectPathsConfig::find_source_dir(root), src,);
assert_eq!(ProjectPathsConfig::builder().build_with_root(&root).sources, src,);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).sources,
canonicalized(src),
);
assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,);
assert_eq!(ProjectPathsConfig::builder().build_with_root(&root).artifacts, out,);
std::fs::File::create(&artifacts).unwrap();
assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), artifacts,);
assert_eq!(ProjectPathsConfig::builder().build_with_root(&root).artifacts, artifacts,);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).artifacts,
canonicalized(artifacts),
);
std::fs::File::create(&out).unwrap();
assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,);
assert_eq!(ProjectPathsConfig::builder().build_with_root(&root).artifacts, out,);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).artifacts,
canonicalized(out),
);
assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).libraries,
vec![lib.clone()],
);
std::fs::File::create(&node_modules).unwrap();
assert_eq!(ProjectPathsConfig::find_libs(root), vec![node_modules.clone()],);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).libraries,
vec![node_modules.clone()],
vec![canonicalized(node_modules.clone())],
);
std::fs::File::create(&lib).unwrap();
assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).libraries,
vec![lib.clone()],
vec![canonicalized(lib.clone())],
);
}
}


@ -7,7 +7,9 @@ use std::collections::btree_map::Entry;
pub mod cache;
pub mod hh;
mod resolver;
pub use hh::{HardhatArtifact, HardhatArtifacts};
pub use resolver::Graph;
mod compile;
@ -153,8 +155,7 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
/// Returns all sources found under the project's configured sources path
#[tracing::instrument(skip_all, fields(name = "sources"))]
pub fn sources(&self) -> Result<Sources> {
tracing::trace!("reading all sources from \"{}\"", self.paths.sources.display());
Ok(Source::read_all_from(&self.paths.sources)?)
self.paths.read_sources()
}
/// This emits the cargo [`rerun-if-changed`](https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath) instruction.
@ -204,7 +205,8 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
Ok(libs)
}
/// Attempts to compile the contracts found at the configured location.
/// Attempts to compile the contracts found at the configured source location, see
/// `ProjectPathsConfig::sources`.
///
/// NOTE: this does not check if the contracts were successfully compiled, see
/// `CompilerOutput::has_error` instead.
@ -212,7 +214,7 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
/// solc versions across files.
#[tracing::instrument(skip_all, name = "compile")]
pub fn compile(&self) -> Result<ProjectCompileOutput<Artifacts>> {
let sources = self.sources()?;
let sources = self.paths.read_input_files()?;
tracing::trace!("found {} sources to compile: {:?}", sources.len(), sources.keys());
#[cfg(all(feature = "svm", feature = "async"))]
@ -225,64 +227,17 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
if !self.allowed_lib_paths.0.is_empty() {
solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string());
}
let sources = Graph::resolve_sources(&self.paths, sources)?.into_sources();
self.compile_with_version(&solc, sources)
}
#[cfg(all(feature = "svm", feature = "async"))]
#[tracing::instrument(skip(self, sources))]
fn svm_compile(&self, sources: Sources) -> Result<ProjectCompileOutput<Artifacts>> {
use semver::{Version, VersionReq};
use std::collections::hash_map::{self, HashMap};
// split them by version
let mut sources_by_version = BTreeMap::new();
// we store the solc versions by path, in case there exists a corrupt solc binary
let mut solc_versions = HashMap::new();
// tracks unique version requirements to minimize install effort
let mut solc_version_req = HashMap::<VersionReq, Version>::new();
tracing::trace!("preprocessing source files and solc installs");
for (path, source) in sources.into_iter() {
// will detect and install the solc version if it's missing
tracing::trace!("detecting solc version for \"{}\"", path.display());
let version_req = Solc::version_req(&source)?;
let version = match solc_version_req.entry(version_req) {
hash_map::Entry::Occupied(version) => version.get().clone(),
hash_map::Entry::Vacant(entry) => {
let version = Solc::ensure_installed(entry.key())?;
entry.insert(version.clone());
version
}
};
tracing::trace!("found installed solc \"{}\"", version);
// gets the solc binary for that version, it is expected that this will succeed
// AND find the solc since it was installed right above
let mut solc = Solc::find_svm_installed_version(version.to_string())?
.unwrap_or_else(|| panic!("solc \"{}\" should have been installed", version));
if !self.allowed_lib_paths.0.is_empty() {
solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string());
}
solc_versions.insert(solc.solc.clone(), version);
let entry = sources_by_version.entry(solc).or_insert_with(BTreeMap::new);
entry.insert(path.clone(), source);
}
tracing::trace!("solc version preprocessing finished");
tracing::trace!("verifying solc checksums");
for solc in sources_by_version.keys() {
// verify that this solc version's checksum matches the checksum found remotely. If
// not, re-install the same version.
let version = &solc_versions[&solc.solc];
if solc.verify_checksum().is_err() {
tracing::trace!("corrupted solc version, redownloading \"{}\"", version);
Solc::blocking_install(version)?;
tracing::trace!("reinstalled solc: \"{}\"", version);
}
}
let graph = Graph::resolve_sources(&self.paths, sources)?;
let sources_by_version =
graph.into_sources_by_version(!self.auto_detect)?.get(&self.allowed_lib_paths)?;
// run the compilation step for each version
let compiled = if self.solc_jobs > 1 && sources_by_version.len() > 1 {
@ -295,6 +250,7 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
Ok(compiled)
}
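From a consumer's perspective the new pipeline stays behind `Project::compile`; a sketch with a hypothetical root path (`Project::builder` as in the existing API):
use ethers_solc::{Project, ProjectPathsConfig};
use std::path::Path;

fn build(root: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let paths = ProjectPathsConfig::dapptools(root)?;
    let project = Project::builder().paths(paths).build()?;
    // reads sources + tests, resolves the graph, compiles per version bucket
    let output = project.compile()?;
    assert!(!output.has_compiler_errors());
    Ok(())
}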
/// Compiles all sources with their intended `Solc` version sequentially.
#[cfg(all(feature = "svm", feature = "async"))]
fn compile_sources(
&self,
@ -314,6 +270,9 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
Ok(compiled)
}
/// Compiles all sources with their intended `Solc` version in parallel.
///
/// This runs `Self::solc_jobs` parallel `solc` jobs at most.
#[cfg(all(feature = "svm", feature = "async"))]
fn compile_many(
&self,
@ -929,7 +888,7 @@ mod tests {
fn test_build_many_libs() {
use super::*;
let root = dunce::canonicalize("./test-data/test-contract-libs").unwrap();
let root = utils::canonicalize("./test-data/test-contract-libs").unwrap();
let paths = ProjectPathsConfig::builder()
.root(&root)
@ -956,7 +915,7 @@ mod tests {
fn test_build_remappings() {
use super::*;
let root = dunce::canonicalize("./test-data/test-contract-remappings").unwrap();
let root = utils::canonicalize("./test-data/test-contract-remappings").unwrap();
let paths = ProjectPathsConfig::builder()
.root(&root)
.sources(root.join("src"))


@ -7,7 +7,7 @@ use crate::{
SolcIoError,
};
use fs_extra::{dir, file};
use std::path::Path;
use std::path::{Path, PathBuf};
use tempdir::TempDir;
pub struct TempProject<T: ArtifactOutput> {
@ -49,6 +49,11 @@ impl<T: ArtifactOutput> TempProject<T> {
&self.project().paths
}
/// The configured paths of the project
pub fn paths_mut(&mut self) -> &mut ProjectPathsConfig {
&mut self.project_mut().paths
}
/// The root path of the temporary workspace
pub fn root(&self) -> &Path {
self.project().paths.root.as_path()
@ -70,13 +75,17 @@ impl<T: ArtifactOutput> TempProject<T> {
Ok(())
}
/// Copies a single file into the project's main library directory
pub fn copy_lib(&self, lib: impl AsRef<Path>) -> Result<()> {
let lib_dir = self
.paths()
fn get_lib(&self) -> Result<PathBuf> {
self.paths()
.libraries
.get(0)
.ok_or_else(|| SolcError::msg("No libraries folders configured"))?;
.cloned()
.ok_or_else(|| SolcError::msg("No libraries folders configured"))
}
/// Copies a single file into the project's main library directory
pub fn copy_lib(&self, lib: impl AsRef<Path>) -> Result<()> {
let lib_dir = self.get_lib()?;
copy_file(lib, lib_dir)
}
@ -91,6 +100,39 @@ impl<T: ArtifactOutput> TempProject<T> {
}
Ok(())
}
/// Adds a new library file
pub fn add_lib(&self, name: impl AsRef<str>, content: impl AsRef<str>) -> Result<PathBuf> {
let name = contract_file_name(name);
let lib_dir = self.get_lib()?;
let lib = lib_dir.join(name);
create_contract_file(lib, content)
}
/// Adds a new source file
pub fn add_source(&self, name: impl AsRef<str>, content: impl AsRef<str>) -> Result<PathBuf> {
let name = contract_file_name(name);
let source = self.paths().sources.join(name);
create_contract_file(source, content)
}
}
fn create_contract_file(path: PathBuf, content: impl AsRef<str>) -> Result<PathBuf> {
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)
.map_err(|err| SolcIoError::new(err, parent.to_path_buf()))?;
}
std::fs::write(&path, content.as_ref()).map_err(|err| SolcIoError::new(err, path.clone()))?;
Ok(path)
}
fn contract_file_name(name: impl AsRef<str>) -> String {
let name = name.as_ref();
if name.ends_with(".sol") {
name.to_string()
} else {
format!("{}.sol", name)
}
}
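A sketch of how the new helpers compose in a test (error handling elided); note that `.sol` is appended automatically and parent directories are created on demand:
use ethers_solc::{project_util::TempProject, MinimalCombinedArtifacts};

fn demo() -> Result<(), Box<dyn std::error::Error>> {
    let project = TempProject::<MinimalCombinedArtifacts>::dapptools()?;
    let src = project.add_source("Foo", "pragma solidity ^0.8.10;\ncontract Foo {}")?;
    assert!(src.ends_with("Foo.sol"));
    Ok(())
}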
impl TempProject<HardhatArtifacts> {

ethers-solc/src/resolver.rs

@ -0,0 +1,525 @@
//! Resolution of the entire dependency graph for a project.
//!
//! This module implements the core logic in taking all contracts of a project and creating a
//! resolved graph with applied remappings for all source contracts.
//!
//! Some constraints we're working with when resolving contracts:
//!
//! 1. Each file can contain several source units and can have any number of imports/dependencies
//! (using the terms interchangeably). Each dependency can declare, via its solidity version
//! pragma, a version range that it is compatible with.
//! 2. A dependency can be imported from any directory,
//! see `Remappings`
//!
//! Finding all dependencies is fairly simple: we do a DFS starting from the source
//! contracts.
//!
//! ## Performance
//!
//! Note that this is a relatively performance-critical portion of the ethers-solc preprocessing.
//! The data that needs to be processed is proportional to the size of the dependency
//! graph, which can, depending on the project, often be quite large.
//!
//! Note that, unlike the solidity compiler, we work with the filesystem, where we have to resolve
//! remappings and follow relative paths. We're also limiting the nodes in the graph to solidity
//! files, since we're only interested in their
//! [version pragma](https://docs.soliditylang.org/en/develop/layout-of-source-files.html#version-pragma),
//! which is defined on a per source file basis.
use std::{
collections::{HashMap, VecDeque},
path::{Component, Path, PathBuf},
};
use rayon::prelude::*;
use semver::VersionReq;
use solang_parser::pt::{Import, SourceUnitPart};
use crate::{error::Result, utils, ProjectPathsConfig, Solc, Source, Sources};
/// Represents a fully-resolved solidity dependency graph. Each node in the graph
/// is a file and edges represent dependencies between them.
/// See also https://docs.soliditylang.org/en/latest/layout-of-source-files.html?highlight=import#importing-other-source-files
#[derive(Debug)]
pub struct Graph {
nodes: Vec<Node>,
/// The indices of `edges` correspond to the `nodes`. That is, `edges[0]`
/// is the set of outgoing edges for `nodes[0]`.
edges: Vec<Vec<usize>>,
/// maps a solidity file to its index, for fast lookup.
indices: HashMap<PathBuf, usize>,
/// the number of input files we started with; corresponds to
/// `nodes[..num_input_files]`.
num_input_files: usize,
/// the root of the project this graph represents
#[allow(unused)]
root: PathBuf,
}
impl Graph {
/// Returns a list of nodes the given node index points to.
pub fn imported_nodes(&self, from: usize) -> &[usize] {
&self.edges[from]
}
/// Returns all the resolved files and their index in the graph
pub fn files(&self) -> &HashMap<PathBuf, usize> {
&self.indices
}
/// Gets a node by index.
pub fn node(&self, index: usize) -> &Node {
&self.nodes[index]
}
/// Returns all files together with their paths
pub fn into_sources(self) -> Sources {
self.nodes.into_iter().map(|node| (node.path, node.source)).collect()
}
/// Returns an iterator that yields only those nodes that represent input files.
/// See `Self::resolve_sources`.
/// This won't yield any resolved library nodes.
pub fn input_nodes(&self) -> impl Iterator<Item = &Node> {
self.nodes.iter().take(self.num_input_files)
}
/// Resolves a number of sources within the given config
pub fn resolve_sources(paths: &ProjectPathsConfig, sources: Sources) -> Result<Graph> {
/// Checks if the given target path was already resolved; if so, adds its id to the list
/// of resolved imports. If it hasn't been resolved yet, the file is queued for
/// processing.
fn add_node(
unresolved: &mut VecDeque<(PathBuf, Node)>,
index: &mut HashMap<PathBuf, usize>,
resolved_imports: &mut Vec<usize>,
target: PathBuf,
) -> Result<()> {
if let Some(idx) = index.get(&target).copied() {
resolved_imports.push(idx);
} else {
// imported file is not part of the input files
let node = read_node(&target)?;
unresolved.push_back((target.clone(), node));
let idx = index.len();
index.insert(target, idx);
resolved_imports.push(idx);
}
Ok(())
}
// we start off by reading all input files, which includes all solidity files from the
// source and test folder
let mut unresolved: VecDeque<(PathBuf, Node)> = sources
.into_par_iter()
.map(|(path, source)| {
let data = parse_data(source.as_ref());
(path.clone(), Node { path, source, data })
})
.collect();
// identifiers of all resolved files
let mut index: HashMap<_, _> =
unresolved.iter().enumerate().map(|(idx, (p, _))| (p.clone(), idx)).collect();
let num_input_files = unresolved.len();
// contains the files and their dependencies
let mut nodes = Vec::with_capacity(unresolved.len());
let mut edges = Vec::with_capacity(unresolved.len());
// now we need to resolve all imports for the source file and those imported from other
// locations
while let Some((path, node)) = unresolved.pop_front() {
let mut resolved_imports = Vec::with_capacity(node.data.imports.len());
// parent directory of the current file
let node_dir = match path.parent() {
Some(inner) => inner,
None => continue,
};
for import in node.data.imports.iter() {
let component = match import.components().next() {
Some(inner) => inner,
None => continue,
};
if component == Component::CurDir || component == Component::ParentDir {
// if the import is relative we assume it's already part of the processed input
// file set
match utils::canonicalize(node_dir.join(import)) {
Ok(target) => {
// the file at least exists,
add_node(&mut unresolved, &mut index, &mut resolved_imports, target)?;
}
Err(err) => {
tracing::trace!("failed to resolve relative import \"{:?}\"", err);
}
}
} else {
// resolve library file
if let Some(lib) = paths.resolve_library_import(import.as_ref()) {
add_node(&mut unresolved, &mut index, &mut resolved_imports, lib)?;
} else {
tracing::trace!(
"failed to resolve library import \"{:?}\"",
import.display()
);
}
}
}
nodes.push(node);
edges.push(resolved_imports);
}
Ok(Graph { nodes, edges, indices: index, num_input_files, root: paths.root.clone() })
}
/// Resolves the dependencies of a project's source contracts
pub fn resolve(paths: &ProjectPathsConfig) -> Result<Graph> {
Self::resolve_sources(paths, paths.read_input_files()?)
}
}
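Tying it together — a sketch that resolves a project's graph and walks the edges (hypothetical root; `Graph` is re-exported from the crate root in this commit's `lib.rs` change):
use ethers_solc::{Graph, ProjectPathsConfig};
use std::path::Path;

fn print_edges(root: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let paths = ProjectPathsConfig::dapptools(root)?;
    let graph = Graph::resolve(&paths)?;
    for (file, idx) in graph.files() {
        // indices into `nodes`/`edges`; input files come first
        println!("{} imports nodes {:?}", file.display(), graph.imported_nodes(*idx));
    }
    Ok(())
}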
#[cfg(all(feature = "svm", feature = "async"))]
impl Graph {
/// Returns all input files together with their appropriate version.
///
/// First we determine the compatible version for each input file (from sources and test folder,
/// see `Self::resolve`) and then we add all resolved library imports.
pub fn into_sources_by_version(self, offline: bool) -> Result<VersionedSources> {
/// Inserts the imports of the given node into the sources map.
/// The following graph can occur:
/// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)`,
/// where `C` is a library import; in that case we assign `C` only to the first input file.
/// Strictly speaking, it's not required to include these imports in the solc `CompilerInput`,
/// as solc would pick them up on its own, but we add them so we can create corresponding
/// cache entries for them as well. This could be optimized further.
fn insert_imports(
idx: usize,
all_nodes: &mut HashMap<usize, Node>,
sources: &mut Sources,
edges: &[Vec<usize>],
num_input_files: usize,
) {
for dep in edges[idx].iter().copied() {
// we only process nodes that were added as part of the resolve step because input
// nodes are handled separately
if dep >= num_input_files {
// library import
if let Some(node) = all_nodes.remove(&dep) {
sources.insert(node.path, node.source);
insert_imports(dep, all_nodes, sources, edges, num_input_files);
}
}
}
}
let versioned_nodes = self.get_input_node_versions(offline)?;
let Self { nodes, edges, num_input_files, .. } = self;
let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len());
let mut all_nodes = nodes.into_iter().enumerate().collect::<HashMap<_, _>>();
// determine the `Sources` set for each solc version
for (version, input_node_indices) in versioned_nodes {
let mut sources = Sources::new();
// we only process input nodes (from sources, tests for example)
for idx in input_node_indices {
// insert the input node in the sources set and remove it from the available set
let node = all_nodes.remove(&idx).expect("node is present. qed");
sources.insert(node.path, node.source);
insert_imports(idx, &mut all_nodes, &mut sources, &edges, num_input_files);
}
versioned_sources.insert(version, sources);
}
Ok(VersionedSources { inner: versioned_sources, offline })
}
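A sketch of the version bucketing from the outside (assumes the `svm`/`async` features and that `AllowedLibPaths` is exported at the crate root):
use ethers_solc::{AllowedLibPaths, Graph, ProjectPathsConfig};
use std::{convert::TryFrom, path::Path};

fn sources_per_solc(root: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let paths = ProjectPathsConfig::dapptools(root)?;
    let graph = Graph::resolve(&paths)?;
    // `false` = online mode: missing compilers may be installed on demand
    let versioned = graph.into_sources_by_version(false)?;
    let allowed = AllowedLibPaths::try_from(vec![root.to_path_buf()])?;
    for (solc, sources) in versioned.get(&allowed)? {
        println!("{}: {} files", solc.solc.display(), sources.len());
    }
    Ok(())
}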
/// Writes the list of imported files into the given formatter:
/// `A (version) imports B (version)`
fn format_imports_list<W: std::fmt::Write>(
&self,
idx: usize,
f: &mut W,
) -> std::result::Result<(), std::fmt::Error> {
let node = self.node(idx);
for dep in self.imported_nodes(idx) {
let dep = self.node(*dep);
writeln!(
f,
" {} ({:?}) imports {} ({:?})",
utils::source_name(&node.path, &self.root).display(),
node.data.version,
utils::source_name(&dep.path, &self.root).display(),
dep.data.version
)?;
}
for dep in self.imported_nodes(idx) {
self.format_imports_list(*dep, f)?;
}
Ok(())
}
/// Filters incompatible versions from the `candidates`.
fn retain_compatible_versions(
&self,
idx: usize,
candidates: &mut Vec<&crate::SolcVersion>,
traversed: &mut std::collections::HashSet<(usize, usize)>,
) -> std::result::Result<(), String> {
let node = self.node(idx);
if let Some(ref req) = node.data.version_req {
candidates.retain(|v| req.matches(v.as_ref()));
}
for dep in self.imported_nodes(idx).iter().copied() {
// check for circular deps which would result in endless recursion SO here
// a circular dependency exists, if there was already a `dependency imports current
// node` relationship in the traversed path
if traversed.contains(&(dep, idx)) {
let mut msg = String::new();
self.format_imports_list(dep, &mut msg).unwrap();
return Err(format!("Encountered circular dependencies in:\n{}", msg))
}
traversed.insert((idx, dep));
self.retain_compatible_versions(dep, candidates, traversed)?;
}
Ok(())
}
/// Ensures that all files are compatible with all of their imports.
pub fn ensure_compatible_imports(&self, offline: bool) -> Result<()> {
self.get_input_node_versions(offline)?;
Ok(())
}
/// Returns a map of versions together with the input nodes that are compatible with that
/// version.
///
/// This will essentially do a DFS on all input sources and their transitive imports,
/// checking that all of them can be compiled with the version stated in the input file.
///
/// Returns an error message with __all__ input files that don't have compatible imports.
///
/// This also attempts to prefer local installations over remotely available versions.
/// If `offline` is set to `true`, only already installed versions are considered.
fn get_input_node_versions(
&self,
offline: bool,
) -> Result<HashMap<crate::SolcVersion, Vec<usize>>> {
// this is likely called by an application and will eventually be printed, so we don't exit
// on the first error; instead we gather all the errors and return a bundled error message
let mut errors = Vec::new();
// we also don't want duplicate error diagnostics
let mut erroneous_nodes = std::collections::HashSet::with_capacity(self.num_input_files);
let all_versions = if offline { Solc::installed_versions() } else { Solc::all_versions() };
// stores all versions and their nodes
let mut versioned_nodes = HashMap::new();
// walking through the node's dep tree and filtering the versions along the way
for idx in 0..self.num_input_files {
let mut candidates = all_versions.iter().collect::<Vec<_>>();
let mut traversed = std::collections::HashSet::new();
if let Err(msg) = self.retain_compatible_versions(idx, &mut candidates, &mut traversed)
{
errors.push(msg);
}
if candidates.is_empty() && !erroneous_nodes.contains(&idx) {
let mut msg = String::new();
self.format_imports_list(idx, &mut msg).unwrap();
errors.push(format!(
"Discovered incompatible solidity versions in following\n: {}",
msg
));
erroneous_nodes.insert(idx);
} else {
let candidate = (*candidates
.iter()
.rev()
.find(|v| v.is_installed())
.or_else(|| candidates.iter().last())
.unwrap())
.clone();
versioned_nodes.entry(candidate).or_insert_with(|| Vec::with_capacity(1)).push(idx);
}
}
if errors.is_empty() {
Ok(versioned_nodes)
} else {
Err(crate::error::SolcError::msg(errors.join("\n")))
}
}
}
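And the offline knob in isolation — a sketch validating import compatibility against locally installed compilers only:
use ethers_solc::{Graph, ProjectPathsConfig};
use std::path::Path;

fn check_offline(root: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let paths = ProjectPathsConfig::dapptools(root)?;
    let graph = Graph::resolve(&paths)?;
    // `true`: only versions from `Solc::installed_versions()` are candidates
    graph.ensure_compatible_imports(true)?;
    Ok(())
}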
/// Container type for solc versions and their compatible sources
#[cfg(all(feature = "svm", feature = "async"))]
#[derive(Debug)]
pub struct VersionedSources {
inner: HashMap<crate::SolcVersion, Sources>,
offline: bool,
}
#[cfg(all(feature = "svm", feature = "async"))]
impl VersionedSources {
/// Resolves or installs the corresponding `Solc` installation.
pub fn get(
self,
allowed_lib_paths: &crate::AllowedLibPaths,
) -> Result<std::collections::BTreeMap<Solc, Sources>> {
use crate::SolcError;
let mut sources_by_version = std::collections::BTreeMap::new();
for (version, sources) in self.inner {
if !version.is_installed() {
if self.offline {
return Err(SolcError::msg(format!(
"missing solc \"{}\" installation in offline mode",
version
)))
} else {
Solc::blocking_install(version.as_ref())?;
}
}
let solc = Solc::find_svm_installed_version(version.to_string())?.ok_or_else(|| {
SolcError::msg(format!("solc \"{}\" should have been installed", version))
})?;
tracing::trace!("verifying solc checksum for {}", solc.solc.display());
if solc.verify_checksum().is_err() {
tracing::trace!("corrupted solc version, redownloading \"{}\"", version);
Solc::blocking_install(version.as_ref())?;
tracing::trace!("reinstalled solc: \"{}\"", version);
}
sources_by_version
.insert(solc.arg("--allow-paths").arg(allowed_lib_paths.to_string()), sources);
}
Ok(sources_by_version)
}
}
#[derive(Debug)]
pub struct Node {
path: PathBuf,
source: Source,
data: SolData,
}
#[derive(Debug, Clone)]
#[allow(unused)]
struct SolData {
version: Option<String>,
version_req: Option<VersionReq>,
imports: Vec<PathBuf>,
}
fn read_node(file: impl AsRef<Path>) -> Result<Node> {
let file = file.as_ref();
let source = Source::read(file)?;
let data = parse_data(source.as_ref());
Ok(Node { path: file.to_path_buf(), source, data })
}
/// Extracts the useful data from a solidity source
///
/// This will attempt to parse the solidity AST and extract the imports and version pragma. If
/// parsing fails, we'll fall back to extracting that info via regex
fn parse_data(content: &str) -> SolData {
let mut version = None;
let mut imports = Vec::new();
match solang_parser::parse(content, 0) {
Ok(units) => {
for unit in units.0 {
match unit {
SourceUnitPart::PragmaDirective(_, pragma, value) => {
if pragma.name == "solidity" {
// we're only interested in the solidity version pragma
version = Some(value.string);
}
}
SourceUnitPart::ImportDirective(_, import) => {
let import = match import {
Import::Plain(s) => s,
Import::GlobalSymbol(s, _) => s,
Import::Rename(s, _) => s,
};
imports.push(PathBuf::from(import.string));
}
_ => {}
}
}
}
Err(err) => {
tracing::trace!(
"failed to parse solidity ast: \"{:?}\". Falling back to regex to extract data",
err
);
version = utils::find_version_pragma(content).map(str::to_string);
imports = utils::find_import_paths(content)
.into_iter()
.map(|p| Path::new(p).to_path_buf())
.collect()
}
};
let version_req = if let Some(ref v) = version { Solc::version_req(v).ok() } else { None };
SolData { version_req, version, imports }
}
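A standalone sketch mirroring the AST happy path above, using the solang-parser API exactly as this commit does (the regex fallback is omitted):
use solang_parser::pt::{Import, SourceUnitPart};

fn list_imports(content: &str) -> Vec<String> {
    let mut imports = Vec::new();
    if let Ok(units) = solang_parser::parse(content, 0) {
        for unit in units.0 {
            if let SourceUnitPart::ImportDirective(_, import) = unit {
                // every import variant carries the path as its first field
                let path = match import {
                    Import::Plain(s) | Import::GlobalSymbol(s, _) | Import::Rename(s, _) => s,
                };
                imports.push(path.string);
            }
        }
    }
    imports
}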
#[cfg(test)]
mod tests {
use super::*;
use std::path::Path;
#[test]
fn can_resolve_hardhat_dependency_graph() {
let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample");
let paths = ProjectPathsConfig::hardhat(root).unwrap();
let graph = Graph::resolve(&paths).unwrap();
assert_eq!(graph.num_input_files, 1);
assert_eq!(graph.files().len(), 2);
assert_eq!(
graph.files().clone(),
HashMap::from([
(paths.sources.join("Greeter.sol"), 0),
(paths.root.join("node_modules/hardhat/console.sol"), 1),
])
);
}
#[test]
fn can_resolve_dapp_dependency_graph() {
let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
let paths = ProjectPathsConfig::dapptools(root).unwrap();
let graph = Graph::resolve(&paths).unwrap();
assert_eq!(graph.num_input_files, 2);
assert_eq!(graph.files().len(), 3);
assert_eq!(
graph.files().clone(),
HashMap::from([
(paths.sources.join("Dapp.sol"), 0),
(paths.sources.join("Dapp.t.sol"), 1),
(paths.root.join("lib/ds-test/src/test.sol"), 2),
])
);
let dapp_test = graph.node(1);
assert_eq!(dapp_test.path, paths.sources.join("Dapp.t.sol"));
assert_eq!(
dapp_test.data.imports,
vec![
Path::new("ds-test/test.sol").to_path_buf(),
Path::new("./Dapp.sol").to_path_buf()
]
);
assert_eq!(graph.imported_nodes(1).to_vec(), vec![2, 0]);
}
}


@ -2,7 +2,7 @@
use std::path::{Component, Path, PathBuf};
use crate::error::SolcError;
use crate::{error::SolcError, SolcIoError};
use once_cell::sync::Lazy;
use regex::Regex;
use semver::Version;
@ -41,7 +41,8 @@ pub fn find_version_pragma(contract: &str) -> Option<&str> {
RE_SOL_PRAGMA_VERSION.captures(contract)?.name("version").map(|m| m.as_str())
}
/// Returns a list of absolute paths to all the solidity files under the root
/// Returns a list of absolute paths to all the solidity files under the root, or the file itself,
/// if the path is a solidity file.
///
/// NOTE: this does not resolve imports from other locations
///
@ -72,6 +73,12 @@ pub fn is_local_source_name(libs: &[impl AsRef<Path>], source: impl AsRef<Path>)
resolve_library(libs, source).is_none()
}
/// Canonicalize the path, platform-agnostic
pub fn canonicalize(path: impl AsRef<Path>) -> Result<PathBuf, SolcIoError> {
let path = path.as_ref();
dunce::canonicalize(&path).map_err(|err| SolcIoError::new(err, path))
}
/// Returns the path to the library if the source path is in fact determined to be a library path,
/// and it exists.
/// Note: this does not handle relative imports or remappings.


@ -1,15 +1,21 @@
//! project tests
use ethers_solc::{
cache::SOLIDITY_FILES_CACHE_FILENAME, project_util::*, MinimalCombinedArtifacts, Project,
ProjectPathsConfig,
};
use std::{
collections::HashMap,
io,
path::{Path, PathBuf},
str::FromStr,
};
use tempdir::TempDir;
use ethers_solc::{
cache::{SolFilesCache, SOLIDITY_FILES_CACHE_FILENAME},
project_util::*,
remappings::Remapping,
Graph, MinimalCombinedArtifacts, Project, ProjectPathsConfig,
};
#[test]
fn can_compile_hardhat_sample() {
let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample");
@ -59,6 +65,80 @@ fn can_compile_dapp_sample() {
assert!(!compiled.is_unchanged());
}
#[test]
fn can_compile_dapp_detect_changes_in_libs() {
let mut project = TempProject::<MinimalCombinedArtifacts>::dapptools().unwrap();
let remapping = project.paths().libraries[0].join("remapping");
project
.paths_mut()
.remappings
.push(Remapping::from_str(&format!("remapping={}/", remapping.display())).unwrap());
project.project_mut().auto_detect = false;
let src = project
.add_source(
"Foo",
r#"
pragma solidity ^0.8.10;
import "remapping/Bar.sol";
contract Foo {}
"#,
)
.unwrap();
let lib = project
.add_lib(
"remapping/Bar",
r#"
pragma solidity ^0.8.10;
contract Bar {}
"#,
)
.unwrap();
let graph = Graph::resolve(project.paths()).unwrap();
assert_eq!(graph.files().len(), 2);
assert_eq!(graph.files().clone(), HashMap::from([(src, 0), (lib, 1),]));
let compiled = project.compile().unwrap();
assert!(compiled.find("Foo").is_some());
assert!(compiled.find("Bar").is_some());
assert!(!compiled.has_compiler_errors());
// nothing to compile
let compiled = project.compile().unwrap();
assert!(compiled.find("Foo").is_some());
assert!(compiled.is_unchanged());
let cache = SolFilesCache::read(&project.paths().cache).unwrap();
assert_eq!(cache.files.len(), 2);
// overwrite lib
project
.add_lib(
"remapping/Bar",
r#"
pragma solidity ^0.8.10;
// changed lib
contract Bar {}
"#,
)
.unwrap();
let graph = Graph::resolve(project.paths()).unwrap();
assert_eq!(graph.files().len(), 2);
let compiled = project.compile().unwrap();
assert!(compiled.find("Foo").is_some());
assert!(compiled.find("Bar").is_some());
// ensure change is detected
assert!(!compiled.is_unchanged());
}
#[test]
fn can_compile_dapp_sample_with_cache() {
let tmp_dir = TempDir::new("root").unwrap();