feat(solc): add support for compiling solc in parallel (#652)
* docs: more docs and tracing
* feat: add compile many
* feat: add compile many
* fix: make fields optional
* chore: add num_cpus and criterion
* add compile benchmark
* feat: add job option
* feat: add parallel compilation support
* use full utilization
* chore: move pathmap to cache
* fix: async write all
* chore: clean up
This commit is contained in:

parent 788f59623d
commit a9a47a4cd6
Cargo.lock

@@ -318,6 +318,18 @@ version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3"
 
+[[package]]
+name = "bstr"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223"
+dependencies = [
+ "lazy_static",
+ "memchr",
+ "regex-automata",
+ "serde",
+]
+
 [[package]]
 name = "bumpalo"
 version = "3.8.0"
@@ -382,6 +394,15 @@ dependencies = [
  "serde_json",
 ]
 
+[[package]]
+name = "cast"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c24dab4283a142afa2fdca129b80ad2c6284e073930f964c3a1293c225ee39a"
+dependencies = [
+ "rustc_version",
+]
+
 [[package]]
 name = "cc"
 version = "1.0.72"
@@ -643,6 +664,88 @@ dependencies = [
  "cfg-if 1.0.0",
 ]
 
+[[package]]
+name = "criterion"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1604dafd25fba2fe2d5895a9da139f8dc9b319a5fe5354ca137cbbce4e178d10"
+dependencies = [
+ "atty",
+ "cast",
+ "clap",
+ "criterion-plot",
+ "csv",
+ "futures",
+ "itertools",
+ "lazy_static",
+ "num-traits",
+ "oorandom",
+ "plotters",
+ "rayon",
+ "regex",
+ "serde",
+ "serde_cbor",
+ "serde_derive",
+ "serde_json",
+ "tinytemplate",
+ "tokio",
+ "walkdir",
+]
+
+[[package]]
+name = "criterion-plot"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d00996de9f2f7559f7f4dc286073197f83e92256a59ed395f9aac01fe717da57"
+dependencies = [
+ "cast",
+ "itertools",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4"
+dependencies = [
+ "cfg-if 1.0.0",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
+dependencies = [
+ "cfg-if 1.0.0",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd"
+dependencies = [
+ "cfg-if 1.0.0",
+ "crossbeam-utils",
+ "lazy_static",
+ "memoffset",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db"
+dependencies = [
+ "cfg-if 1.0.0",
+ "lazy_static",
+]
+
 [[package]]
 name = "crunchy"
 version = "0.2.2"
@@ -694,6 +797,28 @@ dependencies = [
  "subtle",
 ]
 
+[[package]]
+name = "csv"
+version = "1.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1"
+dependencies = [
+ "bstr",
+ "csv-core",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "csv-core"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "ctr"
 version = "0.7.0"
@@ -1187,6 +1312,7 @@ name = "ethers-solc"
 version = "0.1.0"
 dependencies = [
  "colored",
+ "criterion",
  "ethers-core",
  "futures-util",
  "getrandom 0.2.3",
@@ -1194,6 +1320,7 @@ dependencies = [
  "hex",
  "home",
  "md-5",
+ "num_cpus",
  "once_cell",
  "regex",
  "semver",
@@ -1476,6 +1603,12 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "half"
+version = "1.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7"
+
 [[package]]
 name = "harp"
 version = "0.1.0"
@@ -1829,6 +1962,15 @@ version = "2.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
 
+[[package]]
+name = "memoffset"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+dependencies = [
+ "autocfg",
+]
+
 [[package]]
 name = "memory_units"
 version = "0.4.0"
@@ -1963,6 +2105,12 @@ version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56"
 
+[[package]]
+name = "oorandom"
+version = "11.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
+
 [[package]]
 name = "opaque-debug"
 version = "0.2.3"
@@ -2168,6 +2316,34 @@ version = "0.3.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "12295df4f294471248581bc09bef3c38a5e46f1e36d6a37353621a0c6c357e1f"
 
+[[package]]
+name = "plotters"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a3fd9ec30b9749ce28cd91f255d569591cdf937fe280c312143e3c4bad6f2a"
+dependencies = [
+ "num-traits",
+ "plotters-backend",
+ "plotters-svg",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "plotters-backend"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d88417318da0eaf0fdcdb51a0ee6c3bed624333bff8f946733049380be67ac1c"
+
+[[package]]
+name = "plotters-svg"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "521fa9638fa597e1dc53e9412a4f9cefb01187ee1f7413076f9e6749e2885ba9"
+dependencies = [
+ "plotters-backend",
+]
+
 [[package]]
 name = "ppv-lite86"
 version = "0.2.15"
@@ -2360,6 +2536,31 @@ dependencies = [
  "rand_core 0.6.3",
 ]
 
+[[package]]
+name = "rayon"
+version = "1.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90"
+dependencies = [
+ "autocfg",
+ "crossbeam-deque",
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-utils",
+ "lazy_static",
+ "num_cpus",
+]
+
 [[package]]
 name = "rdrand"
 version = "0.4.0"
@@ -2399,6 +2600,12 @@ dependencies = [
  "regex-syntax",
 ]
 
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+
 [[package]]
 name = "regex-syntax"
 version = "0.6.25"
@@ -2798,6 +3005,16 @@ dependencies = [
  "serde_json",
 ]
 
+[[package]]
+name = "serde_cbor"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5"
+dependencies = [
+ "half",
+ "serde",
+]
+
 [[package]]
 name = "serde_derive"
 version = "1.0.130"
@@ -3182,6 +3399,16 @@ dependencies = [
  "crunchy",
 ]
 
+[[package]]
+name = "tinytemplate"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
+dependencies = [
+ "serde",
+ "serde_json",
+]
+
 [[package]]
 name = "tinyvec"
 version = "1.5.1"
ethers-solc/Cargo.toml

@@ -30,6 +30,7 @@ colored = "2.0.0"
 svm = { package = "svm-rs", version = "0.2.0", optional = true }
 glob = "0.3.0"
 tracing = "0.1.29"
+num_cpus = "1.13.0"
 
 [target.'cfg(not(any(target_arch = "x86", target_arch = "x86_64")))'.dependencies]
 sha2 = { version = "0.9.8", default-features = false }
@@ -45,9 +46,14 @@ home = "0.5.3"
 getrandom = { version = "0.2", features = ["js"] }
 
 [dev-dependencies]
+criterion = { version = "0.3", features = ["async_tokio"] }
 tokio = { version = "1.12.0", features = ["full"] }
 tempdir = "0.3.7"
 
+[[bench]]
+name = "compile_many"
+harness = false
+
 [features]
 async = ["tokio", "futures-util"]
 full = ["async", "svm"]
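Note on the manifest changes: `harness = false` opts the new `compile_many` bench target out of libtest's default harness so that criterion can supply its own `main` (via `criterion_main!` in the new bench file below), and criterion's `async_tokio` feature is what enables `Bencher::to_async` with a tokio runtime there.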
ethers-solc/benches/compile_many.rs (new file)

@@ -0,0 +1,55 @@
+//! compile many benches
+#[macro_use]
+extern crate criterion;
+
+use criterion::Criterion;
+use ethers_solc::{CompilerInput, Solc};
+use std::path::Path;
+
+fn compile_many_benchmark(c: &mut Criterion) {
+    let inputs = load_compiler_inputs();
+    let solc = Solc::default();
+
+    let mut group = c.benchmark_group("compile many");
+    group.sample_size(10);
+    group.bench_function("sequential", |b| {
+        b.iter(|| {
+            for i in inputs.iter() {
+                let _ = solc.compile(i).unwrap();
+            }
+        });
+    });
+
+    #[cfg(feature = "full")]
+    {
+        let tasks = inputs.into_iter().map(|input| (Solc::default(), input)).collect::<Vec<_>>();
+        let num = tasks.len();
+        group.bench_function("concurrently", |b| {
+            b.to_async(tokio::runtime::Runtime::new().unwrap()).iter(|| async {
+                let _ = Solc::compile_many(tasks.clone(), num).await.flattened().unwrap();
+            });
+        });
+    }
+}
+
+fn load_compiler_inputs() -> Vec<CompilerInput> {
+    let mut inputs = Vec::new();
+    for file in std::fs::read_dir(Path::new(&env!("CARGO_MANIFEST_DIR")).join("test-data/in"))
+        .unwrap()
+        .into_iter()
+        .take(5)
+    {
+        let file = file.unwrap();
+        if file.path().to_string_lossy().as_ref().ends_with("20.json") {
+            // TODO needs support for parsing library placeholders first
+            continue
+        }
+        let input = std::fs::read_to_string(file.path()).unwrap();
+        let input: CompilerInput = serde_json::from_str(&input).unwrap();
+        inputs.push(input);
+    }
+    inputs
+}
+
+criterion_group!(benches, compile_many_benchmark);
+criterion_main!(benches);
ethers-solc/src/artifacts.rs

@@ -17,6 +17,8 @@ use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
 /// An ordered list of files and their source
 pub type Sources = BTreeMap<PathBuf, Source>;
 
+pub type Contracts = BTreeMap<String, BTreeMap<String, Contract>>;
+
 /// Input type `solc` expects
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct CompilerInput {
@@ -453,7 +455,7 @@ pub struct CompilerOutput {
     #[serde(default)]
     pub sources: BTreeMap<String, SourceFile>,
     #[serde(default)]
-    pub contracts: BTreeMap<String, BTreeMap<String, Contract>>,
+    pub contracts: Contracts,
 }
 
 impl CompilerOutput {
@@ -592,7 +594,10 @@ impl CompactContract {
 impl From<Contract> for CompactContract {
     fn from(c: Contract) -> Self {
         let (bin, bin_runtime) = if let Some(evm) = c.evm {
-            (Some(evm.bytecode.object), evm.deployed_bytecode.bytecode.map(|evm| evm.object))
+            (
+                Some(evm.bytecode.object),
+                evm.deployed_bytecode.and_then(|deployed| deployed.bytecode.map(|evm| evm.object)),
+            )
         } else {
             (None, None)
         };
@@ -636,7 +641,9 @@ impl<'a> From<&'a Contract> for CompactContractRef<'a> {
         let (bin, bin_runtime) = if let Some(ref evm) = c.evm {
             (
                 Some(&evm.bytecode.object),
-                evm.deployed_bytecode.bytecode.as_ref().map(|evm| &evm.object),
+                evm.deployed_bytecode
+                    .as_ref()
+                    .and_then(|deployed| deployed.bytecode.as_ref().map(|evm| &evm.object)),
             )
         } else {
             (None, None)
@@ -684,7 +691,8 @@ pub struct Evm {
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub legacy_assembly: Option<serde_json::Value>,
     pub bytecode: Bytecode,
-    pub deployed_bytecode: DeployedBytecode,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub deployed_bytecode: Option<DeployedBytecode>,
     /// The list of function hashes
     #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
     pub method_identifiers: BTreeMap<String, String>,
@@ -703,9 +711,11 @@ pub struct Bytecode {
     #[serde(deserialize_with = "deserialize_bytes")]
     pub object: Bytes,
     /// Opcodes list (string)
-    pub opcodes: String,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub opcodes: Option<String>,
     /// The source mapping as a string. See the source mapping definition.
-    pub source_map: String,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub source_map: Option<String>,
     /// Array of sources generated by the compiler. Currently only contains a
     /// single Yul file.
     #[serde(default, skip_serializing_if = "Vec::is_empty")]
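Several `Evm`/`Bytecode` fields become `Option`s above because not every `solc` invocation emits them ("fix: make fields optional" in the commit message). A minimal standalone sketch of how the `#[serde(default, skip_serializing_if = "Option::is_none")]` pair behaves on a round-trip; `BytecodeSketch` is a hypothetical stand-in, not the crate's type:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct BytecodeSketch {
    object: String,
    // absent in the JSON -> defaults to None instead of failing deserialization
    #[serde(default, skip_serializing_if = "Option::is_none")]
    opcodes: Option<String>,
}

fn main() {
    // `opcodes` missing in the compiler output -> None
    let bc: BytecodeSketch = serde_json::from_str(r#"{"object":"6001"}"#).unwrap();
    assert!(bc.opcodes.is_none());
    // ...and the None field is omitted again when serializing back out
    assert_eq!(serde_json::to_string(&bc).unwrap(), r#"{"object":"6001"}"#);
}
```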
ethers-solc/src/cache.rs

@@ -1,13 +1,13 @@
 //! Support for compiling contracts
 use crate::{
-    artifacts::Sources,
+    artifacts::{Contracts, Sources},
     config::SolcConfig,
     error::{Result, SolcError},
     utils, ArtifactOutput,
 };
 use serde::{Deserialize, Serialize};
 use std::{
-    collections::BTreeMap,
+    collections::{BTreeMap, HashMap},
     fs::{self, File},
     path::{Path, PathBuf},
     time::{Duration, UNIX_EPOCH},
@@ -287,6 +287,64 @@ impl CacheEntry {
     }
 }
 
+/// A helper type to handle source name/full disk mappings
+///
+/// The disk path is the actual path where a file can be found on disk.
+/// A source name is the internal identifier and is the remaining part of the disk path starting
+/// with the configured source directory, (`contracts/contract.sol`)
+#[derive(Debug, Default)]
+pub struct PathMap {
+    /// all libraries to the source set while keeping track of their actual disk path
+    /// (`contracts/contract.sol` -> `/Users/.../contracts.sol`)
+    pub source_name_to_path: HashMap<PathBuf, PathBuf>,
+    /// inverse of `source_name_to_path` : (`/Users/.../contracts.sol` -> `contracts/contract.sol`)
+    pub path_to_source_name: HashMap<PathBuf, PathBuf>,
+    /* /// All paths, source names and actual file paths
+     * paths: Vec<PathBuf> */
+}
+
+impl PathMap {
+    fn apply_mappings(sources: Sources, mappings: &HashMap<PathBuf, PathBuf>) -> Sources {
+        sources
+            .into_iter()
+            .map(|(import, source)| {
+                if let Some(path) = mappings.get(&import).cloned() {
+                    (path, source)
+                } else {
+                    (import, source)
+                }
+            })
+            .collect()
+    }
+
+    /// Returns all contract names of the files mapped with the disk path
+    pub fn get_artifacts(&self, contracts: &Contracts) -> Vec<(PathBuf, Vec<String>)> {
+        contracts
+            .iter()
+            .map(|(path, contracts)| {
+                let path = PathBuf::from(path);
+                let file = self.source_name_to_path.get(&path).cloned().unwrap_or(path);
+                (file, contracts.keys().cloned().collect::<Vec<_>>())
+            })
+            .collect()
+    }
+
+    pub fn extend(&mut self, other: PathMap) {
+        self.source_name_to_path.extend(other.source_name_to_path);
+        self.path_to_source_name.extend(other.path_to_source_name);
+    }
+
+    /// Returns a new map with the source names as keys
+    pub fn set_source_names(&self, sources: Sources) -> Sources {
+        Self::apply_mappings(sources, &self.path_to_source_name)
+    }
+
+    /// Returns a new map with the disk paths as keys
+    pub fn set_disk_paths(&self, sources: Sources) -> Sources {
+        Self::apply_mappings(sources, &self.source_name_to_path)
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
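To make the two `PathMap` directions concrete, here is a small self-contained sketch using plain `HashMap`s (illustrative paths, not the type above): a source name like `contracts/Contract.sol` is what `solc` sees, the disk path is where the file actually lives, and the two maps are inverses of each other:

```rust
use std::{collections::HashMap, path::PathBuf};

fn main() {
    // source name (what solc sees) -> disk path (where the file lives)
    let mut source_name_to_path: HashMap<PathBuf, PathBuf> = HashMap::new();
    source_name_to_path.insert(
        PathBuf::from("contracts/Contract.sol"),
        PathBuf::from("/Users/me/project/contracts/Contract.sol"),
    );

    // the inverse mapping, maintained alongside it
    let path_to_source_name: HashMap<PathBuf, PathBuf> =
        source_name_to_path.iter().map(|(name, path)| (path.clone(), name.clone())).collect();

    let disk = PathBuf::from("/Users/me/project/contracts/Contract.sol");
    assert_eq!(path_to_source_name[&disk], PathBuf::from("contracts/Contract.sol"));
}
```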
ethers-solc/src/compile.rs

@@ -40,6 +40,7 @@ use once_cell::sync::Lazy;
 
 #[cfg(any(test, feature = "tests"))]
 use std::sync::Mutex;
 
 #[cfg(any(test, feature = "tests"))]
 static LOCK: Lazy<Mutex<()>> = Lazy::new(|| Mutex::new(()));
 
@@ -363,7 +364,7 @@ impl Solc {
             .stdout(Stdio::piped())
             .spawn()?;
         let stdin = child.stdin.as_mut().unwrap();
-        stdin.write(&content).await?;
+        stdin.write_all(&content).await?;
         stdin.flush().await?;
         compile_output(child.wait_with_output().await?)
     }
@@ -380,6 +381,78 @@ impl Solc {
             .await?,
         )
     }
+
+    /// Compiles all `CompilerInput`s with their associated `Solc`.
+    ///
+    /// This will buffer up to `n` `solc` processes and then return the `CompilerOutput`s in the
+    /// order in which they complete. No more than `n` futures will be buffered at any point in
+    /// time, and less than `n` may also be buffered depending on the state of each future.
+    ///
+    /// # Example
+    ///
+    /// Compile 2 `CompilerInput`s at once
+    ///
+    /// ```no_run
+    /// # async fn example() {
+    /// use ethers_solc::{CompilerInput, Solc};
+    /// let solc1 = Solc::default();
+    /// let solc2 = Solc::default();
+    /// let input1 = CompilerInput::new("contracts").unwrap();
+    /// let input2 = CompilerInput::new("src").unwrap();
+    ///
+    /// let outputs = Solc::compile_many([(solc1, input1), (solc2, input2)], 2).await.flattened().unwrap();
+    /// # }
+    /// ```
+    pub async fn compile_many<I>(jobs: I, n: usize) -> CompiledMany
+    where
+        I: IntoIterator<Item = (Solc, CompilerInput)>,
+    {
+        use futures_util::stream::StreamExt;
+
+        let outputs = futures_util::stream::iter(
+            jobs.into_iter()
+                .map(|(solc, input)| async { (solc.async_compile(&input).await, solc, input) }),
+        )
+        .buffer_unordered(n)
+        .collect::<Vec<_>>()
+        .await;
+        CompiledMany { outputs }
+    }
+}
+
+/// The result of a `solc` process bundled with its `Solc` and `CompilerInput`
+type CompileElement = (Result<CompilerOutput>, Solc, CompilerInput);
+
+/// The output of multiple `solc` processes.
+#[derive(Debug)]
+pub struct CompiledMany {
+    outputs: Vec<CompileElement>,
+}
+
+impl CompiledMany {
+    /// Returns an iterator over all output elements
+    pub fn outputs(&self) -> impl Iterator<Item = &CompileElement> {
+        self.outputs.iter()
+    }
+
+    /// Returns an iterator over all output elements
+    pub fn into_outputs(self) -> impl Iterator<Item = CompileElement> {
+        self.outputs.into_iter()
+    }
+
+    /// Returns all `CompilerOutput` or the first error that occurred
+    pub fn flattened(self) -> Result<Vec<CompilerOutput>> {
+        self.into_iter().collect()
+    }
+}
+
+impl IntoIterator for CompiledMany {
+    type Item = Result<CompilerOutput>;
+    type IntoIter = std::vec::IntoIter<Result<CompilerOutput>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.outputs.into_iter().map(|(res, _, _)| res).collect::<Vec<_>>().into_iter()
+    }
 }
 
 fn compile_output(output: Output) -> Result<Vec<u8>> {
@@ -459,6 +532,17 @@ mod tests {
         let other = solc().async_compile(&serde_json::json!(input)).await.unwrap();
         assert_eq!(out, other);
     }
+
+    #[cfg(feature = "async")]
+    #[tokio::test]
+    async fn async_solc_compile_works2() {
+        let input = include_str!("../test-data/in/compiler-in-2.json");
+        let input: CompilerInput = serde_json::from_str(input).unwrap();
+        let out = solc().async_compile(&input).await.unwrap();
+        let other = solc().async_compile(&serde_json::json!(input)).await.unwrap();
+        assert_eq!(out, other);
+        let sync_out = solc().compile(&input).unwrap();
+        assert_eq!(out, sync_out);
+    }
 
     #[test]
     fn test_version_req() {
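The concurrency primitive behind `compile_many` is `futures_util`'s `buffer_unordered(n)`: at most `n` futures are polled at once, and results are yielded in completion order rather than submission order, exactly as the doc comment above describes. A minimal self-contained sketch of that behavior (illustrative only, no `solc` involved):

```rust
use futures_util::stream::{self, StreamExt};
use std::time::Duration;

#[tokio::main]
async fn main() {
    // eight jobs where higher indices finish sooner
    let jobs = (0u64..8).map(|i| async move {
        tokio::time::sleep(Duration::from_millis(80 - i * 10)).await;
        i
    });

    // at most 4 jobs in flight; results arrive in completion order
    let results: Vec<u64> = stream::iter(jobs).buffer_unordered(4).collect().await;
    println!("{:?}", results); // e.g. [3, 2, 1, 0, ...], not [0, 1, 2, ...]
}
```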
ethers-solc/src/lib.rs

@@ -25,18 +25,14 @@ use crate::{artifacts::Source, cache::SolFilesCache};
 pub mod error;
 pub mod utils;
 
-use crate::artifacts::Sources;
+use crate::{artifacts::Sources, cache::PathMap};
 use error::Result;
 use std::{
-    borrow::Cow,
-    collections::{BTreeMap, HashMap},
-    convert::TryInto,
-    fmt, fs, io,
-    marker::PhantomData,
+    borrow::Cow, collections::BTreeMap, convert::TryInto, fmt, fs, io, marker::PhantomData,
     path::PathBuf,
 };
 
-/// Handles contract compiling
+/// Represents a project workspace and handles `solc` compiling of all contracts in that workspace.
 #[derive(Debug)]
 pub struct Project<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> {
     /// The layout of the
@@ -57,6 +53,8 @@ pub struct Project<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> {
     pub ignored_error_codes: Vec<u64>,
     /// The paths which will be allowed for library inclusion
     pub allowed_lib_paths: AllowedLibPaths,
+    /// Maximum number of `solc` processes to run simultaneously.
+    solc_jobs: usize,
 }
 
 impl Project {
@@ -90,6 +88,12 @@ impl Project {
 }
 
 impl<Artifacts: ArtifactOutput> Project<Artifacts> {
+    /// Sets the maximum number of parallel `solc` processes to run simultaneously.
+    pub fn set_solc_jobs(&mut self, jobs: usize) {
+        assert!(jobs > 0);
+        self.solc_jobs = jobs;
+    }
+
     #[tracing::instrument(skip_all, name = "Project::write_cache_file")]
     fn write_cache_file(
         &self,
@@ -121,10 +125,11 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
         Ok(())
     }
 
-    /// Returns all sources found under the project's sources path
+    /// Returns all sources found under the project's configured sources path
     #[tracing::instrument(skip_all, fields(name = "sources"))]
     pub fn sources(&self) -> io::Result<Sources> {
-        Source::read_all_from(self.paths.sources.as_path())
+        tracing::trace!("reading all sources from \"{}\"", self.paths.sources.display());
+        Source::read_all_from(&self.paths.sources)
     }
 
     /// This emits the cargo [`rerun-if-changed`](https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath) instruction.
@@ -138,7 +143,7 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
     ///
     /// ```no_run
     /// use ethers_solc::{Project, ProjectPathsConfig};
-    /// // configure the project with all its paths, solc, cache etc.
+    /// // configure the project with all its paths, solc, cache etc. where the root dir is the current rust project.
    /// let project = Project::builder()
     ///     .paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
     ///     .build()
@@ -173,18 +178,15 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
     ///
     /// NOTE: this does not check if the contracts were successfully compiled, see
     /// `CompilerOutput::has_error` instead.
     ///
     /// NB: If the `svm` feature is enabled, this function will automatically detect
     /// solc versions across files.
     #[tracing::instrument(skip_all, name = "compile")]
     pub fn compile(&self) -> Result<ProjectCompileOutput<Artifacts>> {
-        tracing::trace!("sources");
         let sources = self.sources()?;
-        tracing::trace!("done");
 
         #[cfg(all(feature = "svm", feature = "async"))]
         if self.auto_detect {
-            tracing::trace!("auto-compile");
+            tracing::trace!("using solc auto detection");
             return self.svm_compile(sources)
         }
@@ -199,7 +201,7 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
     #[tracing::instrument(skip(self, sources))]
     fn svm_compile(&self, sources: Sources) -> Result<ProjectCompileOutput<Artifacts>> {
         use semver::{Version, VersionReq};
-        use std::collections::hash_map;
+        use std::collections::hash_map::{self, HashMap};
 
         // split them by version
         let mut sources_by_version = BTreeMap::new();
@@ -223,8 +225,8 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
                 version
             }
         };
 
         tracing::trace!("found installed solc \"{}\"", version);
 
         // gets the solc binary for that version, it is expected that this will succeed
         // AND find the solc since it was installed right above
         let mut solc = Solc::find_svm_installed_version(version.to_string())?
@@ -239,29 +241,108 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
         }
         tracing::trace!("preprocessing finished");
 
-        let mut compiled =
-            ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone());
-
-        // run the compilation step for each version
-        tracing::trace!("compiling sources with viable solc versions");
-        for (solc, sources) in sources_by_version {
-            let span = tracing::trace_span!("solc", "{}", solc.version_short()?);
-            let _enter = span.enter();
-
+        tracing::trace!("verifying solc checksums");
+        for solc in sources_by_version.keys() {
             // verify that this solc version's checksum matches the checksum found remotely. If
             // not, re-install the same version.
-            let version = solc_versions.get(&solc.solc).unwrap();
-            if let Err(_e) = solc.verify_checksum() {
-                tracing::trace!("corrupted solc version, redownloading...");
+            let version = &solc_versions[&solc.solc];
+            if solc.verify_checksum().is_err() {
+                tracing::trace!("corrupted solc version, redownloading \"{}\"", version);
                 Solc::blocking_install(version)?;
                 tracing::trace!("reinstalled solc: \"{}\"", version);
             }
-            // once matched, proceed to compile with it
-            tracing::trace!("compiling_with_version");
-            compiled.extend(self.compile_with_version(&solc, sources)?);
-            tracing::trace!("done compiling_with_version");
         }
-        tracing::trace!("compiled sources with viable solc versions");
+
+        // run the compilation step for each version
+        let compiled = if self.solc_jobs > 1 && sources_by_version.len() > 1 {
+            self.compile_many(sources_by_version)?
+        } else {
+            self.compile_sources(sources_by_version)?
+        };
+        tracing::trace!("compiled all sources");
+
+        Ok(compiled)
+    }
+
+    #[cfg(all(feature = "svm", feature = "async"))]
+    fn compile_sources(
+        &self,
+        sources_by_version: BTreeMap<Solc, BTreeMap<PathBuf, Source>>,
+    ) -> Result<ProjectCompileOutput<Artifacts>> {
+        tracing::trace!("compiling sources using a single solc job");
+        let mut compiled =
+            ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone());
+        for (solc, sources) in sources_by_version {
+            tracing::trace!(
+                "compiling {} sources with solc \"{}\"",
+                sources.len(),
+                solc.as_ref().display()
+            );
+            compiled.extend(self.compile_with_version(&solc, sources)?);
+        }
+        Ok(compiled)
+    }
+
+    #[cfg(all(feature = "svm", feature = "async"))]
+    fn compile_many(
+        &self,
+        sources_by_version: BTreeMap<Solc, BTreeMap<PathBuf, Source>>,
+    ) -> Result<ProjectCompileOutput<Artifacts>> {
+        tracing::trace!("compiling sources using {} solc jobs", self.solc_jobs);
+        let mut compiled =
+            ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone());
+        let mut paths = PathMap::default();
+        let mut jobs = Vec::with_capacity(sources_by_version.len());
+
+        let mut all_sources = BTreeMap::default();
+        let mut all_artifacts = Vec::with_capacity(sources_by_version.len());
+
+        // preprocess all sources
+        for (solc, sources) in sources_by_version {
+            match self.preprocess_sources(sources)? {
+                PreprocessedJob::Unchanged(artifacts) => {
+                    compiled.extend(ProjectCompileOutput::from_unchanged(artifacts));
+                }
+                PreprocessedJob::Items(sources, map, cached_artifacts) => {
+                    compiled.extend_artifacts(cached_artifacts);
+                    // replace absolute path with source name to make solc happy
+                    let sources = map.set_source_names(sources);
+                    paths.extend(map);
+
+                    let input = CompilerInput::with_sources(sources)
+                        .normalize_evm_version(&solc.version()?)
+                        .with_remappings(self.paths.remappings.clone());
+
+                    jobs.push((solc, input))
+                }
+            };
+        }
+
+        let outputs = tokio::runtime::Runtime::new()
+            .unwrap()
+            .block_on(Solc::compile_many(jobs, self.solc_jobs));
+
+        for (res, _, input) in outputs.into_outputs() {
+            let output = res?;
+
+            if !output.has_error() {
+                if self.cached {
+                    // get all contract names of the files and map them to the disk file
+                    all_artifacts.extend(paths.get_artifacts(&output.contracts));
+                    all_sources.extend(paths.set_disk_paths(input.sources));
+                }
+
+                if !self.no_artifacts {
+                    Artifacts::on_output(&output, &self.paths)?;
+                }
+            }
+            compiled.extend_output(output);
+        }
+
+        // write the cache file
+        if self.cached {
+            self.write_cache_file(all_sources, all_artifacts)?;
+        }
 
         Ok(compiled)
     }
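For intuition on the dispatch above, a trivial standalone sketch of the rule (hypothetical helper, not the crate's API): with `solc_jobs` defaulting to `num_cpus::get()`, the parallel path is only taken when more than one `solc` version bucket was detected and more than one job is allowed; otherwise the cheaper sequential path runs.

```rust
// Hypothetical illustration of the `solc_jobs > 1 && sources_by_version.len() > 1` check.
fn use_parallel(solc_jobs: usize, version_buckets: usize) -> bool {
    solc_jobs > 1 && version_buckets > 1
}

fn main() {
    assert!(!use_parallel(8, 1)); // single solc version -> sequential compile_sources
    assert!(!use_parallel(1, 3)); // single job allowed  -> sequential compile_sources
    assert!(use_parallel(8, 3)); // multiple versions and jobs -> parallel compile_many
}
```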
@@ -276,23 +357,70 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
     pub fn compile_with_version(
         &self,
         solc: &Solc,
-        mut sources: Sources,
+        sources: Sources,
     ) -> Result<ProjectCompileOutput<Artifacts>> {
-        let span = tracing::trace_span!("compiling");
-        let _enter = span.enter();
-        // add all libraries to the source set while keeping track of their actual disk path
-        // (`contracts/contract.sol` -> `/Users/.../contracts.sol`)
-        let mut source_name_to_path = HashMap::new();
-        // inverse of `source_name_to_path` : (`/Users/.../contracts.sol` ->
-        // `contracts/contract.sol`)
-        let mut path_to_source_name = HashMap::new();
+        let (sources, paths, cached_artifacts) = match self.preprocess_sources(sources)? {
+            PreprocessedJob::Unchanged(artifacts) => {
+                return Ok(ProjectCompileOutput::from_unchanged(artifacts))
+            }
+            PreprocessedJob::Items(a, b, c) => (a, b, c),
+        };
+
+        tracing::trace!("compiling");
+
+        // replace absolute path with source name to make solc happy
+        let sources = paths.set_source_names(sources);
+
+        let input = CompilerInput::with_sources(sources)
+            .normalize_evm_version(&solc.version()?)
+            .with_remappings(self.paths.remappings.clone());
+
+        tracing::trace!("calling solc with {} sources", input.sources.len());
+        let output = solc.compile(&input)?;
+        tracing::trace!("compiled input, output has error: {}", output.has_error());
+
+        if output.has_error() {
+            return Ok(ProjectCompileOutput::from_compiler_output(
+                output,
+                self.ignored_error_codes.clone(),
+            ))
+        }
+
+        if self.cached {
+            // get all contract names of the files and map them to the disk file
+            let artifacts = paths.get_artifacts(&output.contracts);
+            // reapply to disk paths
+            let sources = paths.set_disk_paths(input.sources);
+            // create cache file
+            self.write_cache_file(sources, artifacts)?;
+        }
+
+        // TODO: There seems to be some type redundancy here, c.f. discussion with @mattsse
+        if !self.no_artifacts {
+            Artifacts::on_output(&output, &self.paths)?;
+        }
+
+        Ok(ProjectCompileOutput::from_compiler_output_and_cache(
+            output,
+            cached_artifacts,
+            self.ignored_error_codes.clone(),
+        ))
+    }
+
+    /// Preprocesses the given source files by resolving their libs and checks against the cache
+    /// if configured
+    fn preprocess_sources(&self, mut sources: Sources) -> Result<PreprocessedJob<Artifacts>> {
+        tracing::trace!("preprocessing sources files");
+
+        // keeps track of source names / disk paths
+        let mut paths = PathMap::default();
+
         tracing::trace!("resolving libraries");
         for (import, (source, path)) in self.resolved_libraries(&sources)? {
             // inserting with absolute path here and keep track of the source name <-> path mappings
             sources.insert(path.clone(), source);
-            path_to_source_name.insert(path.clone(), import.clone());
-            source_name_to_path.insert(import, path);
+            paths.path_to_source_name.insert(path.clone(), import.clone());
+            paths.source_name_to_path.insert(import, path);
         }
         tracing::trace!("resolved libraries");
 
@@ -321,60 +449,14 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
             // if nothing changed and all artifacts still exist
             if changed_files.is_empty() {
                 tracing::trace!("unchanged source files");
-                return Ok(ProjectCompileOutput::from_unchanged(cached_artifacts))
+                return Ok(PreprocessedJob::Unchanged(cached_artifacts))
             }
             // There are changed files and maybe some cached files
             (changed_files, cached_artifacts)
         } else {
             (sources, BTreeMap::default())
         };
-
-        // replace absolute path with source name to make solc happy
-        let sources = apply_mappings(sources, path_to_source_name);
-
-        let input = CompilerInput::with_sources(sources)
-            .normalize_evm_version(&solc.version()?)
-            .with_remappings(self.paths.remappings.clone());
-        tracing::trace!("calling solc with {} sources", input.sources.len());
-        let output = solc.compile(&input)?;
-        tracing::trace!("compiled input, output has error: {}", output.has_error());
-
-        if output.has_error() {
-            return Ok(ProjectCompileOutput::from_compiler_output(
-                output,
-                self.ignored_error_codes.clone(),
-            ))
-        }
-
-        if self.cached {
-            // get all contract names of the files and map them to the disk file
-            let artifacts = output
-                .contracts
-                .iter()
-                .map(|(path, contracts)| {
-                    let path = PathBuf::from(path);
-                    let file = source_name_to_path.get(&path).cloned().unwrap_or(path);
-                    (file, contracts.keys().cloned().collect::<Vec<_>>())
-                })
-                .collect::<Vec<_>>();
-
-            // reapply to disk paths
-            let sources = apply_mappings(input.sources, source_name_to_path);
-
-            // create cache file
-            self.write_cache_file(sources, artifacts)?;
-        }
-
-        // TODO: There seems to be some type redundancy here, c.f. discussion with @mattsse
-        if !self.no_artifacts {
-            Artifacts::on_output(&output, &self.paths)?;
-        }
-
-        Ok(ProjectCompileOutput::from_compiler_output_and_cache(
-            output,
-            cached_artifacts,
-            self.ignored_error_codes.clone(),
-        ))
+        Ok(PreprocessedJob::Items(sources, paths, cached_artifacts))
     }
 
     /// Removes the project's artifacts and cache file
@@ -392,17 +474,9 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
         }
     }
 
-fn apply_mappings(sources: Sources, mut mappings: HashMap<PathBuf, PathBuf>) -> Sources {
-    sources
-        .into_iter()
-        .map(|(import, source)| {
-            if let Some(path) = mappings.remove(&import) {
-                (path, source)
-            } else {
-                (import, source)
-            }
-        })
-        .collect()
+enum PreprocessedJob<T: ArtifactOutput> {
+    Unchanged(BTreeMap<PathBuf, T::Artifact>),
+    Items(Sources, PathMap, BTreeMap<PathBuf, T::Artifact>),
 }
 
 pub struct ProjectBuilder<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> {
@@ -423,6 +497,7 @@ pub struct ProjectBuilder<Artifacts: ArtifactOutput = MinimalCombinedArtifacts>
     pub ignored_error_codes: Vec<u64>,
     /// All allowed paths
     pub allowed_paths: Vec<PathBuf>,
+    solc_jobs: Option<usize>,
 }
 
 impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
@@ -464,6 +539,22 @@ impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
         self
     }
 
+    /// Sets the maximum number of parallel `solc` processes to run simultaneously.
+    ///
+    /// # Panics
+    ///
+    /// `jobs` must be at least 1
+    pub fn solc_jobs(mut self, jobs: usize) -> Self {
+        assert!(jobs > 0);
+        self.solc_jobs = Some(jobs);
+        self
+    }
+
+    /// Sets the number of parallel `solc` processes to `1`, no parallelization
+    pub fn single_solc_jobs(self) -> Self {
+        self.solc_jobs(1)
+    }
+
     /// Set arbitrary `ArtifactOutputHandler`
     pub fn artifacts<A: ArtifactOutput>(self) -> ProjectBuilder<A> {
         let ProjectBuilder {
@@ -475,6 +566,7 @@ impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
             auto_detect,
             ignored_error_codes,
             allowed_paths,
+            solc_jobs,
             ..
         } = self;
         ProjectBuilder {
@@ -487,6 +579,7 @@ impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
             artifacts: PhantomData::default(),
             ignored_error_codes,
             allowed_paths,
+            solc_jobs,
         }
     }
@@ -519,6 +612,7 @@ impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
             artifacts,
             ignored_error_codes,
             mut allowed_paths,
+            solc_jobs,
         } = self;
 
         let solc = solc.unwrap_or_default();
@@ -541,6 +635,7 @@ impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
             artifacts,
             ignored_error_codes,
             allowed_lib_paths: allowed_paths.try_into()?,
+            solc_jobs: solc_jobs.unwrap_or_else(::num_cpus::get),
         })
     }
 }
@@ -557,6 +652,7 @@ impl<Artifacts: ArtifactOutput> Default for ProjectBuilder<Artifacts> {
             artifacts: PhantomData::default(),
            ignored_error_codes: Vec::new(),
             allowed_paths: vec![],
+            solc_jobs: None,
         }
     }
 }
@@ -623,17 +719,25 @@ impl<T: ArtifactOutput> ProjectCompileOutput<T> {
     pub fn extend(&mut self, compiled: ProjectCompileOutput<T>) {
         let ProjectCompileOutput { compiler_output, artifacts, .. } = compiled;
         self.artifacts.extend(artifacts);
-        if let Some(compiled) = compiler_output {
-            if let Some(output) = self.compiler_output.as_mut() {
-                output.errors.extend(compiled.errors);
-                output.sources.extend(compiled.sources);
-                output.contracts.extend(compiled.contracts);
-            } else {
-                self.compiler_output = Some(compiled);
-            }
+        if let Some(output) = compiler_output {
+            self.extend_output(output);
         }
     }
+
+    pub fn extend_output(&mut self, compiled: CompilerOutput) {
+        if let Some(output) = self.compiler_output.as_mut() {
+            output.errors.extend(compiled.errors);
+            output.sources.extend(compiled.sources);
+            output.contracts.extend(compiled.contracts);
+        } else {
+            self.compiler_output = Some(compiled);
+        }
+    }
+
+    pub fn extend_artifacts(&mut self, artifacts: BTreeMap<PathBuf, T::Artifact>) {
+        self.artifacts.extend(artifacts);
+    }
 
     /// Whether this type does not contain compiled contracts
     pub fn is_unchanged(&self) -> bool {
         !self.has_compiled_contracts()
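Putting the new builder knobs together, a usage sketch based on the methods added in this diff (the paths helper and jobs count are illustrative; error handling simplified):

```rust
use ethers_solc::{Project, ProjectPathsConfig};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // cap concurrent `solc` processes at 4; without `solc_jobs` the builder
    // falls back to num_cpus::get(), and single_solc_jobs() forces 1
    let project = Project::builder()
        .paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
        .solc_jobs(4) // panics if 0
        .build()?;

    // with multiple detected solc versions, compile() now fans the version
    // buckets out over up to 4 parallel solc processes
    let _output = project.compile()?;
    Ok(())
}
```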