#![doc = include_str!("../README.md")]

pub mod artifacts;
pub mod sourcemap;

pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion};

use std::collections::btree_map::Entry;

pub mod cache;
pub mod hh;
mod resolver;
pub use hh::{HardhatArtifact, HardhatArtifacts};
pub use resolver::Graph;

mod compile;
pub use compile::*;

mod config;
pub use config::{
    AllowedLibPaths, Artifact, ArtifactOutput, MinimalCombinedArtifacts, PathStyle,
    ProjectPathsConfig, SolcConfig,
};

pub mod remappings;

use crate::{artifacts::Source, cache::SolFilesCache};

pub mod error;
pub mod utils;

use crate::{
    artifacts::Sources,
    cache::PathMap,
    error::{SolcError, SolcIoError},
};
use error::Result;
use std::{
    borrow::Cow,
    collections::BTreeMap,
    convert::TryInto,
    fmt, fs,
    marker::PhantomData,
    path::{Path, PathBuf},
};
/// Utilities for creating, mocking and testing of (temporary) projects
#[cfg(feature = "project-util")]
pub mod project_util;

/// Represents a project workspace and handles `solc` compiling of all contracts in that workspace.
#[derive(Debug)]
pub struct Project<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> {
    /// The layout of the project
    pub paths: ProjectPathsConfig,
    /// Where to find solc
    pub solc: Solc,
    /// How solc invocation should be configured.
    pub solc_config: SolcConfig,
    /// Whether caching is enabled
    pub cached: bool,
    /// Whether writing artifacts to disk is enabled
    pub no_artifacts: bool,
    /// Whether automatic solc version detection is enabled
    pub auto_detect: bool,
    /// How to handle compiler output
    pub artifacts: PhantomData<Artifacts>,
    /// Errors/Warnings which match these error codes are not going to be logged
    pub ignored_error_codes: Vec<u64>,
    /// The paths which will be allowed for library inclusion
    pub allowed_lib_paths: AllowedLibPaths,
    /// Maximum number of `solc` processes to run simultaneously.
    solc_jobs: usize,
}

impl Project {
    /// Convenience function to call `ProjectBuilder::default()`
    ///
    /// # Example
    ///
    /// Configure with `MinimalCombinedArtifacts` artifacts output
    ///
    /// ```rust
    /// use ethers_solc::Project;
    /// let config = Project::builder().build().unwrap();
    /// ```
    ///
    /// To configure a project with any `ArtifactOutput` use either
    ///
    /// ```rust
    /// use ethers_solc::Project;
    /// let config = Project::builder().build().unwrap();
    /// ```
    ///
    /// or use the builder directly
    ///
    /// ```rust
    /// use ethers_solc::{MinimalCombinedArtifacts, ProjectBuilder};
    /// let config = ProjectBuilder::<MinimalCombinedArtifacts>::default().build().unwrap();
    /// ```
    pub fn builder() -> ProjectBuilder {
        ProjectBuilder::default()
    }
}

impl<Artifacts: ArtifactOutput> Project<Artifacts> {
    /// Returns the path to the artifacts directory
    pub fn artifacts_path(&self) -> &PathBuf {
        &self.paths.artifacts
    }

    /// Returns the path to the sources directory
    pub fn sources_path(&self) -> &PathBuf {
        &self.paths.sources
    }

    /// Returns the path to the cache file
    pub fn cache_path(&self) -> &PathBuf {
        &self.paths.cache
    }

    /// Sets the maximum number of parallel `solc` processes to run simultaneously.
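    ///
    /// # Panics
    ///
    /// `jobs` must be at least 1
    ///
    /// # Example
    ///
    /// A minimal usage sketch (the job count of `4` is illustrative):
    ///
    /// ```no_run
    /// use ethers_solc::Project;
    /// # fn demo(mut project: Project) {
    /// project.set_solc_jobs(4);
    /// # }
    /// ```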
    pub fn set_solc_jobs(&mut self, jobs: usize) {
        assert!(jobs > 0);
        self.solc_jobs = jobs;
    }

    #[tracing::instrument(skip_all, name = "Project::write_cache_file")]
    fn write_cache_file(
        &self,
        sources: Sources,
        artifacts: Vec<(PathBuf, Vec<String>)>,
    ) -> Result<()> {
        tracing::trace!("inserting {} sources in file cache", sources.len());
        let mut cache = SolFilesCache::builder()
            .root(&self.paths.root)
            .solc_config(self.solc_config.clone())
            .insert_files(sources, Some(self.paths.cache.clone()))?;
        tracing::trace!("source files inserted");

        // add the artifacts for each file to the cache entry
        for (file, artifacts) in artifacts {
            if let Some(entry) = cache.files.get_mut(&file) {
                entry.artifacts = artifacts;
            }
        }

        if let Some(cache_dir) = self.paths.cache.parent() {
            tracing::trace!("creating cache file parent directory \"{}\"", cache_dir.display());
            fs::create_dir_all(cache_dir).map_err(|err| SolcError::io(err, cache_dir))?
        }

        tracing::trace!("writing cache file to \"{}\"", self.paths.cache.display());
        cache.write(&self.paths.cache)?;

        Ok(())
    }

    /// Returns all sources found under the project's configured sources path
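    ///
    /// A minimal sketch of reading the sources (assumes the configured sources directory exists):
    ///
    /// ```no_run
    /// use ethers_solc::Project;
    /// # fn demo(project: Project) {
    /// let sources = project.sources().unwrap();
    /// println!("found {} source files", sources.len());
    /// # }
    /// ```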
    #[tracing::instrument(skip_all, fields(name = "sources"))]
    pub fn sources(&self) -> Result<Sources> {
        self.paths.read_sources()
    }

    /// This emits the cargo [`rerun-if-changed`](https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath)
    /// instruction, which tells Cargo to re-run the build script if a file inside the project's
    /// sources directory has changed.
    ///
    /// Use this if you compile a project in a `build.rs` file.
    ///
    /// # Example `build.rs` file
    ///
    /// ```no_run
    /// use ethers_solc::{Project, ProjectPathsConfig};
    /// // configure the project with all its paths, solc, cache etc. where the root dir is the current rust project.
    /// let project = Project::builder()
    ///     .paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
    ///     .build()
    ///     .unwrap();
    /// let output = project.compile().unwrap();
    /// // Tell Cargo that if a source file changes, to rerun this build script.
    /// project.rerun_if_sources_changed();
    /// ```
    pub fn rerun_if_sources_changed(&self) {
        println!("cargo:rerun-if-changed={}", self.paths.sources.display())
    }

    /// Attempts to read all unique libraries that are used as imports like "hardhat/console.sol"
    fn resolved_libraries(
        &self,
        sources: &Sources,
    ) -> Result<BTreeMap<PathBuf, (Source, PathBuf)>> {
        let mut libs = BTreeMap::default();
        for source in sources.values() {
            for import in source.parse_imports() {
                if let Some(lib) = utils::resolve_library(&self.paths.libraries, import) {
                    if let Entry::Vacant(entry) = libs.entry(import.into()) {
                        tracing::trace!(
                            "resolved library import \"{}\" at \"{}\"",
                            import,
                            lib.display()
                        );
                        entry.insert((Source::read(&lib)?, lib));
                    }
                }
            }
        }
        Ok(libs)
    }

    /// Attempts to compile the contracts found at the configured source location, see
    /// `ProjectPathsConfig::sources`.
    ///
    /// NOTE: this does not check if the contracts were successfully compiled, see
    /// `CompilerOutput::has_error` instead.
    ///
    /// NB: If the `svm` feature is enabled, this function will automatically detect
    /// solc versions across files.
    ///
    /// # Example
    ///
    /// ```
    /// use ethers_solc::Project;
    /// # fn demo(project: Project) {
    /// let project = Project::builder().build().unwrap();
    /// let output = project.compile().unwrap();
    /// # }
    /// ```
    #[tracing::instrument(skip_all, name = "compile")]
    pub fn compile(&self) -> Result<ProjectCompileOutput<Artifacts>> {
        let sources = self.paths.read_input_files()?;
        tracing::trace!("found {} sources to compile: {:?}", sources.len(), sources.keys());

        #[cfg(all(feature = "svm", feature = "async"))]
        if self.auto_detect {
            tracing::trace!("using solc auto detection to compile sources");
            return self.svm_compile(sources)
        }

        let mut solc = self.solc.clone();
        if !self.allowed_lib_paths.0.is_empty() {
            solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string());
        }

        let sources = Graph::resolve_sources(&self.paths, sources)?.into_sources();
        self.compile_with_version(&solc, sources)
    }

    #[cfg(all(feature = "svm", feature = "async"))]
    #[tracing::instrument(skip(self, sources))]
    pub fn svm_compile(&self, sources: Sources) -> Result<ProjectCompileOutput<Artifacts>> {
        let graph = Graph::resolve_sources(&self.paths, sources)?;
        let sources_by_version =
            graph.into_sources_by_version(!self.auto_detect)?.get(&self.allowed_lib_paths)?;

        // run the compilation step for each version
        let compiled = if self.solc_jobs > 1 && sources_by_version.len() > 1 {
            self.compile_many(sources_by_version)?
        } else {
            self.compile_sources(sources_by_version)?
        };
        tracing::trace!("compiled all sources");

        Ok(compiled)
    }

    /// Compiles all sources with their intended `Solc` version sequentially.
    #[cfg(all(feature = "svm", feature = "async"))]
    fn compile_sources(
        &self,
        sources_by_version: BTreeMap<Solc, BTreeMap<PathBuf, Source>>,
    ) -> Result<ProjectCompileOutput<Artifacts>> {
        tracing::trace!("compiling sources using a single solc job");
        let mut compiled =
            ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone());
        for (solc, sources) in sources_by_version {
            tracing::trace!(
                "compiling {} sources with solc \"{}\"",
                sources.len(),
                solc.as_ref().display()
            );
            compiled.extend(self.compile_with_version(&solc, sources)?);
        }
        Ok(compiled)
    }

    /// Compiles all sources with their intended `Solc` version in parallel.
    ///
    /// This runs `Self::solc_jobs` parallel `solc` jobs at most.
    #[cfg(all(feature = "svm", feature = "async"))]
    fn compile_many(
        &self,
        sources_by_version: BTreeMap<Solc, BTreeMap<PathBuf, Source>>,
    ) -> Result<ProjectCompileOutput<Artifacts>> {
        tracing::trace!("compile sources in parallel using {} solc jobs", self.solc_jobs);
        let mut compiled =
            ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone());
        let mut paths = PathMap::default();
        let mut jobs = Vec::with_capacity(sources_by_version.len());

        let mut all_sources = BTreeMap::default();
        let mut all_artifacts = Vec::with_capacity(sources_by_version.len());

        // preprocess all sources
        for (solc, sources) in sources_by_version {
            match self.preprocess_sources(sources)? {
                PreprocessedJob::Unchanged(artifacts) => {
                    compiled.extend(ProjectCompileOutput::from_unchanged(artifacts));
                }
                PreprocessedJob::Items(sources, map, cached_artifacts) => {
                    tracing::trace!("cached artifacts: \"{:?}\"", cached_artifacts.keys());
                    tracing::trace!("compile sources: \"{:?}\"", sources.keys());

                    compiled.extend_artifacts(cached_artifacts);
                    // replace absolute path with source name to make solc happy
                    let sources = map.set_source_names(sources);
                    paths.extend(map);

                    let input = CompilerInput::with_sources(sources)
                        .settings(self.solc_config.settings.clone())
                        .normalize_evm_version(&solc.version()?)
                        .with_remappings(self.paths.remappings.clone());

                    jobs.push((solc, input))
                }
            };
        }
        tracing::trace!("execute {} compile jobs in parallel", jobs.len());

        let outputs = tokio::runtime::Runtime::new()
            .unwrap()
            .block_on(Solc::compile_many(jobs, self.solc_jobs));

        for (res, _, input) in outputs.into_outputs() {
            let output = res?;
            if !output.has_error() {
                if self.cached {
                    // get all contract names of the files and map them to the disk file
                    all_sources.extend(paths.set_disk_paths(input.sources));
                    all_artifacts.extend(paths.get_artifacts(&output.contracts));
                }

                if !self.no_artifacts {
                    Artifacts::on_output(&output, &self.paths)?;
                }
            }
            compiled.extend_output(output);
        }

        // write the cache file
        if self.cached {
            self.write_cache_file(all_sources, all_artifacts)?;
        }

        Ok(compiled)
    }

    /// Compiles the given source files with the exact `Solc` executable
    ///
    /// First all libraries for the sources are resolved by scanning all their imports.
    /// If caching is enabled for the `Project`, then all unchanged files are filtered from the
    /// sources and their existing artifacts are read instead. This will also update the cache
    /// file and clean up entries for files which may have been removed. Unchanged files for
    /// which an artifact exists are not compiled again.
    ///
    /// # Example
    ///
    /// ```
    /// use ethers_solc::{Project, Solc};
    /// # fn demo(project: Project) {
    /// let project = Project::builder().build().unwrap();
    /// let sources = project.paths.read_sources().unwrap();
    /// project
    ///     .compile_with_version(
    ///         &Solc::find_svm_installed_version("0.8.11").unwrap().unwrap(),
    ///         sources,
    ///     )
    ///     .unwrap();
    /// # }
    /// ```
    pub fn compile_with_version(
        &self,
        solc: &Solc,
        sources: Sources,
    ) -> Result<ProjectCompileOutput<Artifacts>> {
        let (sources, paths, cached_artifacts) = match self.preprocess_sources(sources)? {
            PreprocessedJob::Unchanged(artifacts) => {
                return Ok(ProjectCompileOutput::from_unchanged(artifacts))
            }
            PreprocessedJob::Items(a, b, c) => (a, b, c),
        };

        let version = solc.version()?;
        tracing::trace!(
            "compiling {} files with {}. Using {} cached files",
            sources.len(),
            version,
            cached_artifacts.len()
        );
        tracing::trace!("cached artifacts: \"{:?}\"", cached_artifacts.keys());
        tracing::trace!("compile sources: \"{:?}\"", sources.keys());

        // replace absolute path with source name to make solc happy
        let sources = paths.set_source_names(sources);

        let input = CompilerInput::with_sources(sources)
            .settings(self.solc_config.settings.clone())
            .normalize_evm_version(&version)
            .with_remappings(self.paths.remappings.clone());

        tracing::trace!("calling solc with {} sources", input.sources.len());
        let output = solc.compile(&input)?;
        tracing::trace!("compiled input, output has error: {}", output.has_error());

        if output.has_error() {
            return Ok(ProjectCompileOutput::from_compiler_output(
                output,
                self.ignored_error_codes.clone(),
            ))
        }

        if self.cached {
            // get all contract names of the files and map them to the disk file
            let artifacts = paths.get_artifacts(&output.contracts);
            // reapply to disk paths
            let sources = paths.set_disk_paths(input.sources);
            // create cache file
            self.write_cache_file(sources, artifacts)?;
        }

        // TODO: There seems to be some type redundancy here, c.f. discussion with @mattsse
        if !self.no_artifacts {
            Artifacts::on_output(&output, &self.paths)?;
        }

        Ok(ProjectCompileOutput::from_compiler_output_and_cache(
            output,
            cached_artifacts,
            self.ignored_error_codes.clone(),
        ))
    }

    /// Preprocesses the given source files by resolving their libs and checking them against the
    /// cache if configured
    fn preprocess_sources(&self, mut sources: Sources) -> Result<PreprocessedJob<Artifacts>> {
        tracing::trace!("start preprocessing {} source files", sources.len());

        // keeps track of source names / disk paths
        let mut paths = PathMap::default();

        tracing::trace!("start resolving libraries");
        for (import, (source, path)) in self.resolved_libraries(&sources)? {
            // insert with the absolute path here and keep track of the source name <-> path mappings
            sources.insert(path.clone(), source);
            paths.path_to_source_name.insert(path.clone(), import.clone());
            paths.source_name_to_path.insert(import, path);
        }
        tracing::trace!("resolved all libraries");

        // If there's a cache set, filter to only re-compile the files which were changed
        let (sources, cached_artifacts) = if self.cached && self.paths.cache.exists() {
            tracing::trace!("start reading solfiles cache for incremental compilation");
            let mut cache = SolFilesCache::read(&self.paths.cache)?;
            cache.remove_missing_files();
            let changed_files = cache.get_changed_or_missing_artifacts_files::<Artifacts>(
                sources,
                Some(&self.solc_config),
                &self.paths,
            );
            tracing::trace!("detected {} changed files", changed_files.len());
            cache.remove_changed_files(&changed_files);

            let cached_artifacts = if self.paths.artifacts.exists() {
                tracing::trace!("reading artifacts from cache..");
                let artifacts = cache.read_artifacts::<Artifacts>(&self.paths.artifacts)?;
                tracing::trace!("read {} artifacts from cache", artifacts.len());
                artifacts
            } else {
                BTreeMap::default()
            };

            // if nothing changed and all artifacts still exist
            if changed_files.is_empty() {
                tracing::trace!(
                    "unchanged source files, reusing artifacts {:?}",
                    cached_artifacts.keys()
                );
                return Ok(PreprocessedJob::Unchanged(cached_artifacts))
            }
            // There are changed files and maybe some cached files
            (changed_files, cached_artifacts)
        } else {
            (sources, BTreeMap::default())
        };
        Ok(PreprocessedJob::Items(sources, paths, cached_artifacts))
    }

    /// Removes the project's artifacts and cache file
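    ///
    /// A minimal sketch of wiping the build outputs before a fresh compile:
    ///
    /// ```no_run
    /// use ethers_solc::Project;
    /// # fn demo(project: Project) {
    /// project.cleanup().unwrap();
    /// let output = project.compile().unwrap();
    /// # }
    /// ```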
    pub fn cleanup(&self) -> std::result::Result<(), SolcIoError> {
        tracing::trace!("clean up project");
        if self.cache_path().exists() {
            std::fs::remove_file(self.cache_path())
                .map_err(|err| SolcIoError::new(err, self.cache_path()))?;
            tracing::trace!("removed cache file \"{}\"", self.cache_path().display());
        }
        if self.paths.artifacts.exists() {
            std::fs::remove_dir_all(self.artifacts_path())
                .map_err(|err| SolcIoError::new(err, self.artifacts_path().clone()))?;
            tracing::trace!("removed artifacts dir \"{}\"", self.artifacts_path().display());
        }
        Ok(())
    }

    /// Flattens the target file into a single string suitable for verification
    ///
    /// This method uses a dependency graph to resolve imported files and substitute
    /// import directives with the contents of target files. It will strip the pragma
    /// version directives and SPDX license identifiers from imported files.
    ///
    /// NOTE: the SPDX license identifier will be removed from the imported file
    /// only if it is found at the beginning of the file.
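    ///
    /// # Example
    ///
    /// A minimal sketch; the contract path is illustrative:
    ///
    /// ```no_run
    /// use ethers_solc::Project;
    /// use std::path::Path;
    /// # fn demo(project: Project) {
    /// let flattened = project.flatten(Path::new("contracts/Greeter.sol")).unwrap();
    /// println!("{}", flattened);
    /// # }
    /// ```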
    pub fn flatten(&self, target: &Path) -> Result<String> {
        self.paths.flatten(target)
    }
}

enum PreprocessedJob<T: ArtifactOutput> {
    Unchanged(BTreeMap<PathBuf, T::Artifact>),
    Items(Sources, PathMap, BTreeMap<PathBuf, T::Artifact>),
}

pub struct ProjectBuilder<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> {
    /// The layout of the project
    paths: Option<ProjectPathsConfig>,
    /// Where to find solc
    solc: Option<Solc>,
    /// How solc invocation should be configured.
    solc_config: Option<SolcConfig>,
    /// Whether caching is enabled, default is true.
    cached: bool,
    /// Whether writing artifacts to disk is enabled, default is true.
    no_artifacts: bool,
    /// Whether automatic solc version detection is enabled
    auto_detect: bool,
    artifacts: PhantomData<Artifacts>,
    /// Which error codes to ignore
    pub ignored_error_codes: Vec<u64>,
    /// All allowed paths
    pub allowed_paths: Vec<PathBuf>,
    solc_jobs: Option<usize>,
}

impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
    #[must_use]
    pub fn paths(mut self, paths: ProjectPathsConfig) -> Self {
        self.paths = Some(paths);
        self
    }

    #[must_use]
    pub fn solc(mut self, solc: impl Into<Solc>) -> Self {
        self.solc = Some(solc.into());
        self
    }

    #[must_use]
    pub fn solc_config(mut self, solc_config: SolcConfig) -> Self {
        self.solc_config = Some(solc_config);
        self
    }

    #[must_use]
    pub fn ignore_error_code(mut self, code: u64) -> Self {
        self.ignored_error_codes.push(code);
        self
    }

    #[must_use]
    pub fn ignore_error_codes(mut self, codes: impl IntoIterator<Item = u64>) -> Self {
        for code in codes {
            self = self.ignore_error_code(code);
        }
        self
    }

    /// Disables cached builds
    #[must_use]
    pub fn ephemeral(self) -> Self {
        self.set_cached(false)
    }

    /// Sets the cache status
    #[must_use]
    pub fn set_cached(mut self, cached: bool) -> Self {
        self.cached = cached;
        self
    }

    /// Disables writing artifacts to disk
    #[must_use]
    pub fn no_artifacts(self) -> Self {
        self.set_no_artifacts(true)
    }

    /// Sets the no artifacts status
    #[must_use]
    pub fn set_no_artifacts(mut self, artifacts: bool) -> Self {
        self.no_artifacts = artifacts;
        self
    }

    /// Sets automatic solc version detection
    #[must_use]
    pub fn set_auto_detect(mut self, auto_detect: bool) -> Self {
        self.auto_detect = auto_detect;
        self
    }

    /// Disables automatic solc version detection
    #[must_use]
    pub fn no_auto_detect(self) -> Self {
        self.set_auto_detect(false)
    }

    /// Sets the maximum number of parallel `solc` processes to run simultaneously.
    ///
    /// # Panics
    ///
    /// `jobs` must be at least 1
    #[must_use]
    pub fn solc_jobs(mut self, jobs: usize) -> Self {
        assert!(jobs > 0);
        self.solc_jobs = Some(jobs);
        self
    }

    /// Sets the number of parallel `solc` processes to `1`, no parallelization
    #[must_use]
    pub fn single_solc_jobs(self) -> Self {
        self.solc_jobs(1)
    }

    /// Set an arbitrary `ArtifactOutput` handler
    pub fn artifacts<A: ArtifactOutput>(self) -> ProjectBuilder<A> {
        let ProjectBuilder {
            paths,
            solc,
            solc_config,
            cached,
            no_artifacts,
            auto_detect,
            ignored_error_codes,
            allowed_paths,
            solc_jobs,
            ..
        } = self;
        ProjectBuilder {
            paths,
            solc,
            solc_config,
            cached,
            no_artifacts,
            auto_detect,
            artifacts: PhantomData::default(),
            ignored_error_codes,
            allowed_paths,
            solc_jobs,
        }
    }

    /// Adds an allowed-path to the solc executable
    #[must_use]
    pub fn allowed_path<T: Into<PathBuf>>(mut self, path: T) -> Self {
        self.allowed_paths.push(path.into());
        self
    }

    /// Adds multiple allowed-paths to the solc executable
    #[must_use]
    pub fn allowed_paths<I, S>(mut self, args: I) -> Self
    where
        I: IntoIterator<Item = S>,
        S: Into<PathBuf>,
    {
        for arg in args {
            self = self.allowed_path(arg);
        }
        self
    }
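
    /// Builds the `Project`, falling back to defaults (default `solc`, hardhat-style paths under
    /// the current directory, one `solc` job per CPU) for anything not set explicitly.
    ///
    /// A minimal sketch of a chained configuration; the chosen options and error code are
    /// illustrative:
    ///
    /// ```no_run
    /// use ethers_solc::Project;
    /// let project = Project::builder()
    ///     .ephemeral()
    ///     .no_artifacts()
    ///     .ignore_error_code(2018)
    ///     .build()
    ///     .unwrap();
    /// ```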
    pub fn build(self) -> Result<Project<Artifacts>> {
        let Self {
            paths,
            solc,
            solc_config,
            cached,
            no_artifacts,
            auto_detect,
            artifacts,
            ignored_error_codes,
            mut allowed_paths,
            solc_jobs,
        } = self;

        let solc = solc.unwrap_or_default();
        let solc_config = solc_config.unwrap_or_else(|| SolcConfig::builder().build());

        let paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?;

        if allowed_paths.is_empty() {
            // allow every contract under root by default
            allowed_paths.push(paths.root.clone())
        }

        Ok(Project {
            paths,
            solc,
            solc_config,
            cached,
            no_artifacts,
            auto_detect,
            artifacts,
            ignored_error_codes,
            allowed_lib_paths: allowed_paths.try_into()?,
            solc_jobs: solc_jobs.unwrap_or_else(::num_cpus::get),
        })
    }
}

impl<Artifacts: ArtifactOutput> Default for ProjectBuilder<Artifacts> {
    fn default() -> Self {
        Self {
            paths: None,
            solc: None,
            solc_config: None,
            cached: true,
            no_artifacts: false,
            auto_detect: true,
            artifacts: PhantomData::default(),
            ignored_error_codes: Vec::new(),
            allowed_paths: vec![],
            solc_jobs: None,
        }
    }
}

/// The outcome of `Project::compile`
#[derive(Debug, Clone, PartialEq, Default)]
pub struct ProjectCompileOutput<T: ArtifactOutput> {
    /// If solc was invoked multiple times in `Project::compile` then this contains a merged
    /// version of all `CompilerOutput`s. If solc was called only once then `compiler_output`
    /// holds the `CompilerOutput` of that call.
    compiler_output: Option<CompilerOutput>,
    /// All artifacts that were read from cache
    artifacts: BTreeMap<PathBuf, T::Artifact>,
    ignored_error_codes: Vec<u64>,
}

impl<T: ArtifactOutput> ProjectCompileOutput<T> {
    pub fn with_ignored_errors(ignored_errors: Vec<u64>) -> Self {
        Self {
            compiler_output: None,
            artifacts: Default::default(),
            ignored_error_codes: ignored_errors,
        }
    }

    pub fn from_unchanged(artifacts: BTreeMap<PathBuf, T::Artifact>) -> Self {
        Self { compiler_output: None, artifacts, ignored_error_codes: vec![] }
    }

    pub fn from_compiler_output(
        compiler_output: CompilerOutput,
        ignored_error_codes: Vec<u64>,
    ) -> Self {
        Self {
            compiler_output: Some(compiler_output),
            artifacts: Default::default(),
            ignored_error_codes,
        }
    }

    pub fn from_compiler_output_and_cache(
        compiler_output: CompilerOutput,
        cache: BTreeMap<PathBuf, T::Artifact>,
        ignored_error_codes: Vec<u64>,
    ) -> Self {
        Self { compiler_output: Some(compiler_output), artifacts: cache, ignored_error_codes }
    }

    /// Get the (merged) solc compiler output
    ///
    /// ```no_run
    /// use std::collections::BTreeMap;
    /// use ethers_solc::artifacts::Contract;
    /// use ethers_solc::Project;
    ///
    /// let project = Project::builder().build().unwrap();
    /// let contracts: BTreeMap<String, Contract> =
    ///     project.compile().unwrap().output().contracts_into_iter().collect();
    /// ```
    pub fn output(self) -> CompilerOutput {
        self.compiler_output.unwrap_or_default()
    }

    /// Combine two outputs
    pub fn extend(&mut self, compiled: ProjectCompileOutput<T>) {
        let ProjectCompileOutput { compiler_output, artifacts, .. } = compiled;
        self.artifacts.extend(artifacts);
        if let Some(output) = compiler_output {
            self.extend_output(output);
        }
    }

    pub fn extend_output(&mut self, compiled: CompilerOutput) {
        if let Some(output) = self.compiler_output.as_mut() {
            output.errors.extend(compiled.errors);
            output.sources.extend(compiled.sources);
            output.contracts.extend(compiled.contracts);
        } else {
            self.compiler_output = Some(compiled);
        }
    }

    pub fn extend_artifacts(&mut self, artifacts: BTreeMap<PathBuf, T::Artifact>) {
        self.artifacts.extend(artifacts);
    }

    /// Whether this type does not contain compiled contracts
    pub fn is_unchanged(&self) -> bool {
        !self.has_compiled_contracts()
    }

    /// Whether this type has a compiler output
    pub fn has_compiled_contracts(&self) -> bool {
        if let Some(output) = self.compiler_output.as_ref() {
            !output.contracts.is_empty()
        } else {
            false
        }
    }

    /// Whether there were errors
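    ///
    /// A minimal sketch of the usual post-compile check (the panic message is illustrative):
    ///
    /// ```no_run
    /// use ethers_solc::Project;
    /// # fn demo(project: Project) {
    /// let output = project.compile().unwrap();
    /// assert!(!output.has_compiler_errors(), "compilation failed: {}", output);
    /// # }
    /// ```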
    pub fn has_compiler_errors(&self) -> bool {
        self.compiler_output.as_ref().map(|o| o.has_error()).unwrap_or_default()
    }

    /// Whether there were warnings
    pub fn has_compiler_warnings(&self) -> bool {
        self.compiler_output
            .as_ref()
            .map(|o| o.has_warning(&self.ignored_error_codes))
            .unwrap_or_default()
    }

    /// Finds the first contract with the given name and removes it from the set
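    ///
    /// A minimal sketch; "Greeter" is an illustrative contract name:
    ///
    /// ```no_run
    /// use ethers_solc::Project;
    /// # fn demo(project: Project) {
    /// let mut output = project.compile().unwrap();
    /// let artifact = output.remove("Greeter").unwrap();
    /// # }
    /// ```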
    pub fn remove(&mut self, contract_name: impl AsRef<str>) -> Option<T::Artifact> {
        let contract_name = contract_name.as_ref();
        if let Some(output) = self.compiler_output.as_mut() {
            if let contract @ Some(_) = output.contracts.iter_mut().find_map(|(file, c)| {
                c.remove(contract_name).map(|c| T::contract_to_artifact(file, contract_name, c))
            }) {
                return contract
            }
        }
        let key = self
            .artifacts
            .iter()
            .find_map(|(path, _)| {
                T::contract_name(path).filter(|name| name == contract_name).map(|_| path)
            })?
            .clone();
        self.artifacts.remove(&key)
    }
}

impl<T: ArtifactOutput> ProjectCompileOutput<T>
where
    T::Artifact: Clone,
{
    /// Finds the first contract with the given name
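    ///
    /// A minimal sketch; "Greeter" is an illustrative contract name:
    ///
    /// ```no_run
    /// use ethers_solc::Project;
    /// # fn demo(project: Project) {
    /// let output = project.compile().unwrap();
    /// let greeter = output.find("Greeter");
    /// assert!(greeter.is_some());
    /// # }
    /// ```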
    pub fn find(&self, contract_name: impl AsRef<str>) -> Option<Cow<T::Artifact>> {
        let contract_name = contract_name.as_ref();
        if let Some(output) = self.compiler_output.as_ref() {
            if let contract @ Some(_) = output.contracts.iter().find_map(|(file, contracts)| {
                contracts
                    .get(contract_name)
                    .map(|c| T::contract_to_artifact(file, contract_name, c.clone()))
                    .map(Cow::Owned)
            }) {
                return contract
            }
        }
        self.artifacts.iter().find_map(|(path, art)| {
            T::contract_name(path).filter(|name| name == contract_name).map(|_| Cow::Borrowed(art))
        })
    }
}

impl<T: ArtifactOutput + 'static> ProjectCompileOutput<T> {
    /// All artifacts together with their contract file name and name `<file name>:<name>`
    ///
    /// # Example
    ///
    /// ```no_run
    /// use std::collections::BTreeMap;
    /// use ethers_solc::artifacts::CompactContract;
    /// use ethers_solc::Project;
    ///
    /// let project = Project::builder().build().unwrap();
    /// let contracts: BTreeMap<String, CompactContract> = project.compile().unwrap().into_artifacts().collect();
    /// ```
    pub fn into_artifacts(mut self) -> Box<dyn Iterator<Item = (String, T::Artifact)>> {
        let artifacts = self.artifacts.into_iter().filter_map(|(path, art)| {
            T::contract_name(&path).map(|name| {
                (format!("{}:{}", path.file_name().unwrap().to_string_lossy(), name), art)
            })
        });

        let artifacts: Box<dyn Iterator<Item = (String, T::Artifact)>> = if let Some(output) =
            self.compiler_output.take()
        {
            Box::new(artifacts.chain(T::output_to_artifacts(output).into_values().flatten().map(
                |(name, artifact)| {
                    (format!("{}:{}", T::output_file_name(&name).display(), name), artifact)
                },
            )))
        } else {
            Box::new(artifacts)
        };
        artifacts
    }
}

impl<T: ArtifactOutput> fmt::Display for ProjectCompileOutput<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if let Some(output) = self.compiler_output.as_ref() {
            output.diagnostics(&self.ignored_error_codes).fmt(f)
        } else {
            f.write_str("Nothing to compile")
        }
    }
}

#[cfg(test)]
mod tests {
    #[test]
    #[cfg(all(feature = "svm", feature = "async"))]
    fn test_build_all_versions() {
        use super::*;

        let paths = ProjectPathsConfig::builder()
            .root("./test-data/test-contract-versions")
            .sources("./test-data/test-contract-versions")
            .build()
            .unwrap();
        let project = Project::builder().paths(paths).no_artifacts().ephemeral().build().unwrap();
        let compiled = project.compile().unwrap();
        assert!(!compiled.has_compiler_errors());
        let contracts = compiled.output().contracts;
        // Contracts A to F
        assert_eq!(contracts.keys().count(), 5);
    }

    #[test]
    #[cfg(all(feature = "svm", feature = "async"))]
    fn test_build_many_libs() {
        use super::*;

        let root = utils::canonicalize("./test-data/test-contract-libs").unwrap();

        let paths = ProjectPathsConfig::builder()
            .root(&root)
            .sources(root.join("src"))
            .lib(root.join("lib1"))
            .lib(root.join("lib2"))
            .build()
            .unwrap();
        let project = Project::builder()
            .paths(paths)
            .no_artifacts()
            .ephemeral()
            .no_artifacts()
            .build()
            .unwrap();
        let compiled = project.compile().unwrap();
        assert!(!compiled.has_compiler_errors());
        let contracts = compiled.output().contracts;
        assert_eq!(contracts.keys().count(), 3);
    }

    #[test]
    #[cfg(all(feature = "svm", feature = "async"))]
    fn test_build_remappings() {
        use super::*;

        let root = utils::canonicalize("./test-data/test-contract-remappings").unwrap();
        let paths = ProjectPathsConfig::builder()
            .root(&root)
            .sources(root.join("src"))
            .lib(root.join("lib"))
            .build()
            .unwrap();
        let project = Project::builder().no_artifacts().paths(paths).ephemeral().build().unwrap();
        let compiled = project.compile().unwrap();
        assert!(!compiled.has_compiler_errors());
        let contracts = compiled.output().contracts;
        assert_eq!(contracts.keys().count(), 2);
    }
}