2022-02-04 16:20:24 +00:00
|
|
|
//! Manages compiling of a `Project`
|
|
|
|
//!
|
|
|
|
//! The compilation of a project is performed in several steps.
|
|
|
|
//!
|
|
|
|
//! First the project's dependency graph [`crate::Graph`] is constructed and all imported
|
|
|
|
//! dependencies are resolved. The graph holds all the relationships between the files and their
|
|
|
|
//! versions. From there the appropriate version sets are derived from the
//! [`crate::Graph`], which determine the files that need to be compiled with different
//! [`crate::Solc`] versions.
|
|
|
|
//!
|
|
|
|
//! At this point we check if we need to compile a source file or whether we can reuse an _existing_
|
|
|
|
//! `Artifact`. We don't have to compile if:
|
|
|
|
//! - caching is enabled
|
2022-03-19 17:05:39 +00:00
|
|
|
//! - the file is **not** dirty
|
2022-02-04 16:20:24 +00:00
|
|
|
//! - the artifact for that file exists
|
|
|
|
//!
|
|
|
|
//! This concludes the preprocessing, and we now have either
|
|
|
|
//! - only `Source` files that need to be compiled
|
|
|
|
//! - only cached `Artifacts`, compilation can be skipped. This is considered an unchanged,
|
|
|
|
//! cached project
|
|
|
|
//! - Mix of both `Source` and `Artifacts`, only the `Source` files need to be compiled, the
|
|
|
|
//! `Artifacts` can be reused.
|
|
|
|
//!
|
|
|
|
//! The final step is invoking `Solc` via the standard JSON format.
|
|
|
|
//!
|
|
|
|
//! ### Notes on [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution)
|
|
|
|
//!
|
|
|
|
//! In order to be able to support reproducible builds on all platforms, the Solidity compiler has
|
|
|
|
//! to abstract away the details of the filesystem where source files are stored. Paths used in
|
|
|
|
//! imports must work the same way everywhere while the command-line interface must be able to work
|
|
|
|
//! with platform-specific paths to provide good user experience. This section aims to explain in
|
|
|
|
//! detail how Solidity reconciles these requirements.
|
|
|
|
//!
|
|
|
|
//! The compiler maintains an internal database (virtual filesystem or VFS for short) where each
|
|
|
|
//! source unit is assigned a unique source unit name which is an opaque and unstructured
|
|
|
|
//! identifier. When you use the import statement, you specify an import path that references a
|
|
|
|
//! source unit name. If the compiler does not find any source unit name matching the import path in
|
|
|
|
//! the VFS, it invokes the callback, which is responsible for obtaining the source code to be
|
|
|
|
//! placed under that name.
|
|
|
|
//!
|
|
|
|
//! This becomes relevant when dealing with resolved imports
|
|
|
|
//!
|
|
|
|
//! #### Relative Imports
|
|
|
|
//!
|
|
|
|
//! ```solidity
|
|
|
|
//! import "./math/math.sol";
|
|
|
|
//! import "contracts/tokens/token.sol";
|
|
|
|
//! ```
|
|
|
|
//! In the above `./math/math.sol` and `contracts/tokens/token.sol` are import paths while the
|
|
|
|
//! source unit names they translate to are `contracts/math/math.sol` and
|
|
|
|
//! `contracts/tokens/token.sol` respectively.
|
|
|
|
//!
|
|
|
|
//! #### Direct Imports
|
|
|
|
//!
|
|
|
|
//! An import that does not start with `./` or `../` is a direct import.
|
|
|
|
//!
|
|
|
|
//! ```solidity
|
|
|
|
//! import "/project/lib/util.sol"; // source unit name: /project/lib/util.sol
|
|
|
|
//! import "lib/util.sol"; // source unit name: lib/util.sol
|
|
|
|
//! import "@openzeppelin/address.sol"; // source unit name: @openzeppelin/address.sol
|
2022-03-19 17:05:39 +00:00
|
|
|
//! import "https://example.com/token.sol"; // source unit name: <https://example.com/token.sol>
|
2022-02-04 16:20:24 +00:00
|
|
|
//! ```
|
|
|
|
//!
|
|
|
|
//! After applying any import remappings the import path simply becomes the source unit name.
|
|
|
|
//!
|
|
|
|
//! ##### Import Remapping
|
|
|
|
//!
|
|
|
|
//! ```solidity
|
|
|
|
//! import "github.com/ethereum/dapp-bin/library/math.sol"; // source unit name: dapp-bin/library/math.sol
|
|
|
|
//! ```
|
|
|
|
//!
|
|
|
|
//! If compiled with `solc github.com/ethereum/dapp-bin/=dapp-bin/` the compiler will look for the
|
|
|
|
//! file in the VFS under `dapp-bin/library/math.sol`. If the file is not available there, the
|
|
|
|
//! source unit name will be passed to the Host Filesystem Loader, which will then look in
|
|
|
|
//! `/project/dapp-bin/library/math.sol`
|
2022-03-11 16:43:48 +00:00
|
|
|
//!
|
|
|
|
//!
|
|
|
|
//! ### Caching and Change detection
|
|
|
|
//!
|
|
|
|
//! If caching is enabled in the [Project](crate::Project) a cache file will be created upon a
|
2022-03-19 17:05:39 +00:00
|
|
|
//! successful solc build. The [cache file](crate::cache::SolFilesCache) stores metadata for all the
|
|
|
|
//! files that were provided to solc.
|
2022-03-11 16:43:48 +00:00
|
|
|
//! For every file the cache file contains a dedicated [cache
|
2022-03-19 17:05:39 +00:00
|
|
|
//! entry](crate::cache::CacheEntry), which represents the state of the file. A solidity file can
|
|
|
|
//! contain several contracts, for every contract a separate [artifact](crate::Artifact) is emitted.
|
2022-03-11 16:43:48 +00:00
|
|
|
//! Therefore the entry also tracks all artifacts emitted by a file. A solidity file can also be
|
|
|
|
//! compiled with several solc versions.
|
|
|
|
//!
|
|
|
|
//! For example in `A(<=0.8.10) imports C(>0.4.0)` and
|
|
|
|
//! `B(0.8.11) imports C(>0.4.0)`, both `A` and `B` import `C` but there's no solc version that's
|
|
|
|
//! compatible with `A` and `B`, in which case two sets are compiled: [`A`, `C`] and [`B`, `C`].
|
|
|
|
//! This is reflected in the cache entry which tracks the file's artifacts by version.
|
|
|
|
//!
|
|
|
|
//! The cache makes it possible to detect changes during recompilation, so that only the changed,
|
|
|
|
//! dirty, files need to be passed to solc. A file will be considered as dirty if:
|
|
|
|
//! - the file is new, not included in the existing cache
|
|
|
|
//! - the file was modified since the last compiler run, detected by comparing content hashes
|
|
|
|
//! - any of the imported files is dirty
|
|
|
|
//! - the file's artifacts don't exist, were deleted.
|
|
|
|
//!
|
|
|
|
//! Recompiling a project with cache enabled detects all files that meet these criteria and provides
|
|
|
|
//! solc with only these dirty files instead of the entire source set.
|
2022-02-04 16:20:24 +00:00
|
|
|
|
|
|
|
use crate::{
|
|
|
|
artifact_output::Artifacts,
|
2022-03-15 15:58:33 +00:00
|
|
|
artifacts::{Settings, VersionedFilteredSources, VersionedSources},
|
2022-02-04 16:20:24 +00:00
|
|
|
cache::ArtifactsCache,
|
|
|
|
error::Result,
|
|
|
|
output::AggregatedCompilerOutput,
|
2022-02-08 23:14:57 +00:00
|
|
|
report,
|
2022-02-04 16:20:24 +00:00
|
|
|
resolver::GraphEdges,
|
|
|
|
ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, Solc,
|
|
|
|
Sources,
|
|
|
|
};
|
|
|
|
use rayon::prelude::*;
|
|
|
|
|
2022-04-04 22:50:10 +00:00
|
|
|
use crate::filter::SparseOutputFilter;
|
2022-04-02 21:37:38 +00:00
|
|
|
use std::{collections::btree_map::BTreeMap, path::PathBuf, time::Instant};
|
2022-02-04 16:20:24 +00:00
|
|
|
|
|
|
|
/// Drives the compilation of a [`Project`]'s sources.
///
/// Created via [`ProjectCompiler::new`] / [`ProjectCompiler::with_sources`] and consumed by
/// [`ProjectCompiler::compile`], which runs the preprocess -> compile -> write-artifacts ->
/// write-cache state machine described in the module docs.
#[derive(Debug)]
pub struct ProjectCompiler<'a, T: ArtifactOutput> {
    /// Contains the relationship of the source files and their imports
    edges: GraphEdges,
    /// The project whose paths, settings and cache are used throughout compilation.
    project: &'a Project<T>,
    /// how to compile all the sources
    sources: CompilerSources,
    /// How to select solc [`crate::artifacts::CompilerOutput`] for files
    sparse_output: SparseOutputFilter,
}
|
|
|
|
|
|
|
|
impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
    /// Create a new `ProjectCompiler` to bootstrap the compilation process of the project's
    /// sources.
    ///
    /// Reads all input files from the project's configured paths; see
    /// [`ProjectCompiler::with_sources`] for the rest of the bootstrapping.
    ///
    /// # Example
    ///
    /// ```no_run
    /// use ethers_solc::Project;
    ///
    /// let project = Project::builder().build().unwrap();
    /// let output = project.compile().unwrap();
    /// ```
    #[cfg(all(feature = "svm-solc", feature = "async"))]
    pub fn new(project: &'a Project<T>) -> Result<Self> {
        Self::with_sources(project, project.paths.read_input_files()?)
    }

    /// Bootstraps the compilation process by resolving the dependency graph of all sources and the
    /// appropriate `Solc` -> `Sources` set as well as the compile mode to use (parallel,
    /// sequential)
    ///
    /// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows
    /// multiple `jobs`, see [`crate::Project::set_solc_jobs()`].
    #[cfg(all(feature = "svm-solc", feature = "async"))]
    pub fn with_sources(project: &'a Project<T>, sources: Sources) -> Result<Self> {
        // build the import graph and group the sources by compatible solc version
        let graph = Graph::resolve_sources(&project.paths, sources)?;
        let (versions, edges) = graph.into_sources_by_version(project.offline)?;
        let sources_by_version = versions.get(&project.allowed_lib_paths)?;

        let sources = if project.solc_jobs > 1 && sources_by_version.len() > 1 {
            // if there are multiple different versions, and we can use multiple jobs we can compile
            // them in parallel
            CompilerSources::Parallel(sources_by_version, project.solc_jobs)
        } else {
            CompilerSources::Sequential(sources_by_version)
        };

        Ok(Self { edges, project, sources, sparse_output: Default::default() })
    }

    /// Compiles the sources with a pinned `Solc` instance
    ///
    /// All sources are assigned to the single provided `solc`, so no per-file version resolution
    /// takes place; compilation is always sequential in this mode.
    pub fn with_sources_and_solc(
        project: &'a Project<T>,
        sources: Sources,
        solc: Solc,
    ) -> Result<Self> {
        let version = solc.version()?;
        let (sources, edges) = Graph::resolve_sources(&project.paths, sources)?.into_sources();
        // single entry: everything is compiled by the pinned solc at its reported version
        let sources_by_version = BTreeMap::from([(solc, (version, sources))]);
        let sources = CompilerSources::Sequential(sources_by_version);

        Ok(Self { edges, project, sources, sparse_output: Default::default() })
    }

    /// Applies the specified filter to be applied when selecting solc output for
    /// specific files to be compiled
    pub fn with_sparse_output(mut self, sparse_output: impl Into<SparseOutputFilter>) -> Self {
        self.sparse_output = sparse_output.into();
        self
    }

    /// Compiles all the sources of the `Project` in the appropriate mode
    ///
    /// If caching is enabled, the sources are filtered and only _dirty_ sources are recompiled.
    ///
    /// The output of the compile process can be a mix of reused artifacts and freshly compiled
    /// `Contract`s
    ///
    /// # Example
    ///
    /// ```no_run
    /// use ethers_solc::Project;
    ///
    /// let project = Project::builder().build().unwrap();
    /// let output = project.compile().unwrap();
    /// ```
    pub fn compile(self) -> Result<ProjectCompileOutput<T>> {
        // drive the compiler statemachine to completion
        self.preprocess()?.compile()?.write_artifacts()?.write_cache()
    }

    /// Does basic preprocessing
    /// - sets proper source unit names
    /// - check cache
    fn preprocess(self) -> Result<PreprocessedState<'a, T>> {
        let Self { edges, project, sources, sparse_output } = self;

        let mut cache = ArtifactsCache::new(project, edges)?;
        // retain and compile only dirty sources and all their imports
        let sources = sources.filtered(&mut cache);

        Ok(PreprocessedState { sources, cache, sparse_output })
    }
}
|
|
|
|
|
|
|
|
/// A series of states that comprise the [`ProjectCompiler::compile()`] state machine
///
/// The main reason is to debug all states individually
#[derive(Debug)]
struct PreprocessedState<'a, T: ArtifactOutput> {
    /// contains all sources to compile
    sources: FilteredCompilerSources,
    /// cache that holds [CacheEntry] object if caching is enabled and the project is recompiled
    cache: ArtifactsCache<'a, T>,
    /// filter carried over from the `ProjectCompiler`, applied when selecting solc output
    sparse_output: SparseOutputFilter,
}
|
|
|
|
|
|
|
|
impl<'a, T: ArtifactOutput> PreprocessedState<'a, T> {
    /// advance to the next state by compiling all sources
    ///
    /// Invokes solc (sequentially or in parallel, depending on how the sources were grouped) with
    /// the project's configured settings and paths.
    fn compile(self) -> Result<CompiledState<'a, T>> {
        let PreprocessedState { sources, cache, sparse_output } = self;
        let output = sources.compile(
            &cache.project().solc_config.settings,
            &cache.project().paths,
            sparse_output,
            cache.graph(),
        )?;

        Ok(CompiledState { output, cache })
    }
}
|
|
|
|
|
|
|
|
/// Represents the state after `solc` was successfully invoked
#[derive(Debug)]
struct CompiledState<'a, T: ArtifactOutput> {
    /// aggregated output of all solc invocations, may contain compiler errors
    output: AggregatedCompilerOutput,
    /// cache carried through from preprocessing, later used to write the cache file
    cache: ArtifactsCache<'a, T>,
}
|
|
|
|
|
|
|
|
impl<'a, T: ArtifactOutput> CompiledState<'a, T> {
    /// advance to the next state by handling all artifacts
    ///
    /// Writes all output contracts to disk if enabled in the `Project` and if the build was
    /// successful
    fn write_artifacts(self) -> Result<ArtifactsState<'a, T>> {
        let CompiledState { output, cache } = self;

        // write all artifacts via the handler but only if the build succeeded
        let compiled_artifacts = if cache.project().no_artifacts {
            // artifact writing is disabled: only convert the compiler output to artifacts
            // in memory, nothing is persisted
            cache
                .project()
                .artifacts_handler()
                .output_to_artifacts(&output.contracts, &output.sources)
        } else if output.has_error() {
            tracing::trace!("skip writing cache file due to solc errors: {:?}", output.errors);
            // build failed: still produce in-memory artifacts so callers can inspect them,
            // but use the non-writing conversion path
            cache
                .project()
                .artifacts_handler()
                .output_to_artifacts(&output.contracts, &output.sources)
        } else {
            // successful build: `on_output` receives the project paths and is the only
            // fallible branch here — presumably this is where artifacts hit disk
            cache.project().artifacts_handler().on_output(
                &output.contracts,
                &output.sources,
                &cache.project().paths,
            )?
        };

        Ok(ArtifactsState { output, cache, compiled_artifacts })
    }
}
|
|
|
|
|
|
|
|
/// Represents the state after all artifacts were written to disk
#[derive(Debug)]
struct ArtifactsState<'a, T: ArtifactOutput> {
    /// aggregated solc output, handed to the final `ProjectCompileOutput`
    output: AggregatedCompilerOutput,
    /// cache to be flushed to the cache file in the final step
    cache: ArtifactsCache<'a, T>,
    /// artifacts produced in this run (freshly compiled, not cached)
    compiled_artifacts: Artifacts<T::Artifact>,
}
|
|
|
|
|
|
|
|
impl<'a, T: ArtifactOutput> ArtifactsState<'a, T> {
|
|
|
|
/// Writes the cache file
|
|
|
|
///
|
|
|
|
/// this concludes the [`Project::compile()`] statemachine
|
|
|
|
fn write_cache(self) -> Result<ProjectCompileOutput<T>> {
|
|
|
|
let ArtifactsState { output, cache, compiled_artifacts } = self;
|
|
|
|
let ignored_error_codes = cache.project().ignored_error_codes.clone();
|
|
|
|
let cached_artifacts = cache.write_cache(&compiled_artifacts)?;
|
|
|
|
Ok(ProjectCompileOutput {
|
|
|
|
compiler_output: output,
|
|
|
|
compiled_artifacts,
|
|
|
|
cached_artifacts,
|
|
|
|
ignored_error_codes,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Determines how the `solc <-> sources` pairs are executed
#[derive(Debug, Clone)]
#[allow(dead_code)]
enum CompilerSources {
    /// Compile all these sequentially
    Sequential(VersionedSources),
    /// Compile all these in parallel using a certain amount of jobs
    Parallel(VersionedSources, usize),
}
|
|
|
|
|
|
|
|
impl CompilerSources {
|
|
|
|
/// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`]
|
2022-03-15 15:58:33 +00:00
|
|
|
fn filtered<T: ArtifactOutput>(self, cache: &mut ArtifactsCache<T>) -> FilteredCompilerSources {
|
2022-02-04 16:20:24 +00:00
|
|
|
fn filtered_sources<T: ArtifactOutput>(
|
|
|
|
sources: VersionedSources,
|
|
|
|
cache: &mut ArtifactsCache<T>,
|
2022-03-15 15:58:33 +00:00
|
|
|
) -> VersionedFilteredSources {
|
2022-02-04 16:20:24 +00:00
|
|
|
sources
|
|
|
|
.into_iter()
|
|
|
|
.map(|(solc, (version, sources))| {
|
2022-03-11 16:43:48 +00:00
|
|
|
tracing::trace!("Filtering {} sources for {}", sources.len(), version);
|
2022-02-04 16:20:24 +00:00
|
|
|
let sources = cache.filter(sources, &version);
|
2022-03-11 16:43:48 +00:00
|
|
|
tracing::trace!(
|
|
|
|
"Detected {} dirty sources {:?}",
|
2022-03-15 15:58:33 +00:00
|
|
|
sources.dirty().count(),
|
2022-03-17 08:27:03 +00:00
|
|
|
sources.dirty_files().collect::<Vec<_>>()
|
2022-03-11 16:43:48 +00:00
|
|
|
);
|
2022-02-04 16:20:24 +00:00
|
|
|
(solc, (version, sources))
|
|
|
|
})
|
|
|
|
.collect()
|
|
|
|
}
|
|
|
|
|
|
|
|
match self {
|
|
|
|
CompilerSources::Sequential(s) => {
|
2022-03-15 15:58:33 +00:00
|
|
|
FilteredCompilerSources::Sequential(filtered_sources(s, cache))
|
2022-02-04 16:20:24 +00:00
|
|
|
}
|
|
|
|
CompilerSources::Parallel(s, j) => {
|
2022-03-15 15:58:33 +00:00
|
|
|
FilteredCompilerSources::Parallel(filtered_sources(s, cache), j)
|
2022-02-04 16:20:24 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2022-03-15 15:58:33 +00:00
|
|
|
}
|
2022-02-04 16:20:24 +00:00
|
|
|
|
2022-03-15 15:58:33 +00:00
|
|
|
/// Determines how the `solc <-> sources` pairs are executed
///
/// Same shape as [`CompilerSources`], but holds the sources after the cache filter was applied.
#[derive(Debug, Clone)]
#[allow(dead_code)]
enum FilteredCompilerSources {
    /// Compile all these sequentially
    Sequential(VersionedFilteredSources),
    /// Compile all these in parallel using a certain amount of jobs
    Parallel(VersionedFilteredSources, usize),
}
|
|
|
|
|
|
|
|
impl FilteredCompilerSources {
    /// Compiles all the files with `Solc`
    ///
    /// Dispatches to the sequential or parallel driver depending on the variant.
    fn compile(
        self,
        settings: &Settings,
        paths: &ProjectPathsConfig,
        sparse_output: SparseOutputFilter,
        graph: &GraphEdges,
    ) -> Result<AggregatedCompilerOutput> {
        match self {
            FilteredCompilerSources::Sequential(input) => {
                compile_sequential(input, settings, paths, sparse_output, graph)
            }
            FilteredCompilerSources::Parallel(input, j) => {
                compile_parallel(input, j, settings, paths, sparse_output, graph)
            }
        }
    }

    /// Returns the inner filtered sources regardless of variant; test-only accessor.
    #[cfg(test)]
    #[allow(unused)]
    fn sources(&self) -> &VersionedFilteredSources {
        match self {
            FilteredCompilerSources::Sequential(v) => v,
            FilteredCompilerSources::Parallel(v, _) => v,
        }
    }
}
|
|
|
|
|
|
|
|
/// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s
fn compile_sequential(
    input: VersionedFilteredSources,
    settings: &Settings,
    paths: &ProjectPathsConfig,
    sparse_output: SparseOutputFilter,
    graph: &GraphEdges,
) -> Result<AggregatedCompilerOutput> {
    let mut aggregated = AggregatedCompilerOutput::default();
    tracing::trace!("compiling {} jobs sequentially", input.len());
    for (solc, (version, filtered_sources)) in input {
        if filtered_sources.is_empty() {
            // nothing to compile
            tracing::trace!(
                "skip solc {} {} for empty sources set",
                solc.as_ref().display(),
                version
            );
            continue
        }
        tracing::trace!(
            "compiling {} sources with solc \"{}\" {:?}",
            filtered_sources.len(),
            solc.as_ref().display(),
            solc.args
        );

        // dirty files are recorded before the filtered set is consumed below
        let dirty_files: Vec<PathBuf> = filtered_sources.dirty_files().cloned().collect();

        // depending on the composition of the filtered sources, the output selection can be
        // optimized
        let mut opt_settings = settings.clone();
        let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph);

        // one `CompilerInput` per language set (see the empty-set skip below)
        for input in CompilerInput::with_sources(sources) {
            let actually_dirty = input
                .sources
                .keys()
                .filter(|f| dirty_files.contains(f))
                .cloned()
                .collect::<Vec<_>>();
            if actually_dirty.is_empty() {
                // nothing to compile for this particular language, all dirty files are in the other
                // language set
                tracing::trace!(
                    "skip solc {} {} compilation of {} compiler input due to empty source set",
                    solc.as_ref().display(),
                    version,
                    input.language
                );
                continue
            }
            // apply the (possibly sparse-optimized) settings and sanitize for this solc version
            let input = input
                .settings(opt_settings.clone())
                .normalize_evm_version(&version)
                .with_remappings(paths.remappings.clone())
                .sanitized(&version);

            tracing::trace!(
                "calling solc `{}` with {} sources {:?}",
                version,
                input.sources.len(),
                input.sources.keys()
            );

            let start = Instant::now();
            report::solc_spawn(&solc, &version, &input, &actually_dirty);
            let output = solc.compile_exact(&input)?;
            report::solc_success(&solc, &version, &output, &start.elapsed());
            tracing::trace!("compiled input, output has error: {}", output.has_error());
            tracing::trace!("received compiler output: {:?}", output.contracts.keys());
            aggregated.extend(version.clone(), output);
        }
    }
    Ok(aggregated)
}
|
|
|
|
|
|
|
|
/// compiles the input set using `num_jobs` threads
|
|
|
|
fn compile_parallel(
|
2022-03-15 15:58:33 +00:00
|
|
|
input: VersionedFilteredSources,
|
2022-02-04 16:20:24 +00:00
|
|
|
num_jobs: usize,
|
|
|
|
settings: &Settings,
|
|
|
|
paths: &ProjectPathsConfig,
|
2022-04-04 22:50:10 +00:00
|
|
|
sparse_output: SparseOutputFilter,
|
|
|
|
graph: &GraphEdges,
|
2022-02-04 16:20:24 +00:00
|
|
|
) -> Result<AggregatedCompilerOutput> {
|
|
|
|
debug_assert!(num_jobs > 1);
|
2022-02-05 14:07:37 +00:00
|
|
|
tracing::trace!(
|
|
|
|
"compile {} sources in parallel using up to {} solc jobs",
|
|
|
|
input.len(),
|
|
|
|
num_jobs
|
|
|
|
);
|
2022-02-04 16:20:24 +00:00
|
|
|
|
|
|
|
let mut jobs = Vec::with_capacity(input.len());
|
2022-03-15 15:58:33 +00:00
|
|
|
for (solc, (version, filtered_sources)) in input {
|
|
|
|
if filtered_sources.is_empty() {
|
2022-02-04 16:20:24 +00:00
|
|
|
// nothing to compile
|
2022-03-15 15:58:33 +00:00
|
|
|
tracing::trace!(
|
|
|
|
"skip solc {} {} for empty sources set",
|
|
|
|
solc.as_ref().display(),
|
|
|
|
version
|
|
|
|
);
|
2022-02-04 16:20:24 +00:00
|
|
|
continue
|
|
|
|
}
|
2022-03-15 15:58:33 +00:00
|
|
|
|
|
|
|
let dirty_files: Vec<PathBuf> = filtered_sources.dirty_files().cloned().collect();
|
|
|
|
|
|
|
|
// depending on the composition of the filtered sources, the output selection can be
|
|
|
|
// optimized
|
|
|
|
let mut opt_settings = settings.clone();
|
2022-04-04 22:50:10 +00:00
|
|
|
let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph);
|
2022-03-15 15:58:33 +00:00
|
|
|
|
2022-03-10 18:42:02 +00:00
|
|
|
for input in CompilerInput::with_sources(sources) {
|
2022-03-15 15:58:33 +00:00
|
|
|
let actually_dirty = input
|
|
|
|
.sources
|
|
|
|
.keys()
|
|
|
|
.filter(|f| dirty_files.contains(f))
|
|
|
|
.cloned()
|
|
|
|
.collect::<Vec<_>>();
|
|
|
|
if actually_dirty.is_empty() {
|
|
|
|
// nothing to compile for this particular language, all dirty files are in the other
|
|
|
|
// language set
|
|
|
|
tracing::trace!(
|
|
|
|
"skip solc {} {} compilation of {} compiler input due to empty source set",
|
|
|
|
solc.as_ref().display(),
|
|
|
|
version,
|
|
|
|
input.language
|
|
|
|
);
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2022-03-10 18:42:02 +00:00
|
|
|
let job = input
|
|
|
|
.settings(settings.clone())
|
|
|
|
.normalize_evm_version(&version)
|
2022-04-05 19:00:04 +00:00
|
|
|
.with_remappings(paths.remappings.clone())
|
|
|
|
.sanitized(&version);
|
2022-02-04 16:20:24 +00:00
|
|
|
|
2022-03-15 15:58:33 +00:00
|
|
|
jobs.push((solc.clone(), version.clone(), job, actually_dirty))
|
2022-03-10 18:42:02 +00:00
|
|
|
}
|
2022-02-04 16:20:24 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// start a rayon threadpool that will execute all `Solc::compile()` processes
|
|
|
|
let pool = rayon::ThreadPoolBuilder::new().num_threads(num_jobs).build().unwrap();
|
|
|
|
let outputs = pool.install(move || {
|
|
|
|
jobs.into_par_iter()
|
2022-03-15 15:58:33 +00:00
|
|
|
.map(|(solc, version, input, actually_dirty)| {
|
2022-02-04 16:20:24 +00:00
|
|
|
tracing::trace!(
|
|
|
|
"calling solc `{}` {:?} with {} sources: {:?}",
|
|
|
|
version,
|
|
|
|
solc.args,
|
|
|
|
input.sources.len(),
|
|
|
|
input.sources.keys()
|
|
|
|
);
|
2022-04-02 21:37:38 +00:00
|
|
|
let start = Instant::now();
|
2022-03-15 15:58:33 +00:00
|
|
|
report::solc_spawn(&solc, &version, &input, &actually_dirty);
|
2022-02-08 23:14:57 +00:00
|
|
|
solc.compile(&input).map(move |output| {
|
2022-04-02 21:37:38 +00:00
|
|
|
report::solc_success(&solc, &version, &output, &start.elapsed());
|
2022-02-08 23:14:57 +00:00
|
|
|
(version, output)
|
|
|
|
})
|
2022-02-04 16:20:24 +00:00
|
|
|
})
|
|
|
|
.collect::<Result<Vec<_>>>()
|
|
|
|
})?;
|
|
|
|
|
|
|
|
let mut aggregated = AggregatedCompilerOutput::default();
|
|
|
|
aggregated.extend_all(outputs);
|
|
|
|
|
|
|
|
Ok(aggregated)
|
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(test)]
#[cfg(feature = "project-util")]
mod tests {
    use super::*;
    use crate::{project_util::TempProject, MinimalCombinedArtifacts};

    use std::path::PathBuf;

    /// Initializes a tracing subscriber from `RUST_LOG` for ad-hoc test debugging.
    #[allow(unused)]
    fn init_tracing() {
        let _ = tracing_subscriber::fmt()
            .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
            .try_init()
            .ok();
    }

    #[test]
    fn can_preprocess() {
        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
        let project =
            Project::builder().paths(ProjectPathsConfig::dapptools(root).unwrap()).build().unwrap();

        let compiler = ProjectCompiler::new(&project).unwrap();
        let prep = compiler.preprocess().unwrap();
        let cache = prep.cache.as_cached().unwrap();
        // 3 contracts
        assert_eq!(cache.dirty_source_files.len(), 3);
        // a fresh project: nothing filtered out, no prior cache entries
        assert!(cache.filtered.is_empty());
        assert!(cache.cache.is_empty());

        let compiled = prep.compile().unwrap();
        assert_eq!(compiled.output.contracts.files().count(), 3);
    }

    #[test]
    fn can_detect_cached_files() {
        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
        let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib"));
        let project = TempProject::<MinimalCombinedArtifacts>::new(paths).unwrap();

        let compiled = project.compile().unwrap();
        assert!(!compiled.has_compiler_errors());

        // recompiling an unchanged project should find nothing dirty
        let inner = project.project();
        let compiler = ProjectCompiler::new(inner).unwrap();
        let prep = compiler.preprocess().unwrap();
        assert!(prep.cache.as_cached().unwrap().dirty_source_files.is_empty())
    }

    #[test]
    fn can_recompile_with_optimized_output() {
        let tmp = TempProject::dapptools().unwrap();

        // A -> B -> C import chain
        tmp.add_source(
            "A",
            r#"
    pragma solidity ^0.8.10;
    import "./B.sol";
    contract A {}
   "#,
        )
        .unwrap();

        tmp.add_source(
            "B",
            r#"
    pragma solidity ^0.8.10;
    contract B {
        function hello() public {}
    }
    import "./C.sol";
   "#,
        )
        .unwrap();

        tmp.add_source(
            "C",
            r#"
    pragma solidity ^0.8.10;
    contract C {
            function hello() public {}
    }
   "#,
        )
        .unwrap();
        let compiled = tmp.compile().unwrap();
        assert!(!compiled.has_compiler_errors());

        tmp.artifacts_snapshot().unwrap().assert_artifacts_essentials_present();

        // modify A.sol
        tmp.add_source(
            "A",
            r#"
    pragma solidity ^0.8.10;
    import "./B.sol";
    contract A {
        function testExample() public {}
    }
   "#,
        )
        .unwrap();

        let compiler = ProjectCompiler::new(tmp.project()).unwrap();
        let state = compiler.preprocess().unwrap();
        let sources = state.sources.sources();

        // single solc
        assert_eq!(sources.len(), 1);

        let (_, filtered) = sources.values().next().unwrap();

        // 3 contracts total
        assert_eq!(filtered.0.len(), 3);
        // A is modified
        assert_eq!(filtered.dirty().count(), 1);
        assert!(filtered.dirty_files().next().unwrap().ends_with("A.sol"));

        let state = state.compile().unwrap();
        assert_eq!(state.output.sources.len(), 3);
        // only the dirty file (A) should have full output (ast present), the rest is pruned
        for (f, source) in &state.output.sources {
            if f.ends_with("A.sol") {
                assert!(source.ast.is_object());
            } else {
                assert!(source.ast.is_null());
            }
        }

        // the sparse output selection restricts contract output to the dirty file
        assert_eq!(state.output.contracts.len(), 1);
        let (a, c) = state.output.contracts_iter().next().unwrap();
        assert_eq!(a, "A");
        assert!(c.abi.is_some() && c.evm.is_some());

        let state = state.write_artifacts().unwrap();
        assert_eq!(state.compiled_artifacts.as_ref().len(), 1);

        let out = state.write_cache().unwrap();

        // cached + freshly compiled artifacts together cover all 3 contracts
        let artifacts: Vec<_> = out.into_artifacts().collect();
        assert_eq!(artifacts.len(), 3);
        for (_, artifact) in artifacts {
            let c = artifact.into_contract_bytecode();
            assert!(c.abi.is_some() && c.bytecode.is_some() && c.deployed_bytecode.is_some());
        }

        tmp.artifacts_snapshot().unwrap().assert_artifacts_essentials_present();
    }

    /// Manual integration test against a real project checkout; run with `--ignored`.
    #[test]
    #[ignore]
    fn can_compile_real_project() {
        init_tracing();
        let paths = ProjectPathsConfig::builder()
            .root("../../foundry-integration-tests/testdata/solmate")
            .build()
            .unwrap();
        let project = Project::builder().paths(paths).build().unwrap();
        let compiler = ProjectCompiler::new(&project).unwrap();
        let out = compiler.compile().unwrap();
        println!("{}", out);
    }
}
|