fix(solc): use cache context when determining artifact files (#1621)

* fix(solc): use cache context when determining artifact files

* update changelog
Matthias Seitz 2022-08-19 23:33:28 +02:00 committed by GitHub
parent 68fba606c2
commit ff754263a3
7 changed files with 223 additions and 47 deletions


@@ -117,6 +117,8 @@
### Unreleased
- Add `OutputContext` to `ArtifactOutput` trait
[#1621](https://github.com/gakonst/ethers-rs/pull/1621)
- On Windows, all paths in the `ProjectCompilerOutput` are now slashed by default
[#1540](https://github.com/gakonst/ethers-rs/pull/1540)
- `ArtifactOutput::write_extras` now takes the `Artifacts` directly
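
For implementors and callers of `ArtifactOutput`, the extra parameter changes the call sites. A minimal sketch of the adaptation, mirroring the signatures added in this commit (the `handler` and `cache` bindings are illustrative placeholders, not part of this diff):

```rust
// before: the artifact handler had no access to the cache
// let artifacts = handler.output_to_artifacts(&output.contracts, &output.sources);

// after: derive the cache-aware context and thread it through
let ctx = cache.output_ctx();
let artifacts = handler.output_to_artifacts(&output.contracts, &output.sources, ctx);
```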


@@ -10,7 +10,7 @@ use crate::{
error::Result,
sourcemap::{SourceMap, SyntaxError},
sources::VersionedSourceFile,
utils, HardhatArtifact, ProjectPathsConfig, SolcError,
utils, HardhatArtifact, ProjectPathsConfig, SolFilesCache, SolcError,
};
use ethers_core::{abi::Abi, types::Bytes};
use semver::Version;
@@ -23,7 +23,7 @@ use std::{
ops::Deref,
path::{Path, PathBuf},
};
use tracing::trace;
use tracing::{error, trace};
mod configurable;
use crate::contracts::VersionedContract;
@@ -98,6 +98,7 @@ pub struct ArtifactFile<T> {
impl<T: Serialize> ArtifactFile<T> {
/// Writes the given contract to the `out` path, creating all parent directories
pub fn write(&self) -> Result<()> {
trace!("writing artifact file {:?} {}", self.file, self.version);
utils::create_parent_dir_all(&self.file)?;
fs::write(&self.file, serde_json::to_vec_pretty(&self.artifact)?)
.map_err(|err| SolcError::io(err, &self.file))?;
@@ -574,8 +575,9 @@ pub trait ArtifactOutput {
contracts: &VersionedContracts,
sources: &VersionedSourceFiles,
layout: &ProjectPathsConfig,
ctx: OutputContext,
) -> Result<Artifacts<Self::Artifact>> {
let mut artifacts = self.output_to_artifacts(contracts, sources);
let mut artifacts = self.output_to_artifacts(contracts, sources, ctx);
artifacts.join_all(&layout.artifacts);
artifacts.write_all()?;
@@ -792,6 +794,7 @@ pub trait ArtifactOutput {
&self,
contracts: &VersionedContracts,
sources: &VersionedSourceFiles,
ctx: OutputContext,
) -> Artifacts<Self::Artifact> {
let mut artifacts = ArtifactsMap::new();
@@ -799,7 +802,7 @@
let mut non_standalone_sources = HashSet::new();
// this holds all output files and the contract(s) they belong to
let artifact_files = contracts.artifact_files::<Self>();
let artifact_files = contracts.artifact_files::<Self>(&ctx);
// this tracks the final artifacts, which we use as lookup for checking conflicts when
// converting stand-alone artifacts in the next step
@@ -932,6 +935,46 @@ pub trait ArtifactOutput {
) -> Option<Self::Artifact>;
}
/// Additional context to use during [`ArtifactOutput::on_output()`]
#[derive(Debug, Clone, Default)]
#[non_exhaustive]
pub struct OutputContext<'a> {
/// Cache file of the project, or empty if no caching is enabled
///
/// This context is required for a partially cached recompile with conflicting files, so that
/// we can reuse the same adjusted output path for conflicting files like:
///
/// ```text
/// src
/// ├── a.sol
/// └── inner
/// └── a.sol
/// ```
pub cache: Cow<'a, SolFilesCache>,
}
// === impl OutputContext ===
impl<'a> OutputContext<'a> {
/// Create a new context with the given cache file
pub fn new(cache: &'a SolFilesCache) -> Self {
Self { cache: Cow::Borrowed(cache) }
}
/// Returns the path of the already existing artifact for the `contract` of the `file`
/// compiled with the `version`.
///
/// Returns `None` if no matching artifact file exists.
pub fn existing_artifact(
&self,
file: impl AsRef<Path>,
contract: &str,
version: &Version,
) -> Option<&PathBuf> {
self.cache.entry(file)?.find_artifact(contract, version)
}
}
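
In practice the context is built from a `SolFilesCache` read back from disk. A hedged usage sketch (crate-root import paths assumed from this diff's `use` statements; the file, contract name, and version are placeholders):

```rust
use ethers_solc::{OutputContext, SolFilesCache};
use semver::Version;
use std::path::Path;

fn reuse_path(cache_path: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let cache = SolFilesCache::read(cache_path)?;
    let ctx = OutputContext::new(&cache);
    let version: Version = "0.8.10".parse()?;
    // yields the artifact path a previous compile job settled on, if any
    if let Some(path) = ctx.existing_artifact("src/inner/A.sol", "A", &version) {
        println!("reusing artifact at {}", path.display());
    }
    Ok(())
}
```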
/// An `Artifact` implementation that uses a compact representation
///
/// Creates a single json artifact with
@@ -984,8 +1027,9 @@ impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback {
output: &VersionedContracts,
sources: &VersionedSourceFiles,
layout: &ProjectPathsConfig,
ctx: OutputContext,
) -> Result<Artifacts<Self::Artifact>> {
MinimalCombinedArtifacts::default().on_output(output, sources, layout)
MinimalCombinedArtifacts::default().on_output(output, sources, layout, ctx)
}
fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
@@ -994,10 +1038,10 @@ impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback {
if let Ok(a) = serde_json::from_str(&content) {
Ok(a)
} else {
tracing::error!("Failed to deserialize compact artifact");
tracing::trace!("Fallback to hardhat artifact deserialization");
error!("Failed to deserialize compact artifact");
trace!("Fallback to hardhat artifact deserialization");
let artifact = serde_json::from_str::<HardhatArtifact>(&content)?;
tracing::trace!("successfully deserialized hardhat artifact");
trace!("successfully deserialized hardhat artifact");
Ok(artifact.into_contract_bytecode())
}
}


@@ -5,8 +5,8 @@ use crate::{
error::{Result, SolcError},
filter::{FilteredSource, FilteredSourceInfo, FilteredSources},
resolver::GraphEdges,
utils, ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Project, ProjectPathsConfig,
Source,
utils, ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, OutputContext, Project,
ProjectPathsConfig, Source,
};
use semver::Version;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
@@ -523,6 +523,11 @@ impl CacheEntry {
self.artifacts.values().flat_map(|artifacts| artifacts.iter())
}
/// Returns the artifact file for the contract and version pair
pub fn find_artifact(&self, contract: &str, version: &Version) -> Option<&PathBuf> {
self.artifacts.get(contract).and_then(|files| files.get(version))
}
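
This is the per-entry lookup that `OutputContext::existing_artifact` delegates to. A small sketch, assuming a binding `entry: &CacheEntry` for a source file whose contract `A` was built with solc 0.8.10:

```rust
let version = Version::new(0, 8, 10);
// the entry maps contract name -> (version -> artifact path)
if let Some(path) = entry.find_artifact("A", &version) {
    assert!(path.ends_with("A.json")); // layout-dependent, e.g. "A.sol/A.json"
}
```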
/// Iterator that yields all artifact files and their version
pub fn artifacts_for_version<'a>(
&'a self,
@@ -864,6 +869,13 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> {
}
}
pub fn output_ctx(&self) -> OutputContext {
match self {
ArtifactsCache::Ephemeral(_, _) => Default::default(),
ArtifactsCache::Cached(inner) => OutputContext::new(&inner.cache),
}
}
pub fn project(&self) -> &'a Project<T> {
match self {
ArtifactsCache::Ephemeral(_, project) => project,
@@ -948,7 +960,8 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> {
}).unwrap_or_default();
if !retain {
tracing::trace!(
"purging obsolete cached artifact for contract {} and version {}",
"purging obsolete cached artifact {:?} for contract {} and version {}",
f.file,
name,
f.version
);


@@ -3,11 +3,12 @@ use crate::{
contract::{CompactContractRef, Contract},
FileToContractsMap,
},
ArtifactFiles, ArtifactOutput,
ArtifactFiles, ArtifactOutput, OutputContext,
};
use semver::Version;
use serde::{Deserialize, Serialize};
use std::{collections::BTreeMap, ops::Deref, path::Path};
use tracing::trace;
/// file -> [(contract name -> Contract + solc version)]
#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)]
@@ -40,16 +41,38 @@ impl VersionedContracts {
}
/// Returns all the artifact files mapped to their contracts
pub(crate) fn artifact_files<T: ArtifactOutput + ?Sized>(&self) -> ArtifactFiles {
///
/// This will compute the appropriate output file paths but will _not_ write them.
/// The `ctx` is used to avoid possible output path conflicts.
pub(crate) fn artifact_files<T: ArtifactOutput + ?Sized>(
&self,
ctx: &OutputContext,
) -> ArtifactFiles {
let mut output_files = ArtifactFiles::with_capacity(self.len());
for (file, contracts) in self.iter() {
for (name, versioned_contracts) in contracts {
for contract in versioned_contracts {
let output = if versioned_contracts.len() > 1 {
// if an artifact for the contract already exists (from a previous compile job),
// we reuse its path; this ensures that even if there are conflicting files
// (files for which `T::output_file()` would return the same path) we use
// consistent output paths
let output = if let Some(existing_artifact) =
ctx.existing_artifact(file, name, &contract.version).cloned()
{
trace!("use existing artifact file {:?}", existing_artifact,);
existing_artifact
} else if versioned_contracts.len() > 1 {
T::output_file_versioned(file, name, &contract.version)
} else {
T::output_file(file, name)
};
trace!(
"use artifact file {:?} for contract file {} {}",
output,
file,
contract.version
);
let contract = (file.as_str(), name.as_str(), contract);
output_files.entry(output).or_default().push(contract);
}
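
Why the cache lookup takes precedence: under the default naming scheme, two same-named contracts in different files resolve to the same artifact path, so a recompile must reuse whatever de-conflicted path the first compile chose. A sketch of the collision, assuming the trait's default `output_file` naming (see the `can_handle_conflicting_files*` tests below):

```rust
use ethers_solc::{ArtifactOutput, ConfigurableArtifacts};

fn main() {
    // both derive "A.sol/A.json" from the source file name plus contract name
    let a = ConfigurableArtifacts::output_file("src/A.sol", "A");
    let b = ConfigurableArtifacts::output_file("src/inner/A.sol", "A");
    assert_eq!(a, b); // collision: the context must keep the adjusted path stable
}
```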


@@ -116,6 +116,7 @@ use crate::{
};
use rayon::prelude::*;
use std::{collections::btree_map::BTreeMap, path::PathBuf, time::Instant};
use tracing::trace;
#[derive(Debug)]
pub struct ProjectCompiler<'a, T: ArtifactOutput> {
@@ -231,6 +232,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
/// - sets proper source unit names
/// - checks the cache
fn preprocess(self) -> Result<PreprocessedState<'a, T>> {
trace!("preprocessing");
let Self { edges, project, mut sources, sparse_output } = self;
// convert paths on windows to ensure consistency with the `CompilerOutput` `solc` emits,
@@ -260,6 +262,7 @@ struct PreprocessedState<'a, T: ArtifactOutput> {
impl<'a, T: ArtifactOutput> PreprocessedState<'a, T> {
/// advance to the next state by compiling all sources
fn compile(self) -> Result<CompiledState<'a, T>> {
trace!("compiling");
let PreprocessedState { sources, cache, sparse_output } = self;
let project = cache.project();
let mut output = sources.compile(
@@ -293,19 +296,21 @@ impl<'a, T: ArtifactOutput> CompiledState<'a, T> {
///
/// Writes all output contracts to disk if enabled in the `Project` and if the build was
/// successful
#[tracing::instrument(skip_all, name = "write-artifacts")]
fn write_artifacts(self) -> Result<ArtifactsState<'a, T>> {
let CompiledState { output, cache } = self;
let project = cache.project();
let ctx = cache.output_ctx();
// write all artifacts via the handler but only if the build succeeded and project wasn't
// configured with `no_artifacts == true`
let compiled_artifacts = if project.no_artifacts {
project.artifacts_handler().output_to_artifacts(&output.contracts, &output.sources)
project.artifacts_handler().output_to_artifacts(&output.contracts, &output.sources, ctx)
} else if output.has_error() {
tracing::trace!("skip writing cache file due to solc errors: {:?}", output.errors);
project.artifacts_handler().output_to_artifacts(&output.contracts, &output.sources)
trace!("skip writing cache file due to solc errors: {:?}", output.errors);
project.artifacts_handler().output_to_artifacts(&output.contracts, &output.sources, ctx)
} else {
tracing::trace!(
trace!(
"handling artifact output for {} contracts and {} sources",
output.contracts.len(),
output.sources.len()
@@ -315,6 +320,7 @@ impl<'a, T: ArtifactOutput> CompiledState<'a, T> {
&output.contracts,
&output.sources,
&project.paths,
ctx,
)?;
// emits all the build infos, if they exist
@@ -340,6 +346,7 @@ impl<'a, T: ArtifactOutput> ArtifactsState<'a, T> {
///
/// this concludes the [`Project::compile()`] state machine
fn write_cache(self) -> Result<ProjectCompileOutput<T>> {
trace!("write cache");
let ArtifactsState { output, cache, compiled_artifacts } = self;
let ignored_error_codes = cache.project().ignored_error_codes.clone();
let skip_write_to_disk = cache.project().no_artifacts || output.has_error();
@@ -404,9 +411,9 @@ impl CompilerSources {
sources
.into_iter()
.map(|(solc, (version, sources))| {
tracing::trace!("Filtering {} sources for {}", sources.len(), version);
trace!("Filtering {} sources for {}", sources.len(), version);
let sources = cache.filter(sources, &version);
tracing::trace!(
trace!(
"Detected {} dirty sources {:?}",
sources.dirty().count(),
sources.dirty_files().collect::<Vec<_>>()
@@ -477,18 +484,14 @@ fn compile_sequential(
create_build_info: bool,
) -> Result<AggregatedCompilerOutput> {
let mut aggregated = AggregatedCompilerOutput::default();
tracing::trace!("compiling {} jobs sequentially", input.len());
trace!("compiling {} jobs sequentially", input.len());
for (solc, (version, filtered_sources)) in input {
if filtered_sources.is_empty() {
// nothing to compile
tracing::trace!(
"skip solc {} {} for empty sources set",
solc.as_ref().display(),
version
);
trace!("skip solc {} {} for empty sources set", solc.as_ref().display(), version);
continue
}
tracing::trace!(
trace!(
"compiling {} sources with solc \"{}\" {:?}",
filtered_sources.len(),
solc.as_ref().display(),
@@ -512,7 +515,7 @@
if actually_dirty.is_empty() {
// nothing to compile for this particular language, all dirty files are in the other
// language set
tracing::trace!(
trace!(
"skip solc {} {} compilation of {} compiler input due to empty source set",
solc.as_ref().display(),
version,
@@ -527,7 +530,7 @@
.with_base_path(&paths.root)
.sanitized(&version);
tracing::trace!(
trace!(
"calling solc `{}` with {} sources {:?}",
version,
input.sources.len(),
@@ -538,8 +541,8 @@
report::solc_spawn(&solc, &version, &input, &actually_dirty);
let output = solc.compile(&input)?;
report::solc_success(&solc, &version, &output, &start.elapsed());
tracing::trace!("compiled input, output has error: {}", output.has_error());
tracing::trace!("received compiler output: {:?}", output.contracts.keys());
trace!("compiled input, output has error: {}", output.has_error());
trace!("received compiler output: {:?}", output.contracts.keys());
// if configured also create the build info
if create_build_info {
@@ -564,21 +567,13 @@
create_build_info: bool,
) -> Result<AggregatedCompilerOutput> {
debug_assert!(num_jobs > 1);
tracing::trace!(
"compile {} sources in parallel using up to {} solc jobs",
input.len(),
num_jobs
);
trace!("compile {} sources in parallel using up to {} solc jobs", input.len(), num_jobs);
let mut jobs = Vec::with_capacity(input.len());
for (solc, (version, filtered_sources)) in input {
if filtered_sources.is_empty() {
// nothing to compile
tracing::trace!(
"skip solc {} {} for empty sources set",
solc.as_ref().display(),
version
);
trace!("skip solc {} {} for empty sources set", solc.as_ref().display(), version);
continue
}
@@ -599,7 +594,7 @@
if actually_dirty.is_empty() {
// nothing to compile for this particular language, all dirty files are in the other
// language set
tracing::trace!(
trace!(
"skip solc {} {} compilation of {} compiler input due to empty source set",
solc.as_ref().display(),
version,
@@ -633,7 +628,7 @@
// set the reporter on this thread
let _guard = report::set_scoped(&scoped_report);
tracing::trace!(
trace!(
"calling solc `{}` {:?} with {} sources: {:?}",
version,
solc.args,


@@ -859,8 +859,9 @@ impl<T: ArtifactOutput> ArtifactOutput for Project<T> {
contracts: &VersionedContracts,
sources: &VersionedSourceFiles,
layout: &ProjectPathsConfig,
ctx: OutputContext,
) -> Result<Artifacts<Self::Artifact>> {
self.artifacts_handler().on_output(contracts, sources, layout)
self.artifacts_handler().on_output(contracts, sources, layout, ctx)
}
fn write_contract_extras(&self, contract: &Contract, file: &Path) -> Result<()> {
@@ -933,8 +934,9 @@ impl<T: ArtifactOutput> ArtifactOutput for Project<T> {
&self,
contracts: &VersionedContracts,
sources: &VersionedSourceFiles,
ctx: OutputContext,
) -> Artifacts<Self::Artifact> {
self.artifacts_handler().output_to_artifacts(contracts, sources)
self.artifacts_handler().output_to_artifacts(contracts, sources, ctx)
}
fn standalone_source_file_to_artifact(


@@ -11,8 +11,8 @@ use ethers_solc::{
info::ContractInfo,
project_util::*,
remappings::Remapping,
CompilerInput, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, ProjectCompileOutput,
ProjectPathsConfig, Solc, TestFileFilter,
Artifact, CompilerInput, ConfigurableArtifacts, ExtraOutputValues, Graph, Project,
ProjectCompileOutput, ProjectPathsConfig, Solc, TestFileFilter,
};
use pretty_assertions::assert_eq;
use semver::Version;
@@ -2129,6 +2129,103 @@ fn can_handle_conflicting_files() {
);
}
// <https://github.com/foundry-rs/foundry/issues/2843>
#[test]
fn can_handle_conflicting_files_recompile() {
let project = TempProject::<ConfigurableArtifacts>::dapptools().unwrap();
project
.add_source(
"A",
r#"
pragma solidity ^0.8.10;
contract A {
function foo() public{}
}
"#,
)
.unwrap();
project
.add_source(
"inner/A",
r#"
pragma solidity ^0.8.10;
contract A {
function bar() public{}
}
"#,
)
.unwrap();
let compiled = project.compile().unwrap();
assert!(!compiled.has_compiler_errors());
let artifacts = compiled.artifacts().count();
assert_eq!(artifacts, 2);
// nothing to compile
let compiled = project.compile().unwrap();
assert!(compiled.is_unchanged());
let artifacts = compiled.artifacts().count();
assert_eq!(artifacts, 2);
let cache = SolFilesCache::read(project.cache_path()).unwrap();
let mut source_files = cache.files.keys().cloned().collect::<Vec<_>>();
source_files.sort_unstable();
assert_eq!(source_files, vec![PathBuf::from("src/A.sol"), PathBuf::from("src/inner/A.sol"),]);
let mut artifacts =
project.artifacts_snapshot().unwrap().artifacts.into_stripped_file_prefixes(project.root());
artifacts.strip_prefix_all(&project.paths().artifacts);
assert_eq!(artifacts.len(), 2);
let mut artifact_files = artifacts.artifact_files().map(|f| f.file.clone()).collect::<Vec<_>>();
artifact_files.sort_unstable();
let expected_files = vec![PathBuf::from("A.sol/A.json"), PathBuf::from("inner/A.sol/A.json")];
assert_eq!(artifact_files, expected_files);
// overwrite conflicting nested file, effectively changing it
project
.add_source(
"inner/A",
r#"
pragma solidity ^0.8.10;
contract A {
function bar() public{}
function baz() public{}
}
"#,
)
.unwrap();
let compiled = project.compile().unwrap();
assert!(!compiled.has_compiler_errors());
let mut recompiled_artifacts =
project.artifacts_snapshot().unwrap().artifacts.into_stripped_file_prefixes(project.root());
recompiled_artifacts.strip_prefix_all(&project.paths().artifacts);
assert_eq!(recompiled_artifacts.len(), 2);
let mut artifact_files =
recompiled_artifacts.artifact_files().map(|f| f.file.clone()).collect::<Vec<_>>();
artifact_files.sort_unstable();
assert_eq!(artifact_files, expected_files);
// ensure that `A.sol/A.json` is unchanged
let outer = artifacts.find("src/A.sol", "A").unwrap();
let outer_recompiled = recompiled_artifacts.find("src/A.sol", "A").unwrap();
assert_eq!(outer, outer_recompiled);
let inner_recompiled = recompiled_artifacts.find("src/inner/A.sol", "A").unwrap();
assert!(inner_recompiled.get_abi().unwrap().functions.contains_key("baz"));
}
#[test]
fn can_checkout_repo() {
let project = TempProject::checkout("transmissions11/solmate").unwrap();