fix(solc): invalid cached artifacts (#783)

* add some docs

* ignore malformed cache file

* more artifacts fixes

* fix(solc): remove quotation marks from tests

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>
Matthias Seitz 2022-01-11 11:02:57 +01:00 committed by GitHub
parent 092bd96a39
commit 4d2cd83698
5 changed files with 236 additions and 65 deletions
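
At its core, the fix makes cache invalidation import-aware: a cached source is now considered dirty not only when its own content hash changes, but also when any file it transitively imports has changed. Below is a minimal, self-contained sketch of that traversal with simplified stand-in types (this is not the crate's actual `CacheEntry`; the real implementation, including path resolution and the early-return structure, is in the diff that follows):

use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};

/// Simplified stand-in for a cache entry: the hash recorded at compile time
/// plus the files this source imports.
struct Entry {
    content_hash: String,
    imports: Vec<PathBuf>,
}

/// Returns true if `file` or anything it (transitively) imports has changed.
/// `traversed` guards against infinite recursion on circular imports.
fn is_dirty(
    file: &Path,
    cache: &HashMap<PathBuf, Entry>,
    current_hashes: &HashMap<PathBuf, String>,
    traversed: &mut HashSet<PathBuf>,
) -> bool {
    // a file already on the traversal path can't make us dirtier
    if !traversed.insert(file.to_path_buf()) {
        return false
    }
    let entry = match cache.get(file) {
        Some(entry) => entry,
        // never compiled before
        None => return true,
    };
    // the file itself changed
    if current_hashes.get(file) != Some(&entry.content_hash) {
        return true
    }
    // otherwise it is dirty iff any of its imports is dirty
    entry.imports.iter().any(|import| is_dirty(import, cache, current_hashes, traversed))
}

fn main() {
    let cache = HashMap::from([
        ("A.sol".into(), Entry { content_hash: "h1".into(), imports: vec!["B.sol".into()] }),
        ("B.sol".into(), Entry { content_hash: "h2".into(), imports: vec![] }),
    ]);
    // B.sol's content changed on disk, so A.sol must be recompiled too
    let hashes = HashMap::from([("A.sol".into(), "h1".into()), ("B.sol".into(), "changed".into())]);
    assert!(is_dirty(Path::new("A.sol"), &cache, &hashes, &mut HashSet::new()));
}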


@@ -3,11 +3,11 @@ use crate::{
     artifacts::{Contracts, Sources},
     config::SolcConfig,
     error::{Result, SolcError},
-    utils, ArtifactOutput,
+    utils, ArtifactOutput, ProjectPathsConfig,
 };
 use serde::{Deserialize, Serialize};
 use std::{
-    collections::{BTreeMap, HashMap},
+    collections::{BTreeMap, HashMap, HashSet},
     fs::{self, File},
     path::{Path, PathBuf},
     time::{Duration, UNIX_EPOCH},
@@ -133,17 +133,15 @@ impl SolFilesCache {
         &'a self,
         sources: Sources,
         config: Option<&'a SolcConfig>,
-        artifacts_root: &Path,
+        paths: &ProjectPathsConfig,
     ) -> Sources {
+        // all file hashes
+        let content_hashes: HashMap<_, _> =
+            sources.iter().map(|(file, source)| (file.clone(), source.content_hash())).collect();
         sources
             .into_iter()
-            .filter(move |(file, source)| {
-                self.has_changed_or_missing_artifact::<T>(
-                    file,
-                    source.content_hash().as_bytes(),
-                    config,
-                    artifacts_root,
-                )
+            .filter(move |(file, _)| {
+                self.has_changed_or_missing_artifact::<T>(file, &content_hashes, config, paths)
             })
             .collect()
     }
@@ -153,10 +151,11 @@ impl SolFilesCache {
     pub fn has_changed_or_missing_artifact<T: ArtifactOutput>(
         &self,
         file: &Path,
-        hash: &[u8],
+        hashes: &HashMap<PathBuf, String>,
         config: Option<&SolcConfig>,
-        artifacts_root: &Path,
+        paths: &ProjectPathsConfig,
     ) -> bool {
+        let hash = hashes.get(file).unwrap().as_bytes();
         if let Some(entry) = self.files.get(file) {
             if entry.content_hash.as_bytes() != hash {
                 tracing::trace!("changed content hash for cached artifact \"{}\"", file.display());
@@ -172,21 +171,60 @@ impl SolFilesCache {
                 }
             }

-            let missing_artifacts =
-                entry.artifacts.iter().any(|name| !T::output_exists(file, name, artifacts_root));
-            if missing_artifacts {
+            // checks whether an artifact this file depends on was removed
+            if entry.artifacts.iter().any(|name| !T::output_exists(file, name, &paths.artifacts)) {
                 tracing::trace!(
                     "missing linked artifacts for cached artifact \"{}\"",
                     file.display()
                 );
+                return true
             }
-            missing_artifacts
+
+            // check if any of the file's imported files changed
+            self.has_changed_imports(file, entry, hashes, paths, &mut HashSet::new())
         } else {
             tracing::trace!("missing cached artifact for \"{}\"", file.display());
             true
         }
     }
+
+    /// Returns true if the entry has any imports that were changed
+    fn has_changed_imports(
+        &self,
+        path: &Path,
+        entry: &CacheEntry,
+        hashes: &HashMap<PathBuf, String>,
+        paths: &ProjectPathsConfig,
+        traversed: &mut HashSet<PathBuf>,
+    ) -> bool {
+        let cwd = match path.parent() {
+            Some(inner) => inner,
+            None => return true,
+        };
+        if !traversed.insert(path.to_path_buf()) {
+            // skip already traversed files, this prevents SO for circular imports
+            return false
+        }
+
+        for import in entry.imports.iter() {
+            if let Some((import, import_path)) = paths
+                .resolve_import(cwd, Path::new(import.as_str()))
+                .ok()
+                .and_then(|import| self.files.get(&import).map(|e| (e, import)))
+            {
+                if let Some(hash) = hashes.get(&import_path) {
+                    if import.content_hash == hash.as_str() &&
+                        !self.has_changed_imports(&import_path, import, hashes, paths, traversed)
+                    {
+                        return false
+                    }
+                }
+            }
+        }
+
+        !entry.imports.is_empty()
+    }
+
     /// Checks if all artifact files exist
     pub fn all_artifacts_exist<T: ArtifactOutput>(&self, artifacts_root: &Path) -> bool {
         self.files.iter().all(|(file, entry)| {
@@ -211,6 +249,7 @@ impl SolFilesCache {
     }
 }

+// async variants for read and write
 #[cfg(feature = "async")]
 impl SolFilesCache {
     pub async fn async_read(path: impl AsRef<Path>) -> Result<Self> {
@@ -259,7 +298,14 @@ impl SolFilesCacheBuilder {
         self
     }

-    pub fn insert_files(self, sources: Sources, dest: Option<PathBuf>) -> Result<SolFilesCache> {
+    /// Creates a new `SolFilesCache` instance
+    ///
+    /// If a `cache_file` path was provided it's used as base.
+    pub fn insert_files(
+        self,
+        sources: Sources,
+        cache_file: Option<PathBuf>,
+    ) -> Result<SolFilesCache> {
         let format = self.format.unwrap_or_else(|| ETHERS_FORMAT_VERSION.to_string());
         let solc_config =
             self.solc_config.map(Ok).unwrap_or_else(|| SolcConfig::builder().build())?;
@@ -298,14 +344,18 @@ impl SolFilesCacheBuilder {
             files.insert(file, entry);
         }

-        let cache = if let Some(dest) = dest.as_ref().filter(|dest| dest.exists()) {
+        let cache = if let Some(dest) = cache_file.as_ref().filter(|dest| dest.exists()) {
             // read the existing cache and extend it by the files that changed
             // (if we just wrote to the cache file, we'd overwrite the existing data)
             let reader =
                 std::io::BufReader::new(File::open(dest).map_err(|err| SolcError::io(err, dest))?);
-            let mut cache: SolFilesCache = serde_json::from_reader(reader)?;
-            cache.files.extend(files);
-            cache
+            if let Ok(mut cache) = serde_json::from_reader::<_, SolFilesCache>(reader) {
+                cache.files.extend(files);
+                cache
+            } else {
+                tracing::error!("Failed to read existing cache file {}", dest.display());
+                SolFilesCache { format, files }
+            }
         } else {
             SolFilesCache { format, files }
         };


@@ -14,7 +14,7 @@ use std::{
     fmt,
     fmt::Formatter,
     fs, io,
-    path::{Path, PathBuf},
+    path::{Component, Path, PathBuf},
 };

 /// Where to find all files or where to write them
@@ -101,6 +101,31 @@ impl ProjectPathsConfig {
         Ok(Source::read_all_files(self.input_files())?)
     }

+    /// Attempts to resolve an `import` from the given working directory.
+    ///
+    /// The `cwd` path is the parent dir of the file that includes the `import`
+    pub fn resolve_import(&self, cwd: &Path, import: &Path) -> Result<PathBuf> {
+        let component = import
+            .components()
+            .next()
+            .ok_or_else(|| SolcError::msg(format!("Empty import path {}", import.display())))?;
+        if component == Component::CurDir || component == Component::ParentDir {
+            // if the import is relative we assume it's already part of the processed input
+            // file set
+            utils::canonicalize(cwd.join(import)).map_err(|err| {
+                SolcError::msg(format!("failed to resolve relative import \"{:?}\"", err))
+            })
+        } else {
+            // resolve library file
+            self.resolve_library_import(import.as_ref()).ok_or_else(|| {
+                SolcError::msg(format!(
+                    "failed to resolve library import \"{:?}\"",
+                    import.display()
+                ))
+            })
+        }
+    }
+
     /// Attempts to find the path to the real solidity file that's imported via the given `import`
     /// path by applying the configured remappings and checking the library dirs
     pub fn resolve_library_import(&self, import: &Path) -> Option<PathBuf> {
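
The new `resolve_import` centralizes logic that previously lived in the resolver: imports beginning with `./` or `../` are canonicalized against the importing file's directory, while everything else goes through remappings and the library dirs. A small usage sketch with made-up paths (`demo` and its arguments are illustrative only):

use std::path::Path;

use ethers_solc::ProjectPathsConfig;

fn demo(paths: &ProjectPathsConfig) {
    // parent dir of the importing file
    let cwd = Path::new("/project/src");
    // relative import: resolved against cwd and canonicalized
    let relative = paths.resolve_import(cwd, Path::new("./Util.sol"));
    // bare import: resolved via remappings and the configured library dirs
    let library = paths.resolve_import(cwd, Path::new("ds-test/test.sol"));
    println!("{:?} {:?}", relative, library);
}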


@@ -213,6 +213,16 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
     /// `CompilerOutput::has_error` instead.
     /// NB: If the `svm` feature is enabled, this function will automatically detect
     /// solc versions across files.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use ethers_solc::Project;
+    /// # fn demo(project: Project) {
+    /// let project = Project::builder().build().unwrap();
+    /// let output = project.compile().unwrap();
+    /// # }
+    /// ```
     #[tracing::instrument(skip_all, name = "compile")]
     pub fn compile(&self) -> Result<ProjectCompileOutput<Artifacts>> {
         let sources = self.paths.read_input_files()?;
@@ -349,6 +359,22 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
     /// sources and their existing artifacts are read instead. This will also update the cache
     /// file and clean up entries for files which may have been removed. Unchanged files for
     /// which an artifact exists are not compiled again.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use ethers_solc::{Project, Solc};
+    /// # fn demo(project: Project) {
+    /// let project = Project::builder().build().unwrap();
+    /// let sources = project.paths.read_sources().unwrap();
+    /// project
+    ///     .compile_with_version(
+    ///         &Solc::find_svm_installed_version("0.8.11").unwrap().unwrap(),
+    ///         sources,
+    ///     )
+    ///     .unwrap();
+    /// # }
+    /// ```
     pub fn compile_with_version(
         &self,
         solc: &Solc,
@@ -436,7 +462,7 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
         let changed_files = cache.get_changed_or_missing_artifacts_files::<Artifacts>(
             sources,
             Some(&self.solc_config),
-            &self.paths.artifacts,
+            &self.paths,
         );
         tracing::trace!("detected {} changed files", changed_files.len());
         cache.remove_changed_files(&changed_files);
@@ -830,7 +856,7 @@ where
 }

 impl<T: ArtifactOutput + 'static> ProjectCompileOutput<T> {
-    /// All artifacts together with their contract name
+    /// All artifacts together with their contract file name and name `<file name>:<name>`
     ///
     /// # Example
     ///
@@ -844,16 +870,22 @@ impl<T: ArtifactOutput + 'static> ProjectCompileOutput<T> {
     /// ```
     pub fn into_artifacts(mut self) -> Box<dyn Iterator<Item = (String, T::Artifact)>> {
         let artifacts = self.artifacts.into_iter().filter_map(|(path, art)| {
-            T::contract_name(&path)
-                .map(|name| (format!("{:?}:{}", path.file_name().unwrap(), name), art))
+            T::contract_name(&path).map(|name| {
+                (format!("{}:{}", path.file_name().unwrap().to_string_lossy(), name), art)
+            })
         });

-        let artifacts: Box<dyn Iterator<Item = (String, T::Artifact)>> =
-            if let Some(output) = self.compiler_output.take() {
-                Box::new(artifacts.chain(T::output_to_artifacts(output).into_values().flatten()))
-            } else {
-                Box::new(artifacts)
-            };
+        let artifacts: Box<dyn Iterator<Item = (String, T::Artifact)>> = if let Some(output) =
+            self.compiler_output.take()
+        {
+            Box::new(artifacts.chain(T::output_to_artifacts(output).into_values().flatten().map(
+                |(name, artifact)| {
+                    (format!("{}:{}", T::output_file_name(&name).display(), name), artifact)
+                },
+            )))
+        } else {
+            Box::new(artifacts)
+        };
         artifacts
     }
 }
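
One user-visible effect of the `into_artifacts` change: keys were previously built with `{:?}` on an `OsStr`, yielding `"Dapp.json":Dapp`; they now read `Dapp.json:Dapp` (see the test updates below). A brief usage sketch, assuming an already configured project:

use ethers_solc::Project;

fn list_artifacts(project: Project) {
    let output = project.compile().unwrap();
    for (id, _artifact) in output.into_artifacts() {
        // keys are now `<file name>:<contract name>`, e.g. `Dapp.json:Dapp`
        println!("{}", id);
    }
}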


@@ -28,7 +28,7 @@
 use std::{
     collections::{HashMap, VecDeque},
-    path::{Component, Path, PathBuf},
+    path::{Path, PathBuf},
 };

 use rayon::prelude::*;
@@ -131,39 +131,19 @@ impl Graph {
         // locations
         while let Some((path, node)) = unresolved.pop_front() {
             let mut resolved_imports = Vec::with_capacity(node.data.imports.len());

             // parent directory of the current file
-            let node_dir = match path.parent() {
+            let cwd = match path.parent() {
                 Some(inner) => inner,
                 None => continue,
             };

             for import in node.data.imports.iter() {
-                let component = match import.components().next() {
-                    Some(inner) => inner,
-                    None => continue,
-                };
-                if component == Component::CurDir || component == Component::ParentDir {
-                    // if the import is relative we assume it's already part of the processed input
-                    // file set
-                    match utils::canonicalize(node_dir.join(import)) {
-                        Ok(target) => {
-                            // the file at least exists,
-                            add_node(&mut unresolved, &mut index, &mut resolved_imports, target)?;
-                        }
-                        Err(err) => {
-                            tracing::trace!("failed to resolve relative import \"{:?}\"", err);
-                        }
-                    }
-                } else {
-                    // resolve library file
-                    if let Some(lib) = paths.resolve_library_import(import.as_ref()) {
-                        add_node(&mut unresolved, &mut index, &mut resolved_imports, lib)?;
-                    } else {
-                        tracing::trace!(
-                            "failed to resolve library import \"{:?}\"",
-                            import.display()
-                        );
-                    }
-                }
+                match paths.resolve_import(cwd, import) {
+                    Ok(import) => {
+                        add_node(&mut unresolved, &mut index, &mut resolved_imports, import)?;
+                    }
+                    Err(err) => {
+                        tracing::trace!("{}", err)
+                    }
+                }
             }
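
With import resolution delegated to `ProjectPathsConfig::resolve_import`, the resolver loop above reduces to a plain breadth-first traversal over a worklist. A simplified sketch of that shape (the `resolve_all` helper and `imports_of` callback are made up; the real code uses `Graph`, `Node`, and `add_node`):

use std::collections::{HashMap, VecDeque};
use std::path::{Path, PathBuf};

/// Breadth-first traversal: index every reachable file exactly once,
/// queueing newly discovered imports for later processing.
fn resolve_all(
    entry: PathBuf,
    imports_of: impl Fn(&PathBuf) -> Vec<PathBuf>,
) -> HashMap<PathBuf, usize> {
    let mut index = HashMap::new();
    let mut unresolved = VecDeque::from([entry.clone()]);
    index.insert(entry, 0);
    while let Some(path) = unresolved.pop_front() {
        for import in imports_of(&path) {
            let next = index.len();
            // only queue files we haven't seen yet; mirrors add_node's dedup
            index.entry(import.clone()).or_insert_with(|| {
                unresolved.push_back(import);
                next
            });
        }
    }
    index
}

fn main() {
    let graph = resolve_all("Dapp.sol".into(), |file| {
        if file == Path::new("Dapp.sol") { vec!["DSTest.sol".into()] } else { vec![] }
    });
    assert_eq!(graph.len(), 2);
}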


@@ -137,6 +137,90 @@ fn can_compile_dapp_detect_changes_in_libs() {
     assert!(!compiled.is_unchanged());
 }

+#[test]
+fn can_compile_dapp_detect_changes_in_sources() {
+    let project = TempProject::<MinimalCombinedArtifacts>::dapptools().unwrap();
+
+    let src = project
+        .add_source(
+            "DssSpell.t",
+            r#"
+    pragma solidity ^0.8.10;
+    import "./DssSpell.t.base.sol";
+
+    contract DssSpellTest is DssSpellTestBase { }
+   "#,
+        )
+        .unwrap();
+
+    let base = project
+        .add_source(
+            "DssSpell.t.base",
+            r#"
+    pragma solidity ^0.8.10;
+
+    contract DssSpellTestBase {
+        address deployed_spell;
+
+        function setUp() public {
+            deployed_spell = address(0xA867399B43aF7790aC800f2fF3Fa7387dc52Ec5E);
+        }
+    }
+   "#,
+        )
+        .unwrap();
+
+    let graph = Graph::resolve(project.paths()).unwrap();
+    assert_eq!(graph.files().len(), 2);
+    assert_eq!(graph.files().clone(), HashMap::from([(base, 0), (src, 1),]));
+    assert_eq!(graph.imported_nodes(1).to_vec(), vec![0]);
+
+    let compiled = project.compile().unwrap();
+    assert!(!compiled.has_compiler_errors());
+    assert!(compiled.find("DssSpellTest").is_some());
+    assert!(compiled.find("DssSpellTestBase").is_some());
+
+    // nothing to compile
+    let compiled = project.compile().unwrap();
+    assert!(compiled.is_unchanged());
+    assert!(compiled.find("DssSpellTest").is_some());
+    assert!(compiled.find("DssSpellTestBase").is_some());
+
+    let cache = SolFilesCache::read(&project.paths().cache).unwrap();
+    assert_eq!(cache.files.len(), 2);
+
+    let mut artifacts = compiled.into_artifacts().collect::<HashMap<_, _>>();
+
+    // overwrite import
+    let _ = project
+        .add_source(
+            "DssSpell.t.base",
+            r#"
+    pragma solidity ^0.8.10;
+
+    contract DssSpellTestBase {
+        address deployed_spell;
+
+        function setUp() public {
+            deployed_spell = address(0);
+        }
+    }
+   "#,
+        )
+        .unwrap();
+    let graph = Graph::resolve(project.paths()).unwrap();
+    assert_eq!(graph.files().len(), 2);
+
+    let compiled = project.compile().unwrap();
+    assert!(compiled.find("DssSpellTest").is_some());
+    assert!(compiled.find("DssSpellTestBase").is_some());
+    // ensure change is detected
+    assert!(!compiled.is_unchanged());
+
+    // and all recompiled artifacts are different
+    for (p, artifact) in compiled.into_artifacts() {
+        let other = artifacts.remove(&p).unwrap();
+        assert_ne!(artifact, other);
+    }
+}
+
 #[test]
 fn can_compile_dapp_sample_with_cache() {
     let tmp_dir = tempfile::tempdir().unwrap();
@@ -184,9 +268,9 @@ fn can_compile_dapp_sample_with_cache() {
     assert_eq!(
         compiled.into_artifacts().map(|(name, _)| name).collect::<Vec<_>>(),
         vec![
-            r#""Dapp.json":Dapp"#,
-            r#""DappTest.json":DappTest"#,
-            r#""DSTest.json":DSTest"#,
+            r#"Dapp.json:Dapp"#,
+            r#"DappTest.json:DappTest"#,
+            r#"DSTest.json:DSTest"#,
             "NewContract"
         ]
     );
@@ -197,9 +281,9 @@ fn can_compile_dapp_sample_with_cache() {
     assert_eq!(
         compiled.into_artifacts().map(|(name, _)| name).collect::<Vec<_>>(),
         vec![
-            r#""DappTest.json":DappTest"#,
-            r#""NewContract.json":NewContract"#,
-            r#""DSTest.json":DSTest"#,
+            r#"DappTest.json:DappTest"#,
+            r#"NewContract.json:NewContract"#,
+            r#"DSTest.json:DSTest"#,
             "Dapp"
         ]
     );