fix(solc): fix cache and allowed paths bug (#998)
* fix: don't emit on error
* fix: allowed-paths condition
* fix(solc): cache and allowed paths bug
This commit is contained in:
parent cc96245b1d
commit 0c42c23746
@@ -581,7 +581,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
         }
     }
 
-    /// inserts the filtered source with the fiven version
+    /// inserts the filtered source with the given version
     fn insert_filtered_source(&mut self, file: PathBuf, source: Source, version: Version) {
         match self.filtered.entry(file) {
             hash_map::Entry::Occupied(mut entry) => {
@@ -593,35 +593,62 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
         }
     }
 
-    /// Returns only those sources that
+    /// Returns only dirty sources that:
     /// - are new
     /// - were changed
     /// - their imports were changed
     /// - their artifact is missing
+    /// This also includes their respective imports
     fn filter(&mut self, sources: Sources, version: &Version) -> Sources {
         self.fill_hashes(&sources);
-        sources
+        let mut imports_of_dirty = HashSet::new();
+        // separates all source files that fit the criteria (dirty) from those that don't (clean)
+        let (mut dirty_sources, clean_sources) = sources
             .into_iter()
-            .filter_map(|(file, source)| self.requires_solc(file, source, version))
-            .collect()
-    }
-
-    /// Returns `Some` if the file _needs_ to be compiled and `None` if the artifact can be reu-used
-    fn requires_solc(
-        &mut self,
-        file: PathBuf,
-        source: Source,
-        version: &Version,
-    ) -> Option<(PathBuf, Source)> {
+            .map(|(file, source)| self.filter_source(file, source, version))
+            .fold(
+                (Sources::default(), Vec::new()),
+                |(mut dirty_sources, mut clean_sources), source| {
+                    if source.dirty {
+                        // mark all files that are imported by a dirty file
+                        imports_of_dirty.extend(self.edges.all_imported_nodes(source.idx));
+                        dirty_sources.insert(source.file, source.source);
+                    } else {
+                        clean_sources.push(source);
+                    }
+                    (dirty_sources, clean_sources)
+                },
+            );
+
+        for clean_source in clean_sources {
+            let FilteredSource { file, source, idx, .. } = clean_source;
+            if imports_of_dirty.contains(&idx) {
+                // file is imported by a dirty file
+                dirty_sources.insert(file, source);
+            } else {
+                self.insert_filtered_source(file, source, version.clone());
+            }
+        }
+
+        // track dirty sources internally
+        for (file, source) in dirty_sources.iter() {
+            self.insert_new_cache_entry(file, source, version.clone());
+        }
+
+        dirty_sources
+    }
+
+    /// Returns the state of the given source file.
+    fn filter_source(&self, file: PathBuf, source: Source, version: &Version) -> FilteredSource {
+        let idx = self.edges.node_id(&file);
         if !self.is_dirty(&file, version) &&
             self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version))
         {
-            self.insert_filtered_source(file, source, version.clone());
-            None
+            FilteredSource { file, source, idx, dirty: false }
         } else {
-            self.insert_new_cache_entry(&file, &source, version.clone());
-
-            Some((file, source))
+            FilteredSource { file, source, idx, dirty: true }
         }
     }
 
@@ -674,6 +701,14 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
     }
 }
 
+/// Helper type to represent the state of a source file
+struct FilteredSource {
+    file: PathBuf,
+    source: Source,
+    idx: usize,
+    dirty: bool,
+}
+
 /// Abstraction over configured caching which can be either non-existent or an already loaded cache
 #[allow(clippy::large_enum_variant)]
 #[derive(Debug)]
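Note on the two hunks above: instead of deciding per file whether solc is required (`requires_solc`), `filter` now classifies every file, collects the node ids imported by dirty files, and then promotes any clean file that a dirty file imports. A standalone sketch of that two-pass idea follows; it is a rough approximation only, and the `Classified` struct, `files_to_compile` function and plain `HashMap`/`HashSet` inputs are hypothetical stand-ins for the crate's `FilteredSource`, `filter` and `GraphEdges` types.

use std::collections::{HashMap, HashSet};

// Hypothetical stand-in for the crate's FilteredSource: a classified source file.
struct Classified {
    name: String,
    content: String,
    idx: usize,
    dirty: bool,
}

/// Everything that must be recompiled: each dirty file plus every clean file
/// that a dirty file (transitively) imports.
fn files_to_compile(
    sources: HashMap<String, String>,            // file name -> contents
    node_id: &HashMap<String, usize>,            // file name -> graph node id
    imported_nodes: &HashMap<usize, Vec<usize>>, // node id -> all transitive imports
    is_dirty: impl Fn(&str) -> bool,
) -> HashMap<String, String> {
    let mut imports_of_dirty = HashSet::new();

    // First pass: split into dirty and clean, remembering the imports of dirty files.
    let (mut dirty, clean): (HashMap<_, _>, Vec<_>) = sources
        .into_iter()
        .map(|(name, content)| {
            let idx = node_id[&name];
            let dirty = is_dirty(name.as_str());
            Classified { name, content, idx, dirty }
        })
        .fold((HashMap::new(), Vec::new()), |(mut dirty, mut clean), entry| {
            if entry.dirty {
                imports_of_dirty.extend(imported_nodes[&entry.idx].iter().copied());
                dirty.insert(entry.name, entry.content);
            } else {
                clean.push(entry);
            }
            (dirty, clean)
        });

    // Second pass: a clean file still goes to the compiler if a dirty file imports it.
    for entry in clean {
        if imports_of_dirty.contains(&entry.idx) {
            dirty.insert(entry.name, entry.content);
        }
    }
    dirty
}

fn main() {
    // A.sol (node 0) imports B.sol (node 1); only A.sol changed.
    let sources = HashMap::from([
        ("A.sol".to_string(), "contract A {}".to_string()),
        ("B.sol".to_string(), "contract B {}".to_string()),
    ]);
    let node_id = HashMap::from([("A.sol".to_string(), 0), ("B.sol".to_string(), 1)]);
    let imports = HashMap::from([(0, vec![1]), (1, vec![])]);
    let dirty = files_to_compile(sources, &node_id, &imports, |name| name == "A.sol");
    // Both files end up in the compile set, because B.sol is imported by dirty A.sol.
    assert_eq!(dirty.len(), 2);
}

The key point is that a clean file is still handed to the compiler when some dirty file (transitively) imports it, which is what the `can_recompile_with_changes` test at the bottom of this commit exercises.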
@@ -179,7 +179,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
         let Self { edges, project, mut sources } = self;
 
         let mut cache = ArtifactsCache::new(project, edges)?;
-        // retain and compile only dirty sources
+        // retain and compile only dirty sources and all their imports
         sources = sources.filtered(&mut cache);
 
         Ok(PreprocessedState { sources, cache })
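The compile pipeline around this hunk is a small consume-and-advance state machine (`PreprocessedState` → `CompiledState` → `ArtifactsState`). A minimal sketch of that shape, with hypothetical `Preprocessed`/`Compiled`/`Finished` types that only illustrate the pattern, not the crate's actual fields:

// Each stage consumes the previous one, so a step can't be skipped or repeated.
struct Preprocessed {
    dirty_sources: Vec<String>,
}

struct Compiled {
    output: Vec<String>,
    had_errors: bool,
}

struct Finished {
    artifacts: Vec<String>,
}

impl Preprocessed {
    // Hypothetical stand-in for the real compile step: pretend the compiler echoes its inputs.
    fn compile(self) -> Compiled {
        Compiled { output: self.dirty_sources, had_errors: false }
    }
}

impl Compiled {
    // Loosely mirrors write_artifacts: only a successful build yields artifacts here
    // (simplified to "no artifacts at all" on error).
    fn write_artifacts(self) -> Finished {
        let artifacts = if self.had_errors { Vec::new() } else { self.output };
        Finished { artifacts }
    }
}

fn main() {
    let state = Preprocessed { dirty_sources: vec!["A.sol".to_string()] };
    let finished = state.compile().write_artifacts();
    assert_eq!(finished.artifacts.len(), 1);
}

Because each step takes `self` by value, a later stage can't run against stale earlier state.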
@@ -216,18 +216,22 @@ struct CompiledState<'a, T: ArtifactOutput> {
 impl<'a, T: ArtifactOutput> CompiledState<'a, T> {
     /// advance to the next state by handling all artifacts
     ///
-    /// Writes all output contracts to disk if enabled in the `Project`
+    /// Writes all output contracts to disk if enabled in the `Project` and if the build was
+    /// successful
     fn write_artifacts(self) -> Result<ArtifactsState<'a, T>> {
         let CompiledState { output, cache } = self;
 
-        // write all artifacts
-        let compiled_artifacts = if !cache.project().no_artifacts {
+        // write all artifacts via the handler but only if the build succeeded
+        let compiled_artifacts = if cache.project().no_artifacts {
+            cache.project().artifacts_handler().output_to_artifacts(&output.contracts)
+        } else if output.has_error() {
+            tracing::trace!("skip writing cache file due to solc errors: {:?}", output.errors);
+            cache.project().artifacts_handler().output_to_artifacts(&output.contracts)
+        } else {
             cache
                 .project()
                 .artifacts_handler()
                 .on_output(&output.contracts, &cache.project().paths)?
-        } else {
-            cache.project().artifacts_handler().output_to_artifacts(&output.contracts)
         };
 
         Ok(ArtifactsState { output, cache, compiled_artifacts })
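The branch order above is the actual "don't emit on error" fix: `no_artifacts` and a failed build both fall back to the in-memory conversion, and only a clean build reaches the handler that writes to disk. A condensed sketch of that decision, with hypothetical `to_artifacts`/`write_to_disk` helpers standing in for the crate's `output_to_artifacts`/`on_output`:

#[derive(Debug)]
struct Artifacts(Vec<String>);

fn to_artifacts(contracts: &[String]) -> Artifacts {
    // In-memory conversion only; nothing touches the file system.
    Artifacts(contracts.to_vec())
}

fn write_to_disk(contracts: &[String]) -> std::io::Result<Artifacts> {
    // Placeholder for the handler that persists artifacts and returns what it wrote.
    Ok(Artifacts(contracts.to_vec()))
}

fn collect_artifacts(
    contracts: &[String],
    no_artifacts: bool,
    has_errors: bool,
) -> std::io::Result<Artifacts> {
    let artifacts = if no_artifacts {
        // Writing is disabled for this project.
        to_artifacts(contracts)
    } else if has_errors {
        // The build failed: keep artifacts in memory but skip the disk write,
        // so a broken build can't overwrite previously written output.
        to_artifacts(contracts)
    } else {
        write_to_disk(contracts)?
    };
    Ok(artifacts)
}

fn main() -> std::io::Result<()> {
    let contracts = vec!["A".to_string()];
    let in_memory = collect_artifacts(&contracts, false, true)?; // error build: no write
    let written = collect_artifacts(&contracts, false, false)?;  // clean build: written
    println!("{in_memory:?} {written:?}");
    Ok(())
}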
@@ -125,9 +125,11 @@ impl<T: ArtifactOutput> Project<T> {
         &self.artifacts
     }
 
-    /// Applies the configured settings to the given `Solc`
+    /// Applies the configured arguments to the given `Solc`
+    ///
+    /// This will set the `--allow-paths` to the paths configured for the `Project`, if any.
     fn configure_solc(&self, mut solc: Solc) -> Solc {
-        if self.allowed_lib_paths.0.is_empty() {
+        if solc.args.is_empty() && !self.allowed_lib_paths.0.is_empty() {
             solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string());
         }
         solc
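The old condition was inverted: `--allow-paths` was appended when the allowed paths were empty. The new guard adds the flag only when paths are configured and the `Solc` instance has no arguments yet, presumably so an already-configured instance isn't given the flag twice. A hedged sketch of that guard with a hypothetical `SolcCmd` builder in place of the real `Solc` type:

// Hypothetical builder standing in for the crate's `Solc` type.
#[derive(Default)]
struct SolcCmd {
    args: Vec<String>,
}

impl SolcCmd {
    fn arg(mut self, a: impl Into<String>) -> Self {
        self.args.push(a.into());
        self
    }
}

/// Append `--allow-paths` only when paths are configured and no args were set yet.
fn configure(mut solc: SolcCmd, allowed_paths: &[String]) -> SolcCmd {
    if solc.args.is_empty() && !allowed_paths.is_empty() {
        solc = solc.arg("--allow-paths").arg(allowed_paths.join(","));
    }
    solc
}

fn main() {
    let allowed = vec!["lib".to_string(), "modules".to_string()];
    let solc = configure(SolcCmd::default(), &allowed);
    // Configuring a second time is a no-op because args are already present.
    let solc = configure(solc, &allowed);
    assert_eq!(solc.args, vec!["--allow-paths", "lib,modules"]);
}

Running `configure` twice leaves the arguments unchanged, which mirrors the `solc.args.is_empty()` check.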
@@ -91,6 +91,11 @@ impl GraphEdges {
         &self.edges[from]
     }
 
+    /// Returns an iterator that yields all imports of a node and all their imports
+    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
+        NodesIter::new(from, self).skip(1)
+    }
+
     /// Returns all files imported by the given file
     pub fn imports(&self, file: impl AsRef<Path>) -> HashSet<&PathBuf> {
         if let Some(start) = self.indices.get(file.as_ref()).copied() {
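`all_imported_nodes` walks the import graph transitively and skips the starting node, which is what lets `filter` mark every direct and indirect import of a dirty file. A minimal sketch of such a traversal over a plain adjacency list; the `Graph`/`NodeWalk` pair here is hypothetical and only approximates the crate's `GraphEdges`/`NodesIter`:

use std::collections::HashSet;

// Hypothetical adjacency-list graph; `edges[i]` lists the direct imports of node `i`.
struct Graph {
    edges: Vec<Vec<usize>>,
}

// Breadth-first walk that yields `start` first, then everything reachable from it.
struct NodeWalk<'a> {
    graph: &'a Graph,
    queue: std::collections::VecDeque<usize>,
    visited: HashSet<usize>,
}

impl<'a> Iterator for NodeWalk<'a> {
    type Item = usize;
    fn next(&mut self) -> Option<usize> {
        let node = self.queue.pop_front()?;
        for &imp in &self.graph.edges[node] {
            if self.visited.insert(imp) {
                self.queue.push_back(imp);
            }
        }
        Some(node)
    }
}

impl Graph {
    /// All direct and transitive imports of `from`, excluding `from` itself.
    fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
        let walk = NodeWalk {
            graph: self,
            queue: std::iter::once(from).collect(),
            visited: HashSet::from([from]),
        };
        walk.skip(1)
    }
}

fn main() {
    // 0 imports 1, 1 imports 2; node 3 is unrelated.
    let graph = Graph { edges: vec![vec![1], vec![2], vec![], vec![]] };
    let imports: Vec<usize> = graph.all_imported_nodes(0).collect();
    assert_eq!(imports, vec![1, 2]);
}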
@@ -100,6 +105,11 @@ impl GraphEdges {
         }
     }
 
+    /// Returns the id of the given file
+    pub fn node_id(&self, file: impl AsRef<Path>) -> usize {
+        self.indices[file.as_ref()]
+    }
+
     /// Returns true if the `file` was originally included when the graph was first created and not
     /// added when all `imports` were resolved
     pub fn is_input_file(&self, file: impl AsRef<Path>) -> bool {
@@ -613,3 +613,43 @@ contract LinkTest {
     let s = serde_json::to_string(&bytecode).unwrap();
     assert_eq!(bytecode.clone(), serde_json::from_str(&s).unwrap());
 }
+
+#[test]
+fn can_recompile_with_changes() {
+    let mut tmp = TempProject::dapptools().unwrap();
+    tmp.project_mut().allowed_lib_paths = vec![tmp.root().join("modules")].into();
+
+    let content = r#"
+    pragma solidity ^0.8.10;
+    import "../modules/B.sol";
+    contract A {}
+    "#;
+    tmp.add_source("A", content).unwrap();
+
+    tmp.add_contract(
+        "modules/B",
+        r#"
+    pragma solidity ^0.8.10;
+    contract B {}
+    "#,
+    )
+    .unwrap();
+
+    let compiled = tmp.compile().unwrap();
+    assert!(!compiled.has_compiler_errors());
+    assert!(compiled.find("A").is_some());
+    assert!(compiled.find("B").is_some());
+
+    let compiled = tmp.compile().unwrap();
+    assert!(compiled.find("A").is_some());
+    assert!(compiled.find("B").is_some());
+    assert!(compiled.is_unchanged());
+
+    // modify A.sol
+    tmp.add_source("A", format!("{}\n", content)).unwrap();
+    let compiled = tmp.compile().unwrap();
+    assert!(!compiled.has_compiler_errors());
+    assert!(!compiled.is_unchanged());
+    assert!(compiled.find("A").is_some());
+    assert!(compiled.find("B").is_some());
+}