revert: use simple change filter again (#1008)
* chore: rollback dirty detection
* docs: caching docs
parent bbb1ce31a7
commit e5dbeb6b28
@@ -29,7 +29,7 @@ const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-2";
 /// The file name of the default cache file
 pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json";
 
-/// A hardhat compatible cache representation
+/// A multi version cache file
 #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
 pub struct SolFilesCache {
     #[serde(rename = "_format")]
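On disk the cache is a JSON document named `solidity-files-cache.json` in the project's cache directory, with the format marker serialized through the `#[serde(rename = "_format")]` attribute shown above. As a rough illustration only (this is not how ethers-solc reads the cache internally), a consumer could peek at the file with `serde_json`; the `cache/` path and the `files` key are assumptions for this sketch:

use std::{error::Error, fs};

fn main() -> Result<(), Box<dyn Error>> {
    // Assumed location: <project root>/cache/solidity-files-cache.json
    let raw = fs::read_to_string("cache/solidity-files-cache.json")?;
    let cache: serde_json::Value = serde_json::from_str(&raw)?;

    // `_format` is the cache format marker (see the rename in the struct above).
    if let Some(format) = cache.get("_format").and_then(|v| v.as_str()) {
        println!("cache format: {format}");
    }
    // Assumption: per-file entries are keyed by source path under a `files` object.
    if let Some(files) = cache.get("files").and_then(|v| v.as_object()) {
        println!("{} cached source files", files.len());
    }
    Ok(())
}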
@@ -593,62 +593,35 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
         }
     }
 
-    /// Returns only dirty sources that:
+    /// Returns only those sources that
     /// - are new
     /// - were changed
    /// - their imports were changed
     /// - their artifact is missing
-    /// This also includes their respective imports
     fn filter(&mut self, sources: Sources, version: &Version) -> Sources {
         self.fill_hashes(&sources);
-        let mut imports_of_dirty = HashSet::new();
-        // separates all source files that fit the criteria (dirty) from those that don't (clean)
-        let (mut dirty_sources, clean_sources) = sources
+        sources
             .into_iter()
-            .map(|(file, source)| self.filter_source(file, source, version))
-            .fold(
-                (Sources::default(), Vec::new()),
-                |(mut dirty_sources, mut clean_sources), source| {
-                    if source.dirty {
-                        // mark all files that are imported by a dirty file
-                        imports_of_dirty.extend(self.edges.all_imported_nodes(source.idx));
-                        dirty_sources.insert(source.file, source.source);
-                    } else {
-                        clean_sources.push(source);
-                    }
-                    (dirty_sources, clean_sources)
-                },
-            );
-
-        for clean_source in clean_sources {
-            let FilteredSource { file, source, idx, .. } = clean_source;
-            if imports_of_dirty.contains(&idx) {
-                // file is imported by a dirty file
-                dirty_sources.insert(file, source);
-            } else {
-                self.insert_filtered_source(file, source, version.clone());
-            }
-        }
-
-        // track dirty sources internally
-        for (file, source) in dirty_sources.iter() {
-            self.insert_new_cache_entry(file, source, version.clone());
-        }
-
-        dirty_sources
+            .filter_map(|(file, source)| self.requires_solc(file, source, version))
+            .collect()
     }
 
-    /// Returns the state of the given source file.
-    fn filter_source(&self, file: PathBuf, source: Source, version: &Version) -> FilteredSource {
-        let idx = self.edges.node_id(&file);
+    /// Returns `Some` if the file _needs_ to be compiled and `None` if the artifact can be reu-used
+    fn requires_solc(
+        &mut self,
+        file: PathBuf,
+        source: Source,
+        version: &Version,
+    ) -> Option<(PathBuf, Source)> {
         if !self.is_dirty(&file, version) &&
             self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version))
         {
-            FilteredSource { file, source, idx, dirty: false }
+            self.insert_filtered_source(file, source, version.clone());
+            None
         } else {
-            FilteredSource { file, source, idx, dirty: true }
+            self.insert_new_cache_entry(&file, &source, version.clone());
+
+            Some((file, source))
         }
     }
 
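The reverted-to filter is deliberately per file: a source is sent back to solc if it is dirty itself or if any of its imports is dirty; otherwise its cached artifact is reused, and the transitive `imports_of_dirty` bookkeeping above is dropped. Below is a self-contained sketch of that idea, with content hashes and an import map standing in for the crate's cache entries and graph edges; the names are illustrative, not the real API:

use std::collections::{BTreeMap, BTreeSet};

type FilePath = String;

/// Stand-in for the cached state: last known content hash per file.
struct Cache {
    hashes: BTreeMap<FilePath, u64>,
}

impl Cache {
    /// A file is dirty if it is new to the cache or its content hash changed.
    fn is_dirty(&self, file: &str, current_hash: u64) -> bool {
        self.hashes.get(file).map_or(true, |&old| old != current_hash)
    }
}

/// Keep only the sources that need recompiling: dirty themselves, or
/// importing at least one dirty file (direct imports only, as in the revert).
fn filter_dirty(
    sources: &BTreeMap<FilePath, u64>,                 // file -> current content hash
    imports: &BTreeMap<FilePath, BTreeSet<FilePath>>,  // file -> direct imports
    cache: &Cache,
) -> BTreeMap<FilePath, u64> {
    let is_file_dirty = |file: &str| -> bool {
        match sources.get(file) {
            Some(&hash) => cache.is_dirty(file, hash),
            None => false, // not part of this compile set
        }
    };

    sources
        .iter()
        .filter(|&(file, _)| {
            is_file_dirty(file)
                || imports
                    .get(file)
                    .map_or(false, |deps| deps.iter().any(|dep| is_file_dirty(dep)))
        })
        .map(|(file, hash)| (file.clone(), *hash))
        .collect()
}

fn main() {
    let cache = Cache { hashes: BTreeMap::from([("A.sol".into(), 1), ("C.sol".into(), 2)]) };
    let sources = BTreeMap::from([("A.sol".into(), 1), ("C.sol".into(), 99)]); // C changed
    let imports = BTreeMap::from([("A.sol".into(), BTreeSet::from(["C.sol".into()]))]);
    // Both files come back: C is dirty, and A imports the dirty C.
    println!("{:?}", filter_dirty(&sources, &imports, &cache).keys().collect::<Vec<_>>());
}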
@@ -701,14 +674,6 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
     }
 }
 
-/// Helper type to represent the state of a source file
-struct FilteredSource {
-    file: PathBuf,
-    source: Source,
-    idx: usize,
-    dirty: bool,
-}
-
 /// Abstraction over configured caching which can be either non-existent or an already loaded cache
 #[allow(clippy::large_enum_variant)]
 #[derive(Debug)]
@@ -73,6 +73,33 @@
 //! file in the VFS under `dapp-bin/library/math.sol`. If the file is not available there, the
 //! source unit name will be passed to the Host Filesystem Loader, which will then look in
 //! `/project/dapp-bin/library/iterable_mapping.sol`
+//!
+//!
+//! ### Caching and Change detection
+//!
+//! If caching is enabled in the [Project](crate::Project) a cache file will be created upon a
+//! successful solc build. The [cache file](crate::SolFilesCache) stores metadata for all the files
+//! that were provided to solc.
+//! For every file the cache file contains a dedicated [cache
+//! entry](crate::CacheEntry), which represents the state of the file. A solidity file can contain
+//! several contracts, for every contract a separate [artifact](crate::Artifact) is emitted.
+//! Therefor the entry also tracks all artifacts emitted by a file. A solidity file can also be
+//! compiled with several solc versions.
+//!
+//! For example in `A(<=0.8.10) imports C(>0.4.0)` and
+//! `B(0.8.11) imports C(>0.4.0)`, both `A` and `B` import `C` but there's no solc version that's
+//! compatible with `A` and `B`, in which case two sets are compiled: [`A`, `C`] and [`B`, `C`].
+//! This is reflected in the cache entry which tracks the file's artifacts by version.
+//!
+//! The cache makes it possible to detect changes during recompilation, so that only the changed,
+//! dirty, files need to be passed to solc. A file will be considered as dirty if:
+//! - the file is new, not included in the existing cache
+//! - the file was modified since the last compiler run, detected by comparing content hashes
+//! - any of the imported files is dirty
+//! - the file's artifacts don't exist, were deleted.
+//!
+//! Recompiling a project with cache enabled detects all files that meet these criteria and provides
+//! solc with only these dirty files instead of the entire source set.
 
 use crate::{
     artifact_output::Artifacts,
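The `A`/`B`/`C` example in the new docs can be checked mechanically: a single compiler job is only possible when one solc version satisfies every pragma in the set. A small sketch using the `semver` crate (the requirements mirror the example; the candidate versions are picked purely for illustration):

use semver::{Version, VersionReq};

fn main() {
    // Pragmas from the example: A(<=0.8.10), B(=0.8.11), C(>0.4.0)
    let a = VersionReq::parse("<=0.8.10").unwrap();
    let b = VersionReq::parse("=0.8.11").unwrap();
    let c = VersionReq::parse(">0.4.0").unwrap();

    let candidates = ["0.8.10", "0.8.11"].map(|v| Version::parse(v).unwrap());

    for v in &candidates {
        let all = a.matches(v) && b.matches(v) && c.matches(v);
        println!("solc {v}: satisfies A+B+C together = {all}");
    }
    // No candidate satisfies both A and B, so two sets are compiled:
    // [A, C] with 0.8.10 and [B, C] with 0.8.11, and C's cache entry
    // ends up tracking artifacts for both versions.
}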
@@ -283,7 +310,13 @@ impl CompilerSources {
         sources
             .into_iter()
            .map(|(solc, (version, sources))| {
+                tracing::trace!("Filtering {} sources for {}", sources.len(), version);
                 let sources = cache.filter(sources, &version);
+                tracing::trace!(
+                    "Detected {} dirty sources {:?}",
+                    sources.len(),
+                    sources.keys()
+                );
                 (solc, (version, sources))
             })
             .collect()
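These `tracing::trace!` events are only visible when a subscriber is installed at trace level. A minimal consumer-side setup, assuming the `tracing-subscriber` crate with its `env-filter` feature; the `ethers_solc=trace` directive is an assumption about the emitting module path and can be adjusted (or driven by `RUST_LOG`):

use tracing_subscriber::EnvFilter;

fn main() {
    // Surface the "Filtering ... sources" / "Detected ... dirty sources" events.
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::new("ethers_solc=trace"))
        .init();

    // ... then run the cached compile as usual.
}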