Merge branch 'alexeuler:feature/yul_compilation' into feature/yul_compilation

commit cb595113b6
Author: ControlCplusControlV
Date: 2022-03-10 11:44:14 -07:00
Committed by: GitHub (GPG key ID: 4AEE18F83AFDEB23)
16 changed files with 450 additions and 77 deletions

View File

@@ -48,6 +48,8 @@
   versions to a vector of name + contract struct tuples
   [#908](https://github.com/gakonst/ethers-rs/pull/908)
 - Add Yul compilation [994](https://github.com/gakonst/ethers-rs/pull/994)
+- Enforce commutativity of ENS reverse resolution
+  [#996](https://github.com/gakonst/ethers-rs/pull/996)
 
 ## ethers-contract-abigen

Cargo.lock (generated, 16 changes)
View File

@@ -2882,9 +2882,9 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.5.4"
+version = "1.5.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
+checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -3574,8 +3574,8 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
 
 [[package]]
 name = "svm-rs"
-version = "0.2.8"
-source = "git+https://github.com/roynalnaruto/svm-rs#9b3627cfde2763fdc35afa9cbcea8ebc2926938d"
+version = "0.2.9"
+source = "git+https://github.com/roynalnaruto/svm-rs#8e33f55fa2a2afb937749e31b2ffa42600bfe216"
 dependencies = [
  "anyhow",
  "cfg-if 1.0.0",
@@ -3876,9 +3876,9 @@ checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6"
 
 [[package]]
 name = "tracing"
-version = "0.1.31"
+version = "0.1.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f6c650a8ef0cd2dd93736f033d21cbd1224c5a967aa0c258d00fcf7dafef9b9f"
+checksum = "4a1bdf54a7c28a2bbf701e1d2233f6c77f473486b94bee4f9678da5a148dca7f"
 dependencies = [
  "cfg-if 1.0.0",
  "pin-project-lite",
@@ -3888,9 +3888,9 @@ dependencies = [
 
 [[package]]
 name = "tracing-attributes"
-version = "0.1.19"
+version = "0.1.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716"
+checksum = "2e65ce065b4b5c53e73bb28912318cb8c9e9ad3921f1d669eb0e68b4c8143a2b"
 dependencies = [
  "proc-macro2",
  "quote",

View File

@@ -24,7 +24,7 @@ async-trait = { version = "0.1.50", default-features = false }
 serde = { version = "1.0.124", default-features = false, features = ["derive"] }
 thiserror = { version = "1.0.30", default-features = false }
 futures-util = { version = "^0.3" }
-tracing = { version = "0.1.31", default-features = false }
+tracing = { version = "0.1.32", default-features = false }
 tracing-futures = { version = "0.2.5", default-features = false }
 
 # for gas oracles

View File

@@ -35,7 +35,7 @@ futures-channel = { version = "0.3.16", default-features = false }
 pin-project = { version = "1.0.7", default-features = false }
 
 # tracing
-tracing = { version = "0.1.31", default-features = false }
+tracing = { version = "0.1.32", default-features = false }
 tracing-futures = { version = "0.2.5", default-features = false, features = ["std-future"] }
 bytes = { version = "1.1.0", default-features = false, optional = true }

View File

@@ -112,6 +112,10 @@ pub enum ProviderError {
     #[error("ens name not found: {0}")]
     EnsError(String),
+    /// Invalid reverse ENS name
+    #[error("reverse ens name not pointing to itself: {0}")]
+    EnsNotOwned(String),
 
     #[error(transparent)]
     SerdeJson(#[from] serde_json::Error),
@@ -794,7 +798,14 @@ impl<P: JsonRpcClient> Middleware for Provider<P> {
     /// a string. This should theoretically never happen.
     async fn lookup_address(&self, address: Address) -> Result<String, ProviderError> {
         let ens_name = ens::reverse_address(address);
-        self.query_resolver(ParamType::String, &ens_name, ens::NAME_SELECTOR).await
+        let domain: String =
+            self.query_resolver(ParamType::String, &ens_name, ens::NAME_SELECTOR).await?;
+        let reverse_address = self.resolve_name(&domain).await?;
+        if address != reverse_address {
+            Err(ProviderError::EnsNotOwned(domain))
+        } else {
+            Ok(domain)
+        }
     }
 
     /// Returns the avatar HTTP link of the avatar that the `ens_name` resolves to (or None
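
Taken together, these two hunks make reverse resolution commutative: a reverse record is only returned when the name it claims also resolves forward to the same address. A minimal caller-side sketch of the new behavior; the helper name, endpoint, and error handling are illustrative, not part of this commit:

use ethers_core::types::Address;
use ethers_providers::{Http, Middleware, Provider, ProviderError};

// Illustrative helper: treat an unverified reverse record as "no name".
async fn verified_reverse_lookup(addr: Address) -> Result<Option<String>, ProviderError> {
    let provider = Provider::<Http>::try_from("http://localhost:8545")
        .expect("valid provider url");
    match provider.lookup_address(addr).await {
        // the returned name now provably resolves back to `addr`
        Ok(name) => Ok(Some(name)),
        // new in this commit: the reverse record names a domain that does
        // not resolve back to `addr`, so it cannot be trusted
        Err(ProviderError::EnsNotOwned(_domain)) => Ok(None),
        Err(err) => Err(err),
    }
}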

View File

@@ -33,7 +33,7 @@ trezor-client = { version = "0.0.5", optional = true, default-features = false,
 # aws
 rusoto_core = { version = "0.47.0", optional = true }
 rusoto_kms = { version = "0.47.0", optional = true }
-tracing = { version = "0.1.31", optional = true }
+tracing = { version = "0.1.32", optional = true }
 tracing-futures = { version = "0.2.5", optional = true }
 spki = { version = "0.5.4", optional = true }

View File

@@ -22,13 +22,13 @@ walkdir = "2.3.2"
 tokio = { version = "1.15.0", default-features = false, features = ["process", "io-util", "fs", "time"], optional = true }
 futures-util = { version = "^0.3", optional = true }
 once_cell = "1.10.0"
-regex = "1.5.4"
+regex = "1.5.5"
 md-5 = "0.10.1"
 thiserror = "1.0.30"
 hex = "0.4.3"
 colored = "2.0.0"
 glob = "0.3.0"
-tracing = "0.1.31"
+tracing = "0.1.32"
 num_cpus = "1.13.1"
 tiny-keccak = { version = "2.0.2", default-features = false }
 tempfile = { version = "3.3.0", optional = true }

View File

@@ -581,7 +581,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
         }
     }
 
-    /// inserts the filtered source with the fiven version
+    /// inserts the filtered source with the given version
     fn insert_filtered_source(&mut self, file: PathBuf, source: Source, version: Version) {
         match self.filtered.entry(file) {
             hash_map::Entry::Occupied(mut entry) => {
@@ -593,35 +593,62 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
         }
     }
 
-    /// Returns only those sources that
+    /// Returns only dirty sources that:
     ///   - are new
     ///   - were changed
     ///   - their imports were changed
     ///   - their artifact is missing
+    /// This also includes their respective imports
     fn filter(&mut self, sources: Sources, version: &Version) -> Sources {
        self.fill_hashes(&sources);
-        sources
+
+        let mut imports_of_dirty = HashSet::new();
+        // separates all source files that fit the criteria (dirty) from those that don't (clean)
+        let (mut dirty_sources, clean_sources) = sources
             .into_iter()
-            .filter_map(|(file, source)| self.requires_solc(file, source, version))
-            .collect()
+            .map(|(file, source)| self.filter_source(file, source, version))
+            .fold(
+                (Sources::default(), Vec::new()),
+                |(mut dirty_sources, mut clean_sources), source| {
+                    if source.dirty {
+                        // mark all files that are imported by a dirty file
+                        imports_of_dirty.extend(self.edges.all_imported_nodes(source.idx));
+                        dirty_sources.insert(source.file, source.source);
+                    } else {
+                        clean_sources.push(source);
+                    }
+                    (dirty_sources, clean_sources)
+                },
+            );
+
+        for clean_source in clean_sources {
+            let FilteredSource { file, source, idx, .. } = clean_source;
+            if imports_of_dirty.contains(&idx) {
+                // file is imported by a dirty file
+                dirty_sources.insert(file, source);
+            } else {
+                self.insert_filtered_source(file, source, version.clone());
+            }
+        }
+
+        // track dirty sources internally
+        for (file, source) in dirty_sources.iter() {
+            self.insert_new_cache_entry(file, source, version.clone());
+        }
+
+        dirty_sources
     }
 
-    /// Returns `Some` if the file _needs_ to be compiled and `None` if the artifact can be reu-used
-    fn requires_solc(
-        &mut self,
-        file: PathBuf,
-        source: Source,
-        version: &Version,
-    ) -> Option<(PathBuf, Source)> {
+    /// Returns the state of the given source file.
+    fn filter_source(&self, file: PathBuf, source: Source, version: &Version) -> FilteredSource {
+        let idx = self.edges.node_id(&file);
         if !self.is_dirty(&file, version) &&
             self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version))
         {
-            self.insert_filtered_source(file, source, version.clone());
-            None
+            FilteredSource { file, source, idx, dirty: false }
         } else {
-            self.insert_new_cache_entry(&file, &source, version.clone());
-            Some((file, source))
+            FilteredSource { file, source, idx, dirty: true }
         }
     }
@@ -674,6 +701,14 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
         }
     }
 
+/// Helper type to represent the state of a source file
+struct FilteredSource {
+    file: PathBuf,
+    source: Source,
+    idx: usize,
+    dirty: bool,
+}
+
 /// Abstraction over configured caching which can be either non-existent or an already loaded cache
 #[allow(clippy::large_enum_variant)]
 #[derive(Debug)]
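
The net effect of the reworked `filter` pass: a dirty file no longer recompiles alone; everything it (transitively) imports is scheduled again as well, while clean files untouched by any dirty file stay cached. A self-contained sketch of that propagation rule over a toy import graph; the graph encoding and the `transitive_imports` helper are illustrative, not the crate's API:

use std::collections::{HashMap, HashSet};

// Toy model: node id -> ids of the files it directly imports.
fn transitive_imports(graph: &HashMap<usize, Vec<usize>>, from: usize) -> HashSet<usize> {
    let mut seen = HashSet::new();
    let mut stack = vec![from];
    while let Some(node) = stack.pop() {
        for &imp in graph.get(&node).into_iter().flatten() {
            if seen.insert(imp) {
                stack.push(imp);
            }
        }
    }
    seen
}

fn main() {
    // A (0) imports B (1), B imports C (2); only A changed on disk.
    let graph: HashMap<usize, Vec<usize>> =
        HashMap::from([(0, vec![1]), (1, vec![2]), (2, vec![])]);
    let dirty: HashSet<usize> = HashSet::from([0]);

    // Mirror `imports_of_dirty` in `filter`: everything a dirty file
    // transitively imports must be fed to solc again.
    let mut recompile = dirty.clone();
    for &d in &dirty {
        recompile.extend(transitive_imports(&graph, d));
    }
    assert_eq!(recompile, HashSet::from([0, 1, 2]));
}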

View File

@@ -179,7 +179,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
         let Self { edges, project, mut sources } = self;
         let mut cache = ArtifactsCache::new(project, edges)?;
 
-        // retain and compile only dirty sources
+        // retain and compile only dirty sources and all their imports
         sources = sources.filtered(&mut cache);
 
         Ok(PreprocessedState { sources, cache })
@@ -216,18 +216,22 @@ struct CompiledState<'a, T: ArtifactOutput> {
 impl<'a, T: ArtifactOutput> CompiledState<'a, T> {
     /// advance to the next state by handling all artifacts
     ///
-    /// Writes all output contracts to disk if enabled in the `Project`
+    /// Writes all output contracts to disk if enabled in the `Project` and if the build was
+    /// successful
     fn write_artifacts(self) -> Result<ArtifactsState<'a, T>> {
         let CompiledState { output, cache } = self;
-        // write all artifacts
-        let compiled_artifacts = if !cache.project().no_artifacts {
+        // write all artifacts via the handler but only if the build succeeded
+        let compiled_artifacts = if cache.project().no_artifacts {
+            cache.project().artifacts_handler().output_to_artifacts(&output.contracts)
+        } else if output.has_error() {
+            tracing::trace!("skip writing cache file due to solc errors: {:?}", output.errors);
+            cache.project().artifacts_handler().output_to_artifacts(&output.contracts)
+        } else {
             cache
                 .project()
                 .artifacts_handler()
                 .on_output(&output.contracts, &cache.project().paths)?
-        } else {
-            cache.project().artifacts_handler().output_to_artifacts(&output.contracts)
         };
 
         Ok(ArtifactsState { output, cache, compiled_artifacts })

View File

@@ -10,6 +10,7 @@ use crate::{
 use crate::artifacts::output_selection::ContractOutputSelection;
 use serde::{Deserialize, Serialize};
 use std::{
+    collections::HashSet,
     fmt::{self, Formatter},
     fs,
     path::{Component, Path, PathBuf},
@@ -229,7 +230,7 @@ impl ProjectPathsConfig {
     pub fn flatten(&self, target: &Path) -> Result<String> {
         tracing::trace!("flattening file");
         let graph = Graph::resolve(self)?;
-        self.flatten_node(target, &graph, &mut vec![], false, false)
+        self.flatten_node(target, &graph, &mut Default::default(), false, false)
     }
 
     /// Flattens a single node from the dependency graph
@@ -237,7 +238,7 @@ impl ProjectPathsConfig {
         &self,
         target: &Path,
         graph: &Graph,
-        imported: &mut Vec<usize>,
+        imported: &mut HashSet<usize>,
         strip_version_pragma: bool,
         strip_license: bool,
     ) -> Result<String> {
@@ -248,9 +249,11 @@ impl ProjectPathsConfig {
             SolcError::msg(format!("cannot resolve file at \"{:?}\"", target.display()))
         })?;
 
-        if imported.iter().any(|&idx| idx == *target_index) {
+        if imported.contains(target_index) {
+            // short circuit nodes that were already imported, if both A.sol and B.sol import C.sol
             return Ok(String::new())
         }
+        imported.insert(*target_index);
 
         let target_node = graph.node(*target_index);
@@ -278,21 +281,17 @@ impl ProjectPathsConfig {
         for import in imports.iter() {
             let import_path = self.resolve_import(target_dir, import.data())?;
-            let import_content = self
-                .flatten_node(&import_path, graph, imported, true, true)?
-                .trim()
-                .as_bytes()
-                .to_owned();
+            let s = self.flatten_node(&import_path, graph, imported, true, true)?;
+            let import_content = s.trim().as_bytes();
             let import_content_len = import_content.len() as isize;
             let (start, end) = import.loc_by_offset(offset);
-            content.splice(start..end, import_content);
+            content.splice(start..end, import_content.iter().copied());
             offset += import_content_len - ((end - start) as isize);
         }
 
         let result = String::from_utf8(content).map_err(|err| {
             SolcError::msg(format!("failed to convert extended bytes to string: {}", err))
         })?;
-        imported.push(*target_index);
 
         Ok(result)
     }

View File

@@ -125,9 +125,11 @@ impl<T: ArtifactOutput> Project<T> {
         &self.artifacts
     }
 
-    /// Applies the configured settings to the given `Solc`
+    /// Applies the configured arguments to the given `Solc`
+    ///
+    /// This will set the `--allow-paths` to the paths configured for the `Project`, if any.
     fn configure_solc(&self, mut solc: Solc) -> Solc {
-        if self.allowed_lib_paths.0.is_empty() {
+        if solc.args.is_empty() && !self.allowed_lib_paths.0.is_empty() {
             solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string());
         }
         solc

View File

@@ -49,6 +49,13 @@ pub struct Remapping {
     pub path: String,
 }
 
+impl Remapping {
+    /// Convenience function for [`RelativeRemapping::new`]
+    pub fn into_relative(self, root: impl AsRef<Path>) -> RelativeRemapping {
+        RelativeRemapping::new(self, root)
+    }
+}
+
 #[derive(thiserror::Error, Debug, PartialEq, PartialOrd)]
 pub enum RemappingError {
     #[error("no prefix found")]
@@ -222,6 +229,12 @@ impl RelativeRemapping {
         self.path.parent = Some(root);
         self.into()
     }
+
+    /// Converts this relative remapping into [`Remapping`] without the root path
+    pub fn to_relative_remapping(mut self) -> Remapping {
+        self.path.parent.take();
+        self.into()
+    }
 }
 
 // Remappings are printed as `prefix=target`
@@ -263,8 +276,8 @@ impl From<Remapping> for RelativeRemapping {
 /// resolve as a `weird-erc20/=/var/lib/weird-erc20/src/` remapping.
 #[derive(Clone, Debug, PartialEq, PartialOrd, Eq, Ord)]
 pub struct RelativeRemappingPathBuf {
-    parent: Option<PathBuf>,
-    path: PathBuf,
+    pub parent: Option<PathBuf>,
+    pub path: PathBuf,
 }
 
 impl RelativeRemappingPathBuf {
@@ -647,6 +660,9 @@ mod tests {
         assert_eq!(relative.path.relative(), Path::new(&remapping.path));
         assert_eq!(relative.path.original(), Path::new(&remapping.path));
         assert!(relative.path.parent.is_none());
+
+        let relative = RelativeRemapping::new(remapping.clone(), "/a/b");
+        assert_eq!(relative.to_relative_remapping(), Remapping::from_str("oz/=c/d/").unwrap());
     }
 
     #[test]
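
A short sketch of the round trip the two new helpers enable, mirroring the test above; the `oz/` remapping and the root path are illustrative, and the exact prefix-stripping semantics of `RelativeRemapping::new` are assumed from that test:

use std::str::FromStr;
use ethers_solc::remappings::{RelativeRemapping, Remapping};

fn main() {
    // a remapping whose target is an absolute path
    let remapping = Remapping::from_str("oz/=/a/b/c/d/").unwrap();

    // rewrite the target relative to a root, via the new convenience method
    let relative: RelativeRemapping = remapping.into_relative("/a/b");

    // strip the root again: only the part below the root survives
    assert_eq!(relative.to_relative_remapping(), Remapping::from_str("oz/=c/d/").unwrap());
}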

View File

@@ -1,17 +1,52 @@
 //! Subscribe to events in the compiler pipeline
+//!
+//! The _reporter_ is the component of the [`Project::compile()`] pipeline which is responsible
+//! for reporting on specific steps in the process.
+//!
+//! By default, the current reporter is a noop that does
+//! nothing.
+//!
+//! To use another report implementation, it must be set as the current reporter.
+//! There are two methods for doing so: [`with_scoped`] and
+//! [`set_global`]. `with_scoped` sets the reporter for the
+//! duration of a scope, while `set_global` sets a global default report
+//! for the entire process.
+
+// https://github.com/tokio-rs/tracing/blob/master/tracing-core/src/dispatch.rs
 
 use crate::{CompilerInput, CompilerOutput, Solc};
 use semver::Version;
 use std::{
+    any::{Any, TypeId},
+    cell::RefCell,
     error::Error,
     fmt,
     path::Path,
+    ptr::NonNull,
     sync::{
-        atomic::{AtomicUsize, Ordering},
+        atomic::{AtomicBool, AtomicUsize, Ordering},
         Arc,
     },
 };
 
+thread_local! {
+    static CURRENT_STATE: State = State {
+        scoped: RefCell::new(Report::none()),
+    };
+}
+
+static EXISTS: AtomicBool = AtomicBool::new(false);
+static SCOPED_COUNT: AtomicUsize = AtomicUsize::new(0);
+
+// tracks the state of `GLOBAL_REPORTER`
+static GLOBAL_REPORTER_STATE: AtomicUsize = AtomicUsize::new(UN_SET);
+
+const UN_SET: usize = 0;
+const SETTING: usize = 1;
+const SET: usize = 2;
+
+static mut GLOBAL_REPORTER: Option<Report> = None;
+
 /// Install this `Reporter` as the global default if one is
 /// not already set.
 ///
@@ -69,28 +104,70 @@ pub trait Reporter: 'static {
     /// Invoked if the import couldn't be resolved
     fn on_unresolved_import(&self, _import: &Path) {}
 
+    /// If `self` is the same type as the provided `TypeId`, returns an untyped
+    /// [`NonNull`] pointer to that type. Otherwise, returns `None`.
+    ///
+    /// If you wish to downcast a `Reporter`, it is strongly advised to use
+    /// the safe API provided by [`downcast_ref`] instead.
+    ///
+    /// This API is required for `downcast_raw` to be a trait method; a method
+    /// signature like [`downcast_ref`] (with a generic type parameter) is not
+    /// object-safe, and thus cannot be a trait method for `Reporter`. This
+    /// means that if we only exposed `downcast_ref`, `Reporter`
+    /// implementations could not override the downcasting behavior.
+    ///
+    /// # Safety
+    ///
+    /// The [`downcast_ref`] method expects that the pointer returned by
+    /// `downcast_raw` points to a valid instance of the type
+    /// with the provided `TypeId`. Failure to ensure this will result in
+    /// undefined behaviour, so implementing `downcast_raw` is unsafe.
+    unsafe fn downcast_raw(&self, id: TypeId) -> Option<NonNull<()>> {
+        if id == TypeId::of::<Self>() {
+            Some(NonNull::from(self).cast())
+        } else {
+            None
+        }
+    }
+}
+
+impl dyn Reporter {
+    /// Returns `true` if this `Reporter` is the same type as `T`.
+    pub fn is<T: Any>(&self) -> bool {
+        self.downcast_ref::<T>().is_some()
+    }
+
+    /// Returns some reference to this `Reporter` value if it is of type `T`,
+    /// or `None` if it isn't.
+    pub fn downcast_ref<T: Any>(&self) -> Option<&T> {
+        unsafe {
+            let raw = self.downcast_raw(TypeId::of::<T>())?;
+            Some(&*(raw.cast().as_ptr()))
+        }
+    }
 }
 
 pub(crate) fn solc_spawn(solc: &Solc, version: &Version, input: &CompilerInput) {
-    with_global(|r| r.reporter.on_solc_spawn(solc, version, input));
+    get_default(|r| r.reporter.on_solc_spawn(solc, version, input));
 }
 
 pub(crate) fn solc_success(solc: &Solc, version: &Version, output: &CompilerOutput) {
-    with_global(|r| r.reporter.on_solc_success(solc, version, output));
+    get_default(|r| r.reporter.on_solc_success(solc, version, output));
 }
 
 #[allow(unused)]
 pub(crate) fn solc_installation_start(version: &Version) {
-    with_global(|r| r.reporter.on_solc_installation_start(version));
+    get_default(|r| r.reporter.on_solc_installation_start(version));
 }
 
 #[allow(unused)]
 pub(crate) fn solc_installation_success(version: &Version) {
-    with_global(|r| r.reporter.on_solc_installation_success(version));
+    get_default(|r| r.reporter.on_solc_installation_success(version));
 }
 
 pub(crate) fn unresolved_import(import: &Path) {
-    with_global(|r| r.reporter.on_unresolved_import(import));
+    get_default(|r| r.reporter.on_unresolved_import(import));
 }
 
 fn get_global() -> Option<&'static Report> {
@@ -106,10 +183,97 @@ fn get_global() -> Option<&'static Report> {
     }
 }
 
+/// Executes a closure with a reference to this thread's current [reporter].
+#[inline(always)]
+pub fn get_default<T, F>(mut f: F) -> T
+where
+    F: FnMut(&Report) -> T,
+{
+    if SCOPED_COUNT.load(Ordering::Acquire) == 0 {
+        // fast path if no scoped reporter has been set; use the global
+        // default.
+        return if let Some(glob) = get_global() { f(glob) } else { f(&Report::none()) }
+    }
+
+    get_default_scoped(f)
+}
+
+#[inline(never)]
+fn get_default_scoped<T, F>(mut f: F) -> T
+where
+    F: FnMut(&Report) -> T,
+{
+    CURRENT_STATE
+        .try_with(|state| {
+            let scoped = state.scoped.borrow_mut();
+            f(&*scoped)
+        })
+        .unwrap_or_else(|_| f(&Report::none()))
+}
+
 /// Executes a closure with a reference to the `Reporter`.
 pub fn with_global<T>(f: impl FnOnce(&Report) -> T) -> Option<T> {
-    let dispatch = get_global()?;
-    Some(f(dispatch))
+    let report = get_global()?;
+    Some(f(report))
+}
+
+/// Sets this reporter as the scoped reporter for the duration of a closure.
+pub fn with_scoped<T>(report: &Report, f: impl FnOnce() -> T) -> T {
+    // When this guard is dropped, the scoped reporter will be reset to the
+    // prior reporter. Using this (rather than simply resetting after calling
+    // `f`) ensures that we always reset to the prior reporter even if `f`
+    // panics.
+    let _guard = set_scoped(report);
+    f()
+}
+
+/// The report state of a thread.
+struct State {
+    /// This thread's current scoped reporter.
+    scoped: RefCell<Report>,
+}
+
+impl State {
+    /// Replaces the current scoped reporter on this thread with the provided
+    /// reporter.
+    ///
+    /// Dropping the returned `ResetGuard` will reset the scoped reporter to
+    /// the previous value.
+    #[inline]
+    fn set_scoped(new_report: Report) -> ScopeGuard {
+        let prior = CURRENT_STATE.try_with(|state| state.scoped.replace(new_report)).ok();
+        EXISTS.store(true, Ordering::Release);
+        SCOPED_COUNT.fetch_add(1, Ordering::Release);
+        ScopeGuard(prior)
+    }
+}
+
+/// A guard that resets the current scoped reporter to the prior
+/// scoped reporter when dropped.
+#[derive(Debug)]
+pub struct ScopeGuard(Option<Report>);
+
+impl Drop for ScopeGuard {
+    #[inline]
+    fn drop(&mut self) {
+        SCOPED_COUNT.fetch_sub(1, Ordering::Release);
+        if let Some(report) = self.0.take() {
+            // Replace the reporter and then drop the old one outside
+            // of the thread-local context.
+            let prev = CURRENT_STATE.try_with(|state| state.scoped.replace(report));
+            drop(prev)
+        }
+    }
+}
+
+/// Sets the reporter as the scoped reporter for the duration of the lifetime
+/// of the returned DefaultGuard
+#[must_use = "Dropping the guard unregisters the reporter."]
+pub fn set_scoped(reporter: &Report) -> ScopeGuard {
+    // When this guard is dropped, the scoped reporter will be reset to the
+    // prior default. Using this ensures that we always reset to the prior
+    // reporter even if the thread calling this function panics.
+    State::set_scoped(reporter.clone())
 }
 
 /// A no-op [`Reporter`] that does nothing.
@@ -165,6 +329,7 @@ impl fmt::Display for SetGlobalReporterError {
 impl Error for SetGlobalReporterError {}
 
 /// `Report` trace data to a [`Reporter`].
+#[derive(Clone)]
 pub struct Report {
     reporter: Arc<dyn Reporter + Send + Sync>,
 }
@@ -184,16 +349,20 @@ impl Report {
     {
         Self { reporter: Arc::new(reporter) }
     }
+
+    /// Returns `true` if this `Report` forwards to a reporter of type
+    /// `T`.
+    #[inline]
+    pub fn is<T: Any>(&self) -> bool {
+        <dyn Reporter>::is::<T>(&*self.reporter)
+    }
 }
 
-// tracks the state of `GLOBAL_REPORTER`
-static GLOBAL_REPORTER_STATE: AtomicUsize = AtomicUsize::new(UN_SET);
-
-const UN_SET: usize = 0;
-const SETTING: usize = 1;
-const SET: usize = 2;
-
-static mut GLOBAL_REPORTER: Option<Report> = None;
+impl fmt::Debug for Report {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.pad("Report(...)")
+    }
+}
 
 /// Sets this report as the global default for the duration of the entire program.
 ///
@@ -216,3 +385,35 @@ fn set_global_reporter(report: Report) -> Result<(), SetGlobalReporterError> {
         Err(SetGlobalReporterError { _priv: () })
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn scoped_reporter_works() {
+        struct TestReporter;
+        impl Reporter for TestReporter {}
+
+        with_scoped(&Report::new(TestReporter), || {
+            get_default(|reporter| assert!(reporter.is::<TestReporter>()))
+        });
+    }
+
+    #[test]
+    fn global_and_scoped_reporter_works() {
+        get_default(|reporter| {
+            assert!(reporter.is::<NoReporter>());
+        });
+
+        set_global_reporter(Report::new(BasicStdoutReporter::default())).unwrap();
+
+        struct TestReporter;
+        impl Reporter for TestReporter {}
+
+        with_scoped(&Report::new(TestReporter), || {
+            get_default(|reporter| assert!(reporter.is::<TestReporter>()))
+        });
+
+        get_default(|reporter| assert!(reporter.is::<BasicStdoutReporter>()))
+    }
+}
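
For consumers, the new surface boils down to implementing `Reporter` and installing it per scope or globally, as the tests above exercise. A minimal sketch; the `PrintReporter` type and the commented compile call site are illustrative:

use ethers_solc::report::{self, Report, Reporter};
use ethers_solc::{CompilerInput, Solc};
use semver::Version;

// Illustrative reporter that prints every solc invocation.
struct PrintReporter;

impl Reporter for PrintReporter {
    fn on_solc_spawn(&self, _solc: &Solc, version: &Version, input: &CompilerInput) {
        println!("spawning solc {} on {} source file(s)", version, input.sources.len());
    }
}

fn main() {
    // scoped: active only while the closure runs, then reset
    report::with_scoped(&Report::new(PrintReporter), || {
        // project.compile() here would report through PrintReporter
    });

    // global: process-wide default; errors if a global reporter was already set
    let _ = report::set_global(Report::new(PrintReporter));
}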

View File

@@ -91,6 +91,11 @@ impl GraphEdges {
         &self.edges[from]
     }
 
+    /// Returns an iterator that yields all imports of a node and all their imports
+    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
+        NodesIter::new(from, self).skip(1)
+    }
+
     /// Returns all files imported by the given file
     pub fn imports(&self, file: impl AsRef<Path>) -> HashSet<&PathBuf> {
         if let Some(start) = self.indices.get(file.as_ref()).copied() {
@@ -100,6 +105,11 @@ impl GraphEdges {
         }
     }
 
+    /// Returns the id of the given file
+    pub fn node_id(&self, file: impl AsRef<Path>) -> usize {
+        self.indices[file.as_ref()]
+    }
+
     /// Returns true if the `file` was originally included when the graph was first created and not
     /// added when all `imports` were resolved
     pub fn is_input_file(&self, file: impl AsRef<Path>) -> bool {
@@ -631,11 +641,18 @@ impl VersionedSources {
             SolcError::msg(format!("solc \"{}\" should have been installed", version))
         })?;
 
-        tracing::trace!("verifying solc checksum for {}", solc.solc.display());
-        if solc.verify_checksum().is_err() {
-            tracing::trace!("corrupted solc version, redownloading \"{}\"", version);
-            Solc::blocking_install(version.as_ref())?;
-            tracing::trace!("reinstalled solc: \"{}\"", version);
+        if self.offline {
+            tracing::trace!(
+                "skip verifying solc checksum for {} in offline mode",
+                solc.solc.display()
+            );
+        } else {
+            tracing::trace!("verifying solc checksum for {}", solc.solc.display());
+            if solc.verify_checksum().is_err() {
+                tracing::trace!("corrupted solc version, redownloading \"{}\"", version);
+                Solc::blocking_install(version.as_ref())?;
+                tracing::trace!("reinstalled solc: \"{}\"", version);
+            }
         }
         let solc = solc.arg("--allow-paths").arg(allowed_lib_paths.to_string());
         let version = solc.version()?;

View File

@@ -157,11 +157,6 @@ fn print_imports(
         return Ok(())
     }
 
-    for continues in &**levels_continue {
-        let c = if *continues { symbols.down } else { " " };
-        write!(out, "{} ", c)?;
-    }
-
     let mut iter = imports.iter().peekable();
     while let Some(import) = iter.next() {

View File

@@ -451,6 +451,57 @@ fn can_flatten_file_in_dapp_sample() {
     assert!(result.contains("contract DappTest"));
 }
 
+#[test]
+fn can_flatten_unique() {
+    let project = TempProject::dapptools().unwrap();
+
+    let f = project
+        .add_source(
+            "A",
+            r#"
+pragma solidity ^0.8.10;
+import "./C.sol";
+import "./B.sol";
+contract A { }
+"#,
+        )
+        .unwrap();
+
+    project
+        .add_source(
+            "B",
+            r#"
+pragma solidity ^0.8.10;
+import "./C.sol";
+contract B { }
+"#,
+        )
+        .unwrap();
+
+    project
+        .add_source(
+            "C",
+            r#"
+pragma solidity ^0.8.10;
+import "./A.sol";
+contract C { }
+"#,
+        )
+        .unwrap();
+
+    let result = project.flatten(&f).unwrap();
+
+    assert_eq!(
+        result,
+        r#"
+pragma solidity ^0.8.10;
+contract C { }
+contract B { }
+contract A { }
+"#
+    );
+}
+
 #[test]
 fn can_flatten_file_with_duplicates() {
     let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/test-flatten-duplicates");
@@ -596,3 +647,43 @@ contract LinkTest {
     let s = serde_json::to_string(&bytecode).unwrap();
     assert_eq!(bytecode.clone(), serde_json::from_str(&s).unwrap());
 }
+
+#[test]
+fn can_recompile_with_changes() {
+    let mut tmp = TempProject::dapptools().unwrap();
+    tmp.project_mut().allowed_lib_paths = vec![tmp.root().join("modules")].into();
+
+    let content = r#"
+pragma solidity ^0.8.10;
+import "../modules/B.sol";
+contract A {}
+"#;
+    tmp.add_source("A", content).unwrap();
+    tmp.add_contract(
+        "modules/B",
+        r#"
+pragma solidity ^0.8.10;
+contract B {}
+"#,
+    )
+    .unwrap();
+
+    let compiled = tmp.compile().unwrap();
+    assert!(!compiled.has_compiler_errors());
+    assert!(compiled.find("A").is_some());
+    assert!(compiled.find("B").is_some());
+
+    let compiled = tmp.compile().unwrap();
+    assert!(compiled.find("A").is_some());
+    assert!(compiled.find("B").is_some());
+    assert!(compiled.is_unchanged());
+
+    // modify A.sol
+    tmp.add_source("A", format!("{}\n", content)).unwrap();
+    let compiled = tmp.compile().unwrap();
+    assert!(!compiled.has_compiler_errors());
+    assert!(!compiled.is_unchanged());
+    assert!(compiled.find("A").is_some());
+    assert!(compiled.find("B").is_some());
+}