refactor!: make artifactoutput a trait (#579)

* feat: add artifacts trait

* add artifactsoutput trait

* deprecated old artifactoutput

* feat: better artifacts handling

* force update

* feat: update metadata artifacts

* feat: add default to types

* feat: make useliteralcontent optional

* feat: replace ProjectCompilerOutput with struct

* docs

* add output function

* feat: add Artifact trait for reading Abi/Bytes from an artifact

* feat(solc): replace () for empty artifacts with a conditional check

As discussed with @mattsse, the abstraction here might not be super clean, so we should revisit this if we do not like it

* chore: fix doctest

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>
Matthias Seitz, 2021-11-16 00:29:06 +01:00, committed by GitHub
parent 8870aff4b5
commit 1da62d65d2
10 changed files with 713 additions and 185 deletions
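
At a glance: the `ArtifactOutput` enum (`Nothing` / `MinimalCombined` / `Hardhat` / `Custom`) becomes a trait of the same name, `Project` turns generic over its output type, and `ProjectCompileOutput` changes from an enum into a struct that can also carry artifacts read back from cache. A rough sketch of the shape introduced, abridged from the diff below:

    // abridged from the diff below
    pub trait Artifact {
        fn into_inner(self) -> (Option<Abi>, Option<Bytes>);
    }

    pub trait ArtifactOutput {
        type Artifact: Artifact;
        fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()>;
        // ...plus path helpers and cache-reading methods with default implementations
    }

    pub struct Project<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> { /* ... */ }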

View File

@@ -76,7 +76,7 @@ pub struct Settings {
     pub remappings: Vec<Remapping>,
     pub optimizer: Optimizer,
     #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub metadata: Option<Metadata>,
+    pub metadata: Option<SettingsMetadata>,
     /// This field can be used to select desired outputs based
     /// on file and contract names.
     /// If this field is omitted, then the compiler loads and does type
@@ -294,11 +294,76 @@ impl FromStr for EvmVersion {
         }
     }
 }

+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+pub struct SettingsMetadata {
+    #[serde(default, rename = "useLiteralContent", skip_serializing_if = "Option::is_none")]
+    pub use_literal_content: Option<bool>,
+    #[serde(default, rename = "bytecodeHash", skip_serializing_if = "Option::is_none")]
+    pub bytecode_hash: Option<String>,
+}
+
 #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
 pub struct Metadata {
-    #[serde(rename = "useLiteralContent")]
-    pub use_literal_content: bool,
+    pub compiler: Compiler,
+    pub language: String,
+    pub output: Output,
+    pub settings: Settings,
+    pub sources: MetadataSources,
+    pub version: i64,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+pub struct MetadataSources {
+    #[serde(flatten)]
+    pub inner: BTreeMap<String, serde_json::Value>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+pub struct Compiler {
+    pub version: String,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+pub struct Output {
+    pub abi: Vec<SolcAbi>,
+    pub devdoc: Option<Doc>,
+    pub userdoc: Option<Doc>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+pub struct SolcAbi {
+    pub inputs: Vec<Item>,
+    #[serde(rename = "stateMutability")]
+    pub state_mutability: Option<String>,
+    #[serde(rename = "type")]
+    pub abi_type: String,
+    pub name: Option<String>,
+    pub outputs: Option<Vec<Item>>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+pub struct Item {
+    #[serde(rename = "internalType")]
+    pub internal_type: String,
+    pub name: String,
+    #[serde(rename = "type")]
+    pub put_type: String,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+pub struct Doc {
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub kind: Option<String>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub methods: Option<Libraries>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub version: Option<u32>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
+pub struct Libraries {
+    #[serde(flatten)]
+    pub libs: BTreeMap<String, serde_json::Value>,
 }

 #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
@@ -403,11 +468,26 @@ impl CompilerOutput {
     /// Finds the first contract with the given name
     pub fn find(&self, contract: impl AsRef<str>) -> Option<CompactContractRef> {
-        let contract = contract.as_ref();
-        self.contracts
-            .values()
-            .find_map(|contracts| contracts.get(contract))
-            .map(CompactContractRef::from)
+        let contract_name = contract.as_ref();
+        self.contracts_iter().find_map(|(name, contract)| {
+            (name == contract_name).then(|| CompactContractRef::from(contract))
+        })
+    }
+
+    /// Finds the first contract with the given name and removes it from the set
+    pub fn remove(&mut self, contract: impl AsRef<str>) -> Option<Contract> {
+        let contract_name = contract.as_ref();
+        self.contracts.values_mut().find_map(|c| c.remove(contract_name))
+    }
+
+    /// Iterate over all contracts and their names
+    pub fn contracts_iter(&self) -> impl Iterator<Item = (&String, &Contract)> {
+        self.contracts.values().flatten()
+    }
+
+    /// Iterate over all contracts and their names
+    pub fn contracts_into_iter(self) -> impl Iterator<Item = (String, Contract)> {
+        self.contracts.into_values().flatten()
     }

     /// Given the contract file's path and the contract's name, tries to return the contract's
@@ -454,11 +534,11 @@ impl<'a> fmt::Display for OutputDiagnostics<'a> {
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
 pub struct Contract {
-    /// The Ethereum Contract ABI.
-    /// See https://docs.soliditylang.org/en/develop/abi-spec.html
+    /// The Ethereum Contract Metadata.
+    /// See https://docs.soliditylang.org/en/develop/metadata.html
     pub abi: Option<Abi>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub metadata: Option<String>,
+    #[serde(default, skip_serializing_if = "Option::is_none", with = "json_string_opt")]
+    pub metadata: Option<Metadata>,
     #[serde(default)]
     pub userdoc: UserDoc,
     #[serde(default)]
@@ -699,6 +779,7 @@ pub struct Ewasm {
 #[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
 pub struct StorageLayout {
     pub storage: Vec<Storage>,
+    #[serde(default)]
     pub types: BTreeMap<String, StorageType>,
 }
@@ -885,6 +966,38 @@ mod display_from_str_opt {
     }
 }

+mod json_string_opt {
+    use serde::{
+        de::{self, DeserializeOwned},
+        ser, Deserialize, Deserializer, Serialize, Serializer,
+    };
+
+    pub fn serialize<T, S>(value: &Option<T>, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+        T: Serialize,
+    {
+        if let Some(value) = value {
+            let value = serde_json::to_string(value).map_err(ser::Error::custom)?;
+            serializer.serialize_str(&value)
+        } else {
+            serializer.serialize_none()
+        }
+    }
+
+    pub fn deserialize<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
+    where
+        D: Deserializer<'de>,
+        T: DeserializeOwned,
+    {
+        if let Some(s) = Option::<String>::deserialize(deserializer)? {
+            serde_json::from_str(&s).map_err(de::Error::custom).map(Some)
+        } else {
+            Ok(None)
+        }
+    }
+}
+
 pub fn deserialize_bytes<'de, D>(d: D) -> std::result::Result<Bytes, D::Error>
 where
     D: Deserializer<'de>,
@@ -899,7 +1012,15 @@ where
 {
     let value = Option::<String>::deserialize(d)?;
     if let Some(value) = value {
-        Ok(Some(hex::decode(&value).map_err(|e| serde::de::Error::custom(e.to_string()))?.into()))
+        Ok(Some(
+            if let Some(value) = value.strip_prefix("0x") {
+                hex::decode(value)
+            } else {
+                hex::decode(&value)
+            }
+            .map_err(|e| serde::de::Error::custom(e.to_string()))?
+            .into(),
+        ))
     } else {
         Ok(None)
     }
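
The `json_string_opt` module exists because solc emits `metadata` as a JSON-encoded string rather than a JSON object; the helper decodes it in two steps so `Contract.metadata` can be the typed `Metadata` struct. A minimal sketch of the same pattern, assuming the `json_string_opt` module from the diff above is in scope (`Wrapper` and `Inner` are hypothetical, for illustration only):

    use serde::{Deserialize, Serialize};

    #[derive(Debug, Serialize, Deserialize)]
    struct Wrapper {
        // the inner value arrives as an escaped JSON string, not a JSON object
        #[serde(default, skip_serializing_if = "Option::is_none", with = "json_string_opt")]
        metadata: Option<Inner>,
    }

    #[derive(Debug, Serialize, Deserialize)]
    struct Inner {
        version: u64,
    }

    fn main() -> Result<(), serde_json::Error> {
        // note the escaped quotes: `metadata` is a string that contains JSON
        let raw = r#"{ "metadata": "{\"version\":1}" }"#;
        let wrapper: Wrapper = serde_json::from_str(raw)?;
        assert_eq!(wrapper.metadata.map(|m| m.version), Some(1));
        Ok(())
    }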

View File

@@ -3,7 +3,7 @@ use crate::{
     artifacts::Sources,
     config::SolcConfig,
     error::{Result, SolcError},
-    utils,
+    utils, ArtifactOutput,
 };
 use serde::{Deserialize, Serialize};
 use std::{
@@ -93,6 +93,75 @@ impl SolFilesCache {
             true
         }
     }
+
+    /// Returns only the files that were changed or are missing artifacts compared to the previous
+    /// compiler execution, to save time when compiling.
+    pub fn get_changed_or_missing_artifacts_files<'a, T: ArtifactOutput>(
+        &'a self,
+        sources: Sources,
+        config: Option<&'a SolcConfig>,
+        artifacts_root: &Path,
+    ) -> Sources {
+        sources
+            .into_iter()
+            .filter(move |(file, source)| {
+                self.has_changed_or_missing_artifact::<T>(
+                    file,
+                    source.content_hash().as_bytes(),
+                    config,
+                    artifacts_root,
+                )
+            })
+            .collect()
+    }
+
+    /// Returns true if the given content hash or config differs from the file's,
+    /// the file does not exist, or the file's artifacts are missing
+    pub fn has_changed_or_missing_artifact<T: ArtifactOutput>(
+        &self,
+        file: &Path,
+        hash: &[u8],
+        config: Option<&SolcConfig>,
+        artifacts_root: &Path,
+    ) -> bool {
+        if let Some(entry) = self.files.get(file) {
+            if entry.content_hash.as_bytes() != hash {
+                return true
+            }
+            if let Some(config) = config {
+                if config != &entry.solc_config {
+                    return true
+                }
+            }
+            entry.artifacts.iter().any(|name| !T::output_exists(file, name, artifacts_root))
+        } else {
+            true
+        }
+    }
+
+    /// Checks if all artifact files exist
+    pub fn all_artifacts_exist<T: ArtifactOutput>(&self, artifacts_root: &Path) -> bool {
+        self.files.iter().all(|(file, entry)| {
+            entry.artifacts.iter().all(|name| T::output_exists(file, name, artifacts_root))
+        })
+    }
+
+    /// Reads all cached artifacts from disk
+    pub fn read_artifacts<T: ArtifactOutput>(
+        &self,
+        artifacts_root: &Path,
+    ) -> Result<BTreeMap<PathBuf, T::Artifact>> {
+        let mut artifacts = BTreeMap::default();
+        for (file, entry) in &self.files {
+            for artifact in &entry.artifacts {
+                let artifact_file = artifacts_root.join(T::output_file(file, artifact));
+                let artifact = T::read_cached_artifact(&artifact_file)?;
+                artifacts.insert(artifact_file, artifact);
+            }
+        }
+        Ok(artifacts)
+    }
 }

 #[cfg(feature = "async")]
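
Together these methods make the cached fast path cheap: when nothing changed and every artifact file still exists, a compile can return artifacts read straight from disk instead of invoking solc. Roughly, assuming an already-built default `Project` (this mirrors what the compile pipeline in the diff further below does internally):

    let cache = SolFilesCache::read(&project.paths.cache)?;
    if cache.all_artifacts_exist::<MinimalCombinedArtifacts>(&project.paths.artifacts) {
        // artifacts: BTreeMap<PathBuf, CompactContract>
        let artifacts =
            cache.read_artifacts::<MinimalCombinedArtifacts>(&project.paths.artifacts)?;
    }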

View File

@@ -1,12 +1,15 @@
 use crate::{
-    artifacts::{CompactContractRef, Settings},
+    artifacts::{CompactContract, CompactContractRef, Contract, Settings},
     cache::SOLIDITY_FILES_CACHE_FILENAME,
     error::Result,
     remappings::Remapping,
     CompilerOutput, Solc,
 };
+use ethers_core::{abi::Abi, types::Bytes};
 use serde::{Deserialize, Serialize};
 use std::{
+    collections::BTreeMap,
+    convert::TryFrom,
     fmt, fs, io,
     path::{Path, PathBuf},
 };
@@ -224,10 +227,113 @@ impl SolcConfigBuilder {
     }
 }

-/// Determines how to handle compiler output
-pub enum ArtifactOutput {
-    /// No-op, does not write the artifacts to disk.
-    Nothing,
+pub type Artifacts<T> = BTreeMap<String, BTreeMap<String, T>>;
+
+pub trait Artifact {
+    fn into_inner(self) -> (Option<Abi>, Option<Bytes>);
+}
+
+impl Artifact for CompactContract {
+    fn into_inner(self) -> (Option<Abi>, Option<Bytes>) {
+        (self.abi, self.bin)
+    }
+}
+
+impl Artifact for serde_json::Value {
+    fn into_inner(self) -> (Option<Abi>, Option<Bytes>) {
+        let abi = self.get("abi").map(|abi| {
+            serde_json::from_value::<Abi>(abi.clone()).expect("could not get artifact abi")
+        });
+        let bytecode = self.get("bin").map(|bin| {
+            serde_json::from_value::<Bytes>(bin.clone()).expect("could not get artifact bytecode")
+        });
+        (abi, bytecode)
+    }
+}
+
+pub trait ArtifactOutput {
+    /// How Artifacts are stored
+    type Artifact: Artifact;
+
+    /// Handle the compiler output.
+    fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()>;
+
+    /// Returns the file name for the contract's artifact
+    fn output_file_name(name: impl AsRef<str>) -> PathBuf {
+        format!("{}.json", name.as_ref()).into()
+    }
+
+    /// Returns the path to the contract's artifact location based on the contract's file and name
+    ///
+    /// This returns `contract.sol/contract.json` by default
+    fn output_file(contract_file: impl AsRef<Path>, name: impl AsRef<str>) -> PathBuf {
+        let name = name.as_ref();
+        contract_file
+            .as_ref()
+            .file_name()
+            .map(Path::new)
+            .map(|p| p.join(Self::output_file_name(name)))
+            .unwrap_or_else(|| Self::output_file_name(name))
+    }
+
+    /// The inverse of `contract_file_name`
+    ///
+    /// Expected to return the solidity contract's name derived from the file path
+    /// `sources/Greeter.sol` -> `Greeter`
+    fn contract_name(file: impl AsRef<Path>) -> Option<String> {
+        file.as_ref().file_stem().and_then(|s| s.to_str().map(|s| s.to_string()))
+    }
+
+    /// Whether the corresponding artifact of the given contract file and name exists
+    fn output_exists(
+        contract_file: impl AsRef<Path>,
+        name: impl AsRef<str>,
+        root: impl AsRef<Path>,
+    ) -> bool {
+        root.as_ref().join(Self::output_file(contract_file, name)).exists()
+    }
+
+    fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact>;
+
+    /// Read the cached artifacts from disk
+    fn read_cached_artifacts<T, I>(files: I) -> Result<BTreeMap<PathBuf, Self::Artifact>>
+    where
+        I: IntoIterator<Item = T>,
+        T: Into<PathBuf>,
+    {
+        let mut artifacts = BTreeMap::default();
+        for path in files.into_iter() {
+            let path = path.into();
+            let artifact = Self::read_cached_artifact(&path)?;
+            artifacts.insert(path, artifact);
+        }
+        Ok(artifacts)
+    }
+
+    /// Convert a contract to the artifact type
+    fn contract_to_artifact(contract: Contract) -> Self::Artifact;
+
+    /// Convert the compiler output into a set of artifacts
+    fn output_to_artifacts(output: CompilerOutput) -> Artifacts<Self::Artifact> {
+        output
+            .contracts
+            .into_iter()
+            .map(|(s, contracts)| {
+                (
+                    s,
+                    contracts
+                        .into_iter()
+                        .map(|(s, c)| (s, Self::contract_to_artifact(c)))
+                        .collect(),
+                )
+            })
+            .collect()
+    }
+}
+
+/// An Artifacts implementation that uses a compact representation
+///
 /// Creates a single json artifact with
 /// ```json
 /// {
@@ -236,65 +342,58 @@ pub enum ArtifactOutput {
 /// "runtime-bin": "..."
 /// }
 /// ```
-    MinimalCombined,
-    /// Hardhat style artifacts
-    Hardhat,
-    /// Custom output handler
-    Custom(Box<dyn Fn(&CompilerOutput, &ProjectPathsConfig) -> Result<()>>),
-}
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub struct MinimalCombinedArtifacts;

-impl ArtifactOutput {
-    /// Is expected to handle the output and where to store it
-    pub fn on_output(&self, output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> {
-        match self {
-            ArtifactOutput::Nothing => Ok(()),
-            ArtifactOutput::MinimalCombined => {
-                fs::create_dir_all(&layout.artifacts)?;
-
-                for contracts in output.contracts.values() {
-                    for (name, contract) in contracts {
-                        let file = layout.artifacts.join(format!("{}.json", name));
-                        let min = CompactContractRef::from(contract);
-                        fs::write(file, serde_json::to_vec_pretty(&min)?)?
-                    }
-                }
-                Ok(())
-            }
-            ArtifactOutput::Hardhat => {
-                todo!("Hardhat style artifacts not yet implemented")
-            }
-            ArtifactOutput::Custom(f) => f(output, layout),
-        }
-    }
-}
+impl ArtifactOutput for MinimalCombinedArtifacts {
+    type Artifact = CompactContract;
+
+    fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> {
+        fs::create_dir_all(&layout.artifacts)?;
+        for (file, contracts) in output.contracts.iter() {
+            for (name, contract) in contracts {
+                let artifact = Self::output_file(file, name);
+                let file = layout.artifacts.join(artifact);
+                if let Some(parent) = file.parent() {
+                    fs::create_dir_all(parent)?;
+                }
+                let min = CompactContractRef::from(contract);
+                fs::write(file, serde_json::to_vec_pretty(&min)?)?
+            }
+        }
+        Ok(())
+    }
+
+    fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
+        let file = fs::File::open(path.as_ref())?;
+        Ok(serde_json::from_reader(file)?)
+    }
+
+    fn contract_to_artifact(contract: Contract) -> Self::Artifact {
+        CompactContract::from(contract)
+    }
+}

-impl Default for ArtifactOutput {
-    fn default() -> Self {
-        ArtifactOutput::MinimalCombined
-    }
-}
+/// Hardhat style artifacts
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub struct HardhatArtifacts;

-impl fmt::Debug for ArtifactOutput {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            ArtifactOutput::Nothing => {
-                write!(f, "Nothing")
-            }
-            ArtifactOutput::MinimalCombined => {
-                write!(f, "MinimalCombined")
-            }
-            ArtifactOutput::Hardhat => {
-                write!(f, "Hardhat")
-            }
-            ArtifactOutput::Custom(_) => {
-                write!(f, "Custom")
-            }
-        }
-    }
-}
+impl ArtifactOutput for HardhatArtifacts {
+    type Artifact = serde_json::Value;
+
+    fn on_output(_output: &CompilerOutput, _layout: &ProjectPathsConfig) -> Result<()> {
+        todo!("Hardhat style artifacts not yet implemented")
+    }
+
+    fn read_cached_artifact(_path: impl AsRef<Path>) -> Result<Self::Artifact> {
+        todo!("Hardhat style artifacts not yet implemented")
+    }
+
+    fn contract_to_artifact(_contract: Contract) -> Self::Artifact {
+        todo!("Hardhat style artifacts not yet implemented")
+    }
+}

-use std::convert::TryFrom;
-
 /// Helper struct for serializing `--allow-paths` arguments to Solc
 ///
 /// From the [Solc docs](https://docs.soliditylang.org/en/v0.8.9/using-the-compiler.html#base-path-and-import-remapping):
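
The trait also subsumes the old `Custom(Box<dyn Fn(...)>)` variant: a downstream crate customizes output handling by implementing `ArtifactOutput` itself. A minimal sketch of such an implementation that persists only the ABI, using `serde_json::Value` as the artifact type since the diff above already implements `Artifact` for it (`AbiOnlyArtifacts` is hypothetical, not part of this commit; it assumes the same imports as the module above):

    // hypothetical custom output: writes just the ABI per contract
    #[derive(Debug, Copy, Clone)]
    pub struct AbiOnlyArtifacts;

    impl ArtifactOutput for AbiOnlyArtifacts {
        // serde_json::Value implements the `Artifact` trait (see diff above)
        type Artifact = serde_json::Value;

        fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> {
            fs::create_dir_all(&layout.artifacts)?;
            for (file, contracts) in output.contracts.iter() {
                for (name, contract) in contracts {
                    let out = layout.artifacts.join(Self::output_file(file, name));
                    if let Some(parent) = out.parent() {
                        fs::create_dir_all(parent)?;
                    }
                    // persist only the ABI
                    fs::write(out, serde_json::to_vec_pretty(&contract.abi)?)?;
                }
            }
            Ok(())
        }

        fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
            let file = fs::File::open(path.as_ref())?;
            Ok(serde_json::from_reader(file)?)
        }

        fn contract_to_artifact(contract: Contract) -> Self::Artifact {
            serde_json::to_value(&contract.abi).expect("abi is serializable")
        }
    }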

View File

@@ -11,7 +11,10 @@ mod compile;
 pub use compile::*;

 mod config;
-pub use config::{AllowedLibPaths, ArtifactOutput, ProjectPathsConfig, SolcConfig};
+pub use config::{
+    AllowedLibPaths, Artifact, ArtifactOutput, MinimalCombinedArtifacts, ProjectPathsConfig,
+    SolcConfig,
+};

 pub mod remappings;
@@ -22,15 +25,17 @@ pub mod utils;
 use crate::artifacts::Sources;
 use error::Result;
 use std::{
+    borrow::Cow,
     collections::{BTreeMap, HashMap},
     convert::TryInto,
     fmt, fs, io,
+    marker::PhantomData,
     path::PathBuf,
 };

 /// Handles contract compiling
 #[derive(Debug)]
-pub struct Project {
+pub struct Project<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> {
     /// The layout of the
     pub paths: ProjectPathsConfig,
     /// Where to find solc
@@ -39,8 +44,10 @@ pub struct Project {
     pub solc_config: SolcConfig,
     /// Whether caching is enabled
     pub cached: bool,
+    /// Whether writing artifacts to disk is enabled
+    pub no_artifacts: bool,
     /// How to handle compiler output
-    pub artifacts: ArtifactOutput,
+    pub artifacts: PhantomData<Artifacts>,
     /// Errors/Warnings which match these error codes are not going to be logged
     pub ignored_error_codes: Vec<u64>,
     /// The paths which will be allowed for library inclusion
 }
@@ -48,23 +55,53 @@ pub struct Project {
 }

 impl Project {
-    /// Configure the current project
+    /// Convenience function to call `ProjectBuilder::default()`
     ///
     /// # Example
     ///
+    /// Configure with `MinimalCombinedArtifacts` artifacts output
+    ///
     /// ```rust
     /// use ethers_solc::Project;
     /// let config = Project::builder().build().unwrap();
     /// ```
+    ///
+    /// To configure a project with any `ArtifactOutput` use either
+    ///
+    /// ```rust
+    /// use ethers_solc::Project;
+    /// let config = Project::builder().build().unwrap();
+    /// ```
+    ///
+    /// or use the builder directly
+    ///
+    /// ```rust
+    /// use ethers_solc::{MinimalCombinedArtifacts, ProjectBuilder};
+    /// let config = ProjectBuilder::<MinimalCombinedArtifacts>::default().build().unwrap();
+    /// ```
     pub fn builder() -> ProjectBuilder {
         ProjectBuilder::default()
     }
+}

-    fn write_cache_file(&self, sources: Sources) -> Result<()> {
-        let cache = SolFilesCache::builder()
+impl<Artifacts: ArtifactOutput> Project<Artifacts> {
+    fn write_cache_file(
+        &self,
+        sources: Sources,
+        artifacts: Vec<(PathBuf, Vec<String>)>,
+    ) -> Result<()> {
+        let mut cache = SolFilesCache::builder()
             .root(&self.paths.root)
             .solc_config(self.solc_config.clone())
             .insert_files(sources)?;
+
+        // add the artifacts for each file to the cache entry
+        for (file, artifacts) in artifacts {
+            if let Some(entry) = cache.files.get_mut(&file) {
+                entry.artifacts = artifacts;
+            }
+        }
+
         if let Some(cache_dir) = self.paths.cache.parent() {
             fs::create_dir_all(cache_dir)?
         }
@@ -101,7 +138,7 @@ impl Project {
     /// NB: If the `svm` feature is enabled, this function will automatically detect
     /// solc versions across files.
-    pub fn compile(&self) -> Result<ProjectCompileOutput> {
+    pub fn compile(&self) -> Result<ProjectCompileOutput<Artifacts>> {
         let sources = self.sources()?;

         #[cfg(not(all(feature = "svm", feature = "async")))]
@@ -113,7 +150,7 @@ impl Project {
     }

     #[cfg(all(feature = "svm", feature = "async"))]
-    fn svm_compile(&self, sources: Sources) -> Result<ProjectCompileOutput> {
+    fn svm_compile(&self, sources: Sources) -> Result<ProjectCompileOutput<Artifacts>> {
         // split them by version
         let mut sources_by_version = BTreeMap::new();
         for (path, source) in sources.into_iter() {
@@ -131,44 +168,57 @@ impl Project {
             entry.insert(path, source);
         }

+        let mut compiled =
+            ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone());
+
         // run the compilation step for each version
-        let mut res = CompilerOutput::default();
         for (solc, sources) in sources_by_version {
-            let output = self.compile_with_version(&solc, sources)?;
-            if let ProjectCompileOutput::Compiled((compiled, _)) = output {
-                res.errors.extend(compiled.errors);
-                res.sources.extend(compiled.sources);
-                res.contracts.extend(compiled.contracts);
-            }
+            compiled.extend(self.compile_with_version(&solc, sources)?);
         }

-        Ok(if res.contracts.is_empty() && res.errors.is_empty() {
-            ProjectCompileOutput::Unchanged
-        } else {
-            ProjectCompileOutput::Compiled((res, &self.ignored_error_codes))
-        })
+        if !compiled.has_compiled_contracts() &&
+            !compiled.has_compiler_errors() &&
+            self.cached &&
+            self.paths.cache.exists()
+        {
+            let cache = SolFilesCache::read(&self.paths.cache)?;
+            let artifacts = cache.read_artifacts::<Artifacts>(&self.paths.artifacts)?;
+            compiled.artifacts.extend(artifacts);
+        }
+
+        Ok(compiled)
     }

     pub fn compile_with_version(
         &self,
         solc: &Solc,
         mut sources: Sources,
-    ) -> Result<ProjectCompileOutput> {
+    ) -> Result<ProjectCompileOutput<Artifacts>> {
         // add all libraries to the source set while keeping track of their actual disk path
-        let mut source_name_path = HashMap::new();
-        let mut path_source_name = HashMap::new();
+        // (`contracts/contract.sol` -> `/Users/.../contracts.sol`)
+        let mut source_name_to_path = HashMap::new();
+        // inverse of `source_name_to_path` : (`/Users/.../contracts.sol` ->
+        // `contracts/contract.sol`)
+        let mut path_to_source_name = HashMap::new();
         for (import, (source, path)) in self.resolved_libraries(&sources)? {
             // inserting with absolute path here and keep track of the source name <-> path mappings
             sources.insert(path.clone(), source);
-            path_source_name.insert(path.clone(), import.clone());
-            source_name_path.insert(import, path);
+            path_to_source_name.insert(path.clone(), import.clone());
+            source_name_to_path.insert(import, path);
         }

         // If there's a cache set, filter to only re-compile the files which were changed
         let sources = if self.cached && self.paths.cache.exists() {
             let cache = SolFilesCache::read(&self.paths.cache)?;
-            let changed_files = cache.get_changed_files(sources, Some(&self.solc_config));
+            let changed_files = cache.get_changed_or_missing_artifacts_files::<Artifacts>(
+                sources,
+                Some(&self.solc_config),
+                &self.paths.artifacts,
+            );
+            // if nothing changed and all artifacts still exist
             if changed_files.is_empty() {
-                return Ok(ProjectCompileOutput::Unchanged)
+                let artifacts = cache.read_artifacts::<Artifacts>(&self.paths.artifacts)?;
+                return Ok(ProjectCompileOutput::from_unchanged(artifacts))
             }
             changed_files
         } else {
@@ -176,25 +226,43 @@ impl Project {
         };

         // replace absolute path with source name to make solc happy
-        let sources = apply_mappings(sources, path_source_name);
+        let sources = apply_mappings(sources, path_to_source_name);

         let input = CompilerInput::with_sources(sources)
             .normalize_evm_version(&solc.version()?)
             .with_remappings(self.paths.remappings.clone());
         let output = solc.compile(&input)?;
         if output.has_error() {
-            return Ok(ProjectCompileOutput::Compiled((output, &self.ignored_error_codes)))
+            return Ok(ProjectCompileOutput::from_compiler_output(
+                output,
+                self.ignored_error_codes.clone(),
+            ))
         }

         if self.cached {
+            // get all contract names of the files and map them to the disk file
+            let artifacts = output
+                .contracts
+                .iter()
+                .map(|(path, contracts)| {
+                    let path = PathBuf::from(path);
+                    let file = source_name_to_path.get(&path).cloned().unwrap_or(path);
+                    (file, contracts.keys().cloned().collect::<Vec<_>>())
+                })
+                .collect::<Vec<_>>();
+
             // reapply to disk paths
-            let sources = apply_mappings(input.sources, source_name_path);
+            let sources = apply_mappings(input.sources, source_name_to_path);
             // create cache file
-            self.write_cache_file(sources)?;
+            self.write_cache_file(sources, artifacts)?;
         }

-        self.artifacts.on_output(&output, &self.paths)?;
-        Ok(ProjectCompileOutput::Compiled((output, &self.ignored_error_codes)))
+        // TODO: There seems to be some type redundancy here, c.f. discussion with @mattsse
+        if !self.no_artifacts {
+            Artifacts::on_output(&output, &self.paths)?;
+        }
+
+        Ok(ProjectCompileOutput::from_compiler_output(output, self.ignored_error_codes.clone()))
     }
} }
@@ -211,7 +279,7 @@ fn apply_mappings(sources: Sources, mut mappings: HashMap<PathBuf, PathBuf>) ->
         .collect()
 }

-pub struct ProjectBuilder {
+pub struct ProjectBuilder<Artifacts: ArtifactOutput = MinimalCombinedArtifacts> {
     /// The layout of the
     paths: Option<ProjectPathsConfig>,
     /// Where to find solc
@@ -220,15 +288,16 @@ pub struct ProjectBuilder {
     solc_config: Option<SolcConfig>,
     /// Whether caching is enabled, default is true.
     cached: bool,
-    /// How to handle compiler output
-    artifacts: Option<ArtifactOutput>,
+    /// Whether writing artifacts to disk is enabled, default is true.
+    no_artifacts: bool,
+    artifacts: PhantomData<Artifacts>,
     /// Which error codes to ignore
     pub ignored_error_codes: Vec<u64>,
     /// All allowed paths
     pub allowed_paths: Vec<PathBuf>,
 }

-impl ProjectBuilder {
+impl<Artifacts: ArtifactOutput> ProjectBuilder<Artifacts> {
     pub fn paths(mut self, paths: ProjectPathsConfig) -> Self {
         self.paths = Some(paths);
         self
@@ -244,11 +313,6 @@ impl ProjectBuilder {
         self
     }

-    pub fn artifacts(mut self, artifacts: ArtifactOutput) -> Self {
-        self.artifacts = Some(artifacts);
-        self
-    }
-
     pub fn ignore_error_code(mut self, code: u64) -> Self {
         self.ignored_error_codes.push(code);
         self
@@ -260,6 +324,36 @@ impl ProjectBuilder {
         self
     }

+    /// Disables writing artifacts to disk
+    pub fn no_artifacts(mut self) -> Self {
+        self.no_artifacts = true;
+        self
+    }
+
+    /// Set arbitrary `ArtifactOutputHandler`
+    pub fn artifacts<A: ArtifactOutput>(self) -> ProjectBuilder<A> {
+        let ProjectBuilder {
+            paths,
+            solc,
+            solc_config,
+            cached,
+            no_artifacts,
+            ignored_error_codes,
+            allowed_paths,
+            ..
+        } = self;
+        ProjectBuilder {
+            paths,
+            solc,
+            solc_config,
+            cached,
+            no_artifacts,
+            artifacts: PhantomData::default(),
+            ignored_error_codes,
+            allowed_paths,
+        }
+    }
+
     /// Adds an allowed-path to the solc executable
     pub fn allowed_path<T: Into<PathBuf>>(mut self, path: T) -> Self {
         self.allowed_paths.push(path.into());
@@ -278,12 +372,13 @@ impl ProjectBuilder {
         self
     }

-    pub fn build(self) -> Result<Project> {
+    pub fn build(self) -> Result<Project<Artifacts>> {
         let Self {
             paths,
             solc,
             solc_config,
             cached,
+            no_artifacts,
             artifacts,
             ignored_error_codes,
             mut allowed_paths,
@@ -307,42 +402,196 @@ impl ProjectBuilder {
             solc,
             solc_config,
             cached,
-            artifacts: artifacts.unwrap_or_default(),
+            no_artifacts,
+            artifacts,
             ignored_error_codes,
             allowed_lib_paths: allowed_paths.try_into()?,
         })
     }
 }

-impl Default for ProjectBuilder {
+impl<Artifacts: ArtifactOutput> Default for ProjectBuilder<Artifacts> {
     fn default() -> Self {
         Self {
             paths: None,
             solc: None,
             solc_config: None,
             cached: true,
-            artifacts: None,
+            no_artifacts: false,
+            artifacts: PhantomData::default(),
             ignored_error_codes: Vec::new(),
             allowed_paths: vec![],
         }
     }
 }

-#[derive(Debug, Clone, PartialEq)]
-pub enum ProjectCompileOutput<'a> {
-    /// Nothing to compile because unchanged sources
-    Unchanged,
-    Compiled((CompilerOutput, &'a [u64])),
-}
+/// The outcome of `Project::compile`
+#[derive(Debug, Clone, PartialEq, Default)]
+pub struct ProjectCompileOutput<T: ArtifactOutput> {
+    /// If solc was invoked multiple times in `Project::compile` then this contains a merged
+    /// version of all `CompilerOutput`s. If solc was called only once then `compiler_output`
+    /// holds the `CompilerOutput` of that call.
+    compiler_output: Option<CompilerOutput>,
+    /// All artifacts that were read from cache
+    artifacts: BTreeMap<PathBuf, T::Artifact>,
+    ignored_error_codes: Vec<u64>,
+}

-impl<'a> fmt::Display for ProjectCompileOutput<'a> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            ProjectCompileOutput::Unchanged => f.write_str("Nothing to compile"),
-            ProjectCompileOutput::Compiled((output, ignored_error_codes)) => {
-                output.diagnostics(ignored_error_codes).fmt(f)
-            }
-        }
-    }
-}
+impl<T: ArtifactOutput> ProjectCompileOutput<T> {
+    pub fn with_ignored_errors(ignored_errors: Vec<u64>) -> Self {
+        Self {
+            compiler_output: None,
+            artifacts: Default::default(),
+            ignored_error_codes: ignored_errors,
+        }
+    }
+
+    pub fn from_unchanged(artifacts: BTreeMap<PathBuf, T::Artifact>) -> Self {
+        Self { compiler_output: None, artifacts, ignored_error_codes: vec![] }
+    }
+
+    pub fn from_compiler_output(
+        compiler_output: CompilerOutput,
+        ignored_error_codes: Vec<u64>,
+    ) -> Self {
+        Self {
+            compiler_output: Some(compiler_output),
+            artifacts: Default::default(),
+            ignored_error_codes,
+        }
+    }
+
+    /// Get the (merged) solc compiler output
+    /// ```no_run
+    /// use std::collections::BTreeMap;
+    /// use ethers_solc::artifacts::Contract;
+    /// use ethers_solc::Project;
+    ///
+    /// let project = Project::builder().build().unwrap();
+    /// let contracts: BTreeMap<String, Contract> =
+    ///     project.compile().unwrap().output().contracts_into_iter().collect();
+    /// ```
+    pub fn output(self) -> CompilerOutput {
+        self.compiler_output.unwrap_or_default()
+    }
+
+    /// Combine two outputs
+    pub fn extend(&mut self, compiled: ProjectCompileOutput<T>) {
+        let ProjectCompileOutput { compiler_output, artifacts, .. } = compiled;
+        self.artifacts.extend(artifacts);
+        if let Some(compiled) = compiler_output {
+            if let Some(output) = self.compiler_output.as_mut() {
+                output.errors.extend(compiled.errors);
+                output.sources.extend(compiled.sources);
+                output.contracts.extend(compiled.contracts);
+            } else {
+                self.compiler_output = Some(compiled);
+            }
+        }
+    }
+
+    /// Whether this type does not contain compiled contracts
+    pub fn is_unchanged(&self) -> bool {
+        !self.has_compiled_contracts()
+    }
+
+    /// Whether this type has a compiler output
+    pub fn has_compiled_contracts(&self) -> bool {
+        if let Some(output) = self.compiler_output.as_ref() {
+            !output.contracts.is_empty()
+        } else {
+            false
+        }
+    }
+
+    /// Whether there were errors
+    pub fn has_compiler_errors(&self) -> bool {
+        if let Some(output) = self.compiler_output.as_ref() {
+            output.has_error()
+        } else {
+            false
+        }
+    }
+
+    /// Finds the first contract with the given name and removes it from the set
+    pub fn remove(&mut self, contract: impl AsRef<str>) -> Option<T::Artifact> {
+        let contract = contract.as_ref();
+        if let Some(output) = self.compiler_output.as_mut() {
+            if let contract @ Some(_) = output
+                .contracts
+                .values_mut()
+                .find_map(|c| c.remove(contract).map(T::contract_to_artifact))
+            {
+                return contract
+            }
+        }
+        let key = self
+            .artifacts
+            .iter()
+            .find_map(|(path, _)| {
+                T::contract_name(path).filter(|name| name == contract).map(|_| path)
+            })?
+            .clone();
+        self.artifacts.remove(&key)
+    }
+}
+
+impl<T: ArtifactOutput> ProjectCompileOutput<T>
+where
+    T::Artifact: Clone,
+{
+    /// Finds the first contract with the given name
+    pub fn find(&self, contract: impl AsRef<str>) -> Option<Cow<T::Artifact>> {
+        let contract = contract.as_ref();
+        if let Some(output) = self.compiler_output.as_ref() {
+            if let contract @ Some(_) = output.contracts.values().find_map(|c| {
+                c.get(contract).map(|c| T::contract_to_artifact(c.clone())).map(Cow::Owned)
+            }) {
+                return contract
+            }
+        }
+        self.artifacts.iter().find_map(|(path, art)| {
+            T::contract_name(path).filter(|name| name == contract).map(|_| Cow::Borrowed(art))
+        })
+    }
+}
+
+impl<T: ArtifactOutput + 'static> ProjectCompileOutput<T> {
+    /// All artifacts together with their contract name
+    ///
+    /// # Example
+    ///
+    /// ```no_run
+    /// use std::collections::BTreeMap;
+    /// use ethers_solc::artifacts::CompactContract;
+    /// use ethers_solc::Project;
+    ///
+    /// let project = Project::builder().build().unwrap();
+    /// let contracts: BTreeMap<String, CompactContract> = project.compile().unwrap().into_artifacts().collect();
+    /// ```
+    pub fn into_artifacts(mut self) -> Box<dyn Iterator<Item = (String, T::Artifact)>> {
+        let artifacts = self.artifacts.into_iter().filter_map(|(path, art)| {
+            T::contract_name(&path)
+                .map(|name| (format!("{:?}:{}", path.file_name().unwrap(), name), art))
+        });
+
+        let artifacts: Box<dyn Iterator<Item = (String, T::Artifact)>> =
+            if let Some(output) = self.compiler_output.take() {
+                Box::new(artifacts.chain(T::output_to_artifacts(output).into_values().flatten()))
+            } else {
+                Box::new(artifacts)
+            };
+        artifacts
+    }
+}
+
+impl<T: ArtifactOutput> fmt::Display for ProjectCompileOutput<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if let Some(output) = self.compiler_output.as_ref() {
+            output.diagnostics(&self.ignored_error_codes).fmt(f)
+        } else {
+            f.write_str("Nothing to compile")
+        }
     }
 }
@@ -359,20 +608,10 @@ mod tests {
             .sources("./test-data/test-contract-versions")
             .build()
             .unwrap();
-        let project = Project::builder()
-            .paths(paths)
-            .ephemeral()
-            .artifacts(ArtifactOutput::Nothing)
-            .build()
-            .unwrap();
+        let project = Project::builder().paths(paths).no_artifacts().ephemeral().build().unwrap();
         let compiled = project.compile().unwrap();
-        let contracts = match compiled {
-            ProjectCompileOutput::Compiled((out, _)) => {
-                assert!(!out.has_error());
-                out.contracts
-            }
-            _ => panic!("must compile"),
-        };
+        assert!(!compiled.has_compiler_errors());
+        let contracts = compiled.output().contracts;
         // Contracts A to F
         assert_eq!(contracts.keys().count(), 5);
     }
@@ -393,18 +632,14 @@ mod tests {
             .unwrap();
         let project = Project::builder()
             .paths(paths)
+            .no_artifacts()
             .ephemeral()
-            .artifacts(ArtifactOutput::Nothing)
             .build()
             .unwrap();
         let compiled = project.compile().unwrap();
-        let contracts = match compiled {
-            ProjectCompileOutput::Compiled((out, _)) => {
-                assert!(!out.has_error());
-                out.contracts
-            }
-            _ => panic!("must compile"),
-        };
+        assert!(!compiled.has_compiler_errors());
+        let contracts = compiled.output().contracts;
         assert_eq!(contracts.keys().count(), 3);
     }

@@ -420,20 +655,10 @@ mod tests {
             .lib(root.join("lib"))
             .build()
             .unwrap();
-        let project = Project::builder()
-            .paths(paths)
-            .ephemeral()
-            .artifacts(ArtifactOutput::Nothing)
-            .build()
-            .unwrap();
+        let project = Project::builder().no_artifacts().paths(paths).ephemeral().build().unwrap();
         let compiled = project.compile().unwrap();
-        let contracts = match compiled {
-            ProjectCompileOutput::Compiled((out, _)) => {
-                assert!(!out.has_error());
-                out.contracts
-            }
-            _ => panic!("must compile"),
-        };
+        assert!(!compiled.has_compiler_errors());
+        let contracts = compiled.output().contracts;
         assert_eq!(contracts.keys().count(), 2);
     }
 }
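
With the enum gone, the choice of output format moves into the type system, and `no_artifacts()` replaces the old `ArtifactOutput::Nothing`. A sketch of the resulting usage, inside a function returning `Result` and assuming the default paths resolve:

    use ethers_solc::{MinimalCombinedArtifacts, Project, ProjectBuilder};

    // default: Project<MinimalCombinedArtifacts>
    let project = Project::builder().build()?;

    // equivalent, naming the ArtifactOutput implementation explicitly
    let project = ProjectBuilder::<MinimalCombinedArtifacts>::default().build()?;

    // or switch the builder to another implementation of the trait
    let project = Project::builder().artifacts::<MinimalCombinedArtifacts>().build()?;

    let compiled = project.compile()?;
    if !compiled.has_compiler_errors() {
        for (name, artifact) in compiled.into_artifacts() {
            // artifact is a CompactContract for MinimalCombinedArtifacts
            println!("{}", name);
        }
    }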

View File

@@ -182,7 +182,7 @@ mod tests {
     #[test]
     fn serde() {
         let remapping = "oz=../b/c/d";
-        let remapping = Remapping::from_str(&remapping).unwrap();
+        let remapping = Remapping::from_str(remapping).unwrap();
         assert_eq!(remapping.name, "oz".to_string());
         assert_eq!(remapping.path, "../b/c/d".to_string());

View File

@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: GPL-3.0-or-later
-pragma solidity ^0.6.6;
+pragma solidity >=0.6.6;

 contract Dapp {
 }

View File

@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: GPL-3.0-or-later
-pragma solidity ^0.6.6;
+pragma solidity >=0.6.6;

 import "ds-test/test.sol";

View File

@@ -1,5 +1,5 @@
 //SPDX-License-Identifier: Unlicense
-pragma solidity ^0.6.0;
+pragma solidity >=0.6.0;

 import "hardhat/console.sol";

View File

@@ -33,7 +33,6 @@
     "sourceFile.sol": {
         "ContractName": {
             "abi": [],
-            "metadata": "{/* ... */}",
             "userdoc": {},
             "devdoc": {},
             "ir": "",

View File

@@ -1,8 +1,6 @@
 //! project tests

-use ethers_solc::{
-    cache::SOLIDITY_FILES_CACHE_FILENAME, Project, ProjectCompileOutput, ProjectPathsConfig,
-};
+use ethers_solc::{cache::SOLIDITY_FILES_CACHE_FILENAME, Project, ProjectPathsConfig};
 use std::path::PathBuf;
 use tempdir::TempDir;
@@ -26,12 +24,22 @@ fn can_compile_hardhat_sample() {
     let project = Project::builder().paths(paths).build().unwrap();
     let compiled = project.compile().unwrap();
-    match compiled {
-        ProjectCompileOutput::Compiled((out, _)) => assert!(!out.has_error()),
-        _ => panic!("must compile"),
-    }
+    assert!(compiled.find("Greeter").is_some());
+    assert!(compiled.find("console").is_some());
+    assert!(!compiled.has_compiler_errors());

     // nothing to compile
-    assert_eq!(project.compile().unwrap(), ProjectCompileOutput::Unchanged);
+    let compiled = project.compile().unwrap();
+    assert!(compiled.find("Greeter").is_some());
+    assert!(compiled.find("console").is_some());
+    assert!(compiled.is_unchanged());
+
+    // delete artifacts
+    std::fs::remove_dir_all(&project.paths.artifacts).unwrap();
+    let compiled = project.compile().unwrap();
+    assert!(compiled.find("Greeter").is_some());
+    assert!(compiled.find("console").is_some());
+    assert!(!compiled.is_unchanged());
 }

@@ -53,10 +61,17 @@ fn can_compile_dapp_sample() {
     let project = Project::builder().paths(paths).build().unwrap();
     let compiled = project.compile().unwrap();
-    match compiled {
-        ProjectCompileOutput::Compiled((out, _)) => assert!(!out.has_error()),
-        _ => panic!("must compile"),
-    }
+    assert!(compiled.find("Dapp").is_some());
+    assert!(!compiled.has_compiler_errors());

     // nothing to compile
-    assert_eq!(project.compile().unwrap(), ProjectCompileOutput::Unchanged);
+    let compiled = project.compile().unwrap();
+    assert!(compiled.find("Dapp").is_some());
+    assert!(compiled.is_unchanged());
+
+    // delete artifacts
+    std::fs::remove_dir_all(&project.paths.artifacts).unwrap();
+    let compiled = project.compile().unwrap();
+    assert!(compiled.find("Dapp").is_some());
+    assert!(!compiled.is_unchanged());
 }