feat(solc): add hardhat artifact support (#677)

* feat: add hardhat artifact support

* test: add hh test

* feat: add hh fallback artifact handler

* add format detection

* chore: update changelog

* chore: typos
Matthias Seitz 2021-12-11 18:39:39 +01:00 committed by GitHub
parent 0769174b40
commit 275f7179bf
8 changed files with 284 additions and 67 deletions


@@ -24,6 +24,7 @@
 ### Unreleased
+- Add support for hardhat artifacts [#677](https://github.com/gakonst/ethers-rs/pull/677)
 - Add more utility functions to the `Artifact` trait [#673](https://github.com/gakonst/ethers-rs/pull/673)
 - Return cached artifacts from project `compile` when the cache only contains
   some files


@@ -940,6 +940,13 @@ impl BytecodeObject {
     }
 }
 
+// Returns a non-deployable bytecode ("0x") by default
+impl Default for BytecodeObject {
+    fn default() -> Self {
+        BytecodeObject::Unlinked("0x".to_string())
+    }
+}
+
 impl AsRef<[u8]> for BytecodeObject {
     fn as_ref(&self) -> &[u8] {
         match self {
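The default matters for hardhat interop: hardhat artifacts use the literal string "0x" for contracts that have nothing to deploy, such as interfaces and abstract contracts. A small sketch of what the default gives you; this is illustrative only and assumes `BytecodeObject` is reachable at `ethers_solc::artifacts::BytecodeObject`:

    use ethers_solc::artifacts::BytecodeObject;

    fn main() {
        // The default is the unlinked "0x" placeholder, i.e. nothing deployable.
        let bytecode = BytecodeObject::default();
        assert!(matches!(bytecode, BytecodeObject::Unlinked(ref hex) if hex == "0x"));
    }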


@@ -16,6 +16,13 @@ use std::{
 /// Hardhat format version
 const HH_FORMAT_VERSION: &str = "hh-sol-cache-2";
 
+/// ethers-rs format version
+///
+/// `ethers-solc` uses a different format version id, but the actual format is consistent with
+/// hardhat. This allows `ethers-solc` to detect whether the cache file was written by hardhat or
+/// `ethers-solc`.
+const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-1";
+
 /// The file name of the default cache file
 pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json";
@@ -42,6 +49,16 @@ impl SolFilesCache {
         SolFilesCacheBuilder::default()
     }
 
+    /// Whether this cache's format is the hardhat format identifier
+    pub fn is_hardhat_format(&self) -> bool {
+        self.format == HH_FORMAT_VERSION
+    }
+
+    /// Whether this cache's format is our custom format identifier
+    pub fn is_ethers_format(&self) -> bool {
+        self.format == ETHERS_FORMAT_VERSION
+    }
+
     /// Reads the cache json file from the given path
     #[tracing::instrument(skip_all, name = "sol-files-cache::read")]
     pub fn read(path: impl AsRef<Path>) -> Result<Self> {
@@ -161,7 +178,7 @@
         })
     }
 
-    /// Reads all cached artifacts from disk
+    /// Reads all cached artifacts from disk using the given ArtifactOutput handler
     pub fn read_artifacts<T: ArtifactOutput>(
         &self,
         artifacts_root: &Path,
@@ -215,7 +232,7 @@ impl SolFilesCacheBuilder {
     }
 
     pub fn insert_files(self, sources: Sources, dest: Option<PathBuf>) -> Result<SolFilesCache> {
-        let format = self.format.unwrap_or_else(|| HH_FORMAT_VERSION.to_string());
+        let format = self.format.unwrap_or_else(|| ETHERS_FORMAT_VERSION.to_string());
         let solc_config =
             self.solc_config.map(Ok).unwrap_or_else(|| SolcConfig::builder().build())?;
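With these helpers the cache file can tell you who wrote it. A minimal usage sketch (mine, not part of the diff), assuming `SolFilesCache` and the filename constant are reachable through the public `cache` module and that a cache file exists at the hypothetical path below:

    use ethers_solc::cache::{SolFilesCache, SOLIDITY_FILES_CACHE_FILENAME};
    use std::path::Path;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Hypothetical cache location, e.g. "./cache/solidity-files-cache.json"
        let path = Path::new("cache").join(SOLIDITY_FILES_CACHE_FILENAME);
        let cache = SolFilesCache::read(&path)?;

        if cache.is_hardhat_format() {
            println!("cache was written by hardhat");
        } else if cache.is_ethers_format() {
            println!("cache was written by ethers-solc");
        } else {
            println!("unknown cache format");
        }
        Ok(())
    }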


@ -1,12 +1,13 @@
use crate::{ use crate::{
artifacts::{CompactContract, CompactContractRef, Contract, Settings}, artifacts::{CompactContract, CompactContractRef, Contract, Settings},
cache::SOLIDITY_FILES_CACHE_FILENAME, cache::SOLIDITY_FILES_CACHE_FILENAME,
error::Result, error::{Result, SolcError},
hh::HardhatArtifact,
remappings::Remapping, remappings::Remapping,
CompilerOutput, CompilerOutput,
}; };
use ethers_core::{abi::Abi, types::Bytes}; use ethers_core::{abi::Abi, types::Bytes};
use serde::{Deserialize, Serialize}; use serde::{de::DeserializeOwned, Deserialize, Serialize};
use std::{ use std::{
collections::BTreeMap, collections::BTreeMap,
convert::TryFrom, convert::TryFrom,
@@ -225,37 +226,27 @@ pub trait Artifact {
     fn into_compact_contract(self) -> CompactContract;
 
     /// Returns the contents of this type as a single tuple of abi, bytecode and deployed bytecode
-    fn into_parts(self) -> (Option<Abi>, Option<Bytes>, Option<Bytes>)
-    where
-        Self: Sized,
-    {
-        self.into_compact_contract().into_parts()
-    }
+    fn into_parts(self) -> (Option<Abi>, Option<Bytes>, Option<Bytes>);
 }
 
-impl Artifact for CompactContract {
+impl<T: Into<CompactContract>> Artifact for T {
     fn into_inner(self) -> (Option<Abi>, Option<Bytes>) {
-        (self.abi, self.bin.and_then(|bin| bin.into_bytes()))
-    }
-
-    fn into_compact_contract(self) -> CompactContract {
-        self
-    }
-}
-
-impl Artifact for serde_json::Value {
-    fn into_inner(self) -> (Option<Abi>, Option<Bytes>) {
-        self.into_compact_contract().into_inner()
+        let artifact = self.into_compact_contract();
+        (artifact.abi, artifact.bin.and_then(|bin| bin.into_bytes()))
     }
 
     fn into_compact_contract(self) -> CompactContract {
         self.into()
     }
+
+    fn into_parts(self) -> (Option<Abi>, Option<Bytes>, Option<Bytes>) {
+        self.into_compact_contract().into_parts()
+    }
 }
 
 pub trait ArtifactOutput {
     /// How Artifacts are stored
-    type Artifact: Artifact;
+    type Artifact: Artifact + DeserializeOwned;
 
     /// Handle the compiler output.
     fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()>;
@@ -295,7 +286,11 @@ pub trait ArtifactOutput {
         root.as_ref().join(Self::output_file(contract_file, name)).exists()
     }
 
-    fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact>;
+    fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
+        let file = fs::File::open(path.as_ref())?;
+        let file = io::BufReader::new(file);
+        Ok(serde_json::from_reader(file)?)
+    }
 
     /// Read the cached artifacts from disk
     fn read_cached_artifacts<T, I>(files: I) -> Result<BTreeMap<PathBuf, Self::Artifact>>
@@ -313,21 +308,22 @@
     }
 
     /// Convert a contract to the artifact type
-    fn contract_to_artifact(contract: Contract) -> Self::Artifact;
+    fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact;
 
     /// Convert the compiler output into a set of artifacts
     fn output_to_artifacts(output: CompilerOutput) -> Artifacts<Self::Artifact> {
         output
             .contracts
             .into_iter()
-            .map(|(s, contracts)| {
-                (
-                    s,
-                    contracts
-                        .into_iter()
-                        .map(|(s, c)| (s, Self::contract_to_artifact(c)))
-                        .collect(),
-                )
+            .map(|(file, contracts)| {
+                let contracts = contracts
+                    .into_iter()
+                    .map(|(name, c)| {
+                        let contract = Self::contract_to_artifact(&file, &name, c);
+                        (name, contract)
+                    })
+                    .collect();
+                (file, contracts)
             })
             .collect()
     }
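Because `contract_to_artifact` now also receives the source file and the contract name, custom `ArtifactOutput` implementations can act on them, e.g. for logging or name-aware layouts. A hypothetical in-crate handler sketch, written against the trait as shown in this diff and assuming `on_output` and `contract_to_artifact` are the only items without default implementations:

    use crate::{
        artifacts::{CompactContract, Contract},
        error::Result,
        ArtifactOutput, CompilerOutput, ProjectPathsConfig,
    };

    /// Hypothetical handler that keeps compact artifacts in memory only.
    #[derive(Debug, Copy, Clone, Eq, PartialEq)]
    pub struct InMemoryArtifacts;

    impl ArtifactOutput for InMemoryArtifacts {
        type Artifact = CompactContract;

        fn on_output(_output: &CompilerOutput, _layout: &ProjectPathsConfig) -> Result<()> {
            // This sketch writes nothing to disk.
            Ok(())
        }

        fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact {
            tracing::trace!("creating compact artifact for {} (defined in {})", name, file);
            CompactContract::from(contract)
        }
    }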
@@ -350,49 +346,60 @@ impl ArtifactOutput for MinimalCombinedArtifacts {
     type Artifact = CompactContract;
 
     fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> {
-        fs::create_dir_all(&layout.artifacts)?;
+        fs::create_dir_all(&layout.artifacts)
+            .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?;
         for (file, contracts) in output.contracts.iter() {
             for (name, contract) in contracts {
                 let artifact = Self::output_file(file, name);
                 let file = layout.artifacts.join(artifact);
                 if let Some(parent) = file.parent() {
-                    fs::create_dir_all(parent)?;
+                    fs::create_dir_all(parent).map_err(|err| {
+                        SolcError::msg(format!(
+                            "Failed to create artifact parent folder \"{}\": {}",
+                            parent.display(),
+                            err
+                        ))
+                    })?;
                 }
                 let min = CompactContractRef::from(contract);
-                fs::write(file, serde_json::to_vec_pretty(&min)?)?
+                fs::write(&file, serde_json::to_vec_pretty(&min)?)?
             }
         }
         Ok(())
     }
 
-    fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
-        let file = fs::File::open(path.as_ref())?;
-        let file = io::BufReader::new(file);
-        Ok(serde_json::from_reader(file)?)
-    }
-
-    fn contract_to_artifact(contract: Contract) -> Self::Artifact {
-        CompactContract::from(contract)
+    fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact {
+        Self::Artifact::from(contract)
     }
 }
 
-/// Hardhat style artifacts
+/// An Artifacts handler implementation that works the same as `MinimalCombinedArtifacts` but also
+/// supports reading hardhat artifacts if an initial attempt to deserialize an artifact failed
 #[derive(Debug, Copy, Clone, Eq, PartialEq)]
-pub struct HardhatArtifacts;
+pub struct MinimalCombinedArtifactsHardhatFallback;
 
-impl ArtifactOutput for HardhatArtifacts {
-    type Artifact = serde_json::Value;
+impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback {
+    type Artifact = CompactContract;
 
-    fn on_output(_output: &CompilerOutput, _layout: &ProjectPathsConfig) -> Result<()> {
-        todo!("Hardhat style artifacts not yet implemented")
+    fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> {
+        MinimalCombinedArtifacts::on_output(output, layout)
     }
 
-    fn read_cached_artifact(_path: impl AsRef<Path>) -> Result<Self::Artifact> {
-        todo!("Hardhat style artifacts not yet implemented")
+    fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
+        let content = fs::read_to_string(path)?;
+        if let Ok(a) = serde_json::from_str(&content) {
+            Ok(a)
+        } else {
+            tracing::error!("Failed to deserialize compact artifact");
+            tracing::trace!("Fallback to hardhat artifact deserialization");
+            let artifact = serde_json::from_str::<HardhatArtifact>(&content)?;
+            tracing::trace!("successfully deserialized hardhat artifact");
+            Ok(artifact.into_compact_contract())
+        }
     }
 
-    fn contract_to_artifact(_contract: Contract) -> Self::Artifact {
-        todo!("Hardhat style artifacts not yet implemented")
+    fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact {
+        MinimalCombinedArtifacts::contract_to_artifact(file, name, contract)
     }
 }
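The fallback handler differs from `MinimalCombinedArtifacts` only in how it reads cached artifacts: it tries the compact format first and falls back to the hardhat format, converting through the blanket `Artifact` impl for `Into<CompactContract>` types introduced above. A rough usage sketch (mine, not part of the diff), assuming the new type is re-exported at the crate root like the other handlers and that the hypothetical artifact path below exists:

    use ethers_solc::{ArtifactOutput, MinimalCombinedArtifactsHardhatFallback};

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Hypothetical path; the file may be either a compact or a hardhat artifact.
        let artifact = MinimalCombinedArtifactsHardhatFallback::read_cached_artifact(
            "artifacts/contracts/Greeter.sol/Greeter.json",
        )?;

        // Either way the result is a `CompactContract`.
        println!("abi: {}, bin: {}", artifact.abi.is_some(), artifact.bin.is_some());
        Ok(())
    }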


@@ -29,10 +29,16 @@ pub enum SolcError {
     NoContracts(String),
     #[error(transparent)]
     PatternError(#[from] glob::PatternError),
+    /// General purpose message
+    #[error("{0}")]
+    Message(String),
 }
 
 impl SolcError {
     pub(crate) fn solc(msg: impl Into<String>) -> Self {
         SolcError::SolcError(msg.into())
     }
+
+    pub(crate) fn msg(msg: impl Into<String>) -> Self {
+        SolcError::Message(msg.into())
+    }
 }

ethers-solc/src/hh.rs (new file, 129 lines)

//! Hardhat support

use crate::{
    artifacts::{BytecodeObject, CompactContract, Contract, Offsets},
    error::{Result, SolcError},
    ArtifactOutput, CompilerOutput, ProjectPathsConfig,
};
use ethers_core::abi::Abi;
use serde::{Deserialize, Serialize};
use std::{collections::BTreeMap, fs};

const HH_ARTIFACT_VERSION: &str = "hh-sol-artifact-1";

/// A hardhat artifact
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HardhatArtifact {
    #[serde(rename = "_format")]
    pub format: String,
    /// A string with the contract's name.
    pub contract_name: String,
    /// The source name of this contract in the workspace like `contracts/Greeter.sol`
    pub source_name: String,
    /// The contract's ABI
    pub abi: Abi,
    /// A "0x"-prefixed hex string of the unlinked deployment bytecode. If the contract is not
    /// deployable, this has the string "0x"
    pub bytecode: BytecodeObject,
    /// A "0x"-prefixed hex string of the unlinked runtime/deployed bytecode. If the contract is
    /// not deployable, this has the string "0x"
    pub deployed_bytecode: Option<BytecodeObject>,
    /// The bytecode's link references object as returned by solc. If the contract doesn't need to
    /// be linked, this value contains an empty object.
    #[serde(default)]
    pub link_references: BTreeMap<String, BTreeMap<String, Vec<Offsets>>>,
    /// The deployed bytecode's link references object as returned by solc. If the contract
    /// doesn't need to be linked, this value contains an empty object.
    #[serde(default)]
    pub deployed_link_references: BTreeMap<String, BTreeMap<String, Vec<Offsets>>>,
}

impl From<HardhatArtifact> for CompactContract {
    fn from(artifact: HardhatArtifact) -> Self {
        CompactContract {
            abi: Some(artifact.abi),
            bin: Some(artifact.bytecode),
            bin_runtime: artifact.deployed_bytecode,
        }
    }
}

/// Hardhat style artifacts handler
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct HardhatArtifacts;

impl ArtifactOutput for HardhatArtifacts {
    type Artifact = HardhatArtifact;

    fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> {
        fs::create_dir_all(&layout.artifacts)
            .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?;
        for (file, contracts) in output.contracts.iter() {
            for (name, contract) in contracts {
                let artifact = Self::output_file(file, name);
                let artifact_file = layout.artifacts.join(artifact);
                if let Some(parent) = artifact_file.parent() {
                    fs::create_dir_all(parent).map_err(|err| {
                        SolcError::msg(format!(
                            "Failed to create artifact parent folder \"{}\": {}",
                            parent.display(),
                            err
                        ))
                    })?;
                }
                let artifact = Self::contract_to_artifact(file, name, contract.clone());
                fs::write(&artifact_file, serde_json::to_vec_pretty(&artifact)?)?
            }
        }
        Ok(())
    }

    fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact {
        let (bytecode, link_references, deployed_bytecode, deployed_link_references) =
            if let Some(evm) = contract.evm {
                let (deployed_bytecode, deployed_link_references) =
                    if let Some(code) = evm.deployed_bytecode.and_then(|code| code.bytecode) {
                        (Some(code.object), code.link_references)
                    } else {
                        (None, Default::default())
                    };
                (
                    evm.bytecode.object,
                    evm.bytecode.link_references,
                    deployed_bytecode,
                    deployed_link_references,
                )
            } else {
                (Default::default(), Default::default(), None, Default::default())
            };

        HardhatArtifact {
            format: HH_ARTIFACT_VERSION.to_string(),
            contract_name: name.to_string(),
            source_name: file.to_string(),
            abi: contract.abi.unwrap_or_default(),
            bytecode,
            deployed_bytecode,
            link_references,
            deployed_link_references,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::Artifact;

    #[test]
    fn can_parse_hh_artifact() {
        let s = include_str!("../test-data/hh-greeter-artifact.json");
        let artifact = serde_json::from_str::<HardhatArtifact>(s).unwrap();
        let compact = artifact.into_compact_contract();
        assert!(compact.abi.is_some());
        assert!(compact.bin.is_some());
        assert!(compact.bin_runtime.is_some());
    }
}
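To make the shape concrete, here is a hand-written, minimal artifact in the format the test above parses from `test-data/hh-greeter-artifact.json`. This is an illustrative sketch rather than the actual fixture; it assumes an empty ABI and that `BytecodeObject` deserializes from a plain hex string:

    #[test]
    fn can_parse_minimal_hh_artifact_sketch() {
        // Hypothetical minimal artifact; field names follow the struct above (camelCase).
        let s = r#"{
            "_format": "hh-sol-artifact-1",
            "contractName": "Greeter",
            "sourceName": "contracts/Greeter.sol",
            "abi": [],
            "bytecode": "0x6080604052",
            "deployedBytecode": "0x6080604052",
            "linkReferences": {},
            "deployedLinkReferences": {}
        }"#;
        let artifact: HardhatArtifact = serde_json::from_str(s).unwrap();
        assert_eq!(artifact.contract_name, "Greeter");
        assert!(artifact.deployed_bytecode.is_some());
    }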


@@ -6,6 +6,7 @@ pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion};
 use std::collections::btree_map::Entry;
 
 pub mod cache;
+pub mod hh;
 
 mod compile;
@@ -763,14 +764,12 @@ impl<T: ArtifactOutput> ProjectCompileOutput<T> {
     }
 
     /// Finds the first contract with the given name and removes it from the set
-    pub fn remove(&mut self, contract: impl AsRef<str>) -> Option<T::Artifact> {
-        let contract = contract.as_ref();
+    pub fn remove(&mut self, contract_name: impl AsRef<str>) -> Option<T::Artifact> {
+        let contract_name = contract_name.as_ref();
         if let Some(output) = self.compiler_output.as_mut() {
-            if let contract @ Some(_) = output
-                .contracts
-                .values_mut()
-                .find_map(|c| c.remove(contract).map(T::contract_to_artifact))
-            {
+            if let contract @ Some(_) = output.contracts.iter_mut().find_map(|(file, c)| {
+                c.remove(contract_name).map(|c| T::contract_to_artifact(file, contract_name, c))
+            }) {
                 return contract
             }
         }
@@ -778,7 +777,7 @@ impl<T: ArtifactOutput> ProjectCompileOutput<T> {
             .artifacts
             .iter()
             .find_map(|(path, _)| {
-                T::contract_name(path).filter(|name| name == contract).map(|_| path)
+                T::contract_name(path).filter(|name| name == contract_name).map(|_| path)
             })?
             .clone();
         self.artifacts.remove(&key)
@@ -790,17 +789,20 @@ where
     T::Artifact: Clone,
 {
     /// Finds the first contract with the given name
-    pub fn find(&self, contract: impl AsRef<str>) -> Option<Cow<T::Artifact>> {
-        let contract = contract.as_ref();
+    pub fn find(&self, contract_name: impl AsRef<str>) -> Option<Cow<T::Artifact>> {
+        let contract_name = contract_name.as_ref();
         if let Some(output) = self.compiler_output.as_ref() {
-            if let contract @ Some(_) = output.contracts.values().find_map(|c| {
-                c.get(contract).map(|c| T::contract_to_artifact(c.clone())).map(Cow::Owned)
+            if let contract @ Some(_) = output.contracts.iter().find_map(|(file, contracts)| {
+                contracts
+                    .get(contract_name)
+                    .map(|c| T::contract_to_artifact(file, contract_name, c.clone()))
+                    .map(Cow::Owned)
             }) {
                 return contract
             }
         }
         self.artifacts.iter().find_map(|(path, art)| {
-            T::contract_name(path).filter(|name| name == contract).map(|_| Cow::Borrowed(art))
+            T::contract_name(path).filter(|name| name == contract_name).map(|_| Cow::Borrowed(art))
        })
    }
}
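With the renamed parameter and the file-aware conversion in place, the lookup API is unchanged from a caller's point of view. A hedged sketch of how `find` and `remove` are typically used on a compile output; it assumes the usual `Project::builder().build()?` / `project.compile()?` flow with the default project layout and a contract named `Greeter`:

    use ethers_solc::{Artifact, Project};

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Assumes a standard project layout discovered from the current directory.
        let project = Project::builder().build()?;
        let mut output = project.compile()?;

        // Borrow the first artifact named "Greeter", wherever it was defined.
        if let Some(greeter) = output.find("Greeter") {
            let (abi, bytecode) = greeter.into_owned().into_inner();
            println!("abi: {}, bytecode: {}", abi.is_some(), bytecode.is_some());
        }

        // Or take ownership of it and remove it from the set.
        let _greeter = output.remove("Greeter");
        Ok(())
    }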

File diff suppressed because one or more lines are too long