feat(solc): emit build info files if configured (#1338)

* feat(solc): emit build info files if configured

* feat: add content hashing
Matthias Seitz 2022-06-15 21:44:20 +02:00 committed by GitHub
parent beffd96cc4
commit e3389f336a
6 changed files with 239 additions and 12 deletions

ethers-solc/src/artifacts/serde_helpers.rs

@@ -78,7 +78,7 @@ pub mod json_string_opt {
pub mod empty_json_object_opt {
use serde::{
de::{self, DeserializeOwned},
- ser, Deserialize, Deserializer, Serialize, Serializer,
+ Deserialize, Deserializer, Serialize, Serializer,
};
pub fn serialize<T, S>(value: &Option<T>, serializer: S) -> Result<S::Ok, S::Error>
@@ -87,8 +87,7 @@ pub mod empty_json_object_opt {
T: Serialize,
{
if let Some(value) = value {
- let value = serde_json::to_string(value).map_err(ser::Error::custom)?;
- serializer.serialize_str(&value)
+ value.serialize(serializer)
} else {
let empty = serde_json::Value::Object(Default::default());
serde_json::Value::serialize(&empty, serializer)
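
The effect of this serde change, shown as a standalone sketch (the `Wrapper` struct and `metadata` field are illustrative, not part of the crate): a `Some` value is now serialized as nested JSON instead of a JSON-escaped string, while `None` still becomes an empty object.

use serde::{Serialize, Serializer};

// Stand-in for the patched helper: serialize the value as nested JSON
// instead of a JSON-escaped string; `None` becomes an empty object.
fn empty_json_object_opt<T, S>(value: &Option<T>, serializer: S) -> Result<S::Ok, S::Error>
where
    T: Serialize,
    S: Serializer,
{
    match value {
        Some(value) => value.serialize(serializer),
        None => serde_json::Value::Object(Default::default()).serialize(serializer),
    }
}

#[derive(Serialize)]
struct Wrapper {
    #[serde(serialize_with = "empty_json_object_opt")]
    metadata: Option<serde_json::Value>,
}

fn main() {
    let w = Wrapper { metadata: Some(serde_json::json!({ "a": 1 })) };
    // before this change: {"metadata":"{\"a\":1}"}; after: {"metadata":{"a":1}}
    println!("{}", serde_json::to_string(&w).unwrap());
    // a None value still serializes as an empty object: {"metadata":{}}
    println!("{}", serde_json::to_string(&Wrapper { metadata: None }).unwrap());
}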

ethers-solc/src/buildinfo.rs

@@ -0,0 +1,114 @@
//! Represents an entire build
use crate::{utils, CompilerInput, CompilerOutput, SolcError};
use md5::Digest;
use semver::Version;
use serde::{ser::SerializeStruct, Deserialize, Serialize, Serializer};
use std::{cell::RefCell, path::Path, rc::Rc};
pub const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-build-info-1";
/// A Hardhat-compatible build info representation
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BuildInfo {
pub id: String,
#[serde(rename = "_format")]
pub format: String,
pub solc_version: Version,
pub solc_long_version: Version,
pub input: CompilerInput,
pub output: CompilerOutput,
}
impl BuildInfo {
/// Deserializes the `BuildInfo` object from the given file
pub fn read(path: impl AsRef<Path>) -> Result<Self, SolcError> {
utils::read_json_file(path)
}
}
/// Represents a serialized `BuildInfo` object
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct RawBuildInfo {
/// The hash that identifies the BuildInfo
pub id: String,
/// The serialized `BuildInfo` JSON
pub build_info: String,
}
// === impl RawBuildInfo ===
impl RawBuildInfo {
/// Serializes a `BuildInfo` object
pub fn new(
input: &CompilerInput,
output: &CompilerOutput,
version: &Version,
) -> serde_json::Result<RawBuildInfo> {
let mut hasher = md5::Md5::new();
let w = BuildInfoWriter { buf: Rc::new(RefCell::new(Vec::with_capacity(128))) };
let mut buf = w.clone();
let mut serializer = serde_json::Serializer::pretty(&mut buf);
let mut s = serializer.serialize_struct("BuildInfo", 6)?;
s.serialize_field("_format", &ETHERS_FORMAT_VERSION)?;
let solc_short = format!("{}.{}.{}", version.major, version.minor, version.patch);
s.serialize_field("solcVersion", &solc_short)?;
s.serialize_field("solcLongVersion", &version)?;
s.serialize_field("input", input)?;
// create the hash for `{_format,solcVersion,solcLongVersion,input}`
// N.B. this is not exactly the same as hashing the json representation of these values, but
// the most efficient one
hasher.update(&*w.buf.borrow());
let result = hasher.finalize();
let id = hex::encode(result);
s.serialize_field("id", &id)?;
s.serialize_field("output", output)?;
s.end()?;
drop(buf);
let build_info = unsafe {
// serde_json never emits invalid UTF-8
String::from_utf8_unchecked(w.buf.take())
};
Ok(RawBuildInfo { id, build_info })
}
}
#[derive(Clone)]
struct BuildInfoWriter {
buf: Rc<RefCell<Vec<u8>>>,
}
impl std::io::Write for BuildInfoWriter {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
self.buf.borrow_mut().write(buf)
}
fn flush(&mut self) -> std::io::Result<()> {
self.buf.borrow_mut().flush()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::Source;
use std::{collections::BTreeMap, path::PathBuf};
#[test]
fn build_info_serde() {
let inputs = CompilerInput::with_sources(BTreeMap::from([(
PathBuf::from("input.sol"),
Source { content: "".to_string() },
)]));
let output = CompilerOutput::default();
let v: Version = "0.8.4+commit.c7e474f2".parse().unwrap();
let raw_info = RawBuildInfo::new(&inputs[0], &output, &v).unwrap();
let _info: BuildInfo = serde_json::from_str(&raw_info.build_info).unwrap();
}
}
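
For reference, a minimal sketch of the content-hashing step above: the `id` is the md5 of the pretty-printed JSON for `_format`, `solcVersion`, `solcLongVersion` and `input`, streamed into the hasher during serialization. The prefix string below is an illustrative stand-in, not the exact byte layout.

use md5::{Digest, Md5};

fn main() {
    // illustrative stand-in for the serialized {_format, solcVersion,
    // solcLongVersion, input} prefix that is actually hashed
    let hashed_prefix = br#"{"_format": "ethers-rs-sol-build-info-1", "solcVersion": "0.8.4""#;

    let mut hasher = Md5::new();
    hasher.update(hashed_prefix);
    let id = hex::encode(hasher.finalize());

    // the id doubles as the build info file name: <id>.json
    println!("{id}");
}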

ethers-solc/src/compile/output/mod.rs

@@ -5,12 +5,14 @@ use crate::{
contract::{CompactContractBytecode, CompactContractRef, Contract},
Error,
},
buildinfo::RawBuildInfo,
sources::{VersionedSourceFile, VersionedSourceFiles},
- ArtifactId, ArtifactOutput, Artifacts, CompilerOutput, ConfigurableArtifacts,
+ ArtifactId, ArtifactOutput, Artifacts, CompilerOutput, ConfigurableArtifacts, SolcIoError,
};
use contracts::{VersionedContract, VersionedContracts};
use semver::Version;
use std::{collections::BTreeMap, fmt, path::Path};
use tracing::trace;
pub mod contracts;
pub mod sources;
@@ -243,6 +245,8 @@ pub struct AggregatedCompilerOutput {
pub sources: VersionedSourceFiles,
/// All compiled contracts combined with the solc version used to compile them
pub contracts: VersionedContracts,
/// All the `BuildInfo`s of solc invocations.
pub build_infos: BTreeMap<Version, RawBuildInfo>,
}
impl AggregatedCompilerOutput {
@@ -302,6 +306,29 @@ impl AggregatedCompilerOutput {
}
}
/// Creates all `BuildInfo` files in the given `build_info_dir`
///
/// There can be multiple `BuildInfo`s, since multiple solc versions are supported.
///
/// The created files use the md5 hash of `{_format,solcVersion,solcLongVersion,input}` as their
/// file name
pub fn write_build_infos(&self, build_info_dir: impl AsRef<Path>) -> Result<(), SolcIoError> {
if self.build_infos.is_empty() {
return Ok(())
}
let build_info_dir = build_info_dir.as_ref();
std::fs::create_dir_all(build_info_dir)
.map_err(|err| SolcIoError::new(err, build_info_dir))?;
for (version, build_info) in &self.build_infos {
trace!("writing build info file for solc {}", version);
let file_name = format!("{}.json", build_info.id);
let file = build_info_dir.join(file_name);
std::fs::write(&file, &build_info.build_info)
.map_err(|err| SolcIoError::new(err, file))?;
}
Ok(())
}
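
As a usage sketch: each map entry above produces one `<md5-id>.json` file in the build info directory, and the written file round-trips through `BuildInfo::read` (the path below is hypothetical).

use ethers_solc::buildinfo::BuildInfo;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // hypothetical file name; real names are the md5 content hash of the input
    let info = BuildInfo::read("artifacts/build-info/046aa6e84ad1f1f8df530a3ad9f5404e.json")?;
    println!("compiled with solc {}", info.solc_long_version);
    Ok(())
}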
/// Finds the _first_ contract with the given name
///
/// # Example

ethers-solc/src/compile/project.rs

@@ -104,6 +104,7 @@
use crate::{
artifact_output::Artifacts,
artifacts::{Settings, VersionedFilteredSources, VersionedSources},
buildinfo::RawBuildInfo,
cache::ArtifactsCache,
error::Result,
filter::SparseOutputFilter,
@@ -240,11 +241,13 @@ impl<'a, T: ArtifactOutput> PreprocessedState<'a, T> {
/// advance to the next state by compiling all sources
fn compile(self) -> Result<CompiledState<'a, T>> {
let PreprocessedState { sources, cache, sparse_output } = self;
let project = cache.project();
let mut output = sources.compile(
- &cache.project().solc_config.settings,
- &cache.project().paths,
+ &project.solc_config.settings,
+ &project.paths,
sparse_output,
cache.graph(),
project.build_info,
)?;
// source paths get stripped before handing them over to solc, so solc never uses absolute
@@ -288,11 +291,16 @@ impl<'a, T: ArtifactOutput> CompiledState<'a, T> {
output.sources.len()
);
// this emits the artifacts via the project's artifacts handler
- project.artifacts_handler().on_output(
+ let artifacts = project.artifacts_handler().on_output(
&output.contracts,
&output.sources,
&project.paths,
- )?
+ )?;
// emits all the build infos, if they exist
output.write_build_infos(project.build_info_path())?;
artifacts
};
Ok(ArtifactsState { output, cache, compiled_artifacts })
@@ -391,13 +399,14 @@ impl FilteredCompilerSources {
paths: &ProjectPathsConfig,
sparse_output: SparseOutputFilter,
graph: &GraphEdges,
create_build_info: bool,
) -> Result<AggregatedCompilerOutput> {
match self {
FilteredCompilerSources::Sequential(input) => {
- compile_sequential(input, settings, paths, sparse_output, graph)
+ compile_sequential(input, settings, paths, sparse_output, graph, create_build_info)
}
FilteredCompilerSources::Parallel(input, j) => {
- compile_parallel(input, j, settings, paths, sparse_output, graph)
+ compile_parallel(input, j, settings, paths, sparse_output, graph, create_build_info)
}
}
}
@@ -419,6 +428,7 @@ fn compile_sequential(
paths: &ProjectPathsConfig,
sparse_output: SparseOutputFilter,
graph: &GraphEdges,
create_build_info: bool,
) -> Result<AggregatedCompilerOutput> {
let mut aggregated = AggregatedCompilerOutput::default();
tracing::trace!("compiling {} jobs sequentially", input.len());
@@ -484,6 +494,13 @@ fn compile_sequential(
report::solc_success(&solc, &version, &output, &start.elapsed());
tracing::trace!("compiled input, output has error: {}", output.has_error());
tracing::trace!("received compiler output: {:?}", output.contracts.keys());
// if configured, also create the build info
if create_build_info {
let build_info = RawBuildInfo::new(&input, &output, &version)?;
aggregated.build_infos.insert(version.clone(), build_info);
}
aggregated.extend(version.clone(), output);
}
}
@@ -498,6 +515,7 @@ fn compile_parallel(
paths: &ProjectPathsConfig,
sparse_output: SparseOutputFilter,
graph: &GraphEdges,
create_build_info: bool,
) -> Result<AggregatedCompilerOutput> {
debug_assert!(num_jobs > 1);
tracing::trace!(
@@ -580,14 +598,21 @@ fn compile_parallel(
report::solc_spawn(&solc, &version, &input, &actually_dirty);
solc.compile(&input).map(move |output| {
report::solc_success(&solc, &version, &output, &start.elapsed());
- (version, output)
+ (version, input, output)
})
})
.collect::<Result<Vec<_>>>()
})?;
let mut aggregated = AggregatedCompilerOutput::default();
- aggregated.extend_all(outputs);
+ for (version, input, output) in outputs {
+ // if configured, also create the build info
+ if create_build_info {
+ let build_info = RawBuildInfo::new(&input, &output, &version)?;
+ aggregated.build_infos.insert(version.clone(), build_info);
+ }
+ aggregated.extend(version, output);
+ }
Ok(aggregated)
}
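
Note that `build_infos` is keyed by solc version, so when several compiler jobs run with the same version, only the last `RawBuildInfo` inserted for that version survives. A small sketch of the map semantics:

use semver::Version;
use std::collections::BTreeMap;

fn main() {
    let mut build_infos: BTreeMap<Version, String> = BTreeMap::new();
    let v: Version = "0.8.10".parse().unwrap();
    // two jobs compiled with the same solc version: the second insert wins
    build_infos.insert(v.clone(), "build info for job 1".to_string());
    build_infos.insert(v.clone(), "build info for job 2".to_string());
    assert_eq!(build_infos.len(), 1);
    assert_eq!(build_infos[&v], "build info for job 2");
}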

ethers-solc/src/lib.rs

@@ -6,6 +6,7 @@ pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion};
use std::collections::{BTreeMap, HashSet};
mod artifact_output;
pub mod buildinfo;
pub mod cache;
pub mod hh;
pub use artifact_output::*;
@@ -59,6 +60,8 @@ pub struct Project<T: ArtifactOutput = ConfigurableArtifacts> {
pub solc_config: SolcConfig,
/// Whether caching is enabled
pub cached: bool,
/// Whether to output build information with each solc call.
pub build_info: bool,
/// Whether writing artifacts to disk is enabled
pub no_artifacts: bool,
/// Whether automatic solc version detection is enabled
@@ -121,6 +124,11 @@ impl<T: ArtifactOutput> Project<T> {
&self.paths.cache
}
/// Returns the path to the `build-info` directory nested in the artifacts dir
pub fn build_info_path(&self) -> PathBuf {
self.paths.artifacts.join("build-info")
}
/// Returns the root directory of the project
pub fn root(&self) -> &PathBuf {
&self.paths.root
@@ -513,6 +521,8 @@ pub struct ProjectBuilder<T: ArtifactOutput = ConfigurableArtifacts> {
solc_config: Option<SolcConfig>,
/// Whether caching is enabled, default is true.
cached: bool,
/// Whether to output build information with each solc call.
build_info: bool,
/// Whether writing artifacts to disk is enabled, default is true.
no_artifacts: bool,
/// Whether automatic solc version detection is enabled
@@ -536,6 +546,7 @@ impl<T: ArtifactOutput> ProjectBuilder<T> {
solc: None,
solc_config: None,
cached: true,
build_info: false,
no_artifacts: false,
auto_detect: true,
offline: false,
@@ -591,6 +602,13 @@ impl<T: ArtifactOutput> ProjectBuilder<T> {
self
}
/// Sets whether build info files should be emitted
#[must_use]
pub fn set_build_info(mut self, build_info: bool) -> Self {
self.build_info = build_info;
self
}
/// Activates offline mode
///
/// Prevents possible network access to download/check solc installs
@@ -663,6 +681,7 @@ impl<T: ArtifactOutput> ProjectBuilder<T> {
allowed_paths,
solc_jobs,
offline,
build_info,
..
} = self;
ProjectBuilder {
@@ -677,6 +696,7 @@ impl<T: ArtifactOutput> ProjectBuilder<T> {
ignored_error_codes,
allowed_paths,
solc_jobs,
build_info,
}
}
@@ -713,6 +733,7 @@ impl<T: ArtifactOutput> ProjectBuilder<T> {
mut allowed_paths,
solc_jobs,
offline,
build_info,
} = self;
let paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?;
@@ -730,6 +751,7 @@ impl<T: ArtifactOutput> ProjectBuilder<T> {
solc,
solc_config,
cached,
build_info,
no_artifacts,
auto_detect,
artifacts,
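
End to end, a minimal sketch of enabling the new flag through the builder (the project root is hypothetical):

use ethers_solc::{Project, ProjectPathsConfig};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let paths = ProjectPathsConfig::builder().root("./my-project").build()?;
    let project = Project::builder().paths(paths).set_build_info(true).build()?;

    let output = project.compile()?;
    assert!(!output.has_compiler_errors());

    // build info files are written to `<artifacts>/build-info/<md5-id>.json`
    println!("{}", project.build_info_path().display());
    Ok(())
}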

ethers-solc/tests/project.rs

@@ -13,6 +13,7 @@ use ethers_solc::{
BytecodeHash, DevDoc, ErrorDoc, EventDoc, Libraries, MethodDoc, ModelCheckerEngine::CHC,
ModelCheckerSettings, UserDoc, UserDocNotice,
},
buildinfo::BuildInfo,
cache::{SolFilesCache, SOLIDITY_FILES_CACHE_FILENAME},
project_util::*,
remappings::Remapping,
@@ -311,6 +312,45 @@ fn can_compile_dapp_detect_changes_in_sources() {
}
}
#[test]
fn can_emit_build_info() {
let mut project = TempProject::dapptools().unwrap();
project.project_mut().build_info = true;
project
.add_source(
"A",
r#"
pragma solidity ^0.8.10;
import "./B.sol";
contract A { }
"#,
)
.unwrap();
project
.add_source(
"B",
r#"
pragma solidity ^0.8.10;
contract B { }
"#,
)
.unwrap();
let compiled = project.compile().unwrap();
assert!(!compiled.has_compiler_errors());
let info_dir = project.project().build_info_path();
assert!(info_dir.exists());
let mut build_info_count = 0;
for entry in fs::read_dir(info_dir).unwrap() {
let _info = BuildInfo::read(entry.unwrap().path()).unwrap();
build_info_count += 1;
}
assert_eq!(build_info_count, 1);
}
#[test]
fn can_compile_dapp_sample_with_cache() {
let tmp_dir = tempfile::tempdir().unwrap();