feat: add standardjson compiler input type (#1169)
parent 35c29c82c6
commit ac3e12fe8a
@@ -1393,6 +1393,7 @@ dependencies = [
  "md-5 0.10.1",
  "num_cpus",
  "once_cell",
+ "path-slash",
  "pretty_assertions",
  "rand 0.8.5",
  "rayon",
@@ -2466,6 +2467,12 @@ dependencies = [
  "subtle",
 ]
 
+[[package]]
+name = "path-slash"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3cacbb3c4ff353b534a67fb8d7524d00229da4cb1dc8c79f4db96e375ab5b619"
+
 [[package]]
 name = "pbkdf2"
 version = "0.8.0"
@@ -38,6 +38,7 @@ dunce = "1.0.2"
 solang-parser = { default-features = false, version = "0.1.12" }
 rayon = "1.5.2"
 rand = { version = "0.8.5", optional = true }
+path-slash = "0.1.4"
 
 [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
 home = "0.5.3"
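
Note (not part of the diff): `path-slash` is the only new dependency; it is used later in `Project::standard_json_input` to emit forward-slash source paths regardless of the host OS. A minimal sketch of the extension trait as it is used in this commit, assuming the 0.1 API where `to_slash_lossy` returns a `String`:

    use std::path::{Path, PathBuf};

    use path_slash::PathExt;

    /// Illustrative helper: normalize OS-specific separators to `/` so the
    /// path can be embedded in portable standard JSON.
    fn normalized(path: &Path) -> PathBuf {
        // On Windows `src\Contract.sol` becomes `src/Contract.sol`;
        // on Unix the path is returned unchanged.
        PathBuf::from(path.to_slash_lossy())
    }
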
@@ -46,6 +46,8 @@ pub type VersionedSources = BTreeMap<Solc, (Version, Sources)>;
 /// A set of different Solc installations with their version and the sources to be compiled
 pub type VersionedFilteredSources = BTreeMap<Solc, (Version, FilteredSources)>;
 
+const SOLIDITY: &str = "Solidity";
+
 /// Input type `solc` expects
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct CompilerInput {
@@ -77,7 +79,7 @@ impl CompilerInput {
         let mut res = Vec::new();
         if !solidity_sources.is_empty() {
             res.push(Self {
-                language: "Solidity".to_string(),
+                language: SOLIDITY.to_string(),
                 sources: solidity_sources,
                 settings: Default::default(),
             });
@@ -178,6 +180,52 @@ impl CompilerInput {
     }
 }
 
+/// A `CompilerInput` representation used for verify
+///
+/// This type is an alternative `CompilerInput` but uses non-alphabetic ordering of the `sources`
+/// and instead emits the (Path -> Source) path in the same order as the pairs in the `sources`
+/// `Vec`. This is used over a map, so we can determine the order in which etherscan will display
+/// the verified contracts
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct StandardJsonCompilerInput {
+    pub language: String,
+    #[serde(with = "serde_helpers::tuple_vec_map")]
+    pub sources: Vec<(PathBuf, Source)>,
+    pub settings: Settings,
+}
+
+// === impl StandardJsonCompilerInput ===
+
+impl StandardJsonCompilerInput {
+    pub fn new(sources: Vec<(PathBuf, Source)>, settings: Settings) -> Self {
+        Self { language: SOLIDITY.to_string(), sources, settings }
+    }
+
+    /// Normalizes the EVM version used in the settings to be up to the latest one
+    /// supported by the provided compiler version.
+    #[must_use]
+    pub fn normalize_evm_version(mut self, version: &Version) -> Self {
+        if let Some(ref mut evm_version) = self.settings.evm_version {
+            self.settings.evm_version = evm_version.normalize_version(version);
+        }
+        self
+    }
+}
+
+impl From<StandardJsonCompilerInput> for CompilerInput {
+    fn from(input: StandardJsonCompilerInput) -> Self {
+        let StandardJsonCompilerInput { language, sources, settings } = input;
+        CompilerInput { language, sources: sources.into_iter().collect(), settings }
+    }
+}
+
+impl From<CompilerInput> for StandardJsonCompilerInput {
+    fn from(input: CompilerInput) -> Self {
+        let CompilerInput { language, sources, settings } = input;
+        StandardJsonCompilerInput { language, sources: sources.into_iter().collect(), settings }
+    }
+}
+
 #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct Settings {
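
Note (sketch, not part of the diff): intended use of the new type. `StandardJsonCompilerInput` serializes its `sources` in insertion order (via the `tuple_vec_map` helper added below), and the two `From` impls convert between it and the map-based `CompilerInput`. The standalone import path, the function name, and the direct use of `serde_json` here are illustrative assumptions:

    use ethers_solc::artifacts::{CompilerInput, StandardJsonCompilerInput};

    /// Convert a (map-based, alphabetically ordered) CompilerInput into the
    /// order-preserving form and serialize it, e.g. for etherscan verification.
    fn to_verification_json(input: CompilerInput) -> serde_json::Result<String> {
        let standard: StandardJsonCompilerInput = input.into();
        // `sources` serializes as a JSON object whose keys follow the Vec order.
        serde_json::to_string_pretty(&standard)
    }
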
@@ -1486,10 +1534,30 @@ mod tests {
 
         for path in fs::read_dir(dir).unwrap() {
             let path = path.unwrap().path();
-            let compiler_output = fs::read_to_string(&path).unwrap();
-            serde_json::from_str::<CompilerInput>(&compiler_output).unwrap_or_else(|err| {
+            let compiler_input = fs::read_to_string(&path).unwrap();
+            serde_json::from_str::<CompilerInput>(&compiler_input).unwrap_or_else(|err| {
+                panic!("Failed to read compiler input of {} {}", path.display(), err)
+            });
+        }
+    }
+
+    #[test]
+    fn can_parse_standard_json_compiler_input() {
+        let mut dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+        dir.push("test-data/in");
+
+        for path in fs::read_dir(dir).unwrap() {
+            let path = path.unwrap().path();
+            let compiler_input = fs::read_to_string(&path).unwrap();
+            let val = serde_json::from_str::<StandardJsonCompilerInput>(&compiler_input)
+                .unwrap_or_else(|err| {
                 panic!("Failed to read compiler output of {} {}", path.display(), err)
             });
+
+            let pretty = serde_json::to_string_pretty(&val).unwrap();
+            serde_json::from_str::<CompilerInput>(&pretty).unwrap_or_else(|err| {
+                panic!("Failed to read converted compiler input of {} {}", path.display(), err)
+            });
         }
     }
 
@@ -127,3 +127,71 @@ pub mod display_from_str_opt {
         }
     }
 }
+
+/// (De)serialize vec of tuples as map
+pub mod tuple_vec_map {
+    use serde::{de::DeserializeOwned, Deserialize, Deserializer, Serialize, Serializer};
+
+    pub fn serialize<K, V, S>(data: &[(K, V)], serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+        K: Serialize,
+        V: Serialize,
+    {
+        serializer.collect_map(data.iter().map(|x| (&x.0, &x.1)))
+    }
+
+    pub fn deserialize<'de, K, V, D>(deserializer: D) -> Result<Vec<(K, V)>, D::Error>
+    where
+        D: Deserializer<'de>,
+        K: DeserializeOwned,
+        V: DeserializeOwned,
+    {
+        use serde::de::{MapAccess, Visitor};
+        use std::{fmt, marker::PhantomData};
+
+        struct TupleVecMapVisitor<K, V> {
+            marker: PhantomData<Vec<(K, V)>>,
+        }
+
+        impl<K, V> TupleVecMapVisitor<K, V> {
+            pub fn new() -> Self {
+                TupleVecMapVisitor { marker: PhantomData }
+            }
+        }
+
+        impl<'de, K, V> Visitor<'de> for TupleVecMapVisitor<K, V>
+        where
+            K: Deserialize<'de>,
+            V: Deserialize<'de>,
+        {
+            type Value = Vec<(K, V)>;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str("a map")
+            }
+
+            #[inline]
+            fn visit_unit<E>(self) -> Result<Vec<(K, V)>, E> {
+                Ok(Vec::new())
+            }
+
+            #[inline]
+            fn visit_map<T>(self, mut access: T) -> Result<Vec<(K, V)>, T::Error>
+            where
+                T: MapAccess<'de>,
+            {
+                let mut values =
+                    Vec::with_capacity(std::cmp::min(access.size_hint().unwrap_or(0), 4096));
+
+                while let Some((key, value)) = access.next_entry()? {
+                    values.push((key, value));
+                }
+
+                Ok(values)
+            }
+        }
+
+        deserializer.deserialize_map(TupleVecMapVisitor::new())
+    }
+}
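
Note (sketch, not part of the diff): what the helper buys. A `Vec<(K, V)>` field annotated with `#[serde(with = "tuple_vec_map")]` serializes as a JSON object whose keys appear in the vector's order, and deserializes back into a vector of pairs. The struct, values, and the assumption that `serde`/`serde_json` and the module path are in scope are all illustrative:

    use serde::{Deserialize, Serialize};

    #[derive(Serialize, Deserialize)]
    struct Input {
        #[serde(with = "tuple_vec_map")]
        sources: Vec<(String, String)>,
    }

    fn main() {
        // "b.sol" intentionally precedes "a.sol"; a BTreeMap would reorder them.
        let input = Input {
            sources: vec![
                ("b.sol".into(), "contract B {}".into()),
                ("a.sol".into(), "contract A {}".into()),
            ],
        };

        let json = serde_json::to_string(&input).unwrap();
        // Keys are emitted in insertion order, not alphabetically.
        assert_eq!(json, r#"{"sources":{"b.sol":"contract B {}","a.sol":"contract A {}"}}"#);

        // Round-trips back into a vector of pairs.
        let back: Input = serde_json::from_str(&json).unwrap();
        assert_eq!(back.sources, input.sources);
    }
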
@@ -24,7 +24,7 @@ mod config;
 pub use config::{AllowedLibPaths, PathStyle, ProjectPathsConfig, SolcConfig};
 
 pub mod remappings;
-use crate::artifacts::{Source, SourceFile};
+use crate::artifacts::{Source, SourceFile, StandardJsonCompilerInput};
 
 pub mod error;
 mod filter;
@@ -428,7 +428,12 @@ impl<T: ArtifactOutput> Project<T> {
     }
 
     /// Returns standard-json-input to compile the target contract
-    pub fn standard_json_input(&self, target: impl AsRef<Path>) -> Result<CompilerInput> {
+    pub fn standard_json_input(
+        &self,
+        target: impl AsRef<Path>,
+    ) -> Result<StandardJsonCompilerInput> {
+        use path_slash::PathExt;
+
         let target = target.as_ref();
         tracing::trace!("Building standard-json-input for {:?}", target);
         let graph = Graph::resolve(&self.paths)?;
@@ -442,12 +447,22 @@
             graph.all_imported_nodes(*target_index).map(|index| graph.node(index).unpack()),
         );
 
-        let compiler_inputs = CompilerInput::with_sources(
-            sources.into_iter().map(|(s, p)| (s.clone(), p.clone())).collect(),
-        );
+        let root = self.root();
+        let sources = sources
+            .into_iter()
+            .map(|(path, source)| {
+                let path: PathBuf = if let Ok(stripped) = path.strip_prefix(root) {
+                    stripped.to_slash_lossy().into()
+                } else {
+                    path.to_slash_lossy().into()
+                };
+                (path, source.clone())
+            })
+            .collect();
+
+        let mut settings = self.solc_config.settings.clone();
         // strip the path to the project root from all remappings
-        let remappings = self
+        settings.remappings = self
             .paths
             .remappings
             .clone()
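
Note (illustrative, not part of the diff): the mapping above boils down to making every `sources` key root-relative and slash-normalized, so the emitted JSON is portable across machines and operating systems. Roughly, with a hypothetical helper name:

    use std::path::{Path, PathBuf};

    use path_slash::PathExt;

    fn source_key(root: &Path, path: &Path) -> PathBuf {
        match path.strip_prefix(root) {
            // e.g. /home/user/project/src/Dapp.sol -> src/Dapp.sol
            Ok(stripped) => PathBuf::from(stripped.to_slash_lossy()),
            // paths outside the project root are kept, but still slash-normalized
            Err(_) => PathBuf::from(path.to_slash_lossy()),
        }
    }
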
@@ -455,15 +470,9 @@
             .map(|r| r.into_relative(self.root()).to_relative_remapping())
             .collect::<Vec<_>>();
 
-        let compiler_input = compiler_inputs
-            .first()
-            .ok_or_else(|| SolcError::msg("cannot get the compiler input"))?
-            .clone()
-            .settings(self.solc_config.settings.clone())
-            .with_remappings(remappings)
-            .strip_prefix(self.root());
+        let input = StandardJsonCompilerInput::new(sources, settings);
 
-        Ok(compiler_input)
+        Ok(input)
     }
 }
 
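
Note (sketch, not part of the diff): end-to-end usage of the reworked method. `Project`, `ProjectPathsConfig`, and the builder come from ethers-solc; the filesystem paths and the direct use of `serde_json` are assumptions for a standalone example:

    use ethers_solc::{Project, ProjectPathsConfig};

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Lay out the project in dapptools style (src/, lib/, out/).
        let paths = ProjectPathsConfig::dapptools("/path/to/project")?;
        let project = Project::builder().paths(paths).build()?;

        // Collects the target and everything it imports, with root-relative,
        // slash-normalized source paths and root-relative remappings.
        let input = project.standard_json_input("/path/to/project/src/Dapp.t.sol")?;
        println!("{}", serde_json::to_string_pretty(&input)?);
        Ok(())
    }
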
@@ -12,7 +12,7 @@ use ethers_solc::{
     cache::{SolFilesCache, SOLIDITY_FILES_CACHE_FILENAME},
     project_util::*,
     remappings::Remapping,
-    ConfigurableArtifacts, ExtraOutputValues, Graph, Project, ProjectCompileOutput,
+    CompilerInput, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, ProjectCompileOutput,
     ProjectPathsConfig, Solc, TestFileFilter,
 };
 use pretty_assertions::assert_eq;
@@ -1023,11 +1023,11 @@ fn can_sanitize_bytecode_hash() {
 fn can_compile_std_json_input() {
     let tmp = TempProject::dapptools_init().unwrap();
     tmp.assert_no_errors();
-    let source =
-        tmp.list_source_files().into_iter().filter(|p| p.ends_with("Dapp.t.sol")).next().unwrap();
+    let source = tmp.list_source_files().into_iter().find(|p| p.ends_with("Dapp.t.sol")).unwrap();
     let input = tmp.project().standard_json_input(source).unwrap();
 
     assert!(input.settings.remappings.contains(&"ds-test/=lib/ds-test/src/".parse().unwrap()));
+    let input: CompilerInput = input.into();
     assert!(input.sources.contains_key(Path::new("lib/ds-test/src/test.sol")));
 
     // should be installed