feat: improved solc management (#539)

* feat: improved solc management

* test: add basic test

* rustfmt

* rustfmt

* feat: add support for lib paths

* test: add dapp testing data

* feat: support dapp style libs

* fix: doc test

* use SOLC_PATH by default

* docs: import readme

* feat: add diagnostics

* chore: cleanup

* docs: update compile docs

* style: use red for error msg

* style: simplify error format

* chore: add newline on successful compiler run log

* feat: allow ignoring error codes so that they do not get logged

* chore: use solc 0.6.6 to match CI Version

* fix: make constructor public in hardhat tests

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>
Matthias Seitz 2021-10-30 19:59:44 +02:00 committed by GitHub
parent 8a7f42b6fa
commit 5c6ce6b0a1
16 changed files with 3055 additions and 73 deletions
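Taken together, these changes introduce a builder-style API for configuring and compiling a project. A rough usage sketch based on the `ProjectBuilder`, `ProjectPathsConfig`, and `ignore_error_code` additions in the diff below (the project path and the ignored error code are illustrative):

```rust
use ethers_solc::{Project, ProjectPathsConfig};

fn main() {
    // dapptools-style layout: src/, lib/, out/ under the given root
    let paths = ProjectPathsConfig::dapptools("./my-project").unwrap();

    let project = Project::builder()
        .paths(paths)
        // suppress a specific solc error/warning code from the logged diagnostics
        .ignore_error_code(1878)
        .build()
        .unwrap();

    // Display prints colored diagnostics, or "Nothing to compile" when the cache is up to date
    println!("{}", project.compile().unwrap());
}
```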

Cargo.lock generated

@@ -122,6 +122,17 @@ dependencies = [
"rustc_version 0.3.3",
]
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+"hermit-abi",
+"libc",
+"winapi",
+]
[[package]]
name = "auto_impl"
version = "0.4.1"
@@ -514,6 +525,17 @@ dependencies = [
"wasm-bindgen-futures",
]
+[[package]]
+name = "colored"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd"
+dependencies = [
+"atty",
+"lazy_static",
+"winapi",
+]
[[package]]
name = "const-oid"
version = "0.6.1"
@@ -1051,7 +1073,9 @@ dependencies = [
name = "ethers-solc"
version = "0.1.0"
dependencies = [
+"colored",
"futures-util",
+"hex",
"md-5",
"once_cell",
"regex",


@@ -24,6 +24,8 @@ once_cell = "1.8.0"
regex = "1.5.4"
md-5 = "0.9.1"
thiserror = "1.0.30"
+hex = "0.4.3"
+colored = "2.0.0"
[dev-dependencies]
tokio = { version = "1.12.0", features = ["full"] }

ethers-solc/README.md Normal file

@@ -0,0 +1,26 @@
# ethers-solc
Utilities for working with native `solc` and compiling projects.
To also compile contracts during `cargo build` (so that ethers' `abigen!` can pull in the updated ABI automatically), you can configure an `ethers_solc::Project` in your `build.rs` file.
First, add `ethers-solc` to your cargo build-dependencies:
```toml
[build-dependencies]
ethers-solc = { git = "https://github.com/gakonst/ethers-rs" }
```
```rust
use ethers_solc::{Project, ProjectPathsConfig};
fn main() {
// configure the project with all its paths, solc, cache etc.
let project = Project::builder()
.paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
.build()
.unwrap();
let output = project.compile().unwrap();
println!("{}", output);
}
```
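A `build.rs` can also fail the build when compilation reports errors; a minimal sketch using the `ProjectCompileOutput` and `CompilerOutput::has_error` APIs added further down in this diff:

```rust
use ethers_solc::{Project, ProjectCompileOutput, ProjectPathsConfig};

fn main() {
    let project = Project::builder()
        .paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
        .build()
        .unwrap();

    match project.compile().unwrap() {
        // cache hit: sources unchanged, nothing was recompiled
        ProjectCompileOutput::Unchanged => {}
        ProjectCompileOutput::Compiled((output, ignored_error_codes)) => {
            // print diagnostics, skipping the ignored error codes
            eprintln!("{}", output.diagnostics(ignored_error_codes));
            if output.has_error() {
                panic!("failed to compile project");
            }
        }
    }
}
```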


@@ -1,5 +1,7 @@
//! Solc artifact types
+use colored::Colorize;
+use md5::Digest;
use semver::Version;
use std::{
collections::BTreeMap,
@@ -9,13 +11,19 @@ use std::{
};
use crate::{compile::*, utils};
-use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
+use serde::{
+de::{self, Visitor},
+Deserialize, Deserializer, Serialize, Serializer,
+};
+/// An ordered list of files and their source
+pub type Sources = BTreeMap<PathBuf, Source>;
/// Input type `solc` expects
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CompilerInput {
pub language: String,
-pub sources: BTreeMap<PathBuf, Source>,
+pub sources: Sources,
pub settings: Settings,
}
@@ -26,7 +34,7 @@ impl CompilerInput {
}
/// Creates a new Compiler input with default settings and the given sources
-pub fn with_sources(sources: BTreeMap<PathBuf, Source>) -> Self {
+pub fn with_sources(sources: Sources) -> Self {
Self { language: "Solidity".to_string(), sources, settings: Default::default() }
}
@@ -49,7 +57,7 @@ impl Default for CompilerInput {
}
}
-#[derive(Clone, Debug, Serialize, Deserialize)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Settings {
pub optimizer: Optimizer,
@@ -102,6 +110,24 @@ pub struct Settings {
/// Note that using a using `evm`, `evm.bytecode`, `ewasm`, etc. will select
/// every target part of that output. Additionally, `*` can be used as a
/// wildcard to request everything.
+///
+/// The default output selection is
+///
+/// ```json
+/// {
+/// "*": {
+/// "*": [
+/// "abi",
+/// "evm.bytecode",
+/// "evm.deployedBytecode",
+/// "evm.methodIdentifiers"
+/// ],
+/// "": [
+/// "ast"
+/// ]
+/// }
+/// }
+/// ```
#[serde(default)]
pub output_selection: BTreeMap<String, BTreeMap<String, Vec<String>>>,
#[serde(default, with = "display_from_str_opt", skip_serializing_if = "Option::is_none")]
@@ -129,7 +155,7 @@ impl Settings {
}
/// Adds `ast` to output
-pub fn with_ast(&mut self) -> &mut Self {
+pub fn with_ast(mut self) -> Self {
let output = self.output_selection.entry("*".to_string()).or_insert_with(BTreeMap::default);
output.insert("".to_string(), vec!["ast".to_string()]);
self
@@ -145,10 +171,11 @@ impl Default for Settings {
evm_version: Some(EvmVersion::Istanbul),
libraries: Default::default(),
}
+.with_ast()
}
}
-#[derive(Clone, Debug, Serialize, Deserialize)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Optimizer {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub enabled: Option<bool>,
@@ -253,7 +280,7 @@ impl FromStr for EvmVersion {
}
}
-#[derive(Clone, Debug, Serialize, Deserialize)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Metadata {
#[serde(rename = "useLiteralContent")]
pub use_literal_content: bool,
@@ -271,12 +298,12 @@ impl Source {
}
/// Finds all source files under the given dir path and reads them all
-pub fn read_all_from(dir: impl AsRef<Path>) -> io::Result<BTreeMap<PathBuf, Source>> {
+pub fn read_all_from(dir: impl AsRef<Path>) -> io::Result<Sources> {
Self::read_all(utils::source_files(dir)?)
}
/// Reads all files
-pub fn read_all<T, I>(files: I) -> io::Result<BTreeMap<PathBuf, Source>>
+pub fn read_all<T, I>(files: I) -> io::Result<Sources>
where
I: IntoIterator<Item = T>,
T: Into<PathBuf>,
@@ -287,6 +314,19 @@ impl Source {
.map(|file| Self::read(&file).map(|source| (file, source)))
.collect()
}
+/// Generate a non-cryptographically secure checksum of the file's content
+pub fn content_hash(&self) -> String {
+let mut hasher = md5::Md5::new();
+hasher.update(&self.content);
+let result = hasher.finalize();
+hex::encode(result)
+}
+/// Returns all import statements of the file
+pub fn parse_imports(&self) -> Vec<&str> {
+utils::find_import_paths(self.as_ref())
+}
}
#[cfg(feature = "async")]
@@ -297,14 +337,12 @@ impl Source {
}
/// Finds all source files under the given dir path and reads them all
-pub async fn async_read_all_from(
-dir: impl AsRef<Path>,
-) -> io::Result<BTreeMap<PathBuf, Source>> {
+pub async fn async_read_all_from(dir: impl AsRef<Path>) -> io::Result<Sources> {
Self::async_read_all(utils::source_files(dir.as_ref())?).await
}
/// async version of `Self::read_all`
-pub async fn async_read_all<T, I>(files: I) -> io::Result<BTreeMap<PathBuf, Source>>
+pub async fn async_read_all<T, I>(files: I) -> io::Result<Sources>
where
I: IntoIterator<Item = T>,
T: Into<PathBuf>,
@@ -321,8 +359,14 @@ impl Source {
}
}
+impl AsRef<str> for Source {
+fn as_ref(&self) -> &str {
+&self.content
+}
+}
/// Output type `solc` produces
-#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
+#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Default)]
pub struct CompilerOutput {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub errors: Vec<Error>,
@@ -332,6 +376,49 @@ pub struct CompilerOutput {
pub contracts: BTreeMap<String, BTreeMap<String, Contract>>,
}
+impl CompilerOutput {
+/// Whether the output contains an compiler error
+pub fn has_error(&self) -> bool {
+self.errors.iter().any(|err| err.severity.is_error())
+}
+pub fn diagnostics<'a>(&'a self, ignored_error_codes: &'a [u64]) -> OutputDiagnostics {
+OutputDiagnostics { errors: &self.errors, ignored_error_codes }
+}
+}
+/// Helper type to implement display for solc errors
+#[derive(Clone, Debug)]
+pub struct OutputDiagnostics<'a> {
+errors: &'a [Error],
+ignored_error_codes: &'a [u64],
+}
+impl<'a> OutputDiagnostics<'a> {
+pub fn has_error(&self) -> bool {
+self.errors.iter().any(|err| err.severity.is_error())
+}
+}
+impl<'a> fmt::Display for OutputDiagnostics<'a> {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+if !self.has_error() {
+f.write_str("Compiler run successful")?;
+}
+for err in self.errors {
+// Do not log any ignored error codes
+if let Some(error_code) = err.error_code {
+if !self.ignored_error_codes.contains(&error_code) {
+writeln!(f, "\n{}", err)?;
+}
+} else {
+writeln!(f, "\n{}", err)?;
+}
+}
+Ok(())
+}
+}
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
pub struct Contract {
/// The Ethereum Contract ABI. If empty, it is represented as an empty
@@ -475,10 +562,10 @@ pub struct Bytecode {
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct FunctionDebugData {
-pub entry_point: u32,
-pub id: u32,
-pub parameter_slots: u32,
-pub return_slots: u32,
+pub entry_point: Option<u32>,
+pub id: Option<u32>,
+pub parameter_slots: Option<u32>,
+pub return_slots: Option<u32>,
}
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
@@ -573,17 +660,71 @@ pub struct Error {
pub r#type: String,
pub component: String,
pub severity: Severity,
-pub error_code: Option<String>,
+#[serde(default, deserialize_with = "from_optional_str")]
+pub error_code: Option<u64>,
pub message: String,
pub formatted_message: Option<String>,
}
+fn from_optional_str<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
+where
+T: FromStr,
+T::Err: fmt::Display,
+D: Deserializer<'de>,
+{
+let s = Option::<String>::deserialize(deserializer)?;
+if let Some(s) = s {
+T::from_str(&s).map_err(de::Error::custom).map(Some)
+} else {
+Ok(None)
+}
+}
+impl fmt::Display for Error {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+if let Some(msg) = &self.formatted_message {
+match self.severity {
+Severity::Error => msg.as_str().red().fmt(f),
+Severity::Warning | Severity::Info => msg.as_str().yellow().fmt(f),
+}
+} else {
+self.severity.fmt(f)?;
+writeln!(f, ": {}", self.message)
+}
+}
+}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Severity {
Error,
Warning,
Info,
}
+impl fmt::Display for Severity {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+match self {
+Severity::Error => f.write_str(&"Error".red()),
+Severity::Warning => f.write_str(&"Warning".yellow()),
+Severity::Info => f.write_str("Info"),
+}
+}
+}
+impl Severity {
+pub fn is_error(&self) -> bool {
+matches!(self, Severity::Error)
+}
+pub fn is_warning(&self) -> bool {
+matches!(self, Severity::Warning)
+}
+pub fn is_info(&self) -> bool {
+matches!(self, Severity::Info)
+}
+}
impl FromStr for Severity {
type Err = String;


@@ -1,11 +1,16 @@
//! Support for compiling contracts
-use crate::error::Result;
+use crate::{
+artifacts::Sources,
+config::SolcConfig,
+error::{Result, SolcError},
+utils,
+};
use serde::{Deserialize, Serialize};
use std::{
collections::BTreeMap,
fs,
path::{Path, PathBuf},
-time::Duration,
+time::{Duration, UNIX_EPOCH},
};
/// Hardhat format version
@@ -23,8 +28,18 @@ pub struct SolFilesCache {
}
impl SolFilesCache {
-fn new(format: impl Into<String>) -> Self {
-Self { format: format.into(), files: Default::default() }
+/// # Example
+///
+/// Autodetect solc version and default settings
+///
+/// ```no_run
+/// use ethers_solc::artifacts::Source;
+/// use ethers_solc::cache::SolFilesCache;
+/// let files = Source::read_all_from("./sources").unwrap();
+/// let config = SolFilesCache::builder().insert_files(files).unwrap();
+/// ```
+pub fn builder() -> SolFilesCacheBuilder {
+SolFilesCacheBuilder::default()
}
/// Reads the cache json file from the given path
@@ -43,21 +58,25 @@ impl SolFilesCache {
self.files.retain(|file, _| Path::new(file).exists())
}
+/// Returns if true if a source has changed and false if no source has changed
+pub fn is_changed(&self, sources: &Sources, config: Option<&SolcConfig>) -> bool {
+sources.iter().any(|(file, source)| self.has_changed(file, source.content_hash(), config))
+}
/// Returns true if the given content hash or config differs from the file's
/// or the file does not exist
pub fn has_changed(
&self,
file: impl AsRef<Path>,
hash: impl AsRef<[u8]>,
-config: Option<SolcConfig>,
+config: Option<&SolcConfig>,
) -> bool {
if let Some(entry) = self.files.get(file.as_ref()) {
if entry.content_hash.as_bytes() != hash.as_ref() {
return true
}
if let Some(config) = config {
-if config != entry.solc_config {
+if config != &entry.solc_config {
return true
}
}
@@ -81,9 +100,64 @@ impl SolFilesCache {
}
}
-impl Default for SolFilesCache {
-fn default() -> Self {
-Self::new(HH_FORMAT_VERSION)
+#[derive(Debug, Clone, Default)]
+pub struct SolFilesCacheBuilder {
+format: Option<String>,
+solc_config: Option<SolcConfig>,
+root: Option<PathBuf>,
+}
+impl SolFilesCacheBuilder {
+pub fn format(mut self, format: impl Into<String>) -> Self {
+self.format = Some(format.into());
+self
+}
+pub fn solc_config(mut self, solc_config: SolcConfig) -> Self {
+self.solc_config = Some(solc_config);
+self
+}
+pub fn root(mut self, root: impl Into<PathBuf>) -> Self {
+self.root = Some(root.into());
+self
+}
+pub fn insert_files(self, sources: Sources) -> Result<SolFilesCache> {
+let format = self.format.unwrap_or_else(|| HH_FORMAT_VERSION.to_string());
+let solc_config =
+self.solc_config.map(Ok).unwrap_or_else(|| SolcConfig::builder().build())?;
+let root = self.root.map(Ok).unwrap_or_else(std::env::current_dir)?;
+let mut files = BTreeMap::new();
+for (file, source) in sources {
+let last_modification_date = fs::metadata(&file)?
+.modified()?
+.duration_since(UNIX_EPOCH)
+.map_err(|err| SolcError::solc(err.to_string()))?
+.as_millis() as u64;
+let imports =
+utils::find_import_paths(source.as_ref()).into_iter().map(str::to_string).collect();
+let version_pragmas = utils::find_version_pragma(source.as_ref())
+.map(|v| vec![v.to_string()])
+.unwrap_or_default();
+let entry = CacheEntry {
+last_modification_date,
+content_hash: source.content_hash(),
+source_name: utils::source_name(&file, &root).into(),
+solc_config: solc_config.clone(),
+imports,
+version_pragmas,
+// TODO detect artifacts
+artifacts: vec![],
+};
+files.insert(file, entry);
+}
+Ok(SolFilesCache { format, files })
}
}
@@ -93,7 +167,7 @@ pub struct CacheEntry {
/// the last modification time of this file
pub last_modification_date: u64,
pub content_hash: String,
-pub source_name: String,
+pub source_name: PathBuf,
pub solc_config: SolcConfig,
pub imports: Vec<String>,
pub version_pragmas: Vec<String>,
@@ -107,13 +181,6 @@ impl CacheEntry {
}
}
-#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
-pub struct SolcConfig {
-pub version: String,
-pub settings: serde_json::Value,
-}
#[cfg(test)]
mod tests {
use super::*;


@@ -42,7 +42,7 @@ pub struct Solc(pub PathBuf);
impl Default for Solc {
fn default() -> Self {
-Self::new(SOLC)
+std::env::var("SOLC_PATH").map(Solc::new).unwrap_or_else(|_| Solc::new(SOLC))
}
}
@@ -193,6 +193,12 @@ impl AsRef<Path> for Solc {
}
}
+impl<T: Into<PathBuf>> From<T> for Solc {
+fn from(solc: T) -> Self {
+Solc(solc.into())
+}
+}
#[cfg(test)]
mod tests {
use super::*;
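`Solc::default()` now respects the `SOLC_PATH` environment variable before falling back to the `solc` on the search path; a small sketch (the binary path is illustrative):

```rust
use ethers_solc::Solc;

fn main() {
    // point the default compiler at a specific binary for this process
    std::env::set_var("SOLC_PATH", "/usr/local/bin/solc-0.6.6");

    let solc = Solc::default();
    // Solc is a thin newtype around the binary's path
    let path: &std::path::Path = solc.as_ref();
    println!("using {}", path.display());
}
```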


@@ -1,8 +1,14 @@
use crate::{
-artifacts::CompactContractRef, cache::SOLIDITY_FILES_CACHE_FILENAME, error::Result,
-CompilerOutput,
+artifacts::{CompactContractRef, Settings},
+cache::SOLIDITY_FILES_CACHE_FILENAME,
+error::Result,
+CompilerOutput, Solc,
};
+use serde::{Deserialize, Serialize};
+use std::{
+fmt, fs, io,
+path::{Path, PathBuf},
+};
-use std::{fmt, fs, io, path::PathBuf};
/// Where to find all files or where to write them
#[derive(Debug, Clone)]
@@ -17,23 +23,187 @@ pub struct ProjectPathsConfig {
pub sources: PathBuf,
/// Where to find tests
pub tests: PathBuf,
+/// Where to look for libraries
+pub libraries: Vec<PathBuf>,
}
impl ProjectPathsConfig {
-/// Creates a new config instance which points to the canonicalized root
-/// path
-pub fn new(root: impl Into<PathBuf>) -> io::Result<Self> {
-let root = std::fs::canonicalize(root.into())?;
-Ok(Self {
-cache: root.join("cache").join(SOLIDITY_FILES_CACHE_FILENAME),
-artifacts: root.join("artifacts"),
-sources: root.join("contracts"),
-tests: root.join("tests"),
+pub fn builder() -> ProjectPathsConfigBuilder {
+ProjectPathsConfigBuilder::default()
+}
+/// Creates a new hardhat style config instance which points to the canonicalized root path
+pub fn hardhat(root: impl AsRef<Path>) -> io::Result<Self> {
+PathStyle::HardHat.paths(root)
+}
+/// Creates a new dapptools style config instance which points to the canonicalized root path
+pub fn dapptools(root: impl AsRef<Path>) -> io::Result<Self> {
+PathStyle::Dapptools.paths(root)
+}
+/// Creates a new config with the current directory as the root
+pub fn current_hardhat() -> io::Result<Self> {
+Self::hardhat(std::env::current_dir()?)
+}
+/// Creates a new config with the current directory as the root
+pub fn current_dapptools() -> io::Result<Self> {
+Self::dapptools(std::env::current_dir()?)
+}
+}
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum PathStyle {
+HardHat,
+Dapptools,
+}
+impl PathStyle {
+pub fn paths(&self, root: impl AsRef<Path>) -> io::Result<ProjectPathsConfig> {
+let root = std::fs::canonicalize(root)?;
+match self {
+PathStyle::Dapptools => ProjectPathsConfig::builder()
+.sources(root.join("src"))
+.artifacts(root.join("out"))
+.lib(root.join("lib"))
+.root(root)
+.build(),
+PathStyle::HardHat => ProjectPathsConfig::builder()
+.sources(root.join("contracts"))
+.artifacts(root.join("artifacts"))
+.lib(root.join("node_modules"))
+.root(root)
+.build(),
+}
+}
+}
+#[derive(Debug, Clone, Default)]
+pub struct ProjectPathsConfigBuilder {
+root: Option<PathBuf>,
+cache: Option<PathBuf>,
+artifacts: Option<PathBuf>,
+sources: Option<PathBuf>,
+tests: Option<PathBuf>,
+libraries: Option<Vec<PathBuf>>,
+}
+impl ProjectPathsConfigBuilder {
+pub fn root(mut self, root: impl Into<PathBuf>) -> Self {
+self.root = Some(root.into());
+self
+}
+pub fn cache(mut self, cache: impl Into<PathBuf>) -> Self {
+self.cache = Some(cache.into());
+self
+}
+pub fn artifacts(mut self, artifacts: impl Into<PathBuf>) -> Self {
+self.artifacts = Some(artifacts.into());
+self
+}
+pub fn sources(mut self, sources: impl Into<PathBuf>) -> Self {
+self.sources = Some(sources.into());
+self
+}
+pub fn tests(mut self, tests: impl Into<PathBuf>) -> Self {
+self.tests = Some(tests.into());
+self
+}
+/// Specifically disallow additional libraries
+pub fn no_libs(mut self) -> Self {
+self.libraries = Some(Vec::new());
+self
+}
+pub fn lib(mut self, lib: impl Into<PathBuf>) -> Self {
+self.libraries.get_or_insert_with(Vec::new).push(lib.into());
+self
+}
+pub fn libs(mut self, libs: impl IntoIterator<Item = impl Into<PathBuf>>) -> Self {
+let libraries = self.libraries.get_or_insert_with(Vec::new);
+for lib in libs.into_iter() {
+libraries.push(lib.into());
+}
+self
+}
+pub fn build(self) -> io::Result<ProjectPathsConfig> {
+let root = self.root.map(Ok).unwrap_or_else(std::env::current_dir)?;
+let root = std::fs::canonicalize(root)?;
+Ok(ProjectPathsConfig {
+cache: self
+.cache
+.unwrap_or_else(|| root.join("cache").join(SOLIDITY_FILES_CACHE_FILENAME)),
+artifacts: self.artifacts.unwrap_or_else(|| root.join("artifacts")),
+sources: self.sources.unwrap_or_else(|| root.join("contracts")),
+tests: self.tests.unwrap_or_else(|| root.join("tests")),
+libraries: self.libraries.unwrap_or_default(),
root,
})
}
}
+/// The config to use when compiling the contracts
+#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
+pub struct SolcConfig {
+/// Configured solc version
+pub version: String,
+/// How the file was compiled
+pub settings: Settings,
+}
+impl SolcConfig {
+/// # Example
+///
+/// Autodetect solc version and default settings
+///
+/// ```rust
+/// use ethers_solc::SolcConfig;
+/// let config = SolcConfig::builder().build().unwrap();
+/// ```
+pub fn builder() -> SolcConfigBuilder {
+SolcConfigBuilder::default()
+}
+}
+#[derive(Default)]
+pub struct SolcConfigBuilder {
+version: Option<String>,
+settings: Option<Settings>,
+}
+impl SolcConfigBuilder {
+pub fn version(mut self, version: impl Into<String>) -> Self {
+self.version = Some(version.into());
+self
+}
+pub fn settings(mut self, settings: Settings) -> Self {
+self.settings = Some(settings);
+self
+}
+/// Creates the solc config
+///
+/// If no solc version is configured then it will be determined by calling `solc --version`.
+pub fn build(self) -> Result<SolcConfig> {
+let Self { version, settings } = self;
+let version =
+version.map(Ok).unwrap_or_else(|| Solc::default().version().map(|s| s.to_string()))?;
+let settings = settings.unwrap_or_default();
+Ok(SolcConfig { version, settings })
+}
+}
/// Determines how to handle compiler output
pub enum ArtifactOutput {
/// Creates a single json artifact with
@@ -56,9 +226,11 @@ impl ArtifactOutput {
pub fn on_output(&self, output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> {
match self {
ArtifactOutput::MinimalCombined => {
+fs::create_dir_all(&layout.artifacts)?;
for contracts in output.contracts.values() {
for (name, contract) in contracts {
-let file = layout.root.join(format!("{}.json", name));
+let file = layout.artifacts.join(format!("{}.json", name));
let min = CompactContractRef::from(contract);
fs::write(file, serde_json::to_vec_pretty(&min)?)?
}
@@ -73,6 +245,12 @@ }
}
}
+impl Default for ArtifactOutput {
+fn default() -> Self {
+ArtifactOutput::MinimalCombined
+}
+}
impl fmt::Debug for ArtifactOutput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {


@@ -1,8 +1,9 @@
-//! Support for compiling contracts
+#![doc = include_str!("../README.md")]
pub mod artifacts;
pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion};
+use std::collections::btree_map::Entry;
pub mod cache;
@@ -10,48 +11,236 @@ mod compile;
pub use compile::Solc;
mod config;
-use crate::{artifacts::Source, cache::SolFilesCache, config::ArtifactOutput};
-pub use config::ProjectPathsConfig;
+pub use config::{ArtifactOutput, ProjectPathsConfig, SolcConfig};
+use crate::{artifacts::Source, cache::SolFilesCache};
pub mod error;
pub mod utils;
+use crate::artifacts::Sources;
use error::Result;
+use std::{
+collections::{BTreeMap, HashMap},
+fmt, fs, io,
+path::PathBuf,
+};
/// Handles contract compiling
#[derive(Debug)]
pub struct Project {
/// The layout of the
-pub config: ProjectPathsConfig,
+pub paths: ProjectPathsConfig,
/// Where to find solc
pub solc: Solc,
+/// How solc invocation should be configured.
+pub solc_config: SolcConfig,
/// Whether caching is enabled
pub cached: bool,
/// How to handle compiler output
pub artifacts: ArtifactOutput,
+/// Errors/Warnings which match these error codes are not going to be logged
+pub ignored_error_codes: Vec<u64>,
}
impl Project {
-/// New compile project without cache support.
-pub fn new(config: ProjectPathsConfig, solc: Solc, artifacts: ArtifactOutput) -> Self {
-Self { config, solc, cached: false, artifacts }
+/// Configure the current project
+///
+/// # Example
+///
+/// ```rust
+/// use ethers_solc::Project;
+/// let config = Project::builder().build().unwrap();
+/// ```
+pub fn builder() -> ProjectBuilder {
+ProjectBuilder::default()
}
-/// Enable cache.
-pub fn cached(mut self) -> Self {
-self.cached = true;
+fn write_cache_file(&self, sources: Sources) -> Result<()> {
+let cache = SolFilesCache::builder()
+.root(&self.paths.root)
+.solc_config(self.solc_config.clone())
+.insert_files(sources)?;
+if let Some(cache_dir) = self.paths.cache.parent() {
+fs::create_dir_all(cache_dir)?
+}
+cache.write(&self.paths.cache)
+}
+/// Returns all sources found under the project's sources path
+pub fn sources(&self) -> io::Result<Sources> {
+Source::read_all_from(self.paths.sources.as_path())
+}
+/// Attempts to read all unique libraries that are used as imports like "hardhat/console.sol"
+fn resolved_libraries(
+&self,
+sources: &Sources,
+) -> io::Result<BTreeMap<PathBuf, (Source, PathBuf)>> {
+let mut libs = BTreeMap::default();
+for source in sources.values() {
+for import in source.parse_imports() {
+if let Some(lib) = utils::resolve_library(&self.paths.libraries, import) {
+if let Entry::Vacant(entry) = libs.entry(import.into()) {
+entry.insert((Source::read(&lib)?, lib));
+}
+}
+}
+}
+Ok(libs)
+}
+/// Attempts to compile the contracts found at the configured location.
+///
+/// NOTE: this does not check if the contracts were successfully compiled, see
+/// `CompilerOutput::has_error` instead.
+pub fn compile(&self) -> Result<ProjectCompileOutput> {
+let mut sources = self.sources()?;
+// add all libraries to the source set while keeping track of their actual disk path
+let mut source_name_path = HashMap::new();
+let mut path_source_name = HashMap::new();
+for (import, (source, path)) in self.resolved_libraries(&sources)? {
+// inserting with absolute path here and keep track of the source name <-> path mappings
+sources.insert(path.clone(), source);
+path_source_name.insert(path.clone(), import.clone());
+source_name_path.insert(import, path);
+}
+if self.cached && self.paths.cache.exists() {
+// check anything changed
+let cache = SolFilesCache::read(&self.paths.cache)?;
+if !cache.is_changed(&sources, Some(&self.solc_config)) {
+return Ok(ProjectCompileOutput::Unchanged)
+}
+}
+// replace absolute path with source name to make solc happy
+let sources = apply_mappings(sources, path_source_name);
+let input = CompilerInput::with_sources(sources);
+let output = self.solc.compile(&input)?;
+if output.has_error() {
+return Ok(ProjectCompileOutput::Compiled((output, &self.ignored_error_codes)))
+}
+if self.cached {
+// reapply to disk paths
+let sources = apply_mappings(input.sources, source_name_path);
+// create cache file
+self.write_cache_file(sources)?;
+}
+self.artifacts.on_output(&output, &self.paths)?;
+Ok(ProjectCompileOutput::Compiled((output, &self.ignored_error_codes)))
+}
+}
+fn apply_mappings(sources: Sources, mut mappings: HashMap<PathBuf, PathBuf>) -> Sources {
+sources
+.into_iter()
+.map(|(import, source)| {
+if let Some(path) = mappings.remove(&import) {
+(path, source)
+} else {
+(import, source)
+}
+})
+.collect()
+}
+pub struct ProjectBuilder {
+/// The layout of the
+paths: Option<ProjectPathsConfig>,
+/// Where to find solc
+solc: Option<Solc>,
+/// How solc invocation should be configured.
+solc_config: Option<SolcConfig>,
+/// Whether caching is enabled, default is true.
+cached: bool,
+/// How to handle compiler output
+artifacts: Option<ArtifactOutput>,
+/// Which error codes to ignore
+pub ignored_error_codes: Vec<u64>,
+}
+impl ProjectBuilder {
+pub fn paths(mut self, paths: ProjectPathsConfig) -> Self {
+self.paths = Some(paths);
self
}
-pub fn compile(&self) -> Result<()> {
-let _sources = Source::read_all_from(self.config.sources.as_path())?;
-if self.cached {
-let _cache = if self.config.cache.exists() {
-SolFilesCache::read(&self.config.cache)?
-} else {
-SolFilesCache::default()
-};
-}
-unimplemented!()
+pub fn solc(mut self, solc: impl Into<Solc>) -> Self {
+self.solc = Some(solc.into());
+self
+}
+pub fn solc_config(mut self, solc_config: SolcConfig) -> Self {
+self.solc_config = Some(solc_config);
+self
+}
+pub fn artifacts(mut self, artifacts: ArtifactOutput) -> Self {
+self.artifacts = Some(artifacts);
+self
+}
+pub fn ignore_error_code(mut self, code: u64) -> Self {
+self.ignored_error_codes.push(code);
+self
+}
+/// Disables cached builds
+pub fn ephemeral(mut self) -> Self {
+self.cached = false;
+self
+}
+pub fn build(self) -> Result<Project> {
+let Self { paths, solc, solc_config, cached, artifacts, ignored_error_codes } = self;
+let solc = solc.unwrap_or_default();
+let solc_config = solc_config.map(Ok).unwrap_or_else(|| {
+let version = solc.version()?;
+SolcConfig::builder().version(version.to_string()).build()
+})?;
+Ok(Project {
+paths: paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?,
+solc,
+solc_config,
+cached,
+artifacts: artifacts.unwrap_or_default(),
+ignored_error_codes,
+})
+}
+}
+impl Default for ProjectBuilder {
+fn default() -> Self {
+Self {
+paths: None,
+solc: None,
+solc_config: None,
+cached: true,
+artifacts: None,
+ignored_error_codes: Vec::new(),
+}
+}
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ProjectCompileOutput<'a> {
+/// Nothing to compile because unchanged sources
+Unchanged,
+Compiled((CompilerOutput, &'a [u64])),
+}
+impl<'a> fmt::Display for ProjectCompileOutput<'a> {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+match self {
+ProjectCompileOutput::Unchanged => f.write_str("Nothing to compile"),
+ProjectCompileOutput::Compiled((output, ignored_error_codes)) => {
+output.diagnostics(ignored_error_codes).fmt(f)
+}
+}
}
}


@@ -1,6 +1,6 @@
//! Utility functions
-use std::path::{Path, PathBuf};
+use std::path::{Component, Path, PathBuf};
use once_cell::sync::Lazy;
use regex::Regex;
@@ -59,8 +59,52 @@ pub fn source_files(root: impl AsRef<Path>) -> walkdir::Result<Vec<PathBuf>> {
Ok(files)
}
+/// Returns the source name for the given source path, the ancestors of the root path
+/// `/Users/project/sources/contract.sol` -> `sources/contracts.sol`
+pub fn source_name(source: &Path, root: impl AsRef<Path>) -> &Path {
+source.strip_prefix(root.as_ref()).unwrap_or(source)
+}
+/// Attempts to determine if the given source is a local, relative import
+pub fn is_local_source_name(libs: &[impl AsRef<Path>], source: impl AsRef<Path>) -> bool {
+resolve_library(libs, source).is_none()
+}
+/// Returns the path to the library if the source path is in fact determined to be a library path,
+/// and it exists.
+pub fn resolve_library(libs: &[impl AsRef<Path>], source: impl AsRef<Path>) -> Option<PathBuf> {
+let source = source.as_ref();
+let comp = source.components().next()?;
+match comp {
+Component::Normal(first_dir) => {
+// attempt to verify that the root component of this source exists under a library
+// folder
+for lib in libs {
+let lib = lib.as_ref();
+let contract = lib.join(source);
+if contract.exists() {
+// contract exists in <lib>/<source>
+return Some(contract)
+}
+// check for <lib>/<first_dir>/src/name.sol
+let contract = lib
+.join(first_dir)
+.join("src")
+.join(source.strip_prefix(first_dir).expect("is first component"));
+if contract.exists() {
+return Some(contract)
+}
+}
+None
+}
+Component::RootDir => Some(source.into()),
+_ => None,
+}
+}
#[cfg(test)]
mod tests {
+use super::*;
use std::{
collections::HashSet,
fs::{create_dir_all, File},
@@ -68,7 +112,19 @@ mod tests {
use tempdir::TempDir;
-use super::*;
+#[test]
+fn can_determine_local_paths() {
+assert!(is_local_source_name(&[""], "./local/contract.sol"));
+assert!(is_local_source_name(&[""], "../local/contract.sol"));
+assert!(!is_local_source_name(&[""], "/ds-test/test.sol"));
+let tmp_dir = TempDir::new("contracts").unwrap();
+let dir = tmp_dir.path().join("ds-test");
+create_dir_all(&dir).unwrap();
+File::create(dir.join("test.sol")).unwrap();
+assert!(!is_local_source_name(&[tmp_dir.path()], "ds-test/test.sol"));
+}
#[test]
fn can_find_solidity_sources() {
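The dapptools-style lookup is the second check above: an import like `ds-test/test.sol` resolves to `lib/ds-test/src/test.sol` when the library lives as a git submodule under `lib/`. A small sketch exercising `resolve_library` (the library directories are illustrative):

```rust
use ethers_solc::utils::resolve_library;
use std::path::PathBuf;

fn main() {
    let libs = vec![PathBuf::from("lib"), PathBuf::from("node_modules")];
    // tries lib/ds-test/test.sol, then lib/ds-test/src/test.sol, then the same under node_modules
    match resolve_library(&libs, "ds-test/test.sol") {
        Some(path) => println!("import resolves to {}", path.display()),
        None => println!("treated as a local import"),
    }
}
```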


@@ -0,0 +1,223 @@
// SPDX-License-Identifier: GPL-3.0-or-later
pragma solidity >=0.4.23;
import "../src/test.sol";
contract DemoTest is DSTest {
function test_this() public pure {
require(true);
}
function test_logs() public {
emit log("-- log(string)");
emit log("a string");
emit log("-- log_named_uint(string, uint)");
log_named_uint("uint", 512);
emit log("-- log_named_int(string, int)");
log_named_int("int", -512);
emit log("-- log_named_address(string, address)");
log_named_address("address", address(this));
emit log("-- log_named_bytes32(string, bytes32)");
log_named_bytes32("bytes32", "a string");
emit log("-- log_named_bytes(string, bytes)");
log_named_bytes("bytes", hex"cafefe");
emit log("-- log_named_string(string, string)");
log_named_string("string", "a string");
emit log("-- log_named_decimal_uint(string, uint, uint)");
log_named_decimal_uint("decimal uint", 1.0e18, 18);
emit log("-- log_named_decimal_int(string, int, uint)");
log_named_decimal_int("decimal int", -1.0e18, 18);
}
event log_old_named_uint(bytes32,uint);
function test_old_logs() public {
log_old_named_uint("key", 500);
log_named_bytes32("bkey", "val");
}
function test_trace() public view {
this.echo("string 1", "string 2");
}
function test_multiline() public {
emit log("a multiline\\n" "string");
emit log("a multiline " "string");
log_bytes("a string");
log_bytes("a multiline\n" "string");
log_bytes("a multiline\\n" "string");
emit log(unicode"Ώ");
logs(hex"0000");
log_named_bytes("0x0000", hex"0000");
logs(hex"ff");
}
function echo(string memory s1, string memory s2) public pure
returns (string memory, string memory)
{
return (s1, s2);
}
function prove_this(uint x) public {
log_named_uint("sym x", x);
assertGt(x + 1, 0);
}
function test_logn() public {
assembly {
log0(0x01, 0x02)
log1(0x01, 0x02, 0x03)
log2(0x01, 0x02, 0x03, 0x04)
log3(0x01, 0x02, 0x03, 0x04, 0x05)
}
}
event MyEvent(uint, uint indexed, uint, uint indexed);
function test_events() public {
emit MyEvent(1, 2, 3, 4);
}
function test_asserts() public {
string memory err = "this test has failed!";
emit log("## assertTrue(bool)\n");
assertTrue(false);
emit log("\n");
assertTrue(false, err);
emit log("\n## assertEq(address,address)\n");
assertEq(address(this), msg.sender);
emit log("\n");
assertEq(address(this), msg.sender, err);
emit log("\n## assertEq32(bytes32,bytes32)\n");
assertEq32("bytes 1", "bytes 2");
emit log("\n");
assertEq32("bytes 1", "bytes 2", err);
emit log("\n## assertEq(bytes32,bytes32)\n");
assertEq32("bytes 1", "bytes 2");
emit log("\n");
assertEq32("bytes 1", "bytes 2", err);
emit log("\n## assertEq(uint,uint)\n");
assertEq(uint(0), 1);
emit log("\n");
assertEq(uint(0), 1, err);
emit log("\n## assertEq(int,int)\n");
assertEq(-1, -2);
emit log("\n");
assertEq(-1, -2, err);
emit log("\n## assertEqDecimal(int,int,uint)\n");
assertEqDecimal(-1.0e18, -1.1e18, 18);
emit log("\n");
assertEqDecimal(-1.0e18, -1.1e18, 18, err);
emit log("\n## assertEqDecimal(uint,uint,uint)\n");
assertEqDecimal(uint(1.0e18), 1.1e18, 18);
emit log("\n");
assertEqDecimal(uint(1.0e18), 1.1e18, 18, err);
emit log("\n## assertGt(uint,uint)\n");
assertGt(uint(0), 0);
emit log("\n");
assertGt(uint(0), 0, err);
emit log("\n## assertGt(int,int)\n");
assertGt(-1, -1);
emit log("\n");
assertGt(-1, -1, err);
emit log("\n## assertGtDecimal(int,int,uint)\n");
assertGtDecimal(-2.0e18, -1.1e18, 18);
emit log("\n");
assertGtDecimal(-2.0e18, -1.1e18, 18, err);
emit log("\n## assertGtDecimal(uint,uint,uint)\n");
assertGtDecimal(uint(1.0e18), 1.1e18, 18);
emit log("\n");
assertGtDecimal(uint(1.0e18), 1.1e18, 18, err);
emit log("\n## assertGe(uint,uint)\n");
assertGe(uint(0), 1);
emit log("\n");
assertGe(uint(0), 1, err);
emit log("\n## assertGe(int,int)\n");
assertGe(-1, 0);
emit log("\n");
assertGe(-1, 0, err);
emit log("\n## assertGeDecimal(int,int,uint)\n");
assertGeDecimal(-2.0e18, -1.1e18, 18);
emit log("\n");
assertGeDecimal(-2.0e18, -1.1e18, 18, err);
emit log("\n## assertGeDecimal(uint,uint,uint)\n");
assertGeDecimal(uint(1.0e18), 1.1e18, 18);
emit log("\n");
assertGeDecimal(uint(1.0e18), 1.1e18, 18, err);
emit log("\n## assertLt(uint,uint)\n");
assertLt(uint(0), 0);
emit log("\n");
assertLt(uint(0), 0, err);
emit log("\n## assertLt(int,int)\n");
assertLt(-1, -1);
emit log("\n");
assertLt(-1, -1, err);
emit log("\n## assertLtDecimal(int,int,uint)\n");
assertLtDecimal(-1.0e18, -1.1e18, 18);
emit log("\n");
assertLtDecimal(-1.0e18, -1.1e18, 18, err);
emit log("\n## assertLtDecimal(uint,uint,uint)\n");
assertLtDecimal(uint(2.0e18), 1.1e18, 18);
emit log("\n");
assertLtDecimal(uint(2.0e18), 1.1e18, 18, err);
emit log("\n## assertLe(uint,uint)\n");
assertLe(uint(1), 0);
emit log("\n");
assertLe(uint(1), 0, err);
emit log("\n## assertLe(int,int)\n");
assertLe(0, -1);
emit log("\n");
assertLe(0, -1, err);
emit log("\n## assertLeDecimal(int,int,uint)\n");
assertLeDecimal(-1.0e18, -1.1e18, 18);
emit log("\n");
assertLeDecimal(-1.0e18, -1.1e18, 18, err);
emit log("\n## assertLeDecimal(uint,uint,uint)\n");
assertLeDecimal(uint(2.0e18), 1.1e18, 18);
emit log("\n");
assertLeDecimal(uint(2.0e18), 1.1e18, 18, err);
emit log("\n## assertEq(string,string)\n");
string memory s1 = "string 1";
string memory s2 = "string 2";
assertEq(s1, s2);
emit log("\n");
assertEq(s1, s2, err);
emit log("\n## assertEq0(bytes,bytes)\n");
assertEq0(hex"abcdef01", hex"abcdef02");
log("\n");
assertEq0(hex"abcdef01", hex"abcdef02", err);
}
}
contract DemoTestWithSetUp {
function setUp() public {
}
function test_pass() public pure {
}
}


@@ -0,0 +1,434 @@
// SPDX-License-Identifier: GPL-3.0-or-later
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
pragma solidity >=0.4.23;
contract DSTest {
event log (string);
event logs (bytes);
event log_address (address);
event log_bytes32 (bytes32);
event log_int (int);
event log_uint (uint);
event log_bytes (bytes);
event log_string (string);
event log_named_address (string key, address val);
event log_named_bytes32 (string key, bytes32 val);
event log_named_decimal_int (string key, int val, uint decimals);
event log_named_decimal_uint (string key, uint val, uint decimals);
event log_named_int (string key, int val);
event log_named_uint (string key, uint val);
event log_named_bytes (string key, bytes val);
event log_named_string (string key, string val);
bool public IS_TEST = true;
bool public failed;
address constant HEVM_ADDRESS =
address(bytes20(uint160(uint256(keccak256('hevm cheat code')))));
modifier mayRevert() { _; }
modifier testopts(string memory) { _; }
function fail() internal {
failed = true;
}
modifier logs_gas() {
uint startGas = gasleft();
_;
uint endGas = gasleft();
emit log_named_uint("gas", startGas - endGas);
}
function assertTrue(bool condition) internal {
if (!condition) {
emit log("Error: Assertion Failed");
fail();
}
}
function assertTrue(bool condition, string memory err) internal {
if (!condition) {
emit log_named_string("Error", err);
assertTrue(condition);
}
}
function assertEq(address a, address b) internal {
if (a != b) {
emit log("Error: a == b not satisfied [address]");
emit log_named_address(" Expected", b);
emit log_named_address(" Actual", a);
fail();
}
}
function assertEq(address a, address b, string memory err) internal {
if (a != b) {
emit log_named_string ("Error", err);
assertEq(a, b);
}
}
function assertEq(bytes32 a, bytes32 b) internal {
if (a != b) {
emit log("Error: a == b not satisfied [bytes32]");
emit log_named_bytes32(" Expected", b);
emit log_named_bytes32(" Actual", a);
fail();
}
}
function assertEq(bytes32 a, bytes32 b, string memory err) internal {
if (a != b) {
emit log_named_string ("Error", err);
assertEq(a, b);
}
}
function assertEq32(bytes32 a, bytes32 b) internal {
assertEq(a, b);
}
function assertEq32(bytes32 a, bytes32 b, string memory err) internal {
assertEq(a, b, err);
}
function assertEq(int a, int b) internal {
if (a != b) {
emit log("Error: a == b not satisfied [int]");
emit log_named_int(" Expected", b);
emit log_named_int(" Actual", a);
fail();
}
}
function assertEq(int a, int b, string memory err) internal {
if (a != b) {
emit log_named_string("Error", err);
assertEq(a, b);
}
}
function assertEq(uint a, uint b) internal {
if (a != b) {
emit log("Error: a == b not satisfied [uint]");
emit log_named_uint(" Expected", b);
emit log_named_uint(" Actual", a);
fail();
}
}
function assertEq(uint a, uint b, string memory err) internal {
if (a != b) {
emit log_named_string("Error", err);
assertEq(a, b);
}
}
function assertEqDecimal(int a, int b, uint decimals) internal {
if (a != b) {
emit log("Error: a == b not satisfied [decimal int]");
emit log_named_decimal_int(" Expected", b, decimals);
emit log_named_decimal_int(" Actual", a, decimals);
fail();
}
}
function assertEqDecimal(int a, int b, uint decimals, string memory err) internal {
if (a != b) {
emit log_named_string("Error", err);
assertEqDecimal(a, b, decimals);
}
}
function assertEqDecimal(uint a, uint b, uint decimals) internal {
if (a != b) {
emit log("Error: a == b not satisfied [decimal uint]");
emit log_named_decimal_uint(" Expected", b, decimals);
emit log_named_decimal_uint(" Actual", a, decimals);
fail();
}
}
function assertEqDecimal(uint a, uint b, uint decimals, string memory err) internal {
if (a != b) {
emit log_named_string("Error", err);
assertEqDecimal(a, b, decimals);
}
}
function assertGt(uint a, uint b) internal {
if (a <= b) {
emit log("Error: a > b not satisfied [uint]");
emit log_named_uint(" Value a", a);
emit log_named_uint(" Value b", b);
fail();
}
}
function assertGt(uint a, uint b, string memory err) internal {
if (a <= b) {
emit log_named_string("Error", err);
assertGt(a, b);
}
}
function assertGt(int a, int b) internal {
if (a <= b) {
emit log("Error: a > b not satisfied [int]");
emit log_named_int(" Value a", a);
emit log_named_int(" Value b", b);
fail();
}
}
function assertGt(int a, int b, string memory err) internal {
if (a <= b) {
emit log_named_string("Error", err);
assertGt(a, b);
}
}
function assertGtDecimal(int a, int b, uint decimals) internal {
if (a <= b) {
emit log("Error: a > b not satisfied [decimal int]");
emit log_named_decimal_int(" Value a", a, decimals);
emit log_named_decimal_int(" Value b", b, decimals);
fail();
}
}
function assertGtDecimal(int a, int b, uint decimals, string memory err) internal {
if (a <= b) {
emit log_named_string("Error", err);
assertGtDecimal(a, b, decimals);
}
}
function assertGtDecimal(uint a, uint b, uint decimals) internal {
if (a <= b) {
emit log("Error: a > b not satisfied [decimal uint]");
emit log_named_decimal_uint(" Value a", a, decimals);
emit log_named_decimal_uint(" Value b", b, decimals);
fail();
}
}
function assertGtDecimal(uint a, uint b, uint decimals, string memory err) internal {
if (a <= b) {
emit log_named_string("Error", err);
assertGtDecimal(a, b, decimals);
}
}
function assertGe(uint a, uint b) internal {
if (a < b) {
emit log("Error: a >= b not satisfied [uint]");
emit log_named_uint(" Value a", a);
emit log_named_uint(" Value b", b);
fail();
}
}
function assertGe(uint a, uint b, string memory err) internal {
if (a < b) {
emit log_named_string("Error", err);
assertGe(a, b);
}
}
function assertGe(int a, int b) internal {
if (a < b) {
emit log("Error: a >= b not satisfied [int]");
emit log_named_int(" Value a", a);
emit log_named_int(" Value b", b);
fail();
}
}
function assertGe(int a, int b, string memory err) internal {
if (a < b) {
emit log_named_string("Error", err);
assertGe(a, b);
}
}
function assertGeDecimal(int a, int b, uint decimals) internal {
if (a < b) {
emit log("Error: a >= b not satisfied [decimal int]");
emit log_named_decimal_int(" Value a", a, decimals);
emit log_named_decimal_int(" Value b", b, decimals);
fail();
}
}
function assertGeDecimal(int a, int b, uint decimals, string memory err) internal {
if (a < b) {
emit log_named_string("Error", err);
assertGeDecimal(a, b, decimals);
}
}
function assertGeDecimal(uint a, uint b, uint decimals) internal {
if (a < b) {
emit log("Error: a >= b not satisfied [decimal uint]");
emit log_named_decimal_uint(" Value a", a, decimals);
emit log_named_decimal_uint(" Value b", b, decimals);
fail();
}
}
function assertGeDecimal(uint a, uint b, uint decimals, string memory err) internal {
if (a < b) {
emit log_named_string("Error", err);
assertGeDecimal(a, b, decimals);
}
}
function assertLt(uint a, uint b) internal {
if (a >= b) {
emit log("Error: a < b not satisfied [uint]");
emit log_named_uint(" Value a", a);
emit log_named_uint(" Value b", b);
fail();
}
}
function assertLt(uint a, uint b, string memory err) internal {
if (a >= b) {
emit log_named_string("Error", err);
assertLt(a, b);
}
}
function assertLt(int a, int b) internal {
if (a >= b) {
emit log("Error: a < b not satisfied [int]");
emit log_named_int(" Value a", a);
emit log_named_int(" Value b", b);
fail();
}
}
function assertLt(int a, int b, string memory err) internal {
if (a >= b) {
emit log_named_string("Error", err);
assertLt(a, b);
}
}
function assertLtDecimal(int a, int b, uint decimals) internal {
if (a >= b) {
emit log("Error: a < b not satisfied [decimal int]");
emit log_named_decimal_int(" Value a", a, decimals);
emit log_named_decimal_int(" Value b", b, decimals);
fail();
}
}
function assertLtDecimal(int a, int b, uint decimals, string memory err) internal {
if (a >= b) {
emit log_named_string("Error", err);
assertLtDecimal(a, b, decimals);
}
}
function assertLtDecimal(uint a, uint b, uint decimals) internal {
if (a >= b) {
emit log("Error: a < b not satisfied [decimal uint]");
emit log_named_decimal_uint(" Value a", a, decimals);
emit log_named_decimal_uint(" Value b", b, decimals);
fail();
}
}
function assertLtDecimal(uint a, uint b, uint decimals, string memory err) internal {
if (a >= b) {
emit log_named_string("Error", err);
assertLtDecimal(a, b, decimals);
}
}
function assertLe(uint a, uint b) internal {
if (a > b) {
emit log("Error: a <= b not satisfied [uint]");
emit log_named_uint(" Value a", a);
emit log_named_uint(" Value b", b);
fail();
}
}
function assertLe(uint a, uint b, string memory err) internal {
if (a > b) {
emit log_named_string("Error", err);
assertLe(a, b);
}
}
function assertLe(int a, int b) internal {
if (a > b) {
emit log("Error: a <= b not satisfied [int]");
emit log_named_int(" Value a", a);
emit log_named_int(" Value b", b);
fail();
}
}
function assertLe(int a, int b, string memory err) internal {
if (a > b) {
emit log_named_string("Error", err);
assertLe(a, b);
}
}
function assertLeDecimal(int a, int b, uint decimals) internal {
if (a > b) {
emit log("Error: a <= b not satisfied [decimal int]");
emit log_named_decimal_int(" Value a", a, decimals);
emit log_named_decimal_int(" Value b", b, decimals);
fail();
}
}
function assertLeDecimal(int a, int b, uint decimals, string memory err) internal {
if (a > b) {
emit log_named_string("Error", err);
assertLeDecimal(a, b, decimals);
}
}
function assertLeDecimal(uint a, uint b, uint decimals) internal {
if (a > b) {
emit log("Error: a <= b not satisfied [decimal uint]");
emit log_named_decimal_uint(" Value a", a, decimals);
emit log_named_decimal_uint(" Value b", b, decimals);
fail();
}
}
function assertLeDecimal(uint a, uint b, uint decimals, string memory err) internal {
if (a > b) {
emit log_named_string("Error", err);
assertGeDecimal(a, b, decimals);
}
}
function assertEq(string memory a, string memory b) internal {
if (keccak256(abi.encodePacked(a)) != keccak256(abi.encodePacked(b))) {
emit log("Error: a == b not satisfied [string]");
emit log_named_string(" Value a", a);
emit log_named_string(" Value b", b);
fail();
}
}
function assertEq(string memory a, string memory b, string memory err) internal {
if (keccak256(abi.encodePacked(a)) != keccak256(abi.encodePacked(b))) {
emit log_named_string("Error", err);
assertEq(a, b);
}
}
function checkEq0(bytes memory a, bytes memory b) internal pure returns (bool ok) {
ok = true;
if (a.length == b.length) {
for (uint i = 0; i < a.length; i++) {
if (a[i] != b[i]) {
ok = false;
}
}
} else {
ok = false;
}
}
function assertEq0(bytes memory a, bytes memory b) internal {
if (!checkEq0(a, b)) {
emit log("Error: a == b not satisfied [bytes]");
emit log_named_bytes(" Expected", a);
emit log_named_bytes(" Actual", b);
fail();
}
}
function assertEq0(bytes memory a, bytes memory b, string memory err) internal {
if (!checkEq0(a, b)) {
emit log_named_string("Error", err);
assertEq0(a, b);
}
}
}


@@ -0,0 +1,5 @@
// SPDX-License-Identifier: GPL-3.0-or-later
pragma solidity ^0.6.6;
contract Dapp {
}


@@ -0,0 +1,22 @@
// SPDX-License-Identifier: GPL-3.0-or-later
pragma solidity ^0.6.6;
import "ds-test/test.sol";
import "./Dapp.sol";
contract DappTest is DSTest {
Dapp dapp;
function setUp() public {
dapp = new Dapp();
}
function testFail_basic_sanity() public {
assertTrue(false);
}
function test_basic_sanity() public {
assertTrue(true);
}
}


@@ -0,0 +1,22 @@
//SPDX-License-Identifier: Unlicense
pragma solidity ^0.6.0;
import "hardhat/console.sol";
contract Greeter {
string private greeting;
constructor(string memory _greeting) public {
console.log("Deploying a Greeter with greeting:", _greeting);
greeting = _greeting;
}
function greet() public view returns (string memory) {
return greeting;
}
function setGreeting(string memory _greeting) public {
console.log("Changing greeting from '%s' to '%s'", greeting, _greeting);
greeting = _greeting;
}
}

(File diff suppressed because it is too large)


@@ -0,0 +1,55 @@
//! project tests
use ethers_solc::{
cache::SOLIDITY_FILES_CACHE_FILENAME, Project, ProjectCompileOutput, ProjectPathsConfig,
};
use std::path::PathBuf;
use tempdir::TempDir;
#[test]
fn can_compile_hardhat_sample() {
let tmp_dir = TempDir::new("root").unwrap();
let cache = tmp_dir.path().join("cache");
let cache = cache.join(SOLIDITY_FILES_CACHE_FILENAME);
let artifacts = tmp_dir.path().join("artifacts");
let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample");
let paths = ProjectPathsConfig::builder()
.cache(cache)
.sources(root.join("contracts"))
.artifacts(artifacts)
.lib(root.join("node_modules"))
.root(root)
.build()
.unwrap();
// let paths = ProjectPathsConfig::hardhat(root).unwrap();
let project = Project::builder().paths(paths).build().unwrap();
assert_ne!(project.compile().unwrap(), ProjectCompileOutput::Unchanged);
// nothing to compile
assert_eq!(project.compile().unwrap(), ProjectCompileOutput::Unchanged);
}
#[test]
fn can_compile_dapp_sample() {
let tmp_dir = TempDir::new("root").unwrap();
let cache = tmp_dir.path().join("cache");
let cache = cache.join(SOLIDITY_FILES_CACHE_FILENAME);
let artifacts = tmp_dir.path().join("out");
let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
let paths = ProjectPathsConfig::builder()
.cache(cache)
.sources(root.join("src"))
.artifacts(artifacts)
.lib(root.join("lib"))
.root(root)
.build()
.unwrap();
// let paths = ProjectPathsConfig::dapptools(root).unwrap();
let project = Project::builder().paths(paths).build().unwrap();
assert_ne!(project.compile().unwrap(), ProjectCompileOutput::Unchanged);
// nothing to compile
assert_eq!(project.compile().unwrap(), ProjectCompileOutput::Unchanged);
}