refactor(solc): add new io error with path info (#680)

* refactor(solc): add new io error with path info

* chore: rustfmt
Matthias Seitz 2021-12-12 18:10:40 +01:00 committed by GitHub
parent ab8b5233d2
commit 5dec757493
9 changed files with 134 additions and 66 deletions
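For orientation before the per-file diffs: the change introduces a `SolcIoError` that pairs a `std::io::Error` with the path that caused it, and threads it through the crate via a `SolcError::io(err, path)` helper. The sketch below condenses the new pieces from the `error.rs` hunks further down (all other `SolcError` variants omitted; it assumes the crate's existing `thiserror` dependency):

```rust
use std::{io, path::PathBuf};
use thiserror::Error;

/// IO error that also carries the path which triggered it, so a bare
/// "No such file or directory" becomes actionable.
#[derive(Debug, Error)]
#[error("\"{}\": {io}", self.path.display())]
pub struct SolcIoError {
    io: io::Error,
    path: PathBuf,
}

impl SolcIoError {
    pub fn new(io: io::Error, path: impl Into<PathBuf>) -> Self {
        Self { io, path: path.into() }
    }
}

/// The wrapped io::Error stays recoverable.
impl From<SolcIoError> for io::Error {
    fn from(err: SolcIoError) -> Self {
        err.io
    }
}

/// Crate-wide error enum: the `Io` variant now wraps `SolcIoError`
/// instead of a bare `std::io::Error` (other variants omitted in this sketch).
#[derive(Debug, Error)]
pub enum SolcError {
    #[error(transparent)]
    Io(#[from] SolcIoError),
}

impl SolcError {
    /// Helper used at call sites throughout the diff, e.g.
    /// `fs::File::open(path).map_err(|err| SolcError::io(err, path))?`
    pub(crate) fn io(err: io::Error, path: impl Into<PathBuf>) -> Self {
        SolcIoError::new(err, path).into()
    }
}
```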

View File

@@ -6,12 +6,12 @@ use md5::Digest;
use semver::Version;
use std::{
collections::BTreeMap,
fmt, fs, io,
fmt, fs,
path::{Path, PathBuf},
str::FromStr,
};
use crate::{compile::*, remappings::Remapping, utils};
use crate::{compile::*, error::SolcIoError, remappings::Remapping, utils};
use ethers_core::abi::Address;
use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
@@ -30,7 +30,7 @@ pub struct CompilerInput {
impl CompilerInput {
/// Reads all contracts found under the path
pub fn new(path: impl AsRef<Path>) -> io::Result<Self> {
pub fn new(path: impl AsRef<Path>) -> Result<Self, SolcIoError> {
Source::read_all_from(path.as_ref()).map(Self::with_sources)
}
@@ -382,17 +382,18 @@ pub struct Source {
impl Source {
/// Reads the file content
pub fn read(file: impl AsRef<Path>) -> io::Result<Self> {
Ok(Self { content: fs::read_to_string(file.as_ref())? })
pub fn read(file: impl AsRef<Path>) -> Result<Self, SolcIoError> {
let file = file.as_ref();
Ok(Self { content: fs::read_to_string(file).map_err(|err| SolcIoError::new(err, file))? })
}
/// Finds all source files under the given dir path and reads them all
pub fn read_all_from(dir: impl AsRef<Path>) -> io::Result<Sources> {
Self::read_all(utils::source_files(dir)?)
pub fn read_all_from(dir: impl AsRef<Path>) -> Result<Sources, SolcIoError> {
Self::read_all(utils::source_files(dir))
}
/// Reads all files
pub fn read_all<T, I>(files: I) -> io::Result<Sources>
pub fn read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
where
I: IntoIterator<Item = T>,
T: Into<PathBuf>,
@@ -421,17 +422,22 @@ impl Source {
#[cfg(feature = "async")]
impl Source {
/// async version of `Self::read`
pub async fn async_read(file: impl AsRef<Path>) -> io::Result<Self> {
Ok(Self { content: tokio::fs::read_to_string(file.as_ref()).await? })
pub async fn async_read(file: impl AsRef<Path>) -> Result<Self, SolcIoError> {
let file = file.as_ref();
Ok(Self {
content: tokio::fs::read_to_string(file)
.await
.map_err(|err| SolcIoError::new(err, file))?,
})
}
/// Finds all source files under the given dir path and reads them all
pub async fn async_read_all_from(dir: impl AsRef<Path>) -> io::Result<Sources> {
Self::async_read_all(utils::source_files(dir.as_ref())?).await
pub async fn async_read_all_from(dir: impl AsRef<Path>) -> Result<Sources, SolcIoError> {
Self::async_read_all(utils::source_files(dir.as_ref())).await
}
/// async version of `Self::read_all`
pub async fn async_read_all<T, I>(files: I) -> io::Result<Sources>
pub async fn async_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
where
I: IntoIterator<Item = T>,
T: Into<PathBuf>,

View File

@@ -64,7 +64,7 @@ impl SolFilesCache {
pub fn read(path: impl AsRef<Path>) -> Result<Self> {
let path = path.as_ref();
tracing::trace!("reading solfiles cache at {}", path.display());
let file = fs::File::open(path)?;
let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?;
let file = std::io::BufReader::new(file);
let cache = serde_json::from_reader(file)?;
tracing::trace!("done");
@@ -74,7 +74,7 @@ impl SolFilesCache {
/// Write the cache to json file
pub fn write(&self, path: impl AsRef<Path>) -> Result<()> {
let path = path.as_ref();
let file = fs::File::create(path)?;
let file = fs::File::create(path).map_err(|err| SolcError::io(err, path))?;
tracing::trace!("writing cache to json file");
serde_json::to_writer_pretty(file, self)?;
tracing::trace!("cache file located: {}", path.display());
@@ -198,13 +198,16 @@ impl SolFilesCache {
#[cfg(feature = "async")]
impl SolFilesCache {
pub async fn async_read(path: impl AsRef<Path>) -> Result<Self> {
let content = tokio::fs::read_to_string(path.as_ref()).await?;
let path = path.as_ref();
let content =
tokio::fs::read_to_string(path).await.map_err(|err| SolcError::io(err, path))?;
Ok(serde_json::from_str(&content)?)
}
pub async fn async_write(&self, path: impl AsRef<Path>) -> Result<()> {
let path = path.as_ref();
let content = serde_json::to_vec_pretty(self)?;
Ok(tokio::fs::write(path.as_ref(), content).await?)
Ok(tokio::fs::write(path, content).await.map_err(|err| SolcError::io(err, path))?)
}
}
@@ -236,12 +239,18 @@ impl SolFilesCacheBuilder {
let solc_config =
self.solc_config.map(Ok).unwrap_or_else(|| SolcConfig::builder().build())?;
let root = self.root.map(Ok).unwrap_or_else(std::env::current_dir)?;
let root = self
.root
.map(Ok)
.unwrap_or_else(std::env::current_dir)
.map_err(|err| SolcError::io(err, "."))?;
let mut files = BTreeMap::new();
for (file, source) in sources {
let last_modification_date = fs::metadata(&file)?
.modified()?
let last_modification_date = fs::metadata(&file)
.map_err(|err| SolcError::io(err, file.clone()))?
.modified()
.map_err(|err| SolcError::io(err, file.clone()))?
.duration_since(UNIX_EPOCH)
.map_err(|err| SolcError::solc(err.to_string()))?
.as_millis() as u64;
@@ -268,7 +277,9 @@ impl SolFilesCacheBuilder {
if dest.exists() {
// read the existing cache and extend it by the files that changed
// (if we just wrote to the cache file, we'd overwrite the existing data)
let reader = std::io::BufReader::new(File::open(dest)?);
let reader = std::io::BufReader::new(
File::open(dest).map_err(|err| SolcError::io(err, dest))?,
);
let mut cache: SolFilesCache = serde_json::from_reader(reader)?;
assert_eq!(cache.format, format);
cache.files.extend(files);

View File

@@ -247,7 +247,8 @@ impl Solc {
let version = self.version_short()?;
let mut version_path = svm::version_path(version.to_string().as_str());
version_path.push(format!("solc-{}", version.to_string().as_str()));
let content = std::fs::read(version_path)?;
let content =
std::fs::read(&version_path).map_err(|err| SolcError::io(err, version_path))?;
use sha2::Digest;
let mut hasher = sha2::Sha256::new();
@@ -265,6 +266,7 @@ impl Solc {
/// Convenience function for compiling all sources under the given path
pub fn compile_source(&self, path: impl AsRef<Path>) -> Result<CompilerOutput> {
let path = path.as_ref();
self.compile(&CompilerInput::new(path)?)
}
@@ -302,11 +304,12 @@ impl Solc {
.stdin(Stdio::piped())
.stderr(Stdio::piped())
.stdout(Stdio::piped())
.spawn()?;
.spawn()
.map_err(|err| SolcError::io(err, &self.solc))?;
let stdin = child.stdin.take().unwrap();
serde_json::to_writer(stdin, input)?;
compile_output(child.wait_with_output()?)
compile_output(child.wait_with_output().map_err(|err| SolcError::io(err, &self.solc))?)
}
pub fn version_short(&self) -> Result<Version> {
@@ -322,7 +325,8 @@ impl Solc {
.stdin(Stdio::piped())
.stderr(Stdio::piped())
.stdout(Stdio::piped())
.output()?,
.output()
.map_err(|err| SolcError::io(err, &self.solc))?,
)
}
}
@@ -363,11 +367,14 @@ impl Solc {
.stdin(Stdio::piped())
.stderr(Stdio::piped())
.stdout(Stdio::piped())
.spawn()?;
.spawn()
.map_err(|err| SolcError::io(err, &self.solc))?;
let stdin = child.stdin.as_mut().unwrap();
stdin.write_all(&content).await?;
stdin.flush().await?;
compile_output(child.wait_with_output().await?)
stdin.write_all(&content).await.map_err(|err| SolcError::io(err, &self.solc))?;
stdin.flush().await.map_err(|err| SolcError::io(err, &self.solc))?;
compile_output(
child.wait_with_output().await.map_err(|err| SolcError::io(err, &self.solc))?,
)
}
pub async fn async_version(&self) -> Result<Version> {
@@ -377,9 +384,11 @@ impl Solc {
.stdin(Stdio::piped())
.stderr(Stdio::piped())
.stdout(Stdio::piped())
.spawn()?
.spawn()
.map_err(|err| SolcError::io(err, &self.solc))?
.wait_with_output()
.await?,
.await
.map_err(|err| SolcError::io(err, &self.solc))?,
)
}
@@ -470,7 +479,8 @@ fn version_from_output(output: Output) -> Result<Version> {
.stdout
.lines()
.last()
.ok_or_else(|| SolcError::solc("version not found in solc output"))??;
.ok_or_else(|| SolcError::solc("version not found in solc output"))?
.map_err(|err| SolcError::msg(format!("Failed to read output: {}", err)))?;
// NOTE: semver doesn't like `+` in g++ in build metadata which is invalid semver
Ok(Version::from_str(&version.trim_start_matches("Version: ").replace(".g++", ".gcc"))?)
} else {

View File

@@ -1,7 +1,7 @@
use crate::{
artifacts::{CompactContract, CompactContractRef, Contract, Settings},
cache::SOLIDITY_FILES_CACHE_FILENAME,
error::{Result, SolcError},
error::{Result, SolcError, SolcIoError},
hh::HardhatArtifact,
remappings::Remapping,
CompilerOutput,
@@ -51,12 +51,12 @@ impl ProjectPathsConfig {
/// Creates a new config with the current directory as the root
pub fn current_hardhat() -> Result<Self> {
Self::hardhat(std::env::current_dir()?)
Self::hardhat(std::env::current_dir().map_err(|err| SolcError::io(err, "."))?)
}
/// Creates a new config with the current directory as the root
pub fn current_dapptools() -> Result<Self> {
Self::dapptools(std::env::current_dir()?)
Self::dapptools(std::env::current_dir().map_err(|err| SolcError::io(err, "."))?)
}
}
@@ -68,7 +68,8 @@ pub enum PathStyle {
impl PathStyle {
pub fn paths(&self, root: impl AsRef<Path>) -> Result<ProjectPathsConfig> {
let root = std::fs::canonicalize(root)?;
let root = root.as_ref();
let root = std::fs::canonicalize(root).map_err(|err| SolcError::io(err, root))?;
Ok(match self {
PathStyle::Dapptools => ProjectPathsConfig::builder()
@@ -157,9 +158,13 @@ impl ProjectPathsConfigBuilder {
self
}
pub fn build(self) -> io::Result<ProjectPathsConfig> {
let root = self.root.map(Ok).unwrap_or_else(std::env::current_dir)?;
let root = std::fs::canonicalize(root)?;
pub fn build(self) -> std::result::Result<ProjectPathsConfig, SolcIoError> {
let root = self
.root
.map(Ok)
.unwrap_or_else(std::env::current_dir)
.map_err(|err| SolcIoError::new(err, "."))?;
let root = std::fs::canonicalize(&root).map_err(|err| SolcIoError::new(err, root))?;
Ok(ProjectPathsConfig {
cache: self
@@ -287,7 +292,8 @@ pub trait ArtifactOutput {
}
fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
let file = fs::File::open(path.as_ref())?;
let path = path.as_ref();
let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?;
let file = io::BufReader::new(file);
Ok(serde_json::from_reader(file)?)
}
@@ -362,7 +368,8 @@ impl ArtifactOutput for MinimalCombinedArtifacts {
})?;
}
let min = CompactContractRef::from(contract);
fs::write(&file, serde_json::to_vec_pretty(&min)?)?
fs::write(&file, serde_json::to_vec_pretty(&min)?)
.map_err(|err| SolcError::io(err, file))?
}
}
Ok(())
@@ -386,7 +393,8 @@ impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback {
}
fn read_cached_artifact(path: impl AsRef<Path>) -> Result<Self::Artifact> {
let content = fs::read_to_string(path)?;
let path = path.as_ref();
let content = fs::read_to_string(path).map_err(|err| SolcError::io(err, path))?;
if let Ok(a) = serde_json::from_str(&content) {
Ok(a)
} else {
@@ -429,17 +437,18 @@ impl fmt::Display for AllowedLibPaths {
}
impl<T: Into<PathBuf>> TryFrom<Vec<T>> for AllowedLibPaths {
type Error = std::io::Error;
type Error = SolcIoError;
fn try_from(libs: Vec<T>) -> std::result::Result<Self, Self::Error> {
let libs = libs
.into_iter()
.map(|lib| {
let path: PathBuf = lib.into();
let lib = std::fs::canonicalize(path)?;
let lib =
std::fs::canonicalize(&path).map_err(|err| SolcIoError::new(err, path))?;
Ok(lib)
})
.collect::<std::result::Result<Vec<_>, std::io::Error>>()?;
.collect::<std::result::Result<Vec<_>, _>>()?;
Ok(AllowedLibPaths(libs))
}
}

View File

@@ -1,3 +1,4 @@
use std::{io, path::PathBuf};
use thiserror::Error;
pub type Result<T> = std::result::Result<T, SolcError>;
@@ -21,7 +22,7 @@ pub enum SolcError {
SerdeJson(#[from] serde_json::Error),
/// Filesystem IO error
#[error(transparent)]
Io(#[from] std::io::Error),
Io(#[from] SolcIoError),
#[cfg(feature = "svm")]
#[error(transparent)]
SvmError(#[from] svm::SolcVmError),
@@ -35,6 +36,9 @@ pub enum SolcError {
}
impl SolcError {
pub(crate) fn io(err: io::Error, path: impl Into<PathBuf>) -> Self {
SolcIoError::new(err, path).into()
}
pub(crate) fn solc(msg: impl Into<String>) -> Self {
SolcError::SolcError(msg.into())
}
@@ -42,3 +46,22 @@ impl SolcError {
SolcError::Message(msg.into())
}
}
#[derive(Debug, Error)]
#[error("\"{}\": {io}", self.path.display())]
pub struct SolcIoError {
io: io::Error,
path: PathBuf,
}
impl SolcIoError {
pub fn new(io: io::Error, path: impl Into<PathBuf>) -> Self {
Self { io, path: path.into() }
}
}
impl From<SolcIoError> for io::Error {
fn from(err: SolcIoError) -> Self {
err.io
}
}
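As a quick illustration of what the path-aware error buys downstream (a hypothetical snippet, not part of this commit; it assumes the `ethers_solc::error::SolcIoError` path implied by the `pub mod error` declaration in the lib.rs hunk below):

```rust
use std::path::Path;

use ethers_solc::error::SolcIoError;

fn read_source(path: &Path) -> Result<String, SolcIoError> {
    // Previously the caller only saw the bare io::Error; now the path
    // that failed is attached to the error itself.
    std::fs::read_to_string(path).map_err(|err| SolcIoError::new(err, path))
}

fn main() {
    if let Err(err) = read_source(Path::new("contracts/Missing.sol")) {
        // Prints roughly:
        //   "contracts/Missing.sol": No such file or directory (os error 2)
        eprintln!("{}", err);
        // The wrapped io::Error is still recoverable via the From impl.
        let _io: std::io::Error = err.into();
    }
}
```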

View File

@@ -73,7 +73,8 @@ impl ArtifactOutput for HardhatArtifacts {
})?;
}
let artifact = Self::contract_to_artifact(file, name, contract.clone());
fs::write(&artifact_file, serde_json::to_vec_pretty(&artifact)?)?
fs::write(&artifact_file, serde_json::to_vec_pretty(&artifact)?)
.map_err(|err| SolcError::io(err, artifact_file))?
}
}
Ok(())

View File

@@ -26,10 +26,14 @@ use crate::{artifacts::Source, cache::SolFilesCache};
pub mod error;
pub mod utils;
use crate::{artifacts::Sources, cache::PathMap};
use crate::{
artifacts::Sources,
cache::PathMap,
error::{SolcError, SolcIoError},
};
use error::Result;
use std::{
borrow::Cow, collections::BTreeMap, convert::TryInto, fmt, fs, io, marker::PhantomData,
borrow::Cow, collections::BTreeMap, convert::TryInto, fmt, fs, marker::PhantomData,
path::PathBuf,
};
@@ -117,7 +121,7 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
if let Some(cache_dir) = self.paths.cache.parent() {
tracing::trace!("creating cache file parent directory \"{}\"", cache_dir.display());
fs::create_dir_all(cache_dir)?
fs::create_dir_all(cache_dir).map_err(|err| SolcError::io(err, cache_dir))?
}
tracing::trace!("writing cache file to \"{}\"", self.paths.cache.display());
@@ -128,9 +132,9 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
/// Returns all sources found under the project's configured sources path
#[tracing::instrument(skip_all, fields(name = "sources"))]
pub fn sources(&self) -> io::Result<Sources> {
pub fn sources(&self) -> Result<Sources> {
tracing::trace!("reading all sources from \"{}\"", self.paths.sources.display());
Source::read_all_from(&self.paths.sources)
Ok(Source::read_all_from(&self.paths.sources)?)
}
/// This emits the cargo [`rerun-if-changed`](https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath) instruction.
@@ -161,7 +165,7 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
fn resolved_libraries(
&self,
sources: &Sources,
) -> io::Result<BTreeMap<PathBuf, (Source, PathBuf)>> {
) -> Result<BTreeMap<PathBuf, (Source, PathBuf)>> {
let mut libs = BTreeMap::default();
for source in sources.values() {
for import in source.parse_imports() {
@@ -462,14 +466,16 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
}
/// Removes the project's artifacts and cache file
pub fn cleanup(&self) -> Result<()> {
pub fn cleanup(&self) -> std::result::Result<(), SolcIoError> {
tracing::trace!("clean up project");
if self.paths.cache.exists() {
std::fs::remove_file(&self.paths.cache)?;
std::fs::remove_file(&self.paths.cache)
.map_err(|err| SolcIoError::new(err, self.paths.cache.clone()))?;
tracing::trace!("removed cache file \"{}\"", self.paths.cache.display());
}
if self.paths.artifacts.exists() {
std::fs::remove_dir_all(&self.paths.artifacts)?;
std::fs::remove_dir_all(&self.paths.artifacts)
.map_err(|err| SolcIoError::new(err, self.paths.artifacts.clone()))?;
tracing::trace!("removed artifacts dir \"{}\"", self.paths.artifacts.display());
}
Ok(())

View File

@@ -132,16 +132,19 @@ impl Remapping {
// nothing to find
return Ok(Vec::new())
}
let mut paths = std::fs::read_dir(path)?.into_iter().collect::<Vec<_>>();
let mut paths = std::fs::read_dir(path)
.map_err(|err| SolcError::io(err, path))?
.into_iter()
.collect::<Vec<_>>();
let mut remappings = Vec::new();
while let Some(path) = paths.pop() {
let path = path?.path();
while let Some(p) = paths.pop() {
let path = p.map_err(|err| SolcError::io(err, path))?.path();
// get all the directories inside a file if it's a valid dir
if let Ok(dir) = std::fs::read_dir(&path) {
for inner in dir {
let inner = inner?;
let inner = inner.map_err(|err| SolcError::io(err, &path))?;
let path = inner.path().display().to_string();
let path = path.rsplit('/').next().unwrap().to_string();
if path != DAPPTOOLS_CONTRACTS_DIR && path != JS_CONTRACTS_DIR {

View File

@@ -49,17 +49,16 @@ pub fn find_version_pragma(contract: &str) -> Option<&str> {
///
/// ```no_run
/// use ethers_solc::utils;
/// let sources = utils::source_files("./contracts").unwrap();
/// let sources = utils::source_files("./contracts");
/// ```
pub fn source_files(root: impl AsRef<Path>) -> walkdir::Result<Vec<PathBuf>> {
let files = WalkDir::new(root)
pub fn source_files(root: impl AsRef<Path>) -> Vec<PathBuf> {
WalkDir::new(root)
.into_iter()
.filter_map(Result::ok)
.filter(|e| e.file_type().is_file())
.filter(|e| e.path().extension().map(|ext| ext == "sol").unwrap_or_default())
.map(|e| e.path().into())
.collect();
Ok(files)
.collect()
}
/// Returns the source name for the given source path, the ancestors of the root path
@@ -192,7 +191,7 @@ mod tests {
File::create(&file_c).unwrap();
File::create(&file_d).unwrap();
let files: HashSet<_> = source_files(tmp_dir.path()).unwrap().into_iter().collect();
let files: HashSet<_> = source_files(tmp_dir.path()).into_iter().collect();
let expected: HashSet<_> = [file_a, file_b, file_c, file_d].into();
assert_eq!(files, expected);
}