2022-01-05 21:46:57 +00:00
//! Resolution of the entire dependency graph for a project.
//!
//! This module implements the core logic in taking all contracts of a project and creating a
//! resolved graph with applied remappings for all source contracts.
//!
//! Some constraints we're working with when resolving contracts
//!
//! 1. Each file can contain several source units and can have any number of imports/dependencies
//! (using the terms interchangeably). Each dependency can declare a version range that it is
//! compatible with via its solidity version pragma.
//! 2. A dependency can be imported from any directory,
//! see `Remappings`
//!
//! Finding all dependencies is fairly simple: we do a DFS starting from the source
//! contracts.
//!
2022-02-05 14:07:37 +00:00
//! ## Solc version auto-detection
//!
//! Solving a constraint graph is an NP-hard problem. The algorithm for finding the "best" solution
//! makes several assumptions and tries to find a version of "Solc" that is compatible with all
//! source files.
//!
//! The algorithm employed here is fairly simple, we simply do a DFS over all the source files and
//! find the set of Solc versions that the file and all its imports are compatible with, and then we
//! try to find a single Solc version that is compatible with all the files. This is effectively the
//! intersection of all version sets.
//!
//! We always try to activate the highest (installed) solc version first. Uninstalled solc is only
//! used if this version is the only compatible version for a single file or in the intersection of
//! all version sets.
//!
//! This leads to finding the optimal version, if there is one. If there is no single Solc version
//! that is compatible with all sources and their imports, then suddenly this becomes a very
//! difficult problem, because what would be the "best" solution. In this case, just choose the
//! latest (installed) Solc version and try to minimize the number of Solc versions used.
//!
2022-01-05 21:46:57 +00:00
//! ## Performance
//!
//! Note that this is a relatively performance-critical portion of the ethers-solc preprocessing.
//! The data that needs to be processed is proportional to the size of the dependency
//! graph, which can, depending on the project, often be quite large.
//!
//! Note that, unlike the solidity compiler, we work with the filesystem, where we have to resolve
//! remappings and follow relative paths. We're also limiting the nodes in the graph to solidity
//! files, since we're only interested in their
//! [version pragma](https://docs.soliditylang.org/en/develop/layout-of-source-files.html#version-pragma),
//! which is defined on a per source file basis.
2022-08-12 19:11:08 +00:00
use crate ::{ error ::Result , utils , IncludePaths , ProjectPathsConfig , SolcError , Source , Sources } ;
use parse ::{ SolData , SolDataUnit , SolImport } ;
use rayon ::prelude ::* ;
use semver ::VersionReq ;
2022-01-05 21:46:57 +00:00
use std ::{
2022-01-17 12:05:52 +00:00
collections ::{ HashMap , HashSet , VecDeque } ,
2022-02-19 13:55:21 +00:00
fmt , io ,
2022-01-11 10:02:57 +00:00
path ::{ Path , PathBuf } ,
2022-01-05 21:46:57 +00:00
} ;
2022-04-04 22:50:10 +00:00
mod parse ;
2022-02-19 13:55:21 +00:00
mod tree ;
2022-04-04 22:50:10 +00:00
2023-01-03 13:14:01 +00:00
use crate ::utils ::find_case_sensitive_existing_file ;
2022-05-04 05:33:25 +00:00
pub use parse ::SolImportAlias ;
2022-02-19 13:55:21 +00:00
pub use tree ::{ print , Charset , TreeOptions } ;
2022-02-04 16:20:24 +00:00
/// The underlying edges of the graph which only contains the raw relationship data.
///
/// This is kept separate from the `Graph` as the `Node`s get consumed when the `Solc` to `Sources`
/// set is determined.
#[derive(Debug)]
pub struct GraphEdges {
    /// The indices of `edges` correspond to the `nodes`. That is, `edges[0]`
    /// is the set of outgoing edges for `nodes[0]`.
    edges: Vec<Vec<usize>>,
    /// index maps for a solidity file to an index, for fast lookup.
    indices: HashMap<PathBuf, usize>,
    /// reverse of `indices` for reverse lookup
    rev_indices: HashMap<usize, PathBuf>,
    /// the identified version requirement (from the version pragma) of a file, if any
    versions: HashMap<usize, Option<VersionReq>>,
    /// the extracted data from the source file
    ///
    /// Populated when a `Graph` is split, see `Graph::split`.
    data: HashMap<usize, SolData>,
    /// with how many input files we started with, corresponds to `let input_files =
    /// nodes[..num_input_files]`.
    ///
    /// Combined with the `indices` this way we can determine if a file was originally added to
    /// the graph as input or was added as resolved import, see [`Self::is_input_file()`]
    num_input_files: usize,
    /// tracks all imports that we failed to resolve for a file
    ///
    /// Entries are `(unresolved import path, path of the file that declared the import)`.
    unresolved_imports: HashSet<(PathBuf, PathBuf)>,
    /// tracks additional include paths resolved by scanning all imports of the graph
    ///
    /// Absolute imports, like `import "src/Contract.sol"` are possible, but this does not play
    /// nice with the standard-json import format, since the VFS won't be able to resolve
    /// "src/Contract.sol" without help via `--include-path`
    #[allow(unused)]
    resolved_solc_include_paths: IncludePaths,
}
impl GraphEdges {
2022-03-15 15:58:33 +00:00
/// How many files are source files
pub fn num_source_files ( & self ) -> usize {
self . num_input_files
}
/// Returns an iterator over all file indices
pub fn files ( & self ) -> impl Iterator < Item = usize > + '_ {
0 .. self . edges . len ( )
}
/// Returns an iterator over all source file indices
pub fn source_files ( & self ) -> impl Iterator < Item = usize > + '_ {
0 .. self . num_input_files
}
/// Returns an iterator over all library files
pub fn library_files ( & self ) -> impl Iterator < Item = usize > + '_ {
self . files ( ) . skip ( self . num_input_files )
}
2022-08-12 19:11:08 +00:00
/// Returns all additional `--include-paths`
pub fn include_paths ( & self ) -> & IncludePaths {
& self . resolved_solc_include_paths
}
2022-06-02 17:31:02 +00:00
/// Returns all imports that we failed to resolve
2022-08-01 16:46:15 +00:00
pub fn unresolved_imports ( & self ) -> & HashSet < ( PathBuf , PathBuf ) > {
2022-06-02 17:31:02 +00:00
& self . unresolved_imports
}
2022-02-04 16:20:24 +00:00
/// Returns a list of nodes the given node index points to for the given kind.
pub fn imported_nodes ( & self , from : usize ) -> & [ usize ] {
& self . edges [ from ]
}
2022-03-09 18:52:40 +00:00
/// Returns an iterator that yields all imports of a node and all their imports
pub fn all_imported_nodes ( & self , from : usize ) -> impl Iterator < Item = usize > + '_ {
NodesIter ::new ( from , self ) . skip ( 1 )
}
2022-02-04 16:20:24 +00:00
/// Returns all files imported by the given file
pub fn imports ( & self , file : impl AsRef < Path > ) -> HashSet < & PathBuf > {
if let Some ( start ) = self . indices . get ( file . as_ref ( ) ) . copied ( ) {
NodesIter ::new ( start , self ) . skip ( 1 ) . map ( move | idx | & self . rev_indices [ & idx ] ) . collect ( )
} else {
HashSet ::new ( )
}
}
2022-03-09 18:52:40 +00:00
/// Returns the id of the given file
pub fn node_id ( & self , file : impl AsRef < Path > ) -> usize {
self . indices [ file . as_ref ( ) ]
}
2022-03-15 15:58:33 +00:00
/// Returns the path of the given node
pub fn node_path ( & self , id : usize ) -> & PathBuf {
& self . rev_indices [ & id ]
}
2022-02-04 16:20:24 +00:00
/// Returns true if the `file` was originally included when the graph was first created and not
/// added when all `imports` were resolved
pub fn is_input_file ( & self , file : impl AsRef < Path > ) -> bool {
if let Some ( idx ) = self . indices . get ( file . as_ref ( ) ) . copied ( ) {
idx < self . num_input_files
} else {
false
}
}
/// Returns the `VersionReq` for the given file
pub fn version_requirement ( & self , file : impl AsRef < Path > ) -> Option < & VersionReq > {
self . indices
. get ( file . as_ref ( ) )
. and_then ( | idx | self . versions . get ( idx ) )
. and_then ( | v | v . as_ref ( ) )
}
2022-04-04 22:50:10 +00:00
/// Returns those library files that will be required as `linkReferences` by the given file
///
/// This is a preprocess function that attempts to resolve those libraries that will the
/// solidity `file` will be required to link. And further restrict this list to libraries
/// that won't be inlined See also [SolLibrary](parse::SolLibrary)
pub fn get_link_references ( & self , file : impl AsRef < Path > ) -> HashSet < & PathBuf > {
let mut link_references = HashSet ::new ( ) ;
for import in self . all_imported_nodes ( self . node_id ( file ) ) {
let data = & self . data [ & import ] ;
if data . has_link_references ( ) {
link_references . insert ( & self . rev_indices [ & import ] ) ;
}
}
link_references
}
2022-02-04 16:20:24 +00:00
}
/// Represents a fully-resolved solidity dependency graph. Each node in the graph
/// is a file and edges represent dependencies between them.
///
/// See also <https://docs.soliditylang.org/en/latest/layout-of-source-files.html?highlight=import#importing-other-source-files>
#[derive(Debug)]
pub struct Graph {
    /// all nodes in the project, a `Node` represents a single file
    nodes: Vec<Node>,
    /// relationship of the nodes
    edges: GraphEdges,
    /// the root of the project this graph represents
    #[allow(unused)]
    root: PathBuf,
}
impl Graph {
    /// Print the graph to `StdOut`
    pub fn print(&self) {
        self.print_with_options(Default::default())
    }

    /// Print the graph to `StdOut` using the provided `TreeOptions`
    pub fn print_with_options(&self, opts: TreeOptions) {
        let stdout = io::stdout();
        let mut out = stdout.lock();
        tree::print(self, &opts, &mut out).expect("failed to write to stdout.")
    }

    /// Returns a list of nodes the given node index points to for the given kind.
    pub fn imported_nodes(&self, from: usize) -> &[usize] {
        self.edges.imported_nodes(from)
    }

    /// Returns an iterator that yields all imports of a node and all their imports
    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
        self.edges.all_imported_nodes(from)
    }

    /// Returns `true` if the given node has any outgoing edges.
    pub(crate) fn has_outgoing_edges(&self, index: usize) -> bool {
        !self.edges.edges[index].is_empty()
    }

    /// Returns all the resolved files and their index in the graph
    pub fn files(&self) -> &HashMap<PathBuf, usize> {
        &self.edges.indices
    }

    /// Gets a node by index.
    ///
    /// # Panics
    ///
    /// if the `index` node id is not included in the graph
    pub fn node(&self, index: usize) -> &Node {
        &self.nodes[index]
    }

    /// Returns a wrapper that renders the node at `index` relative to the project root.
    pub(crate) fn display_node(&self, index: usize) -> DisplayNode {
        DisplayNode { node: self.node(index), root: &self.root }
    }

    /// Returns an iterator that yields all nodes of the dependency tree that the given node id
    /// spans, starting with the node itself.
    ///
    /// # Panics
    ///
    /// if the `start` node id is not included in the graph
    pub fn node_ids(&self, start: usize) -> impl Iterator<Item = usize> + '_ {
        NodesIter::new(start, &self.edges)
    }

    /// Same as `Self::node_ids` but returns the actual `Node`
    pub fn nodes(&self, start: usize) -> impl Iterator<Item = &Node> + '_ {
        self.node_ids(start).map(move |idx| self.node(idx))
    }

    /// Consumes the graph and splits it into the `(path, source)` pairs of all nodes and the
    /// remaining `GraphEdges`.
    fn split(self) -> (Vec<(PathBuf, Source)>, GraphEdges) {
        let Graph { nodes, mut edges, .. } = self;
        // need to move the extracted data to the edges, essentially splitting the node so we have
        // access to the data at a later stage in the compile pipeline
        let mut sources = Vec::new();
        for (idx, node) in nodes.into_iter().enumerate() {
            let Node { path, source, data } = node;
            sources.push((path, source));
            edges.data.insert(idx, data);
        }
        (sources, edges)
    }

    /// Consumes the `Graph`, effectively splitting the `nodes` and the `GraphEdges` off and
    /// returning the `nodes` converted to `Sources`
    pub fn into_sources(self) -> (Sources, GraphEdges) {
        let (sources, edges) = self.split();
        (sources.into_iter().collect(), edges)
    }

    /// Returns an iterator that yields only those nodes that represent input files.
    /// See `Self::resolve_sources`
    /// This won't yield any resolved library nodes
    pub fn input_nodes(&self) -> impl Iterator<Item = &Node> {
        self.nodes.iter().take(self.edges.num_input_files)
    }

    /// Returns all files imported by the given file
    pub fn imports(&self, path: impl AsRef<Path>) -> HashSet<&PathBuf> {
        self.edges.imports(path)
    }

    /// Resolves a number of sources within the given config
    ///
    /// Performs a BFS over the given input `sources`: every file is parsed for its imports and
    /// each import is resolved against the project's paths/remappings; newly discovered files
    /// are queued until the full transitive closure has been visited.
    pub fn resolve_sources(paths: &ProjectPathsConfig, sources: Sources) -> Result<Graph> {
        /// checks if the given target path was already resolved, if so it adds its id to the list
        /// of resolved imports. If it hasn't been resolved yet, it queues in the file for
        /// processing
        fn add_node(
            unresolved: &mut VecDeque<(PathBuf, Node)>,
            index: &mut HashMap<PathBuf, usize>,
            resolved_imports: &mut Vec<usize>,
            target: PathBuf,
        ) -> Result<()> {
            if let Some(idx) = index.get(&target).copied() {
                resolved_imports.push(idx);
            } else {
                // imported file is not part of the input files
                let node = Node::read(&target)?;
                unresolved.push_back((target.clone(), node));
                let idx = index.len();
                index.insert(target, idx);
                resolved_imports.push(idx);
            }
            Ok(())
        }

        // we start off by reading all input files, which includes all solidity files from the
        // source and test folder; parsing happens in parallel via rayon
        let mut unresolved: VecDeque<(PathBuf, Node)> = sources
            .into_par_iter()
            .map(|(path, source)| {
                let data = SolData::parse(source.as_ref(), &path);
                (path.clone(), Node { path, source, data })
            })
            .collect();

        // identifiers of all resolved files
        let mut index: HashMap<_, _> =
            unresolved.iter().enumerate().map(|(idx, (p, _))| (p.clone(), idx)).collect();

        let num_input_files = unresolved.len();

        // contains the files and their dependencies
        let mut nodes = Vec::with_capacity(unresolved.len());
        let mut edges = Vec::with_capacity(unresolved.len());

        // tracks additional paths that should be used with `--include-path`, these are libraries
        // that use absolute imports like `import "src/Contract.sol"`
        let mut resolved_solc_include_paths = IncludePaths::default();

        // keep track of all unique paths that we failed to resolve to not spam the reporter with
        // the same path
        let mut unresolved_imports = HashSet::new();

        // now we need to resolve all imports for the source file and those imported from other
        // locations
        while let Some((path, node)) = unresolved.pop_front() {
            let mut resolved_imports = Vec::with_capacity(node.data.imports.len());
            // parent directory of the current file; a file without a parent can't anchor
            // relative imports, so it's skipped
            let cwd = match path.parent() {
                Some(inner) => inner,
                None => continue,
            };

            for import in node.data.imports.iter() {
                let import_path = import.data().path();
                match paths.resolve_import_and_include_paths(
                    cwd,
                    import_path,
                    &mut resolved_solc_include_paths,
                ) {
                    Ok(import) => {
                        add_node(&mut unresolved, &mut index, &mut resolved_imports, import)
                            .map_err(|err| {
                                match err {
                                    err @ SolcError::ResolveCaseSensitiveFileName { .. } |
                                    err @ SolcError::Resolve(_) => {
                                        // make the error more helpful by providing additional
                                        // context
                                        SolcError::FailedResolveImport(
                                            Box::new(err),
                                            node.path.clone(),
                                            import_path.clone(),
                                        )
                                    }
                                    _ => err,
                                }
                            })?
                    }
                    Err(err) => {
                        // record the failure but keep resolving the remaining imports;
                        // unresolved imports are reported in bulk below
                        unresolved_imports.insert((import_path.to_path_buf(), node.path.clone()));
                        tracing::trace!(
                            "failed to resolve import component \"{:?}\" for {:?}",
                            err,
                            node.path
                        )
                    }
                };
            }

            nodes.push(node);
            edges.push(resolved_imports);
        }

        if !unresolved_imports.is_empty() {
            // notify on all unresolved imports
            crate::report::unresolved_imports(
                &unresolved_imports
                    .iter()
                    .map(|(i, f)| (i.as_path(), f.as_path()))
                    .collect::<Vec<_>>(),
                &paths.remappings,
            );
        }

        let edges = GraphEdges {
            edges,
            rev_indices: index.iter().map(|(k, v)| (*v, k.clone())).collect(),
            indices: index,
            num_input_files,
            versions: nodes
                .iter()
                .enumerate()
                .map(|(idx, node)| (idx, node.data.version_req.clone()))
                .collect(),
            // `data` is only populated later when the graph is split, see `Self::split`
            data: Default::default(),
            unresolved_imports,
            resolved_solc_include_paths,
        };
        Ok(Graph { nodes, edges, root: paths.root.clone() })
    }

    /// Resolves the dependencies of a project's source contracts
    pub fn resolve(paths: &ProjectPathsConfig) -> Result<Graph> {
        Self::resolve_sources(paths, paths.read_input_files()?)
    }
}
2022-04-27 12:37:40 +00:00
// NOTE: `cfg(all(...))` with a single predicate is redundant (clippy `non_minimal_cfg`);
// minimized to the plain predicate — semantically identical.
#[cfg(feature = "svm-solc")]
impl Graph {
    /// Consumes the nodes of the graph and returns all input files together with their appropriate
    /// version and the edges of the graph
    ///
    /// First we determine the compatible version for each input file (from sources and test
    /// folder, see `Self::resolve`) and then we add all resolved library imports.
    pub fn into_sources_by_version(self, offline: bool) -> Result<(VersionedSources, GraphEdges)> {
        /// insert the imports of the given node into the sources map
        /// There can be following graph:
        /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)`
        /// where `C` is a library import, in which case we assign `C` only to the first input file.
        /// However, it's not required to include them in the solc `CompilerInput` as they would get
        /// picked up by solc otherwise, but we add them, so we can create a corresponding
        /// cache entry for them as well. This can be optimized however
        fn insert_imports(
            idx: usize,
            all_nodes: &mut HashMap<usize, (PathBuf, Source)>,
            sources: &mut Sources,
            edges: &[Vec<usize>],
            num_input_files: usize,
        ) {
            for dep in edges[idx].iter().copied() {
                // we only process nodes that were added as part of the resolve step because input
                // nodes are handled separately
                if dep >= num_input_files {
                    // library import
                    if let Some((path, source)) = all_nodes.remove(&dep) {
                        sources.insert(path, source);
                        insert_imports(dep, all_nodes, sources, edges, num_input_files);
                    }
                }
            }
        }

        let versioned_nodes = self.get_input_node_versions(offline)?;
        let (nodes, edges) = self.split();
        let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len());

        let mut all_nodes = nodes.into_iter().enumerate().collect::<HashMap<_, _>>();

        // determine the `Sources` set for each solc version
        for (version, input_node_indices) in versioned_nodes {
            let mut sources = Sources::new();
            // we only process input nodes (from sources, tests for example)
            for idx in input_node_indices {
                // insert the input node in the sources set and remove it from the available set
                // (fixed typo in the panic message: "preset" -> "present")
                let (path, source) = all_nodes.remove(&idx).expect("node is present. qed");
                sources.insert(path, source);
                insert_imports(
                    idx,
                    &mut all_nodes,
                    &mut sources,
                    &edges.edges,
                    edges.num_input_files,
                );
            }
            versioned_sources.insert(version, sources);
        }

        Ok((
            VersionedSources {
                inner: versioned_sources,
                offline,
                resolved_solc_include_paths: edges.resolved_solc_include_paths.clone(),
            },
            edges,
        ))
    }

    /// Writes the list of imported files into the given formatter:
    /// `A (version) imports B (version)`
    fn format_imports_list<W: std::fmt::Write>(
        &self,
        idx: usize,
        f: &mut W,
    ) -> std::result::Result<(), std::fmt::Error> {
        let node = self.node(idx);
        write!(f, "{} ", utils::source_name(&node.path, &self.root).display())?;
        node.data.fmt_version(f)?;
        write!(f, " imports:")?;
        for dep in self.node_ids(idx).skip(1) {
            writeln!(f)?;
            let dep = self.node(dep);
            write!(f, "    {} ", utils::source_name(&dep.path, &self.root).display())?;
            dep.data.fmt_version(f)?;
        }
        Ok(())
    }

    /// Filters incompatible versions from the `candidates` by intersecting them with the
    /// version requirements of the node at `idx` and all nodes it (transitively) imports.
    fn retain_compatible_versions(&self, idx: usize, candidates: &mut Vec<&crate::SolcVersion>) {
        let nodes: HashSet<_> = self.node_ids(idx).collect();
        for node in nodes {
            let node = self.node(node);
            if let Some(ref req) = node.data.version_req {
                candidates.retain(|v| req.matches(v.as_ref()));
            }
            if candidates.is_empty() {
                // nothing to filter anymore
                return
            }
        }
    }

    /// Ensures that all files are compatible with all of their imports.
    pub fn ensure_compatible_imports(&self, offline: bool) -> Result<()> {
        self.get_input_node_versions(offline)?;
        Ok(())
    }

    /// Returns a map of versions together with the input nodes that are compatible with that
    /// version.
    ///
    /// This will essentially do a DFS on all input sources and their transitive imports and
    /// checking that all can compiled with the version stated in the input file.
    ///
    /// Returns an error message with __all__ input files that don't have compatible imports.
    ///
    /// This also attempts to prefer local installations over remote available.
    /// If `offline` is set to `true` then only already installed.
    fn get_input_node_versions(
        &self,
        offline: bool,
    ) -> Result<HashMap<crate::SolcVersion, Vec<usize>>> {
        use crate::Solc;
        tracing::trace!("resolving input node versions");

        // this is likely called by an application and will be eventually printed so we don't exit
        // on first error, instead gather all the errors and return a bundled error message instead
        let mut errors = Vec::new();
        // we also don't want duplicate error diagnostic
        let mut erroneous_nodes =
            std::collections::HashSet::with_capacity(self.edges.num_input_files);

        // the sorted list of all versions
        let all_versions = if offline { Solc::installed_versions() } else { Solc::all_versions() };

        // stores all versions and their nodes that can be compiled
        let mut versioned_nodes = HashMap::new();

        // stores all files and the versions they're compatible with
        let mut all_candidates = Vec::with_capacity(self.edges.num_input_files);
        // walking through the node's dep tree and filtering the versions along the way
        for idx in 0..self.edges.num_input_files {
            let mut candidates = all_versions.iter().collect::<Vec<_>>();
            // remove all incompatible versions from the candidates list by checking the node and
            // all its imports
            self.retain_compatible_versions(idx, &mut candidates);

            if candidates.is_empty() && !erroneous_nodes.contains(&idx) {
                let mut msg = String::new();
                self.format_imports_list(idx, &mut msg).unwrap();
                errors.push(format!(
                    "Discovered incompatible solidity versions in following\n: {msg}"
                ));
                erroneous_nodes.insert(idx);
            } else {
                // found viable candidates, pick the most recent version that's already installed
                let candidate =
                    if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
                        candidates[pos]
                    } else {
                        candidates.last().expect("not empty; qed.")
                    }
                    .clone();

                // also store all possible candidates to optimize the set
                all_candidates.push((idx, candidates.into_iter().collect::<HashSet<_>>()));

                versioned_nodes.entry(candidate).or_insert_with(|| Vec::with_capacity(1)).push(idx);
            }
        }

        // detected multiple versions but there might still exist a single version that satisfies
        // all sources
        if versioned_nodes.len() > 1 {
            versioned_nodes = Self::resolve_multiple_versions(all_candidates);
        }

        if versioned_nodes.len() == 1 {
            tracing::trace!(
                "found exact solc version for all sources \"{}\"",
                versioned_nodes.keys().next().unwrap()
            );
        }

        if errors.is_empty() {
            tracing::trace!(
                "resolved {} versions {:?}",
                versioned_nodes.len(),
                versioned_nodes.keys()
            );
            Ok(versioned_nodes)
        } else {
            tracing::error!("failed to resolve versions");
            Err(SolcError::msg(errors.join("\n")))
        }
    }

    /// Tries to find the "best" set of versions to nodes, See [Solc version
    /// auto-detection](#solc-version-auto-detection)
    ///
    /// This is a bit inefficient but is fine, the max. number of versions is ~80 and there's
    /// a high chance that the number of source files is <50, even for larger projects.
    fn resolve_multiple_versions(
        all_candidates: Vec<(usize, HashSet<&crate::SolcVersion>)>,
    ) -> HashMap<crate::SolcVersion, Vec<usize>> {
        // returns the intersection as sorted set of nodes
        fn intersection<'a>(
            mut sets: Vec<&HashSet<&'a crate::SolcVersion>>,
        ) -> Vec<&'a crate::SolcVersion> {
            if sets.is_empty() {
                return Vec::new()
            }

            let mut result = sets.pop().cloned().expect("not empty; qed.");
            if !sets.is_empty() {
                result.retain(|item| sets.iter().all(|set| set.contains(item)));
            }

            let mut v = result.into_iter().collect::<Vec<_>>();
            v.sort_unstable();
            v
        }

        /// returns the highest version that is installed
        /// if the candidates set only contains uninstalled versions then this returns the highest
        /// uninstalled version
        fn remove_candidate(candidates: &mut Vec<&crate::SolcVersion>) -> crate::SolcVersion {
            debug_assert!(!candidates.is_empty());

            if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
                candidates.remove(pos)
            } else {
                candidates.pop().expect("not empty; qed.")
            }
            .clone()
        }

        let all_sets = all_candidates.iter().map(|(_, versions)| versions).collect();

        // find all versions that satisfy all nodes
        let mut intersection = intersection(all_sets);
        if !intersection.is_empty() {
            let exact_version = remove_candidate(&mut intersection);
            let all_nodes = all_candidates.into_iter().map(|(node, _)| node).collect();
            tracing::trace!(
                "resolved solc version compatible with all sources \"{}\"",
                exact_version
            );
            return HashMap::from([(exact_version, all_nodes)])
        }

        // no version satisfies all nodes
        let mut versioned_nodes: HashMap<crate::SolcVersion, Vec<usize>> = HashMap::new();

        // try to minimize the set of versions, this is guaranteed to lead to
        // `versioned_nodes.len() > 1` as no solc version exists that can satisfy all sources
        for (node, versions) in all_candidates {
            // need to sort them again
            let mut versions = versions.into_iter().collect::<Vec<_>>();
            versions.sort_unstable();

            let candidate =
                if let Some(idx) = versions.iter().rposition(|v| versioned_nodes.contains_key(v)) {
                    // use a version that's already in the set
                    versions.remove(idx).clone()
                } else {
                    // use the highest version otherwise
                    remove_candidate(&mut versions)
                };

            versioned_nodes.entry(candidate).or_insert_with(|| Vec::with_capacity(1)).push(node);
        }

        tracing::trace!(
            "no solc version can satisfy all source files, resolved multiple versions \"{:?}\"",
            versioned_nodes.keys()
        );

        versioned_nodes
    }
}
2022-01-17 12:05:52 +00:00
/// An iterator over a node and its dependencies
#[ derive(Debug) ]
pub struct NodesIter < ' a > {
/// stack of nodes
stack : VecDeque < usize > ,
visited : HashSet < usize > ,
2022-02-04 16:20:24 +00:00
graph : & ' a GraphEdges ,
2022-01-17 12:05:52 +00:00
}
impl < ' a > NodesIter < ' a > {
2022-02-04 16:20:24 +00:00
fn new ( start : usize , graph : & ' a GraphEdges ) -> Self {
Self { stack : VecDeque ::from ( [ start ] ) , visited : HashSet ::new ( ) , graph }
2022-01-17 12:05:52 +00:00
}
}
impl < ' a > Iterator for NodesIter < ' a > {
type Item = usize ;
fn next ( & mut self ) -> Option < Self ::Item > {
let node = self . stack . pop_front ( ) ? ;
if self . visited . insert ( node ) {
// push the node's direct dependencies to the stack if we haven't visited it already
self . stack . extend ( self . graph . imported_nodes ( node ) . iter ( ) . copied ( ) ) ;
}
Some ( node )
}
}
2022-01-05 21:46:57 +00:00
/// Container type for solc versions and their compatible sources
// NOTE: `cfg(all(...))` with a single predicate is redundant (clippy `non_minimal_cfg`);
// minimized to the plain predicate — semantically identical.
#[cfg(feature = "svm-solc")]
#[derive(Debug)]
pub struct VersionedSources {
    /// additional `--include-path` values gathered while resolving the graph
    resolved_solc_include_paths: IncludePaths,
    /// all sources grouped by the solc version they should be compiled with
    inner: HashMap<crate::SolcVersion, Sources>,
    /// if `true`, missing solc installations must not be downloaded
    offline: bool,
}
2022-04-27 12:37:40 +00:00
// `all(...)` with a single predicate is redundant (clippy `non_minimal_cfg`).
#[cfg(feature = "svm-solc")]
impl VersionedSources {
    /// Resolves or installs the corresponding `Solc` installation.
    ///
    /// This will also configure following solc arguments:
    /// - `allowed_paths`
    /// - `base_path`
    ///
    /// Returns a map of the configured `Solc` executable to its reported
    /// version and the sources compatible with that version.
    ///
    /// # Errors
    ///
    /// Returns an error if a required solc version is not installed while in
    /// offline mode, if installation (or reinstallation after a failed
    /// checksum) fails, or if the resolved binary cannot report its version.
    pub fn get<T: crate::ArtifactOutput>(
        self,
        project: &crate::Project<T>,
    ) -> Result<std::collections::BTreeMap<crate::Solc, (semver::Version, Sources)>> {
        use crate::Solc;

        // we take the installer lock here to ensure installation checking is done in sync
        #[cfg(any(test, feature = "tests"))]
        let _lock = crate::compile::take_solc_installer_lock();

        let mut sources_by_version = std::collections::BTreeMap::new();
        for (version, sources) in self.inner {
            let solc = if !version.is_installed() {
                if self.offline {
                    return Err(SolcError::msg(format!(
                        "missing solc \"{version}\" installation in offline mode"
                    )))
                } else {
                    // install missing solc
                    Solc::blocking_install(version.as_ref())?
                }
            } else {
                // find installed svm
                Solc::find_svm_installed_version(version.to_string())?.ok_or_else(|| {
                    SolcError::msg(format!("solc \"{version}\" should have been installed"))
                })?
            };

            if self.offline {
                tracing::trace!(
                    "skip verifying solc checksum for {} in offline mode",
                    solc.solc.display()
                );
            } else {
                tracing::trace!("verifying solc checksum for {}", solc.solc.display());
                // a failed checksum most likely means a corrupted binary; retry
                // the installation once rather than erroring out immediately
                if solc.verify_checksum().is_err() {
                    tracing::trace!("corrupted solc version, redownloading \"{}\"", version);
                    Solc::blocking_install(version.as_ref())?;
                    tracing::trace!("reinstalled solc: \"{}\"", version);
                }
            }

            // ask the binary itself for its exact version rather than trusting
            // the requested one
            let version = solc.version()?;

            // this will configure the `Solc` executable and its arguments
            let solc = project.configure_solc_with_version(
                solc,
                Some(version.clone()),
                self.resolved_solc_include_paths.clone(),
            );

            sources_by_version.insert(solc, (version, sources));
        }
        Ok(sources_by_version)
    }
}
/// A single resolved file in the dependency graph.
#[derive(Debug)]
pub struct Node {
    /// path of the solidity file
    path: PathBuf,
    /// content of the solidity file
    source: Source,
    /// parsed data extracted from `source` (imports, version/experimental
    /// pragmas, license — see the accessors on `impl Node`)
    data: SolData,
}
2022-01-17 12:27:40 +00:00
impl Node {
    /// Reads the content of the file and returns a [Node] containing relevant information
    ///
    /// On read failure the error is classified before being returned:
    /// a dangling symlink, a file whose name exists under a different case
    /// (useful on case-sensitive file systems), or a plain resolve error.
    pub fn read(file: impl AsRef<Path>) -> Result<Self> {
        let file = file.as_ref();
        let source = Source::read(file).map_err(|err| {
            // probe the file system once; the result drives both checks below
            let exists = err.path().exists();
            if !exists && err.path().is_symlink() {
                // the path is a symlink whose target is missing
                SolcError::ResolveBadSymlink(err)
            } else {
                // This is an additional check useful on OS that have case-sensitive paths, See also <https://docs.soliditylang.org/en/v0.8.17/path-resolution.html#import-callback>
                if !exists {
                    // check if there exists a file with different case
                    if let Some(existing_file) = find_case_sensitive_existing_file(file) {
                        SolcError::ResolveCaseSensitiveFileName { error: err, existing_file }
                    } else {
                        SolcError::Resolve(err)
                    }
                } else {
                    SolcError::Resolve(err)
                }
            }
        })?;
        let data = SolData::parse(source.as_ref(), file);
        Ok(Self { path: file.to_path_buf(), source, data })
    }

    /// Returns the file's content as a string slice.
    pub fn content(&self) -> &str {
        &self.source.content
    }

    /// Returns all import directives parsed from the file.
    pub fn imports(&self) -> &Vec<SolDataUnit<SolImport>> {
        &self.data.imports
    }

    /// Returns the parsed `pragma solidity` version requirement, if any.
    pub fn version(&self) -> &Option<SolDataUnit<String>> {
        &self.data.version
    }

    /// Returns the parsed `pragma experimental` value, if any.
    pub fn experimental(&self) -> &Option<SolDataUnit<String>> {
        &self.data.experimental
    }

    /// Returns the parsed license identifier, if any.
    pub fn license(&self) -> &Option<SolDataUnit<String>> {
        &self.data.license
    }

    /// Returns the node's path and source as a pair of borrows.
    pub fn unpack(&self) -> (&PathBuf, &Source) {
        (&self.path, &self.source)
    }
}
2022-02-19 13:55:21 +00:00
/// Helper type for formatting a node
pub(crate) struct DisplayNode<'a> {
    /// the node to format
    node: &'a Node,
    /// project root; passed to `utils::source_name` to shorten the displayed path
    root: &'a PathBuf,
}
impl < ' a > fmt ::Display for DisplayNode < ' a > {
fn fmt ( & self , f : & mut fmt ::Formatter < '_ > ) -> fmt ::Result {
let path = utils ::source_name ( & self . node . path , self . root ) ;
write! ( f , " {} " , path . display ( ) ) ? ;
if let Some ( ref v ) = self . node . data . version {
write! ( f , " {} " , v . data ( ) ) ? ;
}
Ok ( ( ) )
}
}
2022-01-05 21:46:57 +00:00
#[cfg(test)]
mod tests {
    use super::*;

    // Resolves the bundled hardhat sample project and checks that the graph
    // contains the single input file plus its node_modules import.
    #[test]
    fn can_resolve_hardhat_dependency_graph() {
        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample");
        let paths = ProjectPathsConfig::hardhat(root).unwrap();

        let graph = Graph::resolve(&paths).unwrap();

        assert_eq!(graph.edges.num_input_files, 1);
        assert_eq!(graph.files().len(), 2);

        // input files are indexed before resolved imports
        assert_eq!(
            graph.files().clone(),
            HashMap::from([
                (paths.sources.join("Greeter.sol"), 0),
                (paths.root.join("node_modules/hardhat/console.sol"), 1),
            ])
        );
    }

    // Resolves the bundled dapptools sample project: two input files and one
    // shared lib dependency, with import edges recorded per node.
    #[test]
    fn can_resolve_dapp_dependency_graph() {
        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
        let paths = ProjectPathsConfig::dapptools(root).unwrap();

        let graph = Graph::resolve(&paths).unwrap();

        assert_eq!(graph.edges.num_input_files, 2);
        assert_eq!(graph.files().len(), 3);
        assert_eq!(
            graph.files().clone(),
            HashMap::from([
                (paths.sources.join("Dapp.sol"), 0),
                (paths.sources.join("Dapp.t.sol"), 1),
                (paths.root.join("lib/ds-test/src/test.sol"), 2),
            ])
        );

        // the test file imports both the lib and the contract under test
        let dapp_test = graph.node(1);
        assert_eq!(dapp_test.path, paths.sources.join("Dapp.t.sol"));
        assert_eq!(
            dapp_test.data.imports.iter().map(|i| i.data().path()).collect::<Vec<&PathBuf>>(),
            vec![&PathBuf::from("ds-test/test.sol"), &PathBuf::from("./Dapp.sol")]
        );
        assert_eq!(graph.imported_nodes(1).to_vec(), vec![2, 0]);
    }

    // Verifies the exact tree rendering of the dapp sample graph.
    // Skipped on windows because the expected paths use `/` separators.
    #[test]
    #[cfg(not(target_os = "windows"))]
    fn can_print_dapp_sample_graph() {
        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
        let paths = ProjectPathsConfig::dapptools(root).unwrap();
        let graph = Graph::resolve(&paths).unwrap();
        let mut out = Vec::<u8>::new();
        tree::print(&graph, &Default::default(), &mut out).unwrap();

        assert_eq!(
            "
src/Dapp.sol >=0.6.6
src/Dapp.t.sol >=0.6.6
├── lib/ds-test/src/test.sol >=0.4.23
└── src/Dapp.sol >=0.6.6
"
            .trim_start()
            .as_bytes()
            .to_vec(),
            out
        );
    }

    // Same as above for the hardhat sample graph.
    #[test]
    #[cfg(not(target_os = "windows"))]
    fn can_print_hardhat_sample_graph() {
        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample");
        let paths = ProjectPathsConfig::hardhat(root).unwrap();
        let graph = Graph::resolve(&paths).unwrap();
        let mut out = Vec::<u8>::new();
        tree::print(&graph, &Default::default(), &mut out).unwrap();

        assert_eq!(
            "
contracts/Greeter.sol >=0.6.0
└── node_modules/hardhat/console.sol >=0.4.22 <0.9.0
"
            .trim_start()
            .as_bytes()
            .to_vec(),
            out
        );
    }
}