feat(solc): flatten (#774)

* solc flatten implementation

* upd changelog

* upd docs

* revamp flattening

* clippy

* use resolve_import method

* extract recursive flattening into a separate func

* change content iteration for flatten

* remove redundant result

* clean up solimport

* add comment to project.flatten

* add support for ver pragma loc

* address pr comments

* uncomment the test

* improve test cov

* add handling of SPDX license identifiers

* change arg name

* match license only at the beginning of the file

* add comments

* lint

* more comments
Author: Roman Krasiuk (committed by GitHub)
Date:   2022-01-17 13:27:40 +01:00
Parent: 579311bfdd
Commit: afcba9567f
10 changed files with 316 additions and 48 deletions


@@ -34,6 +34,8 @@
 ### Unreleased
+- Add ability to flatten file imports
+  [#774](https://github.com/gakonst/ethers-rs/pull/774)
 - Add dependency graph and resolve all imported libraryfiles
   [#750](https://github.com/gakonst/ethers-rs/pull/750)
 - `Remapping::find_many` does not return a `Result` anymore


@@ -474,7 +474,7 @@ impl Source {
     /// Returns all import statements of the file
     pub fn parse_imports(&self) -> Vec<&str> {
-        utils::find_import_paths(self.as_ref())
+        utils::find_import_paths(self.as_ref()).map(|m| m.as_str()).collect()
     }
 }

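The change above follows from `find_import_paths` now yielding `regex::Match` values instead of plain `&str`: a `Match` carries the byte offsets of the hit, which the flattener needs for splicing, while callers that only want the text call `.as_str()`. A minimal standalone sketch of that idea, using a simplified stand-in for the real import regex:

use regex::Regex;

fn main() {
    // Simplified stand-in for RE_SOL_IMPORT; the real pattern also handles single
    // quotes, `import ... as ...` and `import { .. } from "..."` forms.
    let re = Regex::new(r#"import\s+"(?P<path>[^"]+)";"#).unwrap();
    let src = r#"import "./A.sol"; import "./B.sol";"#;
    for m in re.captures_iter(src).filter_map(|c| c.name("path")) {
        // both the matched text and its byte range are available
        println!("{} at bytes {}..{}", m.as_str(), m.start(), m.end());
    }
}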

@@ -326,10 +326,10 @@ impl SolFilesCacheBuilder {
                 .map_err(|err| SolcError::solc(err.to_string()))?
                 .as_millis() as u64;
             let imports =
-                utils::find_import_paths(source.as_ref()).into_iter().map(str::to_string).collect();
+                utils::find_import_paths(source.as_ref()).map(|m| m.as_str().to_owned()).collect();
             let version_pragmas = utils::find_version_pragma(source.as_ref())
-                .map(|v| vec![v.to_string()])
+                .map(|v| vec![v.as_str().to_string()])
                 .unwrap_or_default();
             let entry = CacheEntry {


@@ -323,7 +323,7 @@ impl Solc {
     pub fn source_version_req(source: &Source) -> Result<VersionReq> {
         let version =
             utils::find_version_pragma(&source.content).ok_or(SolcError::PragmaNotFound)?;
-        Self::version_req(version)
+        Self::version_req(version.as_str())
     }

     /// Returns the corresponding SemVer version requirement for the solidity version

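Only the `.as_str()` call changes here: the pragma match still ends up as a plain version requirement string that semver can parse. A small sketch of that last step (the `^0.8.0` literal stands in for whatever `find_version_pragma` returns; `Solc::version_req` itself may do extra normalization not shown):

use semver::{Version, VersionReq};

fn main() {
    // What find_version_pragma(...).as_str() would hand over for `pragma solidity ^0.8.0;`
    let pragma = "^0.8.0";
    let req = VersionReq::parse(pragma).unwrap();
    assert!(req.matches(&Version::new(0, 8, 17)));
    assert!(!req.matches(&Version::new(0, 7, 6)));
}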

@@ -4,6 +4,7 @@ use crate::{
     error::{Result, SolcError, SolcIoError},
     hh::HardhatArtifact,
     remappings::Remapping,
+    resolver::Graph,
     utils, CompilerOutput, Source, Sources,
 };
 use ethers_core::{abi::Abi, types::Bytes};
@@ -11,8 +12,7 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
 use std::{
     collections::BTreeMap,
     convert::TryFrom,
-    fmt,
-    fmt::Formatter,
+    fmt::{self, Formatter},
     fs, io,
     path::{Component, Path, PathBuf},
 };
@@ -171,6 +171,68 @@ impl ProjectPathsConfig {
     pub fn find_libs(root: impl AsRef<Path>) -> Vec<PathBuf> {
         vec![utils::find_fave_or_alt_path(root, "lib", "node_modules")]
     }
+
+    /// Flattens all file imports into a single string
+    pub fn flatten(&self, target: &Path) -> Result<String> {
+        tracing::trace!("flattening file");
+        let graph = Graph::resolve(self)?;
+        self.flatten_node(target, &graph, false, false)
+    }
+
+    /// Flattens a single node from the dependency graph
+    fn flatten_node(
+        &self,
+        target: &Path,
+        graph: &Graph,
+        strip_version_pragma: bool,
+        strip_license: bool,
+    ) -> Result<String> {
+        let target_dir = target.parent().ok_or_else(|| {
+            SolcError::msg(format!("failed to get parent directory for \"{:?}\"", target.display()))
+        })?;
+        let target_index = graph.files().get(target).ok_or_else(|| {
+            SolcError::msg(format!("cannot resolve file at \"{:?}\"", target.display()))
+        })?;
+        let target_node = graph.node(*target_index);
+
+        let mut imports = target_node.imports().clone();
+        imports.sort_by_key(|x| x.loc().0);
+
+        let mut content = target_node.content().as_bytes().to_vec();
+        let mut offset = 0_isize;
+
+        if strip_license {
+            if let Some(license) = target_node.license() {
+                let (start, end) = license.loc_by_offset(offset);
+                content.splice(start..end, std::iter::empty());
+                offset -= (end - start) as isize;
+            }
+        }
+
+        if strip_version_pragma {
+            if let Some(version) = target_node.version() {
+                let (start, end) = version.loc_by_offset(offset);
+                content.splice(start..end, std::iter::empty());
+                offset -= (end - start) as isize;
+            }
+        }
+
+        for import in imports.iter() {
+            let import_path = self.resolve_import(target_dir, import.data())?;
+            let import_content = self.flatten_node(&import_path, graph, true, true)?;
+            let import_content = import_content.trim().as_bytes().to_owned();
+            let import_content_len = import_content.len() as isize;
+            let (start, end) = import.loc_by_offset(offset);
+            content.splice(start..end, import_content);
+            offset += import_content_len - ((end - start) as isize);
+        }
+
+        let result = String::from_utf8(content).map_err(|err| {
+            SolcError::msg(format!("failed to convert extended bytes to string: {}", err))
+        })?;
+
+        Ok(result)
+    }
 }

 impl fmt::Display for ProjectPathsConfig {

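The core of `flatten_node` above is the offset bookkeeping: every location is recorded against the original file, so after each splice the remaining ranges have to be shifted by however much the buffer grew or shrank. A standalone sketch of that technique (the helper name and inputs are made up; replacement ranges are assumed to be sorted by start, as `flatten_node` guarantees via `sort_by_key`):

fn splice_ranges(source: &str, replacements: &[((usize, usize), &str)]) -> String {
    let mut content = source.as_bytes().to_vec();
    let mut offset = 0_isize;
    for ((start, end), new_text) in replacements {
        // shift the original range by the net growth/shrink so far
        let (s, e) = (
            (*start as isize + offset) as usize,
            (*end as isize + offset) as usize,
        );
        content.splice(s..e, new_text.bytes());
        offset += new_text.len() as isize - (*end - *start) as isize;
    }
    String::from_utf8(content).expect("replacements must be valid UTF-8")
}

fn main() {
    // replace `import "./A.sol";` (bytes 0..17) with the flattened body of A.sol
    let out = splice_ranges(
        "import \"./A.sol\";\ncontract B {}\n",
        &[((0, 17), "contract A {}")],
    );
    assert_eq!(out, "contract A {}\ncontract B {}\n");
}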

@@ -37,8 +37,12 @@ use crate::{
 };
 use error::Result;
 use std::{
-    borrow::Cow, collections::BTreeMap, convert::TryInto, fmt, fs, marker::PhantomData,
-    path::PathBuf,
+    borrow::Cow,
+    collections::BTreeMap,
+    convert::TryInto,
+    fmt, fs,
+    marker::PhantomData,
+    path::{Path, PathBuf},
 };

 /// Utilities for creating, mocking and testing of (temporary) projects
@@ -507,6 +511,18 @@ impl<Artifacts: ArtifactOutput> Project<Artifacts> {
         }
         Ok(())
     }
+
+    /// Flattens the target file into a single string suitable for verification
+    ///
+    /// This method uses a dependency graph to resolve imported files and substitute
+    /// import directives with the contents of target files. It will strip the pragma
+    /// version directives and SPDX license identifiers from imported files.
+    ///
+    /// NOTE: the SPDX license identifier will be removed from the imported file
+    /// only if it is found at the beginning of the file.
+    pub fn flatten(&self, target: &Path) -> Result<String> {
+        self.paths.flatten(target)
+    }
 }

 enum PreprocessedJob<T: ArtifactOutput> {

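Caller-side, the new API is a single method on `Project`. A hedged usage sketch (the hardhat-style layout and the `contracts/Greeter.sol` path are illustrative assumptions, not part of this diff):

use std::path::Path;

use ethers_solc::{Project, ProjectPathsConfig};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Assumes a hardhat-style project rooted in the current directory.
    let paths = ProjectPathsConfig::hardhat(".")?;
    let project = Project::builder().paths(paths).build()?;

    // Canonicalize so the target matches the paths tracked by the dependency graph.
    let target = std::fs::canonicalize(Path::new("contracts/Greeter.sol"))?;

    // One source string with imports inlined and the pragma/license lines of the
    // imported files stripped, e.g. for Etherscan verification.
    let flattened = project.flatten(&target)?;
    println!("{}", flattened);
    Ok(())
}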

@@ -59,6 +59,10 @@ impl<T: ArtifactOutput> TempProject<T> {
         self.project().compile()
     }

+    pub fn flatten(&self, target: &Path) -> Result<String> {
+        self.project().flatten(target)
+    }
+
     pub fn project_mut(&mut self) -> &mut Project<T> {
         &mut self.inner
     }


@@ -32,8 +32,9 @@ use std::{
 };

 use rayon::prelude::*;
+use regex::Match;
 use semver::VersionReq;
-use solang_parser::pt::{Import, SourceUnitPart};
+use solang_parser::pt::{Import, Loc, SourceUnitPart};

 use crate::{error::Result, utils, ProjectPathsConfig, Solc, Source, Sources};
@@ -153,14 +154,12 @@
             };

             for import in node.data.imports.iter() {
-                match paths.resolve_import(cwd, import) {
+                match paths.resolve_import(cwd, import.data()) {
                     Ok(import) => {
                         add_node(&mut unresolved, &mut index, &mut resolved_imports, import)?;
                     }
-                    Err(err) => {
-                        tracing::trace!("{}", err)
-                    }
-                }
+                    Err(err) => tracing::trace!("failed to resolve import component \"{:?}\"", err),
+                };
             }
             nodes.push(node);
             edges.push(resolved_imports);
@@ -422,12 +421,81 @@ pub struct Node {
     data: SolData,
 }

+impl Node {
+    pub fn content(&self) -> &str {
+        &self.source.content
+    }
+
+    pub fn imports(&self) -> &Vec<SolDataUnit<PathBuf>> {
+        &self.data.imports
+    }
+
+    pub fn version(&self) -> &Option<SolDataUnit<String>> {
+        &self.data.version
+    }
+
+    pub fn license(&self) -> &Option<SolDataUnit<String>> {
+        &self.data.license
+    }
+}
+
 #[derive(Debug, Clone)]
 #[allow(unused)]
 struct SolData {
-    version: Option<String>,
+    license: Option<SolDataUnit<String>>,
+    version: Option<SolDataUnit<String>>,
+    imports: Vec<SolDataUnit<PathBuf>>,
     version_req: Option<VersionReq>,
-    imports: Vec<PathBuf>,
 }
+
+#[derive(Debug, Clone)]
+pub struct SolDataUnit<T> {
+    loc: Location,
+    data: T,
+}
+
+#[derive(Debug, Clone)]
+pub struct Location {
+    pub start: usize,
+    pub end: usize,
+}
+
+/// Solidity Data Unit decorated with its location within the file
+impl<T> SolDataUnit<T> {
+    pub fn new(data: T, loc: Location) -> Self {
+        Self { data, loc }
+    }
+
+    /// Returns the underlying data for the unit
+    pub fn data(&self) -> &T {
+        &self.data
+    }
+
+    /// Returns the location of the given data unit
+    pub fn loc(&self) -> (usize, usize) {
+        (self.loc.start, self.loc.end)
+    }
+
+    /// Returns the location of the given data unit adjusted by an offset.
+    /// Used to determine new position of the unit within the file after
+    /// content manipulation.
+    pub fn loc_by_offset(&self, offset: isize) -> (usize, usize) {
+        (
+            offset.saturating_add(self.loc.start as isize) as usize,
+            offset.saturating_add(self.loc.end as isize) as usize,
+        )
+    }
+}
+
+impl From<Match<'_>> for Location {
+    fn from(src: Match) -> Self {
+        Location { start: src.start(), end: src.end() }
+    }
+}
+
+impl From<Loc> for Location {
+    fn from(src: Loc) -> Self {
+        Location { start: src.1, end: src.2 }
+    }
+}

 fn read_node(file: impl AsRef<Path>) -> Result<Node> {
@@ -443,24 +511,24 @@ fn read_node(file: impl AsRef<Path>) -> Result<Node> {
 /// parsing fails, we'll fall back to extract that info via regex
 fn parse_data(content: &str) -> SolData {
     let mut version = None;
-    let mut imports = Vec::new();
+    let mut imports = Vec::<SolDataUnit<PathBuf>>::new();
     match solang_parser::parse(content, 0) {
         Ok(units) => {
             for unit in units.0 {
                 match unit {
-                    SourceUnitPart::PragmaDirective(_, _, pragma, value) => {
+                    SourceUnitPart::PragmaDirective(loc, _, pragma, value) => {
                         if pragma.name == "solidity" {
                             // we're only interested in the solidity version pragma
-                            version = Some(value.string);
+                            version = Some(SolDataUnit::new(value.string, loc.into()));
                         }
                     }
                     SourceUnitPart::ImportDirective(_, import) => {
-                        let import = match import {
-                            Import::Plain(s, _) => s,
-                            Import::GlobalSymbol(s, _, _) => s,
-                            Import::Rename(s, _, _) => s,
+                        let (import, loc) = match import {
+                            Import::Plain(s, l) => (s, l),
+                            Import::GlobalSymbol(s, _, l) => (s, l),
+                            Import::Rename(s, _, l) => (s, l),
                         };
-                        imports.push(PathBuf::from(import.string));
+                        imports.push(SolDataUnit::new(PathBuf::from(import.string), loc.into()));
                     }
                     _ => {}
                 }
@@ -471,21 +539,50 @@
                 "failed to parse solidity ast: \"{:?}\". Falling back to regex to extract data",
                 err
             );
-            version = utils::find_version_pragma(content).map(str::to_string);
-            imports = utils::find_import_paths(content)
-                .into_iter()
-                .map(|p| Path::new(p).to_path_buf())
-                .collect()
+            version = capture_outer_and_inner(content, &utils::RE_SOL_PRAGMA_VERSION, &["version"])
+                .first()
+                .map(|(cap, name)| {
+                    SolDataUnit::new(name.as_str().to_owned(), cap.to_owned().into())
+                });
+            imports = capture_outer_and_inner(content, &utils::RE_SOL_IMPORT, &["p1", "p2", "p3"])
+                .iter()
+                .map(|(cap, m)| SolDataUnit::new(PathBuf::from(m.as_str()), cap.to_owned().into()))
+                .collect();
         }
     };
-    let version_req = if let Some(ref v) = version { Solc::version_req(v).ok() } else { None };
-    SolData { version_req, version, imports }
+    let license = content.lines().next().and_then(|line| {
+        capture_outer_and_inner(line, &utils::RE_SOL_SDPX_LICENSE_IDENTIFIER, &["license"])
+            .first()
+            .map(|(cap, l)| SolDataUnit::new(l.as_str().to_owned(), cap.to_owned().into()))
+    });
+    let version_req = version.as_ref().and_then(|v| Solc::version_req(v.data()).ok());
+
+    SolData { version_req, version, imports, license }
 }

+/// Given the regex and the target string, find all occurrences
+/// of named groups within the string. This method returns
+/// the tuple of matches `(a, b)` where `a` is the match for the
+/// entire regex and `b` is the match for the first named group.
+///
+/// NOTE: This method will return the match for the first named
+/// group, so the order of passed named groups matters.
+fn capture_outer_and_inner<'a>(
+    content: &'a str,
+    regex: &regex::Regex,
+    names: &[&str],
+) -> Vec<(regex::Match<'a>, regex::Match<'a>)> {
+    regex
+        .captures_iter(content)
+        .filter_map(|cap| {
+            let cap_match = names.iter().find_map(|name| cap.name(name));
+            cap_match.and_then(|m| cap.get(0).map(|outer| (outer.to_owned(), m)))
+        })
+        .collect()
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
-    use std::path::Path;

     #[test]
     fn can_resolve_hardhat_dependency_graph() {
@@ -527,11 +624,8 @@ mod tests {
         let dapp_test = graph.node(1);
         assert_eq!(dapp_test.path, paths.sources.join("Dapp.t.sol"));
         assert_eq!(
-            dapp_test.data.imports,
-            vec![
-                Path::new("ds-test/test.sol").to_path_buf(),
-                Path::new("./Dapp.sol").to_path_buf()
-            ]
+            dapp_test.data.imports.iter().map(|i| i.data()).collect::<Vec<&PathBuf>>(),
+            vec![&PathBuf::from("ds-test/test.sol"), &PathBuf::from("./Dapp.sol")]
         );
         assert_eq!(graph.imported_nodes(1).to_vec(), vec![2, 0]);
     }

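`capture_outer_and_inner` exists because the flattener needs two spans per hit: the full statement (the range to cut out of the file) and the named group (the data to keep). A standalone sketch of that pairing, reusing the version-pragma pattern defined in the utils changes below:

use regex::Regex;

fn main() {
    // same pattern as utils::RE_SOL_PRAGMA_VERSION
    let re = Regex::new(r"pragma\s+solidity\s+(?P<version>.+?);").unwrap();
    let src = "// SPDX-License-Identifier: MIT\npragma solidity ^0.8.0;\n";
    if let Some(cap) = re.captures(src) {
        let outer = cap.get(0).unwrap(); // the whole `pragma solidity ^0.8.0;` statement
        let inner = cap.name("version").unwrap(); // just `^0.8.0`
        println!("splice {}..{}, version = {}", outer.start(), outer.end(), inner.as_str());
    }
}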

@@ -2,43 +2,49 @@
 use std::path::{Component, Path, PathBuf};

-use crate::{error::SolcError, SolcIoError};
+use crate::{
+    error::{self, SolcError},
+    ProjectPathsConfig, SolcIoError,
+};
 use once_cell::sync::Lazy;
-use regex::Regex;
+use regex::{Match, Regex};
 use semver::Version;
 use tiny_keccak::{Hasher, Keccak};
 use walkdir::WalkDir;

 /// A regex that matches the import path and identifier of a solidity import
 /// statement with the named groups "path", "id".
+// Adapted from https://github.com/nomiclabs/hardhat/blob/cced766c65b25d3d0beb39ef847246ac9618bdd9/packages/hardhat-core/src/internal/solidity/parse.ts#L100
 pub static RE_SOL_IMPORT: Lazy<Regex> = Lazy::new(|| {
-    // Adapted from https://github.com/nomiclabs/hardhat/blob/cced766c65b25d3d0beb39ef847246ac9618bdd9/packages/hardhat-core/src/internal/solidity/parse.ts#L100
     Regex::new(r#"import\s+(?:(?:"(?P<p1>[^;]*)"|'([^;]*)')(?:;|\s+as\s+(?P<id>[^;]*);)|.+from\s+(?:"(?P<p2>.*)"|'(?P<p3>.*)');)"#).unwrap()
 });

 /// A regex that matches the version part of a solidity pragma
 /// as follows: `pragma solidity ^0.5.2;` => `^0.5.2`
-/// statement with the named groups "path", "id".
+/// statement with the named group "version".
 // Adapted from https://github.com/nomiclabs/hardhat/blob/cced766c65b25d3d0beb39ef847246ac9618bdd9/packages/hardhat-core/src/internal/solidity/parse.ts#L119
 pub static RE_SOL_PRAGMA_VERSION: Lazy<Regex> =
     Lazy::new(|| Regex::new(r"pragma\s+solidity\s+(?P<version>.+?);").unwrap());

+/// A regex that matches the SPDX license identifier
+/// statement with the named group "license".
+pub static RE_SOL_SDPX_LICENSE_IDENTIFIER: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r"///?\s*SPDX-License-Identifier:\s*(?P<license>.+)").unwrap());
+
 /// Returns all path parts from any solidity import statement in a string,
 /// `import "./contracts/Contract.sol";` -> `"./contracts/Contract.sol"`.
 ///
 /// See also https://docs.soliditylang.org/en/v0.8.9/grammar.html
-pub fn find_import_paths(contract: &str) -> Vec<&str> {
+pub fn find_import_paths(contract: &str) -> impl Iterator<Item = Match> {
     RE_SOL_IMPORT
         .captures_iter(contract)
         .filter_map(|cap| cap.name("p1").or_else(|| cap.name("p2")).or_else(|| cap.name("p3")))
-        .map(|m| m.as_str())
-        .collect()
 }

 /// Returns the solidity version pragma from the given input:
 /// `pragma solidity ^0.5.2;` => `^0.5.2`
-pub fn find_version_pragma(contract: &str) -> Option<&str> {
-    RE_SOL_PRAGMA_VERSION.captures(contract)?.name("version").map(|m| m.as_str())
+pub fn find_version_pragma(contract: &str) -> Option<Match> {
+    RE_SOL_PRAGMA_VERSION.captures(contract)?.name("version")
 }
@@ -79,6 +85,37 @@ pub fn canonicalize(path: impl AsRef<Path>) -> Result<PathBuf, SolcIoError> {
     dunce::canonicalize(&path).map_err(|err| SolcIoError::new(err, path))
 }

+/// Try to resolve import to a local file or library path
+pub fn resolve_import_component(
+    import: &Path,
+    node_dir: &Path,
+    paths: &ProjectPathsConfig,
+) -> error::Result<PathBuf> {
+    let component = match import.components().next() {
+        Some(inner) => inner,
+        None => {
+            return Err(SolcError::msg(format!(
+                "failed to resolve import at \"{:?}\"",
+                import.display()
+            )))
+        }
+    };
+    if component == Component::CurDir || component == Component::ParentDir {
+        // if the import is relative we assume it's already part of the processed input file set
+        canonicalize(node_dir.join(import)).map_err(|err| err.into())
+    } else {
+        // resolve library file
+        match paths.resolve_library_import(import.as_ref()) {
+            Some(lib) => Ok(lib),
+            None => Err(SolcError::msg(format!(
+                "failed to resolve library import \"{:?}\"",
+                import.display()
+            ))),
+        }
+    }
+}
+
 /// Returns the path to the library if the source path is in fact determined to be a library path,
 /// and it exists.
 /// Note: this does not handle relative imports or remappings.
@@ -305,7 +342,7 @@ import { T } from '../Test2.sol';
 "##;
         assert_eq!(
             vec!["hardhat/console.sol", "../contract/Contract.sol", "../Test.sol", "../Test2.sol"],
-            find_import_paths(s)
+            find_import_paths(s).map(|m| m.as_str()).collect::<Vec<&str>>()
         );
     }

     #[test]
@@ -313,7 +350,7 @@ import { T } from '../Test2.sol';
         let s = r##"//SPDX-License-Identifier: Unlicense
 pragma solidity ^0.8.0;
 "##;
-        assert_eq!(Some("^0.8.0"), find_version_pragma(s));
+        assert_eq!(Some("^0.8.0"), find_version_pragma(s).map(|s| s.as_str()));
     }

     #[test]

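The license handling combines the new regex with the first-line restriction applied in `parse_data`: only an SPDX identifier on the first line of a file is picked up (and later stripped from imported files). A small standalone sketch of that behaviour:

use regex::Regex;

fn main() {
    // same pattern as utils::RE_SOL_SDPX_LICENSE_IDENTIFIER
    let re = Regex::new(r"///?\s*SPDX-License-Identifier:\s*(?P<license>.+)").unwrap();
    let src = "// SPDX-License-Identifier: MIT\npragma solidity ^0.8.0;\n";
    // only the first line is inspected, mirroring parse_data
    let license = src
        .lines()
        .next()
        .and_then(|line| re.captures(line))
        .and_then(|cap| cap.name("license").map(|m| m.as_str().to_owned()));
    assert_eq!(license.as_deref(), Some("MIT"));
}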

@@ -307,3 +307,56 @@ fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()>
     }
     Ok(())
 }
+
+#[test]
+fn can_flatten_file() {
+    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/test-contract-libs");
+    let target = root.join("src").join("Foo.sol");
+    let paths = ProjectPathsConfig::builder()
+        .sources(root.join("src"))
+        .lib(root.join("lib1"))
+        .lib(root.join("lib2"));
+    let project = TempProject::<MinimalCombinedArtifacts>::new(paths).unwrap();
+
+    let result = project.flatten(&target);
+    assert!(result.is_ok());
+
+    let result = result.unwrap();
+    assert!(result.find("contract Foo").is_some());
+    assert!(result.find("contract Bar").is_some());
+}
+
+#[test]
+fn can_flatten_file_with_external_lib() {
+    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample");
+    let paths = ProjectPathsConfig::builder()
+        .sources(root.join("contracts"))
+        .lib(root.join("node_modules"));
+    let project = TempProject::<MinimalCombinedArtifacts>::new(paths).unwrap();
+
+    let target = root.join("contracts").join("Greeter.sol");
+
+    let result = project.flatten(&target);
+    assert!(result.is_ok());
+
+    let result = result.unwrap();
+    assert!(result.find("library console").is_some());
+    assert!(result.find("contract Greeter").is_some());
+}
+
+#[test]
+fn can_flatten_file_in_dapp_sample() {
+    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
+    let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib"));
+    let project = TempProject::<MinimalCombinedArtifacts>::new(paths).unwrap();
+
+    let target = root.join("src/Dapp.t.sol");
+
+    let result = project.flatten(&target);
+    assert!(result.is_ok());
+
+    let result = result.unwrap();
+    assert!(result.find("contract DSTest").is_some());
+    assert!(result.find("contract Dapp").is_some());
+    assert!(result.find("contract DappTest").is_some());
+}