chore: clippy (#1812)

* chore: clippy

* fmt
DaniPopes 2022-11-08 00:43:11 +01:00 committed by GitHub
parent 1119976dc7
commit debd6fec3d
84 changed files with 247 additions and 248 deletions
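
Nearly every hunk below applies the same mechanical change from clippy's `uninlined_format_args` lint: plain identifiers are captured directly inside `format!`/`write!`/`println!` strings, while expressions stay as positional arguments. A few hunks also drop needless borrows (e.g. `keccak256(&bytes)` becomes `keccak256(bytes)`). The following is a minimal illustration of the format-args pattern only; the identifiers are made up for the example and are not taken from the diff:

// Illustration only (not part of this commit): the inlined format-args
// pattern that most hunks below apply.
fn main() {
    let name = "transfer";
    let idx = 1;

    let old = format!("{}{}", name, idx + 1); // before: positional argument for `name`
    let new = format!("{name}{}", idx + 1);   // after: `name` inlined; `idx + 1` stays positional
    assert_eq!(old, new);

    // Debug and hex specifiers inline the same way: "{:?}" -> "{x:?}", "{:x}" -> "{x:x}".
    let x = 255u32;
    assert_eq!(format!("{:x}", x), format!("{x:x}"));
}
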

View File

@@ -337,7 +337,7 @@ where
 if not_aliased.len() > 1 {
 let mut overloaded_aliases = Vec::new();
 for (idx, (sig, name)) in not_aliased.into_iter().enumerate() {
-let unique_name = format!("{}{}", name, idx + 1);
+let unique_name = format!("{name}{}", idx + 1);
 overloaded_aliases.push((sig, get_ident(&unique_name)));
 }
 aliases.extend(overloaded_aliases);

View File

@@ -64,7 +64,7 @@ where
 }
 pub(crate) fn imports(name: &str) -> TokenStream {
-let doc = util::expand_doc(&format!("{} was auto-generated with ethers-rs Abigen. More information at: https://github.com/gakonst/ethers-rs", name));
+let doc = util::expand_doc(&format!("{name} was auto-generated with ethers-rs Abigen. More information at: https://github.com/gakonst/ethers-rs"));
 let ethers_core = ethers_core_crate();
 let ethers_providers = ethers_providers_crate();
@@ -116,7 +116,7 @@ pub(crate) fn struct_declaration(cx: &Context) -> TokenStream {
 let bytecode = if let Some(ref bytecode) = cx.contract_bytecode {
 let bytecode_name = cx.inline_bytecode_ident();
-let hex_bytecode = format!("{}", bytecode);
+let hex_bytecode = format!("{bytecode}");
 quote! {
 /// Bytecode of the #name contract
 pub static #bytecode_name: #ethers_contract::Lazy<#ethers_core::types::Bytes> = #ethers_contract::Lazy::new(|| #hex_bytecode.parse()

View File

@@ -34,7 +34,7 @@ impl Context {
 .map(|function| {
 let signature = function.abi_signature();
 self.expand_function(function, aliases.get(&signature).cloned())
-.with_context(|| format!("error expanding function '{}'", signature))
+.with_context(|| format!("error expanding function '{signature}'"))
 })
 .collect::<Result<Vec<_>>>()?;
@@ -593,7 +593,7 @@ impl Context {
 name_conflicts(*idx, &diffs)
 {
 needs_alias_for_first_fun_using_idx = true;
-format!("{}{}", overloaded_fun.name.to_snake_case(), idx)
+format!("{}{idx}", overloaded_fun.name.to_snake_case())
 } else {
 format!(
 "{}_with_{}",
@@ -608,7 +608,7 @@ impl Context {
 name_conflicts(*idx, &diffs)
 {
 needs_alias_for_first_fun_using_idx = true;
-format!("{}{}", overloaded_fun.name.to_snake_case(), idx)
+format!("{}{idx}", overloaded_fun.name.to_snake_case())
 } else {
 // 1 + n additional input params
 let and = diff
@@ -632,7 +632,7 @@ impl Context {
 if needs_alias_for_first_fun_using_idx {
 // insert an alias for the root duplicated call
-let prev_alias = format!("{}{}", first_fun.name.to_snake_case(), first_fun_idx);
+let prev_alias = format!("{}{first_fun_idx}", first_fun.name.to_snake_case());
 let alias = MethodAlias::new(&prev_alias);
@@ -698,9 +698,9 @@ fn expand_struct_name_postfix(
 postfix: &str,
 ) -> Ident {
 let name = if let Some(alias) = alias {
-format!("{}{}", alias.struct_name, postfix)
+format!("{}{postfix}", alias.struct_name)
 } else {
-format!("{}{}", util::safe_pascal_case(&function.name), postfix)
+format!("{}{postfix}", util::safe_pascal_case(&function.name))
 };
 util::ident(&name)
 }

View File

@@ -129,9 +129,9 @@ impl Context {
 "".to_string()
 };
-let abi_signature = format!("{}({})", name, sig,);
-let abi_signature_doc = util::expand_doc(&format!("`{}`", abi_signature));
+let abi_signature = format!("{name}({sig})",);
+let abi_signature_doc = util::expand_doc(&format!("`{abi_signature}`"));
 // use the same derives as for events
 let derives = util::expand_derives(&self.event_derives);
@@ -184,7 +184,7 @@ impl Context {
 param_types.iter().map(|kind| kind.to_string()).collect::<Vec<_>>().join(","),
 );
-let abi_signature_doc = util::expand_doc(&format!("`{}`", abi_signature));
+let abi_signature_doc = util::expand_doc(&format!("`{abi_signature}`"));
 let name = util::ident(name);
@@ -392,12 +392,12 @@ fn insert_rust_type_name(
 let mut other_name = name.clone();
 // name collision `A.name` `B.name`, rename to `AName`, `BName`
 if !other_projections.is_empty() {
-other_name = format!("{}{}", other_projections.remove(0).to_pascal_case(), other_name);
+other_name = format!("{}{other_name}", other_projections.remove(0).to_pascal_case());
 }
 insert_rust_type_name(type_names, other_name, other_projections, other_id);
 if !projections.is_empty() {
-name = format!("{}{}", projections.remove(0).to_pascal_case(), name);
+name = format!("{}{name}", projections.remove(0).to_pascal_case());
 }
 insert_rust_type_name(type_names, name, projections, id);
 } else {

View File

@@ -598,8 +598,8 @@ ethers = {{ git = "https://github.com/gakonst/ethers-rs", default-features = fal
 mod_names.insert(shared.name.to_snake_case());
 }
-for module in mod_names.into_iter().map(|name| format!("pub mod {};", name)) {
-writeln!(buf, "{}", module)?;
+for module in mod_names.into_iter().map(|name| format!("pub mod {name};")) {
+writeln!(buf, "{module}")?;
 }
 Ok(())

View File

@@ -248,7 +248,7 @@ fn get_local_contract(path: impl AsRef<str>) -> Result<String> {
 #[cfg(not(target_arch = "wasm32"))]
 fn get_http_contract(url: &Url) -> Result<String> {
 let json = util::http_get(url.as_str())
-.with_context(|| format!("failed to retrieve JSON from {}", url))?;
+.with_context(|| format!("failed to retrieve JSON from {url}"))?;
 Ok(json)
 }
@@ -266,15 +266,14 @@ fn get_etherscan_contract(address: Address, domain: &str) -> Result<String> {
 "snowtrace.io" => env::var("SNOWTRACE_API_KEY").ok(),
 _ => None,
 };
-key_res.map(|key| format!("&apikey={}", key)).unwrap_or_default()
+key_res.map(|key| format!("&apikey={key}")).unwrap_or_default()
 };
 let abi_url = format!(
 "http://api.{}/api?module=contract&action=getabi&address={:?}&format=raw{}",
 domain, address, api_key,
 );
-let abi =
-util::http_get(&abi_url).context(format!("failed to retrieve ABI from {}", domain))?;
+let abi = util::http_get(&abi_url).context(format!("failed to retrieve ABI from {domain}"))?;
 if abi.starts_with("Contract source code not verified") {
 eyre::bail!("Contract source code not verified: {:?}", address);
@@ -292,9 +291,9 @@ fn get_etherscan_contract(address: Address, domain: &str) -> Result<String> {
 /// Retrieves a Truffle artifact or ABI from an npm package through `unpkg.io`.
 #[cfg(not(target_arch = "wasm32"))]
 fn get_npm_contract(package: &str) -> Result<String> {
-let unpkg_url = format!("https://unpkg.io/{}", package);
+let unpkg_url = format!("https://unpkg.io/{package}");
 let json = util::http_get(&unpkg_url)
-.with_context(|| format!("failed to retrieve JSON from for npm package {}", package))?;
+.with_context(|| format!("failed to retrieve JSON from for npm package {package}"))?;
 Ok(json)
 }

View File

@@ -19,7 +19,7 @@ pub fn ident(name: &str) -> Ident {
 ///
 /// Parsing keywords like `self` can fail, in this case we add an underscore.
 pub fn safe_ident(name: &str) -> Ident {
-syn::parse_str::<SynIdent>(name).unwrap_or_else(|_| ident(&format!("{}_", name)))
+syn::parse_str::<SynIdent>(name).unwrap_or_else(|_| ident(&format!("{name}_")))
 }
 /// Converts a `&str` to `snake_case` `String` while respecting identifier rules
@@ -35,7 +35,7 @@ pub fn safe_pascal_case(ident: &str) -> String {
 /// respects identifier rules, such as, an identifier must not start with a numeric char
 fn safe_identifier_name(name: String) -> String {
 if name.starts_with(|c: char| c.is_numeric()) {
-format!("_{}", name)
+format!("_{name}")
 } else {
 name
 }
@@ -76,7 +76,7 @@ pub fn preserve_underscore_delim(ident: &str, alias: &str) -> String {
 /// identifiers that are reserved keywords get `_` appended to them.
 pub fn expand_input_name(index: usize, name: &str) -> TokenStream {
 let name_str = match name {
-"" => format!("p{}", index),
+"" => format!("p{index}"),
 n => n.to_snake_case(),
 };
 let name = safe_ident(&name_str);

View File

@@ -184,7 +184,7 @@ impl Parse for Parameter {
 _ => {
 return Err(ParseError::new(
 name.span(),
-format!("unexpected named parameter `{}`", name),
+format!("unexpected named parameter `{name}`"),
 ))
 }
 };

View File

@@ -33,7 +33,7 @@ pub(crate) fn derive_eth_call_impl(input: DeriveInput) -> TokenStream {
 ) {
 Ok(derived) => derived,
 Err(err) => {
-Error::new(span, format!("Unable to determine ABI for `{}` : {}", src, err))
+Error::new(span, format!("Unable to determine ABI for `{src}` : {err}"))
 .to_compile_error()
 }
 }

View File

@@ -32,14 +32,14 @@ pub fn parse_calllike_attributes(
 Meta::Path(path) => {
 return Err(Error::new(
 path.span(),
-format!("unrecognized {} parameter", attr_name),
+format!("unrecognized {attr_name} parameter"),
 )
 .to_compile_error())
 }
 Meta::List(meta) => {
 return Err(Error::new(
 meta.path.span(),
-format!("unrecognized {} parameter", attr_name),
+format!("unrecognized {attr_name} parameter"),
 )
 .to_compile_error())
 }
@@ -85,7 +85,7 @@ pub fn parse_calllike_attributes(
 } else {
 return Err(Error::new(
 meta.span(),
-format!("unrecognized {} parameter", attr_name),
+format!("unrecognized {attr_name} parameter"),
 )
 .to_compile_error())
 }

View File

@@ -32,7 +32,7 @@ pub(crate) fn derive_eth_error_impl(input: DeriveInput) -> TokenStream {
 ) {
 Ok(derived) => derived,
 Err(err) => {
-Error::new(span, format!("Unable to determine ABI for `{}` : {}", src, err))
+Error::new(span, format!("Unable to determine ABI for `{src}` : {err}"))
 .to_compile_error()
 }
 }

View File

@@ -168,20 +168,20 @@ pub fn derive_abi_inputs_from_fields(
 Fields::Unit => {
 return Err(Error::new(
 input.span(),
-format!("{} cannot be derived for empty structs and unit", trait_name),
+format!("{trait_name} cannot be derived for empty structs and unit"),
 ))
 }
 },
 Data::Enum(_) => {
 return Err(Error::new(
 input.span(),
-format!("{} cannot be derived for enums", trait_name),
+format!("{trait_name} cannot be derived for enums"),
 ))
 }
 Data::Union(_) => {
 return Err(Error::new(
 input.span(),
-format!("{} cannot be derived for unions", trait_name),
+format!("{trait_name} cannot be derived for unions"),
 ))
 }
 };
@@ -258,20 +258,20 @@ pub fn derive_abi_parameters_array(
 Fields::Unit => {
 return Err(Error::new(
 input.span(),
-format!("{} cannot be derived for empty structs and unit", trait_name),
+format!("{trait_name} cannot be derived for empty structs and unit"),
 ))
 }
 },
 Data::Enum(_) => {
 return Err(Error::new(
 input.span(),
-format!("{} cannot be derived for enums", trait_name),
+format!("{trait_name} cannot be derived for enums"),
 ))
 }
 Data::Union(_) => {
 return Err(Error::new(
 input.span(),
-format!("{} cannot be derived for unions", trait_name),
+format!("{trait_name} cannot be derived for unions"),
 ))
 }
 };

View File

@@ -140,7 +140,7 @@ impl TryFrom<u8> for MulticallVersion {
 1 => Ok(MulticallVersion::Multicall),
 2 => Ok(MulticallVersion::Multicall2),
 3 => Ok(MulticallVersion::Multicall3),
-_ => Err(format!("Invalid Multicall version: {}. Accepted values: 1, 2, 3.", v)),
+_ => Err(format!("Invalid Multicall version: {v}. Accepted values: 1, 2, 3.")),
 }
 }
 }

View File

@@ -85,7 +85,7 @@ fn can_gen_structs_readable() {
 assert_codec::<Value>();
 assert_codec::<Addresses>();
 let encoded = addr.clone().encode();
-let other = Addresses::decode(&encoded).unwrap();
+let other = Addresses::decode(encoded).unwrap();
 assert_eq!(addr, other);
 }
@@ -178,7 +178,7 @@ fn can_gen_return_struct() {
 binding: T,
 ) {
 let encoded = binding.clone().encode();
-let decoded = T::decode(&encoded).unwrap();
+let decoded = T::decode(encoded).unwrap();
 assert_eq!(binding, decoded);
 }

View File

@@ -394,7 +394,7 @@ fn eth_display_works() {
 hex::encode(&item.v),
 );
-assert_eq!(val, format!("{}", item));
+assert_eq!(val, format!("{item}"));
 }
 #[test]
@@ -408,9 +408,9 @@ fn eth_display_works_for_human_readable() {
 );
 let log = LogFilter("abc".to_string());
-assert_eq!("abc".to_string(), format!("{}", log));
+assert_eq!("abc".to_string(), format!("{log}"));
 let log = Log2Filter { x: "abc".to_string() };
-assert_eq!("abc".to_string(), format!("{}", log));
+assert_eq!("abc".to_string(), format!("{log}"));
 }
 #[test]
@@ -493,7 +493,7 @@ fn can_derive_abi_codec() {
 let val = SomeType { inner: Default::default(), msg: "hello".to_string() };
 let encoded = val.clone().encode();
-let other = SomeType::decode(&encoded).unwrap();
+let other = SomeType::decode(encoded).unwrap();
 assert_eq!(val, other);
 }
@@ -603,7 +603,7 @@ fn eth_display_works_on_ethers_bytes() {
 }
 let call = LogBytesCall { p_0: hex::decode(b"aaaaaa").unwrap().into() };
-let s = format!("{}", call);
+let s = format!("{call}");
 assert_eq!(s, "0xaaaaaa");
 }

View File

@@ -31,7 +31,7 @@ pub struct ValueChanged {
 /// compiles the given contract and returns the ABI and Bytecode
 #[track_caller]
 pub fn compile_contract(name: &str, filename: &str) -> (Abi, Bytes) {
-let path = format!("./tests/solidity-contracts/{}", filename);
+let path = format!("./tests/solidity-contracts/{filename}");
 let compiled = Solc::default().compile_source(&path).unwrap();
 let contract = compiled.get(&path, name).expect("could not find contract");
 let (abi, bin, _) = contract.into_parts_or_default();

View File

@@ -94,12 +94,12 @@ impl<'input> Token<'input> {
 impl<'input> fmt::Display for Token<'input> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 match self {
-Token::Identifier(id) => write!(f, "{}", id),
-Token::Number(num) => write!(f, "{}", num),
-Token::HexNumber(num) => write!(f, "0x{}", num),
-Token::Uint(w) => write!(f, "uint{}", w),
-Token::Int(w) => write!(f, "int{}", w),
-Token::Bytes(w) => write!(f, "bytes{}", w),
+Token::Identifier(id) => write!(f, "{id}"),
+Token::Number(num) => write!(f, "{num}"),
+Token::HexNumber(num) => write!(f, "0x{num}"),
+Token::Uint(w) => write!(f, "uint{w}"),
+Token::Int(w) => write!(f, "int{w}"),
+Token::Bytes(w) => write!(f, "bytes{w}"),
 Token::Byte => write!(f, "byte"),
 Token::DynamicBytes => write!(f, "bytes"),
 Token::Semicolon => write!(f, ";"),

View File

@@ -95,7 +95,7 @@ impl ErrorExt for ethabi::AbiError {
 return format!("{}()", self.name)
 }
 let inputs = self.inputs.iter().map(|p| p.kind.to_string()).collect::<Vec<_>>().join(",");
-format!("{}({})", self.name, inputs)
+format!("{}({inputs})", self.name)
 }
 fn selector(&self) -> Selector {

View File

@@ -257,10 +257,10 @@ mod tests {
 assert!(matches!(abi, JsonAbi::Array(_)));
 let code = "0x608060405234801561001057600080fd5b50610242806100206000396000f3fe608060405234801561001057600080fd5b506004361061002b5760003560e01c80635581701b14610030575b600080fd5b61004a60048036038101906100459190610199565b610060565b60405161005791906101f1565b60405180910390f35b610068610070565b819050919050565b60405180602001604052806000151581525090565b6000604051905090565b600080fd5b600080fd5b6000601f19601f8301169050919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6100e282610099565b810181811067ffffffffffffffff82111715610101576101006100aa565b5b80604052505050565b6000610114610085565b905061012082826100d9565b919050565b60008115159050919050565b61013a81610125565b811461014557600080fd5b50565b60008135905061015781610131565b92915050565b60006020828403121561017357610172610094565b5b61017d602061010a565b9050600061018d84828501610148565b60008301525092915050565b6000602082840312156101af576101ae61008f565b5b60006101bd8482850161015d565b91505092915050565b6101cf81610125565b82525050565b6020820160008201516101eb60008501826101c6565b50505050565b600060208201905061020660008301846101d5565b9291505056fea2646970667358221220890202b0964477379a457ab3725a21d7c14581e4596552e32a54e23f1c6564e064736f6c634300080c0033";
-let s = format!(r#"{{"abi": {}, "bin" : "{}" }}"#, abi_str, code);
+let s = format!(r#"{{"abi": {abi_str}, "bin" : "{code}" }}"#);
 assert_has_bytecode(&s);
-let s = format!(r#"{{"abi": {}, "bytecode" : {{ "object": "{}" }} }}"#, abi_str, code);
+let s = format!(r#"{{"abi": {abi_str}, "bytecode" : {{ "object": "{code}" }} }}"#);
 assert_has_bytecode(&s);
 let hh_artifact = include_str!(
@@ -286,7 +286,7 @@ mod tests {
 #[test]
 fn ignores_empty_bytecode() {
 let abi_str = r#"[{"anonymous":false,"inputs":[{"indexed":true,"internalType":"uint64","name":"number","type":"uint64"}],"name":"MyEvent","type":"event"},{"inputs":[],"name":"greet","outputs":[],"stateMutability":"nonpayable","type":"function"}]"#;
-let s = format!(r#"{{"abi": {}, "bin" : "0x" }}"#, abi_str);
+let s = format!(r#"{{"abi": {abi_str}, "bin" : "0x" }}"#);
 match serde_json::from_str::<JsonAbi>(&s).unwrap() {
 JsonAbi::Object(abi) => {
@@ -297,7 +297,7 @@ mod tests {
 }
 }
-let s = format!(r#"{{"abi": {}, "bytecode" : {{ "object": "0x" }} }}"#, abi_str);
+let s = format!(r#"{{"abi": {abi_str}, "bytecode" : {{ "object": "0x" }} }}"#);
 match serde_json::from_str::<JsonAbi>(&s).unwrap() {
 JsonAbi::Object(abi) => {

View File

@@ -127,7 +127,7 @@ impl StructFieldType {
 if path.is_empty() {
 name.to_string()
 } else {
-format!("{}.{}", path, name)
+format!("{path}.{name}")
 }
 }

View File

@@ -59,11 +59,11 @@ pub fn determine_ethers_crates() -> (&'static str, &'static str, &'static str) {
 };
 // check if the lock file exists, if it's missing we need to clean up afterward
-let lock_file = format!("{}/Cargo.lock", manifest_dir);
+let lock_file = format!("{manifest_dir}/Cargo.lock");
 let needs_lock_file_cleanup = !std::path::Path::new(&lock_file).exists();
 let res = MetadataCommand::new()
-.manifest_path(&format!("{}/Cargo.toml", manifest_dir))
+.manifest_path(&format!("{manifest_dir}/Cargo.toml"))
 .exec()
 .ok()
 .and_then(|metadata| {

View File

@@ -461,7 +461,7 @@ impl Serialize for BlockId {
 match *self {
 BlockId::Hash(ref x) => {
 let mut s = serializer.serialize_struct("BlockIdEip1898", 1)?;
-s.serialize_field("blockHash", &format!("{:?}", x))?;
+s.serialize_field("blockHash", &format!("{x:?}"))?;
 s.end()
 }
 BlockId::Number(ref num) => num.serialize(serializer),
@@ -599,7 +599,7 @@ impl Serialize for BlockNumber {
 S: Serializer,
 {
 match *self {
-BlockNumber::Number(ref x) => serializer.serialize_str(&format!("0x{:x}", x)),
+BlockNumber::Number(ref x) => serializer.serialize_str(&format!("0x{x:x}")),
 BlockNumber::Latest => serializer.serialize_str("latest"),
 BlockNumber::Finalized => serializer.serialize_str("finalized"),
 BlockNumber::Safe => serializer.serialize_str("safe"),
@@ -638,7 +638,7 @@ impl FromStr for BlockNumber {
 impl fmt::Display for BlockNumber {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 match self {
-BlockNumber::Number(ref x) => format!("0x{:x}", x).fmt(f),
+BlockNumber::Number(ref x) => format!("0x{x:x}").fmt(f),
 BlockNumber::Latest => f.write_str("latest"),
 BlockNumber::Finalized => f.write_str("finalized"),
 BlockNumber::Safe => f.write_str("safe"),

View File

@@ -166,7 +166,7 @@ impl FromStr for Bytes {
 hex::decode(value)
 }
 .map(Into::into)
-.map_err(|e| ParseBytesError(format!("Invalid hex: {}", e)))
+.map_err(|e| ParseBytesError(format!("Invalid hex: {e}")))
 }
 }
@@ -200,8 +200,8 @@ mod tests {
 fn hex_formatting() {
 let b = Bytes::from(vec![1, 35, 69, 103, 137, 171, 205, 239]);
 let expected = String::from("0x0123456789abcdef");
-assert_eq!(format!("{:x}", b), expected);
-assert_eq!(format!("{}", b), expected);
+assert_eq!(format!("{b:x}"), expected);
+assert_eq!(format!("{b}"), expected);
 }
 #[test]
@@ -219,7 +219,7 @@ mod tests {
 #[test]
 fn test_debug_formatting() {
 let b = Bytes::from(vec![1, 35, 69, 103, 137, 171, 205, 239]);
-assert_eq!(format!("{:?}", b), "Bytes(0x0123456789abcdef)");
-assert_eq!(format!("{:#?}", b), "Bytes(0x0123456789abcdef)");
+assert_eq!(format!("{b:?}"), "Bytes(0x0123456789abcdef)");
+assert_eq!(format!("{b:#?}"), "Bytes(0x0123456789abcdef)");
 }
 }

View File

@@ -271,7 +271,7 @@ impl fmt::Display for Chain {
 Chain::AuroraTestnet => "aurora-testnet",
 };
-write!(formatter, "{}", chain)
+write!(formatter, "{chain}")
 }
 }

View File

@@ -626,7 +626,7 @@ where
 }
 match serde_json::from_value::<Variadic<T>>(value).map_err(|err| {
-serde::de::Error::custom(format!("Invalid variadic value or array type: {}", err))
+serde::de::Error::custom(format!("Invalid variadic value or array type: {err}"))
 })? {
 Variadic::Value(val) => Ok(ValueOrArray::Value(val)),
 Variadic::Array(arr) => Ok(ValueOrArray::Array(arr)),

View File

@@ -1046,7 +1046,7 @@ impl fmt::Display for I256 {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 let (sign, abs) = self.into_sign_and_abs();
 sign.fmt(f)?;
-write!(f, "{}", abs)
+write!(f, "{abs}")
 }
 }
@@ -1054,7 +1054,7 @@ impl fmt::LowerHex for I256 {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 let (sign, abs) = self.into_sign_and_abs();
 fmt::Display::fmt(&sign, f)?;
-write!(f, "{:x}", abs)
+write!(f, "{abs:x}")
 }
 }
@@ -1064,9 +1064,9 @@ impl fmt::UpperHex for I256 {
 fmt::Display::fmt(&sign, f)?;
 // NOTE: Work around `U256: !UpperHex`.
-let mut buffer = format!("{:x}", abs);
+let mut buffer = format!("{abs:x}");
 buffer.make_ascii_uppercase();
-write!(f, "{}", buffer)
+write!(f, "{buffer}")
 }
 }
@@ -1385,13 +1385,13 @@ mod tests {
 fn parse_dec_str() {
 let unsigned = U256::from_dec_str("314159265358979323846264338327950288419716").unwrap();
-let value = I256::from_dec_str(&format!("-{}", unsigned)).unwrap();
+let value = I256::from_dec_str(&format!("-{unsigned}")).unwrap();
 assert_eq!(value.into_sign_and_abs(), (Sign::Negative, unsigned));
-let value = I256::from_dec_str(&format!("{}", unsigned)).unwrap();
+let value = I256::from_dec_str(&format!("{unsigned}")).unwrap();
 assert_eq!(value.into_sign_and_abs(), (Sign::Positive, unsigned));
-let value = I256::from_dec_str(&format!("+{}", unsigned)).unwrap();
+let value = I256::from_dec_str(&format!("+{unsigned}")).unwrap();
 assert_eq!(value.into_sign_and_abs(), (Sign::Positive, unsigned));
 let err = I256::from_dec_str("invalid string").unwrap_err();
@@ -1414,13 +1414,13 @@ mod tests {
 fn parse_hex_str() {
 let unsigned = U256::from_dec_str("314159265358979323846264338327950288419716").unwrap();
-let value = I256::from_hex_str(&format!("-{:x}", unsigned)).unwrap();
+let value = I256::from_hex_str(&format!("-{unsigned:x}")).unwrap();
 assert_eq!(value.into_sign_and_abs(), (Sign::Negative, unsigned));
-let value = I256::from_hex_str(&format!("{:x}", unsigned)).unwrap();
+let value = I256::from_hex_str(&format!("{unsigned:x}")).unwrap();
 assert_eq!(value.into_sign_and_abs(), (Sign::Positive, unsigned));
-let value = I256::from_hex_str(&format!("+{:x}", unsigned)).unwrap();
+let value = I256::from_hex_str(&format!("+{unsigned:x}")).unwrap();
 assert_eq!(value.into_sign_and_abs(), (Sign::Positive, unsigned));
 let err = I256::from_hex_str("invalid string").unwrap_err();
@@ -1445,20 +1445,20 @@ mod tests {
 let positive = I256::try_from(unsigned).unwrap();
 let negative = -positive;
-assert_eq!(format!("{}", positive), format!("{}", unsigned));
-assert_eq!(format!("{}", negative), format!("-{}", unsigned));
-assert_eq!(format!("{:+}", positive), format!("+{}", unsigned));
-assert_eq!(format!("{:+}", negative), format!("-{}", unsigned));
-assert_eq!(format!("{:x}", positive), format!("{:x}", unsigned));
-assert_eq!(format!("{:x}", negative), format!("-{:x}", unsigned));
-assert_eq!(format!("{:+x}", positive), format!("+{:x}", unsigned));
-assert_eq!(format!("{:+x}", negative), format!("-{:x}", unsigned));
-assert_eq!(format!("{:X}", positive), format!("{:x}", unsigned).to_uppercase());
-assert_eq!(format!("{:X}", negative), format!("-{:x}", unsigned).to_uppercase());
-assert_eq!(format!("{:+X}", positive), format!("+{:x}", unsigned).to_uppercase());
-assert_eq!(format!("{:+X}", negative), format!("-{:x}", unsigned).to_uppercase());
+assert_eq!(format!("{positive}"), format!("{unsigned}"));
+assert_eq!(format!("{negative}"), format!("-{unsigned}"));
+assert_eq!(format!("{positive:+}"), format!("+{unsigned}"));
+assert_eq!(format!("{negative:+}"), format!("-{unsigned}"));
+assert_eq!(format!("{positive:x}"), format!("{unsigned:x}"));
+assert_eq!(format!("{negative:x}"), format!("-{unsigned:x}"));
+assert_eq!(format!("{positive:+x}"), format!("+{unsigned:x}"));
+assert_eq!(format!("{negative:+x}"), format!("-{unsigned:x}"));
+assert_eq!(format!("{positive:X}"), format!("{unsigned:x}").to_uppercase());
+assert_eq!(format!("{negative:X}"), format!("-{unsigned:x}").to_uppercase());
+assert_eq!(format!("{positive:+X}"), format!("+{unsigned:x}").to_uppercase());
+assert_eq!(format!("{negative:+X}"), format!("-{unsigned:x}").to_uppercase());
 }
 #[test]

View File

@@ -741,7 +741,7 @@ pub fn encode_field(
 // uints are commonly stringified due to how ethers-js encodes
 let val: StringifiedNumeric = serde_json::from_value(value.clone())?;
 let val = val.try_into().map_err(|err| {
-Eip712Error::Message(format!("Failed to parse uint {}", err))
+Eip712Error::Message(format!("Failed to parse uint {err}"))
 })?;
 Token::Uint(val)
@@ -808,7 +808,7 @@ pub fn find_parameter_type(ty: &Type) -> Result<ParamType, TokenStream> {
 s => parse_int_param_type(s).ok_or_else(|| {
 Error::new(
 ty.span(),
-format!("Failed to derive proper ABI from field: {})", s),
+format!("Failed to derive proper ABI from field: {s})"),
 )
 .to_compile_error()
 }),
@@ -906,9 +906,9 @@ pub fn parse_fields(ast: &DeriveInput) -> Result<Vec<(String, ParamType)>, Token
 /// Convert hash map of field names and types into a type hash corresponding to enc types;
 pub fn make_type_hash(primary_type: String, fields: &[(String, ParamType)]) -> [u8; 32] {
 let parameters =
-fields.iter().map(|(k, v)| format!("{} {}", v, k)).collect::<Vec<String>>().join(",");
-let sig = format!("{}({})", primary_type, parameters);
+fields.iter().map(|(k, v)| format!("{v} {k}")).collect::<Vec<String>>().join(",");
+let sig = format!("{primary_type}({parameters})");
 keccak256(sig)
 }

View File

@@ -83,7 +83,7 @@ impl Tokenizable for Uint8 {
 }
 Ok(Uint8(data.low_u32() as u8))
 }
-other => Err(InvalidOutputType(format!("Expected `uint8`, got {:?}", other))),
+other => Err(InvalidOutputType(format!("Expected `uint8`, got {other:?}"))),
 }
 }
 fn into_token(self) -> Token {

View File

@@ -15,7 +15,7 @@ where
 {
 let message = message.as_ref();
-let mut eth_message = format!("{}{}", PREFIX, message.len()).into_bytes();
+let mut eth_message = format!("{PREFIX}{}", message.len()).into_bytes();
 eth_message.extend_from_slice(message);
 keccak256(&eth_message).into()

View File

@@ -265,7 +265,7 @@ pub fn get_create2_address_from_hash(
 [&[0xff], from.into().as_bytes(), salt.into().as_ref(), init_code_hash.into().as_ref()]
 .concat();
-let hash = keccak256(&bytes);
+let hash = keccak256(bytes);
 let mut bytes = [0u8; 20];
 bytes.copy_from_slice(&hash[12..]);
@@ -286,10 +286,10 @@ pub fn secret_key_to_address(secret_key: &SigningKey) -> Address {
 /// Ref: <https://github.com/ethereum/EIPs/blob/master/EIPS/eip-55.md>
 pub fn to_checksum(addr: &Address, chain_id: Option<u8>) -> String {
 let prefixed_addr = match chain_id {
-Some(chain_id) => format!("{}0x{:x}", chain_id, addr),
-None => format!("{:x}", addr),
+Some(chain_id) => format!("{chain_id}0x{addr:x}"),
+None => format!("{addr:x}"),
 };
-let hash = hex::encode(keccak256(&prefixed_addr));
+let hash = hex::encode(keccak256(prefixed_addr));
 let hash = hash.as_bytes();
 let addr_hex = hex::encode(addr.as_bytes());

View File

@@ -448,16 +448,16 @@ impl TokenQueryOption {
 let mut params: HashMap<&'static str, String> = list_params.into();
 match self {
 TokenQueryOption::ByAddress(address) => {
-params.insert("address", format!("{:?}", address));
+params.insert("address", format!("{address:?}"));
 params
 }
 TokenQueryOption::ByContract(contract) => {
-params.insert("contractaddress", format!("{:?}", contract));
+params.insert("contractaddress", format!("{contract:?}"));
 params
 }
 TokenQueryOption::ByAddressAndContract(address, contract) => {
-params.insert("address", format!("{:?}", address));
-params.insert("contractaddress", format!("{:?}", contract));
+params.insert("address", format!("{address:?}"));
+params.insert("contractaddress", format!("{contract:?}"));
 params
 }
 }
@@ -507,7 +507,7 @@ impl Client {
 tag: Option<Tag>,
 ) -> Result<AccountBalance> {
 let tag_str = tag.unwrap_or_default().to_string();
-let addr_str = format!("{:?}", address);
+let addr_str = format!("{address:?}");
 let query = self.create_query(
 "account",
 "balance",
@@ -542,7 +542,7 @@ impl Client {
 tag: Option<Tag>,
 ) -> Result<Vec<AccountBalance>> {
 let tag_str = tag.unwrap_or_default().to_string();
-let addrs = addresses.iter().map(|x| format!("{:?}", x)).collect::<Vec<String>>().join(",");
+let addrs = addresses.iter().map(|x| format!("{x:?}")).collect::<Vec<String>>().join(",");
 let query: Query<HashMap<&str, &str>> = self.create_query(
 "account",
 "balancemulti",
@@ -577,7 +577,7 @@ impl Client {
 params: Option<TxListParams>,
 ) -> Result<Vec<NormalTransaction>> {
 let mut tx_params: HashMap<&str, String> = params.unwrap_or_default().into();
-tx_params.insert("address", format!("{:?}", address));
+tx_params.insert("address", format!("{address:?}"));
 let query = self.create_query("account", "txlist", tx_params);
 let response: Response<Vec<NormalTransaction>> = self.get_json(&query).await?;
@@ -608,10 +608,10 @@ impl Client {
 let mut tx_params: HashMap<&str, String> = params.unwrap_or_default().into();
 match tx_query_option {
 InternalTxQueryOption::ByAddress(address) => {
-tx_params.insert("address", format!("{:?}", address));
+tx_params.insert("address", format!("{address:?}"));
 }
 InternalTxQueryOption::ByTransactionHash(tx_hash) => {
-tx_params.insert("txhash", format!("{:?}", tx_hash));
+tx_params.insert("txhash", format!("{tx_hash:?}"));
 }
 _ => {}
 }
@@ -730,7 +730,7 @@ impl Client {
 page_and_offset: Option<(u64, u64)>,
 ) -> Result<Vec<MinedBlock>> {
 let mut params = HashMap::new();
-params.insert("address", format!("{:?}", address));
+params.insert("address", format!("{address:?}"));
 params.insert("blocktype", block_type.unwrap_or_default().to_string());
 if let Some((page, offset)) = page_and_offset {
 params.insert("page", page.to_string());

View File

@@ -139,22 +139,22 @@ impl Client {
 /// Return the URL for the given block number
 pub fn block_url(&self, block: u64) -> String {
-format!("{}block/{}", self.etherscan_url, block)
+format!("{}block/{block}", self.etherscan_url)
 }
 /// Return the URL for the given address
 pub fn address_url(&self, address: Address) -> String {
-format!("{}address/{:?}", self.etherscan_url, address)
+format!("{}address/{address:?}", self.etherscan_url)
 }
 /// Return the URL for the given transaction hash
 pub fn transaction_url(&self, tx_hash: H256) -> String {
-format!("{}tx/{:?}", self.etherscan_url, tx_hash)
+format!("{}tx/{tx_hash:?}", self.etherscan_url)
 }
 /// Return the URL for the given token hash
 pub fn token_url(&self, token_hash: Address) -> String {
-format!("{}token/{:?}", self.etherscan_url, token_hash)
+format!("{}token/{token_hash:?}", self.etherscan_url)
 }
 /// Execute an GET request with parameters.
@@ -373,7 +373,7 @@ impl Cache {
 }
 fn set<T: Serialize>(&self, prefix: &str, address: Address, item: T) {
-let path = self.root.join(prefix).join(format!("{:?}.json", address));
+let path = self.root.join(prefix).join(format!("{address:?}.json"));
 let writer = std::fs::File::create(path).ok().map(std::io::BufWriter::new);
 if let Some(mut writer) = writer {
 let _ = serde_json::to_writer(
@@ -393,7 +393,7 @@ impl Cache {
 }
 fn get<T: DeserializeOwned>(&self, prefix: &str, address: Address) -> Option<T> {
-let path = self.root.join(prefix).join(format!("{:?}.json", address));
+let path = self.root.join(prefix).join(format!("{address:?}.json"));
 let reader = std::io::BufReader::new(std::fs::File::open(path).ok()?);
 if let Ok(inner) = serde_json::from_reader::<_, CacheEnvelope<T>>(reader) {
 // If this does not return None then we have passed the expiry
@@ -459,7 +459,7 @@ mod tests {
 let etherscan = Client::new_from_env(Chain::Mainnet).unwrap();
 let block: u64 = 1;
 let block_url: String = etherscan.block_url(block);
-assert_eq!(block_url, format!("https://etherscan.io/block/{}", block));
+assert_eq!(block_url, format!("https://etherscan.io/block/{block}"));
 }
 #[test]
@@ -467,7 +467,7 @@ mod tests {
 let etherscan = Client::new_from_env(Chain::Mainnet).unwrap();
 let addr: Address = Address::zero();
 let address_url: String = etherscan.address_url(addr);
-assert_eq!(address_url, format!("https://etherscan.io/address/{:?}", addr));
+assert_eq!(address_url, format!("https://etherscan.io/address/{addr:?}"));
 }
 #[test]
@@ -475,7 +475,7 @@ mod tests {
 let etherscan = Client::new_from_env(Chain::Mainnet).unwrap();
 let tx_hash = H256::zero();
 let tx_url: String = etherscan.transaction_url(tx_hash);
-assert_eq!(tx_url, format!("https://etherscan.io/tx/{:?}", tx_hash));
+assert_eq!(tx_url, format!("https://etherscan.io/tx/{tx_hash:?}"));
 }
 #[test]
@@ -483,7 +483,7 @@ mod tests {
 let etherscan = Client::new_from_env(Chain::Mainnet).unwrap();
 let token_hash = Address::zero();
 let token_url: String = etherscan.token_url(token_hash);
-assert_eq!(token_url, format!("https://etherscan.io/token/{:?}", token_hash));
+assert_eq!(token_url, format!("https://etherscan.io/token/{token_hash:?}"));
 }
 #[test]

View File

@@ -57,7 +57,7 @@ impl VerifyContract {
 #[must_use]
 pub fn runs(mut self, runs: u32) -> Self {
-self.runs = Some(format!("{}", runs));
+self.runs = Some(format!("{runs}"));
 self
 }

View File

@@ -150,7 +150,7 @@ mod dsproxyfactory_mod {
 cache: Tokenizable::from_token(iter.next().unwrap())?,
 })
 } else {
-Err(InvalidOutputType(format!("Expected Tuple, got {:?}", token)))
+Err(InvalidOutputType(format!("Expected Tuple, got {token:?}")))
 }
 }
 fn into_token(self) -> Token {

View File

@@ -245,7 +245,7 @@ async fn deploy_and_call_contract() {
 // compiles the given contract and returns the ABI and Bytecode
 fn compile_contract(path: &str, name: &str) -> (Abi, Bytes) {
-let path = format!("./tests/solidity-contracts/{}", path);
+let path = format!("./tests/solidity-contracts/{path}");
 let compiled = Solc::default().compile_source(&path).unwrap();
 let contract = compiled.get(&path, name).expect("could not find contract");
 let (abi, bin, _) = contract.into_parts_or_default();
@@ -308,7 +308,7 @@ impl TestWallets {
 let mut nonce = provider.get_transaction_count(addr, None).await.unwrap();
 let mut pending_txs = Vec::new();
 for addr in addrs {
-println!("Funding wallet {:?}", addr);
+println!("Funding wallet {addr:?}");
 let tx = TransactionRequest::new()
 .nonce(nonce)
 .to(addr)

View File

@@ -16,7 +16,7 @@ type HttpWallet = SignerMiddleware<Provider<Http>, LocalWallet>;
 // compiles the given contract and returns the ABI and Bytecode
 fn compile_contract(path: &str, name: &str) -> (Abi, Bytes) {
-let path = format!("./tests/solidity-contracts/{}", path);
+let path = format!("./tests/solidity-contracts/{path}");
 let compiled = Solc::default().compile_source(&path).unwrap();
 let contract = compiled.get(&path, name).expect("could not find contract");
 let (abi, bin, _) = contract.into_parts_or_default();

View File

@@ -72,7 +72,7 @@ pub fn resolve<T: Into<NameOrAddress>>(
 /// Returns the reverse-registrar name of an address.
 pub fn reverse_address(addr: Address) -> String {
-format!("{:?}.{}", addr, ENS_REVERSE_REGISTRAR_DOMAIN)[2..].to_string()
+format!("{addr:?}.{ENS_REVERSE_REGISTRAR_DOMAIN}")[2..].to_string()
 }
 /// Returns the ENS namehash as specified in [EIP-137](https://eips.ethereum.org/EIPS/eip-137)
@@ -83,7 +83,7 @@ pub fn namehash(name: &str) -> H256 {
 // iterate in reverse
 name.rsplit('.')
-.fold([0u8; 32], |node, label| keccak256(&[node, keccak256(label.as_bytes())].concat()))
+.fold([0u8; 32], |node, label| keccak256([node, keccak256(label.as_bytes())].concat()))
 .into()
 }

View File

@@ -38,12 +38,12 @@ impl FromStr for ERCNFT {
 let token_split: Vec<&str> = inner_path.split('/').collect();
 let (contract_addr, token_id) = if token_split.len() == 2 {
 let token_id = U256::from_dec_str(token_split[1])
-.map_err(|e| format!("Unsupported token id type: {} {}", token_split[1], e))?;
+.map_err(|e| format!("Unsupported token id type: {} {e}", token_split[1]))?;
 let mut token_id_bytes = [0x0; 32];
 token_id.to_big_endian(&mut token_id_bytes);
 (
 Address::from_str(token_split[0].trim_start_matches("0x"))
-.map_err(|e| format!("Invalid contract address: {} {}", token_split[0], e))?,
+.map_err(|e| format!("Invalid contract address: {} {e}", token_split[0]))?,
 token_id_bytes,
 )
 } else {

View File

@@ -130,7 +130,7 @@ macro_rules! completed {
 /// Tests Provider error for nonce too low issue through debug contents
 fn is_nonce_too_low(e: &ProviderError) -> bool {
-let debug_str = format!("{:?}", e);
+let debug_str = format!("{e:?}");
 debug_str.contains("nonce too low") // Geth, Arbitrum, Optimism
 || debug_str.contains("nonce is too low") // Parity

View File

@@ -137,7 +137,7 @@ impl<'a, P> PendingTransaction<'a, P> {
 /// Logs the pending transaction hash along with a custom message before it.
 pub fn log_msg<S: std::fmt::Display>(self, msg: S) -> Self {
-self.inspect(|s| println!("{}: {:?}", msg, **s))
+self.inspect(|s| println!("{msg}: {:?}", **s))
 }
 /// Logs the pending transaction's hash

View File

@@ -939,7 +939,7 @@ impl<P: JsonRpcClient> Middleware for Provider<P> {
 };
 let data = self.call(&tx.into(), None).await?;
 let mut metadata_url = Url::parse(&decode_bytes::<String>(ParamType::String, data))
-.map_err(|e| ProviderError::CustomError(format!("Invalid metadata url: {}", e)))?;
+.map_err(|e| ProviderError::CustomError(format!("Invalid metadata url: {e}")))?;
 if token.type_ == erc::ERCNFTType::ERC1155 {
 metadata_url.set_path(&metadata_url.path().replace("%7Bid%7D", &hex::encode(token.id)));
@@ -1832,7 +1832,7 @@ mod tests {
 ("cdixon.eth", "https://ipfs.io/ipfs/QmYA6ZpEARgHvRHZQdFPynMMX8NtdL2JCadvyuyG2oA88u"),
 ("0age.eth", "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz48c3ZnIHN0eWxlPSJiYWNrZ3JvdW5kLWNvbG9yOmJsYWNrIiB2aWV3Qm94PSIwIDAgNTAwIDUwMCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cmVjdCB4PSIxNTUiIHk9IjYwIiB3aWR0aD0iMTkwIiBoZWlnaHQ9IjM5MCIgZmlsbD0iIzY5ZmYzNyIvPjwvc3ZnPg==")
 ] {
-println!("Resolving: {}", ens_name);
+println!("Resolving: {ens_name}");
 assert_eq!(provider.resolve_avatar(ens_name).await.unwrap(), Url::parse(res).unwrap());
 }
 }

View File

@@ -204,8 +204,8 @@ impl Authorization {
 impl fmt::Display for Authorization {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 match self {
-Authorization::Basic(auth_secret) => write!(f, "Basic {}", auth_secret),
-Authorization::Bearer(token) => write!(f, "Bearer {}", token),
+Authorization::Basic(auth_secret) => write!(f, "Basic {auth_secret}"),
+Authorization::Bearer(token) => write!(f, "Bearer {token}"),
 }
 }
 }

View File

@@ -205,7 +205,7 @@ impl Shared {
 match msg {
 Request { id, request, sender } => {
 let prev = self.pending.borrow_mut().insert(id, sender);
-assert!(prev.is_none(), "replaced pending IPC request (id={})", id);
+assert!(prev.is_none(), "{}", "replaced pending IPC request (id={id})");
 if let Err(err) = writer.write_all(&request).await {
 tracing::error!("IPC connection error: {:?}", err);

View File

@@ -210,7 +210,7 @@ pub enum RetryClientError {
 impl std::fmt::Display for RetryClientError {
 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-write!(f, "{:?}", self)
+write!(f, "{self:?}")
 }
 }

View File

@@ -419,7 +419,7 @@ where
 // TrySendError is private :(
 fn to_client_error<T: Debug>(err: T) -> ClientError {
-ClientError::ChannelError(format!("{:?}", err))
+ClientError::ChannelError(format!("{err:?}"))
 }
 #[derive(Error, Debug)]

View File

@@ -21,8 +21,8 @@ impl fmt::Display for DerivationType {
 f,
 "{}",
 match self {
-DerivationType::Legacy(index) => format!("m/44'/60'/0'/{}", index),
-DerivationType::LedgerLive(index) => format!("m/44'/60'/{}'/0/0", index),
+DerivationType::Legacy(index) => format!("m/44'/60'/0'/{index}"),
+DerivationType::LedgerLive(index) => format!("m/44'/60'/{index}'/0/0"),
 DerivationType::Other(inner) => inner.to_owned(),
 }
 )

View File

@@ -23,7 +23,7 @@ impl fmt::Display for DerivationType {
 f,
 "{}",
 match self {
-DerivationType::TrezorLive(index) => format!("m/44'/60'/{}'/0/0", index),
+DerivationType::TrezorLive(index) => format!("m/44'/60'/{index}'/0/0"),
 DerivationType::Other(inner) => inner.to_owned(),
 }
 )

View File

@ -152,7 +152,7 @@ mod tests {
// read from the encrypted JSON keystore and decrypt it, while validating that the // read from the encrypted JSON keystore and decrypt it, while validating that the
// signatures produced by both the keys should match // signatures produced by both the keys should match
let path = Path::new(dir.path()).join(uuid); let path = Path::new(dir.path()).join(uuid);
let key2 = Wallet::<SigningKey>::decrypt_keystore(&path.clone(), "randpsswd").unwrap(); let key2 = Wallet::<SigningKey>::decrypt_keystore(path.clone(), "randpsswd").unwrap();
let signature2 = key2.sign_message(message).await.unwrap(); let signature2 = key2.sign_message(message).await.unwrap();
assert_eq!(signature, signature2); assert_eq!(signature, signature2);
std::fs::remove_file(&path).unwrap(); std::fs::remove_file(&path).unwrap();
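A second recurring change drops borrows (and `&` on temporaries) in front of arguments whose parameter is already generic over path-like types, the `clippy::needless_borrow` shape: an owned `PathBuf` satisfies `impl AsRef<Path>` on its own. The sketch below uses a hypothetical `describe` helper; the real call sites (`decrypt_keystore`, `Remapping::find_many`, `copy_dir`, and so on) presumably take `impl AsRef<Path>` or similar, which is an assumption here, not something shown in the diff.

use std::path::{Path, PathBuf};

// Hypothetical helper, generic over anything path-like, standing in for the
// `impl AsRef<Path>` parameters that make the extra `&` unnecessary.
fn describe(path: impl AsRef<Path>) -> String {
    path.as_ref().display().to_string()
}

fn main() {
    let root = PathBuf::from("test-data");
    // `join` already returns an owned PathBuf, so it can be handed over as-is.
    let file = root.join("dapp-sample");

    // Before the lint: borrow the value first.
    let a = describe(&file);
    // After: pass the owned value straight to the generic parameter.
    let b = describe(file);
    assert_eq!(a, b);
}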

View File

@ -33,7 +33,7 @@ fn read_all_benchmark(c: &mut Criterion) {
fn prepare_contracts(root: &Path, num: usize) -> Vec<PathBuf> { fn prepare_contracts(root: &Path, num: usize) -> Vec<PathBuf> {
let mut files = Vec::with_capacity(num); let mut files = Vec::with_capacity(num);
for _ in 0..num { for _ in 0..num {
let path = root.join(format!("file{}.sol", num)); let path = root.join(format!("file{num}.sol"));
let f = File::create(&path).unwrap(); let f = File::create(&path).unwrap();
let mut writer = BufWriter::new(f); let mut writer = BufWriter::new(f);

View File

@ -1133,7 +1133,7 @@ mod tests {
let alternative = ConfigurableArtifacts::conflict_free_output_file( let alternative = ConfigurableArtifacts::conflict_free_output_file(
&already_taken, &already_taken,
conflict.clone(), conflict,
file, file,
"/Users/carter/dev/goldfinch/mono/packages/protocol/artifacts", "/Users/carter/dev/goldfinch/mono/packages/protocol/artifacts",
); );
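This test hunk is a slightly different lint: the `.clone()` on the last use of a value is dropped, so the original is moved instead of copied (the shape clippy reports as a redundant clone). A self-contained sketch with a made-up `take_owned` function; the real `conflict_free_output_file` signature is not shown in the diff and is only assumed to take the value by move.

fn take_owned(s: String) -> usize {
    s.len()
}

fn main() {
    // Before: clone because the call consumes the value...
    let conflict = String::from("contracts/Greeter.sol");
    let len = take_owned(conflict.clone());
    // ...even though `conflict` is never used again afterwards.
    drop(conflict);

    // After: on the final use, just move the original.
    let conflict = String::from("contracts/Greeter.sol");
    let len2 = take_owned(conflict);
    assert_eq!(len, len2);
}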

View File

@ -73,7 +73,7 @@ impl FromStr for SourceLocation {
type Err = String; type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> { fn from_str(s: &str) -> Result<Self, Self::Err> {
let invalid_location = move || format!("{} invalid source location", s); let invalid_location = move || format!("{s} invalid source location");
let mut split = s.split(':'); let mut split = s.split(':');
let start = split let start = split

View File

@ -305,8 +305,8 @@ impl BytecodeObject {
let fully_qualified_placeholder = utils::library_fully_qualified_placeholder(name); let fully_qualified_placeholder = utils::library_fully_qualified_placeholder(name);
*unlinked = unlinked *unlinked = unlinked
.replace(&format!("__{}__", fully_qualified_placeholder), &hex_addr) .replace(&format!("__{fully_qualified_placeholder}__"), &hex_addr)
.replace(&format!("__{}__", place_holder), &hex_addr) .replace(&format!("__{place_holder}__"), &hex_addr)
} }
self self
} }

View File

@ -522,14 +522,14 @@ impl Libraries {
for lib in libs { for lib in libs {
let mut items = lib.split(':'); let mut items = lib.split(':');
let file = items.next().ok_or_else(|| { let file = items.next().ok_or_else(|| {
SolcError::msg(format!("failed to parse path to library file: {}", lib)) SolcError::msg(format!("failed to parse path to library file: {lib}"))
})?; })?;
let lib = items let lib = items
.next() .next()
.ok_or_else(|| SolcError::msg(format!("failed to parse library name: {}", lib)))?; .ok_or_else(|| SolcError::msg(format!("failed to parse library name: {lib}")))?;
let addr = items.next().ok_or_else(|| { let addr = items
SolcError::msg(format!("failed to parse library address: {}", lib)) .next()
})?; .ok_or_else(|| SolcError::msg(format!("failed to parse library address: {lib}")))?;
if items.next().is_some() { if items.next().is_some() {
return Err(SolcError::msg(format!( return Err(SolcError::msg(format!(
"failed to parse, too many arguments passed: {}", "failed to parse, too many arguments passed: {}",
@ -735,7 +735,7 @@ impl fmt::Display for EvmVersion {
EvmVersion::London => "london", EvmVersion::London => "london",
EvmVersion::Byzantium => "byzantium", EvmVersion::Byzantium => "byzantium",
}; };
write!(f, "{}", string) write!(f, "{string}")
} }
} }
@ -753,7 +753,7 @@ impl FromStr for EvmVersion {
"berlin" => Ok(EvmVersion::Berlin), "berlin" => Ok(EvmVersion::Berlin),
"london" => Ok(EvmVersion::London), "london" => Ok(EvmVersion::London),
"byzantium" => Ok(EvmVersion::Byzantium), "byzantium" => Ok(EvmVersion::Byzantium),
s => Err(format!("Unknown evm version: {}", s)), s => Err(format!("Unknown evm version: {s}")),
} }
} }
} }
@ -807,7 +807,7 @@ impl fmt::Display for RevertStrings {
RevertStrings::Debug => "debug", RevertStrings::Debug => "debug",
RevertStrings::VerboseDebug => "verboseDebug", RevertStrings::VerboseDebug => "verboseDebug",
}; };
write!(f, "{}", string) write!(f, "{string}")
} }
} }
@ -820,7 +820,7 @@ impl FromStr for RevertStrings {
"strip" => Ok(RevertStrings::Strip), "strip" => Ok(RevertStrings::Strip),
"debug" => Ok(RevertStrings::Debug), "debug" => Ok(RevertStrings::Debug),
"verboseDebug" | "verbosedebug" => Ok(RevertStrings::VerboseDebug), "verboseDebug" | "verbosedebug" => Ok(RevertStrings::VerboseDebug),
s => Err(format!("Unknown evm version: {}", s)), s => Err(format!("Unknown evm version: {s}")),
} }
} }
} }
@ -887,7 +887,7 @@ impl FromStr for BytecodeHash {
"none" => Ok(BytecodeHash::None), "none" => Ok(BytecodeHash::None),
"ipfs" => Ok(BytecodeHash::Ipfs), "ipfs" => Ok(BytecodeHash::Ipfs),
"bzzr1" => Ok(BytecodeHash::Bzzr1), "bzzr1" => Ok(BytecodeHash::Bzzr1),
s => Err(format!("Unknown bytecode hash: {}", s)), s => Err(format!("Unknown bytecode hash: {s}")),
} }
} }
} }
@ -1050,7 +1050,7 @@ impl fmt::Display for ModelCheckerEngine {
ModelCheckerEngine::BMC => "bmc", ModelCheckerEngine::BMC => "bmc",
ModelCheckerEngine::CHC => "chc", ModelCheckerEngine::CHC => "chc",
}; };
write!(f, "{}", string) write!(f, "{string}")
} }
} }
@ -1063,7 +1063,7 @@ impl FromStr for ModelCheckerEngine {
"all" => Ok(ModelCheckerEngine::All), "all" => Ok(ModelCheckerEngine::All),
"bmc" => Ok(ModelCheckerEngine::BMC), "bmc" => Ok(ModelCheckerEngine::BMC),
"chc" => Ok(ModelCheckerEngine::CHC), "chc" => Ok(ModelCheckerEngine::CHC),
s => Err(format!("Unknown model checker engine: {}", s)), s => Err(format!("Unknown model checker engine: {s}")),
} }
} }
} }
@ -1100,7 +1100,7 @@ impl fmt::Display for ModelCheckerTarget {
ModelCheckerTarget::OutOfBounds => "outOfBounds", ModelCheckerTarget::OutOfBounds => "outOfBounds",
ModelCheckerTarget::Balance => "balance", ModelCheckerTarget::Balance => "balance",
}; };
write!(f, "{}", string) write!(f, "{string}")
} }
} }
@ -1117,7 +1117,7 @@ impl FromStr for ModelCheckerTarget {
"popEmptyArray" => Ok(ModelCheckerTarget::PopEmptyArray), "popEmptyArray" => Ok(ModelCheckerTarget::PopEmptyArray),
"outOfBounds" => Ok(ModelCheckerTarget::OutOfBounds), "outOfBounds" => Ok(ModelCheckerTarget::OutOfBounds),
"balance" => Ok(ModelCheckerTarget::Balance), "balance" => Ok(ModelCheckerTarget::Balance),
s => Err(format!("Unknown model checker target: {}", s)), s => Err(format!("Unknown model checker target: {s}")),
} }
} }
} }
@ -1729,13 +1729,13 @@ impl fmt::Display for Error {
match self.severity { match self.severity {
Severity::Error => { Severity::Error => {
if let Some(code) = self.error_code { if let Some(code) = self.error_code {
Paint::red(format!("error[{}]: ", code)).fmt(f)?; Paint::red(format!("error[{code}]: ")).fmt(f)?;
} }
Paint::red(msg).fmt(f) Paint::red(msg).fmt(f)
} }
Severity::Warning | Severity::Info => { Severity::Warning | Severity::Info => {
if let Some(code) = self.error_code { if let Some(code) = self.error_code {
Paint::yellow(format!("warning[{}]: ", code)).fmt(f)?; Paint::yellow(format!("warning[{code}]: ")).fmt(f)?;
} }
Paint::yellow(msg).fmt(f) Paint::yellow(msg).fmt(f)
} }
@ -1786,7 +1786,7 @@ impl FromStr for Severity {
"error" => Ok(Severity::Error), "error" => Ok(Severity::Error),
"warning" => Ok(Severity::Warning), "warning" => Ok(Severity::Warning),
"info" => Ok(Severity::Info), "info" => Ok(Severity::Info),
s => Err(format!("Invalid severity: {}", s)), s => Err(format!("Invalid severity: {s}")),
} }
} }
} }
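The solc settings enums in this file all pair a `Display` that writes a fixed token with a `FromStr` that maps the token back and reports unknown input via the captured `{s}`. A stripped-down sketch of that pairing with a made-up two-variant enum (not one of the real types):

use std::fmt;
use std::str::FromStr;

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum Mode {
    Strip,
    Debug,
}

impl fmt::Display for Mode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let string = match self {
            Mode::Strip => "strip",
            Mode::Debug => "debug",
        };
        // Captured identifier instead of `write!(f, "{}", string)`.
        write!(f, "{string}")
    }
}

impl FromStr for Mode {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "strip" => Ok(Mode::Strip),
            "debug" => Ok(Mode::Debug),
            s => Err(format!("Unknown mode: {s}")),
        }
    }
}

fn main() {
    // Round-trip: Display and FromStr agree on the textual form.
    assert_eq!(Mode::Strip.to_string(), "strip");
    assert_eq!("debug".parse::<Mode>(), Ok(Mode::Debug));
    assert!("verbose".parse::<Mode>().is_err());
}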

View File

@ -256,7 +256,7 @@ impl FromStr for ContractOutputSelection {
s => EvmOutputSelection::from_str(s) s => EvmOutputSelection::from_str(s)
.map(ContractOutputSelection::Evm) .map(ContractOutputSelection::Evm)
.or_else(|_| EwasmOutputSelection::from_str(s).map(ContractOutputSelection::Ewasm)) .or_else(|_| EwasmOutputSelection::from_str(s).map(ContractOutputSelection::Ewasm))
.map_err(|_| format!("Invalid contract output selection: {}", s)), .map_err(|_| format!("Invalid contract output selection: {s}")),
} }
} }
} }
@ -347,7 +347,7 @@ impl FromStr for EvmOutputSelection {
DeployedBytecodeOutputSelection::from_str(s) DeployedBytecodeOutputSelection::from_str(s)
.map(EvmOutputSelection::DeployedByteCode) .map(EvmOutputSelection::DeployedByteCode)
}) })
.map_err(|_| format!("Invalid evm selection: {}", s)), .map_err(|_| format!("Invalid evm selection: {s}")),
} }
} }
} }
@ -412,7 +412,7 @@ impl FromStr for BytecodeOutputSelection {
"evm.bytecode.sourceMap" => Ok(BytecodeOutputSelection::SourceMap), "evm.bytecode.sourceMap" => Ok(BytecodeOutputSelection::SourceMap),
"evm.bytecode.linkReferences" => Ok(BytecodeOutputSelection::LinkReferences), "evm.bytecode.linkReferences" => Ok(BytecodeOutputSelection::LinkReferences),
"evm.bytecode.generatedSources" => Ok(BytecodeOutputSelection::GeneratedSources), "evm.bytecode.generatedSources" => Ok(BytecodeOutputSelection::GeneratedSources),
s => Err(format!("Invalid bytecode selection: {}", s)), s => Err(format!("Invalid bytecode selection: {s}")),
} }
} }
} }
@ -494,7 +494,7 @@ impl FromStr for DeployedBytecodeOutputSelection {
"evm.deployedBytecode.immutableReferences" => { "evm.deployedBytecode.immutableReferences" => {
Ok(DeployedBytecodeOutputSelection::ImmutableReferences) Ok(DeployedBytecodeOutputSelection::ImmutableReferences)
} }
s => Err(format!("Invalid deployedBytecode selection: {}", s)), s => Err(format!("Invalid deployedBytecode selection: {s}")),
} }
} }
} }
@ -543,7 +543,7 @@ impl FromStr for EwasmOutputSelection {
"ewasm" => Ok(EwasmOutputSelection::All), "ewasm" => Ok(EwasmOutputSelection::All),
"ewasm.wast" => Ok(EwasmOutputSelection::Wast), "ewasm.wast" => Ok(EwasmOutputSelection::Wast),
"ewasm.wasm" => Ok(EwasmOutputSelection::Wasm), "ewasm.wasm" => Ok(EwasmOutputSelection::Wasm),
s => Err(format!("Invalid ewasm selection: {}", s)), s => Err(format!("Invalid ewasm selection: {s}")),
} }
} }
} }

View File

@ -120,7 +120,7 @@ pub mod string_bytes {
if value.starts_with("0x") { if value.starts_with("0x") {
serializer.serialize_str(value.as_str()) serializer.serialize_str(value.as_str())
} else { } else {
serializer.serialize_str(&format!("0x{}", value)) serializer.serialize_str(&format!("0x{value}"))
} }
} }

View File

@ -149,7 +149,7 @@ impl Default for Solc {
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
{ {
if let Some(solc) = Solc::svm_global_version() if let Some(solc) = Solc::svm_global_version()
.and_then(|vers| Solc::find_svm_installed_version(&vers.to_string()).ok()) .and_then(|vers| Solc::find_svm_installed_version(vers.to_string()).ok())
.flatten() .flatten()
{ {
return solc return solc
@ -279,7 +279,7 @@ impl Solc {
let solc = Self::svm_home() let solc = Self::svm_home()
.ok_or_else(|| SolcError::solc("svm home dir not found"))? .ok_or_else(|| SolcError::solc("svm home dir not found"))?
.join(version) .join(version)
.join(format!("solc-{}", version)); .join(format!("solc-{version}"));
if !solc.is_file() { if !solc.is_file() {
return Ok(None) return Ok(None)
@ -688,7 +688,7 @@ fn version_from_output(output: Output) -> Result<Version> {
.lines() .lines()
.last() .last()
.ok_or_else(|| SolcError::solc("version not found in solc output"))? .ok_or_else(|| SolcError::solc("version not found in solc output"))?
.map_err(|err| SolcError::msg(format!("Failed to read output: {}", err)))?; .map_err(|err| SolcError::msg(format!("Failed to read output: {err}")))?;
// NOTE: semver doesn't like `+` in g++ in build metadata which is invalid semver // NOTE: semver doesn't like `+` in g++ in build metadata which is invalid semver
Ok(Version::from_str(&version.trim_start_matches("Version: ").replace(".g++", ".gcc"))?) Ok(Version::from_str(&version.trim_start_matches("Version: ").replace(".g++", ".gcc"))?)
} else { } else {
@ -858,8 +858,8 @@ mod tests {
{ {
Solc::blocking_install(&version).unwrap(); Solc::blocking_install(&version).unwrap();
} }
let res = Solc::find_svm_installed_version(&version.to_string()).unwrap().unwrap(); let res = Solc::find_svm_installed_version(version.to_string()).unwrap().unwrap();
let expected = svm::SVM_HOME.join(ver).join(format!("solc-{}", ver)); let expected = svm::SVM_HOME.join(ver).join(format!("solc-{ver}"));
assert_eq!(res.solc, expected); assert_eq!(res.solc, expected);
} }
@ -876,7 +876,7 @@ mod tests {
fn does_not_find_not_installed_version() { fn does_not_find_not_installed_version() {
let ver = "1.1.1"; let ver = "1.1.1";
let version = Version::from_str(ver).unwrap(); let version = Version::from_str(ver).unwrap();
let res = Solc::find_svm_installed_version(&version.to_string()).unwrap(); let res = Solc::find_svm_installed_version(version.to_string()).unwrap();
assert!(res.is_none()); assert!(res.is_none());
} }
@ -908,6 +908,6 @@ mod tests {
///// helpers ///// helpers
fn source(version: &str) -> Source { fn source(version: &str) -> Source {
Source { content: format!("pragma solidity {};\n", version) } Source { content: format!("pragma solidity {version};\n") }
} }
} }

View File

@ -41,7 +41,7 @@ impl ContractInfo {
impl fmt::Display for ContractInfo { impl fmt::Display for ContractInfo {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if let Some(ref path) = self.path { if let Some(ref path) = self.path {
write!(f, "{}:{}", path, self.name) write!(f, "{path}:{}", self.name)
} else { } else {
write!(f, "{}", self.name) write!(f, "{}", self.name)
} }

View File

@ -752,10 +752,10 @@ impl<'a> fmt::Display for OutputDiagnostics<'a> {
}); });
if !is_ignored { if !is_ignored {
writeln!(f, "\n{}", err)?; writeln!(f, "\n{err}")?;
} }
} else { } else {
writeln!(f, "\n{}", err)?; writeln!(f, "\n{err}")?;
} }
} }
Ok(()) Ok(())

View File

@ -230,7 +230,7 @@ impl ProjectPathsConfig {
// if the import is relative we assume it's already part of the processed input // if the import is relative we assume it's already part of the processed input
// file set // file set
utils::canonicalize(cwd.join(import)).map_err(|err| { utils::canonicalize(cwd.join(import)).map_err(|err| {
SolcError::msg(format!("failed to resolve relative import \"{:?}\"", err)) SolcError::msg(format!("failed to resolve relative import \"{err:?}\""))
}) })
} else { } else {
// resolve library file // resolve library file
@ -477,7 +477,7 @@ impl ProjectPathsConfig {
} }
let result = String::from_utf8(content).map_err(|err| { let result = String::from_utf8(content).map_err(|err| {
SolcError::msg(format!("failed to convert extended bytes to string: {}", err)) SolcError::msg(format!("failed to convert extended bytes to string: {err}"))
})?; })?;
Ok(result) Ok(result)
@ -497,7 +497,7 @@ impl fmt::Display for ProjectPathsConfig {
} }
writeln!(f, "remappings:")?; writeln!(f, "remappings:")?;
for remapping in &self.remappings { for remapping in &self.remappings {
writeln!(f, " {}", remapping)?; writeln!(f, " {remapping}")?;
} }
Ok(()) Ok(())
} }
@ -588,7 +588,7 @@ impl PathStyle {
.artifacts(root.join("out")) .artifacts(root.join("out"))
.build_infos(root.join("out").join("build-info")) .build_infos(root.join("out").join("build-info"))
.lib(root.join("lib")) .lib(root.join("lib"))
.remappings(Remapping::find_many(&root.join("lib"))) .remappings(Remapping::find_many(root.join("lib")))
.root(root) .root(root)
.build()?, .build()?,
PathStyle::HardHat => ProjectPathsConfig::builder() PathStyle::HardHat => ProjectPathsConfig::builder()
@ -883,7 +883,7 @@ impl fmt::Display for AllowedLibPaths {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let lib_paths = let lib_paths =
self.paths().map(|path| format!("{}", path.display())).collect::<Vec<_>>().join(","); self.paths().map(|path| format!("{}", path.display())).collect::<Vec<_>>().join(",");
write!(f, "{}", lib_paths) write!(f, "{lib_paths}")
} }
} }

View File

@ -981,9 +981,9 @@ mod tests {
.lib(root.join("lib1")) .lib(root.join("lib1"))
.lib(root.join("lib2")) .lib(root.join("lib2"))
.remappings( .remappings(
Remapping::find_many(&root.join("lib1")) Remapping::find_many(root.join("lib1"))
.into_iter() .into_iter()
.chain(Remapping::find_many(&root.join("lib2"))), .chain(Remapping::find_many(root.join("lib2"))),
) )
.build() .build()
.unwrap(); .unwrap();
@ -1009,7 +1009,7 @@ mod tests {
.root(&root) .root(&root)
.sources(root.join("src")) .sources(root.join("src"))
.lib(root.join("lib")) .lib(root.join("lib"))
.remappings(Remapping::find_many(&root.join("lib"))) .remappings(Remapping::find_many(root.join("lib")))
.build() .build()
.unwrap(); .unwrap();
let project = Project::builder().no_artifacts().paths(paths).ephemeral().build().unwrap(); let project = Project::builder().no_artifacts().paths(paths).ephemeral().build().unwrap();

View File

@ -395,15 +395,15 @@ pub struct SimpleNamingStrategy {
impl NamingStrategy for SimpleNamingStrategy { impl NamingStrategy for SimpleNamingStrategy {
fn new_source_file_name(&mut self, id: usize) -> String { fn new_source_file_name(&mut self, id: usize) -> String {
format!("SourceFile{}", id) format!("SourceFile{id}")
} }
fn new_lib_file_name(&mut self, id: usize) -> String { fn new_lib_file_name(&mut self, id: usize) -> String {
format!("LibFile{}", id) format!("LibFile{id}")
} }
fn new_lib_name(&mut self, id: usize) -> String { fn new_lib_name(&mut self, id: usize) -> String {
format!("Lib{}", id) format!("Lib{id}")
} }
} }

View File

@ -388,7 +388,7 @@ fn contract_file_name(name: impl AsRef<str>) -> String {
if name.ends_with(".sol") { if name.ends_with(".sol") {
name.to_string() name.to_string()
} else { } else {
format!("{}.sol", name) format!("{name}.sol")
} }
} }
@ -419,7 +419,7 @@ impl TempProject<ConfigurableArtifacts> {
pub fn dapptools_init() -> Result<Self> { pub fn dapptools_init() -> Result<Self> {
let mut project = Self::dapptools()?; let mut project = Self::dapptools()?;
let orig_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let orig_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
copy_dir(&orig_root, project.root())?; copy_dir(orig_root, project.root())?;
project.project_mut().paths.remappings = Remapping::find_many(project.root()); project.project_mut().paths.remappings = Remapping::find_many(project.root());
Ok(project) Ok(project)

View File

@ -217,7 +217,7 @@ impl Remapping {
if let Some(name) = candidate.window_start.file_name().and_then(|s| s.to_str()) { if let Some(name) = candidate.window_start.file_name().and_then(|s| s.to_str()) {
insert_prioritized( insert_prioritized(
&mut all_remappings, &mut all_remappings,
format!("{}/", name), format!("{name}/"),
candidate.source_dir, candidate.source_dir,
); );
} }
@ -778,7 +778,7 @@ mod tests {
touch(&path).unwrap(); touch(&path).unwrap();
} else { } else {
let path = tmp.join(path); let path = tmp.join(path);
std::fs::create_dir_all(&path).unwrap(); std::fs::create_dir_all(path).unwrap();
} }
} }
} }
@ -801,7 +801,7 @@ mod tests {
assert_eq!(remappings.len(), 1); assert_eq!(remappings.len(), 1);
assert_eq!(remappings[0].name, "repo1/"); assert_eq!(remappings[0].name, "repo1/");
assert_eq!(remappings[0].path, format!("{}/src/", path)); assert_eq!(remappings[0].path, format!("{path}/src/"));
} }
#[test] #[test]
@ -1015,7 +1015,7 @@ mod tests {
"node_modules/@openzeppelin/contracts/token/ERC20/IERC20.sol", "node_modules/@openzeppelin/contracts/token/ERC20/IERC20.sol",
]; ];
mkdir_or_touch(tmp_dir.path(), &paths[..]); mkdir_or_touch(tmp_dir.path(), &paths[..]);
let remappings = Remapping::find_many(&tmp_dir_node_modules); let remappings = Remapping::find_many(tmp_dir_node_modules);
let mut paths = ProjectPathsConfig::hardhat(tmp_dir.path()).unwrap(); let mut paths = ProjectPathsConfig::hardhat(tmp_dir.path()).unwrap();
paths.remappings = remappings; paths.remappings = remappings;
@ -1054,7 +1054,7 @@ mod tests {
mkdir_or_touch(tmp_dir_path, &paths[..]); mkdir_or_touch(tmp_dir_path, &paths[..]);
let path = tmp_dir_path.display().to_string(); let path = tmp_dir_path.display().to_string();
let mut remappings = Remapping::find_many(&path); let mut remappings = Remapping::find_many(path);
remappings.sort_unstable(); remappings.sort_unstable();
let mut expected = vec![ let mut expected = vec![
@ -1115,7 +1115,7 @@ mod tests {
touch(&contract2).unwrap(); touch(&contract2).unwrap();
let path = tmp_dir_path.display().to_string(); let path = tmp_dir_path.display().to_string();
let mut remappings = Remapping::find_many(&path); let mut remappings = Remapping::find_many(path);
remappings.sort_unstable(); remappings.sort_unstable();
let mut expected = vec![ let mut expected = vec![
Remapping { Remapping {
@ -1151,7 +1151,7 @@ mod tests {
mkdir_or_touch(tmp_dir_path, &paths[..]); mkdir_or_touch(tmp_dir_path, &paths[..]);
let path = tmp_dir_path.display().to_string(); let path = tmp_dir_path.display().to_string();
let mut remappings = Remapping::find_many(&path); let mut remappings = Remapping::find_many(path);
remappings.sort_unstable(); remappings.sort_unstable();
let mut expected = vec![ let mut expected = vec![

View File

@ -163,7 +163,7 @@ pub struct BadName {
fn get_file_name(path: impl Into<PathBuf>, v: &Version) -> PathBuf { fn get_file_name(path: impl Into<PathBuf>, v: &Version) -> PathBuf {
let mut path = path.into(); let mut path = path.into();
if let Some(stem) = path.file_stem().and_then(|s| s.to_str().map(|s| s.to_string())) { if let Some(stem) = path.file_stem().and_then(|s| s.to_str().map(|s| s.to_string())) {
path.set_file_name(format!("{}.{}.{}.{}.json", stem, v.major, v.minor, v.patch)); path.set_file_name(format!("{stem}.{}.{}.{}.json", v.major, v.minor, v.patch));
} }
path path
} }

View File

@ -370,23 +370,23 @@ impl Reporter for BasicStdoutReporter {
) { ) {
self.solc_io_report.log_compiler_output(output, version); self.solc_io_report.log_compiler_output(output, version);
println!( println!(
"Solc {}.{}.{} finished in {:.2?}", "Solc {}.{}.{} finished in {duration:.2?}",
version.major, version.minor, version.patch, duration version.major, version.minor, version.patch
); );
} }
/// Invoked before a new [`Solc`] bin is installed /// Invoked before a new [`Solc`] bin is installed
fn on_solc_installation_start(&self, version: &Version) { fn on_solc_installation_start(&self, version: &Version) {
println!("installing solc version \"{}\"", version); println!("installing solc version \"{version}\"");
} }
/// Invoked before a new [`Solc`] bin was successfully installed /// Invoked before a new [`Solc`] bin was successfully installed
fn on_solc_installation_success(&self, version: &Version) { fn on_solc_installation_success(&self, version: &Version) {
println!("Successfully installed solc {}", version); println!("Successfully installed solc {version}");
} }
fn on_solc_installation_error(&self, version: &Version, error: &str) { fn on_solc_installation_error(&self, version: &Version, error: &str) {
eprintln!("Failed to install solc {}: {}", version, error); eprintln!("Failed to install solc {version}: {error}");
} }
fn on_unresolved_imports(&self, imports: &[(&Path, &Path)], remappings: &[Remapping]) { fn on_unresolved_imports(&self, imports: &[(&Path, &Path)], remappings: &[Remapping]) {
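The reporter hunk shows that positional and captured arguments can be mixed in one format string: the plain `{}` holes still consume the explicit arguments left to right, while `{duration:.2?}` is filled from the local variable and keeps its precision/Debug spec. A small sketch with dummy values standing in for the real `Version` and `Duration`:

use std::time::Duration;

fn main() {
    let (major, minor, patch) = (0u64, 8u64, 17u64);
    let duration = Duration::from_millis(1234);

    // Positional `{}` holes take `major`, `minor`, `patch` in order;
    // `{duration:.2?}` captures the local and applies `.2?` to its Debug output.
    println!(
        "Solc {}.{}.{} finished in {duration:.2?}",
        major, minor, patch
    );
}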

View File

@ -787,7 +787,7 @@ impl VersionedSources {
} else { } else {
// find installed svm // find installed svm
Solc::find_svm_installed_version(version.to_string())?.ok_or_else(|| { Solc::find_svm_installed_version(version.to_string())?.ok_or_else(|| {
SolcError::msg(format!("solc \"{}\" should have been installed", version)) SolcError::msg(format!("solc \"{version}\" should have been installed"))
})? })?
}; };

View File

@ -27,7 +27,7 @@ impl FromStr for Charset {
match s { match s {
"utf8" => Ok(Charset::Utf8), "utf8" => Ok(Charset::Utf8),
"ascii" => Ok(Charset::Ascii), "ascii" => Ok(Charset::Ascii),
s => Err(format!("invalid charset: {}", s)), s => Err(format!("invalid charset: {s}")),
} }
} }
} }
@ -103,7 +103,7 @@ fn print_node(
if let Some((last_continues, rest)) = levels_continue.split_last() { if let Some((last_continues, rest)) = levels_continue.split_last() {
for continues in rest { for continues in rest {
let c = if *continues { symbols.down } else { " " }; let c = if *continues { symbols.down } else { " " };
write!(out, "{} ", c)?; write!(out, "{c} ")?;
} }
let c = if *last_continues { symbols.tee } else { symbols.ell }; let c = if *last_continues { symbols.tee } else { symbols.ell };
@ -117,7 +117,7 @@ fn print_node(
let has_deps = graph.has_outgoing_edges(node_index); let has_deps = graph.has_outgoing_edges(node_index);
let star = if (new_node && !in_cycle) || !has_deps { "" } else { " (*)" }; let star = if (new_node && !in_cycle) || !has_deps { "" } else { " (*)" };
writeln!(out, "{}{}", graph.display_node(node_index), star)?; writeln!(out, "{}{star}", graph.display_node(node_index))?;
if !new_node || in_cycle { if !new_node || in_cycle {
return Ok(()) return Ok(())

View File

@ -38,7 +38,7 @@ enum Token<'a> {
impl<'a> fmt::Debug for Token<'a> { impl<'a> fmt::Debug for Token<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
Token::Number(s) => write!(f, "NUMBER({:?})", s), Token::Number(s) => write!(f, "NUMBER({s:?})"),
Token::Semicolon => write!(f, "SEMICOLON"), Token::Semicolon => write!(f, "SEMICOLON"),
Token::Colon => write!(f, "COLON"), Token::Colon => write!(f, "COLON"),
Token::In => write!(f, "JMP(i)"), Token::In => write!(f, "JMP(i)"),
@ -291,7 +291,7 @@ impl SourceElementBuilder {
fn set_jmp(&mut self, jmp: Jump, i: usize) -> Option<SyntaxError> { fn set_jmp(&mut self, jmp: Jump, i: usize) -> Option<SyntaxError> {
if self.jump.is_some() { if self.jump.is_some() {
return Some(SyntaxError::new(format!("Jump already set: {}", i))) return Some(SyntaxError::new(format!("Jump already set: {i}")))
} }
self.jump = Some(jmp); self.jump = Some(jmp);
None None
@ -299,7 +299,7 @@ impl SourceElementBuilder {
fn set_offset(&mut self, offset: usize, i: usize) -> Option<SyntaxError> { fn set_offset(&mut self, offset: usize, i: usize) -> Option<SyntaxError> {
if self.offset.is_some() { if self.offset.is_some() {
return Some(SyntaxError::new(format!("Offset already set: {}", i))) return Some(SyntaxError::new(format!("Offset already set: {i}")))
} }
self.offset = Some(offset); self.offset = Some(offset);
None None
@ -307,7 +307,7 @@ impl SourceElementBuilder {
fn set_length(&mut self, length: usize, i: usize) -> Option<SyntaxError> { fn set_length(&mut self, length: usize, i: usize) -> Option<SyntaxError> {
if self.length.is_some() { if self.length.is_some() {
return Some(SyntaxError::new(format!("Length already set: {}", i))) return Some(SyntaxError::new(format!("Length already set: {i}")))
} }
self.length = Some(length); self.length = Some(length);
None None
@ -315,7 +315,7 @@ impl SourceElementBuilder {
fn set_index(&mut self, index: Option<u32>, i: usize) -> Option<SyntaxError> { fn set_index(&mut self, index: Option<u32>, i: usize) -> Option<SyntaxError> {
if self.index.is_some() { if self.index.is_some() {
return Some(SyntaxError::new(format!("Index already set: {}", i))) return Some(SyntaxError::new(format!("Index already set: {i}")))
} }
self.index = Some(index); self.index = Some(index);
None None
@ -323,7 +323,7 @@ impl SourceElementBuilder {
fn set_modifier(&mut self, modifier_depth: usize, i: usize) -> Option<SyntaxError> { fn set_modifier(&mut self, modifier_depth: usize, i: usize) -> Option<SyntaxError> {
if self.modifier_depth.is_some() { if self.modifier_depth.is_some() {
return Some(SyntaxError::new(format!("Modifier depth already set: {}", i))) return Some(SyntaxError::new(format!("Modifier depth already set: {i}")))
} }
self.modifier_depth = Some(modifier_depth); self.modifier_depth = Some(modifier_depth);
None None
@ -486,7 +486,7 @@ impl State {
State::Length => *self = State::Index, State::Length => *self = State::Index,
State::Index => *self = State::Jmp, State::Index => *self = State::Jmp,
State::Jmp => *self = State::Modifier, State::Jmp => *self = State::Modifier,
State::Modifier => return Some(SyntaxError::new(format!("unexpected colon at {}", i))), State::Modifier => return Some(SyntaxError::new(format!("unexpected colon at {i}"))),
} }
None None
} }

View File

@ -43,7 +43,7 @@ pub static RE_THREE_OR_MORE_NEWLINES: Lazy<Regex> = Lazy::new(|| Regex::new("\n{
/// Create a regex that matches any library or contract name inside a file /// Create a regex that matches any library or contract name inside a file
pub fn create_contract_or_lib_name_regex(name: &str) -> Regex { pub fn create_contract_or_lib_name_regex(name: &str) -> Regex {
Regex::new(&format!(r#"(?:using\s+(?P<n1>{name})\s+|is\s+(?:\w+\s*,\s*)*(?P<n2>{name})(?:\s*,\s*\w+)*|(?:(?P<ignore>(?:function|error|as)\s+|\n[^\n]*(?:"([^"\n]|\\")*|'([^'\n]|\\')*))|\W+)(?P<n3>{name})(?:\.|\(| ))"#, name = name)).unwrap() Regex::new(&format!(r#"(?:using\s+(?P<n1>{name})\s+|is\s+(?:\w+\s*,\s*)*(?P<n2>{name})(?:\s*,\s*\w+)*|(?:(?P<ignore>(?:function|error|as)\s+|\n[^\n]*(?:"([^"\n]|\\")*|'([^'\n]|\\')*))|\W+)(?P<n3>{name})(?:\.|\(| ))"#)).unwrap()
} }
/// Move a range by a specified offset /// Move a range by a specified offset
@ -285,7 +285,7 @@ pub fn library_fully_qualified_placeholder(name: impl AsRef<str>) -> String {
pub fn library_hash_placeholder(name: impl AsRef<[u8]>) -> String { pub fn library_hash_placeholder(name: impl AsRef<[u8]>) -> String {
let hash = library_hash(name); let hash = library_hash(name);
let placeholder = hex::encode(hash); let placeholder = hex::encode(hash);
format!("${}$", placeholder) format!("${placeholder}$")
} }
/// Returns the library placeholder for the given name /// Returns the library placeholder for the given name
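For the contract-name regex the only change is dropping the trailing `name = name`: inside `format!`, `{name}` now captures the local binding directly, and the rest of the raw string (regex escapes, named groups, alternations) is emitted verbatim since it contains no other braces. A simplified sketch that builds a much shorter pattern of the same shape; the real helper then hands the string to `Regex::new`:

fn contract_name_pattern(name: &str) -> String {
    // `{name}` is a format capture; everything else in the raw string passes through as-is.
    format!(r#"(?:using\s+(?P<n1>{name})\s+|is\s+(?:\w+\s*,\s*)*(?P<n2>{name}))"#)
}

fn main() {
    let pattern = contract_name_pattern("Greeter");
    assert!(pattern.contains("(?P<n1>Greeter)"));
    assert!(pattern.contains("(?P<n2>Greeter)"));
    println!("{pattern}");
}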

View File

@ -421,7 +421,7 @@ fn can_compile_dapp_sample_with_cache() {
); );
// deleted artifact is not taken from the cache // deleted artifact is not taken from the cache
std::fs::remove_file(&project.paths.sources.join("Dapp.sol")).unwrap(); std::fs::remove_file(project.paths.sources.join("Dapp.sol")).unwrap();
let compiled: ProjectCompileOutput<_> = project.compile().unwrap(); let compiled: ProjectCompileOutput<_> = project.compile().unwrap();
assert!(compiled.find_first("Dapp").is_none()); assert!(compiled.find_first("Dapp").is_none());
} }
@ -636,7 +636,7 @@ contract FooBar {}
fn can_flatten_on_solang_failure() { fn can_flatten_on_solang_failure() {
let root = let root =
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/test-flatten-solang-failure"); PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/test-flatten-solang-failure");
let paths = ProjectPathsConfig::builder().sources(&root.join("contracts")); let paths = ProjectPathsConfig::builder().sources(root.join("contracts"));
let project = TempProject::<ConfigurableArtifacts>::new(paths).unwrap(); let project = TempProject::<ConfigurableArtifacts>::new(paths).unwrap();
let target = root.join("contracts/Contract.sol"); let target = root.join("contracts/Contract.sol");
@ -1231,7 +1231,7 @@ fn can_recompile_with_changes() {
assert!(compiled.is_unchanged()); assert!(compiled.is_unchanged());
// modify A.sol // modify A.sol
tmp.add_source("A", format!("{}\n", content)).unwrap(); tmp.add_source("A", format!("{content}\n")).unwrap();
let compiled = tmp.compile().unwrap(); let compiled = tmp.compile().unwrap();
assert!(!compiled.has_compiler_errors()); assert!(!compiled.has_compiler_errors());
assert!(!compiled.is_unchanged()); assert!(!compiled.is_unchanged());
@ -1286,7 +1286,7 @@ fn can_recompile_with_lowercase_names() {
assert!(compiled.is_unchanged()); assert!(compiled.is_unchanged());
// modify upgradeProxy.sol // modify upgradeProxy.sol
tmp.add_source("upgradeProxy.sol", format!("{}\n", upgrade)).unwrap(); tmp.add_source("upgradeProxy.sol", format!("{upgrade}\n")).unwrap();
let compiled = tmp.compile().unwrap(); let compiled = tmp.compile().unwrap();
assert!(!compiled.has_compiler_errors()); assert!(!compiled.has_compiler_errors());
assert!(!compiled.is_unchanged()); assert!(!compiled.is_unchanged());
@ -1339,7 +1339,7 @@ fn can_recompile_unchanged_with_empty_files() {
assert!(compiled.is_unchanged()); assert!(compiled.is_unchanged());
// modify C.sol // modify C.sol
tmp.add_source("C", format!("{}\n", c)).unwrap(); tmp.add_source("C", format!("{c}\n")).unwrap();
let compiled = tmp.compile().unwrap(); let compiled = tmp.compile().unwrap();
assert!(!compiled.has_compiler_errors()); assert!(!compiled.has_compiler_errors());
assert!(!compiled.is_unchanged()); assert!(!compiled.is_unchanged());

View File

@ -7,7 +7,7 @@ fn main() -> eyre::Result<()> {
let contract_name = args.next().unwrap_or_else(|| "SimpleStorage".to_owned()); let contract_name = args.next().unwrap_or_else(|| "SimpleStorage".to_owned());
let contract: String = args.next().unwrap_or_else(|| "examples/contract.sol".to_owned()); let contract: String = args.next().unwrap_or_else(|| "examples/contract.sol".to_owned());
println!("Generating bindings for {}\n", contract); println!("Generating bindings for {contract}\n");
// compile it // compile it
let abi = if contract.ends_with(".sol") { let abi = if contract.ends_with(".sol") {
@ -22,7 +22,7 @@ fn main() -> eyre::Result<()> {
// print to stdout if no output arg is given // print to stdout if no output arg is given
if let Some(output_path) = args.next() { if let Some(output_path) = args.next() {
bindings.write_to_file(&output_path)?; bindings.write_to_file(output_path)?;
} else { } else {
bindings.write(std::io::stdout())?; bindings.write(std::io::stdout())?;
} }

View File

@ -67,7 +67,7 @@ async fn main() -> Result<()> {
// 11. get the new value // 11. get the new value
let value = contract.get_value().call().await?; let value = contract.get_value().call().await?;
println!("Value: {}. Logs: {}", value, serde_json::to_string(&logs)?); println!("Value: {value}. Logs: {}", serde_json::to_string(&logs)?);
Ok(()) Ok(())
} }

View File

@ -56,7 +56,7 @@ async fn main() -> Result<()> {
// 11. get the new value // 11. get the new value
let value = contract.get_value().call().await?; let value = contract.get_value().call().await?;
println!("Value: {}. Logs: {}", value, serde_json::to_string(&logs)?); println!("Value: {value}. Logs: {}", serde_json::to_string(&logs)?);
Ok(()) Ok(())
} }

View File

@ -18,8 +18,8 @@ fn main() -> Result<()> {
let from = path.next().unwrap(); let from = path.next().unwrap();
let to = path.next().unwrap(); let to = path.next().unwrap();
println!( println!(
"Swapped {} of token {} for {} of token {}", "Swapped {} of token {from} for {} of token {to}",
decoded.amount_in, from, decoded.amount_out_min, to decoded.amount_in, decoded.amount_out_min
); );
Ok(()) Ok(())

View File

@ -66,7 +66,7 @@ pub async fn deploy() {
let value = contract.get_value().call().await.unwrap(); let value = contract.get_value().call().await.unwrap();
console::log_2( console::log_2(
&format!("Value: `{}`. Logs: ", value).into(), &format!("Value: `{value}`. Logs: ").into(),
&serde_wasm_bindgen::to_value(&logs).unwrap(), &serde_wasm_bindgen::to_value(&logs).unwrap(),
); );
} }

View File

@ -12,7 +12,7 @@ async fn main() -> Result<()> {
let h: H256 = H256::from_str(tx_hash)?; let h: H256 = H256::from_str(tx_hash)?;
let options: GethDebugTracingOptions = GethDebugTracingOptions::default(); let options: GethDebugTracingOptions = GethDebugTracingOptions::default();
let traces = client.debug_trace_transaction(h, options).await?; let traces = client.debug_trace_transaction(h, options).await?;
println!("{:?}", traces); println!("{traces:?}");
} }
Ok(()) Ok(())

View File

@ -7,7 +7,7 @@ async fn main() -> eyre::Result<()> {
.await? .await?
.interval(std::time::Duration::from_millis(2000)); .interval(std::time::Duration::from_millis(2000));
let block = provider.get_block_number().await?; let block = provider.get_block_number().await?;
println!("Current block: {}", block); println!("Current block: {block}");
let mut stream = provider.watch_blocks().await?.stream(); let mut stream = provider.watch_blocks().await?.stream();
while let Some(block) = stream.next().await { while let Some(block) = stream.next().await {
dbg!(block); dbg!(block);

View File

@ -65,7 +65,7 @@ async fn main() -> eyre::Result<()> {
// 11. get the new value // 11. get the new value
let value = contract.get_value().call().await?; let value = contract.get_value().call().await?;
println!("Value: {}. Logs: {}", value, serde_json::to_string(&logs)?); println!("Value: {value}. Logs: {}", serde_json::to_string(&logs)?);
Ok(()) Ok(())
} }

View File

@ -10,7 +10,7 @@ async fn main() -> Result<()> {
let client = Arc::new(client); let client = Arc::new(client);
let last_block = client.get_block(BlockNumber::Latest).await?.unwrap().number.unwrap(); let last_block = client.get_block(BlockNumber::Latest).await?.unwrap().number.unwrap();
println!("last_block: {}", last_block); println!("last_block: {last_block}");
let erc20_transfer_filter = Filter::new() let erc20_transfer_filter = Filter::new()
.from_block(last_block - 10000) .from_block(last_block - 10000)

View File

@ -49,12 +49,12 @@ async fn example() -> Result<()> {
let (reserve0, reserve1, _) = pair.get_reserves().call().await?; let (reserve0, reserve1, _) = pair.get_reserves().call().await?;
println!("Reserves (token A, Token B): ({}, {})", reserve0, reserve1); println!("Reserves (token A, Token B): ({reserve0}, {reserve1})");
let price = let price =
if reserve0 > reserve1 { 1000 * reserve0 / reserve1 } else { 1000 * reserve1 / reserve0 } / if reserve0 > reserve1 { 1000 * reserve0 / reserve1 } else { 1000 * reserve1 / reserve0 } /
1000; 1000;
println!("token0 / token1 price = {}", price); println!("token0 / token1 price = {price}");
let liquidity = 100.into(); let liquidity = 100.into();
@ -62,9 +62,9 @@ async fn example() -> Result<()> {
let receipt = let receipt =
pair.approve(router.address(), liquidity).send().await?.await?.expect("no receipt found"); pair.approve(router.address(), liquidity).send().await?.await?.expect("no receipt found");
println!("contract approved succesfully!"); println!("contract approved succesfully!");
println!("{:?}", receipt); println!("{receipt:?}");
println!("Removing {} liquidity!", liquidity); println!("Removing {liquidity} liquidity!");
let token0 = pair.token_0().call().await?; let token0 = pair.token_0().call().await?;
let token1 = pair.token_1().call().await?; let token1 = pair.token_1().call().await?;
@ -84,7 +84,7 @@ async fn example() -> Result<()> {
.await? .await?
.expect("no receipt for remove_liquidity"); .expect("no receipt for remove_liquidity");
println!("liquidity removed succesfully!"); println!("liquidity removed succesfully!");
println!("{:?}", receipt); println!("{receipt:?}");
Ok(()) Ok(())
} }

View File

@ -16,7 +16,7 @@ async fn main() -> Result<()> {
// sign message from your wallet and print out signature produced. // sign message from your wallet and print out signature produced.
let signature = wallet.sign_message(message).await?; let signature = wallet.sign_message(message).await?;
println!("Produced signature {}", signature); println!("Produced signature {signature}");
// verify the signature produced from your wallet. // verify the signature produced from your wallet.
signature.verify(message, wallet.address()).unwrap(); signature.verify(message, wallet.address()).unwrap();

View File

@ -12,7 +12,7 @@ async fn main() -> Result<()> {
let client = Arc::new(client); let client = Arc::new(client);
let last_block = client.get_block(BlockNumber::Latest).await?.unwrap().number.unwrap(); let last_block = client.get_block(BlockNumber::Latest).await?.unwrap().number.unwrap();
println!("last_block: {}", last_block); println!("last_block: {last_block}");
let erc20_transfer_filter = Filter::new() let erc20_transfer_filter = Filter::new()
.from_block(last_block - 25) .from_block(last_block - 25)

View File

@ -30,8 +30,8 @@ async fn main() -> Result<()> {
let balance_after = provider.get_balance(from, None).await?; let balance_after = provider.get_balance(from, None).await?;
assert!(balance_after < balance_before); assert!(balance_after < balance_before);
println!("Balance before {}", balance_before); println!("Balance before {balance_before}");
println!("Balance after {}", balance_after); println!("Balance after {balance_after}");
Ok(()) Ok(())
} }

View File

@ -24,9 +24,9 @@ async fn main() -> Result<()> {
// getReserves -> get_reserves // getReserves -> get_reserves
let (reserve0, reserve1, _timestamp) = pair.get_reserves().call().await?; let (reserve0, reserve1, _timestamp) = pair.get_reserves().call().await?;
println!("Reserves (ETH, USDT): ({}, {})", reserve0, reserve1); println!("Reserves (ETH, USDT): ({reserve0}, {reserve1})");
let mid_price = f64::powi(10.0, 18 - 6) * reserve1 as f64 / reserve0 as f64; let mid_price = f64::powi(10.0, 18 - 6) * reserve1 as f64 / reserve0 as f64;
println!("ETH/USDT price: {:.2}", mid_price); println!("ETH/USDT price: {mid_price:.2}");
Ok(()) Ok(())
} }
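The example binaries at the end also show that the capture syntax composes with the usual format specs: `{traces:?}` and `{receipt:?}` are Debug, `{mid_price:.2}` is Display with two decimals. A tiny sketch with stand-in values rather than real chain data:

#[derive(Debug)]
struct Receipt {
    status: u64,
    gas_used: u64,
}

fn main() {
    let receipt = Receipt { status: 1, gas_used: 21_000 };
    let mid_price = 1795.4567_f64;
    let (reserve0, reserve1) = (10_u64, 17_954_u64);

    println!("{receipt:?}");                    // Debug, the inlined form of `"{:?}", receipt`
    println!("{receipt:#?}");                   // pretty-printed Debug composes the same way
    println!("ETH/USDT price: {mid_price:.2}"); // precision spec follows the captured name
    println!("Reserves (ETH, USDT): ({reserve0}, {reserve1})");
}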