chore(clippy): make clippy happy (#1659)

This commit is contained in:
Matthias Seitz 2022-09-04 19:57:52 +02:00 committed by GitHub
parent 9bcbc6c1c8
commit 6a86d4ff22
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 26 additions and 27 deletions

View File

@@ -387,9 +387,9 @@ fn eth_display_works() {
let val = format!(
"0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, 50, 100, 0x{}, {}, 0x{}, {:?}, 0x{}",
hex::encode(&item.h),
hex::encode(item.h),
item.i,
hex::encode(&item.arr_u8),
hex::encode(item.arr_u8),
item.arr_u16,
hex::encode(&item.v),
);

View File

@@ -157,7 +157,7 @@ impl FromStr for Bytes {
if let Some(value) = value.strip_prefix("0x") {
hex::decode(value)
} else {
hex::decode(&value)
hex::decode(value)
}
.map(Into::into)
.map_err(|e| ParseBytesError(format!("Invalid hex: {}", e)))

View File

@@ -311,7 +311,7 @@ impl TypedTransaction {
/// Hashes the transaction's data with the included signature.
pub fn hash(&self, signature: &Signature) -> H256 {
keccak256(&self.rlp_signed(signature).as_ref()).into()
keccak256(self.rlp_signed(signature).as_ref()).into()
}
/// Decodes a signed TypedTransaction from a rlp encoded byte stream

View File

@@ -130,7 +130,7 @@ impl Transaction {
}
pub fn hash(&self) -> H256 {
keccak256(&self.rlp().as_ref()).into()
keccak256(self.rlp().as_ref()).into()
}
pub fn rlp(&self) -> Bytes {

View File

@@ -19,7 +19,7 @@ impl SourceTree {
/// Expand the source tree into the provided directory. This method sanitizes paths to ensure
/// that no directory traversal happens.
pub fn write_to(&self, dir: &Path) -> Result<()> {
create_dir_all(&dir)?;
create_dir_all(dir)?;
for entry in &self.entries {
let mut sanitized_path = sanitize_path(&entry.path);
if sanitized_path.extension().is_none() {

View File

@@ -936,8 +936,7 @@ impl<P: JsonRpcClient> Middleware for Provider<P> {
.map_err(|e| ProviderError::CustomError(format!("Invalid metadata url: {}", e)))?;
if token.type_ == erc::ERCNFTType::ERC1155 {
metadata_url
.set_path(&metadata_url.path().replace("%7Bid%7D", &hex::encode(&token.id)));
metadata_url.set_path(&metadata_url.path().replace("%7Bid%7D", &hex::encode(token.id)));
}
if metadata_url.scheme() == "ipfs" {
metadata_url = erc::http_link_ipfs(metadata_url).map_err(ProviderError::CustomError)?;
@@ -1265,7 +1264,7 @@ impl<P: JsonRpcClient> Provider<P> {
"`{}` resolver ({:?}) does not support selector {}.",
ens_name,
resolver_address,
hex::encode(&selector)
hex::encode(selector)
)))
}

View File

@@ -123,7 +123,7 @@ where
resp
}
#[instrument(err, skip(kms, digest, key_id), fields(digest = %hex::encode(&digest), key_id = %key_id.as_ref()))]
#[instrument(err, skip(kms, digest, key_id), fields(digest = %hex::encode(digest), key_id = %key_id.as_ref()))]
async fn request_sign_digest<T>(
kms: &KmsClient,
key_id: T,
@@ -165,8 +165,8 @@ impl<'a> AwsSigner<'a> {
debug!(
"Instantiated AWS signer with pubkey 0x{} and address 0x{}",
hex::encode(&pubkey.to_bytes()),
hex::encode(&address)
hex::encode(pubkey.to_bytes()),
hex::encode(address)
);
Ok(Self { kms, chain_id, key_id: key_id.as_ref().to_owned(), pubkey, address })
@@ -204,7 +204,7 @@ impl<'a> AwsSigner<'a> {
/// Sign a digest with this signer's key and add the eip155 `v` value
/// corresponding to the input chain_id
#[instrument(err, skip(digest), fields(digest = %hex::encode(&digest)))]
#[instrument(err, skip(digest), fields(digest = %hex::encode(digest)))]
async fn sign_digest_with_eip155(
&self,
digest: H256,

View File

@@ -243,7 +243,7 @@ impl LedgerEthereum {
index |= 0x80000000;
}
bytes.extend(&index.to_be_bytes());
bytes.extend(index.to_be_bytes());
}
bytes

View File

@@ -607,7 +607,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
.collect();
let entry = CacheEntry {
last_modification_date: CacheEntry::read_last_modification_date(&file)
last_modification_date: CacheEntry::read_last_modification_date(file)
.unwrap_or_default(),
content_hash: source.content_hash(),
source_name: utils::source_name(file, self.project.root()).into(),
@@ -721,7 +721,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
/// returns `false` if the corresponding cache entry remained unchanged otherwise `true`
fn is_dirty(&self, file: &Path, version: &Version) -> bool {
if let Some(hash) = self.content_hashes.get(file) {
if let Some(entry) = self.cache.entry(&file) {
if let Some(entry) = self.cache.entry(file) {
if entry.content_hash.as_bytes() != hash.as_bytes() {
tracing::trace!("changed content hash for source file \"{}\"", file.display());
return true

View File

@@ -465,7 +465,7 @@ impl Solc {
if checksum_calc == checksum_found {
Ok(())
} else {
let expected = hex::encode(&checksum_found);
let expected = hex::encode(checksum_found);
let detected = hex::encode(checksum_calc);
tracing:: warn!(target : "solc", "checksum mismatch for {:?}, expected {}, but found {} for file {:?}", version, expected, detected, version_path);
Err(SolcError::ChecksumMismatch { version, expected, detected, file: version_path })

View File

@@ -923,13 +923,13 @@ mod tests {
std::fs::File::create(&contracts).unwrap();
assert_eq!(ProjectPathsConfig::find_source_dir(root), contracts,);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).sources,
ProjectPathsConfig::builder().build_with_root(root).sources,
utils::canonicalized(contracts),
);
std::fs::File::create(&src).unwrap();
assert_eq!(ProjectPathsConfig::find_source_dir(root), src,);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).sources,
ProjectPathsConfig::builder().build_with_root(root).sources,
utils::canonicalized(src),
);
@@ -937,18 +937,18 @@ mod tests {
std::fs::File::create(&artifacts).unwrap();
assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), artifacts,);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).artifacts,
ProjectPathsConfig::builder().build_with_root(root).artifacts,
utils::canonicalized(artifacts),
);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).build_infos,
ProjectPathsConfig::builder().build_with_root(root).build_infos,
utils::canonicalized(build_infos)
);
std::fs::File::create(&out).unwrap();
assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).artifacts,
ProjectPathsConfig::builder().build_with_root(root).artifacts,
utils::canonicalized(out),
);
@@ -956,13 +956,13 @@ mod tests {
std::fs::File::create(&node_modules).unwrap();
assert_eq!(ProjectPathsConfig::find_libs(root), vec![node_modules.clone()],);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).libraries,
ProjectPathsConfig::builder().build_with_root(root).libraries,
vec![utils::canonicalized(node_modules)],
);
std::fs::File::create(&lib).unwrap();
assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],);
assert_eq!(
ProjectPathsConfig::builder().build_with_root(&root).libraries,
ProjectPathsConfig::builder().build_with_root(root).libraries,
vec![utils::canonicalized(lib)],
);
}
@@ -975,7 +975,7 @@ mod tests {
// Set the artifacts directory without setting the
// build info directory
let project = ProjectPathsConfig::builder().artifacts(&artifacts).build_with_root(&root);
let project = ProjectPathsConfig::builder().artifacts(&artifacts).build_with_root(root);
// The artifacts should be set correctly based on the configured value
assert_eq!(project.artifacts, utils::canonicalized(artifacts));

View File

@@ -564,7 +564,7 @@ pragma solidity ^0.8.0;
let a = Path::new("/foo/bar/bar/test.txt");
let b = Path::new("/foo/bar/foo/example/constract.sol");
let expected = Path::new("/foo/bar");
assert_eq!(common_ancestor(&a, &b).unwrap(), expected.to_path_buf())
assert_eq!(common_ancestor(a, b).unwrap(), expected.to_path_buf())
}
#[test]

View File

@@ -385,7 +385,7 @@ fn can_compile_dapp_sample_with_cache() {
assert!(compiled.is_unchanged());
// deleted artifacts cause recompile even with cache
std::fs::remove_dir_all(&project.artifacts_path()).unwrap();
std::fs::remove_dir_all(project.artifacts_path()).unwrap();
let compiled = project.compile().unwrap();
assert!(compiled.find_first("Dapp").is_some());
assert!(!compiled.is_unchanged());