chore(clippy): make clippy happy (#1659)
parent 9bcbc6c1c8
commit 6a86d4ff22
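Nearly every hunk below is the same mechanical change: dropping a `&` that clippy's needless_borrow lint flags because the borrowed value already satisfies the callee's `AsRef`/`IntoIterator` bound by value. A minimal sketch of the pattern, assuming the `hex` crate (the identifiers here are illustrative, not from this commit):

// `hex::encode` accepts any `T: AsRef<[u8]>`, and a `[u8; N]` array
// already satisfies that bound by value, so the extra `&` only adds a
// reference that the blanket `impl AsRef<U> for &T` sees through anyway.
fn main() {
    let bytes = [0xaa_u8; 4];
    let with_borrow = hex::encode(&bytes); // clippy: needless_borrow
    let without = hex::encode(bytes); // what this commit changes it to
    assert_eq!(with_borrow, without);
}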
@@ -387,9 +387,9 @@ fn eth_display_works() {
     let val = format!(
         "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, 50, 100, 0x{}, {}, 0x{}, {:?}, 0x{}",
-        hex::encode(&item.h),
+        hex::encode(item.h),
         item.i,
-        hex::encode(&item.arr_u8),
+        hex::encode(item.arr_u8),
         item.arr_u16,
         hex::encode(&item.v),
     );
 
@@ -157,7 +157,7 @@ impl FromStr for Bytes {
         if let Some(value) = value.strip_prefix("0x") {
             hex::decode(value)
         } else {
-            hex::decode(&value)
+            hex::decode(value)
         }
         .map(Into::into)
         .map_err(|e| ParseBytesError(format!("Invalid hex: {}", e)))
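Aside from the lint fix, this `FromStr` impl is a compact way to accept hex strings with or without a `0x` prefix: `strip_prefix` both tests for and removes the prefix in one step. A standalone sketch of the same shape (the `parse_hex` helper is hypothetical, not the crate's API; assumes the `hex` crate):

// Decode a hex string, tolerating an optional "0x" prefix.
fn parse_hex(value: &str) -> Result<Vec<u8>, hex::FromHexError> {
    if let Some(stripped) = value.strip_prefix("0x") {
        hex::decode(stripped)
    } else {
        hex::decode(value)
    }
}

fn main() {
    assert_eq!(parse_hex("0xdeadbeef").unwrap(), parse_hex("deadbeef").unwrap());
}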
@@ -311,7 +311,7 @@ impl TypedTransaction {
 
     /// Hashes the transaction's data with the included signature.
     pub fn hash(&self, signature: &Signature) -> H256 {
-        keccak256(&self.rlp_signed(signature).as_ref()).into()
+        keccak256(self.rlp_signed(signature).as_ref()).into()
     }
 
     /// Decodes a signed TypedTransaction from a rlp encoded byte stream
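The two hash hunks are a slightly different flavour of the lint: `rlp_signed` returns an owned buffer, `.as_ref()` already yields `&[u8]`, and `keccak256` takes `impl AsRef<[u8]>`, so the outer `&` produced a `&&[u8]` that did nothing. A sketch with stand-in types that mirror the shapes involved (not ethers' real code):

// Stand-in for ethers' Bytes: one unambiguous AsRef<[u8]> impl.
struct Bytes(Vec<u8>);

impl AsRef<[u8]> for Bytes {
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

// Same signature shape as ethers' keccak256; the body is elided.
fn keccak256<T: AsRef<[u8]>>(data: T) -> [u8; 32] {
    let _ = data.as_ref(); // the real version hashes this slice
    [0u8; 32]
}

fn demo(buf: Bytes) -> [u8; 32] {
    // Before: keccak256(&buf.as_ref()) passed a needless `&&[u8]`.
    keccak256(buf.as_ref())
}

fn main() {
    let _ = demo(Bytes(vec![1, 2, 3]));
}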
@@ -130,7 +130,7 @@ impl Transaction {
     }
 
     pub fn hash(&self) -> H256 {
-        keccak256(&self.rlp().as_ref()).into()
+        keccak256(self.rlp().as_ref()).into()
     }
 
     pub fn rlp(&self) -> Bytes {
@@ -19,7 +19,7 @@ impl SourceTree {
     /// Expand the source tree into the provided directory. This method sanitizes paths to ensure
     /// that no directory traversal happens.
     pub fn write_to(&self, dir: &Path) -> Result<()> {
-        create_dir_all(&dir)?;
+        create_dir_all(dir)?;
         for entry in &self.entries {
             let mut sanitized_path = sanitize_path(&entry.path);
             if sanitized_path.extension().is_none() {
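The doc comment above is the interesting part of this hunk: `write_to` defends against archive-style directory traversal by sanitizing each entry path before joining it onto `dir`. A minimal sketch of one way such a `sanitize_path` can work; this is illustrative only, not necessarily how ethers-solc implements it:

use std::path::{Component, Path, PathBuf};

// Keep only normal components, dropping "..", root, and prefix parts,
// so an entry like "../../etc/passwd" cannot escape the target dir.
fn sanitize_path(path: &Path) -> PathBuf {
    path.components()
        .filter(|c| matches!(c, Component::Normal(_)))
        .collect()
}

fn main() {
    assert_eq!(sanitize_path(Path::new("../../etc/passwd")), PathBuf::from("etc/passwd"));
}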
@@ -936,8 +936,7 @@ impl<P: JsonRpcClient> Middleware for Provider<P> {
             .map_err(|e| ProviderError::CustomError(format!("Invalid metadata url: {}", e)))?;
 
         if token.type_ == erc::ERCNFTType::ERC1155 {
-            metadata_url
-                .set_path(&metadata_url.path().replace("%7Bid%7D", &hex::encode(&token.id)));
+            metadata_url.set_path(&metadata_url.path().replace("%7Bid%7D", &hex::encode(token.id)));
         }
         if metadata_url.scheme() == "ipfs" {
             metadata_url = erc::http_link_ipfs(metadata_url).map_err(ProviderError::CustomError)?;
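Context for the `%7Bid%7D` replace: ERC-1155 metadata URIs may contain a literal `{id}` placeholder that clients substitute with the token ID as lowercase hex (`%7B`/`%7D` is just `{`/`}` after URL-encoding). A hedged sketch of the substitution; the 32-byte ID giving 64 hex chars follows the EIP-1155 convention and is an assumption here, not visible in the hunk:

// Replace the URL-encoded "{id}" placeholder with the token id in hex.
fn substitute_id(path: &str, id: [u8; 32]) -> String {
    path.replace("%7Bid%7D", &hex::encode(id))
}

fn main() {
    let mut id = [0u8; 32];
    id[31] = 0x2a;
    let path = substitute_id("/api/token/%7Bid%7D.json", id);
    assert!(path.ends_with("2a.json"));
}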
@@ -1265,7 +1264,7 @@ impl<P: JsonRpcClient> Provider<P> {
                 "`{}` resolver ({:?}) does not support selector {}.",
                 ens_name,
                 resolver_address,
-                hex::encode(&selector)
+                hex::encode(selector)
             )))
         }
 
@@ -123,7 +123,7 @@ where
         resp
     }
 
-    #[instrument(err, skip(kms, digest, key_id), fields(digest = %hex::encode(&digest), key_id = %key_id.as_ref()))]
+    #[instrument(err, skip(kms, digest, key_id), fields(digest = %hex::encode(digest), key_id = %key_id.as_ref()))]
     async fn request_sign_digest<T>(
         kms: &KmsClient,
         key_id: T,
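The `#[instrument]` attribute here is from the `tracing` crate: `skip(...)` keeps the listed arguments out of the span's automatic fields, while `fields(...)` records derived values instead, which is why `hex::encode(digest)` appears; a hex rendering reads better than a raw byte array. A minimal sketch of the same idiom (hypothetical function, not the signer's real API; the `%` sigil records via `Display`):

use tracing::instrument;

// Skip the raw bytes; record a readable hex rendering in the span.
#[instrument(skip(digest), fields(digest = %hex::encode(digest)))]
fn sign(digest: [u8; 32]) {
    // signing elided
}

fn main() {
    sign([0u8; 32]); // with a subscriber installed, the span carries digest=0000…
}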
@@ -165,8 +165,8 @@ impl<'a> AwsSigner<'a> {
 
         debug!(
             "Instantiated AWS signer with pubkey 0x{} and address 0x{}",
-            hex::encode(&pubkey.to_bytes()),
-            hex::encode(&address)
+            hex::encode(pubkey.to_bytes()),
+            hex::encode(address)
         );
 
         Ok(Self { kms, chain_id, key_id: key_id.as_ref().to_owned(), pubkey, address })
@@ -204,7 +204,7 @@ impl<'a> AwsSigner<'a> {
 
     /// Sign a digest with this signer's key and add the eip155 `v` value
     /// corresponding to the input chain_id
-    #[instrument(err, skip(digest), fields(digest = %hex::encode(&digest)))]
+    #[instrument(err, skip(digest), fields(digest = %hex::encode(digest)))]
     async fn sign_digest_with_eip155(
         &self,
         digest: H256,
@@ -243,7 +243,7 @@ impl LedgerEthereum {
                 index |= 0x80000000;
             }
 
-            bytes.extend(&index.to_be_bytes());
+            bytes.extend(index.to_be_bytes());
         }
 
         bytes
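For the Ledger hunk: this is BIP-32 path serialization for the device, where `index |= 0x80000000` sets the high bit marking a hardened component and each index is appended big-endian. `Vec::extend` takes any `IntoIterator`, and a `[u8; 4]` array is one by value, so the `&` was needless. A self-contained sketch that follows the hunk in spirit only:

// Serialize BIP-32 path components, marking "N'" segments as hardened.
fn path_to_bytes(components: &[(u32, bool)]) -> Vec<u8> {
    let mut bytes = Vec::new();
    for &(mut index, hardened) in components {
        if hardened {
            index |= 0x8000_0000; // high bit = hardened derivation
        }
        bytes.extend(index.to_be_bytes()); // [u8; 4] iterates by value
    }
    bytes
}

fn main() {
    // m/44'/60' -> 0x8000002C, 0x8000003C
    let bytes = path_to_bytes(&[(44, true), (60, true)]);
    assert_eq!(bytes, [0x80, 0, 0, 0x2c, 0x80, 0, 0, 0x3c]);
}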
@@ -607,7 +607,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
             .collect();
 
         let entry = CacheEntry {
-            last_modification_date: CacheEntry::read_last_modification_date(&file)
+            last_modification_date: CacheEntry::read_last_modification_date(file)
                 .unwrap_or_default(),
             content_hash: source.content_hash(),
             source_name: utils::source_name(file, self.project.root()).into(),
@@ -721,7 +721,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
     /// returns `false` if the corresponding cache entry remained unchanged otherwise `true`
     fn is_dirty(&self, file: &Path, version: &Version) -> bool {
         if let Some(hash) = self.content_hashes.get(file) {
-            if let Some(entry) = self.cache.entry(&file) {
+            if let Some(entry) = self.cache.entry(file) {
                 if entry.content_hash.as_bytes() != hash.as_bytes() {
                     tracing::trace!("changed content hash for source file \"{}\"", file.display());
                     return true
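This `is_dirty` check is the heart of the compile cache: a file is recompiled when its current content hash no longer matches the hash stored in its cache entry. A reduced sketch of the same decision, with hypothetical types rather than ethers-solc's real ones:

use std::collections::HashMap;
use std::path::{Path, PathBuf};

struct Cache {
    // current hashes, keyed by source file
    content_hashes: HashMap<PathBuf, String>,
    // hash recorded at the last successful compile
    entries: HashMap<PathBuf, String>,
}

impl Cache {
    // `true` means the file must be recompiled.
    fn is_dirty(&self, file: &Path) -> bool {
        match (self.content_hashes.get(file), self.entries.get(file)) {
            (Some(now), Some(then)) => now != then, // content changed
            _ => true,                              // unknown file: recompile
        }
    }
}

fn main() {
    let cache = Cache { content_hashes: HashMap::new(), entries: HashMap::new() };
    assert!(cache.is_dirty(Path::new("Contract.sol")));
}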
@@ -465,7 +465,7 @@ impl Solc {
         if checksum_calc == checksum_found {
             Ok(())
         } else {
-            let expected = hex::encode(&checksum_found);
+            let expected = hex::encode(checksum_found);
             let detected = hex::encode(checksum_calc);
             tracing::warn!(target: "solc", "checksum mismatch for {:?}, expected {}, but found {} for file {:?}", version, expected, detected, version_path);
             Err(SolcError::ChecksumMismatch { version, expected, detected, file: version_path })
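The checksum hunk guards downloaded solc binaries: a checksum computed from the downloaded bytes is compared against the published one, and both are hex-encoded only on the error path. A hedged sketch of that verify step, using a plain `String` error as a stand-in for `SolcError::ChecksumMismatch`:

// Compare a computed checksum against the expected one, reporting
// both in hex on mismatch.
fn verify_checksum(calc: &[u8], found: &[u8]) -> Result<(), String> {
    if calc == found {
        Ok(())
    } else {
        Err(format!(
            "checksum mismatch: expected {}, found {}",
            hex::encode(found),
            hex::encode(calc),
        ))
    }
}

fn main() {
    assert!(verify_checksum(&[1, 2], &[1, 2]).is_ok());
    assert!(verify_checksum(&[1, 2], &[3, 4]).is_err());
}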
@@ -923,13 +923,13 @@ mod tests {
         std::fs::File::create(&contracts).unwrap();
         assert_eq!(ProjectPathsConfig::find_source_dir(root), contracts,);
         assert_eq!(
-            ProjectPathsConfig::builder().build_with_root(&root).sources,
+            ProjectPathsConfig::builder().build_with_root(root).sources,
             utils::canonicalized(contracts),
         );
         std::fs::File::create(&src).unwrap();
         assert_eq!(ProjectPathsConfig::find_source_dir(root), src,);
         assert_eq!(
-            ProjectPathsConfig::builder().build_with_root(&root).sources,
+            ProjectPathsConfig::builder().build_with_root(root).sources,
             utils::canonicalized(src),
         );
 
@@ -937,18 +937,18 @@ mod tests {
         std::fs::File::create(&artifacts).unwrap();
         assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), artifacts,);
         assert_eq!(
-            ProjectPathsConfig::builder().build_with_root(&root).artifacts,
+            ProjectPathsConfig::builder().build_with_root(root).artifacts,
             utils::canonicalized(artifacts),
         );
         assert_eq!(
-            ProjectPathsConfig::builder().build_with_root(&root).build_infos,
+            ProjectPathsConfig::builder().build_with_root(root).build_infos,
             utils::canonicalized(build_infos)
         );
 
         std::fs::File::create(&out).unwrap();
         assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,);
         assert_eq!(
-            ProjectPathsConfig::builder().build_with_root(&root).artifacts,
+            ProjectPathsConfig::builder().build_with_root(root).artifacts,
             utils::canonicalized(out),
         );
 
@@ -956,13 +956,13 @@ mod tests {
         std::fs::File::create(&node_modules).unwrap();
         assert_eq!(ProjectPathsConfig::find_libs(root), vec![node_modules.clone()],);
         assert_eq!(
-            ProjectPathsConfig::builder().build_with_root(&root).libraries,
+            ProjectPathsConfig::builder().build_with_root(root).libraries,
             vec![utils::canonicalized(node_modules)],
         );
         std::fs::File::create(&lib).unwrap();
         assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],);
         assert_eq!(
-            ProjectPathsConfig::builder().build_with_root(&root).libraries,
+            ProjectPathsConfig::builder().build_with_root(root).libraries,
             vec![utils::canonicalized(lib)],
         );
     }
@@ -975,7 +975,7 @@ mod tests {
 
         // Set the artifacts directory without setting the
         // build info directory
-        let project = ProjectPathsConfig::builder().artifacts(&artifacts).build_with_root(&root);
+        let project = ProjectPathsConfig::builder().artifacts(&artifacts).build_with_root(root);
 
         // The artifacts should be set correctly based on the configured value
         assert_eq!(project.artifacts, utils::canonicalized(artifacts));
@@ -564,7 +564,7 @@ pragma solidity ^0.8.0;
         let a = Path::new("/foo/bar/bar/test.txt");
         let b = Path::new("/foo/bar/foo/example/constract.sol");
         let expected = Path::new("/foo/bar");
-        assert_eq!(common_ancestor(&a, &b).unwrap(), expected.to_path_buf())
+        assert_eq!(common_ancestor(a, b).unwrap(), expected.to_path_buf())
     }
 
     #[test]
@@ -385,7 +385,7 @@ fn can_compile_dapp_sample_with_cache() {
         assert!(compiled.is_unchanged());
 
         // deleted artifacts cause recompile even with cache
-        std::fs::remove_dir_all(&project.artifacts_path()).unwrap();
+        std::fs::remove_dir_all(project.artifacts_path()).unwrap();
         let compiled = project.compile().unwrap();
         assert!(compiled.find_first("Dapp").is_some());
         assert!(!compiled.is_unchanged());