refactor(contract): derive procedural macros (#2152)
* refactor: use Result<_, Error>
* fix: report both errors during parsing
* refactor: abigen derive results
* update Event derive
* refactor: derive utils
* fmt Display derive
* fmt Codec derive
* refactor: derives
* fix artifacts
* chore: clippy
parent 37acf65dae
commit d8597202ed
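The diff below converts the derive helpers from returning raw token streams (each emitting `compile_error!` on its own) to returning `Result<TokenStream, syn::Error>`, with the conversion to a compile error done once at each `#[proc_macro_derive]` entry point. A minimal sketch of that pattern, with simplified signatures rather than the crate's exact internals:

```rust
use proc_macro2::TokenStream;
use syn::{parse::Error, DeriveInput};

// Inner helper: propagate failures with `?` instead of emitting error tokens early.
fn derive_impl(input: &DeriveInput) -> Result<TokenStream, Error> {
    Err(Error::new_spanned(input, "example error"))
}

// Entry point: turn any error into a `compile_error!` invocation exactly once.
fn expand(input: DeriveInput) -> TokenStream {
    derive_impl(&input).unwrap_or_else(|err| err.to_compile_error())
}
```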
@@ -1392,6 +1392,7 @@ version = "1.0.2"
 dependencies = [
  "ethers-contract-abigen",
  "ethers-core",
+ "eyre",
  "hex",
  "proc-macro2",
  "quote",
@@ -20,12 +20,14 @@ proc-macro = true
 ethers-core = { version = "^1.0.0", path = "../../ethers-core" }
 ethers-contract-abigen = { version = "^1.0.0", path = "../ethers-contract-abigen", default-features = false }
 
-serde_json = "1.0.53"
-hex = { version = "0.4.3", default-features = false, features = ["std"] }
 proc-macro2 = "1.0"
 quote = "1.0"
 syn = "1.0.12"
 
+serde_json = "1.0.53"
+hex = { version = "0.4.3", default-features = false, features = ["std"] }
+eyre = "0.6"
+
 [package.metadata.docs.rs]
 all-features = true
 rustdoc-args = ["--cfg", "docsrs"]
@@ -4,42 +4,32 @@ use crate::utils;
 use ethers_core::macros::ethers_core_crate;
 use proc_macro2::{Ident, Literal, TokenStream};
 use quote::{quote, quote_spanned};
-use syn::{parse::Error, spanned::Spanned as _, Data, DeriveInput, Fields, Variant};
+use syn::{parse::Error, spanned::Spanned, Data, DeriveInput, Fields, Variant};
 
 /// Generates the tokenize implementation
-pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream {
-    let core_crate = ethers_core_crate();
+pub fn derive_tokenizeable_impl(input: &DeriveInput) -> Result<TokenStream, Error> {
+    let ethers_core = ethers_core_crate();
     let name = &input.ident;
-    let generic_params = input.generics.params.iter().map(|p| quote! { #p });
-    let generic_params = quote! { #(#generic_params,)* };
-
-    let generic_args = input.generics.type_params().map(|p| {
-        let name = &p.ident;
-        quote_spanned! { p.ident.span() => #name }
-    });
-
-    let generic_args = quote! { #(#generic_args,)* };
-
-    let generic_predicates = match input.generics.where_clause {
-        Some(ref clause) => {
-            let predicates = clause.predicates.iter().map(|p| quote! { #p });
-            quote! { #(#predicates,)* }
-        }
-        None => quote! {},
-    };
+    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
+    let generic_predicates = where_clause.map(|c| &c.predicates);
 
     let (tokenize_predicates, params_len, init_struct_impl, into_token_impl) = match input.data {
         Data::Struct(ref data) => match data.fields {
             Fields::Named(ref fields) => {
                 let tokenize_predicates = fields.named.iter().map(|f| {
                     let ty = &f.ty;
-                    quote_spanned! { f.span() => #ty: #core_crate::abi::Tokenize }
+                    quote_spanned! { f.span() => #ty: #ethers_core::abi::Tokenize }
                 });
                 let tokenize_predicates = quote! { #(#tokenize_predicates,)* };
 
                 let assignments = fields.named.iter().map(|f| {
                     let name = f.ident.as_ref().expect("Named fields have names");
-                    quote_spanned! { f.span() => #name: #core_crate::abi::Tokenizable::from_token(iter.next().expect("The iter is guaranteed to be something due to the size check"))? }
+                    quote_spanned! { f.span() =>
+                        #name: #ethers_core::abi::Tokenizable::from_token(
+                            iter.next().expect("The iter is guaranteed to be something due to the size check")
+                        )?
+                    }
                 });
                 let init_struct_impl = quote! { Self { #(#assignments,)* } };
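The hunk above replaces the hand-rolled generic parameter handling with `syn`'s `Generics::split_for_impl`. A small self-contained illustration of that API (the `MyMarker` trait is a placeholder for this sketch, not part of the crate):

```rust
use proc_macro2::TokenStream;
use quote::quote;
use syn::DeriveInput;

// `split_for_impl` yields the three pieces needed to write a generic impl:
// `impl_generics` (parameters with their bounds), `ty_generics` (bare
// parameters) and the optional `where_clause`, all spliceable with `quote!`.
fn impl_marker(input: &DeriveInput) -> TokenStream {
    let name = &input.ident;
    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
    quote! {
        impl #impl_generics MyMarker for #name #ty_generics #where_clause {}
    }
}
```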
@@ -54,12 +44,16 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream
             Fields::Unnamed(ref fields) => {
                 let tokenize_predicates = fields.unnamed.iter().map(|f| {
                     let ty = &f.ty;
-                    quote_spanned! { f.span() => #ty: #core_crate::abi::Tokenize }
+                    quote_spanned! { f.span() => #ty: #ethers_core::abi::Tokenize }
                 });
                 let tokenize_predicates = quote! { #(#tokenize_predicates,)* };
 
                 let assignments = fields.unnamed.iter().map(|f| {
-                    quote_spanned! { f.span() => #core_crate::abi::Tokenizable::from_token(iter.next().expect("The iter is guaranteed to be something due to the size check"))? }
+                    quote_spanned! { f.span() =>
+                        #ethers_core::abi::Tokenizable::from_token(
+                            iter.next().expect("The iter is guaranteed to be something due to the size check")
+                        )?
+                    }
                 });
                 let init_struct_impl = quote! { Self(#(#assignments,)* ) };
@@ -71,17 +65,11 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream
 
                 (tokenize_predicates, fields.unnamed.len(), init_struct_impl, into_token_impl)
             }
-            Fields::Unit => return tokenize_unit_type(&input.ident),
+            Fields::Unit => return Ok(tokenize_unit_type(&input.ident)),
         },
-        Data::Enum(ref data) => {
-            return match tokenize_enum(name, data.variants.iter()) {
-                Ok(tokens) => tokens,
-                Err(err) => err.to_compile_error(),
-            }
-        }
+        Data::Enum(ref data) => return tokenize_enum(name, data.variants.iter()),
         Data::Union(_) => {
-            return Error::new(input.span(), "EthAbiType cannot be derived for unions")
-                .to_compile_error()
+            return Err(Error::new(input.span(), "EthAbiType cannot be derived for unions"))
         }
     };
 
@@ -95,14 +83,14 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream
             // can't encode an empty struct
             // TODO: panic instead?
             quote! {
-                #core_crate::abi::Token::Tuple(Vec::new())
+                #ethers_core::abi::Token::Tuple(Vec::new())
             },
         ),
         _ => {
             let from_token = quote! {
-                if let #core_crate::abi::Token::Tuple(tokens) = token {
+                if let #ethers_core::abi::Token::Tuple(tokens) = token {
                     if tokens.len() != #params_len {
-                        return Err(#core_crate::abi::InvalidOutputType(::std::format!(
+                        return Err(#ethers_core::abi::InvalidOutputType(::std::format!(
                             "Expected {} tokens, got {}: {:?}",
                             #params_len,
                             tokens.len(),
@@ -114,7 +102,7 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream
 
                     Ok(#init_struct_impl)
                 } else {
-                    Err(#core_crate::abi::InvalidOutputType(::std::format!(
+                    Err(#ethers_core::abi::InvalidOutputType(::std::format!(
                         "Expected Tuple, got {:?}",
                         token
                     )))
@@ -122,7 +110,7 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream
             };
 
             let into_token = quote! {
-                #core_crate::abi::Token::Tuple(
+                #ethers_core::abi::Token::Tuple(
                     ::std::vec![
                         #into_token_impl
                     ]
@@ -132,56 +120,57 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream
         }
     };
 
-    let params = match utils::derive_param_type_with_abi_type(input, "EthAbiType") {
-        Ok(params) => params,
-        Err(err) => return err.to_compile_error(),
-    };
-    quote! {
-
-        impl<#generic_params> #core_crate::abi::AbiType for #name<#generic_args> {
-            fn param_type() -> #core_crate::abi::ParamType {
+    let params = utils::derive_param_type_with_abi_type(input, "EthAbiType")?;
+
+    Ok(quote! {
+        impl #impl_generics #ethers_core::abi::AbiType for #name #ty_generics #where_clause {
+            fn param_type() -> #ethers_core::abi::ParamType {
                 #params
             }
         }
 
-        impl<#generic_params> #core_crate::abi::AbiArrayType for #name<#generic_args> {}
+        impl #impl_generics #ethers_core::abi::AbiArrayType for #name #ty_generics #where_clause {}
 
-        impl<#generic_params> #core_crate::abi::Tokenizable for #name<#generic_args>
+        impl #impl_generics #ethers_core::abi::Tokenizable for #name #ty_generics
         where
             #generic_predicates
             #tokenize_predicates
         {
-            fn from_token(token: #core_crate::abi::Token) -> ::std::result::Result<Self, #core_crate::abi::InvalidOutputType> where
-                Self: Sized {
+            fn from_token(token: #ethers_core::abi::Token) -> ::std::result::Result<Self, #ethers_core::abi::InvalidOutputType>
+            where
+                Self: Sized,
+            {
                 #from_token_impl
             }
 
-            fn into_token(self) -> #core_crate::abi::Token {
+            fn into_token(self) -> #ethers_core::abi::Token {
                 #into_token_impl
            }
        }
 
-        impl<#generic_params> #core_crate::abi::TokenizableItem for #name<#generic_args>
+        impl #impl_generics #ethers_core::abi::TokenizableItem for #name #ty_generics
         where
             #generic_predicates
             #tokenize_predicates
-        { }
-    }
+        {}
+    })
 }
 
 fn tokenize_unit_type(name: &Ident) -> TokenStream {
     let ethers_core = ethers_core_crate();
 
     quote! {
         impl #ethers_core::abi::Tokenizable for #name {
-            fn from_token(token: #ethers_core::abi::Token) -> ::std::result::Result<Self, #ethers_core::abi::InvalidOutputType> where
-                Self: Sized {
+            fn from_token(token: #ethers_core::abi::Token) -> ::std::result::Result<Self, #ethers_core::abi::InvalidOutputType>
+            where
+                Self: Sized,
+            {
                 if let #ethers_core::abi::Token::Tuple(tokens) = token {
                     if !tokens.is_empty() {
                         Err(#ethers_core::abi::InvalidOutputType(::std::format!(
                             "Expected empty tuple, got {:?}",
                             tokens
                         )))
                     } else {
                         Ok(#name{})
                     }
@@ -194,10 +183,11 @@ fn tokenize_unit_type(name: &Ident) -> TokenStream {
                 }
 
             fn into_token(self) -> #ethers_core::abi::Token {
                 #ethers_core::abi::Token::Tuple(::std::vec::Vec::new())
             }
         }
-        impl #ethers_core::abi::TokenizableItem for #name { }
+
+        impl #ethers_core::abi::TokenizableItem for #name {}
     }
 }
@@ -210,7 +200,7 @@ fn tokenize_unit_type(name: &Ident) -> TokenStream {
 fn tokenize_enum<'a>(
     enum_name: &Ident,
     variants: impl Iterator<Item = &'a Variant> + 'a,
-) -> ::std::result::Result<TokenStream, Error> {
+) -> Result<TokenStream, Error> {
     let ethers_core = ethers_core_crate();
 
     let mut into_tokens = TokenStream::new();
@@ -225,12 +215,12 @@ fn tokenize_enum<'a>(
         } else if variant.fields.is_empty() {
             let value = Literal::u8_unsuffixed(idx as u8);
             from_tokens.extend(quote! {
                 if let Ok(#value) = u8::from_token(token.clone()) {
                     return Ok(#enum_name::#var_ident)
                 }
             });
             into_tokens.extend(quote! {
                 #enum_name::#var_ident => #value.into_token(),
             });
         } else if let Some(field) = variant.fields.iter().next() {
             let ty = &field.ty;
@@ -240,30 +230,32 @@ fn tokenize_enum<'a>(
                 }
             });
             into_tokens.extend(quote! {
                 #enum_name::#var_ident(element) => element.into_token(),
             });
         } else {
             into_tokens.extend(quote! {
                 #enum_name::#var_ident(element) => # ethers_core::abi::Token::Tuple(::std::vec::Vec::new()),
             });
         }
     }
 
     Ok(quote! {
         impl #ethers_core::abi::Tokenizable for #enum_name {
-            fn from_token(token: #ethers_core::abi::Token) -> ::std::result::Result<Self, #ethers_core::abi::InvalidOutputType> where
-                Self: Sized {
+            fn from_token(token: #ethers_core::abi::Token) -> ::std::result::Result<Self, #ethers_core::abi::InvalidOutputType>
+            where
+                Self: Sized,
+            {
                 #from_tokens
                 Err(#ethers_core::abi::InvalidOutputType("Failed to decode all type variants".to_string()))
             }
 
             fn into_token(self) -> #ethers_core::abi::Token {
                 match self {
                     #into_tokens
                 }
             }
         }
-        impl #ethers_core::abi::TokenizableItem for #enum_name { }
+
+        impl #ethers_core::abi::TokenizableItem for #enum_name {}
     })
 }
 
@@ -8,14 +8,15 @@ use ethers_contract_abigen::{
     Abigen,
 };
 use ethers_core::abi::{Function, FunctionExt, Param, StateMutability};
+use eyre::Result;
 use proc_macro2::{Span, TokenStream as TokenStream2};
 use quote::ToTokens;
-use std::{collections::HashSet, error::Error};
+use std::collections::HashSet;
 use syn::{
     braced,
     ext::IdentExt,
     parenthesized,
-    parse::{Error as ParseError, Parse, ParseStream, Result as ParseResult},
+    parse::{Error, Parse, ParseStream, Result as ParseResult},
     Ident, LitStr, Path, Token,
 };
 
@@ -26,13 +27,13 @@ pub(crate) struct Contracts {
 }
 
 impl Contracts {
-    pub(crate) fn expand(self) -> ::std::result::Result<TokenStream2, syn::Error> {
+    pub(crate) fn expand(self) -> Result<TokenStream2, Error> {
         let mut expansions = Vec::with_capacity(self.inner.len());
 
         // expand all contracts
         for (span, contract) in self.inner {
-            let contract = Self::expand_contract(contract)
-                .map_err(|err| syn::Error::new(span, err.to_string()))?;
+            let contract =
+                Self::expand_contract(contract).map_err(|err| Error::new(span, err.to_string()))?;
             expansions.push(contract);
         }
 
@@ -40,10 +41,8 @@ impl Contracts {
         Ok(MultiExpansion::new(expansions).expand_inplace())
     }
 
-    fn expand_contract(
-        contract: ContractArgs,
-    ) -> Result<(ExpandedContract, Context), Box<dyn Error>> {
-        Ok(contract.into_builder()?.expand()?)
+    fn expand_contract(contract: ContractArgs) -> Result<(ExpandedContract, Context)> {
+        contract.into_builder()?.expand()
     }
 }
 
@@ -66,7 +65,7 @@ pub(crate) struct ContractArgs {
 }
 
 impl ContractArgs {
-    fn into_builder(self) -> Result<Abigen, Box<dyn Error>> {
+    fn into_builder(self) -> Result<Abigen> {
         let mut builder = Abigen::new(&self.name, &self.abi)?;
 
         for parameter in self.parameters.into_iter() {
@@ -151,13 +150,13 @@ impl Parse for Parameter {
         let mut aliases = HashSet::new();
         for method in parsed {
             if !signatures.insert(method.signature.clone()) {
-                return Err(ParseError::new(
+                return Err(Error::new(
                     method.span(),
                     "duplicate method signature in `abigen!` macro invocation",
                 ))
             }
             if !aliases.insert(method.alias.clone()) {
-                return Err(ParseError::new(
+                return Err(Error::new(
                     method.span(),
                     "duplicate method alias in `abigen!` macro invocation",
                 ))
@@ -181,10 +180,7 @@ impl Parse for Parameter {
                 Parameter::Derives(derives)
             }
             _ => {
-                return Err(ParseError::new(
-                    name.span(),
-                    format!("unexpected named parameter `{name}`"),
-                ))
+                return Err(Error::new(name.span(), format!("unexpected named parameter `{name}`")))
             }
         };
 
@@ -211,7 +207,7 @@ impl Parse for Method {
             .iter()
             .map(|ident| {
                 let kind = serde_json::from_value(serde_json::json!(&ident.to_string()))
-                    .map_err(|err| ParseError::new(ident.span(), err))?;
+                    .map_err(|err| Error::new(ident.span(), err))?;
                 Ok(Param { name: "".into(), kind, internal_type: None })
             })
             .collect::<ParseResult<Vec<_>>>()?;
 
@@ -10,50 +10,40 @@ use quote::quote;
 use syn::{parse::Error, DeriveInput};
 
 /// Generates the `ethcall` trait support
-pub(crate) fn derive_eth_call_impl(input: DeriveInput) -> TokenStream {
-    let attributes = match parse_calllike_attributes(&input, "ethcall") {
-        Ok(attributes) => attributes,
-        Err(errors) => return errors,
-    };
+pub(crate) fn derive_eth_call_impl(input: DeriveInput) -> Result<TokenStream, Error> {
+    let attributes = parse_calllike_attributes(&input, "ethcall")?;
 
     let function_call_name =
         attributes.name.map(|(s, _)| s).unwrap_or_else(|| input.ident.to_string());
 
-    let mut function = if let Some((src, span)) = attributes.abi {
-        let raw_function_sig = src.trim_start_matches("function ").trim_start();
+    let mut function = if let Some((abi, span)) = attributes.abi {
+        let sig = abi.trim_start_matches("function ").trim_start();
         // try to parse as solidity function
-        if let Ok(fun) = HumanReadableParser::parse_function(&src) {
-            fun
-        } else {
-            // try to determine the abi by using its fields at runtime
-            return match derive_trait_impls_with_abi_type(
-                &input,
-                &function_call_name,
-                Some(raw_function_sig),
-            ) {
-                Ok(derived) => derived,
-                Err(err) => {
-                    Error::new(span, format!("Unable to determine ABI for `{src}` : {err}"))
-                        .to_compile_error()
-                }
+        match HumanReadableParser::parse_function(&abi) {
+            Ok(fun) => fun,
+            Err(parse_err) => {
+                return derive_trait_impls_with_abi_type(&input, &function_call_name, Some(sig))
+                    .map_err(|e| {
+                        let mut error = Error::new(span, parse_err);
+                        error.combine(Error::new(span, e));
+                        error
+                    })
             }
         }
     } else {
         // try to determine the abi by using its fields at runtime
-        return match derive_trait_impls_with_abi_type(&input, &function_call_name, None) {
-            Ok(derived) => derived,
-            Err(err) => err.to_compile_error(),
-        }
+        return derive_trait_impls_with_abi_type(&input, &function_call_name, None)
     };
     function.name = function_call_name.clone();
-    let abi = function.abi_signature();
+    let sig = function.abi_signature();
     let selector = utils::selector(function.selector());
     let decode_impl = derive_decode_impl_from_params(&function.inputs, ident("EthCall"));
 
     derive_trait_impls(
         &input,
         &function_call_name,
-        quote! {#abi.into()},
+        quote!(#sig.into()),
         Some(selector),
         decode_impl,
     )
@@ -65,17 +55,14 @@ fn derive_trait_impls_with_abi_type(
     function_call_name: &str,
     abi_signature: Option<&str>,
 ) -> Result<TokenStream, Error> {
-    let abi_signature = if let Some(abi) = abi_signature {
-        quote! {#abi}
+    let mut abi_signature = if let Some(sig) = abi_signature {
+        quote!(#sig)
     } else {
-        utils::derive_abi_signature_with_abi_type(input, function_call_name, "EthCall")?
-    };
-
-    let abi_signature = quote! {
-        #abi_signature.into()
+        utils::abi_signature_with_abi_type(input, function_call_name, "EthCall")?
     };
+    abi_signature.extend(quote!(.into()));
     let decode_impl = derive_decode_impl_with_abi_type(input, ident("EthCall"))?;
-    Ok(derive_trait_impls(input, function_call_name, abi_signature, None, decode_impl))
+    derive_trait_impls(input, function_call_name, abi_signature, None, decode_impl)
 }
 
 /// Generates the EthCall implementation
@@ -85,26 +72,25 @@ pub fn derive_trait_impls(
     abi_signature: TokenStream,
     selector: Option<TokenStream>,
     decode_impl: TokenStream,
-) -> TokenStream {
+) -> Result<TokenStream, Error> {
     // the ethers crates to use
-    let core_crate = ethers_core_crate();
-    let contract_crate = ethers_contract_crate();
+    let ethers_core = ethers_core_crate();
+    let ethers_contract = ethers_contract_crate();
     let struct_name = &input.ident;
 
     let selector = selector.unwrap_or_else(|| {
         quote! {
-            #core_crate::utils::id(Self::abi_signature())
+            #ethers_core::utils::id(Self::abi_signature())
         }
     });
 
     let ethcall_impl = quote! {
-        impl #contract_crate::EthCall for #struct_name {
+        impl #ethers_contract::EthCall for #struct_name {
 
             fn function_name() -> ::std::borrow::Cow<'static, str> {
                 #function_call_name.into()
             }
 
-            fn selector() -> #core_crate::types::Selector {
+            fn selector() -> #ethers_core::types::Selector {
                 #selector
             }
 
@@ -113,10 +99,10 @@ pub fn derive_trait_impls(
         }
     }
     };
-    let codec_impl = derive_codec_impls(input, decode_impl, ident("EthCall"));
+    let codec_impl = derive_codec_impls(input, decode_impl, ident("EthCall"))?;
 
-    quote! {
+    Ok(quote! {
         #ethcall_impl
         #codec_impl
-    }
+    })
 }
 
@@ -7,7 +7,7 @@ use ethers_core::{
 };
 use proc_macro2::{Ident, Span, TokenStream};
 use quote::quote;
-use syn::{parse::Error, spanned::Spanned as _, AttrStyle, DeriveInput, Lit, Meta, NestedMeta};
+use syn::{parse::Error, spanned::Spanned, AttrStyle, DeriveInput, Lit, Meta, NestedMeta};
 
 /// All the attributes the `EthCall`/`EthError` macro supports
 #[derive(Default)]
@@ -20,7 +20,7 @@ pub struct EthCalllikeAttributes {
 pub fn parse_calllike_attributes(
     input: &DeriveInput,
     attr_name: &str,
-) -> Result<EthCalllikeAttributes, TokenStream> {
+) -> Result<EthCalllikeAttributes, Error> {
     let mut result = EthCalllikeAttributes::default();
     for a in input.attrs.iter() {
         if let AttrStyle::Outer = a.style {
@@ -33,15 +33,13 @@ pub fn parse_calllike_attributes(
                         return Err(Error::new(
                             path.span(),
                             format!("unrecognized {attr_name} parameter"),
-                        )
-                        .to_compile_error())
+                        ))
                     }
                     Meta::List(meta) => {
                         return Err(Error::new(
                             meta.path.span(),
                             format!("unrecognized {attr_name} parameter"),
-                        )
-                        .to_compile_error())
+                        ))
                     }
                     Meta::NameValue(meta) => {
                         if meta.path.is_ident("name") {
@@ -53,15 +51,13 @@ pub fn parse_calllike_attributes(
                                     return Err(Error::new(
                                         meta.span(),
                                         "name already specified",
-                                    )
-                                    .to_compile_error())
+                                    ))
                                 }
                             } else {
                                 return Err(Error::new(
                                     meta.span(),
                                     "name must be a string",
-                                )
-                                .to_compile_error())
+                                ))
                             }
                         } else if meta.path.is_ident("abi") {
                             if let Lit::Str(ref lit_str) = meta.lit {
@@ -72,22 +68,19 @@ pub fn parse_calllike_attributes(
                                     return Err(Error::new(
                                         meta.span(),
                                         "abi already specified",
-                                    )
-                                    .to_compile_error())
+                                    ))
                                 }
                             } else {
                                 return Err(Error::new(
                                     meta.span(),
                                     "abi must be a string",
-                                )
-                                .to_compile_error())
+                                ))
                             }
                         } else {
                             return Err(Error::new(
                                 meta.span(),
                                 format!("unrecognized {attr_name} parameter"),
-                            )
-                            .to_compile_error())
+                            ))
                         }
                     }
                 }
@@ -105,7 +98,7 @@ pub fn derive_decode_impl_with_abi_type(
     input: &DeriveInput,
     trait_ident: Ident,
 ) -> Result<TokenStream, Error> {
-    let datatypes_array = utils::derive_abi_parameters_array(input, &trait_ident.to_string())?;
+    let datatypes_array = utils::abi_parameters_array(input, &trait_ident.to_string())?;
     Ok(derive_decode_impl(datatypes_array, trait_ident))
 }
 
@@ -117,18 +110,18 @@ pub fn derive_decode_impl_from_params(params: &[Param], trait_ident: Ident) -> T
 }
 
 pub fn derive_decode_impl(datatypes_array: TokenStream, trait_ident: Ident) -> TokenStream {
-    let core_crate = ethers_core_crate();
-    let contract_crate = ethers_contract_crate();
+    let ethers_core = ethers_core_crate();
+    let ethers_contract = ethers_contract_crate();
     let data_types_init = quote! {let data_types = #datatypes_array;};
 
     quote! {
         let bytes = bytes.as_ref();
-        if bytes.len() < 4 || bytes[..4] != <Self as #contract_crate::#trait_ident>::selector() {
-            return Err(#contract_crate::AbiError::WrongSelector);
+        if bytes.len() < 4 || bytes[..4] != <Self as #ethers_contract::#trait_ident>::selector() {
+            return Err(#ethers_contract::AbiError::WrongSelector);
         }
         #data_types_init
-        let data_tokens = #core_crate::abi::decode(&data_types, &bytes[4..])?;
-        Ok(<Self as #core_crate::abi::Tokenizable>::from_token( #core_crate::abi::Token::Tuple(data_tokens))?)
+        let data_tokens = #ethers_core::abi::decode(&data_types, &bytes[4..])?;
+        Ok(<Self as #ethers_core::abi::Tokenizable>::from_token(#ethers_core::abi::Token::Tuple(data_tokens))?)
     }
 }
 
@@ -137,25 +130,24 @@ pub fn derive_codec_impls(
     input: &DeriveInput,
     decode_impl: TokenStream,
     trait_ident: Ident,
-) -> TokenStream {
+) -> Result<TokenStream, Error> {
     // the ethers crates to use
-    let core_crate = ethers_core_crate();
-    let contract_crate = ethers_contract_crate();
+    let ethers_core = ethers_core_crate();
+    let ethers_contract = ethers_contract_crate();
     let struct_name = &input.ident;
 
     let codec_impl = quote! {
-        impl #core_crate::abi::AbiDecode for #struct_name {
-            fn decode(bytes: impl AsRef<[u8]>) -> ::std::result::Result<Self, #core_crate::abi::AbiError> {
+        impl #ethers_core::abi::AbiDecode for #struct_name {
+            fn decode(bytes: impl AsRef<[u8]>) -> ::std::result::Result<Self, #ethers_core::abi::AbiError> {
                 #decode_impl
             }
         }
 
-        impl #core_crate::abi::AbiEncode for #struct_name {
+        impl #ethers_core::abi::AbiEncode for #struct_name {
             fn encode(self) -> ::std::vec::Vec<u8> {
-                let tokens = #core_crate::abi::Tokenize::into_tokens(self);
-                let selector = <Self as #contract_crate::#trait_ident>::selector();
-                let encoded = #core_crate::abi::encode(&tokens);
+                let tokens = #ethers_core::abi::Tokenize::into_tokens(self);
+                let selector = <Self as #ethers_contract::#trait_ident>::selector();
+                let encoded = #ethers_core::abi::encode(&tokens);
                 selector
                     .iter()
                     .copied()
@@ -165,10 +157,10 @@ pub fn derive_codec_impls(
             }
 
     };
-    let tokenize_impl = abi_ty::derive_tokenizeable_impl(input);
+    let tokenize_impl = abi_ty::derive_tokenizeable_impl(input)?;
 
-    quote! {
+    Ok(quote! {
         #tokenize_impl
         #codec_impl
-    }
+    })
 }
 
@@ -1,32 +1,32 @@
 //! Helper functions for deriving `EthAbiType`
 
 use ethers_core::macros::ethers_core_crate;
 
 use quote::quote;
 use syn::DeriveInput;
 
 /// Generates the `AbiEncode` + `AbiDecode` implementation
 pub fn derive_codec_impl(input: &DeriveInput) -> proc_macro2::TokenStream {
     let name = &input.ident;
-    let core_crate = ethers_core_crate();
+    let ethers_core = ethers_core_crate();
 
     quote! {
-        impl #core_crate::abi::AbiDecode for #name {
-            fn decode(bytes: impl AsRef<[u8]>) -> ::std::result::Result<Self, #core_crate::abi::AbiError> {
-                if let #core_crate::abi::ParamType::Tuple(params) = <Self as #core_crate::abi::AbiType>::param_type() {
-                    let tokens = #core_crate::abi::decode(&params, bytes.as_ref())?;
-                    Ok(<Self as #core_crate::abi::Tokenizable>::from_token(#core_crate::abi::Token::Tuple(tokens))?)
+        impl #ethers_core::abi::AbiDecode for #name {
+            fn decode(bytes: impl AsRef<[u8]>) -> ::std::result::Result<Self, #ethers_core::abi::AbiError> {
+                if let #ethers_core::abi::ParamType::Tuple(params) = <Self as #ethers_core::abi::AbiType>::param_type() {
+                    let tokens = #ethers_core::abi::decode(&params, bytes.as_ref())?;
+                    Ok(<Self as #ethers_core::abi::Tokenizable>::from_token(#ethers_core::abi::Token::Tuple(tokens))?)
                 } else {
                     Err(
-                        #core_crate::abi::InvalidOutputType("Expected tuple".to_string()).into()
+                        #ethers_core::abi::InvalidOutputType("Expected tuple".to_string()).into()
                     )
                 }
             }
         }
-        impl #core_crate::abi::AbiEncode for #name {
+        impl #ethers_core::abi::AbiEncode for #name {
             fn encode(self) -> ::std::vec::Vec<u8> {
-                let tokens = #core_crate::abi::Tokenize::into_tokens(self);
-                #core_crate::abi::encode(&tokens)
+                let tokens = #ethers_core::abi::Tokenize::into_tokens(self);
+                #ethers_core::abi::encode(&tokens)
             }
         }
     }
 }
 
@@ -1,22 +1,18 @@
 //! Helper functions for deriving `Display`
 
+use crate::utils;
+use ethers_core::{abi::ParamType, macros::ethers_core_crate};
 use proc_macro2::TokenStream;
 use quote::quote;
-use syn::{parse::Error, spanned::Spanned as _, Data, DeriveInput, Fields, Index};
-
-use ethers_core::{abi::ParamType, macros::ethers_core_crate};
-
-use crate::utils;
+use syn::{parse::Error, spanned::Spanned, Data, DeriveInput, Fields, Index};
 
 /// Derive `fmt::Display` for the given type
 pub(crate) fn derive_eth_display_impl(input: DeriveInput) -> Result<TokenStream, Error> {
-    let fields: Vec<_> = match input.data {
+    let fields = match input.data {
         Data::Struct(ref data) => match data.fields {
             Fields::Named(ref fields) => fields.named.iter().collect(),
             Fields::Unnamed(ref fields) => fields.unnamed.iter().collect(),
-            Fields::Unit => {
-                vec![]
-            }
+            Fields::Unit => vec![],
         },
         Data::Enum(_) => {
             return Err(Error::new(input.span(), "Enum types are not supported by EthDisplay"))
@@ -25,8 +21,10 @@ pub(crate) fn derive_eth_display_impl(input: DeriveInput) -> Result<TokenStream,
             return Err(Error::new(input.span(), "Union types are not supported by EthDisplay"))
         }
     };
-    let core_crate = ethers_core_crate();
-    let hex_encode = quote! {#core_crate::utils::hex::encode};
+
+    let ethers_core = ethers_core_crate();
+    let hex_encode = quote! {#ethers_core::utils::hex::encode};
 
     let mut fmts = TokenStream::new();
     for (idx, field) in fields.iter().enumerate() {
         let ident = field.ident.clone().map(|id| quote! {#id}).unwrap_or_else(|| {
@@ -37,17 +35,17 @@ pub(crate) fn derive_eth_display_impl(input: DeriveInput) -> Result<TokenStream,
         match param {
             ParamType::Address | ParamType::Uint(_) | ParamType::Int(_) => {
                 quote! {
                     write!(f, "{:?}", self.#ident)?;
                 }
             }
             ParamType::Bytes => {
                 quote! {
                     write!(f, "0x{}", #hex_encode(&self.#ident))?;
                 }
             }
             ParamType::Bool | ParamType::String => {
                 quote! {
                     self.#ident.fmt(f)?;
                 }
             }
             ParamType::Tuple(_) => {
@@ -59,32 +57,32 @@ pub(crate) fn derive_eth_display_impl(input: DeriveInput) -> Result<TokenStream,
                 if *ty == ParamType::Uint(8) {
                     // `u8`
                     quote! {
                         write!(f, "0x{}", #hex_encode(&self.#ident[..]))?;
                     }
                 } else {
                     // format as array with `[arr[0].display, arr[1].display,...]`
                     quote! {
                         write!(f, "[")?;
                         for (idx, val) in self.#ident.iter().enumerate() {
                             write!(f, "{:?}", val)?;
                             if idx < self.#ident.len() - 1 {
                                 write!(f, ", ")?;
                             }
                         }
                         write!(f, "]")?;
                     }
                 }
             }
             ParamType::FixedBytes(_) => {
                 quote! {
                     write!(f, "0x{}", #hex_encode(&self.#ident))?;
                 }
             }
         }
     } else {
         // could not detect the parameter type and rely on using debug fmt
         quote! {
             write!(f, "{:?}", &self.#ident)?;
         }
     };
     fmts.extend(tokens);
@@ -92,6 +90,7 @@ pub(crate) fn derive_eth_display_impl(input: DeriveInput) -> Result<TokenStream,
             fmts.extend(quote! { write!(f, ", ")?;});
         }
     }
 
     let name = &input.ident;
     Ok(quote! {
         impl ::std::fmt::Display for #name {
@@ -10,46 +10,45 @@ use quote::quote;
 use syn::{parse::Error, DeriveInput};
 
 /// Generates the `EthError` trait support
-pub(crate) fn derive_eth_error_impl(input: DeriveInput) -> TokenStream {
-    let attributes = match parse_calllike_attributes(&input, "etherror") {
-        Ok(attributes) => attributes,
-        Err(errors) => return errors,
-    };
+pub(crate) fn derive_eth_error_impl(input: DeriveInput) -> Result<TokenStream, Error> {
+    let attributes = parse_calllike_attributes(&input, "etherror")?;
 
     let error_name = attributes.name.map(|(s, _)| s).unwrap_or_else(|| input.ident.to_string());
 
     let mut error = if let Some((src, span)) = attributes.abi {
         let raw_function_sig = src.trim_start_matches("error ").trim_start();
         // try to parse as solidity error
-        if let Ok(fun) = HumanReadableParser::parse_error(&src) {
-            fun
-        } else {
-            // try to determine the abi by using its fields at runtime
-            return match derive_trait_impls_with_abi_type(
-                &input,
-                &error_name,
-                Some(raw_function_sig),
-            ) {
-                Ok(derived) => derived,
-                Err(err) => {
-                    Error::new(span, format!("Unable to determine ABI for `{src}` : {err}"))
-                        .to_compile_error()
+        match HumanReadableParser::parse_error(&src) {
+            Ok(solidity_error) => solidity_error,
+            Err(parse_err) => {
+                return match derive_trait_impls_with_abi_type(
+                    &input,
+                    &error_name,
+                    Some(raw_function_sig),
+                ) {
+                    Ok(derived) => Ok(derived),
+                    Err(err) => {
+                        Err(Error::new(span, format!("Unable to determine ABI for `{src}`: {err}")))
+                    }
+                    .map_err(|e| {
+                        let mut error = Error::new(span, parse_err);
+                        error.combine(Error::new(span, e));
+                        error
+                    }),
                 }
             }
         }
     } else {
         // try to determine the abi by using its fields at runtime
-        return match derive_trait_impls_with_abi_type(&input, &error_name, None) {
-            Ok(derived) => derived,
-            Err(err) => err.to_compile_error(),
-        }
+        return derive_trait_impls_with_abi_type(&input, &error_name, None)
     };
     error.name = error_name.clone();
-    let abi = error.abi_signature();
+    let sig = error.abi_signature();
     let selector = utils::selector(error.selector());
     let decode_impl = derive_decode_impl_from_params(&error.inputs, ident("EthError"));
 
-    derive_trait_impls(&input, &error_name, quote! {#abi.into()}, Some(selector), decode_impl)
+    derive_trait_impls(&input, &error_name, quote!(#sig.into()), Some(selector), decode_impl)
 }
 
 /// Use the `AbiType` trait to determine the correct `ParamType` and signature at runtime
@@ -58,17 +57,14 @@ fn derive_trait_impls_with_abi_type(
     function_call_name: &str,
     abi_signature: Option<&str>,
 ) -> Result<TokenStream, Error> {
-    let abi_signature = if let Some(abi) = abi_signature {
-        quote! {#abi}
+    let mut abi_signature = if let Some(sig) = abi_signature {
+        quote!(#sig)
     } else {
-        utils::derive_abi_signature_with_abi_type(input, function_call_name, "EthError")?
-    };
-
-    let abi_signature = quote! {
-        #abi_signature.into()
+        utils::abi_signature_with_abi_type(input, function_call_name, "EthError")?
     };
+    abi_signature.extend(quote!(.into()));
     let decode_impl = derive_decode_impl_with_abi_type(input, ident("EthError"))?;
-    Ok(derive_trait_impls(input, function_call_name, abi_signature, None, decode_impl))
+    derive_trait_impls(input, function_call_name, abi_signature, None, decode_impl)
 }
 
 /// Generates the EthError implementation
@@ -78,26 +74,25 @@ pub fn derive_trait_impls(
     abi_signature: TokenStream,
     selector: Option<TokenStream>,
     decode_impl: TokenStream,
-) -> TokenStream {
+) -> Result<TokenStream, Error> {
     // the ethers crates to use
-    let core_crate = ethers_core_crate();
-    let contract_crate = ethers_contract_crate();
+    let ethers_core = ethers_core_crate();
+    let ethers_contract = ethers_contract_crate();
     let struct_name = &input.ident;
 
     let selector = selector.unwrap_or_else(|| {
         quote! {
-            #core_crate::utils::id(Self::abi_signature())
+            #ethers_core::utils::id(Self::abi_signature())
         }
     });
 
     let etherror_impl = quote! {
-        impl #contract_crate::EthError for #struct_name {
+        impl #ethers_contract::EthError for #struct_name {
 
             fn error_name() -> ::std::borrow::Cow<'static, str> {
                 #function_call_name.into()
             }
 
-            fn selector() -> #core_crate::types::Selector {
+            fn selector() -> #ethers_core::types::Selector {
                 #selector
             }
 
@@ -105,12 +100,11 @@ pub fn derive_trait_impls(
                 #abi_signature
             }
         }
 
    };
-    let codec_impl = derive_codec_impls(input, decode_impl, ident("EthError"));
+    let codec_impl = derive_codec_impls(input, decode_impl, ident("EthError"))?;
 
-    quote! {
+    Ok(quote! {
         #etherror_impl
         #codec_impl
-    }
+    })
 }
 
@@ -4,7 +4,7 @@ use ethers_contract_abigen::Source;
 use proc_macro2::{Span, TokenStream};
 use quote::quote;
 use syn::{
-    parse::Error, spanned::Spanned as _, AttrStyle, Data, DeriveInput, Field, Fields, Lit, Meta,
+    parse::Error, spanned::Spanned, AttrStyle, Data, DeriveInput, Field, Fields, Lit, Meta,
     NestedMeta,
 };
 
@@ -42,8 +42,9 @@ pub(crate) fn derive_eth_event_impl(input: DeriveInput) -> Result<TokenStream, E
                 }
                 Err(source_err) => {
                     // Return both error messages
-                    let message = format!("Failed parsing ABI: {parse_err} ({source_err})");
-                    Err(Error::new(span, message))
+                    let mut error = Error::new(span, parse_err);
+                    error.combine(Error::new(span, source_err));
+                    Err(error)
                 }
             }
         }
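The event-parsing hunk above is where the "report both errors during parsing" change lands: instead of flattening two failures into one formatted string, the parse error and the ABI-source error are combined. A minimal sketch of the `syn::Error::combine` mechanism it relies on (the function name here is illustrative):

```rust
use proc_macro2::Span;
use syn::parse::Error;

// Combining two errors makes the generated output expand to two
// `compile_error!` invocations, so the user sees both messages.
fn report_both(span: Span, first: impl std::fmt::Display, second: impl std::fmt::Display) -> Error {
    let mut error = Error::new(span, first);
    error.combine(Error::new(span, second));
    error
}
```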
@@ -102,7 +103,7 @@ pub(crate) fn derive_eth_event_impl(input: DeriveInput) -> Result<TokenStream, E
         }
     };
 
-    let tokenize_impl = abi_ty::derive_tokenizeable_impl(&input);
+    let tokenize_impl = abi_ty::derive_tokenizeable_impl(&input)?;
 
     Ok(quote! {
         #tokenize_impl
@@ -112,7 +112,11 @@ pub fn abigen(input: TokenStream) -> TokenStream {
 #[proc_macro_derive(EthAbiType)]
 pub fn derive_abi_type(input: TokenStream) -> TokenStream {
     let input = parse_macro_input!(input as DeriveInput);
-    TokenStream::from(abi_ty::derive_tokenizeable_impl(&input))
+    match abi_ty::derive_tokenizeable_impl(&input) {
+        Ok(tokens) => tokens,
+        Err(err) => err.to_compile_error(),
+    }
+    .into()
 }
 
 /// Derives the `AbiEncode`, `AbiDecode` and traits for the labeled type.
@@ -174,9 +178,10 @@ pub fn derive_abi_codec(input: TokenStream) -> TokenStream {
 pub fn derive_eth_display(input: TokenStream) -> TokenStream {
     let input = parse_macro_input!(input as DeriveInput);
     match display::derive_eth_display_impl(input) {
-        Ok(tokens) => TokenStream::from(tokens),
-        Err(err) => err.to_compile_error().into(),
+        Ok(tokens) => tokens,
+        Err(err) => err.to_compile_error(),
     }
+    .into()
 }
 
 /// Derives the `EthEvent` and `Tokenizeable` trait for the labeled type.
@@ -225,9 +230,10 @@ pub fn derive_eth_display(input: TokenStream) -> TokenStream {
 pub fn derive_abi_event(input: TokenStream) -> TokenStream {
     let input = parse_macro_input!(input as DeriveInput);
     match event::derive_eth_event_impl(input) {
-        Ok(tokens) => TokenStream::from(tokens),
-        Err(err) => err.to_compile_error().into(),
+        Ok(tokens) => tokens,
+        Err(err) => err.to_compile_error(),
     }
+    .into()
 }
 
 /// Derives the `EthCall` and `Tokenizeable` trait for the labeled type.
@@ -291,7 +297,11 @@ pub fn derive_abi_event(input: TokenStream) -> TokenStream {
 #[proc_macro_derive(EthCall, attributes(ethcall))]
 pub fn derive_abi_call(input: TokenStream) -> TokenStream {
     let input = parse_macro_input!(input as DeriveInput);
-    TokenStream::from(call::derive_eth_call_impl(input))
+    match call::derive_eth_call_impl(input) {
+        Ok(tokens) => tokens,
+        Err(err) => err.to_compile_error(),
+    }
+    .into()
 }
 
 /// Derives the `EthError` and `Tokenizeable` trait for the labeled type.
@@ -328,5 +338,9 @@ pub fn derive_abi_call(input: TokenStream) -> TokenStream {
 #[proc_macro_derive(EthError, attributes(etherror))]
 pub fn derive_abi_error(input: TokenStream) -> TokenStream {
     let input = parse_macro_input!(input as DeriveInput);
-    TokenStream::from(error::derive_eth_error_impl(input))
+    match error::derive_eth_error_impl(input) {
+        Ok(tokens) => tokens,
+        Err(err) => err.to_compile_error(),
+    }
+    .into()
 }
 
@ -1,8 +1,8 @@
|
||||||
use ethers_core::{abi::ParamType, macros::ethers_core_crate, types::Selector};
|
use ethers_core::{abi::ParamType, macros::ethers_core_crate, types::Selector};
|
||||||
use proc_macro2::{Ident, Literal, Span};
|
use proc_macro2::{Ident, Literal, Span, TokenStream};
|
||||||
use quote::{quote, quote_spanned};
|
use quote::{quote, quote_spanned};
|
||||||
use syn::{
|
use syn::{
|
||||||
parse::Error, spanned::Spanned as _, Data, DeriveInput, Expr, Fields, GenericArgument, Lit,
|
parse::Error, spanned::Spanned, Data, DeriveInput, Expr, Fields, GenericArgument, Lit,
|
||||||
PathArguments, Type,
|
PathArguments, Type,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@@ -10,26 +10,29 @@ pub fn ident(name: &str) -> Ident {
     Ident::new(name, Span::call_site())
 }
 
-pub fn signature(hash: &[u8]) -> proc_macro2::TokenStream {
-    let core_crate = ethers_core_crate();
+pub fn signature(hash: &[u8]) -> TokenStream {
+    let ethers_core = ethers_core_crate();
     let bytes = hash.iter().copied().map(Literal::u8_unsuffixed);
-    quote! {#core_crate::types::H256([#( #bytes ),*])}
+    quote! {#ethers_core::types::H256([#( #bytes ),*])}
 }
 
-pub fn selector(selector: Selector) -> proc_macro2::TokenStream {
+pub fn selector(selector: Selector) -> TokenStream {
     let bytes = selector.iter().copied().map(Literal::u8_unsuffixed);
     quote! {[#( #bytes ),*]}
 }
 
 /// Parses an int type from its string representation
 pub fn parse_int_param_type(s: &str) -> Option<ParamType> {
-    let size = s.chars().skip(1).collect::<String>().parse::<usize>().ok()?;
-    if s.starts_with('u') {
-        Some(ParamType::Uint(size))
-    } else if s.starts_with('i') {
-        Some(ParamType::Int(size))
-    } else {
-        None
+    match s.chars().next() {
+        Some(c @ 'u') | Some(c @ 'i') => {
+            let size = s[1..].parse::<usize>().ok()?;
+            if c == 'u' {
+                Some(ParamType::Uint(size))
+            } else {
+                Some(ParamType::Int(size))
+            }
+        }
+        _ => None,
     }
 }
 
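The rewritten `parse_int_param_type` dispatches on the first character and parses the remaining digits. To illustrate the intended inputs (Rust integer type names such as `u64`, not Solidity names like `uint256`), here is a standalone re-statement of the helper with a few assumed test cases; the real helper is private to the derive crate:

use ethers_core::abi::ParamType;

// Re-statement of the rewritten helper, for illustration only.
fn parse_int_param_type(s: &str) -> Option<ParamType> {
    match s.chars().next() {
        Some(c @ 'u') | Some(c @ 'i') => {
            let size = s[1..].parse::<usize>().ok()?;
            if c == 'u' {
                Some(ParamType::Uint(size))
            } else {
                Some(ParamType::Int(size))
            }
        }
        _ => None,
    }
}

fn main() {
    assert_eq!(parse_int_param_type("u64"), Some(ParamType::Uint(64)));
    assert_eq!(parse_int_param_type("i128"), Some(ParamType::Int(128)));
    assert_eq!(parse_int_param_type("u"), None); // no digits to parse
    assert_eq!(parse_int_param_type("f64"), None); // neither 'u' nor 'i'
}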
@@ -37,64 +40,58 @@ pub fn parse_int_param_type(s: &str) -> Option<ParamType> {
 // This applies to strings, arrays, structs and bytes to follow the encoding of
 // these indexed param types according to
 // <https://solidity.readthedocs.io/en/develop/abi-spec.html#encoding-of-indexed-event-parameters>
-pub fn topic_param_type_quote(kind: &ParamType) -> proc_macro2::TokenStream {
-    let core_crate = ethers_core_crate();
+pub fn topic_param_type_quote(kind: &ParamType) -> TokenStream {
+    let ethers_core = ethers_core_crate();
     match kind {
         ParamType::String |
         ParamType::Bytes |
         ParamType::Array(_) |
         ParamType::FixedArray(_, _) |
-        ParamType::Tuple(_) => quote! {#core_crate::abi::ParamType::FixedBytes(32)},
+        ParamType::Tuple(_) => quote! {#ethers_core::abi::ParamType::FixedBytes(32)},
         ty => param_type_quote(ty),
     }
 }
 
 /// Returns the rust type for the given parameter
-pub fn param_type_quote(kind: &ParamType) -> proc_macro2::TokenStream {
-    let core_crate = ethers_core_crate();
+pub fn param_type_quote(kind: &ParamType) -> TokenStream {
+    let ethers_core = ethers_core_crate();
     match kind {
         ParamType::Address => {
-            quote! {#core_crate::abi::ParamType::Address}
+            quote! {#ethers_core::abi::ParamType::Address}
         }
         ParamType::Bytes => {
-            quote! {#core_crate::abi::ParamType::Bytes}
+            quote! {#ethers_core::abi::ParamType::Bytes}
         }
         ParamType::Int(size) => {
             let size = Literal::usize_suffixed(*size);
-            quote! {#core_crate::abi::ParamType::Int(#size)}
+            quote! {#ethers_core::abi::ParamType::Int(#size)}
         }
         ParamType::Uint(size) => {
            let size = Literal::usize_suffixed(*size);
-            quote! {#core_crate::abi::ParamType::Uint(#size)}
+            quote! {#ethers_core::abi::ParamType::Uint(#size)}
         }
         ParamType::Bool => {
-            quote! {#core_crate::abi::ParamType::Bool}
+            quote! {#ethers_core::abi::ParamType::Bool}
         }
         ParamType::String => {
-            quote! {#core_crate::abi::ParamType::String}
+            quote! {#ethers_core::abi::ParamType::String}
         }
         ParamType::Array(ty) => {
             let ty = param_type_quote(ty);
-            quote! {#core_crate::abi::ParamType::Array(Box::new(#ty))}
+            quote! {#ethers_core::abi::ParamType::Array(Box::new(#ty))}
         }
         ParamType::FixedBytes(size) => {
             let size = Literal::usize_suffixed(*size);
-            quote! {#core_crate::abi::ParamType::FixedBytes(#size)}
+            quote! {#ethers_core::abi::ParamType::FixedBytes(#size)}
         }
         ParamType::FixedArray(ty, size) => {
             let ty = param_type_quote(ty);
             let size = Literal::usize_suffixed(*size);
-            quote! {#core_crate::abi::ParamType::FixedArray(Box::new(#ty),#size)}
+            quote! {#ethers_core::abi::ParamType::FixedArray(Box::new(#ty), #size)}
         }
         ParamType::Tuple(tuple) => {
             let elements = tuple.iter().map(param_type_quote);
-            quote! {
-                #core_crate::abi::ParamType::Tuple(
-                    ::std::vec![
-                        #( #elements ),*
-                    ]
-                )
-            }
+            quote!(#ethers_core::abi::ParamType::Tuple(::std::vec![#( #elements ),*]))
         }
     }
 }
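For context, `param_type_quote` only re-spells a `ParamType` value as tokens that the generated code evaluates at runtime. A rough standalone sketch of the tokens produced for `ParamType::Uint(256)`, assuming the path `::ethers::core` (in the real macro the path comes from `ethers_core_crate()` and depends on the caller's dependency graph):

use proc_macro2::Literal;
use quote::quote;

fn main() {
    // `::ethers::core` is an assumed example path, not what the macro hard-codes.
    let ethers_core = quote!(::ethers::core);
    let size = Literal::usize_suffixed(256);
    let tokens = quote! { #ethers_core::abi::ParamType::Uint(#size) };
    // Prints roughly `:: ethers :: core :: abi :: ParamType :: Uint (256usize)`.
    println!("{tokens}");
}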
@@ -120,8 +117,8 @@ pub fn find_parameter_type(ty: &Type) -> Result<ParamType, Error> {
             if let PathArguments::AngleBracketed(ref args) = ty.path.segments[0].arguments {
                 if args.args.len() == 1 {
                     if let GenericArgument::Type(ref ty) = args.args.iter().next().unwrap() {
-                        let kind = find_parameter_type(ty)?;
-                        return Ok(ParamType::Array(Box::new(kind)))
+                        return find_parameter_type(ty)
+                            .map(|kind| ParamType::Array(Box::new(kind)))
                     }
                 }
             }
@@ -148,10 +145,12 @@ pub fn find_parameter_type(ty: &Type) -> Result<ParamType, Error> {
             }
             Err(Error::new(ty.span(), "Failed to derive proper ABI from fields"))
         }
-        Type::Tuple(ty) => {
-            let params = ty.elems.iter().map(find_parameter_type).collect::<Result<Vec<_>, _>>()?;
-            Ok(ParamType::Tuple(params))
-        }
+        Type::Tuple(ty) => ty
+            .elems
+            .iter()
+            .map(find_parameter_type)
+            .collect::<Result<Vec<_>, _>>()
+            .map(ParamType::Tuple),
         _ => Err(Error::new(ty.span(), "Failed to derive proper ABI from fields")),
     }
 }
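The `Type::Tuple` arm now collects the per-element results into a single `Result` and wraps them in `ParamType::Tuple` via combinators instead of `?` plus an explicit `Ok`. The same idiom outside the macro, with a made-up `element_type` stand-in for `find_parameter_type`:

use ethers_core::abi::ParamType;

// Hypothetical stand-in mapping a type name to its ABI parameter type.
fn element_type(name: &str) -> Result<ParamType, String> {
    match name {
        "Address" => Ok(ParamType::Address),
        "U256" => Ok(ParamType::Uint(256)),
        other => Err(format!("unsupported type: {other}")),
    }
}

fn main() -> Result<(), String> {
    // Mirrors the Type::Tuple arm: map each element, collect into Result, wrap in Tuple.
    let tuple = ["Address", "U256"]
        .into_iter()
        .map(element_type)
        .collect::<Result<Vec<_>, _>>()
        .map(ParamType::Tuple)?;
    assert_eq!(
        tuple,
        ParamType::Tuple(vec![ParamType::Address, ParamType::Uint(256)])
    );
    Ok(())
}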
@@ -200,29 +199,29 @@ pub fn derive_abi_inputs_from_fields(
 pub fn derive_param_type_with_abi_type(
     input: &DeriveInput,
     trait_name: &str,
-) -> Result<proc_macro2::TokenStream, Error> {
-    let core_crate = ethers_core_crate();
-    let params = derive_abi_parameters_array(input, trait_name)?;
+) -> Result<TokenStream, Error> {
+    let ethers_core = ethers_core_crate();
+    let params = abi_parameters_array(input, trait_name)?;
     Ok(quote! {
-        #core_crate::abi::ParamType::Tuple(::std::vec!#params)
+        #ethers_core::abi::ParamType::Tuple(::std::vec!#params)
     })
 }
 
 /// Use `AbiType::param_type` fo each field to construct the whole signature `<name>(<params,>*)` as
-/// `String`
-pub fn derive_abi_signature_with_abi_type(
+/// `String`.
+pub fn abi_signature_with_abi_type(
     input: &DeriveInput,
     function_name: &str,
     trait_name: &str,
-) -> Result<proc_macro2::TokenStream, Error> {
-    let params = derive_abi_parameters_array(input, trait_name)?;
+) -> Result<TokenStream, Error> {
+    let params = abi_parameters_array(input, trait_name)?;
     Ok(quote! {
         {
             let params: String = #params
                 .iter()
                 .map(|p| p.to_string())
                 .collect::<::std::vec::Vec<_>>()
                 .join(",");
             let function_name = #function_name;
             format!("{}({})", function_name, params)
         }
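The `quote!` block returned by `abi_signature_with_abi_type` is plain runtime code pasted into the generated impl. Stripped of the interpolation it behaves like the following, for a hypothetical two-field struct (the names `Foo`, `to` and `amount` are assumptions, not from the commit):

use ethers_core::abi::AbiType;
use ethers_core::types::{Address, U256};

fn main() {
    // What the generated block computes for `struct Foo { to: Address, amount: U256 }`.
    let params: String = [
        <Address as AbiType>::param_type(),
        <U256 as AbiType>::param_type(),
    ]
    .iter()
    .map(|p| p.to_string())
    .collect::<Vec<_>>()
    .join(",");
    let function_name = "Foo";
    assert_eq!(format!("{}({})", function_name, params), "Foo(address,uint256)");
}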
@@ -231,30 +230,13 @@ pub fn derive_abi_signature_with_abi_type(
 
 /// Use `AbiType::param_type` fo each field to construct the signature's parameters as runtime array
 /// `[param1, param2,...]`
-pub fn derive_abi_parameters_array(
-    input: &DeriveInput,
-    trait_name: &str,
-) -> Result<proc_macro2::TokenStream, Error> {
-    let core_crate = ethers_core_crate();
+pub fn abi_parameters_array(input: &DeriveInput, trait_name: &str) -> Result<TokenStream, Error> {
+    let ethers_core = ethers_core_crate();
 
-    let param_types: Vec<_> = match input.data {
+    let fields = match input.data {
         Data::Struct(ref data) => match data.fields {
-            Fields::Named(ref fields) => fields
-                .named
-                .iter()
-                .map(|f| {
-                    let ty = &f.ty;
-                    quote_spanned! { f.span() => <#ty as #core_crate::abi::AbiType>::param_type() }
-                })
-                .collect(),
-            Fields::Unnamed(ref fields) => fields
-                .unnamed
-                .iter()
-                .map(|f| {
-                    let ty = &f.ty;
-                    quote_spanned! { f.span() => <#ty as #core_crate::abi::AbiType>::param_type() }
-                })
-                .collect(),
+            Fields::Named(ref fields) => &fields.named,
+            Fields::Unnamed(ref fields) => &fields.unnamed,
             Fields::Unit => {
                 return Err(Error::new(
                     input.span(),
@@ -276,7 +258,12 @@ pub fn derive_abi_parameters_array(
             }
         };
 
+    let iter = fields.iter().map(|f| {
+        let ty = &f.ty;
+        quote_spanned!(f.span() => <#ty as #ethers_core::abi::AbiType>::param_type())
+    });
+
     Ok(quote! {
-        [#( #param_types ),*]
+        [#( #iter ),*]
     })
 }
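Putting the last two hunks together: `abi_parameters_array` now picks the field list once (named or unnamed) and interpolates one `AbiType::param_type()` call per field into a plain array. For an assumed two-field struct the expansion behaves roughly like this (names are illustrative):

use ethers_core::abi::{AbiType, ParamType};
use ethers_core::types::{Address, U256};

fn main() {
    // Approximate expansion of `[#( #iter ),*]` for
    // `struct Transfer { from: Address, value: U256 }`.
    let params = [
        <Address as AbiType>::param_type(),
        <U256 as AbiType>::param_type(),
    ];
    assert_eq!(params, [ParamType::Address, ParamType::Uint(256)]);
}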