feat(abigen): add EthAbiCodec proc macro (#704)
* feat(abigen): add EthAbiCodec proc macro
* rustfmt
* fix: tuple codec
parent 12334443eb
commit 3c164bc9bf
@@ -43,6 +43,8 @@
 
 ### 0.6.0
 
+- add `EthAbiCodec` proc macro to derive `AbiEncode` `AbiDecode` implementation
+  [#704](https://github.com/gakonst/ethers-rs/pull/704)
 - move `AbiEncode` `AbiDecode` trait to ethers-core and implement for core types
   [#531](https://github.com/gakonst/ethers-rs/pull/531)
 - Add EIP-712 `sign_typed_data` signer method; add ethers-core type `Eip712`
@@ -129,7 +129,7 @@ impl Context {
         let ethers_contract = ethers_contract_crate();
         Ok(quote! {
             #abi_signature_doc
-            #[derive(Clone, Debug, Default, Eq, PartialEq, #ethers_contract::EthAbiType, #derives)]
+            #[derive(Clone, Debug, Default, Eq, PartialEq, #ethers_contract::EthAbiType, #ethers_contract::EthAbiCodec, #derives)]
             pub struct #name {
                 #( #fields ),*
             }
@@ -191,7 +191,7 @@ impl Context {
 
         Ok(quote! {
             #abi_signature_doc
-            #[derive(Clone, Debug, Default, Eq, PartialEq, #ethers_contract::EthAbiType, #derives)]
+            #[derive(Clone, Debug, Default, Eq, PartialEq, #ethers_contract::EthAbiType, #ethers_contract::EthAbiCodec, #derives)]
             pub struct #name {
                 #( #fields ),*
             }
@@ -0,0 +1,33 @@
+//! Helper functions for deriving `EthAbiType`
+
+use ethers_core::macros::ethers_core_crate;
+
+use quote::quote;
+use syn::DeriveInput;
+
+/// Generates the `AbiEncode` + `AbiDecode` implementation
+pub fn derive_codec_impl(input: &DeriveInput) -> proc_macro2::TokenStream {
+    let name = &input.ident;
+    let core_crate = ethers_core_crate();
+
+    quote! {
+        impl #core_crate::abi::AbiDecode for #name {
+            fn decode(bytes: impl AsRef<[u8]>) -> Result<Self, #core_crate::abi::AbiError> {
+                if let #core_crate::abi::ParamType::Tuple(params) = <Self as #core_crate::abi::AbiType>::param_type() {
+                    let tokens = #core_crate::abi::decode(&params, bytes.as_ref())?;
+                    Ok(<Self as #core_crate::abi::Tokenizable>::from_token(#core_crate::abi::Token::Tuple(tokens))?)
+                } else {
+                    Err(
+                        #core_crate::abi::InvalidOutputType("Expected tuple".to_string()).into()
+                    )
+                }
+            }
+        }
+        impl #core_crate::abi::AbiEncode for #name {
+            fn encode(self) -> ::std::vec::Vec<u8> {
+                let tokens = #core_crate::abi::Tokenize::into_tokens(self);
+                #core_crate::abi::encode(&tokens)
+            }
+        }
+    }
+}
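For orientation, the scheme the generated impls follow can be reproduced by hand with the plain `ethers-core` ABI helpers: a struct derived with `EthAbiCodec` encodes as the bare tuple of its fields, with no selector and no extra outer tuple wrapper. A minimal sketch, assuming an `ethers-core` dependency; the `(Address, String)` field layout is only an illustration, not part of this commit:

```rust
use ethers_core::abi::{self, ParamType, Token};
use ethers_core::types::Address;

fn main() {
    // Mimic what the derived AbiEncode does for a struct with fields (Address, String):
    // encode the tuple of field tokens directly.
    let tokens = vec![
        Token::Address(Address::zero()),
        Token::String("hello".to_string()),
    ];
    let bytes = abi::encode(&tokens);

    // Mimic the derived AbiDecode: decode against the tuple's member types,
    // then rebuild the value from the resulting tokens.
    let params = vec![ParamType::Address, ParamType::String];
    let decoded = abi::decode(&params, &bytes).expect("round trip");
    assert_eq!(decoded, tokens);
}
```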
@@ -10,6 +10,7 @@ use abigen::Contracts;
 pub(crate) mod abi_ty;
 mod abigen;
 mod call;
+mod codec;
 mod display;
 mod event;
 mod spanned;
@@ -43,7 +44,7 @@ pub(crate) mod utils;
 /// `ETHERSCAN_API_KEY` environment variable can be set. If it is, it will use
 /// that API key when retrieving the contract ABI.
 ///
-/// Currently the proc macro accepts additional parameters to configure some
+/// Currently, the proc macro accepts additional parameters to configure some
 /// aspects of the code generation. Specifically it accepts:
 /// - `methods`: A list of mappings from method signatures to method names allowing methods names to
 ///   be explicitely set for contract methods. This also provides a workaround for generating code
@@ -94,7 +95,7 @@ pub fn abigen(input: TokenStream) -> TokenStream {
     contracts.expand().unwrap_or_else(|err| err.to_compile_error()).into()
 }
 
-/// Derives the `Tokenizable` trait for the labeled type.
+/// Derives the `AbiType` and all `Tokenizable` traits for the labeled type.
 ///
 /// This derive macro automatically adds a type bound `field: Tokenizable` for
 /// each field type.
@@ -104,6 +105,36 @@ pub fn derive_abi_type(input: TokenStream) -> TokenStream {
     TokenStream::from(abi_ty::derive_tokenizeable_impl(&input))
 }
 
+/// Derives the `AbiEncode`, `AbiDecode` and traits for the labeled type.
+///
+/// This is an addition to `EthAbiType` that lacks the `AbiEncode`, `AbiDecode` implementation.
+///
+/// The reason why this is a separate macro is the `AbiEncode` / `AbiDecode` are `ethers`
+/// generalized codec traits used for types, calls, etc. However, encoding/decoding a call differs
+/// from the basic encoding/decoding, (`[selector + encode(self)]`)
+///
+/// # Example
+///
+/// ```ignore
+/// use ethers_contract::{EthAbiCodec, EthAbiType};
+/// use ethers_core::types::*;
+///
+/// #[derive(Debug, Clone, EthAbiType, EthAbiCodec)]
+/// struct MyStruct {
+///     addr: Address,
+///     old_value: String,
+///     new_value: String,
+/// }
+/// let val = MyStruct {..};
+/// let bytes = val.encode();
+/// let val = MyStruct::decode(&bytes).unwrap();
+/// ```
+#[proc_macro_derive(EthAbiCodec)]
+pub fn derive_abi_codec(input: TokenStream) -> TokenStream {
+    let input = parse_macro_input!(input as DeriveInput);
+    TokenStream::from(codec::derive_codec_impl(&input))
+}
+
 /// Derives `fmt::Display` trait and generates a convenient format for all the
 /// underlying primitive types/tokens.
 ///
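To make the distinction in the doc comment above concrete: call data is the 4-byte function selector followed by the ABI-encoded arguments, while `AbiEncode`/`AbiDecode` (and therefore `EthAbiCodec`) deal only in the plain encoding. A hedged sketch, assuming `ethers_core::utils::id` for computing the selector; the `transfer(address,uint256)` signature is only an example:

```rust
use ethers_core::abi::{self, Token};
use ethers_core::types::{Address, U256};
use ethers_core::utils::id; // keccak256-based 4-byte selector of a signature

fn main() {
    let args = vec![
        Token::Address(Address::zero()),
        Token::Uint(U256::from(1u64)),
    ];

    // Plain ABI encoding: what AbiEncode/AbiDecode and EthAbiCodec operate on.
    let plain = abi::encode(&args);

    // Call encoding: the 4-byte selector prepended to the encoded arguments.
    let selector = id("transfer(address,uint256)");
    let mut call_data = selector.to_vec();
    call_data.extend_from_slice(&plain);

    assert_eq!(call_data.len(), plain.len() + 4);
}
```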
@@ -113,7 +144,7 @@ pub fn derive_abi_type(input: TokenStream) -> TokenStream {
 /// # Example
 ///
 /// ```ignore
-/// use ethers_contract::EthDisplay;
+/// use ethers_contract::{EthDisplay, EthAbiType};
 /// use ethers_core::types::*;
 ///
 /// #[derive(Debug, Clone, EthAbiType, EthDisplay)]
@@ -35,7 +35,7 @@ pub use ethers_contract_abigen::Abigen;
 
 #[cfg(any(test, feature = "abigen"))]
 #[cfg_attr(docsrs, doc(cfg(feature = "abigen")))]
-pub use ethers_contract_derive::{abigen, EthAbiType, EthCall, EthDisplay, EthEvent};
+pub use ethers_contract_derive::{abigen, EthAbiCodec, EthAbiType, EthCall, EthDisplay, EthEvent};
 
 // Hide the Lazy re-export, it's just for convenience
 #[doc(hidden)]
@@ -9,6 +9,9 @@ use ethers_providers::Provider;
 use ethers_solc::Solc;
 use std::{convert::TryFrom, sync::Arc};
 
+fn assert_codec<T: AbiDecode + AbiEncode>() {}
+fn assert_tokenizeable<T: Tokenizable>() {}
+
 #[test]
 fn can_gen_human_readable() {
     abigen!(
@@ -54,18 +57,24 @@ fn can_gen_structs_readable() {
         ]"#,
         event_derives(serde::Deserialize, serde::Serialize)
     );
-    let value = Addresses {
+    let addr = Addresses {
         addr: vec!["eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee".parse().unwrap()],
         s: "hello".to_string(),
     };
-    let token = value.clone().into_token();
-    assert_eq!(value, Addresses::from_token(token).unwrap());
+    let token = addr.clone().into_token();
+    assert_eq!(addr, Addresses::from_token(token).unwrap());
 
     assert_eq!("ValueChanged", ValueChangedFilter::name());
     assert_eq!(
         "ValueChanged((address,string),(address,string),(address[],string))",
         ValueChangedFilter::abi_signature()
     );
+
+    assert_codec::<Value>();
+    assert_codec::<Addresses>();
+    let encoded = addr.clone().encode();
+    let other = Addresses::decode(&encoded).unwrap();
+    assert_eq!(addr, other);
 }
 
 #[test]
@@ -83,9 +92,10 @@ fn can_gen_structs_with_arrays_readable() {
         "ValueChanged((address,string),(address,string),(address[],string)[])",
         ValueChangedFilter::abi_signature()
     );
-}
 
-fn assert_tokenizeable<T: Tokenizable>() {}
+    assert_codec::<Value>();
+    assert_codec::<Addresses>();
+}
 
 #[test]
 fn can_generate_internal_structs() {
@@ -97,6 +107,10 @@ fn can_generate_internal_structs() {
     assert_tokenizeable::<VerifyingKey>();
     assert_tokenizeable::<G1Point>();
     assert_tokenizeable::<G2Point>();
+
+    assert_codec::<VerifyingKey>();
+    assert_codec::<G1Point>();
+    assert_codec::<G2Point>();
 }
 
 #[test]
@@ -119,6 +133,10 @@ fn can_generate_internal_structs_multiple() {
     assert_tokenizeable::<G1Point>();
     assert_tokenizeable::<G2Point>();
 
+    assert_codec::<VerifyingKey>();
+    assert_codec::<G1Point>();
+    assert_codec::<G2Point>();
+
     let (provider, _) = Provider::mocked();
     let client = Arc::new(provider);
 
@@ -153,6 +171,7 @@ fn can_gen_human_readable_with_structs() {
         event_derives(serde::Deserialize, serde::Serialize)
     );
     assert_tokenizeable::<Foo>();
+    assert_codec::<Foo>();
 
     let (client, _mock) = Provider::mocked();
     let contract = SimpleContract::new(Address::default(), Arc::new(client));
@@ -1,6 +1,8 @@
-use ethers_contract::{abigen, EthAbiType, EthCall, EthDisplay, EthEvent, EthLogDecode};
+use ethers_contract::{
+    abigen, EthAbiCodec, EthAbiType, EthCall, EthDisplay, EthEvent, EthLogDecode,
+};
 use ethers_core::{
-    abi::{RawLog, Tokenizable},
+    abi::{AbiDecode, AbiEncode, RawLog, Tokenizable},
     types::{Address, H160, H256, I256, U128, U256},
 };
 
@@ -478,3 +480,83 @@ fn can_derive_for_enum() {
     let token = ActionChoices::GoLeft.into_token();
     assert_eq!(ActionChoices::GoLeft, ActionChoices::from_token(token).unwrap());
 }
+
+#[test]
+fn can_derive_abi_codec() {
+    #[derive(Debug, Clone, PartialEq, EthAbiType, EthAbiCodec)]
+    pub struct SomeType {
+        inner: Address,
+        msg: String,
+    }
+
+    let val = SomeType { inner: Default::default(), msg: "hello".to_string() };
+
+    let encoded = val.clone().encode();
+    let other = SomeType::decode(&encoded).unwrap();
+    assert_eq!(val, other);
+}
+
+#[test]
+fn can_derive_abi_codec_single_field() {
+    #[derive(Debug, Clone, PartialEq, EthAbiType, EthAbiCodec)]
+    pub struct SomeType {
+        inner: Vec<U256>,
+    }
+
+    let val = SomeType { inner: Default::default() };
+
+    let encoded = val.clone().encode();
+    let decoded = SomeType::decode(&encoded).unwrap();
+    assert_eq!(val, decoded);
+
+    let encoded_tuple = (Vec::<U256>::default(),).encode();
+
+    assert_eq!(encoded_tuple, encoded);
+    let decoded_tuple = SomeType::decode(&encoded_tuple).unwrap();
+    assert_eq!(decoded_tuple, decoded);
+
+    let tuple = (val,);
+    let encoded = tuple.clone().encode();
+    let decoded = <(SomeType,)>::decode(&encoded).unwrap();
+    assert_eq!(tuple, decoded);
+
+    let wrapped =
+        ethers_core::abi::encode(&ethers_core::abi::Tokenize::into_tokens(tuple.clone())).to_vec();
+    assert_eq!(wrapped, encoded);
+    let decoded_wrapped = <(SomeType,)>::decode(&wrapped).unwrap();
+
+    assert_eq!(decoded_wrapped, tuple);
+}
+
+#[test]
+fn can_derive_abi_codec_two_field() {
+    #[derive(Debug, Clone, PartialEq, EthAbiType, EthAbiCodec)]
+    pub struct SomeType {
+        inner: Vec<U256>,
+        addr: Address,
+    }
+
+    let val = SomeType { inner: Default::default(), addr: Default::default() };
+
+    let encoded = val.clone().encode();
+    let decoded = SomeType::decode(&encoded).unwrap();
+    assert_eq!(val, decoded);
+
+    let encoded_tuple = (Vec::<U256>::default(), Address::default()).encode();
+
+    assert_eq!(encoded_tuple, encoded);
+    let decoded_tuple = SomeType::decode(&encoded_tuple).unwrap();
+    assert_eq!(decoded_tuple, decoded);
+
+    let tuple = (val,);
+    let encoded = tuple.clone().encode();
+    let decoded = <(SomeType,)>::decode(&encoded).unwrap();
+    assert_eq!(tuple, decoded);
+
+    let wrapped =
+        ethers_core::abi::encode(&ethers_core::abi::Tokenize::into_tokens(tuple.clone())).to_vec();
+    assert_eq!(wrapped, encoded);
+    let decoded_wrapped = <(SomeType,)>::decode(&wrapped).unwrap();
+
+    assert_eq!(decoded_wrapped, tuple);
+}
@@ -1,5 +1,7 @@
 use crate::{
-    abi::{AbiArrayType, AbiError, AbiType, Detokenize, Tokenizable, TokenizableItem},
+    abi::{
+        AbiArrayType, AbiError, AbiType, Detokenize, Token, Tokenizable, TokenizableItem, Tokenize,
+    },
     types::{Address, H256, U128, U256},
 };
 
@@ -109,8 +111,7 @@ macro_rules! impl_abi_codec_tuple {
         )+
         {
             fn encode(self) -> Vec<u8> {
-                let token = self.into_token();
-                crate::abi::encode(&[token]).into()
+                crate::abi::encode(&self.into_tokens()).into()
             }
         }
 
@@ -119,10 +120,14 @@ macro_rules! impl_abi_codec_tuple {
             $ty: AbiType + Tokenizable,
         )+ {
             fn decode(bytes: impl AsRef<[u8]>) -> Result<Self, AbiError> {
-                let tokens = crate::abi::decode(
-                    &[Self::param_type()], bytes.as_ref()
-                )?;
-                Ok(<Self as Detokenize>::from_tokens(tokens)?)
+                if let crate::abi::ParamType::Tuple(params) = <Self as AbiType>::param_type() {
+                    let tokens = crate::abi::decode(&params, bytes.as_ref())?;
+                    Ok(<Self as Tokenizable>::from_token(Token::Tuple(tokens))?)
+                } else {
+                    Err(
+                        crate::abi::InvalidOutputType("Expected tuple".to_string()).into()
+                    )
+                }
             }
         }
     }
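The effect of this tuple fix is that a tuple now encodes as its members laid out directly (via `into_tokens`) rather than as a value wrapped in an extra outer tuple token, which is what makes the struct/tuple equivalence exercised in the tests above hold. A minimal round-trip sketch, assuming an `ethers-core` build that includes this change:

```rust
use ethers_core::abi::{AbiDecode, AbiEncode};
use ethers_core::types::{Address, U256};

fn main() {
    // A two-element tuple of ABI types; it encodes as its members back to back.
    let tuple = (Address::zero(), U256::from(42u64));
    let encoded = tuple.clone().encode();

    // Decoding goes through ParamType::Tuple(members), matching the macro above.
    let decoded = <(Address, U256)>::decode(&encoded).unwrap();
    assert_eq!(decoded, tuple);
}
```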