feat: add EthEvent proc macro derive support (#227)

* refactor: extract error module and use error macros

* feat: add solidity struct parser

* refactor: add AbiParse and support struct parsing

* test: add more struct parsing tests

* feat: add EthAbiType proc macro derive for deriving Tokenizable trait

* test: add EthAbiType derive tests

* refactor: extract tokenizable implementation into a separate method

* chore(test): use EthAbiType derive instead of implementing Detokenize

* feat: introduce EthEvent trait

* feat: add EthEvent proc macro derive support

* test: add proc macro derive tests

* chore: rustfmt

Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>
Matthias Seitz, 2021-03-15 12:59:52 +01:00, committed by GitHub
commit 7b10b76e20 (parent cf8e2391c8)
8 changed files with 733 additions and 30 deletions
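
For orientation, a minimal usage sketch of the two new derive macros, closely mirroring the tests added in this commit; the type and field names are illustrative, not part of the diff:

use ethers_contract::{EthAbiType, EthEvent};
use ethers_core::abi::Tokenizable;
use ethers_core::types::Address;

// EthAbiType derives Tokenizable, so the struct round-trips through an ABI tuple token.
#[derive(Debug, Clone, PartialEq, EthAbiType)]
struct ValueChanged {
    old_author: Address,
    new_author: Address,
    old_value: String,
    new_value: String,
}

// EthEvent additionally derives the event name and ABI signature from the fields.
#[derive(Debug, Clone, PartialEq, EthEvent)]
struct ValueChangedEvent {
    old_author: Address,
    new_author: Address,
    old_value: String,
    new_value: String,
}

fn demo(value: ValueChanged) {
    // round-trip through an ABI token
    let token = value.clone().into_token();
    assert_eq!(value, ValueChanged::from_token(token).unwrap());
    // the ABI signature is derived from the struct's fields
    assert_eq!(
        "ValueChangedEvent(address,address,string,string)",
        ValueChangedEvent::abi_signature()
    );
}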

Cargo.lock (generated)

@@ -1,5 +1,7 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
+version = 3
+
 [[package]]
 name = "Inflector"
 version = "0.11.4"
@@ -672,6 +674,7 @@ version = "0.2.2"
 dependencies = [
  "ethers-contract-abigen",
  "ethers-core",
+ "hex",
  "proc-macro2",
  "quote",
  "serde_json",


@@ -17,7 +17,7 @@ ethers-core = { version = "0.2.2", path = "../../ethers-core" }
 ethers-contract-abigen = { version = "0.2.2", path = "../ethers-contract-abigen" }
 serde_json = "1.0.53"
+hex = { version = "0.4.3", default-features = false, features = ["std"] }
 proc-macro2 = "1.0"
 quote = "1.0"
 syn = "1.0.12"


@@ -2,14 +2,23 @@
 //! ethereum smart contract.
 #![deny(missing_docs, unsafe_code)]
 
-mod spanned;
+use ethers_contract_abigen::Source;
+use proc_macro::TokenStream;
+use proc_macro2::{Literal, Span};
+use quote::{quote, quote_spanned};
+use syn::spanned::Spanned as _;
+use syn::{
+    parse::Error, parse_macro_input, AttrStyle, Data, DeriveInput, Expr, Fields, GenericArgument,
+    Lit, Meta, NestedMeta, PathArguments, Type,
+};
+
+use abigen::{expand, ContractArgs};
+use ethers_core::abi::{AbiParser, Event, EventExt, EventParam, ParamType};
+use hex::FromHex;
 use spanned::Spanned;
 
 mod abigen;
-use abigen::{expand, ContractArgs};
-
-use proc_macro::TokenStream;
-use syn::{parse::Error, parse_macro_input};
+mod spanned;
 
 /// Proc macro to generate type-safe bindings to a contract. This macro accepts
 /// an Ethereum contract ABI or a path. Note that this path is rooted in
@@ -65,3 +74,508 @@ pub fn abigen(input: TokenStream) -> TokenStream {
         .unwrap_or_else(|e| Error::new(span, format!("{:?}", e)).to_compile_error())
         .into()
 }
/// Derives the `EthEvent` and `Tokenizable` traits for the labeled type.
///
/// Additional arguments can be specified using the `#[ethevent(...)]` attribute:
///
/// - `name`, `name = "..."`: Overrides the generated `EthEvent` name, default is the struct's name.
/// - `signature`, `signature = "..."`: A hex string that overrides the event's computed signature.
/// - `abi`, `abi = "..."`: The ABI signature of the event this type's data corresponds to.
#[proc_macro_derive(EthEvent, attributes(ethevent))]
pub fn derive_abi_event(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let name = &input.ident;
let attributes = match parse_attributes(&input) {
Ok(attributes) => attributes,
Err(errors) => return TokenStream::from(errors),
};
let event_name = attributes
.name
.map(|(n, _)| n)
.unwrap_or_else(|| input.ident.to_string());
let (abi, hash) = if let Some((src, span)) = attributes.abi {
if let Ok(mut event) = parse_event(&src) {
event.name = event_name.clone();
(event.abi_signature(), event.signature())
} else {
match src.parse::<Source>().and_then(|s| s.get()) {
Ok(abi) => {
// try to derive the signature from the parsed ABI
// TODO(mattsse): this will fail for events that contain non-elementary types in their ABI,
// because the parser doesn't know how to substitute the types;
// this could be mitigated by getting the ABI of each non-elementary type at runtime
// and computing the signature as `static Lazy::...`
match parse_event(&abi) {
Ok(mut event) => {
event.name = event_name.clone();
(event.abi_signature(), event.signature())
}
Err(err) => {
return TokenStream::from(Error::new(span, err).to_compile_error())
}
}
}
Err(err) => return TokenStream::from(Error::new(span, err).to_compile_error()),
}
}
} else {
// try to determine the abi from the fields
match derive_abi_event_from_fields(&input) {
Ok(mut event) => {
event.name = event_name.clone();
(event.abi_signature(), event.signature())
}
Err(err) => return TokenStream::from(err.to_compile_error()),
}
};
let signature = if let Some((hash, _)) = attributes.signature_hash {
signature(&hash)
} else {
signature(hash.as_bytes())
};
let ethevent_impl = quote! {
impl ethers_contract::EthEvent for #name {
fn name(&self) -> ::std::borrow::Cow<'static, str> {
#event_name.into()
}
fn signature() -> ethers_core::types::H256 {
#signature
}
fn abi_signature() -> ::std::borrow::Cow<'static, str> {
#abi.into()
}
}
};
let tokenize_impl = derive_tokenizeable_impl(&input);
// emit the Tokenizable impl together with the EthEvent impl
TokenStream::from(quote! {
#tokenize_impl
#ethevent_impl
})
}
fn derive_abi_event_from_fields(input: &DeriveInput) -> Result<Event, Error> {
let types: Vec<_> = match input.data {
Data::Struct(ref data) => match data.fields {
Fields::Named(ref fields) => fields.named.iter().map(|f| &f.ty).collect(),
Fields::Unnamed(ref fields) => fields.unnamed.iter().map(|f| &f.ty).collect(),
Fields::Unit => {
return Err(Error::new(
input.span(),
"EthEvent cannot be derived for empty structs and unit",
))
}
},
Data::Enum(_) => {
return Err(Error::new(
input.span(),
"EthEvent cannot be derived for enums",
));
}
Data::Union(_) => {
return Err(Error::new(
input.span(),
"EthEvent cannot be derived for unions",
));
}
};
let inputs = types
.iter()
.map(|ty| find_parameter_type(ty))
.collect::<Result<Vec<_>, _>>()?;
let event = Event {
name: "".to_string(),
inputs: inputs
.into_iter()
.map(|kind| EventParam {
name: "".to_string(),
kind,
indexed: false,
})
.collect(),
anonymous: false,
};
Ok(event)
}
fn find_parameter_type(ty: &Type) -> Result<ParamType, Error> {
match ty {
Type::Array(ty) => {
let param = find_parameter_type(ty.elem.as_ref())?;
if let Expr::Lit(ref expr) = ty.len {
if let Lit::Int(ref len) = expr.lit {
if let Ok(size) = len.base10_parse::<usize>() {
return Ok(ParamType::FixedArray(Box::new(param), size));
}
}
}
Err(Error::new(
ty.span(),
"Failed to derive proper ABI from array field",
))
}
Type::Path(ty) => {
if let Some(ident) = ty.path.get_ident() {
return match ident.to_string().to_lowercase().as_str() {
"address" => Ok(ParamType::Address),
"string" => Ok(ParamType::String),
"bool" => Ok(ParamType::Bool),
"int" | "uint" => Ok(ParamType::Uint(256)),
"h160" => Ok(ParamType::FixedBytes(20)),
"h256" | "secret" | "hash" => Ok(ParamType::FixedBytes(32)),
"h512" | "public" => Ok(ParamType::FixedBytes(64)),
s => parse_int_param_type(s).ok_or_else(|| {
Error::new(ty.span(), "Failed to derive proper ABI from fields")
}),
};
}
// check for `Vec`
if ty.path.segments.len() == 1 && ty.path.segments[0].ident == "Vec" {
if let PathArguments::AngleBracketed(ref args) = ty.path.segments[0].arguments {
if args.args.len() == 1 {
if let GenericArgument::Type(ref ty) = args.args.iter().next().unwrap() {
let kind = find_parameter_type(ty)?;
return Ok(ParamType::Array(Box::new(kind)));
}
}
}
}
Err(Error::new(
ty.span(),
"Failed to derive proper ABI from fields",
))
}
Type::Tuple(ty) => {
let params = ty
.elems
.iter()
.map(|t| find_parameter_type(t))
.collect::<Result<Vec<_>, _>>()?;
Ok(ParamType::Tuple(params))
}
_ => {
eprintln!("Found other types");
Err(Error::new(
ty.span(),
"Failed to derive proper ABI from fields",
))
}
}
}
fn parse_int_param_type(s: &str) -> Option<ParamType> {
let size = s
.chars()
.skip(1)
.collect::<String>()
.parse::<usize>()
.ok()?;
if s.starts_with('u') {
Some(ParamType::Uint(size))
} else if s.starts_with('i') {
Some(ParamType::Int(size))
} else {
None
}
}
fn signature(hash: &[u8]) -> proc_macro2::TokenStream {
let bytes = hash.iter().copied().map(Literal::u8_unsuffixed);
quote! {ethers_core::types::H256([#( #bytes ),*])}
}
fn parse_event(abi: &str) -> Result<Event, String> {
let abi = if !abi.trim_start().starts_with("event ") {
format!("event {}", abi)
} else {
abi.to_string()
};
AbiParser::default()
.parse_event(&abi)
.map_err(|err| format!("Failed to parse the event ABI: {:?}", err))
}
/// Derives the `Tokenizable` trait for the labeled type.
///
/// This derive macro automatically adds a type bound `field: Tokenizable` for each
/// field type.
#[proc_macro_derive(EthAbiType)]
pub fn derive_abi_type(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
TokenStream::from(derive_tokenizeable_impl(&input))
}
fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream {
let name = &input.ident;
let generic_params = input.generics.params.iter().map(|p| quote! { #p });
let generic_params = quote! { #(#generic_params,)* };
let generic_args = input.generics.type_params().map(|p| {
let name = &p.ident;
quote_spanned! { p.ident.span() => #name }
});
let generic_args = quote! { #(#generic_args,)* };
let generic_predicates = match input.generics.where_clause {
Some(ref clause) => {
let predicates = clause.predicates.iter().map(|p| quote! { #p });
quote! { #(#predicates,)* }
}
None => quote! {},
};
let (tokenize_predicates, params_len, init_struct_impl, into_token_impl) = match input.data {
Data::Struct(ref data) => match data.fields {
Fields::Named(ref fields) => {
let tokenize_predicates = fields.named.iter().map(|f| {
let ty = &f.ty;
quote_spanned! { f.span() => #ty: ethers_core::abi::Tokenize }
});
let tokenize_predicates = quote! { #(#tokenize_predicates,)* };
let assignments = fields.named.iter().map(|f| {
let name = f.ident.as_ref().expect("Named fields have names");
quote_spanned! { f.span() => #name: ethers_core::abi::Tokenizable::from_token(iter.next().expect("tokens size is sufficient qed").into_token())? }
});
let init_struct_impl = quote! { Self { #(#assignments,)* } };
let into_token = fields.named.iter().map(|f| {
let name = f.ident.as_ref().expect("Named fields have names");
quote_spanned! { f.span() => self.#name.into_token() }
});
let into_token_impl = quote! { #(#into_token,)* };
(
tokenize_predicates,
fields.named.len(),
init_struct_impl,
into_token_impl,
)
}
Fields::Unnamed(ref fields) => {
let tokenize_predicates = fields.unnamed.iter().map(|f| {
let ty = &f.ty;
quote_spanned! { f.span() => #ty: ethers_core::abi::Tokenize }
});
let tokenize_predicates = quote! { #(#tokenize_predicates,)* };
let assignments = fields.unnamed.iter().map(|f| {
quote_spanned! { f.span() => ethers_core::abi::Tokenizable::from_token(iter.next().expect("tokens size is sufficient qed").into_token())? }
});
let init_struct_impl = quote! { Self(#(#assignments,)* ) };
let into_token = fields.unnamed.iter().enumerate().map(|(i, f)| {
let idx = syn::Index::from(i);
quote_spanned! { f.span() => self.#idx.into_token() }
});
let into_token_impl = quote! { #(#into_token,)* };
(
tokenize_predicates,
fields.unnamed.len(),
init_struct_impl,
into_token_impl,
)
}
Fields::Unit => {
return Error::new(
input.span(),
"EthAbiType cannot be derived for empty structs and unit",
)
.to_compile_error();
}
},
Data::Enum(_) => {
return Error::new(input.span(), "EthAbiType cannot be derived for enums")
.to_compile_error();
}
Data::Union(_) => {
return Error::new(input.span(), "EthAbiType cannot be derived for unions")
.to_compile_error();
}
};
quote! {
impl<#generic_params> ethers_core::abi::Tokenizable for #name<#generic_args>
where
#generic_predicates
#tokenize_predicates
{
fn from_token(token: ethers_core::abi::Token) -> Result<Self, ethers_core::abi::InvalidOutputType> where
Self: Sized {
if let ethers_core::abi::Token::Tuple(tokens) = token {
if tokens.len() != #params_len {
return Err(ethers_core::abi::InvalidOutputType(format!(
"Expected {} tokens, got {}: {:?}",
#params_len,
tokens.len(),
tokens
)));
}
let mut iter = tokens.into_iter();
Ok(#init_struct_impl)
} else {
Err(ethers_core::abi::InvalidOutputType(format!(
"Expected Tuple, got {:?}",
token
)))
}
}
fn into_token(self) -> ethers_core::abi::Token {
ethers_core::abi::Token::Tuple(
vec![
#into_token_impl
]
)
}
}
}
}
struct Attributes {
name: Option<(String, Span)>,
abi: Option<(String, Span)>,
signature_hash: Option<(Vec<u8>, Span)>,
}
impl Default for Attributes {
fn default() -> Self {
Self {
name: None,
abi: None,
signature_hash: None,
}
}
}
fn parse_attributes(input: &DeriveInput) -> Result<Attributes, proc_macro2::TokenStream> {
let mut result = Attributes::default();
for a in input.attrs.iter() {
if let AttrStyle::Outer = a.style {
if let Ok(Meta::List(meta)) = a.parse_meta() {
if meta.path.is_ident("ethevent") {
for n in meta.nested.iter() {
if let NestedMeta::Meta(meta) = n {
match meta {
Meta::Path(path) => {
return Err(Error::new(
path.span(),
"unrecognized ethevent parameter",
)
.to_compile_error());
}
Meta::List(meta) => {
// TODO support raw list
return Err(Error::new(
meta.path.span(),
"unrecognized ethevent parameter",
)
.to_compile_error());
}
Meta::NameValue(meta) => {
if meta.path.is_ident("name") {
if let Lit::Str(ref lit_str) = meta.lit {
if result.name.is_none() {
result.name =
Some((lit_str.value(), lit_str.span()));
} else {
return Err(Error::new(
meta.span(),
"name already specified",
)
.to_compile_error());
}
} else {
return Err(Error::new(
meta.span(),
"name must be a string",
)
.to_compile_error());
}
} else if meta.path.is_ident("abi") {
if let Lit::Str(ref lit_str) = meta.lit {
if result.abi.is_none() {
result.abi =
Some((lit_str.value(), lit_str.span()));
} else {
return Err(Error::new(
meta.span(),
"abi already specified",
)
.to_compile_error());
}
} else {
return Err(Error::new(
meta.span(),
"abi must be a string",
)
.to_compile_error());
}
} else if meta.path.is_ident("signature") {
if let Lit::Str(ref lit_str) = meta.lit {
if result.signature_hash.is_none() {
match Vec::from_hex(lit_str.value()) {
Ok(sig) => {
result.signature_hash =
Some((sig, lit_str.span()))
}
Err(err) => {
return Err(Error::new(
meta.span(),
format!(
"Expected hex signature: {:?}",
err
),
)
.to_compile_error());
}
}
} else {
return Err(Error::new(
meta.span(),
"signature already specified",
)
.to_compile_error());
}
} else {
return Err(Error::new(
meta.span(),
"signature must be a hex string",
)
.to_compile_error());
}
} else {
return Err(Error::new(
meta.span(),
"unrecognized ethevent parameter",
)
.to_compile_error());
}
}
}
}
}
}
}
}
}
Ok(result)
}
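
A sketch of how the `#[ethevent(...)]` overrides handled by `parse_attributes` above can be combined; the event layout is hypothetical, and the hex value shown is the well-known ERC-20 Transfer topic, used purely for illustration:

use ethers_contract::EthEvent;
use ethers_core::types::{Address, U256};

// `name` overrides the event name derived from the struct ident; `signature`
// (a hex string) overrides the Keccak-256 hash that would otherwise be computed
// from the ABI signature derived from the fields.
#[derive(Debug, Clone, PartialEq, EthEvent)]
#[ethevent(
    name = "Transfer",
    signature = "ddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"
)]
struct TransferEvent {
    from: Address,
    to: Address,
    value: U256,
}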


@@ -5,8 +5,24 @@ use ethers_core::{
     types::{BlockNumber, Filter, Log, TxHash, ValueOrArray, H256, U64},
 };
 use ethers_providers::{FilterWatcher, Middleware, PubsubClient, SubscriptionStream};
+use std::borrow::Cow;
 use std::marker::PhantomData;
+
+/// A trait for implementing event bindings
+pub trait EthEvent: Detokenize {
+    /// The name of the event this type represents
+    fn name(&self) -> Cow<'static, str>;
+
+    /// Retrieves the signature for the event this data corresponds to.
+    /// This signature is the Keccak-256 hash of the ABI signature of
+    /// this event.
+    fn signature() -> H256;
+
+    /// Retrieves the ABI signature for the event this data corresponds
+    /// to.
+    fn abi_signature() -> Cow<'static, str>;
+}
 
 /// Helper for managing the event filter before querying or streaming its logs
 #[derive(Debug)]
 #[must_use = "event filters do nothing unless you `query` or `stream` them"]
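
For comparison with the derive, a hand-written implementation of the new trait might look roughly like this; the type is hypothetical, and it assumes ethers-core's blanket `Detokenize` impl for `Tokenizable` types (which the derived test types also rely on):

use ethers_contract::{EthAbiType, EthEvent};
use ethers_core::types::{Address, H256};
use ethers_core::utils::keccak256;
use std::borrow::Cow;

// Hypothetical type; the EthAbiType derive provides the Tokenizable impl that
// is expected to satisfy the Detokenize bound on EthEvent.
#[derive(Debug, Clone, EthAbiType)]
struct ValueChangedFilter {
    old_author: Address,
    new_author: Address,
    old_value: String,
    new_value: String,
}

impl EthEvent for ValueChangedFilter {
    fn name(&self) -> Cow<'static, str> {
        "ValueChanged".into()
    }

    fn signature() -> H256 {
        // the Keccak-256 hash of the ABI signature, as documented on the trait
        H256(keccak256("ValueChanged(address,address,string,string)"))
    }

    fn abi_signature() -> Cow<'static, str> {
        "ValueChanged(address,address,string,string)".into()
    }
}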


@@ -26,6 +26,7 @@ mod factory;
 pub use factory::ContractFactory;
 
 mod event;
+pub use event::EthEvent;
 
 mod stream;
@@ -46,7 +47,7 @@ pub use ethers_contract_abigen::Abigen;
 #[cfg(feature = "abigen")]
 #[cfg_attr(docsrs, doc(cfg(feature = "abigen")))]
-pub use ethers_contract_derive::abigen;
+pub use ethers_contract_derive::{abigen, EthAbiType, EthEvent};
 
 // Hide the Lazy re-export, it's just for convenience
 #[doc(hidden)]


@@ -0,0 +1,183 @@
use ethers::core::types::{H160, H256, I256, U128, U256};
use ethers_contract::{EthAbiType, EthEvent};
use ethers_core::abi::Tokenizable;
use ethers_core::types::Address;
#[derive(Debug, Clone, PartialEq, EthAbiType)]
struct ValueChanged {
old_author: Address,
new_author: Address,
old_value: String,
new_value: String,
}
#[derive(Debug, Clone, PartialEq, EthAbiType)]
struct ValueChangedWrapper {
inner: ValueChanged,
msg: String,
}
#[derive(Debug, Clone, PartialEq, EthAbiType)]
struct ValueChangedTuple(Address, Address, String, String);
#[derive(Debug, Clone, PartialEq, EthAbiType)]
struct ValueChangedTupleWrapper(ValueChangedTuple, String);
#[test]
fn can_detokenize_struct() {
let value = ValueChanged {
old_author: "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee".parse().unwrap(),
new_author: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".parse().unwrap(),
old_value: "50".to_string(),
new_value: "100".to_string(),
};
let token = value.clone().into_token();
assert_eq!(value, ValueChanged::from_token(token).unwrap());
}
#[test]
fn can_detokenize_nested_structs() {
let value = ValueChangedWrapper {
inner: ValueChanged {
old_author: "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee".parse().unwrap(),
new_author: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".parse().unwrap(),
old_value: "50".to_string(),
new_value: "100".to_string(),
},
msg: "hello world".to_string(),
};
let token = value.clone().into_token();
assert_eq!(value, ValueChangedWrapper::from_token(token).unwrap());
}
#[test]
fn can_detokenize_tuple_struct() {
let value = ValueChangedTuple(
"eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee".parse().unwrap(),
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".parse().unwrap(),
"50".to_string(),
"100".to_string(),
);
let token = value.clone().into_token();
assert_eq!(value, ValueChangedTuple::from_token(token).unwrap());
}
#[test]
fn can_detokenize_nested_tuple_struct() {
let value = ValueChangedTupleWrapper(
ValueChangedTuple(
"eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee".parse().unwrap(),
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".parse().unwrap(),
"50".to_string(),
"100".to_string(),
),
"hello world".to_string(),
);
let token = value.clone().into_token();
assert_eq!(value, ValueChangedTupleWrapper::from_token(token).unwrap());
}
#[test]
fn can_derive_eth_event() {
#[derive(Debug, Clone, PartialEq, EthEvent)]
pub struct ValueChangedEvent {
old_author: Address,
new_author: Address,
old_value: String,
new_value: String,
}
let value = ValueChangedEvent {
old_author: "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee".parse().unwrap(),
new_author: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".parse().unwrap(),
old_value: "50".to_string(),
new_value: "100".to_string(),
};
assert_eq!("ValueChangedEvent", value.name());
assert_eq!(
"ValueChangedEvent(address,address,string,string)",
ValueChangedEvent::abi_signature()
);
let token = value.clone().into_token();
assert_eq!(value, ValueChangedEvent::from_token(token).unwrap());
}
#[test]
fn can_set_eth_event_name_attribute() {
#[derive(Debug, PartialEq, EthEvent)]
#[ethevent(name = "MyEvent")]
pub struct ValueChangedEvent {
old_author: Address,
new_author: Address,
old_value: String,
new_value: String,
}
let value = ValueChangedEvent {
old_author: "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee".parse().unwrap(),
new_author: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".parse().unwrap(),
old_value: "50".to_string(),
new_value: "100".to_string(),
};
assert_eq!("MyEvent", value.name());
assert_eq!(
"MyEvent(address,address,string,string)",
ValueChangedEvent::abi_signature()
);
}
#[test]
fn can_detect_various_event_abi_types() {
#[derive(Debug, PartialEq, EthEvent)]
struct ValueChangedEvent {
old_author: Address,
s: String,
h1: H256,
i256: I256,
u256: U256,
b: bool,
v: Vec<Address>,
bs: Vec<bool>,
h160: H160,
u128: U128,
int8: i8,
int16: i16,
int32: i32,
int64: i64,
int128: i128,
uint8: u8,
uint16: u16,
uint32: u32,
uint64: u64,
uint128: u128,
}
assert_eq!(
"ValueChangedEvent(address,string,bytes32,int256,uint256,bool,address[],bool[],bytes20,uint128,int8,int16,int32,int64,int128,uint8,uint16,uint32,uint64,uint128)",
ValueChangedEvent::abi_signature()
);
}
// #[test]
// fn can_set_eth_abi_attribute() {
// #[derive(Debug, Clone, PartialEq, EthAbiType)]
// struct SomeType {
// inner: Address,
// msg: String,
// }
//
// #[derive(Debug, PartialEq, EthEvent)]
// #[ethevent(abi = "ValueChangedEvent(address,(address,string),string)")]
// pub struct ValueChangedEvent {
// old_author: Address,
// inner: SomeType,
// new_value: String,
// }
// }


@@ -1,17 +1,19 @@
+mod derive;
 use ethers_core::{
-    abi::{Abi, Detokenize, InvalidOutputType, Token},
+    abi::Abi,
     types::{Address, Bytes},
 };
-use ethers_contract::{Contract, ContractFactory};
+use ethers_contract::{Contract, ContractFactory, EthAbiType};
 use ethers_core::utils::{GanacheInstance, Solc};
 use ethers_middleware::signer::SignerMiddleware;
 use ethers_providers::{Http, Middleware, Provider};
 use ethers_signers::LocalWallet;
 use std::{convert::TryFrom, sync::Arc, time::Duration};
 
-// Note: We also provide the `abigen` macro for generating these bindings automatically
-#[derive(Clone, Debug)]
+// Note: The `EthAbiType` derive macro implements the necessary conversion between `Tokens` and
+// the struct
+#[derive(Clone, Debug, EthAbiType)]
 pub struct ValueChanged {
     pub old_author: Address,
     pub new_author: Address,
@@ -19,22 +21,6 @@ pub struct ValueChanged {
     pub new_value: String,
 }
 
-impl Detokenize for ValueChanged {
-    fn from_tokens(tokens: Vec<Token>) -> Result<ValueChanged, InvalidOutputType> {
-        let old_author: Address = tokens[1].clone().into_address().unwrap();
-        let new_author: Address = tokens[1].clone().into_address().unwrap();
-        let old_value = tokens[2].clone().into_string().unwrap();
-        let new_value = tokens[3].clone().into_string().unwrap();
-
-        Ok(Self {
-            old_author,
-            new_author,
-            old_value,
-            new_value,
-        })
-    }
-}
-
 /// compiles the given contract and returns the ABI and Bytecode
 pub fn compile_contract(name: &str, filename: &str) -> (Abi, Bytes) {
     let compiled = Solc::new(&format!("./tests/solidity-contracts/{}", filename))
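
The `#[derive(EthAbiType)]` that replaces the hand-written `Detokenize` impl removed above expands, roughly, to a `Tokenizable` impl along these lines; this is a hand-written approximation of the output of `derive_tokenizeable_impl`, not the literal macro expansion:

use ethers_core::abi::{InvalidOutputType, Token, Tokenizable};
use ethers_core::types::Address;

pub struct ValueChanged {
    pub old_author: Address,
    pub new_author: Address,
    pub old_value: String,
    pub new_value: String,
}

impl Tokenizable for ValueChanged {
    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
        // the struct is decoded from a tuple token with one entry per field,
        // in declaration order
        if let Token::Tuple(tokens) = token {
            if tokens.len() != 4 {
                return Err(InvalidOutputType(format!(
                    "Expected 4 tokens, got {}: {:?}",
                    tokens.len(),
                    tokens
                )));
            }
            let mut iter = tokens.into_iter();
            Ok(Self {
                old_author: Tokenizable::from_token(iter.next().expect("checked length"))?,
                new_author: Tokenizable::from_token(iter.next().expect("checked length"))?,
                old_value: Tokenizable::from_token(iter.next().expect("checked length"))?,
                new_value: Tokenizable::from_token(iter.next().expect("checked length"))?,
            })
        } else {
            Err(InvalidOutputType(format!("Expected Tuple, got {:?}", token)))
        }
    }

    fn into_token(self) -> Token {
        // encoding is the reverse: each field is tokenized and packed into a tuple
        Token::Tuple(vec![
            self.old_author.into_token(),
            self.new_author.into_token(),
            self.old_value.into_token(),
            self.new_value.into_token(),
        ])
    }
}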


@@ -152,7 +152,7 @@ impl AbiParser {
     }
 
     /// Parses a solidity event declaration from `event <name> (args*) anonymous?`
-    fn parse_event(&self, s: &str) -> Result<Event> {
+    pub fn parse_event(&self, s: &str) -> Result<Event> {
         let mut event = s.trim();
         if !event.starts_with("event ") {
             bail!("Not an event `{}`", s)
@@ -237,7 +237,7 @@ impl AbiParser {
         })
     }
 
-    fn parse_function(&mut self, s: &str) -> Result<Function> {
+    pub fn parse_function(&mut self, s: &str) -> Result<Function> {
         let mut input = s.trim();
         if !input.starts_with("function ") {
             bail!("Not a function `{}`", input)
@@ -328,7 +328,7 @@ impl AbiParser {
         }
     }
 
-    fn parse_constructor(&self, s: &str) -> Result<Constructor> {
+    pub fn parse_constructor(&self, s: &str) -> Result<Constructor> {
         let mut input = s.trim();
         if !input.starts_with("constructor") {
             bail!("Not a constructor `{}`", input)