fix: always treat abi type structs as tuples (#417)

* fix: always treat abi type structs as tuples

* fix: keep decoding for single field structs

* feat: unify event and struct encoding

Author: Matthias Seitz, 2021-09-02 18:16:39 +02:00 (committed by GitHub)
parent 664ccfe9d6
commit 8b5f4bfa81
2 changed files with 27 additions and 23 deletions
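
In plain terms: a struct deriving `EthAbiType` with exactly one field is now tokenized as a one-element `Token::Tuple`, the same as multi-field structs, instead of collapsing to the bare token of its single field (that is what the removed `1 =>` arm below produced). A minimal before/after sketch, using a hypothetical wrapper that mirrors the `ValueChangedVecWrapper` test struct added further down:

```rust
use ethers_core::abi::Token;

fn main() {
    // Tokenized empty `Vec<ValueChanged>` inside a hypothetical single-field wrapper.
    let inner = Token::Array(vec![]);

    let old_encoding = inner.clone();             // previously: bare field token
    let new_encoding = Token::Tuple(vec![inner]); // now: one-element tuple, like any struct

    assert_ne!(old_encoding, new_encoding);
}
```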

@@ -414,7 +414,7 @@ fn derive_decode_from_log_impl(
         .filter(|f| !f.is_indexed())
         .map(|f| param_type_quote(&f.param.kind));
 
-    let data_types_init = quote! {let data_types = ::std::vec![#( #data_types ),*];};
+    let data_types_init = quote! {let data_types = [#( #data_types ),*];};
 
     // decode
     let (signature_check, flat_topics_init, topic_tokens_len_check) = if event.anonymous {
@@ -478,7 +478,6 @@ fn derive_decode_from_log_impl(
             #( tokens.push(#swap_tokens); )*
         }
     };
-
     Ok(quote! {
         let #core_crate::abi::RawLog {data, topics} = log;
@@ -492,7 +491,7 @@ fn derive_decode_from_log_impl(
         #tokens_init
-        #core_crate::abi::Detokenize::from_tokens(tokens).map_err(|_|#core_crate::abi::Error::InvalidData)
+        #core_crate::abi::Tokenizable::from_token(#core_crate::abi::Token::Tuple(tokens)).map_err(|_|#core_crate::abi::Error::InvalidData)
     })
 }
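
The second half of the unification is on the event side: instead of handing the decoded tokens to `Detokenize::from_tokens`, the generated log decoder now wraps them in a `Token::Tuple` and goes through `Tokenizable::from_token`, i.e. the same path a derived struct uses. A rough sketch of that hand-off with plain `ethers_core::abi` calls (the field types are made up for illustration):

```rust
use ethers_core::abi::{decode, encode, ParamType, Token};

fn main() {
    // Pretend these are the non-indexed fields of an event.
    let data_types = [ParamType::String, ParamType::String];
    let data = encode(&[
        Token::String("50".to_owned()),
        Token::String("100".to_owned()),
    ]);

    // Decode the data section, then bundle everything into one tuple token,
    // which is what the derived `from_token` of the event struct consumes.
    let tokens = decode(&data_types, &data).expect("decoding should succeed");
    let unified = Token::Tuple(tokens);

    assert!(matches!(unified, Token::Tuple(ref t) if t.len() == 2));
}
```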
@@ -808,26 +807,6 @@ fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream {
                 #core_crate::abi::Token::Tuple(Vec::new())
             },
         ),
-        1 => {
-            // This is a hacky solution in order to keep the same tokenstream as for tuples
-            let from_token = quote! {
-                let mut iter = Some(token).into_iter();
-                Ok(#init_struct_impl)
-            };
-            // This is a hack to get rid of the trailing comma introduced in the macro that concatenates all the fields
-            if let Ok(into_token) = into_token_impl
-                .to_string()
-                .as_str()
-                .trim_end_matches(',')
-                .parse()
-            {
-                (from_token, into_token)
-            } else {
-                return Error::new(input.span(), "Failed to derive Tokenizeable implementation")
-                    .to_compile_error();
-            }
-        }
         _ => {
             let from_token = quote! {
                 if let #core_crate::abi::Token::Tuple(tokens) = token {
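
With the special case removed, single-field structs fall through to the generic `_ =>` arm: `into_token` emits a `Token::Tuple` over all fields and `from_token` expects one back. A hand-expanded approximation of what that generated `from_token` boils down to for a one-field struct (names are hypothetical, and `Option` stands in for the derive's real error handling); the `can_detokenize_single_field` test added below round-trips exactly this behaviour:

```rust
use ethers_core::abi::Token;

// Hypothetical single-field struct standing in for `ValueChangedVecWrapper`.
struct Wrapper {
    inner: String,
}

// Expect a tuple, drain it with an iterator, rebuild the struct field by field.
fn wrapper_from_token(token: Token) -> Option<Wrapper> {
    if let Token::Tuple(tokens) = token {
        let mut iter = tokens.into_iter();
        let inner = match iter.next()? {
            Token::String(s) => s,
            _ => return None,
        };
        Some(Wrapper { inner })
    } else {
        None
    }
}

fn main() {
    let token = Token::Tuple(vec![Token::String("hello".to_owned())]);
    assert!(wrapper_from_token(token).is_some());
}
```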

@@ -24,6 +24,11 @@ struct ValueChangedTuple(Address, Address, String, String);
 #[derive(Debug, Clone, PartialEq, EthAbiType)]
 struct ValueChangedTupleWrapper(ValueChangedTuple, String);
 
+#[derive(Debug, Clone, PartialEq, EthAbiType)]
+struct ValueChangedVecWrapper {
+    inner: Vec<ValueChanged>,
+}
+
 #[test]
 fn can_detokenize_struct() {
     let value = ValueChanged {
@@ -82,6 +87,26 @@ fn can_detokenize_nested_tuple_struct() {
     assert_eq!(value, ValueChangedTupleWrapper::from_token(token).unwrap());
 }
 
+#[test]
+fn can_detokenize_single_field() {
+    let value = ValueChangedVecWrapper { inner: vec![] };
+    let token = value.clone().into_token();
+    assert_eq!(value, ValueChangedVecWrapper::from_token(token).unwrap());
+
+    let value = ValueChangedVecWrapper {
+        inner: vec![ValueChanged {
+            old_author: "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee".parse().unwrap(),
+            new_author: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".parse().unwrap(),
+            old_value: "50".to_string(),
+            new_value: "100".to_string(),
+        }],
+    };
+    let token = value.clone().into_token();
+    assert_eq!(value, ValueChangedVecWrapper::from_token(token).unwrap());
+}
+
 #[test]
 fn can_derive_eth_event() {
     #[derive(Debug, Clone, PartialEq, EthEvent)]