Fix buggy non-nested tuples (#48)

* test(tokens): ensure nested tuples tokenize properly

* test(contract): add failing test with 2 args

This happens because the args are serialized as a single Token::Tuple, when they should instead be a flat vector of tokens (see the sketch below).

* fix(tokens): add token flattening method to fix non-nested tuples

* fix: do not export the flatten function
Georgios Konstantopoulos 2020-07-03 17:37:38 +03:00 committed by GitHub
parent 04aefeb160
commit 3d2d40699f
4 changed files with 54 additions and 4 deletions
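
For illustration, here is a minimal, self-contained sketch of the problem and the fix. It uses a simplified stand-in Token enum rather than the real ethabi type, and a flatten_tokens helper that mirrors the one added in this commit: tokenizing a multi-argument call such as ("hi", "bye") yields a single Token::Tuple, but ABI-encoding the function's inputs needs the flat argument list, while genuinely nested tuples must stay nested.

    // Simplified stand-in for ethabi's Token enum, not the real type.
    #[derive(Debug, Clone, PartialEq)]
    enum Token {
        Uint(u64),
        Str(String),
        Tuple(Vec<Token>),
    }

    // Same idea as the helper added in this commit: unwrap a single outer
    // tuple, leave everything else (including nested tuples) untouched.
    fn flatten_tokens(tokens: Vec<Token>) -> Vec<Token> {
        if tokens.len() == 1 {
            match tokens.into_iter().next().unwrap() {
                Token::Tuple(inner) => inner,
                other => vec![other],
            }
        } else {
            tokens
        }
    }

    fn main() {
        // ("hi", "bye") tokenizes to a single Token::Tuple(...)
        let serialized = vec![Token::Tuple(vec![
            Token::Str("hi".to_owned()),
            Token::Str("bye".to_owned()),
        ])];

        // ...but encoding the call needs the flat argument list.
        let flattened = flatten_tokens(serialized);
        assert_eq!(
            flattened,
            vec![Token::Str("hi".to_owned()), Token::Str("bye".to_owned())]
        );

        // A genuinely nested tuple argument stays nested.
        let nested = vec![
            Token::Uint(1),
            Token::Tuple(vec![Token::Uint(2), Token::Uint(3)]),
        ];
        assert_eq!(flatten_tokens(nested.clone()), nested);
    }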


@@ -239,8 +239,10 @@ where
         function: &Function,
         args: T,
     ) -> Result<ContractCall<P, S, D>, Error> {
+        let tokens = args.into_tokens();
+
         // create the calldata
-        let data = function.encode_input(&args.into_tokens())?;
+        let data = function.encode_input(&tokens)?;

         // create the tx object
         let tx = TransactionRequest {


@@ -73,6 +73,14 @@ mod eth_tests {
             .unwrap();
         assert_eq!(init_address, Address::zero());
         assert_eq!(init_value, "initial value");
+
+        // methods with multiple args also work
+        let _tx_hash = contract
+            .method::<_, H256>("setValues", ("hi".to_owned(), "bye".to_owned()))
+            .unwrap()
+            .send()
+            .await
+            .unwrap();
     }

     #[tokio::test]


@@ -6,6 +6,7 @@ contract SimpleStorage {
     address public lastSender;
     string _value;
+    string _otherValue;

     constructor(string memory value) public {
         emit ValueChanged(msg.sender, address(0), _value, value);
@@ -21,4 +22,10 @@ contract SimpleStorage {
         _value = value;
         lastSender = msg.sender;
     }
+
+    function setValues(string memory value, string memory value2) public {
+        _value = value;
+        _otherValue = value2;
+        lastSender = msg.sender;
+    }
 }


@@ -56,13 +56,13 @@ pub trait Tokenize
 impl<'a> Tokenize for &'a [Token] {
     fn into_tokens(self) -> Vec<Token> {
-        self.to_vec()
+        flatten_tokens(self.to_vec())
     }
 }

 impl<T: Tokenizable> Tokenize for T {
     fn into_tokens(self) -> Vec<Token> {
-        vec![self.into_token()]
+        flatten_tokens(vec![self.into_token()])
     }
 }
@@ -476,9 +476,25 @@ impl_fixed_types!(256);
 impl_fixed_types!(512);
 impl_fixed_types!(1024);

+/// Helper for flattening non-nested tokens into their inner
+/// types, e.g. (A, B, C) would get tokenized to Tuple([A, B, C])
+/// when in fact we need [A, B, C].
+fn flatten_tokens(tokens: Vec<Token>) -> Vec<Token> {
+    if tokens.len() == 1 {
+        // flatten the tokens if required
+        // and there is no nesting
+        match tokens[0].clone() {
+            Token::Tuple(inner) => inner,
+            other => vec![other],
+        }
+    } else {
+        tokens
+    }
+}
+
 #[cfg(test)]
 mod tests {
-    use super::{Detokenize, Tokenizable};
+    use super::*;
     use crate::types::{Address, U256};
     use ethabi::Token;
@@ -511,6 +527,23 @@ mod tests {
         let _vec_of_tuple_5: Vec<(Address, Vec<Vec<u8>>, String, U256, bool)> = output();
     }

+    #[test]
+    fn nested_tokenization() {
+        let x = (1u64, (2u64, 3u64));
+        let tokens = x.into_tokens();
+        assert_eq!(
+            tokens,
+            vec![
+                Token::Uint(1.into()),
+                Token::Tuple(vec![Token::Uint(2.into()), Token::Uint(3.into())])
+            ]
+        );
+
+        let x = (1u64, 2u64);
+        let tokens = x.into_tokens();
+        assert_eq!(tokens, vec![Token::Uint(1.into()), Token::Uint(2.into())]);
+    }
+
     #[test]
     fn should_decode_array_of_fixed_bytes() {
         // byte[8][]