fix(tokens): make token decoding abiencoderv2 friendly

Georgios Konstantopoulos 2020-07-03 18:51:19 +03:00
parent 3d2d40699f
commit 41998d6d2d
GPG Key ID: FA607837CD26EDBC
2 changed files with 8 additions and 15 deletions


@@ -104,7 +104,6 @@ where
         let bytes = self.client.call(&self.tx, self.block).await?;
         let tokens = self.function.decode_output(&bytes.0)?;
         let data = D::from_tokens(tokens)?;
         Ok(data)
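
For context, the hunk above is the read path of a contract call: the client call returns the raw bytes, decode_output parses them into a list of ABI tokens, and D::from_tokens (the Detokenize impl changed below) turns that list into the caller's Rust type. A minimal, self-contained sketch of the shape of that data, using simplified local stand-ins (a tiny Token enum and a decode_output_stub) rather than the crate's real types:

// Simplified local stand-in for the ABI Token type; not the crate's real enum.
#[derive(Debug)]
enum Token {
    Uint(u64),
    Bool(bool),
}

// Stand-in for what function.decode_output(&bytes.0) hands back when the called
// function has two return values: two separate tokens, not one.
fn decode_output_stub() -> Vec<Token> {
    vec![Token::Uint(42), Token::Bool(true)]
}

fn main() {
    let tokens = decode_output_stub();
    // D::from_tokens receives this whole Vec. With ABIEncoderV2-style struct and
    // multi-value outputs its length is often 0 or greater than 1, which is the
    // case the Detokenize change below has to handle.
    assert_eq!(tokens.len(), 2);
    println!("{:?}", tokens);
}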


@@ -31,20 +31,14 @@ impl Detokenize for () {
 }
 
 impl<T: Tokenizable> Detokenize for T {
-    fn from_tokens(mut tokens: Vec<Token>) -> Result<Self, InvalidOutputType> {
-        if tokens.len() != 1 {
-            Err(InvalidOutputType(format!(
-                "Expected single element, got a list: {:?}",
-                tokens
-            )))
-        } else {
-            Self::from_token(
-                tokens
-                    .drain(..)
-                    .next()
-                    .expect("At least one element in vector; qed"),
-            )
-        }
+    fn from_tokens(tokens: Vec<Token>) -> Result<Self, InvalidOutputType> {
+        let token = match tokens.len() {
+            0 => Token::Tuple(vec![]),
+            1 => tokens[0].clone(),
+            _ => Token::Tuple(tokens),
+        };
+
+        Self::from_token(token)
     }
 }
 
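
The new match is what makes the blanket impl ABIEncoderV2-friendly: functions with no outputs decode to an empty token list, a single output passes through untouched, and several outputs are folded into a Token::Tuple so that tuple-aware Tokenizable impls (the representation ABIEncoderV2 uses for structs and multi-value returns) can consume them. A self-contained sketch of that normalization, using a local stand-in Token enum and an illustrative helper name (normalize) rather than the crate's actual types:

// Local stand-in for the ABI Token type; not the crate's real enum.
#[derive(Debug, Clone, PartialEq)]
enum Token {
    Uint(u64),
    Bool(bool),
    Tuple(Vec<Token>),
}

// Mirrors the match added in from_tokens above:
//   0 tokens  -> empty tuple (functions with no outputs)
//   1 token   -> passed through unchanged
//   2+ tokens -> wrapped in a Tuple, the ABIEncoderV2 representation of
//                structs and multi-value returns
fn normalize(tokens: Vec<Token>) -> Token {
    match tokens.len() {
        0 => Token::Tuple(vec![]),
        1 => tokens[0].clone(),
        _ => Token::Tuple(tokens),
    }
}

fn main() {
    assert_eq!(normalize(vec![]), Token::Tuple(vec![]));
    assert_eq!(normalize(vec![Token::Uint(7)]), Token::Uint(7));
    assert_eq!(
        normalize(vec![Token::Uint(7), Token::Bool(true)]),
        Token::Tuple(vec![Token::Uint(7), Token::Bool(true)])
    );
}

Dropping the drain/expect dance in favor of tokens[0].clone() also removes the mut from the signature, trading one clone in the single-token case for simpler control flow.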