fix(tokens): make token decoding abiencoderv2 friendly

commit 41998d6d2d
parent 3d2d40699f
Author: Georgios Konstantopoulos
Date:   2020-07-03 18:51:19 +03:00
GPG Key ID: FA607837CD26EDBC (no known key found for this signature in database)

2 changed files with 8 additions and 15 deletions


@@ -104,7 +104,6 @@ where
         let bytes = self.client.call(&self.tx, self.block).await?;
         let tokens = self.function.decode_output(&bytes.0)?;
         let data = D::from_tokens(tokens)?;
         Ok(data)


@@ -31,20 +31,14 @@ impl Detokenize for () {
 }
 impl<T: Tokenizable> Detokenize for T {
-    fn from_tokens(mut tokens: Vec<Token>) -> Result<Self, InvalidOutputType> {
-        if tokens.len() != 1 {
-            Err(InvalidOutputType(format!(
-                "Expected single element, got a list: {:?}",
-                tokens
-            )))
-        } else {
-            Self::from_token(
-                tokens
-                    .drain(..)
-                    .next()
-                    .expect("At least one element in vector; qed"),
-            )
-        }
+    fn from_tokens(tokens: Vec<Token>) -> Result<Self, InvalidOutputType> {
+        let token = match tokens.len() {
+            0 => Token::Tuple(vec![]),
+            1 => tokens[0].clone(),
+            _ => Token::Tuple(tokens),
+        };
+
+        Self::from_token(token)
     }
 }
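
The new rule collapses the decoded token list into a single Token before handing it to from_token: an empty output becomes the unit tuple, a single value passes through unchanged, and multiple values are wrapped in Token::Tuple, which is how ABIEncoderV2 struct/tuple returns are represented after decoding. A minimal standalone sketch of that rule follows; the Token enum below is a stand-in for illustration only, not the actual ethabi/ethers type, so the snippet compiles on its own.

// Stand-in Token enum for illustration only; not the ethabi/ethers type.
#[derive(Clone, Debug, PartialEq)]
enum Token {
    Uint(u64),
    Tuple(Vec<Token>),
}

// Mirrors the match introduced above: zero tokens -> unit tuple,
// one token -> passed through, many tokens -> wrapped in a tuple.
fn collapse(tokens: Vec<Token>) -> Token {
    match tokens.len() {
        0 => Token::Tuple(vec![]),
        1 => tokens[0].clone(),
        _ => Token::Tuple(tokens),
    }
}

fn main() {
    assert_eq!(collapse(vec![]), Token::Tuple(vec![]));
    assert_eq!(collapse(vec![Token::Uint(1)]), Token::Uint(1));
    assert_eq!(
        collapse(vec![Token::Uint(1), Token::Uint(2)]),
        Token::Tuple(vec![Token::Uint(1), Token::Uint(2)])
    );
}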