From d3b9b378c511d4281695284361f936677b2a5186 Mon Sep 17 00:00:00 2001 From: Georgios Konstantopoulos Date: Mon, 25 May 2020 21:14:13 +0300 Subject: [PATCH] feat: add tokenization and improve contract API for events --- Cargo.lock | 1 + Cargo.toml | 1 + README.md | 8 + examples/contract.rs | 81 +++--- src/abi.rs | 2 + src/contract/contract.rs | 120 +++++++-- src/contract/mod.rs | 3 + src/contract/tokens.rs | 528 +++++++++++++++++++++++++++++++++++++++ src/types/log.rs | 6 +- src/types/mod.rs | 4 +- 10 files changed, 679 insertions(+), 75 deletions(-) create mode 100644 src/contract/tokens.rs diff --git a/Cargo.lock b/Cargo.lock index 073f9eef..b24a1f74 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -244,6 +244,7 @@ dependencies = [ name = "ethers" version = "0.1.0" dependencies = [ + "arrayvec", "async-trait", "bincode", "ethabi", diff --git a/Cargo.toml b/Cargo.toml index 7ba73c9d..42a081da 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,6 +22,7 @@ solc = { git = "https://github.com/paritytech/rust_solc "} rlp = "0.4.5" ethabi = "12.0.0" bincode = "1.2.1" +arrayvec = "0.5.1" [dev-dependencies] tokio = { version = "0.2.21", features = ["macros"] } diff --git a/README.md b/README.md index d53d5eb5..195169d4 100644 --- a/README.md +++ b/README.md @@ -12,6 +12,14 @@ Complete Ethereum wallet implementation and utilities in Rust (with WASM and FFI - [ ] Hardware wallet support - [ ] ... +## Acknowledgements + +This library would not have been possibly without the great work of the creators of [`rust-web3`]() and [`ethcontract-rs`]() + +A lot of the code was inspired and adapted from them, to a unified and opinionated interface. +That said, Rust-web3 is ~9k LoC (tests included) and ethcontract-rs is 11k lines, +so in total about 20k lines of code with tests. This library is xxx LoC. 
+ ## Examples ### Sending a transaction with an offline key diff --git a/examples/contract.rs b/examples/contract.rs index 53dd0a72..23d7e593 100644 --- a/examples/contract.rs +++ b/examples/contract.rs @@ -1,11 +1,38 @@ +use ethabi::Token; use ethers::{ - abi::ParamType, - contract::Contract, - types::{Address, Filter}, + contract::{Contract, Detokenize}, + types::Address, HttpProvider, MainnetWallet, }; +use serde::Serialize; use std::convert::TryFrom; +const ABI: &'static str = r#"[{"inputs":[{"internalType":"string","name":"value","type":"string"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"author","type":"address"},{"indexed":false,"internalType":"string","name":"oldValue","type":"string"},{"indexed":false,"internalType":"string","name":"newValue","type":"string"}],"name":"ValueChanged","type":"event"},{"inputs":[],"name":"getValue","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"string","name":"value","type":"string"}],"name":"setValue","outputs":[],"stateMutability":"nonpayable","type":"function"}]"#; + +#[derive(Clone, Debug, Serialize)] +// TODO: This should be `derive`-able on such types -> similar to how Zexe's Deserialize is done +struct ValueChanged { + author: Address, + old_value: String, + new_value: String, +} + +impl Detokenize for ValueChanged { + fn from_tokens( + tokens: Vec, + ) -> Result { + let author: Address = tokens[0].clone().to_address().unwrap(); + let old_value = tokens[1].clone().to_string().unwrap(); + let new_value = tokens[2].clone().to_string().unwrap(); + + Ok(Self { + author, + old_value, + new_value, + }) + } +} + #[tokio::main] async fn main() -> Result<(), failure::Error> { // connect to the network @@ -21,50 +48,18 @@ async fn main() -> Result<(), failure::Error> { // get the contract's address let addr = 
"683BEE23D79A1D8664dF70714edA966e1484Fd3d".parse::<Address>
()?; - // get the contract's ABI - let abi = r#"[{"inputs":[{"internalType":"string","name":"value","type":"string"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"author","type":"address"},{"indexed":false,"internalType":"string","name":"oldValue","type":"string"},{"indexed":false,"internalType":"string","name":"newValue","type":"string"}],"name":"ValueChanged","type":"event"},{"inputs":[],"name":"getValue","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"string","name":"value","type":"string"}],"name":"setValue","outputs":[],"stateMutability":"nonpayable","type":"function"}]"#; - // instantiate it - let contract = Contract::new(&client, serde_json::from_str(abi)?, addr); - - // get the args - let event = "ValueChanged(address,string,string)"; - - let args = &[ethabi::Token::String("hello!".to_owned())]; + let contract = Contract::new(&client, serde_json::from_str(ABI)?, addr); // call the method - let tx_hash = contract.method("setValue", args)?.send().await?; + let _tx_hash = contract.method("setValue", "hi".to_owned())?.send().await?; - #[derive(Clone, Debug)] - struct ValueChanged { - author: Address, - old_value: String, - new_value: String, - } + let logs: Vec = contract + .event("ValueChanged")? + .from_block(0u64) + .query() + .await?; - let filter = Filter::new().from_block(0).address(addr).event(event); - let logs = provider - .get_logs(&filter) - .await? - .into_iter() - .map(|log| { - // decode the non-indexed data - let data = ethabi::decode(&[ParamType::String, ParamType::String], log.data.as_ref())?; - - let author = log.topics[1].into(); - - // Unwrap? 
- let old_value = data[0].clone().to_string().unwrap(); - let new_value = data[1].clone().to_string().unwrap(); - - Ok(ValueChanged { - old_value, - new_value, - author, - }) - }) - .collect::, ethabi::Error>>()?; - - dbg!(logs); + println!("{}", serde_json::to_string(&logs)?); Ok(()) } diff --git a/src/abi.rs b/src/abi.rs index cc74d3bc..fb0bfc3a 100644 --- a/src/abi.rs +++ b/src/abi.rs @@ -54,6 +54,8 @@ impl EventExt for Event { } } +// Tokenization macros: Given ABI -> codegen: copy Gnosis' thing + #[cfg(test)] mod tests { use super::*; diff --git a/src/contract/contract.rs b/src/contract/contract.rs index dcc749d3..9af09b68 100644 --- a/src/contract/contract.rs +++ b/src/contract/contract.rs @@ -1,13 +1,13 @@ use crate::{ - abi::{Abi, Function, FunctionExt}, - providers::JsonRpcClient, + abi::{self, Abi, EventExt, Function, FunctionExt}, + contract::{Detokenize, Tokenize}, + providers::{JsonRpcClient, Provider}, signers::{Client, Signer}, - types::{Address, BlockNumber, Selector, TransactionRequest, H256, U256}, + types::{Address, BlockNumber, Filter, Selector, TransactionRequest, ValueOrArray, H256, U256}, }; - use rustc_hex::ToHex; use serde::Deserialize; -use std::{collections::HashMap, hash::Hash}; +use std::{collections::HashMap, fmt::Debug, hash::Hash}; /// Represents a contract instance at an address. Provides methods for /// contract interaction. @@ -20,36 +20,43 @@ pub struct Contract<'a, S, P> { /// A mapping from method signature to a name-index pair for accessing /// functions in the contract ABI. This is used to avoid allocation when /// searching for matching functions by signature. + // Adapted from: https://github.com/gnosis/ethcontract-rs/blob/master/src/contract.rs methods: HashMap, - - /// A mapping from event signature to a name-index pair for resolving - /// events in the contract ABI. 
- events: HashMap, } impl<'a, S, P> Contract<'a, S, P> { /// Creates a new contract from the provided client, abi and address pub fn new(client: &'a Client<'a, S, P>, abi: Abi, address: Address) -> Self { let methods = create_mapping(&abi.functions, |function| function.selector()); - let events = create_mapping(&abi.events, |event| event.signature()); Self { client, abi, address, methods, - events, } } /// Returns a transaction builder for the provided function name. If there are /// multiple functions with the same name due to overloading, consider using /// the `method_hash` method instead, since this will use the first match. - pub fn method( - &self, - name: &str, - args: &[ethabi::Token], - ) -> Result, ethabi::Error> { + pub fn event<'b>(&'a self, name: &str) -> Result, abi::Error> + where + 'a: 'b, + { + // get the event's full name + let event = self.abi.event(name)?; + Ok(Event { + provider: &self.client.provider, + filter: Filter::new().event(&event.abi_signature()), + event: &event, + }) + } + + /// Returns a transaction builder for the provided function name. If there are + /// multiple functions with the same name due to overloading, consider using + /// the `method_hash` method instead, since this will use the first match. + pub fn method(&self, name: &str, args: T) -> Result, abi::Error> { // get the function let function = self.abi.function(name)?; self.method_func(function, args) @@ -57,26 +64,26 @@ impl<'a, S, P> Contract<'a, S, P> { /// Returns a transaction builder for the selected function signature. 
This should be /// preferred if there are overloaded functions in your smart contract - pub fn method_hash( + pub fn method_hash( &self, signature: Selector, - args: &[ethabi::Token], - ) -> Result, ethabi::Error> { + args: T, + ) -> Result, abi::Error> { let function = self .methods .get(&signature) .map(|(name, index)| &self.abi.functions[name][*index]) - .ok_or_else(|| ethabi::Error::InvalidName(signature.to_hex::()))?; + .ok_or_else(|| abi::Error::InvalidName(signature.to_hex::()))?; self.method_func(function, args) } - fn method_func( + fn method_func( &self, function: &Function, - args: &[ethabi::Token], - ) -> Result, ethabi::Error> { + args: T, + ) -> Result, abi::Error> { // create the calldata - let data = function.encode_input(args)?; + let data = function.encode_input(&args.into_tokens())?; // create the tx object let tx = TransactionRequest { @@ -99,9 +106,6 @@ impl<'a, S, P> Contract<'a, S, P> { pub fn abi(&self) -> &Abi { &self.abi } - - // call events - // deploy } pub struct Sender<'a, S, P> { @@ -146,6 +150,68 @@ impl<'a, S: Signer, P: JsonRpcClient> Sender<'a, S, P> { } } +pub struct Event<'a, 'b, P> { + filter: Filter, + provider: &'a Provider
<P>
, + event: &'b abi::Event, +} + +// copy of the builder pattern from Filter +impl<'a, 'b, P> Event<'a, 'b, P> { + pub fn from_block>(mut self, block: T) -> Self { + self.filter.from_block = Some(block.into()); + self + } + + pub fn to_block>(mut self, block: T) -> Self { + self.filter.to_block = Some(block.into()); + self + } + + pub fn topic>>(mut self, topic: T) -> Self { + self.filter.topics.push(topic.into()); + self + } + + pub fn topics(mut self, topics: &[ValueOrArray]) -> Self { + self.filter.topics.extend_from_slice(topics); + self + } +} + +impl<'a, 'b, P: JsonRpcClient> Event<'a, 'b, P> { + pub async fn query(self) -> Result, P::Error> { + // get the logs + let logs = self.provider.get_logs(&self.filter).await?; + + let events = logs + .into_iter() + .map(|log| { + // ethabi parses the unindexed and indexed logs together to a + // vector of tokens + let tokens = self + .event + .parse_log(abi::RawLog { + topics: log.topics, + data: log.data.0, + }) + .unwrap() // TODO: remove + .params + .into_iter() + .map(|param| param.value) + .collect::>(); + + // convert the tokens to the requested datatype + T::from_tokens(tokens).unwrap() + }) + .collect::>(); + + Ok(events) + } +} + +// Helpers + /// Utility function for creating a mapping between a unique signature and a /// name-index pair for accessing contract ABI items. fn create_mapping( diff --git a/src/contract/mod.rs b/src/contract/mod.rs index dd0ebb98..98ea0efa 100644 --- a/src/contract/mod.rs +++ b/src/contract/mod.rs @@ -1,2 +1,5 @@ mod contract; pub use contract::Contract; + +mod tokens; +pub use tokens::{Detokenize, InvalidOutputType, Tokenize}; diff --git a/src/contract/tokens.rs b/src/contract/tokens.rs new file mode 100644 index 00000000..14488d26 --- /dev/null +++ b/src/contract/tokens.rs @@ -0,0 +1,528 @@ +//! Contract Functions Output types. +//! 
Adapted from: https://github.com/tomusdrw/rust-web3/blob/master/src/contract/tokens.rs + +use crate::types::{Address, Bytes, H256, U128, U256}; +use arrayvec::ArrayVec; +use ethabi::Token; +use thiserror::Error; + +#[derive(Clone, Debug, Error)] +#[error("{0}")] +pub struct InvalidOutputType(String); + +/// Output type possible to deserialize from Contract ABI +pub trait Detokenize { + /// Creates a new instance from parsed ABI tokens. + fn from_tokens(tokens: Vec) -> Result + where + Self: Sized; +} + +impl Detokenize for T { + fn from_tokens(mut tokens: Vec) -> Result { + if tokens.len() != 1 { + Err(InvalidOutputType(format!( + "Expected single element, got a list: {:?}", + tokens + ))) + } else { + Self::from_token( + tokens + .drain(..) + .next() + .expect("At least one element in vector; qed"), + ) + } + } +} + +macro_rules! impl_output { + ($num: expr, $( $ty: ident , )+) => { + impl<$($ty, )+> Detokenize for ($($ty,)+) where + $( + $ty: Tokenizable, + )+ + { + fn from_tokens(mut tokens: Vec) -> Result { + if tokens.len() != $num { + return Err(InvalidOutputType(format!( + "Expected {} elements, got a list of {}: {:?}", + $num, + tokens.len(), + tokens + ))); + } + let mut it = tokens.drain(..); + Ok(($( + $ty::from_token(it.next().expect("All elements are in vector; qed"))?, + )+)) + } + } + } +} + +impl_output!(1, A,); +impl_output!(2, A, B,); +impl_output!(3, A, B, C,); +impl_output!(4, A, B, C, D,); +// impl_output!(5, A, B, C, D, E,); +// impl_output!(6, A, B, C, D, E, F,); +// impl_output!(7, A, B, C, D, E, F, G,); +// impl_output!(8, A, B, C, D, E, F, G, H,); +// impl_output!(9, A, B, C, D, E, F, G, H, I,); +// impl_output!(10, A, B, C, D, E, F, G, H, I, J,); +// impl_output!(11, A, B, C, D, E, F, G, H, I, J, K,); +// impl_output!(12, A, B, C, D, E, F, G, H, I, J, K, L,); +// impl_output!(13, A, B, C, D, E, F, G, H, I, J, K, L, M,); +// impl_output!(14, A, B, C, D, E, F, G, H, I, J, K, L, M, N,); +// impl_output!(15, A, B, C, D, E, F, G, H, I, J, K, 
L, M, N, O,); +// impl_output!(16, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P,); + +/// Tokens conversion trait +pub trait Tokenize { + /// Convert to list of tokens + fn into_tokens(self) -> Vec; +} + +impl<'a> Tokenize for &'a [Token] { + fn into_tokens(self) -> Vec { + self.to_vec() + } +} + +impl Tokenize for T { + fn into_tokens(self) -> Vec { + vec![self.into_token()] + } +} + +impl Tokenize for () { + fn into_tokens(self) -> Vec { + vec![] + } +} + +macro_rules! impl_tokens { + ($( $ty: ident : $no: tt, )+) => { + impl<$($ty, )+> Tokenize for ($($ty,)+) where + $( + $ty: Tokenizable, + )+ + { + fn into_tokens(self) -> Vec { + vec![ + $( self.$no.into_token(), )+ + ] + } + } + } +} + +impl_tokens!(A:0, ); +impl_tokens!(A:0, B:1, ); +impl_tokens!(A:0, B:1, C:2, ); +impl_tokens!(A:0, B:1, C:2, D:3, ); + +// Commented out macros to reduce codegen time. Re-enable if needed. +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, ); +// impl_tokens!(A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, ); + +/// Simplified output type for single value. +pub trait Tokenizable { + /// Converts a `Token` into expected type. 
+ fn from_token(token: Token) -> Result + where + Self: Sized; + /// Converts a specified type back into token. + fn into_token(self) -> Token; +} + +impl Tokenizable for Token { + fn from_token(token: Token) -> Result { + Ok(token) + } + fn into_token(self) -> Token { + self + } +} + +impl Tokenizable for String { + fn from_token(token: Token) -> Result { + match token { + Token::String(s) => Ok(s), + other => Err(InvalidOutputType(format!( + "Expected `String`, got {:?}", + other + ))), + } + } + + fn into_token(self) -> Token { + Token::String(self) + } +} + +impl Tokenizable for Bytes { + fn from_token(token: Token) -> Result { + match token { + Token::Bytes(s) => Ok(s.into()), + other => Err(InvalidOutputType(format!( + "Expected `Bytes`, got {:?}", + other + ))), + } + } + + fn into_token(self) -> Token { + Token::Bytes(self.0) + } +} + +impl Tokenizable for H256 { + fn from_token(token: Token) -> Result { + match token { + Token::FixedBytes(mut s) => { + if s.len() != 32 { + return Err(InvalidOutputType(format!("Expected `H256`, got {:?}", s))); + } + let mut data = [0; 32]; + for (idx, val) in s.drain(..).enumerate() { + data[idx] = val; + } + Ok(data.into()) + } + other => Err(InvalidOutputType(format!( + "Expected `H256`, got {:?}", + other + ))), + } + } + + fn into_token(self) -> Token { + Token::FixedBytes(self.as_ref().to_vec()) + } +} + +impl Tokenizable for Address { + fn from_token(token: Token) -> Result { + match token { + Token::Address(data) => Ok(data), + other => Err(InvalidOutputType(format!( + "Expected `Address`, got {:?}", + other + ))), + } + } + + fn into_token(self) -> Token { + Token::Address(self) + } +} + +macro_rules! 
eth_uint_tokenizable { + ($uint: ident, $name: expr) => { + impl Tokenizable for $uint { + fn from_token(token: Token) -> Result { + match token { + Token::Int(data) | Token::Uint(data) => { + Ok(::std::convert::TryInto::try_into(data).unwrap()) + } + other => Err(InvalidOutputType(format!( + "Expected `{}`, got {:?}", + $name, other + )) + .into()), + } + } + + fn into_token(self) -> Token { + Token::Uint(self.into()) + } + } + }; +} + +eth_uint_tokenizable!(U256, "U256"); +eth_uint_tokenizable!(U128, "U128"); + +macro_rules! int_tokenizable { + ($int: ident, $token: ident) => { + impl Tokenizable for $int { + fn from_token(token: Token) -> Result { + match token { + Token::Int(data) | Token::Uint(data) => Ok(data.low_u128() as _), + other => Err(InvalidOutputType(format!( + "Expected `{}`, got {:?}", + stringify!($int), + other + ))), + } + } + + fn into_token(self) -> Token { + // this should get optimized away by the compiler for unsigned integers + #[allow(unused_comparisons)] + let data = if self < 0 { + // NOTE: Rust does sign extension when converting from a + // signed integer to an unsigned integer, so: + // `-1u8 as u128 == u128::max_value()` + U256::from(self as u128) | U256([0, 0, u64::max_value(), u64::max_value()]) + } else { + self.into() + }; + Token::$token(data) + } + } + }; +} + +int_tokenizable!(i8, Int); +int_tokenizable!(i16, Int); +int_tokenizable!(i32, Int); +int_tokenizable!(i64, Int); +int_tokenizable!(i128, Int); +int_tokenizable!(u8, Uint); +int_tokenizable!(u16, Uint); +int_tokenizable!(u32, Uint); +int_tokenizable!(u64, Uint); +int_tokenizable!(u128, Uint); + +impl Tokenizable for bool { + fn from_token(token: Token) -> Result { + match token { + Token::Bool(data) => Ok(data), + other => Err(InvalidOutputType(format!( + "Expected `bool`, got {:?}", + other + ))), + } + } + fn into_token(self) -> Token { + Token::Bool(self) + } +} + +/// Marker trait for `Tokenizable` types that are can tokenized to and from a +/// `Token::Array` and 
`Token:FixedArray`. +pub trait TokenizableItem: Tokenizable {} + +macro_rules! tokenizable_item { + ($($type: ty,)*) => { + $( + impl TokenizableItem for $type {} + )* + }; +} + +tokenizable_item! { + Token, String, Address, H256, U256, U128, bool, Vec, + i8, i16, i32, i64, i128, u16, u32, u64, u128, +} + +impl Tokenizable for Vec { + fn from_token(token: Token) -> Result { + match token { + Token::Bytes(data) => Ok(data), + Token::FixedBytes(data) => Ok(data), + other => Err(InvalidOutputType(format!( + "Expected `bytes`, got {:?}", + other + ))), + } + } + fn into_token(self) -> Token { + Token::Bytes(self) + } +} + +impl Tokenizable for Vec { + fn from_token(token: Token) -> Result { + match token { + Token::FixedArray(tokens) | Token::Array(tokens) => { + tokens.into_iter().map(Tokenizable::from_token).collect() + } + other => Err(InvalidOutputType(format!( + "Expected `Array`, got {:?}", + other + ))), + } + } + + fn into_token(self) -> Token { + Token::Array(self.into_iter().map(Tokenizable::into_token).collect()) + } +} + +impl TokenizableItem for Vec {} + +macro_rules! 
impl_fixed_types { + ($num: expr) => { + impl Tokenizable for [u8; $num] { + fn from_token(token: Token) -> Result { + match token { + Token::FixedBytes(bytes) => { + if bytes.len() != $num { + return Err(InvalidOutputType(format!( + "Expected `FixedBytes({})`, got FixedBytes({})", + $num, + bytes.len() + ))); + } + + let mut arr = [0; $num]; + arr.copy_from_slice(&bytes); + Ok(arr) + } + other => Err(InvalidOutputType(format!( + "Expected `FixedBytes({})`, got {:?}", + $num, other + )) + .into()), + } + } + + fn into_token(self) -> Token { + Token::FixedBytes(self.to_vec()) + } + } + + impl TokenizableItem for [u8; $num] {} + + impl Tokenizable for [T; $num] { + fn from_token(token: Token) -> Result { + match token { + Token::FixedArray(tokens) => { + if tokens.len() != $num { + return Err(InvalidOutputType(format!( + "Expected `FixedArray({})`, got FixedArray({})", + $num, + tokens.len() + ))); + } + + let mut arr = ArrayVec::<[T; $num]>::new(); + let mut it = tokens.into_iter().map(T::from_token); + for _ in 0..$num { + arr.push(it.next().expect("Length validated in guard; qed")?); + } + // Can't use expect here because [T; $num]: Debug is not satisfied. 
+ match arr.into_inner() { + Ok(arr) => Ok(arr), + Err(_) => panic!("All elements inserted so the array is full; qed"), + } + } + other => Err(InvalidOutputType(format!( + "Expected `FixedArray({})`, got {:?}", + $num, other + )) + .into()), + } + } + + fn into_token(self) -> Token { + Token::FixedArray( + ArrayVec::from(self) + .into_iter() + .map(T::into_token) + .collect(), + ) + } + } + + impl TokenizableItem for [T; $num] {} + }; +} + +impl_fixed_types!(1); +impl_fixed_types!(2); +impl_fixed_types!(3); +impl_fixed_types!(4); +impl_fixed_types!(5); +impl_fixed_types!(6); +impl_fixed_types!(7); +impl_fixed_types!(8); +impl_fixed_types!(9); +impl_fixed_types!(10); +impl_fixed_types!(11); +impl_fixed_types!(12); +impl_fixed_types!(13); +impl_fixed_types!(14); +impl_fixed_types!(15); +impl_fixed_types!(16); +impl_fixed_types!(32); +impl_fixed_types!(64); +impl_fixed_types!(128); +impl_fixed_types!(256); +impl_fixed_types!(512); +impl_fixed_types!(1024); + +#[cfg(test)] +mod tests { + use super::{Detokenize, Tokenizable}; + use crate::types::{Address, U256}; + use ethabi::Token; + + fn output() -> R { + unimplemented!() + } + + #[test] + #[ignore] + fn should_be_able_to_compile() { + let _tokens: Vec = output(); + let _uint: U256 = output(); + let _address: Address = output(); + let _string: String = output(); + let _bool: bool = output(); + let _bytes: Vec = output(); + + let _pair: (U256, bool) = output(); + let _vec: Vec = output(); + let _array: [U256; 4] = output(); + let _bytes: Vec<[[u8; 1]; 64]> = output(); + + let _mixed: (Vec>, [U256; 4], Vec, U256) = output(); + + let _ints: (i16, i32, i64, i128) = output(); + let _uints: (u16, u32, u64, u128) = output(); + } + + #[test] + fn should_decode_array_of_fixed_bytes() { + // byte[8][] + let tokens = vec![Token::FixedArray(vec![ + Token::FixedBytes(vec![1]), + Token::FixedBytes(vec![2]), + Token::FixedBytes(vec![3]), + Token::FixedBytes(vec![4]), + Token::FixedBytes(vec![5]), + Token::FixedBytes(vec![6]), + 
Token::FixedBytes(vec![7]), + Token::FixedBytes(vec![8]), + ])]; + let data: [[u8; 1]; 8] = Detokenize::from_tokens(tokens).unwrap(); + assert_eq!(data[0][0], 1); + assert_eq!(data[1][0], 2); + assert_eq!(data[2][0], 3); + assert_eq!(data[7][0], 8); + } + + #[test] + fn should_sign_extend_negative_integers() { + assert_eq!((-1i8).into_token(), Token::Int(U256::MAX)); + assert_eq!((-2i16).into_token(), Token::Int(U256::MAX - 1)); + assert_eq!((-3i32).into_token(), Token::Int(U256::MAX - 2)); + assert_eq!((-4i64).into_token(), Token::Int(U256::MAX - 3)); + assert_eq!((-5i128).into_token(), Token::Int(U256::MAX - 4)); + } +} diff --git a/src/types/log.rs b/src/types/log.rs index 0876fc29..18cbe9e3 100644 --- a/src/types/log.rs +++ b/src/types/log.rs @@ -67,11 +67,11 @@ pub struct Log { pub struct Filter { /// From Block #[serde(rename = "fromBlock", skip_serializing_if = "Option::is_none")] - from_block: Option, + pub(crate) from_block: Option, /// To Block #[serde(rename = "toBlock", skip_serializing_if = "Option::is_none")] - to_block: Option, + pub(crate) to_block: Option, /// Address #[serde(skip_serializing_if = "Option::is_none")] @@ -82,7 +82,7 @@ pub struct Filter { /// Topics #[serde(skip_serializing_if = "Vec::is_empty")] // TODO: Split in an event name + 3 topics - topics: Vec>, + pub(crate) topics: Vec>, /// Limit #[serde(skip_serializing_if = "Option::is_none")] diff --git a/src/types/mod.rs b/src/types/mod.rs index e89a8744..7f45fa0d 100644 --- a/src/types/mod.rs +++ b/src/types/mod.rs @@ -4,7 +4,7 @@ pub type Selector = [u8; 4]; // Re-export common ethereum datatypes with more specific names pub use ethereum_types::H256 as TxHash; -pub use ethereum_types::{Address, Bloom, H256, U256, U64}; +pub use ethereum_types::{Address, Bloom, H256, U128, U256, U64}; mod transaction; pub use transaction::{Overrides, Transaction, TransactionReceipt, TransactionRequest}; @@ -22,4 +22,4 @@ mod block; pub use block::{Block, BlockId, BlockNumber}; mod log; -pub use 
log::{Filter, Log}; +pub use log::{Filter, Log, ValueOrArray};