ethcontract/
tokens.rs

1//! Tokenization related functionality allowing rust types to be mapped to solidity types.
2
3// This file is based on https://github.com/tomusdrw/rust-web3/blob/e6d044a28458be9a3ee31108475d787e0440ce8b/src/contract/tokens.rs .
4// Generated contract bindings should operate on native rust types for ease of use. To encode them
5// with ethabi we need to map them to ethabi tokens. Tokenize does this for base types like
6// u32 and compounds of other Tokenize in the form of vectors, arrays and tuples.
7//
8// In some cases like when passing arguments to `MethodBuilder` or decoding events we need to be
9// able to pack multiple types into a single generic parameter. This is accomplished by representing
10// the collection of arguments as a tuple.
11//
12// A completely different approach could be to avoid using the trait system and instead encode all
13// rust types into tokens directly in the ethcontract generated bindings.
14
15use crate::I256;
16use arrayvec::ArrayVec;
17use ethcontract_common::{abi::Token, TransactionHash};
18use serde::{Deserialize, Serialize};
19use web3::types::{Address, U256};
20
/// A tokenization related error.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// The token variant does not match the rust type expected by
    /// `Tokenize::from_token` (e.g. a `Token::Bool` where a string was
    /// expected).
    #[error("expected a different token type")]
    TypeMismatch,
    /// `Tokenize::from_token` was called with an abi integer that does not
    /// fit in the target rust integer type.
    #[error("abi integer does not fit rust integer")]
    IntegerMismatch,
    /// The token is `Token::FixedBytes` with a length different from the
    /// rust fixed-size byte array.
    #[error("expected a different number of fixed bytes")]
    FixedBytesLengthsMismatch,
    /// The token is `Token::FixedArray` with an element count different from
    /// the rust fixed-size array.
    #[error("expected a different number of tokens in fixed array")]
    FixedArrayLengthsMismatch,
    /// The token is `Token::Tuple` with an element count different from the
    /// rust tuple.
    #[error("expected a different number of tokens in tuple")]
    TupleLengthMismatch,
}
40
/// Rust type and single token conversion.
pub trait Tokenize {
    /// Convert a token into Self, failing when the token's variant or shape
    /// does not match the rust type.
    fn from_token(token: Token) -> Result<Self, Error>
    where
        Self: Sized;

    /// Convert self into a token.
    fn into_token(self) -> Token;
}
51
/// Wrapper around `Vec<u8>` and `[u8; N]` representing
/// `Token::{Bytes, FixedBytes}`. Distinguishes a solidity `bytes`/`bytesN`
/// value from a list of `u8` (which tokenizes as an array of uints instead).
#[derive(
    Clone, Copy, Debug, Default, Deserialize, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize,
)]
pub struct Bytes<T>(pub T);
58
59impl Tokenize for Token {
60    fn from_token(token: Token) -> Result<Self, Error>
61    where
62        Self: Sized,
63    {
64        Ok(token)
65    }
66
67    fn into_token(self) -> Token {
68        self
69    }
70}
71
72impl Tokenize for Bytes<Vec<u8>> {
73    fn from_token(token: Token) -> Result<Self, Error>
74    where
75        Self: Sized,
76    {
77        match token {
78            Token::Bytes(bytes) => Ok(Self(bytes)),
79            _ => Err(Error::TypeMismatch),
80        }
81    }
82
83    fn into_token(self) -> Token {
84        Token::Bytes(self.0)
85    }
86}
87
88impl<const N: usize> Tokenize for Bytes<[u8; N]> {
89    fn from_token(token: Token) -> Result<Self, Error> {
90        match token {
91            Token::FixedBytes(bytes) => bytes
92                .try_into()
93                .map(Self)
94                .map_err(|_| Error::FixedBytesLengthsMismatch),
95            _ => Err(Error::TypeMismatch),
96        }
97    }
98
99    fn into_token(self) -> Token {
100        Token::FixedBytes(self.0.to_vec())
101    }
102}
103
104impl Tokenize for String {
105    fn from_token(token: Token) -> Result<Self, Error> {
106        match token {
107            Token::String(s) => Ok(s),
108            _ => Err(Error::TypeMismatch),
109        }
110    }
111
112    fn into_token(self) -> Token {
113        Token::String(self)
114    }
115}
116
117impl Tokenize for Address {
118    fn from_token(token: Token) -> Result<Self, Error> {
119        match token {
120            Token::Address(data) => Ok(data),
121            _ => Err(Error::TypeMismatch),
122        }
123    }
124
125    fn into_token(self) -> Token {
126        Token::Address(self)
127    }
128}
129
130impl Tokenize for U256 {
131    fn from_token(token: Token) -> Result<Self, Error> {
132        match token {
133            Token::Uint(u256) => Ok(u256),
134            _ => Err(Error::TypeMismatch),
135        }
136    }
137
138    fn into_token(self) -> Token {
139        Token::Uint(self)
140    }
141}
142
143impl Tokenize for I256 {
144    fn from_token(token: Token) -> Result<Self, Error> {
145        match token {
146            Token::Int(u256) => Ok(Self::from_raw(u256)),
147            _ => Err(Error::TypeMismatch),
148        }
149    }
150
151    fn into_token(self) -> Token {
152        Token::Int(self.into_raw())
153    }
154}
155
156impl Tokenize for TransactionHash {
157    fn from_token(token: Token) -> Result<Self, Error>
158    where
159        Self: Sized,
160    {
161        let bytes = Bytes::from_token(token)?;
162        Ok(Self(bytes.0))
163    }
164
165    fn into_token(self) -> Token {
166        Bytes(self.0).into_token()
167    }
168}
169
170macro_rules! uint_tokenize {
171    ($int: ident, $token: ident) => {
172        impl Tokenize for $int {
173            fn from_token(token: Token) -> Result<Self, Error> {
174                let u256 = match token {
175                    Token::Uint(u256) => u256,
176                    _ => return Err(Error::TypeMismatch),
177                };
178                u256.try_into().map_err(|_| Error::IntegerMismatch)
179            }
180
181            fn into_token(self) -> Token {
182                Token::Uint(self.into())
183            }
184        }
185    };
186}
187
188macro_rules! int_tokenize {
189    ($int: ident, $token: ident) => {
190        impl Tokenize for $int {
191            fn from_token(token: Token) -> Result<Self, Error> {
192                let u256 = match token {
193                    Token::Int(u256) => u256,
194                    _ => return Err(Error::TypeMismatch),
195                };
196                let i256 = I256::from_raw(u256);
197                i256.try_into().map_err(|_| Error::IntegerMismatch)
198            }
199
200            fn into_token(self) -> Token {
201                Token::Int(I256::from(self).into_raw())
202            }
203        }
204    };
205}
206
207int_tokenize!(i8, Int);
208int_tokenize!(i16, Int);
209int_tokenize!(i32, Int);
210int_tokenize!(i64, Int);
211int_tokenize!(i128, Int);
212uint_tokenize!(u8, Uint);
213uint_tokenize!(u16, Uint);
214uint_tokenize!(u32, Uint);
215uint_tokenize!(u64, Uint);
216uint_tokenize!(u128, Uint);
217
218impl Tokenize for bool {
219    fn from_token(token: Token) -> Result<Self, Error> {
220        match token {
221            Token::Bool(data) => Ok(data),
222            _ => Err(Error::TypeMismatch),
223        }
224    }
225
226    fn into_token(self) -> Token {
227        Token::Bool(self)
228    }
229}
230
231impl<T, const N: usize> Tokenize for [T; N]
232where
233    T: Tokenize,
234{
235    fn from_token(token: Token) -> Result<Self, Error>
236    where
237        Self: Sized,
238    {
239        let tokens = match token {
240            Token::FixedArray(tokens) => tokens,
241            _ => return Err(Error::TypeMismatch),
242        };
243        let arr_vec = tokens
244            .into_iter()
245            .map(T::from_token)
246            .collect::<Result<ArrayVec<T, N>, _>>()?;
247        arr_vec
248            .into_inner()
249            .map_err(|_| Error::FixedArrayLengthsMismatch)
250    }
251
252    fn into_token(self) -> Token {
253        Token::FixedArray(
254            ArrayVec::from(self)
255                .into_iter()
256                .map(T::into_token)
257                .collect(),
258        )
259    }
260}
261
262impl<T> Tokenize for Vec<T>
263where
264    T: Tokenize,
265{
266    fn from_token(token: Token) -> Result<Self, Error> {
267        match token {
268            Token::Array(tokens) => tokens.into_iter().map(Tokenize::from_token).collect(),
269            _ => Err(Error::TypeMismatch),
270        }
271    }
272
273    fn into_token(self) -> Token {
274        Token::Array(self.into_iter().map(Tokenize::into_token).collect())
275    }
276}
277
// Implements `Tokenize` for a tuple of `$count` elements, each of which must
// itself implement `Tokenize`. A tuple maps to `Token::Tuple` with one inner
// token per element, in order.
macro_rules! impl_single_tokenize_for_tuple {
    ($count: expr, $( $ty: ident : $no: tt, )*) => {
        impl<$($ty, )*> Tokenize for ($($ty,)*)
        where
            $($ty: Tokenize,)*
        {
            fn from_token(token: Token) -> Result<Self, Error>
            {
                let tokens = match token {
                    Token::Tuple(tokens) => tokens,
                    _ => return Err(Error::TypeMismatch),
                };
                // The length must match exactly; this also guarantees the
                // `unwrap` below cannot panic.
                if tokens.len() != $count {
                    return Err(Error::TupleLengthMismatch);
                }
                // The `allow`s cover the 0-tuple instantiation, where the
                // repetition is empty and `drain` goes unused.
                #[allow(unused_variables)]
                #[allow(unused_mut)]
                let mut drain = tokens.into_iter();
                Ok(($($ty::from_token(drain.next().unwrap())?,)*))
            }

            fn into_token(self) -> Token {
                Token::Tuple(vec![$(self.$no.into_token(),)*])
            }
        }
    }
}
305
// Tuples of up to 18 elements are supported, which bounds the number of
// arguments/outputs a generated binding can pack into a single type.
impl_single_tokenize_for_tuple!(0,);
impl_single_tokenize_for_tuple!(1, A:0, );
impl_single_tokenize_for_tuple!(2, A:0, B:1, );
impl_single_tokenize_for_tuple!(3, A:0, B:1, C:2, );
impl_single_tokenize_for_tuple!(4, A:0, B:1, C:2, D:3, );
impl_single_tokenize_for_tuple!(5, A:0, B:1, C:2, D:3, E:4, );
impl_single_tokenize_for_tuple!(6, A:0, B:1, C:2, D:3, E:4, F:5, );
impl_single_tokenize_for_tuple!(7, A:0, B:1, C:2, D:3, E:4, F:5, G:6, );
impl_single_tokenize_for_tuple!(8, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, );
impl_single_tokenize_for_tuple!(9, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, );
impl_single_tokenize_for_tuple!(10, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, );
impl_single_tokenize_for_tuple!(11, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, );
impl_single_tokenize_for_tuple!(12, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, );
impl_single_tokenize_for_tuple!(13, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, );
impl_single_tokenize_for_tuple!(14, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, );
impl_single_tokenize_for_tuple!(15, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, );
impl_single_tokenize_for_tuple!(16, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, );
impl_single_tokenize_for_tuple!(17, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, Q: 16, );
impl_single_tokenize_for_tuple!(18, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, Q: 16, R: 17, );
325
#[cfg(test)]
mod tests {
    use super::*;

    /// Asserts that tokenizing `value` and decoding the result yields the
    /// original value back.
    fn assert_single_tokenize_roundtrip<T>(value: T)
    where
        T: Tokenize + Clone + std::fmt::Debug + Eq,
    {
        assert_eq!(value, T::from_token(value.clone().into_token()).unwrap());
    }

    // Round-trips every supported base type, including the extreme values of
    // the primitive integers.
    #[test]
    fn single_tokenize_roundtrip() {
        assert_single_tokenize_roundtrip(u8::MIN);
        assert_single_tokenize_roundtrip(u8::MAX);
        assert_single_tokenize_roundtrip(i8::MIN);
        assert_single_tokenize_roundtrip(i8::MAX);
        assert_single_tokenize_roundtrip(u16::MIN);
        assert_single_tokenize_roundtrip(i16::MAX);
        assert_single_tokenize_roundtrip(u32::MIN);
        assert_single_tokenize_roundtrip(i32::MAX);
        assert_single_tokenize_roundtrip(u64::MIN);
        assert_single_tokenize_roundtrip(i64::MAX);
        assert_single_tokenize_roundtrip(u128::MIN);
        assert_single_tokenize_roundtrip(i128::MAX);
        assert_single_tokenize_roundtrip(U256::zero());
        assert_single_tokenize_roundtrip(U256::MAX);
        assert_single_tokenize_roundtrip(I256::MIN);
        assert_single_tokenize_roundtrip(I256::MAX);
        assert_single_tokenize_roundtrip(false);
        assert_single_tokenize_roundtrip(true);
        assert_single_tokenize_roundtrip("abcd".to_string());
        assert_single_tokenize_roundtrip(vec![0u8, 1u8, 2u8]);
        assert_single_tokenize_roundtrip([0u8, 1u8, 2u8]);
        assert_single_tokenize_roundtrip(Bytes(vec![0u8, 1u8, 2u8]));
        assert_single_tokenize_roundtrip(Bytes([0u8, 1u8, 2u8]));
        assert_single_tokenize_roundtrip(Address::from_low_u64_be(42));
        assert_single_tokenize_roundtrip(TransactionHash::from_low_u64_be(42));
        assert_single_tokenize_roundtrip(());
        assert_single_tokenize_roundtrip((-1i8, 1i8));
        assert_single_tokenize_roundtrip([-1i8, 1i8]);
    }

    // The `Bytes` wrapper must select the bytes token variants, while plain
    // u8 collections tokenize as (fixed) arrays of uints.
    #[test]
    fn tokenize_bytes() {
        assert!(matches!([0u8].into_token(), Token::FixedArray(_)));
        assert!(matches!(vec![0u8].into_token(), Token::Array(_)));
        assert!(matches!(Bytes([0u8]).into_token(), Token::FixedBytes(_)));
        assert!(matches!(Bytes(vec![0u8]).into_token(), Token::Bytes(_)));
    }

    // Nested compound types (tuple of vec of array of tuple) tokenize
    // recursively and round-trip.
    #[test]
    fn complex() {
        let rust = (vec![[(0u8, 1i8)]], false);
        let token = Token::Tuple(vec![
            Token::Array(vec![Token::FixedArray(vec![Token::Tuple(vec![
                Token::Uint(0.into()),
                Token::Int(1.into()),
            ])])]),
            Token::Bool(false),
        ]);
        assert_eq!(rust.clone().into_token(), token);
        assert_single_tokenize_roundtrip(rust);
    }

    // I256 uses the raw two's complement bits: -1 encodes as all-ones.
    #[test]
    fn i256_tokenization() {
        assert_eq!(I256::from(42).into_token(), 42i32.into_token());
        assert_eq!(I256::minus_one().into_token(), Token::Int(U256::MAX),);
        assert_eq!(
            I256::from_token(Token::Int(U256::MAX)).unwrap(),
            I256::minus_one()
        );

        assert_eq!(
            I256::from_token(42i32.into_token()).unwrap(),
            I256::from(42),
        );
    }
}