// zksync_basic_types/web3/contract.rs

//! Serialization logic allowing to convert between [`ethabi::Token`]s and domain types, such as `H256`,
//! `U256` and `Address`.
//!
//! The majority of the code is copied from the `web3` crate 0.19.0, <https://github.com/tomusdrw/rust-web3>,
//! licensed under the MIT open-source license.

7#[derive(Debug, thiserror::Error)]
8pub enum Error {
9    #[error("invalid output type: {0}")]
10    InvalidOutputType(String),
11    #[error("{0}")]
12    Other(String),
13}
14
15use crate::{H160, H256, U256};
16
17pub trait Detokenize: Sized {
18    fn from_tokens(tokens: Vec<ethabi::Token>) -> Result<Self, Error>;
19}
20
21impl<T: Tokenizable> Detokenize for T {
22    fn from_tokens(mut tokens: Vec<ethabi::Token>) -> Result<Self, Error> {
23        if tokens.len() != 1 {
24            return Err(Error::InvalidOutputType(format!(
25                "expected array with 1 token, got {tokens:?}"
26            )));
27        }
28        Self::from_token(tokens.pop().unwrap())
29    }
30}
31
32pub trait Tokenize {
33    fn into_tokens(self) -> Vec<ethabi::Token>;
34}
35
36impl<T: Tokenizable> Tokenize for T {
37    fn into_tokens(self) -> Vec<ethabi::Token> {
38        vec![self.into_token()]
39    }
40}
41
42impl Tokenize for () {
43    fn into_tokens(self) -> Vec<ethabi::Token> {
44        vec![]
45    }
46}
47
/// Implements [`Tokenize`] for a tuple type, mapping each element through
/// [`Tokenizable::into_token`] in positional order.
macro_rules! impl_tokenize_for_tuple {
    ($($i:tt : $name:ident),+) => {
        impl<$($name,)+> Tokenize for ($($name,)+)
        where
            $($name: Tokenizable,)+
        {
            fn into_tokens(self) -> Vec<ethabi::Token> {
                vec![$(self.$i.into_token(),)+]
            }
        }
    };
}
60
61impl_tokenize_for_tuple!(0: A);
62impl_tokenize_for_tuple!(0: A, 1: B);
63impl_tokenize_for_tuple!(0: A, 1: B, 2: C);
64impl_tokenize_for_tuple!(0: A, 1: B, 2: C, 3: D);
65impl_tokenize_for_tuple!(0: A, 1: B, 2: C, 3: D, 4: E);
66impl_tokenize_for_tuple!(0: A, 1: B, 2: C, 3: D, 4: E, 5: F);
67impl_tokenize_for_tuple!(0: A, 1: B, 2: C, 3: D, 4: E, 5: F, 6: G);
68impl_tokenize_for_tuple!(0: A, 1: B, 2: C, 3: D, 4: E, 5: F, 6: G, 7: H);
69
70pub trait Tokenizable: Sized {
71    fn from_token(token: ethabi::Token) -> Result<Self, Error>;
72    fn into_token(self) -> ethabi::Token;
73}
74
75impl Tokenizable for bool {
76    fn from_token(token: ethabi::Token) -> Result<Self, Error> {
77        match token {
78            ethabi::Token::Bool(flag) => Ok(flag),
79            _ => Err(Error::InvalidOutputType(format!(
80                "expected Boolean, got {token:?}"
81            ))),
82        }
83    }
84
85    fn into_token(self) -> ethabi::Token {
86        ethabi::Token::Bool(self)
87    }
88}
89
90impl Tokenizable for H160 {
91    fn from_token(token: ethabi::Token) -> Result<Self, Error> {
92        match token {
93            ethabi::Token::Address(address) => Ok(address),
94            _ => Err(Error::InvalidOutputType(format!(
95                "expected address, got {token:?}"
96            ))),
97        }
98    }
99
100    fn into_token(self) -> ethabi::Token {
101        ethabi::Token::Address(self)
102    }
103}
104
105impl Tokenizable for U256 {
106    fn from_token(token: ethabi::Token) -> Result<Self, Error> {
107        match token {
108            ethabi::Token::Uint(value) => Ok(value),
109            _ => Err(Error::InvalidOutputType(format!(
110                "expected uint256, got {token:?}"
111            ))),
112        }
113    }
114
115    fn into_token(self) -> ethabi::Token {
116        ethabi::Token::Uint(self)
117    }
118}
119
120impl Tokenizable for H256 {
121    fn from_token(token: ethabi::Token) -> Result<Self, Error> {
122        match token {
123            ethabi::Token::FixedBytes(value) => {
124                value.as_slice().try_into().map(H256).map_err(|_| {
125                    Error::InvalidOutputType(format!("expected bytes32, got {value:?}"))
126                })
127            }
128            _ => Err(Error::InvalidOutputType(format!(
129                "expected bytes32, got {token:?}"
130            ))),
131        }
132    }
133
134    fn into_token(self) -> ethabi::Token {
135        ethabi::Token::FixedBytes(self.as_bytes().to_vec())
136    }
137}
138
139impl Tokenizable for Vec<u8> {
140    fn from_token(token: ethabi::Token) -> Result<Self, Error> {
141        match token {
142            ethabi::Token::Bytes(bytes) => Ok(bytes),
143            _ => Err(Error::InvalidOutputType(format!(
144                "expected bytes, got {token:?}"
145            ))),
146        }
147    }
148
149    fn into_token(self) -> ethabi::Token {
150        ethabi::Token::Bytes(self)
151    }
152}
153
154impl Tokenizable for ethabi::Token {
155    fn from_token(token: ethabi::Token) -> Result<Self, Error> {
156        Ok(token)
157    }
158
159    fn into_token(self) -> ethabi::Token {
160        self
161    }
162}
163
164impl<T: TokenizableItem> Tokenizable for Vec<T> {
165    fn from_token(token: ethabi::Token) -> Result<Self, Error> {
166        match token {
167            ethabi::Token::FixedArray(tokens) | ethabi::Token::Array(tokens) => {
168                tokens.into_iter().map(Tokenizable::from_token).collect()
169            }
170            other => Err(Error::InvalidOutputType(format!(
171                "Expected `Array`, got {other:?}"
172            ))),
173        }
174    }
175
176    fn into_token(self) -> ethabi::Token {
177        ethabi::Token::Array(self.into_iter().map(Tokenizable::into_token).collect())
178    }
179}
180
/// Marker trait for [`Tokenizable`] types that can be tokenized to and from a
/// `Token::Array` and `Token::FixedArray`.
pub trait TokenizableItem: Tokenizable {}
184
185impl TokenizableItem for ethabi::Token {}
186impl TokenizableItem for Vec<u8> {}