// ethers_abi/token/strict.rs

#[cfg(not(feature = "std"))]
use crate::no_std_prelude::*;

use crate::{errors::Error, token::Tokenizer};

/// Tokenizer that accepts only exactly-formatted input: hex payloads must
/// decode to precisely the expected byte length, and booleans must be one of
/// the literal forms `true`/`1`/`false`/`0`. No padding or coercion is done.
pub struct StrictTokenizer;

16impl Tokenizer for StrictTokenizer {
17 fn tokenize_address(value: &str) -> Result<[u8; 20], Error> {
18 let hex: Vec<u8> = hex::decode(value)?;
19 match hex.len() == 20 {
20 false => Err(Error::InvalidData),
21 true => {
22 let mut address = [0u8; 20];
23 address.copy_from_slice(&hex);
24 Ok(address)
25 }
26 }
27 }
28
29 fn tokenize_string(value: &str) -> Result<String, Error> {
30 Ok(value.to_owned())
31 }
32
33 fn tokenize_bool(value: &str) -> Result<bool, Error> {
34 match value {
35 "true" | "1" => Ok(true),
36 "false" | "0" => Ok(false),
37 _ => Err(Error::InvalidData),
38 }
39 }
40
41 fn tokenize_bytes(value: &str) -> Result<Vec<u8>, Error> {
42 hex::decode(value).map_err(Into::into)
43 }
44
45 fn tokenize_fixed_bytes(value: &str, len: usize) -> Result<Vec<u8>, Error> {
46 let hex: Vec<u8> = hex::decode(value)?;
47 match hex.len() == len {
48 true => Ok(hex),
49 false => Err(Error::InvalidData),
50 }
51 }
52
53 fn tokenize_uint(value: &str) -> Result<[u8; 32], Error> {
54 let hex: Vec<u8> = hex::decode(value)?;
55 match hex.len() == 32 {
56 true => {
57 let mut uint = [0u8; 32];
58 uint.copy_from_slice(&hex);
59 Ok(uint)
60 }
61 false => Err(Error::InvalidData),
62 }
63 }
64
65 fn tokenize_int(value: &str) -> Result<[u8; 32], Error> {
66 Self::tokenize_uint(value)
67 }
68}
#[cfg(test)]
mod tests {
    use crate::{
        token::{StrictTokenizer, Token, Tokenizer},
        ParamType,
    };

    /// Tokenizes `value` as `param`, panicking if the strict parse fails.
    fn parse(param: &ParamType, value: &str) -> Token {
        StrictTokenizer::tokenize(param, value).unwrap()
    }

    #[test]
    fn tokenize_address() {
        let ones = parse(&ParamType::Address, "1111111111111111111111111111111111111111");
        assert_eq!(ones, Token::Address([0x11u8; 20].into()));

        let twos = parse(&ParamType::Address, "2222222222222222222222222222222222222222");
        assert_eq!(twos, Token::Address([0x22u8; 20].into()));
    }

    #[test]
    fn tokenize_string() {
        // Strings are passed through verbatim.
        for text in vec!["gavofyork", "hello"] {
            assert_eq!(parse(&ParamType::String, text), Token::String(text.to_owned()));
        }
    }

    #[test]
    fn tokenize_bool() {
        // Both the word and the numeric literal forms are accepted.
        let cases = vec![("true", true), ("1", true), ("false", false), ("0", false)];
        for (input, expected) in cases {
            assert_eq!(parse(&ParamType::Bool, input), Token::Bool(expected));
        }
    }

    #[test]
    fn tokenize_bytes() {
        assert_eq!(parse(&ParamType::Bytes, "123456"), Token::Bytes(vec![0x12, 0x34, 0x56]));
        assert_eq!(parse(&ParamType::Bytes, "0017"), Token::Bytes(vec![0x00, 0x17]));
    }

    #[test]
    fn tokenize_fixed_bytes() {
        let three = parse(&ParamType::FixedBytes(3), "123456");
        assert_eq!(three, Token::FixedBytes(vec![0x12, 0x34, 0x56]));

        let two = parse(&ParamType::FixedBytes(2), "0017");
        assert_eq!(two, Token::FixedBytes(vec![0x00, 0x17]));
    }

    #[test]
    fn tokenize_uint() {
        let ones = "1111111111111111111111111111111111111111111111111111111111111111";
        assert_eq!(parse(&ParamType::Uint(256), ones), Token::Uint([0x11u8; 32].into()));

        let twos = "2222222222222222222222222222222222222222222222222222222222222222";
        assert_eq!(parse(&ParamType::Uint(256), twos), Token::Uint([0x22u8; 32].into()));
    }

    #[test]
    fn tokenize_int() {
        let ones = "1111111111111111111111111111111111111111111111111111111111111111";
        assert_eq!(parse(&ParamType::Int(256), ones), Token::Int([0x11u8; 32].into()));

        let twos = "2222222222222222222222222222222222222222222222222222222222222222";
        assert_eq!(parse(&ParamType::Int(256), twos), Token::Int([0x22u8; 32].into()));
    }

    #[test]
    fn tokenize_empty_array() {
        let bools = ParamType::Array(Box::new(ParamType::Bool));
        assert_eq!(parse(&bools, "[]"), Token::Array(vec![]));
    }

    #[test]
    fn tokenize_bool_array() {
        let bools = ParamType::Array(Box::new(ParamType::Bool));
        let expected: Vec<Token> =
            vec![true, true, false, false].into_iter().map(Token::Bool).collect();
        assert_eq!(parse(&bools, "[true,1,0,false]"), Token::Array(expected));
    }

    #[test]
    fn tokenize_bool_array_of_arrays() {
        let nested = ParamType::Array(Box::new(ParamType::Array(Box::new(ParamType::Bool))));
        let expected = Token::Array(vec![
            Token::Array(vec![Token::Bool(true), Token::Bool(true), Token::Bool(false)]),
            Token::Array(vec![Token::Bool(false)]),
        ]);
        assert_eq!(parse(&nested, "[[true,1,0],[false]]"), expected);
    }
}