// ethers_abi/token/mod.rs

1// Copyright 2015-2020 Parity Technologies
2//
3// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
4// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
5// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
6// option. This file may not be copied, modified, or distributed
7// except according to those terms.
8
9//! ABI param and parsing for it.
10
11#[cfg(feature = "full-serde")]
12mod lenient;
13#[cfg(feature = "full-serde")]
14pub use lenient::LenientTokenizer;
15
16#[cfg(feature = "full-serde")]
17mod strict;
18#[cfg(feature = "full-serde")]
19pub use strict::StrictTokenizer;
20
21mod token;
22pub use token::Token;
23
24#[cfg(all(feature = "serde", not(feature = "std")))]
25use crate::no_std_prelude::*;
26#[cfg(feature = "serde")]
27use core::cmp::Ordering::{Equal, Less};
28
29#[cfg(feature = "serde")]
30use crate::{Error, ParamType};
31
/// This trait should be used to parse string values as tokens.
#[cfg(feature = "serde")]
pub trait Tokenizer {
	/// Tries to parse a string as a token of given type.
	///
	/// An optional `0x` prefix is stripped for `Address`, `Bytes` and
	/// `FixedBytes` values before they are handed to the concrete tokenizer.
	fn tokenize(param: &ParamType, value: &str) -> Result<Token, Error> {
		match *param {
			ParamType::Address => {
				Self::tokenize_address(value.strip_prefix("0x").unwrap_or(value)).map(|a| Token::Address(a.into()))
			}
			ParamType::String => Self::tokenize_string(value).map(Token::String),
			ParamType::Bool => Self::tokenize_bool(value).map(Token::Bool),
			ParamType::Bytes => Self::tokenize_bytes(value.strip_prefix("0x").unwrap_or(value)).map(Token::Bytes),
			ParamType::FixedBytes(len) => {
				Self::tokenize_fixed_bytes(value.strip_prefix("0x").unwrap_or(value), len).map(Token::FixedBytes)
			}
			ParamType::Uint(_) => Self::tokenize_uint(value).map(Into::into).map(Token::Uint),
			ParamType::Int(_) => Self::tokenize_int(value).map(Into::into).map(Token::Int),
			ParamType::Array(ref p) => Self::tokenize_array(value, p).map(Token::Array),
			ParamType::FixedArray(ref p, len) => Self::tokenize_fixed_array(value, p, len).map(Token::FixedArray),
			ParamType::Tuple(ref p) => Self::tokenize_struct(value, p).map(Token::Tuple),
		}
	}

	/// Tries to parse a value as a vector of tokens of fixed size.
	///
	/// Returns `Error::InvalidData` when the parsed array's length differs
	/// from `len`.
	fn tokenize_fixed_array(value: &str, param: &ParamType, len: usize) -> Result<Vec<Token>, Error> {
		let result = Self::tokenize_array(value, param)?;
		match result.len() == len {
			true => Ok(result),
			false => Err(Error::InvalidData),
		}
	}

	/// Tries to parse a struct as a vector of tokens.
	///
	/// `value` must be a parenthesized, comma-separated list, e.g.
	/// `"(true,[(1)],2)"`; `param` lists the type of each field in order.
	fn tokenize_struct(value: &str, param: &[ParamType]) -> Result<Vec<Token>, Error> {
		if !value.starts_with('(') || !value.ends_with(')') {
			return Err(Error::InvalidData);
		}

		// `()` is an empty tuple.
		if value.chars().count() == 2 {
			return Ok(vec![]);
		}

		let mut result = vec![];
		// Tuple-paren depth; the outer `(` keeps this at 1 for top-level items.
		let mut nested = 0isize;
		// True while inside a double-quoted string: delimiters are ignored there.
		let mut ignore = false;
		// Byte offset where the current (non-array) item starts.
		let mut last_item = 1;

		// Bracket depth and start offset of a top-level array item.
		let mut array_nested = 0isize;
		let mut array_item_start = 1;
		// Set after an array item was pushed, so the delimiter that follows it
		// (`,` or the closing `)`) is consumed without re-tokenizing the item.
		let mut last_is_array = false;

		let mut params = param.iter();
		// NOTE: `char_indices` yields *byte* offsets, so the slices below stay
		// on UTF-8 boundaries even when quoted strings contain multi-byte
		// characters (`chars().enumerate()` would mis-slice or panic there);
		// the `+ 1` offsets are fine because all delimiters are 1-byte ASCII.
		for (pos, ch) in value.char_indices() {
			match ch {
				'[' if !ignore => {
					if array_nested == 0 {
						array_item_start = pos;
					}
					array_nested += 1;
				}
				']' if !ignore => {
					array_nested -= 1;

					if nested > 0 {
						// still in nested tuple
						continue;
					}

					match array_nested.cmp(&0) {
						Less => {
							return Err(Error::InvalidData);
						}
						Equal => {
							// A complete top-level array item: tokenize it whole.
							let sub = &value[array_item_start..pos + 1];
							let token = Self::tokenize(params.next().ok_or(Error::InvalidData)?, sub)?;
							result.push(token);
							last_is_array = !last_is_array;
						}
						_ => {}
					}
				}
				// Everything inside an array item is handled by the recursive call.
				_ if array_nested != 0 => continue,
				'(' if !ignore => {
					nested += 1;
				}
				')' if !ignore && last_is_array => {
					// Closing paren right after an array item: item already pushed.
					nested -= 1;
					last_is_array = !last_is_array;
				}
				')' if !ignore => {
					nested -= 1;

					match nested.cmp(&0) {
						Less => {
							return Err(Error::InvalidData);
						}
						Equal => {
							if last_is_array {
								last_is_array = !last_is_array;
							} else {
								let sub = &value[last_item..pos];
								let token = Self::tokenize(params.next().ok_or(Error::InvalidData)?, sub)?;
								result.push(token);
								last_item = pos + 1;
							}
						}
						_ => {}
					}
				}
				'"' => {
					ignore = !ignore;
				}
				',' if array_nested == 0 && nested == 1 && !ignore && last_is_array => {
					// Separator after an array item: skip, the item is already pushed.
					last_is_array = !last_is_array;
				}
				',' if nested == 1 && !ignore => {
					let sub = &value[last_item..pos];
					let token = Self::tokenize(params.next().ok_or(Error::InvalidData)?, sub)?;
					result.push(token);
					last_item = pos + 1;
				}
				_ => (),
			}
		}

		// An unterminated quoted string is invalid.
		if ignore {
			return Err(Error::InvalidData);
		}

		Ok(result)
	}

	/// Tries to parse a value as a vector of tokens.
	///
	/// `value` must be a bracketed, comma-separated list, e.g. `"[1,2]"`;
	/// every element is parsed as `param`.
	fn tokenize_array(value: &str, param: &ParamType) -> Result<Vec<Token>, Error> {
		if !value.starts_with('[') || !value.ends_with(']') {
			return Err(Error::InvalidData);
		}

		// `[]` is an empty array.
		if value.chars().count() == 2 {
			return Ok(vec![]);
		}

		let mut result = vec![];
		// Bracket depth; the outer `[` keeps this at 1 for top-level items.
		let mut nested = 0isize;
		// True while inside a double-quoted string: delimiters are ignored there.
		let mut ignore = false;
		// Byte offset where the current (non-tuple) item starts.
		let mut last_item = 1;

		// Paren depth and start offset of a top-level tuple item.
		let mut tuple_nested = 0isize;
		let mut tuple_item_start = 1;
		// Set after a tuple item was pushed, so the delimiter that follows it
		// (`,` or the closing `]`) is consumed without re-tokenizing the item.
		let mut last_is_tuple = false;
		// NOTE: `char_indices` yields *byte* offsets, so the slices below stay
		// on UTF-8 boundaries even when quoted strings contain multi-byte
		// characters (`chars().enumerate()` would mis-slice or panic there);
		// the `+ 1` offsets are fine because all delimiters are 1-byte ASCII.
		for (i, ch) in value.char_indices() {
			match ch {
				'(' if !ignore => {
					if tuple_nested == 0 {
						tuple_item_start = i;
					}
					tuple_nested += 1;
				}
				')' if !ignore => {
					tuple_nested -= 1;
					match tuple_nested.cmp(&0) {
						Less => {
							return Err(Error::InvalidData);
						}
						Equal => {
							// A complete top-level tuple item: tokenize it whole.
							let sub = &value[tuple_item_start..i + 1];
							let token = Self::tokenize(param, sub)?;
							result.push(token);
							last_is_tuple = !last_is_tuple;
						}
						_ => {}
					}
				}
				// Everything inside a tuple item is handled by the recursive call.
				_ if tuple_nested != 0 => continue,
				'[' if !ignore => {
					nested += 1;
				}
				']' if !ignore && last_is_tuple => {
					// Closing bracket right after a tuple item: item already pushed.
					nested -= 1;
					last_is_tuple = !last_is_tuple;
				}
				']' if !ignore => {
					nested -= 1;
					match nested.cmp(&0) {
						Less => {
							return Err(Error::InvalidData);
						}
						Equal => {
							if last_is_tuple {
								last_is_tuple = !last_is_tuple;
							} else {
								let sub = &value[last_item..i];
								let token = Self::tokenize(param, sub)?;
								result.push(token);
								last_item = i + 1;
							}
						}
						_ => {}
					}
				}
				'"' => {
					ignore = !ignore;
				}
				',' if tuple_nested == 0 && nested == 1 && !ignore && last_is_tuple => {
					// Separator after a tuple item: skip, the item is already pushed.
					last_is_tuple = !last_is_tuple;
				}
				',' if tuple_nested == 0 && nested == 1 && !ignore => {
					let sub = &value[last_item..i];
					let token = Self::tokenize(param, sub)?;
					result.push(token);
					last_item = i + 1;
				}
				_ => (),
			}
		}

		// An unterminated quoted string is invalid.
		if ignore {
			return Err(Error::InvalidData);
		}

		Ok(result)
	}

	/// Tries to parse a value as an address.
	fn tokenize_address(value: &str) -> Result<[u8; 20], Error>;

	/// Tries to parse a value as a string.
	fn tokenize_string(value: &str) -> Result<String, Error>;

	/// Tries to parse a value as a bool.
	fn tokenize_bool(value: &str) -> Result<bool, Error>;

	/// Tries to parse a value as bytes.
	fn tokenize_bytes(value: &str) -> Result<Vec<u8>, Error>;

	/// Tries to parse a value as fixed-size bytes of length `len`.
	fn tokenize_fixed_bytes(value: &str, len: usize) -> Result<Vec<u8>, Error>;

	/// Tries to parse a value as an unsigned integer.
	fn tokenize_uint(value: &str) -> Result<[u8; 32], Error>;

	/// Tries to parse a value as a signed integer.
	fn tokenize_int(value: &str) -> Result<[u8; 32], Error>;
}
276
#[cfg(all(test, feature = "full-serde"))]
mod test {
	use super::{LenientTokenizer, ParamType, Tokenizer};
	use crate::Token;

	#[test]
	fn single_quoted_in_array_must_error() {
		// Inputs with a stray or embedded double quote must be rejected.
		let bad = ["[1,\"0,false]", "[false\"]", "[1,false\"]", "[1,\"0\",false]"];
		for input in bad.iter() {
			assert!(LenientTokenizer::tokenize_array(input, &ParamType::Bool).is_err());
		}
		// A quote-free array still parses.
		assert!(LenientTokenizer::tokenize_array("[1,0]", &ParamType::Bool).is_ok());
	}

	#[test]
	fn tuples_arrays_mixed() {
		// A tuple of two tuple-arrays, wrapped in an outer array.
		let tuple_ty = ParamType::Tuple(vec![
			ParamType::Array(Box::new(ParamType::Tuple(vec![ParamType::Bool]))),
			ParamType::Array(Box::new(ParamType::Tuple(vec![ParamType::Bool, ParamType::Bool]))),
		]);
		let expected = Token::Tuple(vec![
			Token::Array(vec![Token::Tuple(vec![Token::Bool(true)])]),
			Token::Array(vec![Token::Tuple(vec![Token::Bool(false), Token::Bool(true)])]),
		]);
		let parsed = LenientTokenizer::tokenize_array("[([(true)],[(false,true)])]", &tuple_ty).unwrap();
		assert_eq!(parsed, vec![expected]);

		// The same payload parsed directly as a struct.
		let fields = [
			ParamType::Array(Box::new(ParamType::Tuple(vec![ParamType::Bool]))),
			ParamType::Array(Box::new(ParamType::Tuple(vec![ParamType::Bool, ParamType::Bool]))),
		];
		let parsed = LenientTokenizer::tokenize_struct("([(true)],[(false,true)])", &fields).unwrap();
		assert_eq!(
			parsed,
			vec![
				Token::Array(vec![Token::Tuple(vec![Token::Bool(true)])]),
				Token::Array(vec![Token::Tuple(vec![Token::Bool(false), Token::Bool(true)])]),
			]
		);
	}

	#[test]
	fn tuple_array_nested() {
		// Struct whose first field is an array of single-address tuples.
		let fields = [ParamType::Array(Box::new(ParamType::Tuple(vec![ParamType::Address]))), ParamType::Uint(256)];
		let parsed =
			LenientTokenizer::tokenize_struct("([(5c9d55b78febcc2061715ba4f57ecf8ea2711f2c)],2)", &fields).unwrap();
		let addr = "0x5c9d55b78febcc2061715ba4f57ecf8ea2711f2c".parse().unwrap();
		assert_eq!(parsed, vec![Token::Array(vec![Token::Tuple(vec![Token::Address(addr)])]), Token::Uint(2u64.into())]);
	}
}