ethabi_fork_ethcontract/token/mod.rs

// Copyright 2015-2020 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! ABI token definitions and tokenizers for parsing string values into tokens.

mod lenient;
mod strict;
mod token;

use std::cmp::Ordering::{Equal, Less};

pub use self::{lenient::LenientTokenizer, strict::StrictTokenizer, token::Token};
use crate::{Error, ParamType};

/// This trait should be used to parse string values as tokens.
pub trait Tokenizer {
	/// Tries to parse a string as a token of the given type.
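	///
	/// A minimal usage sketch (marked `ignore` because the import path of
	/// this fork may differ from upstream `ethabi`):
	///
	/// ```ignore
	/// use ethabi::{token::{LenientTokenizer, Tokenizer}, ParamType};
	///
	/// let token = LenientTokenizer::tokenize(&ParamType::Uint(256), "42");
	/// assert!(token.is_ok());
	/// ```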
	fn tokenize(param: &ParamType, value: &str) -> Result<Token, Error> {
		match *param {
			ParamType::Address => Self::tokenize_address(value).map(|a| Token::Address(a.into())),
			ParamType::String => Self::tokenize_string(value).map(Token::String),
			ParamType::Bool => Self::tokenize_bool(value).map(Token::Bool),
			ParamType::Bytes => Self::tokenize_bytes(value).map(Token::Bytes),
			ParamType::FixedBytes(len) => Self::tokenize_fixed_bytes(value, len).map(Token::FixedBytes),
			ParamType::Uint(_) => Self::tokenize_uint(value).map(Into::into).map(Token::Uint),
			ParamType::Int(_) => Self::tokenize_int(value).map(Into::into).map(Token::Int),
			ParamType::Array(ref p) => Self::tokenize_array(value, p).map(Token::Array),
			ParamType::FixedArray(ref p, len) => Self::tokenize_fixed_array(value, p, len).map(Token::FixedArray),
			ParamType::Tuple(ref p) => Self::tokenize_struct(value, p).map(Token::Tuple),
		}
	}

	/// Tries to parse a value as a vector of tokens of fixed size.
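	///
	/// A sketch of the length check (`ignore`d for the same path caveat as
	/// above): the element count must equal `len` exactly.
	///
	/// ```ignore
	/// assert!(LenientTokenizer::tokenize_fixed_array("[1,0]", &ParamType::Bool, 2).is_ok());
	/// assert!(LenientTokenizer::tokenize_fixed_array("[1,0]", &ParamType::Bool, 3).is_err());
	/// ```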
	fn tokenize_fixed_array(value: &str, param: &ParamType, len: usize) -> Result<Vec<Token>, Error> {
		let result = Self::tokenize_array(value, param)?;
		if result.len() == len {
			Ok(result)
		} else {
			Err(Error::InvalidData)
		}
	}

	/// Tries to parse a struct as a vector of tokens.
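	///
	/// The expected syntax is a parenthesized, comma-separated list of
	/// values, e.g. `(true,42)` for a `(bool, uint256)` tuple. A sketch
	/// (`ignore`d; import paths assumed as above):
	///
	/// ```ignore
	/// let types = [ParamType::Bool, ParamType::Uint(256)];
	/// assert!(LenientTokenizer::tokenize_struct("(true,42)", &types).is_ok());
	/// ```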
	fn tokenize_struct(value: &str, param: &[ParamType]) -> Result<Vec<Token>, Error> {
		if !value.starts_with('(') || !value.ends_with(')') {
			return Err(Error::InvalidData);
		}

		if value.chars().count() == 2 {
			return Ok(vec![]);
		}

		let mut result = vec![];
		let mut nested = 0isize;
		let mut ignore = false;
		let mut last_item = 1;
		let mut params = param.iter();
		// Iterate over byte offsets (not char counts) so the slicing below
		// stays correct for multi-byte UTF-8 input.
		for (pos, ch) in value.char_indices() {
			match ch {
				'(' if !ignore => {
					nested += 1;
				}
				')' if !ignore => {
					nested -= 1;

					match nested.cmp(&0) {
						Less => {
							return Err(Error::InvalidData);
						}
						Equal => {
							// Closing the outermost parenthesis: tokenize the last element.
							let sub = &value[last_item..pos];
							let token = Self::tokenize(params.next().ok_or(Error::InvalidData)?, sub)?;
							result.push(token);
							last_item = pos + 1;
						}
						_ => {}
					}
				}
				// Quotes toggle `ignore` so separators inside string literals are skipped.
				'"' => {
					ignore = !ignore;
				}
				',' if nested == 1 && !ignore => {
					let sub = &value[last_item..pos];
					let token = Self::tokenize(params.next().ok_or(Error::InvalidData)?, sub)?;
					result.push(token);
					last_item = pos + 1;
				}
				_ => (),
			}
		}

		// An unbalanced quote means the input was malformed.
		if ignore {
			return Err(Error::InvalidData);
		}

		Ok(result)
	}

	/// Tries to parse a value as a vector of tokens.
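	///
	/// The expected syntax is a bracketed, comma-separated list, e.g.
	/// `[true,false]` for a `bool[]`. A sketch (`ignore`d; paths assumed
	/// as above):
	///
	/// ```ignore
	/// let tokens = LenientTokenizer::tokenize_array("[true,false]", &ParamType::Bool);
	/// assert_eq!(tokens.unwrap().len(), 2);
	/// ```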
	fn tokenize_array(value: &str, param: &ParamType) -> Result<Vec<Token>, Error> {
		if !value.starts_with('[') || !value.ends_with(']') {
			return Err(Error::InvalidData);
		}

		if value.chars().count() == 2 {
			return Ok(vec![]);
		}

		let mut result = vec![];
		let mut nested = 0isize;
		let mut ignore = false;
		let mut last_item = 1;
		// As in `tokenize_struct`, iterate over byte offsets so the slicing
		// below stays correct for multi-byte UTF-8 input.
		for (i, ch) in value.char_indices() {
			match ch {
				'[' if !ignore => {
					nested += 1;
				}
				']' if !ignore => {
					nested -= 1;
					match nested.cmp(&0) {
						Less => {
							return Err(Error::InvalidData);
						}
						Equal => {
							let sub = &value[last_item..i];
							let token = Self::tokenize(param, sub)?;
							result.push(token);
							last_item = i + 1;
						}
						_ => {}
					}
				}
				'"' => {
					ignore = !ignore;
				}
				',' if nested == 1 && !ignore => {
					let sub = &value[last_item..i];
					let token = Self::tokenize(param, sub)?;
					result.push(token);
					last_item = i + 1;
				}
				_ => (),
			}
		}

		if ignore {
			return Err(Error::InvalidData);
		}

		Ok(result)
	}

	/// Tries to parse a value as an address.
	fn tokenize_address(value: &str) -> Result<[u8; 20], Error>;

	/// Tries to parse a value as a string.
	fn tokenize_string(value: &str) -> Result<String, Error>;

	/// Tries to parse a value as a bool.
	fn tokenize_bool(value: &str) -> Result<bool, Error>;

	/// Tries to parse a value as bytes.
	fn tokenize_bytes(value: &str) -> Result<Vec<u8>, Error>;

	/// Tries to parse a value as fixed bytes of length `len`.
	fn tokenize_fixed_bytes(value: &str, len: usize) -> Result<Vec<u8>, Error>;

	/// Tries to parse a value as an unsigned integer.
	fn tokenize_uint(value: &str) -> Result<[u8; 32], Error>;

	/// Tries to parse a value as a signed integer.
	fn tokenize_int(value: &str) -> Result<[u8; 32], Error>;
}

#[cfg(test)]
mod test {
	use super::{LenientTokenizer, ParamType, Tokenizer};

	#[test]
	fn single_quoted_in_array_must_error() {
		assert!(LenientTokenizer::tokenize_array("[1,\"0,false]", &ParamType::Bool).is_err());
		assert!(LenientTokenizer::tokenize_array("[false\"]", &ParamType::Bool).is_err());
		assert!(LenientTokenizer::tokenize_array("[1,false\"]", &ParamType::Bool).is_err());
		assert!(LenientTokenizer::tokenize_array("[1,\"0\",false]", &ParamType::Bool).is_err());
		assert!(LenientTokenizer::tokenize_array("[1,0]", &ParamType::Bool).is_ok());
	}
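
	// Illustrative sketches added alongside the original test; they assume
	// LenientTokenizer's behaviour for decimal uints and `true`/`false`
	// bools and are not part of the upstream suite.
	#[test]
	fn tokenize_tuple_sketch() {
		let ty = ParamType::Tuple(vec![ParamType::Bool, ParamType::Uint(256)]);
		assert!(LenientTokenizer::tokenize(&ty, "(true,42)").is_ok());
		// A tuple missing its closing parenthesis must be rejected.
		assert!(LenientTokenizer::tokenize(&ty, "(true,42").is_err());
	}

	#[test]
	fn fixed_array_length_must_match() {
		// `tokenize_fixed_array` accepts exactly `len` elements.
		assert!(LenientTokenizer::tokenize_fixed_array("[1,0]", &ParamType::Bool, 2).is_ok());
		assert!(LenientTokenizer::tokenize_fixed_array("[1,0]", &ParamType::Bool, 3).is_err());
	}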
}