ethabi_fork_ethcontract/token/
strict.rs

// Copyright 2015-2020 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use crate::{errors::Error, token::Tokenizer};

/// Tries to parse a string as a token. Requires the string to clearly represent the value.
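///
/// A minimal usage sketch (hedged: the import path assumes this fork keeps
/// upstream ethabi's public `token` module layout, hence the `ignore`):
///
/// ```ignore
/// use ethabi::token::{StrictTokenizer, Tokenizer};
///
/// // Only exact spellings are accepted; "TRUE" or "yes" would be errors.
/// assert!(StrictTokenizer::tokenize_bool("1").unwrap());
/// ```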
pub struct StrictTokenizer;

impl Tokenizer for StrictTokenizer {
	fn tokenize_address(value: &str) -> Result<[u8; 20], Error> {
		let hex: Vec<u8> = hex::decode(value)?;
		// Strict: the input must decode to exactly 20 bytes; nothing is padded.
		if hex.len() != 20 {
			return Err(Error::InvalidData);
		}
		let mut address = [0u8; 20];
		address.copy_from_slice(&hex);
		Ok(address)
	}

	fn tokenize_string(value: &str) -> Result<String, Error> {
		Ok(value.to_owned())
	}

	fn tokenize_bool(value: &str) -> Result<bool, Error> {
		// Only these exact spellings are accepted.
		match value {
			"true" | "1" => Ok(true),
			"false" | "0" => Ok(false),
			_ => Err(Error::InvalidData),
		}
	}

	fn tokenize_bytes(value: &str) -> Result<Vec<u8>, Error> {
		// Any valid even-length hex string decodes to `bytes`.
		hex::decode(value).map_err(Into::into)
	}

	fn tokenize_fixed_bytes(value: &str, len: usize) -> Result<Vec<u8>, Error> {
		let hex: Vec<u8> = hex::decode(value)?;
		// Strict: the decoded length must match the declared `bytesN` size.
		if hex.len() == len {
			Ok(hex)
		} else {
			Err(Error::InvalidData)
		}
	}

	fn tokenize_uint(value: &str) -> Result<[u8; 32], Error> {
		let hex: Vec<u8> = hex::decode(value)?;
		// Strict: exactly 32 bytes (64 hex characters), big-endian.
		if hex.len() != 32 {
			return Err(Error::InvalidData);
		}
		let mut uint = [0u8; 32];
		uint.copy_from_slice(&hex);
		Ok(uint)
	}

	fn tokenize_int(value: &str) -> Result<[u8; 32], Error> {
		// Signed values are expected in the same 32-byte big-endian hex form
		// (i.e. already two's-complement encoded), so the unsigned path applies.
		Self::tokenize_uint(value)
	}
}

#[cfg(test)]
mod tests {
	use crate::{
		token::{StrictTokenizer, Token, Tokenizer},
		ParamType,
	};

	#[test]
	fn tokenize_address() {
		assert_eq!(
			StrictTokenizer::tokenize(&ParamType::Address, "1111111111111111111111111111111111111111").unwrap(),
			Token::Address([0x11u8; 20].into())
		);
		assert_eq!(
			StrictTokenizer::tokenize(&ParamType::Address, "2222222222222222222222222222222222222222").unwrap(),
			Token::Address([0x22u8; 20].into())
		);
	}
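
	// Hedged addition: an error-path test derived from the strict length
	// check in `tokenize_address` above; `is_err` is used so nothing is
	// assumed about `Error`'s variants or `PartialEq` support.
	#[test]
	fn tokenize_address_rejects_invalid_input() {
		// 19 bytes instead of the required 20.
		assert!(StrictTokenizer::tokenize(&ParamType::Address, &"11".repeat(19)).is_err());
		// `hex::decode` does not strip a `0x` prefix, so prefixed input fails.
		assert!(StrictTokenizer::tokenize(&ParamType::Address, "0x1111111111111111111111111111111111111111").is_err());
	}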

	#[test]
	fn tokenize_string() {
		assert_eq!(
			StrictTokenizer::tokenize(&ParamType::String, "gavofyork").unwrap(),
			Token::String("gavofyork".to_owned())
		);
		assert_eq!(StrictTokenizer::tokenize(&ParamType::String, "hello").unwrap(), Token::String("hello".to_owned()));
	}

	#[test]
	fn tokenize_bool() {
		assert_eq!(StrictTokenizer::tokenize(&ParamType::Bool, "true").unwrap(), Token::Bool(true));
		assert_eq!(StrictTokenizer::tokenize(&ParamType::Bool, "1").unwrap(), Token::Bool(true));
		assert_eq!(StrictTokenizer::tokenize(&ParamType::Bool, "false").unwrap(), Token::Bool(false));
		assert_eq!(StrictTokenizer::tokenize(&ParamType::Bool, "0").unwrap(), Token::Bool(false));
	}
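
	// Hedged addition: `tokenize_bool` matches only the four exact spellings
	// exercised above, so any other input must be rejected.
	#[test]
	fn tokenize_bool_rejects_other_spellings() {
		assert!(StrictTokenizer::tokenize(&ParamType::Bool, "TRUE").is_err());
		assert!(StrictTokenizer::tokenize(&ParamType::Bool, "yes").is_err());
		assert!(StrictTokenizer::tokenize(&ParamType::Bool, "").is_err());
	}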

	#[test]
	fn tokenize_bytes() {
		assert_eq!(
			StrictTokenizer::tokenize(&ParamType::Bytes, "123456").unwrap(),
			Token::Bytes(vec![0x12, 0x34, 0x56])
		);
		assert_eq!(StrictTokenizer::tokenize(&ParamType::Bytes, "0017").unwrap(), Token::Bytes(vec![0x00, 0x17]));
	}

	#[test]
	fn tokenize_fixed_bytes() {
		assert_eq!(
			StrictTokenizer::tokenize(&ParamType::FixedBytes(3), "123456").unwrap(),
			Token::FixedBytes(vec![0x12, 0x34, 0x56])
		);
		assert_eq!(
			StrictTokenizer::tokenize(&ParamType::FixedBytes(2), "0017").unwrap(),
			Token::FixedBytes(vec![0x00, 0x17])
		);
	}
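
	// Hedged addition: the decoded length must equal the declared size, so a
	// three-byte payload must not tokenize as `bytes2` or `bytes4`.
	#[test]
	fn tokenize_fixed_bytes_rejects_wrong_length() {
		assert!(StrictTokenizer::tokenize(&ParamType::FixedBytes(2), "123456").is_err());
		assert!(StrictTokenizer::tokenize(&ParamType::FixedBytes(4), "123456").is_err());
	}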

	#[test]
	fn tokenize_uint() {
		assert_eq!(
			StrictTokenizer::tokenize(
				&ParamType::Uint(256),
				"1111111111111111111111111111111111111111111111111111111111111111"
			)
			.unwrap(),
			Token::Uint([0x11u8; 32].into())
		);

		assert_eq!(
			StrictTokenizer::tokenize(
				&ParamType::Uint(256),
				"2222222222222222222222222222222222222222222222222222222222222222"
			)
			.unwrap(),
			Token::Uint([0x22u8; 32].into())
		);
	}
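
	// Hedged addition: strict uints require exactly 64 hex characters
	// (32 bytes); shorter input is not zero-padded.
	#[test]
	fn tokenize_uint_rejects_short_input() {
		assert!(StrictTokenizer::tokenize(&ParamType::Uint(256), "11").is_err());
	}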

	#[test]
	fn tokenize_int() {
		assert_eq!(
			StrictTokenizer::tokenize(
				&ParamType::Int(256),
				"1111111111111111111111111111111111111111111111111111111111111111"
			)
			.unwrap(),
			Token::Int([0x11u8; 32].into())
		);

		assert_eq!(
			StrictTokenizer::tokenize(
				&ParamType::Int(256),
				"2222222222222222222222222222222222222222222222222222222222222222"
			)
			.unwrap(),
			Token::Int([0x22u8; 32].into())
		);
	}

	#[test]
	fn tokenize_empty_array() {
		assert_eq!(
			StrictTokenizer::tokenize(&ParamType::Array(Box::new(ParamType::Bool)), "[]").unwrap(),
			Token::Array(vec![])
		);
	}

	#[test]
	fn tokenize_bool_array() {
		assert_eq!(
			StrictTokenizer::tokenize(&ParamType::Array(Box::new(ParamType::Bool)), "[true,1,0,false]").unwrap(),
			Token::Array(vec![Token::Bool(true), Token::Bool(true), Token::Bool(false), Token::Bool(false)])
		);
	}

	#[test]
	fn tokenize_bool_array_of_arrays() {
		assert_eq!(
			StrictTokenizer::tokenize(
				&ParamType::Array(Box::new(ParamType::Array(Box::new(ParamType::Bool)))),
				"[[true,1,0],[false]]"
			)
			.unwrap(),
			Token::Array(vec![
				Token::Array(vec![Token::Bool(true), Token::Bool(true), Token::Bool(false)]),
				Token::Array(vec![Token::Bool(false)])
			])
		);
	}
}