use fuel_types::{Address, AssetId, ContractId};

use crate::{
    traits::Parameterize,
    types::{
        AsciiString, Bits256, Bytes, RawSlice, SizedAsciiString, StaticStringToken, Token,
        errors::{Result, error},
        param_types::ParamType,
    },
};

pub trait Tokenizable {
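    /// Converts a `Token` into the implementing type.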
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized;
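
    /// Converts the implementing type back into a `Token`.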
    fn into_token(self) -> Token;
}
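
// `Token` is trivially tokenizable: it converts to and from itself unchanged.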
impl Tokenizable for Token {
    fn from_token(token: Token) -> Result<Self> {
        Ok(token)
    }
    fn into_token(self) -> Token {
        self
    }
}

impl Tokenizable for Bits256 {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::B256(data) => Ok(Bits256(data)),
            _ => Err(error!(
                Other,
                "`Bits256` cannot be constructed from token {token}"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::B256(self.0)
    }
}

impl<T: Tokenizable> Tokenizable for Vec<T> {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        if let Token::Vector(tokens) = token {
            tokens.into_iter().map(Tokenizable::from_token).collect()
        } else {
            Err(error!(
                Other,
                "`Vec::from_token` must only be given a `Token::Vector`. Got: `{token}`"
            ))
        }
    }

    fn into_token(self) -> Token {
        let tokens = self.into_iter().map(Tokenizable::into_token).collect();
        Token::Vector(tokens)
    }
}

impl Tokenizable for bool {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::Bool(data) => Ok(data),
            other => Err(error!(Other, "expected `bool`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::Bool(self)
    }
}

impl Tokenizable for () {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::Unit => Ok(()),
            other => Err(error!(Other, "expected `Unit`, got `{:?}`", other)),
        }
    }

    fn into_token(self) -> Token {
        Token::Unit
    }
}

impl Tokenizable for u8 {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::U8(data) => Ok(data),
            other => Err(error!(Other, "expected `u8`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::U8(self)
    }
}

impl Tokenizable for u16 {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::U16(data) => Ok(data),
            other => Err(error!(Other, "expected `u16`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::U16(self)
    }
}

impl Tokenizable for u32 {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::U32(data) => Ok(data),
            other => Err(error!(Other, "expected `u32`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::U32(self)
    }
}

impl Tokenizable for u64 {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::U64(data) => Ok(data),
            other => Err(error!(Other, "expected `u64`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::U64(self)
    }
}

impl Tokenizable for u128 {
    fn from_token(token: Token) -> Result<Self> {
        match token {
            Token::U128(data) => Ok(data),
            other => Err(error!(Other, "expected `u128`, got `{:?}`", other)),
        }
    }
    fn into_token(self) -> Token {
        Token::U128(self)
    }
}
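
// `RawSlice` and `Bytes` both wrap a `Vec<u8>`; they differ only in which
// `Token` variant carries them.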
impl Tokenizable for RawSlice {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::RawSlice(contents) => Ok(Self(contents)),
            _ => Err(error!(
                Other,
                "`RawSlice::from_token` expected a token of the variant `Token::RawSlice`, got: `{token}`"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::RawSlice(Vec::from(self))
    }
}

impl Tokenizable for Bytes {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::Bytes(contents) => Ok(Self(contents)),
            _ => Err(error!(
                Other,
                "`Bytes::from_token` expected a token of the variant `Token::Bytes`, got: `{token}`"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::Bytes(Vec::from(self))
    }
}

impl Tokenizable for String {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::String(string) => Ok(string),
            _ => Err(error!(
                Other,
                "`String::from_token` expected a token of the variant `Token::String`, got: `{token}`"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::String(self)
    }
}
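
// Generates a `Tokenizable` impl for a tuple of the given arity: the elements
// are converted in order and packed into (or unpacked from) a `Token::Tuple`.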
macro_rules! impl_tokenizable_tuples {
    ($num: expr, $( $ty: ident : $no: tt, )+) => {
        impl<$($ty, )+> Tokenizable for ($($ty,)+) where
            $(
                $ty: Tokenizable,
            )+
        {
            fn from_token(token: Token) -> Result<Self> {
                match token {
                    Token::Tuple(tokens) => {
                        let mut it = tokens.into_iter();
                        let mut next_token = move || {
                            it.next().ok_or_else(|| {
                                error!(Other, "ran out of tokens before tuple could be constructed")
                            })
                        };
                        Ok(($(
                            $ty::from_token(next_token()?)?,
                        )+))
                    },
                    other => Err(error!(Other,
                        "expected `Tuple`, got `{:?}`",
                        other
                    )),
                }
            }

            fn into_token(self) -> Token {
                Token::Tuple(vec![
                    $( self.$no.into_token(), )+
                ])
            }
        }
    }
}

impl_tokenizable_tuples!(1, A:0, );
impl_tokenizable_tuples!(2, A:0, B:1, );
impl_tokenizable_tuples!(3, A:0, B:1, C:2, );
impl_tokenizable_tuples!(4, A:0, B:1, C:2, D:3, );
impl_tokenizable_tuples!(5, A:0, B:1, C:2, D:3, E:4, );
impl_tokenizable_tuples!(6, A:0, B:1, C:2, D:3, E:4, F:5, );
impl_tokenizable_tuples!(7, A:0, B:1, C:2, D:3, E:4, F:5, G:6, );
impl_tokenizable_tuples!(8, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, );
impl_tokenizable_tuples!(9, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, );
impl_tokenizable_tuples!(10, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, );
impl_tokenizable_tuples!(11, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, );
impl_tokenizable_tuples!(12, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, );
impl_tokenizable_tuples!(13, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, );
impl_tokenizable_tuples!(14, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, );
impl_tokenizable_tuples!(15, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, );
impl_tokenizable_tuples!(16, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, );
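
// `ContractId`, `Address`, and `AssetId` are encoded as structs containing a
// single `b256` field, hence the `Token::Struct` wrapping a `Token::B256`.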
impl Tokenizable for ContractId {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        if let Token::Struct(tokens) = token {
            if let [Token::B256(data)] = tokens.as_slice() {
                Ok(ContractId::from(*data))
            } else {
                Err(error!(
                    Other,
                    "`ContractId` expected one `Token::B256`, got `{tokens:?}`"
                ))
            }
        } else {
            Err(error!(
                Other,
                "`ContractId` expected `Token::Struct`, got `{token:?}`"
            ))
        }
    }

    fn into_token(self) -> Token {
        let underlying_data: &[u8; 32] = &self;
        Token::Struct(vec![Bits256(*underlying_data).into_token()])
    }
}

impl Tokenizable for Address {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        if let Token::Struct(tokens) = token {
            if let [Token::B256(data)] = tokens.as_slice() {
                Ok(Address::from(*data))
            } else {
                Err(error!(
                    Other,
                    "`Address` expected one `Token::B256`, got `{tokens:?}`"
                ))
            }
        } else {
            Err(error!(
                Other,
                "`Address` expected `Token::Struct`, got `{token:?}`"
            ))
        }
    }

    fn into_token(self) -> Token {
        let underlying_data: &[u8; 32] = &self;

        Token::Struct(vec![Bits256(*underlying_data).into_token()])
    }
}

impl Tokenizable for AssetId {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        if let Token::Struct(tokens) = token {
            if let [Token::B256(data)] = tokens.as_slice() {
                Ok(AssetId::from(*data))
            } else {
                Err(error!(
                    Other,
                    "`AssetId` expected one `Token::B256`, got `{tokens:?}`"
                ))
            }
        } else {
            Err(error!(
                Other,
                "`AssetId` expected `Token::Struct`, got `{token:?}`"
            ))
        }
    }

    fn into_token(self) -> Token {
        let underlying_data: &[u8; 32] = &self;
        Token::Struct(vec![Bits256(*underlying_data).into_token()])
    }
}
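
// `Option<T>` is encoded as a two-variant enum: discriminant 0 carries `None`
// (as a unit token), discriminant 1 carries the `Some` value.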
impl<T> Tokenizable for Option<T>
where
    T: Tokenizable + Parameterize,
{
    fn from_token(token: Token) -> Result<Self> {
        if let Token::Enum(enum_selector) = token {
            match *enum_selector {
                (0, _, _) => Ok(None),
                (1, token, _) => Ok(Option::<T>::Some(T::from_token(token)?)),
                (_, _, _) => Err(error!(
                    Other,
                    "could not construct `Option` from `enum_selector`. Received: `{:?}`",
                    enum_selector
                )),
            }
        } else {
            Err(error!(
                Other,
                "could not construct `Option` from token. Received: `{token:?}`"
            ))
        }
    }
    fn into_token(self) -> Token {
        let (dis, tok) = match self {
            None => (0, Token::Unit),
            Some(value) => (1, value.into_token()),
        };
        if let ParamType::Enum { enum_variants, .. } = Self::param_type() {
            let selector = (dis, tok, enum_variants);
            Token::Enum(Box::new(selector))
        } else {
            panic!("should never happen as `Option::param_type()` returns valid Enum variants");
        }
    }
}
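
// `Result<T, E>` uses the same enum encoding: discriminant 0 is `Ok`,
// discriminant 1 is `Err`.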
impl<T, E> Tokenizable for std::result::Result<T, E>
where
    T: Tokenizable + Parameterize,
    E: Tokenizable + Parameterize,
{
    fn from_token(token: Token) -> Result<Self> {
        if let Token::Enum(enum_selector) = token {
            match *enum_selector {
                (0, token, _) => Ok(std::result::Result::<T, E>::Ok(T::from_token(token)?)),
                (1, token, _) => Ok(std::result::Result::<T, E>::Err(E::from_token(token)?)),
                (_, _, _) => Err(error!(
                    Other,
                    "could not construct `Result` from `enum_selector`. Received: `{:?}`",
                    enum_selector
                )),
            }
        } else {
            Err(error!(
                Other,
                "could not construct `Result` from token. Received: `{token:?}`"
            ))
        }
    }
    fn into_token(self) -> Token {
        let (dis, tok) = match self {
            Ok(value) => (0, value.into_token()),
            Err(value) => (1, value.into_token()),
        };
        if let ParamType::Enum { enum_variants, .. } = Self::param_type() {
            let selector = (dis, tok, enum_variants);
            Token::Enum(Box::new(selector))
        } else {
            panic!("should never happen as `Result::param_type()` returns valid Enum variants");
        }
    }
}
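
// Fixed-size arrays must arrive as a `Token::Array` holding exactly `SIZE`
// elements; the length is verified before the elements are converted.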
impl<const SIZE: usize, T: Tokenizable> Tokenizable for [T; SIZE] {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        let gen_error = |reason| error!(Other, "constructing an array of size {SIZE}: {reason}");

        match token {
            Token::Array(elements) => {
                let len = elements.len();
                if len != SIZE {
                    return Err(gen_error(format!(
                        "`Token::Array` has wrong number of elements: {len}"
                    )));
                }

                let detokenized = elements
                    .into_iter()
                    .map(Tokenizable::from_token)
                    .collect::<Result<Vec<T>>>()
                    .map_err(|err| {
                        gen_error(format!("not all elements could be detokenized: {err}"))
                    })?;

                Ok(detokenized.try_into().unwrap_or_else(|_| {
                    panic!("this should never fail since we're checking the length beforehand")
                }))
            }
            _ => Err(gen_error(format!("expected a `Token::Array`, got `{token}`"))),
        }
    }

    fn into_token(self) -> Token {
        Token::Array(self.map(Tokenizable::into_token).to_vec())
    }
}
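
// `SizedAsciiString<LEN>` is a fixed-length ASCII string; the token's
// contents must be exactly `LEN` characters long.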
impl<const LEN: usize> Tokenizable for SizedAsciiString<LEN> {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::StringArray(contents) => {
                let actual_len = contents.get_encodable_str()?.len();
                if actual_len != LEN {
                    return Err(error!(
                        Other,
                        "`SizedAsciiString<{LEN}>::from_token` got a `Token::StringArray` whose length ({actual_len}) does not match {LEN}"
                    ));
                }
                Self::new(contents.try_into()?)
            }
            _ => Err(error!(
                Other,
                "`SizedAsciiString<{LEN}>::from_token` expected a token of the variant `Token::StringArray`, got: `{token}`"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::StringArray(StaticStringToken::new(self.into(), Some(LEN)))
    }
}

impl Tokenizable for AsciiString {
    fn from_token(token: Token) -> Result<Self>
    where
        Self: Sized,
    {
        match token {
            Token::StringSlice(contents) => Self::new(contents.try_into()?),
            _ => Err(error!(
                Other,
                "`AsciiString::from_token` expected a token of the variant `Token::StringSlice`, got: `{token}`"
            )),
        }
    }

    fn into_token(self) -> Token {
        Token::StringSlice(StaticStringToken::new(self.into(), None))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_from_token_b256() -> Result<()> {
        let data = [1u8; 32];
        let token = Token::B256(data);

        let bits256 = Bits256::from_token(token)?;

        assert_eq!(bits256.0, data);

        Ok(())
    }

    #[test]
    fn test_into_token_b256() {
        let bytes = [1u8; 32];
        let bits256 = Bits256(bytes);

        let token = bits256.into_token();

        assert_eq!(token, Token::B256(bytes));
    }

    #[test]
    fn test_from_token_raw_slice() -> Result<()> {
        let data = vec![42; 11];
        let token = Token::RawSlice(data.clone());

        let slice = RawSlice::from_token(token)?;

        assert_eq!(slice, RawSlice(data));

        Ok(())
    }

    #[test]
    fn test_into_token_raw_slice() {
        let data = vec![13; 32];
        let raw_slice = RawSlice(data.clone());

        let token = raw_slice.into_token();

        assert_eq!(token, Token::RawSlice(data));
    }

    #[test]
    fn sized_ascii_string_is_tokenized_correctly() -> Result<()> {
        let sut = SizedAsciiString::<3>::new("abc".to_string())?;

        let token = sut.into_token();

        match token {
            Token::StringArray(string_token) => {
                let contents = string_token.get_encodable_str()?;
                assert_eq!(contents, "abc");
            }
            _ => {
                panic!("not tokenized correctly! Should have gotten a `Token::StringArray`")
            }
        }

        Ok(())
    }

    #[test]
    fn sized_ascii_string_is_detokenized_correctly() -> Result<()> {
        let token = Token::StringArray(StaticStringToken::new("abc".to_string(), Some(3)));

        let sized_ascii_string =
            SizedAsciiString::<3>::from_token(token).expect("should have succeeded");

        assert_eq!(sized_ascii_string, "abc");

        Ok(())
    }

    #[test]
    fn test_into_token_std_string() -> Result<()> {
        let expected = String::from("hello");

        let token = expected.clone().into_token();
        assert_eq!(token, Token::String(expected.clone()));

        let detokenized = String::from_token(token)?;
        assert_eq!(detokenized, expected);

        Ok(())
    }
}