// quote_data_helpers/tokenizable.rs

use quote::ToTokens;
use syn::{Error, Type};
use proc_macro2::TokenStream;
use std::collections::{HashMap, HashSet};
use std::hash::Hash;
use crate::error::QuoteItError;
use crate::helper::{assert_angle_args, get_nested_types};
use crate::TokenizableError;

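/// Builds the expression that converts the value at `value_path` (of type
/// `ty`) into something tokenizable, trying each registered handler in order.
/// An illustrative sketch (names are examples; the exact tokens come from the
/// matching handler):
///
/// ```text
/// ty = Vec<String>, value_path = self.names
///   => quote_data::TokenizableVec::from_value(self.names.iter().map(...).collect())
/// ty = u32 (no handler), value_path = self.count, as_ref = false, clone = true
///   => self.count.clone()
/// ```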
pub fn get_value_wrapper(ty: &Type, value_path: TokenStream, as_ref: bool, clone: bool) -> Result<TokenStream, Error> {
    let ref_token = if as_ref {
        quote::quote! {&}
    } else {
        TokenStream::new()
    };

    let clone_token = if clone {
        quote::quote! {.clone()}
    } else {
        TokenStream::new()
    };

    // The concrete type parameters here are placeholders: they are only
    // needed to name the handler functions, which inspect `ty` and never
    // construct the generic type themselves.
    let handlers = [
        TokenizableVec::<String>::convert_token_stream,
        TokenizableString::convert_token_stream,
        TokenizableOption::<String>::convert_token_stream,
        TokenizableResult::<String, TokenizableError>::convert_token_stream,
        TokenizableHashMap::<String, String>::convert_token_stream,
        TokenizableHashSet::<String>::convert_token_stream,
        TokenizablePair::<String, String>::convert_token_stream,
        TokenizablePhantomData::convert_token_stream,
    ];

    // Run the handlers in order, keeping the first `Some` result and
    // propagating the first error.
    let result = handlers.iter().try_fold(None, |prev, handler| {
        if prev.is_none() {
            handler(ty, &value_path)
        } else {
            Ok(prev)
        }
    })?;

    // No handler matched: fall back to a plain field access, optionally
    // borrowed and/or cloned.
    Ok(result.unwrap_or_else(|| quote::quote! {
        #ref_token#value_path#clone_token
    }))
}

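/// Adapter between a runtime value and the tokens that rebuild an equivalent
/// value inside a `quote!` expansion. A rough round-trip sketch:
///
/// ```text
/// TokenizableVec::from_value(vec![1u8, 2u8])  --to_tokens-->  vec![1u8, 2u8]
/// ```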
pub trait Tokenizable: ToTokens + Clone + Sized {
    type ValueType;

    /// Tokens that evaluate to the wrapped value at expansion time.
    fn value_token_stream(&self) -> TokenStream;

    /// Wraps an owned value in this adapter.
    fn from_value(value: Self::ValueType) -> Self;

    /// Returns `Ok(Some(tokens))` when `ty` is the type this adapter handles,
    /// `Ok(None)` when it is not, and `Err` when the type is malformed
    /// (e.g. missing type parameters).
    fn convert_token_stream(
        ty: &Type,
        value_path: &TokenStream,
    ) -> Result<Option<TokenStream>, Error>;
}

#[derive(Clone)]
pub struct TokenizableVec<T: ToTokens + Clone>(pub Vec<T>);

impl<T: ToTokens + Clone> Tokenizable for TokenizableVec<T> {
    type ValueType = Vec<T>;

    fn value_token_stream(&self) -> TokenStream {
        let value = &self.0;
        quote::quote! {
            vec![#(#value),*]
        }
    }

    fn from_value(value: Self::ValueType) -> Self {
        TokenizableVec(value)
    }

    fn convert_token_stream(
        ty: &Type,
        value_path: &TokenStream,
    ) -> Result<Option<TokenStream>, Error> {
        if let Type::Path(type_path) = ty {
            let last_segment = type_path.path.segments.last().unwrap();

            if last_segment.ident != "Vec" {
                return Ok(None);
            }

            let arguments = assert_angle_args(&last_segment.arguments)?;
            let nested_types = get_nested_types(arguments);
            let nested_type = nested_types.first().ok_or_else(|| {
                QuoteItError::TypeParamCountError("Vec", 1, 0).into_syn_error(ty)
            })?;

            // Recurse so nested containers (e.g. Vec<Vec<String>>) are
            // converted element by element.
            let wrapped_value = get_value_wrapper(
                nested_type,
                quote::quote! { item },
                false,
                true,
            )?;

            Ok(Some(quote::quote! {
                quote_data::TokenizableVec::from_value(#value_path.iter().map(
                    |item| #wrapped_value
                ).collect())
            }))
        } else {
            Ok(None)
        }
    }
}

impl<T: ToTokens + Clone> ToTokens for TokenizableVec<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.value_token_stream().to_tokens(tokens)
    }
}

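/// Emits `"<literal>".to_string()` so the expanded expression yields an owned
/// `String` rather than a `&'static str`.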
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct TokenizableString(pub String);

impl Tokenizable for TokenizableString {
    type ValueType = String;

    fn value_token_stream(&self) -> TokenStream {
        let value = &self.0;
        quote::quote! {
            #value.to_string()
        }
    }

    fn from_value(value: Self::ValueType) -> Self {
        TokenizableString(value)
    }

    fn convert_token_stream(
        ty: &Type,
        value_path: &TokenStream,
    ) -> Result<Option<TokenStream>, Error> {
        if let Type::Path(type_path) = ty {
            let last_segment = type_path.path.segments.last().unwrap();

            if last_segment.ident != "String" {
                return Ok(None);
            }

            Ok(Some(quote::quote! {
                quote_data::TokenizableString::from_value(#value_path.clone())
            }))
        } else {
            Ok(None)
        }
    }
}

impl ToTokens for TokenizableString {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.value_token_stream().to_tokens(tokens)
    }
}

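/// The nested value is converted through `get_value_wrapper`, so options of
/// other handled types (e.g. `Option<Vec<String>>`) compose.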
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct TokenizableOption<T: ToTokens + Clone>(pub Option<T>);

impl<T: ToTokens + Clone> Tokenizable for TokenizableOption<T> {
    type ValueType = Option<T>;

    fn value_token_stream(&self) -> TokenStream {
        match &self.0 {
            Some(nested) => quote::quote! {
                Some(#nested)
            },
            None => quote::quote! {
                None
            },
        }
    }

    fn from_value(value: Self::ValueType) -> Self {
        TokenizableOption(value)
    }

    fn convert_token_stream(
        ty: &Type,
        value_path: &TokenStream,
    ) -> Result<Option<TokenStream>, Error> {
        if let Type::Path(type_path) = ty {
            let last_segment = type_path.path.segments.last().unwrap();

            if last_segment.ident != "Option" {
                return Ok(None);
            }

            let arguments = assert_angle_args(&last_segment.arguments)?;
            let nested_types = get_nested_types(arguments);
            let nested_type = nested_types.first().ok_or_else(|| {
                QuoteItError::TypeParamCountError("Option", 1, 0).into_syn_error(ty)
            })?;

            let wrapped_value = get_value_wrapper(
                nested_type,
                quote::quote! { option_value },
                false,
                true,
            )?;

            Ok(Some(quote::quote! {
                quote_data::TokenizableOption::from_value(#value_path.as_ref().map(|option_value| #wrapped_value))
            }))
        } else {
            Ok(None)
        }
    }
}

impl<T: ToTokens + Clone> ToTokens for TokenizableOption<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.value_token_stream().to_tokens(tokens)
    }
}

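/// Converts both the `Ok` and `Err` payloads recursively; the error type must
/// itself implement `ToTokens` (see `TokenizableError` in the handler list).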
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct TokenizableResult<T: ToTokens + Clone, E: ToTokens + Clone + std::error::Error>(
    pub Result<T, E>,
);

impl<T, E> Tokenizable for TokenizableResult<T, E>
where
    T: ToTokens + Clone,
    E: ToTokens + Clone + std::error::Error,
{
    type ValueType = Result<T, E>;

    fn value_token_stream(&self) -> TokenStream {
        match &self.0 {
            Ok(v) => quote::quote! {
                Ok(#v)
            },
            Err(e) => quote::quote! {
                Err(#e)
            },
        }
    }

    fn from_value(value: Self::ValueType) -> Self {
        TokenizableResult(value)
    }

    fn convert_token_stream(
        ty: &Type,
        value_path: &TokenStream,
    ) -> Result<Option<TokenStream>, Error> {
        if let Type::Path(type_path) = ty {
            let last_segment = type_path.path.segments.last().unwrap();

            if last_segment.ident != "Result" {
                return Ok(None);
            }

            let arguments = assert_angle_args(&last_segment.arguments)?;
            let nested_types = get_nested_types(arguments);

            let first_param = nested_types.first().ok_or_else(|| {
                QuoteItError::TypeParamCountError("Result", 2, 0).into_syn_error(ty)
            })?;

            let second_param = nested_types.get(1).ok_or_else(|| {
                QuoteItError::TypeParamCountError("Result", 2, 1).into_syn_error(ty)
            })?;

            let first_wrapped_value = get_value_wrapper(
                first_param,
                quote::quote! { result },
                false,
                true,
            )?;

            let second_wrapped_value = get_value_wrapper(
                second_param,
                quote::quote! { error },
                false,
                true,
            )?;

            Ok(Some(quote::quote! {
                quote_data::TokenizableResult::from_value(
                    #value_path.clone()
                        .map(|result| #first_wrapped_value)
                        .map_err(|error| #second_wrapped_value)
                )
            }))
        } else {
            Ok(None)
        }
    }
}

impl<T, E> ToTokens for TokenizableResult<T, E>
where
    T: ToTokens + Clone,
    E: ToTokens + Clone + std::error::Error,
{
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.value_token_stream().to_tokens(tokens)
    }
}

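/// Note that `ValueType` is `Vec<(K, V)>` rather than `HashMap<K, V>`: the
/// generated code collects converted pairs into a `Vec`, and `from_value`
/// rebuilds the map from it.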
#[derive(Clone)]
pub struct TokenizableHashMap<K: Eq + Hash + Clone + ToTokens, V: Clone + ToTokens>(
    pub HashMap<K, V>,
);

impl<K, V> Tokenizable for TokenizableHashMap<K, V>
where
    K: Eq + Hash + Clone + ToTokens,
    V: Clone + ToTokens,
{
    type ValueType = Vec<(K, V)>;

    fn value_token_stream(&self) -> TokenStream {
        // Note: `HashMap` iteration order is unspecified, so the order of the
        // emitted pairs may vary between runs; the collected map is the same.
        let pairs: Vec<TokenStream> = self
            .0
            .iter()
            .map(|(key, value)| {
                quote::quote! {
                    (#key, #value)
                }
            })
            .collect();

        quote::quote! {
            vec![#(#pairs),*].into_iter().collect()
        }
    }

    fn from_value(value: Self::ValueType) -> Self {
        TokenizableHashMap(value.into_iter().collect())
    }

    fn convert_token_stream(
        ty: &Type,
        value_path: &TokenStream,
    ) -> Result<Option<TokenStream>, Error> {
        if let Type::Path(type_path) = ty {
            let last_segment = type_path.path.segments.last().unwrap();

            if last_segment.ident != "HashMap" {
                return Ok(None);
            }

            let arguments = assert_angle_args(&last_segment.arguments)?;
            let nested_types = get_nested_types(arguments);

            let first_param = nested_types.first().ok_or_else(|| {
                QuoteItError::TypeParamCountError("HashMap", 2, 0).into_syn_error(ty)
            })?;

            let second_param = nested_types.get(1).ok_or_else(|| {
                QuoteItError::TypeParamCountError("HashMap", 2, 1).into_syn_error(ty)
            })?;

            let first_wrapped_value = get_value_wrapper(
                first_param,
                quote::quote! { key },
                false,
                true,
            )?;

            let second_wrapped_value = get_value_wrapper(
                second_param,
                quote::quote! { value },
                false,
                true,
            )?;

            Ok(Some(quote::quote! {
                quote_data::TokenizableHashMap::from_value(
                    #value_path.iter().map(
                        |(key, value)| (#first_wrapped_value, #second_wrapped_value)
                    ).collect()
                )
            }))
        } else {
            Ok(None)
        }
    }
}

impl<K, V> ToTokens for TokenizableHashMap<K, V>
where
    K: Eq + Hash + Clone + ToTokens,
    V: Clone + ToTokens,
{
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.value_token_stream().to_tokens(tokens)
    }
}

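/// Mirrors `TokenizableHashMap`: `ValueType` is a `Vec<T>` and the set is
/// rebuilt in `from_value`.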
#[derive(Clone)]
pub struct TokenizableHashSet<T: ToTokens + Clone + Eq + Hash>(pub HashSet<T>);

impl<T> Tokenizable for TokenizableHashSet<T>
where
    T: ToTokens + Clone + Eq + Hash,
{
    type ValueType = Vec<T>;

    fn value_token_stream(&self) -> TokenStream {
        // As with `HashMap`, set iteration order is unspecified.
        let items = self.0.iter();
        quote::quote! {
            vec![#(#items),*].into_iter().collect()
        }
    }

    fn from_value(value: Self::ValueType) -> Self {
        TokenizableHashSet(value.into_iter().collect())
    }

    fn convert_token_stream(
        ty: &Type,
        value_path: &TokenStream,
    ) -> Result<Option<TokenStream>, Error> {
        if let Type::Path(type_path) = ty {
            let last_segment = type_path.path.segments.last().unwrap();

            if last_segment.ident != "HashSet" {
                return Ok(None);
            }

            let arguments = assert_angle_args(&last_segment.arguments)?;
            let nested_types = get_nested_types(arguments);
            let nested_type = nested_types.first().ok_or_else(|| {
                QuoteItError::TypeParamCountError("HashSet", 1, 0).into_syn_error(ty)
            })?;

            let wrapped_value = get_value_wrapper(
                nested_type,
                quote::quote! { item },
                false,
                true,
            )?;

            Ok(Some(quote::quote! {
                quote_data::TokenizableHashSet::from_value(#value_path.iter().map(
                    |item| #wrapped_value
                ).collect())
            }))
        } else {
            Ok(None)
        }
    }
}

impl<T> ToTokens for TokenizableHashSet<T>
where
    T: ToTokens + Clone + Eq + Hash,
{
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.value_token_stream().to_tokens(tokens)
    }
}

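/// Handles exactly two-element tuple types; other tuple arities produce a
/// `TypeParamCountError` instead of falling through to the default wrapper.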
#[derive(Clone)]
pub struct TokenizablePair<A: ToTokens + Clone, B: ToTokens + Clone>(pub (A, B));

impl<A, B> Tokenizable for TokenizablePair<A, B>
where
    A: ToTokens + Clone,
    B: ToTokens + Clone,
{
    type ValueType = (A, B);

    fn value_token_stream(&self) -> TokenStream {
        let first = &self.0.0;
        let second = &self.0.1;

        quote::quote! {
            (#first, #second)
        }
    }

    fn from_value(value: Self::ValueType) -> Self {
        TokenizablePair(value)
    }

    fn convert_token_stream(ty: &Type, value_path: &TokenStream) -> Result<Option<TokenStream>, Error> {
        if let Type::Tuple(type_tuple) = ty {
            let (first_ty, second_ty) = if type_tuple.elems.len() != 2 {
                return Err(QuoteItError::TypeParamCountError(
                    "Pair", 2, type_tuple.elems.len()
                ).into_syn_error(ty))
            } else {
                let mut iter = type_tuple.elems.iter();

                (iter.next().unwrap(), iter.next().unwrap())
            };

            let first = get_value_wrapper(
                first_ty,
                quote::quote! { #value_path.0 },
                false,
                true,
            )?;

            let second = get_value_wrapper(
                second_ty,
                quote::quote! { #value_path.1 },
                false,
                true,
            )?;

            Ok(Some(quote::quote! {
                quote_data::TokenizablePair::from_value((#first, #second))
            }))
        } else {
            Ok(None)
        }
    }
}

impl<A, B> ToTokens for TokenizablePair<A, B>
where
    A: ToTokens + Clone,
    B: ToTokens + Clone,
{
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.value_token_stream().to_tokens(tokens)
    }
}

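/// `PhantomData` has no runtime value, so the generated expression is simply
/// `PhantomData::default()` and the field value is ignored.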
#[derive(Clone)]
pub struct TokenizablePhantomData;

impl Tokenizable for TokenizablePhantomData {
    type ValueType = ();

    fn value_token_stream(&self) -> TokenStream {
        quote::quote! {
            std::marker::PhantomData::default()
        }
    }

    fn from_value(_value: Self::ValueType) -> Self {
        TokenizablePhantomData
    }

    fn convert_token_stream(ty: &Type, _value_path: &TokenStream) -> Result<Option<TokenStream>, Error> {
        if let Type::Path(type_path) = ty {
            let last_segment = type_path.path.segments.last().unwrap();

            if last_segment.ident != "PhantomData" {
                return Ok(None);
            }

            Ok(Some(quote::quote! {
                quote_data::TokenizablePhantomData::from_value(())
            }))
        } else {
            Ok(None)
        }
    }
}

impl ToTokens for TokenizablePhantomData {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.value_token_stream().to_tokens(tokens)
    }
}
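
// A minimal sanity-check sketch of the token output (assumed test setup: the
// crate's unit-test harness can exercise this module directly). The expected
// streams are built with `quote!` the same way `value_token_stream` builds
// them, so the comparison is on token equality, not source formatting.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn vec_emits_vec_macro() {
        let tokens = TokenizableVec::from_value(vec![1u8, 2, 3]);
        assert_eq!(
            quote::quote! { #tokens }.to_string(),
            quote::quote! { vec![1u8, 2u8, 3u8] }.to_string(),
        );
    }

    #[test]
    fn option_wraps_nested_value() {
        let tokens = TokenizableOption::from_value(Some(TokenizableString::from_value("hi".into())));
        assert_eq!(
            quote::quote! { #tokens }.to_string(),
            quote::quote! { Some("hi".to_string()) }.to_string(),
        );
    }

    #[test]
    fn pair_emits_tuple() {
        let tokens = TokenizablePair::from_value((1u8, 2u16));
        assert_eq!(
            quote::quote! { #tokens }.to_string(),
            quote::quote! { (1u8, 2u16) }.to_string(),
        );
    }
}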