//! easy_sqlx_utils/value_parser.rs

use std::ops::Deref;

use proc_macro2::{Group, Punct, Span};
use quote::{ToTokens, TokenStreamExt};
use syn::{
    parenthesized,
    parse::{Parse, ParseBuffer, ParseStream},
    punctuated::Punctuated,
    token::Comma,
    Error, Lit, Token,
};
8
9pub fn parse_next<T: FnOnce() -> Result<R, syn::Error>, R: Sized>(
10    input: ParseStream,
11    next: T,
12) -> Result<R, syn::Error> {
13    input.parse::<Token![=]>()?;
14    next()
15}
16
17pub fn parse_groups<T, R>(input: ParseStream) -> syn::Result<R>
18where
19    T: Sized,
20    T: Parse,
21    R: FromIterator<T>,
22{
23    Punctuated::<Group, Comma>::parse_terminated(input).and_then(|groups| {
24        groups
25            .into_iter()
26            .map(|group| syn::parse2::<T>(group.stream()))
27            .collect::<syn::Result<R>>()
28    })
29}
30
31pub fn parse_punctuated_within_parenthesis<T>(
32    input: ParseStream,
33) -> syn::Result<Punctuated<T, Comma>>
34where
35    T: Parse,
36{
37    let content;
38    parenthesized!(content in input);
39    Punctuated::<T, Comma>::parse_terminated(&content)
40}
41
/// Slice or `Vec` of tokenizable items, rendered as a bracketed array
/// (`[a, b, ...]`) when emitted via `ToTokens`.
///
/// NOTE(review): the original comment claimed this tokenizes "correctly to
/// OpenAPI JSON" — that wording looks copied from `utoipa`; confirm whether
/// the OpenAPI claim still applies in this crate.
#[derive(Debug)]
pub enum Array<'a, T>
where
    T: Sized + ToTokens,
{
    /// Owns its elements.
    Owned(Vec<T>),
    /// Borrows a slice of elements; kept for API symmetry, currently unused.
    #[allow(dead_code)]
    Borrowed(&'a [T]),
}
53
54impl<T> Array<'_, T> where T: ToTokens + Sized {}
55
56impl<V> FromIterator<V> for Array<'_, V>
57where
58    V: Sized + ToTokens,
59{
60    fn from_iter<T: IntoIterator<Item = V>>(iter: T) -> Self {
61        Self::Owned(iter.into_iter().collect())
62    }
63}
64
65impl<'a, T> Deref for Array<'a, T>
66where
67    T: Sized + ToTokens,
68{
69    type Target = [T];
70
71    fn deref(&self) -> &Self::Target {
72        match self {
73            Self::Owned(vec) => vec.as_slice(),
74            Self::Borrowed(slice) => slice,
75        }
76    }
77}
78
79impl<T> ToTokens for Array<'_, T>
80where
81    T: Sized + ToTokens,
82{
83    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
84        let values = match self {
85            Self::Owned(values) => values.iter(),
86            Self::Borrowed(values) => values.iter(),
87        };
88        tokens.append(Group::new(
89            proc_macro2::Delimiter::Bracket,
90            values
91                .fold(Punctuated::new(), |mut punctuated, item| {
92                    punctuated.push_value(item);
93                    punctuated.push_punct(Punct::new(',', proc_macro2::Spacing::Alone));
94
95                    punctuated
96                })
97                .to_token_stream(),
98        ));
99
100        // tokens.append();
101    }
102}
103
104// column.col_name = info_stream
105//     .parse::<syn::Lit>()
106//     .map(|a| {
107//         match a {
108//             Lit::Str(str) => {
109//                 return Some(str.value());
110//             }
111//             _ => {
112//                 // return Err(Error::new(ident.span(), EXPECTED_ATTRIBUTE));
113//             }
114//         }
115//         Some("".to_string())
116//         // column.name = a.span()
117//     })?;
118pub fn parse_string(
119    stream: ParseBuffer,
120    field: &'static str,
121    attr: &'static str,
122) -> syn::Result<String> {
123    stream
124        .parse::<syn::Lit>()
125        .map(|lit| match lit {
126            Lit::Str(val) => {
127                return Ok(val.value());
128            }
129            _ => {
130                return Err(Error::new(
131                    Span::call_site(),
132                    format!("field {}'s attr '{}' expect String", field, attr),
133                ));
134            }
135        })
136        .map_err(|err| {
137            return Error::new(
138                Span::call_site(),
139                format!("field {}'s attr '{}' parse error: {}", field, attr, err),
140            );
141        })
142        .unwrap()
143}