default_constructor_macros/
lib.rs

use proc_macro::TokenStream as TokenStream1;
use proc_macro2::{token_stream::IntoIter, Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use proc_macro_error::{abort, proc_macro_error};
use quote::quote;

/// The meta macro.
///
/// # Syntax
///
/// ```
/// # /*
/// meta_default_constructor!(
///     // conversion function
///     [Into::into]
///     // struct name and optional generics
///     MyStruct::<T>
///     // fields
///     {
///         // name-value pairs, like a normal struct literal
///         //
///         // each value is passed through the conversion function,
///         // i.e. this expands to `name: Into::into(value),`
///         name: value,
///         // use an effect like `@boxed` to apply an extra conversion such as boxing;
///         // see the `effects` module
///         boxed: @boxed inner,
///         // nested struct literals are expanded recursively:
///         // `OtherStruct` is constructed with the same `meta_default_constructor!` rules
///         other: OtherStruct {
///             ..
///         },
///         // wrap the value in braces to opt out of the recursive expansion
///         other2: {OtherStruct {
///             ..
///         }},
///         // the `@arr` effect applies the same conversion to each element
///         array: @arr [
///             "Hello", "World!"
///         ],
///         // `..Default::default()` is appended at the end automatically
///     }
/// )
/// # */
/// ```
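///
/// # Expansion sketch
///
/// As a rough illustration only (the real output wraps the expression in a block
/// with a few `#[allow]` attributes and may differ token for token), a call like
/// ```
/// # /*
/// meta_default_constructor!(
///     [Into::into]
///     MyStruct {
///         name: value,
///     }
/// )
/// # */
/// ```
/// expands to roughly
/// ```
/// # /*
/// MyStruct {
///     name: Into::into(value),
///     ..Default::default()
/// }
/// # */
/// ```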
#[proc_macro]
#[proc_macro_error]
pub fn meta_default_constructor(tokens: TokenStream1) -> TokenStream1 {
    meta_default_constructor2(tokens.into()).into()
}

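/// Returns `true` if the identifier starts with an uppercase letter
/// (e.g. `MyStruct` or `Some`), which this crate treats as a type or
/// enum variant rather than a function.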
fn ident_is_pascal(ident: &Ident) -> bool {
    ident
        .to_string()
        .chars()
        .next()
        .is_some_and(|c| c.is_uppercase())
}

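/// Collects tokens from `stream` into a `Vec`, starting with the tokens in `pfx`,
/// until a `,` at the current nesting level is consumed or the stream ends.
/// Commas inside `()`, `[]` or `{}` belong to a single `TokenTree::Group`
/// and therefore do not terminate the field.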
fn parse_until_comma(
    stream: &mut IntoIter,
    pfx: impl IntoIterator<Item = TokenTree>,
) -> Vec<TokenTree> {
    let mut result = Vec::from_iter(pfx);
    for tt in stream.by_ref() {
        match tt {
            TokenTree::Punct(p) if p.as_char() == ',' => break,
            _ => result.push(tt),
        }
    }
    result
}

/// Expects an expression as input.
///
/// * `path::to::Struct { .. }`: apply `parse_struct_definition` recursively.
/// * `[a, b, ..]` (only when the `arr` flag is set): apply the conversion function
///   to each element and keep the surrounding brackets.
///
/// Tuple structs are not parsed since they are syntactically identical to function calls.
fn transform_field(
    convert_fn: &TokenStream,
    mut expr: Vec<TokenTree>,
    arr: bool,
) -> Vec<TokenTree> {
    match expr.last() {
        Some(TokenTree::Group(g))
            if arr && g.delimiter() == Delimiter::Bracket && expr.len() == 1 =>
        {
            let buf = parse_delimited(convert_fn, g.stream());
            quote! {
                [#buf]
            }
            .into_iter()
            .collect()
        }
        Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace && expr.len() > 1 => {
            let fields = parse_struct_definition(convert_fn, g.stream());
            expr.pop();
            quote! {
                #(#expr)* #fields
            }
            .into_iter()
            .collect()
        }
        _ => expr,
    }
}

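/// Parses a comma-separated list of values (the contents of an `@arr [..]` group),
/// applying an optional `@effect` prefix and the conversion function to each element.
///
/// As a rough sketch, with `convert_fn` being `Into::into`, the input
/// `"Hello", "World!"` becomes approximately
/// `Into::into("Hello"), Into::into("World!"),`.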
fn parse_delimited(convert_fn: &TokenStream, stream: TokenStream) -> TokenStream {
    let mut result = Vec::new();
    let mut iter = stream.into_iter();
    loop {
        match iter.next() {
            Some(TokenTree::Punct(p)) if p.as_char() == '@' => {
                let Some(TokenTree::Ident(mut convert_fn2)) = iter.next() else {
                    abort!(p.span(), "Expected convert function after '@'.")
                };
                if convert_fn2 == "box" {
                    convert_fn2 = Ident::new("boxed", convert_fn2.span())
                }
                let arr = convert_fn2 == "arr";
                let iter = transform_field(convert_fn, parse_until_comma(&mut iter, []), arr);
                result.extend(quote! {{
                    use ::default_constructor::effects::*;
                    #convert_fn2(#convert_fn(#(#iter)*))
                },})
            }
            Some(pfx) => {
                let iter = transform_field(convert_fn, parse_until_comma(&mut iter, [pfx]), false);
                result.extend(quote! {#convert_fn(#(#iter)*),})
            }
            None => break,
        }
    }
    result.into_iter().collect()
}

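/// Parses the `name: value` pairs of a struct body, applying an optional `@effect`
/// prefix and the conversion function to each value, and returns the braced field
/// list with `..Default::default()` appended. The separator after each field name
/// (the `:`) is consumed without further checking.
///
/// As a rough sketch, with `convert_fn` being `Into::into`, the body
/// `name: value,` becomes approximately
/// `{ name: Into::into(value), ..::core::default::Default::default() }`.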
fn parse_struct_definition(convert_fn: &TokenStream, stream: TokenStream) -> TokenTree {
    let mut result = Vec::new();
    let mut iter = stream.into_iter();
    while let Some(field) = iter.next() {
        iter.next();
        match iter.next() {
            Some(TokenTree::Punct(p)) if p.as_char() == '@' => {
                let Some(TokenTree::Ident(mut convert_fn2)) = iter.next() else {
                    abort!(p.span(), "Expected convert function after '@'.")
                };
                if convert_fn2 == "box" {
                    convert_fn2 = Ident::new("boxed", convert_fn2.span())
                }
                let arr = convert_fn2 == "arr";
                let iter = transform_field(convert_fn, parse_until_comma(&mut iter, []), arr);
                result.extend(quote! {#field: {
                    #convert_fn2(#convert_fn(#(#iter)*))
                },})
            }
            Some(pfx) => {
                let iter = transform_field(convert_fn, parse_until_comma(&mut iter, [pfx]), false);
                result.extend(quote! {#field: #convert_fn(#(#iter)*),})
            }
            None => abort!(Span::call_site(), "Expected field."),
        }
    }
    TokenTree::Group(Group::new(
        Delimiter::Brace,
        quote! {
            #(#result)*
            ..::core::default::Default::default()
        },
    ))
}

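/// Drives the macro: the first token must be a group containing the conversion
/// function; the rest is split on top-level commas (tracking `<`/`>` depth so that
/// commas inside generics do not split a segment) and each segment is transformed:
///
/// * a lone `(..)`/`{..}` group is passed through unchanged,
/// * `some_macro! { .. }` invocations are passed through unchanged,
/// * `Type(..)` whose last path ident is uppercase gets the conversion applied to
///   each argument, while lowercase `function(..)` calls are forwarded as-is,
/// * `Type { .. }` is rebuilt via `parse_struct_definition`,
///
/// and the transformed segments are re-joined with commas inside parentheses.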
fn meta_default_constructor2(tokens: TokenStream) -> TokenStream {
    let mut iter = tokens.into_iter();
    let Some(TokenTree::Group(convert_fn)) = iter.next() else {
        abort!(Span::call_site(), "Missing conversion function.")
    };
    let convert_fn = convert_fn.stream();
    let tokens: Vec<_> = iter.collect();
    let mut turbofish_counter = 0;
    let result: Vec<_> = tokens
        .split(|x| match x {
            TokenTree::Punct(p) if p.as_char() == ',' && turbofish_counter == 0 => true,
            TokenTree::Punct(p) if p.as_char() == '<' => {
                turbofish_counter += 1;
                false
            }
            TokenTree::Punct(p) if p.as_char() == '>' => {
                turbofish_counter -= 1;
                false
            }
            _ => false,
        })
        .filter_map(|segment| {
            match segment {
                // ignore things wrapped in `()` or `{}`
                [TokenTree::Group(g)] => Some(quote! {#g}),
                // ignore macros
                [tt @ .., TokenTree::Punct(p), TokenTree::Group(g)] if p.as_char() == '!' => {
                    Some(quote! {
                        #(#tt)*! #g
                    })
                }
                [tt @ .., TokenTree::Group(g)] if g.delimiter() == Delimiter::Parenthesis => {
                    let mut count = 0;
                    let mut is_ty = false;

                    // Find the last identifier outside any turbofish. If it is
                    // uppercase, treat the segment as a type and apply the
                    // conversion; if lowercase, treat it as a function call and
                    // forward it unchanged.
                    //
                    // This does not fully parse the token stream; malformed input
                    // is forwarded as-is for the compiler to report.
                    for i in (0..tt.len()).rev() {
                        match &tt[i] {
                            TokenTree::Ident(ident) => {
                                if count <= 0 {
                                    is_ty = ident_is_pascal(ident);
                                    break;
                                }
                            }
                            TokenTree::Punct(p) if p.as_char() == '>' => {
                                count += 1;
                            }
                            TokenTree::Punct(p) if p.as_char() == '<' => {
                                count -= 1;
                            }
                            _ => (),
                        }
                    }

                    // A type or enum variant constructor has concrete field types
                    // (no `x: impl Into<T>` parameters), so it is safe to insert
                    // the conversion calls here.
                    if is_ty {
                        let block = parse_delimited(&convert_fn, g.stream());
                        Some(quote! {
                            {
                                #[allow(unused_imports)]
                                #[allow(clippy::needless_update)]
                                {
                                    #(#tt)* (#block)
                                }
                            }
                        })
                    } else {
                        Some(quote! {
                            #(#tt)* #g
                        })
                    }
                }
                // a brace group following a path can only be struct construction
                [tt @ .., TokenTree::Group(g)] if g.delimiter() == Delimiter::Brace => {
                    let block = parse_struct_definition(&convert_fn, g.stream());
                    Some(quote! {
                        {
                            #[allow(unused_imports)]
                            #[allow(clippy::needless_update)]
                            {
                                #(#tt)* #block
                            }
                        }
                    })
                }
                [] => None,
                tt => Some(quote! {#(#tt)*}),
            }
        })
        .collect();
    quote! {(#(#result),*)}
}