// default_constructor_macros/lib.rs
use proc_macro::TokenStream as TokenStream1;
use proc_macro2::{token_stream::IntoIter, Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use proc_macro_error::{abort, proc_macro_error};
use quote::quote;

6#[proc_macro]
45#[proc_macro_error]
46pub fn meta_default_constructor(tokens: TokenStream1) -> TokenStream1 {
47 meta_default_constructor2(tokens.into()).into()
48}
49
50fn ident_is_pascal(ident: &Ident) -> bool {
51 ident
52 .to_string()
53 .chars()
54 .next()
55 .is_some_and(|c| c.is_uppercase())
56}
57
58fn parse_until_comma(
59 stream: &mut IntoIter,
60 pfx: impl IntoIterator<Item = TokenTree>,
61) -> Vec<TokenTree> {
62 let mut result = Vec::from_iter(pfx);
63 for tt in stream.by_ref() {
64 match tt {
65 TokenTree::Punct(p) if p.as_char() == ',' => break,
66 _ => result.push(tt),
67 }
68 }
69 result
70}
71
72fn transform_field(
79 convert_fn: &TokenStream,
80 mut expr: Vec<TokenTree>,
81 arr: bool,
82) -> Vec<TokenTree> {
83 match expr.last() {
84 Some(TokenTree::Group(g))
85 if arr && g.delimiter() == Delimiter::Bracket && expr.len() == 1 =>
86 {
87 let buf = parse_delimited(convert_fn, g.stream());
88 quote! {
89 [#buf]
90 }
91 .into_iter()
92 .collect()
93 }
94 Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace && expr.len() > 1 => {
95 let fields = parse_struct_definition(convert_fn, g.stream());
96 expr.pop();
97 quote! {
98 #(#expr)* #fields
99 }
100 .into_iter()
101 .collect()
102 }
103 _ => expr,
104 }
105}
106
107fn parse_delimited(convert_fn: &TokenStream, stream: TokenStream) -> TokenStream {
108 let mut result = Vec::new();
109 let mut iter = stream.into_iter();
110 loop {
111 match iter.next() {
112 Some(TokenTree::Punct(p)) if p.as_char() == '@' => {
113 let Some(TokenTree::Ident(mut convert_fn2)) = iter.next() else {
114 abort!(p.span(), "Expected convert function after '@'.")
115 };
116 if convert_fn2 == "box" {
117 convert_fn2 = Ident::new("boxed", convert_fn2.span())
118 }
119 let arr = convert_fn2 == "arr";
120 let iter = transform_field(convert_fn, parse_until_comma(&mut iter, []), arr);
121 result.extend(quote! {{
122 use ::default_constructor::effects::*;
123 #convert_fn2(#convert_fn(#(#iter)*))
124 },})
125 }
126 Some(pfx) => {
127 let iter = transform_field(convert_fn, parse_until_comma(&mut iter, [pfx]), false);
128 result.extend(quote! {#convert_fn(#(#iter)*),})
129 }
130 None => break,
131 }
132 }
133 result.into_iter().collect()
134}
135
136fn parse_struct_definition(convert_fn: &TokenStream, stream: TokenStream) -> TokenTree {
137 let mut result = Vec::new();
138 let mut iter = stream.into_iter();
139 while let Some(field) = iter.next() {
140 iter.next();
141 match iter.next() {
142 Some(TokenTree::Punct(p)) if p.as_char() == '@' => {
143 let Some(TokenTree::Ident(mut convert_fn2)) = iter.next() else {
144 abort!(p.span(), "Expected convert function after '@'.")
145 };
146 if convert_fn2 == "box" {
147 convert_fn2 = Ident::new("boxed", convert_fn2.span())
148 }
149 let arr = convert_fn2 == "arr";
150 let iter = transform_field(convert_fn, parse_until_comma(&mut iter, []), arr);
151 result.extend(quote! {#field: {
152 #convert_fn2(#convert_fn(#(#iter)*))
153 },})
154 }
155 Some(pfx) => {
156 let iter = transform_field(convert_fn, parse_until_comma(&mut iter, [pfx]), false);
157 result.extend(quote! {#field: #convert_fn(#(#iter)*),})
158 }
159 None => abort!(Span::call_site(), "Expected field."),
160 }
161 }
162 TokenTree::Group(Group::new(
163 Delimiter::Brace,
164 quote! {
165 #(#result)*
166 ..::core::default::Default::default()
167 },
168 ))
169}
170
/// Core implementation of the `meta_default_constructor!` macro.
///
/// Expected input: a parenthesized conversion function, then a
/// comma-separated list of expressions. Each expression that looks like a
/// struct/tuple-struct constructor is expanded (fields converted,
/// `..Default::default()` appended); everything else passes through
/// unchanged. The results are emitted as a parenthesized tuple.
fn meta_default_constructor2(tokens: TokenStream) -> TokenStream {
    let mut iter = tokens.into_iter();
    // First token must be the group holding the conversion function.
    let Some(TokenTree::Group(convert_fn)) = iter.next() else {
        abort!(Span::call_site(), "Missing conversion function.")
    };
    let convert_fn = convert_fn.stream();
    let tokens: Vec<_> = iter.collect();
    // Depth of `<...>` nesting; commas inside generics must not split.
    // NOTE(review): a bare `<` / `>` comparison or `->` in an expression
    // would also bump this counter — assumes such tokens don't appear at
    // the top level of a segment.
    let mut turbofish_counter = 0;
    let result: Vec<_> = tokens
        // Split the remaining tokens on top-level commas only (the closure
        // carries the nesting depth as mutable state across calls).
        .split(|x| match x {
            TokenTree::Punct(p) if p.as_char() == ',' && turbofish_counter == 0 => true,
            TokenTree::Punct(p) if p.as_char() == '<' => {
                turbofish_counter += 1;
                false
            }
            TokenTree::Punct(p) if p.as_char() == '>' => {
                turbofish_counter -= 1;
                false
            }
            _ => false,
        })
        .filter_map(|segment| {
            match segment {
                // A lone group (e.g. a parenthesized expression): pass through.
                [TokenTree::Group(g)] => Some(quote! {#g}),
                // A macro invocation `path!(...)` / `path!{...}`: pass through.
                [tt @ .., TokenTree::Punct(p), TokenTree::Group(g)] if p.as_char() == '!' => {
                    Some(quote! {
                        #(#tt)*! #g
                    })
                }
                // Trailing `(...)`: either a tuple-struct constructor (expand)
                // or an ordinary function call (pass through).
                [tt @ .., TokenTree::Group(g)] if g.delimiter() == Delimiter::Parenthesis => {
                    let mut count = 0;
                    let mut is_ty = false;

                    // Scan backwards past any `<...>` generics to find the
                    // head identifier; PascalCase means "type constructor".
                    for i in (0..tt.len()).rev() {
                        match &tt[i] {
                            TokenTree::Ident(ident) => {
                                if count <= 0 {
                                    is_ty = ident_is_pascal(ident);
                                    break;
                                }
                            }
                            TokenTree::Punct(p) if p.as_char() == '>' => {
                                count += 1;
                            }
                            TokenTree::Punct(p) if p.as_char() == '<' => {
                                count -= 1;
                            }
                            _ => (),
                        }
                    }

                    if is_ty {
                        // Tuple struct: convert each argument. The inner
                        // block exists so the generated `use` (and possible
                        // `..Default::default()`) can be lint-allowed.
                        let block = parse_delimited(&convert_fn, g.stream());
                        Some(quote! {
                            {
                                #[allow(unused_imports)]
                                #[allow(clippy::needless_update)]
                                {
                                    #(#tt)* (#block)
                                }
                            }
                        })
                    } else {
                        Some(quote! {
                            #(#tt)* #g
                        })
                    }
                }
                // Trailing `{...}`: a struct literal — expand its fields.
                [tt @ .., TokenTree::Group(g)] if g.delimiter() == Delimiter::Brace => {
                    let block = parse_struct_definition(&convert_fn, g.stream());
                    Some(quote! {
                        {
                            #[allow(unused_imports)]
                            #[allow(clippy::needless_update)]
                            {
                                #(#tt)* #block
                            }
                        }
                    })
                }
                // Empty segment (e.g. trailing comma): drop it.
                [] => None,
                // Anything else: pass through verbatim.
                tt => Some(quote! {#(#tt)*}),
            }
        })
        .collect();
    // Emit all expanded expressions as a tuple.
    quote! {(#(#result),*)}
}