dyn_stack_macros/
lib.rs

use proc_macro::*;

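/// Proc-macro helper that rewrites a block of statements into
/// `alloc_impl_rules!` calls.
///
/// The expected input shape, with illustrative identifiers (real callers may
/// spell them differently), is three items:
///
/// ```text
/// (path::to::crate) (&stack) { stmt; stmt; ... }
/// ```
///
/// The first group is the crate path used to qualify `alloc_impl_rules!`, the
/// second names the stack variable (either parenthesized with a leading sigil,
/// or a bare sigil followed by the identifier), and the final block holds the
/// statements to rewrite.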
#[proc_macro]
pub fn alloc_impl_proc(item: TokenStream) -> TokenStream {
	let mut item = item.into_iter();

	// First argument: a delimited group holding the crate path, used below to
	// build the `alloc_impl_rules!` call path.
	let Some(TokenTree::Group(krate)) = item.next() else { panic!() };
	let krate = krate.stream();

	// Second argument: the stack identifier, either inside a group
	// (e.g. `(&stack)`, where the leading sigil is skipped) or as a bare
	// sigil token followed by the identifier itself.
	let stack = match item.next() {
		Some(TokenTree::Group(stack)) => {
			let mut stack = stack.stream().into_iter();
			// Skip the leading token and take the identifier that follows.
			_ = stack.next();
			let Some(TokenTree::Ident(stack)) = stack.next() else { panic!() };
			stack
		},
		Some(_) => {
			let Some(TokenTree::Ident(stack)) = item.next() else { panic!() };
			stack
		},

		_ => panic!(),
	};

	// Third argument: the block of `;`-terminated statements to rewrite.
	let Some(TokenTree::Group(block)) = item.next() else { panic!() };

	let mut tokens = Vec::new();

	// Rebind the stack to a fresh local: `let <stack> = <stack>;`.
	tokens.extend([
		TokenTree::Ident(Ident::new("let", Span::call_site())),
		TokenTree::Ident(stack.clone()),
		TokenTree::Punct(Punct::new('=', Spacing::Alone)),
		TokenTree::Ident(stack.clone()),
		TokenTree::Punct(Punct::new(';', Spacing::Alone)),
	]);

	// Split the block on `;` and wrap each statement, prefixed with the stack
	// identifier, in a `<krate>::alloc_impl_rules!(<stack> <stmt>);` call.
	// Any tokens after the last `;` are silently dropped.
	let mut cur_stmt = vec![TokenTree::Ident(stack.clone())];
	for token in block.stream().into_iter() {
		match token {
			TokenTree::Punct(p) if p.as_char() == ';' => {
				tokens.extend(krate.clone().into_iter().chain([
					TokenTree::Punct(Punct::new(':', Spacing::Joint)),
					TokenTree::Punct(Punct::new(':', Spacing::Alone)),
					TokenTree::Ident(Ident::new("alloc_impl_rules", Span::call_site())),
					TokenTree::Punct(Punct::new('!', Spacing::Alone)),
					TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::from_iter(cur_stmt))),
					TokenTree::Punct(Punct::new(';', Spacing::Alone)),
				]));

				cur_stmt = vec![TokenTree::Ident(stack.clone())];
			},
			token => cur_stmt.push(token),
		}
	}

	// Consume the stack binding at the end: `let _ = <stack>;`.
	tokens.extend([
		TokenTree::Ident(Ident::new("let", Span::call_site())),
		TokenTree::Ident(Ident::new("_", Span::call_site())),
		TokenTree::Punct(Punct::new('=', Spacing::Alone)),
		TokenTree::Ident(stack.clone()),
		TokenTree::Punct(Punct::new(';', Spacing::Alone)),
	]);

	TokenStream::from_iter(tokens)
}
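
// Expansion sketch (hypothetical invocation; `dyn_stack`, `stack`, `buf`,
// `scratch`, and `len` are illustrative names, not taken from the real callers):
//
//     alloc_impl_proc! { (dyn_stack) (&stack) { let buf = scratch(len); } }
//
// expands to roughly:
//
//     let stack = stack;
//     dyn_stack::alloc_impl_rules!(stack let buf = scratch(len));
//     let _ = stack;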