displaydoc_lite_proc_macros/lib.rs

use proc_macro::{
    Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree as TT,
};

#[doc(hidden)]
#[proc_macro]
pub fn __tuple_bindings__(input: TokenStream) -> TokenStream {
    let mut tokens = input.into_iter();

    // First argument: a parenthesized group whose first token is the enum name.
    let name = if let Some(TT::Group(group)) = tokens.next() {
        if let Some(TT::Ident(ident)) = group.stream().into_iter().next() {
            ident
        } else {
            panic!()
        }
    } else {
        panic!()
    };

    if !matches!(tokens.next(), Some(TT::Punct(x)) if x.as_char() == ',') {
        panic!("missing comma");
    }

    // Second argument: a parenthesized group whose first token is the variant name.
    let variant = if let Some(TT::Group(group)) = tokens.next() {
        if let Some(TT::Ident(ident)) = group.stream().into_iter().next() {
            ident
        } else {
            panic!()
        }
    } else {
        panic!()
    };

    if !matches!(tokens.next(), Some(TT::Punct(x)) if x.as_char() == ',') {
        panic!("missing comma");
    }

    let span = name.span();

    // Every remaining comma-terminated token stands for one tuple field; emit
    // a positional binding `_0`, `_1`, … for each of them.
    let mut args = vec![];
    let mut idx = 0;
    while let Some(_tok) = tokens.next() {
        let name = format!("_{}", idx);
        args.push(TT::Ident(Ident::new(&name, span)));
        args.push(TT::Punct(Punct::new(',', Spacing::Alone)));

        if !matches!(tokens.next(), Some(TT::Punct(x)) if x.as_char() == ',') {
            panic!("missing comma");
        }
        idx += 1;
    }

    // Emit the pattern `Name::Variant(_0, _1, …)`.
    vec![
        TT::Ident(name),
        TT::Punct(Punct::new(':', Spacing::Joint)),
        TT::Punct(Punct::new(':', Spacing::Alone)),
        TT::Ident(variant),
        TT::Group(Group::new(
            Delimiter::Parenthesis,
            args.into_iter().collect(),
        )),
    ]
    .into_iter()
    .collect()
}
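
// Illustrative sketch, not taken from this listing: assuming the declarative
// half of displaydoc_lite invokes this helper with hypothetical enum, variant
// and field tokens such as
//
//     __tuple_bindings__!((Error), (Io), a, b,)
//
// the call expands to the tuple-variant pattern
//
//     Error::Io(_0, _1,)
//
// i.e. one positional binding `_N` per trailing comma-terminated token, which
// the caller can use to match or destructure that variant.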

#[doc(hidden)]
#[proc_macro]
pub fn __struct_string__(input: TokenStream) -> TokenStream {
    let mut tokens = input.into_iter();

    // First argument: a parenthesized group whose first token is the
    // formatter identifier (typically the `f` of a `Display::fmt` impl).
    let tok = tokens.next();
    let fmt = if let Some(TT::Group(group)) = tok {
        if let Some(TT::Ident(ident)) = group.stream().into_iter().next() {
            ident
        } else {
            panic!()
        }
    } else {
        panic!()
    };

    if !matches!(tokens.next(), Some(TT::Punct(x)) if x.as_char() == ',') {
        panic!("missing comma");
    }

    // Second argument: the doc-comment string literal.
    let (lit_span, lit) = if let Some(TT::Literal(lit)) = tokens.next() {
        (lit.span(), format!("{}", lit))
    } else {
        panic!()
    };

    let span = lit_span;

    // Strip a raw-string prefix and the surrounding quotes, then scan the
    // characters for `{name}` and `{name:?}` interpolations.
    let mut orig_chars = lit
        .trim_start_matches('r')
        .trim_matches('"')
        .chars()
        .peekable();
    let chars = orig_chars.by_ref();

    let mut string = String::new();
    let mut args = vec![];
    while let Some(c) = chars.next() {
        match c {
            '{' => {
                // Read the field name from a lookahead clone, then skip past
                // it in the real iterator.
                let name = chars
                    .clone()
                    .take_while(|c| c.is_ascii_alphanumeric() || *c == '_')
                    .collect::<String>();
                chars.by_ref().take(name.len()).for_each(drop);

                match chars.peek() {
                    Some(':') => {
                        chars.next();
                        if let Some('?') = chars.peek() {
                            // `{name:?}` becomes a positional `{:?}`.
                            string.push_str("{:?}");
                            args.push(Ident::new(&name, span));
                            chars.by_ref().take(2).for_each(drop);
                            continue;
                        }
                    }
                    Some('}') => chars.by_ref().take(1).for_each(drop),
                    _ => {}
                }

                // `{name}` becomes a positional `{}`.
                string.push_str("{}");
                args.push(Ident::new(&name, span));
            }
            c => string.push(c),
        }
    }

    // Argument list for `write!`: the formatter, the rewritten format string,
    // then the collected field names in order of appearance.
    let args = vec![
        TT::Ident(fmt),
        TT::Punct(Punct::new(',', Spacing::Alone)),
        TT::Literal(Literal::string(string.trim())),
        TT::Punct(Punct::new(',', Spacing::Alone)),
    ]
    .into_iter()
    .chain(
        args.into_iter()
            .flat_map(|x| vec![TT::Ident(x), TT::Punct(Punct::new(',', Spacing::Alone))]),
    );

    // Emit `write!(fmt, "…", field0, field1, …)`.
    vec![
        TT::Ident(Ident::new("write", span)),
        TT::Punct(Punct::new('!', Spacing::Alone)),
        TT::Group(Group::new(Delimiter::Parenthesis, args.collect())),
    ]
    .into_iter()
    .collect()
}
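
// Illustrative sketch, not taken from this listing: assuming the declarative
// half of displaydoc_lite passes the formatter and a variant's doc string as
// hypothetical tokens
//
//     __struct_string__!((f), "failed to read {path}: {source:?}")
//
// the scanner above rewrites the named interpolations into positional ones
// and the macro expands to roughly
//
//     write!(f, "failed to read {}: {:?}", path, source,)
//
// so each `{field}` in a doc comment resolves against the identically named
// binding in scope at the call site.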

fn map(input: TokenStream) -> TokenStream {
    let mut tokens = input.into_iter().peekable();
    let mut ret = TokenStream::new();
    while let Some(tt) = tokens.next() {
        ret.extend(Some(match tt {
            // `@` followed by an invisible (`Delimiter::None`) group: splice
            // the group's contents in, recursively mapped, and drop both the
            // marker and the invisible delimiters.
            TT::Punct(ref p) if p.as_char() == '@' => match tokens.peek() {
                Some(&TT::Group(ref group)) if group.delimiter() == Delimiter::None => {
                    ret.extend(map(group.stream()));
                    drop(tokens.next());
                    continue;
                }
                // `@@` escapes to a literal `@`.
                Some(TT::Punct(ref p)) if p.as_char() == '@' => tokens.next().unwrap(),
                // A lone `@` in front of anything else is dropped.
                _ => continue,
            },
            // Recurse into nested groups, keeping their delimiters.
            TT::Group(group) => Group::new(group.delimiter(), map(group.stream())).into(),
            _ => tt,
        }));
    }
    ret
}
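
// Illustrative sketch of the transformation (hypothetical tokens, with «…»
// standing in for a `Delimiter::None` group such as the one a `$e:expr`
// capture expands to):
//
//     @«1 + 2»   @@   (x @«y»)
//
// is mapped to
//
//     1 + 2   @   (x y)
//
// i.e. the invisible delimiters behind an `@` marker are peeled off, `@@`
// collapses to a literal `@`, and everything else passes through unchanged.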

#[doc(hidden)]
#[proc_macro_derive(__expr_hack__)]
pub fn __expr_hack__(input: TokenStream) -> TokenStream {
    let mut tokens = input.into_iter();
    // Peel through the wrapper item the derive is attached to: three times in
    // a row, drop two tokens and descend into the stream of the group that
    // follows. The innermost group holds the caller's smuggled tokens.
    let _ = tokens.by_ref().take(2).for_each(drop);
    let mut tokens = if let Some(TT::Group(it)) = tokens.next() {
        it
    } else {
        panic!()
    }
    .stream()
    .into_iter();
    let _ = tokens.by_ref().take(2).for_each(drop);
    let mut tokens = if let Some(TT::Group(it)) = tokens.next() {
        it
    } else {
        panic!()
    }
    .stream()
    .into_iter();
    let _ = tokens.by_ref().take(2).for_each(drop);
    let input = if let Some(TT::Group(it)) = tokens.next() {
        it
    } else {
        panic!()
    }
    .stream();
    let ret = map(input);
    let span = Span::call_site();
    // Emit `macro_rules! __defile__Hack__ { () => ( <mapped tokens> ) }`, a
    // zero-argument macro the caller can invoke to splice the mapped tokens
    // back in at its call site.
    vec![
        TT::Ident(Ident::new("macro_rules", span)),
        TT::Punct(Punct::new('!', Spacing::Alone)),
        TT::Ident(Ident::new("__defile__Hack__", span)),
        TT::Group(Group::new(
            Delimiter::Brace,
            vec![
                TT::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
                TT::Punct(Punct::new('=', Spacing::Joint)),
                TT::Punct(Punct::new('>', Spacing::Alone)),
                TT::Group(Group::new(Delimiter::Parenthesis, ret)),
            ]
            .into_iter()
            .collect(),
        )),
    ]
    .into_iter()
    .collect()
}
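
// Illustrative sketch, not taken from this listing: one wrapper shape that
// matches the token-skipping above (hypothetical; the `__defile__Hack__` name
// suggests it mirrors the `defile` crate's expression hack) is
//
//     #[derive(__expr_hack__)]
//     enum __Hack__ { Input = (stringify!( @«$body» ), 0).1 }
//
// where «…» again stands for the invisible group produced by a macro capture.
// Skipping `enum __Hack__` reaches the braced body, skipping `Input =`
// reaches the parenthesized discriminant, and skipping `stringify !` reaches
// the group with the smuggled tokens, which `map` then "de-groups". The
// derive's output is the single item
//
//     macro_rules! __defile__Hack__ { () => ( /* mapped tokens */ ) }
//
// which the surrounding declarative macro can invoke right away.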