// equator_macro/lib.rs

1use proc_macro::*;
2
/// Locates the top-level comparison operator in one comma-terminated
/// expression at the head of `tokens`.
///
/// Returns `[lhs, op, rhs, next]`, all indices into `tokens`:
/// - `lhs` — end (exclusive) of the left-hand side,
/// - `op` — start of the right-hand side; the operator tokens occupy `lhs..op`,
/// - `rhs` — end (exclusive) of the right-hand side,
/// - `next` — index one past the terminating comma, or `rhs` when the input
///   ends without one.
///
/// When no operator is found, or splitting is abandoned (see `try_give_up!`),
/// `lhs == op == rhs` and the caller treats the span as a plain expression.
fn parse_expr(tokens: &[TokenTree]) -> [usize; 4] {
	// Base address of the slice; `offset!` recovers element indices from it.
	// This works because `tokens` below is only ever rebound to sub-slices of
	// this same allocation.
	let start = tokens.as_ptr().addr();

	let mut tokens = tokens;
	let mut give_up = false;

	// On a joint `&&`, or on any `|` (which covers `||` and closure syntax),
	// the expression can no longer be split into `lhs op rhs`: set `give_up`,
	// consume the token(s), and run `$tt` (a `break`/`continue` targeting the
	// caller's enclosing loop).
	macro_rules! try_give_up {
		($tt:tt) => {
			if let [TokenTree::Punct(p0), TokenTree::Punct(p1), rest @ ..] = tokens {
				if p0.as_char() == '&' && p0.spacing() == Spacing::Joint && p1.as_char() == '&' {
					give_up = true;
					tokens = rest;
					$tt;
				}
			}

			if let [TokenTree::Punct(p0), rest @ ..] = tokens {
				if p0.as_char() == '|' {
					give_up = true;
					tokens = rest;
					$tt;
				}
			}
		};
	}

	// Consumes a leading `::<...>` turbofish (skipping to the first `>`;
	// NOTE(review): assumes no nested `<...>` inside the turbofish — confirm)
	// or a bare `::` path separator, then `continue`s the enclosing loop so
	// path tokens are not mistaken for comparison operators.
	macro_rules! skip_generics {
		() => {
			if let [TokenTree::Punct(p0), TokenTree::Punct(p1), TokenTree::Punct(p2), rest @ ..] = tokens {
				if p0.as_char() == ':'
					&& p0.spacing() == Spacing::Joint
					&& p1.as_char() == ':'
					&& p1.spacing() == Spacing::Joint
					&& p2.as_char() == '<'
				{
					tokens = rest;

					// skip everything up to and including the closing `>`
					while let [tt, rest @ ..] = tokens {
						tokens = rest;
						if let TokenTree::Punct(p0) = tt {
							if p0.as_char() == '>' {
								break;
							}
						}
					}

					continue;
				}
			}
			if let [TokenTree::Punct(p0), TokenTree::Punct(p1), rest @ ..] = tokens {
				if p0.as_char() == ':' && p0.spacing() == Spacing::Joint && p1.as_char() == ':' {
					tokens = rest;
					continue;
				}
			}
		};
	}

	// Index of the current head of `tokens` within the original slice.
	macro_rules! offset {
		() => {
			(tokens.as_ptr().addr() - start) / size_of::<TokenTree>()
		};
	}

	let lhs;
	let op;
	let rhs;

	// Phase 1: find the operator; records `lhs` and `op`.
	'main: loop {
		skip_generics!();
		try_give_up!({
			// Giving up: phase 2 returns `rhs` in every slot in that case, so
			// `lhs`/`op` only need to be initialized, not meaningful.
			op = 0;
			lhs = 0;
			break;
		});

		// shift operators `<<`/`>>`: consume both tokens so they are not read
		// as single `<`/`>` comparisons
		for c in ['<', '>'] {
			if let [TokenTree::Punct(p0), TokenTree::Punct(p1), rest @ ..] = tokens {
				if p0.as_char() == c && p0.spacing() == Spacing::Joint && p1.as_char() == c {
					tokens = rest;
					continue 'main;
				}
			}
		}

		// arrow `->`: consume both tokens so the `>` is not read as a comparison
		if let [TokenTree::Punct(p0), TokenTree::Punct(p1), rest @ ..] = tokens {
			if p0.as_char() == '-' && p0.spacing() == Spacing::Joint && p1.as_char() == '>' {
				tokens = rest;
				continue 'main;
			}
		}

		// two-character comparisons: `<=`, `>=`, `==`, `!=`
		for c in ['<', '>', '=', '!'] {
			if let [TokenTree::Punct(p0), TokenTree::Punct(p1), rest @ ..] = tokens {
				if p0.as_char() == c && p0.spacing() == Spacing::Joint && p1.as_char() == '=' {
					lhs = offset!();
					op = lhs + 2;

					tokens = rest;
					break 'main;
				}
			}
		}
		// single-character comparisons `<`, `>` and approximate equality `~`
		for c in ['<', '>', '~'] {
			if let [TokenTree::Punct(p0), rest @ ..] = tokens {
				if p0.as_char() == c {
					lhs = offset!();
					op = lhs + 1;

					tokens = rest;
					break 'main;
				}
			}
		}

		// custom comparator `:path:` — scan to the matching closing `:` so the
		// operator span `lhs..op` covers `:path:` including both colons.
		// NOTE(review): this inner loop never terminates if the closing `:` is
		// missing — assumes callers pass well-formed input.
		if let [TokenTree::Punct(p0), rest @ ..] = tokens {
			if p0.as_char() == ':' {
				lhs = offset!();
				tokens = rest;
				'op: loop {
					skip_generics!();
					if let [tt, rest @ ..] = tokens {
						tokens = rest;
						if let TokenTree::Punct(p0) = tt {
							if p0.as_char() == ':' {
								op = offset!();
								break 'op;
							}
						}
					}
				}
				break 'main;
			}
		}

		// comma before any operator: the whole span is a plain expression
		if let [TokenTree::Punct(p0), ..] = tokens {
			if p0.as_char() == ',' {
				lhs = offset!();
				return [lhs, lhs, lhs, lhs + 1];
			}
		}
		// otherwise consume one token and keep scanning
		if let [_, rest @ ..] = tokens {
			tokens = rest;
			continue 'main;
		}
		// input exhausted without an operator or a comma
		lhs = offset!();
		return [lhs, lhs, lhs, lhs];
	}

	// Phase 2: find the end of the right-hand side (a comma or end of input).
	'main: loop {
		skip_generics!();
		// `&&`/`|` after the operator also aborts splitting
		try_give_up!(continue);

		if let [TokenTree::Punct(p0), ..] = tokens {
			if p0.as_char() == ',' {
				rhs = offset!();
				if give_up {
					return [rhs, rhs, rhs, rhs + 1];
				}
				return [lhs, op, rhs, rhs + 1];
			}
		}
		if let [_, rest @ ..] = tokens {
			tokens = rest;
			continue 'main;
		}
		rhs = offset!();
		if give_up {
			return [rhs, rhs, rhs, rhs];
		}
		return [lhs, op, rhs, rhs];
	}
}
179
180fn parse(tokens: &[TokenTree]) -> (TokenStream, usize) {
181	let [lhs, op, rhs, next] = parse_expr(tokens);
182	if lhs < op {
183		let args = [TokenTree::Group(Group::new(
184			Delimiter::Parenthesis,
185			TokenStream::from_iter(
186				tokens[..lhs]
187					.iter()
188					.cloned()
189					.chain([TokenTree::Punct(Punct::new(',', Spacing::Alone))])
190					.chain(tokens[op..rhs].iter().cloned()),
191			),
192		))];
193
194		if let TokenTree::Punct(p) = &tokens[lhs] {
195			if p.as_char() == ':' {
196				return (
197					TokenStream::from_iter(
198						tokens[lhs + 1..op - 1]
199							.iter()
200							.cloned()
201							.chain([TokenTree::Punct(Punct::new(',', Spacing::Alone))])
202							.chain(args),
203					),
204					next,
205				);
206			}
207		}
208
209		if let TokenTree::Punct(p) = &tokens[lhs] {
210			if p.as_char() == '~' {
211				return (
212					TokenStream::from_iter(
213						[
214							TokenTree::Ident(Ident::new("approx_eq", p.span())),
215							TokenTree::Punct(Punct::new(',', Spacing::Alone)),
216							TokenTree::Punct(Punct::new('~', Spacing::Alone)),
217						]
218						.into_iter()
219						.chain(args),
220					),
221					next,
222				);
223			}
224		}
225
226		return (TokenStream::from_iter(tokens[lhs..op].iter().cloned().chain(args)), next);
227	}
228	assert_eq!(lhs, op);
229	assert_eq!(op, rhs);
230
231	let tokens = &tokens[..lhs];
232
233	if let [TokenTree::Ident(f), TokenTree::Group(g)] = tokens {
234		if matches!(&*f.to_string(), "any" | "all") {
235			let mut start = 0;
236			let mut cond = vec![];
237			let g = &*Vec::from_iter(g.stream());
238			while start < g.len() {
239				let (c, next) = parse(&g[start..]);
240				cond.push(TokenTree::Group(Group::new(Delimiter::Parenthesis, c)));
241				start += next;
242			}
243			return (
244				TokenStream::from_iter([
245					TokenTree::Ident(f.clone()),
246					TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::from_iter(cond))),
247				]),
248				next,
249			);
250		}
251	}
252
253	(TokenStream::from_iter(tokens.iter().cloned()), next)
254}
255
256#[proc_macro]
257pub fn assert(item: TokenStream) -> TokenStream {
258	let mut item = item.into_iter();
259	let Some(TokenTree::Group(krate)) = item.next() else { panic!() };
260	let item = &*Vec::from_iter(item);
261	let (cond, next) = parse(item);
262
263	let stream = TokenStream::from_iter(
264		krate.stream().into_iter().chain([
265			TokenTree::Punct(Punct::new(':', Spacing::Joint)),
266			TokenTree::Punct(Punct::new(':', Spacing::Alone)),
267			TokenTree::Ident(Ident::new("do_panic", Span::call_site())),
268			TokenTree::Group(Group::new(
269				Delimiter::Parenthesis,
270				TokenStream::from_iter(
271					TokenStream::new()
272						.into_iter()
273						.chain([
274							TokenTree::Ident(Ident::new("const", Span::call_site())),
275							TokenTree::Group(Group::new(
276								Delimiter::Brace,
277								TokenStream::from_iter(
278									TokenStream::new()
279										.into_iter()
280										.chain([TokenTree::Punct(Punct::new('&', Spacing::Alone))])
281										.chain(krate.stream())
282										.chain([
283											TokenTree::Punct(Punct::new(':', Spacing::Joint)),
284											TokenTree::Punct(Punct::new(':', Spacing::Alone)),
285											TokenTree::Ident(Ident::new("WithSource", Span::call_site())),
286											TokenTree::Group(Group::new(
287												Delimiter::Brace,
288												TokenStream::new()
289													.into_iter()
290													.chain([
291														TokenTree::Ident(Ident::new("file", Span::call_site())),
292														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
293														TokenTree::Punct(Punct::new(':', Spacing::Joint)),
294														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
295														TokenTree::Ident(Ident::new("core", Span::call_site())),
296														TokenTree::Punct(Punct::new(':', Spacing::Joint)),
297														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
298														TokenTree::Ident(Ident::new("file", Span::call_site())),
299														TokenTree::Punct(Punct::new('!', Spacing::Alone)),
300														TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
301														TokenTree::Punct(Punct::new(',', Spacing::Alone)),
302													])
303													.chain([
304														TokenTree::Ident(Ident::new("line", Span::call_site())),
305														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
306														TokenTree::Punct(Punct::new(':', Spacing::Joint)),
307														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
308														TokenTree::Ident(Ident::new("core", Span::call_site())),
309														TokenTree::Punct(Punct::new(':', Spacing::Joint)),
310														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
311														TokenTree::Ident(Ident::new("line", Span::call_site())),
312														TokenTree::Punct(Punct::new('!', Spacing::Alone)),
313														TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
314														TokenTree::Punct(Punct::new(',', Spacing::Alone)),
315													])
316													.chain([
317														TokenTree::Ident(Ident::new("col", Span::call_site())),
318														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
319														TokenTree::Punct(Punct::new(':', Spacing::Joint)),
320														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
321														TokenTree::Ident(Ident::new("core", Span::call_site())),
322														TokenTree::Punct(Punct::new(':', Spacing::Joint)),
323														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
324														TokenTree::Ident(Ident::new("column", Span::call_site())),
325														TokenTree::Punct(Punct::new('!', Spacing::Alone)),
326														TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
327														TokenTree::Punct(Punct::new(',', Spacing::Alone)),
328													])
329													.chain([
330														TokenTree::Ident(Ident::new("source", Span::call_site())),
331														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
332													])
333													.chain(krate.stream())
334													.chain([
335														TokenTree::Punct(Punct::new(':', Spacing::Joint)),
336														TokenTree::Punct(Punct::new(':', Spacing::Alone)),
337														TokenTree::Ident(Ident::new("source_imp", Span::call_site())),
338														TokenTree::Punct(Punct::new('!', Spacing::Alone)),
339														TokenTree::Group(Group::new(Delimiter::Parenthesis, cond.clone())),
340														TokenTree::Punct(Punct::new(',', Spacing::Alone)),
341													])
342													.collect(),
343											)),
344										]),
345								),
346							)),
347							TokenTree::Punct(Punct::new(',', Spacing::Alone)),
348						])
349						.chain(krate.stream())
350						.chain([
351							TokenTree::Punct(Punct::new(':', Spacing::Joint)),
352							TokenTree::Punct(Punct::new(':', Spacing::Alone)),
353							TokenTree::Ident(Ident::new("assert_imp", Span::call_site())),
354							TokenTree::Punct(Punct::new('!', Spacing::Alone)),
355							TokenTree::Group(Group::new(Delimiter::Parenthesis, cond.clone())),
356							TokenTree::Punct(Punct::new(',', Spacing::Alone)),
357						])
358						.chain(krate.stream())
359						.chain([
360							TokenTree::Punct(Punct::new(':', Spacing::Joint)),
361							TokenTree::Punct(Punct::new(':', Spacing::Alone)),
362							TokenTree::Ident(Ident::new("fmt_imp", Span::call_site())),
363							TokenTree::Punct(Punct::new('!', Spacing::Alone)),
364							TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::from_iter(item[next..].iter().cloned()))),
365							TokenTree::Punct(Punct::new(',', Spacing::Alone)),
366						]),
367				),
368			)),
369		]),
370	);
371
372	stream
373}