// ezno_ast_generator/generator.rs
use proc_macro::{token_stream, Delimiter, Spacing, TokenStream, TokenTree};
use proc_macro2::Span;
use quote::{format_ident, quote};
5#[proc_macro]
11pub fn expr(item: TokenStream) -> TokenStream {
12 token_stream_to_ast_node::<ezno_parser::Expression>(item)
13}
14
15#[proc_macro]
16pub fn stmt(item: TokenStream) -> TokenStream {
17 token_stream_to_ast_node::<ezno_parser::StatementOrDeclaration>(item)
18}
19
20fn token_stream_to_ast_node<T: ezno_parser::ASTNode + self_rust_tokenize::SelfRustTokenize>(
21 item: TokenStream,
22) -> TokenStream {
23 let mut string_to_parse = String::new();
24 let mut marker_items = Vec::new();
25 parse_token_stream(item.into_iter(), &mut string_to_parse, &mut marker_items);
26
27 let line_starts = ezno_parser::source_map::LineStarts::new("");
29 let options = ezno_parser::ParseOptions { interpolation_points: true, ..Default::default() };
30 let parse_result =
31 ezno_parser::lex_and_parse_script::<T>(line_starts, options, &string_to_parse, None);
32
33 let node = match parse_result {
34 Ok((node, _state)) => node,
35 Err(err) => {
36 let reason = err.reason;
37 return quote!(compile_error!(#reason)).into();
38 }
39 };
40
41 let node_as_tokens = self_rust_tokenize::SelfRustTokenize::to_tokens(&node);
42
43 let interpolation_tokens = marker_items.iter().enumerate().map(|(idx, name)| {
44 let ident = format_ident!("_marker_{idx}");
45 let expr_ident = proc_macro2::Ident::new(name, Span::call_site());
46 quote!(let #ident = #expr_ident)
47 });
48
49 let tokens = quote! {
50 {
51 use ezno_parser::ast::*;
52 use ezno_parser::{generator_helpers::IntoAST, source_map};
53
54 #(#interpolation_tokens;)*
55 const CURRENT_SOURCE_ID: source_map::SourceId = source_map::Nullable::NULL;
56 #node_as_tokens
57 }
58 };
59
60 tokens.into()
63}
64
65fn parse_token_stream(
66 mut token_iter: token_stream::IntoIter,
67 string: &mut String,
68 marker_items: &mut Vec<String>,
69) {
70 let mut last_was_ident = false;
71 while let Some(token_tree) = token_iter.next() {
72 let current_is_ident = matches!(token_tree, TokenTree::Ident(_));
73
74 match token_tree {
75 TokenTree::Group(group) => {
76 let delimiter = group.delimiter();
77 let (start, end) = match delimiter {
78 Delimiter::Parenthesis => ("(", ")"),
79 Delimiter::Brace => ("{", "}"),
80 Delimiter::Bracket => ("[", "]"),
81 Delimiter::None => ("", ""),
82 };
83 string.push_str(start);
84 parse_token_stream(group.stream().into_iter(), string, marker_items);
85 string.push_str(end);
86 }
87 TokenTree::Ident(ident) => {
88 if last_was_ident {
89 string.push(' ');
90 }
91 string.push_str(ident.to_string().as_str());
92 }
93 TokenTree::Punct(punctuation) => {
94 let chr = punctuation.as_char();
95 if chr == '#' {
96 if let Some(TokenTree::Ident(ident)) = token_iter.next() {
97 let expr_name = ident.to_string();
98 marker_items.push(expr_name);
99 } else {
100 panic!("Expected ident")
101 }
102 string.push(' ');
104 string.push_str(ezno_parser::marker::MARKER);
105 string.push(' ');
106 } else {
107 let spacing = matches!(punctuation.spacing(), Spacing::Alone)
108 && !matches!(chr, '<' | '>' | '/');
109 if spacing && !string.ends_with("</") {
110 string.push(' ');
111 }
112 string.push(chr);
113 if spacing {
114 string.push(' ');
115 }
116 }
117 }
118 TokenTree::Literal(literal) => {
119 string.push_str(literal.to_string().as_str());
120 }
121 }
122
123 last_was_ident = current_is_ident;
124 }
125}