extern crate proc_macro;

use proc_macro::{TokenStream, TokenTree, Ident, Span, Punct, Spacing, Group, Delimiter};
use proc_macro::token_stream::IntoIter as TokenIter;
use std::iter::Peekable;

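/// Wraps the macro input in a peekable token iterator so the parser can look
/// one token ahead (for `.` and `,`) without consuming it.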
struct Parser {
    iter: Peekable<TokenIter>,
}

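/// A parsed macro argument: either a bare token (`Simple`) or a dotted chain
/// split into everything before the final `.` (`base`), the dot itself, and
/// the trailing field name (`field`).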
enum Expression {
    Simple(TokenStream),
    Dot {
        base: TokenStream,
        dot: TokenTree,
        field: TokenStream,
    },
}

impl Expression {
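    /// Flattens the expression back into a single `TokenStream`, re-joining
    /// `base`, the dot and `field` in the `Dot` case.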
    fn simplify(self) -> TokenStream {
        match self {
            Expression::Simple(simple) => simple,
            Expression::Dot { mut base, dot, field } => {
                base.extend([dot]);
                base.extend(field);
                base
            }
        }
    }

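    /// Appends another `.field` segment: the current expression is flattened
    /// into the new `base`, and the freshly parsed dot and field become the tail.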
    fn adding(self, dot: TokenTree, field: TokenStream) -> Expression {
        let stream = self.simplify();
        Expression::Dot {
            base: stream,
            dot,
            field,
        }
    }
}

impl Parser {
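    /// Parses one dotted expression (`a`, `a.b`, `a.b.c`, ...), stopping at a
    /// `,` or at the end of the input. Panics on any other token.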
    fn parse_expression(&mut self) -> Expression {
        let base = self.iter.next().unwrap();
        let mut expression = Expression::Simple(TokenStream::from(base));
        while let Some(tok) = self.iter.peek() {
            let dot = match tok {
                TokenTree::Punct(punct) if punct.as_char() == '.' => {
                    self.iter.next().unwrap()
                },
                TokenTree::Punct(punct) if punct.as_char() == ',' => {
                    break;
                },
                _ => panic!("unexpected token {}", tok),
            };
            let tok = self.iter.next().unwrap();
            match tok {
                TokenTree::Ident(_) => expression = expression.adding(dot, TokenStream::from(tok)),
                _ => panic!("unexpected token {}", tok),
            }
        }
        expression
    }

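    /// Consumes a `,` if it is the next token and reports whether one was found.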
    fn try_parse_comma(&mut self) -> bool {
        match self.iter.peek() {
            Some(TokenTree::Punct(punct)) if punct.as_char() == ',' => {
                self.iter.next();
                true
            },
            _ => false,
        }
    }
}

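/// Incrementally assembles the output `TokenStream`: identifiers, `.field`
/// accesses and bracketed index expressions.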
struct Builder {
    stream: TokenStream,
}

impl Builder {
    fn new() -> Self {
        Builder { stream: TokenStream::new() }
    }

    fn ident(mut self, ident: &str) -> Self {
        self.stream.extend([
            TokenTree::Ident(Ident::new(ident, Span::call_site())),
        ]);
        self
    }

    fn field(mut self, name: &str) -> Self {
        self.stream.extend([
            TokenTree::Punct(Punct::new('.', Spacing::Alone)),
        ]);
        self.ident(name)
    }

    fn brackets(mut self, index: impl Into<TokenStream>) -> Self {
        self.stream.extend([
            TokenTree::Group(
                Group::new(
                    Delimiter::Bracket,
                    index.into(),
                )
            )
        ]);
        self
    }

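    /// Emits `<driver>.code.hir`, the prefix shared by every generated lookup.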
    fn hir_code(mut self, driver: impl Into<TokenStream>) -> Self {
        self.stream.extend(driver.into());
        self
            .field("code")
            .field("hir")
    }

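    /// Emits `<driver>.code.hir.expr_to_items[<expr_id>]`.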
    fn expr_to_item(self, driver: impl Into<TokenStream>, expr_id: impl Into<TokenStream>) -> Self {
        self.hir_code(driver)
            .field("expr_to_items")
            .brackets(expr_id.into())
    }

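    /// Emits `<driver>.code.hir.decl_to_items[<decl_id>]`.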
    fn decl_to_item(self, driver: impl Into<TokenStream>, decl_id: impl Into<TokenStream>) -> Self {
        self.hir_code(driver)
            .field("decl_to_items")
            .brackets(decl_id.into())
    }

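    /// Emits `<driver>.code.hir.source_ranges[<item_id>]`.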
    fn source_range(self, driver: impl Into<TokenStream>, item_id: impl Into<TokenStream>) -> Self {
        self
            .hir_code(driver)
            .field("source_ranges")
            .brackets(item_id)
    }

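    /// Emits `<driver>.code.hir.item_generic_ctxs[<item_id>]`.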
    fn generic_ctx_id(self, driver: impl Into<TokenStream>, item_id: impl Into<TokenStream>) -> Self {
        self
            .hir_code(driver)
            .field("item_generic_ctxs")
            .brackets(item_id)
    }

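    /// Convenience wrapper: looks up the item for `<expr_id>` first, then emits
    /// `<driver>.code.hir.item_generic_ctxs[<driver>.code.hir.expr_to_items[<expr_id>]]`.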
    fn generic_ctx_id_from_expr(self, driver: impl Into<TokenStream> + Clone, expr_id: impl Into<TokenStream>) -> Self {
        self.generic_ctx_id(driver.clone(), Builder::new().expr_to_item(driver, expr_id))
    }
}

impl From<Builder> for TokenStream {
    fn from(b: Builder) -> TokenStream {
        b.stream
    }
}

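/// Splits the macro input into `(driver, field_expression)`. With a single
/// argument the driver defaults to `self`; with `driver, expr` the first
/// expression becomes the driver.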
fn parse_input(input: TokenStream) -> (TokenStream, Expression) {
    let mut parser = Parser { iter: input.into_iter().peekable() };
    let mut driver = Builder::new().ident("self").stream;
    let mut field = parser.parse_expression();
    if parser.try_parse_comma() {
        driver = field.simplify();
        field = parser.parse_expression();
    }
    (driver, field)
}

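/// Shorthand for common expression-ID lookups. With the default `self` driver,
/// `ef!(id.<field>)` expands roughly as follows:
///
/// - `ef!(id.hir)`            => `self.code.hir.exprs[id]`
/// - `ef!(id.item)`           => `self.code.hir.expr_to_items[id]`
/// - `ef!(id.range)`          => `self.code.hir.source_ranges[self.code.hir.expr_to_items[id]]`
/// - `ef!(id.generic_ctx_id)` => `self.code.hir.item_generic_ctxs[self.code.hir.expr_to_items[id]]`
/// - `ef!(id.generic_ctx)`    => `self.code.hir.generic_ctxs[ef!(id.generic_ctx_id)]`
///
/// `ef!(driver, id.<field>)` substitutes `driver` for `self`.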
#[proc_macro]
pub fn ef(input: TokenStream) -> TokenStream {
    let (driver, field) = parse_input(input);
    match field {
        Expression::Dot { base, field, .. } => {
            match field.to_string().as_str() {
                "range" => {
                    Builder::new()
                        .source_range(
                            driver.clone(),
                            Builder::new().expr_to_item(driver, base),
                        )
                },
                "generic_ctx" => {
                    Builder::new()
                        .hir_code(driver.clone())
                        .field("generic_ctxs")
                        .brackets(Builder::new().generic_ctx_id_from_expr(driver, base))
                },
                "generic_ctx_id" => {
                    Builder::new()
                        .generic_ctx_id_from_expr(
                            driver,
                            base,
                        )
                },
                "hir" => {
                    Builder::new()
                        .hir_code(driver)
                        .field("exprs")
                        .brackets(base)
                },
                "item" => {
                    Builder::new()
                        .expr_to_item(driver, base)
                },
                name => panic!("Unrecognized expression field name {}", name),
            }
        },
        Expression::Simple(expr_id) => panic!("Unexpected bare identifier '{}'; expected a field access such as `.range`", expr_id),
    }.stream
}

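/// Declaration-ID counterpart of `ef!`. With the default `self` driver,
/// `df!(id.<field>)` expands roughly as follows:
///
/// - `df!(id.hir)`            => `self.code.hir.decls[id]`
/// - `df!(id.item)`           => `self.code.hir.decl_to_items[id]`
/// - `df!(id.range)`          => `self.code.hir.source_ranges[self.code.hir.decl_to_items[id]]`
/// - `df!(id.generic_ctx_id)` => `self.code.hir.item_generic_ctxs[self.code.hir.decl_to_items[id]]`
///
/// `df!(driver, id.<field>)` substitutes `driver` for `self`.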
#[proc_macro]
pub fn df(input: TokenStream) -> TokenStream {
    let (driver, field) = parse_input(input);
    match field {
        Expression::Dot { base, field, .. } => {
            match field.to_string().as_str() {
                "range" => {
                    Builder::new()
                        .source_range(
                            driver.clone(),
                            Builder::new().decl_to_item(driver, base),
                        )
                        .stream
                },
                "generic_ctx_id" => {
                    Builder::new()
                        .generic_ctx_id(
                            driver.clone(),
                            Builder::new().decl_to_item(driver, base),
                        )
                        .stream
                },
                "hir" => {
                    Builder::new()
                        .hir_code(driver)
                        .field("decls")
                        .brackets(base)
                        .stream
                },
                "item" => {
                    Builder::new()
                        .decl_to_item(driver, base)
                        .stream
                },
                name => panic!("Unrecognized declaration field name {}", name),
            }
        },
        Expression::Simple(decl_id) => panic!("Unexpected bare identifier '{}'; expected a field access such as `.range`", decl_id),
    }
}