1extern crate proc_macro;
2
3use proc_macro::TokenStream;
4use quote::quote;
5use syn::{
6 Expr, Ident, LitInt, LitStr, Result, Token, braced, bracketed,
7 parse::{Parse, ParseStream},
8 parse_macro_input,
9};
10
11#[proc_macro]
12pub fn test_lexer(input: TokenStream) -> TokenStream {
13 let config = parse_macro_input!(input as TestConfig);
14 let name = &config.name;
15 let language = &config.language;
16 let lexer = &config.lexer;
17 let extension = &config.extension;
18 let path = &config.path;
19 let timeout = config.timeout.unwrap_or(10);
20
21 let expanded = quote! {
22 #[test]
23 fn #name() -> Result<(), oak_core::OakError> {
24 use oak_core::helpers::LexerTester;
25 use std::{path::Path, time::Duration};
26
27 let here = Path::new(env!("CARGO_MANIFEST_DIR"));
28 let language = Box::leak(Box::new(#language::default()));
29 let lexer = #lexer::new(language);
30 let test_runner = LexerTester::new(here.join(#path))
31 .with_extension(#extension)
32 .with_timeout(Duration::from_secs(#timeout));
33
34 test_runner.run_tests::<#language, _>(lexer)
35 }
36 };
37 TokenStream::from(expanded)
38}
39
40#[proc_macro]
41pub fn test_parser(input: TokenStream) -> TokenStream {
42 let config = parse_macro_input!(input as TestConfig);
43 let name = &config.name;
44 let language = &config.language;
45 let parser = &config.lexer; let extension = &config.extension;
47 let path = &config.path;
48 let timeout = config.timeout.unwrap_or(10);
49
50 let expanded = quote! {
51 #[test]
52 fn #name() -> Result<(), oak_core::OakError> {
53 use oak_core::helpers::ParserTester;
54 use std::{path::Path, time::Duration};
55
56 let here = Path::new(env!("CARGO_MANIFEST_DIR"));
57 let lang = Box::leak(Box::new(#language::default()));
58 let parser = Box::leak(Box::new(#parser::new(lang)));
59 let test_runner = ParserTester::new(here.join(#path))
60 .with_extension(#extension)
61 .with_timeout(Duration::from_secs(#timeout));
62
63 test_runner.run_tests::<#language, _>(parser)
64 }
65 };
66 TokenStream::from(expanded)
67}
68
69#[proc_macro_attribute]
70pub fn oak_test(_attr: TokenStream, item: TokenStream) -> TokenStream {
71 let input = parse_macro_input!(item as syn::ItemFn);
72 let attrs = &input.attrs;
73 let vis = &input.vis;
74 let sig = &input.sig;
75 let body = &input.block;
76
77 let expanded = quote! {
78 #(#attrs)*
79 #[test]
80 #vis #sig {
81 use std::sync::mpsc;
82 use std::time::Duration;
83 use std::thread;
84
85 let (tx, rx) = mpsc::channel();
86 thread::spawn(move || {
87 let result = (move || #body)();
88 let _ = tx.send(result);
89 });
90
91 match rx.recv_timeout(Duration::from_secs(10)) {
92 Ok(result) => result,
93 Err(mpsc::RecvTimeoutError::Timeout) => {
94 panic!("Test timed out after 10 seconds. Possible infinite loop detected in parser.");
95 }
96 Err(mpsc::RecvTimeoutError::Disconnected) => {
97 panic!("Test thread panicked or disconnected unexpectedly.");
98 }
99 }
100 }
101 };
102 TokenStream::from(expanded)
103}
104
/// Configuration parsed from the body of `test_lexer!` / `test_parser!`.
struct TestConfig {
    /// Name of the generated `#[test]` function.
    name: Ident,
    /// Language type; the generated test instantiates it via `Default`.
    language: Ident,
    /// Lexer or parser type — the `lexer:` and `parser:` keys both fill
    /// this slot.
    lexer: Ident,
    /// File extension of the fixture files to run.
    extension: LitStr,
    /// Fixture directory, joined onto `CARGO_MANIFEST_DIR`.
    path: LitStr,
    /// Per-test timeout in seconds; `None` means the 10-second default.
    timeout: Option<u64>,
}
113
114impl Parse for TestConfig {
115 fn parse(input: ParseStream) -> Result<Self> {
116 let mut name = None;
117 let mut language = None;
118 let mut lexer = None;
119 let mut extension = None;
120 let mut path = None;
121 let mut timeout = None;
122
123 while !input.is_empty() {
124 let key: Ident = input.parse()?;
125 input.parse::<Token![:]>()?;
126
127 match key.to_string().as_str() {
128 "name" => name = Some(input.parse()?),
129 "language" => language = Some(input.parse()?),
130 "lexer" | "parser" => lexer = Some(input.parse()?),
131 "extension" => extension = Some(input.parse()?),
132 "path" => path = Some(input.parse()?),
133 "timeout" => {
134 let lit: LitInt = input.parse()?;
135 timeout = Some(lit.base10_parse()?);
136 }
137 _ => return Err(syn::Error::new(key.span(), format!("未知字段: {}", key))),
138 }
139
140 if input.peek(Token![,]) {
141 input.parse::<Token![,]>()?;
142 }
143 }
144
145 Ok(TestConfig {
146 name: name.ok_or_else(|| input.error("缺少字段: name"))?,
147 language: language.ok_or_else(|| input.error("缺少字段: language"))?,
148 lexer: lexer.ok_or_else(|| input.error("缺少字段: lexer/parser"))?,
149 extension: extension.ok_or_else(|| input.error("缺少字段: extension"))?,
150 path: path.ok_or_else(|| input.error("缺少字段: path"))?,
151 timeout,
152 })
153 }
154}
155
156#[proc_macro]
157pub fn doc(input: TokenStream) -> TokenStream {
158 let input = parse_macro_input!(input as DocExpr);
159 let expanded = quote! { #input };
160 TokenStream::from(expanded)
161}
162
/// AST of the `doc!` pretty-printing DSL; each variant maps 1:1 onto an
/// `::oak_pretty_print::Doc` constructor (see the `ToTokens` impl).
enum DocExpr {
    /// `nil` — the empty document.
    Nil,
    /// `line` keyword.
    Line,
    /// `soft_line` keyword.
    SoftLine,
    /// `soft_line_space` keyword.
    SoftLineSpace,
    /// `hard_line` keyword.
    HardLine,
    /// `indent { e }` or `indent e`.
    Indent(Box<DocExpr>),
    /// `group { e }` or `group e`.
    Group(Box<DocExpr>),
    /// `[a, b, ...]` — concatenation of nested expressions.
    Concat(Vec<DocExpr>),
    /// A string literal, emitted as `Doc::Text`.
    Text(LitStr),
    /// Any other Rust expression, spliced through verbatim.
    Expr(Expr),
}
175
impl Parse for DocExpr {
    /// Recursive-descent parser for the `doc!` DSL.
    ///
    /// Alternatives, tried in order:
    /// - `[a, b, ...]`  -> `Concat` (commas between items are optional)
    /// - keyword idents `nil` / `line` / `soft_line` / `soft_line_space` /
    ///   `hard_line` -> the corresponding leaf variant
    /// - `indent { e }` / `indent e` -> `Indent`; same for `group`
    /// - any other ident -> parsed as a plain Rust expression (`Expr`)
    /// - string literal -> `Text`
    /// - anything else -> plain Rust expression (`Expr`)
    fn parse(input: ParseStream) -> Result<Self> {
        if input.peek(syn::token::Bracket) {
            // `[...]`: concatenation of nested doc expressions.
            let content;
            bracketed!(content in input);
            let mut items = Vec::new();
            while !content.is_empty() {
                items.push(content.parse::<DocExpr>()?);
                // Separator commas are optional.
                if content.peek(Token![,]) {
                    content.parse::<Token![,]>()?;
                }
            }
            Ok(DocExpr::Concat(items))
        }
        else if input.peek(Ident) {
            // Fork so that peeking at the ident does not consume it when it
            // turns out to be the start of an arbitrary expression.
            let lookahead = input.fork();
            let ident = lookahead.parse::<Ident>()?;
            match ident.to_string().as_str() {
                "nil" | "line" | "soft_line" | "soft_line_space" | "hard_line" => {
                    // Keyword confirmed: consume it from the real stream.
                    input.parse::<Ident>()?;
                    match ident.to_string().as_str() {
                        "nil" => Ok(DocExpr::Nil),
                        "line" => Ok(DocExpr::Line),
                        "soft_line" => Ok(DocExpr::SoftLine),
                        "soft_line_space" => Ok(DocExpr::SoftLineSpace),
                        "hard_line" => Ok(DocExpr::HardLine),
                        // Outer match already restricted the set of idents.
                        _ => unreachable!(),
                    }
                }
                "indent" => {
                    let ident = input.parse::<Ident>()?;
                    if input.peek(syn::token::Brace) {
                        // Braced form: `indent { <expr> }`.
                        let content;
                        braced!(content in input);
                        // Point the error at the `indent` keyword as well as
                        // the inner parse failure.
                        let inner = content.parse::<DocExpr>().map_err(|mut e| {
                            let new_error = syn::Error::new(ident.span(), "indent 内部语法错误");
                            e.combine(new_error);
                            e
                        })?;
                        if !content.is_empty() {
                            return Err(content.error("indent 只接受一个表达式,多个表达式请使用 [] 包裹"));
                        }
                        Ok(DocExpr::Indent(Box::new(inner)))
                    }
                    else {
                        // Brace-less form: `indent <expr>` wraps the next
                        // expression directly.
                        Ok(DocExpr::Indent(Box::new(input.parse()?)))
                    }
                }
                "group" => {
                    let ident = input.parse::<Ident>()?;
                    if input.peek(syn::token::Brace) {
                        // Braced form: `group { <expr> }`.
                        let content;
                        braced!(content in input);
                        let inner = content.parse::<DocExpr>().map_err(|mut e| {
                            let new_error = syn::Error::new(ident.span(), "group 内部语法错误");
                            e.combine(new_error);
                            e
                        })?;
                        if !content.is_empty() {
                            return Err(content.error("group 只接受一个表达式,多个表达式请使用 [] 包裹"));
                        }
                        Ok(DocExpr::Group(Box::new(inner)))
                    }
                    else {
                        // Brace-less form: `group <expr>`.
                        Ok(DocExpr::Group(Box::new(input.parse()?)))
                    }
                }
                // Non-keyword ident: re-parse from the unconsumed stream as
                // an arbitrary Rust expression.
                _ => Ok(DocExpr::Expr(input.parse()?)),
            }
        }
        else if input.peek(LitStr) {
            Ok(DocExpr::Text(input.parse()?))
        }
        else {
            Ok(DocExpr::Expr(input.parse()?))
        }
    }
}
254
255impl quote::ToTokens for DocExpr {
256 fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
257 let crate_path = quote! { ::oak_pretty_print };
258 match self {
259 DocExpr::Nil => tokens.extend(quote! { #crate_path::Doc::Nil }),
260 DocExpr::Line => tokens.extend(quote! { #crate_path::Doc::Line }),
261 DocExpr::SoftLine => tokens.extend(quote! { #crate_path::Doc::SoftLine }),
262 DocExpr::SoftLineSpace => tokens.extend(quote! { #crate_path::Doc::SoftLineSpace }),
263 DocExpr::HardLine => tokens.extend(quote! { #crate_path::Doc::HardLine }),
264 DocExpr::Indent(inner) => tokens.extend(quote! { #crate_path::Doc::Indent(Box::new(#inner)) }),
265 DocExpr::Group(inner) => tokens.extend(quote! { #crate_path::Doc::Group(Box::new(#inner)) }),
266 DocExpr::Concat(items) => {
267 tokens.extend(quote! {
268 #crate_path::Doc::Concat(vec![
269 #( #items ),*
270 ])
271 });
272 }
273 DocExpr::Text(lit) => tokens.extend(quote! { #crate_path::Doc::Text(#lit.to_string()) }),
274 DocExpr::Expr(expr) => tokens.extend(quote! { #expr }),
275 }
276 }
277}