pub mod ast;
pub mod error;
mod lexer;
mod parser;
pub mod span;
mod token;

pub use lexer::Lexer;
pub use parser::*;
pub use token::*;

#[cfg(test)]
mod tests {
    use super::*;

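    // Lex a small sample of EBNF input (terminal strings, punctuation,
    // an identifier, a special sequence, and an `(* ... *)` comment) and
    // print each token the lexer produces.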
    #[test]
    fn lex() {
        let mut lexer = Lexer::new("'a' , \"ba\" | () {} [] ; ? asdasd ? (* as (d *) asd");
        while let Ok(Some(token)) = lexer.next_token() {
            println!("{token:?}");
        }
    }

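    // Parse the bundled grammar.ebnf, dump the resulting syntax tree, and
    // print every comment together with the token it is attached to. On a
    // parse error, print the failing span with surrounding context in red.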
    #[test]
    fn parse() {
        let text = include_str!("../grammar.ebnf");
        let res = Parser::new(Lexer::new(text)).parse();
        match res {
            Ok(res) => {
                println!("{:#?}", res.syntax);
                for (k, v) in res.comments {
                    println!(
                        "{}: {:?} -- {:?}",
                        k,
                        v.iter().map(|c| c.text).collect::<Vec<_>>(),
                        res.tokens.get(k),
                    );
                }
            }
            Err(err) => {
                // Clamp the 20-byte context window so the slices cannot run
                // past either end of the source text.
                let start = err.span.start.saturating_sub(20);
                let end = (err.span.end + 20).min(text.len());
                eprintln!(
                    "\x1b[31m{}\x1b[1m{}\x1b[22m{}\x1b[0m\n{}",
                    &text[start..err.span.start],
                    &text[err.span.start..err.span.end],
                    &text[err.span.end..end],
                    err.message,
                );
            }
        }
    }
}