1#![deny(warnings)]
2
/// Newtype over `lexers::Scanner` that yields calculator tokens
/// (math operators, numbers, identifiers) as owned `String`s.
// NOTE(review): the `Iterator<Item=char>` bound is repeated here and on the
// impl; it presumably mirrors a bound on `lexers::Scanner` itself — confirm
// before relaxing.
struct Tokenizer<I: Iterator<Item=char>>(lexers::Scanner<I>);
4
5impl<I: Iterator<Item=char>> Iterator for Tokenizer<I> {
6 type Item = String;
7 fn next(&mut self) -> Option<Self::Item> {
8 self.0.scan_whitespace();
9 self.0.scan_math_op()
10 .or_else(|| self.0.scan_number())
11 .or_else(|| self.0.scan_identifier())
12 }
13}
14
15fn tokenizer<I: Iterator<Item=char>>(input: I) -> Tokenizer<I> {
16 Tokenizer(lexers::Scanner::new(input))
17}
18
19fn main() {
20 let grammar = r#"
21 expr := expr ('+'|'-') term | term ;
22 term := term ('*'|'/') factor | factor ;
23 factor := '-' factor | power ;
24 power := ufact '^' factor | ufact ;
25 ufact := ufact '!' | group ;
26 group := num | '(' expr ')' ;
27 "#;
28
29 let input = std::env::args().skip(1).
30 collect::<Vec<String>>().join(" ");
31
32 use std::str::FromStr;
33 let trificator = abackus::ParserBuilder::default()
34 .plug_terminal("num", |n| f64::from_str(n).is_ok())
35 .sexprificator(&grammar, "expr");
36
37 match trificator(&mut tokenizer(input.chars())) {
38 Ok(trees) => for t in trees { println!("{}", t.print()); },
39 Err(e) => println!("{:?}", e)
40 }
41}