policy_engine/tokenizer.rs

use crate::error::TokenizeError;

#[derive(Debug, Clone)]
pub enum Token {
    And,
    Or,
    Equal,
    OpenParen,
    CloseParen,
    Greater,
    Less,
    GreaterEqual,
    LessEqual,
    NotEqual,
    Identifier(String),
    Literal(String),
}

pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenizeError> {
    let mut tokens = vec![];
    let mut iter = input.chars().peekable();

    while let Some(&c) = iter.peek() {
        match c {
            // Skip whitespace between tokens (spaces, tabs, newlines).
            c if c.is_whitespace() => {
                iter.next();
            }
            '(' => {
                tokens.push(Token::OpenParen);
                iter.next();
            }
            ')' => {
                tokens.push(Token::CloseParen);
                iter.next();
            }
            '&' => {
                // Logical AND must be written `&&`; anything else is an error.
                iter.next();
                if iter.next().filter(|&c| c == '&').is_some() {
                    tokens.push(Token::And);
                } else {
                    return Err(TokenizeError::IncompleteToken);
                }
            }
            '|' => {
                // Logical OR must be written `||`; anything else is an error.
                iter.next();
                if iter.next().filter(|&c| c == '|').is_some() {
                    tokens.push(Token::Or);
                } else {
                    return Err(TokenizeError::IncompleteToken);
                }
            }
            '=' => {
                tokens.push(Token::Equal);
                iter.next();
            }
            '!' => {
                // Not-equal is written `!=`; the '!' must be followed by '='.
                iter.next();
                if iter.next().filter(|&c| c == '=').is_some() {
                    tokens.push(Token::NotEqual);
                } else {
                    return Err(TokenizeError::IncompleteToken);
                }
            }
            '>' => {
                // `>=` when followed by '=', otherwise `>`.
                iter.next();
                if iter.peek() == Some(&'=') {
                    iter.next();
                    tokens.push(Token::GreaterEqual);
                } else {
                    tokens.push(Token::Greater);
                }
            }
            '<' => {
                // `<=` when followed by '=', otherwise `<`.
                iter.next();
                if iter.peek() == Some(&'=') {
                    iter.next();
                    tokens.push(Token::LessEqual);
                } else {
                    tokens.push(Token::Less);
                }
            }
            c if c.is_ascii_digit() => {
                // Numeric literal: a run of ASCII digits.
                let mut num = String::new();
                while let Some(&ch) = iter.peek() {
                    if ch.is_ascii_digit() {
                        num.push(ch);
                        iter.next();
                    } else {
                        break;
                    }
                }
                // Reject values that do not fit in an i32.
                if num.parse::<i32>().is_err() {
                    return Err(TokenizeError::MalformedNumericLiteral);
                }
                tokens.push(Token::Literal(num));
            }
            ch if ch.is_alphabetic() => {
                // Identifier: a letter followed by letters, digits, or underscores.
                let mut ident = String::new();
                while let Some(&ch) = iter.peek() {
                    if ch.is_alphabetic() || ch.is_ascii_digit() || ch == '_' {
                        ident.push(ch);
                        iter.next();
                    } else {
                        break;
                    }
                }
                // Boolean keywords are literals; everything else is an identifier.
                tokens.push(if ident == "true" || ident == "false" {
                    Token::Literal(ident)
                } else {
                    Token::Identifier(ident)
                });
            }
            _ => {
                // Any other character cannot start a token; reject the input rather
                // than silently skipping it. `IncompleteToken` is reused here since
                // no more specific "unexpected character" variant is used in this file.
                return Err(TokenizeError::IncompleteToken);
            }
        }
    }

    Ok(tokens)
}
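
// A minimal usage sketch: it runs `tokenize` on a sample policy expression and
// spot-checks the resulting token kinds with `matches!`, so it assumes nothing
// about `Token` or `TokenizeError` beyond the definitions above.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenizes_a_simple_policy_expression() {
        let tokens = match tokenize("(age >= 18) && role = admin") {
            Ok(tokens) => tokens,
            Err(_) => panic!("expected the expression to tokenize"),
        };

        // Expected sequence: ( age >= 18 ) && role = admin  -> 9 tokens.
        assert_eq!(tokens.len(), 9);
        assert!(matches!(tokens[0], Token::OpenParen));
        assert!(matches!(tokens[1], Token::Identifier(ref s) if s == "age"));
        assert!(matches!(tokens[2], Token::GreaterEqual));
        assert!(matches!(tokens[3], Token::Literal(ref s) if s == "18"));
        assert!(matches!(tokens[4], Token::CloseParen));
        assert!(matches!(tokens[5], Token::And));
        assert!(matches!(tokens[8], Token::Identifier(ref s) if s == "admin"));
    }
}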