1use std::fmt::Debug;
2use std::iter::Peekable;
3
4use crate::lexer::{Lexer, Span, SpanType};
5use crate::Error;
6
7pub mod filter;
8pub mod order;
9pub mod select;
10
11pub use filter::Filter;
12pub use filter::InnerFilter;
13pub use filter::Path as FilterPath;
14pub use order::Order;
15pub use order::OrderItem;
16pub use select::Select;
17pub use select::{Field, FieldKey};
18
/// Query-string key selecting which columns are returned.
pub const SELECT: &str = "select";
/// Query-string key defining the result ordering.
pub const ORDER: &str = "order";
/// Query-string key capping the number of returned rows.
pub const LIMIT: &str = "limit";
/// Query-string key skipping an initial number of rows.
pub const OFFSET: &str = "offset";
/// All reserved top-level keys; anything else is treated as a filter field.
pub const RESERVED_KEYWORDS: [&str; 4] = [SELECT, ORDER, LIMIT, OFFSET];
26
/// Parsed representation of a complete query string.
///
/// Produced by [`Ast::from_lexer`]; each optional clause is `None` when the
/// corresponding key did not appear in the input.
#[derive(Debug, PartialEq, Clone, Default)]
pub struct Ast {
    /// Column selection (`select=...`), if present.
    pub select: Option<Select>,
    /// Result ordering (`order=...`), if present.
    pub order: Option<Order>,
    /// Number of leading rows to skip (`offset=...`), if present.
    pub offset: Option<usize>,
    /// Maximum number of rows to return (`limit=...`), if present.
    pub limit: Option<usize>,
    /// All filter clauses, in the order they appeared in the input.
    pub filter: Vec<Filter>,
}
35
36impl Ast {
37 pub fn from_lexer<T>(input: &str, tokens: Lexer<T>) -> Result<Ast, Error>
40 where
41 T: Iterator<Item = char>,
42 {
43 let mut ast = Ast::default();
44
45 let mut peekable_tokens = tokens.peekable();
46 while let Some(token) = peekable_tokens.next() {
47 if token.span_type == SpanType::String && SELECT == &input[token.range.clone()] {
48 match peekable_tokens.next() {
49 Some(Span {
50 span_type: SpanType::Equal,
51 ..
52 }) => (),
53 Some(Span {
54 span_type: found,
55 range,
56 }) => return Err(Error::invalid_token(SpanType::Equal, found, range)),
57 None => return Err(Error::UnexpectedEnd),
58 }
59 ast.select = Some(Self::parse_select(input, &mut peekable_tokens, 0)?);
60 }
61 if token.span_type == SpanType::String && ORDER == &input[token.range.clone()] {
62 match peekable_tokens.next() {
63 Some(Span {
64 span_type: SpanType::Equal,
65 ..
66 }) => (),
67 Some(Span {
68 span_type: found,
69 range,
70 }) => return Err(Error::invalid_token(SpanType::Equal, found, range)),
71 None => return Err(Error::UnexpectedEnd),
72 }
73 ast.order = Some(Self::parse_order(input, &mut peekable_tokens)?);
74 }
75 if token.span_type == SpanType::String && OFFSET == &input[token.range.clone()] {
76 match peekable_tokens.next() {
77 Some(Span {
78 span_type: SpanType::Equal,
79 ..
80 }) => (),
81 Some(Span {
82 span_type: found,
83 range,
84 }) => return Err(Error::invalid_token(SpanType::Equal, found, range)),
85 None => return Err(Error::UnexpectedEnd),
86 }
87 ast.offset = Some(Self::parse_integer(input, &mut peekable_tokens)?);
88 }
89 if token.span_type == SpanType::String && LIMIT == &input[token.range.clone()] {
90 match peekable_tokens.next() {
91 Some(Span {
92 span_type: SpanType::Equal,
93 ..
94 }) => (),
95
96 Some(Span {
97 span_type: found,
98 range,
99 }) => return Err(Error::invalid_token(SpanType::Equal, found, range)),
100 None => return Err(Error::UnexpectedEnd),
101 }
102 ast.limit = Some(Self::parse_integer(input, &mut peekable_tokens)?);
103 }
104 if token.span_type == SpanType::String
105 && !RESERVED_KEYWORDS.contains(&&input[token.range.clone()])
106 {
107 match peekable_tokens.peek() {
108 Some(Span {
109 span_type: SpanType::Equal,
110 ..
111 }) => (),
112 Some(Span {
113 span_type: SpanType::PathSeparator,
114 ..
115 }) => (),
116 Some(Span {
117 span_type: found,
118 range,
119 }) => return Err(Error::invalid_token(SpanType::Equal, *found, range.clone())),
120 None => return Err(Error::UnexpectedEnd),
121 }
122 let field = &input[token.range];
123 ast.filter
124 .push(Self::parse_filter(field, input, &mut peekable_tokens)?);
125 }
126 }
127
128 Ok(ast)
129 }
130
131 pub(crate) fn parse_integer<T>(
132 input: &str,
133 tokens: &mut Peekable<Lexer<T>>,
134 ) -> Result<usize, Error>
135 where
136 T: Iterator<Item = char>,
137 {
138 match tokens.next() {
139 Some(token) if token.span_type == SpanType::String => {
140 let data = &input[token.range.clone()];
141 if let Ok(integer) = data.parse::<usize>() {
142 return Ok(integer);
143 } else {
144 return Err(Error::InvalidInteger {
145 found: data.to_string(),
146 range: token.range,
147 });
148 }
149 }
150 Some(token) => {
151 return Err(Error::invalid_token(
152 SpanType::String,
153 token.span_type,
154 token.range,
155 ));
156 }
157 None => return Err(Error::UnexpectedEnd),
158 }
159 }
160}
161
#[test]
fn simple_limit() {
    // A lone `limit` key should populate only the `limit` field.
    let query = "limit=150";
    let parsed = Ast::from_lexer(query, Lexer::new(query.chars())).unwrap();

    let wanted = Ast {
        limit: Some(150),
        ..Default::default()
    };
    assert_eq!(wanted, parsed);
}
174
#[test]
fn simple_offset() {
    // A lone `offset` key should populate only the `offset` field.
    let query = "offset=1000";
    let parsed = Ast::from_lexer(query, Lexer::new(query.chars())).unwrap();

    let wanted = Ast {
        offset: Some(1000),
        ..Default::default()
    };
    assert_eq!(wanted, parsed);
}
187
#[test]
fn offset_and_limit() {
    // Both pagination keys in a single query string.
    let query = "limit=512&offset=9321";
    let parsed = Ast::from_lexer(query, Lexer::new(query.chars())).unwrap();

    let wanted = Ast {
        offset: Some(9321),
        limit: Some(512),
        ..Default::default()
    };
    assert_eq!(wanted, parsed);
}
201
#[test]
fn simple_combined_query() {
    // Filters, ordering and selection mixed in one query; the two `id`
    // filters must be kept in input order.
    let query = "id=gte.14&order=id.asc&select=id&id=lt.54";
    let parsed = Ast::from_lexer(query, Lexer::new(query.chars())).unwrap();

    let select = Select {
        fields: vec![Field::Key(FieldKey {
            column: "id".to_string(),
            alias: None,
        })],
    };
    let order = Order {
        fields: vec![OrderItem {
            field: "id".to_string(),
            operator: order::Operator::Asc,
            nulls_position: None,
        }],
    };
    let filter = vec![
        Filter::One(InnerFilter {
            path: FilterPath::Leaf("id".to_string()),
            operator: filter::Operator::GreaterThanEqual,
            value: "14".to_string(),
        }),
        Filter::One(InnerFilter {
            path: FilterPath::Leaf("id".to_string()),
            operator: filter::Operator::LessThan,
            value: "54".to_string(),
        }),
    ];

    let wanted = Ast {
        select: Some(select),
        order: Some(order),
        filter,
        ..Default::default()
    };
    assert_eq!(wanted, parsed);
}