// Public AST so callers can inspect parsed expressions.
pub mod ast;
// Public error type surface.
pub mod error;
// Internal parsing pipeline: token stream -> grammar -> resolved time.
pub(crate) mod grammar;
pub(crate) mod lexer;
pub(crate) mod resolver;
pub(crate) mod suggest;
// Public token module (used by `lexer` internally; exposed for callers).
pub mod token;

pub use error::ParseError;

// Upper bound (in bytes, pre-trim) on accepted input; longer inputs are
// rejected up front by `parse` / `parse_range_with_granularity` to bound
// lexer/parser work on hostile or accidental oversized strings.
const MAX_INPUT_LEN: usize = 1024;

21#[must_use = "parse returns a Result that should not be discarded"]
28pub fn parse(input: &str, now: &jiff::Zoned) -> std::result::Result<jiff::Zoned, ParseError> {
29 if input.len() > MAX_INPUT_LEN {
30 return Err(ParseError::input_too_long(input.len(), MAX_INPUT_LEN));
31 }
32
33 let trimmed = input.trim();
34 if trimmed.is_empty() {
35 return resolver::resolve(&ast::DateExpr::Now, now);
36 }
37
38 if let Ok(ts) = trimmed.parse::<jiff::Timestamp>() {
39 return Ok(ts.to_zoned(now.time_zone().clone()));
40 }
41
42 let tokens = lexer::tokenize(trimmed);
43 let mut parser = grammar::Parser::new(&tokens, trimmed);
44 let expr = parser.parse_expression()?;
45 resolver::resolve(&expr, now)
46}
47
48#[must_use = "parse_range_with_granularity returns a Result that should not be discarded"]
60pub fn parse_range_with_granularity(
61 input: &str,
62 now: &jiff::Zoned,
63) -> std::result::Result<(jiff::Zoned, jiff::Zoned), ParseError> {
64 if input.len() > MAX_INPUT_LEN {
65 return Err(ParseError::input_too_long(input.len(), MAX_INPUT_LEN));
66 }
67
68 let trimmed = input.trim();
69 if trimmed.is_empty() {
70 let z = now.clone();
71 return Ok((z.clone(), z));
72 }
73
74 if let Ok(ts) = trimmed.parse::<jiff::Timestamp>() {
75 let z = ts.to_zoned(now.time_zone().clone());
76 return Ok((z.clone(), z));
77 }
78
79 let tokens = lexer::tokenize(trimmed);
80 let mut parser = grammar::Parser::new(&tokens, trimmed);
81 let expr = parser.parse_expression()?;
82 resolver::resolve_range_with_granularity(&expr, now)
83}