// Public surface: AST node types and the error type callers match on.
pub mod ast;
pub mod error;
// Internal pipeline stages (crate-private): lexing, grammar, resolution,
// and "did you mean" suggestions.
pub(crate) mod grammar;
pub(crate) mod lexer;
pub(crate) mod resolver;
pub(crate) mod suggest;
pub mod token;
pub use error::ParseError;
// Maximum accepted input length in bytes (checked via `str::len` before
// trimming); longer inputs are rejected with `ParseError::input_too_long`.
const MAX_INPUT_LEN: usize = 1024;
/// Parses a human-readable date expression and resolves it to a single
/// zoned datetime, relative to `now`.
///
/// Empty (or all-whitespace) input resolves as "now". An input that is a
/// valid RFC 3339 timestamp is accepted directly and placed in `now`'s
/// time zone; anything else goes through the lexer/grammar/resolver
/// pipeline.
///
/// # Errors
///
/// Returns [`ParseError`] if the input exceeds `MAX_INPUT_LEN` bytes,
/// fails to parse, or cannot be resolved.
#[must_use = "parse returns a Result that should not be discarded"]
pub fn parse(input: &str, now: &jiff::Zoned) -> std::result::Result<jiff::Zoned, ParseError> {
    if input.len() > MAX_INPUT_LEN {
        return Err(ParseError::input_too_long(input.len(), MAX_INPUT_LEN));
    }

    let text = input.trim();
    if text.is_empty() {
        return resolver::resolve(&ast::DateExpr::Now, now);
    }

    // Fast path: a literal timestamp needs no tokenizing at all.
    match text.parse::<jiff::Timestamp>() {
        Ok(ts) => Ok(ts.to_zoned(now.time_zone().clone())),
        Err(_) => {
            let tokens = lexer::tokenize(text);
            let mut p = grammar::Parser::new(&tokens, text);
            let expr = p.parse_expression()?;
            resolver::resolve(&expr, now)
        }
    }
}
/// Parses a human-readable date expression and resolves it to a
/// `(start, end)` range at the expression's natural granularity,
/// relative to `now`.
///
/// Empty (or all-whitespace) input yields a degenerate range at `now`.
/// A valid RFC 3339 timestamp yields a degenerate range at that instant,
/// expressed in `now`'s time zone. Everything else goes through the
/// lexer/grammar/resolver pipeline.
///
/// # Errors
///
/// Returns [`ParseError`] if the input exceeds `MAX_INPUT_LEN` bytes,
/// fails to parse, or cannot be resolved.
#[must_use = "parse_range_with_granularity returns a Result that should not be discarded"]
pub fn parse_range_with_granularity(
    input: &str,
    now: &jiff::Zoned,
) -> std::result::Result<(jiff::Zoned, jiff::Zoned), ParseError> {
    if input.len() > MAX_INPUT_LEN {
        return Err(ParseError::input_too_long(input.len(), MAX_INPUT_LEN));
    }

    let text = input.trim();

    // Empty input: collapse to a point-in-time range at "now".
    if text.is_empty() {
        return Ok((now.clone(), now.clone()));
    }

    // Fast path: a literal timestamp is a point-in-time range as well.
    if let Ok(ts) = text.parse::<jiff::Timestamp>() {
        let zoned = ts.to_zoned(now.time_zone().clone());
        return Ok((zoned.clone(), zoned));
    }

    let tokens = lexer::tokenize(text);
    let mut p = grammar::Parser::new(&tokens, text);
    let expr = p.parse_expression()?;
    resolver::resolve_range_with_granularity(&expr, now)
}