use log::debug;
use nom::{
branch::alt,
combinator::{map, opt, success, value, verify},
multi::{many0, many1, separated_list1},
sequence::{delimited, pair, preceded, terminated, tuple},
IResult,
};
use super::ast::*;
use super::lexer::{tokenize, Token};
use super::pretty_printer::pretty_print_ast;
/// Errors produced while turning raw input text into an AST.
///
/// Variants either wrap a lower-level failure (lexing, unexpected token) or
/// describe a specific, user-actionable syntax mistake with its correction.
#[derive(Debug, thiserror::Error)]
pub enum ParserError {
// Tokenization failed before parsing could start.
#[error("Lexer error: {0}")]
LexerError(String),
// A token appeared where the grammar does not allow it.
#[error("Unexpected token: {0:?}")]
UnexpectedToken(Token),
// The grammar required a token that was not present.
#[error("Expected token: {0:?}")]
ExpectedToken(Token),
#[error("Invalid syntax 'DELETE SCHEMA'. Schema deletion uses 'DROP SCHEMA'. Correct syntax: DROP SCHEMA [IF EXISTS] schema_name [CASCADE | RESTRICT]. Example: DROP SCHEMA analytics_db")]
InvalidDeleteSchema,
#[error("Invalid syntax 'DELETE GRAPH'. Graph deletion uses 'DROP GRAPH'. Correct syntax: DROP [PROPERTY] GRAPH [IF EXISTS] graph_path. Example: DROP GRAPH /test_schema/test_restrict_graph2")]
InvalidDeleteGraph,
#[error("Incomplete UNION operation. Expected a query after UNION. Syntax: query1 UNION [ALL] query2")]
IncompleteUnion,
#[error("Incomplete EXCEPT operation. Expected a query after EXCEPT. Syntax: query1 EXCEPT [ALL] query2")]
IncompleteExcept,
#[error("Incomplete INTERSECT operation. Expected a query after INTERSECT. Syntax: query1 INTERSECT [ALL] query2")]
IncompleteIntersect,
}
/// Strips SQL-style `--` line comments from the token stream.
///
/// Two consecutive `Dash` tokens start a comment and everything up to (but not
/// including) the `EOF` token is discarded — EXCEPT when the token right after
/// the dashes suggests they are part of graph-pattern edge syntax
/// (`-->`, `--(`, `--[`, …) or the dashes end the input, in which case the
/// dashes are kept as ordinary tokens.
fn filter_sql_comments(tokens: Vec<Token>) -> Vec<Token> {
    let mut filtered = Vec::new();
    let mut i = 0;
    while i < tokens.len() {
        if i + 1 < tokens.len()
            && matches!(tokens[i], Token::Dash)
            && matches!(tokens[i + 1], Token::Dash)
        {
            if i + 2 < tokens.len() {
                match &tokens[i + 2] {
                    // Dashes followed by edge/pattern syntax or by EOF: not a
                    // comment. Keep the first dash and re-examine from the
                    // second (which will no longer pair with another dash).
                    Token::Arrow
                    | Token::ArrowLeft
                    | Token::ArrowBoth
                    | Token::LeftParen
                    | Token::LeftBracket
                    | Token::EOF => {
                        filtered.push(tokens[i].clone());
                        i += 1;
                    }
                    // Genuine comment: drop every token up to EOF. The EOF
                    // itself is pushed by the next outer-loop iteration.
                    _ => {
                        while i < tokens.len() && !matches!(tokens[i], Token::EOF) {
                            i += 1;
                        }
                    }
                }
            } else {
                // BUG FIX: the two dashes are the very last tokens (no EOF
                // follows). The previous code had no else branch here, so `i`
                // was never advanced and the loop spun forever. Treat the
                // dash like any other token and move on.
                filtered.push(tokens[i].clone());
                i += 1;
            }
        } else {
            filtered.push(tokens[i].clone());
            i += 1;
        }
    }
    filtered
}
/// Top-level parse entry point: tokenizes `input`, strips SQL comments, runs
/// a few targeted pre-checks for common syntax mistakes, then tries each
/// statement parser in a fixed priority order and wraps the first success in
/// a `Document`.
///
/// # Errors
/// Returns `ParserError` when lexing fails, when a known-bad construct is
/// detected (`DELETE SCHEMA`, dangling `UNION`/`EXCEPT`/`INTERSECT`, empty
/// input), or when no statement parser accepts the token stream.
pub fn parse_query(input: &str) -> Result<Document, ParserError> {
// Extra tracing for GROUP BY queries (raw text check, pre-tokenization).
if input.contains("GROUP BY") {
log::debug!(
"PARSER: Parsing query with GROUP BY: {}",
input.trim().split('\n').collect::<Vec<_>>().join(" ")
);
}
let mut tokens = tokenize(input).map_err(ParserError::LexerError)?;
tokens = filter_sql_comments(tokens);
// Confirm GROUP/BY survived tokenization + comment filtering.
if input.contains("GROUP BY") {
let has_group_token = tokens.iter().any(|t| matches!(t, Token::Group));
let has_by_token = tokens.iter().any(|t| matches!(t, Token::By));
log::debug!(
"PARSER: Token stream has GROUP={}, BY={}",
has_group_token,
has_by_token
);
}
// Early, user-friendly rejection of DELETE SCHEMA / DELETE GRAPH, which
// would otherwise surface as an opaque UnexpectedToken error.
if tokens.len() >= 2 {
match (&tokens[0], &tokens[1]) {
(Token::Delete, Token::Schema) => {
return Err(ParserError::InvalidDeleteSchema);
}
(Token::Delete, Token::Graph) => {
return Err(ParserError::InvalidDeleteGraph);
}
_ => {}
}
}
// Reject set operators with no right-hand side (e.g. "… UNION <eof>").
for i in 0..tokens.len() {
match &tokens[i] {
Token::Union => {
if i + 1 >= tokens.len() || matches!(tokens.get(i + 1), Some(Token::EOF) | None) {
return Err(ParserError::IncompleteUnion);
}
}
Token::Except => {
if i + 1 >= tokens.len() || matches!(tokens.get(i + 1), Some(Token::EOF) | None) {
return Err(ParserError::IncompleteExcept);
}
}
Token::Intersect => {
if i + 1 >= tokens.len() || matches!(tokens.get(i + 1), Some(Token::EOF) | None) {
return Err(ParserError::IncompleteIntersect);
}
}
_ => {}
}
}
// Empty input (or EOF-only) is not a statement.
if tokens.is_empty() || (tokens.len() == 1 && matches!(tokens[0], Token::EOF)) {
return Err(ParserError::ExpectedToken(Token::Identifier(
"statement".to_string(),
)));
}
// Try each statement kind in priority order; the first parser that
// succeeds wins. NOTE(review): most arms ignore leftover tokens (`_`) —
// only the CALL arm checks that the remainder is just terminators.
if let Ok((_, at_stmt)) = at_location_statement(&tokens) {
let document = Document {
statement: Statement::AtLocation(at_stmt),
location: Location::default(),
};
debug!("Successfully parsed AT location statement into AST");
pretty_print_ast(&document);
Ok(document)
} else if let Ok((_, declare_stmt)) = declare_statement(&tokens) {
let document = Document {
statement: Statement::Declare(declare_stmt),
location: Location::default(),
};
debug!("Successfully parsed DECLARE statement into AST");
pretty_print_ast(&document);
Ok(document)
} else if let Ok((_, session_stmt)) = session_statement(&tokens) {
let document = Document {
statement: Statement::SessionStatement(session_stmt),
location: Location::default(),
};
debug!("Successfully parsed session statement into AST");
pretty_print_ast(&document);
Ok(document)
} else if let Ok((_, transaction_stmt)) = transaction_statement(&tokens) {
let document = Document {
statement: Statement::TransactionStatement(transaction_stmt),
location: Location::default(),
};
debug!("Successfully parsed transaction statement into AST");
pretty_print_ast(&document);
Ok(document)
} else if let Ok((_, catalog_stmt)) = catalog_statement(&tokens) {
let document = Document {
statement: Statement::CatalogStatement(catalog_stmt),
location: Location::default(),
};
debug!("Successfully parsed catalog statement into AST");
pretty_print_ast(&document);
Ok(document)
} else if let Ok((_, index_stmt)) = index_statement(&tokens) {
let document = Document {
statement: Statement::IndexStatement(index_stmt),
location: Location::default(),
};
debug!("Successfully parsed index statement into AST");
pretty_print_ast(&document);
Ok(document)
} else if let Ok((_, data_stmt)) = data_statement(&tokens) {
log::debug!(
"PARSER: Matched as DataStatement: {:?}",
std::mem::discriminant(&data_stmt)
);
let document = Document {
statement: Statement::DataStatement(data_stmt.clone()),
location: Location::default(),
};
debug!(
"Successfully parsed data modification statement into AST: {:?}",
std::mem::discriminant(&data_stmt)
);
if let DataStatement::MatchSet(ref ms) = data_stmt {
debug!(
"PARSER: MatchSet has WITH clause: {}",
ms.with_clause.is_some()
);
}
pretty_print_ast(&document);
Ok(document)
} else if let Ok((_, procedure_body)) = procedure_body_statement(&tokens) {
let document = Document {
statement: Statement::ProcedureBody(procedure_body),
location: Location::default(),
};
debug!("Successfully parsed procedure body into AST");
pretty_print_ast(&document);
Ok(document)
} else if let Ok((remaining, call_stmt)) = call_statement(&tokens) {
// CALL must consume the whole input (modulo `;` / EOF), otherwise the
// leftover token is reported instead of silently ignored.
let only_terminators = remaining
.iter()
.all(|t| matches!(t, Token::Semicolon | Token::EOF));
if !only_terminators {
let unexpected = remaining
.iter()
.find(|t| !matches!(t, Token::Semicolon | Token::EOF))
.unwrap_or(&Token::EOF);
return Err(ParserError::UnexpectedToken(unexpected.clone()));
}
let document = Document {
statement: Statement::Call(call_stmt),
location: Location::default(),
};
debug!("Successfully parsed CALL statement into AST");
pretty_print_ast(&document);
Ok(document)
} else if let Ok((_, select_stmt)) = select_statement(&tokens) {
let document = Document {
statement: Statement::Select(select_stmt),
location: Location::default(),
};
debug!("Successfully parsed SELECT statement into AST");
pretty_print_ast(&document);
Ok(document)
} else if let Ok((_, query)) = query(&tokens) {
log::debug!(
"PARSER: Matched as Query: {:?}",
std::mem::discriminant(&query)
);
if let Query::Basic(ref basic_query) = query {
log::debug!(
"PARSER: BasicQuery has GROUP BY: {}",
basic_query.group_clause.is_some()
);
} else if let Query::Limited { ref query, .. } = query {
log::debug!(
"PARSER: Limited query wrapping: {:?}",
std::mem::discriminant(query.as_ref())
);
if let Query::Basic(ref basic_query) = query.as_ref() {
log::debug!(
"PARSER: Wrapped BasicQuery has GROUP BY: {}",
basic_query.group_clause.is_some()
);
}
}
let document = Document {
statement: Statement::Query(query.clone()),
location: Location::default(),
};
debug!(
"Successfully parsed GQL query into AST: {:?}",
std::mem::discriminant(&query)
);
if let Query::MutationPipeline(ref mp) = query {
debug!(
"PARSER: MutationPipeline has {} segments",
mp.segments.len()
);
}
pretty_print_ast(&document);
Ok(document)
} else {
// Nothing matched: report the first token as the offender.
debug!("PARSER: Failed to parse as any statement type");
debug!(
"PARSER: First few tokens: {:?}",
tokens.get(0..10).unwrap_or(&[])
);
let unexpected = tokens.first().unwrap_or(&Token::EOF);
Err(ParserError::UnexpectedToken(unexpected.clone()))
}
}
/// Entry point of the query grammar: delegates to the set-operation level,
/// which in turn handles ORDER BY / LIMIT modifiers.
fn query(tokens: &[Token]) -> IResult<&[Token], Query> {
parse_set_operation(tokens)
}
/// Core query level: a chain of UNION/EXCEPT (which itself sits above
/// INTERSECT), without trailing ORDER BY / LIMIT modifiers.
fn parse_core_query(tokens: &[Token]) -> IResult<&[Token], Query> {
parse_union_except(tokens)
}
/// Attaches optional trailing ORDER BY / LIMIT clauses to a parsed query.
///
/// Set operations carry the modifiers directly; basic queries keep modifiers
/// they already parsed themselves (the new ones fill only empty slots); every
/// other query shape gets wrapped in a `Query::Limited` node.
fn apply_query_modifiers(
    query: Query,
    order_clause: Option<OrderClause>,
    limit_clause: Option<LimitClause>,
) -> Query {
    // No modifiers at all: return the query untouched.
    if order_clause.is_none() && limit_clause.is_none() {
        return query;
    }
    match query {
        Query::SetOperation(mut set_op) => {
            set_op.order_clause = order_clause;
            set_op.limit_clause = limit_clause;
            Query::SetOperation(set_op)
        }
        Query::Basic(mut basic) => {
            // Only fill slots the basic query did not already populate.
            if basic.order_clause.is_none() {
                basic.order_clause = order_clause;
            }
            if basic.limit_clause.is_none() {
                basic.limit_clause = limit_clause;
            }
            Query::Basic(basic)
        }
        other => Query::Limited {
            query: Box::new(other),
            order_clause,
            limit_clause,
        },
    }
}
/// Parses a core query (UNION/EXCEPT/INTERSECT chain) and then any trailing
/// ORDER BY / LIMIT clauses, attaching them via `apply_query_modifiers`.
/// The debug logging traces which query shape was produced at each step.
fn parse_set_operation(tokens: &[Token]) -> IResult<&[Token], Query> {
let (remaining, core_query) = parse_core_query(tokens)?;
log::debug!(
"parse_set_operation: core_query type = {:?}",
std::mem::discriminant(&core_query)
);
// Trace the shape of the core query for GROUP BY / ORDER BY debugging.
match &core_query {
Query::Basic(bq) => {
log::debug!("parse_set_operation: BasicQuery has group_clause={}, order_clause={}, limit_clause={}",
bq.group_clause.is_some(), bq.order_clause.is_some(), bq.limit_clause.is_some());
}
Query::Limited {
query,
order_clause,
limit_clause,
} => {
log::debug!(
"parse_set_operation: Limited query has order={}, limit={}",
order_clause.is_some(),
limit_clause.is_some()
);
if let Query::Basic(bq) = query.as_ref() {
log::debug!("parse_set_operation: Wrapped BasicQuery has group_clause={}, order_clause={}, limit_clause={}",
bq.group_clause.is_some(), bq.order_clause.is_some(), bq.limit_clause.is_some());
}
}
_ => {
log::debug!("parse_set_operation: Other query type");
}
}
// Trailing modifiers apply to the whole set-operation result.
let (remaining, order_clause) = opt(order_clause)(remaining)?;
let (remaining, limit_clause) = opt(limit_clause)(remaining)?;
log::debug!(
"parse_set_operation: Parsed order_clause={}, limit_clause={}",
order_clause.is_some(),
limit_clause.is_some()
);
let final_query = apply_query_modifiers(core_query, order_clause, limit_clause);
log::debug!(
"parse_set_operation: final_query type = {:?}",
std::mem::discriminant(&final_query)
);
Ok((remaining, final_query))
}
/// Parses a left-associative chain of UNION / EXCEPT operations over
/// INTERSECT-level operands.
fn parse_union_except(tokens: &[Token]) -> IResult<&[Token], Query> {
    let (mut rest, mut accumulated) = parse_intersect(tokens)?;
    // Fold each trailing `UNION [ALL] …` / `EXCEPT [ALL] …` onto the left.
    while let Ok((next_rest, (operation, rhs))) = parse_union_except_op(rest) {
        accumulated = Query::SetOperation(SetOperation {
            left: Box::new(accumulated),
            operation,
            right: Box::new(rhs),
            limit_clause: None,
            order_clause: None,
            location: Location::default(),
        });
        rest = next_rest;
    }
    // A leftover UNION/EXCEPT keyword means its right-hand side failed to
    // parse; surface a parse error instead of silently stopping short.
    if matches!(rest.first(), Some(Token::Union) | Some(Token::Except)) {
        return Err(nom::Err::Error(nom::error::Error::new(
            rest,
            nom::error::ErrorKind::Alt,
        )));
    }
    Ok((rest, accumulated))
}
/// Parses a left-associative chain of INTERSECT operations over query terms.
fn parse_intersect(tokens: &[Token]) -> IResult<&[Token], Query> {
    let (mut rest, mut accumulated) = parse_query_term(tokens)?;
    // Fold each trailing `INTERSECT [ALL] …` onto the left operand.
    while let Ok((next_rest, (operation, rhs))) = parse_intersect_op(rest) {
        accumulated = Query::SetOperation(SetOperation {
            left: Box::new(accumulated),
            operation,
            right: Box::new(rhs),
            limit_clause: None,
            order_clause: None,
            location: Location::default(),
        });
        rest = next_rest;
    }
    // A leftover INTERSECT keyword means its right-hand side failed to parse.
    if matches!(rest.first(), Some(Token::Intersect)) {
        return Err(nom::Err::Error(nom::error::Error::new(
            rest,
            nom::error::ErrorKind::Alt,
        )));
    }
    Ok((rest, accumulated))
}
/// A single operand of a set operation. The alternatives are tried in order,
/// so more specific forms (parenthesized query, LET/FOR/FILTER/UNWIND
/// statements, mutation pipelines) must come before the general `basic_query`
/// / `with_query` / `return_query` fallbacks — do not reorder casually.
fn parse_query_term(tokens: &[Token]) -> IResult<&[Token], Query> {
alt((
parse_parenthesized_query_with_modifiers,
let_statement,
for_statement,
filter_statement,
unwind_statement,
mutation_pipeline,
basic_query,
with_query,
return_query,
))(tokens)
}
fn parse_parenthesized_query_with_modifiers(tokens: &[Token]) -> IResult<&[Token], Query> {
let (remaining, query) = delimited(
expect_token(Token::LeftParen),
parse_set_operation, expect_token(Token::RightParen),
)(tokens)?;
Ok((remaining, query))
}
fn basic_query(tokens: &[Token]) -> IResult<&[Token], Query> {
log::debug!("basic_query function called");
map(
tuple((
match_clause,
opt(where_clause),
return_clause,
opt(group_clause),
opt(having_clause),
opt(order_clause),
opt(limit_clause),
)),
|(
match_clause,
where_clause,
return_clause,
group_clause,
having_clause,
order_clause,
limit_clause,
)| {
log::debug!(
"basic_query parser: group_clause={}, order_clause={}, limit_clause={}",
group_clause.is_some(),
order_clause.is_some(),
limit_clause.is_some()
);
Query::Basic(BasicQuery {
match_clause,
where_clause,
return_clause,
group_clause,
having_clause,
order_clause,
limit_clause,
location: Location::default(),
})
},
)(tokens)
}
fn return_query(tokens: &[Token]) -> IResult<&[Token], Query> {
map(
tuple((
return_clause,
opt(group_clause),
opt(having_clause),
opt(order_clause),
opt(limit_clause),
)),
|(return_clause, group_clause, having_clause, order_clause, limit_clause)| {
Query::Return(ReturnQuery {
return_clause,
group_clause,
having_clause,
order_clause,
limit_clause,
location: Location::default(),
})
},
)(tokens)
}
/// Parses the terminal mutation of a pipeline: REMOVE, SET, or
/// [DETACH|NODETACH] DELETE, each with a comma-separated item list.
fn final_mutation(tokens: &[Token]) -> IResult<&[Token], FinalMutation> {
alt((
// REMOVE item[, item]*
map(
preceded(
expect_token(Token::Remove),
separated_list1(expect_token(Token::Comma), remove_item),
),
FinalMutation::Remove,
),
// SET item[, item]*
map(
preceded(
expect_token(Token::Set),
separated_list1(expect_token(Token::Comma), set_item),
),
FinalMutation::Set,
),
// [DETACH | NODETACH] DELETE expr[, expr]*
map(
tuple((
opt(alt((
expect_token(Token::Detach),
expect_token(Token::NoDetach),
))),
expect_token(Token::Delete),
separated_list1(expect_token(Token::Comma), expression),
)),
// `detach` is true only for an explicit DETACH keyword; NODETACH and
// the bare form both map to false.
|(detach_mode, _, expressions)| FinalMutation::Delete {
expressions,
detach: matches!(detach_mode, Some(Token::Detach)),
},
),
))(tokens)
}
/// Parses a mutation pipeline: one or more MATCH-based segments ending in a
/// final REMOVE/SET/DELETE mutation. Two forms are tried: first a variant
/// where an UNWIND (plus optional WHERE) sits between the segments and the
/// mutation (the UNWIND is folded into the last segment), then the plain
/// segments + mutation form.
fn mutation_pipeline(tokens: &[Token]) -> IResult<&[Token], Query> {
log::debug!(
"mutation_pipeline: trying to parse with tokens: {:?}",
tokens.get(0..10)
);
let parse_with_unwind = map(
tuple((
many1(query_segment_no_unwind),
unwind_clause,
opt(where_clause),
final_mutation,
)),
|(mut segments, unwind, where_cl, final_mutation)| {
// Attach the standalone UNWIND (and its optional WHERE) to the last
// segment so the AST keeps one segment list.
if let Some(last_segment) = segments.last_mut() {
last_segment.unwind_clause = Some(unwind);
last_segment.post_unwind_where = where_cl;
}
Query::MutationPipeline(MutationPipeline {
segments,
final_mutation,
location: Location::default(),
})
},
);
let parse_without_unwind = map(
tuple((many1(query_segment), final_mutation)),
|(segments, final_mutation)| {
Query::MutationPipeline(MutationPipeline {
segments,
final_mutation,
location: Location::default(),
})
},
);
alt((parse_with_unwind, parse_without_unwind))(tokens)
}
fn query_segment_no_unwind(tokens: &[Token]) -> IResult<&[Token], QuerySegment> {
map(
tuple((match_clause, opt(where_clause), opt(with_clause))),
|(match_clause, where_clause, with_clause)| QuerySegment {
match_clause,
where_clause,
with_clause,
unwind_clause: None,
post_unwind_where: None,
location: Location::default(),
},
)(tokens)
}
fn with_query(tokens: &[Token]) -> IResult<&[Token], Query> {
map(
tuple((
many1(query_segment),
return_clause,
opt(group_clause),
opt(having_clause),
opt(order_clause),
opt(limit_clause),
)),
|(segments, final_return, group_clause, having_clause, order_clause, limit_clause)| {
Query::WithQuery(WithQuery {
segments,
final_return,
group_clause,
having_clause,
order_clause,
limit_clause,
location: Location::default(),
})
},
)(tokens)
}
/// Parses `LET name = expr[, name = expr]*`.
fn let_statement(tokens: &[Token]) -> IResult<&[Token], Query> {
    let (rest, _) = tag_token(Token::Let)(tokens)?;
    let (rest, variable_definitions) =
        separated_list1(tag_token(Token::Comma), variable_definition)(rest)?;
    Ok((
        rest,
        Query::Let(LetStatement {
            variable_definitions,
            location: Location::default(),
        }),
    ))
}
fn variable_definition(tokens: &[Token]) -> IResult<&[Token], VariableDefinition> {
map(
tuple((identifier, tag_token(Token::Equal), expression)),
|(variable_name, _, expr)| VariableDefinition {
variable_name,
expression: expr,
location: Location::default(),
},
)(tokens)
}
fn for_statement(tokens: &[Token]) -> IResult<&[Token], Query> {
preceded(
tag_token(Token::For),
map(
tuple((
opt(terminated(identifier, tag_token(Token::Colon))),
identifier,
tag_token(Token::In),
expression,
)),
|(alias, variable, _, expr)| {
Query::For(ForStatement {
variable,
alias,
expression: expr,
location: Location::default(),
})
},
),
)(tokens)
}
/// Parses `FILTER [WHERE] expr`; the WHERE keyword is optional.
fn filter_statement(tokens: &[Token]) -> IResult<&[Token], Query> {
    let (rest, _) = tag_token(Token::Filter)(tokens)?;
    let (rest, condition) = alt((
        preceded(tag_token(Token::Where), expression),
        expression,
    ))(rest)?;
    Ok((
        rest,
        Query::Filter(FilterStatement {
            where_clause: WhereClause {
                condition,
                location: Location::default(),
            },
            location: Location::default(),
        }),
    ))
}
fn unwind_statement(tokens: &[Token]) -> IResult<&[Token], Query> {
map(
tuple((
tag_token(Token::Unwind),
expression,
tag_token(Token::As),
identifier,
)),
|(_, expr, _, var)| {
Query::Unwind(UnwindStatement {
expression: expr,
variable: var,
location: Location::default(),
})
},
)(tokens)
}
fn unwind_clause(tokens: &[Token]) -> IResult<&[Token], UnwindClause> {
map(
tuple((
tag_token(Token::Unwind),
expression,
tag_token(Token::As),
identifier,
)),
|(_, expr, _, var)| UnwindClause {
expression: expr,
variable: var,
location: Location::default(),
},
)(tokens)
}
fn query_segment(tokens: &[Token]) -> IResult<&[Token], QuerySegment> {
map(
tuple((
match_clause,
opt(where_clause),
opt(with_clause),
opt(unwind_clause),
opt(where_clause),
)),
|(match_clause, where_clause, with_clause, unwind_clause, post_unwind_where)| {
QuerySegment {
match_clause,
where_clause,
with_clause,
unwind_clause,
post_unwind_where,
location: Location::default(),
}
},
)(tokens)
}
/// Parses one `UNION [ALL] rhs` or `EXCEPT [ALL] rhs` operator application,
/// returning the operation kind and its right-hand operand.
fn parse_union_except_op(tokens: &[Token]) -> IResult<&[Token], (SetOperationType, Query)> {
    let union_branch = map(
        tuple((
            expect_token(Token::Union),
            opt(expect_token(Token::All)),
            parse_intersect,
        )),
        |(_, all_kw, rhs)| {
            let operation = match all_kw {
                Some(_) => SetOperationType::UnionAll,
                None => SetOperationType::Union,
            };
            (operation, rhs)
        },
    );
    let except_branch = map(
        tuple((
            expect_token(Token::Except),
            opt(expect_token(Token::All)),
            parse_intersect,
        )),
        |(_, all_kw, rhs)| {
            let operation = match all_kw {
                Some(_) => SetOperationType::ExceptAll,
                None => SetOperationType::Except,
            };
            (operation, rhs)
        },
    );
    alt((union_branch, except_branch))(tokens)
}
fn parse_intersect_op(tokens: &[Token]) -> IResult<&[Token], (SetOperationType, Query)> {
map(
tuple((
expect_token(Token::Intersect),
opt(expect_token(Token::All)),
parse_query_term,
)),
|(_, all, right)| {
let op = if all.is_some() {
SetOperationType::IntersectAll
} else {
SetOperationType::Intersect
};
(op, right)
},
)(tokens)
}
fn select_statement(tokens: &[Token]) -> IResult<&[Token], SelectStatement> {
map(
tuple((
expect_token(Token::Select),
opt(distinct_qualifier),
select_list,
opt(from_clause),
opt(where_clause),
opt(group_clause),
opt(having_clause),
opt(order_clause),
opt(limit_clause),
)),
|(
_,
distinct,
return_items,
from_clause,
where_clause,
group_clause,
having_clause,
order_clause,
limit_clause,
)| SelectStatement {
distinct: distinct.unwrap_or(DistinctQualifier::None),
return_items,
from_clause,
where_clause,
group_clause,
having_clause,
order_clause,
limit_clause,
location: Location::default(),
},
)(tokens)
}
/// Parses the SELECT item list: either a `*` wildcard or an explicit
/// comma-separated list of return items.
fn select_list(tokens: &[Token]) -> IResult<&[Token], SelectItems> {
    let wildcard = map(expect_token(Token::Star), |_| SelectItems::Wildcard {
        location: Location::default(),
    });
    let explicit = map(
        separated_list1(expect_token(Token::Comma), return_item),
        |items| SelectItems::Explicit {
            items,
            location: Location::default(),
        },
    );
    alt((wildcard, explicit))(tokens)
}
/// Parses `FROM graph_expr[, graph_expr]*`.
fn from_clause(tokens: &[Token]) -> IResult<&[Token], FromClause> {
    let (rest, _) = expect_token(Token::From)(tokens)?;
    let (rest, graph_expressions) =
        separated_list1(expect_token(Token::Comma), from_graph_expression)(rest)?;
    Ok((
        rest,
        FromClause {
            graph_expressions,
            location: Location::default(),
        },
    ))
}
fn from_graph_expression(tokens: &[Token]) -> IResult<&[Token], FromGraphExpression> {
alt((
map(match_clause, |match_statement| FromGraphExpression {
graph_expression: GraphExpression::CurrentGraph,
match_statement: Some(match_statement),
location: Location::default(),
}),
map(
tuple((graph_expression, opt(match_clause))),
|(graph_expression, match_statement)| FromGraphExpression {
graph_expression,
match_statement,
location: Location::default(),
},
),
))(tokens)
}
fn distinct_qualifier(tokens: &[Token]) -> IResult<&[Token], DistinctQualifier> {
alt((
value(DistinctQualifier::Distinct, expect_token(Token::Distinct)),
value(DistinctQualifier::All, expect_token(Token::All)),
))(tokens)
}
fn call_statement(tokens: &[Token]) -> IResult<&[Token], CallStatement> {
map(
tuple((
expect_token(Token::Call),
procedure_call,
opt(yield_clause),
opt(where_clause),
)),
|(_, (procedure_name, arguments), yield_clause, where_clause)| CallStatement {
procedure_name,
arguments,
yield_clause,
where_clause,
location: Location::default(),
},
)(tokens)
}
fn procedure_call(tokens: &[Token]) -> IResult<&[Token], (String, Vec<Expression>)> {
map(
tuple((
property_access_as_string,
expect_token(Token::LeftParen),
opt(expression_list),
expect_token(Token::RightParen),
)),
|(proc_name, _, args, _)| (proc_name, args.unwrap_or_default()),
)(tokens)
}
fn property_access_as_string(tokens: &[Token]) -> IResult<&[Token], String> {
if let Some(Token::PropertyAccess(prop_access)) = tokens.first() {
Ok((&tokens[1..], prop_access.clone()))
} else if let Some(Token::Identifier(name)) = tokens.first() {
let rest = &tokens[1..];
if let Some(Token::Dot) = rest.first() {
if let Some(Token::Identifier(property)) = rest.get(1) {
return Ok((&rest[2..], format!("{}.{}", name, property)));
}
}
Ok((rest, name.clone()))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
fn yield_clause(tokens: &[Token]) -> IResult<&[Token], YieldClause> {
map(
tuple((
expect_token(Token::Yield),
yield_item,
many0(tuple((expect_token(Token::Comma), yield_item))),
)),
|(_, first_item, additional_items)| {
let mut items = vec![first_item];
items.extend(additional_items.into_iter().map(|(_, item)| item));
YieldClause {
items,
location: Location::default(),
}
},
)(tokens)
}
fn yield_item(tokens: &[Token]) -> IResult<&[Token], YieldItem> {
map(
tuple((
identifier,
opt(tuple((expect_token(Token::As), identifier))),
)),
|(column_name, alias_opt)| YieldItem {
column_name,
alias: alias_opt.map(|(_, alias)| alias),
location: Location::default(),
},
)(tokens)
}
fn match_clause(tokens: &[Token]) -> IResult<&[Token], MatchClause> {
map(
tuple((
expect_token(Token::Match),
path_pattern,
many0(tuple((expect_token(Token::Comma), path_pattern))),
)),
|(_, first_pattern, additional_patterns)| {
let mut patterns = vec![first_pattern];
patterns.extend(additional_patterns.into_iter().map(|(_, pattern)| pattern));
MatchClause {
patterns,
location: Location::default(),
}
},
)(tokens)
}
fn path_pattern(tokens: &[Token]) -> IResult<&[Token], PathPattern> {
map(
tuple((
opt(tuple((identifier, expect_token(Token::Equal)))),
opt(path_type_keywords),
many1(pattern_element),
)),
|(assignment, path_type, elements)| PathPattern {
assignment: assignment.map(|(id, _)| id),
path_type,
elements,
location: Location::default(),
},
)(tokens)
}
fn path_type_keywords(tokens: &[Token]) -> IResult<&[Token], PathType> {
alt((
map(
tuple((expect_token(Token::Acyclic), expect_token(Token::Path))),
|_| PathType::AcyclicPath,
),
map(
tuple((expect_token(Token::Simple), expect_token(Token::Path))),
|_| PathType::SimplePath,
),
map(expect_token(Token::Trail), |_| PathType::Trail),
map(expect_token(Token::Walk), |_| PathType::Walk),
))(tokens)
}
/// Parses a path quantifier: `?` or a braced form `{n}`, `{m,n}`, `{m,}`,
/// `{,n}`. The braced alternatives are ordered longest-first so that e.g.
/// `{2,3}` is not misparsed as `{2,}` followed by a stray `3`.
fn path_quantifier(tokens: &[Token]) -> IResult<&[Token], PathQuantifier> {
alt((
map(expect_token(Token::Question), |_| PathQuantifier::Optional),
delimited(
expect_token(Token::LeftBrace),
alt((
// {min,max}
map(
tuple((integer_literal, expect_token(Token::Comma), integer_literal)),
|(min, _, max)| PathQuantifier::Range {
min: min as u32,
max: max as u32,
},
),
// {min,}
map(
tuple((integer_literal, expect_token(Token::Comma))),
|(min, _)| PathQuantifier::AtLeast(min as u32),
),
// {,max}
map(
tuple((expect_token(Token::Comma), integer_literal)),
|(_, max)| PathQuantifier::AtMost(max as u32),
),
// {n}
map(integer_literal, |n| PathQuantifier::Exact(n as u32)),
)),
expect_token(Token::RightBrace),
),
))(tokens)
}
/// Parses one element of a path pattern: a node `( … )` or an edge `-[ … ]-`.
fn pattern_element(tokens: &[Token]) -> IResult<&[Token], PatternElement> {
alt((
map(node_pattern, PatternElement::Node),
map(edge_pattern, PatternElement::Edge),
))(tokens)
}
/// Parses a node pattern: `( [ident] [:labels] [{props}] )`.
fn node_pattern(tokens: &[Token]) -> IResult<&[Token], Node> {
    let (rest, _) = expect_token(Token::LeftParen)(tokens)?;
    let (rest, identifier) = opt(identifier)(rest)?;
    let (rest, labels) = opt(label_list)(rest)?;
    let (rest, properties) = opt(property_map)(rest)?;
    let (rest, _) = expect_token(Token::RightParen)(rest)?;
    Ok((
        rest,
        Node {
            identifier,
            labels: labels.unwrap_or_default(),
            properties,
            location: Location::default(),
        },
    ))
}
/// Parses an edge pattern: direction marker, `[` optional identifier and/or
/// `:label-expression`, optional property map, `]`, optional quantifier, and
/// the closing direction marker. The two markers together determine the
/// final edge direction.
fn edge_pattern(tokens: &[Token]) -> IResult<&[Token], Edge> {
map(
tuple((
edge_direction,
expect_token(Token::LeftBracket),
// Bracket interior: `ident[:labels]`, `:labels`, or empty.
alt((
map(
tuple((
identifier,
opt(preceded(expect_token(Token::Colon), label_expression)),
)),
|(identifier, label_expr)| {
(
Some(identifier),
label_expr.map(label_expression_to_strings),
)
},
),
map(
preceded(expect_token(Token::Colon), label_expression),
|label_expr| (None, Some(label_expression_to_strings(label_expr))),
),
// Empty interior: always succeeds, so this arm must stay last.
value((None, None), success(())),
)),
opt(property_map),
expect_token(Token::RightBracket),
opt(path_quantifier), edge_direction,
)),
|(left_dir, _, (identifier, labels), properties, _, quantifier, right_dir)| {
// Combine the two arrow halves into one direction:
// <-[…]-> = Both, <-[…]- = Incoming, -[…]-> = Outgoing, -[…]- = Undirected.
let direction = match (left_dir, right_dir) {
(EdgeDirection::Incoming, EdgeDirection::Outgoing) => EdgeDirection::Both,
(EdgeDirection::Incoming, _) => EdgeDirection::Incoming,
(_, EdgeDirection::Outgoing) => EdgeDirection::Outgoing,
_ => EdgeDirection::Undirected,
};
Edge {
identifier,
labels: labels.unwrap_or_default(),
properties,
direction,
quantifier, location: Location::default(),
}
},
)(tokens)
}
fn label_list(tokens: &[Token]) -> IResult<&[Token], Vec<String>> {
map(
tuple((
expect_token(Token::Colon),
identifier_or_quoted,
many0(tuple((
alt((
expect_token(Token::Ampersand), expect_token(Token::Colon), )),
identifier_or_quoted,
))),
)),
|(_, first_label, additional_labels)| {
let mut labels = vec![first_label];
labels.extend(additional_labels.into_iter().map(|(_, label)| label));
labels
},
)(tokens)
}
fn property_map(tokens: &[Token]) -> IResult<&[Token], PropertyMap> {
map(
delimited(
expect_token(Token::LeftBrace),
opt(tuple((
property_pair,
many0(tuple((expect_token(Token::Comma), property_pair))),
))),
expect_token(Token::RightBrace),
),
|opt_props| {
let properties = if let Some((first, rest)) = opt_props {
let mut props = vec![first];
props.extend(rest.into_iter().map(|(_, prop)| prop));
props
} else {
vec![]
};
PropertyMap {
properties,
location: Location::default(),
}
},
)(tokens)
}
fn property_pair(tokens: &[Token]) -> IResult<&[Token], Property> {
map(
tuple((identifier_or_quoted, expect_token(Token::Colon), expression)),
|(key, _, value)| Property {
key,
value,
location: Location::default(),
},
)(tokens)
}
fn where_clause(tokens: &[Token]) -> IResult<&[Token], WhereClause> {
map(
tuple((expect_token(Token::Where), expression)),
|(_, condition)| WhereClause {
condition,
location: Location::default(),
},
)(tokens)
}
fn return_clause(tokens: &[Token]) -> IResult<&[Token], ReturnClause> {
map(
tuple((
expect_token(Token::Return),
opt(alt((
value(DistinctQualifier::Distinct, expect_token(Token::Distinct)),
value(DistinctQualifier::All, expect_token(Token::All)),
))),
return_item,
many0(tuple((expect_token(Token::Comma), return_item))),
)),
|(_, distinct_opt, first, rest)| {
let mut items = vec![first];
items.extend(rest.into_iter().map(|(_, item)| item));
ReturnClause {
distinct: distinct_opt.unwrap_or(DistinctQualifier::None),
items,
location: Location::default(),
}
},
)(tokens)
}
fn return_item(tokens: &[Token]) -> IResult<&[Token], ReturnItem> {
map(
tuple((
expression,
opt(tuple((expect_token(Token::As), identifier))),
)),
|(expression, opt_alias)| ReturnItem {
expression,
alias: opt_alias.map(|(_, alias)| alias),
location: Location::default(),
},
)(tokens)
}
fn with_clause(tokens: &[Token]) -> IResult<&[Token], WithClause> {
map(
tuple((
expect_token(Token::With),
opt(alt((
value(DistinctQualifier::Distinct, expect_token(Token::Distinct)),
value(DistinctQualifier::All, expect_token(Token::All)),
))),
with_item,
many0(tuple((expect_token(Token::Comma), with_item))),
opt(where_clause),
opt(order_clause),
opt(limit_clause),
)),
|(_, distinct_opt, first, rest, where_clause, order_clause, limit_clause)| {
let mut items = vec![first];
items.extend(rest.into_iter().map(|(_, item)| item));
WithClause {
distinct: distinct_opt.unwrap_or(DistinctQualifier::None),
items,
where_clause,
order_clause,
limit_clause,
location: Location::default(),
}
},
)(tokens)
}
fn with_item(tokens: &[Token]) -> IResult<&[Token], WithItem> {
map(
tuple((
expression,
opt(tuple((expect_token(Token::As), identifier))),
)),
|(expression, opt_alias)| WithItem {
expression,
alias: opt_alias.map(|(_, alias)| alias),
location: Location::default(),
},
)(tokens)
}
fn order_clause(tokens: &[Token]) -> IResult<&[Token], OrderClause> {
map(
tuple((
expect_token(Token::Order),
expect_token(Token::By),
order_item,
many0(tuple((expect_token(Token::Comma), order_item))),
)),
|(_, _, first_item, additional_items)| {
let mut items = vec![first_item];
items.extend(additional_items.into_iter().map(|(_, item)| item));
OrderClause {
items,
location: Location::default(),
}
},
)(tokens)
}
fn order_item(tokens: &[Token]) -> IResult<&[Token], OrderItem> {
map(
tuple((
expression,
opt(alt((
value(OrderDirection::Ascending, expect_token(Token::Asc)),
value(OrderDirection::Ascending, expect_token(Token::Ascending)),
value(OrderDirection::Descending, expect_token(Token::Desc)),
value(OrderDirection::Descending, expect_token(Token::Descending)),
))),
opt(tuple((
expect_token(Token::Nulls),
alt((
value(NullsOrdering::First, expect_token(Token::First)),
value(NullsOrdering::Last, expect_token(Token::Last)),
)),
))),
)),
|(expression, direction, nulls_clause)| OrderItem {
expression,
direction: direction.unwrap_or(OrderDirection::Ascending), nulls_ordering: nulls_clause.map(|(_, nulls_order)| nulls_order),
location: Location::default(),
},
)(tokens)
}
fn group_clause(tokens: &[Token]) -> IResult<&[Token], GroupClause> {
map(
tuple((
expect_token(Token::Group),
expect_token(Token::By),
expression,
many0(tuple((expect_token(Token::Comma), expression))),
)),
|(_, _, first_expr, additional_exprs)| {
let mut expressions = vec![first_expr];
expressions.extend(additional_exprs.into_iter().map(|(_, expr)| expr));
GroupClause {
expressions,
location: Location::default(),
}
},
)(tokens)
}
fn having_clause(tokens: &[Token]) -> IResult<&[Token], HavingClause> {
map(
tuple((expect_token(Token::Having), expression)),
|(_, condition)| HavingClause {
condition,
location: Location::default(),
},
)(tokens)
}
fn limit_clause(tokens: &[Token]) -> IResult<&[Token], LimitClause> {
map(
tuple((
expect_token(Token::Limit),
integer_literal,
opt(tuple((expect_token(Token::Offset), integer_literal))),
)),
|(_, count, opt_offset)| LimitClause {
count: count as usize,
offset: opt_offset.map(|(_, offset)| offset as usize),
location: Location::default(),
},
)(tokens)
}
/// Expression entry point; precedence chain starts at OR (lowest binding).
fn expression(tokens: &[Token]) -> IResult<&[Token], Expression> {
or_expression(tokens)
}
fn or_expression(tokens: &[Token]) -> IResult<&[Token], Expression> {
map(
tuple((
xor_expression,
many0(tuple((expect_token(Token::Or), xor_expression))),
)),
|(first, rest)| {
rest.into_iter().fold(first, |left, (_, right)| {
Expression::Binary(BinaryExpression {
left: Box::new(left),
operator: Operator::Or,
right: Box::new(right),
location: Location::default(),
})
})
},
)(tokens)
}
fn xor_expression(tokens: &[Token]) -> IResult<&[Token], Expression> {
map(
tuple((
and_expression,
many0(tuple((expect_token(Token::Xor), and_expression))),
)),
|(first, rest)| {
rest.into_iter().fold(first, |left, (_, right)| {
Expression::Binary(BinaryExpression {
left: Box::new(left),
operator: Operator::Xor,
right: Box::new(right),
location: Location::default(),
})
})
},
)(tokens)
}
fn and_expression(tokens: &[Token]) -> IResult<&[Token], Expression> {
map(
tuple((
not_expression,
many0(tuple((expect_token(Token::And), not_expression))),
)),
|(first, rest)| {
rest.into_iter().fold(first, |left, (_, right)| {
Expression::Binary(BinaryExpression {
left: Box::new(left),
operator: Operator::And,
right: Box::new(right),
location: Location::default(),
})
})
},
)(tokens)
}
fn not_expression(tokens: &[Token]) -> IResult<&[Token], Expression> {
alt((
map(
tuple((expect_token(Token::Not), comparison)),
|(_, expr)| {
Expression::Unary(UnaryExpression {
operator: Operator::Not,
expression: Box::new(expr),
location: Location::default(),
})
},
),
comparison,
))(tokens)
}
fn shorthand_label_predicate(tokens: &[Token]) -> IResult<&[Token], Expression> {
map(
tuple((identifier, expect_token(Token::Colon), identifier)),
|(var_name, _, label_name)| {
let label_expr = LabelExpression {
terms: vec![LabelTerm {
factors: vec![LabelFactor::Identifier(label_name)],
location: Location::default(),
}],
location: Location::default(),
};
Expression::IsPredicate(IsPredicateExpression {
subject: Box::new(Expression::Variable(Variable {
name: var_name,
location: Location::default(),
})),
predicate_type: IsPredicateType::Label(label_expr),
negated: false,
target: None,
type_spec: None,
location: Location::default(),
})
},
)(tokens)
}
/// Parses comparison-level expressions. `alt` arm order is load-bearing —
/// the more specific forms must be tried before the generic fallback:
/// 1. `var:Label` shorthand label predicate
/// 2. `IS [NOT] ...` predicates
/// 3. quantified comparison `expr op ALL|ANY|SOME (query)`
/// 4. `expr NOT IN (query | list | expr)`
/// 5. `expr IN (query | list | expr)`
/// 6. plain `expr [op expr]`
fn comparison(tokens: &[Token]) -> IResult<&[Token], Expression> {
alt((
shorthand_label_predicate,
is_predicate,
// Quantified comparison: `left op ALL/ANY/SOME ( subquery )`.
map(
tuple((
additive_expression,
comparison_operator,
quantifier,
expect_token(Token::LeftParen),
basic_query,
expect_token(Token::RightParen),
)),
|(left, op, quant, _, query, _)| {
Expression::QuantifiedComparison(QuantifiedComparisonExpression {
left: Box::new(left),
operator: op,
quantifier: quant,
subquery: Box::new(Expression::Subquery(SubqueryExpression {
query: Box::new(query),
location: Location::default(),
})),
location: Location::default(),
})
},
),
// `left NOT IN ...` — the right-hand side may be a subquery, a
// parenthesized literal list, or a plain additive expression.
map(
tuple((
additive_expression,
expect_token(Token::Not),
expect_token(Token::In),
alt((
map(
tuple((
expect_token(Token::LeftParen),
basic_query,
expect_token(Token::RightParen),
)),
|(_, query, _)| {
// The "placeholder" left operand is patched with the real
// one in the outer closure below.
Expression::NotInSubquery(NotInSubqueryExpression {
expression: Box::new(Expression::Literal(Literal::String(
"placeholder".to_string(),
))), query: Box::new(query),
location: Location::default(),
})
},
),
map(
tuple((
expect_token(Token::LeftParen),
expression_list,
expect_token(Token::RightParen),
)),
|(_, exprs, _)| {
// Non-literal list elements are stringified via Debug.
Expression::Literal(Literal::List(
exprs
.into_iter()
.map(|e| {
match e {
Expression::Literal(lit) => lit,
_ => Literal::String(format!("{:?}", e)), }
})
.collect(),
))
},
),
additive_expression,
)),
)),
|(left, _, _, right)| match right {
// Splice the parsed left operand into the subquery node.
Expression::NotInSubquery(mut not_in_subquery) => {
not_in_subquery.expression = Box::new(left);
Expression::NotInSubquery(not_in_subquery)
}
_ => Expression::Binary(BinaryExpression {
left: Box::new(left),
operator: Operator::NotIn,
right: Box::new(right),
location: Location::default(),
}),
},
),
// `left IN ...` — same three right-hand forms as NOT IN above.
map(
tuple((
additive_expression,
expect_token(Token::In),
alt((
map(
tuple((
expect_token(Token::LeftParen),
basic_query,
expect_token(Token::RightParen),
)),
|(_, query, _)| {
// Placeholder patched with the real left operand below.
Expression::InSubquery(InSubqueryExpression {
expression: Box::new(Expression::Literal(Literal::String(
"placeholder".to_string(),
))), query: Box::new(query),
location: Location::default(),
})
},
),
map(
tuple((
expect_token(Token::LeftParen),
expression_list,
expect_token(Token::RightParen),
)),
|(_, exprs, _)| {
Expression::Literal(Literal::List(
exprs
.into_iter()
.map(|e| {
match e {
Expression::Literal(lit) => lit,
_ => Literal::String(format!("{:?}", e)), }
})
.collect(),
))
},
),
additive_expression,
)),
)),
|(left, _, right)| match right {
Expression::InSubquery(mut in_subquery) => {
in_subquery.expression = Box::new(left);
Expression::InSubquery(in_subquery)
}
_ => Expression::Binary(BinaryExpression {
left: Box::new(left),
operator: Operator::In,
right: Box::new(right),
location: Location::default(),
}),
},
),
// Fallback: plain `left [op right]` — the comparison is optional, so a
// bare additive expression also succeeds here.
map(
tuple((
additive_expression,
opt(tuple((comparison_operator, additive_expression))),
)),
|(left, opt_right)| {
if let Some((op, right)) = opt_right {
Expression::Binary(BinaryExpression {
left: Box::new(left),
operator: op,
right: Box::new(right),
location: Location::default(),
})
} else {
left
}
},
),
))(tokens)
}
/// Parses `+`, `-`, and concatenation at equal precedence, left-associative
/// over multiplicative expressions.
///
/// Fix: the old catch-all `_ => Operator::Plus` silently mapped any
/// unexpected token to `+`. The `alt` below only ever yields the four tokens
/// it matches, so the arm is unreachable — it now panics loudly instead of
/// mislabeling an operator if the alt list is ever extended.
fn additive_expression(tokens: &[Token]) -> IResult<&[Token], Expression> {
    map(
        tuple((
            multiplicative_expression,
            many0(tuple((
                alt((
                    expect_token(Token::Plus),
                    expect_token(Token::Minus),
                    expect_token(Token::Dash),
                    expect_token(Token::Concat),
                )),
                multiplicative_expression,
            ))),
        )),
        |(first, rest)| {
            rest.into_iter().fold(first, |left, (op, right)| {
                let operator = match op {
                    Token::Plus => Operator::Plus,
                    // The lexer may emit either Minus or Dash for `-`.
                    Token::Minus | Token::Dash => Operator::Minus,
                    Token::Concat => Operator::Concat,
                    _ => unreachable!("alt above only yields Plus/Minus/Dash/Concat"),
                };
                Expression::Binary(BinaryExpression {
                    left: Box::new(left),
                    operator,
                    right: Box::new(right),
                    location: Location::default(),
                })
            })
        },
    )(tokens)
}
/// Parses `*`, `/`, `%` at equal precedence, left-associative over postfix
/// expressions.
///
/// Fix: the old catch-all `_ => Operator::Star` silently mapped any
/// unexpected token to `*`. The `alt` below only yields the three tokens it
/// matches, so the arm is unreachable — it now panics loudly instead of
/// mislabeling an operator if the alt list is ever extended.
fn multiplicative_expression(tokens: &[Token]) -> IResult<&[Token], Expression> {
    map(
        tuple((
            postfix_expression,
            many0(tuple((
                alt((
                    expect_token(Token::Star),
                    expect_token(Token::Slash),
                    expect_token(Token::Percent),
                )),
                postfix_expression,
            ))),
        )),
        |(first, rest)| {
            rest.into_iter().fold(first, |left, (op, right)| {
                let operator = match op {
                    Token::Star => Operator::Star,
                    Token::Slash => Operator::Slash,
                    Token::Percent => Operator::Percent,
                    _ => unreachable!("alt above only yields Star/Slash/Percent"),
                };
                Expression::Binary(BinaryExpression {
                    left: Box::new(left),
                    operator,
                    right: Box::new(right),
                    location: Location::default(),
                })
            })
        },
    )(tokens)
}
/// Parses a bracketed array of literal expressions.
///
/// `[]` yields an empty numeric Vector. A non-empty array of all-numeric
/// literals yields `Literal::Vector` (integers widened to f64); any other
/// literal mix yields `Literal::List`. Non-literal elements are rejected.
///
/// Fix: the old code tracked `all_numeric` and then re-verified the same
/// property with a second `.iter().all(...)` pass over the literals — the
/// redundant duplicate scan is removed; classification is done in one pass.
fn array_expression(tokens: &[Token]) -> IResult<&[Token], Literal> {
    let (tokens, _) = expect_token(Token::LeftBracket)(tokens)?;
    // Empty array short-circuits to an empty numeric vector.
    if matches!(tokens.first(), Some(Token::RightBracket)) {
        let (tokens, _) = expect_token(Token::RightBracket)(tokens)?;
        return Ok((tokens, Literal::Vector(vec![])));
    }
    let (tokens, expressions) = expression_list(tokens)?;
    let (tokens, _) = expect_token(Token::RightBracket)(tokens)?;
    let mut literals = Vec::with_capacity(expressions.len());
    let mut numeric_values = Vec::new();
    let mut all_numeric = true;
    for expr in expressions {
        let literal = match expr {
            Expression::Literal(literal) => literal,
            // Only literal elements are supported in array syntax.
            _ => {
                return Err(nom::Err::Error(nom::error::Error::new(
                    tokens,
                    nom::error::ErrorKind::Tag,
                )));
            }
        };
        match &literal {
            Literal::Integer(n) => numeric_values.push(*n as f64),
            Literal::Float(f) => numeric_values.push(*f),
            _ => all_numeric = false,
        }
        literals.push(literal);
    }
    // expression_list guarantees at least one element here.
    if all_numeric && !literals.is_empty() {
        Ok((tokens, Literal::Vector(numeric_values)))
    } else {
        Ok((tokens, Literal::List(literals)))
    }
}
/// Parses a PATH constructor: either `PATH <vector-token>` (a lexer-level
/// numeric vector whose components become Float literals) or
/// `PATH [ expr, ... ]`, where the bracketed list may be empty.
fn path_constructor(tokens: &[Token]) -> IResult<&[Token], PathConstructor> {
let (tokens, _) = expect_token(Token::Path)(tokens)?;
// Fast path: the lexer already produced a numeric vector token.
if let Some(Token::Vector(values)) = tokens.first() {
let (tokens, _) = expect_token_variant(&|t| matches!(t, Token::Vector(_)))(tokens)?;
let elements = values
.iter()
.map(|&v| Expression::Literal(crate::ast::ast::Literal::Float(v)))
.collect();
return Ok((
tokens,
PathConstructor {
elements,
location: Location::default(),
},
));
}
let (tokens, _) = expect_token(Token::LeftBracket)(tokens)?;
// Empty constructor: `PATH []`.
if matches!(tokens.first(), Some(Token::RightBracket)) {
let (tokens, _) = expect_token(Token::RightBracket)(tokens)?;
return Ok((
tokens,
PathConstructor {
elements: vec![],
location: Location::default(),
},
));
}
let (tokens, elements) = expression_list(tokens)?;
let (tokens, _) = expect_token(Token::RightBracket)(tokens)?;
Ok((
tokens,
PathConstructor {
elements,
location: Location::default(),
},
))
}
/// Parses a primary expression followed by zero or more `[index]` suffixes,
/// folding them left-to-right into nested ArrayIndex nodes.
fn postfix_expression(tokens: &[Token]) -> IResult<&[Token], Expression> {
    let (mut rest, mut acc) = primary_expression(tokens)?;
    loop {
        // Stop as soon as the next token is not an opening bracket.
        let after_open = match expect_token(Token::LeftBracket)(rest) {
            Ok((after, _)) => after,
            Err(_) => break,
        };
        let (after_index, index) = expression(after_open)?;
        let (after_close, _) = expect_token(Token::RightBracket)(after_index)?;
        acc = Expression::ArrayIndex(ArrayIndexExpression {
            array: Box::new(acc),
            index: Box::new(index),
            location: Location::default(),
        });
        rest = after_close;
    }
    Ok((rest, acc))
}
/// Parses an atomic (primary) expression. `alt` ordering is load-bearing:
/// EXISTS / NOT EXISTS and pattern forms come first; TRIM is tried before
/// the generic function-call parser so it can claim the special
/// `TRIM([mode] [char] FROM str)` argument syntax; property accesses are
/// tried before bare variables; literals come last.
fn primary_expression(tokens: &[Token]) -> IResult<&[Token], Expression> {
alt((
map(not_exists_subquery, Expression::NotExistsSubquery),
map(exists_subquery, Expression::ExistsSubquery),
map(pattern_expression, Expression::Pattern),
// Parenthesized form: either a subquery or a grouped expression.
map(
alt((
map(subquery_expression, Expression::Subquery),
map(
tuple((
expect_token(Token::LeftParen),
expression,
expect_token(Token::RightParen),
)),
|(_, expr, _)| expr,
),
)),
|expr| expr,
),
map(case_expression, Expression::Case),
map(cast_expression, Expression::Cast),
map(path_constructor, Expression::PathConstructor),
map(array_expression, Expression::Literal),
map(trim_function_call, Expression::FunctionCall), map(function_call, Expression::FunctionCall),
map(property_access, Expression::PropertyAccess),
map(property_access_continued, Expression::PropertyAccess),
map(property_access_token, Expression::PropertyAccess),
map(parameter, Expression::Parameter),
map(variable, Expression::Variable),
map(literal, Expression::Literal),
))(tokens)
}
fn expression_list(tokens: &[Token]) -> IResult<&[Token], Vec<Expression>> {
map(
tuple((
expression,
many0(tuple((expect_token(Token::Comma), expression))),
)),
|(first_expr, additional_exprs)| {
let mut expressions = vec![first_expr];
expressions.extend(additional_exprs.into_iter().map(|(_, expr)| expr));
expressions
},
)(tokens)
}
/// Parses a generic call `name([DISTINCT] arg {, arg})`.
///
/// The function name is upper-cased for case-insensitive lookup. A lone `*`
/// argument (as in `COUNT(*)`) is represented as a Variable named "*".
/// Arguments are consumed manually so that a comma not followed by another
/// argument is a hard error rather than a silent stop.
fn function_call(tokens: &[Token]) -> IResult<&[Token], FunctionCall> {
let (tokens, name) = identifier(tokens)?;
let name = name.to_uppercase();
let (tokens, _) = expect_token(Token::LeftParen)(tokens)?;
let (tokens, distinct_qualifier) = opt(distinct_qualifier)(tokens)?;
let distinct = distinct_qualifier.unwrap_or(DistinctQualifier::None);
let mut arguments = Vec::new();
let mut remaining = tokens;
if !matches!(remaining.first(), Some(Token::RightParen)) {
// Special case: a single `*` argument.
if matches!(remaining.first(), Some(Token::Star)) {
arguments.push(Expression::Variable(Variable {
name: "*".to_string(),
location: Location::default(),
}));
remaining = &remaining[1..]; } else {
// General case: expression [, expression]* until `)`.
loop {
let (new_remaining, expr) = expression(remaining)?;
arguments.push(expr);
remaining = new_remaining;
match remaining.first() {
Some(Token::Comma) => {
remaining = &remaining[1..]; continue;
}
Some(Token::RightParen) => break,
// Anything else after an argument is a syntax error.
_ => {
return Err(nom::Err::Error(nom::error::Error::new(
remaining,
nom::error::ErrorKind::Tag,
)))
}
}
}
}
}
let (remaining, _) = expect_token(Token::RightParen)(remaining)?;
Ok((
remaining,
FunctionCall {
name,
distinct,
arguments,
location: Location::default(),
},
))
}
/// Parses SQL-style TRIM with its keyword argument syntax:
/// `TRIM([LEADING|TRAILING|BOTH] [char] FROM string)` and
/// `TRIM(FROM string)` (which defaults to BOTH + a single space).
///
/// Plain `TRIM(string)` is deliberately rejected here; primary_expression
/// tries this parser first and falls through to the generic function_call
/// parser for that form. Arguments are encoded positionally as
/// [mode, trim-char, string].
fn trim_function_call(tokens: &[Token]) -> IResult<&[Token], FunctionCall> {
// Only fire on an identifier spelled TRIM (any case).
if !matches!(tokens.first(), Some(Token::Identifier(name)) if name.eq_ignore_ascii_case("TRIM"))
{
return Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)));
}
let (tokens, _name) = identifier(tokens)?; let (tokens, _) = expect_token(Token::LeftParen)(tokens)?;
let mut arguments = Vec::new();
let mut remaining = tokens;
// Optional trim-mode keyword.
let trim_mode = match remaining.first() {
Some(Token::Leading) => {
remaining = &remaining[1..];
Some("LEADING")
}
Some(Token::Trailing) => {
remaining = &remaining[1..];
Some("TRAILING")
}
Some(Token::Both) => {
remaining = &remaining[1..];
Some("BOTH")
}
_ => None,
};
if let Some(mode) = trim_mode {
arguments.push(Expression::Literal(Literal::String(mode.to_string())));
// `TRIM(MODE FROM s)` — no trim character; default to a space.
if matches!(remaining.first(), Some(Token::From)) {
remaining = &remaining[1..]; arguments.push(Expression::Literal(Literal::String(" ".to_string())));
} else {
// `TRIM(MODE char FROM s)`.
let (new_remaining, trim_char_expr) = expression(remaining)?;
remaining = new_remaining;
arguments.push(trim_char_expr);
let (new_remaining, _) = expect_token(Token::From)(remaining)?;
remaining = new_remaining;
}
let (new_remaining, string_expr) = expression(remaining)?;
remaining = new_remaining;
arguments.push(string_expr);
} else {
// No mode keyword: only `TRIM(FROM s)` is accepted by this parser.
if matches!(remaining.first(), Some(Token::From)) {
remaining = &remaining[1..]; arguments.push(Expression::Literal(Literal::String("BOTH".to_string()))); arguments.push(Expression::Literal(Literal::String(" ".to_string())));
let (new_remaining, string_expr) = expression(remaining)?;
remaining = new_remaining;
arguments.push(string_expr);
} else {
return Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)));
}
}
let (remaining, _) = expect_token(Token::RightParen)(remaining)?;
Ok((
remaining,
FunctionCall {
name: "TRIM".to_string(),
distinct: DistinctQualifier::None,
arguments,
location: Location::default(),
},
))
}
fn property_access(tokens: &[Token]) -> IResult<&[Token], PropertyAccess> {
map(
tuple((
identifier,
expect_token(Token::Dot),
identifier,
many0(tuple((expect_token(Token::Dot), identifier))),
)),
|(object, _, first_property, additional_properties)| {
let mut property_path = first_property;
for (_, prop) in additional_properties {
property_path = format!("{}.{}", property_path, prop);
}
PropertyAccess {
object,
property: property_path,
location: Location::default(),
}
},
)(tokens)
}
fn property_access_continued(tokens: &[Token]) -> IResult<&[Token], PropertyAccess> {
map(
tuple((
property_access_token,
many0(tuple((expect_token(Token::Dot), identifier))),
)),
|(base_access, additional_properties)| {
let mut property_path = base_access.property;
for (_, prop) in additional_properties {
property_path = format!("{}.{}", property_path, prop);
}
PropertyAccess {
object: base_access.object,
property: property_path,
location: Location::default(),
}
},
)(tokens)
}
fn variable(tokens: &[Token]) -> IResult<&[Token], Variable> {
map(identifier, |name| Variable {
name,
location: Location::default(),
})(tokens)
}
fn property_access_token(tokens: &[Token]) -> IResult<&[Token], PropertyAccess> {
if let Some(Token::PropertyAccess(s)) = tokens.first() {
let parts: Vec<&str> = s.split('.').collect();
if parts.len() == 2 {
Ok((
&tokens[1..],
PropertyAccess {
object: parts[0].to_string(),
property: parts[1].to_string(),
location: Location::default(),
},
))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
fn identifier(tokens: &[Token]) -> IResult<&[Token], String> {
if let Some(token) = tokens.first() {
let identifier_str = match token {
Token::Identifier(s) => Some(s.clone()),
Token::Value => Some("value".to_string()),
Token::Type => Some("type".to_string()),
Token::User => Some("user".to_string()),
Token::Role => Some("role".to_string()),
Token::Schema => Some("schema".to_string()),
Token::Data => Some("data".to_string()),
Token::Graph => Some("graph".to_string()),
Token::Node => Some("node".to_string()),
Token::Edge => Some("edge".to_string()),
Token::Path => Some("path".to_string()),
Token::Table => Some("table".to_string()),
Token::Property => Some("property".to_string()),
Token::Source => Some("source".to_string()),
Token::Destination => Some("destination".to_string()),
Token::Zone => Some("zone".to_string()),
Token::Time => Some("time".to_string()),
Token::Parameter => Some("parameter".to_string()),
Token::Order => Some("order".to_string()),
Token::Contains => Some("contains".to_string()),
Token::Next => Some("next".to_string()), Token::Start => Some("start".to_string()), Token::End => Some("end".to_string()), Token::Register => Some("register".to_string()), Token::Unregister => Some("unregister".to_string()), Token::Description => Some("description".to_string()), _ => None,
};
if let Some(s) = identifier_str {
Ok((&tokens[1..], s))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
/// Parses an edge-direction token for graph patterns:
/// `<->` both, `->` outgoing, `<-` incoming, bare `-` undirected.
fn edge_direction(tokens: &[Token]) -> IResult<&[Token], EdgeDirection> {
alt((
value(EdgeDirection::Both, expect_token(Token::ArrowBoth)),
value(EdgeDirection::Outgoing, expect_token(Token::Arrow)),
value(EdgeDirection::Incoming, expect_token(Token::ArrowLeft)),
value(EdgeDirection::Undirected, expect_token(Token::Dash)),
))(tokens)
}
/// Parses a binary comparison operator. Each symbolic operator is its own
/// lexer token, so `alt` ordering is not prefix-sensitive here; the two
/// keyword operators STARTS WITH and ENDS WITH each consume two tokens.
fn comparison_operator(tokens: &[Token]) -> IResult<&[Token], Operator> {
alt((
value(Operator::Equal, expect_token(Token::Equal)),
value(Operator::NotEqual, expect_token(Token::NotEqual)),
value(Operator::LessThan, expect_token(Token::LessThan)),
value(Operator::LessEqual, expect_token(Token::LessEqual)),
value(Operator::GreaterThan, expect_token(Token::GreaterThan)),
value(Operator::GreaterEqual, expect_token(Token::GreaterEqual)),
value(Operator::Within, expect_token(Token::Within)),
value(Operator::Like, expect_token(Token::Like)),
value(Operator::Contains, expect_token(Token::Contains)),
value(
Operator::Starts,
tuple((expect_token(Token::Starts), expect_token(Token::With))),
),
value(
Operator::Ends,
tuple((expect_token(Token::Ends), expect_token(Token::With))),
),
))(tokens)
}
/// Parses a subquery quantifier keyword: ALL, ANY, or SOME.
fn quantifier(tokens: &[Token]) -> IResult<&[Token], Quantifier> {
alt((
value(Quantifier::All, expect_token(Token::All)),
value(Quantifier::Any, expect_token(Token::Any)),
value(Quantifier::Some, expect_token(Token::Some)),
))(tokens)
}
/// Parses a single literal token: string, integer, float, boolean, NULL, or
/// a lexer-level numeric vector. Every branch consumes exactly one token of
/// a distinct kind, so the `alt` order is not significant here.
fn literal(tokens: &[Token]) -> IResult<&[Token], Literal> {
alt((
map(string_literal, Literal::String),
map(integer_literal, Literal::Integer),
map(float_literal, Literal::Float),
map(boolean_literal, Literal::Boolean),
map(null_literal, |_| Literal::Null),
map(vector_literal, Literal::Vector),
))(tokens)
}
fn identifier_or_quoted(tokens: &[Token]) -> IResult<&[Token], String> {
if let Some(token) = tokens.first() {
let identifier_str = match token {
Token::Identifier(s) => Some(s.clone()),
Token::BacktickString(s) => Some(s.clone()), Token::String(s) => Some(s.clone()), Token::Value => Some("value".to_string()),
Token::Type => Some("type".to_string()),
Token::User => Some("user".to_string()),
Token::Role => Some("role".to_string()),
Token::Schema => Some("schema".to_string()),
Token::Data => Some("data".to_string()),
Token::Graph => Some("graph".to_string()),
Token::Node => Some("node".to_string()),
Token::Edge => Some("edge".to_string()),
Token::Path => Some("path".to_string()),
Token::Table => Some("table".to_string()),
Token::Property => Some("property".to_string()),
Token::Source => Some("source".to_string()),
Token::Destination => Some("destination".to_string()),
Token::Zone => Some("zone".to_string()),
Token::Time => Some("time".to_string()),
Token::Parameter => Some("parameter".to_string()),
Token::Order => Some("order".to_string()),
Token::Contains => Some("contains".to_string()),
Token::Next => Some("next".to_string()),
Token::Start => Some("start".to_string()),
Token::End => Some("end".to_string()),
_ => None,
};
if let Some(s) = identifier_str {
if s.is_empty() {
return Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Verify,
)));
}
Ok((&tokens[1..], s))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
fn string_literal(tokens: &[Token]) -> IResult<&[Token], String> {
if let Some(Token::String(s)) = tokens.first() {
Ok((&tokens[1..], s.clone()))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
fn integer_literal(tokens: &[Token]) -> IResult<&[Token], i64> {
if let Some(Token::Integer(n)) = tokens.first() {
Ok((&tokens[1..], *n))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
fn float_literal(tokens: &[Token]) -> IResult<&[Token], f64> {
if let Some(Token::Float(f)) = tokens.first() {
Ok((&tokens[1..], *f))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
fn boolean_literal(tokens: &[Token]) -> IResult<&[Token], bool> {
if let Some(Token::Boolean(b)) = tokens.first() {
Ok((&tokens[1..], *b))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
fn null_literal(tokens: &[Token]) -> IResult<&[Token], ()> {
value((), expect_token(Token::Null))(tokens)
}
fn vector_literal(tokens: &[Token]) -> IResult<&[Token], Vec<f64>> {
if let Some(Token::Vector(v)) = tokens.first() {
Ok((&tokens[1..], v.clone()))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
/// Dispatches to the individual catalog (DDL) statement parsers.
///
/// `alt` order matters: the GRAPH TYPE parsers run before the plain GRAPH
/// parsers, and create/drop_graph_statement additionally peek ahead to
/// reject `... GRAPH TYPE` input, so `CREATE GRAPH TYPE` is never claimed
/// by `CREATE GRAPH`.
fn catalog_statement(tokens: &[Token]) -> IResult<&[Token], CatalogStatement> {
alt((
map(create_schema_statement, CatalogStatement::CreateSchema),
map(drop_schema_statement, CatalogStatement::DropSchema),
map(
create_graph_type_statement,
CatalogStatement::CreateGraphType,
),
map(drop_graph_type_statement, CatalogStatement::DropGraphType),
map(alter_graph_type_statement, CatalogStatement::AlterGraphType),
map(create_graph_statement, CatalogStatement::CreateGraph),
map(drop_graph_statement, CatalogStatement::DropGraph),
map(truncate_graph_statement, CatalogStatement::TruncateGraph),
map(clear_graph_statement, CatalogStatement::ClearGraph),
map(create_user_statement, CatalogStatement::CreateUser),
map(drop_user_statement, CatalogStatement::DropUser),
map(create_role_statement, CatalogStatement::CreateRole),
map(drop_role_statement, CatalogStatement::DropRole),
map(grant_role_statement, CatalogStatement::GrantRole),
map(revoke_role_statement, CatalogStatement::RevokeRole),
map(
create_procedure_statement,
CatalogStatement::CreateProcedure,
),
map(drop_procedure_statement, CatalogStatement::DropProcedure),
))(tokens)
}
fn validated_schema_name(tokens: &[Token]) -> IResult<&[Token], CatalogPath> {
let (remaining, schema_path) = catalog_path(tokens)?;
if schema_path.segments.is_empty() || schema_path.segments.iter().any(|s| s.trim().is_empty()) {
return Err(nom::Err::Failure(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Verify,
)));
}
Ok((remaining, schema_path))
}
fn create_schema_statement(tokens: &[Token]) -> IResult<&[Token], CreateSchemaStatement> {
map(
tuple((
expect_token(Token::Create),
expect_token(Token::Schema),
opt(tuple((
expect_token(Token::If),
expect_token(Token::Not),
expect_token(Token::Exists),
))),
validated_schema_name,
)),
|(_, _, if_not_exists, schema_path)| CreateSchemaStatement {
schema_path,
if_not_exists: if_not_exists.is_some(),
location: Location::default(),
},
)(tokens)
}
fn drop_schema_statement(tokens: &[Token]) -> IResult<&[Token], DropSchemaStatement> {
map(
tuple((
expect_token(Token::Drop),
expect_token(Token::Schema),
opt(tuple((
expect_token(Token::If),
expect_token(Token::Exists),
))),
catalog_path,
opt(alt((
value(true, expect_token(Token::Cascade)),
value(false, expect_token(Token::Restrict)),
))),
)),
|(_, _, if_exists, schema_path, cascade)| DropSchemaStatement {
schema_path,
if_exists: if_exists.is_some(),
cascade: cascade.unwrap_or(false),
location: Location::default(),
},
)(tokens)
}
/// Parses `CREATE [OR REPLACE] [PROPERTY] GRAPH [IF NOT EXISTS] <path>
/// [<graph type spec>] [AS <query>]`.
fn create_graph_statement(tokens: &[Token]) -> IResult<&[Token], CreateGraphStatement> {
// Manual lookahead: step over CREATE [OR REPLACE] [PROPERTY] GRAPH and,
// if the token after GRAPH is TYPE, reject so that
// create_graph_type_statement gets to parse `CREATE ... GRAPH TYPE`.
if !tokens.is_empty() {
let mut skip_count = 1; if tokens.len() > skip_count && matches!(tokens[skip_count], Token::Or) {
skip_count += 2; }
if tokens.len() > skip_count && matches!(tokens[skip_count], Token::Property) {
skip_count += 1; }
// Step over the GRAPH keyword itself.
skip_count += 1;
if tokens.len() > skip_count && matches!(tokens[skip_count], Token::Type) {
return Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Alt,
)));
}
}
map(
tuple((
expect_token(Token::Create),
opt(tuple((
expect_token(Token::Or),
expect_token(Token::Replace),
))),
opt(expect_token(Token::Property)),
expect_token(Token::Graph),
opt(tuple((
expect_token(Token::If),
expect_token(Token::Not),
expect_token(Token::Exists),
))),
catalog_path,
opt(graph_type_spec),
opt(tuple((expect_token(Token::As), query))),
)),
|(_, or_replace, _, _, if_not_exists, graph_path, graph_type_spec, as_query)| {
CreateGraphStatement {
graph_path,
graph_type_spec,
if_not_exists: if_not_exists.is_some(),
or_replace: or_replace.is_some(),
as_query: as_query.map(|(_, query)| Box::new(query)),
location: Location::default(),
}
},
)(tokens)
}
/// Parses `DROP [PROPERTY] GRAPH [IF EXISTS] <path> [CASCADE]`.
fn drop_graph_statement(tokens: &[Token]) -> IResult<&[Token], DropGraphStatement> {
// Manual lookahead mirroring create_graph_statement: reject
// `DROP ... GRAPH TYPE` so drop_graph_type_statement can parse it.
if !tokens.is_empty() {
let mut skip_count = 1; if tokens.len() > skip_count && matches!(tokens[skip_count], Token::Property) {
skip_count += 1; }
// Step over the GRAPH keyword itself.
skip_count += 1;
if tokens.len() > skip_count && matches!(tokens[skip_count], Token::Type) {
return Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Alt,
)));
}
}
map(
tuple((
expect_token(Token::Drop),
opt(expect_token(Token::Property)),
expect_token(Token::Graph),
opt(tuple((
expect_token(Token::If),
expect_token(Token::Exists),
))),
catalog_path,
opt(expect_token(Token::Cascade)),
)),
|(_, _, _, if_exists, graph_path, cascade)| DropGraphStatement {
graph_path,
if_exists: if_exists.is_some(),
cascade: cascade.is_some(),
location: Location::default(),
},
)(tokens)
}
fn truncate_graph_statement(tokens: &[Token]) -> IResult<&[Token], TruncateGraphStatement> {
map(
tuple((
expect_token(Token::Truncate),
expect_token(Token::Graph),
catalog_path,
)),
|(_, _, graph_path)| TruncateGraphStatement {
graph_path,
location: Location::default(),
},
)(tokens)
}
/// Parses `CLEAR GRAPH [<path>]` — the path is optional.
fn clear_graph_statement(tokens: &[Token]) -> IResult<&[Token], ClearGraphStatement> {
    let (rest, _) = expect_token(Token::Clear)(tokens)?;
    let (rest, _) = expect_token(Token::Graph)(rest)?;
    let (rest, graph_path) = opt(catalog_path)(rest)?;
    Ok((
        rest,
        ClearGraphStatement {
            graph_path,
            location: Location::default(),
        },
    ))
}
fn create_graph_type_statement(tokens: &[Token]) -> IResult<&[Token], CreateGraphTypeStatement> {
map(
tuple((
expect_token(Token::Create),
opt(tuple((
expect_token(Token::Or),
expect_token(Token::Replace),
))),
opt(expect_token(Token::Property)),
expect_token(Token::Graph),
expect_token(Token::Type),
opt(tuple((
expect_token(Token::If),
expect_token(Token::Not),
expect_token(Token::Exists),
))),
catalog_path,
opt(tuple((
expect_token(Token::Copy),
expect_token(Token::Of),
catalog_path,
))),
graph_type_spec,
)),
|(_, or_replace, _, _, _, if_not_exists, graph_type_path, copy_of, graph_type_spec)| {
CreateGraphTypeStatement {
graph_type_path,
copy_of: copy_of.map(|(_, _, path)| path),
graph_type_spec,
if_not_exists: if_not_exists.is_some(),
or_replace: or_replace.is_some(),
location: Location::default(),
}
},
)(tokens)
}
fn drop_graph_type_statement(tokens: &[Token]) -> IResult<&[Token], DropGraphTypeStatement> {
map(
tuple((
expect_token(Token::Drop),
opt(expect_token(Token::Property)),
expect_token(Token::Graph),
expect_token(Token::Type),
opt(tuple((
expect_token(Token::If),
expect_token(Token::Exists),
))),
catalog_path,
opt(alt((
value(true, expect_token(Token::Cascade)),
value(false, expect_token(Token::Restrict)),
))),
)),
|(_, _, _, _, if_exists, graph_type_path, cascade)| DropGraphTypeStatement {
graph_type_path,
if_exists: if_exists.is_some(),
cascade: cascade.unwrap_or(false),
location: Location::default(),
},
)(tokens)
}
fn alter_graph_type_statement(tokens: &[Token]) -> IResult<&[Token], AlterGraphTypeStatement> {
map(
tuple((
expect_token(Token::Alter),
expect_token(Token::Graph),
expect_token(Token::Type),
identifier,
)),
|(_, _, _, name)| AlterGraphTypeStatement {
name,
location: Location::default(),
},
)(tokens)
}
/// Parses a slash-separated catalog path such as `/schema/graph`,
/// `schema/graph`, or `/schema/graph/` — the leading and trailing slashes
/// are both optional and discarded; only the segments are kept.
fn catalog_path(tokens: &[Token]) -> IResult<&[Token], CatalogPath> {
map(
tuple((
opt(expect_token(Token::Slash)),
separated_list1(expect_token(Token::Slash), identifier_or_quoted),
opt(expect_token(Token::Slash)),
)),
|(_, segments, _)| CatalogPath::new(segments, Location::default()),
)(tokens)
}
/// Parses a parenthesized graph type body: `( [vertex types] [edge types] )`
/// with both clauses optional (missing clauses become empty lists).
fn graph_type_spec(tokens: &[Token]) -> IResult<&[Token], GraphTypeSpec> {
    let (rest, _) = expect_token(Token::LeftParen)(tokens)?;
    let (rest, vertex_types) = opt(vertex_types_clause)(rest)?;
    let (rest, edge_types) = opt(edge_types_clause)(rest)?;
    let (rest, _) = expect_token(Token::RightParen)(rest)?;
    Ok((
        rest,
        GraphTypeSpec {
            vertex_types: vertex_types.unwrap_or_default(),
            edge_types: edge_types.unwrap_or_default(),
            location: Location::default(),
        },
    ))
}
fn vertex_types_clause(tokens: &[Token]) -> IResult<&[Token], Vec<VertexTypeSpec>> {
map(
tuple((
alt((expect_token(Token::Vertex), expect_token(Token::Node))),
alt((expect_token(Token::Type), expect_token(Token::Types))),
vertex_type_spec,
many0(tuple((expect_token(Token::Comma), vertex_type_spec))),
)),
|(_, _, first_type, additional_types)| {
let mut types = vec![first_type];
types.extend(additional_types.into_iter().map(|(_, spec)| spec));
types
},
)(tokens)
}
fn edge_types_clause(tokens: &[Token]) -> IResult<&[Token], Vec<EdgeTypeSpec>> {
map(
tuple((
expect_token(Token::Edge),
alt((expect_token(Token::Type), expect_token(Token::Types))),
edge_type_spec,
many0(tuple((expect_token(Token::Comma), edge_type_spec))),
)),
|(_, _, first_type, additional_types)| {
let mut types = vec![first_type];
types.extend(additional_types.into_iter().map(|(_, spec)| spec));
types
},
)(tokens)
}
fn vertex_type_spec(tokens: &[Token]) -> IResult<&[Token], VertexTypeSpec> {
map(
tuple((
opt(identifier),
opt(tuple((
alt((expect_token(Token::Is), expect_token(Token::Colon))),
label_expression,
))),
opt(property_type_list),
)),
|(identifier, labels, properties)| VertexTypeSpec {
identifier,
labels: labels.map(|(_, expr)| expr),
properties,
location: Location::default(),
},
)(tokens)
}
fn edge_type_spec(tokens: &[Token]) -> IResult<&[Token], EdgeTypeSpec> {
map(
tuple((
opt(identifier),
opt(tuple((
alt((expect_token(Token::Is), expect_token(Token::Colon))),
label_expression,
))),
opt(property_type_list),
opt(tuple((expect_token(Token::Source), identifier))),
opt(tuple((expect_token(Token::Destination), identifier))),
)),
|(identifier, labels, properties, source, destination)| EdgeTypeSpec {
identifier,
labels: labels.map(|(_, expr)| expr),
properties,
source_vertex: source.map(|(_, id)| id),
destination_vertex: destination.map(|(_, id)| id),
location: Location::default(),
},
)(tokens)
}
fn label_expression(tokens: &[Token]) -> IResult<&[Token], LabelExpression> {
map(
tuple((
label_term,
many0(tuple((expect_token(Token::Pipe), label_term))),
)),
|(first_term, additional_terms)| {
let mut terms = vec![first_term];
terms.extend(additional_terms.into_iter().map(|(_, term)| term));
LabelExpression {
terms,
location: Location::default(),
}
},
)(tokens)
}
/// Parses one term of a label expression: a factor followed by zero or more
/// continuations.
///
/// NOTE(review): the first `alt` arm joins factors with a `Token::Not`
/// separator, and the second arm (`: name`) fabricates a `(Token::Not,
/// factor)` pair purely so both arms share the same tuple shape — the token
/// half is discarded below. Confirm the lexer really emits `Not` as the
/// conjunction separator in label expressions.
fn label_term(tokens: &[Token]) -> IResult<&[Token], LabelTerm> {
map(
tuple((
label_factor,
many0(alt((
tuple((expect_token(Token::Not), label_factor)),
map(
tuple((expect_token(Token::Colon), identifier_or_quoted)),
|(_, name)| (Token::Not, LabelFactor::Identifier(name)), ),
))),
)),
|(first_factor, additional_factors)| {
let mut factors = vec![first_factor];
// Keep only the factor half of each (separator, factor) pair.
factors.extend(additional_factors.into_iter().map(|(_, factor)| factor));
LabelTerm {
factors,
location: Location::default(),
}
},
)(tokens)
}
/// Parses one label factor. `alt` order matters: the `:name` form is tried
/// before a bare name; `%` is the wildcard label; a parenthesized group
/// recurses into a full label expression.
fn label_factor(tokens: &[Token]) -> IResult<&[Token], LabelFactor> {
alt((
map(
tuple((expect_token(Token::Colon), identifier_or_quoted)),
|(_, name)| LabelFactor::Identifier(name),
),
map(identifier_or_quoted, LabelFactor::Identifier),
value(LabelFactor::Wildcard, expect_token(Token::Percent)),
map(
delimited(
expect_token(Token::LeftParen),
label_expression,
expect_token(Token::RightParen),
),
|expr| LabelFactor::Parenthesized(Box::new(expr)),
),
))(tokens)
}
/// Flattens a label expression tree into a flat list of label names,
/// rendering wildcards as "%" and recursing into parenthesized groups.
fn label_expression_to_strings(label_expr: LabelExpression) -> Vec<String> {
    label_expr
        .terms
        .into_iter()
        .flat_map(|term| term.factors)
        .flat_map(|factor| match factor {
            LabelFactor::Identifier(name) => vec![name],
            LabelFactor::Wildcard => vec!["%".to_string()],
            LabelFactor::Parenthesized(inner) => label_expression_to_strings(*inner),
        })
        .collect()
}
/// Parses a parenthesized list of property type declarations.
/// Commas between declarations are OPTIONAL (`opt(Comma)`), so
/// `(a INT b STRING)` parses the same as `(a INT, b STRING)` — a deliberately
/// permissive grammar.
fn property_type_list(tokens: &[Token]) -> IResult<&[Token], PropertyTypeList> {
    map(
        delimited(
            expect_token(Token::LeftParen),
            tuple((
                property_type_decl,
                many0(tuple((opt(expect_token(Token::Comma)), property_type_decl))),
            )),
            expect_token(Token::RightParen),
        ),
        |(first_prop, additional_props)| {
            let mut properties = vec![first_prop];
            properties.extend(additional_props.into_iter().map(|(_, prop)| prop));
            PropertyTypeList {
                properties,
                location: Location::default(),
            }
        },
    )(tokens)
}
fn property_type_decl(tokens: &[Token]) -> IResult<&[Token], PropertyTypeDecl> {
map(tuple((identifier, type_spec)), |(name, type_spec)| {
PropertyTypeDecl {
name,
type_spec,
location: Location::default(),
}
})(tokens)
}
/// Parses a scalar type name into a `TypeSpec`.
///
/// No parameters (string length, float precision, decimal scale, vector
/// dimension) are parsed here — they are all set to `None`. `VECTOR` is not
/// a lexer keyword; it is recognized case-insensitively as an identifier.
fn type_spec(tokens: &[Token]) -> IResult<&[Token], TypeSpec> {
    alt((
        value(TypeSpec::Boolean, expect_token(Token::BooleanType)),
        value(TypeSpec::Integer, expect_token(Token::IntegerType)),
        value(
            TypeSpec::String { max_length: None },
            expect_token(Token::StringType),
        ),
        value(
            TypeSpec::Float { precision: None },
            expect_token(Token::FloatType),
        ),
        value(TypeSpec::Real, expect_token(Token::RealType)),
        value(TypeSpec::Double, expect_token(Token::DoubleType)),
        value(TypeSpec::BigInt, expect_token(Token::BigIntType)),
        value(TypeSpec::SmallInt, expect_token(Token::SmallIntType)),
        value(
            TypeSpec::Decimal {
                precision: None,
                scale: None,
            },
            expect_token(Token::DecimalType),
        ),
        // VECTOR is matched as an identifier, case-insensitively.
        map(
            verify(identifier, |id: &str| id.eq_ignore_ascii_case("VECTOR")),
            |_| TypeSpec::Vector { dimension: None },
        ),
    ))(tokens)
}
/// Dispatches a `SESSION ...` statement to SET / RESET / CLOSE sub-parsers.
fn session_statement(tokens: &[Token]) -> IResult<&[Token], SessionStatement> {
    alt((
        map(session_set_statement, SessionStatement::SessionSet),
        map(session_reset_statement, SessionStatement::SessionReset),
        map(session_close_statement, SessionStatement::SessionClose),
    ))(tokens)
}
fn session_set_statement(tokens: &[Token]) -> IResult<&[Token], SessionSetStatement> {
map(
tuple((
expect_token(Token::Session),
expect_token(Token::Set),
session_set_clause,
)),
|(_, _, clause)| SessionSetStatement {
clause,
location: Location::default(),
},
)(tokens)
}
/// Parses the clause after `SESSION SET`: schema, current graph, time zone,
/// or a graph/binding-table/value parameter initialization.
///
/// NOTE(review): the plain `[PROPERTY] GRAPH <expr>` arm is tried BEFORE the
/// `[PROPERTY] GRAPH [IF NOT EXISTS] $param = <expr>` arm. If
/// `graph_expression` can consume a `$param` prefix, the parameter form would
/// be shadowed — verify alt ordering against the lexer/`catalog_path` rules.
fn session_set_clause(tokens: &[Token]) -> IResult<&[Token], SessionSetClause> {
    alt((
        // SESSION SET SCHEMA <path>
        map(
            tuple((expect_token(Token::Schema), catalog_path)),
            |(_, schema_reference)| SessionSetClause::Schema { schema_reference },
        ),
        // SESSION SET [PROPERTY] GRAPH <graph expression>
        map(
            tuple((
                opt(expect_token(Token::Property)),
                expect_token(Token::Graph),
                graph_expression,
            )),
            |(_, _, graph_expression)| SessionSetClause::Graph { graph_expression },
        ),
        // SESSION SET TIME ZONE '<zone>'
        map(
            tuple((
                expect_token(Token::Time),
                expect_token(Token::Zone),
                string_literal,
            )),
            |(_, _, time_zone)| SessionSetClause::TimeZone { time_zone },
        ),
        // SESSION SET [PROPERTY] GRAPH [IF NOT EXISTS] $param = <graph expr>
        map(
            tuple((
                opt(expect_token(Token::Property)),
                expect_token(Token::Graph),
                opt(tuple((
                    expect_token(Token::If),
                    expect_token(Token::Not),
                    expect_token(Token::Exists),
                ))),
                parameter_name,
                graph_initializer,
            )),
            |(_, _, if_not_exists, parameter, graph_initializer)| {
                SessionSetClause::GraphParameter {
                    parameter,
                    graph_initializer,
                    if_not_exists: if_not_exists.is_some(),
                }
            },
        ),
        // SESSION SET [BINDING] TABLE [IF NOT EXISTS] $param = <query>
        map(
            tuple((
                opt(expect_token(Token::Binding)),
                expect_token(Token::Table),
                opt(tuple((
                    expect_token(Token::If),
                    expect_token(Token::Not),
                    expect_token(Token::Exists),
                ))),
                parameter_name,
                binding_table_initializer,
            )),
            |(_, _, if_not_exists, parameter, binding_table_initializer)| {
                SessionSetClause::BindingTableParameter {
                    parameter,
                    binding_table_initializer,
                    if_not_exists: if_not_exists.is_some(),
                }
            },
        ),
        // SESSION SET VALUE [IF NOT EXISTS] $param = <expression>
        map(
            tuple((
                expect_token(Token::Value),
                opt(tuple((
                    expect_token(Token::If),
                    expect_token(Token::Not),
                    expect_token(Token::Exists),
                ))),
                parameter_name,
                value_initializer,
            )),
            |(_, if_not_exists, parameter, value_initializer)| SessionSetClause::ValueParameter {
                parameter,
                value_initializer,
                if_not_exists: if_not_exists.is_some(),
            },
        ),
    ))(tokens)
}
/// Parses `SESSION RESET [<args>]`; the argument clause is optional.
fn session_reset_statement(tokens: &[Token]) -> IResult<&[Token], SessionResetStatement> {
    let (rest, _) = expect_token(Token::Session)(tokens)?;
    let (rest, _) = expect_token(Token::Reset)(rest)?;
    let (rest, args) = opt(session_reset_args)(rest)?;
    Ok((
        rest,
        SessionResetStatement {
            args,
            location: Location::default(),
        },
    ))
}
/// Parses the argument clause of `SESSION RESET`:
/// `[ALL] PARAMETERS|CHARACTERISTICS`, `SCHEMA`, `[PROPERTY] GRAPH`,
/// `TIME ZONE`, or `[PARAMETER] $name`. The parameter arm is last because
/// its keyword is optional and it would otherwise shadow the other forms.
fn session_reset_args(tokens: &[Token]) -> IResult<&[Token], SessionResetArgs> {
    alt((
        map(
            tuple((
                opt(expect_token(Token::All)),
                alt((
                    value(
                        SessionResetTarget::Parameters,
                        expect_token(Token::Parameters),
                    ),
                    value(
                        SessionResetTarget::Characteristics,
                        expect_token(Token::Characteristics),
                    ),
                )),
            )),
            |(_, target)| SessionResetArgs::All { target },
        ),
        value(SessionResetArgs::Schema, expect_token(Token::Schema)),
        map(
            tuple((
                opt(expect_token(Token::Property)),
                expect_token(Token::Graph),
            )),
            |_| SessionResetArgs::Graph,
        ),
        map(
            tuple((expect_token(Token::Time), expect_token(Token::Zone))),
            |_| SessionResetArgs::TimeZone,
        ),
        map(
            tuple((opt(expect_token(Token::Parameter)), parameter_name)),
            |(_, parameter_name)| SessionResetArgs::Parameter {
                parameter: parameter_name,
            },
        ),
    ))(tokens)
}
/// Parses `SESSION CLOSE`.
fn session_close_statement(tokens: &[Token]) -> IResult<&[Token], SessionCloseStatement> {
    let (rest, _) = expect_token(Token::Session)(tokens)?;
    let (rest, _) = expect_token(Token::Close)(rest)?;
    Ok((
        rest,
        SessionCloseStatement {
            location: Location::default(),
        },
    ))
}
/// Parses a graph expression: a catalog/parameter reference, a parenthesized
/// expression, or a left-associative chain of `UNION [ALL]` combinations.
///
/// The union loop folds left-to-right, so `a UNION b UNION c` becomes
/// `Union(Union(a, b), c)`. The `all` flag is per-UNION, not global.
fn graph_expression(tokens: &[Token]) -> IResult<&[Token], GraphExpression> {
    log::debug!(
        "graph_expression called with tokens: {:?}",
        tokens.get(0..3).unwrap_or(&[])
    );
    // Left-associative UNION [ALL] chain over primary expressions.
    fn union_expr(tokens: &[Token]) -> IResult<&[Token], GraphExpression> {
        let (tokens, left) = primary_graph_expr(tokens)?;
        let mut current = left;
        let mut remaining = tokens;
        while let Ok((tokens_after_union, _)) = expect_token(Token::Union)(remaining) {
            let (tokens_after_all, union_all) = opt(expect_token(Token::All))(tokens_after_union)?;
            let all = union_all.is_some();
            let (tokens_after_right, right) = primary_graph_expr(tokens_after_all)?;
            current = GraphExpression::Union {
                left: Box::new(current),
                right: Box::new(right),
                all,
            };
            remaining = tokens_after_right;
        }
        Ok((remaining, current))
    }
    // A parenthesized expression (recursing into union_expr) or a reference.
    fn primary_graph_expr(tokens: &[Token]) -> IResult<&[Token], GraphExpression> {
        alt((
            delimited(
                expect_token(Token::LeftParen),
                union_expr,
                expect_token(Token::RightParen),
            ),
            map(parameter_or_catalog_path, |path_or_param| {
                log::debug!("graph_expression parsed catalog path: {:?}", path_or_param);
                GraphExpression::Reference(path_or_param)
            }),
        ))(tokens)
    }
    let result = union_expr(tokens);
    if result.is_err() {
        log::debug!("graph_expression parsing failed: {:?}", result);
    }
    result
}
fn parameter(tokens: &[Token]) -> IResult<&[Token], Parameter> {
map(
tuple((expect_token(Token::Dollar), identifier)),
|(_, name)| Parameter {
name,
location: Location::default(),
},
)(tokens)
}
fn parameter_name(tokens: &[Token]) -> IResult<&[Token], String> {
map(
tuple((expect_token(Token::Dollar), identifier)),
|(_, name)| name,
)(tokens)
}
fn graph_initializer(tokens: &[Token]) -> IResult<&[Token], GraphExpression> {
map(
tuple((expect_token(Token::Equal), graph_expression)),
|(_, graph_expr)| graph_expr,
)(tokens)
}
fn binding_table_initializer(tokens: &[Token]) -> IResult<&[Token], Box<Query>> {
map(tuple((expect_token(Token::Equal), query)), |(_, query)| {
Box::new(query)
})(tokens)
}
fn value_initializer(tokens: &[Token]) -> IResult<&[Token], Expression> {
map(
tuple((expect_token(Token::Equal), expression)),
|(_, expr)| expr,
)(tokens)
}
/// Currently a plain alias for `catalog_path`; parameters are not handled
/// here despite the name. Kept as a seam for future `$param` support.
fn parameter_or_catalog_path(tokens: &[Token]) -> IResult<&[Token], CatalogPath> {
    catalog_path(tokens)
}
fn expect_token(expected: Token) -> impl Fn(&[Token]) -> IResult<&[Token], Token> {
move |tokens: &[Token]| {
if let Some(token) = tokens.first() {
if std::mem::discriminant(token) == std::mem::discriminant(&expected) {
Ok((&tokens[1..], token.clone()))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
}
/// Alias for `expect_token`, kept for call sites written in nom's
/// `tag`-style naming.
fn tag_token(expected: Token) -> impl Fn(&[Token]) -> IResult<&[Token], Token> {
    expect_token(expected)
}
fn expect_token_variant<F>(predicate: &F) -> impl Fn(&[Token]) -> IResult<&[Token], Token> + '_
where
F: Fn(&Token) -> bool,
{
move |tokens: &[Token]| {
if let Some(token) = tokens.first() {
if predicate(token) {
Ok((&tokens[1..], token.clone()))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
}
/// Builds a parser that consumes one `Token::Identifier` whose text equals
/// `name` case-insensitively (ASCII). Used for soft keywords such as
/// `TRANSACTION` that the lexer does not reserve.
fn expect_identifier(name: &str) -> impl Fn(&[Token]) -> IResult<&[Token], Token> + '_ {
    move |tokens: &[Token]| match tokens.split_first() {
        Some((head @ Token::Identifier(id), rest)) if id.eq_ignore_ascii_case(name) => {
            Ok((rest, head.clone()))
        }
        _ => Err(nom::Err::Error(nom::error::Error::new(
            tokens,
            nom::error::ErrorKind::Tag,
        ))),
    }
}
fn create_user_statement(tokens: &[Token]) -> IResult<&[Token], CreateUserStatement> {
map(
tuple((
expect_token(Token::Create),
expect_token(Token::User),
string_literal, expect_token(Token::Password),
string_literal, opt(tuple((
expect_token(Token::Roles),
expect_token(Token::LeftParen),
separated_list1(expect_token(Token::Comma), string_literal),
expect_token(Token::RightParen),
))),
)),
|(_, _, username, _, password, roles_opt)| CreateUserStatement {
username,
password: Some(password),
roles: roles_opt.map(|(_, _, roles, _)| roles).unwrap_or_default(),
if_not_exists: false, location: Location::default(),
},
)(tokens)
}
fn drop_user_statement(tokens: &[Token]) -> IResult<&[Token], DropUserStatement> {
map(
tuple((
expect_token(Token::Drop),
expect_token(Token::User),
opt(tuple((
expect_token(Token::If),
expect_token(Token::Exists),
))),
string_literal, )),
|(_, _, if_exists, username)| DropUserStatement {
username,
if_exists: if_exists.is_some(),
location: Location::default(),
},
)(tokens)
}
fn create_role_statement(tokens: &[Token]) -> IResult<&[Token], CreateRoleStatement> {
map(
tuple((
expect_token(Token::Create),
expect_token(Token::Role),
string_literal, )),
|(_, _, role_name)| CreateRoleStatement {
role_name,
description: None,
permissions: vec![],
if_not_exists: false,
location: Location::default(),
},
)(tokens)
}
fn drop_role_statement(tokens: &[Token]) -> IResult<&[Token], DropRoleStatement> {
map(
tuple((
expect_token(Token::Drop),
expect_token(Token::Role),
opt(tuple((
expect_token(Token::If),
expect_token(Token::Exists),
))),
string_literal, )),
|(_, _, if_exists, role_name)| DropRoleStatement {
role_name,
if_exists: if_exists.is_some(),
location: Location::default(),
},
)(tokens)
}
fn grant_role_statement(tokens: &[Token]) -> IResult<&[Token], GrantRoleStatement> {
map(
tuple((
expect_token(Token::Grant),
expect_token(Token::Role),
string_literal, expect_token(Token::To),
string_literal, )),
|(_, _, role_name, _, username)| GrantRoleStatement {
role_name,
username,
location: Location::default(),
},
)(tokens)
}
fn revoke_role_statement(tokens: &[Token]) -> IResult<&[Token], RevokeRoleStatement> {
map(
tuple((
expect_token(Token::Revoke),
expect_token(Token::Role),
string_literal, expect_token(Token::From),
string_literal, )),
|(_, _, role_name, _, username)| RevokeRoleStatement {
role_name,
username,
location: Location::default(),
},
)(tokens)
}
/// Parses `CREATE [OR REPLACE] PROCEDURE [IF NOT EXISTS] <name>(<params>) <body>`.
/// Note that the grammar accepts both OR REPLACE and IF NOT EXISTS at once;
/// reconciling that combination is left to the executor.
fn create_procedure_statement(tokens: &[Token]) -> IResult<&[Token], CreateProcedureStatement> {
    map(
        tuple((
            expect_token(Token::Create),
            opt(tuple((
                expect_token(Token::Or),
                expect_token(Token::Replace),
            ))),
            expect_token(Token::Procedure),
            opt(tuple((
                expect_token(Token::If),
                expect_token(Token::Not),
                expect_token(Token::Exists),
            ))),
            identifier_or_quoted,
            procedure_parameters,
            procedure_body_statement,
        )),
        |(_, or_replace, _, if_not_exists, procedure_name, parameters, procedure_body)| {
            CreateProcedureStatement {
                procedure_name,
                parameters,
                procedure_body,
                or_replace: or_replace.is_some(),
                if_not_exists: if_not_exists.is_some(),
                location: Location::default(),
            }
        },
    )(tokens)
}
fn drop_procedure_statement(tokens: &[Token]) -> IResult<&[Token], DropProcedureStatement> {
map(
tuple((
expect_token(Token::Drop),
expect_token(Token::Procedure),
opt(tuple((
expect_token(Token::If),
expect_token(Token::Exists),
))),
identifier_or_quoted, )),
|(_, _, if_exists, procedure_name)| DropProcedureStatement {
procedure_name,
if_exists: if_exists.is_some(),
location: Location::default(),
},
)(tokens)
}
/// Parses a parenthesized, possibly empty procedure parameter list.
/// As with `property_type_list`, the comma between parameters is OPTIONAL.
/// An empty `()` yields an empty Vec.
fn procedure_parameters(tokens: &[Token]) -> IResult<&[Token], Vec<ProcedureParameter>> {
    delimited(
        expect_token(Token::LeftParen),
        opt(map(
            tuple((
                procedure_parameter,
                many0(tuple((
                    opt(expect_token(Token::Comma)),
                    procedure_parameter,
                ))),
            )),
            |(first_param, additional_params)| {
                let mut params = vec![first_param];
                params.extend(additional_params.into_iter().map(|(_, param)| param));
                params
            },
        )),
        expect_token(Token::RightParen),
    )(tokens)
    // Empty parameter list (`()`) parses as None; normalize to vec![].
    .map(|(remaining, opt_params)| (remaining, opt_params.unwrap_or_default()))
}
fn procedure_parameter(tokens: &[Token]) -> IResult<&[Token], ProcedureParameter> {
map(
tuple((
identifier_or_quoted, type_spec, opt(preceded(expect_token(Token::Equal), expression)), )),
|(name, type_spec, default_value)| ProcedureParameter {
name,
type_spec,
default_value,
location: Location::default(),
},
)(tokens)
}
fn is_predicate(tokens: &[Token]) -> IResult<&[Token], Expression> {
map(
tuple((
additive_expression,
expect_token(Token::Is),
opt(expect_token(Token::Not)),
alt((
map(expect_token(Token::Null), |_| {
(IsPredicateType::Null, None, None)
}),
map(expect_token(Token::Boolean(true)), |_| {
(IsPredicateType::True, None, None)
}),
map(expect_token(Token::Boolean(false)), |_| {
(IsPredicateType::False, None, None)
}),
map(expect_token(Token::Unknown), |_| {
(IsPredicateType::Unknown, None, None)
}),
map(expect_token(Token::Normalized), |_| {
(IsPredicateType::Normalized, None, None)
}),
map(expect_token(Token::Directed), |_| {
(IsPredicateType::Directed, None, None)
}),
map(
tuple((
expect_token(Token::Source),
opt(tuple((expect_token(Token::Of), additive_expression))),
)),
|(_, target)| {
(
IsPredicateType::Source,
target.map(|(_, expr)| Box::new(expr)),
None,
)
},
),
map(
tuple((
expect_token(Token::Destination),
opt(tuple((expect_token(Token::Of), additive_expression))),
)),
|(_, target)| {
(
IsPredicateType::Destination,
target.map(|(_, expr)| Box::new(expr)),
None,
)
},
),
map(tuple((expect_token(Token::Typed), type_spec)), |(_, ts)| {
(IsPredicateType::Typed, None, Some(ts))
}),
map(label_expression, |label_expr| {
(IsPredicateType::Label(label_expr), None, None)
}),
)),
)),
|(subject, _, not_token, (predicate_type, target, type_spec))| {
Expression::IsPredicate(IsPredicateExpression {
subject: Box::new(subject),
predicate_type,
negated: not_token.is_some(),
target,
type_spec,
location: Location::default(),
})
},
)(tokens)
}
/// Parses a CASE expression, trying the simple form
/// (`CASE <expr> WHEN ...`) before the searched form (`CASE WHEN ...`).
/// Ordering works because the simple form requires a test expression where
/// the searched form has the WHEN keyword.
fn case_expression(tokens: &[Token]) -> IResult<&[Token], CaseExpression> {
    alt((
        map(simple_case_expression, |simple| CaseExpression {
            case_type: CaseType::Simple(simple),
            location: Location::default(),
        }),
        map(searched_case_expression, |searched| CaseExpression {
            case_type: CaseType::Searched(searched),
            location: Location::default(),
        }),
    ))(tokens)
}
fn cast_expression(tokens: &[Token]) -> IResult<&[Token], CastExpression> {
map(
tuple((
expect_token(Token::Cast),
expect_token(Token::LeftParen),
expression,
expect_token(Token::As),
type_spec,
expect_token(Token::RightParen),
)),
|(_, _, expr, _, target_type, _)| CastExpression {
expression: Box::new(expr),
target_type,
location: Location::default(),
},
)(tokens)
}
fn simple_case_expression(tokens: &[Token]) -> IResult<&[Token], SimpleCaseExpression> {
map(
tuple((
expect_token(Token::Case),
expression, many1(simple_when_branch),
opt(tuple((expect_token(Token::Else), expression))),
expect_token(Token::End),
)),
|(_, test_expr, when_branches, else_clause, _)| SimpleCaseExpression {
test_expression: Box::new(test_expr),
when_branches,
else_expression: else_clause.map(|(_, expr)| Box::new(expr)),
},
)(tokens)
}
/// Parses a searched CASE: `CASE WHEN <cond> THEN ... [ELSE <expr>] END`.
fn searched_case_expression(tokens: &[Token]) -> IResult<&[Token], SearchedCaseExpression> {
    let (rest, _) = expect_token(Token::Case)(tokens)?;
    let (rest, when_branches) = many1(searched_when_branch)(rest)?;
    let (rest, else_clause) = opt(tuple((expect_token(Token::Else), expression)))(rest)?;
    let (rest, _) = expect_token(Token::End)(rest)?;
    Ok((
        rest,
        SearchedCaseExpression {
            when_branches,
            else_expression: else_clause.map(|(_, expr)| Box::new(expr)),
        },
    ))
}
fn simple_when_branch(tokens: &[Token]) -> IResult<&[Token], SimpleWhenBranch> {
map(
tuple((
expect_token(Token::When),
separated_list1(expect_token(Token::Comma), expression), expect_token(Token::Then),
expression,
)),
|(_, when_values, _, then_expr)| SimpleWhenBranch {
when_values,
then_expression: Box::new(then_expr),
location: Location::default(),
},
)(tokens)
}
fn searched_when_branch(tokens: &[Token]) -> IResult<&[Token], SearchedWhenBranch> {
map(
tuple((
expect_token(Token::When),
expression, expect_token(Token::Then),
expression, )),
|(_, condition, _, then_expr)| SearchedWhenBranch {
condition: Box::new(condition),
then_expression: Box::new(then_expr),
location: Location::default(),
},
)(tokens)
}
/// Parses any data-modifying statement. Alt order is significant: the
/// MATCH-prefixed forms must be tried before the bare INSERT/SET/REMOVE/DELETE
/// forms so that a leading MATCH clause is consumed by the right parser.
fn data_statement(tokens: &[Token]) -> IResult<&[Token], DataStatement> {
    log::debug!(
        "data_statement called with tokens: {:?}",
        tokens.get(0..3).unwrap_or(&[])
    );
    let result = alt((
        map(match_insert_statement, DataStatement::MatchInsert),
        map(match_set_statement, DataStatement::MatchSet),
        map(match_remove_statement, DataStatement::MatchRemove),
        map(match_delete_statement, DataStatement::MatchDelete),
        map(insert_statement, DataStatement::Insert),
        map(set_statement, DataStatement::Set),
        map(remove_statement, DataStatement::Remove),
        map(delete_statement, DataStatement::Delete),
    ))(tokens);
    if result.is_err() {
        log::debug!("data_statement parsing failed");
    }
    result
}
/// Parses `MATCH ... [WITH ...] [WHERE ...] INSERT <pattern>[, <pattern>...]`.
fn match_insert_statement(tokens: &[Token]) -> IResult<&[Token], MatchInsertStatement> {
    log::debug!(
        "match_insert_statement called with tokens: {:?}",
        tokens.get(0..5).unwrap_or(&[])
    );
    let result = map(
        tuple((
            match_clause,
            opt(with_clause),
            opt(where_clause),
            expect_token(Token::Insert),
            separated_list1(expect_token(Token::Comma), graph_pattern),
        )),
        |(match_clause, with_clause_opt, where_clause_opt, _, insert_graph_patterns)| {
            MatchInsertStatement {
                match_clause,
                with_clause: with_clause_opt,
                where_clause: where_clause_opt,
                insert_graph_patterns,
                location: Location::default(),
            }
        },
    )(tokens);
    match &result {
        Ok(_) => log::debug!("match_insert_statement successfully parsed MATCH INSERT"),
        Err(e) => log::debug!("match_insert_statement failed: {:?}", e),
    }
    result
}
/// Parses `MATCH ... [WITH ...] [WHERE ...] SET <item>[, <item>...]`.
fn match_set_statement(tokens: &[Token]) -> IResult<&[Token], MatchSetStatement> {
    log::debug!(
        "PARSER: match_set_statement called with first 5 tokens: {:?}",
        tokens.get(0..5).unwrap_or(&[])
    );
    let result = map(
        tuple((
            match_clause,
            opt(with_clause),
            opt(where_clause),
            expect_token(Token::Set),
            separated_list1(expect_token(Token::Comma), set_item),
        )),
        |(match_clause, with_clause_opt, where_clause_opt, _, items)| {
            log::debug!(
                "PARSER: Successfully parsed MatchSetStatement with WITH clause: {}",
                with_clause_opt.is_some()
            );
            MatchSetStatement {
                match_clause,
                with_clause: with_clause_opt,
                where_clause: where_clause_opt,
                items,
                location: Location::default(),
            }
        },
    )(tokens);
    match &result {
        Ok((remaining, _)) => log::debug!(
            "PARSER: match_set_statement succeeded, remaining tokens: {}",
            remaining.len()
        ),
        Err(e) => log::debug!("PARSER: match_set_statement failed: {:?}", e),
    }
    result
}
fn match_remove_statement(tokens: &[Token]) -> IResult<&[Token], MatchRemoveStatement> {
map(
tuple((
match_clause,
opt(with_clause),
opt(where_clause),
expect_token(Token::Remove),
separated_list1(expect_token(Token::Comma), remove_item),
)),
|(match_clause, with_clause_opt, where_clause_opt, _, items)| MatchRemoveStatement {
match_clause,
with_clause: with_clause_opt,
where_clause: where_clause_opt,
items,
location: Location::default(),
},
)(tokens)
}
/// Parses `MATCH ... [WITH ...] [WHERE ...] [DETACH|NODETACH] DELETE <expr>...`.
///
/// NOTE(review): when neither DETACH nor NODETACH is given, `detach` defaults
/// to `false` here, but the bare `delete_statement` parser defaults it to
/// `true`. One of the two defaults is likely wrong — confirm the intended
/// semantics and align them.
fn match_delete_statement(tokens: &[Token]) -> IResult<&[Token], MatchDeleteStatement> {
    map(
        tuple((
            match_clause,
            opt(with_clause),
            opt(where_clause),
            opt(alt((
                expect_token(Token::Detach),
                expect_token(Token::NoDetach),
            ))),
            expect_token(Token::Delete),
            separated_list1(expect_token(Token::Comma), expression),
        )),
        |(match_clause, with_clause_opt, where_clause_opt, detach_mode, _, expressions)| {
            MatchDeleteStatement {
                match_clause,
                with_clause: with_clause_opt,
                where_clause: where_clause_opt,
                expressions,
                detach: match detach_mode {
                    Some(Token::Detach) => true,
                    Some(Token::NoDetach) => false,
                    None => false,
                    // Unreachable: the alt above only yields Detach/NoDetach.
                    _ => false,
                },
                location: Location::default(),
            }
        },
    )(tokens)
}
/// Parses `INSERT <pattern>[, ...]`; `CREATE` is accepted as a synonym.
fn insert_statement(tokens: &[Token]) -> IResult<&[Token], InsertStatement> {
    let (rest, _) = alt((expect_token(Token::Insert), expect_token(Token::Create)))(tokens)?;
    let (rest, graph_patterns) =
        separated_list1(expect_token(Token::Comma), graph_pattern)(rest)?;
    Ok((
        rest,
        InsertStatement {
            graph_patterns,
            location: Location::default(),
        },
    ))
}
/// Parses one insertable graph pattern: a full path pattern, or a single
/// node pattern wrapped into a one-element `PathPattern` for uniformity.
fn graph_pattern(tokens: &[Token]) -> IResult<&[Token], PathPattern> {
    alt((
        path_pattern,
        map(node_pattern, |node| PathPattern {
            assignment: None,
            path_type: None,
            elements: vec![PatternElement::Node(node)],
            location: Location::default(),
        }),
    ))(tokens)
}
/// Parses a bare `SET <item>[, <item>...]` statement (no preceding MATCH —
/// that combination is handled by `match_set_statement`, tried earlier in
/// `data_statement`).
fn set_statement(tokens: &[Token]) -> IResult<&[Token], SetStatement> {
    log::debug!(
        "set_statement called with tokens: {:?}",
        tokens.get(0..10).unwrap_or(&[])
    );
    let result = map(
        tuple((
            expect_token(Token::Set),
            separated_list1(expect_token(Token::Comma), set_item),
        )),
        |(_, items)| {
            log::debug!("SET parsing succeeded");
            SetStatement {
                items,
                location: Location::default(),
            }
        },
    )(tokens);
    if result.is_err() {
        log::debug!("set_statement parsing failed: {:?}", result);
    }
    result
}
/// Parses one SET item: `v.prop = expr`, `v:Label` / `v IS Label`, or
/// `v = expr`. Alt order matters — the property-access form must come before
/// the bare variable assignment so `a.b = c` is not parsed as variable `a`.
fn set_item(tokens: &[Token]) -> IResult<&[Token], SetItem> {
    alt((
        map(
            tuple((
                property_access_token,
                expect_token(Token::Equal),
                expression,
            )),
            |(property, _, value)| SetItem::PropertyAssignment { property, value },
        ),
        map(
            tuple((
                identifier,
                alt((expect_token(Token::Colon), expect_token(Token::Is))),
                label_expression,
            )),
            |(variable, _, labels)| SetItem::LabelAssignment { variable, labels },
        ),
        map(
            tuple((identifier, expect_token(Token::Equal), expression)),
            |(variable, _, value)| SetItem::VariableAssignment { variable, value },
        ),
    ))(tokens)
}
/// Parses a bare `REMOVE <item>[, <item>...]` statement (the MATCH-prefixed
/// form is handled by `match_remove_statement`).
fn remove_statement(tokens: &[Token]) -> IResult<&[Token], RemoveStatement> {
    log::debug!(
        "remove_statement called with tokens: {:?}",
        tokens.get(0..10).unwrap_or(&[])
    );
    let result = map(
        tuple((
            expect_token(Token::Remove),
            separated_list1(expect_token(Token::Comma), remove_item),
        )),
        |(_, items)| RemoveStatement {
            items,
            location: Location::default(),
        },
    )(tokens);
    if result.is_err() {
        log::debug!("remove_statement parsing failed");
    }
    result
}
/// Parses one REMOVE item: a property access (`v.prop`), a label removal
/// (`v:Label` / `v IS Label`), or a bare variable. Alt order matters — the
/// bare-variable form must be last because it matches any identifier.
fn remove_item(tokens: &[Token]) -> IResult<&[Token], RemoveItem> {
    alt((
        map(property_access_token, RemoveItem::Property),
        map(
            tuple((
                identifier,
                alt((expect_token(Token::Colon), expect_token(Token::Is))),
                label_expression,
            )),
            |(variable, _, labels)| RemoveItem::Label { variable, labels },
        ),
        map(identifier, RemoveItem::Variable),
    ))(tokens)
}
/// Parses a bare `[DETACH|NODETACH] DELETE <expr>[, <expr>...]` statement.
///
/// NOTE(review): with no DETACH/NODETACH keyword, `detach` defaults to `true`
/// here, while `match_delete_statement` defaults to `false`. Confirm which
/// default is intended and make the two parsers consistent.
fn delete_statement(tokens: &[Token]) -> IResult<&[Token], DeleteStatement> {
    map(
        tuple((
            opt(alt((
                expect_token(Token::Detach),
                expect_token(Token::NoDetach),
            ))),
            expect_token(Token::Delete),
            separated_list1(expect_token(Token::Comma), expression),
        )),
        |(detach_mode, _, expressions)| DeleteStatement {
            expressions,
            detach: match detach_mode {
                Some(Token::Detach) => true,
                Some(Token::NoDetach) => false,
                // Unreachable `_` arm: the alt only yields Detach/NoDetach.
                None => true, _ => true,
            },
            location: Location::default(),
        },
    )(tokens)
}
fn exists_subquery(tokens: &[Token]) -> IResult<&[Token], ExistsSubqueryExpression> {
map(
tuple((
expect_token(Token::Exists),
expect_token(Token::LeftParen),
basic_query,
expect_token(Token::RightParen),
)),
|(_, _, query, _)| ExistsSubqueryExpression {
query: Box::new(query),
location: Location::default(),
},
)(tokens)
}
fn not_exists_subquery(tokens: &[Token]) -> IResult<&[Token], NotExistsSubqueryExpression> {
map(
tuple((
expect_token(Token::Not),
expect_token(Token::Exists),
expect_token(Token::LeftParen),
basic_query,
expect_token(Token::RightParen),
)),
|(_, _, _, query, _)| NotExistsSubqueryExpression {
query: Box::new(query),
location: Location::default(),
},
)(tokens)
}
fn subquery_expression(tokens: &[Token]) -> IResult<&[Token], SubqueryExpression> {
map(
tuple((
expect_token(Token::LeftParen),
basic_query,
expect_token(Token::RightParen),
)),
|(_, query, _)| SubqueryExpression {
query: Box::new(query),
location: Location::default(),
},
)(tokens)
}
/// Parses `DECLARE <decl>[, <decl>...]`.
fn declare_statement(tokens: &[Token]) -> IResult<&[Token], DeclareStatement> {
    let (rest, _) = expect_token(Token::Declare)(tokens)?;
    let (rest, variable_declarations) =
        separated_list1(expect_token(Token::Comma), variable_declaration)(rest)?;
    Ok((
        rest,
        DeclareStatement {
            variable_declarations,
            location: Location::default(),
        },
    ))
}
fn variable_declaration(tokens: &[Token]) -> IResult<&[Token], VariableDeclaration> {
map(
tuple((
identifier,
type_spec,
opt(preceded(expect_token(Token::Equal), expression)),
)),
|(variable_name, type_spec, initial_value)| VariableDeclaration {
variable_name,
type_spec,
initial_value,
location: Location::default(),
},
)(tokens)
}
/// Parses a procedure body: optional leading variable declarations, an
/// initial statement (query / data / catalog), and zero or more
/// `NEXT [YIELD ...] <statement>` continuations.
///
/// The two alt arms differ in what they require: the first needs at least one
/// leading declaration (continuations optional, `many0`); the second has no
/// declarations and therefore REQUIRES at least one `NEXT` continuation
/// (`many1`) so that a plain query is not claimed by this parser.
fn procedure_body_statement(tokens: &[Token]) -> IResult<&[Token], ProcedureBodyStatement> {
    alt((
        // Arm 1: declarations + initial statement + optional NEXT chain.
        map(
            tuple((
                many1(variable_declaration_for_procedure_body),
                alt((
                    map(query, |q| Statement::Query(q)),
                    map(data_statement, |ds| Statement::DataStatement(ds)),
                    map(catalog_statement, |cs| Statement::CatalogStatement(cs)),
                )),
                many0(tuple((
                    expect_token(Token::Next),
                    opt(yield_clause),
                    alt((
                        map(query, |q| Statement::Query(q)),
                        map(data_statement, |ds| Statement::DataStatement(ds)),
                    )),
                ))),
            )),
            |(variable_defs, initial_statement, chained)| {
                (variable_defs, initial_statement, chained)
            },
        ),
        // Arm 2: no declarations; at least one NEXT continuation required.
        map(
            tuple((
                alt((
                    map(query, |q| Statement::Query(q)),
                    map(data_statement, |ds| Statement::DataStatement(ds)),
                    map(catalog_statement, |cs| Statement::CatalogStatement(cs)),
                )),
                many1(tuple((
                    expect_token(Token::Next),
                    opt(yield_clause),
                    alt((
                        map(query, |q| Statement::Query(q)),
                        map(data_statement, |ds| Statement::DataStatement(ds)),
                    )),
                ))),
            )),
            |(initial_statement, chained)| (vec![], initial_statement, chained),
        ),
    ))(tokens)
    // Assemble the common tuple shape into the AST node.
    .map(|(remaining, (variable_defs, initial_statement, chained))| {
        // All leading declarations are folded into a single DeclareStatement.
        let variable_definitions = if variable_defs.is_empty() {
            vec![]
        } else {
            vec![DeclareStatement {
                variable_declarations: variable_defs,
                location: Location::default(),
            }]
        };
        let chained_statements = chained
            .into_iter()
            .map(|(_, yield_clause, statement)| ChainedStatement {
                yield_clause,
                statement: Box::new(statement),
                location: Location::default(),
            })
            .collect();
        (
            remaining,
            ProcedureBodyStatement {
                variable_definitions,
                initial_statement: Box::new(initial_statement),
                chained_statements,
                location: Location::default(),
            },
        )
    })
}
/// Parses a procedure-body variable declaration with an OPTIONAL leading
/// type: `[<type>] <name> [= <expr>]` (note: type-first, unlike
/// `variable_declaration` which is name-first).
///
/// NOTE(review): a missing type defaults to `TypeSpec::Vector` — that looks
/// like a placeholder rather than a deliberate choice; confirm the intended
/// default (an "untyped"/inferred variant may be more appropriate).
fn variable_declaration_for_procedure_body(
    tokens: &[Token],
) -> IResult<&[Token], VariableDeclaration> {
    map(
        tuple((
            opt(type_spec),
            identifier,
            opt(preceded(tag_token(Token::Equal), expression)),
        )),
        |(type_spec, variable_name, initial_value)| {
            VariableDeclaration {
                variable_name,
                type_spec: type_spec.unwrap_or(TypeSpec::Vector { dimension: None }),
                initial_value,
                location: Location::default(),
            }
        },
    )(tokens)
}
/// (Currently unused.) Parses a MATCH clause bounded by the next WHERE or
/// NEXT token: the slice up to that boundary must be consumed COMPLETELY by
/// `match_clause`, otherwise a Complete error is returned. Fails with an Alt
/// error if no WHERE/NEXT boundary exists at all.
#[allow(dead_code)] fn match_clause_for_procedure_body(tokens: &[Token]) -> IResult<&[Token], MatchClause> {
    let stop_pos = tokens
        .iter()
        .position(|t| matches!(t, Token::Where | Token::Next))
        .ok_or_else(|| {
            nom::Err::Error(nom::error::Error::new(tokens, nom::error::ErrorKind::Alt))
        })?;
    let match_tokens = &tokens[..stop_pos];
    let (remaining, match_clause) = match_clause(match_tokens)?;
    // The bounded slice must be fully consumed; leftovers mean the MATCH
    // clause did not actually extend to the boundary.
    if !remaining.is_empty() {
        return Err(nom::Err::Error(nom::error::Error::new(
            remaining,
            nom::error::ErrorKind::Complete,
        )));
    }
    Ok((&tokens[stop_pos..], match_clause))
}
/// (Currently unused.) Parses an optional WHERE clause bounded by the next
/// NEXT token. Returns `None` without consuming anything when the input does
/// not start with WHERE; otherwise the slice before NEXT must be consumed
/// completely by `where_clause`. Requires a NEXT token to exist.
#[allow(dead_code)] fn where_clause_for_procedure_body(tokens: &[Token]) -> IResult<&[Token], Option<WhereClause>> {
    if tokens.is_empty() || !matches!(tokens[0], Token::Where) {
        return Ok((tokens, None));
    }
    let next_pos = tokens
        .iter()
        .position(|t| matches!(t, Token::Next))
        .ok_or_else(|| {
            nom::Err::Error(nom::error::Error::new(tokens, nom::error::ErrorKind::Alt))
        })?;
    let where_tokens = &tokens[..next_pos];
    let (remaining, where_clause) = where_clause(where_tokens)?;
    // Leftover tokens before NEXT mean the WHERE clause was malformed.
    if !remaining.is_empty() {
        return Err(nom::Err::Error(nom::error::Error::new(
            remaining,
            nom::error::ErrorKind::Complete,
        )));
    }
    Ok((&tokens[next_pos..], Some(where_clause)))
}
/// Parses `AT <catalog path> <statement>[; <statement>...]` — one or more
/// statements executed against an explicit catalog location. Semicolons
/// between statements are optional.
fn at_location_statement(tokens: &[Token]) -> IResult<&[Token], AtLocationStatement> {
    map(
        tuple((
            expect_token(Token::At),
            catalog_path,
            many1(map(
                tuple((
                    // Alt order: DECLARE first (distinct keyword), then query
                    // forms, then SET as a data statement.
                    alt((
                        map(declare_statement, Statement::Declare),
                        map(basic_query, |q| Statement::Query(q)),
                        map(select_statement, Statement::Select),
                        map(set_statement, |s| {
                            Statement::DataStatement(DataStatement::Set(s))
                        }),
                    )),
                    opt(expect_token(Token::Semicolon)),
                )),
                |(statement, _)| statement,
            )),
        )),
        |(_, location_path, statements)| AtLocationStatement {
            location_path,
            statements,
            location: Location::default(),
        },
    )(tokens)
}
/// Dispatches a transaction-control statement:
/// START/BEGIN, COMMIT, ROLLBACK, or SET TRANSACTION characteristics.
fn transaction_statement(tokens: &[Token]) -> IResult<&[Token], TransactionStatement> {
    alt((
        map(
            start_transaction_statement,
            TransactionStatement::StartTransaction,
        ),
        map(commit_statement, TransactionStatement::Commit),
        map(rollback_statement, TransactionStatement::Rollback),
        map(
            set_transaction_characteristics_statement,
            TransactionStatement::SetTransactionCharacteristics,
        ),
    ))(tokens)
}
/// Parses `START TRANSACTION [<characteristics>]` or the `BEGIN` synonym.
/// `TRANSACTION` is matched as a case-insensitive identifier (soft keyword).
fn start_transaction_statement(tokens: &[Token]) -> IResult<&[Token], StartTransactionStatement> {
    alt((
        map(
            tuple((
                expect_token(Token::Start),
                expect_identifier("TRANSACTION"),
                opt(transaction_characteristics),
            )),
            |(_, _, characteristics)| StartTransactionStatement {
                characteristics,
                location: Location::default(),
            },
        ),
        map(
            tuple((expect_token(Token::Begin), opt(transaction_characteristics))),
            |(_, characteristics)| StartTransactionStatement {
                characteristics,
                location: Location::default(),
            },
        ),
    ))(tokens)
}
/// Parses `COMMIT [WORK]`; `work` records whether the keyword was present.
fn commit_statement(tokens: &[Token]) -> IResult<&[Token], CommitStatement> {
    let (rest, _) = expect_token(Token::Commit)(tokens)?;
    let (rest, work) = opt(expect_token(Token::Work))(rest)?;
    Ok((
        rest,
        CommitStatement {
            work: work.is_some(),
            location: Location::default(),
        },
    ))
}
/// Parses `ROLLBACK [WORK]`; `work` records whether the keyword was present.
fn rollback_statement(tokens: &[Token]) -> IResult<&[Token], RollbackStatement> {
    let (rest, _) = expect_token(Token::Rollback)(tokens)?;
    let (rest, work) = opt(expect_token(Token::Work))(rest)?;
    Ok((
        rest,
        RollbackStatement {
            work: work.is_some(),
            location: Location::default(),
        },
    ))
}
fn set_transaction_characteristics_statement(
tokens: &[Token],
) -> IResult<&[Token], SetTransactionCharacteristicsStatement> {
map(
tuple((
expect_token(Token::Set),
expect_identifier("TRANSACTION"),
transaction_characteristics,
)),
|(_, _, characteristics)| SetTransactionCharacteristicsStatement {
characteristics,
location: Location::default(),
},
)(tokens)
}
/// Parses `[ISOLATION LEVEL ...] [READ ONLY|READ WRITE]`. Both parts are
/// optional, so this parser never fails — it may consume nothing.
fn transaction_characteristics(tokens: &[Token]) -> IResult<&[Token], TransactionCharacteristics> {
    let (rest, isolation_level) = opt(isolation_level_clause)(tokens)?;
    let (rest, access_mode) = opt(access_mode_clause)(rest)?;
    Ok((
        rest,
        TransactionCharacteristics {
            isolation_level,
            access_mode,
            location: Location::default(),
        },
    ))
}
/// Parses `ISOLATION LEVEL <level>` where level is READ UNCOMMITTED,
/// READ COMMITTED, REPEATABLE READ, or SERIALIZABLE. The two READ-prefixed
/// alternatives are disambiguated by their second keyword.
fn isolation_level_clause(tokens: &[Token]) -> IResult<&[Token], IsolationLevel> {
    map(
        tuple((
            expect_token(Token::Isolation),
            expect_token(Token::Level),
            alt((
                map(
                    tuple((expect_token(Token::Read), expect_token(Token::Uncommitted))),
                    |_| IsolationLevel::ReadUncommitted,
                ),
                map(
                    tuple((expect_token(Token::Read), expect_token(Token::Committed))),
                    |_| IsolationLevel::ReadCommitted,
                ),
                map(
                    tuple((expect_token(Token::Repeatable), expect_token(Token::Read))),
                    |_| IsolationLevel::RepeatableRead,
                ),
                map(expect_token(Token::Serializable), |_| {
                    IsolationLevel::Serializable
                }),
            )),
        )),
        |(_, _, level)| level,
    )(tokens)
}
fn access_mode_clause(tokens: &[Token]) -> IResult<&[Token], AccessMode> {
alt((
map(
tuple((expect_token(Token::Read), expect_token(Token::Only))),
|_| AccessMode::ReadOnly,
),
map(
tuple((expect_token(Token::Read), expect_token(Token::Write))),
|_| AccessMode::ReadWrite,
),
))(tokens)
}
/// Lookahead gate that decides whether the upcoming tokens form a graph
/// path pattern (e.g. `(a:Label)-[…]->(b)`) rather than an ordinary
/// parenthesized expression.
///
/// Only a small, unambiguous set of prefixes commits to `path_pattern`;
/// anything else returns a recoverable `Error` so the caller can try other
/// expression alternatives.
fn pattern_expression(tokens: &[Token]) -> IResult<&[Token], PatternExpression> {
    if let Some(&Token::LeftParen) = tokens.first() {
        if let Some(second_token) = tokens.get(1) {
            match second_token {
                // `(ident …` — commit only when the identifier is followed
                // by a label (`:`), a closing paren, or a property map (`{`).
                Token::Identifier(_) => {
                    if let Some(third_token) = tokens.get(2) {
                        match third_token {
                            Token::Colon | Token::RightParen | Token::LeftBrace => {
                                return map(path_pattern, |pattern| PatternExpression {
                                    pattern,
                                    location: Location::default(),
                                })(tokens);
                            }
                            _ => {}
                        }
                    }
                }
                // `(:Label …` — anonymous node with a label; always a pattern.
                Token::Colon => {
                    return map(path_pattern, |pattern| PatternExpression {
                        pattern,
                        location: Location::default(),
                    })(tokens);
                }
                // `()` — empty node; only treat it as a pattern when an
                // edge appears to follow.
                Token::RightParen => {
                    if tokens.len() > 3 {
                        if let Some(fourth_token) = tokens.get(3) {
                            // NOTE(review): this inspects index 3, i.e. the
                            // second token after `()`. For `()-[…]` the dash
                            // sits at index 2, so this looks like a possible
                            // off-by-one — confirm intended tokenization
                            // before changing.
                            match fourth_token {
                                Token::Dash | Token::LessThan => {
                                    return map(path_pattern, |pattern| PatternExpression {
                                        pattern,
                                        location: Location::default(),
                                    })(tokens);
                                }
                                _ => {}
                            }
                        }
                    }
                }
                _ => {}
            }
        }
    }
    // Not a recognizable pattern prefix: fail recoverably.
    Err(nom::Err::Error(nom::error::Error::new(
        tokens,
        nom::error::ErrorKind::Tag,
    )))
}
/// Dispatches to the concrete index-statement parser.
///
/// Each alternative starts with a distinct verb (CREATE / DROP / ALTER /
/// OPTIMIZE / REINDEX); the first parser that matches wins.
fn index_statement(tokens: &[Token]) -> IResult<&[Token], IndexStatement> {
    let create = map(create_index_statement, IndexStatement::CreateIndex);
    let drop = map(drop_index_statement, IndexStatement::DropIndex);
    let alter = map(alter_index_statement, IndexStatement::AlterIndex);
    let optimize = map(optimize_index_statement, IndexStatement::OptimizeIndex);
    let reindex = map(reindex_statement, IndexStatement::ReindexIndex);
    alt((create, drop, alter, optimize, reindex))(tokens)
}
/// Parses an index name, which is more permissive than a plain identifier:
/// string literals, numeric literals, and a fixed set of keyword tokens
/// (`value`, `type`, `user`, …, `property`) are all accepted as names.
///
/// After the head token, two kinds of suffixes are glued onto the name:
/// - `-identifier` pairs (`Token::Minus` followed by an identifier), to
///   support hyphenated names such as `my-index`;
/// - for integer-led names only, identifier tokens whose text starts with
///   `_` (presumably how the lexer splits names like `123_abc` into an
///   integer plus `_abc` — TODO confirm against lexer behavior).
fn parse_index_name(tokens: &[Token]) -> IResult<&[Token], String> {
    if let Some(token) = tokens.first() {
        // Map the head token to the initial name string, if this token kind
        // is allowed to start an index name.
        let name = match token {
            Token::Identifier(s) => Some(s.clone()),
            Token::String(s) => Some(s.clone()),
            Token::Integer(n) => Some(n.to_string()),
            Token::Float(f) => Some(f.to_string()),
            // Keywords that are nonetheless legal as index names; they are
            // rendered in lowercase regardless of source spelling.
            Token::Value => Some("value".to_string()),
            Token::Type => Some("type".to_string()),
            Token::User => Some("user".to_string()),
            Token::Role => Some("role".to_string()),
            Token::Schema => Some("schema".to_string()),
            Token::Data => Some("data".to_string()),
            Token::Graph => Some("graph".to_string()),
            Token::Node => Some("node".to_string()),
            Token::Edge => Some("edge".to_string()),
            Token::Path => Some("path".to_string()),
            Token::Table => Some("table".to_string()),
            Token::Property => Some("property".to_string()),
            _ => None,
        };
        if let Some(mut name_str) = name {
            let mut remaining = &tokens[1..];
            // Glue `-suffix` pairs: stop at the first `-` not followed by
            // an identifier (that `-` is left unconsumed).
            while let Some(Token::Minus) = remaining.first() {
                if let Some(Token::Identifier(suffix)) = remaining.get(1) {
                    name_str.push('-');
                    name_str.push_str(suffix);
                    remaining = &remaining[2..];
                } else {
                    break;
                }
            }
            // Integer-led names may continue with `_`-prefixed identifier
            // fragments (e.g. `123` + `_abc` -> "123_abc").
            if let Token::Integer(_) = token {
                while let Some(Token::Identifier(suffix)) = remaining.first() {
                    if suffix.starts_with('_') {
                        name_str.push_str(suffix);
                        remaining = &remaining[1..];
                    } else {
                        break;
                    }
                }
            }
            return Ok((remaining, name_str));
        }
    }
    // Head token cannot start a name (or input is empty): recoverable error.
    Err(nom::Err::Error(nom::error::Error::new(
        tokens,
        nom::error::ErrorKind::Tag,
    )))
}
/// Parses `CREATE [GRAPH] INDEX [IF NOT EXISTS] <name> ON <table>
/// [(col, …)] [USING <method>] [WITH (key = value, …)]`.
///
/// A bare `CREATE INDEX` defaults to a graph adjacency-list index. The
/// `USING <method>` clause is parsed for syntax compatibility but its
/// value is currently discarded (bound to `_using_type`).
fn create_index_statement(tokens: &[Token]) -> IResult<&[Token], CreateIndexStatement> {
    let (tokens, _) = expect_token(Token::Create)(tokens)?;
    // `GRAPH INDEX [<kind>]` or plain `INDEX`; both fall back to
    // AdjacencyList when no explicit graph index kind is given.
    let (tokens, index_type) = alt((
        map(
            pair(
                preceded(expect_identifier("GRAPH"), expect_identifier("INDEX")),
                opt(graph_index_type),
            ),
            |(_, gtype)| {
                IndexTypeSpecifier::Graph(gtype.unwrap_or(GraphIndexTypeSpecifier::AdjacencyList))
            },
        ),
        map(expect_identifier("INDEX"), |_| {
            IndexTypeSpecifier::Graph(GraphIndexTypeSpecifier::AdjacencyList)
        }),
    ))(tokens)?;
    let (tokens, if_not_exists) = opt(tuple((
        expect_token(Token::If),
        expect_token(Token::Not),
        expect_token(Token::Exists),
    )))(tokens)?;
    let if_not_exists = if_not_exists.is_some();
    let (tokens, name) = parse_index_name(tokens)?;
    let (tokens, _) = expect_token(Token::On)(tokens)?;
    let (tokens, table) = parse_table_name(tokens)?;
    // Optional parenthesized column list; absence yields an empty Vec.
    let (tokens, columns) = opt(delimited(
        expect_token(Token::LeftParen),
        separated_list1(expect_token(Token::Comma), identifier),
        expect_token(Token::RightParen),
    ))(tokens)?;
    // `USING <method>`: accepted for compatibility, value ignored.
    let (tokens, _using_type) = opt(preceded(
        expect_identifier("USING"),
        alt((
            expect_identifier("IVF"),
            expect_identifier("FLAT"),
            expect_identifier("INVERTED"),
            expect_identifier("BM25"),
            expect_identifier("NGRAM"),
            expect_identifier("ADJACENCY_LIST"),
            expect_identifier("PATH_INDEX"),
            expect_identifier("REACHABILITY"),
            expect_identifier("PATTERN_INDEX"),
        )),
    ))(tokens)?;
    // Optional `WITH (key = value, …)` options block.
    let (tokens, options) = opt(preceded(expect_identifier("WITH"), index_options))(tokens)?;
    Ok((
        tokens,
        CreateIndexStatement {
            name,
            table,
            columns: columns.unwrap_or_default(),
            index_type,
            options: options.unwrap_or_default(),
            if_not_exists,
            location: Location::default(),
        },
    ))
}
fn drop_index_statement(tokens: &[Token]) -> IResult<&[Token], DropIndexStatement> {
let (tokens, _) = expect_token(Token::Drop)(tokens)?;
let (tokens, _) = expect_identifier("INDEX")(tokens)?;
let (tokens, if_exists) = opt(preceded(
expect_token(Token::If),
expect_token(Token::Exists),
))(tokens)?;
let (tokens, name) = parse_index_name(tokens)?;
Ok((
tokens,
DropIndexStatement {
name,
if_exists: if_exists.is_some(),
location: Location::default(),
},
))
}
fn alter_index_statement(tokens: &[Token]) -> IResult<&[Token], AlterIndexStatement> {
let (tokens, _) = expect_token(Token::Alter)(tokens)?;
let (tokens, _) = expect_identifier("INDEX")(tokens)?;
let (tokens, name) = identifier(tokens)?;
let (tokens, operation) = alt((
map(expect_identifier("REBUILD"), |_| {
AlterIndexOperation::Rebuild
}),
map(expect_identifier("OPTIMIZE"), |_| {
AlterIndexOperation::Optimize
}),
map(
preceded(
tuple((expect_identifier("SET"), expect_identifier("OPTION"))),
tuple((
identifier,
preceded(expect_token(Token::Equal), parse_value),
)),
),
|(key, value)| AlterIndexOperation::SetOption(key, value),
),
))(tokens)?;
Ok((
tokens,
AlterIndexStatement {
name,
operation,
location: Location::default(),
},
))
}
fn optimize_index_statement(tokens: &[Token]) -> IResult<&[Token], OptimizeIndexStatement> {
let (tokens, _) = expect_identifier("OPTIMIZE")(tokens)?;
let (tokens, _) = expect_identifier("INDEX")(tokens)?;
let (tokens, name) = identifier(tokens)?;
Ok((
tokens,
OptimizeIndexStatement {
name,
location: Location::default(),
},
))
}
fn reindex_statement(tokens: &[Token]) -> IResult<&[Token], ReindexStatement> {
let (tokens, _) = expect_identifier("REINDEX")(tokens)?;
let (tokens, name) = identifier(tokens)?;
Ok((
tokens,
ReindexStatement {
name,
location: Location::default(),
},
))
}
/// Maps a graph-index-kind keyword onto its `GraphIndexTypeSpecifier`
/// variant.
fn graph_index_type(tokens: &[Token]) -> IResult<&[Token], GraphIndexTypeSpecifier> {
    let adjacency = map(expect_identifier("ADJACENCY_LIST"), |_| {
        GraphIndexTypeSpecifier::AdjacencyList
    });
    let path_index = map(expect_identifier("PATH_INDEX"), |_| {
        GraphIndexTypeSpecifier::PathIndex
    });
    let reachability = map(expect_identifier("REACHABILITY"), |_| {
        GraphIndexTypeSpecifier::ReachabilityIndex
    });
    let pattern_index = map(expect_identifier("PATTERN_INDEX"), |_| {
        GraphIndexTypeSpecifier::PatternIndex
    });
    alt((adjacency, path_index, reachability, pattern_index))(tokens)
}
fn index_options(tokens: &[Token]) -> IResult<&[Token], IndexOptions> {
let (tokens, params) = delimited(
expect_token(Token::LeftParen),
separated_list1(
expect_token(Token::Comma),
tuple((
identifier,
preceded(expect_token(Token::Equal), parse_value),
)),
),
expect_token(Token::RightParen),
)(tokens)?;
let parameters = params.into_iter().collect();
Ok((
tokens,
IndexOptions {
parameters,
location: Location::default(),
},
))
}
/// Parses a literal value (string, integer, float, boolean, or null) as
/// used in index options and `ALTER INDEX … SET OPTION`.
///
/// FIX: `parse_integer` must be tried BEFORE `parse_number`. In the
/// previous order, `parse_number` consumed every `Token::Integer` and
/// produced `Value::Number(i as f64)`, which made the `Value::Integer`
/// branch unreachable and silently coerced integer options to floats.
/// With the corrected order, `Token::Integer` yields `Value::Integer` and
/// `parse_number` only ever matches `Token::Float` here.
fn parse_value(tokens: &[Token]) -> IResult<&[Token], Value> {
    alt((
        map(parse_string_literal, Value::String),
        map(parse_integer, Value::Integer),
        map(parse_number, Value::Number),
        map(expect_identifier("true"), |_| Value::Boolean(true)),
        map(expect_identifier("false"), |_| Value::Boolean(false)),
        map(expect_identifier("null"), |_| Value::Null),
    ))(tokens)
}
fn parse_string_literal(tokens: &[Token]) -> IResult<&[Token], String> {
if let Some((Token::String(s), rest)) = tokens.split_first() {
Ok((rest, s.clone()))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
/// Accepts an integer or float token, widening integers to `f64`.
fn parse_number(tokens: &[Token]) -> IResult<&[Token], f64> {
    match tokens.split_first() {
        Some((Token::Integer(n), rest)) => Ok((rest, *n as f64)),
        Some((Token::Float(f), rest)) => Ok((rest, *f)),
        _ => Err(nom::Err::Error(nom::error::Error::new(
            tokens,
            nom::error::ErrorKind::Tag,
        ))),
    }
}
fn parse_integer(tokens: &[Token]) -> IResult<&[Token], i64> {
if let Some((Token::Integer(i), rest)) = tokens.split_first() {
Ok((rest, *i))
} else {
Err(nom::Err::Error(nom::error::Error::new(
tokens,
nom::error::ErrorKind::Tag,
)))
}
}
/// Parses a table name. A table name is currently just a bare identifier,
/// so this delegates directly to `identifier`; kept as a separate function
/// so the grammar can later allow qualified names without touching callers.
fn parse_table_name(tokens: &[Token]) -> IResult<&[Token], String> {
    identifier(tokens)
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): only this one test lives inside `#[cfg(test)] mod tests`;
    // the remaining `#[test]` functions below it are declared at module
    // level (still test-only via `#[test]`, but outside this module) —
    // possibly a misplaced closing brace worth consolidating.
    /// A WHERE clause following CALL … YIELD must be captured on the
    /// resulting CallStatement.
    #[test]
    fn test_call_statement_with_where_clause() {
        let query = r#"CALL system.list_functions()
YIELD name, category, description
WHERE category = 'text' OR category = 'hybrid' OR category = 'fuzzy';"#;
        log::debug!("\n=== Testing WHERE clause parsing");
        let result = parse_query(query);
        assert!(result.is_ok(), "Query parsing should succeed");
        let doc = result.unwrap();
        if let Statement::Call(ref call_stmt) = doc.statement {
            log::debug!("Procedure: {}", call_stmt.procedure_name);
            log::debug!("Has YIELD: {}", call_stmt.yield_clause.is_some());
            log::debug!("Has WHERE: {}", call_stmt.where_clause.is_some());
            assert!(
                call_stmt.where_clause.is_some(),
                "WHERE clause should be captured!"
            );
        } else {
            panic!("Expected Call statement");
        }
    }
}
/// All three YIELD items (including `description`) must be captured in order.
#[test]
fn test_call_with_description_in_yield() {
    let query = r#"CALL system.list_functions()
YIELD name, category, description;"#;
    let result = parse_query(query);
    assert!(
        result.is_ok(),
        "Failed to parse CALL with 'description' in YIELD"
    );
    let doc = result.unwrap();
    if let Statement::Call(ref call_stmt) = doc.statement {
        assert_eq!(call_stmt.procedure_name, "system.list_functions");
        let yield_clause = call_stmt
            .yield_clause
            .as_ref()
            .expect("YIELD clause should be present");
        assert_eq!(yield_clause.items.len(), 3, "Should have 3 YIELD items");
        assert_eq!(yield_clause.items[0].column_name, "name");
        assert_eq!(yield_clause.items[1].column_name, "category");
        assert_eq!(yield_clause.items[2].column_name, "description");
    } else {
        panic!(
            "Expected CallStatement, got {:?}",
            std::mem::discriminant(&doc.statement)
        );
    }
}
/// WHERE after a YIELD list ending in `description` must still be parsed.
#[test]
fn test_call_with_where_and_description() {
    let query = r#"CALL system.list_functions()
YIELD name, category, description
WHERE category = 'string';"#;
    let result = parse_query(query);
    assert!(
        result.is_ok(),
        "Failed to parse CALL with WHERE after description"
    );
    let doc = result.unwrap();
    if let Statement::Call(ref call_stmt) = doc.statement {
        assert!(
            call_stmt.where_clause.is_some(),
            "WHERE clause should be present after description in YIELD"
        );
        let yield_clause = call_stmt
            .yield_clause
            .as_ref()
            .expect("YIELD clause should be present");
        assert_eq!(
            yield_clause.items.len(),
            3,
            "All 3 YIELD items should be parsed"
        );
    } else {
        panic!("Expected CallStatement");
    }
}
/// `WHERE x IN (…)` must produce a Binary expression with the IN operator
/// and a literal list on the right-hand side.
#[test]
fn test_call_where_in_with_list() {
    let query = r#"CALL system.list_functions()
YIELD name, category
WHERE category IN ('string', 'numeric', 'aggregate');"#;
    let result = parse_query(query);
    assert!(
        result.is_ok(),
        "Failed to parse WHERE IN with parenthesized list"
    );
    let doc = result.unwrap();
    if let Statement::Call(ref call_stmt) = doc.statement {
        let where_clause = call_stmt
            .where_clause
            .as_ref()
            .expect("WHERE clause should be present");
        match &where_clause.condition {
            Expression::Binary(binary) => {
                assert_eq!(binary.operator, Operator::In, "Expected IN operator");
                match &*binary.right {
                    Expression::Literal(Literal::List(list)) => {
                        assert_eq!(list.len(), 3, "Should have 3 items in IN list");
                    }
                    other => panic!("Expected Literal::List, got {:?}", other),
                }
            }
            other => panic!("Expected Binary expression with IN, got {:?}", other),
        }
    } else {
        panic!("Expected CallStatement");
    }
}
/// Minimal `WHERE name IN (…)` form should parse and keep the WHERE clause.
#[test]
fn test_call_where_in_simple() {
    let query = r#"CALL system.list_functions()
YIELD name
WHERE name IN ('UPPER', 'LOWER');"#;
    let result = parse_query(query);
    assert!(result.is_ok(), "Failed to parse simple WHERE IN");
    let doc = result.unwrap();
    if let Statement::Call(ref call_stmt) = doc.statement {
        assert!(
            call_stmt.where_clause.is_some(),
            "WHERE clause should be present"
        );
    } else {
        panic!("Expected CallStatement");
    }
}
/// `WHERE x NOT IN (…)` must map onto the NotIn binary operator.
#[test]
fn test_call_where_not_in() {
    let query = r#"CALL system.list_functions()
YIELD name, category
WHERE category NOT IN ('aggregate', 'utility');"#;
    let result = parse_query(query);
    assert!(result.is_ok(), "Failed to parse WHERE NOT IN");
    let doc = result.unwrap();
    if let Statement::Call(ref call_stmt) = doc.statement {
        let where_clause = call_stmt
            .where_clause
            .as_ref()
            .expect("WHERE clause should be present");
        match &where_clause.condition {
            Expression::Binary(binary) => {
                assert_eq!(binary.operator, Operator::NotIn, "Expected NotIn operator");
            }
            other => panic!("Expected Binary expression with NOT IN, got {:?}", other),
        }
    } else {
        panic!("Expected CallStatement");
    }
}
/// CALL may not be followed by a RETURN clause; the parser must reject it
/// with a descriptive error.
#[test]
fn test_call_with_return_rejected() {
    let query = r#"CALL system.list_functions()
YIELD name, category, description
WHERE category = 'string'
RETURN name;"#;
    let result = parse_query(query);
    assert!(
        result.is_err(),
        "Parser should reject CALL with RETURN clause"
    );
    let error = result.unwrap_err();
    let error_msg = error.to_string();
    assert!(
        error_msg.contains("CALL statements cannot have additional clauses")
            || error_msg.contains("unexpected tokens")
            || error_msg.contains("Unexpected token"),
        "Error message should mention invalid CALL syntax, got: {}",
        error_msg
    );
}
/// CALL may not be followed by a MATCH clause.
#[test]
fn test_call_with_match_rejected() {
    let query = r#"CALL system.list_functions()
YIELD name
MATCH (n);"#;
    let result = parse_query(query);
    assert!(
        result.is_err(),
        "Parser should reject CALL with MATCH clause"
    );
}
/// A compound WHERE (`IN (…) OR =`) must parse with OR as the top-level
/// operator.
#[test]
fn test_call_complex_where_expression() {
    let query = r#"CALL system.list_functions()
YIELD name, category, description
WHERE category IN ('string', 'numeric') OR category = 'aggregate';"#;
    let result = parse_query(query);
    assert!(result.is_ok(), "Failed to parse complex WHERE expression");
    let doc = result.unwrap();
    if let Statement::Call(ref call_stmt) = doc.statement {
        assert!(call_stmt.yield_clause.is_some(), "YIELD should be present");
        assert!(call_stmt.where_clause.is_some(), "WHERE should be present");
        match &call_stmt.where_clause.as_ref().unwrap().condition {
            Expression::Binary(binary) => {
                assert_eq!(
                    binary.operator,
                    Operator::Or,
                    "Top-level should be OR operator"
                );
            }
            other => panic!("Expected OR binary expression, got {:?}", other),
        }
    } else {
        panic!("Expected CallStatement");
    }
}
/// CALL with YIELD but no WHERE must parse with an absent where_clause.
#[test]
fn test_call_valid_without_where() {
    let query = r#"CALL system.list_functions()
YIELD name, category, description;"#;
    let result = parse_query(query);
    assert!(result.is_ok(), "Valid CALL without WHERE should parse");
    let doc = result.unwrap();
    if let Statement::Call(ref call_stmt) = doc.statement {
        assert!(call_stmt.yield_clause.is_some(), "YIELD should be present");
        assert!(
            call_stmt.where_clause.is_none(),
            "WHERE should not be present"
        );
    } else {
        panic!("Expected CallStatement");
    }
}
/// Bare CALL (no YIELD, no WHERE) must parse with both clauses absent.
#[test]
fn test_call_valid_without_yield() {
    let query = r#"CALL system.list_functions();"#;
    let result = parse_query(query);
    assert!(result.is_ok(), "Valid CALL without YIELD should parse");
    let doc = result.unwrap();
    if let Statement::Call(ref call_stmt) = doc.statement {
        assert!(
            call_stmt.yield_clause.is_none(),
            "YIELD should not be present"
        );
        assert!(
            call_stmt.where_clause.is_none(),
            "WHERE should not be present"
        );
    } else {
        panic!("Expected CallStatement");
    }
}
/// Smoke test: a basic node pattern tokenizes without error.
#[test]
fn test_lexer_basic_match_pattern() {
    let result = crate::ast::lexer::tokenize("MATCH (a:User)");
    assert!(result.is_ok(), "Basic MATCH pattern should tokenize");
}
/// Smoke test: a directed edge pattern tokenizes without error.
#[test]
fn test_lexer_simple_edge_pattern() {
    let result = crate::ast::lexer::tokenize("MATCH (a)-[:NEXT]->(b) RETURN a");
    assert!(result.is_ok(), "Simple edge pattern should tokenize");
}
/// Smoke test: MATCH + WHERE + aggregate alias tokenizes without error.
#[test]
fn test_lexer_complex_where_pattern() {
    let query = "MATCH (start:TestNode)-[:CONNECTS_TO]->(end:TestNode) WHERE start.id = 1 RETURN count(end) as connected_count";
    let result = crate::ast::lexer::tokenize(query);
    assert!(result.is_ok(), "Complex WHERE pattern should tokenize");
}
/// Smoke test: a `{min,max}` quantified edge fragment tokenizes.
#[test]
fn test_lexer_variable_length_pattern() {
    let result = crate::ast::lexer::tokenize("-[:NEXT]{1,3}->");
    assert!(result.is_ok(), "Variable-length pattern should tokenize");
}
/// Smoke test: minimal MATCH … RETURN parses.
#[test]
fn test_parser_match_user_return() {
    let result = parse_query("MATCH (a:User) RETURN a");
    assert!(result.is_ok(), "MATCH User RETURN should parse");
}
/// Smoke test: node variable with a label parses.
#[test]
fn test_parser_match_with_label() {
    let result = parse_query("MATCH (node0:ChainNode) RETURN node0");
    assert!(result.is_ok(), "MATCH with label should parse");
}
/// Smoke test: unquantified directed edge parses.
#[test]
fn test_parser_simple_edge_pattern_no_quantifier() {
    let result = parse_query("MATCH (a)-[:NEXT]->(b) RETURN a");
    assert!(
        result.is_ok(),
        "Simple edge pattern without quantifier should parse"
    );
}
/// Smoke test: `{1,3}` quantified edge pattern parses.
#[test]
fn test_parser_variable_length_edge_pattern() {
    let result = parse_query("MATCH (a)-[:NEXT]{1,3}->(b) RETURN a");
    assert!(result.is_ok(), "Variable-length edge pattern should parse");
}
/// Smoke test: path variable assignment combined with a quantified edge
/// and a property map parses.
#[test]
fn test_parser_variable_length_with_path_assignment() {
    let query = "MATCH path = (node0:ChainNode {id: 0})-[:NEXT]{1,3}->(node_end) RETURN count(path) as one_to_three_hop_paths";
    let result = parse_query(query);
    assert!(
        result.is_ok(),
        "Variable-length pattern with path assignment should parse"
    );
}
/// Smoke test: labeled edge pattern with a WHERE on a node property parses.
#[test]
fn test_parser_connects_to_pattern() {
    let query = "MATCH (start:TestNode)-[:CONNECTS_TO]->(end:TestNode) WHERE start.id = 1 RETURN count(end) as connected_count";
    let result = parse_query(query);
    assert!(result.is_ok(), "CONNECTS_TO pattern should parse");
}
/// Smoke test: pattern comprehensions (`[(…)-[…]->(…) | expr]`) in the
/// RETURN list parse.
#[test]
fn test_parser_pattern_comprehension() {
    let query = "MATCH (a:Account)
RETURN a.account_number,
[(a)-[t:Transaction]->(m) | t.amount] as transaction_amounts,
[(a)-[t:Transaction]->(m) | m.category] as merchant_categories
LIMIT 10";
    let result = parse_query(query);
    assert!(result.is_ok(), "Pattern comprehension should parse");
}
/// Smoke test: full pipeline of WITH + aggregation + second MATCH +
/// DISTINCT count + ORDER BY + LIMIT parses.
#[test]
fn test_parser_with_clause_aggregation() {
    let query = "MATCH (a:Account)-[t:Transaction]->(m:Merchant)
WITH a, m, count(t) as transaction_count, sum(t.amount) as total_spent
WHERE transaction_count > 5
MATCH (m)<-[:Transaction]-(other:Account)
WHERE other <> a
RETURN a.account_number,
m.name,
transaction_count,
total_spent,
count(DISTINCT other) as fellow_customers
ORDER BY total_spent DESC
LIMIT 10";
    let result = parse_query(query);
    assert!(
        result.is_ok(),
        "WITH clause with aggregation should parse. Error: {:?}",
        result.err()
    );
}
/// Smoke test: WITH carrying an aggregate alias into RETURN parses.
#[test]
fn test_parser_simple_with_clause() {
    let query = "MATCH (a:Account)-[t:Transaction]->(m:Merchant)
WITH a, m, count(t) as transaction_count
RETURN a.account_number, transaction_count";
    let result = parse_query(query);
    assert!(result.is_ok(), "Simple WITH clause should parse");
}
/// Smoke test: WHERE filtering on a WITH-introduced alias parses.
#[test]
fn test_parser_with_where_clause() {
    let query = "MATCH (a:Account)-[t:Transaction]->(m:Merchant)
WITH a, m, count(t) as transaction_count
WHERE transaction_count > 5
RETURN a.account_number, transaction_count";
    let result = parse_query(query);
    assert!(result.is_ok(), "WITH + WHERE clause should parse");
}
/// Smoke test: a MATCH clause following WITH parses.
#[test]
fn test_parser_with_then_match() {
    let query = "MATCH (a:Account)-[t:Transaction]->(m:Merchant)
WITH a, m, count(t) as transaction_count
MATCH (m)<-[:Transaction]-(other:Account)
RETURN a.account_number, other.account_number";
    let result = parse_query(query);
    assert!(
        result.is_ok(),
        "WITH then MATCH should parse. Error: {:?}",
        result.err()
    );
}