mod actors;
mod collections;
mod core;
mod effects;
mod expressions;
mod functions;
mod imports;
mod macro_parsing;
mod operator_precedence;
mod types;
mod utils;
use crate::frontend::arena::{Arena, StringInterner};
use crate::frontend::ast::{
Attribute,
BinaryOp,
Comment,
CommentKind,
Expr,
ExprKind,
Literal,
Param,
Pattern,
PipelineStage,
Span,
StringPart,
Type,
TypeKind,
};
use crate::frontend::lexer::{Token, TokenStream};
use crate::parser::error_recovery::ErrorNode;
use anyhow::{bail, Result};
pub use core::Parser;
use std::collections::VecDeque;
pub(crate) fn parse_use_statement_with_visibility(
state: &mut ParserState,
is_pub: bool,
) -> Result<Expr> {
let start_span = Span { start: 0, end: 0 };
let mut expr = expressions::parse_use_path(state, start_span)?;
if is_pub {
expr.attributes.push(crate::frontend::ast::Attribute {
name: "pub".to_string(),
args: vec![],
span: expr.span,
});
}
Ok(expr)
}
/// Mutable state threaded through every parsing routine in this module
/// and its submodules.
pub(crate) struct ParserState<'a> {
    /// Token stream over the source text being parsed.
    pub tokens: TokenStream<'a>,
    /// Recoverable errors collected while parsing (see `get_errors`).
    pub errors: Vec<ErrorNode>,
    #[allow(dead_code)]
    // AST arena; not used by code in this file (see `arena_stats`).
    pub arena: Arena,
    #[allow(dead_code)]
    // String interner; not used by code in this file (see `interner_stats`).
    pub interner: StringInterner,
    #[allow(dead_code)]
    // (token position, expr) cache; not used by code in this file.
    pub expr_cache: VecDeque<(usize, Expr)>,
    // Presumably set while parsing match-arm guards; consumed by submodules —
    // TODO confirm against expressions/operator modules.
    pub in_guard_context: bool,
    // While true, `in` is NOT treated as a binary operator so that
    // `let x = a in ...`-style forms terminate correctly (see try_binary_operators).
    pub in_let_value_context: bool,
}
impl<'a> ParserState<'a> {
    /// Create a fresh parser state over `input`.
    #[must_use]
    pub fn new(input: &'a str) -> Self {
        Self {
            tokens: TokenStream::new(input),
            errors: Vec::new(),
            arena: Arena::new(),
            interner: StringInterner::new(),
            // Small fixed capacity; the cache is currently unused.
            expr_cache: VecDeque::with_capacity(8),
            in_guard_context: false,
            in_let_value_context: false,
        }
    }
    /// Errors recovered so far, in the order they were recorded.
    pub fn get_errors(&self) -> &[ErrorNode] {
        &self.errors
    }
    /// Arena statistics as `(total_allocated, num_items)`.
    #[allow(dead_code)]
    pub fn arena_stats(&self) -> (usize, usize) {
        (self.arena.total_allocated(), self.arena.num_items())
    }
    /// Interner statistics, as reported by `StringInterner::stats`.
    #[allow(dead_code)]
    pub fn interner_stats(&self) -> (usize, usize) {
        self.interner.stats()
    }
    /// Consume every consecutive comment token at the current position and
    /// return them in source order (used for leading comments on an expr).
    pub fn consume_leading_comments(&mut self) -> Vec<Comment> {
        let mut comments = Vec::new();
        while let Some((token, span)) = self.tokens.peek() {
            if let Some(comment) = token_to_comment(token, *span) {
                self.tokens.advance();
                comments.push(comment);
            } else {
                break;
            }
        }
        comments
    }
    /// Consume the next token as a trailing comment for an expression ending
    /// at `expr_span.end`, but only if it starts on the same source line.
    /// Returns `None` (consuming nothing) otherwise.
    pub fn consume_trailing_comment(&mut self, expr_span: &Span) -> Option<Comment> {
        if let Some((_token, span)) = self.tokens.peek() {
            let comment_start = span.start;
            let source = self.tokens.source();
            if !is_on_same_line(source, expr_span.end, comment_start) {
                return None;
            }
            // Second peek: the first borrow had to end before calling
            // `source()`; re-peek to get the token itself for conversion.
            if let Some((token, span)) = self.tokens.peek() {
                if let Some(comment) = token_to_comment(token, *span) {
                    self.tokens.advance();
                    return Some(comment);
                }
            }
        }
        None
    }
    /// Skip ordinary comments (line, block, `#`) without collecting them.
    /// Doc comments are deliberately NOT skipped — they stop the loop so the
    /// caller can attach them to the following item.
    pub fn skip_comments(&mut self) {
        while let Some((token, _)) = self.tokens.peek() {
            match token {
                Token::LineComment(_) | Token::BlockComment(_) | Token::HashComment(_) => {
                    self.tokens.advance();
                }
                Token::DocComment(_) => break,
                _ => break,
            }
        }
    }
}
/// Returns `true` when byte offsets `pos1` and `pos2` of `source` lie on the
/// same line, i.e. no `'\n'` occurs between them.
///
/// Returns `false` when `pos1 > pos2`. Offsets are clamped to the source
/// length, and offsets that fall inside a multi-byte UTF-8 character are
/// snapped to the nearest boundary (down for the start, up for the end) so
/// the slice below can never panic.
fn is_on_same_line(source: &str, pos1: usize, pos2: usize) -> bool {
    if pos1 > pos2 {
        return false;
    }
    // Snap the start down to the previous char boundary (0 always is one).
    let mut lo = pos1.min(source.len());
    while !source.is_char_boundary(lo) {
        lo -= 1;
    }
    // Snap the end up to the next char boundary, or the end of the input.
    let mut hi = pos2.min(source.len());
    while hi < source.len() && !source.is_char_boundary(hi) {
        hi += 1;
    }
    !source[lo..hi].contains('\n')
}
/// Convert a comment token into an AST `Comment`, or `None` for any other
/// token. Hash comments (`# ...`) are normalized to line comments.
fn token_to_comment(token: &Token, span: Span) -> Option<Comment> {
    let kind = match token {
        Token::LineComment(text) | Token::HashComment(text) => CommentKind::Line(text.clone()),
        Token::DocComment(text) => CommentKind::Doc(text.clone()),
        Token::BlockComment(text) => CommentKind::Block(text.clone()),
        _ => return None,
    };
    Some(Comment::new(kind, span))
}
/// Parse an expression with no minimum precedence (entry point for most
/// call sites). Delegates to `parse_expr_with_precedence_recursive`.
pub(crate) fn parse_expr_recursive(state: &mut ParserState) -> Result<Expr> {
    parse_expr_with_precedence_recursive(state, 0)
}
/// Parse an expression using precedence climbing: consume leading comments,
/// parse a prefix expression, extend it with postfix/infix operators whose
/// precedence is at least `min_prec`, then attach surrounding comments.
pub(crate) fn parse_expr_with_precedence_recursive(
    state: &mut ParserState,
    min_prec: i32,
) -> Result<Expr> {
    let leading_comments = state.consume_leading_comments();
    let mut left = expressions::parse_prefix(state)?;
    left = parse_postfix_and_infix_chain(state, left, min_prec)?;
    // Trailing comment must be on the same line as the expression's end.
    let trailing_comment = state.consume_trailing_comment(&left.span);
    attach_comments_to_expr(left, leading_comments, trailing_comment)
}
/// Attach collected comments to an expression node.
///
/// Always returns `Ok`; the `Result` keeps the signature uniform with the
/// other parse helpers.
fn attach_comments_to_expr(
    mut expr: Expr,
    leading_comments: Vec<Comment>,
    trailing_comment: Option<Comment>,
) -> Result<Expr> {
    expr.leading_comments = leading_comments;
    expr.trailing_comment = trailing_comment;
    Ok(expr)
}
/// Repeatedly extend `left` with postfix operators and then with one infix
/// operator, until no infix operator at or above `min_prec` applies.
fn parse_postfix_and_infix_chain(
    state: &mut ParserState,
    mut left: Expr,
    min_prec: i32,
) -> Result<Expr> {
    loop {
        // Postfix binds tighter, so it is always tried first.
        left = handle_postfix_operators(state, left)?;
        match try_handle_infix_operators(state, left.clone(), min_prec)? {
            Some(extended) => left = extended,
            None => return Ok(left),
        }
    }
}
/// Try to extend `left` with one infix operator at or above `min_prec`.
///
/// Peeks past any comment tokens to find the operator, then restores the
/// stream position so each handler sees the original state (handlers skip
/// the comments themselves before consuming the operator). Handlers are
/// tried in a fixed order; the first that succeeds wins. Returns `None`
/// when no handler applies.
pub(crate) fn try_handle_infix_operators(
    state: &mut ParserState,
    left: Expr,
    min_prec: i32,
) -> Result<Option<Expr>> {
    // Remember where we are so the lookahead below is non-destructive.
    let saved_position = state.tokens.position();
    while matches!(
        state.tokens.peek(),
        Some((
            Token::LineComment(_)
            | Token::DocComment(_)
            | Token::BlockComment(_)
            | Token::HashComment(_),
            _
        ))
    ) {
        state.tokens.advance();
    }
    let token_result = state.tokens.peek().map(|(t, _)| t.clone());
    state.tokens.set_position(saved_position);
    let Some(token) = token_result else {
        return Ok(None);
    };
    let token_clone = token;
    // Order matters: actor ops and `as` must win over generic binary parsing,
    // and ternary must be checked before `?`-adjacent binary handling.
    let handlers = [
        try_new_actor_operators,
        try_type_cast_operator,
        try_ternary_operator, try_binary_operators,
        try_assignment_operators,
        try_pipeline_operators,
        try_range_operators,
    ];
    for handler in &handlers {
        if let Some(new_left) = handler(state, left.clone(), &token_clone, min_prec)? {
            return Ok(Some(new_left));
        }
    }
    Ok(None)
}
/// Apply postfix operators (`.field`, `::path`, `(...)`, `[...]`, `?`, `++`,
/// `--`, struct literals, macro calls) to `left` until none applies.
pub(crate) fn handle_postfix_operators(state: &mut ParserState, mut left: Expr) -> Result<Expr> {
    loop {
        match try_handle_single_postfix(state, left.clone())? {
            Some(extended) => left = extended,
            None => return Ok(left),
        }
    }
}
/// Treat a following `(` as a call on `left`, unless `left` is a block-like
/// expression or the `(` starts on a later line (which would otherwise glue
/// an unrelated parenthesized expression onto the previous one).
fn try_handle_postfix_call(state: &mut ParserState, left: Expr) -> Result<Option<Expr>> {
    if is_block_like_expression(&left) {
        return Ok(None);
    }
    // Copy the span start first so the peek borrow ends before source().
    let call_start = state.tokens.peek().map(|(_, s)| s.start);
    if let Some(pos) = call_start {
        if !is_on_same_line(state.tokens.source(), left.span.end, pos) {
            return Ok(None);
        }
    }
    functions::parse_call(state, left).map(Some)
}
/// Try to apply exactly one postfix construct to `left`.
///
/// Skips comment tokens first, then dispatches on the next token. Returns
/// `Ok(None)` when the next token does not extend `left`.
fn try_handle_single_postfix(state: &mut ParserState, left: Expr) -> Result<Option<Expr>> {
    // Comments between an expression and its postfix operator are discarded.
    while matches!(
        state.tokens.peek(),
        Some((
            Token::LineComment(_)
            | Token::DocComment(_)
            | Token::BlockComment(_)
            | Token::HashComment(_),
            _
        ))
    ) {
        state.tokens.advance();
    }
    let token_peek = state.tokens.peek().map(|(t, _)| t.clone());
    match token_peek.as_ref() {
        Some(Token::Dot) => handle_dot_operator(state, left).map(Some),
        Some(Token::ColonColon) => handle_colon_colon_operator(state, left).map(Some),
        Some(Token::SafeNav) => handle_safe_nav_operator(state, left).map(Some),
        Some(Token::LeftParen) => try_handle_postfix_call(state, left),
        Some(Token::LeftBracket) => {
            // These expression kinds never take `[...]` indexing; a following
            // `[` starts a new list literal instead.
            if matches!(
                left.kind,
                ExprKind::Literal(_)
                    | ExprKind::StructLiteral { .. }
                    | ExprKind::Let { .. }
                    | ExprKind::Call { .. }
                    | ExprKind::Await { .. }
                    | ExprKind::Try { .. }
            ) {
                Ok(None)
            } else {
                Ok(Some(handle_array_indexing(state, left)?))
            }
        }
        Some(Token::LeftBrace) => try_parse_struct_literal(state, &left),
        Some(Token::Increment) => handle_increment_operator(state, left).map(Some),
        Some(Token::Decrement) => handle_decrement_operator(state, left).map(Some),
        Some(Token::Question) => {
            // Ambiguous `?`: ternary is handled by the infix pass, try here.
            if is_ternary_operator(state) {
                Ok(None)
            } else {
                handle_try_operator(state, left).map(Some)
            }
        }
        Some(Token::Bang) => try_parse_macro_call(state, &left),
        _ => Ok(None),
    }
}
fn is_block_like_expression(expr: &Expr) -> bool {
matches!(
expr.kind,
ExprKind::Block(_)
| ExprKind::Loop { .. }
| ExprKind::While { .. }
| ExprKind::For { .. }
| ExprKind::If { .. }
| ExprKind::Match { .. }
| ExprKind::TryCatch { .. }
)
}
/// Handle `.` after `left`: consume the dot, skip any comments between the
/// dot and the member name, then parse the method/field access.
fn handle_dot_operator(state: &mut ParserState, left: Expr) -> Result<Expr> {
    state.tokens.advance(); // consume '.'
    loop {
        match state.tokens.peek() {
            Some((
                Token::LineComment(_)
                | Token::DocComment(_)
                | Token::BlockComment(_)
                | Token::HashComment(_),
                _,
            )) => {
                state.tokens.advance();
            }
            _ => break,
        }
    }
    functions::parse_method_call(state, left)
}
/// Interpret a token as an identifier, allowing a fixed set of keywords
/// (`Ok`, `from`, `as`, …) to be used where identifiers are expected
/// (e.g. after `::`).
fn token_as_identifier(token: &Token) -> Option<String> {
    let keyword = match token {
        Token::Identifier(name) => return Some(name.clone()),
        Token::Ok => "Ok",
        Token::Err => "Err",
        Token::Some => "Some",
        Token::None => "None",
        Token::From => "from",
        Token::As => "as",
        Token::In => "in",
        Token::Type => "type",
        Token::Var => "var",
        Token::Module => "module",
        _ => return Option::None,
    };
    Some(keyword.to_string())
}
/// Recognize `Module::name(...)` for a small set of builtin modules and
/// return `(module, name)` so it can become a `QualifiedName` node instead
/// of a generic field access. Only fires when a call follows.
fn try_make_qualified_name(left: &Expr, field: &str, state: &mut ParserState) -> Option<(String, String)> {
    let ExprKind::Identifier(module) = &left.kind else {
        return None;
    };
    if !matches!(state.tokens.peek(), Some((Token::LeftParen, _))) {
        return None;
    }
    match module.as_str() {
        "Command" | "DataFrame" | "Sql" | "Process" | "String" => {
            Some((module.clone(), field.to_string()))
        }
        _ => None,
    }
}
/// Handle `::` after `left`: optional turbofish (`::<T>::name`), then a
/// path segment. Produces a `QualifiedName` for known builtin modules,
/// otherwise a `FieldAccess`.
fn handle_colon_colon_operator(state: &mut ParserState, left: Expr) -> Result<Expr> {
    state.tokens.advance();
    // `expr::<T>::name` — the turbofish types are consumed and discarded.
    if let Some((Token::Less, _)) = state.tokens.peek() {
        parse_turbofish(state)?;
        if let Some((Token::ColonColon, _)) = state.tokens.peek() {
            state.tokens.advance();
        } else {
            return Err(anyhow::anyhow!(
                "Expected '::' after turbofish type parameters"
            ));
        }
    }
    let (token, span) = state
        .tokens
        .peek()
        .ok_or_else(|| anyhow::anyhow!("Expected identifier after '::' but reached end of input"))?;
    // Keywords like `from`/`as` are allowed as path segments here.
    let field = token_as_identifier(token).ok_or_else(|| {
        anyhow::anyhow!("Expected identifier or keyword usable as identifier after '::' but got {token:?}")
    })?;
    let field_span = *span;
    state.tokens.advance();
    if let Some((module, name)) = try_make_qualified_name(&left, &field, state) {
        return Ok(Expr::new(
            ExprKind::QualifiedName { module, name },
            field_span,
        ));
    }
    Ok(Expr::new(
        ExprKind::FieldAccess {
            object: Box::new(left),
            field,
        },
        field_span,
    ))
}
/// Consume (and discard) turbofish type parameters starting at `<`, tracking
/// angle-bracket nesting until the matching `>`.
///
/// A `>>` token closes two levels at once (e.g. `Vec<Vec<i32>>`), which is
/// why it decrements the depth by 2.
fn parse_turbofish(state: &mut ParserState) -> Result<()> {
    if let Some((Token::Less, _)) = state.tokens.peek() {
        state.tokens.advance();
        let mut depth = 1;
        while depth > 0 {
            match state.tokens.peek() {
                Some((Token::Less, _)) => {
                    depth += 1;
                    state.tokens.advance();
                }
                Some((Token::Greater, _)) => {
                    depth -= 1;
                    state.tokens.advance();
                }
                Some((Token::RightShift, _)) => {
                    // `>>` closes two nesting levels.
                    depth -= 2;
                    state.tokens.advance();
                }
                Some(_) => {
                    // Type tokens themselves are skipped, not parsed.
                    state.tokens.advance();
                }
                None => {
                    return Err(anyhow::anyhow!(
                        "Unexpected end of input while parsing turbofish type parameters"
                    ));
                }
            }
        }
        Ok(())
    } else {
        Err(anyhow::anyhow!(
            "Expected '<' to start turbofish type parameters"
        ))
    }
}
/// Handle the safe-navigation operator (`?.`): consume the token, then parse
/// the optional method/field access on `left`.
fn handle_safe_nav_operator(state: &mut ParserState, left: Expr) -> Result<Expr> {
    state.tokens.advance();
    functions::parse_optional_method_call(state, left)
}
/// Consume a postfix `++` and wrap `left` in a `PostIncrement` node.
fn handle_increment_operator(state: &mut ParserState, left: Expr) -> Result<Expr> {
    state.tokens.advance();
    Ok(create_post_increment(left))
}
/// Consume a postfix `--` and wrap `left` in a `PostDecrement` node.
fn handle_decrement_operator(state: &mut ParserState, left: Expr) -> Result<Expr> {
    state.tokens.advance();
    Ok(create_post_decrement(left))
}
/// True when the upcoming `?` begins a ternary (`cond ? a : b`) rather than
/// the postfix try operator (`expr?`). Defined as the negation of the
/// bounded-lookahead check in `is_try_operator_not_ternary`.
fn is_ternary_operator(state: &mut ParserState) -> bool {
    !is_try_operator_not_ternary(state)
}
/// Consume a postfix `?` and wrap `left` in a `Try` node.
///
/// NOTE(review): the node gets a zeroed span rather than the operand's span —
/// diagnostics pointing at `?` expressions may suffer; confirm intentional.
fn handle_try_operator(state: &mut ParserState, left: Expr) -> Result<Expr> {
    state.tokens.advance();
    Ok(Expr::new(
        ExprKind::Try {
            expr: Box::new(left),
        },
        Span { start: 0, end: 0 },
    ))
}
/// Parse `left[...]` after the `[` has been peeked: either an index access
/// (`a[i]`) or a slice (`a[:j]`, `a[i:]`, `a[i:j]`).
fn handle_array_indexing(state: &mut ParserState, left: Expr) -> Result<Expr> {
    state.tokens.advance(); // consume '['
    if is_colon_next(state) {
        // `[:end]` — slice with no start bound.
        parse_empty_start_slice(state, left)
    } else {
        let first = parse_expr_recursive(state)?;
        if is_colon_next(state) {
            parse_slice_with_start(state, left, first)
        } else {
            parse_index_access(state, left, first)
        }
    }
}
/// Whether the next token is `:` (used to distinguish slices from indexing).
fn is_colon_next(state: &mut ParserState) -> bool {
    matches!(state.tokens.peek(), Some((Token::Colon, _)))
}
/// Parse the remainder of `left[:end]` with the `[` already consumed and the
/// `:` peeked: consume the colon, the optional end bound, and the `]`.
fn parse_empty_start_slice(state: &mut ParserState, left: Expr) -> Result<Expr> {
    state.tokens.advance(); // consume ':'
    let upper = parse_optional_slice_end(state)?;
    state.tokens.expect(&Token::RightBracket)?;
    Ok(create_slice_expr(left, None, upper))
}
/// Parse the remainder of `left[start:end]` with `start` already parsed and
/// the `:` peeked: consume the colon, the optional end bound, and the `]`.
fn parse_slice_with_start(state: &mut ParserState, left: Expr, start: Expr) -> Result<Expr> {
    state.tokens.advance(); // consume ':'
    let upper = parse_optional_slice_end(state)?;
    state.tokens.expect(&Token::RightBracket)?;
    Ok(create_slice_expr(left, Some(Box::new(start)), upper))
}
/// Parse the optional end bound of a slice; `None` when `]` follows
/// immediately (open-ended slice like `a[1:]`).
fn parse_optional_slice_end(state: &mut ParserState) -> Result<Option<Box<Expr>>> {
    if matches!(state.tokens.peek(), Some((Token::RightBracket, _))) {
        return Ok(None);
    }
    let end = parse_expr_recursive(state)?;
    Ok(Some(Box::new(end)))
}
/// Build an `Expr` with the given kind, a zeroed placeholder span, and no
/// attributes or comments. Used by operator handlers that do not track
/// precise source spans.
#[inline]
fn create_expr(kind: ExprKind) -> Expr {
    Expr {
        kind,
        span: Span { start: 0, end: 0 },
        attributes: Vec::new(),
        leading_comments: Vec::new(),
        trailing_comment: None,
    }
}
/// Finish an index access `left[index]`: expect the closing `]` and build
/// the `IndexAccess` node.
fn parse_index_access(state: &mut ParserState, left: Expr, index: Expr) -> Result<Expr> {
    state.tokens.expect(&Token::RightBracket)?;
    let kind = ExprKind::IndexAccess {
        object: Box::new(left),
        index: Box::new(index),
    };
    Ok(create_expr(kind))
}
/// Build a `Slice` node; `None` bounds mean an open start/end (`a[:j]`,
/// `a[i:]`).
fn create_slice_expr(object: Expr, start: Option<Box<Expr>>, end: Option<Box<Expr>>) -> Expr {
    create_expr(ExprKind::Slice {
        object: Box::new(object),
        start,
        end,
    })
}
/// Try to parse `{ ... }` after `left` as a struct literal. Fires when
/// `left` is a capitalized identifier (`Point { .. }`) or a qualified path
/// whose final segment is capitalized (`geo::Point { .. }`).
fn try_parse_struct_literal(state: &mut ParserState, left: &Expr) -> Result<Option<Expr>> {
    let span = left.span;
    if let ExprKind::Identifier(name) = &left.kind {
        let starts_upper = name.chars().next().is_some_and(char::is_uppercase);
        if starts_upper {
            return types::parse_struct_literal(state, name.clone(), span).map(Some);
        }
    }
    match extract_struct_literal_path(left) {
        Some(path) => types::parse_struct_literal(state, path, span).map(Some),
        None => Ok(None),
    }
}
/// Turn a field-access chain whose final segment is capitalized into a
/// `::`-joined path string (e.g. `geo.Point` → `"geo::Point"`), for use as
/// a struct-literal type name.
fn extract_struct_literal_path(expr: &Expr) -> Option<String> {
    let ExprKind::FieldAccess { object, field } = &expr.kind else {
        return None;
    };
    // The final segment must look like a type name.
    if !field.chars().next().is_some_and(char::is_uppercase) {
        return None;
    }
    let prefix = extract_struct_literal_path_component(object)?;
    Some(format!("{prefix}::{field}"))
}
/// Recursively flatten an identifier / field-access chain into a
/// `::`-joined path string; `None` for any other expression kind.
fn extract_struct_literal_path_component(expr: &Expr) -> Option<String> {
    match &expr.kind {
        ExprKind::Identifier(name) => Some(name.clone()),
        ExprKind::FieldAccess { object, field } => extract_struct_literal_path_component(object)
            .map(|prefix| format!("{prefix}::{field}")),
        _ => None,
    }
}
/// Wrap `left` in a `PostIncrement` node (placeholder span).
fn create_post_increment(left: Expr) -> Expr {
    create_expr(ExprKind::PostIncrement {
        target: Box::new(left),
    })
}
/// Wrap `left` in a `PostDecrement` node (placeholder span).
fn create_post_decrement(left: Expr) -> Expr {
    create_expr(ExprKind::PostDecrement {
        target: Box::new(left),
    })
}
/// Try to parse a binary operator at or above `min_prec` and its right
/// operand (parsed at `prec + 1`, so binary operators are left-associative).
///
/// Special cases: `!` only counts as a binary (send) operator when directly
/// adjacent to the left operand (at most one byte gap), and `in` is ignored
/// inside a `let` value so `let x = a in ...` terminates correctly.
fn try_binary_operators(
    state: &mut ParserState,
    left: Expr,
    token: &Token,
    min_prec: i32,
) -> Result<Option<Expr>> {
    if matches!(token, Token::Bang) {
        if let Some((_, span)) = state.tokens.peek() {
            // Whitespace before `!` means it is not an operator here.
            if span.start > left.span.end + 1 {
                return Ok(None);
            }
        }
    }
    if matches!(token, Token::In) && state.in_let_value_context {
        return Ok(None);
    }
    if let Some(bin_op) = expressions::token_to_binary_op(token) {
        let prec = expressions::get_precedence(bin_op);
        if prec < min_prec {
            return Ok(None);
        }
        // `token` was peeked past comments by the caller with the position
        // reset; skip those comments for real before consuming the operator.
        while matches!(
            state.tokens.peek(),
            Some((
                Token::LineComment(_)
                | Token::DocComment(_)
                | Token::BlockComment(_)
                | Token::HashComment(_),
                _
            ))
        ) {
            state.tokens.advance();
        }
        state.tokens.advance();
        let right = parse_expr_with_precedence_recursive(state, prec + 1)?;
        Ok(Some(create_expr(ExprKind::Binary {
            left: Box::new(left),
            op: bin_op,
            right: Box::new(right),
        })))
    } else {
        Ok(None)
    }
}
/// Try to parse `left ? a : b`. Returns `None` when the token is not a `?`
/// at admissible precedence, or when lookahead decides the `?` is actually
/// the postfix try operator.
fn try_ternary_operator(
    state: &mut ParserState,
    left: Expr,
    token: &Token,
    min_prec: i32,
) -> Result<Option<Expr>> {
    // Ternary binds very loosely.
    const TERNARY_PRECEDENCE: i32 = 1;
    // Short-circuit: only run the lookahead when `?` could start a ternary.
    if !is_valid_ternary_start(token, min_prec, TERNARY_PRECEDENCE)
        || is_try_operator_not_ternary(state)
    {
        return Ok(None);
    }
    parse_ternary_expression(state, left, TERNARY_PRECEDENCE)
}
/// A `?` can begin a ternary only when the ternary precedence is admissible
/// at the current minimum.
fn is_valid_ternary_start(token: &Token, min_prec: i32, ternary_prec: i32) -> bool {
    if !matches!(token, Token::Question) {
        return false;
    }
    min_prec <= ternary_prec
}
fn is_statement_keyword(token: &Token) -> bool {
matches!(
token,
Token::Let
| Token::For
| Token::While
| Token::Match
| Token::Return
| Token::Fn
| Token::Fun
| Token::Loop
| Token::Continue
| Token::Break
| Token::Pub
| Token::Const
| Token::Static
| Token::Struct
| Token::Enum
| Token::Impl
| Token::Trait
| Token::Type
| Token::Use
| Token::Mod
)
}
/// Bounded lookahead past `?`: scan up to 30 tokens for a top-level `:`
/// that would make this a ternary. Nesting depth tracks bracket pairs so
/// colons inside `(...)`, `[...]`, `{...}` are ignored; a `;`, end of
/// input, or an unbalanced closer stops the scan. The stream position is
/// restored before returning.
fn scan_for_ternary_colon(state: &mut ParserState) -> bool {
    let pos = state.tokens.position();
    // Step over the `?` itself.
    state.tokens.advance();
    let mut depth = 0i32;
    let mut found_colon = false;
    for _ in 0..30 {
        match state.tokens.advance() {
            Some((Token::LeftParen | Token::LeftBracket | Token::LeftBrace, _)) => depth += 1,
            Some((Token::RightParen | Token::RightBracket | Token::RightBrace, _)) => {
                depth -= 1;
                // A closer we did not open ends the enclosing expression.
                if depth < 0 {
                    break;
                }
            }
            Some((Token::Colon, _)) if depth == 0 => {
                found_colon = true;
                break;
            }
            Some((Token::Semicolon, _)) | None => break,
            _ => {}
        }
    }
    state.tokens.set_position(pos);
    found_colon
}
/// Decide that a `?` is the postfix try operator rather than a ternary:
/// either a statement keyword follows it, or no top-level `:` is found by
/// the bounded lookahead.
fn is_try_operator_not_ternary(state: &mut ParserState) -> bool {
    let keyword_follows = state
        .tokens
        .peek_nth(1)
        .is_some_and(|(next_token, _)| is_statement_keyword(&next_token));
    keyword_follows || !scan_for_ternary_colon(state)
}
/// Parse `? true_expr : false_expr` after the condition. The false branch
/// is parsed at the ternary's own precedence, making `a ? b : c ? d : e`
/// right-associative.
fn parse_ternary_expression(
    state: &mut ParserState,
    condition: Expr,
    ternary_prec: i32,
) -> Result<Option<Expr>> {
    state.tokens.advance(); // consume '?'
    let true_expr = parse_expr_with_precedence_recursive(state, ternary_prec + 1)?;
    match state.tokens.peek() {
        Some((Token::Colon, _)) => state.tokens.advance(),
        _ => bail!("Expected ':' in ternary expression"),
    };
    let false_expr = parse_expr_with_precedence_recursive(state, ternary_prec)?;
    Ok(Some(create_ternary_expr(condition, true_expr, false_expr)))
}
/// Build a `Ternary` node from its three sub-expressions (placeholder span).
fn create_ternary_expr(condition: Expr, true_expr: Expr, false_expr: Expr) -> Expr {
    create_expr(ExprKind::Ternary {
        condition: Box::new(condition),
        true_expr: Box::new(true_expr),
        false_expr: Box::new(false_expr),
    })
}
/// Try to parse `left as TypeName`. Only a bare identifier is accepted as
/// the target type; precedence is ignored (`as` always binds here).
fn try_type_cast_operator(
    state: &mut ParserState,
    left: Expr,
    token: &Token,
    _min_prec: i32,
) -> Result<Option<Expr>> {
    if !matches!(token, Token::As) {
        return Ok(None);
    }
    state.tokens.advance(); // consume 'as'
    let target_type = match state.tokens.peek() {
        Some((Token::Identifier(name), _)) => name.clone(),
        _ => bail!("Expected type name after 'as'"),
    };
    state.tokens.advance(); // consume the type name
    Ok(Some(create_expr(ExprKind::TypeCast {
        expr: Box::new(left),
        target_type,
    })))
}
/// Shared shape of the actor operators (`<-`, `<?`, `!`): check precedence,
/// consume the operator, parse the message at the operator's own precedence
/// (right-associative), and build the node via `make_kind`.
fn parse_actor_style_op<F>(
    state: &mut ParserState,
    left: Expr,
    min_prec: i32,
    op_name: &str,
    make_kind: F,
) -> Result<ExprKind>
where
    F: FnOnce(Box<Expr>, Box<Expr>) -> ExprKind,
{
    // Actor operators bind very loosely.
    const PREC: i32 = 1;
    if PREC < min_prec {
        bail!("Precedence check failed for {op_name}");
    }
    state.tokens.advance(); // consume the operator token
    let payload = parse_expr_with_precedence_recursive(state, PREC)?;
    Ok(make_kind(Box::new(left), Box::new(payload)))
}
/// Try to parse one of the actor operators: `left <- msg` (send),
/// `left <? msg` (query), or adjacent `left!msg` (send via `Binary`).
///
/// For `!`, whitespace between the operand and the bang (gap of more than
/// one byte) disqualifies it, so `foo !bar` is not an actor send.
fn try_new_actor_operators(
    state: &mut ParserState,
    left: Expr,
    token: &Token,
    min_prec: i32,
) -> Result<Option<Expr>> {
    let expr_kind = match token {
        Token::LeftArrow => parse_actor_send_op(state, left, min_prec)?,
        Token::ActorQuery => parse_actor_query_op(state, left, min_prec)?,
        Token::Bang => {
            if let Some((_, span)) = state.tokens.peek() {
                // Require the `!` to be adjacent to the left operand.
                if span.start > left.span.end + 1 {
                    return Ok(None);
                }
            }
            parse_actor_bang_op(state, left, min_prec)?
        }
        _ => return Ok(None),
    };
    Ok(Some(create_expr(expr_kind)))
}
/// Parse `actor <- message` into an `ActorSend` node.
fn parse_actor_send_op(state: &mut ParserState, actor: Expr, min_prec: i32) -> Result<ExprKind> {
    parse_actor_style_op(state, actor, min_prec, "actor send", |actor, message| {
        ExprKind::ActorSend { actor, message }
    })
}
/// Parse `actor <? message` into an `ActorQuery` node.
fn parse_actor_query_op(state: &mut ParserState, actor: Expr, min_prec: i32) -> Result<ExprKind> {
    parse_actor_style_op(state, actor, min_prec, "actor query", |actor, message| {
        ExprKind::ActorQuery { actor, message }
    })
}
/// Parse `left!message` into a `Binary` node with the `Send` operator
/// (unlike `<-`, the bang form is represented as a binary expression).
fn parse_actor_bang_op(state: &mut ParserState, left: Expr, min_prec: i32) -> Result<ExprKind> {
    parse_actor_style_op(state, left, min_prec, "actor bang", |left, right| {
        ExprKind::Binary {
            op: BinaryOp::Send,
            left,
            right,
        }
    })
}
/// Try to parse `=` or a compound assignment (`+=`, `-=`, …). The value is
/// parsed at the assignment's own precedence, making assignment
/// right-associative (`a = b = c`).
fn try_assignment_operators(
    state: &mut ParserState,
    left: Expr,
    token: &Token,
    min_prec: i32,
) -> Result<Option<Expr>> {
    const ASSIGN_PREC: i32 = 1;
    if !token.is_assignment_op() || ASSIGN_PREC < min_prec {
        return Ok(None);
    }
    state.tokens.advance(); // consume the operator
    let value = parse_expr_with_precedence_recursive(state, ASSIGN_PREC)?;
    let kind = if *token == Token::Equal {
        ExprKind::Assign {
            target: Box::new(left),
            value: Box::new(value),
        }
    } else {
        ExprKind::CompoundAssign {
            target: Box::new(left),
            op: get_compound_assignment_op(token),
            value: Box::new(value),
        }
    };
    Ok(Some(create_expr(kind)))
}
/// Map a compound-assignment token (`+=`, `-=`, …) to its underlying binary
/// operator.
///
/// Callers must only pass tokens for which `Token::is_assignment_op` is true
/// (and not plain `=`); anything else hits `unreachable!`.
/// NOTE(review): `<<=` has an arm but there is no `>>=` arm — confirm the
/// lexer has no right-shift-assign token, otherwise this panics on it.
fn get_compound_assignment_op(token: &Token) -> BinaryOp {
    match token {
        Token::PlusEqual => BinaryOp::Add,
        Token::MinusEqual => BinaryOp::Subtract,
        Token::StarEqual => BinaryOp::Multiply,
        Token::SlashEqual => BinaryOp::Divide,
        Token::PercentEqual => BinaryOp::Modulo,
        Token::PowerEqual => BinaryOp::Power,
        Token::AmpersandEqual => BinaryOp::BitwiseAnd,
        Token::PipeEqual => BinaryOp::BitwiseOr,
        Token::CaretEqual => BinaryOp::BitwiseXor,
        Token::LeftShiftEqual => BinaryOp::LeftShift,
        _ => unreachable!("Already checked is_assignment_op"),
    }
}
/// Try to parse a pipeline stage (`left |> stage`). Chained pipelines are
/// flattened: a pipeline on the left gains one more stage instead of being
/// nested inside a new node.
fn try_pipeline_operators(
    state: &mut ParserState,
    left: Expr,
    token: &Token,
    min_prec: i32,
) -> Result<Option<Expr>> {
    const PIPELINE_PREC: i32 = 3;
    if !matches!(token, Token::Pipeline) || PIPELINE_PREC < min_prec {
        return Ok(None);
    }
    state.tokens.advance(); // consume the pipeline operator
    // Stage parsed one level tighter → pipelines are left-associative.
    let stage_expr = parse_expr_with_precedence_recursive(state, PIPELINE_PREC + 1)?;
    let stage = PipelineStage {
        op: Box::new(stage_expr),
        span: Span { start: 0, end: 0 },
    };
    let kind = if let ExprKind::Pipeline { expr, mut stages } = left.kind {
        stages.push(stage);
        ExprKind::Pipeline { expr, stages }
    } else {
        ExprKind::Pipeline {
            expr: Box::new(left),
            stages: vec![stage],
        }
    };
    Ok(Some(create_expr(kind)))
}
/// Try to parse a range (`a..b` or `a..=b`). An open-ended range (`a..`
/// followed by a closing delimiter, `,`, or `;`) gets a unit literal as its
/// end marker.
fn try_range_operators(
    state: &mut ParserState,
    left: Expr,
    token: &Token,
    min_prec: i32,
) -> Result<Option<Expr>> {
    const RANGE_PREC: i32 = 5;
    let inclusive = match token {
        Token::DotDotEqual => true,
        Token::DotDot => false,
        _ => return Ok(None),
    };
    if RANGE_PREC < min_prec {
        return Ok(None);
    }
    state.tokens.advance(); // consume `..` / `..=`
    let open_ended = matches!(
        state.tokens.peek(),
        Some((
            Token::RightBracket
                | Token::Semicolon
                | Token::Comma
                | Token::RightParen
                | Token::RightBrace,
            _,
        ))
    );
    let end = if open_ended {
        // Sentinel end for `a..` — a unit literal with a placeholder span.
        Expr::new(ExprKind::Literal(Literal::Unit), Span { start: 0, end: 0 })
    } else {
        parse_expr_with_precedence_recursive(state, RANGE_PREC + 1)?
    };
    Ok(Some(create_expr(ExprKind::Range {
        start: Box::new(left),
        end: Box::new(end),
        inclusive,
    })))
}
/// Try to parse `name!...` as a macro call after a `!` was peeked. Only a
/// plain identifier on the left can start a macro call.
fn try_parse_macro_call(state: &mut ParserState, left: &Expr) -> Result<Option<Expr>> {
    if let ExprKind::Identifier(name) = &left.kind {
        if is_valid_macro_call_syntax(state, name) {
            return parse_macro_call_by_type(state, name);
        }
    }
    Ok(None)
}
/// A `name!` is a macro call when a delimiter follows the `!`, or when the
/// name is one of the always-macro builtins (`df`, `vec`).
fn is_valid_macro_call_syntax(state: &mut ParserState, name: &str) -> bool {
    let delimiter_follows = matches!(
        state.tokens.peek_nth(1),
        Some((Token::LeftParen | Token::LeftBracket | Token::LeftBrace, _))
    );
    delimiter_follows || matches!(name, "df" | "vec")
}
/// Dispatch a macro call by name: `df!` and `vec!` have dedicated parsers
/// (which handle the `!` themselves), `sql!` with a brace body gets the SQL
/// parser, everything else falls through to the generic macro parser.
fn parse_macro_call_by_type(state: &mut ParserState, name: &str) -> Result<Option<Expr>> {
    if let Some(df_result) = try_parse_dataframe_macro(state, name)? {
        return Ok(Some(df_result));
    }
    if let Some(vec_result) = try_parse_vec_macro(state, name)? {
        return Ok(Some(vec_result));
    }
    // For non-df/vec macros the `!` token is still pending — consume it here
    // so the sql/generic parsers start at the opening delimiter.
    if name != "df" && name != "vec" {
        state.tokens.advance();
    }
    if is_sql_macro(state, name) {
        return Ok(Some(macro_parsing::parse_sql_macro(state, name)?));
    }
    parse_generic_macro(state, name)
}
/// Delegate `df!` to the DataFrame macro parser; `None` for any other name.
fn try_parse_dataframe_macro(state: &mut ParserState, name: &str) -> Result<Option<Expr>> {
    match name {
        "df" => macro_parsing::parse_dataframe_macro(state),
        _ => Ok(None),
    }
}
/// Delegate `vec!` to the vec macro parser; `None` for any other name.
fn try_parse_vec_macro(state: &mut ParserState, name: &str) -> Result<Option<Expr>> {
    match name {
        "vec" => macro_parsing::parse_vec_macro(state),
        _ => Ok(None),
    }
}
/// `sql!` is only treated as the SQL macro when a `{` body follows.
fn is_sql_macro(state: &mut ParserState, name: &str) -> bool {
    if name != "sql" {
        return false;
    }
    matches!(state.tokens.peek(), Some((Token::LeftBrace, _)))
}
/// Parse a generic macro body: determine the delimiter pair, parse the
/// arguments up to the closing token, and build the macro expression.
/// `None` when no opening delimiter follows.
fn parse_generic_macro(state: &mut ParserState, name: &str) -> Result<Option<Expr>> {
    match macro_parsing::get_macro_delimiters(state) {
        Some((_style, closing_token)) => {
            let args = macro_parsing::parse_macro_arguments(state, closing_token)?;
            let expr = macro_parsing::create_macro_expr(name.to_string(), args);
            Ok(Some(expr))
        }
        None => Ok(None),
    }
}
#[cfg(test)]
#[path = "parser_tests.rs"]
mod tests;
#[cfg(test)]
#[path = "parser_mutation_tests.rs"]
mod mutation_tests;