use self::pattern::ParsePatternRules;
use crate::ast::new_node;
use crate::ast::new_node_with_flag;
use crate::ast::new_node_with_flags;
use crate::ast::Node;
use crate::ast::NodeFlag;
use crate::ast::Syntax;
use crate::error::SyntaxError;
use crate::error::SyntaxErrorType;
use crate::error::SyntaxResult;
use crate::flag::Flags;
use crate::lex::lex_next;
use crate::lex::LexMode;
use crate::lex::Lexer;
use crate::lex::LexerCheckpoint;
use crate::session::Session;
use crate::source::SourceRange;
use crate::symbol::Scope;
use crate::symbol::ScopeType;
use crate::token::Token;
use crate::token::TokenType;
pub mod class_or_object;
pub mod decl;
pub mod expr;
pub mod literal;
pub mod operator;
pub mod pattern;
pub mod signature;
pub mod stmt;
#[cfg(test)]
mod tests;
pub mod toplevel;
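/// Copyable context threaded through the parse functions: the current parse
/// session, the lexical scope that newly created nodes attach to, and the
/// pattern-parsing rules in effect.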
#[derive(Clone, Copy)]
pub struct ParseCtx<'a> {
  pub session: &'a Session,
  pub scope: Scope<'a>,
  pub rules: ParsePatternRules,
}
impl<'a> ParseCtx<'a> {
pub fn with_scope(&self, scope: Scope<'a>) -> ParseCtx<'a> {
ParseCtx { scope, ..*self }
}
pub fn with_rules(&self, rules: ParsePatternRules) -> ParseCtx<'a> {
ParseCtx { rules, ..*self }
}
pub fn create_child_scope(&self, typ: ScopeType) -> Scope<'a> {
self.scope.create_child_scope(self.session, typ)
}
pub fn create_node_with_flags(
&self,
loc: SourceRange<'a>,
stx: Syntax<'a>,
flags: Flags<NodeFlag>,
) -> Node<'a> {
new_node_with_flags(self.session, self.scope, loc, stx, flags)
}
pub fn create_node_with_flag(
&self,
loc: SourceRange<'a>,
stx: Syntax<'a>,
flag: NodeFlag,
) -> Node<'a> {
new_node_with_flag(self.session, self.scope, loc, stx, flag)
}
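  /// Creates a new AST node for `loc` and `stx` in the current scope and
  /// session (the flag-less counterpart of `create_node_with_flags`).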
pub fn create_node(&self, loc: SourceRange<'a>, stx: Syntax<'a>) -> Node<'a> {
new_node(self.session, self.scope, loc, stx)
}
}
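/// Result of an optional token match (see `Parser::consume_if` and
/// `Parser::maybe_with_mode`): whether it matched, the source range of the
/// token that was inspected, and a token type used for error reporting.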
#[derive(Debug)]
#[must_use]
pub struct MaybeToken<'a> {
typ: TokenType,
range: SourceRange<'a>,
matched: bool,
}
impl<'a> MaybeToken<'a> {
pub fn is_match(&self) -> bool {
self.matched
}
pub fn match_loc(&self) -> Option<SourceRange<'a>> {
if self.matched {
Some(self.range)
} else {
None
}
}
pub fn error(&self, err: SyntaxErrorType) -> SyntaxError<'a> {
debug_assert!(!self.matched);
SyntaxError::from_loc(self.range, err, Some(self.typ))
}
pub fn and_then<R, F: FnOnce() -> SyntaxResult<'a, R>>(
self,
f: F,
) -> SyntaxResult<'a, Option<R>> {
Ok(if self.matched { Some(f()?) } else { None })
}
}
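/// Opaque snapshot of the parser's position, taken with `Parser::checkpoint`
/// and restored with `Parser::restore_checkpoint`.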
pub struct ParserCheckpoint {
checkpoint: LexerCheckpoint,
}
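/// A token that has been lexed ahead (peeked) but not yet consumed, together
/// with the mode it was lexed in and the lexer state immediately after it.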
struct BufferedToken<'a> {
token: Token<'a>,
lex_mode: LexMode,
after_checkpoint: LexerCheckpoint,
}
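/// Wrapper around the lexer that adds single-token lookahead: tokens can be
/// peeked, conditionally consumed, or required, and the parser position can
/// be checkpointed and restored.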
pub struct Parser<'a> {
lexer: Lexer<'a>,
buffered: Option<BufferedToken<'a>>,
}
impl<'a> Parser<'a> {
pub fn new(lexer: Lexer<'a>) -> Parser<'a> {
Parser {
lexer,
buffered: None,
}
}
pub fn lexer_mut(&mut self) -> &mut Lexer<'a> {
&mut self.lexer
}
pub fn source_range(&self) -> SourceRange<'a> {
self.lexer.source_range()
}
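  /// Captures the current parser position so it can later be restored with
  /// `restore_checkpoint`.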
pub fn checkpoint(&self) -> ParserCheckpoint {
ParserCheckpoint {
checkpoint: self.lexer.checkpoint(),
}
}
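  /// Returns the source range spanning from `checkpoint` to the current
  /// parser position.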
pub fn since_checkpoint(&self, checkpoint: ParserCheckpoint) -> SourceRange<'a> {
self.lexer.since_checkpoint(checkpoint.checkpoint)
}
  /// Rewinds the parser to a previously taken checkpoint, discarding any
  /// buffered (peeked) token.
  pub fn restore_checkpoint(&mut self, checkpoint: ParserCheckpoint) {
    self.buffered = None;
    self.lexer.apply_checkpoint(checkpoint.checkpoint);
  }
  /// Discards any buffered (peeked) token, so the next read re-lexes from the
  /// lexer's current position.
  pub fn clear_buffered(&mut self) {
    self.buffered = None;
  }
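  /// Core lookahead primitive: obtains the next token in `mode` (reusing a
  /// buffered token if it was lexed in the same mode) and passes it to `keep`.
  /// If `keep` returns true, the token is consumed and the lexer ends up past
  /// it; otherwise the token is buffered and the lexer remains just before it,
  /// so a later call can inspect or consume it again. Returns the `keep`
  /// decision along with the token.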
fn forward<K: FnOnce(&Token) -> bool>(
&mut self,
mode: LexMode,
keep: K,
) -> SyntaxResult<'a, (bool, Token<'a>)> {
match self.buffered.as_ref() {
Some(b) if b.lex_mode == mode => Ok(if keep(&b.token) {
self.lexer.apply_checkpoint(b.after_checkpoint);
(true, self.buffered.take().unwrap().token)
} else {
(false, b.token.clone())
}),
_ => {
let cp = self.lexer.checkpoint();
let t = lex_next(&mut self.lexer, mode)?;
let k = keep(&t);
self.buffered = if k {
None
} else {
let after_checkpoint = self.lexer.checkpoint();
self.lexer.apply_checkpoint(cp);
Some(BufferedToken {
token: t.clone(),
lex_mode: mode,
after_checkpoint,
})
};
Ok((k, t))
}
}
}
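  /// Consumes and returns the next token, lexing it in `mode`.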
pub fn next_with_mode(&mut self, mode: LexMode) -> SyntaxResult<'a, Token<'a>> {
self.forward(mode, |_| true).map(|r| r.1)
}
pub fn next(&mut self) -> SyntaxResult<'a, Token<'a>> {
self.next_with_mode(LexMode::Standard)
}
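  /// Returns the next token, lexed in `mode`, without consuming it.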
pub fn peek_with_mode(&mut self, mode: LexMode) -> SyntaxResult<'a, Token<'a>> {
self.forward(mode, |_| false).map(|r| r.1)
}
pub fn peek(&mut self) -> SyntaxResult<'a, Token<'a>> {
self.peek_with_mode(LexMode::Standard)
}
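  /// Consumes the token returned by the most recent peek. Panics if no token
  /// is currently buffered.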
pub fn consume_peeked(&mut self) -> Token<'a> {
let b = self.buffered.take().unwrap();
self.lexer.apply_checkpoint(b.after_checkpoint);
b.token
}
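  /// Consumes the next token only if it is of type `typ`; otherwise it is left
  /// unconsumed. The returned `MaybeToken` records whether it matched.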
pub fn maybe_with_mode(
&mut self,
typ: TokenType,
mode: LexMode,
) -> SyntaxResult<'a, MaybeToken<'a>> {
let (matched, t) = self.forward(mode, |t| t.typ == typ)?;
Ok(MaybeToken {
typ,
matched,
range: t.loc,
})
}
pub fn consume_if(&mut self, typ: TokenType) -> SyntaxResult<'a, MaybeToken<'a>> {
self.maybe_with_mode(typ, LexMode::Standard)
}
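  /// Consumes the next token only if `pred` accepts it; otherwise it is left
  /// unconsumed.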
pub fn consume_if_pred<F: FnOnce(&Token) -> bool>(
&mut self,
pred: F,
) -> SyntaxResult<'a, MaybeToken<'a>> {
let (matched, t) = self.forward(LexMode::Standard, pred)?;
Ok(MaybeToken {
typ: t.typ,
matched,
range: t.loc,
})
}
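  /// Consumes the next token, lexed in `mode`, and fails with
  /// `RequiredTokenNotFound` if it is not of type `typ`.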
pub fn require_with_mode(
&mut self,
typ: TokenType,
mode: LexMode,
) -> SyntaxResult<'a, Token<'a>> {
let t = self.next_with_mode(mode)?;
if t.typ != typ {
Err(t.error(SyntaxErrorType::RequiredTokenNotFound(typ)))
} else {
Ok(t)
}
}
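  /// Consumes the next token and fails with `ExpectedSyntax(expected)` if its
  /// type does not satisfy `pred`.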
pub fn require_predicate<P: FnOnce(TokenType) -> bool>(
&mut self,
pred: P,
expected: &'static str,
) -> SyntaxResult<'a, Token<'a>> {
let t = self.next_with_mode(LexMode::Standard)?;
if !pred(t.typ) {
Err(t.error(SyntaxErrorType::ExpectedSyntax(expected)))
} else {
Ok(t)
}
}
pub fn require(&mut self, typ: TokenType) -> SyntaxResult<'a, Token<'a>> {
self.require_with_mode(typ, LexMode::Standard)
}
}