use gramatika::{ParseStreamer, Spanned, SpannedError, Substr, Token as _, TokenCtor};
use crate::{
comment::Comment,
token::{Lexer, Token},
TokenKind,
};
#[cfg(test)]
use crate::utils::{WithComments, WithErrors, WithTokens};
/// Extension of [`ParseStreamer`] adding error recovery: implementors record
/// parse errors via [`push_error`] and keep scanning, so a single pass can
/// surface multiple diagnostics instead of bailing at the first failure.
///
/// [`push_error`]: ErrorRecoveringParseStream::push_error
pub(crate) trait ErrorRecoveringParseStream: ParseStreamer {
	/// Records a recoverable error without aborting the parse.
	fn push_error(&mut self, error: SpannedError);

	/// Parses a `separator`-delimited sequence of `P` nodes, continuing while
	/// `loop_condition` holds.
	///
	/// The separator is consumed after each element for which the loop will
	/// continue, so no trailing separator is required.
	fn parse_seq_separated<P>(
		&mut self,
		separator: Self::Token,
		loop_condition: impl Fn(&mut Self) -> bool,
	) -> gramatika::Result<Vec<P>>
	where
		P: gramatika::Parse<Stream = Self>,
	{
		self.__parse_seq_impl(&loop_condition, |input, parsed| {
			if loop_condition(input) {
				input.consume(separator.clone())?;
			}
			Ok(parsed)
		})
	}

	/// Parses a sequence of `P` nodes while `loop_condition` holds, passing
	/// each parsed node through `finish` (e.g. to consume trailing
	/// punctuation or convert the node into `R`).
	fn parse_seq_with_finisher<P, R>(
		&mut self,
		loop_condition: impl Fn(&mut Self) -> bool,
		finish: impl Fn(&mut Self, P) -> gramatika::Result<R>,
	) -> gramatika::Result<Vec<R>>
	where
		P: gramatika::Parse<Stream = Self>,
	{
		self.__parse_seq_impl(&loop_condition, finish)
	}

	/// Parses a plain sequence of `P` nodes while `loop_condition` holds.
	/// Parse failures are recorded with [`push_error`] rather than returned.
	///
	/// [`push_error`]: ErrorRecoveringParseStream::push_error
	fn parse_seq<P>(&mut self, loop_condition: impl Fn(&mut Self) -> bool) -> Vec<P>
	where P: gramatika::Parse<Stream = Self> {
		// The identity finisher never fails, and parse failures inside the
		// loop are routed to `push_error` instead of being propagated, so the
		// overall result is always `Ok`.
		self.__parse_seq_impl(&loop_condition, |_, parsed| Ok(parsed))
			.expect("infallible finisher cannot produce an error")
	}

	/// Shared driver for the `parse_seq_*` methods.
	///
	/// Loops while the stream is non-empty and `loop_condition` holds. Each
	/// successful parse is passed through `finish`; on failure the error is
	/// recorded once and the loop enters "recovery" mode, discarding tokens
	/// that fall inside the failed parse's span until a parse succeeds again.
	#[inline]
	#[doc(hidden)]
	fn __parse_seq_impl<P, R>(
		&mut self,
		loop_condition: &impl Fn(&mut Self) -> bool,
		finish: impl Fn(&mut Self, P) -> gramatika::Result<R>,
	) -> gramatika::Result<Vec<R>>
	where
		P: gramatika::Parse<Stream = Self>,
	{
		let mut results = vec![];
		let mut recovering = false;

		while !self.is_empty() && loop_condition(self) {
			match self.parse::<P>() {
				Ok(parsed) => {
					// A successful parse ends recovery mode.
					recovering = false;
					results.push(finish(self, parsed)?);
				}
				Err(err) => {
					if recovering {
						// Discard tokens still covered by the failed span so
						// the next iteration can re-sync on fresh input.
						// NOTE(review): when the error carries no span, or the
						// next token lies outside it, nothing is discarded —
						// forward progress then relies on `parse` itself
						// having consumed tokens; verify this cannot spin.
						let needs_discard = match (err.span, self.peek()) {
							(Some(span), Some(next_token)) => {
								span.contains(next_token.span())
							}
							_ => false,
						};
						if needs_discard {
							self.discard();
						}
					} else {
						// First failure: record it and switch to recovery.
						self.push_error(err);
						recovering = true;
					}
				}
			}
		}

		Ok(results)
	}
}
/// A [`ParseStreamer`] implementation wrapping [`gramatika::ParseStream`]
/// that additionally collects comments and recoverable errors in side
/// channels instead of interleaving them with the token stream.
pub struct ParseStream {
	// Underlying gramatika stream that performs the actual lexing/buffering.
	inner: gramatika::ParseStream<Token, Lexer>,
	// Comments gathered while peeking/consuming; recovered via `into_inner`.
	comments: Vec<Comment>,
	// Re-entrancy guard: true while `Comment::parse` is driving this stream,
	// so the comment-skipping logic does not recurse into itself.
	parsing_comment: bool,
	// Errors recorded during error recovery; recovered via `into_inner`.
	pub(crate) errors: Vec<SpannedError>,
}
/// The complete output of a parse: the source text, the token stream, and
/// the comments and errors collected along the way.
pub struct ParseResult {
	pub source: Substr,
	pub tokens: Vec<Token>,
	pub comments: Vec<Comment>,
	pub errors: Vec<SpannedError>,
}
impl ParseStream {
pub fn new(lexer: Lexer) -> Self {
Self {
inner: gramatika::ParseStream::new(lexer),
comments: vec![],
parsing_comment: false,
errors: vec![],
}
}
pub fn source(&self) -> Substr {
self.inner.source()
}
pub fn into_inner(self) -> ParseResult {
let (source, tokens) = self.inner.into_inner();
ParseResult {
source,
tokens,
comments: self.comments,
errors: self.errors,
}
}
pub fn split_next(
&mut self,
split_at: usize,
ctors: (TokenCtor<Token>, TokenCtor<Token>),
) -> gramatika::Result<Token> {
self.inner.split_next(split_at, ctors)
}
fn did_parse_comment(&mut self) -> bool {
let Some(token) = self.inner.peek() else {
return false;
};
match token.kind() {
TokenKind::CommentStart => {
self.parsing_comment = true;
match self.parse::<Comment>() {
Ok(comment) => {
self.comments.push(comment);
self.parsing_comment = false;
true
}
Err(error) => {
self.errors.push(error);
self.parsing_comment = false;
true
}
}
}
_ => false,
}
}
}
impl ErrorRecoveringParseStream for ParseStream {
	/// Records a recoverable parse error for later reporting.
	fn push_error(&mut self, error: SpannedError) {
		self.errors.push(error);
	}
}
impl<S> From<S> for ParseStream
where S: Into<Substr>
{
	/// Builds a stream directly from source text, with empty comment and
	/// error collections.
	fn from(value: S) -> Self {
		let inner = gramatika::ParseStream::from(value);

		Self {
			inner,
			comments: Vec::new(),
			parsing_comment: false,
			errors: Vec::new(),
		}
	}
}
impl ParseStreamer for ParseStream {
	type Token = Token;

	fn is_empty(&mut self) -> bool {
		self.peek().is_none()
	}

	fn peek(&mut self) -> Option<&Token> {
		// Collect every comment sitting ahead of the next significant token,
		// then delegate to the inner stream. The `parsing_comment` flag stops
		// this from re-entering while a comment is itself being parsed.
		while !self.parsing_comment && self.did_parse_comment() {}
		self.inner.peek()
	}

	fn prev(&mut self) -> Option<&Token> {
		self.inner.prev()
	}

	fn check_kind(&mut self, kind: TokenKind) -> bool {
		// Skip past comments before checking the next significant token.
		while !self.parsing_comment && self.did_parse_comment() {}
		self.inner.check_kind(kind)
	}

	fn check(&mut self, compare: Token) -> bool {
		while !self.parsing_comment && self.did_parse_comment() {}
		self.inner.check(compare)
	}

	fn consume(&mut self, compare: Token) -> gramatika::Result<Token> {
		while !self.parsing_comment && self.did_parse_comment() {}
		self.inner.consume(compare)
	}

	fn consume_kind(&mut self, kind: TokenKind) -> gramatika::Result<Token> {
		while !self.parsing_comment && self.did_parse_comment() {}
		self.inner.consume_kind(kind)
	}

	fn consume_as(
		&mut self,
		kind: TokenKind,
		convert: TokenCtor<Token>,
	) -> gramatika::Result<Token> {
		while !self.parsing_comment && self.did_parse_comment() {}
		self.inner.consume_as(kind, convert)
	}

	fn upgrade_last(
		&mut self,
		kind: TokenKind,
		convert: TokenCtor<Token>,
	) -> gramatika::Result<Token> {
		self.inner.upgrade_last(kind, convert)
	}

	fn upgrade(
		&mut self,
		token: Self::Token,
		convert: TokenCtor<Token>,
	) -> gramatika::Result<Token> {
		self.inner.upgrade(token, convert)
	}

	fn discard(&mut self) {
		self.inner.discard();
	}
}
impl Iterator for ParseStream {
	type Item = Token;

	/// Advances the inner stream by one token. Note that this delegates
	/// directly and therefore bypasses the comment-collecting logic used by
	/// the `ParseStreamer` methods above.
	fn next(&mut self) -> Option<Token> {
		self.inner.next()
	}
}
/// Test-only accessor exposing the collected parse errors.
#[cfg(test)]
impl WithErrors for ParseStream {
	type Output = SpannedError;

	fn get(&self) -> &[Self::Output] {
		&self.errors
	}
}
/// Test-only accessor exposing the collected comments.
#[cfg(test)]
impl WithComments for ParseStream {
	type Output = Comment;

	fn get(&self) -> &[Self::Output] {
		&self.comments
	}
}
/// Test-only conversion yielding just the token stream.
#[cfg(test)]
impl WithTokens for ParseStream {
	type Output = Token;

	fn into(self) -> Vec<Self::Output> {
		// Comments and errors are dropped; tests using this only need tokens.
		let ParseResult { tokens, .. } = self.into_inner();
		tokens
	}
}