use crate::lexer::token::Token;
use crate::lexer::token::TokenKind;
use crate::parser::ast::comments::Comment;
use crate::parser::ast::comments::CommentFormat;
use crate::parser::ast::comments::CommentGroup;
/// A cursor over a lexed token slice that transparently skips comment
/// tokens, buffering them so the parser can attach them to AST nodes later.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TokenStream<'a> {
    // Full token slice produced by the lexer, comments included.
    tokens: &'a [Token],
    // Cached `tokens.len()`; used for bounds/EOF checks.
    length: usize,
    // Comment tokens passed over since the last call to `comments()`.
    comments: Vec<&'a Token>,
    // Index of the current (non-comment) token within `tokens`.
    cursor: usize,
}
impl<'a> TokenStream<'a> {
    /// Creates a stream over `tokens`, immediately buffering any leading
    /// comment tokens so the cursor starts on real syntax.
    pub fn new(tokens: &'a [Token]) -> TokenStream<'a> {
        let length = tokens.len();
        let mut stream = TokenStream {
            tokens,
            length,
            comments: vec![],
            cursor: 0,
        };
        stream.collect_comments();
        stream
    }

    /// Returns `true` for the four comment token kinds the stream hides
    /// from the parser. Single source of truth for "is this a comment?",
    /// shared by `peek_nth` and `collect_comments`.
    const fn is_comment(token: &Token) -> bool {
        matches!(
            token.kind,
            TokenKind::SingleLineComment
                | TokenKind::MultiLineComment
                | TokenKind::HashMarkComment
                | TokenKind::DocumentComment
        )
    }

    /// Advances past the current token and past any run of comments that
    /// follows it, buffering those comments.
    pub fn next(&mut self) {
        self.cursor += 1;
        self.collect_comments();
    }

    /// Returns the token under the cursor, clamped to the final token once
    /// the stream has been exhausted.
    ///
    /// # Panics
    /// Panics if the stream holds no tokens at all (e.g. built via
    /// `Default`), since `length - 1` underflows.
    pub const fn current(&self) -> &'a Token {
        let position = if self.cursor >= self.length {
            self.length - 1
        } else {
            self.cursor
        };
        &self.tokens[position]
    }

    /// Returns the token just before the cursor, clamped to the first token
    /// at the start of the stream.
    ///
    /// # Panics
    /// Panics if the stream holds no tokens at all.
    pub const fn previous(&self) -> &'a Token {
        let position = if self.cursor == 0 { 0 } else { self.cursor - 1 };
        &self.tokens[position]
    }

    /// Returns the next non-comment token without advancing the cursor.
    pub const fn peek(&self) -> &'a Token {
        self.peek_nth(1)
    }

    /// Returns the `n`-th non-comment token after `peek()` without
    /// advancing the cursor (`lookahead(0)` == `peek()`).
    pub const fn lookahead(&self, n: usize) -> &'a Token {
        self.peek_nth(n + 1)
    }

    /// Scans forward from the cursor, skipping comment tokens, and returns
    /// the `n`-th non-comment token found. Running off the end of the
    /// stream yields the final token (expected to be EOF).
    ///
    /// # Panics
    /// Panics if the stream holds no tokens at all.
    #[inline]
    const fn peek_nth(&self, n: usize) -> &'a Token {
        let mut cursor = self.cursor + 1;
        let mut target = 1;
        loop {
            if cursor >= self.length {
                // Exhausted: clamp to the last token.
                return &self.tokens[self.length - 1];
            }
            let current = &self.tokens[cursor];
            if Self::is_comment(current) {
                // Comments are invisible to lookahead; don't count them.
                cursor += 1;
                continue;
            }
            if target == n {
                return current;
            }
            target += 1;
            cursor += 1;
        }
    }

    /// Returns `true` when the cursor has run past the end of the slice or
    /// rests on an explicit EOF token.
    pub fn is_eof(&self) -> bool {
        if self.cursor >= self.length {
            return true;
        }
        self.tokens[self.cursor].kind == TokenKind::Eof
    }

    /// Drains the comments buffered so far into a `CommentGroup`, leaving
    /// the internal buffer empty for the next run of comments.
    #[allow(dead_code)] // public API surface; not yet used inside the crate
    pub fn comments(&mut self) -> CommentGroup {
        // `take` moves the buffer out and leaves a fresh empty Vec behind.
        let comments = std::mem::take(&mut self.comments);
        CommentGroup {
            comments: comments
                .iter()
                .map(|token| {
                    let format = match token.kind {
                        TokenKind::SingleLineComment => CommentFormat::SingleLine,
                        TokenKind::MultiLineComment => CommentFormat::MultiLine,
                        TokenKind::HashMarkComment => CommentFormat::HashMark,
                        TokenKind::DocumentComment => CommentFormat::Document,
                        // `collect_comments` only buffers the four kinds above.
                        _ => unreachable!(),
                    };
                    Comment {
                        span: token.span,
                        format,
                        content: token.value.clone(),
                    }
                })
                .collect(),
        }
    }

    /// Buffers every comment token starting at the current position and
    /// advances the cursor past them, stopping at the first non-comment
    /// token (or the end of the slice).
    fn collect_comments(&mut self) {
        while self.cursor < self.length {
            let current = &self.tokens[self.cursor];
            if !Self::is_comment(current) {
                break;
            }
            self.comments.push(current);
            self.cursor += 1;
        }
    }
}
impl<'a> Default for TokenStream<'a> {
fn default() -> Self {
Self::new(&[])
}
}
impl<'a> From<&'a Vec<Token>> for TokenStream<'a> {
fn from(tokens: &'a Vec<Token>) -> Self {
Self::new(tokens.as_slice())
}
}