use crate::{
Error,
lexer::{InputElement, Lexer, Token, TokenKind},
parser::ParseResult,
source::{ReadChar, UTF8Input},
};
use boa_ast::{LinearPosition, PositionGroup};
use boa_interner::Interner;
#[cfg(test)]
mod tests;
/// Maximum number of tokens the parser may look ahead past the cursor in `peek`.
const MAX_PEEK_SKIP: usize = 3;
/// Capacity of the peek ring buffer.
///
/// NOTE(review): the `* 2` presumably reserves room for one line-terminator token
/// interleaved before each of the `MAX_PEEK_SKIP + 1` peekable tokens, and the
/// `+ 1` keeps `read_index != write_index` when the buffer is full — confirm
/// against the `fill`/`peek` invariants.
const PEEK_BUF_SIZE: usize = (MAX_PEEK_SKIP + 1) * 2 + 1;
/// A [`Lexer`] wrapper that buffers lexed tokens in a fixed-size ring buffer,
/// letting the parser peek ahead (up to `MAX_PEEK_SKIP` tokens) without
/// consuming them.
#[derive(Debug)]
pub(super) struct BufferedLexer<R> {
    /// The underlying lexer producing the tokens.
    lexer: Lexer<R>,
    /// Ring buffer of tokens lexed but not yet consumed; `None` marks a slot
    /// whose token has been taken or never filled.
    peeked: [Option<Token>; PEEK_BUF_SIZE],
    /// Ring index of the next token to hand out to the parser.
    read_index: usize,
    /// Ring index of the next free slot for a freshly lexed token.
    write_index: usize,
    /// Linear source position at the end of the last token returned by `next`.
    last_linear_pos: LinearPosition,
}
impl<R> From<Lexer<R>> for BufferedLexer<R>
where
    R: ReadChar,
{
    /// Wraps the given lexer with an empty peek buffer and a cursor at the
    /// start of the buffer.
    fn from(lexer: Lexer<R>) -> Self {
        Self {
            lexer,
            // Derive the buffer length from `PEEK_BUF_SIZE` instead of
            // hand-writing one `None` per slot: the previous literal array
            // duplicated the constant and had to be edited in lockstep
            // whenever `MAX_PEEK_SKIP` changed.
            peeked: std::array::from_fn(|_| None),
            read_index: 0,
            write_index: 0,
            last_linear_pos: LinearPosition::default(),
        }
    }
}
impl<R> From<R> for BufferedLexer<R>
where
    R: ReadChar,
{
    /// Builds a buffered lexer directly from a character source by first
    /// constructing a [`Lexer`] over it.
    fn from(reader: R) -> Self {
        let lexer = Lexer::new(reader);
        Self::from(lexer)
    }
}
impl<'a> From<&'a [u8]> for BufferedLexer<UTF8Input<&'a [u8]>> {
    /// Builds a buffered lexer over a UTF-8 encoded byte slice.
    fn from(reader: &'a [u8]) -> Self {
        Self::from(Lexer::from(reader))
    }
}
impl<R> BufferedLexer<R>
where
    R: ReadChar,
{
    /// Sets the goal symbol (input element grammar) the underlying lexer uses
    /// for the next token.
    pub(super) fn set_goal(&mut self, elm: InputElement) {
        self.lexer.set_goal(elm);
    }

    /// Lexes the token starting at `start` as a regular expression literal,
    /// switching the lexer goal to `RegExp` first.
    ///
    /// NOTE(review): `init_with_eq` presumably indicates the token began as
    /// `/=` (the `=` already consumed) — confirm against `lex_slash_token`.
    pub(super) fn lex_regex(
        &mut self,
        start: PositionGroup,
        interner: &mut Interner,
        init_with_eq: bool,
    ) -> ParseResult<Token> {
        self.set_goal(InputElement::RegExp);
        self.lexer
            .lex_slash_token(start, interner, init_with_eq)
            .map_err(Into::into)
    }

    /// Lexes the token starting at `start` as a template literal.
    pub(super) fn lex_template(
        &mut self,
        start: PositionGroup,
        interner: &mut Interner,
    ) -> ParseResult<Token> {
        self.lexer
            .lex_template(start, interner)
            .map_err(Error::from)
    }

    /// Returns `true` if the lexer is currently in strict mode.
    pub(super) const fn strict(&self) -> bool {
        self.lexer.strict()
    }

    /// Enables or disables strict mode in the lexer.
    pub(super) fn set_strict(&mut self, strict: bool) {
        self.lexer.set_strict(strict);
    }

    /// Returns `true` if the lexer is lexing module code.
    pub(super) const fn module(&self) -> bool {
        self.lexer.module()
    }

    /// Sets whether the lexer treats the input as a module.
    pub(super) fn set_module(&mut self, module: bool) {
        self.lexer.set_module(module);
    }

    /// Lexes one more token into the ring buffer at `write_index`.
    ///
    /// If the most recently buffered token was a line terminator, any further
    /// line terminators (and comments) are consumed here without being stored,
    /// so the buffer never holds two adjacent `LineTerminator` tokens —
    /// `next`/`peek` rely on this invariant to skip at most one line
    /// terminator per step.
    fn fill(&mut self, interner: &mut Interner) -> ParseResult<()> {
        debug_assert!(
            self.write_index < PEEK_BUF_SIZE,
            "write index went out of bounds"
        );
        // Ring index of the most recently written token (wraps to the last
        // slot when `write_index` is 0).
        let previous_index = self.write_index.checked_sub(1).unwrap_or(PEEK_BUF_SIZE - 1);
        if let Some(ref token) = self.peeked[previous_index]
            && token.kind() == &TokenKind::LineTerminator
        {
            // NOTE(review): `skip_html_close` presumably consumes an
            // HTML-like close comment (`-->`), which is only permitted after
            // a line terminator — confirm against the lexer.
            let next = loop {
                self.lexer.skip_html_close(interner)?;
                let next = self.lexer.next_no_skip(interner)?;
                if let Some(ref token) = next {
                    match token.kind() {
                        // Collapse runs: the buffered line terminator already
                        // represents this one.
                        TokenKind::LineTerminator => { }
                        TokenKind::Comment => self.lexer.skip_html_close(interner)?,
                        _ => break next,
                    }
                } else {
                    // End of input.
                    break None;
                }
            };
            self.peeked[self.write_index] = next;
        } else {
            self.peeked[self.write_index] = self.lexer.next(interner)?;
        }
        self.write_index = (self.write_index + 1) % PEEK_BUF_SIZE;
        debug_assert_ne!(
            self.read_index, self.write_index,
            "we reached the read index with the write index"
        );
        debug_assert!(
            self.read_index < PEEK_BUF_SIZE,
            "read index went out of bounds"
        );
        Ok(())
    }

    /// Consumes and returns the next token, or `None` at the end of input.
    ///
    /// If `skip_line_terminators` is `true` and the upcoming token is a line
    /// terminator, it is discarded and the token after it is returned instead
    /// (at most one needs skipping — see `fill`).
    pub(super) fn next(
        &mut self,
        skip_line_terminators: bool,
        interner: &mut Interner,
    ) -> ParseResult<Option<Token>> {
        // Buffer empty: lex one token on demand.
        if self.read_index == self.write_index {
            self.fill(interner)?;
        }
        if let Some(ref token) = self.peeked[self.read_index] {
            if skip_line_terminators && token.kind() == &TokenKind::LineTerminator {
                // Step past the line terminator, refilling if that exhausts
                // the buffer.
                self.read_index = (self.read_index + 1) % PEEK_BUF_SIZE;
                if self.read_index == self.write_index {
                    self.fill(interner)?;
                }
            }
            // Move the token out of its slot, freeing it for reuse.
            let tok = self.peeked[self.read_index].take();
            self.read_index = (self.read_index + 1) % PEEK_BUF_SIZE;
            if let Some(tok) = &tok {
                self.last_linear_pos = tok.linear_span().end();
            }
            Ok(tok)
        } else {
            // A `None` in the buffer marks the end of input.
            Ok(None)
        }
    }

    /// Peeks at the token `skip_n` positions past the cursor without consuming
    /// anything, or `None` if the input ends first.
    ///
    /// With `skip_line_terminators` set, each step skips over at most one line
    /// terminator token (see `fill` for why one is enough).
    ///
    /// # Panics
    ///
    /// Panics if `skip_n` is greater than `MAX_PEEK_SKIP`.
    pub(super) fn peek(
        &mut self,
        skip_n: usize,
        skip_line_terminators: bool,
        interner: &mut Interner,
    ) -> ParseResult<Option<&Token>> {
        assert!(
            skip_n <= MAX_PEEK_SKIP,
            "you cannot skip more than {MAX_PEEK_SKIP} elements",
        );
        // Walk a local copy of the read cursor; the real cursor is untouched.
        let mut read_index = self.read_index;
        let mut count = 0;
        let res_token = loop {
            if read_index == self.write_index {
                self.fill(interner)?;
            }
            if let Some(ref token) = self.peeked[read_index] {
                if skip_line_terminators && token.kind() == &TokenKind::LineTerminator {
                    read_index = (read_index + 1) % PEEK_BUF_SIZE;
                    if read_index == self.write_index {
                        self.fill(interner)?;
                    }
                }
                if count == skip_n {
                    break self.peeked[read_index].as_ref();
                }
            } else {
                // End of input before reaching `skip_n`.
                break None;
            }
            read_index = (read_index + 1) % PEEK_BUF_SIZE;
            count += 1;
        };
        Ok(res_token)
    }

    /// Returns the linear source position at the end of the last token
    /// consumed via `next`.
    #[inline]
    pub(super) fn linear_pos(&self) -> LinearPosition {
        self.last_linear_pos
    }

    /// Takes the accumulated source text out of the underlying lexer.
    pub(super) fn take_source(&mut self) -> boa_ast::SourceText {
        self.lexer.take_source()
    }
}