use super::{Cursor, Error, Tokenizer};
use crate::{
profiler::BoaProfiler,
syntax::{
ast::{Position, Span},
lexer::{Token, TokenKind},
},
};
use std::io::Read;
/// Lexer for single-line (`// ...`) comments.
///
/// Consumes bytes up to — but not including — the terminating line
/// terminator (`\n` or `\r`), leaving it for the caller to lex, and
/// produces a [`TokenKind::Comment`] token spanning the comment text.
pub(super) struct SingleLineComment;

impl<R> Tokenizer<R> for SingleLineComment {
    fn lex(&mut self, cursor: &mut Cursor<R>, start_pos: Position) -> Result<Token, Error>
    where
        R: Read,
    {
        let _timer = BoaProfiler::global().start_event("SingleLineComment", "Lexing");

        // Advance until the next byte is a line terminator or the input ends.
        // The terminator itself is not consumed.
        while let Some(byte) = cursor.peek()? {
            if matches!(byte, b'\n' | b'\r') {
                break;
            }
            // peek() just returned Some, so the byte must still be there.
            cursor.next_byte()?.expect("Comment character vanished");
        }

        let span = Span::new(start_pos, cursor.pos());
        Ok(Token::new(TokenKind::Comment, span))
    }
}
/// Lexer for multi-line (`/* ... */`) comments.
///
/// Consumes bytes through the closing `*/`. Per the ECMAScript
/// specification, a multi-line comment that contains a line terminator
/// is treated as a line terminator for automatic-semicolon-insertion
/// purposes, so such comments yield [`TokenKind::LineTerminator`]
/// instead of [`TokenKind::Comment`].
///
/// # Errors
///
/// Returns a syntax error if the input ends before `*/` is found.
pub(super) struct MultiLineComment;

impl<R> Tokenizer<R> for MultiLineComment {
    fn lex(&mut self, cursor: &mut Cursor<R>, start_pos: Position) -> Result<Token, Error>
    where
        R: Read,
    {
        let _timer = BoaProfiler::global().start_event("MultiLineComment", "Lexing");
        let mut new_line = false;
        loop {
            if let Some(ch) = cursor.next_byte()? {
                if ch == b'*' && cursor.next_is(b'/')? {
                    break;
                // Both LF and CR are ECMAScript line terminators; checking
                // only `\n` would misclassify comments containing a bare
                // `\r` (old-Mac line endings). `\r\n` is covered either way.
                // NOTE(review): U+2028/U+2029 are also line terminators but
                // are multi-byte in UTF-8 and not detectable with this
                // byte-at-a-time cursor API.
                } else if ch == b'\n' || ch == b'\r' {
                    new_line = true;
                }
            } else {
                // EOF before the closing `*/`.
                return Err(Error::syntax(
                    "unterminated multiline comment",
                    cursor.pos(),
                ));
            }
        }
        Ok(Token::new(
            if new_line {
                TokenKind::LineTerminator
            } else {
                TokenKind::Comment
            },
            Span::new(start_pos, cursor.pos()),
        ))
    }
}