use crate::char::CharFilter;
use crate::char::DIGIT;
use crate::char::DIGIT_BIN;
use crate::char::DIGIT_HEX;
use crate::char::DIGIT_OCT;
use crate::char::ID_CONTINUE;
use crate::char::ID_CONTINUE_JSX;
use crate::char::ID_START;
use crate::char::ID_START_CHARSTR;
use crate::char::WHITESPACE;
use crate::error::SyntaxError;
use crate::error::SyntaxErrorType;
use crate::error::SyntaxResult;
use crate::source::SourceRange;
use crate::token::Token;
use crate::token::TokenType;
use aho_corasick::AhoCorasick;
use aho_corasick::AhoCorasickBuilder;
use aho_corasick::MatchKind;
use core::ops::Index;
use lazy_static::lazy_static;
use memchr::memchr;
use memchr::memchr2;
use memchr::memchr3;
use std::collections::HashMap;
#[cfg(test)]
mod tests;
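/// Lexing mode, selected by the parser from syntactic context: for example, a
/// `/` starts a regex literal only where a regex can appear (SlashIsRegex),
/// and JSX tags and text use different identifier and delimiter rules.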
#[derive(Copy, Clone, Eq, PartialEq)]
pub enum LexMode {
JsxTag,
JsxTextContent,
SlashIsRegex,
Standard,
}
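/// A saved lexer position that the parser can later rewind to.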
#[derive(Copy, Clone)]
pub struct LexerCheckpoint {
next: usize,
}
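/// The length of a match starting at the lexer's current position; it only
/// advances the lexer when passed to `consume`.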
#[derive(Copy, Clone)]
struct Match {
len: usize,
}
impl Match {
pub fn len(&self) -> usize {
self.len
}
pub fn prefix(&self, n: usize) -> Match {
debug_assert!(n <= self.len);
Match { len: n }
}
pub fn is_empty(&self) -> bool {
self.len == 0
}
}
#[derive(Copy, Clone)]
struct AhoCorasickMatch {
id: usize,
mat: Match,
}
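/// A byte-oriented lexer over raw source; `next` is the offset of the next
/// unlexed byte.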
pub struct Lexer<'a> {
source: &'a [u8],
next: usize,
}
impl<'a> Lexer<'a> {
pub fn new(code: &'a [u8]) -> Lexer<'a> {
Lexer {
source: code,
next: 0,
}
}
fn end(&self) -> usize {
self.source.len()
}
fn remaining(&self) -> usize {
self.end() - self.next
}
pub fn source_range(&self) -> SourceRange<'a> {
SourceRange::new(self.source, 0, self.end())
}
fn eof_range(&self) -> SourceRange<'a> {
SourceRange::new(self.source, self.end(), self.end())
}
fn error(&self, typ: SyntaxErrorType) -> SyntaxError<'a> {
SyntaxError::new(
typ,
SourceRange::new(self.source, self.next, self.end()),
None,
)
}
fn at_end(&self) -> bool {
self.next >= self.end()
}
fn peek(&self, n: usize) -> SyntaxResult<'a, u8> {
self
.peek_or_eof(n)
.ok_or_else(|| self.error(SyntaxErrorType::UnexpectedEnd))
}
fn peek_or_eof(&self, n: usize) -> Option<u8> {
self.source.get(self.next + n).copied()
}
pub fn checkpoint(&self) -> LexerCheckpoint {
LexerCheckpoint { next: self.next }
}
pub fn since_checkpoint(&self, checkpoint: LexerCheckpoint) -> SourceRange<'a> {
SourceRange::new(self.source, checkpoint.next, self.next)
}
pub fn apply_checkpoint(&mut self, checkpoint: LexerCheckpoint) {
self.next = checkpoint.next;
}
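// Matchers like `n`, `if_char`, and the `while_*` family return a Match over
// bytes starting at `next` without advancing; `consume` applies a Match and
// advances the lexer.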
fn n(&self, n: usize) -> SyntaxResult<'a, Match> {
if self.next + n > self.end() {
return Err(self.error(SyntaxErrorType::UnexpectedEnd));
};
Ok(Match { len: n })
}
fn if_char(&self, c: u8) -> Match {
Match {
len: (!self.at_end() && self.source[self.next] == c) as usize,
}
}
fn through_char_or_end(&self, c: u8) -> Match {
memchr(c, &self.source[self.next..])
.map(|pos| Match { len: pos + 1 })
.unwrap_or_else(|| Match {
len: self.remaining(),
})
}
fn through_char(&self, c: u8) -> SyntaxResult<'a, Match> {
memchr(c, &self.source[self.next..])
.map(|pos| Match { len: pos + 1 })
.ok_or_else(|| self.error(SyntaxErrorType::UnexpectedEnd))
}
fn while_not_char(&self, a: u8) -> Match {
Match {
len: memchr(a, &self.source[self.next..]).unwrap_or(self.remaining()),
}
}
fn while_not_2_chars(&self, a: u8, b: u8) -> Match {
Match {
len: memchr2(a, b, &self.source[self.next..]).unwrap_or(self.remaining()),
}
}
fn while_not_3_chars(&self, a: u8, b: u8, c: u8) -> Match {
Match {
len: memchr3(a, b, c, &self.source[self.next..]).unwrap_or(self.remaining()),
}
}
fn while_chars(&self, chars: &CharFilter) -> Match {
let mut len = 0;
while len < self.remaining() && chars.has(self.source[self.next + len]) {
len += 1;
}
Match { len }
}
fn aho_corasick(&self, ac: &AhoCorasick) -> SyntaxResult<'a, AhoCorasickMatch> {
ac.find(&self.source[self.next..])
.map(|m| AhoCorasickMatch {
id: m.pattern(),
mat: Match { len: m.end() },
})
.ok_or_else(|| self.error(SyntaxErrorType::ExpectedNotFound))
}
fn range(&self, m: Match) -> SourceRange<'a> {
SourceRange::new(self.source, self.next, self.next + m.len)
}
fn consume(&mut self, m: Match) -> Match {
self.next += m.len;
m
}
fn consume_next(&mut self) -> SyntaxResult<'a, u8> {
let c = self.peek(0)?;
self.next += 1;
Ok(c)
}
fn skip_expect(&mut self, n: usize) {
debug_assert!(self.next + n <= self.end());
self.next += n;
}
}
impl<'a> Index<SourceRange<'a>> for Lexer<'a> {
type Output = [u8];
fn index(&self, index: SourceRange<'a>) -> &Self::Output {
&self.source[index.start()..index.end()]
}
}
impl<'a> Index<Match> for Lexer<'a> {
type Output = [u8];
fn index(&self, index: Match) -> &Self::Output {
&self.source[self.next - index.len..self.next]
}
}
lazy_static! {
pub static ref OPERATORS_MAPPING: HashMap<TokenType, &'static [u8]> = {
let mut map = HashMap::<TokenType, &'static [u8]>::new();
map.insert(TokenType::Ampersand, b"&");
map.insert(TokenType::AmpersandAmpersand, b"&&");
map.insert(TokenType::AmpersandAmpersandEquals, b"&&=");
map.insert(TokenType::AmpersandEquals, b"&=");
map.insert(TokenType::Asterisk, b"*");
map.insert(TokenType::AsteriskAsterisk, b"**");
map.insert(TokenType::AsteriskAsteriskEquals, b"**=");
map.insert(TokenType::AsteriskEquals, b"*=");
map.insert(TokenType::Bar, b"|");
map.insert(TokenType::BarBar, b"||");
map.insert(TokenType::BarBarEquals, b"||=");
map.insert(TokenType::BarEquals, b"|=");
map.insert(TokenType::BraceClose, b"}");
map.insert(TokenType::BraceOpen, b"{");
map.insert(TokenType::BracketClose, b"]");
map.insert(TokenType::BracketOpen, b"[");
map.insert(TokenType::Caret, b"^");
map.insert(TokenType::CaretEquals, b"^=");
map.insert(TokenType::ChevronLeft, b"<");
map.insert(TokenType::ChevronLeftChevronLeft, b"<<");
map.insert(TokenType::ChevronLeftChevronLeftEquals, b"<<=");
map.insert(TokenType::ChevronLeftEquals, b"<=");
map.insert(TokenType::ChevronRight, b">");
map.insert(TokenType::ChevronRightChevronRight, b">>");
map.insert(TokenType::ChevronRightChevronRightChevronRight, b">>>");
map.insert(TokenType::ChevronRightChevronRightChevronRightEquals, b">>>=");
map.insert(TokenType::ChevronRightChevronRightEquals, b">>=");
map.insert(TokenType::ChevronRightEquals, b">=");
map.insert(TokenType::Colon, b":");
map.insert(TokenType::Comma, b",");
map.insert(TokenType::Dot, b".");
map.insert(TokenType::DotDotDot, b"...");
map.insert(TokenType::Equals, b"=");
map.insert(TokenType::EqualsChevronRight, b"=>");
map.insert(TokenType::EqualsEquals, b"==");
map.insert(TokenType::EqualsEqualsEquals, b"===");
map.insert(TokenType::Exclamation, b"!");
map.insert(TokenType::ExclamationEquals, b"!=");
map.insert(TokenType::ExclamationEqualsEquals, b"!==");
map.insert(TokenType::Hyphen, b"-");
map.insert(TokenType::HyphenEquals, b"-=");
map.insert(TokenType::HyphenHyphen, b"--");
map.insert(TokenType::ParenthesisClose, b")");
map.insert(TokenType::ParenthesisOpen, b"(");
map.insert(TokenType::Percent, b"%");
map.insert(TokenType::PercentEquals, b"%=");
map.insert(TokenType::Plus, b"+");
map.insert(TokenType::PlusEquals, b"+=");
map.insert(TokenType::PlusPlus, b"++");
map.insert(TokenType::PrivateMember, b"#");
map.insert(TokenType::Question, b"?");
map.insert(TokenType::QuestionDot, b"?.");
map.insert(TokenType::QuestionDotBracketOpen, b"?.[");
map.insert(TokenType::QuestionDotParenthesisOpen, b"?.(");
map.insert(TokenType::QuestionQuestion, b"??");
map.insert(TokenType::QuestionQuestionEquals, b"??=");
map.insert(TokenType::Semicolon, b";");
map.insert(TokenType::Slash, b"/");
map.insert(TokenType::SlashEquals, b"/=");
map.insert(TokenType::Tilde, b"~");
map
};
pub static ref KEYWORDS_MAPPING: HashMap<TokenType, &'static [u8]> = {
let mut map = HashMap::<TokenType, &'static [u8]>::new();
map.insert(TokenType::KeywordAs, b"as");
map.insert(TokenType::KeywordAsync, b"async");
map.insert(TokenType::KeywordAwait, b"await");
map.insert(TokenType::KeywordBreak, b"break");
map.insert(TokenType::KeywordCase, b"case");
map.insert(TokenType::KeywordCatch, b"catch");
map.insert(TokenType::KeywordClass, b"class");
map.insert(TokenType::KeywordConst, b"const");
map.insert(TokenType::KeywordConstructor, b"constructor");
map.insert(TokenType::KeywordContinue, b"continue");
map.insert(TokenType::KeywordDebugger, b"debugger");
map.insert(TokenType::KeywordDefault, b"default");
map.insert(TokenType::KeywordDelete, b"delete");
map.insert(TokenType::KeywordDo, b"do");
map.insert(TokenType::KeywordElse, b"else");
map.insert(TokenType::KeywordEnum, b"enum");
map.insert(TokenType::KeywordExport, b"export");
map.insert(TokenType::KeywordExtends, b"extends");
map.insert(TokenType::KeywordFinally, b"finally");
map.insert(TokenType::KeywordFor, b"for");
map.insert(TokenType::KeywordFrom, b"from");
map.insert(TokenType::KeywordFunction, b"function");
map.insert(TokenType::KeywordGet, b"get");
map.insert(TokenType::KeywordIf, b"if");
map.insert(TokenType::KeywordImport, b"import");
map.insert(TokenType::KeywordIn, b"in");
map.insert(TokenType::KeywordInstanceof, b"instanceof");
map.insert(TokenType::KeywordLet, b"let");
map.insert(TokenType::KeywordNew, b"new");
map.insert(TokenType::KeywordOf, b"of");
map.insert(TokenType::KeywordReturn, b"return");
map.insert(TokenType::KeywordSet, b"set");
map.insert(TokenType::KeywordStatic, b"static");
map.insert(TokenType::KeywordSuper, b"super");
map.insert(TokenType::KeywordSwitch, b"switch");
map.insert(TokenType::KeywordThis, b"this");
map.insert(TokenType::KeywordThrow, b"throw");
map.insert(TokenType::KeywordTry, b"try");
map.insert(TokenType::KeywordTypeof, b"typeof");
map.insert(TokenType::KeywordVar, b"var");
map.insert(TokenType::KeywordVoid, b"void");
map.insert(TokenType::KeywordWhile, b"while");
map.insert(TokenType::KeywordWith, b"with");
map.insert(TokenType::KeywordYield, b"yield");
map.insert(TokenType::LiteralFalse, b"false");
map.insert(TokenType::LiteralNull, b"null");
map.insert(TokenType::LiteralTrue, b"true");
map
};
pub static ref KEYWORD_STRS: HashMap<&'static [u8], usize> = {
HashMap::<&'static [u8], usize>::from_iter(KEYWORDS_MAPPING.values().enumerate().map(|(i, v)| (*v, i)))
};
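// All byte sequences that can begin a token. Each pattern's index doubles as
// its pattern ID in MATCHER, so a match maps back to the TokenType that
// drives further lexing in lex_next.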
static ref PATTERNS: Vec<(TokenType, &'static [u8])> = {
let mut patterns: Vec<(TokenType, &'static [u8])> = Vec::new();
for (&k, &v) in OPERATORS_MAPPING.iter() {
patterns.push((k, v));
};
for (&k, &v) in KEYWORDS_MAPPING.iter() {
patterns.push((k, v));
};
patterns.push((TokenType::ChevronLeftSlash, b"</"));
patterns.push((TokenType::CommentMultiple, b"/*"));
patterns.push((TokenType::CommentSingle, b"//"));
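// Each identifier-start byte becomes its own single-byte pattern.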
for c in ID_START_CHARSTR.chunks(1) {
patterns.push((TokenType::Identifier, c));
};
for c in b"0123456789".chunks(1) {
patterns.push((TokenType::LiteralNumber, c));
};
patterns.push((TokenType::LiteralNumberBin, b"0b"));
patterns.push((TokenType::LiteralNumberBin, b"0B"));
patterns.push((TokenType::LiteralNumberHex, b"0x"));
patterns.push((TokenType::LiteralNumberHex, b"0X"));
patterns.push((TokenType::LiteralNumberOct, b"0o"));
patterns.push((TokenType::LiteralNumberOct, b"0O"));
for c in b".0.1.2.3.4.5.6.7.8.9".chunks(2) {
patterns.push((TokenType::LiteralNumber, c));
};
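// Per the spec's lookahead restriction, `?.` followed by a decimal digit is
// a `?` token (so `a?.5:b` stays a conditional expression). These longer
// patterns win under leftmost-longest matching; lex_next then truncates the
// match back to one byte.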
for c in b"?.0?.1?.2?.3?.4?.5?.6?.7?.8?.9".chunks(3) {
patterns.push((TokenType::Question, c));
};
patterns.push((TokenType::LiteralString, b"\""));
patterns.push((TokenType::LiteralString, b"'"));
patterns.push((TokenType::LiteralTemplatePartString, b"`"));
patterns
};
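// Anchored, leftmost-longest matcher over PATTERNS; a match's pattern ID
// indexes back into PATTERNS.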
static ref MATCHER: AhoCorasick = AhoCorasickBuilder::new()
.anchored(true)
.dfa(true)
.match_kind(MatchKind::LeftmostLongest)
.build(PATTERNS.iter().map(|(_, pat)| pat));
static ref COMMENT_END: AhoCorasick = AhoCorasick::new(&[b"*/"]);
}
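// Assumes the lexer is positioned at `/*`; consumes through the closing `*/`.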
fn lex_multiple_comment<'a>(lexer: &mut Lexer<'a>) -> SyntaxResult<'a, ()> {
lexer.skip_expect(2);
lexer.consume(lexer.aho_corasick(&COMMENT_END)?.mat);
Ok(())
}
fn lex_single_comment<'a>(lexer: &mut Lexer<'a>) -> SyntaxResult<'a, ()> {
lexer.skip_expect(2);
lexer.consume(lexer.through_char_or_end(b'\n'));
Ok(())
}
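// The first byte has already been validated as an identifier start (or is a
// UTF-8 lead byte). Any non-ASCII byte is accepted as a continuation without
// full Unicode ID_Continue validation.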
fn lex_identifier<'a>(
lexer: &mut Lexer<'a>,
mode: LexMode,
preceded_by_line_terminator: bool,
) -> SyntaxResult<'a, Token<'a>> {
let cp = lexer.checkpoint();
lexer.skip_expect(1);
loop {
lexer.consume(lexer.while_chars(if mode == LexMode::JsxTag {
&ID_CONTINUE_JSX
} else {
&ID_CONTINUE
}));
if lexer.peek_or_eof(0).filter(|c| !c.is_ascii()).is_none() {
break;
};
lexer.skip_expect(1);
}
Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::Identifier,
preceded_by_line_terminator,
))
}
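// Lexes a decimal number or BigInt: integer digits, then either an `n`
// suffix (BigInt) or optional fraction and exponent parts.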
fn lex_bigint_or_number<'a>(
lexer: &mut Lexer<'a>,
preceded_by_line_terminator: bool,
) -> SyntaxResult<'a, Token<'a>> {
let cp = lexer.checkpoint();
lexer.consume(lexer.while_chars(&DIGIT));
if !lexer.consume(lexer.if_char(b'n')).is_empty() {
return Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::LiteralBigInt,
preceded_by_line_terminator,
));
}
lexer.consume(lexer.if_char(b'.'));
lexer.consume(lexer.while_chars(&DIGIT));
if lexer
.peek_or_eof(0)
.filter(|&c| c == b'e' || c == b'E')
.is_some()
{
lexer.skip_expect(1);
match lexer.peek(0)? {
b'+' | b'-' => lexer.skip_expect(1),
_ => {}
};
lexer.consume(lexer.while_chars(&DIGIT));
}
Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::LiteralNumber,
preceded_by_line_terminator,
))
}
fn lex_bigint_or_number_bin<'a>(
lexer: &mut Lexer<'a>,
preceded_by_line_terminator: bool,
) -> SyntaxResult<'a, Token<'a>> {
let cp = lexer.checkpoint();
lexer.skip_expect(2);
lexer.consume(lexer.while_chars(&DIGIT_BIN));
if !lexer.consume(lexer.if_char(b'n')).is_empty() {
return Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::LiteralBigInt,
preceded_by_line_terminator,
));
}
Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::LiteralNumber,
preceded_by_line_terminator,
))
}
fn lex_bigint_or_number_hex<'a>(
lexer: &mut Lexer<'a>,
preceded_by_line_terminator: bool,
) -> SyntaxResult<'a, Token<'a>> {
let cp = lexer.checkpoint();
lexer.skip_expect(2);
lexer.consume(lexer.while_chars(&DIGIT_HEX));
if !lexer.consume(lexer.if_char(b'n')).is_empty() {
return Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::LiteralBigInt,
preceded_by_line_terminator,
));
}
Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::LiteralNumber,
preceded_by_line_terminator,
))
}
fn lex_bigint_or_number_oct<'a>(
lexer: &mut Lexer<'a>,
preceded_by_line_terminator: bool,
) -> SyntaxResult<'a, Token<'a>> {
let cp = lexer.checkpoint();
lexer.skip_expect(2);
lexer.consume(lexer.while_chars(&DIGIT_OCT));
if !lexer.consume(lexer.if_char(b'n')).is_empty() {
return Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::LiteralBigInt,
preceded_by_line_terminator,
));
}
Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::LiteralNumber,
preceded_by_line_terminator,
))
}
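// Lexes a private member like `#name`; the byte after `#` must be a valid
// identifier start.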
fn lex_private_member<'a>(
lexer: &mut Lexer<'a>,
preceded_by_line_terminator: bool,
) -> SyntaxResult<'a, Token<'a>> {
let cp = lexer.checkpoint();
lexer.skip_expect(1);
if !ID_START.has(lexer.peek(0)?) {
return Err(lexer.error(SyntaxErrorType::ExpectedSyntax("private member")));
};
lexer.skip_expect(1);
loop {
lexer.consume(lexer.while_chars(&ID_CONTINUE));
if lexer.peek_or_eof(0).filter(|c| !c.is_ascii()).is_none() {
break;
};
lexer.skip_expect(1);
}
Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::PrivateMember,
preceded_by_line_terminator,
))
}
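// Assumes SlashIsRegex mode, positioned at the opening `/`. Tracks `[...]`
// character classes because an unescaped `/` inside a class does not
// terminate the regex; trailing flags are consumed as ID_CONTINUE bytes.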
fn lex_regex<'a>(
lexer: &mut Lexer<'a>,
preceded_by_line_terminator: bool,
) -> SyntaxResult<'a, Token<'a>> {
let cp = lexer.checkpoint();
lexer.consume(lexer.n(1)?);
let mut in_charset = false;
loop {
match lexer.consume_next()? {
b'\\' => {
// The backslash was just consumed, so the escaped character is at offset 0;
// a line terminator is invalid in a regex even when escaped.
if lexer.peek(0)? == b'\n' {
return Err(lexer.error(SyntaxErrorType::LineTerminatorInRegex));
};
lexer.skip_expect(1);
}
b'/' if !in_charset => {
break;
}
b'[' => {
in_charset = true;
}
b']' if in_charset => {
in_charset = false;
}
b'\n' => {
return Err(lexer.error(SyntaxErrorType::LineTerminatorInRegex));
}
_ => {}
};
}
lexer.consume(lexer.while_chars(&ID_CONTINUE));
Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::LiteralRegex,
preceded_by_line_terminator,
))
}
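// Lexes a single- or double-quoted string; the opening quote byte determines
// the closing one.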
fn lex_string<'a>(
lexer: &mut Lexer<'a>,
preceded_by_line_terminator: bool,
) -> SyntaxResult<'a, Token<'a>> {
let cp = lexer.checkpoint();
let quote = lexer.peek(0)?;
lexer.skip_expect(1);
loop {
lexer.consume(lexer.while_not_3_chars(b'\\', b'\n', quote));
match lexer.peek(0)? {
b'\\' => {
lexer.consume(lexer.n(2)?);
}
b'\n' => {
return Err(lexer.error(SyntaxErrorType::LineTerminatorInString));
}
c if c == quote => {
lexer.skip_expect(1);
break;
}
_ => unreachable!(),
};
}
Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::LiteralString,
preceded_by_line_terminator,
))
}
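// Lexes a template literal part, stopping after the closing backtick or at a
// `${` substitution; public so the parser can resume lexing after the
// substitution's closing `}`.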
pub fn lex_template_string_continue<'a>(
lexer: &mut Lexer<'a>,
preceded_by_line_terminator: bool,
) -> SyntaxResult<'a, Token<'a>> {
let cp = lexer.checkpoint();
let mut ended = false;
let loc = loop {
lexer.consume(lexer.while_not_3_chars(b'\\', b'`', b'$'));
match lexer.peek(0)? {
b'\\' => {
lexer.consume(lexer.n(2)?);
}
b'`' => {
ended = true;
let loc = lexer.since_checkpoint(cp);
lexer.skip_expect(1);
break loc;
}
b'$' => {
if lexer.peek(1)? == b'{' {
let loc = lexer.since_checkpoint(cp);
lexer.skip_expect(2);
break loc;
} else {
lexer.skip_expect(1);
}
}
_ => unreachable!(),
};
};
Ok(Token::new(
loc,
if ended {
TokenType::LiteralTemplatePartStringEnd
} else {
TokenType::LiteralTemplatePartString
},
preceded_by_line_terminator,
))
}
fn lex_template<'a>(
lexer: &mut Lexer<'a>,
preceded_by_line_terminator: bool,
) -> SyntaxResult<'a, Token<'a>> {
lexer.skip_expect(1);
lex_template_string_continue(lexer, preceded_by_line_terminator)
}
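/// Skips whitespace and comments, then lexes the next token according to
/// `mode`. A minimal usage sketch (the driver code is hypothetical; error
/// handling and token inspection elided):
///
/// ```ignore
/// let mut lexer = Lexer::new(b"let x = 1;");
/// let first = lex_next(&mut lexer, LexMode::Standard)?;
/// ```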
pub fn lex_next<'a>(lexer: &mut Lexer<'a>, mode: LexMode) -> SyntaxResult<'a, Token<'a>> {
let mut preceded_by_line_terminator = false;
loop {
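// JSX text runs until the next `{` substitution or `<` tag and may be empty.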
if mode == LexMode::JsxTextContent {
let cp = lexer.checkpoint();
lexer.consume(lexer.while_not_2_chars(b'{', b'<'));
return Ok(Token::new(
lexer.since_checkpoint(cp),
TokenType::JsxTextContent,
false,
));
};
let ws = lexer.while_chars(&WHITESPACE);
lexer.consume(ws);
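// Index<Match> looks backwards from `next`, so `lexer[ws]` is exactly the
// whitespace just consumed.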
preceded_by_line_terminator =
preceded_by_line_terminator || memchr(b'\n', &lexer[ws]).is_some();
if lexer.at_end() {
return Ok(Token::new(
lexer.eof_range(),
TokenType::EOF,
preceded_by_line_terminator,
));
};
// A UTF-8 lead byte (0b110xxxxx, 0b1110xxxx, or 0b11110xxx) can only begin
// an identifier, since every other token consists solely of ASCII bytes.
let is_utf8_start = lexer
.peek_or_eof(0)
.map_or(false, |c| c >> 5 == 0b110 || c >> 4 == 0b1110 || c >> 3 == 0b11110);
if is_utf8_start {
return lex_identifier(lexer, mode, preceded_by_line_terminator);
};
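// Find the longest known token-start pattern; PATTERNS[id].0 determines how
// to finish lexing it.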
let AhoCorasickMatch { id, mut mat } = lexer.aho_corasick(&MATCHER)?;
match PATTERNS[id].0 {
TokenType::CommentMultiple => lex_multiple_comment(lexer)?,
TokenType::CommentSingle => {
preceded_by_line_terminator = true;
lex_single_comment(lexer)?
}
pat => {
return match pat {
TokenType::Identifier => lex_identifier(lexer, mode, preceded_by_line_terminator),
TokenType::LiteralNumber => lex_bigint_or_number(lexer, preceded_by_line_terminator),
TokenType::LiteralNumberBin => {
lex_bigint_or_number_bin(lexer, preceded_by_line_terminator)
}
TokenType::LiteralNumberHex => {
lex_bigint_or_number_hex(lexer, preceded_by_line_terminator)
}
TokenType::LiteralNumberOct => {
lex_bigint_or_number_oct(lexer, preceded_by_line_terminator)
}
TokenType::LiteralString => lex_string(lexer, preceded_by_line_terminator),
TokenType::LiteralTemplatePartString => lex_template(lexer, preceded_by_line_terminator),
TokenType::PrivateMember => lex_private_member(lexer, preceded_by_line_terminator),
TokenType::Slash | TokenType::SlashEquals if mode == LexMode::SlashIsRegex => {
lex_regex(lexer, preceded_by_line_terminator)
}
typ => {
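// A `?.<digit>` match is truncated back to `?` (see PATTERNS); a keyword
// followed by further identifier bytes is really an identifier (e.g. `lets`).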
if typ == TokenType::Question && mat.len() != 1 {
mat = mat.prefix(1);
} else if KEYWORDS_MAPPING.contains_key(&typ)
&& lexer
.peek_or_eof(mat.len())
.filter(|c| ID_CONTINUE.has(*c))
.is_some()
{
return lex_identifier(lexer, mode, preceded_by_line_terminator);
};
let loc = lexer.range(mat);
lexer.consume(mat);
Ok(Token::new(loc, typ, preceded_by_line_terminator))
}
};
}
};
}
}