use super::internal::LexerInternal;
use super::Logos;
use crate::source::{self, Source};
use core::fmt::{self, Debug};
use core::mem::ManuallyDrop;
use core::ops::{Deref, DerefMut};
/// Byte range in the source.
pub type Span = core::ops::Range<usize>;
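/// A `Lexer` reads tokens of a type implementing the [`Logos`] trait out of a
/// [`Source`], keeping track of the byte span of the most recent token.
///
/// A minimal usage sketch; the `Token` enum below is hypothetical and assumes
/// the crate's `Logos` derive macro with its `#[token]`, `#[regex]` and
/// `#[logos(...)]` attributes, none of which are defined in this module:
///
/// ```rust
/// use logos::Logos;
///
/// #[derive(Logos, Debug, PartialEq)]
/// #[logos(skip r"[ \t\n\f]+")]
/// enum Token {
///     #[token("fn")]
///     Fn,
///
///     #[regex("[a-zA-Z_][a-zA-Z0-9_]*")]
///     Ident,
/// }
///
/// let mut lexer = Token::lexer("fn main");
///
/// assert_eq!(lexer.next(), Some(Ok(Token::Fn)));
/// assert_eq!(lexer.next(), Some(Ok(Token::Ident)));
/// assert_eq!(lexer.next(), None);
/// ```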
pub struct Lexer<'source, Token: Logos<'source>> {
source: &'source Token::Source,
token: ManuallyDrop<Option<Result<Token, Token::Error>>>,
token_start: usize,
token_end: usize,
/// Extras associated with the `Token`.
pub extras: Token::Extras,
}
impl<'source, Token> Debug for Lexer<'source, Token>
where
Token: Logos<'source>,
Token::Source: Debug,
Token::Extras: Debug,
{
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.debug_map()
.entry(&"source", &self.source)
.entry(&"extras", &self.extras)
.finish()
}
}
impl<'source, Token: Logos<'source>> Lexer<'source, Token> {
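/// Create a new `Lexer` over the given source, using [`Default`] extras.
///
/// A small sketch, assuming a hypothetical `Token` derived with the crate's
/// `Logos` macro (not defined in this module):
///
/// ```rust
/// use logos::{Lexer, Logos};
///
/// #[derive(Logos, Debug, PartialEq)]
/// enum Token {
///     #[token("+")]
///     Plus,
/// }
///
/// let mut lexer: Lexer<Token> = Lexer::new("+");
///
/// assert_eq!(lexer.next(), Some(Ok(Token::Plus)));
/// assert_eq!(lexer.next(), None);
/// ```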
pub fn new(source: &'source Token::Source) -> Self
where
Token::Extras: Default,
{
Self::with_extras(source, Default::default())
}
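/// Create a new `Lexer` over the given source, using the provided `extras`.
///
/// A sketch assuming a hypothetical `Token` whose derive declares
/// `#[logos(extras = usize)]` and updates the counter from a callback; the
/// attributes come from the derive macro, not from this module:
///
/// ```rust
/// use logos::{Lexer, Logos};
///
/// #[derive(Logos, Debug, PartialEq)]
/// #[logos(extras = usize)]
/// enum Token {
///     #[token("\n", |lex| lex.extras += 1)]
///     Newline,
///
///     #[regex("[a-z]+")]
///     Word,
/// }
///
/// let mut lexer: Lexer<Token> = Lexer::with_extras("hi\nthere", 0);
///
/// assert_eq!(lexer.next(), Some(Ok(Token::Word)));
/// assert_eq!(lexer.next(), Some(Ok(Token::Newline)));
/// assert_eq!(lexer.next(), Some(Ok(Token::Word)));
/// assert_eq!(lexer.extras, 1);
/// ```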
pub fn with_extras(source: &'source Token::Source, extras: Token::Extras) -> Self {
Lexer {
source,
token: ManuallyDrop::new(None),
extras,
token_start: 0,
token_end: 0,
}
}
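/// Get the source the `Lexer` is reading.
///
/// A tiny sketch with a hypothetical derived `Token`:
///
/// ```rust
/// use logos::Logos;
///
/// #[derive(Logos, Debug, PartialEq)]
/// enum Token {
///     #[token("fn")]
///     Fn,
/// }
///
/// let lexer = Token::lexer("fn main");
///
/// assert_eq!(lexer.source(), "fn main");
/// ```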
#[inline]
pub fn source(&self) -> &'source Token::Source {
self.source
}
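/// Wrap the `Lexer` in an iterator that yields `(token, span)` pairs instead
/// of plain tokens.
///
/// A sketch with a hypothetical derived `Token`:
///
/// ```rust
/// use logos::Logos;
///
/// #[derive(Logos, Debug, PartialEq)]
/// #[logos(skip r"[ ]+")]
/// enum Token {
///     #[regex("[a-z]+")]
///     Word,
/// }
///
/// let mut spanned = Token::lexer("hello world").spanned();
///
/// assert_eq!(spanned.next(), Some((Ok(Token::Word), 0..5)));
/// assert_eq!(spanned.next(), Some((Ok(Token::Word), 6..11)));
/// assert_eq!(spanned.next(), None);
/// ```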
#[inline]
pub fn spanned(self) -> SpannedIter<'source, Token> {
SpannedIter { lexer: self }
}
#[inline]
#[doc(hidden)]
#[deprecated(since = "0.11.0", note = "please use `span` instead")]
pub fn range(&self) -> Span {
self.span()
}
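/// Get the byte range of the current token within the source.
///
/// A sketch with a hypothetical derived `Token`:
///
/// ```rust
/// use logos::Logos;
///
/// #[derive(Logos, Debug, PartialEq)]
/// enum Token {
///     #[token("abc")]
///     Abc,
/// }
///
/// let mut lexer = Token::lexer("abcabc");
///
/// assert_eq!(lexer.next(), Some(Ok(Token::Abc)));
/// assert_eq!(lexer.span(), 0..3);
///
/// assert_eq!(lexer.next(), Some(Ok(Token::Abc)));
/// assert_eq!(lexer.span(), 3..6);
/// ```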
#[inline]
pub fn span(&self) -> Span {
self.token_start..self.token_end
}
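/// Get the slice of the source covered by the current token's span.
///
/// A sketch with a hypothetical derived `Token`:
///
/// ```rust
/// use logos::Logos;
///
/// #[derive(Logos, Debug, PartialEq)]
/// #[logos(skip r"[ ]+")]
/// enum Token {
///     #[regex("[a-z]+")]
///     Word,
/// }
///
/// let mut lexer = Token::lexer("foo bar");
///
/// assert_eq!(lexer.next(), Some(Ok(Token::Word)));
/// assert_eq!(lexer.slice(), "foo");
///
/// assert_eq!(lexer.next(), Some(Ok(Token::Word)));
/// assert_eq!(lexer.slice(), "bar");
/// ```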
#[inline]
pub fn slice(&self) -> &'source <Token::Source as Source>::Slice {
unsafe { self.source.slice_unchecked(self.span()) }
}
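/// Get everything in the source past the end of the current token, i.e. the
/// input that has not been consumed yet.
///
/// A sketch with a hypothetical derived `Token`:
///
/// ```rust
/// use logos::Logos;
///
/// #[derive(Logos, Debug, PartialEq)]
/// enum Token {
///     #[token("fn")]
///     Fn,
/// }
///
/// let mut lexer = Token::lexer("fn main()");
///
/// assert_eq!(lexer.next(), Some(Ok(Token::Fn)));
/// assert_eq!(lexer.remainder(), " main()");
/// ```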
#[inline]
pub fn remainder(&self) -> &'source <Token::Source as Source>::Slice {
unsafe {
self.source
.slice_unchecked(self.token_end..self.source.len())
}
}
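/// Turn this `Lexer` into a `Lexer` for a different token type over the same
/// `Source`, keeping the current position; the extras are converted with
/// `Into`.
///
/// A sketch with two hypothetical derived token types, switching from one to
/// the other after the opening quote:
///
/// ```rust
/// use logos::{Lexer, Logos};
///
/// #[derive(Logos, Debug, PartialEq)]
/// enum Outer {
///     #[token("\"")]
///     Quote,
/// }
///
/// #[derive(Logos, Debug, PartialEq)]
/// enum Inner {
///     #[regex("[a-z]+")]
///     Word,
///
///     #[token("\"")]
///     Quote,
/// }
///
/// let mut outer = Outer::lexer("\"hello\"");
/// assert_eq!(outer.next(), Some(Ok(Outer::Quote)));
///
/// // Continue lexing the same input with the other token type.
/// let mut inner: Lexer<Inner> = outer.morph();
/// assert_eq!(inner.next(), Some(Ok(Inner::Word)));
/// assert_eq!(inner.slice(), "hello");
/// assert_eq!(inner.next(), Some(Ok(Inner::Quote)));
/// ```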
pub fn morph<Token2>(self) -> Lexer<'source, Token2>
where
Token2: Logos<'source, Source = Token::Source>,
Token::Extras: Into<Token2::Extras>,
{
Lexer {
source: self.source,
token: ManuallyDrop::new(None),
extras: self.extras.into(),
token_start: self.token_start,
token_end: self.token_end,
}
}
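/// Extend the span of the current token by `n` bytes; mainly useful in
/// callbacks that consume extra input by hand. Panics if the new end of the
/// token is not a boundary of the source.
///
/// A sketch with a hypothetical derived `Token` and a hypothetical `parens`
/// callback that swallows everything up to the closing `)`:
///
/// ```rust
/// use logos::{Lexer, Logos};
///
/// // Hypothetical callback: consume the rest of the parenthesized group.
/// fn parens(lex: &mut Lexer<Token>) -> Option<()> {
///     let close = lex.remainder().find(')')?;
///     lex.bump(close + 1);
///     Some(())
/// }
///
/// #[derive(Logos, Debug, PartialEq)]
/// enum Token {
///     #[token("(", parens)]
///     Parens,
/// }
///
/// let mut lexer = Token::lexer("(a b c)");
///
/// assert_eq!(lexer.next(), Some(Ok(Token::Parens)));
/// assert_eq!(lexer.slice(), "(a b c)");
/// assert_eq!(lexer.span(), 0..7);
/// ```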
pub fn bump(&mut self, n: usize) {
self.token_end += n;
assert!(
self.source.is_boundary(self.token_end),
"Invalid Lexer bump",
)
}
}
impl<'source, Token> Clone for Lexer<'source, Token>
where
Token: Logos<'source> + Clone,
Token::Extras: Clone,
{
fn clone(&self) -> Self {
Lexer {
extras: self.extras.clone(),
token: self.token.clone(),
// `source`, `token_start` and `token_end` are all `Copy`.
..*self
}
}
}
impl<'source, Token> Iterator for Lexer<'source, Token>
where
Token: Logos<'source>,
{
type Item = Result<Token, Token::Error>;
#[inline]
fn next(&mut self) -> Option<Result<Token, Token::Error>> {
self.token_start = self.token_end;
Token::lex(self);
// `lex` always writes a fresh value into `self.token` (via `set`, `error`
// or `end`), and we move it out here every time, so the slot never holds
// a stale value and never needs to be dropped separately.
unsafe { ManuallyDrop::take(&mut self.token) }
}
}
/// Iterator that pairs each token with its [`Span`], created by [`Lexer::spanned`].
pub struct SpannedIter<'source, Token: Logos<'source>> {
lexer: Lexer<'source, Token>,
}
impl<'source, Token> Iterator for SpannedIter<'source, Token>
where
Token: Logos<'source>,
{
type Item = (Result<Token, Token::Error>, Span);
fn next(&mut self) -> Option<Self::Item> {
self.lexer.next().map(|token| (token, self.lexer.span()))
}
}
impl<'source, Token> Deref for SpannedIter<'source, Token>
where
Token: Logos<'source>,
{
type Target = Lexer<'source, Token>;
fn deref(&self) -> &Lexer<'source, Token> {
&self.lexer
}
}
impl<'source, Token> DerefMut for SpannedIter<'source, Token>
where
Token: Logos<'source>,
{
fn deref_mut(&mut self) -> &mut Lexer<'source, Token> {
&mut self.lexer
}
}
// `LexerInternal` is the interface driven by the `lex` function that the
// derive macro generates; it is hidden from the public documentation.
#[doc(hidden)]
impl<'source, Token> LexerInternal<'source> for Lexer<'source, Token>
where
Token: Logos<'source>,
{
type Token = Token;
#[inline]
fn read<Chunk>(&self) -> Option<Chunk>
where
Chunk: source::Chunk<'source>,
{
self.source.read(self.token_end)
}
#[inline]
fn read_at<Chunk>(&self, n: usize) -> Option<Chunk>
where
Chunk: source::Chunk<'source>,
{
self.source.read(self.token_end + n)
}
#[inline]
unsafe fn read_unchecked<Chunk>(&self, n: usize) -> Chunk
where
Chunk: source::Chunk<'source>,
{
self.source.read_unchecked(self.token_end + n)
}
#[inline]
fn test<T, F>(&self, test: F) -> bool
where
T: source::Chunk<'source>,
F: FnOnce(T) -> bool,
{
match self.source.read::<T>(self.token_end) {
Some(chunk) => test(chunk),
None => false,
}
}
#[inline]
fn test_at<T, F>(&self, n: usize, test: F) -> bool
where
T: source::Chunk<'source>,
F: FnOnce(T) -> bool,
{
match self.source.read::<T>(self.token_end + n) {
Some(chunk) => test(chunk),
None => false,
}
}
#[inline]
fn bump_unchecked(&mut self, size: usize) {
debug_assert!(
self.token_end + size <= self.source.len(),
"Bumping out of bounds!"
);
self.token_end += size;
}
#[inline]
fn trivia(&mut self) {
// Matched input that should be skipped: discard it by moving the start
// of the next token up to the current position.
self.token_start = self.token_end;
}
#[inline]
fn error(&mut self) {
// Make sure the error span ends on a valid boundary of the source before
// producing the default error.
self.token_end = self.source.find_boundary(self.token_end);
self.token = ManuallyDrop::new(Some(Err(Token::Error::default())));
}
#[inline]
fn end(&mut self) {
self.token = ManuallyDrop::new(None);
}
#[inline]
fn set(
&mut self,
token: Result<
Self::Token,
<<Self as LexerInternal<'source>>::Token as Logos<'source>>::Error,
>,
) {
self.token = ManuallyDrop::new(Some(token));
}
}