pub struct Tokenizer<T: Grinder> { /* private fields */ }
Expand description
A grinder that combines character bundles into lexical tokens. This is the last stage of lexical analysis.
Implementations§
Trait Implementations§
impl<T> Grinder for Tokenizer<T>
type Item = Option<Spanned<Token>>
type Error = DiagBuilder2
fn emit(&mut self, err: Self::Error)
fn next(&mut self) -> Self::Item
fn vent<F, E>(self, f: F) -> Vent<Self, F, E>
fn unwrap(self) -> Unwrap<Self> where
Self: Sized,
fn map_err<F, E>(self, f: F) -> MapErrGrinder<Self, F, E>
fn peekable(self) -> Peekable<Self> where
Self: Sized,
fn lookaheadable(self) -> Lookahead<Self> where
Self: Sized,
fn filter<F>(self, f: F) -> Filter<Self, F>
Auto Trait Implementations§
impl<T> Freeze for Tokenizer<T> where
T: Freeze,
impl<T> RefUnwindSafe for Tokenizer<T>
impl<T> Send for Tokenizer<T>
impl<T> Sync for Tokenizer<T>
impl<T> Unpin for Tokenizer<T>
impl<T> UnwindSafe for Tokenizer<T>
Blanket Implementations§
impl<T> BorrowMut<T> for T where
T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more