Struct moore_vhdl_syntax::lexer::tokenizer::Tokenizer
[−]
[src]
pub struct Tokenizer<T: Grinder> { /* fields omitted */ }
A grinder that combines character bundles into lexical tokens. This is the last stage of lexical analysis.
Methods
impl<T: Grinder> Tokenizer<T> where
T: Grinder<Item = Option<Spanned<Bundle>>, Error = DiagBuilder2>,
[src]
Trait Implementations
impl<T> Grinder for Tokenizer<T> where
T: Grinder<Item = Option<Spanned<Bundle>>, Error = DiagBuilder2>,
[src]
type Item = Option<Spanned<Token>>
type Error = DiagBuilder2
fn emit(&mut self, err: Self::Error)
[src]
fn next(&mut self) -> Self::Item
[src]
fn vent<F, E>(self, f: F) -> Vent<Self, F, E> where
F: Fn(E) -> (),
[src]
fn unwrap(self) -> Unwrap<Self>
[src]
fn map_err<F, E>(self, f: F) -> MapErrGrinder<Self, F, E> where
F: Fn(E) -> Self::Error,
[src]
fn peekable(self) -> Peekable<Self>
[src]
fn lookaheadable(self) -> Lookahead<Self>
[src]
fn filter<F>(self, f: F) -> Filter<Self, F> where
F: Fn(&<Self::Item as Chisel>::Value) -> bool,
[src]