Struct antlr_rust::lexer::BaseLexer
Fields
interpreter: Option<LexerATNSimulator>
input: Option<Input>
token_start_char_index: isize
token_start_line: isize
token_start_column: isize
token_type: isize
token: Option<TF::Tok>
channel: isize
mode: usize
text: Option<<TF::Data as ToOwned>::Owned>
    Text of the token if overridden by a user action.
Implementations
impl<'input, T, Input, TF> BaseLexer<'input, T, Input, TF> where
    T: LexerRecog<'input, Self> + 'static,
    Input: CharStream<TF::From>,
    TF: TokenFactory<'input>,
pub fn get_type(&self) -> isize
pub fn get_char_index(&self) -> isize
pub fn get_text<'a>(&'a self) -> Cow<'a, TF::Data> where
    'input: 'a,
Current token text
pub fn set_text(&mut self, _text: <TF::Data as ToOwned>::Owned)
Used from lexer actions to override the text of the token that will be emitted next.
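A minimal sketch of how a custom lexer action might combine get_text and set_text, assuming a token factory whose text data is str (as with the default CommonTokenFactory); the helper name, the Data = str binding, and the module paths are assumptions, not part of this API's documentation:

use antlr_rust::char_stream::CharStream;
use antlr_rust::lexer::{BaseLexer, LexerRecog};
use antlr_rust::token_factory::TokenFactory;

// Hypothetical helper called from a lexer action: read the pending token's
// text and override what BaseLexer will emit next.
fn uppercase_pending_token<'input, T, Input, TF>(
    lexer: &mut BaseLexer<'input, T, Input, TF>,
) where
    T: LexerRecog<'input, BaseLexer<'input, T, Input, TF>> + 'static,
    Input: CharStream<TF::From>,
    TF: TokenFactory<'input, Data = str>, // str-backed text is an assumption
{
    let upper = lexer.get_text().to_uppercase(); // Cow<str> -> owned String
    lexer.set_text(upper);
}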
pub fn add_error_listener(
    &mut self,
    listener: Box<dyn ErrorListener<'input, Self>>
)
Adds an error listener.
pub fn remove_error_listeners(&mut self)
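For example, the default console listener can be reinstalled after clearing the list. A sketch only: CSVLexer stands for any grammar-generated lexer, and ConsoleErrorListener's module path and unit-struct construction are assumptions about the crate layout:

use antlr_rust::error_listener::ConsoleErrorListener;
use antlr_rust::input_stream::InputStream;

fn configure_listeners() {
    // CSVLexer is a hypothetical lexer generated by the ANTLR Rust target.
    let mut lexer = CSVLexer::new(InputStream::new("a,b,c\n"));
    // Drop the listeners installed by default, then register our own.
    lexer.remove_error_listeners();
    lexer.add_error_listener(Box::new(ConsoleErrorListener {}));
}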
pub fn new_base_lexer(
    input: Input,
    interpreter: LexerATNSimulator,
    recog: T,
    factory: &'input TF
) -> Self
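BaseLexer is rarely constructed by hand: a lexer generated by the ANTLR Rust target builds the LexerATNSimulator from its grammar and passes it to new_base_lexer internally. A hedged sketch of typical use through such a generated wrapper, where CSVLexer is a placeholder name and the Token accessor names, TOKEN_EOF constant, and module paths are assumptions:

use antlr_rust::input_stream::InputStream;
use antlr_rust::token::{Token, TOKEN_EOF};
use antlr_rust::token_source::TokenSource;

fn dump_tokens() {
    // The generated CSVLexer::new calls BaseLexer::new_base_lexer under the hood.
    let mut lexer = CSVLexer::new(InputStream::new("a,b,c\n"));
    // BaseLexer implements TokenSource, so tokens are pulled one at a time.
    loop {
        let tok = lexer.next_token();
        if tok.get_token_type() == TOKEN_EOF {
            break;
        }
        println!("type {} at line {}", tok.get_token_type(), tok.get_line());
    }
}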
Trait Implementations
impl<'input, T, Input, TF> Lexer<'input> for BaseLexer<'input, T, Input, TF> where
    T: LexerRecog<'input, Self> + 'static,
    Input: CharStream<TF::From>,
    TF: TokenFactory<'input>,
fn set_channel(&mut self, v: isize)
fn push_mode(&mut self, m: usize)
fn pop_mode(&mut self) -> Option<usize>
fn set_type(&mut self, t: isize)
fn set_mode(&mut self, m: usize)
fn more(&mut self)
fn skip(&mut self)
fn reset(&mut self)
fn get_interpreter(&self) -> Option<&LexerATNSimulator>
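These are the hooks that grammar lexer commands compile down to: `-> skip` becomes a call to skip, `-> channel(...)` to set_channel, and `-> pushMode(...)` / `-> popMode` to push_mode / pop_mode. A sketch of an equivalent hand-written action helper; the channel value, mode index, and module paths are assumptions standing in for grammar-defined constants:

use antlr_rust::char_stream::CharStream;
use antlr_rust::lexer::{BaseLexer, Lexer, LexerRecog};
use antlr_rust::token_factory::TokenFactory;

// Hypothetical action: hide the pending token and enter a string sub-lexer mode.
fn enter_string_mode<'input, T, Input, TF>(lexer: &mut BaseLexer<'input, T, Input, TF>)
where
    T: LexerRecog<'input, BaseLexer<'input, T, Input, TF>> + 'static,
    Input: CharStream<TF::From>,
    TF: TokenFactory<'input>,
{
    const HIDDEN_CHANNEL: isize = 1; // ANTLR's hidden channel; value assumed here
    const STRING_MODE: usize = 1;    // index of a mode declared in the grammar (assumed)
    lexer.set_channel(HIDDEN_CHANNEL);
    lexer.push_mode(STRING_MODE);
}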
impl<'input, T, Input, TF> Recognizer<'input> for BaseLexer<'input, T, Input, TF> where
    T: LexerRecog<'input, Self> + 'static,
    Input: CharStream<TF::From>,
    TF: TokenFactory<'input>,
type Node = EmptyContextType<'input, TF>
fn sempred(
    &mut self,
    _localctx: Option<&<Self::Node as ParserNodeType<'input>>::Type>,
    rule_index: isize,
    action_index: isize
) -> bool
fn action(
    &mut self,
    _localctx: Option<&<Self::Node as ParserNodeType<'input>>::Type>,
    rule_index: isize,
    action_index: isize
)
fn get_rule_names(&self) -> &[&str]
fn get_vocabulary(&self) -> &dyn Vocabulary
fn get_grammar_file_name(&self) -> &str
fn get_atn(&self) -> &ATN
impl<'input, T, Input, TF> TokenAware<'input> for BaseLexer<'input, T, Input, TF> where
    T: LexerRecog<'input, Self> + 'static,
    Input: CharStream<TF::From>,
    TF: TokenFactory<'input>,
type TF = TF
impl<'input, T, Input, TF> TokenSource<'input> for BaseLexer<'input, T, Input, TF> where
    T: LexerRecog<'input, Self> + 'static,
    Input: CharStream<TF::From>,
    TF: TokenFactory<'input>,
fn next_token(&mut self) -> <Self::TF as TokenFactory<'input>>::Tok
fn get_line(&self) -> isize
fn get_char_position_in_line(&self) -> isize
fn get_input_stream(&mut self) -> Option<&mut dyn IntStream>
fn get_source_name(&self) -> String
fn get_token_factory(&self) -> &'input TF
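Because BaseLexer is a TokenSource, it can feed a buffered token stream directly, which is how generated parsers consume it. A brief sketch; CSVLexer and CSVParser are hypothetical generated types, csvFile stands in for the grammar's start rule, and the module paths are assumptions:

use antlr_rust::common_token_stream::CommonTokenStream;
use antlr_rust::input_stream::InputStream;

fn parse() {
    let lexer = CSVLexer::new(InputStream::new("a,b,c\n"));
    // The token stream pulls tokens lazily from the lexer via next_token.
    let tokens = CommonTokenStream::new(lexer);
    let mut parser = CSVParser::new(tokens);
    let _tree = parser.csvFile(); // hypothetical start-rule method of the generated parser
}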
Auto Trait Implementations
impl<'input, T, Input, TF = CommonTokenFactory> !RefUnwindSafe for BaseLexer<'input, T, Input, TF>
impl<'input, T, Input, TF = CommonTokenFactory> !Send for BaseLexer<'input, T, Input, TF>
impl<'input, T, Input, TF = CommonTokenFactory> !Sync for BaseLexer<'input, T, Input, TF>
impl<'input, T, Input, TF> Unpin for BaseLexer<'input, T, Input, TF> where
    Input: Unpin,
    T: Unpin,
    <<TF as TokenFactory<'input>>::Data as ToOwned>::Owned: Unpin,
    <TF as TokenFactory<'input>>::Tok: Unpin,
impl<'input, T, Input, TF = CommonTokenFactory> !UnwindSafe for BaseLexer<'input, T, Input, TF>
Blanket Implementations
impl<T> Any for T where
    T: 'static + ?Sized,
impl<T> Borrow<T> for T where
    T: ?Sized,
impl<T> BorrowMut<T> for T where
    T: ?Sized,
pub fn borrow_mut(&mut self) -> &mut T
impl<T> From<T> for T
impl<T, U> Into<U> for T where
    U: From<T>,
impl<T> NodeText for T
impl<'input, T> NodeText for T where
    T: CustomRuleContext<'input>,
impl<T, U> TryFrom<U> for T where
    U: Into<T>,
type Error = Infallible
The type returned in the event of a conversion error.
pub fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>
impl<T, U> TryInto<U> for T where
    U: TryFrom<T>,