Struct regen::sdk::TokenStream
pub struct TokenStream<'t, T>
where
    T: TokenType,
{ /* private fields */ }
A generic implementation of a stream of tokens with token type T.
The TokenStream does not own the tokens; it returns references to them on demand, and the caller can clone them if needed.
Token streams are used internally to generate the AST. They are not meant to be used independently.
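A minimal sketch of that borrowing relationship, reusing the generated grammar module from the repository example further down (the calls and the 200-entry stack size are taken from that example; everything else is illustrative):

// Sketch: the stream only borrows the tokens produced by the tokenizer.
let grammar_source = std::fs::read_to_string("../regen.grammar").unwrap();
let lex_output = grammar::tokenize(&grammar_source);
// `ts` holds references into `lex_output.tokens`, so the tokenizer output
// must stay alive for as long as the stream.
let mut ts = TokenStream::new(&lex_output.tokens, 200);
let parser = grammar::Parser;
let asts = parser.parse_ast_all(&mut ts).unwrap();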
Implementations
impl<'t, T> TokenStream<'t, T>
where
    T: TokenType,
pub fn new(tokens: &'t [TokenImpl<T>], max_stack_size: usize) -> Self
Create a new TokenStream over the given tokens. max_stack_size bounds the depth of the AST that can be generated from the stream (see the example below).
Examples found in repository
examples/pt_html.rs (line 24)
fn main() {
    // This example parses regen.grammar, and prints html code with prismjs classes
    // This example is run from the `docs` directory
    let grammar_source = fs::read_to_string("../regen.grammar").unwrap();
    // There is a wrapper regen::parse_language_from_grammar that creates
    // a Language object from a grammar string. However, it does not store
    // semantic info, so we can't use that here.
    // Run tokenizer
    let lex_output = grammar::tokenize(&grammar_source);
    // Create token stream
    // 200 is the stack size, meaning the AST can have depth <= 200
    // which is plenty. The default stack size for the CLI is 2048
    let mut ts = TokenStream::new(&lex_output.tokens, 200);
    // Generate AST (need the ASTParser trait)
    let parser = grammar::Parser;
    let asts = parser.parse_ast_all(&mut ts).unwrap(); // error if syntax error
    // collect semantic info so far
    let mut outer_tbs = TokenBlocks::new(&grammar_source);
    lex_output.apply_semantic(&mut outer_tbs);
    asts.iter()
        .for_each(|ast| ast.apply_semantic(&mut outer_tbs, &None));
    // Generate PT, because it fills in additional semantic info
    // This requires a lang builder as a context, but we won't need it
    let mut lang_builder: Box<LangBuilder> = Box::default();
    for ast in &asts {
        // if you don't need semantic info you can use the parse_pt method
        match ast.parse_pt_with_semantic(outer_tbs, lang_builder) {
            ParseTreeResultSemantic::Ok { /*pt*/ ctx, tbs, .. } => {
                outer_tbs = tbs;
                lang_builder = ctx;
            },
            ParseTreeResultSemantic::Err { .. /*pt, ctx, tbs, err*/ } => {
                // should not happen, but you also get the context and semantic info back here
                unreachable!();
            }
        }
    }
    // now we have the semantic info in outer_tbs, we can convert it to HTML
    let code = outer_tbs.get_html(to_prismjs);
    println!("{}", code);
}
pub fn is_exhausted(&self) -> bool
Returns true if there are no tokens left after the current position.
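For instance (a sketch building on the repository example above; whether parse_ast_all can ever leave tokens unconsumed is not stated on this page, so treat this as a sanity check):

// Sketch: sanity-check that parsing consumed the entire token stream.
let _asts = parser.parse_ast_all(&mut ts).unwrap();
assert!(ts.is_exhausted(), "parser left tokens unconsumed");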
pub fn get_guess_err_token(&self) -> Option<&'t TokenImpl<T>>
Get the best guess at which token is causing a syntax error.
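For example (a sketch; printing the token and the error with {:?} assumes Debug implementations, which this page does not document):

// Sketch: surface the suspected offending token when parsing fails.
if let Err(err) = parser.parse_ast_all(&mut ts) {
    match ts.get_guess_err_token() {
        Some(token) => eprintln!("syntax error near {:?}: {:?}", token, err),
        None => eprintln!("syntax error: {:?}", err),
    }
}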
Auto Trait Implementations
impl<'t, T> RefUnwindSafe for TokenStream<'t, T> where T: RefUnwindSafe
impl<'t, T> Send for TokenStream<'t, T> where T: Sync
impl<'t, T> Sync for TokenStream<'t, T> where T: Sync
impl<'t, T> Unpin for TokenStream<'t, T>
impl<'t, T> UnwindSafe for TokenStream<'t, T> where T: RefUnwindSafe
Blanket Implementations
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.