use crate::{syntax_node::GreenNode, SyntaxError, SyntaxKind};
#[macro_use]
mod token_set;
mod event;
mod grammar;
pub mod lexer;
mod parser;
mod text_token_source;
mod text_tree_sink;
pub use lexer::tokenize;
/// An error emitted while parsing; wraps the human-readable message text
/// that is reported through [`TreeSink::error`].
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ParseError(pub String);
/// Abstract stream of lexed tokens that the parser pulls from.
///
/// Implemented by `text_token_source::TextTokenSource` for plain-text input.
trait TokenSource {
/// Returns the token at the current position.
fn current(&self) -> Token;
/// Returns the token `n` positions ahead of the current one.
fn lookahead_nth(&self, n: usize) -> Token;
/// Advances the stream to the next token.
fn bump(&mut self);
/// Reports whether `kw` matches at the current position — presumably
/// compares the current token's text; confirm against implementations.
fn is_keyword(&self, kw: &str) -> bool;
}
/// A single token as seen by the parser: its kind plus adjacency info.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Token {
/// What kind of token this is.
pub kind: SyntaxKind,
/// Whether the next token starts immediately after this one
/// (NOTE(review): presumably "no trivia in between", enabling composite
/// tokens such as `>>` — confirm against the lexer).
pub is_jointed_to_next: bool,
}
/// Abstract consumer of the parser's output, used to build a syntax tree.
///
/// Implemented by `text_tree_sink::TextTreeSink`; driven by [`event::process`].
pub trait TreeSink {
/// Adds a token of `kind` to the current branch. `n_tokens` raw tokens
/// are consumed for it — presumably >1 when several raw tokens are glued
/// into one (e.g. `>>`); confirm against `event::process`.
fn token(&mut self, kind: SyntaxKind, n_tokens: u8);
/// Opens a new branch node of the given kind.
fn start_node(&mut self, kind: SyntaxKind);
/// Closes the most recently opened branch node.
fn finish_node(&mut self);
/// Records a parse error at the current position.
fn error(&mut self, error: ParseError);
}
/// Parses `text` into a green tree, collecting any syntax errors
/// encountered along the way.
pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
    let raw_tokens = tokenize(text);
    let mut source = text_token_source::TextTokenSource::new(text, &raw_tokens);
    let mut sink = text_tree_sink::TextTreeSink::new(text, &raw_tokens);
    parse(&mut source, &mut sink);
    sink.finish()
}
/// Runs the grammar entry point `f` on a fresh `Parser` over
/// `token_source`, then replays the recorded events into `tree_sink`.
fn parse_from_tokens<F>(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F)
where
    F: FnOnce(&mut parser::Parser),
{
    let mut parser = parser::Parser::new(token_source);
    f(&mut parser);
    event::process(tree_sink, parser.finish());
}
/// Parses a whole source file: drives the `root` grammar rule over
/// `token_source` and feeds the result to `tree_sink`.
fn parse(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
    parse_from_tokens(token_source, tree_sink, |p| grammar::root(p));
}