pub use crate::ast_impl::{Decl, Located, SurfaceExpr};
pub use crate::error_impl::{ParseError, ParseErrorKind};
pub use crate::lexer::Lexer;
pub use crate::parser_impl::Parser;
pub use crate::tokens::{Span, Token, TokenKind};
use super::functions::*;
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone)]
pub struct ParseErrorSimple {
    /// Byte offset in the source where the error was detected.
    pub pos: usize,
    /// Human-readable description of the problem.
    pub message: String,
    /// True when the parser continued past this error.
    pub recovered: bool,
}
impl ParseErrorSimple {
    /// Builds a fresh, non-recovered error at `pos` with the given message.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(pos: usize, msg: impl Into<String>) -> Self {
        let message = msg.into();
        Self {
            pos,
            message,
            recovered: false,
        }
    }
    /// Marks this error as recovered and hands it back (builder style).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn recovered(mut self) -> Self {
        self.recovered = true;
        self
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct CheckpointStack {
    /// LIFO store of saved parser checkpoints.
    pub(super) stack: Vec<ParseCheckpoint>,
}
impl CheckpointStack {
    /// Creates an empty checkpoint stack.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self { stack: Vec::new() }
    }
    /// Saves `cp` as the newest checkpoint.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn push(&mut self, cp: ParseCheckpoint) {
        self.stack.push(cp);
    }
    /// Removes and returns the newest checkpoint, if any.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn pop(&mut self) -> Option<ParseCheckpoint> {
        self.stack.pop()
    }
    /// Borrows the newest checkpoint without removing it.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn peek(&self) -> Option<&ParseCheckpoint> {
        self.stack.last()
    }
    /// Number of checkpoints currently saved.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn depth(&self) -> usize {
        self.stack.len()
    }
    /// True when no checkpoint is saved.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.stack.is_empty()
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone)]
pub struct ParseAmbiguity {
    /// Position in the token stream where alternatives diverged.
    pub position: usize,
    /// The candidate interpretations at this point.
    pub alternatives: Vec<String>,
    /// The alternative eventually chosen, once resolution happens.
    pub resolved_to: Option<String>,
}
impl ParseAmbiguity {
    /// Records an unresolved ambiguity at `pos` with the given alternatives.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(pos: usize, alternatives: Vec<String>) -> Self {
        Self {
            position: pos,
            alternatives,
            resolved_to: None,
        }
    }
    /// Settles the ambiguity on `choice`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn resolve(&mut self, choice: impl Into<String>) {
        self.resolved_to = Some(choice.into());
    }
    /// True once a choice has been recorded.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_resolved(&self) -> bool {
        self.resolved_to.is_some()
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub struct ParseCheckpoint {
    /// Cursor position at the time of the snapshot.
    pub position: usize,
    /// Scope depth at the time of the snapshot.
    pub depth: usize,
    /// Error count at the time of the snapshot.
    pub error_count: usize,
}
impl ParseCheckpoint {
    /// Snapshots the cursor's position/depth together with the current error count.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn save(cursor: &TokenCursor, errors: usize) -> Self {
        Self {
            position: cursor.position,
            depth: cursor.depth,
            error_count: errors,
        }
    }
    /// Rewinds `cursor` to this snapshot.
    ///
    /// Only position and depth are applied; `error_count` is kept for the
    /// caller to reconcile separately.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn restore(&self, cursor: &mut TokenCursor) {
        cursor.position = self.position;
        cursor.depth = self.depth;
    }
}
/// Cheap identity key for cached parses: an FNV-1a hash plus the source length.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
#[allow(missing_docs)]
pub struct ParseCacheKey {
    /// FNV-1a hash of the source bytes.
    pub hash: u64,
    /// Length of the source in bytes (extra collision guard).
    pub len: usize,
}
impl ParseCacheKey {
    /// Derives the cache key for a source string.
    #[allow(missing_docs)]
    pub fn from_src(src: &str) -> Self {
        let bytes = src.as_bytes();
        Self {
            hash: fnv1a(bytes),
            len: bytes.len(),
        }
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct AmbiguityRegistry {
    /// All ambiguities reported so far, in report order.
    ambiguities: Vec<ParseAmbiguity>,
}
impl AmbiguityRegistry {
    /// Creates an empty registry.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self {
            ambiguities: Vec::new(),
        }
    }
    /// Records one ambiguity.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn report(&mut self, amb: ParseAmbiguity) {
        self.ambiguities.push(amb);
    }
    /// Total number of reported ambiguities.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn count(&self) -> usize {
        self.ambiguities.len()
    }
    /// Number of reported ambiguities that are still unresolved.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn unresolved(&self) -> usize {
        self.ambiguities
            .iter()
            .filter(|amb| !amb.is_resolved())
            .count()
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub struct ParseConfig {
    /// Maximum nesting depth before the parser bails out.
    pub max_depth: usize,
    /// Maximum number of errors to accumulate before giving up.
    pub max_errors: usize,
    /// Whether to attempt error recovery and keep parsing.
    pub recover_from_errors: bool,
    /// Whether to reject constructs outside the strict grammar.
    pub strict_mode: bool,
    #[allow(missing_docs)]
    pub track_whitespace: bool,
    /// Whether `_`-style holes are accepted in expressions.
    pub allow_holes: bool,
}
impl ParseConfig {
    /// The baseline configuration: recovering, non-strict, holes allowed.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn default_config() -> Self {
        Self {
            max_depth: 1000,
            max_errors: 50,
            recover_from_errors: true,
            strict_mode: false,
            track_whitespace: false,
            allow_holes: true,
        }
    }
    /// Baseline config tightened: strict grammar, no error recovery.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn strict() -> Self {
        let mut cfg = Self::default_config();
        cfg.strict_mode = true;
        cfg.recover_from_errors = false;
        cfg
    }
    /// Baseline config relaxed: recovery on, with a higher error budget.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn lenient() -> Self {
        let mut cfg = Self::default_config();
        cfg.strict_mode = false;
        cfg.recover_from_errors = true;
        cfg.max_errors = 200;
        cfg
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Default, Debug, Clone)]
pub struct ParseStatsExt {
    /// Tokens consumed over the whole parse.
    pub tokens_consumed: u64,
    /// AST nodes constructed.
    pub nodes_created: u64,
    /// Times the parser backtracked.
    pub backtrack_count: u64,
    /// Errors encountered.
    pub error_count: u64,
    #[allow(missing_docs)]
    pub max_depth_reached: usize,
    /// Wall-clock parse time in microseconds.
    pub parse_time_us: u64,
}
impl ParseStatsExt {
    /// All-zero statistics.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Fraction of consumed tokens that were not re-consumed via backtracking.
    /// Returns 0.0 when nothing was consumed.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn efficiency(&self) -> f64 {
        match self.tokens_consumed {
            0 => 0.0,
            total => {
                let useful = total.saturating_sub(self.backtrack_count);
                useful as f64 / total as f64
            }
        }
    }
    /// Errors per created node; 0.0 when no nodes were created.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn error_rate(&self) -> f64 {
        match self.nodes_created {
            0 => 0.0,
            nodes => self.error_count as f64 / nodes as f64,
        }
    }
    /// One-line human-readable rollup of all counters.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn summary(&self) -> String {
        format!(
            "tokens={} nodes={} backtracks={} errors={} depth={} time={}us efficiency={:.1}%",
            self.tokens_consumed,
            self.nodes_created,
            self.backtrack_count,
            self.error_count,
            self.max_depth_reached,
            self.parse_time_us,
            self.efficiency() * 100.0,
        )
    }
}
/// One memoized parse attempt: where it ended, whether it succeeded,
/// and a string rendering of its result.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub struct PackratEntry {
/// Position reached after this rule was tried at its start position.
pub end_pos: usize,
/// Whether the memoized attempt succeeded.
pub success: bool,
/// String form of the parse result (used instead of storing the node itself).
pub result_repr: String,
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ParseFuel {
    /// Units of work the parser may still perform.
    pub(super) remaining: usize,
}
impl ParseFuel {
    /// Starts with `fuel` units available.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(fuel: usize) -> Self {
        Self { remaining: fuel }
    }
    /// Tries to spend `amount` units; returns false (spending nothing)
    /// when not enough fuel is left.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn consume(&mut self, amount: usize) -> bool {
        match self.remaining.checked_sub(amount) {
            Some(left) => {
                self.remaining = left;
                true
            }
            None => false,
        }
    }
    /// True while any fuel is left.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn has_fuel(&self) -> bool {
        self.remaining != 0
    }
    /// Units still available.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn remaining(&self) -> usize {
        self.remaining
    }
    /// Adds `amount` units, saturating at `usize::MAX`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn refuel(&mut self, amount: usize) {
        self.remaining = self.remaining.saturating_add(amount);
    }
}
/// A queue of named sources to be parsed together into one session.
#[derive(Debug, Default)]
#[allow(missing_docs)]
pub struct ParseBatch {
    /// `(name, source)` pairs awaiting parsing, in insertion order.
    pub entries: Vec<(String, String)>,
}
impl ParseBatch {
    /// Creates an empty batch.
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Queues a named source text.
    #[allow(missing_docs)]
    pub fn add(&mut self, name: &str, src: &str) {
        self.entries.push((name.to_owned(), src.to_owned()));
    }
    /// Number of queued entries.
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.entries.len()
    }
    /// True when nothing is queued.
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
    /// Parses each queued entry in order, consuming the batch,
    /// and returns the session holding all results.
    #[allow(missing_docs)]
    pub fn execute(self) -> ParseSession {
        let mut session = ParseSession::new();
        for (name, src) in self.entries {
            session.parse_file(&name, &src);
        }
        session
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub struct TokenCursor {
    /// Current index into the token stream.
    pub position: usize,
    /// One past the last valid index.
    pub end: usize,
    /// Current nesting depth of entered scopes.
    pub depth: usize,
}
impl TokenCursor {
    /// A cursor at position 0, depth 0, covering `end` tokens.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(end: usize) -> Self {
        Self {
            position: 0,
            end,
            depth: 0,
        }
    }
    /// Steps forward one token; a no-op at or past the end.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn advance(&mut self) {
        if self.position < self.end {
            self.position += 1;
        }
    }
    /// Steps back one token; a no-op at position 0.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn retreat(&mut self) {
        self.position = self.position.saturating_sub(1);
    }
    /// True once the cursor has reached (or passed) the end.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_at_end(&self) -> bool {
        self.position >= self.end
    }
    /// Tokens left before the end (0 when past it).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn remaining(&self) -> usize {
        self.end.saturating_sub(self.position)
    }
    /// Records entry into a nested scope.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn enter_scope(&mut self) {
        self.depth += 1;
    }
    /// Records exit from a scope; a no-op at depth 0.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn exit_scope(&mut self) {
        self.depth = self.depth.saturating_sub(1);
    }
    /// True when no scope is currently entered.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_at_root(&self) -> bool {
        self.depth == 0
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub enum CombResult<T> {
    /// Successful parse: the value plus the position reached.
    Ok(T, usize),
    /// Failed parse: an error message plus the failure position.
    Err(String, usize),
}
impl<T> CombResult<T> {
    /// True for the `Ok` variant.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_ok(&self) -> bool {
        match self {
            CombResult::Ok(..) => true,
            CombResult::Err(..) => false,
        }
    }
    /// True for the `Err` variant.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_err(&self) -> bool {
        !self.is_ok()
    }
    /// Position carried by either variant.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn position(&self) -> usize {
        match self {
            CombResult::Ok(_, p) => *p,
            CombResult::Err(_, p) => *p,
        }
    }
}
/// Outcome of a lookahead probe over the token stream.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LookaheadResult {
/// The probe matched, consuming the given number of tokens.
Matches(usize),
/// The probe did not match.
NoMatch,
/// More than one alternative could match here.
Ambiguous,
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct FixityRegistry {
    /// Operator spelling -> fixity/precedence.
    entries: std::collections::HashMap<String, Fixity>,
}
impl FixityRegistry {
    /// Builds a registry preloaded with the standard arithmetic,
    /// comparison, and boolean operators.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        let defaults = [
            ("+", Fixity::InfixLeft(65)),
            ("-", Fixity::InfixLeft(65)),
            ("*", Fixity::InfixLeft(70)),
            ("/", Fixity::InfixLeft(70)),
            ("^", Fixity::InfixRight(75)),
            ("=", Fixity::InfixNone(50)),
            ("<", Fixity::InfixNone(50)),
            (">", Fixity::InfixNone(50)),
            ("&&", Fixity::InfixRight(35)),
            ("||", Fixity::InfixRight(30)),
        ];
        let mut entries = std::collections::HashMap::new();
        for (op, fixity) in &defaults {
            entries.insert((*op).to_string(), fixity.clone());
        }
        Self { entries }
    }
    /// Registers (or overrides) the fixity of an operator.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn add(&mut self, op: impl Into<String>, fixity: Fixity) {
        self.entries.insert(op.into(), fixity);
    }
    /// Fixity of `op`, if registered.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn lookup(&self, op: &str) -> Option<&Fixity> {
        self.entries.get(op)
    }
    /// Number of registered operators.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn count(&self) -> usize {
        self.entries.len()
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct RecoveryDecision {
    /// How to recover.
    pub strategy: RecoveryStrategy,
    /// How many tokens to discard (meaningful for `Skip`; 0 otherwise).
    pub tokens_to_skip: usize,
    /// Explanation attached to the decision.
    pub message: String,
}
impl RecoveryDecision {
    /// Shared constructor for the named helpers below.
    fn with(strategy: RecoveryStrategy, tokens_to_skip: usize, message: String) -> Self {
        Self {
            strategy,
            tokens_to_skip,
            message,
        }
    }
    /// Recover by discarding `n` tokens.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn skip(n: usize, msg: impl Into<String>) -> Self {
        Self::with(RecoveryStrategy::Skip, n, msg.into())
    }
    /// Recover by scanning forward to a synchronizing keyword.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn sync(msg: impl Into<String>) -> Self {
        Self::with(RecoveryStrategy::SyncToKeyword, 0, msg.into())
    }
    /// Give up on recovery entirely.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn abandon(msg: impl Into<String>) -> Self {
        Self::with(RecoveryStrategy::Abandon, 0, msg.into())
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ParseResultWithErrors<T> {
    /// Parsed value, when parsing produced one at all.
    pub value: Option<T>,
    /// Errors collected while producing (or failing to produce) the value.
    pub errors: Vec<ParseErrorSimple>,
}
impl<T> ParseResultWithErrors<T> {
    /// A clean success: value present, no errors.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn ok(value: T) -> Self {
        Self {
            value: Some(value),
            errors: Vec::new(),
        }
    }
    /// A total failure carrying a single error.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn err(e: ParseErrorSimple) -> Self {
        Self {
            value: None,
            errors: vec![e],
        }
    }
    /// A recovered success: value present alongside the errors hit on the way.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn ok_with_errors(value: T, errors: Vec<ParseErrorSimple>) -> Self {
        Self {
            value: Some(value),
            errors,
        }
    }
    /// True when a value was produced (errors may still be present).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_ok(&self) -> bool {
        self.value.is_some()
    }
    /// True when at least one error was recorded.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn has_errors(&self) -> bool {
        !self.errors.is_empty()
    }
    /// Number of recorded errors.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn error_count(&self) -> usize {
        self.errors.len()
    }
}
/// Everything produced by parsing a single file: its declarations and errors.
#[derive(Debug)]
#[allow(missing_docs)]
pub struct ParseFileResult {
    /// Name the file was parsed under.
    pub filename: String,
    /// Successfully parsed top-level declarations.
    pub decls: Vec<Located<Decl>>,
    #[allow(missing_docs)]
    pub errors: Vec<ParseError>,
}
impl ParseFileResult {
    /// True when the file parsed without any error.
    #[allow(missing_docs)]
    pub fn is_ok(&self) -> bool {
        self.errors.is_empty()
    }
    /// Number of parsed declarations.
    #[allow(missing_docs)]
    pub fn decl_count(&self) -> usize {
        self.decls.len()
    }
}
/// One recorded rule invocation in a parse trace.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone)]
pub struct TraceEvent {
/// Name of the grammar rule that ran.
pub rule: String,
/// Position where the rule started.
pub start_pos: usize,
/// Position where the rule finished (== start_pos until the rule exits).
pub end_pos: usize,
/// Whether the rule ultimately succeeded.
pub success: bool,
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct PackratTable {
    /// Memo entries keyed by start position, then by rule name.
    ///
    /// Nested maps (rather than a `(usize, String)` composite key) let
    /// `lookup` probe with a borrowed `&str` and avoid allocating a fresh
    /// `String` on every cache probe — the previous layout defeated the
    /// point of memoization by paying an allocation per hit check.
    entries: std::collections::HashMap<usize, std::collections::HashMap<String, PackratEntry>>,
}
impl PackratTable {
    /// Creates an empty memo table.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self {
            entries: std::collections::HashMap::new(),
        }
    }
    /// Returns the memoized result of `rule` at `pos`, if present.
    /// Allocation-free: the rule name is compared as `&str`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn lookup(&self, pos: usize, rule: &str) -> Option<&PackratEntry> {
        self.entries.get(&pos)?.get(rule)
    }
    /// Memoizes `entry` for `rule` at `pos`, overwriting any prior entry.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn store(&mut self, pos: usize, rule: impl Into<String>, entry: PackratEntry) {
        self.entries
            .entry(pos)
            .or_insert_with(std::collections::HashMap::new)
            .insert(rule.into(), entry);
    }
    /// Total number of memoized (position, rule) entries.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn size(&self) -> usize {
        self.entries.values().map(|by_rule| by_rule.len()).sum()
    }
    /// Fraction of memoized entries that recorded a success;
    /// 0.0 for an empty table.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn hit_rate_estimate(&self) -> f64 {
        let total = self.size();
        if total == 0 {
            return 0.0;
        }
        let hits = self
            .entries
            .values()
            .flat_map(|by_rule| by_rule.values())
            .filter(|e| e.success)
            .count();
        hits as f64 / total as f64
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct RecoveryLog {
    /// (position, decision) pairs in the order they were taken.
    entries: Vec<(usize, RecoveryDecision)>,
}
impl RecoveryLog {
    /// Creates an empty log.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self {
            entries: Vec::new(),
        }
    }
    /// Appends one recovery decision taken at `pos`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn record(&mut self, pos: usize, decision: RecoveryDecision) {
        self.entries.push((pos, decision));
    }
    /// Number of recorded decisions.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn count(&self) -> usize {
        self.entries.len()
    }
    /// The strategies used, in recording order.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn strategies_used(&self) -> Vec<RecoveryStrategy> {
        self.entries
            .iter()
            .map(|(_, decision)| decision.strategy)
            .collect()
    }
    /// How many times recovery gave up entirely.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn abandon_count(&self) -> usize {
        self.entries
            .iter()
            .filter(|(_, decision)| matches!(decision.strategy, RecoveryStrategy::Abandon))
            .count()
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ParseFrame {
    /// Name of the grammar rule this frame belongs to.
    pub rule: String,
    /// Position where the rule started.
    pub start_pos: usize,
    /// Nesting depth of this frame.
    pub depth: usize,
    /// Whether this frame is parsing a type.
    pub in_type: bool,
    #[allow(missing_docs)]
    pub in_pattern: bool,
}
impl ParseFrame {
    /// A plain frame: neither type nor pattern context.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(rule: impl Into<String>, pos: usize, depth: usize) -> Self {
        Self {
            rule: rule.into(),
            start_pos: pos,
            depth,
            in_type: false,
            in_pattern: false,
        }
    }
    /// Flags this frame as a type context (builder style).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn for_type(mut self) -> Self {
        self.in_type = true;
        self
    }
    /// Flags this frame as a pattern context (builder style).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn for_pattern(mut self) -> Self {
        self.in_pattern = true;
        self
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SourcePos {
    /// File the position refers to.
    pub file: String,
    /// Zero-based line number.
    pub line: usize,
    /// Zero-based column number.
    pub column: usize,
    /// Absolute byte offset into the file.
    pub byte_offset: usize,
}
impl SourcePos {
    /// A position in `file` at the given zero-based line/column and byte offset.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(file: impl Into<String>, line: usize, col: usize, offset: usize) -> Self {
        Self {
            file: file.into(),
            line,
            column: col,
            byte_offset: offset,
        }
    }
    /// Sentinel position for when no real location is available.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn unknown() -> Self {
        Self::new("<unknown>", 0, 0, 0)
    }
    /// `file:line:column` with one-based line and column for display.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn display(&self) -> String {
        format!("{}:{}:{}", self.file, self.line + 1, self.column + 1)
    }
}
/// Precomputed line-start table for translating byte offsets to line/column.
#[derive(Clone, Debug)]
#[allow(missing_docs)]
pub struct SourceMap {
    /// Byte offset of the first character of each line; always starts with 0.
    pub(super) line_starts: Vec<usize>,
    /// Length of the original source in bytes.
    source_len: usize,
}
impl SourceMap {
    /// Scans `src` once, recording where each line begins.
    #[allow(missing_docs)]
    pub fn new(src: &str) -> Self {
        // Line 0 starts at offset 0; each '\n' starts a new line just after it.
        let line_starts: Vec<usize> = std::iter::once(0)
            .chain(src.match_indices('\n').map(|(i, _)| i + 1))
            .collect();
        Self {
            line_starts,
            source_len: src.len(),
        }
    }
    /// Maps a byte offset to a one-based (line, column) pair.
    #[allow(missing_docs)]
    pub fn offset_to_line_col(&self, offset: usize) -> (u32, u32) {
        // Exact hit: offset is a line start. Miss: the insertion point minus
        // one is the line containing the offset.
        let line = self
            .line_starts
            .binary_search(&offset)
            .unwrap_or_else(|insert_at| insert_at.saturating_sub(1));
        let col = offset - self.line_starts[line];
        ((line + 1) as u32, (col + 1) as u32)
    }
    /// Number of lines in the source (a trailing newline counts a final empty line).
    #[allow(missing_docs)]
    pub fn num_lines(&self) -> usize {
        self.line_starts.len()
    }
    /// Length of the source in bytes.
    #[allow(missing_docs)]
    pub fn source_len(&self) -> usize {
        self.source_len
    }
}
/// Bounded FIFO of lookahead tokens; pushing past capacity drops the oldest.
#[derive(Clone, Debug)]
#[allow(missing_docs)]
pub struct ParseBuffer {
    /// Buffered tokens, oldest at the front.
    tokens: std::collections::VecDeque<Token>,
    /// Capacity bound enforced on push.
    max_lookahead: usize,
}
impl ParseBuffer {
    /// Creates an empty buffer bounded at `max_lookahead` tokens.
    #[allow(missing_docs)]
    pub fn new(max_lookahead: usize) -> Self {
        Self {
            tokens: std::collections::VecDeque::new(),
            max_lookahead,
        }
    }
    /// Appends a token, evicting the oldest one first if the buffer is full.
    #[allow(missing_docs)]
    pub fn push(&mut self, tok: Token) {
        // Deliberately `if`, not `while`: one push admits one token.
        if self.tokens.len() >= self.max_lookahead {
            self.tokens.pop_front();
        }
        self.tokens.push_back(tok);
    }
    /// Borrows the oldest buffered token, if any.
    #[allow(missing_docs)]
    pub fn front(&self) -> Option<&Token> {
        self.tokens.front()
    }
    /// Removes and returns the oldest buffered token, if any.
    #[allow(missing_docs)]
    pub fn pop(&mut self) -> Option<Token> {
        self.tokens.pop_front()
    }
    /// Number of buffered tokens.
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.tokens.len()
    }
    /// True when nothing is buffered.
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.tokens.is_empty()
    }
    /// Discards all buffered tokens.
    #[allow(missing_docs)]
    pub fn clear(&mut self) {
        self.tokens.clear();
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ExpectedSet {
    /// Descriptions of what the parser would have accepted, in report order.
    expected: Vec<String>,
}
impl ExpectedSet {
    /// Creates an empty expectation set.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self {
            expected: Vec::new(),
        }
    }
    /// Records one more acceptable alternative.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn add(&mut self, what: impl Into<String>) {
        self.expected.push(what.into());
    }
    /// Forgets all recorded alternatives.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn clear(&mut self) {
        self.expected.clear();
    }
    /// True when nothing has been recorded.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.expected.is_empty()
    }
    /// Number of recorded alternatives.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn count(&self) -> usize {
        self.expected.len()
    }
    /// Renders an "expected ..." diagnostic listing every alternative.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn to_message(&self) -> String {
        match self.expected.as_slice() {
            [] => "nothing expected".to_string(),
            [only] => format!("expected {}", only),
            [rest @ .., last] => format!("expected {} or {}", rest.join(", "), last),
        }
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Fixity {
    /// Left-associative infix operator with the given precedence.
    InfixLeft(u8),
    /// Right-associative infix operator with the given precedence.
    InfixRight(u8),
    /// Non-associative infix operator with the given precedence.
    InfixNone(u8),
    /// Prefix operator with the given precedence.
    Prefix(u8),
    /// Postfix operator with the given precedence.
    Postfix(u8),
}
impl Fixity {
    /// The precedence carried by any variant.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn precedence(&self) -> u8 {
        match *self {
            Fixity::InfixLeft(p) => p,
            Fixity::InfixRight(p) => p,
            Fixity::InfixNone(p) => p,
            Fixity::Prefix(p) => p,
            Fixity::Postfix(p) => p,
        }
    }
    /// True for any of the three infix variants.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_infix(&self) -> bool {
        match self {
            Fixity::InfixLeft(_) | Fixity::InfixRight(_) | Fixity::InfixNone(_) => true,
            Fixity::Prefix(_) | Fixity::Postfix(_) => false,
        }
    }
    /// True only for right-associative infix operators.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_right_assoc(&self) -> bool {
        matches!(self, Fixity::InfixRight(_))
    }
}
/// Accumulated results of parsing one or more files, plus aggregate stats.
#[derive(Debug, Default)]
#[allow(missing_docs)]
pub struct ParseSession {
/// Names of the files submitted, in submission order.
pub file_names: Vec<String>,
/// Per-file parse results, parallel to `file_names`.
pub results: Vec<ParseFileResult>,
#[allow(missing_docs)]
pub stats: ParseStats,
}
impl ParseSession {
/// Creates an empty session with zeroed statistics.
#[allow(missing_docs)]
pub fn new() -> Self {
Self::default()
}
/// Lexes and parses `src`, recording the outcome under `filename`.
///
/// Declarations are parsed one at a time. An EOF error ends the file
/// normally; any other error is recorded and parsing of this file stops
/// immediately — at most one error per file is collected, no recovery.
#[allow(missing_docs)]
pub fn parse_file(&mut self, filename: &str, src: &str) {
let mut errors = Vec::new();
let tokens = Lexer::new(src).tokenize();
let mut parser = Parser::new(tokens);
let mut decls = Vec::new();
loop {
match parser.parse_decl() {
Ok(d) => decls.push(d),
// NOTE(review): is_eof() presumably signals exhausted input, so the
// loop ends without recording an error — confirm against error_impl.
Err(e) if e.is_eof() => break,
Err(e) => {
errors.push(e);
break;
}
}
}
// Update aggregate counters before decls/errors move into the result.
self.stats.files_parsed += 1;
self.stats.decls_parsed += decls.len() as u64;
self.stats.errors_total += errors.len() as u64;
self.file_names.push(filename.to_string());
self.results.push(ParseFileResult {
filename: filename.to_string(),
decls,
errors,
});
}
/// True when no parsed file produced an error.
#[allow(missing_docs)]
pub fn all_ok(&self) -> bool {
self.results.iter().all(|r| r.is_ok())
}
/// All recorded errors across all files, in file order.
#[allow(missing_docs)]
pub fn all_errors(&self) -> Vec<&ParseError> {
self.results.iter().flat_map(|r| r.errors.iter()).collect()
}
/// Total number of successfully parsed declarations.
#[allow(missing_docs)]
pub fn total_decls(&self) -> usize {
self.results.iter().map(|r| r.decl_count()).sum()
}
/// Number of files submitted to this session.
#[allow(missing_docs)]
pub fn file_count(&self) -> usize {
self.file_names.len()
}
}
/// A token vector with a read position, consumed front to back.
#[derive(Clone, Debug)]
#[allow(missing_docs)]
pub struct TokenStream {
    /// The full token sequence.
    pub(super) tokens: Vec<Token>,
    /// Index of the next token to yield.
    pub(super) pos: usize,
}
impl TokenStream {
    /// Wraps an already-lexed token vector, positioned at the start.
    #[allow(missing_docs)]
    pub fn new(tokens: Vec<Token>) -> Self {
        Self { tokens, pos: 0 }
    }
    /// Lexes `src` and wraps the result.
    #[allow(missing_docs)]
    pub fn from_src(src: &str) -> Self {
        Self::new(Lexer::new(src).tokenize())
    }
    /// Borrows the next token without consuming it.
    #[allow(missing_docs)]
    pub fn peek(&self) -> Option<&Token> {
        self.tokens.get(self.pos)
    }
    /// Yields the next token and advances past it.
    #[allow(clippy::should_implement_trait)]
    #[allow(missing_docs)]
    pub fn next(&mut self) -> Option<&Token> {
        match self.tokens.get(self.pos) {
            Some(tok) => {
                self.pos += 1;
                Some(tok)
            }
            None => None,
        }
    }
    /// True once every token has been consumed.
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.pos >= self.tokens.len()
    }
    /// Tokens not yet consumed.
    #[allow(missing_docs)]
    pub fn remaining(&self) -> usize {
        self.tokens.len().saturating_sub(self.pos)
    }
    /// Total tokens, consumed or not.
    #[allow(missing_docs)]
    pub fn total_len(&self) -> usize {
        self.tokens.len()
    }
    /// Rewinds to the first token.
    #[allow(missing_docs)]
    pub fn reset(&mut self) {
        self.pos = 0;
    }
    /// Borrows all not-yet-consumed tokens without advancing.
    #[allow(missing_docs)]
    pub fn collect_remaining(&self) -> Vec<&Token> {
        self.tokens.iter().skip(self.pos).collect()
    }
}
/// Aggregate counters maintained across a parse session.
#[derive(Clone, Debug, Default)]
#[allow(missing_docs)]
pub struct ParseStats {
    /// Files submitted for parsing.
    pub files_parsed: u64,
    /// Declarations successfully parsed.
    pub decls_parsed: u64,
    #[allow(missing_docs)]
    pub errors_total: u64,
    /// Tokens produced by the lexer.
    pub tokens_lexed: u64,
    /// Source bytes processed.
    pub bytes_processed: u64,
}
impl ParseStats {
    /// All-zero counters.
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Mean declarations per file; 0.0 before any file is parsed.
    #[allow(missing_docs)]
    pub fn avg_decls_per_file(&self) -> f64 {
        match self.files_parsed {
            0 => 0.0,
            files => self.decls_parsed as f64 / files as f64,
        }
    }
    /// Errors per parsed declaration; 0.0 when no declarations were parsed.
    #[allow(missing_docs)]
    pub fn error_rate(&self) -> f64 {
        match self.decls_parsed {
            0 => 0.0,
            decls => self.errors_total as f64 / decls as f64,
        }
    }
    /// True when no error has been counted.
    #[allow(missing_docs)]
    pub fn is_clean(&self) -> bool {
        self.errors_total == 0
    }
}
/// Category of a non-error annotation attached to a parsed span.
#[derive(Clone, Debug, PartialEq, Eq)]
#[allow(missing_docs)]
pub enum AnnotationKind {
/// Neutral informational note.
Info,
/// Marks use of a deprecated construct.
Deprecated,
/// Suggests an alternative spelling or construct.
Suggestion,
}
/// How the parser attempts to continue after an error.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RecoveryStrategy {
/// Discard a fixed number of tokens.
Skip,
/// Pretend the missing token was present.
InsertToken,
/// Scan forward to a synchronizing keyword.
SyncToKeyword,
/// Give up recovering.
Abandon,
}
/// Overall quality grade of a parse, ordered from worst to best.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[allow(missing_docs)]
pub enum ParseQuality {
    /// Errors occurred; output is not trustworthy.
    Failed,
    /// Some output produced despite problems.
    Partial,
    /// Parsed fully but with warnings.
    WithWarnings,
    /// Parsed fully with no diagnostics.
    Clean,
}
impl ParseQuality {
    /// Grades a parse from its error and warning counts.
    ///
    /// NOTE(review): this never yields `Partial` — any error grades as
    /// `Failed`. Confirm whether `Partial` is assigned elsewhere.
    #[allow(missing_docs)]
    pub fn rate(errors: usize, warnings: usize) -> Self {
        match (errors, warnings) {
            (e, _) if e > 0 => ParseQuality::Failed,
            (_, w) if w > 0 => ParseQuality::WithWarnings,
            _ => ParseQuality::Clean,
        }
    }
    /// True for any grade at or above `Partial`.
    #[allow(missing_docs)]
    pub fn is_usable(&self) -> bool {
        *self >= ParseQuality::Partial
    }
}
/// Named list of pipeline stages; execution currently just lexes the source.
#[derive(Debug, Default)]
#[allow(missing_docs)]
pub struct ParsePipeline {
    /// Stage names, in execution order.
    pub stages: Vec<String>,
}
impl ParsePipeline {
    /// Creates a pipeline with no stages.
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Appends a named stage.
    #[allow(missing_docs)]
    pub fn add_stage(&mut self, name: &str) {
        self.stages.push(name.to_owned());
    }
    /// Number of registered stages.
    #[allow(missing_docs)]
    pub fn stage_count(&self) -> usize {
        self.stages.len()
    }
    /// Runs the pipeline over `src`.
    ///
    /// NOTE(review): stage names are currently ignored — this always just
    /// lexes `src` into a token stream.
    #[allow(missing_docs)]
    pub fn execute(&self, src: &str) -> TokenStream {
        TokenStream::from_src(src)
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct DepthLimiter {
    /// Current nesting depth.
    current: usize,
    /// Maximum depth allowed.
    max: usize,
}
impl DepthLimiter {
    /// A limiter at depth 0 with the given ceiling.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(max: usize) -> Self {
        Self { current: 0, max }
    }
    /// Tries to descend one level; returns false (depth unchanged) at the limit.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn enter(&mut self) -> bool {
        if self.current < self.max {
            self.current += 1;
            true
        } else {
            false
        }
    }
    /// Ascends one level; a no-op at depth 0.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn exit(&mut self) {
        self.current = self.current.saturating_sub(1);
    }
    /// Current nesting depth.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn depth(&self) -> usize {
        self.current
    }
    /// True when no further `enter` can succeed.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_at_limit(&self) -> bool {
        self.current >= self.max
    }
}
/// A categorized note attached to a span of source.
#[derive(Clone, Debug)]
#[allow(missing_docs)]
pub struct ParseAnnotation {
    /// What kind of note this is.
    pub kind: AnnotationKind,
    /// The source span the note applies to.
    pub span: Span,
    #[allow(missing_docs)]
    pub message: String,
}
impl ParseAnnotation {
    /// An annotation of arbitrary kind.
    #[allow(missing_docs)]
    pub fn new(kind: AnnotationKind, span: Span, message: &str) -> Self {
        Self {
            kind,
            span,
            message: message.to_owned(),
        }
    }
    /// Shorthand for an `Info` annotation.
    #[allow(missing_docs)]
    pub fn info(span: Span, message: &str) -> Self {
        Self::new(AnnotationKind::Info, span, message)
    }
    /// Shorthand for a `Deprecated` annotation.
    #[allow(missing_docs)]
    pub fn deprecated(span: Span, message: &str) -> Self {
        Self::new(AnnotationKind::Deprecated, span, message)
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct PrattContext {
    /// Minimum operator precedence accepted at this level.
    pub min_prec: u8,
    /// Current recursion depth.
    pub depth: usize,
    /// Depth ceiling before the Pratt loop should abort.
    pub max_depth: usize,
}
impl PrattContext {
    /// A top-level context (depth 0, ceiling 200) with the given minimum precedence.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(min_prec: u8) -> Self {
        Self {
            min_prec,
            depth: 0,
            max_depth: 200,
        }
    }
    /// A child context one level deeper, with a new minimum precedence.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn with_min_prec(&self, p: u8) -> Self {
        Self {
            min_prec: p,
            depth: self.depth + 1,
            max_depth: self.max_depth,
        }
    }
    /// True once the recursion ceiling is reached.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_too_deep(&self) -> bool {
        self.depth >= self.max_depth
    }
}
/// Coarse parser behavior switches.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
#[allow(missing_docs)]
pub struct ParseMode {
    /// Whether tactic blocks are accepted.
    pub allow_tactics: bool,
    /// Whether the parser tries to continue after errors.
    pub recover_on_error: bool,
    #[allow(missing_docs)]
    pub lenient: bool,
}
impl ParseMode {
    /// Strictest mode: no tactics, no recovery, not lenient.
    /// (Identical to the derived all-false `Default`.)
    #[allow(missing_docs)]
    pub fn strict() -> Self {
        Self::default()
    }
    /// Forgiving mode: recovery on, lenient on (tactics still off).
    #[allow(missing_docs)]
    pub fn lenient() -> Self {
        Self {
            allow_tactics: false,
            recover_on_error: true,
            lenient: true,
        }
    }
}
/// Bit set over token-kind indices 0..64; indices ≥ 64 are silently ignored.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
#[allow(missing_docs)]
pub struct TokenKindSet {
    /// One bit per token-kind index.
    bits: u64,
}
impl TokenKindSet {
    /// The set containing no kinds.
    #[allow(missing_docs)]
    pub fn empty() -> Self {
        Self { bits: 0 }
    }
    /// Adds kind `idx`; out-of-range indices (>= 64) are ignored.
    #[allow(missing_docs)]
    pub fn insert(&mut self, idx: u32) {
        // checked_shl is None exactly when idx >= 64.
        if let Some(mask) = 1u64.checked_shl(idx) {
            self.bits |= mask;
        }
    }
    /// True when kind `idx` is in the set (always false for idx >= 64).
    #[allow(missing_docs)]
    pub fn contains(&self, idx: u32) -> bool {
        match 1u64.checked_shl(idx) {
            Some(mask) => self.bits & mask != 0,
            None => false,
        }
    }
    /// True when no kind is present.
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.bits == 0
    }
    /// Set union.
    #[allow(missing_docs)]
    pub fn union(self, other: Self) -> Self {
        Self {
            bits: self.bits | other.bits,
        }
    }
    /// Set intersection.
    #[allow(missing_docs)]
    pub fn intersect(self, other: Self) -> Self {
        Self {
            bits: self.bits & other.bits,
        }
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ParseTrace {
    /// Recorded rule invocations, in entry order.
    events: Vec<TraceEvent>,
    /// Hard cap on how many events are kept.
    max_events: usize,
}
impl ParseTrace {
    /// An empty trace that keeps at most `max_events` events.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(max_events: usize) -> Self {
        Self {
            events: Vec::new(),
            max_events,
        }
    }
    /// Records entry into `rule` at `pos` and returns a handle for `exit`.
    /// Past the cap, nothing is stored and the handle is a harmless no-op.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn enter(&mut self, rule: impl Into<String>, pos: usize) -> usize {
        let idx = self.events.len();
        if idx < self.max_events {
            let event = TraceEvent {
                rule: rule.into(),
                start_pos: pos,
                end_pos: pos,
                success: false,
            };
            self.events.push(event);
        }
        idx
    }
    /// Completes the event opened by `enter`; ignores out-of-range handles.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn exit(&mut self, idx: usize, end_pos: usize, success: bool) {
        if let Some(event) = self.events.get_mut(idx) {
            event.end_pos = end_pos;
            event.success = success;
        }
    }
    /// Number of recorded successful rule runs.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn success_count(&self) -> usize {
        self.events.iter().filter(|event| event.success).count()
    }
    /// Number of recorded failed (or never-exited) rule runs.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn fail_count(&self) -> usize {
        self.events.iter().filter(|event| !event.success).count()
    }
    /// Total recorded events.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn total(&self) -> usize {
        self.events.len()
    }
    /// The rule with the most recorded failures, if any failed.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn most_failing_rule(&self) -> Option<&str> {
        let mut failures: std::collections::HashMap<&str, usize> =
            std::collections::HashMap::new();
        for event in &self.events {
            if !event.success {
                *failures.entry(event.rule.as_str()).or_insert(0) += 1;
            }
        }
        failures
            .into_iter()
            .max_by_key(|(_, count)| *count)
            .map(|(rule, _)| rule)
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ParseStack {
    /// Active rule frames, outermost first.
    frames: Vec<ParseFrame>,
}
impl ParseStack {
    /// An empty stack.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self { frames: Vec::new() }
    }
    /// Pushes a new active frame.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn push(&mut self, frame: ParseFrame) {
        self.frames.push(frame);
    }
    /// Pops the innermost frame, if any.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn pop(&mut self) -> Option<ParseFrame> {
        self.frames.pop()
    }
    /// Number of active frames.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn depth(&self) -> usize {
        self.frames.len()
    }
    /// Name of the innermost active rule, if any.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn current_rule(&self) -> Option<&str> {
        self.frames.last().map(|frame| frame.rule.as_str())
    }
    /// True when any active frame is a type context.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn in_type(&self) -> bool {
        self.frames.iter().any(|frame| frame.in_type)
    }
    /// True when any active frame is a pattern context.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn in_pattern(&self) -> bool {
        self.frames.iter().any(|frame| frame.in_pattern)
    }
    /// Rule names outermost-to-innermost, joined with " > ".
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn rules_string(&self) -> String {
        let names: Vec<&str> = self.frames.iter().map(|frame| frame.rule.as_str()).collect();
        names.join(" > ")
    }
}
/// Error totals rolled up from a parse session, broken down per file.
#[derive(Clone, Debug, Default)]
#[allow(missing_docs)]
pub struct ParseErrorSummary {
    /// Errors across all files.
    pub total: usize,
    /// (filename, error count) for files with at least one error, in file order.
    pub by_file: Vec<(String, usize)>,
}
impl ParseErrorSummary {
    /// Summarizes a session; files without errors are omitted from `by_file`.
    #[allow(missing_docs)]
    pub fn from_session(session: &ParseSession) -> Self {
        let by_file: Vec<(String, usize)> = session
            .results
            .iter()
            .filter(|result| !result.errors.is_empty())
            .map(|result| (result.filename.clone(), result.errors.len()))
            .collect();
        let total = by_file.iter().map(|(_, count)| *count).sum();
        Self { total, by_file }
    }
    /// True when the session produced no errors at all.
    #[allow(missing_docs)]
    pub fn is_clean(&self) -> bool {
        self.total == 0
    }
    /// Filename with the highest error count, if any file had errors.
    #[allow(missing_docs)]
    pub fn worst_file(&self) -> Option<&str> {
        self.by_file
            .iter()
            .max_by_key(|(_, count)| *count)
            .map(|(file, _)| file.as_str())
    }
}