Skip to main content

oxilean_parse/parser/
types.rs

1//! Auto-generated module
2//!
3//! 🤖 Generated with [SplitRS](https://github.com/cool-japan/splitrs)
4
5pub use crate::ast_impl::{Decl, Located, SurfaceExpr};
6pub use crate::error_impl::{ParseError, ParseErrorKind};
7pub use crate::lexer::Lexer;
8pub use crate::parser_impl::Parser;
9pub use crate::tokens::{Span, Token, TokenKind};
10
11use super::functions::*;
12
/// A parse error carrying a byte position, a human-readable message,
/// and a flag recording whether the parser recovered and continued.
#[allow(dead_code, missing_docs)]
#[derive(Debug, Clone)]
pub struct ParseErrorSimple {
    pub pos: usize,
    pub message: String,
    pub recovered: bool,
}
impl ParseErrorSimple {
    /// Build a new, unrecovered error at `pos` with the given message.
    #[allow(dead_code, missing_docs)]
    pub fn new(pos: usize, msg: impl Into<String>) -> Self {
        let message = msg.into();
        Self {
            pos,
            message,
            recovered: false,
        }
    }
    /// Consume the error and return it marked as recovered-from.
    #[allow(dead_code, missing_docs)]
    pub fn recovered(self) -> Self {
        Self {
            recovered: true,
            ..self
        }
    }
}
39/// A stack of parse checkpoints.
40#[allow(dead_code)]
41#[allow(missing_docs)]
42pub struct CheckpointStack {
43    pub(super) stack: Vec<ParseCheckpoint>,
44}
45impl CheckpointStack {
46    #[allow(dead_code)]
47    #[allow(missing_docs)]
48    pub fn new() -> Self {
49        Self { stack: Vec::new() }
50    }
51    #[allow(dead_code)]
52    #[allow(missing_docs)]
53    pub fn push(&mut self, cp: ParseCheckpoint) {
54        self.stack.push(cp);
55    }
56    #[allow(dead_code)]
57    #[allow(missing_docs)]
58    pub fn pop(&mut self) -> Option<ParseCheckpoint> {
59        self.stack.pop()
60    }
61    #[allow(dead_code)]
62    #[allow(missing_docs)]
63    pub fn peek(&self) -> Option<&ParseCheckpoint> {
64        self.stack.last()
65    }
66    #[allow(dead_code)]
67    #[allow(missing_docs)]
68    pub fn depth(&self) -> usize {
69        self.stack.len()
70    }
71    #[allow(dead_code)]
72    #[allow(missing_docs)]
73    pub fn is_empty(&self) -> bool {
74        self.stack.is_empty()
75    }
76}
/// A recorded ambiguity: several alternative parses were viable at one
/// position, and at most one was eventually chosen.
#[allow(dead_code, missing_docs)]
#[derive(Debug, Clone)]
pub struct ParseAmbiguity {
    pub position: usize,
    pub alternatives: Vec<String>,
    pub resolved_to: Option<String>,
}
impl ParseAmbiguity {
    /// Record an unresolved ambiguity at `pos` over `alternatives`.
    #[allow(dead_code, missing_docs)]
    pub fn new(pos: usize, alternatives: Vec<String>) -> Self {
        Self {
            position: pos,
            alternatives,
            resolved_to: None,
        }
    }
    /// Mark the ambiguity as resolved to `choice`.
    #[allow(dead_code, missing_docs)]
    pub fn resolve(&mut self, choice: impl Into<String>) {
        let chosen = choice.into();
        self.resolved_to = Some(chosen);
    }
    /// Whether a resolution has been recorded.
    #[allow(dead_code, missing_docs)]
    pub fn is_resolved(&self) -> bool {
        matches!(self.resolved_to, Some(_))
    }
}
107/// A checkpoint for parser backtracking.
108#[allow(dead_code)]
109#[allow(missing_docs)]
110#[derive(Clone, Debug)]
111pub struct ParseCheckpoint {
112    pub position: usize,
113    pub depth: usize,
114    pub error_count: usize,
115}
116impl ParseCheckpoint {
117    #[allow(dead_code)]
118    #[allow(missing_docs)]
119    pub fn save(cursor: &TokenCursor, errors: usize) -> Self {
120        Self {
121            position: cursor.position,
122            depth: cursor.depth,
123            error_count: errors,
124        }
125    }
126    #[allow(dead_code)]
127    #[allow(missing_docs)]
128    pub fn restore(&self, cursor: &mut TokenCursor) {
129        cursor.position = self.position;
130        cursor.depth = self.depth;
131    }
132}
133/// A cache key for memoizing parse results by source hash.
134#[derive(Clone, Debug, PartialEq, Eq, Hash)]
135#[allow(missing_docs)]
136pub struct ParseCacheKey {
137    /// FNV-1a hash of the source text.
138    pub hash: u64,
139    /// Length of the source text in bytes.
140    pub len: usize,
141}
142impl ParseCacheKey {
143    /// Compute the cache key for a source string.
144    #[allow(missing_docs)]
145    pub fn from_src(src: &str) -> Self {
146        let hash = fnv1a(src.as_bytes());
147        Self {
148            hash,
149            len: src.len(),
150        }
151    }
152}
153/// A registry of parse ambiguities encountered.
154#[allow(dead_code)]
155#[allow(missing_docs)]
156pub struct AmbiguityRegistry {
157    ambiguities: Vec<ParseAmbiguity>,
158}
159impl AmbiguityRegistry {
160    #[allow(dead_code)]
161    #[allow(missing_docs)]
162    pub fn new() -> Self {
163        Self {
164            ambiguities: Vec::new(),
165        }
166    }
167    #[allow(dead_code)]
168    #[allow(missing_docs)]
169    pub fn report(&mut self, amb: ParseAmbiguity) {
170        self.ambiguities.push(amb);
171    }
172    #[allow(dead_code)]
173    #[allow(missing_docs)]
174    pub fn count(&self) -> usize {
175        self.ambiguities.len()
176    }
177    #[allow(dead_code)]
178    #[allow(missing_docs)]
179    pub fn unresolved(&self) -> usize {
180        self.ambiguities.iter().filter(|a| !a.is_resolved()).count()
181    }
182}
/// Tunable limits and behavior switches for a single parse run.
#[allow(dead_code, missing_docs)]
#[derive(Clone, Debug)]
pub struct ParseConfig {
    pub max_depth: usize,
    pub max_errors: usize,
    pub recover_from_errors: bool,
    pub strict_mode: bool,
    #[allow(missing_docs)]
    pub track_whitespace: bool,
    pub allow_holes: bool,
}
impl ParseConfig {
    /// The baseline configuration: recovery on, non-strict, holes allowed.
    #[allow(dead_code, missing_docs)]
    pub fn default_config() -> Self {
        Self {
            max_depth: 1000,
            max_errors: 50,
            recover_from_errors: true,
            strict_mode: false,
            track_whitespace: false,
            allow_holes: true,
        }
    }
    /// Strict variant: strict mode on, error recovery off.
    #[allow(dead_code, missing_docs)]
    pub fn strict() -> Self {
        let mut cfg = Self::default_config();
        cfg.strict_mode = true;
        cfg.recover_from_errors = false;
        cfg
    }
    /// Lenient variant: recovery on and a raised error budget (200).
    #[allow(dead_code, missing_docs)]
    pub fn lenient() -> Self {
        let mut cfg = Self::default_config();
        cfg.strict_mode = false;
        cfg.recover_from_errors = true;
        cfg.max_errors = 200;
        cfg
    }
}
/// Detailed counters gathered during a single parse run.
#[allow(dead_code, missing_docs)]
#[derive(Default, Debug, Clone)]
pub struct ParseStatsExt {
    pub tokens_consumed: u64,
    pub nodes_created: u64,
    pub backtrack_count: u64,
    pub error_count: u64,
    #[allow(missing_docs)]
    pub max_depth_reached: usize,
    pub parse_time_us: u64,
}
impl ParseStatsExt {
    /// Zero-initialized statistics.
    #[allow(dead_code, missing_docs)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Fraction of consumed tokens not lost to backtracking
    /// (0.0 when nothing was consumed).
    #[allow(dead_code, missing_docs)]
    pub fn efficiency(&self) -> f64 {
        match self.tokens_consumed {
            0 => 0.0,
            total => {
                let useful = total.saturating_sub(self.backtrack_count);
                useful as f64 / total as f64
            }
        }
    }
    /// Errors per created node (0.0 when no nodes were created).
    #[allow(dead_code, missing_docs)]
    pub fn error_rate(&self) -> f64 {
        match self.nodes_created {
            0 => 0.0,
            nodes => self.error_count as f64 / nodes as f64,
        }
    }
    /// One-line human-readable rendering of all counters.
    #[allow(dead_code, missing_docs)]
    pub fn summary(&self) -> String {
        format!(
            "tokens={} nodes={} backtracks={} errors={} depth={} time={}us efficiency={:.1}%",
            self.tokens_consumed,
            self.nodes_created,
            self.backtrack_count,
            self.error_count,
            self.max_depth_reached,
            self.parse_time_us,
            self.efficiency() * 100.0,
        )
    }
}
/// A memoized parse outcome, as stored in a packrat memo table.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub struct PackratEntry {
    /// Position at which the memoized rule attempt ended.
    pub end_pos: usize,
    /// Whether the memoized attempt succeeded.
    pub success: bool,
    /// String rendering of the parse result.
    /// NOTE(review): the exact representation format is not established
    /// in this file — confirm against the code that writes entries.
    pub result_repr: String,
}
288/// A "fuel" mechanism to prevent infinite loops in parsers.
289#[allow(dead_code)]
290#[allow(missing_docs)]
291pub struct ParseFuel {
292    pub(super) remaining: usize,
293}
294impl ParseFuel {
295    #[allow(dead_code)]
296    #[allow(missing_docs)]
297    pub fn new(fuel: usize) -> Self {
298        Self { remaining: fuel }
299    }
300    #[allow(dead_code)]
301    #[allow(missing_docs)]
302    pub fn consume(&mut self, amount: usize) -> bool {
303        if self.remaining >= amount {
304            self.remaining -= amount;
305            true
306        } else {
307            false
308        }
309    }
310    #[allow(dead_code)]
311    #[allow(missing_docs)]
312    pub fn has_fuel(&self) -> bool {
313        self.remaining > 0
314    }
315    #[allow(dead_code)]
316    #[allow(missing_docs)]
317    pub fn remaining(&self) -> usize {
318        self.remaining
319    }
320    #[allow(dead_code)]
321    #[allow(missing_docs)]
322    pub fn refuel(&mut self, amount: usize) {
323        self.remaining = self.remaining.saturating_add(amount);
324    }
325}
326/// A batch of parse requests.
327#[derive(Debug, Default)]
328#[allow(missing_docs)]
329pub struct ParseBatch {
330    /// Source strings with associated names.
331    pub entries: Vec<(String, String)>,
332}
333impl ParseBatch {
334    /// Create an empty batch.
335    #[allow(missing_docs)]
336    pub fn new() -> Self {
337        Self::default()
338    }
339    /// Add a named source entry.
340    #[allow(missing_docs)]
341    pub fn add(&mut self, name: &str, src: &str) {
342        self.entries.push((name.to_string(), src.to_string()));
343    }
344    /// Number of entries in the batch.
345    #[allow(missing_docs)]
346    pub fn len(&self) -> usize {
347        self.entries.len()
348    }
349    /// Whether the batch is empty.
350    #[allow(missing_docs)]
351    pub fn is_empty(&self) -> bool {
352        self.entries.is_empty()
353    }
354    /// Execute the batch and return a `ParseSession`.
355    #[allow(missing_docs)]
356    pub fn execute(self) -> ParseSession {
357        let mut session = ParseSession::new();
358        for (name, src) in self.entries {
359            session.parse_file(&name, &src);
360        }
361        session
362    }
363}
/// A bounded cursor over a token sequence: a position clamped to
/// `[0, end]` plus a nesting-depth counter.
#[allow(dead_code, missing_docs)]
#[derive(Clone, Debug)]
pub struct TokenCursor {
    pub position: usize,
    pub end: usize,
    pub depth: usize,
}
impl TokenCursor {
    /// Create a cursor at position 0, depth 0, with exclusive bound `end`.
    #[allow(dead_code, missing_docs)]
    pub fn new(end: usize) -> Self {
        Self {
            position: 0,
            end,
            depth: 0,
        }
    }
    /// Move one step forward; a no-op once at the end.
    #[allow(dead_code, missing_docs)]
    pub fn advance(&mut self) {
        // `usize::from(bool)` is 1 exactly when we may still move.
        self.position += usize::from(self.position < self.end);
    }
    /// Move one step backward; a no-op at position 0.
    #[allow(dead_code, missing_docs)]
    pub fn retreat(&mut self) {
        self.position = self.position.saturating_sub(1);
    }
    /// Whether the cursor has reached (or passed) the end.
    #[allow(dead_code, missing_docs)]
    pub fn is_at_end(&self) -> bool {
        self.position >= self.end
    }
    /// Tokens left before the end (0 when at or past it).
    #[allow(dead_code, missing_docs)]
    pub fn remaining(&self) -> usize {
        self.end.saturating_sub(self.position)
    }
    /// Record entry into a nested scope.
    #[allow(dead_code, missing_docs)]
    pub fn enter_scope(&mut self) {
        self.depth += 1;
    }
    /// Record leaving a nested scope; a no-op at depth 0.
    #[allow(dead_code, missing_docs)]
    pub fn exit_scope(&mut self) {
        self.depth = self.depth.saturating_sub(1);
    }
    /// Whether the cursor is outside any nested scope.
    #[allow(dead_code, missing_docs)]
    pub fn is_at_root(&self) -> bool {
        self.depth == 0
    }
}
/// Outcome of a parser combinator: a value or an error message, each
/// paired with the position reached.
#[allow(dead_code, missing_docs)]
pub enum CombResult<T> {
    Ok(T, usize),
    Err(String, usize),
}
impl<T> CombResult<T> {
    /// Whether this is the success variant.
    #[allow(dead_code, missing_docs)]
    pub fn is_ok(&self) -> bool {
        match self {
            CombResult::Ok(..) => true,
            CombResult::Err(..) => false,
        }
    }
    /// Whether this is the error variant.
    #[allow(dead_code, missing_docs)]
    pub fn is_err(&self) -> bool {
        !self.is_ok()
    }
    /// Position reached, regardless of success.
    #[allow(dead_code, missing_docs)]
    pub fn position(&self) -> usize {
        match self {
            CombResult::Ok(_, p) => *p,
            CombResult::Err(_, p) => *p,
        }
    }
}
/// Outcome of a lookahead probe.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LookaheadResult {
    /// The lookahead matched; carries a position/length
    /// (NOTE(review): which of the two is not established here — confirm
    /// against the code that constructs this variant).
    Matches(usize),
    /// The lookahead did not match.
    NoMatch,
    /// More than one alternative could match.
    Ambiguous,
}
460/// A registry of operator fixities.
461#[allow(dead_code)]
462#[allow(missing_docs)]
463pub struct FixityRegistry {
464    entries: std::collections::HashMap<String, Fixity>,
465}
466impl FixityRegistry {
467    #[allow(dead_code)]
468    #[allow(missing_docs)]
469    pub fn new() -> Self {
470        let mut reg = Self {
471            entries: std::collections::HashMap::new(),
472        };
473        reg.add("+", Fixity::InfixLeft(65));
474        reg.add("-", Fixity::InfixLeft(65));
475        reg.add("*", Fixity::InfixLeft(70));
476        reg.add("/", Fixity::InfixLeft(70));
477        reg.add("^", Fixity::InfixRight(75));
478        reg.add("=", Fixity::InfixNone(50));
479        reg.add("<", Fixity::InfixNone(50));
480        reg.add(">", Fixity::InfixNone(50));
481        reg.add("&&", Fixity::InfixRight(35));
482        reg.add("||", Fixity::InfixRight(30));
483        reg
484    }
485    #[allow(dead_code)]
486    #[allow(missing_docs)]
487    pub fn add(&mut self, op: impl Into<String>, fixity: Fixity) {
488        self.entries.insert(op.into(), fixity);
489    }
490    #[allow(dead_code)]
491    #[allow(missing_docs)]
492    pub fn lookup(&self, op: &str) -> Option<&Fixity> {
493        self.entries.get(op)
494    }
495    #[allow(dead_code)]
496    #[allow(missing_docs)]
497    pub fn count(&self) -> usize {
498        self.entries.len()
499    }
500}
501/// A parser recovery decision.
502#[allow(dead_code)]
503#[allow(missing_docs)]
504pub struct RecoveryDecision {
505    pub strategy: RecoveryStrategy,
506    pub tokens_to_skip: usize,
507    pub message: String,
508}
509impl RecoveryDecision {
510    #[allow(dead_code)]
511    #[allow(missing_docs)]
512    pub fn skip(n: usize, msg: impl Into<String>) -> Self {
513        Self {
514            strategy: RecoveryStrategy::Skip,
515            tokens_to_skip: n,
516            message: msg.into(),
517        }
518    }
519    #[allow(dead_code)]
520    #[allow(missing_docs)]
521    pub fn sync(msg: impl Into<String>) -> Self {
522        Self {
523            strategy: RecoveryStrategy::SyncToKeyword,
524            tokens_to_skip: 0,
525            message: msg.into(),
526        }
527    }
528    #[allow(dead_code)]
529    #[allow(missing_docs)]
530    pub fn abandon(msg: impl Into<String>) -> Self {
531        Self {
532            strategy: RecoveryStrategy::Abandon,
533            tokens_to_skip: 0,
534            message: msg.into(),
535        }
536    }
537}
538/// A parser result type with error accumulation.
539#[allow(dead_code)]
540#[allow(missing_docs)]
541pub struct ParseResultWithErrors<T> {
542    pub value: Option<T>,
543    pub errors: Vec<ParseErrorSimple>,
544}
545impl<T> ParseResultWithErrors<T> {
546    #[allow(dead_code)]
547    #[allow(missing_docs)]
548    pub fn ok(value: T) -> Self {
549        Self {
550            value: Some(value),
551            errors: Vec::new(),
552        }
553    }
554    #[allow(dead_code)]
555    #[allow(missing_docs)]
556    pub fn err(e: ParseErrorSimple) -> Self {
557        Self {
558            value: None,
559            errors: vec![e],
560        }
561    }
562    #[allow(dead_code)]
563    #[allow(missing_docs)]
564    pub fn ok_with_errors(value: T, errors: Vec<ParseErrorSimple>) -> Self {
565        Self {
566            value: Some(value),
567            errors,
568        }
569    }
570    #[allow(dead_code)]
571    #[allow(missing_docs)]
572    pub fn is_ok(&self) -> bool {
573        self.value.is_some()
574    }
575    #[allow(dead_code)]
576    #[allow(missing_docs)]
577    pub fn has_errors(&self) -> bool {
578        !self.errors.is_empty()
579    }
580    #[allow(dead_code)]
581    #[allow(missing_docs)]
582    pub fn error_count(&self) -> usize {
583        self.errors.len()
584    }
585}
586/// Result for a single file parsed within a session.
587#[derive(Debug)]
588#[allow(missing_docs)]
589pub struct ParseFileResult {
590    /// File name (or `"<stdin>"`).
591    pub filename: String,
592    /// Successfully parsed declarations.
593    pub decls: Vec<Located<Decl>>,
594    /// Errors encountered.
595    #[allow(missing_docs)]
596    pub errors: Vec<ParseError>,
597}
598impl ParseFileResult {
599    /// Whether the file parsed without errors.
600    #[allow(missing_docs)]
601    pub fn is_ok(&self) -> bool {
602        self.errors.is_empty()
603    }
604    /// Number of declarations successfully parsed.
605    #[allow(missing_docs)]
606    pub fn decl_count(&self) -> usize {
607        self.decls.len()
608    }
609}
/// One parse-trace record: which rule ran, over what span, and whether
/// it matched.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone)]
pub struct TraceEvent {
    /// Name of the grammar rule that was attempted.
    pub rule: String,
    /// Position where the attempt began.
    pub start_pos: usize,
    /// Position where the attempt stopped (on success or failure).
    pub end_pos: usize,
    /// Whether the rule matched.
    pub success: bool,
}
619/// A memoization table for parser results (Packrat parsing).
620#[allow(dead_code)]
621#[allow(missing_docs)]
622pub struct PackratTable {
623    entries: std::collections::HashMap<(usize, String), PackratEntry>,
624}
625impl PackratTable {
626    #[allow(dead_code)]
627    #[allow(missing_docs)]
628    pub fn new() -> Self {
629        Self {
630            entries: std::collections::HashMap::new(),
631        }
632    }
633    #[allow(dead_code)]
634    #[allow(missing_docs)]
635    pub fn lookup(&self, pos: usize, rule: &str) -> Option<&PackratEntry> {
636        self.entries.get(&(pos, rule.to_string()))
637    }
638    #[allow(dead_code)]
639    #[allow(missing_docs)]
640    pub fn store(&mut self, pos: usize, rule: impl Into<String>, entry: PackratEntry) {
641        self.entries.insert((pos, rule.into()), entry);
642    }
643    #[allow(dead_code)]
644    #[allow(missing_docs)]
645    pub fn size(&self) -> usize {
646        self.entries.len()
647    }
648    #[allow(dead_code)]
649    #[allow(missing_docs)]
650    pub fn hit_rate_estimate(&self) -> f64 {
651        if self.entries.is_empty() {
652            return 0.0;
653        }
654        let hits = self.entries.values().filter(|e| e.success).count();
655        hits as f64 / self.entries.len() as f64
656    }
657}
658/// Tracks all recovery events during parsing.
659#[allow(dead_code)]
660#[allow(missing_docs)]
661pub struct RecoveryLog {
662    entries: Vec<(usize, RecoveryDecision)>,
663}
664impl RecoveryLog {
665    #[allow(dead_code)]
666    #[allow(missing_docs)]
667    pub fn new() -> Self {
668        Self {
669            entries: Vec::new(),
670        }
671    }
672    #[allow(dead_code)]
673    #[allow(missing_docs)]
674    pub fn record(&mut self, pos: usize, decision: RecoveryDecision) {
675        self.entries.push((pos, decision));
676    }
677    #[allow(dead_code)]
678    #[allow(missing_docs)]
679    pub fn count(&self) -> usize {
680        self.entries.len()
681    }
682    #[allow(dead_code)]
683    #[allow(missing_docs)]
684    pub fn strategies_used(&self) -> Vec<RecoveryStrategy> {
685        self.entries.iter().map(|(_, d)| d.strategy).collect()
686    }
687    #[allow(dead_code)]
688    #[allow(missing_docs)]
689    pub fn abandon_count(&self) -> usize {
690        self.entries
691            .iter()
692            .filter(|(_, d)| d.strategy == RecoveryStrategy::Abandon)
693            .count()
694    }
695}
/// State for one parse "frame" — a single invocation of a recursive
/// production — including whether it sits in type or pattern context.
#[allow(dead_code, missing_docs)]
pub struct ParseFrame {
    pub rule: String,
    pub start_pos: usize,
    pub depth: usize,
    pub in_type: bool,
    #[allow(missing_docs)]
    pub in_pattern: bool,
}
impl ParseFrame {
    /// Create a frame for `rule` at `pos`/`depth`, in neither type nor
    /// pattern context.
    #[allow(dead_code, missing_docs)]
    pub fn new(rule: impl Into<String>, pos: usize, depth: usize) -> Self {
        Self {
            rule: rule.into(),
            start_pos: pos,
            depth,
            in_type: false,
            in_pattern: false,
        }
    }
    /// Builder: mark this frame as being inside a type.
    #[allow(dead_code, missing_docs)]
    pub fn for_type(self) -> Self {
        Self {
            in_type: true,
            ..self
        }
    }
    /// Builder: mark this frame as being inside a pattern.
    #[allow(dead_code, missing_docs)]
    pub fn for_pattern(self) -> Self {
        Self {
            in_pattern: true,
            ..self
        }
    }
}
/// A source position: file name plus 0-based line/column and the raw
/// byte offset. `display()` renders it 1-based.
#[allow(dead_code, missing_docs)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SourcePos {
    pub file: String,
    pub line: usize,
    pub column: usize,
    pub byte_offset: usize,
}
impl SourcePos {
    /// Build a position from its components (line/column are 0-based).
    #[allow(dead_code, missing_docs)]
    pub fn new(file: impl Into<String>, line: usize, col: usize, offset: usize) -> Self {
        Self {
            file: file.into(),
            line,
            column: col,
            byte_offset: offset,
        }
    }
    /// Placeholder position for unknown provenance: `<unknown>:1:1`.
    #[allow(dead_code, missing_docs)]
    pub fn unknown() -> Self {
        Self::new("<unknown>", 0, 0, 0)
    }
    /// Render as `file:line:col` with 1-based line and column.
    #[allow(dead_code, missing_docs)]
    pub fn display(&self) -> String {
        format!("{}:{}:{}", self.file, self.line + 1, self.column + 1)
    }
}
764/// Maps byte offsets in a source file to line and column numbers.
765#[derive(Clone, Debug)]
766#[allow(missing_docs)]
767pub struct SourceMap {
768    /// Starting byte offset of each line.
769    pub(super) line_starts: Vec<usize>,
770    /// Total length of the source.
771    source_len: usize,
772}
773impl SourceMap {
774    /// Build a source map from source text.
775    #[allow(missing_docs)]
776    pub fn new(src: &str) -> Self {
777        let mut line_starts = vec![0];
778        for (i, b) in src.bytes().enumerate() {
779            if b == b'\n' {
780                line_starts.push(i + 1);
781            }
782        }
783        Self {
784            line_starts,
785            source_len: src.len(),
786        }
787    }
788    /// Convert a byte offset to (line, col), both 1-based.
789    #[allow(missing_docs)]
790    pub fn offset_to_line_col(&self, offset: usize) -> (u32, u32) {
791        let line = match self.line_starts.binary_search(&offset) {
792            Ok(i) => i,
793            Err(i) => i.saturating_sub(1),
794        };
795        let col = offset - self.line_starts[line];
796        ((line + 1) as u32, (col + 1) as u32)
797    }
798    /// Number of lines in the source.
799    #[allow(missing_docs)]
800    pub fn num_lines(&self) -> usize {
801        self.line_starts.len()
802    }
803    /// Total source length in bytes.
804    #[allow(missing_docs)]
805    pub fn source_len(&self) -> usize {
806        self.source_len
807    }
808}
809/// A ring buffer of `Token`s used for lookahead parsing.
810#[derive(Clone, Debug)]
811#[allow(missing_docs)]
812pub struct ParseBuffer {
813    tokens: std::collections::VecDeque<Token>,
814    /// Maximum lookahead.
815    max_lookahead: usize,
816}
817impl ParseBuffer {
818    /// Create an empty buffer.
819    #[allow(missing_docs)]
820    pub fn new(max_lookahead: usize) -> Self {
821        Self {
822            tokens: std::collections::VecDeque::new(),
823            max_lookahead,
824        }
825    }
826    /// Push a token onto the back of the buffer.
827    #[allow(missing_docs)]
828    pub fn push(&mut self, tok: Token) {
829        if self.tokens.len() >= self.max_lookahead {
830            self.tokens.pop_front();
831        }
832        self.tokens.push_back(tok);
833    }
834    /// Peek at the front of the buffer.
835    #[allow(missing_docs)]
836    pub fn front(&self) -> Option<&Token> {
837        self.tokens.front()
838    }
839    /// Pop the front token.
840    #[allow(missing_docs)]
841    pub fn pop(&mut self) -> Option<Token> {
842        self.tokens.pop_front()
843    }
844    /// Number of buffered tokens.
845    #[allow(missing_docs)]
846    pub fn len(&self) -> usize {
847        self.tokens.len()
848    }
849    /// Whether the buffer is empty.
850    #[allow(missing_docs)]
851    pub fn is_empty(&self) -> bool {
852        self.tokens.is_empty()
853    }
854    /// Clear all buffered tokens.
855    #[allow(missing_docs)]
856    pub fn clear(&mut self) {
857        self.tokens.clear();
858    }
859}
/// Collects descriptions of what was expected at the current position,
/// for building "expected X or Y" diagnostics.
#[allow(dead_code, missing_docs)]
pub struct ExpectedSet {
    expected: Vec<String>,
}
impl ExpectedSet {
    /// Create an empty set.
    #[allow(dead_code, missing_docs)]
    pub fn new() -> Self {
        Self {
            expected: Vec::new(),
        }
    }
    /// Add one expectation description.
    #[allow(dead_code, missing_docs)]
    pub fn add(&mut self, what: impl Into<String>) {
        self.expected.push(what.into());
    }
    /// Drop all expectations.
    #[allow(dead_code, missing_docs)]
    pub fn clear(&mut self) {
        self.expected.clear();
    }
    /// Whether no expectation has been recorded.
    #[allow(dead_code, missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.expected.is_empty()
    }
    /// Number of recorded expectations.
    #[allow(dead_code, missing_docs)]
    pub fn count(&self) -> usize {
        self.expected.len()
    }
    /// Render as a human-readable "expected …" message.
    #[allow(dead_code, missing_docs)]
    pub fn to_message(&self) -> String {
        match self.expected.as_slice() {
            [] => "nothing expected".to_string(),
            [only] => format!("expected {}", only),
            [rest @ .., last] => format!("expected {} or {}", rest.join(", "), last),
        }
    }
}
/// Operator fixity: associativity/position plus a binding precedence
/// (higher binds tighter).
#[allow(dead_code, missing_docs)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Fixity {
    InfixLeft(u8),
    InfixRight(u8),
    InfixNone(u8),
    Prefix(u8),
    Postfix(u8),
}
impl Fixity {
    /// The binding precedence, whatever the fixity class.
    #[allow(dead_code, missing_docs)]
    pub fn precedence(&self) -> u8 {
        match *self {
            Fixity::InfixLeft(p) => p,
            Fixity::InfixRight(p) => p,
            Fixity::InfixNone(p) => p,
            Fixity::Prefix(p) => p,
            Fixity::Postfix(p) => p,
        }
    }
    /// Whether this is any of the infix classes.
    #[allow(dead_code, missing_docs)]
    pub fn is_infix(&self) -> bool {
        match self {
            Fixity::InfixLeft(_) | Fixity::InfixRight(_) | Fixity::InfixNone(_) => true,
            Fixity::Prefix(_) | Fixity::Postfix(_) => false,
        }
    }
    /// Whether this is the right-associative infix class.
    #[allow(dead_code, missing_docs)]
    pub fn is_right_assoc(&self) -> bool {
        matches!(self, Fixity::InfixRight(_))
    }
}
945/// A multi-file parse session that tracks per-file results and statistics.
946#[derive(Debug, Default)]
947#[allow(missing_docs)]
948pub struct ParseSession {
949    /// File names in order of addition.
950    pub file_names: Vec<String>,
951    /// Parse results for each file.
952    pub results: Vec<ParseFileResult>,
953    /// Aggregate statistics.
954    #[allow(missing_docs)]
955    pub stats: ParseStats,
956}
957impl ParseSession {
958    /// Create an empty session.
959    #[allow(missing_docs)]
960    pub fn new() -> Self {
961        Self::default()
962    }
963    /// Parse a named file's source text and add it to the session.
964    #[allow(missing_docs)]
965    pub fn parse_file(&mut self, filename: &str, src: &str) {
966        let mut errors = Vec::new();
967        let tokens = Lexer::new(src).tokenize();
968        let mut parser = Parser::new(tokens);
969        let mut decls = Vec::new();
970        loop {
971            match parser.parse_decl() {
972                Ok(d) => decls.push(d),
973                Err(e) if e.is_eof() => break,
974                Err(e) => {
975                    errors.push(e);
976                    break;
977                }
978            }
979        }
980        self.stats.files_parsed += 1;
981        self.stats.decls_parsed += decls.len() as u64;
982        self.stats.errors_total += errors.len() as u64;
983        self.file_names.push(filename.to_string());
984        self.results.push(ParseFileResult {
985            filename: filename.to_string(),
986            decls,
987            errors,
988        });
989    }
990    /// Whether all files in the session parsed without errors.
991    #[allow(missing_docs)]
992    pub fn all_ok(&self) -> bool {
993        self.results.iter().all(|r| r.is_ok())
994    }
995    /// Collect all errors across files.
996    #[allow(missing_docs)]
997    pub fn all_errors(&self) -> Vec<&ParseError> {
998        self.results.iter().flat_map(|r| r.errors.iter()).collect()
999    }
1000    /// Total number of declarations across all files.
1001    #[allow(missing_docs)]
1002    pub fn total_decls(&self) -> usize {
1003        self.results.iter().map(|r| r.decl_count()).sum()
1004    }
1005    /// Number of files in the session.
1006    #[allow(missing_docs)]
1007    pub fn file_count(&self) -> usize {
1008        self.file_names.len()
1009    }
1010}
1011/// A lightweight wrapper around a `Vec<Token>` that provides
1012/// cursor-based traversal without requiring a mutable `Parser`.
1013#[derive(Clone, Debug)]
1014#[allow(missing_docs)]
1015pub struct TokenStream {
1016    pub(super) tokens: Vec<Token>,
1017    pub(super) pos: usize,
1018}
1019impl TokenStream {
1020    /// Create a new token stream from a token vector.
1021    #[allow(missing_docs)]
1022    pub fn new(tokens: Vec<Token>) -> Self {
1023        Self { tokens, pos: 0 }
1024    }
1025    /// Lex `src` and wrap the result.
1026    #[allow(missing_docs)]
1027    pub fn from_src(src: &str) -> Self {
1028        Self::new(Lexer::new(src).tokenize())
1029    }
1030    /// Peek at the current token without consuming it.
1031    #[allow(missing_docs)]
1032    pub fn peek(&self) -> Option<&Token> {
1033        self.tokens.get(self.pos)
1034    }
1035    /// Advance and return the current token.
1036    #[allow(clippy::should_implement_trait)]
1037    #[allow(missing_docs)]
1038    pub fn next(&mut self) -> Option<&Token> {
1039        let tok = self.tokens.get(self.pos)?;
1040        self.pos += 1;
1041        Some(tok)
1042    }
1043    /// Whether the stream is exhausted.
1044    #[allow(missing_docs)]
1045    pub fn is_empty(&self) -> bool {
1046        self.pos >= self.tokens.len()
1047    }
1048    /// Number of remaining tokens.
1049    #[allow(missing_docs)]
1050    pub fn remaining(&self) -> usize {
1051        self.tokens.len().saturating_sub(self.pos)
1052    }
1053    /// Total number of tokens (including consumed ones).
1054    #[allow(missing_docs)]
1055    pub fn total_len(&self) -> usize {
1056        self.tokens.len()
1057    }
1058    /// Reset the cursor to the beginning.
1059    #[allow(missing_docs)]
1060    pub fn reset(&mut self) {
1061        self.pos = 0;
1062    }
1063    /// Collect remaining tokens into a `Vec`.
1064    #[allow(missing_docs)]
1065    pub fn collect_remaining(&self) -> Vec<&Token> {
1066        self.tokens[self.pos..].iter().collect()
1067    }
1068}
/// Aggregate statistics from one or more parse operations.
#[derive(Clone, Debug, Default)]
#[allow(missing_docs)]
pub struct ParseStats {
    /// Number of files parsed.
    pub files_parsed: u64,
    /// Total declarations parsed (across all files).
    pub decls_parsed: u64,
    /// Total parse errors encountered.
    pub errors_total: u64,
    /// Total tokens lexed.
    pub tokens_lexed: u64,
    /// Total source bytes processed.
    pub bytes_processed: u64,
}
impl ParseStats {
    /// Zero-initialized statistics.
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Mean number of declarations per parsed file (0.0 when no files).
    #[allow(missing_docs)]
    pub fn avg_decls_per_file(&self) -> f64 {
        match self.files_parsed {
            0 => 0.0,
            n => self.decls_parsed as f64 / n as f64,
        }
    }
    /// Errors per declaration (0.0 when nothing was parsed).
    #[allow(missing_docs)]
    pub fn error_rate(&self) -> f64 {
        match self.decls_parsed {
            0 => 0.0,
            n => self.errors_total as f64 / n as f64,
        }
    }
    /// True when no errors were recorded at all.
    #[allow(missing_docs)]
    pub fn is_clean(&self) -> bool {
        self.errors_total == 0
    }
}
/// The kind of a parse annotation.
///
/// Carried by `ParseAnnotation`; none of these kinds represents a
/// hard parse error.
#[derive(Clone, Debug, PartialEq, Eq)]
#[allow(missing_docs)]
pub enum AnnotationKind {
    /// Informational note.
    Info,
    /// Non-fatal deprecation warning.
    Deprecated,
    /// Suggestion for alternative syntax.
    Suggestion,
}
/// Error recovery strategies for the parser.
///
/// NOTE(review): the precise behavior of each variant is interpreted
/// by the parser's recovery driver, which is defined outside this
/// module — glosses below are from the variant names; confirm against
/// the driver.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RecoveryStrategy {
    /// Presumably: skip past the offending input and continue.
    Skip,
    /// Presumably: act as if an expected token were present.
    InsertToken,
    /// Presumably: discard input until a synchronizing keyword.
    SyncToKeyword,
    /// Presumably: give up on recovery entirely.
    Abandon,
}
/// A quality rating for a parse result.
///
/// Variant order is load-bearing: `derive(Ord)` yields
/// `Failed < Partial < WithWarnings < Clean`, and callers may compare
/// qualities directly.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[allow(missing_docs)]
pub enum ParseQuality {
    /// Parse failed completely.
    Failed,
    /// Parse succeeded with recoverable errors.
    Partial,
    /// Parse succeeded with warnings only.
    WithWarnings,
    /// Parse succeeded cleanly.
    Clean,
}
impl ParseQuality {
    /// Derive a rating from error and warning counts.
    ///
    /// NOTE(review): this mapping never produces `Partial` — any
    /// nonzero error count rates `Failed`. Confirm whether recovered
    /// errors were meant to rate `Partial` instead.
    #[allow(missing_docs)]
    pub fn rate(errors: usize, warnings: usize) -> Self {
        match (errors, warnings) {
            (e, _) if e > 0 => ParseQuality::Failed,
            (_, w) if w > 0 => ParseQuality::WithWarnings,
            _ => ParseQuality::Clean,
        }
    }
    /// Usable means at least `Partial`, i.e. anything but `Failed`.
    #[allow(missing_docs)]
    pub fn is_usable(&self) -> bool {
        !matches!(self, ParseQuality::Failed)
    }
}
1167/// A multi-stage parse pipeline that applies a sequence of
1168/// transformations to the token stream before parsing.
1169#[derive(Debug, Default)]
1170#[allow(missing_docs)]
1171pub struct ParsePipeline {
1172    /// Stage names for diagnostics.
1173    pub stages: Vec<String>,
1174}
1175impl ParsePipeline {
1176    /// Create an empty pipeline.
1177    #[allow(missing_docs)]
1178    pub fn new() -> Self {
1179        Self::default()
1180    }
1181    /// Add a named stage.
1182    #[allow(missing_docs)]
1183    pub fn add_stage(&mut self, name: &str) {
1184        self.stages.push(name.to_string());
1185    }
1186    /// Number of stages.
1187    #[allow(missing_docs)]
1188    pub fn stage_count(&self) -> usize {
1189        self.stages.len()
1190    }
1191    /// Execute the pipeline on a source string, returning a token stream.
1192    #[allow(missing_docs)]
1193    pub fn execute(&self, src: &str) -> TokenStream {
1194        TokenStream::from_src(src)
1195    }
1196}
/// A depth-limited recursive descent helper.
///
/// Tracks the current nesting depth against a fixed maximum so callers
/// can refuse to recurse past the limit.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct DepthLimiter {
    current: usize,
    max: usize,
}
impl DepthLimiter {
    /// Limiter at depth zero with the given maximum.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(max: usize) -> Self {
        Self { current: 0, max }
    }
    /// Try to descend one level; returns `false` (without descending)
    /// when already at the limit.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn enter(&mut self) -> bool {
        let has_room = self.current < self.max;
        if has_room {
            self.current += 1;
        }
        has_room
    }
    /// Ascend one level; saturates at depth zero.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn exit(&mut self) {
        self.current = self.current.saturating_sub(1);
    }
    /// The current nesting depth.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn depth(&self) -> usize {
        self.current
    }
    /// Whether further `enter` calls would be refused.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_at_limit(&self) -> bool {
        self.current >= self.max
    }
}
1237/// An annotation attached to a parse result.
1238#[derive(Clone, Debug)]
1239#[allow(missing_docs)]
1240pub struct ParseAnnotation {
1241    /// Annotation kind.
1242    pub kind: AnnotationKind,
1243    /// Source span.
1244    pub span: Span,
1245    /// Message.
1246    #[allow(missing_docs)]
1247    pub message: String,
1248}
1249impl ParseAnnotation {
1250    /// Create a new annotation.
1251    #[allow(missing_docs)]
1252    pub fn new(kind: AnnotationKind, span: Span, message: &str) -> Self {
1253        Self {
1254            kind,
1255            span,
1256            message: message.to_string(),
1257        }
1258    }
1259    /// Create an info annotation.
1260    #[allow(missing_docs)]
1261    pub fn info(span: Span, message: &str) -> Self {
1262        Self::new(AnnotationKind::Info, span, message)
1263    }
1264    /// Create a deprecation annotation.
1265    #[allow(missing_docs)]
1266    pub fn deprecated(span: Span, message: &str) -> Self {
1267        Self::new(AnnotationKind::Deprecated, span, message)
1268    }
1269}
/// A context for operator-precedence parsing (Pratt parsing).
///
/// Carries the current minimum binding power plus a recursion-depth
/// guard.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct PrattContext {
    pub min_prec: u8,
    pub depth: usize,
    pub max_depth: usize,
}
impl PrattContext {
    /// Root context at the given minimum precedence: depth 0, depth cap 200.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(min_prec: u8) -> Self {
        Self {
            depth: 0,
            max_depth: 200,
            min_prec,
        }
    }
    /// Child context one level deeper, with a new minimum precedence and
    /// the same depth cap.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn with_min_prec(&self, p: u8) -> Self {
        Self {
            min_prec: p,
            depth: self.depth + 1,
            max_depth: self.max_depth,
        }
    }
    /// Whether the recursion-depth cap has been reached.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_too_deep(&self) -> bool {
        self.depth >= self.max_depth
    }
}
/// Flags controlling parser behavior.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
#[allow(missing_docs)]
pub struct ParseMode {
    /// Whether to allow tactics inside term mode.
    pub allow_tactics: bool,
    /// Whether to recover from errors (continue after first error).
    pub recover_on_error: bool,
    /// Whether to parse in lenient mode (accept partial expressions).
    pub lenient: bool,
}
impl ParseMode {
    /// Strict mode: every flag off (identical to `ParseMode::default()`).
    #[allow(missing_docs)]
    pub fn strict() -> Self {
        Self::default()
    }
    /// Lenient mode: recover from errors and accept partial
    /// expressions; tactics remain disallowed.
    #[allow(missing_docs)]
    pub fn lenient() -> Self {
        Self {
            recover_on_error: true,
            lenient: true,
            ..Self::default()
        }
    }
}
/// A compact bitset for up to 64 token kinds.
///
/// Indices 64 and above are silently out of range: `insert` ignores
/// them and `contains` reports `false`.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
#[allow(missing_docs)]
pub struct TokenKindSet {
    bits: u64,
}
impl TokenKindSet {
    /// The empty set (same as `Default`).
    #[allow(missing_docs)]
    pub fn empty() -> Self {
        Self::default()
    }
    /// Insert a kind by discriminant index; out-of-range indices are
    /// ignored.
    #[allow(missing_docs)]
    pub fn insert(&mut self, idx: u32) {
        // checked_shl yields None for idx >= 64, preserving the
        // silent-ignore behavior for out-of-range indices.
        if let Some(mask) = 1u64.checked_shl(idx) {
            self.bits |= mask;
        }
    }
    /// Whether the index is present (always false for idx >= 64).
    #[allow(missing_docs)]
    pub fn contains(&self, idx: u32) -> bool {
        match 1u64.checked_shl(idx) {
            Some(mask) => self.bits & mask != 0,
            None => false,
        }
    }
    /// Whether no index is present.
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.bits == 0
    }
    /// Set union (bitwise OR).
    #[allow(missing_docs)]
    pub fn union(self, other: Self) -> Self {
        Self {
            bits: self.bits | other.bits,
        }
    }
    /// Set intersection (bitwise AND).
    #[allow(missing_docs)]
    pub fn intersect(self, other: Self) -> Self {
        Self {
            bits: self.bits & other.bits,
        }
    }
}
1379/// Tracks which tokens were consumed by each parser rule.
1380#[allow(dead_code)]
1381#[allow(missing_docs)]
1382pub struct ParseTrace {
1383    events: Vec<TraceEvent>,
1384    max_events: usize,
1385}
1386impl ParseTrace {
1387    #[allow(dead_code)]
1388    #[allow(missing_docs)]
1389    pub fn new(max_events: usize) -> Self {
1390        Self {
1391            events: Vec::new(),
1392            max_events,
1393        }
1394    }
1395    #[allow(dead_code)]
1396    #[allow(missing_docs)]
1397    pub fn enter(&mut self, rule: impl Into<String>, pos: usize) -> usize {
1398        let idx = self.events.len();
1399        if self.events.len() < self.max_events {
1400            self.events.push(TraceEvent {
1401                rule: rule.into(),
1402                start_pos: pos,
1403                end_pos: pos,
1404                success: false,
1405            });
1406        }
1407        idx
1408    }
1409    #[allow(dead_code)]
1410    #[allow(missing_docs)]
1411    pub fn exit(&mut self, idx: usize, end_pos: usize, success: bool) {
1412        if let Some(e) = self.events.get_mut(idx) {
1413            e.end_pos = end_pos;
1414            e.success = success;
1415        }
1416    }
1417    #[allow(dead_code)]
1418    #[allow(missing_docs)]
1419    pub fn success_count(&self) -> usize {
1420        self.events.iter().filter(|e| e.success).count()
1421    }
1422    #[allow(dead_code)]
1423    #[allow(missing_docs)]
1424    pub fn fail_count(&self) -> usize {
1425        self.events.iter().filter(|e| !e.success).count()
1426    }
1427    #[allow(dead_code)]
1428    #[allow(missing_docs)]
1429    pub fn total(&self) -> usize {
1430        self.events.len()
1431    }
1432    #[allow(dead_code)]
1433    #[allow(missing_docs)]
1434    pub fn most_failing_rule(&self) -> Option<&str> {
1435        let mut counts: std::collections::HashMap<&str, usize> = std::collections::HashMap::new();
1436        for e in &self.events {
1437            if !e.success {
1438                *counts.entry(e.rule.as_str()).or_insert(0) += 1;
1439            }
1440        }
1441        counts.into_iter().max_by_key(|(_, c)| *c).map(|(r, _)| r)
1442    }
1443}
1444/// A stack of parse frames for debugging.
1445#[allow(dead_code)]
1446#[allow(missing_docs)]
1447pub struct ParseStack {
1448    frames: Vec<ParseFrame>,
1449}
1450impl ParseStack {
1451    #[allow(dead_code)]
1452    #[allow(missing_docs)]
1453    pub fn new() -> Self {
1454        Self { frames: Vec::new() }
1455    }
1456    #[allow(dead_code)]
1457    #[allow(missing_docs)]
1458    pub fn push(&mut self, frame: ParseFrame) {
1459        self.frames.push(frame);
1460    }
1461    #[allow(dead_code)]
1462    #[allow(missing_docs)]
1463    pub fn pop(&mut self) -> Option<ParseFrame> {
1464        self.frames.pop()
1465    }
1466    #[allow(dead_code)]
1467    #[allow(missing_docs)]
1468    pub fn depth(&self) -> usize {
1469        self.frames.len()
1470    }
1471    #[allow(dead_code)]
1472    #[allow(missing_docs)]
1473    pub fn current_rule(&self) -> Option<&str> {
1474        self.frames.last().map(|f| f.rule.as_str())
1475    }
1476    #[allow(dead_code)]
1477    #[allow(missing_docs)]
1478    pub fn in_type(&self) -> bool {
1479        self.frames.iter().any(|f| f.in_type)
1480    }
1481    #[allow(dead_code)]
1482    #[allow(missing_docs)]
1483    pub fn in_pattern(&self) -> bool {
1484        self.frames.iter().any(|f| f.in_pattern)
1485    }
1486    #[allow(dead_code)]
1487    #[allow(missing_docs)]
1488    pub fn rules_string(&self) -> String {
1489        self.frames
1490            .iter()
1491            .map(|f| f.rule.as_str())
1492            .collect::<Vec<_>>()
1493            .join(" > ")
1494    }
1495}
1496/// A summary of parse errors across a session.
1497#[derive(Clone, Debug, Default)]
1498#[allow(missing_docs)]
1499pub struct ParseErrorSummary {
1500    /// Total errors.
1501    pub total: usize,
1502    /// Errors by file.
1503    pub by_file: Vec<(String, usize)>,
1504}
1505impl ParseErrorSummary {
1506    /// Build a summary from a parse session.
1507    #[allow(missing_docs)]
1508    pub fn from_session(session: &ParseSession) -> Self {
1509        let mut by_file = Vec::new();
1510        let mut total = 0;
1511        for r in &session.results {
1512            let n = r.errors.len();
1513            if n > 0 {
1514                by_file.push((r.filename.clone(), n));
1515                total += n;
1516            }
1517        }
1518        Self { total, by_file }
1519    }
1520    /// Whether there are no errors.
1521    #[allow(missing_docs)]
1522    pub fn is_clean(&self) -> bool {
1523        self.total == 0
1524    }
1525    /// File with the most errors (if any).
1526    #[allow(missing_docs)]
1527    pub fn worst_file(&self) -> Option<&str> {
1528        self.by_file
1529            .iter()
1530            .max_by_key(|(_, n)| *n)
1531            .map(|(f, _)| f.as_str())
1532    }
1533}