// Source: oxilean_parse/incremental/types.rs
1//! Auto-generated module
2//!
3//! 🤖 Generated with [SplitRS](https://github.com/cool-japan/splitrs)
4
5use super::functions::*;
6use std::collections::HashMap;
7use std::ops::Range;
8
9/// A red node: a green node viewed at a specific byte offset.
10#[allow(missing_docs)]
11pub struct RedNode<'a> {
12    pub green: &'a GreenNode,
13    pub offset: usize,
14}
15impl<'a> RedNode<'a> {
16    #[allow(missing_docs)]
17    pub fn new(green: &'a GreenNode, offset: usize) -> Self {
18        Self { green, offset }
19    }
20    #[allow(missing_docs)]
21    pub fn range(&self) -> Range<usize> {
22        self.offset..self.offset + self.green.width
23    }
24    #[allow(missing_docs)]
25    pub fn children(&self) -> Vec<RedNode<'_>> {
26        let mut pos = self.offset;
27        self.green
28            .children
29            .iter()
30            .map(|child| {
31                let node = RedNode::new(child, pos);
32                pos += child.width;
33                node
34            })
35            .collect()
36    }
37    #[allow(missing_docs)]
38    pub fn kind(&self) -> &SyntaxKind {
39        &self.green.kind
40    }
41}
42/// A green node: an immutable, position-independent syntax tree node.
43#[derive(Debug, Clone)]
44#[allow(missing_docs)]
45pub struct GreenNode {
46    pub kind: SyntaxKind,
47    pub width: usize,
48    pub children: Vec<GreenNode>,
49    pub text: Option<String>,
50}
51impl GreenNode {
52    #[allow(missing_docs)]
53    pub fn leaf(kind: SyntaxKind, text: impl Into<String>) -> Self {
54        let text = text.into();
55        let width = text.len();
56        GreenNode {
57            kind,
58            width,
59            children: Vec::new(),
60            text: Some(text),
61        }
62    }
63    #[allow(missing_docs)]
64    pub fn interior(kind: SyntaxKind, children: Vec<GreenNode>) -> Self {
65        let width = children.iter().map(|c| c.width).sum();
66        GreenNode {
67            kind,
68            width,
69            children,
70            text: None,
71        }
72    }
73    #[allow(missing_docs)]
74    pub fn is_leaf(&self) -> bool {
75        self.children.is_empty()
76    }
77    #[allow(missing_docs)]
78    pub fn to_text(&self) -> String {
79        if let Some(t) = &self.text {
80            return t.clone();
81        }
82        self.children.iter().map(|c| c.to_text()).collect()
83    }
84}
85/// A simple incremental lexer that caches line-level token fingerprints.
86#[allow(missing_docs)]
87pub struct IncrementalLexer {
88    line_fingerprints: Vec<Option<TokenFingerprint>>,
89    line_tokens: Vec<Vec<String>>,
90}
91impl IncrementalLexer {
92    #[allow(missing_docs)]
93    pub fn new() -> Self {
94        Self {
95            line_fingerprints: Vec::new(),
96            line_tokens: Vec::new(),
97        }
98    }
99    #[allow(missing_docs)]
100    pub fn lex(&mut self, source: &str, dirty_lines: &[usize]) -> Vec<String> {
101        let lines: Vec<&str> = source.lines().collect();
102        self.line_fingerprints.resize(lines.len(), None);
103        self.line_tokens.resize(lines.len(), Vec::new());
104        for (i, line) in lines.iter().enumerate() {
105            let fp = TokenFingerprint::compute(&[line]);
106            if dirty_lines.contains(&i) || self.line_fingerprints[i].as_ref() != Some(&fp) {
107                let tokens = self.tokenize_line(line);
108                self.line_fingerprints[i] = Some(fp);
109                self.line_tokens[i] = tokens;
110            }
111        }
112        self.line_tokens.iter().flatten().cloned().collect()
113    }
114    fn tokenize_line(&self, line: &str) -> Vec<String> {
115        line.split_whitespace().map(String::from).collect()
116    }
117    #[allow(missing_docs)]
118    pub fn invalidate_lines(&mut self, range: Range<usize>) {
119        for i in range {
120            if i < self.line_fingerprints.len() {
121                self.line_fingerprints[i] = None;
122            }
123        }
124    }
125    #[allow(missing_docs)]
126    pub fn reset(&mut self) {
127        self.line_fingerprints.clear();
128        self.line_tokens.clear();
129    }
130}
131/// A cache mapping source ranges to AST node IDs for incremental updates.
132#[allow(dead_code)]
133#[allow(missing_docs)]
134pub struct NodeRangeCache {
135    entries: std::collections::BTreeMap<(usize, usize), u32>,
136}
137impl NodeRangeCache {
138    #[allow(dead_code)]
139    #[allow(missing_docs)]
140    pub fn new() -> Self {
141        Self {
142            entries: std::collections::BTreeMap::new(),
143        }
144    }
145    #[allow(dead_code)]
146    #[allow(missing_docs)]
147    pub fn insert(&mut self, start: usize, end: usize, node_id: u32) {
148        self.entries.insert((start, end), node_id);
149    }
150    #[allow(dead_code)]
151    #[allow(missing_docs)]
152    pub fn lookup(&self, start: usize, end: usize) -> Option<u32> {
153        self.entries.get(&(start, end)).copied()
154    }
155    #[allow(dead_code)]
156    #[allow(missing_docs)]
157    pub fn invalidate_range(&mut self, inv: &InvalidatedRange) {
158        let to_remove: Vec<_> = self
159            .entries
160            .keys()
161            .filter(|(s, e)| *s < inv.end && *e > inv.start)
162            .copied()
163            .collect();
164        for k in to_remove {
165            self.entries.remove(&k);
166        }
167    }
168    #[allow(dead_code)]
169    #[allow(missing_docs)]
170    pub fn size(&self) -> usize {
171        self.entries.len()
172    }
173}
/// Urgency of a reparse request; the derived `Ord` follows declaration
/// order, so `Low < Normal < High < Urgent`.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum ReparsePriority {
    /// Lowest priority.
    Low,
    /// Default priority.
    Normal,
    /// Above-normal priority.
    High,
    /// Highest priority; `ReparseQueue::has_urgent` checks for this variant.
    Urgent,
}
/// Kind of syntactic scope tracked during incremental parsing
/// (see `IncrScopeEntry` / `IncrScopeStack`).
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ScopeKind2 {
    /// Scope opened by `(`.
    Paren,
    /// Scope opened by `[`.
    Bracket,
    /// Scope opened by `{`.
    Brace,
    /// Scope opened by a `do` keyword.
    Do,
    /// Scope opened by a `where` keyword.
    Where,
    /// Scope opened by a `let` keyword.
    Let,
}
/// A "fiber" representing a partial parse continuation.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ParseFiber {
    pub id: u64,
    pub start_offset: usize,
    pub depth: usize,
    pub state_repr: String,
}
impl ParseFiber {
    /// Creates a fiber with the given identity, start offset, nesting
    /// depth, and opaque state representation.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(id: u64, start: usize, depth: usize, state: impl Into<String>) -> Self {
        let state_repr = state.into();
        ParseFiber {
            id,
            start_offset: start,
            depth,
            state_repr,
        }
    }
    /// A fiber at nesting depth zero is parked at the top level.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_at_root(&self) -> bool {
        matches!(self.depth, 0)
    }
}
220/// An undo/redo stack for source text.
221#[allow(missing_docs)]
222pub struct UndoRedoStack {
223    undo_stack: Vec<String>,
224    redo_stack: Vec<String>,
225    current: String,
226}
227impl UndoRedoStack {
228    #[allow(missing_docs)]
229    pub fn new(initial: impl Into<String>) -> Self {
230        Self {
231            undo_stack: Vec::new(),
232            redo_stack: Vec::new(),
233            current: initial.into(),
234        }
235    }
236    #[allow(missing_docs)]
237    pub fn push(&mut self, new_source: impl Into<String>) {
238        let new_source = new_source.into();
239        self.undo_stack
240            .push(std::mem::replace(&mut self.current, new_source));
241        self.redo_stack.clear();
242    }
243    #[allow(missing_docs)]
244    pub fn apply(&mut self, change: &TextChange) {
245        let new_source = change.apply(&self.current);
246        self.push(new_source);
247    }
248    #[allow(missing_docs)]
249    pub fn undo(&mut self) -> Option<&str> {
250        if let Some(prev) = self.undo_stack.pop() {
251            let old_current = std::mem::replace(&mut self.current, prev);
252            self.redo_stack.push(old_current);
253            Some(&self.current)
254        } else {
255            None
256        }
257    }
258    #[allow(missing_docs)]
259    pub fn redo(&mut self) -> Option<&str> {
260        if let Some(next) = self.redo_stack.pop() {
261            let old_current = std::mem::replace(&mut self.current, next);
262            self.undo_stack.push(old_current);
263            Some(&self.current)
264        } else {
265            None
266        }
267    }
268    #[allow(missing_docs)]
269    pub fn current(&self) -> &str {
270        &self.current
271    }
272    #[allow(missing_docs)]
273    pub fn can_undo(&self) -> bool {
274        !self.undo_stack.is_empty()
275    }
276    #[allow(missing_docs)]
277    pub fn can_redo(&self) -> bool {
278        !self.redo_stack.is_empty()
279    }
280    #[allow(missing_docs)]
281    pub fn undo_depth(&self) -> usize {
282        self.undo_stack.len()
283    }
284    #[allow(missing_docs)]
285    pub fn redo_depth(&self) -> usize {
286        self.redo_stack.len()
287    }
288}
/// A version counter for incremental parsing state.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ParseVersion {
    // Monotonically increasing edit counter.
    version: u64,
    // Value of `version` at the time of the last full parse.
    last_full_parse: u64,
}
impl ParseVersion {
    /// Starts at version 0 with a full parse recorded at 0.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        ParseVersion {
            version: 0,
            last_full_parse: 0,
        }
    }
    /// Bumps the version and returns the new value.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn increment(&mut self) -> u64 {
        self.version = self.version + 1;
        self.version
    }
    /// Records that a full parse happened at the current version.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn mark_full_parse(&mut self) {
        self.last_full_parse = self.version;
    }
    /// The current version number.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn current(&self) -> u64 {
        self.version
    }
    /// How many edits occurred since the last full parse.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn edits_since_full_parse(&self) -> u64 {
        self.version - self.last_full_parse
    }
    /// Whether the edit backlog has reached `threshold`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn needs_full_reparse(&self, threshold: u64) -> bool {
        self.edits_since_full_parse() >= threshold
    }
}
/// A text change applied to the source
#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub struct TextChange {
    pub range: Range<usize>,
    pub new_text: String,
}
impl TextChange {
    /// Replacement of `start..end` with `new_text`.
    #[allow(missing_docs)]
    pub fn new(start: usize, end: usize, new_text: impl Into<String>) -> Self {
        TextChange {
            range: start..end,
            new_text: new_text.into(),
        }
    }
    /// Pure insertion at `at` (empty range).
    #[allow(missing_docs)]
    pub fn insertion(at: usize, text: impl Into<String>) -> Self {
        TextChange {
            range: at..at,
            new_text: text.into(),
        }
    }
    /// Pure deletion of `start..end`.
    #[allow(missing_docs)]
    pub fn deletion(start: usize, end: usize) -> Self {
        TextChange {
            range: start..end,
            new_text: String::new(),
        }
    }
    /// Replacement of `start..end` with `text` (alias of `new`).
    #[allow(missing_docs)]
    pub fn replacement(start: usize, end: usize, text: impl Into<String>) -> Self {
        TextChange {
            range: start..end,
            new_text: text.into(),
        }
    }
    /// Applies this change to `source`, returning the edited string.
    ///
    /// Out-of-bounds offsets are clamped to the source length. Fix: an
    /// inverted range (`start > end`) is now treated as empty at `start`;
    /// previously the text between `end` and `start` was duplicated.
    /// NOTE(review): offsets are byte indices; slicing panics on a
    /// non-char-boundary offset — callers are assumed to pass valid ones.
    #[allow(missing_docs)]
    pub fn apply(&self, source: &str) -> String {
        let start = self.range.start.min(source.len());
        let end = self.range.end.min(source.len()).max(start);
        let mut result = String::with_capacity(source.len() + self.new_text.len());
        result.push_str(&source[..start]);
        result.push_str(&self.new_text);
        result.push_str(&source[end..]);
        result
    }
    /// Net change in length: inserted bytes minus removed bytes.
    ///
    /// Fix: uses `saturating_sub` so an inverted range no longer
    /// underflows (which panicked in debug builds).
    #[allow(missing_docs)]
    pub fn delta(&self) -> i64 {
        (self.new_text.len() as i64) - (self.range.end.saturating_sub(self.range.start) as i64)
    }
    /// Empty range and non-empty text.
    #[allow(missing_docs)]
    pub fn is_insertion(&self) -> bool {
        self.range.start == self.range.end && !self.new_text.is_empty()
    }
    /// Non-empty range and empty text.
    #[allow(missing_docs)]
    pub fn is_deletion(&self) -> bool {
        self.new_text.is_empty() && self.range.start < self.range.end
    }
    /// Non-empty range and non-empty text.
    #[allow(missing_docs)]
    pub fn is_replacement(&self) -> bool {
        !self.new_text.is_empty() && self.range.start < self.range.end
    }
}
/// A rope-like structure for efficient incremental text editing.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct SimpleRope {
    // Text fragments; concatenated in order they form the full text.
    pub(crate) chunks: Vec<String>,
    // Total byte length across all chunks.
    pub(crate) len: usize,
}
impl SimpleRope {
    /// Builds a rope holding `text` as a single chunk.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(text: impl Into<String>) -> Self {
        let chunk = text.into();
        let len = chunk.len();
        SimpleRope {
            chunks: vec![chunk],
            len,
        }
    }
    /// Concatenates all chunks into one `String`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn as_string(&self) -> String {
        self.chunks.concat()
    }
    /// Total byte length.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.len
    }
    /// Whether the rope holds no text.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    /// Inserts `text` at byte position `pos` (clamped to the end),
    /// collapsing the rope back into a single chunk.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn insert(&mut self, pos: usize, text: &str) {
        let mut full = self.as_string();
        let pos = pos.min(full.len());
        full.insert_str(pos, text);
        self.len = full.len();
        self.chunks = vec![full];
    }
    /// Deletes the bytes in `[start, end)` (both clamped to the length),
    /// collapsing the rope back into a single chunk.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn delete(&mut self, start: usize, end: usize) {
        let full = self.as_string();
        let start = start.min(full.len());
        let end = end.min(full.len());
        let mut remaining = String::with_capacity(full.len());
        remaining.push_str(&full[..start]);
        remaining.push_str(&full[end..]);
        self.len = remaining.len();
        self.chunks = vec![remaining];
    }
    /// Number of chunks currently held.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn chunk_count(&self) -> usize {
        self.chunks.len()
    }
}
453/// Tracks valid token ranges for incremental re-lexing.
454#[allow(dead_code)]
455#[allow(missing_docs)]
456pub struct TokenValidity {
457    valid_ranges: Vec<(usize, usize)>,
458}
459impl TokenValidity {
460    #[allow(dead_code)]
461    #[allow(missing_docs)]
462    pub fn new() -> Self {
463        Self {
464            valid_ranges: Vec::new(),
465        }
466    }
467    #[allow(dead_code)]
468    #[allow(missing_docs)]
469    pub fn mark_valid(&mut self, start: usize, end: usize) {
470        self.valid_ranges.push((start, end));
471    }
472    #[allow(dead_code)]
473    #[allow(missing_docs)]
474    pub fn invalidate(&mut self, range: &InvalidatedRange) {
475        self.valid_ranges
476            .retain(|(s, e)| *e <= range.start || *s >= range.end);
477    }
478    #[allow(dead_code)]
479    #[allow(missing_docs)]
480    pub fn is_valid_at(&self, pos: usize) -> bool {
481        self.valid_ranges.iter().any(|(s, e)| pos >= *s && pos < *e)
482    }
483    #[allow(dead_code)]
484    #[allow(missing_docs)]
485    pub fn valid_count(&self) -> usize {
486        self.valid_ranges.len()
487    }
488}
489/// A priority queue for reparse requests.
490#[allow(dead_code)]
491#[allow(missing_docs)]
492pub struct ReparseQueue {
493    requests: Vec<ReparseRequest>,
494}
495impl ReparseQueue {
496    #[allow(dead_code)]
497    #[allow(missing_docs)]
498    pub fn new() -> Self {
499        Self {
500            requests: Vec::new(),
501        }
502    }
503    #[allow(dead_code)]
504    #[allow(missing_docs)]
505    pub fn push(&mut self, req: ReparseRequest) {
506        self.requests.push(req);
507        self.requests.sort_by(|a, b| b.priority.cmp(&a.priority));
508    }
509    #[allow(dead_code)]
510    #[allow(missing_docs)]
511    pub fn pop(&mut self) -> Option<ReparseRequest> {
512        if self.requests.is_empty() {
513            None
514        } else {
515            Some(self.requests.remove(0))
516        }
517    }
518    #[allow(dead_code)]
519    #[allow(missing_docs)]
520    pub fn len(&self) -> usize {
521        self.requests.len()
522    }
523    #[allow(dead_code)]
524    #[allow(missing_docs)]
525    pub fn is_empty(&self) -> bool {
526        self.requests.is_empty()
527    }
528    #[allow(dead_code)]
529    #[allow(missing_docs)]
530    pub fn has_urgent(&self) -> bool {
531        self.requests
532            .iter()
533            .any(|r| r.priority == ReparsePriority::Urgent)
534    }
535}
/// A rolling checksum for incremental validation.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct IncrementalChecksum {
    // Prefix sums of the source bytes: partial_sums[i] = sum of bytes[..i].
    partial_sums: Vec<u64>,
}
impl IncrementalChecksum {
    /// Builds the prefix-sum table over the bytes of `source`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn build(source: &str) -> Self {
        let mut sums = Vec::with_capacity(source.len() + 1);
        let mut acc = 0u64;
        sums.push(acc);
        for b in source.bytes() {
            acc = acc.wrapping_add(u64::from(b));
            sums.push(acc);
        }
        Self { partial_sums: sums }
    }
    /// Sum of the bytes in `[start, end)`; both bounds are clamped into
    /// the table (and `start` to at most `end`).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn range_sum(&self, start: usize, end: usize) -> u64 {
        let hi = end.min(self.partial_sums.len().saturating_sub(1));
        let lo = start.min(hi);
        self.partial_sums[hi].wrapping_sub(self.partial_sums[lo])
    }
    /// Sum of all bytes in the source.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn total(&self) -> u64 {
        self.partial_sums.last().copied().unwrap_or(0)
    }
}
/// Represents a bracket/indentation scope for incremental scope tracking.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub struct IncrScopeEntry {
    /// Offset where the scope opens (presumably a byte offset — TODO confirm).
    pub start: usize,
    /// The delimiter/keyword kind that opened the scope.
    pub kind: ScopeKind2,
    /// Nesting depth at which the scope sits.
    pub depth: usize,
}
impl IncrScopeEntry {
    /// Creates an entry from its three fields.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(start: usize, kind: ScopeKind2, depth: usize) -> Self {
        Self { start, kind, depth }
    }
}
/// A dependency graph for declarations.
#[allow(missing_docs)]
pub struct DependencyGraph {
    // dependent -> list of its dependencies.
    edges: HashMap<String, Vec<String>>,
    // dependency -> list of declarations that depend on it.
    reverse: HashMap<String, Vec<String>>,
}
impl DependencyGraph {
    /// Creates an empty graph.
    #[allow(missing_docs)]
    pub fn new() -> Self {
        DependencyGraph {
            edges: HashMap::new(),
            reverse: HashMap::new(),
        }
    }
    /// Records that `dependent` depends on `dependency`, updating both
    /// the forward and reverse adjacency maps.
    #[allow(missing_docs)]
    pub fn add_edge(&mut self, dependent: &str, dependency: &str) {
        let fwd = self.edges.entry(dependent.to_string()).or_default();
        fwd.push(dependency.to_string());
        let rev = self.reverse.entry(dependency.to_string()).or_default();
        rev.push(dependent.to_string());
    }
    /// All declarations transitively depending on `name` (depth-first
    /// over the reverse edges; `name` itself is excluded).
    #[allow(missing_docs)]
    pub fn dependents_of(&self, name: &str) -> Vec<String> {
        let mut seen = std::collections::HashSet::new();
        let mut pending = vec![name.to_string()];
        let mut found = Vec::new();
        while let Some(node) = pending.pop() {
            let Some(backward) = self.reverse.get(&node) else {
                continue;
            };
            for dep in backward {
                if seen.insert(dep.clone()) {
                    found.push(dep.clone());
                    pending.push(dep.clone());
                }
            }
        }
        found
    }
    /// Direct dependencies of `name` (empty slice if unknown).
    #[allow(missing_docs)]
    pub fn direct_dependencies(&self, name: &str) -> &[String] {
        match self.edges.get(name) {
            Some(deps) => deps.as_slice(),
            None => &[],
        }
    }
    /// Removes `name` and scrubs it out of both adjacency maps.
    #[allow(missing_docs)]
    pub fn remove_node(&mut self, name: &str) {
        if let Some(forward) = self.edges.remove(name) {
            for dep in forward {
                if let Some(back) = self.reverse.get_mut(&dep) {
                    back.retain(|n| n != name);
                }
            }
        }
        if let Some(backward) = self.reverse.remove(name) {
            for dependent in backward {
                if let Some(fwd) = self.edges.get_mut(&dependent) {
                    fwd.retain(|n| n != name);
                }
            }
        }
    }
    /// Number of distinct names appearing in either adjacency map.
    #[allow(missing_docs)]
    pub fn node_count(&self) -> usize {
        self.edges
            .keys()
            .chain(self.reverse.keys())
            .map(String::as_str)
            .collect::<std::collections::HashSet<&str>>()
            .len()
    }
}
/// Source version tracking for LSP
#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub struct VersionedSource {
    /// Document identifier (URI).
    pub uri: String,
    /// Client-supplied document version; starts at 0.
    pub version: i32,
    /// Full document text.
    pub content: String,
}
impl VersionedSource {
    /// Creates a document at version 0 with the given URI and content.
    #[allow(missing_docs)]
    pub fn new(uri: impl Into<String>, content: impl Into<String>) -> Self {
        VersionedSource {
            uri: uri.into(),
            version: 0,
            content: content.into(),
        }
    }
    /// Applies `change` to the content.
    /// NOTE(review): this does NOT bump `version` — only `update` does.
    /// Presumably the version is advanced separately by the caller; confirm.
    #[allow(missing_docs)]
    pub fn apply_change(&mut self, change: TextChange) -> &mut Self {
        self.content = change.apply(&self.content);
        self
    }
    /// Replaces the content wholesale and records the new version.
    #[allow(missing_docs)]
    pub fn update(&mut self, new_content: impl Into<String>, new_version: i32) -> &mut Self {
        self.content = new_content.into();
        self.version = new_version;
        self
    }
    /// Byte length of the content.
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.content.len()
    }
    /// Whether the content is empty.
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.content.is_empty()
    }
    /// Converts a byte offset (clamped to the content length) into a
    /// 0-based (line, column) pair.
    /// NOTE(review): the column is a BYTE distance from the last newline,
    /// while lines are counted by scanning chars — for multi-byte UTF-8
    /// text the column is therefore a byte column, not a char column.
    #[allow(missing_docs)]
    pub fn offset_to_position(&self, offset: usize) -> (usize, usize) {
        let offset = offset.min(self.content.len());
        let before = &self.content[..offset];
        // Line = number of newlines before the offset.
        let line = before.chars().filter(|&c| c == '\n').count();
        // Column = bytes since the last newline (or since the start).
        let col = before
            .rfind('\n')
            .map(|nl| offset - nl - 1)
            .unwrap_or(offset);
        (line, col)
    }
    /// Converts a 0-based (line, col) pair back to a byte offset, or
    /// `None` if the line doesn't exist or the column is past its end.
    /// Columns are counted in chars here (offset advances by `len_utf8`),
    /// which does not exactly mirror `offset_to_position`'s byte columns
    /// for multi-byte text — NOTE(review): confirm the intended convention.
    #[allow(missing_docs)]
    pub fn position_to_offset(&self, line: usize, col: usize) -> Option<usize> {
        let mut current_line = 0usize;
        let mut line_start = 0usize;
        for (i, ch) in self.content.char_indices() {
            if current_line == line {
                // Walk the target line char by char until we reach `col`.
                let mut col_offset = 0usize;
                let mut offset = line_start;
                for c in self.content[line_start..].chars() {
                    if col_offset == col {
                        return Some(offset);
                    }
                    offset += c.len_utf8();
                    col_offset += 1;
                    if c == '\n' {
                        break;
                    }
                }
                // Allows `col` to address the position just past the last
                // char walked (end of line).
                if col_offset == col {
                    return Some(offset);
                }
                return None;
            }
            if ch == '\n' {
                current_line += 1;
                line_start = i + 1;
            }
        }
        // Target line is the final line (no trailing newline) or the
        // content is empty: clamp-check the column against its length.
        if current_line == line {
            let line_len = self.content[line_start..].len();
            if col <= line_len {
                return Some(line_start + col);
            }
        }
        None
    }
}
/// A token range that becomes invalid after an edit.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct InvalidatedRange {
    pub start: usize,
    pub end: usize,
}
impl InvalidatedRange {
    /// Builds the half-open range `[start, end)`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(start: usize, end: usize) -> Self {
        InvalidatedRange { start, end }
    }
    /// Number of positions covered.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.end - self.start
    }
    /// True when the range covers nothing (`end <= start`).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.end <= self.start
    }
    /// Whether `pos` lies inside the half-open range.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn contains(&self, pos: usize) -> bool {
        (self.start..self.end).contains(&pos)
    }
    /// Whether the two half-open ranges share any position.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn overlaps(&self, other: &Self) -> bool {
        self.start < other.end && self.end > other.start
    }
    /// Smallest range covering both `self` and `other`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn merge(&self, other: &Self) -> Self {
        InvalidatedRange {
            start: self.start.min(other.start),
            end: self.end.max(other.end),
        }
    }
}
/// Represents a "dirty" region in the source that needs re-parsing.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct DirtyRegion {
    pub start_line: usize,
    pub end_line: usize,
    pub start_byte: usize,
    pub end_byte: usize,
}
impl DirtyRegion {
    /// Builds a region from its line span and byte span.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(start_line: usize, end_line: usize, start_byte: usize, end_byte: usize) -> Self {
        DirtyRegion {
            start_line,
            end_line,
            start_byte,
            end_byte,
        }
    }
    /// Number of lines touched; the line span is inclusive, so a
    /// single-line region counts as 1.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn line_count(&self) -> usize {
        1 + self.end_line.saturating_sub(self.start_line)
    }
    /// Number of bytes in the region (0 when the span is inverted).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn byte_count(&self) -> usize {
        self.end_byte.saturating_sub(self.start_byte)
    }
    /// Whether the region starts and ends on the same line.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_single_line(&self) -> bool {
        self.start_line == self.end_line
    }
}
/// A simple persistent (copy-on-write) vector.
#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub struct PersistentVec<T: Clone> {
    // Shared, immutable backing storage; every mutation builds a new Vec.
    data: std::rc::Rc<Vec<T>>,
}
impl<T: Clone> PersistentVec<T> {
    /// Creates an empty persistent vector.
    #[allow(missing_docs)]
    pub fn new() -> Self {
        PersistentVec {
            data: std::rc::Rc::new(Vec::new()),
        }
    }
    /// Returns a new vector with `value` appended; `self` is untouched.
    #[allow(missing_docs)]
    pub fn push(&self, value: T) -> Self {
        let mut items = self.data.to_vec();
        items.push(value);
        PersistentVec {
            data: std::rc::Rc::new(items),
        }
    }
    /// Returns a new vector with index `idx` replaced by `value`, or
    /// `None` when `idx` is out of bounds; `self` is untouched.
    #[allow(missing_docs)]
    pub fn set(&self, idx: usize, value: T) -> Option<Self> {
        if idx < self.data.len() {
            let mut items = self.data.to_vec();
            items[idx] = value;
            Some(PersistentVec {
                data: std::rc::Rc::new(items),
            })
        } else {
            None
        }
    }
    /// Checked element access.
    #[allow(missing_docs)]
    pub fn get(&self, idx: usize) -> Option<&T> {
        self.data.get(idx)
    }
    /// Number of elements.
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.data.len()
    }
    /// Whether the vector has no elements.
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
    /// Borrowing iterator over the elements.
    #[allow(missing_docs)]
    pub fn iter(&self) -> std::slice::Iter<'_, T> {
        self.data.iter()
    }
}
/// Represents a single source edit (insert or delete).
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub struct SourceEdit {
    pub start: usize,
    pub end: usize,
    pub new_text: String,
}
impl SourceEdit {
    /// Insertion of `text` at `pos` (empty span).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn insert(pos: usize, text: impl Into<String>) -> Self {
        SourceEdit {
            start: pos,
            end: pos,
            new_text: text.into(),
        }
    }
    /// Deletion of the span `[start, end)`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn delete(start: usize, end: usize) -> Self {
        SourceEdit {
            start,
            end,
            new_text: String::new(),
        }
    }
    /// Replacement of `[start, end)` with `text`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn replace(start: usize, end: usize, text: impl Into<String>) -> Self {
        SourceEdit {
            start,
            end,
            new_text: text.into(),
        }
    }
    /// Empty span with non-empty text.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_insert(&self) -> bool {
        !self.new_text.is_empty() && self.start == self.end
    }
    /// Non-empty span with empty text.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_delete(&self) -> bool {
        self.new_text.is_empty() && self.start < self.end
    }
    /// Non-empty span with non-empty text.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_replace(&self) -> bool {
        !self.new_text.is_empty() && self.start < self.end
    }
    /// Net change in length: inserted bytes minus removed bytes.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn delta(&self) -> i64 {
        let removed = (self.end - self.start) as i64;
        self.new_text.len() as i64 - removed
    }
}
/// A map from byte offset to token ID for incremental relexing.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct OffsetToTokenMap {
    // token start offset -> token id; ordered so we can range-query.
    map: std::collections::BTreeMap<usize, u32>,
}
impl OffsetToTokenMap {
    /// Creates an empty map.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self {
            map: std::collections::BTreeMap::new(),
        }
    }
    /// Records the token starting at `offset`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn insert(&mut self, offset: usize, token_id: u32) {
        self.map.insert(offset, token_id);
    }
    /// The token whose start is the greatest one at or before `offset`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn token_at(&self, offset: usize) -> Option<u32> {
        self.map.range(..=offset).next_back().map(|(_, &id)| id)
    }
    /// Removes every entry at or after `offset`.
    ///
    /// `split_off` detaches the whole tail in O(log n); dropping the
    /// returned map frees it — no collect-then-remove loop needed.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn invalidate_from(&mut self, offset: usize) {
        self.map.split_off(&offset);
    }
    /// Shifts every entry at or after `from` by `delta` bytes, clamping
    /// shifted keys at 0 (a negatively-shifted key overwrites any entry
    /// already at the destination, as before).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn shift(&mut self, from: usize, delta: i64) {
        // Detach the affected tail in one O(log n) operation, then
        // re-insert each entry at its shifted key (ascending order, same
        // collision behavior as the previous collect/remove/insert code).
        let tail = self.map.split_off(&from);
        for (k, v) in tail {
            let new_k = (k as i64 + delta).max(0) as usize;
            self.map.insert(new_k, v);
        }
    }
    /// Number of tracked tokens.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn count(&self) -> usize {
        self.map.len()
    }
}
976/// A stack of scopes for incremental parsing.
977#[allow(dead_code)]
978#[allow(missing_docs)]
979pub struct IncrScopeStack {
980    stack: Vec<IncrScopeEntry>,
981}
982impl IncrScopeStack {
983    #[allow(dead_code)]
984    #[allow(missing_docs)]
985    pub fn new() -> Self {
986        Self { stack: Vec::new() }
987    }
988    #[allow(dead_code)]
989    #[allow(missing_docs)]
990    pub fn push(&mut self, entry: IncrScopeEntry) {
991        self.stack.push(entry);
992    }
993    #[allow(dead_code)]
994    #[allow(missing_docs)]
995    pub fn pop(&mut self) -> Option<IncrScopeEntry> {
996        self.stack.pop()
997    }
998    #[allow(dead_code)]
999    #[allow(missing_docs)]
1000    pub fn peek(&self) -> Option<&IncrScopeEntry> {
1001        self.stack.last()
1002    }
1003    #[allow(dead_code)]
1004    #[allow(missing_docs)]
1005    pub fn depth(&self) -> usize {
1006        self.stack.len()
1007    }
1008    #[allow(dead_code)]
1009    #[allow(missing_docs)]
1010    pub fn is_empty(&self) -> bool {
1011        self.stack.is_empty()
1012    }
1013    #[allow(dead_code)]
1014    #[allow(missing_docs)]
1015    pub fn current_scope(&self) -> Option<ScopeKind2> {
1016        self.stack.last().map(|e| e.kind)
1017    }
1018}
/// Represents the "reachability" of tokens from a parse entry point.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct TokenReachability {
    reachable: std::collections::HashSet<usize>,
}
impl TokenReachability {
    /// Creates a tracker with no offsets marked reachable.
    #[allow(dead_code)]
    pub fn new() -> Self {
        TokenReachability {
            reachable: Default::default(),
        }
    }
    /// Records that the token at byte `offset` can be reached.
    #[allow(dead_code)]
    pub fn mark_reachable(&mut self, offset: usize) {
        self.reachable.insert(offset);
    }
    /// Whether `offset` has been marked reachable.
    #[allow(dead_code)]
    pub fn is_reachable(&self, offset: usize) -> bool {
        self.reachable.contains(&offset)
    }
    /// Number of distinct reachable offsets recorded so far.
    #[allow(dead_code)]
    pub fn reachable_count(&self) -> usize {
        self.reachable.len()
    }
    /// Fraction of `total_tokens` marked reachable; 0.0 when the total is 0.
    #[allow(dead_code)]
    pub fn coverage_fraction(&self, total_tokens: usize) -> f64 {
        match total_tokens {
            0 => 0.0,
            n => self.reachable.len() as f64 / n as f64,
        }
    }
}
1058/// A pool of parse fibers for parallel/concurrent parsing.
1059#[allow(dead_code)]
1060#[allow(missing_docs)]
1061pub struct FiberPool {
1062    fibers: Vec<ParseFiber>,
1063    next_id: u64,
1064}
1065impl FiberPool {
1066    #[allow(dead_code)]
1067    #[allow(missing_docs)]
1068    pub fn new() -> Self {
1069        Self {
1070            fibers: Vec::new(),
1071            next_id: 0,
1072        }
1073    }
1074    #[allow(dead_code)]
1075    #[allow(missing_docs)]
1076    pub fn spawn(&mut self, start: usize, depth: usize, state: impl Into<String>) -> u64 {
1077        let id = self.next_id;
1078        self.next_id += 1;
1079        self.fibers.push(ParseFiber::new(id, start, depth, state));
1080        id
1081    }
1082    #[allow(dead_code)]
1083    #[allow(missing_docs)]
1084    pub fn get(&self, id: u64) -> Option<&ParseFiber> {
1085        self.fibers.iter().find(|f| f.id == id)
1086    }
1087    #[allow(dead_code)]
1088    #[allow(missing_docs)]
1089    pub fn remove(&mut self, id: u64) {
1090        self.fibers.retain(|f| f.id != id);
1091    }
1092    #[allow(dead_code)]
1093    #[allow(missing_docs)]
1094    pub fn active_count(&self) -> usize {
1095        self.fibers.len()
1096    }
1097}
1098/// Manages multiple snapshots with a limit.
1099#[allow(dead_code)]
1100#[allow(missing_docs)]
1101pub struct SnapshotManager {
1102    snapshots: Vec<ParseSnapshot>,
1103    max_snapshots: usize,
1104}
1105impl SnapshotManager {
1106    #[allow(dead_code)]
1107    #[allow(missing_docs)]
1108    pub fn new(max_snapshots: usize) -> Self {
1109        Self {
1110            snapshots: Vec::new(),
1111            max_snapshots,
1112        }
1113    }
1114    #[allow(dead_code)]
1115    #[allow(missing_docs)]
1116    pub fn save(&mut self, snapshot: ParseSnapshot) {
1117        if self.snapshots.len() >= self.max_snapshots {
1118            self.snapshots.remove(0);
1119        }
1120        self.snapshots.push(snapshot);
1121    }
1122    #[allow(dead_code)]
1123    #[allow(missing_docs)]
1124    pub fn best(&self) -> Option<&ParseSnapshot> {
1125        self.snapshots.iter().min_by_key(|s| s.error_count)
1126    }
1127    #[allow(dead_code)]
1128    #[allow(missing_docs)]
1129    pub fn latest(&self) -> Option<&ParseSnapshot> {
1130        self.snapshots.last()
1131    }
1132    #[allow(dead_code)]
1133    #[allow(missing_docs)]
1134    pub fn count(&self) -> usize {
1135        self.snapshots.len()
1136    }
1137}
1138/// A simple edit buffer that accumulates edits before applying them.
1139#[allow(dead_code)]
1140#[allow(missing_docs)]
1141pub struct EditBuffer {
1142    pending: Vec<SourceEdit>,
1143    max_pending: usize,
1144}
1145impl EditBuffer {
1146    #[allow(dead_code)]
1147    #[allow(missing_docs)]
1148    pub fn new(max_pending: usize) -> Self {
1149        Self {
1150            pending: Vec::new(),
1151            max_pending,
1152        }
1153    }
1154    #[allow(dead_code)]
1155    #[allow(missing_docs)]
1156    pub fn add(&mut self, edit: SourceEdit) -> bool {
1157        if self.pending.len() >= self.max_pending {
1158            return false;
1159        }
1160        self.pending.push(edit);
1161        true
1162    }
1163    #[allow(dead_code)]
1164    #[allow(missing_docs)]
1165    pub fn flush(&mut self) -> Vec<SourceEdit> {
1166        std::mem::take(&mut self.pending)
1167    }
1168    #[allow(dead_code)]
1169    #[allow(missing_docs)]
1170    pub fn pending_count(&self) -> usize {
1171        self.pending.len()
1172    }
1173    #[allow(dead_code)]
1174    #[allow(missing_docs)]
1175    pub fn is_empty(&self) -> bool {
1176        self.pending.is_empty()
1177    }
1178    #[allow(dead_code)]
1179    #[allow(missing_docs)]
1180    pub fn total_delta(&self) -> i64 {
1181        self.pending.iter().map(|e| e.delta()).sum()
1182    }
1183}
1184/// A session-level incremental parse manager.
1185#[allow(dead_code)]
1186#[allow(missing_docs)]
1187pub struct IncrementalSession {
1188    pub source: SimpleRope,
1189    pub version: ParseVersion,
1190    pub errors: IncrementalErrorMap,
1191    pub stats: IncrParseStats,
1192}
1193impl IncrementalSession {
1194    #[allow(dead_code)]
1195    #[allow(missing_docs)]
1196    pub fn new(source: impl Into<String>) -> Self {
1197        Self {
1198            source: SimpleRope::new(source),
1199            version: ParseVersion::new(),
1200            errors: IncrementalErrorMap::new(),
1201            stats: IncrParseStats::new(),
1202        }
1203    }
1204    #[allow(dead_code)]
1205    #[allow(missing_docs)]
1206    pub fn apply_edit(&mut self, edit: SourceEdit) {
1207        let v = self.version.increment();
1208        let _ = v;
1209        let start = edit.start;
1210        let end = edit.end + edit.new_text.len();
1211        self.errors.clear_range(start, end);
1212        self.stats.total_edits += 1;
1213        let delta = edit.delta();
1214        let src = self.source.as_string();
1215        let new_src = apply_edits(&src, &[edit]);
1216        self.source = SimpleRope::new(new_src);
1217        let _ = delta;
1218    }
1219    #[allow(dead_code)]
1220    #[allow(missing_docs)]
1221    pub fn source_text(&self) -> String {
1222        self.source.as_string()
1223    }
1224    #[allow(dead_code)]
1225    #[allow(missing_docs)]
1226    pub fn has_errors(&self) -> bool {
1227        self.errors.total_error_count() > 0
1228    }
1229    #[allow(dead_code)]
1230    #[allow(missing_docs)]
1231    pub fn current_version(&self) -> u64 {
1232        self.version.current()
1233    }
1234}
/// Represents the set of changed lines in a diff.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct LineDiff {
    changed_lines: Vec<(usize, String, String)>,
}
impl LineDiff {
    /// Creates an empty diff.
    #[allow(dead_code)]
    pub fn new() -> Self {
        LineDiff {
            changed_lines: Vec::new(),
        }
    }
    /// Records that `line` changed from `old` to `new`.
    #[allow(dead_code)]
    pub fn add_change(&mut self, line: usize, old: impl Into<String>, new: impl Into<String>) {
        let record = (line, old.into(), new.into());
        self.changed_lines.push(record);
    }
    /// Number of recorded line changes.
    #[allow(dead_code)]
    pub fn count(&self) -> usize {
        self.changed_lines.len()
    }
    /// Line numbers of every recorded change, in insertion order.
    #[allow(dead_code)]
    pub fn affected_lines(&self) -> Vec<usize> {
        self.changed_lines
            .iter()
            .map(|&(line, _, _)| line)
            .collect()
    }
}
/// A fingerprint computed from a slice of tokens.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[allow(missing_docs)]
pub struct TokenFingerprint(u64);
impl TokenFingerprint {
    /// Hashes `tokens` with an FNV-style scheme, folding a separator marker
    /// (0x1f) in after each token so token boundaries affect the result.
    pub fn compute(tokens: &[&str]) -> Self {
        const SEED: u64 = 0xcbf2_9ce4_8422_2325;
        const PRIME: u64 = 0x0000_0100_0000_01B3;
        let hash = tokens.iter().fold(SEED, |acc, tok| {
            let mixed = tok
                .bytes()
                .fold(acc, |h, byte| (h ^ byte as u64).wrapping_mul(PRIME));
            mixed ^ 0x1f
        });
        TokenFingerprint(hash)
    }
    /// The raw 64-bit fingerprint value.
    pub fn value(&self) -> u64 {
        self.0
    }
}
1287/// A transaction groups multiple `TextChange`s into an atomic unit.
1288#[allow(missing_docs)]
1289pub struct Transaction {
1290    changes: Vec<TextChange>,
1291    snapshot: Option<String>,
1292}
1293impl Transaction {
1294    #[allow(missing_docs)]
1295    pub fn new() -> Self {
1296        Self {
1297            changes: Vec::new(),
1298            snapshot: None,
1299        }
1300    }
1301    #[allow(missing_docs)]
1302    pub fn begin(source: &str) -> Self {
1303        Self {
1304            changes: Vec::new(),
1305            snapshot: Some(source.to_string()),
1306        }
1307    }
1308    #[allow(missing_docs)]
1309    pub fn add(&mut self, change: TextChange) {
1310        self.changes.push(change);
1311    }
1312    #[allow(missing_docs)]
1313    pub fn commit(&self, source: &str) -> String {
1314        let mut s = source.to_string();
1315        let mut sorted = self.changes.clone();
1316        sorted.sort_by(|a, b| b.range.start.cmp(&a.range.start));
1317        for change in &sorted {
1318            s = change.apply(&s);
1319        }
1320        s
1321    }
1322    #[allow(missing_docs)]
1323    pub fn rollback(&self) -> Option<&str> {
1324        self.snapshot.as_deref()
1325    }
1326    #[allow(missing_docs)]
1327    pub fn len(&self) -> usize {
1328        self.changes.len()
1329    }
1330    #[allow(missing_docs)]
1331    pub fn is_empty(&self) -> bool {
1332        self.changes.is_empty()
1333    }
1334}
/// A map of byte offset to error messages for incremental error tracking.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct IncrementalErrorMap {
    errors: std::collections::BTreeMap<usize, Vec<String>>,
}
impl IncrementalErrorMap {
    /// Creates an empty error map.
    #[allow(dead_code)]
    pub fn new() -> Self {
        IncrementalErrorMap {
            errors: Default::default(),
        }
    }
    /// Records `msg` as an error at byte `offset`.
    #[allow(dead_code)]
    pub fn add_error(&mut self, offset: usize, msg: impl Into<String>) {
        let bucket = self.errors.entry(offset).or_insert_with(Vec::new);
        bucket.push(msg.into());
    }
    /// Removes every error recorded at an offset inside `[start, end)`.
    #[allow(dead_code)]
    pub fn clear_range(&mut self, start: usize, end: usize) {
        self.errors.retain(|&offset, _| offset < start || offset >= end);
    }
    /// Borrows all messages recorded at offsets inside `[start, end)`,
    /// in ascending offset order.
    #[allow(dead_code)]
    pub fn errors_in_range(&self, start: usize, end: usize) -> Vec<&String> {
        let mut found = Vec::new();
        for (_, msgs) in self.errors.range(start..end) {
            found.extend(msgs.iter());
        }
        found
    }
    /// Total number of messages across all offsets.
    #[allow(dead_code)]
    pub fn total_error_count(&self) -> usize {
        self.errors.values().fold(0, |acc, msgs| acc + msgs.len())
    }
}
/// A cached parse result for one declaration
#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub struct ParsedDecl {
    /// Byte range of the declaration in the source it was parsed from.
    pub source_range: Range<usize>,
    /// Declared identifier, when one could be extracted from the header line.
    pub name: Option<String>,
    /// Full text of the declaration as it appeared in the source.
    pub decl_text: String,
    /// False once an overlapping edit has invalidated this cache entry.
    pub valid: bool,
}
/// The incremental parser state — tracks source + cache
///
/// Declarations are cached keyed by the byte offset at which they start.
/// NOTE(review): `reparse_dirty` re-keys only declarations overlapping a
/// dirty range; after a length-changing edit, cached entries outside the
/// dirty region keep their pre-edit offsets, so stale entries can linger —
/// confirm this is acceptable to consumers of `declarations()`.
#[allow(missing_docs)]
pub struct IncrementalParser {
    /// Current full source text.
    source: String,
    /// Parsed declarations keyed by their start byte offset.
    cache: HashMap<usize, ParsedDecl>,
    /// Byte ranges touched by edits since the last `reparse_dirty`.
    dirty_ranges: Vec<Range<usize>>,
    /// Bumped once per applied change.
    version: u32,
}
impl IncrementalParser {
    /// Builds a parser over `source` and fills the cache: with no dirty
    /// ranges recorded, `reparse_dirty` parses every declaration.
    #[allow(missing_docs)]
    pub fn new(source: impl Into<String>) -> Self {
        let source = source.into();
        let mut parser = IncrementalParser {
            source,
            cache: HashMap::new(),
            dirty_ranges: Vec::new(),
            version: 0,
        };
        parser.reparse_dirty();
        parser
    }
    /// Applies one change, bumps the version, and marks dirty the union of
    /// the replaced span and the span of the inserted text.
    #[allow(missing_docs)]
    pub fn apply_change(&mut self, change: TextChange) {
        let affected_start = change.range.start;
        let affected_end = change.range.start + change.new_text.len();
        self.source = change.apply(&self.source);
        self.version += 1;
        // Cover both shrinking edits (old span longer) and growing edits.
        let dirty_end = affected_end.max(change.range.end);
        self.mark_dirty(affected_start..dirty_end);
    }
    /// Applies several changes back-to-front (descending start offset) so
    /// earlier offsets stay valid while later spans are rewritten first.
    #[allow(missing_docs)]
    pub fn apply_changes(&mut self, mut changes: Vec<TextChange>) {
        changes.sort_by(|a, b| b.range.start.cmp(&a.range.start));
        for change in changes {
            self.apply_change(change);
        }
    }
    /// Current full source text.
    #[allow(missing_docs)]
    pub fn source(&self) -> &str {
        &self.source
    }
    /// Number of changes applied so far.
    #[allow(missing_docs)]
    pub fn version(&self) -> u32 {
        self.version
    }
    /// Splits `source` into `(start_offset, text)` chunks, one per
    /// declaration. A declaration begins on a line whose first column starts
    /// with one of the keywords and runs until the next such line; any text
    /// before the first keyword line is not returned.
    #[allow(missing_docs)]
    pub fn split_declarations(source: &str) -> Vec<(usize, &str)> {
        let keywords = [
            "def ",
            "theorem ",
            "axiom ",
            "inductive ",
            "structure ",
            "class ",
        ];
        let mut result = Vec::new();
        let mut current_start: Option<usize> = None;
        let mut pos = 0usize;
        for line in source.split_inclusive('\n') {
            let is_decl_start = keywords.iter().any(|kw| line.starts_with(kw));
            if is_decl_start {
                // Close the previous declaration before opening a new one.
                if let Some(start) = current_start {
                    result.push((start, &source[start..pos]));
                }
                current_start = Some(pos);
            }
            pos += line.len();
        }
        // The final declaration extends to the end of the source.
        if let Some(start) = current_start {
            result.push((start, &source[start..]));
        }
        result
    }
    /// Re-splits the whole source and re-caches every declaration that
    /// overlaps a dirty range — or all of them when no dirty range is
    /// recorded (as on construction) — then clears the dirty set.
    #[allow(missing_docs)]
    pub fn reparse_dirty(&mut self) {
        // The clone releases the borrow on `self.source` so the cache can be
        // mutated inside the loop.
        let source = self.source.clone();
        let decls = Self::split_declarations(&source);
        for (start, text) in decls {
            let end = start + text.len();
            let range = start..end;
            if self.dirty_ranges.is_empty() || self.is_dirty(&range) {
                let name = Self::extract_decl_name(text);
                let entry = ParsedDecl {
                    source_range: range,
                    name,
                    decl_text: text.to_string(),
                    valid: true,
                };
                self.cache.insert(start, entry);
            }
        }
        self.clear_dirty();
    }
    /// All cached declarations, ordered by start offset.
    #[allow(missing_docs)]
    pub fn declarations(&self) -> Vec<&ParsedDecl> {
        let mut decls: Vec<&ParsedDecl> = self.cache.values().collect();
        decls.sort_by_key(|d| d.source_range.start);
        decls
    }
    /// The cached declaration whose range contains `offset`, if any
    /// (arbitrary pick if stale overlapping entries exist — see type docs).
    #[allow(missing_docs)]
    pub fn decl_at(&self, offset: usize) -> Option<&ParsedDecl> {
        self.cache
            .values()
            .find(|d| d.source_range.contains(&offset))
    }
    /// Records `range` as dirty and flags every overlapping cached
    /// declaration as invalid.
    fn mark_dirty(&mut self, range: Range<usize>) {
        for decl in self.cache.values_mut() {
            // Half-open interval overlap test.
            if decl.source_range.start < range.end && decl.source_range.end > range.start {
                decl.valid = false;
            }
        }
        self.dirty_ranges.push(range);
    }
    /// True when `range` overlaps any recorded dirty range (half-open).
    fn is_dirty(&self, range: &Range<usize>) -> bool {
        self.dirty_ranges
            .iter()
            .any(|d| d.start < range.end && d.end > range.start)
    }
    /// Forgets all recorded dirty ranges.
    fn clear_dirty(&mut self) {
        self.dirty_ranges.clear();
    }
    /// Number of cached declarations.
    #[allow(missing_docs)]
    pub fn cache_size(&self) -> usize {
        self.cache.len()
    }
    /// Number of dirty ranges awaiting `reparse_dirty`.
    #[allow(missing_docs)]
    pub fn dirty_count(&self) -> usize {
        self.dirty_ranges.len()
    }
    /// Extracts the identifier that follows a declaration keyword at the
    /// very start of `text`; identifiers may contain alphanumerics, `_`,
    /// and `'`.
    fn extract_decl_name(text: &str) -> Option<String> {
        let keywords = [
            "def ",
            "theorem ",
            "axiom ",
            "inductive ",
            "structure ",
            "class ",
        ];
        for kw in &keywords {
            if let Some(rest) = text.strip_prefix(kw) {
                let name: String = rest
                    .chars()
                    .take_while(|c| c.is_alphanumeric() || *c == '_' || *c == '\'')
                    .collect();
                if !name.is_empty() {
                    return Some(name);
                }
            }
        }
        None
    }
    /// Cached declarations currently flagged invalid, ordered by start.
    #[allow(missing_docs)]
    pub fn invalid_declarations(&self) -> Vec<&ParsedDecl> {
        let mut decls: Vec<&ParsedDecl> = self.cache.values().filter(|d| !d.valid).collect();
        decls.sort_by_key(|d| d.source_range.start);
        decls
    }
    /// Flags every cached declaration with the given name as invalid.
    #[allow(missing_docs)]
    pub fn invalidate_by_name(&mut self, name: &str) {
        for decl in self.cache.values_mut() {
            if decl.name.as_deref() == Some(name) {
                decl.valid = false;
            }
        }
    }
}
1551/// A reparse request indicating which region to re-parse.
1552#[allow(dead_code)]
1553#[allow(missing_docs)]
1554#[derive(Debug, Clone)]
1555pub struct ReparseRequest {
1556    pub start_byte: usize,
1557    pub end_byte: usize,
1558    pub source_version: u64,
1559    pub priority: ReparsePriority,
1560}
1561impl ReparseRequest {
1562    #[allow(dead_code)]
1563    #[allow(missing_docs)]
1564    pub fn new(start: usize, end: usize, version: u64) -> Self {
1565        Self {
1566            start_byte: start,
1567            end_byte: end,
1568            source_version: version,
1569            priority: ReparsePriority::Normal,
1570        }
1571    }
1572    #[allow(dead_code)]
1573    #[allow(missing_docs)]
1574    pub fn with_priority(mut self, p: ReparsePriority) -> Self {
1575        self.priority = p;
1576        self
1577    }
1578    #[allow(dead_code)]
1579    #[allow(missing_docs)]
1580    pub fn byte_span(&self) -> usize {
1581        self.end_byte.saturating_sub(self.start_byte)
1582    }
1583}
1584/// Incremental parse cache: maps dirty-region hashes to parse results.
1585#[allow(dead_code)]
1586#[allow(missing_docs)]
1587pub struct IncrementalParseCache {
1588    entries: std::collections::HashMap<u64, IncrParseEntry>,
1589    max_entries: usize,
1590    hits: u64,
1591    misses: u64,
1592}
1593impl IncrementalParseCache {
1594    #[allow(dead_code)]
1595    #[allow(missing_docs)]
1596    pub fn new(max_entries: usize) -> Self {
1597        Self {
1598            entries: std::collections::HashMap::new(),
1599            max_entries,
1600            hits: 0,
1601            misses: 0,
1602        }
1603    }
1604    #[allow(dead_code)]
1605    #[allow(missing_docs)]
1606    pub fn lookup(&mut self, region_hash: u64) -> Option<&IncrParseEntry> {
1607        if self.entries.contains_key(&region_hash) {
1608            self.hits += 1;
1609            self.entries.get(&region_hash)
1610        } else {
1611            self.misses += 1;
1612            None
1613        }
1614    }
1615    #[allow(dead_code)]
1616    #[allow(missing_docs)]
1617    pub fn store(&mut self, entry: IncrParseEntry) {
1618        if self.entries.len() >= self.max_entries {
1619            if let Some(&k) = self.entries.keys().next() {
1620                self.entries.remove(&k);
1621            }
1622        }
1623        self.entries.insert(entry.region_hash, entry);
1624    }
1625    #[allow(dead_code)]
1626    #[allow(missing_docs)]
1627    pub fn hit_rate(&self) -> f64 {
1628        let total = self.hits + self.misses;
1629        if total == 0 {
1630            0.0
1631        } else {
1632            self.hits as f64 / total as f64
1633        }
1634    }
1635    #[allow(dead_code)]
1636    #[allow(missing_docs)]
1637    pub fn stats(&self) -> (u64, u64) {
1638        (self.hits, self.misses)
1639    }
1640}
1641/// A "change detector" that tracks whether a portion of source has changed.
1642#[allow(dead_code)]
1643#[allow(missing_docs)]
1644pub struct ChangeDetector {
1645    hashes: std::collections::HashMap<(usize, usize), u64>,
1646}
1647impl ChangeDetector {
1648    #[allow(dead_code)]
1649    #[allow(missing_docs)]
1650    pub fn new() -> Self {
1651        Self {
1652            hashes: std::collections::HashMap::new(),
1653        }
1654    }
1655    #[allow(dead_code)]
1656    #[allow(missing_docs)]
1657    pub fn record(&mut self, source: &str, start: usize, end: usize) {
1658        let end = end.min(source.len());
1659        let start = start.min(end);
1660        let h = {
1661            let data = &source.as_bytes()[start..end];
1662            let mut hash = 14695981039346656037u64;
1663            for &b in data {
1664                hash = hash.wrapping_mul(1099511628211u64) ^ b as u64;
1665            }
1666            hash
1667        };
1668        self.hashes.insert((start, end), h);
1669    }
1670    #[allow(dead_code)]
1671    #[allow(missing_docs)]
1672    pub fn has_changed(&self, source: &str, start: usize, end: usize) -> bool {
1673        let end = end.min(source.len());
1674        let start = start.min(end);
1675        let current = {
1676            let data = &source.as_bytes()[start..end];
1677            let mut hash = 14695981039346656037u64;
1678            for &b in data {
1679                hash = hash.wrapping_mul(1099511628211u64) ^ b as u64;
1680            }
1681            hash
1682        };
1683        self.hashes
1684            .get(&(start, end))
1685            .map_or(true, |&stored| stored != current)
1686    }
1687    #[allow(dead_code)]
1688    #[allow(missing_docs)]
1689    pub fn recorded_count(&self) -> usize {
1690        self.hashes.len()
1691    }
1692}
/// Represents a snapshot of incremental parse state for rollback.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ParseSnapshot {
    pub source: String,
    pub version: u64,
    pub node_count: usize,
    pub error_count: usize,
}
impl ParseSnapshot {
    /// Captures the given parse state, copying `source`.
    #[allow(dead_code)]
    pub fn capture(source: &str, version: u64, node_count: usize, error_count: usize) -> Self {
        ParseSnapshot {
            source: source.to_owned(),
            version,
            node_count,
            error_count,
        }
    }
    /// True when this snapshot recorded strictly fewer errors than `other`.
    #[allow(dead_code)]
    pub fn is_cleaner_than(&self, other: &Self) -> bool {
        self.error_count < other.error_count
    }
}
1719/// Tracks which declarations are affected by an edit.
1720#[allow(dead_code)]
1721#[allow(missing_docs)]
1722pub struct DeclDependencyTracker {
1723    decl_ranges: Vec<(String, usize, usize)>,
1724}
1725impl DeclDependencyTracker {
1726    #[allow(dead_code)]
1727    #[allow(missing_docs)]
1728    pub fn new() -> Self {
1729        Self {
1730            decl_ranges: Vec::new(),
1731        }
1732    }
1733    #[allow(dead_code)]
1734    #[allow(missing_docs)]
1735    pub fn register_decl(&mut self, name: impl Into<String>, start: usize, end: usize) {
1736        self.decl_ranges.push((name.into(), start, end));
1737    }
1738    #[allow(dead_code)]
1739    #[allow(missing_docs)]
1740    pub fn affected_by_edit(&self, edit: &SourceEdit) -> Vec<&str> {
1741        self.decl_ranges
1742            .iter()
1743            .filter(|(_, s, e)| edit.start < *e && edit.end > *s)
1744            .map(|(n, _, _)| n.as_str())
1745            .collect()
1746    }
1747    #[allow(dead_code)]
1748    #[allow(missing_docs)]
1749    pub fn decl_count(&self) -> usize {
1750        self.decl_ranges.len()
1751    }
1752}
/// Statistics for an incremental parsing session.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Default, Debug)]
pub struct IncrParseStats {
    /// Total number of edits applied to the session.
    pub total_edits: u64,
    /// Reparses that reused part of the previous result.
    pub partial_reparses: u64,
    /// Reparses performed from scratch.
    pub full_reparses: u64,
    /// Tokens carried over unchanged from the previous lex.
    pub tokens_reused: u64,
    /// Tokens that had to be lexed again.
    pub tokens_relexed: u64,
    /// Syntax nodes carried over unchanged.
    pub nodes_reused: u64,
    /// Syntax nodes that had to be rebuilt.
    pub nodes_rebuilt: u64,
}
impl IncrParseStats {
    /// All counters start at zero.
    #[allow(dead_code)]
    pub fn new() -> Self {
        Self::default()
    }
    /// `reused / (reused + redone)`, or 0.0 when nothing was processed.
    fn fraction(reused: u64, redone: u64) -> f64 {
        let total = reused + redone;
        if total == 0 {
            0.0
        } else {
            reused as f64 / total as f64
        }
    }
    /// Fraction of tokens reused across the session (0.0 when no tokens).
    #[allow(dead_code)]
    pub fn reuse_fraction_tokens(&self) -> f64 {
        Self::fraction(self.tokens_reused, self.tokens_relexed)
    }
    /// Fraction of nodes reused across the session (0.0 when no nodes).
    #[allow(dead_code)]
    pub fn reuse_fraction_nodes(&self) -> f64 {
        Self::fraction(self.nodes_reused, self.nodes_rebuilt)
    }
    /// One-line human-readable summary of the session counters.
    #[allow(dead_code)]
    pub fn summary(&self) -> String {
        format!(
            "edits={} partial={} full={} token_reuse={:.1}% node_reuse={:.1}%",
            self.total_edits,
            self.partial_reparses,
            self.full_reparses,
            self.reuse_fraction_tokens() * 100.0,
            self.reuse_fraction_nodes() * 100.0,
        )
    }
}
1806/// Tracks a history of edits for undo/redo.
1807#[allow(dead_code)]
1808#[allow(missing_docs)]
1809pub struct EditHistory {
1810    history: Vec<SourceEdit>,
1811    undo_stack: Vec<SourceEdit>,
1812    max_history: usize,
1813}
1814impl EditHistory {
1815    #[allow(dead_code)]
1816    #[allow(missing_docs)]
1817    pub fn new(max_history: usize) -> Self {
1818        Self {
1819            history: Vec::new(),
1820            undo_stack: Vec::new(),
1821            max_history,
1822        }
1823    }
1824    #[allow(dead_code)]
1825    #[allow(missing_docs)]
1826    pub fn push(&mut self, edit: SourceEdit) {
1827        if self.history.len() >= self.max_history {
1828            self.history.remove(0);
1829        }
1830        self.history.push(edit);
1831        self.undo_stack.clear();
1832    }
1833    #[allow(dead_code)]
1834    #[allow(missing_docs)]
1835    pub fn undo(&mut self) -> Option<SourceEdit> {
1836        let edit = self.history.pop()?;
1837        self.undo_stack.push(edit.clone());
1838        Some(edit)
1839    }
1840    #[allow(dead_code)]
1841    #[allow(missing_docs)]
1842    pub fn redo(&mut self) -> Option<SourceEdit> {
1843        let edit = self.undo_stack.pop()?;
1844        self.history.push(edit.clone());
1845        Some(edit)
1846    }
1847    #[allow(dead_code)]
1848    #[allow(missing_docs)]
1849    pub fn history_len(&self) -> usize {
1850        self.history.len()
1851    }
1852    #[allow(dead_code)]
1853    #[allow(missing_docs)]
1854    pub fn undo_count(&self) -> usize {
1855        self.undo_stack.len()
1856    }
1857}
1858/// Incremental lexer: re-lexes only the invalidated region.
1859#[allow(dead_code)]
1860#[allow(missing_docs)]
1861pub struct IncrementalLexerExt {
1862    source: String,
1863    validity: TokenValidity,
1864    version: u64,
1865}
1866impl IncrementalLexerExt {
1867    #[allow(dead_code)]
1868    #[allow(missing_docs)]
1869    pub fn new(source: impl Into<String>) -> Self {
1870        Self {
1871            source: source.into(),
1872            validity: TokenValidity::new(),
1873            version: 0,
1874        }
1875    }
1876    #[allow(dead_code)]
1877    #[allow(missing_docs)]
1878    pub fn apply_edit(&mut self, edit: SourceEdit) {
1879        let inv = compute_invalidated_range(&edit, 64);
1880        self.validity.invalidate(&inv);
1881        self.source = apply_edits(&self.source, &[edit]);
1882        self.version += 1;
1883    }
1884    #[allow(dead_code)]
1885    #[allow(missing_docs)]
1886    pub fn source(&self) -> &str {
1887        &self.source
1888    }
1889    #[allow(dead_code)]
1890    #[allow(missing_docs)]
1891    pub fn version(&self) -> u64 {
1892        self.version
1893    }
1894    #[allow(dead_code)]
1895    #[allow(missing_docs)]
1896    pub fn valid_token_count(&self) -> usize {
1897        self.validity.valid_count()
1898    }
1899    #[allow(dead_code)]
1900    #[allow(missing_docs)]
1901    pub fn needs_relex(&self, pos: usize) -> bool {
1902        !self.validity.is_valid_at(pos)
1903    }
1904}
/// A concurrency-safe version counter for incremental state.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct AtomicVersion {
    inner: std::sync::atomic::AtomicU64,
}
impl AtomicVersion {
    /// Starts the counter at version 0.
    #[allow(dead_code)]
    pub fn new() -> Self {
        AtomicVersion {
            inner: std::sync::atomic::AtomicU64::new(0),
        }
    }
    /// Atomically bumps the version and returns the *new* value.
    #[allow(dead_code)]
    pub fn increment(&self) -> u64 {
        use std::sync::atomic::Ordering;
        // fetch_add returns the previous value, so add 1 for the new one.
        self.inner.fetch_add(1, Ordering::SeqCst) + 1
    }
    /// Reads the current version.
    #[allow(dead_code)]
    pub fn load(&self) -> u64 {
        use std::sync::atomic::Ordering;
        self.inner.load(Ordering::SeqCst)
    }
    /// Resets the counter back to 0.
    #[allow(dead_code)]
    pub fn reset(&self) {
        use std::sync::atomic::Ordering;
        self.inner.store(0, Ordering::SeqCst);
    }
}
/// The kind of a syntax node.
#[derive(Debug, Clone, PartialEq, Eq)]
#[allow(missing_docs)]
pub enum SyntaxKind {
    /// Top-level node of the tree.
    Root,
    /// A `def` declaration.
    Def,
    /// A `theorem` declaration.
    Theorem,
    /// An `axiom` declaration.
    Axiom,
    /// An identifier leaf.
    Ident,
    /// A literal leaf.
    Literal,
    /// Any other token, carrying its text.
    Token(String),
    /// Placeholder for a parse error.
    Error,
}
/// The result of an incremental parse attempt.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug)]
pub struct IncrementalParseResult {
    pub success: bool,
    pub reused_nodes: usize,
    pub rebuilt_nodes: usize,
    pub parse_time_us: u64,
    pub errors: Vec<String>,
}
impl IncrementalParseResult {
    /// Creates a result with the given counters and no errors recorded.
    #[allow(dead_code)]
    pub fn new(success: bool, reused: usize, rebuilt: usize, time_us: u64) -> Self {
        IncrementalParseResult {
            success,
            reused_nodes: reused,
            rebuilt_nodes: rebuilt,
            parse_time_us: time_us,
            errors: Vec::new(),
        }
    }
    /// Appends one error message to the result.
    #[allow(dead_code)]
    pub fn add_error(&mut self, e: impl Into<String>) {
        self.errors.push(e.into());
    }
    /// `reused / (reused + rebuilt)`, or 0.0 when no nodes were touched.
    #[allow(dead_code)]
    pub fn reuse_ratio(&self) -> f64 {
        match self.reused_nodes + self.rebuilt_nodes {
            0 => 0.0,
            total => self.reused_nodes as f64 / total as f64,
        }
    }
    /// Whether any error has been recorded.
    #[allow(dead_code)]
    pub fn has_errors(&self) -> bool {
        !self.errors.is_empty()
    }
}
/// One cached incremental-parse result, keyed by the hash of its region.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone)]
pub struct IncrParseEntry {
    /// Hash of the source region this entry was produced from (cache key).
    pub region_hash: u64,
    /// Rendered form of the parse result — NOTE(review): exact format is
    /// determined by the producer, not visible here.
    pub result_repr: String,
    /// Source version at which the entry was produced.
    pub version: u64,
}