// oxilean_parse/incremental/types/impls.rs

//! Auto-generated module (split from types.rs)
//!
//! Second half of type definitions and impl blocks.

5use super::super::functions::*;
6use super::defs::*;
7use std::collections::HashMap;
8use std::ops::Range;
9
/// Represents the "reachability" of tokens from a parse entry point.
///
/// Records the byte offsets of tokens that a parse actually visited and
/// can report how much of a token stream was covered.
#[allow(dead_code)]
pub struct TokenReachability {
    reachable: std::collections::HashSet<usize>,
}

#[allow(dead_code)]
impl TokenReachability {
    /// Creates an empty reachability set.
    pub fn new() -> Self {
        Self {
            reachable: std::collections::HashSet::new(),
        }
    }

    /// Records the token at `offset` as reachable (idempotent).
    pub fn mark_reachable(&mut self, offset: usize) {
        self.reachable.insert(offset);
    }

    /// Returns `true` if the token at `offset` was marked reachable.
    pub fn is_reachable(&self, offset: usize) -> bool {
        self.reachable.contains(&offset)
    }

    /// Number of distinct offsets marked reachable so far.
    pub fn reachable_count(&self) -> usize {
        self.reachable.len()
    }

    /// Fraction of `total_tokens` that are reachable; `0.0` when
    /// `total_tokens` is zero to avoid dividing by zero.
    pub fn coverage_fraction(&self, total_tokens: usize) -> f64 {
        match total_tokens {
            0 => 0.0,
            n => self.reachable.len() as f64 / n as f64,
        }
    }
}
49/// A pool of parse fibers for parallel/concurrent parsing.
50#[allow(dead_code)]
51#[allow(missing_docs)]
52pub struct FiberPool {
53    fibers: Vec<ParseFiber>,
54    next_id: u64,
55}
56impl FiberPool {
57    #[allow(dead_code)]
58    #[allow(missing_docs)]
59    pub fn new() -> Self {
60        Self {
61            fibers: Vec::new(),
62            next_id: 0,
63        }
64    }
65    #[allow(dead_code)]
66    #[allow(missing_docs)]
67    pub fn spawn(&mut self, start: usize, depth: usize, state: impl Into<String>) -> u64 {
68        let id = self.next_id;
69        self.next_id += 1;
70        self.fibers.push(ParseFiber::new(id, start, depth, state));
71        id
72    }
73    #[allow(dead_code)]
74    #[allow(missing_docs)]
75    pub fn get(&self, id: u64) -> Option<&ParseFiber> {
76        self.fibers.iter().find(|f| f.id == id)
77    }
78    #[allow(dead_code)]
79    #[allow(missing_docs)]
80    pub fn remove(&mut self, id: u64) {
81        self.fibers.retain(|f| f.id != id);
82    }
83    #[allow(dead_code)]
84    #[allow(missing_docs)]
85    pub fn active_count(&self) -> usize {
86        self.fibers.len()
87    }
88}
89/// Manages multiple snapshots with a limit.
90#[allow(dead_code)]
91#[allow(missing_docs)]
92pub struct SnapshotManager {
93    snapshots: Vec<ParseSnapshot>,
94    max_snapshots: usize,
95}
96impl SnapshotManager {
97    #[allow(dead_code)]
98    #[allow(missing_docs)]
99    pub fn new(max_snapshots: usize) -> Self {
100        Self {
101            snapshots: Vec::new(),
102            max_snapshots,
103        }
104    }
105    #[allow(dead_code)]
106    #[allow(missing_docs)]
107    pub fn save(&mut self, snapshot: ParseSnapshot) {
108        if self.snapshots.len() >= self.max_snapshots {
109            self.snapshots.remove(0);
110        }
111        self.snapshots.push(snapshot);
112    }
113    #[allow(dead_code)]
114    #[allow(missing_docs)]
115    pub fn best(&self) -> Option<&ParseSnapshot> {
116        self.snapshots.iter().min_by_key(|s| s.error_count)
117    }
118    #[allow(dead_code)]
119    #[allow(missing_docs)]
120    pub fn latest(&self) -> Option<&ParseSnapshot> {
121        self.snapshots.last()
122    }
123    #[allow(dead_code)]
124    #[allow(missing_docs)]
125    pub fn count(&self) -> usize {
126        self.snapshots.len()
127    }
128}
129/// A simple edit buffer that accumulates edits before applying them.
130#[allow(dead_code)]
131#[allow(missing_docs)]
132pub struct EditBuffer {
133    pending: Vec<SourceEdit>,
134    max_pending: usize,
135}
136impl EditBuffer {
137    #[allow(dead_code)]
138    #[allow(missing_docs)]
139    pub fn new(max_pending: usize) -> Self {
140        Self {
141            pending: Vec::new(),
142            max_pending,
143        }
144    }
145    #[allow(dead_code)]
146    #[allow(missing_docs)]
147    pub fn add(&mut self, edit: SourceEdit) -> bool {
148        if self.pending.len() >= self.max_pending {
149            return false;
150        }
151        self.pending.push(edit);
152        true
153    }
154    #[allow(dead_code)]
155    #[allow(missing_docs)]
156    pub fn flush(&mut self) -> Vec<SourceEdit> {
157        std::mem::take(&mut self.pending)
158    }
159    #[allow(dead_code)]
160    #[allow(missing_docs)]
161    pub fn pending_count(&self) -> usize {
162        self.pending.len()
163    }
164    #[allow(dead_code)]
165    #[allow(missing_docs)]
166    pub fn is_empty(&self) -> bool {
167        self.pending.is_empty()
168    }
169    #[allow(dead_code)]
170    #[allow(missing_docs)]
171    pub fn total_delta(&self) -> i64 {
172        self.pending.iter().map(|e| e.delta()).sum()
173    }
174}
175/// A session-level incremental parse manager.
176#[allow(dead_code)]
177#[allow(missing_docs)]
178pub struct IncrementalSession {
179    pub source: SimpleRope,
180    pub version: ParseVersion,
181    pub errors: IncrementalErrorMap,
182    pub stats: IncrParseStats,
183}
184impl IncrementalSession {
185    #[allow(dead_code)]
186    #[allow(missing_docs)]
187    pub fn new(source: impl Into<String>) -> Self {
188        Self {
189            source: SimpleRope::new(source),
190            version: ParseVersion::new(),
191            errors: IncrementalErrorMap::new(),
192            stats: IncrParseStats::new(),
193        }
194    }
195    #[allow(dead_code)]
196    #[allow(missing_docs)]
197    pub fn apply_edit(&mut self, edit: SourceEdit) {
198        let v = self.version.increment();
199        let _ = v;
200        let start = edit.start;
201        let end = edit.end + edit.new_text.len();
202        self.errors.clear_range(start, end);
203        self.stats.total_edits += 1;
204        let delta = edit.delta();
205        let src = self.source.as_string();
206        let new_src = apply_edits(&src, &[edit]);
207        self.source = SimpleRope::new(new_src);
208        let _ = delta;
209    }
210    #[allow(dead_code)]
211    #[allow(missing_docs)]
212    pub fn source_text(&self) -> String {
213        self.source.as_string()
214    }
215    #[allow(dead_code)]
216    #[allow(missing_docs)]
217    pub fn has_errors(&self) -> bool {
218        self.errors.total_error_count() > 0
219    }
220    #[allow(dead_code)]
221    #[allow(missing_docs)]
222    pub fn current_version(&self) -> u64 {
223        self.version.current()
224    }
225}
/// Represents the set of changed lines in a diff.
///
/// Each entry is `(line_number, old_text, new_text)`, kept in the
/// order the changes were added.
#[allow(dead_code)]
pub struct LineDiff {
    changed_lines: Vec<(usize, String, String)>,
}

#[allow(dead_code)]
impl LineDiff {
    /// Creates an empty diff.
    pub fn new() -> Self {
        Self {
            changed_lines: Vec::new(),
        }
    }

    /// Records that `line` changed from `old` to `new`.
    pub fn add_change(&mut self, line: usize, old: impl Into<String>, new: impl Into<String>) {
        let entry = (line, old.into(), new.into());
        self.changed_lines.push(entry);
    }

    /// Number of recorded line changes.
    pub fn count(&self) -> usize {
        self.changed_lines.len()
    }

    /// Line numbers of all recorded changes, in insertion order.
    pub fn affected_lines(&self) -> Vec<usize> {
        self.changed_lines
            .iter()
            .map(|&(line, _, _)| line)
            .collect()
    }
}
/// A fingerprint computed from a slice of tokens.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[allow(missing_docs)]
pub struct TokenFingerprint(u64);

impl TokenFingerprint {
    /// FNV-1a over every byte of every token, with a fixed separator
    /// mixed in after each token so that token boundaries matter
    /// (e.g. `["ab","c"]` and `["a","bc"]` hash differently).
    pub fn compute(tokens: &[&str]) -> Self {
        const FNV_OFFSET_BASIS: u64 = 0xcbf2_9ce4_8422_2325;
        const FNV_PRIME: u64 = 0x0000_0100_0000_01B3;
        const TOKEN_SEPARATOR: u64 = 0x1f;
        let mut acc = FNV_OFFSET_BASIS;
        for token in tokens {
            acc = token
                .bytes()
                .fold(acc, |h, b| (h ^ b as u64).wrapping_mul(FNV_PRIME));
            acc ^= TOKEN_SEPARATOR;
        }
        TokenFingerprint(acc)
    }

    /// The raw 64-bit fingerprint value.
    pub fn value(&self) -> u64 {
        self.0
    }
}
278/// A transaction groups multiple `TextChange`s into an atomic unit.
279#[allow(missing_docs)]
280pub struct Transaction {
281    changes: Vec<TextChange>,
282    snapshot: Option<String>,
283}
284impl Transaction {
285    #[allow(missing_docs)]
286    pub fn new() -> Self {
287        Self {
288            changes: Vec::new(),
289            snapshot: None,
290        }
291    }
292    #[allow(missing_docs)]
293    pub fn begin(source: &str) -> Self {
294        Self {
295            changes: Vec::new(),
296            snapshot: Some(source.to_string()),
297        }
298    }
299    #[allow(missing_docs)]
300    pub fn add(&mut self, change: TextChange) {
301        self.changes.push(change);
302    }
303    #[allow(missing_docs)]
304    pub fn commit(&self, source: &str) -> String {
305        let mut s = source.to_string();
306        let mut sorted = self.changes.clone();
307        sorted.sort_by_key(|b| std::cmp::Reverse(b.range.start));
308        for change in &sorted {
309            s = change.apply(&s);
310        }
311        s
312    }
313    #[allow(missing_docs)]
314    pub fn rollback(&self) -> Option<&str> {
315        self.snapshot.as_deref()
316    }
317    #[allow(missing_docs)]
318    pub fn len(&self) -> usize {
319        self.changes.len()
320    }
321    #[allow(missing_docs)]
322    pub fn is_empty(&self) -> bool {
323        self.changes.is_empty()
324    }
325}
/// A map of byte offset to error messages for incremental error tracking.
///
/// Backed by a `BTreeMap` so range queries over byte offsets are cheap.
#[allow(dead_code)]
pub struct IncrementalErrorMap {
    errors: std::collections::BTreeMap<usize, Vec<String>>,
}

#[allow(dead_code)]
impl IncrementalErrorMap {
    /// Creates an empty error map.
    pub fn new() -> Self {
        Self {
            errors: std::collections::BTreeMap::new(),
        }
    }

    /// Appends an error message at `offset`.
    pub fn add_error(&mut self, offset: usize, msg: impl Into<String>) {
        self.errors.entry(offset).or_default().push(msg.into());
    }

    /// Drops every error whose offset lies in `start..end`.
    ///
    /// Fix: uses `BTreeMap::retain` instead of collecting keys into a
    /// temporary `Vec`; as a side benefit this no longer panics when
    /// `start > end` (a degenerate range simply clears nothing).
    pub fn clear_range(&mut self, start: usize, end: usize) {
        self.errors
            .retain(|&offset, _| !(offset >= start && offset < end));
    }

    /// All error messages recorded at offsets in `start..end`.
    pub fn errors_in_range(&self, start: usize, end: usize) -> Vec<&String> {
        self.errors
            .range(start..end)
            .flat_map(|(_, msgs)| msgs.iter())
            .collect()
    }

    /// Total number of error messages across all offsets.
    pub fn total_error_count(&self) -> usize {
        self.errors.values().map(|v| v.len()).sum()
    }
}
/// A cached parse result for one declaration
#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub struct ParsedDecl {
    /// Byte range of this declaration within the full source.
    pub source_range: Range<usize>,
    /// Declaration name, if one could be extracted from its header line.
    pub name: Option<String>,
    /// The raw text of the declaration.
    pub decl_text: String,
    /// Set to `false` once an edit overlaps this declaration's range.
    pub valid: bool,
}
376/// The incremental parser state — tracks source + cache
377#[allow(missing_docs)]
378pub struct IncrementalParser {
379    source: String,
380    cache: HashMap<usize, ParsedDecl>,
381    dirty_ranges: Vec<Range<usize>>,
382    version: u32,
383}
384impl IncrementalParser {
385    #[allow(missing_docs)]
386    pub fn new(source: impl Into<String>) -> Self {
387        let source = source.into();
388        let mut parser = IncrementalParser {
389            source,
390            cache: HashMap::new(),
391            dirty_ranges: Vec::new(),
392            version: 0,
393        };
394        parser.reparse_dirty();
395        parser
396    }
397    #[allow(missing_docs)]
398    pub fn apply_change(&mut self, change: TextChange) {
399        let affected_start = change.range.start;
400        let affected_end = change.range.start + change.new_text.len();
401        self.source = change.apply(&self.source);
402        self.version += 1;
403        let dirty_end = affected_end.max(change.range.end);
404        self.mark_dirty(affected_start..dirty_end);
405    }
406    #[allow(missing_docs)]
407    pub fn apply_changes(&mut self, mut changes: Vec<TextChange>) {
408        changes.sort_by_key(|b| std::cmp::Reverse(b.range.start));
409        for change in changes {
410            self.apply_change(change);
411        }
412    }
413    #[allow(missing_docs)]
414    pub fn source(&self) -> &str {
415        &self.source
416    }
417    #[allow(missing_docs)]
418    pub fn version(&self) -> u32 {
419        self.version
420    }
421    #[allow(missing_docs)]
422    pub fn split_declarations(source: &str) -> Vec<(usize, &str)> {
423        let keywords = [
424            "def ",
425            "theorem ",
426            "axiom ",
427            "inductive ",
428            "structure ",
429            "class ",
430        ];
431        let mut result = Vec::new();
432        let mut current_start: Option<usize> = None;
433        let mut pos = 0usize;
434        for line in source.split_inclusive('\n') {
435            let is_decl_start = keywords.iter().any(|kw| line.starts_with(kw));
436            if is_decl_start {
437                if let Some(start) = current_start {
438                    result.push((start, &source[start..pos]));
439                }
440                current_start = Some(pos);
441            }
442            pos += line.len();
443        }
444        if let Some(start) = current_start {
445            result.push((start, &source[start..]));
446        }
447        result
448    }
449    #[allow(missing_docs)]
450    pub fn reparse_dirty(&mut self) {
451        let source = self.source.clone();
452        let decls = Self::split_declarations(&source);
453        for (start, text) in decls {
454            let end = start + text.len();
455            let range = start..end;
456            if self.dirty_ranges.is_empty() || self.is_dirty(&range) {
457                let name = Self::extract_decl_name(text);
458                let entry = ParsedDecl {
459                    source_range: range,
460                    name,
461                    decl_text: text.to_string(),
462                    valid: true,
463                };
464                self.cache.insert(start, entry);
465            }
466        }
467        self.clear_dirty();
468    }
469    #[allow(missing_docs)]
470    pub fn declarations(&self) -> Vec<&ParsedDecl> {
471        let mut decls: Vec<&ParsedDecl> = self.cache.values().collect();
472        decls.sort_by_key(|d| d.source_range.start);
473        decls
474    }
475    #[allow(missing_docs)]
476    pub fn decl_at(&self, offset: usize) -> Option<&ParsedDecl> {
477        self.cache
478            .values()
479            .find(|d| d.source_range.contains(&offset))
480    }
481    fn mark_dirty(&mut self, range: Range<usize>) {
482        for decl in self.cache.values_mut() {
483            if decl.source_range.start < range.end && decl.source_range.end > range.start {
484                decl.valid = false;
485            }
486        }
487        self.dirty_ranges.push(range);
488    }
489    fn is_dirty(&self, range: &Range<usize>) -> bool {
490        self.dirty_ranges
491            .iter()
492            .any(|d| d.start < range.end && d.end > range.start)
493    }
494    fn clear_dirty(&mut self) {
495        self.dirty_ranges.clear();
496    }
497    #[allow(missing_docs)]
498    pub fn cache_size(&self) -> usize {
499        self.cache.len()
500    }
501    #[allow(missing_docs)]
502    pub fn dirty_count(&self) -> usize {
503        self.dirty_ranges.len()
504    }
505    fn extract_decl_name(text: &str) -> Option<String> {
506        let keywords = [
507            "def ",
508            "theorem ",
509            "axiom ",
510            "inductive ",
511            "structure ",
512            "class ",
513        ];
514        for kw in &keywords {
515            if let Some(rest) = text.strip_prefix(kw) {
516                let name: String = rest
517                    .chars()
518                    .take_while(|c| c.is_alphanumeric() || *c == '_' || *c == '\'')
519                    .collect();
520                if !name.is_empty() {
521                    return Some(name);
522                }
523            }
524        }
525        None
526    }
527    #[allow(missing_docs)]
528    pub fn invalid_declarations(&self) -> Vec<&ParsedDecl> {
529        let mut decls: Vec<&ParsedDecl> = self.cache.values().filter(|d| !d.valid).collect();
530        decls.sort_by_key(|d| d.source_range.start);
531        decls
532    }
533    #[allow(missing_docs)]
534    pub fn invalidate_by_name(&mut self, name: &str) {
535        for decl in self.cache.values_mut() {
536            if decl.name.as_deref() == Some(name) {
537                decl.valid = false;
538            }
539        }
540    }
541}
542/// A reparse request indicating which region to re-parse.
543#[allow(dead_code)]
544#[allow(missing_docs)]
545#[derive(Debug, Clone)]
546pub struct ReparseRequest {
547    pub start_byte: usize,
548    pub end_byte: usize,
549    pub source_version: u64,
550    pub priority: ReparsePriority,
551}
552impl ReparseRequest {
553    #[allow(dead_code)]
554    #[allow(missing_docs)]
555    pub fn new(start: usize, end: usize, version: u64) -> Self {
556        Self {
557            start_byte: start,
558            end_byte: end,
559            source_version: version,
560            priority: ReparsePriority::Normal,
561        }
562    }
563    #[allow(dead_code)]
564    #[allow(missing_docs)]
565    pub fn with_priority(mut self, p: ReparsePriority) -> Self {
566        self.priority = p;
567        self
568    }
569    #[allow(dead_code)]
570    #[allow(missing_docs)]
571    pub fn byte_span(&self) -> usize {
572        self.end_byte.saturating_sub(self.start_byte)
573    }
574}
575/// Incremental parse cache: maps dirty-region hashes to parse results.
576#[allow(dead_code)]
577#[allow(missing_docs)]
578pub struct IncrementalParseCache {
579    entries: std::collections::HashMap<u64, IncrParseEntry>,
580    max_entries: usize,
581    hits: u64,
582    misses: u64,
583}
584impl IncrementalParseCache {
585    #[allow(dead_code)]
586    #[allow(missing_docs)]
587    pub fn new(max_entries: usize) -> Self {
588        Self {
589            entries: std::collections::HashMap::new(),
590            max_entries,
591            hits: 0,
592            misses: 0,
593        }
594    }
595    #[allow(dead_code)]
596    #[allow(missing_docs)]
597    pub fn lookup(&mut self, region_hash: u64) -> Option<&IncrParseEntry> {
598        if self.entries.contains_key(&region_hash) {
599            self.hits += 1;
600            self.entries.get(&region_hash)
601        } else {
602            self.misses += 1;
603            None
604        }
605    }
606    #[allow(dead_code)]
607    #[allow(missing_docs)]
608    pub fn store(&mut self, entry: IncrParseEntry) {
609        if self.entries.len() >= self.max_entries {
610            if let Some(&k) = self.entries.keys().next() {
611                self.entries.remove(&k);
612            }
613        }
614        self.entries.insert(entry.region_hash, entry);
615    }
616    #[allow(dead_code)]
617    #[allow(missing_docs)]
618    pub fn hit_rate(&self) -> f64 {
619        let total = self.hits + self.misses;
620        if total == 0 {
621            0.0
622        } else {
623            self.hits as f64 / total as f64
624        }
625    }
626    #[allow(dead_code)]
627    #[allow(missing_docs)]
628    pub fn stats(&self) -> (u64, u64) {
629        (self.hits, self.misses)
630    }
631}
/// A "change detector" that tracks whether a portion of source has changed.
///
/// Stores an FNV-style hash per `(start, end)` region; `has_changed`
/// re-hashes the region and compares against the stored value.
#[allow(dead_code)]
pub struct ChangeDetector {
    hashes: std::collections::HashMap<(usize, usize), u64>,
}

#[allow(dead_code)]
impl ChangeDetector {
    /// Creates a detector with no recorded regions.
    pub fn new() -> Self {
        Self {
            hashes: std::collections::HashMap::new(),
        }
    }

    /// Clamps `(start, end)` to the source bounds and returns the
    /// clamped key together with the FNV-style hash of that byte range.
    ///
    /// Fix: this logic was duplicated verbatim in `record` and
    /// `has_changed`; it is now a single shared helper.
    fn keyed_hash(source: &str, start: usize, end: usize) -> ((usize, usize), u64) {
        let end = end.min(source.len());
        let start = start.min(end);
        let mut hash = 14695981039346656037u64;
        for &byte in &source.as_bytes()[start..end] {
            hash = hash.wrapping_mul(1099511628211u64) ^ byte as u64;
        }
        ((start, end), hash)
    }

    /// Records the current hash of `source[start..end]` (clamped).
    pub fn record(&mut self, source: &str, start: usize, end: usize) {
        let (key, hash) = Self::keyed_hash(source, start, end);
        self.hashes.insert(key, hash);
    }

    /// `true` if the region's content differs from what was recorded,
    /// or if the region was never recorded at all.
    pub fn has_changed(&self, source: &str, start: usize, end: usize) -> bool {
        let (key, current) = Self::keyed_hash(source, start, end);
        self.hashes
            .get(&key)
            .map_or(true, |&stored| stored != current)
    }

    /// Number of distinct regions recorded.
    pub fn recorded_count(&self) -> usize {
        self.hashes.len()
    }
}
/// Represents a snapshot of incremental parse state for rollback.
#[allow(dead_code)]
pub struct ParseSnapshot {
    /// Full source text at capture time.
    pub source: String,
    /// Parse version at capture time.
    pub version: u64,
    /// Number of syntax nodes at capture time.
    pub node_count: usize,
    /// Number of errors at capture time.
    pub error_count: usize,
}

#[allow(dead_code)]
impl ParseSnapshot {
    /// Captures the given parse state as an owned snapshot.
    pub fn capture(source: &str, version: u64, node_count: usize, error_count: usize) -> Self {
        Self {
            source: source.to_owned(),
            version,
            node_count,
            error_count,
        }
    }

    /// Strictly fewer errors than `other`; ties are not "cleaner".
    pub fn is_cleaner_than(&self, other: &Self) -> bool {
        self.error_count < other.error_count
    }
}
710/// Tracks which declarations are affected by an edit.
711#[allow(dead_code)]
712#[allow(missing_docs)]
713pub struct DeclDependencyTracker {
714    decl_ranges: Vec<(String, usize, usize)>,
715}
716impl DeclDependencyTracker {
717    #[allow(dead_code)]
718    #[allow(missing_docs)]
719    pub fn new() -> Self {
720        Self {
721            decl_ranges: Vec::new(),
722        }
723    }
724    #[allow(dead_code)]
725    #[allow(missing_docs)]
726    pub fn register_decl(&mut self, name: impl Into<String>, start: usize, end: usize) {
727        self.decl_ranges.push((name.into(), start, end));
728    }
729    #[allow(dead_code)]
730    #[allow(missing_docs)]
731    pub fn affected_by_edit(&self, edit: &SourceEdit) -> Vec<&str> {
732        self.decl_ranges
733            .iter()
734            .filter(|(_, s, e)| edit.start < *e && edit.end > *s)
735            .map(|(n, _, _)| n.as_str())
736            .collect()
737    }
738    #[allow(dead_code)]
739    #[allow(missing_docs)]
740    pub fn decl_count(&self) -> usize {
741        self.decl_ranges.len()
742    }
743}
/// Statistics for an incremental parsing session.
#[allow(dead_code)]
#[derive(Default, Debug)]
pub struct IncrParseStats {
    /// Total number of edits applied.
    pub total_edits: u64,
    /// Number of reparses that reused cached state.
    pub partial_reparses: u64,
    /// Number of full (from-scratch) reparses.
    pub full_reparses: u64,
    /// Tokens taken from the previous parse.
    pub tokens_reused: u64,
    /// Tokens lexed anew.
    pub tokens_relexed: u64,
    /// Syntax nodes taken from the previous parse.
    pub nodes_reused: u64,
    /// Syntax nodes rebuilt from scratch.
    pub nodes_rebuilt: u64,
}

#[allow(dead_code)]
impl IncrParseStats {
    /// Creates zeroed statistics.
    pub fn new() -> Self {
        Self::default()
    }

    /// `part / total`, or `0.0` when `total` is zero.
    fn ratio(part: u64, total: u64) -> f64 {
        if total == 0 {
            0.0
        } else {
            part as f64 / total as f64
        }
    }

    /// Fraction of tokens reused rather than re-lexed.
    pub fn reuse_fraction_tokens(&self) -> f64 {
        Self::ratio(self.tokens_reused, self.tokens_reused + self.tokens_relexed)
    }

    /// Fraction of nodes reused rather than rebuilt.
    pub fn reuse_fraction_nodes(&self) -> f64 {
        Self::ratio(self.nodes_reused, self.nodes_reused + self.nodes_rebuilt)
    }

    /// One-line human-readable summary of the session.
    pub fn summary(&self) -> String {
        format!(
            "edits={} partial={} full={} token_reuse={:.1}% node_reuse={:.1}%",
            self.total_edits,
            self.partial_reparses,
            self.full_reparses,
            self.reuse_fraction_tokens() * 100.0,
            self.reuse_fraction_nodes() * 100.0,
        )
    }
}
797/// Tracks a history of edits for undo/redo.
798#[allow(dead_code)]
799#[allow(missing_docs)]
800pub struct EditHistory {
801    history: Vec<SourceEdit>,
802    undo_stack: Vec<SourceEdit>,
803    max_history: usize,
804}
805impl EditHistory {
806    #[allow(dead_code)]
807    #[allow(missing_docs)]
808    pub fn new(max_history: usize) -> Self {
809        Self {
810            history: Vec::new(),
811            undo_stack: Vec::new(),
812            max_history,
813        }
814    }
815    #[allow(dead_code)]
816    #[allow(missing_docs)]
817    pub fn push(&mut self, edit: SourceEdit) {
818        if self.history.len() >= self.max_history {
819            self.history.remove(0);
820        }
821        self.history.push(edit);
822        self.undo_stack.clear();
823    }
824    #[allow(dead_code)]
825    #[allow(missing_docs)]
826    pub fn undo(&mut self) -> Option<SourceEdit> {
827        let edit = self.history.pop()?;
828        self.undo_stack.push(edit.clone());
829        Some(edit)
830    }
831    #[allow(dead_code)]
832    #[allow(missing_docs)]
833    pub fn redo(&mut self) -> Option<SourceEdit> {
834        let edit = self.undo_stack.pop()?;
835        self.history.push(edit.clone());
836        Some(edit)
837    }
838    #[allow(dead_code)]
839    #[allow(missing_docs)]
840    pub fn history_len(&self) -> usize {
841        self.history.len()
842    }
843    #[allow(dead_code)]
844    #[allow(missing_docs)]
845    pub fn undo_count(&self) -> usize {
846        self.undo_stack.len()
847    }
848}
849/// Incremental lexer: re-lexes only the invalidated region.
850#[allow(dead_code)]
851#[allow(missing_docs)]
852pub struct IncrementalLexerExt {
853    source: String,
854    validity: TokenValidity,
855    version: u64,
856}
857impl IncrementalLexerExt {
858    #[allow(dead_code)]
859    #[allow(missing_docs)]
860    pub fn new(source: impl Into<String>) -> Self {
861        Self {
862            source: source.into(),
863            validity: TokenValidity::new(),
864            version: 0,
865        }
866    }
867    #[allow(dead_code)]
868    #[allow(missing_docs)]
869    pub fn apply_edit(&mut self, edit: SourceEdit) {
870        let inv = compute_invalidated_range(&edit, 64);
871        self.validity.invalidate(&inv);
872        self.source = apply_edits(&self.source, &[edit]);
873        self.version += 1;
874    }
875    #[allow(dead_code)]
876    #[allow(missing_docs)]
877    pub fn source(&self) -> &str {
878        &self.source
879    }
880    #[allow(dead_code)]
881    #[allow(missing_docs)]
882    pub fn version(&self) -> u64 {
883        self.version
884    }
885    #[allow(dead_code)]
886    #[allow(missing_docs)]
887    pub fn valid_token_count(&self) -> usize {
888        self.validity.valid_count()
889    }
890    #[allow(dead_code)]
891    #[allow(missing_docs)]
892    pub fn needs_relex(&self, pos: usize) -> bool {
893        !self.validity.is_valid_at(pos)
894    }
895}
/// A concurrency-safe version counter for incremental state.
///
/// All operations use `SeqCst` ordering.
#[allow(dead_code)]
pub struct AtomicVersion {
    inner: std::sync::atomic::AtomicU64,
}

#[allow(dead_code)]
impl AtomicVersion {
    /// Creates a counter starting at 0.
    pub fn new() -> Self {
        Self {
            inner: std::sync::atomic::AtomicU64::new(0),
        }
    }

    /// Atomically bumps the counter and returns the post-increment value.
    pub fn increment(&self) -> u64 {
        let previous = self.inner.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        previous + 1
    }

    /// Reads the current value.
    pub fn load(&self) -> u64 {
        self.inner.load(std::sync::atomic::Ordering::SeqCst)
    }

    /// Resets the counter back to 0.
    pub fn reset(&self) {
        self.inner.store(0, std::sync::atomic::Ordering::SeqCst);
    }
}
/// The kind of a syntax node.
///
/// `Def`/`Theorem`/`Axiom` mirror declaration keywords recognized
/// elsewhere in this module; `Token` carries raw token text; `Error`
/// marks an error node.
#[derive(Debug, Clone, PartialEq, Eq)]
#[allow(missing_docs)]
pub enum SyntaxKind {
    Root,
    Def,
    Theorem,
    Axiom,
    Ident,
    Literal,
    // Carries the token's text.
    Token(String),
    Error,
}
/// The result of an incremental parse attempt.
#[allow(dead_code)]
#[derive(Debug)]
pub struct IncrementalParseResult {
    /// Whether the parse completed successfully.
    pub success: bool,
    /// Nodes carried over from the previous tree.
    pub reused_nodes: usize,
    /// Nodes built from scratch.
    pub rebuilt_nodes: usize,
    /// Wall-clock parse time in microseconds.
    pub parse_time_us: u64,
    /// Error messages collected during the parse.
    pub errors: Vec<String>,
}

#[allow(dead_code)]
impl IncrementalParseResult {
    /// Creates a result with the given counters and no errors.
    pub fn new(success: bool, reused: usize, rebuilt: usize, time_us: u64) -> Self {
        Self {
            success,
            reused_nodes: reused,
            rebuilt_nodes: rebuilt,
            parse_time_us: time_us,
            errors: Vec::new(),
        }
    }

    /// Appends an error message to the result.
    pub fn add_error(&mut self, e: impl Into<String>) {
        self.errors.push(e.into());
    }

    /// Fraction of nodes that were reused; `0.0` when no nodes exist.
    pub fn reuse_ratio(&self) -> f64 {
        match self.reused_nodes + self.rebuilt_nodes {
            0 => 0.0,
            total => self.reused_nodes as f64 / total as f64,
        }
    }

    /// `true` when at least one error was recorded.
    pub fn has_errors(&self) -> bool {
        !self.errors.is_empty()
    }
}
/// One cached incremental-parse result, keyed by the hash of the
/// source region it was produced from (stored by `IncrementalParseCache`).
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone)]
pub struct IncrParseEntry {
    // Hash of the source region this entry was parsed from; used as
    // the cache key.
    pub region_hash: u64,
    // Presumably a serialized/string form of the parse result —
    // TODO(review): confirm the intended format with producers.
    pub result_repr: String,
    // Source version at which this entry was produced.
    pub version: u64,
}