Skip to main content

oxilean_parse/incremental/types/
defs.rs

1//! Auto-generated module
2//!
3//! 🤖 Generated with [SplitRS](https://github.com/cool-japan/splitrs)
4
5use super::super::functions::*;
6#[allow(unused_imports)]
7use super::impls::*;
8use std::collections::HashMap;
9use std::ops::Range;
10
11/// A red node: a green node viewed at a specific byte offset.
12#[allow(missing_docs)]
13pub struct RedNode<'a> {
14    pub green: &'a GreenNode,
15    pub offset: usize,
16}
17impl<'a> RedNode<'a> {
18    #[allow(missing_docs)]
19    pub fn new(green: &'a GreenNode, offset: usize) -> Self {
20        Self { green, offset }
21    }
22    #[allow(missing_docs)]
23    pub fn range(&self) -> Range<usize> {
24        self.offset..self.offset + self.green.width
25    }
26    #[allow(missing_docs)]
27    pub fn children(&self) -> Vec<RedNode<'_>> {
28        let mut pos = self.offset;
29        self.green
30            .children
31            .iter()
32            .map(|child| {
33                let node = RedNode::new(child, pos);
34                pos += child.width;
35                node
36            })
37            .collect()
38    }
39    #[allow(missing_docs)]
40    pub fn kind(&self) -> &SyntaxKind {
41        &self.green.kind
42    }
43}
44/// A green node: an immutable, position-independent syntax tree node.
45#[derive(Debug, Clone)]
46#[allow(missing_docs)]
47pub struct GreenNode {
48    pub kind: SyntaxKind,
49    pub width: usize,
50    pub children: Vec<GreenNode>,
51    pub text: Option<String>,
52}
53impl GreenNode {
54    #[allow(missing_docs)]
55    pub fn leaf(kind: SyntaxKind, text: impl Into<String>) -> Self {
56        let text = text.into();
57        let width = text.len();
58        GreenNode {
59            kind,
60            width,
61            children: Vec::new(),
62            text: Some(text),
63        }
64    }
65    #[allow(missing_docs)]
66    pub fn interior(kind: SyntaxKind, children: Vec<GreenNode>) -> Self {
67        let width = children.iter().map(|c| c.width).sum();
68        GreenNode {
69            kind,
70            width,
71            children,
72            text: None,
73        }
74    }
75    #[allow(missing_docs)]
76    pub fn is_leaf(&self) -> bool {
77        self.children.is_empty()
78    }
79    #[allow(missing_docs)]
80    pub fn to_text(&self) -> String {
81        if let Some(t) = &self.text {
82            return t.clone();
83        }
84        self.children.iter().map(|c| c.to_text()).collect()
85    }
86}
87/// A simple incremental lexer that caches line-level token fingerprints.
88#[allow(missing_docs)]
89pub struct IncrementalLexer {
90    line_fingerprints: Vec<Option<TokenFingerprint>>,
91    line_tokens: Vec<Vec<String>>,
92}
93impl IncrementalLexer {
94    #[allow(missing_docs)]
95    pub fn new() -> Self {
96        Self {
97            line_fingerprints: Vec::new(),
98            line_tokens: Vec::new(),
99        }
100    }
101    #[allow(missing_docs)]
102    pub fn lex(&mut self, source: &str, dirty_lines: &[usize]) -> Vec<String> {
103        let lines: Vec<&str> = source.lines().collect();
104        self.line_fingerprints.resize(lines.len(), None);
105        self.line_tokens.resize(lines.len(), Vec::new());
106        for (i, line) in lines.iter().enumerate() {
107            let fp = TokenFingerprint::compute(&[line]);
108            if dirty_lines.contains(&i) || self.line_fingerprints[i].as_ref() != Some(&fp) {
109                let tokens = self.tokenize_line(line);
110                self.line_fingerprints[i] = Some(fp);
111                self.line_tokens[i] = tokens;
112            }
113        }
114        self.line_tokens.iter().flatten().cloned().collect()
115    }
116    fn tokenize_line(&self, line: &str) -> Vec<String> {
117        line.split_whitespace().map(String::from).collect()
118    }
119    #[allow(missing_docs)]
120    pub fn invalidate_lines(&mut self, range: Range<usize>) {
121        for i in range {
122            if i < self.line_fingerprints.len() {
123                self.line_fingerprints[i] = None;
124            }
125        }
126    }
127    #[allow(missing_docs)]
128    pub fn reset(&mut self) {
129        self.line_fingerprints.clear();
130        self.line_tokens.clear();
131    }
132}
133/// A cache mapping source ranges to AST node IDs for incremental updates.
134#[allow(dead_code)]
135#[allow(missing_docs)]
136pub struct NodeRangeCache {
137    entries: std::collections::BTreeMap<(usize, usize), u32>,
138}
139impl NodeRangeCache {
140    #[allow(dead_code)]
141    #[allow(missing_docs)]
142    pub fn new() -> Self {
143        Self {
144            entries: std::collections::BTreeMap::new(),
145        }
146    }
147    #[allow(dead_code)]
148    #[allow(missing_docs)]
149    pub fn insert(&mut self, start: usize, end: usize, node_id: u32) {
150        self.entries.insert((start, end), node_id);
151    }
152    #[allow(dead_code)]
153    #[allow(missing_docs)]
154    pub fn lookup(&self, start: usize, end: usize) -> Option<u32> {
155        self.entries.get(&(start, end)).copied()
156    }
157    #[allow(dead_code)]
158    #[allow(missing_docs)]
159    pub fn invalidate_range(&mut self, inv: &InvalidatedRange) {
160        let to_remove: Vec<_> = self
161            .entries
162            .keys()
163            .filter(|(s, e)| *s < inv.end && *e > inv.start)
164            .copied()
165            .collect();
166        for k in to_remove {
167            self.entries.remove(&k);
168        }
169    }
170    #[allow(dead_code)]
171    #[allow(missing_docs)]
172    pub fn size(&self) -> usize {
173        self.entries.len()
174    }
175}
/// Priority of a reparse request.
///
/// The derived `Ord` follows declaration order, so
/// `Low < Normal < High < Urgent`; `ReparseQueue` relies on this for
/// its priority ordering.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum ReparsePriority {
    /// Lowest scheduling priority.
    Low,
    /// Default scheduling priority.
    Normal,
    /// Elevated scheduling priority.
    High,
    /// Highest scheduling priority; `ReparseQueue::has_urgent` checks for it.
    Urgent,
}
/// Kind of a bracket/indentation scope tracked during incremental parsing
/// (used by `IncrScopeEntry` / `IncrScopeStack`).
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ScopeKind2 {
    /// `(...)` scope.
    Paren,
    /// `[...]` scope.
    Bracket,
    /// `{...}` scope.
    Brace,
    /// `do`-block scope.
    Do,
    /// `where`-block scope.
    Where,
    /// `let`-block scope.
    Let,
}
/// A "fiber" representing a partial parse continuation.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ParseFiber {
    pub id: u64,
    pub start_offset: usize,
    pub depth: usize,
    pub state_repr: String,
}
impl ParseFiber {
    /// Creates a fiber resuming at byte `start`, nested `depth` levels
    /// deep, carrying an opaque textual parser-state representation.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(id: u64, start: usize, depth: usize, state: impl Into<String>) -> Self {
        let state_repr = state.into();
        ParseFiber {
            id,
            start_offset: start,
            depth,
            state_repr,
        }
    }
    /// True when the fiber sits at nesting depth zero (top level).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_at_root(&self) -> bool {
        self.depth == 0
    }
}
222/// An undo/redo stack for source text.
223#[allow(missing_docs)]
224pub struct UndoRedoStack {
225    undo_stack: Vec<String>,
226    redo_stack: Vec<String>,
227    current: String,
228}
229impl UndoRedoStack {
230    #[allow(missing_docs)]
231    pub fn new(initial: impl Into<String>) -> Self {
232        Self {
233            undo_stack: Vec::new(),
234            redo_stack: Vec::new(),
235            current: initial.into(),
236        }
237    }
238    #[allow(missing_docs)]
239    pub fn push(&mut self, new_source: impl Into<String>) {
240        let new_source = new_source.into();
241        self.undo_stack
242            .push(std::mem::replace(&mut self.current, new_source));
243        self.redo_stack.clear();
244    }
245    #[allow(missing_docs)]
246    pub fn apply(&mut self, change: &TextChange) {
247        let new_source = change.apply(&self.current);
248        self.push(new_source);
249    }
250    #[allow(missing_docs)]
251    pub fn undo(&mut self) -> Option<&str> {
252        if let Some(prev) = self.undo_stack.pop() {
253            let old_current = std::mem::replace(&mut self.current, prev);
254            self.redo_stack.push(old_current);
255            Some(&self.current)
256        } else {
257            None
258        }
259    }
260    #[allow(missing_docs)]
261    pub fn redo(&mut self) -> Option<&str> {
262        if let Some(next) = self.redo_stack.pop() {
263            let old_current = std::mem::replace(&mut self.current, next);
264            self.undo_stack.push(old_current);
265            Some(&self.current)
266        } else {
267            None
268        }
269    }
270    #[allow(missing_docs)]
271    pub fn current(&self) -> &str {
272        &self.current
273    }
274    #[allow(missing_docs)]
275    pub fn can_undo(&self) -> bool {
276        !self.undo_stack.is_empty()
277    }
278    #[allow(missing_docs)]
279    pub fn can_redo(&self) -> bool {
280        !self.redo_stack.is_empty()
281    }
282    #[allow(missing_docs)]
283    pub fn undo_depth(&self) -> usize {
284        self.undo_stack.len()
285    }
286    #[allow(missing_docs)]
287    pub fn redo_depth(&self) -> usize {
288        self.redo_stack.len()
289    }
290}
/// A version counter for incremental parsing state.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ParseVersion {
    version: u64,
    last_full_parse: u64,
}
impl ParseVersion {
    /// Creates a counter at version 0 with a full parse recorded at 0.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        ParseVersion {
            version: 0,
            last_full_parse: 0,
        }
    }
    /// Bumps the version and returns the new value.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn increment(&mut self) -> u64 {
        self.version += 1;
        self.version
    }
    /// Records that a full parse happened at the current version.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn mark_full_parse(&mut self) {
        self.last_full_parse = self.version;
    }
    /// The current version number.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn current(&self) -> u64 {
        self.version
    }
    /// Number of edits recorded since the last full parse.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn edits_since_full_parse(&self) -> u64 {
        self.version - self.last_full_parse
    }
    /// True once at least `threshold` edits have accumulated since the
    /// last full parse.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn needs_full_reparse(&self, threshold: u64) -> bool {
        self.edits_since_full_parse() >= threshold
    }
}
/// A text change applied to the source
#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub struct TextChange {
    pub range: Range<usize>,
    pub new_text: String,
}
impl TextChange {
    /// Change that replaces bytes `start..end` with `new_text`.
    #[allow(missing_docs)]
    pub fn new(start: usize, end: usize, new_text: impl Into<String>) -> Self {
        TextChange {
            range: start..end,
            new_text: new_text.into(),
        }
    }
    /// Pure insertion of `text` at byte offset `at` (empty range).
    #[allow(missing_docs)]
    pub fn insertion(at: usize, text: impl Into<String>) -> Self {
        TextChange {
            range: at..at,
            new_text: text.into(),
        }
    }
    /// Pure deletion of bytes `start..end` (empty replacement text).
    #[allow(missing_docs)]
    pub fn deletion(start: usize, end: usize) -> Self {
        TextChange {
            range: start..end,
            new_text: String::new(),
        }
    }
    /// Replacement of bytes `start..end` with `text`.
    #[allow(missing_docs)]
    pub fn replacement(start: usize, end: usize, text: impl Into<String>) -> Self {
        TextChange {
            range: start..end,
            new_text: text.into(),
        }
    }
    /// Applies this change to `source` and returns the edited string.
    ///
    /// Offsets past the end of `source` are clamped to its length, and an
    /// offset that falls inside a multi-byte UTF-8 character is snapped to
    /// the enclosing character boundary (start snaps down, end snaps up)
    /// instead of panicking on the slice.
    #[allow(missing_docs)]
    pub fn apply(&self, source: &str) -> String {
        let mut start = self.range.start.min(source.len());
        let mut end = self.range.end.min(source.len());
        // Slicing a &str at a non-boundary byte panics; snap outward so the
        // whole partially-covered character is treated as replaced.
        while !source.is_char_boundary(start) {
            start -= 1;
        }
        while !source.is_char_boundary(end) {
            end += 1;
        }
        let mut result = String::with_capacity(source.len() + self.new_text.len());
        result.push_str(&source[..start]);
        result.push_str(&self.new_text);
        result.push_str(&source[end..]);
        result
    }
    /// Net change in source length, in bytes (may be negative).
    #[allow(missing_docs)]
    pub fn delta(&self) -> i64 {
        (self.new_text.len() as i64) - ((self.range.end - self.range.start) as i64)
    }
    /// True for a pure insertion (empty range, non-empty text).
    #[allow(missing_docs)]
    pub fn is_insertion(&self) -> bool {
        self.range.start == self.range.end && !self.new_text.is_empty()
    }
    /// True for a pure deletion (non-empty range, empty text).
    #[allow(missing_docs)]
    pub fn is_deletion(&self) -> bool {
        self.new_text.is_empty() && self.range.start < self.range.end
    }
    /// True for a replacement (non-empty range, non-empty text).
    #[allow(missing_docs)]
    pub fn is_replacement(&self) -> bool {
        !self.new_text.is_empty() && self.range.start < self.range.end
    }
}
/// A rope-like structure for efficient incremental text editing.
///
/// NOTE(review): the current implementation keeps a single chunk and
/// rebuilds it on every edit; the chunked representation exists so a real
/// rope can be dropped in later without changing the interface.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct SimpleRope {
    pub(crate) chunks: Vec<String>,
    pub(crate) len: usize,
}
impl SimpleRope {
    /// Creates a rope holding `text` as one chunk.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(text: impl Into<String>) -> Self {
        let s = text.into();
        let len = s.len();
        SimpleRope {
            chunks: vec![s],
            len,
        }
    }
    /// Concatenates all chunks into a single `String`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn as_string(&self) -> String {
        self.chunks.concat()
    }
    /// Total length in bytes.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.len
    }
    /// True when the rope holds no text.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    /// Inserts `text` at byte offset `pos`.
    ///
    /// `pos` is clamped to the text length; if it falls inside a
    /// multi-byte UTF-8 character it is snapped down to the preceding
    /// character boundary instead of panicking on the slice.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn insert(&mut self, pos: usize, text: &str) {
        let full = self.as_string();
        let mut pos = pos.min(full.len());
        while !full.is_char_boundary(pos) {
            pos -= 1;
        }
        let mut new_text = String::with_capacity(full.len() + text.len());
        new_text.push_str(&full[..pos]);
        new_text.push_str(text);
        new_text.push_str(&full[pos..]);
        self.len = new_text.len();
        self.chunks = vec![new_text];
    }
    /// Deletes bytes `start..end`.
    ///
    /// Offsets are clamped to the text length; offsets inside a
    /// multi-byte UTF-8 character are snapped outward (start down, end up)
    /// so the whole partially-covered character is removed rather than
    /// panicking on the slice.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn delete(&mut self, start: usize, end: usize) {
        let full = self.as_string();
        let mut start = start.min(full.len());
        let mut end = end.min(full.len());
        while !full.is_char_boundary(start) {
            start -= 1;
        }
        while !full.is_char_boundary(end) {
            end += 1;
        }
        let mut new_text = String::with_capacity(full.len());
        new_text.push_str(&full[..start]);
        new_text.push_str(&full[end..]);
        self.len = new_text.len();
        self.chunks = vec![new_text];
    }
    /// Number of underlying chunks (currently always 1 after any edit).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn chunk_count(&self) -> usize {
        self.chunks.len()
    }
}
455/// Tracks valid token ranges for incremental re-lexing.
456#[allow(dead_code)]
457#[allow(missing_docs)]
458pub struct TokenValidity {
459    valid_ranges: Vec<(usize, usize)>,
460}
461impl TokenValidity {
462    #[allow(dead_code)]
463    #[allow(missing_docs)]
464    pub fn new() -> Self {
465        Self {
466            valid_ranges: Vec::new(),
467        }
468    }
469    #[allow(dead_code)]
470    #[allow(missing_docs)]
471    pub fn mark_valid(&mut self, start: usize, end: usize) {
472        self.valid_ranges.push((start, end));
473    }
474    #[allow(dead_code)]
475    #[allow(missing_docs)]
476    pub fn invalidate(&mut self, range: &InvalidatedRange) {
477        self.valid_ranges
478            .retain(|(s, e)| *e <= range.start || *s >= range.end);
479    }
480    #[allow(dead_code)]
481    #[allow(missing_docs)]
482    pub fn is_valid_at(&self, pos: usize) -> bool {
483        self.valid_ranges.iter().any(|(s, e)| pos >= *s && pos < *e)
484    }
485    #[allow(dead_code)]
486    #[allow(missing_docs)]
487    pub fn valid_count(&self) -> usize {
488        self.valid_ranges.len()
489    }
490}
491/// A priority queue for reparse requests.
492#[allow(dead_code)]
493#[allow(missing_docs)]
494pub struct ReparseQueue {
495    requests: Vec<ReparseRequest>,
496}
497impl ReparseQueue {
498    #[allow(dead_code)]
499    #[allow(missing_docs)]
500    pub fn new() -> Self {
501        Self {
502            requests: Vec::new(),
503        }
504    }
505    #[allow(dead_code)]
506    #[allow(missing_docs)]
507    pub fn push(&mut self, req: ReparseRequest) {
508        self.requests.push(req);
509        self.requests.sort_by_key(|b| std::cmp::Reverse(b.priority));
510    }
511    #[allow(dead_code)]
512    #[allow(missing_docs)]
513    pub fn pop(&mut self) -> Option<ReparseRequest> {
514        if self.requests.is_empty() {
515            None
516        } else {
517            Some(self.requests.remove(0))
518        }
519    }
520    #[allow(dead_code)]
521    #[allow(missing_docs)]
522    pub fn len(&self) -> usize {
523        self.requests.len()
524    }
525    #[allow(dead_code)]
526    #[allow(missing_docs)]
527    pub fn is_empty(&self) -> bool {
528        self.requests.is_empty()
529    }
530    #[allow(dead_code)]
531    #[allow(missing_docs)]
532    pub fn has_urgent(&self) -> bool {
533        self.requests
534            .iter()
535            .any(|r| r.priority == ReparsePriority::Urgent)
536    }
537}
/// A rolling checksum for incremental validation.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct IncrementalChecksum {
    partial_sums: Vec<u64>,
}
impl IncrementalChecksum {
    /// Builds prefix sums of the source bytes: `partial_sums[i]` is the
    /// wrapping sum of the first `i` bytes.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn build(source: &str) -> Self {
        let mut sums = Vec::with_capacity(source.len() + 1);
        let mut acc = 0u64;
        sums.push(acc);
        for b in source.bytes() {
            acc = acc.wrapping_add(u64::from(b));
            sums.push(acc);
        }
        IncrementalChecksum { partial_sums: sums }
    }
    /// Wrapping byte-sum of `source[start..end]`, with both indices
    /// clamped into range (and `start` clamped to `end`).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn range_sum(&self, start: usize, end: usize) -> u64 {
        let last = self.partial_sums.len().saturating_sub(1);
        let end = end.min(last);
        let start = start.min(end);
        self.partial_sums[end].wrapping_sub(self.partial_sums[start])
    }
    /// Wrapping byte-sum of the whole source.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn total(&self) -> u64 {
        self.partial_sums.last().copied().unwrap_or(0)
    }
}
567/// Represents a bracket/indentation scope for incremental scope tracking.
568#[allow(dead_code)]
569#[allow(missing_docs)]
570#[derive(Clone, Debug)]
571pub struct IncrScopeEntry {
572    pub start: usize,
573    pub kind: ScopeKind2,
574    pub depth: usize,
575}
576impl IncrScopeEntry {
577    #[allow(dead_code)]
578    #[allow(missing_docs)]
579    pub fn new(start: usize, kind: ScopeKind2, depth: usize) -> Self {
580        Self { start, kind, depth }
581    }
582}
/// A dependency graph for declarations.
#[allow(missing_docs)]
pub struct DependencyGraph {
    edges: HashMap<String, Vec<String>>,
    reverse: HashMap<String, Vec<String>>,
}
impl DependencyGraph {
    /// Creates an empty graph.
    #[allow(missing_docs)]
    pub fn new() -> Self {
        DependencyGraph {
            edges: HashMap::new(),
            reverse: HashMap::new(),
        }
    }
    /// Records that `dependent` depends on `dependency`, updating both
    /// the forward and reverse adjacency lists.
    #[allow(missing_docs)]
    pub fn add_edge(&mut self, dependent: &str, dependency: &str) {
        let fwd = self.edges.entry(dependent.to_string()).or_default();
        fwd.push(dependency.to_string());
        let rev = self.reverse.entry(dependency.to_string()).or_default();
        rev.push(dependent.to_string());
    }
    /// All declarations that transitively depend on `name`, discovered by
    /// a depth-first walk of the reverse edges (`name` itself excluded).
    #[allow(missing_docs)]
    pub fn dependents_of(&self, name: &str) -> Vec<String> {
        let mut seen = std::collections::HashSet::new();
        let mut pending = vec![name.to_string()];
        let mut found = Vec::new();
        while let Some(node) = pending.pop() {
            if let Some(back_edges) = self.reverse.get(&node) {
                for parent in back_edges {
                    if seen.insert(parent.clone()) {
                        found.push(parent.clone());
                        pending.push(parent.clone());
                    }
                }
            }
        }
        found
    }
    /// Direct dependencies of `name` (empty slice when unknown).
    #[allow(missing_docs)]
    pub fn direct_dependencies(&self, name: &str) -> &[String] {
        match self.edges.get(name) {
            Some(deps) => deps.as_slice(),
            None => &[],
        }
    }
    /// Removes `name` and scrubs it from both adjacency directions.
    #[allow(missing_docs)]
    pub fn remove_node(&mut self, name: &str) {
        for dep in self.edges.remove(name).unwrap_or_default() {
            if let Some(rev) = self.reverse.get_mut(&dep) {
                rev.retain(|n| n != name);
            }
        }
        for rev_dep in self.reverse.remove(name).unwrap_or_default() {
            if let Some(fwd) = self.edges.get_mut(&rev_dep) {
                fwd.retain(|n| n != name);
            }
        }
    }
    /// Number of distinct nodes mentioned on either side of any edge.
    #[allow(missing_docs)]
    pub fn node_count(&self) -> usize {
        self.edges
            .keys()
            .chain(self.reverse.keys())
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>()
            .len()
    }
}
/// Source version tracking for LSP
#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub struct VersionedSource {
    // Document identifier (opaque here; presumably an LSP URI string).
    pub uri: String,
    // Client-supplied document version; starts at 0.
    pub version: i32,
    // Full current text of the document.
    pub content: String,
}
impl VersionedSource {
    /// Creates a versioned document at version 0 with the given content.
    #[allow(missing_docs)]
    pub fn new(uri: impl Into<String>, content: impl Into<String>) -> Self {
        VersionedSource {
            uri: uri.into(),
            version: 0,
            content: content.into(),
        }
    }
    /// Applies `change` to the content in place.
    ///
    /// NOTE(review): does NOT bump `version` (unlike `update`, which sets
    /// it explicitly) — confirm callers advance the version separately.
    #[allow(missing_docs)]
    pub fn apply_change(&mut self, change: TextChange) -> &mut Self {
        self.content = change.apply(&self.content);
        self
    }
    /// Replaces the whole content and sets the version.
    #[allow(missing_docs)]
    pub fn update(&mut self, new_content: impl Into<String>, new_version: i32) -> &mut Self {
        self.content = new_content.into();
        self.version = new_version;
        self
    }
    /// Content length in bytes.
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.content.len()
    }
    /// True when the content is empty.
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.content.is_empty()
    }
    /// Converts a byte offset (clamped to the content length) into a
    /// zero-based `(line, column)` pair.
    ///
    /// The line is the number of `'\n'` characters before `offset`; the
    /// column is measured in BYTES from the last newline.
    ///
    /// NOTE(review): `position_to_offset` counts columns in CHARACTERS,
    /// so these two are not inverses for multi-byte text — confirm which
    /// unit callers expect.
    #[allow(missing_docs)]
    pub fn offset_to_position(&self, offset: usize) -> (usize, usize) {
        let offset = offset.min(self.content.len());
        let before = &self.content[..offset];
        let line = before.chars().filter(|&c| c == '\n').count();
        let col = before
            .rfind('\n')
            .map(|nl| offset - nl - 1)
            .unwrap_or(offset);
        (line, col)
    }
    /// Converts a zero-based `(line, col)` pair into a byte offset, where
    /// `col` counts CHARACTERS on that line. Returns `None` when the line
    /// does not exist or `col` lies past the end of the line.
    #[allow(missing_docs)]
    pub fn position_to_offset(&self, line: usize, col: usize) -> Option<usize> {
        let mut current_line = 0usize;
        let mut line_start = 0usize;
        for (i, ch) in self.content.char_indices() {
            // Once the scan reaches the first char of the target line,
            // walk that line char by char looking for column `col`.
            if current_line == line {
                let mut col_offset = 0usize;
                let mut offset = line_start;
                for c in self.content[line_start..].chars() {
                    if col_offset == col {
                        return Some(offset);
                    }
                    offset += c.len_utf8();
                    col_offset += 1;
                    if c == '\n' {
                        break;
                    }
                }
                // `col` may address the position just past the last char
                // (end-of-line / end-of-text cursor position).
                if col_offset == col {
                    return Some(offset);
                }
                return None;
            }
            if ch == '\n' {
                current_line += 1;
                line_start = i + 1;
            }
        }
        // Target line starts exactly at end-of-content (e.g. text ends
        // with '\n', or the document is empty): only columns within the
        // (empty or trailing) line are valid.
        if current_line == line {
            let line_len = self.content[line_start..].len();
            if col <= line_len {
                return Some(line_start + col);
            }
        }
        None
    }
}
/// A token range that becomes invalid after an edit.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct InvalidatedRange {
    pub start: usize,
    pub end: usize,
}
impl InvalidatedRange {
    /// Creates the range `start..end`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(start: usize, end: usize) -> Self {
        Self { start, end }
    }
    /// Length in bytes. Saturates to 0 when `end < start`, matching
    /// `is_empty` (which already treats such ranges as empty) and
    /// `DirtyRegion::byte_count` — a plain subtraction would underflow.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.end.saturating_sub(self.start)
    }
    /// True when the range covers no bytes.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.end <= self.start
    }
    /// True when `pos` lies within `start..end` (half-open).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn contains(&self, pos: usize) -> bool {
        pos >= self.start && pos < self.end
    }
    /// True when the two half-open ranges share at least one byte.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn overlaps(&self, other: &Self) -> bool {
        self.start < other.end && self.end > other.start
    }
    /// Smallest range covering both `self` and `other`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn merge(&self, other: &Self) -> Self {
        Self {
            start: self.start.min(other.start),
            end: self.end.max(other.end),
        }
    }
}
/// Represents a "dirty" region in the source that needs re-parsing.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct DirtyRegion {
    pub start_line: usize,
    pub end_line: usize,
    pub start_byte: usize,
    pub end_byte: usize,
}
impl DirtyRegion {
    /// Creates a region spanning the given inclusive line range and the
    /// corresponding byte range.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new(start_line: usize, end_line: usize, start_byte: usize, end_byte: usize) -> Self {
        DirtyRegion {
            start_line,
            end_line,
            start_byte,
            end_byte,
        }
    }
    /// Number of lines covered (inclusive of both endpoints).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn line_count(&self) -> usize {
        1 + self.end_line.saturating_sub(self.start_line)
    }
    /// Number of bytes covered.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn byte_count(&self) -> usize {
        self.end_byte.saturating_sub(self.start_byte)
    }
    /// True when the region begins and ends on the same line.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_single_line(&self) -> bool {
        self.start_line == self.end_line
    }
}
/// A simple persistent (copy-on-write) vector.
#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub struct PersistentVec<T: Clone> {
    data: std::rc::Rc<Vec<T>>,
}
impl<T: Clone> PersistentVec<T> {
    /// Creates an empty persistent vector.
    #[allow(missing_docs)]
    pub fn new() -> Self {
        PersistentVec {
            data: std::rc::Rc::new(Vec::new()),
        }
    }
    /// Returns a new vector with `value` appended; `self` is unchanged.
    #[allow(missing_docs)]
    pub fn push(&self, value: T) -> Self {
        let mut copied = Vec::clone(&self.data);
        copied.push(value);
        PersistentVec {
            data: std::rc::Rc::new(copied),
        }
    }
    /// Returns a new vector with index `idx` replaced by `value`, or
    /// `None` when `idx` is out of bounds; `self` is unchanged.
    #[allow(missing_docs)]
    pub fn set(&self, idx: usize, value: T) -> Option<Self> {
        if idx >= self.data.len() {
            return None;
        }
        let mut copied = Vec::clone(&self.data);
        copied[idx] = value;
        Some(PersistentVec {
            data: std::rc::Rc::new(copied),
        })
    }
    /// Element at `idx`, if present.
    #[allow(missing_docs)]
    pub fn get(&self, idx: usize) -> Option<&T> {
        self.data.get(idx)
    }
    /// Number of elements.
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.data.len()
    }
    /// True when the vector holds no elements.
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
    /// Iterator over the elements.
    #[allow(missing_docs)]
    pub fn iter(&self) -> std::slice::Iter<'_, T> {
        self.data.iter()
    }
}
/// Represents a single source edit (insert or delete).
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub struct SourceEdit {
    pub start: usize,
    pub end: usize,
    pub new_text: String,
}
impl SourceEdit {
    /// Edit that inserts `text` at byte `pos` (empty range).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn insert(pos: usize, text: impl Into<String>) -> Self {
        SourceEdit {
            start: pos,
            end: pos,
            new_text: text.into(),
        }
    }
    /// Edit that deletes bytes `start..end` (empty replacement text).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn delete(start: usize, end: usize) -> Self {
        SourceEdit {
            start,
            end,
            new_text: String::new(),
        }
    }
    /// Edit that replaces bytes `start..end` with `text`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn replace(start: usize, end: usize, text: impl Into<String>) -> Self {
        SourceEdit {
            start,
            end,
            new_text: text.into(),
        }
    }
    /// True for a pure insertion: empty range, non-empty text.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_insert(&self) -> bool {
        self.start == self.end && !self.new_text.is_empty()
    }
    /// True for a pure deletion: non-empty range, empty text.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_delete(&self) -> bool {
        self.start < self.end && self.new_text.is_empty()
    }
    /// True for a replacement: non-empty range, non-empty text.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn is_replace(&self) -> bool {
        self.start < self.end && !self.new_text.is_empty()
    }
    /// Net change in source length, in bytes (may be negative).
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn delta(&self) -> i64 {
        self.new_text.len() as i64 - (self.end - self.start) as i64
    }
}
/// A map from byte offset to token ID for incremental relexing.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct OffsetToTokenMap {
    map: std::collections::BTreeMap<usize, u32>,
}
impl OffsetToTokenMap {
    /// Creates an empty map.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self {
            map: std::collections::BTreeMap::new(),
        }
    }
    /// Records that the token `token_id` starts at byte `offset`.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn insert(&mut self, offset: usize, token_id: u32) {
        self.map.insert(offset, token_id);
    }
    /// Token covering `offset`: the entry with the greatest start
    /// offset `<= offset`, if any.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn token_at(&self, offset: usize) -> Option<u32> {
        self.map.range(..=offset).next_back().map(|(_, &id)| id)
    }
    /// Removes every entry at or after `offset`.
    ///
    /// `split_off` detaches the whole tail in one operation instead of
    /// collecting the keys and removing them one by one.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn invalidate_from(&mut self, offset: usize) {
        self.map.split_off(&offset);
    }
    /// Shifts every entry at or after `from` by `delta` bytes (results
    /// clamp at 0). A shifted entry landing on an existing lower offset
    /// overwrites it, as before.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn shift(&mut self, from: usize, delta: i64) {
        let tail = self.map.split_off(&from);
        for (k, v) in tail {
            let shifted = (k as i64 + delta).max(0) as usize;
            self.map.insert(shifted, v);
        }
    }
    /// Number of recorded token starts.
    #[allow(dead_code)]
    #[allow(missing_docs)]
    pub fn count(&self) -> usize {
        self.map.len()
    }
}
978/// A stack of scopes for incremental parsing.
979#[allow(dead_code)]
980#[allow(missing_docs)]
981pub struct IncrScopeStack {
982    stack: Vec<IncrScopeEntry>,
983}
984impl IncrScopeStack {
985    #[allow(dead_code)]
986    #[allow(missing_docs)]
987    pub fn new() -> Self {
988        Self { stack: Vec::new() }
989    }
990    #[allow(dead_code)]
991    #[allow(missing_docs)]
992    pub fn push(&mut self, entry: IncrScopeEntry) {
993        self.stack.push(entry);
994    }
995    #[allow(dead_code)]
996    #[allow(missing_docs)]
997    pub fn pop(&mut self) -> Option<IncrScopeEntry> {
998        self.stack.pop()
999    }
1000    #[allow(dead_code)]
1001    #[allow(missing_docs)]
1002    pub fn peek(&self) -> Option<&IncrScopeEntry> {
1003        self.stack.last()
1004    }
1005    #[allow(dead_code)]
1006    #[allow(missing_docs)]
1007    pub fn depth(&self) -> usize {
1008        self.stack.len()
1009    }
1010    #[allow(dead_code)]
1011    #[allow(missing_docs)]
1012    pub fn is_empty(&self) -> bool {
1013        self.stack.is_empty()
1014    }
1015    #[allow(dead_code)]
1016    #[allow(missing_docs)]
1017    pub fn current_scope(&self) -> Option<ScopeKind2> {
1018        self.stack.last().map(|e| e.kind)
1019    }
1020}