// oxilean_parse/tokens/types.rs
1//! Auto-generated module
2//!
3//! 🤖 Generated with [SplitRS](https://github.com/cool-japan/splitrs)
4
5/// A token with optional semantic annotations.
6use super::functions::*;
7#[allow(dead_code)]
8#[derive(Debug, Clone)]
9pub struct AnnotatedToken {
10    /// The underlying token.
11    pub token: Token,
12    /// Zero or more annotations.
13    pub annotations: Vec<TokenAnnotation>,
14}
15#[allow(dead_code)]
16impl AnnotatedToken {
17    /// Create an unannotated token.
18    pub fn plain(token: Token) -> Self {
19        Self {
20            token,
21            annotations: Vec::new(),
22        }
23    }
24    /// Add an annotation.
25    pub fn annotate(&mut self, ann: TokenAnnotation) {
26        self.annotations.push(ann);
27    }
28    /// Check if the token has a given annotation.
29    pub fn has_annotation(&self, ann: &TokenAnnotation) -> bool {
30        self.annotations.contains(ann)
31    }
32    /// Check if the token is a binding site.
33    pub fn is_binding_site(&self) -> bool {
34        self.annotations
35            .iter()
36            .any(|a| matches!(a, TokenAnnotation::BindingSite(_)))
37    }
38    /// Check if the token is a use site.
39    pub fn is_use_site(&self) -> bool {
40        self.annotations
41            .iter()
42            .any(|a| matches!(a, TokenAnnotation::UseSite(_)))
43    }
44    /// Get the resolved name, if any.
45    pub fn resolved_name(&self) -> Option<&str> {
46        self.annotations.iter().find_map(|a| {
47            if let TokenAnnotation::ResolvedName(s) = a {
48                Some(s.as_str())
49            } else {
50                None
51            }
52        })
53    }
54}
/// The broad role of a token in OxiLean syntax.
///
/// A coarse classification, useful when the full `TokenKind` detail is not
/// needed. (`Copy`, so it can be passed around freely.)
#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TokenRole {
    /// A declaration-starting keyword.
    DeclStart,
    /// A proof/tactic keyword.
    TacticKeyword,
    /// A type-level keyword.
    TypeLevel,
    /// A term-level keyword.
    TermLevel,
    /// A punctuation/operator token.
    Punctuation,
    /// A name or identifier.
    Name,
    /// A literal value (number, string, char, ...).
    Literal,
    /// An end-of-input marker.
    Eof,
    /// An error token.
    Error,
}
/// Statistics about a token sequence.
///
/// Produced by `TokenStats::compute`. The per-category counters need not sum
/// to `total`: tokens matching no category are counted only in `total`.
#[derive(Debug, Clone, Default)]
pub struct TokenStats {
    /// Total number of tokens.
    pub total: usize,
    /// Number of identifiers.
    pub identifiers: usize,
    /// Number of keyword tokens.
    pub keywords: usize,
    /// Number of natural-number literals.
    pub nat_literals: usize,
    /// Number of string literals.
    pub string_literals: usize,
    /// Number of operator tokens (arithmetic, comparison, or logical).
    pub operators: usize,
    /// Number of error tokens.
    pub errors: usize,
    /// Number of doc comments.
    pub doc_comments: usize,
}
98impl TokenStats {
99    /// Compute statistics for a slice of tokens.
100    pub fn compute(tokens: &[Token]) -> Self {
101        let mut stats = Self {
102            total: tokens.len(),
103            ..Default::default()
104        };
105        for tok in tokens {
106            match &tok.kind {
107                TokenKind::Ident(_) => stats.identifiers += 1,
108                TokenKind::Nat(_) => stats.nat_literals += 1,
109                TokenKind::String(_) => stats.string_literals += 1,
110                TokenKind::Error(_) => stats.errors += 1,
111                TokenKind::DocComment(_) => stats.doc_comments += 1,
112                k if Token {
113                    kind: k.clone(),
114                    span: tok.span.clone(),
115                }
116                .is_keyword() =>
117                {
118                    stats.keywords += 1
119                }
120                k if is_arithmetic_op(k) || is_comparison_op(k) || is_logical_op(k) => {
121                    stats.operators += 1;
122                }
123                _ => {}
124            }
125        }
126        stats
127    }
128    /// Check if there are any error tokens.
129    pub fn has_errors(&self) -> bool {
130        self.errors > 0
131    }
132}
/// An iterator over a sequence of tokens.
///
/// Supports peeking, consuming, and backtracking (via snapshots). The stream
/// owns its tokens; `pos` advances on each consume and is only moved back by
/// `restore`.
pub struct TokenStream {
    /// The underlying token sequence.
    tokens: Vec<Token>,
    /// Current read position (index into `tokens`).
    pos: usize,
}
142impl TokenStream {
143    /// Create a new token stream from a vector of tokens.
144    pub fn new(tokens: Vec<Token>) -> Self {
145        Self { tokens, pos: 0 }
146    }
147    /// Peek at the current token without consuming it.
148    pub fn peek(&self) -> Option<&Token> {
149        self.tokens.get(self.pos)
150    }
151    /// Peek at the token `n` positions ahead (0 = current).
152    pub fn peek_ahead(&self, n: usize) -> Option<&Token> {
153        self.tokens.get(self.pos + n)
154    }
155    /// Get the current token kind without consuming.
156    pub fn current_kind(&self) -> Option<&TokenKind> {
157        self.tokens.get(self.pos).map(|t| &t.kind)
158    }
159    /// Check if the current token matches a given kind (by tag, ignoring data).
160    pub fn at_keyword(&self, kw: &TokenKind) -> bool {
161        self.tokens
162            .get(self.pos)
163            .is_some_and(|t| std::mem::discriminant(&t.kind) == std::mem::discriminant(kw))
164    }
165    /// Check if the current token is an identifier with a specific name.
166    pub fn at_ident(&self, name: &str) -> bool {
167        matches!(
168            self.tokens.get(self.pos).map(| t | & t.kind), Some(TokenKind::Ident(s)) if s
169            == name
170        )
171    }
172    /// Check if we are at the end of the stream.
173    pub fn is_eof(&self) -> bool {
174        self.pos >= self.tokens.len()
175            || matches!(
176                self.tokens.get(self.pos).map(|t| &t.kind),
177                Some(TokenKind::Eof)
178            )
179    }
180    /// Consume and return the current token.
181    #[allow(clippy::should_implement_trait)]
182    pub fn next(&mut self) -> Option<&Token> {
183        if self.pos < self.tokens.len() {
184            let tok = &self.tokens[self.pos];
185            self.pos += 1;
186            Some(tok)
187        } else {
188            None
189        }
190    }
191    /// Consume the current token if it matches the given kind.
192    ///
193    /// Returns `true` if consumed, `false` otherwise.
194    pub fn consume_if(&mut self, kind: &TokenKind) -> bool {
195        if self.tokens.get(self.pos).is_some_and(|t| &t.kind == kind) {
196            self.pos += 1;
197            true
198        } else {
199            false
200        }
201    }
202    /// Expect a specific token kind; return an error string if it doesn't match.
203    pub fn expect(&mut self, kind: &TokenKind) -> Result<&Token, String> {
204        match self.tokens.get(self.pos) {
205            Some(t) if &t.kind == kind => {
206                self.pos += 1;
207                Ok(&self.tokens[self.pos - 1])
208            }
209            Some(t) => Err(format!(
210                "Expected {} but got {} at line {}",
211                kind, t.kind, t.span.line
212            )),
213            None => Err(format!("Expected {} but got end of file", kind)),
214        }
215    }
216    /// Expect an identifier and return its name.
217    pub fn expect_ident(&mut self) -> Result<String, String> {
218        match self.tokens.get(self.pos) {
219            Some(Token {
220                kind: TokenKind::Ident(s),
221                ..
222            }) => {
223                let name = s.clone();
224                self.pos += 1;
225                Ok(name)
226            }
227            Some(t) => Err(format!(
228                "Expected identifier but got {} at line {}",
229                t.kind, t.span.line
230            )),
231            None => Err("Expected identifier but got end of file".to_string()),
232        }
233    }
234    /// Take a snapshot of the current position (for backtracking).
235    pub fn snapshot(&self) -> usize {
236        self.pos
237    }
238    /// Restore to a previous snapshot position.
239    pub fn restore(&mut self, snap: usize) {
240        self.pos = snap;
241    }
242    /// Get the remaining token count.
243    pub fn remaining(&self) -> usize {
244        self.tokens.len().saturating_sub(self.pos)
245    }
246    /// Get the current span (or a dummy span if at EOF).
247    pub fn current_span(&self) -> Span {
248        self.tokens
249            .get(self.pos)
250            .map(|t| t.span.clone())
251            .unwrap_or_else(|| Span::new(0, 0, 1, 1))
252    }
253    /// Skip tokens until we hit one of the given token kinds (for error recovery).
254    pub fn skip_until(&mut self, kinds: &[TokenKind]) {
255        while let Some(t) = self.tokens.get(self.pos) {
256            if kinds
257                .iter()
258                .any(|k| std::mem::discriminant(&t.kind) == std::mem::discriminant(k))
259            {
260                break;
261            }
262            self.pos += 1;
263        }
264    }
265    /// Collect all tokens from the current position to the end.
266    pub fn drain_remaining(&mut self) -> Vec<Token> {
267        let rest = self.tokens[self.pos..].to_vec();
268        self.pos = self.tokens.len();
269        rest
270    }
271    /// Get the total number of tokens (including EOF).
272    pub fn total_len(&self) -> usize {
273        self.tokens.len()
274    }
275}
/// A token with location information.
#[derive(Clone, Debug, PartialEq)]
pub struct Token {
    /// What kind of token this is (keyword, punctuation, literal, ...).
    pub kind: TokenKind,
    /// Where in the source text the token came from.
    pub span: Span,
}
284impl Token {
285    /// Create a new token.
286    pub fn new(kind: TokenKind, span: Span) -> Self {
287        Self { kind, span }
288    }
289    /// Check if this token is an identifier.
290    pub fn is_ident(&self) -> bool {
291        matches!(self.kind, TokenKind::Ident(_))
292    }
293    /// Get the identifier name if this is an identifier.
294    pub fn as_ident(&self) -> Option<&str> {
295        match &self.kind {
296            TokenKind::Ident(s) => Some(s),
297            _ => None,
298        }
299    }
300    /// Check if this token is a keyword.
301    pub fn is_keyword(&self) -> bool {
302        matches!(
303            self.kind,
304            TokenKind::Axiom
305                | TokenKind::Definition
306                | TokenKind::Theorem
307                | TokenKind::Lemma
308                | TokenKind::Fun
309                | TokenKind::Forall
310                | TokenKind::Let
311                | TokenKind::In
312                | TokenKind::If
313                | TokenKind::Then
314                | TokenKind::Else
315                | TokenKind::Match
316                | TokenKind::With
317                | TokenKind::Do
318                | TokenKind::Have
319                | TokenKind::Suffices
320                | TokenKind::Show
321                | TokenKind::Where
322                | TokenKind::From
323                | TokenKind::By
324                | TokenKind::Return
325        )
326    }
327}
/// A cursor for reading tokens with position tracking and backtracking.
///
/// Unlike `TokenStream`, the cursor borrows its tokens and keeps a *stack* of
/// checkpoints, so speculative parses can nest (`save` / `restore` / `commit`).
#[allow(dead_code)]
pub struct TokenCursor<'a> {
    /// Borrowed token sequence being read.
    tokens: &'a [Token],
    /// Current read position (index into `tokens`).
    pos: usize,
    /// Saved positions for backtracking (LIFO).
    checkpoints: Vec<usize>,
}
336#[allow(dead_code)]
337impl<'a> TokenCursor<'a> {
338    /// Create a cursor over a slice of tokens.
339    pub fn new(tokens: &'a [Token]) -> Self {
340        Self {
341            tokens,
342            pos: 0,
343            checkpoints: Vec::new(),
344        }
345    }
346    /// Peek at the current token.
347    pub fn peek(&self) -> Option<&'a Token> {
348        self.tokens.get(self.pos)
349    }
350    /// Peek `n` positions ahead.
351    pub fn peek_n(&self, n: usize) -> Option<&'a Token> {
352        self.tokens.get(self.pos + n)
353    }
354    /// Consume the current token.
355    pub fn advance(&mut self) -> Option<&'a Token> {
356        if self.pos < self.tokens.len() {
357            let tok = &self.tokens[self.pos];
358            self.pos += 1;
359            Some(tok)
360        } else {
361            None
362        }
363    }
364    /// Current position.
365    pub fn position(&self) -> usize {
366        self.pos
367    }
368    /// Remaining tokens.
369    pub fn remaining(&self) -> usize {
370        self.tokens.len().saturating_sub(self.pos)
371    }
372    /// Whether we are at end of input.
373    pub fn is_eof(&self) -> bool {
374        self.pos >= self.tokens.len()
375    }
376    /// Save current position as a checkpoint.
377    pub fn save(&mut self) {
378        self.checkpoints.push(self.pos);
379    }
380    /// Restore the most recent checkpoint.
381    pub fn restore(&mut self) {
382        if let Some(saved) = self.checkpoints.pop() {
383            self.pos = saved;
384        }
385    }
386    /// Discard the most recent checkpoint (commit the advance).
387    pub fn commit(&mut self) {
388        self.checkpoints.pop();
389    }
390    /// Consume if the current token kind matches.
391    pub fn try_consume(&mut self, kind: &TokenKind) -> bool {
392        if self.tokens.get(self.pos).is_some_and(|t| &t.kind == kind) {
393            self.pos += 1;
394            true
395        } else {
396            false
397        }
398    }
399    /// Expect the current kind matches, else return an error string.
400    pub fn expect_kind(&mut self, kind: &TokenKind) -> Result<&'a Token, String> {
401        match self.tokens.get(self.pos) {
402            Some(t) if &t.kind == kind => {
403                self.pos += 1;
404                Ok(&self.tokens[self.pos - 1])
405            }
406            Some(t) => Err(format!("expected {:?} got {:?}", kind, t.kind)),
407            None => Err(format!("expected {:?} got EOF", kind)),
408        }
409    }
410    /// Get a slice of the remaining tokens.
411    pub fn remaining_slice(&self) -> &'a [Token] {
412        &self.tokens[self.pos..]
413    }
414    /// Collect tokens until `pred` returns `true`.
415    pub fn collect_until<F: Fn(&Token) -> bool>(&mut self, pred: F) -> Vec<&'a Token> {
416        let mut result = Vec::new();
417        while let Some(tok) = self.tokens.get(self.pos) {
418            if pred(tok) {
419                break;
420            }
421            result.push(tok);
422            self.pos += 1;
423        }
424        result
425    }
426    /// Skip tokens as long as `pred` returns `true`.
427    pub fn skip_while<F: Fn(&Token) -> bool>(&mut self, pred: F) {
428        while self.tokens.get(self.pos).is_some_and(&pred) {
429            self.pos += 1;
430        }
431    }
432}
/// A set of token kind discriminants for fast membership testing.
///
/// Membership is by enum variant only — payload data (e.g. the text of an
/// `Ident`) is ignored. Backed by a small `Vec` with linear scans, which is
/// appropriate for the handful of kinds typically stored.
#[allow(dead_code)]
pub struct TokenKindSet {
    /// The included kinds (compared by discriminant; no duplicates).
    kinds: Vec<std::mem::Discriminant<TokenKind>>,
}
439#[allow(dead_code)]
440impl TokenKindSet {
441    /// Create an empty set.
442    pub fn new() -> Self {
443        Self { kinds: Vec::new() }
444    }
445    /// Add a kind to the set.
446    pub fn add(&mut self, kind: &TokenKind) {
447        let d = std::mem::discriminant(kind);
448        if !self.kinds.contains(&d) {
449            self.kinds.push(d);
450        }
451    }
452    /// Check if the set contains a kind.
453    pub fn contains(&self, kind: &TokenKind) -> bool {
454        self.kinds.contains(&std::mem::discriminant(kind))
455    }
456    /// Number of kinds in the set.
457    pub fn len(&self) -> usize {
458        self.kinds.len()
459    }
460    /// Whether the set is empty.
461    pub fn is_empty(&self) -> bool {
462        self.kinds.is_empty()
463    }
464}
/// A contiguous range of tokens with a combined span.
#[allow(dead_code)]
pub struct TokenRange {
    /// The tokens in this range.
    pub tokens: Vec<Token>,
    /// Combined span covering all tokens (first token's span merged with the
    /// last token's).
    pub span: Span,
}
473#[allow(dead_code)]
474impl TokenRange {
475    /// Create a token range from a vector of tokens.
476    pub fn from_tokens(tokens: Vec<Token>) -> Option<Self> {
477        if tokens.is_empty() {
478            return None;
479        }
480        let first = tokens
481            .first()
482            .expect("tokens non-empty per is_empty check above");
483        let last = tokens
484            .last()
485            .expect("tokens non-empty per is_empty check above");
486        let span = first.span.merge(&last.span);
487        Some(Self { tokens, span })
488    }
489    /// Number of tokens.
490    pub fn len(&self) -> usize {
491        self.tokens.len()
492    }
493    /// Whether the range is empty.
494    pub fn is_empty(&self) -> bool {
495        self.tokens.is_empty()
496    }
497    /// Get a token by index.
498    pub fn get(&self, i: usize) -> Option<&Token> {
499        self.tokens.get(i)
500    }
501    /// Iterate over the tokens.
502    pub fn iter(&self) -> std::slice::Iter<'_, Token> {
503        self.tokens.iter()
504    }
505}
/// Token kind.
///
/// Variants are grouped below by rough category (declaration keywords, term
/// keywords, operators, delimiters, literals). Note: the `Float(f64)` payload
/// keeps this enum from deriving `Eq`/`Hash`.
#[derive(Clone, Debug, PartialEq)]
pub enum TokenKind {
    // --- Declaration / command keywords ---
    /// Axiom keyword
    Axiom,
    /// Definition keyword
    Definition,
    /// Theorem keyword
    Theorem,
    /// Lemma keyword
    Lemma,
    /// Opaque keyword
    Opaque,
    /// Inductive keyword
    Inductive,
    /// Structure keyword
    Structure,
    /// Class keyword
    Class,
    /// Instance keyword
    Instance,
    /// Namespace keyword
    Namespace,
    /// Section keyword
    Section,
    /// Variable keyword
    Variable,
    /// Variables keyword
    Variables,
    /// Parameter keyword
    Parameter,
    /// Parameters keyword
    Parameters,
    /// Constant keyword
    Constant,
    /// Constants keyword
    Constants,
    /// End keyword
    End,
    /// Import keyword
    Import,
    /// Export keyword
    Export,
    /// Open keyword
    Open,
    /// Attribute keyword
    Attribute,
    /// Return keyword
    Return,
    // --- Sort / universe keywords ---
    /// Type keyword
    Type,
    /// Prop keyword
    Prop,
    /// Sort keyword
    Sort,
    // --- Term-level keywords ---
    /// Fun or lambda (lambda)
    Fun,
    /// Forall
    Forall,
    /// Let keyword
    Let,
    /// In keyword
    In,
    /// If keyword
    If,
    /// Then keyword
    Then,
    /// Else keyword
    Else,
    /// Match keyword
    Match,
    /// With keyword
    With,
    /// Do keyword
    Do,
    /// Have keyword
    Have,
    /// Suffices keyword
    Suffices,
    /// Show keyword
    Show,
    /// Where keyword
    Where,
    /// From keyword
    From,
    /// By keyword
    By,
    // --- Logical / relational operators ---
    /// Arrow ->
    Arrow,
    /// Fat arrow =>
    FatArrow,
    /// And
    And,
    /// Or
    Or,
    /// Not
    Not,
    /// Iff
    Iff,
    /// Exists
    Exists,
    /// Equality = (the equation symbol — assignment is `Assign`, i.e. `:=`)
    Eq,
    /// Not equal (presumably the symbolic `≠`; the ASCII `!=` form is
    /// `BangEq` — TODO confirm against the lexer)
    Ne,
    /// Less than <
    Lt,
    /// Less than or equal
    Le,
    /// Greater than >
    Gt,
    /// Greater than or equal
    Ge,
    // --- Delimiters ---
    /// Left parenthesis (
    LParen,
    /// Right parenthesis )
    RParen,
    /// Left brace {
    LBrace,
    /// Right brace }
    RBrace,
    /// Left bracket [
    LBracket,
    /// Right bracket ]
    RBracket,
    /// Left angle bracket (for anonymous ctor)
    LAngle,
    /// Right angle bracket (for anonymous ctor)
    RAngle,
    // --- Punctuation ---
    /// Comma ,
    Comma,
    /// Colon :
    Colon,
    /// Semicolon ;
    Semicolon,
    /// Dot .
    Dot,
    /// Dot dot ..
    DotDot,
    /// Bar |
    Bar,
    /// At @
    At,
    /// Hash #
    Hash,
    // --- Arithmetic operators ---
    /// Plus +
    Plus,
    /// Minus -
    Minus,
    /// Star *
    Star,
    /// Slash /
    Slash,
    /// Percent %
    Percent,
    /// Caret ^
    Caret,
    /// Assignment :=
    Assign,
    /// Double ampersand &&
    AndAnd,
    /// Double pipe ||
    OrOr,
    /// Not equal !=
    BangEq,
    /// Bang !
    Bang,
    /// Left arrow <-
    LeftArrow,
    // --- Literals and names ---
    /// Identifier
    Ident(String),
    /// Natural number literal
    Nat(u64),
    /// Float literal
    Float(f64),
    /// String literal
    String(String),
    /// Character literal
    Char(char),
    /// Doc comment (block `/-- ... -/` or line `--- ...`)
    DocComment(String),
    /// Interpolated string: `s!"hello {name}"`
    InterpolatedString(Vec<StringPart>),
    // --- Miscellaneous ---
    /// Underscore _
    Underscore,
    /// Question mark ?
    Question,
    /// End of file
    Eof,
    /// Error token
    Error(String),
}
/// Extra semantic information attached to a token.
///
/// See `AnnotatedToken`, which pairs a token with zero or more of these.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TokenAnnotation {
    /// This token is an introduction site for the named variable.
    BindingSite(String),
    /// This token is a use-site for the named variable.
    UseSite(String),
    /// This token's identifier refers to a declaration with this qualified name.
    ResolvedName(String),
    /// This token starts a tactic block.
    TacticStart,
    /// This token ends a tactic block.
    TacticEnd,
    /// This token is a hole (implicit argument).
    ImplicitHole,
    /// This token is a type annotation separator.
    TypeSeparator,
}
/// Source location information.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Span {
    /// Start position (byte offset)
    pub start: usize,
    /// End position (byte offset)
    pub end: usize,
    /// Line number (1-indexed)
    pub line: usize,
    /// Column number (1-indexed)
    pub column: usize,
}
impl Span {
    /// Create a new span.
    pub fn new(start: usize, end: usize, line: usize, column: usize) -> Self {
        Self {
            start,
            end,
            line,
            column,
        }
    }
    /// Create a span that covers both spans.
    ///
    /// The result runs from the smaller `start` to the larger `end`. Its
    /// `line`/`column` come from whichever span starts first, so the merged
    /// span's location always agrees with its `start`. (The original always
    /// kept `self`'s line/column, which was wrong when `other` began earlier.)
    pub fn merge(&self, other: &Span) -> Span {
        let (line, column) = if self.start <= other.start {
            (self.line, self.column)
        } else {
            (other.line, other.column)
        };
        Span {
            start: self.start.min(other.start),
            end: self.end.max(other.end),
            line,
            column,
        }
    }
}
749/// A matcher that finds the closing bracket/delimiter for an opening one.
750#[allow(dead_code)]
751pub struct TokenPairMatcher;
752#[allow(dead_code)]
753impl TokenPairMatcher {
754    /// Return the closing counterpart of an opening token kind.
755    pub fn closing_of(kind: &TokenKind) -> Option<TokenKind> {
756        match kind {
757            TokenKind::LParen => Some(TokenKind::RParen),
758            TokenKind::LBrace => Some(TokenKind::RBrace),
759            TokenKind::LBracket => Some(TokenKind::RBracket),
760            TokenKind::LAngle => Some(TokenKind::RAngle),
761            _ => None,
762        }
763    }
764    /// Return the opening counterpart of a closing token kind.
765    pub fn opening_of(kind: &TokenKind) -> Option<TokenKind> {
766        match kind {
767            TokenKind::RParen => Some(TokenKind::LParen),
768            TokenKind::RBrace => Some(TokenKind::LBrace),
769            TokenKind::RBracket => Some(TokenKind::LBracket),
770            TokenKind::RAngle => Some(TokenKind::LAngle),
771            _ => None,
772        }
773    }
774    /// Find the index of the closing token matching the opener at `start`.
775    ///
776    /// Returns `None` if no balanced closer is found.
777    pub fn find_closing(tokens: &[Token], start: usize) -> Option<usize> {
778        let opener = &tokens.get(start)?.kind;
779        let closer = Self::closing_of(opener)?;
780        let mut depth = 0usize;
781        for (i, tok) in tokens[start..].iter().enumerate() {
782            if std::mem::discriminant(&tok.kind) == std::mem::discriminant(opener) {
783                depth += 1;
784            } else if std::mem::discriminant(&tok.kind) == std::mem::discriminant(&closer) {
785                depth -= 1;
786                if depth == 0 {
787                    return Some(start + i);
788                }
789            }
790        }
791        None
792    }
793    /// Extract the balanced group starting at `start`, returning the inner tokens.
794    pub fn extract_group(tokens: &[Token], start: usize) -> Option<&[Token]> {
795        let end = Self::find_closing(tokens, start)?;
796        Some(&tokens[start + 1..end])
797    }
798}
/// A token together with optional preceding and following tokens.
///
/// Useful for layout-sensitive checks (line starts, same-line neighbors);
/// the neighbors are owned clones of adjacent tokens, not references.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct ContextualToken {
    /// The main token.
    pub token: Token,
    /// The immediately preceding token (if any).
    pub prev: Option<Token>,
    /// The immediately following token (if any).
    pub next: Option<Token>,
}
810#[allow(dead_code)]
811impl ContextualToken {
812    /// Create a contextual token.
813    pub fn new(token: Token, prev: Option<Token>, next: Option<Token>) -> Self {
814        Self { token, prev, next }
815    }
816    /// Build contextual tokens from a token slice.
817    pub fn from_slice(tokens: &[Token]) -> Vec<ContextualToken> {
818        tokens
819            .iter()
820            .enumerate()
821            .map(|(i, tok)| {
822                let prev = if i > 0 {
823                    Some(tokens[i - 1].clone())
824                } else {
825                    None
826                };
827                let next = tokens.get(i + 1).cloned();
828                ContextualToken::new(tok.clone(), prev, next)
829            })
830            .collect()
831    }
832    /// Whether this token starts a line (column 1).
833    pub fn is_line_start(&self) -> bool {
834        self.token.span.column == 1
835    }
836    /// Whether the previous and current token are on the same line.
837    pub fn same_line_as_prev(&self) -> bool {
838        self.prev
839            .as_ref()
840            .is_some_and(|p| p.span.line == self.token.span.line)
841    }
842    /// Whether the current and next token are on the same line.
843    pub fn same_line_as_next(&self) -> bool {
844        self.next
845            .as_ref()
846            .is_some_and(|n| n.span.line == self.token.span.line)
847    }
848}
/// A part of an interpolated string.
///
/// An interpolated string is represented as a sequence of these parts
/// (see `TokenKind::InterpolatedString`).
#[derive(Clone, Debug, PartialEq)]
pub enum StringPart {
    /// A literal text segment.
    Literal(String),
    /// An interpolation hole containing tokens (e.g. the `{name}` part of
    /// `s!"hello {name}"`).
    Interpolation(Vec<Token>),
}
/// A growable buffer of tokens that supports efficient push and random access.
#[allow(dead_code)]
pub struct TokenBuffer {
    /// The collected tokens, in push order.
    tokens: Vec<Token>,
}
862#[allow(dead_code)]
863impl TokenBuffer {
864    /// Create an empty token buffer.
865    pub fn new() -> Self {
866        Self { tokens: Vec::new() }
867    }
868    /// Create a buffer with pre-allocated capacity.
869    pub fn with_capacity(cap: usize) -> Self {
870        Self {
871            tokens: Vec::with_capacity(cap),
872        }
873    }
874    /// Push a token onto the buffer.
875    pub fn push(&mut self, token: Token) {
876        self.tokens.push(token);
877    }
878    /// Get a reference to the token at position `i`.
879    pub fn get(&self, i: usize) -> Option<&Token> {
880        self.tokens.get(i)
881    }
882    /// The number of tokens in the buffer.
883    pub fn len(&self) -> usize {
884        self.tokens.len()
885    }
886    /// Whether the buffer is empty.
887    pub fn is_empty(&self) -> bool {
888        self.tokens.is_empty()
889    }
890    /// Consume the buffer into a `Vec<Token>`.
891    pub fn into_tokens(self) -> Vec<Token> {
892        self.tokens
893    }
894    /// Drain the buffer into a `TokenStream`.
895    pub fn into_stream(self) -> TokenStream {
896        TokenStream::new(self.tokens)
897    }
898    /// Append all tokens from another buffer.
899    pub fn extend_from(&mut self, other: TokenBuffer) {
900        self.tokens.extend(other.tokens);
901    }
902    /// Truncate to the first `n` tokens.
903    pub fn truncate(&mut self, n: usize) {
904        self.tokens.truncate(n);
905    }
906    /// Get the last token in the buffer.
907    pub fn last(&self) -> Option<&Token> {
908        self.tokens.last()
909    }
910    /// Retain only tokens that satisfy `pred`.
911    pub fn retain<F: FnMut(&Token) -> bool>(&mut self, pred: F) {
912        self.tokens.retain(pred);
913    }
914    /// Split the buffer at position `mid`, returning the second half.
915    pub fn split_off(&mut self, mid: usize) -> TokenBuffer {
916        TokenBuffer {
917            tokens: self.tokens.split_off(mid),
918        }
919    }
920    /// Count tokens of a given kind discriminant.
921    pub fn count_kind(&self, kind: &TokenKind) -> usize {
922        count_tokens(&self.tokens, kind)
923    }
924    /// Find the first position of a token matching a predicate.
925    pub fn find_first<F: Fn(&Token) -> bool>(&self, pred: F) -> Option<usize> {
926        self.tokens.iter().position(pred)
927    }
928    /// Slice a range from the buffer.
929    pub fn slice(&self, start: usize, end: usize) -> &[Token] {
930        &self.tokens[start..end.min(self.tokens.len())]
931    }
932}