// oxihuman_core/token_stream.rs

1// Copyright (C) 2026 COOLJAPAN OU (Team KitaSan)
2// SPDX-License-Identifier: Apache-2.0
3#![allow(dead_code)]
4
5//! Simple token stream for lexing / parsing utilities.
6
/// A single token produced by the lexer.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub struct Token {
    // Category of the token (see `TokenKind`).
    pub kind: TokenKind,
    // Owned copy of the token's source text (may be empty, e.g. for Eof).
    pub text: String,
    // Position of the token in the input; presumably a byte or char
    // offset — TODO confirm against the lexer that fills this in.
    pub pos: usize,
}
15
/// Token category.
///
/// The enum is fieldless, so it also derives `Copy` (free to duplicate),
/// `Eq` (total equality, enabling exhaustive comparisons), and `Hash`
/// (usable as a `HashMap`/`HashSet` key). All derives are additive and
/// backward compatible with the previous `Clone + PartialEq` set.
#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TokenKind {
    /// Identifier-like token.
    Ident,
    /// Numeric literal.
    Number,
    /// Punctuation character.
    Punct,
    /// String literal.
    Str,
    /// End-of-input marker.
    Eof,
}
26
27/// Token stream backed by a Vec.
28#[allow(dead_code)]
29#[derive(Debug, Clone)]
30pub struct TokenStream {
31    tokens: Vec<Token>,
32    cursor: usize,
33}
34
35/// Create a new empty `TokenStream`.
36#[allow(dead_code)]
37pub fn new_token_stream() -> TokenStream {
38    TokenStream {
39        tokens: Vec::new(),
40        cursor: 0,
41    }
42}
43
44/// Push a token onto the stream.
45#[allow(dead_code)]
46pub fn tks_push(ts: &mut TokenStream, kind: TokenKind, text: &str, pos: usize) {
47    ts.tokens.push(Token {
48        kind,
49        text: text.to_string(),
50        pos,
51    });
52}
53
54/// Peek at the current token without consuming.
55#[allow(dead_code)]
56pub fn tks_peek(ts: &TokenStream) -> Option<&Token> {
57    ts.tokens.get(ts.cursor)
58}
59
60/// Consume and return the next token.
61#[allow(dead_code)]
62pub fn tks_next(ts: &mut TokenStream) -> Option<Token> {
63    if ts.cursor < ts.tokens.len() {
64        let tok = ts.tokens[ts.cursor].clone();
65        ts.cursor += 1;
66        Some(tok)
67    } else {
68        None
69    }
70}
71
72/// Whether the stream is exhausted.
73#[allow(dead_code)]
74pub fn tks_is_empty(ts: &TokenStream) -> bool {
75    ts.cursor >= ts.tokens.len()
76}
77
78/// Number of remaining tokens.
79#[allow(dead_code)]
80pub fn tks_remaining(ts: &TokenStream) -> usize {
81    ts.tokens.len().saturating_sub(ts.cursor)
82}
83
84/// Rewind to the beginning.
85#[allow(dead_code)]
86pub fn tks_rewind(ts: &mut TokenStream) {
87    ts.cursor = 0;
88}
89
90/// Total token count (including consumed).
91#[allow(dead_code)]
92pub fn tks_total(ts: &TokenStream) -> usize {
93    ts.tokens.len()
94}
95
96/// Skip tokens while predicate holds.
97#[allow(dead_code)]
98pub fn tks_skip_while(ts: &mut TokenStream, pred: impl Fn(&Token) -> bool) {
99    while ts.cursor < ts.tokens.len() && pred(&ts.tokens[ts.cursor]) {
100        ts.cursor += 1;
101    }
102}
103
104/// Collect all remaining tokens as a Vec.
105#[allow(dead_code)]
106pub fn tks_drain(ts: &mut TokenStream) -> Vec<Token> {
107    let rest = ts.tokens[ts.cursor..].to_vec();
108    ts.cursor = ts.tokens.len();
109    rest
110}
111
#[cfg(test)]
mod tests {
    use super::*;

    // A freshly created stream holds nothing.
    #[test]
    fn test_empty_stream() {
        let stream = new_token_stream();
        assert_eq!(tks_remaining(&stream), 0);
        assert!(tks_is_empty(&stream));
    }

    // Peeking returns the pushed token without consuming it.
    #[test]
    fn test_push_and_peek() {
        let mut stream = new_token_stream();
        tks_push(&mut stream, TokenKind::Ident, "hello", 0);
        let peeked = tks_peek(&stream).expect("should succeed");
        assert_eq!(peeked.text, "hello".to_string());
    }

    // `tks_next` yields the token and exhausts a one-token stream.
    #[test]
    fn test_next_consumes() {
        let mut stream = new_token_stream();
        tks_push(&mut stream, TokenKind::Number, "42", 0);
        let taken = tks_next(&mut stream).expect("should succeed");
        assert_eq!(taken.kind, TokenKind::Number);
        assert!(tks_is_empty(&stream));
    }

    // Remaining count drops by one per consumed token.
    #[test]
    fn test_remaining_decreases() {
        let mut stream = new_token_stream();
        tks_push(&mut stream, TokenKind::Ident, "a", 0);
        tks_push(&mut stream, TokenKind::Ident, "b", 1);
        assert_eq!(tks_remaining(&stream), 2);
        tks_next(&mut stream);
        assert_eq!(tks_remaining(&stream), 1);
    }

    // Rewinding restores consumed tokens.
    #[test]
    fn test_rewind() {
        let mut stream = new_token_stream();
        tks_push(&mut stream, TokenKind::Ident, "x", 0);
        tks_next(&mut stream);
        tks_rewind(&mut stream);
        assert_eq!(tks_remaining(&stream), 1);
    }

    // Total count ignores the cursor.
    #[test]
    fn test_total_count() {
        let mut stream = new_token_stream();
        tks_push(&mut stream, TokenKind::Ident, "a", 0);
        tks_push(&mut stream, TokenKind::Ident, "b", 0);
        assert_eq!(tks_total(&stream), 2);
    }

    // Skipping idents leaves the cursor on the first number.
    #[test]
    fn test_skip_while() {
        let mut stream = new_token_stream();
        tks_push(&mut stream, TokenKind::Ident, "a", 0);
        tks_push(&mut stream, TokenKind::Ident, "b", 1);
        tks_push(&mut stream, TokenKind::Number, "1", 2);
        tks_skip_while(&mut stream, |t| t.kind == TokenKind::Ident);
        let current = tks_peek(&stream).expect("should succeed");
        assert_eq!(current.kind, TokenKind::Number);
    }

    // Draining returns everything left and exhausts the stream.
    #[test]
    fn test_drain() {
        let mut stream = new_token_stream();
        tks_push(&mut stream, TokenKind::Punct, ";", 0);
        tks_push(&mut stream, TokenKind::Eof, "", 1);
        let collected = tks_drain(&mut stream);
        assert_eq!(collected.len(), 2);
        assert!(tks_is_empty(&stream));
    }

    // Consuming from an empty stream yields None, not a panic.
    #[test]
    fn test_next_none_on_empty() {
        let mut stream = new_token_stream();
        assert!(tks_next(&mut stream).is_none());
    }
}