1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
/// Rust token type definitions.
// NOTE(review): the import paths on the next three lines were garbled in this
// copy of the file (segments containing `::` were stripped, leaving `pub use ;`,
// `use crateRustLanguage;`, `use ;`). Reconstructed from the doc examples
// below — TODO confirm against the crate's real module layout.
pub use token_type::*;

use crate::RustLanguage;
use oak_core::{Lexer, LexerState, SourceText};
/// A lexer for the Rust programming language.
///
/// The `RustLexer` is responsible for tokenizing Rust source code into a sequence of tokens
/// that can be used by the parser. It handles all Rust syntax including modern features like
/// raw strings, byte strings, lifetimes, and all standard Rust keywords.
///
/// # Examples
///
/// Basic usage:
///
/// ```rust,ignore
/// use oak_core::{Lexer, LexerState, SourceText};
/// use oak_rust::{RustLanguage, RustLexer};
///
/// let language = RustLanguage::default();
/// let lexer = RustLexer::new(&language);
/// let source = SourceText::new("fn main() { println!(\"Hello, world!\") }");
/// let mut cache = oak_core::parser::session::ParseSession::<RustLanguage>::default();
/// let output = lexer.lex(&source, &[], &mut cache);
///
/// // The output contains tokens for the entire source code
/// assert!(output.result.is_ok());
/// ```
///
/// Tokenizing different Rust constructs:
///
/// ```rust,ignore
/// use oak_core::{Lexer, LexerState, SourceText};
/// use oak_rust::{RustLanguage, RustLexer};
///
/// let language = RustLanguage::default();
/// let lexer = RustLexer::new(&language);
///
/// // Tokenize a function with various Rust features
/// let source = SourceText::new(
/// r#"
/// fn calculate<'a>(x: &'a i32, y: i32) -> i32 {
/// let result = x + y;
/// println!("Result: {}", result);
/// result
/// }
/// "#,
/// );
/// let mut cache = oak_core::parser::session::ParseSession::<RustLanguage>::default();
/// let output = lexer.lex(&source, &[], &mut cache);
///
/// // Verify that tokens were generated
/// assert!(output.result.is_ok());
/// ```
pub type State<'a, S> = ;