use crate::frontend::ast::Span;
use logos::{Lexer, Logos};
/// Maps the character following a backslash to its escaped value.
///
/// Returns `None` when `ch` is not one of the recognized simple escapes,
/// letting the caller decide how to handle the unknown sequence.
fn process_basic_escape(ch: char) -> Option<char> {
    let escaped = match ch {
        'n' => '\n',
        't' => '\t',
        'r' => '\r',
        '0' => '\0',
        // These three escape to themselves.
        '\\' | '"' | '\'' => ch,
        _ => return None,
    };
    Some(escaped)
}
/// Consumes a `{...}` group from `chars` and returns the text between the
/// braces (the digits of a `\u{...}` escape).
///
/// The leading `{` (already matched by the caller) is consumed
/// unconditionally; collection stops at the first `}` — which is also
/// consumed — or at end of input.
fn extract_unicode_hex(chars: &mut std::str::Chars) -> String {
    // Drop the opening brace.
    let _ = chars.next();
    let mut digits = String::with_capacity(6);
    loop {
        match chars.next() {
            Some('}') | None => break,
            Some(c) => digits.push(c),
        }
    }
    digits
}
/// Decodes the `{HEX}` body of a `\u{HEX}` escape into its character.
///
/// When the digits are not valid hex or name an invalid code point, the
/// original escape text is reproduced verbatim (`\u{...}`) instead of
/// failing the lex.
fn process_unicode_escape(chars: &mut std::str::Chars) -> String {
    let hex = extract_unicode_hex(chars);
    match u32::from_str_radix(&hex, 16).ok().and_then(char::from_u32) {
        Some(decoded) => decoded.to_string(),
        None => format!("\\u{{{hex}}}"),
    }
}
/// Handles the text immediately after a `\` inside a string literal,
/// appending the decoded result to `result`.
///
/// `\u{...}` is decoded as unicode; recognized simple escapes are decoded;
/// unknown escapes are passed through verbatim (backslash plus character);
/// a trailing lone backslash is kept as-is.
fn process_backslash_escape(chars: &mut std::str::Chars, result: &mut String) {
    let Some(next) = chars.next() else {
        // Input ended right after the backslash; keep it literally.
        result.push('\\');
        return;
    };
    if next == 'u' && chars.as_str().starts_with('{') {
        result.push_str(&process_unicode_escape(chars));
    } else if let Some(escaped) = process_basic_escape(next) {
        result.push(escaped);
    } else {
        // Not a recognized escape: emit the sequence unchanged.
        result.push('\\');
        result.push(next);
    }
}
/// Decodes backslash escape sequences in `s`, returning the processed text.
/// Characters outside escape sequences are copied through unchanged.
fn process_escapes(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut iter = s.chars();
    while let Some(c) = iter.next() {
        match c {
            '\\' => process_backslash_escape(&mut iter, &mut out),
            other => out.push(other),
        }
    }
    out
}
/// Logos callback for `/*`: consumes a block comment that may contain
/// nested `/* ... */` pairs, returning the comment body without the
/// outermost delimiters.
///
/// Scans the lexer's remainder byte-wise for delimiter pairs and advances
/// the lexer via `bump` by exactly the number of bytes consumed.
/// NOTE(review): an unterminated comment consumes the rest of the input
/// and still yields `Some(content)` — confirm unterminated comments are
/// not meant to be a lex error.
fn lex_nested_block_comment(lex: &mut Lexer<Token>) -> Option<String> {
let remainder = lex.remainder();
let bytes = remainder.as_bytes();
let mut depth = 1; let mut content = String::new();
let mut i = 0;
while i < bytes.len() {
if i + 1 < bytes.len() && bytes[i] == b'/' && bytes[i + 1] == b'*' {
// Nested comment opens: the inner delimiters stay in the content.
depth += 1;
content.push('/');
content.push('*');
i += 2;
} else if i + 1 < bytes.len() && bytes[i] == b'*' && bytes[i + 1] == b'/' {
depth -= 1;
if depth == 0 {
// Outermost close: advance the lexer past everything consumed,
// including the final `*/`, and stop.
lex.bump(i + 2);
return Some(content);
}
content.push('*');
content.push('/');
i += 2;
} else {
// Copy one full UTF-8 character; advancing by `len_utf8` keeps the
// byte index `i` aligned with char boundaries.
let ch = remainder[i..].chars().next()?;
content.push(ch);
i += ch.len_utf8();
}
}
// Ran off the end without closing: consume everything that remains.
lex.bump(remainder.len());
Some(content)
}
/// Lexical tokens produced by the `logos`-derived lexer.
/// Whitespace (spaces, tabs, CR/LF, form feeds) is skipped via the
/// `logos(skip ...)` pattern below.
#[derive(Logos, Debug, PartialEq, Clone)]
#[logos(skip r"[ \t\n\r\f]+")]
pub enum Token {
// --- Comments (preserved as tokens, delimiters stripped) ---
#[regex(r"///[^\n]*", |lex| lex.slice()[3..].to_string())]
DocComment(String),
#[regex(r"//[^\n]*", |lex| lex.slice()[2..].to_string())]
LineComment(String),
// Block comments support nesting; see `lex_nested_block_comment`.
#[token("/*", lex_nested_block_comment)]
BlockComment(String),
// `#` comments; the regex excludes `#[` so attributes still lex as
// `AttributeStart` (see the bottom of this enum).
#[regex(r"#(?:[^\[\n][^\n]*)?", |lex| {
let s = lex.slice();
if s.len() > 1 { s[1..].to_string() } else { String::new() }
})]
HashComment(String),
// --- Numeric literals (kept as strings to preserve any type suffix) ---
#[regex(r"0[xX][0-9a-fA-F]+(?:i8|i16|i32|i64|i128|isize|u8|u16|u32|u64|u128|usize)?", |lex| {
let slice = lex.slice();
slice.to_string()
})]
HexInteger(String),
#[regex(r"[0-9]+(?:i8|i16|i32|i64|i128|isize|u8|u16|u32|u64|u128|usize)?", |lex| {
let slice = lex.slice();
// Parse type suffix and numeric value separately - store as string to preserve suffix
slice.to_string()
})]
Integer(String),
// Floats require digits on both sides of the dot, or an exponent form.
#[regex(r"[0-9]+\.[0-9]+([eE][+-]?[0-9]+)?|[0-9]+[eE][+-]?[0-9]+", |lex| lex.slice().parse::<f64>().ok())]
Float(f64),
// --- String-like literals ---
#[regex(r#""([^"\\]|\\.)*""#, |lex| {
let s = lex.slice();
let inner = &s[1..s.len()-1];
Some(process_escapes(inner))
})]
// NOTE(review): this single-quote alternative lexes multi-character
// 'abc' sequences as String; the callback returns None for single-char
// contents so the Char rule below can claim them instead.
#[regex(r"'(([^'\\>\n \t;},):]|\\.)([^'\\>\n \t;},):]|\\.)+|)'", |lex| {
let s = lex.slice();
let inner = &s[1..s.len()-1];
// Only match if it's NOT a single character (let Char handle that)
if inner.len() != 1 && !(inner.starts_with('\\') && inner.len() == 2) {
Some(process_escapes(inner))
} else {
None
}
})]
String(String),
#[regex(r#"f"([^"\\]|\\.)*""#, |lex| {
let s = lex.slice();
// Remove f" prefix and " suffix
let inner = &s[2..s.len()-1];
Some(process_escapes(inner))
})]
FString(String),
#[regex(r####"r#"([^"]|"[^#])*"#"####, |lex| {
let s = lex.slice();
// Remove r#" prefix and "# suffix - no escape processing for raw strings
Some(s[3..s.len()-2].to_string())
})]
#[regex(r#"r"([^"])*""#, |lex| {
let s = lex.slice();
// Remove r" prefix and " suffix - no escape processing for raw strings
Some(s[2..s.len()-1].to_string())
})]
RawString(String),
// Char literals: one plain character or one simple backslash escape.
#[regex(r"'([^'\\]|\\.)'", priority = 7, callback = |lex| {
let s = lex.slice();
let inner = &s[1..s.len()-1];
if inner.len() == 1 {
inner.chars().next()
} else if inner.starts_with('\\') && inner.len() == 2 {
match inner.chars().nth(1) {
Some('n') => Some('\n'),
Some('t') => Some('\t'),
Some('r') => Some('\r'),
Some('\\') => Some('\\'),
Some('\'') => Some('\''),
Some('0') => Some('\0'),
_ => None,
}
} else {
None
}
})]
Char(char),
#[regex(r"b'([^'\\]|\\.)'", |lex| {
let s = lex.slice();
let inner = &s[2..s.len()-1]; // Skip b' prefix
if inner.len() == 1 {
Some(inner.as_bytes()[0])
} else if inner.starts_with('\\') && inner.len() == 2 {
match inner.chars().nth(1) {
Some('n') => Some(b'\n'),
Some('t') => Some(b'\t'),
Some('r') => Some(b'\r'),
Some('\\') => Some(b'\\'),
Some('\'') => Some(b'\''),
Some('0') => Some(b'\0'),
_ => None,
}
} else {
None
}
})]
Byte(u8),
#[token("true", |_| true)]
#[token("false", |_| false)]
Bool(bool),
// --- Keywords ---
#[token("fun")]
Fun,
#[token("fn")]
Fn,
#[token("let")]
Let,
#[token("var")]
Var,
#[token("mod")]
Mod,
#[token("if")]
If,
#[token("else")]
Else,
#[token("match")]
Match,
#[token("for")]
For,
#[token("in")]
In,
#[token("while")]
While,
#[token("loop")]
Loop,
#[token("lazy")]
Lazy,
#[token("async")]
Async,
#[token("await")]
Await,
#[token("throw")]
Throw,
#[token("try")]
Try,
#[token("catch")]
Catch,
#[token("finally")]
Finally,
#[token("return")]
Return,
#[token("Ok")]
Ok,
#[token("Err")]
Err,
#[token("Some")]
Some,
#[token("None")]
None,
#[token("null")]
Null,
#[token("Result")]
Result,
#[token("Option")]
Option,
#[token("break")]
Break,
#[token("continue")]
Continue,
#[token("struct")]
Struct,
#[token("enum")]
Enum,
#[token("impl")]
Impl,
#[token("trait")]
Trait,
#[token("extend")]
Extend,
#[token("actor")]
Actor,
#[token("spawn")]
Spawn,
#[token("effect")]
Effect,
#[token("handle")]
Handle,
#[token("handler")]
Handler,
#[token("property")]
Property,
#[token("private")]
Private,
#[token("protected")]
Protected,
#[token("sealed")]
Sealed,
#[token("final")]
Final,
#[token("abstract")]
Abstract,
#[token("mixin")]
Mixin,
#[token("operator")]
Operator,
#[token("interface")]
Interface,
#[token("implements")]
Implements,
#[token("override")]
Override,
#[token("receive")]
Receive,
#[token("send")]
Send,
#[token("ask")]
Ask,
#[token("type")]
Type,
#[token("where")]
Where,
#[token("const", priority = 2)]
Const,
#[token("unsafe", priority = 2)]
Unsafe,
#[token("static")]
Static,
#[token("mut")]
Mut,
// --- Labels, atoms, lifetimes ---
#[regex("@[a-zA-Z_][a-zA-Z0-9_]*", priority = 3, callback = |lex| lex.slice().to_string())]
Label(String),
#[regex(r":[a-zA-Z_][a-zA-Z0-9_]*", priority = 3, callback = |lex| lex.slice()[1..].to_string())]
Atom(String),
// Priority 5: identifier-like 'name sequences lex as Lifetime rather
// than being claimed by the single-quote String/Char rules.
#[regex(r"'[a-zA-Z_][a-zA-Z0-9_]*", priority = 5, callback = |lex| lex.slice().to_string())]
Lifetime(String),
#[token("pub")]
Pub,
#[token("import")]
Import,
#[token("use")]
Use,
#[token("as")]
As,
#[token("with")]
With,
#[token("from")]
From,
#[token("module")]
Module,
#[token("export")]
Export,
#[token("default")]
Default,
#[token("class")]
Class,
#[token("self")]
Self_,
#[token("super")]
Super,
#[token("crate")]
Crate,
#[token("df", priority = 2)]
DataFrame,
// --- Identifiers (lowest priority so keywords win) ---
#[regex(r"[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice().to_string(), priority = 1)]
Identifier(String),
// --- Operators ---
#[token("+")]
Plus,
#[token("-")]
Minus,
#[token("*")]
Star,
#[token("/")]
Slash,
#[token("%")]
Percent,
#[token("**")]
Power,
#[token("==")]
EqualEqual,
#[token("!=")]
NotEqual,
#[token("<?")]
ActorQuery,
#[token("<-")]
LeftArrow,
#[token("<")]
Less,
#[token("<=")]
LessEqual,
#[token(">")]
Greater,
#[token(">=")]
GreaterEqual,
#[token("&&")]
AndAnd,
#[token("||")]
OrOr,
#[token("!")]
Bang,
#[token("&")]
Ampersand,
#[token("|")]
Pipe,
#[token("^")]
Caret,
#[token("@")]
At,
#[token("~")]
Tilde,
#[token("\\")]
Backslash,
#[token("<<")]
LeftShift,
#[token(">>")]
RightShift,
#[token("=")]
Equal,
#[token("+=")]
PlusEqual,
#[token("-=")]
MinusEqual,
#[token("*=")]
StarEqual,
#[token("/=")]
SlashEqual,
#[token("%=")]
PercentEqual,
#[token("**=")]
PowerEqual,
#[token("&=")]
AmpersandEqual,
#[token("|=")]
PipeEqual,
#[token("^=")]
CaretEqual,
// NOTE(review): `<<=` exists but there is no `>>=` token — confirm the
// asymmetry is intentional (e.g. to avoid `>>` ambiguity in generics).
#[token("<<=")]
LeftShiftEqual,
#[token("++")]
Increment,
#[token("--")]
Decrement,
#[token("|>")]
Pipeline,
#[token("->")]
Arrow,
#[token("=>")]
FatArrow,
#[token("..")]
DotDot,
#[token("..=")]
DotDotEqual,
#[token("...")]
DotDotDot,
#[token("??")]
NullCoalesce,
#[token("?")]
Question,
#[token("?.")]
SafeNav,
// --- Delimiters & punctuation ---
#[token("(")]
LeftParen,
#[token(")")]
RightParen,
#[token("[")]
LeftBracket,
#[token("]")]
RightBracket,
#[token("{")]
LeftBrace,
#[token("}")]
RightBrace,
#[token(",")]
Comma,
#[token(".")]
Dot,
#[token(":")]
Colon,
#[token("::")]
ColonColon,
#[token(";")]
Semicolon,
#[token("_", priority = 2)]
Underscore,
// Attribute opener `#[`; priority 3 beats the HashComment rule.
#[token("#[", priority = 3)]
AttributeStart,
}
impl Token {
    /// True for tokens that can appear as an infix binary operator.
    #[must_use]
    pub fn is_binary_op(&self) -> bool {
        match self {
            // Arithmetic
            Token::Plus
            | Token::Minus
            | Token::Star
            | Token::Slash
            | Token::Percent
            | Token::Power => true,
            // Comparison
            Token::EqualEqual
            | Token::NotEqual
            | Token::Less
            | Token::LessEqual
            | Token::Greater
            | Token::GreaterEqual => true,
            // Logical
            Token::AndAnd | Token::OrOr => true,
            // Bitwise
            Token::Ampersand | Token::Pipe | Token::Caret | Token::LeftShift => true,
            _ => false,
        }
    }
    /// True for tokens that can appear as a prefix unary operator.
    #[must_use]
    pub fn is_unary_op(&self) -> bool {
        match self {
            Token::Bang | Token::Minus | Token::Tilde | Token::Ampersand => true,
            _ => false,
        }
    }
    /// True for `=` and every compound assignment operator.
    #[must_use]
    pub fn is_assignment_op(&self) -> bool {
        match self {
            Token::Equal => true,
            // Compound forms
            Token::PlusEqual
            | Token::MinusEqual
            | Token::StarEqual
            | Token::SlashEqual
            | Token::PercentEqual
            | Token::PowerEqual
            | Token::AmpersandEqual
            | Token::PipeEqual
            | Token::CaretEqual
            | Token::LeftShiftEqual => true,
            _ => false,
        }
    }
}
/// Streaming token source over an input string, providing single-token
/// lookahead and position save/restore for backtracking parsers.
pub struct TokenStream<'a> {
lexer: Lexer<'a, Token>, // underlying logos lexer (runs over shebang-stripped input)
peeked: Option<(Token, Span)>, // one-token lookahead buffer
input: &'a str, // original source text, kept for diagnostics
current_position: usize, // byte offset of the end of the last consumed token
}
/// Snapshot of a `TokenStream`'s state, captured by `TokenStream::position`
/// and restored by `TokenStream::set_position` for speculative parsing.
#[derive(Clone)]
pub struct TokenStreamPosition<'a> {
lexer: Lexer<'a, Token>, // cloned lexer state at capture time
peeked: Option<(Token, Span)>, // lookahead buffer at capture time
current_position: usize, // byte offset at capture time
}
impl<'a> TokenStream<'a> {
    /// Returns the original source text (including any shebang line that
    /// was stripped before lexing).
    #[must_use]
    pub fn source(&self) -> &'a str {
        self.input
    }
    /// Creates a token stream over `input`, skipping a leading `#!`
    /// shebang line if present.
    ///
    /// NOTE(review): when a shebang is stripped, lexer spans are relative
    /// to the stripped text while `input` (used for diagnostics) is the
    /// original, so reported offsets are shifted by the shebang length —
    /// confirm downstream span consumers account for this.
    #[must_use]
    pub fn new(input: &'a str) -> Self {
        let processed_input = if input.starts_with("#!") {
            if let Some(newline_pos) = input.find('\n') {
                &input[newline_pos + 1..]
            } else {
                ""
            }
        } else {
            input
        };
        Self {
            lexer: Token::lexer(processed_input),
            peeked: None,
            input,
            current_position: 0,
        }
    }
    /// Translates the current byte offset into a 1-based `(line, column)`
    /// pair for error reporting.
    pub fn current_position(&self) -> (usize, usize) {
        let mut line = 1;
        let mut col = 1;
        // Walk with byte offsets: `current_position` comes from lexer
        // spans (byte offsets), so comparing against `chars().enumerate()`
        // char indices — as the previous version did — mis-locates
        // positions in non-ASCII input.
        for (i, ch) in self.input.char_indices() {
            if i >= self.current_position {
                break;
            }
            if ch == '\n' {
                line += 1;
                col = 1;
            } else {
                col += 1;
            }
        }
        (line, col)
    }
    /// Returns a ~40-byte window of source around the current position,
    /// wrapped in ellipses, for use in error messages.
    pub fn get_context_string(&self) -> String {
        let mut start = self.current_position.saturating_sub(20);
        let mut end = (self.current_position + 20).min(self.input.len());
        // Snap both ends to char boundaries: slicing at an arbitrary byte
        // offset panics in the middle of a multi-byte UTF-8 character.
        while start > 0 && !self.input.is_char_boundary(start) {
            start -= 1;
        }
        while end < self.input.len() && !self.input.is_char_boundary(end) {
            end += 1;
        }
        let context = &self.input[start..end];
        format!("...{context}...")
    }
    /// Captures the current stream state so the parser can backtrack.
    #[must_use]
    pub fn position(&self) -> TokenStreamPosition<'a> {
        TokenStreamPosition {
            lexer: self.lexer.clone(),
            peeked: self.peeked.clone(),
            current_position: self.current_position,
        }
    }
    /// Restores a state previously captured with [`Self::position`].
    pub fn set_position(&mut self, pos: TokenStreamPosition<'a>) {
        self.lexer = pos.lexer;
        self.peeked = pos.peeked;
        self.current_position = pos.current_position;
    }
    /// Advances and returns the next token with its span, or `None` at EOF.
    #[allow(clippy::should_implement_trait)]
    pub fn next(&mut self) -> Option<(Token, Span)> {
        if let Some(peeked) = self.peeked.take() {
            self.current_position = peeked.1.end;
            return Some(peeked);
        }
        self.lexer.next().map(|result| {
            // NOTE(review): lexer errors are silently degraded to a `Bang`
            // token instead of being surfaced — confirm this is the
            // intended error-recovery strategy.
            let token = result.unwrap_or(Token::Bang);
            let span = Span::new(self.lexer.span().start, self.lexer.span().end);
            self.current_position = span.end;
            (token, span)
        })
    }
    /// Returns a reference to the next token without consuming it.
    pub fn peek(&mut self) -> Option<&(Token, Span)> {
        if self.peeked.is_none() {
            self.peeked = self.next();
        }
        self.peeked.as_ref()
    }
    /// Alias for [`Self::peek_nth`].
    pub fn peek_ahead(&mut self, n: usize) -> Option<(Token, Span)> {
        self.peek_nth(n)
    }
    /// Returns the token `n` positions ahead (`n == 0` is `peek`) without
    /// consuming any input or moving the reported position.
    pub fn peek_nth(&mut self, n: usize) -> Option<(Token, Span)> {
        if n == 0 {
            return self.peek().cloned();
        }
        let saved_peeked = self.peeked.clone();
        let saved_lexer = self.lexer.clone();
        // Bug fix: `advance()` below mutates `current_position`, and the
        // old code never restored it, so a lookahead permanently shifted
        // the position reported by `current_position()` and
        // `get_context_string()`. Save and restore it with the rest of
        // the state.
        let saved_position = self.current_position;
        for _ in 0..n {
            let _ = self.peek();
            self.advance();
        }
        let result = self.peek().cloned();
        self.lexer = saved_lexer;
        self.peeked = saved_peeked;
        self.current_position = saved_position;
        result
    }
    /// True when the token `n` positions ahead is a `:`.
    pub fn peek_nth_is_colon(&mut self, n: usize) -> bool {
        if n == 0 {
            self.peek().is_some_and(|(t, _)| matches!(t, Token::Colon))
        } else {
            self.peek_nth(n)
                .is_some_and(|(t, _)| matches!(t, Token::Colon))
        }
    }
    /// Consumes the next token, failing unless it equals `expected`.
    ///
    /// # Errors
    /// Returns an error when a different token is found or at EOF.
    pub fn expect(&mut self, expected: &Token) -> anyhow::Result<Span> {
        match self.next() {
            Some((token, span)) if token == *expected => Ok(span),
            Some((token, _)) => anyhow::bail!("Expected {expected:?}, found {token:?}"),
            None => anyhow::bail!("Expected {expected:?}, found EOF"),
        }
    }
    /// Consumes and returns the next token (synonym for `next`).
    pub fn advance(&mut self) -> Option<(Token, Span)> {
        self.next()
    }
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::panic)]
mod tests {
use super::*;
use proptest::prelude::*;
// --- Core tokenization: literals, comments, escapes, keywords, operators ---
#[test]
#[allow(clippy::approx_constant)] fn test_tokenize_basic() {
let mut stream = TokenStream::new("let x = 42 + 3.15");
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Let));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("x".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Equal));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Integer("42".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Plus));
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Float(3.15))); assert_eq!(stream.next().map(|(t, _)| t), None);
}
#[test]
fn test_tokenize_pipeline() {
let mut stream = TokenStream::new("[1, 2, 3] >> map(|x| x * 2)");
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::LeftBracket));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Integer("1".to_string()))
);
}
#[test]
fn test_tokenize_comments() {
let mut stream = TokenStream::new("x // comment\n+ /* block */ y");
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("x".to_string()))
);
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::LineComment(" comment".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Plus));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::BlockComment(" block ".to_string()))
);
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("y".to_string()))
);
}
#[test]
fn test_process_basic_escape() {
assert_eq!(process_basic_escape('n'), Some('\n'));
assert_eq!(process_basic_escape('t'), Some('\t'));
assert_eq!(process_basic_escape('r'), Some('\r'));
assert_eq!(process_basic_escape('\\'), Some('\\'));
assert_eq!(process_basic_escape('"'), Some('"'));
assert_eq!(process_basic_escape('\''), Some('\''));
assert_eq!(process_basic_escape('0'), Some('\0'));
assert_eq!(process_basic_escape('x'), None); }
#[test]
fn test_process_unicode_escape() {
let mut chars = "{41}".chars();
assert_eq!(process_unicode_escape(&mut chars), "A");
let mut chars = "{1F600}".chars();
assert_eq!(process_unicode_escape(&mut chars), "😀");
let mut chars = "{INVALID}".chars();
assert_eq!(process_unicode_escape(&mut chars), "\\u{INVALID}");
}
#[test]
fn test_process_escapes() {
assert_eq!(process_escapes("Hello\\nWorld"), "Hello\nWorld");
assert_eq!(process_escapes("Tab\\tHere"), "Tab\tHere");
assert_eq!(process_escapes("Quote\\\"Here"), "Quote\"Here");
assert_eq!(process_escapes("Unicode\\u{41}"), "UnicodeA");
assert_eq!(process_escapes("Invalid\\x"), "Invalid\\x");
assert_eq!(process_escapes("Backslash\\\\"), "Backslash\\");
}
#[test]
fn test_tokenize_strings() {
let mut stream = TokenStream::new(r#""Hello, World!""#);
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::String("Hello, World!".to_string()))
);
let mut stream = TokenStream::new(r"'c'");
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Char('c')));
}
// Table-driven checks for keywords, operators, punctuation, floats.
#[test]
fn test_tokenize_keywords() {
let keywords = vec![
("let", Token::Let),
("var", Token::Var),
("fun", Token::Fun),
("fn", Token::Fn),
("if", Token::If),
("else", Token::Else),
("match", Token::Match),
("for", Token::For),
("while", Token::While),
("loop", Token::Loop),
("return", Token::Return),
("break", Token::Break),
("continue", Token::Continue),
("true", Token::Bool(true)),
("false", Token::Bool(false)),
("null", Token::Null),
];
for (keyword_str, expected_token) in keywords {
let mut stream = TokenStream::new(keyword_str);
assert_eq!(
stream.next().map(|(t, _)| t),
Some(expected_token),
"Failed to tokenize keyword: {keyword_str}"
);
}
}
#[test]
fn test_tokenize_operators() {
let operators = vec![
("+", Token::Plus),
("-", Token::Minus),
("*", Token::Star),
("/", Token::Slash),
("%", Token::Percent),
("**", Token::Power),
("==", Token::EqualEqual),
("!=", Token::NotEqual),
("<", Token::Less),
("<=", Token::LessEqual),
(">", Token::Greater),
(">=", Token::GreaterEqual),
("&&", Token::AndAnd),
("||", Token::OrOr),
("!", Token::Bang),
("=", Token::Equal),
("|>", Token::Pipeline),
("<<", Token::LeftShift),
];
for (op_str, expected_token) in operators {
let mut stream = TokenStream::new(op_str);
assert_eq!(
stream.next().map(|(t, _)| t),
Some(expected_token),
"Failed to tokenize operator: {op_str}"
);
}
}
#[test]
fn test_tokenize_punctuation() {
let punctuation = vec![
("(", Token::LeftParen),
(")", Token::RightParen),
("[", Token::LeftBracket),
("]", Token::RightBracket),
("{", Token::LeftBrace),
("}", Token::RightBrace),
(",", Token::Comma),
(".", Token::Dot),
(":", Token::Colon),
("::", Token::ColonColon),
(";", Token::Semicolon),
("->", Token::Arrow),
("=>", Token::FatArrow),
];
for (punct_str, expected_token) in punctuation {
let mut stream = TokenStream::new(punct_str);
assert_eq!(
stream.next().map(|(t, _)| t),
Some(expected_token),
"Failed to tokenize punctuation: {punct_str}"
);
}
}
#[test]
fn test_tokenize_floats() {
let floats = vec!["3.15", "0.0", "1.0", "999.999", "0.001"];
for float_str in floats {
let mut stream = TokenStream::new(float_str);
match stream.next() {
Some((Token::Float(_), _)) => {}
_ => panic!("Failed to tokenize float: {float_str}"),
}
}
}
// --- TokenStream behavior (peek/position/expect) and parser regression
// tests for lifetimes (PARSER-079), labels (PARSER-081), atoms (PARSER-082) ---
#[test]
fn test_tokenize_complex_expression() {
let mut stream = TokenStream::new("fun add(x: i32, y: i32) -> i32 { x + y }");
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Fun));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("add".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::LeftParen));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("x".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Colon));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("i32".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Comma));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("y".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Colon));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("i32".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::RightParen));
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Arrow));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("i32".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::LeftBrace));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("x".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Plus));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("y".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::RightBrace));
}
#[test]
fn test_token_stream_peek() {
let mut stream = TokenStream::new("let x = 42");
let peeked = stream.peek().map(|(t, _)| t.clone());
assert_eq!(peeked, Some(Token::Let));
let peeked2 = stream.peek().map(|(t, _)| t.clone());
assert_eq!(peeked2, Some(Token::Let));
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Let));
let peeked3 = stream.peek().map(|(t, _)| t.clone());
assert_eq!(peeked3, Some(Token::Identifier("x".to_string())));
}
#[test]
fn test_token_stream_position() {
let mut stream = TokenStream::new("a + b");
let pos = stream.position();
stream.advance();
stream.advance();
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("b".to_string()))
);
stream.set_position(pos);
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("a".to_string()))
);
}
#[test]
fn test_token_stream_expect() {
let mut stream = TokenStream::new("let x");
let span = stream.expect(&Token::Let);
assert!(span.is_ok());
let result = stream.expect(&Token::If);
assert!(result.is_err());
}
#[test]
fn test_tokenize_interpolated_string() {
let mut stream = TokenStream::new(r#"f"Hello {name}!""#);
match stream.next() {
Some((Token::FString(s), _)) => {
assert!(s.contains("Hello"));
}
_ => panic!("Failed to tokenize interpolated string"),
}
}
#[test]
fn test_fstring_in_function_body() {
let input = r#"fn test() {
f"test {}"
}"#;
let mut stream = TokenStream::new(input);
let tokens: Vec<Token> = std::iter::from_fn(|| stream.next().map(|(t, _)| t)).collect();
for (i, token) in tokens.iter().enumerate() {
eprintln!("Token {i}: {token:?}");
}
assert!(
tokens.iter().any(|t| matches!(t, Token::FString(_))),
"FString token should exist"
);
}
#[test]
fn test_tokenize_special_tokens() {
let mut stream = TokenStream::new("_");
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Underscore));
}
#[test]
fn test_peek_nth() {
let mut stream = TokenStream::new("a b c");
let second = stream.peek_nth(1).map(|(t, _)| t);
assert_eq!(second, Some(Token::Identifier("b".to_string())));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("a".to_string()))
);
}
#[test]
fn test_peek_nth_is_colon() {
let mut stream = TokenStream::new(": x");
assert!(stream.peek_nth_is_colon(0));
let mut stream = TokenStream::new("x : y");
assert!(!stream.peek_nth_is_colon(0));
assert!(stream.peek_nth_is_colon(1));
}
#[test]
fn test_tokenize_enum_variant() {
let mut stream = TokenStream::new("Status::Success");
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("Status".to_string()))
);
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::ColonColon));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Identifier("Success".to_string()))
);
assert_eq!(stream.next(), None);
}
#[test]
fn test_parser_079_lifetime_with_space() {
let mut stream = TokenStream::new("'outer ");
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Lifetime("'outer".to_string())),
"Lifetime followed by space should tokenize as Lifetime"
);
}
#[test]
fn test_parser_079_lifetime_with_semicolon() {
let mut stream = TokenStream::new("'outer;");
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Lifetime("'outer".to_string())),
"Lifetime followed by semicolon should tokenize as Lifetime"
);
}
#[test]
fn test_parser_079_lifetime_with_brace() {
let mut stream = TokenStream::new("'outer}");
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Lifetime("'outer".to_string())),
"Lifetime followed by brace should tokenize as Lifetime"
);
}
#[test]
fn test_parser_079_lifetime_with_comma() {
let mut stream = TokenStream::new("'outer,");
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Lifetime("'outer".to_string())),
"Lifetime followed by comma should tokenize as Lifetime"
);
}
#[test]
fn test_parser_079_lifetime_in_break_statement() {
let mut stream = TokenStream::new("break 'outer");
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Break));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Lifetime("'outer".to_string())),
"break 'outer should tokenize break then lifetime"
);
}
#[test]
fn test_parser_079_lifetime_in_block() {
let mut stream = TokenStream::new("{ break 'outer }");
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::LeftBrace));
assert_eq!(stream.next().map(|(t, _)| t), Some(Token::Break));
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Lifetime("'outer".to_string())),
"Lifetime in block should tokenize correctly"
);
}
#[test]
fn test_parser_079_lifetime_in_for_loop() {
let code = "for x in xs { break 'outer; }";
let mut stream = TokenStream::new(code);
let tokens: Vec<Token> = std::iter::from_fn(|| stream.next().map(|(t, _)| t)).collect();
let lifetime_found = tokens.iter().any(|t| matches!(t, Token::Lifetime(_)));
assert!(
lifetime_found,
"for loop with labeled break should contain Lifetime token, got: {tokens:?}"
);
}
#[test]
fn test_parser_079_multiple_lifetimes() {
let mut stream = TokenStream::new("'outer 'inner");
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Lifetime("'outer".to_string()))
);
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Lifetime("'inner".to_string()))
);
}
#[test]
fn test_parser_079_lifetime_with_colon() {
let mut stream = TokenStream::new("'outer:");
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Lifetime("'outer".to_string())),
"Lifetime followed by colon should tokenize as Lifetime"
);
assert_eq!(
stream.next().map(|(t, _)| t),
Some(Token::Colon),
"Second token should be Colon"
);
}
#[test]
fn test_parser_079_lifetime_for_loop_full() {
let code = "'outer: for i in [1] { }";
let mut stream = TokenStream::new(code);
let tokens: Vec<Token> = std::iter::from_fn(|| stream.next().map(|(t, _)| t)).collect();
println!("Tokens for '{}': {:?}", code, tokens);
assert!(
matches!(&tokens[0], Token::Lifetime(s) if s == "'outer"),
"First token should be Lifetime('outer), got: {:?}",
tokens[0]
);
assert!(
matches!(&tokens[1], Token::Colon),
"Second token should be Colon, got: {:?}",
tokens[1]
);
assert!(
matches!(&tokens[2], Token::For),
"Third token should be For, got: {:?}",
tokens[2]
);
}
#[test]
fn test_parser_081_01_at_label_tokenizes_correctly() {
let mut stream = TokenStream::new("@outer");
let token = stream.next();
assert!(
matches!(token, Some((Token::Label(ref s), _)) if s == "@outer"),
"Expected Label(@outer), got {token:?}"
);
}
#[test]
fn test_parser_081_02_at_label_followed_by_colon() {
let mut stream = TokenStream::new("@outer:");
assert!(
matches!(stream.next(), Some((Token::Label(ref s), _)) if s == "@outer"),
"Label should tokenize before colon"
);
assert!(
matches!(stream.next(), Some((Token::Colon, _))),
"Colon should follow label"
);
}
#[test]
fn test_parser_082_atom_tokenization() {
let mut stream = TokenStream::new(":status :valid_id :_hidden");
assert!(matches!(stream.next(), Some((Token::Atom(s), _)) if s == "status"));
assert!(matches!(stream.next(), Some((Token::Atom(s), _)) if s == "valid_id"));
assert!(matches!(stream.next(), Some((Token::Atom(s), _)) if s == "_hidden"));
}
#[test]
fn test_parser_081_04_double_quote_json_string() {
let input = r#"{"name": "Alice"}"#;
let mut stream = TokenStream::new(input);
let token = stream.next();
assert!(
matches!(token, Some((Token::LeftBrace, _))),
"Double-quoted strings work for JSON, got {token:?}"
);
}
#[test]
fn test_tokenize_float_zero() {
let mut stream = TokenStream::new("0.0");
let token = stream.next();
assert!(matches!(token, Some((Token::Float(f), _)) if (f - 0.0).abs() < 0.001));
}
#[test]
fn test_tokenize_negative_integer() {
let mut stream = TokenStream::new("-42");
let token = stream.next();
assert!(matches!(token, Some((Token::Minus, _))));
let token = stream.next();
assert!(matches!(token, Some((Token::Integer(i), _)) if i == "42"));
}
#[test]
fn test_tokenize_scientific_notation() {
let mut stream = TokenStream::new("1e10");
let token = stream.next();
assert!(matches!(
token,
Some((Token::Float(_), _)) | Some((Token::Integer(_), _))
));
}
#[test]
fn test_tokenize_underscore_in_number() {
let mut stream = TokenStream::new("1_000_000");
let token = stream.next();
assert!(token.is_some());
}
#[test]
fn test_tokenize_empty_string() {
let mut stream = TokenStream::new(r#""""#);
let token = stream.next();
assert!(matches!(token, Some((Token::String(ref s), _)) if s.is_empty()));
}
#[test]
fn test_tokenize_string_with_newline() {
let mut stream = TokenStream::new("\"hello\\nworld\"");
let token = stream.next();
assert!(matches!(token, Some((Token::String(_), _))));
}
#[test]
fn test_tokenize_string_with_tab() {
let mut stream = TokenStream::new("\"hello\\tworld\"");
let token = stream.next();
assert!(matches!(token, Some((Token::String(_), _))));
}
#[test]
fn test_tokenize_arrow() {
let mut stream = TokenStream::new("->");
let token = stream.next();
assert!(matches!(token, Some((Token::Arrow, _))));
}
#[test]
fn test_tokenize_fat_arrow() {
let mut stream = TokenStream::new("=>");
let token = stream.next();
assert!(matches!(token, Some((Token::FatArrow, _))));
}
#[test]
fn test_tokenize_colon_colon() {
let mut stream = TokenStream::new("::");
let token = stream.next();
assert!(matches!(token, Some((Token::ColonColon, _))));
}
#[test]
fn test_tokenize_dot_dot() {
let mut stream = TokenStream::new("..");
let token = stream.next();
assert!(matches!(token, Some((Token::DotDot, _))));
}
#[test]
fn test_tokenize_range_inclusive() {
let mut stream = TokenStream::new("..=");
let token = stream.next();
assert!(matches!(token, Some((Token::DotDotEqual, _))));
}
#[test]
fn test_tokenize_pipe() {
let mut stream = TokenStream::new("|");
let token = stream.next();
assert!(matches!(token, Some((Token::Pipe, _))));
}
#[test]
fn test_tokenize_ampersand() {
let mut stream = TokenStream::new("&");
let token = stream.next();
assert!(matches!(token, Some((Token::Ampersand, _))));
}
#[test]
fn test_tokenize_bang() {
let mut stream = TokenStream::new("!");
let token = stream.next();
assert!(matches!(token, Some((Token::Bang, _))));
}
#[test]
fn test_tokenize_question_mark() {
let mut stream = TokenStream::new("?");
let token = stream.next();
assert!(matches!(token, Some((Token::Question, _))));
}
#[test]
fn test_tokenize_at_sign() {
let mut stream = TokenStream::new("@test");
let token = stream.next();
assert!(matches!(token, Some((Token::Label(_), _))));
}
#[test]
fn test_tokenize_hash_comment() {
let mut stream = TokenStream::new("# this is a comment");
let token = stream.next();
assert!(matches!(token, Some((Token::HashComment(_), _))));
}
#[test]
fn test_tokenize_integer_zero_r161() {
let mut stream = TokenStream::new("0");
let token = stream.next();
assert!(matches!(token, Some((Token::Integer(i), _)) if i == "0"));
}
#[test]
fn test_tokenize_integer_large_r161() {
let mut stream = TokenStream::new("9223372036854775807");
let token = stream.next();
assert!(matches!(token, Some((Token::Integer(_), _))));
}
#[test]
fn test_tokenize_float_simple_r161() {
let mut stream = TokenStream::new("3.14");
let token = stream.next();
assert!(matches!(token, Some((Token::Float(f), _)) if (f - 3.14).abs() < f64::EPSILON));
}
#[test]
fn test_tokenize_float_no_integer_part_r161() {
let mut stream = TokenStream::new(".5");
let token = stream.next();
assert!(token.is_some());
}
#[test]
fn test_tokenize_float_scientific_r161() {
let mut stream = TokenStream::new("1.5e10");
let token = stream.next();
assert!(matches!(token, Some((Token::Float(_), _))));
}
#[test]
fn test_tokenize_float_negative_exponent_r161() {
let mut stream = TokenStream::new("1.5e-10");
let token = stream.next();
assert!(matches!(token, Some((Token::Float(_), _))));
}
#[test]
fn test_tokenize_string_empty_r161() {
let mut stream = TokenStream::new(r#""""#);
let token = stream.next();
assert!(matches!(token, Some((Token::String(_), _))));
}
#[test]
fn test_tokenize_string_with_newline_r161() {
let mut stream = TokenStream::new(r#""hello\nworld""#);
let token = stream.next();
assert!(matches!(token, Some((Token::String(_), _))));
}
#[test]
fn test_tokenize_string_with_tab_r161() {
let mut stream = TokenStream::new(r#""hello\tworld""#);
let token = stream.next();
assert!(matches!(token, Some((Token::String(_), _))));
}
#[test]
fn test_tokenize_string_with_escape_quote_r161() {
let mut stream = TokenStream::new(r#""he said \"hi\"""#);
let token = stream.next();
assert!(matches!(token, Some((Token::String(_), _))));
}
/// A plain character literal `'a'` lexes to a `Token::Char`.
#[test]
fn test_tokenize_char_simple_r161() {
    assert!(matches!(TokenStream::new("'a'").next(), Some((Token::Char(_), _))));
}
/// The newline escape `'\n'` lexes to a `Token::Char`.
#[test]
fn test_tokenize_char_escape_n_r161() {
    assert!(matches!(TokenStream::new(r"'\n'").next(), Some((Token::Char(_), _))));
}
/// The tab escape `'\t'` lexes to a `Token::Char`.
#[test]
fn test_tokenize_char_escape_t_r161() {
    assert!(matches!(TokenStream::new(r"'\t'").next(), Some((Token::Char(_), _))));
}
/// `0b101` is a binary integer literal (the test name says "byte" but the
/// input is binary); per these asserts, binary maps to plain `Integer`.
#[test]
fn test_tokenize_byte_literal_r161() {
    assert!(matches!(TokenStream::new("0b101").next(), Some((Token::Integer(_), _))));
}
/// Hex literals get a dedicated `HexInteger` variant, unlike binary/octal.
#[test]
fn test_tokenize_hex_literal_r161() {
    assert!(matches!(TokenStream::new("0xFF").next(), Some((Token::HexInteger(_), _))));
}
/// Octal literals (`0o777`) lex to the plain `Integer` variant.
#[test]
fn test_tokenize_octal_literal_r161() {
    assert!(matches!(TokenStream::new("0o777").next(), Some((Token::Integer(_), _))));
}
/// `+` lexes to `Token::Plus`.
#[test]
fn test_tokenize_plus_r161() {
    assert!(matches!(TokenStream::new("+").next(), Some((Token::Plus, _))));
}
/// `-` lexes to `Token::Minus`.
#[test]
fn test_tokenize_minus_r161() {
    assert!(matches!(TokenStream::new("-").next(), Some((Token::Minus, _))));
}
/// `*` lexes to `Token::Star`.
#[test]
fn test_tokenize_star_r161() {
    assert!(matches!(TokenStream::new("*").next(), Some((Token::Star, _))));
}
/// `/` lexes to `Token::Slash`.
#[test]
fn test_tokenize_slash_r161() {
    assert!(matches!(TokenStream::new("/").next(), Some((Token::Slash, _))));
}
/// `%` lexes to `Token::Percent`.
#[test]
fn test_tokenize_percent_r161() {
    assert!(matches!(TokenStream::new("%").next(), Some((Token::Percent, _))));
}
/// `**` lexes to the single `Token::Power`, not two `Star`s.
#[test]
fn test_tokenize_double_star_r161() {
    assert!(matches!(TokenStream::new("**").next(), Some((Token::Power, _))));
}
/// `=` lexes to `Token::Equal`.
#[test]
fn test_tokenize_equal_r161() {
    assert!(matches!(TokenStream::new("=").next(), Some((Token::Equal, _))));
}
/// `==` lexes to the single `Token::EqualEqual`, not two `Equal`s.
#[test]
fn test_tokenize_double_equal_r161() {
    assert!(matches!(TokenStream::new("==").next(), Some((Token::EqualEqual, _))));
}
/// `!=` lexes to `Token::NotEqual`.
#[test]
fn test_tokenize_not_equal_r161() {
    assert!(matches!(TokenStream::new("!=").next(), Some((Token::NotEqual, _))));
}
/// `<` lexes to `Token::Less`.
#[test]
fn test_tokenize_less_than_r161() {
    assert!(matches!(TokenStream::new("<").next(), Some((Token::Less, _))));
}
/// `<=` lexes to `Token::LessEqual`.
#[test]
fn test_tokenize_less_equal_r161() {
    assert!(matches!(TokenStream::new("<=").next(), Some((Token::LessEqual, _))));
}
/// `>` lexes to `Token::Greater`.
#[test]
fn test_tokenize_greater_than_r161() {
    assert!(matches!(TokenStream::new(">").next(), Some((Token::Greater, _))));
}
/// `>=` lexes to `Token::GreaterEqual`.
#[test]
fn test_tokenize_greater_equal_r161() {
    assert!(matches!(TokenStream::new(">=").next(), Some((Token::GreaterEqual, _))));
}
/// `&&` lexes to the single `Token::AndAnd`, not two `Ampersand`s.
#[test]
fn test_tokenize_and_r161() {
    assert!(matches!(TokenStream::new("&&").next(), Some((Token::AndAnd, _))));
}
/// `||` lexes to the single `Token::OrOr`, not two `Pipe`s.
#[test]
fn test_tokenize_or_r161() {
    assert!(matches!(TokenStream::new("||").next(), Some((Token::OrOr, _))));
}
/// `!` lexes to `Token::Bang`.
#[test]
fn test_tokenize_not_r161() {
    assert!(matches!(TokenStream::new("!").next(), Some((Token::Bang, _))));
}
/// A lone `&` lexes to `Token::Ampersand`.
#[test]
fn test_tokenize_ampersand_r161() {
    assert!(matches!(TokenStream::new("&").next(), Some((Token::Ampersand, _))));
}
/// A lone `|` lexes to `Token::Pipe`.
#[test]
fn test_tokenize_pipe_r161() {
    assert!(matches!(TokenStream::new("|").next(), Some((Token::Pipe, _))));
}
/// `^` lexes to `Token::Caret`.
#[test]
fn test_tokenize_caret_r161() {
    assert!(matches!(TokenStream::new("^").next(), Some((Token::Caret, _))));
}
/// `~` lexes to `Token::Tilde`.
#[test]
fn test_tokenize_tilde_r161() {
    assert!(matches!(TokenStream::new("~").next(), Some((Token::Tilde, _))));
}
/// `<<` lexes to the single `Token::LeftShift`.
#[test]
fn test_tokenize_left_shift_r161() {
    assert!(matches!(TokenStream::new("<<").next(), Some((Token::LeftShift, _))));
}
/// `>>` lexes to the single `Token::RightShift`.
#[test]
fn test_tokenize_right_shift_r161() {
    assert!(matches!(TokenStream::new(">>").next(), Some((Token::RightShift, _))));
}
/// `(` lexes to `Token::LeftParen`.
#[test]
fn test_tokenize_lparen_r161() {
    assert!(matches!(TokenStream::new("(").next(), Some((Token::LeftParen, _))));
}
/// `)` lexes to `Token::RightParen`.
#[test]
fn test_tokenize_rparen_r161() {
    assert!(matches!(TokenStream::new(")").next(), Some((Token::RightParen, _))));
}
/// `[` lexes to `Token::LeftBracket`.
#[test]
fn test_tokenize_lbracket_r161() {
    assert!(matches!(TokenStream::new("[").next(), Some((Token::LeftBracket, _))));
}
/// `]` lexes to `Token::RightBracket`.
#[test]
fn test_tokenize_rbracket_r161() {
    assert!(matches!(TokenStream::new("]").next(), Some((Token::RightBracket, _))));
}
/// `{` lexes to `Token::LeftBrace`.
#[test]
fn test_tokenize_lbrace_r161() {
    assert!(matches!(TokenStream::new("{").next(), Some((Token::LeftBrace, _))));
}
/// `}` lexes to `Token::RightBrace`.
#[test]
fn test_tokenize_rbrace_r161() {
    assert!(matches!(TokenStream::new("}").next(), Some((Token::RightBrace, _))));
}
/// `,` lexes to `Token::Comma`.
#[test]
fn test_tokenize_comma_r161() {
    assert!(matches!(TokenStream::new(",").next(), Some((Token::Comma, _))));
}
/// A lone `.` lexes to `Token::Dot`.
#[test]
fn test_tokenize_dot_r161() {
    assert!(matches!(TokenStream::new(".").next(), Some((Token::Dot, _))));
}
/// A lone `:` lexes to `Token::Colon`.
#[test]
fn test_tokenize_colon_r161() {
    assert!(matches!(TokenStream::new(":").next(), Some((Token::Colon, _))));
}
/// `::` lexes to the single `Token::ColonColon`, not two `Colon`s.
#[test]
fn test_tokenize_double_colon_r161() {
    assert!(matches!(TokenStream::new("::").next(), Some((Token::ColonColon, _))));
}
/// `;` lexes to `Token::Semicolon`.
#[test]
fn test_tokenize_semicolon_r161() {
    assert!(matches!(TokenStream::new(";").next(), Some((Token::Semicolon, _))));
}
/// `->` lexes to `Token::Arrow`, not `Minus` + `Greater`.
#[test]
fn test_tokenize_arrow_r161() {
    assert!(matches!(TokenStream::new("->").next(), Some((Token::Arrow, _))));
}
/// `=>` lexes to `Token::FatArrow`, not `Equal` + `Greater`.
#[test]
fn test_tokenize_fat_arrow_r161() {
    assert!(matches!(TokenStream::new("=>").next(), Some((Token::FatArrow, _))));
}
/// `?` lexes to `Token::Question`.
#[test]
fn test_tokenize_question_mark_r161() {
    assert!(matches!(TokenStream::new("?").next(), Some((Token::Question, _))));
}
/// A lone `_` lexes to the dedicated `Token::Underscore`, not an identifier.
#[test]
fn test_tokenize_underscore_r161() {
    assert!(matches!(TokenStream::new("_").next(), Some((Token::Underscore, _))));
}
/// `..` lexes to `Token::DotDot`, not two `Dot`s.
#[test]
fn test_tokenize_dotdot_r161() {
    assert!(matches!(TokenStream::new("..").next(), Some((Token::DotDot, _))));
}
/// `..=` lexes to `Token::DotDotEqual` (inclusive range operator).
#[test]
fn test_tokenize_dotdoteq_r161() {
    assert!(matches!(TokenStream::new("..=").next(), Some((Token::DotDotEqual, _))));
}
/// `fun` lexes to the keyword token `Token::Fun`, not an identifier.
#[test]
fn test_tokenize_kw_fun_r161() {
    assert!(matches!(TokenStream::new("fun").next(), Some((Token::Fun, _))));
}
/// `fn` lexes to `Token::Fn`.
#[test]
fn test_tokenize_kw_fn_r161() {
    assert!(matches!(TokenStream::new("fn").next(), Some((Token::Fn, _))));
}
/// `let` lexes to `Token::Let`.
#[test]
fn test_tokenize_kw_let_r161() {
    assert!(matches!(TokenStream::new("let").next(), Some((Token::Let, _))));
}
/// `var` lexes to `Token::Var`.
#[test]
fn test_tokenize_kw_var_r161() {
    assert!(matches!(TokenStream::new("var").next(), Some((Token::Var, _))));
}
/// `if` lexes to `Token::If`.
#[test]
fn test_tokenize_kw_if_r161() {
    assert!(matches!(TokenStream::new("if").next(), Some((Token::If, _))));
}
/// `else` lexes to `Token::Else`.
#[test]
fn test_tokenize_kw_else_r161() {
    assert!(matches!(TokenStream::new("else").next(), Some((Token::Else, _))));
}
/// `match` lexes to `Token::Match`.
#[test]
fn test_tokenize_kw_match_r161() {
    assert!(matches!(TokenStream::new("match").next(), Some((Token::Match, _))));
}
/// `for` lexes to `Token::For`.
#[test]
fn test_tokenize_kw_for_r161() {
    assert!(matches!(TokenStream::new("for").next(), Some((Token::For, _))));
}
/// `while` lexes to `Token::While`.
#[test]
fn test_tokenize_kw_while_r161() {
    assert!(matches!(TokenStream::new("while").next(), Some((Token::While, _))));
}
/// `loop` lexes to `Token::Loop`.
#[test]
fn test_tokenize_kw_loop_r161() {
    assert!(matches!(TokenStream::new("loop").next(), Some((Token::Loop, _))));
}
/// `return` lexes to `Token::Return`.
#[test]
fn test_tokenize_kw_return_r161() {
    assert!(matches!(TokenStream::new("return").next(), Some((Token::Return, _))));
}
/// `break` lexes to `Token::Break`.
#[test]
fn test_tokenize_kw_break_r161() {
    assert!(matches!(TokenStream::new("break").next(), Some((Token::Break, _))));
}
/// `continue` lexes to `Token::Continue`.
#[test]
fn test_tokenize_kw_continue_r161() {
    assert!(matches!(TokenStream::new("continue").next(), Some((Token::Continue, _))));
}
/// `true` lexes to `Token::Bool(true)` — a value, not a bare keyword token.
#[test]
fn test_tokenize_kw_true_r161() {
    assert!(matches!(TokenStream::new("true").next(), Some((Token::Bool(true), _))));
}
/// `false` lexes to `Token::Bool(false)`.
#[test]
fn test_tokenize_kw_false_r161() {
    assert!(matches!(TokenStream::new("false").next(), Some((Token::Bool(false), _))));
}
/// `null` lexes to `Token::Null`.
#[test]
fn test_tokenize_kw_null_r161() {
    assert!(matches!(TokenStream::new("null").next(), Some((Token::Null, _))));
}
/// `struct` lexes to `Token::Struct`.
#[test]
fn test_tokenize_kw_struct_r161() {
    assert!(matches!(TokenStream::new("struct").next(), Some((Token::Struct, _))));
}
/// `enum` lexes to `Token::Enum`.
#[test]
fn test_tokenize_kw_enum_r161() {
    assert!(matches!(TokenStream::new("enum").next(), Some((Token::Enum, _))));
}
/// `impl` lexes to `Token::Impl`.
#[test]
fn test_tokenize_kw_impl_r161() {
    assert!(matches!(TokenStream::new("impl").next(), Some((Token::Impl, _))));
}
/// `trait` lexes to `Token::Trait`.
#[test]
fn test_tokenize_kw_trait_r161() {
    assert!(matches!(TokenStream::new("trait").next(), Some((Token::Trait, _))));
}
/// `async` lexes to `Token::Async`.
#[test]
fn test_tokenize_kw_async_r161() {
    assert!(matches!(TokenStream::new("async").next(), Some((Token::Async, _))));
}
/// `await` lexes to `Token::Await`.
#[test]
fn test_tokenize_kw_await_r161() {
    assert!(matches!(TokenStream::new("await").next(), Some((Token::Await, _))));
}
/// `try` lexes to `Token::Try`.
#[test]
fn test_tokenize_kw_try_r161() {
    assert!(matches!(TokenStream::new("try").next(), Some((Token::Try, _))));
}
/// `catch` lexes to `Token::Catch`.
#[test]
fn test_tokenize_kw_catch_r161() {
    assert!(matches!(TokenStream::new("catch").next(), Some((Token::Catch, _))));
}
/// `throw` lexes to `Token::Throw`.
#[test]
fn test_tokenize_kw_throw_r161() {
    assert!(matches!(TokenStream::new("throw").next(), Some((Token::Throw, _))));
}
/// `pub` lexes to `Token::Pub`.
#[test]
fn test_tokenize_kw_pub_r161() {
    assert!(matches!(TokenStream::new("pub").next(), Some((Token::Pub, _))));
}
/// `use` lexes to `Token::Use`.
#[test]
fn test_tokenize_kw_use_r161() {
    assert!(matches!(TokenStream::new("use").next(), Some((Token::Use, _))));
}
/// `mod` lexes to `Token::Mod`.
#[test]
fn test_tokenize_kw_mod_r161() {
    assert!(matches!(TokenStream::new("mod").next(), Some((Token::Mod, _))));
}
proptest! {
// Property: any identifier-shaped string round-trips through the lexer as
// an identical `Token::Identifier` — unless it collides with a reserved
// keyword, which the lexer (correctly) emits as its own token instead.
#[test]
fn test_tokenize_identifiers(s in "[a-zA-Z_][a-zA-Z0-9_]{0,100}") {
// Strings in this list match the identifier regex but lex to keyword
// tokens, so they are skipped rather than failed.
let reserved_keywords = [
"true", "false", "fun", "fn", "let", "var", "mod", "if", "else", "match",
"for", "in", "while", "loop", "async", "await", "throw", "try", "catch",
"return", "command", "Ok", "Err", "Some", "None", "null", "Result", "Option",
"break", "continue", "struct", "enum", "impl", "trait", "extend", "actor",
"state", "receive", "send", "ask", "type", "where", "const", "static",
"mut", "pub", "import", "use", "as", "module", "export", "df"
];
if reserved_keywords.contains(&s.as_str()) {
// Early-accept this case: proptest bodies return TestCaseResult.
return Ok(()); }
let mut stream = TokenStream::new(&s);
match stream.advance() {
Some((Token::Identifier(id), _)) => prop_assert_eq!(id, s),
// A lone `_` lexes to the dedicated `Underscore` token, not `Identifier`.
Some((Token::Underscore, _)) if s == "_" => {}, _ => panic!("Failed to tokenize identifier: {s}"),
}
}
// Property: every non-negative i64 below one million round-trips as an
// `Integer` token whose payload equals its decimal string form.
#[test]
fn test_tokenize_integers(n in 0i64..1_000_000) {
let s = n.to_string();
let mut stream = TokenStream::new(&s);
match stream.advance() {
Some((Token::Integer(i), _)) => prop_assert_eq!(i, n.to_string()),
_ => panic!("Failed to tokenize integer"),
}
}
// Fuzz: escape processing must never panic, whatever bytes come in.
#[test]
fn test_process_escapes_never_panics(s: String) {
let _ = process_escapes(&s);
}
// Fuzz: lexing arbitrary input must never panic; the stream is drained
// fully so every token (and error path) is exercised.
#[test]
fn test_tokenize_never_panics(s: String) {
let mut stream = TokenStream::new(&s);
while stream.next().is_some() {}
}
}
}