#![allow(missing_docs)]
use ruchy::frontend::lexer::{Token, TokenStream};
/// Lex `source` to completion, collecting every token and discarding
/// the span half of each `(token, span)` pair the stream yields.
fn tokenize(source: &str) -> Vec<Token> {
    let mut stream = TokenStream::new(source);
    std::iter::from_fn(|| stream.next())
        .map(|(token, _span)| token)
        .collect()
}
#[test]
fn test_parser_075_01_simple_block_comment() {
    // A lone block comment lexes to a single BlockComment token whose
    // content is the text between (not including) the /* */ delimiters.
    let tokens = tokenize("/* simple comment */");
    assert_eq!(tokens.len(), 1);
    match &tokens[0] {
        Token::BlockComment(content) => assert_eq!(content, " simple comment "),
        _ => panic!("expected BlockComment token"),
    }
}
#[test]
fn test_parser_075_01_block_comment_with_code() {
    // A block comment embedded between ordinary tokens is emitted in
    // sequence without disturbing the surrounding token stream.
    let tokens = tokenize("let x = /* comment */ 42");
    assert!(matches!(&tokens[0], Token::Let));
    match &tokens[1] {
        Token::Identifier(name) => assert_eq!(name, "x"),
        _ => panic!("expected identifier token"),
    }
    assert!(matches!(&tokens[2], Token::Equal));
    match &tokens[3] {
        Token::BlockComment(content) => assert_eq!(content, " comment "),
        _ => panic!("expected BlockComment token"),
    }
    match &tokens[4] {
        Token::Integer(n) => assert_eq!(n, "42"),
        _ => panic!("expected integer token"),
    }
}
#[test]
fn test_parser_075_01_empty_block_comment() {
    // The degenerate comment `/**/` still produces one token with
    // empty content.
    let tokens = tokenize("/**/");
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    assert!(content.is_empty());
}
#[test]
fn test_parser_075_01_multiline_block_comment() {
    // A comment spanning several physical lines is still one token.
    let source = r"/*
* This is a
* multiline comment
*/";
    let tokens = tokenize(source);
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    assert!(content.contains("multiline"));
}
#[test]
fn test_parser_075_02_single_nested_comment() {
    // A nested /* */ pair folds into the single enclosing comment token.
    let tokens = tokenize("/* outer /* inner */ still outer */");
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    for needle in ["outer", "inner", "still outer"] {
        assert!(content.contains(needle));
    }
}
#[test]
fn test_parser_075_02_nested_at_start() {
    // A nested comment appearing immediately after the opener is kept
    // verbatim inside the outer comment's content.
    let tokens = tokenize("/* /* nested */ rest */");
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    assert!(content.contains("/* nested */ rest "));
}
#[test]
fn test_parser_075_02_nested_at_end() {
    // A nested comment just before the final closer is also preserved.
    let tokens = tokenize("/* start /* nested */ */");
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    assert!(content.contains("start /* nested */"));
}
#[test]
fn test_parser_075_02_multiple_nested_siblings() {
    // Two sibling nested comments do not terminate the outer comment early.
    let tokens = tokenize("/* first /* a */ middle /* b */ end */");
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    for needle in ["first", "middle", "end"] {
        assert!(content.contains(needle));
    }
}
#[test]
fn test_parser_075_03_triple_nested() {
    // Three levels of nesting unwind correctly back to a single token.
    let tokens = tokenize("/* level1 /* level2 /* level3 */ back2 */ back1 */");
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    for needle in ["level1", "level2", "level3", "back1"] {
        assert!(content.contains(needle));
    }
}
#[test]
fn test_parser_075_03_deep_nesting() {
    // Five levels deep: the lexer must track nesting depth, not just
    // scan for the first `*/`.
    let tokens = tokenize("/* 1 /* 2 /* 3 /* 4 /* 5 */ 4 */ 3 */ 2 */ 1 */");
    assert_eq!(tokens.len(), 1);
    match &tokens[0] {
        Token::BlockComment(_) => {}
        _ => panic!("expected BlockComment token"),
    }
}
#[test]
fn test_parser_075_04_commented_out_code_with_nesting() {
    // Commenting out a region that itself contains a block comment must
    // swallow the whole region as one comment, leaving the surrounding
    // statements lexed normally.
    let source = r"
let active = 42;
/* temporarily disabled
let disabled = /* old value */ 99;
*/
let also_active = 7;
";
    let tokens = tokenize(source);
    // Pull out just the block-comment bodies.
    let bodies: Vec<_> = tokens
        .iter()
        .filter_map(|t| match t {
            Token::BlockComment(c) => Some(c),
            _ => None,
        })
        .collect();
    assert_eq!(bodies.len(), 1, "Should have exactly one block comment");
    assert!(bodies[0].contains("disabled"));
    assert!(bodies[0].contains("/* old value */"));
}
#[test]
fn test_parser_075_04_nested_comment_preserves_structure() {
    // The inner comment's delimiters are preserved verbatim in the
    // outer comment's content.
    let tokens = tokenize("/* outer /* inner */ outer */");
    assert_eq!(tokens.len(), 1);
    match &tokens[0] {
        Token::BlockComment(content) => assert_eq!(content, " outer /* inner */ outer "),
        _ => panic!("Expected BlockComment token"),
    }
}
#[test]
fn test_parser_075_05_star_not_followed_by_slash() {
    // A bare `*` inside the comment must not be mistaken for a closer.
    let tokens = tokenize("/* 2 * 3 = 6 */");
    assert_eq!(tokens.len(), 1);
    match &tokens[0] {
        Token::BlockComment(content) => assert_eq!(content, " 2 * 3 = 6 "),
        _ => panic!("expected BlockComment token"),
    }
}
#[test]
fn test_parser_075_05_slash_not_followed_by_star() {
    // A bare `/` inside the comment must not start a nested comment.
    let tokens = tokenize("/* division: a/b */");
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    assert!(content.contains("a/b"));
}
#[test]
fn test_parser_075_05_unclosed_comment_error_recovery() {
    // An unterminated comment is recovered as a BlockComment running to
    // end of input rather than aborting the lexer.
    let tokens = tokenize("/* unclosed comment");
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    assert!(content.contains("unclosed"));
}
#[test]
fn test_parser_075_05_unclosed_nested_comment() {
    // Recovery also applies when the unterminated comment has an open
    // nested level.
    let tokens = tokenize("/* outer /* inner");
    assert_eq!(tokens.len(), 1);
    match &tokens[0] {
        Token::BlockComment(_) => {}
        _ => panic!("expected BlockComment token"),
    }
}
#[test]
fn test_parser_075_05_consecutive_block_comments() {
    // Adjacent comments produce one token each, in source order.
    let tokens = tokenize("/* first */ /* second */ /* third */");
    let expected = [" first ", " second ", " third "];
    assert_eq!(tokens.len(), expected.len());
    for (token, want) in tokens.iter().zip(expected) {
        match token {
            Token::BlockComment(c) => assert_eq!(c, want),
            _ => panic!("expected BlockComment token"),
        }
    }
}
#[test]
fn test_parser_075_06_block_comment_with_strings() {
    // String-literal syntax inside a comment is plain text, not a string.
    let tokens = tokenize(r#"/* "this looks like a string" */"#);
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    assert!(content.contains(r#""this looks like a string""#));
}
#[test]
fn test_parser_075_06_block_comment_with_line_comment_syntax() {
    // `//` inside a block comment is just content, not a line comment.
    let tokens = tokenize("/* this // looks like line comment */");
    assert_eq!(tokens.len(), 1);
    let Token::BlockComment(content) = &tokens[0] else {
        panic!("expected BlockComment token");
    };
    assert!(content.contains("//"));
}
#[test]
fn test_parser_075_06_mixed_comment_types() {
    // Line and block comments coexist and are tokenized independently.
    let source = r"
// line comment
/* block comment */
let x = 42; // another line comment
";
    let tokens = tokenize(source);
    let line_count = tokens
        .iter()
        .filter(|t| matches!(t, Token::LineComment(_)))
        .count();
    let block_count = tokens
        .iter()
        .filter(|t| matches!(t, Token::BlockComment(_)))
        .count();
    assert_eq!(line_count, 2);
    assert_eq!(block_count, 1);
}