use crate::parser::lexing::{Keyword, Token, TokenKind, TokenSpan};
use super::TokenStream;
use pretty_assertions::assert_eq;
#[test]
fn test_lexing_trixy() {
    // NOTE(review): the expected spans below only hold if the `fn` line is
    // indented by exactly four spaces inside the literal (`fn` occupies
    // bytes 20..22); the literal is written to match those spans — the
    // previous flush-left literal could not satisfy them.
    let input = "
mod commands {
    fn expect(event: String) -> String;
}
";
    let lexed = TokenStream::lex(input).unwrap();

    // Expected tokens in source order; spans are half-open byte ranges
    // (`start..end`) into `input`.
    let expected = TokenStream {
        tokens: vec![
            Token { span: TokenSpan { start: 1, end: 4 }, kind: TokenKind::Keyword(Keyword::r#mod) },
            Token { span: TokenSpan { start: 5, end: 13 }, kind: TokenKind::Identifier("commands".to_owned()) },
            Token { span: TokenSpan { start: 14, end: 15 }, kind: TokenKind::CurlyBracketOpen },
            Token { span: TokenSpan { start: 20, end: 22 }, kind: TokenKind::Keyword(Keyword::r#fn) },
            Token { span: TokenSpan { start: 23, end: 29 }, kind: TokenKind::Identifier("expect".to_owned()) },
            Token { span: TokenSpan { start: 29, end: 30 }, kind: TokenKind::CurvedBracketOpen },
            Token { span: TokenSpan { start: 30, end: 35 }, kind: TokenKind::Identifier("event".to_owned()) },
            Token { span: TokenSpan { start: 35, end: 36 }, kind: TokenKind::Colon },
            Token { span: TokenSpan { start: 37, end: 43 }, kind: TokenKind::Identifier("String".to_owned()) },
            Token { span: TokenSpan { start: 43, end: 44 }, kind: TokenKind::CurvedBracketClose },
            Token { span: TokenSpan { start: 45, end: 47 }, kind: TokenKind::Arrow },
            Token { span: TokenSpan { start: 48, end: 54 }, kind: TokenKind::Identifier("String".to_owned()) },
            Token { span: TokenSpan { start: 54, end: 55 }, kind: TokenKind::Semicolon },
            Token { span: TokenSpan { start: 56, end: 57 }, kind: TokenKind::CurlyBracketClose },
        ],
        original_file: input.to_owned(),
    };
    assert_eq!(lexed, expected)
}
#[test]
fn test_failing_lexing() {
    // A bare `-` between `mod` and the module name is not a valid token
    // sequence, so lexing this input must produce an error.
    // NOTE(review): indentation inside the literal is irrelevant here (only
    // the error outcome is asserted); nesting restored for readability.
    let input = "
mod trinitrix {
    mod - commands {
        fn hi(strings: String) -> String;
    }
}
";
    // Match on the result instead of `unwrap_err()` + `assert!(is_err())`:
    // an unexpected `Ok` now fails with a clear message rather than panicking
    // inside `unwrap_err` before the assertion is ever reached.
    match TokenStream::lex(input) {
        Ok(_) => panic!("lexing should have failed, but it succeeded"),
        // Surface the error text so the diagnostic output can be inspected
        // in the test log.
        Err(e) => eprintln!("{}", e),
    }
}
#[test]
fn test_multiple_tokens() {
    // Adjacent tokens without separating whitespace (`{{`, `}}`) must be
    // lexed as individual bracket tokens, and `mod` is tokenized as a
    // keyword both times — even in identifier position.
    let input = "
mod mod {{
}}
";
    let lexed = TokenStream::lex(input).unwrap();

    // Spans are half-open byte ranges into `input`.
    let expected = TokenStream {
        tokens: vec![
            Token { span: TokenSpan { start: 1, end: 4 }, kind: TokenKind::Keyword(Keyword::r#mod) },
            Token { span: TokenSpan { start: 5, end: 8 }, kind: TokenKind::Keyword(Keyword::r#mod) },
            Token { span: TokenSpan { start: 9, end: 10 }, kind: TokenKind::CurlyBracketOpen },
            Token { span: TokenSpan { start: 10, end: 11 }, kind: TokenKind::CurlyBracketOpen },
            Token { span: TokenSpan { start: 12, end: 13 }, kind: TokenKind::CurlyBracketClose },
            Token { span: TokenSpan { start: 13, end: 14 }, kind: TokenKind::CurlyBracketClose },
        ],
        original_file: input.to_owned(),
    };
    assert_eq!(lexed, expected)
}
#[test]
fn test_comments() {
    // Line comments and (non-nested) multi-line comments must be skipped by
    // the lexer: only the `mod mod {{ }}` tokens may appear in the output.
    //
    // NOTE(review): the expected spans (`mod` at 33..36, `}` at 53..54) only
    // hold with the indentation written below (8 spaces before the comment
    // and `mod` lines, 9 before `}}`); the literal was reconstructed to
    // match the spans. The trailing comment lines sit after the last token,
    // so their exact layout does not affect the expected output.
    let input = "
        // Some comment
        mod mod {{
         }}
// NOTE(@soispha): We do not support nested multi line comments <2023-12-16>
/* Some
* multi
* line
* comment
*/
";
    // `unwrap_or_else` replaces the old `map_err(eprintln + panic).unwrap()`
    // construction: same effect (panic with the lexer error on failure),
    // idiomatic single call.
    let lexed = TokenStream::lex(input)
        .unwrap_or_else(|e| panic!("lexing failed: {}", e));

    // Spans are half-open byte ranges into `input`.
    let expected = TokenStream {
        tokens: vec![
            Token { span: TokenSpan { start: 33, end: 36 }, kind: TokenKind::Keyword(Keyword::r#mod) },
            Token { span: TokenSpan { start: 37, end: 40 }, kind: TokenKind::Keyword(Keyword::r#mod) },
            Token { span: TokenSpan { start: 41, end: 42 }, kind: TokenKind::CurlyBracketOpen },
            Token { span: TokenSpan { start: 42, end: 43 }, kind: TokenKind::CurlyBracketOpen },
            Token { span: TokenSpan { start: 53, end: 54 }, kind: TokenKind::CurlyBracketClose },
            Token { span: TokenSpan { start: 54, end: 55 }, kind: TokenKind::CurlyBracketClose },
        ],
        original_file: input.to_owned(),
    };
    assert_eq!(lexed, expected)
}