use logos::Logos;
use std::ops::Range;

use crate::file_error;

/// All tokens the lexer can produce. Inline whitespace and `//` line comments
/// are skipped outright; `/* ... */` comments are stripped later in [`lex`].
#[derive(Logos, Debug, PartialEq, Clone)]
pub enum Token {
    #[regex(r"[ \t\f]+", logos::skip)]
    #[regex(r"//.*", logos::skip)]
    #[error]
    Error,

    #[token("/*")]
    StartMultiLineComment,

    #[token("*/")]
    EndMultiLineComment,

    #[token("=>")]
    ConvertTo,

    #[token("{{")]
    MultiEqualStart,

    #[token("}}")]
    MultiEqualEnd,

    #[token("{")]
    ReplacementStart,

    #[token("}")]
    ReplacementEnd,

    #[token("\n")]
    NewLine,

    #[token("+")]
    Add,

    #[token("-")]
    Subtract,

    #[token("*")]
    Times,

    #[token("/")]
    Divide,

    #[token(";")]
    SemiColon,

    #[regex(r"[0-9.]+", |x| x.slice().parse().ok())]
    Number(f64),

    // Any other bare word (identifier-like text), optionally starting with a dot.
    #[regex(r"\.?[^{} \t\f\n+\-*/0-9\.;][^{} \t\f\n;]*", |x| x.slice().to_string())]
    String(String),
}

/// Lexes `content`, reports problems against `file`, and post-processes the raw
/// token stream: everything inside `/* ... */` comments is removed and newlines
/// that cannot terminate a statement are dropped. Each kept token is returned
/// together with its byte range in `content`.
pub fn lex(file: String, content: String) -> Vec<(Token, Range<usize>)> {
    let mut multi_line_comment_number = 0;
    let mut needs_nl = false;
    Token::lexer(&content)
        .spanned()
        .filter(|(token, location)| {
            // Track `/* ... */` nesting so that everything inside a multi-line
            // comment can be filtered out below.
            if token == &Token::StartMultiLineComment {
                multi_line_comment_number += 1;
            } else if token == &Token::EndMultiLineComment {
                if multi_line_comment_number == 0 {
                    file_error!(
                        file.clone(),
                        content.clone(),
                        location.clone(),
                        "No starting multiline comment"
                    );
                }
                multi_line_comment_number -= 1;
            }

            // Anything the lexer could not match is reported as an unknown token.
            if token == &Token::Error {
                file_error!(
                    file.clone(),
                    content.clone(),
                    location.clone(),
                    "Unknown token"
                );
            }

            // A newline only matters after a token that can end a statement;
            // otherwise it is redundant and dropped.
            let skip_newline = token == &Token::NewLine && !needs_nl;
            needs_nl = matches!(
                token,
                Token::String(_)
                    | Token::ReplacementStart
                    | Token::ReplacementEnd
                    | Token::Number(_)
            );

            // Keep the token only when we are outside a multi-line comment (the
            // closing `*/` is dropped as well) and the newline is significant.
            multi_line_comment_number == 0
                && token != &Token::EndMultiLineComment
                && !skip_newline
        })
        .collect::<Vec<_>>()
}
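// A minimal usage sketch, not part of the original file: it runs `lex` over a
// tiny well-formed input, so the `file_error!` reporting paths are never hit.
// The file name "example.ubc" is made up purely for the test.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn lexes_a_simple_expression() {
        let tokens = lex("example.ubc".to_string(), "1 + 2\n".to_string());
        let kinds: Vec<Token> = tokens.into_iter().map(|(token, _span)| token).collect();
        // Whitespace is skipped by the lexer, and the trailing newline is kept
        // because it follows a number.
        assert_eq!(
            kinds,
            vec![
                Token::Number(1.0),
                Token::Add,
                Token::Number(2.0),
                Token::NewLine,
            ]
        );
    }
}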