#![doc = include_str!("readme.md")]
pub mod token_type;
5
6use crate::{language::TailwindLanguage, lexer::token_type::TailwindTokenType};
7use oak_core::{Lexer, LexerCache, LexerState, OakError, lexer::LexOutput, source::Source};
8
/// Lexer for Tailwind class syntax.
///
/// Holds only configuration; per-run state lives in a fresh `LexerState`
/// created for each [`Lexer::lex`] call, so a single instance can be reused
/// across inputs.
#[derive(Clone, Debug, Default)]
pub struct TailwindLexer {
    // Language configuration. Currently unread by the lexing rules below —
    // kept for API stability and future dialect switches.
    pub config: TailwindLanguage,
}
15
16pub(crate) type State<'a, S> = LexerState<'a, S, TailwindLanguage>;
17
18impl TailwindLexer {
19 pub fn new(config: TailwindLanguage) -> Self {
21 Self { config }
22 }
23}
24
25impl Lexer<TailwindLanguage> for TailwindLexer {
26 fn lex<'a, S: Source + ?Sized>(&self, source: &S, _edits: &[oak_core::TextEdit], cache: &'a mut impl LexerCache<TailwindLanguage>) -> LexOutput<TailwindLanguage> {
28 let mut state = LexerState::new(source);
29 let result = self.run(&mut state);
30 if result.is_ok() {
31 state.add_eof()
32 }
33 state.finish_with_cache(result, cache)
34 }
35}
36
37impl TailwindLexer {
38 pub fn run<S: Source + ?Sized>(&self, state: &mut State<'_, S>) -> Result<(), OakError> {
40 while state.not_at_end() {
41 let safe_point = state.get_position();
42
43 if self.skip_whitespace(state) {
44 continue;
45 }
46
47 if self.lex_comment(state) {
48 continue;
49 }
50
51 if self.lex_directive(state) {
52 continue;
53 }
54
55 if self.lex_tailwind_class_part(state) {
56 continue;
57 }
58
59 if self.lex_punctuation(state) {
60 continue;
61 }
62
63 state.advance_if_dead_lock(safe_point)
64 }
65
66 Ok(())
67 }
68
69 pub fn skip_whitespace<S: Source + ?Sized>(&self, state: &mut State<'_, S>) -> bool {
71 let start = state.get_position();
72 let mut found = false;
73
74 while let Some(ch) = state.peek() {
75 if ch.is_whitespace() {
76 state.advance(ch.len_utf8());
77 found = true
78 }
79 else {
80 break;
81 }
82 }
83
84 if found {
85 state.add_token(TailwindTokenType::Whitespace, start, state.get_position())
86 }
87
88 found
89 }
90
91 pub fn lex_comment<S: Source + ?Sized>(&self, state: &mut State<'_, S>) -> bool {
93 let start = state.get_position();
94 if state.consume_if_starts_with("/*") {
95 while state.not_at_end() {
96 if state.consume_if_starts_with("*/") {
97 break;
98 }
99 if let Some(ch) = state.peek() {
100 state.advance(ch.len_utf8())
101 }
102 }
103 state.add_token(TailwindTokenType::Comment, start, state.get_position());
104 return true;
105 }
106 if state.consume_if_starts_with("//") {
107 while state.not_at_end() {
108 if let Some(ch) = state.peek() {
109 if ch == '\n' {
110 break;
111 }
112 state.advance(ch.len_utf8())
113 }
114 }
115 state.add_token(TailwindTokenType::Comment, start, state.get_position());
116 return true;
117 }
118 false
119 }
120
121 pub fn lex_directive<S: Source + ?Sized>(&self, state: &mut State<'_, S>) -> bool {
123 let start = state.get_position();
124 if state.consume_if_starts_with("@") {
125 while let Some(ch) = state.peek() {
126 if ch.is_alphabetic() || ch == '-' {
127 state.advance(ch.len_utf8());
128 }
129 else {
130 break;
131 }
132 }
133 state.add_token(TailwindTokenType::Directive, start, state.get_position());
134 return true;
135 }
136 false
137 }
138
139 pub fn lex_tailwind_class_part<S: Source + ?Sized>(&self, state: &mut State<'_, S>) -> bool {
141 let start = state.get_position();
142
143 if state.consume_if_starts_with("!") {
144 state.add_token(TailwindTokenType::Important, start, state.get_position());
145 return true;
146 }
147
148 if state.peek() == Some('[') {
149 return self.lex_arbitrary_value(state);
150 }
151
152 let mut has_content = false;
154 let _current_pos = state.get_position();
155
156 while let Some(ch) = state.peek() {
157 if ch.is_alphanumeric() || ch == '-' || ch == '/' || ch == '.' || ch == '_' {
158 state.advance(ch.len_utf8());
159 has_content = true;
160
161 if state.peek() == Some(':') {
162 state.advance(':'.len_utf8());
163 state.add_token(TailwindTokenType::Modifier, start, state.get_position());
164 return true;
165 }
166 }
167 else {
168 break;
169 }
170 }
171
172 if has_content {
173 state.add_token(TailwindTokenType::Utility, start, state.get_position());
174 return true;
175 }
176
177 false
178 }
179
180 pub fn lex_arbitrary_value<S: Source + ?Sized>(&self, state: &mut State<'_, S>) -> bool {
182 let start = state.get_position();
183 if state.consume_if_starts_with("[") {
184 let mut depth = 1;
185 while state.not_at_end() && depth > 0 {
186 if let Some(ch) = state.peek() {
187 if ch == '[' {
188 depth += 1;
189 }
190 else if ch == ']' {
191 depth -= 1;
192 }
193 state.advance(ch.len_utf8());
194 }
195 else {
196 break;
197 }
198 }
199 state.add_token(TailwindTokenType::ArbitraryValue, start, state.get_position());
200 return true;
201 }
202 false
203 }
204
205 pub fn lex_punctuation<S: Source + ?Sized>(&self, state: &mut State<'_, S>) -> bool {
207 let start = state.get_position();
208
209 macro_rules! check {
210 ($s:expr, $t:ident) => {
211 if state.consume_if_starts_with($s) {
212 state.add_token(TailwindTokenType::$t, start, state.get_position());
213 return true;
214 }
215 };
216 }
217
218 check!("[", LeftBracket);
219 check!("]", RightBracket);
220 check!("(", LeftParen);
221 check!(")", RightParen);
222 check!(":", Colon);
223 check!(";", Semicolon);
224 check!("@", At);
225 check!("!", Bang);
226 check!("-", Dash);
227 check!("/", Slash);
228 check!(".", Dot);
229 check!("#", Hash);
230 check!(",", Comma);
231
232 false
233 }
234}