ludtwig_parser/
lexer.rs

1use logos::Logos;
2
3use crate::syntax::untyped::{SyntaxKind, TextRange, TextSize};
4
5/// Lex the source code into a Vec of tokens with their corresponding span (position in source code).
6/// These tokens are produced by a dumb lexer and don't have any meaning / semantic attached to them.
7pub(crate) fn lex(source: &str) -> Vec<Token> {
8    let mut lexer = SyntaxKind::lexer(source);
9    let mut result = vec![];
10
11    while let Some(kind) = lexer.next() {
12        let range = {
13            let span = lexer.span();
14            let start = TextSize::try_from(span.start)
15                .expect("lexer span range should fit into a u32 (file should be smaller than 4GB)");
16            let end = TextSize::try_from(span.end)
17                .expect("lexer span range should fit into a u32 (file should be smaller than 4GB)");
18            TextRange::new(start, end)
19        };
20
21        let kind = kind.unwrap_or(SyntaxKind::TK_UNKNOWN);
22        result.push(Token {
23            kind,
24            text: lexer.slice(),
25            range,
26        });
27    }
28
29    result
30}
31
/// A single lexed token: a raw kind plus the source slice it covers
/// and its absolute position in the source.
#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) struct Token<'source> {
    // Raw token kind straight from the lexer (no semantics attached yet).
    pub(crate) kind: SyntaxKind,
    // Borrowed slice of the original source text for this token.
    pub(crate) text: &'source str,
    // Absolute byte range of `text` within the source.
    pub(crate) range: TextRange,
}
38
// ToDo: might be able to remove this annotation in future rust-analyzer version
#[allow(clippy::needless_lifetimes)]
impl<'source> Token<'source> {
    /// Test helper: construct a token with an explicit range.
    #[cfg(test)]
    pub(crate) fn new(kind: SyntaxKind, text: &'source str, range: TextRange) -> Self {
        Self { kind, text, range }
    }

    /// Test helper: construct a token whose range always starts at offset 0.
    /// The range is therefore "wrong" for any token that does not sit at the
    /// very beginning of the source — useful where tests only compare kinds/text.
    #[cfg(test)]
    pub(crate) fn new_wrong_range(kind: SyntaxKind, text: &'source str) -> Self {
        use crate::syntax::untyped::TextLen;
        // up_to(len) == [0, len)
        let range = TextRange::up_to(text.text_len());

        Self { kind, text, range }
    }
}
55
56#[cfg(test)]
57mod tests {
58    use crate::syntax::untyped::TextLen;
59    use crate::T;
60
61    use super::*;
62
63    fn check_regex(input: &str, kind: SyntaxKind, display: &str) {
64        let range = TextRange::up_to(input.text_len());
65        let lexer_results = lex(input);
66
67        // compare lex result
68        assert_eq!(
69            lexer_results,
70            vec![Token {
71                kind,
72                text: input,
73                range
74            }]
75        );
76
77        // SyntaxKind display implementation should be there and match
78        assert_eq!(display, format!("{}", lexer_results[0].kind));
79    }
80
81    fn check_token(input: &str, kind: SyntaxKind) {
82        let range = TextRange::up_to(input.text_len());
83        let lexer_results = lex(input);
84
85        // compare lex result
86        assert_eq!(
87            lexer_results,
88            vec![Token {
89                kind,
90                text: input,
91                range
92            }]
93        );
94
95        // compare SyntaxKind display implementation
96        assert_eq!(input, format!("{}", lexer_results[0].kind));
97    }
98
99    #[test]
100    fn lex_simple_output() {
101        let results = lex("</div>");
102
103        assert_eq!(
104            results,
105            vec![
106                Token::new(T!["</"], "</", TextRange::up_to("</".text_len())),
107                Token::new(
108                    T![word],
109                    "div",
110                    TextRange::at(TextSize::from(2), "div".text_len())
111                ),
112                Token::new(
113                    T![">"],
114                    ">",
115                    TextRange::at(TextSize::from(5), ">".text_len())
116                )
117            ]
118        );
119    }
120
121    #[test]
122    fn lex_simple_expression() {
123        let results = lex("{{ not a }}");
124        let syntax_kinds: Vec<SyntaxKind> = results.into_iter().map(|t| t.kind).collect();
125
126        assert_eq!(
127            syntax_kinds,
128            vec![
129                T!["{{"],
130                T![ws],
131                T!["not"],
132                T![ws],
133                T![word],
134                T![ws],
135                T!["}}"],
136            ]
137        );
138    }
139
140    #[test]
141    fn lex_hashtag_curly_curly() {
142        let results = lex("#{{");
143        let syntax_kinds: Vec<SyntaxKind> = results.into_iter().map(|t| t.kind).collect();
144
145        assert_eq!(syntax_kinds, vec![T!["#"], T!["{{"],]);
146    }
147
    /// Builds one source string containing every fixed token kind separated by
    /// spaces, lexes it in a single pass, and checks the resulting kind
    /// sequence. This catches tokens that accidentally merge with or shadow
    /// their neighbours (e.g. longer tokens with shared prefixes).
    #[test]
    #[allow(clippy::too_many_lines)]
    fn lex_all_tokens_chained_together() {
        use std::fmt::Write;

        let mut source = String::new();
        let mut expected_kinds: Vec<SyntaxKind> = vec![];
        // appends `text` plus a trailing space separator and records the
        // expected kind (followed by T![ws] for the separator)
        let mut add = |text: &str, kind: SyntaxKind| {
            write!(source, "{text} ").unwrap();
            expected_kinds.push(kind);
            expected_kinds.push(T![ws]);
        };

        // add every token here (except ws)
        add("\n", T![lb]);
        add("word", T![word]);
        add("42.3", T![number]);
        add("&#10;", T![html escape character]);
        add(".", T!["."]);
        add("..", T![".."]);
        add(",", T![","]);
        add(":", T![":"]);
        add(";", T![";"]);
        add("!", T!["!"]);
        add("!=", T!["!="]);
        add("!==", T!["!=="]);
        add("?", T!["?"]);
        add("??", T!["??"]);
        add("%", T!["%"]);
        add("~", T!["~"]);
        add("|", T!["|"]);
        add("||", T!["||"]);
        add("&", T!["&"]);
        add("&&", T!["&&"]);
        add("/", T!["/"]);
        add("//", T!["//"]);
        add("\\", T!["\\"]);
        add("(", T!["("]);
        add(")", T![")"]);
        add("{", T!["{"]);
        add("}", T!["}"]);
        add("[", T!["["]);
        add("]", T!["]"]);
        add("<", T!["<"]);
        add("<=", T!["<="]);
        add("<=>", T!["<=>"]);
        add("</", T!["</"]);
        add("<!", T!["<!"]);
        add("doctype", T!["DOCTYPE"]);
        add(">", T![">"]);
        add(">=", T![">="]);
        add("=>", T!["=>"]);
        add("/>", T!["/>"]);
        add("<!--", T!["<!--"]);
        add("-->", T!["-->"]);
        add("=", T!["="]);
        add("==", T!["=="]);
        add("===", T!["==="]);
        add("+", T!["+"]);
        add("-", T!["-"]);
        add("*", T!["*"]);
        add("**", T!["**"]);
        add("\"", T!["\""]);
        add("'", T!["'"]);
        add("`", T!["`"]);
        add("{%", T!["{%"]);
        add("%}", T!["%}"]);
        add("{{", T!["{{"]);
        add("}}", T!["}}"]);
        add("{#", T!["{#"]);
        add("#", T!["#"]);
        add("#}", T!["#}"]);
        add("true", T!["true"]);
        add("false", T!["false"]);
        add("block", T!["block"]);
        add("endblock", T!["endblock"]);
        add("if", T!["if"]);
        add("elseif", T!["elseif"]);
        add("else", T!["else"]);
        add("endif", T!["endif"]);
        add("apply", T!["apply"]);
        add("endapply", T!["endapply"]);
        add("autoescape", T!["autoescape"]);
        add("endautoescape", T!["endautoescape"]);
        add("cache", T!["cache"]);
        add("endcache", T!["endcache"]);
        add("deprecated", T!["deprecated"]);
        add("do", T!["do"]);
        add("embed", T!["embed"]);
        add("endembed", T!["endembed"]);
        add("extends", T!["extends"]);
        add("flush", T!["flush"]);
        add("for", T!["for"]);
        add("endfor", T!["endfor"]);
        add("from", T!["from"]);
        add("import", T!["import"]);
        add("macro", T!["macro"]);
        add("endmacro", T!["endmacro"]);
        add("sandbox", T!["sandbox"]);
        add("endsandbox", T!["endsandbox"]);
        add("set", T!["set"]);
        add("endset", T!["endset"]);
        add("use", T!["use"]);
        add("verbatim", T!["verbatim"]);
        add("endverbatim", T!["endverbatim"]);
        add("only", T!["only"]);
        add("ignore missing", T!["ignore missing"]);
        add("with", T!["with"]);
        add("endwith", T!["endwith"]);
        add("ttl", T!["ttl"]);
        add("tags", T!["tags"]);
        add("props", T!["props"]);
        add("component", T!["component"]);
        add("endcomponent", T!["endcomponent"]);
        add("not", T!["not"]);
        add("or", T!["or"]);
        add("and", T!["and"]);
        add("b-or", T!["b-or"]);
        add("b-xor", T!["b-xor"]);
        add("b-and", T!["b-and"]);
        add("in", T!["in"]);
        add("matches", T!["matches"]);
        add("starts with", T!["starts with"]);
        add("ends with", T!["ends with"]);
        add("is", T!["is"]);
        add("even", T!["even"]);
        add("odd", T!["odd"]);
        add("defined", T!["defined"]);
        add("same as", T!["same as"]);
        add("as", T!["as"]);
        add("none", T!["none"]);
        add("null", T!["null"]);
        add("divisible by", T!["divisible by"]);
        add("constant", T!["constant"]);
        add("empty", T!["empty"]);
        add("iterable", T!["iterable"]);
        add("max", T!["max"]);
        add("min", T!["min"]);
        add("range", T!["range"]);
        add("cycle", T!["cycle"]);
        add("random", T!["random"]);
        add("date", T!["date"]);
        add("include", T!["include"]);
        add("source", T!["source"]);
        add("sw_extends", T!["sw_extends"]);
        add("sw_silent_feature_call", T!["sw_silent_feature_call"]);
        add("endsw_silent_feature_call", T!["endsw_silent_feature_call"]);
        add("sw_include", T!["sw_include"]);
        add("return", T!["return"]);
        add("sw_icon", T!["sw_icon"]);
        add("sw_thumbnails", T!["sw_thumbnails"]);
        add("style", T!["style"]);
        add("ludtwig-ignore-file", T!["ludtwig-ignore-file"]);
        add("ludtwig-ignore", T!["ludtwig-ignore"]);
        add("€", T![unknown]);
        add("trans", T!["trans"]);
        add("endtrans", T!["endtrans"]);

        // lex and compare
        let results = lex(&source);
        let found_syntax_kinds: Vec<SyntaxKind> = results.into_iter().map(|t| t.kind).collect();
        assert_eq!(found_syntax_kinds, expected_kinds);
    }
311
    // --- Regex-matched, variable-length token kinds ---

    #[test]
    fn lex_whitespace() {
        check_regex("   ", T![ws], "whitespace");
        check_regex(" \t  ", T![ws], "whitespace");
        check_regex("\t", T![ws], "whitespace");
    }

    #[test]
    fn lex_line_break() {
        // consecutive line breaks (any mix of \n and \r\n) form a single token
        check_regex("\n", T![lb], "line break");
        check_regex("\n\n", T![lb], "line break");
        check_regex("\r\n", T![lb], "line break");
        check_regex("\r\n\r\n", T![lb], "line break");
        check_regex("\r\n\n\r\n", T![lb], "line break");
    }

    #[test]
    fn lex_word() {
        // words may contain digits, `-`, `_` and may start with #, @, _ or $
        check_regex("hello", T![word], "word");
        check_regex("hello123", T![word], "word");
        check_regex("camelCase", T![word], "word");
        check_regex("kebab-case", T![word], "word");
        check_regex("snake_case", T![word], "word");
        check_regex("#hello123", T![word], "word");
        check_regex("@hello123", T![word], "word");
        check_regex("block1", T![word], "word");
        check_regex("block_", T![word], "word");
        check_regex("blocks", T![word], "word");
        check_regex("_blank", T![word], "word");
        check_regex("$special", T![word], "word");
    }

    #[test]
    fn lex_number() {
        // integers, decimals and scientific notation (upper/lowercase e)
        check_regex("123", T![number], "number");
        check_regex("0.0", T![number], "number");
        check_regex("3.123456789", T![number], "number");
        check_regex("3e+2", T![number], "number");
        check_regex("3e-2", T![number], "number");
        check_regex("10E-7", T![number], "number");
        check_regex("10E+6", T![number], "number");
        check_regex("1.23E+10", T![number], "number");
    }

    #[test]
    fn lex_html_escape_character() {
        // named, decimal and hexadecimal HTML entities
        check_regex(
            "&NewLine;",
            T![html escape character],
            "html escape character",
        );
        check_regex("&nbsp;", T![html escape character], "html escape character");
        check_regex("&#39;", T![html escape character], "html escape character");
        check_regex(
            "&#8721;",
            T![html escape character],
            "html escape character",
        );
        check_regex("&sup3;", T![html escape character], "html escape character");
        check_regex(
            "&#x00B3;",
            T![html escape character],
            "html escape character",
        );
    }
377
    // --- Fixed punctuation / operator tokens (Display text == token text) ---

    #[test]
    fn lex_dot() {
        check_token(".", T!["."]);
    }

    #[test]
    fn lex_double_dot() {
        check_token("..", T![".."]);
    }

    #[test]
    fn lex_comma() {
        check_token(",", T![","]);
    }

    #[test]
    fn lex_colon() {
        check_token(":", T![":"]);
    }

    #[test]
    fn lex_semicolon() {
        check_token(";", T![";"]);
    }

    #[test]
    fn lex_exclamation_mark() {
        check_token("!", T!["!"]);
    }

    #[test]
    fn lex_exclamation_mark_equals() {
        check_token("!=", T!["!="]);
    }

    #[test]
    fn lex_exclamation_mark_double_equals() {
        check_token("!==", T!["!=="]);
    }

    #[test]
    fn lex_question_mark() {
        check_token("?", T!["?"]);
    }

    #[test]
    fn lex_double_question_mark() {
        check_token("??", T!["??"]);
    }

    #[test]
    fn lex_percent() {
        check_token("%", T!["%"]);
    }

    #[test]
    fn lex_tilde() {
        check_token("~", T!["~"]);
    }

    #[test]
    fn lex_single_pipe() {
        check_token("|", T!["|"]);
    }

    #[test]
    fn lex_double_pipe() {
        check_token("||", T!["||"]);
    }

    #[test]
    fn lex_ampersand() {
        check_token("&", T!["&"]);
    }

    #[test]
    fn lex_double_ampersand() {
        check_token("&&", T!["&&"]);
    }

    #[test]
    fn lex_forward_slash() {
        check_token("/", T!["/"]);
    }

    #[test]
    fn lex_double_forward_slash() {
        check_token("//", T!["//"]);
    }

    #[test]
    fn lex_backward_slash() {
        check_token("\\", T!["\\"]);
    }

    #[test]
    fn lex_open_parenthesis() {
        check_token("(", T!["("]);
    }

    #[test]
    fn lex_close_parenthesis() {
        check_token(")", T![")"]);
    }

    #[test]
    fn lex_open_curly() {
        check_token("{", T!["{"]);
    }

    #[test]
    fn lex_close_curly() {
        check_token("}", T!["}"]);
    }

    #[test]
    fn lex_open_square() {
        check_token("[", T!["["]);
    }

    #[test]
    fn lex_close_square() {
        check_token("]", T!["]"]);
    }

    #[test]
    fn lex_less_than() {
        check_token("<", T!["<"]);
    }

    #[test]
    fn lex_less_than_equal() {
        check_token("<=", T!["<="]);
    }

    #[test]
    fn lex_less_than_equal_greater_than() {
        check_token("<=>", T!["<=>"]);
    }

    #[test]
    fn lex_less_than_slash() {
        check_token("</", T!["</"]);
    }

    #[test]
    fn lex_less_than_exclamation_mark() {
        check_token("<!", T!["<!"]);
    }

    #[test]
    fn lex_doctype() {
        check_token("DOCTYPE", T!["DOCTYPE"]);
    }

    #[test]
    fn lex_greater_than() {
        check_token(">", T![">"]);
    }

    #[test]
    fn lex_greater_than_equal() {
        check_token(">=", T![">="]);
    }

    #[test]
    fn lex_equal_greater_than() {
        check_token("=>", T!["=>"]);
    }

    #[test]
    fn lex_slash_greater_than() {
        check_token("/>", T!["/>"]);
    }

    #[test]
    fn lex_less_than_exclamation_mark_minus_minus() {
        check_token("<!--", T!["<!--"]);
    }

    #[test]
    fn lex_minus_minus_greater_than() {
        check_token("-->", T!["-->"]);
    }

    #[test]
    fn lex_equal() {
        check_token("=", T!["="]);
    }

    #[test]
    fn lex_double_equal() {
        check_token("==", T!["=="]);
    }

    #[test]
    fn lex_triple_equal() {
        check_token("===", T!["==="]);
    }

    #[test]
    fn lex_plus() {
        check_token("+", T!["+"]);
    }

    #[test]
    fn lex_minus() {
        check_token("-", T!["-"]);
    }

    #[test]
    fn lex_star() {
        check_token("*", T!["*"]);
    }

    #[test]
    fn lex_double_star() {
        check_token("**", T!["**"]);
    }

    #[test]
    fn lex_double_quotes() {
        check_token("\"", T!["\""]);
    }

    #[test]
    fn lex_single_quotes() {
        check_token("'", T!["'"]);
    }

    #[test]
    fn lex_grave_accent_quotes() {
        check_token("`", T!["`"]);
    }

    #[test]
    fn lex_curly_percent() {
        check_token("{%", T!["{%"]);
    }

    #[test]
    fn lex_percent_curly() {
        check_token("%}", T!["%}"]);
    }

    #[test]
    fn lex_open_curly_curly() {
        check_token("{{", T!["{{"]);
    }

    #[test]
    fn lex_close_curly_curly() {
        check_token("}}", T!["}}"]);
    }

    #[test]
    fn lex_open_curly_hashtag() {
        check_token("{#", T!["{#"]);
    }

    #[test]
    fn lex_hashtag_close_curly() {
        check_token("#}", T!["#}"]);
    }

    #[test]
    fn lex_hashtag() {
        check_token("#", T!["#"]);
    }
647
    // --- Fixed keyword tokens (twig tags, operators, tests and
    //     shopware-specific extensions); Display text == token text ---

    #[test]
    fn lex_true() {
        check_token("true", T!["true"]);
    }

    #[test]
    fn lex_false() {
        check_token("false", T!["false"]);
    }

    #[test]
    fn lex_block() {
        check_token("block", T!["block"]);
    }

    #[test]
    fn lex_endblock() {
        check_token("endblock", T!["endblock"]);
    }

    #[test]
    fn lex_if() {
        check_token("if", T!["if"]);
    }

    #[test]
    fn lex_else_if() {
        check_token("elseif", T!["elseif"]);
    }

    #[test]
    fn lex_else() {
        check_token("else", T!["else"]);
    }

    #[test]
    fn lex_endif() {
        check_token("endif", T!["endif"]);
    }

    #[test]
    fn lex_apply() {
        check_token("apply", T!["apply"]);
    }

    #[test]
    fn lex_endapply() {
        check_token("endapply", T!["endapply"]);
    }

    #[test]
    fn lex_autoescape() {
        check_token("autoescape", T!["autoescape"]);
    }

    #[test]
    fn lex_endautoescape() {
        check_token("endautoescape", T!["endautoescape"]);
    }

    #[test]
    fn lex_cache() {
        check_token("cache", T!["cache"]);
    }

    #[test]
    fn lex_endcache() {
        check_token("endcache", T!["endcache"]);
    }

    #[test]
    fn lex_deprecated() {
        check_token("deprecated", T!["deprecated"]);
    }

    #[test]
    fn lex_do() {
        check_token("do", T!["do"]);
    }

    #[test]
    fn lex_embed() {
        check_token("embed", T!["embed"]);
    }

    #[test]
    fn lex_endembed() {
        check_token("endembed", T!["endembed"]);
    }

    #[test]
    fn lex_extends() {
        check_token("extends", T!["extends"]);
    }

    #[test]
    fn lex_flush() {
        check_token("flush", T!["flush"]);
    }

    #[test]
    fn lex_for() {
        check_token("for", T!["for"]);
    }

    #[test]
    fn lex_endfor() {
        check_token("endfor", T!["endfor"]);
    }

    #[test]
    fn lex_from() {
        check_token("from", T!["from"]);
    }

    #[test]
    fn lex_import() {
        check_token("import", T!["import"]);
    }

    #[test]
    fn lex_macro() {
        check_token("macro", T!["macro"]);
    }

    #[test]
    fn lex_endmacro() {
        check_token("endmacro", T!["endmacro"]);
    }

    #[test]
    fn lex_sandbox() {
        check_token("sandbox", T!["sandbox"]);
    }

    #[test]
    fn lex_endsandbox() {
        check_token("endsandbox", T!["endsandbox"]);
    }

    #[test]
    fn lex_set() {
        check_token("set", T!["set"]);
    }

    #[test]
    fn lex_endset() {
        check_token("endset", T!["endset"]);
    }

    #[test]
    fn lex_use() {
        check_token("use", T!["use"]);
    }

    #[test]
    fn lex_verbatim() {
        check_token("verbatim", T!["verbatim"]);
    }

    #[test]
    fn lex_endverbatim() {
        check_token("endverbatim", T!["endverbatim"]);
    }

    #[test]
    fn lex_only() {
        check_token("only", T!["only"]);
    }

    #[test]
    fn lex_ignore_missing() {
        // multi-word keyword lexed as one token
        check_token("ignore missing", T!["ignore missing"]);
    }

    #[test]
    fn lex_with() {
        check_token("with", T!["with"]);
    }

    #[test]
    fn lex_endwith() {
        check_token("endwith", T!["endwith"]);
    }

    #[test]
    fn lex_ttl() {
        check_token("ttl", T!["ttl"]);
    }

    #[test]
    fn lex_tags() {
        check_token("tags", T!["tags"]);
    }

    #[test]
    fn lex_props() {
        check_token("props", T!["props"]);
    }

    #[test]
    fn lex_component() {
        check_token("component", T!["component"]);
    }

    #[test]
    fn lex_endcomponent() {
        check_token("endcomponent", T!["endcomponent"]);
    }

    #[test]
    fn lex_not() {
        check_token("not", T!["not"]);
    }

    #[test]
    fn lex_or() {
        check_token("or", T!["or"]);
    }

    #[test]
    fn lex_and() {
        check_token("and", T!["and"]);
    }

    #[test]
    fn lex_binary_or() {
        check_token("b-or", T!["b-or"]);
    }

    #[test]
    fn lex_binary_xor() {
        check_token("b-xor", T!["b-xor"]);
    }

    #[test]
    fn lex_binary_and() {
        check_token("b-and", T!["b-and"]);
    }

    #[test]
    fn lex_in() {
        check_token("in", T!["in"]);
    }

    #[test]
    fn lex_matches() {
        check_token("matches", T!["matches"]);
    }

    #[test]
    fn lex_starts_with() {
        // multi-word keyword lexed as one token
        check_token("starts with", T!["starts with"]);
    }

    #[test]
    fn lex_ends_with() {
        // multi-word keyword lexed as one token
        check_token("ends with", T!["ends with"]);
    }

    #[test]
    fn lex_is() {
        check_token("is", T!["is"]);
    }

    #[test]
    fn lex_even() {
        check_token("even", T!["even"]);
    }

    #[test]
    fn lex_odd() {
        check_token("odd", T!["odd"]);
    }

    #[test]
    fn lex_defined() {
        check_token("defined", T!["defined"]);
    }

    #[test]
    fn lex_same_as() {
        // multi-word keyword lexed as one token
        check_token("same as", T!["same as"]);
    }

    #[test]
    fn lex_as() {
        check_token("as", T!["as"]);
    }

    #[test]
    fn lex_none() {
        check_token("none", T!["none"]);
    }

    #[test]
    fn lex_null() {
        check_token("null", T!["null"]);
    }

    #[test]
    fn lex_divisible_by() {
        // multi-word keyword lexed as one token
        check_token("divisible by", T!["divisible by"]);
    }

    #[test]
    fn lex_constant() {
        check_token("constant", T!["constant"]);
    }

    #[test]
    fn lex_empty() {
        check_token("empty", T!["empty"]);
    }

    #[test]
    fn lex_iterable() {
        check_token("iterable", T!["iterable"]);
    }

    #[test]
    fn lex_max() {
        check_token("max", T!["max"]);
    }

    #[test]
    fn lex_min() {
        check_token("min", T!["min"]);
    }

    #[test]
    fn lex_range() {
        check_token("range", T!["range"]);
    }

    #[test]
    fn lex_cycle() {
        check_token("cycle", T!["cycle"]);
    }

    #[test]
    fn lex_random() {
        check_token("random", T!["random"]);
    }

    #[test]
    fn lex_date() {
        check_token("date", T!["date"]);
    }

    #[test]
    fn lex_include() {
        check_token("include", T!["include"]);
    }

    #[test]
    fn lex_source() {
        check_token("source", T!["source"]);
    }

    #[test]
    fn lex_sw_extends() {
        check_token("sw_extends", T!["sw_extends"]);
    }

    #[test]
    fn lex_sw_silent_feature_call() {
        check_token("sw_silent_feature_call", T!["sw_silent_feature_call"]);
    }

    #[test]
    fn lex_endsw_silent_feature_call() {
        check_token("endsw_silent_feature_call", T!["endsw_silent_feature_call"]);
    }

    #[test]
    fn lex_sw_include() {
        check_token("sw_include", T!["sw_include"]);
    }

    #[test]
    fn lex_return() {
        check_token("return", T!["return"]);
    }

    #[test]
    fn lex_sw_icon() {
        check_token("sw_icon", T!["sw_icon"]);
    }

    #[test]
    fn lex_sw_thumbnails() {
        check_token("sw_thumbnails", T!["sw_thumbnails"]);
    }

    #[test]
    fn lex_style() {
        check_token("style", T!["style"]);
    }

    #[test]
    fn lex_ludtwig_ignore_file() {
        check_token("ludtwig-ignore-file", T!["ludtwig-ignore-file"]);
    }

    #[test]
    fn lex_ludtwig_ignore() {
        check_token("ludtwig-ignore", T!["ludtwig-ignore"]);
    }
1057}