1use dcbor::prelude::*;
2use dcbor_parse::parse_dcbor_item_partial;
3use logos::{Lexer, Logos};
4
5use crate::{DigestPattern, Error, Quantifier, Reluctance, Result};
6
/// Lexer tokens for the pattern expression language.
///
/// Generated by [`logos`]; whitespace between tokens is skipped via the
/// `logos(skip …)` attribute. Several variants carry a `Result` payload
/// because their callbacks validate the token's content (quoted strings,
/// regexes, hex, dates, digests) at lex time rather than at parse time.
#[derive(Debug, Clone, Logos, PartialEq)]
#[rustfmt::skip]
#[logos(error = Error)]
#[logos(skip r"[ \t\r\n\f]+")]
pub enum Token {
    // --- Boolean combinators ---
    #[token("&")]
    And,

    #[token("|")]
    Or,

    #[token("!")]
    Not,

    // --- Repetition operators ---
    // Each repetition comes in three reluctance flavors: greedy (bare),
    // lazy (`?` suffix), and possessive (`+` suffix).
    #[token("*")]
    RepeatZeroOrMore,

    #[token("*?")]
    RepeatZeroOrMoreLazy,

    #[token("*+")]
    RepeatZeroOrMorePossessive,

    #[token("+")]
    RepeatOneOrMore,

    #[token("+?")]
    RepeatOneOrMoreLazy,

    #[token("++")]
    RepeatOneOrMorePossessive,

    #[token("?")]
    RepeatZeroOrOne,

    #[token("??")]
    RepeatZeroOrOneLazy,

    #[token("?+")]
    RepeatZeroOrOnePossessive,

    // --- Type / structure keywords ---
    #[token("tagged")]
    Tagged,

    #[token("array")]
    Array,

    #[token("map")]
    Map,

    #[token("bool")]
    Bool,

    #[token("bstr")]
    ByteString,

    #[token("date")]
    Date,

    // `date'…'` — the callback consumes up to the closing quote and parses
    // the content into a date pattern (value, range, or regex).
    #[token("date'", parse_date_quoted)]
    DateQuoted(Result<crate::pattern::DatePattern>),

    #[token("known")]
    Known,

    #[token("null")]
    Null,

    #[token("number")]
    Number,

    #[token("text")]
    Text,

    #[token("digest")]
    Digest,

    // `digest'…'` — the callback consumes up to the closing quote and parses
    // the content into a digest pattern (UR, hex prefix, or binary regex).
    #[token("digest'", parse_digest_quoted)]
    DigestQuoted(Result<DigestPattern>),

    #[token("search")]
    Search,

    // --- Literal keywords ---
    #[token("true")]
    BoolTrue,

    #[token("false")]
    BoolFalse,

    #[token("NaN")]
    NaN,

    #[token("Infinity")]
    Infinity,

    #[token("-Infinity")]
    NegInfinity,

    // --- Delimiters / punctuation ---
    #[token("(")]
    ParenOpen,

    #[token(")")]
    ParenClose,

    #[token("[")]
    BracketOpen,

    #[token("]")]
    BracketClose,

    // `{` is ambiguous: it may start a map pattern or a `{n,m}` quantifier.
    // The callback looks ahead and may emit `Token::Range` instead.
    #[token("{", parse_brace_open)]
    BraceOpen,

    #[token("}")]
    BraceClose,

    #[token(",")]
    Comma,

    #[token(":")]
    Colon,

    #[token("...")]
    Ellipsis,

    // --- Comparison operators ---
    #[token(">=")]
    GreaterThanOrEqual,

    #[token("<=")]
    LessThanOrEqual,

    // Lowered priority so the two-character `>=` wins over `>`.
    #[token(">", priority = 1)]
    GreaterThan,

    #[token("<")]
    LessThan,

    // --- Value-carrying tokens ---
    // JSON-style number literal; parsed into f64 via dCBOR.
    #[regex(r"-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?", callback = parse_number)]
    NumberLiteral(Result<f64>),

    // `@name` capture-group label; the payload is the name without `@`.
    #[regex(r"@[a-zA-Z_][a-zA-Z0-9_]*", |lex|
        lex.slice()[1..].to_string()
    )]
    GroupName(String),

    #[token("\"", parse_string)]
    StringLiteral(Result<String>),

    #[token("'", parse_single_quoted)]
    SingleQuoted(Result<String>),

    #[token("/", parse_regex)]
    Regex(Result<String>),

    #[token("h'", parse_hex_string)]
    HexString(Result<Vec<u8>>),

    #[token("h'/", parse_hex_regex)]
    HexRegex(Result<String>),

    // Never matched directly by a token rule: synthesized by
    // `parse_brace_open` when `{…}` looks like a quantifier.
    Range(Result<Quantifier>),
}
176
177fn parse_number(lex: &mut Lexer<Token>) -> Result<f64> {
179 let number_str = lex.slice();
180 match parse_dcbor_item_partial(number_str) {
181 Ok((cbor, _)) => match f64::try_from_cbor(&cbor) {
182 Ok(value) => Ok(value),
183 Err(_) => Err(Error::InvalidNumberFormat(lex.span())),
184 },
185 Err(_) => Err(Error::InvalidNumberFormat(lex.span())),
186 }
187}
188
189fn parse_regex(lex: &mut Lexer<Token>) -> Result<String> {
191 let src = lex.remainder(); let mut escape = false;
193
194 for (i, ch) in src.char_indices() {
195 match (ch, escape) {
196 ('\\', false) => escape = true, ('/', false) => {
198 lex.bump(i + 1); let content = src[..i].to_owned();
201 match regex::Regex::new(&content) {
202 Ok(_) => return Ok(content),
203 Err(_) => return Err(Error::InvalidRegex(lex.span())),
204 }
205 }
206 _ => escape = false, }
208 }
209
210 Err(Error::UnterminatedRegex(lex.span()))
212}
213
214fn parse_string(lex: &mut Lexer<Token>) -> Result<String> {
216 let src = lex.remainder(); let mut escape = false;
218 let mut result = String::new();
219
220 for (i, ch) in src.char_indices() {
221 match (ch, escape) {
222 ('\\', false) => escape = true, ('"', false) => {
224 lex.bump(i + 1); return Ok(result);
227 }
228 (c, true) => {
229 match c {
231 '"' => result.push('"'),
232 '\\' => result.push('\\'),
233 'n' => result.push('\n'),
234 'r' => result.push('\r'),
235 't' => result.push('\t'),
236 _ => {
237 result.push('\\');
238 result.push(c);
239 }
240 }
241 escape = false;
242 }
243 (c, false) => {
244 result.push(c);
245 escape = false;
246 }
247 }
248 }
249
250 Err(Error::UnterminatedString(lex.span()))
252}
253
254fn parse_hex_string(lex: &mut Lexer<Token>) -> Result<Vec<u8>> {
256 let src = lex.remainder(); for (i, ch) in src.char_indices() {
260 match ch {
261 '\'' => {
262 let hex_content = &src[..i];
264 match hex::decode(hex_content) {
265 Ok(bytes) => {
266 lex.bump(i + 1); return Ok(bytes);
268 }
269 Err(_) => return Err(Error::InvalidHexString(lex.span())),
270 }
271 }
272 c if c.is_ascii_hexdigit() => {
273 }
275 _ => {
276 return Err(Error::InvalidHexString(lex.span()));
278 }
279 }
280 }
281
282 Err(Error::UnterminatedHexString(lex.span()))
284}
285
286fn parse_hex_regex(lex: &mut Lexer<Token>) -> Result<String> {
288 let src = lex.remainder(); let mut escape = false;
290
291 for (i, ch) in src.char_indices() {
292 match (ch, escape) {
293 ('\\', false) => escape = true, ('/', false) => {
295 let remainder = &src[i + 1..];
297 if remainder.starts_with('\'') {
298 lex.bump(i + 2); let content = src[..i].to_owned();
301 match regex::bytes::Regex::new(&content) {
302 Ok(_) => return Ok(content),
303 Err(_) => return Err(Error::InvalidRegex(lex.span())),
304 }
305 }
306 escape = false;
308 }
309 _ => escape = false, }
311 }
312
313 Err(Error::UnterminatedRegex(lex.span()))
315}
316
/// Callback for `digest'…'` tokens. Scans to the closing `'` and interprets
/// the delimited content as, in order of precedence:
///
/// 1. a `ur:` string decoded into a full `Digest`,
/// 2. a `/…/`-delimited regex matched against the digest's raw bytes,
/// 3. an even-length hex string used as a digest byte prefix (at most
///    `Digest::DIGEST_SIZE` bytes once decoded),
///
/// with anything else reported as `InvalidDigestPattern`. The branch order
/// matters: e.g. a `ur:` string is never treated as hex even if it could be.
fn parse_digest_quoted(lex: &mut Lexer<Token>) -> Result<DigestPattern> {
    use bc_components::Digest;
    use bc_ur::URDecodable;

    let src = lex.remainder();
    for (i, ch) in src.char_indices() {
        if ch == '\'' {
            let content = &src[..i];
            // Bump past content + closing quote first, so every error span
            // below covers the whole quoted token.
            lex.bump(i + 1);
            if content.is_empty() {
                return Err(Error::InvalidDigestPattern(
                    "empty content".to_string(),
                    lex.span(),
                ));
            }

            // Case 1: full digest expressed as a UR string.
            if content.starts_with("ur:") {
                match Digest::from_ur_string(content) {
                    Ok(digest) => return Ok(DigestPattern::digest(digest)),
                    Err(_) => {
                        return Err(Error::InvalidUr(
                            content.to_string(),
                            lex.span(),
                        ));
                    }
                }
            }

            // Case 2: `/regex/` over raw digest bytes. `len() > 2` rules
            // out the degenerate empty pattern `//`.
            if content.starts_with('/')
                && content.ends_with('/')
                && content.len() > 2
            {
                let regex_content = &content[1..content.len() - 1];
                match regex::bytes::Regex::new(regex_content) {
                    Ok(regex) => return Ok(DigestPattern::binary_regex(regex)),
                    Err(_) => return Err(Error::InvalidRegex(lex.span())),
                }
            }

            // Case 3: hex prefix. Requires an even number of hex digits and
            // a decoded length no greater than a full digest.
            if content.chars().all(|c| c.is_ascii_hexdigit()) {
                if content.len() % 2 == 0 {
                    match hex::decode(content) {
                        Ok(bytes) => {
                            if bytes.len() <= Digest::DIGEST_SIZE {
                                return Ok(DigestPattern::prefix(bytes));
                            } else {
                                return Err(Error::InvalidHexString(
                                    lex.span(),
                                ));
                            }
                        }
                        Err(_) => {
                            return Err(Error::InvalidHexString(lex.span()));
                        }
                    }
                } else {
                    return Err(Error::InvalidHexString(lex.span()));
                }
            }

            // None of the recognized forms matched.
            return Err(Error::InvalidDigestPattern(
                content.to_string(),
                lex.span(),
            ));
        }
    }

    Err(Error::UnterminatedDigestQuoted(lex.span()))
}
396
/// Callback for `date'…'` tokens. Scans to the closing `'` and interprets
/// the delimited content as one of:
///
/// - `/regex/` — a text regex over the date's string form,
/// - `...date` — "latest" bound (everything up to and including `date`),
/// - `date...` — "earliest" bound (everything from `date` on),
/// - `a...b` — an inclusive date range,
/// - otherwise a single date value.
///
/// Dates are parsed as dCBOR items and converted via `Date::try_from`; any
/// failure maps to `InvalidDateFormat` over the token's span.
fn parse_date_quoted(
    lex: &mut Lexer<Token>,
) -> Result<crate::pattern::DatePattern> {
    use dcbor_parse::parse_dcbor_item;

    let src = lex.remainder();
    for (i, ch) in src.char_indices() {
        if ch == '\'' {
            let content = &src[..i];
            // Bump past content + closing quote first, so error spans
            // cover the whole quoted token.
            lex.bump(i + 1);
            if content.is_empty() {
                return Err(Error::InvalidDateFormat(lex.span()));
            }

            // `/regex/` form; `len() > 2` rules out the empty pattern `//`.
            if content.starts_with('/')
                && content.ends_with('/')
                && content.len() > 2
            {
                let regex_content = &content[1..content.len() - 1];
                match regex::Regex::new(regex_content) {
                    Ok(regex) => {
                        return Ok(crate::pattern::DatePattern::regex(regex));
                    }
                    Err(_) => return Err(Error::InvalidRegex(lex.span())),
                }
            }

            // Range forms — checked prefix first, then suffix, then the
            // two-sided split, so `...d` and `d...` are not mistaken for
            // a malformed two-sided range.
            if content.contains("...") {
                if let Some(iso_str) = content.strip_prefix("...") {
                    // `...date` — match anything at or before `date`.
                    match parse_dcbor_item(iso_str) {
                        Ok(cbor) => match Date::try_from(cbor) {
                            Ok(date) => {
                                return Ok(
                                    crate::pattern::DatePattern::latest(date),
                                );
                            }
                            Err(_) => {
                                return Err(Error::InvalidDateFormat(
                                    lex.span(),
                                ));
                            }
                        },
                        Err(_) => {
                            return Err(Error::InvalidDateFormat(lex.span()));
                        }
                    }
                } else if let Some(iso_str) = content.strip_suffix("...") {
                    // `date...` — match anything at or after `date`.
                    match parse_dcbor_item(iso_str) {
                        Ok(cbor) => match Date::try_from(cbor) {
                            Ok(date) => {
                                return Ok(
                                    crate::pattern::DatePattern::earliest(date),
                                );
                            }
                            Err(_) => {
                                return Err(Error::InvalidDateFormat(
                                    lex.span(),
                                ));
                            }
                        },
                        Err(_) => {
                            return Err(Error::InvalidDateFormat(lex.span()));
                        }
                    }
                } else {
                    // `a...b` — must split into exactly two parts.
                    let parts: Vec<&str> = content.split("...").collect();
                    if parts.len() == 2 {
                        let start_date = match parse_dcbor_item(parts[0]) {
                            Ok(cbor) => match Date::try_from(cbor) {
                                Ok(date) => date,
                                Err(_) => {
                                    return Err(Error::InvalidDateFormat(
                                        lex.span(),
                                    ));
                                }
                            },
                            Err(_) => {
                                return Err(Error::InvalidDateFormat(
                                    lex.span(),
                                ));
                            }
                        };
                        let end_date = match parse_dcbor_item(parts[1]) {
                            Ok(cbor) => match Date::try_from(cbor) {
                                Ok(date) => date,
                                Err(_) => {
                                    return Err(Error::InvalidDateFormat(
                                        lex.span(),
                                    ));
                                }
                            },
                            Err(_) => {
                                return Err(Error::InvalidDateFormat(
                                    lex.span(),
                                ));
                            }
                        };
                        return Ok(crate::pattern::DatePattern::range(
                            start_date..=end_date,
                        ));
                    } else {
                        return Err(Error::InvalidDateFormat(lex.span()));
                    }
                }
            }

            // Plain single date value.
            match parse_dcbor_item(content) {
                Ok(cbor) => match Date::try_from(cbor) {
                    Ok(date) => {
                        return Ok(crate::pattern::DatePattern::value(date));
                    }
                    Err(_) => return Err(Error::InvalidDateFormat(lex.span())),
                },
                Err(_) => return Err(Error::InvalidDateFormat(lex.span())),
            }
        }
    }

    Err(Error::UnterminatedDateQuoted(lex.span()))
}
529
530fn parse_brace_open(lex: &mut Lexer<Token>) -> Token {
532 let remainder = lex.remainder();
533
534 let chars = remainder.chars();
536 let mut pos = 0;
537
538 for ch in chars {
540 if !matches!(ch, ' ' | '\t' | '\n' | '\r' | '\u{0c}') {
541 if ch.is_ascii_digit() {
545 if looks_like_range_pattern(&remainder[pos..]) {
547 let quantifier_result = parse_range_from_remainder(lex);
548 return Token::Range(quantifier_result);
549 }
550 }
551 break;
553 }
554 pos += ch.len_utf8();
555 }
556
557 Token::BraceOpen
558}
559
/// Heuristic lookahead used by `parse_brace_open`: decides whether the text
/// following a `{` looks like a repetition quantifier (`{n}`, `{n,}`,
/// `{n,m}`, with optional interior whitespace) rather than a map pattern.
///
/// Returns `false` when the first non-whitespace character is not a digit,
/// when a `:` follows the number (map key/value separator), or when the
/// input ends before a `,` or `}` terminator is seen.
fn looks_like_range_pattern(content: &str) -> bool {
    // Same whitespace class the lexer skips.
    fn is_ws(c: char) -> bool {
        matches!(c, ' ' | '\t' | '\n' | '\r' | '\u{0c}')
    }

    let mut rest = content.chars();

    // Step 1: skip leading whitespace; the first real character must be a
    // digit.
    let mut found_digit = false;
    for c in rest.by_ref() {
        if is_ws(c) {
            continue;
        }
        if !c.is_ascii_digit() {
            return false;
        }
        found_digit = true;
        break;
    }
    if !found_digit {
        return false;
    }

    // Step 2: consume the rest of the first number, then classify whatever
    // terminates it.
    for c in rest.by_ref() {
        if c.is_ascii_digit() {
            continue;
        }
        if c == ':' {
            // `{digit:` is a map pattern, never a quantifier.
            return false;
        }
        if is_ws(c) {
            // Whitespace after the number: the next non-whitespace
            // character decides; running off the end means no terminator.
            for n in rest.by_ref() {
                if is_ws(n) {
                    continue;
                }
                return n == ',' || n == '}';
            }
            return false;
        }
        return c == ',' || c == '}';
    }

    // Digits ran to the end of input: no terminator seen.
    false
}
612
/// Parses a quantifier body (`{n}`, `{n,}`, `{n,m}`) from the text after the
/// already-consumed `{`, including the closing `}` and an optional trailing
/// reluctance marker (`?` = lazy, `+` = possessive; default greedy).
///
/// Only called after `looks_like_range_pattern` approved the lookahead.
/// Bumps the lexer past everything it consumes; also validates `min <= max`
/// for the two-bound form.
fn parse_range_from_remainder(lex: &mut Lexer<Token>) -> Result<Quantifier> {
    let remainder = lex.remainder();
    // Advances `pos` past any run of lexer whitespace in `s`.
    fn skip_ws(s: &str, pos: &mut usize) {
        while let Some(ch) = s[*pos..].chars().next() {
            if matches!(ch, ' ' | '\t' | '\n' | '\r' | '\u{0c}') {
                *pos += ch.len_utf8();
            } else {
                break;
            }
        }
    }

    let mut pos = 0;

    skip_ws(remainder, &mut pos);

    // The minimum bound is mandatory and must start with a digit.
    if !remainder[pos..]
        .chars()
        .next()
        .is_some_and(|c| c.is_ascii_digit())
    {
        return Err(Error::InvalidRange(lex.span()));
    }

    // Scan the digit run forming the minimum bound.
    let start = pos;
    while let Some(ch) = remainder[pos..].chars().next() {
        if ch.is_ascii_digit() {
            pos += ch.len_utf8();
        } else {
            break;
        }
    }

    let min: usize = remainder[start..pos]
        .parse()
        .map_err(|_| Error::InvalidRange(lex.span()))?;

    skip_ws(remainder, &mut pos);

    // `None` encodes an open-ended upper bound (`{min,}`).
    let max: Option<usize>;

    match remainder[pos..].chars().next() {
        Some(',') => {
            pos += 1;
            skip_ws(remainder, &mut pos);

            match remainder[pos..].chars().next() {
                // `{min,}` — unbounded above.
                Some('}') => {
                    pos += 1;
                    max = None;
                }
                // `{min,max}` — scan the second number.
                Some(ch) if ch.is_ascii_digit() => {
                    let start = pos;
                    while let Some(ch) = remainder[pos..].chars().next() {
                        if ch.is_ascii_digit() {
                            pos += ch.len_utf8();
                        } else {
                            break;
                        }
                    }
                    if start == pos {
                        return Err(Error::InvalidRange(lex.span()));
                    }
                    let m: usize = remainder[start..pos]
                        .parse()
                        .map_err(|_| Error::InvalidRange(lex.span()))?;
                    skip_ws(remainder, &mut pos);
                    // A closing brace must follow the second number.
                    if !matches!(remainder[pos..].chars().next(), Some('}')) {
                        return Err(Error::InvalidRange(lex.span()));
                    }
                    pos += 1;
                    max = Some(m);
                }
                _ => return Err(Error::InvalidRange(lex.span())),
            }
        }
        // `{n}` — exact repetition count.
        Some('}') => {
            pos += 1;
            max = Some(min);
        }
        _ => return Err(Error::InvalidRange(lex.span())),
    }

    // Optional reluctance suffix directly after the closing brace.
    let mode = match remainder[pos..].chars().next() {
        Some('?') => {
            pos += 1;
            Reluctance::Lazy
        }
        Some('+') => {
            pos += 1;
            Reluctance::Possessive
        }
        _ => Reluctance::Greedy,
    };

    // Consume everything parsed so the lexer resumes after the quantifier.
    lex.bump(pos);

    if let Some(max) = max {
        if min > max {
            return Err(Error::InvalidRange(lex.span()));
        }
        Ok(Quantifier::new(min..=max, mode))
    } else {
        Ok(Quantifier::new(min.., mode))
    }
}
728
729fn parse_single_quoted(lex: &mut Lexer<Token>) -> Result<String> {
731 let src = lex.remainder(); let mut escape = false;
733 let mut result = String::new();
734
735 for (i, ch) in src.char_indices() {
736 match (ch, escape) {
737 ('\\', false) => escape = true, ('\'', false) => {
739 lex.bump(i + 1); return Ok(result);
742 }
743 (c, true) => {
744 match c {
746 '\'' => result.push('\''),
747 '\\' => result.push('\\'),
748 'n' => result.push('\n'),
749 'r' => result.push('\r'),
750 't' => result.push('\t'),
751 _ => {
752 result.push('\\');
753 result.push(c);
754 }
755 }
756 escape = false;
757 }
758 (c, false) => {
759 result.push(c);
760 escape = false;
761 }
762 }
763 }
764
765 Err(Error::UnterminatedString(lex.span()))
767}
768
#[cfg(test)]
mod tests {
    use super::*;

    /// Single-character operators, keywords, and literal keywords each lex
    /// to their dedicated token variant.
    #[test]
    fn test_basic_tokens() {
        assert_eq!(Token::lexer("&").next(), Some(Ok(Token::And)));
        assert_eq!(Token::lexer("|").next(), Some(Ok(Token::Or)));
        assert_eq!(Token::lexer("!").next(), Some(Ok(Token::Not)));
        assert_eq!(Token::lexer("*").next(), Some(Ok(Token::RepeatZeroOrMore)));
        assert_eq!(Token::lexer("+").next(), Some(Ok(Token::RepeatOneOrMore)));
        assert_eq!(Token::lexer("?").next(), Some(Ok(Token::RepeatZeroOrOne)));

        assert_eq!(Token::lexer("tagged").next(), Some(Ok(Token::Tagged)));

        assert_eq!(Token::lexer("bool").next(), Some(Ok(Token::Bool)));
        assert_eq!(Token::lexer("bstr").next(), Some(Ok(Token::ByteString)));
        assert_eq!(Token::lexer("text").next(), Some(Ok(Token::Text)));
        assert_eq!(Token::lexer("number").next(), Some(Ok(Token::Number)));

        assert_eq!(Token::lexer("true").next(), Some(Ok(Token::BoolTrue)));
        assert_eq!(Token::lexer("false").next(), Some(Ok(Token::BoolFalse)));
        assert_eq!(Token::lexer("NaN").next(), Some(Ok(Token::NaN)));
    }

    /// Tokens with payloads: capture-group names and regex literals,
    /// including escaped `/` inside the regex body and back-to-back
    /// regex tokens separated by whitespace.
    #[test]
    fn test_complex_tokens() {
        let mut lexer = Token::lexer("@name");
        if let Some(Ok(Token::GroupName(name))) = lexer.next() {
            // The leading `@` is stripped by the callback.
            assert_eq!(name, "name");
        } else {
            panic!("Failed to parse group name");
        }

        let mut lexer = Token::lexer("/[a-z]+/");
        if let Some(Ok(Token::Regex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, "[a-z]+");
        } else {
            panic!("Failed to parse regex");
        }

        // Escaped slashes stay escaped in the returned pattern text; an
        // empty regex `//` and a single-space regex are both valid.
        let mut lx = Token::lexer(r"/abc\/def/ / / // /a\//");
        assert_eq!(
            lx.next(),
            Some(Ok(Token::Regex(Ok("abc\\/def".to_string()))))
        );
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok(" ".to_string())))));
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok("".to_string())))));
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok("a\\/".to_string())))));
        assert_eq!(lx.next(), None);
    }

    /// Hex byte strings (including the empty `h''`) and `h'/…/'`
    /// binary-regex literals, with and without escaped `/`.
    #[test]
    fn test_hex_tokens() {
        let mut lexer = Token::lexer("h'deadbeef'");
        if let Some(Ok(Token::HexString(Ok(bytes)))) = lexer.next() {
            assert_eq!(bytes, vec![0xde, 0xad, 0xbe, 0xef]);
        } else {
            panic!("Failed to parse hex string");
        }

        let mut lexer = Token::lexer("h''");
        if let Some(Ok(Token::HexString(Ok(bytes)))) = lexer.next() {
            assert_eq!(bytes, vec![]);
        } else {
            panic!("Failed to parse empty hex string");
        }

        let mut lexer = Token::lexer("h'/^[0-9]+$/'");
        if let Some(Ok(Token::HexRegex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, "^[0-9]+$");
        } else {
            panic!("Failed to parse hex regex");
        }

        let mut lexer = Token::lexer(r"h'/a\/b/'");
        if let Some(Ok(Token::HexRegex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, r"a\/b");
        } else {
            panic!("Failed to parse hex regex with escaped slash");
        }
    }

    /// Number literals in integer, zero, negative, decimal, and scientific
    /// forms all lex to `NumberLiteral` carrying the f64 value.
    #[test]
    fn test_number_literals() {
        let mut lexer = Token::lexer("42");
        let token = lexer.next();
        println!("Token for '42': {:?}", token);
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = token {
            assert_eq!(value, 42.0);
        } else {
            panic!("Failed to parse integer literal");
        }

        let mut lexer = Token::lexer("0");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 0.0);
        } else {
            panic!("Failed to parse zero literal");
        }

        let mut lexer = Token::lexer("-10");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, -10.0);
        } else {
            panic!("Failed to parse negative literal");
        }

        let mut lexer = Token::lexer("3.2222");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 3.2222);
        } else {
            panic!("Failed to parse float literal");
        }

        let mut lexer = Token::lexer("1e5");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 100000.0);
        } else {
            panic!("Failed to parse scientific notation literal");
        }
    }

    /// `{…}` quantifier forms — exact, lower-bounded, and bounded ranges —
    /// with each reluctance suffix (greedy, `?` lazy, `+` possessive) and
    /// assorted interior whitespace.
    #[test]
    fn test_range() {
        struct RangeTestCase {
            input: &'static str,
            expected: Quantifier,
        }
        let test_cases = vec![
            RangeTestCase {
                input: "{1, 5}",
                expected: Quantifier::new(1..=5, Reluctance::default()),
            },
            RangeTestCase {
                input: "{ 3 , }",
                expected: Quantifier::new(3.., Reluctance::default()),
            },
            RangeTestCase {
                input: "{ 5 }",
                expected: Quantifier::new(5..=5, Reluctance::default()),
            },
            RangeTestCase {
                input: "{1, 5 }?",
                expected: Quantifier::new(1..=5, Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{ 3 , }?",
                expected: Quantifier::new(3.., Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{5}?",
                expected: Quantifier::new(5..=5, Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{ 1,5}+",
                expected: Quantifier::new(1..=5, Reluctance::Possessive),
            },
            RangeTestCase {
                input: "{ 3 , }+",
                expected: Quantifier::new(3.., Reluctance::Possessive),
            },
            RangeTestCase {
                input: "{5}+",
                expected: Quantifier::new(5..=5, Reluctance::Possessive),
            },
        ];

        // Collect failures so one bad case doesn't hide the rest.
        let mut failed_cases = vec![];

        for test_case in test_cases {
            let mut lexer = Token::lexer(test_case.input);
            if let Some(Ok(Token::Range(Ok(range)))) = lexer.next() {
                assert_eq!(range, test_case.expected);
            } else {
                failed_cases.push(test_case.input);
            }
        }

        if !failed_cases.is_empty() {
            panic!("Failed to parse ranges: {:?}", failed_cases);
        }
    }
}