1use dcbor::prelude::*;
2use dcbor_parse::parse_dcbor_item_partial;
3use logos::{Lexer, Logos};
4
5use crate::{DigestPattern, Error, Quantifier, Reluctance, Result};
6
/// Lexical tokens for the dCBOR pattern syntax.
///
/// Generated by [`logos`]; whitespace is skipped automatically via the
/// `#[logos(skip ...)]` attribute. Variants carrying a `Result` payload are
/// produced by callbacks that validate the matched text (regex compilation,
/// hex decoding, date parsing, ...) during lexing, so a malformed literal
/// still yields a token — with the error carried inside.
#[derive(Debug, Clone, Logos, PartialEq)]
#[rustfmt::skip]
#[logos(error = Error)]
#[logos(skip r"[ \t\r\n\f]+")]
pub enum Token {
    // Logical operators.
    #[token("&")]
    And,

    #[token("|")]
    Or,

    #[token("!")]
    Not,

    // Repetition operators: greedy, lazy (`?` suffix), possessive (`+` suffix).
    #[token("*")]
    RepeatZeroOrMore,

    #[token("*?")]
    RepeatZeroOrMoreLazy,

    #[token("*+")]
    RepeatZeroOrMorePossessive,

    #[token("+")]
    RepeatOneOrMore,

    #[token("+?")]
    RepeatOneOrMoreLazy,

    #[token("++")]
    RepeatOneOrMorePossessive,

    #[token("?")]
    RepeatZeroOrOne,

    #[token("??")]
    RepeatZeroOrOneLazy,

    #[token("?+")]
    RepeatZeroOrOnePossessive,

    // Type/structure keywords.
    #[token("tagged")]
    Tagged,

    #[token("array")]
    Array,

    #[token("map")]
    Map,

    #[token("bool")]
    Bool,

    #[token("bstr")]
    ByteString,

    #[token("date")]
    Date,

    // `date'...'` literal; the callback scans and parses the quoted content.
    #[token("date'", parse_date_quoted)]
    DateQuoted(Result<crate::pattern::DatePattern>),

    #[token("known")]
    Known,

    #[token("null")]
    Null,

    #[token("number")]
    Number,

    #[token("text")]
    Text,

    #[token("digest")]
    Digest,

    // `digest'...'` literal; the callback scans and parses the quoted content.
    #[token("digest'", parse_digest_quoted)]
    DigestQuoted(Result<DigestPattern>),

    #[token("search")]
    Search,

    // Value literals.
    #[token("true")]
    BoolTrue,

    #[token("false")]
    BoolFalse,

    #[token("NaN")]
    NaN,

    #[token("Infinity")]
    Infinity,

    #[token("-Infinity")]
    NegInfinity,

    // Grouping and punctuation.
    #[token("(")]
    ParenOpen,

    #[token(")")]
    ParenClose,

    #[token("[")]
    BracketOpen,

    #[token("]")]
    BracketClose,

    // `{` may open a map pattern or a `{min,max}` quantifier; the callback
    // decides and may return `Token::Range` instead of `BraceOpen`.
    #[token("{", parse_brace_open)]
    BraceOpen,

    #[token("}")]
    BraceClose,

    #[token(",")]
    Comma,

    #[token(":")]
    Colon,

    #[token("...")]
    Ellipsis,

    // Comparison operators (two-character forms take precedence by length).
    #[token(">=")]
    GreaterThanOrEqual,

    #[token("<=")]
    LessThanOrEqual,

    #[token(">", priority = 1)]
    GreaterThan,

    #[token("<")]
    LessThan,

    // JSON-style number literal, converted to f64 via dCBOR parsing.
    #[regex(r"-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?", callback = parse_number)]
    NumberLiteral(Result<f64>),

    // `@name` capture-group identifier; the leading `@` is stripped.
    #[regex(r"@[a-zA-Z_][a-zA-Z0-9_]*", |lex|
        lex.slice()[1..].to_string()
    )]
    GroupName(String),

    // Quoted/delimited literals; callbacks scan to the closing delimiter.
    #[token("\"", parse_string)]
    StringLiteral(Result<String>),

    #[token("'", parse_single_quoted)]
    SingleQuoted(Result<String>),

    #[token("/", parse_regex)]
    Regex(Result<String>),

    #[token("h'", parse_hex_string)]
    HexString(Result<Vec<u8>>),

    #[token("h'/", parse_hex_regex)]
    HexRegex(Result<String>),

    // Never matched directly: synthesized by `parse_brace_open` when `{`
    // introduces a `{n}` / `{n,}` / `{n,m}` quantifier.
    Range(Result<Quantifier>),
}
176
177fn parse_number(lex: &mut Lexer<Token>) -> Result<f64> {
179 let number_str = lex.slice();
180 match parse_dcbor_item_partial(number_str) {
181 Ok((cbor, _)) => match f64::try_from_cbor(&cbor) {
182 Ok(value) => Ok(value),
183 Err(_) => Err(Error::InvalidNumberFormat(lex.span())),
184 },
185 Err(_) => Err(Error::InvalidNumberFormat(lex.span())),
186 }
187}
188
189fn parse_regex(lex: &mut Lexer<Token>) -> Result<String> {
191 let src = lex.remainder(); let mut escape = false;
193
194 for (i, ch) in src.char_indices() {
195 match (ch, escape) {
196 ('\\', false) => escape = true, ('/', false) => {
198 lex.bump(i + 1); let content = src[..i].to_owned();
201 match regex::Regex::new(&content) {
202 Ok(_) => return Ok(content),
203 Err(_) => return Err(Error::InvalidRegex(lex.span())),
204 }
205 }
206 _ => escape = false, }
208 }
209
210 Err(Error::UnterminatedRegex(lex.span()))
212}
213
214fn parse_string(lex: &mut Lexer<Token>) -> Result<String> {
216 let src = lex.remainder(); let mut escape = false;
218 let mut result = String::new();
219
220 for (i, ch) in src.char_indices() {
221 match (ch, escape) {
222 ('\\', false) => escape = true, ('"', false) => {
224 lex.bump(i + 1); return Ok(result);
227 }
228 (c, true) => {
229 match c {
231 '"' => result.push('"'),
232 '\\' => result.push('\\'),
233 'n' => result.push('\n'),
234 'r' => result.push('\r'),
235 't' => result.push('\t'),
236 _ => {
237 result.push('\\');
238 result.push(c);
239 }
240 }
241 escape = false;
242 }
243 (c, false) => {
244 result.push(c);
245 escape = false;
246 }
247 }
248 }
249
250 Err(Error::UnterminatedString(lex.span()))
252}
253
254fn parse_hex_string(lex: &mut Lexer<Token>) -> Result<Vec<u8>> {
256 let src = lex.remainder(); for (i, ch) in src.char_indices() {
260 match ch {
261 '\'' => {
262 let hex_content = &src[..i];
264 match hex::decode(hex_content) {
265 Ok(bytes) => {
266 lex.bump(i + 1); return Ok(bytes);
268 }
269 Err(_) => return Err(Error::InvalidHexString(lex.span())),
270 }
271 }
272 c if c.is_ascii_hexdigit() => {
273 }
275 _ => {
276 return Err(Error::InvalidHexString(lex.span()));
278 }
279 }
280 }
281
282 Err(Error::UnterminatedHexString(lex.span()))
284}
285
286fn parse_hex_regex(lex: &mut Lexer<Token>) -> Result<String> {
288 let src = lex.remainder(); let mut escape = false;
290
291 for (i, ch) in src.char_indices() {
292 match (ch, escape) {
293 ('\\', false) => escape = true, ('/', false) => {
295 let remainder = &src[i + 1..];
297 if remainder.starts_with('\'') {
298 lex.bump(i + 2); let content = src[..i].to_owned();
301 match regex::bytes::Regex::new(&content) {
302 Ok(_) => return Ok(content),
303 Err(_) => return Err(Error::InvalidRegex(lex.span())),
304 }
305 }
306 escape = false;
308 }
309 _ => escape = false, }
311 }
312
313 Err(Error::UnterminatedRegex(lex.span()))
315}
316
/// Callback for `digest'...'`: scans to the closing `'` and interprets the
/// quoted content as one of:
/// * `ur:...`  — a digest UR,
/// * `/.../`   — a binary regex over the digest bytes,
/// * hex       — an even-length hex prefix of at most `Digest::DIGEST_SIZE`
///   bytes.
///
/// Returns `UnterminatedDigestQuoted` if no closing quote is found.
fn parse_digest_quoted(lex: &mut Lexer<Token>) -> Result<DigestPattern> {
    use bc_components::Digest;
    use bc_ur::URDecodable;

    let src = lex.remainder();
    for (i, ch) in src.char_indices() {
        if ch == '\'' {
            let content = &src[..i];
            // Consume through the closing quote before reporting errors so
            // error spans cover the whole quoted literal.
            lex.bump(i + 1);
            if content.is_empty() {
                return Err(Error::InvalidDigestPattern(
                    "empty content".to_string(),
                    lex.span(),
                ));
            }

            // UR form: decode the whole content as a digest UR.
            if content.starts_with("ur:") {
                match Digest::from_ur_string(content) {
                    Ok(digest) => return Ok(DigestPattern::digest(digest)),
                    Err(_) => {
                        return Err(Error::InvalidUr(
                            content.to_string(),
                            lex.span(),
                        ));
                    }
                }
            }

            // Regex form: `/.../` with a non-empty body between the slashes.
            if content.starts_with('/')
                && content.ends_with('/')
                && content.len() > 2
            {
                let regex_content = &content[1..content.len() - 1];
                match regex::bytes::Regex::new(regex_content) {
                    Ok(regex) => return Ok(DigestPattern::binary_regex(regex)),
                    Err(_) => return Err(Error::InvalidRegex(lex.span())),
                }
            }

            // Hex-prefix form: must be even length and decode to at most a
            // full digest's worth of bytes.
            if content.chars().all(|c| c.is_ascii_hexdigit()) {
                if content.len() % 2 == 0 {
                    match hex::decode(content) {
                        Ok(bytes) => {
                            if bytes.len() <= Digest::DIGEST_SIZE {
                                return Ok(DigestPattern::prefix(bytes));
                            } else {
                                return Err(Error::InvalidHexString(
                                    lex.span(),
                                ));
                            }
                        }
                        Err(_) => {
                            return Err(Error::InvalidHexString(lex.span()));
                        }
                    }
                } else {
                    return Err(Error::InvalidHexString(lex.span()));
                }
            }

            // None of the recognized forms matched.
            return Err(Error::InvalidDigestPattern(
                content.to_string(),
                lex.span(),
            ));
        }
    }

    Err(Error::UnterminatedDigestQuoted(lex.span()))
}
396
/// Callback for `date'...'`: scans to the closing `'` and interprets the
/// quoted content as one of:
/// * `/.../`     — a regex on the date's textual form,
/// * `...date`   — latest bound (anything up to `date`),
/// * `date...`   — earliest bound (anything from `date` on),
/// * `a...b`     — inclusive range,
/// * `date`      — exact date value.
///
/// Returns `UnterminatedDateQuoted` if no closing quote is found.
fn parse_date_quoted(
    lex: &mut Lexer<Token>,
) -> Result<crate::pattern::DatePattern> {
    use dcbor_parse::parse_dcbor_item;

    let src = lex.remainder();
    for (i, ch) in src.char_indices() {
        if ch == '\'' {
            let content = &src[..i];
            // Consume through the closing quote before reporting errors so
            // error spans cover the whole quoted literal.
            lex.bump(i + 1);
            if content.is_empty() {
                return Err(Error::InvalidDateFormat(lex.span()));
            }

            // Regex form: `/.../` with a non-empty body between the slashes.
            if content.starts_with('/')
                && content.ends_with('/')
                && content.len() > 2
            {
                let regex_content = &content[1..content.len() - 1];
                match regex::Regex::new(regex_content) {
                    Ok(regex) => {
                        return Ok(crate::pattern::DatePattern::regex(regex));
                    }
                    Err(_) => return Err(Error::InvalidRegex(lex.span())),
                }
            }

            // Bound/range forms, all using a `...` separator.
            if content.contains("...") {
                if let Some(iso_str) = content.strip_prefix("...") {
                    // `...date` — match anything up to and including `date`.
                    match parse_dcbor_item(iso_str) {
                        Ok(cbor) => match Date::try_from(cbor) {
                            Ok(date) => {
                                return Ok(
                                    crate::pattern::DatePattern::latest(date),
                                );
                            }
                            Err(_) => {
                                return Err(Error::InvalidDateFormat(
                                    lex.span(),
                                ));
                            }
                        },
                        Err(_) => {
                            return Err(Error::InvalidDateFormat(lex.span()));
                        }
                    }
                } else if let Some(iso_str) = content.strip_suffix("...") {
                    // `date...` — match anything from `date` onward.
                    match parse_dcbor_item(iso_str) {
                        Ok(cbor) => match Date::try_from(cbor) {
                            Ok(date) => {
                                return Ok(
                                    crate::pattern::DatePattern::earliest(date),
                                );
                            }
                            Err(_) => {
                                return Err(Error::InvalidDateFormat(
                                    lex.span(),
                                ));
                            }
                        },
                        Err(_) => {
                            return Err(Error::InvalidDateFormat(lex.span()));
                        }
                    }
                } else {
                    // `a...b` — inclusive range; exactly one separator
                    // (two parts) is required.
                    let parts: Vec<&str> = content.split("...").collect();
                    if parts.len() == 2 {
                        let start_date = match parse_dcbor_item(parts[0]) {
                            Ok(cbor) => match Date::try_from(cbor) {
                                Ok(date) => date,
                                Err(_) => {
                                    return Err(Error::InvalidDateFormat(
                                        lex.span(),
                                    ));
                                }
                            },
                            Err(_) => {
                                return Err(Error::InvalidDateFormat(
                                    lex.span(),
                                ));
                            }
                        };
                        let end_date = match parse_dcbor_item(parts[1]) {
                            Ok(cbor) => match Date::try_from(cbor) {
                                Ok(date) => date,
                                Err(_) => {
                                    return Err(Error::InvalidDateFormat(
                                        lex.span(),
                                    ));
                                }
                            },
                            Err(_) => {
                                return Err(Error::InvalidDateFormat(
                                    lex.span(),
                                ));
                            }
                        };
                        return Ok(crate::pattern::DatePattern::range(
                            start_date..=end_date,
                        ));
                    } else {
                        return Err(Error::InvalidDateFormat(lex.span()));
                    }
                }
            }

            // Plain form: the whole content is a single date value.
            match parse_dcbor_item(content) {
                Ok(cbor) => match Date::try_from(cbor) {
                    Ok(date) => {
                        return Ok(crate::pattern::DatePattern::value(date));
                    }
                    Err(_) => return Err(Error::InvalidDateFormat(lex.span())),
                },
                Err(_) => return Err(Error::InvalidDateFormat(lex.span())),
            }
        }
    }

    Err(Error::UnterminatedDateQuoted(lex.span()))
}
529
530fn parse_brace_open(lex: &mut Lexer<Token>) -> Token {
532 let remainder = lex.remainder();
533
534 let mut chars = remainder.chars();
536 let mut pos = 0;
537
538 while let Some(ch) = chars.next() {
540 if !matches!(ch, ' ' | '\t' | '\n' | '\r' | '\u{0c}') {
541 if ch.is_ascii_digit() {
545 if looks_like_range_pattern(&remainder[pos..]) {
547 let quantifier_result = parse_range_from_remainder(lex);
548 return Token::Range(quantifier_result);
549 }
550 }
551 break;
553 }
554 pos += ch.len_utf8();
555 }
556
557 Token::BraceOpen
558}
559
/// Heuristic: does `content` (the text immediately following a `{`) look
/// like a `{n}` / `{n,}` / `{n,m}` quantifier rather than a map literal?
///
/// The shape checked is: optional whitespace, at least one digit, then —
/// possibly after more whitespace — a `,` or `}`. A `:` at the decision
/// point means "map entry", so the answer is `false`.
fn looks_like_range_pattern(content: &str) -> bool {
    fn is_ws(c: char) -> bool {
        matches!(c, ' ' | '\t' | '\n' | '\r' | '\u{0c}')
    }

    let mut chars = content.chars();

    // The first non-whitespace character must be a digit.
    let mut saw_digit = false;
    for c in chars.by_ref() {
        if is_ws(c) {
            continue;
        }
        saw_digit = c.is_ascii_digit();
        break;
    }
    if !saw_digit {
        return false;
    }

    // Consume the rest of the number; remember the first non-digit.
    let mut boundary = None;
    for c in chars.by_ref() {
        if !c.is_ascii_digit() {
            boundary = Some(c);
            break;
        }
    }

    match boundary {
        // Input ended inside the number: no `,` or `}` was seen.
        None => false,
        // Whitespace after the number: the next non-whitespace char decides;
        // trailing whitespace with no decider is not a range.
        Some(c) if is_ws(c) => {
            for next in chars {
                if is_ws(next) {
                    continue;
                }
                return next == ',' || next == '}';
            }
            false
        }
        // Any other character decides directly (`:` correctly yields false).
        Some(c) => c == ',' || c == '}',
    }
}
616
/// Parses a quantifier body — `min`, `min,`, or `min,max`, the closing `}`,
/// and an optional `?` (lazy) / `+` (possessive) suffix — from the lexer's
/// remainder, bumping the lexer past everything consumed.
///
/// Called by `parse_brace_open` after `looks_like_range_pattern` has
/// confirmed the shape. Returns `InvalidRange` on any structural problem,
/// including `min > max`.
fn parse_range_from_remainder(lex: &mut Lexer<Token>) -> Result<Quantifier> {
    let remainder = lex.remainder();

    // Advances `*pos` past any whitespace in `s` (byte offsets).
    fn skip_ws(s: &str, pos: &mut usize) {
        while let Some(ch) = s[*pos..].chars().next() {
            if matches!(ch, ' ' | '\t' | '\n' | '\r' | '\u{0c}') {
                *pos += ch.len_utf8();
            } else {
                break;
            }
        }
    }

    let mut pos = 0;

    skip_ws(remainder, &mut pos);

    // The quantifier must start with a digit.
    if !remainder[pos..]
        .chars()
        .next()
        .is_some_and(|c| c.is_ascii_digit())
    {
        return Err(Error::InvalidRange(lex.span()));
    }

    // Scan the run of digits that forms `min`.
    let start = pos;
    while let Some(ch) = remainder[pos..].chars().next() {
        if ch.is_ascii_digit() {
            pos += ch.len_utf8();
        } else {
            break;
        }
    }

    let min: usize = remainder[start..pos]
        .parse()
        .map_err(|_| Error::InvalidRange(lex.span()))?;

    skip_ws(remainder, &mut pos);

    let max: Option<usize>;

    match remainder[pos..].chars().next() {
        Some(',') => {
            pos += 1;
            skip_ws(remainder, &mut pos);

            match remainder[pos..].chars().next() {
                // `{min,}` — open-ended upper bound.
                Some('}') => {
                    pos += 1;
                    max = None;
                }
                // `{min,max}` — scan the digits that form `max`.
                Some(ch) if ch.is_ascii_digit() => {
                    let start = pos;
                    while let Some(ch) = remainder[pos..].chars().next() {
                        if ch.is_ascii_digit() {
                            pos += ch.len_utf8();
                        } else {
                            break;
                        }
                    }
                    if start == pos {
                        return Err(Error::InvalidRange(lex.span()));
                    }
                    let m: usize = remainder[start..pos]
                        .parse()
                        .map_err(|_| Error::InvalidRange(lex.span()))?;
                    skip_ws(remainder, &mut pos);
                    // The closing `}` is mandatory after `max`.
                    if !matches!(remainder[pos..].chars().next(), Some('}')) {
                        return Err(Error::InvalidRange(lex.span()));
                    }
                    pos += 1;
                    max = Some(m);
                }
                _ => return Err(Error::InvalidRange(lex.span())),
            }
        }
        // `{min}` — exact count.
        Some('}') => {
            pos += 1;
            max = Some(min);
        }
        _ => return Err(Error::InvalidRange(lex.span())),
    }

    // Optional reluctance suffix directly after the closing brace.
    let mode = match remainder[pos..].chars().next() {
        Some('?') => {
            pos += 1;
            Reluctance::Lazy
        }
        Some('+') => {
            pos += 1;
            Reluctance::Possessive
        }
        _ => Reluctance::Greedy,
    };

    // Commit: advance the lexer past everything consumed.
    lex.bump(pos);

    if let Some(max) = max {
        if min > max {
            return Err(Error::InvalidRange(lex.span()));
        }
        Ok(Quantifier::new(min..=max, mode))
    } else {
        Ok(Quantifier::new(min.., mode))
    }
}
732
733fn parse_single_quoted(lex: &mut Lexer<Token>) -> Result<String> {
735 let src = lex.remainder(); let mut escape = false;
737 let mut result = String::new();
738
739 for (i, ch) in src.char_indices() {
740 match (ch, escape) {
741 ('\\', false) => escape = true, ('\'', false) => {
743 lex.bump(i + 1); return Ok(result);
746 }
747 (c, true) => {
748 match c {
750 '\'' => result.push('\''),
751 '\\' => result.push('\\'),
752 'n' => result.push('\n'),
753 'r' => result.push('\r'),
754 't' => result.push('\t'),
755 _ => {
756 result.push('\\');
757 result.push(c);
758 }
759 }
760 escape = false;
761 }
762 (c, false) => {
763 result.push(c);
764 escape = false;
765 }
766 }
767 }
768
769 Err(Error::UnterminatedString(lex.span()))
771}
772
#[cfg(test)]
mod tests {
    use super::*;

    /// Single- and multi-character operators and keywords lex to the
    /// expected variants.
    #[test]
    fn test_basic_tokens() {
        assert_eq!(Token::lexer("&").next(), Some(Ok(Token::And)));
        assert_eq!(Token::lexer("|").next(), Some(Ok(Token::Or)));
        assert_eq!(Token::lexer("!").next(), Some(Ok(Token::Not)));
        assert_eq!(Token::lexer("*").next(), Some(Ok(Token::RepeatZeroOrMore)));
        assert_eq!(Token::lexer("+").next(), Some(Ok(Token::RepeatOneOrMore)));
        assert_eq!(Token::lexer("?").next(), Some(Ok(Token::RepeatZeroOrOne)));

        assert_eq!(Token::lexer("tagged").next(), Some(Ok(Token::Tagged)));

        assert_eq!(Token::lexer("bool").next(), Some(Ok(Token::Bool)));
        assert_eq!(Token::lexer("bstr").next(), Some(Ok(Token::ByteString)));
        assert_eq!(Token::lexer("text").next(), Some(Ok(Token::Text)));
        assert_eq!(Token::lexer("number").next(), Some(Ok(Token::Number)));

        assert_eq!(Token::lexer("true").next(), Some(Ok(Token::BoolTrue)));
        assert_eq!(Token::lexer("false").next(), Some(Ok(Token::BoolFalse)));
        assert_eq!(Token::lexer("NaN").next(), Some(Ok(Token::NaN)));
    }

    /// Callback-backed tokens: group names and regex literals, including
    /// escaped `/` and empty/whitespace-only regex bodies.
    #[test]
    fn test_complex_tokens() {
        let mut lexer = Token::lexer("@name");
        if let Some(Ok(Token::GroupName(name))) = lexer.next() {
            assert_eq!(name, "name");
        } else {
            panic!("Failed to parse group name");
        }

        let mut lexer = Token::lexer("/[a-z]+/");
        if let Some(Ok(Token::Regex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, "[a-z]+");
        } else {
            panic!("Failed to parse regex");
        }

        // Consecutive regexes; `\/` stays escaped in the returned body.
        let mut lx = Token::lexer(r"/abc\/def/ / / // /a\//");
        assert_eq!(
            lx.next(),
            Some(Ok(Token::Regex(Ok("abc\\/def".to_string()))))
        );
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok(" ".to_string())))));
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok("".to_string())))));
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok("a\\/".to_string())))));
        assert_eq!(lx.next(), None);
    }

    /// Hex byte strings (including empty) and hex byte-regex literals.
    #[test]
    fn test_hex_tokens() {
        let mut lexer = Token::lexer("h'deadbeef'");
        if let Some(Ok(Token::HexString(Ok(bytes)))) = lexer.next() {
            assert_eq!(bytes, vec![0xde, 0xad, 0xbe, 0xef]);
        } else {
            panic!("Failed to parse hex string");
        }

        let mut lexer = Token::lexer("h''");
        if let Some(Ok(Token::HexString(Ok(bytes)))) = lexer.next() {
            assert_eq!(bytes, vec![]);
        } else {
            panic!("Failed to parse empty hex string");
        }

        let mut lexer = Token::lexer("h'/^[0-9]+$/'");
        if let Some(Ok(Token::HexRegex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, "^[0-9]+$");
        } else {
            panic!("Failed to parse hex regex");
        }

        let mut lexer = Token::lexer(r"h'/a\/b/'");
        if let Some(Ok(Token::HexRegex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, r"a\/b");
        } else {
            panic!("Failed to parse hex regex with escaped slash");
        }
    }

    /// Integer, zero, negative, float, and scientific-notation literals all
    /// lex to `NumberLiteral` with the expected f64 value.
    #[test]
    fn test_number_literals() {
        let mut lexer = Token::lexer("42");
        let token = lexer.next();
        println!("Token for '42': {:?}", token);
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = token {
            assert_eq!(value, 42.0);
        } else {
            panic!("Failed to parse integer literal");
        }

        let mut lexer = Token::lexer("0");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 0.0);
        } else {
            panic!("Failed to parse zero literal");
        }

        let mut lexer = Token::lexer("-10");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, -10.0);
        } else {
            panic!("Failed to parse negative literal");
        }

        let mut lexer = Token::lexer("3.2222");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 3.2222);
        } else {
            panic!("Failed to parse float literal");
        }

        let mut lexer = Token::lexer("1e5");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 100000.0);
        } else {
            panic!("Failed to parse scientific notation literal");
        }
    }

    /// Quantifier ranges in all three reluctance modes, with assorted
    /// whitespace placement inside the braces.
    #[test]
    fn test_range() {
        struct RangeTestCase {
            input: &'static str,
            expected: Quantifier,
        }
        let test_cases = vec![
            RangeTestCase {
                input: "{1, 5}",
                expected: Quantifier::new(1..=5, Reluctance::default()),
            },
            RangeTestCase {
                input: "{ 3 , }",
                expected: Quantifier::new(3.., Reluctance::default()),
            },
            RangeTestCase {
                input: "{ 5 }",
                expected: Quantifier::new(5..=5, Reluctance::default()),
            },
            RangeTestCase {
                input: "{1, 5 }?",
                expected: Quantifier::new(1..=5, Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{ 3 , }?",
                expected: Quantifier::new(3.., Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{5}?",
                expected: Quantifier::new(5..=5, Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{ 1,5}+",
                expected: Quantifier::new(1..=5, Reluctance::Possessive),
            },
            RangeTestCase {
                input: "{ 3 , }+",
                expected: Quantifier::new(3.., Reluctance::Possessive),
            },
            RangeTestCase {
                input: "{5}+",
                expected: Quantifier::new(5..=5, Reluctance::Possessive),
            },
        ];

        // Collect failures so one bad case doesn't hide the rest.
        let mut failed_cases = vec![];

        for test_case in test_cases {
            let mut lexer = Token::lexer(test_case.input);
            if let Some(Ok(Token::Range(Ok(range)))) = lexer.next() {
                assert_eq!(range, test_case.expected);
            } else {
                failed_cases.push(test_case.input);
            }
        }

        if !failed_cases.is_empty() {
            panic!("Failed to parse ranges: {:?}", failed_cases);
        }
    }
}