1use dcbor::prelude::*;
2use dcbor_parse::parse_dcbor_item_partial;
3use logos::{Lexer, Logos};
4
5use crate::{DigestPattern, Error, Quantifier, Reluctance, Result};
6
/// Lexical tokens for the dcbor pattern expression language.
///
/// Generated by [`logos`]; runs of blanks/tabs/newlines/form-feeds are
/// skipped automatically. Variants that carry a `Result` run a callback
/// which performs sub-parsing of the literal, so malformed literals are
/// reported as token-level errors with a source span.
#[derive(Debug, Clone, Logos, PartialEq)]
#[rustfmt::skip]
#[logos(error = Error)]
#[logos(skip r"[ \t\r\n\f]+")]
pub enum Token {
    // --- combinators ---

    /// `&` — AND combinator.
    #[token("&")]
    And,

    /// `|` — OR combinator.
    #[token("|")]
    Or,

    /// `!` — NOT combinator.
    #[token("!")]
    Not,

    // --- repetition operators (greedy / lazy / possessive) ---

    #[token("*")]
    RepeatZeroOrMore,

    #[token("*?")]
    RepeatZeroOrMoreLazy,

    #[token("*+")]
    RepeatZeroOrMorePossessive,

    #[token("+")]
    RepeatOneOrMore,

    #[token("+?")]
    RepeatOneOrMoreLazy,

    #[token("++")]
    RepeatOneOrMorePossessive,

    #[token("?")]
    RepeatZeroOrOne,

    #[token("??")]
    RepeatZeroOrOneLazy,

    #[token("?+")]
    RepeatZeroOrOnePossessive,

    // --- keywords ---

    #[token("tagged")]
    Tagged,

    #[token("bool")]
    Bool,

    #[token("bstr")]
    ByteString,

    #[token("date")]
    Date,

    /// `date'...'` — the callback parses the quoted content into a
    /// `DatePattern` (exact date, range, bound, or regex).
    #[token("date'", parse_date_quoted)]
    DateQuoted(Result<crate::pattern::DatePattern>),

    #[token("known")]
    Known,

    #[token("null")]
    Null,

    #[token("number")]
    Number,

    #[token("text")]
    Text,

    #[token("digest")]
    Digest,

    /// `digest'...'` — the callback parses a UR, binary regex, or hex
    /// prefix into a `DigestPattern`.
    #[token("digest'", parse_digest_quoted)]
    DigestQuoted(Result<DigestPattern>),

    #[token("search")]
    Search,

    // --- literal keywords ---

    #[token("true")]
    BoolTrue,

    #[token("false")]
    BoolFalse,

    #[token("NaN")]
    NaN,

    #[token("Infinity")]
    Infinity,

    #[token("-Infinity")]
    NegInfinity,

    // --- punctuation ---

    #[token("(")]
    ParenOpen,

    #[token(")")]
    ParenClose,

    #[token("[")]
    BracketOpen,

    #[token("]")]
    BracketClose,

    /// `{` — the callback peeks ahead: if the brace starts a `{n,m}`
    /// quantifier it returns `Token::Range` instead of `BraceOpen`.
    #[token("{", parse_brace_open)]
    BraceOpen,

    #[token("}")]
    BraceClose,

    #[token(",")]
    Comma,

    #[token(":")]
    Colon,

    #[token("...")]
    Ellipsis,

    // --- comparison operators ---

    #[token(">=")]
    GreaterThanOrEqual,

    #[token("<=")]
    LessThanOrEqual,

    // Lower priority so `>=` wins over `>` followed by `=`.
    #[token(">", priority = 1)]
    GreaterThan,

    #[token("<")]
    LessThan,

    // --- literals with callbacks ---

    /// JSON-style number literal, parsed to `f64` via dcbor.
    #[regex(r"-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?", callback = parse_number)]
    NumberLiteral(Result<f64>),

    /// `@name` — capture-group name (leading `@` stripped).
    #[regex(r"@[a-zA-Z_][a-zA-Z0-9_]*", |lex|
        lex.slice()[1..].to_string()
    )]
    GroupName(String),

    /// `"..."` — double-quoted string with backslash escapes.
    #[token("\"", parse_string)]
    StringLiteral(Result<String>),

    /// `'...'` — single-quoted string with backslash escapes.
    #[token("'", parse_single_quoted)]
    SingleQuoted(Result<String>),

    /// `/.../` — text regex literal.
    #[token("/", parse_regex)]
    Regex(Result<String>),

    /// `h'..'` — hex byte-string literal.
    #[token("h'", parse_hex_string)]
    HexString(Result<Vec<u8>>),

    /// `h'/../'` — binary regex over byte strings.
    #[token("h'/", parse_hex_regex)]
    HexRegex(Result<String>),

    /// `{n}`, `{n,}`, `{n,m}` quantifier. Has no pattern of its own; it is
    /// produced by `parse_brace_open` when `{` is followed by digits.
    Range(Result<Quantifier>),
}
170
171fn parse_number(lex: &mut Lexer<Token>) -> Result<f64> {
173 let number_str = lex.slice();
174 match parse_dcbor_item_partial(number_str) {
175 Ok((cbor, _)) => match f64::try_from_cbor(&cbor) {
176 Ok(value) => Ok(value),
177 Err(_) => Err(Error::InvalidNumberFormat(lex.span())),
178 },
179 Err(_) => Err(Error::InvalidNumberFormat(lex.span())),
180 }
181}
182
183fn parse_regex(lex: &mut Lexer<Token>) -> Result<String> {
185 let src = lex.remainder(); let mut escape = false;
187
188 for (i, ch) in src.char_indices() {
189 match (ch, escape) {
190 ('\\', false) => escape = true, ('/', false) => {
192 lex.bump(i + 1); let content = src[..i].to_owned();
195 match regex::Regex::new(&content) {
196 Ok(_) => return Ok(content),
197 Err(_) => return Err(Error::InvalidRegex(lex.span())),
198 }
199 }
200 _ => escape = false, }
202 }
203
204 Err(Error::UnterminatedRegex(lex.span()))
206}
207
208fn parse_string(lex: &mut Lexer<Token>) -> Result<String> {
210 let src = lex.remainder(); let mut escape = false;
212 let mut result = String::new();
213
214 for (i, ch) in src.char_indices() {
215 match (ch, escape) {
216 ('\\', false) => escape = true, ('"', false) => {
218 lex.bump(i + 1); return Ok(result);
221 }
222 (c, true) => {
223 match c {
225 '"' => result.push('"'),
226 '\\' => result.push('\\'),
227 'n' => result.push('\n'),
228 'r' => result.push('\r'),
229 't' => result.push('\t'),
230 _ => {
231 result.push('\\');
232 result.push(c);
233 }
234 }
235 escape = false;
236 }
237 (c, false) => {
238 result.push(c);
239 escape = false;
240 }
241 }
242 }
243
244 Err(Error::UnterminatedString(lex.span()))
246}
247
248fn parse_hex_string(lex: &mut Lexer<Token>) -> Result<Vec<u8>> {
250 let src = lex.remainder(); for (i, ch) in src.char_indices() {
254 match ch {
255 '\'' => {
256 let hex_content = &src[..i];
258 match hex::decode(hex_content) {
259 Ok(bytes) => {
260 lex.bump(i + 1); return Ok(bytes);
262 }
263 Err(_) => return Err(Error::InvalidHexString(lex.span())),
264 }
265 }
266 c if c.is_ascii_hexdigit() => {
267 }
269 _ => {
270 return Err(Error::InvalidHexString(lex.span()));
272 }
273 }
274 }
275
276 Err(Error::UnterminatedHexString(lex.span()))
278}
279
280fn parse_hex_regex(lex: &mut Lexer<Token>) -> Result<String> {
282 let src = lex.remainder(); let mut escape = false;
284
285 for (i, ch) in src.char_indices() {
286 match (ch, escape) {
287 ('\\', false) => escape = true, ('/', false) => {
289 let remainder = &src[i + 1..];
291 if remainder.starts_with('\'') {
292 lex.bump(i + 2); let content = src[..i].to_owned();
295 match regex::bytes::Regex::new(&content) {
296 Ok(_) => return Ok(content),
297 Err(_) => return Err(Error::InvalidRegex(lex.span())),
298 }
299 }
300 escape = false;
302 }
303 _ => escape = false, }
305 }
306
307 Err(Error::UnterminatedRegex(lex.span()))
309}
310
/// Callback for `digest'...'` literals. The quoted content may be:
/// - a `ur:...` string decoding to a full `Digest`,
/// - `/regex/` — a binary regex matched against the digest bytes,
/// - plain hex — a digest prefix of at most `Digest::DIGEST_SIZE` bytes.
///
/// Returns `UnterminatedDigestQuoted` if the closing `'` is never found.
fn parse_digest_quoted(lex: &mut Lexer<Token>) -> Result<DigestPattern> {
    use bc_components::Digest;
    use bc_ur::{URDecodable, UREncodable};

    let src = lex.remainder();
    for (i, ch) in src.char_indices() {
        if ch == '\'' {
            let content = &src[..i];
            // Consume through the closing quote so error spans cover the
            // whole literal.
            lex.bump(i + 1);
            if content.is_empty() {
                return Err(Error::InvalidDigestPattern(
                    "empty content".to_string(),
                    lex.span(),
                ));
            }

            // Full digest expressed as a UR string.
            if content.starts_with("ur:") {
                match Digest::from_ur_string(content) {
                    Ok(digest) => return Ok(DigestPattern::digest(digest)),
                    Err(_) => {
                        return Err(Error::InvalidUr(
                            content.to_string(),
                            lex.span(),
                        ));
                    }
                }
            }

            // `/.../` — binary regex over the digest bytes.
            if content.starts_with('/')
                && content.ends_with('/')
                && content.len() > 2
            {
                let regex_content = &content[1..content.len() - 1];
                match regex::bytes::Regex::new(regex_content) {
                    Ok(regex) => return Ok(DigestPattern::binary_regex(regex)),
                    Err(_) => return Err(Error::InvalidRegex(lex.span())),
                }
            }

            // Plain hex: a prefix match. Requires an even digit count and
            // no more bytes than a full digest.
            if content.chars().all(|c| c.is_ascii_hexdigit()) {
                if content.len() % 2 == 0 {
                    match hex::decode(content) {
                        Ok(bytes) => {
                            if bytes.len() <= Digest::DIGEST_SIZE {
                                return Ok(DigestPattern::prefix(bytes));
                            } else {
                                return Err(Error::InvalidHexString(
                                    lex.span(),
                                ));
                            }
                        }
                        Err(_) => {
                            return Err(Error::InvalidHexString(lex.span()));
                        }
                    }
                } else {
                    return Err(Error::InvalidHexString(lex.span()));
                }
            }

            // None of the recognized forms matched.
            return Err(Error::InvalidDigestPattern(
                content.to_string(),
                lex.span(),
            ));
        }
    }

    Err(Error::UnterminatedDigestQuoted(lex.span()))
}
390
391fn parse_date_quoted(
393 lex: &mut Lexer<Token>,
394) -> Result<crate::pattern::DatePattern> {
395 use dcbor_parse::parse_dcbor_item;
396
397 let src = lex.remainder(); for (i, ch) in src.char_indices() {
401 if ch == '\'' {
402 let content = &src[..i];
403 lex.bump(i + 1); if content.is_empty() {
407 return Err(Error::InvalidDateFormat(lex.span()));
408 }
409
410 if content.starts_with('/')
412 && content.ends_with('/')
413 && content.len() > 2
414 {
415 let regex_content = &content[1..content.len() - 1];
416 match regex::Regex::new(regex_content) {
417 Ok(regex) => {
418 return Ok(crate::pattern::DatePattern::regex(regex));
419 }
420 Err(_) => return Err(Error::InvalidRegex(lex.span())),
421 }
422 }
423
424 if content.contains("...") {
426 if let Some(iso_str) = content.strip_prefix("...") {
427 match parse_dcbor_item(iso_str) {
429 Ok(cbor) => match Date::try_from(cbor) {
430 Ok(date) => {
431 return Ok(
432 crate::pattern::DatePattern::latest(date),
433 );
434 }
435 Err(_) => {
436 return Err(Error::InvalidDateFormat(
437 lex.span(),
438 ));
439 }
440 },
441 Err(_) => {
442 return Err(Error::InvalidDateFormat(lex.span()));
443 }
444 }
445 } else if let Some(iso_str) = content.strip_suffix("...") {
446 match parse_dcbor_item(iso_str) {
448 Ok(cbor) => match Date::try_from(cbor) {
449 Ok(date) => {
450 return Ok(
451 crate::pattern::DatePattern::earliest(date),
452 );
453 }
454 Err(_) => {
455 return Err(Error::InvalidDateFormat(
456 lex.span(),
457 ));
458 }
459 },
460 Err(_) => {
461 return Err(Error::InvalidDateFormat(lex.span()));
462 }
463 }
464 } else {
465 let parts: Vec<&str> = content.split("...").collect();
467 if parts.len() == 2 {
468 let start_date = match parse_dcbor_item(parts[0]) {
469 Ok(cbor) => match Date::try_from(cbor) {
470 Ok(date) => date,
471 Err(_) => {
472 return Err(Error::InvalidDateFormat(
473 lex.span(),
474 ));
475 }
476 },
477 Err(_) => {
478 return Err(Error::InvalidDateFormat(
479 lex.span(),
480 ));
481 }
482 };
483 let end_date = match parse_dcbor_item(parts[1]) {
484 Ok(cbor) => match Date::try_from(cbor) {
485 Ok(date) => date,
486 Err(_) => {
487 return Err(Error::InvalidDateFormat(
488 lex.span(),
489 ));
490 }
491 },
492 Err(_) => {
493 return Err(Error::InvalidDateFormat(
494 lex.span(),
495 ));
496 }
497 };
498 return Ok(crate::pattern::DatePattern::range(
499 start_date..=end_date,
500 ));
501 } else {
502 return Err(Error::InvalidDateFormat(lex.span()));
503 }
504 }
505 }
506
507 match parse_dcbor_item(content) {
509 Ok(cbor) => match Date::try_from(cbor) {
510 Ok(date) => {
511 return Ok(crate::pattern::DatePattern::value(date));
512 }
513 Err(_) => return Err(Error::InvalidDateFormat(lex.span())),
514 },
515 Err(_) => return Err(Error::InvalidDateFormat(lex.span())),
516 }
517 }
518 }
519
520 Err(Error::UnterminatedDateQuoted(lex.span()))
522}
523
524fn parse_brace_open(lex: &mut Lexer<Token>) -> Token {
526 let remainder = lex.remainder();
527
528 let mut chars = remainder.chars();
530 let mut pos = 0;
531
532 while let Some(ch) = chars.next() {
534 if !matches!(ch, ' ' | '\t' | '\n' | '\r' | '\u{0c}') {
535 if ch.is_ascii_digit() {
538 if looks_like_range_pattern(&remainder[pos..]) {
540 let quantifier_result = parse_range_from_remainder(lex);
541 return Token::Range(quantifier_result);
542 }
543 }
544 break;
546 }
547 pos += ch.len_utf8();
548 }
549
550 Token::BraceOpen
551}
552
/// Heuristic lookahead used by `parse_brace_open`: does `content` (the text
/// after `{`) look like a quantifier body — optional whitespace, digits,
/// then `,` or `}`? A `:` anywhere in the deciding position means this is a
/// map entry, not a quantifier.
fn looks_like_range_pattern(content: &str) -> bool {
    const WS: [char; 5] = [' ', '\t', '\n', '\r', '\u{0c}'];
    let mut chars = content.chars();

    // Skip leading whitespace; the first significant char must be a digit.
    let mut saw_digit = false;
    for ch in chars.by_ref() {
        if WS.contains(&ch) {
            continue;
        }
        saw_digit = ch.is_ascii_digit();
        break;
    }
    if !saw_digit {
        return false;
    }

    // Consume the remaining digits; the next character decides the outcome.
    let mut decider = None;
    for ch in chars.by_ref() {
        if !ch.is_ascii_digit() {
            decider = Some(ch);
            break;
        }
    }

    match decider {
        // `{n:` — a map entry, not a quantifier.
        Some(':') => false,
        // Whitespace after the digits: the verdict comes from the next
        // non-blank character (`,` or `}` accept; anything else rejects).
        Some(ch) if WS.contains(&ch) => {
            matches!(chars.find(|c| !WS.contains(c)), Some(',') | Some('}'))
        }
        Some(ch) => ch == ',' || ch == '}',
        // Digits ran to the end of input: no closing brace in sight.
        None => false,
    }
}
606
/// Parses the interior of a quantifier — `{min}`, `{min,}`, or `{min,max}`
/// with an optional trailing `?` (lazy) or `+` (possessive) suffix — from
/// the lexer's remainder, then bumps the lexer past everything consumed.
///
/// Called only after `looks_like_range_pattern` has accepted the text;
/// still validates fully and returns `InvalidRange` on any malformation
/// (including `min > max`).
fn parse_range_from_remainder(lex: &mut Lexer<Token>) -> Result<Quantifier> {
    let remainder = lex.remainder();

    // Advance `pos` past any lexer whitespace.
    fn skip_ws(s: &str, pos: &mut usize) {
        while let Some(ch) = s[*pos..].chars().next() {
            if matches!(ch, ' ' | '\t' | '\n' | '\r' | '\u{0c}') {
                *pos += ch.len_utf8();
            } else {
                break;
            }
        }
    }

    let mut pos = 0;

    skip_ws(remainder, &mut pos);

    // The minimum bound must start with a digit.
    if !remainder[pos..]
        .chars()
        .next()
        .is_some_and(|c| c.is_ascii_digit())
    {
        return Err(Error::InvalidRange(lex.span()));
    }

    // Scan the run of digits forming `min`.
    let start = pos;
    while let Some(ch) = remainder[pos..].chars().next() {
        if ch.is_ascii_digit() {
            pos += ch.len_utf8();
        } else {
            break;
        }
    }

    let min: usize = remainder[start..pos]
        .parse()
        .map_err(|_| Error::InvalidRange(lex.span()))?;

    skip_ws(remainder, &mut pos);

    // `None` means an open-ended range (`{min,}`).
    let max: Option<usize>;

    match remainder[pos..].chars().next() {
        // `{min,` — a maximum may or may not follow.
        Some(',') => {
            pos += 1;
            skip_ws(remainder, &mut pos);

            match remainder[pos..].chars().next() {
                // `{min,}` — open-ended.
                Some('}') => {
                    pos += 1;
                    max = None;
                }
                // `{min,max}` — scan the digits of `max`.
                Some(ch) if ch.is_ascii_digit() => {
                    let start = pos;
                    while let Some(ch) = remainder[pos..].chars().next() {
                        if ch.is_ascii_digit() {
                            pos += ch.len_utf8();
                        } else {
                            break;
                        }
                    }
                    if start == pos {
                        return Err(Error::InvalidRange(lex.span()));
                    }
                    let m: usize = remainder[start..pos]
                        .parse()
                        .map_err(|_| Error::InvalidRange(lex.span()))?;
                    skip_ws(remainder, &mut pos);
                    // The range must close immediately after `max`.
                    if !matches!(remainder[pos..].chars().next(), Some('}')) {
                        return Err(Error::InvalidRange(lex.span()));
                    }
                    pos += 1;
                    max = Some(m);
                }
                _ => return Err(Error::InvalidRange(lex.span())),
            }
        }
        // `{n}` — exact repetition count.
        Some('}') => {
            pos += 1;
            max = Some(min);
        }
        _ => return Err(Error::InvalidRange(lex.span())),
    }

    // Optional reluctance suffix directly after the closing brace.
    let mode = match remainder[pos..].chars().next() {
        Some('?') => {
            pos += 1;
            Reluctance::Lazy
        }
        Some('+') => {
            pos += 1;
            Reluctance::Possessive
        }
        _ => Reluctance::Greedy,
    };

    // Consume everything that was parsed.
    lex.bump(pos);

    if let Some(max) = max {
        if min > max {
            return Err(Error::InvalidRange(lex.span()));
        }
        Ok(Quantifier::new(min..=max, mode))
    } else {
        Ok(Quantifier::new(min.., mode))
    }
}
722
723fn parse_single_quoted(lex: &mut Lexer<Token>) -> Result<String> {
725 let src = lex.remainder(); let mut escape = false;
727 let mut result = String::new();
728
729 for (i, ch) in src.char_indices() {
730 match (ch, escape) {
731 ('\\', false) => escape = true, ('\'', false) => {
733 lex.bump(i + 1); return Ok(result);
736 }
737 (c, true) => {
738 match c {
740 '\'' => result.push('\''),
741 '\\' => result.push('\\'),
742 'n' => result.push('\n'),
743 'r' => result.push('\r'),
744 't' => result.push('\t'),
745 _ => {
746 result.push('\\');
747 result.push(c);
748 }
749 }
750 escape = false;
751 }
752 (c, false) => {
753 result.push(c);
754 escape = false;
755 }
756 }
757 }
758
759 Err(Error::UnterminatedString(lex.span()))
761}
762
#[cfg(test)]
mod tests {
    use super::*;

    /// Single-character operators and bare keywords lex to their variants.
    #[test]
    fn test_basic_tokens() {
        assert_eq!(Token::lexer("&").next(), Some(Ok(Token::And)));
        assert_eq!(Token::lexer("|").next(), Some(Ok(Token::Or)));
        assert_eq!(Token::lexer("!").next(), Some(Ok(Token::Not)));
        assert_eq!(Token::lexer("*").next(), Some(Ok(Token::RepeatZeroOrMore)));
        assert_eq!(Token::lexer("+").next(), Some(Ok(Token::RepeatOneOrMore)));
        assert_eq!(Token::lexer("?").next(), Some(Ok(Token::RepeatZeroOrOne)));

        assert_eq!(Token::lexer("tagged").next(), Some(Ok(Token::Tagged)));

        assert_eq!(Token::lexer("bool").next(), Some(Ok(Token::Bool)));
        assert_eq!(Token::lexer("bstr").next(), Some(Ok(Token::ByteString)));
        assert_eq!(Token::lexer("text").next(), Some(Ok(Token::Text)));
        assert_eq!(Token::lexer("number").next(), Some(Ok(Token::Number)));

        assert_eq!(Token::lexer("true").next(), Some(Ok(Token::BoolTrue)));
        assert_eq!(Token::lexer("false").next(), Some(Ok(Token::BoolFalse)));
        assert_eq!(Token::lexer("NaN").next(), Some(Ok(Token::NaN)));
    }

    /// Callback-driven tokens: group names and regex literals, including
    /// escaped slashes and adjacent `/` tokens.
    #[test]
    fn test_complex_tokens() {
        let mut lexer = Token::lexer("@name");
        if let Some(Ok(Token::GroupName(name))) = lexer.next() {
            assert_eq!(name, "name");
        } else {
            panic!("Failed to parse group name");
        }

        let mut lexer = Token::lexer("/[a-z]+/");
        if let Some(Ok(Token::Regex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, "[a-z]+");
        } else {
            panic!("Failed to parse regex");
        }

        // Back-to-back regexes exercising `\/` escapes and empty patterns.
        let mut lx = Token::lexer(r"/abc\/def/ / / // /a\//");
        assert_eq!(
            lx.next(),
            Some(Ok(Token::Regex(Ok("abc\\/def".to_string()))))
        );
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok(" ".to_string())))));
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok("".to_string())))));
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok("a\\/".to_string())))));
        assert_eq!(lx.next(), None);
    }

    /// Hex byte strings (including empty) and hex regex literals.
    #[test]
    fn test_hex_tokens() {
        let mut lexer = Token::lexer("h'deadbeef'");
        if let Some(Ok(Token::HexString(Ok(bytes)))) = lexer.next() {
            assert_eq!(bytes, vec![0xde, 0xad, 0xbe, 0xef]);
        } else {
            panic!("Failed to parse hex string");
        }

        let mut lexer = Token::lexer("h''");
        if let Some(Ok(Token::HexString(Ok(bytes)))) = lexer.next() {
            assert_eq!(bytes, vec![]);
        } else {
            panic!("Failed to parse empty hex string");
        }

        let mut lexer = Token::lexer("h'/^[0-9]+$/'");
        if let Some(Ok(Token::HexRegex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, "^[0-9]+$");
        } else {
            panic!("Failed to parse hex regex");
        }

        // Escaped `/` inside a hex regex must not terminate the literal.
        let mut lexer = Token::lexer(r"h'/a\/b/'");
        if let Some(Ok(Token::HexRegex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, r"a\/b");
        } else {
            panic!("Failed to parse hex regex with escaped slash");
        }
    }

    /// Integer, negative, float, and scientific-notation number literals.
    #[test]
    fn test_number_literals() {
        let mut lexer = Token::lexer("42");
        let token = lexer.next();
        println!("Token for '42': {:?}", token);
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = token {
            assert_eq!(value, 42.0);
        } else {
            panic!("Failed to parse integer literal");
        }

        let mut lexer = Token::lexer("0");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 0.0);
        } else {
            panic!("Failed to parse zero literal");
        }

        let mut lexer = Token::lexer("-10");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, -10.0);
        } else {
            panic!("Failed to parse negative literal");
        }

        let mut lexer = Token::lexer("3.2222");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 3.2222);
        } else {
            panic!("Failed to parse float literal");
        }

        let mut lexer = Token::lexer("1e5");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 100000.0);
        } else {
            panic!("Failed to parse scientific notation literal");
        }
    }

    /// `{n}`, `{n,}`, `{n,m}` quantifiers in greedy, lazy (`?`), and
    /// possessive (`+`) forms, with assorted interior whitespace.
    #[test]
    fn test_range() {
        struct RangeTestCase {
            input: &'static str,
            expected: Quantifier,
        }
        let test_cases = vec![
            RangeTestCase {
                input: "{1, 5}",
                expected: Quantifier::new(1..=5, Reluctance::default()),
            },
            RangeTestCase {
                input: "{ 3 , }",
                expected: Quantifier::new(3.., Reluctance::default()),
            },
            RangeTestCase {
                input: "{ 5 }",
                expected: Quantifier::new(5..=5, Reluctance::default()),
            },
            RangeTestCase {
                input: "{1, 5 }?",
                expected: Quantifier::new(1..=5, Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{ 3 , }?",
                expected: Quantifier::new(3.., Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{5}?",
                expected: Quantifier::new(5..=5, Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{ 1,5}+",
                expected: Quantifier::new(1..=5, Reluctance::Possessive),
            },
            RangeTestCase {
                input: "{ 3 , }+",
                expected: Quantifier::new(3.., Reluctance::Possessive),
            },
            RangeTestCase {
                input: "{5}+",
                expected: Quantifier::new(5..=5, Reluctance::Possessive),
            },
        ];

        let mut failed_cases = vec![];

        for test_case in test_cases {
            let mut lexer = Token::lexer(test_case.input);
            if let Some(Ok(Token::Range(Ok(range)))) = lexer.next() {
                assert_eq!(range, test_case.expected);
            } else {
                failed_cases.push(test_case.input);
            }
        }

        if !failed_cases.is_empty() {
            panic!("Failed to parse ranges: {:?}", failed_cases);
        }
    }
}