dcbor_pattern/parse/
token.rs

1use dcbor::prelude::*;
2use dcbor_parse::parse_dcbor_item_partial;
3use logos::{Lexer, Logos};
4
5use crate::{DigestPattern, Error, Quantifier, Reluctance, Result};
6
/// Tokens for the dCBOR pattern syntax.
///
/// Simple operators and keywords are matched directly by `logos`.
/// Composite literals (strings, regexes, hex, quoted dates/digests, and
/// `{n,m}` quantifier ranges) are completed by the callback functions
/// defined below, which consume their bodies by hand via `lex.bump`.
#[derive(Debug, Clone, Logos, PartialEq)]
#[rustfmt::skip]
#[logos(error = Error)]
#[logos(skip r"[ \t\r\n\f]+")]
pub enum Token {
    // Meta pattern operators
    #[token("&")]
    And,

    #[token("|")]
    Or,

    #[token("!")]
    Not,

    // Quantifier operators: `*`/`+`/`?` with lazy (`?`) and possessive
    // (`+`) suffix forms; logos picks the longest match, so `*?` beats
    // `*` followed by `?`.
    #[token("*")]
    RepeatZeroOrMore,

    #[token("*?")]
    RepeatZeroOrMoreLazy,

    #[token("*+")]
    RepeatZeroOrMorePossessive,

    #[token("+")]
    RepeatOneOrMore,

    #[token("+?")]
    RepeatOneOrMoreLazy,

    #[token("++")]
    RepeatOneOrMorePossessive,

    #[token("?")]
    RepeatZeroOrOne,

    #[token("??")]
    RepeatZeroOrOneLazy,

    #[token("?+")]
    RepeatZeroOrOnePossessive,

    // Structure Pattern Keywords
    #[token("tagged")]
    Tagged,

    #[token("array")]
    Array,

    #[token("map")]
    Map,

    // Value Pattern Keywords
    #[token("bool")]
    Bool,

    #[token("bstr")]
    ByteString,

    #[token("date")]
    Date,

    // `date'...'` — body parsed by `parse_date_quoted` (ISO-8601 value,
    // range, or regex form).
    #[token("date'", parse_date_quoted)]
    DateQuoted(Result<crate::pattern::DatePattern>),

    #[token("known")]
    Known,

    #[token("null")]
    Null,

    #[token("number")]
    Number,

    #[token("text")]
    Text,

    #[token("digest")]
    Digest,

    // `digest'...'` — body parsed by `parse_digest_quoted` (UR, hex
    // prefix, or binary regex form).
    #[token("digest'", parse_digest_quoted)]
    DigestQuoted(Result<DigestPattern>),

    // Meta Pattern Keywords
    #[token("search")]
    Search,

    // Special literals
    #[token("true")]
    BoolTrue,

    #[token("false")]
    BoolFalse,

    #[token("NaN")]
    NaN,

    #[token("Infinity")]
    Infinity,

    #[token("-Infinity")]
    NegInfinity,

    // Grouping and Range delimiters
    #[token("(")]
    ParenOpen,

    #[token(")")]
    ParenClose,

    #[token("[")]
    BracketOpen,

    #[token("]")]
    BracketClose,

    // `{` is ambiguous: it may open a map pattern or a `{n,m}` range;
    // `parse_brace_open` looks ahead to decide and may emit `Range`.
    #[token("{", parse_brace_open)]
    BraceOpen,

    #[token("}")]
    BraceClose,

    #[token(",")]
    Comma,

    #[token(":")]
    Colon,

    #[token("...")]
    Ellipsis,

    #[token(">=")]
    GreaterThanOrEqual,

    #[token("<=")]
    LessThanOrEqual,

    // NOTE(review): explicit priority presumably resolves a logos
    // conflict with `>=` — confirm against the logos derive output.
    #[token(">", priority = 1)]
    GreaterThan,

    #[token("<")]
    LessThan,

    /// Number literal parsed using dcbor-parse for consistency with dCBOR
    #[regex(r"-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?", callback = parse_number)]
    NumberLiteral(Result<f64>),

    // `@name` capture-group label; callback strips the leading `@`.
    #[regex(r"@[a-zA-Z_][a-zA-Z0-9_]*", |lex|
        lex.slice()[1..].to_string()
    )]
    GroupName(String),

    #[token("\"", parse_string)]
    StringLiteral(Result<String>),

    #[token("'", parse_single_quoted)]
    SingleQuoted(Result<String>),

    #[token("/", parse_regex)]
    Regex(Result<String>),

    #[token("h'", parse_hex_string)]
    HexString(Result<Vec<u8>>),

    #[token("h'/", parse_hex_regex)]
    HexRegex(Result<String>),

    // Never matched directly by the lexer: produced by the
    // `parse_brace_open` callback when `{` starts a quantifier range.
    Range(Result<Quantifier>),
}
176
177/// Callback to parse numbers using dcbor-parse for consistency with dCBOR
178fn parse_number(lex: &mut Lexer<Token>) -> Result<f64> {
179    let number_str = lex.slice();
180    match parse_dcbor_item_partial(number_str) {
181        Ok((cbor, _)) => match f64::try_from_cbor(&cbor) {
182            Ok(value) => Ok(value),
183            Err(_) => Err(Error::InvalidNumberFormat(lex.span())),
184        },
185        Err(_) => Err(Error::InvalidNumberFormat(lex.span())),
186    }
187}
188
189/// Callback used by the `Regex` variant above.
190fn parse_regex(lex: &mut Lexer<Token>) -> Result<String> {
191    let src = lex.remainder(); // everything after the first '/'
192    let mut escape = false;
193
194    for (i, ch) in src.char_indices() {
195        match (ch, escape) {
196            ('\\', false) => escape = true, // start of an escape
197            ('/', false) => {
198                // Found the closing delimiter ------------------
199                lex.bump(i + 1); // +1 to also eat the '/'
200                let content = src[..i].to_owned();
201                match regex::Regex::new(&content) {
202                    Ok(_) => return Ok(content),
203                    Err(_) => return Err(Error::InvalidRegex(lex.span())),
204                }
205            }
206            _ => escape = false, // any other char ends an escape
207        }
208    }
209
210    // Unterminated literal – treat as lexing error
211    Err(Error::UnterminatedRegex(lex.span()))
212}
213
214/// Callback used by the `StringLiteral` variant above.
215fn parse_string(lex: &mut Lexer<Token>) -> Result<String> {
216    let src = lex.remainder(); // everything after the first '"'
217    let mut escape = false;
218    let mut result = String::new();
219
220    for (i, ch) in src.char_indices() {
221        match (ch, escape) {
222            ('\\', false) => escape = true, // start of an escape
223            ('"', false) => {
224                // Found the closing delimiter
225                lex.bump(i + 1); // +1 to also eat the '"'
226                return Ok(result);
227            }
228            (c, true) => {
229                // Handle escape sequences
230                match c {
231                    '"' => result.push('"'),
232                    '\\' => result.push('\\'),
233                    'n' => result.push('\n'),
234                    'r' => result.push('\r'),
235                    't' => result.push('\t'),
236                    _ => {
237                        result.push('\\');
238                        result.push(c);
239                    }
240                }
241                escape = false;
242            }
243            (c, false) => {
244                result.push(c);
245                escape = false;
246            }
247        }
248    }
249
250    // Unterminated literal – treat as lexing error
251    Err(Error::UnterminatedString(lex.span()))
252}
253
254/// Callback used by the `HexString` variant above.
255fn parse_hex_string(lex: &mut Lexer<Token>) -> Result<Vec<u8>> {
256    let src = lex.remainder(); // everything after the first h'
257
258    // Parse as hex string h'...'
259    for (i, ch) in src.char_indices() {
260        match ch {
261            '\'' => {
262                // Found the closing delimiter
263                let hex_content = &src[..i];
264                match hex::decode(hex_content) {
265                    Ok(bytes) => {
266                        lex.bump(i + 1); // +1 to also eat the '\''
267                        return Ok(bytes);
268                    }
269                    Err(_) => return Err(Error::InvalidHexString(lex.span())),
270                }
271            }
272            c if c.is_ascii_hexdigit() => {
273                // Valid hex character, continue
274            }
275            _ => {
276                // Invalid character in hex string
277                return Err(Error::InvalidHexString(lex.span()));
278            }
279        }
280    }
281
282    // Unterminated literal – treat as lexing error
283    Err(Error::UnterminatedHexString(lex.span()))
284}
285
286/// Callback used by the `HexRegex` variant above.
287fn parse_hex_regex(lex: &mut Lexer<Token>) -> Result<String> {
288    let src = lex.remainder(); // everything after the first h'/
289    let mut escape = false;
290
291    for (i, ch) in src.char_indices() {
292        match (ch, escape) {
293            ('\\', false) => escape = true, // start of an escape
294            ('/', false) => {
295                // Look for the closing '
296                let remainder = &src[i + 1..];
297                if remainder.starts_with('\'') {
298                    // Found the closing h'/.../'
299                    lex.bump(i + 2); // +2 to eat both '/' and '\''
300                    let content = src[..i].to_owned();
301                    match regex::bytes::Regex::new(&content) {
302                        Ok(_) => return Ok(content),
303                        Err(_) => return Err(Error::InvalidRegex(lex.span())),
304                    }
305                }
306                // Not the end, continue
307                escape = false;
308            }
309            _ => escape = false, // any other char ends an escape
310        }
311    }
312
313    // Unterminated literal – treat as lexing error
314    Err(Error::UnterminatedRegex(lex.span()))
315}
316
/// Callback used by the `DigestQuoted` variant above.
///
/// Accepts three content forms inside `digest'...'`:
/// - `ur:...` — a full digest UR, decoded via `bc_ur`;
/// - `/.../`  — a binary regex matched against the digest bytes;
/// - hex      — an even-length hex string of at most
///   `Digest::DIGEST_SIZE` bytes, treated as a digest prefix.
///
/// Note: the lexer is bumped past the closing quote *before* the
/// content is validated, so error spans cover the whole literal.
fn parse_digest_quoted(lex: &mut Lexer<Token>) -> Result<DigestPattern> {
    use bc_components::Digest;
    use bc_ur::URDecodable;

    let src = lex.remainder(); // everything after "digest'"

    // Find the closing quote
    for (i, ch) in src.char_indices() {
        if ch == '\'' {
            let content = &src[..i];
            lex.bump(i + 1); // +1 to eat the closing quote

            // Check for empty content
            if content.is_empty() {
                return Err(Error::InvalidDigestPattern(
                    "empty content".to_string(),
                    lex.span(),
                ));
            }

            // Check if it's a UR string
            if content.starts_with("ur:") {
                match Digest::from_ur_string(content) {
                    Ok(digest) => return Ok(DigestPattern::digest(digest)),
                    Err(_) => {
                        return Err(Error::InvalidUr(
                            content.to_string(),
                            lex.span(),
                        ));
                    }
                }
            }

            // Check if it's a regex pattern /.../
            if content.starts_with('/')
                && content.ends_with('/')
                && content.len() > 2
            {
                let regex_content = &content[1..content.len() - 1];
                match regex::bytes::Regex::new(regex_content) {
                    Ok(regex) => return Ok(DigestPattern::binary_regex(regex)),
                    Err(_) => return Err(Error::InvalidRegex(lex.span())),
                }
            }

            // Try to parse as hex
            if content.chars().all(|c| c.is_ascii_hexdigit()) {
                if content.len() % 2 == 0 {
                    match hex::decode(content) {
                        Ok(bytes) => {
                            // A prefix may be shorter than, but never
                            // longer than, a full digest.
                            if bytes.len() <= Digest::DIGEST_SIZE {
                                return Ok(DigestPattern::prefix(bytes));
                            } else {
                                return Err(Error::InvalidHexString(
                                    lex.span(),
                                ));
                            }
                        }
                        Err(_) => {
                            return Err(Error::InvalidHexString(lex.span()));
                        }
                    }
                } else {
                    // Odd number of hex digits cannot decode to bytes.
                    return Err(Error::InvalidHexString(lex.span()));
                }
            }

            // If it's not UR, regex, or hex, it's an error
            return Err(Error::InvalidDigestPattern(
                content.to_string(),
                lex.span(),
            ));
        }
    }

    // Unterminated literal
    Err(Error::UnterminatedDigestQuoted(lex.span()))
}
396
397/// Callback used by the `DateQuoted` variant above.
398fn parse_date_quoted(
399    lex: &mut Lexer<Token>,
400) -> Result<crate::pattern::DatePattern> {
401    use dcbor_parse::parse_dcbor_item;
402
403    let src = lex.remainder(); // everything after "date'"
404
405    // Find the closing quote
406    for (i, ch) in src.char_indices() {
407        if ch == '\'' {
408            let content = &src[..i];
409            lex.bump(i + 1); // +1 to eat the closing quote
410
411            // Check for empty content
412            if content.is_empty() {
413                return Err(Error::InvalidDateFormat(lex.span()));
414            }
415
416            // Check if it's a regex pattern /.../
417            if content.starts_with('/')
418                && content.ends_with('/')
419                && content.len() > 2
420            {
421                let regex_content = &content[1..content.len() - 1];
422                match regex::Regex::new(regex_content) {
423                    Ok(regex) => {
424                        return Ok(crate::pattern::DatePattern::regex(regex));
425                    }
426                    Err(_) => return Err(Error::InvalidRegex(lex.span())),
427                }
428            }
429
430            // Check for range patterns
431            if content.contains("...") {
432                if let Some(iso_str) = content.strip_prefix("...") {
433                    // Latest pattern: "...iso-8601"
434                    match parse_dcbor_item(iso_str) {
435                        Ok(cbor) => match Date::try_from(cbor) {
436                            Ok(date) => {
437                                return Ok(
438                                    crate::pattern::DatePattern::latest(date),
439                                );
440                            }
441                            Err(_) => {
442                                return Err(Error::InvalidDateFormat(
443                                    lex.span(),
444                                ));
445                            }
446                        },
447                        Err(_) => {
448                            return Err(Error::InvalidDateFormat(lex.span()));
449                        }
450                    }
451                } else if let Some(iso_str) = content.strip_suffix("...") {
452                    // Earliest pattern: "iso-8601..."
453                    match parse_dcbor_item(iso_str) {
454                        Ok(cbor) => match Date::try_from(cbor) {
455                            Ok(date) => {
456                                return Ok(
457                                    crate::pattern::DatePattern::earliest(date),
458                                );
459                            }
460                            Err(_) => {
461                                return Err(Error::InvalidDateFormat(
462                                    lex.span(),
463                                ));
464                            }
465                        },
466                        Err(_) => {
467                            return Err(Error::InvalidDateFormat(lex.span()));
468                        }
469                    }
470                } else {
471                    // Range pattern: "iso-8601...iso-8601"
472                    let parts: Vec<&str> = content.split("...").collect();
473                    if parts.len() == 2 {
474                        let start_date = match parse_dcbor_item(parts[0]) {
475                            Ok(cbor) => match Date::try_from(cbor) {
476                                Ok(date) => date,
477                                Err(_) => {
478                                    return Err(Error::InvalidDateFormat(
479                                        lex.span(),
480                                    ));
481                                }
482                            },
483                            Err(_) => {
484                                return Err(Error::InvalidDateFormat(
485                                    lex.span(),
486                                ));
487                            }
488                        };
489                        let end_date = match parse_dcbor_item(parts[1]) {
490                            Ok(cbor) => match Date::try_from(cbor) {
491                                Ok(date) => date,
492                                Err(_) => {
493                                    return Err(Error::InvalidDateFormat(
494                                        lex.span(),
495                                    ));
496                                }
497                            },
498                            Err(_) => {
499                                return Err(Error::InvalidDateFormat(
500                                    lex.span(),
501                                ));
502                            }
503                        };
504                        return Ok(crate::pattern::DatePattern::range(
505                            start_date..=end_date,
506                        ));
507                    } else {
508                        return Err(Error::InvalidDateFormat(lex.span()));
509                    }
510                }
511            }
512
513            // Try to parse as single ISO-8601 date
514            match parse_dcbor_item(content) {
515                Ok(cbor) => match Date::try_from(cbor) {
516                    Ok(date) => {
517                        return Ok(crate::pattern::DatePattern::value(date));
518                    }
519                    Err(_) => return Err(Error::InvalidDateFormat(lex.span())),
520                },
521                Err(_) => return Err(Error::InvalidDateFormat(lex.span())),
522            }
523        }
524    }
525
526    // Unterminated literal
527    Err(Error::UnterminatedDateQuoted(lex.span()))
528}
529
530/// Callback to handle `{` token - determines if it's a Range or BraceOpen
531fn parse_brace_open(lex: &mut Lexer<Token>) -> Token {
532    let remainder = lex.remainder();
533
534    // Skip whitespace and see if we have a digit pattern
535    let mut chars = remainder.chars();
536    let mut pos = 0;
537
538    // Skip whitespace
539    while let Some(ch) = chars.next() {
540        if !matches!(ch, ' ' | '\t' | '\n' | '\r' | '\u{0c}') {
541            // If the first non-whitespace character is a digit, we need to look
542            // ahead further to determine if this is really a range
543            // pattern or a map key-value constraint
544            if ch.is_ascii_digit() {
545                // Look ahead to see if this looks like a range pattern
546                if looks_like_range_pattern(&remainder[pos..]) {
547                    let quantifier_result = parse_range_from_remainder(lex);
548                    return Token::Range(quantifier_result);
549                }
550            }
551            // Otherwise, it's just a regular BraceOpen
552            break;
553        }
554        pos += ch.len_utf8();
555    }
556
557    Token::BraceOpen
558}
559
/// Helper function to determine if the content after `{` looks like a range
/// pattern
///
/// Returns `true` only when the content (after optional whitespace)
/// starts with one or more digits followed — again after optional
/// whitespace — by `,` or `}`. A `:` after the digits marks a map
/// key-value constraint, and any other character disqualifies it too.
fn looks_like_range_pattern(content: &str) -> bool {
    const WS: &[char] = &[' ', '\t', '\n', '\r', '\u{0c}'];

    // Strip leading whitespace, then the run of leading digits.
    let digits_start = content.trim_start_matches(WS);
    let after_digits = digits_start.trim_start_matches(|c: char| c.is_ascii_digit());

    // No digits at all (or empty input): not a range.
    if after_digits.len() == digits_start.len() {
        return false;
    }

    // After the digits (and optional whitespace), a range must continue
    // with `,` (min,max or open-ended) or `}` (exact count). This also
    // rejects `:` (map constraint) and end-of-input.
    let tail = after_digits.trim_start_matches(WS);
    tail.starts_with(',') || tail.starts_with('}')
}
616
/// Helper function to parse a range pattern from the current position
///
/// Called after `{` has been consumed. Parses `min`, an optional
/// `, max` (a bare `,` makes the range open-ended), the closing `}`,
/// and an optional reluctance suffix (`?` lazy, `+` possessive). On
/// success the lexer is bumped past everything consumed; on any
/// malformed input `InvalidRange` is returned (and the lexer is only
/// bumped if parsing got past the closing brace).
fn parse_range_from_remainder(lex: &mut Lexer<Token>) -> Result<Quantifier> {
    let remainder = lex.remainder(); // everything after the '{'

    // Helper to skip whitespace inside the range specification
    fn skip_ws(s: &str, pos: &mut usize) {
        while let Some(ch) = s[*pos..].chars().next() {
            if matches!(ch, ' ' | '\t' | '\n' | '\r' | '\u{0c}') {
                *pos += ch.len_utf8();
            } else {
                break;
            }
        }
    }

    // `pos` is a byte offset into `remainder`; it only ever advances by
    // whole chars, so slicing `remainder[pos..]` is always valid.
    let mut pos = 0;

    // Skip initial whitespace
    skip_ws(remainder, &mut pos);

    // Parse the first number
    if !remainder[pos..]
        .chars()
        .next()
        .is_some_and(|c| c.is_ascii_digit())
    {
        return Err(Error::InvalidRange(lex.span()));
    }

    let start = pos;
    while let Some(ch) = remainder[pos..].chars().next() {
        if ch.is_ascii_digit() {
            pos += ch.len_utf8();
        } else {
            break;
        }
    }

    // Parse can still fail on overflow (e.g. a huge digit run).
    let min: usize = remainder[start..pos]
        .parse()
        .map_err(|_| Error::InvalidRange(lex.span()))?;

    skip_ws(remainder, &mut pos);

    // Parse optional comma and maximum value
    let max: Option<usize>;

    // `,`, `}`, `?`, `+` are all one byte, so the bare `pos += 1`
    // advances below are byte-safe.
    match remainder[pos..].chars().next() {
        Some(',') => {
            pos += 1;
            skip_ws(remainder, &mut pos);

            // If the next non-space char is '}', the range is open ended
            match remainder[pos..].chars().next() {
                Some('}') => {
                    pos += 1;
                    max = None;
                }
                Some(ch) if ch.is_ascii_digit() => {
                    let start = pos;
                    while let Some(ch) = remainder[pos..].chars().next() {
                        if ch.is_ascii_digit() {
                            pos += ch.len_utf8();
                        } else {
                            break;
                        }
                    }
                    if start == pos {
                        return Err(Error::InvalidRange(lex.span()));
                    }
                    let m: usize = remainder[start..pos]
                        .parse()
                        .map_err(|_| Error::InvalidRange(lex.span()))?;
                    skip_ws(remainder, &mut pos);
                    if !matches!(remainder[pos..].chars().next(), Some('}')) {
                        return Err(Error::InvalidRange(lex.span()));
                    }
                    pos += 1;
                    max = Some(m);
                }
                _ => return Err(Error::InvalidRange(lex.span())),
            }
        }
        Some('}') => {
            // `{n}` — exact repetition count.
            pos += 1;
            max = Some(min);
        }
        _ => return Err(Error::InvalidRange(lex.span())),
    }

    // Determine greediness
    let mode = match remainder[pos..].chars().next() {
        Some('?') => {
            pos += 1;
            Reluctance::Lazy
        }
        Some('+') => {
            pos += 1;
            Reluctance::Possessive
        }
        _ => Reluctance::Greedy,
    };

    // Consume parsed characters
    lex.bump(pos);

    // Validate ordering last so the error span covers the whole range.
    if let Some(max) = max {
        if min > max {
            return Err(Error::InvalidRange(lex.span()));
        }
        Ok(Quantifier::new(min..=max, mode))
    } else {
        Ok(Quantifier::new(min.., mode))
    }
}
732
733/// Callback used by the `SingleQuoted` variant above.
734fn parse_single_quoted(lex: &mut Lexer<Token>) -> Result<String> {
735    let src = lex.remainder(); // everything after the first '\''
736    let mut escape = false;
737    let mut result = String::new();
738
739    for (i, ch) in src.char_indices() {
740        match (ch, escape) {
741            ('\\', false) => escape = true, // start of an escape
742            ('\'', false) => {
743                // Found the closing delimiter
744                lex.bump(i + 1); // +1 to also eat the '\''
745                return Ok(result);
746            }
747            (c, true) => {
748                // Handle escape sequences
749                match c {
750                    '\'' => result.push('\''),
751                    '\\' => result.push('\\'),
752                    'n' => result.push('\n'),
753                    'r' => result.push('\r'),
754                    't' => result.push('\t'),
755                    _ => {
756                        result.push('\\');
757                        result.push(c);
758                    }
759                }
760                escape = false;
761            }
762            (c, false) => {
763                result.push(c);
764                escape = false;
765            }
766        }
767    }
768
769    // Unterminated literal – treat as lexing error
770    Err(Error::UnterminatedString(lex.span()))
771}
772
/// Unit tests for the token lexer.
#[cfg(test)]
mod tests {
    use super::*;
    /// Single-token inputs: operators, keywords, and literal keywords
    /// each lex to exactly the expected variant.
    #[test]
    fn test_basic_tokens() {
        // Test meta pattern operators
        assert_eq!(Token::lexer("&").next(), Some(Ok(Token::And)));
        assert_eq!(Token::lexer("|").next(), Some(Ok(Token::Or)));
        assert_eq!(Token::lexer("!").next(), Some(Ok(Token::Not)));
        assert_eq!(Token::lexer("*").next(), Some(Ok(Token::RepeatZeroOrMore)));
        assert_eq!(Token::lexer("+").next(), Some(Ok(Token::RepeatOneOrMore)));
        assert_eq!(Token::lexer("?").next(), Some(Ok(Token::RepeatZeroOrOne)));

        // Test structure pattern keywords
        assert_eq!(Token::lexer("tagged").next(), Some(Ok(Token::Tagged)));

        // Test leaf pattern keywords
        assert_eq!(Token::lexer("bool").next(), Some(Ok(Token::Bool)));
        assert_eq!(Token::lexer("bstr").next(), Some(Ok(Token::ByteString)));
        assert_eq!(Token::lexer("text").next(), Some(Ok(Token::Text)));
        assert_eq!(Token::lexer("number").next(), Some(Ok(Token::Number)));

        // Test literals
        assert_eq!(Token::lexer("true").next(), Some(Ok(Token::BoolTrue)));
        assert_eq!(Token::lexer("false").next(), Some(Ok(Token::BoolFalse)));
        assert_eq!(Token::lexer("NaN").next(), Some(Ok(Token::NaN)));
    }

    /// Callback-driven tokens: capture-group names and `/.../` regex
    /// literals, including escaped slashes and back-to-back regexes.
    #[test]
    fn test_complex_tokens() {
        // Group name
        let mut lexer = Token::lexer("@name");
        if let Some(Ok(Token::GroupName(name))) = lexer.next() {
            assert_eq!(name, "name");
        } else {
            panic!("Failed to parse group name");
        }

        // Test regex
        let mut lexer = Token::lexer("/[a-z]+/");
        if let Some(Ok(Token::Regex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, "[a-z]+");
        } else {
            panic!("Failed to parse regex");
        }

        // Escaped `\/` must not terminate a regex; empty and
        // single-space regexes are valid too.
        let mut lx = Token::lexer(r"/abc\/def/  / /  //  /a\//");
        assert_eq!(
            lx.next(),
            Some(Ok(Token::Regex(Ok("abc\\/def".to_string()))))
        );
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok(" ".to_string())))));
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok("".to_string())))));
        assert_eq!(lx.next(), Some(Ok(Token::Regex(Ok("a\\/".to_string())))));
        assert_eq!(lx.next(), None);
    }

    /// Hex byte-string literals `h'..'` and hex regexes `h'/../'`.
    #[test]
    fn test_hex_tokens() {
        // Test hex string
        let mut lexer = Token::lexer("h'deadbeef'");
        if let Some(Ok(Token::HexString(Ok(bytes)))) = lexer.next() {
            assert_eq!(bytes, vec![0xde, 0xad, 0xbe, 0xef]);
        } else {
            panic!("Failed to parse hex string");
        }

        // Test empty hex string
        let mut lexer = Token::lexer("h''");
        if let Some(Ok(Token::HexString(Ok(bytes)))) = lexer.next() {
            assert_eq!(bytes, vec![]);
        } else {
            panic!("Failed to parse empty hex string");
        }

        // Test hex regex
        let mut lexer = Token::lexer("h'/^[0-9]+$/'");
        if let Some(Ok(Token::HexRegex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, "^[0-9]+$");
        } else {
            panic!("Failed to parse hex regex");
        }

        // Test hex regex with escaped slash
        let mut lexer = Token::lexer(r"h'/a\/b/'");
        if let Some(Ok(Token::HexRegex(Ok(regex)))) = lexer.next() {
            assert_eq!(regex, r"a\/b");
        } else {
            panic!("Failed to parse hex regex with escaped slash");
        }
    }

    /// Numeric literals routed through dcbor-parse: integers, zero,
    /// negatives, floats, and scientific notation.
    #[test]
    fn test_number_literals() {
        let mut lexer = Token::lexer("42");
        let token = lexer.next();
        println!("Token for '42': {:?}", token);
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = token {
            assert_eq!(value, 42.0);
        } else {
            panic!("Failed to parse integer literal");
        }

        // Test zero
        let mut lexer = Token::lexer("0");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 0.0);
        } else {
            panic!("Failed to parse zero literal");
        }

        // Test negative number
        let mut lexer = Token::lexer("-10");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, -10.0);
        } else {
            panic!("Failed to parse negative literal");
        }

        // Test floating point
        let mut lexer = Token::lexer("3.2222");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 3.2222);
        } else {
            panic!("Failed to parse float literal");
        }

        // Test scientific notation
        let mut lexer = Token::lexer("1e5");
        if let Some(Ok(Token::NumberLiteral(Ok(value)))) = lexer.next() {
            assert_eq!(value, 100000.0);
        } else {
            panic!("Failed to parse scientific notation literal");
        }
    }

    /// `{n}` / `{n,}` / `{n,m}` quantifier ranges with all three
    /// reluctance modes and assorted internal whitespace.
    #[test]
    fn test_range() {
        struct RangeTestCase {
            input: &'static str,
            expected: Quantifier,
        }
        let test_cases = vec![
            RangeTestCase {
                input: "{1, 5}",
                expected: Quantifier::new(1..=5, Reluctance::default()),
            },
            RangeTestCase {
                input: "{ 3 , }",
                expected: Quantifier::new(3.., Reluctance::default()),
            },
            RangeTestCase {
                input: "{ 5 }",
                expected: Quantifier::new(5..=5, Reluctance::default()),
            },
            RangeTestCase {
                input: "{1, 5 }?",
                expected: Quantifier::new(1..=5, Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{ 3 , }?",
                expected: Quantifier::new(3.., Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{5}?",
                expected: Quantifier::new(5..=5, Reluctance::Lazy),
            },
            RangeTestCase {
                input: "{ 1,5}+",
                expected: Quantifier::new(1..=5, Reluctance::Possessive),
            },
            RangeTestCase {
                input: "{ 3 , }+",
                expected: Quantifier::new(3.., Reluctance::Possessive),
            },
            RangeTestCase {
                input: "{5}+",
                expected: Quantifier::new(5..=5, Reluctance::Possessive),
            },
        ];

        let mut failed_cases = vec![];

        for test_case in test_cases {
            let mut lexer = Token::lexer(test_case.input);
            if let Some(Ok(Token::Range(Ok(range)))) = lexer.next() {
                assert_eq!(range, test_case.expected);
            } else {
                failed_cases.push(test_case.input);
            }
        }

        if !failed_cases.is_empty() {
            panic!("Failed to parse ranges: {:?}", failed_cases);
        }
    }
}