// lnmp_codec/lexer.rs

1//! Lexer for tokenizing LNMP text format.
2
/// Token types in LNMP format.
///
/// Produced by [`Lexer::next_token`]; whitespace (spaces/tabs) is never
/// emitted as a token, but newlines are significant and appear as
/// [`Token::Newline`].
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
    /// Field prefix 'F'
    FieldPrefix,
    /// Numeric value (field ID or number)
    Number(String),
    /// Equals sign '='
    Equals,
    /// Semicolon ';'
    Semicolon,
    /// Colon ':' (for type hints)
    Colon,
    /// Type hint identifier (i, f, b, s, sa, r, ra)
    TypeHint(String),
    /// Left bracket '['
    LeftBracket,
    /// Right bracket ']'
    RightBracket,
    /// Left brace '{' (for nested records)
    LeftBrace,
    /// Right brace '}' (for nested records)
    RightBrace,
    /// Comma ','
    Comma,
    /// Quoted string "..."
    QuotedString(String),
    /// Unquoted string (identifier)
    UnquotedString(String),
    /// Hash '#' (for checksums or comments)
    Hash,
    /// Newline character
    Newline,
    /// End of file
    Eof,
}
39
/// Lexer for LNMP text format.
///
/// Holds the input (borrowed or owned, see [`Input`]) and a cursor tracking
/// both the byte offset and the human-readable (line, column) position.
pub struct Lexer<'a> {
    /// Input text being scanned (sanitized form when owned).
    input: Input<'a>,
    /// Current byte offset into `input` (always on a char boundary).
    position: usize,
    /// Current 1-based line number.
    line: usize,
    /// Current 1-based column number (counted in chars, not bytes).
    column: usize,
}
47
48impl<'a> Lexer<'a> {
49    /// Creates a new lexer for the given input
50    pub fn new(input: &'a str) -> Self {
51        Self {
52            input: Input::Borrowed(input),
53            position: 0,
54            line: 1,
55            column: 1,
56        }
57    }
58
59    /// Creates a new lexer owning its input (used after sanitization)
60    pub fn new_owned(input: String) -> Self {
61        Self {
62            input: Input::Owned {
63                sanitized: input,
64                original: None,
65                span_map: None,
66            },
67            position: 0,
68            line: 1,
69            column: 1,
70        }
71    }
72
73    /// Creates a new lexer owning sanitized input and carrying original for span mapping.
74    pub fn new_owned_with_original(sanitized: String, original: String, span_map: SpanMap) -> Self {
75        Self {
76            input: Input::Owned {
77                sanitized,
78                original: Some(original),
79                span_map: Some(span_map),
80            },
81            position: 0,
82            line: 1,
83            column: 1,
84        }
85    }
86
87    /// Returns the current position (line, column)
88    pub fn position(&self) -> (usize, usize) {
89        (self.line, self.column)
90    }
91
92    /// Returns position mapped to original input if available (lenient mode).
93    pub fn position_original(&self) -> (usize, usize) {
94        let offset = match self.input.span_map() {
95            Some(map) => map.map_offset(self.position),
96            None => self.position,
97        };
98        if let Some(orig) = self.input.original_str() {
99            compute_line_col(orig, offset)
100        } else {
101            (self.line, self.column)
102        }
103    }
104
105    /// Peeks at the current character without consuming it
106    fn peek(&self) -> Option<char> {
107        self.input.as_str()[self.position..].chars().next()
108    }
109
110    /// Peeks at the character at offset from current position
111    fn peek_ahead(&self, offset: usize) -> Option<char> {
112        self.input.as_str()[self.position..].chars().nth(offset)
113    }
114
115    /// Advances to the next character and returns it
116    fn advance(&mut self) -> Option<char> {
117        let ch = self.peek()?;
118        self.position += ch.len_utf8();
119        if ch == '\n' {
120            self.line += 1;
121            self.column = 1;
122        } else {
123            self.column += 1;
124        }
125        Some(ch)
126    }
127
128    /// Skips whitespace (spaces and tabs, but not newlines)
129    fn skip_whitespace(&mut self) {
130        while let Some(ch) = self.peek() {
131            if ch == ' ' || ch == '\t' {
132                self.advance();
133            } else {
134                break;
135            }
136        }
137    }
138}
139
140use crate::error::LnmpError;
141
142impl<'a> Lexer<'a> {
143    /// Reads the next token from the input
144    pub fn next_token(&mut self) -> Result<Token, LnmpError> {
145        self.skip_whitespace();
146
147        match self.peek() {
148            Some('#') => {
149                self.advance();
150                Ok(Token::Hash)
151            }
152            None => Ok(Token::Eof),
153            Some('\n') => {
154                self.advance();
155                Ok(Token::Newline)
156            }
157            Some('=') => {
158                self.advance();
159                Ok(Token::Equals)
160            }
161            Some(';') => {
162                self.advance();
163                Ok(Token::Semicolon)
164            }
165            Some(':') => {
166                self.advance();
167                // After colon, try to read a type hint
168                self.read_type_hint()
169            }
170            Some('[') => {
171                self.advance();
172                Ok(Token::LeftBracket)
173            }
174            Some(']') => {
175                self.advance();
176                Ok(Token::RightBracket)
177            }
178            Some('{') => {
179                self.advance();
180                Ok(Token::LeftBrace)
181            }
182            Some('}') => {
183                self.advance();
184                Ok(Token::RightBrace)
185            }
186            Some(',') => {
187                self.advance();
188                Ok(Token::Comma)
189            }
190            Some('F') => {
191                // Treat 'F' as a field prefix only when followed by a digit (e.g., F12).
192                // Otherwise, it's an unquoted identifier (e.g., 'False').
193                if let Some(next_ch) = self.peek_ahead(1) {
194                    if next_ch.is_ascii_digit() {
195                        self.advance(); // consume 'F'
196                        return Ok(Token::FieldPrefix);
197                    }
198                }
199                // Not a field prefix: read as an unquoted string starting with 'F'
200                self.read_unquoted_string()
201            }
202            Some('"') => self.read_quoted_string(),
203            Some(ch) if ch.is_ascii_digit() || ch == '-' => self.read_number(),
204            Some(ch) if is_unquoted_char(ch) => self.read_unquoted_string(),
205            Some(ch) => {
206                let (line, column) = self.position();
207                Err(LnmpError::UnexpectedToken {
208                    expected: "valid token".to_string(),
209                    found: Token::UnquotedString(ch.to_string()),
210                    line,
211                    column,
212                })
213            }
214        }
215    }
216
217    /// Reads a number (integer or float)
218    fn read_number(&mut self) -> Result<Token, LnmpError> {
219        let mut number = String::new();
220
221        // Handle optional negative sign
222        if self.peek() == Some('-') {
223            number.push('-');
224            self.advance();
225        }
226
227        // Read digits
228        while let Some(ch) = self.peek() {
229            if ch.is_ascii_digit() || ch == '.' {
230                number.push(ch);
231                self.advance();
232            } else {
233                break;
234            }
235        }
236
237        Ok(Token::Number(number))
238    }
239
240    /// Reads a quoted string with escape sequences
241    fn read_quoted_string(&mut self) -> Result<Token, LnmpError> {
242        let (start_line, start_column) = self.position();
243        self.advance(); // consume opening quote
244
245        let mut result = String::new();
246
247        loop {
248            match self.peek() {
249                None => {
250                    return Err(LnmpError::UnterminatedString {
251                        line: start_line,
252                        column: start_column,
253                    });
254                }
255                Some('"') => {
256                    self.advance(); // consume closing quote
257                    return Ok(Token::QuotedString(result));
258                }
259                Some('\\') => {
260                    self.advance(); // consume backslash
261                    match self.peek() {
262                        Some('"') => {
263                            result.push('"');
264                            self.advance();
265                        }
266                        Some('\\') => {
267                            result.push('\\');
268                            self.advance();
269                        }
270                        Some('n') => {
271                            result.push('\n');
272                            self.advance();
273                        }
274                        Some('r') => {
275                            result.push('\r');
276                            self.advance();
277                        }
278                        Some('t') => {
279                            result.push('\t');
280                            self.advance();
281                        }
282                        Some(ch) => {
283                            return Err(LnmpError::InvalidEscapeSequence {
284                                sequence: format!("\\{}", ch),
285                                line: start_line,
286                                column: start_column,
287                            });
288                        }
289                        None => {
290                            return Err(LnmpError::UnterminatedString {
291                                line: start_line,
292                                column: start_column,
293                            });
294                        }
295                    }
296                }
297                Some(ch) => {
298                    result.push(ch);
299                    self.advance();
300                }
301            }
302        }
303    }
304
305    /// Reads an unquoted string (identifier)
306    fn read_unquoted_string(&mut self) -> Result<Token, LnmpError> {
307        let mut result = String::new();
308
309        while let Some(ch) = self.peek() {
310            if is_unquoted_char(ch) {
311                result.push(ch);
312                self.advance();
313            } else {
314                break;
315            }
316        }
317
318        Ok(Token::UnquotedString(result))
319    }
320
321    /// Reads a type hint identifier after a colon
322    fn read_type_hint(&mut self) -> Result<Token, LnmpError> {
323        let mut hint = String::new();
324
325        // Read lowercase letters for type hint (i, f, b, s, sa)
326        while let Some(ch) = self.peek() {
327            if ch.is_ascii_lowercase() {
328                hint.push(ch);
329                self.advance();
330            } else {
331                break;
332            }
333        }
334
335        if hint.is_empty() {
336            // If no type hint follows colon, return just the colon token
337            Ok(Token::Colon)
338        } else {
339            Ok(Token::TypeHint(hint))
340        }
341    }
342}
343
/// Checks if a character is valid in an unquoted string.
///
/// Accepts ASCII letters and digits plus '_', '-', and '.'.
fn is_unquoted_char(ch: char) -> bool {
    matches!(ch, 'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '-' | '.')
}
348
/// Builds a best-effort mapping from sanitized byte offsets to original byte offsets.
///
/// Walks both strings in parallel, one char at a time. When the current
/// sanitized char matches the current original char they are assumed to be
/// the same char; on a mismatch the original char is assumed to have been
/// removed by sanitization and is skipped. Because only one original char is
/// skipped per mismatch, runs of removed chars are resolved gradually over
/// the following iterations — hence "best-effort".
///
/// The returned map has one entry per sanitized byte (plus one for the
/// end-of-input offset), so every byte of a multi-byte char maps to the same
/// original offset.
pub fn build_span_map(sanitized: &str, original: &str) -> SpanMap {
    // One slot per sanitized byte, plus a final slot for the EOF offset.
    let mut map = vec![0; sanitized.len() + 1];
    let iter_s = sanitized.char_indices();
    let mut iter_o = original.char_indices().peekable();
    let mut prev_o = 0;

    for (idx_s, ch_s) in iter_s {
        if let Some(&(idx_o, ch_o)) = iter_o.peek() {
            if ch_s == ch_o {
                // Chars line up: this sanitized char came from idx_o.
                prev_o = idx_o;
                iter_o.next();
            } else {
                // Mismatch: treat the original char as removed and point just
                // past it.
                prev_o = idx_o + ch_o.len_utf8();
                iter_o.next();
            }
        }

        // Every byte of this (possibly multi-byte) char maps to prev_o.
        for b in 0..ch_s.len_utf8() {
            let pos = idx_s + b;
            if pos < map.len() {
                map[pos] = prev_o;
            }
        }
    }

    // Map the end-of-sanitized offset to the first unconsumed original
    // offset (or the last mapped one if the original is exhausted).
    let last = iter_o.peek().map(|&(idx, _)| idx).unwrap_or(prev_o);
    if let Some(entry) = map.last_mut() {
        *entry = last;
    }

    SpanMap {
        sanitized_to_original: map,
    }
}
384
/// Computes the 1-based (line, column) position of byte `offset` in `text`.
///
/// Columns count characters, not bytes. Offsets past the end of `text`
/// resolve to the position just after the last character.
fn compute_line_col(text: &str, offset: usize) -> (usize, usize) {
    let mut pos = (1, 1);
    let preceding = text
        .char_indices()
        .take_while(|&(idx, _)| idx < offset)
        .map(|(_, ch)| ch);
    for ch in preceding {
        match ch {
            '\n' => pos = (pos.0 + 1, 1),
            _ => pos.1 += 1,
        }
    }
    pos
}
401
// Unit tests for the lexer: token dispatch, string escapes, position
// tracking, error reporting, and span-map alignment.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_single_character_tokens() {
        let mut lexer = Lexer::new("=;[],");
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(lexer.next_token().unwrap(), Token::Semicolon);
        assert_eq!(lexer.next_token().unwrap(), Token::LeftBracket);
        assert_eq!(lexer.next_token().unwrap(), Token::RightBracket);
        assert_eq!(lexer.next_token().unwrap(), Token::Comma);
        assert_eq!(lexer.next_token().unwrap(), Token::Eof);
    }

    #[test]
    fn test_field_prefix() {
        // 'F' followed by a digit lexes as FieldPrefix + Number.
        let mut lexer = Lexer::new("F12");
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("12".to_string()));
    }

    #[test]
    fn test_numbers() {
        let mut lexer = Lexer::new("123 -456 3.14 -2.5");
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("123".to_string())
        );
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("-456".to_string())
        );
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("3.14".to_string())
        );
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("-2.5".to_string())
        );
    }

    #[test]
    fn test_quoted_string() {
        let mut lexer = Lexer::new(r#""hello world""#);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::QuotedString("hello world".to_string())
        );
    }

    #[test]
    fn test_quoted_string_with_escapes() {
        // Each supported escape (\" \n \t \\) decodes to its literal char.
        let mut lexer = Lexer::new(r#""hello \"world\"""#);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::QuotedString("hello \"world\"".to_string())
        );

        let mut lexer = Lexer::new(r#""line1\nline2""#);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::QuotedString("line1\nline2".to_string())
        );

        let mut lexer = Lexer::new(r#""tab\there""#);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::QuotedString("tab\there".to_string())
        );

        let mut lexer = Lexer::new(r#""back\\slash""#);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::QuotedString("back\\slash".to_string())
        );
    }

    #[test]
    fn test_unquoted_string() {
        let mut lexer = Lexer::new("hello_world test-123 file.txt");
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::UnquotedString("hello_world".to_string())
        );
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::UnquotedString("test-123".to_string())
        );
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::UnquotedString("file.txt".to_string())
        );
    }

    #[test]
    fn test_newline() {
        let mut lexer = Lexer::new("F1=2\nF3=4");
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("1".to_string()));
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("2".to_string()));
        assert_eq!(lexer.next_token().unwrap(), Token::Newline);
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
    }

    #[test]
    fn test_whitespace_handling() {
        // Spaces/tabs between tokens are skipped silently.
        let mut lexer = Lexer::new("F1  =  2");
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("1".to_string()));
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("2".to_string()));
    }

    #[test]
    fn test_comment_skipping() {
        // Comments are now represented as Hash token followed by content
        let mut lexer = Lexer::new("# This is a comment\nF1=2");
        assert_eq!(lexer.next_token().unwrap(), Token::Hash);
        // The rest of the comment line is tokenized as unquoted strings/numbers
        // Parser will handle skipping until newline
    }

    #[test]
    fn test_comment_at_end() {
        let mut lexer = Lexer::new("F1=2\n# Comment at end");
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("1".to_string()));
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("2".to_string()));
        assert_eq!(lexer.next_token().unwrap(), Token::Newline);
        assert_eq!(lexer.next_token().unwrap(), Token::Hash);
        // Parser will handle skipping the rest of the comment
    }

    #[test]
    fn test_string_array() {
        let mut lexer = Lexer::new(r#"["admin","dev"]"#);
        assert_eq!(lexer.next_token().unwrap(), Token::LeftBracket);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::QuotedString("admin".to_string())
        );
        assert_eq!(lexer.next_token().unwrap(), Token::Comma);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::QuotedString("dev".to_string())
        );
        assert_eq!(lexer.next_token().unwrap(), Token::RightBracket);
    }

    #[test]
    fn test_complete_field_assignment() {
        let mut lexer = Lexer::new("F12=14532");
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("12".to_string()));
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("14532".to_string())
        );
    }

    #[test]
    fn test_position_tracking() {
        let mut lexer = Lexer::new("F1=2\nF3=4");
        assert_eq!(lexer.position(), (1, 1));
        lexer.next_token().unwrap(); // F
        assert_eq!(lexer.position(), (1, 2));
        lexer.next_token().unwrap(); // 1
        lexer.next_token().unwrap(); // =
        lexer.next_token().unwrap(); // 2
        lexer.next_token().unwrap(); // \n
        assert_eq!(lexer.position(), (2, 1));
    }

    #[test]
    fn test_unterminated_string_error() {
        let mut lexer = Lexer::new(r#""unterminated"#);
        let result = lexer.next_token();
        assert!(result.is_err());
        match result {
            Err(LnmpError::UnterminatedString { .. }) => {}
            Err(LnmpError::UnexpectedEof { .. }) => {}
            _ => panic!("Expected UnterminatedString or UnexpectedEof error"),
        }
    }

    #[test]
    fn test_invalid_escape_sequence() {
        let mut lexer = Lexer::new(r#""\x""#);
        let result = lexer.next_token();
        assert!(result.is_err());
        match result {
            Err(LnmpError::InvalidEscapeSequence { sequence, .. }) => {
                assert_eq!(sequence, "\\x");
            }
            _ => panic!("Expected InvalidEscapeSequence error"),
        }
    }

    #[test]
    fn test_f_as_unquoted_string() {
        // 'F' not followed by a digit is an ordinary identifier.
        let mut lexer = Lexer::new("False");
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::UnquotedString("False".to_string())
        );
    }

    #[test]
    fn test_multiline_record() {
        let input = "F12=14532\nF7=1\nF23=[\"admin\",\"dev\"]";
        let mut lexer = Lexer::new(input);

        // F12=14532
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("12".to_string()));
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("14532".to_string())
        );
        assert_eq!(lexer.next_token().unwrap(), Token::Newline);

        // F7=1
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("7".to_string()));
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("1".to_string()));
        assert_eq!(lexer.next_token().unwrap(), Token::Newline);
    }

    #[test]
    fn test_inline_record() {
        let input = r#"F12=14532;F7=1;F23=["admin","dev"]"#;
        let mut lexer = Lexer::new(input);

        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("12".to_string()));
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("14532".to_string())
        );
        assert_eq!(lexer.next_token().unwrap(), Token::Semicolon);
    }

    #[test]
    fn test_type_hint_tokenization() {
        // Test F12:i=14532 format
        let mut lexer = Lexer::new("F12:i=14532");
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("12".to_string()));
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("i".to_string())
        );
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("14532".to_string())
        );
    }

    #[test]
    fn test_all_type_hint_codes() {
        // Test integer type hint
        let mut lexer = Lexer::new(":i");
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("i".to_string())
        );

        // Test float type hint
        let mut lexer = Lexer::new(":f");
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("f".to_string())
        );

        // Test boolean type hint
        let mut lexer = Lexer::new(":b");
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("b".to_string())
        );

        // Test string type hint
        let mut lexer = Lexer::new(":s");
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("s".to_string())
        );

        // Test string array type hint
        let mut lexer = Lexer::new(":sa");
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("sa".to_string())
        );
    }

    #[test]
    fn test_invalid_type_hint_codes() {
        // Invalid type hint should be tokenized as TypeHint with invalid value
        // (the lexer does not validate hint codes; the parser does).
        let mut lexer = Lexer::new(":xyz");
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("xyz".to_string())
        );

        // Colon followed by non-letter should return just Colon
        let mut lexer = Lexer::new(":=");
        assert_eq!(lexer.next_token().unwrap(), Token::Colon);
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);

        // Colon followed by number should return just Colon
        let mut lexer = Lexer::new(":123");
        assert_eq!(lexer.next_token().unwrap(), Token::Colon);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("123".to_string())
        );
    }

    #[test]
    fn test_type_hint_in_complete_field() {
        // Test complete field with type hint: F7:b=1
        let mut lexer = Lexer::new("F7:b=1");
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("7".to_string()));
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("b".to_string())
        );
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("1".to_string()));

        // Test float with type hint: F5:f=3.14
        let mut lexer = Lexer::new("F5:f=3.14");
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("5".to_string()));
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("f".to_string())
        );
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("3.14".to_string())
        );

        // Test string with type hint: F10:s="test"
        let mut lexer = Lexer::new(r#"F10:s="test""#);
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("10".to_string()));
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("s".to_string())
        );
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::QuotedString("test".to_string())
        );

        // Test string array with type hint: F23:sa=["admin","dev"]
        let mut lexer = Lexer::new(r#"F23:sa=["admin","dev"]"#);
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("23".to_string()));
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("sa".to_string())
        );
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(lexer.next_token().unwrap(), Token::LeftBracket);
    }

    #[test]
    fn test_type_hint_with_whitespace() {
        // Type hints should work with whitespace around them
        let mut lexer = Lexer::new("F12 :i =14532");
        assert_eq!(lexer.next_token().unwrap(), Token::FieldPrefix);
        assert_eq!(lexer.next_token().unwrap(), Token::Number("12".to_string()));
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::TypeHint("i".to_string())
        );
        assert_eq!(lexer.next_token().unwrap(), Token::Equals);
        assert_eq!(
            lexer.next_token().unwrap(),
            Token::Number("14532".to_string())
        );
    }

    #[test]
    fn test_span_map_alignment_with_trimmed_space() {
        let original = "F1= hello";
        let sanitized = "F1=hello";
        let map = build_span_map(sanitized, original);
        // Sanitized offset at 'h' should point to original offset 4 (after trimmed space)
        assert_eq!(map.map_offset(3), 4);
    }
}
/// Lexer input ownership model
#[derive(Debug, Clone)]
enum Input<'a> {
    /// Zero-copy input borrowed from the caller.
    Borrowed(&'a str),
    /// Input owned by the lexer (produced by a sanitization pass).
    Owned {
        /// Sanitized text the lexer actually scans.
        sanitized: String,
        /// Pre-sanitization text, kept for diagnostics when available.
        original: Option<String>,
        /// Byte-offset map from `sanitized` into `original`, when available.
        span_map: Option<SpanMap>,
    },
}
819
820impl<'a> Input<'a> {
821    fn as_str(&self) -> &str {
822        match self {
823            Input::Borrowed(s) => s,
824            Input::Owned { sanitized, .. } => sanitized.as_str(),
825        }
826    }
827
828    fn original_str(&self) -> Option<&str> {
829        match self {
830            Input::Borrowed(_) => None,
831            Input::Owned { original, .. } => original.as_deref(),
832        }
833    }
834
835    fn span_map(&self) -> Option<&SpanMap> {
836        match self {
837            Input::Borrowed(_) => None,
838            Input::Owned { span_map, .. } => span_map.as_ref(),
839        }
840    }
841}
842
/// Maps positions between sanitized and original input for lenient mode
#[derive(Debug, Clone)]
pub struct SpanMap {
    /// offset in sanitized -> offset in original
    pub sanitized_to_original: Vec<usize>,
}

impl SpanMap {
    /// Maps a sanitized byte offset to a best-effort original byte offset.
    ///
    /// Offsets beyond the recorded table are returned unchanged.
    pub fn map_offset(&self, sanitized: usize) -> usize {
        self.sanitized_to_original
            .get(sanitized)
            .copied()
            .unwrap_or(sanitized)
    }
}
859}