1use crate::token::*;
2use std::fmt;
3
/// Errors produced while tokenizing source text.
///
/// Each variant carries the `Span` where the problem was detected.
#[derive(Debug, Clone, PartialEq)]
pub enum LexerError {
    /// A character that starts no known token.
    UnexpectedCharacter(char, Span),
    /// A string literal that hit end of input (or a raw newline) before its closing quote.
    UnterminatedString(Span),
    /// A `/* ... */` comment that was never closed.
    UnterminatedBlockComment(Span),
}
11
12impl fmt::Display for LexerError {
13 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
14 match self {
15 LexerError::UnexpectedCharacter(ch, span) => {
16 write!(f, "Unexpected character '{ch}' at {span}")
17 }
18 LexerError::UnterminatedString(span) => {
19 write!(f, "Unterminated string at {span}")
20 }
21 LexerError::UnterminatedBlockComment(span) => {
22 write!(f, "Unterminated block comment at {span}")
23 }
24 }
25 }
26}
27
// Marker impl: the `Debug` derive and `Display` above supply everything `Error` needs.
impl std::error::Error for LexerError {}
29
/// Hand-written lexer that turns source text into a stream of `Token`s.
pub struct Lexer {
    /// Source decoded to chars for simple index-based scanning.
    source: Vec<char>,
    /// Current index into `source` (in chars).
    pos: usize,
    /// Current byte offset into the original `&str` (used for spans).
    byte_pos: usize,
    /// 1-based current line.
    line: usize,
    /// 1-based current column.
    column: usize,
}
38
39impl Lexer {
40 pub fn new(source: &str) -> Self {
41 Self {
42 source: source.chars().collect(),
43 pos: 0,
44 byte_pos: 0,
45 line: 1,
46 column: 1,
47 }
48 }
49
    /// Tokenizes the input, keeping line/block comment tokens in the output.
    pub fn tokenize_with_comments(&mut self) -> Result<Vec<Token>, LexerError> {
        self.tokenize_inner(true)
    }
54
    /// Tokenizes the input, discarding comment tokens.
    pub fn tokenize(&mut self) -> Result<Vec<Token>, LexerError> {
        self.tokenize_inner(false)
    }
58
    /// Main scanning loop: dispatches on the current character to the
    /// specialized readers and collects tokens until end of input.
    ///
    /// Dispatch order matters: comments are checked before `/` could match
    /// `/=`, and two-char operators before single-char tokens. A trailing
    /// `Eof` token is always appended.
    fn tokenize_inner(&mut self, keep_comments: bool) -> Result<Vec<Token>, LexerError> {
        let mut tokens = Vec::new();

        while self.pos < self.source.len() {
            let ch = self.source[self.pos];

            // Inline whitespace (including '\r' of CRLF) is skipped silently.
            if ch == ' ' || ch == '\t' || ch == '\r' {
                self.advance();
                continue;
            }

            // Backslash-newline: line continuation — no Newline token emitted.
            if ch == '\\' && self.peek() == Some('\n') {
                self.advance();
                self.advance();
                self.line += 1;
                self.column = 1;
                continue;
            }

            // Newlines are significant tokens in this language.
            if ch == '\n' {
                let start = self.byte_pos;
                tokens.push(Token::with_span(
                    TokenKind::Newline,
                    Span::with_offsets(start, start + 1, self.line, self.column),
                ));
                self.advance();
                self.line += 1;
                self.column = 1;
                continue;
            }

            // Comments must be checked before '/' falls through to the
            // operator tables (e.g. `/=`).
            if ch == '/' {
                if self.peek() == Some('/') {
                    let tok = self.read_line_comment();
                    if keep_comments {
                        tokens.push(tok);
                    }
                    continue;
                }
                if self.peek() == Some('*') {
                    let tok = self.read_block_comment()?;
                    if keep_comments {
                        tokens.push(tok);
                    }
                    continue;
                }
            }

            if ch == '"' {
                tokens.push(self.read_string()?);
                continue;
            }

            if ch.is_ascii_digit() {
                tokens.push(self.read_number());
                continue;
            }

            if ch.is_alphabetic() || ch == '_' {
                tokens.push(self.read_identifier());
                continue;
            }

            // Two-char operators take priority over their single-char prefixes.
            if let Some(tok) = self.try_two_char_op() {
                tokens.push(tok);
                continue;
            }

            if let Some(kind) = self.single_char_token(ch) {
                let start = self.byte_pos;
                let col = self.column;
                self.advance();
                tokens.push(Token::with_span(
                    kind,
                    Span::with_offsets(start, self.byte_pos, self.line, col),
                ));
                continue;
            }

            // Nothing matched: report the offending character with its span.
            return Err(LexerError::UnexpectedCharacter(
                ch,
                Span::with_offsets(
                    self.byte_pos,
                    self.byte_pos + ch.len_utf8(),
                    self.line,
                    self.column,
                ),
            ));
        }

        tokens.push(self.token(TokenKind::Eof));
        Ok(tokens)
    }
161
162 fn peek(&self) -> Option<char> {
163 self.source.get(self.pos + 1).copied()
164 }
165
166 fn advance(&mut self) {
167 if self.pos < self.source.len() {
168 self.byte_pos += self.source[self.pos].len_utf8();
169 }
170 self.pos += 1;
171 self.column += 1;
172 }
173
174 fn token(&self, kind: TokenKind) -> Token {
175 Token::with_span(
176 kind,
177 Span::with_offsets(self.byte_pos, self.byte_pos, self.line, self.column),
178 )
179 }
180
181 fn read_line_comment(&mut self) -> Token {
182 let start_byte = self.byte_pos;
183 let start_col = self.column;
184 let start_line = self.line;
185 self.advance(); self.advance(); let mut text = String::new();
188 while self.pos < self.source.len() && self.source[self.pos] != '\n' {
189 text.push(self.source[self.pos]);
190 self.advance();
191 }
192 Token::with_span(
193 TokenKind::LineComment(text),
194 Span::with_offsets(start_byte, self.byte_pos, start_line, start_col),
195 )
196 }
197
198 fn read_block_comment(&mut self) -> Result<Token, LexerError> {
199 let start_byte = self.byte_pos;
200 let start = Span::with_offsets(self.byte_pos, self.byte_pos, self.line, self.column);
201 self.advance(); self.advance(); let mut text = String::new();
204 let mut depth = 1;
205 while self.pos < self.source.len() && depth > 0 {
206 if self.source[self.pos] == '/' && self.peek() == Some('*') {
207 depth += 1;
208 text.push('/');
209 text.push('*');
210 self.advance();
211 self.advance();
212 } else if self.source[self.pos] == '*' && self.peek() == Some('/') {
213 depth -= 1;
214 if depth > 0 {
215 text.push('*');
216 text.push('/');
217 }
218 self.advance();
219 self.advance();
220 } else if self.source[self.pos] == '\n' {
221 text.push('\n');
222 self.byte_pos += self.source[self.pos].len_utf8();
223 self.line += 1;
224 self.column = 1;
225 self.pos += 1;
226 } else {
227 text.push(self.source[self.pos]);
228 self.advance();
229 }
230 }
231 if depth > 0 {
232 return Err(LexerError::UnterminatedBlockComment(start));
233 }
234 Ok(Token::with_span(
235 TokenKind::BlockComment(text),
236 Span::with_offsets(start_byte, self.byte_pos, self.line, start.column),
237 ))
238 }
239
    /// Reads a `"..."` string literal starting at the current quote.
    ///
    /// Handles three shapes:
    /// * `"""..."""` multi-line strings (delegated to `read_multi_line_string`),
    /// * `"...${expr}..."` interpolated strings, returned as `InterpolatedString`
    ///   segments,
    /// * plain `StringLiteral`s with `\n`, `\t`, `\\`, `\"` and `\$` escapes
    ///   (unknown escapes are kept verbatim, backslash included).
    ///
    /// Errors with `UnterminatedString` on EOF or a raw newline inside a
    /// single-line string, and `UnexpectedCharacter('}')` for an empty `${}`.
    fn read_string(&mut self) -> Result<Token, LexerError> {
        let start_byte = self.byte_pos;
        let start = Span::with_offsets(start_byte, start_byte, self.line, self.column);

        // Three consecutive quotes open a multi-line string.
        if self.pos + 2 < self.source.len()
            && self.source[self.pos + 1] == '"'
            && self.source[self.pos + 2] == '"'
        {
            return self.read_multi_line_string(start_byte, start);
        }

        self.advance(); // consume the opening quote
        let mut value = String::new();
        let mut segments: Vec<StringSegment> = Vec::new();
        let mut has_interpolation = false;

        while self.pos < self.source.len() {
            let ch = self.source[self.pos];
            if ch == '"' {
                self.advance(); // consume the closing quote
                if has_interpolation {
                    // Flush any trailing literal text as a final segment.
                    if !value.is_empty() {
                        segments.push(StringSegment::Literal(value));
                    }
                    return Ok(Token::with_span(
                        TokenKind::InterpolatedString(segments),
                        Span::with_offsets(start_byte, self.byte_pos, start.line, start.column),
                    ));
                }
                return Ok(Token::with_span(
                    TokenKind::StringLiteral(value),
                    Span::with_offsets(start_byte, self.byte_pos, start.line, start.column),
                ));
            }

            // `${expr}` interpolation: capture the raw expression text.
            if ch == '$' && self.peek() == Some('{') {
                has_interpolation = true;
                if !value.is_empty() {
                    // Flush literal text accumulated so far, reusing the buffer.
                    segments.push(StringSegment::Literal(std::mem::take(&mut value)));
                }
                self.advance(); // '$'
                self.advance(); // '{'
                let mut depth = 1;
                let mut expr = String::new();
                // Collect until the matching '}', balancing nested braces.
                while self.pos < self.source.len() && depth > 0 {
                    if self.source[self.pos] == '{' {
                        depth += 1;
                    }
                    if self.source[self.pos] == '}' {
                        depth -= 1;
                        if depth == 0 {
                            break;
                        }
                    }
                    expr.push(self.source[self.pos]);
                    self.advance();
                }
                if self.pos >= self.source.len() {
                    return Err(LexerError::UnterminatedString(start));
                }
                self.advance(); // consume the closing '}'
                if expr.trim().is_empty() {
                    // `${}` with nothing (or only whitespace) inside is rejected.
                    return Err(LexerError::UnexpectedCharacter(
                        '}',
                        Span::with_offsets(
                            self.byte_pos,
                            self.byte_pos + 1,
                            self.line,
                            self.column,
                        ),
                    ));
                }
                segments.push(StringSegment::Expression(expr));
                continue;
            }

            // Escape sequences.
            if ch == '\\' {
                self.advance();
                if self.pos >= self.source.len() {
                    return Err(LexerError::UnterminatedString(start));
                }
                let escaped = self.source[self.pos];
                match escaped {
                    'n' => value.push('\n'),
                    't' => value.push('\t'),
                    '\\' => value.push('\\'),
                    '"' => value.push('"'),
                    '$' => value.push('$'),
                    _ => {
                        // Unknown escape: keep it verbatim, backslash and all.
                        value.push('\\');
                        value.push(escaped);
                    }
                }
                self.advance();
                continue;
            }

            // Single-line strings may not contain raw newlines.
            if ch == '\n' {
                return Err(LexerError::UnterminatedString(start));
            }

            value.push(ch);
            self.advance();
        }
        Err(LexerError::UnterminatedString(start))
    }
349
    /// Reads the body of a `"""..."""` multi-line string; the caller has
    /// verified the three opening quotes but not consumed them.
    ///
    /// A newline immediately after the opener is dropped, and the common
    /// leading indentation of the content lines is stripped from the result.
    fn read_multi_line_string(
        &mut self,
        start_byte: usize,
        start: Span,
    ) -> Result<Token, LexerError> {
        // Consume the opening `"""`.
        self.advance();
        self.advance();
        self.advance();
        // A newline directly after the opener is not part of the value.
        if self.pos < self.source.len() && self.source[self.pos] == '\n' {
            self.advance();
            self.line += 1;
            self.column = 1;
        }

        let mut value = String::new();
        while self.pos < self.source.len() {
            if self.source[self.pos] == '"'
                && self.pos + 2 < self.source.len()
                && self.source[self.pos + 1] == '"'
                && self.source[self.pos + 2] == '"'
            {
                // Consume the closing `"""` and de-indent the collected body.
                self.advance();
                self.advance();
                self.advance();
                let stripped = strip_common_indent(&value);
                return Ok(Token::with_span(
                    TokenKind::StringLiteral(stripped),
                    Span::with_offsets(start_byte, self.byte_pos, start.line, start.column),
                ));
            }
            if self.source[self.pos] == '\n' {
                // Keep the newline in the value and update line bookkeeping.
                value.push('\n');
                self.advance();
                self.line += 1;
                self.column = 1;
            } else {
                value.push(self.source[self.pos]);
                self.advance();
            }
        }
        Err(LexerError::UnterminatedString(start))
    }
394
    /// Reads an integer, float, or duration literal starting at the current
    /// digit.
    ///
    /// A `.` is only treated as a decimal point when followed by a digit, so
    /// `42.method` lexes as `42` `.` `method`. Integer literals may carry a
    /// duration suffix (`ms`/`s`/`m`/`h`); integers that overflow `i64` fall
    /// back to a float literal.
    fn read_number(&mut self) -> Token {
        let start_byte = self.byte_pos;
        let start_col = self.column;
        let mut num_str = String::new();
        let mut is_float = false;

        while self.pos < self.source.len()
            && (self.source[self.pos].is_ascii_digit() || self.source[self.pos] == '.')
        {
            if self.source[self.pos] == '.' {
                if is_float {
                    // A second dot ends the literal (e.g. `1.2.3`).
                    break;
                }
                if let Some(next) = self.source.get(self.pos + 1) {
                    if !next.is_ascii_digit() {
                        // Not a decimal point — leave the dot for the caller.
                        break;
                    }
                } else {
                    // Dot at end of input — not part of the number.
                    break;
                }
                is_float = true;
            }
            num_str.push(self.source[self.pos]);
            self.advance();
        }

        // Only integer literals may take a duration suffix.
        if !is_float {
            if let Some(ms) = self.try_duration_suffix(&num_str) {
                return Token::with_span(
                    TokenKind::DurationLiteral(ms),
                    Span::with_offsets(start_byte, self.byte_pos, self.line, start_col),
                );
            }
        }

        if is_float {
            let n: f64 = num_str.parse().unwrap_or(0.0);
            Token::with_span(
                TokenKind::FloatLiteral(n),
                Span::with_offsets(start_byte, self.byte_pos, self.line, start_col),
            )
        } else {
            match num_str.parse::<i64>() {
                Ok(n) => Token::with_span(
                    TokenKind::IntLiteral(n),
                    Span::with_offsets(start_byte, self.byte_pos, self.line, start_col),
                ),
                Err(_) => {
                    // Doesn't fit in i64: represent as a float instead.
                    let n: f64 = num_str.parse().unwrap_or(0.0);
                    Token::with_span(
                        TokenKind::FloatLiteral(n),
                        Span::with_offsets(start_byte, self.byte_pos, self.line, start_col),
                    )
                }
            }
        }
    }
455
    /// Attempts to consume a duration suffix (`ms`, `s`, `m`, `h`) after an
    /// integer literal, returning the value converted to milliseconds.
    ///
    /// `ms` must be checked before the bare `m`/`s` forms. The one-letter
    /// suffixes only match when not followed by another alphanumeric
    /// character, so e.g. `5min` stays an integer plus an identifier.
    /// Returns `None` (consuming nothing) when no suffix matches or the
    /// digits don't fit in `u64`.
    fn try_duration_suffix(&mut self, num_str: &str) -> Option<u64> {
        let n: u64 = num_str.parse().ok()?;
        if self.pos < self.source.len() {
            let ch = self.source[self.pos];
            // "ms": value is already in milliseconds.
            if ch == 'm' && self.source.get(self.pos + 1) == Some(&'s') {
                self.advance();
                self.advance();
                return Some(n);
            }
            // "s": seconds.
            if ch == 's'
                && self
                    .source
                    .get(self.pos + 1)
                    .is_none_or(|c| !c.is_alphanumeric())
            {
                self.advance();
                return Some(n * 1000);
            }
            // "m": minutes (the `*c != 's'` guard is defensive; the "ms" case
            // above already consumed that form).
            if ch == 'm'
                && self
                    .source
                    .get(self.pos + 1)
                    .is_none_or(|c| !c.is_alphanumeric() && *c != 's')
            {
                self.advance();
                return Some(n * 60 * 1000);
            }
            // "h": hours.
            if ch == 'h'
                && self
                    .source
                    .get(self.pos + 1)
                    .is_none_or(|c| !c.is_alphanumeric())
            {
                self.advance();
                return Some(n * 60 * 60 * 1000);
            }
        }
        None
    }
497
    /// Reads an identifier or keyword starting at the current character.
    ///
    /// Consumes alphanumerics and underscores, then maps the result against
    /// the keyword table; anything unrecognized becomes `Identifier`.
    fn read_identifier(&mut self) -> Token {
        let start_byte = self.byte_pos;
        let start_col = self.column;
        let mut ident = String::new();

        while self.pos < self.source.len() {
            let ch = self.source[self.pos];
            if ch.is_alphanumeric() || ch == '_' {
                ident.push(ch);
                self.advance();
            } else {
                break;
            }
        }

        // Keyword table: exact matches become dedicated token kinds.
        let kind = match ident.as_str() {
            "pipeline" => TokenKind::Pipeline,
            "extends" => TokenKind::Extends,
            "override" => TokenKind::Override,
            "let" => TokenKind::Let,
            "var" => TokenKind::Var,
            "if" => TokenKind::If,
            "else" => TokenKind::Else,
            "for" => TokenKind::For,
            "in" => TokenKind::In,
            "match" => TokenKind::Match,
            "retry" => TokenKind::Retry,
            "parallel" => TokenKind::Parallel,
            "parallel_map" => TokenKind::ParallelMap,
            "parallel_settle" => TokenKind::ParallelSettle,
            "return" => TokenKind::Return,
            "import" => TokenKind::Import,
            "true" => TokenKind::True,
            "false" => TokenKind::False,
            "nil" => TokenKind::Nil,
            "try" => TokenKind::Try,
            "catch" => TokenKind::Catch,
            "throw" => TokenKind::Throw,
            "finally" => TokenKind::Finally,
            "fn" => TokenKind::Fn,
            "spawn" => TokenKind::Spawn,
            "while" => TokenKind::While,
            "type" => TokenKind::TypeKw,
            "enum" => TokenKind::Enum,
            "struct" => TokenKind::Struct,
            "interface" => TokenKind::Interface,
            "pub" => TokenKind::Pub,
            "from" => TokenKind::From,
            "thru" => TokenKind::Thru,
            "upto" => TokenKind::Upto,
            "guard" => TokenKind::Guard,
            "ask" => TokenKind::Ask,
            "deadline" => TokenKind::Deadline,
            "yield" => TokenKind::Yield,
            "mutex" => TokenKind::Mutex,
            "break" => TokenKind::Break,
            "continue" => TokenKind::Continue,
            "select" => TokenKind::Select,
            "impl" => TokenKind::Impl,
            _ => TokenKind::Identifier(ident),
        };

        Token::with_span(
            kind,
            Span::with_offsets(start_byte, self.byte_pos, self.line, start_col),
        )
    }
565
    /// Attempts to read a two-character operator at the current position.
    ///
    /// Returns `None` (consuming nothing) when the current pair is not in the
    /// table, letting the caller fall through to single-char tokens.
    fn try_two_char_op(&mut self) -> Option<Token> {
        if self.pos >= self.source.len() {
            return None;
        }
        let ch = self.source[self.pos];
        let next = self.peek()?;

        let kind = match (ch, next) {
            ('=', '=') => TokenKind::Eq,
            ('!', '=') => TokenKind::Neq,
            ('&', '&') => TokenKind::And,
            ('|', '|') => TokenKind::Or,
            ('|', '>') => TokenKind::Pipe,
            ('?', '?') => TokenKind::NilCoal,
            ('?', '.') => TokenKind::QuestionDot,
            ('-', '>') => TokenKind::Arrow,
            ('-', '=') => TokenKind::MinusAssign,
            ('+', '=') => TokenKind::PlusAssign,
            ('*', '=') => TokenKind::StarAssign,
            ('/', '=') => TokenKind::SlashAssign,
            ('%', '=') => TokenKind::PercentAssign,
            ('<', '=') => TokenKind::Lte,
            ('>', '=') => TokenKind::Gte,
            _ => return None,
        };

        let start_byte = self.byte_pos;
        let col = self.column;
        self.advance();
        self.advance();
        Some(Token::with_span(
            kind,
            Span::with_offsets(start_byte, self.byte_pos, self.line, col),
        ))
    }
601
    /// Maps a single character to its token kind, or `None` if the character
    /// starts no token. Does not consume input — the caller advances.
    fn single_char_token(&self, ch: char) -> Option<TokenKind> {
        match ch {
            '{' => Some(TokenKind::LBrace),
            '}' => Some(TokenKind::RBrace),
            '(' => Some(TokenKind::LParen),
            ')' => Some(TokenKind::RParen),
            '[' => Some(TokenKind::LBracket),
            ']' => Some(TokenKind::RBracket),
            ',' => Some(TokenKind::Comma),
            ':' => Some(TokenKind::Colon),
            ';' => Some(TokenKind::Semicolon),
            '.' => Some(TokenKind::Dot),
            '=' => Some(TokenKind::Assign),
            '!' => Some(TokenKind::Not),
            '+' => Some(TokenKind::Plus),
            '-' => Some(TokenKind::Minus),
            '*' => Some(TokenKind::Star),
            '/' => Some(TokenKind::Slash),
            '%' => Some(TokenKind::Percent),
            '<' => Some(TokenKind::Lt),
            '>' => Some(TokenKind::Gt),
            '?' => Some(TokenKind::Question),
            '|' => Some(TokenKind::Bar),
            _ => None,
        }
    }
628}
629
/// Removes the largest common leading run of spaces/tabs from every
/// non-blank line of `text`, blanks out whitespace-only lines, and drops a
/// single trailing newline. Text with no non-blank lines is returned as-is.
fn strip_common_indent(text: &str) -> String {
    let indent_of = |line: &str| line.chars().take_while(|c| *c == ' ' || *c == '\t').count();

    // Smallest indent across lines that carry content; None means every
    // line is blank.
    let min_indent = text
        .split('\n')
        .filter(|line| !line.trim().is_empty())
        .map(indent_of)
        .min();
    let Some(min_indent) = min_indent else {
        return text.to_string();
    };

    if min_indent == 0 {
        return text.strip_suffix('\n').unwrap_or(text).to_string();
    }

    // Content lines all start with >= min_indent single-byte whitespace
    // chars, so byte-slicing at min_indent is safe.
    let stripped = text
        .split('\n')
        .map(|line| {
            if line.trim().is_empty() {
                ""
            } else {
                &line[min_indent.min(line.len())..]
            }
        })
        .collect::<Vec<&str>>()
        .join("\n");

    stripped.strip_suffix('\n').unwrap_or(&stripped).to_string()
}
664
#[cfg(test)]
mod tests {
    use super::*;

    // Keywords map to their dedicated token kinds, in input order.
    #[test]
    fn test_keywords() {
        let mut lexer = Lexer::new("pipeline let var if else for in");
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(tokens[0].kind, TokenKind::Pipeline);
        assert_eq!(tokens[1].kind, TokenKind::Let);
        assert_eq!(tokens[2].kind, TokenKind::Var);
        assert_eq!(tokens[3].kind, TokenKind::If);
        assert_eq!(tokens[4].kind, TokenKind::Else);
        assert_eq!(tokens[5].kind, TokenKind::For);
        assert_eq!(tokens[6].kind, TokenKind::In);
    }

    // "parallel_map" must win over its "parallel" prefix.
    #[test]
    fn test_parallel_map_keyword() {
        let mut lexer = Lexer::new("parallel_map parallel");
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(tokens[0].kind, TokenKind::ParallelMap);
        assert_eq!(tokens[1].kind, TokenKind::Parallel);
    }

    // Integer vs. float literal discrimination.
    #[test]
    fn test_numbers() {
        let mut lexer = Lexer::new("42 3.14");
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(tokens[0].kind, TokenKind::IntLiteral(42));
        #[allow(clippy::approx_constant)]
        let expected = 3.14;
        assert_eq!(tokens[1].kind, TokenKind::FloatLiteral(expected));
    }

    // Plain string literal, quotes stripped.
    #[test]
    fn test_string() {
        let mut lexer = Lexer::new(r#""hello world""#);
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(
            tokens[0].kind,
            TokenKind::StringLiteral("hello world".into())
        );
    }

    // ${...} splits the string into literal/expression/literal segments.
    #[test]
    fn test_interpolated_string() {
        let mut lexer = Lexer::new(r#""hello ${name}!""#);
        let tokens = lexer.tokenize().unwrap();
        if let TokenKind::InterpolatedString(segs) = &tokens[0].kind {
            assert_eq!(segs.len(), 3);
            assert_eq!(segs[0], StringSegment::Literal("hello ".into()));
            assert_eq!(segs[1], StringSegment::Expression("name".into()));
            assert_eq!(segs[2], StringSegment::Literal("!".into()));
        } else {
            panic!("Expected interpolated string");
        }
    }

    // Two-character operators take priority over single-char prefixes.
    #[test]
    fn test_two_char_operators() {
        let mut lexer = Lexer::new("== != && || |> ?? -> <= >=");
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(tokens[0].kind, TokenKind::Eq);
        assert_eq!(tokens[1].kind, TokenKind::Neq);
        assert_eq!(tokens[2].kind, TokenKind::And);
        assert_eq!(tokens[3].kind, TokenKind::Or);
        assert_eq!(tokens[4].kind, TokenKind::Pipe);
        assert_eq!(tokens[5].kind, TokenKind::NilCoal);
        assert_eq!(tokens[6].kind, TokenKind::Arrow);
        assert_eq!(tokens[7].kind, TokenKind::Lte);
        assert_eq!(tokens[8].kind, TokenKind::Gte);
    }

    // Nested block comments are consumed entirely (tokenize drops them).
    #[test]
    fn test_block_comments() {
        let mut lexer = Lexer::new("/* outer /* nested */ still */ 42");
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(tokens[0].kind, TokenKind::IntLiteral(42));
    }

    // Line comment runs to end of line; the newline itself is still a token.
    #[test]
    fn test_line_comment() {
        let mut lexer = Lexer::new("42 // comment\n43");
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(tokens[0].kind, TokenKind::IntLiteral(42));
        assert_eq!(tokens[1].kind, TokenKind::Newline);
        assert_eq!(tokens[2].kind, TokenKind::IntLiteral(43));
    }

    // Newlines are significant tokens.
    #[test]
    fn test_newlines() {
        let mut lexer = Lexer::new("a\nb");
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(tokens[0].kind, TokenKind::Identifier("a".into()));
        assert_eq!(tokens[1].kind, TokenKind::Newline);
        assert_eq!(tokens[2].kind, TokenKind::Identifier("b".into()));
    }

    // Backslash-newline continues a line: no Newline token is emitted.
    #[test]
    fn test_backslash_continuation() {
        let mut lexer = Lexer::new("10 \\\n- 3");
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(tokens[0].kind, TokenKind::IntLiteral(10));
        assert_eq!(tokens[1].kind, TokenKind::Minus);
        assert_eq!(tokens[2].kind, TokenKind::IntLiteral(3));
        // 10, Minus, 3, Eof — and no Newline.
        assert_eq!(tokens.len(), 4);
    }

    // Characters outside every table produce UnexpectedCharacter.
    #[test]
    fn test_unexpected_character() {
        let mut lexer = Lexer::new("@");
        let err = lexer.tokenize().unwrap_err();
        assert!(matches!(err, LexerError::UnexpectedCharacter('@', _)));
    }

    // A string that hits EOF before its closing quote errors out.
    #[test]
    fn test_unterminated_string() {
        let mut lexer = Lexer::new("\"unterminated");
        let err = lexer.tokenize().unwrap_err();
        assert!(matches!(err, LexerError::UnterminatedString(_)));
    }

    // \n, \t and \\ escapes are decoded inside string literals.
    #[test]
    fn test_escape_sequences() {
        let mut lexer = Lexer::new(r#""a\nb\t\\""#);
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(tokens[0].kind, TokenKind::StringLiteral("a\nb\t\\".into()));
    }

    // A dot not followed by a digit ends the number (method-call syntax).
    #[test]
    fn test_number_then_dot_method() {
        let mut lexer = Lexer::new("42.method");
        let tokens = lexer.tokenize().unwrap();
        assert_eq!(tokens[0].kind, TokenKind::IntLiteral(42));
        assert_eq!(tokens[1].kind, TokenKind::Dot);
        assert_eq!(tokens[2].kind, TokenKind::Identifier("method".into()));
    }
}