1use core::{
2 fmt::{self, Display},
3 ops::Range,
4};
5
6use logos::{Lexer, Logos};
7
/// Identifier of a source buffer within the parser session.
pub type SourceId = usize;

/// A source location: which source a span came from and the byte span
/// within it.
///
/// Derives `Eq` alongside `Hash` (previously only `PartialEq` + `Hash`),
/// so `Loc` can be used as a `HashMap`/`HashSet` key; `Range<usize>`
/// supports both, so the derive is free and backward-compatible.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Loc {
    /// Index of the originating source.
    pub source: SourceId,
    /// Byte range within that source.
    pub span: Range<usize>,
}
15use crate::parser::errors::{ParserError, SpannedParserError};
16use crate::values::core_values::{
17 decimal::typed_decimal::DecimalTypeVariant,
18 integer::typed_integer::IntegerTypeVariant,
19};
20use strum::IntoEnumIterator;
21
22impl Loc {
23 pub fn new(source: SourceId, span: core::ops::Range<usize>) -> Self {
24 Self { source, span }
25 }
26}
27fn extract_line_doc(lex: &mut Lexer<Token>) -> String {
28 lex.slice()[3..].to_owned()
29}
30
/// A numeric literal decomposed into its textual components.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct NumericLiteralParts {
    // Mantissa text of the literal (underscore separators removed by the
    // `From` impls).
    pub integer_part: String,
    // Exponent text without the `e`/`E` marker but including an explicit
    // sign, when the literal has one (e.g. "+5" for "1e+5").
    pub exponent_part: Option<String>,
    // Trailing type-suffix text (e.g. "f32", "u8"), when present.
    pub variant_part: Option<String>,
}
61impl Display for NumericLiteralParts {
62 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
63 core::write!(f, "{}", self.integer_part)?;
64 if let Some(exp) = &self.exponent_part {
65 core::write!(f, "e{}", exp)?;
66 }
67 if let Some(var) = &self.variant_part {
68 core::write!(f, "{}", var)?;
69 }
70 Ok(())
71 }
72}
73impl From<&str> for NumericLiteralParts {
74 fn from(value: &str) -> Self {
75 NumericLiteralParts::from(value.to_string())
76 }
77}
78impl From<String> for NumericLiteralParts {
79 fn from(mut value: String) -> Self {
80 value = value.replace('_', "");
81 let chars: Vec<char> = value.chars().collect();
82 let mut i = 0;
83
84 let start = i;
86 while i < chars.len() && chars[i].is_ascii_digit() {
87 i += 1;
88 }
89 let mut integer_part = value[start..i].to_string();
90
91 if i < chars.len() && chars[i] == '.' {
93 i += 1; let frac_start = i;
95 while i < chars.len() && chars[i].is_ascii_digit() {
96 i += 1;
97 }
98 integer_part = value[frac_start..i].to_string();
99 }
100
101 let mut exponent_part = None;
103 if i < chars.len() && (chars[i] == 'e' || chars[i] == 'E') {
104 let exp_start = i;
105 i += 1; if i < chars.len() && (chars[i] == '+' || chars[i] == '-') {
109 i += 1;
110 }
111
112 let digits_start = i;
113 while i < chars.len() && chars[i].is_ascii_digit() {
114 i += 1;
115 }
116
117 if digits_start < i {
119 exponent_part = Some(value[exp_start + 1..i].to_string());
120 } else {
121 i = exp_start;
123 }
124 }
125
126 let variant_part = if i < chars.len() {
128 Some(value[i..].to_string())
129 } else {
130 None
131 };
132 NumericLiteralParts {
133 exponent_part,
134 integer_part,
135 variant_part,
136 }
137 }
138}
139
/// All lexical tokens of the language, produced by the `logos`-derived
/// lexer. A lexing failure carries the offending byte span (`Range<usize>`)
/// as its error value.
#[derive(Logos, Debug, Clone, PartialEq, Eq)]
#[logos(error = Range<usize>)]
// Skipped without producing tokens: line comments, block comments (regex
// handles `*` runs inside the comment body), and whitespace.
#[logos(skip r"//[^\n]*")]
#[logos(skip r"/\*[^*]*\*+(?:[^/*][^*]*\*+)*/")]
#[logos(skip r"[ \n\t\r\f]+")]
#[rustfmt::skip]
pub enum Token {
    // `///` doc line; callback strips the three leading slashes.
    #[regex(r"///[^\n]*", extract_line_doc)]
    LineDoc(String),

    // `#!...` shebang line, kept verbatim.
    #[regex(r"#![^\n]*", allocated_string)]
    Shebang(String),

    // Brackets and delimiters.
    #[token("(")] LeftParen,
    #[token(")")] RightParen,
    #[token("[")] LeftBracket,
    #[token("]")] RightBracket,
    #[token("{")] LeftCurly,
    #[token("}")] RightCurly,
    #[token("<")] LeftAngle,
    #[token(">")] RightAngle,

    // Punctuation and arithmetic operators.
    #[token("%")] Percent,
    #[token("+")] Plus,
    #[token("-")] Minus,
    #[token("*")] Star,
    #[token("^")] Caret,
    #[token("/")] Slash,
    #[token(":")] Colon,
    #[token("::")] DoubleColon,
    #[token(":::")] TripleColon,
    #[token(";")] Semicolon,
    #[token(",")] Comma,
    #[token("=")] Assign,

    // Compound operators.
    #[token("++")] Increment,
    #[token("--")] Decrement,
    #[token("&&")] DoubleAnd,
    #[token("||")] DoublePipe,
    #[token("+=")] AddAssign,
    #[token("-=")] SubAssign,
    #[token("*=")] MulAssign,
    #[token("/=")] DivAssign,
    #[token("%=")] ModAssign,

    // Arrows, ranges and miscellaneous sigils.
    #[token("->")] Arrow,
    #[token("=>")] FatArrow,
    #[token("..")] Range,
    #[token("..=")] RangeInclusive,
    #[token("...")] Spread,
    #[token("@")] At,
    #[token("&")] Ampersand,
    #[token("|")] Pipe,
    #[token("!")] Exclamation,
    #[token("`")] Backtick,

    // Comparison operators. NOTE(review): `==`/`!=` are named the
    // "structural" pair and `===`/`!==` the plain Equal/NotEqual pair —
    // their exact semantics are defined by the parser, not here.
    #[token("<=")] LessEqual,
    #[token(">=")] GreaterEqual,
    #[token("!=")] NotStructuralEqual,
    #[token("!==")] NotEqual,
    #[token("==")] StructuralEqual,
    #[token("===")] Equal,
    #[token("is")] Is,
    #[token("matches")] Matches,
    #[token("and")] And,
    #[token("or")] Or,

    // Literal keywords.
    #[token("true")] True,
    #[token("false")] False,
    #[token("null")] Null,

    // Declaration and control-flow keywords.
    #[token("?")] Placeholder,
    #[token("const")] Const,
    #[token("var")] Variable,
    #[token("mut")] Mutable,
    #[token("&mut")] MutRef,
    #[token("function")] Function,
    #[token("procedure")] Procedure,
    #[token("if")] If,
    #[token("else")] Else,

    // Type-related keywords.
    #[token("type")] TypeDeclaration,
    #[token("type<")] TypeExpressionStart,
    #[token("typealias")] TypeAlias,

    #[token(".")]
    Dot,
    // `$` followed by exactly 6, 10, or 52 hex digits.
    #[regex(r"\$(?:[0-9a-fA-F]{6}|[0-9a-fA-F]{10}|[0-9a-fA-F]{52})", allocated_string)] PointerAddress(String),

    // Special numeric values; both capitalizations are accepted.
    #[regex(r"[Ii]nfinity")] Infinity,
    #[regex(r"(?:nan|NaN)")] Nan,

    // Decimal integer: digits with optional `_` separators, optional
    // exponent, optional integer/decimal type suffix. Kept as raw text.
    #[regex(r"\d+[_\d]*(?:[eE][+-]?\d+[_\d]*)?(?:u8|u16|u32|u64|u128|i8|i16|i32|i64|i128|ubig|ibig|f32|f64|dbig)?", allocated_string)]
    IntegerLiteral(String),

    // 0b/0B binary integer with optional `_` separators and type suffix;
    // the callback splits off the suffix into the variant.
    #[regex(
        r"0[bB][01][01_]*(?:u8|u16|u32|u64|u128|i8|i16|i32|i64|i128|ubig|ibig)?",
        parse_typed_literal::<IntegerTypeVariant>
    )]
    BinaryIntegerLiteral(IntegerWithVariant),

    // 0o/0O octal integer with optional `_` separators and type suffix.
    #[regex(
        r"0[oO][0-7][0-7_]*(?:u8|u16|u32|u64|u128|i8|i16|i32|i64|i128|ubig|ibig)?",
        parse_typed_literal::<IntegerTypeVariant>
    )]
    OctalIntegerLiteral(IntegerWithVariant),

    // 0x/0X hexadecimal integer with optional `_` separators and type suffix.
    #[regex(
        r"0[xX][0-9a-fA-F][0-9a-fA-F_]*(?:u8|u16|u32|u64|u128|i8|i16|i32|i64|i128|ubig|ibig)?",
        parse_typed_literal::<IntegerTypeVariant>
    )]
    HexadecimalIntegerLiteral(IntegerWithVariant),

    // Decimal number: requires a '.'; optional exponent and decimal suffix.
    #[regex(r"\d+[_\d]*\.[_\d]+(?:[eE][+-]?\d+[_\d]*)?(?:f32|f64|dbig)?", parse_typed_literal::<DecimalTypeVariant>)]
    DecimalLiteral(LiteralWithVariant<DecimalTypeVariant>),

    // Fraction literal `numerator/denominator`, both plain integers.
    #[regex(r"\d+[_\d]*/\d+[_\d]*", allocated_string)]
    FractionLiteral(String),

    // Single- or double-quoted string with backslash escapes; allows an
    // optional lowercase-alphanumeric prefix (kept in the raw text).
    #[regex(r#"[a-z0-9]*("(?:\\.|[^\\"])*"|'(?:\\.|[^\\'])*')"#, allocated_string)] StringLiteral(String),


    // `@name`, `@+name`, or `@@name` endpoint reference.
    #[regex(r"@[+@]?[a-zA-Z0-9_-]+", allocated_string)] Endpoint(String),

    // Unicode identifier; priority 1 so keyword tokens win on exact match.
    #[regex(r"[_\p{L}][_\p{L}\p{N}]*", allocated_string, priority=1)] Identifier(String),

    // Positional slot `#0`, `#1`, ...
    #[regex(r"#\d+", allocated_string)] Slot(String),

    // Named slot `#name`.
    #[regex(r"#[_a-zA-Z][_a-zA-Z0-9]*", allocated_string)] NamedSlot(String),
}
289
impl Token {
    /// Returns the fixed source text for tokens whose lexeme is a single
    /// constant string (punctuation and keywords).
    ///
    /// Returns `None` for data-carrying tokens (literals, identifiers,
    /// doc lines, slots). For `Infinity` and `Nan`, which accept more than
    /// one spelling, the lowercase form is returned.
    pub fn as_const_str(&self) -> Option<&str> {
        match self {
            Token::LeftParen => Some("("),
            Token::RightParen => Some(")"),
            Token::LeftBracket => Some("["),
            Token::RightBracket => Some("]"),
            Token::LeftCurly => Some("{"),
            Token::RightCurly => Some("}"),
            Token::LeftAngle => Some("<"),
            Token::RightAngle => Some(">"),
            Token::Percent => Some("%"),
            Token::Plus => Some("+"),
            Token::Minus => Some("-"),
            Token::Slash => Some("/"),
            Token::Colon => Some(":"),
            Token::DoubleColon => Some("::"),
            Token::TripleColon => Some(":::"),
            Token::Semicolon => Some(";"),
            Token::Dot => Some("."),
            Token::Comma => Some(","),
            Token::Assign => Some("="),
            Token::Increment => Some("++"),
            Token::Decrement => Some("--"),
            Token::DoubleAnd => Some("&&"),
            Token::DoublePipe => Some("||"),
            Token::AddAssign => Some("+="),
            Token::SubAssign => Some("-="),
            Token::MulAssign => Some("*="),
            Token::DivAssign => Some("/="),
            Token::ModAssign => Some("%="),
            Token::Arrow => Some("->"),
            Token::FatArrow => Some("=>"),
            Token::Range => Some(".."),
            Token::RangeInclusive => Some("..="),
            Token::Spread => Some("..."),
            Token::At => Some("@"),
            Token::Ampersand => Some("&"),
            Token::Pipe => Some("|"),
            Token::Backtick => Some("`"),
            Token::LessEqual => Some("<="),
            Token::GreaterEqual => Some(">="),
            Token::NotStructuralEqual => Some("!="),
            Token::NotEqual => Some("!=="),
            Token::StructuralEqual => Some("=="),
            Token::Equal => Some("==="),
            Token::Is => Some("is"),
            Token::True => Some("true"),
            Token::False => Some("false"),
            Token::Null => Some("null"),
            Token::Placeholder => Some("?"),
            Token::Const => Some("const"),
            Token::Variable => Some("var"),
            Token::Mutable => Some("mut"),
            Token::Function => Some("function"),
            Token::Procedure => Some("procedure"),
            Token::Infinity => Some("infinity"),
            Token::Nan => Some("nan"),
            Token::TypeDeclaration => Some("type"),
            Token::TypeExpressionStart => Some("type<"),
            Token::TypeAlias => Some("typealias"),
            Token::MutRef => Some("&mut"),
            Token::And => Some("and"),
            Token::Or => Some("or"),
            Token::Star => Some("*"),
            Token::Exclamation => Some("!"),
            Token::Caret => Some("^"),
            Token::Matches => Some("matches"),
            Token::If => Some("if"),
            Token::Else => Some("else"),
            // Data-carrying variants have no single constant lexeme.
            _ => None,
        }
    }
}
364
/// A token together with the byte span it was lexed from.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SpannedToken {
    // The lexed token.
    pub token: Token,
    // Byte range of the token's text within the source string.
    pub span: Range<usize>,
}
370
371pub fn get_spanned_tokens_from_source(
372 src: &str,
373) -> (Vec<SpannedToken>, Vec<SpannedParserError>) {
374 let lexer = Token::lexer(src);
375 let (oks, errs): (Vec<_>, Vec<_>) = lexer
376 .spanned()
377 .map(|(tok, span)| {
378 tok.map(|token| SpannedToken { token, span })
379 .map_err(|span| SpannedParserError {
380 error: ParserError::InvalidToken,
381 span,
382 })
383 })
384 .partition(Result::is_ok);
385
386 let tokens = oks.into_iter().map(Result::unwrap).collect();
387 let errors = errs.into_iter().map(Result::unwrap_err).collect();
388
389 (tokens, errors)
390}
391
/// Integer literal text plus its optional integer type suffix.
pub type IntegerWithVariant = LiteralWithVariant<IntegerTypeVariant>;
/// Decimal literal text plus its optional decimal type suffix.
pub type DecimalWithVariant = LiteralWithVariant<DecimalTypeVariant>;
394
/// A numeric literal's raw text paired with its parsed type suffix, if any.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LiteralWithVariant<T> {
    // Literal text with the type suffix removed (underscores preserved).
    pub value: String,
    // The type suffix that was split off the end, when one matched.
    pub variant: Option<T>,
}
400
401impl Display for LiteralWithVariant<IntegerTypeVariant> {
402 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
403 if let Some(variant) = &self.variant {
404 core::write!(f, "{}{}", self.value, variant.as_ref())
405 } else {
406 core::write!(f, "{}", self.value)
407 }
408 }
409}
410
// Capability bundle for numeric type-suffix enums: iterable over all
// variants (strum), cheaply copyable, and convertible to the suffix text.
trait TypeSuffix: IntoEnumIterator + Copy + AsRef<str> {}
// Blanket impl: anything with the three capabilities is a TypeSuffix.
impl<T> TypeSuffix for T where T: IntoEnumIterator + Copy + AsRef<str> {}
413
414fn parse_typed_literal<T: TypeSuffix>(
415 lex: &mut Lexer<Token>,
416) -> LiteralWithVariant<T> {
417 let mut variant = None;
418 let mut number_part = lex.slice();
419 for suffix in T::iter() {
420 let suffix_str = suffix.as_ref();
421 if number_part.ends_with(suffix_str) {
422 variant = Some(suffix);
423 number_part = &number_part[..number_part.len() - suffix_str.len()];
424 break;
425 }
426 }
427 LiteralWithVariant {
428 value: number_part.to_string(),
429 variant,
430 }
431}
432
433impl fmt::Display for Token {
434 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
435 core::write!(f, "{self:?}")
436 }
437}
438
439#[inline(always)]
440fn allocated_string(lex: &mut Lexer<Token>) -> String {
441 lex.slice().to_owned()
442}
443
444fn numeric_parts(lex: &mut Lexer<Token>) -> NumericLiteralParts {
445 let s = lex.slice().replace('_', "");
447 NumericLiteralParts::from(s)
448}
449
#[cfg(test)]
mod tests {
    // Lexer unit tests: one token kind per test, driven through
    // `Token::lexer` on small literal inputs.
    use super::*;
    use logos::Logos;

    #[test]
    fn integer() {
        let mut lexer = Token::lexer("42");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("42".to_string()))
        );
    }

    #[test]
    fn integer_type() {
        let mut lexer = Token::lexer("42u8");
        let res = lexer.next().unwrap();
        if let Ok(Token::IntegerLiteral(literal)) = res {
            assert_eq!(literal, "42u8".to_string());
            assert_eq!(format!("{}", literal), "42u8".to_string());
        } else {
            // Fixed message: the matched variant is IntegerLiteral (raw
            // String), not a "DecimalIntegerLiteral with variant".
            core::panic!("Expected IntegerLiteral for \"42u8\"");
        }

        let mut lexer = Token::lexer("42");
        let res = lexer.next().unwrap();
        if let Ok(Token::IntegerLiteral(literal)) = res {
            assert_eq!(literal, "42".to_string());
        } else {
            core::panic!("Expected IntegerLiteral for \"42\"");
        }
    }

    #[test]
    fn integer_with_type() {
        let mut lexer = Token::lexer("42u8");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("42u8".to_string()))
        );

        let mut lexer = Token::lexer("42i32");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("42i32".to_string()))
        );

        let mut lexer = Token::lexer("42ibig");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("42ibig".to_string()))
        );
    }

    #[test]
    fn decimal() {
        let mut lexer = Token::lexer("3.14");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::DecimalLiteral(DecimalWithVariant {
                value: "3.14".to_string(),
                variant: None
            }))
        );
    }

    #[test]
    fn infinity() {
        let mut lexer = Token::lexer("Infinity");
        assert_eq!(lexer.next().unwrap(), Ok(Token::Infinity));

        let mut lexer = Token::lexer("infinity");
        assert_eq!(lexer.next().unwrap(), Ok(Token::Infinity));

        // Signs are separate tokens; the literal itself is unsigned.
        let lexer = Token::lexer("-Infinity");
        assert_eq!(
            lexer.map(Result::unwrap).collect::<Vec<_>>(),
            vec![Token::Minus, Token::Infinity]
        );

        let lexer = Token::lexer("+Infinity");
        assert_eq!(
            lexer.map(Result::unwrap).collect::<Vec<_>>(),
            vec![Token::Plus, Token::Infinity]
        );
    }

    #[test]
    fn nan() {
        let mut lexer = Token::lexer("NaN");
        assert_eq!(lexer.next().unwrap(), Ok(Token::Nan));

        let mut lexer = Token::lexer("nan");
        assert_eq!(lexer.next().unwrap(), Ok(Token::Nan));

        let lexer = Token::lexer("-NaN");
        assert_eq!(
            lexer.map(Result::unwrap).collect::<Vec<_>>(),
            vec![Token::Minus, Token::Nan]
        );

        let lexer = Token::lexer("+NaN");
        assert_eq!(
            lexer.map(Result::unwrap).collect::<Vec<_>>(),
            vec![Token::Plus, Token::Nan]
        );
    }

    #[test]
    fn fraction() {
        let mut lexer = Token::lexer("1/2");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::FractionLiteral("1/2".to_string()))
        );

        let mut lexer = Token::lexer("3/4");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::FractionLiteral("3/4".to_string()))
        );

        // Underscore separators are kept in the raw fraction text.
        let mut lexer = Token::lexer("51_11/6");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::FractionLiteral("51_11/6".to_string()))
        );
    }

    #[test]
    fn hexadecimal_integer() {
        let mut lexer = Token::lexer("0x1A3F");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::HexadecimalIntegerLiteral(IntegerWithVariant {
                value: "0x1A3F".to_string(),
                variant: None
            }))
        );

        let mut lexer = Token::lexer("0XABC");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::HexadecimalIntegerLiteral(IntegerWithVariant {
                value: "0XABC".to_string(),
                variant: None
            }))
        );
    }

    #[test]
    fn binary_integer() {
        let mut lexer = Token::lexer("0b1010");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::BinaryIntegerLiteral(IntegerWithVariant {
                value: "0b1010".to_string(),
                variant: None
            }))
        );

        let mut lexer = Token::lexer("0B1101");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::BinaryIntegerLiteral(IntegerWithVariant {
                value: "0B1101".to_string(),
                variant: None
            }))
        );
    }

    #[test]
    fn octal_integer() {
        let mut lexer = Token::lexer("0o755");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::OctalIntegerLiteral(IntegerWithVariant {
                value: "0o755".to_string(),
                variant: None
            }))
        );

        let mut lexer = Token::lexer("0O644");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::OctalIntegerLiteral(IntegerWithVariant {
                value: "0O644".to_string(),
                variant: None
            }))
        );
    }

    #[test]
    fn integers_with_underscores() {
        let mut lexer = Token::lexer("1_000");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("1_000".to_string()))
        );

        let mut lexer = Token::lexer("0xFF_FF_FF");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::HexadecimalIntegerLiteral(IntegerWithVariant {
                value: "0xFF_FF_FF".to_string(),
                variant: None
            }))
        );

        let mut lexer = Token::lexer("0b1010_1010");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::BinaryIntegerLiteral(IntegerWithVariant {
                value: "0b1010_1010".to_string(),
                variant: None
            }))
        );
    }

    #[test]
    fn decimals() {
        // Suffix is split into the variant; the rest stays raw text.
        let mut lexer = Token::lexer("10.234_567e-8f32");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::DecimalLiteral(DecimalWithVariant {
                value: "10.234_567e-8".to_string(),
                variant: Some(DecimalTypeVariant::F32)
            }))
        );

        let mut lexer = Token::lexer("10.234_567e-8");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::DecimalLiteral(DecimalWithVariant {
                value: "10.234_567e-8".to_string(),
                variant: None
            }))
        );

        let mut lexer = Token::lexer("0.123_456");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::DecimalLiteral(DecimalWithVariant {
                value: "0.123_456".to_string(),
                variant: None
            }))
        );

        let mut lexer = Token::lexer("1_000.123_456f32");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::DecimalLiteral(DecimalWithVariant {
                value: "1_000.123_456".to_string(),
                variant: Some(DecimalTypeVariant::F32)
            }))
        );
    }

    #[test]
    fn add() {
        let mut lexer = Token::lexer("1 + 2");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("1".to_string()))
        );
        assert_eq!(lexer.next().unwrap(), Ok(Token::Plus));
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("2".to_string()))
        );
        assert_eq!(lexer.next(), None);
    }

    #[test]
    fn invalid_fraction() {
        // A decimal numerator is not a fraction: "42.4/3" lexes as
        // decimal, slash, integer.
        let mut lexer = Token::lexer("42.4/3");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::DecimalLiteral(DecimalWithVariant {
                value: "42.4".to_string(),
                variant: None
            }))
        );
        assert_eq!(lexer.next().unwrap(), Ok(Token::Slash));
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("3".to_string()))
        );
    }

    #[test]
    fn equality() {
        let mut lexer = Token::lexer("a == b");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::Identifier("a".to_string()))
        );
        assert_eq!(lexer.next().unwrap(), Ok(Token::StructuralEqual));
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::Identifier("b".to_string()))
        );
        assert_eq!(lexer.next(), None);
    }

    #[test]
    fn is_operator() {
        let mut lexer = Token::lexer("a is b");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::Identifier("a".to_string()))
        );
        assert_eq!(lexer.next().unwrap(), Ok(Token::Is));
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::Identifier("b".to_string()))
        );
        assert_eq!(lexer.next(), None);
    }

    #[test]
    fn matches_operator() {
        let mut lexer = Token::lexer("a matches b");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::Identifier("a".to_string()))
        );
        assert_eq!(lexer.next().unwrap(), Ok(Token::Matches));
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::Identifier("b".to_string()))
        );
        assert_eq!(lexer.next(), None);
    }

    #[test]
    fn line_doc() {
        let mut lexer = Token::lexer("/// This is a line doc\n42");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::LineDoc(" This is a line doc".to_string()))
        );
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("42".to_string()))
        );
        assert_eq!(lexer.next(), None);
    }

    #[test]
    fn divide() {
        let mut lexer = Token::lexer("8 /2");
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("8".to_string()))
        );
        assert_eq!(lexer.next().unwrap(), Ok(Token::Slash));
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntegerLiteral("2".to_string()))
        );
        assert_eq!(lexer.next(), None);
    }
}