1use crate::ast::{self, Span};
2use crate::lex;
3use crate::lex::TokenKind::*;
4use crate::lex::{Token, TokenKind};
5use crate::types::Type;
6
/// Maximum number of elements allowed in a tuple.
pub const MAX_TUPLE_ARITY: usize = 5;
/// Field names used when lowering tuple elements to named accessors.
pub const TUPLE_FIELDS: &[&str] = &["First", "Second", "Third", "Fourth", "Fifth"];
// Cap on expression nesting depth; see `enter_recursion`.
const MAX_DEPTH: u32 = 64;
// Stop recording parse errors after this many to avoid error spam.
const MAX_ERRORS: usize = 50;
// Upper bound on token lookahead in disambiguation scans.
const MAX_LOOKAHEAD: usize = 256;
12
13mod annotations;
14mod control_flow;
15mod definitions;
16mod directives;
17mod error;
18mod expressions;
19mod identifiers;
20mod patterns;
21mod pratt;
22
23pub use error::ParseError;
24
/// Outcome of a parse: the top-level expressions plus any errors collected
/// along the way (the parser recovers and continues after errors).
pub struct ParseResult {
    /// Successfully parsed top-level items.
    pub ast: Vec<ast::Expression>,
    /// Errors accumulated during lexing and/or parsing.
    pub errors: Vec<ParseError>,
}
29
impl ParseResult {
    /// True when at least one error was recorded; the AST may still be
    /// partially populated thanks to error recovery.
    pub fn failed(&self) -> bool {
        !self.errors.is_empty()
    }
}
35
/// Recursive-descent parser over a pre-lexed token stream.
pub struct Parser<'source> {
    // Token cursor over the lexer output.
    stream: TokenStream<'source>,
    // Most recently consumed token; used for span ends and newline checks.
    previous_token: Token<'source>,
    pub errors: Vec<ParseError>,
    // File identifier baked into every span this parser produces.
    file_id: u32,
    // True while parsing a control-flow header (e.g. an `if` condition),
    // where `{` is ambiguous between a struct literal and the body block.
    in_control_flow_header: bool,
    // Original source text, used to inspect whitespace between tokens.
    source: &'source str,
    // Current expression nesting depth, bounded by MAX_DEPTH.
    depth: u32,
}
45
46impl<'source> Parser<'source> {
    /// Creates a parser over `tokens`/`source` using file id 0.
    pub fn new(tokens: Vec<Token<'source>>, source: &'source str) -> Parser<'source> {
        Self::with_file_id(tokens, source, 0)
    }
50
    /// Lexes `source` and, if lexing succeeds, parses it.
    ///
    /// Lexer failures short-circuit: the result carries the lexer's errors
    /// and an empty AST.
    pub fn lex_and_parse_file(source: &str, file_id: u32) -> ParseResult {
        let lex_result = lex::Lexer::new(source, file_id).lex();

        if lex_result.failed() {
            return ParseResult {
                ast: vec![],
                errors: lex_result.errors,
            };
        }

        Parser::with_file_id(lex_result.tokens, source, file_id).parse()
    }
63
    /// Builds a parser positioned at the first non-comment token.
    fn with_file_id(
        tokens: Vec<Token<'source>>,
        source: &'source str,
        file_id: u32,
    ) -> Parser<'source> {
        let stream = TokenStream::new(tokens);
        // Seed `previous_token` with the first token so spans computed
        // before anything is consumed still point somewhere sensible.
        let first_token = stream.peek();

        let mut parser = Parser {
            stream,
            previous_token: first_token,
            errors: Default::default(),
            file_id,
            in_control_flow_header: false,
            source,
            depth: 0,
        };

        parser.skip_comments();

        parser
    }
86
    /// Parses the whole stream as a sequence of top-level items, consuming
    /// the parser. Stops early once the error cap is reached.
    pub fn parse(mut self) -> ParseResult {
        let mut top_items = vec![];

        self.skip_comments();

        while !self.at_eof() && !self.too_many_errors() {
            let position = self.position();
            let item = self.parse_top_item();
            // Unit expressions are placeholders (skipped comments, error
            // recovery) and are not kept in the AST.
            if !matches!(item, ast::Expression::Unit { .. }) {
                top_items.push(item);
            }
            self.advance_if(Semicolon);
            // Guarantee forward progress: if nothing was consumed this
            // iteration, step over a token to avoid an infinite loop.
            if self.position() == position {
                self.next();
            }
        }

        ParseResult {
            ast: top_items,
            errors: self.errors,
        }
    }
109
    /// Parses one top-level item: a definition, import, impl block, or — as
    /// a degenerate case — a run of comments, which yields `Unit`.
    pub fn parse_top_item(&mut self) -> ast::Expression {
        // Leading doc comments, with the span of the first one for errors.
        let doc_with_span = self.collect_doc_comments();

        let attributes = self.parse_attributes();

        let pub_token = if self.is(Pub) {
            Some(self.current_token())
        } else {
            None
        };
        let is_public = pub_token.is_some();
        if is_public {
            self.next();
        }

        // `pub impl` is rejected, but parsing continues for recovery.
        if is_public && self.is(Impl) {
            let token = pub_token.unwrap();
            let span = ast::Span::new(self.file_id, token.byte_offset, token.byte_length);
            let error = ParseError::new("Misplaced `pub`", span, "not allowed here")
                .with_parse_code("syntax_error")
                .with_help("Place `pub` on individual methods inside the `impl` block instead");
            self.errors.push(error);
        }

        // Doc comments may only precede these item kinds.
        let is_documentable = matches!(
            self.current_token().kind,
            Enum | Struct | Interface | Function | Const | Var | Type
        );

        if let Some((_, ref span)) = doc_with_span
            && !is_documentable
        {
            self.error_detached_doc_comment(*span);
        }

        let doc = doc_with_span.map(|(text, _)| text);

        let expression = match self.current_token().kind {
            Enum => self.parse_enum_definition(doc, attributes),
            Struct => self.parse_struct_definition(doc, attributes),
            Interface => self.parse_interface_definition(doc),
            Function => self.parse_function(doc, attributes),
            Impl => self.parse_impl_block(),
            Const => self.parse_const_definition(doc),
            Var => self.parse_var_declaration(doc),
            Import => self.parse_import(),
            Type => self.parse_type_alias_with_doc(doc),
            Comment => {
                // A bare comment run parses to Unit, which `parse` discards.
                let start = self.current_token();
                self.skip_comments();
                ast::Expression::Unit {
                    ty: Type::uninferred(),
                    span: self.span_from_tokens(start),
                }
            }
            _ => self.unexpected_token("top_item"),
        };

        if is_public {
            return expression.set_public();
        }

        expression
    }
174
    /// Parses one statement inside a block body.
    ///
    /// Item definitions (enum/struct/type/import/impl/interface) are only
    /// legal at the top level; they are still parsed here for recovery, but
    /// a "misplaced" error is recorded first.
    pub fn parse_block_item(&mut self) -> ast::Expression {
        match self.current_token().kind {
            Enum => {
                self.track_error(
                    "misplaced",
                    "Move this enum definition to the top level of the file.",
                );
                self.parse_enum_definition(None, vec![])
            }
            Struct => {
                self.track_error(
                    "misplaced",
                    "Move this struct definition to the top level of the file.",
                );
                self.parse_struct_definition(None, vec![])
            }
            Type => {
                self.track_error(
                    "misplaced",
                    "Move this type alias to the top level of the file.",
                );
                self.parse_type_alias_with_doc(None)
            }
            Import => {
                self.track_error(
                    "misplaced",
                    "Move this import to the top level of the file.",
                );
                self.parse_import()
            }
            Impl => {
                self.track_error(
                    "misplaced",
                    "Move this `impl` block to the top level of the file.",
                );
                self.parse_impl_block()
            }
            Interface => {
                self.track_error(
                    "misplaced",
                    "Move this interface definition to the top level of the file.",
                );
                self.parse_interface_definition(None)
            }
            // Functions and constants are allowed inside blocks.
            Function => self.parse_function(None, vec![]),
            Const => self.parse_const_definition(None),

            Let => self.parse_let(),
            Return => self.parse_return(),
            For => self.parse_for(),
            While => self.parse_while(),
            Loop => self.parse_loop(),
            Break => self.parse_break(),
            Continue => self.parse_continue(),
            Defer => self.parse_defer(),
            Directive => self.parse_directive(),
            // Anything else is an expression or assignment statement.
            _ => self.parse_assignment(),
        }
    }
234
    /// The token at the cursor, without consuming it.
    fn current_token(&self) -> Token<'source> {
        self.stream.peek()
    }
238
239 fn newline_before_current(&self) -> bool {
240 let prev_end = (self.previous_token.byte_offset + self.previous_token.byte_length) as usize;
241 let curr_start = self.current_token().byte_offset as usize;
242 if prev_end <= curr_start && curr_start <= self.source.len() {
243 return self.source[prev_end..curr_start].contains('\n');
244 }
245 false
246 }
247
    /// Consumes the current token and skips any comments that follow it.
    fn next(&mut self) {
        self.previous_token = self.current_token();
        self.stream.consume();
        self.skip_comments();
    }

    /// Skips over regular comments (doc comments are handled separately by
    /// `collect_doc_comments`).
    fn skip_comments(&mut self) {
        while self.is(Comment) {
            self.previous_token = self.current_token();
            self.stream.consume();
        }
    }
260
261 fn collect_doc_comments(&mut self) -> Option<(std::string::String, ast::Span)> {
262 let mut docs = Vec::new();
263 let mut first_span: Option<ast::Span> = None;
264
265 while self.is(DocComment) {
266 let token = self.current_token();
267 if first_span.is_none() {
268 first_span = Some(self.span_from_token(token));
269 }
270 docs.push(token.text.to_string());
271 self.previous_token = token;
272 self.stream.consume();
273 self.skip_comments();
274 }
275
276 if docs.is_empty() {
277 None
278 } else {
279 Some((docs.join("\n"), first_span.unwrap()))
280 }
281 }
282
    /// After a list element: requires a `,` unless the closing delimiter or
    /// an item boundary follows. On a violation, records one error and
    /// skips tokens until a comma, the closer, or an item boundary
    /// resynchronizes the list.
    fn expect_comma_or(&mut self, closing: TokenKind) {
        if self.is(Comma) || self.is(closing) || self.at_item_boundary() {
            self.advance_if(Comma);
            return;
        }

        self.track_error(
            format!("expected `,` or {}", closing),
            "Add a comma between elements.",
        );

        // Error recovery: drop tokens until the list plausibly resumes.
        loop {
            if self.at_eof() || self.is(Comma) || self.is(closing) || self.at_item_boundary() {
                break;
            }
            self.next();
        }

        self.advance_if(Comma);
    }
303
    /// True when the cursor is at the end-of-file token.
    pub fn at_eof(&self) -> bool {
        self.is(EOF)
    }

    /// True when the cursor is at a range operator (`..` or `..=`).
    fn at_range(&self) -> bool {
        matches!(self.current_token().kind, DotDot | DotDotEqual)
    }

    /// Consumes the current token if it matches; returns whether it did.
    fn advance_if(&mut self, token_kind: TokenKind) -> bool {
        if self.is(token_kind) {
            self.next();
            return true;
        }

        false
    }

    /// True when the current token has the given kind.
    fn is(&self, token_kind: TokenKind) -> bool {
        self.current_token().kind == token_kind
    }

    /// True when the current token differs from the given kind.
    /// At EOF this returns false, so `is_not`-driven loops terminate.
    fn is_not(&self, token_kind: TokenKind) -> bool {
        if self.at_eof() {
            return false;
        }

        self.current_token().kind != token_kind
    }
332
    /// Requires the current token to be `token_kind`, recording an error on
    /// a mismatch, then advances (unless at EOF). Advancing even on a
    /// mismatch keeps the parse moving forward.
    fn ensure(&mut self, token_kind: TokenKind) {
        if self.current_token().kind != token_kind {
            self.track_ensure_error(token_kind);
        }

        if self.at_eof() {
            return;
        }

        self.next();
    }

    /// Forces forward progress in list-parsing loops: if an iteration
    /// consumed nothing and the closer/EOF is not reached, skip one token.
    fn ensure_progress(&mut self, start_position: usize, closing: TokenKind) {
        if self.stream.position == start_position && self.is_not(closing) && !self.at_eof() {
            self.next();
        }
    }
350
    /// Span covering exactly one token.
    fn span_from_token(&self, token: Token<'source>) -> ast::Span {
        ast::Span::new(self.file_id, token.byte_offset, token.byte_length)
    }

    /// Span from the start of `start_token` through the end of the most
    /// recently consumed token.
    fn span_from_tokens(&self, start_token: Token<'source>) -> ast::Span {
        let end_byte_offset = self.previous_token.byte_offset + self.previous_token.byte_length;
        // saturating_sub guards against an end that precedes the start,
        // which can happen after error recovery.
        let byte_length = end_byte_offset.saturating_sub(start_token.byte_offset);

        ast::Span::new(self.file_id, start_token.byte_offset, byte_length)
    }

    /// Span from an explicit byte offset through the end of the most
    /// recently consumed token.
    fn span_from_offset(&self, start_byte_offset: u32) -> ast::Span {
        let end_byte_offset = self.previous_token.byte_offset + self.previous_token.byte_length;
        let byte_length = end_byte_offset.saturating_sub(start_byte_offset);

        ast::Span::new(self.file_id, start_byte_offset, byte_length)
    }
368
    /// Lookahead: decides whether a `<` begins a generic-argument call
    /// (`foo<T>(...)` or `foo<T>.bar(...)`) rather than a less-than
    /// comparison. Presumably invoked with the `<` at lookahead index 0,
    /// since `depth` starts at 1 — TODO confirm at call sites.
    ///
    /// Scans forward (bounded by MAX_LOOKAHEAD) balancing angle brackets.
    fn is_type_args_call(&self) -> bool {
        let mut position = 1;
        let mut depth = 1;

        loop {
            if position > MAX_LOOKAHEAD {
                return false;
            }
            match self.stream.peek_ahead(position).kind {
                LeftAngleBracket => depth += 1,
                // Closing the outermost `<...>`: it is a type-argument list
                // only if a call follows, directly or via one `.method(`.
                RightAngleBracket if depth == 1 => {
                    let next = self.stream.peek_ahead(position + 1).kind;
                    return next == LeftParen
                        || (next == Dot
                            && self.stream.peek_ahead(position + 2).kind == Identifier
                            && self.stream.peek_ahead(position + 3).kind == LeftParen);
                }
                RightAngleBracket => depth -= 1,
                // Skip a balanced parenthesized group wholesale so its
                // contents cannot affect the angle-bracket depth.
                LeftParen => {
                    let mut paren_depth = 1;
                    position += 1;
                    while paren_depth > 0 {
                        if position > MAX_LOOKAHEAD {
                            return false;
                        }
                        match self.stream.peek_ahead(position).kind {
                            LeftParen => paren_depth += 1,
                            RightParen => paren_depth -= 1,
                            EOF => return false,
                            _ => {}
                        }
                        position += 1;
                    }
                    continue;
                }
                // Tokens that cannot appear inside a type-argument list:
                // give up and treat the `<` as a comparison.
                EOF | Plus | Minus | Star | Slash | Percent | EqualDouble | NotEqual
                | AmpersandDouble | PipeDouble | Semicolon | LeftCurlyBrace | RightCurlyBrace
                | LeftSquareBracket | RightSquareBracket => return false,
                _ => {}
            }
            position += 1;
        }
    }
412
413 fn has_block_after_struct(&self) -> bool {
414 let mut depth = 1;
415 let mut i = 1;
416 while depth > 0 {
417 i += 1;
418 if i > MAX_LOOKAHEAD {
419 return false;
420 }
421 let token = self.stream.peek_ahead(i);
422 match token.kind {
423 LeftCurlyBrace => depth += 1,
424 RightCurlyBrace => depth -= 1,
425 EOF => return false,
426 _ => {}
427 }
428 }
429 let after = self.stream.peek_ahead(i + 1);
430 matches!(
431 after.kind,
432 LeftCurlyBrace
433 | RightParen
434 | EqualDouble
435 | NotEqual
436 | LeftAngleBracket
437 | RightAngleBracket
438 | LessThanOrEqual
439 | GreaterThanOrEqual
440 | AmpersandDouble
441 | PipeDouble
442 | Plus
443 | Minus
444 | Star
445 | Slash
446 | Percent
447 )
448 }
449
    /// Heuristic: assumes the cursor is on a `{` that follows an identifier
    /// (the lookahead indices depend on this) and decides whether it opens
    /// a struct literal (`Name { field: ... }`) rather than a block.
    ///
    /// Inside a control-flow header the decision additionally requires an
    /// uppercase type name and a block after the braces, since
    /// `if x == Foo { ... }` is otherwise ambiguous.
    fn is_struct_instantiation(&self) -> bool {
        // Struct literals are always preceded by the type name.
        if self.previous_token.kind != Identifier {
            return false;
        }

        let is_uppercase = self
            .previous_token
            .text
            .starts_with(|c: char| c.is_uppercase());
        let first_ahead = self.stream.peek_ahead(1);

        // `Name { ..base }` — functional-update syntax is unambiguous.
        if first_ahead.kind == DotDot {
            return true;
        }

        // `Name {}` — empty literal; rely on the uppercase convention.
        if first_ahead.kind == RightCurlyBrace {
            if self.in_control_flow_header {
                return is_uppercase && self.has_block_after_struct();
            }
            return is_uppercase;
        }

        if first_ahead.kind == Identifier {
            let second_ahead = self.stream.peek_ahead(2);
            return match second_ahead.kind {
                // `Name { field: ...` — a second colon would mean `::`,
                // which rules out a field initializer.
                Colon => self.stream.peek_ahead(3).kind != Colon,
                // `Name { a, ...` or `Name { a }` — shorthand fields.
                Comma | RightCurlyBrace => {
                    if self.in_control_flow_header {
                        is_uppercase && self.has_block_after_struct()
                    } else {
                        is_uppercase
                    }
                }
                _ => false,
            };
        }

        false
    }
489
    /// Increments the nesting depth, reporting an error and refusing
    /// (returning false) once MAX_DEPTH is reached. Callers must skip the
    /// matching `leave_recursion` when this returns false, or `depth` would
    /// underflow.
    fn enter_recursion(&mut self) -> bool {
        if self.depth >= MAX_DEPTH {
            let span = self.span_from_token(self.current_token());
            self.track_error_at(span, "too deeply nested", "Reduce nesting depth");
            return false;
        }
        self.depth += 1;
        true
    }

    /// Balances a successful `enter_recursion`.
    fn leave_recursion(&mut self) {
        self.depth -= 1;
    }

    /// True once the error cap is hit; parsing loops bail out then.
    fn too_many_errors(&self) -> bool {
        self.errors.len() >= MAX_ERRORS
    }

    /// Byte offset of the current token; used as a progress marker.
    fn position(&self) -> u32 {
        self.current_token().byte_offset
    }
511
    /// Tokens that error recovery can safely resynchronize on: statement
    /// and delimiter boundaries plus item-introducing keywords.
    fn at_sync_point(&self) -> bool {
        matches!(
            self.current_token().kind,
            Semicolon
                | RightCurlyBrace
                | RightParen
                | RightSquareBracket
                | Comma
                | Function
                | Struct
                | Enum
                | Const
                | Impl
                | Interface
                | Type
                | Import
        )
    }

    /// Tokens that may begin an annotation.
    fn can_start_annotation(&self) -> bool {
        matches!(self.current_token().kind, Identifier | Function | LeftParen)
    }

    /// Tokens that begin a new item/statement, marking the end of a
    /// malformed list during recovery.
    fn at_item_boundary(&self) -> bool {
        matches!(
            self.current_token().kind,
            Let | Function | Struct | Enum | Impl | Interface | Type | Const | Import
        )
    }
541
542 fn resync_on_error(&mut self) {
543 if !self.at_eof() {
544 self.next();
545 }
546
547 while !self.at_sync_point() && !self.at_eof() {
548 self.next();
549 }
550 }
551
    /// Records a syntax error spanning the current token.
    fn track_error(
        &mut self,
        label: impl Into<std::string::String>,
        help: impl Into<std::string::String>,
    ) {
        let current = self.current_token();
        let span = ast::Span::new(self.file_id, current.byte_offset, current.byte_length);
        self.track_error_at(span, label, help);
    }

    /// Records a syntax error at an explicit span; silently dropped once
    /// the error cap is reached.
    fn track_error_at(
        &mut self,
        span: ast::Span,
        label: impl Into<std::string::String>,
        help: impl Into<std::string::String>,
    ) {
        if self.too_many_errors() {
            return;
        }
        let error = ParseError::new("Syntax error", span, label.into())
            .with_parse_code("syntax_error")
            .with_help(help.into());

        self.errors.push(error);
    }

    /// Records an `ensure` mismatch, choosing a more specific error code
    /// for the common missing-semicolon and unclosed-block cases.
    fn track_ensure_error(&mut self, expected_token: TokenKind) {
        if self.too_many_errors() {
            return;
        }
        let current = self.current_token();

        let error_code = match expected_token {
            Semicolon => "missing_semicolon",
            RightCurlyBrace => "unclosed_block",
            _ => "unexpected_token",
        };

        let span = ast::Span::new(self.file_id, current.byte_offset, current.byte_length);
        let error = ParseError::new("Syntax error", span, format!("expected {}", expected_token))
            .with_parse_code(error_code);

        self.errors.push(error);
    }
596
    /// Reports a block whose `{` never found its matching `}`, pointing at
    /// the opening brace.
    fn error_unclosed_block(&mut self, open_brace: &Token) {
        let span = ast::Span::new(self.file_id, open_brace.byte_offset, open_brace.byte_length);
        let error = ParseError::new("Unclosed block", span, "opening brace here")
            .with_parse_code("unclosed_block")
            .with_help("Add a closing `}`");

        self.errors.push(error);
    }

    /// Reports a tuple with an unsupported arity (0, 1, or more than
    /// MAX_TUPLE_ARITY elements), tailoring the help text to the case.
    fn error_tuple_arity(&mut self, arity: usize, span: Span) {
        let help = if arity == 0 {
            "Use `()` for unit type".to_string()
        } else if arity == 1 {
            "Use the type directly without wrapping in a tuple".to_string()
        } else {
            "For >5 elements, use a struct with named fields".to_string()
        };

        let error = ParseError::new(
            "Invalid tuple",
            span,
            format!("{}-element tuple not allowed", arity),
        )
        .with_parse_code("tuple_element_count")
        .with_help(help);

        self.errors.push(error);
    }
625
    /// Reports the same field bound twice within one pattern.
    fn error_duplicate_field_in_pattern(
        &mut self,
        field_name: &str,
        first_span: Span,
        second_span: Span,
    ) {
        let error = ParseError::new(
            "Duplicate field",
            first_span,
            format!("first use of `{}`", field_name),
        )
        .with_span_label(second_span, "used again")
        .with_parse_code("duplicate_field_in_pattern")
        .with_help("Remove the duplicate binding");

        self.errors.push(error);
    }

    /// Reports the same parent listed twice on an `impl` block.
    fn error_duplicate_impl_parent(&mut self, first_span: Span, second_span: Span) {
        let error = ParseError::new("Duplicate impl", first_span, "first use")
            .with_span_label(second_span, "used again")
            .with_parse_code("duplicate_impl_parent")
            .with_help("Remove the duplicate parent");

        self.errors.push(error);
    }

    /// Reports a struct field defined twice.
    fn error_duplicate_struct_field(&mut self, name: &str, first_span: Span, second_span: Span) {
        let error = ParseError::new("Duplicate field", first_span, "first defined")
            .with_span_label(second_span, "defined again")
            .with_parse_code("duplicate_struct_field")
            .with_help(format!("Remove the duplicate field `{}`", name));

        self.errors.push(error);
    }

    /// Reports an enum variant defined twice.
    fn error_duplicate_enum_variant(&mut self, name: &str, first_span: Span, second_span: Span) {
        let error = ParseError::new("Duplicate variant", first_span, "first defined")
            .with_span_label(second_span, "defined again")
            .with_parse_code("duplicate_enum_variant")
            .with_help(format!("Remove the duplicate variant `{}`", name));

        self.errors.push(error);
    }

    /// Reports an interface method defined twice.
    fn error_duplicate_interface_method(
        &mut self,
        name: &str,
        first_span: Span,
        second_span: Span,
    ) {
        let error = ParseError::new("Duplicate method", first_span, "first defined")
            .with_span_label(second_span, "defined again")
            .with_parse_code("duplicate_interface_method")
            .with_help(format!("Remove the duplicate method `{}`", name));

        self.errors.push(error);
    }

    /// Reports a float literal used as a match pattern; the help suggests a
    /// guard as the replacement.
    fn error_float_pattern_not_allowed(&mut self, span: Span, float_text: &str) {
        let error = ParseError::new("Invalid pattern", span, "float literal not allowed here")
            .with_parse_code("float_pattern")
            .with_help(format!(
                "Use a guard instead: `x if x == {} =>`",
                float_text
            ));

        self.errors.push(error);
    }

    /// Reports a binding whose name starts with an uppercase letter.
    fn error_uppercase_binding(&mut self, span: Span) {
        let error = ParseError::new("Invalid binding name", span, "uppercase not allowed here")
            .with_parse_code("uppercase_binding")
            .with_help("Lowercase the binding");

        self.errors.push(error);
    }

    /// Reports a doc comment not attached to a documentable item.
    fn error_detached_doc_comment(&mut self, span: Span) {
        let error = ParseError::new("Unattached doc comment", span, "is detached")
            .with_parse_code("detached_doc_comment")
            .with_help("Place the doc comment on the line immediately above a symbol definition");

        self.errors.push(error);
    }

    /// Reports type parameters on an interface method (unsupported because
    /// Go interfaces do not allow generic methods).
    fn error_interface_method_with_type_parameters(&mut self, span: Span, count: usize) {
        let label = if count == 1 {
            "type parameter not allowed"
        } else {
            "type parameters not allowed"
        };
        let error = ParseError::new("Invalid interface method", span, label)
            .with_parse_code("interface_method_with_type_parameters")
            .with_help(
                "Interface methods cannot have type parameters, because Go interfaces do not support generic methods",
            );

        self.errors.push(error);
    }
726
727 fn parse_integer_text(&mut self, text: &str) -> ast::Literal {
728 let clean = if text.contains('_') {
729 std::borrow::Cow::Owned(text.replace('_', ""))
730 } else {
731 std::borrow::Cow::Borrowed(text)
732 };
733
734 let (n, preserve_text) = if clean.starts_with("0x") || clean.starts_with("0X") {
735 let value = u64::from_str_radix(&clean[2..], 16).unwrap_or_else(|_| {
736 self.track_error(
737 format!("hex literal '{}' is too large", text),
738 "Maximum value is `0xFFFFFFFFFFFFFFFF`.",
739 );
740 0
741 });
742 (value, true)
743 } else if clean.starts_with("0o") || clean.starts_with("0O") {
744 let value = u64::from_str_radix(&clean[2..], 8).unwrap_or_else(|_| {
745 self.track_error(
746 format!("octal literal '{}' is too large", text),
747 "Maximum value is `0o1777777777777777777777`.",
748 );
749 0
750 });
751 (value, true)
752 } else if clean.starts_with("0b") || clean.starts_with("0B") {
753 let value = u64::from_str_radix(&clean[2..], 2).unwrap_or_else(|_| {
754 self.track_error(
755 format!("binary literal '{}' is too large", text),
756 "Value must fit in 64 bits.",
757 );
758 0
759 });
760 (value, true)
761 } else if clean.len() > 1
762 && clean.starts_with('0')
763 && clean.chars().skip(1).all(|c| c.is_ascii_digit())
764 {
765 let value = u64::from_str_radix(&clean[1..], 8).unwrap_or_else(|_| {
766 self.track_error(
767 format!("octal literal '{}' is too large", text),
768 "Maximum value is `01777777777777777777777`.",
769 );
770 0
771 });
772 (value, true)
773 } else {
774 let value = clean.parse().unwrap_or_else(|_| {
775 self.track_error(
776 format!("integer literal '{}' is too large", text),
777 "Maximum value is `18446744073709551615`.",
778 );
779 0
780 });
781 (value, false)
782 };
783
784 let original_text = if preserve_text {
785 Some(text.to_string())
786 } else {
787 None
788 };
789
790 ast::Literal::Integer {
791 value: n,
792 text: original_text,
793 }
794 }
795
    /// Reports an unexpected token with a message tailored to the parse
    /// context (`ctx`), resynchronizes the stream, and returns a `Unit`
    /// placeholder expression spanning the offending token.
    fn unexpected_token(&mut self, ctx: &str) -> ast::Expression {
        let token = self.current_token();
        // Some tokens (e.g. EOF) carry no text; fall back to the kind name.
        let token_descriptor = if token.text.is_empty() {
            format!("{:?}", token.kind)
        } else {
            format!("`{}`", token.text)
        };

        let span = ast::Span::new(self.file_id, token.byte_offset, token.byte_length);

        // Context-specific (label, code, help) triples; `top_item` special-
        // cases `trait`/`use` to guide users coming from Rust.
        let (label, error_code, help) = match ctx {
            "expr" => (
                format!("expected expression, found {}", token_descriptor),
                "expected_expression",
                "Check your syntax.",
            ),
            "pattern" => (
                format!("unexpected {} in pattern", token_descriptor),
                "invalid_pattern",
                "Patterns include literals, variables, and destructuring.",
            ),
            "literal" => (
                format!("expected literal, found {}", token_descriptor),
                "expected_literal",
                "Literals include numbers, strings, characters, and booleans.",
            ),
            "top_item" if token.text == "trait" => (
                format!("unexpected {}", token_descriptor),
                "trait_unsupported",
                "Lisette uses `interface` with Go-style structural typing. Types automatically satisfy interfaces if they have the required methods.",
            ),
            "top_item" if token.text == "use" => (
                "unexpected syntax for import".to_string(),
                "use_unsupported",
                "Use `import` instead of `use` for imports: `import \"module/path\"`",
            ),
            "top_item" => (
                "expected declaration".to_string(),
                "expected_declaration",
                "At the top level of a file, Lisette expects `fn`, `struct`, `enum`, `interface`, `import`, or `type`.",
            ),
            _ => (
                format!("unexpected {}", token_descriptor),
                "unexpected_token",
                "Check your syntax.",
            ),
        };

        let error = ParseError::new("Syntax error", span, label)
            .with_parse_code(error_code)
            .with_help(help);

        if !self.too_many_errors() {
            self.errors.push(error);
        }

        self.resync_on_error();

        ast::Expression::Unit {
            ty: Type::uninferred(),
            span,
        }
    }
859}
860
/// Cursor over the lexer's token vector; reads past the end yield a
/// synthesized EOF token.
struct TokenStream<'source> {
    tokens: Vec<Token<'source>>,
    // Index of the next token returned by `peek`/`consume`.
    position: usize,
}
865
866impl<'source> TokenStream<'source> {
867 fn new(tokens: Vec<Token<'source>>) -> Self {
868 Self {
869 tokens,
870 position: 0,
871 }
872 }
873
874 fn peek(&self) -> Token<'source> {
875 self.tokens
876 .get(self.position)
877 .copied()
878 .unwrap_or_else(|| Token {
879 kind: TokenKind::EOF,
880 text: "",
881 byte_offset: self
882 .tokens
883 .last()
884 .map(|t| t.byte_offset + t.byte_length)
885 .unwrap_or(0),
886 byte_length: 0,
887 })
888 }
889
890 fn peek_ahead(&self, n: usize) -> Token<'source> {
891 self.tokens
892 .get(self.position + n)
893 .copied()
894 .unwrap_or_else(|| Token {
895 kind: TokenKind::EOF,
896 text: "",
897 byte_offset: self
898 .tokens
899 .last()
900 .map(|t| t.byte_offset + t.byte_length)
901 .unwrap_or(0),
902 byte_length: 0,
903 })
904 }
905
906 fn consume(&mut self) -> Token<'source> {
907 let token = self.peek();
908 if self.position < self.tokens.len() {
909 self.position += 1;
910 }
911 token
912 }
913}