1#![warn(clippy::todo)]
2#![warn(clippy::dbg_macro)]
3#![allow(clippy::collapsible_if)]
4#![allow(clippy::collapsible_else_if)]
5#![allow(clippy::if_same_then_else)]
7#![allow(clippy::large_enum_variant)]
9
10pub mod ast;
11pub mod diagnostics;
12pub mod lex;
13pub mod position;
14pub mod visitor;
15
16use std::collections::HashSet;
17use std::path::Path;
18use std::rc::Rc;
19
20use ast::FieldInfo;
21use ast::StructInfo;
22use position::Position;
23
24use crate::ast::*;
25use crate::diagnostics::ErrorMessage;
26use crate::diagnostics::MessagePart::*;
27use crate::lex::lex;
28use crate::lex::lex_between;
29use crate::lex::Token;
30use crate::lex::TokenStream;
31use crate::lex::INTEGER_RE;
32use crate::lex::SYMBOL_RE;
33
/// A parse failure, either from malformed syntax or from running out
/// of tokens partway through a construct.
#[derive(Debug)]
#[allow(dead_code)] pub enum ParseError {
    /// The tokens present are not valid syntax.
    Invalid {
        position: Position,
        message: ErrorMessage,
        /// Extra positions with notes that elaborate on the error.
        additional: Vec<(Position, String)>,
    },
    /// The token stream ended before this construct was complete.
    Incomplete {
        message: ErrorMessage,
        position: Position,
    },
}
50
51impl ParseError {
52 pub fn position(&self) -> &Position {
53 match self {
54 ParseError::Invalid { position, .. } => position,
55 ParseError::Incomplete { position, .. } => position,
56 }
57 }
58
59 pub fn message(&self) -> &ErrorMessage {
60 match self {
61 ParseError::Invalid { message, .. } => message,
62 ParseError::Incomplete { message, .. } => message,
63 }
64 }
65}
66
67fn peeked_symbol_is(tokens: &TokenStream, token: &str) -> bool {
68 tokens.peek().map(|t| t.text == token).unwrap_or(false)
69}
70
71fn require_a_token<'a>(
72 tokens: &mut TokenStream<'a>,
73 diagnostics: &mut Vec<ParseError>,
74 token_description: &str,
75) -> Token<'a> {
76 match tokens.pop() {
77 Some(token) => token,
78 None => {
79 diagnostics.push(ParseError::Incomplete {
80 message: ErrorMessage(vec![Text(format!(
81 "Expected {}, got EOF",
82 token_description
83 ))]),
84 position: Position::todo(tokens.path.clone()),
85 });
86
87 tokens.prev().expect("TODO: handle empty token streams")
91 }
92 }
93}
94
95fn required_token_ok(
96 tokens: &mut TokenStream<'_>,
97 diagnostics: &mut Vec<ParseError>,
98 expected: &str,
99) -> bool {
100 let (ok, _) = check_required_token(tokens, diagnostics, expected);
101 ok
102}
103
/// Pop the next token and check it matches `expected`.
///
/// Returns `(ok, token)`. On a mismatch, a diagnostic is recorded, the
/// token is pushed back onto the stream (`unpop`) so recovery can retry
/// it, and the mismatched token is still returned to give the caller a
/// position to work with. At EOF, a diagnostic is recorded and the
/// stream's previous token is returned instead.
fn check_required_token<'a>(
    tokens: &mut TokenStream<'a>,
    diagnostics: &mut Vec<ParseError>,
    expected: &str,
) -> (bool, Token<'a>) {
    match tokens.pop() {
        Some(token) => {
            let mut ok = true;
            if token.text != expected {
                let position = token.position.clone();

                diagnostics.push(ParseError::Invalid {
                    position,
                    message: ErrorMessage(vec![Text(format!(
                        "Expected `{}`, got `{}`",
                        expected, token.text
                    ))]),
                    additional: vec![],
                });
                ok = false;

                // Leave the unexpected token in the stream so the caller
                // can resynchronize on it.
                tokens.unpop();
            }

            (ok, token)
        }
        None => {
            diagnostics.push(ParseError::Incomplete {
                message: ErrorMessage(vec![Text(format!("Expected `{}`, got EOF", expected))]),
                position: Position::todo(tokens.path.clone()),
            });

            (
                false,
                tokens.prev().expect("TODO: handle empty file properly"),
            )
        }
    }
}
146
147fn require_token<'a>(
148 tokens: &mut TokenStream<'a>,
149 diagnostics: &mut Vec<ParseError>,
150 expected: &str,
151) -> Token<'a> {
152 let (_, token) = check_required_token(tokens, diagnostics, expected);
153 token
154}
155
156fn parse_integer(
157 tokens: &mut TokenStream,
158 id_gen: &mut IdGenerator,
159 diagnostics: &mut Vec<ParseError>,
160) -> Expression {
161 let token = require_a_token(tokens, diagnostics, "integer literal");
162
163 if INTEGER_RE.is_match(token.text) {
164 let i: i64 = token.text.parse().unwrap();
165 Expression::new(token.position, Expression_::IntLiteral(i), id_gen.next())
166 } else {
167 diagnostics.push(ParseError::Invalid {
168 position: token.position.clone(),
169 message: ErrorMessage(vec![Text(format!(
170 "Not a valid integer literal: {}",
171 token.text
172 ))]),
173 additional: vec![],
174 });
175
176 Expression::new(
179 token.position,
180 Expression_::IntLiteral(11223344),
181 id_gen.next(),
182 )
183 }
184}
185
186fn parse_variable(
187 tokens: &mut TokenStream,
188 id_gen: &mut IdGenerator,
189 diagnostics: &mut Vec<ParseError>,
190) -> Expression {
191 let variable = parse_symbol(tokens, id_gen, diagnostics);
192
193 Expression::new(
194 variable.position.clone(),
195 Expression_::Variable(variable),
196 id_gen.next(),
197 )
198}
199
/// Parse a `(`-introduced expression: the empty tuple `()`, a tuple
/// literal `(a, b)`, or a parenthesized expression `(a)`.
fn parse_tuple_literal_or_parentheses(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
) -> Expression {
    let open_paren = require_token(tokens, diagnostics, "(");
    // `()` is the empty tuple literal.
    if peeked_symbol_is(tokens, ")") {
        let close_paren = require_token(tokens, diagnostics, ")");

        return Expression::new(
            Position::merge(&open_paren.position, &close_paren.position),
            Expression_::TupleLiteral(vec![]),
            id_gen.next(),
        );
    }

    let expr = parse_expression(tokens, id_gen, diagnostics);
    let expr_pos = expr.position.clone();

    // A comma after the first expression makes this a tuple literal;
    // otherwise it's a plain parenthesized expression (handled below).
    if peeked_symbol_is(tokens, ",") {
        let mut exprs = vec![expr];

        loop {
            if peeked_symbol_is(tokens, ",") {
                tokens.pop();
            } else if !peeked_symbol_is(tokens, ")") {
                // Neither `,` nor `)` after an item: report at the
                // offending token, or at the first expression on EOF.
                let position = if let Some(token) = tokens.peek() {
                    token.position.clone()
                } else {
                    expr_pos
                };

                diagnostics.push(ParseError::Invalid {
                    position,
                    message: ErrorMessage(vec![Text("Expected `,` or `)`. ".to_owned())]),
                    additional: vec![],
                });

                break;
            }

            // Checked after consuming a comma, so trailing commas are
            // accepted: `(a, b,)`.
            if peeked_symbol_is(tokens, ")") {
                break;
            }

            let start_idx = tokens.idx;
            exprs.push(parse_expression(tokens, id_gen, diagnostics));
            assert!(
                tokens.idx > start_idx,
                "The parser should always make forward progress."
            );
        }

        let close_paren = require_token(tokens, diagnostics, ")");

        return Expression::new(
            Position::merge(&open_paren.position, &close_paren.position),
            Expression_::TupleLiteral(exprs.into_iter().map(Rc::new).collect()),
            id_gen.next(),
        );
    }

    let close_paren = require_token(tokens, diagnostics, ")");

    let position = Position::merge(&open_paren.position, &close_paren.position);
    Expression::new(
        position,
        Expression_::Parentheses(
            open_paren.position.clone(),
            Rc::new(expr),
            close_paren.position.clone(),
        ),
        id_gen.next(),
    )
}
280
281fn parse_list_literal(
282 tokens: &mut TokenStream,
283 id_gen: &mut IdGenerator,
284 diagnostics: &mut Vec<ParseError>,
285) -> Expression {
286 let open_bracket = require_token(tokens, diagnostics, "[");
287 let items = parse_comma_separated_exprs(tokens, id_gen, diagnostics, "]");
288 let close_bracket = require_token(tokens, diagnostics, "]");
289
290 Expression::new(
291 Position::merge(&open_bracket.position, &close_bracket.position),
292 Expression_::ListLiteral(items),
293 id_gen.next(),
294 )
295}
296
297fn parse_lambda(
298 tokens: &mut TokenStream,
299 id_gen: &mut IdGenerator,
300 diagnostics: &mut Vec<ParseError>,
301) -> Expression {
302 let fun_keyword = require_token(tokens, diagnostics, "fun");
303 let type_params = parse_type_params(tokens, id_gen, diagnostics);
304
305 let params = parse_parameters(tokens, id_gen, diagnostics);
306 let return_hint = parse_colon_and_hint_opt(tokens, id_gen, diagnostics);
307
308 let body = parse_block(tokens, id_gen, diagnostics, false);
309
310 let pos = Position::merge(&fun_keyword.position, &body.close_brace);
311
312 Expression::new(
313 pos.clone(),
314 Expression_::FunLiteral(FunInfo {
315 pos,
316 params,
317 body,
318 doc_comment: None,
319 name_sym: None,
320 item_id: None,
321 type_params,
322 return_hint,
323 }),
324 id_gen.next(),
325 )
326}
327
328fn parse_assert(
329 tokens: &mut TokenStream,
330 id_gen: &mut IdGenerator,
331 diagnostics: &mut Vec<ParseError>,
332) -> Expression {
333 let assert_keyword = require_token(tokens, diagnostics, "assert");
334
335 let open_paren = require_token(tokens, diagnostics, "(");
336
337 if peeked_symbol_is(tokens, ")") {
338 let close_paren = tokens.pop().unwrap();
339
340 let position = Position::merge(&open_paren.position, &close_paren.position);
341 diagnostics.push(ParseError::Invalid {
342 position: position.clone(),
343 message: ErrorMessage(vec![Text(
344 "Assert requires an expression, e.g. `assert(x == 42)`.".to_owned(),
345 )]),
346 additional: vec![],
347 });
348
349 return Expression::new(position, Expression_::Invalid, id_gen.next());
350 }
351
352 let expr = parse_expression(tokens, id_gen, diagnostics);
353 let close_paren = require_token(tokens, diagnostics, ")");
354
355 Expression::new(
356 Position::merge(&assert_keyword.position, &close_paren.position),
357 Expression_::Assert(Rc::new(expr)),
358 id_gen.next(),
359 )
360}
361
/// Parse an `if` expression, including any `else` or chained `else if`.
fn parse_if(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
) -> Expression {
    let if_token = require_token(tokens, diagnostics, "if");

    let cond_expr = parse_expression(tokens, id_gen, diagnostics);
    let mut then_body = parse_block(tokens, id_gen, diagnostics, false);

    let else_body: Option<Block> = if peeked_symbol_is(tokens, "else") {
        tokens.pop();

        if peeked_symbol_is(tokens, "if") {
            // `else if …`: parse recursively and wrap the nested if in a
            // synthetic single-expression block.
            let if_expr = parse_if(tokens, id_gen, diagnostics);
            Some(Block {
                open_brace: if_expr.position.clone(),
                close_brace: if_expr.position.clone(),
                exprs: vec![if_expr.into()],
            })
        } else {
            Some(parse_block(tokens, id_gen, diagnostics, false))
        }
    } else {
        None
    };

    if else_body.is_none() {
        // Without an `else`, mark every expression in the then-branch as
        // having an unused value (the if as a whole can't produce one).
        let then_body_exprs: Vec<_> = then_body
            .exprs
            .iter()
            .map(|e| {
                let mut e = e.as_ref().clone();
                e.value_is_used = false;
                Rc::new(e)
            })
            .collect();
        then_body.exprs = then_body_exprs;
    }

    let last_brace_pos = match &else_body {
        Some(else_body) => &else_body.close_brace,
        None => &then_body.close_brace,
    };

    Expression::new(
        Position::merge(&if_token.position, last_brace_pos),
        Expression_::If(Rc::new(cond_expr), then_body, else_body),
        id_gen.next(),
    )
}
418
419fn parse_while(
420 tokens: &mut TokenStream,
421 id_gen: &mut IdGenerator,
422 diagnostics: &mut Vec<ParseError>,
423) -> Expression {
424 let while_token = require_token(tokens, diagnostics, "while");
425
426 let cond_expr = parse_expression(tokens, id_gen, diagnostics);
427 let body = parse_block(tokens, id_gen, diagnostics, true);
428
429 Expression::new(
430 Position::merge(&while_token.position, &body.close_brace),
431 Expression_::While(Rc::new(cond_expr), body),
432 id_gen.next(),
433 )
434}
435
436fn parse_for_in(
437 tokens: &mut TokenStream,
438 id_gen: &mut IdGenerator,
439 diagnostics: &mut Vec<ParseError>,
440) -> Expression {
441 let for_token = require_token(tokens, diagnostics, "for");
442 let destination = parse_let_destination(tokens, id_gen, diagnostics);
443
444 require_token(tokens, diagnostics, "in");
445
446 let expr = parse_expression(tokens, id_gen, diagnostics);
447
448 let body = parse_block(tokens, id_gen, diagnostics, true);
449
450 Expression::new(
451 Position::merge(&for_token.position, &body.close_brace),
452 Expression_::ForIn(destination, Rc::new(expr), body),
453 id_gen.next(),
454 )
455}
456fn parse_break(
457 tokens: &mut TokenStream,
458 id_gen: &mut IdGenerator,
459 diagnostics: &mut Vec<ParseError>,
460) -> Expression {
461 let break_token = require_token(tokens, diagnostics, "break");
462 Expression::new(break_token.position, Expression_::Break, id_gen.next())
463}
464
465fn parse_continue(
466 tokens: &mut TokenStream,
467 id_gen: &mut IdGenerator,
468 diagnostics: &mut Vec<ParseError>,
469) -> Expression {
470 let continue_token = require_token(tokens, diagnostics, "continue");
471 Expression::new(
472 continue_token.position,
473 Expression_::Continue,
474 id_gen.next(),
475 )
476}
477
478fn parse_return(
479 tokens: &mut TokenStream,
480 id_gen: &mut IdGenerator,
481 diagnostics: &mut Vec<ParseError>,
482) -> Expression {
483 let return_token = require_token(tokens, diagnostics, "return");
484
485 let mut expr = None;
486 let mut pos = return_token.position.clone();
487
488 if let Some(next_token) = tokens.peek() {
499 if return_token.position.end_line_number == next_token.position.line_number {
500 let returned_expr = parse_expression(tokens, id_gen, diagnostics);
501 pos = Position::merge(&pos, &returned_expr.position);
502 expr = Some(Rc::new(returned_expr));
503 }
504 }
505
506 Expression::new(pos, Expression_::Return(expr), id_gen.next())
507}
508
/// Convert a quoted string literal from source text to its runtime
/// value: strip the surrounding quotes and process `\n`, `\\` and `\"`
/// escapes. Unrecognized escapes are kept verbatim.
///
/// `src` must include its delimiters (i.e. be at least two chars long).
fn unescape_string(src: &str) -> String {
    // Drop the leading and trailing quote characters.
    let inner = &src[1..src.len() - 1];

    let mut out = String::with_capacity(inner.len());
    let mut iter = inner.chars().peekable();
    while let Some(c) = iter.next() {
        if c == '\\' {
            match iter.peek() {
                Some('n') => {
                    out.push('\n');
                    iter.next();
                }
                Some('\\') => {
                    out.push('\\');
                    iter.next();
                }
                Some('"') => {
                    out.push('"');
                    iter.next();
                }
                // Unknown escape (or trailing backslash): keep the
                // backslash and let the next char through unchanged.
                _ => out.push(c),
            }
        } else {
            out.push(c);
        }
    }

    out
}
549
/// Parse a single "atomic" expression: a parenthesized/tuple form, a
/// list literal, a lambda, an assert, a variable or struct literal, a
/// string literal or an integer. Trailing calls and dot-access are
/// handled by `parse_simple_expression_with_trailing`.
fn parse_simple_expression(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
) -> Expression {
    if let Some(token) = tokens.peek() {
        if token.text == "(" {
            return parse_tuple_literal_or_parentheses(tokens, id_gen, diagnostics);
        }

        if token.text == "[" {
            return parse_list_literal(tokens, id_gen, diagnostics);
        }

        if token.text == "fun" {
            return parse_lambda(tokens, id_gen, diagnostics);
        }

        if token.text == "assert" {
            return parse_assert(tokens, id_gen, diagnostics);
        }

        if SYMBOL_RE.is_match(token.text) {
            // A `{` immediately adjacent to the symbol (no gap between
            // the offsets) is treated as a struct literal — presumably
            // to distinguish `Foo{…}` from e.g. `while foo {`; confirm
            // against the lexer's offset semantics.
            if let Some((prev_token, token)) = tokens.peek_two() {
                if token.text == "{"
                    && prev_token.position.end_offset == token.position.start_offset
                {
                    return parse_struct_literal(tokens, id_gen, diagnostics);
                }
            }

            return parse_variable(tokens, id_gen, diagnostics);
        }

        if token.text.starts_with('\"') {
            tokens.pop();

            return Expression::new(
                token.position,
                Expression_::StringLiteral(unescape_string(token.text)),
                id_gen.next(),
            );
        }

        if INTEGER_RE.is_match(token.text) {
            return parse_integer(tokens, id_gen, diagnostics);
        }

        // Nothing matched: report the unexpected token and produce an
        // Invalid expression for recovery. The token is not consumed.
        diagnostics.push(ParseError::Invalid {
            position: token.position.clone(),
            message: ErrorMessage(vec![Text(format!(
                "Expected an expression, got: `{}`.",
                token.text
            ))]),
            additional: vec![],
        });

        return Expression::new(token.position, Expression_::Invalid, id_gen.next());
    }

    diagnostics.push(ParseError::Incomplete {
        message: ErrorMessage(vec![Text("Expected an expression.".to_owned())]),
        position: Position::todo(tokens.path.clone()),
    });

    Expression::new(
        Position::todo(tokens.path.clone()),
        Expression_::Invalid,
        id_gen.next(),
    )
}
625
/// Parse the `name: expr, …` fields inside a struct literal, stopping
/// at (but not consuming) the closing `}`.
fn parse_struct_literal_fields(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
) -> Vec<(Symbol, Rc<Expression>)> {
    let mut fields: Vec<(Symbol, Rc<Expression>)> = vec![];
    loop {
        if peeked_symbol_is(tokens, "}") {
            break;
        }

        let start_idx = tokens.idx;
        let sym = parse_symbol(tokens, id_gen, diagnostics);
        require_token(tokens, diagnostics, ":");
        let expr = parse_expression(tokens, id_gen, diagnostics);

        // No forward progress: discard the failed field and skip ahead
        // to the closing brace so the caller can resynchronize.
        if tokens.idx == start_idx {
            while let Some(t) = tokens.peek() {
                if t.text == "}" {
                    break;
                } else {
                    tokens.pop();
                }
            }
            break;
        }

        fields.push((sym, Rc::new(expr)));

        let Some(token) = tokens.peek() else {
            diagnostics.push(ParseError::Incomplete {
                position: Position::todo(tokens.path.clone()),
                message: ErrorMessage(vec![Text(
                    "Invalid syntax: Expected `,` or `}` here, but got EOF".to_string(),
                )]),
            });
            break;
        };

        // Consume a separating comma; trailing commas are allowed.
        if token.text == "," {
            tokens.pop();
        }
    }

    fields
}
675
676fn parse_struct_literal(
677 tokens: &mut TokenStream,
678 id_gen: &mut IdGenerator,
679 diagnostics: &mut Vec<ParseError>,
680) -> Expression {
681 let name = parse_type_symbol(tokens, id_gen, diagnostics);
682 require_token(tokens, diagnostics, "{");
683 let fields = parse_struct_literal_fields(tokens, id_gen, diagnostics);
684
685 let close_brace = require_token(tokens, diagnostics, "}");
686
687 Expression::new(
688 Position::merge(&name.position, &close_brace.position),
689 Expression_::StructLiteral(name, fields),
690 id_gen.next(),
691 )
692}
693
694fn parse_match(
695 tokens: &mut TokenStream,
696 id_gen: &mut IdGenerator,
697 diagnostics: &mut Vec<ParseError>,
698) -> Expression {
699 let match_keyword = require_token(tokens, diagnostics, "match");
700 let scrutinee_expr = parse_expression(tokens, id_gen, diagnostics);
701
702 let open_brace = require_token(tokens, diagnostics, "{");
703 if open_brace.text != "{" {
704 return Expression::new(
705 scrutinee_expr.position.clone(),
706 Expression_::Match(Rc::new(scrutinee_expr), vec![]),
707 id_gen.next(),
708 );
709 }
710
711 let mut cases = vec![];
712 loop {
713 let Some(token) = tokens.peek() else {
714 diagnostics.push(ParseError::Incomplete {
715 position: Position::todo(tokens.path.clone()),
716 message: ErrorMessage(vec![Text(
717 "Invalid syntax: Expected `}` here, but got EOF".to_string(),
718 )]),
719 });
720 break;
721 };
722
723 if token.text == "}" {
724 break;
725 }
726
727 let start_idx = tokens.idx;
728 let pattern = parse_pattern(tokens, id_gen, diagnostics);
729 require_token(tokens, diagnostics, "=>");
730 let case_block = parse_case_block(tokens, id_gen, diagnostics);
731
732 if tokens.idx <= start_idx {
733 break;
734 }
735
736 assert!(
737 tokens.idx > start_idx,
738 "The parser should always make forward progress."
739 );
740
741 cases.push((pattern, case_block));
742 }
743
744 let close_paren = require_token(tokens, diagnostics, "}");
745
746 Expression::new(
747 Position::merge(&match_keyword.position, &close_paren.position),
748 Expression_::Match(Rc::new(scrutinee_expr), cases),
749 id_gen.next(),
750 )
751}
752
753fn parse_case_block(
754 tokens: &mut TokenStream,
755 id_gen: &mut IdGenerator,
756 diagnostics: &mut Vec<ParseError>,
757) -> Block {
758 let block = if peeked_symbol_is(tokens, "{") {
759 parse_block(tokens, id_gen, diagnostics, false)
760 } else {
761 let case_expr = parse_expression(tokens, id_gen, diagnostics);
764
765 let pos = case_expr.position.clone();
766 Block {
767 open_brace: pos.clone(),
768 exprs: vec![case_expr.into()],
769 close_brace: pos,
770 }
771 };
772
773 if peeked_symbol_is(tokens, ",") {
775 tokens.pop().unwrap();
776 }
777 block
778}
779
780fn parse_pattern(
781 tokens: &mut TokenStream,
782 id_gen: &mut IdGenerator,
783 diagnostics: &mut Vec<ParseError>,
784) -> Pattern {
785 let variant_sym = parse_symbol(tokens, id_gen, diagnostics);
786
787 let payload = if peeked_symbol_is(tokens, "(") {
788 require_token(tokens, diagnostics, "(");
789 let dest = parse_let_destination(tokens, id_gen, diagnostics);
790 require_token(tokens, diagnostics, ")");
791 Some(dest)
792 } else {
793 None
794 };
795
796 Pattern {
797 variant_sym,
798 payload,
799 }
800}
801
/// Parse comma-separated expressions, stopping at (but not consuming)
/// `terminator`. Used for call arguments and list literals. Each item
/// records the position of its trailing comma, when present.
fn parse_comma_separated_exprs(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
    terminator: &str,
) -> Vec<ExpressionWithComma> {
    let mut items: Vec<ExpressionWithComma> = vec![];
    loop {
        if peeked_symbol_is(tokens, terminator) {
            break;
        }

        let start_idx = tokens.idx;
        let arg = parse_expression(tokens, id_gen, diagnostics);
        let arg_pos = arg.position.clone();

        // An invalid/placeholder expression means parsing failed; stop
        // so a bad token can't cause an infinite loop.
        if arg.expr_.is_invalid_or_placeholder() {
            break;
        }

        assert!(
            tokens.idx > start_idx,
            "The parser should always make forward progress."
        );

        if let Some(token) = tokens.peek() {
            if token.text == "," {
                items.push(ExpressionWithComma {
                    expr: Rc::new(arg),
                    comma: Some(token.position),
                });

                tokens.pop();
            } else if token.text != terminator {
                items.push(ExpressionWithComma {
                    expr: Rc::new(arg),
                    comma: None,
                });

                diagnostics.push(ParseError::Invalid {
                    position: token.position.clone(),
                    message: ErrorMessage(vec![Text(format!(
                        "Invalid syntax: Expected `,` or `{}`, got `{}`",
                        terminator, token.text
                    ))]),
                    additional: vec![],
                });

                // Recovery heuristic: an unexpected token on the same
                // line as the expression is treated as a missing comma
                // (keep parsing); on a later line, give up on the
                // sequence entirely.
                if arg_pos.line_number == token.position.line_number {
                    continue;
                } else {
                    break;
                }
            } else {
                // Terminator follows with no comma: final item.
                items.push(ExpressionWithComma {
                    expr: Rc::new(arg),
                    comma: None,
                });
            }
        } else {
            items.push(ExpressionWithComma {
                expr: Rc::new(arg),
                comma: None,
            });

            diagnostics.push(ParseError::Incomplete {
                position: Position::todo(tokens.path.clone()),
                message: ErrorMessage(vec![Text(format!(
                    "Invalid syntax: Expected `,` or `{}` here, but got EOF",
                    terminator
                ))]),
            });
            break;
        }
    }

    items
}
885
886fn parse_call_arguments(
887 tokens: &mut TokenStream,
888 id_gen: &mut IdGenerator,
889 diagnostics: &mut Vec<ParseError>,
890) -> ParenthesizedArguments {
891 let open_paren_token = require_token(tokens, diagnostics, "(");
892 let arguments = parse_comma_separated_exprs(tokens, id_gen, diagnostics, ")");
893 let close_paren_token = require_token(tokens, diagnostics, ")");
894
895 ParenthesizedArguments {
896 arguments,
897 open_paren: open_paren_token.position,
898 close_paren: close_paren_token.position,
899 }
900}
901
/// Parse a simple expression followed by any number of trailing call
/// argument lists `(…)` and dot-accesses `.name` / `.name(…)`.
fn parse_simple_expression_with_trailing(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
) -> Expression {
    let mut expr = parse_simple_expression(tokens, id_gen, diagnostics);

    loop {
        let start_idx = tokens.idx;
        match tokens.peek() {
            // A `(` is only a call when it immediately follows the
            // expression (offsets are adjacent, no gap).
            Some(token)
                if token.text == "(" && expr.position.end_offset == token.position.start_offset =>
            {
                let arguments = parse_call_arguments(tokens, id_gen, diagnostics);

                expr = Expression::new(
                    Position::merge(&expr.position, &arguments.close_paren),
                    Expression_::Call(Rc::new(expr), arguments),
                    id_gen.next(),
                );
            }
            Some(token) if token.text == "." => {
                tokens.pop();

                let next_token = tokens.peek();

                // Only treat the following token as the accessed name
                // when it is adjacent to the `.` (no gap in offsets).
                if Some(token.position.end_offset)
                    == next_token.map(|tok| tok.position.start_offset)
                {
                    let variable = parse_symbol(tokens, id_gen, diagnostics);

                    if peeked_symbol_is(tokens, "(") {
                        let arguments = parse_call_arguments(tokens, id_gen, diagnostics);

                        expr = Expression::new(
                            Position::merge(&expr.position, &arguments.close_paren),
                            Expression_::MethodCall(Rc::new(expr), variable, arguments),
                            id_gen.next(),
                        );
                    } else {
                        expr = Expression::new(
                            Position::merge(&expr.position, &variable.position),
                            Expression_::DotAccess(Rc::new(expr), variable),
                            id_gen.next(),
                        );
                    }
                } else {
                    // Dangling `.` (e.g. mid-edit in an IDE): use a
                    // placeholder symbol so we still build a DotAccess.
                    let variable = placeholder_symbol(token.position, id_gen);

                    expr = Expression::new(
                        Position::merge(&expr.position, &variable.position),
                        Expression_::DotAccess(Rc::new(expr), variable),
                        id_gen.next(),
                    );
                }
            }
            _ => break,
        }
        assert!(
            tokens.idx > start_idx,
            "The parser should always make forward progress."
        );
    }

    expr
}
983
984fn token_as_binary_op(token: Token<'_>) -> Option<BinaryOperatorKind> {
985 match token.text {
986 "+" => Some(BinaryOperatorKind::Add),
987 "-" => Some(BinaryOperatorKind::Subtract),
988 "*" => Some(BinaryOperatorKind::Multiply),
989 "/" => Some(BinaryOperatorKind::Divide),
990 "%" => Some(BinaryOperatorKind::Modulo),
991 "**" => Some(BinaryOperatorKind::Exponent),
992 "==" => Some(BinaryOperatorKind::Equal),
993 "!=" => Some(BinaryOperatorKind::NotEqual),
994 "&&" => Some(BinaryOperatorKind::And),
995 "||" => Some(BinaryOperatorKind::Or),
996 "<" => Some(BinaryOperatorKind::LessThan),
997 "<=" => Some(BinaryOperatorKind::LessThanOrEqual),
998 ">" => Some(BinaryOperatorKind::GreaterThan),
999 ">=" => Some(BinaryOperatorKind::GreaterThanOrEqual),
1000 "^" => Some(BinaryOperatorKind::StringConcat),
1001 _ => None,
1002 }
1003}
1004
1005fn parse_expression(
1016 tokens: &mut TokenStream,
1017 id_gen: &mut IdGenerator,
1018 diagnostics: &mut Vec<ParseError>,
1019) -> Expression {
1020 if let Some((_, token)) = tokens.peek_two() {
1023 if token.text == "=" {
1024 return parse_assign(tokens, id_gen, diagnostics);
1025 }
1026 if token.text == "+=" || token.text == "-=" {
1027 return parse_assign_update(tokens, id_gen, diagnostics);
1028 }
1029 }
1030
1031 if let Some(token) = tokens.peek() {
1032 if token.text == "let" {
1033 return parse_let(tokens, id_gen, diagnostics);
1034 }
1035 if token.text == "return" {
1036 return parse_return(tokens, id_gen, diagnostics);
1037 }
1038 if token.text == "while" {
1039 return parse_while(tokens, id_gen, diagnostics);
1040 }
1041 if token.text == "for" {
1042 return parse_for_in(tokens, id_gen, diagnostics);
1043 }
1044 if token.text == "break" {
1045 return parse_break(tokens, id_gen, diagnostics);
1046 }
1047 if token.text == "continue" {
1048 return parse_continue(tokens, id_gen, diagnostics);
1049 }
1050 if token.text == "if" {
1051 return parse_if(tokens, id_gen, diagnostics);
1052 }
1053 if token.text == "match" {
1054 return parse_match(tokens, id_gen, diagnostics);
1055 }
1056 }
1057
1058 parse_simple_expression_or_binop(tokens, id_gen, diagnostics)
1059}
1060
1061fn parse_simple_expression_or_binop(
1073 tokens: &mut TokenStream,
1074 id_gen: &mut IdGenerator,
1075 diagnostics: &mut Vec<ParseError>,
1076) -> Expression {
1077 let mut expr = parse_simple_expression_with_trailing(tokens, id_gen, diagnostics);
1078
1079 if let Some(token) = tokens.peek() {
1080 if let Some(op) = token_as_binary_op(token) {
1081 tokens.pop();
1082
1083 let rhs_expr = parse_simple_expression_with_trailing(tokens, id_gen, diagnostics);
1084
1085 expr = Expression::new(
1086 Position::merge(&expr.position, &rhs_expr.position),
1087 Expression_::BinaryOperator(Rc::new(expr), op, Rc::new(rhs_expr)),
1088 id_gen.next(),
1089 );
1090 }
1091 }
1092
1093 expr
1094}
1095
1096fn parse_definition(
1097 tokens: &mut TokenStream,
1098 id_gen: &mut IdGenerator,
1099 diagnostics: &mut Vec<ParseError>,
1100) -> Option<ToplevelItem> {
1101 if let Some((token, next_token)) = tokens.peek_two() {
1102 if token.text == "fun" || token.text == "external" && next_token.text == "fun" {
1103 return parse_function_or_method(tokens, id_gen, diagnostics);
1104 }
1105 if token.text == "test" {
1106 return Some(parse_test(tokens, id_gen, diagnostics));
1107 }
1108 if token.text == "enum" || token.text == "external" && next_token.text == "enum" {
1109 return Some(parse_enum(tokens, id_gen, diagnostics));
1110 }
1111 if token.text == "struct" || token.text == "external" && next_token.text == "struct" {
1112 return Some(parse_struct(tokens, id_gen, diagnostics));
1113 }
1114 if token.text == "import" {
1115 return parse_import(tokens, id_gen, diagnostics);
1116 }
1117
1118 diagnostics.push(ParseError::Invalid {
1120 position: token.position,
1121 message: ErrorMessage(vec![Text("Expected a definition".to_string())]),
1122 additional: vec![],
1123 });
1124 return None;
1125 }
1126
1127 diagnostics.push(ParseError::Incomplete {
1128 position: Position::todo(tokens.path.clone()),
1129 message: ErrorMessage(vec![Text("Unfinished definition".to_owned())]),
1130 });
1131 None
1132}
1133
/// Parse the comma-separated variants of an enum body, stopping at
/// (but not consuming) the closing `}`.
fn parse_enum_body(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
) -> Vec<VariantInfo> {
    let mut variants = vec![];
    loop {
        if peeked_symbol_is(tokens, "}") {
            break;
        }

        let variant = parse_variant(tokens, id_gen, diagnostics);
        variants.push(variant);

        if let Some(token) = tokens.peek() {
            if token.text == "," {
                // Consume the separator; a trailing comma is allowed.
                tokens.pop();
            } else if token.text == "}" {
                break;
            } else {
                // Unexpected token between variants: report and stop
                // parsing this body.
                diagnostics.push(ParseError::Invalid {
                    position: token.position,
                    message: ErrorMessage(vec![Text(format!(
                        "Invalid syntax: Expected `,` or `}}` here, but got `{}`",
                        token.text
                    ))]),
                    additional: vec![],
                });
                break;
            }
        } else {
            diagnostics.push(ParseError::Incomplete {
                position: Position::todo(tokens.path.clone()),
                message: ErrorMessage(vec![Text(
                    "Invalid syntax: Expected `,` or `}` here, but got EOF".to_string(),
                )]),
            });
            break;
        }
    }

    variants
}
1177
1178fn parse_variant(
1180 tokens: &mut TokenStream,
1181 id_gen: &mut IdGenerator,
1182 diagnostics: &mut Vec<ParseError>,
1183) -> VariantInfo {
1184 let name_symbol = parse_symbol(tokens, id_gen, diagnostics);
1185
1186 let mut payload_hint = None;
1187 if peeked_symbol_is(tokens, "(") {
1188 tokens.pop();
1189 payload_hint = Some(parse_type_hint(tokens, id_gen, diagnostics));
1190 require_token(tokens, diagnostics, ")");
1191 }
1192
1193 VariantInfo {
1194 name_sym: name_symbol,
1195 payload_hint,
1196 }
1197}
1198
1199fn parse_enum(
1200 tokens: &mut TokenStream,
1201 id_gen: &mut IdGenerator,
1202 diagnostics: &mut Vec<ParseError>,
1203) -> ToplevelItem {
1204 let mut visibility = Visibility::CurrentFile;
1205 let mut first_token = None;
1206
1207 if let Some(token) = tokens.peek() {
1208 if token.text == "external" {
1209 let token = tokens.pop().unwrap();
1210 visibility = Visibility::External(token.position.clone());
1211 first_token = Some(token);
1212 }
1213 }
1214
1215 let enum_token = require_token(tokens, diagnostics, "enum");
1216 let first_token = first_token.unwrap_or_else(|| enum_token.clone());
1217
1218 let doc_comment = parse_doc_comment(&first_token);
1219 let name_symbol = parse_type_symbol(tokens, id_gen, diagnostics);
1220 let type_params = parse_type_params(tokens, id_gen, diagnostics);
1221
1222 let saw_open_brace = required_token_ok(tokens, diagnostics, "{");
1223 let (variants, close_brace_pos) = if !saw_open_brace {
1224 (vec![], name_symbol.position.clone())
1225 } else {
1226 let variants = parse_enum_body(tokens, id_gen, diagnostics);
1227 let close_brace = require_token(tokens, diagnostics, "}");
1228 (variants, close_brace.position)
1229 };
1230
1231 let position = Position::merge_token(&first_token, &close_brace_pos);
1232
1233 ToplevelItem::Enum(EnumInfo {
1234 pos: position,
1235 visibility,
1236 doc_comment,
1237 name_sym: name_symbol,
1238 type_params,
1239 variants,
1240 })
1241}
1242
1243fn parse_struct(
1244 tokens: &mut TokenStream,
1245 id_gen: &mut IdGenerator,
1246 diagnostics: &mut Vec<ParseError>,
1247) -> ToplevelItem {
1248 let mut visibility = Visibility::CurrentFile;
1249 let mut first_token = None;
1250
1251 if let Some(token) = tokens.peek() {
1252 if token.text == "external" {
1253 let token = tokens.pop().unwrap();
1254 visibility = Visibility::External(token.position.clone());
1255 first_token = Some(token);
1256 }
1257 }
1258
1259 let struct_token = require_token(tokens, diagnostics, "struct");
1260 let first_token = first_token.unwrap_or_else(|| struct_token.clone());
1261
1262 let doc_comment = parse_doc_comment(&first_token);
1263 let name_sym = parse_type_symbol(tokens, id_gen, diagnostics);
1264 let type_params = parse_type_params(tokens, id_gen, diagnostics);
1265
1266 let saw_open_brace = required_token_ok(tokens, diagnostics, "{");
1267
1268 let (fields, close_brace_pos) = if !saw_open_brace {
1269 (vec![], name_sym.position.clone())
1270 } else {
1271 let fields = parse_struct_fields(tokens, id_gen, diagnostics);
1272 let close_brace = require_token(tokens, diagnostics, "}");
1273 (fields, close_brace.position)
1274 };
1275
1276 let position = Position::merge_token(&struct_token, &close_brace_pos);
1277
1278 ToplevelItem::Struct(StructInfo {
1279 pos: position,
1280 visibility,
1281 doc_comment,
1282 name_sym,
1283 type_params,
1284 fields,
1285 })
1286}
1287
1288fn parse_test(
1289 tokens: &mut TokenStream,
1290 id_gen: &mut IdGenerator,
1291 diagnostics: &mut Vec<ParseError>,
1292) -> ToplevelItem {
1293 let test_token = require_token(tokens, diagnostics, "test");
1294 let doc_comment = parse_doc_comment(&test_token);
1295
1296 let name = parse_symbol(tokens, id_gen, diagnostics);
1297
1298 if let Some(token) = tokens.peek() {
1299 if token.text == "(" {
1300 let mut param_diagnostics = vec![];
1304 let params = parse_parameters(tokens, id_gen, &mut param_diagnostics);
1305
1306 diagnostics.push(ParseError::Invalid {
1307 position: Position::merge(¶ms.open_paren, ¶ms.close_paren),
1308 message: ErrorMessage(vec![Text(
1309 "Tests should not have arguments, e.g. `test foo {}`.".to_owned(),
1310 )]),
1311 additional: vec![],
1312 });
1313 }
1314 }
1315
1316 let body = parse_block(tokens, id_gen, diagnostics, false);
1317 let position = Position::merge_token(&test_token, &body.close_brace);
1318
1319 ToplevelItem::Test(TestInfo {
1320 pos: position,
1321 doc_comment,
1322 name_sym: name,
1323 body,
1324 })
1325}
1326
1327fn parse_import(
1328 tokens: &mut TokenStream,
1329 id_gen: &mut IdGenerator,
1330 diagnostics: &mut Vec<ParseError>,
1331) -> Option<ToplevelItem> {
1332 let import_token = require_token(tokens, diagnostics, "import");
1333
1334 let Some(path_token) = tokens.pop() else {
1335 diagnostics.push(ParseError::Incomplete {
1336 position: Position::todo(tokens.path.clone()),
1337 message: ErrorMessage(vec![Text("Unfinished `import`.".to_owned())]),
1338 });
1339
1340 return None;
1341 };
1342
1343 let position = Position::merge_token(&import_token, &path_token.position);
1344
1345 let path_s = if path_token.text.starts_with('\"') {
1346 unescape_string(path_token.text)
1347 } else {
1348 diagnostics.push(ParseError::Incomplete {
1349 position: path_token.position,
1350 message: ErrorMessage(vec![Text(
1351 "`import` requires a path, e.g. `import \"./foo.gdn\"`.".to_owned(),
1352 )]),
1353 });
1354
1355 return None;
1356 };
1357
1358 let import_info = ImportInfo {
1359 pos: position.clone(),
1360 path: path_s.into(),
1361 path_pos: path_token.position.clone(),
1362 id: id_gen.next(),
1363 };
1364
1365 Some(ToplevelItem::Import(import_info))
1366}
1367
1368fn parse_type_symbol(
1369 tokens: &mut TokenStream,
1370 id_gen: &mut IdGenerator,
1371 diagnostics: &mut Vec<ParseError>,
1372) -> TypeSymbol {
1373 let name = parse_symbol(tokens, id_gen, diagnostics);
1374 TypeSymbol {
1375 name: TypeName {
1376 name: name.name.name,
1377 },
1378 position: name.position,
1379 id: id_gen.next(),
1380 }
1381}
1382
/// Parse type arguments in angle brackets, e.g. the `<Int, String>` in
/// `Foo<Int, String>`.
///
/// Returns the parsed type hints along with the position of the token that
/// ended the list (normally `>`), or `None` when there were no angle
/// brackets at all.
fn parse_type_arguments(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
) -> (Vec<TypeHint>, Option<Position>) {
    // Type arguments are optional: bare `Foo` has none.
    if !peeked_symbol_is(tokens, "<") {
        return (vec![], None);
    }

    require_token(tokens, diagnostics, "<");

    let mut args = vec![];
    // The loop's break value is the position where the argument list ended.
    let close_pos = loop {
        if let Some(token) = tokens.peek() {
            if token.text == ">" {
                break token.position;
            }
        }
        let arg = parse_type_hint(tokens, id_gen, diagnostics);
        args.push(arg);

        if let Some(token) = tokens.peek() {
            if token.text == "," {
                tokens.pop();
            } else if token.text == ">" {
                break token.position;
            } else {
                // Unexpected separator: report it and stop, using this
                // token's position as the end of the argument list.
                diagnostics.push(ParseError::Invalid {
                    position: token.position.clone(),
                    message: ErrorMessage(vec![Text(format!(
                        "Invalid syntax: Expected `,` or `>` here, but got `{}`",
                        token.text
                    ))]),
                    additional: vec![],
                });
                break token.position;
            }
        } else {
            // Ran out of tokens mid-list.
            diagnostics.push(ParseError::Incomplete {
                position: Position::todo(tokens.path.clone()),
                message: ErrorMessage(vec![Text(
                    "Invalid syntax: Expected `,` or `>` here, but got EOF".to_owned(),
                )]),
            });
            break Position::todo(tokens.path.clone());
        }
    };

    // Consume the `>` itself (reporting a diagnostic if it's missing).
    require_token(tokens, diagnostics, ">");

    (args, Some(close_pos))
}
1436
/// Parse type parameters in angle brackets, e.g. the `<T>` in
/// `struct Foo<T> { ... }`.
///
/// Returns an empty vector when there are no angle brackets at all.
fn parse_type_params(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
) -> Vec<TypeSymbol> {
    // Type parameters are optional.
    if !peeked_symbol_is(tokens, "<") {
        return vec![];
    }

    require_token(tokens, diagnostics, "<");

    let mut params = vec![];
    loop {
        if peeked_symbol_is(tokens, ">") {
            break;
        }

        let arg = parse_type_symbol(tokens, id_gen, diagnostics);
        params.push(arg);

        if let Some(token) = tokens.peek() {
            if token.text == "," {
                tokens.pop();
            } else if token.text == ">" {
                break;
            } else {
                // Unexpected separator: report it and stop scanning the list.
                diagnostics.push(ParseError::Invalid {
                    position: token.position,
                    message: ErrorMessage(vec![Text(format!(
                        "Invalid syntax: Expected `,` or `>` here, but got `{}`",
                        token.text
                    ))]),
                    additional: vec![],
                });
                break;
            }
        } else {
            // Ran out of tokens mid-list.
            diagnostics.push(ParseError::Incomplete {
                position: Position::todo(tokens.path.clone()),
                message: ErrorMessage(vec![Text(
                    "Invalid syntax: Expected `,` or `>` here, but got EOF".to_owned(),
                )]),
            });
            break;
        }
    }

    // Consume the `>` itself (reporting a diagnostic if it's missing).
    require_token(tokens, diagnostics, ">");

    params
}
1489
1490fn parse_tuple_type_hint(
1493 tokens: &mut TokenStream,
1494 id_gen: &mut IdGenerator,
1495 diagnostics: &mut Vec<ParseError>,
1496) -> TypeHint {
1497 let open_paren = require_token(tokens, diagnostics, "(");
1498
1499 let mut item_hints = vec![];
1500 loop {
1501 if peeked_symbol_is(tokens, ")") {
1502 break;
1503 }
1504
1505 item_hints.push(parse_type_hint(tokens, id_gen, diagnostics));
1506
1507 if let Some(token) = tokens.peek() {
1508 if token.text == "," {
1509 tokens.pop();
1510 }
1511 } else {
1512 diagnostics.push(ParseError::Incomplete {
1513 position: Position::todo(tokens.path.clone()),
1514 message: ErrorMessage(vec![Text(
1515 "Invalid syntax: Expected `,` or `)` here, but got EOF".to_owned(),
1516 )]),
1517 });
1518 break;
1519 }
1520 }
1521
1522 let close_paren = require_token(tokens, diagnostics, ")");
1523
1524 TypeHint {
1525 sym: TypeSymbol {
1526 name: TypeName {
1527 name: "Tuple".to_owned(),
1528 },
1529 position: open_paren.position.clone(),
1530 id: id_gen.next(),
1531 },
1532 args: item_hints,
1533 position: Position::merge(&open_paren.position, &close_paren.position),
1534 }
1535}
1536
1537fn parse_type_hint(
1539 tokens: &mut TokenStream,
1540 id_gen: &mut IdGenerator,
1541 diagnostics: &mut Vec<ParseError>,
1542) -> TypeHint {
1543 if peeked_symbol_is(tokens, "(") {
1544 return parse_tuple_type_hint(tokens, id_gen, diagnostics);
1545 }
1546
1547 let sym = parse_type_symbol(tokens, id_gen, diagnostics);
1548 let (args, close_pos) = parse_type_arguments(tokens, id_gen, diagnostics);
1549
1550 let position = match close_pos {
1551 Some(close_pos) => Position::merge(&sym.position, &close_pos),
1552 None => sym.position.clone(),
1553 };
1554
1555 if sym.name.name == "Tuple" {
1556 let formatted_args = args
1557 .iter()
1558 .map(|h| h.as_src())
1559 .collect::<Vec<_>>()
1560 .join(", ");
1561 let equivalent_tuple_src = format!("({})", formatted_args);
1562
1563 diagnostics.push(ParseError::Invalid {
1564 position: position.clone(),
1565 message: ErrorMessage(vec![
1566 msgcode!("Tuple"),
1567 msgtext!(" cannot be used a type hint. Use "),
1568 msgcode!("{}", equivalent_tuple_src),
1569 msgtext!(" instead."),
1570 ]),
1571 additional: vec![],
1572 });
1573 }
1574
1575 TypeHint {
1576 sym,
1577 args,
1578 position,
1579 }
1580}
1581
1582fn parse_colon_and(
1584 tokens: &mut TokenStream,
1585 id_gen: &mut IdGenerator,
1586 diagnostics: &mut Vec<ParseError>,
1587) -> TypeHint {
1588 require_token(tokens, diagnostics, ":");
1589 parse_type_hint(tokens, id_gen, diagnostics)
1590}
1591
1592fn parse_colon_and_hint_opt(
1594 tokens: &mut TokenStream,
1595 id_gen: &mut IdGenerator,
1596 diagnostics: &mut Vec<ParseError>,
1597) -> Option<TypeHint> {
1598 if peeked_symbol_is(tokens, ":") {
1599 let type_hint = parse_colon_and(tokens, id_gen, diagnostics);
1600 return Some(type_hint);
1601 }
1602
1603 None
1604}
1605
1606fn parse_parameter(
1607 tokens: &mut TokenStream,
1608 id_gen: &mut IdGenerator,
1609 diagnostics: &mut Vec<ParseError>,
1610 require_type_hint: bool,
1611) -> SymbolWithHint {
1612 let param = parse_symbol(tokens, id_gen, diagnostics);
1613
1614 let hint = if require_type_hint {
1615 Some(parse_colon_and(tokens, id_gen, diagnostics))
1616 } else {
1617 parse_colon_and_hint_opt(tokens, id_gen, diagnostics)
1618 };
1619
1620 SymbolWithHint {
1621 symbol: param,
1622 hint,
1623 }
1624}
1625
/// Parse a parenthesized parameter list, e.g. `(x, y: Int)`.
///
/// Duplicate parameter names are reported (underscore parameters are
/// exempt). If the opening `(` is missing, an empty list is returned with
/// both positions taken from the token that was found instead.
fn parse_parameters(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
) -> ParenthesizedParameters {
    let (ok, open_paren) = check_required_token(tokens, diagnostics, "(");

    if !ok {
        // No `(`: bail out with an empty parameter list.
        return ParenthesizedParameters {
            open_paren: open_paren.position.clone(),
            params: vec![],
            close_paren: open_paren.position.clone(),
        };
    }

    let mut params = vec![];
    loop {
        if peeked_symbol_is(tokens, ")") {
            break;
        }

        // Type hints are optional on function parameters.
        let param = parse_parameter(tokens, id_gen, diagnostics, false);
        params.push(param);

        if let Some(token) = tokens.peek() {
            if token.text == "," {
                tokens.pop();
            } else if token.text == ")" {
                break;
            } else {
                // Unexpected separator: report it and stop scanning the list.
                diagnostics.push(ParseError::Invalid {
                    position: token.position,
                    message: ErrorMessage(vec![Text(format!(
                        "Invalid syntax: Expected `,` or `)` here, but got `{}`",
                        token.text
                    ))]),
                    additional: vec![],
                });
                break;
            }
        } else {
            // Ran out of tokens mid-list.
            diagnostics.push(ParseError::Incomplete {
                position: Position::todo(tokens.path.clone()),
                message: ErrorMessage(vec![Text(
                    "Invalid syntax: Expected `,` or `)` here, but got EOF".to_string(),
                )]),
            });
            break;
        }
    }

    let close_paren = require_token(tokens, diagnostics, ")");

    // Report duplicate parameter names; `_` may repeat freely.
    let mut seen = HashSet::new();
    for param in &params {
        if param.symbol.name.is_underscore() {
            continue;
        }

        let param_name = &param.symbol.name.name;
        if seen.contains(param_name) {
            diagnostics.push(ParseError::Invalid {
                position: param.symbol.position.clone(),
                message: ErrorMessage(vec![Text(format!("Duplicate parameter: `{}`", param_name))]),
                additional: vec![],
            });
        } else {
            seen.insert(param_name.clone());
        }
    }

    ParenthesizedParameters {
        open_paren: open_paren.position.clone(),
        params,
        close_paren: close_paren.position.clone(),
    }
}
1705
1706fn parse_struct_fields(
1707 tokens: &mut TokenStream,
1708 id_gen: &mut IdGenerator,
1709 diagnostics: &mut Vec<ParseError>,
1710) -> Vec<FieldInfo> {
1711 let mut fields = vec![];
1712 loop {
1713 if peeked_symbol_is(tokens, "}") {
1714 break;
1715 }
1716
1717 if let Some(token) = tokens.peek() {
1718 let doc_comment = parse_doc_comment(&token);
1719 let sym = parse_symbol(tokens, id_gen, diagnostics);
1720 let hint = parse_colon_and(tokens, id_gen, diagnostics);
1721
1722 fields.push(FieldInfo {
1723 sym,
1724 hint,
1725 doc_comment,
1726 });
1727 } else {
1728 diagnostics.push(ParseError::Incomplete {
1729 position: Position::todo(tokens.path.clone()),
1730 message: ErrorMessage(vec![Text(
1731 "Invalid syntax: Expected a struct field name here like `foo: String`, but got EOF".to_string(),
1732 )]),
1733 });
1734 break;
1735 }
1736
1737 if let Some(token) = tokens.peek() {
1738 if token.text == "," {
1739 tokens.pop();
1740 } else if token.text == "}" {
1741 break;
1742 } else {
1743 diagnostics.push(ParseError::Invalid {
1744 position: token.position,
1745 message: ErrorMessage(vec![Text(format!(
1746 "Invalid syntax: Expected `,` or `}}` here, but got `{}`",
1747 token.text
1748 ))]),
1749 additional: vec![],
1750 });
1751 break;
1752 }
1753 } else {
1754 diagnostics.push(ParseError::Incomplete {
1755 position: Position::todo(tokens.path.clone()),
1756 message: ErrorMessage(vec![Text(
1757 "Invalid syntax: Expected `,` or `}}` here, but got EOF".to_string(),
1758 )]),
1759 });
1760 break;
1761 }
1762 }
1763
1764 fields
1767}
1768
/// Parse a braced block of expressions, e.g. `{ foo() bar() }`.
///
/// All expressions except the last are marked as having unused values; when
/// `is_loop_body` is true the last expression is marked unused too.
fn parse_block(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
    is_loop_body: bool,
) -> Block {
    let open_brace = require_token(tokens, diagnostics, "{");
    if open_brace.text != "{" {
        // `require_token` already reported the problem; return an empty
        // block anchored at whatever token we got.
        return Block {
            open_brace: open_brace.position.clone(),
            exprs: vec![],
            close_brace: open_brace.position,
        };
    }

    let mut exprs: Vec<Expression> = vec![];
    loop {
        if let Some(token) = tokens.peek() {
            if token.text == "}" {
                break;
            }
        } else {
            diagnostics.push(ParseError::Incomplete {
                position: Position::todo(tokens.path.clone()),
                message: ErrorMessage(vec![Text(
                    "Invalid syntax: Expected `}` here, but got EOF".to_string(),
                )]),
            });
            break;
        }

        let start_idx = tokens.idx;
        let expr = parse_expression(tokens, id_gen, diagnostics);
        // Stop at the first unparseable expression rather than looping on it.
        if expr.expr_.is_invalid_or_placeholder() {
            break;
        }
        exprs.push(expr);
        // Guard against infinite loops: each iteration must consume tokens.
        assert!(
            tokens.idx > start_idx,
            "The parser should always make forward progress."
        );
    }

    // Only the final expression's value is used (and not even then inside a
    // loop body). Note this loop never runs when `exprs` is empty, so the
    // `exprs_len - 1` subtraction cannot underflow.
    let exprs_len = exprs.len();
    for (i, expr) in exprs.iter_mut().enumerate() {
        if i < exprs_len - 1 || is_loop_body {
            expr.value_is_used = false;
        }
    }

    let exprs: Vec<Rc<Expression>> = exprs.into_iter().map(Rc::new).collect();

    let close_brace = require_token(tokens, diagnostics, "}");
    Block {
        open_brace: open_brace.position,
        exprs,
        close_brace: close_brace.position,
    }
}
1832
1833fn join_comments(comments: &[(Position, &str)]) -> String {
1834 let mut comment_texts = comments
1835 .iter()
1836 .map(|(_, comment)| {
1837 let comment_text = comment.strip_prefix("//").unwrap_or(comment);
1838 comment_text.strip_prefix(" ").unwrap_or(comment_text)
1839 })
1840 .collect::<Vec<_>>();
1841
1842 if let Some(comment_text) = comment_texts.last_mut() {
1843 *comment_text = comment_text.strip_suffix('\n').unwrap_or(comment_text)
1844 }
1845
1846 comment_texts.join("")
1847}
1848
1849fn parse_doc_comment(token: &Token) -> Option<String> {
1850 if !token.preceding_comments.is_empty() {
1851 return Some(join_comments(&token.preceding_comments));
1852 }
1853 None
1854}
1855
1856fn parse_function_or_method(
1857 tokens: &mut TokenStream,
1858 id_gen: &mut IdGenerator,
1859 diagnostics: &mut Vec<ParseError>,
1860) -> Option<ToplevelItem> {
1861 let mut visibility = Visibility::CurrentFile;
1862 let mut first_token = None;
1863
1864 if let Some(token) = tokens.peek() {
1865 if token.text == "external" {
1866 let token = tokens.pop().unwrap();
1867 visibility = Visibility::External(token.position.clone());
1868 first_token = Some(token);
1869 }
1870 }
1871
1872 let fun_token = require_token(tokens, diagnostics, "fun");
1873 let first_token = first_token.unwrap_or_else(|| fun_token.clone());
1874
1875 match tokens.peek() {
1883 Some(token) => {
1884 if token.text == "(" {
1885 Some(parse_method(
1886 tokens,
1887 id_gen,
1888 diagnostics,
1889 first_token,
1890 visibility,
1891 ))
1892 } else {
1893 parse_function(tokens, id_gen, diagnostics, first_token, visibility)
1894 }
1895 }
1896 None => {
1897 diagnostics.push(ParseError::Incomplete {
1898 position: Position::todo(tokens.path.clone()),
1899 message: ErrorMessage(vec![Text(
1900 "Unfinished function or method definition.".to_owned(),
1901 )]),
1902 });
1903 None
1904 }
1905 }
1906}
1907
/// Parse a method definition after the `fun` keyword, e.g.
/// `fun (self: MyType) my_method() { ... }`.
///
/// The receiver's type annotation is mandatory; when missing, a diagnostic
/// is recorded and a `__MISSING_TYPE` placeholder hint is used so parsing
/// can continue.
fn parse_method(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
    first_token: Token,
    visibility: Visibility,
) -> ToplevelItem {
    let doc_comment = parse_doc_comment(&first_token);

    // The parenthesized receiver, e.g. `(self: MyType)`.
    require_token(tokens, diagnostics, "(");
    let receiver_param = parse_parameter(tokens, id_gen, diagnostics, true);
    let receiver_sym = receiver_param.symbol.clone();
    let receiver_hint = match receiver_param.hint {
        Some(type_name) => type_name,
        None => {
            diagnostics.push(ParseError::Incomplete {
                position: receiver_param.symbol.position.clone(),
                message: ErrorMessage(vec![Text(
                    "This `self` argument requires a type.".to_owned(),
                )]),
            });
            // Recover with a placeholder type anchored at the receiver.
            TypeHint {
                sym: TypeSymbol {
                    name: TypeName {
                        name: "__MISSING_TYPE".to_owned(),
                    },
                    position: receiver_sym.position.clone(),
                    id: id_gen.next(),
                },
                args: vec![],
                position: receiver_sym.position.clone(),
            }
        }
    };
    require_token(tokens, diagnostics, ")");

    let name_sym = parse_symbol(tokens, id_gen, diagnostics);

    let type_params = parse_type_params(tokens, id_gen, diagnostics);
    let params = parse_parameters(tokens, id_gen, diagnostics);
    let return_hint = parse_colon_and_hint_opt(tokens, id_gen, diagnostics);

    let body = parse_block(tokens, id_gen, diagnostics, false);
    let close_brace_pos = body.close_brace.clone();

    let position = Position::merge_token(&first_token, &close_brace_pos);

    // The method wraps an ordinary function definition.
    let fun_info = FunInfo {
        pos: position.clone(),
        doc_comment,
        name_sym: Some(name_sym.clone()),
        item_id: Some(ToplevelItemId(id_gen.next().0)),
        type_params,
        params,
        body,
        return_hint,
    };
    let meth_info = MethodInfo {
        pos: position.clone(),
        receiver_hint,
        receiver_sym,
        name_sym,
        kind: MethodKind::UserDefinedMethod(fun_info),
    };

    ToplevelItem::Method(meth_info, visibility)
}
1975
1976fn parse_function(
1977 tokens: &mut TokenStream,
1978 id_gen: &mut IdGenerator,
1979 diagnostics: &mut Vec<ParseError>,
1980 first_token: Token,
1981 visibility: Visibility,
1982) -> Option<ToplevelItem> {
1983 let doc_comment = parse_doc_comment(&first_token);
1984
1985 let name_sym = parse_symbol(tokens, id_gen, diagnostics);
1986 if is_reserved_word_placeholder(&name_sym) {
1987 return None;
1991 }
1992
1993 let type_params = parse_type_params(tokens, id_gen, diagnostics);
1994 let params = parse_parameters(tokens, id_gen, diagnostics);
1995 let return_hint = parse_colon_and_hint_opt(tokens, id_gen, diagnostics);
1996
1997 let body = parse_block(tokens, id_gen, diagnostics, false);
1998 let close_brace_pos = body.close_brace.clone();
1999 let position = Position::merge_token(&first_token, &close_brace_pos);
2000
2001 Some(ToplevelItem::Fun(
2002 name_sym.clone(),
2003 FunInfo {
2004 pos: position.clone(),
2005 doc_comment,
2006 name_sym: Some(name_sym),
2007 item_id: Some(ToplevelItemId(id_gen.next().0)),
2008 type_params,
2009 params,
2010 body,
2011 return_hint,
2012 },
2013 visibility,
2014 ))
2015}
2016
/// Keywords that cannot be used as variable, function, or type names.
const RESERVED_WORDS: &[&str] = &[
    "let", "fun", "enum", "struct", "internal", "external", "import", "if", "else", "while",
    "return", "test", "match", "break", "continue", "for", "in", "assert",
];
2021
2022pub fn placeholder_symbol(position: Position, id_gen: &mut IdGenerator) -> Symbol {
2023 let name = SymbolName {
2024 name: "__placeholder".to_string(),
2025 };
2026 Symbol {
2027 interned_id: id_gen.intern_symbol(&name),
2028 position,
2029 name,
2030 id: id_gen.next(),
2031 }
2032}
2033
2034fn reserved_word_placeholder(position: Position, id_gen: &mut IdGenerator) -> Symbol {
2035 let name = SymbolName {
2036 name: "__reserved_word_placeholder".to_string(),
2037 };
2038 Symbol {
2039 interned_id: id_gen.intern_symbol(&name),
2040 position,
2041 name,
2042 id: id_gen.next(),
2043 }
2044}
2045
2046fn is_reserved_word_placeholder(symbol: &Symbol) -> bool {
2047 symbol.name.name == "__reserved_word_placeholder"
2048}
2049
/// Parse the left-hand side of a `let`: either a single symbol (`let x`) or
/// a parenthesized destructuring list (`let (a, b)`).
///
/// Duplicate destructuring names are reported (underscores are exempt).
fn parse_let_destination(
    tokens: &mut TokenStream,
    id_gen: &mut IdGenerator,
    diagnostics: &mut Vec<ParseError>,
) -> LetDestination {
    if peeked_symbol_is(tokens, "(") {
        tokens.pop();

        let mut symbols = vec![];
        loop {
            if peeked_symbol_is(tokens, ")") {
                tokens.pop();
                break;
            }

            let start_idx = tokens.idx;

            let symbol = parse_symbol(tokens, id_gen, diagnostics);
            if symbol.is_placeholder() {
                // An unparseable name with no following comma: stop here
                // rather than looping on the bad token.
                if !peeked_symbol_is(tokens, ",") {
                    break;
                }
            }

            symbols.push(symbol);

            // Items are comma-separated; the comma is optional before `)`.
            if !peeked_symbol_is(tokens, ")") {
                require_token(tokens, diagnostics, ",");
            }

            // Guard against infinite loops: each iteration must consume tokens.
            assert!(
                tokens.idx > start_idx,
                "The parser should always make forward progress."
            );
        }

        // Report duplicate names; `_` may repeat freely.
        let mut seen = HashSet::new();
        for symbol in &symbols {
            if symbol.name.is_underscore() {
                continue;
            }

            let name = &symbol.name.name;
            if seen.contains(name) {
                diagnostics.push(ParseError::Invalid {
                    position: symbol.position.clone(),
                    message: ErrorMessage(vec![Text(format!(
                        "Duplicate destructure variable: `{}`.",
                        name
                    ))]),
                    additional: vec![],
                });
            } else {
                seen.insert(name.clone());
            }
        }

        LetDestination::Destructure(symbols)
    } else {
        LetDestination::Symbol(parse_symbol(tokens, id_gen, diagnostics))
    }
}
2116
2117fn parse_symbol(
2118 tokens: &mut TokenStream,
2119 id_gen: &mut IdGenerator,
2120 diagnostics: &mut Vec<ParseError>,
2121) -> Symbol {
2122 let variable_token = require_a_token(tokens, diagnostics, "variable name");
2123 if !SYMBOL_RE.is_match(variable_token.text) {
2124 diagnostics.push(ParseError::Invalid {
2125 position: variable_token.position.clone(),
2126 message: ErrorMessage(vec![Text(format!(
2127 "Invalid name: '{}'",
2128 variable_token.text
2129 ))]),
2130 additional: vec![],
2131 });
2132 tokens.unpop();
2133 return placeholder_symbol(variable_token.position, id_gen);
2134 }
2135
2136 for reserved in RESERVED_WORDS {
2137 if variable_token.text == *reserved {
2138 diagnostics.push(ParseError::Invalid {
2139 position: variable_token.position.clone(),
2140 message: ErrorMessage(vec![Text(format!(
2141 "'{}' is a reserved word that cannot be used as a name",
2142 variable_token.text
2143 ))]),
2144 additional: vec![],
2145 });
2146 tokens.unpop();
2147 return reserved_word_placeholder(variable_token.position, id_gen);
2148 }
2149 }
2150
2151 let name = SymbolName {
2152 name: variable_token.text.to_string(),
2153 };
2154 Symbol {
2155 interned_id: id_gen.intern_symbol(&name),
2156 position: variable_token.position,
2157 name,
2158 id: id_gen.next(),
2159 }
2160}
2161
2162fn parse_let(
2163 tokens: &mut TokenStream,
2164 id_gen: &mut IdGenerator,
2165 diagnostics: &mut Vec<ParseError>,
2166) -> Expression {
2167 let let_token = require_token(tokens, diagnostics, "let");
2168 let destination = parse_let_destination(tokens, id_gen, diagnostics);
2169
2170 let hint = parse_colon_and_hint_opt(tokens, id_gen, diagnostics);
2171
2172 require_token(tokens, diagnostics, "=");
2173 let expr = parse_expression(tokens, id_gen, diagnostics);
2174
2175 Expression::new(
2176 Position::merge(&let_token.position, &expr.position),
2177 Expression_::Let(destination, hint, Rc::new(expr)),
2178 id_gen.next(),
2179 )
2180}
2181
2182fn parse_assign(
2183 tokens: &mut TokenStream,
2184 id_gen: &mut IdGenerator,
2185 diagnostics: &mut Vec<ParseError>,
2186) -> Expression {
2187 let variable = parse_symbol(tokens, id_gen, diagnostics);
2188
2189 if !peeked_symbol_is(tokens, "=") {
2190 let position = Position::todo(tokens.path.clone());
2194 return Expression::invalid(position, id_gen.next());
2195 }
2196 require_token(tokens, diagnostics, "=");
2197 let expr = parse_expression(tokens, id_gen, diagnostics);
2198
2199 Expression::new(
2200 Position::merge(&variable.position, &expr.position),
2201 Expression_::Assign(variable, Rc::new(expr)),
2202 id_gen.next(),
2203 )
2204}
2205
2206fn parse_assign_update(
2207 tokens: &mut TokenStream,
2208 id_gen: &mut IdGenerator,
2209 diagnostics: &mut Vec<ParseError>,
2210) -> Expression {
2211 let variable = parse_symbol(tokens, id_gen, diagnostics);
2212
2213 let op_token = require_a_token(tokens, diagnostics, "`+=` or `-=`");
2214
2215 let op = match op_token.text {
2216 "+=" => AssignUpdateKind::Add,
2217 "-=" => AssignUpdateKind::Subtract,
2218 _ => {
2219 diagnostics.push(ParseError::Invalid {
2220 position: op_token.position.clone(),
2221 message: ErrorMessage(vec![Text(format!(
2222 "Invalid syntax: Expected `+=` or `-=`, but got `{}`",
2223 op_token.text
2224 ))]),
2225 additional: vec![],
2226 });
2227 AssignUpdateKind::Add
2228 }
2229 };
2230
2231 let expr = parse_expression(tokens, id_gen, diagnostics);
2232
2233 Expression::new(
2234 Position::merge(&variable.position, &expr.position),
2235 Expression_::AssignUpdate(variable, op, Rc::new(expr)),
2236 id_gen.next(),
2237 )
2238}
2239
2240fn parse_toplevel_expr(
2241 tokens: &mut TokenStream,
2242 id_gen: &mut IdGenerator,
2243 diagnostics: &mut Vec<ParseError>,
2244) -> ToplevelItem {
2245 let expr = parse_expression(tokens, id_gen, diagnostics);
2246 ToplevelItem::Expr(ToplevelExpression(expr))
2247}
2248
2249fn parse_toplevel_block(
2250 tokens: &mut TokenStream,
2251 id_gen: &mut IdGenerator,
2252 diagnostics: &mut Vec<ParseError>,
2253) -> ToplevelItem {
2254 let block = parse_block(tokens, id_gen, diagnostics, false);
2255 ToplevelItem::Block(block)
2256}
2257
2258fn parse_toplevel_items_from_tokens(
2259 tokens: &mut TokenStream,
2260 id_gen: &mut IdGenerator,
2261 diagnostics: &mut Vec<ParseError>,
2262) -> Vec<ToplevelItem> {
2263 let mut items: Vec<ToplevelItem> = vec![];
2264
2265 while !tokens.is_empty() {
2266 let start_idx = tokens.idx;
2267 match parse_toplevel_item_from_tokens(tokens, id_gen, diagnostics) {
2268 Some(item) => {
2269 let was_invalid = item.is_invalid_or_placeholder();
2270
2271 items.push(item);
2272 if was_invalid {
2273 break;
2274 }
2275
2276 assert!(
2277 tokens.idx > start_idx,
2278 "The parser should always make forward progress",
2279 );
2280 }
2281 None => break,
2282 }
2283 }
2284 items
2285}
2286
2287fn parse_toplevel_item_from_tokens(
2288 tokens: &mut TokenStream,
2289 id_gen: &mut IdGenerator,
2290 diagnostics: &mut Vec<ParseError>,
2291) -> Option<ToplevelItem> {
2292 if let Some(token) = tokens.peek() {
2293 if token.text == "fun"
2294 || token.text == "test"
2295 || token.text == "enum"
2296 || token.text == "struct"
2297 || token.text == "external"
2298 || token.text == "import"
2299 {
2300 return parse_definition(tokens, id_gen, diagnostics);
2301 }
2302
2303 if token.text == "{" {
2304 return Some(parse_toplevel_block(tokens, id_gen, diagnostics));
2305 }
2306 }
2307
2308 Some(parse_toplevel_expr(tokens, id_gen, diagnostics))
2309}
2310
2311pub fn parse_inline_expr_from_str(
2312 path: &Path,
2313 src: &str,
2314 id_gen: &mut IdGenerator,
2315) -> (Expression, Vec<ParseError>) {
2316 let mut diagnostics = vec![];
2317
2318 let (mut tokens, lex_errors) = lex(path, src);
2319 for error in lex_errors {
2320 diagnostics.push(error);
2321 }
2322
2323 let expr = parse_expression(&mut tokens, id_gen, &mut diagnostics);
2324 (expr, diagnostics)
2325}
2326
2327pub fn parse_toplevel_items(
2328 path: &Path,
2329 src: &str,
2330 vfs: &mut Vfs,
2331 id_gen: &mut IdGenerator,
2332) -> (Vec<ToplevelItem>, Vec<ParseError>) {
2333 vfs.insert(path.to_owned(), src.to_owned());
2334
2335 let mut diagnostics = vec![];
2336
2337 let (mut tokens, lex_errors) = lex(path, src);
2338 for error in lex_errors {
2339 diagnostics.push(error);
2340 }
2341
2342 let items = parse_toplevel_items_from_tokens(&mut tokens, id_gen, &mut diagnostics);
2343 (items, diagnostics)
2344}
2345
2346pub fn parse_toplevel_items_from_span(
2349 path: &Path,
2350 src: &str,
2351 vfs: &mut Vfs,
2352 id_gen: &mut IdGenerator,
2353 offset: usize,
2354 end_offset: usize,
2355) -> (Vec<ToplevelItem>, Vec<ParseError>) {
2356 vfs.insert(path.to_owned(), src.to_owned());
2357
2358 let mut diagnostics = vec![];
2359
2360 let (mut tokens, lex_errors) = lex_between(path, src, offset, end_offset);
2361 for error in lex_errors {
2362 diagnostics.push(error);
2363 }
2364
2365 let items = parse_toplevel_items_from_tokens(&mut tokens, id_gen, &mut diagnostics);
2366 (items, diagnostics)
2367}
2368
#[cfg(test)]
mod tests {
    use std::path::PathBuf;

    use super::*;

    /// Parse `src` as a standalone file and return only the errors.
    fn parse_errors(src: &str) -> Vec<ParseError> {
        let mut vfs = Vfs::default();
        let (_, errors) = parse_toplevel_items(
            &PathBuf::from("__test.gdn"),
            src,
            &mut vfs,
            &mut IdGenerator::default(),
        );
        errors
    }

    #[test]
    fn test_incomplete_expression() {
        assert!(!parse_errors("1 + ").is_empty())
    }

    #[test]
    fn test_repeated_param() {
        assert!(!parse_errors("fun f(x, x) {} ").is_empty())
    }

    #[test]
    fn test_repeated_param_underscore() {
        assert!(parse_errors("fun f(_, _) {} ").is_empty())
    }
}