1pub mod ast;
2
3#[cfg(test)]
4mod tests;
5
6use crate::lexer::{CodePosition, Lexer, Token, TokenType};
7use crate::parser::ast::{
8 ClassDefinition, ClassMember, ConditionalNode, Constructor, FunctionDefinition,
9 Method, Node, NodeData, OperationExpression, Operator, OperatorType,
10 StructDefinition, StructMember, Visibility, AST
11};
12use crate::{regex_patterns, utils};
13
14use std::collections::VecDeque;
15use std::fmt::{Display, Formatter};
16use std::mem;
17use std::str::FromStr;
18
/// Every category of error the parser can produce while building an AST.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum ParsingError {
    BracketMismatch,
    ContFlowArgMissing,
    Eof,
    InvalidConPart,
    InvalidAssignment,
    InvalidParameter,
    LexerError,
}

impl ParsingError {
    /// Returns the numeric error code for this error (always negative).
    pub fn error_code(&self) -> i32 {
        use ParsingError::*;

        match self {
            BracketMismatch => -1,
            ContFlowArgMissing => -2,
            Eof => -3,
            InvalidConPart => -4,
            InvalidAssignment => -5,
            InvalidParameter => -6,
            LexerError => -7,
        }
    }

    /// Returns a human-readable description of this error.
    pub fn error_text(&self) -> &'static str {
        use ParsingError::*;

        match self {
            BracketMismatch => "Bracket mismatch",
            ContFlowArgMissing => "Control flow statement condition(s) or argument(s) is/are missing",
            Eof => "End of file was reached early",
            InvalidConPart => "Invalid statement part in control flow statement",
            InvalidAssignment => "Invalid assignment operation",
            InvalidParameter => "Invalid function parameter",
            LexerError => "Error during lexical parsing",
        }
    }
}

impl Display for ParsingError {
    /// Formats the error as its SCREAMING_SNAKE_CASE identifier.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            ParsingError::BracketMismatch => "BRACKET_MISMATCH",
            ParsingError::ContFlowArgMissing => "CONT_FLOW_ARG_MISSING",
            ParsingError::Eof => "EOF",
            ParsingError::InvalidConPart => "INVALID_CON_PART",
            ParsingError::InvalidAssignment => "INVALID_ASSIGNMENT",
            ParsingError::InvalidParameter => "INVALID_PARAMETER",
            ParsingError::LexerError => "LEXER_ERROR",
        };

        write!(f, "{}", name)
    }
}
69
/// Parser that turns the token stream produced by the [`Lexer`] into an [`AST`].
#[derive(Debug)]
pub struct Parser {
    /// Lexer used to tokenize raw input and to track line/column positions.
    lexer: Lexer,
    /// Most recently collected lang doc comment, if any.
    ///
    /// NOTE(review): presumably set while parsing doc comments and consumed by
    /// the item being documented — confirm against the rest of this file.
    lang_doc_comment: Option<String>,
}
75
76impl Parser {
77 pub fn new() -> Self {
78 Self {
79 lexer: Lexer::new(),
80 lang_doc_comment: None,
81 }
82 }
83
84 pub fn reset_position_vars(&mut self) {
85 self.lexer.reset_position_vars();
86 self.lang_doc_comment = None;
87 }
88
    /// Returns the lexer's current line number.
    pub fn line_number(&self) -> usize {
        self.lexer.line_number()
    }
92
    /// Sets the lexer's current line number and resets its column to 1.
    pub fn set_line_number(&mut self, line_number: usize) {
        self.lexer.set_line_number(line_number);
        // A new line always starts at column 1.
        self.lexer.set_column(1);
    }
97
98 pub fn parse_lines(&mut self, lines: impl Into<String>) -> Option<AST> {
99 let tokens = VecDeque::from(self.lexer.read_tokens(lines));
100
101 self.parse_tokens(tokens)
102 }
103
104 pub fn parse_tokens(&mut self, mut tokens: VecDeque<Token>) -> Option<AST> {
105 Self::remove_line_continuation_and_single_line_text_quotes_tokens(&mut tokens);
106
107 self.parse_tokens_internal(&mut tokens).map(|mut ast| {
108 ast.optimize_ast();
109
110 ast
111 })
112 }
113
114 fn parse_tokens_internal(&mut self, tokens: &mut VecDeque<Token>) -> Option<AST> {
115 if tokens.is_empty() {
116 return None;
117 }
118
119 let mut ast = AST::new();
120 let mut block_pos = 0;
121
122 let mut error_nodes = Vec::new();
123
124 while !tokens.is_empty() {
125 Self::trim_first_line(tokens);
126
127 self.parse_comment_tokens(tokens, &mut error_nodes);
128 if !error_nodes.is_empty() {
129 if !error_nodes.is_empty() {
130 error_nodes.into_iter().for_each(|token| ast.add_child(token));
131 }
132
133 break;
134 }
135
136 Self::trim_first_line(tokens);
137
138 if tokens.is_empty() {
139 break;
140 }
141
142 if matches!(tokens[0].token_type(), TokenType::Eol) {
143 tokens.pop_front();
144
145 continue;
146 }
147
148 if matches!(tokens[0].token_type(), TokenType::Eof) {
149 let token = tokens.pop_front().unwrap();
150
151 if !tokens.is_empty() {
152 ast.add_child(Node::new_parsing_error_node(
153 token.pos(),
154 ParsingError::LexerError,
155 "Tokens after EOF are not allowed",
156 ));
157 }
158
159 break;
160 }
161
162 let current_token = &tokens[0];
163
164 if matches!(current_token.token_type(), TokenType::OpeningBlockBracket) {
166 tokens.pop_front();
167
168 block_pos += 1;
169
170 continue;
171 }else if matches!(current_token.token_type(), TokenType::ClosingBlockBracket) {
172 tokens.pop_front();
173
174 if block_pos == 0 {
175 break;
176 }
177
178 block_pos -= 1;
179
180 continue;
181 }
182
183 if !matches!(current_token.token_type(), TokenType::Other) || !(
185 current_token.value() == "return" || current_token.value() == "throw") {
186 let returned_node = self.parse_assignment(tokens, false);
187 if let Some(returned_node) = returned_node {
188 ast.add_child(returned_node);
189
190 continue;
191 }
192 }
193
194 let returned_ast = self.parse_line(tokens);
196 if let Some(returned_ast) = returned_ast {
197 ast.add_child(returned_ast.into_node());
198 }else {
199 return Some(ast);
201 }
202 }
203
204 Some(ast)
205 }
206
    /// Parses `tokens` as a conditional (boolean) operator expression.
    #[inline(always)]
    fn parse_condition_expr(&mut self, tokens: &mut VecDeque<Token>) -> Option<Node> {
        self.parse_operator_expr(tokens, OperatorType::Condition)
    }
211
    /// Parses `tokens` as a math operator expression.
    #[inline(always)]
    fn parse_math_expr(&mut self, tokens: &mut VecDeque<Token>) -> Option<Node> {
        self.parse_operator_expr(tokens, OperatorType::Math)
    }
216
    /// Parses `tokens` as a general operator expression.
    #[inline(always)]
    fn parse_operation_expr(&mut self, tokens: &mut VecDeque<Token>) -> Option<Node> {
        self.parse_operator_expr(tokens, OperatorType::General)
    }
221
    /// Parses `tokens` as an operator expression of the given `operator_type`,
    /// starting at precedence 0 with no surrounding token context.
    #[inline(always)]
    fn parse_operator_expr(&mut self, tokens: &mut VecDeque<Token>, operator_type: OperatorType) -> Option<Node> {
        self.parse_operator_expression(tokens, &mut None, &mut None, 0, operator_type)
    }
226
227 fn parse_operator_expression(
228 &mut self,
229 tokens: &mut VecDeque<Token>,
230 tokens_left: &mut Option<&mut VecDeque<Token>>,
231 tokens_left_behind_middle_part_end: &mut Option<&mut VecDeque<Token>>,
232 current_operator_precedence: isize,
233 operator_type: OperatorType,
234 ) -> Option<Node> {
235 let non_operator = match operator_type {
236 OperatorType::Math => Operator::MathNon,
237 OperatorType::Condition => Operator::ConditionalNon,
238 OperatorType::General => Operator::Non,
239 OperatorType::All => {
240 return None;
241 },
242 };
243
244 Self::trim_first_line(tokens);
245
246 let mut operator = None;
247 let mut left_nodes = Vec::new();
248 let mut middle_node = None;
249 let mut right_node = None;
250
251 let mut whitespaces = VecDeque::new();
252
253 let mut other_tokens = VecDeque::new();
254
255 'tokenProcessing:
256 while !tokens.is_empty() {
257 let t = tokens[0].clone();
258
259 match t.token_type() {
260 TokenType::Eol | TokenType::Eof => {
261 break 'tokenProcessing;
262 },
263
264 TokenType::StartComment | TokenType::StartDocComment => {
265 self.parse_comment_tokens(tokens, &mut left_nodes);
266 },
267
268 TokenType::LiteralNull | TokenType::LiteralText | TokenType::LiteralNumber |
269 TokenType::EscapeSequence | TokenType::Assignment | TokenType::ClosingBracket |
270 TokenType::LexerError => {
271 if !whitespaces.is_empty() {
272 other_tokens.append(&mut whitespaces);
273 }
274
275 if !other_tokens.is_empty() {
276 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
277 other_tokens.clear();
278 }
279
280 tokens.pop_front();
281
282 match t.token_type() {
283 TokenType::LiteralNull => {
284 left_nodes.push(Node::new_null_value_node(t.pos()));
285 },
286
287 TokenType::LiteralText | TokenType::Assignment | TokenType::ClosingBracket => {
288 left_nodes.push(Node::new_text_value_node(t.pos(), t.value()));
289 },
290
291 TokenType::LiteralNumber => {
292 self.parse_number_token(t, &mut left_nodes);
293 },
294
295 TokenType::EscapeSequence => {
296 self.parse_escape_sequence_token(t, &mut left_nodes);
297 },
298
299 TokenType::LexerError => {
300 self.parse_lexer_error_token(t, &mut left_nodes);
301 },
302
303 _ => {},
304 }
305 },
306
307 TokenType::StartMultilineText => {
308 if !whitespaces.is_empty() {
309 other_tokens.append(&mut whitespaces);
310 }
311
312 if !other_tokens.is_empty() {
313 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
314 other_tokens.clear();
315 }
316
317 tokens.pop_front();
318
319 loop {
320 if let Some(t) = tokens.pop_front() {
321 if matches!(t.token_type(), TokenType::EndMultilineText) {
322 break;
323 }
324
325 if matches!(t.token_type(), TokenType::LiteralText | TokenType::Eol) {
326 left_nodes.push(Node::new_text_value_node(t.pos(), t.value()));
327 }else if matches!(t.token_type(), TokenType::EscapeSequence) {
328 self.parse_escape_sequence_token(t, &mut left_nodes);
329 }else if matches!(t.token_type(), TokenType::LexerError) {
330 left_nodes.push(Node::new_parsing_error_node(
331 t.pos(),
332 ParsingError::LexerError,
333 t.value()
334 ));
335 }else {
336 left_nodes.push(Node::new_parsing_error_node(
337 CodePosition::EMPTY,
338 ParsingError::Eof,
339 format!(
340 "Invalid token type ({}) in multiline text during operator parsing",
341 t.token_type(),
342 ),
343 ));
344 }
345 }else {
346 left_nodes.push(Node::new_parsing_error_node(
347 CodePosition::EMPTY,
348 ParsingError::Eof,
349 "Missing multiline text end token during operator parsing",
350 ));
351
352 break 'tokenProcessing;
353 }
354 }
355 },
356
357 TokenType::Whitespace => {
358 tokens.pop_front();
359
360 whitespaces.push_back(t);
361 },
362
363 TokenType::Identifier | TokenType::ParserFunctionIdentifier => {
364 self.parse_operator_expression_variable_name_and_function_call(
365 tokens, operator_type, &mut other_tokens, &mut left_nodes, t, &mut whitespaces,
366 );
367 },
368
369 TokenType::OpeningBracket | TokenType::Operator | TokenType::ArgumentSeparator => {
370 let mut t = t;
371 let mut value = t.value();
372
373 if matches!(t.token_type(), TokenType::ArgumentSeparator) {
375 let byte_index = value.find(",").unwrap();
376
377 if byte_index > 0 {
378 whitespaces.push_back(Token::new(t.pos(), &value[..byte_index], TokenType::Whitespace));
379 }
380
381 if byte_index < value.len() - 1 {
382 tokens.insert(1, Token::new(t.pos(), &value[byte_index+1..], TokenType::Whitespace));
383 }
384
385 t = Token::new(t.pos(), ",", TokenType::Operator);
386 value = t.value();
387 tokens[0] = t.clone();
388 }
389
390 if matches!(t.token_type(), TokenType::OpeningBracket) && value == "(" {
392 let end_index = utils::get_index_of_matching_bracket_tok(
393 tokens.make_contiguous(), 0, usize::MAX, "(", ")", true,
394 );
395 let Some(end_index) = end_index else {
396 left_nodes.push(Node::new_parsing_error_node(
397 CodePosition::EMPTY,
398 ParsingError::BracketMismatch,
399 "Bracket in operator expression is missing",
400 ));
401
402 break 'tokenProcessing;
403 };
404
405 if other_tokens.is_empty() && left_nodes.is_empty() {
407 if !whitespaces.is_empty() {
408 whitespaces.clear();
409 }
410
411 let mut parameter_tokens = utils::split_off_arguments(tokens, end_index);
412
413 left_nodes.push(self.parse_operator_expr(&mut parameter_tokens, operator_type).unwrap());
414 }else {
415 if !whitespaces.is_empty() {
416 whitespaces.clear();
417 }
418
419 if !other_tokens.is_empty() {
420 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
421 other_tokens.clear();
422 }
423
424 let opening_bracket_token = &tokens[0];
425 let closing_bracket_token = &tokens[end_index];
426 let pos = opening_bracket_token.pos().combine(&closing_bracket_token.pos());
427
428 let mut function_call_tokens = utils::split_off_arguments(tokens, end_index);
429
430 let node = self.parse_operator_expr(&mut function_call_tokens, operator_type).unwrap();
431 left_nodes.push(Node::new_function_call_previous_node_value_node(
432 pos, "", "",
433 Self::convert_comma_operators_to_argument_separators(node),
434 ));
435 }
436
437 continue 'tokenProcessing;
438 }
439
440 if (matches!(t.token_type(), TokenType::OpeningBracket) && value == "[") ||
442 (matches!(t.token_type(), TokenType::Operator) && value == "?." &&
443 tokens.len() > 2 && matches!(tokens[1].token_type(), TokenType::OpeningBracket) &&
444 tokens[1].value() == "[") {
445 let starts_with_optional_marker = matches!(t.token_type(), TokenType::Operator);
446 let end_index = utils::get_index_of_matching_bracket_tok(
447 tokens.make_contiguous(),
448 if starts_with_optional_marker { 1 } else { 0 }, usize::MAX,
449 "[", "]", true,
450 );
451 let Some(end_index) = end_index else {
452 left_nodes.push(Node::new_parsing_error_node(
453 CodePosition::EMPTY,
454 ParsingError::BracketMismatch,
455 "Bracket in operator expression is missing",
456 ));
457
458 break 'tokenProcessing;
459 };
460
461 if OperatorType::All.is_compatible_with(operator_type) &&
463 (!other_tokens.is_empty() || !left_nodes.is_empty()) {
464 let old_operator = operator.replace(
465 if starts_with_optional_marker {
466 Operator::OptionalGetItem
467 }else {
468 Operator::GetItem
469 }
470 );
471
472 if current_operator_precedence <= operator.unwrap().precedence() {
473 if let Some(tokens_left) = tokens_left {
474 tokens_left.append(tokens);
475
476 if !whitespaces.is_empty() {
477 whitespaces.clear();
478 }
479
480 operator = old_operator;
481
482 break 'tokenProcessing;
483 }
484 }
485
486 if !whitespaces.is_empty() {
487 whitespaces.clear();
488 }
489
490 if !other_tokens.is_empty() {
491 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
492 other_tokens.clear();
493 }
494
495 let mut inner_tokens_left_behind_middle_part_end = VecDeque::new();
497
498 inner_tokens_left_behind_middle_part_end.push_back(Token::new(
500 CodePosition::EMPTY,
501 "DUMMY-A",
502 TokenType::Whitespace,
503 ));
504
505 let start_index = if starts_with_optional_marker { 2 } else { 1 };
506 let mut tokens_list = VecDeque::from_iter(
507 tokens.make_contiguous()[start_index..end_index].iter().cloned(),
508 );
509
510 let inner_middle_node_ret = self.parse_operator_expression(
511 &mut tokens_list,
512 &mut None,
513 &mut Some(&mut inner_tokens_left_behind_middle_part_end),
514 0,
515 operator_type,
516 );
517 if let Some(inner_middle_node_ret) = inner_middle_node_ret {
518 operator.replace(
521 if starts_with_optional_marker {
522 Operator::OptionalSlice
523 }else {
524 Operator::Slice
525 }
526 );
527
528 inner_tokens_left_behind_middle_part_end.pop_front();
530
531 tokens.drain(..=end_index);
532
533 let mut tokens_list = inner_tokens_left_behind_middle_part_end;
534
535 let inner_right_node_ret = self.parse_operator_expr(
536 &mut tokens_list, operator_type,
537 ).unwrap();
538 if tokens.is_empty() {
539 if inner_middle_node_ret.operator() == Some(non_operator) {
541 middle_node = inner_middle_node_ret.into_left_side_operand();
542 }else {
543 middle_node = Some(inner_middle_node_ret);
544 }
545
546 if inner_right_node_ret.operator() == Some(non_operator) {
548 right_node = inner_right_node_ret.into_left_side_operand();
549 }else {
550 right_node = Some(inner_right_node_ret);
551 }
552
553 break 'tokenProcessing;
554 }else {
555 let middle_node = if inner_middle_node_ret.operator() == Some(non_operator) {
557 inner_middle_node_ret.into_left_side_operand().unwrap()
558 }else {
559 inner_middle_node_ret
560 };
561
562 let right_node = if inner_right_node_ret.operator() == Some(non_operator) {
564 inner_right_node_ret.into_left_side_operand().unwrap()
565 }else {
566 inner_right_node_ret
567 };
568
569 let left_node = if left_nodes.len() == 1 {
570 left_nodes.pop().unwrap()
571 }else {
572 Node::new_list_node(Vec::from_iter(left_nodes.drain(..)))
573 };
574
575 left_nodes.push(Node::new_operation_statement_node(
576 left_node.pos().combine(&right_node.pos()),
577 OperationExpression::new(
578 Some(Box::new(left_node)),
579 Some(Box::new(middle_node)),
580 Some(Box::new(right_node)),
581 operator.take().unwrap(), operator_type,
582 ),
583 ));
584 }
585
586 continue 'tokenProcessing;
587 }
588
589 let mut tokens_list = utils::split_off_arguments(tokens, end_index);
590 if starts_with_optional_marker {
591 tokens_list.pop_front();
593 }
594
595 let node = self.parse_operator_expr(&mut tokens_list, operator_type).unwrap();
596 if tokens.is_empty() {
597 if node.operator() == Some(non_operator) {
599 right_node = node.into_left_side_operand();
600 }else {
601 right_node = Some(node);
602 }
603
604 break 'tokenProcessing;
605 }else {
606 let right_node = if node.operator() == Some(non_operator) {
608 node.into_left_side_operand().unwrap()
609 }else {
610 node
611 };
612
613 let left_node = if left_nodes.len() == 1 {
614 left_nodes.pop().unwrap()
615 }else {
616 Node::new_list_node(Vec::from_iter(left_nodes.drain(..)))
617 };
618
619 left_nodes.push(Node::new_operation_statement_node(
620 left_node.pos().combine(&right_node.pos()),
621 OperationExpression::new(
622 Some(Box::new(left_node)),
623 None,
624 Some(Box::new(right_node)),
625 operator.take().unwrap(), operator_type,
626 ),
627 ));
628
629 continue 'tokenProcessing;
630 }
631 }else if OperatorType::All.is_compatible_with(operator_type) && !starts_with_optional_marker {
632 if !whitespaces.is_empty() {
633 whitespaces.clear();
634 }
635
636 if !other_tokens.is_empty() {
637 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
638 other_tokens.clear();
639 }
640
641 let pos = t.pos().combine(&tokens[end_index].pos());
643
644 let mut tokens_list = utils::split_off_arguments(tokens, end_index);
645
646 let node = self.parse_operator_expr(&mut tokens_list, operator_type).unwrap();
647 left_nodes.push(Node::new_array_value_node(
648 pos,
649 Self::convert_comma_operators_to_argument_separators(node),
650 ));
651
652 if tokens.is_empty() {
653 operator = None;
654
655 break 'tokenProcessing;
656 }
657
658 continue 'tokenProcessing;
659 }else {
660 if !whitespaces.is_empty() {
662 other_tokens.append(&mut whitespaces);
663 }
664 }
665 }
666
667 if value == "**" {
668 let old_operator = operator.replace(Operator::Pow);
669
670 if operator.unwrap().operator_type().is_compatible_with(operator_type) &&
672 (!other_tokens.is_empty() || !left_nodes.is_empty()) {
673 if current_operator_precedence < operator.unwrap().precedence() {
675 if let Some(tokens_left) = tokens_left {
676 tokens_left.append(tokens);
677
678 if !whitespaces.is_empty() {
679 whitespaces.clear();
680 }
681
682 operator = old_operator;
683
684 break 'tokenProcessing;
685 }
686 }
687
688 if tokens.len() == 1 {
690 if !whitespaces.is_empty() {
691 other_tokens.append(&mut whitespaces);
692 }
693
694 operator = None;
695 other_tokens.push_back(t);
696 tokens.pop_front();
697
698 break 'tokenProcessing;
699 }
700
701 if !whitespaces.is_empty() {
702 whitespaces.clear();
703 }
704
705 if !other_tokens.is_empty() {
706 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
707 other_tokens.clear();
708 }
709
710 let mut inner_tokens_left = VecDeque::new();
711 let mut tokens_list = VecDeque::from_iter(
712 tokens.make_contiguous()[1..].iter().cloned(),
713 );
714
715 let node = self.parse_operator_expression(
717 &mut tokens_list,
718 &mut Some(&mut inner_tokens_left),
719 tokens_left_behind_middle_part_end,
720 operator.unwrap().precedence(),
721 operator_type,
722 )?;
723
724 *tokens = inner_tokens_left;
725
726 if tokens.is_empty() {
727 if node.operator() == Some(non_operator) {
729 right_node = node.into_left_side_operand();
730 }else {
731 right_node = Some(node);
732 }
733
734 break 'tokenProcessing;
735 }else {
736 let right_node = if node.operator() == Some(non_operator) {
738 node.into_left_side_operand().unwrap()
739 }else {
740 node
741 };
742
743 let left_node = if left_nodes.len() == 1 {
744 left_nodes.pop().unwrap()
745 }else {
746 Node::new_list_node(Vec::from_iter(left_nodes.drain(..)))
747 };
748
749 left_nodes.push(Node::new_operation_statement_node(
750 left_node.pos().combine(&right_node.pos()),
751 OperationExpression::new(
752 Some(Box::new(left_node)),
753 None,
754 Some(Box::new(right_node)),
755 operator.take().unwrap(), operator_type,
756 ),
757 ));
758
759 continue 'tokenProcessing;
760 }
761 }else {
762 operator = old_operator;
763
764 if !whitespaces.is_empty() {
766 other_tokens.append(&mut whitespaces);
767 }
768 }
769 }
770
771 if matches!(
772 value,
773 "!==" | "!=~" | "!=" | "===" | "=~" | "==" | "<=>" | "<=" | ">=" | "<" |
774 ">" | "|||" | "&&" | "||" | "!" | "&" | "~~" | "~/" | "~" | "\u{25b2}" |
775 "\u{25bc}" | "*" | "//" | "^/" | "/" | "%" | "^" | "|" | "<<" | ">>>" |
776 ">>" | "+|" | "-|" | "+" | "->" | "-" | "@" | "?:" | "??" | "," | "?::" |
777 "::"
778 ) {
779 let something_before_operator = !other_tokens.is_empty() || !left_nodes.is_empty();
780
781 let old_operator = operator.take();
782
783 if operator.is_none() && OperatorType::All.is_compatible_with(operator_type) {
784 match value {
785 "?::" => {
786 operator = Some(Operator::OptionalMemberAccess);
787 },
788 "::" => {
789 if something_before_operator {
790 operator = Some(Operator::MemberAccess);
791 }else {
792 operator = Some(Operator::MemberAccessThis);
793 }
794 },
795 "->" => {
796 operator = Some(Operator::MemberAccessPointer);
797 },
798 "," => {
799 operator = Some(Operator::Comma);
800 },
801
802 _ => {},
803 }
804 }
805
806 if operator.is_none() && OperatorType::General.is_compatible_with(operator_type) {
807 match value {
808 "|||" => {
809 operator = Some(Operator::Concat);
810 },
811 "@" => {
812 operator = Some(Operator::Len);
813 },
814 "?:" => {
815 operator = Some(Operator::Elvis);
816 },
817 "??" => {
818 operator = Some(Operator::NullCoalescing);
819 },
820 "^" => {
821 if !something_before_operator {
822 operator = Some(Operator::DeepCopy);
823 }
824 },
825
826 _ => {},
827 }
828 }
829
830 if operator.is_none() && OperatorType::Math.is_compatible_with(operator_type) {
831 match value {
832 "<<" => {
833 operator = Some(Operator::Lshift);
834 },
835 ">>>" => {
836 operator = Some(Operator::Rzshift);
837 },
838 ">>" => {
839 operator = Some(Operator::Rshift);
840 },
841 "<=>" => {
842 operator = Some(Operator::Spaceship);
843 },
844 "&" => {
845 operator = Some(Operator::BitwiseAnd);
846 },
847 "~/" => {
848 operator = Some(Operator::TruncDiv);
849 },
850 "~" => {
851 operator = Some(Operator::BitwiseNot);
852 },
853 "+|" | "\u{25b2}" => {
854 operator = Some(Operator::Inc);
855 },
856 "-|" | "\u{25bc}" => {
857 operator = Some(Operator::Dec);
858 },
859 "*" => {
860 operator = Some(Operator::Mul);
861 },
862 "^/" => {
863 operator = Some(Operator::CeilDiv);
864 },
865 "//" => {
866 operator = Some(Operator::FloorDiv);
867 },
868 "/" => {
869 operator = Some(Operator::Div);
870 },
871 "%" => {
872 operator = Some(Operator::Mod);
873 },
874 "|" => {
875 operator = Some(Operator::BitwiseOr);
876 },
877 "+" => {
878 if something_before_operator {
879 operator = Some(Operator::Add);
880 }else {
881 operator = Some(Operator::Pos);
882 }
883 },
884 "-" => {
885 if something_before_operator {
886 operator = Some(Operator::Sub);
887 }else {
888 operator = Some(Operator::Inv);
889 }
890 },
891 "^" => {
892 if something_before_operator {
893 operator = Some(Operator::BitwiseXor);
894 }
895 },
896
897 _ => {},
898 }
899 }
900
901 if operator.is_none() && OperatorType::Condition.is_compatible_with(operator_type) {
902 match value {
903 "!==" => {
904 operator = Some(Operator::StrictNotEquals);
905 },
906 "!=~" => {
907 operator = Some(Operator::NotMatches);
908 },
909 "!=" => {
910 operator = Some(Operator::NotEquals);
911 },
912 "===" => {
913 operator = Some(Operator::StrictEquals);
914 },
915 "=~" => {
916 operator = Some(Operator::Matches);
917 },
918 "==" => {
919 operator = Some(Operator::Equals);
920 },
921 "<=" => {
922 operator = Some(Operator::LessThanOrEquals);
923 },
924 ">=" => {
925 operator = Some(Operator::GreaterThanOrEquals);
926 },
927 "<" => {
928 operator = Some(Operator::LessThan);
929 },
930 ">" => {
931 operator = Some(Operator::GreaterThan);
932 },
933 "&&" => {
934 operator = Some(Operator::And);
935 },
936 "||" => {
937 operator = Some(Operator::Or);
938 },
939 "!" => {
940 operator = Some(Operator::Not);
941 },
942 "~~" => {
943 operator = Some(Operator::InstanceOf);
944 },
945
946 _ => {},
947 }
948 }
949
950 match operator {
951 Some(op) if op.is_binary() && something_before_operator => {
952 if current_operator_precedence <= op.precedence() {
953 if let Some(tokens_left) = tokens_left {
954 tokens_left.append(tokens);
955
956 if !whitespaces.is_empty() {
957 whitespaces.clear();
958 }
959
960 operator = old_operator;
961
962 break 'tokenProcessing;
963 }
964 }
965
966 if tokens.len() == 1 {
968 other_tokens.append(&mut whitespaces);
969
970 operator = None;
971 other_tokens.push_back(t);
972 tokens.pop_front();
973
974 break 'tokenProcessing;
975 }
976
977 if !whitespaces.is_empty() {
978 whitespaces.clear();
979 }
980
981 if !other_tokens.is_empty() {
982 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
983 other_tokens.clear();
984 }
985
986 let mut inner_tokens_left = VecDeque::new();
987 let mut tokens_list = VecDeque::from_iter(
988 tokens.make_contiguous()[1..].iter().cloned(),
989 );
990
991 let node = self.parse_operator_expression(
993 &mut tokens_list,
994 &mut Some(&mut inner_tokens_left),
995 tokens_left_behind_middle_part_end,
996 operator.unwrap().precedence(),
997 operator_type,
998 )?;
999
1000 *tokens = inner_tokens_left;
1001
1002 if tokens.is_empty() {
1003 if node.operator() == Some(non_operator) {
1005 right_node = node.into_left_side_operand();
1006 }else {
1007 right_node = Some(node);
1008 }
1009
1010 break 'tokenProcessing;
1011 }else {
1012 let right_node = if node.operator() == Some(non_operator) {
1014 node.into_left_side_operand().unwrap()
1015 }else {
1016 node
1017 };
1018
1019 let left_node = if left_nodes.len() == 1 {
1020 left_nodes.pop().unwrap()
1021 }else {
1022 Node::new_list_node(Vec::from_iter(left_nodes.drain(..)))
1023 };
1024
1025 left_nodes.push(Node::new_operation_statement_node(
1026 left_node.pos().combine(&right_node.pos()),
1027 OperationExpression::new(
1028 Some(Box::new(left_node)),
1029 None,
1030 Some(Box::new(right_node)),
1031 operator.take().unwrap(), operator_type,
1032 ),
1033 ));
1034
1035 continue 'tokenProcessing;
1036 }
1037 },
1038
1039 Some(op) if op.is_unary() && !something_before_operator => {
1040 if !whitespaces.is_empty() {
1041 whitespaces.clear();
1042 }
1043
1044 let pos_start = t.pos();
1045
1046 let mut inner_tokens_left = VecDeque::new();
1047 let mut tokens_list = VecDeque::from_iter(
1048 tokens.make_contiguous()[1..].iter().cloned(),
1049 );
1050
1051 let node = self.parse_operator_expression(
1053 &mut tokens_list,
1054 &mut Some(&mut inner_tokens_left),
1055 tokens_left_behind_middle_part_end,
1056 operator.unwrap().precedence(),
1057 operator_type,
1058 )?;
1059
1060 *tokens = inner_tokens_left;
1061
1062 let left_node = if node.operator() == Some(non_operator) {
1064 node.into_left_side_operand().unwrap()
1065 }else {
1066 node
1067 };
1068
1069 left_nodes.push(Node::new_operation_statement_node(
1070 pos_start.combine(&left_node.pos()),
1071 OperationExpression::new(
1072 Some(Box::new(left_node)),
1073 None,
1074 None,
1075 operator.take().unwrap(), operator_type,
1076 ),
1077 ));
1078
1079 if tokens.is_empty() {
1080 break 'tokenProcessing;
1081 }else {
1082 continue 'tokenProcessing;
1083 }
1084 },
1085
1086 _ => {
1087 operator = old_operator;
1088
1089 if !whitespaces.is_empty() {
1091 other_tokens.append(&mut whitespaces);
1092 }
1093 },
1094 }
1095 }
1096
1097 if value == "?" {
1098 let old_operator = operator.replace(Operator::InlineIf);
1099
1100 if operator.unwrap().operator_type().is_compatible_with(operator_type) &&
1103 (!other_tokens.is_empty() || !left_nodes.is_empty()) {
1104 if current_operator_precedence < operator.unwrap().precedence() {
1106 if let Some(tokens_left) = tokens_left {
1107 tokens_left.append(tokens);
1108
1109 if !whitespaces.is_empty() {
1110 whitespaces.clear();
1111 }
1112
1113 operator = old_operator;
1114
1115 break 'tokenProcessing;
1116 }
1117 }
1118
1119 let mut inner_tokens_left_behind_middle_part_end = VecDeque::new();
1121 let mut tokens_list = VecDeque::from_iter(
1122 tokens.make_contiguous()[1..].iter().cloned(),
1123 );
1124
1125 let inner_middle_node_ret = self.parse_operator_expression(
1126 &mut tokens_list,
1127 &mut None,
1128 &mut Some(&mut inner_tokens_left_behind_middle_part_end),
1129 0,
1130 operator_type,
1131 );
1132 if let Some(inner_middle_node_ret) = inner_middle_node_ret {
1133 if inner_tokens_left_behind_middle_part_end.is_empty() {
1137 if !whitespaces.is_empty() {
1138 other_tokens.append(&mut whitespaces);
1139 }
1140
1141 operator = None;
1142 other_tokens.push_back(t);
1143
1144 break 'tokenProcessing;
1145 }
1146
1147 *tokens = inner_tokens_left_behind_middle_part_end;
1148
1149 if !whitespaces.is_empty() {
1150 whitespaces.clear();
1151 }
1152
1153 if !other_tokens.is_empty() {
1154 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
1155 other_tokens.clear();
1156 }
1157
1158 let mut inner_tokens_left = VecDeque::new();
1159
1160 let inner_right_node_ret = self.parse_operator_expression(
1161 tokens,
1162 &mut Some(&mut inner_tokens_left),
1163 tokens_left_behind_middle_part_end,
1164 operator.unwrap().precedence(),
1165 operator_type,
1166 ).unwrap();
1167
1168 *tokens = inner_tokens_left;
1169
1170 if tokens.is_empty() {
1171 if inner_middle_node_ret.operator() == Some(non_operator) {
1173 middle_node = inner_middle_node_ret.into_left_side_operand();
1174 }else {
1175 middle_node = Some(inner_middle_node_ret);
1176 }
1177
1178 if inner_right_node_ret.operator() == Some(non_operator) {
1180 right_node = inner_right_node_ret.into_left_side_operand();
1181 }else {
1182 right_node = Some(inner_right_node_ret);
1183 }
1184
1185 break 'tokenProcessing;
1186 }else {
1187 let middle_node = if inner_middle_node_ret.operator() == Some(non_operator) {
1189 inner_middle_node_ret.into_left_side_operand().unwrap()
1190 }else {
1191 inner_middle_node_ret
1192 };
1193
1194 let right_node = if inner_right_node_ret.operator() == Some(non_operator) {
1196 inner_right_node_ret.into_left_side_operand().unwrap()
1197 }else {
1198 inner_right_node_ret
1199 };
1200
1201 let left_node = if left_nodes.len() == 1 {
1202 left_nodes.pop().unwrap()
1203 }else {
1204 Node::new_list_node(Vec::from_iter(left_nodes.drain(..)))
1205 };
1206
1207 left_nodes.push(Node::new_operation_statement_node(
1208 left_node.pos().combine(&right_node.pos()),
1209 OperationExpression::new(
1210 Some(Box::new(left_node)),
1211 Some(Box::new(middle_node)),
1212 Some(Box::new(right_node)),
1213 operator.take().unwrap(), operator_type,
1214 ),
1215 ));
1216
1217 continue 'tokenProcessing;
1218 }
1219 }else {
1220 operator = old_operator;
1221
1222 if !whitespaces.is_empty() {
1224 other_tokens.append(&mut whitespaces);
1225 }
1226 }
1227 }else {
1228 operator = old_operator;
1229
1230 if !whitespaces.is_empty() {
1232 other_tokens.append(&mut whitespaces);
1233 }
1234 }
1235 }
1236
1237 if value == ":" {
1238 if let Some(tokens_left_behind_middle_part_end) = tokens_left_behind_middle_part_end {
1239 if tokens_left_behind_middle_part_end.front().is_some_and(|token|
1243 matches!(token.token_type(), TokenType::Whitespace) && token.value() == "DUMMY-A") {
1244 tokens_left_behind_middle_part_end[0] = Token::new(CodePosition::EMPTY, "DUMMY-B", TokenType::Whitespace);
1245 }
1246
1247 if !whitespaces.is_empty() {
1248 whitespaces.clear();
1249 }
1250
1251 tokens.pop_front();
1252 tokens_left_behind_middle_part_end.append(tokens);
1253
1254 if let Some(tokens_left) = tokens_left {
1256 if !tokens_left.is_empty() {
1257 tokens_left.clear();
1258 }
1259 }
1260
1261 break 'tokenProcessing;
1262 }
1263 }
1264
1265 tokens.pop_front();
1266
1267 if !whitespaces.is_empty() {
1268 other_tokens.append(&mut whitespaces);
1269 }
1270
1271 if !other_tokens.is_empty() {
1272 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
1273 other_tokens.clear();
1274 }
1275
1276 if other_tokens.is_empty() && left_nodes.is_empty() && matches!(t.value(), "+" | "-") &&
1278 !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::LiteralNumber) {
1279 let number_token = tokens.pop_front().unwrap();
1280
1281 let combined_number_token = Token::new(
1282 t.pos().combine(&number_token.pos()),
1283 &(t.value().to_string() + number_token.value()),
1284 TokenType::LiteralNumber,
1285 );
1286
1287 self.parse_number_token(combined_number_token, &mut left_nodes);
1288 }else {
1289 left_nodes.push(Node::new_text_value_node(t.pos(), value));
1290 }
1291 },
1292
1293 TokenType::Other => {
1294 if !whitespaces.is_empty() {
1295 whitespaces.clear();
1296 }
1297
1298 if !other_tokens.is_empty() {
1299 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
1300 other_tokens.clear();
1301 }
1302
1303 let ret = self.parse_function_call_without_prefix(tokens, Some(operator_type));
1304 if let Some(ret) = ret {
1305 left_nodes.push(ret);
1306 }else {
1307 tokens.pop_front();
1308 other_tokens.push_back(t);
1309 }
1310 },
1311
1312 TokenType::OpeningBlockBracket | TokenType::ClosingBlockBracket |
1313 TokenType::LineContinuation | TokenType::EndComment | TokenType::EndMultilineText |
1314 TokenType::SingleLineTextQuotes => {
1315 left_nodes.push(Node::new_parsing_error_node(
1316 CodePosition::EMPTY,
1317 ParsingError::LexerError,
1318 format!(
1319 "Invalid token type in operator expression: \"{}\"",
1320 t.token_type(),
1321 ),
1322 ));
1323
1324 break 'tokenProcessing;
1325 },
1326 }
1327 }
1328
1329 if let Some(tokens_left_behind_middle_part_end) = tokens_left_behind_middle_part_end {
1331 if tokens_left_behind_middle_part_end.front().is_some_and(|token|
1332 matches!(token.token_type(), TokenType::Whitespace) && token.value() == "DUMMY-A") {
1333 tokens_left_behind_middle_part_end.pop_front();
1334 }
1335
1336 if tokens_left_behind_middle_part_end.is_empty() {
1338 return None;
1339 }
1340 }
1341
1342 if !whitespaces.is_empty() {
1343 other_tokens.append(&mut whitespaces);
1344 }
1345
1346 if !other_tokens.is_empty() {
1347 self.parse_text_and_char_value(&mut other_tokens, &mut left_nodes);
1348 other_tokens.clear();
1349 }
1350
1351 let operator = operator.unwrap_or(non_operator);
1352
1353 let left_node = if left_nodes.len() == 1 {
1354 left_nodes.pop().unwrap()
1355 }else {
1356 Node::new_list_node(Vec::from_iter(left_nodes.drain(..)))
1357 };
1358
1359 if let Some(tokens_left) = tokens_left {
1360 if !tokens.is_empty() {
1361 tokens_left.append(tokens);
1362 }
1363 }
1364
1365 let pos = left_node.pos().combine(&right_node.as_ref().map(Node::pos).unwrap_or(left_node.pos()));
1366
1367 Some(Node::new_operation_statement_node(
1368 pos,
1369 OperationExpression::new(
1370 Some(Box::new(left_node)),
1371 middle_node.map(Box::new),
1372 right_node.map(Box::new),
1373 operator, operator_type,
1374 ),
1375 ))
1376 }
1377
1378 fn parse_operator_expression_variable_name_and_function_call(
1379 &mut self,
1380 tokens: &mut VecDeque<Token>,
1381 operator_type: OperatorType,
1382 other_tokens: &mut VecDeque<Token>,
1383 left_nodes: &mut Vec<Node>,
1384 t: Token,
1385 whitespaces: &mut VecDeque<Token>,
1386 ) {
1387 if (!other_tokens.is_empty() || !left_nodes.is_empty()) && t.value().starts_with("&") {
1390 tokens[0] = Token::new(t.pos(), "&", TokenType::Operator);
1391 tokens.insert(1, self.lexer.tokenize_other_value(&t.value()[1..], t.pos()));
1392
1393 return;
1394 }
1395
1396 if !whitespaces.is_empty() {
1397 other_tokens.append(whitespaces);
1398 }
1399
1400 if !other_tokens.is_empty() {
1401 self.parse_text_and_char_value(other_tokens, left_nodes);
1402 other_tokens.clear();
1403 }
1404
1405 let is_identifier = matches!(t.token_type(), TokenType::Identifier);
1406 let ret = if is_identifier {
1407 self.parse_variable_name_and_function_call(tokens, Some(operator_type))
1408 }else {
1409 self.parse_parser_function_call(tokens)
1410 };
1411
1412 if let Some(ret) = ret {
1413 if let NodeData::UnprocessedVariableName(variable_name) = ret.node_data() {
1414 if is_identifier && tokens.front().is_some_and(|token|
1415 matches!(token.token_type(), TokenType::Operator) && token.value() == "...") {
1416 let array_unpacking_operator_token = tokens.pop_front().unwrap();
1417 left_nodes.push(Node::new_unprocessed_variable_name_node(
1418 ret.pos().combine(&array_unpacking_operator_token.pos()),
1419 variable_name.to_string() + array_unpacking_operator_token.value(),
1420 ));
1421 }else {
1422 left_nodes.push(ret);
1423 }
1424 }else {
1425 left_nodes.push(ret);
1426 }
1427 }
1428 }
1429
1430 fn convert_comma_operators_to_argument_separators(operator_node: Node) -> Vec<Node> {
1431 let mut nodes = Vec::new();
1432
1433 if let Some(operator) = operator_node.operator() {
1434 match operator {
1435 Operator::Non | Operator::MathNon | Operator::ConditionalNon => {
1436 let operand = operator_node.into_left_side_operand().unwrap();
1438
1439 if matches!(
1440 operand.node_data(),
1441 NodeData::Operation {..} | NodeData::Math {..} | NodeData::Condition {..},
1442 ) {
1443 nodes.append(&mut Self::convert_comma_operators_to_argument_separators(operand));
1444 }else {
1445 nodes.push(operand);
1446 }
1447 },
1448
1449 Operator::Comma => {
1450 let argument_separator_pos = operator_node.pos();
1452 let (left_side_operand, _, right_side_operand) = operator_node.into_operands();
1453 let left_side_operand = left_side_operand.unwrap();
1454 let right_side_operand = right_side_operand.unwrap();
1455
1456 if matches!(
1458 left_side_operand.node_data(),
1459 NodeData::Operation {..} | NodeData::Math {..} | NodeData::Condition {..},
1460 ) {
1461 nodes.append(&mut Self::convert_comma_operators_to_argument_separators(left_side_operand));
1462 }else {
1463 nodes.push(left_side_operand);
1464 }
1465
1466 nodes.push(Node::new_argument_separator_node(argument_separator_pos, ", "));
1468
1469 nodes.push(right_side_operand);
1471 },
1472
1473 _ => {
1474 nodes.push(operator_node);
1475 }
1476 }
1477 }
1478
1479 nodes
1480 }
1481
1482 fn parse_assignment(&mut self, tokens: &mut VecDeque<Token>, inner_assignment: bool) -> Option<Node> {
1483 if tokens.is_empty() {
1484 return None;
1485 }
1486
1487 Self::trim_first_line(tokens);
1488
1489 let mut assignment_index = None;
1490 let mut token_count_first_line = None;
1491 for (i, token) in tokens.iter().
1492 enumerate() {
1493 if matches!(token.token_type(), TokenType::Eol | TokenType::Eof) {
1494 token_count_first_line = Some(i);
1495
1496 break;
1497 }
1498
1499 if assignment_index.is_none() {
1500 if i + 2 < tokens.len() && matches!(token.token_type(), TokenType::ClosingBracket) &&
1502 token.value() == ")" && matches!(tokens[i + 1].token_type(), TokenType::Whitespace) &&
1503 matches!(tokens[i + 2].token_type(), TokenType::Operator) && tokens[i + 2].value() == "->" {
1504 return None;
1505 }
1506
1507 if matches!(token.token_type(), TokenType::Assignment) {
1508 assignment_index = Some(i);
1509 }
1510 }
1511 }
1512
1513 let token_count_first_line = token_count_first_line.unwrap_or(tokens.len());
1514
1515 let Some(assignment_index) = assignment_index else {
1516 if inner_assignment || token_count_first_line != 1 || !matches!(tokens[0].token_type(), TokenType::Identifier) {
1517 return None;
1518 }
1519
1520 if regex_patterns::VAR_NAME_FULL.is_match(tokens[0].value()) {
1521 let variable_name_token = tokens.pop_front().unwrap();
1522
1523 return Some(Node::new_assignment_node(
1524 Node::new_unprocessed_variable_name_node(variable_name_token.pos(), variable_name_token.value()),
1525 Node::new_null_value_node(variable_name_token.pos()),
1526 ));
1527 }
1528
1529 return None;
1530 };
1531
1532 let mut lvalue_tokens = VecDeque::from_iter(
1533 tokens.make_contiguous()[..assignment_index].iter().cloned(),
1534 );
1535
1536 Self::trim_first_line(&mut lvalue_tokens);
1537
1538 if lvalue_tokens.is_empty() {
1539 return None;
1540 }
1541
1542 let assignment_token = &tokens[assignment_index];
1543
1544 let is_simple_assignment = assignment_token.value() == "=";
1545 if is_simple_assignment || assignment_token.value() == " = " {
1546 let pos = lvalue_tokens[0].pos().combine(&lvalue_tokens[assignment_index - 1].pos());
1547
1548 let regex = if is_simple_assignment {
1549 ®ex_patterns::PARSING_SIMPLE_ASSIGNMENT_VARIABLE_NAME_LVALUE
1550 }else {
1551 ®ex_patterns::VAR_NAME_FULL
1552 };
1553
1554 if lvalue_tokens.len() == 1 && matches!(lvalue_tokens[0].token_type(), TokenType::Identifier) &&
1555 regex.is_match(lvalue_tokens[0].value()) {
1556 tokens.drain(..=assignment_index);
1557 Self::trim_first_line(tokens);
1558
1559 if is_simple_assignment {
1560 return Some(Node::new_assignment_node(
1562 Node::new_unprocessed_variable_name_node(pos, lvalue_tokens[0].value()),
1563 self.parse_simple_assignment_value(tokens).into_node(),
1564 ));
1565 }
1566
1567 let returned_node = self.parse_assignment(tokens, true);
1568 let rvalue_node = returned_node.
1569 unwrap_or_else(|| self.parse_lrvalue(tokens, true).into_node());
1570 return Some(Node::new_assignment_node(
1571 Node::new_unprocessed_variable_name_node(pos, lvalue_tokens[0].value()),
1572 rvalue_node,
1573 ));
1574 }
1575
1576 let lvalue = lvalue_tokens.iter().
1577 map(|token| token.to_raw_string().to_string()).
1578 collect::<Vec<String>>().
1579 join("");
1580 if regex_patterns::PARSING_PARSER_FLAG.is_match(&lvalue) {
1581 let mut rvalue_tokens = VecDeque::from_iter(
1582 tokens.make_contiguous()[assignment_index + 1..token_count_first_line].iter().cloned(),
1583 );
1584
1585 Self::trim_first_line(&mut rvalue_tokens);
1586
1587 let ast = if is_simple_assignment {
1588 self.parse_simple_assignment_value(&mut rvalue_tokens)
1589 }else {
1590 self.parse_lrvalue(&mut rvalue_tokens, true)
1591 };
1592
1593 self.parse_parser_flags(lvalue, ast.into_node());
1594
1595 tokens.drain(..token_count_first_line);
1596
1597 return None;
1598 }
1599
1600 if regex_patterns::PARSING_SIMPLE_TRANSLATION_KEY.is_match(&lvalue) {
1601 tokens.drain(..=assignment_index);
1602
1603 let ast = if is_simple_assignment {
1605 self.parse_simple_assignment_value(tokens)
1606 }else {
1607 self.parse_lrvalue(tokens, true)
1608 };
1609
1610 return Some(Node::new_assignment_node(
1611 Node::new_text_value_node(pos, lvalue),
1612 ast.into_node(),
1613 ));
1614 }
1615 }
1616
1617 let is_variable_assignment = lvalue_tokens.len() == 1 &&
1618 matches!(lvalue_tokens[0].token_type(), TokenType::Identifier) &&
1619 regex_patterns::VAR_NAME_FULL.is_match(lvalue_tokens[0].value());
1620
1621 if assignment_token.value() == " =" {
1622 let pos = assignment_token.pos();
1623
1624 tokens.drain(..token_count_first_line);
1625
1626 let ast = if is_variable_assignment {
1627 self.parse_lrvalue(&mut lvalue_tokens, false)
1628 }else {
1629 self.parse_translation_key(&mut lvalue_tokens)
1630 };
1631
1632 return Some(Node::new_assignment_node(
1633 ast.into_node(),
1634 Node::new_null_value_node(pos),
1635 ));
1636 }
1637
1638 if regex_patterns::PARSING_ASSIGNMENT_OPERATOR.is_match(assignment_token.value()) {
1639 let assignment_token_pos = assignment_token.pos();
1640 let assignment_operator = assignment_token.value();
1641 let assignment_operator = assignment_operator[1..assignment_operator.len()-2].to_string();
1642
1643 tokens.drain(..=assignment_index);
1644
1645 let mut operator = None;
1646 if !assignment_operator.is_empty() {
1647 match assignment_operator.as_str() {
1648 "**" => {
1649 operator = Some(Operator::Pow);
1650 },
1651 "*" => {
1652 operator = Some(Operator::Mul);
1653 },
1654 "/" => {
1655 operator = Some(Operator::Div);
1656 },
1657 "~/" => {
1658 operator = Some(Operator::TruncDiv);
1659 },
1660 "//" => {
1661 operator = Some(Operator::FloorDiv);
1662 },
1663 "^/" => {
1664 operator = Some(Operator::CeilDiv);
1665 },
1666 "%" => {
1667 operator = Some(Operator::Mod);
1668 },
1669 "+" => {
1670 operator = Some(Operator::Add);
1671 },
1672 "-" => {
1673 operator = Some(Operator::Sub);
1674 },
1675 "<<" => {
1676 operator = Some(Operator::Lshift);
1677 },
1678 ">>" => {
1679 operator = Some(Operator::Rshift);
1680 },
1681 ">>>" => {
1682 operator = Some(Operator::Rzshift);
1683 },
1684 "&" => {
1685 operator = Some(Operator::BitwiseAnd);
1686 },
1687 "^" => {
1688 operator = Some(Operator::BitwiseXor);
1689 },
1690 "|" => {
1691 operator = Some(Operator::BitwiseOr);
1692 },
1693 "|||" => {
1694 operator = Some(Operator::Concat);
1695 },
1696 "?:" => {
1697 operator = Some(Operator::Elvis);
1698 },
1699 "??" => {
1700 operator = Some(Operator::NullCoalescing);
1701 },
1702 "?" => {
1703 operator = Some(Operator::ConditionalNon);
1704 },
1705 ":" => {
1706 operator = Some(Operator::MathNon);
1707 },
1708 "$" => {
1709 operator = Some(Operator::Non);
1710 },
1711
1712 _ => {}
1713 }
1714 }
1715
1716 let lvalue_node = match operator {
1717 _ if is_variable_assignment => {
1718 self.parse_lrvalue(&mut lvalue_tokens, false).into_node()
1719 },
1720
1721 Some(Operator::ConditionalNon) => {
1722 self.parse_condition_expr(&mut lvalue_tokens).unwrap()
1723 },
1724
1725 Some(Operator::MathNon) => {
1726 self.parse_math_expr(&mut lvalue_tokens).unwrap()
1727 },
1728
1729 _ => {
1730 self.parse_operation_expr(&mut lvalue_tokens).unwrap()
1731 },
1732 };
1733
1734 let rvalue_node = match operator {
1735 _ if assignment_operator == "::" => {
1736 let returned_node = self.parse_assignment(tokens, true);
1737
1738 returned_node.unwrap_or_else(|| self.parse_lrvalue(tokens, true).into_node())
1739 },
1740
1741 None => {
1742 Node::new_parsing_error_node(
1743 assignment_token_pos,
1744 ParsingError::InvalidAssignment,
1745 format!("Invalid assignment operator: \" {}= \"", assignment_operator),
1746 )
1747 },
1748
1749 Some(Operator::ConditionalNon) => {
1750 self.parse_condition_expr(tokens).unwrap()
1751 },
1752
1753 Some(Operator::MathNon) => {
1754 self.parse_math_expr(tokens).unwrap()
1755 },
1756
1757 Some(Operator::Non) => {
1758 self.parse_operation_expr(tokens).unwrap()
1759 },
1760
1761 Some(operator) => {
1762 let left_side_operand = lvalue_node.clone();
1763 let right_side_operand = self.parse_operation_expr(tokens).unwrap();
1764
1765 Node::new_operation_statement_node(
1766 left_side_operand.pos().combine(&right_side_operand.pos()),
1767 OperationExpression::new(
1768 Some(Box::new(left_side_operand)),
1769 None,
1770 Some(Box::new(right_side_operand)),
1771 operator, operator.operator_type(),
1772 ),
1773 )
1774 },
1775 };
1776
1777 return Some(Node::new_assignment_node(lvalue_node, rvalue_node));
1778 }
1779
1780 if assignment_token.value() == " = " {
1782 tokens.drain(..=assignment_index);
1783
1784 return Some(Node::new_assignment_node(
1786 self.parse_translation_key(&mut lvalue_tokens).into_node(),
1787 self.parse_lrvalue(tokens, true).into_node(),
1788 ));
1789 }
1790
1791 None
1792 }
1793
1794 fn parse_line(&mut self, tokens: &mut VecDeque<Token>) -> Option<AST> {
1795 let mut ast = AST::new();
1796 let nodes = ast.nodes_mut();
1797
1798 Self::trim_first_line(tokens);
1799
1800 let mut token_count_first_line = Self::get_token_count_first_line(tokens.make_contiguous());
1801
1802 let starts_with_con_expression = tokens.front().is_some_and(|token|
1804 matches!(token.token_type(), TokenType::Other) && token.value().starts_with("con."));
1805 let ends_with_opening_bracket = tokens.get(token_count_first_line - 1).is_some_and(|token|
1806 matches!(token.token_type(), TokenType::OpeningBlockBracket));
1807 if starts_with_con_expression || ends_with_opening_bracket {
1808 let mut con_expression = tokens[0].value().to_string();
1809 let original_con_expression = con_expression.clone();
1810
1811 if ends_with_opening_bracket && !starts_with_con_expression {
1813 con_expression = "con.".to_string() + &con_expression;
1814 }
1815
1816 match con_expression.as_str() {
1817 "con.continue" | "con.break" if !ends_with_opening_bracket => {
1818 let con_expression_token = tokens.pop_front().unwrap();
1819 let mut pos_last_token = con_expression_token.pos();
1820
1821 let number_node = if token_count_first_line > 1 && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
1822 tokens[0].value() == "(" {
1823 let arguments_end_index = utils::get_index_of_matching_bracket_tok(
1824 tokens.make_contiguous(),
1825 0, usize::MAX,
1826 "(", ")", true,
1827 );
1828 let Some(arguments_end_index) = arguments_end_index else {
1829 nodes.push(Node::new_parsing_error_node(
1830 tokens[0].pos(),
1831 ParsingError::BracketMismatch,
1832 "Bracket for con.break or con.continue is missing",
1833 ));
1834
1835 return Some(ast);
1836 };
1837
1838 pos_last_token = tokens[arguments_end_index].pos();
1839
1840 let mut argument_tokens = utils::split_off_arguments(tokens, arguments_end_index);
1841
1842 Some(self.parse_function_parameter_list(&mut argument_tokens, false).into_node())
1843 }else {
1844 None
1845 };
1846
1847 let pos = con_expression_token.pos().combine(&pos_last_token);
1848 nodes.push(Node::new_continue_break_statement_node(
1849 pos,
1850 number_node.map(Box::new),
1851 con_expression_token.value() == "con.continue",
1852 ));
1853
1854 return Some(ast);
1855 },
1856
1857 "con.try" | "con.softtry" | "con.nontry" => {
1858 let mut try_statement_parts = Vec::new();
1859
1860 let block_bracket_flag = ends_with_opening_bracket;
1861 while !tokens.is_empty() {
1862 Self::trim_first_line(tokens);
1863
1864 let mut token_count_first_line = Self::get_token_count_first_line(tokens.make_contiguous());
1865 if token_count_first_line == 0 {
1866 break;
1867 }
1868
1869 let ends_with_opening_bracket = matches!(tokens[token_count_first_line - 1].token_type(), TokenType::OpeningBlockBracket);
1870
1871 con_expression = tokens[0].value().to_string();
1872
1873 if ends_with_opening_bracket && !starts_with_con_expression {
1875 con_expression = "con.".to_string() + &con_expression;
1876 }
1877
1878 if block_bracket_flag {
1879 let pos = tokens[token_count_first_line - 1].pos();
1881
1882 if !ends_with_opening_bracket {
1883 nodes.push(Node::new_parsing_error_node(
1884 pos,
1885 ParsingError::InvalidConPart,
1886 "Missing \"{\" token after con statement",
1887 ));
1888 }
1889
1890 tokens.remove(token_count_first_line - 1);
1891
1892 token_count_first_line -= 1;
1893
1894 if token_count_first_line == 0 || !matches!(tokens[0].token_type(), TokenType::Other) {
1895 nodes.push(Node::new_parsing_error_node(
1896 pos,
1897 ParsingError::InvalidConPart,
1898 "Missing con statement",
1899 ));
1900 }
1901
1902 con_expression = tokens[0].value().to_string();
1903
1904 if !con_expression.starts_with("con.") {
1906 con_expression = "con.".to_string() + &con_expression;
1907 }
1908
1909 Self::trim_first_line(tokens);
1910
1911 token_count_first_line = Self::get_token_count_first_line(tokens.make_contiguous());
1912 }
1913
1914 let mut try_arguments;
1915 match con_expression.as_str() {
1916 "con.try" | "con.softtry" | "con.nontry" | "con.else" | "con.finally" => {
1917 let try_statement_token = tokens.pop_front().unwrap();
1918 token_count_first_line -= 1;
1919
1920 if token_count_first_line >= 1 && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
1921 tokens[0].value() == "(" {
1922 nodes.push(Node::new_parsing_error_node(
1923 try_statement_token.pos(),
1924 ParsingError::InvalidConPart,
1925 "Try/Softtry/Nontry/Finally/Else part with arguments",
1926 ));
1927
1928 return Some(ast);
1929 }
1930
1931 try_arguments = None;
1932 },
1933
1934 "con.catch" => {
1935 if token_count_first_line == 1 {
1936 try_arguments = None;
1937 }else {
1938 tokens.pop_front().unwrap();
1939 token_count_first_line -= 1;
1940
1941 if token_count_first_line > 1 && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
1942 tokens[0].value() == "(" {
1943 let arguments_end_index = utils::get_index_of_matching_bracket_tok(
1944 tokens.make_contiguous(),
1945 0, usize::MAX,
1946 "(", ")", true,
1947 );
1948 let Some(arguments_end_index) = arguments_end_index else {
1949 nodes.push(Node::new_parsing_error_node(
1950 tokens[0].pos(),
1951 ParsingError::BracketMismatch,
1952 "Missing catch statement arguments",
1953 ));
1954
1955 return Some(ast);
1956 };
1957
1958 try_arguments = Some(utils::split_off_arguments(tokens, arguments_end_index));
1959 token_count_first_line -= arguments_end_index + 1;
1960 }else {
1961 try_arguments = None;
1962 }
1963
1964 if token_count_first_line != 0 {
1965 nodes.push(Node::new_parsing_error_node(
1966 tokens[0].pos(),
1967 ParsingError::InvalidConPart,
1968 "Trailing stuff behind arguments",
1969 ));
1970
1971 return Some(ast);
1972 }
1973 }
1974 },
1975
1976 "con.endtry" if !block_bracket_flag => {
1977 tokens.pop_front();
1978
1979 break;
1980 },
1981
1982 _ => {
1983 nodes.push(Node::new_parsing_error_node(
1985 CodePosition::EMPTY,
1986 ParsingError::InvalidConPart,
1987 format!("Try statement part is invalid: \"{}\"", con_expression),
1988 ));
1989
1990 return Some(ast);
1991 },
1992 };
1993
1994 let try_body = self.parse_tokens_internal(tokens);
1995 let Some(try_body) = try_body else {
1996 nodes.push(Node::new_try_statement_node(CodePosition::EMPTY, try_statement_parts));
1998 nodes.push(Node::new_parsing_error_node(
1999 CodePosition::EMPTY,
2000 ParsingError::Eof,
2001 "In try body",
2002 ));
2003
2004 return Some(ast);
2005 };
2006
2007 match con_expression.as_str() {
2009 "con.try" => {
2010 try_statement_parts.push(Node::new_try_statement_part_try_node(
2011 CodePosition::EMPTY,
2012 try_body,
2013 ));
2014 },
2015
2016 "con.softtry" => {
2017 try_statement_parts.push(Node::new_try_statement_part_soft_try_node(
2018 CodePosition::EMPTY,
2019 try_body,
2020 ));
2021 },
2022
2023 "con.nontry" => {
2024 try_statement_parts.push(Node::new_try_statement_part_non_try_node(
2025 CodePosition::EMPTY,
2026 try_body,
2027 ));
2028 },
2029
2030 "con.catch" => {
2031 try_statement_parts.push(Node::new_try_statement_part_catch_node(
2032 CodePosition::EMPTY,
2033 try_body,
2034 try_arguments.as_mut().map(|tokens|
2035 self.parse_function_parameter_list(tokens, false).
2036 into_nodes()),
2037 ));
2038 },
2039
2040 "con.else" => {
2041 try_statement_parts.push(Node::new_try_statement_part_else_node(
2042 CodePosition::EMPTY,
2043 try_body,
2044 ));
2045 },
2046
2047 "con.finally" => {
2048 try_statement_parts.push(Node::new_try_statement_part_finally_node(
2049 CodePosition::EMPTY,
2050 try_body,
2051 ));
2052 },
2053
2054 _ => {},
2055 }
2056 }
2057
2058 nodes.push(Node::new_try_statement_node(CodePosition::EMPTY, try_statement_parts));
2060 return Some(ast);
2061 },
2062
2063 "con.loop" | "con.while" | "con.until" | "con.repeat" | "con.foreach" => {
2064 let mut loop_statement_parts = Vec::new();
2065
2066 let block_bracket_flag = ends_with_opening_bracket;
2067 while !tokens.is_empty() {
2068 Self::trim_first_line(tokens);
2069
2070 let mut token_count_first_line = Self::get_token_count_first_line(tokens.make_contiguous());
2071 if token_count_first_line == 0 {
2072 break;
2073 }
2074
2075 let ends_with_opening_bracket = matches!(tokens[token_count_first_line - 1].token_type(), TokenType::OpeningBlockBracket);
2076
2077 con_expression = tokens[0].value().to_string();
2078
2079 if ends_with_opening_bracket && !starts_with_con_expression {
2081 con_expression = "con.".to_string() + &con_expression;
2082 }
2083
2084 if block_bracket_flag {
2085 let pos = tokens[token_count_first_line - 1].pos();
2087
2088 if !ends_with_opening_bracket {
2089 nodes.push(Node::new_parsing_error_node(
2090 pos,
2091 ParsingError::InvalidConPart,
2092 "Missing \"{\" token after con statement",
2093 ));
2094 }
2095
2096 tokens.remove(token_count_first_line - 1);
2097
2098 token_count_first_line -= 1;
2099
2100 if token_count_first_line == 0 || !matches!(tokens[0].token_type(), TokenType::Other) {
2101 nodes.push(Node::new_parsing_error_node(
2102 pos,
2103 ParsingError::InvalidConPart,
2104 "Missing con statement",
2105 ));
2106 }
2107
2108 con_expression = tokens[0].value().to_string();
2109
2110 if !con_expression.starts_with("con.") {
2112 con_expression = "con.".to_string() + &con_expression;
2113 }
2114
2115 Self::trim_first_line(tokens);
2116
2117 token_count_first_line = Self::get_token_count_first_line(tokens.make_contiguous());
2118 }
2119
2120 let loop_condition;
2121 match con_expression.as_str() {
2122 "con.else" | "con.loop" => {
2123 let try_statement_token = tokens.pop_front().unwrap();
2124 token_count_first_line -= 1;
2125
2126 if token_count_first_line >= 1 && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
2127 tokens[0].value() == "(" {
2128 nodes.push(Node::new_parsing_error_node(
2129 try_statement_token.pos(),
2130 ParsingError::InvalidConPart,
2131 "Loop/Else part with arguments",
2132 ));
2133
2134 return Some(ast);
2135 }
2136
2137 loop_condition = None;
2138 },
2139
2140 "con.while" | "con.until" | "con.repeat" | "con.foreach" => {
2141 tokens.pop_front().unwrap();
2142 token_count_first_line -= 1;
2143
2144 if token_count_first_line > 1 && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
2145 tokens[0].value() == "(" {
2146 let arguments_end_index = utils::get_index_of_matching_bracket_tok(
2147 tokens.make_contiguous(),
2148 0, usize::MAX,
2149 "(", ")", true,
2150 );
2151 let Some(arguments_end_index) = arguments_end_index else {
2152 nodes.push(Node::new_parsing_error_node(
2153 tokens[0].pos(),
2154 ParsingError::BracketMismatch,
2155 "Missing loop statement arguments",
2156 ));
2157
2158 return Some(ast);
2159 };
2160
2161 loop_condition = Some(utils::split_off_arguments(tokens, arguments_end_index));
2162 token_count_first_line -= arguments_end_index + 1;
2163 }else {
2164 nodes.push(Node::new_parsing_error_node(
2165 tokens[0].pos(),
2166 ParsingError::BracketMismatch,
2167 "Bracket for loop statement missing",
2168 ));
2169
2170 return Some(ast);
2171 }
2172
2173 if token_count_first_line != 0 {
2174 nodes.push(Node::new_parsing_error_node(
2175 tokens[0].pos(),
2176 ParsingError::InvalidConPart,
2177 "Trailing stuff behind arguments",
2178 ));
2179
2180 return Some(ast);
2181 }
2182 },
2183
2184 "con.endloop" if !block_bracket_flag => {
2185 tokens.pop_front();
2186
2187 break;
2188 },
2189
2190 _ => {
2191 nodes.push(Node::new_parsing_error_node(
2193 CodePosition::EMPTY,
2194 ParsingError::InvalidConPart,
2195 format!("Loop statement part is invalid: \"{}\"", con_expression),
2196 ));
2197
2198 return Some(ast);
2199 },
2200 }
2201
2202 let loop_body = self.parse_tokens_internal(tokens);
2203 let Some(loop_body) = loop_body else {
2204 nodes.push(Node::new_loop_statement_node(CodePosition::EMPTY, loop_statement_parts));
2206 nodes.push(Node::new_parsing_error_node(
2207 CodePosition::EMPTY,
2208 ParsingError::Eof,
2209 "In loop body",
2210 ));
2211
2212 return Some(ast);
2213 };
2214
2215 match con_expression.as_str() {
2217 "con.else" => {
2218 loop_statement_parts.push(Node::new_loop_statement_part_else_node(
2219 CodePosition::EMPTY,
2220 loop_body,
2221 ));
2222 },
2223
2224 "con.loop" => {
2225 loop_statement_parts.push(Node::new_loop_statement_part_loop_node(
2226 CodePosition::EMPTY,
2227 loop_body,
2228 ));
2229 },
2230
2231 "con.while" => {
2232 let operand = self.parse_operation_expr(&mut loop_condition.unwrap()).
2233 unwrap();
2234
2235 let conditional_non_node = ConditionalNode::new(Node::new_operation_statement_node(
2236 operand.pos(),
2237 OperationExpression::new(
2238 Some(Box::new(operand)),
2239 None,
2240 None,
2241 Operator::ConditionalNon, OperatorType::Condition,
2242 ),
2243 ));
2244
2245 loop_statement_parts.push(Node::new_loop_statement_part_while_node(
2246 CodePosition::EMPTY,
2247 loop_body,
2248 conditional_non_node,
2249 ));
2250 },
2251
2252 "con.until" => {
2253 let operand = self.parse_operation_expr(&mut loop_condition.unwrap()).
2254 unwrap();
2255
2256 let conditional_non_node = ConditionalNode::new(Node::new_operation_statement_node(
2257 operand.pos(),
2258 OperationExpression::new(
2259 Some(Box::new(operand)),
2260 None,
2261 None,
2262 Operator::ConditionalNon, OperatorType::Condition,
2263 ),
2264 ));
2265
2266 loop_statement_parts.push(Node::new_loop_statement_part_until_node(
2267 CodePosition::EMPTY,
2268 loop_body,
2269 conditional_non_node,
2270 ));
2271 },
2272
2273 "con.repeat" | "con.foreach" => {
2274 let arguments = self.parse_operation_expr(&mut loop_condition.unwrap()).unwrap();
2275 let arguments = Self::convert_comma_operators_to_argument_separators(arguments);
2276
2277 let mut argument_iter = arguments.into_iter();
2278
2279 let mut var_pointer_node = None;
2280 let mut flag = false;
2281 for node in argument_iter.by_ref() {
2282 if matches!(node.node_data(), NodeData::ArgumentSeparator(_)) ||
2283 var_pointer_node.is_some() {
2284 flag = true;
2285 break;
2286 }
2287
2288 var_pointer_node = Some(node);
2289 }
2290 if !flag {
2291 nodes.push(Node::new_parsing_error_node(
2292 CodePosition::EMPTY,
2293 ParsingError::InvalidConPart,
2294 "con.repeat or con.foreach arguments are invalid",
2295 ));
2296
2297 return Some(ast);
2298 }
2299
2300 let mut repeat_count_argument = Vec::new();
2301 for node in argument_iter {
2302 if matches!(node.node_data(), NodeData::ArgumentSeparator(_)) {
2303 nodes.push(Node::new_parsing_error_node(
2304 CodePosition::EMPTY,
2305 ParsingError::InvalidConPart,
2306 "con.repeat or con.foreach arguments are invalid",
2307 ));
2308
2309 return Some(ast);
2310 }
2311
2312 repeat_count_argument.push(node);
2313 }
2314
2315 let repeat_count_or_array_or_text_node = if repeat_count_argument.len() == 1 {
2316 repeat_count_argument.into_iter().next().unwrap()
2317 }else {
2318 Node::new_list_node(repeat_count_argument)
2319 };
2320
2321 if con_expression == "con.repeat" {
2322 loop_statement_parts.push(Node::new_loop_statement_part_repeat_node(
2323 CodePosition::EMPTY,
2324 loop_body,
2325 var_pointer_node.unwrap(),
2326 repeat_count_or_array_or_text_node,
2327 ));
2328 }else {
2329 loop_statement_parts.push(Node::new_loop_statement_part_for_each_node(
2330 CodePosition::EMPTY,
2331 loop_body,
2332 var_pointer_node.unwrap(),
2333 repeat_count_or_array_or_text_node,
2334 ));
2335 }
2336 },
2337
2338 _ => {},
2339 }
2340 }
2341
2342 nodes.push(Node::new_loop_statement_node(CodePosition::EMPTY, loop_statement_parts));
2344 return Some(ast);
2345 },
2346
2347 "con.if" => {
2348 let mut if_statement_parts = Vec::new();
2349
2350 let block_bracket_flag = ends_with_opening_bracket;
2351 while !tokens.is_empty() {
2352 Self::trim_first_line(tokens);
2353
2354 let mut token_count_first_line = Self::get_token_count_first_line(tokens.make_contiguous());
2355 if token_count_first_line == 0 {
2356 break;
2357 }
2358
2359 let ends_with_opening_bracket = matches!(tokens[token_count_first_line - 1].token_type(), TokenType::OpeningBlockBracket);
2360
2361 con_expression = tokens[0].value().to_string();
2362
2363 if ends_with_opening_bracket && !starts_with_con_expression {
2365 con_expression = "con.".to_string() + &con_expression;
2366 }
2367
2368 if block_bracket_flag {
2369 let pos = tokens[token_count_first_line - 1].pos();
2371
2372 if !ends_with_opening_bracket {
2373 nodes.push(Node::new_parsing_error_node(
2374 pos,
2375 ParsingError::InvalidConPart,
2376 "Missing \"{\" token after con statement",
2377 ));
2378 }
2379
2380 tokens.remove(token_count_first_line - 1);
2381
2382 token_count_first_line -= 1;
2383
2384 if token_count_first_line == 0 || !matches!(tokens[0].token_type(), TokenType::Other) {
2385 nodes.push(Node::new_parsing_error_node(
2386 pos,
2387 ParsingError::InvalidConPart,
2388 "Missing con statement",
2389 ));
2390 }
2391
2392 con_expression = tokens[0].value().to_string();
2393
2394 if !con_expression.starts_with("con.") {
2396 con_expression = "con.".to_string() + &con_expression;
2397 }
2398
2399 Self::trim_first_line(tokens);
2400
2401 token_count_first_line = Self::get_token_count_first_line(tokens.make_contiguous());
2402 }
2403
2404 let mut if_condition;
2405 match con_expression.as_str() {
2406 "con.else" => {
2407 let try_statement_token = tokens.pop_front().unwrap();
2408 token_count_first_line -= 1;
2409
2410 if token_count_first_line >= 1 && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
2411 tokens[0].value() == "(" {
2412 nodes.push(Node::new_parsing_error_node(
2413 try_statement_token.pos(),
2414 ParsingError::InvalidConPart,
2415 "Else part with arguments",
2416 ));
2417
2418 return Some(ast);
2419 }
2420
2421 if_condition = None;
2422 },
2423
2424 "con.if" | "con.elif" => {
2425 tokens.pop_front().unwrap();
2426 token_count_first_line -= 1;
2427
2428 if token_count_first_line > 1 && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
2429 tokens[0].value() == "(" {
2430 let arguments_end_index = utils::get_index_of_matching_bracket_tok(
2431 tokens.make_contiguous(),
2432 0, usize::MAX,
2433 "(", ")", true,
2434 );
2435 let Some(arguments_end_index) = arguments_end_index else {
2436 nodes.push(Node::new_parsing_error_node(
2437 tokens[0].pos(),
2438 ParsingError::BracketMismatch,
2439 "Missing if statement arguments",
2440 ));
2441
2442 return Some(ast);
2443 };
2444
2445 if_condition = Some(utils::split_off_arguments(tokens, arguments_end_index));
2446 token_count_first_line -= arguments_end_index + 1;
2447 }else {
2448 nodes.push(Node::new_parsing_error_node(
2449 tokens[0].pos(),
2450 ParsingError::BracketMismatch,
2451 "Bracket for if statement missing",
2452 ));
2453
2454 return Some(ast);
2455 }
2456
2457 if token_count_first_line != 0 {
2458 nodes.push(Node::new_parsing_error_node(
2459 tokens[0].pos(),
2460 ParsingError::InvalidConPart,
2461 "Trailing stuff behind arguments",
2462 ));
2463
2464 return Some(ast);
2465 }
2466 },
2467
2468 "con.endif" if !block_bracket_flag => {
2469 tokens.pop_front();
2470
2471 break;
2472 },
2473
2474 _ => {
2475 nodes.push(Node::new_parsing_error_node(
2477 CodePosition::EMPTY,
2478 ParsingError::InvalidConPart,
2479 format!("If statement part is invalid: \"{}\"", con_expression),
2480 ));
2481
2482 return Some(ast);
2483 },
2484 }
2485
2486 let if_body = self.parse_tokens_internal(tokens);
2487 let Some(if_body) = if_body else {
2488 nodes.push(Node::new_if_statement_node(CodePosition::EMPTY, if_statement_parts));
2490 nodes.push(Node::new_parsing_error_node(
2491 CodePosition::EMPTY,
2492 ParsingError::Eof,
2493 "In if body",
2494 ));
2495
2496 return Some(ast);
2497 };
2498
2499 if let Some(ref mut if_condition) = if_condition {
2501 let operand = self.parse_operation_expr(if_condition).
2502 unwrap();
2503
2504 let conditional_non_node = ConditionalNode::new(Node::new_operation_statement_node(
2505 operand.pos(),
2506 OperationExpression::new(
2507 Some(Box::new(operand)),
2508 None,
2509 None,
2510 Operator::ConditionalNon, OperatorType::Condition,
2511 ),
2512 ));
2513
2514 if_statement_parts.push(Node::new_if_statement_part_if_node(
2515 CodePosition::EMPTY,
2516 if_body,
2517 conditional_non_node,
2518 ));
2519 }else {
2520 if_statement_parts.push(Node::new_if_statement_part_else_node(
2521 CodePosition::EMPTY,
2522 if_body,
2523 ));
2524 }
2525 }
2526
2527 nodes.push(Node::new_if_statement_node(CodePosition::EMPTY, if_statement_parts));
2529 return Some(ast);
2530 }
2531
2532 _ if original_con_expression.starts_with("con.") => {
2533 return None;
2534 },
2535
2536 _ => {},
2537 }
2538 }
2539
2540 if token_count_first_line >= 1 && matches!(tokens[0].token_type(), TokenType::Other) &&
2542 tokens[0].value() == "return" {
2543 let return_statement_token_pos = tokens.front().unwrap().pos();
2544
2545 if token_count_first_line == 1 {
2547 nodes.push(Node::new_return_statement_node(return_statement_token_pos, None));
2548 tokens.pop_front();
2549
2550 return Some(ast);
2551 }
2552
2553 tokens.pop_front();
2555
2556 if matches!(tokens[0].token_type(), TokenType::Whitespace) {
2557 let node = self.parse_assignment(tokens, true).
2558 unwrap_or_else(|| self.parse_lrvalue(tokens, true).into_node());
2559 nodes.push(Node::new_return_statement_node(
2560 return_statement_token_pos.combine(&node.pos()),
2561 Some(node),
2562 ));
2563
2564 return Some(ast);
2565 }
2566 }
2567
2568 if token_count_first_line > 1 && matches!(tokens[0].token_type(), TokenType::Other) &&
2570 tokens[0].value() == "throw" {
2571 let throw_statement_token_pos = tokens.pop_front().unwrap().pos();
2572
2573 let arguments = self.parse_operation_expr(tokens).unwrap();
2574 let arguments = Self::convert_comma_operators_to_argument_separators(arguments);
2575
2576 let mut argument_iter = arguments.into_iter();
2577
2578 let mut error_nodes = Vec::new();
2579 let mut flag = false;
2580 for node in argument_iter.by_ref() {
2581 if matches!(node.node_data(), NodeData::ArgumentSeparator(_)) {
2582 flag = true;
2583 break;
2584 }
2585
2586 error_nodes.push(node);
2587 }
2588 if !flag && error_nodes.is_empty() {
2589 nodes.push(Node::new_parsing_error_node(
2590 throw_statement_token_pos,
2591 ParsingError::LexerError,
2592 "throw arguments are invalid",
2593 ));
2594
2595 return Some(ast);
2596 }
2597
2598 let mut message_nodes = Vec::new();
2599 for node in argument_iter {
2600 if matches!(node.node_data(), NodeData::ArgumentSeparator(_)) {
2601 nodes.push(Node::new_parsing_error_node(
2602 CodePosition::EMPTY,
2603 ParsingError::LexerError,
2604 "throw arguments are invalid",
2605 ));
2606
2607 return Some(ast);
2608 }
2609
2610 message_nodes.push(node);
2611 }
2612
2613 let error_node = if error_nodes.len() == 1 {
2614 error_nodes.into_iter().next().unwrap()
2615 }else {
2616 Node::new_list_node(error_nodes)
2617 };
2618
2619 let message_node = if message_nodes.is_empty() {
2620 None
2621 }else if message_nodes.len() == 1 {
2622 message_nodes.into_iter().next()
2623 }else {
2624 Some(Node::new_list_node(message_nodes))
2625 };
2626
2627 let pos = throw_statement_token_pos.combine(&message_node.as_ref().unwrap_or(&error_node).pos());
2628
2629 nodes.push(Node::new_throw_statement_node(pos, error_node, message_node));
2630
2631 return Some(ast);
2632 }
2633
2634 if token_count_first_line > 3 && matches!(tokens[0].token_type(), TokenType::Other) &&
2636 tokens[0].value() == "function" && ends_with_opening_bracket {
2637 let function_definition_start_token = tokens.pop_front().unwrap();
2638 token_count_first_line -= 1;
2639
2640 if !matches!(tokens[0].token_type(), TokenType::Whitespace) {
2641 nodes.push(Node::new_parsing_error_node(
2642 function_definition_start_token.pos(),
2643 ParsingError::LexerError,
2644 "Invalid function definition: Whitespace is missing after \"function\"",
2645 ));
2646
2647 return Some(ast);
2648 }
2649
2650 tokens.pop_front();
2651 token_count_first_line -= 1;
2652
2653 let overloaded = matches!(tokens[0].token_type(), TokenType::Other) &&
2654 tokens[0].value() == "overload" && matches!(tokens[1].token_type(), TokenType::Whitespace);
2655 if overloaded {
2656 tokens.pop_front();
2657 tokens.pop_front();
2658 token_count_first_line -= 2;
2659 }
2660
2661 let combinator = matches!(tokens[0].token_type(), TokenType::Other) &&
2662 tokens[0].value() == "combinator" && matches!(tokens[1].token_type(), TokenType::Whitespace);
2663 if combinator {
2664 tokens.pop_front();
2665 tokens.pop_front();
2666 token_count_first_line -= 2;
2667 }
2668
2669 #[expect(clippy::nonminimal_bool)]
2670 if !(matches!(tokens[0].token_type(), TokenType::Identifier) &&
2671 regex_patterns::VAR_NAME_NORMAL_FUNCTION_WITHOUT_PREFIX.is_match(tokens[0].value())) &&
2672 !(matches!(tokens[0].token_type(), TokenType::Other) &&
2673 regex_patterns::WORD.is_match(tokens[0].value())) {
2674 nodes.push(Node::new_parsing_error_node(
2675 function_definition_start_token.pos(),
2676 ParsingError::LexerError,
2677 format!(
2678 "Invalid function definition: Invalid function identifier: {}",
2679 tokens[0].value(),
2680 ),
2681 ));
2682
2683 return Some(ast);
2684 }
2685
2686 let function_name_token = tokens.pop_front().unwrap();
2687 token_count_first_line -= 1;
2688
2689 let mut function_name = function_name_token.value().to_string();
2690 if !function_name.starts_with("fp.") && !function_name.starts_with("$") {
2691 function_name = "fp.".to_string() + &function_name;
2692 }
2693
2694 if matches!(tokens[0].token_type(), TokenType::Whitespace) {
2695 tokens.pop_front();
2696 token_count_first_line -= 1;
2697 }
2698
2699 if !matches!(tokens[0].token_type(), TokenType::OpeningBracket) ||
2700 tokens[0].value() != "(" {
2701 nodes.push(Node::new_parsing_error_node(
2702 function_definition_start_token.pos(),
2703 ParsingError::BracketMismatch,
2704 "Bracket is missing in parameter list in function definition",
2705 ));
2706
2707 return Some(ast);
2708 }
2709
2710 let bracket_end_index = utils::get_index_of_matching_bracket_tok(
2711 tokens.make_contiguous(),
2712 0, usize::MAX,
2713 "(", ")", true,
2714 );
2715 let Some(bracket_end_index) = bracket_end_index else {
2716 nodes.push(Node::new_parsing_error_node(
2717 function_definition_start_token.pos(),
2718 ParsingError::BracketMismatch,
2719 "Bracket is missing in parameter list in function definition",
2720 ));
2721
2722 return Some(ast);
2723 };
2724
2725 let mut parameter_list = utils::split_off_arguments(tokens, bracket_end_index);
2726 token_count_first_line -= bracket_end_index + 1;
2727
2728 let type_constraint = if token_count_first_line > 2 &&
2729 matches!(tokens[0].token_type(), TokenType::Operator) && tokens[0].value() == ":" &&
2730 matches!(tokens[1].token_type(), TokenType::OpeningBracket) && tokens[1].value() == "{" {
2731 tokens.pop_front();
2732 token_count_first_line -= 1;
2733
2734 let bracket_end_index = utils::get_index_of_matching_bracket_tok(
2735 tokens.make_contiguous(),
2736 0, usize::MAX,
2737 "{", "}", true,
2738 );
2739 let Some(bracket_end_index) = bracket_end_index else {
2740 nodes.push(Node::new_parsing_error_node(
2741 function_definition_start_token.pos(),
2742 ParsingError::BracketMismatch,
2743 "Bracket is missing in return type constraint in function definition",
2744 ));
2745
2746 return Some(ast);
2747 };
2748
2749 let mut type_constraint_tokens = tokens.split_off(bracket_end_index + 1);
2750 mem::swap(tokens, &mut type_constraint_tokens);
2751 token_count_first_line -= bracket_end_index + 1;
2752
2753 self.parse_type_constraint(&mut type_constraint_tokens, false, nodes)
2754 }else {
2755 None
2756 };
2757
2758 if matches!(tokens[0].token_type(), TokenType::Whitespace) {
2759 tokens.pop_front();
2760 token_count_first_line -= 1;
2761 }
2762
2763 if token_count_first_line != 1 {
2764 nodes.push(Node::new_parsing_error_node(
2765 function_definition_start_token.pos(),
2766 ParsingError::LexerError,
2767 "Invalid tokens after function return type constraint",
2768 ));
2769
2770 return Some(ast);
2771 }
2772
2773 tokens.pop_front();
2774
2775 if !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::Eol) {
2776 tokens.pop_front();
2777 }
2778
2779 nodes.append(&mut self.parse_function_definition(
2780 Some(function_name),
2781 overloaded,
2782 combinator,
2783 &mut parameter_list,
2784 type_constraint,
2785 tokens,
2786 ).into_nodes());
2787
2788 return Some(ast);
2789 }
2790
2791 if token_count_first_line > 1 && matches!(tokens[0].token_type(), TokenType::Other) &&
2793 tokens[0].value() == "struct" && ends_with_opening_bracket {
2794 let struct_definition_start_token = tokens.pop_front().unwrap();
2795 token_count_first_line -= 1;
2796
2797 if !matches!(tokens[0].token_type(), TokenType::Whitespace) {
2798 nodes.push(Node::new_parsing_error_node(
2799 struct_definition_start_token.pos(),
2800 ParsingError::LexerError,
2801 "Invalid struct definition: Whitespace is missing after \"struct\"",
2802 ));
2803
2804 return Some(ast);
2805 }
2806
2807 tokens.pop_front();
2808 token_count_first_line -= 1;
2809
2810 if !matches!(tokens[0].token_type(), TokenType::Identifier) ||
2811 !regex_patterns::VAR_NAME_NORMAL_ARRAY_WITHOUT_PREFIX.is_match(tokens[0].value()) {
2812 nodes.push(Node::new_parsing_error_node(
2813 struct_definition_start_token.pos(),
2814 ParsingError::LexerError,
2815 format!("Invalid struct definition: Invalid struct identifier: \"{}\"", tokens[0]),
2816 ));
2817
2818 return Some(ast);
2819 }
2820
2821 let struct_name_token = tokens.pop_front().unwrap();
2822 token_count_first_line -= 1;
2823
2824 let struct_name = struct_name_token.value().to_string();
2825
2826 if matches!(tokens[0].token_type(), TokenType::Whitespace) {
2827 tokens.pop_front();
2828 token_count_first_line -= 1;
2829 }
2830
2831 if token_count_first_line != 1 {
2832 nodes.push(Node::new_parsing_error_node(
2833 struct_definition_start_token.pos(),
2834 ParsingError::LexerError,
2835 "Invalid tokens after struct identifier",
2836 ));
2837
2838 return Some(ast);
2839 }
2840
2841 tokens.pop_front();
2842
2843 if !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::Eol) {
2844 tokens.pop_front();
2845 }
2846
2847 nodes.append(&mut self.parse_struct_definition(
2848 struct_definition_start_token.pos(),
2849 Some(struct_name),
2850 tokens,
2851 ).into_nodes());
2852
2853 return Some(ast);
2854 }
2855
2856 if token_count_first_line > 1 && matches!(tokens[0].token_type(), TokenType::Other) &&
2858 tokens[0].value() == "class" && ends_with_opening_bracket {
2859 let class_definition_start_token = tokens.pop_front().unwrap();
2860 token_count_first_line -= 1;
2861
2862 if !matches!(tokens[0].token_type(), TokenType::Whitespace) {
2863 nodes.push(Node::new_parsing_error_node(
2864 class_definition_start_token.pos(),
2865 ParsingError::LexerError,
2866 "Invalid class definition: Whitespace is missing after \"class\"",
2867 ));
2868
2869 return Some(ast);
2870 }
2871
2872 tokens.pop_front();
2873 token_count_first_line -= 1;
2874
2875 if !matches!(tokens[0].token_type(), TokenType::Identifier) ||
2876 !regex_patterns::VAR_NAME_NORMAL_ARRAY_WITHOUT_PREFIX.is_match(tokens[0].value()) {
2877 nodes.push(Node::new_parsing_error_node(
2878 class_definition_start_token.pos(),
2879 ParsingError::LexerError,
2880 format!("Invalid class definition: Invalid class identifier: \"{}\"", tokens[0]),
2881 ));
2882
2883 return Some(ast);
2884 }
2885
2886 let class_name_token = tokens.pop_front().unwrap();
2887 token_count_first_line -= 1;
2888
2889 let class_name = class_name_token.value().to_string();
2890
2891 if matches!(tokens[0].token_type(), TokenType::Whitespace) {
2892 tokens.pop_front();
2893 token_count_first_line -= 1;
2894 }
2895
2896 let mut parent_class_tokens = if matches!(tokens[0].token_type(), TokenType::Operator) &&
2897 tokens[0].value() == "<" && ends_with_opening_bracket {
2898 let mut parent_class_end_index = None;
2901 for i in (0..token_count_first_line).rev() {
2902 if matches!(tokens[i].token_type(), TokenType::Operator) &&
2903 tokens[i].value() == ">" && ends_with_opening_bracket {
2904 parent_class_end_index = Some(i);
2905
2906 break;
2907 }
2908 }
2909
2910 let Some(parent_class_end_index) = parent_class_end_index else {
2911 nodes.push(Node::new_parsing_error_node(
2912 class_definition_start_token.pos(),
2913 ParsingError::BracketMismatch,
2914 "Bracket is missing in class definition",
2915 ));
2916
2917 return Some(ast);
2918 };
2919
2920 let parent_class_tokens = utils::split_off_arguments(tokens, parent_class_end_index);
2921 token_count_first_line -= parent_class_end_index + 1;
2922
2923 if matches!(tokens[0].token_type(), TokenType::Whitespace) {
2924 tokens.pop_front();
2925 token_count_first_line -= 1;
2926 }
2927
2928 parent_class_tokens
2929 }else {
2930 VecDeque::new()
2931 };
2932
2933 if token_count_first_line != 1 {
2934 nodes.push(Node::new_parsing_error_node(
2935 class_definition_start_token.pos(),
2936 ParsingError::LexerError,
2937 "Invalid tokens after class definition",
2938 ));
2939
2940 return Some(ast);
2941 }
2942
2943 tokens.pop_front();
2944
2945 if !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::Eol) {
2946 tokens.pop_front();
2947 }
2948
2949 nodes.append(&mut self.parse_class_definition(
2950 class_definition_start_token.pos(),
2951 Some(class_name),
2952 &mut parent_class_tokens,
2953 tokens,
2954 ).into_nodes());
2955
2956 return Some(ast);
2957 }
2958
2959 nodes.append(&mut self.parse_token(tokens).into_nodes());
2960
2961 Some(ast)
2962 }
2963
    /// Handles a parser flag directive with the given name and value.
    ///
    /// Currently a stub: no flags are implemented and `false` is always
    /// returned. NOTE(review): presumably the return value signals whether the
    /// flag was recognized and consumed — confirm at the call site.
    fn parse_parser_flags(&mut self, _parse_flag: impl Into<String>, _value: Node) -> bool {
        false
    }
2972
    /// Parses the first line of `tokens` as a translation key expression and
    /// returns it as an AST of text-value, escape-sequence, and call nodes.
    ///
    /// The resulting AST always begins with an empty text-value node. Parsing
    /// stops at EOL/EOF, after a token type that is invalid in a translation
    /// key (reported as a parsing error node), or when the token stream ends
    /// inside a multiline text (also reported as an error).
    fn parse_translation_key(&mut self, tokens: &mut VecDeque<Token>) -> AST {
        let mut ast = AST::new();
        let nodes = ast.nodes_mut();

        // Remember the position of the very first token before trimming,
        // falling back to EMPTY when the deque is empty.
        let mut pos = tokens.front().map(|token| token.pos()).
                unwrap_or(CodePosition::EMPTY);

        Self::trim_first_line(tokens);

        // If no position was available before trimming, retry afterwards.
        if pos == CodePosition::EMPTY {
            pos = tokens.front().map(|token| token.pos()).unwrap_or(CodePosition::EMPTY);
        }

        // Seed the AST with an empty text-value node.
        nodes.push(Node::new_text_value_node(pos, ""));

        // Leading "%$...": drop the "%" operator so the "$..." identifier is
        // processed by the loop below.
        if tokens.len() >= 2 && matches!(tokens[0].token_type(), TokenType::Operator) &&
                tokens[0].value() == "%" && matches!(tokens[1].token_type(), TokenType::Identifier) &&
                tokens[1].value().starts_with("$") {
            tokens.pop_front();
        }

        'tokenProcessing:
        while !tokens.is_empty() {
            let t = tokens[0].clone();

            match t.token_type() {
                // End of the translation key.
                TokenType::Eol | TokenType::Eof => {
                    break 'tokenProcessing;
                },

                TokenType::StartComment | TokenType::StartDocComment => {
                    self.parse_comment_tokens(tokens, nodes);
                },

                // These token types are taken over verbatim as text.
                TokenType::LiteralNull | TokenType::LiteralText | TokenType::LiteralNumber |
                TokenType::ArgumentSeparator | TokenType::Assignment | TokenType::Operator |
                TokenType::OpeningBracket | TokenType::ClosingBracket | TokenType::OpeningBlockBracket |
                TokenType::ClosingBlockBracket | TokenType::Whitespace | TokenType::Other => {
                    tokens.pop_front();

                    nodes.push(Node::new_text_value_node(pos, t.value()));
                },

                TokenType::EscapeSequence => {
                    tokens.pop_front();

                    self.parse_escape_sequence_token(t, nodes);
                },

                TokenType::LexerError => {
                    tokens.pop_front();

                    self.parse_lexer_error_token(t, nodes);
                },

                // Multiline text: consume tokens until the matching end token.
                TokenType::StartMultilineText => {
                    tokens.pop_front();

                    loop {
                        if let Some(t) = tokens.pop_front() {
                            if matches!(t.token_type(), TokenType::EndMultilineText) {
                                break;
                            }

                            if matches!(t.token_type(), TokenType::LiteralText | TokenType::Eol) {
                                nodes.push(Node::new_text_value_node(t.pos(), t.value()));
                            }else if matches!(t.token_type(), TokenType::EscapeSequence) {
                                self.parse_escape_sequence_token(t, nodes);
                            }else if matches!(t.token_type(), TokenType::LexerError) {
                                nodes.push(Node::new_parsing_error_node(
                                    t.pos(),
                                    ParsingError::LexerError,
                                    t.value()
                                ));
                            }else {
                                nodes.push(Node::new_parsing_error_node(
                                    CodePosition::EMPTY,
                                    ParsingError::Eof,
                                    format!(
                                        "Invalid token type ({}) in multiline text during translation key parsing",
                                        t.token_type(),
                                    ),
                                ));
                            }
                        }else {
                            // Token stream ended before EndMultilineText was found.
                            nodes.push(Node::new_parsing_error_node(
                                CodePosition::EMPTY,
                                ParsingError::Eof,
                                "Missing multiline text end token during translation key parsing",
                            ));

                            break 'tokenProcessing;
                        }
                    }
                },

                TokenType::Identifier | TokenType::ParserFunctionIdentifier => {
                    // An identifier that is not a valid full variable name is
                    // consumed as plain text.
                    // NOTE(review): execution still falls through to the call
                    // below, which then parses the FOLLOWING tokens — confirm
                    // this fall-through (no `else`/`continue`) is intentional.
                    if matches!(t.token_type(), TokenType::Identifier) &&
                            !regex_patterns::VAR_NAME_FULL.is_match(t.value()) {
                        tokens.pop_front();

                        nodes.push(Node::new_text_value_node(pos, t.value()));
                    }

                    let ret = if matches!(t.token_type(), TokenType::Identifier) {
                        self.parse_variable_name_and_function_call(tokens, None)
                    }else {
                        self.parse_parser_function_call(tokens)
                    };

                    if let Some(ret) = ret {
                        nodes.push(ret);
                    }
                },

                // Token types that may never occur in a translation key.
                TokenType::LineContinuation | TokenType::EndComment | TokenType::EndMultilineText |
                TokenType::SingleLineTextQuotes => {
                    nodes.push(Node::new_parsing_error_node(
                        CodePosition::EMPTY,
                        ParsingError::LexerError,
                        format!(
                            "Invalid token type in translation key expression: \"{}\"",
                            t.token_type(),
                        ),
                    ));

                    break 'tokenProcessing;
                },
            }
        }

        ast
    }
3107
    /// Parses an lvalue or rvalue expression from the first line of `tokens`.
    ///
    /// When `is_rvalue` is true, several special rvalue forms are recognized
    /// before falling through to generic token parsing:
    /// - anonymous function definitions: `(params)[:{constraint}] -> body`
    /// - a lone function-pointer-like identifier
    /// - anonymous struct definitions starting with `{`
    /// - anonymous class definitions: `<parents...> {`
    ///
    /// Anything else (and every lvalue) is handled by `parse_token`.
    fn parse_lrvalue(&mut self, tokens: &mut VecDeque<Token>, is_rvalue: bool) -> AST {
        let mut ast = AST::new();
        let nodes = ast.nodes_mut();

        Self::trim_first_line(tokens);

        if is_rvalue {
            let mut token_count_first_line = Self::get_token_count_first_line(tokens.make_contiguous());

            // "(...)" at line start: candidate anonymous function definition.
            if token_count_first_line >= 1 && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
                    tokens[0].value() == "(" {
                let parameter_end_index = utils::get_index_of_matching_bracket_tok(
                    tokens.make_contiguous(), 0, usize::MAX, "(", ")", true,
                );
                let Some(parameter_end_index) = parameter_end_index else {
                    nodes.push(Node::new_parsing_error_node(
                        CodePosition::EMPTY,
                        ParsingError::BracketMismatch,
                        "Bracket is missing in function definition",
                    ));

                    return ast;
                };

                let mut parameter_list_tokens = utils::split_off_arguments(tokens, parameter_end_index);
                let parameter_list = self.parse_function_parameter_list(&mut parameter_list_tokens, true).into_nodes();

                token_count_first_line -= parameter_end_index + 1;

                // Optional return type constraint: ":{...}" directly after ")".
                let mut return_type_constraint = None;
                if token_count_first_line >= 2 && matches!(tokens[0].token_type(), TokenType::Operator) &&
                        tokens[0].value() == ":" && matches!(tokens[1].token_type(), TokenType::OpeningBracket) &&
                        tokens[1].value() == "{" {
                    tokens.pop_front();
                    token_count_first_line -= 1;

                    let return_type_constraint_end_index = utils::get_index_of_matching_bracket_tok(
                        tokens.make_contiguous(), 0, usize::MAX, "{", "}", true,
                    );
                    let Some(return_type_constraint_end_index) = return_type_constraint_end_index else {
                        nodes.push(Node::new_parsing_error_node(
                            CodePosition::EMPTY,
                            ParsingError::BracketMismatch,
                            "Bracket is missing in return type constraint of function definition",
                        ));

                        return ast;
                    };

                    // split_off + swap: afterwards `tokens` holds the remaining
                    // input and `type_constraint_tokens` holds the constraint.
                    let mut type_constraint_tokens = tokens.split_off(return_type_constraint_end_index + 1);
                    mem::swap(tokens, &mut type_constraint_tokens);

                    token_count_first_line -= return_type_constraint_end_index + 1;

                    return_type_constraint = self.parse_type_constraint(
                        &mut type_constraint_tokens,
                        false,
                        nodes,
                    );
                }

                // " -> " confirms the anonymous function definition form.
                if token_count_first_line >= 3 && matches!(tokens[0].token_type(), TokenType::Whitespace) &&
                        matches!(tokens[1].token_type(), TokenType::Operator) && tokens[1].value() == "->" &&
                        matches!(tokens[2].token_type(), TokenType::Whitespace) {
                    tokens.pop_front();
                    tokens.pop_front();
                    tokens.pop_front();
                    token_count_first_line -= 3;

                    if token_count_first_line >= 1 && matches!(tokens[0].token_type(), TokenType::OpeningBlockBracket) {
                        // Block body: "{" ... — body is parsed from the
                        // remaining token stream.
                        // NOTE(review): `unwrap()` assumes parse_tokens_internal
                        // returns Some here — confirm it cannot yield None.
                        tokens.pop_front();

                        nodes.push(Node::new_function_definition_node(
                            CodePosition::EMPTY,
                            FunctionDefinition::new(
                                None, false, false,
                                self.lang_doc_comment.take().map(Box::from),
                                return_type_constraint.map(Box::from),
                                self.parse_tokens_internal(tokens).unwrap(),
                            ),
                            parameter_list,
                        ));
                    }else {
                        // Single-line body: only the rest of this line is the
                        // function body (split_off + swap isolates it).
                        let mut function_body = tokens.split_off(token_count_first_line);
                        mem::swap(tokens, &mut function_body);

                        nodes.push(Node::new_function_definition_node(
                            CodePosition::EMPTY,
                            FunctionDefinition::new(
                                None, false, false,
                                self.lang_doc_comment.take().map(Box::from),
                                return_type_constraint.map(Box::from),
                                self.parse_tokens_internal(&mut function_body).unwrap(),
                            ),
                            parameter_list,
                        ));
                    }

                    return ast;
                }
            }

            if token_count_first_line == 1 && matches!(tokens[0].token_type(), TokenType::Identifier) &&
                    regex_patterns::VAR_NAME_FUNC_PTR_WITH_FUNCS.is_match(tokens[0].value()) {
                // A lone function-pointer-like identifier.
                let t = tokens.pop_front().unwrap();

                nodes.push(Node::new_unprocessed_variable_name_node(t.pos(), t.value()));

                return ast;
            }else if token_count_first_line == 1 && tokens.len() > token_count_first_line &&
                    matches!(tokens[0].token_type(), TokenType::OpeningBlockBracket) {
                // Anonymous struct definition: "{" ends the line; the "{" and
                // the following token are consumed before parsing the body.
                let start_pos = tokens[0].pos();

                tokens.pop_front();
                tokens.pop_front();

                nodes.append(&mut self.parse_struct_definition(
                    start_pos,
                    None,
                    tokens
                ).into_nodes());

                return ast;
            }else if token_count_first_line > 3 && tokens.len() > token_count_first_line &&
                    matches!(tokens[0].token_type(), TokenType::Operator) && tokens[0].value() == "<" &&
                    matches!(tokens[token_count_first_line - 2].token_type(), TokenType::Operator) &&
                    tokens[token_count_first_line - 2].value() == ">" &&
                    matches!(tokens[token_count_first_line - 1].token_type(), TokenType::OpeningBlockBracket) {
                // Anonymous class definition: "<parents...> {" ends the line.
                let start_pos = tokens[0].pos();

                let mut parent_class_tokens = utils::split_off_arguments(tokens, token_count_first_line - 2);
                tokens.pop_front();
                tokens.pop_front();

                nodes.append(&mut self.parse_class_definition(
                    start_pos,
                    None,
                    &mut parent_class_tokens,
                    tokens,
                ).into_nodes());

                return ast;
            }
        }

        // Default: generic token parsing (all lvalues end up here too).
        nodes.append(&mut self.parse_token(tokens).into_nodes());

        ast
    }
3268
3269 fn parse_function_definition(
3270 &mut self,
3271 function_name: Option<String>,
3272 overloaded: bool,
3273 combinator: bool,
3274 parameter_list_tokens: &mut VecDeque<Token>,
3275 return_value_type_constraint: Option<String>,
3276 tokens: &mut VecDeque<Token>,
3277 ) -> AST {
3278 let mut ast = AST::new();
3279 let nodes = ast.nodes_mut();
3280
3281 Self::trim_first_line(tokens);
3282
3283 let parameter_list_nodes = self.parse_function_parameter_list(
3284 parameter_list_tokens,
3285 true,
3286 ).into_nodes();
3287
3288 nodes.push(Node::new_function_definition_node(
3290 CodePosition::EMPTY,
3291 FunctionDefinition::new(
3292 function_name.map(Box::from),
3293 overloaded,
3294 combinator,
3295 self.lang_doc_comment.take().map(Box::from),
3296 return_value_type_constraint.map(Box::from),
3297 self.parse_tokens_internal(tokens).unwrap(),
3298 ),
3299 parameter_list_nodes,
3300 ));
3301
3302 ast
3303 }
3304
    /// Parses the body of a struct definition (the tokens after the opening
    /// "{") into a single struct-definition node.
    ///
    /// `start_pos` is the position of the definition's first token; it is
    /// combined with the last inspected token's position for the resulting
    /// node. `struct_name` is `None` for anonymous struct literals.
    ///
    /// Each member line is an identifier, optionally followed by a "{...}"
    /// type constraint. Parsing ends at the closing "}" (its absence is
    /// reported as an EOF error), at EOF, or on the first invalid token.
    fn parse_struct_definition(
        &mut self,
        start_pos: CodePosition,
        struct_name: Option<String>,
        tokens: &mut VecDeque<Token>,
    ) -> AST {
        let mut ast = AST::new();
        let nodes = ast.nodes_mut();

        Self::trim_first_line(tokens);

        // Set once the closing "}" is consumed; checked after the loop.
        let mut has_end_brace = false;

        let mut members = Vec::new();

        // Position of the most recently inspected token, used for the combined
        // node position.
        let mut end_pos = CodePosition::EMPTY;

        'tokenProcessing:
        while !tokens.is_empty() {
            let t = tokens[0].clone();
            end_pos = t.pos();

            match t.token_type() {
                TokenType::Eof => {
                    break 'tokenProcessing;
                },

                TokenType::Eol => {
                    tokens.pop_front();

                    Self::trim_first_line(tokens);
                },

                TokenType::Whitespace => {
                    tokens.pop_front();
                },

                TokenType::StartComment | TokenType::StartDocComment => {
                    self.parse_comment_tokens(tokens, nodes);
                },

                // Closing "}" terminates the struct definition.
                TokenType::ClosingBlockBracket => {
                    tokens.pop_front();

                    has_end_brace = true;

                    break 'tokenProcessing;
                },

                // A member declaration: identifier + optional type constraint.
                TokenType::Identifier => {
                    if !regex_patterns::VAR_NAME_WITHOUT_PREFIX.is_match(t.value()) {
                        nodes.push(Node::new_parsing_error_node(
                            t.pos(),
                            ParsingError::InvalidAssignment,
                            format!(
                                "Invalid struct member name: \"{}\"",
                                t.value(),
                            ),
                        ));

                        return ast;
                    }

                    let identifier_token = tokens.pop_front().unwrap();

                    // Optional "{...}" type constraint directly after the name.
                    let mut type_constraint = None;
                    if !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
                            tokens[0].value() == "{" {
                        let bracket_end_index = utils::get_index_of_matching_bracket_tok(
                            tokens.make_contiguous(),
                            0, usize::MAX,
                            "{", "}", true,
                        );
                        let Some(bracket_end_index) = bracket_end_index else {
                            nodes.push(Node::new_parsing_error_node(
                                identifier_token.pos(),
                                ParsingError::BracketMismatch,
                                format!(
                                    "Bracket is missing in type constraint in struct definition for member: \"{}\"",
                                    identifier_token.value(),
                                ),
                            ));

                            return ast;
                        };

                        // split_off + swap: afterwards `tokens` holds the rest
                        // of the input and `type_constraint_tokens` holds the
                        // constraint (including its "}").
                        let mut type_constraint_tokens = tokens.split_off(bracket_end_index + 1);
                        mem::swap(tokens, &mut type_constraint_tokens);

                        type_constraint = self.parse_type_constraint(
                            &mut type_constraint_tokens,
                            false,
                            nodes,
                        );
                    }

                    // Member names must be unique within the struct.
                    if members.iter().any(|member: &StructMember| member.name() == identifier_token.value()) {
                        nodes.push(Node::new_parsing_error_node(
                            identifier_token.pos(),
                            ParsingError::InvalidAssignment,
                            format!(
                                "Duplicated struct member name: \"{}\"", identifier_token.value(),
                            ),
                        ));

                        return ast;
                    };

                    members.push(StructMember::new(Box::from(identifier_token.value()), type_constraint.map(Box::from)));
                },

                TokenType::LexerError => {
                    tokens.pop_front();

                    self.parse_lexer_error_token(t, nodes);

                    break 'tokenProcessing;
                },

                // All other token types are invalid inside a struct definition.
                TokenType::ParserFunctionIdentifier | TokenType::LiteralNull | TokenType::LiteralNumber |
                TokenType::LiteralText | TokenType::ArgumentSeparator | TokenType::Assignment |
                TokenType::Other | TokenType::Operator | TokenType::OpeningBracket | TokenType::ClosingBracket |
                TokenType::OpeningBlockBracket | TokenType::EscapeSequence | TokenType::StartMultilineText |
                TokenType::LineContinuation | TokenType::EndComment | TokenType::EndMultilineText |
                TokenType::SingleLineTextQuotes => {
                    nodes.push(Node::new_parsing_error_node(
                        CodePosition::EMPTY,
                        ParsingError::LexerError,
                        format!(
                            "Invalid token type for struct definition expression: \"{}\"",
                            t.token_type(),
                        ),
                    ));

                    return ast;
                },
            }
        }

        let pos = start_pos.combine(&end_pos);

        // A struct definition without its closing "}" is an error.
        if !has_end_brace {
            nodes.push(Node::new_parsing_error_node(
                pos,
                ParsingError::Eof,
                "\"}\" is missing in struct definition",
            ));

            return ast;
        }

        nodes.push(Node::new_struct_definition_node(pos, StructDefinition::new(
            struct_name.map(Box::from),
            members,
        )));

        ast
    }
3463
3464 fn parse_class_definition(
3465 &mut self,
3466 start_pos: CodePosition,
3467 class_name: Option<String>,
3468 parent_class_tokens: &mut VecDeque<Token>,
3469 tokens: &mut VecDeque<Token>,
3470 ) -> AST {
3471 let mut ast = AST::new();
3472 let nodes = ast.nodes_mut();
3473
3474 Self::trim_first_line(tokens);
3475
3476 let parent_classes = self.parse_function_parameter_list(parent_class_tokens, false).into_nodes();
3477
3478 let mut has_end_brace = false;
3479
3480 let mut static_members: Vec<ClassMember> = Vec::new();
3481 let mut members = Vec::new();
3482 let mut methods = Vec::new();
3483 let mut constructors = Vec::new();
3484
3485 let mut end_pos = CodePosition::EMPTY;
3486
3487 'tokenProcessing:
3488 while !tokens.is_empty() {
3489 let t = tokens[0].clone();
3490 end_pos = t.pos();
3491
3492 match t.token_type() {
3493 TokenType::Eof => {
3494 break 'tokenProcessing;
3495 },
3496
3497 TokenType::Eol => {
3498 tokens.pop_front();
3499
3500 Self::trim_first_line(tokens);
3501 },
3502
3503 TokenType::Whitespace => {
3504 tokens.pop_front();
3505 },
3506
3507 TokenType::StartComment | TokenType::StartDocComment => {
3508 self.parse_comment_tokens(tokens, nodes);
3509 },
3510
3511 TokenType::ClosingBlockBracket => {
3512 tokens.pop_front();
3513
3514 has_end_brace = true;
3515
3516 break 'tokenProcessing;
3517 },
3518
3519 TokenType::Other | TokenType::Operator => {
3520 let visibility = if t.value().len() == 1 {
3521 let visibility_symbol = t.value().as_bytes()[0];
3522 let visibility = Visibility::from_symbol(visibility_symbol);
3523
3524 if visibility.is_none() {
3525 nodes.push(Node::new_parsing_error_node(
3526 t.pos(),
3527 ParsingError::LexerError,
3528 "Invalid visibility symbol (One of [\"-\", \"~\", or \"+\"] must be used)"
3529 ));
3530
3531 return ast;
3532 }
3533
3534 tokens.pop_front();
3535
3536 visibility.unwrap()
3537 }else {
3538 let visibility_keyword = t.value();
3539 let visibility = Visibility::from_keyword(visibility_keyword);
3540
3541 if visibility.is_none() {
3542 nodes.push(Node::new_parsing_error_node(
3543 t.pos(),
3544 ParsingError::LexerError,
3545 "Invalid visibility keyword (One of [\"private\", \"protected\", or \"public\"] must be used)"
3546 ));
3547
3548 return ast;
3549 }
3550
3551 tokens.pop_front();
3552
3553 if tokens.front().is_none_or(|token|
3554 !matches!(token.token_type(), TokenType::Whitespace)) {
3555 nodes.push(Node::new_parsing_error_node(
3556 t.pos(),
3557 ParsingError::Eof,
3558 "Missing whitespace after visibility keyword specifier"
3559 ));
3560
3561 return ast;
3562 }
3563
3564 tokens.pop_front();
3565
3566 visibility.unwrap()
3567 };
3568
3569 let Some(mut t) = tokens.front().cloned() else {
3570 nodes.push(Node::new_parsing_error_node(
3571 t.pos(),
3572 ParsingError::Eof,
3573 "Missing value after visibility specifier"
3574 ));
3575
3576 return ast;
3577 };
3578
3579 if matches!(t.token_type(), TokenType::Other) && t.value() == "construct" {
3581 tokens.pop_front();
3582
3583 let Some(t) = tokens.front() else {
3584 nodes.push(Node::new_parsing_error_node(
3585 t.pos(),
3586 ParsingError::Eof,
3587 "Missing value after construct method"
3588 ));
3589
3590 return ast;
3591 };
3592
3593 if !matches!(t.token_type(), TokenType::Assignment) || t.value() != " = " {
3594 nodes.push(Node::new_parsing_error_node(
3595 t.pos(),
3596 ParsingError::InvalidAssignment,
3597 "Invalid assignment for constructor (only \" = \" is allowed)"
3598 ));
3599
3600 return ast;
3601 }
3602
3603 tokens.pop_front();
3604
3605 constructors.push(Constructor::new(
3606 self.parse_lrvalue(tokens, true).into_node(),
3607 visibility,
3608 ));
3609
3610 continue 'tokenProcessing;
3611 }
3612
3613 let is_override_method = tokens.len() >= 2 && matches!(t.token_type(), TokenType::Other) &&
3615 t.value() == "override" && matches!(tokens[1].token_type(), TokenType::Operator) &&
3616 tokens[1].value() == ":";
3617 if is_override_method {
3618 tokens.pop_front();
3619 tokens.pop_front();
3620
3621 if tokens.is_empty() {
3622 nodes.push(Node::new_parsing_error_node(
3623 t.pos(),
3624 ParsingError::Eof,
3625 "Missing identifier after override keyword"
3626 ));
3627
3628 return ast;
3629 }
3630
3631 t = tokens[0].clone();
3632 }
3633
3634 if matches!(t.token_type(), TokenType::Identifier) && t.value().starts_with("op:") {
3635 if !matches!(visibility, Visibility::Public) {
3636 nodes.push(Node::new_parsing_error_node(
3637 t.pos(),
3638 ParsingError::InvalidAssignment,
3639 "Operator method must be public"
3640 ));
3641
3642 return ast;
3643 }
3644
3645 let method_name_token = tokens.pop_front().unwrap();
3646 let method_name = method_name_token.value();
3647 if !regex_patterns::OPERATOR_METHOD_NAME.is_match(method_name) {
3648 nodes.push(Node::new_parsing_error_node(
3649 t.pos(),
3650 ParsingError::InvalidAssignment,
3651 format!(
3652 "Invalid operator method name: \"{}\"",
3653 method_name,
3654 ),
3655 ));
3656
3657 return ast;
3658 }
3659
3660 if tokens.is_empty() {
3661 nodes.push(Node::new_parsing_error_node(
3662 t.pos(),
3663 ParsingError::Eof,
3664 "Missing value after operator method",
3665 ));
3666
3667 return ast;
3668 }
3669
3670 let t = tokens[0].clone();
3671 if !matches!(t.token_type(), TokenType::Assignment) || t.value() != " = " {
3672 nodes.push(Node::new_parsing_error_node(
3673 t.pos(),
3674 ParsingError::InvalidAssignment,
3675 "Invalid assignment for operator method (only \" = \" is allowed)",
3676 ));
3677
3678 return ast;
3679 }
3680
3681 tokens.pop_front();
3682
3683 methods.push(Method::new(
3684 Box::from(method_name),
3685 self.parse_lrvalue(tokens, true).into_node(),
3686 is_override_method,
3687 visibility,
3688 ));
3689
3690 continue 'tokenProcessing;
3691 }
3692
3693 if matches!(t.token_type(), TokenType::Identifier) && t.value().starts_with("to:") {
3694 if !matches!(visibility, Visibility::Public) {
3695 nodes.push(Node::new_parsing_error_node(
3696 t.pos(),
3697 ParsingError::InvalidAssignment,
3698 "Conversion method must be public"
3699 ));
3700
3701 return ast;
3702 }
3703
3704 let method_name_token = tokens.pop_front().unwrap();
3705 let method_name = method_name_token.value();
3706 if !regex_patterns::CONVERSION_METHOD_NAME.is_match(method_name) {
3707 nodes.push(Node::new_parsing_error_node(
3708 t.pos(),
3709 ParsingError::InvalidAssignment,
3710 format!(
3711 "Invalid conversion method name: \"{}\"",
3712 method_name,
3713 ),
3714 ));
3715
3716 return ast;
3717 }
3718
3719 if tokens.is_empty() {
3720 nodes.push(Node::new_parsing_error_node(
3721 t.pos(),
3722 ParsingError::Eof,
3723 "Missing value after conversion method",
3724 ));
3725
3726 return ast;
3727 }
3728
3729 let t = tokens[0].clone();
3730 if !matches!(t.token_type(), TokenType::Assignment) || t.value() != " = " {
3731 nodes.push(Node::new_parsing_error_node(
3732 t.pos(),
3733 ParsingError::InvalidAssignment,
3734 "Invalid assignment for conversion method (only \" = \" is allowed)",
3735 ));
3736
3737 return ast;
3738 }
3739
3740 tokens.pop_front();
3741
3742 methods.push(Method::new(
3743 Box::from(method_name),
3744 self.parse_lrvalue(tokens, true).into_node(),
3745 is_override_method,
3746 visibility,
3747 ));
3748
3749 continue 'tokenProcessing;
3750 }
3751
3752 if matches!(t.token_type(), TokenType::Identifier) &&
3753 regex_patterns::METHOD_NAME.is_match(t.value()) {
3754 let method_name_token = tokens.pop_front().unwrap();
3755 let method_name = method_name_token.value();
3756
3757 if tokens.is_empty() {
3758 nodes.push(Node::new_parsing_error_node(
3759 t.pos(),
3760 ParsingError::Eof,
3761 "Missing value after normal method",
3762 ));
3763
3764 return ast;
3765 }
3766
3767 let t = tokens[0].clone();
3768 if !matches!(t.token_type(), TokenType::Assignment) || t.value() != " = " {
3769 nodes.push(Node::new_parsing_error_node(
3770 t.pos(),
3771 ParsingError::InvalidAssignment,
3772 "Invalid assignment for conversion method (only \" = \" is allowed)",
3773 ));
3774
3775 return ast;
3776 }
3777
3778 tokens.pop_front();
3779
3780 methods.push(Method::new(
3781 Box::from(method_name),
3782 self.parse_lrvalue(tokens, true).into_node(),
3783 is_override_method,
3784 visibility,
3785 ));
3786
3787 continue 'tokenProcessing;
3788 }
3789
3790 if is_override_method {
3791 nodes.push(Node::new_parsing_error_node(
3792 t.pos(),
3793 ParsingError::LexerError,
3794 "The override keyword can only be used for methods",
3795 ));
3796
3797 return ast;
3798 }
3799
3800 let mut is_static_member = tokens.len() >= 2 && matches!(t.token_type(), TokenType::Other) &&
3802 t.value() == "static" && matches!(tokens[1].token_type(), TokenType::Operator) &&
3803 tokens[1].value() == ":";
3804 if is_static_member {
3805 tokens.pop_front();
3806 tokens.pop_front();
3807
3808 if tokens.is_empty() {
3809 nodes.push(Node::new_parsing_error_node(
3810 t.pos(),
3811 ParsingError::Eof,
3812 "Missing identifier after static keyword",
3813 ));
3814
3815 return ast;
3816 }
3817
3818 t = tokens[0].clone();
3819 }
3820
3821 let is_final_member = tokens.len() >= 2 && matches!(t.token_type(), TokenType::Other) &&
3822 t.value() == "final" && matches!(tokens[1].token_type(), TokenType::Operator) &&
3823 tokens[1].value() == ":";
3824 if is_final_member {
3825 tokens.pop_front();
3826 tokens.pop_front();
3827
3828 if tokens.is_empty() {
3829 nodes.push(Node::new_parsing_error_node(
3830 t.pos(),
3831 ParsingError::Eof,
3832 "Missing identifier after final keyword",
3833 ));
3834
3835 return ast;
3836 }
3837
3838 t = tokens[0].clone();
3839 }
3840
3841 if !is_static_member && is_final_member && tokens.len() >= 2 &&
3842 matches!(t.token_type(), TokenType::Other) && t.value() == "static" &&
3843 matches!(tokens[1].token_type(), TokenType::Operator) && tokens[1].value() == ":" {
3844 is_static_member = true;
3845
3846 tokens.pop_front();
3847 tokens.pop_front();
3848
3849 if tokens.is_empty() {
3850 nodes.push(Node::new_parsing_error_node(
3851 t.pos(),
3852 ParsingError::Eof,
3853 "Missing identifier after static keyword",
3854 ));
3855
3856 return ast;
3857 }
3858
3859 t = tokens[0].clone();
3860 }
3861
3862 if !matches!(t.token_type(), TokenType::Identifier) {
3863 nodes.push(Node::new_parsing_error_node(
3864 t.pos(),
3865 ParsingError::LexerError,
3866 format!(
3867 "Invalid token type for class definition expression: \"{}\"",
3868 t.token_type(),
3869 ),
3870 ));
3871
3872 return ast;
3873 }
3874
3875 if !regex_patterns::VAR_NAME_WITHOUT_PREFIX.is_match(t.value()) {
3876 nodes.push(Node::new_parsing_error_node(
3877 t.pos(),
3878 ParsingError::InvalidAssignment,
3879 format!(
3880 "Invalid {}member name: \"{}\"",
3881 if is_static_member {
3882 "static "
3883 }else {
3884 ""
3885 },
3886 t.value(),
3887 ),
3888 ));
3889
3890 return ast;
3891 }
3892
3893 let member_name_token = tokens.pop_front().unwrap();
3894 let member_name = member_name_token.value();
3895
3896 let mut type_constraint = None;
3897 if !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
3898 tokens[0].value() == "{" {
3899 let bracket_end_index = utils::get_index_of_matching_bracket_tok(
3900 tokens.make_contiguous(),
3901 0, usize::MAX,
3902 "{", "}", true,
3903 );
3904 let Some(bracket_end_index) = bracket_end_index else {
3905 nodes.push(Node::new_parsing_error_node(
3906 member_name_token.pos(),
3907 ParsingError::BracketMismatch,
3908 format!(
3909 "Bracket is missing in type constraint in class definition for {}member: \"{}\"",
3910 if is_static_member {
3911 "static"
3912 }else {
3913 ""
3914 },
3915 member_name,
3916 ),
3917 ));
3918
3919 return ast;
3920 };
3921
3922 let mut type_constraint_tokens = tokens.split_off(bracket_end_index + 1);
3923 mem::swap(tokens, &mut type_constraint_tokens);
3924
3925 type_constraint = self.parse_type_constraint(
3926 &mut type_constraint_tokens,
3927 false,
3928 nodes,
3929 );
3930 }
3931
3932 let is_duplicate = if is_static_member {
3933 static_members.iter()
3934 }else {
3935 members.iter()
3936 }.any(|member| member.name() == member_name);
3937 if is_duplicate {
3938 nodes.push(Node::new_parsing_error_node(
3939 t.pos(),
3940 ParsingError::InvalidAssignment,
3941 format!(
3942 "Duplicated {}member name: \"{}\"",
3943 if is_static_member {
3944 "static "
3945 }else {
3946 ""
3947 },
3948 member_name,
3949 ),
3950 ));
3951
3952 return ast;
3953 }
3954
3955 if is_static_member {
3956 let mut static_member_value = None;
3957 if !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::Assignment) {
3958 let assignment_token = tokens.pop_front().unwrap();
3959 let assignment_operator = assignment_token.value();
3960
3961 if tokens.is_empty() ||
3962 matches!(tokens[0].token_type(), TokenType::Eol | TokenType::Eof) {
3963 if assignment_operator != "=" && assignment_operator != " =" {
3964 nodes.push(Node::new_parsing_error_node(
3965 t.pos(),
3966 ParsingError::InvalidAssignment,
3967 "Rvalue is missing in member assignment",
3968 ));
3969
3970 return ast;
3971 }
3972
3973 static_member_value = Some(if assignment_operator == "=" {
3974 Node::new_text_value_node(assignment_token.pos(), "")
3975 }else {
3976 Node::new_null_value_node(assignment_token.pos())
3977 });
3978 }else {
3979 match assignment_operator {
3980 "=" => {
3981 static_member_value = Some(self.parse_simple_assignment_value(
3982 tokens,
3983 ).into_node());
3984 },
3985
3986 " = " => {
3987 static_member_value = Some(self.parse_lrvalue(
3988 tokens, true,
3989 ).into_node());
3990 },
3991
3992 " ?= " => {
3993 static_member_value = Some(self.parse_condition_expr(
3994 tokens
3995 ).unwrap());
3996 },
3997
3998 " := " => {
3999 static_member_value = Some(self.parse_math_expr(
4000 tokens
4001 ).unwrap());
4002 },
4003
4004 " $= " => {
4005 static_member_value = Some(self.parse_operation_expr(
4006 tokens
4007 ).unwrap());
4008 },
4009
4010 _ => {
4011 nodes.push(Node::new_parsing_error_node(
4012 t.pos(),
4013 ParsingError::InvalidAssignment,
4014 "Invalid assignment for static member (only the following operators are allowed: \"=\", \" = \", \" ?= \", \" := \", and \" $= \")",
4015 ));
4016
4017 return ast;
4018 },
4019 }
4020 }
4021 }
4022
4023 static_members.push(ClassMember::new(
4024 Box::from(member_name),
4025 type_constraint.map(Box::from),
4026 static_member_value,
4027 is_final_member,
4028 visibility,
4029 ));
4030
4031 continue 'tokenProcessing;
4032 }
4033
4034 members.push(ClassMember::new(
4035 Box::from(member_name),
4036 type_constraint.map(Box::from),
4037 None,
4038 is_final_member,
4039 visibility,
4040 ));
4041 },
4042
4043 TokenType::LexerError => {
4044 tokens.pop_front();
4045
4046 self.parse_lexer_error_token(t, nodes);
4047
4048 break 'tokenProcessing;
4049 },
4050
4051 TokenType::Identifier | TokenType::ParserFunctionIdentifier | TokenType::LiteralNull |
4052 TokenType::LiteralNumber | TokenType::LiteralText | TokenType::ArgumentSeparator |
4053 TokenType::Assignment | TokenType::OpeningBracket | TokenType::ClosingBracket |
4054 TokenType::OpeningBlockBracket | TokenType::EscapeSequence | TokenType::StartMultilineText |
4055 TokenType::LineContinuation | TokenType::EndComment | TokenType::EndMultilineText |
4056 TokenType::SingleLineTextQuotes => {
4057 nodes.push(Node::new_parsing_error_node(
4058 CodePosition::EMPTY,
4059 ParsingError::LexerError,
4060 format!(
4061 "Invalid token type for class definition expression: \"{}\"",
4062 t.token_type(),
4063 ),
4064 ));
4065
4066 return ast;
4067 },
4068 }
4069 }
4070
4071 let pos = start_pos.combine(&end_pos);
4072
4073 if !has_end_brace {
4074 nodes.push(Node::new_parsing_error_node(
4075 pos,
4076 ParsingError::Eof,
4077 "\"}\" is missing in class definition",
4078 ));
4079
4080 return ast;
4081 }
4082
4083 nodes.push(Node::new_class_definition_node(pos, ClassDefinition::new(
4084 class_name.map(Box::from),
4085 static_members,
4086 members,
4087 methods,
4088 constructors,
4089 parent_classes,
4090 )));
4091
4092 ast
4093 }
4094
    /// Parses a free-form token sequence (up to the next EOL/EOF) into value
    /// nodes: literals become typed value nodes, a leading `+`/`-` is fused
    /// with a following number literal, `(...)` groups become function-call
    /// previous-node-value nodes, and most other tokens become text nodes.
    ///
    /// Returns the collected nodes as an [`AST`]; errors are reported as
    /// parsing error nodes inside the AST rather than via `Result`.
    fn parse_token(
        &mut self,
        tokens: &mut VecDeque<Token>,
    ) -> AST {
        let mut ast = AST::new();
        let nodes = ast.nodes_mut();

        Self::trim_first_line(tokens);

        'tokenProcessing:
        while !tokens.is_empty() {
            // Peek (clone) the first token; each arm decides whether to pop it.
            let t = tokens[0].clone();

            match t.token_type() {
                TokenType::Eol | TokenType::Eof => {
                    break 'tokenProcessing;
                },

                TokenType::StartComment | TokenType::StartDocComment => {
                    self.parse_comment_tokens(tokens, nodes);
                },

                TokenType::LiteralNull => {
                    tokens.pop_front();

                    nodes.push(Node::new_null_value_node(t.pos()));
                },

                // These token types pass through verbatim as text value nodes.
                TokenType::LiteralText | TokenType::ArgumentSeparator | TokenType::Assignment |
                TokenType::ClosingBracket | TokenType::OpeningBlockBracket | TokenType::ClosingBlockBracket |
                TokenType::Whitespace => {
                    tokens.pop_front();

                    nodes.push(Node::new_text_value_node(t.pos(), t.value()));
                },

                TokenType::Other => {
                    tokens.pop_front();

                    // Single "other" token: becomes a char node if it is exactly
                    // one code point, a text node otherwise.
                    self.parse_text_and_char_value(&mut VecDeque::from([t]), nodes);
                },

                TokenType::Operator => {
                    tokens.pop_front();

                    // A leading sign directly before a number literal (only at
                    // the very start, i.e. no nodes yet) is merged into a single
                    // signed number token.
                    if nodes.is_empty() && matches!(t.value(), "+" | "-") &&
                            !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::LiteralNumber) {
                        let number_token = tokens.pop_front().unwrap();

                        let combined_number_token = Token::new(
                            t.pos().combine(&number_token.pos()),
                            &(t.value().to_string() + number_token.value()),
                            TokenType::LiteralNumber,
                        );

                        self.parse_number_token(combined_number_token, nodes);

                        continue 'tokenProcessing;
                    }

                    nodes.push(Node::new_text_value_node(t.pos(), t.value()));
                },

                TokenType::LiteralNumber => {
                    tokens.pop_front();

                    self.parse_number_token(t, nodes);
                },

                TokenType::OpeningBracket => {
                    // A balanced "(...)" group is parsed as a call on the
                    // previous node's value; an unmatched "(" (or any other
                    // opening bracket) falls through to a plain text node.
                    if t.value() == "(" {
                        let end_index = utils::get_index_of_matching_bracket_tok(
                            tokens.make_contiguous(), 0, usize::MAX, "(", ")", true,
                        );
                        if let Some(end_index) = end_index {
                            let opening_bracket_token = &tokens[0];
                            let closing_bracket_token = &tokens[end_index];
                            let pos = opening_bracket_token.pos().combine(&closing_bracket_token.pos());

                            let mut function_call = utils::split_off_arguments(tokens, end_index);

                            nodes.push(Node::new_function_call_previous_node_value_node(
                                pos, "", "",
                                self.parse_function_parameter_list(&mut function_call, false).into_nodes(),
                            ));

                            continue 'tokenProcessing;
                        }
                    }

                    tokens.pop_front();

                    nodes.push(Node::new_text_value_node(t.pos(), t.value()));
                },

                TokenType::EscapeSequence => {
                    tokens.pop_front();

                    self.parse_escape_sequence_token(t, nodes);
                },

                TokenType::LexerError => {
                    tokens.pop_front();

                    self.parse_lexer_error_token(t, nodes);
                },

                TokenType::StartMultilineText => {
                    tokens.pop_front();

                    // Consume everything up to the matching EndMultilineText;
                    // a missing end token aborts token processing entirely.
                    loop {
                        if let Some(t) = tokens.pop_front() {
                            if matches!(t.token_type(), TokenType::EndMultilineText) {
                                break;
                            }

                            if matches!(t.token_type(), TokenType::LiteralText | TokenType::Eol) {
                                nodes.push(Node::new_text_value_node(t.pos(), t.value()));
                            }else if matches!(t.token_type(), TokenType::EscapeSequence) {
                                self.parse_escape_sequence_token(t, nodes);
                            }else if matches!(t.token_type(), TokenType::LexerError) {
                                nodes.push(Node::new_parsing_error_node(
                                    t.pos(),
                                    ParsingError::LexerError,
                                    t.value()
                                ));
                            }else {
                                nodes.push(Node::new_parsing_error_node(
                                    CodePosition::EMPTY,
                                    ParsingError::Eof,
                                    format!(
                                        "Invalid token type ({}) in multiline text during token value parsing",
                                        t.token_type(),
                                    ),
                                ));
                            }
                        }else {
                            nodes.push(Node::new_parsing_error_node(
                                CodePosition::EMPTY,
                                ParsingError::Eof,
                                "Missing multiline text end token during token value parsing",
                            ));

                            break 'tokenProcessing;
                        }
                    }
                },

                TokenType::Identifier | TokenType::ParserFunctionIdentifier => {
                    // Delegates consume their own tokens and may yield no node.
                    let ret = if matches!(t.token_type(), TokenType::Identifier) {
                        self.parse_variable_name_and_function_call(tokens, None)
                    }else {
                        self.parse_parser_function_call(tokens)
                    };
                    if let Some(ret) = ret {
                        nodes.push(ret);
                    }
                },

                // Structural tokens that can never start a token value.
                TokenType::LineContinuation | TokenType::EndComment | TokenType::EndMultilineText |
                TokenType::SingleLineTextQuotes => {
                    nodes.push(Node::new_parsing_error_node(
                        CodePosition::EMPTY,
                        ParsingError::LexerError,
                        format!(
                            "Invalid token type in token value expression: \"{}\"",
                            t.token_type(),
                        ),
                    ));

                    break 'tokenProcessing;
                },
            }
        }

        ast
    }
4272
    /// Parses the rvalue of a simple (`=`) assignment: tokens are taken mostly
    /// verbatim as text nodes, with escape sequences, multiline text, and lexer
    /// errors handled specially. Stops at the first EOL/EOF token.
    fn parse_simple_assignment_value(&mut self, tokens: &mut VecDeque<Token>) -> AST {
        let mut ast = AST::new();
        let nodes = ast.nodes_mut();

        Self::trim_first_line(tokens);

        // NOTE(review): when the value is empty or spans multiple lines, an
        // empty text node is prepended and parsing then continues — presumably
        // so multiline values keep a text node on the first line; confirm
        // against the interpreter's handling of simple assignments.
        let token_count_first_line = Self::get_token_count_first_line(tokens.make_contiguous());
        if token_count_first_line == 0 || token_count_first_line != tokens.len() {
            nodes.push(Node::new_text_value_node(CodePosition::EMPTY, ""));
        }


        'tokenProcessing:
        while !tokens.is_empty() {
            // Peek (clone) the first token; each arm decides whether to pop it.
            let t = tokens[0].clone();

            match t.token_type() {
                TokenType::Eol | TokenType::Eof => {
                    break 'tokenProcessing;
                },

                TokenType::StartComment | TokenType::StartDocComment => {
                    self.parse_comment_tokens(tokens, nodes);
                },

                // In a simple assignment nearly everything is literal text —
                // no operator, bracket, or literal interpretation happens here.
                TokenType::LiteralNull | TokenType::LiteralText | TokenType::LiteralNumber |
                TokenType::ArgumentSeparator | TokenType::Identifier | TokenType::ParserFunctionIdentifier |
                TokenType::Assignment | TokenType::Operator | TokenType::OpeningBracket |
                TokenType::ClosingBracket | TokenType::OpeningBlockBracket | TokenType::ClosingBlockBracket |
                TokenType::Whitespace | TokenType::Other => {
                    tokens.pop_front();

                    nodes.push(Node::new_text_value_node(t.pos(), t.value()));
                },

                TokenType::EscapeSequence => {
                    tokens.pop_front();

                    self.parse_escape_sequence_token(t, nodes);
                },

                TokenType::LexerError => {
                    tokens.pop_front();

                    self.parse_lexer_error_token(t, nodes);
                },

                TokenType::StartMultilineText => {
                    tokens.pop_front();

                    // Consume everything up to the matching EndMultilineText;
                    // a missing end token aborts token processing entirely.
                    loop {
                        if let Some(t) = tokens.pop_front() {
                            if matches!(t.token_type(), TokenType::EndMultilineText) {
                                break;
                            }

                            if matches!(t.token_type(), TokenType::LiteralText | TokenType::Eol) {
                                nodes.push(Node::new_text_value_node(t.pos(), t.value()));
                            }else if matches!(t.token_type(), TokenType::EscapeSequence) {
                                self.parse_escape_sequence_token(t, nodes);
                            }else if matches!(t.token_type(), TokenType::LexerError) {
                                nodes.push(Node::new_parsing_error_node(
                                    t.pos(),
                                    ParsingError::LexerError,
                                    t.value()
                                ));
                            }else {
                                nodes.push(Node::new_parsing_error_node(
                                    CodePosition::EMPTY,
                                    ParsingError::Eof,
                                    format!(
                                        "Invalid token type ({}) in multiline text during simple assignment value parsing",
                                        t.token_type(),
                                    ),
                                ));
                            }
                        }else {
                            nodes.push(Node::new_parsing_error_node(
                                CodePosition::EMPTY,
                                ParsingError::Eof,
                                "Missing multiline text end token during simple assignment value parsing",
                            ));

                            break 'tokenProcessing;
                        }
                    }
                },

                // Structural tokens that can never appear in an rvalue.
                TokenType::LineContinuation | TokenType::EndComment | TokenType::EndMultilineText |
                TokenType::SingleLineTextQuotes => {
                    nodes.push(Node::new_parsing_error_node(
                        CodePosition::EMPTY,
                        ParsingError::LexerError,
                        format!(
                            "Invalid token type for simple assignment value expression: \"{}\"",
                            t.token_type(),
                        ),
                    ));

                    break 'tokenProcessing;
                },
            }
        }

        ast
    }
4379
4380 fn parse_text_and_char_value(&mut self, value_tokens: &mut VecDeque<Token>, nodes: &mut Vec<Node>) {
4381 if value_tokens.is_empty() {
4382 return;
4383 }
4384
4385 let pos = value_tokens[0].pos().combine(&value_tokens[value_tokens.len() - 1].pos());
4386 let value = value_tokens.iter().
4387 map(|token| token.to_raw_string().to_string()).
4388 collect::<Vec<String>>().
4389 join("");
4390
4391 if !value.is_empty() {
4392 let code_point = value.chars().next().unwrap();
4393
4394 if value.chars().count() == 1 {
4396 nodes.push(Node::new_char_value_node(pos, code_point));
4397
4398 return;
4399 }
4400 }
4401
4402 nodes.push(Node::new_text_value_node(pos, value));
4404 }
4405
4406 fn parse_escape_sequence_token(&mut self, escape_sequence_token: Token, nodes: &mut Vec<Node>) {
4407 if matches!(escape_sequence_token.value().len(), 5..=10) {
4408 if !escape_sequence_token.value().starts_with("\\u{") ||
4409 !escape_sequence_token.value().ends_with("}") {
4410 nodes.push(Node::new_parsing_error_node(
4411 escape_sequence_token.pos(),
4412 ParsingError::LexerError,
4413 format!(
4414 "Invalid unicode escape sequence: {}",
4415 escape_sequence_token.value(),
4416 ),
4417 ));
4418
4419 return;
4420 }
4421
4422 let hex_codepoint = &escape_sequence_token.value()[3..escape_sequence_token.value().len() - 1];
4423 for c in hex_codepoint.bytes() {
4424 if !(c.is_ascii_digit() || matches!(c, b'a'..=b'f') || matches!(c, b'A'..=b'F')) {
4425 nodes.push(Node::new_parsing_error_node(
4426 escape_sequence_token.pos(),
4427 ParsingError::LexerError,
4428 format!(
4429 "Invalid unicode escape sequence: {}",
4430 escape_sequence_token.value(),
4431 ),
4432 ));
4433
4434 return;
4435 }
4436 }
4437
4438 nodes.push(Node::new_unicode_escape_sequence_node(
4439 escape_sequence_token.pos(),
4440 hex_codepoint,
4441 ));
4442
4443 return;
4444 }
4445
4446 if escape_sequence_token.value().chars().count() != 2 || escape_sequence_token.value().as_bytes()[0] != b'\\' {
4447 nodes.push(Node::new_parsing_error_node(
4448 escape_sequence_token.pos(),
4449 ParsingError::LexerError,
4450 format!(
4451 "Invalid escape sequence: {}",
4452 escape_sequence_token.value(),
4453 ),
4454 ));
4455
4456 return;
4457 }
4458
4459 nodes.push(Node::new_escape_sequence_node(
4460 escape_sequence_token.pos(),
4461 escape_sequence_token.value().chars().nth(1).unwrap(),
4462 ));
4463 }
4464
4465 fn parse_number_token(&mut self, number_token: Token, nodes: &mut Vec<Node>) {
4466 let token = number_token.value();
4467
4468 if let Ok(value) = i32::from_str(token) {
4470 nodes.push(Node::new_int_value_node(number_token.pos(), value));
4471
4472 return;
4473 }
4474
4475 if token.ends_with("l") || token.ends_with("L") {
4477 if let Ok(value) = i64::from_str(&token[..token.len() - 1]) {
4478 nodes.push(Node::new_long_value_node(number_token.pos(), value));
4479
4480 return;
4481 }
4482 }else if let Ok(value) = i64::from_str(token) {
4483 nodes.push(Node::new_long_value_node(number_token.pos(), value));
4484
4485 return;
4486 }
4487
4488 if token.ends_with("f") || token.ends_with("F") {
4490 if let Ok(value) = f32::from_str(&token[..token.len() - 1]) {
4491 nodes.push(Node::new_float_value_node(number_token.pos(), value));
4492
4493 return;
4494 }
4495 }
4496
4497 #[expect(clippy::needless_return)]
4499 if let Ok(value) = f64::from_str(token) {
4500 nodes.push(Node::new_double_value_node(number_token.pos(), value));
4501
4502 return;
4503 }
4504 }
4505
4506 fn parse_lexer_error_token(&mut self, lexer_error_token: Token, nodes: &mut Vec<Node>) {
4507 if matches!(lexer_error_token.token_type(), TokenType::LexerError) {
4508 nodes.push(Node::new_parsing_error_node(
4509 lexer_error_token.pos(),
4510 ParsingError::LexerError,
4511 lexer_error_token.value(),
4512 ));
4513 }
4514 }
4515
4516 fn parse_function_parameter_list(&mut self, tokens: &mut VecDeque<Token>, function_definition: bool) -> AST {
4517 let mut ast = AST::new();
4518 let nodes = ast.nodes_mut();
4519
4520 Self::trim_first_line(tokens);
4521
4522 if function_definition {
4523 'tokenProcessing:
4524 while !tokens.is_empty() {
4525 let t = tokens[0].clone();
4526
4527 match t.token_type() {
4528 TokenType::Eol | TokenType::Eof => {
4529 break 'tokenProcessing;
4530 },
4531
4532 TokenType::StartComment | TokenType::StartDocComment => {
4533 self.parse_comment_tokens(tokens, nodes);
4534 },
4535
4536 TokenType::ArgumentSeparator => {
4537 tokens.pop_front();
4538
4539 if nodes.is_empty() {
4540 nodes.push(Node::new_parsing_error_node(
4541 t.pos(),
4542 ParsingError::InvalidParameter,
4543 "Empty function parameter"
4544 ));
4545 }
4546
4547 if tokens.is_empty() || matches!(tokens[0].token_type(), TokenType::Eol | TokenType::Eof) {
4548 nodes.push(Node::new_parsing_error_node(
4549 t.pos(),
4550 ParsingError::InvalidParameter,
4551 "Empty function parameter"
4552 ));
4553 }
4554 },
4555
4556 TokenType::Identifier => {
4557 tokens.pop_front();
4558
4559 let mut variable_name = t.value().to_string();
4560 let mut pos = t.pos();
4561
4562 let mut type_constraint = None;
4563 if !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::OpeningBracket) &&
4564 tokens[0].value() == "{" {
4565 let bracket_end_index = utils::get_index_of_matching_bracket_tok(
4566 tokens.make_contiguous(),
4567 0, usize::MAX,
4568 "{", "}", true,
4569 );
4570 let Some(bracket_end_index) = bracket_end_index else {
4571 nodes.push(Node::new_parsing_error_node(
4572 t.pos(),
4573 ParsingError::BracketMismatch,
4574 format!(
4575 "Bracket is missing in return type constraint in function parameter list definition for parameter \"{}\"",
4576 variable_name,
4577 ),
4578 ));
4579
4580 return ast;
4581 };
4582
4583 pos = pos.combine(&tokens.get(bracket_end_index).unwrap().pos());
4584
4585 let mut type_constraint_tokens = tokens.split_off(bracket_end_index + 1);
4586 mem::swap(tokens, &mut type_constraint_tokens);
4587
4588 type_constraint = self.parse_type_constraint(
4589 &mut type_constraint_tokens,
4590 true,
4591 nodes,
4592 );
4593 }
4594
4595 if !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::Operator) &&
4596 tokens[0].value() == "..." {
4597 pos = pos.combine(&tokens[0].pos());
4598
4599 tokens.pop_front();
4601
4602 variable_name += "...";
4603 }
4604
4605 nodes.push(Node::new_variable_name_node(
4606 pos,
4607 variable_name,
4608 type_constraint.map(Box::from),
4609 ));
4610 },
4611
4612 TokenType::LexerError => {
4613 tokens.pop_front();
4614
4615 self.parse_lexer_error_token(t, nodes);
4616 },
4617
4618 TokenType::LiteralNull | TokenType::LiteralNumber | TokenType::LiteralText |
4619 TokenType::Assignment | TokenType::ClosingBracket | TokenType::Whitespace |
4620 TokenType::Other | TokenType::Operator | TokenType::OpeningBracket |
4621 TokenType::OpeningBlockBracket | TokenType::ClosingBlockBracket |
4622 TokenType::EscapeSequence | TokenType::ParserFunctionIdentifier |
4623 TokenType::StartMultilineText | TokenType::LineContinuation | TokenType::EndComment |
4624 TokenType::EndMultilineText | TokenType::SingleLineTextQuotes => {
4625 nodes.push(Node::new_parsing_error_node(
4626 CodePosition::EMPTY,
4627 ParsingError::LexerError,
4628 format!(
4629 "Invalid token type for function parameter list expression: \"{}\"",
4630 t.token_type(),
4631 ),
4632 ));
4633
4634 return ast;
4635 },
4636 }
4637 }
4638 }else {'tokenProcessing:
4639 while !tokens.is_empty() {
4640 let t = tokens[0].clone();
4641
4642 match t.token_type() {
4643 TokenType::Eol | TokenType::Eof => {
4644 break 'tokenProcessing;
4645 },
4646
4647 TokenType::StartComment | TokenType::StartDocComment => {
4648 self.parse_comment_tokens(tokens, nodes);
4649 },
4650
4651 TokenType::ArgumentSeparator => {
4652 tokens.pop_front();
4653
4654 if nodes.is_empty() ||
4655 matches!(nodes[nodes.len() - 1].node_data(), NodeData::ArgumentSeparator(..)) {
4656 nodes.push(Node::new_text_value_node(t.pos(), ""));
4658 }
4659
4660 nodes.push(Node::new_argument_separator_node(
4661 t.pos(),
4662 t.value(),
4663 ));
4664
4665 if tokens.is_empty() || matches!(tokens[0].token_type(), TokenType::Eol | TokenType::Eof) {
4666 nodes.push(Node::new_text_value_node(t.pos(), ""));
4668 }
4669 },
4670
4671 TokenType::LiteralNull => {
4672 tokens.pop_front();
4673
4674 nodes.push(Node::new_null_value_node(t.pos()));
4675 },
4676
4677 TokenType::LiteralText | TokenType::Assignment | TokenType::ClosingBracket |
4678 TokenType::Whitespace => {
4679 tokens.pop_front();
4680
4681 nodes.push(Node::new_text_value_node(t.pos(), t.value()));
4682 },
4683
4684 TokenType::Other => {
4685 tokens.pop_front();
4686
4687 self.parse_text_and_char_value(&mut VecDeque::from([t]), nodes);
4688 },
4689
4690 TokenType::Operator => {
4691 tokens.pop_front();
4692
4693 if (nodes.is_empty() || matches!(nodes[nodes.len() - 1].node_data(), NodeData::ArgumentSeparator(..))) &&
4694 matches!(t.value(), "+" | "-") && !tokens.is_empty() &&
4695 matches!(tokens[0].token_type(), TokenType::LiteralNumber) {
4696 let number_token = tokens.pop_front().unwrap();
4697
4698 let combined_number_token = Token::new(
4699 t.pos().combine(&number_token.pos()),
4700 &(t.value().to_string() + number_token.value()),
4701 TokenType::LiteralNumber,
4702 );
4703
4704 self.parse_number_token(combined_number_token, nodes);
4705
4706 continue 'tokenProcessing;
4707 }
4708
4709 nodes.push(Node::new_text_value_node(t.pos(), t.value()));
4710 },
4711
4712 TokenType::LiteralNumber => {
4713 tokens.pop_front();
4714
4715 self.parse_number_token(t, nodes);
4716 },
4717
4718 TokenType::OpeningBracket => {
4719 if t.value() == "(" {
4720 let end_index = utils::get_index_of_matching_bracket_tok(
4721 tokens.make_contiguous(), 0, usize::MAX, "(", ")", true,
4722 );
4723 if let Some(end_index) = end_index {
4724 let opening_bracket_token = &tokens[0];
4725 let closing_bracket_token = &tokens[end_index];
4726 let pos = opening_bracket_token.pos().combine(&closing_bracket_token.pos());
4727
4728 let mut function_call = utils::split_off_arguments(tokens, end_index);
4729
4730 nodes.push(Node::new_function_call_previous_node_value_node(
4731 pos, "", "",
4732 self.parse_function_parameter_list(&mut function_call, false).into_nodes(),
4733 ));
4734
4735 continue 'tokenProcessing;
4736 }
4737 }
4738
4739 tokens.pop_front();
4740
4741 nodes.push(Node::new_text_value_node(t.pos(), t.value()));
4742 },
4743
4744 TokenType::EscapeSequence => {
4745 tokens.pop_front();
4746
4747 self.parse_escape_sequence_token(t, nodes);
4748 },
4749
4750 TokenType::LexerError => {
4751 tokens.pop_front();
4752
4753 self.parse_lexer_error_token(t, nodes);
4754 },
4755
4756 TokenType::StartMultilineText => {
4757 tokens.pop_front();
4758
4759 loop {
4760 if let Some(t) = tokens.pop_front() {
4761 if matches!(t.token_type(), TokenType::EndMultilineText) {
4762 break;
4763 }
4764
4765 if matches!(t.token_type(), TokenType::LiteralText | TokenType::Eol) {
4766 nodes.push(Node::new_text_value_node(t.pos(), t.value()));
4767 }else if matches!(t.token_type(), TokenType::EscapeSequence) {
4768 self.parse_escape_sequence_token(t, nodes);
4769 }else if matches!(t.token_type(), TokenType::LexerError) {
4770 nodes.push(Node::new_parsing_error_node(
4771 t.pos(),
4772 ParsingError::LexerError,
4773 t.value()
4774 ));
4775 }else {
4776 nodes.push(Node::new_parsing_error_node(
4777 CodePosition::EMPTY,
4778 ParsingError::Eof,
4779 format!(
4780 "Invalid token type ({}) in multiline text during simple assignment value parsing",
4781 t.token_type(),
4782 ),
4783 ));
4784 }
4785 }else {
4786 nodes.push(Node::new_parsing_error_node(
4787 CodePosition::EMPTY,
4788 ParsingError::Eof,
4789 "Missing multiline text end token during simple assignment value parsing",
4790 ));
4791
4792 break 'tokenProcessing;
4793 }
4794 }
4795 },
4796
4797 TokenType::Identifier | TokenType::ParserFunctionIdentifier => {
4798 let is_identifier = matches!(t.token_type(), TokenType::Identifier);
4799
4800 if is_identifier && tokens.len() >= 2 &&
4801 matches!(tokens[1].token_type(), TokenType::Operator) &&
4802 tokens[1].value() == "..." {
4803 let identifier_token = tokens.pop_front().unwrap();
4806 let operator_token = tokens.pop_front().unwrap();
4807
4808 let pos = identifier_token.pos().combine(&operator_token.pos());
4809
4810 nodes.push(Node::new_unprocessed_variable_name_node(
4811 pos,
4812 identifier_token.value().to_string() + operator_token.value(),
4813 ));
4814 }else {
4815 let ret = if is_identifier {
4816 self.parse_variable_name_and_function_call(tokens, None)
4817 }else {
4818 self.parse_parser_function_call(tokens)
4819 };
4820 if let Some(ret) = ret {
4821 nodes.push(ret);
4822 }
4823 }
4824 },
4825
4826 TokenType::OpeningBlockBracket | TokenType::ClosingBlockBracket |
4827 TokenType::LineContinuation | TokenType::EndComment | TokenType::EndMultilineText |
4828 TokenType::SingleLineTextQuotes => {
4829 nodes.push(Node::new_parsing_error_node(
4830 CodePosition::EMPTY,
4831 ParsingError::LexerError,
4832 format!(
4833 "Invalid token type for function argument expression: \"{}\"",
4834 t.token_type(),
4835 ),
4836 ));
4837
4838 break 'tokenProcessing;
4839 },
4840 }
4841 }
4842 }
4843
4844 ast
4845 }
4846
4847 fn parse_function_call_without_prefix(&mut self, tokens: &mut VecDeque<Token>, operator_type: Option<OperatorType>) -> Option<Node> {
4848 if tokens.len() < 2 {
4849 return None;
4850 }
4851
4852 let identifier_token = tokens.front().unwrap().clone();
4853 if !matches!(identifier_token.token_type(), TokenType::Other) ||
4854 !regex_patterns::WORD.is_match(identifier_token.value()) ||
4855 !matches!(tokens[1].token_type(), TokenType::OpeningBracket) ||
4856 tokens[1].value() != "(" {
4857 return None;
4858 }
4859
4860 tokens.pop_front();
4861
4862 self.parse_function_call(identifier_token, tokens, operator_type)
4863 }
4864
4865 fn parse_variable_name_and_function_call(&mut self, tokens: &mut VecDeque<Token>, operator_type: Option<OperatorType>) -> Option<Node> {
4866 if tokens.is_empty() {
4867 return None;
4868 }
4869
4870 let identifier_token = tokens.front().unwrap().clone();
4871 if !matches!(identifier_token.token_type(), TokenType::Identifier) {
4872 return None;
4873 }
4874
4875 tokens.pop_front();
4876
4877 if tokens.is_empty() || !matches!(tokens[0].token_type(), TokenType::OpeningBracket) ||
4878 tokens[0].value() != "(" || !regex_patterns::VAR_NAME_FUNCS_WITH_OPERATOR_AND_CONVERSION_METHOD.is_match(identifier_token.value()) {
4879 return Some(Node::new_unprocessed_variable_name_node(
4880 identifier_token.pos(),
4881 identifier_token.value(),
4882 ));
4883 }
4884
4885 self.parse_function_call(identifier_token, tokens, operator_type)
4886 }
4887
4888 fn parse_function_call(&mut self, identifier_token: Token, tokens: &mut VecDeque<Token>, operator_type: Option<OperatorType>) -> Option<Node> {
4889 let end_index = utils::get_index_of_matching_bracket_tok(
4890 tokens.make_contiguous(),
4891 0, usize::MAX,
4892 "(", ")", true,
4893 );
4894 let Some(end_index) = end_index else {
4895 return Some(Node::new_parsing_error_node(
4896 identifier_token.pos(),
4897 ParsingError::BracketMismatch,
4898 "Bracket is missing in function call",
4899 ));
4900 };
4901
4902 let pos = identifier_token.pos().combine(&tokens[end_index].pos());
4903
4904 let mut function_parameter_tokens = utils::split_off_arguments(tokens, end_index);
4905
4906 if let Some(operator_type) = operator_type {
4907 let raw_function_args = self.parse_operator_expr(&mut function_parameter_tokens, operator_type).unwrap();
4908
4909 return Some(Node::new_function_call_node(
4910 pos,
4911 Self::convert_comma_operators_to_argument_separators(raw_function_args),
4912 identifier_token.value(),
4913 ));
4914 }
4915
4916 Some(Node::new_function_call_node(
4917 pos,
4918 self.parse_function_parameter_list(&mut function_parameter_tokens, false).into_nodes(),
4919 identifier_token.value(),
4920 ))
4921 }
4922
4923 fn parse_parser_function_call(&mut self, tokens: &mut VecDeque<Token>) -> Option<Node> {
4924 if tokens.is_empty() {
4925 return None;
4926 }
4927
4928 let parser_function_identifier_token = tokens.front().unwrap().clone();
4929 if !matches!(parser_function_identifier_token.token_type(), TokenType::ParserFunctionIdentifier) {
4930 return None;
4931 }
4932
4933 tokens.pop_front();
4934
4935 let end_index = utils::get_index_of_matching_bracket_tok(
4936 tokens.make_contiguous(),
4937 0, usize::MAX,
4938 "(", ")", true,
4939 );
4940 let Some(end_index) = end_index else {
4941 return Some(Node::new_parsing_error_node(
4942 parser_function_identifier_token.pos(),
4943 ParsingError::BracketMismatch,
4944 "Bracket is missing in parser function call",
4945 ));
4946 };
4947
4948 let mut parameter_tokens = utils::split_off_arguments(tokens, end_index);
4949
4950 match parser_function_identifier_token.value() {
4951 "parser.con" => Some(self.parse_condition_expr(&mut parameter_tokens).unwrap()),
4952 "parser.math" => Some(self.parse_math_expr(&mut parameter_tokens).unwrap()),
4953 "parser.norm" => Some(self.parse_token(&mut parameter_tokens).into_node()),
4954 "parser.op" => Some(self.parse_operation_expr(&mut parameter_tokens).unwrap()),
4955
4956 _ => {
4957 Some(Node::new_parsing_error_node(
4958 parser_function_identifier_token.pos(),
4959 ParsingError::InvalidParameter,
4960 format!(
4961 "Invalid parser function: \"{}\"",
4962 parser_function_identifier_token.value(),
4963 ),
4964 ))
4965 },
4966 }
4967 }
4968
4969 fn parse_type_constraint(
4970 &mut self,
4971 tokens: &mut VecDeque<Token>,
4972 allow_special_type_constraints: bool,
4973 error_nodes: &mut Vec<Node>,
4974 ) -> Option<String> {
4975 if tokens.is_empty() {
4976 return None;
4977 }
4978
4979 let mut type_constraint = tokens.iter().
4980 map(|token| token.to_raw_string().to_string()).
4981 collect::<Vec<String>>().
4982 join("");
4983 let regex = if allow_special_type_constraints {
4984 ®ex_patterns::TYPE_CONSTRAINT_WITH_SPECIAL_TYPES
4985 }else {
4986 ®ex_patterns::PARSING_TYPE_CONSTRAINT
4987 };
4988 if !regex.is_match(&type_constraint) {
4989 let pos = tokens[0].pos().combine(&tokens[tokens.len() - 1].pos());
4990
4991 error_nodes.push(Node::new_parsing_error_node(
4992 pos,
4993 ParsingError::LexerError,
4994 "Invalid type constraint syntax"
4995 ));
4996
4997 return None;
4998 }
4999
5000 type_constraint.remove(0);
5002 type_constraint.remove(type_constraint.len() - 1);
5003
5004 Some(type_constraint)
5005 }
5006
    /// Consumes one or more consecutive comment/doc-comment token spans from
    /// the front of `tokens`.
    ///
    /// Lexer error tokens found inside a comment are reported by appending a
    /// parsing error node to `error_nodes`. For doc comments, the literal
    /// text, escape sequences, and line breaks are accumulated and appended
    /// to `self.lang_doc_comment` (separated by "\n" if one already exists).
    fn parse_comment_tokens(&mut self, tokens: &mut VecDeque<Token>, error_nodes: &mut Vec<Node>) {
        if tokens.is_empty() {
            return;
        }

        // Process every directly following comment span.
        let mut current_token = tokens[0].clone();
        while matches!(current_token.token_type(), TokenType::StartComment | TokenType::StartDocComment) {
            tokens.pop_front();

            let is_doc_comment = matches!(current_token.token_type(), TokenType::StartDocComment);
            let mut comment = String::new();

            // Consume tokens until the end-of-comment token (or until the
            // token stream runs out, in which case the comment is unterminated).
            while !matches!(current_token.token_type(), TokenType::EndComment) {
                if tokens.is_empty() {
                    break;
                }

                current_token = tokens.pop_front().unwrap();
                if matches!(current_token.token_type(), TokenType::LexerError) {
                    error_nodes.push(Node::new_parsing_error_node(
                        current_token.pos(),
                        ParsingError::LexerError,
                        current_token.value(),
                    ));
                }

                // Only doc comments keep their contents.
                if is_doc_comment {
                    match current_token.token_type() {
                        TokenType::LiteralText | TokenType::EscapeSequence => {
                            comment += current_token.value();
                        },

                        TokenType::Eol => {
                            comment += "\n";
                        },

                        _ => {},
                    }
                }
            }

            // Append the collected doc-comment text to any previously
            // collected doc comment.
            if is_doc_comment {
                if let Some(ref mut lang_doc_comment) = self.lang_doc_comment {
                    *lang_doc_comment += "\n";
                    *lang_doc_comment += comment.as_str();
                }else {
                    self.lang_doc_comment = Some(comment);
                }
            }

            if tokens.is_empty() {
                break;
            }

            current_token = tokens[0].clone();
        }
    }
5064
    /// Trims the first line of `tokens` (everything before the first EOL/EOF):
    /// removes leading whitespace tokens, then removes trailing whitespace
    /// tokens while skipping over (but keeping) any trailing comment spans.
    fn trim_first_line(tokens: &mut VecDeque<Token>) {
        // Drop leading whitespace.
        while !tokens.is_empty() && matches!(tokens[0].token_type(), TokenType::Whitespace) {
            tokens.pop_front();
        }

        let token_count_first_line = Self::get_token_count_first_line(tokens.make_contiguous());

        if token_count_first_line == 0 {
            return;
        }

        // Walk backwards from the last token of the first line.
        let mut i = token_count_first_line - 1;
        while matches!(tokens[i].token_type(), TokenType::Whitespace | TokenType::EndComment) {
            if matches!(tokens[i].token_type(), TokenType::EndComment) {
                // Skip backwards over the whole comment span without removing
                // it, so only whitespace OUTSIDE of comments is trimmed.
                while !matches!(tokens[i].token_type(), TokenType::StartComment | TokenType::StartDocComment) {
                    if i == 0 {
                        break;
                    }

                    i -= 1;
                }

                if i == 0 {
                    break;
                }

                i -= 1;

                continue;
            }

            // Trailing whitespace token: remove it and step left.
            tokens.remove(i);

            if i == 0 {
                break;
            }

            i -= 1;
        }
    }
5106
5107 fn remove_line_continuation_and_single_line_text_quotes_tokens(tokens: &mut VecDeque<Token>) {
5108 let mut i = 0;
5109 while i < tokens.len() {
5110 let token = &tokens[i];
5111
5112 if matches!(token.token_type(), TokenType::LineContinuation) {
5113 tokens.remove(i);
5114
5115 if tokens.get(i).is_some_and(|token| matches!(token.token_type(), TokenType::Eol)) {
5116 tokens.remove(i);
5117 i -= 1;
5118 }
5119
5120 i -= 1;
5121 }else if matches!(token.token_type(), TokenType::SingleLineTextQuotes) {
5122 tokens.remove(i);
5123 i -= 1;
5124 }
5125
5126 i += 1;
5127 }
5128
5129 tokens.make_contiguous();
5130 }
5131
5132 fn get_token_count_first_line(tokens: &[Token]) -> usize {
5133 for (i, token) in tokens.iter().enumerate() {
5134 if matches!(token.token_type(), TokenType::Eol | TokenType::Eof) {
5135 return i;
5136 }
5137 }
5138
5139 tokens.len()
5140 }
5141}
5142
impl Default for Parser {
    /// Equivalent to [`Parser::new`].
    fn default() -> Self {
        Self::new()
    }
}