1use crate::lex::lex;
2use crate::SyntaxKind;
3use crate::SyntaxKind::*;
4use rowan::ast::AstNode;
5use std::str::FromStr;
6
7#[derive(Debug)]
8pub enum Error {
10 Io(std::io::Error),
12
13 Parse(ParseError),
15}
16
17impl std::fmt::Display for Error {
18 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
19 match &self {
20 Error::Io(e) => write!(f, "IO error: {}", e),
21 Error::Parse(e) => write!(f, "Parse error: {}", e),
22 }
23 }
24}
25
26impl From<std::io::Error> for Error {
27 fn from(e: std::io::Error) -> Self {
28 Error::Io(e)
29 }
30}
31
32impl std::error::Error for Error {}
33
34#[derive(Debug, Clone, PartialEq, Eq, Hash)]
35pub struct ParseError {
37 errors: Vec<ErrorInfo>,
38}
39
40#[derive(Debug, Clone, PartialEq, Eq, Hash)]
41pub struct ErrorInfo {
43 message: String,
44 line: usize,
45 context: String,
46}
47
48impl std::fmt::Display for ParseError {
49 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
50 for err in &self.errors {
51 writeln!(f, "Error at line {}: {}", err.line, err.message)?;
52 writeln!(f, "{}| {}", err.line, err.context)?;
53 }
54 Ok(())
55 }
56}
57
58impl std::error::Error for ParseError {}
59
60impl From<ParseError> for Error {
61 fn from(e: ParseError) -> Self {
62 Error::Parse(e)
63 }
64}
65
66#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
70pub enum Lang {}
71impl rowan::Language for Lang {
72 type Kind = SyntaxKind;
73 fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind {
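        // SAFETY: this assumes `SyntaxKind` is `#[repr(u16)]` and that `raw`
        // only ever comes from `kind_to_raw`, so the value is always a valid
        // discriminant.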
74 unsafe { std::mem::transmute::<u16, SyntaxKind>(raw.0) }
75 }
76 fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind {
77 kind.into()
78 }
79}
80
81use rowan::GreenNode;
84
85use rowan::GreenNodeBuilder;
89
90#[derive(Debug)]
93struct Parse {
94 green_node: GreenNode,
95 #[allow(unused)]
96 errors: Vec<ErrorInfo>,
97}
98
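// Parses `text` into a lossless green tree. Errors are collected in
// `Parse::errors` instead of aborting, so a tree is always produced. The
// lexed tokens are reversed below so that `tokens.last()` is the current
// token and `Vec::pop` advances the parser.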
99fn parse(text: &str) -> Parse {
100 struct Parser {
101 tokens: Vec<(SyntaxKind, String)>,
104 builder: GreenNodeBuilder<'static>,
106 errors: Vec<ErrorInfo>,
109 original_text: String,
111 }
112
113 impl Parser {
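        // Reports a parse error: the offending token is wrapped in an ERROR
        // node, and the message is recorded together with the line number and
        // the source line as context. Stray INDENT tokens get a more specific
        // message (missing ':' or "indented line not part of a rule").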
114 fn error(&mut self, msg: String) {
115 self.builder.start_node(ERROR.into());
116
117 let (line, context) = if self.current() == Some(INDENT) {
118 let lines: Vec<&str> = self.original_text.lines().collect();
120 let tab_line = lines
121 .iter()
122 .enumerate()
123 .find(|(_, line)| line.starts_with('\t'))
124 .map(|(i, _)| i + 1)
125 .unwrap_or(1);
126
127 let next_line = tab_line + 1;
129 if next_line <= lines.len() {
130 (next_line, lines[next_line - 1].to_string())
131 } else {
132 (tab_line, lines[tab_line - 1].to_string())
133 }
134 } else {
135 let line = self.get_line_number_for_position(self.tokens.len());
136 (line, self.get_context_for_line(line))
137 };
138
139 let message = if self.current() == Some(INDENT) && !msg.contains("indented") {
140 if self.tokens.len() > 0 && self.tokens[self.tokens.len() - 1].0 == IDENTIFIER {
141 "expected ':'".to_string()
142 } else {
143 "indented line not part of a rule".to_string()
144 }
145 } else {
146 msg
147 };
148
149 self.errors.push(ErrorInfo {
150 message,
151 line,
152 context,
153 });
154
155 if self.current().is_some() {
156 self.bump();
157 }
158 self.builder.finish_node();
159 }
160
161 fn get_line_number_for_position(&self, position: usize) -> usize {
162 if position >= self.tokens.len() {
163 return self.original_text.matches('\n').count() + 1;
164 }
165
166 self.tokens[0..position]
168 .iter()
169 .filter(|(kind, _)| *kind == NEWLINE)
170 .count()
171 + 1
172 }
173
174 fn get_context_for_line(&self, line_number: usize) -> String {
175 self.original_text
176 .lines()
177 .nth(line_number - 1)
178 .unwrap_or("")
179 .to_string()
180 }
181
182 fn parse_recipe_line(&mut self) {
183 self.builder.start_node(RECIPE.into());
184
185 if self.current() != Some(INDENT) {
187 self.error("recipe line must start with a tab".into());
188 self.builder.finish_node();
189 return;
190 }
191 self.bump();
192
193 while self.current().is_some() && self.current() != Some(NEWLINE) {
196 self.bump();
197 }
198
199 if self.current() == Some(NEWLINE) {
201 self.bump();
202 }
203
204 self.builder.finish_node();
205 }
206
207 fn parse_rule_target(&mut self) -> bool {
208 match self.current() {
209 Some(IDENTIFIER) => {
210 self.bump();
211 true
212 }
213 Some(DOLLAR) => {
214 self.parse_variable_reference();
215 true
216 }
217 _ => {
218 self.error("expected rule target".into());
219 false
220 }
221 }
222 }
223
224 fn parse_rule_dependencies(&mut self) {
225 self.builder.start_node(EXPR.into());
226 while self.current().is_some() && self.current() != Some(NEWLINE) {
227 self.bump();
228 }
229 self.builder.finish_node();
230 }
231
232 fn parse_rule_recipes(&mut self) {
233 loop {
234 match self.current() {
235 Some(INDENT) => {
236 self.parse_recipe_line();
237 }
238 Some(NEWLINE) => {
239 self.bump();
240 break;
241 }
242 _ => break,
243 }
244 }
245 }
246
247 fn find_and_consume_colon(&mut self) -> bool {
248 self.skip_ws();
250
251 if self.current() == Some(OPERATOR) && self.tokens.last().unwrap().1 == ":" {
253 self.bump();
254 return true;
255 }
256
257 let has_colon = self
259 .tokens
260 .iter()
261 .rev()
262 .any(|(kind, text)| *kind == OPERATOR && text == ":");
263
264 if has_colon {
265 while self.current().is_some() {
267 if self.current() == Some(OPERATOR) && self.tokens.last().unwrap().1 == ":" {
268 self.bump();
269 return true;
270 }
271 self.bump();
272 }
273 }
274
275 self.error("expected ':'".into());
276 false
277 }
278
279 fn parse_rule(&mut self) {
280 self.builder.start_node(RULE.into());
281
282 self.skip_ws();
284 let has_target = self.parse_rule_target();
285
286 let has_colon = if has_target {
288 self.find_and_consume_colon()
289 } else {
290 false
291 };
292
293 if has_target && has_colon {
295 self.skip_ws();
296 self.parse_rule_dependencies();
297 self.expect_eol();
298
299 self.parse_rule_recipes();
301 }
302
303 self.builder.finish_node();
304 }
305
306 fn parse_comment(&mut self) {
307 if self.current() == Some(COMMENT) {
                self.bump();
                if self.current() == Some(NEWLINE) {
                    self.bump();
                } else if self.current() == Some(WHITESPACE) {
314 self.skip_ws();
316 if self.current() == Some(NEWLINE) {
317 self.bump();
318 }
319 }
320 } else {
322 self.error("expected comment".into());
323 }
324 }
325
326 fn parse_assignment(&mut self) {
327 self.builder.start_node(VARIABLE.into());
328
329 self.skip_ws();
331 if self.current() == Some(IDENTIFIER) && self.tokens.last().unwrap().1 == "export" {
332 self.bump();
333 self.skip_ws();
334 }
335
336 match self.current() {
338 Some(IDENTIFIER) => self.bump(),
339 Some(DOLLAR) => self.parse_variable_reference(),
340 _ => {
341 self.error("expected variable name".into());
342 self.builder.finish_node();
343 return;
344 }
345 }
346
347 self.skip_ws();
349 match self.current() {
350 Some(OPERATOR) => {
351 let op = self.tokens.last().unwrap().1.clone();
352 if ["=", ":=", "::=", ":::=", "+=", "?=", "!="].contains(&op.as_str()) {
353 self.bump();
354 self.skip_ws();
355
356 self.builder.start_node(EXPR.into());
358 while self.current().is_some() && self.current() != Some(NEWLINE) {
359 self.bump();
360 }
361 self.builder.finish_node();
362
363 if self.current() == Some(NEWLINE) {
365 self.bump();
366 } else {
367 self.error("expected newline after variable value".into());
368 }
369 } else {
370 self.error(format!("invalid assignment operator: {}", op));
371 }
372 }
373 _ => self.error("expected assignment operator".into()),
374 }
375
376 self.builder.finish_node();
377 }
378
379 fn parse_variable_reference(&mut self) {
380 self.builder.start_node(EXPR.into());
            self.bump();
            if self.current() == Some(LPAREN) {
                self.bump();
                let mut is_function = false;
388
389 if self.current() == Some(IDENTIFIER) {
390 let function_name = self.tokens.last().unwrap().1.clone();
391 let known_functions = [
393 "shell", "wildcard", "call", "eval", "file", "abspath", "dir",
394 ];
395 if known_functions.contains(&function_name.as_str()) {
396 is_function = true;
397 }
398 }
399
400 if is_function {
401 self.bump();
403
404 self.consume_balanced_parens(1);
406 } else {
407 self.parse_parenthesized_expr_internal(true);
409 }
410 } else {
411 self.error("expected ( after $ in variable reference".into());
412 }
413
414 self.builder.finish_node();
415 }
416
417 fn parse_parenthesized_expr(&mut self) {
419 self.builder.start_node(EXPR.into());
420
421 if self.current() != Some(LPAREN) {
422 self.error("expected opening parenthesis".into());
423 self.builder.finish_node();
424 return;
425 }
426
            self.bump();
            self.parse_parenthesized_expr_internal(false);
429 self.builder.finish_node();
430 }
431
432 fn parse_parenthesized_expr_internal(&mut self, is_variable_ref: bool) {
434 let mut paren_count = 1;
435
436 while paren_count > 0 && self.current().is_some() {
437 match self.current() {
438 Some(LPAREN) => {
439 paren_count += 1;
440 self.bump();
441 self.builder.start_node(EXPR.into());
443 }
444 Some(RPAREN) => {
445 paren_count -= 1;
446 self.bump();
447 if paren_count > 0 {
448 self.builder.finish_node();
449 }
450 }
451 Some(QUOTE) => {
452 self.parse_quoted_string();
454 }
455 Some(DOLLAR) => {
456 self.parse_variable_reference();
458 }
459 Some(_) => self.bump(),
460 None => {
461 self.error(if is_variable_ref {
462 "unclosed variable reference".into()
463 } else {
464 "unclosed parenthesis".into()
465 });
466 break;
467 }
468 }
469 }
470
471 if !is_variable_ref {
472 self.skip_ws();
473 self.expect_eol();
474 }
475 }
476
477 fn parse_quoted_string(&mut self) {
            self.bump();
            while !self.is_at_eof() && self.current() != Some(QUOTE) {
481 self.bump();
482 }
483 if self.current() == Some(QUOTE) {
484 self.bump();
485 }
486 }
487
488 fn parse_conditional_keyword(&mut self) -> Option<String> {
489 if self.current() != Some(IDENTIFIER) {
490 self.error("expected conditional keyword (ifdef, ifndef, ifeq, or ifneq)".into());
491 return None;
492 }
493
494 let token = self.tokens.last().unwrap().1.clone();
495 if !["ifdef", "ifndef", "ifeq", "ifneq"].contains(&token.as_str()) {
496 self.error(format!("unknown conditional directive: {}", token));
497 return None;
498 }
499
500 self.bump();
501 Some(token)
502 }
503
504 fn parse_simple_condition(&mut self) {
505 self.builder.start_node(EXPR.into());
506
507 self.skip_ws();
509
510 let mut found_var = false;
512
513 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
514 match self.current() {
515 Some(WHITESPACE) => self.skip_ws(),
516 Some(DOLLAR) => {
517 found_var = true;
518 self.parse_variable_reference();
519 }
520 Some(_) => {
521 found_var = true;
523 self.bump();
524 }
525 None => break,
526 }
527 }
528
529 if !found_var {
530 self.error("expected condition after conditional directive".into());
532 }
533
534 self.builder.finish_node();
535
536 if self.current() == Some(NEWLINE) {
538 self.bump();
539 } else if !self.is_at_eof() {
540 self.skip_until_newline();
541 }
542 }
543
544 fn is_conditional_directive(&self, token: &str) -> bool {
546 token == "ifdef"
547 || token == "ifndef"
548 || token == "ifeq"
549 || token == "ifneq"
550 || token == "else"
551 || token == "elif"
552 || token == "endif"
553 }
554
555 fn handle_conditional_token(&mut self, token: &str, depth: &mut usize) -> bool {
557 match token {
558 "ifdef" | "ifndef" | "ifeq" | "ifneq" => {
559 *depth += 1;
560 self.parse_conditional();
561 true
562 }
563 "else" | "elif" => {
564 if *depth == 0 {
566 self.error(format!("{} without matching if", token));
567 self.bump();
569 false
570 } else {
571 self.bump();
573
574 if token == "elif" {
576 self.skip_ws();
577
578 if self.current() == Some(IDENTIFIER) {
580 let next_token = self.tokens.last().unwrap().1.clone();
581 if next_token == "ifeq"
582 || next_token == "ifdef"
583 || next_token == "ifndef"
584 || next_token == "ifneq"
585 {
586 match next_token.as_str() {
588 "ifdef" | "ifndef" => {
                                        self.bump();
                                        self.skip_ws();
591 self.parse_simple_condition();
592 }
593 "ifeq" | "ifneq" => {
                                        self.bump();
                                        self.skip_ws();
596 self.parse_parenthesized_expr();
597 }
598 _ => unreachable!(),
599 }
600 } else {
601 self.builder.start_node(EXPR.into());
603 while self.current().is_some()
605 && self.current() != Some(NEWLINE)
606 {
607 self.bump();
608 }
609 self.builder.finish_node();
610 if self.current() == Some(NEWLINE) {
611 self.bump();
612 }
613 }
614 } else {
615 self.builder.start_node(EXPR.into());
617 while self.current().is_some() && self.current() != Some(NEWLINE) {
619 self.bump();
620 }
621 self.builder.finish_node();
622 if self.current() == Some(NEWLINE) {
623 self.bump();
624 }
625 }
626 } else {
627 self.expect_eol();
629 }
630 true
631 }
632 }
633 "endif" => {
634 if *depth == 0 {
636 self.error("endif without matching if".into());
637 self.bump();
639 false
640 } else {
641 *depth -= 1;
642 self.bump();
644
645 self.skip_ws();
647
648 if self.current() == Some(COMMENT) {
653 self.parse_comment();
654 } else if self.current() == Some(NEWLINE) {
655 self.bump();
656 } else if self.current() == Some(WHITESPACE) {
657 self.skip_ws();
659 if self.current() == Some(NEWLINE) {
660 self.bump();
661 }
662 } else if !self.is_at_eof() {
664 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
667 self.bump();
668 }
669 if self.current() == Some(NEWLINE) {
670 self.bump();
671 }
672 }
673 true
676 }
677 }
678 _ => false,
679 }
680 }
681
682 fn parse_conditional(&mut self) {
683 self.builder.start_node(CONDITIONAL.into());
684
685 let Some(token) = self.parse_conditional_keyword() else {
687 self.skip_until_newline();
688 self.builder.finish_node();
689 return;
690 };
691
692 self.skip_ws();
694
695 match token.as_str() {
697 "ifdef" | "ifndef" => {
698 self.parse_simple_condition();
699 }
700 "ifeq" | "ifneq" => {
701 self.parse_parenthesized_expr();
702 }
703 _ => unreachable!("Invalid conditional token"),
704 }
705
706 self.skip_ws();
708 if self.current() == Some(COMMENT) {
709 self.parse_comment();
710 } else {
711 self.expect_eol();
712 }
713
714 let mut depth = 1;
716
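            // Guard against an infinite loop: if the same token position is
            // revisited too many times without progress, give up on this
            // conditional block.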
717 let mut position_count = std::collections::HashMap::<usize, usize>::new();
            let max_repetitions = 15;

            while depth > 0 && !self.is_at_eof() {
722 let current_pos = self.tokens.len();
724 *position_count.entry(current_pos).or_insert(0) += 1;
725
                if position_count.get(&current_pos).unwrap() > &max_repetitions {
729 break;
732 }
733
734 match self.current() {
735 None => {
736 self.error("unterminated conditional (missing endif)".into());
737 break;
738 }
739 Some(IDENTIFIER) => {
740 let token = self.tokens.last().unwrap().1.clone();
741 if !self.handle_conditional_token(&token, &mut depth) {
742 if token == "include" || token == "-include" || token == "sinclude" {
743 self.parse_include();
744 } else {
745 self.parse_normal_content();
746 }
747 }
748 }
749 Some(INDENT) => self.parse_recipe_line(),
750 Some(WHITESPACE) => self.bump(),
751 Some(COMMENT) => self.parse_comment(),
752 Some(NEWLINE) => self.bump(),
753 Some(DOLLAR) => self.parse_normal_content(),
754 Some(QUOTE) => self.parse_quoted_string(),
755 Some(_) => {
756 self.bump();
758 }
759 }
760 }
761
762 self.builder.finish_node();
763 }
764
765 fn parse_normal_content(&mut self) {
767 self.skip_ws();
769
770 if self.is_assignment_line() {
772 self.parse_assignment();
773 } else {
774 self.parse_rule();
776 }
777 }
778
779 fn parse_include(&mut self) {
780 self.builder.start_node(INCLUDE.into());
781
782 if self.current() != Some(IDENTIFIER)
784 || (!["include", "-include", "sinclude"]
785 .contains(&self.tokens.last().unwrap().1.as_str()))
786 {
787 self.error("expected include directive".into());
788 self.builder.finish_node();
789 return;
790 }
791 self.bump();
792 self.skip_ws();
793
794 self.builder.start_node(EXPR.into());
796 let mut found_path = false;
797
798 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
799 match self.current() {
800 Some(WHITESPACE) => self.skip_ws(),
801 Some(DOLLAR) => {
802 found_path = true;
803 self.parse_variable_reference();
804 }
805 Some(_) => {
806 found_path = true;
808 self.bump();
809 }
810 None => break,
811 }
812 }
813
814 if !found_path {
815 self.error("expected file path after include".into());
816 }
817
818 self.builder.finish_node();
819
820 if self.current() == Some(NEWLINE) {
822 self.bump();
823 } else if !self.is_at_eof() {
824 self.error("expected newline after include".into());
825 self.skip_until_newline();
826 }
827
828 self.builder.finish_node();
829 }
830
831 fn parse_identifier_token(&mut self) -> bool {
832 let token = self.tokens.last().unwrap().1.clone();
833
834 if token.starts_with("%") {
836 self.parse_rule();
837 return true;
838 }
839
840 if token.starts_with("if") {
841 self.parse_conditional();
842 return true;
843 }
844
845 if token == "include" || token == "-include" || token == "sinclude" {
846 self.parse_include();
847 return true;
848 }
849
850 self.parse_normal_content();
852 true
853 }
854
855 fn parse_token(&mut self) -> bool {
856 match self.current() {
857 None => false,
858 Some(IDENTIFIER) => {
859 let token = self.tokens.last().unwrap().1.clone();
860 if self.is_conditional_directive(&token) {
861 self.parse_conditional();
862 true
863 } else {
864 self.parse_identifier_token()
865 }
866 }
867 Some(DOLLAR) => {
868 self.parse_normal_content();
869 true
870 }
871 Some(NEWLINE) => {
872 self.bump();
873 true
874 }
875 Some(COMMENT) => {
876 self.parse_comment();
877 true
878 }
879 Some(WHITESPACE) => {
880 if self.is_end_of_file_or_newline_after_whitespace() {
882 self.skip_ws();
885 return true;
886 }
887
888 let look_ahead_pos = self.tokens.len().saturating_sub(1);
891 let mut is_documentation_or_help = false;
892
893 if look_ahead_pos > 0 {
894 let next_token = &self.tokens[look_ahead_pos - 1];
895 if next_token.0 == IDENTIFIER
898 || next_token.0 == COMMENT
899 || next_token.0 == TEXT
900 {
901 is_documentation_or_help = true;
902 }
903 }
904
905 if is_documentation_or_help {
906 self.skip_ws();
909 while self.current().is_some() && self.current() != Some(NEWLINE) {
910 self.bump();
911 }
912 if self.current() == Some(NEWLINE) {
913 self.bump();
914 }
915 } else {
916 self.skip_ws();
917 }
918 true
919 }
920 Some(INDENT) => {
921 #[cfg(test)]
926 {
927 let is_in_test = self.original_text.lines().count() < 20;
930 let tokens_as_str = self
931 .tokens
932 .iter()
933 .rev()
934 .take(10)
935 .map(|(_kind, text)| text.to_string())
936 .collect::<Vec<_>>()
937 .join(" ");
938
939 let in_conditional = tokens_as_str.contains("ifdef")
941 || tokens_as_str.contains("ifndef")
942 || tokens_as_str.contains("ifeq")
943 || tokens_as_str.contains("ifneq")
944 || tokens_as_str.contains("else")
945 || tokens_as_str.contains("endif");
946
947 if is_in_test && !in_conditional {
948 self.error("indented line not part of a rule".into());
949 }
950 }
951
952 self.bump();
954
955 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
957 self.bump();
958 }
959 if self.current() == Some(NEWLINE) {
960 self.bump();
961 }
962 true
963 }
964 Some(kind) => {
965 self.error(format!("unexpected token {:?}", kind));
966 self.bump();
967 true
968 }
969 }
970 }
971
972 fn parse(mut self) -> Parse {
973 self.builder.start_node(ROOT.into());
974
975 while self.parse_token() {}
976
977 self.builder.finish_node();
978
979 Parse {
980 green_node: self.builder.finish(),
981 errors: self.errors,
982 }
983 }
984
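        // Lookahead over the upcoming tokens of the current line (the buffer
        // is reversed, so walking the index downwards moves forward in the
        // source). A line counts as an assignment when an assignment operator
        // appears before a bare ':'.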
985 fn is_assignment_line(&mut self) -> bool {
987 let assignment_ops = ["=", ":=", "::=", ":::=", "+=", "?=", "!="];
988 let mut pos = self.tokens.len().saturating_sub(1);
989 let mut seen_identifier = false;
990 let mut seen_export = false;
991
992 while pos > 0 {
993 let (kind, text) = &self.tokens[pos];
994
995 match kind {
996 NEWLINE => break,
997 IDENTIFIER if text == "export" => seen_export = true,
998 IDENTIFIER if !seen_identifier => seen_identifier = true,
999 OPERATOR if assignment_ops.contains(&text.as_str()) => {
1000 return seen_identifier || seen_export
1001 }
                    OPERATOR if text == ":" => return false,
                    WHITESPACE => (),
                    _ if seen_export => return true,
                    _ => return false,
1006 }
1007 pos = pos.saturating_sub(1);
1008 }
1009 false
1010 }
1011
1012 fn bump(&mut self) {
1014 let (kind, text) = self.tokens.pop().unwrap();
1015 self.builder.token(kind.into(), text.as_str());
1016 }
1017 fn current(&self) -> Option<SyntaxKind> {
1019 self.tokens.last().map(|(kind, _)| *kind)
1020 }
1021
1022 fn expect_eol(&mut self) {
1023 self.skip_ws();
1025
1026 match self.current() {
1027 Some(NEWLINE) => {
1028 self.bump();
1029 }
1030 None => {
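                    // End of input is fine here: a missing trailing newline
                    // is not treated as an error.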
1031 }
1033 n => {
1034 self.error(format!("expected newline, got {:?}", n));
1035 self.skip_until_newline();
1037 }
1038 }
1039 }
1040
1041 fn is_at_eof(&self) -> bool {
1043 self.current().is_none()
1044 }
1045
1046 fn is_at_eof_or_only_whitespace(&self) -> bool {
1048 if self.is_at_eof() {
1049 return true;
1050 }
1051
1052 for i in (0..self.tokens.len()).rev() {
1054 match self.tokens[i].0 {
1055 WHITESPACE | NEWLINE => continue,
1056 _ => return false,
1057 }
1058 }
1059
1060 true
1061 }
1062
1063 fn expect(&mut self, expected: SyntaxKind) {
1064 if self.current() != Some(expected) {
1065 self.error(format!("expected {:?}, got {:?}", expected, self.current()));
1066 } else {
1067 self.bump();
1068 }
1069 }
1070 fn skip_ws(&mut self) {
1071 while self.current() == Some(WHITESPACE) {
1072 self.bump()
1073 }
1074 }
1075
1076 fn skip_until_newline(&mut self) {
1077 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
1078 self.bump();
1079 }
1080 if self.current() == Some(NEWLINE) {
1081 self.bump();
1082 }
1083 }
1084
1085 fn consume_balanced_parens(&mut self, start_paren_count: usize) -> usize {
1087 let mut paren_count = start_paren_count;
1088
1089 while paren_count > 0 && self.current().is_some() {
1090 match self.current() {
1091 Some(LPAREN) => {
1092 paren_count += 1;
1093 self.bump();
1094 }
1095 Some(RPAREN) => {
1096 paren_count -= 1;
1097 self.bump();
1098 if paren_count == 0 {
1099 break;
1100 }
1101 }
1102 Some(DOLLAR) => {
1103 self.parse_variable_reference();
1105 }
1106 Some(_) => self.bump(),
1107 None => {
1108 self.error("unclosed parenthesis".into());
1109 break;
1110 }
1111 }
1112 }
1113
1114 paren_count
1115 }
1116
1117 fn is_end_of_file_or_newline_after_whitespace(&self) -> bool {
1119 if self.is_at_eof_or_only_whitespace() {
1121 return true;
1122 }
1123
1124 if self.tokens.len() <= 1 {
1126 return true;
1127 }
1128
1129 false
1130 }
1131
1132 #[cfg(test)]
1134 fn is_in_test_environment(&self) -> bool {
1135 self.original_text.lines().count() < 20
1138 }
1139 }
1140
1141 let mut tokens = lex(text);
1142 tokens.reverse();
1143 Parser {
1144 tokens,
1145 builder: GreenNodeBuilder::new(),
1146 errors: Vec::new(),
1147 original_text: text.to_string(),
1148 }
1149 .parse()
1150}
1151
1152type SyntaxNode = rowan::SyntaxNode<Lang>;
1159#[allow(unused)]
1160type SyntaxToken = rowan::SyntaxToken<Lang>;
1161#[allow(unused)]
1162type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>;
1163
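// `syntax()` re-wraps the stored green node as a mutable `SyntaxNode`, which
// is what the typed AST wrappers below (`Makefile`, `Rule`, ...) operate on.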
1164impl Parse {
1165 fn syntax(&self) -> SyntaxNode {
1166 SyntaxNode::new_root_mut(self.green_node.clone())
1167 }
1168
1169 fn root(&self) -> Makefile {
1170 Makefile::cast(self.syntax()).unwrap()
1171 }
1172}
1173
1174macro_rules! ast_node {
1175 ($ast:ident, $kind:ident) => {
1176 #[derive(PartialEq, Eq, Hash)]
1177 #[repr(transparent)]
1178 pub struct $ast(SyntaxNode);
1180
1181 impl AstNode for $ast {
1182 type Language = Lang;
1183
1184 fn can_cast(kind: SyntaxKind) -> bool {
1185 kind == $kind
1186 }
1187
1188 fn cast(syntax: SyntaxNode) -> Option<Self> {
1189 if Self::can_cast(syntax.kind()) {
1190 Some(Self(syntax))
1191 } else {
1192 None
1193 }
1194 }
1195
1196 fn syntax(&self) -> &SyntaxNode {
1197 &self.0
1198 }
1199 }
1200
1201 impl core::fmt::Display for $ast {
1202 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
1203 write!(f, "{}", self.0.text())
1204 }
1205 }
1206 };
1207}
1208
1209ast_node!(Makefile, ROOT);
1210ast_node!(Rule, RULE);
1211ast_node!(Identifier, IDENTIFIER);
1212ast_node!(VariableDefinition, VARIABLE);
1213ast_node!(Include, INCLUDE);
1214
1215impl VariableDefinition {
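    /// Returns the variable name, skipping a leading `export` keyword.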
1216 pub fn name(&self) -> Option<String> {
1218 self.syntax().children_with_tokens().find_map(|it| {
1219 it.as_token().and_then(|it| {
1220 if it.kind() == IDENTIFIER && it.text() != "export" {
1221 Some(it.text().to_string())
1222 } else {
1223 None
1224 }
1225 })
1226 })
1227 }
1228
1229 pub fn raw_value(&self) -> Option<String> {
1231 self.syntax()
1232 .children()
1233 .find(|it| it.kind() == EXPR)
1234 .map(|it| it.text().to_string())
1235 }
1236}
1237
1238impl Makefile {
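    /// Creates an empty makefile containing only a `ROOT` node.
    ///
    /// A minimal usage sketch (mirrors `test_add_rule` below; the crate
    /// import is omitted, so the example is not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut makefile = Makefile::new();
    /// let rule = makefile.add_rule("rule");
    /// assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
    /// assert_eq!(makefile.to_string(), "rule:\n");
    /// ```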
1239 pub fn new() -> Makefile {
1241 let mut builder = GreenNodeBuilder::new();
1242
1243 builder.start_node(ROOT.into());
1244 builder.finish_node();
1245
1246 let syntax = SyntaxNode::new_root_mut(builder.finish());
1247 Makefile(syntax)
1248 }
1249
1250 pub fn code(&self) -> String {
1252 self.syntax().text().to_string()
1253 }
1254
1255 pub fn is_root(&self) -> bool {
1257 self.syntax().kind() == ROOT
1258 }
1259
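    /// Reads and parses a makefile from `r`, failing with `Error::Parse` if
    /// the input contains syntax errors. Use [`Makefile::read_relaxed`] to
    /// keep the tree and ignore recoverable errors.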
1260 pub fn read<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1264 let mut buf = String::new();
1265 r.read_to_string(&mut buf)?;
1266 Ok(buf.parse()?)
1267 }
1268
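    /// Like [`Makefile::read`], but error-tolerant: the parsed tree is
    /// returned even if the parser recorded errors.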
1269 pub fn read_relaxed<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1271 let mut buf = String::new();
1272 r.read_to_string(&mut buf)?;
1273
1274 let parsed = parse(&buf);
1275 Ok(parsed.root())
1276 }
1277
1278 pub fn rules(&self) -> impl Iterator<Item = Rule> + '_ {
1287 self.syntax().children().filter_map(Rule::cast)
1288 }
1289
1290 pub fn rules_by_target<'a>(&'a self, target: &'a str) -> impl Iterator<Item = Rule> + 'a {
1292 self.rules()
1293 .filter(move |rule| rule.targets().any(|t| t == target))
1294 }
1295
1296 pub fn variable_definitions(&self) -> impl Iterator<Item = VariableDefinition> {
1298 self.syntax()
1299 .children()
1300 .filter_map(VariableDefinition::cast)
1301 }
1302
1303 pub fn add_rule(&mut self, target: &str) -> Rule {
1313 let mut builder = GreenNodeBuilder::new();
1314 builder.start_node(RULE.into());
1315 builder.token(IDENTIFIER.into(), target);
1316 builder.token(OPERATOR.into(), ":");
1317 builder.token(NEWLINE.into(), "\n");
1318 builder.finish_node();
1319
1320 let syntax = SyntaxNode::new_root_mut(builder.finish());
1321 let pos = self.0.children_with_tokens().count();
1322 self.0.splice_children(pos..pos, vec![syntax.into()]);
1323 Rule(self.0.children().nth(pos).unwrap())
1324 }
1325
1326 pub fn from_reader<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1328 let mut buf = String::new();
1329 r.read_to_string(&mut buf)?;
1330
1331 let parsed = parse(&buf);
1332 if !parsed.errors.is_empty() {
1333 Err(Error::Parse(ParseError {
1334 errors: parsed.errors,
1335 }))
1336 } else {
1337 Ok(parsed.root())
1338 }
1339 }
1340
1341 pub fn includes(&self) -> impl Iterator<Item = Include> {
1351 self.syntax().children().filter_map(Include::cast)
1352 }
1353
1354 pub fn included_files(&self) -> impl Iterator<Item = String> + '_ {
1364 fn collect_includes(node: &SyntaxNode) -> Vec<Include> {
1367 let mut includes = Vec::new();
1368
1369 if let Some(include) = Include::cast(node.clone()) {
1371 includes.push(include);
1372 }
1373
1374 for child in node.children() {
1376 includes.extend(collect_includes(&child));
1377 }
1378
1379 includes
1380 }
1381
1382 let includes = collect_includes(self.syntax());
1384
1385 includes.into_iter().map(|include| {
1387 include
1388 .syntax()
1389 .children()
1390 .find(|node| node.kind() == EXPR)
1391 .map(|expr| expr.text().to_string().trim().to_string())
1392 .unwrap_or_default()
1393 .trim()
1394 .to_string()
1395 })
1396 }
1397}
1398
1399impl FromStr for Rule {
1400 type Err = ParseError;
1401
1402 fn from_str(s: &str) -> Result<Self, Self::Err> {
1403 let parsed = parse(s);
1404
1405 if !parsed.errors.is_empty() {
1406 return Err(ParseError {
1407 errors: parsed.errors,
1408 });
1409 }
1410
1411 let rules = parsed.root().rules().collect::<Vec<_>>();
1412 if rules.len() == 1 {
1413 Ok(rules.into_iter().next().unwrap())
1414 } else {
1415 Err(ParseError {
1416 errors: vec![ErrorInfo {
1417 message: "expected a single rule".to_string(),
1418 line: 1,
1419 context: s.lines().next().unwrap_or("").to_string(),
1420 }],
1421 })
1422 }
1423 }
1424}
1425
1426impl FromStr for Makefile {
1427 type Err = ParseError;
1428
1429 fn from_str(s: &str) -> Result<Self, Self::Err> {
1430 let parsed = parse(s);
1431 if parsed.errors.is_empty() {
1432 Ok(parsed.root())
1433 } else {
1434 Err(ParseError {
1435 errors: parsed.errors,
1436 })
1437 }
1438 }
1439}
1440
1441impl Rule {
1442 fn collect_variable_reference(
1444 &self,
1445 tokens: &mut std::iter::Peekable<impl Iterator<Item = SyntaxElement>>,
1446 ) -> Option<String> {
1447 let mut var_ref = String::new();
1448
1449 if let Some(token) = tokens.next() {
1451 if let Some(t) = token.as_token() {
1452 if t.kind() == DOLLAR {
1453 var_ref.push_str(t.text());
1454
1455 if let Some(next) = tokens.peek() {
1457 if let Some(nt) = next.as_token() {
1458 if nt.kind() == LPAREN {
1459 var_ref.push_str(nt.text());
1461 tokens.next();
1462
1463 let mut paren_count = 1;
1465
1466 while let Some(next_token) = tokens.next() {
1468 if let Some(nt) = next_token.as_token() {
1469 var_ref.push_str(nt.text());
1470
1471 if nt.kind() == LPAREN {
1472 paren_count += 1;
1473 } else if nt.kind() == RPAREN {
1474 paren_count -= 1;
1475 if paren_count == 0 {
1476 break;
1477 }
1478 }
1479 }
1480 }
1481
1482 return Some(var_ref);
1483 }
1484 }
1485 }
1486
1487 while let Some(next_token) = tokens.next() {
1489 if let Some(nt) = next_token.as_token() {
1490 var_ref.push_str(nt.text());
1491 if nt.kind() == RPAREN {
1492 break;
1493 }
1494 }
1495 }
1496 return Some(var_ref);
1497 }
1498 }
1499 }
1500
1501 None
1502 }
1503
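    /// Returns the rule's targets: every identifier or `$(...)` reference
    /// that appears before the `:` operator.
    ///
    /// Sketch, following `test_parse_rule_without_newline` (crate import
    /// omitted):
    ///
    /// ```ignore
    /// let rule: Rule = "rule: dependency\n\tcommand".parse().unwrap();
    /// assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
    /// ```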
1504 pub fn targets(&self) -> impl Iterator<Item = String> + '_ {
1514 let mut result = Vec::new();
1515 let mut tokens = self
1516 .syntax()
1517 .children_with_tokens()
1518 .take_while(|it| it.as_token().map_or(true, |t| t.kind() != OPERATOR))
1519 .peekable();
1520
1521 while let Some(token) = tokens.peek().cloned() {
1522 if let Some(node) = token.as_node() {
                tokens.next();
                if node.kind() == EXPR {
1525 let mut var_content = String::new();
1527 for child in node.children_with_tokens() {
1528 if let Some(t) = child.as_token() {
1529 var_content.push_str(t.text());
1530 }
1531 }
1532 if !var_content.is_empty() {
1533 result.push(var_content);
1534 }
1535 }
1536 } else if let Some(t) = token.as_token() {
1537 if t.kind() == DOLLAR {
1538 if let Some(var_ref) = self.collect_variable_reference(&mut tokens) {
1539 result.push(var_ref);
1540 }
1541 } else if t.kind() == IDENTIFIER {
1542 result.push(t.text().to_string());
                    tokens.next();
                } else {
                    tokens.next();
                }
1547 }
1548 }
1549 result.into_iter()
1550 }
1551
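    /// Returns the rule's prerequisites: the identifiers and `$(...)`
    /// references that follow the `:` operator on the rule line.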
1552 pub fn prerequisites(&self) -> impl Iterator<Item = String> + '_ {
1561 let mut found_operator = false;
1563 let mut result = Vec::new();
1564
1565 for token in self.syntax().children_with_tokens() {
1566 if let Some(t) = token.as_token() {
1567 if t.kind() == OPERATOR {
1568 found_operator = true;
1569 continue;
1570 }
1571 }
1572
1573 if found_operator {
1574 if let Some(node) = token.as_node() {
1575 if node.kind() == EXPR {
1576 let mut tokens = node.children_with_tokens().peekable();
1578 while let Some(token) = tokens.peek().cloned() {
1579 if let Some(t) = token.as_token() {
1580 if t.kind() == DOLLAR {
1581 if let Some(var_ref) =
1582 self.collect_variable_reference(&mut tokens)
1583 {
1584 result.push(var_ref);
1585 }
1586 } else if t.kind() == IDENTIFIER {
1587 result.push(t.text().to_string());
1588 tokens.next(); } else {
1590 tokens.next(); }
1592 } else {
1593 tokens.next(); }
1595 }
1596 break; }
1598 }
1599 }
1600 }
1601
1602 result.into_iter()
1603 }
1604
1605 pub fn recipes(&self) -> impl Iterator<Item = String> {
1614 self.syntax()
1615 .children()
1616 .filter(|it| it.kind() == RECIPE)
1617 .flat_map(|it| {
1618 it.children_with_tokens().filter_map(|it| {
1619 it.as_token().and_then(|t| {
1620 if t.kind() == TEXT {
1621 Some(t.text().to_string())
1622 } else {
1623 None
1624 }
1625 })
1626 })
1627 })
1628 }
1629
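    /// Replaces the `i`-th recipe line of this rule with `line`, returning
    /// the updated rule, or `None` when there is no `i`-th recipe.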
1630 pub fn replace_command(&self, i: usize, line: &str) -> Option<Rule> {
1640 let index = self
1642 .syntax()
1643 .children()
1644 .filter(|it| it.kind() == RECIPE)
1645 .nth(i);
1646
1647 let index = match index {
1648 Some(node) => node.index(),
1649 None => return None,
1650 };
1651
1652 let mut builder = GreenNodeBuilder::new();
1653 builder.start_node(RECIPE.into());
1654 builder.token(INDENT.into(), "\t");
1655 builder.token(TEXT.into(), line);
1656 builder.token(NEWLINE.into(), "\n");
1657 builder.finish_node();
1658
1659 let syntax = SyntaxNode::new_root_mut(builder.finish());
1660
1661 let clone = self.0.clone();
1662 clone.splice_children(index..index + 1, vec![syntax.into()]);
1663
1664 Some(Rule(clone))
1665 }
1666
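    /// Appends a tab-indented recipe line after the rule's last recipe and
    /// returns the updated rule.
    ///
    /// Sketch, following `test_push_command` (crate import omitted):
    ///
    /// ```ignore
    /// let mut makefile = Makefile::new();
    /// let rule = makefile.add_rule("rule").push_command("command");
    /// assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
    /// ```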
1667 pub fn push_command(&self, line: &str) -> Rule {
1677 let index = self
1679 .0
1680 .children_with_tokens()
1681 .filter(|it| it.kind() == RECIPE)
1682 .last();
1683
1684 let index = index.map_or_else(
1685 || self.0.children_with_tokens().count(),
1686 |it| it.index() + 1,
1687 );
1688
1689 let mut builder = GreenNodeBuilder::new();
1690 builder.start_node(RECIPE.into());
1691 builder.token(INDENT.into(), "\t");
1692 builder.token(TEXT.into(), line);
1693 builder.token(NEWLINE.into(), "\n");
1694 builder.finish_node();
1695 let syntax = SyntaxNode::new_root_mut(builder.finish());
1696
1697 let clone = self.0.clone();
1698 clone.splice_children(index..index, vec![syntax.into()]);
1699
1700 Rule(clone)
1701 }
1702}
1703
1704impl Default for Makefile {
1705 fn default() -> Self {
1706 Self::new()
1707 }
1708}
1709
1710impl Include {
1711 pub fn path(&self) -> Option<String> {
1713 self.syntax()
1714 .children()
1715 .find(|it| it.kind() == EXPR)
1716 .map(|it| it.text().to_string().trim().to_string())
1717 }
1718
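    /// Returns `true` for the error-tolerant `-include` and `sinclude` forms.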
1719 pub fn is_optional(&self) -> bool {
1721 let text = self.syntax().text();
1722 text.to_string().starts_with("-include") || text.to_string().starts_with("sinclude")
1723 }
1724}
1725
1726#[cfg(test)]
1727mod tests {
1728 use super::*;
1729
1730 #[test]
1731 fn test_conditionals() {
1732 let code = "ifdef DEBUG\n DEBUG_FLAG := 1\nendif\n";
1736 let mut buf = code.as_bytes();
1737 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse basic ifdef");
1738 assert!(makefile.code().contains("DEBUG_FLAG"));
1739
1740 let code =
1742 "ifeq ($(OS),Windows_NT)\n RESULT := windows\nelse\n RESULT := unix\nendif\n";
1743 let mut buf = code.as_bytes();
1744 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse ifeq/ifneq");
1745 assert!(makefile.code().contains("RESULT"));
1746 assert!(makefile.code().contains("windows"));
1747
1748 let code = "ifdef DEBUG\n CFLAGS += -g\n ifdef VERBOSE\n CFLAGS += -v\n endif\nelse\n CFLAGS += -O2\nendif\n";
1750 let mut buf = code.as_bytes();
1751 let makefile = Makefile::read_relaxed(&mut buf)
1752 .expect("Failed to parse nested conditionals with else");
1753 assert!(makefile.code().contains("CFLAGS"));
1754 assert!(makefile.code().contains("VERBOSE"));
1755
1756 let code = "ifdef DEBUG\nendif\n";
1758 let mut buf = code.as_bytes();
1759 let makefile =
1760 Makefile::read_relaxed(&mut buf).expect("Failed to parse empty conditionals");
1761 assert!(makefile.code().contains("ifdef DEBUG"));
1762
1763 let code = "ifeq ($(OS),Windows)\n EXT := .exe\nelif ifeq ($(OS),Linux)\n EXT := .bin\nelse\n EXT := .out\nendif\n";
1765 let mut buf = code.as_bytes();
1766 let makefile =
1767 Makefile::read_relaxed(&mut buf).expect("Failed to parse conditionals with elif");
1768 assert!(makefile.code().contains("EXT"));
1769
1770 let code = "ifXYZ DEBUG\nDEBUG := 1\nendif\n";
1772 let mut buf = code.as_bytes();
1773 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse with recovery");
1774 assert!(makefile.code().contains("DEBUG"));
1775
1776 let code = "ifdef \nDEBUG := 1\nendif\n";
1778 let mut buf = code.as_bytes();
1779 let makefile = Makefile::read_relaxed(&mut buf)
1780 .expect("Failed to parse with recovery - missing condition");
1781 assert!(makefile.code().contains("DEBUG"));
1782 }
1783
1784 #[test]
1785 fn test_parse_simple() {
1786 const SIMPLE: &str = r#"VARIABLE = value
1787
1788rule: dependency
1789 command
1790"#;
1791 let parsed = parse(SIMPLE);
1792 assert!(parsed.errors.is_empty());
1793 let node = parsed.syntax();
1794 assert_eq!(
1795 format!("{:#?}", node),
1796 r#"ROOT@0..44
1797 VARIABLE@0..17
1798 IDENTIFIER@0..8 "VARIABLE"
1799 WHITESPACE@8..9 " "
1800 OPERATOR@9..10 "="
1801 WHITESPACE@10..11 " "
1802 EXPR@11..16
1803 IDENTIFIER@11..16 "value"
1804 NEWLINE@16..17 "\n"
1805 NEWLINE@17..18 "\n"
1806 RULE@18..44
1807 IDENTIFIER@18..22 "rule"
1808 OPERATOR@22..23 ":"
1809 WHITESPACE@23..24 " "
1810 EXPR@24..34
1811 IDENTIFIER@24..34 "dependency"
1812 NEWLINE@34..35 "\n"
1813 RECIPE@35..44
1814 INDENT@35..36 "\t"
1815 TEXT@36..43 "command"
1816 NEWLINE@43..44 "\n"
1817"#
1818 );
1819
1820 let root = parsed.root();
1821
1822 let mut rules = root.rules().collect::<Vec<_>>();
1823 assert_eq!(rules.len(), 1);
1824 let rule = rules.pop().unwrap();
1825 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
1826 assert_eq!(rule.prerequisites().collect::<Vec<_>>(), vec!["dependency"]);
1827 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
1828
1829 let mut variables = root.variable_definitions().collect::<Vec<_>>();
1830 assert_eq!(variables.len(), 1);
1831 let variable = variables.pop().unwrap();
1832 assert_eq!(variable.name(), Some("VARIABLE".to_string()));
1833 assert_eq!(variable.raw_value(), Some("value".to_string()));
1834 }
1835
1836 #[test]
1837 fn test_parse_export_assign() {
1838 const EXPORT: &str = r#"export VARIABLE := value
1839"#;
1840 let parsed = parse(EXPORT);
1841 assert!(parsed.errors.is_empty());
1842 let node = parsed.syntax();
1843 assert_eq!(
1844 format!("{:#?}", node),
1845 r#"ROOT@0..25
1846 VARIABLE@0..25
1847 IDENTIFIER@0..6 "export"
1848 WHITESPACE@6..7 " "
1849 IDENTIFIER@7..15 "VARIABLE"
1850 WHITESPACE@15..16 " "
1851 OPERATOR@16..18 ":="
1852 WHITESPACE@18..19 " "
1853 EXPR@19..24
1854 IDENTIFIER@19..24 "value"
1855 NEWLINE@24..25 "\n"
1856"#
1857 );
1858
1859 let root = parsed.root();
1860
1861 let mut variables = root.variable_definitions().collect::<Vec<_>>();
1862 assert_eq!(variables.len(), 1);
1863 let variable = variables.pop().unwrap();
1864 assert_eq!(variable.name(), Some("VARIABLE".to_string()));
1865 assert_eq!(variable.raw_value(), Some("value".to_string()));
1866 }
1867
1868 #[test]
1869 fn test_parse_multiple_prerequisites() {
1870 const MULTIPLE_PREREQUISITES: &str = r#"rule: dependency1 dependency2
1871 command
1872
1873"#;
1874 let parsed = parse(MULTIPLE_PREREQUISITES);
1875 assert!(parsed.errors.is_empty());
1876 let node = parsed.syntax();
1877 assert_eq!(
1878 format!("{:#?}", node),
1879 r#"ROOT@0..40
1880 RULE@0..40
1881 IDENTIFIER@0..4 "rule"
1882 OPERATOR@4..5 ":"
1883 WHITESPACE@5..6 " "
1884 EXPR@6..29
1885 IDENTIFIER@6..17 "dependency1"
1886 WHITESPACE@17..18 " "
1887 IDENTIFIER@18..29 "dependency2"
1888 NEWLINE@29..30 "\n"
1889 RECIPE@30..39
1890 INDENT@30..31 "\t"
1891 TEXT@31..38 "command"
1892 NEWLINE@38..39 "\n"
1893 NEWLINE@39..40 "\n"
1894"#
1895 );
1896 let root = parsed.root();
1897
1898 let rule = root.rules().next().unwrap();
1899 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
1900 assert_eq!(
1901 rule.prerequisites().collect::<Vec<_>>(),
1902 vec!["dependency1", "dependency2"]
1903 );
1904 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
1905 }
1906
1907 #[test]
1908 fn test_add_rule() {
1909 let mut makefile = Makefile::new();
1910 let rule = makefile.add_rule("rule");
1911 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
1912 assert_eq!(
1913 rule.prerequisites().collect::<Vec<_>>(),
1914 Vec::<String>::new()
1915 );
1916
1917 assert_eq!(makefile.to_string(), "rule:\n");
1918 }
1919
1920 #[test]
1921 fn test_push_command() {
1922 let mut makefile = Makefile::new();
1923 let rule = makefile.add_rule("rule");
1924
1925 let rule_with_cmd1 = rule.push_command("command");
1927 let rule_with_both = rule_with_cmd1.push_command("command2");
1929
1930 assert_eq!(
1932 rule_with_both.recipes().collect::<Vec<_>>(),
1933 vec!["command", "command2"]
1934 );
1935
1936 let rule_with_all = rule_with_both.push_command("command3");
1938 assert_eq!(
1939 rule_with_all.recipes().collect::<Vec<_>>(),
1940 vec!["command", "command2", "command3"]
1941 );
1942
1943 assert_eq!(
1945 rule.recipes().collect::<Vec<_>>(),
1946 vec!["command", "command2", "command3"]
1947 );
1948
1949 assert_eq!(
1951 makefile.to_string(),
1952 "rule:\n\tcommand\n\tcommand2\n\tcommand3\n"
1953 );
1954
1955 assert_eq!(
1957 rule_with_all.to_string(),
1958 "rule:\n\tcommand\n\tcommand2\n\tcommand3\n"
1959 );
1960 }
1961
1962 #[test]
1963 fn test_replace_command() {
1964 let mut makefile = Makefile::new();
1965 let rule = makefile.add_rule("rule");
1966
1967 let rule_with_cmd1 = rule.push_command("command");
1969 let rule_with_both = rule_with_cmd1.push_command("command2");
1971
1972 assert_eq!(
1974 rule_with_both.recipes().collect::<Vec<_>>(),
1975 vec!["command", "command2"]
1976 );
1977
1978 let modified_rule = rule_with_both.replace_command(0, "new command").unwrap();
1980 assert_eq!(
1981 modified_rule.recipes().collect::<Vec<_>>(),
1982 vec!["new command", "command2"]
1983 );
1984
1985 assert_eq!(
1987 rule.recipes().collect::<Vec<_>>(),
1988 vec!["new command", "command2"]
1989 );
1990
1991 assert_eq!(makefile.to_string(), "rule:\n\tnew command\n\tcommand2\n");
1993
1994 assert_eq!(
1996 modified_rule.to_string(),
1997 "rule:\n\tnew command\n\tcommand2\n"
1998 );
1999 }
2000
2001 #[test]
2002 fn test_parse_rule_without_newline() {
2003 let rule = "rule: dependency\n\tcommand".parse::<Rule>().unwrap();
2004 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
2005 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
2006 let rule = "rule: dependency".parse::<Rule>().unwrap();
2007 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
2008 assert_eq!(rule.recipes().collect::<Vec<_>>(), Vec::<String>::new());
2009 }
2010
2011 #[test]
2012 fn test_parse_makefile_without_newline() {
2013 let makefile = "rule: dependency\n\tcommand".parse::<Makefile>().unwrap();
2014 assert_eq!(makefile.rules().count(), 1);
2015 }
2016
2017 #[test]
2018 fn test_from_reader() {
2019 let makefile = Makefile::from_reader("rule: dependency\n\tcommand".as_bytes()).unwrap();
2020 assert_eq!(makefile.rules().count(), 1);
2021 }
2022
2023 #[test]
2024 fn test_parse_with_tab_after_last_newline() {
2025 let makefile = Makefile::from_reader("rule: dependency\n\tcommand\n\t".as_bytes()).unwrap();
2026 assert_eq!(makefile.rules().count(), 1);
2027 }
2028
2029 #[test]
2030 fn test_parse_with_space_after_last_newline() {
2031 let makefile = Makefile::from_reader("rule: dependency\n\tcommand\n ".as_bytes()).unwrap();
2032 assert_eq!(makefile.rules().count(), 1);
2033 }
2034
2035 #[test]
2036 fn test_parse_with_comment_after_last_newline() {
2037 let makefile =
2038 Makefile::from_reader("rule: dependency\n\tcommand\n#comment".as_bytes()).unwrap();
2039 assert_eq!(makefile.rules().count(), 1);
2040 }
2041
2042 #[test]
2043 fn test_parse_with_variable_rule() {
2044 let makefile =
2045 Makefile::from_reader("RULE := rule\n$(RULE): dependency\n\tcommand".as_bytes())
2046 .unwrap();
2047
2048 let vars = makefile.variable_definitions().collect::<Vec<_>>();
2050 assert_eq!(vars.len(), 1);
2051 assert_eq!(vars[0].name(), Some("RULE".to_string()));
2052 assert_eq!(vars[0].raw_value(), Some("rule".to_string()));
2053
2054 let rules = makefile.rules().collect::<Vec<_>>();
2056 assert_eq!(rules.len(), 1);
2057 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["$(RULE)"]);
2058 assert_eq!(
2059 rules[0].prerequisites().collect::<Vec<_>>(),
2060 vec!["dependency"]
2061 );
2062 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["command"]);
2063 }
2064
2065 #[test]
2066 fn test_parse_with_variable_dependency() {
2067 let makefile =
2068 Makefile::from_reader("DEP := dependency\nrule: $(DEP)\n\tcommand".as_bytes()).unwrap();
2069
2070 let vars = makefile.variable_definitions().collect::<Vec<_>>();
2072 assert_eq!(vars.len(), 1);
2073 assert_eq!(vars[0].name(), Some("DEP".to_string()));
2074 assert_eq!(vars[0].raw_value(), Some("dependency".to_string()));
2075
2076 let rules = makefile.rules().collect::<Vec<_>>();
2078 assert_eq!(rules.len(), 1);
2079 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["rule"]);
2080 assert_eq!(rules[0].prerequisites().collect::<Vec<_>>(), vec!["$(DEP)"]);
2081 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["command"]);
2082 }
2083
2084 #[test]
2085 fn test_parse_with_variable_command() {
2086 let makefile =
2087 Makefile::from_reader("COM := command\nrule: dependency\n\t$(COM)".as_bytes()).unwrap();
2088
2089 let vars = makefile.variable_definitions().collect::<Vec<_>>();
2091 assert_eq!(vars.len(), 1);
2092 assert_eq!(vars[0].name(), Some("COM".to_string()));
2093 assert_eq!(vars[0].raw_value(), Some("command".to_string()));
2094
2095 let rules = makefile.rules().collect::<Vec<_>>();
2097 assert_eq!(rules.len(), 1);
2098 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["rule"]);
2099 assert_eq!(
2100 rules[0].prerequisites().collect::<Vec<_>>(),
2101 vec!["dependency"]
2102 );
2103 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["$(COM)"]);
2104 }
2105
2106 #[test]
2107 fn test_regular_line_error_reporting() {
2108 let input = "rule target\n\tcommand";
2109
2110 let parsed = parse(input);
2112 let direct_error = &parsed.errors[0];
2113
2114 assert_eq!(direct_error.line, 2);
2116 assert!(
2117 direct_error.message.contains("expected"),
2118 "Error message should contain 'expected': {}",
2119 direct_error.message
2120 );
2121 assert_eq!(direct_error.context, "\tcommand");
2122
2123 let reader_result = Makefile::from_reader(input.as_bytes());
2125 let parse_error = match reader_result {
2126 Ok(_) => panic!("Expected Parse error from from_reader"),
2127 Err(err) => match err {
2128 self::Error::Parse(parse_err) => parse_err,
2129 _ => panic!("Expected Parse error"),
2130 },
2131 };
2132
2133 let error_text = parse_error.to_string();
2135 assert!(error_text.contains("Error at line 2:"));
2136 assert!(error_text.contains("2| \tcommand"));
2137 }
2138
2139 #[test]
2140 fn test_parsing_error_context_with_bad_syntax() {
2141 let input = "#begin comment\n\t(╯°□°)╯︵ ┻━┻\n#end comment";
2143
2144 match Makefile::from_reader(input.as_bytes()) {
2146 Ok(makefile) => {
2147 assert_eq!(
2149 makefile.rules().count(),
2150 0,
2151 "Should not have found any rules"
2152 );
2153 }
2154 Err(err) => match err {
2155 self::Error::Parse(error) => {
2156 assert!(error.errors[0].line >= 2, "Error line should be at least 2");
2158 assert!(
2159 !error.errors[0].context.is_empty(),
2160 "Error context should not be empty"
2161 );
2162 }
2163 _ => panic!("Unexpected error type"),
2164 },
2165 };
2166 }
2167
2168 #[test]
2169 fn test_error_message_format() {
2170 let parse_error = ParseError {
2172 errors: vec![ErrorInfo {
2173 message: "test error".to_string(),
2174 line: 42,
2175 context: "some problematic code".to_string(),
2176 }],
2177 };
2178
2179 let error_text = parse_error.to_string();
2180 assert!(error_text.contains("Error at line 42: test error"));
2181 assert!(error_text.contains("42| some problematic code"));
2182 }
2183
2184 #[test]
2185 fn test_line_number_calculation() {
2186 let test_cases = [
            ("rule dependency\n\tcommand", 2),
            ("#comment\n\t(╯°□°)╯︵ ┻━┻", 2),
            ("var = value\n#comment\n\tindented line", 3),
        ];
2192
2193 for (input, expected_line) in test_cases {
2194 match input.parse::<Makefile>() {
2196 Ok(_) => {
2197 continue;
2200 }
2201 Err(err) => {
2202 assert_eq!(
2204 err.errors[0].line, expected_line,
2205 "Line number should match the expected line"
2206 );
2207
2208 if err.errors[0].message.contains("indented") {
2210 assert!(
2211 err.errors[0].context.starts_with('\t'),
2212 "Context for indentation errors should include the tab character"
2213 );
2214 }
2215 }
2216 }
2217 }
2218 }
2219
2220 #[test]
2221 fn test_conditional_features() {
2222 let code = r#"
2224# Set variables based on DEBUG flag
2225ifdef DEBUG
2226 CFLAGS += -g -DDEBUG
2227else
2228 CFLAGS = -O2
2229endif
2230
2231# Define a build rule
2232all: $(OBJS)
2233 $(CC) $(CFLAGS) -o $@ $^
2234"#;
2235
2236 let mut buf = code.as_bytes();
2237 let makefile =
2238 Makefile::read_relaxed(&mut buf).expect("Failed to parse conditional features");
2239
2240 assert!(!makefile.code().is_empty(), "Makefile has content");
2243
2244 let rules = makefile.rules().collect::<Vec<_>>();
2246 assert!(!rules.is_empty(), "Should have found rules");
2247
2248 assert!(code.contains("ifdef DEBUG"));
2250 assert!(code.contains("endif"));
2251
2252 let code_with_var = r#"
2254# Define a variable first
2255CC = gcc
2256
2257ifdef DEBUG
2258 CFLAGS += -g -DDEBUG
2259else
2260 CFLAGS = -O2
2261endif
2262
2263all: $(OBJS)
2264 $(CC) $(CFLAGS) -o $@ $^
2265"#;
2266
2267 let mut buf = code_with_var.as_bytes();
2268 let makefile =
2269 Makefile::read_relaxed(&mut buf).expect("Failed to parse with explicit variable");
2270
2271 let vars = makefile.variable_definitions().collect::<Vec<_>>();
2273 assert!(
2274 !vars.is_empty(),
2275 "Should have found at least the CC variable definition"
2276 );
2277 }
2278
2279 #[test]
2280 fn test_include_directive() {
2281 let parsed = parse("include config.mk\ninclude $(TOPDIR)/rules.mk\ninclude *.mk\n");
2282 assert!(parsed.errors.is_empty());
2283 let node = parsed.syntax();
2284 assert!(format!("{:#?}", node).contains("INCLUDE@"));
2285 }
2286
2287 #[test]
2288 fn test_export_variables() {
2289 let parsed = parse("export SHELL := /bin/bash\n");
2290 assert!(parsed.errors.is_empty());
2291 let makefile = parsed.root();
2292 let vars = makefile.variable_definitions().collect::<Vec<_>>();
2293 assert_eq!(vars.len(), 1);
2294 let shell_var = vars
2295 .iter()
2296 .find(|v| v.name() == Some("SHELL".to_string()))
2297 .unwrap();
2298 assert!(shell_var.raw_value().unwrap().contains("bin/bash"));
2299 }
2300
2301 #[test]
2302 fn test_variable_scopes() {
2303 let parsed =
2304 parse("SIMPLE = value\nIMMEDIATE := value\nCONDITIONAL ?= value\nAPPEND += value\n");
2305 assert!(parsed.errors.is_empty());
2306 let makefile = parsed.root();
2307 let vars = makefile.variable_definitions().collect::<Vec<_>>();
2308 assert_eq!(vars.len(), 4);
2309 let var_names: Vec<_> = vars.iter().filter_map(|v| v.name()).collect();
2310 assert!(var_names.contains(&"SIMPLE".to_string()));
2311 assert!(var_names.contains(&"IMMEDIATE".to_string()));
2312 assert!(var_names.contains(&"CONDITIONAL".to_string()));
2313 assert!(var_names.contains(&"APPEND".to_string()));
2314 }
2315
2316 #[test]
2317 fn test_pattern_rule_parsing() {
2318 let parsed = parse("%.o: %.c\n\t$(CC) -c -o $@ $<\n");
2319 assert!(parsed.errors.is_empty());
2320 let makefile = parsed.root();
2321 let rules = makefile.rules().collect::<Vec<_>>();
2322 assert_eq!(rules.len(), 1);
2323 assert_eq!(rules[0].targets().next().unwrap(), "%.o");
2324 assert!(rules[0].recipes().next().unwrap().contains("$@"));
2325 }
2326
2327 #[test]
2328 fn test_include_variants() {
2329 let makefile_str = "include simple.mk\n-include optional.mk\nsinclude synonym.mk\ninclude $(VAR)/generated.mk\n";
2331 let parsed = parse(makefile_str);
2332 assert!(parsed.errors.is_empty());
2333
2334 let node = parsed.syntax();
2336 let debug_str = format!("{:#?}", node);
2337
2338 assert_eq!(debug_str.matches("INCLUDE@").count(), 4);
2340
2341 let makefile = parsed.root();
2343
2344 let include_count = makefile
2346 .syntax()
2347 .children()
2348 .filter(|child| child.kind() == INCLUDE)
2349 .count();
2350 assert_eq!(include_count, 4);
2351
2352 assert!(makefile
2354 .included_files()
2355 .any(|path| path.contains("$(VAR)")));
2356 }
2357
2358 #[test]
2359 fn test_include_api() {
2360 let makefile_str = "include simple.mk\n-include optional.mk\nsinclude synonym.mk\n";
2362 let makefile: Makefile = makefile_str.parse().unwrap();
2363
2364 let includes: Vec<_> = makefile.includes().collect();
2366 assert_eq!(includes.len(), 3);
2367
        assert!(!includes[0].is_optional());
        assert!(includes[1].is_optional());
        assert!(includes[2].is_optional());

        let files: Vec<_> = makefile.included_files().collect();
2375 assert_eq!(files, vec!["simple.mk", "optional.mk", "synonym.mk"]);
2376
2377 assert_eq!(includes[0].path(), Some("simple.mk".to_string()));
2379 assert_eq!(includes[1].path(), Some("optional.mk".to_string()));
2380 assert_eq!(includes[2].path(), Some("synonym.mk".to_string()));
2381 }
2382
2383 #[test]
2384 fn test_include_integration() {
2385 let phony_makefile = Makefile::from_reader(
2389 ".PHONY: build\n\nVERBOSE ?= 0\n\n# comment\n-include .env\n\nrule: dependency\n\tcommand"
2390 .as_bytes()
2391 ).unwrap();
2392
2393 assert_eq!(phony_makefile.rules().count(), 2);
2395
2396 let normal_rules_count = phony_makefile
2398 .rules()
2399 .filter(|r| !r.targets().any(|t| t.starts_with('.')))
2400 .count();
2401 assert_eq!(normal_rules_count, 1);
2402
2403 assert_eq!(phony_makefile.includes().count(), 1);
2405 assert_eq!(phony_makefile.included_files().next().unwrap(), ".env");
2406
2407 let simple_makefile = Makefile::from_reader(
2409 "\n\nVERBOSE ?= 0\n\n# comment\n-include .env\n\nrule: dependency\n\tcommand"
2410 .as_bytes(),
2411 )
2412 .unwrap();
2413 assert_eq!(simple_makefile.rules().count(), 1);
2414 assert_eq!(simple_makefile.includes().count(), 1);
2415 }
2416
2417 #[test]
2418 fn test_real_conditional_directives() {
2419 let conditional = "ifdef DEBUG\nCFLAGS = -g\nelse\nCFLAGS = -O2\nendif\n";
2421 let mut buf = conditional.as_bytes();
2422 let makefile =
2423 Makefile::read_relaxed(&mut buf).expect("Failed to parse basic if/else conditional");
2424 let code = makefile.code();
2425 assert!(code.contains("ifdef DEBUG"));
2426 assert!(code.contains("else"));
2427 assert!(code.contains("endif"));
2428
2429 let nested = "ifdef DEBUG\nCFLAGS = -g\nifdef VERBOSE\nCFLAGS += -v\nendif\nendif\n";
2431 let mut buf = nested.as_bytes();
2432 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse nested ifdef");
2433 let code = makefile.code();
2434 assert!(code.contains("ifdef DEBUG"));
2435 assert!(code.contains("ifdef VERBOSE"));
2436
2437 let ifeq = "ifeq ($(OS),Windows_NT)\nTARGET = app.exe\nelse\nTARGET = app\nendif\n";
2439 let mut buf = ifeq.as_bytes();
2440 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse ifeq form");
2441 let code = makefile.code();
2442 assert!(code.contains("ifeq"));
2443 assert!(code.contains("Windows_NT"));
2444 }
2445
2446 #[test]
2447 fn test_indented_text_outside_rules() {
2448 let help_text = "help:\n\t@echo \"Available targets:\"\n\t@echo \" help show help\"\n";
2450 let parsed = parse(help_text);
2451 assert!(parsed.errors.is_empty());
2452
2453 let root = parsed.root();
2455 let rules = root.rules().collect::<Vec<_>>();
2456 assert_eq!(rules.len(), 1);
2457
2458 let help_rule = &rules[0];
2459 let recipes = help_rule.recipes().collect::<Vec<_>>();
2460 assert_eq!(recipes.len(), 2);
2461 assert!(recipes[0].contains("Available targets"));
2462 assert!(recipes[1].contains("help"));
2463 }
2464
2465 #[test]
2466 fn test_comment_handling_in_recipes() {
2467 let recipe_comment = "build:\n\t# This is a comment\n\tgcc -o app main.c\n";
2469
2470 let parsed = parse(recipe_comment);
2472
2473 assert!(
2475 parsed.errors.is_empty(),
2476 "Should parse recipe with comments without errors"
2477 );
2478
2479 let root = parsed.root();
2481 let rules = root.rules().collect::<Vec<_>>();
2482 assert_eq!(rules.len(), 1, "Should find exactly one rule");
2483
2484 let build_rule = &rules[0];
2486 assert_eq!(
2487 build_rule.targets().collect::<Vec<_>>(),
2488 vec!["build"],
2489 "Rule should have 'build' as target"
2490 );
2491
2492 let recipes = build_rule.recipes().collect::<Vec<_>>();
2496 assert_eq!(
2497 recipes.len(),
2498 1,
2499 "Should find exactly one recipe line (comment lines are filtered)"
2500 );
2501 assert!(
2502 recipes[0].contains("gcc -o app"),
2503 "Recipe should be the command line"
2504 );
2505 assert!(
2506 !recipes[0].contains("This is a comment"),
2507 "Comments should not be included in recipe lines"
2508 );
2509 }
2510
    #[test]
    fn test_multiline_variables() {
        let multiline = "SOURCES = main.c \\\n util.c\n";

        let parsed = parse(multiline);

        let root = parsed.root();
        let vars = root.variable_definitions().collect::<Vec<_>>();
        assert!(!vars.is_empty(), "Should find at least one variable");

        let operators = "CFLAGS := -Wall \\\n -Werror\n";
        let parsed_operators = parse(operators);

        let root = parsed_operators.root();
        let vars = root.variable_definitions().collect::<Vec<_>>();
        assert!(
            !vars.is_empty(),
            "Should find at least one variable with := operator"
        );

        let append = "LDFLAGS += -L/usr/lib \\\n -lm\n";
        let parsed_append = parse(append);

        let root = parsed_append.root();
        let vars = root.variable_definitions().collect::<Vec<_>>();
        assert!(
            !vars.is_empty(),
            "Should find at least one variable with += operator"
        );
    }

    #[test]
    fn test_whitespace_and_eof_handling() {
        let blank_lines = "VAR = value\n\n\n";

        let parsed_blank = parse(blank_lines);

        let root = parsed_blank.root();
        let vars = root.variable_definitions().collect::<Vec<_>>();
        assert_eq!(
            vars.len(),
            1,
            "Should find one variable in blank lines test"
        );

        let trailing_space = "VAR = value \n";

        let parsed_space = parse(trailing_space);

        let root = parsed_space.root();
        let vars = root.variable_definitions().collect::<Vec<_>>();
        assert_eq!(
            vars.len(),
            1,
            "Should find one variable in trailing space test"
        );

        let no_newline = "VAR = value";

        let parsed_no_newline = parse(no_newline);

        let root = parsed_no_newline.root();
        let vars = root.variable_definitions().collect::<Vec<_>>();
        assert_eq!(vars.len(), 1, "Should find one variable in no newline test");
        assert_eq!(
            vars[0].name(),
            Some("VAR".to_string()),
            "Variable name should be VAR"
        );
    }

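    // Function-style references such as $(wildcard ...) and $(patsubst ...)
    // should parse without errors.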
    #[test]
    fn test_complex_variable_references() {
        let wildcard = "SOURCES = $(wildcard *.c)\n";
        let parsed = parse(wildcard);
        assert!(parsed.errors.is_empty());

        let nested = "PREFIX = /usr\nBINDIR = $(PREFIX)/bin\n";
        let parsed = parse(nested);
        assert!(parsed.errors.is_empty());

        let patsubst = "OBJECTS = $(patsubst %.c,%.o,$(SOURCES))\n";
        let parsed = parse(patsubst);
        assert!(parsed.errors.is_empty());
    }

    #[test]
    fn test_multiline_variable_with_backslash() {
        let content = r#"
LONG_VAR = This is a long variable \
    that continues on the next line \
    and even one more line
"#;

        let mut buf = content.as_bytes();
        let makefile =
            Makefile::read_relaxed(&mut buf).expect("Failed to parse multiline variable");

        let vars = makefile.variable_definitions().collect::<Vec<_>>();
        assert_eq!(
            vars.len(),
            1,
            "Expected 1 variable but found {}",
            vars.len()
        );
        let var_value = vars[0].raw_value();
        assert!(var_value.is_some(), "Variable value is None");

        let value_str = var_value.unwrap();
        assert!(
            value_str.contains("long variable"),
            "Value doesn't contain expected content"
        );
    }

    #[test]
    fn test_multiline_variable_with_mixed_operators() {
        let content = r#"
PREFIX ?= /usr/local
CFLAGS := -Wall -O2 \
    -I$(PREFIX)/include \
    -DDEBUG
"#;
        let mut buf = content.as_bytes();
        let makefile = Makefile::read_relaxed(&mut buf)
            .expect("Failed to parse multiline variable with operators");

        let vars = makefile.variable_definitions().collect::<Vec<_>>();
        assert!(
            !vars.is_empty(),
            "Expected at least 1 variable, found {}",
            vars.len()
        );

        let prefix_var = vars
            .iter()
            .find(|v| v.name().unwrap_or_default() == "PREFIX");
        assert!(prefix_var.is_some(), "Expected to find PREFIX variable");
        assert!(
            prefix_var.unwrap().raw_value().is_some(),
            "PREFIX variable has no value"
        );

        let cflags_var = vars
            .iter()
            .find(|v| v.name().unwrap_or_default().contains("CFLAGS"));
        assert!(
            cflags_var.is_some(),
            "Expected to find CFLAGS variable (or part of it)"
        );
    }

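    // A .PHONY help target whose recipe is a block of echo lines should keep
    // every recipe line attached to the rule.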
    #[test]
    fn test_indented_help_text() {
        let content = r#"
.PHONY: help
help:
	@echo "Available targets:"
	@echo " build - Build the project"
	@echo " test - Run tests"
	@echo " clean - Remove build artifacts"
"#;
        let mut buf = content.as_bytes();
        let makefile =
            Makefile::read_relaxed(&mut buf).expect("Failed to parse indented help text");

        let rules = makefile.rules().collect::<Vec<_>>();
        assert!(!rules.is_empty(), "Expected at least one rule");

        let help_rule = rules.iter().find(|r| r.targets().any(|t| t == "help"));
        assert!(help_rule.is_some(), "Expected to find help rule");

        let recipes = help_rule.unwrap().recipes().collect::<Vec<_>>();
        assert!(
            !recipes.is_empty(),
            "Expected at least one recipe line in help rule"
        );
        assert!(
            recipes.iter().any(|r| r.contains("Available targets")),
            "Expected to find 'Available targets' in recipes"
        );
    }

    #[test]
    fn test_indented_lines_in_conditionals() {
        let content = r#"
ifdef DEBUG
    CFLAGS += -g -DDEBUG
    # This is a comment inside conditional
    ifdef VERBOSE
        CFLAGS += -v
    endif
endif
"#;
        let mut buf = content.as_bytes();
        let makefile = Makefile::read_relaxed(&mut buf)
            .expect("Failed to parse indented lines in conditionals");

        let code = makefile.code();
        assert!(code.contains("ifdef DEBUG"));
        assert!(code.contains("ifdef VERBOSE"));
        assert!(code.contains("endif"));
    }

    #[test]
    fn test_recipe_with_colon() {
        let content = r#"
build:
	@echo "Building at: $(shell date)"
	gcc -o program main.c
"#;
        let parsed = parse(content);
        assert!(
            parsed.errors.is_empty(),
            "Failed to parse recipe with colon: {:?}",
            parsed.errors
        );
    }

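    // Double-colon rules allow multiple independent rules for the same target
    // (this test is currently #[ignore]d).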
    #[test]
    #[ignore]
    fn test_double_colon_rules() {
        let content = r#"
%.o :: %.c
	$(CC) -c $< -o $@

# Double colon allows multiple rules for same target
all:: prerequisite1
	@echo "First rule for all"

all:: prerequisite2
	@echo "Second rule for all"
"#;
        let mut buf = content.as_bytes();
        let makefile =
            Makefile::read_relaxed(&mut buf).expect("Failed to parse double colon rules");

        let rules = makefile.rules().collect::<Vec<_>>();
        assert!(!rules.is_empty(), "Expected at least one rule");

        let all_rules = rules
            .iter()
            .filter(|r| r.targets().any(|t| t.contains("all")));
        assert!(
            all_rules.count() > 0,
            "Expected to find at least one rule containing 'all'"
        );
    }

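    // GNU make spells chained conditionals as `else ifeq (...)`; the relaxed
    // reader is expected to tolerate this `elif`-style input without failing.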
    #[test]
    fn test_elif_directive() {
        let content = r#"
ifeq ($(OS),Windows_NT)
    TARGET = windows
elif ifeq ($(OS),Darwin)
    TARGET = macos
elif ifeq ($(OS),Linux)
    TARGET = linux
else
    TARGET = unknown
endif
"#;
        let mut buf = content.as_bytes();
        let _makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse elif directive");
    }

    #[test]
    fn test_ambiguous_assignment_vs_rule() {
        const VAR_ASSIGNMENT: &str = "VARIABLE = value\n";

        let mut buf = std::io::Cursor::new(VAR_ASSIGNMENT);
        let makefile =
            Makefile::read_relaxed(&mut buf).expect("Failed to parse variable assignment");

        let vars = makefile.variable_definitions().collect::<Vec<_>>();
        let rules = makefile.rules().collect::<Vec<_>>();

        assert_eq!(vars.len(), 1, "Expected 1 variable, found {}", vars.len());
        assert_eq!(rules.len(), 0, "Expected 0 rules, found {}", rules.len());

        assert_eq!(vars[0].name(), Some("VARIABLE".to_string()));

        const SIMPLE_RULE: &str = "target: dependency\n";

        let mut buf = std::io::Cursor::new(SIMPLE_RULE);
        let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse simple rule");

        let vars = makefile.variable_definitions().collect::<Vec<_>>();
        let rules = makefile.rules().collect::<Vec<_>>();

        assert_eq!(vars.len(), 0, "Expected 0 variables, found {}", vars.len());
        assert_eq!(rules.len(), 1, "Expected 1 rule, found {}", rules.len());

        let rule = &rules[0];
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["target"]);
    }

    #[test]
    fn test_nested_conditionals() {
        let content = r#"
ifdef RELEASE
    CFLAGS += -O3
    ifndef DEBUG
        ifneq ($(ARCH),arm)
            CFLAGS += -march=native
        else
            CFLAGS += -mcpu=cortex-a72
        endif
    endif
endif
"#;
        let mut buf = content.as_bytes();
        let makefile =
            Makefile::read_relaxed(&mut buf).expect("Failed to parse nested conditionals");

        let code = makefile.code();
        assert!(code.contains("ifdef RELEASE"));
        assert!(code.contains("ifndef DEBUG"));
        assert!(code.contains("ifneq"));
    }

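    // Recipes indented with spaces instead of tabs are accepted by the relaxed reader.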
    #[test]
    fn test_space_indented_recipes() {
        let content = r#"
build:
    @echo "Building with spaces instead of tabs"
    gcc -o program main.c
"#;
        let mut buf = content.as_bytes();
        let makefile =
            Makefile::read_relaxed(&mut buf).expect("Failed to parse space-indented recipes");

        let rules = makefile.rules().collect::<Vec<_>>();
        assert!(!rules.is_empty(), "Expected at least one rule");

        let build_rule = rules.iter().find(|r| r.targets().any(|t| t == "build"));
        assert!(build_rule.is_some(), "Expected to find build rule");
    }

    #[test]
    fn test_complex_variable_functions() {
        let content = r#"
FILES := $(shell find . -name "*.c")
OBJS := $(patsubst %.c,%.o,$(FILES))
NAME := $(if $(PROGRAM),$(PROGRAM),a.out)
HEADERS := ${wildcard *.h}
"#;
        let parsed = parse(content);
        assert!(
            parsed.errors.is_empty(),
            "Failed to parse complex variable functions: {:?}",
            parsed.errors
        );
    }

    #[test]
    fn test_nested_variable_expansions() {
        let content = r#"
VERSION = 1.0
PACKAGE = myapp
TARBALL = $(PACKAGE)-$(VERSION).tar.gz
INSTALL_PATH = $(shell echo $(PREFIX) | sed 's/\/$//')
"#;
        let parsed = parse(content);
        assert!(
            parsed.errors.is_empty(),
            "Failed to parse nested variable expansions: {:?}",
            parsed.errors
        );
    }

    #[test]
    fn test_special_directives() {
        let content = r#"
# Special makefile directives
.PHONY: all clean
.SUFFIXES: .c .o
.DEFAULT: all

# Variable definition and export directive
export PATH := /usr/bin:/bin
"#;
        let mut buf = content.as_bytes();
        let makefile =
            Makefile::read_relaxed(&mut buf).expect("Failed to parse special directives");

        let rules = makefile.rules().collect::<Vec<_>>();

        let phony_rule = rules
            .iter()
            .find(|r| r.targets().any(|t| t.contains(".PHONY")));
        assert!(phony_rule.is_some(), "Expected to find .PHONY rule");

        let vars = makefile.variable_definitions().collect::<Vec<_>>();
        assert!(!vars.is_empty(), "Expected to find at least one variable");
    }

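    // End-to-end check over a small but realistic makefile: variables, .PHONY,
    // and plain rules should all be recovered from a single parse.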
    #[test]
    fn test_comprehensive_real_world_makefile() {
        let content = r#"
# Basic variable assignment
VERSION = 1.0.0

# Phony target
.PHONY: all clean

# Simple rule
all:
	echo "Building version $(VERSION)"

# Another rule with dependencies
clean:
	rm -f *.o
"#;

        let parsed = parse(content);

        assert!(parsed.errors.is_empty(), "Expected no parsing errors");

        let variables = parsed.root().variable_definitions().collect::<Vec<_>>();
        assert!(!variables.is_empty(), "Expected at least one variable");
        assert_eq!(
            variables[0].name(),
            Some("VERSION".to_string()),
            "Expected VERSION variable"
        );

        let rules = parsed.root().rules().collect::<Vec<_>>();
        assert!(!rules.is_empty(), "Expected at least one rule");

        let rule_targets: Vec<String> = rules
            .iter()
            .flat_map(|r| r.targets().collect::<Vec<_>>())
            .collect();
        assert!(
            rule_targets.contains(&".PHONY".to_string()),
            "Expected .PHONY rule"
        );
        assert!(
            rule_targets.contains(&"all".to_string()),
            "Expected 'all' rule"
        );
        assert!(
            rule_targets.contains(&"clean".to_string()),
            "Expected 'clean' rule"
        );
    }

    #[test]
    fn test_indented_help_text_outside_rules() {
        let content = r#"
# Targets with help text
help:
	@echo "Available targets:"
	@echo " build build the project"
	@echo " test run tests"
	@echo " clean clean build artifacts"

# Another target
clean:
	rm -rf build/
"#;

        let parsed = parse(content);

        assert!(
            parsed.errors.is_empty(),
            "Failed to parse indented help text"
        );

        let rules = parsed.root().rules().collect::<Vec<_>>();
        assert_eq!(rules.len(), 2, "Expected to find two rules");

        let help_rule = rules
            .iter()
            .find(|r| r.targets().any(|t| t == "help"))
            .expect("Expected to find help rule");

        let clean_rule = rules
            .iter()
            .find(|r| r.targets().any(|t| t == "clean"))
            .expect("Expected to find clean rule");

        let help_recipes = help_rule.recipes().collect::<Vec<_>>();
        assert!(
            !help_recipes.is_empty(),
            "Help rule should have recipe lines"
        );
        assert!(
            help_recipes
                .iter()
                .any(|line| line.contains("Available targets")),
            "Help recipes should include 'Available targets' line"
        );

        let clean_recipes = clean_rule.recipes().collect::<Vec<_>>();
        assert!(
            !clean_recipes.is_empty(),
            "Clean rule should have recipe lines"
        );
        assert!(
            clean_recipes.iter().any(|line| line.contains("rm -rf")),
            "Clean recipes should include 'rm -rf' command"
        );
    }

    #[test]
    fn test_makefile1_phony_pattern() {
        let content = "#line 2145\n.PHONY: $(PHONY)\n";

        let result = parse(content);

        assert!(
            result.errors.is_empty(),
            "Failed to parse .PHONY: $(PHONY) pattern"
        );

        let rules = result.root().rules().collect::<Vec<_>>();
        assert_eq!(rules.len(), 1, "Expected 1 rule");
        assert_eq!(
            rules[0].targets().next().unwrap(),
            ".PHONY",
            "Expected .PHONY rule"
        );

        let prereqs = rules[0].prerequisites().collect::<Vec<_>>();
        assert_eq!(prereqs.len(), 1, "Expected 1 prerequisite");
        assert_eq!(prereqs[0], "$(PHONY)", "Expected $(PHONY) prerequisite");
    }
}