1use crate::lex::lex;
2use crate::SyntaxKind;
3use crate::SyntaxKind::*;
4use rowan::ast::AstNode;
5use std::str::FromStr;
6
7#[derive(Debug)]
8pub enum Error {
10 Io(std::io::Error),
12
13 Parse(ParseError),
15}
16
17impl std::fmt::Display for Error {
18 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
19 match &self {
20 Error::Io(e) => write!(f, "IO error: {}", e),
21 Error::Parse(e) => write!(f, "Parse error: {}", e),
22 }
23 }
24}
25
26impl From<std::io::Error> for Error {
27 fn from(e: std::io::Error) -> Self {
28 Error::Io(e)
29 }
30}
31
32impl std::error::Error for Error {}
33
34#[derive(Debug, Clone, PartialEq, Eq, Hash)]
35pub struct ParseError {
37 pub errors: Vec<ErrorInfo>,
39}
40
41#[derive(Debug, Clone, PartialEq, Eq, Hash)]
42pub struct ErrorInfo {
44 pub message: String,
46 pub line: usize,
48 pub context: String,
50}
51
52impl std::fmt::Display for ParseError {
53 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
54 for err in &self.errors {
55 writeln!(f, "Error at line {}: {}", err.line, err.message)?;
56 writeln!(f, "{}| {}", err.line, err.context)?;
57 }
58 Ok(())
59 }
60}
61
62impl std::error::Error for ParseError {}
63
64impl From<ParseError> for Error {
65 fn from(e: ParseError) -> Self {
66 Error::Parse(e)
67 }
68}
69
70#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
74pub enum Lang {}
75impl rowan::Language for Lang {
76 type Kind = SyntaxKind;
77 fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind {
78 unsafe { std::mem::transmute::<u16, SyntaxKind>(raw.0) }
79 }
80 fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind {
81 kind.into()
82 }
83}
84
85use rowan::GreenNode;
88
89use rowan::GreenNodeBuilder;
93
94#[derive(Debug)]
97pub(crate) struct Parse {
98 pub(crate) green_node: GreenNode,
99 #[allow(unused)]
100 pub(crate) errors: Vec<ErrorInfo>,
101}
102
103pub(crate) fn parse(text: &str) -> Parse {
104 struct Parser {
105 tokens: Vec<(SyntaxKind, String)>,
108 builder: GreenNodeBuilder<'static>,
110 errors: Vec<ErrorInfo>,
113 original_text: String,
115 }
116
117 impl Parser {
118 fn error(&mut self, msg: String) {
119 self.builder.start_node(ERROR.into());
120
121 let (line, context) = if self.current() == Some(INDENT) {
122 let lines: Vec<&str> = self.original_text.lines().collect();
124 let tab_line = lines
125 .iter()
126 .enumerate()
127 .find(|(_, line)| line.starts_with('\t'))
128 .map(|(i, _)| i + 1)
129 .unwrap_or(1);
130
131 let next_line = tab_line + 1;
133 if next_line <= lines.len() {
134 (next_line, lines[next_line - 1].to_string())
135 } else {
136 (tab_line, lines[tab_line - 1].to_string())
137 }
138 } else {
139 let line = self.get_line_number_for_position(self.tokens.len());
140 (line, self.get_context_for_line(line))
141 };
142
143 let message = if self.current() == Some(INDENT) && !msg.contains("indented") {
144 if !self.tokens.is_empty() && self.tokens[self.tokens.len() - 1].0 == IDENTIFIER {
145 "expected ':'".to_string()
146 } else {
147 "indented line not part of a rule".to_string()
148 }
149 } else {
150 msg
151 };
152
153 self.errors.push(ErrorInfo {
154 message,
155 line,
156 context,
157 });
158
159 if self.current().is_some() {
160 self.bump();
161 }
162 self.builder.finish_node();
163 }
164
165 fn get_line_number_for_position(&self, position: usize) -> usize {
166 if position >= self.tokens.len() {
167 return self.original_text.matches('\n').count() + 1;
168 }
169
170 self.tokens[0..position]
172 .iter()
173 .filter(|(kind, _)| *kind == NEWLINE)
174 .count()
175 + 1
176 }
177
178 fn get_context_for_line(&self, line_number: usize) -> String {
179 self.original_text
180 .lines()
181 .nth(line_number - 1)
182 .unwrap_or("")
183 .to_string()
184 }
185
186 fn parse_recipe_line(&mut self) {
187 self.builder.start_node(RECIPE.into());
188
189 if self.current() != Some(INDENT) {
191 self.error("recipe line must start with a tab".to_string());
192 self.builder.finish_node();
193 return;
194 }
195 self.bump();
196
197 while self.current().is_some() && self.current() != Some(NEWLINE) {
200 self.bump();
201 }
202
203 if self.current() == Some(NEWLINE) {
205 self.bump();
206 }
207
208 self.builder.finish_node();
209 }
210
211 fn parse_rule_target(&mut self) -> bool {
212 match self.current() {
213 Some(IDENTIFIER) => {
214 if self.is_archive_member() {
216 self.parse_archive_member();
217 } else {
218 self.bump();
219 }
220 true
221 }
222 Some(DOLLAR) => {
223 self.parse_variable_reference();
224 true
225 }
226 _ => {
227 self.error("expected rule target".to_string());
228 false
229 }
230 }
231 }
232
233 fn is_archive_member(&self) -> bool {
234 if self.tokens.len() < 2 {
237 return false;
238 }
239
240 let current_is_identifier = self.current() == Some(IDENTIFIER);
242 let next_is_lparen =
243 self.tokens.len() > 1 && self.tokens[self.tokens.len() - 2].0 == LPAREN;
244
245 current_is_identifier && next_is_lparen
246 }
247
248 fn parse_archive_member(&mut self) {
249 if self.current() == Some(IDENTIFIER) {
260 self.bump();
261 }
262
263 if self.current() == Some(LPAREN) {
265 self.bump();
266
267 self.builder.start_node(ARCHIVE_MEMBERS.into());
269
270 while self.current().is_some() && self.current() != Some(RPAREN) {
272 match self.current() {
273 Some(IDENTIFIER) | Some(TEXT) => {
274 self.builder.start_node(ARCHIVE_MEMBER.into());
276 self.bump();
277 self.builder.finish_node();
278 }
279 Some(WHITESPACE) => self.bump(),
280 Some(DOLLAR) => {
281 self.builder.start_node(ARCHIVE_MEMBER.into());
283 self.parse_variable_reference();
284 self.builder.finish_node();
285 }
286 _ => break,
287 }
288 }
289
290 self.builder.finish_node();
292
293 if self.current() == Some(RPAREN) {
295 self.bump();
296 } else {
297 self.error("expected ')' to close archive member".to_string());
298 }
299 }
300 }
301
302 fn parse_rule_dependencies(&mut self) {
303 self.builder.start_node(PREREQUISITES.into());
304
305 while self.current().is_some() && self.current() != Some(NEWLINE) {
306 match self.current() {
307 Some(WHITESPACE) => {
308 self.bump(); }
310 Some(IDENTIFIER) => {
311 self.builder.start_node(PREREQUISITE.into());
313
314 if self.is_archive_member() {
315 self.parse_archive_member();
316 } else {
317 self.bump(); }
319
320 self.builder.finish_node(); }
322 Some(DOLLAR) => {
323 self.builder.start_node(PREREQUISITE.into());
325
326 self.bump(); if self.current() == Some(LPAREN) {
330 self.bump(); let mut paren_count = 1;
332
333 while self.current().is_some() && paren_count > 0 {
334 if self.current() == Some(LPAREN) {
335 paren_count += 1;
336 } else if self.current() == Some(RPAREN) {
337 paren_count -= 1;
338 }
339 self.bump();
340 }
341 } else {
342 if self.current().is_some() {
344 self.bump();
345 }
346 }
347
348 self.builder.finish_node(); }
350 _ => {
351 self.bump();
353 }
354 }
355 }
356
357 self.builder.finish_node(); }
359
360 fn parse_rule_recipes(&mut self) {
361 loop {
362 match self.current() {
363 Some(INDENT) => {
364 self.parse_recipe_line();
365 }
366 Some(NEWLINE) => {
367 self.bump();
368 break;
369 }
370 _ => break,
371 }
372 }
373 }
374
375 fn find_and_consume_colon(&mut self) -> bool {
376 self.skip_ws();
378
379 if self.current() == Some(OPERATOR) && self.tokens.last().unwrap().1 == ":" {
381 self.bump();
382 return true;
383 }
384
385 let has_colon = self
387 .tokens
388 .iter()
389 .rev()
390 .any(|(kind, text)| *kind == OPERATOR && text == ":");
391
392 if has_colon {
393 while self.current().is_some() {
395 if self.current() == Some(OPERATOR)
396 && self.tokens.last().map(|(_, text)| text.as_str()) == Some(":")
397 {
398 self.bump();
399 return true;
400 }
401 self.bump();
402 }
403 }
404
405 self.error("expected ':'".to_string());
406 false
407 }
408
409 fn parse_rule(&mut self) {
410 self.builder.start_node(RULE.into());
411
412 self.skip_ws();
414 self.builder.start_node(TARGETS.into());
415 let has_target = self.parse_rule_targets();
416 self.builder.finish_node();
417
418 let has_colon = if has_target {
420 self.find_and_consume_colon()
421 } else {
422 false
423 };
424
425 if has_target && has_colon {
427 self.skip_ws();
428 self.parse_rule_dependencies();
429 self.expect_eol();
430
431 self.parse_rule_recipes();
433 }
434
435 self.builder.finish_node();
436 }
437
438 fn parse_rule_targets(&mut self) -> bool {
439 let has_first_target = self.parse_rule_target();
441
442 if !has_first_target {
443 return false;
444 }
445
446 loop {
448 self.skip_ws();
449
450 if self.current() == Some(OPERATOR) && self.tokens.last().unwrap().1 == ":" {
452 break;
453 }
454
455 match self.current() {
457 Some(IDENTIFIER) | Some(DOLLAR) => {
458 if !self.parse_rule_target() {
459 break;
460 }
461 }
462 _ => break,
463 }
464 }
465
466 true
467 }
468
469 fn parse_comment(&mut self) {
470 if self.current() == Some(COMMENT) {
471 self.bump(); if self.current() == Some(NEWLINE) {
475 self.bump(); } else if self.current() == Some(WHITESPACE) {
477 self.skip_ws();
479 if self.current() == Some(NEWLINE) {
480 self.bump();
481 }
482 }
483 } else {
485 self.error("expected comment".to_string());
486 }
487 }
488
489 fn parse_assignment(&mut self) {
490 self.builder.start_node(VARIABLE.into());
491
492 self.skip_ws();
494 if self.current() == Some(IDENTIFIER) && self.tokens.last().unwrap().1 == "export" {
495 self.bump();
496 self.skip_ws();
497 }
498
499 match self.current() {
501 Some(IDENTIFIER) => self.bump(),
502 Some(DOLLAR) => self.parse_variable_reference(),
503 _ => {
504 self.error("expected variable name".to_string());
505 self.builder.finish_node();
506 return;
507 }
508 }
509
510 self.skip_ws();
512 match self.current() {
513 Some(OPERATOR) => {
514 let op = &self.tokens.last().unwrap().1;
515 if ["=", ":=", "::=", ":::=", "+=", "?=", "!="].contains(&op.as_str()) {
516 self.bump();
517 self.skip_ws();
518
519 self.builder.start_node(EXPR.into());
521 while self.current().is_some() && self.current() != Some(NEWLINE) {
522 self.bump();
523 }
524 self.builder.finish_node();
525
526 if self.current() == Some(NEWLINE) {
528 self.bump();
529 } else {
530 self.error("expected newline after variable value".to_string());
531 }
532 } else {
533 self.error(format!("invalid assignment operator: {}", op));
534 }
535 }
536 _ => self.error("expected assignment operator".to_string()),
537 }
538
539 self.builder.finish_node();
540 }
541
542 fn parse_variable_reference(&mut self) {
543 self.builder.start_node(EXPR.into());
544 self.bump(); if self.current() == Some(LPAREN) {
547 self.bump(); let mut is_function = false;
551
552 if self.current() == Some(IDENTIFIER) {
553 let function_name = &self.tokens.last().unwrap().1;
554 let known_functions = [
556 "shell", "wildcard", "call", "eval", "file", "abspath", "dir",
557 ];
558 if known_functions.contains(&function_name.as_str()) {
559 is_function = true;
560 }
561 }
562
563 if is_function {
564 self.bump();
566
567 self.consume_balanced_parens(1);
569 } else {
570 self.parse_parenthesized_expr_internal(true);
572 }
573 } else {
574 self.error("expected ( after $ in variable reference".to_string());
575 }
576
577 self.builder.finish_node();
578 }
579
580 fn parse_parenthesized_expr(&mut self) {
582 self.builder.start_node(EXPR.into());
583
584 if self.current() != Some(LPAREN) {
585 self.error("expected opening parenthesis".to_string());
586 self.builder.finish_node();
587 return;
588 }
589
590 self.bump(); self.parse_parenthesized_expr_internal(false);
592 self.builder.finish_node();
593 }
594
595 fn parse_parenthesized_expr_internal(&mut self, is_variable_ref: bool) {
597 let mut paren_count = 1;
598
599 while paren_count > 0 && self.current().is_some() {
600 match self.current() {
601 Some(LPAREN) => {
602 paren_count += 1;
603 self.bump();
604 self.builder.start_node(EXPR.into());
606 }
607 Some(RPAREN) => {
608 paren_count -= 1;
609 self.bump();
610 if paren_count > 0 {
611 self.builder.finish_node();
612 }
613 }
614 Some(QUOTE) => {
615 self.parse_quoted_string();
617 }
618 Some(DOLLAR) => {
619 self.parse_variable_reference();
621 }
622 Some(_) => self.bump(),
623 None => {
624 self.error(if is_variable_ref {
625 "unclosed variable reference".to_string()
626 } else {
627 "unclosed parenthesis".to_string()
628 });
629 break;
630 }
631 }
632 }
633
634 if !is_variable_ref {
635 self.skip_ws();
636 self.expect_eol();
637 }
638 }
639
640 fn parse_quoted_string(&mut self) {
642 self.bump(); while !self.is_at_eof() && self.current() != Some(QUOTE) {
644 self.bump();
645 }
646 if self.current() == Some(QUOTE) {
647 self.bump();
648 }
649 }
650
651 fn parse_conditional_keyword(&mut self) -> Option<String> {
652 if self.current() != Some(IDENTIFIER) {
653 self.error(
654 "expected conditional keyword (ifdef, ifndef, ifeq, or ifneq)".to_string(),
655 );
656 return None;
657 }
658
659 let token = self.tokens.last().unwrap().1.clone();
660 if !["ifdef", "ifndef", "ifeq", "ifneq"].contains(&token.as_str()) {
661 self.error(format!("unknown conditional directive: {}", token));
662 return None;
663 }
664
665 self.bump();
666 Some(token)
667 }
668
669 fn parse_simple_condition(&mut self) {
670 self.builder.start_node(EXPR.into());
671
672 self.skip_ws();
674
675 let mut found_var = false;
677
678 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
679 match self.current() {
680 Some(WHITESPACE) => self.skip_ws(),
681 Some(DOLLAR) => {
682 found_var = true;
683 self.parse_variable_reference();
684 }
685 Some(_) => {
686 found_var = true;
688 self.bump();
689 }
690 None => break,
691 }
692 }
693
694 if !found_var {
695 self.error("expected condition after conditional directive".to_string());
697 }
698
699 self.builder.finish_node();
700
701 if self.current() == Some(NEWLINE) {
703 self.bump();
704 } else if !self.is_at_eof() {
705 self.skip_until_newline();
706 }
707 }
708
709 fn is_conditional_directive(&self, token: &str) -> bool {
711 token == "ifdef"
712 || token == "ifndef"
713 || token == "ifeq"
714 || token == "ifneq"
715 || token == "else"
716 || token == "elif"
717 || token == "endif"
718 }
719
720 fn handle_conditional_token(&mut self, token: &str, depth: &mut usize) -> bool {
722 match token {
723 "ifdef" | "ifndef" | "ifeq" | "ifneq" => {
724 *depth += 1;
725 self.parse_conditional();
726 true
727 }
728 "else" | "elif" => {
729 if *depth == 0 {
731 self.error(format!("{} without matching if", token));
732 self.bump();
734 false
735 } else {
736 self.bump();
738
739 if token == "elif" {
741 self.skip_ws();
742
743 if self.current() == Some(IDENTIFIER) {
745 let next_token = &self.tokens.last().unwrap().1;
746 if next_token == "ifeq"
747 || next_token == "ifdef"
748 || next_token == "ifndef"
749 || next_token == "ifneq"
750 {
751 match next_token.as_str() {
753 "ifdef" | "ifndef" => {
754 self.bump(); self.skip_ws();
756 self.parse_simple_condition();
757 }
758 "ifeq" | "ifneq" => {
759 self.bump(); self.skip_ws();
761 self.parse_parenthesized_expr();
762 }
763 _ => unreachable!(),
764 }
765 } else {
766 self.builder.start_node(EXPR.into());
768 while self.current().is_some()
770 && self.current() != Some(NEWLINE)
771 {
772 self.bump();
773 }
774 self.builder.finish_node();
775 if self.current() == Some(NEWLINE) {
776 self.bump();
777 }
778 }
779 } else {
780 self.builder.start_node(EXPR.into());
782 while self.current().is_some() && self.current() != Some(NEWLINE) {
784 self.bump();
785 }
786 self.builder.finish_node();
787 if self.current() == Some(NEWLINE) {
788 self.bump();
789 }
790 }
791 } else {
792 self.expect_eol();
794 }
795 true
796 }
797 }
798 "endif" => {
799 if *depth == 0 {
801 self.error("endif without matching if".to_string());
802 self.bump();
804 false
805 } else {
806 *depth -= 1;
807 self.bump();
809
810 self.skip_ws();
812
813 if self.current() == Some(COMMENT) {
818 self.parse_comment();
819 } else if self.current() == Some(NEWLINE) {
820 self.bump();
821 } else if self.current() == Some(WHITESPACE) {
822 self.skip_ws();
824 if self.current() == Some(NEWLINE) {
825 self.bump();
826 }
827 } else if !self.is_at_eof() {
829 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
832 self.bump();
833 }
834 if self.current() == Some(NEWLINE) {
835 self.bump();
836 }
837 }
838 true
841 }
842 }
843 _ => false,
844 }
845 }
846
847 fn parse_conditional(&mut self) {
848 self.builder.start_node(CONDITIONAL.into());
849
850 let Some(token) = self.parse_conditional_keyword() else {
852 self.skip_until_newline();
853 self.builder.finish_node();
854 return;
855 };
856
857 self.skip_ws();
859
860 match token.as_str() {
862 "ifdef" | "ifndef" => {
863 self.parse_simple_condition();
864 }
865 "ifeq" | "ifneq" => {
866 self.parse_parenthesized_expr();
867 }
868 _ => unreachable!("Invalid conditional token"),
869 }
870
871 self.skip_ws();
873 if self.current() == Some(COMMENT) {
874 self.parse_comment();
875 } else {
876 self.expect_eol();
877 }
878
879 let mut depth = 1;
881
882 let mut position_count = std::collections::HashMap::<usize, usize>::new();
884 let max_repetitions = 15; while depth > 0 && !self.is_at_eof() {
887 let current_pos = self.tokens.len();
889 *position_count.entry(current_pos).or_insert(0) += 1;
890
891 if position_count.get(¤t_pos).unwrap() > &max_repetitions {
894 break;
897 }
898
899 match self.current() {
900 None => {
901 self.error("unterminated conditional (missing endif)".to_string());
902 break;
903 }
904 Some(IDENTIFIER) => {
905 let token = self.tokens.last().unwrap().1.clone();
906 if !self.handle_conditional_token(&token, &mut depth) {
907 if token == "include" || token == "-include" || token == "sinclude" {
908 self.parse_include();
909 } else {
910 self.parse_normal_content();
911 }
912 }
913 }
914 Some(INDENT) => self.parse_recipe_line(),
915 Some(WHITESPACE) => self.bump(),
916 Some(COMMENT) => self.parse_comment(),
917 Some(NEWLINE) => self.bump(),
918 Some(DOLLAR) => self.parse_normal_content(),
919 Some(QUOTE) => self.parse_quoted_string(),
920 Some(_) => {
921 self.bump();
923 }
924 }
925 }
926
927 self.builder.finish_node();
928 }
929
930 fn parse_normal_content(&mut self) {
932 self.skip_ws();
934
935 if self.is_assignment_line() {
937 self.parse_assignment();
938 } else {
939 self.parse_rule();
941 }
942 }
943
944 fn parse_include(&mut self) {
945 self.builder.start_node(INCLUDE.into());
946
947 if self.current() != Some(IDENTIFIER)
949 || (!["include", "-include", "sinclude"]
950 .contains(&self.tokens.last().unwrap().1.as_str()))
951 {
952 self.error("expected include directive".to_string());
953 self.builder.finish_node();
954 return;
955 }
956 self.bump();
957 self.skip_ws();
958
959 self.builder.start_node(EXPR.into());
961 let mut found_path = false;
962
963 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
964 match self.current() {
965 Some(WHITESPACE) => self.skip_ws(),
966 Some(DOLLAR) => {
967 found_path = true;
968 self.parse_variable_reference();
969 }
970 Some(_) => {
971 found_path = true;
973 self.bump();
974 }
975 None => break,
976 }
977 }
978
979 if !found_path {
980 self.error("expected file path after include".to_string());
981 }
982
983 self.builder.finish_node();
984
985 if self.current() == Some(NEWLINE) {
987 self.bump();
988 } else if !self.is_at_eof() {
989 self.error("expected newline after include".to_string());
990 self.skip_until_newline();
991 }
992
993 self.builder.finish_node();
994 }
995
996 fn parse_identifier_token(&mut self) -> bool {
997 let token = &self.tokens.last().unwrap().1;
998
999 if token.starts_with("%") {
1001 self.parse_rule();
1002 return true;
1003 }
1004
1005 if token.starts_with("if") {
1006 self.parse_conditional();
1007 return true;
1008 }
1009
1010 if token == "include" || token == "-include" || token == "sinclude" {
1011 self.parse_include();
1012 return true;
1013 }
1014
1015 self.parse_normal_content();
1017 true
1018 }
1019
1020 fn parse_token(&mut self) -> bool {
1021 match self.current() {
1022 None => false,
1023 Some(IDENTIFIER) => {
1024 let token = &self.tokens.last().unwrap().1;
1025 if self.is_conditional_directive(token) {
1026 self.parse_conditional();
1027 true
1028 } else {
1029 self.parse_identifier_token()
1030 }
1031 }
1032 Some(DOLLAR) => {
1033 self.parse_normal_content();
1034 true
1035 }
1036 Some(NEWLINE) => {
1037 self.bump();
1038 true
1039 }
1040 Some(COMMENT) => {
1041 self.parse_comment();
1042 true
1043 }
1044 Some(WHITESPACE) => {
1045 if self.is_end_of_file_or_newline_after_whitespace() {
1047 self.skip_ws();
1050 return true;
1051 }
1052
1053 let look_ahead_pos = self.tokens.len().saturating_sub(1);
1056 let mut is_documentation_or_help = false;
1057
1058 if look_ahead_pos > 0 {
1059 let next_token = &self.tokens[look_ahead_pos - 1];
1060 if next_token.0 == IDENTIFIER
1063 || next_token.0 == COMMENT
1064 || next_token.0 == TEXT
1065 {
1066 is_documentation_or_help = true;
1067 }
1068 }
1069
1070 if is_documentation_or_help {
1071 self.skip_ws();
1074 while self.current().is_some() && self.current() != Some(NEWLINE) {
1075 self.bump();
1076 }
1077 if self.current() == Some(NEWLINE) {
1078 self.bump();
1079 }
1080 } else {
1081 self.skip_ws();
1082 }
1083 true
1084 }
1085 Some(INDENT) => {
1086 #[cfg(test)]
1091 {
1092 let is_in_test = self.original_text.lines().count() < 20;
1095 let tokens_as_str = self
1096 .tokens
1097 .iter()
1098 .rev()
1099 .take(10)
1100 .map(|(_kind, text)| text.as_str())
1101 .collect::<Vec<_>>()
1102 .join(" ");
1103
1104 let in_conditional = tokens_as_str.contains("ifdef")
1106 || tokens_as_str.contains("ifndef")
1107 || tokens_as_str.contains("ifeq")
1108 || tokens_as_str.contains("ifneq")
1109 || tokens_as_str.contains("else")
1110 || tokens_as_str.contains("endif");
1111
1112 if is_in_test && !in_conditional {
1113 self.error("indented line not part of a rule".to_string());
1114 }
1115 }
1116
1117 self.bump();
1119
1120 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
1122 self.bump();
1123 }
1124 if self.current() == Some(NEWLINE) {
1125 self.bump();
1126 }
1127 true
1128 }
1129 Some(kind) => {
1130 self.error(format!("unexpected token {:?}", kind));
1131 self.bump();
1132 true
1133 }
1134 }
1135 }
1136
1137 fn parse(mut self) -> Parse {
1138 self.builder.start_node(ROOT.into());
1139
1140 while self.parse_token() {}
1141
1142 self.builder.finish_node();
1143
1144 Parse {
1145 green_node: self.builder.finish(),
1146 errors: self.errors,
1147 }
1148 }
1149
1150 fn is_assignment_line(&mut self) -> bool {
1152 let assignment_ops = ["=", ":=", "::=", ":::=", "+=", "?=", "!="];
1153 let mut pos = self.tokens.len().saturating_sub(1);
1154 let mut seen_identifier = false;
1155 let mut seen_export = false;
1156
1157 while pos > 0 {
1158 let (kind, text) = &self.tokens[pos];
1159
1160 match kind {
1161 NEWLINE => break,
1162 IDENTIFIER if text == "export" => seen_export = true,
1163 IDENTIFIER if !seen_identifier => seen_identifier = true,
1164 OPERATOR if assignment_ops.contains(&text.as_str()) => {
1165 return seen_identifier || seen_export
1166 }
1167 OPERATOR if text == ":" => return false, WHITESPACE => (),
1169 _ if seen_export => return true, _ => return false,
1171 }
1172 pos = pos.saturating_sub(1);
1173 }
1174 false
1175 }
1176
1177 fn bump(&mut self) {
1179 let (kind, text) = self.tokens.pop().unwrap();
1180 self.builder.token(kind.into(), text.as_str());
1181 }
1182 fn current(&self) -> Option<SyntaxKind> {
1184 self.tokens.last().map(|(kind, _)| *kind)
1185 }
1186
1187 fn expect_eol(&mut self) {
1188 self.skip_ws();
1190
1191 match self.current() {
1192 Some(NEWLINE) => {
1193 self.bump();
1194 }
1195 None => {
1196 }
1198 n => {
1199 self.error(format!("expected newline, got {:?}", n));
1200 self.skip_until_newline();
1202 }
1203 }
1204 }
1205
1206 fn is_at_eof(&self) -> bool {
1208 self.current().is_none()
1209 }
1210
1211 fn is_at_eof_or_only_whitespace(&self) -> bool {
1213 if self.is_at_eof() {
1214 return true;
1215 }
1216
1217 self.tokens
1219 .iter()
1220 .rev()
1221 .all(|(kind, _)| matches!(*kind, WHITESPACE | NEWLINE))
1222 }
1223
1224 fn skip_ws(&mut self) {
1225 while self.current() == Some(WHITESPACE) {
1226 self.bump()
1227 }
1228 }
1229
1230 fn skip_until_newline(&mut self) {
1231 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
1232 self.bump();
1233 }
1234 if self.current() == Some(NEWLINE) {
1235 self.bump();
1236 }
1237 }
1238
1239 fn consume_balanced_parens(&mut self, start_paren_count: usize) -> usize {
1241 let mut paren_count = start_paren_count;
1242
1243 while paren_count > 0 && self.current().is_some() {
1244 match self.current() {
1245 Some(LPAREN) => {
1246 paren_count += 1;
1247 self.bump();
1248 }
1249 Some(RPAREN) => {
1250 paren_count -= 1;
1251 self.bump();
1252 if paren_count == 0 {
1253 break;
1254 }
1255 }
1256 Some(DOLLAR) => {
1257 self.parse_variable_reference();
1259 }
1260 Some(_) => self.bump(),
1261 None => {
1262 self.error("unclosed parenthesis".to_string());
1263 break;
1264 }
1265 }
1266 }
1267
1268 paren_count
1269 }
1270
1271 fn is_end_of_file_or_newline_after_whitespace(&self) -> bool {
1273 if self.is_at_eof_or_only_whitespace() {
1275 return true;
1276 }
1277
1278 if self.tokens.len() <= 1 {
1280 return true;
1281 }
1282
1283 false
1284 }
1285
1286 #[cfg(test)]
1288 fn is_in_test_environment(&self) -> bool {
1289 self.original_text.lines().count() < 20
1292 }
1293 }
1294
1295 let mut tokens = lex(text);
1296 tokens.reverse();
1297 Parser {
1298 tokens,
1299 builder: GreenNodeBuilder::new(),
1300 errors: Vec::new(),
1301 original_text: text.to_string(),
1302 }
1303 .parse()
1304}
1305
1306type SyntaxNode = rowan::SyntaxNode<Lang>;
1312#[allow(unused)]
1313type SyntaxToken = rowan::SyntaxToken<Lang>;
1314#[allow(unused)]
1315type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>;
1316
1317impl Parse {
1318 fn syntax(&self) -> SyntaxNode {
1319 SyntaxNode::new_root_mut(self.green_node.clone())
1320 }
1321
1322 fn root(&self) -> Makefile {
1323 Makefile::cast(self.syntax()).unwrap()
1324 }
1325}
1326
1327macro_rules! ast_node {
1328 ($ast:ident, $kind:ident) => {
1329 #[derive(PartialEq, Eq, Hash)]
1330 #[repr(transparent)]
1331 pub struct $ast(SyntaxNode);
1333
1334 impl AstNode for $ast {
1335 type Language = Lang;
1336
1337 fn can_cast(kind: SyntaxKind) -> bool {
1338 kind == $kind
1339 }
1340
1341 fn cast(syntax: SyntaxNode) -> Option<Self> {
1342 if Self::can_cast(syntax.kind()) {
1343 Some(Self(syntax))
1344 } else {
1345 None
1346 }
1347 }
1348
1349 fn syntax(&self) -> &SyntaxNode {
1350 &self.0
1351 }
1352 }
1353
1354 impl core::fmt::Display for $ast {
1355 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
1356 write!(f, "{}", self.0.text())
1357 }
1358 }
1359 };
1360}
1361
1362ast_node!(Makefile, ROOT);
1363ast_node!(Rule, RULE);
1364ast_node!(Identifier, IDENTIFIER);
1365ast_node!(VariableDefinition, VARIABLE);
1366ast_node!(Include, INCLUDE);
1367ast_node!(ArchiveMembers, ARCHIVE_MEMBERS);
1368ast_node!(ArchiveMember, ARCHIVE_MEMBER);
1369
1370impl ArchiveMembers {
1371 pub fn archive_name(&self) -> Option<String> {
1373 for element in self.syntax().children_with_tokens() {
1375 if let Some(token) = element.as_token() {
1376 if token.kind() == IDENTIFIER {
1377 return Some(token.text().to_string());
1378 } else if token.kind() == LPAREN {
1379 break;
1381 }
1382 }
1383 }
1384 None
1385 }
1386
1387 pub fn members(&self) -> impl Iterator<Item = ArchiveMember> + '_ {
1389 self.syntax().children().filter_map(ArchiveMember::cast)
1390 }
1391
1392 pub fn member_names(&self) -> Vec<String> {
1394 self.members().map(|m| m.text()).collect()
1395 }
1396}
1397
1398impl ArchiveMember {
1399 pub fn text(&self) -> String {
1401 self.syntax().text().to_string().trim().to_string()
1402 }
1403}
1404
1405fn remove_with_preceding_comments(node: &SyntaxNode, parent: &SyntaxNode) {
1413 let mut collected_elements = vec![];
1414 let mut found_comment = false;
1415
1416 let mut current = node.prev_sibling_or_token();
1418 while let Some(element) = current {
1419 match &element {
1420 rowan::NodeOrToken::Token(token) => match token.kind() {
1421 COMMENT => {
1422 if token.text().starts_with("#!") {
1423 break; }
1425 found_comment = true;
1426 collected_elements.push(element.clone());
1427 }
1428 NEWLINE | WHITESPACE => {
1429 collected_elements.push(element.clone());
1430 }
1431 _ => break, },
1433 rowan::NodeOrToken::Node(_) => break, }
1435 current = element.prev_sibling_or_token();
1436 }
1437
1438 let node_index = node.index();
1440 parent.splice_children(node_index..node_index + 1, vec![]);
1441
1442 if found_comment {
1444 let mut consecutive_newlines = 0;
1445 for element in collected_elements.iter().rev() {
1446 let should_remove = match element {
1447 rowan::NodeOrToken::Token(token) => match token.kind() {
1448 COMMENT => {
1449 consecutive_newlines = 0;
1450 true
1451 }
1452 NEWLINE => {
1453 consecutive_newlines += 1;
1454 consecutive_newlines <= 1
1455 }
1456 WHITESPACE => true,
1457 _ => false,
1458 },
1459 _ => false,
1460 };
1461
1462 if should_remove {
1463 let idx = element.index();
1464 parent.splice_children(idx..idx + 1, vec![]);
1465 }
1466 }
1467 }
1468}
1469
1470impl VariableDefinition {
1471 pub fn name(&self) -> Option<String> {
1473 self.syntax().children_with_tokens().find_map(|it| {
1474 it.as_token().and_then(|it| {
1475 if it.kind() == IDENTIFIER && it.text() != "export" {
1476 Some(it.text().to_string())
1477 } else {
1478 None
1479 }
1480 })
1481 })
1482 }
1483
1484 pub fn is_export(&self) -> bool {
1486 self.syntax()
1487 .children_with_tokens()
1488 .any(|it| it.as_token().is_some_and(|token| token.text() == "export"))
1489 }
1490
1491 pub fn raw_value(&self) -> Option<String> {
1493 self.syntax()
1494 .children()
1495 .find(|it| it.kind() == EXPR)
1496 .map(|it| it.text().into())
1497 }
1498
1499 pub fn remove(&mut self) {
1512 if let Some(parent) = self.syntax().parent() {
1513 remove_with_preceding_comments(self.syntax(), &parent);
1514 }
1515 }
1516
1517 pub fn set_value(&mut self, new_value: &str) {
1530 let expr_index = self
1532 .syntax()
1533 .children()
1534 .find(|it| it.kind() == EXPR)
1535 .map(|it| it.index());
1536
1537 if let Some(expr_idx) = expr_index {
1538 let mut builder = GreenNodeBuilder::new();
1540 builder.start_node(EXPR.into());
1541 builder.token(IDENTIFIER.into(), new_value);
1542 builder.finish_node();
1543
1544 let new_expr = SyntaxNode::new_root_mut(builder.finish());
1545
1546 self.0
1548 .splice_children(expr_idx..expr_idx + 1, vec![new_expr.into()]);
1549 }
1550 }
1551}
1552
1553impl Makefile {
1554 pub fn new() -> Makefile {
1556 let mut builder = GreenNodeBuilder::new();
1557
1558 builder.start_node(ROOT.into());
1559 builder.finish_node();
1560
1561 let syntax = SyntaxNode::new_root_mut(builder.finish());
1562 Makefile(syntax)
1563 }
1564
1565 pub fn parse(text: &str) -> crate::Parse<Makefile> {
1567 crate::Parse::<Makefile>::parse_makefile(text)
1568 }
1569
1570 pub fn code(&self) -> String {
1572 self.syntax().text().to_string()
1573 }
1574
1575 pub fn is_root(&self) -> bool {
1577 self.syntax().kind() == ROOT
1578 }
1579
1580 pub fn read<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1582 let mut buf = String::new();
1583 r.read_to_string(&mut buf)?;
1584 buf.parse()
1585 }
1586
1587 pub fn read_relaxed<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1589 let mut buf = String::new();
1590 r.read_to_string(&mut buf)?;
1591
1592 let parsed = parse(&buf);
1593 Ok(parsed.root())
1594 }
1595
1596 pub fn rules(&self) -> impl Iterator<Item = Rule> + '_ {
1605 self.syntax().children().filter_map(Rule::cast)
1606 }
1607
1608 pub fn rules_by_target<'a>(&'a self, target: &'a str) -> impl Iterator<Item = Rule> + 'a {
1610 self.rules()
1611 .filter(move |rule| rule.targets().any(|t| t == target))
1612 }
1613
1614 pub fn variable_definitions(&self) -> impl Iterator<Item = VariableDefinition> {
1616 self.syntax()
1617 .children()
1618 .filter_map(VariableDefinition::cast)
1619 }
1620
1621 pub fn find_variable<'a>(
1636 &'a self,
1637 name: &'a str,
1638 ) -> impl Iterator<Item = VariableDefinition> + 'a {
1639 self.variable_definitions()
1640 .filter(move |var| var.name().as_deref() == Some(name))
1641 }
1642
1643 pub fn add_rule(&mut self, target: &str) -> Rule {
1653 let mut builder = GreenNodeBuilder::new();
1654 builder.start_node(RULE.into());
1655 builder.token(IDENTIFIER.into(), target);
1656 builder.token(OPERATOR.into(), ":");
1657 builder.token(NEWLINE.into(), "\n");
1658 builder.finish_node();
1659
1660 let syntax = SyntaxNode::new_root_mut(builder.finish());
1661 let pos = self.0.children_with_tokens().count();
1662 self.0.splice_children(pos..pos, vec![syntax.into()]);
1663 Rule(self.0.children().nth(pos).unwrap())
1664 }
1665
1666 pub fn from_reader<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1668 let mut buf = String::new();
1669 r.read_to_string(&mut buf)?;
1670
1671 let parsed = parse(&buf);
1672 if !parsed.errors.is_empty() {
1673 Err(Error::Parse(ParseError {
1674 errors: parsed.errors,
1675 }))
1676 } else {
1677 Ok(parsed.root())
1678 }
1679 }
1680
1681 pub fn replace_rule(&mut self, index: usize, new_rule: Rule) -> Result<(), Error> {
1692 let rules: Vec<_> = self.0.children().filter(|n| n.kind() == RULE).collect();
1693
1694 if rules.is_empty() {
1695 return Err(Error::Parse(ParseError {
1696 errors: vec![ErrorInfo {
1697 message: "Cannot replace rule in empty makefile".to_string(),
1698 line: 1,
1699 context: "replace_rule".to_string(),
1700 }],
1701 }));
1702 }
1703
1704 if index >= rules.len() {
1705 return Err(Error::Parse(ParseError {
1706 errors: vec![ErrorInfo {
1707 message: format!(
1708 "Rule index {} out of bounds (max {})",
1709 index,
1710 rules.len() - 1
1711 ),
1712 line: 1,
1713 context: "replace_rule".to_string(),
1714 }],
1715 }));
1716 }
1717
1718 let target_node = &rules[index];
1719 let target_index = target_node.index();
1720
1721 self.0.splice_children(
1723 target_index..target_index + 1,
1724 vec![new_rule.0.clone().into()],
1725 );
1726 Ok(())
1727 }
1728
1729 pub fn remove_rule(&mut self, index: usize) -> Result<Rule, Error> {
1740 let rules: Vec<_> = self.0.children().filter(|n| n.kind() == RULE).collect();
1741
1742 if rules.is_empty() {
1743 return Err(Error::Parse(ParseError {
1744 errors: vec![ErrorInfo {
1745 message: "Cannot remove rule from empty makefile".to_string(),
1746 line: 1,
1747 context: "remove_rule".to_string(),
1748 }],
1749 }));
1750 }
1751
1752 if index >= rules.len() {
1753 return Err(Error::Parse(ParseError {
1754 errors: vec![ErrorInfo {
1755 message: format!(
1756 "Rule index {} out of bounds (max {})",
1757 index,
1758 rules.len() - 1
1759 ),
1760 line: 1,
1761 context: "remove_rule".to_string(),
1762 }],
1763 }));
1764 }
1765
1766 let target_node = rules[index].clone();
1767 let target_index = target_node.index();
1768
1769 self.0
1771 .splice_children(target_index..target_index + 1, vec![]);
1772 Ok(Rule(target_node))
1773 }
1774
1775 pub fn insert_rule(&mut self, index: usize, new_rule: Rule) -> Result<(), Error> {
1787 let rules: Vec<_> = self.0.children().filter(|n| n.kind() == RULE).collect();
1788
1789 if index > rules.len() {
1790 return Err(Error::Parse(ParseError {
1791 errors: vec![ErrorInfo {
1792 message: format!("Rule index {} out of bounds (max {})", index, rules.len()),
1793 line: 1,
1794 context: "insert_rule".to_string(),
1795 }],
1796 }));
1797 }
1798
1799 let target_index = if index == rules.len() {
1800 self.0.children_with_tokens().count()
1802 } else {
1803 rules[index].index()
1805 };
1806
1807 self.0
1809 .splice_children(target_index..target_index, vec![new_rule.0.clone().into()]);
1810 Ok(())
1811 }
1812
1813 pub fn includes(&self) -> impl Iterator<Item = Include> {
1823 self.syntax().children().filter_map(Include::cast)
1824 }
1825
1826 pub fn included_files(&self) -> impl Iterator<Item = String> + '_ {
1836 fn collect_includes(node: &SyntaxNode) -> Vec<Include> {
1839 let mut includes = Vec::new();
1840
1841 if let Some(include) = Include::cast(node.clone()) {
1843 includes.push(include);
1844 }
1845
1846 for child in node.children() {
1848 includes.extend(collect_includes(&child));
1849 }
1850
1851 includes
1852 }
1853
1854 let includes = collect_includes(self.syntax());
1856
1857 includes.into_iter().map(|include| {
1859 include
1860 .syntax()
1861 .children()
1862 .find(|node| node.kind() == EXPR)
1863 .map(|expr| expr.text().to_string().trim().to_string())
1864 .unwrap_or_default()
1865 })
1866 }
1867
1868 pub fn find_rule_by_target(&self, target: &str) -> Option<Rule> {
1879 self.rules()
1880 .find(|rule| rule.targets().any(|t| t == target))
1881 }
1882
1883 pub fn find_rules_by_target<'a>(&'a self, target: &'a str) -> impl Iterator<Item = Rule> + 'a {
1893 self.rules_by_target(target)
1894 }
1895
1896 pub fn add_phony_target(&mut self, target: &str) -> Result<(), Error> {
1906 if let Some(mut phony_rule) = self.find_rule_by_target(".PHONY") {
1908 if !phony_rule.prerequisites().any(|p| p == target) {
1910 phony_rule.add_prerequisite(target)?;
1911 }
1912 } else {
1913 let mut phony_rule = self.add_rule(".PHONY");
1915 phony_rule.add_prerequisite(target)?;
1916 }
1917 Ok(())
1918 }
1919
1920 pub fn remove_phony_target(&mut self, target: &str) -> Result<bool, Error> {
1934 let mut phony_rule = None;
1936 for rule in self.rules_by_target(".PHONY") {
1937 if rule.prerequisites().any(|p| p == target) {
1938 phony_rule = Some(rule);
1939 break;
1940 }
1941 }
1942
1943 let mut phony_rule = match phony_rule {
1944 Some(rule) => rule,
1945 None => return Ok(false),
1946 };
1947
1948 let prereq_count = phony_rule.prerequisites().count();
1950
1951 phony_rule.remove_prerequisite(target)?;
1953
1954 if prereq_count == 1 {
1956 phony_rule.remove()?;
1958 }
1959
1960 Ok(true)
1961 }
1962
1963 pub fn is_phony(&self, target: &str) -> bool {
1974 self.rules_by_target(".PHONY")
1976 .any(|rule| rule.prerequisites().any(|p| p == target))
1977 }
1978
1979 pub fn phony_targets(&self) -> impl Iterator<Item = String> + '_ {
1989 self.rules_by_target(".PHONY")
1991 .flat_map(|rule| rule.prerequisites().collect::<Vec<_>>())
1992 }
1993}
1994
1995impl FromStr for Rule {
1996 type Err = crate::Error;
1997
1998 fn from_str(s: &str) -> Result<Self, Self::Err> {
1999 Rule::parse(s).to_rule_result()
2000 }
2001}
2002
2003impl FromStr for Makefile {
2004 type Err = crate::Error;
2005
2006 fn from_str(s: &str) -> Result<Self, Self::Err> {
2007 Makefile::parse(s).to_result()
2008 }
2009}
2010
2011fn build_prerequisites_node(prereqs: &[String]) -> SyntaxNode {
2013 let mut builder = GreenNodeBuilder::new();
2014 builder.start_node(PREREQUISITES.into());
2015
2016 for (i, prereq) in prereqs.iter().enumerate() {
2017 if i > 0 {
2018 builder.token(WHITESPACE.into(), " ");
2019 }
2020
2021 builder.start_node(PREREQUISITE.into());
2023 builder.token(IDENTIFIER.into(), prereq);
2024 builder.finish_node();
2025 }
2026
2027 builder.finish_node();
2028 SyntaxNode::new_root_mut(builder.finish())
2029}
2030
2031fn build_targets_node(targets: &[String]) -> SyntaxNode {
2033 let mut builder = GreenNodeBuilder::new();
2034 builder.start_node(TARGETS.into());
2035
2036 for (i, target) in targets.iter().enumerate() {
2037 if i > 0 {
2038 builder.token(WHITESPACE.into(), " ");
2039 }
2040 builder.token(IDENTIFIER.into(), target);
2041 }
2042
2043 builder.finish_node();
2044 SyntaxNode::new_root_mut(builder.finish())
2045}
2046
2047impl Rule {
2048 pub fn parse(text: &str) -> crate::Parse<Rule> {
2050 crate::Parse::<Rule>::parse_rule(text)
2051 }
2052
2053 fn collect_variable_reference(
2055 &self,
2056 tokens: &mut std::iter::Peekable<impl Iterator<Item = SyntaxElement>>,
2057 ) -> Option<String> {
2058 let mut var_ref = String::new();
2059
2060 if let Some(token) = tokens.next() {
2062 if let Some(t) = token.as_token() {
2063 if t.kind() == DOLLAR {
2064 var_ref.push_str(t.text());
2065
2066 if let Some(next) = tokens.peek() {
2068 if let Some(nt) = next.as_token() {
2069 if nt.kind() == LPAREN {
2070 var_ref.push_str(nt.text());
2072 tokens.next();
2073
2074 let mut paren_count = 1;
2076
2077 for next_token in tokens.by_ref() {
2079 if let Some(nt) = next_token.as_token() {
2080 var_ref.push_str(nt.text());
2081
2082 if nt.kind() == LPAREN {
2083 paren_count += 1;
2084 } else if nt.kind() == RPAREN {
2085 paren_count -= 1;
2086 if paren_count == 0 {
2087 break;
2088 }
2089 }
2090 }
2091 }
2092
2093 return Some(var_ref);
2094 }
2095 }
2096 }
2097
2098 for next_token in tokens.by_ref() {
2100 if let Some(nt) = next_token.as_token() {
2101 var_ref.push_str(nt.text());
2102 if nt.kind() == RPAREN {
2103 break;
2104 }
2105 }
2106 }
2107 return Some(var_ref);
2108 }
2109 }
2110 }
2111
2112 None
2113 }
2114
2115 fn extract_targets_from_node(node: &SyntaxNode) -> Vec<String> {
2117 let mut result = Vec::new();
2118 let mut current_target = String::new();
2119 let mut in_parens = 0;
2120
2121 for child in node.children_with_tokens() {
2122 if let Some(token) = child.as_token() {
2123 match token.kind() {
2124 IDENTIFIER => {
2125 current_target.push_str(token.text());
2126 }
2127 WHITESPACE => {
2128 if in_parens == 0 && !current_target.is_empty() {
2130 result.push(current_target.clone());
2131 current_target.clear();
2132 } else if in_parens > 0 {
2133 current_target.push_str(token.text());
2134 }
2135 }
2136 LPAREN => {
2137 in_parens += 1;
2138 current_target.push_str(token.text());
2139 }
2140 RPAREN => {
2141 in_parens -= 1;
2142 current_target.push_str(token.text());
2143 }
2144 DOLLAR => {
2145 current_target.push_str(token.text());
2146 }
2147 _ => {
2148 current_target.push_str(token.text());
2149 }
2150 }
2151 } else if let Some(child_node) = child.as_node() {
2152 current_target.push_str(&child_node.text().to_string());
2154 }
2155 }
2156
2157 if !current_target.is_empty() {
2159 result.push(current_target);
2160 }
2161
2162 result
2163 }
2164
2165 pub fn targets(&self) -> impl Iterator<Item = String> + '_ {
2175 for child in self.syntax().children_with_tokens() {
2177 if let Some(node) = child.as_node() {
2178 if node.kind() == TARGETS {
2179 return Self::extract_targets_from_node(node).into_iter();
2181 }
2182 }
2183 if let Some(token) = child.as_token() {
2185 if token.kind() == OPERATOR {
2186 break;
2187 }
2188 }
2189 }
2190
2191 let mut result = Vec::new();
2193 let mut tokens = self
2194 .syntax()
2195 .children_with_tokens()
2196 .take_while(|it| it.as_token().map(|t| t.kind() != OPERATOR).unwrap_or(true))
2197 .peekable();
2198
2199 while let Some(token) = tokens.peek().cloned() {
2200 if let Some(node) = token.as_node() {
2201 tokens.next(); if node.kind() == EXPR {
2203 let mut var_content = String::new();
2205 for child in node.children_with_tokens() {
2206 if let Some(t) = child.as_token() {
2207 var_content.push_str(t.text());
2208 }
2209 }
2210 if !var_content.is_empty() {
2211 result.push(var_content);
2212 }
2213 }
2214 } else if let Some(t) = token.as_token() {
2215 if t.kind() == DOLLAR {
2216 if let Some(var_ref) = self.collect_variable_reference(&mut tokens) {
2217 result.push(var_ref);
2218 }
2219 } else if t.kind() == IDENTIFIER {
2220 let ident_text = t.text().to_string();
2222 tokens.next(); if let Some(next) = tokens.peek() {
2226 if let Some(next_token) = next.as_token() {
2227 if next_token.kind() == LPAREN {
2228 let mut archive_target = ident_text;
2230 archive_target.push_str(next_token.text()); tokens.next(); while let Some(token) = tokens.peek() {
2235 if let Some(node) = token.as_node() {
2236 if node.kind() == ARCHIVE_MEMBERS {
2237 archive_target.push_str(&node.text().to_string());
2238 tokens.next();
2239 } else {
2240 tokens.next();
2241 }
2242 } else if let Some(t) = token.as_token() {
2243 if t.kind() == RPAREN {
2244 archive_target.push_str(t.text());
2245 tokens.next();
2246 break;
2247 } else {
2248 tokens.next();
2249 }
2250 } else {
2251 break;
2252 }
2253 }
2254 result.push(archive_target);
2255 } else {
2256 result.push(ident_text);
2258 }
2259 } else {
2260 result.push(ident_text);
2262 }
2263 } else {
2264 result.push(ident_text);
2266 }
2267 } else {
2268 tokens.next(); }
2270 }
2271 }
2272 result.into_iter()
2273 }
2274
2275 pub fn prerequisites(&self) -> impl Iterator<Item = String> + '_ {
2284 let mut found_operator = false;
2286 let mut prerequisites_node = None;
2287
2288 for element in self.syntax().children_with_tokens() {
2289 if let Some(token) = element.as_token() {
2290 if token.kind() == OPERATOR {
2291 found_operator = true;
2292 }
2293 } else if let Some(node) = element.as_node() {
2294 if found_operator && node.kind() == PREREQUISITES {
2295 prerequisites_node = Some(node.clone());
2296 break;
2297 }
2298 }
2299 }
2300
2301 let result: Vec<String> = if let Some(prereqs) = prerequisites_node {
2302 prereqs
2304 .children()
2305 .filter(|child| child.kind() == PREREQUISITE)
2306 .map(|child| child.text().to_string().trim().to_string())
2307 .collect()
2308 } else {
2309 Vec::new()
2310 };
2311
2312 result.into_iter()
2313 }
2314
2315 pub fn recipes(&self) -> impl Iterator<Item = String> {
2324 self.syntax()
2325 .children()
2326 .filter(|it| it.kind() == RECIPE)
2327 .flat_map(|it| {
2328 it.children_with_tokens().filter_map(|it| {
2329 it.as_token().and_then(|t| {
2330 if t.kind() == TEXT {
2331 Some(t.text().to_string())
2332 } else {
2333 None
2334 }
2335 })
2336 })
2337 })
2338 }
2339
2340 pub fn replace_command(&mut self, i: usize, line: &str) -> bool {
2350 let index = self
2352 .syntax()
2353 .children()
2354 .filter(|it| it.kind() == RECIPE)
2355 .nth(i);
2356
2357 let index = match index {
2358 Some(node) => node.index(),
2359 None => return false,
2360 };
2361
2362 let mut builder = GreenNodeBuilder::new();
2363 builder.start_node(RECIPE.into());
2364 builder.token(INDENT.into(), "\t");
2365 builder.token(TEXT.into(), line);
2366 builder.token(NEWLINE.into(), "\n");
2367 builder.finish_node();
2368
2369 let syntax = SyntaxNode::new_root_mut(builder.finish());
2370
2371 self.0
2372 .splice_children(index..index + 1, vec![syntax.into()]);
2373
2374 true
2375 }
2376
2377 pub fn push_command(&mut self, line: &str) {
2387 let index = self
2389 .0
2390 .children_with_tokens()
2391 .filter(|it| it.kind() == RECIPE)
2392 .last();
2393
2394 let index = index.map_or_else(
2395 || self.0.children_with_tokens().count(),
2396 |it| it.index() + 1,
2397 );
2398
2399 let mut builder = GreenNodeBuilder::new();
2400 builder.start_node(RECIPE.into());
2401 builder.token(INDENT.into(), "\t");
2402 builder.token(TEXT.into(), line);
2403 builder.token(NEWLINE.into(), "\n");
2404 builder.finish_node();
2405 let syntax = SyntaxNode::new_root_mut(builder.finish());
2406
2407 self.0.splice_children(index..index, vec![syntax.into()]);
2408 }
2409
2410 pub fn remove_command(&mut self, index: usize) -> bool {
2420 let recipes: Vec<_> = self
2421 .syntax()
2422 .children()
2423 .filter(|n| n.kind() == RECIPE)
2424 .collect();
2425
2426 if index >= recipes.len() {
2427 return false;
2428 }
2429
2430 let target_node = &recipes[index];
2431 let target_index = target_node.index();
2432
2433 self.0
2434 .splice_children(target_index..target_index + 1, vec![]);
2435 true
2436 }
2437
2438 pub fn insert_command(&mut self, index: usize, line: &str) -> bool {
2449 let recipes: Vec<_> = self
2450 .syntax()
2451 .children()
2452 .filter(|n| n.kind() == RECIPE)
2453 .collect();
2454
2455 if index > recipes.len() {
2456 return false;
2457 }
2458
2459 let target_index = if index == recipes.len() {
2460 recipes.last().map(|n| n.index() + 1).unwrap_or_else(|| {
2462 self.0.children_with_tokens().count()
2464 })
2465 } else {
2466 recipes[index].index()
2468 };
2469
2470 let mut builder = GreenNodeBuilder::new();
2471 builder.start_node(RECIPE.into());
2472 builder.token(INDENT.into(), "\t");
2473 builder.token(TEXT.into(), line);
2474 builder.token(NEWLINE.into(), "\n");
2475 builder.finish_node();
2476 let syntax = SyntaxNode::new_root_mut(builder.finish());
2477
2478 self.0
2479 .splice_children(target_index..target_index, vec![syntax.into()]);
2480 true
2481 }
2482
2483 pub fn recipe_count(&self) -> usize {
2492 self.syntax()
2493 .children()
2494 .filter(|n| n.kind() == RECIPE)
2495 .count()
2496 }
2497
2498 pub fn clear_commands(&mut self) {
2508 let recipes: Vec<_> = self
2509 .syntax()
2510 .children()
2511 .filter(|n| n.kind() == RECIPE)
2512 .collect();
2513
2514 if recipes.is_empty() {
2515 return;
2516 }
2517
2518 for recipe in recipes.iter().rev() {
2520 let index = recipe.index();
2521 self.0.splice_children(index..index + 1, vec![]);
2522 }
2523 }
2524
2525 pub fn remove_prerequisite(&mut self, target: &str) -> Result<bool, Error> {
2538 let mut found_operator = false;
2540 let mut prereqs_node = None;
2541
2542 for child in self.syntax().children_with_tokens() {
2543 if let Some(token) = child.as_token() {
2544 if token.kind() == OPERATOR {
2545 found_operator = true;
2546 }
2547 } else if let Some(node) = child.as_node() {
2548 if found_operator && node.kind() == PREREQUISITES {
2549 prereqs_node = Some(node.clone());
2550 break;
2551 }
2552 }
2553 }
2554
2555 let prereqs_node = match prereqs_node {
2556 Some(node) => node,
2557 None => return Ok(false), };
2559
2560 let current_prereqs: Vec<String> = self.prerequisites().collect();
2562
2563 if !current_prereqs.iter().any(|p| p == target) {
2565 return Ok(false);
2566 }
2567
2568 let new_prereqs: Vec<String> = current_prereqs
2570 .into_iter()
2571 .filter(|p| p != target)
2572 .collect();
2573
2574 let prereqs_index = prereqs_node.index();
2576 let new_prereqs_node = build_prerequisites_node(&new_prereqs);
2577
2578 self.0.splice_children(
2579 prereqs_index..prereqs_index + 1,
2580 vec![new_prereqs_node.into()],
2581 );
2582
2583 Ok(true)
2584 }
2585
2586 pub fn add_prerequisite(&mut self, target: &str) -> Result<(), Error> {
2596 let mut current_prereqs: Vec<String> = self.prerequisites().collect();
2597 current_prereqs.push(target.to_string());
2598 self.set_prerequisites(current_prereqs.iter().map(|s| s.as_str()).collect())
2599 }
2600
2601 pub fn set_prerequisites(&mut self, prereqs: Vec<&str>) -> Result<(), Error> {
2611 let mut prereqs_index = None;
2613 let mut operator_found = false;
2614
2615 for child in self.syntax().children_with_tokens() {
2616 if let Some(token) = child.as_token() {
2617 if token.kind() == OPERATOR {
2618 operator_found = true;
2619 }
2620 } else if let Some(node) = child.as_node() {
2621 if operator_found && node.kind() == PREREQUISITES {
2622 prereqs_index = Some((node.index(), true)); break;
2624 }
2625 }
2626 }
2627
2628 let new_prereqs =
2630 build_prerequisites_node(&prereqs.iter().map(|s| s.to_string()).collect::<Vec<_>>());
2631
2632 match prereqs_index {
2633 Some((idx, true)) => {
2634 self.0
2636 .splice_children(idx..idx + 1, vec![new_prereqs.into()]);
2637 }
2638 _ => {
2639 let insert_pos = self
2641 .syntax()
2642 .children_with_tokens()
2643 .position(|t| t.as_token().map(|t| t.kind() == OPERATOR).unwrap_or(false))
2644 .map(|p| p + 1)
2645 .ok_or_else(|| {
2646 Error::Parse(ParseError {
2647 errors: vec![ErrorInfo {
2648 message: "No operator found in rule".to_string(),
2649 line: 1,
2650 context: "set_prerequisites".to_string(),
2651 }],
2652 })
2653 })?;
2654
2655 self.0
2656 .splice_children(insert_pos..insert_pos, vec![new_prereqs.into()]);
2657 }
2658 }
2659
2660 Ok(())
2661 }
2662
2663 pub fn rename_target(&mut self, old_name: &str, new_name: &str) -> Result<bool, Error> {
2675 let current_targets: Vec<String> = self.targets().collect();
2677
2678 if !current_targets.iter().any(|t| t == old_name) {
2680 return Ok(false);
2681 }
2682
2683 let new_targets: Vec<String> = current_targets
2685 .into_iter()
2686 .map(|t| {
2687 if t == old_name {
2688 new_name.to_string()
2689 } else {
2690 t
2691 }
2692 })
2693 .collect();
2694
2695 let mut targets_index = None;
2697 for (idx, child) in self.syntax().children_with_tokens().enumerate() {
2698 if let Some(node) = child.as_node() {
2699 if node.kind() == TARGETS {
2700 targets_index = Some(idx);
2701 break;
2702 }
2703 }
2704 }
2705
2706 let targets_index = targets_index.ok_or_else(|| {
2707 Error::Parse(ParseError {
2708 errors: vec![ErrorInfo {
2709 message: "No TARGETS node found in rule".to_string(),
2710 line: 1,
2711 context: "rename_target".to_string(),
2712 }],
2713 })
2714 })?;
2715
2716 let new_targets_node = build_targets_node(&new_targets);
2718
2719 self.0.splice_children(
2721 targets_index..targets_index + 1,
2722 vec![new_targets_node.into()],
2723 );
2724
2725 Ok(true)
2726 }
2727
2728 pub fn add_target(&mut self, target: &str) -> Result<(), Error> {
2738 let mut current_targets: Vec<String> = self.targets().collect();
2739 current_targets.push(target.to_string());
2740 self.set_targets(current_targets.iter().map(|s| s.as_str()).collect())
2741 }
2742
2743 pub fn set_targets(&mut self, targets: Vec<&str>) -> Result<(), Error> {
2755 if targets.is_empty() {
2757 return Err(Error::Parse(ParseError {
2758 errors: vec![ErrorInfo {
2759 message: "Cannot set empty targets list for a rule".to_string(),
2760 line: 1,
2761 context: "set_targets".to_string(),
2762 }],
2763 }));
2764 }
2765
2766 let mut targets_index = None;
2768 for (idx, child) in self.syntax().children_with_tokens().enumerate() {
2769 if let Some(node) = child.as_node() {
2770 if node.kind() == TARGETS {
2771 targets_index = Some(idx);
2772 break;
2773 }
2774 }
2775 }
2776
2777 let targets_index = targets_index.ok_or_else(|| {
2778 Error::Parse(ParseError {
2779 errors: vec![ErrorInfo {
2780 message: "No TARGETS node found in rule".to_string(),
2781 line: 1,
2782 context: "set_targets".to_string(),
2783 }],
2784 })
2785 })?;
2786
2787 let new_targets_node =
2789 build_targets_node(&targets.iter().map(|s| s.to_string()).collect::<Vec<_>>());
2790
2791 self.0.splice_children(
2793 targets_index..targets_index + 1,
2794 vec![new_targets_node.into()],
2795 );
2796
2797 Ok(())
2798 }
2799
2800 pub fn has_target(&self, target: &str) -> bool {
2811 self.targets().any(|t| t == target)
2812 }
2813
2814 pub fn remove_target(&mut self, target_name: &str) -> Result<bool, Error> {
2827 let current_targets: Vec<String> = self.targets().collect();
2829
2830 if !current_targets.iter().any(|t| t == target_name) {
2832 return Ok(false);
2833 }
2834
2835 let new_targets: Vec<String> = current_targets
2837 .into_iter()
2838 .filter(|t| t != target_name)
2839 .collect();
2840
2841 if new_targets.is_empty() {
2843 return Err(Error::Parse(ParseError {
2844 errors: vec![ErrorInfo {
2845 message: "Cannot remove all targets from a rule".to_string(),
2846 line: 1,
2847 context: "remove_target".to_string(),
2848 }],
2849 }));
2850 }
2851
2852 let mut targets_index = None;
2854 for (idx, child) in self.syntax().children_with_tokens().enumerate() {
2855 if let Some(node) = child.as_node() {
2856 if node.kind() == TARGETS {
2857 targets_index = Some(idx);
2858 break;
2859 }
2860 }
2861 }
2862
2863 let targets_index = targets_index.ok_or_else(|| {
2864 Error::Parse(ParseError {
2865 errors: vec![ErrorInfo {
2866 message: "No TARGETS node found in rule".to_string(),
2867 line: 1,
2868 context: "remove_target".to_string(),
2869 }],
2870 })
2871 })?;
2872
2873 let new_targets_node = build_targets_node(&new_targets);
2875
2876 self.0.splice_children(
2878 targets_index..targets_index + 1,
2879 vec![new_targets_node.into()],
2880 );
2881
2882 Ok(true)
2883 }
2884
2885 pub fn remove(self) -> Result<(), Error> {
2898 let parent = self.syntax().parent().ok_or_else(|| {
2899 Error::Parse(ParseError {
2900 errors: vec![ErrorInfo {
2901 message: "Rule has no parent".to_string(),
2902 line: 1,
2903 context: "remove".to_string(),
2904 }],
2905 })
2906 })?;
2907
2908 remove_with_preceding_comments(self.syntax(), &parent);
2909 Ok(())
2910 }
2911}
2912
2913impl Default for Makefile {
2914 fn default() -> Self {
2915 Self::new()
2916 }
2917}
2918
2919impl Include {
2920 pub fn path(&self) -> Option<String> {
2922 self.syntax()
2923 .children()
2924 .find(|it| it.kind() == EXPR)
2925 .map(|it| it.text().to_string().trim().to_string())
2926 }
2927
2928 pub fn is_optional(&self) -> bool {
2930 let text = self.syntax().text();
2931 text.to_string().starts_with("-include") || text.to_string().starts_with("sinclude")
2932 }
2933}
2934
2935#[cfg(test)]
2936mod tests {
2937 use super::*;
2938
2939 #[test]
2940 fn test_conditionals() {
2941 let code = "ifdef DEBUG\n DEBUG_FLAG := 1\nendif\n";
2945 let mut buf = code.as_bytes();
2946 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse basic ifdef");
2947 assert!(makefile.code().contains("DEBUG_FLAG"));
2948
2949 let code =
2951 "ifeq ($(OS),Windows_NT)\n RESULT := windows\nelse\n RESULT := unix\nendif\n";
2952 let mut buf = code.as_bytes();
2953 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse ifeq/ifneq");
2954 assert!(makefile.code().contains("RESULT"));
2955 assert!(makefile.code().contains("windows"));
2956
2957 let code = "ifdef DEBUG\n CFLAGS += -g\n ifdef VERBOSE\n CFLAGS += -v\n endif\nelse\n CFLAGS += -O2\nendif\n";
2959 let mut buf = code.as_bytes();
2960 let makefile = Makefile::read_relaxed(&mut buf)
2961 .expect("Failed to parse nested conditionals with else");
2962 assert!(makefile.code().contains("CFLAGS"));
2963 assert!(makefile.code().contains("VERBOSE"));
2964
2965 let code = "ifdef DEBUG\nendif\n";
2967 let mut buf = code.as_bytes();
2968 let makefile =
2969 Makefile::read_relaxed(&mut buf).expect("Failed to parse empty conditionals");
2970 assert!(makefile.code().contains("ifdef DEBUG"));
2971
2972 let code = "ifeq ($(OS),Windows)\n EXT := .exe\nelif ifeq ($(OS),Linux)\n EXT := .bin\nelse\n EXT := .out\nendif\n";
2974 let mut buf = code.as_bytes();
2975 let makefile =
2976 Makefile::read_relaxed(&mut buf).expect("Failed to parse conditionals with elif");
2977 assert!(makefile.code().contains("EXT"));
2978
2979 let code = "ifXYZ DEBUG\nDEBUG := 1\nendif\n";
2981 let mut buf = code.as_bytes();
2982 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse with recovery");
2983 assert!(makefile.code().contains("DEBUG"));
2984
2985 let code = "ifdef \nDEBUG := 1\nendif\n";
2987 let mut buf = code.as_bytes();
2988 let makefile = Makefile::read_relaxed(&mut buf)
2989 .expect("Failed to parse with recovery - missing condition");
2990 assert!(makefile.code().contains("DEBUG"));
2991 }
2992
2993 #[test]
2994 fn test_parse_simple() {
2995 const SIMPLE: &str = r#"VARIABLE = value
2996
2997rule: dependency
2998 command
2999"#;
3000 let parsed = parse(SIMPLE);
3001 assert!(parsed.errors.is_empty());
3002 let node = parsed.syntax();
3003 assert_eq!(
3004 format!("{:#?}", node),
3005 r#"ROOT@0..44
3006 VARIABLE@0..17
3007 IDENTIFIER@0..8 "VARIABLE"
3008 WHITESPACE@8..9 " "
3009 OPERATOR@9..10 "="
3010 WHITESPACE@10..11 " "
3011 EXPR@11..16
3012 IDENTIFIER@11..16 "value"
3013 NEWLINE@16..17 "\n"
3014 NEWLINE@17..18 "\n"
3015 RULE@18..44
3016 TARGETS@18..22
3017 IDENTIFIER@18..22 "rule"
3018 OPERATOR@22..23 ":"
3019 WHITESPACE@23..24 " "
3020 PREREQUISITES@24..34
3021 PREREQUISITE@24..34
3022 IDENTIFIER@24..34 "dependency"
3023 NEWLINE@34..35 "\n"
3024 RECIPE@35..44
3025 INDENT@35..36 "\t"
3026 TEXT@36..43 "command"
3027 NEWLINE@43..44 "\n"
3028"#
3029 );
3030
3031 let root = parsed.root();
3032
3033 let mut rules = root.rules().collect::<Vec<_>>();
3034 assert_eq!(rules.len(), 1);
3035 let rule = rules.pop().unwrap();
3036 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3037 assert_eq!(rule.prerequisites().collect::<Vec<_>>(), vec!["dependency"]);
3038 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
3039
3040 let mut variables = root.variable_definitions().collect::<Vec<_>>();
3041 assert_eq!(variables.len(), 1);
3042 let variable = variables.pop().unwrap();
3043 assert_eq!(variable.name(), Some("VARIABLE".to_string()));
3044 assert_eq!(variable.raw_value(), Some("value".to_string()));
3045 }
3046
3047 #[test]
3048 fn test_parse_export_assign() {
3049 const EXPORT: &str = r#"export VARIABLE := value
3050"#;
3051 let parsed = parse(EXPORT);
3052 assert!(parsed.errors.is_empty());
3053 let node = parsed.syntax();
3054 assert_eq!(
3055 format!("{:#?}", node),
3056 r#"ROOT@0..25
3057 VARIABLE@0..25
3058 IDENTIFIER@0..6 "export"
3059 WHITESPACE@6..7 " "
3060 IDENTIFIER@7..15 "VARIABLE"
3061 WHITESPACE@15..16 " "
3062 OPERATOR@16..18 ":="
3063 WHITESPACE@18..19 " "
3064 EXPR@19..24
3065 IDENTIFIER@19..24 "value"
3066 NEWLINE@24..25 "\n"
3067"#
3068 );
3069
3070 let root = parsed.root();
3071
3072 let mut variables = root.variable_definitions().collect::<Vec<_>>();
3073 assert_eq!(variables.len(), 1);
3074 let variable = variables.pop().unwrap();
3075 assert_eq!(variable.name(), Some("VARIABLE".to_string()));
3076 assert_eq!(variable.raw_value(), Some("value".to_string()));
3077 }
3078
3079 #[test]
3080 fn test_parse_multiple_prerequisites() {
3081 const MULTIPLE_PREREQUISITES: &str = r#"rule: dependency1 dependency2
3082 command
3083
3084"#;
3085 let parsed = parse(MULTIPLE_PREREQUISITES);
3086 assert!(parsed.errors.is_empty());
3087 let node = parsed.syntax();
3088 assert_eq!(
3089 format!("{:#?}", node),
3090 r#"ROOT@0..40
3091 RULE@0..40
3092 TARGETS@0..4
3093 IDENTIFIER@0..4 "rule"
3094 OPERATOR@4..5 ":"
3095 WHITESPACE@5..6 " "
3096 PREREQUISITES@6..29
3097 PREREQUISITE@6..17
3098 IDENTIFIER@6..17 "dependency1"
3099 WHITESPACE@17..18 " "
3100 PREREQUISITE@18..29
3101 IDENTIFIER@18..29 "dependency2"
3102 NEWLINE@29..30 "\n"
3103 RECIPE@30..39
3104 INDENT@30..31 "\t"
3105 TEXT@31..38 "command"
3106 NEWLINE@38..39 "\n"
3107 NEWLINE@39..40 "\n"
3108"#
3109 );
3110 let root = parsed.root();
3111
3112 let rule = root.rules().next().unwrap();
3113 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3114 assert_eq!(
3115 rule.prerequisites().collect::<Vec<_>>(),
3116 vec!["dependency1", "dependency2"]
3117 );
3118 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
3119 }
3120
3121 #[test]
3122 fn test_add_rule() {
3123 let mut makefile = Makefile::new();
3124 let rule = makefile.add_rule("rule");
3125 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3126 assert_eq!(
3127 rule.prerequisites().collect::<Vec<_>>(),
3128 Vec::<String>::new()
3129 );
3130
3131 assert_eq!(makefile.to_string(), "rule:\n");
3132 }
3133
3134 #[test]
3135 fn test_push_command() {
3136 let mut makefile = Makefile::new();
3137 let mut rule = makefile.add_rule("rule");
3138
3139 rule.push_command("command");
3141 rule.push_command("command2");
3142
3143 assert_eq!(
3145 rule.recipes().collect::<Vec<_>>(),
3146 vec!["command", "command2"]
3147 );
3148
3149 rule.push_command("command3");
3151 assert_eq!(
3152 rule.recipes().collect::<Vec<_>>(),
3153 vec!["command", "command2", "command3"]
3154 );
3155
3156 assert_eq!(
3158 makefile.to_string(),
3159 "rule:\n\tcommand\n\tcommand2\n\tcommand3\n"
3160 );
3161
3162 assert_eq!(
3164 rule.to_string(),
3165 "rule:\n\tcommand\n\tcommand2\n\tcommand3\n"
3166 );
3167 }
3168
3169 #[test]
3170 fn test_replace_command() {
3171 let mut makefile = Makefile::new();
3172 let mut rule = makefile.add_rule("rule");
3173
3174 rule.push_command("command");
3176 rule.push_command("command2");
3177
3178 assert_eq!(
3180 rule.recipes().collect::<Vec<_>>(),
3181 vec!["command", "command2"]
3182 );
3183
3184 rule.replace_command(0, "new command");
3186 assert_eq!(
3187 rule.recipes().collect::<Vec<_>>(),
3188 vec!["new command", "command2"]
3189 );
3190
3191 assert_eq!(makefile.to_string(), "rule:\n\tnew command\n\tcommand2\n");
3193
3194 assert_eq!(rule.to_string(), "rule:\n\tnew command\n\tcommand2\n");
3196 }
3197
3198 #[test]
3199 fn test_parse_rule_without_newline() {
3200 let rule = "rule: dependency\n\tcommand".parse::<Rule>().unwrap();
3201 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3202 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
3203 let rule = "rule: dependency".parse::<Rule>().unwrap();
3204 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3205 assert_eq!(rule.recipes().collect::<Vec<_>>(), Vec::<String>::new());
3206 }
3207
3208 #[test]
3209 fn test_parse_makefile_without_newline() {
3210 let makefile = "rule: dependency\n\tcommand".parse::<Makefile>().unwrap();
3211 assert_eq!(makefile.rules().count(), 1);
3212 }
3213
3214 #[test]
3215 fn test_from_reader() {
3216 let makefile = Makefile::from_reader("rule: dependency\n\tcommand".as_bytes()).unwrap();
3217 assert_eq!(makefile.rules().count(), 1);
3218 }
3219
3220 #[test]
3221 fn test_parse_with_tab_after_last_newline() {
3222 let makefile = Makefile::from_reader("rule: dependency\n\tcommand\n\t".as_bytes()).unwrap();
3223 assert_eq!(makefile.rules().count(), 1);
3224 }
3225
3226 #[test]
3227 fn test_parse_with_space_after_last_newline() {
3228 let makefile = Makefile::from_reader("rule: dependency\n\tcommand\n ".as_bytes()).unwrap();
3229 assert_eq!(makefile.rules().count(), 1);
3230 }
3231
3232 #[test]
3233 fn test_parse_with_comment_after_last_newline() {
3234 let makefile =
3235 Makefile::from_reader("rule: dependency\n\tcommand\n#comment".as_bytes()).unwrap();
3236 assert_eq!(makefile.rules().count(), 1);
3237 }
3238
3239 #[test]
3240 fn test_parse_with_variable_rule() {
3241 let makefile =
3242 Makefile::from_reader("RULE := rule\n$(RULE): dependency\n\tcommand".as_bytes())
3243 .unwrap();
3244
3245 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3247 assert_eq!(vars.len(), 1);
3248 assert_eq!(vars[0].name(), Some("RULE".to_string()));
3249 assert_eq!(vars[0].raw_value(), Some("rule".to_string()));
3250
3251 let rules = makefile.rules().collect::<Vec<_>>();
3253 assert_eq!(rules.len(), 1);
3254 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["$(RULE)"]);
3255 assert_eq!(
3256 rules[0].prerequisites().collect::<Vec<_>>(),
3257 vec!["dependency"]
3258 );
3259 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["command"]);
3260 }
3261
3262 #[test]
3263 fn test_parse_with_variable_dependency() {
3264 let makefile =
3265 Makefile::from_reader("DEP := dependency\nrule: $(DEP)\n\tcommand".as_bytes()).unwrap();
3266
3267 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3269 assert_eq!(vars.len(), 1);
3270 assert_eq!(vars[0].name(), Some("DEP".to_string()));
3271 assert_eq!(vars[0].raw_value(), Some("dependency".to_string()));
3272
3273 let rules = makefile.rules().collect::<Vec<_>>();
3275 assert_eq!(rules.len(), 1);
3276 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["rule"]);
3277 assert_eq!(rules[0].prerequisites().collect::<Vec<_>>(), vec!["$(DEP)"]);
3278 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["command"]);
3279 }
3280
3281 #[test]
3282 fn test_parse_with_variable_command() {
3283 let makefile =
3284 Makefile::from_reader("COM := command\nrule: dependency\n\t$(COM)".as_bytes()).unwrap();
3285
3286 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3288 assert_eq!(vars.len(), 1);
3289 assert_eq!(vars[0].name(), Some("COM".to_string()));
3290 assert_eq!(vars[0].raw_value(), Some("command".to_string()));
3291
3292 let rules = makefile.rules().collect::<Vec<_>>();
3294 assert_eq!(rules.len(), 1);
3295 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["rule"]);
3296 assert_eq!(
3297 rules[0].prerequisites().collect::<Vec<_>>(),
3298 vec!["dependency"]
3299 );
3300 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["$(COM)"]);
3301 }
3302
3303 #[test]
3304 fn test_regular_line_error_reporting() {
3305 let input = "rule target\n\tcommand";
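        // "rule target" lacks the ':' separator, so the tab-indented command on
        // line 2 cannot be attached to a rule and is reported as an error.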
3306
3307 let parsed = parse(input);
3309 let direct_error = &parsed.errors[0];
3310
3311 assert_eq!(direct_error.line, 2);
3313 assert!(
3314 direct_error.message.contains("expected"),
3315 "Error message should contain 'expected': {}",
3316 direct_error.message
3317 );
3318 assert_eq!(direct_error.context, "\tcommand");
3319
3320 let reader_result = Makefile::from_reader(input.as_bytes());
3322 let parse_error = match reader_result {
3323 Ok(_) => panic!("Expected Parse error from from_reader"),
3324 Err(err) => match err {
3325 self::Error::Parse(parse_err) => parse_err,
3326 _ => panic!("Expected Parse error"),
3327 },
3328 };
3329
3330 let error_text = parse_error.to_string();
3332 assert!(error_text.contains("Error at line 2:"));
3333 assert!(error_text.contains("2| \tcommand"));
3334 }
3335
3336 #[test]
3337 fn test_parsing_error_context_with_bad_syntax() {
3338 let input = "#begin comment\n\t(╯°□°)╯︵ ┻━┻\n#end comment";
3340
3341 match Makefile::from_reader(input.as_bytes()) {
3343 Ok(makefile) => {
3344 assert_eq!(
3346 makefile.rules().count(),
3347 0,
3348 "Should not have found any rules"
3349 );
3350 }
3351 Err(err) => match err {
3352 self::Error::Parse(error) => {
3353 assert!(error.errors[0].line >= 2, "Error line should be at least 2");
3355 assert!(
3356 !error.errors[0].context.is_empty(),
3357 "Error context should not be empty"
3358 );
3359 }
3360 _ => panic!("Unexpected error type"),
3361 },
3362 };
3363 }
3364
3365 #[test]
3366 fn test_error_message_format() {
3367 let parse_error = ParseError {
3369 errors: vec![ErrorInfo {
3370 message: "test error".to_string(),
3371 line: 42,
3372 context: "some problematic code".to_string(),
3373 }],
3374 };
3375
3376 let error_text = parse_error.to_string();
3377 assert!(error_text.contains("Error at line 42: test error"));
3378 assert!(error_text.contains("42| some problematic code"));
3379 }
3380
3381 #[test]
3382 fn test_line_number_calculation() {
3383 let test_cases = [
3385 ("rule dependency\n\tcommand", 2),
3386 ("#comment\n\t(╯°□°)╯︵ ┻━┻", 2),
3387 ("var = value\n#comment\n\tindented line", 3),
3388 ];
3389
3390 for (input, expected_line) in test_cases {
3391 match input.parse::<Makefile>() {
3393 Ok(_) => {
3394 continue;
3397 }
3398 Err(err) => {
3399 if let Error::Parse(parse_err) = err {
3400 assert_eq!(
3402 parse_err.errors[0].line, expected_line,
3403 "Line number should match the expected line"
3404 );
3405
3406 if parse_err.errors[0].message.contains("indented") {
3408 assert!(
3409 parse_err.errors[0].context.starts_with('\t'),
3410 "Context for indentation errors should include the tab character"
3411 );
3412 }
3413 } else {
3414 panic!("Expected parse error, got: {:?}", err);
3415 }
3416 }
3417 }
3418 }
3419 }
3420
3421 #[test]
3422 fn test_conditional_features() {
3423 let code = r#"
3425# Set variables based on DEBUG flag
3426ifdef DEBUG
3427 CFLAGS += -g -DDEBUG
3428else
3429 CFLAGS = -O2
3430endif
3431
3432# Define a build rule
3433all: $(OBJS)
3434 $(CC) $(CFLAGS) -o $@ $^
3435"#;
3436
3437 let mut buf = code.as_bytes();
3438 let makefile =
3439 Makefile::read_relaxed(&mut buf).expect("Failed to parse conditional features");
3440
3441 assert!(!makefile.code().is_empty(), "Makefile has content");
3444
3445 let rules = makefile.rules().collect::<Vec<_>>();
3447 assert!(!rules.is_empty(), "Should have found rules");
3448
3449 assert!(code.contains("ifdef DEBUG"));
3451 assert!(code.contains("endif"));
3452
3453 let code_with_var = r#"
3455# Define a variable first
3456CC = gcc
3457
3458ifdef DEBUG
3459 CFLAGS += -g -DDEBUG
3460else
3461 CFLAGS = -O2
3462endif
3463
3464all: $(OBJS)
3465 $(CC) $(CFLAGS) -o $@ $^
3466"#;
3467
3468 let mut buf = code_with_var.as_bytes();
3469 let makefile =
3470 Makefile::read_relaxed(&mut buf).expect("Failed to parse with explicit variable");
3471
3472 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3474 assert!(
3475 !vars.is_empty(),
3476 "Should have found at least the CC variable definition"
3477 );
3478 }
3479
3480 #[test]
3481 fn test_include_directive() {
3482 let parsed = parse("include config.mk\ninclude $(TOPDIR)/rules.mk\ninclude *.mk\n");
3483 assert!(parsed.errors.is_empty());
3484 let node = parsed.syntax();
3485 assert!(format!("{:#?}", node).contains("INCLUDE@"));
3486 }
3487
3488 #[test]
3489 fn test_export_variables() {
3490 let parsed = parse("export SHELL := /bin/bash\n");
3491 assert!(parsed.errors.is_empty());
3492 let makefile = parsed.root();
3493 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3494 assert_eq!(vars.len(), 1);
3495 let shell_var = vars
3496 .iter()
3497 .find(|v| v.name() == Some("SHELL".to_string()))
3498 .unwrap();
3499 assert!(shell_var.raw_value().unwrap().contains("bin/bash"));
3500 }
3501
3502 #[test]
3503 fn test_variable_scopes() {
3504 let parsed =
3505 parse("SIMPLE = value\nIMMEDIATE := value\nCONDITIONAL ?= value\nAPPEND += value\n");
3506 assert!(parsed.errors.is_empty());
3507 let makefile = parsed.root();
3508 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3509 assert_eq!(vars.len(), 4);
3510 let var_names: Vec<_> = vars.iter().filter_map(|v| v.name()).collect();
3511 assert!(var_names.contains(&"SIMPLE".to_string()));
3512 assert!(var_names.contains(&"IMMEDIATE".to_string()));
3513 assert!(var_names.contains(&"CONDITIONAL".to_string()));
3514 assert!(var_names.contains(&"APPEND".to_string()));
3515 }
3516
3517 #[test]
3518 fn test_pattern_rule_parsing() {
3519 let parsed = parse("%.o: %.c\n\t$(CC) -c -o $@ $<\n");
3520 assert!(parsed.errors.is_empty());
3521 let makefile = parsed.root();
3522 let rules = makefile.rules().collect::<Vec<_>>();
3523 assert_eq!(rules.len(), 1);
3524 assert_eq!(rules[0].targets().next().unwrap(), "%.o");
3525 assert!(rules[0].recipes().next().unwrap().contains("$@"));
3526 }
3527
3528 #[test]
3529 fn test_include_variants() {
3530 let makefile_str = "include simple.mk\n-include optional.mk\nsinclude synonym.mk\ninclude $(VAR)/generated.mk\n";
3532 let parsed = parse(makefile_str);
3533 assert!(parsed.errors.is_empty());
3534
3535 let node = parsed.syntax();
3537 let debug_str = format!("{:#?}", node);
3538
3539 assert_eq!(debug_str.matches("INCLUDE@").count(), 4);
3541
3542 let makefile = parsed.root();
3544
3545 let include_count = makefile
3547 .syntax()
3548 .children()
3549 .filter(|child| child.kind() == INCLUDE)
3550 .count();
3551 assert_eq!(include_count, 4);
3552
3553 assert!(makefile
3555 .included_files()
3556 .any(|path| path.contains("$(VAR)")));
3557 }
3558
3559 #[test]
3560 fn test_include_api() {
3561 let makefile_str = "include simple.mk\n-include optional.mk\nsinclude synonym.mk\n";
3563 let makefile: Makefile = makefile_str.parse().unwrap();
3564
3565 let includes: Vec<_> = makefile.includes().collect();
3567 assert_eq!(includes.len(), 3);
3568
3569 assert!(!includes[0].is_optional()); // `include` is required
3570 assert!(includes[1].is_optional()); // `-include` is optional
3571 assert!(includes[2].is_optional()); // `sinclude` is a synonym for `-include`
3572 
3573 let files: Vec<_> = makefile.included_files().collect();
3576 assert_eq!(files, vec!["simple.mk", "optional.mk", "synonym.mk"]);
3577
3578 assert_eq!(includes[0].path(), Some("simple.mk".to_string()));
3580 assert_eq!(includes[1].path(), Some("optional.mk".to_string()));
3581 assert_eq!(includes[2].path(), Some("synonym.mk".to_string()));
3582 }
3583
3584 #[test]
3585 fn test_include_integration() {
3586 let phony_makefile = Makefile::from_reader(
3590 ".PHONY: build\n\nVERBOSE ?= 0\n\n# comment\n-include .env\n\nrule: dependency\n\tcommand"
3591 .as_bytes()
3592 ).unwrap();
3593
3594 assert_eq!(phony_makefile.rules().count(), 2);
3596
3597 let normal_rules_count = phony_makefile
3599 .rules()
3600 .filter(|r| !r.targets().any(|t| t.starts_with('.')))
3601 .count();
3602 assert_eq!(normal_rules_count, 1);
3603
3604 assert_eq!(phony_makefile.includes().count(), 1);
3606 assert_eq!(phony_makefile.included_files().next().unwrap(), ".env");
3607
3608 let simple_makefile = Makefile::from_reader(
3610 "\n\nVERBOSE ?= 0\n\n# comment\n-include .env\n\nrule: dependency\n\tcommand"
3611 .as_bytes(),
3612 )
3613 .unwrap();
3614 assert_eq!(simple_makefile.rules().count(), 1);
3615 assert_eq!(simple_makefile.includes().count(), 1);
3616 }
3617
3618 #[test]
3619 fn test_real_conditional_directives() {
3620 let conditional = "ifdef DEBUG\nCFLAGS = -g\nelse\nCFLAGS = -O2\nendif\n";
3622 let mut buf = conditional.as_bytes();
3623 let makefile =
3624 Makefile::read_relaxed(&mut buf).expect("Failed to parse basic if/else conditional");
3625 let code = makefile.code();
3626 assert!(code.contains("ifdef DEBUG"));
3627 assert!(code.contains("else"));
3628 assert!(code.contains("endif"));
3629
3630 let nested = "ifdef DEBUG\nCFLAGS = -g\nifdef VERBOSE\nCFLAGS += -v\nendif\nendif\n";
3632 let mut buf = nested.as_bytes();
3633 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse nested ifdef");
3634 let code = makefile.code();
3635 assert!(code.contains("ifdef DEBUG"));
3636 assert!(code.contains("ifdef VERBOSE"));
3637
3638 let ifeq = "ifeq ($(OS),Windows_NT)\nTARGET = app.exe\nelse\nTARGET = app\nendif\n";
3640 let mut buf = ifeq.as_bytes();
3641 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse ifeq form");
3642 let code = makefile.code();
3643 assert!(code.contains("ifeq"));
3644 assert!(code.contains("Windows_NT"));
3645 }
3646
3647 #[test]
3648 fn test_indented_text_outside_rules() {
3649 let help_text = "help:\n\t@echo \"Available targets:\"\n\t@echo \" help show help\"\n";
3651 let parsed = parse(help_text);
3652 assert!(parsed.errors.is_empty());
3653
3654 let root = parsed.root();
3656 let rules = root.rules().collect::<Vec<_>>();
3657 assert_eq!(rules.len(), 1);
3658
3659 let help_rule = &rules[0];
3660 let recipes = help_rule.recipes().collect::<Vec<_>>();
3661 assert_eq!(recipes.len(), 2);
3662 assert!(recipes[0].contains("Available targets"));
3663 assert!(recipes[1].contains("help"));
3664 }
3665
3666 #[test]
3667 fn test_comment_handling_in_recipes() {
3668 let recipe_comment = "build:\n\t# This is a comment\n\tgcc -o app main.c\n";
3670
3671 let parsed = parse(recipe_comment);
3673
3674 assert!(
3676 parsed.errors.is_empty(),
3677 "Should parse recipe with comments without errors"
3678 );
3679
3680 let root = parsed.root();
3682 let rules = root.rules().collect::<Vec<_>>();
3683 assert_eq!(rules.len(), 1, "Should find exactly one rule");
3684
3685 let build_rule = &rules[0];
3687 assert_eq!(
3688 build_rule.targets().collect::<Vec<_>>(),
3689 vec!["build"],
3690 "Rule should have 'build' as target"
3691 );
3692
3693 let recipes = build_rule.recipes().collect::<Vec<_>>();
3697 assert_eq!(
3698 recipes.len(),
3699 1,
3700 "Should find exactly one recipe line (comment lines are filtered)"
3701 );
3702 assert!(
3703 recipes[0].contains("gcc -o app"),
3704 "Recipe should be the command line"
3705 );
3706 assert!(
3707 !recipes[0].contains("This is a comment"),
3708 "Comments should not be included in recipe lines"
3709 );
3710 }
3711
3712 #[test]
3713 fn test_multiline_variables() {
3714 let multiline = "SOURCES = main.c \\\n util.c\n";
3716
3717 let parsed = parse(multiline);
3719
3720 let root = parsed.root();
3722 let vars = root.variable_definitions().collect::<Vec<_>>();
3723 assert!(!vars.is_empty(), "Should find at least one variable");
3724
3725 let operators = "CFLAGS := -Wall \\\n -Werror\n";
3729 let parsed_operators = parse(operators);
3730
3731 let root = parsed_operators.root();
3733 let vars = root.variable_definitions().collect::<Vec<_>>();
3734 assert!(
3735 !vars.is_empty(),
3736 "Should find at least one variable with := operator"
3737 );
3738
3739 let append = "LDFLAGS += -L/usr/lib \\\n -lm\n";
3741 let parsed_append = parse(append);
3742
3743 let root = parsed_append.root();
3745 let vars = root.variable_definitions().collect::<Vec<_>>();
3746 assert!(
3747 !vars.is_empty(),
3748 "Should find at least one variable with += operator"
3749 );
3750 }
3751
3752 #[test]
3753 fn test_whitespace_and_eof_handling() {
3754 let blank_lines = "VAR = value\n\n\n";
3756
3757 let parsed_blank = parse(blank_lines);
3758
3759 let root = parsed_blank.root();
3761 let vars = root.variable_definitions().collect::<Vec<_>>();
3762 assert_eq!(
3763 vars.len(),
3764 1,
3765 "Should find one variable in blank lines test"
3766 );
3767
3768 let trailing_space = "VAR = value \n";
3770
3771 let parsed_space = parse(trailing_space);
3772
3773 let root = parsed_space.root();
3775 let vars = root.variable_definitions().collect::<Vec<_>>();
3776 assert_eq!(
3777 vars.len(),
3778 1,
3779 "Should find one variable in trailing space test"
3780 );
3781
3782 let no_newline = "VAR = value";
3784
3785 let parsed_no_newline = parse(no_newline);
3786
3787 let root = parsed_no_newline.root();
3789 let vars = root.variable_definitions().collect::<Vec<_>>();
3790 assert_eq!(vars.len(), 1, "Should find one variable in no newline test");
3791 assert_eq!(
3792 vars[0].name(),
3793 Some("VAR".to_string()),
3794 "Variable name should be VAR"
3795 );
3796 }
3797
3798 #[test]
3799 fn test_complex_variable_references() {
3800 let wildcard = "SOURCES = $(wildcard *.c)\n";
3802 let parsed = parse(wildcard);
3803 assert!(parsed.errors.is_empty());
3804
3805 let nested = "PREFIX = /usr\nBINDIR = $(PREFIX)/bin\n";
3807 let parsed = parse(nested);
3808 assert!(parsed.errors.is_empty());
3809
3810 let patsubst = "OBJECTS = $(patsubst %.c,%.o,$(SOURCES))\n";
3812 let parsed = parse(patsubst);
3813 assert!(parsed.errors.is_empty());
3814 }
3815
3816 #[test]
3817 fn test_complex_variable_references_minimal() {
3818 let wildcard = "SOURCES = $(wildcard *.c)\n";
3820 let parsed = parse(wildcard);
3821 assert!(parsed.errors.is_empty());
3822
3823 let nested = "PREFIX = /usr\nBINDIR = $(PREFIX)/bin\n";
3825 let parsed = parse(nested);
3826 assert!(parsed.errors.is_empty());
3827
3828 let patsubst = "OBJECTS = $(patsubst %.c,%.o,$(SOURCES))\n";
3830 let parsed = parse(patsubst);
3831 assert!(parsed.errors.is_empty());
3832 }
3833
3834 #[test]
3835 fn test_multiline_variable_with_backslash() {
3836 let content = r#"
3837LONG_VAR = This is a long variable \
3838 that continues on the next line \
3839 and even one more line
3840"#;
3841
3842 let mut buf = content.as_bytes();
3844 let makefile =
3845 Makefile::read_relaxed(&mut buf).expect("Failed to parse multiline variable");
3846
3847 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3849 assert_eq!(
3850 vars.len(),
3851 1,
3852 "Expected 1 variable but found {}",
3853 vars.len()
3854 );
3855 let var_value = vars[0].raw_value();
3856 assert!(var_value.is_some(), "Variable value is None");
3857
3858 let value_str = var_value.unwrap();
3860 assert!(
3861 value_str.contains("long variable"),
3862 "Value doesn't contain expected content"
3863 );
3864 }
3865
3866 #[test]
3867 fn test_multiline_variable_with_mixed_operators() {
3868 let content = r#"
3869PREFIX ?= /usr/local
3870CFLAGS := -Wall -O2 \
3871 -I$(PREFIX)/include \
3872 -DDEBUG
3873"#;
3874 let mut buf = content.as_bytes();
3876 let makefile = Makefile::read_relaxed(&mut buf)
3877 .expect("Failed to parse multiline variable with operators");
3878
3879 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3881 assert!(
3882 !vars.is_empty(),
3883 "Expected at least 1 variable, found {}",
3884 vars.len()
3885 );
3886
3887 let prefix_var = vars
3889 .iter()
3890 .find(|v| v.name().unwrap_or_default() == "PREFIX");
3891 assert!(prefix_var.is_some(), "Expected to find PREFIX variable");
3892 assert!(
3893 prefix_var.unwrap().raw_value().is_some(),
3894 "PREFIX variable has no value"
3895 );
3896
3897 let cflags_var = vars
3899 .iter()
3900 .find(|v| v.name().unwrap_or_default().contains("CFLAGS"));
3901 assert!(
3902 cflags_var.is_some(),
3903 "Expected to find CFLAGS variable (or part of it)"
3904 );
3905 }
3906
3907 #[test]
3908 fn test_indented_help_text() {
3909 let content = r#"
3910.PHONY: help
3911help:
3912 @echo "Available targets:"
3913 @echo " build - Build the project"
3914 @echo " test - Run tests"
3915 @echo " clean - Remove build artifacts"
3916"#;
3917 let mut buf = content.as_bytes();
3919 let makefile =
3920 Makefile::read_relaxed(&mut buf).expect("Failed to parse indented help text");
3921
3922 let rules = makefile.rules().collect::<Vec<_>>();
3924 assert!(!rules.is_empty(), "Expected at least one rule");
3925
3926 let help_rule = rules.iter().find(|r| r.targets().any(|t| t == "help"));
3928 assert!(help_rule.is_some(), "Expected to find help rule");
3929
3930 let recipes = help_rule.unwrap().recipes().collect::<Vec<_>>();
3932 assert!(
3933 !recipes.is_empty(),
3934 "Expected at least one recipe line in help rule"
3935 );
3936 assert!(
3937 recipes.iter().any(|r| r.contains("Available targets")),
3938 "Expected to find 'Available targets' in recipes"
3939 );
3940 }
3941
3942 #[test]
3943 fn test_indented_lines_in_conditionals() {
3944 let content = r#"
3945ifdef DEBUG
3946 CFLAGS += -g -DDEBUG
3947 # This is a comment inside conditional
3948 ifdef VERBOSE
3949 CFLAGS += -v
3950 endif
3951endif
3952"#;
3953 let mut buf = content.as_bytes();
3955 let makefile = Makefile::read_relaxed(&mut buf)
3956 .expect("Failed to parse indented lines in conditionals");
3957
3958 let code = makefile.code();
3960 assert!(code.contains("ifdef DEBUG"));
3961 assert!(code.contains("ifdef VERBOSE"));
3962 assert!(code.contains("endif"));
3963 }
3964
3965 #[test]
3966 fn test_recipe_with_colon() {
3967 let content = r#"
3968build:
3969 @echo "Building at: $(shell date)"
3970 gcc -o program main.c
3971"#;
3972 let parsed = parse(content);
3973 assert!(
3974 parsed.errors.is_empty(),
3975 "Failed to parse recipe with colon: {:?}",
3976 parsed.errors
3977 );
3978 }
3979
3980 #[test]
3981 #[ignore]
3982 fn test_double_colon_rules() {
3983 let content = r#"
3986%.o :: %.c
3987 $(CC) -c $< -o $@
3988
3989# Double colon allows multiple rules for same target
3990all:: prerequisite1
3991 @echo "First rule for all"
3992
3993all:: prerequisite2
3994 @echo "Second rule for all"
3995"#;
3996 let mut buf = content.as_bytes();
3997 let makefile =
3998 Makefile::read_relaxed(&mut buf).expect("Failed to parse double colon rules");
3999
4000 let rules = makefile.rules().collect::<Vec<_>>();
4002 assert!(!rules.is_empty(), "Expected at least one rule");
4003
4004 let all_rules = rules
4006 .iter()
4007 .filter(|r| r.targets().any(|t| t.contains("all")));
4008 assert!(
4009 all_rules.count() > 0,
4010 "Expected to find at least one rule containing 'all'"
4011 );
4012 }
4013
4014 #[test]
4015 fn test_elif_directive() {
4016 let content = r#"
4017ifeq ($(OS),Windows_NT)
4018 TARGET = windows
4019elif ifeq ($(OS),Darwin)
4020 TARGET = macos
4021elif ifeq ($(OS),Linux)
4022 TARGET = linux
4023else
4024 TARGET = unknown
4025endif
4026"#;
4027 let mut buf = content.as_bytes();
4029 let _makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse elif directive");
4030
4031 }
4034
4035 #[test]
4036 fn test_ambiguous_assignment_vs_rule() {
4037 const VAR_ASSIGNMENT: &str = "VARIABLE = value\n";
4039
4040 let mut buf = std::io::Cursor::new(VAR_ASSIGNMENT);
4041 let makefile =
4042 Makefile::read_relaxed(&mut buf).expect("Failed to parse variable assignment");
4043
4044 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4045 let rules = makefile.rules().collect::<Vec<_>>();
4046
4047 assert_eq!(vars.len(), 1, "Expected 1 variable, found {}", vars.len());
4048 assert_eq!(rules.len(), 0, "Expected 0 rules, found {}", rules.len());
4049
4050 assert_eq!(vars[0].name(), Some("VARIABLE".to_string()));
4051
4052 const SIMPLE_RULE: &str = "target: dependency\n";
4054
4055 let mut buf = std::io::Cursor::new(SIMPLE_RULE);
4056 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse simple rule");
4057
4058 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4059 let rules = makefile.rules().collect::<Vec<_>>();
4060
4061 assert_eq!(vars.len(), 0, "Expected 0 variables, found {}", vars.len());
4062 assert_eq!(rules.len(), 1, "Expected 1 rule, found {}", rules.len());
4063
4064 let rule = &rules[0];
4065 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["target"]);
4066 }
4067
4068 #[test]
4069 fn test_nested_conditionals() {
4070 let content = r#"
4071ifdef RELEASE
4072 CFLAGS += -O3
4073 ifndef DEBUG
4074 ifneq ($(ARCH),arm)
4075 CFLAGS += -march=native
4076 else
4077 CFLAGS += -mcpu=cortex-a72
4078 endif
4079 endif
4080endif
4081"#;
4082 let mut buf = content.as_bytes();
4084 let makefile =
4085 Makefile::read_relaxed(&mut buf).expect("Failed to parse nested conditionals");
4086
4087 let code = makefile.code();
4089 assert!(code.contains("ifdef RELEASE"));
4090 assert!(code.contains("ifndef DEBUG"));
4091 assert!(code.contains("ifneq"));
4092 }
4093
4094 #[test]
4095 fn test_space_indented_recipes() {
4096 let content = r#"
4099build:
4100 @echo "Building with spaces instead of tabs"
4101 gcc -o program main.c
4102"#;
4103 let mut buf = content.as_bytes();
4105 let makefile =
4106 Makefile::read_relaxed(&mut buf).expect("Failed to parse space-indented recipes");
4107
4108 let rules = makefile.rules().collect::<Vec<_>>();
4110 assert!(!rules.is_empty(), "Expected at least one rule");
4111
4112 let build_rule = rules.iter().find(|r| r.targets().any(|t| t == "build"));
4114 assert!(build_rule.is_some(), "Expected to find build rule");
4115 }
4116
4117 #[test]
4118 fn test_complex_variable_functions() {
4119 let content = r#"
4120FILES := $(shell find . -name "*.c")
4121OBJS := $(patsubst %.c,%.o,$(FILES))
4122NAME := $(if $(PROGRAM),$(PROGRAM),a.out)
4123HEADERS := ${wildcard *.h}
4124"#;
4125 let parsed = parse(content);
4126 assert!(
4127 parsed.errors.is_empty(),
4128 "Failed to parse complex variable functions: {:?}",
4129 parsed.errors
4130 );
4131 }
4132
4133 #[test]
4134 fn test_nested_variable_expansions() {
4135 let content = r#"
4136VERSION = 1.0
4137PACKAGE = myapp
4138TARBALL = $(PACKAGE)-$(VERSION).tar.gz
4139INSTALL_PATH = $(shell echo $(PREFIX) | sed 's/\/$//')
4140"#;
4141 let parsed = parse(content);
4142 assert!(
4143 parsed.errors.is_empty(),
4144 "Failed to parse nested variable expansions: {:?}",
4145 parsed.errors
4146 );
4147 }
4148
4149 #[test]
4150 fn test_special_directives() {
4151 let content = r#"
4152# Special makefile directives
4153.PHONY: all clean
4154.SUFFIXES: .c .o
4155.DEFAULT: all
4156
4157# Variable definition and export directive
4158export PATH := /usr/bin:/bin
4159"#;
4160 let mut buf = content.as_bytes();
4162 let makefile =
4163 Makefile::read_relaxed(&mut buf).expect("Failed to parse special directives");
4164
4165 let rules = makefile.rules().collect::<Vec<_>>();
4167
4168 let phony_rule = rules
4170 .iter()
4171 .find(|r| r.targets().any(|t| t.contains(".PHONY")));
4172 assert!(phony_rule.is_some(), "Expected to find .PHONY rule");
4173
4174 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4176 assert!(!vars.is_empty(), "Expected to find at least one variable");
4177 }
4178
4179 #[test]
4182 fn test_comprehensive_real_world_makefile() {
4183 let content = r#"
4185# Basic variable assignment
4186VERSION = 1.0.0
4187
4188# Phony target
4189.PHONY: all clean
4190
4191# Simple rule
4192all:
4193 echo "Building version $(VERSION)"
4194
4195# Another rule with dependencies
4196clean:
4197 rm -f *.o
4198"#;
4199
4200 let parsed = parse(content);
4202
4203 assert!(parsed.errors.is_empty(), "Expected no parsing errors");
4205
4206 let variables = parsed.root().variable_definitions().collect::<Vec<_>>();
4208 assert!(!variables.is_empty(), "Expected at least one variable");
4209 assert_eq!(
4210 variables[0].name(),
4211 Some("VERSION".to_string()),
4212 "Expected VERSION variable"
4213 );
4214
4215 let rules = parsed.root().rules().collect::<Vec<_>>();
4217 assert!(!rules.is_empty(), "Expected at least one rule");
4218
4219 let rule_targets: Vec<String> = rules
4221 .iter()
4222 .flat_map(|r| r.targets().collect::<Vec<_>>())
4223 .collect();
4224 assert!(
4225 rule_targets.contains(&".PHONY".to_string()),
4226 "Expected .PHONY rule"
4227 );
4228 assert!(
4229 rule_targets.contains(&"all".to_string()),
4230 "Expected 'all' rule"
4231 );
4232 assert!(
4233 rule_targets.contains(&"clean".to_string()),
4234 "Expected 'clean' rule"
4235 );
4236 }
4237
4238 #[test]
4239 fn test_indented_help_text_outside_rules() {
4240 let content = r#"
4242# Targets with help text
4243help:
4244 @echo "Available targets:"
4245 @echo " build build the project"
4246 @echo " test run tests"
4247 @echo " clean clean build artifacts"
4248
4249# Another target
4250clean:
4251 rm -rf build/
4252"#;
4253
4254 let parsed = parse(content);
4256
4257 assert!(
4259 parsed.errors.is_empty(),
4260 "Failed to parse indented help text"
4261 );
4262
4263 let rules = parsed.root().rules().collect::<Vec<_>>();
4265 assert_eq!(rules.len(), 2, "Expected to find two rules");
4266
4267 let help_rule = rules
4269 .iter()
4270 .find(|r| r.targets().any(|t| t == "help"))
4271 .expect("Expected to find help rule");
4272
4273 let clean_rule = rules
4274 .iter()
4275 .find(|r| r.targets().any(|t| t == "clean"))
4276 .expect("Expected to find clean rule");
4277
4278 let help_recipes = help_rule.recipes().collect::<Vec<_>>();
4280 assert!(
4281 !help_recipes.is_empty(),
4282 "Help rule should have recipe lines"
4283 );
4284 assert!(
4285 help_recipes
4286 .iter()
4287 .any(|line| line.contains("Available targets")),
4288 "Help recipes should include 'Available targets' line"
4289 );
4290
4291 let clean_recipes = clean_rule.recipes().collect::<Vec<_>>();
4293 assert!(
4294 !clean_recipes.is_empty(),
4295 "Clean rule should have recipe lines"
4296 );
4297 assert!(
4298 clean_recipes.iter().any(|line| line.contains("rm -rf")),
4299 "Clean recipes should include 'rm -rf' command"
4300 );
4301 }
4302
4303 #[test]
4304 fn test_makefile1_phony_pattern() {
4305 let content = "#line 2145\n.PHONY: $(PHONY)\n";
4307
4308 let result = parse(content);
4310
4311 assert!(
4313 result.errors.is_empty(),
4314 "Failed to parse .PHONY: $(PHONY) pattern"
4315 );
4316
4317 let rules = result.root().rules().collect::<Vec<_>>();
4319 assert_eq!(rules.len(), 1, "Expected 1 rule");
4320 assert_eq!(
4321 rules[0].targets().next().unwrap(),
4322 ".PHONY",
4323 "Expected .PHONY rule"
4324 );
4325
4326 let prereqs = rules[0].prerequisites().collect::<Vec<_>>();
4328 assert_eq!(prereqs.len(), 1, "Expected 1 prerequisite");
4329 assert_eq!(prereqs[0], "$(PHONY)", "Expected $(PHONY) prerequisite");
4330 }
4331
4332 #[test]
4333 fn test_skip_until_newline_behavior() {
4334 let input = "text without newline";
4336 let parsed = parse(input);
4337 // We only require that parsing completes without panicking and yields a tree.
4338 let _ = parsed.syntax();
4339
4340 let input_with_newline = "text\nafter newline";
4341 let parsed2 = parse(input_with_newline);
4342 let _ = parsed2.syntax(); // again, only require that parsing completes without panicking
4343 }
4344
4345 #[test]
4346 fn test_error_with_indent_token() {
4347 let input = "\tinvalid indented line";
4349 let parsed = parse(input);
4350 assert!(!parsed.errors.is_empty());
4352
4353 let error_msg = &parsed.errors[0].message;
4354 assert!(error_msg.contains("indented") || error_msg.contains("part of a rule"));
4355 }
4356
4357 #[test]
4358 fn test_conditional_token_handling() {
4359 let input = r#"
4361ifndef VAR
4362 CFLAGS = -DTEST
4363endif
4364"#;
4365 let parsed = parse(input);
4366 let makefile = parsed.root();
4368 let _vars = makefile.variable_definitions().collect::<Vec<_>>();
4369 let nested = r#"
4373ifdef DEBUG
4374 ifndef RELEASE
4375 CFLAGS = -g
4376 endif
4377endif
4378"#;
4379 let parsed_nested = parse(nested);
4380 let _makefile = parsed_nested.root();
4382 }
4383
4384 #[test]
4385 fn test_include_vs_conditional_logic() {
4386 let input = r#"
4388include file.mk
4389ifdef VAR
4390 VALUE = 1
4391endif
4392"#;
4393 let parsed = parse(input);
4394 let makefile = parsed.root();
4396 let includes = makefile.includes().collect::<Vec<_>>();
4397 assert!(!includes.is_empty() || !parsed.errors.is_empty());
4399
4400 let optional_include = r#"
4402-include optional.mk
4403ifndef VAR
4404 VALUE = default
4405endif
4406"#;
4407 let parsed2 = parse(optional_include);
4408 let _makefile = parsed2.root();
4410 }
4411
4412 #[test]
4413 fn test_balanced_parens_counting() {
4414 let input = r#"
4416VAR = $(call func,$(nested,arg),extra)
4417COMPLEX = $(if $(condition),$(then_val),$(else_val))
4418"#;
4419 let parsed = parse(input);
4420 assert!(parsed.errors.is_empty());
4421
4422 let makefile = parsed.root();
4423 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4424 assert_eq!(vars.len(), 2);
4425 }
4426
4427 #[test]
4428 fn test_documentation_lookahead() {
4429 let input = r#"
4431# Documentation comment
4432help:
4433 @echo "Usage instructions"
4434 @echo "More help text"
4435"#;
4436 let parsed = parse(input);
4437 assert!(parsed.errors.is_empty());
4438
4439 let makefile = parsed.root();
4440 let rules = makefile.rules().collect::<Vec<_>>();
4441 assert_eq!(rules.len(), 1);
4442 assert_eq!(rules[0].targets().next().unwrap(), "help");
4443 }
4444
4445 #[test]
4446 fn test_edge_case_empty_input() {
4447 let parsed = parse("");
4449 assert!(parsed.errors.is_empty());
4450
4451 let parsed2 = parse(" \n \n");
4453 let _makefile = parsed2.root();
4456 }
4457
4458 #[test]
4459 fn test_malformed_conditional_recovery() {
4460 let input = r#"
4462ifdef
4463 # Missing condition variable
4464endif
4465"#;
4466 let parsed = parse(input);
4467 // Recovery from a missing condition: parsing must not panic, whether or not errors are reported.
4468 let _ = parsed.syntax();
4470 }
4471
4472 #[test]
4473 fn test_replace_rule() {
4474 let mut makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
4475 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4476
4477 makefile.replace_rule(0, new_rule).unwrap();
4478
4479 let targets: Vec<_> = makefile
4480 .rules()
4481 .flat_map(|r| r.targets().collect::<Vec<_>>())
4482 .collect();
4483 assert_eq!(targets, vec!["new_rule", "rule2"]);
4484
4485 let recipes: Vec<_> = makefile.rules().next().unwrap().recipes().collect();
4486 assert_eq!(recipes, vec!["new_command"]);
4487 }
4488
4489 #[test]
4490 fn test_replace_rule_out_of_bounds() {
4491 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4492 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4493
4494 let result = makefile.replace_rule(5, new_rule);
4495 assert!(result.is_err());
4496 }
4497
4498 #[test]
4499 fn test_remove_rule() {
4500 let mut makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\nrule3:\n\tcommand3\n"
4501 .parse()
4502 .unwrap();
4503
4504 let removed = makefile.remove_rule(1).unwrap();
4505 assert_eq!(removed.targets().collect::<Vec<_>>(), vec!["rule2"]);
4506
4507 let remaining_targets: Vec<_> = makefile
4508 .rules()
4509 .flat_map(|r| r.targets().collect::<Vec<_>>())
4510 .collect();
4511 assert_eq!(remaining_targets, vec!["rule1", "rule3"]);
4512 assert_eq!(makefile.rules().count(), 2);
4513 }
4514
4515 #[test]
4516 fn test_remove_rule_out_of_bounds() {
4517 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4518
4519 let result = makefile.remove_rule(5);
4520 assert!(result.is_err());
4521 }
4522
4523 #[test]
4524 fn test_insert_rule() {
4525 let mut makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
4526 let new_rule: Rule = "inserted_rule:\n\tinserted_command\n".parse().unwrap();
4527
4528 makefile.insert_rule(1, new_rule).unwrap();
4529
4530 let targets: Vec<_> = makefile
4531 .rules()
4532 .flat_map(|r| r.targets().collect::<Vec<_>>())
4533 .collect();
4534 assert_eq!(targets, vec!["rule1", "inserted_rule", "rule2"]);
4535 assert_eq!(makefile.rules().count(), 3);
4536 }
4537
4538 #[test]
4539 fn test_insert_rule_at_end() {
4540 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4541 let new_rule: Rule = "end_rule:\n\tend_command\n".parse().unwrap();
4542
4543 makefile.insert_rule(1, new_rule).unwrap();
4544
4545 let targets: Vec<_> = makefile
4546 .rules()
4547 .flat_map(|r| r.targets().collect::<Vec<_>>())
4548 .collect();
4549 assert_eq!(targets, vec!["rule1", "end_rule"]);
4550 }
4551
4552 #[test]
4553 fn test_insert_rule_out_of_bounds() {
4554 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4555 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4556
4557 let result = makefile.insert_rule(5, new_rule);
4558 assert!(result.is_err());
4559 }
4560
4561 #[test]
4562 fn test_remove_command() {
4563 let mut rule: Rule = "rule:\n\tcommand1\n\tcommand2\n\tcommand3\n"
4564 .parse()
4565 .unwrap();
4566
4567 rule.remove_command(1);
4568 let recipes: Vec<_> = rule.recipes().collect();
4569 assert_eq!(recipes, vec!["command1", "command3"]);
4570 assert_eq!(rule.recipe_count(), 2);
4571 }
4572
4573 #[test]
4574 fn test_remove_command_out_of_bounds() {
4575 let mut rule: Rule = "rule:\n\tcommand1\n".parse().unwrap();
4576
4577 let result = rule.remove_command(5);
4578 assert!(!result);
4579 }
4580
4581 #[test]
4582 fn test_insert_command() {
4583 let mut rule: Rule = "rule:\n\tcommand1\n\tcommand3\n".parse().unwrap();
4584
4585 rule.insert_command(1, "command2");
4586 let recipes: Vec<_> = rule.recipes().collect();
4587 assert_eq!(recipes, vec!["command1", "command2", "command3"]);
4588 }
4589
4590 #[test]
4591 fn test_insert_command_at_end() {
4592 let mut rule: Rule = "rule:\n\tcommand1\n".parse().unwrap();
4593
4594 rule.insert_command(1, "command2");
4595 let recipes: Vec<_> = rule.recipes().collect();
4596 assert_eq!(recipes, vec!["command1", "command2"]);
4597 }
4598
4599 #[test]
4600 fn test_insert_command_in_empty_rule() {
4601 let mut rule: Rule = "rule:\n".parse().unwrap();
4602
4603 rule.insert_command(0, "new_command");
4604 let recipes: Vec<_> = rule.recipes().collect();
4605 assert_eq!(recipes, vec!["new_command"]);
4606 }
4607
4608 #[test]
4609 fn test_recipe_count() {
4610 let rule1: Rule = "rule:\n".parse().unwrap();
4611 assert_eq!(rule1.recipe_count(), 0);
4612
4613 let rule2: Rule = "rule:\n\tcommand1\n\tcommand2\n".parse().unwrap();
4614 assert_eq!(rule2.recipe_count(), 2);
4615 }
4616
4617 #[test]
4618 fn test_clear_commands() {
4619 let mut rule: Rule = "rule:\n\tcommand1\n\tcommand2\n\tcommand3\n"
4620 .parse()
4621 .unwrap();
4622
4623 rule.clear_commands();
4624 assert_eq!(rule.recipe_count(), 0);
4625
4626 let recipes: Vec<_> = rule.recipes().collect();
4627 assert_eq!(recipes, Vec::<String>::new());
4628
4629 let targets: Vec<_> = rule.targets().collect();
4631 assert_eq!(targets, vec!["rule"]);
4632 }
4633
4634 #[test]
4635 fn test_clear_commands_empty_rule() {
4636 let mut rule: Rule = "rule:\n".parse().unwrap();
4637
4638 rule.clear_commands();
4639 assert_eq!(rule.recipe_count(), 0);
4640
4641 let targets: Vec<_> = rule.targets().collect();
4642 assert_eq!(targets, vec!["rule"]);
4643 }
4644
4645 #[test]
4646 fn test_rule_manipulation_preserves_structure() {
4647 let input = r#"# Comment
4649VAR = value
4650
4651rule1:
4652 command1
4653
4654# Another comment
4655rule2:
4656 command2
4657
4658VAR2 = value2
4659"#;
4660
4661 let mut makefile: Makefile = input.parse().unwrap();
4662 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4663
4664 makefile.insert_rule(1, new_rule).unwrap();
4666
4667 let targets: Vec<_> = makefile
4669 .rules()
4670 .flat_map(|r| r.targets().collect::<Vec<_>>())
4671 .collect();
4672 assert_eq!(targets, vec!["rule1", "new_rule", "rule2"]);
4673
4674 let vars: Vec<_> = makefile.variable_definitions().collect();
4676 assert_eq!(vars.len(), 2);
4677
4678 let output = makefile.code();
4680 assert!(output.contains("# Comment"));
4681 assert!(output.contains("VAR = value"));
4682 assert!(output.contains("# Another comment"));
4683 assert!(output.contains("VAR2 = value2"));
4684 }
4685
4686 #[test]
4687 fn test_replace_rule_with_multiple_targets() {
4688 let mut makefile: Makefile = "target1 target2: dep\n\tcommand\n".parse().unwrap();
4689 let new_rule: Rule = "new_target: new_dep\n\tnew_command\n".parse().unwrap();
4690
4691 makefile.replace_rule(0, new_rule).unwrap();
4692
4693 let targets: Vec<_> = makefile
4694 .rules()
4695 .flat_map(|r| r.targets().collect::<Vec<_>>())
4696 .collect();
4697 assert_eq!(targets, vec!["new_target"]);
4698 }
4699
4700 #[test]
4701 fn test_empty_makefile_operations() {
4702 let mut makefile = Makefile::new();
4703
4704 assert!(makefile
4706 .replace_rule(0, "rule:\n\tcommand\n".parse().unwrap())
4707 .is_err());
4708 assert!(makefile.remove_rule(0).is_err());
4709
4710 let new_rule: Rule = "first_rule:\n\tcommand\n".parse().unwrap();
4712 makefile.insert_rule(0, new_rule).unwrap();
4713 assert_eq!(makefile.rules().count(), 1);
4714 }
4715
4716 #[test]
4717 fn test_command_operations_preserve_indentation() {
4718 let mut rule: Rule = "rule:\n\t\tdeep_indent\n\tshallow_indent\n"
4719 .parse()
4720 .unwrap();
4721
4722 rule.insert_command(1, "middle_command");
4723 let recipes: Vec<_> = rule.recipes().collect();
4724 assert_eq!(
4725 recipes,
4726 vec!["\tdeep_indent", "middle_command", "shallow_indent"]
4727 );
4728 }
4729
4730 #[test]
4731 fn test_rule_operations_with_variables_and_includes() {
4732 let input = r#"VAR1 = value1
4733include common.mk
4734
4735rule1:
4736 command1
4737
4738VAR2 = value2
4739include other.mk
4740
4741rule2:
4742 command2
4743"#;
4744
4745 let mut makefile: Makefile = input.parse().unwrap();
4746
4747 makefile.remove_rule(0).unwrap();
4749
4750 let output = makefile.code();
4752 assert!(output.contains("VAR1 = value1"));
4753 assert!(output.contains("include common.mk"));
4754 assert!(output.contains("VAR2 = value2"));
4755 assert!(output.contains("include other.mk"));
4756
4757 assert_eq!(makefile.rules().count(), 1);
4759 let remaining_targets: Vec<_> = makefile
4760 .rules()
4761 .flat_map(|r| r.targets().collect::<Vec<_>>())
4762 .collect();
4763 assert_eq!(remaining_targets, vec!["rule2"]);
4764 }
4765
4766 #[test]
4767 fn test_command_manipulation_edge_cases() {
4768 let mut empty_rule: Rule = "empty:\n".parse().unwrap();
4770 assert_eq!(empty_rule.recipe_count(), 0);
4771
4772 empty_rule.insert_command(0, "first_command");
4773 assert_eq!(empty_rule.recipe_count(), 1);
4774
4775 let mut empty_rule2: Rule = "empty:\n".parse().unwrap();
4777 empty_rule2.clear_commands();
4778 assert_eq!(empty_rule2.recipe_count(), 0);
4779 }
4780
4781 #[test]
4782 fn test_archive_member_parsing() {
4783 let input = "libfoo.a(bar.o): bar.c\n\tgcc -c bar.c -o bar.o\n\tar r libfoo.a bar.o\n";
4785 let parsed = parse(input);
4786 assert!(
4787 parsed.errors.is_empty(),
4788 "Should parse archive member without errors"
4789 );
4790
4791 let makefile = parsed.root();
4792 let rules: Vec<_> = makefile.rules().collect();
4793 assert_eq!(rules.len(), 1);
4794
4795 let target_text = rules[0].targets().next().unwrap();
4797 assert_eq!(target_text, "libfoo.a(bar.o)");
4798 }
4799
4800 #[test]
4801 fn test_archive_member_multiple_members() {
4802 let input = "libfoo.a(bar.o baz.o): bar.c baz.c\n\tgcc -c bar.c baz.c\n\tar r libfoo.a bar.o baz.o\n";
4804 let parsed = parse(input);
4805 assert!(
4806 parsed.errors.is_empty(),
4807 "Should parse multiple archive members"
4808 );
4809
4810 let makefile = parsed.root();
4811 let rules: Vec<_> = makefile.rules().collect();
4812 assert_eq!(rules.len(), 1);
4813 }
4814
4815 #[test]
4816 fn test_archive_member_in_dependencies() {
4817 let input =
4819 "program: main.o libfoo.a(bar.o) libfoo.a(baz.o)\n\tgcc -o program main.o libfoo.a\n";
4820 let parsed = parse(input);
4821 assert!(
4822 parsed.errors.is_empty(),
4823 "Should parse archive members in dependencies"
4824 );
4825
4826 let makefile = parsed.root();
4827 let rules: Vec<_> = makefile.rules().collect();
4828 assert_eq!(rules.len(), 1);
4829 }
4830
4831 #[test]
4832 fn test_archive_member_with_variables() {
4833 let input = "$(LIB)($(OBJ)): $(SRC)\n\t$(CC) -c $(SRC)\n\t$(AR) r $(LIB) $(OBJ)\n";
4835 let parsed = parse(input);
4836 assert!(
4838 parsed.errors.is_empty(),
4839 "Should parse archive members with variables"
4840 );
4841 }
4842
4843 #[test]
4844 fn test_archive_member_ast_access() {
4845 let input = "libtest.a(foo.o bar.o): foo.c bar.c\n\tgcc -c foo.c bar.c\n";
4847 let parsed = parse(input);
4848 let makefile = parsed.root();
4849
4850 let archive_member_count = makefile
4852 .syntax()
4853 .descendants()
4854 .filter(|n| n.kind() == ARCHIVE_MEMBERS)
4855 .count();
4856
4857 assert!(
4858 archive_member_count > 0,
4859 "Should find ARCHIVE_MEMBERS nodes in AST"
4860 );
4861 }
4862
4863 #[test]
4864 fn test_large_makefile_performance() {
4865 let mut makefile = Makefile::new();
4867
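        // Build 100 rules programmatically, then replace the one in the middle.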
4868 for i in 0..100 {
4870 let rule_name = format!("rule{}", i);
4871 let _rule = makefile
4872 .add_rule(&rule_name)
4873 .push_command(&format!("command{}", i));
4874 }
4875
4876 assert_eq!(makefile.rules().count(), 100);
4877
4878 let new_rule: Rule = "middle_rule:\n\tmiddle_command\n".parse().unwrap();
4880 makefile.replace_rule(50, new_rule).unwrap();
4881
4882 let rule_50_targets: Vec<_> = makefile.rules().nth(50).unwrap().targets().collect();
4884 assert_eq!(rule_50_targets, vec!["middle_rule"]);
4885
4886 assert_eq!(makefile.rules().count(), 100);
4887 }
4888
4889 #[test]
4890 fn test_complex_recipe_manipulation() {
4891 let mut complex_rule: Rule = r#"complex:
4892 @echo "Starting build"
4893 $(CC) $(CFLAGS) -o $@ $<
4894 @echo "Build complete"
4895 chmod +x $@
4896"#
4897 .parse()
4898 .unwrap();
4899
4900 assert_eq!(complex_rule.recipe_count(), 4);
4901
4902 complex_rule.remove_command(0); // drop the leading echo
4903 complex_rule.remove_command(1); // after the shift, index 1 is the "Build complete" echo
4904 
4905 let final_recipes: Vec<_> = complex_rule.recipes().collect();
4907 assert_eq!(final_recipes.len(), 2);
4908 assert!(final_recipes[0].contains("$(CC)"));
4909 assert!(final_recipes[1].contains("chmod"));
4910 }
4911
4912 #[test]
4913 fn test_variable_definition_remove() {
4914 let makefile: Makefile = r#"VAR1 = value1
4915VAR2 = value2
4916VAR3 = value3
4917"#
4918 .parse()
4919 .unwrap();
4920
4921 assert_eq!(makefile.variable_definitions().count(), 3);
4923
4924 let mut var2 = makefile
4926 .variable_definitions()
4927 .nth(1)
4928 .expect("Should have second variable");
4929 assert_eq!(var2.name(), Some("VAR2".to_string()));
4930 var2.remove();
4931
4932 assert_eq!(makefile.variable_definitions().count(), 2);
4934 let var_names: Vec<_> = makefile
4935 .variable_definitions()
4936 .filter_map(|v| v.name())
4937 .collect();
4938 assert_eq!(var_names, vec!["VAR1", "VAR3"]);
4939 }
4940
4941 #[test]
4942 fn test_variable_definition_set_value() {
4943 let makefile: Makefile = "VAR = old_value\n".parse().unwrap();
4944
4945 let mut var = makefile
4946 .variable_definitions()
4947 .next()
4948 .expect("Should have variable");
4949 assert_eq!(var.raw_value(), Some("old_value".to_string()));
4950
4951 var.set_value("new_value");
4953
4954 assert_eq!(var.raw_value(), Some("new_value".to_string()));
4956 assert!(makefile.code().contains("VAR = new_value"));
4957 }
4958
4959 #[test]
4960 fn test_variable_definition_set_value_preserves_format() {
4961 let makefile: Makefile = "export VAR := old_value\n".parse().unwrap();
4962
4963 let mut var = makefile
4964 .variable_definitions()
4965 .next()
4966 .expect("Should have variable");
4967 assert_eq!(var.raw_value(), Some("old_value".to_string()));
4968
4969 var.set_value("new_value");
4971
4972 assert_eq!(var.raw_value(), Some("new_value".to_string()));
4974 let code = makefile.code();
4975 assert!(code.contains("export"), "Should preserve export prefix");
4976 assert!(code.contains(":="), "Should preserve := operator");
4977 assert!(code.contains("new_value"), "Should have new value");
4978 }
4979
4980 #[test]
4981 fn test_makefile_find_variable() {
4982 let makefile: Makefile = r#"VAR1 = value1
4983VAR2 = value2
4984VAR3 = value3
4985"#
4986 .parse()
4987 .unwrap();
4988
4989 let vars: Vec<_> = makefile.find_variable("VAR2").collect();
4991 assert_eq!(vars.len(), 1);
4992 assert_eq!(vars[0].name(), Some("VAR2".to_string()));
4993 assert_eq!(vars[0].raw_value(), Some("value2".to_string()));
4994
4995 assert_eq!(makefile.find_variable("NONEXISTENT").count(), 0);
4997 }
4998
4999 #[test]
5000 fn test_makefile_find_variable_with_export() {
5001 let makefile: Makefile = r#"VAR1 = value1
5002export VAR2 := value2
5003VAR3 = value3
5004"#
5005 .parse()
5006 .unwrap();
5007
5008 let vars: Vec<_> = makefile.find_variable("VAR2").collect();
5010 assert_eq!(vars.len(), 1);
5011 assert_eq!(vars[0].name(), Some("VAR2".to_string()));
5012 assert_eq!(vars[0].raw_value(), Some("value2".to_string()));
5013 }
5014
5015 #[test]
5016 fn test_variable_definition_is_export() {
5017 let makefile: Makefile = r#"VAR1 = value1
5018export VAR2 := value2
5019export VAR3 = value3
5020VAR4 := value4
5021"#
5022 .parse()
5023 .unwrap();
5024
5025 let vars: Vec<_> = makefile.variable_definitions().collect();
5026 assert_eq!(vars.len(), 4);
5027
5028 assert!(!vars[0].is_export());
5029 assert!(vars[1].is_export());
5030 assert!(vars[2].is_export());
5031 assert!(!vars[3].is_export());
5032 }
5033
5034 #[test]
5035 fn test_makefile_find_variable_multiple() {
5036 let makefile: Makefile = r#"VAR1 = value1
5037VAR1 = value2
5038VAR2 = other
5039VAR1 = value3
5040"#
5041 .parse()
5042 .unwrap();
5043
5044 let vars: Vec<_> = makefile.find_variable("VAR1").collect();
5046 assert_eq!(vars.len(), 3);
5047 assert_eq!(vars[0].raw_value(), Some("value1".to_string()));
5048 assert_eq!(vars[1].raw_value(), Some("value2".to_string()));
5049 assert_eq!(vars[2].raw_value(), Some("value3".to_string()));
5050
5051 let var2s: Vec<_> = makefile.find_variable("VAR2").collect();
5053 assert_eq!(var2s.len(), 1);
5054 assert_eq!(var2s[0].raw_value(), Some("other".to_string()));
5055 }

    #[test]
    fn test_variable_remove_and_find() {
        let makefile: Makefile = r#"VAR1 = value1
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .find_variable("VAR2")
            .next()
            .expect("Should find VAR2");
        var2.remove();

        // VAR2 is gone; the other variables are untouched.
        assert_eq!(makefile.find_variable("VAR2").count(), 0);

        assert_eq!(makefile.find_variable("VAR1").count(), 1);
        assert_eq!(makefile.find_variable("VAR3").count(), 1);
    }

    #[test]
    fn test_variable_remove_with_comment() {
        let makefile: Makefile = r#"VAR1 = value1
# This is a comment about VAR2
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        assert_eq!(var2.name(), Some("VAR2".to_string()));
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\nVAR3 = value3\n");
    }

    #[test]
    fn test_variable_remove_with_multiple_comments() {
        let makefile: Makefile = r#"VAR1 = value1
# Comment line 1
# Comment line 2
# Comment line 3
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\nVAR3 = value3\n");
    }

    #[test]
    fn test_variable_remove_with_empty_line() {
        let makefile: Makefile = r#"VAR1 = value1

# Comment about VAR2
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\nVAR3 = value3\n");
    }

    #[test]
    fn test_variable_remove_with_multiple_empty_lines() {
        let makefile: Makefile = r#"VAR1 = value1


# Comment about VAR2
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\n\nVAR3 = value3\n");
    }

    #[test]
    fn test_rule_remove_with_comment() {
        let makefile: Makefile = r#"rule1:
	command1

# Comment about rule2
rule2:
	command2
rule3:
	command3
"#
        .parse()
        .unwrap();

        let rule2 = makefile.rules().nth(1).expect("Should have second rule");
        rule2.remove().unwrap();

        assert_eq!(
            makefile.code(),
            "rule1:\n\tcommand1\n\nrule3:\n\tcommand3\n"
        );
    }

    #[test]
    fn test_variable_remove_preserves_shebang() {
        let makefile: Makefile = r#"#!/usr/bin/make -f
# This is a regular comment
VAR1 = value1
VAR2 = value2
"#
        .parse()
        .unwrap();

        let mut var1 = makefile.variable_definitions().next().unwrap();
        var1.remove();

        // The shebang line survives, but the comment attached to VAR1 goes with it.
        let code = makefile.code();
        assert!(code.starts_with("#!/usr/bin/make -f"));
        assert!(!code.contains("regular comment"));
        assert!(!code.contains("VAR1"));
        assert!(code.contains("VAR2"));
    }

    #[test]
    fn test_variable_remove_preserves_subsequent_comments() {
        let makefile: Makefile = r#"VAR1 = value1
# Comment about VAR2
VAR2 = value2

# Comment about VAR3
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        let code = makefile.code();
        assert_eq!(
            code,
            "VAR1 = value1\n\n# Comment about VAR3\nVAR3 = value3\n"
        );
    }

    #[test]
    fn test_variable_remove_after_shebang_preserves_empty_line() {
        let makefile: Makefile = r#"#!/usr/bin/make -f
export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed

%:
	dh $@
"#
        .parse()
        .unwrap();

        let mut var = makefile.variable_definitions().next().unwrap();
        var.remove();

        assert_eq!(makefile.code(), "#!/usr/bin/make -f\n\n%:\n\tdh $@\n");
    }

    #[test]
    fn test_rule_add_prerequisite() {
        let mut rule: Rule = "target: dep1\n".parse().unwrap();
        rule.add_prerequisite("dep2").unwrap();
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
    }

    #[test]
    fn test_rule_remove_prerequisite() {
        let mut rule: Rule = "target: dep1 dep2 dep3\n".parse().unwrap();
        assert!(rule.remove_prerequisite("dep2").unwrap());
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep3"]
        );
        assert!(!rule.remove_prerequisite("nonexistent").unwrap());
    }

    #[test]
    fn test_rule_set_prerequisites() {
        let mut rule: Rule = "target: old_dep\n".parse().unwrap();
        rule.set_prerequisites(vec!["new_dep1", "new_dep2"])
            .unwrap();
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["new_dep1", "new_dep2"]
        );
    }

    #[test]
    fn test_rule_set_prerequisites_empty() {
        let mut rule: Rule = "target: dep1 dep2\n".parse().unwrap();
        rule.set_prerequisites(vec![]).unwrap();
        assert_eq!(rule.prerequisites().count(), 0);
    }

    #[test]
    fn test_rule_add_target() {
        let mut rule: Rule = "target1: dep1\n".parse().unwrap();
        rule.add_target("target2").unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target1", "target2"]
        );
    }

    #[test]
    fn test_rule_set_targets() {
        let mut rule: Rule = "old_target: dependency\n".parse().unwrap();
        rule.set_targets(vec!["new_target1", "new_target2"])
            .unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["new_target1", "new_target2"]
        );
    }

    #[test]
    fn test_rule_set_targets_empty() {
        let mut rule: Rule = "target: dep1\n".parse().unwrap();
        let result = rule.set_targets(vec![]);
        assert!(result.is_err());
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["target"]);
    }

    #[test]
    fn test_rule_has_target() {
        let rule: Rule = "target1 target2: dependency\n".parse().unwrap();
        assert!(rule.has_target("target1"));
        assert!(rule.has_target("target2"));
        assert!(!rule.has_target("target3"));
        assert!(!rule.has_target("nonexistent"));
    }

    #[test]
    fn test_rule_rename_target() {
        let mut rule: Rule = "old_target: dependency\n".parse().unwrap();
        assert!(rule.rename_target("old_target", "new_target").unwrap());
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["new_target"]);
        assert!(!rule.rename_target("nonexistent", "something").unwrap());
    }

    #[test]
    fn test_rule_rename_target_multiple() {
        let mut rule: Rule = "target1 target2 target3: dependency\n".parse().unwrap();
        assert!(rule.rename_target("target2", "renamed_target").unwrap());
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target1", "renamed_target", "target3"]
        );
    }

    #[test]
    fn test_rule_remove_target() {
        let mut rule: Rule = "target1 target2 target3: dependency\n".parse().unwrap();
        assert!(rule.remove_target("target2").unwrap());
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target1", "target3"]
        );
        assert!(!rule.remove_target("nonexistent").unwrap());
    }

    #[test]
    fn test_rule_remove_target_last() {
        let mut rule: Rule = "single_target: dependency\n".parse().unwrap();
        let result = rule.remove_target("single_target");
        assert!(result.is_err());
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["single_target"]);
    }

    #[test]
    fn test_rule_target_manipulation_preserves_prerequisites() {
        let mut rule: Rule = "target1 target2: dep1 dep2\n\tcommand".parse().unwrap();

        // Removing a target leaves prerequisites and recipes intact.
        rule.remove_target("target1").unwrap();
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["target2"]);
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
        assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);

        // Adding a target leaves prerequisites and recipes intact.
        rule.add_target("target3").unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target2", "target3"]
        );
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
        assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);

        // Renaming a target leaves prerequisites and recipes intact.
        rule.rename_target("target2", "renamed").unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["renamed", "target3"]
        );
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
        assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
    }

    #[test]
    fn test_rule_remove() {
        let makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
        let rule = makefile.find_rule_by_target("rule1").unwrap();
        rule.remove().unwrap();
        assert_eq!(makefile.rules().count(), 1);
        assert!(makefile.find_rule_by_target("rule1").is_none());
        assert!(makefile.find_rule_by_target("rule2").is_some());
    }

    #[test]
    fn test_makefile_find_rule_by_target() {
        let makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
        let rule = makefile.find_rule_by_target("rule2");
        assert!(rule.is_some());
        assert_eq!(rule.unwrap().targets().collect::<Vec<_>>(), vec!["rule2"]);
        assert!(makefile.find_rule_by_target("nonexistent").is_none());
    }

    #[test]
    fn test_makefile_find_rules_by_target() {
        let makefile: Makefile = "rule1:\n\tcommand1\nrule1:\n\tcommand2\nrule2:\n\tcommand3\n"
            .parse()
            .unwrap();
        assert_eq!(makefile.find_rules_by_target("rule1").count(), 2);
        assert_eq!(makefile.find_rules_by_target("rule2").count(), 1);
        assert_eq!(makefile.find_rules_by_target("nonexistent").count(), 0);
    }

    #[test]
    fn test_makefile_add_phony_target() {
        let mut makefile = Makefile::new();
        makefile.add_phony_target("clean").unwrap();
        assert!(makefile.is_phony("clean"));
        assert_eq!(makefile.phony_targets().collect::<Vec<_>>(), vec!["clean"]);
    }

    #[test]
    fn test_makefile_add_phony_target_existing() {
        let mut makefile: Makefile = ".PHONY: test\n".parse().unwrap();
        makefile.add_phony_target("clean").unwrap();
        assert!(makefile.is_phony("test"));
        assert!(makefile.is_phony("clean"));
        let targets: Vec<_> = makefile.phony_targets().collect();
        assert!(targets.contains(&"test".to_string()));
        assert!(targets.contains(&"clean".to_string()));
    }

    #[test]
    fn test_makefile_remove_phony_target() {
        let mut makefile: Makefile = ".PHONY: clean test\n".parse().unwrap();
        assert!(makefile.remove_phony_target("clean").unwrap());
        assert!(!makefile.is_phony("clean"));
        assert!(makefile.is_phony("test"));
        assert!(!makefile.remove_phony_target("nonexistent").unwrap());
    }

    #[test]
    fn test_makefile_remove_phony_target_last() {
        let mut makefile: Makefile = ".PHONY: clean\n".parse().unwrap();
        assert!(makefile.remove_phony_target("clean").unwrap());
        assert!(!makefile.is_phony("clean"));
        assert!(makefile.find_rule_by_target(".PHONY").is_none());
    }

    #[test]
    fn test_makefile_is_phony() {
        let makefile: Makefile = ".PHONY: clean test\n".parse().unwrap();
        assert!(makefile.is_phony("clean"));
        assert!(makefile.is_phony("test"));
        assert!(!makefile.is_phony("build"));
    }

    #[test]
    fn test_makefile_phony_targets() {
        let makefile: Makefile = ".PHONY: clean test build\n".parse().unwrap();
        let phony_targets: Vec<_> = makefile.phony_targets().collect();
        assert_eq!(phony_targets, vec!["clean", "test", "build"]);
    }

    #[test]
    fn test_makefile_phony_targets_empty() {
        let makefile = Makefile::new();
        assert_eq!(makefile.phony_targets().count(), 0);
    }
}