use crate::lex::lex;
use crate::SyntaxKind;
use crate::SyntaxKind::*;
use rowan::ast::AstNode;
use std::str::FromStr;
6
7#[derive(Debug)]
8pub enum Error {
10 Io(std::io::Error),
12
13 Parse(ParseError),
15}
16
17impl std::fmt::Display for Error {
18 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
19 match &self {
20 Error::Io(e) => write!(f, "IO error: {}", e),
21 Error::Parse(e) => write!(f, "Parse error: {}", e),
22 }
23 }
24}
25
26impl From<std::io::Error> for Error {
27 fn from(e: std::io::Error) -> Self {
28 Error::Io(e)
29 }
30}
31
32impl std::error::Error for Error {}
33
34#[derive(Debug, Clone, PartialEq, Eq, Hash)]
35pub struct ParseError {
37 pub errors: Vec<ErrorInfo>,
39}
40
41#[derive(Debug, Clone, PartialEq, Eq, Hash)]
42pub struct ErrorInfo {
44 pub message: String,
46 pub line: usize,
48 pub context: String,
50}
51
52impl std::fmt::Display for ParseError {
53 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
54 for err in &self.errors {
55 writeln!(f, "Error at line {}: {}", err.line, err.message)?;
56 writeln!(f, "{}| {}", err.line, err.context)?;
57 }
58 Ok(())
59 }
60}
61
62impl std::error::Error for ParseError {}
63
64impl From<ParseError> for Error {
65 fn from(e: ParseError) -> Self {
66 Error::Parse(e)
67 }
68}
69
70#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
74pub enum Lang {}
75impl rowan::Language for Lang {
76 type Kind = SyntaxKind;
77 fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind {
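        // SAFETY: this assumes SyntaxKind is repr(u16) and that `raw` was produced
        // by `kind_to_raw`, so the value is always a valid SyntaxKind discriminant.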
78 unsafe { std::mem::transmute::<u16, SyntaxKind>(raw.0) }
79 }
80 fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind {
81 kind.into()
82 }
83}
84
85use rowan::GreenNode;
88
89use rowan::GreenNodeBuilder;
93
94#[derive(Debug)]
97pub(crate) struct Parse {
98 pub(crate) green_node: GreenNode,
99 #[allow(unused)]
100 pub(crate) errors: Vec<ErrorInfo>,
101}
102
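/// Parse `text` into a lossless green tree plus any collected errors.
///
/// Note: the token list produced by the lexer is reversed before parsing, so
/// `tokens.last()` peeks at the current token and `tokens.pop()` (via `bump`)
/// consumes it.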
103pub(crate) fn parse(text: &str) -> Parse {
104 struct Parser {
105 tokens: Vec<(SyntaxKind, String)>,
108 builder: GreenNodeBuilder<'static>,
110 errors: Vec<ErrorInfo>,
113 original_text: String,
115 }
116
117 impl Parser {
118 fn error(&mut self, msg: String) {
119 self.builder.start_node(ERROR.into());
120
121 let (line, context) = if self.current() == Some(INDENT) {
122 let lines: Vec<&str> = self.original_text.lines().collect();
124 let tab_line = lines
125 .iter()
126 .enumerate()
127 .find(|(_, line)| line.starts_with('\t'))
128 .map(|(i, _)| i + 1)
129 .unwrap_or(1);
130
131 let next_line = tab_line + 1;
133 if next_line <= lines.len() {
134 (next_line, lines[next_line - 1].to_string())
135 } else {
136 (tab_line, lines[tab_line - 1].to_string())
137 }
138 } else {
139 let line = self.get_line_number_for_position(self.tokens.len());
140 (line, self.get_context_for_line(line))
141 };
142
143 let message = if self.current() == Some(INDENT) && !msg.contains("indented") {
144 if !self.tokens.is_empty() && self.tokens[self.tokens.len() - 1].0 == IDENTIFIER {
145 "expected ':'".to_string()
146 } else {
147 "indented line not part of a rule".to_string()
148 }
149 } else {
150 msg
151 };
152
153 self.errors.push(ErrorInfo {
154 message,
155 line,
156 context,
157 });
158
159 if self.current().is_some() {
160 self.bump();
161 }
162 self.builder.finish_node();
163 }
164
165 fn get_line_number_for_position(&self, position: usize) -> usize {
166 if position >= self.tokens.len() {
167 return self.original_text.matches('\n').count() + 1;
168 }
169
170 self.tokens[0..position]
172 .iter()
173 .filter(|(kind, _)| *kind == NEWLINE)
174 .count()
175 + 1
176 }
177
178 fn get_context_for_line(&self, line_number: usize) -> String {
179 self.original_text
180 .lines()
181 .nth(line_number - 1)
182 .unwrap_or("")
183 .to_string()
184 }
185
186 fn parse_recipe_line(&mut self) {
187 self.builder.start_node(RECIPE.into());
188
189 if self.current() != Some(INDENT) {
191 self.error("recipe line must start with a tab".to_string());
192 self.builder.finish_node();
193 return;
194 }
195 self.bump();
196
197 while self.current().is_some() && self.current() != Some(NEWLINE) {
200 self.bump();
201 }
202
203 if self.current() == Some(NEWLINE) {
205 self.bump();
206 }
207
208 self.builder.finish_node();
209 }
210
211 fn parse_rule_target(&mut self) -> bool {
212 match self.current() {
213 Some(IDENTIFIER) => {
214 if self.is_archive_member() {
216 self.parse_archive_member();
217 } else {
218 self.bump();
219 }
220 true
221 }
222 Some(DOLLAR) => {
223 self.parse_variable_reference();
224 true
225 }
226 _ => {
227 self.error("expected rule target".to_string());
228 false
229 }
230 }
231 }
232
233 fn is_archive_member(&self) -> bool {
234 if self.tokens.len() < 2 {
237 return false;
238 }
239
240 let current_is_identifier = self.current() == Some(IDENTIFIER);
242 let next_is_lparen =
243 self.tokens.len() > 1 && self.tokens[self.tokens.len() - 2].0 == LPAREN;
244
245 current_is_identifier && next_is_lparen
246 }
247
248 fn parse_archive_member(&mut self) {
249 if self.current() == Some(IDENTIFIER) {
260 self.bump();
261 }
262
263 if self.current() == Some(LPAREN) {
265 self.bump();
266
267 self.builder.start_node(ARCHIVE_MEMBERS.into());
269
270 while self.current().is_some() && self.current() != Some(RPAREN) {
272 match self.current() {
273 Some(IDENTIFIER) | Some(TEXT) => {
274 self.builder.start_node(ARCHIVE_MEMBER.into());
276 self.bump();
277 self.builder.finish_node();
278 }
279 Some(WHITESPACE) => self.bump(),
280 Some(DOLLAR) => {
281 self.builder.start_node(ARCHIVE_MEMBER.into());
283 self.parse_variable_reference();
284 self.builder.finish_node();
285 }
286 _ => break,
287 }
288 }
289
290 self.builder.finish_node();
292
293 if self.current() == Some(RPAREN) {
295 self.bump();
296 } else {
297 self.error("expected ')' to close archive member".to_string());
298 }
299 }
300 }
301
        fn parse_rule_dependencies(&mut self) {
            self.builder.start_node(PREREQUISITES.into());

            while self.current().is_some() && self.current() != Some(NEWLINE) {
                match self.current() {
                    Some(WHITESPACE) => {
                        self.bump();
                    }
                    Some(IDENTIFIER) => {
                        self.builder.start_node(PREREQUISITE.into());

                        if self.is_archive_member() {
                            self.parse_archive_member();
                        } else {
                            self.bump();
                        }

                        self.builder.finish_node();
                    }
                    Some(DOLLAR) => {
                        self.builder.start_node(PREREQUISITE.into());

                        self.bump();
                        if self.current() == Some(LPAREN) {
                            self.bump();
                            let mut paren_count = 1;

                            while self.current().is_some() && paren_count > 0 {
                                if self.current() == Some(LPAREN) {
                                    paren_count += 1;
                                } else if self.current() == Some(RPAREN) {
                                    paren_count -= 1;
                                }
                                self.bump();
                            }
                        } else if self.current().is_some() {
                            self.bump();
                        }

                        self.builder.finish_node();
                    }
                    _ => {
                        self.bump();
                    }
                }
            }

            self.builder.finish_node();
        }
359
360 fn parse_rule_recipes(&mut self) {
361 loop {
362 match self.current() {
363 Some(INDENT) => {
364 self.parse_recipe_line();
365 }
366 Some(NEWLINE) => {
367 self.bump();
371 }
372 _ => break,
373 }
374 }
375 }
376
377 fn find_and_consume_colon(&mut self) -> bool {
378 self.skip_ws();
380
381 if self.current() == Some(OPERATOR) && self.tokens.last().unwrap().1 == ":" {
383 self.bump();
384 return true;
385 }
386
387 let has_colon = self
389 .tokens
390 .iter()
391 .rev()
392 .any(|(kind, text)| *kind == OPERATOR && text == ":");
393
394 if has_colon {
395 while self.current().is_some() {
397 if self.current() == Some(OPERATOR)
398 && self.tokens.last().map(|(_, text)| text.as_str()) == Some(":")
399 {
400 self.bump();
401 return true;
402 }
403 self.bump();
404 }
405 }
406
407 self.error("expected ':'".to_string());
408 false
409 }
410
411 fn parse_rule(&mut self) {
412 self.builder.start_node(RULE.into());
413
414 self.skip_ws();
416 self.builder.start_node(TARGETS.into());
417 let has_target = self.parse_rule_targets();
418 self.builder.finish_node();
419
420 let has_colon = if has_target {
422 self.find_and_consume_colon()
423 } else {
424 false
425 };
426
427 if has_target && has_colon {
429 self.skip_ws();
430 self.parse_rule_dependencies();
431 self.expect_eol();
432
433 self.parse_rule_recipes();
435 }
436
437 self.builder.finish_node();
438 }
439
440 fn parse_rule_targets(&mut self) -> bool {
441 let has_first_target = self.parse_rule_target();
443
444 if !has_first_target {
445 return false;
446 }
447
448 loop {
450 self.skip_ws();
451
452 if self.current() == Some(OPERATOR) && self.tokens.last().unwrap().1 == ":" {
454 break;
455 }
456
457 match self.current() {
459 Some(IDENTIFIER) | Some(DOLLAR) => {
460 if !self.parse_rule_target() {
461 break;
462 }
463 }
464 _ => break,
465 }
466 }
467
468 true
469 }
470
471 fn parse_comment(&mut self) {
472 if self.current() == Some(COMMENT) {
                self.bump();
                if self.current() == Some(NEWLINE) {
                    self.bump();
                } else if self.current() == Some(WHITESPACE) {
479 self.skip_ws();
481 if self.current() == Some(NEWLINE) {
482 self.bump();
483 }
484 }
485 } else {
487 self.error("expected comment".to_string());
488 }
489 }
490
491 fn parse_assignment(&mut self) {
492 self.builder.start_node(VARIABLE.into());
493
494 self.skip_ws();
496 if self.current() == Some(IDENTIFIER) && self.tokens.last().unwrap().1 == "export" {
497 self.bump();
498 self.skip_ws();
499 }
500
501 match self.current() {
503 Some(IDENTIFIER) => self.bump(),
504 Some(DOLLAR) => self.parse_variable_reference(),
505 _ => {
506 self.error("expected variable name".to_string());
507 self.builder.finish_node();
508 return;
509 }
510 }
511
512 self.skip_ws();
514 match self.current() {
515 Some(OPERATOR) => {
516 let op = &self.tokens.last().unwrap().1;
517 if ["=", ":=", "::=", ":::=", "+=", "?=", "!="].contains(&op.as_str()) {
518 self.bump();
519 self.skip_ws();
520
521 self.builder.start_node(EXPR.into());
523 while self.current().is_some() && self.current() != Some(NEWLINE) {
524 self.bump();
525 }
526 self.builder.finish_node();
527
528 if self.current() == Some(NEWLINE) {
530 self.bump();
531 } else {
532 self.error("expected newline after variable value".to_string());
533 }
534 } else {
535 self.error(format!("invalid assignment operator: {}", op));
536 }
537 }
538 _ => self.error("expected assignment operator".to_string()),
539 }
540
541 self.builder.finish_node();
542 }
543
544 fn parse_variable_reference(&mut self) {
545 self.builder.start_node(EXPR.into());
            self.bump();
            if self.current() == Some(LPAREN) {
                self.bump();
                let mut is_function = false;
553
554 if self.current() == Some(IDENTIFIER) {
555 let function_name = &self.tokens.last().unwrap().1;
556 let known_functions = [
558 "shell", "wildcard", "call", "eval", "file", "abspath", "dir",
559 ];
560 if known_functions.contains(&function_name.as_str()) {
561 is_function = true;
562 }
563 }
564
565 if is_function {
566 self.bump();
568
569 self.consume_balanced_parens(1);
571 } else {
572 self.parse_parenthesized_expr_internal(true);
574 }
575 } else {
576 self.error("expected ( after $ in variable reference".to_string());
577 }
578
579 self.builder.finish_node();
580 }
581
582 fn parse_parenthesized_expr(&mut self) {
584 self.builder.start_node(EXPR.into());
585
586 if self.current() != Some(LPAREN) {
587 self.error("expected opening parenthesis".to_string());
588 self.builder.finish_node();
589 return;
590 }
591
            self.bump();
            self.parse_parenthesized_expr_internal(false);
594 self.builder.finish_node();
595 }
596
597 fn parse_parenthesized_expr_internal(&mut self, is_variable_ref: bool) {
599 let mut paren_count = 1;
600
601 while paren_count > 0 && self.current().is_some() {
602 match self.current() {
603 Some(LPAREN) => {
604 paren_count += 1;
605 self.bump();
606 self.builder.start_node(EXPR.into());
608 }
609 Some(RPAREN) => {
610 paren_count -= 1;
611 self.bump();
612 if paren_count > 0 {
613 self.builder.finish_node();
614 }
615 }
616 Some(QUOTE) => {
617 self.parse_quoted_string();
619 }
620 Some(DOLLAR) => {
621 self.parse_variable_reference();
623 }
624 Some(_) => self.bump(),
625 None => {
626 self.error(if is_variable_ref {
627 "unclosed variable reference".to_string()
628 } else {
629 "unclosed parenthesis".to_string()
630 });
631 break;
632 }
633 }
634 }
635
636 if !is_variable_ref {
637 self.skip_ws();
638 self.expect_eol();
639 }
640 }
641
642 fn parse_quoted_string(&mut self) {
            self.bump();
            while !self.is_at_eof() && self.current() != Some(QUOTE) {
646 self.bump();
647 }
648 if self.current() == Some(QUOTE) {
649 self.bump();
650 }
651 }
652
653 fn parse_conditional_keyword(&mut self) -> Option<String> {
654 if self.current() != Some(IDENTIFIER) {
655 self.error(
656 "expected conditional keyword (ifdef, ifndef, ifeq, or ifneq)".to_string(),
657 );
658 return None;
659 }
660
661 let token = self.tokens.last().unwrap().1.clone();
662 if !["ifdef", "ifndef", "ifeq", "ifneq"].contains(&token.as_str()) {
663 self.error(format!("unknown conditional directive: {}", token));
664 return None;
665 }
666
667 self.bump();
668 Some(token)
669 }
670
671 fn parse_simple_condition(&mut self) {
672 self.builder.start_node(EXPR.into());
673
674 self.skip_ws();
676
677 let mut found_var = false;
679
680 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
681 match self.current() {
682 Some(WHITESPACE) => self.skip_ws(),
683 Some(DOLLAR) => {
684 found_var = true;
685 self.parse_variable_reference();
686 }
687 Some(_) => {
688 found_var = true;
690 self.bump();
691 }
692 None => break,
693 }
694 }
695
696 if !found_var {
697 self.error("expected condition after conditional directive".to_string());
699 }
700
701 self.builder.finish_node();
702
703 if self.current() == Some(NEWLINE) {
705 self.bump();
706 } else if !self.is_at_eof() {
707 self.skip_until_newline();
708 }
709 }
710
711 fn is_conditional_directive(&self, token: &str) -> bool {
713 token == "ifdef"
714 || token == "ifndef"
715 || token == "ifeq"
716 || token == "ifneq"
717 || token == "else"
718 || token == "elif"
719 || token == "endif"
720 }
721
722 fn handle_conditional_token(&mut self, token: &str, depth: &mut usize) -> bool {
724 match token {
725 "ifdef" | "ifndef" | "ifeq" | "ifneq" => {
726 *depth += 1;
727 self.parse_conditional();
728 true
729 }
730 "else" | "elif" => {
731 if *depth == 0 {
733 self.error(format!("{} without matching if", token));
734 self.bump();
736 false
737 } else {
738 self.bump();
740
741 if token == "elif" {
743 self.skip_ws();
744
745 if self.current() == Some(IDENTIFIER) {
747 let next_token = &self.tokens.last().unwrap().1;
748 if next_token == "ifeq"
749 || next_token == "ifdef"
750 || next_token == "ifndef"
751 || next_token == "ifneq"
752 {
753 match next_token.as_str() {
755 "ifdef" | "ifndef" => {
                                            self.bump();
                                            self.skip_ws();
758 self.parse_simple_condition();
759 }
760 "ifeq" | "ifneq" => {
                                            self.bump();
                                            self.skip_ws();
763 self.parse_parenthesized_expr();
764 }
765 _ => unreachable!(),
766 }
767 } else {
768 self.builder.start_node(EXPR.into());
770 while self.current().is_some()
772 && self.current() != Some(NEWLINE)
773 {
774 self.bump();
775 }
776 self.builder.finish_node();
777 if self.current() == Some(NEWLINE) {
778 self.bump();
779 }
780 }
781 } else {
782 self.builder.start_node(EXPR.into());
784 while self.current().is_some() && self.current() != Some(NEWLINE) {
786 self.bump();
787 }
788 self.builder.finish_node();
789 if self.current() == Some(NEWLINE) {
790 self.bump();
791 }
792 }
793 } else {
794 self.expect_eol();
796 }
797 true
798 }
799 }
800 "endif" => {
801 if *depth == 0 {
803 self.error("endif without matching if".to_string());
804 self.bump();
806 false
807 } else {
808 *depth -= 1;
809 self.bump();
811
812 self.skip_ws();
814
815 if self.current() == Some(COMMENT) {
820 self.parse_comment();
821 } else if self.current() == Some(NEWLINE) {
822 self.bump();
823 } else if self.current() == Some(WHITESPACE) {
824 self.skip_ws();
826 if self.current() == Some(NEWLINE) {
827 self.bump();
828 }
829 } else if !self.is_at_eof() {
831 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
834 self.bump();
835 }
836 if self.current() == Some(NEWLINE) {
837 self.bump();
838 }
839 }
840 true
843 }
844 }
845 _ => false,
846 }
847 }
848
849 fn parse_conditional(&mut self) {
850 self.builder.start_node(CONDITIONAL.into());
851
852 let Some(token) = self.parse_conditional_keyword() else {
854 self.skip_until_newline();
855 self.builder.finish_node();
856 return;
857 };
858
859 self.skip_ws();
861
862 match token.as_str() {
864 "ifdef" | "ifndef" => {
865 self.parse_simple_condition();
866 }
867 "ifeq" | "ifneq" => {
868 self.parse_parenthesized_expr();
869 }
870 _ => unreachable!("Invalid conditional token"),
871 }
872
873 self.skip_ws();
875 if self.current() == Some(COMMENT) {
876 self.parse_comment();
877 } else {
878 self.expect_eol();
879 }
880
881 let mut depth = 1;
883
884 let mut position_count = std::collections::HashMap::<usize, usize>::new();
            let max_repetitions = 15;
            while depth > 0 && !self.is_at_eof() {
889 let current_pos = self.tokens.len();
891 *position_count.entry(current_pos).or_insert(0) += 1;
892
                if position_count.get(&current_pos).unwrap() > &max_repetitions {
896 break;
899 }
900
901 match self.current() {
902 None => {
903 self.error("unterminated conditional (missing endif)".to_string());
904 break;
905 }
906 Some(IDENTIFIER) => {
907 let token = self.tokens.last().unwrap().1.clone();
908 if !self.handle_conditional_token(&token, &mut depth) {
909 if token == "include" || token == "-include" || token == "sinclude" {
910 self.parse_include();
911 } else {
912 self.parse_normal_content();
913 }
914 }
915 }
916 Some(INDENT) => self.parse_recipe_line(),
917 Some(WHITESPACE) => self.bump(),
918 Some(COMMENT) => self.parse_comment(),
919 Some(NEWLINE) => self.bump(),
920 Some(DOLLAR) => self.parse_normal_content(),
921 Some(QUOTE) => self.parse_quoted_string(),
922 Some(_) => {
923 self.bump();
925 }
926 }
927 }
928
929 self.builder.finish_node();
930 }
931
932 fn parse_normal_content(&mut self) {
934 self.skip_ws();
936
937 if self.is_assignment_line() {
939 self.parse_assignment();
940 } else {
941 self.parse_rule();
943 }
944 }
945
946 fn parse_include(&mut self) {
947 self.builder.start_node(INCLUDE.into());
948
949 if self.current() != Some(IDENTIFIER)
951 || (!["include", "-include", "sinclude"]
952 .contains(&self.tokens.last().unwrap().1.as_str()))
953 {
954 self.error("expected include directive".to_string());
955 self.builder.finish_node();
956 return;
957 }
958 self.bump();
959 self.skip_ws();
960
961 self.builder.start_node(EXPR.into());
963 let mut found_path = false;
964
965 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
966 match self.current() {
967 Some(WHITESPACE) => self.skip_ws(),
968 Some(DOLLAR) => {
969 found_path = true;
970 self.parse_variable_reference();
971 }
972 Some(_) => {
973 found_path = true;
975 self.bump();
976 }
977 None => break,
978 }
979 }
980
981 if !found_path {
982 self.error("expected file path after include".to_string());
983 }
984
985 self.builder.finish_node();
986
987 if self.current() == Some(NEWLINE) {
989 self.bump();
990 } else if !self.is_at_eof() {
991 self.error("expected newline after include".to_string());
992 self.skip_until_newline();
993 }
994
995 self.builder.finish_node();
996 }
997
998 fn parse_identifier_token(&mut self) -> bool {
999 let token = &self.tokens.last().unwrap().1;
1000
1001 if token.starts_with("%") {
1003 self.parse_rule();
1004 return true;
1005 }
1006
1007 if token.starts_with("if") {
1008 self.parse_conditional();
1009 return true;
1010 }
1011
1012 if token == "include" || token == "-include" || token == "sinclude" {
1013 self.parse_include();
1014 return true;
1015 }
1016
1017 self.parse_normal_content();
1019 true
1020 }
1021
1022 fn parse_token(&mut self) -> bool {
1023 match self.current() {
1024 None => false,
1025 Some(IDENTIFIER) => {
1026 let token = &self.tokens.last().unwrap().1;
1027 if self.is_conditional_directive(token) {
1028 self.parse_conditional();
1029 true
1030 } else {
1031 self.parse_identifier_token()
1032 }
1033 }
1034 Some(DOLLAR) => {
1035 self.parse_normal_content();
1036 true
1037 }
1038 Some(NEWLINE) => {
1039 self.bump();
1040 true
1041 }
1042 Some(COMMENT) => {
1043 self.parse_comment();
1044 true
1045 }
1046 Some(WHITESPACE) => {
1047 if self.is_end_of_file_or_newline_after_whitespace() {
1049 self.skip_ws();
1052 return true;
1053 }
1054
1055 let look_ahead_pos = self.tokens.len().saturating_sub(1);
1058 let mut is_documentation_or_help = false;
1059
1060 if look_ahead_pos > 0 {
1061 let next_token = &self.tokens[look_ahead_pos - 1];
1062 if next_token.0 == IDENTIFIER
1065 || next_token.0 == COMMENT
1066 || next_token.0 == TEXT
1067 {
1068 is_documentation_or_help = true;
1069 }
1070 }
1071
1072 if is_documentation_or_help {
1073 self.skip_ws();
1076 while self.current().is_some() && self.current() != Some(NEWLINE) {
1077 self.bump();
1078 }
1079 if self.current() == Some(NEWLINE) {
1080 self.bump();
1081 }
1082 } else {
1083 self.skip_ws();
1084 }
1085 true
1086 }
1087 Some(INDENT) => {
1088 #[cfg(test)]
1093 {
1094 let is_in_test = self.original_text.lines().count() < 20;
1097 let tokens_as_str = self
1098 .tokens
1099 .iter()
1100 .rev()
1101 .take(10)
1102 .map(|(_kind, text)| text.as_str())
1103 .collect::<Vec<_>>()
1104 .join(" ");
1105
1106 let in_conditional = tokens_as_str.contains("ifdef")
1108 || tokens_as_str.contains("ifndef")
1109 || tokens_as_str.contains("ifeq")
1110 || tokens_as_str.contains("ifneq")
1111 || tokens_as_str.contains("else")
1112 || tokens_as_str.contains("endif");
1113
1114 if is_in_test && !in_conditional {
1115 self.error("indented line not part of a rule".to_string());
1116 }
1117 }
1118
1119 self.bump();
1121
1122 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
1124 self.bump();
1125 }
1126 if self.current() == Some(NEWLINE) {
1127 self.bump();
1128 }
1129 true
1130 }
1131 Some(kind) => {
1132 self.error(format!("unexpected token {:?}", kind));
1133 self.bump();
1134 true
1135 }
1136 }
1137 }
1138
1139 fn parse(mut self) -> Parse {
1140 self.builder.start_node(ROOT.into());
1141
1142 while self.parse_token() {}
1143
1144 self.builder.finish_node();
1145
1146 Parse {
1147 green_node: self.builder.finish(),
1148 errors: self.errors,
1149 }
1150 }
1151
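        // Look ahead through the not-yet-consumed tokens (the buffer is stored in
        // reverse, so we walk from the top of the stack towards later input) up to
        // the next NEWLINE: an assignment operator seen before a plain ':' means
        // the upcoming line is a variable assignment rather than a rule.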
1152 fn is_assignment_line(&mut self) -> bool {
1154 let assignment_ops = ["=", ":=", "::=", ":::=", "+=", "?=", "!="];
1155 let mut pos = self.tokens.len().saturating_sub(1);
1156 let mut seen_identifier = false;
1157 let mut seen_export = false;
1158
1159 while pos > 0 {
1160 let (kind, text) = &self.tokens[pos];
1161
1162 match kind {
1163 NEWLINE => break,
1164 IDENTIFIER if text == "export" => seen_export = true,
1165 IDENTIFIER if !seen_identifier => seen_identifier = true,
1166 OPERATOR if assignment_ops.contains(&text.as_str()) => {
1167 return seen_identifier || seen_export
1168 }
                    OPERATOR if text == ":" => return false,
                    WHITESPACE => (),
                    _ if seen_export => return true,
                    _ => return false,
1173 }
1174 pos = pos.saturating_sub(1);
1175 }
1176 false
1177 }
1178
1179 fn bump(&mut self) {
1181 let (kind, text) = self.tokens.pop().unwrap();
1182 self.builder.token(kind.into(), text.as_str());
1183 }
1184 fn current(&self) -> Option<SyntaxKind> {
1186 self.tokens.last().map(|(kind, _)| *kind)
1187 }
1188
1189 fn expect_eol(&mut self) {
1190 self.skip_ws();
1192
1193 match self.current() {
1194 Some(NEWLINE) => {
1195 self.bump();
1196 }
1197 None => {
1198 }
1200 n => {
1201 self.error(format!("expected newline, got {:?}", n));
1202 self.skip_until_newline();
1204 }
1205 }
1206 }
1207
1208 fn is_at_eof(&self) -> bool {
1210 self.current().is_none()
1211 }
1212
1213 fn is_at_eof_or_only_whitespace(&self) -> bool {
1215 if self.is_at_eof() {
1216 return true;
1217 }
1218
1219 self.tokens
1221 .iter()
1222 .rev()
1223 .all(|(kind, _)| matches!(*kind, WHITESPACE | NEWLINE))
1224 }
1225
1226 fn skip_ws(&mut self) {
1227 while self.current() == Some(WHITESPACE) {
1228 self.bump()
1229 }
1230 }
1231
1232 fn skip_until_newline(&mut self) {
1233 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
1234 self.bump();
1235 }
1236 if self.current() == Some(NEWLINE) {
1237 self.bump();
1238 }
1239 }
1240
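        // Consume tokens until the parenthesis depth drops back to zero, starting
        // from `start_paren_count` already-open parens. Nested `$( ... )` references
        // are handled recursively; the remaining depth is returned (non-zero means
        // the input ended before everything was closed).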
1241 fn consume_balanced_parens(&mut self, start_paren_count: usize) -> usize {
1243 let mut paren_count = start_paren_count;
1244
1245 while paren_count > 0 && self.current().is_some() {
1246 match self.current() {
1247 Some(LPAREN) => {
1248 paren_count += 1;
1249 self.bump();
1250 }
1251 Some(RPAREN) => {
1252 paren_count -= 1;
1253 self.bump();
1254 if paren_count == 0 {
1255 break;
1256 }
1257 }
1258 Some(DOLLAR) => {
1259 self.parse_variable_reference();
1261 }
1262 Some(_) => self.bump(),
1263 None => {
1264 self.error("unclosed parenthesis".to_string());
1265 break;
1266 }
1267 }
1268 }
1269
1270 paren_count
1271 }
1272
1273 fn is_end_of_file_or_newline_after_whitespace(&self) -> bool {
1275 if self.is_at_eof_or_only_whitespace() {
1277 return true;
1278 }
1279
1280 if self.tokens.len() <= 1 {
1282 return true;
1283 }
1284
1285 false
1286 }
1287
1288 #[cfg(test)]
1290 fn is_in_test_environment(&self) -> bool {
1291 self.original_text.lines().count() < 20
1294 }
1295 }
1296
1297 let mut tokens = lex(text);
1298 tokens.reverse();
1299 Parser {
1300 tokens,
1301 builder: GreenNodeBuilder::new(),
1302 errors: Vec::new(),
1303 original_text: text.to_string(),
1304 }
1305 .parse()
1306}
1307
1308type SyntaxNode = rowan::SyntaxNode<Lang>;
1314#[allow(unused)]
1315type SyntaxToken = rowan::SyntaxToken<Lang>;
1316#[allow(unused)]
1317type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>;
1318
1319impl Parse {
1320 fn syntax(&self) -> SyntaxNode {
1321 SyntaxNode::new_root_mut(self.green_node.clone())
1322 }
1323
1324 fn root(&self) -> Makefile {
1325 Makefile::cast(self.syntax()).unwrap()
1326 }
1327}
1328
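/// Generates a typed AST wrapper around a `SyntaxNode` of one specific
/// `SyntaxKind`, implementing `AstNode` for casting and `Display` for emitting
/// the node's source text. For example, `ast_node!(Makefile, ROOT)` below
/// produces the `Makefile` type, which only casts from `ROOT` nodes.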
1329macro_rules! ast_node {
1330 ($ast:ident, $kind:ident) => {
1331 #[derive(PartialEq, Eq, Hash)]
1332 #[repr(transparent)]
1333 pub struct $ast(SyntaxNode);
1335
1336 impl AstNode for $ast {
1337 type Language = Lang;
1338
1339 fn can_cast(kind: SyntaxKind) -> bool {
1340 kind == $kind
1341 }
1342
1343 fn cast(syntax: SyntaxNode) -> Option<Self> {
1344 if Self::can_cast(syntax.kind()) {
1345 Some(Self(syntax))
1346 } else {
1347 None
1348 }
1349 }
1350
1351 fn syntax(&self) -> &SyntaxNode {
1352 &self.0
1353 }
1354 }
1355
1356 impl core::fmt::Display for $ast {
1357 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
1358 write!(f, "{}", self.0.text())
1359 }
1360 }
1361 };
1362}
1363
1364ast_node!(Makefile, ROOT);
1365ast_node!(Rule, RULE);
1366ast_node!(Identifier, IDENTIFIER);
1367ast_node!(VariableDefinition, VARIABLE);
1368ast_node!(Include, INCLUDE);
1369ast_node!(ArchiveMembers, ARCHIVE_MEMBERS);
1370ast_node!(ArchiveMember, ARCHIVE_MEMBER);
1371
1372impl ArchiveMembers {
1373 pub fn archive_name(&self) -> Option<String> {
1375 for element in self.syntax().children_with_tokens() {
1377 if let Some(token) = element.as_token() {
1378 if token.kind() == IDENTIFIER {
1379 return Some(token.text().to_string());
1380 } else if token.kind() == LPAREN {
1381 break;
1383 }
1384 }
1385 }
1386 None
1387 }
1388
1389 pub fn members(&self) -> impl Iterator<Item = ArchiveMember> + '_ {
1391 self.syntax().children().filter_map(ArchiveMember::cast)
1392 }
1393
1394 pub fn member_names(&self) -> Vec<String> {
1396 self.members().map(|m| m.text()).collect()
1397 }
1398}
1399
1400impl ArchiveMember {
1401 pub fn text(&self) -> String {
1403 self.syntax().text().to_string().trim().to_string()
1404 }
1405}
1406
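/// Remove `node` from `parent`, along with any comment block that immediately
/// precedes it (stopping at a shebang-style `#!` comment or at another node),
/// so that deleting a rule or variable definition also drops the comments
/// attached to it.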
1407fn remove_with_preceding_comments(node: &SyntaxNode, parent: &SyntaxNode) {
1415 let mut collected_elements = vec![];
1416 let mut found_comment = false;
1417
1418 let mut current = node.prev_sibling_or_token();
1420 while let Some(element) = current {
1421 match &element {
1422 rowan::NodeOrToken::Token(token) => match token.kind() {
1423 COMMENT => {
1424 if token.text().starts_with("#!") {
                        break;
                    }
1427 found_comment = true;
1428 collected_elements.push(element.clone());
1429 }
1430 NEWLINE | WHITESPACE => {
1431 collected_elements.push(element.clone());
1432 }
                _ => break,
            },
            rowan::NodeOrToken::Node(_) => break,
        }
1437 current = element.prev_sibling_or_token();
1438 }
1439
1440 let node_index = node.index();
1442 parent.splice_children(node_index..node_index + 1, vec![]);
1443
1444 if found_comment {
1446 let mut consecutive_newlines = 0;
1447 for element in collected_elements.iter().rev() {
1448 let should_remove = match element {
1449 rowan::NodeOrToken::Token(token) => match token.kind() {
1450 COMMENT => {
1451 consecutive_newlines = 0;
1452 true
1453 }
1454 NEWLINE => {
1455 consecutive_newlines += 1;
1456 consecutive_newlines <= 1
1457 }
1458 WHITESPACE => true,
1459 _ => false,
1460 },
1461 _ => false,
1462 };
1463
1464 if should_remove {
1465 let idx = element.index();
1466 parent.splice_children(idx..idx + 1, vec![]);
1467 }
1468 }
1469 }
1470}
1471
1472impl VariableDefinition {
1473 pub fn name(&self) -> Option<String> {
1475 self.syntax().children_with_tokens().find_map(|it| {
1476 it.as_token().and_then(|it| {
1477 if it.kind() == IDENTIFIER && it.text() != "export" {
1478 Some(it.text().to_string())
1479 } else {
1480 None
1481 }
1482 })
1483 })
1484 }
1485
1486 pub fn is_export(&self) -> bool {
1488 self.syntax()
1489 .children_with_tokens()
1490 .any(|it| it.as_token().is_some_and(|token| token.text() == "export"))
1491 }
1492
1493 pub fn raw_value(&self) -> Option<String> {
1495 self.syntax()
1496 .children()
1497 .find(|it| it.kind() == EXPR)
1498 .map(|it| it.text().into())
1499 }
1500
1501 pub fn remove(&mut self) {
1514 if let Some(parent) = self.syntax().parent() {
1515 remove_with_preceding_comments(self.syntax(), &parent);
1516 }
1517 }
1518
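    /// Replace the value expression of this variable definition with
    /// `new_value`, emitted as a single token.
    ///
    /// Illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let makefile: Makefile = "CFLAGS = -O2\n".parse().unwrap();
    /// let mut var = makefile.variable_definitions().next().unwrap();
    /// var.set_value("-O3");
    /// assert_eq!(var.raw_value().as_deref(), Some("-O3"));
    /// ```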
1519 pub fn set_value(&mut self, new_value: &str) {
1532 let expr_index = self
1534 .syntax()
1535 .children()
1536 .find(|it| it.kind() == EXPR)
1537 .map(|it| it.index());
1538
1539 if let Some(expr_idx) = expr_index {
1540 let mut builder = GreenNodeBuilder::new();
1542 builder.start_node(EXPR.into());
1543 builder.token(IDENTIFIER.into(), new_value);
1544 builder.finish_node();
1545
1546 let new_expr = SyntaxNode::new_root_mut(builder.finish());
1547
1548 self.0
1550 .splice_children(expr_idx..expr_idx + 1, vec![new_expr.into()]);
1551 }
1552 }
1553}
1554
1555impl Makefile {
1556 pub fn new() -> Makefile {
1558 let mut builder = GreenNodeBuilder::new();
1559
1560 builder.start_node(ROOT.into());
1561 builder.finish_node();
1562
1563 let syntax = SyntaxNode::new_root_mut(builder.finish());
1564 Makefile(syntax)
1565 }
1566
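    /// Parse `text` into a typed `Makefile`.
    ///
    /// Illustrative sketch (not compiled as a doctest; uses only items defined
    /// in this module):
    ///
    /// ```ignore
    /// let makefile: Makefile = "all: dep\n\techo hi\n".parse().unwrap();
    /// let rule = makefile.rules().next().unwrap();
    /// assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["all"]);
    /// ```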
1567 pub fn parse(text: &str) -> crate::Parse<Makefile> {
1569 crate::Parse::<Makefile>::parse_makefile(text)
1570 }
1571
1572 pub fn code(&self) -> String {
1574 self.syntax().text().to_string()
1575 }
1576
1577 pub fn is_root(&self) -> bool {
1579 self.syntax().kind() == ROOT
1580 }
1581
1582 pub fn read<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1584 let mut buf = String::new();
1585 r.read_to_string(&mut buf)?;
1586 buf.parse()
1587 }
1588
1589 pub fn read_relaxed<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1591 let mut buf = String::new();
1592 r.read_to_string(&mut buf)?;
1593
1594 let parsed = parse(&buf);
1595 Ok(parsed.root())
1596 }
1597
1598 pub fn rules(&self) -> impl Iterator<Item = Rule> + '_ {
1607 self.syntax().children().filter_map(Rule::cast)
1608 }
1609
1610 pub fn rules_by_target<'a>(&'a self, target: &'a str) -> impl Iterator<Item = Rule> + 'a {
1612 self.rules()
1613 .filter(move |rule| rule.targets().any(|t| t == target))
1614 }
1615
1616 pub fn variable_definitions(&self) -> impl Iterator<Item = VariableDefinition> {
1618 self.syntax()
1619 .children()
1620 .filter_map(VariableDefinition::cast)
1621 }
1622
1623 pub fn find_variable<'a>(
1638 &'a self,
1639 name: &'a str,
1640 ) -> impl Iterator<Item = VariableDefinition> + 'a {
1641 self.variable_definitions()
1642 .filter(move |var| var.name().as_deref() == Some(name))
1643 }
1644
1645 pub fn add_rule(&mut self, target: &str) -> Rule {
1655 let mut builder = GreenNodeBuilder::new();
1656 builder.start_node(RULE.into());
1657 builder.token(IDENTIFIER.into(), target);
1658 builder.token(OPERATOR.into(), ":");
1659 builder.token(NEWLINE.into(), "\n");
1660 builder.finish_node();
1661
1662 let syntax = SyntaxNode::new_root_mut(builder.finish());
1663 let pos = self.0.children_with_tokens().count();
1664 self.0.splice_children(pos..pos, vec![syntax.into()]);
1665 Rule(self.0.children().nth(pos).unwrap())
1666 }
1667
1668 pub fn from_reader<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1670 let mut buf = String::new();
1671 r.read_to_string(&mut buf)?;
1672
1673 let parsed = parse(&buf);
1674 if !parsed.errors.is_empty() {
1675 Err(Error::Parse(ParseError {
1676 errors: parsed.errors,
1677 }))
1678 } else {
1679 Ok(parsed.root())
1680 }
1681 }
1682
1683 pub fn replace_rule(&mut self, index: usize, new_rule: Rule) -> Result<(), Error> {
1694 let rules: Vec<_> = self.0.children().filter(|n| n.kind() == RULE).collect();
1695
1696 if rules.is_empty() {
1697 return Err(Error::Parse(ParseError {
1698 errors: vec![ErrorInfo {
1699 message: "Cannot replace rule in empty makefile".to_string(),
1700 line: 1,
1701 context: "replace_rule".to_string(),
1702 }],
1703 }));
1704 }
1705
1706 if index >= rules.len() {
1707 return Err(Error::Parse(ParseError {
1708 errors: vec![ErrorInfo {
1709 message: format!(
1710 "Rule index {} out of bounds (max {})",
1711 index,
1712 rules.len() - 1
1713 ),
1714 line: 1,
1715 context: "replace_rule".to_string(),
1716 }],
1717 }));
1718 }
1719
1720 let target_node = &rules[index];
1721 let target_index = target_node.index();
1722
1723 self.0.splice_children(
1725 target_index..target_index + 1,
1726 vec![new_rule.0.clone().into()],
1727 );
1728 Ok(())
1729 }
1730
1731 pub fn remove_rule(&mut self, index: usize) -> Result<Rule, Error> {
1742 let rules: Vec<_> = self.0.children().filter(|n| n.kind() == RULE).collect();
1743
1744 if rules.is_empty() {
1745 return Err(Error::Parse(ParseError {
1746 errors: vec![ErrorInfo {
1747 message: "Cannot remove rule from empty makefile".to_string(),
1748 line: 1,
1749 context: "remove_rule".to_string(),
1750 }],
1751 }));
1752 }
1753
1754 if index >= rules.len() {
1755 return Err(Error::Parse(ParseError {
1756 errors: vec![ErrorInfo {
1757 message: format!(
1758 "Rule index {} out of bounds (max {})",
1759 index,
1760 rules.len() - 1
1761 ),
1762 line: 1,
1763 context: "remove_rule".to_string(),
1764 }],
1765 }));
1766 }
1767
1768 let target_node = rules[index].clone();
1769 let target_index = target_node.index();
1770
1771 self.0
1773 .splice_children(target_index..target_index + 1, vec![]);
1774 Ok(Rule(target_node))
1775 }
1776
1777 pub fn insert_rule(&mut self, index: usize, new_rule: Rule) -> Result<(), Error> {
1789 let rules: Vec<_> = self.0.children().filter(|n| n.kind() == RULE).collect();
1790
1791 if index > rules.len() {
1792 return Err(Error::Parse(ParseError {
1793 errors: vec![ErrorInfo {
1794 message: format!("Rule index {} out of bounds (max {})", index, rules.len()),
1795 line: 1,
1796 context: "insert_rule".to_string(),
1797 }],
1798 }));
1799 }
1800
1801 let target_index = if index == rules.len() {
1802 self.0.children_with_tokens().count()
1804 } else {
1805 rules[index].index()
1807 };
1808
1809 self.0
1811 .splice_children(target_index..target_index, vec![new_rule.0.clone().into()]);
1812 Ok(())
1813 }
1814
1815 pub fn includes(&self) -> impl Iterator<Item = Include> {
1825 self.syntax().children().filter_map(Include::cast)
1826 }
1827
1828 pub fn included_files(&self) -> impl Iterator<Item = String> + '_ {
1838 fn collect_includes(node: &SyntaxNode) -> Vec<Include> {
1841 let mut includes = Vec::new();
1842
1843 if let Some(include) = Include::cast(node.clone()) {
1845 includes.push(include);
1846 }
1847
1848 for child in node.children() {
1850 includes.extend(collect_includes(&child));
1851 }
1852
1853 includes
1854 }
1855
1856 let includes = collect_includes(self.syntax());
1858
1859 includes.into_iter().map(|include| {
1861 include
1862 .syntax()
1863 .children()
1864 .find(|node| node.kind() == EXPR)
1865 .map(|expr| expr.text().to_string().trim().to_string())
1866 .unwrap_or_default()
1867 })
1868 }
1869
1870 pub fn find_rule_by_target(&self, target: &str) -> Option<Rule> {
1881 self.rules()
1882 .find(|rule| rule.targets().any(|t| t == target))
1883 }
1884
1885 pub fn find_rules_by_target<'a>(&'a self, target: &'a str) -> impl Iterator<Item = Rule> + 'a {
1895 self.rules_by_target(target)
1896 }
1897
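    /// Mark `target` as phony, creating the `.PHONY` rule if it does not exist
    /// yet and skipping the prerequisite if it is already listed.
    ///
    /// Illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut makefile = Makefile::new();
    /// makefile.add_rule("clean");
    /// makefile.add_phony_target("clean").unwrap();
    /// assert!(makefile.is_phony("clean"));
    /// ```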
1898 pub fn add_phony_target(&mut self, target: &str) -> Result<(), Error> {
1908 if let Some(mut phony_rule) = self.find_rule_by_target(".PHONY") {
1910 if !phony_rule.prerequisites().any(|p| p == target) {
1912 phony_rule.add_prerequisite(target)?;
1913 }
1914 } else {
1915 let mut phony_rule = self.add_rule(".PHONY");
1917 phony_rule.add_prerequisite(target)?;
1918 }
1919 Ok(())
1920 }
1921
1922 pub fn remove_phony_target(&mut self, target: &str) -> Result<bool, Error> {
1936 let mut phony_rule = None;
1938 for rule in self.rules_by_target(".PHONY") {
1939 if rule.prerequisites().any(|p| p == target) {
1940 phony_rule = Some(rule);
1941 break;
1942 }
1943 }
1944
1945 let mut phony_rule = match phony_rule {
1946 Some(rule) => rule,
1947 None => return Ok(false),
1948 };
1949
1950 let prereq_count = phony_rule.prerequisites().count();
1952
1953 phony_rule.remove_prerequisite(target)?;
1955
1956 if prereq_count == 1 {
1958 phony_rule.remove()?;
1960 }
1961
1962 Ok(true)
1963 }
1964
1965 pub fn is_phony(&self, target: &str) -> bool {
1976 self.rules_by_target(".PHONY")
1978 .any(|rule| rule.prerequisites().any(|p| p == target))
1979 }
1980
1981 pub fn phony_targets(&self) -> impl Iterator<Item = String> + '_ {
1991 self.rules_by_target(".PHONY")
1993 .flat_map(|rule| rule.prerequisites().collect::<Vec<_>>())
1994 }
1995}
1996
1997impl FromStr for Rule {
1998 type Err = crate::Error;
1999
2000 fn from_str(s: &str) -> Result<Self, Self::Err> {
2001 Rule::parse(s).to_rule_result()
2002 }
2003}
2004
2005impl FromStr for Makefile {
2006 type Err = crate::Error;
2007
2008 fn from_str(s: &str) -> Result<Self, Self::Err> {
2009 Makefile::parse(s).to_result()
2010 }
2011}
2012
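/// Build a detached PREREQUISITES node from plain prerequisite names, with a
/// single space between entries. Used when rewriting a rule's prerequisite
/// list in place.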
2013fn build_prerequisites_node(prereqs: &[String]) -> SyntaxNode {
2015 let mut builder = GreenNodeBuilder::new();
2016 builder.start_node(PREREQUISITES.into());
2017
2018 for (i, prereq) in prereqs.iter().enumerate() {
2019 if i > 0 {
2020 builder.token(WHITESPACE.into(), " ");
2021 }
2022
2023 builder.start_node(PREREQUISITE.into());
2025 builder.token(IDENTIFIER.into(), prereq);
2026 builder.finish_node();
2027 }
2028
2029 builder.finish_node();
2030 SyntaxNode::new_root_mut(builder.finish())
2031}
2032
2033fn build_targets_node(targets: &[String]) -> SyntaxNode {
2035 let mut builder = GreenNodeBuilder::new();
2036 builder.start_node(TARGETS.into());
2037
2038 for (i, target) in targets.iter().enumerate() {
2039 if i > 0 {
2040 builder.token(WHITESPACE.into(), " ");
2041 }
2042 builder.token(IDENTIFIER.into(), target);
2043 }
2044
2045 builder.finish_node();
2046 SyntaxNode::new_root_mut(builder.finish())
2047}
2048
2049impl Rule {
2050 pub fn parse(text: &str) -> crate::Parse<Rule> {
2052 crate::Parse::<Rule>::parse_rule(text)
2053 }
2054
2055 fn collect_variable_reference(
2057 &self,
2058 tokens: &mut std::iter::Peekable<impl Iterator<Item = SyntaxElement>>,
2059 ) -> Option<String> {
2060 let mut var_ref = String::new();
2061
2062 if let Some(token) = tokens.next() {
2064 if let Some(t) = token.as_token() {
2065 if t.kind() == DOLLAR {
2066 var_ref.push_str(t.text());
2067
2068 if let Some(next) = tokens.peek() {
2070 if let Some(nt) = next.as_token() {
2071 if nt.kind() == LPAREN {
2072 var_ref.push_str(nt.text());
2074 tokens.next();
2075
2076 let mut paren_count = 1;
2078
2079 for next_token in tokens.by_ref() {
2081 if let Some(nt) = next_token.as_token() {
2082 var_ref.push_str(nt.text());
2083
2084 if nt.kind() == LPAREN {
2085 paren_count += 1;
2086 } else if nt.kind() == RPAREN {
2087 paren_count -= 1;
2088 if paren_count == 0 {
2089 break;
2090 }
2091 }
2092 }
2093 }
2094
2095 return Some(var_ref);
2096 }
2097 }
2098 }
2099
2100 for next_token in tokens.by_ref() {
2102 if let Some(nt) = next_token.as_token() {
2103 var_ref.push_str(nt.text());
2104 if nt.kind() == RPAREN {
2105 break;
2106 }
2107 }
2108 }
2109 return Some(var_ref);
2110 }
2111 }
2112 }
2113
2114 None
2115 }
2116
2117 fn extract_targets_from_node(node: &SyntaxNode) -> Vec<String> {
2119 let mut result = Vec::new();
2120 let mut current_target = String::new();
2121 let mut in_parens = 0;
2122
2123 for child in node.children_with_tokens() {
2124 if let Some(token) = child.as_token() {
2125 match token.kind() {
2126 IDENTIFIER => {
2127 current_target.push_str(token.text());
2128 }
2129 WHITESPACE => {
2130 if in_parens == 0 && !current_target.is_empty() {
2132 result.push(current_target.clone());
2133 current_target.clear();
2134 } else if in_parens > 0 {
2135 current_target.push_str(token.text());
2136 }
2137 }
2138 LPAREN => {
2139 in_parens += 1;
2140 current_target.push_str(token.text());
2141 }
2142 RPAREN => {
2143 in_parens -= 1;
2144 current_target.push_str(token.text());
2145 }
2146 DOLLAR => {
2147 current_target.push_str(token.text());
2148 }
2149 _ => {
2150 current_target.push_str(token.text());
2151 }
2152 }
2153 } else if let Some(child_node) = child.as_node() {
2154 current_target.push_str(&child_node.text().to_string());
2156 }
2157 }
2158
2159 if !current_target.is_empty() {
2161 result.push(current_target);
2162 }
2163
2164 result
2165 }
2166
2167 pub fn targets(&self) -> impl Iterator<Item = String> + '_ {
2177 for child in self.syntax().children_with_tokens() {
2179 if let Some(node) = child.as_node() {
2180 if node.kind() == TARGETS {
2181 return Self::extract_targets_from_node(node).into_iter();
2183 }
2184 }
2185 if let Some(token) = child.as_token() {
2187 if token.kind() == OPERATOR {
2188 break;
2189 }
2190 }
2191 }
2192
2193 let mut result = Vec::new();
2195 let mut tokens = self
2196 .syntax()
2197 .children_with_tokens()
2198 .take_while(|it| it.as_token().map(|t| t.kind() != OPERATOR).unwrap_or(true))
2199 .peekable();
2200
2201 while let Some(token) = tokens.peek().cloned() {
2202 if let Some(node) = token.as_node() {
                tokens.next();
                if node.kind() == EXPR {
2205 let mut var_content = String::new();
2207 for child in node.children_with_tokens() {
2208 if let Some(t) = child.as_token() {
2209 var_content.push_str(t.text());
2210 }
2211 }
2212 if !var_content.is_empty() {
2213 result.push(var_content);
2214 }
2215 }
2216 } else if let Some(t) = token.as_token() {
2217 if t.kind() == DOLLAR {
2218 if let Some(var_ref) = self.collect_variable_reference(&mut tokens) {
2219 result.push(var_ref);
2220 }
2221 } else if t.kind() == IDENTIFIER {
2222 let ident_text = t.text().to_string();
                    tokens.next();
                    if let Some(next) = tokens.peek() {
2228 if let Some(next_token) = next.as_token() {
2229 if next_token.kind() == LPAREN {
2230 let mut archive_target = ident_text;
                                archive_target.push_str(next_token.text());
                                tokens.next();
                                while let Some(token) = tokens.peek() {
2237 if let Some(node) = token.as_node() {
2238 if node.kind() == ARCHIVE_MEMBERS {
2239 archive_target.push_str(&node.text().to_string());
2240 tokens.next();
2241 } else {
2242 tokens.next();
2243 }
2244 } else if let Some(t) = token.as_token() {
2245 if t.kind() == RPAREN {
2246 archive_target.push_str(t.text());
2247 tokens.next();
2248 break;
2249 } else {
2250 tokens.next();
2251 }
2252 } else {
2253 break;
2254 }
2255 }
2256 result.push(archive_target);
2257 } else {
2258 result.push(ident_text);
2260 }
2261 } else {
2262 result.push(ident_text);
2264 }
2265 } else {
2266 result.push(ident_text);
2268 }
2269 } else {
                tokens.next();
            }
2272 }
2273 }
2274 result.into_iter()
2275 }
2276
2277 pub fn prerequisites(&self) -> impl Iterator<Item = String> + '_ {
2286 let mut found_operator = false;
2288 let mut prerequisites_node = None;
2289
2290 for element in self.syntax().children_with_tokens() {
2291 if let Some(token) = element.as_token() {
2292 if token.kind() == OPERATOR {
2293 found_operator = true;
2294 }
2295 } else if let Some(node) = element.as_node() {
2296 if found_operator && node.kind() == PREREQUISITES {
2297 prerequisites_node = Some(node.clone());
2298 break;
2299 }
2300 }
2301 }
2302
2303 let result: Vec<String> = if let Some(prereqs) = prerequisites_node {
2304 prereqs
2306 .children()
2307 .filter(|child| child.kind() == PREREQUISITE)
2308 .map(|child| child.text().to_string().trim().to_string())
2309 .collect()
2310 } else {
2311 Vec::new()
2312 };
2313
2314 result.into_iter()
2315 }
2316
2317 pub fn recipes(&self) -> impl Iterator<Item = String> {
2326 self.syntax()
2327 .children()
2328 .filter(|it| it.kind() == RECIPE)
2329 .flat_map(|it| {
2330 it.children_with_tokens().filter_map(|it| {
2331 it.as_token().and_then(|t| {
2332 if t.kind() == TEXT {
2333 Some(t.text().to_string())
2334 } else {
2335 None
2336 }
2337 })
2338 })
2339 })
2340 }
2341
2342 pub fn replace_command(&mut self, i: usize, line: &str) -> bool {
2352 let index = self
2354 .syntax()
2355 .children()
2356 .filter(|it| it.kind() == RECIPE)
2357 .nth(i);
2358
2359 let index = match index {
2360 Some(node) => node.index(),
2361 None => return false,
2362 };
2363
2364 let mut builder = GreenNodeBuilder::new();
2365 builder.start_node(RECIPE.into());
2366 builder.token(INDENT.into(), "\t");
2367 builder.token(TEXT.into(), line);
2368 builder.token(NEWLINE.into(), "\n");
2369 builder.finish_node();
2370
2371 let syntax = SyntaxNode::new_root_mut(builder.finish());
2372
2373 self.0
2374 .splice_children(index..index + 1, vec![syntax.into()]);
2375
2376 true
2377 }
2378
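    /// Append a recipe line (a tab-indented command) after the last existing
    /// recipe of this rule, or at the end of the rule if there is none.
    ///
    /// Illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut rule: Rule = "build:\n".parse().unwrap();
    /// rule.push_command("cargo build");
    /// assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["cargo build"]);
    /// ```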
2379 pub fn push_command(&mut self, line: &str) {
2389 let index = self
2391 .0
2392 .children_with_tokens()
2393 .filter(|it| it.kind() == RECIPE)
2394 .last();
2395
2396 let index = index.map_or_else(
2397 || self.0.children_with_tokens().count(),
2398 |it| it.index() + 1,
2399 );
2400
2401 let mut builder = GreenNodeBuilder::new();
2402 builder.start_node(RECIPE.into());
2403 builder.token(INDENT.into(), "\t");
2404 builder.token(TEXT.into(), line);
2405 builder.token(NEWLINE.into(), "\n");
2406 builder.finish_node();
2407 let syntax = SyntaxNode::new_root_mut(builder.finish());
2408
2409 self.0.splice_children(index..index, vec![syntax.into()]);
2410 }
2411
2412 pub fn remove_command(&mut self, index: usize) -> bool {
2422 let recipes: Vec<_> = self
2423 .syntax()
2424 .children()
2425 .filter(|n| n.kind() == RECIPE)
2426 .collect();
2427
2428 if index >= recipes.len() {
2429 return false;
2430 }
2431
2432 let target_node = &recipes[index];
2433 let target_index = target_node.index();
2434
2435 self.0
2436 .splice_children(target_index..target_index + 1, vec![]);
2437 true
2438 }
2439
2440 pub fn insert_command(&mut self, index: usize, line: &str) -> bool {
2451 let recipes: Vec<_> = self
2452 .syntax()
2453 .children()
2454 .filter(|n| n.kind() == RECIPE)
2455 .collect();
2456
2457 if index > recipes.len() {
2458 return false;
2459 }
2460
2461 let target_index = if index == recipes.len() {
2462 recipes.last().map(|n| n.index() + 1).unwrap_or_else(|| {
2464 self.0.children_with_tokens().count()
2466 })
2467 } else {
2468 recipes[index].index()
2470 };
2471
2472 let mut builder = GreenNodeBuilder::new();
2473 builder.start_node(RECIPE.into());
2474 builder.token(INDENT.into(), "\t");
2475 builder.token(TEXT.into(), line);
2476 builder.token(NEWLINE.into(), "\n");
2477 builder.finish_node();
2478 let syntax = SyntaxNode::new_root_mut(builder.finish());
2479
2480 self.0
2481 .splice_children(target_index..target_index, vec![syntax.into()]);
2482 true
2483 }
2484
2485 pub fn recipe_count(&self) -> usize {
2494 self.syntax()
2495 .children()
2496 .filter(|n| n.kind() == RECIPE)
2497 .count()
2498 }
2499
2500 pub fn clear_commands(&mut self) {
2510 let recipes: Vec<_> = self
2511 .syntax()
2512 .children()
2513 .filter(|n| n.kind() == RECIPE)
2514 .collect();
2515
2516 if recipes.is_empty() {
2517 return;
2518 }
2519
2520 for recipe in recipes.iter().rev() {
2522 let index = recipe.index();
2523 self.0.splice_children(index..index + 1, vec![]);
2524 }
2525 }
2526
2527 pub fn remove_prerequisite(&mut self, target: &str) -> Result<bool, Error> {
2540 let mut found_operator = false;
2542 let mut prereqs_node = None;
2543
2544 for child in self.syntax().children_with_tokens() {
2545 if let Some(token) = child.as_token() {
2546 if token.kind() == OPERATOR {
2547 found_operator = true;
2548 }
2549 } else if let Some(node) = child.as_node() {
2550 if found_operator && node.kind() == PREREQUISITES {
2551 prereqs_node = Some(node.clone());
2552 break;
2553 }
2554 }
2555 }
2556
2557 let prereqs_node = match prereqs_node {
2558 Some(node) => node,
            None => return Ok(false),
        };
2561
2562 let current_prereqs: Vec<String> = self.prerequisites().collect();
2564
2565 if !current_prereqs.iter().any(|p| p == target) {
2567 return Ok(false);
2568 }
2569
2570 let new_prereqs: Vec<String> = current_prereqs
2572 .into_iter()
2573 .filter(|p| p != target)
2574 .collect();
2575
2576 let prereqs_index = prereqs_node.index();
2578 let new_prereqs_node = build_prerequisites_node(&new_prereqs);
2579
2580 self.0.splice_children(
2581 prereqs_index..prereqs_index + 1,
2582 vec![new_prereqs_node.into()],
2583 );
2584
2585 Ok(true)
2586 }
2587
2588 pub fn add_prerequisite(&mut self, target: &str) -> Result<(), Error> {
2598 let mut current_prereqs: Vec<String> = self.prerequisites().collect();
2599 current_prereqs.push(target.to_string());
2600 self.set_prerequisites(current_prereqs.iter().map(|s| s.as_str()).collect())
2601 }
2602
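    /// Replace this rule's prerequisite list with `prereqs`, rebuilding the
    /// PREREQUISITES node that follows the ':' operator.
    ///
    /// Illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut rule: Rule = "all: old\n".parse().unwrap();
    /// rule.set_prerequisites(vec!["liba", "libb"]).unwrap();
    /// assert_eq!(rule.prerequisites().collect::<Vec<_>>(), vec!["liba", "libb"]);
    /// ```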
2603 pub fn set_prerequisites(&mut self, prereqs: Vec<&str>) -> Result<(), Error> {
2613 let mut prereqs_index = None;
2615 let mut operator_found = false;
2616
2617 for child in self.syntax().children_with_tokens() {
2618 if let Some(token) = child.as_token() {
2619 if token.kind() == OPERATOR {
2620 operator_found = true;
2621 }
2622 } else if let Some(node) = child.as_node() {
2623 if operator_found && node.kind() == PREREQUISITES {
                    prereqs_index = Some((node.index(), true));
                    break;
2626 }
2627 }
2628 }
2629
2630 let new_prereqs =
2632 build_prerequisites_node(&prereqs.iter().map(|s| s.to_string()).collect::<Vec<_>>());
2633
2634 match prereqs_index {
2635 Some((idx, true)) => {
2636 self.0
2638 .splice_children(idx..idx + 1, vec![new_prereqs.into()]);
2639 }
2640 _ => {
2641 let insert_pos = self
2643 .syntax()
2644 .children_with_tokens()
2645 .position(|t| t.as_token().map(|t| t.kind() == OPERATOR).unwrap_or(false))
2646 .map(|p| p + 1)
2647 .ok_or_else(|| {
2648 Error::Parse(ParseError {
2649 errors: vec![ErrorInfo {
2650 message: "No operator found in rule".to_string(),
2651 line: 1,
2652 context: "set_prerequisites".to_string(),
2653 }],
2654 })
2655 })?;
2656
2657 self.0
2658 .splice_children(insert_pos..insert_pos, vec![new_prereqs.into()]);
2659 }
2660 }
2661
2662 Ok(())
2663 }
2664
2665 pub fn rename_target(&mut self, old_name: &str, new_name: &str) -> Result<bool, Error> {
2677 let current_targets: Vec<String> = self.targets().collect();
2679
2680 if !current_targets.iter().any(|t| t == old_name) {
2682 return Ok(false);
2683 }
2684
2685 let new_targets: Vec<String> = current_targets
2687 .into_iter()
2688 .map(|t| {
2689 if t == old_name {
2690 new_name.to_string()
2691 } else {
2692 t
2693 }
2694 })
2695 .collect();
2696
2697 let mut targets_index = None;
2699 for (idx, child) in self.syntax().children_with_tokens().enumerate() {
2700 if let Some(node) = child.as_node() {
2701 if node.kind() == TARGETS {
2702 targets_index = Some(idx);
2703 break;
2704 }
2705 }
2706 }
2707
2708 let targets_index = targets_index.ok_or_else(|| {
2709 Error::Parse(ParseError {
2710 errors: vec![ErrorInfo {
2711 message: "No TARGETS node found in rule".to_string(),
2712 line: 1,
2713 context: "rename_target".to_string(),
2714 }],
2715 })
2716 })?;
2717
2718 let new_targets_node = build_targets_node(&new_targets);
2720
2721 self.0.splice_children(
2723 targets_index..targets_index + 1,
2724 vec![new_targets_node.into()],
2725 );
2726
2727 Ok(true)
2728 }
2729
2730 pub fn add_target(&mut self, target: &str) -> Result<(), Error> {
2740 let mut current_targets: Vec<String> = self.targets().collect();
2741 current_targets.push(target.to_string());
2742 self.set_targets(current_targets.iter().map(|s| s.as_str()).collect())
2743 }
2744
2745 pub fn set_targets(&mut self, targets: Vec<&str>) -> Result<(), Error> {
2757 if targets.is_empty() {
2759 return Err(Error::Parse(ParseError {
2760 errors: vec![ErrorInfo {
2761 message: "Cannot set empty targets list for a rule".to_string(),
2762 line: 1,
2763 context: "set_targets".to_string(),
2764 }],
2765 }));
2766 }
2767
2768 let mut targets_index = None;
2770 for (idx, child) in self.syntax().children_with_tokens().enumerate() {
2771 if let Some(node) = child.as_node() {
2772 if node.kind() == TARGETS {
2773 targets_index = Some(idx);
2774 break;
2775 }
2776 }
2777 }
2778
2779 let targets_index = targets_index.ok_or_else(|| {
2780 Error::Parse(ParseError {
2781 errors: vec![ErrorInfo {
2782 message: "No TARGETS node found in rule".to_string(),
2783 line: 1,
2784 context: "set_targets".to_string(),
2785 }],
2786 })
2787 })?;
2788
2789 let new_targets_node =
2791 build_targets_node(&targets.iter().map(|s| s.to_string()).collect::<Vec<_>>());
2792
2793 self.0.splice_children(
2795 targets_index..targets_index + 1,
2796 vec![new_targets_node.into()],
2797 );
2798
2799 Ok(())
2800 }
2801
2802 pub fn has_target(&self, target: &str) -> bool {
2813 self.targets().any(|t| t == target)
2814 }
2815
2816 pub fn remove_target(&mut self, target_name: &str) -> Result<bool, Error> {
2829 let current_targets: Vec<String> = self.targets().collect();
2831
2832 if !current_targets.iter().any(|t| t == target_name) {
2834 return Ok(false);
2835 }
2836
2837 let new_targets: Vec<String> = current_targets
2839 .into_iter()
2840 .filter(|t| t != target_name)
2841 .collect();
2842
2843 if new_targets.is_empty() {
2845 return Err(Error::Parse(ParseError {
2846 errors: vec![ErrorInfo {
2847 message: "Cannot remove all targets from a rule".to_string(),
2848 line: 1,
2849 context: "remove_target".to_string(),
2850 }],
2851 }));
2852 }
2853
2854 let mut targets_index = None;
2856 for (idx, child) in self.syntax().children_with_tokens().enumerate() {
2857 if let Some(node) = child.as_node() {
2858 if node.kind() == TARGETS {
2859 targets_index = Some(idx);
2860 break;
2861 }
2862 }
2863 }
2864
2865 let targets_index = targets_index.ok_or_else(|| {
2866 Error::Parse(ParseError {
2867 errors: vec![ErrorInfo {
2868 message: "No TARGETS node found in rule".to_string(),
2869 line: 1,
2870 context: "remove_target".to_string(),
2871 }],
2872 })
2873 })?;
2874
2875 let new_targets_node = build_targets_node(&new_targets);
2877
2878 self.0.splice_children(
2880 targets_index..targets_index + 1,
2881 vec![new_targets_node.into()],
2882 );
2883
2884 Ok(true)
2885 }
2886
2887 pub fn remove(self) -> Result<(), Error> {
2900 let parent = self.syntax().parent().ok_or_else(|| {
2901 Error::Parse(ParseError {
2902 errors: vec![ErrorInfo {
2903 message: "Rule has no parent".to_string(),
2904 line: 1,
2905 context: "remove".to_string(),
2906 }],
2907 })
2908 })?;
2909
2910 remove_with_preceding_comments(self.syntax(), &parent);
2911 Ok(())
2912 }
2913}
2914
2915impl Default for Makefile {
2916 fn default() -> Self {
2917 Self::new()
2918 }
2919}
2920
2921impl Include {
2922 pub fn path(&self) -> Option<String> {
2924 self.syntax()
2925 .children()
2926 .find(|it| it.kind() == EXPR)
2927 .map(|it| it.text().to_string().trim().to_string())
2928 }
2929
2930 pub fn is_optional(&self) -> bool {
2932 let text = self.syntax().text();
2933 text.to_string().starts_with("-include") || text.to_string().starts_with("sinclude")
2934 }
2935}
2936
2937#[cfg(test)]
2938mod tests {
2939 use super::*;
2940
2941 #[test]
2942 fn test_conditionals() {
2943 let code = "ifdef DEBUG\n DEBUG_FLAG := 1\nendif\n";
2947 let mut buf = code.as_bytes();
2948 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse basic ifdef");
2949 assert!(makefile.code().contains("DEBUG_FLAG"));
2950
2951 let code =
2953 "ifeq ($(OS),Windows_NT)\n RESULT := windows\nelse\n RESULT := unix\nendif\n";
2954 let mut buf = code.as_bytes();
2955 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse ifeq/ifneq");
2956 assert!(makefile.code().contains("RESULT"));
2957 assert!(makefile.code().contains("windows"));
2958
2959 let code = "ifdef DEBUG\n CFLAGS += -g\n ifdef VERBOSE\n CFLAGS += -v\n endif\nelse\n CFLAGS += -O2\nendif\n";
2961 let mut buf = code.as_bytes();
2962 let makefile = Makefile::read_relaxed(&mut buf)
2963 .expect("Failed to parse nested conditionals with else");
2964 assert!(makefile.code().contains("CFLAGS"));
2965 assert!(makefile.code().contains("VERBOSE"));
2966
2967 let code = "ifdef DEBUG\nendif\n";
2969 let mut buf = code.as_bytes();
2970 let makefile =
2971 Makefile::read_relaxed(&mut buf).expect("Failed to parse empty conditionals");
2972 assert!(makefile.code().contains("ifdef DEBUG"));
2973
2974 let code = "ifeq ($(OS),Windows)\n EXT := .exe\nelif ifeq ($(OS),Linux)\n EXT := .bin\nelse\n EXT := .out\nendif\n";
2976 let mut buf = code.as_bytes();
2977 let makefile =
2978 Makefile::read_relaxed(&mut buf).expect("Failed to parse conditionals with elif");
2979 assert!(makefile.code().contains("EXT"));
2980
2981 let code = "ifXYZ DEBUG\nDEBUG := 1\nendif\n";
2983 let mut buf = code.as_bytes();
2984 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse with recovery");
2985 assert!(makefile.code().contains("DEBUG"));
2986
2987 let code = "ifdef \nDEBUG := 1\nendif\n";
2989 let mut buf = code.as_bytes();
2990 let makefile = Makefile::read_relaxed(&mut buf)
2991 .expect("Failed to parse with recovery - missing condition");
2992 assert!(makefile.code().contains("DEBUG"));
2993 }
2994
2995 #[test]
2996 fn test_parse_simple() {
2997 const SIMPLE: &str = r#"VARIABLE = value
2998
2999rule: dependency
3000 command
3001"#;
3002 let parsed = parse(SIMPLE);
3003 assert!(parsed.errors.is_empty());
3004 let node = parsed.syntax();
3005 assert_eq!(
3006 format!("{:#?}", node),
3007 r#"ROOT@0..44
3008 VARIABLE@0..17
3009 IDENTIFIER@0..8 "VARIABLE"
3010 WHITESPACE@8..9 " "
3011 OPERATOR@9..10 "="
3012 WHITESPACE@10..11 " "
3013 EXPR@11..16
3014 IDENTIFIER@11..16 "value"
3015 NEWLINE@16..17 "\n"
3016 NEWLINE@17..18 "\n"
3017 RULE@18..44
3018 TARGETS@18..22
3019 IDENTIFIER@18..22 "rule"
3020 OPERATOR@22..23 ":"
3021 WHITESPACE@23..24 " "
3022 PREREQUISITES@24..34
3023 PREREQUISITE@24..34
3024 IDENTIFIER@24..34 "dependency"
3025 NEWLINE@34..35 "\n"
3026 RECIPE@35..44
3027 INDENT@35..36 "\t"
3028 TEXT@36..43 "command"
3029 NEWLINE@43..44 "\n"
3030"#
3031 );
3032
3033 let root = parsed.root();
3034
3035 let mut rules = root.rules().collect::<Vec<_>>();
3036 assert_eq!(rules.len(), 1);
3037 let rule = rules.pop().unwrap();
3038 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3039 assert_eq!(rule.prerequisites().collect::<Vec<_>>(), vec!["dependency"]);
3040 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
3041
3042 let mut variables = root.variable_definitions().collect::<Vec<_>>();
3043 assert_eq!(variables.len(), 1);
3044 let variable = variables.pop().unwrap();
3045 assert_eq!(variable.name(), Some("VARIABLE".to_string()));
3046 assert_eq!(variable.raw_value(), Some("value".to_string()));
3047 }
3048
3049 #[test]
3050 fn test_parse_export_assign() {
3051 const EXPORT: &str = r#"export VARIABLE := value
3052"#;
3053 let parsed = parse(EXPORT);
3054 assert!(parsed.errors.is_empty());
3055 let node = parsed.syntax();
3056 assert_eq!(
3057 format!("{:#?}", node),
3058 r#"ROOT@0..25
3059 VARIABLE@0..25
3060 IDENTIFIER@0..6 "export"
3061 WHITESPACE@6..7 " "
3062 IDENTIFIER@7..15 "VARIABLE"
3063 WHITESPACE@15..16 " "
3064 OPERATOR@16..18 ":="
3065 WHITESPACE@18..19 " "
3066 EXPR@19..24
3067 IDENTIFIER@19..24 "value"
3068 NEWLINE@24..25 "\n"
3069"#
3070 );
3071
3072 let root = parsed.root();
3073
3074 let mut variables = root.variable_definitions().collect::<Vec<_>>();
3075 assert_eq!(variables.len(), 1);
3076 let variable = variables.pop().unwrap();
3077 assert_eq!(variable.name(), Some("VARIABLE".to_string()));
3078 assert_eq!(variable.raw_value(), Some("value".to_string()));
3079 }
3080
3081 #[test]
3082 fn test_parse_multiple_prerequisites() {
3083 const MULTIPLE_PREREQUISITES: &str = r#"rule: dependency1 dependency2
3084 command
3085
3086"#;
3087 let parsed = parse(MULTIPLE_PREREQUISITES);
3088 assert!(parsed.errors.is_empty());
3089 let node = parsed.syntax();
3090 assert_eq!(
3091 format!("{:#?}", node),
3092 r#"ROOT@0..40
3093 RULE@0..40
3094 TARGETS@0..4
3095 IDENTIFIER@0..4 "rule"
3096 OPERATOR@4..5 ":"
3097 WHITESPACE@5..6 " "
3098 PREREQUISITES@6..29
3099 PREREQUISITE@6..17
3100 IDENTIFIER@6..17 "dependency1"
3101 WHITESPACE@17..18 " "
3102 PREREQUISITE@18..29
3103 IDENTIFIER@18..29 "dependency2"
3104 NEWLINE@29..30 "\n"
3105 RECIPE@30..39
3106 INDENT@30..31 "\t"
3107 TEXT@31..38 "command"
3108 NEWLINE@38..39 "\n"
3109 NEWLINE@39..40 "\n"
3110"#
3111 );
3112 let root = parsed.root();
3113
3114 let rule = root.rules().next().unwrap();
3115 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3116 assert_eq!(
3117 rule.prerequisites().collect::<Vec<_>>(),
3118 vec!["dependency1", "dependency2"]
3119 );
3120 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
3121 }
3122
3123 #[test]
3124 fn test_add_rule() {
3125 let mut makefile = Makefile::new();
3126 let rule = makefile.add_rule("rule");
3127 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3128 assert_eq!(
3129 rule.prerequisites().collect::<Vec<_>>(),
3130 Vec::<String>::new()
3131 );
3132
3133 assert_eq!(makefile.to_string(), "rule:\n");
3134 }
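
    // Minimal sketch exercising `has_target` (defined above): it is expected
    // to do an exact string comparison against the rule's targets. The
    // multi-target form relies on the same "a b: dep" parsing used in
    // test_replace_rule_with_multiple_targets below.
    #[test]
    fn test_has_target_sketch() {
        let mut makefile = Makefile::new();
        let rule = makefile.add_rule("rule");
        assert!(rule.has_target("rule"));
        assert!(!rule.has_target("other"));

        let multi: Rule = "alpha beta: dep\n\tcommand\n".parse().unwrap();
        assert!(multi.has_target("alpha"));
        assert!(!multi.has_target("dep"));
    }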
3135
3136 #[test]
3137 fn test_push_command() {
3138 let mut makefile = Makefile::new();
3139 let mut rule = makefile.add_rule("rule");
3140
3141 rule.push_command("command");
3143 rule.push_command("command2");
3144
3145 assert_eq!(
3147 rule.recipes().collect::<Vec<_>>(),
3148 vec!["command", "command2"]
3149 );
3150
3151 rule.push_command("command3");
3153 assert_eq!(
3154 rule.recipes().collect::<Vec<_>>(),
3155 vec!["command", "command2", "command3"]
3156 );
3157
3158 assert_eq!(
3160 makefile.to_string(),
3161 "rule:\n\tcommand\n\tcommand2\n\tcommand3\n"
3162 );
3163
3164 assert_eq!(
3166 rule.to_string(),
3167 "rule:\n\tcommand\n\tcommand2\n\tcommand3\n"
3168 );
3169 }
3170
3171 #[test]
3172 fn test_replace_command() {
3173 let mut makefile = Makefile::new();
3174 let mut rule = makefile.add_rule("rule");
3175
3176 rule.push_command("command");
3178 rule.push_command("command2");
3179
3180 assert_eq!(
3182 rule.recipes().collect::<Vec<_>>(),
3183 vec!["command", "command2"]
3184 );
3185
3186 rule.replace_command(0, "new command");
3188 assert_eq!(
3189 rule.recipes().collect::<Vec<_>>(),
3190 vec!["new command", "command2"]
3191 );
3192
3193 assert_eq!(makefile.to_string(), "rule:\n\tnew command\n\tcommand2\n");
3195
3196 assert_eq!(rule.to_string(), "rule:\n\tnew command\n\tcommand2\n");
3198 }
3199
3200 #[test]
3201 fn test_parse_rule_without_newline() {
3202 let rule = "rule: dependency\n\tcommand".parse::<Rule>().unwrap();
3203 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3204 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
3205 let rule = "rule: dependency".parse::<Rule>().unwrap();
3206 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3207 assert_eq!(rule.recipes().collect::<Vec<_>>(), Vec::<String>::new());
3208 }
3209
3210 #[test]
3211 fn test_parse_makefile_without_newline() {
3212 let makefile = "rule: dependency\n\tcommand".parse::<Makefile>().unwrap();
3213 assert_eq!(makefile.rules().count(), 1);
3214 }
3215
3216 #[test]
3217 fn test_from_reader() {
3218 let makefile = Makefile::from_reader("rule: dependency\n\tcommand".as_bytes()).unwrap();
3219 assert_eq!(makefile.rules().count(), 1);
3220 }
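
    // Minimal sketch contrasting the strict and relaxed readers: from_reader
    // rejects input that produces parse errors (see
    // test_regular_line_error_reporting below), while read_relaxed keeps going
    // and still exposes the recovered text.
    #[test]
    fn test_strict_vs_relaxed_reader_sketch() {
        let input = "rule target\n\tcommand";
        assert!(Makefile::from_reader(input.as_bytes()).is_err());

        let mut buf = input.as_bytes();
        let makefile = Makefile::read_relaxed(&mut buf).expect("relaxed read should succeed");
        assert!(makefile.code().contains("command"));
    }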
3221
3222 #[test]
3223 fn test_parse_with_tab_after_last_newline() {
3224 let makefile = Makefile::from_reader("rule: dependency\n\tcommand\n\t".as_bytes()).unwrap();
3225 assert_eq!(makefile.rules().count(), 1);
3226 }
3227
3228 #[test]
3229 fn test_parse_with_space_after_last_newline() {
3230 let makefile = Makefile::from_reader("rule: dependency\n\tcommand\n ".as_bytes()).unwrap();
3231 assert_eq!(makefile.rules().count(), 1);
3232 }
3233
3234 #[test]
3235 fn test_parse_with_comment_after_last_newline() {
3236 let makefile =
3237 Makefile::from_reader("rule: dependency\n\tcommand\n#comment".as_bytes()).unwrap();
3238 assert_eq!(makefile.rules().count(), 1);
3239 }
3240
3241 #[test]
3242 fn test_parse_with_variable_rule() {
3243 let makefile =
3244 Makefile::from_reader("RULE := rule\n$(RULE): dependency\n\tcommand".as_bytes())
3245 .unwrap();
3246
3247 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3249 assert_eq!(vars.len(), 1);
3250 assert_eq!(vars[0].name(), Some("RULE".to_string()));
3251 assert_eq!(vars[0].raw_value(), Some("rule".to_string()));
3252
3253 let rules = makefile.rules().collect::<Vec<_>>();
3255 assert_eq!(rules.len(), 1);
3256 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["$(RULE)"]);
3257 assert_eq!(
3258 rules[0].prerequisites().collect::<Vec<_>>(),
3259 vec!["dependency"]
3260 );
3261 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["command"]);
3262 }
3263
3264 #[test]
3265 fn test_parse_with_variable_dependency() {
3266 let makefile =
3267 Makefile::from_reader("DEP := dependency\nrule: $(DEP)\n\tcommand".as_bytes()).unwrap();
3268
3269 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3271 assert_eq!(vars.len(), 1);
3272 assert_eq!(vars[0].name(), Some("DEP".to_string()));
3273 assert_eq!(vars[0].raw_value(), Some("dependency".to_string()));
3274
3275 let rules = makefile.rules().collect::<Vec<_>>();
3277 assert_eq!(rules.len(), 1);
3278 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["rule"]);
3279 assert_eq!(rules[0].prerequisites().collect::<Vec<_>>(), vec!["$(DEP)"]);
3280 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["command"]);
3281 }
3282
3283 #[test]
3284 fn test_parse_with_variable_command() {
3285 let makefile =
3286 Makefile::from_reader("COM := command\nrule: dependency\n\t$(COM)".as_bytes()).unwrap();
3287
3288 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3290 assert_eq!(vars.len(), 1);
3291 assert_eq!(vars[0].name(), Some("COM".to_string()));
3292 assert_eq!(vars[0].raw_value(), Some("command".to_string()));
3293
3294 let rules = makefile.rules().collect::<Vec<_>>();
3296 assert_eq!(rules.len(), 1);
3297 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["rule"]);
3298 assert_eq!(
3299 rules[0].prerequisites().collect::<Vec<_>>(),
3300 vec!["dependency"]
3301 );
3302 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["$(COM)"]);
3303 }
3304
3305 #[test]
3306 fn test_regular_line_error_reporting() {
3307 let input = "rule target\n\tcommand";
3308
3309 let parsed = parse(input);
3311 let direct_error = &parsed.errors[0];
3312
3313 assert_eq!(direct_error.line, 2);
3315 assert!(
3316 direct_error.message.contains("expected"),
3317 "Error message should contain 'expected': {}",
3318 direct_error.message
3319 );
3320 assert_eq!(direct_error.context, "\tcommand");
3321
3322 let reader_result = Makefile::from_reader(input.as_bytes());
3324 let parse_error = match reader_result {
3325 Ok(_) => panic!("Expected Parse error from from_reader"),
3326 Err(err) => match err {
3327 self::Error::Parse(parse_err) => parse_err,
3328 _ => panic!("Expected Parse error"),
3329 },
3330 };
3331
3332 let error_text = parse_error.to_string();
3334 assert!(error_text.contains("Error at line 2:"));
3335 assert!(error_text.contains("2| \tcommand"));
3336 }
3337
3338 #[test]
3339 fn test_parsing_error_context_with_bad_syntax() {
3340 let input = "#begin comment\n\t(╯°□°)╯︵ ┻━┻\n#end comment";
3342
3343 match Makefile::from_reader(input.as_bytes()) {
3345 Ok(makefile) => {
3346 assert_eq!(
3348 makefile.rules().count(),
3349 0,
3350 "Should not have found any rules"
3351 );
3352 }
3353 Err(err) => match err {
3354 self::Error::Parse(error) => {
3355 assert!(error.errors[0].line >= 2, "Error line should be at least 2");
3357 assert!(
3358 !error.errors[0].context.is_empty(),
3359 "Error context should not be empty"
3360 );
3361 }
3362 _ => panic!("Unexpected error type"),
3363 },
3364 };
3365 }
3366
3367 #[test]
3368 fn test_error_message_format() {
3369 let parse_error = ParseError {
3371 errors: vec![ErrorInfo {
3372 message: "test error".to_string(),
3373 line: 42,
3374 context: "some problematic code".to_string(),
3375 }],
3376 };
3377
3378 let error_text = parse_error.to_string();
3379 assert!(error_text.contains("Error at line 42: test error"));
3380 assert!(error_text.contains("42| some problematic code"));
3381 }
3382
3383 #[test]
3384 fn test_line_number_calculation() {
3385 let test_cases = [
3387 ("rule dependency\n\tcommand", 2), ("#comment\n\t(╯°□°)╯︵ ┻━┻", 2), ("var = value\n#comment\n\tindented line", 3), ];
3391
3392 for (input, expected_line) in test_cases {
3393 match input.parse::<Makefile>() {
3395 Ok(_) => {
3396 continue;
3399 }
3400 Err(err) => {
3401 if let Error::Parse(parse_err) = err {
3402 assert_eq!(
3404 parse_err.errors[0].line, expected_line,
3405 "Line number should match the expected line"
3406 );
3407
3408 if parse_err.errors[0].message.contains("indented") {
3410 assert!(
3411 parse_err.errors[0].context.starts_with('\t'),
3412 "Context for indentation errors should include the tab character"
3413 );
3414 }
3415 } else {
3416 panic!("Expected parse error, got: {:?}", err);
3417 }
3418 }
3419 }
3420 }
3421 }
3422
3423 #[test]
3424 fn test_conditional_features() {
3425 let code = r#"
3427# Set variables based on DEBUG flag
3428ifdef DEBUG
3429 CFLAGS += -g -DDEBUG
3430else
3431 CFLAGS = -O2
3432endif
3433
3434# Define a build rule
3435all: $(OBJS)
3436 $(CC) $(CFLAGS) -o $@ $^
3437"#;
3438
3439 let mut buf = code.as_bytes();
3440 let makefile =
3441 Makefile::read_relaxed(&mut buf).expect("Failed to parse conditional features");
3442
3443 assert!(!makefile.code().is_empty(), "Makefile has content");
3446
3447 let rules = makefile.rules().collect::<Vec<_>>();
3449 assert!(!rules.is_empty(), "Should have found rules");
3450
3451 assert!(code.contains("ifdef DEBUG"));
3453 assert!(code.contains("endif"));
3454
3455 let code_with_var = r#"
3457# Define a variable first
3458CC = gcc
3459
3460ifdef DEBUG
3461 CFLAGS += -g -DDEBUG
3462else
3463 CFLAGS = -O2
3464endif
3465
3466all: $(OBJS)
3467 $(CC) $(CFLAGS) -o $@ $^
3468"#;
3469
3470 let mut buf = code_with_var.as_bytes();
3471 let makefile =
3472 Makefile::read_relaxed(&mut buf).expect("Failed to parse with explicit variable");
3473
3474 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3476 assert!(
3477 !vars.is_empty(),
3478 "Should have found at least the CC variable definition"
3479 );
3480 }
3481
3482 #[test]
3483 fn test_include_directive() {
3484 let parsed = parse("include config.mk\ninclude $(TOPDIR)/rules.mk\ninclude *.mk\n");
3485 assert!(parsed.errors.is_empty());
3486 let node = parsed.syntax();
3487 assert!(format!("{:#?}", node).contains("INCLUDE@"));
3488 }
3489
3490 #[test]
3491 fn test_export_variables() {
3492 let parsed = parse("export SHELL := /bin/bash\n");
3493 assert!(parsed.errors.is_empty());
3494 let makefile = parsed.root();
3495 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3496 assert_eq!(vars.len(), 1);
3497 let shell_var = vars
3498 .iter()
3499 .find(|v| v.name() == Some("SHELL".to_string()))
3500 .unwrap();
3501 assert!(shell_var.raw_value().unwrap().contains("bin/bash"));
3502 }
3503
3504 #[test]
3505 fn test_variable_scopes() {
3506 let parsed =
3507 parse("SIMPLE = value\nIMMEDIATE := value\nCONDITIONAL ?= value\nAPPEND += value\n");
3508 assert!(parsed.errors.is_empty());
3509 let makefile = parsed.root();
3510 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3511 assert_eq!(vars.len(), 4);
3512 let var_names: Vec<_> = vars.iter().filter_map(|v| v.name()).collect();
3513 assert!(var_names.contains(&"SIMPLE".to_string()));
3514 assert!(var_names.contains(&"IMMEDIATE".to_string()));
3515 assert!(var_names.contains(&"CONDITIONAL".to_string()));
3516 assert!(var_names.contains(&"APPEND".to_string()));
3517 }
3518
3519 #[test]
3520 fn test_pattern_rule_parsing() {
3521 let parsed = parse("%.o: %.c\n\t$(CC) -c -o $@ $<\n");
3522 assert!(parsed.errors.is_empty());
3523 let makefile = parsed.root();
3524 let rules = makefile.rules().collect::<Vec<_>>();
3525 assert_eq!(rules.len(), 1);
3526 assert_eq!(rules[0].targets().next().unwrap(), "%.o");
3527 assert!(rules[0].recipes().next().unwrap().contains("$@"));
3528 }
3529
3530 #[test]
3531 fn test_include_variants() {
3532 let makefile_str = "include simple.mk\n-include optional.mk\nsinclude synonym.mk\ninclude $(VAR)/generated.mk\n";
3534 let parsed = parse(makefile_str);
3535 assert!(parsed.errors.is_empty());
3536
3537 let node = parsed.syntax();
3539 let debug_str = format!("{:#?}", node);
3540
3541 assert_eq!(debug_str.matches("INCLUDE@").count(), 4);
3543
3544 let makefile = parsed.root();
3546
3547 let include_count = makefile
3549 .syntax()
3550 .children()
3551 .filter(|child| child.kind() == INCLUDE)
3552 .count();
3553 assert_eq!(include_count, 4);
3554
3555 assert!(makefile
3557 .included_files()
3558 .any(|path| path.contains("$(VAR)")));
3559 }
3560
3561 #[test]
3562 fn test_include_api() {
3563 let makefile_str = "include simple.mk\n-include optional.mk\nsinclude synonym.mk\n";
3565 let makefile: Makefile = makefile_str.parse().unwrap();
3566
3567 let includes: Vec<_> = makefile.includes().collect();
3569 assert_eq!(includes.len(), 3);
3570
        assert!(!includes[0].is_optional());
        assert!(includes[1].is_optional());
        assert!(includes[2].is_optional());

        let files: Vec<_> = makefile.included_files().collect();
3578 assert_eq!(files, vec!["simple.mk", "optional.mk", "synonym.mk"]);
3579
3580 assert_eq!(includes[0].path(), Some("simple.mk".to_string()));
3582 assert_eq!(includes[1].path(), Some("optional.mk".to_string()));
3583 assert_eq!(includes[2].path(), Some("synonym.mk".to_string()));
3584 }
3585
3586 #[test]
3587 fn test_include_integration() {
3588 let phony_makefile = Makefile::from_reader(
3592 ".PHONY: build\n\nVERBOSE ?= 0\n\n# comment\n-include .env\n\nrule: dependency\n\tcommand"
3593 .as_bytes()
3594 ).unwrap();
3595
3596 assert_eq!(phony_makefile.rules().count(), 2);
3598
3599 let normal_rules_count = phony_makefile
3601 .rules()
3602 .filter(|r| !r.targets().any(|t| t.starts_with('.')))
3603 .count();
3604 assert_eq!(normal_rules_count, 1);
3605
3606 assert_eq!(phony_makefile.includes().count(), 1);
3608 assert_eq!(phony_makefile.included_files().next().unwrap(), ".env");
3609
3610 let simple_makefile = Makefile::from_reader(
3612 "\n\nVERBOSE ?= 0\n\n# comment\n-include .env\n\nrule: dependency\n\tcommand"
3613 .as_bytes(),
3614 )
3615 .unwrap();
3616 assert_eq!(simple_makefile.rules().count(), 1);
3617 assert_eq!(simple_makefile.includes().count(), 1);
3618 }
3619
3620 #[test]
3621 fn test_real_conditional_directives() {
3622 let conditional = "ifdef DEBUG\nCFLAGS = -g\nelse\nCFLAGS = -O2\nendif\n";
3624 let mut buf = conditional.as_bytes();
3625 let makefile =
3626 Makefile::read_relaxed(&mut buf).expect("Failed to parse basic if/else conditional");
3627 let code = makefile.code();
3628 assert!(code.contains("ifdef DEBUG"));
3629 assert!(code.contains("else"));
3630 assert!(code.contains("endif"));
3631
3632 let nested = "ifdef DEBUG\nCFLAGS = -g\nifdef VERBOSE\nCFLAGS += -v\nendif\nendif\n";
3634 let mut buf = nested.as_bytes();
3635 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse nested ifdef");
3636 let code = makefile.code();
3637 assert!(code.contains("ifdef DEBUG"));
3638 assert!(code.contains("ifdef VERBOSE"));
3639
3640 let ifeq = "ifeq ($(OS),Windows_NT)\nTARGET = app.exe\nelse\nTARGET = app\nendif\n";
3642 let mut buf = ifeq.as_bytes();
3643 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse ifeq form");
3644 let code = makefile.code();
3645 assert!(code.contains("ifeq"));
3646 assert!(code.contains("Windows_NT"));
3647 }
3648
3649 #[test]
3650 fn test_indented_text_outside_rules() {
3651 let help_text = "help:\n\t@echo \"Available targets:\"\n\t@echo \" help show help\"\n";
3653 let parsed = parse(help_text);
3654 assert!(parsed.errors.is_empty());
3655
3656 let root = parsed.root();
3658 let rules = root.rules().collect::<Vec<_>>();
3659 assert_eq!(rules.len(), 1);
3660
3661 let help_rule = &rules[0];
3662 let recipes = help_rule.recipes().collect::<Vec<_>>();
3663 assert_eq!(recipes.len(), 2);
3664 assert!(recipes[0].contains("Available targets"));
3665 assert!(recipes[1].contains("help"));
3666 }
3667
3668 #[test]
3669 fn test_comment_handling_in_recipes() {
3670 let recipe_comment = "build:\n\t# This is a comment\n\tgcc -o app main.c\n";
3672
3673 let parsed = parse(recipe_comment);
3675
3676 assert!(
3678 parsed.errors.is_empty(),
3679 "Should parse recipe with comments without errors"
3680 );
3681
3682 let root = parsed.root();
3684 let rules = root.rules().collect::<Vec<_>>();
3685 assert_eq!(rules.len(), 1, "Should find exactly one rule");
3686
3687 let build_rule = &rules[0];
3689 assert_eq!(
3690 build_rule.targets().collect::<Vec<_>>(),
3691 vec!["build"],
3692 "Rule should have 'build' as target"
3693 );
3694
3695 let recipes = build_rule.recipes().collect::<Vec<_>>();
3699 assert_eq!(
3700 recipes.len(),
3701 1,
3702 "Should find exactly one recipe line (comment lines are filtered)"
3703 );
3704 assert!(
3705 recipes[0].contains("gcc -o app"),
3706 "Recipe should be the command line"
3707 );
3708 assert!(
3709 !recipes[0].contains("This is a comment"),
3710 "Comments should not be included in recipe lines"
3711 );
3712 }
3713
3714 #[test]
3715 fn test_multiline_variables() {
3716 let multiline = "SOURCES = main.c \\\n util.c\n";
3718
3719 let parsed = parse(multiline);
3721
3722 let root = parsed.root();
3724 let vars = root.variable_definitions().collect::<Vec<_>>();
3725 assert!(!vars.is_empty(), "Should find at least one variable");
3726
3727 let operators = "CFLAGS := -Wall \\\n -Werror\n";
3731 let parsed_operators = parse(operators);
3732
3733 let root = parsed_operators.root();
3735 let vars = root.variable_definitions().collect::<Vec<_>>();
3736 assert!(
3737 !vars.is_empty(),
3738 "Should find at least one variable with := operator"
3739 );
3740
3741 let append = "LDFLAGS += -L/usr/lib \\\n -lm\n";
3743 let parsed_append = parse(append);
3744
3745 let root = parsed_append.root();
3747 let vars = root.variable_definitions().collect::<Vec<_>>();
3748 assert!(
3749 !vars.is_empty(),
3750 "Should find at least one variable with += operator"
3751 );
3752 }
3753
3754 #[test]
3755 fn test_whitespace_and_eof_handling() {
3756 let blank_lines = "VAR = value\n\n\n";
3758
3759 let parsed_blank = parse(blank_lines);
3760
3761 let root = parsed_blank.root();
3763 let vars = root.variable_definitions().collect::<Vec<_>>();
3764 assert_eq!(
3765 vars.len(),
3766 1,
3767 "Should find one variable in blank lines test"
3768 );
3769
3770 let trailing_space = "VAR = value \n";
3772
3773 let parsed_space = parse(trailing_space);
3774
3775 let root = parsed_space.root();
3777 let vars = root.variable_definitions().collect::<Vec<_>>();
3778 assert_eq!(
3779 vars.len(),
3780 1,
3781 "Should find one variable in trailing space test"
3782 );
3783
3784 let no_newline = "VAR = value";
3786
3787 let parsed_no_newline = parse(no_newline);
3788
3789 let root = parsed_no_newline.root();
3791 let vars = root.variable_definitions().collect::<Vec<_>>();
3792 assert_eq!(vars.len(), 1, "Should find one variable in no newline test");
3793 assert_eq!(
3794 vars[0].name(),
3795 Some("VAR".to_string()),
3796 "Variable name should be VAR"
3797 );
3798 }
3799
3800 #[test]
3801 fn test_complex_variable_references() {
3802 let wildcard = "SOURCES = $(wildcard *.c)\n";
3804 let parsed = parse(wildcard);
3805 assert!(parsed.errors.is_empty());
3806
3807 let nested = "PREFIX = /usr\nBINDIR = $(PREFIX)/bin\n";
3809 let parsed = parse(nested);
3810 assert!(parsed.errors.is_empty());
3811
3812 let patsubst = "OBJECTS = $(patsubst %.c,%.o,$(SOURCES))\n";
3814 let parsed = parse(patsubst);
3815 assert!(parsed.errors.is_empty());
3816 }
3817
3818 #[test]
3819 fn test_complex_variable_references_minimal() {
3820 let wildcard = "SOURCES = $(wildcard *.c)\n";
3822 let parsed = parse(wildcard);
3823 assert!(parsed.errors.is_empty());
3824
3825 let nested = "PREFIX = /usr\nBINDIR = $(PREFIX)/bin\n";
3827 let parsed = parse(nested);
3828 assert!(parsed.errors.is_empty());
3829
3830 let patsubst = "OBJECTS = $(patsubst %.c,%.o,$(SOURCES))\n";
3832 let parsed = parse(patsubst);
3833 assert!(parsed.errors.is_empty());
3834 }
3835
3836 #[test]
3837 fn test_multiline_variable_with_backslash() {
3838 let content = r#"
3839LONG_VAR = This is a long variable \
3840 that continues on the next line \
3841 and even one more line
3842"#;
3843
3844 let mut buf = content.as_bytes();
3846 let makefile =
3847 Makefile::read_relaxed(&mut buf).expect("Failed to parse multiline variable");
3848
3849 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3851 assert_eq!(
3852 vars.len(),
3853 1,
3854 "Expected 1 variable but found {}",
3855 vars.len()
3856 );
3857 let var_value = vars[0].raw_value();
3858 assert!(var_value.is_some(), "Variable value is None");
3859
3860 let value_str = var_value.unwrap();
3862 assert!(
3863 value_str.contains("long variable"),
3864 "Value doesn't contain expected content"
3865 );
3866 }
3867
3868 #[test]
3869 fn test_multiline_variable_with_mixed_operators() {
3870 let content = r#"
3871PREFIX ?= /usr/local
3872CFLAGS := -Wall -O2 \
3873 -I$(PREFIX)/include \
3874 -DDEBUG
3875"#;
3876 let mut buf = content.as_bytes();
3878 let makefile = Makefile::read_relaxed(&mut buf)
3879 .expect("Failed to parse multiline variable with operators");
3880
3881 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3883 assert!(
            !vars.is_empty(),
3885 "Expected at least 1 variable, found {}",
3886 vars.len()
3887 );
3888
3889 let prefix_var = vars
3891 .iter()
3892 .find(|v| v.name().unwrap_or_default() == "PREFIX");
3893 assert!(prefix_var.is_some(), "Expected to find PREFIX variable");
3894 assert!(
3895 prefix_var.unwrap().raw_value().is_some(),
3896 "PREFIX variable has no value"
3897 );
3898
3899 let cflags_var = vars
3901 .iter()
3902 .find(|v| v.name().unwrap_or_default().contains("CFLAGS"));
3903 assert!(
3904 cflags_var.is_some(),
3905 "Expected to find CFLAGS variable (or part of it)"
3906 );
3907 }
3908
3909 #[test]
3910 fn test_indented_help_text() {
3911 let content = r#"
3912.PHONY: help
3913help:
3914 @echo "Available targets:"
3915 @echo " build - Build the project"
3916 @echo " test - Run tests"
3917 @echo " clean - Remove build artifacts"
3918"#;
3919 let mut buf = content.as_bytes();
3921 let makefile =
3922 Makefile::read_relaxed(&mut buf).expect("Failed to parse indented help text");
3923
3924 let rules = makefile.rules().collect::<Vec<_>>();
3926 assert!(!rules.is_empty(), "Expected at least one rule");
3927
3928 let help_rule = rules.iter().find(|r| r.targets().any(|t| t == "help"));
3930 assert!(help_rule.is_some(), "Expected to find help rule");
3931
3932 let recipes = help_rule.unwrap().recipes().collect::<Vec<_>>();
3934 assert!(
3935 !recipes.is_empty(),
3936 "Expected at least one recipe line in help rule"
3937 );
3938 assert!(
3939 recipes.iter().any(|r| r.contains("Available targets")),
3940 "Expected to find 'Available targets' in recipes"
3941 );
3942 }
3943
3944 #[test]
3945 fn test_indented_lines_in_conditionals() {
3946 let content = r#"
3947ifdef DEBUG
3948 CFLAGS += -g -DDEBUG
3949 # This is a comment inside conditional
3950 ifdef VERBOSE
3951 CFLAGS += -v
3952 endif
3953endif
3954"#;
3955 let mut buf = content.as_bytes();
3957 let makefile = Makefile::read_relaxed(&mut buf)
3958 .expect("Failed to parse indented lines in conditionals");
3959
3960 let code = makefile.code();
3962 assert!(code.contains("ifdef DEBUG"));
3963 assert!(code.contains("ifdef VERBOSE"));
3964 assert!(code.contains("endif"));
3965 }
3966
3967 #[test]
3968 fn test_recipe_with_colon() {
3969 let content = r#"
3970build:
3971 @echo "Building at: $(shell date)"
3972 gcc -o program main.c
3973"#;
3974 let parsed = parse(content);
3975 assert!(
3976 parsed.errors.is_empty(),
3977 "Failed to parse recipe with colon: {:?}",
3978 parsed.errors
3979 );
3980 }
3981
3982 #[test]
3983 #[ignore]
3984 fn test_double_colon_rules() {
3985 let content = r#"
3988%.o :: %.c
3989 $(CC) -c $< -o $@
3990
3991# Double colon allows multiple rules for same target
3992all:: prerequisite1
3993 @echo "First rule for all"
3994
3995all:: prerequisite2
3996 @echo "Second rule for all"
3997"#;
3998 let mut buf = content.as_bytes();
3999 let makefile =
4000 Makefile::read_relaxed(&mut buf).expect("Failed to parse double colon rules");
4001
4002 let rules = makefile.rules().collect::<Vec<_>>();
4004 assert!(!rules.is_empty(), "Expected at least one rule");
4005
4006 let all_rules = rules
4008 .iter()
4009 .filter(|r| r.targets().any(|t| t.contains("all")));
4010 assert!(
4011 all_rules.count() > 0,
4012 "Expected to find at least one rule containing 'all'"
4013 );
4014 }
4015
4016 #[test]
4017 fn test_elif_directive() {
4018 let content = r#"
4019ifeq ($(OS),Windows_NT)
4020 TARGET = windows
4021elif ifeq ($(OS),Darwin)
4022 TARGET = macos
4023elif ifeq ($(OS),Linux)
4024 TARGET = linux
4025else
4026 TARGET = unknown
4027endif
4028"#;
4029 let mut buf = content.as_bytes();
4031 let _makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse elif directive");
4032
4033 }
4036
4037 #[test]
4038 fn test_ambiguous_assignment_vs_rule() {
4039 const VAR_ASSIGNMENT: &str = "VARIABLE = value\n";
4041
4042 let mut buf = std::io::Cursor::new(VAR_ASSIGNMENT);
4043 let makefile =
4044 Makefile::read_relaxed(&mut buf).expect("Failed to parse variable assignment");
4045
4046 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4047 let rules = makefile.rules().collect::<Vec<_>>();
4048
4049 assert_eq!(vars.len(), 1, "Expected 1 variable, found {}", vars.len());
4050 assert_eq!(rules.len(), 0, "Expected 0 rules, found {}", rules.len());
4051
4052 assert_eq!(vars[0].name(), Some("VARIABLE".to_string()));
4053
4054 const SIMPLE_RULE: &str = "target: dependency\n";
4056
4057 let mut buf = std::io::Cursor::new(SIMPLE_RULE);
4058 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse simple rule");
4059
4060 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4061 let rules = makefile.rules().collect::<Vec<_>>();
4062
4063 assert_eq!(vars.len(), 0, "Expected 0 variables, found {}", vars.len());
4064 assert_eq!(rules.len(), 1, "Expected 1 rule, found {}", rules.len());
4065
4066 let rule = &rules[0];
4067 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["target"]);
4068 }
4069
4070 #[test]
4071 fn test_nested_conditionals() {
4072 let content = r#"
4073ifdef RELEASE
4074 CFLAGS += -O3
4075 ifndef DEBUG
4076 ifneq ($(ARCH),arm)
4077 CFLAGS += -march=native
4078 else
4079 CFLAGS += -mcpu=cortex-a72
4080 endif
4081 endif
4082endif
4083"#;
4084 let mut buf = content.as_bytes();
4086 let makefile =
4087 Makefile::read_relaxed(&mut buf).expect("Failed to parse nested conditionals");
4088
4089 let code = makefile.code();
4091 assert!(code.contains("ifdef RELEASE"));
4092 assert!(code.contains("ifndef DEBUG"));
4093 assert!(code.contains("ifneq"));
4094 }
4095
4096 #[test]
4097 fn test_space_indented_recipes() {
4098 let content = r#"
4101build:
4102 @echo "Building with spaces instead of tabs"
4103 gcc -o program main.c
4104"#;
4105 let mut buf = content.as_bytes();
4107 let makefile =
4108 Makefile::read_relaxed(&mut buf).expect("Failed to parse space-indented recipes");
4109
4110 let rules = makefile.rules().collect::<Vec<_>>();
4112 assert!(!rules.is_empty(), "Expected at least one rule");
4113
4114 let build_rule = rules.iter().find(|r| r.targets().any(|t| t == "build"));
4116 assert!(build_rule.is_some(), "Expected to find build rule");
4117 }
4118
4119 #[test]
4120 fn test_complex_variable_functions() {
4121 let content = r#"
4122FILES := $(shell find . -name "*.c")
4123OBJS := $(patsubst %.c,%.o,$(FILES))
4124NAME := $(if $(PROGRAM),$(PROGRAM),a.out)
4125HEADERS := ${wildcard *.h}
4126"#;
4127 let parsed = parse(content);
4128 assert!(
4129 parsed.errors.is_empty(),
4130 "Failed to parse complex variable functions: {:?}",
4131 parsed.errors
4132 );
4133 }
4134
4135 #[test]
4136 fn test_nested_variable_expansions() {
4137 let content = r#"
4138VERSION = 1.0
4139PACKAGE = myapp
4140TARBALL = $(PACKAGE)-$(VERSION).tar.gz
4141INSTALL_PATH = $(shell echo $(PREFIX) | sed 's/\/$//')
4142"#;
4143 let parsed = parse(content);
4144 assert!(
4145 parsed.errors.is_empty(),
4146 "Failed to parse nested variable expansions: {:?}",
4147 parsed.errors
4148 );
4149 }
4150
4151 #[test]
4152 fn test_special_directives() {
4153 let content = r#"
4154# Special makefile directives
4155.PHONY: all clean
4156.SUFFIXES: .c .o
4157.DEFAULT: all
4158
4159# Variable definition and export directive
4160export PATH := /usr/bin:/bin
4161"#;
4162 let mut buf = content.as_bytes();
4164 let makefile =
4165 Makefile::read_relaxed(&mut buf).expect("Failed to parse special directives");
4166
4167 let rules = makefile.rules().collect::<Vec<_>>();
4169
4170 let phony_rule = rules
4172 .iter()
4173 .find(|r| r.targets().any(|t| t.contains(".PHONY")));
4174 assert!(phony_rule.is_some(), "Expected to find .PHONY rule");
4175
4176 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4178 assert!(!vars.is_empty(), "Expected to find at least one variable");
4179 }
4180
4181 #[test]
4184 fn test_comprehensive_real_world_makefile() {
4185 let content = r#"
4187# Basic variable assignment
4188VERSION = 1.0.0
4189
4190# Phony target
4191.PHONY: all clean
4192
4193# Simple rule
4194all:
4195 echo "Building version $(VERSION)"
4196
4197# Another rule with dependencies
4198clean:
4199 rm -f *.o
4200"#;
4201
4202 let parsed = parse(content);
4204
4205 assert!(parsed.errors.is_empty(), "Expected no parsing errors");
4207
4208 let variables = parsed.root().variable_definitions().collect::<Vec<_>>();
4210 assert!(!variables.is_empty(), "Expected at least one variable");
4211 assert_eq!(
4212 variables[0].name(),
4213 Some("VERSION".to_string()),
4214 "Expected VERSION variable"
4215 );
4216
4217 let rules = parsed.root().rules().collect::<Vec<_>>();
4219 assert!(!rules.is_empty(), "Expected at least one rule");
4220
4221 let rule_targets: Vec<String> = rules
4223 .iter()
4224 .flat_map(|r| r.targets().collect::<Vec<_>>())
4225 .collect();
4226 assert!(
4227 rule_targets.contains(&".PHONY".to_string()),
4228 "Expected .PHONY rule"
4229 );
4230 assert!(
4231 rule_targets.contains(&"all".to_string()),
4232 "Expected 'all' rule"
4233 );
4234 assert!(
4235 rule_targets.contains(&"clean".to_string()),
4236 "Expected 'clean' rule"
4237 );
4238 }
4239
4240 #[test]
4241 fn test_indented_help_text_outside_rules() {
4242 let content = r#"
4244# Targets with help text
4245help:
4246 @echo "Available targets:"
4247 @echo " build build the project"
4248 @echo " test run tests"
4249 @echo " clean clean build artifacts"
4250
4251# Another target
4252clean:
4253 rm -rf build/
4254"#;
4255
4256 let parsed = parse(content);
4258
4259 assert!(
4261 parsed.errors.is_empty(),
4262 "Failed to parse indented help text"
4263 );
4264
4265 let rules = parsed.root().rules().collect::<Vec<_>>();
4267 assert_eq!(rules.len(), 2, "Expected to find two rules");
4268
4269 let help_rule = rules
4271 .iter()
4272 .find(|r| r.targets().any(|t| t == "help"))
4273 .expect("Expected to find help rule");
4274
4275 let clean_rule = rules
4276 .iter()
4277 .find(|r| r.targets().any(|t| t == "clean"))
4278 .expect("Expected to find clean rule");
4279
4280 let help_recipes = help_rule.recipes().collect::<Vec<_>>();
4282 assert!(
4283 !help_recipes.is_empty(),
4284 "Help rule should have recipe lines"
4285 );
4286 assert!(
4287 help_recipes
4288 .iter()
4289 .any(|line| line.contains("Available targets")),
4290 "Help recipes should include 'Available targets' line"
4291 );
4292
4293 let clean_recipes = clean_rule.recipes().collect::<Vec<_>>();
4295 assert!(
4296 !clean_recipes.is_empty(),
4297 "Clean rule should have recipe lines"
4298 );
4299 assert!(
4300 clean_recipes.iter().any(|line| line.contains("rm -rf")),
4301 "Clean recipes should include 'rm -rf' command"
4302 );
4303 }
4304
4305 #[test]
4306 fn test_makefile1_phony_pattern() {
4307 let content = "#line 2145\n.PHONY: $(PHONY)\n";
4309
4310 let result = parse(content);
4312
4313 assert!(
4315 result.errors.is_empty(),
4316 "Failed to parse .PHONY: $(PHONY) pattern"
4317 );
4318
4319 let rules = result.root().rules().collect::<Vec<_>>();
4321 assert_eq!(rules.len(), 1, "Expected 1 rule");
4322 assert_eq!(
4323 rules[0].targets().next().unwrap(),
4324 ".PHONY",
4325 "Expected .PHONY rule"
4326 );
4327
4328 let prereqs = rules[0].prerequisites().collect::<Vec<_>>();
4330 assert_eq!(prereqs.len(), 1, "Expected 1 prerequisite");
4331 assert_eq!(prereqs[0], "$(PHONY)", "Expected $(PHONY) prerequisite");
4332 }
4333
4334 #[test]
4335 fn test_skip_until_newline_behavior() {
4336 let input = "text without newline";
4338 let parsed = parse(input);
4339 assert!(parsed.errors.is_empty() || !parsed.errors.is_empty());
4341
4342 let input_with_newline = "text\nafter newline";
4343 let parsed2 = parse(input_with_newline);
4344 assert!(parsed2.errors.is_empty() || !parsed2.errors.is_empty());
4345 }
4346
4347 #[test]
4348 fn test_error_with_indent_token() {
4349 let input = "\tinvalid indented line";
4351 let parsed = parse(input);
4352 assert!(!parsed.errors.is_empty());
4354
4355 let error_msg = &parsed.errors[0].message;
4356 assert!(error_msg.contains("indented") || error_msg.contains("part of a rule"));
4357 }
4358
4359 #[test]
4360 fn test_conditional_token_handling() {
4361 let input = r#"
4363ifndef VAR
4364 CFLAGS = -DTEST
4365endif
4366"#;
4367 let parsed = parse(input);
4368 let makefile = parsed.root();
4370 let _vars = makefile.variable_definitions().collect::<Vec<_>>();
4371 let nested = r#"
4375ifdef DEBUG
4376 ifndef RELEASE
4377 CFLAGS = -g
4378 endif
4379endif
4380"#;
4381 let parsed_nested = parse(nested);
4382 let _makefile = parsed_nested.root();
4384 }
4385
4386 #[test]
4387 fn test_include_vs_conditional_logic() {
4388 let input = r#"
4390include file.mk
4391ifdef VAR
4392 VALUE = 1
4393endif
4394"#;
4395 let parsed = parse(input);
4396 let makefile = parsed.root();
4398 let includes = makefile.includes().collect::<Vec<_>>();
        assert!(!includes.is_empty() || !parsed.errors.is_empty());
4401
4402 let optional_include = r#"
4404-include optional.mk
4405ifndef VAR
4406 VALUE = default
4407endif
4408"#;
4409 let parsed2 = parse(optional_include);
4410 let _makefile = parsed2.root();
4412 }
4413
4414 #[test]
4415 fn test_balanced_parens_counting() {
4416 let input = r#"
4418VAR = $(call func,$(nested,arg),extra)
4419COMPLEX = $(if $(condition),$(then_val),$(else_val))
4420"#;
4421 let parsed = parse(input);
4422 assert!(parsed.errors.is_empty());
4423
4424 let makefile = parsed.root();
4425 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4426 assert_eq!(vars.len(), 2);
4427 }
4428
4429 #[test]
4430 fn test_documentation_lookahead() {
4431 let input = r#"
4433# Documentation comment
4434help:
4435 @echo "Usage instructions"
4436 @echo "More help text"
4437"#;
4438 let parsed = parse(input);
4439 assert!(parsed.errors.is_empty());
4440
4441 let makefile = parsed.root();
4442 let rules = makefile.rules().collect::<Vec<_>>();
4443 assert_eq!(rules.len(), 1);
4444 assert_eq!(rules[0].targets().next().unwrap(), "help");
4445 }
4446
4447 #[test]
4448 fn test_edge_case_empty_input() {
4449 let parsed = parse("");
4451 assert!(parsed.errors.is_empty());
4452
4453 let parsed2 = parse(" \n \n");
4455 let _makefile = parsed2.root();
4458 }
4459
4460 #[test]
4461 fn test_malformed_conditional_recovery() {
4462 let input = r#"
4464ifdef
4465 # Missing condition variable
4466endif
4467"#;
        // Recovery smoke test: the malformed conditional must not panic the
        // parser; whether it is reported as an error is deliberately left open.
        let _parsed = parse(input);
    }
4473
4474 #[test]
4475 fn test_replace_rule() {
4476 let mut makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
4477 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4478
4479 makefile.replace_rule(0, new_rule).unwrap();
4480
4481 let targets: Vec<_> = makefile
4482 .rules()
4483 .flat_map(|r| r.targets().collect::<Vec<_>>())
4484 .collect();
4485 assert_eq!(targets, vec!["new_rule", "rule2"]);
4486
4487 let recipes: Vec<_> = makefile.rules().next().unwrap().recipes().collect();
4488 assert_eq!(recipes, vec!["new_command"]);
4489 }
4490
4491 #[test]
4492 fn test_replace_rule_out_of_bounds() {
4493 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4494 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4495
4496 let result = makefile.replace_rule(5, new_rule);
4497 assert!(result.is_err());
4498 }
4499
4500 #[test]
4501 fn test_remove_rule() {
4502 let mut makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\nrule3:\n\tcommand3\n"
4503 .parse()
4504 .unwrap();
4505
4506 let removed = makefile.remove_rule(1).unwrap();
4507 assert_eq!(removed.targets().collect::<Vec<_>>(), vec!["rule2"]);
4508
4509 let remaining_targets: Vec<_> = makefile
4510 .rules()
4511 .flat_map(|r| r.targets().collect::<Vec<_>>())
4512 .collect();
4513 assert_eq!(remaining_targets, vec!["rule1", "rule3"]);
4514 assert_eq!(makefile.rules().count(), 2);
4515 }
4516
4517 #[test]
4518 fn test_remove_rule_out_of_bounds() {
4519 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4520
4521 let result = makefile.remove_rule(5);
4522 assert!(result.is_err());
4523 }
4524
4525 #[test]
4526 fn test_insert_rule() {
4527 let mut makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
4528 let new_rule: Rule = "inserted_rule:\n\tinserted_command\n".parse().unwrap();
4529
4530 makefile.insert_rule(1, new_rule).unwrap();
4531
4532 let targets: Vec<_> = makefile
4533 .rules()
4534 .flat_map(|r| r.targets().collect::<Vec<_>>())
4535 .collect();
4536 assert_eq!(targets, vec!["rule1", "inserted_rule", "rule2"]);
4537 assert_eq!(makefile.rules().count(), 3);
4538 }
4539
4540 #[test]
4541 fn test_insert_rule_at_end() {
4542 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4543 let new_rule: Rule = "end_rule:\n\tend_command\n".parse().unwrap();
4544
4545 makefile.insert_rule(1, new_rule).unwrap();
4546
4547 let targets: Vec<_> = makefile
4548 .rules()
4549 .flat_map(|r| r.targets().collect::<Vec<_>>())
4550 .collect();
4551 assert_eq!(targets, vec!["rule1", "end_rule"]);
4552 }
4553
4554 #[test]
4555 fn test_insert_rule_out_of_bounds() {
4556 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4557 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4558
4559 let result = makefile.insert_rule(5, new_rule);
4560 assert!(result.is_err());
4561 }
4562
4563 #[test]
4564 fn test_remove_command() {
4565 let mut rule: Rule = "rule:\n\tcommand1\n\tcommand2\n\tcommand3\n"
4566 .parse()
4567 .unwrap();
4568
4569 rule.remove_command(1);
4570 let recipes: Vec<_> = rule.recipes().collect();
4571 assert_eq!(recipes, vec!["command1", "command3"]);
4572 assert_eq!(rule.recipe_count(), 2);
4573 }
4574
4575 #[test]
4576 fn test_remove_command_out_of_bounds() {
4577 let mut rule: Rule = "rule:\n\tcommand1\n".parse().unwrap();
4578
4579 let result = rule.remove_command(5);
4580 assert!(!result);
4581 }
4582
4583 #[test]
4584 fn test_insert_command() {
4585 let mut rule: Rule = "rule:\n\tcommand1\n\tcommand3\n".parse().unwrap();
4586
4587 rule.insert_command(1, "command2");
4588 let recipes: Vec<_> = rule.recipes().collect();
4589 assert_eq!(recipes, vec!["command1", "command2", "command3"]);
4590 }
4591
4592 #[test]
4593 fn test_insert_command_at_end() {
4594 let mut rule: Rule = "rule:\n\tcommand1\n".parse().unwrap();
4595
4596 rule.insert_command(1, "command2");
4597 let recipes: Vec<_> = rule.recipes().collect();
4598 assert_eq!(recipes, vec!["command1", "command2"]);
4599 }
4600
4601 #[test]
4602 fn test_insert_command_in_empty_rule() {
4603 let mut rule: Rule = "rule:\n".parse().unwrap();
4604
4605 rule.insert_command(0, "new_command");
4606 let recipes: Vec<_> = rule.recipes().collect();
4607 assert_eq!(recipes, vec!["new_command"]);
4608 }
4609
4610 #[test]
4611 fn test_recipe_count() {
4612 let rule1: Rule = "rule:\n".parse().unwrap();
4613 assert_eq!(rule1.recipe_count(), 0);
4614
4615 let rule2: Rule = "rule:\n\tcommand1\n\tcommand2\n".parse().unwrap();
4616 assert_eq!(rule2.recipe_count(), 2);
4617 }
4618
4619 #[test]
4620 fn test_clear_commands() {
4621 let mut rule: Rule = "rule:\n\tcommand1\n\tcommand2\n\tcommand3\n"
4622 .parse()
4623 .unwrap();
4624
4625 rule.clear_commands();
4626 assert_eq!(rule.recipe_count(), 0);
4627
4628 let recipes: Vec<_> = rule.recipes().collect();
4629 assert_eq!(recipes, Vec::<String>::new());
4630
4631 let targets: Vec<_> = rule.targets().collect();
4633 assert_eq!(targets, vec!["rule"]);
4634 }
4635
4636 #[test]
4637 fn test_clear_commands_empty_rule() {
4638 let mut rule: Rule = "rule:\n".parse().unwrap();
4639
4640 rule.clear_commands();
4641 assert_eq!(rule.recipe_count(), 0);
4642
4643 let targets: Vec<_> = rule.targets().collect();
4644 assert_eq!(targets, vec!["rule"]);
4645 }
4646
4647 #[test]
4648 fn test_rule_manipulation_preserves_structure() {
4649 let input = r#"# Comment
4651VAR = value
4652
4653rule1:
4654 command1
4655
4656# Another comment
4657rule2:
4658 command2
4659
4660VAR2 = value2
4661"#;
4662
4663 let mut makefile: Makefile = input.parse().unwrap();
4664 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4665
4666 makefile.insert_rule(1, new_rule).unwrap();
4668
4669 let targets: Vec<_> = makefile
4671 .rules()
4672 .flat_map(|r| r.targets().collect::<Vec<_>>())
4673 .collect();
4674 assert_eq!(targets, vec!["rule1", "new_rule", "rule2"]);
4675
4676 let vars: Vec<_> = makefile.variable_definitions().collect();
4678 assert_eq!(vars.len(), 2);
4679
4680 let output = makefile.code();
4682 assert!(output.contains("# Comment"));
4683 assert!(output.contains("VAR = value"));
4684 assert!(output.contains("# Another comment"));
4685 assert!(output.contains("VAR2 = value2"));
4686 }
4687
4688 #[test]
4689 fn test_replace_rule_with_multiple_targets() {
4690 let mut makefile: Makefile = "target1 target2: dep\n\tcommand\n".parse().unwrap();
4691 let new_rule: Rule = "new_target: new_dep\n\tnew_command\n".parse().unwrap();
4692
4693 makefile.replace_rule(0, new_rule).unwrap();
4694
4695 let targets: Vec<_> = makefile
4696 .rules()
4697 .flat_map(|r| r.targets().collect::<Vec<_>>())
4698 .collect();
4699 assert_eq!(targets, vec!["new_target"]);
4700 }
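
    // Minimal sketch of `remove_target` (defined above), assuming the
    // behaviour its implementation suggests: removing one of several targets
    // keeps the rest, removing a name that is not present returns Ok(false),
    // and removing the last remaining target is rejected so a rule never ends
    // up with an empty target list.
    #[test]
    fn test_remove_target_sketch() {
        let mut rule: Rule = "alpha beta: dep\n\tcommand\n".parse().unwrap();

        assert!(rule.remove_target("alpha").unwrap());
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["beta"]);

        assert!(!rule.remove_target("missing").unwrap());
        assert!(rule.remove_target("beta").is_err());
    }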
4701
4702 #[test]
4703 fn test_empty_makefile_operations() {
4704 let mut makefile = Makefile::new();
4705
4706 assert!(makefile
4708 .replace_rule(0, "rule:\n\tcommand\n".parse().unwrap())
4709 .is_err());
4710 assert!(makefile.remove_rule(0).is_err());
4711
4712 let new_rule: Rule = "first_rule:\n\tcommand\n".parse().unwrap();
4714 makefile.insert_rule(0, new_rule).unwrap();
4715 assert_eq!(makefile.rules().count(), 1);
4716 }
4717
4718 #[test]
4719 fn test_command_operations_preserve_indentation() {
4720 let mut rule: Rule = "rule:\n\t\tdeep_indent\n\tshallow_indent\n"
4721 .parse()
4722 .unwrap();
4723
4724 rule.insert_command(1, "middle_command");
4725 let recipes: Vec<_> = rule.recipes().collect();
4726 assert_eq!(
4727 recipes,
4728 vec!["\tdeep_indent", "middle_command", "shallow_indent"]
4729 );
4730 }
4731
4732 #[test]
4733 fn test_rule_operations_with_variables_and_includes() {
4734 let input = r#"VAR1 = value1
4735include common.mk
4736
4737rule1:
4738 command1
4739
4740VAR2 = value2
4741include other.mk
4742
4743rule2:
4744 command2
4745"#;
4746
4747 let mut makefile: Makefile = input.parse().unwrap();
4748
4749 makefile.remove_rule(0).unwrap();
4751
4752 let output = makefile.code();
4754 assert!(output.contains("VAR1 = value1"));
4755 assert!(output.contains("include common.mk"));
4756 assert!(output.contains("VAR2 = value2"));
4757 assert!(output.contains("include other.mk"));
4758
4759 assert_eq!(makefile.rules().count(), 1);
4761 let remaining_targets: Vec<_> = makefile
4762 .rules()
4763 .flat_map(|r| r.targets().collect::<Vec<_>>())
4764 .collect();
4765 assert_eq!(remaining_targets, vec!["rule2"]);
4766 }
4767
4768 #[test]
4769 fn test_command_manipulation_edge_cases() {
4770 let mut empty_rule: Rule = "empty:\n".parse().unwrap();
4772 assert_eq!(empty_rule.recipe_count(), 0);
4773
4774 empty_rule.insert_command(0, "first_command");
4775 assert_eq!(empty_rule.recipe_count(), 1);
4776
4777 let mut empty_rule2: Rule = "empty:\n".parse().unwrap();
4779 empty_rule2.clear_commands();
4780 assert_eq!(empty_rule2.recipe_count(), 0);
4781 }
4782
4783 #[test]
4784 fn test_archive_member_parsing() {
4785 let input = "libfoo.a(bar.o): bar.c\n\tgcc -c bar.c -o bar.o\n\tar r libfoo.a bar.o\n";
4787 let parsed = parse(input);
4788 assert!(
4789 parsed.errors.is_empty(),
4790 "Should parse archive member without errors"
4791 );
4792
4793 let makefile = parsed.root();
4794 let rules: Vec<_> = makefile.rules().collect();
4795 assert_eq!(rules.len(), 1);
4796
4797 let target_text = rules[0].targets().next().unwrap();
4799 assert_eq!(target_text, "libfoo.a(bar.o)");
4800 }
4801
4802 #[test]
4803 fn test_archive_member_multiple_members() {
4804 let input = "libfoo.a(bar.o baz.o): bar.c baz.c\n\tgcc -c bar.c baz.c\n\tar r libfoo.a bar.o baz.o\n";
4806 let parsed = parse(input);
4807 assert!(
4808 parsed.errors.is_empty(),
4809 "Should parse multiple archive members"
4810 );
4811
4812 let makefile = parsed.root();
4813 let rules: Vec<_> = makefile.rules().collect();
4814 assert_eq!(rules.len(), 1);
4815 }
4816
4817 #[test]
4818 fn test_archive_member_in_dependencies() {
4819 let input =
4821 "program: main.o libfoo.a(bar.o) libfoo.a(baz.o)\n\tgcc -o program main.o libfoo.a\n";
4822 let parsed = parse(input);
4823 assert!(
4824 parsed.errors.is_empty(),
4825 "Should parse archive members in dependencies"
4826 );
4827
4828 let makefile = parsed.root();
4829 let rules: Vec<_> = makefile.rules().collect();
4830 assert_eq!(rules.len(), 1);
4831 }
4832
4833 #[test]
4834 fn test_archive_member_with_variables() {
4835 let input = "$(LIB)($(OBJ)): $(SRC)\n\t$(CC) -c $(SRC)\n\t$(AR) r $(LIB) $(OBJ)\n";
4837 let parsed = parse(input);
4838 assert!(
4840 parsed.errors.is_empty(),
4841 "Should parse archive members with variables"
4842 );
4843 }
4844
4845 #[test]
4846 fn test_archive_member_ast_access() {
4847 let input = "libtest.a(foo.o bar.o): foo.c bar.c\n\tgcc -c foo.c bar.c\n";
4849 let parsed = parse(input);
4850 let makefile = parsed.root();
4851
4852 let archive_member_count = makefile
4854 .syntax()
4855 .descendants()
4856 .filter(|n| n.kind() == ARCHIVE_MEMBERS)
4857 .count();
4858
4859 assert!(
4860 archive_member_count > 0,
4861 "Should find ARCHIVE_MEMBERS nodes in AST"
4862 );
4863 }
4864
4865 #[test]
4866 fn test_large_makefile_performance() {
4867 let mut makefile = Makefile::new();
4869
4870 for i in 0..100 {
4872 let rule_name = format!("rule{}", i);
4873 let _rule = makefile
4874 .add_rule(&rule_name)
4875 .push_command(&format!("command{}", i));
4876 }
4877
4878 assert_eq!(makefile.rules().count(), 100);
4879
4880 let new_rule: Rule = "middle_rule:\n\tmiddle_command\n".parse().unwrap();
4882 makefile.replace_rule(50, new_rule).unwrap();
4883
4884 let rule_50_targets: Vec<_> = makefile.rules().nth(50).unwrap().targets().collect();
4886 assert_eq!(rule_50_targets, vec!["middle_rule"]);
4887
        assert_eq!(makefile.rules().count(), 100);
    }
4890
4891 #[test]
4892 fn test_complex_recipe_manipulation() {
4893 let mut complex_rule: Rule = r#"complex:
4894 @echo "Starting build"
4895 $(CC) $(CFLAGS) -o $@ $<
4896 @echo "Build complete"
4897 chmod +x $@
4898"#
4899 .parse()
4900 .unwrap();
4901
4902 assert_eq!(complex_rule.recipe_count(), 4);
4903
        complex_rule.remove_command(0);
        // Indices shift after removal: what was recipe 2 ("Build complete")
        // is now at index 1.
        complex_rule.remove_command(1);

        let final_recipes: Vec<_> = complex_rule.recipes().collect();
4909 assert_eq!(final_recipes.len(), 2);
4910 assert!(final_recipes[0].contains("$(CC)"));
4911 assert!(final_recipes[1].contains("chmod"));
4912 }
4913
4914 #[test]
4915 fn test_variable_definition_remove() {
4916 let makefile: Makefile = r#"VAR1 = value1
4917VAR2 = value2
4918VAR3 = value3
4919"#
4920 .parse()
4921 .unwrap();
4922
4923 assert_eq!(makefile.variable_definitions().count(), 3);
4925
4926 let mut var2 = makefile
4928 .variable_definitions()
4929 .nth(1)
4930 .expect("Should have second variable");
4931 assert_eq!(var2.name(), Some("VAR2".to_string()));
4932 var2.remove();
4933
4934 assert_eq!(makefile.variable_definitions().count(), 2);
4936 let var_names: Vec<_> = makefile
4937 .variable_definitions()
4938 .filter_map(|v| v.name())
4939 .collect();
4940 assert_eq!(var_names, vec!["VAR1", "VAR3"]);
4941 }
4942
4943 #[test]
4944 fn test_variable_definition_set_value() {
4945 let makefile: Makefile = "VAR = old_value\n".parse().unwrap();
4946
4947 let mut var = makefile
4948 .variable_definitions()
4949 .next()
4950 .expect("Should have variable");
4951 assert_eq!(var.raw_value(), Some("old_value".to_string()));
4952
4953 var.set_value("new_value");
4955
4956 assert_eq!(var.raw_value(), Some("new_value".to_string()));
4958 assert!(makefile.code().contains("VAR = new_value"));
4959 }
4960
4961 #[test]
4962 fn test_variable_definition_set_value_preserves_format() {
4963 let makefile: Makefile = "export VAR := old_value\n".parse().unwrap();
4964
4965 let mut var = makefile
4966 .variable_definitions()
4967 .next()
4968 .expect("Should have variable");
4969 assert_eq!(var.raw_value(), Some("old_value".to_string()));
4970
4971 var.set_value("new_value");
4973
4974 assert_eq!(var.raw_value(), Some("new_value".to_string()));
4976 let code = makefile.code();
4977 assert!(code.contains("export"), "Should preserve export prefix");
4978 assert!(code.contains(":="), "Should preserve := operator");
4979 assert!(code.contains("new_value"), "Should have new value");
4980 }
4981
4982 #[test]
4983 fn test_makefile_find_variable() {
4984 let makefile: Makefile = r#"VAR1 = value1
4985VAR2 = value2
4986VAR3 = value3
4987"#
4988 .parse()
4989 .unwrap();
4990
4991 let vars: Vec<_> = makefile.find_variable("VAR2").collect();
4993 assert_eq!(vars.len(), 1);
4994 assert_eq!(vars[0].name(), Some("VAR2".to_string()));
4995 assert_eq!(vars[0].raw_value(), Some("value2".to_string()));
4996
4997 assert_eq!(makefile.find_variable("NONEXISTENT").count(), 0);
4999 }
5000
5001 #[test]
5002 fn test_makefile_find_variable_with_export() {
5003 let makefile: Makefile = r#"VAR1 = value1
5004export VAR2 := value2
5005VAR3 = value3
5006"#
5007 .parse()
5008 .unwrap();
5009
5010 let vars: Vec<_> = makefile.find_variable("VAR2").collect();
5012 assert_eq!(vars.len(), 1);
5013 assert_eq!(vars[0].name(), Some("VAR2".to_string()));
5014 assert_eq!(vars[0].raw_value(), Some("value2".to_string()));
5015 }
5016
5017 #[test]
5018 fn test_variable_definition_is_export() {
5019 let makefile: Makefile = r#"VAR1 = value1
5020export VAR2 := value2
5021export VAR3 = value3
5022VAR4 := value4
5023"#
5024 .parse()
5025 .unwrap();
5026
5027 let vars: Vec<_> = makefile.variable_definitions().collect();
5028 assert_eq!(vars.len(), 4);
5029
        assert!(!vars[0].is_export());
        assert!(vars[1].is_export());
        assert!(vars[2].is_export());
        assert!(!vars[3].is_export());
    }

    #[test]
    fn test_makefile_find_variable_multiple() {
        let makefile: Makefile = r#"VAR1 = value1
VAR1 = value2
VAR2 = other
VAR1 = value3
"#
        .parse()
        .unwrap();

        let vars: Vec<_> = makefile.find_variable("VAR1").collect();
        assert_eq!(vars.len(), 3);
        assert_eq!(vars[0].raw_value(), Some("value1".to_string()));
        assert_eq!(vars[1].raw_value(), Some("value2".to_string()));
        assert_eq!(vars[2].raw_value(), Some("value3".to_string()));

        let var2s: Vec<_> = makefile.find_variable("VAR2").collect();
        assert_eq!(var2s.len(), 1);
        assert_eq!(var2s[0].raw_value(), Some("other".to_string()));
    }

    #[test]
    fn test_variable_remove_and_find() {
        let makefile: Makefile = r#"VAR1 = value1
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .find_variable("VAR2")
            .next()
            .expect("Should find VAR2");
        var2.remove();

        assert_eq!(makefile.find_variable("VAR2").count(), 0);

        assert_eq!(makefile.find_variable("VAR1").count(), 1);
        assert_eq!(makefile.find_variable("VAR3").count(), 1);
    }

    #[test]
    fn test_variable_remove_with_comment() {
        let makefile: Makefile = r#"VAR1 = value1
# This is a comment about VAR2
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        assert_eq!(var2.name(), Some("VAR2".to_string()));
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\nVAR3 = value3\n");
    }

    #[test]
    fn test_variable_remove_with_multiple_comments() {
        let makefile: Makefile = r#"VAR1 = value1
# Comment line 1
# Comment line 2
# Comment line 3
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\nVAR3 = value3\n");
    }

    #[test]
    fn test_variable_remove_with_empty_line() {
        let makefile: Makefile = r#"VAR1 = value1

# Comment about VAR2
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\nVAR3 = value3\n");
    }

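    // Removing VAR2 drops its comment block; of the blank lines above it, a
    // single blank line is kept between the surviving definitions.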
    #[test]
    fn test_variable_remove_with_multiple_empty_lines() {
        let makefile: Makefile = r#"VAR1 = value1


# Comment about VAR2
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\n\nVAR3 = value3\n");
    }

    #[test]
    fn test_rule_remove_with_comment() {
        let makefile: Makefile = r#"rule1:
	command1

# Comment about rule2
rule2:
	command2
rule3:
	command3
"#
        .parse()
        .unwrap();

        let rule2 = makefile.rules().nth(1).expect("Should have second rule");
        rule2.remove().unwrap();

        assert_eq!(
            makefile.code(),
            "rule1:\n\tcommand1\n\nrule3:\n\tcommand3\n"
        );
    }

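    // The shebang line is not an ordinary comment: it must survive even though
    // the regular comment attached to VAR1 is removed along with the variable.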
    #[test]
    fn test_variable_remove_preserves_shebang() {
        let makefile: Makefile = r#"#!/usr/bin/make -f
# This is a regular comment
VAR1 = value1
VAR2 = value2
"#
        .parse()
        .unwrap();

        let mut var1 = makefile.variable_definitions().next().unwrap();
        var1.remove();

        let code = makefile.code();
        assert!(code.starts_with("#!/usr/bin/make -f"));
        assert!(!code.contains("regular comment"));
        assert!(!code.contains("VAR1"));
        assert!(code.contains("VAR2"));
    }

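    // Only the comment directly above the removed variable goes away; the
    // comment belonging to VAR3 must stay attached to it.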
    #[test]
    fn test_variable_remove_preserves_subsequent_comments() {
        let makefile: Makefile = r#"VAR1 = value1
# Comment about VAR2
VAR2 = value2

# Comment about VAR3
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        let code = makefile.code();
        assert_eq!(
            code,
            "VAR1 = value1\n\n# Comment about VAR3\nVAR3 = value3\n"
        );
    }

    #[test]
    fn test_variable_remove_after_shebang_preserves_empty_line() {
        let makefile: Makefile = r#"#!/usr/bin/make -f
export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed

%:
	dh $@
"#
        .parse()
        .unwrap();

        let mut var = makefile.variable_definitions().next().unwrap();
        var.remove();

        assert_eq!(makefile.code(), "#!/usr/bin/make -f\n\n%:\n\tdh $@\n");
    }

    #[test]
    fn test_rule_add_prerequisite() {
        let mut rule: Rule = "target: dep1\n".parse().unwrap();
        rule.add_prerequisite("dep2").unwrap();
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
    }

    #[test]
    fn test_rule_remove_prerequisite() {
        let mut rule: Rule = "target: dep1 dep2 dep3\n".parse().unwrap();
        assert!(rule.remove_prerequisite("dep2").unwrap());
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep3"]
        );
        assert!(!rule.remove_prerequisite("nonexistent").unwrap());
    }

    #[test]
    fn test_rule_set_prerequisites() {
        let mut rule: Rule = "target: old_dep\n".parse().unwrap();
        rule.set_prerequisites(vec!["new_dep1", "new_dep2"])
            .unwrap();
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["new_dep1", "new_dep2"]
        );
    }

    #[test]
    fn test_rule_set_prerequisites_empty() {
        let mut rule: Rule = "target: dep1 dep2\n".parse().unwrap();
        rule.set_prerequisites(vec![]).unwrap();
        assert_eq!(rule.prerequisites().collect::<Vec<_>>().len(), 0);
    }

    #[test]
    fn test_rule_add_target() {
        let mut rule: Rule = "target1: dep1\n".parse().unwrap();
        rule.add_target("target2").unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target1", "target2"]
        );
    }

    #[test]
    fn test_rule_set_targets() {
        let mut rule: Rule = "old_target: dependency\n".parse().unwrap();
        rule.set_targets(vec!["new_target1", "new_target2"])
            .unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["new_target1", "new_target2"]
        );
    }

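    // A rule must keep at least one target, so clearing all of the targets is
    // an error and leaves the rule unchanged.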
    #[test]
    fn test_rule_set_targets_empty() {
        let mut rule: Rule = "target: dep1\n".parse().unwrap();
        let result = rule.set_targets(vec![]);
        assert!(result.is_err());
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["target"]);
    }

    #[test]
    fn test_rule_has_target() {
        let rule: Rule = "target1 target2: dependency\n".parse().unwrap();
        assert!(rule.has_target("target1"));
        assert!(rule.has_target("target2"));
        assert!(!rule.has_target("target3"));
        assert!(!rule.has_target("nonexistent"));
    }

    #[test]
    fn test_rule_rename_target() {
        let mut rule: Rule = "old_target: dependency\n".parse().unwrap();
        assert!(rule.rename_target("old_target", "new_target").unwrap());
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["new_target"]);
        assert!(!rule.rename_target("nonexistent", "something").unwrap());
    }

    #[test]
    fn test_rule_rename_target_multiple() {
        let mut rule: Rule = "target1 target2 target3: dependency\n".parse().unwrap();
        assert!(rule.rename_target("target2", "renamed_target").unwrap());
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target1", "renamed_target", "target3"]
        );
    }

    #[test]
    fn test_rule_remove_target() {
        let mut rule: Rule = "target1 target2 target3: dependency\n".parse().unwrap();
        assert!(rule.remove_target("target2").unwrap());
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target1", "target3"]
        );
        assert!(!rule.remove_target("nonexistent").unwrap());
    }

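    // Removing the only remaining target is rejected, mirroring set_targets
    // with an empty list.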
    #[test]
    fn test_rule_remove_target_last() {
        let mut rule: Rule = "single_target: dependency\n".parse().unwrap();
        let result = rule.remove_target("single_target");
        assert!(result.is_err());
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["single_target"]);
    }

    #[test]
    fn test_rule_target_manipulation_preserves_prerequisites() {
        let mut rule: Rule = "target1 target2: dep1 dep2\n\tcommand".parse().unwrap();

        rule.remove_target("target1").unwrap();
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["target2"]);
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
        assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);

        rule.add_target("target3").unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target2", "target3"]
        );
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
        assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);

        rule.rename_target("target2", "renamed").unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["renamed", "target3"]
        );
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
        assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
    }

    #[test]
    fn test_rule_remove() {
        let makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
        let rule = makefile.find_rule_by_target("rule1").unwrap();
        rule.remove().unwrap();
        assert_eq!(makefile.rules().count(), 1);
        assert!(makefile.find_rule_by_target("rule1").is_none());
        assert!(makefile.find_rule_by_target("rule2").is_some());
    }

    #[test]
    fn test_makefile_find_rule_by_target() {
        let makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
        let rule = makefile.find_rule_by_target("rule2");
        assert!(rule.is_some());
        assert_eq!(rule.unwrap().targets().collect::<Vec<_>>(), vec!["rule2"]);
        assert!(makefile.find_rule_by_target("nonexistent").is_none());
    }

    #[test]
    fn test_makefile_find_rules_by_target() {
        let makefile: Makefile = "rule1:\n\tcommand1\nrule1:\n\tcommand2\nrule2:\n\tcommand3\n"
            .parse()
            .unwrap();
        assert_eq!(makefile.find_rules_by_target("rule1").count(), 2);
        assert_eq!(makefile.find_rules_by_target("rule2").count(), 1);
        assert_eq!(makefile.find_rules_by_target("nonexistent").count(), 0);
    }

    #[test]
    fn test_makefile_add_phony_target() {
        let mut makefile = Makefile::new();
        makefile.add_phony_target("clean").unwrap();
        assert!(makefile.is_phony("clean"));
        assert_eq!(makefile.phony_targets().collect::<Vec<_>>(), vec!["clean"]);
    }

    #[test]
    fn test_makefile_add_phony_target_existing() {
        let mut makefile: Makefile = ".PHONY: test\n".parse().unwrap();
        makefile.add_phony_target("clean").unwrap();
        assert!(makefile.is_phony("test"));
        assert!(makefile.is_phony("clean"));
        let targets: Vec<_> = makefile.phony_targets().collect();
        assert!(targets.contains(&"test".to_string()));
        assert!(targets.contains(&"clean".to_string()));
    }

    #[test]
    fn test_makefile_remove_phony_target() {
        let mut makefile: Makefile = ".PHONY: clean test\n".parse().unwrap();
        assert!(makefile.remove_phony_target("clean").unwrap());
        assert!(!makefile.is_phony("clean"));
        assert!(makefile.is_phony("test"));
        assert!(!makefile.remove_phony_target("nonexistent").unwrap());
    }

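    // Removing the last phony target should also drop the now-empty .PHONY rule.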
    #[test]
    fn test_makefile_remove_phony_target_last() {
        let mut makefile: Makefile = ".PHONY: clean\n".parse().unwrap();
        assert!(makefile.remove_phony_target("clean").unwrap());
        assert!(!makefile.is_phony("clean"));
        assert!(makefile.find_rule_by_target(".PHONY").is_none());
    }

    #[test]
    fn test_makefile_is_phony() {
        let makefile: Makefile = ".PHONY: clean test\n".parse().unwrap();
        assert!(makefile.is_phony("clean"));
        assert!(makefile.is_phony("test"));
        assert!(!makefile.is_phony("build"));
    }

    #[test]
    fn test_makefile_phony_targets() {
        let makefile: Makefile = ".PHONY: clean test build\n".parse().unwrap();
        let phony_targets: Vec<_> = makefile.phony_targets().collect();
        assert_eq!(phony_targets, vec!["clean", "test", "build"]);
    }

    #[test]
    fn test_makefile_phony_targets_empty() {
        let makefile = Makefile::new();
        assert_eq!(makefile.phony_targets().count(), 0);
    }

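    // A comment and an empty line inside the recipe must not cut off the
    // command that follows them.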
    #[test]
    fn test_recipe_with_leading_comments_and_blank_lines() {
        let makefile_text = r#"#!/usr/bin/make

%:
	dh $@

override_dh_build:
	# The next line is empty

	dh_python3
"#;
        let makefile = Makefile::read_relaxed(makefile_text.as_bytes()).unwrap();

        let rules: Vec<_> = makefile.rules().collect();
        assert_eq!(rules.len(), 2, "Expected 2 rules");

        let rule0 = &rules[0];
        assert_eq!(rule0.targets().collect::<Vec<_>>(), vec!["%"]);
        assert_eq!(rule0.recipes().collect::<Vec<_>>(), vec!["dh $@"]);

        let rule1 = &rules[1];
        assert_eq!(
            rule1.targets().collect::<Vec<_>>(),
            vec!["override_dh_build"]
        );

        let recipes: Vec<_> = rule1.recipes().collect();
        assert!(
            !recipes.is_empty(),
            "Expected at least one recipe for override_dh_build, got none"
        );
        assert!(
            recipes.contains(&"dh_python3".to_string()),
            "Expected 'dh_python3' in recipes, got: {:?}",
            recipes
        );
    }
}