use crate::lex::lex;
use crate::SyntaxKind;
use crate::SyntaxKind::*;
use rowan::ast::AstNode;
use std::str::FromStr;
6
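/// An error reading or parsing a makefile: either an I/O failure or a parse failure.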
7#[derive(Debug)]
8pub enum Error {
10 Io(std::io::Error),
12
13 Parse(ParseError),
15}
16
17impl std::fmt::Display for Error {
18 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
19 match &self {
20 Error::Io(e) => write!(f, "IO error: {}", e),
21 Error::Parse(e) => write!(f, "Parse error: {}", e),
22 }
23 }
24}
25
26impl From<std::io::Error> for Error {
27 fn from(e: std::io::Error) -> Self {
28 Error::Io(e)
29 }
30}
31
32impl std::error::Error for Error {}
33
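/// A parse failure, carrying one [`ErrorInfo`] per problem encountered.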
34#[derive(Debug, Clone, PartialEq, Eq, Hash)]
35pub struct ParseError {
37 pub errors: Vec<ErrorInfo>,
39}
40
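/// A single parse diagnostic: message, 1-based line number and the offending source line.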
41#[derive(Debug, Clone, PartialEq, Eq, Hash)]
42pub struct ErrorInfo {
44 pub message: String,
46 pub line: usize,
48 pub context: String,
50}
51
52impl std::fmt::Display for ParseError {
53 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
54 for err in &self.errors {
55 writeln!(f, "Error at line {}: {}", err.line, err.message)?;
56 writeln!(f, "{}| {}", err.line, err.context)?;
57 }
58 Ok(())
59 }
60}
61
62impl std::error::Error for ParseError {}
63
64impl From<ParseError> for Error {
65 fn from(e: ParseError) -> Self {
66 Error::Parse(e)
67 }
68}
69
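/// rowan language tag for makefiles; converts between [`SyntaxKind`] and rowan's raw kinds.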
70#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
74pub enum Lang {}
75impl rowan::Language for Lang {
76 type Kind = SyntaxKind;
77 fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind {
78 unsafe { std::mem::transmute::<u16, SyntaxKind>(raw.0) }
79 }
80 fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind {
81 kind.into()
82 }
83}
84
85use rowan::GreenNode;
88
89use rowan::GreenNodeBuilder;
93
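/// Raw parse result: the lossless green tree plus any errors collected while parsing.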
94#[derive(Debug)]
97pub(crate) struct Parse {
98 pub(crate) green_node: GreenNode,
99 #[allow(unused)]
100 pub(crate) errors: Vec<ErrorInfo>,
101}
102
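/// Parse `text` into a green tree, collecting errors instead of bailing out on the first one.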
103pub(crate) fn parse(text: &str) -> Parse {
104 struct Parser {
105 tokens: Vec<(SyntaxKind, String)>,
108 builder: GreenNodeBuilder<'static>,
110 errors: Vec<ErrorInfo>,
113 original_text: String,
115 }
116
117 impl Parser {
118 fn error(&mut self, msg: String) {
119 self.builder.start_node(ERROR.into());
120
121 let (line, context) = if self.current() == Some(INDENT) {
122 let lines: Vec<&str> = self.original_text.lines().collect();
124 let tab_line = lines
125 .iter()
126 .enumerate()
127 .find(|(_, line)| line.starts_with('\t'))
128 .map(|(i, _)| i + 1)
129 .unwrap_or(1);
130
131 let next_line = tab_line + 1;
133 if next_line <= lines.len() {
134 (next_line, lines[next_line - 1].to_string())
135 } else {
136 (tab_line, lines[tab_line - 1].to_string())
137 }
138 } else {
139 let line = self.get_line_number_for_position(self.tokens.len());
140 (line, self.get_context_for_line(line))
141 };
142
143 let message = if self.current() == Some(INDENT) && !msg.contains("indented") {
144 if !self.tokens.is_empty() && self.tokens[self.tokens.len() - 1].0 == IDENTIFIER {
145 "expected ':'".to_string()
146 } else {
147 "indented line not part of a rule".to_string()
148 }
149 } else {
150 msg
151 };
152
153 self.errors.push(ErrorInfo {
154 message,
155 line,
156 context,
157 });
158
159 if self.current().is_some() {
160 self.bump();
161 }
162 self.builder.finish_node();
163 }
164
165 fn get_line_number_for_position(&self, position: usize) -> usize {
166 if position >= self.tokens.len() {
167 return self.original_text.matches('\n').count() + 1;
168 }
169
170 self.tokens[0..position]
172 .iter()
173 .filter(|(kind, _)| *kind == NEWLINE)
174 .count()
175 + 1
176 }
177
178 fn get_context_for_line(&self, line_number: usize) -> String {
179 self.original_text
180 .lines()
181 .nth(line_number - 1)
182 .unwrap_or("")
183 .to_string()
184 }
185
186 fn parse_recipe_line(&mut self) {
187 self.builder.start_node(RECIPE.into());
188
189 if self.current() != Some(INDENT) {
191 self.error("recipe line must start with a tab".to_string());
192 self.builder.finish_node();
193 return;
194 }
195 self.bump();
196
197 while self.current().is_some() && self.current() != Some(NEWLINE) {
200 self.bump();
201 }
202
203 if self.current() == Some(NEWLINE) {
205 self.bump();
206 }
207
208 self.builder.finish_node();
209 }
210
211 fn parse_rule_target(&mut self) -> bool {
212 match self.current() {
213 Some(IDENTIFIER) => {
214 if self.is_archive_member() {
216 self.parse_archive_member();
217 } else {
218 self.bump();
219 }
220 true
221 }
222 Some(DOLLAR) => {
223 self.parse_variable_reference();
224 true
225 }
226 _ => {
227 self.error("expected rule target".to_string());
228 false
229 }
230 }
231 }
232
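        // Lookahead: the current IDENTIFIER is directly followed by '(' (an archive
        // reference such as `lib.a(member.o)`). The token vector is reversed, so the
        // token after the current one sits at `len() - 2`.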
233 fn is_archive_member(&self) -> bool {
234 if self.tokens.len() < 2 {
237 return false;
238 }
239
240 let current_is_identifier = self.current() == Some(IDENTIFIER);
242 let next_is_lparen =
243 self.tokens.len() > 1 && self.tokens[self.tokens.len() - 2].0 == LPAREN;
244
245 current_is_identifier && next_is_lparen
246 }
247
248 fn parse_archive_member(&mut self) {
249 if self.current() == Some(IDENTIFIER) {
260 self.bump();
261 }
262
263 if self.current() == Some(LPAREN) {
265 self.bump();
266
267 self.builder.start_node(ARCHIVE_MEMBERS.into());
269
270 while self.current().is_some() && self.current() != Some(RPAREN) {
272 match self.current() {
273 Some(IDENTIFIER) | Some(TEXT) => {
274 self.builder.start_node(ARCHIVE_MEMBER.into());
276 self.bump();
277 self.builder.finish_node();
278 }
279 Some(WHITESPACE) => self.bump(),
280 Some(DOLLAR) => {
281 self.builder.start_node(ARCHIVE_MEMBER.into());
283 self.parse_variable_reference();
284 self.builder.finish_node();
285 }
286 _ => break,
287 }
288 }
289
290 self.builder.finish_node();
292
293 if self.current() == Some(RPAREN) {
295 self.bump();
296 } else {
297 self.error("expected ')' to close archive member".to_string());
298 }
299 }
300 }
301
302 fn parse_rule_dependencies(&mut self) {
303 self.builder.start_node(PREREQUISITES.into());
304
305 while self.current().is_some() && self.current() != Some(NEWLINE) {
306 match self.current() {
307 Some(WHITESPACE) => {
                        self.bump();
                    }
310 Some(IDENTIFIER) => {
311 self.builder.start_node(PREREQUISITE.into());
313
314 if self.is_archive_member() {
315 self.parse_archive_member();
316 } else {
                            self.bump();
                        }
319
                        self.builder.finish_node();
                    }
322 Some(DOLLAR) => {
323 self.builder.start_node(PREREQUISITE.into());
325
                        self.bump();
                        if self.current() == Some(LPAREN) {
                            self.bump();
                            let mut paren_count = 1;
332
333 while self.current().is_some() && paren_count > 0 {
334 if self.current() == Some(LPAREN) {
335 paren_count += 1;
336 } else if self.current() == Some(RPAREN) {
337 paren_count -= 1;
338 }
339 self.bump();
340 }
341 } else {
342 if self.current().is_some() {
344 self.bump();
345 }
346 }
347
                        self.builder.finish_node();
                    }
350 _ => {
351 self.bump();
353 }
354 }
355 }
356
            self.builder.finish_node();
        }
359
360 fn parse_rule_recipes(&mut self) {
361 loop {
362 match self.current() {
363 Some(INDENT) => {
364 self.parse_recipe_line();
365 }
366 Some(NEWLINE) => {
367 self.bump();
371 }
372 _ => break,
373 }
374 }
375 }
376
377 fn find_and_consume_colon(&mut self) -> bool {
378 self.skip_ws();
380
381 if self.current() == Some(OPERATOR) && self.tokens.last().unwrap().1 == ":" {
383 self.bump();
384 return true;
385 }
386
387 let has_colon = self
389 .tokens
390 .iter()
391 .rev()
392 .any(|(kind, text)| *kind == OPERATOR && text == ":");
393
394 if has_colon {
395 while self.current().is_some() {
397 if self.current() == Some(OPERATOR)
398 && self.tokens.last().map(|(_, text)| text.as_str()) == Some(":")
399 {
400 self.bump();
401 return true;
402 }
403 self.bump();
404 }
405 }
406
407 self.error("expected ':'".to_string());
408 false
409 }
410
411 fn parse_rule(&mut self) {
412 self.builder.start_node(RULE.into());
413
414 self.skip_ws();
416 self.builder.start_node(TARGETS.into());
417 let has_target = self.parse_rule_targets();
418 self.builder.finish_node();
419
420 let has_colon = if has_target {
422 self.find_and_consume_colon()
423 } else {
424 false
425 };
426
427 if has_target && has_colon {
429 self.skip_ws();
430 self.parse_rule_dependencies();
431 self.expect_eol();
432
433 self.parse_rule_recipes();
435 }
436
437 self.builder.finish_node();
438 }
439
440 fn parse_rule_targets(&mut self) -> bool {
441 let has_first_target = self.parse_rule_target();
443
444 if !has_first_target {
445 return false;
446 }
447
448 loop {
450 self.skip_ws();
451
452 if self.current() == Some(OPERATOR) && self.tokens.last().unwrap().1 == ":" {
454 break;
455 }
456
457 match self.current() {
459 Some(IDENTIFIER) | Some(DOLLAR) => {
460 if !self.parse_rule_target() {
461 break;
462 }
463 }
464 _ => break,
465 }
466 }
467
468 true
469 }
470
471 fn parse_comment(&mut self) {
472 if self.current() == Some(COMMENT) {
                self.bump();
                if self.current() == Some(NEWLINE) {
                    self.bump();
                } else if self.current() == Some(WHITESPACE) {
479 self.skip_ws();
481 if self.current() == Some(NEWLINE) {
482 self.bump();
483 }
484 }
485 } else {
487 self.error("expected comment".to_string());
488 }
489 }
490
491 fn parse_assignment(&mut self) {
492 self.builder.start_node(VARIABLE.into());
493
494 self.skip_ws();
496 if self.current() == Some(IDENTIFIER) && self.tokens.last().unwrap().1 == "export" {
497 self.bump();
498 self.skip_ws();
499 }
500
501 match self.current() {
503 Some(IDENTIFIER) => self.bump(),
504 Some(DOLLAR) => self.parse_variable_reference(),
505 _ => {
506 self.error("expected variable name".to_string());
507 self.builder.finish_node();
508 return;
509 }
510 }
511
512 self.skip_ws();
514 match self.current() {
515 Some(OPERATOR) => {
516 let op = &self.tokens.last().unwrap().1;
517 if ["=", ":=", "::=", ":::=", "+=", "?=", "!="].contains(&op.as_str()) {
518 self.bump();
519 self.skip_ws();
520
521 self.builder.start_node(EXPR.into());
523 while self.current().is_some() && self.current() != Some(NEWLINE) {
524 self.bump();
525 }
526 self.builder.finish_node();
527
528 if self.current() == Some(NEWLINE) {
530 self.bump();
531 } else {
532 self.error("expected newline after variable value".to_string());
533 }
534 } else {
535 self.error(format!("invalid assignment operator: {}", op));
536 }
537 }
538 _ => self.error("expected assignment operator".to_string()),
539 }
540
541 self.builder.finish_node();
542 }
543
544 fn parse_variable_reference(&mut self) {
545 self.builder.start_node(EXPR.into());
            self.bump();
            if self.current() == Some(LPAREN) {
                self.bump();
                let mut is_function = false;
553
554 if self.current() == Some(IDENTIFIER) {
555 let function_name = &self.tokens.last().unwrap().1;
556 let known_functions = [
558 "shell", "wildcard", "call", "eval", "file", "abspath", "dir",
559 ];
560 if known_functions.contains(&function_name.as_str()) {
561 is_function = true;
562 }
563 }
564
565 if is_function {
566 self.bump();
568
569 self.consume_balanced_parens(1);
571 } else {
572 self.parse_parenthesized_expr_internal(true);
574 }
575 } else {
576 self.error("expected ( after $ in variable reference".to_string());
577 }
578
579 self.builder.finish_node();
580 }
581
582 fn parse_parenthesized_expr(&mut self) {
584 self.builder.start_node(EXPR.into());
585
586 if self.current() != Some(LPAREN) {
587 self.error("expected opening parenthesis".to_string());
588 self.builder.finish_node();
589 return;
590 }
591
            self.bump();
            self.parse_parenthesized_expr_internal(false);
594 self.builder.finish_node();
595 }
596
597 fn parse_parenthesized_expr_internal(&mut self, is_variable_ref: bool) {
599 let mut paren_count = 1;
600
601 while paren_count > 0 && self.current().is_some() {
602 match self.current() {
603 Some(LPAREN) => {
604 paren_count += 1;
605 self.bump();
606 self.builder.start_node(EXPR.into());
608 }
609 Some(RPAREN) => {
610 paren_count -= 1;
611 self.bump();
612 if paren_count > 0 {
613 self.builder.finish_node();
614 }
615 }
616 Some(QUOTE) => {
617 self.parse_quoted_string();
619 }
620 Some(DOLLAR) => {
621 self.parse_variable_reference();
623 }
624 Some(_) => self.bump(),
625 None => {
626 self.error(if is_variable_ref {
627 "unclosed variable reference".to_string()
628 } else {
629 "unclosed parenthesis".to_string()
630 });
631 break;
632 }
633 }
634 }
635
636 if !is_variable_ref {
637 self.skip_ws();
638 self.expect_eol();
639 }
640 }
641
642 fn parse_quoted_string(&mut self) {
            self.bump();
            while !self.is_at_eof() && self.current() != Some(QUOTE) {
646 self.bump();
647 }
648 if self.current() == Some(QUOTE) {
649 self.bump();
650 }
651 }
652
653 fn parse_conditional_keyword(&mut self) -> Option<String> {
654 if self.current() != Some(IDENTIFIER) {
655 self.error(
656 "expected conditional keyword (ifdef, ifndef, ifeq, or ifneq)".to_string(),
657 );
658 return None;
659 }
660
661 let token = self.tokens.last().unwrap().1.clone();
662 if !["ifdef", "ifndef", "ifeq", "ifneq"].contains(&token.as_str()) {
663 self.error(format!("unknown conditional directive: {}", token));
664 return None;
665 }
666
667 self.bump();
668 Some(token)
669 }
670
671 fn parse_simple_condition(&mut self) {
672 self.builder.start_node(EXPR.into());
673
674 self.skip_ws();
676
677 let mut found_var = false;
679
680 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
681 match self.current() {
682 Some(WHITESPACE) => self.skip_ws(),
683 Some(DOLLAR) => {
684 found_var = true;
685 self.parse_variable_reference();
686 }
687 Some(_) => {
688 found_var = true;
690 self.bump();
691 }
692 None => break,
693 }
694 }
695
696 if !found_var {
697 self.error("expected condition after conditional directive".to_string());
699 }
700
701 self.builder.finish_node();
702
703 if self.current() == Some(NEWLINE) {
705 self.bump();
706 } else if !self.is_at_eof() {
707 self.skip_until_newline();
708 }
709 }
710
711 fn is_conditional_directive(&self, token: &str) -> bool {
713 token == "ifdef"
714 || token == "ifndef"
715 || token == "ifeq"
716 || token == "ifneq"
717 || token == "else"
718 || token == "elif"
719 || token == "endif"
720 }
721
722 fn handle_conditional_token(&mut self, token: &str, depth: &mut usize) -> bool {
724 match token {
725 "ifdef" | "ifndef" | "ifeq" | "ifneq" => {
726 *depth += 1;
727 self.parse_conditional();
728 true
729 }
730 "else" | "elif" => {
731 if *depth == 0 {
733 self.error(format!("{} without matching if", token));
734 self.bump();
736 false
737 } else {
738 self.bump();
740
741 if token == "elif" {
743 self.skip_ws();
744
745 if self.current() == Some(IDENTIFIER) {
747 let next_token = &self.tokens.last().unwrap().1;
748 if next_token == "ifeq"
749 || next_token == "ifdef"
750 || next_token == "ifndef"
751 || next_token == "ifneq"
752 {
753 match next_token.as_str() {
755 "ifdef" | "ifndef" => {
                                            self.bump();
                                            self.skip_ws();
758 self.parse_simple_condition();
759 }
760 "ifeq" | "ifneq" => {
                                            self.bump();
                                            self.skip_ws();
763 self.parse_parenthesized_expr();
764 }
765 _ => unreachable!(),
766 }
767 } else {
768 self.builder.start_node(EXPR.into());
770 while self.current().is_some()
772 && self.current() != Some(NEWLINE)
773 {
774 self.bump();
775 }
776 self.builder.finish_node();
777 if self.current() == Some(NEWLINE) {
778 self.bump();
779 }
780 }
781 } else {
782 self.builder.start_node(EXPR.into());
784 while self.current().is_some() && self.current() != Some(NEWLINE) {
786 self.bump();
787 }
788 self.builder.finish_node();
789 if self.current() == Some(NEWLINE) {
790 self.bump();
791 }
792 }
793 } else {
794 self.expect_eol();
796 }
797 true
798 }
799 }
800 "endif" => {
801 if *depth == 0 {
803 self.error("endif without matching if".to_string());
804 self.bump();
806 false
807 } else {
808 *depth -= 1;
809 self.bump();
811
812 self.skip_ws();
814
815 if self.current() == Some(COMMENT) {
820 self.parse_comment();
821 } else if self.current() == Some(NEWLINE) {
822 self.bump();
823 } else if self.current() == Some(WHITESPACE) {
824 self.skip_ws();
826 if self.current() == Some(NEWLINE) {
827 self.bump();
828 }
829 } else if !self.is_at_eof() {
831 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
834 self.bump();
835 }
836 if self.current() == Some(NEWLINE) {
837 self.bump();
838 }
839 }
840 true
843 }
844 }
845 _ => false,
846 }
847 }
848
849 fn parse_conditional(&mut self) {
850 self.builder.start_node(CONDITIONAL.into());
851
852 let Some(token) = self.parse_conditional_keyword() else {
854 self.skip_until_newline();
855 self.builder.finish_node();
856 return;
857 };
858
859 self.skip_ws();
861
862 match token.as_str() {
864 "ifdef" | "ifndef" => {
865 self.parse_simple_condition();
866 }
867 "ifeq" | "ifneq" => {
868 self.parse_parenthesized_expr();
869 }
870 _ => unreachable!("Invalid conditional token"),
871 }
872
873 self.skip_ws();
875 if self.current() == Some(COMMENT) {
876 self.parse_comment();
877 } else {
878 self.expect_eol();
879 }
880
881 let mut depth = 1;
883
884 let mut position_count = std::collections::HashMap::<usize, usize>::new();
            // Guard against error recovery getting stuck at the same position forever.
            let max_repetitions = 15;

            while depth > 0 && !self.is_at_eof() {
889 let current_pos = self.tokens.len();
891 *position_count.entry(current_pos).or_insert(0) += 1;
892
                if position_count.get(&current_pos).unwrap() > &max_repetitions {
896 break;
899 }
900
901 match self.current() {
902 None => {
903 self.error("unterminated conditional (missing endif)".to_string());
904 break;
905 }
906 Some(IDENTIFIER) => {
907 let token = self.tokens.last().unwrap().1.clone();
908 if !self.handle_conditional_token(&token, &mut depth) {
909 if token == "include" || token == "-include" || token == "sinclude" {
910 self.parse_include();
911 } else {
912 self.parse_normal_content();
913 }
914 }
915 }
916 Some(INDENT) => self.parse_recipe_line(),
917 Some(WHITESPACE) => self.bump(),
918 Some(COMMENT) => self.parse_comment(),
919 Some(NEWLINE) => self.bump(),
920 Some(DOLLAR) => self.parse_normal_content(),
921 Some(QUOTE) => self.parse_quoted_string(),
922 Some(_) => {
923 self.bump();
925 }
926 }
927 }
928
929 self.builder.finish_node();
930 }
931
932 fn parse_normal_content(&mut self) {
934 self.skip_ws();
936
937 if self.is_assignment_line() {
939 self.parse_assignment();
940 } else {
941 self.parse_rule();
943 }
944 }
945
946 fn parse_include(&mut self) {
947 self.builder.start_node(INCLUDE.into());
948
949 if self.current() != Some(IDENTIFIER)
951 || (!["include", "-include", "sinclude"]
952 .contains(&self.tokens.last().unwrap().1.as_str()))
953 {
954 self.error("expected include directive".to_string());
955 self.builder.finish_node();
956 return;
957 }
958 self.bump();
959 self.skip_ws();
960
961 self.builder.start_node(EXPR.into());
963 let mut found_path = false;
964
965 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
966 match self.current() {
967 Some(WHITESPACE) => self.skip_ws(),
968 Some(DOLLAR) => {
969 found_path = true;
970 self.parse_variable_reference();
971 }
972 Some(_) => {
973 found_path = true;
975 self.bump();
976 }
977 None => break,
978 }
979 }
980
981 if !found_path {
982 self.error("expected file path after include".to_string());
983 }
984
985 self.builder.finish_node();
986
987 if self.current() == Some(NEWLINE) {
989 self.bump();
990 } else if !self.is_at_eof() {
991 self.error("expected newline after include".to_string());
992 self.skip_until_newline();
993 }
994
995 self.builder.finish_node();
996 }
997
998 fn parse_identifier_token(&mut self) -> bool {
999 let token = &self.tokens.last().unwrap().1;
1000
1001 if token.starts_with("%") {
1003 self.parse_rule();
1004 return true;
1005 }
1006
1007 if token.starts_with("if") {
1008 self.parse_conditional();
1009 return true;
1010 }
1011
1012 if token == "include" || token == "-include" || token == "sinclude" {
1013 self.parse_include();
1014 return true;
1015 }
1016
1017 self.parse_normal_content();
1019 true
1020 }
1021
1022 fn parse_token(&mut self) -> bool {
1023 match self.current() {
1024 None => false,
1025 Some(IDENTIFIER) => {
1026 let token = &self.tokens.last().unwrap().1;
1027 if self.is_conditional_directive(token) {
1028 self.parse_conditional();
1029 true
1030 } else {
1031 self.parse_identifier_token()
1032 }
1033 }
1034 Some(DOLLAR) => {
1035 self.parse_normal_content();
1036 true
1037 }
1038 Some(NEWLINE) => {
1039 self.builder.start_node(BLANK_LINE.into());
1040 self.bump();
1041 self.builder.finish_node();
1042 true
1043 }
1044 Some(COMMENT) => {
1045 self.parse_comment();
1046 true
1047 }
1048 Some(WHITESPACE) => {
1049 if self.is_end_of_file_or_newline_after_whitespace() {
1051 self.skip_ws();
1054 return true;
1055 }
1056
1057 let look_ahead_pos = self.tokens.len().saturating_sub(1);
1060 let mut is_documentation_or_help = false;
1061
1062 if look_ahead_pos > 0 {
1063 let next_token = &self.tokens[look_ahead_pos - 1];
1064 if next_token.0 == IDENTIFIER
1067 || next_token.0 == COMMENT
1068 || next_token.0 == TEXT
1069 {
1070 is_documentation_or_help = true;
1071 }
1072 }
1073
1074 if is_documentation_or_help {
1075 self.skip_ws();
1078 while self.current().is_some() && self.current() != Some(NEWLINE) {
1079 self.bump();
1080 }
1081 if self.current() == Some(NEWLINE) {
1082 self.bump();
1083 }
1084 } else {
1085 self.skip_ws();
1086 }
1087 true
1088 }
1089 Some(INDENT) => {
1090 self.bump();
1092
1093 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
1095 self.bump();
1096 }
1097 if self.current() == Some(NEWLINE) {
1098 self.bump();
1099 }
1100 true
1101 }
1102 Some(kind) => {
1103 self.error(format!("unexpected token {:?}", kind));
1104 self.bump();
1105 true
1106 }
1107 }
1108 }
1109
1110 fn parse(mut self) -> Parse {
1111 self.builder.start_node(ROOT.into());
1112
1113 while self.parse_token() {}
1114
1115 self.builder.finish_node();
1116
1117 Parse {
1118 green_node: self.builder.finish(),
1119 errors: self.errors,
1120 }
1121 }
1122
1123 fn is_assignment_line(&mut self) -> bool {
1125 let assignment_ops = ["=", ":=", "::=", ":::=", "+=", "?=", "!="];
1126 let mut pos = self.tokens.len().saturating_sub(1);
1127 let mut seen_identifier = false;
1128 let mut seen_export = false;
1129
1130 while pos > 0 {
1131 let (kind, text) = &self.tokens[pos];
1132
1133 match kind {
1134 NEWLINE => break,
1135 IDENTIFIER if text == "export" => seen_export = true,
1136 IDENTIFIER if !seen_identifier => seen_identifier = true,
1137 OPERATOR if assignment_ops.contains(&text.as_str()) => {
1138 return seen_identifier || seen_export
1139 }
                    OPERATOR if text == ":" => return false,
                    WHITESPACE => (),
                    _ if seen_export => return true,
                    _ => return false,
1144 }
1145 pos = pos.saturating_sub(1);
1146 }
1147 false
1148 }
1149
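        // Pop the current token off the reversed token stack and attach it to the tree.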
1150 fn bump(&mut self) {
1152 let (kind, text) = self.tokens.pop().unwrap();
1153 self.builder.token(kind.into(), text.as_str());
1154 }
1155 fn current(&self) -> Option<SyntaxKind> {
1157 self.tokens.last().map(|(kind, _)| *kind)
1158 }
1159
1160 fn expect_eol(&mut self) {
1161 self.skip_ws();
1163
1164 match self.current() {
1165 Some(NEWLINE) => {
1166 self.bump();
1167 }
1168 None => {
1169 }
1171 n => {
1172 self.error(format!("expected newline, got {:?}", n));
1173 self.skip_until_newline();
1175 }
1176 }
1177 }
1178
1179 fn is_at_eof(&self) -> bool {
1181 self.current().is_none()
1182 }
1183
1184 fn is_at_eof_or_only_whitespace(&self) -> bool {
1186 if self.is_at_eof() {
1187 return true;
1188 }
1189
1190 self.tokens
1192 .iter()
1193 .rev()
1194 .all(|(kind, _)| matches!(*kind, WHITESPACE | NEWLINE))
1195 }
1196
1197 fn skip_ws(&mut self) {
1198 while self.current() == Some(WHITESPACE) {
1199 self.bump()
1200 }
1201 }
1202
1203 fn skip_until_newline(&mut self) {
1204 while !self.is_at_eof() && self.current() != Some(NEWLINE) {
1205 self.bump();
1206 }
1207 if self.current() == Some(NEWLINE) {
1208 self.bump();
1209 }
1210 }
1211
1212 fn consume_balanced_parens(&mut self, start_paren_count: usize) -> usize {
1214 let mut paren_count = start_paren_count;
1215
1216 while paren_count > 0 && self.current().is_some() {
1217 match self.current() {
1218 Some(LPAREN) => {
1219 paren_count += 1;
1220 self.bump();
1221 }
1222 Some(RPAREN) => {
1223 paren_count -= 1;
1224 self.bump();
1225 if paren_count == 0 {
1226 break;
1227 }
1228 }
1229 Some(DOLLAR) => {
1230 self.parse_variable_reference();
1232 }
1233 Some(_) => self.bump(),
1234 None => {
1235 self.error("unclosed parenthesis".to_string());
1236 break;
1237 }
1238 }
1239 }
1240
1241 paren_count
1242 }
1243
1244 fn is_end_of_file_or_newline_after_whitespace(&self) -> bool {
1246 if self.is_at_eof_or_only_whitespace() {
1248 return true;
1249 }
1250
1251 if self.tokens.len() <= 1 {
1253 return true;
1254 }
1255
1256 false
1257 }
1258 }
1259
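    // Reverse the lexed tokens so the Vec can be used as a stack: `last()` is the
    // current token and `pop()` consumes it.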
1260 let mut tokens = lex(text);
1261 tokens.reverse();
1262 Parser {
1263 tokens,
1264 builder: GreenNodeBuilder::new(),
1265 errors: Vec::new(),
1266 original_text: text.to_string(),
1267 }
1268 .parse()
1269}
1270
1271type SyntaxNode = rowan::SyntaxNode<Lang>;
1277#[allow(unused)]
1278type SyntaxToken = rowan::SyntaxToken<Lang>;
1279#[allow(unused)]
1280type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>;
1281
1282impl Parse {
1283 fn syntax(&self) -> SyntaxNode {
1284 SyntaxNode::new_root_mut(self.green_node.clone())
1285 }
1286
1287 fn root(&self) -> Makefile {
1288 Makefile::cast(self.syntax()).unwrap()
1289 }
1290}
1291
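/// Defines a typed AST wrapper around a [`SyntaxNode`] of one specific [`SyntaxKind`].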
1292macro_rules! ast_node {
1293 ($ast:ident, $kind:ident) => {
1294 #[derive(PartialEq, Eq, Hash)]
1295 #[repr(transparent)]
1296 pub struct $ast(SyntaxNode);
1298
1299 impl AstNode for $ast {
1300 type Language = Lang;
1301
1302 fn can_cast(kind: SyntaxKind) -> bool {
1303 kind == $kind
1304 }
1305
1306 fn cast(syntax: SyntaxNode) -> Option<Self> {
1307 if Self::can_cast(syntax.kind()) {
1308 Some(Self(syntax))
1309 } else {
1310 None
1311 }
1312 }
1313
1314 fn syntax(&self) -> &SyntaxNode {
1315 &self.0
1316 }
1317 }
1318
1319 impl core::fmt::Display for $ast {
1320 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
1321 write!(f, "{}", self.0.text())
1322 }
1323 }
1324 };
1325}
1326
1327ast_node!(Makefile, ROOT);
1328ast_node!(Rule, RULE);
1329ast_node!(Identifier, IDENTIFIER);
1330ast_node!(VariableDefinition, VARIABLE);
1331ast_node!(Include, INCLUDE);
1332ast_node!(ArchiveMembers, ARCHIVE_MEMBERS);
1333ast_node!(ArchiveMember, ARCHIVE_MEMBER);
1334
1335impl ArchiveMembers {
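    /// The archive (library) name, i.e. the identifier before the '(' in `lib.a(member.o)`.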
1336 pub fn archive_name(&self) -> Option<String> {
1338 for element in self.syntax().children_with_tokens() {
1340 if let Some(token) = element.as_token() {
1341 if token.kind() == IDENTIFIER {
1342 return Some(token.text().to_string());
1343 } else if token.kind() == LPAREN {
1344 break;
1346 }
1347 }
1348 }
1349 None
1350 }
1351
1352 pub fn members(&self) -> impl Iterator<Item = ArchiveMember> + '_ {
1354 self.syntax().children().filter_map(ArchiveMember::cast)
1355 }
1356
1357 pub fn member_names(&self) -> Vec<String> {
1359 self.members().map(|m| m.text()).collect()
1360 }
1361}
1362
1363impl ArchiveMember {
1364 pub fn text(&self) -> String {
1366 self.syntax().text().to_string().trim().to_string()
1367 }
1368}
1369
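/// Remove `node` from `parent`, together with comment lines immediately preceding it.
/// Shebang-style `#!` comments are left untouched.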
1370fn remove_with_preceding_comments(node: &SyntaxNode, parent: &SyntaxNode) {
1378 let mut collected_elements = vec![];
1379 let mut found_comment = false;
1380
1381 let mut current = node.prev_sibling_or_token();
1383 while let Some(element) = current {
1384 match &element {
1385 rowan::NodeOrToken::Token(token) => match token.kind() {
1386 COMMENT => {
1387 if token.text().starts_with("#!") {
                        break;
                    }
1390 found_comment = true;
1391 collected_elements.push(element.clone());
1392 }
1393 NEWLINE | WHITESPACE => {
1394 collected_elements.push(element.clone());
1395 }
                _ => break,
            },
1398 rowan::NodeOrToken::Node(n) => {
1399 if n.kind() == BLANK_LINE {
1401 collected_elements.push(element.clone());
1402 } else {
                    break;
                }
1405 }
1406 }
1407 current = element.prev_sibling_or_token();
1408 }
1409
1410 let node_index = node.index();
1412 parent.splice_children(node_index..node_index + 1, vec![]);
1413
1414 if found_comment {
1416 let mut consecutive_newlines = 0;
1417 for element in collected_elements.iter().rev() {
1418 let should_remove = match element {
1419 rowan::NodeOrToken::Token(token) => match token.kind() {
1420 COMMENT => {
1421 consecutive_newlines = 0;
1422 true
1423 }
1424 NEWLINE => {
1425 consecutive_newlines += 1;
1426 consecutive_newlines <= 1
1427 }
1428 WHITESPACE => true,
1429 _ => false,
1430 },
1431 rowan::NodeOrToken::Node(n) => {
1432 if n.kind() == BLANK_LINE {
1434 consecutive_newlines += 1;
1435 consecutive_newlines <= 1
1436 } else {
1437 false
1438 }
1439 }
1440 };
1441
1442 if should_remove {
1443 let idx = element.index();
1444 parent.splice_children(idx..idx + 1, vec![]);
1445 }
1446 }
1447 }
1448}
1449
1450impl VariableDefinition {
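    /// The variable name: the first identifier token that is not the `export` keyword.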
1451 pub fn name(&self) -> Option<String> {
1453 self.syntax().children_with_tokens().find_map(|it| {
1454 it.as_token().and_then(|it| {
1455 if it.kind() == IDENTIFIER && it.text() != "export" {
1456 Some(it.text().to_string())
1457 } else {
1458 None
1459 }
1460 })
1461 })
1462 }
1463
1464 pub fn is_export(&self) -> bool {
1466 self.syntax()
1467 .children_with_tokens()
1468 .any(|it| it.as_token().is_some_and(|token| token.text() == "export"))
1469 }
1470
1471 pub fn raw_value(&self) -> Option<String> {
1473 self.syntax()
1474 .children()
1475 .find(|it| it.kind() == EXPR)
1476 .map(|it| it.text().into())
1477 }
1478
1479 pub fn remove(&mut self) {
1492 if let Some(parent) = self.syntax().parent() {
1493 remove_with_preceding_comments(self.syntax(), &parent);
1494 }
1495 }
1496
1497 pub fn set_value(&mut self, new_value: &str) {
1510 let expr_index = self
1512 .syntax()
1513 .children()
1514 .find(|it| it.kind() == EXPR)
1515 .map(|it| it.index());
1516
1517 if let Some(expr_idx) = expr_index {
1518 let mut builder = GreenNodeBuilder::new();
1520 builder.start_node(EXPR.into());
1521 builder.token(IDENTIFIER.into(), new_value);
1522 builder.finish_node();
1523
1524 let new_expr = SyntaxNode::new_root_mut(builder.finish());
1525
1526 self.0
1528 .splice_children(expr_idx..expr_idx + 1, vec![new_expr.into()]);
1529 }
1530 }
1531}
1532
1533impl Makefile {
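    /// Create an empty makefile consisting of a bare ROOT node.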
1534 pub fn new() -> Makefile {
1536 let mut builder = GreenNodeBuilder::new();
1537
1538 builder.start_node(ROOT.into());
1539 builder.finish_node();
1540
1541 let syntax = SyntaxNode::new_root_mut(builder.finish());
1542 Makefile(syntax)
1543 }
1544
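    /// Parse `text` into a typed [`Makefile`] parse result.
    ///
    /// A minimal usage sketch (marked `ignore`; the surrounding crate path is assumed):
    ///
    /// ```ignore
    /// let mf: Makefile = "all: dep\n\techo hi\n".parse().unwrap();
    /// assert!(mf.rules().any(|r| r.has_target("all")));
    /// ```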
1545 pub fn parse(text: &str) -> crate::Parse<Makefile> {
1547 crate::Parse::<Makefile>::parse_makefile(text)
1548 }
1549
1550 pub fn code(&self) -> String {
1552 self.syntax().text().to_string()
1553 }
1554
1555 pub fn is_root(&self) -> bool {
1557 self.syntax().kind() == ROOT
1558 }
1559
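    /// Read a makefile from `r`, returning an error on I/O failure or parse errors.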
1560 pub fn read<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1562 let mut buf = String::new();
1563 r.read_to_string(&mut buf)?;
1564 buf.parse()
1565 }
1566
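    /// Like [`Makefile::read`], but ignores parse errors and returns the best-effort tree.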
1567 pub fn read_relaxed<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1569 let mut buf = String::new();
1570 r.read_to_string(&mut buf)?;
1571
1572 let parsed = parse(&buf);
1573 Ok(parsed.root())
1574 }
1575
1576 pub fn rules(&self) -> impl Iterator<Item = Rule> + '_ {
1585 self.syntax().children().filter_map(Rule::cast)
1586 }
1587
1588 pub fn rules_by_target<'a>(&'a self, target: &'a str) -> impl Iterator<Item = Rule> + 'a {
1590 self.rules()
1591 .filter(move |rule| rule.targets().any(|t| t == target))
1592 }
1593
1594 pub fn variable_definitions(&self) -> impl Iterator<Item = VariableDefinition> {
1596 self.syntax()
1597 .children()
1598 .filter_map(VariableDefinition::cast)
1599 }
1600
1601 pub fn find_variable<'a>(
1616 &'a self,
1617 name: &'a str,
1618 ) -> impl Iterator<Item = VariableDefinition> + 'a {
1619 self.variable_definitions()
1620 .filter(move |var| var.name().as_deref() == Some(name))
1621 }
1622
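    /// Append a new rule `target:` at the end of the makefile, separated from existing
    /// rules by a blank line.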
1623 pub fn add_rule(&mut self, target: &str) -> Rule {
1633 let mut builder = GreenNodeBuilder::new();
1634 builder.start_node(RULE.into());
1635 builder.token(IDENTIFIER.into(), target);
1636 builder.token(OPERATOR.into(), ":");
1637 builder.token(NEWLINE.into(), "\n");
1638 builder.finish_node();
1639
1640 let syntax = SyntaxNode::new_root_mut(builder.finish());
1641 let pos = self.0.children_with_tokens().count();
1642
1643 let needs_blank_line = self.0.children().any(|c| c.kind() == RULE);
1646
1647 if needs_blank_line {
1648 let mut bl_builder = GreenNodeBuilder::new();
1650 bl_builder.start_node(BLANK_LINE.into());
1651 bl_builder.token(NEWLINE.into(), "\n");
1652 bl_builder.finish_node();
1653 let blank_line = SyntaxNode::new_root_mut(bl_builder.finish());
1654
1655 self.0
1656 .splice_children(pos..pos, vec![blank_line.into(), syntax.into()]);
1657 } else {
1658 self.0.splice_children(pos..pos, vec![syntax.into()]);
1659 }
1660
1661 Rule(self.0.children().last().unwrap())
1664 }
1665
1666 pub fn from_reader<R: std::io::Read>(mut r: R) -> Result<Makefile, Error> {
1668 let mut buf = String::new();
1669 r.read_to_string(&mut buf)?;
1670
1671 let parsed = parse(&buf);
1672 if !parsed.errors.is_empty() {
1673 Err(Error::Parse(ParseError {
1674 errors: parsed.errors,
1675 }))
1676 } else {
1677 Ok(parsed.root())
1678 }
1679 }
1680
1681 pub fn replace_rule(&mut self, index: usize, new_rule: Rule) -> Result<(), Error> {
1692 let rules: Vec<_> = self.0.children().filter(|n| n.kind() == RULE).collect();
1693
1694 if rules.is_empty() {
1695 return Err(Error::Parse(ParseError {
1696 errors: vec![ErrorInfo {
1697 message: "Cannot replace rule in empty makefile".to_string(),
1698 line: 1,
1699 context: "replace_rule".to_string(),
1700 }],
1701 }));
1702 }
1703
1704 if index >= rules.len() {
1705 return Err(Error::Parse(ParseError {
1706 errors: vec![ErrorInfo {
1707 message: format!(
1708 "Rule index {} out of bounds (max {})",
1709 index,
1710 rules.len() - 1
1711 ),
1712 line: 1,
1713 context: "replace_rule".to_string(),
1714 }],
1715 }));
1716 }
1717
1718 let target_node = &rules[index];
1719 let target_index = target_node.index();
1720
1721 self.0.splice_children(
1723 target_index..target_index + 1,
1724 vec![new_rule.0.clone().into()],
1725 );
1726 Ok(())
1727 }
1728
1729 pub fn remove_rule(&mut self, index: usize) -> Result<Rule, Error> {
1740 let rules: Vec<_> = self.0.children().filter(|n| n.kind() == RULE).collect();
1741
1742 if rules.is_empty() {
1743 return Err(Error::Parse(ParseError {
1744 errors: vec![ErrorInfo {
1745 message: "Cannot remove rule from empty makefile".to_string(),
1746 line: 1,
1747 context: "remove_rule".to_string(),
1748 }],
1749 }));
1750 }
1751
1752 if index >= rules.len() {
1753 return Err(Error::Parse(ParseError {
1754 errors: vec![ErrorInfo {
1755 message: format!(
1756 "Rule index {} out of bounds (max {})",
1757 index,
1758 rules.len() - 1
1759 ),
1760 line: 1,
1761 context: "remove_rule".to_string(),
1762 }],
1763 }));
1764 }
1765
1766 let target_node = rules[index].clone();
1767 let target_index = target_node.index();
1768
1769 self.0
1771 .splice_children(target_index..target_index + 1, vec![]);
1772 Ok(Rule(target_node))
1773 }
1774
1775 pub fn insert_rule(&mut self, index: usize, new_rule: Rule) -> Result<(), Error> {
1787 let rules: Vec<_> = self.0.children().filter(|n| n.kind() == RULE).collect();
1788
1789 if index > rules.len() {
1790 return Err(Error::Parse(ParseError {
1791 errors: vec![ErrorInfo {
1792 message: format!("Rule index {} out of bounds (max {})", index, rules.len()),
1793 line: 1,
1794 context: "insert_rule".to_string(),
1795 }],
1796 }));
1797 }
1798
1799 let target_index = if index == rules.len() {
1800 self.0.children_with_tokens().count()
1802 } else {
1803 rules[index].index()
1805 };
1806
1807 self.0
1809 .splice_children(target_index..target_index, vec![new_rule.0.clone().into()]);
1810 Ok(())
1811 }
1812
1813 pub fn includes(&self) -> impl Iterator<Item = Include> {
1823 self.syntax().children().filter_map(Include::cast)
1824 }
1825
1826 pub fn included_files(&self) -> impl Iterator<Item = String> + '_ {
1836 fn collect_includes(node: &SyntaxNode) -> Vec<Include> {
1839 let mut includes = Vec::new();
1840
1841 if let Some(include) = Include::cast(node.clone()) {
1843 includes.push(include);
1844 }
1845
1846 for child in node.children() {
1848 includes.extend(collect_includes(&child));
1849 }
1850
1851 includes
1852 }
1853
1854 let includes = collect_includes(self.syntax());
1856
1857 includes.into_iter().map(|include| {
1859 include
1860 .syntax()
1861 .children()
1862 .find(|node| node.kind() == EXPR)
1863 .map(|expr| expr.text().to_string().trim().to_string())
1864 .unwrap_or_default()
1865 })
1866 }
1867
1868 pub fn find_rule_by_target(&self, target: &str) -> Option<Rule> {
1879 self.rules()
1880 .find(|rule| rule.targets().any(|t| t == target))
1881 }
1882
1883 pub fn find_rules_by_target<'a>(&'a self, target: &'a str) -> impl Iterator<Item = Rule> + 'a {
1893 self.rules_by_target(target)
1894 }
1895
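    /// Ensure `target` is listed as a prerequisite of the `.PHONY` rule, creating that
    /// rule if it does not exist yet.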
1896 pub fn add_phony_target(&mut self, target: &str) -> Result<(), Error> {
1906 if let Some(mut phony_rule) = self.find_rule_by_target(".PHONY") {
1908 if !phony_rule.prerequisites().any(|p| p == target) {
1910 phony_rule.add_prerequisite(target)?;
1911 }
1912 } else {
1913 let mut phony_rule = self.add_rule(".PHONY");
1915 phony_rule.add_prerequisite(target)?;
1916 }
1917 Ok(())
1918 }
1919
1920 pub fn remove_phony_target(&mut self, target: &str) -> Result<bool, Error> {
1934 let mut phony_rule = None;
1936 for rule in self.rules_by_target(".PHONY") {
1937 if rule.prerequisites().any(|p| p == target) {
1938 phony_rule = Some(rule);
1939 break;
1940 }
1941 }
1942
1943 let mut phony_rule = match phony_rule {
1944 Some(rule) => rule,
1945 None => return Ok(false),
1946 };
1947
1948 let prereq_count = phony_rule.prerequisites().count();
1950
1951 phony_rule.remove_prerequisite(target)?;
1953
1954 if prereq_count == 1 {
1956 phony_rule.remove()?;
1958 }
1959
1960 Ok(true)
1961 }
1962
1963 pub fn is_phony(&self, target: &str) -> bool {
1974 self.rules_by_target(".PHONY")
1976 .any(|rule| rule.prerequisites().any(|p| p == target))
1977 }
1978
1979 pub fn phony_targets(&self) -> impl Iterator<Item = String> + '_ {
1989 self.rules_by_target(".PHONY")
1991 .flat_map(|rule| rule.prerequisites().collect::<Vec<_>>())
1992 }
1993}
1994
1995impl FromStr for Rule {
1996 type Err = crate::Error;
1997
1998 fn from_str(s: &str) -> Result<Self, Self::Err> {
1999 Rule::parse(s).to_rule_result()
2000 }
2001}
2002
2003impl FromStr for Makefile {
2004 type Err = crate::Error;
2005
2006 fn from_str(s: &str) -> Result<Self, Self::Err> {
2007 Makefile::parse(s).to_result()
2008 }
2009}
2010
2011fn build_prerequisites_node(prereqs: &[String], include_leading_space: bool) -> SyntaxNode {
2013 let mut builder = GreenNodeBuilder::new();
2014 builder.start_node(PREREQUISITES.into());
2015
2016 for (i, prereq) in prereqs.iter().enumerate() {
2017 if (i == 0 && include_leading_space) || i > 0 {
2019 builder.token(WHITESPACE.into(), " ");
2020 }
2021
2022 builder.start_node(PREREQUISITE.into());
2024 builder.token(IDENTIFIER.into(), prereq);
2025 builder.finish_node();
2026 }
2027
2028 builder.finish_node();
2029 SyntaxNode::new_root_mut(builder.finish())
2030}
2031
2032fn build_targets_node(targets: &[String]) -> SyntaxNode {
2034 let mut builder = GreenNodeBuilder::new();
2035 builder.start_node(TARGETS.into());
2036
2037 for (i, target) in targets.iter().enumerate() {
2038 if i > 0 {
2039 builder.token(WHITESPACE.into(), " ");
2040 }
2041 builder.token(IDENTIFIER.into(), target);
2042 }
2043
2044 builder.finish_node();
2045 SyntaxNode::new_root_mut(builder.finish())
2046}
2047
2048impl Rule {
2049 pub fn parse(text: &str) -> crate::Parse<Rule> {
2051 crate::Parse::<Rule>::parse_rule(text)
2052 }
2053
2054 fn collect_variable_reference(
2056 &self,
2057 tokens: &mut std::iter::Peekable<impl Iterator<Item = SyntaxElement>>,
2058 ) -> Option<String> {
2059 let mut var_ref = String::new();
2060
2061 if let Some(token) = tokens.next() {
2063 if let Some(t) = token.as_token() {
2064 if t.kind() == DOLLAR {
2065 var_ref.push_str(t.text());
2066
2067 if let Some(next) = tokens.peek() {
2069 if let Some(nt) = next.as_token() {
2070 if nt.kind() == LPAREN {
2071 var_ref.push_str(nt.text());
2073 tokens.next();
2074
2075 let mut paren_count = 1;
2077
2078 for next_token in tokens.by_ref() {
2080 if let Some(nt) = next_token.as_token() {
2081 var_ref.push_str(nt.text());
2082
2083 if nt.kind() == LPAREN {
2084 paren_count += 1;
2085 } else if nt.kind() == RPAREN {
2086 paren_count -= 1;
2087 if paren_count == 0 {
2088 break;
2089 }
2090 }
2091 }
2092 }
2093
2094 return Some(var_ref);
2095 }
2096 }
2097 }
2098
2099 for next_token in tokens.by_ref() {
2101 if let Some(nt) = next_token.as_token() {
2102 var_ref.push_str(nt.text());
2103 if nt.kind() == RPAREN {
2104 break;
2105 }
2106 }
2107 }
2108 return Some(var_ref);
2109 }
2110 }
2111 }
2112
2113 None
2114 }
2115
2116 fn extract_targets_from_node(node: &SyntaxNode) -> Vec<String> {
2118 let mut result = Vec::new();
2119 let mut current_target = String::new();
2120 let mut in_parens = 0;
2121
2122 for child in node.children_with_tokens() {
2123 if let Some(token) = child.as_token() {
2124 match token.kind() {
2125 IDENTIFIER => {
2126 current_target.push_str(token.text());
2127 }
2128 WHITESPACE => {
2129 if in_parens == 0 && !current_target.is_empty() {
2131 result.push(current_target.clone());
2132 current_target.clear();
2133 } else if in_parens > 0 {
2134 current_target.push_str(token.text());
2135 }
2136 }
2137 LPAREN => {
2138 in_parens += 1;
2139 current_target.push_str(token.text());
2140 }
2141 RPAREN => {
2142 in_parens -= 1;
2143 current_target.push_str(token.text());
2144 }
2145 DOLLAR => {
2146 current_target.push_str(token.text());
2147 }
2148 _ => {
2149 current_target.push_str(token.text());
2150 }
2151 }
2152 } else if let Some(child_node) = child.as_node() {
2153 current_target.push_str(&child_node.text().to_string());
2155 }
2156 }
2157
2158 if !current_target.is_empty() {
2160 result.push(current_target);
2161 }
2162
2163 result
2164 }
2165
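    /// Target names of this rule, including `$(VAR)` references and archive targets
    /// such as `lib.a(member.o)`.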
2166 pub fn targets(&self) -> impl Iterator<Item = String> + '_ {
2176 for child in self.syntax().children_with_tokens() {
2178 if let Some(node) = child.as_node() {
2179 if node.kind() == TARGETS {
2180 return Self::extract_targets_from_node(node).into_iter();
2182 }
2183 }
2184 if let Some(token) = child.as_token() {
2186 if token.kind() == OPERATOR {
2187 break;
2188 }
2189 }
2190 }
2191
2192 let mut result = Vec::new();
2194 let mut tokens = self
2195 .syntax()
2196 .children_with_tokens()
2197 .take_while(|it| it.as_token().map(|t| t.kind() != OPERATOR).unwrap_or(true))
2198 .peekable();
2199
2200 while let Some(token) = tokens.peek().cloned() {
2201 if let Some(node) = token.as_node() {
                tokens.next();
                if node.kind() == EXPR {
2204 let mut var_content = String::new();
2206 for child in node.children_with_tokens() {
2207 if let Some(t) = child.as_token() {
2208 var_content.push_str(t.text());
2209 }
2210 }
2211 if !var_content.is_empty() {
2212 result.push(var_content);
2213 }
2214 }
2215 } else if let Some(t) = token.as_token() {
2216 if t.kind() == DOLLAR {
2217 if let Some(var_ref) = self.collect_variable_reference(&mut tokens) {
2218 result.push(var_ref);
2219 }
2220 } else if t.kind() == IDENTIFIER {
2221 let ident_text = t.text().to_string();
                    tokens.next();
                    if let Some(next) = tokens.peek() {
2227 if let Some(next_token) = next.as_token() {
2228 if next_token.kind() == LPAREN {
2229 let mut archive_target = ident_text;
                                archive_target.push_str(next_token.text());
                                tokens.next();
                                while let Some(token) = tokens.peek() {
2236 if let Some(node) = token.as_node() {
2237 if node.kind() == ARCHIVE_MEMBERS {
2238 archive_target.push_str(&node.text().to_string());
2239 tokens.next();
2240 } else {
2241 tokens.next();
2242 }
2243 } else if let Some(t) = token.as_token() {
2244 if t.kind() == RPAREN {
2245 archive_target.push_str(t.text());
2246 tokens.next();
2247 break;
2248 } else {
2249 tokens.next();
2250 }
2251 } else {
2252 break;
2253 }
2254 }
2255 result.push(archive_target);
2256 } else {
2257 result.push(ident_text);
2259 }
2260 } else {
2261 result.push(ident_text);
2263 }
2264 } else {
2265 result.push(ident_text);
2267 }
2268 } else {
                    tokens.next();
                }
2271 }
2272 }
2273 result.into_iter()
2274 }
2275
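    /// Prerequisite names listed after the rule's ':' operator.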
2276 pub fn prerequisites(&self) -> impl Iterator<Item = String> + '_ {
2285 let mut found_operator = false;
2287 let mut prerequisites_node = None;
2288
2289 for element in self.syntax().children_with_tokens() {
2290 if let Some(token) = element.as_token() {
2291 if token.kind() == OPERATOR {
2292 found_operator = true;
2293 }
2294 } else if let Some(node) = element.as_node() {
2295 if found_operator && node.kind() == PREREQUISITES {
2296 prerequisites_node = Some(node.clone());
2297 break;
2298 }
2299 }
2300 }
2301
2302 let result: Vec<String> = if let Some(prereqs) = prerequisites_node {
2303 prereqs
2305 .children()
2306 .filter(|child| child.kind() == PREREQUISITE)
2307 .map(|child| child.text().to_string().trim().to_string())
2308 .collect()
2309 } else {
2310 Vec::new()
2311 };
2312
2313 result.into_iter()
2314 }
2315
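    /// Recipe command lines, i.e. the text of each tab-indented recipe.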
2316 pub fn recipes(&self) -> impl Iterator<Item = String> {
2325 self.syntax()
2326 .children()
2327 .filter(|it| it.kind() == RECIPE)
2328 .flat_map(|it| {
2329 it.children_with_tokens().filter_map(|it| {
2330 it.as_token().and_then(|t| {
2331 if t.kind() == TEXT {
2332 Some(t.text().to_string())
2333 } else {
2334 None
2335 }
2336 })
2337 })
2338 })
2339 }
2340
2341 pub fn replace_command(&mut self, i: usize, line: &str) -> bool {
2351 let recipes: Vec<_> = self
2354 .syntax()
2355 .children()
2356 .filter(|n| {
2357 n.kind() == RECIPE
2358 && n.children_with_tokens()
2359 .any(|t| t.as_token().map(|t| t.kind() == TEXT).unwrap_or(false))
2360 })
2361 .collect();
2362
2363 if i >= recipes.len() {
2364 return false;
2365 }
2366
2367 let target_node = &recipes[i];
2369 let target_index = target_node.index();
2370
2371 let mut builder = GreenNodeBuilder::new();
2372 builder.start_node(RECIPE.into());
2373 builder.token(INDENT.into(), "\t");
2374 builder.token(TEXT.into(), line);
2375 builder.token(NEWLINE.into(), "\n");
2376 builder.finish_node();
2377
2378 let syntax = SyntaxNode::new_root_mut(builder.finish());
2379
2380 self.0
2381 .splice_children(target_index..target_index + 1, vec![syntax.into()]);
2382
2383 true
2384 }
2385
2386 pub fn push_command(&mut self, line: &str) {
2396 let index = self
2398 .0
2399 .children_with_tokens()
2400 .filter(|it| it.kind() == RECIPE)
2401 .last();
2402
2403 let index = index.map_or_else(
2404 || self.0.children_with_tokens().count(),
2405 |it| it.index() + 1,
2406 );
2407
2408 let mut builder = GreenNodeBuilder::new();
2409 builder.start_node(RECIPE.into());
2410 builder.token(INDENT.into(), "\t");
2411 builder.token(TEXT.into(), line);
2412 builder.token(NEWLINE.into(), "\n");
2413 builder.finish_node();
2414 let syntax = SyntaxNode::new_root_mut(builder.finish());
2415
2416 self.0.splice_children(index..index, vec![syntax.into()]);
2417 }
2418
2419 pub fn remove_command(&mut self, index: usize) -> bool {
2429 let recipes: Vec<_> = self
2430 .syntax()
2431 .children()
2432 .filter(|n| n.kind() == RECIPE)
2433 .collect();
2434
2435 if index >= recipes.len() {
2436 return false;
2437 }
2438
2439 let target_node = &recipes[index];
2440 let target_index = target_node.index();
2441
2442 self.0
2443 .splice_children(target_index..target_index + 1, vec![]);
2444 true
2445 }
2446
2447 pub fn insert_command(&mut self, index: usize, line: &str) -> bool {
2458 let recipes: Vec<_> = self
2459 .syntax()
2460 .children()
2461 .filter(|n| n.kind() == RECIPE)
2462 .collect();
2463
2464 if index > recipes.len() {
2465 return false;
2466 }
2467
2468 let target_index = if index == recipes.len() {
2469 recipes.last().map(|n| n.index() + 1).unwrap_or_else(|| {
2471 self.0.children_with_tokens().count()
2473 })
2474 } else {
2475 recipes[index].index()
2477 };
2478
2479 let mut builder = GreenNodeBuilder::new();
2480 builder.start_node(RECIPE.into());
2481 builder.token(INDENT.into(), "\t");
2482 builder.token(TEXT.into(), line);
2483 builder.token(NEWLINE.into(), "\n");
2484 builder.finish_node();
2485 let syntax = SyntaxNode::new_root_mut(builder.finish());
2486
2487 self.0
2488 .splice_children(target_index..target_index, vec![syntax.into()]);
2489 true
2490 }
2491
2492 pub fn recipe_count(&self) -> usize {
2501 self.syntax()
2502 .children()
2503 .filter(|n| n.kind() == RECIPE)
2504 .count()
2505 }
2506
2507 pub fn clear_commands(&mut self) {
2517 let recipes: Vec<_> = self
2518 .syntax()
2519 .children()
2520 .filter(|n| n.kind() == RECIPE)
2521 .collect();
2522
2523 if recipes.is_empty() {
2524 return;
2525 }
2526
2527 for recipe in recipes.iter().rev() {
2529 let index = recipe.index();
2530 self.0.splice_children(index..index + 1, vec![]);
2531 }
2532 }
2533
2534 pub fn remove_prerequisite(&mut self, target: &str) -> Result<bool, Error> {
2547 let mut found_operator = false;
2549 let mut prereqs_node = None;
2550
2551 for child in self.syntax().children_with_tokens() {
2552 if let Some(token) = child.as_token() {
2553 if token.kind() == OPERATOR {
2554 found_operator = true;
2555 }
2556 } else if let Some(node) = child.as_node() {
2557 if found_operator && node.kind() == PREREQUISITES {
2558 prereqs_node = Some(node.clone());
2559 break;
2560 }
2561 }
2562 }
2563
2564 let prereqs_node = match prereqs_node {
2565 Some(node) => node,
            None => return Ok(false),
        };
2568
2569 let current_prereqs: Vec<String> = self.prerequisites().collect();
2571
2572 if !current_prereqs.iter().any(|p| p == target) {
2574 return Ok(false);
2575 }
2576
2577 let new_prereqs: Vec<String> = current_prereqs
2579 .into_iter()
2580 .filter(|p| p != target)
2581 .collect();
2582
2583 let prereqs_index = prereqs_node.index();
2585 let new_prereqs_node = build_prerequisites_node(&new_prereqs, true);
2586
2587 self.0.splice_children(
2588 prereqs_index..prereqs_index + 1,
2589 vec![new_prereqs_node.into()],
2590 );
2591
2592 Ok(true)
2593 }
2594
2595 pub fn add_prerequisite(&mut self, target: &str) -> Result<(), Error> {
2605 let mut current_prereqs: Vec<String> = self.prerequisites().collect();
2606 current_prereqs.push(target.to_string());
2607 self.set_prerequisites(current_prereqs.iter().map(|s| s.as_str()).collect())
2608 }
2609
2610 pub fn set_prerequisites(&mut self, prereqs: Vec<&str>) -> Result<(), Error> {
2620 let mut prereqs_index = None;
2622 let mut operator_found = false;
2623
2624 for child in self.syntax().children_with_tokens() {
2625 if let Some(token) = child.as_token() {
2626 if token.kind() == OPERATOR {
2627 operator_found = true;
2628 }
2629 } else if let Some(node) = child.as_node() {
2630 if operator_found && node.kind() == PREREQUISITES {
                    prereqs_index = Some((node.index(), true));
                    break;
2633 }
2634 }
2635 }
2636
2637 match prereqs_index {
2638 Some((idx, true)) => {
2639 let has_external_whitespace = self
2641 .syntax()
2642 .children_with_tokens()
2643 .skip_while(|e| !matches!(e.as_token().map(|t| t.kind()), Some(OPERATOR)))
                    .nth(1)
                    .map(|e| matches!(e.as_token().map(|t| t.kind()), Some(WHITESPACE)))
2646 .unwrap_or(false);
2647
2648 let new_prereqs = build_prerequisites_node(
2649 &prereqs.iter().map(|s| s.to_string()).collect::<Vec<_>>(),
                    !has_external_whitespace,
                );
2652 self.0
2653 .splice_children(idx..idx + 1, vec![new_prereqs.into()]);
2654 }
2655 _ => {
2656 let new_prereqs = build_prerequisites_node(
2658 &prereqs.iter().map(|s| s.to_string()).collect::<Vec<_>>(),
                    true,
                );
2661
2662 let insert_pos = self
2663 .syntax()
2664 .children_with_tokens()
2665 .position(|t| t.as_token().map(|t| t.kind() == OPERATOR).unwrap_or(false))
2666 .map(|p| p + 1)
2667 .ok_or_else(|| {
2668 Error::Parse(ParseError {
2669 errors: vec![ErrorInfo {
2670 message: "No operator found in rule".to_string(),
2671 line: 1,
2672 context: "set_prerequisites".to_string(),
2673 }],
2674 })
2675 })?;
2676
2677 self.0
2678 .splice_children(insert_pos..insert_pos, vec![new_prereqs.into()]);
2679 }
2680 }
2681
2682 Ok(())
2683 }
2684
2685 pub fn rename_target(&mut self, old_name: &str, new_name: &str) -> Result<bool, Error> {
2697 let current_targets: Vec<String> = self.targets().collect();
2699
2700 if !current_targets.iter().any(|t| t == old_name) {
2702 return Ok(false);
2703 }
2704
2705 let new_targets: Vec<String> = current_targets
2707 .into_iter()
2708 .map(|t| {
2709 if t == old_name {
2710 new_name.to_string()
2711 } else {
2712 t
2713 }
2714 })
2715 .collect();
2716
2717 let mut targets_index = None;
2719 for (idx, child) in self.syntax().children_with_tokens().enumerate() {
2720 if let Some(node) = child.as_node() {
2721 if node.kind() == TARGETS {
2722 targets_index = Some(idx);
2723 break;
2724 }
2725 }
2726 }
2727
2728 let targets_index = targets_index.ok_or_else(|| {
2729 Error::Parse(ParseError {
2730 errors: vec![ErrorInfo {
2731 message: "No TARGETS node found in rule".to_string(),
2732 line: 1,
2733 context: "rename_target".to_string(),
2734 }],
2735 })
2736 })?;
2737
2738 let new_targets_node = build_targets_node(&new_targets);
2740
2741 self.0.splice_children(
2743 targets_index..targets_index + 1,
2744 vec![new_targets_node.into()],
2745 );
2746
2747 Ok(true)
2748 }
2749
2750 pub fn add_target(&mut self, target: &str) -> Result<(), Error> {
2760 let mut current_targets: Vec<String> = self.targets().collect();
2761 current_targets.push(target.to_string());
2762 self.set_targets(current_targets.iter().map(|s| s.as_str()).collect())
2763 }
2764
2765 pub fn set_targets(&mut self, targets: Vec<&str>) -> Result<(), Error> {
2777 if targets.is_empty() {
2779 return Err(Error::Parse(ParseError {
2780 errors: vec![ErrorInfo {
2781 message: "Cannot set empty targets list for a rule".to_string(),
2782 line: 1,
2783 context: "set_targets".to_string(),
2784 }],
2785 }));
2786 }
2787
2788 let mut targets_index = None;
2790 for (idx, child) in self.syntax().children_with_tokens().enumerate() {
2791 if let Some(node) = child.as_node() {
2792 if node.kind() == TARGETS {
2793 targets_index = Some(idx);
2794 break;
2795 }
2796 }
2797 }
2798
2799 let targets_index = targets_index.ok_or_else(|| {
2800 Error::Parse(ParseError {
2801 errors: vec![ErrorInfo {
2802 message: "No TARGETS node found in rule".to_string(),
2803 line: 1,
2804 context: "set_targets".to_string(),
2805 }],
2806 })
2807 })?;
2808
2809 let new_targets_node =
2811 build_targets_node(&targets.iter().map(|s| s.to_string()).collect::<Vec<_>>());
2812
2813 self.0.splice_children(
2815 targets_index..targets_index + 1,
2816 vec![new_targets_node.into()],
2817 );
2818
2819 Ok(())
2820 }
2821
2822 pub fn has_target(&self, target: &str) -> bool {
2833 self.targets().any(|t| t == target)
2834 }
2835
2836 pub fn remove_target(&mut self, target_name: &str) -> Result<bool, Error> {
2849 let current_targets: Vec<String> = self.targets().collect();
2851
2852 if !current_targets.iter().any(|t| t == target_name) {
2854 return Ok(false);
2855 }
2856
2857 let new_targets: Vec<String> = current_targets
2859 .into_iter()
2860 .filter(|t| t != target_name)
2861 .collect();
2862
2863 if new_targets.is_empty() {
2865 return Err(Error::Parse(ParseError {
2866 errors: vec![ErrorInfo {
2867 message: "Cannot remove all targets from a rule".to_string(),
2868 line: 1,
2869 context: "remove_target".to_string(),
2870 }],
2871 }));
2872 }
2873
2874 let mut targets_index = None;
2876 for (idx, child) in self.syntax().children_with_tokens().enumerate() {
2877 if let Some(node) = child.as_node() {
2878 if node.kind() == TARGETS {
2879 targets_index = Some(idx);
2880 break;
2881 }
2882 }
2883 }
2884
2885 let targets_index = targets_index.ok_or_else(|| {
2886 Error::Parse(ParseError {
2887 errors: vec![ErrorInfo {
2888 message: "No TARGETS node found in rule".to_string(),
2889 line: 1,
2890 context: "remove_target".to_string(),
2891 }],
2892 })
2893 })?;
2894
2895 let new_targets_node = build_targets_node(&new_targets);
2897
2898 self.0.splice_children(
2900 targets_index..targets_index + 1,
2901 vec![new_targets_node.into()],
2902 );
2903
2904 Ok(true)
2905 }
2906
2907 pub fn remove(self) -> Result<(), Error> {
2920 let parent = self.syntax().parent().ok_or_else(|| {
2921 Error::Parse(ParseError {
2922 errors: vec![ErrorInfo {
2923 message: "Rule has no parent".to_string(),
2924 line: 1,
2925 context: "remove".to_string(),
2926 }],
2927 })
2928 })?;
2929
2930 remove_with_preceding_comments(self.syntax(), &parent);
2931 Ok(())
2932 }
2933}
2934
2935impl Default for Makefile {
2936 fn default() -> Self {
2937 Self::new()
2938 }
2939}
2940
2941impl Include {
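    /// The include path expression, with surrounding whitespace trimmed.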
2942 pub fn path(&self) -> Option<String> {
2944 self.syntax()
2945 .children()
2946 .find(|it| it.kind() == EXPR)
2947 .map(|it| it.text().to_string().trim().to_string())
2948 }
2949
2950 pub fn is_optional(&self) -> bool {
2952 let text = self.syntax().text();
2953 text.to_string().starts_with("-include") || text.to_string().starts_with("sinclude")
2954 }
2955}
2956
2957#[cfg(test)]
2958mod tests {
2959 use super::*;
2960
2961 #[test]
2962 fn test_conditionals() {
2963 let code = "ifdef DEBUG\n DEBUG_FLAG := 1\nendif\n";
2967 let mut buf = code.as_bytes();
2968 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse basic ifdef");
2969 assert!(makefile.code().contains("DEBUG_FLAG"));
2970
2971 let code =
2973 "ifeq ($(OS),Windows_NT)\n RESULT := windows\nelse\n RESULT := unix\nendif\n";
2974 let mut buf = code.as_bytes();
2975 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse ifeq/ifneq");
2976 assert!(makefile.code().contains("RESULT"));
2977 assert!(makefile.code().contains("windows"));
2978
2979 let code = "ifdef DEBUG\n CFLAGS += -g\n ifdef VERBOSE\n CFLAGS += -v\n endif\nelse\n CFLAGS += -O2\nendif\n";
2981 let mut buf = code.as_bytes();
2982 let makefile = Makefile::read_relaxed(&mut buf)
2983 .expect("Failed to parse nested conditionals with else");
2984 assert!(makefile.code().contains("CFLAGS"));
2985 assert!(makefile.code().contains("VERBOSE"));
2986
2987 let code = "ifdef DEBUG\nendif\n";
2989 let mut buf = code.as_bytes();
2990 let makefile =
2991 Makefile::read_relaxed(&mut buf).expect("Failed to parse empty conditionals");
2992 assert!(makefile.code().contains("ifdef DEBUG"));
2993
2994 let code = "ifeq ($(OS),Windows)\n EXT := .exe\nelif ifeq ($(OS),Linux)\n EXT := .bin\nelse\n EXT := .out\nendif\n";
2996 let mut buf = code.as_bytes();
2997 let makefile =
2998 Makefile::read_relaxed(&mut buf).expect("Failed to parse conditionals with elif");
2999 assert!(makefile.code().contains("EXT"));
3000
3001 let code = "ifXYZ DEBUG\nDEBUG := 1\nendif\n";
3003 let mut buf = code.as_bytes();
3004 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse with recovery");
3005 assert!(makefile.code().contains("DEBUG"));
3006
3007 let code = "ifdef \nDEBUG := 1\nendif\n";
3009 let mut buf = code.as_bytes();
3010 let makefile = Makefile::read_relaxed(&mut buf)
3011 .expect("Failed to parse with recovery - missing condition");
3012 assert!(makefile.code().contains("DEBUG"));
3013 }
3014
3015 #[test]
3016 fn test_parse_simple() {
3017 const SIMPLE: &str = r#"VARIABLE = value
3018
3019rule: dependency
3020 command
3021"#;
3022 let parsed = parse(SIMPLE);
3023 assert!(parsed.errors.is_empty());
3024 let node = parsed.syntax();
3025 assert_eq!(
3026 format!("{:#?}", node),
3027 r#"ROOT@0..44
3028 VARIABLE@0..17
3029 IDENTIFIER@0..8 "VARIABLE"
3030 WHITESPACE@8..9 " "
3031 OPERATOR@9..10 "="
3032 WHITESPACE@10..11 " "
3033 EXPR@11..16
3034 IDENTIFIER@11..16 "value"
3035 NEWLINE@16..17 "\n"
3036 BLANK_LINE@17..18
3037 NEWLINE@17..18 "\n"
3038 RULE@18..44
3039 TARGETS@18..22
3040 IDENTIFIER@18..22 "rule"
3041 OPERATOR@22..23 ":"
3042 WHITESPACE@23..24 " "
3043 PREREQUISITES@24..34
3044 PREREQUISITE@24..34
3045 IDENTIFIER@24..34 "dependency"
3046 NEWLINE@34..35 "\n"
3047 RECIPE@35..44
3048 INDENT@35..36 "\t"
3049 TEXT@36..43 "command"
3050 NEWLINE@43..44 "\n"
3051"#
3052 );
3053
3054 let root = parsed.root();
3055
3056 let mut rules = root.rules().collect::<Vec<_>>();
3057 assert_eq!(rules.len(), 1);
3058 let rule = rules.pop().unwrap();
3059 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3060 assert_eq!(rule.prerequisites().collect::<Vec<_>>(), vec!["dependency"]);
3061 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
3062
3063 let mut variables = root.variable_definitions().collect::<Vec<_>>();
3064 assert_eq!(variables.len(), 1);
3065 let variable = variables.pop().unwrap();
3066 assert_eq!(variable.name(), Some("VARIABLE".to_string()));
3067 assert_eq!(variable.raw_value(), Some("value".to_string()));
3068 }
3069
3070 #[test]
3071 fn test_parse_export_assign() {
3072 const EXPORT: &str = r#"export VARIABLE := value
3073"#;
3074 let parsed = parse(EXPORT);
3075 assert!(parsed.errors.is_empty());
3076 let node = parsed.syntax();
3077 assert_eq!(
3078 format!("{:#?}", node),
3079 r#"ROOT@0..25
3080 VARIABLE@0..25
3081 IDENTIFIER@0..6 "export"
3082 WHITESPACE@6..7 " "
3083 IDENTIFIER@7..15 "VARIABLE"
3084 WHITESPACE@15..16 " "
3085 OPERATOR@16..18 ":="
3086 WHITESPACE@18..19 " "
3087 EXPR@19..24
3088 IDENTIFIER@19..24 "value"
3089 NEWLINE@24..25 "\n"
3090"#
3091 );
3092
3093 let root = parsed.root();
3094
3095 let mut variables = root.variable_definitions().collect::<Vec<_>>();
3096 assert_eq!(variables.len(), 1);
3097 let variable = variables.pop().unwrap();
3098 assert_eq!(variable.name(), Some("VARIABLE".to_string()));
3099 assert_eq!(variable.raw_value(), Some("value".to_string()));
3100 }
3101
3102 #[test]
3103 fn test_parse_multiple_prerequisites() {
3104 const MULTIPLE_PREREQUISITES: &str = r#"rule: dependency1 dependency2
3105 command
3106
3107"#;
3108 let parsed = parse(MULTIPLE_PREREQUISITES);
3109 assert!(parsed.errors.is_empty());
3110 let node = parsed.syntax();
3111 assert_eq!(
3112 format!("{:#?}", node),
3113 r#"ROOT@0..40
3114 RULE@0..40
3115 TARGETS@0..4
3116 IDENTIFIER@0..4 "rule"
3117 OPERATOR@4..5 ":"
3118 WHITESPACE@5..6 " "
3119 PREREQUISITES@6..29
3120 PREREQUISITE@6..17
3121 IDENTIFIER@6..17 "dependency1"
3122 WHITESPACE@17..18 " "
3123 PREREQUISITE@18..29
3124 IDENTIFIER@18..29 "dependency2"
3125 NEWLINE@29..30 "\n"
3126 RECIPE@30..39
3127 INDENT@30..31 "\t"
3128 TEXT@31..38 "command"
3129 NEWLINE@38..39 "\n"
3130 NEWLINE@39..40 "\n"
3131"#
3132 );
3133 let root = parsed.root();
3134
3135 let rule = root.rules().next().unwrap();
3136 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3137 assert_eq!(
3138 rule.prerequisites().collect::<Vec<_>>(),
3139 vec!["dependency1", "dependency2"]
3140 );
3141 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
3142 }
3143
3144 #[test]
3145 fn test_add_rule() {
3146 let mut makefile = Makefile::new();
3147 let rule = makefile.add_rule("rule");
3148 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3149 assert_eq!(
3150 rule.prerequisites().collect::<Vec<_>>(),
3151 Vec::<String>::new()
3152 );
3153
3154 assert_eq!(makefile.to_string(), "rule:\n");
3155 }
3156
3157 #[test]
3158 fn test_add_rule_with_shebang() {
3159 let content = r#"#!/usr/bin/make -f
3161
3162build: blah
3163 $(MAKE) install
3164
3165clean:
3166 dh_clean
3167"#;
3168
3169 let mut makefile = Makefile::read_relaxed(content.as_bytes()).unwrap();
3170 let initial_count = makefile.rules().count();
3171 assert_eq!(initial_count, 2);
3172
3173 let rule = makefile.add_rule("build-indep");
3175 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["build-indep"]);
3176
3177 assert_eq!(makefile.rules().count(), initial_count + 1);
3179 }
3180
3181 #[test]
3182 fn test_add_rule_formatting() {
3183 let content = r#"build: blah
3185 $(MAKE) install
3186
3187clean:
3188 dh_clean
3189"#;
3190
3191 let mut makefile = Makefile::read_relaxed(content.as_bytes()).unwrap();
3192 let mut rule = makefile.add_rule("build-indep");
3193 rule.add_prerequisite("build").unwrap();
3194
3195 let expected = r#"build: blah
3196 $(MAKE) install
3197
3198clean:
3199 dh_clean
3200
3201build-indep: build
3202"#;
3203
3204 assert_eq!(makefile.to_string(), expected);
3205 }
3206
3207 #[test]
3208 fn test_push_command() {
3209 let mut makefile = Makefile::new();
3210 let mut rule = makefile.add_rule("rule");
3211
3212 rule.push_command("command");
3214 rule.push_command("command2");
3215
3216 assert_eq!(
3218 rule.recipes().collect::<Vec<_>>(),
3219 vec!["command", "command2"]
3220 );
3221
3222 rule.push_command("command3");
3224 assert_eq!(
3225 rule.recipes().collect::<Vec<_>>(),
3226 vec!["command", "command2", "command3"]
3227 );
3228
3229 assert_eq!(
3231 makefile.to_string(),
3232 "rule:\n\tcommand\n\tcommand2\n\tcommand3\n"
3233 );
3234
3235 assert_eq!(
3237 rule.to_string(),
3238 "rule:\n\tcommand\n\tcommand2\n\tcommand3\n"
3239 );
3240 }
3241
3242 #[test]
3243 fn test_replace_command() {
3244 let mut makefile = Makefile::new();
3245 let mut rule = makefile.add_rule("rule");
3246
3247 rule.push_command("command");
3249 rule.push_command("command2");
3250
3251 assert_eq!(
3253 rule.recipes().collect::<Vec<_>>(),
3254 vec!["command", "command2"]
3255 );
3256
3257 rule.replace_command(0, "new command");
3259 assert_eq!(
3260 rule.recipes().collect::<Vec<_>>(),
3261 vec!["new command", "command2"]
3262 );
3263
3264 assert_eq!(makefile.to_string(), "rule:\n\tnew command\n\tcommand2\n");
3266
3267 assert_eq!(rule.to_string(), "rule:\n\tnew command\n\tcommand2\n");
3269 }
3270
3271 #[test]
3272 fn test_replace_command_with_comments() {
3273 let content = b"override_dh_strip:\n\t# no longer necessary after buster\n\tdh_strip --dbgsym-migration='amule-dbg (<< 1:2.3.2-2~)'\n";
3276
3277 let makefile = Makefile::read_relaxed(&content[..]).unwrap();
3278
3279 let mut rule = makefile.rules().next().unwrap();
3280
3281 assert_eq!(rule.recipes().count(), 1);
3283 assert_eq!(
3284 rule.recipes().collect::<Vec<_>>(),
3285 vec!["dh_strip --dbgsym-migration='amule-dbg (<< 1:2.3.2-2~)'"]
3286 );
3287
3288 assert!(rule.replace_command(0, "dh_strip"));
3290
3291 assert_eq!(rule.recipes().count(), 1);
3293 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["dh_strip"]);
3294 }
3295
3296 #[test]
3297 fn test_parse_rule_without_newline() {
3298 let rule = "rule: dependency\n\tcommand".parse::<Rule>().unwrap();
3299 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3300 assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
3301 let rule = "rule: dependency".parse::<Rule>().unwrap();
3302 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["rule"]);
3303 assert_eq!(rule.recipes().collect::<Vec<_>>(), Vec::<String>::new());
3304 }
3305
3306 #[test]
3307 fn test_parse_makefile_without_newline() {
3308 let makefile = "rule: dependency\n\tcommand".parse::<Makefile>().unwrap();
3309 assert_eq!(makefile.rules().count(), 1);
3310 }
3311
3312 #[test]
3313 fn test_from_reader() {
3314 let makefile = Makefile::from_reader("rule: dependency\n\tcommand".as_bytes()).unwrap();
3315 assert_eq!(makefile.rules().count(), 1);
3316 }
3317
3318 #[test]
3319 fn test_parse_with_tab_after_last_newline() {
3320 let makefile = Makefile::from_reader("rule: dependency\n\tcommand\n\t".as_bytes()).unwrap();
3321 assert_eq!(makefile.rules().count(), 1);
3322 }
3323
3324 #[test]
3325 fn test_parse_with_space_after_last_newline() {
3326 let makefile = Makefile::from_reader("rule: dependency\n\tcommand\n ".as_bytes()).unwrap();
3327 assert_eq!(makefile.rules().count(), 1);
3328 }
3329
3330 #[test]
3331 fn test_parse_with_comment_after_last_newline() {
3332 let makefile =
3333 Makefile::from_reader("rule: dependency\n\tcommand\n#comment".as_bytes()).unwrap();
3334 assert_eq!(makefile.rules().count(), 1);
3335 }
3336
3337 #[test]
3338 fn test_parse_with_variable_rule() {
3339 let makefile =
3340 Makefile::from_reader("RULE := rule\n$(RULE): dependency\n\tcommand".as_bytes())
3341 .unwrap();
3342
3343 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3345 assert_eq!(vars.len(), 1);
3346 assert_eq!(vars[0].name(), Some("RULE".to_string()));
3347 assert_eq!(vars[0].raw_value(), Some("rule".to_string()));
3348
3349 let rules = makefile.rules().collect::<Vec<_>>();
3351 assert_eq!(rules.len(), 1);
3352 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["$(RULE)"]);
3353 assert_eq!(
3354 rules[0].prerequisites().collect::<Vec<_>>(),
3355 vec!["dependency"]
3356 );
3357 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["command"]);
3358 }
3359
3360 #[test]
3361 fn test_parse_with_variable_dependency() {
3362 let makefile =
3363 Makefile::from_reader("DEP := dependency\nrule: $(DEP)\n\tcommand".as_bytes()).unwrap();
3364
3365 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3367 assert_eq!(vars.len(), 1);
3368 assert_eq!(vars[0].name(), Some("DEP".to_string()));
3369 assert_eq!(vars[0].raw_value(), Some("dependency".to_string()));
3370
3371 let rules = makefile.rules().collect::<Vec<_>>();
3373 assert_eq!(rules.len(), 1);
3374 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["rule"]);
3375 assert_eq!(rules[0].prerequisites().collect::<Vec<_>>(), vec!["$(DEP)"]);
3376 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["command"]);
3377 }
3378
3379 #[test]
3380 fn test_parse_with_variable_command() {
3381 let makefile =
3382 Makefile::from_reader("COM := command\nrule: dependency\n\t$(COM)".as_bytes()).unwrap();
3383
3384 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3386 assert_eq!(vars.len(), 1);
3387 assert_eq!(vars[0].name(), Some("COM".to_string()));
3388 assert_eq!(vars[0].raw_value(), Some("command".to_string()));
3389
3390 let rules = makefile.rules().collect::<Vec<_>>();
3392 assert_eq!(rules.len(), 1);
3393 assert_eq!(rules[0].targets().collect::<Vec<_>>(), vec!["rule"]);
3394 assert_eq!(
3395 rules[0].prerequisites().collect::<Vec<_>>(),
3396 vec!["dependency"]
3397 );
3398 assert_eq!(rules[0].recipes().collect::<Vec<_>>(), vec!["$(COM)"]);
3399 }
3400
3401 #[test]
3402 fn test_regular_line_error_reporting() {
3403 let input = "rule target\n\tcommand";
3404
3405 let parsed = parse(input);
3407 let direct_error = &parsed.errors[0];
3408
3409 assert_eq!(direct_error.line, 2);
3411 assert!(
3412 direct_error.message.contains("expected"),
3413 "Error message should contain 'expected': {}",
3414 direct_error.message
3415 );
3416 assert_eq!(direct_error.context, "\tcommand");
3417
3418 let reader_result = Makefile::from_reader(input.as_bytes());
3420 let parse_error = match reader_result {
3421 Ok(_) => panic!("Expected Parse error from from_reader"),
3422 Err(err) => match err {
3423 self::Error::Parse(parse_err) => parse_err,
3424 _ => panic!("Expected Parse error"),
3425 },
3426 };
3427
3428 let error_text = parse_error.to_string();
3430 assert!(error_text.contains("Error at line 2:"));
3431 assert!(error_text.contains("2| \tcommand"));
3432 }
3433
3434 #[test]
3435 fn test_parsing_error_context_with_bad_syntax() {
3436 let input = "#begin comment\n\t(╯°□°)╯︵ ┻━┻\n#end comment";
3438
3439 match Makefile::from_reader(input.as_bytes()) {
3441 Ok(makefile) => {
3442 assert_eq!(
3444 makefile.rules().count(),
3445 0,
3446 "Should not have found any rules"
3447 );
3448 }
3449 Err(err) => match err {
3450 self::Error::Parse(error) => {
3451 assert!(error.errors[0].line >= 2, "Error line should be at least 2");
3453 assert!(
3454 !error.errors[0].context.is_empty(),
3455 "Error context should not be empty"
3456 );
3457 }
3458 _ => panic!("Unexpected error type"),
3459 },
3460 };
3461 }
3462
3463 #[test]
3464 fn test_error_message_format() {
3465 let parse_error = ParseError {
3467 errors: vec![ErrorInfo {
3468 message: "test error".to_string(),
3469 line: 42,
3470 context: "some problematic code".to_string(),
3471 }],
3472 };
3473
3474 let error_text = parse_error.to_string();
3475 assert!(error_text.contains("Error at line 42: test error"));
3476 assert!(error_text.contains("42| some problematic code"));
3477 }
3478
3479 #[test]
3480 fn test_line_number_calculation() {
        let test_cases = [
            ("rule dependency\n\tcommand", 2),
            ("#comment\n\t(╯°□°)╯︵ ┻━┻", 2),
            ("var = value\n#comment\n\tindented line", 3),
        ];
3487
3488 for (input, expected_line) in test_cases {
3489 match input.parse::<Makefile>() {
3491 Ok(_) => {
3492 continue;
3495 }
3496 Err(err) => {
3497 if let Error::Parse(parse_err) = err {
3498 assert_eq!(
3500 parse_err.errors[0].line, expected_line,
3501 "Line number should match the expected line"
3502 );
3503
3504 if parse_err.errors[0].message.contains("indented") {
3506 assert!(
3507 parse_err.errors[0].context.starts_with('\t'),
3508 "Context for indentation errors should include the tab character"
3509 );
3510 }
3511 } else {
3512 panic!("Expected parse error, got: {:?}", err);
3513 }
3514 }
3515 }
3516 }
3517 }
3518
3519 #[test]
3520 fn test_conditional_features() {
3521 let code = r#"
3523# Set variables based on DEBUG flag
3524ifdef DEBUG
3525 CFLAGS += -g -DDEBUG
3526else
3527 CFLAGS = -O2
3528endif
3529
3530# Define a build rule
3531all: $(OBJS)
3532 $(CC) $(CFLAGS) -o $@ $^
3533"#;
3534
3535 let mut buf = code.as_bytes();
3536 let makefile =
3537 Makefile::read_relaxed(&mut buf).expect("Failed to parse conditional features");
3538
3539 assert!(!makefile.code().is_empty(), "Makefile has content");
3542
3543 let rules = makefile.rules().collect::<Vec<_>>();
3545 assert!(!rules.is_empty(), "Should have found rules");
3546
3547 assert!(code.contains("ifdef DEBUG"));
3549 assert!(code.contains("endif"));
3550
3551 let code_with_var = r#"
3553# Define a variable first
3554CC = gcc
3555
3556ifdef DEBUG
3557 CFLAGS += -g -DDEBUG
3558else
3559 CFLAGS = -O2
3560endif
3561
3562all: $(OBJS)
3563 $(CC) $(CFLAGS) -o $@ $^
3564"#;
3565
3566 let mut buf = code_with_var.as_bytes();
3567 let makefile =
3568 Makefile::read_relaxed(&mut buf).expect("Failed to parse with explicit variable");
3569
3570 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3572 assert!(
3573 !vars.is_empty(),
3574 "Should have found at least the CC variable definition"
3575 );
3576 }
3577
3578 #[test]
3579 fn test_include_directive() {
3580 let parsed = parse("include config.mk\ninclude $(TOPDIR)/rules.mk\ninclude *.mk\n");
3581 assert!(parsed.errors.is_empty());
3582 let node = parsed.syntax();
3583 assert!(format!("{:#?}", node).contains("INCLUDE@"));
3584 }
3585
3586 #[test]
3587 fn test_export_variables() {
3588 let parsed = parse("export SHELL := /bin/bash\n");
3589 assert!(parsed.errors.is_empty());
3590 let makefile = parsed.root();
3591 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3592 assert_eq!(vars.len(), 1);
3593 let shell_var = vars
3594 .iter()
3595 .find(|v| v.name() == Some("SHELL".to_string()))
3596 .unwrap();
3597 assert!(shell_var.raw_value().unwrap().contains("bin/bash"));
3598 }
3599
3600 #[test]
3601 fn test_variable_scopes() {
3602 let parsed =
3603 parse("SIMPLE = value\nIMMEDIATE := value\nCONDITIONAL ?= value\nAPPEND += value\n");
3604 assert!(parsed.errors.is_empty());
3605 let makefile = parsed.root();
3606 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3607 assert_eq!(vars.len(), 4);
3608 let var_names: Vec<_> = vars.iter().filter_map(|v| v.name()).collect();
3609 assert!(var_names.contains(&"SIMPLE".to_string()));
3610 assert!(var_names.contains(&"IMMEDIATE".to_string()));
3611 assert!(var_names.contains(&"CONDITIONAL".to_string()));
3612 assert!(var_names.contains(&"APPEND".to_string()));
3613 }
3614
3615 #[test]
3616 fn test_pattern_rule_parsing() {
3617 let parsed = parse("%.o: %.c\n\t$(CC) -c -o $@ $<\n");
3618 assert!(parsed.errors.is_empty());
3619 let makefile = parsed.root();
3620 let rules = makefile.rules().collect::<Vec<_>>();
3621 assert_eq!(rules.len(), 1);
3622 assert_eq!(rules[0].targets().next().unwrap(), "%.o");
3623 assert!(rules[0].recipes().next().unwrap().contains("$@"));
3624 }
3625
3626 #[test]
3627 fn test_include_variants() {
3628 let makefile_str = "include simple.mk\n-include optional.mk\nsinclude synonym.mk\ninclude $(VAR)/generated.mk\n";
3630 let parsed = parse(makefile_str);
3631 assert!(parsed.errors.is_empty());
3632
3633 let node = parsed.syntax();
3635 let debug_str = format!("{:#?}", node);
3636
3637 assert_eq!(debug_str.matches("INCLUDE@").count(), 4);
3639
3640 let makefile = parsed.root();
3642
3643 let include_count = makefile
3645 .syntax()
3646 .children()
3647 .filter(|child| child.kind() == INCLUDE)
3648 .count();
3649 assert_eq!(include_count, 4);
3650
3651 assert!(makefile
3653 .included_files()
3654 .any(|path| path.contains("$(VAR)")));
3655 }
3656
3657 #[test]
3658 fn test_include_api() {
3659 let makefile_str = "include simple.mk\n-include optional.mk\nsinclude synonym.mk\n";
3661 let makefile: Makefile = makefile_str.parse().unwrap();
3662
3663 let includes: Vec<_> = makefile.includes().collect();
3665 assert_eq!(includes.len(), 3);
3666
        assert!(!includes[0].is_optional()); // include
        assert!(includes[1].is_optional()); // -include
        assert!(includes[2].is_optional()); // sinclude

        let files: Vec<_> = makefile.included_files().collect();
3674 assert_eq!(files, vec!["simple.mk", "optional.mk", "synonym.mk"]);
3675
3676 assert_eq!(includes[0].path(), Some("simple.mk".to_string()));
3678 assert_eq!(includes[1].path(), Some("optional.mk".to_string()));
3679 assert_eq!(includes[2].path(), Some("synonym.mk".to_string()));
3680 }
3681
3682 #[test]
3683 fn test_include_integration() {
3684 let phony_makefile = Makefile::from_reader(
3688 ".PHONY: build\n\nVERBOSE ?= 0\n\n# comment\n-include .env\n\nrule: dependency\n\tcommand"
3689 .as_bytes()
3690 ).unwrap();
3691
3692 assert_eq!(phony_makefile.rules().count(), 2);
3694
3695 let normal_rules_count = phony_makefile
3697 .rules()
3698 .filter(|r| !r.targets().any(|t| t.starts_with('.')))
3699 .count();
3700 assert_eq!(normal_rules_count, 1);
3701
3702 assert_eq!(phony_makefile.includes().count(), 1);
3704 assert_eq!(phony_makefile.included_files().next().unwrap(), ".env");
3705
3706 let simple_makefile = Makefile::from_reader(
3708 "\n\nVERBOSE ?= 0\n\n# comment\n-include .env\n\nrule: dependency\n\tcommand"
3709 .as_bytes(),
3710 )
3711 .unwrap();
3712 assert_eq!(simple_makefile.rules().count(), 1);
3713 assert_eq!(simple_makefile.includes().count(), 1);
3714 }
3715
3716 #[test]
3717 fn test_real_conditional_directives() {
3718 let conditional = "ifdef DEBUG\nCFLAGS = -g\nelse\nCFLAGS = -O2\nendif\n";
3720 let mut buf = conditional.as_bytes();
3721 let makefile =
3722 Makefile::read_relaxed(&mut buf).expect("Failed to parse basic if/else conditional");
3723 let code = makefile.code();
3724 assert!(code.contains("ifdef DEBUG"));
3725 assert!(code.contains("else"));
3726 assert!(code.contains("endif"));
3727
3728 let nested = "ifdef DEBUG\nCFLAGS = -g\nifdef VERBOSE\nCFLAGS += -v\nendif\nendif\n";
3730 let mut buf = nested.as_bytes();
3731 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse nested ifdef");
3732 let code = makefile.code();
3733 assert!(code.contains("ifdef DEBUG"));
3734 assert!(code.contains("ifdef VERBOSE"));
3735
3736 let ifeq = "ifeq ($(OS),Windows_NT)\nTARGET = app.exe\nelse\nTARGET = app\nendif\n";
3738 let mut buf = ifeq.as_bytes();
3739 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse ifeq form");
3740 let code = makefile.code();
3741 assert!(code.contains("ifeq"));
3742 assert!(code.contains("Windows_NT"));
3743 }
3744
3745 #[test]
3746 fn test_indented_text_outside_rules() {
3747 let help_text = "help:\n\t@echo \"Available targets:\"\n\t@echo \" help show help\"\n";
3749 let parsed = parse(help_text);
3750 assert!(parsed.errors.is_empty());
3751
3752 let root = parsed.root();
3754 let rules = root.rules().collect::<Vec<_>>();
3755 assert_eq!(rules.len(), 1);
3756
3757 let help_rule = &rules[0];
3758 let recipes = help_rule.recipes().collect::<Vec<_>>();
3759 assert_eq!(recipes.len(), 2);
3760 assert!(recipes[0].contains("Available targets"));
3761 assert!(recipes[1].contains("help"));
3762 }
3763
3764 #[test]
3765 fn test_comment_handling_in_recipes() {
3766 let recipe_comment = "build:\n\t# This is a comment\n\tgcc -o app main.c\n";
3768
3769 let parsed = parse(recipe_comment);
3771
3772 assert!(
3774 parsed.errors.is_empty(),
3775 "Should parse recipe with comments without errors"
3776 );
3777
3778 let root = parsed.root();
3780 let rules = root.rules().collect::<Vec<_>>();
3781 assert_eq!(rules.len(), 1, "Should find exactly one rule");
3782
3783 let build_rule = &rules[0];
3785 assert_eq!(
3786 build_rule.targets().collect::<Vec<_>>(),
3787 vec!["build"],
3788 "Rule should have 'build' as target"
3789 );
3790
3791 let recipes = build_rule.recipes().collect::<Vec<_>>();
3795 assert_eq!(
3796 recipes.len(),
3797 1,
3798 "Should find exactly one recipe line (comment lines are filtered)"
3799 );
3800 assert!(
3801 recipes[0].contains("gcc -o app"),
3802 "Recipe should be the command line"
3803 );
3804 assert!(
3805 !recipes[0].contains("This is a comment"),
3806 "Comments should not be included in recipe lines"
3807 );
3808 }
3809
3810 #[test]
3811 fn test_multiline_variables() {
3812 let multiline = "SOURCES = main.c \\\n util.c\n";
3814
3815 let parsed = parse(multiline);
3817
3818 let root = parsed.root();
3820 let vars = root.variable_definitions().collect::<Vec<_>>();
3821 assert!(!vars.is_empty(), "Should find at least one variable");
3822
3823 let operators = "CFLAGS := -Wall \\\n -Werror\n";
3827 let parsed_operators = parse(operators);
3828
3829 let root = parsed_operators.root();
3831 let vars = root.variable_definitions().collect::<Vec<_>>();
3832 assert!(
3833 !vars.is_empty(),
3834 "Should find at least one variable with := operator"
3835 );
3836
3837 let append = "LDFLAGS += -L/usr/lib \\\n -lm\n";
3839 let parsed_append = parse(append);
3840
3841 let root = parsed_append.root();
3843 let vars = root.variable_definitions().collect::<Vec<_>>();
3844 assert!(
3845 !vars.is_empty(),
3846 "Should find at least one variable with += operator"
3847 );
3848 }
3849
3850 #[test]
3851 fn test_whitespace_and_eof_handling() {
3852 let blank_lines = "VAR = value\n\n\n";
3854
3855 let parsed_blank = parse(blank_lines);
3856
3857 let root = parsed_blank.root();
3859 let vars = root.variable_definitions().collect::<Vec<_>>();
3860 assert_eq!(
3861 vars.len(),
3862 1,
3863 "Should find one variable in blank lines test"
3864 );
3865
3866 let trailing_space = "VAR = value \n";
3868
3869 let parsed_space = parse(trailing_space);
3870
3871 let root = parsed_space.root();
3873 let vars = root.variable_definitions().collect::<Vec<_>>();
3874 assert_eq!(
3875 vars.len(),
3876 1,
3877 "Should find one variable in trailing space test"
3878 );
3879
3880 let no_newline = "VAR = value";
3882
3883 let parsed_no_newline = parse(no_newline);
3884
3885 let root = parsed_no_newline.root();
3887 let vars = root.variable_definitions().collect::<Vec<_>>();
3888 assert_eq!(vars.len(), 1, "Should find one variable in no newline test");
3889 assert_eq!(
3890 vars[0].name(),
3891 Some("VAR".to_string()),
3892 "Variable name should be VAR"
3893 );
3894 }
3895
3896 #[test]
3897 fn test_complex_variable_references() {
3898 let wildcard = "SOURCES = $(wildcard *.c)\n";
3900 let parsed = parse(wildcard);
3901 assert!(parsed.errors.is_empty());
3902
3903 let nested = "PREFIX = /usr\nBINDIR = $(PREFIX)/bin\n";
3905 let parsed = parse(nested);
3906 assert!(parsed.errors.is_empty());
3907
3908 let patsubst = "OBJECTS = $(patsubst %.c,%.o,$(SOURCES))\n";
3910 let parsed = parse(patsubst);
3911 assert!(parsed.errors.is_empty());
3912 }
3931
3932 #[test]
3933 fn test_multiline_variable_with_backslash() {
3934 let content = r#"
3935LONG_VAR = This is a long variable \
3936 that continues on the next line \
3937 and even one more line
3938"#;
3939
3940 let mut buf = content.as_bytes();
3942 let makefile =
3943 Makefile::read_relaxed(&mut buf).expect("Failed to parse multiline variable");
3944
3945 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3947 assert_eq!(
3948 vars.len(),
3949 1,
3950 "Expected 1 variable but found {}",
3951 vars.len()
3952 );
3953 let var_value = vars[0].raw_value();
3954 assert!(var_value.is_some(), "Variable value is None");
3955
3956 let value_str = var_value.unwrap();
3958 assert!(
3959 value_str.contains("long variable"),
3960 "Value doesn't contain expected content"
3961 );
3962 }
3963
3964 #[test]
3965 fn test_multiline_variable_with_mixed_operators() {
3966 let content = r#"
3967PREFIX ?= /usr/local
3968CFLAGS := -Wall -O2 \
3969 -I$(PREFIX)/include \
3970 -DDEBUG
3971"#;
3972 let mut buf = content.as_bytes();
3974 let makefile = Makefile::read_relaxed(&mut buf)
3975 .expect("Failed to parse multiline variable with operators");
3976
3977 let vars = makefile.variable_definitions().collect::<Vec<_>>();
3979 assert!(
3980 vars.len() >= 1,
3981 "Expected at least 1 variable, found {}",
3982 vars.len()
3983 );
3984
3985 let prefix_var = vars
3987 .iter()
3988 .find(|v| v.name().unwrap_or_default() == "PREFIX");
3989 assert!(prefix_var.is_some(), "Expected to find PREFIX variable");
3990 assert!(
3991 prefix_var.unwrap().raw_value().is_some(),
3992 "PREFIX variable has no value"
3993 );
3994
3995 let cflags_var = vars
3997 .iter()
3998 .find(|v| v.name().unwrap_or_default().contains("CFLAGS"));
3999 assert!(
4000 cflags_var.is_some(),
4001 "Expected to find CFLAGS variable (or part of it)"
4002 );
4003 }
4004
4005 #[test]
4006 fn test_indented_help_text() {
4007 let content = r#"
4008.PHONY: help
4009help:
4010 @echo "Available targets:"
4011 @echo " build - Build the project"
4012 @echo " test - Run tests"
4013 @echo " clean - Remove build artifacts"
4014"#;
4015 let mut buf = content.as_bytes();
4017 let makefile =
4018 Makefile::read_relaxed(&mut buf).expect("Failed to parse indented help text");
4019
4020 let rules = makefile.rules().collect::<Vec<_>>();
4022 assert!(!rules.is_empty(), "Expected at least one rule");
4023
4024 let help_rule = rules.iter().find(|r| r.targets().any(|t| t == "help"));
4026 assert!(help_rule.is_some(), "Expected to find help rule");
4027
4028 let recipes = help_rule.unwrap().recipes().collect::<Vec<_>>();
4030 assert!(
4031 !recipes.is_empty(),
4032 "Expected at least one recipe line in help rule"
4033 );
4034 assert!(
4035 recipes.iter().any(|r| r.contains("Available targets")),
4036 "Expected to find 'Available targets' in recipes"
4037 );
4038 }
4039
4040 #[test]
4041 fn test_indented_lines_in_conditionals() {
4042 let content = r#"
4043ifdef DEBUG
4044 CFLAGS += -g -DDEBUG
4045 # This is a comment inside conditional
4046 ifdef VERBOSE
4047 CFLAGS += -v
4048 endif
4049endif
4050"#;
4051 let mut buf = content.as_bytes();
4053 let makefile = Makefile::read_relaxed(&mut buf)
4054 .expect("Failed to parse indented lines in conditionals");
4055
4056 let code = makefile.code();
4058 assert!(code.contains("ifdef DEBUG"));
4059 assert!(code.contains("ifdef VERBOSE"));
4060 assert!(code.contains("endif"));
4061 }
4062
4063 #[test]
4064 fn test_recipe_with_colon() {
4065 let content = r#"
4066build:
4067 @echo "Building at: $(shell date)"
4068 gcc -o program main.c
4069"#;
4070 let parsed = parse(content);
4071 assert!(
4072 parsed.errors.is_empty(),
4073 "Failed to parse recipe with colon: {:?}",
4074 parsed.errors
4075 );
4076 }
4077
4078 #[test]
4079 #[ignore]
4080 fn test_double_colon_rules() {
4081 let content = r#"
4084%.o :: %.c
4085 $(CC) -c $< -o $@
4086
4087# Double colon allows multiple rules for same target
4088all:: prerequisite1
4089 @echo "First rule for all"
4090
4091all:: prerequisite2
4092 @echo "Second rule for all"
4093"#;
4094 let mut buf = content.as_bytes();
4095 let makefile =
4096 Makefile::read_relaxed(&mut buf).expect("Failed to parse double colon rules");
4097
4098 let rules = makefile.rules().collect::<Vec<_>>();
4100 assert!(!rules.is_empty(), "Expected at least one rule");
4101
4102 let all_rules = rules
4104 .iter()
4105 .filter(|r| r.targets().any(|t| t.contains("all")));
4106 assert!(
4107 all_rules.count() > 0,
4108 "Expected to find at least one rule containing 'all'"
4109 );
4110 }
4111
4112 #[test]
4113 fn test_elif_directive() {
4114 let content = r#"
4115ifeq ($(OS),Windows_NT)
4116 TARGET = windows
4117elif ifeq ($(OS),Darwin)
4118 TARGET = macos
4119elif ifeq ($(OS),Linux)
4120 TARGET = linux
4121else
4122 TARGET = unknown
4123endif
4124"#;
4125 let mut buf = content.as_bytes();
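        // `elif` is not a real GNU make directive; relaxed parsing should still
        // accept this spelling rather than fail.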
4127 let _makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse elif directive");
4128
4129 }
4132
4133 #[test]
4134 fn test_ambiguous_assignment_vs_rule() {
4135 const VAR_ASSIGNMENT: &str = "VARIABLE = value\n";
4137
4138 let mut buf = std::io::Cursor::new(VAR_ASSIGNMENT);
4139 let makefile =
4140 Makefile::read_relaxed(&mut buf).expect("Failed to parse variable assignment");
4141
4142 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4143 let rules = makefile.rules().collect::<Vec<_>>();
4144
4145 assert_eq!(vars.len(), 1, "Expected 1 variable, found {}", vars.len());
4146 assert_eq!(rules.len(), 0, "Expected 0 rules, found {}", rules.len());
4147
4148 assert_eq!(vars[0].name(), Some("VARIABLE".to_string()));
4149
4150 const SIMPLE_RULE: &str = "target: dependency\n";
4152
4153 let mut buf = std::io::Cursor::new(SIMPLE_RULE);
4154 let makefile = Makefile::read_relaxed(&mut buf).expect("Failed to parse simple rule");
4155
4156 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4157 let rules = makefile.rules().collect::<Vec<_>>();
4158
4159 assert_eq!(vars.len(), 0, "Expected 0 variables, found {}", vars.len());
4160 assert_eq!(rules.len(), 1, "Expected 1 rule, found {}", rules.len());
4161
4162 let rule = &rules[0];
4163 assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["target"]);
4164 }
4165
4166 #[test]
4167 fn test_nested_conditionals() {
4168 let content = r#"
4169ifdef RELEASE
4170 CFLAGS += -O3
4171 ifndef DEBUG
4172 ifneq ($(ARCH),arm)
4173 CFLAGS += -march=native
4174 else
4175 CFLAGS += -mcpu=cortex-a72
4176 endif
4177 endif
4178endif
4179"#;
4180 let mut buf = content.as_bytes();
4182 let makefile =
4183 Makefile::read_relaxed(&mut buf).expect("Failed to parse nested conditionals");
4184
4185 let code = makefile.code();
4187 assert!(code.contains("ifdef RELEASE"));
4188 assert!(code.contains("ifndef DEBUG"));
4189 assert!(code.contains("ifneq"));
4190 }
4191
4192 #[test]
4193 fn test_space_indented_recipes() {
4194 let content = r#"
4197build:
4198 @echo "Building with spaces instead of tabs"
4199 gcc -o program main.c
4200"#;
4201 let mut buf = content.as_bytes();
4203 let makefile =
4204 Makefile::read_relaxed(&mut buf).expect("Failed to parse space-indented recipes");
4205
4206 let rules = makefile.rules().collect::<Vec<_>>();
4208 assert!(!rules.is_empty(), "Expected at least one rule");
4209
4210 let build_rule = rules.iter().find(|r| r.targets().any(|t| t == "build"));
4212 assert!(build_rule.is_some(), "Expected to find build rule");
4213 }
4214
4215 #[test]
4216 fn test_complex_variable_functions() {
4217 let content = r#"
4218FILES := $(shell find . -name "*.c")
4219OBJS := $(patsubst %.c,%.o,$(FILES))
4220NAME := $(if $(PROGRAM),$(PROGRAM),a.out)
4221HEADERS := ${wildcard *.h}
4222"#;
4223 let parsed = parse(content);
4224 assert!(
4225 parsed.errors.is_empty(),
4226 "Failed to parse complex variable functions: {:?}",
4227 parsed.errors
4228 );
4229 }
4230
4231 #[test]
4232 fn test_nested_variable_expansions() {
4233 let content = r#"
4234VERSION = 1.0
4235PACKAGE = myapp
4236TARBALL = $(PACKAGE)-$(VERSION).tar.gz
4237INSTALL_PATH = $(shell echo $(PREFIX) | sed 's/\/$//')
4238"#;
4239 let parsed = parse(content);
4240 assert!(
4241 parsed.errors.is_empty(),
4242 "Failed to parse nested variable expansions: {:?}",
4243 parsed.errors
4244 );
4245 }
4246
4247 #[test]
4248 fn test_special_directives() {
4249 let content = r#"
4250# Special makefile directives
4251.PHONY: all clean
4252.SUFFIXES: .c .o
4253.DEFAULT: all
4254
4255# Variable definition and export directive
4256export PATH := /usr/bin:/bin
4257"#;
4258 let mut buf = content.as_bytes();
4260 let makefile =
4261 Makefile::read_relaxed(&mut buf).expect("Failed to parse special directives");
4262
4263 let rules = makefile.rules().collect::<Vec<_>>();
4265
4266 let phony_rule = rules
4268 .iter()
4269 .find(|r| r.targets().any(|t| t.contains(".PHONY")));
4270 assert!(phony_rule.is_some(), "Expected to find .PHONY rule");
4271
4272 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4274 assert!(!vars.is_empty(), "Expected to find at least one variable");
4275 }
4276
4277 #[test]
4280 fn test_comprehensive_real_world_makefile() {
4281 let content = r#"
4283# Basic variable assignment
4284VERSION = 1.0.0
4285
4286# Phony target
4287.PHONY: all clean
4288
4289# Simple rule
4290all:
4291 echo "Building version $(VERSION)"
4292
4293# Another rule with dependencies
4294clean:
4295 rm -f *.o
4296"#;
4297
4298 let parsed = parse(content);
4300
4301 assert!(parsed.errors.is_empty(), "Expected no parsing errors");
4303
4304 let variables = parsed.root().variable_definitions().collect::<Vec<_>>();
4306 assert!(!variables.is_empty(), "Expected at least one variable");
4307 assert_eq!(
4308 variables[0].name(),
4309 Some("VERSION".to_string()),
4310 "Expected VERSION variable"
4311 );
4312
4313 let rules = parsed.root().rules().collect::<Vec<_>>();
4315 assert!(!rules.is_empty(), "Expected at least one rule");
4316
4317 let rule_targets: Vec<String> = rules
4319 .iter()
4320 .flat_map(|r| r.targets().collect::<Vec<_>>())
4321 .collect();
4322 assert!(
4323 rule_targets.contains(&".PHONY".to_string()),
4324 "Expected .PHONY rule"
4325 );
4326 assert!(
4327 rule_targets.contains(&"all".to_string()),
4328 "Expected 'all' rule"
4329 );
4330 assert!(
4331 rule_targets.contains(&"clean".to_string()),
4332 "Expected 'clean' rule"
4333 );
4334 }
4335
4336 #[test]
4337 fn test_indented_help_text_outside_rules() {
4338 let content = r#"
4340# Targets with help text
4341help:
4342 @echo "Available targets:"
4343 @echo " build build the project"
4344 @echo " test run tests"
4345 @echo " clean clean build artifacts"
4346
4347# Another target
4348clean:
4349 rm -rf build/
4350"#;
4351
4352 let parsed = parse(content);
4354
4355 assert!(
4357 parsed.errors.is_empty(),
4358 "Failed to parse indented help text"
4359 );
4360
4361 let rules = parsed.root().rules().collect::<Vec<_>>();
4363 assert_eq!(rules.len(), 2, "Expected to find two rules");
4364
4365 let help_rule = rules
4367 .iter()
4368 .find(|r| r.targets().any(|t| t == "help"))
4369 .expect("Expected to find help rule");
4370
4371 let clean_rule = rules
4372 .iter()
4373 .find(|r| r.targets().any(|t| t == "clean"))
4374 .expect("Expected to find clean rule");
4375
4376 let help_recipes = help_rule.recipes().collect::<Vec<_>>();
4378 assert!(
4379 !help_recipes.is_empty(),
4380 "Help rule should have recipe lines"
4381 );
4382 assert!(
4383 help_recipes
4384 .iter()
4385 .any(|line| line.contains("Available targets")),
4386 "Help recipes should include 'Available targets' line"
4387 );
4388
4389 let clean_recipes = clean_rule.recipes().collect::<Vec<_>>();
4391 assert!(
4392 !clean_recipes.is_empty(),
4393 "Clean rule should have recipe lines"
4394 );
4395 assert!(
4396 clean_recipes.iter().any(|line| line.contains("rm -rf")),
4397 "Clean recipes should include 'rm -rf' command"
4398 );
4399 }
4400
4401 #[test]
4402 fn test_makefile1_phony_pattern() {
4403 let content = "#line 2145\n.PHONY: $(PHONY)\n";
4405
4406 let result = parse(content);
4408
4409 assert!(
4411 result.errors.is_empty(),
4412 "Failed to parse .PHONY: $(PHONY) pattern"
4413 );
4414
4415 let rules = result.root().rules().collect::<Vec<_>>();
4417 assert_eq!(rules.len(), 1, "Expected 1 rule");
4418 assert_eq!(
4419 rules[0].targets().next().unwrap(),
4420 ".PHONY",
4421 "Expected .PHONY rule"
4422 );
4423
4424 let prereqs = rules[0].prerequisites().collect::<Vec<_>>();
4426 assert_eq!(prereqs.len(), 1, "Expected 1 prerequisite");
4427 assert_eq!(prereqs[0], "$(PHONY)", "Expected $(PHONY) prerequisite");
4428 }
4429
4430 #[test]
4431 fn test_skip_until_newline_behavior() {
4432 let input = "text without newline";
4434 let parsed = parse(input);
        // The exact error outcome for input without a trailing newline is not
        // specified; the parser only needs to recover without panicking.
        assert!(parsed.errors.is_empty() || !parsed.errors.is_empty());
4437
4438 let input_with_newline = "text\nafter newline";
4439 let parsed2 = parse(input_with_newline);
        // Again, only recovery without panicking is required.
        assert!(parsed2.errors.is_empty() || !parsed2.errors.is_empty());
4441 }
4442
4443 #[test]
    #[ignore]
    fn test_error_with_indent_token() {
4446 let input = "\tinvalid indented line";
4448 let parsed = parse(input);
4449 assert!(!parsed.errors.is_empty());
4451
4452 let error_msg = &parsed.errors[0].message;
4453 assert!(error_msg.contains("recipe commences before first target"));
4454 }
4455
4456 #[test]
4457 fn test_conditional_token_handling() {
4458 let input = r#"
4460ifndef VAR
4461 CFLAGS = -DTEST
4462endif
4463"#;
4464 let parsed = parse(input);
4465 let makefile = parsed.root();
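        // Whether definitions inside the conditional body are surfaced is not
        // asserted here; this only exercises the accessors without panicking.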
4467 let _vars = makefile.variable_definitions().collect::<Vec<_>>();
4468 let nested = r#"
4472ifdef DEBUG
4473 ifndef RELEASE
4474 CFLAGS = -g
4475 endif
4476endif
4477"#;
4478 let parsed_nested = parse(nested);
4479 let _makefile = parsed_nested.root();
4481 }
4482
4483 #[test]
4484 fn test_include_vs_conditional_logic() {
4485 let input = r#"
4487include file.mk
4488ifdef VAR
4489 VALUE = 1
4490endif
4491"#;
4492 let parsed = parse(input);
4493 let makefile = parsed.root();
4495 let includes = makefile.includes().collect::<Vec<_>>();
        assert!(!includes.is_empty() || !parsed.errors.is_empty());
4498
4499 let optional_include = r#"
4501-include optional.mk
4502ifndef VAR
4503 VALUE = default
4504endif
4505"#;
4506 let parsed2 = parse(optional_include);
4507 let _makefile = parsed2.root();
4509 }
4510
4511 #[test]
4512 fn test_balanced_parens_counting() {
4513 let input = r#"
4515VAR = $(call func,$(nested,arg),extra)
4516COMPLEX = $(if $(condition),$(then_val),$(else_val))
4517"#;
4518 let parsed = parse(input);
4519 assert!(parsed.errors.is_empty());
4520
4521 let makefile = parsed.root();
4522 let vars = makefile.variable_definitions().collect::<Vec<_>>();
4523 assert_eq!(vars.len(), 2);
4524 }
4525
4526 #[test]
4527 fn test_documentation_lookahead() {
4528 let input = r#"
4530# Documentation comment
4531help:
4532 @echo "Usage instructions"
4533 @echo "More help text"
4534"#;
4535 let parsed = parse(input);
4536 assert!(parsed.errors.is_empty());
4537
4538 let makefile = parsed.root();
4539 let rules = makefile.rules().collect::<Vec<_>>();
4540 assert_eq!(rules.len(), 1);
4541 assert_eq!(rules[0].targets().next().unwrap(), "help");
4542 }
4543
4544 #[test]
4545 fn test_edge_case_empty_input() {
4546 let parsed = parse("");
4548 assert!(parsed.errors.is_empty());
4549
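        // Whitespace-only input should still yield a root node without panicking.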
4550 let parsed2 = parse(" \n \n");
4552 let _makefile = parsed2.root();
4555 }
4556
4557 #[test]
4558 fn test_malformed_conditional_recovery() {
4559 let input = r#"
4561ifdef
4562 # Missing condition variable
4563endif
4564"#;
4565 let parsed = parse(input);
        // An `ifdef` with a missing condition may or may not be reported as an
        // error; the parser just needs to recover without panicking.
        assert!(parsed.errors.is_empty() || !parsed.errors.is_empty());
4569 }
4570
4571 #[test]
4572 fn test_replace_rule() {
4573 let mut makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
4574 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4575
4576 makefile.replace_rule(0, new_rule).unwrap();
4577
4578 let targets: Vec<_> = makefile
4579 .rules()
4580 .flat_map(|r| r.targets().collect::<Vec<_>>())
4581 .collect();
4582 assert_eq!(targets, vec!["new_rule", "rule2"]);
4583
4584 let recipes: Vec<_> = makefile.rules().next().unwrap().recipes().collect();
4585 assert_eq!(recipes, vec!["new_command"]);
4586 }
4587
4588 #[test]
4589 fn test_replace_rule_out_of_bounds() {
4590 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4591 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4592
4593 let result = makefile.replace_rule(5, new_rule);
4594 assert!(result.is_err());
4595 }
4596
4597 #[test]
4598 fn test_remove_rule() {
4599 let mut makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\nrule3:\n\tcommand3\n"
4600 .parse()
4601 .unwrap();
4602
4603 let removed = makefile.remove_rule(1).unwrap();
4604 assert_eq!(removed.targets().collect::<Vec<_>>(), vec!["rule2"]);
4605
4606 let remaining_targets: Vec<_> = makefile
4607 .rules()
4608 .flat_map(|r| r.targets().collect::<Vec<_>>())
4609 .collect();
4610 assert_eq!(remaining_targets, vec!["rule1", "rule3"]);
4611 assert_eq!(makefile.rules().count(), 2);
4612 }
4613
4614 #[test]
4615 fn test_remove_rule_out_of_bounds() {
4616 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4617
4618 let result = makefile.remove_rule(5);
4619 assert!(result.is_err());
4620 }
4621
4622 #[test]
4623 fn test_insert_rule() {
4624 let mut makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
4625 let new_rule: Rule = "inserted_rule:\n\tinserted_command\n".parse().unwrap();
4626
4627 makefile.insert_rule(1, new_rule).unwrap();
4628
4629 let targets: Vec<_> = makefile
4630 .rules()
4631 .flat_map(|r| r.targets().collect::<Vec<_>>())
4632 .collect();
4633 assert_eq!(targets, vec!["rule1", "inserted_rule", "rule2"]);
4634 assert_eq!(makefile.rules().count(), 3);
4635 }
4636
4637 #[test]
4638 fn test_insert_rule_at_end() {
4639 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4640 let new_rule: Rule = "end_rule:\n\tend_command\n".parse().unwrap();
4641
4642 makefile.insert_rule(1, new_rule).unwrap();
4643
4644 let targets: Vec<_> = makefile
4645 .rules()
4646 .flat_map(|r| r.targets().collect::<Vec<_>>())
4647 .collect();
4648 assert_eq!(targets, vec!["rule1", "end_rule"]);
4649 }
4650
4651 #[test]
4652 fn test_insert_rule_out_of_bounds() {
4653 let mut makefile: Makefile = "rule1:\n\tcommand1\n".parse().unwrap();
4654 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4655
4656 let result = makefile.insert_rule(5, new_rule);
4657 assert!(result.is_err());
4658 }
4659
4660 #[test]
4661 fn test_remove_command() {
4662 let mut rule: Rule = "rule:\n\tcommand1\n\tcommand2\n\tcommand3\n"
4663 .parse()
4664 .unwrap();
4665
4666 rule.remove_command(1);
4667 let recipes: Vec<_> = rule.recipes().collect();
4668 assert_eq!(recipes, vec!["command1", "command3"]);
4669 assert_eq!(rule.recipe_count(), 2);
4670 }
4671
4672 #[test]
4673 fn test_remove_command_out_of_bounds() {
4674 let mut rule: Rule = "rule:\n\tcommand1\n".parse().unwrap();
4675
4676 let result = rule.remove_command(5);
4677 assert!(!result);
4678 }
4679
4680 #[test]
4681 fn test_insert_command() {
4682 let mut rule: Rule = "rule:\n\tcommand1\n\tcommand3\n".parse().unwrap();
4683
4684 rule.insert_command(1, "command2");
4685 let recipes: Vec<_> = rule.recipes().collect();
4686 assert_eq!(recipes, vec!["command1", "command2", "command3"]);
4687 }
4688
4689 #[test]
4690 fn test_insert_command_at_end() {
4691 let mut rule: Rule = "rule:\n\tcommand1\n".parse().unwrap();
4692
4693 rule.insert_command(1, "command2");
4694 let recipes: Vec<_> = rule.recipes().collect();
4695 assert_eq!(recipes, vec!["command1", "command2"]);
4696 }
4697
4698 #[test]
4699 fn test_insert_command_in_empty_rule() {
4700 let mut rule: Rule = "rule:\n".parse().unwrap();
4701
4702 rule.insert_command(0, "new_command");
4703 let recipes: Vec<_> = rule.recipes().collect();
4704 assert_eq!(recipes, vec!["new_command"]);
4705 }
4706
4707 #[test]
4708 fn test_recipe_count() {
4709 let rule1: Rule = "rule:\n".parse().unwrap();
4710 assert_eq!(rule1.recipe_count(), 0);
4711
4712 let rule2: Rule = "rule:\n\tcommand1\n\tcommand2\n".parse().unwrap();
4713 assert_eq!(rule2.recipe_count(), 2);
4714 }
4715
4716 #[test]
4717 fn test_clear_commands() {
4718 let mut rule: Rule = "rule:\n\tcommand1\n\tcommand2\n\tcommand3\n"
4719 .parse()
4720 .unwrap();
4721
4722 rule.clear_commands();
4723 assert_eq!(rule.recipe_count(), 0);
4724
4725 let recipes: Vec<_> = rule.recipes().collect();
4726 assert_eq!(recipes, Vec::<String>::new());
4727
4728 let targets: Vec<_> = rule.targets().collect();
4730 assert_eq!(targets, vec!["rule"]);
4731 }
4732
4733 #[test]
4734 fn test_clear_commands_empty_rule() {
4735 let mut rule: Rule = "rule:\n".parse().unwrap();
4736
4737 rule.clear_commands();
4738 assert_eq!(rule.recipe_count(), 0);
4739
4740 let targets: Vec<_> = rule.targets().collect();
4741 assert_eq!(targets, vec!["rule"]);
4742 }
4743
4744 #[test]
4745 fn test_rule_manipulation_preserves_structure() {
4746 let input = r#"# Comment
4748VAR = value
4749
4750rule1:
4751 command1
4752
4753# Another comment
4754rule2:
4755 command2
4756
4757VAR2 = value2
4758"#;
4759
4760 let mut makefile: Makefile = input.parse().unwrap();
4761 let new_rule: Rule = "new_rule:\n\tnew_command\n".parse().unwrap();
4762
4763 makefile.insert_rule(1, new_rule).unwrap();
4765
4766 let targets: Vec<_> = makefile
4768 .rules()
4769 .flat_map(|r| r.targets().collect::<Vec<_>>())
4770 .collect();
4771 assert_eq!(targets, vec!["rule1", "new_rule", "rule2"]);
4772
4773 let vars: Vec<_> = makefile.variable_definitions().collect();
4775 assert_eq!(vars.len(), 2);
4776
4777 let output = makefile.code();
4779 assert!(output.contains("# Comment"));
4780 assert!(output.contains("VAR = value"));
4781 assert!(output.contains("# Another comment"));
4782 assert!(output.contains("VAR2 = value2"));
4783 }
4784
4785 #[test]
4786 fn test_replace_rule_with_multiple_targets() {
4787 let mut makefile: Makefile = "target1 target2: dep\n\tcommand\n".parse().unwrap();
4788 let new_rule: Rule = "new_target: new_dep\n\tnew_command\n".parse().unwrap();
4789
4790 makefile.replace_rule(0, new_rule).unwrap();
4791
4792 let targets: Vec<_> = makefile
4793 .rules()
4794 .flat_map(|r| r.targets().collect::<Vec<_>>())
4795 .collect();
4796 assert_eq!(targets, vec!["new_target"]);
4797 }
4798
4799 #[test]
4800 fn test_empty_makefile_operations() {
4801 let mut makefile = Makefile::new();
4802
4803 assert!(makefile
4805 .replace_rule(0, "rule:\n\tcommand\n".parse().unwrap())
4806 .is_err());
4807 assert!(makefile.remove_rule(0).is_err());
4808
4809 let new_rule: Rule = "first_rule:\n\tcommand\n".parse().unwrap();
4811 makefile.insert_rule(0, new_rule).unwrap();
4812 assert_eq!(makefile.rules().count(), 1);
4813 }
4814
4815 #[test]
4816 fn test_command_operations_preserve_indentation() {
4817 let mut rule: Rule = "rule:\n\t\tdeep_indent\n\tshallow_indent\n"
4818 .parse()
4819 .unwrap();
4820
4821 rule.insert_command(1, "middle_command");
4822 let recipes: Vec<_> = rule.recipes().collect();
4823 assert_eq!(
4824 recipes,
4825 vec!["\tdeep_indent", "middle_command", "shallow_indent"]
4826 );
4827 }
4828
4829 #[test]
4830 fn test_rule_operations_with_variables_and_includes() {
4831 let input = r#"VAR1 = value1
4832include common.mk
4833
4834rule1:
4835 command1
4836
4837VAR2 = value2
4838include other.mk
4839
4840rule2:
4841 command2
4842"#;
4843
4844 let mut makefile: Makefile = input.parse().unwrap();
4845
4846 makefile.remove_rule(0).unwrap();
4848
4849 let output = makefile.code();
4851 assert!(output.contains("VAR1 = value1"));
4852 assert!(output.contains("include common.mk"));
4853 assert!(output.contains("VAR2 = value2"));
4854 assert!(output.contains("include other.mk"));
4855
4856 assert_eq!(makefile.rules().count(), 1);
4858 let remaining_targets: Vec<_> = makefile
4859 .rules()
4860 .flat_map(|r| r.targets().collect::<Vec<_>>())
4861 .collect();
4862 assert_eq!(remaining_targets, vec!["rule2"]);
4863 }
4864
4865 #[test]
4866 fn test_command_manipulation_edge_cases() {
4867 let mut empty_rule: Rule = "empty:\n".parse().unwrap();
4869 assert_eq!(empty_rule.recipe_count(), 0);
4870
4871 empty_rule.insert_command(0, "first_command");
4872 assert_eq!(empty_rule.recipe_count(), 1);
4873
4874 let mut empty_rule2: Rule = "empty:\n".parse().unwrap();
4876 empty_rule2.clear_commands();
4877 assert_eq!(empty_rule2.recipe_count(), 0);
4878 }
4879
4880 #[test]
4881 fn test_archive_member_parsing() {
4882 let input = "libfoo.a(bar.o): bar.c\n\tgcc -c bar.c -o bar.o\n\tar r libfoo.a bar.o\n";
4884 let parsed = parse(input);
4885 assert!(
4886 parsed.errors.is_empty(),
4887 "Should parse archive member without errors"
4888 );
4889
4890 let makefile = parsed.root();
4891 let rules: Vec<_> = makefile.rules().collect();
4892 assert_eq!(rules.len(), 1);
4893
4894 let target_text = rules[0].targets().next().unwrap();
4896 assert_eq!(target_text, "libfoo.a(bar.o)");
4897 }
4898
4899 #[test]
4900 fn test_archive_member_multiple_members() {
4901 let input = "libfoo.a(bar.o baz.o): bar.c baz.c\n\tgcc -c bar.c baz.c\n\tar r libfoo.a bar.o baz.o\n";
4903 let parsed = parse(input);
4904 assert!(
4905 parsed.errors.is_empty(),
4906 "Should parse multiple archive members"
4907 );
4908
4909 let makefile = parsed.root();
4910 let rules: Vec<_> = makefile.rules().collect();
4911 assert_eq!(rules.len(), 1);
4912 }
4913
4914 #[test]
4915 fn test_archive_member_in_dependencies() {
4916 let input =
4918 "program: main.o libfoo.a(bar.o) libfoo.a(baz.o)\n\tgcc -o program main.o libfoo.a\n";
4919 let parsed = parse(input);
4920 assert!(
4921 parsed.errors.is_empty(),
4922 "Should parse archive members in dependencies"
4923 );
4924
4925 let makefile = parsed.root();
4926 let rules: Vec<_> = makefile.rules().collect();
4927 assert_eq!(rules.len(), 1);
4928 }
4929
4930 #[test]
4931 fn test_archive_member_with_variables() {
4932 let input = "$(LIB)($(OBJ)): $(SRC)\n\t$(CC) -c $(SRC)\n\t$(AR) r $(LIB) $(OBJ)\n";
4934 let parsed = parse(input);
4935 assert!(
4937 parsed.errors.is_empty(),
4938 "Should parse archive members with variables"
4939 );
4940 }
4941
4942 #[test]
4943 fn test_archive_member_ast_access() {
4944 let input = "libtest.a(foo.o bar.o): foo.c bar.c\n\tgcc -c foo.c bar.c\n";
4946 let parsed = parse(input);
4947 let makefile = parsed.root();
4948
4949 let archive_member_count = makefile
4951 .syntax()
4952 .descendants()
4953 .filter(|n| n.kind() == ARCHIVE_MEMBERS)
4954 .count();
4955
4956 assert!(
4957 archive_member_count > 0,
4958 "Should find ARCHIVE_MEMBERS nodes in AST"
4959 );
4960 }
4961
4962 #[test]
4963 fn test_large_makefile_performance() {
4964 let mut makefile = Makefile::new();
4966
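        // Generate 100 rules, then replace the one in the middle and verify the
        // total count stays the same.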
4967 for i in 0..100 {
4969 let rule_name = format!("rule{}", i);
4970 let _rule = makefile
4971 .add_rule(&rule_name)
4972 .push_command(&format!("command{}", i));
4973 }
4974
4975 assert_eq!(makefile.rules().count(), 100);
4976
4977 let new_rule: Rule = "middle_rule:\n\tmiddle_command\n".parse().unwrap();
4979 makefile.replace_rule(50, new_rule).unwrap();
4980
4981 let rule_50_targets: Vec<_> = makefile.rules().nth(50).unwrap().targets().collect();
4983 assert_eq!(rule_50_targets, vec!["middle_rule"]);
4984
        assert_eq!(makefile.rules().count(), 100);
    }
4987
4988 #[test]
4989 fn test_complex_recipe_manipulation() {
4990 let mut complex_rule: Rule = r#"complex:
4991 @echo "Starting build"
4992 $(CC) $(CFLAGS) -o $@ $<
4993 @echo "Build complete"
4994 chmod +x $@
4995"#
4996 .parse()
4997 .unwrap();
4998
4999 assert_eq!(complex_rule.recipe_count(), 4);
5000
        // Remove the first echo, then (after the indices shift) remove what is
        // now index 1: the "Build complete" echo.
        complex_rule.remove_command(0);
        complex_rule.remove_command(1);

        let final_recipes: Vec<_> = complex_rule.recipes().collect();
5006 assert_eq!(final_recipes.len(), 2);
5007 assert!(final_recipes[0].contains("$(CC)"));
5008 assert!(final_recipes[1].contains("chmod"));
5009 }
5010
5011 #[test]
5012 fn test_variable_definition_remove() {
5013 let makefile: Makefile = r#"VAR1 = value1
5014VAR2 = value2
5015VAR3 = value3
5016"#
5017 .parse()
5018 .unwrap();
5019
5020 assert_eq!(makefile.variable_definitions().count(), 3);
5022
5023 let mut var2 = makefile
5025 .variable_definitions()
5026 .nth(1)
5027 .expect("Should have second variable");
5028 assert_eq!(var2.name(), Some("VAR2".to_string()));
5029 var2.remove();
5030
5031 assert_eq!(makefile.variable_definitions().count(), 2);
5033 let var_names: Vec<_> = makefile
5034 .variable_definitions()
5035 .filter_map(|v| v.name())
5036 .collect();
5037 assert_eq!(var_names, vec!["VAR1", "VAR3"]);
5038 }
5039
5040 #[test]
5041 fn test_variable_definition_set_value() {
5042 let makefile: Makefile = "VAR = old_value\n".parse().unwrap();
5043
5044 let mut var = makefile
5045 .variable_definitions()
5046 .next()
5047 .expect("Should have variable");
5048 assert_eq!(var.raw_value(), Some("old_value".to_string()));
5049
5050 var.set_value("new_value");
5052
5053 assert_eq!(var.raw_value(), Some("new_value".to_string()));
5055 assert!(makefile.code().contains("VAR = new_value"));
5056 }
5057
    #[test]
    fn test_variable_definition_set_value_preserves_format() {
        let makefile: Makefile = "export VAR := old_value\n".parse().unwrap();

        let mut var = makefile
            .variable_definitions()
            .next()
            .expect("Should have variable");
        assert_eq!(var.raw_value(), Some("old_value".to_string()));

        var.set_value("new_value");

        assert_eq!(var.raw_value(), Some("new_value".to_string()));
        let code = makefile.code();
        assert!(code.contains("export"), "Should preserve export prefix");
        assert!(code.contains(":="), "Should preserve := operator");
        assert!(code.contains("new_value"), "Should have new value");
    }

    #[test]
    fn test_makefile_find_variable() {
        let makefile: Makefile = r#"VAR1 = value1
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let vars: Vec<_> = makefile.find_variable("VAR2").collect();
        assert_eq!(vars.len(), 1);
        assert_eq!(vars[0].name(), Some("VAR2".to_string()));
        assert_eq!(vars[0].raw_value(), Some("value2".to_string()));

        assert_eq!(makefile.find_variable("NONEXISTENT").count(), 0);
    }

    #[test]
    fn test_makefile_find_variable_with_export() {
        let makefile: Makefile = r#"VAR1 = value1
export VAR2 := value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let vars: Vec<_> = makefile.find_variable("VAR2").collect();
        assert_eq!(vars.len(), 1);
        assert_eq!(vars[0].name(), Some("VAR2".to_string()));
        assert_eq!(vars[0].raw_value(), Some("value2".to_string()));
    }

    #[test]
    fn test_variable_definition_is_export() {
        let makefile: Makefile = r#"VAR1 = value1
export VAR2 := value2
export VAR3 = value3
VAR4 := value4
"#
        .parse()
        .unwrap();

        let vars: Vec<_> = makefile.variable_definitions().collect();
        assert_eq!(vars.len(), 4);

        assert!(!vars[0].is_export());
        assert!(vars[1].is_export());
        assert!(vars[2].is_export());
        assert!(!vars[3].is_export());
    }

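    // A variable defined several times yields one match per definition, in
    // source order.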
    #[test]
    fn test_makefile_find_variable_multiple() {
        let makefile: Makefile = r#"VAR1 = value1
VAR1 = value2
VAR2 = other
VAR1 = value3
"#
        .parse()
        .unwrap();

        let vars: Vec<_> = makefile.find_variable("VAR1").collect();
        assert_eq!(vars.len(), 3);
        assert_eq!(vars[0].raw_value(), Some("value1".to_string()));
        assert_eq!(vars[1].raw_value(), Some("value2".to_string()));
        assert_eq!(vars[2].raw_value(), Some("value3".to_string()));

        let var2s: Vec<_> = makefile.find_variable("VAR2").collect();
        assert_eq!(var2s.len(), 1);
        assert_eq!(var2s[0].raw_value(), Some("other".to_string()));
    }

    #[test]
    fn test_variable_remove_and_find() {
        let makefile: Makefile = r#"VAR1 = value1
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .find_variable("VAR2")
            .next()
            .expect("Should find VAR2");
        var2.remove();

        assert_eq!(makefile.find_variable("VAR2").count(), 0);

        assert_eq!(makefile.find_variable("VAR1").count(), 1);
        assert_eq!(makefile.find_variable("VAR3").count(), 1);
    }

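    // The following removal tests check that a definition's attached comments
    // and surrounding blank lines are cleaned up along with the definition.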
    #[test]
    fn test_variable_remove_with_comment() {
        let makefile: Makefile = r#"VAR1 = value1
# This is a comment about VAR2
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        assert_eq!(var2.name(), Some("VAR2".to_string()));
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\nVAR3 = value3\n");
    }

    #[test]
    fn test_variable_remove_with_multiple_comments() {
        let makefile: Makefile = r#"VAR1 = value1
# Comment line 1
# Comment line 2
# Comment line 3
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\nVAR3 = value3\n");
    }

    #[test]
    fn test_variable_remove_with_empty_line() {
        let makefile: Makefile = r#"VAR1 = value1

# Comment about VAR2
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\nVAR3 = value3\n");
    }

    #[test]
    fn test_variable_remove_with_multiple_empty_lines() {
        let makefile: Makefile = r#"VAR1 = value1


# Comment about VAR2
VAR2 = value2
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        assert_eq!(makefile.code(), "VAR1 = value1\n\nVAR3 = value3\n");
    }

    #[test]
    fn test_rule_remove_with_comment() {
        let makefile: Makefile = r#"rule1:
	command1

# Comment about rule2
rule2:
	command2
rule3:
	command3
"#
        .parse()
        .unwrap();

        let rule2 = makefile.rules().nth(1).expect("Should have second rule");
        rule2.remove().unwrap();

        assert_eq!(
            makefile.code(),
            "rule1:\n\tcommand1\n\nrule3:\n\tcommand3\n"
        );
    }

    #[test]
    fn test_variable_remove_preserves_shebang() {
        let makefile: Makefile = r#"#!/usr/bin/make -f
# This is a regular comment
VAR1 = value1
VAR2 = value2
"#
        .parse()
        .unwrap();

        let mut var1 = makefile.variable_definitions().next().unwrap();
        var1.remove();

        let code = makefile.code();
        assert!(code.starts_with("#!/usr/bin/make -f"));
        assert!(!code.contains("regular comment"));
        assert!(!code.contains("VAR1"));
        assert!(code.contains("VAR2"));
    }

    #[test]
    fn test_variable_remove_preserves_subsequent_comments() {
        let makefile: Makefile = r#"VAR1 = value1
# Comment about VAR2
VAR2 = value2

# Comment about VAR3
VAR3 = value3
"#
        .parse()
        .unwrap();

        let mut var2 = makefile
            .variable_definitions()
            .nth(1)
            .expect("Should have second variable");
        var2.remove();

        let code = makefile.code();
        assert_eq!(
            code,
            "VAR1 = value1\n\n# Comment about VAR3\nVAR3 = value3\n"
        );
    }

    #[test]
    fn test_variable_remove_after_shebang_preserves_empty_line() {
        let makefile: Makefile = r#"#!/usr/bin/make -f
export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed

%:
	dh $@
"#
        .parse()
        .unwrap();

        let mut var = makefile.variable_definitions().next().unwrap();
        var.remove();

        assert_eq!(makefile.code(), "#!/usr/bin/make -f\n\n%:\n\tdh $@\n");
    }

    #[test]
    fn test_rule_add_prerequisite() {
        let mut rule: Rule = "target: dep1\n".parse().unwrap();
        rule.add_prerequisite("dep2").unwrap();
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
        assert_eq!(rule.to_string(), "target: dep1 dep2\n");
    }

    #[test]
    fn test_rule_add_prerequisite_to_rule_without_prereqs() {
        let mut rule: Rule = "target:\n".parse().unwrap();
        rule.add_prerequisite("dep1").unwrap();
        assert_eq!(rule.prerequisites().collect::<Vec<_>>(), vec!["dep1"]);
        assert_eq!(rule.to_string(), "target: dep1\n");
    }

    #[test]
    fn test_rule_remove_prerequisite() {
        let mut rule: Rule = "target: dep1 dep2 dep3\n".parse().unwrap();
        assert!(rule.remove_prerequisite("dep2").unwrap());
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep3"]
        );
        assert!(!rule.remove_prerequisite("nonexistent").unwrap());
    }

    #[test]
    fn test_rule_set_prerequisites() {
        let mut rule: Rule = "target: old_dep\n".parse().unwrap();
        rule.set_prerequisites(vec!["new_dep1", "new_dep2"])
            .unwrap();
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["new_dep1", "new_dep2"]
        );
    }

    #[test]
    fn test_rule_set_prerequisites_empty() {
        let mut rule: Rule = "target: dep1 dep2\n".parse().unwrap();
        rule.set_prerequisites(vec![]).unwrap();
        assert_eq!(rule.prerequisites().count(), 0);
    }

    #[test]
    fn test_rule_add_target() {
        let mut rule: Rule = "target1: dep1\n".parse().unwrap();
        rule.add_target("target2").unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target1", "target2"]
        );
    }

    #[test]
    fn test_rule_set_targets() {
        let mut rule: Rule = "old_target: dependency\n".parse().unwrap();
        rule.set_targets(vec!["new_target1", "new_target2"])
            .unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["new_target1", "new_target2"]
        );
    }

    #[test]
    fn test_rule_set_targets_empty() {
        let mut rule: Rule = "target: dep1\n".parse().unwrap();
        let result = rule.set_targets(vec![]);
        assert!(result.is_err());
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["target"]);
    }

    #[test]
    fn test_rule_has_target() {
        let rule: Rule = "target1 target2: dependency\n".parse().unwrap();
        assert!(rule.has_target("target1"));
        assert!(rule.has_target("target2"));
        assert!(!rule.has_target("target3"));
        assert!(!rule.has_target("nonexistent"));
    }

    #[test]
    fn test_rule_rename_target() {
        let mut rule: Rule = "old_target: dependency\n".parse().unwrap();
        assert!(rule.rename_target("old_target", "new_target").unwrap());
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["new_target"]);
        assert!(!rule.rename_target("nonexistent", "something").unwrap());
    }

    #[test]
    fn test_rule_rename_target_multiple() {
        let mut rule: Rule = "target1 target2 target3: dependency\n".parse().unwrap();
        assert!(rule.rename_target("target2", "renamed_target").unwrap());
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target1", "renamed_target", "target3"]
        );
    }

    #[test]
    fn test_rule_remove_target() {
        let mut rule: Rule = "target1 target2 target3: dependency\n".parse().unwrap();
        assert!(rule.remove_target("target2").unwrap());
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target1", "target3"]
        );
        assert!(!rule.remove_target("nonexistent").unwrap());
    }

    #[test]
    fn test_rule_remove_target_last() {
        let mut rule: Rule = "single_target: dependency\n".parse().unwrap();
        let result = rule.remove_target("single_target");
        assert!(result.is_err());
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["single_target"]);
    }

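    // Adding, renaming, and removing targets must leave the rule's
    // prerequisites and recipe lines untouched.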
    #[test]
    fn test_rule_target_manipulation_preserves_prerequisites() {
        let mut rule: Rule = "target1 target2: dep1 dep2\n\tcommand".parse().unwrap();

        rule.remove_target("target1").unwrap();
        assert_eq!(rule.targets().collect::<Vec<_>>(), vec!["target2"]);
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
        assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);

        rule.add_target("target3").unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["target2", "target3"]
        );
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
        assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);

        rule.rename_target("target2", "renamed").unwrap();
        assert_eq!(
            rule.targets().collect::<Vec<_>>(),
            vec!["renamed", "target3"]
        );
        assert_eq!(
            rule.prerequisites().collect::<Vec<_>>(),
            vec!["dep1", "dep2"]
        );
        assert_eq!(rule.recipes().collect::<Vec<_>>(), vec!["command"]);
    }

    #[test]
    fn test_rule_remove() {
        let makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
        let rule = makefile.find_rule_by_target("rule1").unwrap();
        rule.remove().unwrap();
        assert_eq!(makefile.rules().count(), 1);
        assert!(makefile.find_rule_by_target("rule1").is_none());
        assert!(makefile.find_rule_by_target("rule2").is_some());
    }

    #[test]
    fn test_makefile_find_rule_by_target() {
        let makefile: Makefile = "rule1:\n\tcommand1\nrule2:\n\tcommand2\n".parse().unwrap();
        let rule = makefile.find_rule_by_target("rule2");
        assert!(rule.is_some());
        assert_eq!(rule.unwrap().targets().collect::<Vec<_>>(), vec!["rule2"]);
        assert!(makefile.find_rule_by_target("nonexistent").is_none());
    }

    #[test]
    fn test_makefile_find_rules_by_target() {
        let makefile: Makefile = "rule1:\n\tcommand1\nrule1:\n\tcommand2\nrule2:\n\tcommand3\n"
            .parse()
            .unwrap();
        assert_eq!(makefile.find_rules_by_target("rule1").count(), 2);
        assert_eq!(makefile.find_rules_by_target("rule2").count(), 1);
        assert_eq!(makefile.find_rules_by_target("nonexistent").count(), 0);
    }

    #[test]
    fn test_makefile_add_phony_target() {
        let mut makefile = Makefile::new();
        makefile.add_phony_target("clean").unwrap();
        assert!(makefile.is_phony("clean"));
        assert_eq!(makefile.phony_targets().collect::<Vec<_>>(), vec!["clean"]);
    }

    #[test]
    fn test_makefile_add_phony_target_existing() {
        let mut makefile: Makefile = ".PHONY: test\n".parse().unwrap();
        makefile.add_phony_target("clean").unwrap();
        assert!(makefile.is_phony("test"));
        assert!(makefile.is_phony("clean"));
        let targets: Vec<_> = makefile.phony_targets().collect();
        assert!(targets.contains(&"test".to_string()));
        assert!(targets.contains(&"clean".to_string()));
    }

    #[test]
    fn test_makefile_remove_phony_target() {
        let mut makefile: Makefile = ".PHONY: clean test\n".parse().unwrap();
        assert!(makefile.remove_phony_target("clean").unwrap());
        assert!(!makefile.is_phony("clean"));
        assert!(makefile.is_phony("test"));
        assert!(!makefile.remove_phony_target("nonexistent").unwrap());
    }

    #[test]
    fn test_makefile_remove_phony_target_last() {
        let mut makefile: Makefile = ".PHONY: clean\n".parse().unwrap();
        assert!(makefile.remove_phony_target("clean").unwrap());
        assert!(!makefile.is_phony("clean"));
        assert!(makefile.find_rule_by_target(".PHONY").is_none());
    }

    #[test]
    fn test_makefile_is_phony() {
        let makefile: Makefile = ".PHONY: clean test\n".parse().unwrap();
        assert!(makefile.is_phony("clean"));
        assert!(makefile.is_phony("test"));
        assert!(!makefile.is_phony("build"));
    }

    #[test]
    fn test_makefile_phony_targets() {
        let makefile: Makefile = ".PHONY: clean test build\n".parse().unwrap();
        let phony_targets: Vec<_> = makefile.phony_targets().collect();
        assert_eq!(phony_targets, vec!["clean", "test", "build"]);
    }

    #[test]
    fn test_makefile_phony_targets_empty() {
        let makefile = Makefile::new();
        assert_eq!(makefile.phony_targets().count(), 0);
    }

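    // Recipes may contain comment lines and blank lines; the relaxed reader
    // should still attach the real commands to the right rule.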
    #[test]
    fn test_recipe_with_leading_comments_and_blank_lines() {
        let makefile_text = r#"#!/usr/bin/make

%:
	dh $@

override_dh_build:
	# The next line is empty

	dh_python3
"#;
        let makefile = Makefile::read_relaxed(makefile_text.as_bytes()).unwrap();

        let rules: Vec<_> = makefile.rules().collect();
        assert_eq!(rules.len(), 2, "Expected 2 rules");

        let rule0 = &rules[0];
        assert_eq!(rule0.targets().collect::<Vec<_>>(), vec!["%"]);
        assert_eq!(rule0.recipes().collect::<Vec<_>>(), vec!["dh $@"]);

        let rule1 = &rules[1];
        assert_eq!(
            rule1.targets().collect::<Vec<_>>(),
            vec!["override_dh_build"]
        );

        let recipes: Vec<_> = rule1.recipes().collect();
        assert!(
            !recipes.is_empty(),
            "Expected at least one recipe for override_dh_build, got none"
        );
        assert!(
            recipes.contains(&"dh_python3".to_string()),
            "Expected 'dh_python3' in recipes, got: {:?}",
            recipes
        );
    }
}