1use super::lexer::Token;
2
#[derive(Debug, Clone, PartialEq, Eq)]
/// Abstract syntax tree for a parsed shell script fragment.
///
/// Produced by [`parse`] from the lexer's token stream.
pub enum Ast {
    /// A pipeline of one or more simple/compound commands (`a | b | c`).
    Pipeline(Vec<ShellCommand>),
    /// Commands executed one after another (`a; b`, newline-separated).
    Sequence(Vec<Ast>),
    /// A global variable assignment (`VAR=value`).
    Assignment {
        var: String,
        value: String,
    },
    /// A function-scoped assignment (`local VAR=value`).
    LocalAssignment {
        var: String,
        value: String,
    },
    /// An `if`/`elif`/`else`/`fi` construct.
    If {
        // Each entry pairs a condition with its body — presumably the
        // `if` arm followed by any `elif` arms; TODO confirm against parse_if.
        branches: Vec<(Box<Ast>, Box<Ast>)>,
        /// Body of the `else` arm, if present.
        else_branch: Option<Box<Ast>>,
    },
    /// A `case word in ... esac` construct.
    Case {
        word: String,
        // Each entry pairs the arm's patterns with the arm's body.
        cases: Vec<(Vec<String>, Ast)>,
        /// Body of the `*)` default arm, if present.
        default: Option<Box<Ast>>,
    },
    /// A `for variable in items; do body; done` loop.
    For {
        variable: String,
        items: Vec<String>,
        body: Box<Ast>,
    },
    /// A `while condition; do body; done` loop.
    While {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// An `until condition; do body; done` loop.
    Until {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// A `name() { body }` function definition.
    FunctionDefinition {
        name: String,
        body: Box<Ast>,
    },
    /// Invocation of a previously defined function.
    FunctionCall {
        name: String,
        args: Vec<String>,
    },
    /// A `return [value]` statement inside a function.
    Return {
        value: Option<String>,
    },
    /// Short-circuit conjunction (`left && right`).
    And {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// Short-circuit disjunction (`left || right`).
    Or {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// A `( body )` subshell.
    Subshell {
        body: Box<Ast>,
    },
    /// A `{ body; }` group run in the current shell environment.
    CommandGroup {
        body: Box<Ast>,
    },
    /// A `! command` negation of an exit status.
    Negation {
        command: Box<Ast>,
    },
}
72
#[derive(Debug, Clone, PartialEq, Eq)]
/// A single I/O redirection attached to a command.
///
/// `i32` payloads are explicit file-descriptor numbers taken from the
/// lexer's `RedirectFd*` tokens; `String` payloads are target filenames
/// (or, for here-docs/here-strings, the delimiter/content).
pub enum Redirection {
    /// `< file`
    Input(String),
    /// `> file`
    Output(String),
    /// `>| file` — output ignoring `noclobber`.
    OutputClobber(String),
    /// `>> file`
    Append(String),
    /// `n< file`
    FdInput(i32, String),
    /// `n> file`
    FdOutput(i32, String),
    /// `n>| file`
    FdOutputClobber(i32, String),
    /// `n>> file`
    FdAppend(i32, String),
    /// `n>&m` — duplicate fd `m` onto fd `n` (from_fd, to_fd).
    FdDuplicate(i32, i32),
    /// `n>&-` — close fd `n`.
    FdClose(i32),
    /// `n<> file` — open for both reading and writing.
    FdInputOutput(i32, String),
    /// `<< delimiter` here-document: (delimiter, quoted-marker).
    // Second field is built via `quoted.to_string()` from the lexer token;
    // presumably a stringified quoting flag — TODO confirm in lexer.
    HereDoc(String, String),
    /// `<<< content` here-string.
    HereString(String),
}
103
#[derive(Debug, Clone, PartialEq, Eq, Default)]
/// One stage of a pipeline: either a simple command (`args` non-empty)
/// or a compound construct (`compound` set, `args` empty), plus any
/// redirections attached to it.
pub struct ShellCommand {
    /// Command name followed by its arguments; empty for compound stages.
    pub args: Vec<String>,
    /// Redirections applied to this stage, in source order.
    pub redirections: Vec<Redirection>,
    /// A subshell/command-group/etc. AST when this pipeline stage is a
    /// compound command rather than a simple one.
    pub compound: Option<Box<Ast>>,
}
113
/// Returns `true` when `name` is a valid shell variable/function name:
/// non-empty, starting with a letter or `_`, and containing only
/// letters, digits, or `_` thereafter.
///
/// Fix: the previous version only inspected the first character, so
/// names like `foo-bar` or `a.b` were accepted as valid.
fn is_valid_variable_name(name: &str) -> bool {
    let mut chars = name.chars();
    match chars.next() {
        // First char must be a letter or underscore; the rest may also
        // include digits.
        Some(first) if first.is_alphabetic() || first == '_' => {
            chars.all(|c| c.is_alphanumeric() || c == '_')
        }
        _ => false,
    }
}
123
124fn create_empty_body_ast() -> Ast {
127 Ast::Pipeline(vec![ShellCommand {
128 args: vec!["true".to_string()],
129 redirections: Vec::new(),
130 compound: None,
131 }])
132}
133
134fn skip_newlines(tokens: &[Token], i: &mut usize) {
137 while *i < tokens.len() && tokens[*i] == Token::Newline {
138 *i += 1;
139 }
140}
141
142fn skip_to_matching_fi(tokens: &[Token], i: &mut usize) {
145 let mut if_depth = 1;
146 *i += 1; while *i < tokens.len() && if_depth > 0 {
148 match tokens[*i] {
149 Token::If => if_depth += 1,
150 Token::Fi => if_depth -= 1,
151 _ => {}
152 }
153 *i += 1;
154 }
155}
156
157fn skip_to_matching_done(tokens: &[Token], i: &mut usize) {
160 let mut loop_depth = 1;
161 *i += 1; while *i < tokens.len() && loop_depth > 0 {
163 match tokens[*i] {
164 Token::For | Token::While | Token::Until => loop_depth += 1,
165 Token::Done => loop_depth -= 1,
166 _ => {}
167 }
168 *i += 1;
169 }
170}
171
172fn skip_to_matching_esac(tokens: &[Token], i: &mut usize) {
174 *i += 1; while *i < tokens.len() {
176 if tokens[*i] == Token::Esac {
177 *i += 1;
178 break;
179 }
180 *i += 1;
181 }
182}
183
184pub fn parse(tokens: Vec<Token>) -> Result<Ast, String> {
185 if tokens.len() >= 4
187 && let (Token::Word(_), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
188 (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
189 {
190 let mut brace_depth = 1; let mut function_end = tokens.len();
194 let mut j = 4; while j < tokens.len() {
197 match &tokens[j] {
198 Token::LeftBrace => {
199 brace_depth += 1;
200 j += 1;
201 }
202 Token::RightBrace => {
203 brace_depth -= 1;
204 if brace_depth == 0 {
205 function_end = j + 1; break;
207 }
208 j += 1;
209 }
210 Token::If => {
211 let mut if_depth = 1;
213 j += 1;
214 while j < tokens.len() && if_depth > 0 {
215 match tokens[j] {
216 Token::If => if_depth += 1,
217 Token::Fi => if_depth -= 1,
218 _ => {}
219 }
220 j += 1;
221 }
222 }
223 Token::For | Token::While | Token::Until => {
224 let mut for_depth = 1;
226 j += 1;
227 while j < tokens.len() && for_depth > 0 {
228 match tokens[j] {
229 Token::For | Token::While | Token::Until => for_depth += 1,
230 Token::Done => for_depth -= 1,
231 _ => {}
232 }
233 j += 1;
234 }
235 }
236 Token::Case => {
237 j += 1;
239 while j < tokens.len() {
240 if tokens[j] == Token::Esac {
241 j += 1;
242 break;
243 }
244 j += 1;
245 }
246 }
247 _ => {
248 j += 1;
249 }
250 }
251 }
252
253 if brace_depth == 0 && function_end <= tokens.len() {
254 let function_tokens = &tokens[0..function_end];
256 let remaining_tokens = &tokens[function_end..];
257
258 let function_ast = parse_function_definition(function_tokens)?;
259
260 return if remaining_tokens.is_empty() {
261 Ok(function_ast)
262 } else {
263 let remaining_ast = parse_commands_sequentially(remaining_tokens)?;
265 Ok(Ast::Sequence(vec![function_ast, remaining_ast]))
266 };
267 }
268 }
269
270 if tokens.len() >= 2
272 && let Token::Word(ref word) = tokens[0]
273 && let Some(paren_pos) = word.find('(')
274 && word.ends_with(')')
275 && paren_pos > 0
276 && tokens[1] == Token::LeftBrace
277 {
278 return parse_function_definition(&tokens);
279 }
280
281 parse_commands_sequentially(&tokens)
283}
284
/// Parses a single command's tokens (no `;`/`&&`/`||` separators) into
/// an [`Ast`] by trying a fixed cascade of special forms — assignments,
/// `local`, `return`, control-flow keywords, function definitions —
/// before falling back to [`parse_pipeline`].
///
/// # Errors
/// Returns `Err` on empty input, invalid variable names in `local`
/// forms, or any error from the delegated sub-parsers.
fn parse_slice(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.is_empty() {
        return Err("No commands found".to_string());
    }

    // `VAR=prefix value` as two Word tokens: the value is the text after
    // `=` concatenated with the second word (no separator — presumably
    // the lexer splits at a quote boundary; TODO confirm).
    if tokens.len() == 2 {
        if let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
            && let Some(eq_pos) = var_eq.find('=')
            && eq_pos > 0
            && eq_pos < var_eq.len()
        {
            let var = var_eq[..eq_pos].to_string();
            let full_value = format!("{}{}", &var_eq[eq_pos + 1..], value);
            if is_valid_variable_name(&var) {
                return Ok(Ast::Assignment {
                    var,
                    value: full_value,
                });
            }
        }
    }

    // `VAR= value` (equals sign last in the first word).
    // NOTE(review): `eq_pos < var_eq.len()` above always holds for a
    // found index, so this case appears unreachable — the previous
    // branch already covers eq_pos == len-1 with an identical result.
    if tokens.len() == 2
        && let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos == var_eq.len() - 1
    {
        let var = var_eq[..eq_pos].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment {
                var,
                value: value.clone(),
            });
        }
    }

    // `local VAR value` or `local VAR= value` (three tokens).
    if tokens.len() == 3
        && let (Token::Local, Token::Word(var), Token::Word(value)) =
            (&tokens[0], &tokens[1], &tokens[2])
    {
        // Strip a trailing `=` left on the variable name by the lexer.
        let clean_var = if var.ends_with('=') {
            &var[..var.len() - 1]
        } else {
            var
        };
        if is_valid_variable_name(clean_var) {
            return Ok(Ast::LocalAssignment {
                var: clean_var.to_string(),
                value: value.clone(),
            });
        } else {
            return Err(format!("Invalid variable name: {}", clean_var));
        }
    }

    // `return` or `return value`.
    if !tokens.is_empty()
        && tokens.len() <= 2
        && let Token::Return = &tokens[0]
    {
        if tokens.len() == 1 {
            return Ok(Ast::Return { value: None });
        } else if let Token::Word(word) = &tokens[1] {
            return Ok(Ast::Return {
                value: Some(word.clone()),
            });
        }
        // `return <non-word>` deliberately falls through to the
        // remaining cases.
    }

    // `local VAR=value` fused into one word.
    if tokens.len() == 2
        && let (Token::Local, Token::Word(var_eq)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos < var_eq.len()
    {
        let var = var_eq[..eq_pos].to_string();
        let value = var_eq[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::LocalAssignment { var, value });
        } else {
            return Err(format!("Invalid variable name: {}", var));
        }
    }

    // `local VAR` with no value: declare with an empty string.
    if tokens.len() == 2
        && let (Token::Local, Token::Word(var)) = (&tokens[0], &tokens[1])
        && !var.contains('=')
    {
        if is_valid_variable_name(var) {
            return Ok(Ast::LocalAssignment {
                var: var.clone(),
                value: String::new(),
            });
        } else {
            return Err(format!("Invalid variable name: {}", var));
        }
    }

    // Plain `VAR=value` as a single word.
    if tokens.len() == 1
        && let Token::Word(ref word) = tokens[0]
        && let Some(eq_pos) = word.find('=')
        && eq_pos > 0
        && eq_pos < word.len()
    {
        let var = word[..eq_pos].to_string();
        let value = word[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment { var, value });
        }
        // Invalid name: fall through and treat as an ordinary command.
    }

    // Control-flow constructs dispatch to their dedicated parsers.
    if let Token::If = tokens[0] {
        return parse_if(tokens);
    }

    if let Token::Case = tokens[0] {
        return parse_case(tokens);
    }

    if let Token::For = tokens[0] {
        return parse_for(tokens);
    }

    if let Token::While = tokens[0] {
        return parse_while(tokens);
    }

    if let Token::Until = tokens[0] {
        return parse_until(tokens);
    }

    // Function definition, name split across tokens: `name ( ) {`.
    if tokens.len() >= 4
        && let (Token::Word(word), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
        && is_valid_variable_name(word)
    {
        return parse_function_definition(tokens);
    }

    // Function definition, name fused as `name()` followed by `{`.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
    {
        let func_name = &word[..paren_pos];
        if is_valid_variable_name(func_name) && tokens[1] == Token::LeftBrace {
            return parse_function_definition(tokens);
        }
    }

    // Default: a (possibly single-command) pipeline.
    parse_pipeline(tokens)
}
484
/// Parses exactly one command starting at the front of `tokens`,
/// returning its AST and the number of tokens consumed (including any
/// leading newlines and a leading `!`).
///
/// The extent of the command is found first (delimiter matching for
/// compound constructs, operator scanning for simple commands), then
/// the extracted slice is handed to [`parse_slice`].
///
/// # Errors
/// Returns `Err` if no command is present, `!` has no operand, a
/// delimiter is unmatched, or `parse_slice` fails.
fn parse_single_command(tokens: &[Token]) -> Result<(Ast, usize), String> {
    if tokens.is_empty() {
        return Err("Expected command".to_string());
    }

    let mut i = 0;

    // Leading newlines are part of what we consume.
    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    if i >= tokens.len() {
        return Err("Expected command".to_string());
    }

    // `! command`: recursively parse the operand and wrap it.
    if tokens[i] == Token::Bang {
        i += 1;
        // Newlines may separate `!` from its command.
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        if i >= tokens.len() {
            return Err("Expected command after !".to_string());
        }

        let (negated_ast, consumed) = parse_single_command(&tokens[i..])?;
        i += consumed;

        return Ok((
            Ast::Negation {
                command: Box::new(negated_ast),
            },
            i,
        ));
    }

    let start = i;

    // Determine where this command ends, depending on how it starts.
    match &tokens[i] {
        Token::LeftParen => {
            // Subshell: consume through the matching `)`.
            let mut paren_depth = 1;
            i += 1;
            while i < tokens.len() && paren_depth > 0 {
                match tokens[i] {
                    Token::LeftParen => paren_depth += 1,
                    Token::RightParen => paren_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            if paren_depth != 0 {
                return Err("Unmatched parenthesis".to_string());
            }
        }
        Token::LeftBrace => {
            // Command group: consume through the matching `}`.
            let mut brace_depth = 1;
            i += 1;
            while i < tokens.len() && brace_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => brace_depth += 1,
                    Token::RightBrace => brace_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            if brace_depth != 0 {
                return Err("Unmatched brace".to_string());
            }
        }
        Token::If => {
            // Consume through the matching `fi`, honoring nesting.
            let mut if_depth = 1;
            i += 1;
            while i < tokens.len() && if_depth > 0 {
                match tokens[i] {
                    Token::If => if_depth += 1,
                    Token::Fi => {
                        if_depth -= 1;
                        if if_depth == 0 {
                            i += 1; // include the `fi`
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        }
        Token::For | Token::While | Token::Until => {
            // Consume through the matching `done`, honoring nesting.
            let mut loop_depth = 1;
            i += 1;
            while i < tokens.len() && loop_depth > 0 {
                match tokens[i] {
                    Token::For | Token::While | Token::Until => loop_depth += 1,
                    Token::Done => {
                        loop_depth -= 1;
                        if loop_depth == 0 {
                            i += 1; // include the `done`
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        }
        Token::Case => {
            // Consume through the next `esac` (nesting not tracked).
            i += 1;
            while i < tokens.len() {
                if tokens[i] == Token::Esac {
                    i += 1; // include the `esac`
                    break;
                }
                i += 1;
            }
        }
        _ => {
            // Simple command / pipeline: scan until an unnested
            // separator or closer, letting a trailing `|` continue the
            // command across newlines.
            let mut brace_depth = 0;
            let mut paren_depth = 0;
            let mut last_was_pipe = false;

            while i < tokens.len() {
                // Only stop on a boundary after the first token.
                if i > start {
                    match &tokens[i] {
                        Token::And | Token::Or => {
                            if brace_depth == 0 && paren_depth == 0 {
                                if last_was_pipe {
                                    return Err("Expected command after |".to_string());
                                }
                                break;
                            }
                        }
                        Token::Newline | Token::Semicolon => {
                            // A pending `|` keeps the command open past
                            // line breaks.
                            if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
                                break;
                            }
                        }
                        Token::RightBrace if brace_depth == 0 => break,
                        Token::RightParen if paren_depth == 0 => break,
                        _ => {}
                    }
                }

                // Track nesting and whether the last token was a `|`.
                match &tokens[i] {
                    Token::LeftBrace => {
                        brace_depth += 1;
                        last_was_pipe = false;
                    }
                    Token::RightBrace => {
                        if brace_depth > 0 {
                            brace_depth -= 1;
                            last_was_pipe = false;
                        }
                    }
                    Token::LeftParen => {
                        paren_depth += 1;
                        last_was_pipe = false;
                    }
                    Token::RightParen => {
                        if paren_depth > 0 {
                            paren_depth -= 1;
                            last_was_pipe = false;
                        }
                    }
                    Token::Pipe => last_was_pipe = true,
                    Token::Word(_) => last_was_pipe = false,
                    _ => last_was_pipe = false,
                }
                i += 1;
            }
        }
    }

    let command_tokens = &tokens[start..i];

    // Guard against a scan that made no progress (would loop forever in
    // callers).
    if i == start {
        return Err("Internal parser error: parse_single_command consumed no tokens".to_string());
    }

    let ast = parse_slice(command_tokens)?;
    Ok((ast, i))
}
689
690fn parse_next_command(tokens: &[Token]) -> Result<(Ast, usize), String> {
694 let (mut ast, mut i) = parse_single_command(tokens)?;
696
697 loop {
699 if i >= tokens.len() || (tokens[i] != Token::And && tokens[i] != Token::Or) {
701 break;
702 }
703
704 let operator = tokens[i].clone();
705 i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
709 i += 1;
710 }
711
712 if i >= tokens.len() {
713 return Err("Expected command after operator".to_string());
714 }
715
716 let (right_ast, consumed) = parse_single_command(&tokens[i..])?;
718 i += consumed;
719
720 ast = match operator {
722 Token::And => Ast::And {
723 left: Box::new(ast),
724 right: Box::new(right_ast),
725 },
726 Token::Or => Ast::Or {
727 left: Box::new(ast),
728 right: Box::new(right_ast),
729 },
730 _ => unreachable!(),
731 };
732 }
733
734 Ok((ast, i))
735}
736
737fn parse_commands_sequentially(tokens: &[Token]) -> Result<Ast, String> {
756 let mut i = 0;
757 let mut commands = Vec::new();
758
759 while i < tokens.len() {
760 while i < tokens.len() {
762 match &tokens[i] {
763 Token::Newline => {
764 i += 1;
765 }
766 Token::Word(word) if word.starts_with('#') => {
767 while i < tokens.len() && tokens[i] != Token::Newline {
769 i += 1;
770 }
771 if i < tokens.len() {
772 i += 1; }
774 }
775 _ => break,
776 }
777 }
778
779 if i >= tokens.len() {
780 break;
781 }
782
783 let start = i;
785
786 if tokens[i] == Token::LeftParen {
789 let mut paren_depth = 1;
791 let mut j = i + 1;
792
793 while j < tokens.len() && paren_depth > 0 {
794 match tokens[j] {
795 Token::LeftParen => paren_depth += 1,
796 Token::RightParen => paren_depth -= 1,
797 _ => {}
798 }
799 j += 1;
800 }
801
802 if paren_depth != 0 {
803 return Err("Unmatched parenthesis in subshell".to_string());
804 }
805
806 let subshell_tokens = &tokens[i + 1..j - 1];
808
809 let body_ast = if subshell_tokens.is_empty() {
812 return Err("Empty subshell".to_string());
813 } else {
814 parse_commands_sequentially(subshell_tokens)?
815 };
816
817 let mut subshell_ast = Ast::Subshell {
818 body: Box::new(body_ast),
819 };
820
821 i = j; let mut redirections = Vec::new();
825 while i < tokens.len() {
826 match &tokens[i] {
827 Token::RedirOut => {
828 i += 1;
829 if i < tokens.len() {
830 if let Token::Word(file) = &tokens[i] {
831 redirections.push(Redirection::Output(file.clone()));
832 i += 1;
833 }
834 }
835 }
836 Token::RedirOutClobber => {
837 i += 1;
838 if i >= tokens.len() {
839 return Err("expected filename after >|".to_string());
840 }
841 if let Token::Word(file) = &tokens[i] {
842 redirections.push(Redirection::OutputClobber(file.clone()));
843 i += 1;
844 } else {
845 return Err("expected filename after >|".to_string());
846 }
847 }
848 Token::RedirIn => {
849 i += 1;
850 if i < tokens.len() {
851 if let Token::Word(file) = &tokens[i] {
852 redirections.push(Redirection::Input(file.clone()));
853 i += 1;
854 }
855 }
856 }
857 Token::RedirAppend => {
858 i += 1;
859 if i < tokens.len() {
860 if let Token::Word(file) = &tokens[i] {
861 redirections.push(Redirection::Append(file.clone()));
862 i += 1;
863 }
864 }
865 }
866 Token::RedirectFdOut(fd, file) => {
867 redirections.push(Redirection::FdOutput(*fd, file.clone()));
868 i += 1;
869 }
870 Token::RedirectFdOutClobber(fd, file) => {
871 redirections.push(Redirection::FdOutputClobber(*fd, file.clone()));
872 i += 1;
873 }
874 Token::RedirectFdIn(fd, file) => {
875 redirections.push(Redirection::FdInput(*fd, file.clone()));
876 i += 1;
877 }
878 Token::RedirectFdAppend(fd, file) => {
879 redirections.push(Redirection::FdAppend(*fd, file.clone()));
880 i += 1;
881 }
882 Token::RedirectFdDup(from_fd, to_fd) => {
883 redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
884 i += 1;
885 }
886 Token::RedirectFdClose(fd) => {
887 redirections.push(Redirection::FdClose(*fd));
888 i += 1;
889 }
890 Token::RedirectFdInOut(fd, file) => {
891 redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
892 i += 1;
893 }
894 Token::RedirHereDoc(delimiter, quoted) => {
895 redirections
896 .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
897 i += 1;
898 }
899 Token::RedirHereString(content) => {
900 redirections.push(Redirection::HereString(content.clone()));
901 i += 1;
902 }
903 _ => break,
904 }
905 }
906
907 if i < tokens.len() && tokens[i] == Token::Pipe {
909 let mut end = i;
911 let mut brace_depth = 0;
912 let mut paren_depth = 0;
913 let mut last_was_pipe = true; while end < tokens.len() {
915 match &tokens[end] {
916 Token::Pipe => last_was_pipe = true,
917 Token::LeftBrace => {
918 brace_depth += 1;
919 last_was_pipe = false;
920 }
921 Token::RightBrace => {
922 if brace_depth > 0 {
923 brace_depth -= 1;
924 } else {
925 break;
926 }
927 last_was_pipe = false;
928 }
929 Token::LeftParen => {
930 paren_depth += 1;
931 last_was_pipe = false;
932 }
933 Token::RightParen => {
934 if paren_depth > 0 {
935 paren_depth -= 1;
936 } else {
937 break;
938 }
939 last_was_pipe = false;
940 }
941 Token::Newline | Token::Semicolon => {
942 if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
943 break;
944 }
945 }
946 Token::Word(_) => last_was_pipe = false,
947 _ => {}
948 }
949 end += 1;
950 }
951
952 let pipeline_ast = parse_pipeline(&tokens[start..end])?;
953 commands.push(pipeline_ast);
954 i = end;
955 continue;
956 }
957
958 if !redirections.is_empty() {
960 subshell_ast = Ast::Pipeline(vec![ShellCommand {
961 args: Vec::new(),
962 redirections,
963 compound: Some(Box::new(subshell_ast)),
964 }]);
965 }
966
967 if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
969 let operator = tokens[i].clone();
970 i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
974 i += 1;
975 }
976
977 let (right_ast, consumed) = parse_next_command(&tokens[i..])?;
979 i += consumed;
980
981 let combined_ast = match operator {
983 Token::And => Ast::And {
984 left: Box::new(subshell_ast),
985 right: Box::new(right_ast),
986 },
987 Token::Or => Ast::Or {
988 left: Box::new(subshell_ast),
989 right: Box::new(right_ast),
990 },
991 _ => unreachable!(),
992 };
993
994 commands.push(combined_ast);
995
996 if i < tokens.len()
998 && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
999 {
1000 i += 1;
1001 }
1002 continue;
1003 } else {
1004 commands.push(subshell_ast);
1005 }
1006
1007 if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
1009 i += 1;
1010 }
1011 continue;
1012 }
1013
1014 if tokens[i] == Token::LeftBrace {
1016 let mut brace_depth = 1;
1018 let mut j = i + 1;
1019
1020 while j < tokens.len() && brace_depth > 0 {
1021 match tokens[j] {
1022 Token::LeftBrace => brace_depth += 1,
1023 Token::RightBrace => brace_depth -= 1,
1024 _ => {}
1025 }
1026 j += 1;
1027 }
1028
1029 if brace_depth != 0 {
1030 return Err("Unmatched brace in command group".to_string());
1031 }
1032
1033 let group_tokens = &tokens[i + 1..j - 1];
1035
1036 let body_ast = if group_tokens.is_empty() {
1039 return Err("Empty command group".to_string());
1040 } else {
1041 parse_commands_sequentially(group_tokens)?
1042 };
1043
1044 let mut group_ast = Ast::CommandGroup {
1045 body: Box::new(body_ast),
1046 };
1047
1048 i = j; let mut redirections = Vec::new();
1052 while i < tokens.len() {
1053 match &tokens[i] {
1054 Token::RedirOut => {
1055 i += 1;
1056 if i < tokens.len() {
1057 if let Token::Word(file) = &tokens[i] {
1058 redirections.push(Redirection::Output(file.clone()));
1059 i += 1;
1060 }
1061 }
1062 }
1063 Token::RedirOutClobber => {
1064 i += 1;
1065 if i >= tokens.len() {
1066 return Err("expected filename after >|".to_string());
1067 }
1068 if let Token::Word(file) = &tokens[i] {
1069 redirections.push(Redirection::OutputClobber(file.clone()));
1070 i += 1;
1071 } else {
1072 return Err("expected filename after >|".to_string());
1073 }
1074 }
1075 Token::RedirIn => {
1076 i += 1;
1077 if i < tokens.len() {
1078 if let Token::Word(file) = &tokens[i] {
1079 redirections.push(Redirection::Input(file.clone()));
1080 i += 1;
1081 }
1082 }
1083 }
1084 Token::RedirAppend => {
1085 i += 1;
1086 if i < tokens.len() {
1087 if let Token::Word(file) = &tokens[i] {
1088 redirections.push(Redirection::Append(file.clone()));
1089 i += 1;
1090 }
1091 }
1092 }
1093 Token::RedirectFdOut(fd, file) => {
1094 redirections.push(Redirection::FdOutput(*fd, file.clone()));
1095 i += 1;
1096 }
1097 Token::RedirectFdIn(fd, file) => {
1098 redirections.push(Redirection::FdInput(*fd, file.clone()));
1099 i += 1;
1100 }
1101 Token::RedirectFdAppend(fd, file) => {
1102 redirections.push(Redirection::FdAppend(*fd, file.clone()));
1103 i += 1;
1104 }
1105 Token::RedirectFdDup(from_fd, to_fd) => {
1106 redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
1107 i += 1;
1108 }
1109 Token::RedirectFdClose(fd) => {
1110 redirections.push(Redirection::FdClose(*fd));
1111 i += 1;
1112 }
1113 Token::RedirectFdInOut(fd, file) => {
1114 redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
1115 i += 1;
1116 }
1117 Token::RedirHereDoc(delimiter, quoted) => {
1118 redirections
1119 .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
1120 i += 1;
1121 }
1122 Token::RedirHereString(content) => {
1123 redirections.push(Redirection::HereString(content.clone()));
1124 i += 1;
1125 }
1126 _ => break,
1127 }
1128 }
1129
1130 if i < tokens.len() && tokens[i] == Token::Pipe {
1132 let mut end = i;
1134 let mut brace_depth = 0;
1135 let mut paren_depth = 0;
1136 let mut last_was_pipe = true; while end < tokens.len() {
1138 match &tokens[end] {
1139 Token::Pipe => last_was_pipe = true,
1140 Token::LeftBrace => {
1141 brace_depth += 1;
1142 last_was_pipe = false;
1143 }
1144 Token::RightBrace => {
1145 if brace_depth > 0 {
1146 brace_depth -= 1;
1147 } else {
1148 break;
1149 }
1150 last_was_pipe = false;
1151 }
1152 Token::LeftParen => {
1153 paren_depth += 1;
1154 last_was_pipe = false;
1155 }
1156 Token::RightParen => {
1157 if paren_depth > 0 {
1158 paren_depth -= 1;
1159 } else {
1160 break;
1161 }
1162 last_was_pipe = false;
1163 }
1164 Token::Newline | Token::Semicolon => {
1165 if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
1166 break;
1167 }
1168 }
1169 Token::Word(_) => last_was_pipe = false,
1170 _ => {}
1171 }
1172 end += 1;
1173 }
1174
1175 let pipeline_ast = parse_pipeline(&tokens[start..end])?;
1176 commands.push(pipeline_ast);
1177 i = end;
1178 continue;
1179 }
1180
1181 if !redirections.is_empty() {
1183 group_ast = Ast::Pipeline(vec![ShellCommand {
1184 args: Vec::new(),
1185 redirections,
1186 compound: Some(Box::new(group_ast)),
1187 }]);
1188 }
1189
1190 if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
1192 let operator = tokens[i].clone();
1193 i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
1197 i += 1;
1198 }
1199
1200 let (right_ast, consumed) = parse_next_command(&tokens[i..])?;
1202 i += consumed;
1203
1204 let combined_ast = match operator {
1206 Token::And => Ast::And {
1207 left: Box::new(group_ast),
1208 right: Box::new(right_ast),
1209 },
1210 Token::Or => Ast::Or {
1211 left: Box::new(group_ast),
1212 right: Box::new(right_ast),
1213 },
1214 _ => unreachable!(),
1215 };
1216
1217 commands.push(combined_ast);
1218
1219 if i < tokens.len()
1221 && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
1222 {
1223 i += 1;
1224 }
1225 continue;
1226 } else {
1227 commands.push(group_ast);
1228 }
1229
1230 if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
1232 i += 1;
1233 }
1234 continue;
1235 }
1236
1237 if tokens[i] == Token::If {
1239 let mut depth = 0;
1241 while i < tokens.len() {
1242 match tokens[i] {
1243 Token::If => depth += 1,
1244 Token::Fi => {
1245 depth -= 1;
1246 if depth == 0 {
1247 i += 1; break;
1249 }
1250 }
1251 _ => {}
1252 }
1253 i += 1;
1254 }
1255
1256 } else if tokens[i] == Token::For {
1259 let mut depth = 1; i += 1; while i < tokens.len() {
1263 match tokens[i] {
1264 Token::For | Token::While | Token::Until => depth += 1,
1265 Token::Done => {
1266 depth -= 1;
1267 if depth == 0 {
1268 i += 1; break;
1270 }
1271 }
1272 _ => {}
1273 }
1274 i += 1;
1275 }
1276 } else if tokens[i] == Token::While {
1277 let mut depth = 1; i += 1; while i < tokens.len() {
1281 match tokens[i] {
1282 Token::While | Token::For | Token::Until => depth += 1,
1283 Token::Done => {
1284 depth -= 1;
1285 if depth == 0 {
1286 i += 1; break;
1288 }
1289 }
1290 _ => {}
1291 }
1292 i += 1;
1293 }
1294 } else if tokens[i] == Token::Until {
1295 let mut depth = 1; i += 1; while i < tokens.len() {
1299 match tokens[i] {
1300 Token::Until | Token::For | Token::While => depth += 1,
1301 Token::Done => {
1302 depth -= 1;
1303 if depth == 0 {
1304 i += 1; break;
1306 }
1307 }
1308 _ => {}
1309 }
1310 i += 1;
1311 }
1312 } else if tokens[i] == Token::Case {
1313 while i < tokens.len() {
1315 if tokens[i] == Token::Esac {
1316 i += 1; break;
1318 }
1319 i += 1;
1320 }
1321 } else if i + 3 < tokens.len()
1322 && matches!(tokens[i], Token::Word(_))
1323 && tokens[i + 1] == Token::LeftParen
1324 && tokens[i + 2] == Token::RightParen
1325 && tokens[i + 3] == Token::LeftBrace
1326 {
1327 let mut brace_depth = 1;
1329 i += 4; while i < tokens.len() && brace_depth > 0 {
1331 match tokens[i] {
1332 Token::LeftBrace => brace_depth += 1,
1333 Token::RightBrace => brace_depth -= 1,
1334 _ => {}
1335 }
1336 i += 1;
1337 }
1338 } else {
1339 if matches!(tokens[i], Token::And | Token::Or | Token::Semicolon) {
1341 return Err(format!(
1342 "Unexpected operator at command start: {:?}",
1343 tokens[i]
1344 ));
1345 }
1346
1347 let mut brace_depth = 0;
1350 let mut paren_depth = 0;
1351 let mut last_was_pipe = false;
1352 while i < tokens.len() {
1353 match &tokens[i] {
1354 Token::LeftBrace => {
1355 brace_depth += 1;
1356 last_was_pipe = false;
1357 }
1358 Token::RightBrace => {
1359 if brace_depth > 0 {
1360 brace_depth -= 1;
1361 } else {
1362 break;
1363 }
1364 last_was_pipe = false;
1365 }
1366 Token::LeftParen => {
1367 paren_depth += 1;
1368 last_was_pipe = false;
1369 }
1370 Token::RightParen => {
1371 if paren_depth > 0 {
1372 paren_depth -= 1;
1373 } else {
1374 break;
1375 }
1376 last_was_pipe = false;
1377 }
1378 Token::Pipe => last_was_pipe = true,
1379 Token::Newline | Token::Semicolon | Token::And | Token::Or => {
1380 if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
1381 break;
1382 }
1383 }
1384 Token::Word(_) => last_was_pipe = false,
1385 _ => {}
1386 }
1387 i += 1;
1388 }
1389 }
1390
1391 let command_tokens = &tokens[start..i];
1392 if !command_tokens.is_empty() {
1393 if command_tokens.len() == 1 {
1395 match command_tokens[0] {
1396 Token::Else | Token::Elif | Token::Fi => {
1397 if i < tokens.len()
1399 && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
1400 {
1401 i += 1;
1402 }
1403 continue;
1404 }
1405 _ => {}
1406 }
1407 }
1408
1409 let (ast, consumed) = parse_next_command(&tokens[start..])?;
1411 i = start + consumed;
1412
1413 commands.push(ast);
1414 }
1415
1416 if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
1417 i += 1;
1418 }
1419 }
1420
1421 if commands.is_empty() {
1422 return Err("No commands found".to_string());
1423 }
1424
1425 if commands.len() == 1 {
1426 Ok(commands.into_iter().next().unwrap())
1427 } else {
1428 Ok(Ast::Sequence(commands))
1429 }
1430}
1431
1432fn parse_pipeline(tokens: &[Token]) -> Result<Ast, String> {
1445 let mut commands = Vec::new();
1446 let mut current_cmd = ShellCommand::default();
1447
1448 let mut i = 0;
1449 while i < tokens.len() {
1450 let token = &tokens[i];
1451 match token {
1452 Token::LeftBrace => {
1453 let mut brace_depth = 1;
1456 let mut j = i + 1;
1457
1458 while j < tokens.len() && brace_depth > 0 {
1459 match tokens[j] {
1460 Token::LeftBrace => brace_depth += 1,
1461 Token::RightBrace => brace_depth -= 1,
1462 _ => {}
1463 }
1464 j += 1;
1465 }
1466
1467 if brace_depth != 0 {
1468 return Err("Unmatched brace in pipeline".to_string());
1469 }
1470
1471 let group_tokens = &tokens[i + 1..j - 1];
1473
1474 let body_ast = if group_tokens.is_empty() {
1476 create_empty_body_ast()
1477 } else {
1478 parse_commands_sequentially(group_tokens)?
1479 };
1480
1481 current_cmd.compound = Some(Box::new(Ast::CommandGroup {
1483 body: Box::new(body_ast),
1484 }));
1485
1486 i = j; while i < tokens.len() {
1490 match &tokens[i] {
1491 Token::RedirOut => {
1492 i += 1;
1493 if i < tokens.len() {
1494 if let Token::Word(file) = &tokens[i] {
1495 current_cmd
1496 .redirections
1497 .push(Redirection::Output(file.clone()));
1498 i += 1;
1499 }
1500 }
1501 }
1502 Token::RedirOutClobber => {
1503 i += 1;
1504 if i >= tokens.len() {
1505 return Err("expected filename after >|".to_string());
1506 }
1507 if let Token::Word(file) = &tokens[i] {
1508 current_cmd
1509 .redirections
1510 .push(Redirection::OutputClobber(file.clone()));
1511 i += 1;
1512 } else {
1513 return Err("expected filename after >|".to_string());
1514 }
1515 }
1516 Token::RedirIn => {
1517 i += 1;
1518 if i < tokens.len() {
1519 if let Token::Word(file) = &tokens[i] {
1520 current_cmd
1521 .redirections
1522 .push(Redirection::Input(file.clone()));
1523 i += 1;
1524 }
1525 }
1526 }
1527 Token::RedirAppend => {
1528 i += 1;
1529 if i < tokens.len() {
1530 if let Token::Word(file) = &tokens[i] {
1531 current_cmd
1532 .redirections
1533 .push(Redirection::Append(file.clone()));
1534 i += 1;
1535 }
1536 }
1537 }
1538 Token::RedirectFdOut(fd, file) => {
1539 current_cmd
1540 .redirections
1541 .push(Redirection::FdOutput(*fd, file.clone()));
1542 i += 1;
1543 }
1544 Token::RedirectFdOutClobber(fd, file) => {
1545 current_cmd
1546 .redirections
1547 .push(Redirection::FdOutputClobber(*fd, file.clone()));
1548 i += 1;
1549 }
1550 Token::RedirectFdIn(fd, file) => {
1551 current_cmd
1552 .redirections
1553 .push(Redirection::FdInput(*fd, file.clone()));
1554 i += 1;
1555 }
1556 Token::RedirectFdAppend(fd, file) => {
1557 current_cmd
1558 .redirections
1559 .push(Redirection::FdAppend(*fd, file.clone()));
1560 i += 1;
1561 }
1562 Token::RedirectFdDup(from_fd, to_fd) => {
1563 current_cmd
1564 .redirections
1565 .push(Redirection::FdDuplicate(*from_fd, *to_fd));
1566 i += 1;
1567 }
1568 Token::RedirectFdClose(fd) => {
1569 current_cmd.redirections.push(Redirection::FdClose(*fd));
1570 i += 1;
1571 }
1572 Token::RedirectFdInOut(fd, file) => {
1573 current_cmd
1574 .redirections
1575 .push(Redirection::FdInputOutput(*fd, file.clone()));
1576 i += 1;
1577 }
1578 Token::RedirHereDoc(delimiter, quoted) => {
1579 current_cmd
1580 .redirections
1581 .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
1582 i += 1;
1583 }
1584 Token::RedirHereString(content) => {
1585 current_cmd
1586 .redirections
1587 .push(Redirection::HereString(content.clone()));
1588 i += 1;
1589 }
1590 Token::Pipe => {
1591 break;
1593 }
1594 _ => break,
1595 }
1596 }
1597
1598 continue;
1600 }
1601 Token::LeftParen => {
1602 let mut paren_depth = 1;
1605 let mut j = i + 1;
1606
1607 while j < tokens.len() && paren_depth > 0 {
1608 match tokens[j] {
1609 Token::LeftParen => paren_depth += 1,
1610 Token::RightParen => paren_depth -= 1,
1611 _ => {}
1612 }
1613 j += 1;
1614 }
1615
1616 if paren_depth != 0 {
1617 return Err("Unmatched parenthesis in pipeline".to_string());
1618 }
1619
1620 let subshell_tokens = &tokens[i + 1..j - 1];
1622
1623 let body_ast = if subshell_tokens.is_empty() {
1625 create_empty_body_ast()
1626 } else {
1627 parse_commands_sequentially(subshell_tokens)?
1628 };
1629
1630 current_cmd.compound = Some(Box::new(Ast::Subshell {
1633 body: Box::new(body_ast),
1634 }));
1635
1636 i = j; while i < tokens.len() {
1640 match &tokens[i] {
1641 Token::RedirOut => {
1642 i += 1;
1643 if i < tokens.len() {
1644 if let Token::Word(file) = &tokens[i] {
1645 current_cmd
1646 .redirections
1647 .push(Redirection::Output(file.clone()));
1648 i += 1;
1649 }
1650 }
1651 }
1652 Token::RedirOutClobber => {
1653 i += 1;
1654 if i >= tokens.len() {
1655 return Err("expected filename after >|".to_string());
1656 }
1657 if let Token::Word(file) = &tokens[i] {
1658 current_cmd
1659 .redirections
1660 .push(Redirection::OutputClobber(file.clone()));
1661 i += 1;
1662 } else {
1663 return Err("expected filename after >|".to_string());
1664 }
1665 }
1666 Token::RedirIn => {
1667 i += 1;
1668 if i < tokens.len() {
1669 if let Token::Word(file) = &tokens[i] {
1670 current_cmd
1671 .redirections
1672 .push(Redirection::Input(file.clone()));
1673 i += 1;
1674 }
1675 }
1676 }
1677 Token::RedirAppend => {
1678 i += 1;
1679 if i < tokens.len() {
1680 if let Token::Word(file) = &tokens[i] {
1681 current_cmd
1682 .redirections
1683 .push(Redirection::Append(file.clone()));
1684 i += 1;
1685 }
1686 }
1687 }
1688 Token::RedirectFdOut(fd, file) => {
1689 current_cmd
1690 .redirections
1691 .push(Redirection::FdOutput(*fd, file.clone()));
1692 i += 1;
1693 }
1694 Token::RedirectFdOutClobber(fd, file) => {
1695 current_cmd
1696 .redirections
1697 .push(Redirection::FdOutputClobber(*fd, file.clone()));
1698 i += 1;
1699 }
1700 Token::RedirectFdIn(fd, file) => {
1701 current_cmd
1702 .redirections
1703 .push(Redirection::FdInput(*fd, file.clone()));
1704 i += 1;
1705 }
1706 Token::RedirectFdAppend(fd, file) => {
1707 current_cmd
1708 .redirections
1709 .push(Redirection::FdAppend(*fd, file.clone()));
1710 i += 1;
1711 }
1712 Token::RedirectFdDup(from_fd, to_fd) => {
1713 current_cmd
1714 .redirections
1715 .push(Redirection::FdDuplicate(*from_fd, *to_fd));
1716 i += 1;
1717 }
1718 Token::RedirectFdClose(fd) => {
1719 current_cmd.redirections.push(Redirection::FdClose(*fd));
1720 i += 1;
1721 }
1722 Token::RedirectFdInOut(fd, file) => {
1723 current_cmd
1724 .redirections
1725 .push(Redirection::FdInputOutput(*fd, file.clone()));
1726 i += 1;
1727 }
1728 Token::RedirHereDoc(delimiter, quoted) => {
1729 current_cmd
1730 .redirections
1731 .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
1732 i += 1;
1733 }
1734 Token::RedirHereString(content) => {
1735 current_cmd
1736 .redirections
1737 .push(Redirection::HereString(content.clone()));
1738 i += 1;
1739 }
1740 Token::Pipe => {
1741 break;
1743 }
1744 _ => break,
1745 }
1746 }
1747
1748 continue;
1750 }
1751 Token::Word(word) => {
1752 current_cmd.args.push(word.clone());
1753 }
1754 Token::Local => {
1755 current_cmd.args.push("local".to_string());
1756 }
1757 Token::Return => {
1758 current_cmd.args.push("return".to_string());
1759 }
1760 Token::Break => {
1761 current_cmd.args.push("break".to_string());
1762 }
1763 Token::Continue => {
1764 current_cmd.args.push("continue".to_string());
1765 }
1766 Token::If => {
1770 current_cmd.args.push("if".to_string());
1771 }
1772 Token::Then => {
1773 current_cmd.args.push("then".to_string());
1774 }
1775 Token::Else => {
1776 current_cmd.args.push("else".to_string());
1777 }
1778 Token::Elif => {
1779 current_cmd.args.push("elif".to_string());
1780 }
1781 Token::Fi => {
1782 current_cmd.args.push("fi".to_string());
1783 }
1784 Token::Case => {
1785 current_cmd.args.push("case".to_string());
1786 }
1787 Token::In => {
1788 current_cmd.args.push("in".to_string());
1789 }
1790 Token::Esac => {
1791 current_cmd.args.push("esac".to_string());
1792 }
1793 Token::For => {
1794 current_cmd.args.push("for".to_string());
1795 }
1796 Token::While => {
1797 current_cmd.args.push("while".to_string());
1798 }
1799 Token::Until => {
1800 current_cmd.args.push("until".to_string());
1801 }
1802 Token::Do => {
1803 current_cmd.args.push("do".to_string());
1804 }
1805 Token::Done => {
1806 current_cmd.args.push("done".to_string());
1807 }
1808 Token::Pipe => {
1809 if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
1810 commands.push(current_cmd.clone());
1811 current_cmd = ShellCommand::default();
1812 }
1813 }
1814 Token::RedirIn => {
1816 i += 1;
1817 if i < tokens.len()
1818 && let Token::Word(ref file) = tokens[i]
1819 {
1820 current_cmd
1821 .redirections
1822 .push(Redirection::Input(file.clone()));
1823 }
1824 }
1825 Token::RedirOut => {
1826 i += 1;
1827 if i < tokens.len()
1828 && let Token::Word(ref file) = tokens[i]
1829 {
1830 current_cmd
1831 .redirections
1832 .push(Redirection::Output(file.clone()));
1833 }
1834 }
1835 Token::RedirOutClobber => {
1836 i += 1;
1837 if i >= tokens.len() {
1838 return Err("expected filename after >|".to_string());
1839 }
1840 if let Token::Word(ref file) = tokens[i] {
1841 current_cmd
1842 .redirections
1843 .push(Redirection::OutputClobber(file.clone()));
1844 } else {
1845 return Err("expected filename after >|".to_string());
1846 }
1847 }
1848 Token::RedirAppend => {
1849 i += 1;
1850 if i < tokens.len()
1851 && let Token::Word(ref file) = tokens[i]
1852 {
1853 current_cmd
1854 .redirections
1855 .push(Redirection::Append(file.clone()));
1856 }
1857 }
1858 Token::RedirHereDoc(delimiter, quoted) => {
1859 current_cmd
1861 .redirections
1862 .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
1863 }
1864 Token::RedirHereString(content) => {
1865 current_cmd
1866 .redirections
1867 .push(Redirection::HereString(content.clone()));
1868 }
1869 Token::RedirectFdIn(fd, file) => {
1871 current_cmd
1872 .redirections
1873 .push(Redirection::FdInput(*fd, file.clone()));
1874 }
1875 Token::RedirectFdOut(fd, file) => {
1876 current_cmd
1877 .redirections
1878 .push(Redirection::FdOutput(*fd, file.clone()));
1879 }
1880 Token::RedirectFdOutClobber(fd, file) => {
1881 current_cmd
1882 .redirections
1883 .push(Redirection::FdOutputClobber(*fd, file.clone()));
1884 }
1885 Token::RedirectFdAppend(fd, file) => {
1886 current_cmd
1887 .redirections
1888 .push(Redirection::FdAppend(*fd, file.clone()));
1889 }
1890 Token::RedirectFdDup(from_fd, to_fd) => {
1891 current_cmd
1892 .redirections
1893 .push(Redirection::FdDuplicate(*from_fd, *to_fd));
1894 }
1895 Token::RedirectFdClose(fd) => {
1896 current_cmd.redirections.push(Redirection::FdClose(*fd));
1897 }
1898 Token::RedirectFdInOut(fd, file) => {
1899 current_cmd
1900 .redirections
1901 .push(Redirection::FdInputOutput(*fd, file.clone()));
1902 }
1903 Token::RightParen => {
1904 if !current_cmd.args.is_empty()
1907 && i > 0
1908 && let Token::LeftParen = tokens[i - 1]
1909 {
1910 break;
1914 }
1915 return Err("Unexpected ) in pipeline".to_string());
1916 }
1917 Token::Newline => {
1918 if current_cmd.args.is_empty() && current_cmd.compound.is_none() {
1920 } else {
1922 break;
1923 }
1924 }
1925 Token::And | Token::Or | Token::Semicolon => {
1926 break;
1929 }
1930 _ => {
1931 return Err(format!("Unexpected token in pipeline: {:?}", token));
1932 }
1933 }
1934 i += 1;
1935 }
1936
1937 if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
1938 commands.push(current_cmd);
1939 }
1940
1941 if commands.is_empty() {
1942 return Err("No commands found".to_string());
1943 }
1944
1945 Ok(Ast::Pipeline(commands))
1946}
1947
/// Parse an `if`/`elif`/`else`/`fi` construct.
///
/// `tokens[0]` must be the `if` keyword. Each condition runs up to the first
/// `;`, newline, or `then`; each branch body is collected up to the matching
/// `else`/`elif`/`fi` at nesting depth 0 (nested `if ... fi` pairs are
/// tracked so their keywords stay inside the body). Returns `Ast::If` or an
/// error string describing the first malformed construct.
fn parse_if(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 1; let mut branches = Vec::new();

    loop {
        // Condition: everything up to `;`, newline, or `then`.
        let mut cond_tokens = Vec::new();
        while i < tokens.len()
            && tokens[i] != Token::Semicolon
            && tokens[i] != Token::Newline
            && tokens[i] != Token::Then
        {
            cond_tokens.push(tokens[i].clone());
            i += 1;
        }

        // Consume the single separator (if any) before `then`.
        if i < tokens.len() && (tokens[i] == Token::Semicolon || tokens[i] == Token::Newline) {
            i += 1;
        }

        skip_newlines(tokens, &mut i);

        if i >= tokens.len() || tokens[i] != Token::Then {
            return Err("Expected then after if/elif condition".to_string());
        }
        i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Then-branch body: collect up to `else`/`elif`/`fi` at depth 0,
        // where `depth` counts nested `if ... fi` constructs.
        let mut then_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    then_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        then_tokens.push(tokens[i].clone());
                    } else {
                        break; }
                }
                Token::Else | Token::Elif if depth == 0 => {
                    break; }
                Token::Newline => {
                    // Look ahead past a run of newlines: blank lines that
                    // directly precede else/elif/fi are not body content.
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len()
                        && depth == 0
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        i = j; break;
                    }
                    then_tokens.push(tokens[i].clone());
                }
                _ => {
                    then_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        skip_newlines(tokens, &mut i);

        // An empty then-branch still produces a runnable AST (`true`).
        let then_ast = if then_tokens.is_empty() {
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&then_tokens)?
        };

        let (condition, _) = parse_next_command(&cond_tokens)?;
        branches.push((Box::new(condition), Box::new(then_ast)));

        // `elif` restarts the loop with a new condition; anything else ends it.
        if i < tokens.len() && tokens[i] == Token::Elif {
            i += 1; } else {
            break;
        }
    }

    // Optional else-branch, delimited by the closing `fi` at depth 0.
    let else_ast = if i < tokens.len() && tokens[i] == Token::Else {
        i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        let mut else_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    else_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        else_tokens.push(tokens[i].clone());
                    } else {
                        break; }
                }
                Token::Newline => {
                    // Same newline look-ahead as the then-branch scan.
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len() && depth == 0 && tokens[j] == Token::Fi {
                        i = j; break;
                    }
                    else_tokens.push(tokens[i].clone());
                }
                _ => {
                    else_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        let else_ast = if else_tokens.is_empty() {
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&else_tokens)?
        };

        Some(Box::new(else_ast))
    } else {
        None
    };

    if i >= tokens.len() || tokens[i] != Token::Fi {
        return Err("Expected fi".to_string());
    }

    Ok(Ast::If {
        branches,
        else_branch: else_ast,
    })
}
2115
/// Parse a `case WORD in PATTERN) CMDS ;; ... esac` construct.
///
/// `tokens[0]` must be the `case` keyword. A branch whose sole pattern is
/// `*` becomes the `default` arm; every other branch is collected into
/// `cases` in source order. The final branch may omit its trailing `;;`.
fn parse_case(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 1; if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
        return Err("Expected word after case".to_string());
    }
    let word = if let Token::Word(ref w) = tokens[i] {
        w.clone()
    } else {
        unreachable!()
    };
    i += 1;

    if i >= tokens.len() || tokens[i] != Token::In {
        return Err("Expected in after case word".to_string());
    }
    i += 1;

    let mut cases = Vec::new();
    let mut default = None;

    loop {
        // Skip blank lines between branches.
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        if i >= tokens.len() {
            return Err("Unexpected end in case statement".to_string());
        }

        if tokens[i] == Token::Esac {
            break;
        }

        // Patterns up to the closing `)`. Alternatives can arrive either as
        // one word containing `|` (split here) or as separate words joined
        // by `Pipe` tokens; newlines between alternatives are tolerated.
        let mut patterns = Vec::new();
        while i < tokens.len() && tokens[i] != Token::RightParen {
            if let Token::Word(ref p) = tokens[i] {
                for pat in p.split('|') {
                    patterns.push(pat.to_string());
                }
            } else if tokens[i] == Token::Pipe {
                // `|` only separates alternatives; nothing to record.
            } else if tokens[i] == Token::Newline {
                // Blank lines inside the pattern list are ignored.
            } else {
                return Err(format!("Expected pattern, found {:?}", tokens[i]));
            }
            i += 1;
        }

        if i >= tokens.len() || tokens[i] != Token::RightParen {
            return Err("Expected ) after patterns".to_string());
        }
        i += 1;

        // Branch body: everything up to `;;` or `esac`.
        let mut commands_tokens = Vec::new();
        while i < tokens.len() && tokens[i] != Token::DoubleSemicolon && tokens[i] != Token::Esac {
            commands_tokens.push(tokens[i].clone());
            i += 1;
        }
        let (commands_ast, _) = parse_next_command(&commands_tokens)?;

        if i >= tokens.len() {
            return Err("Unexpected end in case statement".to_string());
        }

        if tokens[i] == Token::DoubleSemicolon {
            i += 1;
            // A lone `*` pattern is treated as the default arm.
            if patterns.len() == 1 && patterns[0] == "*" {
                default = Some(Box::new(commands_ast));
            } else {
                cases.push((patterns, commands_ast));
            }
        } else if tokens[i] == Token::Esac {
            // Last branch without a trailing `;;`.
            if patterns.len() == 1 && patterns[0] == "*" {
                default = Some(Box::new(commands_ast));
            } else {
                cases.push((patterns, commands_ast));
            }
            break;
        } else {
            return Err("Expected ;; or esac after commands".to_string());
        }
    }

    Ok(Ast::Case {
        word,
        cases,
        default,
    })
}
2215
2216fn parse_for(tokens: &[Token]) -> Result<Ast, String> {
2217 let mut i = 1; if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
2221 return Err("Expected variable name after for".to_string());
2222 }
2223 let variable = if let Token::Word(ref v) = tokens[i] {
2224 v.clone()
2225 } else {
2226 unreachable!()
2227 };
2228 i += 1;
2229
2230 if i >= tokens.len() || tokens[i] != Token::In {
2232 return Err("Expected 'in' after for variable".to_string());
2233 }
2234 i += 1;
2235
2236 let mut items = Vec::new();
2238 while i < tokens.len() {
2239 match &tokens[i] {
2240 Token::Do => break,
2241 Token::Semicolon | Token::Newline => {
2242 i += 1;
2243 if i < tokens.len() && tokens[i] == Token::Do {
2245 break;
2246 }
2247 }
2248 Token::Word(word) => {
2249 items.push(word.clone());
2250 i += 1;
2251 }
2252 _ => {
2253 return Err(format!("Unexpected token in for items: {:?}", tokens[i]));
2254 }
2255 }
2256 }
2257
2258 while i < tokens.len() && tokens[i] == Token::Newline {
2260 i += 1;
2261 }
2262
2263 if i >= tokens.len() || tokens[i] != Token::Do {
2265 return Err("Expected 'do' in for loop".to_string());
2266 }
2267 i += 1;
2268
2269 while i < tokens.len() && tokens[i] == Token::Newline {
2271 i += 1;
2272 }
2273
2274 let mut body_tokens = Vec::new();
2276 let mut depth = 0;
2277 while i < tokens.len() {
2278 match &tokens[i] {
2279 Token::For => {
2280 depth += 1;
2281 body_tokens.push(tokens[i].clone());
2282 }
2283 Token::Done => {
2284 if depth > 0 {
2285 depth -= 1;
2286 body_tokens.push(tokens[i].clone());
2287 } else {
2288 break; }
2290 }
2291 Token::Newline => {
2292 let mut j = i + 1;
2294 while j < tokens.len() && tokens[j] == Token::Newline {
2295 j += 1;
2296 }
2297 if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
2298 i = j; break;
2300 }
2301 body_tokens.push(tokens[i].clone());
2303 }
2304 _ => {
2305 body_tokens.push(tokens[i].clone());
2306 }
2307 }
2308 i += 1;
2309 }
2310
2311 if i >= tokens.len() || tokens[i] != Token::Done {
2312 return Err("Expected 'done' to close for loop".to_string());
2313 }
2314
2315 let body_ast = if body_tokens.is_empty() {
2317 create_empty_body_ast()
2319 } else {
2320 parse_commands_sequentially(&body_tokens)?
2321 };
2322
2323 Ok(Ast::For {
2324 variable,
2325 items,
2326 body: Box::new(body_ast),
2327 })
2328}
2329
/// Parse a `while COND; do BODY done` loop.
///
/// `tokens[0]` must be the `while` keyword. The condition runs up to `do`
/// (separators tolerated); the body runs up to the matching `done`, with
/// nested `while`/`for`/`until` loops tracked by depth.
fn parse_while(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 1; let mut cond_tokens = Vec::new();
    // Condition: everything before `do`; `;`/newline may separate them.
    while i < tokens.len() {
        match &tokens[i] {
            Token::Do => break,
            Token::Semicolon | Token::Newline => {
                i += 1;
                if i < tokens.len() && tokens[i] == Token::Do {
                    break;
                }
            }
            _ => {
                cond_tokens.push(tokens[i].clone());
                i += 1;
            }
        }
    }

    if cond_tokens.is_empty() {
        return Err("Expected condition after while".to_string());
    }

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Do {
        return Err("Expected 'do' in while loop".to_string());
    }
    i += 1;

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    // Body: collect up to the `done` at depth 0. Every loop keyword opens a
    // `do ... done` pair, so each one increments the nesting depth.
    let mut body_tokens = Vec::new();
    let mut depth = 0;
    while i < tokens.len() {
        match &tokens[i] {
            Token::While | Token::For | Token::Until => {
                depth += 1;
                body_tokens.push(tokens[i].clone());
            }
            Token::Done => {
                if depth > 0 {
                    depth -= 1;
                    body_tokens.push(tokens[i].clone());
                } else {
                    break; }
            }
            Token::Newline => {
                // Skip a blank-line run that directly precedes our `done`.
                let mut j = i + 1;
                while j < tokens.len() && tokens[j] == Token::Newline {
                    j += 1;
                }
                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
                    i = j; break;
                }
                body_tokens.push(tokens[i].clone());
            }
            _ => {
                body_tokens.push(tokens[i].clone());
            }
        }
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Done {
        return Err("Expected 'done' to close while loop".to_string());
    }

    let (condition_ast, _) = parse_next_command(&cond_tokens)?;

    // An empty body still yields a runnable AST (`true`).
    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(&body_tokens)?
    };

    Ok(Ast::While {
        condition: Box::new(condition_ast),
        body: Box::new(body_ast),
    })
}
2429
/// Parse an `until COND; do BODY done` loop.
///
/// Mirrors `parse_while`; only the resulting AST node (`Ast::Until`) and the
/// error messages differ. The body is delimited by the `done` at nesting
/// depth 0, with nested `while`/`for`/`until` loops tracked by depth.
fn parse_until(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 1; let mut cond_tokens = Vec::new();
    // Condition: everything before `do`; `;`/newline may separate them.
    while i < tokens.len() {
        match &tokens[i] {
            Token::Do => break,
            Token::Semicolon | Token::Newline => {
                i += 1;
                if i < tokens.len() && tokens[i] == Token::Do {
                    break;
                }
            }
            _ => {
                cond_tokens.push(tokens[i].clone());
                i += 1;
            }
        }
    }

    if cond_tokens.is_empty() {
        return Err("Expected condition after until".to_string());
    }

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Do {
        return Err("Expected 'do' in until loop".to_string());
    }
    i += 1;

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    // Body: collect up to the `done` at depth 0; every loop keyword opens a
    // `do ... done` pair and bumps the depth.
    let mut body_tokens = Vec::new();
    let mut depth = 0;
    while i < tokens.len() {
        match &tokens[i] {
            Token::While | Token::For | Token::Until => {
                depth += 1;
                body_tokens.push(tokens[i].clone());
            }
            Token::Done => {
                if depth > 0 {
                    depth -= 1;
                    body_tokens.push(tokens[i].clone());
                } else {
                    break; }
            }
            Token::Newline => {
                // Skip a blank-line run that directly precedes our `done`.
                let mut j = i + 1;
                while j < tokens.len() && tokens[j] == Token::Newline {
                    j += 1;
                }
                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
                    i = j; break;
                }
                body_tokens.push(tokens[i].clone());
            }
            _ => {
                body_tokens.push(tokens[i].clone());
            }
        }
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Done {
        return Err("Expected 'done' to close until loop".to_string());
    }

    let (condition_ast, _) = parse_next_command(&cond_tokens)?;

    // An empty body still yields a runnable AST (`true`).
    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(&body_tokens)?
    };

    Ok(Ast::Until {
        condition: Box::new(condition_ast),
        body: Box::new(body_ast),
    })
}
2529
/// Parse a function definition: either `name ( ) { BODY }` with separate
/// paren tokens, or the form where the lexer fused `name()` into one word
/// followed directly by `{`.
///
/// Scans for the `{` opening the body, then for the matching `}` while
/// skipping nested function definitions and compound statements so braces
/// inside them do not terminate the body early.
fn parse_function_definition(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.len() < 2 {
        return Err("Function definition too short".to_string());
    }

    // Function name: strip a trailing `(...)` when the lexer kept it fused
    // onto the word (e.g. `myfunc()` -> `myfunc`).
    let func_name = if let Token::Word(word) = &tokens[0] {
        if let Some(paren_pos) = word.find('(') {
            if word.ends_with(')') && paren_pos > 0 {
                word[..paren_pos].to_string()
            } else {
                word.clone()
            }
        } else {
            word.clone()
        }
    } else {
        return Err("Function name must be a word".to_string());
    };

    // Index of the `{` opening the body: after explicit `( )` tokens, or
    // directly after the fused `name()` word.
    let brace_pos =
        if tokens.len() >= 4 && tokens[1] == Token::LeftParen && tokens[2] == Token::RightParen {
            if tokens[3] != Token::LeftBrace {
                return Err("Expected { after function name".to_string());
            }
            3
        } else if tokens.len() >= 2 && tokens[1] == Token::LeftBrace {
            1
        } else {
            return Err("Expected ( after function name or { for legacy format".to_string());
        };

    // Locate the matching closing `}` of the body, tracking brace depth.
    let mut brace_depth = 0;
    let mut body_end = 0;
    let mut found_closing = false;
    let mut i = brace_pos + 1;

    while i < tokens.len() {
        // Skip a nested `name ( ) {` function definition wholesale so its
        // braces do not perturb our depth accounting.
        if i + 3 < tokens.len()
            && matches!(&tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            i += 4;
            let mut nested_depth = 1;
            while i < tokens.len() && nested_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => nested_depth += 1,
                    Token::RightBrace => nested_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            continue;
        }

        match &tokens[i] {
            Token::LeftBrace => {
                brace_depth += 1;
                i += 1;
            }
            Token::RightBrace => {
                if brace_depth == 0 {
                    body_end = i;
                    found_closing = true;
                    break;
                } else {
                    brace_depth -= 1;
                    i += 1;
                }
            }
            // Compound statements may contain braces (e.g. in command
            // groups); presumably the skip_* helpers (defined elsewhere in
            // this file — not visible here) advance `i` past the matching
            // terminator. TODO confirm they always make progress.
            Token::If => {
                skip_to_matching_fi(tokens, &mut i);
            }
            Token::For | Token::While | Token::Until => {
                skip_to_matching_done(tokens, &mut i);
            }
            Token::Case => {
                skip_to_matching_esac(tokens, &mut i);
            }
            _ => {
                i += 1;
            }
        }
    }

    if !found_closing {
        return Err("Missing closing } for function definition".to_string());
    }

    // Body is everything strictly between the braces.
    let body_tokens = &tokens[brace_pos + 1..body_end];

    // An empty body still yields a runnable AST (`true`).
    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(body_tokens)?
    };

    Ok(Ast::FunctionDefinition {
        name: func_name,
        body: Box::new(body_ast),
    })
}
2651
2652#[cfg(test)]
2653mod tests {
2654 use super::super::lexer::Token;
2655 use super::*;
2656
    #[test]
    fn test_single_command() {
        // A bare word parses to a one-command pipeline, no redirections.
        let tokens = vec![Token::Word("ls".to_string())];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["ls".to_string()],
                redirections: Vec::new(),
                compound: None,
            }])
        );
    }
2670
    #[test]
    fn test_command_with_args() {
        // Consecutive words accumulate as args of the same command.
        let tokens = vec![
            Token::Word("ls".to_string()),
            Token::Word("-la".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["ls".to_string(), "-la".to_string()],
                redirections: Vec::new(),
                compound: None,
            }])
        );
    }
2687
    #[test]
    fn test_pipeline() {
        // A `|` splits the stream into two commands within one pipeline.
        let tokens = vec![
            Token::Word("ls".to_string()),
            Token::Pipe,
            Token::Word("grep".to_string()),
            Token::Word("txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![
                ShellCommand {
                    args: vec!["ls".to_string()],
                    redirections: Vec::new(),
                    compound: None,
                },
                ShellCommand {
                    args: vec!["grep".to_string(), "txt".to_string()],
                    redirections: Vec::new(),
                    compound: None,
                }
            ])
        );
    }
2713
    #[test]
    fn test_input_redirection() {
        // `< file` becomes Redirection::Input on the current command.
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("input.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                redirections: vec![Redirection::Input("input.txt".to_string())],
                compound: None,
            }])
        );
    }
2731
    #[test]
    fn test_output_redirection() {
        // `> file` becomes Redirection::Output; the target word is consumed
        // and does not appear in args.
        let tokens = vec![
            Token::Word("printf".to_string()),
            Token::Word("hello".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["printf".to_string(), "hello".to_string()],
                compound: None,
                redirections: vec![Redirection::Output("output.txt".to_string())],
            }])
        );
    }
2750
    #[test]
    fn test_append_redirection() {
        // `>> file` becomes Redirection::Append.
        let tokens = vec![
            Token::Word("printf".to_string()),
            Token::Word("hello".to_string()),
            Token::RedirAppend,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["printf".to_string(), "hello".to_string()],
                compound: None,
                redirections: vec![Redirection::Append("output.txt".to_string())],
            }])
        );
    }
2769
    #[test]
    fn test_complex_pipeline_with_redirections() {
        // Redirections attach to the correct stage of a 3-stage pipeline:
        // input on the first command, output on the last.
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("input.txt".to_string()),
            Token::Pipe,
            Token::Word("grep".to_string()),
            Token::Word("pattern".to_string()),
            Token::Pipe,
            Token::Word("sort".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![
                ShellCommand {
                    args: vec!["cat".to_string()],
                    compound: None,
                    redirections: vec![Redirection::Input("input.txt".to_string())],
                },
                ShellCommand {
                    args: vec!["grep".to_string(), "pattern".to_string()],
                    compound: None,
                    redirections: Vec::new(),
                },
                ShellCommand {
                    args: vec!["sort".to_string()],
                    redirections: vec![Redirection::Output("output.txt".to_string())],
                    compound: None,
                }
            ])
        );
    }
2806
2807 #[test]
2808 fn test_empty_tokens() {
2809 let tokens = vec![];
2810 let result = parse(tokens);
2811 assert!(result.is_err());
2812 assert_eq!(result.unwrap_err(), "No commands found");
2813 }
2814
2815 #[test]
2816 fn test_only_pipe() {
2817 let tokens = vec![Token::Pipe];
2818 let result = parse(tokens);
2819 assert!(result.is_err());
2820 assert_eq!(result.unwrap_err(), "No commands found");
2821 }
2822
    #[test]
    fn test_redirection_without_file() {
        // A dangling `<` with no target word is silently dropped rather
        // than producing an error.
        let tokens = vec![Token::Word("cat".to_string()), Token::RedirIn];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                compound: None,
                redirections: Vec::new(),
            }])
        );
    }
2837
    #[test]
    fn test_multiple_redirections() {
        // Several redirections on one command are collected in order.
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("file1.txt".to_string()),
            Token::RedirOut,
            Token::Word("file2.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                redirections: vec![
                    Redirection::Input("file1.txt".to_string()),
                    Redirection::Output("file2.txt".to_string()),
                ],
                compound: None,
            }])
        );
    }
2860
    #[test]
    fn test_parse_if() {
        // `if true; then printf yes; fi` -> one branch, no else.
        let tokens = vec![
            Token::If,
            Token::Word("true".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("yes".to_string()),
            Token::Semicolon,
            Token::Fi,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::If {
            branches,
            else_branch,
        } = result
        {
            assert_eq!(branches.len(), 1);
            let (condition, then_branch) = &branches[0];
            if let Ast::Pipeline(cmds) = &**condition {
                assert_eq!(cmds[0].args, vec!["true"]);
            } else {
                panic!("condition not pipeline");
            }
            if let Ast::Pipeline(cmds) = &**then_branch {
                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
            } else {
                panic!("then_branch not pipeline");
            }
            assert!(else_branch.is_none());
        } else {
            panic!("not if");
        }
    }
2896
    #[test]
    fn test_parse_if_elif() {
        // `if false; ...; elif true; ...; fi` -> two branches in order,
        // still no else-branch.
        let tokens = vec![
            Token::If,
            Token::Word("false".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("no".to_string()),
            Token::Semicolon,
            Token::Elif,
            Token::Word("true".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("yes".to_string()),
            Token::Semicolon,
            Token::Fi,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::If {
            branches,
            else_branch,
        } = result
        {
            assert_eq!(branches.len(), 2);
            let (condition1, then1) = &branches[0];
            if let Ast::Pipeline(cmds) = &**condition1 {
                assert_eq!(cmds[0].args, vec!["false"]);
            }
            if let Ast::Pipeline(cmds) = &**then1 {
                assert_eq!(cmds[0].args, vec!["printf", "no"]);
            }
            let (condition2, then2) = &branches[1];
            if let Ast::Pipeline(cmds) = &**condition2 {
                assert_eq!(cmds[0].args, vec!["true"]);
            }
            if let Ast::Pipeline(cmds) = &**then2 {
                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
            }
            assert!(else_branch.is_none());
        } else {
            panic!("not if");
        }
    }
2944
    #[test]
    fn test_parse_assignment() {
        // `VAR=value` as a lone word becomes Ast::Assignment.
        let tokens = vec![Token::Word("MY_VAR=test_value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Assignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "test_value");
        } else {
            panic!("not assignment");
        }
    }
2956
    #[test]
    fn test_parse_assignment_quoted() {
        // A value containing a space (from a quoted source) is kept intact.
        let tokens = vec![Token::Word("MY_VAR=hello world".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Assignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "hello world");
        } else {
            panic!("not assignment");
        }
    }
2968
    #[test]
    fn test_parse_assignment_invalid() {
        // A name starting with a digit is not a valid assignment, so the
        // word falls back to being an ordinary command.
        let tokens = vec![Token::Word("123VAR=value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Pipeline(cmds) = result {
            assert_eq!(cmds[0].args, vec!["123VAR=value"]);
        } else {
            panic!("should be parsed as pipeline");
        }
    }
2980
    #[test]
    fn test_parse_function_definition() {
        // `myfunc ( ) { echo hello }` with separate paren tokens parses to
        // a FunctionDefinition whose body is the inner pipeline.
        let tokens = vec![
            Token::Word("myfunc".to_string()),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            Token::Word("echo".to_string()),
            Token::Word("hello".to_string()),
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "myfunc");
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }
3005
    #[test]
    fn test_parse_function_definition_empty() {
        // An empty function body is replaced with a `true` pipeline
        // (see create_empty_body_ast).
        let tokens = vec![
            Token::Word("emptyfunc".to_string()),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "emptyfunc");
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["true"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }
3028
3029 #[test]
3030 fn test_parse_function_definition_legacy_format() {
3031 let tokens = vec![
3033 Token::Word("legacyfunc()".to_string()),
3034 Token::LeftBrace,
3035 Token::Word("echo".to_string()),
3036 Token::Word("hello".to_string()),
3037 Token::RightBrace,
3038 ];
3039 let result = parse(tokens).unwrap();
3040 if let Ast::FunctionDefinition { name, body } = result {
3041 assert_eq!(name, "legacyfunc");
3042 if let Ast::Pipeline(cmds) = *body {
3044 assert_eq!(cmds[0].args, vec!["echo", "hello"]);
3045 } else {
3046 panic!("function body should be a pipeline");
3047 }
3048 } else {
3049 panic!("should be parsed as function definition");
3050 }
3051 }
3052
3053 #[test]
3054 fn test_parse_local_assignment() {
3055 let tokens = vec![Token::Local, Token::Word("MY_VAR=test_value".to_string())];
3056 let result = parse(tokens).unwrap();
3057 if let Ast::LocalAssignment { var, value } = result {
3058 assert_eq!(var, "MY_VAR");
3059 assert_eq!(value, "test_value");
3060 } else {
3061 panic!("should be parsed as local assignment");
3062 }
3063 }
3064
3065 #[test]
3066 fn test_parse_local_assignment_separate_tokens() {
3067 let tokens = vec![
3068 Token::Local,
3069 Token::Word("MY_VAR".to_string()),
3070 Token::Word("test_value".to_string()),
3071 ];
3072 let result = parse(tokens).unwrap();
3073 if let Ast::LocalAssignment { var, value } = result {
3074 assert_eq!(var, "MY_VAR");
3075 assert_eq!(value, "test_value");
3076 } else {
3077 panic!("should be parsed as local assignment");
3078 }
3079 }
3080
3081 #[test]
3082 fn test_parse_local_assignment_invalid_var_name() {
3083 let tokens = vec![Token::Local, Token::Word("123VAR=value".to_string())];
3085 let result = parse(tokens);
3086 assert!(result.is_err());
3088 }
3089
3090 #[test]
3091 fn test_parse_here_document_redirection() {
3092 let tokens = vec![
3093 Token::Word("cat".to_string()),
3094 Token::RedirHereDoc("EOF".to_string(), false),
3095 ];
3096 let result = parse(tokens).unwrap();
3097 assert_eq!(
3098 result,
3099 Ast::Pipeline(vec![ShellCommand {
3100 args: vec!["cat".to_string()],
3101 redirections: vec![Redirection::HereDoc("EOF".to_string(), "false".to_string())],
3102 compound: None,
3103 }])
3104 );
3105 }
3106
3107 #[test]
3108 fn test_parse_here_string_redirection() {
3109 let tokens = vec![
3110 Token::Word("grep".to_string()),
3111 Token::RedirHereString("pattern".to_string()),
3112 ];
3113 let result = parse(tokens).unwrap();
3114 assert_eq!(
3115 result,
3116 Ast::Pipeline(vec![ShellCommand {
3117 args: vec!["grep".to_string()],
3118 compound: None,
3119 redirections: vec![Redirection::HereString("pattern".to_string())],
3120 }])
3121 );
3122 }
3123
3124 #[test]
3125 fn test_parse_mixed_redirections() {
3126 let tokens = vec![
3127 Token::Word("cat".to_string()),
3128 Token::RedirIn,
3129 Token::Word("file.txt".to_string()),
3130 Token::RedirHereString("fallback".to_string()),
3131 Token::RedirOut,
3132 Token::Word("output.txt".to_string()),
3133 ];
3134 let result = parse(tokens).unwrap();
3135 assert_eq!(
3136 result,
3137 Ast::Pipeline(vec![ShellCommand {
3138 args: vec!["cat".to_string()],
3139 compound: None,
3140 redirections: vec![
3141 Redirection::Input("file.txt".to_string()),
3142 Redirection::HereString("fallback".to_string()),
3143 Redirection::Output("output.txt".to_string()),
3144 ],
3145 }])
3146 );
3147 }
3148
3149 #[test]
3152 fn test_parse_fd_input_redirection() {
3153 let tokens = vec![
3154 Token::Word("command".to_string()),
3155 Token::RedirectFdIn(3, "input.txt".to_string()),
3156 ];
3157 let result = parse(tokens).unwrap();
3158 assert_eq!(
3159 result,
3160 Ast::Pipeline(vec![ShellCommand {
3161 args: vec!["command".to_string()],
3162 redirections: vec![Redirection::FdInput(3, "input.txt".to_string())],
3163 compound: None,
3164 }])
3165 );
3166 }
3167
3168 #[test]
3169 fn test_parse_fd_output_redirection() {
3170 let tokens = vec![
3171 Token::Word("command".to_string()),
3172 Token::RedirectFdOut(2, "errors.log".to_string()),
3173 ];
3174 let result = parse(tokens).unwrap();
3175 assert_eq!(
3176 result,
3177 Ast::Pipeline(vec![ShellCommand {
3178 args: vec!["command".to_string()],
3179 compound: None,
3180 redirections: vec![Redirection::FdOutput(2, "errors.log".to_string())],
3181 }])
3182 );
3183 }
3184
3185 #[test]
3186 fn test_parse_fd_append_redirection() {
3187 let tokens = vec![
3188 Token::Word("command".to_string()),
3189 Token::RedirectFdAppend(2, "errors.log".to_string()),
3190 ];
3191 let result = parse(tokens).unwrap();
3192 assert_eq!(
3193 result,
3194 Ast::Pipeline(vec![ShellCommand {
3195 args: vec!["command".to_string()],
3196 compound: None,
3197 redirections: vec![Redirection::FdAppend(2, "errors.log".to_string())],
3198 }])
3199 );
3200 }
3201
3202 #[test]
3203 fn test_parse_fd_duplicate() {
3204 let tokens = vec![
3205 Token::Word("command".to_string()),
3206 Token::RedirectFdDup(2, 1),
3207 ];
3208 let result = parse(tokens).unwrap();
3209 assert_eq!(
3210 result,
3211 Ast::Pipeline(vec![ShellCommand {
3212 args: vec!["command".to_string()],
3213 compound: None,
3214 redirections: vec![Redirection::FdDuplicate(2, 1)],
3215 }])
3216 );
3217 }
3218
3219 #[test]
3220 fn test_parse_fd_close() {
3221 let tokens = vec![
3222 Token::Word("command".to_string()),
3223 Token::RedirectFdClose(2),
3224 ];
3225 let result = parse(tokens).unwrap();
3226 assert_eq!(
3227 result,
3228 Ast::Pipeline(vec![ShellCommand {
3229 args: vec!["command".to_string()],
3230 compound: None,
3231 redirections: vec![Redirection::FdClose(2)],
3232 }])
3233 );
3234 }
3235
3236 #[test]
3237 fn test_parse_fd_input_output() {
3238 let tokens = vec![
3239 Token::Word("command".to_string()),
3240 Token::RedirectFdInOut(3, "file.txt".to_string()),
3241 ];
3242 let result = parse(tokens).unwrap();
3243 assert_eq!(
3244 result,
3245 Ast::Pipeline(vec![ShellCommand {
3246 args: vec!["command".to_string()],
3247 compound: None,
3248 redirections: vec![Redirection::FdInputOutput(3, "file.txt".to_string())],
3249 }])
3250 );
3251 }
3252
3253 #[test]
3254 fn test_parse_multiple_fd_redirections() {
3255 let tokens = vec![
3256 Token::Word("command".to_string()),
3257 Token::RedirectFdOut(2, "err.log".to_string()),
3258 Token::RedirectFdIn(3, "input.txt".to_string()),
3259 Token::RedirectFdAppend(4, "append.log".to_string()),
3260 ];
3261 let result = parse(tokens).unwrap();
3262 assert_eq!(
3263 result,
3264 Ast::Pipeline(vec![ShellCommand {
3265 args: vec!["command".to_string()],
3266 compound: None,
3267 redirections: vec![
3268 Redirection::FdOutput(2, "err.log".to_string()),
3269 Redirection::FdInput(3, "input.txt".to_string()),
3270 Redirection::FdAppend(4, "append.log".to_string()),
3271 ],
3272 }])
3273 );
3274 }
3275
3276 #[test]
3277 fn test_parse_fd_swap_pattern() {
3278 let tokens = vec![
3279 Token::Word("command".to_string()),
3280 Token::RedirectFdDup(3, 1),
3281 Token::RedirectFdDup(1, 2),
3282 Token::RedirectFdDup(2, 3),
3283 Token::RedirectFdClose(3),
3284 ];
3285 let result = parse(tokens).unwrap();
3286 assert_eq!(
3287 result,
3288 Ast::Pipeline(vec![ShellCommand {
3289 args: vec!["command".to_string()],
3290 redirections: vec![
3291 Redirection::FdDuplicate(3, 1),
3292 Redirection::FdDuplicate(1, 2),
3293 Redirection::FdDuplicate(2, 3),
3294 Redirection::FdClose(3),
3295 ],
3296 compound: None,
3297 }])
3298 );
3299 }
3300
3301 #[test]
3302 fn test_parse_mixed_basic_and_fd_redirections() {
3303 let tokens = vec![
3304 Token::Word("command".to_string()),
3305 Token::RedirOut,
3306 Token::Word("output.txt".to_string()),
3307 Token::RedirectFdDup(2, 1),
3308 ];
3309 let result = parse(tokens).unwrap();
3310 assert_eq!(
3311 result,
3312 Ast::Pipeline(vec![ShellCommand {
3313 args: vec!["command".to_string()],
3314 redirections: vec![
3315 Redirection::Output("output.txt".to_string()),
3316 Redirection::FdDuplicate(2, 1),
3317 ],
3318 compound: None,
3319 }])
3320 );
3321 }
3322
3323 #[test]
3324 fn test_parse_fd_redirection_ordering() {
3325 let tokens = vec![
3327 Token::Word("command".to_string()),
3328 Token::RedirectFdOut(2, "first.log".to_string()),
3329 Token::RedirOut,
3330 Token::Word("second.txt".to_string()),
3331 Token::RedirectFdDup(2, 1),
3332 ];
3333 let result = parse(tokens).unwrap();
3334 assert_eq!(
3335 result,
3336 Ast::Pipeline(vec![ShellCommand {
3337 args: vec!["command".to_string()],
3338 redirections: vec![
3339 Redirection::FdOutput(2, "first.log".to_string()),
3340 Redirection::Output("second.txt".to_string()),
3341 Redirection::FdDuplicate(2, 1),
3342 ],
3343 compound: None,
3344 }])
3345 );
3346 }
3347
3348 #[test]
3349 fn test_parse_fd_redirection_with_pipe() {
3350 let tokens = vec![
3351 Token::Word("command".to_string()),
3352 Token::RedirectFdDup(2, 1),
3353 Token::Pipe,
3354 Token::Word("grep".to_string()),
3355 Token::Word("error".to_string()),
3356 ];
3357 let result = parse(tokens).unwrap();
3358 assert_eq!(
3359 result,
3360 Ast::Pipeline(vec![
3361 ShellCommand {
3362 args: vec!["command".to_string()],
3363 redirections: vec![Redirection::FdDuplicate(2, 1)],
3364 compound: None,
3365 },
3366 ShellCommand {
3367 args: vec!["grep".to_string(), "error".to_string()],
3368 compound: None,
3369 redirections: Vec::new(),
3370 }
3371 ])
3372 );
3373 }
3374
3375 #[test]
3376 fn test_parse_all_fd_numbers() {
3377 let tokens = vec![
3379 Token::Word("cmd".to_string()),
3380 Token::RedirectFdIn(0, "file".to_string()),
3381 ];
3382 let result = parse(tokens).unwrap();
3383 if let Ast::Pipeline(cmds) = result {
3384 assert_eq!(
3385 cmds[0].redirections[0],
3386 Redirection::FdInput(0, "file".to_string())
3387 );
3388 } else {
3389 panic!("Expected Pipeline");
3390 }
3391
3392 let tokens = vec![
3394 Token::Word("cmd".to_string()),
3395 Token::RedirectFdOut(9, "file".to_string()),
3396 ];
3397 let result = parse(tokens).unwrap();
3398 if let Ast::Pipeline(cmds) = result {
3399 assert_eq!(
3400 cmds[0].redirections[0],
3401 Redirection::FdOutput(9, "file".to_string())
3402 );
3403 } else {
3404 panic!("Expected Pipeline");
3405 }
3406 }
3407
3408 #[test]
3411 fn test_negation_with_and_operator() {
3412 let tokens = vec![
3415 Token::Bang,
3416 Token::Word("false".to_string()),
3417 Token::And,
3418 Token::Word("echo".to_string()),
3419 Token::Word("success".to_string()),
3420 ];
3421 let result = parse(tokens).unwrap();
3422
3423 if let Ast::And { left, right } = result {
3425 if let Ast::Negation { command } = *left {
3427 if let Ast::Pipeline(cmds) = *command {
3428 assert_eq!(cmds[0].args, vec!["false"]);
3429 } else {
3430 panic!("Expected Pipeline in negation");
3431 }
3432 } else {
3433 panic!("Expected Negation on left side of And");
3434 }
3435
3436 if let Ast::Pipeline(cmds) = *right {
3438 assert_eq!(cmds[0].args, vec!["echo", "success"]);
3439 } else {
3440 panic!("Expected Pipeline on right side of And");
3441 }
3442 } else {
3443 panic!("Expected And node, got: {:?}", result);
3444 }
3445 }
3446
3447 #[test]
3448 fn test_negation_with_or_operator() {
3449 let tokens = vec![
3452 Token::Bang,
3453 Token::Word("true".to_string()),
3454 Token::Or,
3455 Token::Word("echo".to_string()),
3456 Token::Word("fallback".to_string()),
3457 ];
3458 let result = parse(tokens).unwrap();
3459
3460 if let Ast::Or { left, right } = result {
3462 if let Ast::Negation { command } = *left {
3464 if let Ast::Pipeline(cmds) = *command {
3465 assert_eq!(cmds[0].args, vec!["true"]);
3466 } else {
3467 panic!("Expected Pipeline in negation");
3468 }
3469 } else {
3470 panic!("Expected Negation on left side of Or");
3471 }
3472
3473 if let Ast::Pipeline(cmds) = *right {
3475 assert_eq!(cmds[0].args, vec!["echo", "fallback"]);
3476 } else {
3477 panic!("Expected Pipeline on right side of Or");
3478 }
3479 } else {
3480 panic!("Expected Or node, got: {:?}", result);
3481 }
3482 }
3483
3484 #[test]
3485 fn test_negation_and_semicolon_sequence() {
3486 let tokens = vec![
3489 Token::Bang,
3490 Token::Word("false".to_string()),
3491 Token::And,
3492 Token::Word("echo".to_string()),
3493 Token::Word("second".to_string()),
3494 Token::Semicolon,
3495 Token::Word("echo".to_string()),
3496 Token::Word("third".to_string()),
3497 ];
3498 let result = parse(tokens).unwrap();
3499
3500 if let Ast::Sequence(commands) = result {
3502 assert_eq!(commands.len(), 2);
3503
3504 if let Ast::And { left, right } = &commands[0] {
3506 if let Ast::Negation { command } = &**left {
3507 if let Ast::Pipeline(cmds) = &**command {
3508 assert_eq!(cmds[0].args, vec!["false"]);
3509 } else {
3510 panic!("Expected Pipeline in negation");
3511 }
3512 } else {
3513 panic!("Expected Negation");
3514 }
3515
3516 if let Ast::Pipeline(cmds) = &**right {
3517 assert_eq!(cmds[0].args, vec!["echo", "second"]);
3518 } else {
3519 panic!("Expected Pipeline");
3520 }
3521 } else {
3522 panic!("Expected And node");
3523 }
3524
3525 if let Ast::Pipeline(cmds) = &commands[1] {
3527 assert_eq!(cmds[0].args, vec!["echo", "third"]);
3528 } else {
3529 panic!("Expected Pipeline");
3530 }
3531 } else {
3532 panic!("Expected Sequence, got: {:?}", result);
3533 }
3534 }
3535
3536 #[test]
3537 fn test_nested_logical_operators() {
3538 let tokens = vec![
3541 Token::Word("true".to_string()),
3542 Token::And,
3543 Token::Bang,
3544 Token::Word("false".to_string()),
3545 Token::Or,
3546 Token::Word("echo".to_string()),
3547 Token::Word("fallback".to_string()),
3548 ];
3549 let result = parse(tokens).unwrap();
3550
3551 if let Ast::Or { left, right } = result {
3553 if let Ast::And {
3555 left: and_left,
3556 right: and_right,
3557 } = *left
3558 {
3559 if let Ast::Pipeline(cmds) = *and_left {
3561 assert_eq!(cmds[0].args, vec!["true"]);
3562 } else {
3563 panic!("Expected Pipeline");
3564 }
3565
3566 if let Ast::Negation { command } = *and_right {
3568 if let Ast::Pipeline(cmds) = *command {
3569 assert_eq!(cmds[0].args, vec!["false"]);
3570 } else {
3571 panic!("Expected Pipeline in negation");
3572 }
3573 } else {
3574 panic!("Expected Negation");
3575 }
3576 } else {
3577 panic!("Expected And node on left side of Or");
3578 }
3579
3580 if let Ast::Pipeline(cmds) = *right {
3582 assert_eq!(cmds[0].args, vec!["echo", "fallback"]);
3583 } else {
3584 panic!("Expected Pipeline");
3585 }
3586 } else {
3587 panic!("Expected Or node, got: {:?}", result);
3588 }
3589 }
3590
3591 #[test]
3592 fn test_subshell_with_and_operator_and_sequence() {
3593 let tokens = vec![
3596 Token::LeftParen,
3597 Token::Word("true".to_string()),
3598 Token::RightParen,
3599 Token::And,
3600 Token::Word("echo".to_string()),
3601 Token::Word("second".to_string()),
3602 Token::Semicolon,
3603 Token::Word("echo".to_string()),
3604 Token::Word("third".to_string()),
3605 ];
3606 let result = parse(tokens).unwrap();
3607
3608 if let Ast::Sequence(commands) = result {
3610 assert_eq!(commands.len(), 2);
3611
3612 if let Ast::And { .. } = &commands[0] {
3614 } else {
3616 panic!("Expected And node");
3617 }
3618
3619 if let Ast::Pipeline(cmds) = &commands[1] {
3621 assert_eq!(cmds[0].args, vec!["echo", "third"]);
3622 } else {
3623 panic!("Expected Pipeline");
3624 }
3625 } else {
3626 panic!("Expected Sequence, got: {:?}", result);
3627 }
3628 }
3629
3630 #[test]
3631 fn test_command_group_with_or_operator_and_sequence() {
3632 let tokens = vec![
3635 Token::LeftBrace,
3636 Token::Word("false".to_string()),
3637 Token::Semicolon,
3638 Token::RightBrace,
3639 Token::Or,
3640 Token::Word("echo".to_string()),
3641 Token::Word("second".to_string()),
3642 Token::Semicolon,
3643 Token::Word("echo".to_string()),
3644 Token::Word("third".to_string()),
3645 ];
3646 let result = parse(tokens).unwrap();
3647
3648 if let Ast::Sequence(commands) = result {
3650 assert_eq!(commands.len(), 2);
3651
3652 if let Ast::Or { .. } = &commands[0] {
3654 } else {
3656 panic!("Expected Or node");
3657 }
3658
3659 if let Ast::Pipeline(cmds) = &commands[1] {
3661 assert_eq!(cmds[0].args, vec!["echo", "third"]);
3662 } else {
3663 panic!("Expected Pipeline");
3664 }
3665 } else {
3666 panic!("Expected Sequence, got: {:?}", result);
3667 }
3668 }
3669
3670 #[test]
3671 fn test_multiple_and_operators_in_sequence() {
3672 let tokens = vec![
3675 Token::Word("true".to_string()),
3676 Token::And,
3677 Token::Word("echo".to_string()),
3678 Token::Word("second".to_string()),
3679 Token::And,
3680 Token::Word("echo".to_string()),
3681 Token::Word("third".to_string()),
3682 ];
3683 let result = parse(tokens).unwrap();
3684
3685 if let Ast::And { left, right } = result {
3687 if let Ast::And {
3689 left: inner_left,
3690 right: inner_right,
3691 } = *left
3692 {
3693 if let Ast::Pipeline(cmds) = *inner_left {
3694 assert_eq!(cmds[0].args, vec!["true"]);
3695 } else {
3696 panic!("Expected Pipeline");
3697 }
3698
3699 if let Ast::Pipeline(cmds) = *inner_right {
3700 assert_eq!(cmds[0].args, vec!["echo", "second"]);
3701 } else {
3702 panic!("Expected Pipeline");
3703 }
3704 } else {
3705 panic!("Expected nested And node on left");
3706 }
3707
3708 if let Ast::Pipeline(cmds) = *right {
3710 assert_eq!(cmds[0].args, vec!["echo", "third"]);
3711 } else {
3712 panic!("Expected Pipeline on right");
3713 }
3714 } else {
3715 panic!("Expected And node, got: {:?}", result);
3716 }
3717 }
3718
3719 #[test]
3720 fn test_negation_in_pipeline() {
3721 let tokens = vec![
3724 Token::Bang,
3725 Token::Word("grep".to_string()),
3726 Token::Word("pattern".to_string()),
3727 Token::Pipe,
3728 Token::Word("wc".to_string()),
3729 Token::Word("-l".to_string()),
3730 ];
3731 let result = parse(tokens).unwrap();
3732
3733 if let Ast::Negation { command } = result {
3735 if let Ast::Pipeline(cmds) = *command {
3736 assert_eq!(cmds.len(), 2);
3737 assert_eq!(cmds[0].args, vec!["grep", "pattern"]);
3738 assert_eq!(cmds[1].args, vec!["wc", "-l"]);
3739 } else {
3740 panic!("Expected Pipeline in negation");
3741 }
3742 } else {
3743 panic!("Expected Negation, got: {:?}", result);
3744 }
3745 }
3746
3747 #[test]
3748 fn test_redirclobber_without_filename() {
3749 let tokens = vec![
3751 Token::Word("echo".to_string()),
3752 Token::Word("hello".to_string()),
3753 Token::RedirOutClobber,
3754 ];
3755 let result = parse(tokens);
3756 assert!(result.is_err());
3757 assert_eq!(result.unwrap_err(), "expected filename after >|");
3758 }
3759
3760 #[test]
3761 fn test_redirclobber_with_non_word_token() {
3762 let tokens = vec![
3764 Token::Word("echo".to_string()),
3765 Token::Word("hello".to_string()),
3766 Token::RedirOutClobber,
3767 Token::Pipe,
3768 ];
3769 let result = parse(tokens);
3770 assert!(result.is_err());
3771 assert_eq!(result.unwrap_err(), "expected filename after >|");
3772 }
3773
3774 #[test]
3775 fn test_redirclobber_with_valid_filename() {
3776 let tokens = vec![
3778 Token::Word("echo".to_string()),
3779 Token::Word("hello".to_string()),
3780 Token::RedirOutClobber,
3781 Token::Word("output.txt".to_string()),
3782 ];
3783 let result = parse(tokens).unwrap();
3784 assert_eq!(
3785 result,
3786 Ast::Pipeline(vec![ShellCommand {
3787 args: vec!["echo".to_string(), "hello".to_string()],
3788 redirections: vec![Redirection::OutputClobber("output.txt".to_string())],
3789 compound: None,
3790 }])
3791 );
3792 }
3793}