1use super::lexer::Token;
2
/// Abstract syntax tree produced by [`parse`] from the lexer's token stream.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Ast {
    /// One or more commands connected by `|`.
    Pipeline(Vec<ShellCommand>),
    /// Commands separated by newlines or `;`, run in order.
    Sequence(Vec<Ast>),
    /// `VAR=value` variable assignment.
    Assignment {
        var: String,
        value: String,
    },
    /// `local VAR=value` assignment (function-scoped).
    LocalAssignment {
        var: String,
        value: String,
    },
    /// `if`/`elif` chain: each entry in `branches` is a
    /// (condition, body) pair; `else_branch` is the optional `else` body.
    If {
        branches: Vec<(Box<Ast>, Box<Ast>)>,
        else_branch: Option<Box<Ast>>,
    },
    /// `case WORD in ...`: each entry pairs the patterns of one arm with
    /// its body; `default` holds the `*)` arm if present.
    Case {
        word: String,
        cases: Vec<(Vec<String>, Ast)>,
        default: Option<Box<Ast>>,
    },
    /// `for VARIABLE in ITEMS; do BODY; done`.
    For {
        variable: String,
        items: Vec<String>,
        body: Box<Ast>,
    },
    /// `while CONDITION; do BODY; done`.
    While {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// `until CONDITION; do BODY; done`.
    Until {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// `name() { BODY }` function definition.
    FunctionDefinition {
        name: String,
        body: Box<Ast>,
    },
    /// Invocation of a previously defined function with its arguments.
    FunctionCall {
        name: String,
        args: Vec<String>,
    },
    /// `return` with an optional status word.
    Return {
        value: Option<String>,
    },
    /// `left && right`: run `right` only if `left` succeeds.
    And {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `left || right`: run `right` only if `left` fails.
    Or {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `( BODY )` subshell.
    Subshell {
        body: Box<Ast>,
    },
    /// `{ BODY }` brace group (runs in the current shell).
    CommandGroup {
        body: Box<Ast>,
    },
}
67
/// A single I/O redirection attached to a command.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Redirection {
    /// `< file` — redirect stdin from `file`.
    Input(String),
    /// `> file` — redirect stdout to `file` (truncate).
    Output(String),
    /// `>> file` — redirect stdout to `file` (append).
    Append(String),
    /// `N< file` — redirect descriptor N from `file`.
    FdInput(i32, String),
    /// `N> file` — redirect descriptor N to `file` (truncate).
    FdOutput(i32, String),
    /// `N>> file` — redirect descriptor N to `file` (append).
    FdAppend(i32, String),
    /// `N>&M` — duplicate descriptor M onto N.
    FdDuplicate(i32, i32),
    /// `N>&-` — close descriptor N.
    FdClose(i32),
    /// `N<> file` — open `file` read/write on descriptor N.
    FdInputOutput(i32, String),
    /// `<< delim` here-document: delimiter plus a flag string carried from
    /// the lexer — presumably whether the delimiter was quoted; confirm
    /// against the lexer's `RedirHereDoc` token.
    HereDoc(String, String),
    /// `<<< word` here-string content.
    HereString(String),
}
94
/// One element of a pipeline: either a simple command (`args` non-empty)
/// or a compound element (`compound` set to a `Subshell`/`CommandGroup`
/// AST, with `args` left empty), plus any attached redirections.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ShellCommand {
    /// Command name followed by its arguments; empty when `compound` is used.
    pub args: Vec<String>,
    /// Redirections applied to this command, in source order.
    pub redirections: Vec<Redirection>,
    /// Compound body (subshell or brace group) standing in for a simple command.
    pub compound: Option<Box<Ast>>,
}
104
/// Returns `true` if `name` is a valid shell variable/function name:
/// a leading alphabetic character or `_`, followed only by alphanumeric
/// characters or `_`. An empty string is invalid.
///
/// Previously only the first character was checked, which let names like
/// `a-b` slip through and caused `a-b=c` to be misparsed as an assignment
/// instead of an ordinary command word.
fn is_valid_variable_name(name: &str) -> bool {
    let mut chars = name.chars();
    match chars.next() {
        // First char: alphabetic or underscore; the rest: alphanumeric or underscore.
        Some(first) if first.is_alphabetic() || first == '_' => {
            chars.all(|c| c.is_alphanumeric() || c == '_')
        }
        _ => false,
    }
}
114
115fn create_empty_body_ast() -> Ast {
118 Ast::Pipeline(vec![ShellCommand {
119 args: vec!["true".to_string()],
120 redirections: Vec::new(),
121 compound: None,
122 }])
123}
124
125fn skip_newlines(tokens: &[Token], i: &mut usize) {
128 while *i < tokens.len() && tokens[*i] == Token::Newline {
129 *i += 1;
130 }
131}
132
133fn skip_to_matching_fi(tokens: &[Token], i: &mut usize) {
136 let mut if_depth = 1;
137 *i += 1; while *i < tokens.len() && if_depth > 0 {
139 match tokens[*i] {
140 Token::If => if_depth += 1,
141 Token::Fi => if_depth -= 1,
142 _ => {}
143 }
144 *i += 1;
145 }
146}
147
148fn skip_to_matching_done(tokens: &[Token], i: &mut usize) {
151 let mut loop_depth = 1;
152 *i += 1; while *i < tokens.len() && loop_depth > 0 {
154 match tokens[*i] {
155 Token::For | Token::While | Token::Until => loop_depth += 1,
156 Token::Done => loop_depth -= 1,
157 _ => {}
158 }
159 *i += 1;
160 }
161}
162
163fn skip_to_matching_esac(tokens: &[Token], i: &mut usize) {
165 *i += 1; while *i < tokens.len() {
167 if tokens[*i] == Token::Esac {
168 *i += 1;
169 break;
170 }
171 *i += 1;
172 }
173}
174
/// Entry point: turn the whole token stream into an AST.
///
/// First special-cases a leading function definition (`name ( ) { ... }`
/// as four tokens, or `name()` fused into a single word followed by `{`)
/// so the definition and any trailing commands are parsed separately;
/// otherwise falls through to sequential command parsing.
pub fn parse(tokens: Vec<Token>) -> Result<Ast, String> {
    // Leading `name ( ) { ... }` function definition.
    if tokens.len() >= 4
        && let (Token::Word(_), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
    {
        // Scan for the brace that closes the function body. Compound
        // statements inside the body are skipped wholesale so their
        // keywords/braces cannot be mistaken for the closing brace.
        let mut brace_depth = 1;
        let mut function_end = tokens.len();
        let mut j = 4;
        while j < tokens.len() {
            match &tokens[j] {
                Token::LeftBrace => {
                    brace_depth += 1;
                    j += 1;
                }
                Token::RightBrace => {
                    brace_depth -= 1;
                    if brace_depth == 0 {
                        // Body ends just after this brace.
                        function_end = j + 1;
                        break;
                    }
                    j += 1;
                }
                Token::If => {
                    // Skip a nested if...fi block.
                    let mut if_depth = 1;
                    j += 1;
                    while j < tokens.len() && if_depth > 0 {
                        match tokens[j] {
                            Token::If => if_depth += 1,
                            Token::Fi => if_depth -= 1,
                            _ => {}
                        }
                        j += 1;
                    }
                }
                Token::For | Token::While | Token::Until => {
                    // Skip a nested loop...done block.
                    let mut for_depth = 1;
                    j += 1;
                    while j < tokens.len() && for_depth > 0 {
                        match tokens[j] {
                            Token::For | Token::While | Token::Until => for_depth += 1,
                            Token::Done => for_depth -= 1,
                            _ => {}
                        }
                        j += 1;
                    }
                }
                Token::Case => {
                    // Skip to the first esac (case nesting is not tracked).
                    j += 1;
                    while j < tokens.len() {
                        if tokens[j] == Token::Esac {
                            j += 1;
                            break;
                        }
                        j += 1;
                    }
                }
                _ => {
                    j += 1;
                }
            }
        }

        if brace_depth == 0 && function_end <= tokens.len() {
            let function_tokens = &tokens[0..function_end];
            let remaining_tokens = &tokens[function_end..];

            let function_ast = parse_function_definition(function_tokens)?;

            return if remaining_tokens.is_empty() {
                Ok(function_ast)
            } else {
                // Commands after the definition run as a sequence with it.
                let remaining_ast = parse_commands_sequentially(remaining_tokens)?;
                Ok(Ast::Sequence(vec![function_ast, remaining_ast]))
            };
        }
    }

    // `name()` fused into one word, followed by `{ ... }`.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
        && tokens[1] == Token::LeftBrace
    {
        return parse_function_definition(&tokens);
    }

    parse_commands_sequentially(&tokens)
}
275
/// Parses one command's worth of tokens (no top-level separators) into an
/// AST. Tries the special statement forms in order — assignment variants,
/// `local`, `return`, compound keywords, function definitions — and falls
/// back to an ordinary pipeline. The ordering of these checks is
/// load-bearing; do not reorder them.
fn parse_slice(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.is_empty() {
        return Err("No commands found".to_string());
    }

    // `VAR=head tail` split across two words: join the value halves.
    if tokens.len() == 2 {
        if let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
            && let Some(eq_pos) = var_eq.find('=')
            && eq_pos > 0
            && eq_pos < var_eq.len()
        {
            let var = var_eq[..eq_pos].to_string();
            let full_value = format!("{}{}", &var_eq[eq_pos + 1..], value);
            if is_valid_variable_name(&var) {
                return Ok(Ast::Assignment {
                    var,
                    value: full_value,
                });
            }
        }
    }

    // `VAR= value` — the `=` is the last character of the first word.
    if tokens.len() == 2
        && let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos == var_eq.len() - 1
    {
        let var = var_eq[..eq_pos].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment {
                var,
                value: value.clone(),
            });
        }
    }

    // `local VAR value` (or `local VAR= value`).
    if tokens.len() == 3
        && let (Token::Local, Token::Word(var), Token::Word(value)) =
            (&tokens[0], &tokens[1], &tokens[2])
    {
        // Strip a trailing `=` left on the variable word.
        let clean_var = if var.ends_with('=') {
            &var[..var.len() - 1]
        } else {
            var
        };
        if is_valid_variable_name(clean_var) {
            return Ok(Ast::LocalAssignment {
                var: clean_var.to_string(),
                value: value.clone(),
            });
        } else {
            return Err(format!("Invalid variable name: {}", clean_var));
        }
    }

    // `return` with at most one status word.
    if !tokens.is_empty()
        && tokens.len() <= 2
        && let Token::Return = &tokens[0]
    {
        if tokens.len() == 1 {
            return Ok(Ast::Return { value: None });
        } else if let Token::Word(word) = &tokens[1] {
            return Ok(Ast::Return {
                value: Some(word.clone()),
            });
        }
    }

    // `local VAR=value` as a single word.
    if tokens.len() == 2
        && let (Token::Local, Token::Word(var_eq)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos < var_eq.len()
    {
        let var = var_eq[..eq_pos].to_string();
        let value = var_eq[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::LocalAssignment { var, value });
        } else {
            return Err(format!("Invalid variable name: {}", var));
        }
    }

    // `local VAR` with no value: declare with an empty value.
    if tokens.len() == 2
        && let (Token::Local, Token::Word(var)) = (&tokens[0], &tokens[1])
        && !var.contains('=')
    {
        if is_valid_variable_name(var) {
            return Ok(Ast::LocalAssignment {
                var: var.clone(),
                value: String::new(),
            });
        } else {
            return Err(format!("Invalid variable name: {}", var));
        }
    }

    // Bare `VAR=value` as a single word.
    if tokens.len() == 1
        && let Token::Word(ref word) = tokens[0]
        && let Some(eq_pos) = word.find('=')
        && eq_pos > 0
        && eq_pos < word.len()
    {
        let var = word[..eq_pos].to_string();
        let value = word[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment { var, value });
        }
    }

    // Compound statements dispatch on their leading keyword.
    if let Token::If = tokens[0] {
        return parse_if(tokens);
    }

    if let Token::Case = tokens[0] {
        return parse_case(tokens);
    }

    if let Token::For = tokens[0] {
        return parse_for(tokens);
    }

    if let Token::While = tokens[0] {
        return parse_while(tokens);
    }

    if let Token::Until = tokens[0] {
        return parse_until(tokens);
    }

    // `name ( ) { ... }` function definition (four separate tokens).
    if tokens.len() >= 4
        && let (Token::Word(word), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
        && is_valid_variable_name(word)
    {
        return parse_function_definition(tokens);
    }

    // `name()` fused into one word, followed by `{`.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
    {
        let func_name = &word[..paren_pos];
        if is_valid_variable_name(func_name) && tokens[1] == Token::LeftBrace {
            return parse_function_definition(tokens);
        }
    }

    // Anything else is an ordinary (possibly single-command) pipeline.
    parse_pipeline(tokens)
}
458
/// Splits the token stream into top-level commands (separated by newlines
/// and semicolons), parses each with [`parse_slice`] or dedicated
/// subshell/group handling, and joins them into a single AST — the lone
/// command itself, or an [`Ast::Sequence`].
///
/// `&&`/`||` are handled by recursing on everything to the right of the
/// operator, which makes the operators effectively right-associative.
fn parse_commands_sequentially(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 0;
    let mut commands = Vec::new();

    while i < tokens.len() {
        // Skip blank lines and `#`-comment words up to end-of-line.
        while i < tokens.len() {
            match &tokens[i] {
                Token::Newline => {
                    i += 1;
                }
                Token::Word(word) if word.starts_with('#') => {
                    while i < tokens.len() && tokens[i] != Token::Newline {
                        i += 1;
                    }
                    if i < tokens.len() {
                        i += 1; // consume the terminating newline
                    }
                }
                _ => break,
            }
        }

        if i >= tokens.len() {
            break;
        }

        let start = i;

        // --- `( ... )` subshell at the start of a command ---
        if tokens[i] == Token::LeftParen {
            // Find the matching close paren.
            let mut paren_depth = 1;
            let mut j = i + 1;

            while j < tokens.len() && paren_depth > 0 {
                match tokens[j] {
                    Token::LeftParen => paren_depth += 1,
                    Token::RightParen => paren_depth -= 1,
                    _ => {}
                }
                j += 1;
            }

            if paren_depth != 0 {
                return Err("Unmatched parenthesis in subshell".to_string());
            }

            let subshell_tokens = &tokens[i + 1..j - 1];

            let body_ast = if subshell_tokens.is_empty() {
                return Err("Empty subshell".to_string());
            } else {
                parse_commands_sequentially(subshell_tokens)?
            };

            let mut subshell_ast = Ast::Subshell {
                body: Box::new(body_ast),
            };

            // Collect redirections that trail the closing paren.
            i = j;
            let mut redirections = Vec::new();
            while i < tokens.len() {
                match &tokens[i] {
                    Token::RedirOut => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Output(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirIn => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Input(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirAppend => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Append(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirectFdOut(fd, file) => {
                        redirections.push(Redirection::FdOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdIn(fd, file) => {
                        redirections.push(Redirection::FdInput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdAppend(fd, file) => {
                        redirections.push(Redirection::FdAppend(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdDup(from_fd, to_fd) => {
                        redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
                        i += 1;
                    }
                    Token::RedirectFdClose(fd) => {
                        redirections.push(Redirection::FdClose(*fd));
                        i += 1;
                    }
                    Token::RedirectFdInOut(fd, file) => {
                        redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirHereDoc(delimiter, quoted) => {
                        redirections
                            .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                        i += 1;
                    }
                    Token::RedirHereString(content) => {
                        redirections.push(Redirection::HereString(content.clone()));
                        i += 1;
                    }
                    _ => break,
                }
            }

            // Subshell feeding a pipe: rescan from `start` and let
            // parse_pipeline parse the whole pipeline (the AST and
            // redirections built above are discarded in favor of that).
            if i < tokens.len() && tokens[i] == Token::Pipe {
                // Find the end of the pipeline, tolerating newlines that
                // directly follow a `|`.
                let mut end = i;
                let mut brace_depth = 0;
                let mut paren_depth = 0;
                let mut last_was_pipe = true;
                while end < tokens.len() {
                    match &tokens[end] {
                        Token::Pipe => last_was_pipe = true,
                        Token::LeftBrace => {
                            brace_depth += 1;
                            last_was_pipe = false;
                        }
                        Token::RightBrace => {
                            if brace_depth > 0 {
                                brace_depth -= 1;
                            } else {
                                break;
                            }
                            last_was_pipe = false;
                        }
                        Token::LeftParen => {
                            paren_depth += 1;
                            last_was_pipe = false;
                        }
                        Token::RightParen => {
                            if paren_depth > 0 {
                                paren_depth -= 1;
                            } else {
                                break;
                            }
                            last_was_pipe = false;
                        }
                        Token::Newline | Token::Semicolon => {
                            if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
                                break;
                            }
                        }
                        Token::Word(_) => last_was_pipe = false,
                        _ => {}
                    }
                    end += 1;
                }

                let pipeline_ast = parse_pipeline(&tokens[start..end])?;
                commands.push(pipeline_ast);
                i = end;
                continue;
            }

            // Wrap the subshell in a ShellCommand so redirections attach.
            if !redirections.is_empty() {
                subshell_ast = Ast::Pipeline(vec![ShellCommand {
                    args: Vec::new(),
                    redirections,
                    compound: Some(Box::new(subshell_ast)),
                }]);
            }

            // `( ... ) && rest` / `( ... ) || rest`: the remainder of the
            // input becomes the right-hand side (right-associative).
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1;
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(subshell_ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(subshell_ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break; // the recursion consumed the rest of the input
            }

            commands.push(subshell_ast);

            if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
                i += 1;
            }
            continue;
        }

        // --- `{ ... }` command group at the start of a command ---
        // (mirrors the subshell branch above)
        if tokens[i] == Token::LeftBrace {
            let mut brace_depth = 1;
            let mut j = i + 1;

            while j < tokens.len() && brace_depth > 0 {
                match tokens[j] {
                    Token::LeftBrace => brace_depth += 1,
                    Token::RightBrace => brace_depth -= 1,
                    _ => {}
                }
                j += 1;
            }

            if brace_depth != 0 {
                return Err("Unmatched brace in command group".to_string());
            }

            let group_tokens = &tokens[i + 1..j - 1];

            let body_ast = if group_tokens.is_empty() {
                return Err("Empty command group".to_string());
            } else {
                parse_commands_sequentially(group_tokens)?
            };

            let mut group_ast = Ast::CommandGroup {
                body: Box::new(body_ast),
            };

            // Collect redirections trailing the closing brace.
            i = j;
            let mut redirections = Vec::new();
            while i < tokens.len() {
                match &tokens[i] {
                    Token::RedirOut => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Output(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirIn => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Input(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirAppend => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Append(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirectFdOut(fd, file) => {
                        redirections.push(Redirection::FdOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdIn(fd, file) => {
                        redirections.push(Redirection::FdInput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdAppend(fd, file) => {
                        redirections.push(Redirection::FdAppend(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdDup(from_fd, to_fd) => {
                        redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
                        i += 1;
                    }
                    Token::RedirectFdClose(fd) => {
                        redirections.push(Redirection::FdClose(*fd));
                        i += 1;
                    }
                    Token::RedirectFdInOut(fd, file) => {
                        redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirHereDoc(delimiter, quoted) => {
                        redirections
                            .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                        i += 1;
                    }
                    Token::RedirHereString(content) => {
                        redirections.push(Redirection::HereString(content.clone()));
                        i += 1;
                    }
                    _ => break,
                }
            }

            // Group feeding a pipe: rescan from `start` via parse_pipeline.
            if i < tokens.len() && tokens[i] == Token::Pipe {
                let mut end = i;
                let mut brace_depth = 0;
                let mut paren_depth = 0;
                let mut last_was_pipe = true;
                while end < tokens.len() {
                    match &tokens[end] {
                        Token::Pipe => last_was_pipe = true,
                        Token::LeftBrace => {
                            brace_depth += 1;
                            last_was_pipe = false;
                        }
                        Token::RightBrace => {
                            if brace_depth > 0 {
                                brace_depth -= 1;
                            } else {
                                break;
                            }
                            last_was_pipe = false;
                        }
                        Token::LeftParen => {
                            paren_depth += 1;
                            last_was_pipe = false;
                        }
                        Token::RightParen => {
                            if paren_depth > 0 {
                                paren_depth -= 1;
                            } else {
                                break;
                            }
                            last_was_pipe = false;
                        }
                        Token::Newline | Token::Semicolon => {
                            if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
                                break;
                            }
                        }
                        Token::Word(_) => last_was_pipe = false,
                        _ => {}
                    }
                    end += 1;
                }

                let pipeline_ast = parse_pipeline(&tokens[start..end])?;
                commands.push(pipeline_ast);
                i = end;
                continue;
            }

            if !redirections.is_empty() {
                group_ast = Ast::Pipeline(vec![ShellCommand {
                    args: Vec::new(),
                    redirections,
                    compound: Some(Box::new(group_ast)),
                }]);
            }

            // `{ ... } && rest` / `{ ... } || rest` (right-associative).
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1;
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(group_ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(group_ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break; // the recursion consumed the rest of the input
            }

            commands.push(group_ast);

            if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
                i += 1;
            }
            continue;
        }

        // --- Determine where the current command ends ---
        // Compound statements are consumed whole so their internal
        // separators don't split them; otherwise scan to the next
        // top-level separator.
        if tokens[i] == Token::If {
            // Consume the whole if...fi (depth starts at 0: the leading
            // `if` itself bumps it to 1).
            let mut depth = 0;
            while i < tokens.len() {
                match tokens[i] {
                    Token::If => depth += 1,
                    Token::Fi => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1;
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }

        } else if tokens[i] == Token::For {
            // Consume the whole for...done.
            let mut depth = 1;
            i += 1;
            while i < tokens.len() {
                match tokens[i] {
                    Token::For | Token::While | Token::Until => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1;
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::While {
            // Consume the whole while...done.
            let mut depth = 1;
            i += 1;
            while i < tokens.len() {
                match tokens[i] {
                    Token::While | Token::For | Token::Until => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1;
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::Until {
            // Consume the whole until...done.
            let mut depth = 1;
            i += 1;
            while i < tokens.len() {
                match tokens[i] {
                    Token::Until | Token::For | Token::While => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1;
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::Case {
            // Consume up to the first esac (nesting not tracked).
            while i < tokens.len() {
                if tokens[i] == Token::Esac {
                    i += 1;
                    break;
                }
                i += 1;
            }
        } else if i + 3 < tokens.len()
            && matches!(tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            // Consume a whole `name ( ) { ... }` function definition.
            let mut brace_depth = 1;
            i += 4;
            while i < tokens.len() && brace_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => brace_depth += 1,
                    Token::RightBrace => brace_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
        } else {
            // Ordinary command: scan to the next unnested separator,
            // letting a trailing `|` continue the command past newlines.
            let mut brace_depth = 0;
            let mut paren_depth = 0;
            let mut last_was_pipe = false;
            while i < tokens.len() {
                match &tokens[i] {
                    Token::LeftBrace => {
                        brace_depth += 1;
                        last_was_pipe = false;
                    }
                    Token::RightBrace => {
                        if brace_depth > 0 {
                            brace_depth -= 1;
                        } else {
                            break;
                        }
                        last_was_pipe = false;
                    }
                    Token::LeftParen => {
                        paren_depth += 1;
                        last_was_pipe = false;
                    }
                    Token::RightParen => {
                        if paren_depth > 0 {
                            paren_depth -= 1;
                        } else {
                            break;
                        }
                        last_was_pipe = false;
                    }
                    Token::Pipe => last_was_pipe = true,
                    Token::Newline | Token::Semicolon | Token::And | Token::Or => {
                        if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
                            break;
                        }
                    }
                    Token::Word(_) => last_was_pipe = false,
                    _ => {}
                }
                i += 1;
            }
        }

        let command_tokens = &tokens[start..i];
        if !command_tokens.is_empty() {
            // A stray lone `else`/`elif`/`fi` (e.g. from sloppy splitting)
            // is silently skipped rather than parsed.
            if command_tokens.len() == 1 {
                match command_tokens[0] {
                    Token::Else | Token::Elif | Token::Fi => {
                        if i < tokens.len()
                            && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
                        {
                            i += 1;
                        }
                        continue;
                    }
                    _ => {}
                }
            }

            let ast = parse_slice(command_tokens)?;

            // `cmd && rest` / `cmd || rest` (right-associative).
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1;
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break; // the recursion consumed the rest of the input
            } else {
                commands.push(ast);
            }
        }

        // Consume the separator that ended this command.
        if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
            i += 1;
        }
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    // Collapse a single command to itself; otherwise wrap in a Sequence.
    if commands.len() == 1 {
        Ok(commands.into_iter().next().unwrap())
    } else {
        Ok(Ast::Sequence(commands))
    }
}
1114
/// Parses a run of tokens into an [`Ast::Pipeline`]: commands separated by
/// `|`, each built from words, keyword-words, redirections, and optionally
/// a `{ ... }` group or `( ... )` subshell as a compound element.
/// Stops at top-level `&&`, `||`, `;`, or a newline following a non-empty
/// command; callers slice the token stream accordingly.
fn parse_pipeline(tokens: &[Token]) -> Result<Ast, String> {
    let mut commands = Vec::new();
    let mut current_cmd = ShellCommand::default();

    let mut i = 0;
    while i < tokens.len() {
        let token = &tokens[i];
        match token {
            // `{ ... }` group as a pipeline element.
            Token::LeftBrace => {
                // Find the matching close brace.
                let mut brace_depth = 1;
                let mut j = i + 1;

                while j < tokens.len() && brace_depth > 0 {
                    match tokens[j] {
                        Token::LeftBrace => brace_depth += 1,
                        Token::RightBrace => brace_depth -= 1,
                        _ => {}
                    }
                    j += 1;
                }

                if brace_depth != 0 {
                    return Err("Unmatched brace in pipeline".to_string());
                }

                let group_tokens = &tokens[i + 1..j - 1];

                // An empty group gets a no-op body instead of an error here.
                let body_ast = if group_tokens.is_empty() {
                    create_empty_body_ast()
                } else {
                    parse_commands_sequentially(group_tokens)?
                };

                current_cmd.compound = Some(Box::new(Ast::CommandGroup {
                    body: Box::new(body_ast),
                }));

                // Attach redirections that trail the closing brace.
                i = j;
                while i < tokens.len() {
                    match &tokens[i] {
                        Token::RedirOut => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Output(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirIn => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Input(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirAppend => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Append(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirectFdOut(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdIn(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdInput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdAppend(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdAppend(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdDup(from_fd, to_fd) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdDuplicate(*from_fd, *to_fd));
                            i += 1;
                        }
                        Token::RedirectFdClose(fd) => {
                            current_cmd.redirections.push(Redirection::FdClose(*fd));
                            i += 1;
                        }
                        Token::RedirectFdInOut(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdInputOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirHereDoc(delimiter, quoted) => {
                            current_cmd
                                .redirections
                                .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                            i += 1;
                        }
                        Token::RedirHereString(content) => {
                            current_cmd
                                .redirections
                                .push(Redirection::HereString(content.clone()));
                            i += 1;
                        }
                        Token::Pipe => {
                            // Let the outer loop handle the pipe.
                            break;
                        }
                        _ => break,
                    }
                }

                // `i` already points past what we consumed; skip the
                // outer loop's increment.
                continue;
            }
            // `( ... )` subshell as a pipeline element.
            Token::LeftParen => {
                // Find the matching close paren.
                let mut paren_depth = 1;
                let mut j = i + 1;

                while j < tokens.len() && paren_depth > 0 {
                    match tokens[j] {
                        Token::LeftParen => paren_depth += 1,
                        Token::RightParen => paren_depth -= 1,
                        _ => {}
                    }
                    j += 1;
                }

                if paren_depth != 0 {
                    return Err("Unmatched parenthesis in pipeline".to_string());
                }

                let subshell_tokens = &tokens[i + 1..j - 1];

                // An empty subshell gets a no-op body instead of an error here.
                let body_ast = if subshell_tokens.is_empty() {
                    create_empty_body_ast()
                } else {
                    parse_commands_sequentially(subshell_tokens)?
                };

                current_cmd.compound = Some(Box::new(Ast::Subshell {
                    body: Box::new(body_ast),
                }));

                // Attach redirections that trail the closing paren.
                i = j;
                while i < tokens.len() {
                    match &tokens[i] {
                        Token::RedirOut => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Output(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirIn => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Input(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirAppend => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Append(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirectFdOut(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdIn(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdInput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdAppend(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdAppend(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdDup(from_fd, to_fd) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdDuplicate(*from_fd, *to_fd));
                            i += 1;
                        }
                        Token::RedirectFdClose(fd) => {
                            current_cmd.redirections.push(Redirection::FdClose(*fd));
                            i += 1;
                        }
                        Token::RedirectFdInOut(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdInputOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirHereDoc(delimiter, quoted) => {
                            current_cmd
                                .redirections
                                .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                            i += 1;
                        }
                        Token::RedirHereString(content) => {
                            current_cmd
                                .redirections
                                .push(Redirection::HereString(content.clone()));
                            i += 1;
                        }
                        Token::Pipe => {
                            // Let the outer loop handle the pipe.
                            break;
                        }
                        _ => break,
                    }
                }

                // `i` already points past what we consumed.
                continue;
            }
            Token::Word(word) => {
                current_cmd.args.push(word.clone());
            }
            // Keywords appearing mid-command are demoted to plain argument
            // words (e.g. `echo if`).
            Token::Local => {
                current_cmd.args.push("local".to_string());
            }
            Token::Return => {
                current_cmd.args.push("return".to_string());
            }
            Token::Break => {
                current_cmd.args.push("break".to_string());
            }
            Token::Continue => {
                current_cmd.args.push("continue".to_string());
            }
            Token::If => {
                current_cmd.args.push("if".to_string());
            }
            Token::Then => {
                current_cmd.args.push("then".to_string());
            }
            Token::Else => {
                current_cmd.args.push("else".to_string());
            }
            Token::Elif => {
                current_cmd.args.push("elif".to_string());
            }
            Token::Fi => {
                current_cmd.args.push("fi".to_string());
            }
            Token::Case => {
                current_cmd.args.push("case".to_string());
            }
            Token::In => {
                current_cmd.args.push("in".to_string());
            }
            Token::Esac => {
                current_cmd.args.push("esac".to_string());
            }
            Token::For => {
                current_cmd.args.push("for".to_string());
            }
            Token::While => {
                current_cmd.args.push("while".to_string());
            }
            Token::Until => {
                current_cmd.args.push("until".to_string());
            }
            Token::Do => {
                current_cmd.args.push("do".to_string());
            }
            Token::Done => {
                current_cmd.args.push("done".to_string());
            }
            // `|`: close out the current command and start the next one.
            Token::Pipe => {
                if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
                    commands.push(current_cmd.clone());
                    current_cmd = ShellCommand::default();
                }
            }
            // Word-form redirections consume the following filename word.
            Token::RedirIn => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Input(file.clone()));
                }
            }
            Token::RedirOut => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Output(file.clone()));
                }
            }
            Token::RedirAppend => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Append(file.clone()));
                }
            }
            Token::RedirHereDoc(delimiter, quoted) => {
                current_cmd
                    .redirections
                    .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
            }
            Token::RedirHereString(content) => {
                current_cmd
                    .redirections
                    .push(Redirection::HereString(content.clone()));
            }
            // Descriptor-form redirections carry their payload in the token.
            Token::RedirectFdIn(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdInput(*fd, file.clone()));
            }
            Token::RedirectFdOut(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdOutput(*fd, file.clone()));
            }
            Token::RedirectFdAppend(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdAppend(*fd, file.clone()));
            }
            Token::RedirectFdDup(from_fd, to_fd) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdDuplicate(*from_fd, *to_fd));
            }
            Token::RedirectFdClose(fd) => {
                current_cmd.redirections.push(Redirection::FdClose(*fd));
            }
            Token::RedirectFdInOut(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdInputOutput(*fd, file.clone()));
            }
            // A `)` immediately after `(` with a command pending ends the
            // pipeline; any other bare `)` is an error.
            Token::RightParen => {
                if !current_cmd.args.is_empty()
                    && i > 0
                    && let Token::LeftParen = tokens[i - 1]
                {
                    break;
                }
                return Err("Unexpected ) in pipeline".to_string());
            }
            Token::Newline => {
                if current_cmd.args.is_empty() && current_cmd.compound.is_none() {
                    // Nothing accumulated yet: ignore the blank line.
                } else {
                    break;
                }
            }
            // Top-level command separators end the pipeline.
            Token::And | Token::Or | Token::Semicolon => {
                break;
            }
            _ => {
                return Err(format!("Unexpected token in pipeline: {:?}", token));
            }
        }
        i += 1;
    }

    // Flush the final command if it has any content.
    if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
        commands.push(current_cmd);
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    Ok(Ast::Pipeline(commands))
}
1560
/// Parse an `if cond; then body [elif cond; then body]... [else body] fi`
/// construct. `tokens[0]` is assumed to be `Token::If`.
///
/// Returns `Ast::If` with one `(condition, then-body)` pair per `if`/`elif`
/// branch, or an error if `then` or the closing `fi` is missing.
fn parse_if(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 1; let mut branches = Vec::new();

    // One iteration per `if`/`elif` branch.
    loop {
        // Condition: everything up to the first `;`, newline, or `then`.
        let mut cond_tokens = Vec::new();
        while i < tokens.len()
            && tokens[i] != Token::Semicolon
            && tokens[i] != Token::Newline
            && tokens[i] != Token::Then
        {
            cond_tokens.push(tokens[i].clone());
            i += 1;
        }

        // Consume the separator (`;` or newline) before `then`, if present.
        if i < tokens.len() && (tokens[i] == Token::Semicolon || tokens[i] == Token::Newline) {
            i += 1;
        }

        skip_newlines(tokens, &mut i);

        if i >= tokens.len() || tokens[i] != Token::Then {
            return Err("Expected then after if/elif condition".to_string());
        }
        i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Then-body: scan until `fi`/`else`/`elif` at nesting depth 0.
        // `depth` tracks nested `if ... fi` pairs so their keywords are kept
        // as ordinary body tokens.
        let mut then_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    then_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        then_tokens.push(tokens[i].clone());
                    } else {
                        break; }
                }
                Token::Else | Token::Elif if depth == 0 => {
                    break; }
                Token::Newline => {
                    // Lookahead past a run of newlines: if the next real token
                    // ends this branch, drop the newlines from the body and
                    // position `i` on that terminator.
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len()
                        && depth == 0
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        i = j; break;
                    }
                    then_tokens.push(tokens[i].clone());
                }
                _ => {
                    then_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        skip_newlines(tokens, &mut i);

        // An empty then-body (e.g. `then fi`) becomes a no-op `true` command.
        let then_ast = if then_tokens.is_empty() {
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&then_tokens)?
        };

        let condition = parse_slice(&cond_tokens)?;
        branches.push((Box::new(condition), Box::new(then_ast)));

        // `elif` loops back for another condition/branch; anything else
        // falls through to the optional `else` / required `fi`.
        if i < tokens.len() && tokens[i] == Token::Elif {
            i += 1; } else {
            break;
        }
    }

    let else_ast = if i < tokens.len() && tokens[i] == Token::Else {
        i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Else-body: same depth-tracking scan, terminated only by `fi`.
        let mut else_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    else_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        else_tokens.push(tokens[i].clone());
                    } else {
                        break; }
                }
                Token::Newline => {
                    // Same lookahead trick: trailing newlines directly before
                    // `fi` are not part of the body.
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len() && depth == 0 && tokens[j] == Token::Fi {
                        i = j; break;
                    }
                    else_tokens.push(tokens[i].clone());
                }
                _ => {
                    else_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        let else_ast = if else_tokens.is_empty() {
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&else_tokens)?
        };

        Some(Box::new(else_ast))
    } else {
        None
    };

    if i >= tokens.len() || tokens[i] != Token::Fi {
        return Err("Expected fi".to_string());
    }

    Ok(Ast::If {
        branches,
        else_branch: else_ast,
    })
}
1727
1728fn parse_case(tokens: &[Token]) -> Result<Ast, String> {
1729 let mut i = 1; if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
1733 return Err("Expected word after case".to_string());
1734 }
1735 let word = if let Token::Word(ref w) = tokens[i] {
1736 w.clone()
1737 } else {
1738 unreachable!()
1739 };
1740 i += 1;
1741
1742 if i >= tokens.len() || tokens[i] != Token::In {
1743 return Err("Expected in after case word".to_string());
1744 }
1745 i += 1;
1746
1747 let mut cases = Vec::new();
1748 let mut default = None;
1749
1750 loop {
1751 while i < tokens.len() && tokens[i] == Token::Newline {
1753 i += 1;
1754 }
1755
1756 if i >= tokens.len() {
1757 return Err("Unexpected end in case statement".to_string());
1758 }
1759
1760 if tokens[i] == Token::Esac {
1761 break;
1762 }
1763
1764 let mut patterns = Vec::new();
1766 while i < tokens.len() && tokens[i] != Token::RightParen {
1767 if let Token::Word(ref p) = tokens[i] {
1768 for pat in p.split('|') {
1770 patterns.push(pat.to_string());
1771 }
1772 } else if tokens[i] == Token::Pipe {
1773 } else if tokens[i] == Token::Newline {
1775 } else {
1777 return Err(format!("Expected pattern, found {:?}", tokens[i]));
1778 }
1779 i += 1;
1780 }
1781
1782 if i >= tokens.len() || tokens[i] != Token::RightParen {
1783 return Err("Expected ) after patterns".to_string());
1784 }
1785 i += 1;
1786
1787 let mut commands_tokens = Vec::new();
1789 while i < tokens.len() && tokens[i] != Token::DoubleSemicolon && tokens[i] != Token::Esac {
1790 commands_tokens.push(tokens[i].clone());
1791 i += 1;
1792 }
1793
1794 let commands_ast = parse_slice(&commands_tokens)?;
1795
1796 if i >= tokens.len() {
1797 return Err("Unexpected end in case statement".to_string());
1798 }
1799
1800 if tokens[i] == Token::DoubleSemicolon {
1801 i += 1;
1802 if patterns.len() == 1 && patterns[0] == "*" {
1804 default = Some(Box::new(commands_ast));
1805 } else {
1806 cases.push((patterns, commands_ast));
1807 }
1808 } else if tokens[i] == Token::Esac {
1809 if patterns.len() == 1 && patterns[0] == "*" {
1811 default = Some(Box::new(commands_ast));
1812 } else {
1813 cases.push((patterns, commands_ast));
1814 }
1815 break;
1816 } else {
1817 return Err("Expected ;; or esac after commands".to_string());
1818 }
1819 }
1820
1821 Ok(Ast::Case {
1822 word,
1823 cases,
1824 default,
1825 })
1826}
1827
1828fn parse_for(tokens: &[Token]) -> Result<Ast, String> {
1829 let mut i = 1; if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
1833 return Err("Expected variable name after for".to_string());
1834 }
1835 let variable = if let Token::Word(ref v) = tokens[i] {
1836 v.clone()
1837 } else {
1838 unreachable!()
1839 };
1840 i += 1;
1841
1842 if i >= tokens.len() || tokens[i] != Token::In {
1844 return Err("Expected 'in' after for variable".to_string());
1845 }
1846 i += 1;
1847
1848 let mut items = Vec::new();
1850 while i < tokens.len() {
1851 match &tokens[i] {
1852 Token::Do => break,
1853 Token::Semicolon | Token::Newline => {
1854 i += 1;
1855 if i < tokens.len() && tokens[i] == Token::Do {
1857 break;
1858 }
1859 }
1860 Token::Word(word) => {
1861 items.push(word.clone());
1862 i += 1;
1863 }
1864 _ => {
1865 return Err(format!("Unexpected token in for items: {:?}", tokens[i]));
1866 }
1867 }
1868 }
1869
1870 while i < tokens.len() && tokens[i] == Token::Newline {
1872 i += 1;
1873 }
1874
1875 if i >= tokens.len() || tokens[i] != Token::Do {
1877 return Err("Expected 'do' in for loop".to_string());
1878 }
1879 i += 1;
1880
1881 while i < tokens.len() && tokens[i] == Token::Newline {
1883 i += 1;
1884 }
1885
1886 let mut body_tokens = Vec::new();
1888 let mut depth = 0;
1889 while i < tokens.len() {
1890 match &tokens[i] {
1891 Token::For => {
1892 depth += 1;
1893 body_tokens.push(tokens[i].clone());
1894 }
1895 Token::Done => {
1896 if depth > 0 {
1897 depth -= 1;
1898 body_tokens.push(tokens[i].clone());
1899 } else {
1900 break; }
1902 }
1903 Token::Newline => {
1904 let mut j = i + 1;
1906 while j < tokens.len() && tokens[j] == Token::Newline {
1907 j += 1;
1908 }
1909 if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
1910 i = j; break;
1912 }
1913 body_tokens.push(tokens[i].clone());
1915 }
1916 _ => {
1917 body_tokens.push(tokens[i].clone());
1918 }
1919 }
1920 i += 1;
1921 }
1922
1923 if i >= tokens.len() || tokens[i] != Token::Done {
1924 return Err("Expected 'done' to close for loop".to_string());
1925 }
1926
1927 let body_ast = if body_tokens.is_empty() {
1929 create_empty_body_ast()
1931 } else {
1932 parse_commands_sequentially(&body_tokens)?
1933 };
1934
1935 Ok(Ast::For {
1936 variable,
1937 items,
1938 body: Box::new(body_ast),
1939 })
1940}
1941
/// Parse a `while cond; do body done` loop. `tokens[0]` is assumed to be
/// `Token::While`.
///
/// Returns `Ast::While` or an error if the condition is empty or `do`/`done`
/// is missing.
fn parse_while(tokens: &[Token]) -> Result<Ast, String> {
    // Condition: everything before `do`; `;`/newline separators are dropped.
    let mut i = 1; let mut cond_tokens = Vec::new();
    while i < tokens.len() {
        match &tokens[i] {
            Token::Do => break,
            Token::Semicolon | Token::Newline => {
                i += 1;
                if i < tokens.len() && tokens[i] == Token::Do {
                    break;
                }
            }
            _ => {
                cond_tokens.push(tokens[i].clone());
                i += 1;
            }
        }
    }

    if cond_tokens.is_empty() {
        return Err("Expected condition after while".to_string());
    }

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Do {
        return Err("Expected 'do' in while loop".to_string());
    }
    i += 1;

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    // Body: scan until the matching `done`. `depth` tracks nested loop
    // constructs (while/for/until all close with `done`) so their keywords
    // stay in the body.
    let mut body_tokens = Vec::new();
    let mut depth = 0;
    while i < tokens.len() {
        match &tokens[i] {
            Token::While | Token::For | Token::Until => {
                depth += 1;
                body_tokens.push(tokens[i].clone());
            }
            Token::Done => {
                if depth > 0 {
                    depth -= 1;
                    body_tokens.push(tokens[i].clone());
                } else {
                    break; }
            }
            Token::Newline => {
                // Lookahead: newlines that sit directly before the closing
                // `done` are dropped from the body and `i` jumps to `done`.
                let mut j = i + 1;
                while j < tokens.len() && tokens[j] == Token::Newline {
                    j += 1;
                }
                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
                    i = j; break;
                }
                body_tokens.push(tokens[i].clone());
            }
            _ => {
                body_tokens.push(tokens[i].clone());
            }
        }
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Done {
        return Err("Expected 'done' to close while loop".to_string());
    }

    let condition_ast = parse_slice(&cond_tokens)?;

    // An empty body is a valid no-op.
    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(&body_tokens)?
    };

    Ok(Ast::While {
        condition: Box::new(condition_ast),
        body: Box::new(body_ast),
    })
}
2041
2042fn parse_until(tokens: &[Token]) -> Result<Ast, String> {
2043 let mut i = 1; let mut cond_tokens = Vec::new();
2047 while i < tokens.len() {
2048 match &tokens[i] {
2049 Token::Do => break,
2050 Token::Semicolon | Token::Newline => {
2051 i += 1;
2052 if i < tokens.len() && tokens[i] == Token::Do {
2054 break;
2055 }
2056 }
2057 _ => {
2058 cond_tokens.push(tokens[i].clone());
2059 i += 1;
2060 }
2061 }
2062 }
2063
2064 if cond_tokens.is_empty() {
2065 return Err("Expected condition after until".to_string());
2066 }
2067
2068 while i < tokens.len() && tokens[i] == Token::Newline {
2070 i += 1;
2071 }
2072
2073 if i >= tokens.len() || tokens[i] != Token::Do {
2075 return Err("Expected 'do' in until loop".to_string());
2076 }
2077 i += 1;
2078
2079 while i < tokens.len() && tokens[i] == Token::Newline {
2081 i += 1;
2082 }
2083
2084 let mut body_tokens = Vec::new();
2086 let mut depth = 0;
2087 while i < tokens.len() {
2088 match &tokens[i] {
2089 Token::While | Token::For | Token::Until => {
2090 depth += 1;
2091 body_tokens.push(tokens[i].clone());
2092 }
2093 Token::Done => {
2094 if depth > 0 {
2095 depth -= 1;
2096 body_tokens.push(tokens[i].clone());
2097 } else {
2098 break; }
2100 }
2101 Token::Newline => {
2102 let mut j = i + 1;
2104 while j < tokens.len() && tokens[j] == Token::Newline {
2105 j += 1;
2106 }
2107 if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
2108 i = j; break;
2110 }
2111 body_tokens.push(tokens[i].clone());
2113 }
2114 _ => {
2115 body_tokens.push(tokens[i].clone());
2116 }
2117 }
2118 i += 1;
2119 }
2120
2121 if i >= tokens.len() || tokens[i] != Token::Done {
2122 return Err("Expected 'done' to close until loop".to_string());
2123 }
2124
2125 let condition_ast = parse_slice(&cond_tokens)?;
2127
2128 let body_ast = if body_tokens.is_empty() {
2130 create_empty_body_ast()
2132 } else {
2133 parse_commands_sequentially(&body_tokens)?
2134 };
2135
2136 Ok(Ast::Until {
2137 condition: Box::new(condition_ast),
2138 body: Box::new(body_ast),
2139 })
2140}
2141
/// Parse a function definition in either modern form
/// (`name ( ) { body }`) or legacy form (a single `name()` word followed by
/// `{ body }`).
///
/// Returns `Ast::FunctionDefinition` or an error when the name, the opening
/// `{`, or the matching closing `}` is missing.
fn parse_function_definition(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.len() < 2 {
        return Err("Function definition too short".to_string());
    }

    // Name: either a plain word, or a `name()` word in the legacy form
    // (strip everything from the first `(`).
    let func_name = if let Token::Word(word) = &tokens[0] {
        if let Some(paren_pos) = word.find('(') {
            if word.ends_with(')') && paren_pos > 0 {
                word[..paren_pos].to_string()
            } else {
                word.clone()
            }
        } else {
            word.clone()
        }
    } else {
        return Err("Function name must be a word".to_string());
    };

    // Index of the opening `{`: 3 for `name ( ) {`, 1 for `name() {`.
    let brace_pos =
        if tokens.len() >= 4 && tokens[1] == Token::LeftParen && tokens[2] == Token::RightParen {
            if tokens[3] != Token::LeftBrace {
                return Err("Expected { after function name".to_string());
            }
            3
        } else if tokens.len() >= 2 && tokens[1] == Token::LeftBrace {
            1
        } else {
            return Err("Expected ( after function name or { for legacy format".to_string());
        };

    // Scan forward for the `}` that closes THIS function, skipping over
    // nested function definitions and brace-free compound constructs whose
    // keywords (`fi`, `done`, `esac`) could otherwise confuse the count.
    let mut brace_depth = 0;
    let mut body_end = 0;
    let mut found_closing = false;
    let mut i = brace_pos + 1;

    while i < tokens.len() {
        // Nested `word ( ) {` function definition: skip its whole brace
        // block so its `}` is not mistaken for ours.
        if i + 3 < tokens.len()
            && matches!(&tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            i += 4;
            let mut nested_depth = 1;
            while i < tokens.len() && nested_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => nested_depth += 1,
                    Token::RightBrace => nested_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            continue;
        }

        match &tokens[i] {
            Token::LeftBrace => {
                brace_depth += 1;
                i += 1;
            }
            Token::RightBrace => {
                if brace_depth == 0 {
                    // This `}` closes the function body.
                    body_end = i;
                    found_closing = true;
                    break;
                } else {
                    brace_depth -= 1;
                    i += 1;
                }
            }
            Token::If => {
                // Helpers advance `i` past the whole construct.
                skip_to_matching_fi(tokens, &mut i);
            }
            Token::For | Token::While | Token::Until => {
                skip_to_matching_done(tokens, &mut i);
            }
            Token::Case => {
                skip_to_matching_esac(tokens, &mut i);
            }
            _ => {
                i += 1;
            }
        }
    }

    if !found_closing {
        return Err("Missing closing } for function definition".to_string());
    }

    // Body is everything strictly between the braces.
    let body_tokens = &tokens[brace_pos + 1..body_end];

    // An empty body is a valid no-op.
    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(body_tokens)?
    };

    Ok(Ast::FunctionDefinition {
        name: func_name,
        body: Box::new(body_ast),
    })
}
2263
2264#[cfg(test)]
2265mod tests {
2266 use super::super::lexer::Token;
2267 use super::*;
2268
2269 #[test]
2270 fn test_single_command() {
2271 let tokens = vec![Token::Word("ls".to_string())];
2272 let result = parse(tokens).unwrap();
2273 assert_eq!(
2274 result,
2275 Ast::Pipeline(vec![ShellCommand {
2276 args: vec!["ls".to_string()],
2277 redirections: Vec::new(),
2278 compound: None,
2279 }])
2280 );
2281 }
2282
2283 #[test]
2284 fn test_command_with_args() {
2285 let tokens = vec![
2286 Token::Word("ls".to_string()),
2287 Token::Word("-la".to_string()),
2288 ];
2289 let result = parse(tokens).unwrap();
2290 assert_eq!(
2291 result,
2292 Ast::Pipeline(vec![ShellCommand {
2293 args: vec!["ls".to_string(), "-la".to_string()],
2294 redirections: Vec::new(),
2295 compound: None,
2296 }])
2297 );
2298 }
2299
2300 #[test]
2301 fn test_pipeline() {
2302 let tokens = vec![
2303 Token::Word("ls".to_string()),
2304 Token::Pipe,
2305 Token::Word("grep".to_string()),
2306 Token::Word("txt".to_string()),
2307 ];
2308 let result = parse(tokens).unwrap();
2309 assert_eq!(
2310 result,
2311 Ast::Pipeline(vec![
2312 ShellCommand {
2313 args: vec!["ls".to_string()],
2314 redirections: Vec::new(),
2315 compound: None,
2316 },
2317 ShellCommand {
2318 args: vec!["grep".to_string(), "txt".to_string()],
2319 redirections: Vec::new(),
2320 compound: None,
2321 }
2322 ])
2323 );
2324 }
2325
2326 #[test]
2327 fn test_input_redirection() {
2328 let tokens = vec![
2329 Token::Word("cat".to_string()),
2330 Token::RedirIn,
2331 Token::Word("input.txt".to_string()),
2332 ];
2333 let result = parse(tokens).unwrap();
2334 assert_eq!(
2335 result,
2336 Ast::Pipeline(vec![ShellCommand {
2337 args: vec!["cat".to_string()],
2338 redirections: vec![Redirection::Input("input.txt".to_string())],
2339 compound: None,
2340 }])
2341 );
2342 }
2343
2344 #[test]
2345 fn test_output_redirection() {
2346 let tokens = vec![
2347 Token::Word("printf".to_string()),
2348 Token::Word("hello".to_string()),
2349 Token::RedirOut,
2350 Token::Word("output.txt".to_string()),
2351 ];
2352 let result = parse(tokens).unwrap();
2353 assert_eq!(
2354 result,
2355 Ast::Pipeline(vec![ShellCommand {
2356 args: vec!["printf".to_string(), "hello".to_string()],
2357 compound: None,
2358 redirections: vec![Redirection::Output("output.txt".to_string())],
2359 }])
2360 );
2361 }
2362
2363 #[test]
2364 fn test_append_redirection() {
2365 let tokens = vec![
2366 Token::Word("printf".to_string()),
2367 Token::Word("hello".to_string()),
2368 Token::RedirAppend,
2369 Token::Word("output.txt".to_string()),
2370 ];
2371 let result = parse(tokens).unwrap();
2372 assert_eq!(
2373 result,
2374 Ast::Pipeline(vec![ShellCommand {
2375 args: vec!["printf".to_string(), "hello".to_string()],
2376 compound: None,
2377 redirections: vec![Redirection::Append("output.txt".to_string())],
2378 }])
2379 );
2380 }
2381
2382 #[test]
2383 fn test_complex_pipeline_with_redirections() {
2384 let tokens = vec![
2385 Token::Word("cat".to_string()),
2386 Token::RedirIn,
2387 Token::Word("input.txt".to_string()),
2388 Token::Pipe,
2389 Token::Word("grep".to_string()),
2390 Token::Word("pattern".to_string()),
2391 Token::Pipe,
2392 Token::Word("sort".to_string()),
2393 Token::RedirOut,
2394 Token::Word("output.txt".to_string()),
2395 ];
2396 let result = parse(tokens).unwrap();
2397 assert_eq!(
2398 result,
2399 Ast::Pipeline(vec![
2400 ShellCommand {
2401 args: vec!["cat".to_string()],
2402 compound: None,
2403 redirections: vec![Redirection::Input("input.txt".to_string())],
2404 },
2405 ShellCommand {
2406 args: vec!["grep".to_string(), "pattern".to_string()],
2407 compound: None,
2408 redirections: Vec::new(),
2409 },
2410 ShellCommand {
2411 args: vec!["sort".to_string()],
2412 redirections: vec![Redirection::Output("output.txt".to_string())],
2413 compound: None,
2414 }
2415 ])
2416 );
2417 }
2418
2419 #[test]
2420 fn test_empty_tokens() {
2421 let tokens = vec![];
2422 let result = parse(tokens);
2423 assert!(result.is_err());
2424 assert_eq!(result.unwrap_err(), "No commands found");
2425 }
2426
2427 #[test]
2428 fn test_only_pipe() {
2429 let tokens = vec![Token::Pipe];
2430 let result = parse(tokens);
2431 assert!(result.is_err());
2432 assert_eq!(result.unwrap_err(), "No commands found");
2433 }
2434
2435 #[test]
2436 fn test_redirection_without_file() {
2437 let tokens = vec![Token::Word("cat".to_string()), Token::RedirIn];
2439 let result = parse(tokens).unwrap();
2440 assert_eq!(
2441 result,
2442 Ast::Pipeline(vec![ShellCommand {
2443 args: vec!["cat".to_string()],
2444 compound: None,
2445 redirections: Vec::new(),
2446 }])
2447 );
2448 }
2449
2450 #[test]
2451 fn test_multiple_redirections() {
2452 let tokens = vec![
2453 Token::Word("cat".to_string()),
2454 Token::RedirIn,
2455 Token::Word("file1.txt".to_string()),
2456 Token::RedirOut,
2457 Token::Word("file2.txt".to_string()),
2458 ];
2459 let result = parse(tokens).unwrap();
2460 assert_eq!(
2461 result,
2462 Ast::Pipeline(vec![ShellCommand {
2463 args: vec!["cat".to_string()],
2464 redirections: vec![
2465 Redirection::Input("file1.txt".to_string()),
2466 Redirection::Output("file2.txt".to_string()),
2467 ],
2468 compound: None,
2469 }])
2470 );
2471 }
2472
2473 #[test]
2474 fn test_parse_if() {
2475 let tokens = vec![
2476 Token::If,
2477 Token::Word("true".to_string()),
2478 Token::Semicolon,
2479 Token::Then,
2480 Token::Word("printf".to_string()),
2481 Token::Word("yes".to_string()),
2482 Token::Semicolon,
2483 Token::Fi,
2484 ];
2485 let result = parse(tokens).unwrap();
2486 if let Ast::If {
2487 branches,
2488 else_branch,
2489 } = result
2490 {
2491 assert_eq!(branches.len(), 1);
2492 let (condition, then_branch) = &branches[0];
2493 if let Ast::Pipeline(cmds) = &**condition {
2494 assert_eq!(cmds[0].args, vec!["true"]);
2495 } else {
2496 panic!("condition not pipeline");
2497 }
2498 if let Ast::Pipeline(cmds) = &**then_branch {
2499 assert_eq!(cmds[0].args, vec!["printf", "yes"]);
2500 } else {
2501 panic!("then_branch not pipeline");
2502 }
2503 assert!(else_branch.is_none());
2504 } else {
2505 panic!("not if");
2506 }
2507 }
2508
2509 #[test]
2510 fn test_parse_if_elif() {
2511 let tokens = vec![
2512 Token::If,
2513 Token::Word("false".to_string()),
2514 Token::Semicolon,
2515 Token::Then,
2516 Token::Word("printf".to_string()),
2517 Token::Word("no".to_string()),
2518 Token::Semicolon,
2519 Token::Elif,
2520 Token::Word("true".to_string()),
2521 Token::Semicolon,
2522 Token::Then,
2523 Token::Word("printf".to_string()),
2524 Token::Word("yes".to_string()),
2525 Token::Semicolon,
2526 Token::Fi,
2527 ];
2528 let result = parse(tokens).unwrap();
2529 if let Ast::If {
2530 branches,
2531 else_branch,
2532 } = result
2533 {
2534 assert_eq!(branches.len(), 2);
2535 let (condition1, then1) = &branches[0];
2537 if let Ast::Pipeline(cmds) = &**condition1 {
2538 assert_eq!(cmds[0].args, vec!["false"]);
2539 }
2540 if let Ast::Pipeline(cmds) = &**then1 {
2541 assert_eq!(cmds[0].args, vec!["printf", "no"]);
2542 }
2543 let (condition2, then2) = &branches[1];
2545 if let Ast::Pipeline(cmds) = &**condition2 {
2546 assert_eq!(cmds[0].args, vec!["true"]);
2547 }
2548 if let Ast::Pipeline(cmds) = &**then2 {
2549 assert_eq!(cmds[0].args, vec!["printf", "yes"]);
2550 }
2551 assert!(else_branch.is_none());
2552 } else {
2553 panic!("not if");
2554 }
2555 }
2556
2557 #[test]
2558 fn test_parse_assignment() {
2559 let tokens = vec![Token::Word("MY_VAR=test_value".to_string())];
2560 let result = parse(tokens).unwrap();
2561 if let Ast::Assignment { var, value } = result {
2562 assert_eq!(var, "MY_VAR");
2563 assert_eq!(value, "test_value");
2564 } else {
2565 panic!("not assignment");
2566 }
2567 }
2568
2569 #[test]
2570 fn test_parse_assignment_quoted() {
2571 let tokens = vec![Token::Word("MY_VAR=hello world".to_string())];
2572 let result = parse(tokens).unwrap();
2573 if let Ast::Assignment { var, value } = result {
2574 assert_eq!(var, "MY_VAR");
2575 assert_eq!(value, "hello world");
2576 } else {
2577 panic!("not assignment");
2578 }
2579 }
2580
2581 #[test]
2582 fn test_parse_assignment_invalid() {
2583 let tokens = vec![Token::Word("123VAR=value".to_string())];
2585 let result = parse(tokens).unwrap();
2586 if let Ast::Pipeline(cmds) = result {
2587 assert_eq!(cmds[0].args, vec!["123VAR=value"]);
2588 } else {
2589 panic!("should be parsed as pipeline");
2590 }
2591 }
2592
2593 #[test]
2594 fn test_parse_function_definition() {
2595 let tokens = vec![
2596 Token::Word("myfunc".to_string()),
2597 Token::LeftParen,
2598 Token::RightParen,
2599 Token::LeftBrace,
2600 Token::Word("echo".to_string()),
2601 Token::Word("hello".to_string()),
2602 Token::RightBrace,
2603 ];
2604 let result = parse(tokens).unwrap();
2605 if let Ast::FunctionDefinition { name, body } = result {
2606 assert_eq!(name, "myfunc");
2607 if let Ast::Pipeline(cmds) = *body {
2609 assert_eq!(cmds[0].args, vec!["echo", "hello"]);
2610 } else {
2611 panic!("function body should be a pipeline");
2612 }
2613 } else {
2614 panic!("should be parsed as function definition");
2615 }
2616 }
2617
2618 #[test]
2619 fn test_parse_function_definition_empty() {
2620 let tokens = vec![
2621 Token::Word("emptyfunc".to_string()),
2622 Token::LeftParen,
2623 Token::RightParen,
2624 Token::LeftBrace,
2625 Token::RightBrace,
2626 ];
2627 let result = parse(tokens).unwrap();
2628 if let Ast::FunctionDefinition { name, body } = result {
2629 assert_eq!(name, "emptyfunc");
2630 if let Ast::Pipeline(cmds) = *body {
2632 assert_eq!(cmds[0].args, vec!["true"]);
2633 } else {
2634 panic!("function body should be a pipeline");
2635 }
2636 } else {
2637 panic!("should be parsed as function definition");
2638 }
2639 }
2640
2641 #[test]
2642 fn test_parse_function_definition_legacy_format() {
2643 let tokens = vec![
2645 Token::Word("legacyfunc()".to_string()),
2646 Token::LeftBrace,
2647 Token::Word("echo".to_string()),
2648 Token::Word("hello".to_string()),
2649 Token::RightBrace,
2650 ];
2651 let result = parse(tokens).unwrap();
2652 if let Ast::FunctionDefinition { name, body } = result {
2653 assert_eq!(name, "legacyfunc");
2654 if let Ast::Pipeline(cmds) = *body {
2656 assert_eq!(cmds[0].args, vec!["echo", "hello"]);
2657 } else {
2658 panic!("function body should be a pipeline");
2659 }
2660 } else {
2661 panic!("should be parsed as function definition");
2662 }
2663 }
2664
2665 #[test]
2666 fn test_parse_local_assignment() {
2667 let tokens = vec![Token::Local, Token::Word("MY_VAR=test_value".to_string())];
2668 let result = parse(tokens).unwrap();
2669 if let Ast::LocalAssignment { var, value } = result {
2670 assert_eq!(var, "MY_VAR");
2671 assert_eq!(value, "test_value");
2672 } else {
2673 panic!("should be parsed as local assignment");
2674 }
2675 }
2676
2677 #[test]
2678 fn test_parse_local_assignment_separate_tokens() {
2679 let tokens = vec![
2680 Token::Local,
2681 Token::Word("MY_VAR".to_string()),
2682 Token::Word("test_value".to_string()),
2683 ];
2684 let result = parse(tokens).unwrap();
2685 if let Ast::LocalAssignment { var, value } = result {
2686 assert_eq!(var, "MY_VAR");
2687 assert_eq!(value, "test_value");
2688 } else {
2689 panic!("should be parsed as local assignment");
2690 }
2691 }
2692
2693 #[test]
2694 fn test_parse_local_assignment_invalid_var_name() {
2695 let tokens = vec![Token::Local, Token::Word("123VAR=value".to_string())];
2697 let result = parse(tokens);
2698 assert!(result.is_err());
2700 }
2701
2702 #[test]
2703 fn test_parse_here_document_redirection() {
2704 let tokens = vec![
2705 Token::Word("cat".to_string()),
2706 Token::RedirHereDoc("EOF".to_string(), false),
2707 ];
2708 let result = parse(tokens).unwrap();
2709 assert_eq!(
2710 result,
2711 Ast::Pipeline(vec![ShellCommand {
2712 args: vec!["cat".to_string()],
2713 redirections: vec![Redirection::HereDoc("EOF".to_string(), "false".to_string())],
2714 compound: None,
2715 }])
2716 );
2717 }
2718
2719 #[test]
2720 fn test_parse_here_string_redirection() {
2721 let tokens = vec![
2722 Token::Word("grep".to_string()),
2723 Token::RedirHereString("pattern".to_string()),
2724 ];
2725 let result = parse(tokens).unwrap();
2726 assert_eq!(
2727 result,
2728 Ast::Pipeline(vec![ShellCommand {
2729 args: vec!["grep".to_string()],
2730 compound: None,
2731 redirections: vec![Redirection::HereString("pattern".to_string())],
2732 }])
2733 );
2734 }
2735
2736 #[test]
2737 fn test_parse_mixed_redirections() {
2738 let tokens = vec![
2739 Token::Word("cat".to_string()),
2740 Token::RedirIn,
2741 Token::Word("file.txt".to_string()),
2742 Token::RedirHereString("fallback".to_string()),
2743 Token::RedirOut,
2744 Token::Word("output.txt".to_string()),
2745 ];
2746 let result = parse(tokens).unwrap();
2747 assert_eq!(
2748 result,
2749 Ast::Pipeline(vec![ShellCommand {
2750 args: vec!["cat".to_string()],
2751 compound: None,
2752 redirections: vec![
2753 Redirection::Input("file.txt".to_string()),
2754 Redirection::HereString("fallback".to_string()),
2755 Redirection::Output("output.txt".to_string()),
2756 ],
2757 }])
2758 );
2759 }
2760
2761 #[test]
2764 fn test_parse_fd_input_redirection() {
2765 let tokens = vec![
2766 Token::Word("command".to_string()),
2767 Token::RedirectFdIn(3, "input.txt".to_string()),
2768 ];
2769 let result = parse(tokens).unwrap();
2770 assert_eq!(
2771 result,
2772 Ast::Pipeline(vec![ShellCommand {
2773 args: vec!["command".to_string()],
2774 redirections: vec![Redirection::FdInput(3, "input.txt".to_string())],
2775 compound: None,
2776 }])
2777 );
2778 }
2779
2780 #[test]
2781 fn test_parse_fd_output_redirection() {
2782 let tokens = vec![
2783 Token::Word("command".to_string()),
2784 Token::RedirectFdOut(2, "errors.log".to_string()),
2785 ];
2786 let result = parse(tokens).unwrap();
2787 assert_eq!(
2788 result,
2789 Ast::Pipeline(vec![ShellCommand {
2790 args: vec!["command".to_string()],
2791 compound: None,
2792 redirections: vec![Redirection::FdOutput(2, "errors.log".to_string())],
2793 }])
2794 );
2795 }
2796
2797 #[test]
2798 fn test_parse_fd_append_redirection() {
2799 let tokens = vec![
2800 Token::Word("command".to_string()),
2801 Token::RedirectFdAppend(2, "errors.log".to_string()),
2802 ];
2803 let result = parse(tokens).unwrap();
2804 assert_eq!(
2805 result,
2806 Ast::Pipeline(vec![ShellCommand {
2807 args: vec!["command".to_string()],
2808 compound: None,
2809 redirections: vec![Redirection::FdAppend(2, "errors.log".to_string())],
2810 }])
2811 );
2812 }
2813
2814 #[test]
2815 fn test_parse_fd_duplicate() {
2816 let tokens = vec![
2817 Token::Word("command".to_string()),
2818 Token::RedirectFdDup(2, 1),
2819 ];
2820 let result = parse(tokens).unwrap();
2821 assert_eq!(
2822 result,
2823 Ast::Pipeline(vec![ShellCommand {
2824 args: vec!["command".to_string()],
2825 compound: None,
2826 redirections: vec![Redirection::FdDuplicate(2, 1)],
2827 }])
2828 );
2829 }
2830
2831 #[test]
2832 fn test_parse_fd_close() {
2833 let tokens = vec![
2834 Token::Word("command".to_string()),
2835 Token::RedirectFdClose(2),
2836 ];
2837 let result = parse(tokens).unwrap();
2838 assert_eq!(
2839 result,
2840 Ast::Pipeline(vec![ShellCommand {
2841 args: vec!["command".to_string()],
2842 compound: None,
2843 redirections: vec![Redirection::FdClose(2)],
2844 }])
2845 );
2846 }
2847
2848 #[test]
2849 fn test_parse_fd_input_output() {
2850 let tokens = vec![
2851 Token::Word("command".to_string()),
2852 Token::RedirectFdInOut(3, "file.txt".to_string()),
2853 ];
2854 let result = parse(tokens).unwrap();
2855 assert_eq!(
2856 result,
2857 Ast::Pipeline(vec![ShellCommand {
2858 args: vec!["command".to_string()],
2859 compound: None,
2860 redirections: vec![Redirection::FdInputOutput(3, "file.txt".to_string())],
2861 }])
2862 );
2863 }
2864
2865 #[test]
2866 fn test_parse_multiple_fd_redirections() {
2867 let tokens = vec![
2868 Token::Word("command".to_string()),
2869 Token::RedirectFdOut(2, "err.log".to_string()),
2870 Token::RedirectFdIn(3, "input.txt".to_string()),
2871 Token::RedirectFdAppend(4, "append.log".to_string()),
2872 ];
2873 let result = parse(tokens).unwrap();
2874 assert_eq!(
2875 result,
2876 Ast::Pipeline(vec![ShellCommand {
2877 args: vec!["command".to_string()],
2878 compound: None,
2879 redirections: vec![
2880 Redirection::FdOutput(2, "err.log".to_string()),
2881 Redirection::FdInput(3, "input.txt".to_string()),
2882 Redirection::FdAppend(4, "append.log".to_string()),
2883 ],
2884 }])
2885 );
2886 }
2887
2888 #[test]
2889 fn test_parse_fd_swap_pattern() {
2890 let tokens = vec![
2891 Token::Word("command".to_string()),
2892 Token::RedirectFdDup(3, 1),
2893 Token::RedirectFdDup(1, 2),
2894 Token::RedirectFdDup(2, 3),
2895 Token::RedirectFdClose(3),
2896 ];
2897 let result = parse(tokens).unwrap();
2898 assert_eq!(
2899 result,
2900 Ast::Pipeline(vec![ShellCommand {
2901 args: vec!["command".to_string()],
2902 redirections: vec![
2903 Redirection::FdDuplicate(3, 1),
2904 Redirection::FdDuplicate(1, 2),
2905 Redirection::FdDuplicate(2, 3),
2906 Redirection::FdClose(3),
2907 ],
2908 compound: None,
2909 }])
2910 );
2911 }
2912
2913 #[test]
2914 fn test_parse_mixed_basic_and_fd_redirections() {
2915 let tokens = vec![
2916 Token::Word("command".to_string()),
2917 Token::RedirOut,
2918 Token::Word("output.txt".to_string()),
2919 Token::RedirectFdDup(2, 1),
2920 ];
2921 let result = parse(tokens).unwrap();
2922 assert_eq!(
2923 result,
2924 Ast::Pipeline(vec![ShellCommand {
2925 args: vec!["command".to_string()],
2926 redirections: vec![
2927 Redirection::Output("output.txt".to_string()),
2928 Redirection::FdDuplicate(2, 1),
2929 ],
2930 compound: None,
2931 }])
2932 );
2933 }
2934
2935 #[test]
2936 fn test_parse_fd_redirection_ordering() {
2937 let tokens = vec![
2939 Token::Word("command".to_string()),
2940 Token::RedirectFdOut(2, "first.log".to_string()),
2941 Token::RedirOut,
2942 Token::Word("second.txt".to_string()),
2943 Token::RedirectFdDup(2, 1),
2944 ];
2945 let result = parse(tokens).unwrap();
2946 assert_eq!(
2947 result,
2948 Ast::Pipeline(vec![ShellCommand {
2949 args: vec!["command".to_string()],
2950 redirections: vec![
2951 Redirection::FdOutput(2, "first.log".to_string()),
2952 Redirection::Output("second.txt".to_string()),
2953 Redirection::FdDuplicate(2, 1),
2954 ],
2955 compound: None,
2956 }])
2957 );
2958 }
2959
2960 #[test]
2961 fn test_parse_fd_redirection_with_pipe() {
2962 let tokens = vec![
2963 Token::Word("command".to_string()),
2964 Token::RedirectFdDup(2, 1),
2965 Token::Pipe,
2966 Token::Word("grep".to_string()),
2967 Token::Word("error".to_string()),
2968 ];
2969 let result = parse(tokens).unwrap();
2970 assert_eq!(
2971 result,
2972 Ast::Pipeline(vec![
2973 ShellCommand {
2974 args: vec!["command".to_string()],
2975 redirections: vec![Redirection::FdDuplicate(2, 1)],
2976 compound: None,
2977 },
2978 ShellCommand {
2979 args: vec!["grep".to_string(), "error".to_string()],
2980 compound: None,
2981 redirections: Vec::new(),
2982 }
2983 ])
2984 );
2985 }
2986
2987 #[test]
2988 fn test_parse_all_fd_numbers() {
2989 let tokens = vec![
2991 Token::Word("cmd".to_string()),
2992 Token::RedirectFdIn(0, "file".to_string()),
2993 ];
2994 let result = parse(tokens).unwrap();
2995 if let Ast::Pipeline(cmds) = result {
2996 assert_eq!(
2997 cmds[0].redirections[0],
2998 Redirection::FdInput(0, "file".to_string())
2999 );
3000 } else {
3001 panic!("Expected Pipeline");
3002 }
3003
3004 let tokens = vec![
3006 Token::Word("cmd".to_string()),
3007 Token::RedirectFdOut(9, "file".to_string()),
3008 ];
3009 let result = parse(tokens).unwrap();
3010 if let Ast::Pipeline(cmds) = result {
3011 assert_eq!(
3012 cmds[0].redirections[0],
3013 Redirection::FdOutput(9, "file".to_string())
3014 );
3015 } else {
3016 panic!("Expected Pipeline");
3017 }
3018 }
3019}