1use super::lexer::Token;
2
/// Abstract syntax tree produced by [`parse`] from the lexer's token stream.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Ast {
    /// One or more commands connected by `|`.
    Pipeline(Vec<ShellCommand>),
    /// Commands executed one after another (separated by `;` or newlines).
    Sequence(Vec<Ast>),
    /// `var=value` assignment.
    Assignment {
        var: String,
        value: String,
    },
    /// `local var=value` assignment (function-local scope).
    LocalAssignment {
        var: String,
        value: String,
    },
    /// `if`/`elif` chain: each branch pairs (condition, body);
    /// `else_branch` holds the optional `else` body.
    If {
        branches: Vec<(Box<Ast>, Box<Ast>)>,
        else_branch: Option<Box<Ast>>,
    },
    /// `case word in ...`: each arm pairs its patterns with a body;
    /// `default` is the fallback arm, if any.
    Case {
        word: String,
        cases: Vec<(Vec<String>, Ast)>,
        default: Option<Box<Ast>>,
    },
    /// `for variable in items; do body; done`.
    For {
        variable: String,
        items: Vec<String>,
        body: Box<Ast>,
    },
    /// `while condition; do body; done`.
    While {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// `name() { body }` function definition.
    FunctionDefinition {
        name: String,
        body: Box<Ast>,
    },
    /// Call of a defined function with its arguments.
    /// NOTE(review): not constructed in this parser chunk — presumably built
    /// elsewhere (e.g. during execution/resolution); confirm at call sites.
    FunctionCall {
        name: String,
        args: Vec<String>,
    },
    /// `return [value]` inside a function body.
    Return {
        value: Option<String>,
    },
    /// `left && right` — short-circuit AND.
    And {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `left || right` — short-circuit OR.
    Or {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `( body )` — body grouped as a subshell.
    Subshell {
        body: Box<Ast>,
    },
    /// `{ body }` — body grouped in the current shell.
    CommandGroup {
        body: Box<Ast>,
    },
}
63
/// A single I/O redirection attached to a [`ShellCommand`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Redirection {
    /// `< file` — read stdin from a file.
    Input(String),
    /// `> file` — write stdout to a file (truncate).
    Output(String),
    /// `>> file` — append stdout to a file.
    Append(String),
    /// `fd< file` — read a specific descriptor from a file.
    FdInput(i32, String),
    /// `fd> file` — write a specific descriptor to a file.
    FdOutput(i32, String),
    /// `fd>> file` — append a specific descriptor to a file.
    FdAppend(i32, String),
    /// `from>&to` — duplicate one descriptor onto another.
    FdDuplicate(i32, i32),
    /// `fd>&-` — close a descriptor.
    FdClose(i32),
    /// Read/write redirection for a numbered descriptor
    /// (presumably `fd<>file` — confirm against the lexer).
    FdInputOutput(i32, String),
    /// Here-document: (delimiter, second field). The second field is the
    /// lexer's `quoted` value converted with `to_string()` — NOTE(review):
    /// confirm its exact meaning (quoted-delimiter flag vs. body) in the lexer.
    HereDoc(String, String),
    /// `<<< string` here-string payload.
    HereString(String),
}
90
/// One element of a pipeline: its argv, its redirections, and an optional
/// compound body when the element is a `{ ... }` group or `( ... )` subshell
/// rather than a plain word list.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ShellCommand {
    /// Command name followed by its arguments; may be empty when `compound`
    /// is set (redirection-only wrapper around a compound construct).
    pub args: Vec<String>,
    /// Redirections applied to this command, in source order.
    pub redirections: Vec<Redirection>,
    /// Set when this pipeline element is a subshell or command group.
    pub compound: Option<Box<Ast>>,
}
100
/// Returns `true` if `name` is a valid shell identifier: a leading alphabetic
/// character or `_`, followed only by alphanumerics or `_`.
///
/// Bug fix: the previous version checked only the FIRST character, so names
/// like `foo-bar` or `a.b` were accepted, causing e.g. `foo-bar=x` to be
/// misparsed as an assignment instead of a command.
fn is_valid_variable_name(name: &str) -> bool {
    let mut chars = name.chars();
    match chars.next() {
        // First char must be alphabetic or underscore; the rest must be
        // alphanumeric or underscore (POSIX identifier rule).
        Some(first) if first.is_alphabetic() || first == '_' => {
            chars.all(|c| c.is_alphanumeric() || c == '_')
        }
        // Empty string or invalid leading character.
        _ => false,
    }
}
110
111fn create_empty_body_ast() -> Ast {
114 Ast::Pipeline(vec![ShellCommand {
115 args: vec!["true".to_string()],
116 redirections: Vec::new(),
117 compound: None,
118 }])
119}
120
121fn skip_newlines(tokens: &[Token], i: &mut usize) {
124 while *i < tokens.len() && tokens[*i] == Token::Newline {
125 *i += 1;
126 }
127}
128
129fn skip_to_matching_fi(tokens: &[Token], i: &mut usize) {
132 let mut if_depth = 1;
133 *i += 1; while *i < tokens.len() && if_depth > 0 {
135 match tokens[*i] {
136 Token::If => if_depth += 1,
137 Token::Fi => if_depth -= 1,
138 _ => {}
139 }
140 *i += 1;
141 }
142}
143
144fn skip_to_matching_done(tokens: &[Token], i: &mut usize) {
147 let mut loop_depth = 1;
148 *i += 1; while *i < tokens.len() && loop_depth > 0 {
150 match tokens[*i] {
151 Token::For | Token::While => loop_depth += 1,
152 Token::Done => loop_depth -= 1,
153 _ => {}
154 }
155 *i += 1;
156 }
157}
158
159fn skip_to_matching_esac(tokens: &[Token], i: &mut usize) {
161 *i += 1; while *i < tokens.len() {
163 if tokens[*i] == Token::Esac {
164 *i += 1;
165 break;
166 }
167 *i += 1;
168 }
169}
170
/// Entry point: parses a complete token stream into an [`Ast`].
///
/// Two function-definition shapes are special-cased before the general path:
/// 1. `name ( ) { ... }` as four separate tokens — the function body's span is
///    located, the definition is parsed, and any trailing commands are parsed
///    as a following sequence.
/// 2. `name()` fused into a single word, followed by `{` — handed directly to
///    `parse_function_definition`.
/// Everything else goes through [`parse_commands_sequentially`].
pub fn parse(tokens: Vec<Token>) -> Result<Ast, String> {
    // Form 1: `Word ( ) {` prefix.
    if tokens.len() >= 4
        && let (Token::Word(_), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
    {
        // Scan for the `}` matching the opening brace at index 3, skipping
        // whole nested constructs (if/fi, for|while/done, case/esac) so that
        // keywords inside them can't terminate the function early.
        let mut brace_depth = 1;
        let mut function_end = tokens.len();
        let mut j = 4;
        while j < tokens.len() {
            match &tokens[j] {
                Token::LeftBrace => {
                    brace_depth += 1;
                    j += 1;
                }
                Token::RightBrace => {
                    brace_depth -= 1;
                    if brace_depth == 0 {
                        // End is one past the closing brace.
                        function_end = j + 1;
                        break;
                    }
                    j += 1;
                }
                Token::If => {
                    // Skip the whole if/fi construct (nesting-aware).
                    let mut if_depth = 1;
                    j += 1;
                    while j < tokens.len() && if_depth > 0 {
                        match tokens[j] {
                            Token::If => if_depth += 1,
                            Token::Fi => if_depth -= 1,
                            _ => {}
                        }
                        j += 1;
                    }
                }
                Token::For | Token::While => {
                    // Skip the whole loop construct up to its `done`.
                    let mut for_depth = 1;
                    j += 1;
                    while j < tokens.len() && for_depth > 0 {
                        match tokens[j] {
                            Token::For | Token::While => for_depth += 1,
                            Token::Done => for_depth -= 1,
                            _ => {}
                        }
                        j += 1;
                    }
                }
                Token::Case => {
                    // Skip forward to the next `esac` (no nesting tracked).
                    j += 1;
                    while j < tokens.len() {
                        if tokens[j] == Token::Esac {
                            j += 1;
                            break;
                        }
                        j += 1;
                    }
                }
                _ => {
                    j += 1;
                }
            }
        }

        // Only treat the prefix as a function definition when the body brace
        // was actually closed.
        if brace_depth == 0 && function_end <= tokens.len() {
            let function_tokens = &tokens[0..function_end];
            let remaining_tokens = &tokens[function_end..];

            let function_ast = parse_function_definition(function_tokens)?;

            return if remaining_tokens.is_empty() {
                Ok(function_ast)
            } else {
                // Definition followed by more commands: parse the rest and
                // combine into a sequence.
                let remaining_ast = parse_commands_sequentially(remaining_tokens)?;
                Ok(Ast::Sequence(vec![function_ast, remaining_ast]))
            };
        }
    }

    // Form 2: `name(...)` fused into one word (must contain `(` after at least
    // one name character and end with `)`), followed by `{`.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
        && tokens[1] == Token::LeftBrace
    {
        return parse_function_definition(&tokens);
    }

    // General case: a sequence of commands.
    parse_commands_sequentially(&tokens)
}
271
/// Parses a single command's tokens, dispatching on shape.
///
/// The checks run in a fixed order and the first match wins, so ordering is
/// significant: assignments and `local`/`return` forms are recognized first,
/// then compound keywords (`if`/`case`/`for`/`while`), then function
/// definitions; anything left falls through to [`parse_pipeline`].
fn parse_slice(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.is_empty() {
        return Err("No commands found".to_string());
    }

    // `VAR=prefix value` split across two words: the value is the text after
    // `=` in the first word concatenated with the second word.
    if tokens.len() == 2 {
        if let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
            && let Some(eq_pos) = var_eq.find('=')
            && eq_pos > 0
            && eq_pos < var_eq.len()
        {
            let var = var_eq[..eq_pos].to_string();
            let full_value = format!("{}{}", &var_eq[eq_pos + 1..], value);
            if is_valid_variable_name(&var) {
                return Ok(Ast::Assignment {
                    var,
                    value: full_value,
                });
            }
        }
    }

    // `VAR= value`: `=` is the last char of the first word, value is the
    // second word.
    if tokens.len() == 2
        && let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos == var_eq.len() - 1
    {
        let var = var_eq[..eq_pos].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment {
                var,
                value: value.clone(),
            });
        }
    }

    // `local VAR value` / `local VAR= value` (three tokens).
    if tokens.len() == 3
        && let (Token::Local, Token::Word(var), Token::Word(value)) =
            (&tokens[0], &tokens[1], &tokens[2])
    {
        // Strip a trailing `=` if the lexer left it attached to the name.
        let clean_var = if var.ends_with('=') {
            &var[..var.len() - 1]
        } else {
            var
        };
        if is_valid_variable_name(clean_var) {
            return Ok(Ast::LocalAssignment {
                var: clean_var.to_string(),
                value: value.clone(),
            });
        }
    }

    // `return` or `return value`.
    if !tokens.is_empty()
        && tokens.len() <= 2
        && let Token::Return = &tokens[0]
    {
        if tokens.len() == 1 {
            return Ok(Ast::Return { value: None });
        } else if let Token::Word(word) = &tokens[1] {
            return Ok(Ast::Return {
                value: Some(word.clone()),
            });
        }
    }

    // `local VAR=value` (two tokens, `=` inside the second word).
    if tokens.len() == 2
        && let (Token::Local, Token::Word(var_eq)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos < var_eq.len()
    {
        let var = var_eq[..eq_pos].to_string();
        let value = var_eq[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::LocalAssignment { var, value });
        }
    }

    // `VAR=value` as a single word.
    if tokens.len() == 1
        && let Token::Word(ref word) = tokens[0]
        && let Some(eq_pos) = word.find('=')
        && eq_pos > 0
        && eq_pos < word.len()
    {
        let var = word[..eq_pos].to_string();
        let value = word[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment { var, value });
        }
    }

    // Compound constructs dispatch to their dedicated parsers.
    if let Token::If = tokens[0] {
        return parse_if(tokens);
    }

    if let Token::Case = tokens[0] {
        return parse_case(tokens);
    }

    if let Token::For = tokens[0] {
        return parse_for(tokens);
    }

    if let Token::While = tokens[0] {
        return parse_while(tokens);
    }

    // `name ( ) { ... }` function definition (four separate tokens).
    if tokens.len() >= 4
        && let (Token::Word(word), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
        && is_valid_variable_name(word)
    {
        return parse_function_definition(tokens);
    }

    // `name()` fused into one word, followed by `{`.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
    {
        let func_name = &word[..paren_pos];
        if is_valid_variable_name(func_name) && tokens[1] == Token::LeftBrace {
            return parse_function_definition(tokens);
        }
    }

    // Anything else is a (possibly single-command) pipeline.
    parse_pipeline(tokens)
}
429
/// Parses a token stream as a sequence of commands separated by newlines and
/// semicolons, handling subshells, command groups, compound keywords, inline
/// function definitions, trailing redirections and `&&`/`||` chaining.
///
/// Returns the single command's AST directly when exactly one command was
/// parsed, otherwise an [`Ast::Sequence`]. Errs on empty input, unmatched
/// delimiters, or empty `(...)`/`{...}` bodies.
fn parse_commands_sequentially(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 0;
    let mut commands = Vec::new();

    while i < tokens.len() {
        // Skip leading newlines and `#`-comment words (a comment consumes
        // everything up to and including the next newline).
        while i < tokens.len() {
            match &tokens[i] {
                Token::Newline => {
                    i += 1;
                }
                Token::Word(word) if word.starts_with('#') => {
                    while i < tokens.len() && tokens[i] != Token::Newline {
                        i += 1;
                    }
                    if i < tokens.len() {
                        i += 1;
                    }
                }
                _ => break,
            }
        }

        if i >= tokens.len() {
            break;
        }

        // Start of the current command's span.
        let start = i;

        // `( ... )` subshell at statement position.
        if tokens[i] == Token::LeftParen {
            // Find the matching `)` with nesting.
            let mut paren_depth = 1;
            let mut j = i + 1;

            while j < tokens.len() && paren_depth > 0 {
                match tokens[j] {
                    Token::LeftParen => paren_depth += 1,
                    Token::RightParen => paren_depth -= 1,
                    _ => {}
                }
                j += 1;
            }

            if paren_depth != 0 {
                return Err("Unmatched parenthesis in subshell".to_string());
            }

            // Tokens strictly between the parens (j is one past the `)`).
            let subshell_tokens = &tokens[i + 1..j - 1];

            let body_ast = if subshell_tokens.is_empty() {
                return Err("Empty subshell".to_string());
            } else {
                parse_commands_sequentially(subshell_tokens)?
            };

            let mut subshell_ast = Ast::Subshell {
                body: Box::new(body_ast),
            };

            // Collect redirections immediately following the closing `)`.
            i = j;
            let mut redirections = Vec::new();
            while i < tokens.len() {
                match &tokens[i] {
                    Token::RedirOut => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Output(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirIn => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Input(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirAppend => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Append(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirectFdOut(fd, file) => {
                        redirections.push(Redirection::FdOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdIn(fd, file) => {
                        redirections.push(Redirection::FdInput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdAppend(fd, file) => {
                        redirections.push(Redirection::FdAppend(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdDup(from_fd, to_fd) => {
                        redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
                        i += 1;
                    }
                    Token::RedirectFdClose(fd) => {
                        redirections.push(Redirection::FdClose(*fd));
                        i += 1;
                    }
                    Token::RedirectFdInOut(fd, file) => {
                        redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirHereDoc(delimiter, quoted) => {
                        redirections
                            .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                        i += 1;
                    }
                    Token::RedirHereString(content) => {
                        redirections.push(Redirection::HereString(content.clone()));
                        i += 1;
                    }
                    _ => break,
                }
            }

            // Subshell feeding a pipeline: discard the partial parse and hand
            // the whole span (from `start` to the pipeline's end) to the
            // pipeline parser instead.
            if i < tokens.len() && tokens[i] == Token::Pipe {
                let mut end = i;
                let mut brace_depth = 0;
                let mut paren_depth = 0;
                // Track a trailing `|` so a newline right after it continues
                // the pipeline instead of ending the command.
                let mut last_was_pipe = true;
                while end < tokens.len() {
                    match &tokens[end] {
                        Token::Pipe => last_was_pipe = true,
                        Token::LeftBrace => {
                            brace_depth += 1;
                            last_was_pipe = false;
                        }
                        Token::RightBrace => {
                            if brace_depth > 0 {
                                brace_depth -= 1;
                            } else {
                                break;
                            }
                            last_was_pipe = false;
                        }
                        Token::LeftParen => {
                            paren_depth += 1;
                            last_was_pipe = false;
                        }
                        Token::RightParen => {
                            if paren_depth > 0 {
                                paren_depth -= 1;
                            } else {
                                break;
                            }
                            last_was_pipe = false;
                        }
                        Token::Newline | Token::Semicolon => {
                            if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
                                break;
                            }
                        }
                        Token::Word(_) => last_was_pipe = false,
                        _ => {}
                    }
                    end += 1;
                }

                let pipeline_ast = parse_pipeline(&tokens[start..end])?;
                commands.push(pipeline_ast);
                i = end;
                continue;
            }

            // Wrap the subshell in an args-less pipeline element to carry its
            // redirections.
            if !redirections.is_empty() {
                subshell_ast = Ast::Pipeline(vec![ShellCommand {
                    args: Vec::new(),
                    redirections,
                    compound: Some(Box::new(subshell_ast)),
                }]);
            }

            // `(...) && rest` / `(...) || rest`: the entire remainder of the
            // stream becomes the right-hand side, so this loop ends here.
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1;
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(subshell_ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(subshell_ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break;
            }

            commands.push(subshell_ast);

            if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
                i += 1;
            }
            continue;
        }

        // `{ ... }` command group at statement position — mirrors the
        // subshell arm above.
        if tokens[i] == Token::LeftBrace {
            let mut brace_depth = 1;
            let mut j = i + 1;

            while j < tokens.len() && brace_depth > 0 {
                match tokens[j] {
                    Token::LeftBrace => brace_depth += 1,
                    Token::RightBrace => brace_depth -= 1,
                    _ => {}
                }
                j += 1;
            }

            if brace_depth != 0 {
                return Err("Unmatched brace in command group".to_string());
            }

            let group_tokens = &tokens[i + 1..j - 1];

            let body_ast = if group_tokens.is_empty() {
                return Err("Empty command group".to_string());
            } else {
                parse_commands_sequentially(group_tokens)?
            };

            let mut group_ast = Ast::CommandGroup {
                body: Box::new(body_ast),
            };

            // Collect redirections immediately following the closing `}`.
            i = j;
            let mut redirections = Vec::new();
            while i < tokens.len() {
                match &tokens[i] {
                    Token::RedirOut => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Output(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirIn => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Input(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirAppend => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Append(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirectFdOut(fd, file) => {
                        redirections.push(Redirection::FdOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdIn(fd, file) => {
                        redirections.push(Redirection::FdInput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdAppend(fd, file) => {
                        redirections.push(Redirection::FdAppend(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdDup(from_fd, to_fd) => {
                        redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
                        i += 1;
                    }
                    Token::RedirectFdClose(fd) => {
                        redirections.push(Redirection::FdClose(*fd));
                        i += 1;
                    }
                    Token::RedirectFdInOut(fd, file) => {
                        redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirHereDoc(delimiter, quoted) => {
                        redirections
                            .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                        i += 1;
                    }
                    Token::RedirHereString(content) => {
                        redirections.push(Redirection::HereString(content.clone()));
                        i += 1;
                    }
                    _ => break,
                }
            }

            // Group feeding a pipeline: re-scan the whole span and hand it to
            // the pipeline parser instead (same logic as the subshell arm).
            if i < tokens.len() && tokens[i] == Token::Pipe {
                let mut end = i;
                let mut brace_depth = 0;
                let mut paren_depth = 0;
                let mut last_was_pipe = true;
                while end < tokens.len() {
                    match &tokens[end] {
                        Token::Pipe => last_was_pipe = true,
                        Token::LeftBrace => {
                            brace_depth += 1;
                            last_was_pipe = false;
                        }
                        Token::RightBrace => {
                            if brace_depth > 0 {
                                brace_depth -= 1;
                            } else {
                                break;
                            }
                            last_was_pipe = false;
                        }
                        Token::LeftParen => {
                            paren_depth += 1;
                            last_was_pipe = false;
                        }
                        Token::RightParen => {
                            if paren_depth > 0 {
                                paren_depth -= 1;
                            } else {
                                break;
                            }
                            last_was_pipe = false;
                        }
                        Token::Newline | Token::Semicolon => {
                            if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
                                break;
                            }
                        }
                        Token::Word(_) => last_was_pipe = false,
                        _ => {}
                    }
                    end += 1;
                }

                let pipeline_ast = parse_pipeline(&tokens[start..end])?;
                commands.push(pipeline_ast);
                i = end;
                continue;
            }

            // Wrap the group to carry its redirections, as for subshells.
            if !redirections.is_empty() {
                group_ast = Ast::Pipeline(vec![ShellCommand {
                    args: Vec::new(),
                    redirections,
                    compound: Some(Box::new(group_ast)),
                }]);
            }

            // `{...} && rest` / `{...} || rest`.
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1;
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(group_ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(group_ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break;
            }

            commands.push(group_ast);

            if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
                i += 1;
            }
            continue;
        }

        // For compound keywords, advance `i` past the whole construct so the
        // span handed to `parse_slice` below contains it entirely.
        if tokens[i] == Token::If {
            // depth starts at 0 because the `if` at tokens[i] is counted on
            // the first iteration.
            let mut depth = 0;
            while i < tokens.len() {
                match tokens[i] {
                    Token::If => depth += 1,
                    Token::Fi => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1;
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::For {
            let mut depth = 1;
            i += 1;
            while i < tokens.len() {
                match tokens[i] {
                    Token::For | Token::While => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1;
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::While {
            let mut depth = 1;
            i += 1;
            while i < tokens.len() {
                match tokens[i] {
                    Token::While | Token::For => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1;
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::Case {
            // Advance past the next `esac` (no nesting tracked).
            while i < tokens.len() {
                if tokens[i] == Token::Esac {
                    i += 1;
                    break;
                }
                i += 1;
            }
        } else if i + 3 < tokens.len()
            && matches!(tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            // Inline function definition `name ( ) { ... }`: skip its body.
            let mut brace_depth = 1;
            i += 4;
            while i < tokens.len() && brace_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => brace_depth += 1,
                    Token::RightBrace => brace_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
        } else {
            // Plain command: scan to the next top-level separator
            // (newline/`;`/`&&`/`||` outside braces, parens, and pipelines).
            let mut brace_depth = 0;
            let mut paren_depth = 0;
            let mut last_was_pipe = false;
            while i < tokens.len() {
                match &tokens[i] {
                    Token::LeftBrace => {
                        brace_depth += 1;
                        last_was_pipe = false;
                    }
                    Token::RightBrace => {
                        if brace_depth > 0 {
                            brace_depth -= 1;
                        } else {
                            break;
                        }
                        last_was_pipe = false;
                    }
                    Token::LeftParen => {
                        paren_depth += 1;
                        last_was_pipe = false;
                    }
                    Token::RightParen => {
                        if paren_depth > 0 {
                            paren_depth -= 1;
                        } else {
                            break;
                        }
                        last_was_pipe = false;
                    }
                    Token::Pipe => last_was_pipe = true,
                    Token::Newline | Token::Semicolon | Token::And | Token::Or => {
                        if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
                            break;
                        }
                    }
                    Token::Word(_) => last_was_pipe = false,
                    _ => {}
                }
                i += 1;
            }
        }

        let command_tokens = &tokens[start..i];
        if !command_tokens.is_empty() {
            // A lone stray structural keyword is silently skipped.
            if command_tokens.len() == 1 {
                match command_tokens[0] {
                    Token::Else | Token::Elif | Token::Fi => {
                        if i < tokens.len()
                            && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
                        {
                            i += 1;
                        }
                        continue;
                    }
                    _ => {}
                }
            }

            let ast = parse_slice(command_tokens)?;

            // `cmd && rest` / `cmd || rest`: the remainder becomes the
            // right-hand side and parsing of this sequence ends.
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1;
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break;
            } else {
                commands.push(ast);
            }
        }

        // Consume the command separator, if present.
        if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
            i += 1;
        }
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    // Single command: unwrap instead of producing a one-element sequence.
    if commands.len() == 1 {
        Ok(commands.into_iter().next().unwrap())
    } else {
        Ok(Ast::Sequence(commands))
    }
}
1067
/// Parses a span of tokens as a pipeline of one or more [`ShellCommand`]s.
///
/// Each `|` finishes the current command and starts the next. A pipeline
/// element may be a plain word list with redirections, or a compound
/// `{ ... }` group / `( ... )` subshell (stored in `compound`). Terminates
/// at a newline after a non-empty command or at a structural keyword
/// (`do`/`done`/`then`/`else`/`elif`/`fi`/`esac`).
fn parse_pipeline(tokens: &[Token]) -> Result<Ast, String> {
    let mut commands = Vec::new();
    let mut current_cmd = ShellCommand::default();

    let mut i = 0;
    while i < tokens.len() {
        let token = &tokens[i];
        match token {
            Token::LeftBrace => {
                // `{ ... }` group as a pipeline element: find the matching `}`.
                let mut brace_depth = 1;
                let mut j = i + 1;

                while j < tokens.len() && brace_depth > 0 {
                    match tokens[j] {
                        Token::LeftBrace => brace_depth += 1,
                        Token::RightBrace => brace_depth -= 1,
                        _ => {}
                    }
                    j += 1;
                }

                if brace_depth != 0 {
                    return Err("Unmatched brace in pipeline".to_string());
                }

                let group_tokens = &tokens[i + 1..j - 1];

                // Unlike the statement-level parser, an empty group here is
                // tolerated and replaced by a no-op body.
                let body_ast = if group_tokens.is_empty() {
                    create_empty_body_ast()
                } else {
                    parse_commands_sequentially(group_tokens)?
                };

                current_cmd.compound = Some(Box::new(Ast::CommandGroup {
                    body: Box::new(body_ast),
                }));

                // Attach any redirections following the closing `}` to this
                // command; stop at `|` or anything else.
                i = j;
                while i < tokens.len() {
                    match &tokens[i] {
                        Token::RedirOut => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Output(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirIn => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Input(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirAppend => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Append(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirectFdOut(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdIn(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdInput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdAppend(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdAppend(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdDup(from_fd, to_fd) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdDuplicate(*from_fd, *to_fd));
                            i += 1;
                        }
                        Token::RedirectFdClose(fd) => {
                            current_cmd.redirections.push(Redirection::FdClose(*fd));
                            i += 1;
                        }
                        Token::RedirectFdInOut(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdInputOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirHereDoc(delimiter, quoted) => {
                            current_cmd
                                .redirections
                                .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                            i += 1;
                        }
                        Token::RedirHereString(content) => {
                            current_cmd
                                .redirections
                                .push(Redirection::HereString(content.clone()));
                            i += 1;
                        }
                        Token::Pipe => {
                            // Leave the `|` for the outer loop to handle.
                            break;
                        }
                        _ => break,
                    }
                }

                // `i` already points at the next unconsumed token.
                continue;
            }
            Token::LeftParen => {
                // `( ... )` subshell as a pipeline element: find matching `)`.
                let mut paren_depth = 1;
                let mut j = i + 1;

                while j < tokens.len() && paren_depth > 0 {
                    match tokens[j] {
                        Token::LeftParen => paren_depth += 1,
                        Token::RightParen => paren_depth -= 1,
                        _ => {}
                    }
                    j += 1;
                }

                if paren_depth != 0 {
                    return Err("Unmatched parenthesis in pipeline".to_string());
                }

                let subshell_tokens = &tokens[i + 1..j - 1];

                // Empty subshell tolerated here (no-op body), unlike the
                // statement-level parser.
                let body_ast = if subshell_tokens.is_empty() {
                    create_empty_body_ast()
                } else {
                    parse_commands_sequentially(subshell_tokens)?
                };

                current_cmd.compound = Some(Box::new(Ast::Subshell {
                    body: Box::new(body_ast),
                }));

                // Attach any redirections following the closing `)`.
                i = j;
                while i < tokens.len() {
                    match &tokens[i] {
                        Token::RedirOut => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Output(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirIn => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Input(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirAppend => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Append(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirectFdOut(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdIn(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdInput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdAppend(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdAppend(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdDup(from_fd, to_fd) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdDuplicate(*from_fd, *to_fd));
                            i += 1;
                        }
                        Token::RedirectFdClose(fd) => {
                            current_cmd.redirections.push(Redirection::FdClose(*fd));
                            i += 1;
                        }
                        Token::RedirectFdInOut(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdInputOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirHereDoc(delimiter, quoted) => {
                            current_cmd
                                .redirections
                                .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                            i += 1;
                        }
                        Token::RedirHereString(content) => {
                            current_cmd
                                .redirections
                                .push(Redirection::HereString(content.clone()));
                            i += 1;
                        }
                        Token::Pipe => {
                            // Leave the `|` for the outer loop to handle.
                            break;
                        }
                        _ => break,
                    }
                }

                continue;
            }
            Token::Word(word) => {
                current_cmd.args.push(word.clone());
            }
            Token::Pipe => {
                // Finish the current command and start the next one.
                if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
                    commands.push(current_cmd.clone());
                    current_cmd = ShellCommand::default();
                }
            }
            Token::RedirIn => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Input(file.clone()));
                }
            }
            Token::RedirOut => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Output(file.clone()));
                }
            }
            Token::RedirAppend => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Append(file.clone()));
                }
            }
            Token::RedirHereDoc(delimiter, quoted) => {
                current_cmd
                    .redirections
                    .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
            }
            Token::RedirHereString(content) => {
                current_cmd
                    .redirections
                    .push(Redirection::HereString(content.clone()));
            }
            Token::RedirectFdIn(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdInput(*fd, file.clone()));
            }
            Token::RedirectFdOut(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdOutput(*fd, file.clone()));
            }
            Token::RedirectFdAppend(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdAppend(*fd, file.clone()));
            }
            Token::RedirectFdDup(from_fd, to_fd) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdDuplicate(*from_fd, *to_fd));
            }
            Token::RedirectFdClose(fd) => {
                current_cmd.redirections.push(Redirection::FdClose(*fd));
            }
            Token::RedirectFdInOut(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdInputOutput(*fd, file.clone()));
            }
            Token::RightParen => {
                // A `)` immediately after `(` with args already collected is
                // treated as the end of this span; anything else is an error.
                if !current_cmd.args.is_empty()
                    && i > 0
                    && let Token::LeftParen = tokens[i - 1]
                {
                    break;
                }
                return Err("Unexpected ) in pipeline".to_string());
            }
            Token::Newline => {
                // Leading newlines are skipped; a newline after a non-empty
                // command ends the pipeline.
                if current_cmd.args.is_empty() && current_cmd.compound.is_none() {
                } else {
                    break;
                }
            }
            Token::Do
            | Token::Done
            | Token::Then
            | Token::Else
            | Token::Elif
            | Token::Fi
            | Token::Esac => {
                // Structural keywords terminate the pipeline for the caller.
                break;
            }
            _ => {
                return Err(format!("Unexpected token in pipeline: {:?}", token));
            }
        }
        i += 1;
    }

    // Flush the trailing command, if any.
    if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
        commands.push(current_cmd);
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    Ok(Ast::Pipeline(commands))
}
1465
/// Parses an `if ... then ... [elif ... then ...]* [else ...] fi` construct.
///
/// Expects `tokens[0]` to be the `if` keyword. Each condition runs up to the
/// `;`/newline/`then` that follows it; each body runs up to the matching
/// `else`/`elif`/`fi` at nesting depth 0.
fn parse_if(tokens: &[Token]) -> Result<Ast, String> {
    // Skip the leading `if`.
    let mut i = 1;
    let mut branches = Vec::new();

    loop {
        // Collect the condition tokens up to `;`, newline or `then`.
        let mut cond_tokens = Vec::new();
        while i < tokens.len()
            && tokens[i] != Token::Semicolon
            && tokens[i] != Token::Newline
            && tokens[i] != Token::Then
        {
            cond_tokens.push(tokens[i].clone());
            i += 1;
        }

        // Consume the separator before `then`, if present.
        if i < tokens.len() && (tokens[i] == Token::Semicolon || tokens[i] == Token::Newline) {
            i += 1;
        }

        skip_newlines(tokens, &mut i);

        if i >= tokens.len() || tokens[i] != Token::Then {
            return Err("Expected then after if/elif condition".to_string());
        }
        // Skip `then` and any following newlines.
        i += 1;
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Collect the branch body up to `else`/`elif`/`fi` at depth 0;
        // nested if/fi pairs are copied through verbatim.
        let mut then_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    then_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        then_tokens.push(tokens[i].clone());
                    } else {
                        break;
                    }
                }
                Token::Else | Token::Elif if depth == 0 => {
                    break;
                }
                Token::Newline => {
                    // Look ahead past newlines: if the next real token ends
                    // this branch, jump to it instead of keeping the newline.
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len()
                        && depth == 0
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        i = j;
                        break;
                    }
                    then_tokens.push(tokens[i].clone());
                }
                _ => {
                    then_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        skip_newlines(tokens, &mut i);

        // An empty `then` body becomes a no-op.
        let then_ast = if then_tokens.is_empty() {
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&then_tokens)?
        };

        let condition = parse_slice(&cond_tokens)?;
        branches.push((Box::new(condition), Box::new(then_ast)));

        // `elif` starts another (condition, body) round; otherwise done.
        if i < tokens.len() && tokens[i] == Token::Elif {
            i += 1;
        } else {
            break;
        }
    }

    // Optional `else` body, collected up to the `fi` at depth 0.
    let else_ast = if i < tokens.len() && tokens[i] == Token::Else {
        i += 1;
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        let mut else_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    else_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        else_tokens.push(tokens[i].clone());
                    } else {
                        break;
                    }
                }
                Token::Newline => {
                    // Same look-ahead as the branch body: stop at the `fi`.
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len() && depth == 0 && tokens[j] == Token::Fi {
                        i = j;
                        break;
                    }
                    else_tokens.push(tokens[i].clone());
                }
                _ => {
                    else_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        let else_ast = if else_tokens.is_empty() {
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&else_tokens)?
        };

        Some(Box::new(else_ast))
    } else {
        None
    };

    // The construct must be terminated by `fi`.
    if i >= tokens.len() || tokens[i] != Token::Fi {
        return Err("Expected fi".to_string());
    }

    Ok(Ast::If {
        branches,
        else_branch: else_ast,
    })
}
1632
/// Parses `case WORD in PATTERN) COMMANDS ;; ... esac` starting at
/// `tokens[0] == Token::Case`.
///
/// Each arm becomes a `(patterns, ast)` pair in `cases`; an arm whose sole
/// pattern is `*` is stored as the `default` branch instead. Patterns may be
/// separated by `|`, either as a `Pipe` token or embedded inside one word.
fn parse_case(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 1; if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) { // i = 1 skips `case`
        return Err("Expected word after case".to_string());
    }
    let word = if let Token::Word(ref w) = tokens[i] {
        w.clone()
    } else {
        unreachable!()
    };
    i += 1;

    if i >= tokens.len() || tokens[i] != Token::In {
        return Err("Expected in after case word".to_string());
    }
    i += 1;

    let mut cases = Vec::new();
    let mut default = None;

    // One iteration per case arm; exits when `esac` is reached.
    loop {
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        if i >= tokens.len() {
            return Err("Unexpected end in case statement".to_string());
        }

        if tokens[i] == Token::Esac {
            break;
        }

        // Collect this arm's patterns up to the `)` that closes the list.
        let mut patterns = Vec::new();
        while i < tokens.len() && tokens[i] != Token::RightParen {
            if let Token::Word(ref p) = tokens[i] {
                // A word may itself carry `|`-separated alternatives.
                for pat in p.split('|') {
                    patterns.push(pat.to_string());
                }
            } else if tokens[i] == Token::Pipe {
                // Pattern separator: nothing to record.
            } else if tokens[i] == Token::Newline {
                // Allow line breaks inside a pattern list.
            } else {
                return Err(format!("Expected pattern, found {:?}", tokens[i]));
            }
            i += 1;
        }

        if i >= tokens.len() || tokens[i] != Token::RightParen {
            return Err("Expected ) after patterns".to_string());
        }
        i += 1;

        // The arm's command tokens run until `;;` or `esac`.
        let mut commands_tokens = Vec::new();
        while i < tokens.len() && tokens[i] != Token::DoubleSemicolon && tokens[i] != Token::Esac {
            commands_tokens.push(tokens[i].clone());
            i += 1;
        }

        let commands_ast = parse_slice(&commands_tokens)?;

        if i >= tokens.len() {
            return Err("Unexpected end in case statement".to_string());
        }

        if tokens[i] == Token::DoubleSemicolon {
            i += 1;
            // A lone `*` pattern is treated as the default arm.
            if patterns.len() == 1 && patterns[0] == "*" {
                default = Some(Box::new(commands_ast));
            } else {
                cases.push((patterns, commands_ast));
            }
        } else if tokens[i] == Token::Esac {
            // The final arm may omit its trailing `;;`.
            if patterns.len() == 1 && patterns[0] == "*" {
                default = Some(Box::new(commands_ast));
            } else {
                cases.push((patterns, commands_ast));
            }
            break;
        } else {
            return Err("Expected ;; or esac after commands".to_string());
        }
    }

    Ok(Ast::Case {
        word,
        cases,
        default,
    })
}
1732
1733fn parse_for(tokens: &[Token]) -> Result<Ast, String> {
1734 let mut i = 1; if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
1738 return Err("Expected variable name after for".to_string());
1739 }
1740 let variable = if let Token::Word(ref v) = tokens[i] {
1741 v.clone()
1742 } else {
1743 unreachable!()
1744 };
1745 i += 1;
1746
1747 if i >= tokens.len() || tokens[i] != Token::In {
1749 return Err("Expected 'in' after for variable".to_string());
1750 }
1751 i += 1;
1752
1753 let mut items = Vec::new();
1755 while i < tokens.len() {
1756 match &tokens[i] {
1757 Token::Do => break,
1758 Token::Semicolon | Token::Newline => {
1759 i += 1;
1760 if i < tokens.len() && tokens[i] == Token::Do {
1762 break;
1763 }
1764 }
1765 Token::Word(word) => {
1766 items.push(word.clone());
1767 i += 1;
1768 }
1769 _ => {
1770 return Err(format!("Unexpected token in for items: {:?}", tokens[i]));
1771 }
1772 }
1773 }
1774
1775 while i < tokens.len() && tokens[i] == Token::Newline {
1777 i += 1;
1778 }
1779
1780 if i >= tokens.len() || tokens[i] != Token::Do {
1782 return Err("Expected 'do' in for loop".to_string());
1783 }
1784 i += 1;
1785
1786 while i < tokens.len() && tokens[i] == Token::Newline {
1788 i += 1;
1789 }
1790
1791 let mut body_tokens = Vec::new();
1793 let mut depth = 0;
1794 while i < tokens.len() {
1795 match &tokens[i] {
1796 Token::For => {
1797 depth += 1;
1798 body_tokens.push(tokens[i].clone());
1799 }
1800 Token::Done => {
1801 if depth > 0 {
1802 depth -= 1;
1803 body_tokens.push(tokens[i].clone());
1804 } else {
1805 break; }
1807 }
1808 Token::Newline => {
1809 let mut j = i + 1;
1811 while j < tokens.len() && tokens[j] == Token::Newline {
1812 j += 1;
1813 }
1814 if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
1815 i = j; break;
1817 }
1818 body_tokens.push(tokens[i].clone());
1820 }
1821 _ => {
1822 body_tokens.push(tokens[i].clone());
1823 }
1824 }
1825 i += 1;
1826 }
1827
1828 if i >= tokens.len() || tokens[i] != Token::Done {
1829 return Err("Expected 'done' to close for loop".to_string());
1830 }
1831
1832 let body_ast = if body_tokens.is_empty() {
1834 create_empty_body_ast()
1836 } else {
1837 parse_commands_sequentially(&body_tokens)?
1838 };
1839
1840 Ok(Ast::For {
1841 variable,
1842 items,
1843 body: Box::new(body_ast),
1844 })
1845}
1846
1847fn parse_while(tokens: &[Token]) -> Result<Ast, String> {
1848 let mut i = 1; let mut cond_tokens = Vec::new();
1852 while i < tokens.len() {
1853 match &tokens[i] {
1854 Token::Do => break,
1855 Token::Semicolon | Token::Newline => {
1856 i += 1;
1857 if i < tokens.len() && tokens[i] == Token::Do {
1859 break;
1860 }
1861 }
1862 _ => {
1863 cond_tokens.push(tokens[i].clone());
1864 i += 1;
1865 }
1866 }
1867 }
1868
1869 if cond_tokens.is_empty() {
1870 return Err("Expected condition after while".to_string());
1871 }
1872
1873 while i < tokens.len() && tokens[i] == Token::Newline {
1875 i += 1;
1876 }
1877
1878 if i >= tokens.len() || tokens[i] != Token::Do {
1880 return Err("Expected 'do' in while loop".to_string());
1881 }
1882 i += 1;
1883
1884 while i < tokens.len() && tokens[i] == Token::Newline {
1886 i += 1;
1887 }
1888
1889 let mut body_tokens = Vec::new();
1891 let mut depth = 0;
1892 while i < tokens.len() {
1893 match &tokens[i] {
1894 Token::While | Token::For => {
1895 depth += 1;
1896 body_tokens.push(tokens[i].clone());
1897 }
1898 Token::Done => {
1899 if depth > 0 {
1900 depth -= 1;
1901 body_tokens.push(tokens[i].clone());
1902 } else {
1903 break; }
1905 }
1906 Token::Newline => {
1907 let mut j = i + 1;
1909 while j < tokens.len() && tokens[j] == Token::Newline {
1910 j += 1;
1911 }
1912 if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
1913 i = j; break;
1915 }
1916 body_tokens.push(tokens[i].clone());
1918 }
1919 _ => {
1920 body_tokens.push(tokens[i].clone());
1921 }
1922 }
1923 i += 1;
1924 }
1925
1926 if i >= tokens.len() || tokens[i] != Token::Done {
1927 return Err("Expected 'done' to close while loop".to_string());
1928 }
1929
1930 let condition_ast = parse_slice(&cond_tokens)?;
1932
1933 let body_ast = if body_tokens.is_empty() {
1935 create_empty_body_ast()
1937 } else {
1938 parse_commands_sequentially(&body_tokens)?
1939 };
1940
1941 Ok(Ast::While {
1942 condition: Box::new(condition_ast),
1943 body: Box::new(body_ast),
1944 })
1945}
1946
/// Parses a function definition in either modern (`name ( ) { body }`) or
/// legacy (`name() { body }`, parens fused into the name word) form and
/// returns `Ast::FunctionDefinition`.
fn parse_function_definition(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.len() < 2 {
        return Err("Function definition too short".to_string());
    }

    // Extract the function name; for the legacy `name()` form, strip
    // everything from the `(` onward.
    let func_name = if let Token::Word(word) = &tokens[0] {
        if let Some(paren_pos) = word.find('(') {
            if word.ends_with(')') && paren_pos > 0 {
                word[..paren_pos].to_string()
            } else {
                word.clone()
            }
        } else {
            word.clone()
        }
    } else {
        return Err("Function name must be a word".to_string());
    };

    // Index of the opening `{`: 3 for `name ( ) {`, 1 for `name() {`.
    let brace_pos =
        if tokens.len() >= 4 && tokens[1] == Token::LeftParen && tokens[2] == Token::RightParen {
            if tokens[3] != Token::LeftBrace {
                return Err("Expected { after function name".to_string());
            }
            3
        } else if tokens.len() >= 2 && tokens[1] == Token::LeftBrace {
            1
        } else {
            return Err("Expected ( after function name or { for legacy format".to_string());
        };

    // Scan for the `}` matching the opening brace, skipping over nested
    // constructs whose bodies may contain braces of their own.
    let mut brace_depth = 0;
    let mut body_end = 0;
    let mut found_closing = false;
    let mut i = brace_pos + 1;

    while i < tokens.len() {
        // A nested function definition (`word ( ) {`): skip its entire
        // brace-delimited body so it doesn't affect our depth tracking.
        // NOTE(review): this only recognizes the modern 4-token form;
        // nested legacy `name() {` definitions fall through to the plain
        // brace counting below — confirm that is intended.
        if i + 3 < tokens.len()
            && matches!(&tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            i += 4;
            let mut nested_depth = 1;
            while i < tokens.len() && nested_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => nested_depth += 1,
                    Token::RightBrace => nested_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            continue;
        }

        match &tokens[i] {
            Token::LeftBrace => {
                brace_depth += 1;
                i += 1;
            }
            Token::RightBrace => {
                if brace_depth == 0 {
                    // This is the close brace matching the function's `{`.
                    body_end = i;
                    found_closing = true;
                    break;
                } else {
                    brace_depth -= 1;
                    i += 1;
                }
            }
            // Compound statements are skipped wholesale so tokens inside
            // them cannot be mistaken for the function's closing brace.
            Token::If => {
                skip_to_matching_fi(tokens, &mut i);
            }
            Token::For | Token::While => {
                skip_to_matching_done(tokens, &mut i);
            }
            Token::Case => {
                skip_to_matching_esac(tokens, &mut i);
            }
            _ => {
                i += 1;
            }
        }
    }

    if !found_closing {
        return Err("Missing closing } for function definition".to_string());
    }

    // Body is everything strictly between the braces.
    let body_tokens = &tokens[brace_pos + 1..body_end];

    // An empty body becomes a no-op (`true`) pipeline.
    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(body_tokens)?
    };

    Ok(Ast::FunctionDefinition {
        name: func_name,
        body: Box::new(body_ast),
    })
}
2068
// Parser unit tests: simple commands and pipelines, basic and fd-based
// redirections, `if`/`elif`, assignments (global and `local`), here-docs /
// here-strings, and both function-definition forms.
#[cfg(test)]
mod tests {
    use super::super::lexer::Token;
    use super::*;

    #[test]
    fn test_single_command() {
        let tokens = vec![Token::Word("ls".to_string())];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["ls".to_string()],
                redirections: Vec::new(),
                compound: None,
            }])
        );
    }

    #[test]
    fn test_command_with_args() {
        let tokens = vec![
            Token::Word("ls".to_string()),
            Token::Word("-la".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["ls".to_string(), "-la".to_string()],
                redirections: Vec::new(),
                compound: None,
            }])
        );
    }

    #[test]
    fn test_pipeline() {
        let tokens = vec![
            Token::Word("ls".to_string()),
            Token::Pipe,
            Token::Word("grep".to_string()),
            Token::Word("txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![
                ShellCommand {
                    args: vec!["ls".to_string()],
                    redirections: Vec::new(),
                    compound: None,
                },
                ShellCommand {
                    args: vec!["grep".to_string(), "txt".to_string()],
                    redirections: Vec::new(),
                    compound: None,
                }
            ])
        );
    }

    #[test]
    fn test_input_redirection() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("input.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                redirections: vec![Redirection::Input("input.txt".to_string())],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_output_redirection() {
        let tokens = vec![
            Token::Word("printf".to_string()),
            Token::Word("hello".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["printf".to_string(), "hello".to_string()],
                compound: None,
                redirections: vec![Redirection::Output("output.txt".to_string())],
            }])
        );
    }

    #[test]
    fn test_append_redirection() {
        let tokens = vec![
            Token::Word("printf".to_string()),
            Token::Word("hello".to_string()),
            Token::RedirAppend,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["printf".to_string(), "hello".to_string()],
                compound: None,
                redirections: vec![Redirection::Append("output.txt".to_string())],
            }])
        );
    }

    #[test]
    fn test_complex_pipeline_with_redirections() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("input.txt".to_string()),
            Token::Pipe,
            Token::Word("grep".to_string()),
            Token::Word("pattern".to_string()),
            Token::Pipe,
            Token::Word("sort".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![
                ShellCommand {
                    args: vec!["cat".to_string()],
                    compound: None,
                    redirections: vec![Redirection::Input("input.txt".to_string())],
                },
                ShellCommand {
                    args: vec!["grep".to_string(), "pattern".to_string()],
                    compound: None,
                    redirections: Vec::new(),
                },
                ShellCommand {
                    args: vec!["sort".to_string()],
                    redirections: vec![Redirection::Output("output.txt".to_string())],
                    compound: None,
                }
            ])
        );
    }

    #[test]
    fn test_empty_tokens() {
        let tokens = vec![];
        let result = parse(tokens);
        assert!(result.is_err());
        assert_eq!(result.unwrap_err(), "No commands found");
    }

    #[test]
    fn test_only_pipe() {
        let tokens = vec![Token::Pipe];
        let result = parse(tokens);
        assert!(result.is_err());
        assert_eq!(result.unwrap_err(), "No commands found");
    }

    // A redirection operator with no target is silently dropped rather than
    // being an error.
    #[test]
    fn test_redirection_without_file() {
        let tokens = vec![Token::Word("cat".to_string()), Token::RedirIn];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                compound: None,
                redirections: Vec::new(),
            }])
        );
    }

    #[test]
    fn test_multiple_redirections() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("file1.txt".to_string()),
            Token::RedirOut,
            Token::Word("file2.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                redirections: vec![
                    Redirection::Input("file1.txt".to_string()),
                    Redirection::Output("file2.txt".to_string()),
                ],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_if() {
        let tokens = vec![
            Token::If,
            Token::Word("true".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("yes".to_string()),
            Token::Semicolon,
            Token::Fi,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::If {
            branches,
            else_branch,
        } = result
        {
            assert_eq!(branches.len(), 1);
            let (condition, then_branch) = &branches[0];
            if let Ast::Pipeline(cmds) = &**condition {
                assert_eq!(cmds[0].args, vec!["true"]);
            } else {
                panic!("condition not pipeline");
            }
            if let Ast::Pipeline(cmds) = &**then_branch {
                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
            } else {
                panic!("then_branch not pipeline");
            }
            assert!(else_branch.is_none());
        } else {
            panic!("not if");
        }
    }

    #[test]
    fn test_parse_if_elif() {
        let tokens = vec![
            Token::If,
            Token::Word("false".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("no".to_string()),
            Token::Semicolon,
            Token::Elif,
            Token::Word("true".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("yes".to_string()),
            Token::Semicolon,
            Token::Fi,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::If {
            branches,
            else_branch,
        } = result
        {
            assert_eq!(branches.len(), 2);
            let (condition1, then1) = &branches[0];
            if let Ast::Pipeline(cmds) = &**condition1 {
                assert_eq!(cmds[0].args, vec!["false"]);
            }
            if let Ast::Pipeline(cmds) = &**then1 {
                assert_eq!(cmds[0].args, vec!["printf", "no"]);
            }
            let (condition2, then2) = &branches[1];
            if let Ast::Pipeline(cmds) = &**condition2 {
                assert_eq!(cmds[0].args, vec!["true"]);
            }
            if let Ast::Pipeline(cmds) = &**then2 {
                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
            }
            assert!(else_branch.is_none());
        } else {
            panic!("not if");
        }
    }

    #[test]
    fn test_parse_assignment() {
        let tokens = vec![Token::Word("MY_VAR=test_value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Assignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "test_value");
        } else {
            panic!("not assignment");
        }
    }

    #[test]
    fn test_parse_assignment_quoted() {
        let tokens = vec![Token::Word("MY_VAR=hello world".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Assignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "hello world");
        } else {
            panic!("not assignment");
        }
    }

    // A name starting with a digit is not a valid variable, so the word is
    // treated as an ordinary command instead of an assignment.
    #[test]
    fn test_parse_assignment_invalid() {
        let tokens = vec![Token::Word("123VAR=value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Pipeline(cmds) = result {
            assert_eq!(cmds[0].args, vec!["123VAR=value"]);
        } else {
            panic!("should be parsed as pipeline");
        }
    }

    #[test]
    fn test_parse_function_definition() {
        let tokens = vec![
            Token::Word("myfunc".to_string()),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            Token::Word("echo".to_string()),
            Token::Word("hello".to_string()),
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "myfunc");
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }

    // An empty function body is replaced by a no-op `true` command.
    #[test]
    fn test_parse_function_definition_empty() {
        let tokens = vec![
            Token::Word("emptyfunc".to_string()),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "emptyfunc");
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["true"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }

    // Legacy form: the `()` is fused into the name word by the lexer.
    #[test]
    fn test_parse_function_definition_legacy_format() {
        let tokens = vec![
            Token::Word("legacyfunc()".to_string()),
            Token::LeftBrace,
            Token::Word("echo".to_string()),
            Token::Word("hello".to_string()),
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "legacyfunc");
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }

    #[test]
    fn test_parse_local_assignment() {
        let tokens = vec![Token::Local, Token::Word("MY_VAR=test_value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::LocalAssignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "test_value");
        } else {
            panic!("should be parsed as local assignment");
        }
    }

    // `local VAR VALUE` (variable and value as separate words) also parses.
    #[test]
    fn test_parse_local_assignment_separate_tokens() {
        let tokens = vec![
            Token::Local,
            Token::Word("MY_VAR".to_string()),
            Token::Word("test_value".to_string()),
        ];
        let result = parse(tokens).unwrap();
        if let Ast::LocalAssignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "test_value");
        } else {
            panic!("should be parsed as local assignment");
        }
    }

    // Unlike plain assignments (see test_parse_assignment_invalid), an
    // invalid name after `local` is a hard error.
    #[test]
    fn test_parse_local_assignment_invalid_var_name() {
        let tokens = vec![Token::Local, Token::Word("123VAR=value".to_string())];
        let result = parse(tokens);
        assert!(result.is_err());
    }

    #[test]
    fn test_parse_here_document_redirection() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirHereDoc("EOF".to_string(), false),
        ];
        let result = parse(tokens).unwrap();
        // NOTE(review): the lexer's boolean flag is stored stringified as
        // "false" in the AST — confirm this stringly representation is
        // intended.
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                redirections: vec![Redirection::HereDoc("EOF".to_string(), "false".to_string())],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_here_string_redirection() {
        let tokens = vec![
            Token::Word("grep".to_string()),
            Token::RedirHereString("pattern".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["grep".to_string()],
                compound: None,
                redirections: vec![Redirection::HereString("pattern".to_string())],
            }])
        );
    }

    #[test]
    fn test_parse_mixed_redirections() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("file.txt".to_string()),
            Token::RedirHereString("fallback".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                compound: None,
                redirections: vec![
                    Redirection::Input("file.txt".to_string()),
                    Redirection::HereString("fallback".to_string()),
                    Redirection::Output("output.txt".to_string()),
                ],
            }])
        );
    }

    #[test]
    fn test_parse_fd_input_redirection() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdIn(3, "input.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                redirections: vec![Redirection::FdInput(3, "input.txt".to_string())],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_fd_output_redirection() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdOut(2, "errors.log".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![Redirection::FdOutput(2, "errors.log".to_string())],
            }])
        );
    }

    #[test]
    fn test_parse_fd_append_redirection() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdAppend(2, "errors.log".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![Redirection::FdAppend(2, "errors.log".to_string())],
            }])
        );
    }

    #[test]
    fn test_parse_fd_duplicate() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdDup(2, 1),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![Redirection::FdDuplicate(2, 1)],
            }])
        );
    }

    #[test]
    fn test_parse_fd_close() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdClose(2),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![Redirection::FdClose(2)],
            }])
        );
    }

    #[test]
    fn test_parse_fd_input_output() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdInOut(3, "file.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![Redirection::FdInputOutput(3, "file.txt".to_string())],
            }])
        );
    }

    #[test]
    fn test_parse_multiple_fd_redirections() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdOut(2, "err.log".to_string()),
            Token::RedirectFdIn(3, "input.txt".to_string()),
            Token::RedirectFdAppend(4, "append.log".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![
                    Redirection::FdOutput(2, "err.log".to_string()),
                    Redirection::FdInput(3, "input.txt".to_string()),
                    Redirection::FdAppend(4, "append.log".to_string()),
                ],
            }])
        );
    }

    // The classic stdout/stderr swap idiom (3>&1 1>&2 2>&3 3>&-): order of
    // the recorded redirections must be preserved exactly.
    #[test]
    fn test_parse_fd_swap_pattern() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdDup(3, 1),
            Token::RedirectFdDup(1, 2),
            Token::RedirectFdDup(2, 3),
            Token::RedirectFdClose(3),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                redirections: vec![
                    Redirection::FdDuplicate(3, 1),
                    Redirection::FdDuplicate(1, 2),
                    Redirection::FdDuplicate(2, 3),
                    Redirection::FdClose(3),
                ],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_mixed_basic_and_fd_redirections() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
            Token::RedirectFdDup(2, 1),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                redirections: vec![
                    Redirection::Output("output.txt".to_string()),
                    Redirection::FdDuplicate(2, 1),
                ],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_fd_redirection_ordering() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdOut(2, "first.log".to_string()),
            Token::RedirOut,
            Token::Word("second.txt".to_string()),
            Token::RedirectFdDup(2, 1),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                redirections: vec![
                    Redirection::FdOutput(2, "first.log".to_string()),
                    Redirection::Output("second.txt".to_string()),
                    Redirection::FdDuplicate(2, 1),
                ],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_fd_redirection_with_pipe() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdDup(2, 1),
            Token::Pipe,
            Token::Word("grep".to_string()),
            Token::Word("error".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![
                ShellCommand {
                    args: vec!["command".to_string()],
                    redirections: vec![Redirection::FdDuplicate(2, 1)],
                    compound: None,
                },
                ShellCommand {
                    args: vec!["grep".to_string(), "error".to_string()],
                    compound: None,
                    redirections: Vec::new(),
                }
            ])
        );
    }

    // Boundary fd numbers: 0 (stdin) and a high fd both round-trip.
    #[test]
    fn test_parse_all_fd_numbers() {
        let tokens = vec![
            Token::Word("cmd".to_string()),
            Token::RedirectFdIn(0, "file".to_string()),
        ];
        let result = parse(tokens).unwrap();
        if let Ast::Pipeline(cmds) = result {
            assert_eq!(
                cmds[0].redirections[0],
                Redirection::FdInput(0, "file".to_string())
            );
        } else {
            panic!("Expected Pipeline");
        }

        let tokens = vec![
            Token::Word("cmd".to_string()),
            Token::RedirectFdOut(9, "file".to_string()),
        ];
        let result = parse(tokens).unwrap();
        if let Ast::Pipeline(cmds) = result {
            assert_eq!(
                cmds[0].redirections[0],
                Redirection::FdOutput(9, "file".to_string())
            );
        } else {
            panic!("Expected Pipeline");
        }
    }
}