1use super::lexer::Token;
2
/// Abstract syntax tree of a parsed shell program, produced by [`parse`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Ast {
    /// One or more commands connected by `|`; a simple command is a
    /// single-element pipeline.
    Pipeline(Vec<ShellCommand>),
    /// Commands run one after another (separated by `;` or newlines).
    Sequence(Vec<Ast>),
    /// `VAR=value` variable assignment.
    Assignment {
        var: String,
        value: String,
    },
    /// `local VAR=value` declaration.
    LocalAssignment {
        var: String,
        value: String,
    },
    /// `if`/`elif` chain: each entry is a `(condition, body)` pair, in
    /// source order; `else_branch` holds the optional `else` body.
    If {
        branches: Vec<(Box<Ast>, Box<Ast>)>, else_branch: Option<Box<Ast>>,
    },
    /// `case word in …`: each arm is `(patterns, body)`; `default` holds
    /// the fallback arm if present.
    Case {
        word: String,
        cases: Vec<(Vec<String>, Ast)>,
        default: Option<Box<Ast>>,
    },
    /// `for variable in items; do body; done`.
    For {
        variable: String,
        items: Vec<String>,
        body: Box<Ast>,
    },
    /// `while condition; do body; done`.
    While {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// `name() { body }` function definition.
    FunctionDefinition {
        name: String,
        body: Box<Ast>,
    },
    /// Invocation of a defined function with its arguments.
    FunctionCall {
        name: String,
        args: Vec<String>,
    },
    /// `return` with an optional value word.
    Return {
        value: Option<String>,
    },
    /// `left && right` logical conjunction.
    And {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `left || right` logical disjunction.
    Or {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `( body )` subshell.
    Subshell {
        body: Box<Ast>,
    },
    /// `{ body }` command group.
    CommandGroup {
        body: Box<Ast>,
    },
}
63
/// A single I/O redirection attached to a command.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Redirection {
    /// `< file` — read stdin from `file`.
    Input(String),
    /// `> file` — write stdout to `file`.
    Output(String),
    /// `>> file` — append stdout to `file`.
    Append(String),
    /// `n< file` — read `file` on descriptor `n`.
    FdInput(i32, String),
    /// `n> file` — write `file` on descriptor `n`.
    FdOutput(i32, String),
    /// `n>> file` — append to `file` on descriptor `n`.
    FdAppend(i32, String),
    /// `n>&m` — duplicate descriptor `m` onto `n`.
    FdDuplicate(i32, i32),
    /// `n>&-` — close descriptor `n`.
    FdClose(i32),
    /// `n<> file` — open `file` read/write on descriptor `n`.
    FdInputOutput(i32, String),
    /// Here-document: `(delimiter, quoted)`. The second field is the
    /// lexer's quoted marker rendered via `to_string()` — NOTE(review):
    /// confirm its exact format against the lexer.
    HereDoc(String, String),
    /// `<<< word` here-string content.
    HereString(String),
}
90
/// One element of a pipeline.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ShellCommand {
    // Command name followed by its arguments; may be empty when the
    // element is a pure compound command.
    pub args: Vec<String>,
    // Redirections applied to this command, in source order.
    pub redirections: Vec<Redirection>,
    // Set when this pipeline element wraps a compound command
    // (subshell or command group) instead of a simple command.
    pub compound: Option<Box<Ast>>,
}
100
/// Returns `true` when `name` is a valid shell identifier: a letter or
/// underscore followed only by letters, digits, or underscores.
///
/// Previously only the first character was checked, which accepted names
/// like `a-b` or `x.y` and caused `a-b=1` to parse as an assignment.
fn is_valid_variable_name(name: &str) -> bool {
    let mut chars = name.chars();
    match chars.next() {
        // First char: alphabetic or `_`; remainder: alphanumeric or `_`.
        Some(first) if first.is_alphabetic() || first == '_' => {
            chars.all(|c| c.is_alphanumeric() || c == '_')
        }
        // Empty string or invalid first character.
        _ => false,
    }
}
110
111fn create_empty_body_ast() -> Ast {
114 Ast::Pipeline(vec![ShellCommand {
115 args: vec!["true".to_string()],
116 redirections: Vec::new(),
117 compound: None,
118 }])
119}
120
121fn skip_newlines(tokens: &[Token], i: &mut usize) {
124 while *i < tokens.len() && tokens[*i] == Token::Newline {
125 *i += 1;
126 }
127}
128
129fn skip_to_matching_fi(tokens: &[Token], i: &mut usize) {
132 let mut if_depth = 1;
133 *i += 1; while *i < tokens.len() && if_depth > 0 {
135 match tokens[*i] {
136 Token::If => if_depth += 1,
137 Token::Fi => if_depth -= 1,
138 _ => {}
139 }
140 *i += 1;
141 }
142}
143
144fn skip_to_matching_done(tokens: &[Token], i: &mut usize) {
147 let mut loop_depth = 1;
148 *i += 1; while *i < tokens.len() && loop_depth > 0 {
150 match tokens[*i] {
151 Token::For | Token::While => loop_depth += 1,
152 Token::Done => loop_depth -= 1,
153 _ => {}
154 }
155 *i += 1;
156 }
157}
158
159fn skip_to_matching_esac(tokens: &[Token], i: &mut usize) {
161 *i += 1; while *i < tokens.len() {
163 if tokens[*i] == Token::Esac {
164 *i += 1;
165 break;
166 }
167 *i += 1;
168 }
169}
170
/// Parse a complete token stream into an [`Ast`].
///
/// Entry point of the parser. Special-cases a leading function definition
/// (`name ( ) { … }`, or `name()` fused into one word by the lexer) so the
/// definition and any commands after it are parsed separately; everything
/// else is delegated to `parse_commands_sequentially`.
pub fn parse(tokens: Vec<Token>) -> Result<Ast, String> {
    // Leading `name ( ) {`: a function definition possibly followed by
    // further commands.
    if tokens.len() >= 4
        && let (Token::Word(_), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
    {
        // Find the brace that closes the function body. Compound
        // statements inside the body are skipped wholesale so tokens they
        // contain cannot unbalance the brace counter.
        let mut brace_depth = 1;
        let mut function_end = tokens.len();
        let mut j = 4;
        while j < tokens.len() {
            match &tokens[j] {
                Token::LeftBrace => {
                    brace_depth += 1;
                    j += 1;
                }
                Token::RightBrace => {
                    brace_depth -= 1;
                    if brace_depth == 0 {
                        // One past the closing brace.
                        function_end = j + 1;
                        break;
                    }
                    j += 1;
                }
                Token::If => {
                    // Skip to the matching `fi`.
                    let mut if_depth = 1;
                    j += 1;
                    while j < tokens.len() && if_depth > 0 {
                        match tokens[j] {
                            Token::If => if_depth += 1,
                            Token::Fi => if_depth -= 1,
                            _ => {}
                        }
                        j += 1;
                    }
                }
                Token::For | Token::While => {
                    // Skip to the matching `done`.
                    let mut for_depth = 1;
                    j += 1;
                    while j < tokens.len() && for_depth > 0 {
                        match tokens[j] {
                            Token::For | Token::While => for_depth += 1,
                            Token::Done => for_depth -= 1,
                            _ => {}
                        }
                        j += 1;
                    }
                }
                Token::Case => {
                    // Skip past the next `esac` (no nesting tracked here).
                    j += 1;
                    while j < tokens.len() {
                        if tokens[j] == Token::Esac {
                            j += 1;
                            break;
                        }
                        j += 1;
                    }
                }
                _ => {
                    j += 1;
                }
            }
        }

        if brace_depth == 0 && function_end <= tokens.len() {
            let function_tokens = &tokens[0..function_end];
            let remaining_tokens = &tokens[function_end..];

            let function_ast = parse_function_definition(function_tokens)?;

            return if remaining_tokens.is_empty() {
                Ok(function_ast)
            } else {
                // Parse the trailing commands and sequence them after the
                // definition.
                let remaining_ast = parse_commands_sequentially(remaining_tokens)?;
                Ok(Ast::Sequence(vec![function_ast, remaining_ast]))
            };
        }
    }

    // The lexer may fuse `name()` into a single word: `name() {`.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
        && tokens[1] == Token::LeftBrace
    {
        return parse_function_definition(&tokens);
    }

    parse_commands_sequentially(&tokens)
}
271
/// Parse one separator-free command slice into an [`Ast`] node.
///
/// Tries the special command forms in order — assignments, `local`
/// declarations, `return`, compound statements (`if`/`case`/`for`/`while`),
/// function definitions — and finally falls back to [`parse_pipeline`].
fn parse_slice(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.is_empty() {
        return Err("No commands found".to_string());
    }

    // `VAR=pre value` split over two words: the value is the suffix after
    // `=` concatenated with the second word. Note `eq_pos < var_eq.len()`
    // always holds when `find` succeeds, so this also matches `VAR= value`.
    if tokens.len() == 2 {
        if let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
            && let Some(eq_pos) = var_eq.find('=')
            && eq_pos > 0
            && eq_pos < var_eq.len()
        {
            let var = var_eq[..eq_pos].to_string();
            let full_value = format!("{}{}", &var_eq[eq_pos + 1..], value);
            if is_valid_variable_name(&var) {
                return Ok(Ast::Assignment {
                    var,
                    value: full_value,
                });
            }
        }
    }

    // `VAR= value` with `=` as the last byte of the first word.
    // NOTE(review): this branch appears unreachable — the branch above
    // already accepts `eq_pos == var_eq.len() - 1` and builds the same AST.
    if tokens.len() == 2
        && let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos == var_eq.len() - 1
    {
        let var = var_eq[..eq_pos].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment {
                var,
                value: value.clone(),
            });
        }
    }

    // `local VAR value` or `local VAR= value` (three tokens).
    if tokens.len() == 3
        && let (Token::Local, Token::Word(var), Token::Word(value)) =
            (&tokens[0], &tokens[1], &tokens[2])
    {
        // Strip a trailing `=` so both spellings are treated alike.
        let clean_var = if var.ends_with('=') {
            &var[..var.len() - 1]
        } else {
            var
        };
        if is_valid_variable_name(clean_var) {
            return Ok(Ast::LocalAssignment {
                var: clean_var.to_string(),
                value: value.clone(),
            });
        } else {
            return Err(format!("Invalid variable name: {}", clean_var));
        }
    }

    // `return` or `return value`.
    if !tokens.is_empty()
        && tokens.len() <= 2
        && let Token::Return = &tokens[0]
    {
        if tokens.len() == 1 {
            return Ok(Ast::Return { value: None });
        } else if let Token::Word(word) = &tokens[1] {
            return Ok(Ast::Return {
                value: Some(word.clone()),
            });
        }
    }

    // `local VAR=value` where the assignment is a single word.
    if tokens.len() == 2
        && let (Token::Local, Token::Word(var_eq)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos < var_eq.len()
    {
        let var = var_eq[..eq_pos].to_string();
        let value = var_eq[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::LocalAssignment { var, value });
        } else {
            return Err(format!("Invalid variable name: {}", var));
        }
    }

    // `local VAR` with no value: declare with the empty string.
    if tokens.len() == 2
        && let (Token::Local, Token::Word(var)) = (&tokens[0], &tokens[1])
        && !var.contains('=')
    {
        if is_valid_variable_name(var) {
            return Ok(Ast::LocalAssignment {
                var: var.clone(),
                value: String::new(),
            });
        } else {
            return Err(format!("Invalid variable name: {}", var));
        }
    }

    // Single-word `VAR=value` assignment.
    if tokens.len() == 1
        && let Token::Word(ref word) = tokens[0]
        && let Some(eq_pos) = word.find('=')
        && eq_pos > 0
        && eq_pos < word.len()
    {
        let var = word[..eq_pos].to_string();
        let value = word[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment { var, value });
        }
    }

    // Compound statements dispatch on their leading keyword.
    if let Token::If = tokens[0] {
        return parse_if(tokens);
    }

    if let Token::Case = tokens[0] {
        return parse_case(tokens);
    }

    if let Token::For = tokens[0] {
        return parse_for(tokens);
    }

    if let Token::While = tokens[0] {
        return parse_while(tokens);
    }

    // `name ( ) {` function definition.
    if tokens.len() >= 4
        && let (Token::Word(word), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
        && is_valid_variable_name(word)
    {
        return parse_function_definition(tokens);
    }

    // `name() {` fused into a single word by the lexer.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
    {
        let func_name = &word[..paren_pos];
        if is_valid_variable_name(func_name) && tokens[1] == Token::LeftBrace {
            return parse_function_definition(tokens);
        }
    }

    // Everything else is a plain command pipeline.
    parse_pipeline(tokens)
}
449
450fn parse_commands_sequentially(tokens: &[Token]) -> Result<Ast, String> {
451 let mut i = 0;
452 let mut commands = Vec::new();
453
454 while i < tokens.len() {
455 while i < tokens.len() {
457 match &tokens[i] {
458 Token::Newline => {
459 i += 1;
460 }
461 Token::Word(word) if word.starts_with('#') => {
462 while i < tokens.len() && tokens[i] != Token::Newline {
464 i += 1;
465 }
466 if i < tokens.len() {
467 i += 1; }
469 }
470 _ => break,
471 }
472 }
473
474 if i >= tokens.len() {
475 break;
476 }
477
478 let start = i;
480
481 if tokens[i] == Token::LeftParen {
484 let mut paren_depth = 1;
486 let mut j = i + 1;
487
488 while j < tokens.len() && paren_depth > 0 {
489 match tokens[j] {
490 Token::LeftParen => paren_depth += 1,
491 Token::RightParen => paren_depth -= 1,
492 _ => {}
493 }
494 j += 1;
495 }
496
497 if paren_depth != 0 {
498 return Err("Unmatched parenthesis in subshell".to_string());
499 }
500
501 let subshell_tokens = &tokens[i + 1..j - 1];
503
504 let body_ast = if subshell_tokens.is_empty() {
507 return Err("Empty subshell".to_string());
508 } else {
509 parse_commands_sequentially(subshell_tokens)?
510 };
511
512 let mut subshell_ast = Ast::Subshell {
513 body: Box::new(body_ast),
514 };
515
516 i = j; let mut redirections = Vec::new();
520 while i < tokens.len() {
521 match &tokens[i] {
522 Token::RedirOut => {
523 i += 1;
524 if i < tokens.len() {
525 if let Token::Word(file) = &tokens[i] {
526 redirections.push(Redirection::Output(file.clone()));
527 i += 1;
528 }
529 }
530 }
531 Token::RedirIn => {
532 i += 1;
533 if i < tokens.len() {
534 if let Token::Word(file) = &tokens[i] {
535 redirections.push(Redirection::Input(file.clone()));
536 i += 1;
537 }
538 }
539 }
540 Token::RedirAppend => {
541 i += 1;
542 if i < tokens.len() {
543 if let Token::Word(file) = &tokens[i] {
544 redirections.push(Redirection::Append(file.clone()));
545 i += 1;
546 }
547 }
548 }
549 Token::RedirectFdOut(fd, file) => {
550 redirections.push(Redirection::FdOutput(*fd, file.clone()));
551 i += 1;
552 }
553 Token::RedirectFdIn(fd, file) => {
554 redirections.push(Redirection::FdInput(*fd, file.clone()));
555 i += 1;
556 }
557 Token::RedirectFdAppend(fd, file) => {
558 redirections.push(Redirection::FdAppend(*fd, file.clone()));
559 i += 1;
560 }
561 Token::RedirectFdDup(from_fd, to_fd) => {
562 redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
563 i += 1;
564 }
565 Token::RedirectFdClose(fd) => {
566 redirections.push(Redirection::FdClose(*fd));
567 i += 1;
568 }
569 Token::RedirectFdInOut(fd, file) => {
570 redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
571 i += 1;
572 }
573 Token::RedirHereDoc(delimiter, quoted) => {
574 redirections
575 .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
576 i += 1;
577 }
578 Token::RedirHereString(content) => {
579 redirections.push(Redirection::HereString(content.clone()));
580 i += 1;
581 }
582 _ => break,
583 }
584 }
585
586 if i < tokens.len() && tokens[i] == Token::Pipe {
588 let mut end = i;
590 let mut brace_depth = 0;
591 let mut paren_depth = 0;
592 let mut last_was_pipe = true; while end < tokens.len() {
594 match &tokens[end] {
595 Token::Pipe => last_was_pipe = true,
596 Token::LeftBrace => {
597 brace_depth += 1;
598 last_was_pipe = false;
599 }
600 Token::RightBrace => {
601 if brace_depth > 0 {
602 brace_depth -= 1;
603 } else {
604 break;
605 }
606 last_was_pipe = false;
607 }
608 Token::LeftParen => {
609 paren_depth += 1;
610 last_was_pipe = false;
611 }
612 Token::RightParen => {
613 if paren_depth > 0 {
614 paren_depth -= 1;
615 } else {
616 break;
617 }
618 last_was_pipe = false;
619 }
620 Token::Newline | Token::Semicolon => {
621 if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
622 break;
623 }
624 }
625 Token::Word(_) => last_was_pipe = false,
626 _ => {}
627 }
628 end += 1;
629 }
630
631 let pipeline_ast = parse_pipeline(&tokens[start..end])?;
632 commands.push(pipeline_ast);
633 i = end;
634 continue;
635 }
636
637 if !redirections.is_empty() {
639 subshell_ast = Ast::Pipeline(vec![ShellCommand {
640 args: Vec::new(),
641 redirections,
642 compound: Some(Box::new(subshell_ast)),
643 }]);
644 }
645
646 if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
648 let operator = tokens[i].clone();
649 i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
653 i += 1;
654 }
655
656 let remaining_tokens = &tokens[i..];
658 let right_ast = parse_commands_sequentially(remaining_tokens)?;
659
660 let combined_ast = match operator {
662 Token::And => Ast::And {
663 left: Box::new(subshell_ast),
664 right: Box::new(right_ast),
665 },
666 Token::Or => Ast::Or {
667 left: Box::new(subshell_ast),
668 right: Box::new(right_ast),
669 },
670 _ => unreachable!(),
671 };
672
673 commands.push(combined_ast);
674 break; }
676
677 commands.push(subshell_ast);
678
679 if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
681 i += 1;
682 }
683 continue;
684 }
685
686 if tokens[i] == Token::LeftBrace {
688 let mut brace_depth = 1;
690 let mut j = i + 1;
691
692 while j < tokens.len() && brace_depth > 0 {
693 match tokens[j] {
694 Token::LeftBrace => brace_depth += 1,
695 Token::RightBrace => brace_depth -= 1,
696 _ => {}
697 }
698 j += 1;
699 }
700
701 if brace_depth != 0 {
702 return Err("Unmatched brace in command group".to_string());
703 }
704
705 let group_tokens = &tokens[i + 1..j - 1];
707
708 let body_ast = if group_tokens.is_empty() {
711 return Err("Empty command group".to_string());
712 } else {
713 parse_commands_sequentially(group_tokens)?
714 };
715
716 let mut group_ast = Ast::CommandGroup {
717 body: Box::new(body_ast),
718 };
719
720 i = j; let mut redirections = Vec::new();
724 while i < tokens.len() {
725 match &tokens[i] {
726 Token::RedirOut => {
727 i += 1;
728 if i < tokens.len() {
729 if let Token::Word(file) = &tokens[i] {
730 redirections.push(Redirection::Output(file.clone()));
731 i += 1;
732 }
733 }
734 }
735 Token::RedirIn => {
736 i += 1;
737 if i < tokens.len() {
738 if let Token::Word(file) = &tokens[i] {
739 redirections.push(Redirection::Input(file.clone()));
740 i += 1;
741 }
742 }
743 }
744 Token::RedirAppend => {
745 i += 1;
746 if i < tokens.len() {
747 if let Token::Word(file) = &tokens[i] {
748 redirections.push(Redirection::Append(file.clone()));
749 i += 1;
750 }
751 }
752 }
753 Token::RedirectFdOut(fd, file) => {
754 redirections.push(Redirection::FdOutput(*fd, file.clone()));
755 i += 1;
756 }
757 Token::RedirectFdIn(fd, file) => {
758 redirections.push(Redirection::FdInput(*fd, file.clone()));
759 i += 1;
760 }
761 Token::RedirectFdAppend(fd, file) => {
762 redirections.push(Redirection::FdAppend(*fd, file.clone()));
763 i += 1;
764 }
765 Token::RedirectFdDup(from_fd, to_fd) => {
766 redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
767 i += 1;
768 }
769 Token::RedirectFdClose(fd) => {
770 redirections.push(Redirection::FdClose(*fd));
771 i += 1;
772 }
773 Token::RedirectFdInOut(fd, file) => {
774 redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
775 i += 1;
776 }
777 Token::RedirHereDoc(delimiter, quoted) => {
778 redirections
779 .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
780 i += 1;
781 }
782 Token::RedirHereString(content) => {
783 redirections.push(Redirection::HereString(content.clone()));
784 i += 1;
785 }
786 _ => break,
787 }
788 }
789
790 if i < tokens.len() && tokens[i] == Token::Pipe {
792 let mut end = i;
794 let mut brace_depth = 0;
795 let mut paren_depth = 0;
796 let mut last_was_pipe = true; while end < tokens.len() {
798 match &tokens[end] {
799 Token::Pipe => last_was_pipe = true,
800 Token::LeftBrace => {
801 brace_depth += 1;
802 last_was_pipe = false;
803 }
804 Token::RightBrace => {
805 if brace_depth > 0 {
806 brace_depth -= 1;
807 } else {
808 break;
809 }
810 last_was_pipe = false;
811 }
812 Token::LeftParen => {
813 paren_depth += 1;
814 last_was_pipe = false;
815 }
816 Token::RightParen => {
817 if paren_depth > 0 {
818 paren_depth -= 1;
819 } else {
820 break;
821 }
822 last_was_pipe = false;
823 }
824 Token::Newline | Token::Semicolon => {
825 if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
826 break;
827 }
828 }
829 Token::Word(_) => last_was_pipe = false,
830 _ => {}
831 }
832 end += 1;
833 }
834
835 let pipeline_ast = parse_pipeline(&tokens[start..end])?;
836 commands.push(pipeline_ast);
837 i = end;
838 continue;
839 }
840
841 if !redirections.is_empty() {
843 group_ast = Ast::Pipeline(vec![ShellCommand {
844 args: Vec::new(),
845 redirections,
846 compound: Some(Box::new(group_ast)),
847 }]);
848 }
849
850 if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
852 let operator = tokens[i].clone();
853 i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
857 i += 1;
858 }
859
860 let remaining_tokens = &tokens[i..];
862 let right_ast = parse_commands_sequentially(remaining_tokens)?;
863
864 let combined_ast = match operator {
866 Token::And => Ast::And {
867 left: Box::new(group_ast),
868 right: Box::new(right_ast),
869 },
870 Token::Or => Ast::Or {
871 left: Box::new(group_ast),
872 right: Box::new(right_ast),
873 },
874 _ => unreachable!(),
875 };
876
877 commands.push(combined_ast);
878 break; }
880
881 commands.push(group_ast);
882
883 if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
885 i += 1;
886 }
887 continue;
888 }
889
890 if tokens[i] == Token::If {
892 let mut depth = 0;
894 while i < tokens.len() {
895 match tokens[i] {
896 Token::If => depth += 1,
897 Token::Fi => {
898 depth -= 1;
899 if depth == 0 {
900 i += 1; break;
902 }
903 }
904 _ => {}
905 }
906 i += 1;
907 }
908
909 } else if tokens[i] == Token::For {
912 let mut depth = 1; i += 1; while i < tokens.len() {
916 match tokens[i] {
917 Token::For | Token::While => depth += 1,
918 Token::Done => {
919 depth -= 1;
920 if depth == 0 {
921 i += 1; break;
923 }
924 }
925 _ => {}
926 }
927 i += 1;
928 }
929 } else if tokens[i] == Token::While {
930 let mut depth = 1; i += 1; while i < tokens.len() {
934 match tokens[i] {
935 Token::While | Token::For => depth += 1,
936 Token::Done => {
937 depth -= 1;
938 if depth == 0 {
939 i += 1; break;
941 }
942 }
943 _ => {}
944 }
945 i += 1;
946 }
947 } else if tokens[i] == Token::Case {
948 while i < tokens.len() {
950 if tokens[i] == Token::Esac {
951 i += 1; break;
953 }
954 i += 1;
955 }
956 } else if i + 3 < tokens.len()
957 && matches!(tokens[i], Token::Word(_))
958 && tokens[i + 1] == Token::LeftParen
959 && tokens[i + 2] == Token::RightParen
960 && tokens[i + 3] == Token::LeftBrace
961 {
962 let mut brace_depth = 1;
964 i += 4; while i < tokens.len() && brace_depth > 0 {
966 match tokens[i] {
967 Token::LeftBrace => brace_depth += 1,
968 Token::RightBrace => brace_depth -= 1,
969 _ => {}
970 }
971 i += 1;
972 }
973 } else {
974 let mut brace_depth = 0;
977 let mut paren_depth = 0;
978 let mut last_was_pipe = false;
979 while i < tokens.len() {
980 match &tokens[i] {
981 Token::LeftBrace => {
982 brace_depth += 1;
983 last_was_pipe = false;
984 }
985 Token::RightBrace => {
986 if brace_depth > 0 {
987 brace_depth -= 1;
988 } else {
989 break;
990 }
991 last_was_pipe = false;
992 }
993 Token::LeftParen => {
994 paren_depth += 1;
995 last_was_pipe = false;
996 }
997 Token::RightParen => {
998 if paren_depth > 0 {
999 paren_depth -= 1;
1000 } else {
1001 break;
1002 }
1003 last_was_pipe = false;
1004 }
1005 Token::Pipe => last_was_pipe = true,
1006 Token::Newline | Token::Semicolon | Token::And | Token::Or => {
1007 if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
1008 break;
1009 }
1010 }
1011 Token::Word(_) => last_was_pipe = false,
1012 _ => {}
1013 }
1014 i += 1;
1015 }
1016 }
1017
1018 let command_tokens = &tokens[start..i];
1019 if !command_tokens.is_empty() {
1020 if command_tokens.len() == 1 {
1022 match command_tokens[0] {
1023 Token::Else | Token::Elif | Token::Fi => {
1024 if i < tokens.len()
1026 && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
1027 {
1028 i += 1;
1029 }
1030 continue;
1031 }
1032 _ => {}
1033 }
1034 }
1035
1036 let ast = parse_slice(command_tokens)?;
1037
1038 if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
1040 let operator = tokens[i].clone();
1041 i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
1045 i += 1;
1046 }
1047
1048 let remaining_tokens = &tokens[i..];
1050 let right_ast = parse_commands_sequentially(remaining_tokens)?;
1051
1052 let combined_ast = match operator {
1054 Token::And => Ast::And {
1055 left: Box::new(ast),
1056 right: Box::new(right_ast),
1057 },
1058 Token::Or => Ast::Or {
1059 left: Box::new(ast),
1060 right: Box::new(right_ast),
1061 },
1062 _ => unreachable!(),
1063 };
1064
1065 commands.push(combined_ast);
1066 break; } else {
1068 commands.push(ast);
1069 }
1070 }
1071
1072 if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
1073 i += 1;
1074 }
1075 }
1076
1077 if commands.is_empty() {
1078 return Err("No commands found".to_string());
1079 }
1080
1081 if commands.len() == 1 {
1082 Ok(commands.into_iter().next().unwrap())
1083 } else {
1084 Ok(Ast::Sequence(commands))
1085 }
1086}
1087
1088fn parse_pipeline(tokens: &[Token]) -> Result<Ast, String> {
1089 let mut commands = Vec::new();
1090 let mut current_cmd = ShellCommand::default();
1091
1092 let mut i = 0;
1093 while i < tokens.len() {
1094 let token = &tokens[i];
1095 match token {
1096 Token::LeftBrace => {
1097 let mut brace_depth = 1;
1100 let mut j = i + 1;
1101
1102 while j < tokens.len() && brace_depth > 0 {
1103 match tokens[j] {
1104 Token::LeftBrace => brace_depth += 1,
1105 Token::RightBrace => brace_depth -= 1,
1106 _ => {}
1107 }
1108 j += 1;
1109 }
1110
1111 if brace_depth != 0 {
1112 return Err("Unmatched brace in pipeline".to_string());
1113 }
1114
1115 let group_tokens = &tokens[i + 1..j - 1];
1117
1118 let body_ast = if group_tokens.is_empty() {
1120 create_empty_body_ast()
1121 } else {
1122 parse_commands_sequentially(group_tokens)?
1123 };
1124
1125 current_cmd.compound = Some(Box::new(Ast::CommandGroup {
1127 body: Box::new(body_ast),
1128 }));
1129
1130 i = j; while i < tokens.len() {
1134 match &tokens[i] {
1135 Token::RedirOut => {
1136 i += 1;
1137 if i < tokens.len() {
1138 if let Token::Word(file) = &tokens[i] {
1139 current_cmd
1140 .redirections
1141 .push(Redirection::Output(file.clone()));
1142 i += 1;
1143 }
1144 }
1145 }
1146 Token::RedirIn => {
1147 i += 1;
1148 if i < tokens.len() {
1149 if let Token::Word(file) = &tokens[i] {
1150 current_cmd
1151 .redirections
1152 .push(Redirection::Input(file.clone()));
1153 i += 1;
1154 }
1155 }
1156 }
1157 Token::RedirAppend => {
1158 i += 1;
1159 if i < tokens.len() {
1160 if let Token::Word(file) = &tokens[i] {
1161 current_cmd
1162 .redirections
1163 .push(Redirection::Append(file.clone()));
1164 i += 1;
1165 }
1166 }
1167 }
1168 Token::RedirectFdOut(fd, file) => {
1169 current_cmd
1170 .redirections
1171 .push(Redirection::FdOutput(*fd, file.clone()));
1172 i += 1;
1173 }
1174 Token::RedirectFdIn(fd, file) => {
1175 current_cmd
1176 .redirections
1177 .push(Redirection::FdInput(*fd, file.clone()));
1178 i += 1;
1179 }
1180 Token::RedirectFdAppend(fd, file) => {
1181 current_cmd
1182 .redirections
1183 .push(Redirection::FdAppend(*fd, file.clone()));
1184 i += 1;
1185 }
1186 Token::RedirectFdDup(from_fd, to_fd) => {
1187 current_cmd
1188 .redirections
1189 .push(Redirection::FdDuplicate(*from_fd, *to_fd));
1190 i += 1;
1191 }
1192 Token::RedirectFdClose(fd) => {
1193 current_cmd.redirections.push(Redirection::FdClose(*fd));
1194 i += 1;
1195 }
1196 Token::RedirectFdInOut(fd, file) => {
1197 current_cmd
1198 .redirections
1199 .push(Redirection::FdInputOutput(*fd, file.clone()));
1200 i += 1;
1201 }
1202 Token::RedirHereDoc(delimiter, quoted) => {
1203 current_cmd
1204 .redirections
1205 .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
1206 i += 1;
1207 }
1208 Token::RedirHereString(content) => {
1209 current_cmd
1210 .redirections
1211 .push(Redirection::HereString(content.clone()));
1212 i += 1;
1213 }
1214 Token::Pipe => {
1215 break;
1217 }
1218 _ => break,
1219 }
1220 }
1221
1222 continue;
1224 }
1225 Token::LeftParen => {
1226 let mut paren_depth = 1;
1229 let mut j = i + 1;
1230
1231 while j < tokens.len() && paren_depth > 0 {
1232 match tokens[j] {
1233 Token::LeftParen => paren_depth += 1,
1234 Token::RightParen => paren_depth -= 1,
1235 _ => {}
1236 }
1237 j += 1;
1238 }
1239
1240 if paren_depth != 0 {
1241 return Err("Unmatched parenthesis in pipeline".to_string());
1242 }
1243
1244 let subshell_tokens = &tokens[i + 1..j - 1];
1246
1247 let body_ast = if subshell_tokens.is_empty() {
1249 create_empty_body_ast()
1250 } else {
1251 parse_commands_sequentially(subshell_tokens)?
1252 };
1253
1254 current_cmd.compound = Some(Box::new(Ast::Subshell {
1257 body: Box::new(body_ast),
1258 }));
1259
1260 i = j; while i < tokens.len() {
1264 match &tokens[i] {
1265 Token::RedirOut => {
1266 i += 1;
1267 if i < tokens.len() {
1268 if let Token::Word(file) = &tokens[i] {
1269 current_cmd
1270 .redirections
1271 .push(Redirection::Output(file.clone()));
1272 i += 1;
1273 }
1274 }
1275 }
1276 Token::RedirIn => {
1277 i += 1;
1278 if i < tokens.len() {
1279 if let Token::Word(file) = &tokens[i] {
1280 current_cmd
1281 .redirections
1282 .push(Redirection::Input(file.clone()));
1283 i += 1;
1284 }
1285 }
1286 }
1287 Token::RedirAppend => {
1288 i += 1;
1289 if i < tokens.len() {
1290 if let Token::Word(file) = &tokens[i] {
1291 current_cmd
1292 .redirections
1293 .push(Redirection::Append(file.clone()));
1294 i += 1;
1295 }
1296 }
1297 }
1298 Token::RedirectFdOut(fd, file) => {
1299 current_cmd
1300 .redirections
1301 .push(Redirection::FdOutput(*fd, file.clone()));
1302 i += 1;
1303 }
1304 Token::RedirectFdIn(fd, file) => {
1305 current_cmd
1306 .redirections
1307 .push(Redirection::FdInput(*fd, file.clone()));
1308 i += 1;
1309 }
1310 Token::RedirectFdAppend(fd, file) => {
1311 current_cmd
1312 .redirections
1313 .push(Redirection::FdAppend(*fd, file.clone()));
1314 i += 1;
1315 }
1316 Token::RedirectFdDup(from_fd, to_fd) => {
1317 current_cmd
1318 .redirections
1319 .push(Redirection::FdDuplicate(*from_fd, *to_fd));
1320 i += 1;
1321 }
1322 Token::RedirectFdClose(fd) => {
1323 current_cmd.redirections.push(Redirection::FdClose(*fd));
1324 i += 1;
1325 }
1326 Token::RedirectFdInOut(fd, file) => {
1327 current_cmd
1328 .redirections
1329 .push(Redirection::FdInputOutput(*fd, file.clone()));
1330 i += 1;
1331 }
1332 Token::RedirHereDoc(delimiter, quoted) => {
1333 current_cmd
1334 .redirections
1335 .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
1336 i += 1;
1337 }
1338 Token::RedirHereString(content) => {
1339 current_cmd
1340 .redirections
1341 .push(Redirection::HereString(content.clone()));
1342 i += 1;
1343 }
1344 Token::Pipe => {
1345 break;
1347 }
1348 _ => break,
1349 }
1350 }
1351
1352 continue;
1354 }
1355 Token::Word(word) => {
1356 current_cmd.args.push(word.clone());
1357 }
1358 Token::Local => {
1359 current_cmd.args.push("local".to_string());
1360 }
1361 Token::Return => {
1362 current_cmd.args.push("return".to_string());
1363 }
1364 Token::Pipe => {
1365 if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
1366 commands.push(current_cmd.clone());
1367 current_cmd = ShellCommand::default();
1368 }
1369 }
1370 Token::RedirIn => {
1372 i += 1;
1373 if i < tokens.len()
1374 && let Token::Word(ref file) = tokens[i]
1375 {
1376 current_cmd
1377 .redirections
1378 .push(Redirection::Input(file.clone()));
1379 }
1380 }
1381 Token::RedirOut => {
1382 i += 1;
1383 if i < tokens.len()
1384 && let Token::Word(ref file) = tokens[i]
1385 {
1386 current_cmd
1387 .redirections
1388 .push(Redirection::Output(file.clone()));
1389 }
1390 }
1391 Token::RedirAppend => {
1392 i += 1;
1393 if i < tokens.len()
1394 && let Token::Word(ref file) = tokens[i]
1395 {
1396 current_cmd
1397 .redirections
1398 .push(Redirection::Append(file.clone()));
1399 }
1400 }
1401 Token::RedirHereDoc(delimiter, quoted) => {
1402 current_cmd
1404 .redirections
1405 .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
1406 }
1407 Token::RedirHereString(content) => {
1408 current_cmd
1409 .redirections
1410 .push(Redirection::HereString(content.clone()));
1411 }
1412 Token::RedirectFdIn(fd, file) => {
1414 current_cmd
1415 .redirections
1416 .push(Redirection::FdInput(*fd, file.clone()));
1417 }
1418 Token::RedirectFdOut(fd, file) => {
1419 current_cmd
1420 .redirections
1421 .push(Redirection::FdOutput(*fd, file.clone()));
1422 }
1423 Token::RedirectFdAppend(fd, file) => {
1424 current_cmd
1425 .redirections
1426 .push(Redirection::FdAppend(*fd, file.clone()));
1427 }
1428 Token::RedirectFdDup(from_fd, to_fd) => {
1429 current_cmd
1430 .redirections
1431 .push(Redirection::FdDuplicate(*from_fd, *to_fd));
1432 }
1433 Token::RedirectFdClose(fd) => {
1434 current_cmd.redirections.push(Redirection::FdClose(*fd));
1435 }
1436 Token::RedirectFdInOut(fd, file) => {
1437 current_cmd
1438 .redirections
1439 .push(Redirection::FdInputOutput(*fd, file.clone()));
1440 }
1441 Token::RightParen => {
1442 if !current_cmd.args.is_empty()
1445 && i > 0
1446 && let Token::LeftParen = tokens[i - 1]
1447 {
1448 break;
1452 }
1453 return Err("Unexpected ) in pipeline".to_string());
1454 }
1455 Token::Newline => {
1456 if current_cmd.args.is_empty() && current_cmd.compound.is_none() {
1458 } else {
1460 break;
1461 }
1462 }
1463 Token::Do
1464 | Token::Done
1465 | Token::Then
1466 | Token::Else
1467 | Token::Elif
1468 | Token::Fi
1469 | Token::Esac => {
1470 break;
1473 }
1474 _ => {
1475 return Err(format!("Unexpected token in pipeline: {:?}", token));
1476 }
1477 }
1478 i += 1;
1479 }
1480
1481 if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
1482 commands.push(current_cmd);
1483 }
1484
1485 if commands.is_empty() {
1486 return Err("No commands found".to_string());
1487 }
1488
1489 Ok(Ast::Pipeline(commands))
1490}
1491
1492fn parse_if(tokens: &[Token]) -> Result<Ast, String> {
1493 let mut i = 1; let mut branches = Vec::new();
1495
1496 loop {
1497 let mut cond_tokens = Vec::new();
1499 while i < tokens.len()
1500 && tokens[i] != Token::Semicolon
1501 && tokens[i] != Token::Newline
1502 && tokens[i] != Token::Then
1503 {
1504 cond_tokens.push(tokens[i].clone());
1505 i += 1;
1506 }
1507
1508 if i < tokens.len() && (tokens[i] == Token::Semicolon || tokens[i] == Token::Newline) {
1510 i += 1;
1511 }
1512
1513 skip_newlines(tokens, &mut i);
1515
1516 if i >= tokens.len() || tokens[i] != Token::Then {
1517 return Err("Expected then after if/elif condition".to_string());
1518 }
1519 i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
1523 i += 1;
1524 }
1525
1526 let mut then_tokens = Vec::new();
1529 let mut depth = 0;
1530 while i < tokens.len() {
1531 match &tokens[i] {
1532 Token::If => {
1533 depth += 1;
1534 then_tokens.push(tokens[i].clone());
1535 }
1536 Token::Fi => {
1537 if depth > 0 {
1538 depth -= 1;
1539 then_tokens.push(tokens[i].clone());
1540 } else {
1541 break; }
1543 }
1544 Token::Else | Token::Elif if depth == 0 => {
1545 break; }
1547 Token::Newline => {
1548 let mut j = i + 1;
1550 while j < tokens.len() && tokens[j] == Token::Newline {
1551 j += 1;
1552 }
1553 if j < tokens.len()
1554 && depth == 0
1555 && (tokens[j] == Token::Else
1556 || tokens[j] == Token::Elif
1557 || tokens[j] == Token::Fi)
1558 {
1559 i = j; break;
1561 }
1562 then_tokens.push(tokens[i].clone());
1564 }
1565 _ => {
1566 then_tokens.push(tokens[i].clone());
1567 }
1568 }
1569 i += 1;
1570 }
1571
1572 skip_newlines(tokens, &mut i);
1574
1575 let then_ast = if then_tokens.is_empty() {
1576 create_empty_body_ast()
1578 } else {
1579 parse_commands_sequentially(&then_tokens)?
1580 };
1581
1582 let condition = parse_slice(&cond_tokens)?;
1583 branches.push((Box::new(condition), Box::new(then_ast)));
1584
1585 if i < tokens.len() && tokens[i] == Token::Elif {
1587 i += 1; } else {
1589 break;
1590 }
1591 }
1592
1593 let else_ast = if i < tokens.len() && tokens[i] == Token::Else {
1594 i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
1598 i += 1;
1599 }
1600
1601 let mut else_tokens = Vec::new();
1602 let mut depth = 0;
1603 while i < tokens.len() {
1604 match &tokens[i] {
1605 Token::If => {
1606 depth += 1;
1607 else_tokens.push(tokens[i].clone());
1608 }
1609 Token::Fi => {
1610 if depth > 0 {
1611 depth -= 1;
1612 else_tokens.push(tokens[i].clone());
1613 } else {
1614 break; }
1616 }
1617 Token::Newline => {
1618 let mut j = i + 1;
1620 while j < tokens.len() && tokens[j] == Token::Newline {
1621 j += 1;
1622 }
1623 if j < tokens.len() && depth == 0 && tokens[j] == Token::Fi {
1624 i = j; break;
1626 }
1627 else_tokens.push(tokens[i].clone());
1629 }
1630 _ => {
1631 else_tokens.push(tokens[i].clone());
1632 }
1633 }
1634 i += 1;
1635 }
1636
1637 let else_ast = if else_tokens.is_empty() {
1638 create_empty_body_ast()
1640 } else {
1641 parse_commands_sequentially(&else_tokens)?
1642 };
1643
1644 Some(Box::new(else_ast))
1645 } else {
1646 None
1647 };
1648
1649 if i >= tokens.len() || tokens[i] != Token::Fi {
1650 return Err("Expected fi".to_string());
1651 }
1652
1653 Ok(Ast::If {
1654 branches,
1655 else_branch: else_ast,
1656 })
1657}
1658
1659fn parse_case(tokens: &[Token]) -> Result<Ast, String> {
1660 let mut i = 1; if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
1664 return Err("Expected word after case".to_string());
1665 }
1666 let word = if let Token::Word(ref w) = tokens[i] {
1667 w.clone()
1668 } else {
1669 unreachable!()
1670 };
1671 i += 1;
1672
1673 if i >= tokens.len() || tokens[i] != Token::In {
1674 return Err("Expected in after case word".to_string());
1675 }
1676 i += 1;
1677
1678 let mut cases = Vec::new();
1679 let mut default = None;
1680
1681 loop {
1682 while i < tokens.len() && tokens[i] == Token::Newline {
1684 i += 1;
1685 }
1686
1687 if i >= tokens.len() {
1688 return Err("Unexpected end in case statement".to_string());
1689 }
1690
1691 if tokens[i] == Token::Esac {
1692 break;
1693 }
1694
1695 let mut patterns = Vec::new();
1697 while i < tokens.len() && tokens[i] != Token::RightParen {
1698 if let Token::Word(ref p) = tokens[i] {
1699 for pat in p.split('|') {
1701 patterns.push(pat.to_string());
1702 }
1703 } else if tokens[i] == Token::Pipe {
1704 } else if tokens[i] == Token::Newline {
1706 } else {
1708 return Err(format!("Expected pattern, found {:?}", tokens[i]));
1709 }
1710 i += 1;
1711 }
1712
1713 if i >= tokens.len() || tokens[i] != Token::RightParen {
1714 return Err("Expected ) after patterns".to_string());
1715 }
1716 i += 1;
1717
1718 let mut commands_tokens = Vec::new();
1720 while i < tokens.len() && tokens[i] != Token::DoubleSemicolon && tokens[i] != Token::Esac {
1721 commands_tokens.push(tokens[i].clone());
1722 i += 1;
1723 }
1724
1725 let commands_ast = parse_slice(&commands_tokens)?;
1726
1727 if i >= tokens.len() {
1728 return Err("Unexpected end in case statement".to_string());
1729 }
1730
1731 if tokens[i] == Token::DoubleSemicolon {
1732 i += 1;
1733 if patterns.len() == 1 && patterns[0] == "*" {
1735 default = Some(Box::new(commands_ast));
1736 } else {
1737 cases.push((patterns, commands_ast));
1738 }
1739 } else if tokens[i] == Token::Esac {
1740 if patterns.len() == 1 && patterns[0] == "*" {
1742 default = Some(Box::new(commands_ast));
1743 } else {
1744 cases.push((patterns, commands_ast));
1745 }
1746 break;
1747 } else {
1748 return Err("Expected ;; or esac after commands".to_string());
1749 }
1750 }
1751
1752 Ok(Ast::Case {
1753 word,
1754 cases,
1755 default,
1756 })
1757}
1758
1759fn parse_for(tokens: &[Token]) -> Result<Ast, String> {
1760 let mut i = 1; if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
1764 return Err("Expected variable name after for".to_string());
1765 }
1766 let variable = if let Token::Word(ref v) = tokens[i] {
1767 v.clone()
1768 } else {
1769 unreachable!()
1770 };
1771 i += 1;
1772
1773 if i >= tokens.len() || tokens[i] != Token::In {
1775 return Err("Expected 'in' after for variable".to_string());
1776 }
1777 i += 1;
1778
1779 let mut items = Vec::new();
1781 while i < tokens.len() {
1782 match &tokens[i] {
1783 Token::Do => break,
1784 Token::Semicolon | Token::Newline => {
1785 i += 1;
1786 if i < tokens.len() && tokens[i] == Token::Do {
1788 break;
1789 }
1790 }
1791 Token::Word(word) => {
1792 items.push(word.clone());
1793 i += 1;
1794 }
1795 _ => {
1796 return Err(format!("Unexpected token in for items: {:?}", tokens[i]));
1797 }
1798 }
1799 }
1800
1801 while i < tokens.len() && tokens[i] == Token::Newline {
1803 i += 1;
1804 }
1805
1806 if i >= tokens.len() || tokens[i] != Token::Do {
1808 return Err("Expected 'do' in for loop".to_string());
1809 }
1810 i += 1;
1811
1812 while i < tokens.len() && tokens[i] == Token::Newline {
1814 i += 1;
1815 }
1816
1817 let mut body_tokens = Vec::new();
1819 let mut depth = 0;
1820 while i < tokens.len() {
1821 match &tokens[i] {
1822 Token::For => {
1823 depth += 1;
1824 body_tokens.push(tokens[i].clone());
1825 }
1826 Token::Done => {
1827 if depth > 0 {
1828 depth -= 1;
1829 body_tokens.push(tokens[i].clone());
1830 } else {
1831 break; }
1833 }
1834 Token::Newline => {
1835 let mut j = i + 1;
1837 while j < tokens.len() && tokens[j] == Token::Newline {
1838 j += 1;
1839 }
1840 if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
1841 i = j; break;
1843 }
1844 body_tokens.push(tokens[i].clone());
1846 }
1847 _ => {
1848 body_tokens.push(tokens[i].clone());
1849 }
1850 }
1851 i += 1;
1852 }
1853
1854 if i >= tokens.len() || tokens[i] != Token::Done {
1855 return Err("Expected 'done' to close for loop".to_string());
1856 }
1857
1858 let body_ast = if body_tokens.is_empty() {
1860 create_empty_body_ast()
1862 } else {
1863 parse_commands_sequentially(&body_tokens)?
1864 };
1865
1866 Ok(Ast::For {
1867 variable,
1868 items,
1869 body: Box::new(body_ast),
1870 })
1871}
1872
1873fn parse_while(tokens: &[Token]) -> Result<Ast, String> {
1874 let mut i = 1; let mut cond_tokens = Vec::new();
1878 while i < tokens.len() {
1879 match &tokens[i] {
1880 Token::Do => break,
1881 Token::Semicolon | Token::Newline => {
1882 i += 1;
1883 if i < tokens.len() && tokens[i] == Token::Do {
1885 break;
1886 }
1887 }
1888 _ => {
1889 cond_tokens.push(tokens[i].clone());
1890 i += 1;
1891 }
1892 }
1893 }
1894
1895 if cond_tokens.is_empty() {
1896 return Err("Expected condition after while".to_string());
1897 }
1898
1899 while i < tokens.len() && tokens[i] == Token::Newline {
1901 i += 1;
1902 }
1903
1904 if i >= tokens.len() || tokens[i] != Token::Do {
1906 return Err("Expected 'do' in while loop".to_string());
1907 }
1908 i += 1;
1909
1910 while i < tokens.len() && tokens[i] == Token::Newline {
1912 i += 1;
1913 }
1914
1915 let mut body_tokens = Vec::new();
1917 let mut depth = 0;
1918 while i < tokens.len() {
1919 match &tokens[i] {
1920 Token::While | Token::For => {
1921 depth += 1;
1922 body_tokens.push(tokens[i].clone());
1923 }
1924 Token::Done => {
1925 if depth > 0 {
1926 depth -= 1;
1927 body_tokens.push(tokens[i].clone());
1928 } else {
1929 break; }
1931 }
1932 Token::Newline => {
1933 let mut j = i + 1;
1935 while j < tokens.len() && tokens[j] == Token::Newline {
1936 j += 1;
1937 }
1938 if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
1939 i = j; break;
1941 }
1942 body_tokens.push(tokens[i].clone());
1944 }
1945 _ => {
1946 body_tokens.push(tokens[i].clone());
1947 }
1948 }
1949 i += 1;
1950 }
1951
1952 if i >= tokens.len() || tokens[i] != Token::Done {
1953 return Err("Expected 'done' to close while loop".to_string());
1954 }
1955
1956 let condition_ast = parse_slice(&cond_tokens)?;
1958
1959 let body_ast = if body_tokens.is_empty() {
1961 create_empty_body_ast()
1963 } else {
1964 parse_commands_sequentially(&body_tokens)?
1965 };
1966
1967 Ok(Ast::While {
1968 condition: Box::new(condition_ast),
1969 body: Box::new(body_ast),
1970 })
1971}
1972
/// Parses a function definition into `Ast::FunctionDefinition`.
///
/// Two surface forms are accepted:
/// * modern: `name ( ) { body }` — tokens `Word, LeftParen, RightParen, LeftBrace`
/// * legacy: `name() { body }` — the parens are glued into the name word
///
/// The body ends at the matching top-level `}`. Nested modern-form
/// function definitions and compound statements (`if…fi`, `for`/`while…done`,
/// `case…esac`) are skipped as opaque units so their internal braces and
/// keywords cannot terminate the body early.
fn parse_function_definition(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.len() < 2 {
        return Err("Function definition too short".to_string());
    }

    // Function name; for the legacy `name()` form, strip everything from
    // the first '(' (only when the word actually ends in ')').
    let func_name = if let Token::Word(word) = &tokens[0] {
        if let Some(paren_pos) = word.find('(') {
            if word.ends_with(')') && paren_pos > 0 {
                word[..paren_pos].to_string()
            } else {
                word.clone()
            }
        } else {
            word.clone()
        }
    } else {
        return Err("Function name must be a word".to_string());
    };

    // Index of the opening `{`: 3 for `name ( ) {`, 1 for `name() {`.
    let brace_pos =
        if tokens.len() >= 4 && tokens[1] == Token::LeftParen && tokens[2] == Token::RightParen {
            if tokens[3] != Token::LeftBrace {
                return Err("Expected { after function name".to_string());
            }
            3
        } else if tokens.len() >= 2 && tokens[1] == Token::LeftBrace {
            1
        } else {
            return Err("Expected ( after function name or { for legacy format".to_string());
        };

    let mut brace_depth = 0;
    let mut body_end = 0;
    let mut found_closing = false;
    let mut i = brace_pos + 1;

    while i < tokens.len() {
        // A nested modern-form function definition (`word ( ) {`): skip
        // its whole brace-delimited body so its `}` does not close us.
        if i + 3 < tokens.len()
            && matches!(&tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            i += 4;
            let mut nested_depth = 1;
            while i < tokens.len() && nested_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => nested_depth += 1,
                    Token::RightBrace => nested_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            continue;
        }

        match &tokens[i] {
            Token::LeftBrace => {
                brace_depth += 1;
                i += 1;
            }
            Token::RightBrace => {
                if brace_depth == 0 {
                    // Matching top-level `}`: body ends just before it.
                    body_end = i;
                    found_closing = true;
                    break;
                } else {
                    brace_depth -= 1;
                    i += 1;
                }
            }
            // Compound statements are skipped as opaque units; the
            // skip_to_matching_* helpers advance `i` past the closer.
            Token::If => {
                skip_to_matching_fi(tokens, &mut i);
            }
            Token::For | Token::While => {
                skip_to_matching_done(tokens, &mut i);
            }
            Token::Case => {
                skip_to_matching_esac(tokens, &mut i);
            }
            _ => {
                i += 1;
            }
        }
    }

    if !found_closing {
        return Err("Missing closing } for function definition".to_string());
    }

    let body_tokens = &tokens[brace_pos + 1..body_end];

    // An empty body degrades to a no-op (`true`) pipeline.
    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(body_tokens)?
    };

    Ok(Ast::FunctionDefinition {
        name: func_name,
        body: Box::new(body_ast),
    })
}
2094
#[cfg(test)]
mod tests {
    // Unit tests for the parser: hand-built `Token` vectors go in,
    // `Ast` values come out. Grouped roughly as: simple commands and
    // pipelines, basic redirections, error cases, control flow,
    // assignments, function definitions, and fd-level redirections.
    use super::super::lexer::Token;
    use super::*;

    // --- simple commands and pipelines ---

    #[test]
    fn test_single_command() {
        let tokens = vec![Token::Word("ls".to_string())];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["ls".to_string()],
                redirections: Vec::new(),
                compound: None,
            }])
        );
    }

    #[test]
    fn test_command_with_args() {
        let tokens = vec![
            Token::Word("ls".to_string()),
            Token::Word("-la".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["ls".to_string(), "-la".to_string()],
                redirections: Vec::new(),
                compound: None,
            }])
        );
    }

    #[test]
    fn test_pipeline() {
        let tokens = vec![
            Token::Word("ls".to_string()),
            Token::Pipe,
            Token::Word("grep".to_string()),
            Token::Word("txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![
                ShellCommand {
                    args: vec!["ls".to_string()],
                    redirections: Vec::new(),
                    compound: None,
                },
                ShellCommand {
                    args: vec!["grep".to_string(), "txt".to_string()],
                    redirections: Vec::new(),
                    compound: None,
                }
            ])
        );
    }

    // --- basic file redirections ---

    #[test]
    fn test_input_redirection() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("input.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                redirections: vec![Redirection::Input("input.txt".to_string())],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_output_redirection() {
        let tokens = vec![
            Token::Word("printf".to_string()),
            Token::Word("hello".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["printf".to_string(), "hello".to_string()],
                compound: None,
                redirections: vec![Redirection::Output("output.txt".to_string())],
            }])
        );
    }

    #[test]
    fn test_append_redirection() {
        let tokens = vec![
            Token::Word("printf".to_string()),
            Token::Word("hello".to_string()),
            Token::RedirAppend,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["printf".to_string(), "hello".to_string()],
                compound: None,
                redirections: vec![Redirection::Append("output.txt".to_string())],
            }])
        );
    }

    #[test]
    fn test_complex_pipeline_with_redirections() {
        // Redirections attach to the pipeline stage they appear in.
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("input.txt".to_string()),
            Token::Pipe,
            Token::Word("grep".to_string()),
            Token::Word("pattern".to_string()),
            Token::Pipe,
            Token::Word("sort".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![
                ShellCommand {
                    args: vec!["cat".to_string()],
                    compound: None,
                    redirections: vec![Redirection::Input("input.txt".to_string())],
                },
                ShellCommand {
                    args: vec!["grep".to_string(), "pattern".to_string()],
                    compound: None,
                    redirections: Vec::new(),
                },
                ShellCommand {
                    args: vec!["sort".to_string()],
                    redirections: vec![Redirection::Output("output.txt".to_string())],
                    compound: None,
                }
            ])
        );
    }

    // --- error cases ---

    #[test]
    fn test_empty_tokens() {
        let tokens = vec![];
        let result = parse(tokens);
        assert!(result.is_err());
        assert_eq!(result.unwrap_err(), "No commands found");
    }

    #[test]
    fn test_only_pipe() {
        let tokens = vec![Token::Pipe];
        let result = parse(tokens);
        assert!(result.is_err());
        assert_eq!(result.unwrap_err(), "No commands found");
    }

    #[test]
    fn test_redirection_without_file() {
        // A trailing redirection operator with no target is silently
        // dropped rather than being a parse error.
        let tokens = vec![Token::Word("cat".to_string()), Token::RedirIn];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                compound: None,
                redirections: Vec::new(),
            }])
        );
    }

    #[test]
    fn test_multiple_redirections() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("file1.txt".to_string()),
            Token::RedirOut,
            Token::Word("file2.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                redirections: vec![
                    Redirection::Input("file1.txt".to_string()),
                    Redirection::Output("file2.txt".to_string()),
                ],
                compound: None,
            }])
        );
    }

    // --- control flow ---

    #[test]
    fn test_parse_if() {
        let tokens = vec![
            Token::If,
            Token::Word("true".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("yes".to_string()),
            Token::Semicolon,
            Token::Fi,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::If {
            branches,
            else_branch,
        } = result
        {
            assert_eq!(branches.len(), 1);
            let (condition, then_branch) = &branches[0];
            if let Ast::Pipeline(cmds) = &**condition {
                assert_eq!(cmds[0].args, vec!["true"]);
            } else {
                panic!("condition not pipeline");
            }
            if let Ast::Pipeline(cmds) = &**then_branch {
                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
            } else {
                panic!("then_branch not pipeline");
            }
            assert!(else_branch.is_none());
        } else {
            panic!("not if");
        }
    }

    #[test]
    fn test_parse_if_elif() {
        // Each if/elif pair becomes one entry in `branches`.
        let tokens = vec![
            Token::If,
            Token::Word("false".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("no".to_string()),
            Token::Semicolon,
            Token::Elif,
            Token::Word("true".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("yes".to_string()),
            Token::Semicolon,
            Token::Fi,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::If {
            branches,
            else_branch,
        } = result
        {
            assert_eq!(branches.len(), 2);
            let (condition1, then1) = &branches[0];
            if let Ast::Pipeline(cmds) = &**condition1 {
                assert_eq!(cmds[0].args, vec!["false"]);
            }
            if let Ast::Pipeline(cmds) = &**then1 {
                assert_eq!(cmds[0].args, vec!["printf", "no"]);
            }
            let (condition2, then2) = &branches[1];
            if let Ast::Pipeline(cmds) = &**condition2 {
                assert_eq!(cmds[0].args, vec!["true"]);
            }
            if let Ast::Pipeline(cmds) = &**then2 {
                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
            }
            assert!(else_branch.is_none());
        } else {
            panic!("not if");
        }
    }

    // --- assignments ---

    #[test]
    fn test_parse_assignment() {
        let tokens = vec![Token::Word("MY_VAR=test_value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Assignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "test_value");
        } else {
            panic!("not assignment");
        }
    }

    #[test]
    fn test_parse_assignment_quoted() {
        let tokens = vec![Token::Word("MY_VAR=hello world".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Assignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "hello world");
        } else {
            panic!("not assignment");
        }
    }

    #[test]
    fn test_parse_assignment_invalid() {
        // A name starting with a digit is not a valid variable, so the
        // word falls through to an ordinary command.
        let tokens = vec![Token::Word("123VAR=value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Pipeline(cmds) = result {
            assert_eq!(cmds[0].args, vec!["123VAR=value"]);
        } else {
            panic!("should be parsed as pipeline");
        }
    }

    // --- function definitions ---

    #[test]
    fn test_parse_function_definition() {
        let tokens = vec![
            Token::Word("myfunc".to_string()),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            Token::Word("echo".to_string()),
            Token::Word("hello".to_string()),
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "myfunc");
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }

    #[test]
    fn test_parse_function_definition_empty() {
        // Empty bodies become the no-op `true` pipeline.
        let tokens = vec![
            Token::Word("emptyfunc".to_string()),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "emptyfunc");
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["true"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }

    #[test]
    fn test_parse_function_definition_legacy_format() {
        // Legacy form: the parens are glued into the name word.
        let tokens = vec![
            Token::Word("legacyfunc()".to_string()),
            Token::LeftBrace,
            Token::Word("echo".to_string()),
            Token::Word("hello".to_string()),
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "legacyfunc");
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }

    // --- local assignments ---

    #[test]
    fn test_parse_local_assignment() {
        let tokens = vec![Token::Local, Token::Word("MY_VAR=test_value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::LocalAssignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "test_value");
        } else {
            panic!("should be parsed as local assignment");
        }
    }

    #[test]
    fn test_parse_local_assignment_separate_tokens() {
        // `local VAR value` (no `=`) is also accepted.
        let tokens = vec![
            Token::Local,
            Token::Word("MY_VAR".to_string()),
            Token::Word("test_value".to_string()),
        ];
        let result = parse(tokens).unwrap();
        if let Ast::LocalAssignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "test_value");
        } else {
            panic!("should be parsed as local assignment");
        }
    }

    #[test]
    fn test_parse_local_assignment_invalid_var_name() {
        // Unlike a bare invalid assignment, `local` with a bad name is
        // a hard error.
        let tokens = vec![Token::Local, Token::Word("123VAR=value".to_string())];
        let result = parse(tokens);
        assert!(result.is_err());
    }

    // --- heredoc / herestring ---

    #[test]
    fn test_parse_here_document_redirection() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirHereDoc("EOF".to_string(), false),
        ];
        let result = parse(tokens).unwrap();
        // NOTE(review): the lexer's boolean flag is stringified into the
        // AST ("false") — confirm this is intended rather than a lossy
        // conversion.
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                redirections: vec![Redirection::HereDoc("EOF".to_string(), "false".to_string())],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_here_string_redirection() {
        let tokens = vec![
            Token::Word("grep".to_string()),
            Token::RedirHereString("pattern".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["grep".to_string()],
                compound: None,
                redirections: vec![Redirection::HereString("pattern".to_string())],
            }])
        );
    }

    #[test]
    fn test_parse_mixed_redirections() {
        // Redirections are recorded in source order.
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("file.txt".to_string()),
            Token::RedirHereString("fallback".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                compound: None,
                redirections: vec![
                    Redirection::Input("file.txt".to_string()),
                    Redirection::HereString("fallback".to_string()),
                    Redirection::Output("output.txt".to_string()),
                ],
            }])
        );
    }

    // --- fd-level redirections ---

    #[test]
    fn test_parse_fd_input_redirection() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdIn(3, "input.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                redirections: vec![Redirection::FdInput(3, "input.txt".to_string())],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_fd_output_redirection() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdOut(2, "errors.log".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![Redirection::FdOutput(2, "errors.log".to_string())],
            }])
        );
    }

    #[test]
    fn test_parse_fd_append_redirection() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdAppend(2, "errors.log".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![Redirection::FdAppend(2, "errors.log".to_string())],
            }])
        );
    }

    #[test]
    fn test_parse_fd_duplicate() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdDup(2, 1),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![Redirection::FdDuplicate(2, 1)],
            }])
        );
    }

    #[test]
    fn test_parse_fd_close() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdClose(2),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![Redirection::FdClose(2)],
            }])
        );
    }

    #[test]
    fn test_parse_fd_input_output() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdInOut(3, "file.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![Redirection::FdInputOutput(3, "file.txt".to_string())],
            }])
        );
    }

    #[test]
    fn test_parse_multiple_fd_redirections() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdOut(2, "err.log".to_string()),
            Token::RedirectFdIn(3, "input.txt".to_string()),
            Token::RedirectFdAppend(4, "append.log".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                compound: None,
                redirections: vec![
                    Redirection::FdOutput(2, "err.log".to_string()),
                    Redirection::FdInput(3, "input.txt".to_string()),
                    Redirection::FdAppend(4, "append.log".to_string()),
                ],
            }])
        );
    }

    #[test]
    fn test_parse_fd_swap_pattern() {
        // The classic `3>&1 1>&2 2>&3 3>&-` stdout/stderr swap must
        // keep its exact operator order.
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdDup(3, 1),
            Token::RedirectFdDup(1, 2),
            Token::RedirectFdDup(2, 3),
            Token::RedirectFdClose(3),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                redirections: vec![
                    Redirection::FdDuplicate(3, 1),
                    Redirection::FdDuplicate(1, 2),
                    Redirection::FdDuplicate(2, 3),
                    Redirection::FdClose(3),
                ],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_mixed_basic_and_fd_redirections() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
            Token::RedirectFdDup(2, 1),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                redirections: vec![
                    Redirection::Output("output.txt".to_string()),
                    Redirection::FdDuplicate(2, 1),
                ],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_fd_redirection_ordering() {
        // Source order of mixed basic/fd redirections is preserved.
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdOut(2, "first.log".to_string()),
            Token::RedirOut,
            Token::Word("second.txt".to_string()),
            Token::RedirectFdDup(2, 1),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["command".to_string()],
                redirections: vec![
                    Redirection::FdOutput(2, "first.log".to_string()),
                    Redirection::Output("second.txt".to_string()),
                    Redirection::FdDuplicate(2, 1),
                ],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_fd_redirection_with_pipe() {
        let tokens = vec![
            Token::Word("command".to_string()),
            Token::RedirectFdDup(2, 1),
            Token::Pipe,
            Token::Word("grep".to_string()),
            Token::Word("error".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![
                ShellCommand {
                    args: vec!["command".to_string()],
                    redirections: vec![Redirection::FdDuplicate(2, 1)],
                    compound: None,
                },
                ShellCommand {
                    args: vec!["grep".to_string(), "error".to_string()],
                    compound: None,
                    redirections: Vec::new(),
                }
            ])
        );
    }

    #[test]
    fn test_parse_all_fd_numbers() {
        // Low boundary: fd 0.
        let tokens = vec![
            Token::Word("cmd".to_string()),
            Token::RedirectFdIn(0, "file".to_string()),
        ];
        let result = parse(tokens).unwrap();
        if let Ast::Pipeline(cmds) = result {
            assert_eq!(
                cmds[0].redirections[0],
                Redirection::FdInput(0, "file".to_string())
            );
        } else {
            panic!("Expected Pipeline");
        }

        // High single-digit fd: 9.
        let tokens = vec![
            Token::Word("cmd".to_string()),
            Token::RedirectFdOut(9, "file".to_string()),
        ];
        let result = parse(tokens).unwrap();
        if let Ast::Pipeline(cmds) = result {
            assert_eq!(
                cmds[0].redirections[0],
                Redirection::FdOutput(9, "file".to_string())
            );
        } else {
            panic!("Expected Pipeline");
        }
    }
}