1use super::lexer::Token;
2
/// Abstract syntax tree produced by [`parse`] from the lexer's token stream.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Ast {
    /// One or more commands joined by `|`; a single simple command is
    /// represented as a one-element pipeline.
    Pipeline(Vec<ShellCommand>),
    /// Commands separated by `;` or newlines, executed in order.
    Sequence(Vec<Ast>),
    /// `VAR=value` variable assignment.
    Assignment {
        var: String,
        value: String,
    },
    /// `local VAR=value` assignment.
    LocalAssignment {
        var: String,
        value: String,
    },
    /// `if`/`elif`/`else`/`fi`. Each entry in `branches` is a
    /// (condition, body) pair — the first for `if`, the rest for `elif`.
    If {
        branches: Vec<(Box<Ast>, Box<Ast>)>,
        else_branch: Option<Box<Ast>>,
    },
    /// `case WORD in ... esac`. Each entry in `cases` pairs a list of
    /// patterns with that arm's body; `default` holds the `*` arm, if any
    /// (see `parse_case`, which routes a lone `*` pattern here).
    Case {
        word: String,
        cases: Vec<(Vec<String>, Ast)>,
        default: Option<Box<Ast>>,
    },
    /// `for VARIABLE in ITEMS; do BODY; done`.
    For {
        variable: String,
        items: Vec<String>,
        body: Box<Ast>,
    },
    /// `while CONDITION; do BODY; done`.
    While {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// `name() { BODY }` (or legacy `name { BODY }`) function definition.
    FunctionDefinition {
        name: String,
        body: Box<Ast>,
    },
    /// Invocation of a defined function with its arguments.
    /// NOTE(review): not constructed anywhere in this parser module —
    /// presumably built by the evaluator; confirm against callers.
    FunctionCall {
        name: String,
        args: Vec<String>,
    },
    /// `return [VALUE]` from a function body.
    Return {
        value: Option<String>,
    },
    /// `left && right` — short-circuit conjunction.
    And {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `left || right` — short-circuit disjunction.
    Or {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `( BODY )` — body intended to run in a subshell.
    Subshell {
        body: Box<Ast>,
    },
}
58
/// A single I/O redirection attached to a [`ShellCommand`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Redirection {
    /// Read stdin from a file (`< file`).
    Input(String),
    /// Write stdout to a file (`> file`).
    Output(String),
    /// Append stdout to a file (`>> file`).
    Append(String),
    /// Read the given descriptor from a file.
    FdInput(i32, String),
    /// Write the given descriptor to a file.
    FdOutput(i32, String),
    /// Append the given descriptor to a file.
    FdAppend(i32, String),
    /// Duplicate one descriptor onto another (from, to).
    FdDuplicate(i32, i32),
    /// Close the given descriptor.
    FdClose(i32),
    /// Open a file for both reading and writing on the given descriptor.
    FdInputOutput(i32, String),
    /// Here-document: delimiter plus the lexer's quoted flag rendered to a
    /// string (see the `RedirHereDoc` token handling in `parse_pipeline`).
    HereDoc(String, String),
    /// Here-string content (`<<< word`).
    HereString(String),
}
85
/// One command within a pipeline: its argument words, attached
/// redirections, and optionally a compound payload (e.g. a subshell)
/// that executes in place of plain arguments.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ShellCommand {
    /// Command name followed by its arguments, in order.
    pub args: Vec<String>,
    /// Redirections applied to this command.
    pub redirections: Vec<Redirection>,
    /// Compound body (e.g. `Ast::Subshell`) when this pipeline element is
    /// not a simple argv-style command.
    pub compound: Option<Box<Ast>>,
}
95
/// Returns `true` if `name` can begin a shell identifier.
///
/// Only the first character is inspected (a letter or `_`); the remaining
/// characters are deliberately not validated here, so e.g. `"a-b"` passes.
/// An empty string is rejected.
fn is_valid_variable_name(name: &str) -> bool {
    name.chars()
        .next()
        .map_or(false, |c| c.is_alphabetic() || c == '_')
}
105
106fn create_empty_body_ast() -> Ast {
109 Ast::Pipeline(vec![ShellCommand {
110 args: vec!["true".to_string()],
111 redirections: Vec::new(),
112 compound: None,
113 }])
114}
115
116fn skip_newlines(tokens: &[Token], i: &mut usize) {
119 while *i < tokens.len() && tokens[*i] == Token::Newline {
120 *i += 1;
121 }
122}
123
124fn skip_to_matching_fi(tokens: &[Token], i: &mut usize) {
127 let mut if_depth = 1;
128 *i += 1; while *i < tokens.len() && if_depth > 0 {
130 match tokens[*i] {
131 Token::If => if_depth += 1,
132 Token::Fi => if_depth -= 1,
133 _ => {}
134 }
135 *i += 1;
136 }
137}
138
139fn skip_to_matching_done(tokens: &[Token], i: &mut usize) {
142 let mut loop_depth = 1;
143 *i += 1; while *i < tokens.len() && loop_depth > 0 {
145 match tokens[*i] {
146 Token::For | Token::While => loop_depth += 1,
147 Token::Done => loop_depth -= 1,
148 _ => {}
149 }
150 *i += 1;
151 }
152}
153
154fn skip_to_matching_esac(tokens: &[Token], i: &mut usize) {
156 *i += 1; while *i < tokens.len() {
158 if tokens[*i] == Token::Esac {
159 *i += 1;
160 break;
161 }
162 *i += 1;
163 }
164}
165
/// Parses a full token stream into an [`Ast`].
///
/// Entry point of the parser. It first special-cases a leading function
/// definition (`name ( ) { ... }`, or the legacy `name() {` form where the
/// lexer kept `name()` as a single word) so that the definition and any
/// trailing commands become a [`Ast::Sequence`]; everything else is handed
/// to `parse_commands_sequentially`.
///
/// Returns `Err` with a human-readable message on malformed input.
pub fn parse(tokens: Vec<Token>) -> Result<Ast, String> {
    // Leading `name ( ) {` — scan for the brace that closes the function,
    // skipping over nested braces and complete if/loop/case constructs so
    // their keywords don't confuse the depth counting.
    if tokens.len() >= 4
        && let (Token::Word(_), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
    {
        let mut brace_depth = 1;
        let mut function_end = tokens.len();
        let mut j = 4;
        while j < tokens.len() {
            match &tokens[j] {
                Token::LeftBrace => {
                    brace_depth += 1;
                    j += 1;
                }
                Token::RightBrace => {
                    brace_depth -= 1;
                    if brace_depth == 0 {
                        // Include the closing `}` in the function span.
                        function_end = j + 1;
                        break;
                    }
                    j += 1;
                }
                Token::If => {
                    // Skip a complete if/fi construct (same logic as
                    // `skip_to_matching_fi`, inlined here).
                    let mut if_depth = 1;
                    j += 1;
                    while j < tokens.len() && if_depth > 0 {
                        match tokens[j] {
                            Token::If => if_depth += 1,
                            Token::Fi => if_depth -= 1,
                            _ => {}
                        }
                        j += 1;
                    }
                }
                Token::For | Token::While => {
                    // Skip a complete loop up to its matching `done`.
                    let mut for_depth = 1;
                    j += 1;
                    while j < tokens.len() && for_depth > 0 {
                        match tokens[j] {
                            Token::For | Token::While => for_depth += 1,
                            Token::Done => for_depth -= 1,
                            _ => {}
                        }
                        j += 1;
                    }
                }
                Token::Case => {
                    // Skip to just past the next `esac` (non-nesting).
                    j += 1;
                    while j < tokens.len() {
                        if tokens[j] == Token::Esac {
                            j += 1;
                            break;
                        }
                        j += 1;
                    }
                }
                _ => {
                    j += 1;
                }
            }
        }

        // Only split when the closing brace was actually found.
        if brace_depth == 0 && function_end <= tokens.len() {
            let function_tokens = &tokens[0..function_end];
            let remaining_tokens = &tokens[function_end..];

            let function_ast = parse_function_definition(function_tokens)?;

            return if remaining_tokens.is_empty() {
                Ok(function_ast)
            } else {
                // Definition followed by more commands — keep both.
                let remaining_ast = parse_commands_sequentially(remaining_tokens)?;
                Ok(Ast::Sequence(vec![function_ast, remaining_ast]))
            };
        }
    }

    // Legacy single-word form: `name() { ... }` where the lexer produced
    // one `Word("name()")` token followed by `{`.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
        && tokens[1] == Token::LeftBrace
    {
        return parse_function_definition(&tokens);
    }

    parse_commands_sequentially(&tokens)
}
266
/// Parses a single logical command (no separators handled here) by trying
/// each special form in priority order — assignments, `local`, `return`,
/// compound constructs, function definitions — and finally falling back to
/// `parse_pipeline` for ordinary commands.
fn parse_slice(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.is_empty() {
        return Err("No commands found".to_string());
    }

    // `VAR=part1 part2` as two words: splice the value across both tokens.
    if tokens.len() == 2 {
        if let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
            && let Some(eq_pos) = var_eq.find('=')
            && eq_pos > 0
            && eq_pos < var_eq.len()
        {
            let var = var_eq[..eq_pos].to_string();
            let full_value = format!("{}{}", &var_eq[eq_pos + 1..], value);
            if is_valid_variable_name(&var) {
                return Ok(Ast::Assignment {
                    var,
                    value: full_value,
                });
            }
        }
    }

    // `VAR= value` — trailing `=` on the first word.
    // NOTE(review): this arm appears unreachable — the preceding arm's
    // `eq_pos < var_eq.len()` is always true (find returns an in-bounds
    // index), so it already captures the `eq_pos == len - 1` case with the
    // same result. Left unchanged pending confirmation.
    if tokens.len() == 2
        && let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos == var_eq.len() - 1
    {
        let var = var_eq[..eq_pos].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment {
                var,
                value: value.clone(),
            });
        }
    }

    // `local VAR value` or `local VAR= value` (three tokens).
    if tokens.len() == 3
        && let (Token::Local, Token::Word(var), Token::Word(value)) =
            (&tokens[0], &tokens[1], &tokens[2])
    {
        // Tolerate a trailing `=` on the variable word.
        let clean_var = if var.ends_with('=') {
            &var[..var.len() - 1]
        } else {
            var
        };
        if is_valid_variable_name(clean_var) {
            return Ok(Ast::LocalAssignment {
                var: clean_var.to_string(),
                value: value.clone(),
            });
        }
    }

    // `return` or `return VALUE`.
    if !tokens.is_empty()
        && tokens.len() <= 2
        && let Token::Return = &tokens[0]
    {
        if tokens.len() == 1 {
            return Ok(Ast::Return { value: None });
        } else if let Token::Word(word) = &tokens[1] {
            return Ok(Ast::Return {
                value: Some(word.clone()),
            });
        }
    }

    // `local VAR=value` as a single word after `local`.
    if tokens.len() == 2
        && let (Token::Local, Token::Word(var_eq)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos < var_eq.len()
    {
        let var = var_eq[..eq_pos].to_string();
        let value = var_eq[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::LocalAssignment { var, value });
        }
    }

    // Plain `VAR=value` as a single word.
    if tokens.len() == 1
        && let Token::Word(ref word) = tokens[0]
        && let Some(eq_pos) = word.find('=')
        && eq_pos > 0
        && eq_pos < word.len()
    {
        let var = word[..eq_pos].to_string();
        let value = word[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment { var, value });
        }
    }

    // Compound constructs dispatch on the leading keyword.
    if let Token::If = tokens[0] {
        return parse_if(tokens);
    }

    if let Token::Case = tokens[0] {
        return parse_case(tokens);
    }

    if let Token::For = tokens[0] {
        return parse_for(tokens);
    }

    if let Token::While = tokens[0] {
        return parse_while(tokens);
    }

    // `name ( ) { ... }` function definition.
    if tokens.len() >= 4
        && let (Token::Word(word), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
        && is_valid_variable_name(word)
    {
        return parse_function_definition(tokens);
    }

    // Legacy `name() {` form where `name()` is a single word token.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
    {
        let func_name = &word[..paren_pos];
        if is_valid_variable_name(func_name) && tokens[1] == Token::LeftBrace {
            return parse_function_definition(tokens);
        }
    }

    // Fallback: an ordinary (possibly piped) command.
    parse_pipeline(tokens)
}
424
/// Splits the token stream into successive logical commands and parses each
/// with `parse_slice`, producing either a single [`Ast`] or an
/// [`Ast::Sequence`].
///
/// The loop alternates between (1) skipping separators and `#` comments,
/// (2) scanning one command's token span — with dedicated handling for
/// subshells, if/loop/case constructs and function definitions so that
/// their internal separators are not treated as command boundaries — and
/// (3) combining with `&&`/`||` continuations, which recurse on the rest of
/// the stream (making these operators effectively right-associative).
fn parse_commands_sequentially(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 0;
    let mut commands = Vec::new();

    while i < tokens.len() {
        // Skip leading newlines and `#`-comments up to end of line.
        while i < tokens.len() {
            match &tokens[i] {
                Token::Newline => {
                    i += 1;
                }
                Token::Word(word) if word.starts_with('#') => {
                    while i < tokens.len() && tokens[i] != Token::Newline {
                        i += 1;
                    }
                    if i < tokens.len() {
                        i += 1;
                    }
                }
                _ => break,
            }
        }

        if i >= tokens.len() {
            break;
        }

        let start = i;

        // --- Subshell: `( ... )` possibly followed by redirections and
        // `|`/`&&`/`||` continuations. Handled inline and `continue`d.
        if tokens[i] == Token::LeftParen {
            let mut paren_depth = 1;
            let mut j = i + 1;

            while j < tokens.len() && paren_depth > 0 {
                match tokens[j] {
                    Token::LeftParen => paren_depth += 1,
                    Token::RightParen => paren_depth -= 1,
                    _ => {}
                }
                j += 1;
            }

            if paren_depth != 0 {
                return Err("Unmatched parenthesis in subshell".to_string());
            }

            // Tokens strictly between the parens.
            let subshell_tokens = &tokens[i + 1..j - 1];

            let body_ast = if subshell_tokens.is_empty() {
                return Err("Empty subshell".to_string());
            } else {
                parse_commands_sequentially(subshell_tokens)?
            };

            let mut subshell_ast = Ast::Subshell {
                body: Box::new(body_ast),
            };

            // Collect any redirections that directly follow `)`.
            i = j;
            let mut redirections = Vec::new();
            while i < tokens.len() {
                match &tokens[i] {
                    Token::RedirOut => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Output(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirIn => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Input(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirAppend => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Append(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirectFdOut(fd, file) => {
                        redirections.push(Redirection::FdOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdIn(fd, file) => {
                        redirections.push(Redirection::FdInput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdAppend(fd, file) => {
                        redirections.push(Redirection::FdAppend(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdDup(from_fd, to_fd) => {
                        redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
                        i += 1;
                    }
                    Token::RedirectFdClose(fd) => {
                        redirections.push(Redirection::FdClose(*fd));
                        i += 1;
                    }
                    Token::RedirectFdInOut(fd, file) => {
                        redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirHereDoc(delimiter, quoted) => {
                        redirections
                            .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                        i += 1;
                    }
                    Token::RedirHereString(content) => {
                        redirections.push(Redirection::HereString(content.clone()));
                        i += 1;
                    }
                    _ => break,
                }
            }

            // Redirections on a subshell are carried by a wrapper command
            // with empty args and the subshell as its compound payload.
            if !redirections.is_empty() {
                subshell_ast = Ast::Pipeline(vec![ShellCommand {
                    args: Vec::new(),
                    redirections,
                    compound: Some(Box::new(subshell_ast)),
                }]);
            }

            // `( ... ) | ...`: discard the AST built above and re-parse the
            // whole span as one pipeline.
            if i < tokens.len() && tokens[i] == Token::Pipe {
                let pipeline_ast = parse_pipeline(&tokens[start..])?;
                commands.push(pipeline_ast);
                break;
            }

            // `( ... ) && rest` / `|| rest`: recurse on the remainder.
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1;
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(subshell_ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(subshell_ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break;
            } else {
                commands.push(subshell_ast);
            }

            if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
                i += 1;
            }
            continue;
        }

        // --- Scan the span of the next command. Compound constructs are
        // consumed whole (to just past their closing keyword); everything
        // else stops at the next separator.
        if tokens[i] == Token::If {
            // Depth starts at 0; the `if` at position i brings it to 1.
            let mut depth = 0;
            while i < tokens.len() {
                match tokens[i] {
                    Token::If => depth += 1,
                    Token::Fi => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1;
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::For {
            let mut depth = 1;
            i += 1;
            while i < tokens.len() {
                match tokens[i] {
                    Token::For | Token::While => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1;
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::While {
            let mut depth = 1;
            i += 1;
            while i < tokens.len() {
                match tokens[i] {
                    Token::While | Token::For => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1;
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::Case {
            // Non-nesting: stop just past the first `esac`.
            while i < tokens.len() {
                if tokens[i] == Token::Esac {
                    i += 1;
                    break;
                }
                i += 1;
            }
        } else if i + 3 < tokens.len()
            && matches!(tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            // Function definition: consume through the matching `}`.
            let mut brace_depth = 1;
            i += 4;
            while i < tokens.len() && brace_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => brace_depth += 1,
                    Token::RightBrace => brace_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
        } else {
            // Simple command: scan to the next separator, but skip over
            // stray `else`/`elif`/`fi` keywords that follow a separator
            // (these may remain when if-branch bodies are re-parsed).
            while i < tokens.len() {
                if tokens[i] == Token::Newline
                    || tokens[i] == Token::Semicolon
                    || tokens[i] == Token::And
                    || tokens[i] == Token::Or
                {
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len()
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        i = j + 1;
                        continue;
                    }
                    break;
                }
                i += 1;
            }
        }

        let command_tokens = &tokens[start..i];
        if !command_tokens.is_empty() {
            // A lone `else`/`elif`/`fi` is leftover structure — skip it.
            if command_tokens.len() == 1 {
                match command_tokens[0] {
                    Token::Else | Token::Elif | Token::Fi => {
                        if i < tokens.len()
                            && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
                        {
                            i += 1;
                        }
                        continue;
                    }
                    _ => {}
                }
            }

            let ast = parse_slice(command_tokens)?;

            // `cmd && rest` / `cmd || rest`: recurse on the remainder and
            // stop — the recursion parses everything after the operator.
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1;
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break;
            } else {
                commands.push(ast);
            }
        }

        if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
            i += 1;
        }
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    // Unwrap a single command; only real sequences get `Ast::Sequence`.
    if commands.len() == 1 {
        Ok(commands.into_iter().next().unwrap())
    } else {
        Ok(Ast::Sequence(commands))
    }
}
800
/// Parses one pipeline: commands separated by `|`, each made of words,
/// redirections, and optionally a parenthesized subshell segment.
///
/// Stops (without error) at block keywords (`do`, `done`, `then`, `else`,
/// `elif`, `fi`, `esac`) so callers can embed pipelines inside compound
/// constructs. Any other unexpected token is an error.
fn parse_pipeline(tokens: &[Token]) -> Result<Ast, String> {
    let mut commands = Vec::new();
    let mut current_cmd = ShellCommand::default();

    let mut i = 0;
    while i < tokens.len() {
        let token = &tokens[i];
        match token {
            // `( ... )` as a pipeline segment: parse the body, attach it as
            // the command's compound payload, then consume trailing
            // redirections before flushing the segment.
            Token::LeftParen => {
                let mut paren_depth = 1;
                let mut j = i + 1;

                while j < tokens.len() && paren_depth > 0 {
                    match tokens[j] {
                        Token::LeftParen => paren_depth += 1,
                        Token::RightParen => paren_depth -= 1,
                        _ => {}
                    }
                    j += 1;
                }

                if paren_depth != 0 {
                    return Err("Unmatched parenthesis in pipeline".to_string());
                }

                let subshell_tokens = &tokens[i + 1..j - 1];

                // An empty `()` here becomes a no-op body rather than an
                // error (unlike the top-level subshell path).
                let body_ast = if subshell_tokens.is_empty() {
                    create_empty_body_ast()
                } else {
                    parse_commands_sequentially(subshell_tokens)?
                };

                current_cmd.compound = Some(Box::new(Ast::Subshell {
                    body: Box::new(body_ast),
                }));

                // Redirections directly after `)` belong to this segment.
                i = j;
                while i < tokens.len() {
                    match &tokens[i] {
                        Token::RedirOut => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Output(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirIn => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Input(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirAppend => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd
                                        .redirections
                                        .push(Redirection::Append(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirectFdOut(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdIn(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdInput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdAppend(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdAppend(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdDup(from_fd, to_fd) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdDuplicate(*from_fd, *to_fd));
                            i += 1;
                        }
                        Token::RedirectFdClose(fd) => {
                            current_cmd.redirections.push(Redirection::FdClose(*fd));
                            i += 1;
                        }
                        Token::RedirectFdInOut(fd, file) => {
                            current_cmd
                                .redirections
                                .push(Redirection::FdInputOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirHereDoc(delimiter, quoted) => {
                            current_cmd
                                .redirections
                                .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                            i += 1;
                        }
                        Token::RedirHereString(content) => {
                            current_cmd
                                .redirections
                                .push(Redirection::HereString(content.clone()));
                            i += 1;
                        }
                        Token::Pipe => {
                            break;
                        }
                        _ => break,
                    }
                }

                // Flush the subshell segment; the `Pipe` (if any) is
                // handled by the outer match on the next iteration.
                commands.push(current_cmd.clone());
                current_cmd = ShellCommand::default();

                continue;
            }
            Token::Word(word) => {
                current_cmd.args.push(word.clone());
            }
            Token::Pipe => {
                // Flush the segment built so far; empty segments (e.g.
                // leading `|`) are silently skipped.
                if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
                    commands.push(current_cmd.clone());
                    current_cmd = ShellCommand::default();
                }
            }
            Token::RedirIn => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Input(file.clone()));
                }
            }
            Token::RedirOut => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Output(file.clone()));
                }
            }
            Token::RedirAppend => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Append(file.clone()));
                }
            }
            Token::RedirHereDoc(delimiter, quoted) => {
                current_cmd
                    .redirections
                    .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
            }
            Token::RedirHereString(content) => {
                current_cmd
                    .redirections
                    .push(Redirection::HereString(content.clone()));
            }
            Token::RedirectFdIn(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdInput(*fd, file.clone()));
            }
            Token::RedirectFdOut(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdOutput(*fd, file.clone()));
            }
            Token::RedirectFdAppend(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdAppend(*fd, file.clone()));
            }
            Token::RedirectFdDup(from_fd, to_fd) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdDuplicate(*from_fd, *to_fd));
            }
            Token::RedirectFdClose(fd) => {
                current_cmd.redirections.push(Redirection::FdClose(*fd));
            }
            Token::RedirectFdInOut(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdInputOutput(*fd, file.clone()));
            }
            Token::RightParen => {
                // Tolerated only when it immediately follows `(` with args
                // already collected; otherwise it is a syntax error.
                if !current_cmd.args.is_empty()
                    && i > 0
                    && let Token::LeftParen = tokens[i - 1]
                {
                    break;
                }
                return Err("Unexpected ) in pipeline".to_string());
            }
            Token::Newline => {
                i += 1;
                continue;
            }
            // Block keywords end the pipeline without consuming the token.
            Token::Do
            | Token::Done
            | Token::Then
            | Token::Else
            | Token::Elif
            | Token::Fi
            | Token::Esac => {
                break;
            }
            _ => {
                return Err(format!("Unexpected token in pipeline: {:?}", token));
            }
        }
        i += 1;
    }

    // Flush the trailing segment. NOTE(review): a trailing compound-only
    // segment (no args) is not flushed here — subshell segments are pushed
    // eagerly above, so this appears intentional; confirm.
    if !current_cmd.args.is_empty() {
        commands.push(current_cmd);
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    Ok(Ast::Pipeline(commands))
}
1068
/// Parses an `if`/`elif`/`else`/`fi` construct starting at `tokens[0] == If`.
///
/// Collects one (condition, then-body) pair per `if`/`elif`, an optional
/// `else` body, and requires a terminating `fi`. Branch bodies are
/// gathered token-by-token (tracking nested `if` depth) and re-parsed with
/// `parse_commands_sequentially`.
fn parse_if(tokens: &[Token]) -> Result<Ast, String> {
    // Index 1: skip the leading `if` (or, on later iterations, the `elif`
    // already consumed at the bottom of the loop).
    let mut i = 1;
    let mut branches = Vec::new();

    loop {
        // Condition runs up to `;`, newline, or `then`.
        let mut cond_tokens = Vec::new();
        while i < tokens.len()
            && tokens[i] != Token::Semicolon
            && tokens[i] != Token::Newline
            && tokens[i] != Token::Then
        {
            cond_tokens.push(tokens[i].clone());
            i += 1;
        }

        if i < tokens.len() && (tokens[i] == Token::Semicolon || tokens[i] == Token::Newline) {
            i += 1;
        }

        skip_newlines(tokens, &mut i);

        if i >= tokens.len() || tokens[i] != Token::Then {
            return Err("Expected then after if/elif condition".to_string());
        }
        i += 1;
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Collect the then-body up to a same-depth `else`/`elif`/`fi`,
        // keeping nested if/fi pairs intact inside the body.
        let mut then_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    then_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        then_tokens.push(tokens[i].clone());
                    } else {
                        break;
                    }
                }
                Token::Else | Token::Elif if depth == 0 => {
                    break;
                }
                Token::Newline => {
                    // Lookahead: if only newlines separate us from the
                    // closing keyword, stop at it instead of keeping them.
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len()
                        && depth == 0
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        i = j;
                        break;
                    }
                    then_tokens.push(tokens[i].clone());
                }
                _ => {
                    then_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        skip_newlines(tokens, &mut i);

        let then_ast = if then_tokens.is_empty() {
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&then_tokens)?
        };

        let condition = parse_slice(&cond_tokens)?;
        branches.push((Box::new(condition), Box::new(then_ast)));

        // `elif` loops back for another (condition, body) pair.
        if i < tokens.len() && tokens[i] == Token::Elif {
            i += 1;
        } else {
            break;
        }
    }

    // Optional `else` body, collected with the same depth tracking.
    let else_ast = if i < tokens.len() && tokens[i] == Token::Else {
        i += 1;
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        let mut else_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    else_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        else_tokens.push(tokens[i].clone());
                    } else {
                        break;
                    }
                }
                Token::Newline => {
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len() && depth == 0 && tokens[j] == Token::Fi {
                        i = j;
                        break;
                    }
                    else_tokens.push(tokens[i].clone());
                }
                _ => {
                    else_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        let else_ast = if else_tokens.is_empty() {
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&else_tokens)?
        };

        Some(Box::new(else_ast))
    } else {
        None
    };

    if i >= tokens.len() || tokens[i] != Token::Fi {
        return Err("Expected fi".to_string());
    }

    Ok(Ast::If {
        branches,
        else_branch: else_ast,
    })
}
1235
/// Parses `case WORD in PATTERN) COMMANDS ;; ... esac` starting at
/// `tokens[0] == Case`.
///
/// Pattern lists may be separated by `|` (either as a `Pipe` token or
/// embedded inside a single word, which is split here). An arm whose only
/// pattern is `*` becomes the `default` branch.
fn parse_case(tokens: &[Token]) -> Result<Ast, String> {
    // Index 1: skip the leading `case` keyword.
    let mut i = 1;
    if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
        return Err("Expected word after case".to_string());
    }
    let word = if let Token::Word(ref w) = tokens[i] {
        w.clone()
    } else {
        unreachable!()
    };
    i += 1;

    if i >= tokens.len() || tokens[i] != Token::In {
        return Err("Expected in after case word".to_string());
    }
    i += 1;

    let mut cases = Vec::new();
    let mut default = None;

    loop {
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        if i >= tokens.len() {
            return Err("Unexpected end in case statement".to_string());
        }

        if tokens[i] == Token::Esac {
            break;
        }

        // Patterns up to the closing `)`; `|` separators and newlines are
        // skipped, word tokens may carry several `|`-joined patterns.
        let mut patterns = Vec::new();
        while i < tokens.len() && tokens[i] != Token::RightParen {
            if let Token::Word(ref p) = tokens[i] {
                for pat in p.split('|') {
                    patterns.push(pat.to_string());
                }
            } else if tokens[i] == Token::Pipe {
                // Separator between pattern words — nothing to record.
            } else if tokens[i] == Token::Newline {
                // Tolerate line breaks inside the pattern list.
            } else {
                return Err(format!("Expected pattern, found {:?}", tokens[i]));
            }
            i += 1;
        }

        if i >= tokens.len() || tokens[i] != Token::RightParen {
            return Err("Expected ) after patterns".to_string());
        }
        i += 1;

        // Arm body runs up to `;;` or `esac`.
        let mut commands_tokens = Vec::new();
        while i < tokens.len() && tokens[i] != Token::DoubleSemicolon && tokens[i] != Token::Esac {
            commands_tokens.push(tokens[i].clone());
            i += 1;
        }

        // NOTE(review): an arm with no commands fails here with
        // "No commands found" (from parse_slice) — empty arms are not
        // supported by this parser.
        let commands_ast = parse_slice(&commands_tokens)?;

        if i >= tokens.len() {
            return Err("Unexpected end in case statement".to_string());
        }

        if tokens[i] == Token::DoubleSemicolon {
            i += 1;
            // A lone `*` pattern is routed to the default branch.
            if patterns.len() == 1 && patterns[0] == "*" {
                default = Some(Box::new(commands_ast));
            } else {
                cases.push((patterns, commands_ast));
            }
        } else if tokens[i] == Token::Esac {
            if patterns.len() == 1 && patterns[0] == "*" {
                default = Some(Box::new(commands_ast));
            } else {
                cases.push((patterns, commands_ast));
            }
            break;
        } else {
            return Err("Expected ;; or esac after commands".to_string());
        }
    }

    Ok(Ast::Case {
        word,
        cases,
        default,
    })
}
1335
/// Parses `for VAR in ITEMS...; do BODY; done` starting at
/// `tokens[0] == For`. The item list runs up to `do`; the body is
/// collected with `for`/`done` depth tracking and re-parsed.
fn parse_for(tokens: &[Token]) -> Result<Ast, String> {
    // Index 1: skip the leading `for` keyword.
    let mut i = 1;
    if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
        return Err("Expected variable name after for".to_string());
    }
    let variable = if let Token::Word(ref v) = tokens[i] {
        v.clone()
    } else {
        unreachable!()
    };
    i += 1;

    if i >= tokens.len() || tokens[i] != Token::In {
        return Err("Expected 'in' after for variable".to_string());
    }
    i += 1;

    // Item words up to `do`; `;`/newline before `do` are consumed here.
    let mut items = Vec::new();
    while i < tokens.len() {
        match &tokens[i] {
            Token::Do => break,
            Token::Semicolon | Token::Newline => {
                i += 1;
                if i < tokens.len() && tokens[i] == Token::Do {
                    break;
                }
            }
            Token::Word(word) => {
                items.push(word.clone());
                i += 1;
            }
            _ => {
                return Err(format!("Unexpected token in for items: {:?}", tokens[i]));
            }
        }
    }

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Do {
        return Err("Expected 'do' in for loop".to_string());
    }
    i += 1;

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    // Body tokens up to the matching same-depth `done`; nested `for`
    // loops (but note: not `while`) raise the depth here.
    let mut body_tokens = Vec::new();
    let mut depth = 0;
    while i < tokens.len() {
        match &tokens[i] {
            Token::For => {
                depth += 1;
                body_tokens.push(tokens[i].clone());
            }
            Token::Done => {
                if depth > 0 {
                    depth -= 1;
                    body_tokens.push(tokens[i].clone());
                } else {
                    break;
                }
            }
            Token::Newline => {
                // Lookahead: stop at `done` when only newlines precede it.
                let mut j = i + 1;
                while j < tokens.len() && tokens[j] == Token::Newline {
                    j += 1;
                }
                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
                    i = j;
                    break;
                }
                body_tokens.push(tokens[i].clone());
            }
            _ => {
                body_tokens.push(tokens[i].clone());
            }
        }
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Done {
        return Err("Expected 'done' to close for loop".to_string());
    }

    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(&body_tokens)?
    };

    Ok(Ast::For {
        variable,
        items,
        body: Box::new(body_ast),
    })
}
1449
/// Parses `while CONDITION; do BODY; done` starting at
/// `tokens[0] == While`. The condition runs up to `do`; the body is
/// collected with `while`/`for` depth tracking and re-parsed.
fn parse_while(tokens: &[Token]) -> Result<Ast, String> {
    // Index 1: skip the leading `while` keyword.
    let mut i = 1;
    // Condition tokens up to `do`; `;`/newline before `do` are consumed.
    let mut cond_tokens = Vec::new();
    while i < tokens.len() {
        match &tokens[i] {
            Token::Do => break,
            Token::Semicolon | Token::Newline => {
                i += 1;
                if i < tokens.len() && tokens[i] == Token::Do {
                    break;
                }
            }
            _ => {
                cond_tokens.push(tokens[i].clone());
                i += 1;
            }
        }
    }

    if cond_tokens.is_empty() {
        return Err("Expected condition after while".to_string());
    }

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Do {
        return Err("Expected 'do' in while loop".to_string());
    }
    i += 1;

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    // Body tokens up to the matching same-depth `done`; both nested
    // `while` and `for` loops raise the depth.
    let mut body_tokens = Vec::new();
    let mut depth = 0;
    while i < tokens.len() {
        match &tokens[i] {
            Token::While | Token::For => {
                depth += 1;
                body_tokens.push(tokens[i].clone());
            }
            Token::Done => {
                if depth > 0 {
                    depth -= 1;
                    body_tokens.push(tokens[i].clone());
                } else {
                    break;
                }
            }
            Token::Newline => {
                // Lookahead: stop at `done` when only newlines precede it.
                let mut j = i + 1;
                while j < tokens.len() && tokens[j] == Token::Newline {
                    j += 1;
                }
                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
                    i = j;
                    break;
                }
                body_tokens.push(tokens[i].clone());
            }
            _ => {
                body_tokens.push(tokens[i].clone());
            }
        }
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Done {
        return Err("Expected 'done' to close while loop".to_string());
    }

    let condition_ast = parse_slice(&cond_tokens)?;

    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(&body_tokens)?
    };

    Ok(Ast::While {
        condition: Box::new(condition_ast),
        body: Box::new(body_ast),
    })
}
1549
/// Parses a function definition from either form:
/// `name ( ) { BODY }` (separate tokens) or legacy `name() { BODY }`
/// (where `name()` is a single word token).
///
/// The name is taken from the first word (stripped of a `()` suffix when
/// present). The closing `}` is located with brace-depth tracking, skipping
/// complete nested function definitions and if/loop/case constructs so
/// their contents cannot unbalance the count.
fn parse_function_definition(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.len() < 2 {
        return Err("Function definition too short".to_string());
    }

    let func_name = if let Token::Word(word) = &tokens[0] {
        // `name()` as one word → strip the parens; otherwise the word
        // itself is the name.
        if let Some(paren_pos) = word.find('(') {
            if word.ends_with(')') && paren_pos > 0 {
                word[..paren_pos].to_string()
            } else {
                word.clone()
            }
        } else {
            word.clone()
        }
    } else {
        return Err("Function name must be a word".to_string());
    };

    // Position of the `{` token: index 3 for `name ( ) {`, index 1 for the
    // legacy single-word form.
    let brace_pos =
        if tokens.len() >= 4 && tokens[1] == Token::LeftParen && tokens[2] == Token::RightParen {
            if tokens[3] != Token::LeftBrace {
                return Err("Expected { after function name".to_string());
            }
            3
        } else if tokens.len() >= 2 && tokens[1] == Token::LeftBrace {
            1
        } else {
            return Err("Expected ( after function name or { for legacy format".to_string());
        };

    let mut brace_depth = 0;
    let mut body_end = 0;
    let mut found_closing = false;
    let mut i = brace_pos + 1;

    while i < tokens.len() {
        // A nested `name ( ) {` definition inside the body: consume it
        // whole so its braces don't affect our depth counter.
        if i + 3 < tokens.len()
            && matches!(&tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            i += 4;
            let mut nested_depth = 1;
            while i < tokens.len() && nested_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => nested_depth += 1,
                    Token::RightBrace => nested_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            continue;
        }

        match &tokens[i] {
            Token::LeftBrace => {
                brace_depth += 1;
                i += 1;
            }
            Token::RightBrace => {
                // Depth 0 means this `}` closes the function itself.
                if brace_depth == 0 {
                    body_end = i;
                    found_closing = true;
                    break;
                } else {
                    brace_depth -= 1;
                    i += 1;
                }
            }
            Token::If => {
                skip_to_matching_fi(tokens, &mut i);
            }
            Token::For | Token::While => {
                skip_to_matching_done(tokens, &mut i);
            }
            Token::Case => {
                skip_to_matching_esac(tokens, &mut i);
            }
            _ => {
                i += 1;
            }
        }
    }

    if !found_closing {
        return Err("Missing closing } for function definition".to_string());
    }

    // Body is everything strictly between `{` and the matching `}`.
    let body_tokens = &tokens[brace_pos + 1..body_end];

    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(body_tokens)?
    };

    Ok(Ast::FunctionDefinition {
        name: func_name,
        body: Box::new(body_ast),
    })
}
1671
#[cfg(test)]
mod tests {
    use super::super::lexer::Token;
    use super::*;

    /// Shorthand: a `Token::Word` built from a string literal.
    fn w(s: &str) -> Token {
        Token::Word(s.to_string())
    }

    /// Shorthand: a simple (non-compound) `ShellCommand` with the given
    /// argument literals and redirection list.
    fn cmd(args: &[&str], redirections: Vec<Redirection>) -> ShellCommand {
        ShellCommand {
            args: args.iter().map(|a| a.to_string()).collect(),
            redirections,
            compound: None,
        }
    }

    /// Asserts that `ast` is an `Ast::Pipeline` whose first command carries
    /// exactly the expected arguments.
    fn assert_first_cmd_args(ast: &Ast, expected: &[&str]) {
        match ast {
            Ast::Pipeline(cmds) => assert_eq!(cmds[0].args, expected),
            other => panic!("expected pipeline, got {:?}", other),
        }
    }

    #[test]
    fn test_single_command() {
        let ast = parse(vec![w("ls")]).unwrap();
        assert_eq!(ast, Ast::Pipeline(vec![cmd(&["ls"], vec![])]));
    }

    #[test]
    fn test_command_with_args() {
        let ast = parse(vec![w("ls"), w("-la")]).unwrap();
        assert_eq!(ast, Ast::Pipeline(vec![cmd(&["ls", "-la"], vec![])]));
    }

    #[test]
    fn test_pipeline() {
        let ast = parse(vec![w("ls"), Token::Pipe, w("grep"), w("txt")]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(&["ls"], vec![]), cmd(&["grep", "txt"], vec![])])
        );
    }

    #[test]
    fn test_input_redirection() {
        let ast = parse(vec![w("cat"), Token::RedirIn, w("input.txt")]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["cat"],
                vec![Redirection::Input("input.txt".to_string())]
            )])
        );
    }

    #[test]
    fn test_output_redirection() {
        let ast = parse(vec![w("printf"), w("hello"), Token::RedirOut, w("output.txt")]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["printf", "hello"],
                vec![Redirection::Output("output.txt".to_string())]
            )])
        );
    }

    #[test]
    fn test_append_redirection() {
        let ast =
            parse(vec![w("printf"), w("hello"), Token::RedirAppend, w("output.txt")]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["printf", "hello"],
                vec![Redirection::Append("output.txt".to_string())]
            )])
        );
    }

    #[test]
    fn test_complex_pipeline_with_redirections() {
        let tokens = vec![
            w("cat"),
            Token::RedirIn,
            w("input.txt"),
            Token::Pipe,
            w("grep"),
            w("pattern"),
            Token::Pipe,
            w("sort"),
            Token::RedirOut,
            w("output.txt"),
        ];
        assert_eq!(
            parse(tokens).unwrap(),
            Ast::Pipeline(vec![
                cmd(&["cat"], vec![Redirection::Input("input.txt".to_string())]),
                cmd(&["grep", "pattern"], vec![]),
                cmd(&["sort"], vec![Redirection::Output("output.txt".to_string())]),
            ])
        );
    }

    #[test]
    fn test_empty_tokens() {
        assert_eq!(parse(vec![]).unwrap_err(), "No commands found");
    }

    #[test]
    fn test_only_pipe() {
        assert_eq!(parse(vec![Token::Pipe]).unwrap_err(), "No commands found");
    }

    // A trailing redirection operator with no target file is silently dropped.
    #[test]
    fn test_redirection_without_file() {
        let ast = parse(vec![w("cat"), Token::RedirIn]).unwrap();
        assert_eq!(ast, Ast::Pipeline(vec![cmd(&["cat"], vec![])]));
    }

    #[test]
    fn test_multiple_redirections() {
        let tokens = vec![
            w("cat"),
            Token::RedirIn,
            w("file1.txt"),
            Token::RedirOut,
            w("file2.txt"),
        ];
        assert_eq!(
            parse(tokens).unwrap(),
            Ast::Pipeline(vec![cmd(
                &["cat"],
                vec![
                    Redirection::Input("file1.txt".to_string()),
                    Redirection::Output("file2.txt".to_string()),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_if() {
        let tokens = vec![
            Token::If,
            w("true"),
            Token::Semicolon,
            Token::Then,
            w("printf"),
            w("yes"),
            Token::Semicolon,
            Token::Fi,
        ];
        match parse(tokens).unwrap() {
            Ast::If { branches, else_branch } => {
                assert_eq!(branches.len(), 1);
                assert_first_cmd_args(&branches[0].0, &["true"]);
                assert_first_cmd_args(&branches[0].1, &["printf", "yes"]);
                assert!(else_branch.is_none());
            }
            other => panic!("expected if, got {:?}", other),
        }
    }

    #[test]
    fn test_parse_if_elif() {
        let tokens = vec![
            Token::If,
            w("false"),
            Token::Semicolon,
            Token::Then,
            w("printf"),
            w("no"),
            Token::Semicolon,
            Token::Elif,
            w("true"),
            Token::Semicolon,
            Token::Then,
            w("printf"),
            w("yes"),
            Token::Semicolon,
            Token::Fi,
        ];
        match parse(tokens).unwrap() {
            Ast::If { branches, else_branch } => {
                assert_eq!(branches.len(), 2);
                assert_first_cmd_args(&branches[0].0, &["false"]);
                assert_first_cmd_args(&branches[0].1, &["printf", "no"]);
                assert_first_cmd_args(&branches[1].0, &["true"]);
                assert_first_cmd_args(&branches[1].1, &["printf", "yes"]);
                assert!(else_branch.is_none());
            }
            other => panic!("expected if, got {:?}", other),
        }
    }

    #[test]
    fn test_parse_assignment() {
        match parse(vec![w("MY_VAR=test_value")]).unwrap() {
            Ast::Assignment { var, value } => {
                assert_eq!(var, "MY_VAR");
                assert_eq!(value, "test_value");
            }
            other => panic!("expected assignment, got {:?}", other),
        }
    }

    #[test]
    fn test_parse_assignment_quoted() {
        match parse(vec![w("MY_VAR=hello world")]).unwrap() {
            Ast::Assignment { var, value } => {
                assert_eq!(var, "MY_VAR");
                assert_eq!(value, "hello world");
            }
            other => panic!("expected assignment, got {:?}", other),
        }
    }

    // A name starting with a digit is not a valid assignment target, so the
    // whole word falls back to being an ordinary command.
    #[test]
    fn test_parse_assignment_invalid() {
        let ast = parse(vec![w("123VAR=value")]).unwrap();
        assert_first_cmd_args(&ast, &["123VAR=value"]);
    }

    #[test]
    fn test_parse_function_definition() {
        let tokens = vec![
            w("myfunc"),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            w("echo"),
            w("hello"),
            Token::RightBrace,
        ];
        match parse(tokens).unwrap() {
            Ast::FunctionDefinition { name, body } => {
                assert_eq!(name, "myfunc");
                assert_first_cmd_args(&body, &["echo", "hello"]);
            }
            other => panic!("expected function definition, got {:?}", other),
        }
    }

    // An empty body is replaced with a no-op `true` pipeline.
    #[test]
    fn test_parse_function_definition_empty() {
        let tokens = vec![
            w("emptyfunc"),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            Token::RightBrace,
        ];
        match parse(tokens).unwrap() {
            Ast::FunctionDefinition { name, body } => {
                assert_eq!(name, "emptyfunc");
                assert_first_cmd_args(&body, &["true"]);
            }
            other => panic!("expected function definition, got {:?}", other),
        }
    }

    // Legacy form: the lexer fused the parens into the name word.
    #[test]
    fn test_parse_function_definition_legacy_format() {
        let tokens = vec![
            w("legacyfunc()"),
            Token::LeftBrace,
            w("echo"),
            w("hello"),
            Token::RightBrace,
        ];
        match parse(tokens).unwrap() {
            Ast::FunctionDefinition { name, body } => {
                assert_eq!(name, "legacyfunc");
                assert_first_cmd_args(&body, &["echo", "hello"]);
            }
            other => panic!("expected function definition, got {:?}", other),
        }
    }

    #[test]
    fn test_parse_local_assignment() {
        match parse(vec![Token::Local, w("MY_VAR=test_value")]).unwrap() {
            Ast::LocalAssignment { var, value } => {
                assert_eq!(var, "MY_VAR");
                assert_eq!(value, "test_value");
            }
            other => panic!("expected local assignment, got {:?}", other),
        }
    }

    #[test]
    fn test_parse_local_assignment_separate_tokens() {
        match parse(vec![Token::Local, w("MY_VAR"), w("test_value")]).unwrap() {
            Ast::LocalAssignment { var, value } => {
                assert_eq!(var, "MY_VAR");
                assert_eq!(value, "test_value");
            }
            other => panic!("expected local assignment, got {:?}", other),
        }
    }

    #[test]
    fn test_parse_local_assignment_invalid_var_name() {
        assert!(parse(vec![Token::Local, w("123VAR=value")]).is_err());
    }

    // The lexer's quote flag (`false` here) is carried through as a string
    // in the second `HereDoc` field.
    #[test]
    fn test_parse_here_document_redirection() {
        let ast = parse(vec![w("cat"), Token::RedirHereDoc("EOF".to_string(), false)]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["cat"],
                vec![Redirection::HereDoc("EOF".to_string(), "false".to_string())]
            )])
        );
    }

    #[test]
    fn test_parse_here_string_redirection() {
        let ast = parse(vec![w("grep"), Token::RedirHereString("pattern".to_string())]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["grep"],
                vec![Redirection::HereString("pattern".to_string())]
            )])
        );
    }

    #[test]
    fn test_parse_mixed_redirections() {
        let tokens = vec![
            w("cat"),
            Token::RedirIn,
            w("file.txt"),
            Token::RedirHereString("fallback".to_string()),
            Token::RedirOut,
            w("output.txt"),
        ];
        assert_eq!(
            parse(tokens).unwrap(),
            Ast::Pipeline(vec![cmd(
                &["cat"],
                vec![
                    Redirection::Input("file.txt".to_string()),
                    Redirection::HereString("fallback".to_string()),
                    Redirection::Output("output.txt".to_string()),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_fd_input_redirection() {
        let ast =
            parse(vec![w("command"), Token::RedirectFdIn(3, "input.txt".to_string())]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["command"],
                vec![Redirection::FdInput(3, "input.txt".to_string())]
            )])
        );
    }

    #[test]
    fn test_parse_fd_output_redirection() {
        let ast =
            parse(vec![w("command"), Token::RedirectFdOut(2, "errors.log".to_string())]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["command"],
                vec![Redirection::FdOutput(2, "errors.log".to_string())]
            )])
        );
    }

    #[test]
    fn test_parse_fd_append_redirection() {
        let ast =
            parse(vec![w("command"), Token::RedirectFdAppend(2, "errors.log".to_string())])
                .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["command"],
                vec![Redirection::FdAppend(2, "errors.log".to_string())]
            )])
        );
    }

    #[test]
    fn test_parse_fd_duplicate() {
        let ast = parse(vec![w("command"), Token::RedirectFdDup(2, 1)]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(&["command"], vec![Redirection::FdDuplicate(2, 1)])])
        );
    }

    #[test]
    fn test_parse_fd_close() {
        let ast = parse(vec![w("command"), Token::RedirectFdClose(2)]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(&["command"], vec![Redirection::FdClose(2)])])
        );
    }

    #[test]
    fn test_parse_fd_input_output() {
        let ast =
            parse(vec![w("command"), Token::RedirectFdInOut(3, "file.txt".to_string())]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["command"],
                vec![Redirection::FdInputOutput(3, "file.txt".to_string())]
            )])
        );
    }

    #[test]
    fn test_parse_multiple_fd_redirections() {
        let tokens = vec![
            w("command"),
            Token::RedirectFdOut(2, "err.log".to_string()),
            Token::RedirectFdIn(3, "input.txt".to_string()),
            Token::RedirectFdAppend(4, "append.log".to_string()),
        ];
        assert_eq!(
            parse(tokens).unwrap(),
            Ast::Pipeline(vec![cmd(
                &["command"],
                vec![
                    Redirection::FdOutput(2, "err.log".to_string()),
                    Redirection::FdInput(3, "input.txt".to_string()),
                    Redirection::FdAppend(4, "append.log".to_string()),
                ]
            )])
        );
    }

    // The classic `3>&1 1>&2 2>&3 3>&-` swap must keep its exact order.
    #[test]
    fn test_parse_fd_swap_pattern() {
        let tokens = vec![
            w("command"),
            Token::RedirectFdDup(3, 1),
            Token::RedirectFdDup(1, 2),
            Token::RedirectFdDup(2, 3),
            Token::RedirectFdClose(3),
        ];
        assert_eq!(
            parse(tokens).unwrap(),
            Ast::Pipeline(vec![cmd(
                &["command"],
                vec![
                    Redirection::FdDuplicate(3, 1),
                    Redirection::FdDuplicate(1, 2),
                    Redirection::FdDuplicate(2, 3),
                    Redirection::FdClose(3),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_mixed_basic_and_fd_redirections() {
        let tokens = vec![
            w("command"),
            Token::RedirOut,
            w("output.txt"),
            Token::RedirectFdDup(2, 1),
        ];
        assert_eq!(
            parse(tokens).unwrap(),
            Ast::Pipeline(vec![cmd(
                &["command"],
                vec![
                    Redirection::Output("output.txt".to_string()),
                    Redirection::FdDuplicate(2, 1),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_fd_redirection_ordering() {
        let tokens = vec![
            w("command"),
            Token::RedirectFdOut(2, "first.log".to_string()),
            Token::RedirOut,
            w("second.txt"),
            Token::RedirectFdDup(2, 1),
        ];
        assert_eq!(
            parse(tokens).unwrap(),
            Ast::Pipeline(vec![cmd(
                &["command"],
                vec![
                    Redirection::FdOutput(2, "first.log".to_string()),
                    Redirection::Output("second.txt".to_string()),
                    Redirection::FdDuplicate(2, 1),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_fd_redirection_with_pipe() {
        let tokens = vec![
            w("command"),
            Token::RedirectFdDup(2, 1),
            Token::Pipe,
            w("grep"),
            w("error"),
        ];
        assert_eq!(
            parse(tokens).unwrap(),
            Ast::Pipeline(vec![
                cmd(&["command"], vec![Redirection::FdDuplicate(2, 1)]),
                cmd(&["grep", "error"], vec![]),
            ])
        );
    }

    // Boundary descriptor numbers (0 and 9) parse like any other fd.
    #[test]
    fn test_parse_all_fd_numbers() {
        let ast = parse(vec![w("cmd"), Token::RedirectFdIn(0, "file".to_string())]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["cmd"],
                vec![Redirection::FdInput(0, "file".to_string())]
            )])
        );

        let ast = parse(vec![w("cmd"), Token::RedirectFdOut(9, "file".to_string())]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![cmd(
                &["cmd"],
                vec![Redirection::FdOutput(9, "file".to_string())]
            )])
        );
    }
}