1use super::lexer::Token;
2
/// Abstract syntax tree node for a parsed shell program.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Ast {
    /// Commands connected by `|`; a simple command is a one-element pipeline.
    Pipeline(Vec<ShellCommand>),
    /// Commands separated by `;` or newlines, executed in order.
    Sequence(Vec<Ast>),
    /// `VAR=value` variable assignment.
    Assignment {
        var: String,
        value: String,
    },
    /// `local VAR=value` assignment (function-local scope).
    LocalAssignment {
        var: String,
        value: String,
    },
    /// `if`/`elif`/`else`/`fi`; each entry of `branches` pairs a condition
    /// with the body run when it succeeds.
    If {
        branches: Vec<(Box<Ast>, Box<Ast>)>, else_branch: Option<Box<Ast>>,
    },
    /// `case WORD in ...esac`: pattern lists paired with their bodies, plus
    /// an optional default branch (built from a lone `*` pattern).
    Case {
        word: String,
        cases: Vec<(Vec<String>, Ast)>,
        default: Option<Box<Ast>>,
    },
    /// `for VARIABLE in ITEMS; do BODY; done`.
    For {
        variable: String,
        items: Vec<String>,
        body: Box<Ast>,
    },
    /// `while CONDITION; do BODY; done`.
    While {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// `name() { BODY }` function definition.
    FunctionDefinition {
        name: String,
        body: Box<Ast>,
    },
    /// Invocation of a defined function with its arguments.
    FunctionCall {
        name: String,
        args: Vec<String>,
    },
    /// `return [VALUE]` from inside a function.
    Return {
        value: Option<String>,
    },
    /// `left && right` — right runs only if left succeeds.
    And {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `left || right` — right runs only if left fails.
    Or {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `( BODY )` executed in a subshell.
    Subshell {
        body: Box<Ast>,
    },
}
58
/// A single I/O redirection attached to a command.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Redirection {
    /// `< file`
    Input(String),
    /// `> file`
    Output(String),
    /// `>> file`
    Append(String),
    /// `N< file` — read from `file` on descriptor `N`.
    FdInput(i32, String),
    /// `N> file` — write to `file` on descriptor `N`.
    FdOutput(i32, String),
    /// `N>> file` — append to `file` on descriptor `N`.
    FdAppend(i32, String),
    /// `N>&M` — duplicate descriptor `M` onto `N`.
    FdDuplicate(i32, i32),
    /// `N>&-` — close descriptor `N`.
    FdClose(i32),
    /// `N<> file` — open `file` for reading and writing on descriptor `N`.
    FdInputOutput(i32, String),
    /// Here-document: (delimiter, second field is the lexer's `quoted`
    /// value rendered as text — presumably whether the delimiter was
    /// quoted, suppressing expansion; TODO confirm against the lexer).
    HereDoc(String, String),
    /// `<<< word` here-string payload.
    HereString(String),
}
85
/// One stage of a pipeline: a simple command (`args`) and/or a compound
/// statement (`compound`), plus the redirections applied to it.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ShellCommand {
    /// Command name followed by its arguments.
    pub args: Vec<String>,
    /// Redirections attached to this command, in source order.
    pub redirections: Vec<Redirection>,
    /// Compound statement (e.g. a subshell) carried by this stage; set by
    /// the pipeline parser when a `( ... )` group appears as a stage.
    pub compound: Option<Box<Ast>>,
}
95
/// Returns `true` if `name` is a valid shell identifier per POSIX name
/// rules: an ASCII letter or `_` followed only by ASCII letters, digits,
/// or `_`. Used to decide whether `X=...` is an assignment and whether a
/// word is a legal function name.
///
/// Previously only the first character was checked, so strings like
/// `a-b` or `a.b` were accepted and `a.b=c` was mis-parsed as an
/// assignment instead of a command word.
fn is_valid_variable_name(name: &str) -> bool {
    let mut chars = name.chars();
    match chars.next() {
        // First char: letter or underscore; the rest may also be digits.
        Some(first) => {
            (first.is_ascii_alphabetic() || first == '_')
                && chars.all(|c| c.is_ascii_alphanumeric() || c == '_')
        }
        // Empty string is never a valid name.
        None => false,
    }
}
105
106fn create_empty_body_ast() -> Ast {
109 Ast::Pipeline(vec![ShellCommand {
110 args: vec!["true".to_string()],
111 redirections: Vec::new(),
112 compound: None,
113 }])
114}
115
116fn skip_newlines(tokens: &[Token], i: &mut usize) {
119 while *i < tokens.len() && tokens[*i] == Token::Newline {
120 *i += 1;
121 }
122}
123
124fn skip_to_matching_fi(tokens: &[Token], i: &mut usize) {
127 let mut if_depth = 1;
128 *i += 1; while *i < tokens.len() && if_depth > 0 {
130 match tokens[*i] {
131 Token::If => if_depth += 1,
132 Token::Fi => if_depth -= 1,
133 _ => {}
134 }
135 *i += 1;
136 }
137}
138
139fn skip_to_matching_done(tokens: &[Token], i: &mut usize) {
142 let mut loop_depth = 1;
143 *i += 1; while *i < tokens.len() && loop_depth > 0 {
145 match tokens[*i] {
146 Token::For | Token::While => loop_depth += 1,
147 Token::Done => loop_depth -= 1,
148 _ => {}
149 }
150 *i += 1;
151 }
152}
153
154fn skip_to_matching_esac(tokens: &[Token], i: &mut usize) {
156 *i += 1; while *i < tokens.len() {
158 if tokens[*i] == Token::Esac {
159 *i += 1;
160 break;
161 }
162 *i += 1;
163 }
164}
165
/// Entry point: parses a full token stream into an AST.
///
/// A leading function definition (`name ( ) { ... }` or the compact
/// `name() {` form) is handled specially so that any commands after the
/// definition become a `Sequence`; everything else goes through
/// `parse_commands_sequentially`.
pub fn parse(tokens: Vec<Token>) -> Result<Ast, String> {
    // Case 1: input starts with `name ( ) {` — a function definition.
    if tokens.len() >= 4
        && let (Token::Word(_), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
    {
        // Scan forward for the function's closing brace, skipping whole
        // nested constructs (if/fi, loops, case) so their braces and
        // keywords don't confuse the depth counter.
        let mut brace_depth = 1;
        let mut function_end = tokens.len();
        let mut j = 4;
        while j < tokens.len() {
            match &tokens[j] {
                Token::LeftBrace => {
                    brace_depth += 1;
                    j += 1;
                }
                Token::RightBrace => {
                    brace_depth -= 1;
                    if brace_depth == 0 {
                        // `function_end` is one past the closing brace.
                        function_end = j + 1;
                        break;
                    }
                    j += 1;
                }
                Token::If => {
                    // Skip a complete (possibly nested) if/fi construct.
                    let mut if_depth = 1;
                    j += 1;
                    while j < tokens.len() && if_depth > 0 {
                        match tokens[j] {
                            Token::If => if_depth += 1,
                            Token::Fi => if_depth -= 1,
                            _ => {}
                        }
                        j += 1;
                    }
                }
                Token::For | Token::While => {
                    // Skip a complete (possibly nested) loop construct.
                    let mut for_depth = 1;
                    j += 1;
                    while j < tokens.len() && for_depth > 0 {
                        match tokens[j] {
                            Token::For | Token::While => for_depth += 1,
                            Token::Done => for_depth -= 1,
                            _ => {}
                        }
                        j += 1;
                    }
                }
                Token::Case => {
                    // Skip to the closing esac (case treated as non-nesting).
                    j += 1;
                    while j < tokens.len() {
                        if tokens[j] == Token::Esac {
                            j += 1;
                            break;
                        }
                        j += 1;
                    }
                }
                _ => {
                    j += 1;
                }
            }
        }

        if brace_depth == 0 && function_end <= tokens.len() {
            let function_tokens = &tokens[0..function_end];
            let remaining_tokens = &tokens[function_end..];

            let function_ast = parse_function_definition(function_tokens)?;

            // Join the definition with whatever follows it, if anything.
            return if remaining_tokens.is_empty() {
                Ok(function_ast)
            } else {
                let remaining_ast = parse_commands_sequentially(remaining_tokens)?;
                Ok(Ast::Sequence(vec![function_ast, remaining_ast]))
            };
        }
    }

    // Case 2: compact `name() {` where the lexer fused the parens into
    // the first word.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
        && tokens[1] == Token::LeftBrace
    {
        return parse_function_definition(&tokens);
    }

    // Default: a sequence of commands.
    parse_commands_sequentially(&tokens)
}
266
/// Parses a single command span (already split on top-level separators),
/// dispatching in priority order: assignments, `local`, `return`,
/// compound statements (`if`/`case`/`for`/`while`), function definitions,
/// and finally a plain pipeline. Case order matters: earlier, more
/// specific patterns must win before the pipeline fallback.
fn parse_slice(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.is_empty() {
        return Err("No commands found".to_string());
    }

    // `VAR=head tail` split into two words: the value is the concatenation
    // of the part after `=` and the second word.
    if tokens.len() == 2 {
        if let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
            && let Some(eq_pos) = var_eq.find('=')
            && eq_pos > 0
            && eq_pos < var_eq.len()
        {
            let var = var_eq[..eq_pos].to_string();
            let full_value = format!("{}{}", &var_eq[eq_pos + 1..], value);
            if is_valid_variable_name(&var) {
                return Ok(Ast::Assignment {
                    var,
                    value: full_value,
                });
            }
        }
    }

    // `VAR= value` where `=` ends the first word: second word is the value.
    if tokens.len() == 2
        && let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos == var_eq.len() - 1
    {
        let var = var_eq[..eq_pos].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment {
                var,
                value: value.clone(),
            });
        }
    }

    // `local VAR value` (a trailing `=` on the name is tolerated).
    if tokens.len() == 3
        && let (Token::Local, Token::Word(var), Token::Word(value)) =
            (&tokens[0], &tokens[1], &tokens[2])
    {
        let clean_var = if var.ends_with('=') {
            &var[..var.len() - 1]
        } else {
            var
        };
        if is_valid_variable_name(clean_var) {
            return Ok(Ast::LocalAssignment {
                var: clean_var.to_string(),
                value: value.clone(),
            });
        }
    }

    // `return` with an optional single value word.
    if !tokens.is_empty()
        && tokens.len() <= 2
        && let Token::Return = &tokens[0]
    {
        if tokens.len() == 1 {
            return Ok(Ast::Return { value: None });
        } else if let Token::Word(word) = &tokens[1] {
            return Ok(Ast::Return {
                value: Some(word.clone()),
            });
        }
    }

    // `local VAR=value` fused in one word after `local`.
    if tokens.len() == 2
        && let (Token::Local, Token::Word(var_eq)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos < var_eq.len()
    {
        let var = var_eq[..eq_pos].to_string();
        let value = var_eq[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::LocalAssignment { var, value });
        }
    }

    // `VAR=value` as a single word.
    if tokens.len() == 1
        && let Token::Word(ref word) = tokens[0]
        && let Some(eq_pos) = word.find('=')
        && eq_pos > 0
        && eq_pos < word.len()
    {
        let var = word[..eq_pos].to_string();
        let value = word[eq_pos + 1..].to_string();
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment { var, value });
        }
    }

    // Compound statements dispatch on their leading keyword.
    if let Token::If = tokens[0] {
        return parse_if(tokens);
    }

    if let Token::Case = tokens[0] {
        return parse_case(tokens);
    }

    if let Token::For = tokens[0] {
        return parse_for(tokens);
    }

    if let Token::While = tokens[0] {
        return parse_while(tokens);
    }

    // `name ( ) { ... }` function definition.
    if tokens.len() >= 4
        && let (Token::Word(word), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
        && is_valid_variable_name(word)
    {
        return parse_function_definition(tokens);
    }

    // `name() { ... }` with the parens fused into the first word.
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
    {
        let func_name = &word[..paren_pos];
        if is_valid_variable_name(func_name) && tokens[1] == Token::LeftBrace {
            return parse_function_definition(tokens);
        }
    }

    // Fallback: ordinary (possibly piped) command.
    parse_pipeline(tokens)
}
424
/// Splits a token stream into top-level commands (on newlines/semicolons)
/// and parses each one, producing a single AST or an `Ast::Sequence`.
///
/// Handles, per iteration: comment/blank skipping, `( ... )` subshells
/// with trailing redirections, whole compound constructs (so their inner
/// separators aren't split on), and `&&`/`||` chaining (which folds the
/// entire remainder into the right-hand side and ends the loop).
fn parse_commands_sequentially(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 0;
    let mut commands = Vec::new();

    while i < tokens.len() {
        // Skip blank lines and `#`-comment words before the next command.
        while i < tokens.len() {
            match &tokens[i] {
                Token::Newline => {
                    i += 1;
                }
                Token::Word(word) if word.starts_with('#') => {
                    // Consume the rest of the comment line.
                    while i < tokens.len() && tokens[i] != Token::Newline {
                        i += 1;
                    }
                    if i < tokens.len() {
                        i += 1; }
                }
                _ => break,
            }
        }

        if i >= tokens.len() {
            break;
        }

        let start = i;

        // Subshell command: `( ... )` possibly followed by redirections
        // and a pipe or logical operator.
        if tokens[i] == Token::LeftParen {
            let mut paren_depth = 1;
            let mut j = i + 1;

            while j < tokens.len() && paren_depth > 0 {
                match tokens[j] {
                    Token::LeftParen => paren_depth += 1,
                    Token::RightParen => paren_depth -= 1,
                    _ => {}
                }
                j += 1;
            }

            if paren_depth != 0 {
                return Err("Unmatched parenthesis in subshell".to_string());
            }

            // `j` is one past the closing paren; exclude both parens.
            let subshell_tokens = &tokens[i + 1..j - 1];

            if subshell_tokens.is_empty() {
                return Err("Empty subshell".to_string());
            }

            let body_ast = parse_commands_sequentially(subshell_tokens)?;

            let mut subshell_ast = Ast::Subshell {
                body: Box::new(body_ast),
            };

            // Collect redirections attached after the closing paren.
            i = j; let mut redirections = Vec::new();
            while i < tokens.len() {
                match &tokens[i] {
                    Token::RedirOut => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Output(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirIn => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Input(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirAppend => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Append(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirectFdOut(fd, file) => {
                        redirections.push(Redirection::FdOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdIn(fd, file) => {
                        redirections.push(Redirection::FdInput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdAppend(fd, file) => {
                        redirections.push(Redirection::FdAppend(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdDup(from_fd, to_fd) => {
                        redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
                        i += 1;
                    }
                    Token::RedirectFdClose(fd) => {
                        redirections.push(Redirection::FdClose(*fd));
                        i += 1;
                    }
                    Token::RedirectFdInOut(fd, file) => {
                        redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirHereDoc(delimiter, quoted) => {
                        redirections.push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                        i += 1;
                    }
                    Token::RedirHereString(content) => {
                        redirections.push(Redirection::HereString(content.clone()));
                        i += 1;
                    }
                    _ => break,
                }
            }

            // Redirections can't live on Ast::Subshell directly; wrap the
            // subshell in a command that carries them.
            if !redirections.is_empty() {
                subshell_ast = Ast::Pipeline(vec![ShellCommand {
                    args: Vec::new(),
                    redirections,
                    compound: Some(Box::new(subshell_ast)),
                }]);
            }

            // Subshell piped into something: reparse the whole span as a
            // pipeline and stop (the pipeline consumes the rest).
            if i < tokens.len() && tokens[i] == Token::Pipe {
                let pipeline_ast = parse_pipeline(&tokens[start..])?;
                commands.push(pipeline_ast);
                break; }

            // `( ... ) && rest` / `|| rest`: fold the remainder recursively
            // into the right-hand side, then stop.
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(subshell_ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(subshell_ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break; } else {
                commands.push(subshell_ast);
            }

            if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
                i += 1;
            }
            continue;
        }

        // Find the end of the current command, treating each compound
        // construct as an indivisible span.
        if tokens[i] == Token::If {
            // depth starts at 0 because the loop counts the leading `if`.
            let mut depth = 0;
            while i < tokens.len() {
                match tokens[i] {
                    Token::If => depth += 1,
                    Token::Fi => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1; break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }

        } else if tokens[i] == Token::For {
            let mut depth = 1; i += 1; while i < tokens.len() {
                match tokens[i] {
                    Token::For | Token::While => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1; break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::While {
            let mut depth = 1; i += 1; while i < tokens.len() {
                match tokens[i] {
                    Token::While | Token::For => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1; break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::Case {
            // Case treated as non-nesting: scan to the next esac.
            while i < tokens.len() {
                if tokens[i] == Token::Esac {
                    i += 1; break;
                }
                i += 1;
            }
        } else if i + 3 < tokens.len()
            && matches!(tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            // Function definition: skip to its matching closing brace.
            let mut brace_depth = 1;
            i += 4; while i < tokens.len() && brace_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => brace_depth += 1,
                    Token::RightBrace => brace_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
        } else {
            // Simple command: scan to the next separator/operator. A
            // separator directly followed (modulo newlines) by else/elif/fi
            // is internal to an already-consumed if; skip past it.
            while i < tokens.len() {
                if tokens[i] == Token::Newline
                    || tokens[i] == Token::Semicolon
                    || tokens[i] == Token::And
                    || tokens[i] == Token::Or
                {
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len()
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        i = j + 1;
                        continue;
                    }
                    break;
                }
                i += 1;
            }
        }

        let command_tokens = &tokens[start..i];
        if !command_tokens.is_empty() {
            // A stray lone else/elif/fi keyword is a leftover from
            // if-parsing; silently skip it.
            if command_tokens.len() == 1 {
                match command_tokens[0] {
                    Token::Else | Token::Elif | Token::Fi => {
                        if i < tokens.len()
                            && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
                        {
                            i += 1;
                        }
                        continue;
                    }
                    _ => {}
                }
            }

            let ast = parse_slice(command_tokens)?;

            // `cmd && rest` / `cmd || rest`: fold the remainder into the
            // right-hand side and stop (right-associative chaining).
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break; } else {
                commands.push(ast);
            }
        }

        if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
            i += 1;
        }
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    // A single command needs no Sequence wrapper.
    if commands.len() == 1 {
        Ok(commands.into_iter().next().unwrap())
    } else {
        Ok(Ast::Sequence(commands))
    }
}
798
/// Parses a pipeline (`cmd | cmd | ...`) into `Ast::Pipeline`, where each
/// stage is either a simple command with redirections or a `( ... )`
/// subshell carried in `ShellCommand::compound`. Stops at control-flow
/// keywords (do/done/then/else/elif/fi/esac) so callers can parse bodies.
fn parse_pipeline(tokens: &[Token]) -> Result<Ast, String> {
    let mut commands = Vec::new();
    let mut current_cmd = ShellCommand::default();

    let mut i = 0;
    while i < tokens.len() {
        let token = &tokens[i];
        match token {
            Token::LeftParen => {
                // Subshell stage: locate the matching `)`.
                let mut paren_depth = 1;
                let mut j = i + 1;

                while j < tokens.len() && paren_depth > 0 {
                    match tokens[j] {
                        Token::LeftParen => paren_depth += 1,
                        Token::RightParen => paren_depth -= 1,
                        _ => {}
                    }
                    j += 1;
                }

                if paren_depth != 0 {
                    return Err("Unmatched parenthesis in pipeline".to_string());
                }

                // `j` is one past the closing paren; exclude both parens.
                let subshell_tokens = &tokens[i + 1..j - 1];
                if subshell_tokens.is_empty() {
                    return Err("Empty subshell in pipeline".to_string());
                }

                let body_ast = parse_commands_sequentially(subshell_tokens)?;

                current_cmd.compound = Some(Box::new(Ast::Subshell {
                    body: Box::new(body_ast),
                }));

                // Attach redirections that follow the closing paren.
                i = j; while i < tokens.len() {
                    match &tokens[i] {
                        Token::RedirOut => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd.redirections.push(Redirection::Output(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirIn => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd.redirections.push(Redirection::Input(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirAppend => {
                            i += 1;
                            if i < tokens.len() {
                                if let Token::Word(file) = &tokens[i] {
                                    current_cmd.redirections.push(Redirection::Append(file.clone()));
                                    i += 1;
                                }
                            }
                        }
                        Token::RedirectFdOut(fd, file) => {
                            current_cmd.redirections.push(Redirection::FdOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdIn(fd, file) => {
                            current_cmd.redirections.push(Redirection::FdInput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdAppend(fd, file) => {
                            current_cmd.redirections.push(Redirection::FdAppend(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirectFdDup(from_fd, to_fd) => {
                            current_cmd.redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
                            i += 1;
                        }
                        Token::RedirectFdClose(fd) => {
                            current_cmd.redirections.push(Redirection::FdClose(*fd));
                            i += 1;
                        }
                        Token::RedirectFdInOut(fd, file) => {
                            current_cmd.redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
                            i += 1;
                        }
                        Token::RedirHereDoc(delimiter, quoted) => {
                            current_cmd.redirections.push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                            i += 1;
                        }
                        Token::RedirHereString(content) => {
                            current_cmd.redirections.push(Redirection::HereString(content.clone()));
                            i += 1;
                        }
                        Token::Pipe => {
                            // Leave the pipe for the outer loop to handle.
                            break;
                        }
                        _ => break,
                    }
                }

                // The subshell stage is complete; start a fresh command.
                commands.push(current_cmd.clone());
                current_cmd = ShellCommand::default();

                continue;
            }
            Token::Word(word) => {
                current_cmd.args.push(word.clone());
            }
            Token::Pipe => {
                // Finish the current stage if it has any content.
                if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
                    commands.push(current_cmd.clone());
                    current_cmd = ShellCommand::default();
                }
            }
            Token::RedirIn => {
                // Redirection target is the following word, if present.
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Input(file.clone()));
                }
            }
            Token::RedirOut => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Output(file.clone()));
                }
            }
            Token::RedirAppend => {
                i += 1;
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd
                        .redirections
                        .push(Redirection::Append(file.clone()));
                }
            }
            Token::RedirHereDoc(delimiter, quoted) => {
                current_cmd
                    .redirections
                    .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
            }
            Token::RedirHereString(content) => {
                current_cmd
                    .redirections
                    .push(Redirection::HereString(content.clone()));
            }
            Token::RedirectFdIn(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdInput(*fd, file.clone()));
            }
            Token::RedirectFdOut(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdOutput(*fd, file.clone()));
            }
            Token::RedirectFdAppend(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdAppend(*fd, file.clone()));
            }
            Token::RedirectFdDup(from_fd, to_fd) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdDuplicate(*from_fd, *to_fd));
            }
            Token::RedirectFdClose(fd) => {
                current_cmd.redirections.push(Redirection::FdClose(*fd));
            }
            Token::RedirectFdInOut(fd, file) => {
                current_cmd
                    .redirections
                    .push(Redirection::FdInputOutput(*fd, file.clone()));
            }
            Token::RightParen => {
                // Tolerated only immediately after a LeftParen with args
                // already collected; otherwise it is a syntax error.
                if !current_cmd.args.is_empty()
                    && i > 0
                    && let Token::LeftParen = tokens[i - 1]
                {
                    break;
                }
                return Err("Unexpected ) in pipeline".to_string());
            }
            Token::Newline => {
                i += 1;
                continue;
            }
            Token::Do
            | Token::Done
            | Token::Then
            | Token::Else
            | Token::Elif
            | Token::Fi
            | Token::Esac => {
                // Control-flow keyword terminates the pipeline.
                break;
            }
            _ => {
                return Err(format!("Unexpected token in pipeline: {:?}", token));
            }
        }
        i += 1;
    }

    // Push the final stage if it collected any arguments.
    if !current_cmd.args.is_empty() {
        commands.push(current_cmd);
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    Ok(Ast::Pipeline(commands))
}
1044
/// Parses `if COND; then BODY [elif COND; then BODY]* [else BODY] fi`
/// into `Ast::If`. Each condition/body pair becomes one entry in
/// `branches`; nested if/fi depth is tracked so inner keywords are not
/// mistaken for this statement's terminators.
fn parse_if(tokens: &[Token]) -> Result<Ast, String> {
    // Index 0 is the `if` (or `elif`, on later iterations) keyword.
    let mut i = 1; let mut branches = Vec::new();

    loop {
        // Collect condition tokens up to `;`, newline, or `then`.
        let mut cond_tokens = Vec::new();
        while i < tokens.len()
            && tokens[i] != Token::Semicolon
            && tokens[i] != Token::Newline
            && tokens[i] != Token::Then
        {
            cond_tokens.push(tokens[i].clone());
            i += 1;
        }

        if i < tokens.len() && (tokens[i] == Token::Semicolon || tokens[i] == Token::Newline) {
            i += 1;
        }

        skip_newlines(tokens, &mut i);

        if i >= tokens.len() || tokens[i] != Token::Then {
            return Err("Expected then after if/elif condition".to_string());
        }
        i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Collect the then-body, counting nested if/fi so inner
        // else/elif/fi keywords don't end this branch early.
        let mut then_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    then_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        then_tokens.push(tokens[i].clone());
                    } else {
                        break; }
                }
                Token::Else | Token::Elif if depth == 0 => {
                    break; }
                Token::Newline => {
                    // Peek past a run of newlines for a terminator keyword.
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len()
                        && depth == 0
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        i = j; break;
                    }
                    then_tokens.push(tokens[i].clone());
                }
                _ => {
                    then_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        skip_newlines(tokens, &mut i);

        // An empty then-body degrades to a no-op (`true`).
        let then_ast = if then_tokens.is_empty() {
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&then_tokens)?
        };

        let condition = parse_slice(&cond_tokens)?;
        branches.push((Box::new(condition), Box::new(then_ast)));

        // An `elif` starts another condition/body pair; anything else
        // falls through to else/fi handling below.
        if i < tokens.len() && tokens[i] == Token::Elif {
            i += 1; } else {
            break;
        }
    }

    let else_ast = if i < tokens.len() && tokens[i] == Token::Else {
        i += 1; while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Collect the else-body up to the matching `fi`.
        let mut else_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    else_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        else_tokens.push(tokens[i].clone());
                    } else {
                        break; }
                }
                Token::Newline => {
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len() && depth == 0 && tokens[j] == Token::Fi {
                        i = j; break;
                    }
                    else_tokens.push(tokens[i].clone());
                }
                _ => {
                    else_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        let else_ast = if else_tokens.is_empty() {
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&else_tokens)?
        };

        Some(Box::new(else_ast))
    } else {
        None
    };

    if i >= tokens.len() || tokens[i] != Token::Fi {
        return Err("Expected fi".to_string());
    }

    Ok(Ast::If {
        branches,
        else_branch: else_ast,
    })
}
1211
/// Parses `case WORD in PATTERN[|PATTERN]...) COMMANDS ;; ... esac` into
/// `Ast::Case`. A branch whose only pattern is `*` becomes the `default`
/// branch instead of a regular case.
fn parse_case(tokens: &[Token]) -> Result<Ast, String> {
    // Index 0 is the `case` keyword; index 1 must be the subject word.
    let mut i = 1; if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
        return Err("Expected word after case".to_string());
    }
    let word = if let Token::Word(ref w) = tokens[i] {
        w.clone()
    } else {
        unreachable!()
    };
    i += 1;

    if i >= tokens.len() || tokens[i] != Token::In {
        return Err("Expected in after case word".to_string());
    }
    i += 1;

    let mut cases = Vec::new();
    let mut default = None;

    loop {
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        if i >= tokens.len() {
            return Err("Unexpected end in case statement".to_string());
        }

        if tokens[i] == Token::Esac {
            break;
        }

        // Collect the `|`-separated pattern list up to the closing `)`.
        let mut patterns = Vec::new();
        while i < tokens.len() && tokens[i] != Token::RightParen {
            if let Token::Word(ref p) = tokens[i] {
                // The lexer may keep `a|b` fused inside one word; split it.
                for pat in p.split('|') {
                    patterns.push(pat.to_string());
                }
            } else if tokens[i] == Token::Pipe {
                // Alternative-pattern separator; nothing to record.
            } else if tokens[i] == Token::Newline {
                // Tolerate line breaks within the pattern list.
            } else {
                return Err(format!("Expected pattern, found {:?}", tokens[i]));
            }
            i += 1;
        }

        if i >= tokens.len() || tokens[i] != Token::RightParen {
            return Err("Expected ) after patterns".to_string());
        }
        i += 1;

        // Branch body runs until `;;` or the closing `esac`.
        let mut commands_tokens = Vec::new();
        while i < tokens.len() && tokens[i] != Token::DoubleSemicolon && tokens[i] != Token::Esac {
            commands_tokens.push(tokens[i].clone());
            i += 1;
        }

        let commands_ast = parse_slice(&commands_tokens)?;

        if i >= tokens.len() {
            return Err("Unexpected end in case statement".to_string());
        }

        if tokens[i] == Token::DoubleSemicolon {
            i += 1;
            // A lone `*` pattern is the default branch.
            if patterns.len() == 1 && patterns[0] == "*" {
                default = Some(Box::new(commands_ast));
            } else {
                cases.push((patterns, commands_ast));
            }
        } else if tokens[i] == Token::Esac {
            // Final branch may omit `;;` before esac.
            if patterns.len() == 1 && patterns[0] == "*" {
                default = Some(Box::new(commands_ast));
            } else {
                cases.push((patterns, commands_ast));
            }
            break;
        } else {
            return Err("Expected ;; or esac after commands".to_string());
        }
    }

    Ok(Ast::Case {
        word,
        cases,
        default,
    })
}
1311
/// Parses `for VARIABLE in ITEMS...; do BODY; done` into `Ast::For`.
/// The `in` clause is mandatory in this grammar; an empty body becomes a
/// no-op (`true`).
fn parse_for(tokens: &[Token]) -> Result<Ast, String> {
    // Index 0 is the `for` keyword; index 1 must be the loop variable.
    let mut i = 1; if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
        return Err("Expected variable name after for".to_string());
    }
    let variable = if let Token::Word(ref v) = tokens[i] {
        v.clone()
    } else {
        unreachable!()
    };
    i += 1;

    if i >= tokens.len() || tokens[i] != Token::In {
        return Err("Expected 'in' after for variable".to_string());
    }
    i += 1;

    // Collect item words up to `do` (separators before `do` are allowed).
    let mut items = Vec::new();
    while i < tokens.len() {
        match &tokens[i] {
            Token::Do => break,
            Token::Semicolon | Token::Newline => {
                i += 1;
                if i < tokens.len() && tokens[i] == Token::Do {
                    break;
                }
            }
            Token::Word(word) => {
                items.push(word.clone());
                i += 1;
            }
            _ => {
                return Err(format!("Unexpected token in for items: {:?}", tokens[i]));
            }
        }
    }

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Do {
        return Err("Expected 'do' in for loop".to_string());
    }
    i += 1;

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    // Collect the body up to the matching `done`, counting nested loops.
    let mut body_tokens = Vec::new();
    let mut depth = 0;
    while i < tokens.len() {
        match &tokens[i] {
            Token::For => {
                depth += 1;
                body_tokens.push(tokens[i].clone());
            }
            Token::Done => {
                if depth > 0 {
                    depth -= 1;
                    body_tokens.push(tokens[i].clone());
                } else {
                    break; }
            }
            Token::Newline => {
                // Peek past newlines for the terminating `done`.
                let mut j = i + 1;
                while j < tokens.len() && tokens[j] == Token::Newline {
                    j += 1;
                }
                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
                    i = j; break;
                }
                body_tokens.push(tokens[i].clone());
            }
            _ => {
                body_tokens.push(tokens[i].clone());
            }
        }
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Done {
        return Err("Expected 'done' to close for loop".to_string());
    }

    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(&body_tokens)?
    };

    Ok(Ast::For {
        variable,
        items,
        body: Box::new(body_ast),
    })
}
1425
/// Parses `while CONDITION; do BODY; done` into `Ast::While`.
/// An empty body becomes a no-op (`true`); an empty condition is an error.
fn parse_while(tokens: &[Token]) -> Result<Ast, String> {
    // Index 0 is the `while` keyword; collect the condition up to `do`.
    let mut i = 1; let mut cond_tokens = Vec::new();
    while i < tokens.len() {
        match &tokens[i] {
            Token::Do => break,
            Token::Semicolon | Token::Newline => {
                i += 1;
                if i < tokens.len() && tokens[i] == Token::Do {
                    break;
                }
            }
            _ => {
                cond_tokens.push(tokens[i].clone());
                i += 1;
            }
        }
    }

    if cond_tokens.is_empty() {
        return Err("Expected condition after while".to_string());
    }

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Do {
        return Err("Expected 'do' in while loop".to_string());
    }
    i += 1;

    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    // Collect the body up to the matching `done`, counting nested loops.
    let mut body_tokens = Vec::new();
    let mut depth = 0;
    while i < tokens.len() {
        match &tokens[i] {
            Token::While | Token::For => {
                depth += 1;
                body_tokens.push(tokens[i].clone());
            }
            Token::Done => {
                if depth > 0 {
                    depth -= 1;
                    body_tokens.push(tokens[i].clone());
                } else {
                    break; }
            }
            Token::Newline => {
                // Peek past newlines for the terminating `done`.
                let mut j = i + 1;
                while j < tokens.len() && tokens[j] == Token::Newline {
                    j += 1;
                }
                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
                    i = j; break;
                }
                body_tokens.push(tokens[i].clone());
            }
            _ => {
                body_tokens.push(tokens[i].clone());
            }
        }
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Done {
        return Err("Expected 'done' to close while loop".to_string());
    }

    let condition_ast = parse_slice(&cond_tokens)?;

    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(&body_tokens)?
    };

    Ok(Ast::While {
        condition: Box::new(condition_ast),
        body: Box::new(body_ast),
    })
}
1525
/// Parses a function definition in either `name ( ) { BODY }` form or the
/// compact `name() { BODY }` form (parens fused into the first word) into
/// `Ast::FunctionDefinition`. Nested function definitions and compound
/// statements inside the body are skipped whole when locating the
/// function's closing brace.
fn parse_function_definition(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.len() < 2 {
        return Err("Function definition too short".to_string());
    }

    // Extract the function name, stripping a fused `(...)` suffix if any.
    let func_name = if let Token::Word(word) = &tokens[0] {
        if let Some(paren_pos) = word.find('(') {
            if word.ends_with(')') && paren_pos > 0 {
                word[..paren_pos].to_string()
            } else {
                word.clone()
            }
        } else {
            word.clone()
        }
    } else {
        return Err("Function name must be a word".to_string());
    };

    // Locate the opening brace: index 3 for `name ( ) {`, index 1 for the
    // compact `name() {` form.
    let brace_pos =
        if tokens.len() >= 4 && tokens[1] == Token::LeftParen && tokens[2] == Token::RightParen {
            if tokens[3] != Token::LeftBrace {
                return Err("Expected { after function name".to_string());
            }
            3
        } else if tokens.len() >= 2 && tokens[1] == Token::LeftBrace {
            1
        } else {
            return Err("Expected ( after function name or { for legacy format".to_string());
        };

    // Scan for the function's closing brace, skipping nested constructs.
    let mut brace_depth = 0;
    let mut body_end = 0;
    let mut found_closing = false;
    let mut i = brace_pos + 1;

    while i < tokens.len() {
        // A nested `name ( ) {` function definition: skip its entire body
        // so its braces don't perturb our depth counter.
        if i + 3 < tokens.len()
            && matches!(&tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            i += 4;
            let mut nested_depth = 1;
            while i < tokens.len() && nested_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => nested_depth += 1,
                    Token::RightBrace => nested_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            continue;
        }

        match &tokens[i] {
            Token::LeftBrace => {
                brace_depth += 1;
                i += 1;
            }
            Token::RightBrace => {
                if brace_depth == 0 {
                    // This closes the function itself.
                    body_end = i;
                    found_closing = true;
                    break;
                } else {
                    brace_depth -= 1;
                    i += 1;
                }
            }
            Token::If => {
                skip_to_matching_fi(tokens, &mut i);
            }
            Token::For | Token::While => {
                skip_to_matching_done(tokens, &mut i);
            }
            Token::Case => {
                skip_to_matching_esac(tokens, &mut i);
            }
            _ => {
                i += 1;
            }
        }
    }

    if !found_closing {
        return Err("Missing closing } for function definition".to_string());
    }

    // Body excludes both the opening and closing braces.
    let body_tokens = &tokens[brace_pos + 1..body_end];

    // An empty body degrades to a no-op (`true`).
    let body_ast = if body_tokens.is_empty() {
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(body_tokens)?
    };

    Ok(Ast::FunctionDefinition {
        name: func_name,
        body: Box::new(body_ast),
    })
}
1647
#[cfg(test)]
mod tests {
    use super::super::lexer::Token;
    use super::*;

    /// Shorthand for `Token::Word(..)` to keep token vectors compact.
    fn word(s: &str) -> Token {
        Token::Word(s.to_string())
    }

    /// Builds a command with the given args, no redirections, no compound.
    fn plain_cmd(args: &[&str]) -> ShellCommand {
        ShellCommand {
            args: args.iter().map(|a| a.to_string()).collect(),
            redirections: Vec::new(),
            compound: None,
        }
    }

    /// Builds a command carrying the given redirections.
    fn redir_cmd(args: &[&str], redirections: Vec<Redirection>) -> ShellCommand {
        ShellCommand {
            redirections,
            ..plain_cmd(args)
        }
    }

    /// Parses `<cmd> <token>` and asserts it yields exactly one redirection.
    fn assert_single_redirection(cmd_name: &str, token: Token, expected: Redirection) {
        let ast = parse(vec![word(cmd_name), token]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(&[cmd_name], vec![expected])])
        );
    }

    #[test]
    fn test_single_command() {
        let ast = parse(vec![word("ls")]).unwrap();
        assert_eq!(ast, Ast::Pipeline(vec![plain_cmd(&["ls"])]));
    }

    #[test]
    fn test_command_with_args() {
        let ast = parse(vec![word("ls"), word("-la")]).unwrap();
        assert_eq!(ast, Ast::Pipeline(vec![plain_cmd(&["ls", "-la"])]));
    }

    #[test]
    fn test_pipeline() {
        let ast = parse(vec![word("ls"), Token::Pipe, word("grep"), word("txt")]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![plain_cmd(&["ls"]), plain_cmd(&["grep", "txt"])])
        );
    }

    #[test]
    fn test_input_redirection() {
        let ast = parse(vec![word("cat"), Token::RedirIn, word("input.txt")]).unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["cat"],
                vec![Redirection::Input("input.txt".to_string())]
            )])
        );
    }

    #[test]
    fn test_output_redirection() {
        let ast = parse(vec![
            word("printf"),
            word("hello"),
            Token::RedirOut,
            word("output.txt"),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["printf", "hello"],
                vec![Redirection::Output("output.txt".to_string())]
            )])
        );
    }

    #[test]
    fn test_append_redirection() {
        let ast = parse(vec![
            word("printf"),
            word("hello"),
            Token::RedirAppend,
            word("output.txt"),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["printf", "hello"],
                vec![Redirection::Append("output.txt".to_string())]
            )])
        );
    }

    #[test]
    fn test_complex_pipeline_with_redirections() {
        let ast = parse(vec![
            word("cat"),
            Token::RedirIn,
            word("input.txt"),
            Token::Pipe,
            word("grep"),
            word("pattern"),
            Token::Pipe,
            word("sort"),
            Token::RedirOut,
            word("output.txt"),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![
                redir_cmd(&["cat"], vec![Redirection::Input("input.txt".to_string())]),
                plain_cmd(&["grep", "pattern"]),
                redir_cmd(
                    &["sort"],
                    vec![Redirection::Output("output.txt".to_string())]
                ),
            ])
        );
    }

    #[test]
    fn test_empty_tokens() {
        let result = parse(Vec::new());
        assert!(result.is_err());
        assert_eq!(result.unwrap_err(), "No commands found");
    }

    #[test]
    fn test_only_pipe() {
        let result = parse(vec![Token::Pipe]);
        assert!(result.is_err());
        assert_eq!(result.unwrap_err(), "No commands found");
    }

    #[test]
    fn test_redirection_without_file() {
        // A trailing redirection operator with no target is dropped.
        let ast = parse(vec![word("cat"), Token::RedirIn]).unwrap();
        assert_eq!(ast, Ast::Pipeline(vec![plain_cmd(&["cat"])]));
    }

    #[test]
    fn test_multiple_redirections() {
        let ast = parse(vec![
            word("cat"),
            Token::RedirIn,
            word("file1.txt"),
            Token::RedirOut,
            word("file2.txt"),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["cat"],
                vec![
                    Redirection::Input("file1.txt".to_string()),
                    Redirection::Output("file2.txt".to_string()),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_if() {
        let ast = parse(vec![
            Token::If,
            word("true"),
            Token::Semicolon,
            Token::Then,
            word("printf"),
            word("yes"),
            Token::Semicolon,
            Token::Fi,
        ])
        .unwrap();
        match ast {
            Ast::If {
                branches,
                else_branch,
            } => {
                assert_eq!(branches.len(), 1);
                let (condition, then_branch) = &branches[0];
                match &**condition {
                    Ast::Pipeline(cmds) => assert_eq!(cmds[0].args, vec!["true"]),
                    _ => panic!("condition not pipeline"),
                }
                match &**then_branch {
                    Ast::Pipeline(cmds) => assert_eq!(cmds[0].args, vec!["printf", "yes"]),
                    _ => panic!("then_branch not pipeline"),
                }
                assert!(else_branch.is_none());
            }
            _ => panic!("not if"),
        }
    }

    #[test]
    fn test_parse_if_elif() {
        let ast = parse(vec![
            Token::If,
            word("false"),
            Token::Semicolon,
            Token::Then,
            word("printf"),
            word("no"),
            Token::Semicolon,
            Token::Elif,
            word("true"),
            Token::Semicolon,
            Token::Then,
            word("printf"),
            word("yes"),
            Token::Semicolon,
            Token::Fi,
        ])
        .unwrap();
        match ast {
            Ast::If {
                branches,
                else_branch,
            } => {
                assert_eq!(branches.len(), 2);
                // Each branch pairs a condition pipeline with a body pipeline.
                let expected = vec![
                    ("false", vec!["printf", "no"]),
                    ("true", vec!["printf", "yes"]),
                ];
                for ((condition, body), (cond_word, body_args)) in
                    branches.iter().zip(expected)
                {
                    if let Ast::Pipeline(cmds) = &**condition {
                        assert_eq!(cmds[0].args, vec![cond_word]);
                    }
                    if let Ast::Pipeline(cmds) = &**body {
                        assert_eq!(cmds[0].args, body_args);
                    }
                }
                assert!(else_branch.is_none());
            }
            _ => panic!("not if"),
        }
    }

    #[test]
    fn test_parse_assignment() {
        match parse(vec![word("MY_VAR=test_value")]).unwrap() {
            Ast::Assignment { var, value } => {
                assert_eq!(var, "MY_VAR");
                assert_eq!(value, "test_value");
            }
            _ => panic!("not assignment"),
        }
    }

    #[test]
    fn test_parse_assignment_quoted() {
        match parse(vec![word("MY_VAR=hello world")]).unwrap() {
            Ast::Assignment { var, value } => {
                assert_eq!(var, "MY_VAR");
                assert_eq!(value, "hello world");
            }
            _ => panic!("not assignment"),
        }
    }

    #[test]
    fn test_parse_assignment_invalid() {
        // A name starting with a digit is not a valid variable, so the word
        // is treated as an ordinary command instead.
        match parse(vec![word("123VAR=value")]).unwrap() {
            Ast::Pipeline(cmds) => assert_eq!(cmds[0].args, vec!["123VAR=value"]),
            _ => panic!("should be parsed as pipeline"),
        }
    }

    #[test]
    fn test_parse_function_definition() {
        let tokens = vec![
            word("myfunc"),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            word("echo"),
            word("hello"),
            Token::RightBrace,
        ];
        match parse(tokens).unwrap() {
            Ast::FunctionDefinition { name, body } => {
                assert_eq!(name, "myfunc");
                match *body {
                    Ast::Pipeline(cmds) => assert_eq!(cmds[0].args, vec!["echo", "hello"]),
                    _ => panic!("function body should be a pipeline"),
                }
            }
            _ => panic!("should be parsed as function definition"),
        }
    }

    #[test]
    fn test_parse_function_definition_empty() {
        let tokens = vec![
            word("emptyfunc"),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            Token::RightBrace,
        ];
        match parse(tokens).unwrap() {
            Ast::FunctionDefinition { name, body } => {
                assert_eq!(name, "emptyfunc");
                // An empty body is materialized as the no-op `true` command.
                match *body {
                    Ast::Pipeline(cmds) => assert_eq!(cmds[0].args, vec!["true"]),
                    _ => panic!("function body should be a pipeline"),
                }
            }
            _ => panic!("should be parsed as function definition"),
        }
    }

    #[test]
    fn test_parse_function_definition_legacy_format() {
        // Legacy form: `name()` fused into a single word token.
        let tokens = vec![
            word("legacyfunc()"),
            Token::LeftBrace,
            word("echo"),
            word("hello"),
            Token::RightBrace,
        ];
        match parse(tokens).unwrap() {
            Ast::FunctionDefinition { name, body } => {
                assert_eq!(name, "legacyfunc");
                match *body {
                    Ast::Pipeline(cmds) => assert_eq!(cmds[0].args, vec!["echo", "hello"]),
                    _ => panic!("function body should be a pipeline"),
                }
            }
            _ => panic!("should be parsed as function definition"),
        }
    }

    #[test]
    fn test_parse_local_assignment() {
        match parse(vec![Token::Local, word("MY_VAR=test_value")]).unwrap() {
            Ast::LocalAssignment { var, value } => {
                assert_eq!(var, "MY_VAR");
                assert_eq!(value, "test_value");
            }
            _ => panic!("should be parsed as local assignment"),
        }
    }

    #[test]
    fn test_parse_local_assignment_separate_tokens() {
        match parse(vec![Token::Local, word("MY_VAR"), word("test_value")]).unwrap() {
            Ast::LocalAssignment { var, value } => {
                assert_eq!(var, "MY_VAR");
                assert_eq!(value, "test_value");
            }
            _ => panic!("should be parsed as local assignment"),
        }
    }

    #[test]
    fn test_parse_local_assignment_invalid_var_name() {
        // Unlike plain assignments, an invalid `local` variable name is an error.
        assert!(parse(vec![Token::Local, word("123VAR=value")]).is_err());
    }

    #[test]
    fn test_parse_here_document_redirection() {
        let ast = parse(vec![
            word("cat"),
            Token::RedirHereDoc("EOF".to_string(), false),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["cat"],
                vec![Redirection::HereDoc("EOF".to_string(), "false".to_string())]
            )])
        );
    }

    #[test]
    fn test_parse_here_string_redirection() {
        let ast = parse(vec![
            word("grep"),
            Token::RedirHereString("pattern".to_string()),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["grep"],
                vec![Redirection::HereString("pattern".to_string())]
            )])
        );
    }

    #[test]
    fn test_parse_mixed_redirections() {
        let ast = parse(vec![
            word("cat"),
            Token::RedirIn,
            word("file.txt"),
            Token::RedirHereString("fallback".to_string()),
            Token::RedirOut,
            word("output.txt"),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["cat"],
                vec![
                    Redirection::Input("file.txt".to_string()),
                    Redirection::HereString("fallback".to_string()),
                    Redirection::Output("output.txt".to_string()),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_fd_input_redirection() {
        assert_single_redirection(
            "command",
            Token::RedirectFdIn(3, "input.txt".to_string()),
            Redirection::FdInput(3, "input.txt".to_string()),
        );
    }

    #[test]
    fn test_parse_fd_output_redirection() {
        assert_single_redirection(
            "command",
            Token::RedirectFdOut(2, "errors.log".to_string()),
            Redirection::FdOutput(2, "errors.log".to_string()),
        );
    }

    #[test]
    fn test_parse_fd_append_redirection() {
        assert_single_redirection(
            "command",
            Token::RedirectFdAppend(2, "errors.log".to_string()),
            Redirection::FdAppend(2, "errors.log".to_string()),
        );
    }

    #[test]
    fn test_parse_fd_duplicate() {
        assert_single_redirection(
            "command",
            Token::RedirectFdDup(2, 1),
            Redirection::FdDuplicate(2, 1),
        );
    }

    #[test]
    fn test_parse_fd_close() {
        assert_single_redirection("command", Token::RedirectFdClose(2), Redirection::FdClose(2));
    }

    #[test]
    fn test_parse_fd_input_output() {
        assert_single_redirection(
            "command",
            Token::RedirectFdInOut(3, "file.txt".to_string()),
            Redirection::FdInputOutput(3, "file.txt".to_string()),
        );
    }

    #[test]
    fn test_parse_multiple_fd_redirections() {
        let ast = parse(vec![
            word("command"),
            Token::RedirectFdOut(2, "err.log".to_string()),
            Token::RedirectFdIn(3, "input.txt".to_string()),
            Token::RedirectFdAppend(4, "append.log".to_string()),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["command"],
                vec![
                    Redirection::FdOutput(2, "err.log".to_string()),
                    Redirection::FdInput(3, "input.txt".to_string()),
                    Redirection::FdAppend(4, "append.log".to_string()),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_fd_swap_pattern() {
        let ast = parse(vec![
            word("command"),
            Token::RedirectFdDup(3, 1),
            Token::RedirectFdDup(1, 2),
            Token::RedirectFdDup(2, 3),
            Token::RedirectFdClose(3),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["command"],
                vec![
                    Redirection::FdDuplicate(3, 1),
                    Redirection::FdDuplicate(1, 2),
                    Redirection::FdDuplicate(2, 3),
                    Redirection::FdClose(3),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_mixed_basic_and_fd_redirections() {
        let ast = parse(vec![
            word("command"),
            Token::RedirOut,
            word("output.txt"),
            Token::RedirectFdDup(2, 1),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["command"],
                vec![
                    Redirection::Output("output.txt".to_string()),
                    Redirection::FdDuplicate(2, 1),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_fd_redirection_ordering() {
        // Redirections must come out in the same order they appeared.
        let ast = parse(vec![
            word("command"),
            Token::RedirectFdOut(2, "first.log".to_string()),
            Token::RedirOut,
            word("second.txt"),
            Token::RedirectFdDup(2, 1),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![redir_cmd(
                &["command"],
                vec![
                    Redirection::FdOutput(2, "first.log".to_string()),
                    Redirection::Output("second.txt".to_string()),
                    Redirection::FdDuplicate(2, 1),
                ]
            )])
        );
    }

    #[test]
    fn test_parse_fd_redirection_with_pipe() {
        let ast = parse(vec![
            word("command"),
            Token::RedirectFdDup(2, 1),
            Token::Pipe,
            word("grep"),
            word("error"),
        ])
        .unwrap();
        assert_eq!(
            ast,
            Ast::Pipeline(vec![
                redir_cmd(&["command"], vec![Redirection::FdDuplicate(2, 1)]),
                plain_cmd(&["grep", "error"]),
            ])
        );
    }

    #[test]
    fn test_parse_all_fd_numbers() {
        // fd 0 on an input redirection.
        match parse(vec![word("cmd"), Token::RedirectFdIn(0, "file".to_string())]).unwrap() {
            Ast::Pipeline(cmds) => assert_eq!(
                cmds[0].redirections[0],
                Redirection::FdInput(0, "file".to_string())
            ),
            _ => panic!("Expected Pipeline"),
        }
        // A high fd number on an output redirection.
        match parse(vec![word("cmd"), Token::RedirectFdOut(9, "file".to_string())]).unwrap() {
            Ast::Pipeline(cmds) => assert_eq!(
                cmds[0].redirections[0],
                Redirection::FdOutput(9, "file".to_string())
            ),
            _ => panic!("Expected Pipeline"),
        }
    }
}