// rush_sh/parser.rs

1use super::lexer::Token;
2
/// Abstract syntax tree for parsed shell input.
///
/// Each variant corresponds to one shell construct; compound variants
/// box their sub-trees so the enum stays a fixed size.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Ast {
    /// Commands connected by `|`; a single command is a one-element pipeline.
    Pipeline(Vec<ShellCommand>),
    /// Commands separated by `;` or newlines, parsed in order.
    Sequence(Vec<Ast>),
    /// Shell variable assignment: `VAR=value`.
    Assignment {
        var: String,
        value: String,
    },
    /// Function-scoped assignment: `local VAR=value` / `local VAR value`.
    LocalAssignment {
        var: String,
        value: String,
    },
    /// `if`/`elif`/`else`/`fi` conditional.
    If {
        branches: Vec<(Box<Ast>, Box<Ast>)>, // (condition, then_branch)
        else_branch: Option<Box<Ast>>,
    },
    /// `case WORD in ... esac` statement.
    Case {
        word: String,
        /// Each arm as (patterns, body); patterns are matched against `word`.
        cases: Vec<(Vec<String>, Ast)>,
        /// Body of the default (`*`) arm, if present.
        default: Option<Box<Ast>>,
    },
    /// `for VARIABLE in ITEMS ... done` loop.
    For {
        variable: String,
        items: Vec<String>,
        body: Box<Ast>,
    },
    /// `while CONDITION ... done` loop.
    While {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// `name() { body }` function definition.
    FunctionDefinition {
        name: String,
        body: Box<Ast>,
    },
    /// Invocation of a shell function by name with its arguments.
    FunctionCall {
        name: String,
        args: Vec<String>,
    },
    /// `return [value]`; `None` means no explicit value (defaults to 0).
    Return {
        value: Option<String>,
    },
    /// Short-circuit AND of two commands: `left && right`.
    And {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// Short-circuit OR of two commands: `left || right`.
    Or {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// Subshell execution: (commands)
    /// Commands execute in an isolated copy of the shell state
    Subshell {
        body: Box<Ast>,
    },
}
58
/// Represents a single redirection operation
///
/// Variants without an explicit fd apply to the shell's default
/// descriptor; the `Fd*` variants carry the descriptor number parsed
/// from `N<`-style syntax.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Redirection {
    /// Input from file: < file or N< file
    Input(String),
    /// Output to file: > file or N> file
    Output(String),
    /// Append to file: >> file or N>> file
    Append(String),
    /// Input from file with explicit fd: N< file
    FdInput(i32, String),
    /// Output to file with explicit fd: N> file
    FdOutput(i32, String),
    /// Append to file with explicit fd: N>> file
    FdAppend(i32, String),
    /// Duplicate file descriptor: N>&M or N<&M
    FdDuplicate(i32, i32),
    /// Close file descriptor: N>&- or N<&-
    FdClose(i32),
    /// Open file for read/write: N<> file
    FdInputOutput(i32, String),
    /// Here-document: << EOF ... EOF
    /// Fields are (delimiter, stringified quoted-flag) — the second field
    /// is built from the lexer's `RedirHereDoc` flag via `to_string()`.
    HereDoc(String, String),
    /// Here-string: <<< "string"
    HereString(String),
}
85
/// A single command within a pipeline.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ShellCommand {
    /// Command name followed by its arguments; may be empty when
    /// `compound` is set (the parser builds such commands for
    /// redirected subshells).
    pub args: Vec<String>,
    /// All redirections in order of appearance (for POSIX left-to-right processing)
    pub redirections: Vec<Redirection>,
    /// Optional compound command (subshell, command group, etc.)
    /// If present, this takes precedence over args
    pub compound: Option<Box<Ast>>,
}
95
/// Reports whether `name` can begin a shell variable name.
///
/// Only the first character is inspected: it must be alphabetic or an
/// underscore. The empty string is rejected. Subsequent characters are
/// deliberately not validated here.
fn is_valid_variable_name(name: &str) -> bool {
    match name.chars().next() {
        Some(first) => first.is_alphabetic() || first == '_',
        None => false,
    }
}
105
106/// Helper function to create an empty body AST (a no-op that returns success).
107/// Used for empty then/else branches, empty loop bodies, and empty function bodies.
108fn create_empty_body_ast() -> Ast {
109    Ast::Pipeline(vec![ShellCommand {
110        args: vec!["true".to_string()],
111        redirections: Vec::new(),
112        compound: None,
113    }])
114}
115
116/// Helper function to skip consecutive newline tokens.
117/// Updates the index to point to the first non-newline token.
118fn skip_newlines(tokens: &[Token], i: &mut usize) {
119    while *i < tokens.len() && tokens[*i] == Token::Newline {
120        *i += 1;
121    }
122}
123
124/// Helper function to skip to the matching 'fi' token for an 'if' statement.
125/// Handles nested if statements correctly.
126fn skip_to_matching_fi(tokens: &[Token], i: &mut usize) {
127    let mut if_depth = 1;
128    *i += 1; // Move past the 'if' token
129    while *i < tokens.len() && if_depth > 0 {
130        match tokens[*i] {
131            Token::If => if_depth += 1,
132            Token::Fi => if_depth -= 1,
133            _ => {}
134        }
135        *i += 1;
136    }
137}
138
139/// Helper function to skip to the matching 'done' token for a 'for' or 'while' loop.
140/// Handles nested loops correctly.
141fn skip_to_matching_done(tokens: &[Token], i: &mut usize) {
142    let mut loop_depth = 1;
143    *i += 1; // Move past the 'for' or 'while' token
144    while *i < tokens.len() && loop_depth > 0 {
145        match tokens[*i] {
146            Token::For | Token::While => loop_depth += 1,
147            Token::Done => loop_depth -= 1,
148            _ => {}
149        }
150        *i += 1;
151    }
152}
153
154/// Helper function to skip to the matching 'esac' token for a 'case' statement.
155fn skip_to_matching_esac(tokens: &[Token], i: &mut usize) {
156    *i += 1; // Move past the 'case' token
157    while *i < tokens.len() {
158        if tokens[*i] == Token::Esac {
159            *i += 1;
160            break;
161        }
162        *i += 1;
163    }
164}
165
166pub fn parse(tokens: Vec<Token>) -> Result<Ast, String> {
167    // First, try to detect and parse function definitions that span multiple lines
168    if tokens.len() >= 4
169        && let (Token::Word(_), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
170            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
171    {
172        // Look for the matching RightBrace
173        // Start from the opening brace (token 3) and find its match
174        let mut brace_depth = 1; // We've already seen the opening brace at position 3
175        let mut function_end = tokens.len();
176        let mut j = 4; // Start after the opening brace
177
178        while j < tokens.len() {
179            match &tokens[j] {
180                Token::LeftBrace => {
181                    brace_depth += 1;
182                    j += 1;
183                }
184                Token::RightBrace => {
185                    brace_depth -= 1;
186                    if brace_depth == 0 {
187                        function_end = j + 1; // Include the closing brace
188                        break;
189                    }
190                    j += 1;
191                }
192                Token::If => {
193                    // Skip to matching fi to avoid confusion
194                    let mut if_depth = 1;
195                    j += 1;
196                    while j < tokens.len() && if_depth > 0 {
197                        match tokens[j] {
198                            Token::If => if_depth += 1,
199                            Token::Fi => if_depth -= 1,
200                            _ => {}
201                        }
202                        j += 1;
203                    }
204                }
205                Token::For | Token::While => {
206                    // Skip to matching done
207                    let mut for_depth = 1;
208                    j += 1;
209                    while j < tokens.len() && for_depth > 0 {
210                        match tokens[j] {
211                            Token::For | Token::While => for_depth += 1,
212                            Token::Done => for_depth -= 1,
213                            _ => {}
214                        }
215                        j += 1;
216                    }
217                }
218                Token::Case => {
219                    // Skip to matching esac
220                    j += 1;
221                    while j < tokens.len() {
222                        if tokens[j] == Token::Esac {
223                            j += 1;
224                            break;
225                        }
226                        j += 1;
227                    }
228                }
229                _ => {
230                    j += 1;
231                }
232            }
233        }
234
235        if brace_depth == 0 && function_end <= tokens.len() {
236            // We found the complete function definition
237            let function_tokens = &tokens[0..function_end];
238            let remaining_tokens = &tokens[function_end..];
239
240            let function_ast = parse_function_definition(function_tokens)?;
241
242            return if remaining_tokens.is_empty() {
243                Ok(function_ast)
244            } else {
245                // There are more commands after the function
246                let remaining_ast = parse_commands_sequentially(remaining_tokens)?;
247                Ok(Ast::Sequence(vec![function_ast, remaining_ast]))
248            };
249        }
250    }
251
252    // Also check for legacy function definition format (word with parentheses followed by brace)
253    if tokens.len() >= 2
254        && let Token::Word(ref word) = tokens[0]
255        && let Some(paren_pos) = word.find('(')
256        && word.ends_with(')')
257        && paren_pos > 0
258        && tokens[1] == Token::LeftBrace
259    {
260        return parse_function_definition(&tokens);
261    }
262
263    // Fall back to normal parsing
264    parse_commands_sequentially(&tokens)
265}
266
/// Parses one command's worth of tokens (already split on separators)
/// into a single AST node.
///
/// Special forms are tried in a fixed order — assignments, `local`,
/// `return`, compound statements (`if`, `case`, `for`, `while`), and
/// function definitions — before falling back to `parse_pipeline` for
/// ordinary (possibly piped) commands.
fn parse_slice(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.is_empty() {
        return Err("No commands found".to_string());
    }

    // Check if it's an assignment
    if tokens.len() == 2 {
        // Check for pattern: VAR= VALUE
        // NOTE(review): `eq_pos < var_eq.len()` is always true when
        // `find` succeeds, so this branch matches every `eq_pos > 0`
        // case — including the trailing-'=' form re-checked below.
        if let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
            && let Some(eq_pos) = var_eq.find('=')
            && eq_pos > 0
            && eq_pos < var_eq.len()
        {
            let var = var_eq[..eq_pos].to_string();
            // Splice the remainder after '=' together with the second word.
            let full_value = format!("{}{}", &var_eq[eq_pos + 1..], value);
            // Basic validation: variable name should start with letter or underscore
            if is_valid_variable_name(&var) {
                return Ok(Ast::Assignment {
                    var,
                    value: full_value,
                });
            }
        }
    }

    // Check if it's an assignment (VAR= VALUE)
    // NOTE(review): effectively unreachable — the previous check already
    // covers `eq_pos == var_eq.len() - 1` and produces the same result
    // (empty prefix + value). Kept as-is; candidate for removal.
    if tokens.len() == 2
        && let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos == var_eq.len() - 1
    {
        let var = var_eq[..eq_pos].to_string();
        // Basic validation: variable name should start with letter or underscore
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment {
                var,
                value: value.clone(),
            });
        }
    }

    // Check if it's a local assignment (local VAR VALUE or local VAR= VALUE)
    if tokens.len() == 3
        && let (Token::Local, Token::Word(var), Token::Word(value)) =
            (&tokens[0], &tokens[1], &tokens[2])
    {
        // Strip trailing = if present (handles "local var= value" format)
        let clean_var = if var.ends_with('=') {
            &var[..var.len() - 1]
        } else {
            var
        };
        // Basic validation: variable name should start with letter or underscore
        if is_valid_variable_name(clean_var) {
            return Ok(Ast::LocalAssignment {
                var: clean_var.to_string(),
                value: value.clone(),
            });
        }
    }

    // Check if it's a return statement
    if !tokens.is_empty()
        && tokens.len() <= 2
        && let Token::Return = &tokens[0]
    {
        if tokens.len() == 1 {
            // return (with no value, defaults to 0)
            return Ok(Ast::Return { value: None });
        } else if let Token::Word(word) = &tokens[1] {
            // return value
            return Ok(Ast::Return {
                value: Some(word.clone()),
            });
        }
    }

    // Check if it's a local assignment (local VAR=VALUE)
    if tokens.len() == 2
        && let (Token::Local, Token::Word(var_eq)) = (&tokens[0], &tokens[1])
        && let Some(eq_pos) = var_eq.find('=')
        && eq_pos > 0
        && eq_pos < var_eq.len()
    {
        let var = var_eq[..eq_pos].to_string();
        let value = var_eq[eq_pos + 1..].to_string();
        // Basic validation: variable name should start with letter or underscore
        if is_valid_variable_name(&var) {
            return Ok(Ast::LocalAssignment { var, value });
        }
    }

    // Check if it's an assignment (single token with =)
    if tokens.len() == 1
        && let Token::Word(ref word) = tokens[0]
        && let Some(eq_pos) = word.find('=')
        && eq_pos > 0
        && eq_pos < word.len()
    {
        let var = word[..eq_pos].to_string();
        let value = word[eq_pos + 1..].to_string();
        // Basic validation: variable name should start with letter or underscore
        if is_valid_variable_name(&var) {
            return Ok(Ast::Assignment { var, value });
        }
    }

    // Check if it's an if statement
    if let Token::If = tokens[0] {
        return parse_if(tokens);
    }

    // Check if it's a case statement
    if let Token::Case = tokens[0] {
        return parse_case(tokens);
    }

    // Check if it's a for loop
    if let Token::For = tokens[0] {
        return parse_for(tokens);
    }

    // Check if it's a while loop
    if let Token::While = tokens[0] {
        return parse_while(tokens);
    }

    // Check if it's a function definition
    // Pattern: Word LeftParen RightParen LeftBrace
    if tokens.len() >= 4
        && let (Token::Word(word), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
        && is_valid_variable_name(word)
    {
        return parse_function_definition(tokens);
    }

    // Also check for function definition with parentheses in the word (legacy support)
    if tokens.len() >= 2
        && let Token::Word(ref word) = tokens[0]
        && let Some(paren_pos) = word.find('(')
        && word.ends_with(')')
        && paren_pos > 0
    {
        let func_name = &word[..paren_pos];
        if is_valid_variable_name(func_name) && tokens[1] == Token::LeftBrace {
            return parse_function_definition(tokens);
        }
    }

    // Check if it's a function call (word followed by arguments)
    // For Phase 1, we'll parse as regular pipeline and handle function calls in executor

    // Otherwise, parse as pipeline
    parse_pipeline(tokens)
}
424
/// Splits a token stream into commands at newlines/`;`/`&&`/`||` and
/// parses each piece, combining the results.
///
/// Compound constructs — subshells `(...)`, `if..fi`, `for`/`while..done`,
/// `case..esac`, and `name() { ... }` function definitions — are scanned
/// to their closing delimiter first, so separators inside them are not
/// treated as command boundaries. `&&`/`||` hand the remainder of the
/// stream to a recursive call, which makes those operators
/// right-associative here.
///
/// Returns the single parsed node, or `Ast::Sequence` for multiple
/// commands; errors if no command is found.
fn parse_commands_sequentially(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 0;
    let mut commands = Vec::new();

    while i < tokens.len() {
        // Skip whitespace and comments
        while i < tokens.len() {
            match &tokens[i] {
                Token::Newline => {
                    i += 1;
                }
                Token::Word(word) if word.starts_with('#') => {
                    // Skip comment line
                    while i < tokens.len() && tokens[i] != Token::Newline {
                        i += 1;
                    }
                    if i < tokens.len() {
                        i += 1; // Skip the newline
                    }
                }
                _ => break,
            }
        }

        if i >= tokens.len() {
            break;
        }

        // Find the end of this command
        let start = i;

        // Check for subshell: LeftParen at start of command
        // Must check BEFORE function definition to avoid ambiguity
        if tokens[i] == Token::LeftParen {
            // This is a subshell - find the matching RightParen
            let mut paren_depth = 1;
            let mut j = i + 1;

            while j < tokens.len() && paren_depth > 0 {
                match tokens[j] {
                    Token::LeftParen => paren_depth += 1,
                    Token::RightParen => paren_depth -= 1,
                    _ => {}
                }
                j += 1;
            }

            if paren_depth != 0 {
                return Err("Unmatched parenthesis in subshell".to_string());
            }

            // Extract subshell body (tokens between parens)
            let subshell_tokens = &tokens[i + 1..j - 1];

            // Parse the subshell body recursively
            // Empty subshells are not allowed
            // NOTE(review): empty subshells error out here, yet
            // `parse_pipeline` treats them as a no-op (`true`) — confirm
            // which behavior is intended.
            let body_ast = if subshell_tokens.is_empty() {
                return Err("Empty subshell".to_string());
            } else {
                parse_commands_sequentially(subshell_tokens)?
            };

            let mut subshell_ast = Ast::Subshell {
                body: Box::new(body_ast),
            };

            i = j; // Move past the closing paren

            // Check for redirections after subshell
            // Note: a bare-redirection token (e.g. `>`) not followed by a
            // Word falls through to the `_ => break` arm on the next
            // iteration and is silently dropped.
            let mut redirections = Vec::new();
            while i < tokens.len() {
                match &tokens[i] {
                    Token::RedirOut => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Output(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirIn => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Input(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirAppend => {
                        i += 1;
                        if i < tokens.len() {
                            if let Token::Word(file) = &tokens[i] {
                                redirections.push(Redirection::Append(file.clone()));
                                i += 1;
                            }
                        }
                    }
                    Token::RedirectFdOut(fd, file) => {
                        redirections.push(Redirection::FdOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdIn(fd, file) => {
                        redirections.push(Redirection::FdInput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdAppend(fd, file) => {
                        redirections.push(Redirection::FdAppend(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirectFdDup(from_fd, to_fd) => {
                        redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
                        i += 1;
                    }
                    Token::RedirectFdClose(fd) => {
                        redirections.push(Redirection::FdClose(*fd));
                        i += 1;
                    }
                    Token::RedirectFdInOut(fd, file) => {
                        redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
                        i += 1;
                    }
                    Token::RedirHereDoc(delimiter, quoted) => {
                        redirections
                            .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
                        i += 1;
                    }
                    Token::RedirHereString(content) => {
                        redirections.push(Redirection::HereString(content.clone()));
                        i += 1;
                    }
                    _ => break,
                }
            }

            // If redirections found, wrap subshell in a pipeline with redirections
            if !redirections.is_empty() {
                subshell_ast = Ast::Pipeline(vec![ShellCommand {
                    args: Vec::new(),
                    redirections,
                    compound: Some(Box::new(subshell_ast)),
                }]);
            }

            // Check if this is part of a pipeline
            if i < tokens.len() && tokens[i] == Token::Pipe {
                // This subshell is part of a pipeline - parse the entire line as a pipeline
                let pipeline_ast = parse_pipeline(&tokens[start..])?;
                commands.push(pipeline_ast);
                break; // We've consumed the rest of the tokens
            }

            // Handle operators after subshell (&&, ||, ;, newline)
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1; // Skip the operator

                // Skip any newlines after the operator
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                // Parse the right side recursively
                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                // Create And or Or node
                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(subshell_ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(subshell_ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break; // We've consumed the rest of the tokens
            } else {
                commands.push(subshell_ast);
            }

            // Skip semicolon or newline after subshell
            if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
                i += 1;
            }
            continue;
        }

        // Special handling for compound commands
        if tokens[i] == Token::If {
            // For if statements, find the matching fi
            // (depth starts at 0 and counts the `if` at position `i`)
            let mut depth = 0;
            while i < tokens.len() {
                match tokens[i] {
                    Token::If => depth += 1,
                    Token::Fi => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1; // Include the fi
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }

            // If we didn't find a matching fi, include all remaining tokens
            // This handles the case where the if statement is incomplete
        } else if tokens[i] == Token::For {
            // For for loops, find the matching done
            let mut depth = 1; // Start at 1 because we're already inside the for
            i += 1; // Move past the 'for' token
            while i < tokens.len() {
                match tokens[i] {
                    Token::For | Token::While => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1; // Include the done
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::While {
            // For while loops, find the matching done
            let mut depth = 1; // Start at 1 because we're already inside the while
            i += 1; // Move past the 'while' token
            while i < tokens.len() {
                match tokens[i] {
                    Token::While | Token::For => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1; // Include the done
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::Case {
            // For case statements, find the matching esac
            while i < tokens.len() {
                if tokens[i] == Token::Esac {
                    i += 1; // Include the esac
                    break;
                }
                i += 1;
            }
        } else if i + 3 < tokens.len()
            && matches!(tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            // This is a function definition - find the matching closing brace
            let mut brace_depth = 1;
            i += 4; // Skip to after opening brace
            while i < tokens.len() && brace_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => brace_depth += 1,
                    Token::RightBrace => brace_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
        } else {
            // For simple commands, stop at newline, semicolon, &&, or ||
            // But check if the next token after newline is a control flow keyword
            while i < tokens.len() {
                if tokens[i] == Token::Newline
                    || tokens[i] == Token::Semicolon
                    || tokens[i] == Token::And
                    || tokens[i] == Token::Or
                {
                    // Look ahead to see if the next non-newline token is else/elif/fi
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    // If we find else/elif/fi, this is likely part of an if statement that wasn't properly detected
                    if j < tokens.len()
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        // Skip this token and continue - it will be handled as a parse error
                        i = j + 1;
                        continue;
                    }
                    break;
                }
                i += 1;
            }
        }

        // Parse the command span [start, i) found above.
        let command_tokens = &tokens[start..i];
        if !command_tokens.is_empty() {
            // Don't try to parse orphaned else/elif/fi tokens
            if command_tokens.len() == 1 {
                match command_tokens[0] {
                    Token::Else | Token::Elif | Token::Fi => {
                        // Skip orphaned control flow tokens
                        if i < tokens.len()
                            && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
                        {
                            i += 1;
                        }
                        continue;
                    }
                    _ => {}
                }
            }

            let ast = parse_slice(command_tokens)?;

            // Check if the next token is && or ||
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1; // Skip the operator

                // Skip any newlines after the operator
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                // Parse the right side recursively
                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                // Create And or Or node
                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break; // We've consumed the rest of the tokens
            } else {
                commands.push(ast);
            }
        }

        // Consume a single trailing separator before the next command.
        if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
            i += 1;
        }
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    // Unwrap single commands; wrap multiples in a Sequence.
    if commands.len() == 1 {
        Ok(commands.into_iter().next().unwrap())
    } else {
        Ok(Ast::Sequence(commands))
    }
}
800
801fn parse_pipeline(tokens: &[Token]) -> Result<Ast, String> {
802    let mut commands = Vec::new();
803    let mut current_cmd = ShellCommand::default();
804
805    let mut i = 0;
806    while i < tokens.len() {
807        let token = &tokens[i];
808        match token {
809            Token::LeftParen => {
810                // Start of subshell in pipeline
811                // Find matching RightParen
812                let mut paren_depth = 1;
813                let mut j = i + 1;
814
815                while j < tokens.len() && paren_depth > 0 {
816                    match tokens[j] {
817                        Token::LeftParen => paren_depth += 1,
818                        Token::RightParen => paren_depth -= 1,
819                        _ => {}
820                    }
821                    j += 1;
822                }
823
824                if paren_depth != 0 {
825                    return Err("Unmatched parenthesis in pipeline".to_string());
826                }
827
828                // Parse subshell body
829                let subshell_tokens = &tokens[i + 1..j - 1];
830
831                // Empty subshells are valid and equivalent to 'true'
832                let body_ast = if subshell_tokens.is_empty() {
833                    create_empty_body_ast()
834                } else {
835                    parse_commands_sequentially(subshell_tokens)?
836                };
837
838                // Create ShellCommand with compound subshell
839                current_cmd.compound = Some(Box::new(Ast::Subshell {
840                    body: Box::new(body_ast),
841                }));
842
843                i = j; // Move past closing paren
844
845                // Check for redirections after subshell
846                while i < tokens.len() {
847                    match &tokens[i] {
848                        Token::RedirOut => {
849                            i += 1;
850                            if i < tokens.len() {
851                                if let Token::Word(file) = &tokens[i] {
852                                    current_cmd
853                                        .redirections
854                                        .push(Redirection::Output(file.clone()));
855                                    i += 1;
856                                }
857                            }
858                        }
859                        Token::RedirIn => {
860                            i += 1;
861                            if i < tokens.len() {
862                                if let Token::Word(file) = &tokens[i] {
863                                    current_cmd
864                                        .redirections
865                                        .push(Redirection::Input(file.clone()));
866                                    i += 1;
867                                }
868                            }
869                        }
870                        Token::RedirAppend => {
871                            i += 1;
872                            if i < tokens.len() {
873                                if let Token::Word(file) = &tokens[i] {
874                                    current_cmd
875                                        .redirections
876                                        .push(Redirection::Append(file.clone()));
877                                    i += 1;
878                                }
879                            }
880                        }
881                        Token::RedirectFdOut(fd, file) => {
882                            current_cmd
883                                .redirections
884                                .push(Redirection::FdOutput(*fd, file.clone()));
885                            i += 1;
886                        }
887                        Token::RedirectFdIn(fd, file) => {
888                            current_cmd
889                                .redirections
890                                .push(Redirection::FdInput(*fd, file.clone()));
891                            i += 1;
892                        }
893                        Token::RedirectFdAppend(fd, file) => {
894                            current_cmd
895                                .redirections
896                                .push(Redirection::FdAppend(*fd, file.clone()));
897                            i += 1;
898                        }
899                        Token::RedirectFdDup(from_fd, to_fd) => {
900                            current_cmd
901                                .redirections
902                                .push(Redirection::FdDuplicate(*from_fd, *to_fd));
903                            i += 1;
904                        }
905                        Token::RedirectFdClose(fd) => {
906                            current_cmd.redirections.push(Redirection::FdClose(*fd));
907                            i += 1;
908                        }
909                        Token::RedirectFdInOut(fd, file) => {
910                            current_cmd
911                                .redirections
912                                .push(Redirection::FdInputOutput(*fd, file.clone()));
913                            i += 1;
914                        }
915                        Token::RedirHereDoc(delimiter, quoted) => {
916                            current_cmd
917                                .redirections
918                                .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
919                            i += 1;
920                        }
921                        Token::RedirHereString(content) => {
922                            current_cmd
923                                .redirections
924                                .push(Redirection::HereString(content.clone()));
925                            i += 1;
926                        }
927                        Token::Pipe => {
928                            // End of this pipeline stage
929                            break;
930                        }
931                        _ => break,
932                    }
933                }
934
935                // Push the command with subshell
936                commands.push(current_cmd.clone());
937                current_cmd = ShellCommand::default();
938
939                continue;
940            }
941            Token::Word(word) => {
942                current_cmd.args.push(word.clone());
943            }
944            Token::Pipe => {
945                if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
946                    commands.push(current_cmd.clone());
947                    current_cmd = ShellCommand::default();
948                }
949            }
950            // Basic redirections (backward compatible)
951            Token::RedirIn => {
952                i += 1;
953                if i < tokens.len()
954                    && let Token::Word(ref file) = tokens[i]
955                {
956                    current_cmd
957                        .redirections
958                        .push(Redirection::Input(file.clone()));
959                }
960            }
961            Token::RedirOut => {
962                i += 1;
963                if i < tokens.len()
964                    && let Token::Word(ref file) = tokens[i]
965                {
966                    current_cmd
967                        .redirections
968                        .push(Redirection::Output(file.clone()));
969                }
970            }
971            Token::RedirAppend => {
972                i += 1;
973                if i < tokens.len()
974                    && let Token::Word(ref file) = tokens[i]
975                {
976                    current_cmd
977                        .redirections
978                        .push(Redirection::Append(file.clone()));
979                }
980            }
981            Token::RedirHereDoc(delimiter, quoted) => {
982                // Store delimiter and quoted flag - content will be read by executor
983                current_cmd
984                    .redirections
985                    .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
986            }
987            Token::RedirHereString(content) => {
988                current_cmd
989                    .redirections
990                    .push(Redirection::HereString(content.clone()));
991            }
992            // File descriptor redirections
993            Token::RedirectFdIn(fd, file) => {
994                current_cmd
995                    .redirections
996                    .push(Redirection::FdInput(*fd, file.clone()));
997            }
998            Token::RedirectFdOut(fd, file) => {
999                current_cmd
1000                    .redirections
1001                    .push(Redirection::FdOutput(*fd, file.clone()));
1002            }
1003            Token::RedirectFdAppend(fd, file) => {
1004                current_cmd
1005                    .redirections
1006                    .push(Redirection::FdAppend(*fd, file.clone()));
1007            }
1008            Token::RedirectFdDup(from_fd, to_fd) => {
1009                current_cmd
1010                    .redirections
1011                    .push(Redirection::FdDuplicate(*from_fd, *to_fd));
1012            }
1013            Token::RedirectFdClose(fd) => {
1014                current_cmd.redirections.push(Redirection::FdClose(*fd));
1015            }
1016            Token::RedirectFdInOut(fd, file) => {
1017                current_cmd
1018                    .redirections
1019                    .push(Redirection::FdInputOutput(*fd, file.clone()));
1020            }
1021            Token::RightParen => {
1022                // Check if this looks like a function call pattern: Word LeftParen ... RightParen
1023                // If so, treat it as a function call even if the function doesn't exist
1024                if !current_cmd.args.is_empty()
1025                    && i > 0
1026                    && let Token::LeftParen = tokens[i - 1]
1027                {
1028                    // This looks like a function call pattern, treat as function call
1029                    // For now, we'll handle this in the executor by checking if it's a function
1030                    // If not a function, the executor will handle the error gracefully
1031                    break;
1032                }
1033                return Err("Unexpected ) in pipeline".to_string());
1034            }
1035            Token::Newline => {
1036                // Newlines are handled at the sequence level, skip them in pipelines
1037                i += 1;
1038                continue;
1039            }
1040            Token::Do
1041            | Token::Done
1042            | Token::Then
1043            | Token::Else
1044            | Token::Elif
1045            | Token::Fi
1046            | Token::Esac => {
1047                // These are control flow keywords that should be handled at a higher level
1048                // If we encounter them here, it means we've reached the end of the current command
1049                break;
1050            }
1051            _ => {
1052                return Err(format!("Unexpected token in pipeline: {:?}", token));
1053            }
1054        }
1055        i += 1;
1056    }
1057
1058    if !current_cmd.args.is_empty() {
1059        commands.push(current_cmd);
1060    }
1061
1062    if commands.is_empty() {
1063        return Err("No commands found".to_string());
1064    }
1065
1066    Ok(Ast::Pipeline(commands))
1067}
1068
/// Parses `if <cond>; then <body> [elif <cond>; then <body>]... [else <body>] fi`
/// into `Ast::If`. Each (condition, then-branch) pair becomes one entry in
/// `branches`; an optional `else` body becomes `else_branch`.
///
/// Nested `if`/`fi` pairs inside a branch are tracked with a depth counter so
/// an inner `fi`/`else`/`elif` is not mistaken for one belonging to this
/// statement. Returns an error when `then` or the closing `fi` is missing.
fn parse_if(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 1; // Skip 'if'
    let mut branches = Vec::new();

    // Each loop iteration parses one `<cond> then <body>` pair (the initial
    // `if` arm, then any number of `elif` arms).
    loop {
        // Parse condition until ; or newline or then
        let mut cond_tokens = Vec::new();
        while i < tokens.len()
            && tokens[i] != Token::Semicolon
            && tokens[i] != Token::Newline
            && tokens[i] != Token::Then
        {
            cond_tokens.push(tokens[i].clone());
            i += 1;
        }

        // Skip ; or newline if present
        if i < tokens.len() && (tokens[i] == Token::Semicolon || tokens[i] == Token::Newline) {
            i += 1;
        }

        // Skip any additional newlines
        skip_newlines(tokens, &mut i);

        if i >= tokens.len() || tokens[i] != Token::Then {
            return Err("Expected then after if/elif condition".to_string());
        }
        i += 1; // Skip then

        // Skip any newlines after then
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Parse then branch - collect all tokens until we hit else/elif/fi
        // belonging to THIS if; `depth` tracks nested if/fi pairs so their
        // keywords are kept in the branch body instead of ending it.
        let mut then_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    then_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        then_tokens.push(tokens[i].clone());
                    } else {
                        break; // This fi closes our if
                    }
                }
                Token::Else | Token::Elif if depth == 0 => {
                    break; // These belong to our if, not nested ones
                }
                Token::Newline => {
                    // Skip newlines but check what comes after: if the next
                    // real token ends this branch, jump straight to it.
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len()
                        && depth == 0
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        i = j; // Skip to the keyword
                        break;
                    }
                    // Otherwise it's just a newline in the middle of commands
                    then_tokens.push(tokens[i].clone());
                }
                _ => {
                    then_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        // Skip any trailing newlines
        skip_newlines(tokens, &mut i);

        let then_ast = if then_tokens.is_empty() {
            // Empty then branch - create a no-op
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&then_tokens)?
        };

        let condition = parse_slice(&cond_tokens)?;
        branches.push((Box::new(condition), Box::new(then_ast)));

        // Check next: another `elif` restarts the loop; anything else falls
        // through to optional-else / fi handling.
        if i < tokens.len() && tokens[i] == Token::Elif {
            i += 1; // Skip elif, continue loop
        } else {
            break;
        }
    }

    let else_ast = if i < tokens.len() && tokens[i] == Token::Else {
        i += 1; // Skip else

        // Skip any newlines after else
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Collect the else body up to OUR closing fi, again tracking nested
        // if/fi depth.
        let mut else_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    else_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        else_tokens.push(tokens[i].clone());
                    } else {
                        break; // This fi closes our if
                    }
                }
                Token::Newline => {
                    // Skip newlines but check what comes after
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len() && depth == 0 && tokens[j] == Token::Fi {
                        i = j; // Skip to fi
                        break;
                    }
                    // Otherwise it's just a newline in the middle of commands
                    else_tokens.push(tokens[i].clone());
                }
                _ => {
                    else_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        let else_ast = if else_tokens.is_empty() {
            // Empty else branch - create a no-op
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&else_tokens)?
        };

        Some(Box::new(else_ast))
    } else {
        None
    };

    // Both branch loops leave `i` on the closing fi; anything else is an error.
    if i >= tokens.len() || tokens[i] != Token::Fi {
        return Err("Expected fi".to_string());
    }

    Ok(Ast::If {
        branches,
        else_branch: else_ast,
    })
}
1235
/// Parses `case <word> in <patterns>) <commands> ;; ... esac` into `Ast::Case`.
///
/// Pattern alternatives may arrive either as separate `Pipe`-separated words
/// or embedded in a single word (e.g. `a|b`), which is split on `|` here. An
/// arm whose pattern list is exactly `*` is stored as the `default` arm; the
/// final arm may omit the trailing `;;`.
fn parse_case(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 1; // Skip 'case'

    // Parse word
    if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
        return Err("Expected word after case".to_string());
    }
    let word = if let Token::Word(ref w) = tokens[i] {
        w.clone()
    } else {
        unreachable!()
    };
    i += 1;

    if i >= tokens.len() || tokens[i] != Token::In {
        return Err("Expected in after case word".to_string());
    }
    i += 1;

    let mut cases = Vec::new();
    let mut default = None;

    // Each loop iteration parses one `patterns) commands ;;` arm.
    loop {
        // Skip newlines
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        if i >= tokens.len() {
            return Err("Unexpected end in case statement".to_string());
        }

        if tokens[i] == Token::Esac {
            break;
        }

        // Parse patterns (everything up to the closing `)` of the arm header)
        let mut patterns = Vec::new();
        while i < tokens.len() && tokens[i] != Token::RightParen {
            if let Token::Word(ref p) = tokens[i] {
                // Split pattern on |
                for pat in p.split('|') {
                    patterns.push(pat.to_string());
                }
            } else if tokens[i] == Token::Pipe {
                // Skip | separator
            } else if tokens[i] == Token::Newline {
                // Skip newlines in patterns
            } else {
                return Err(format!("Expected pattern, found {:?}", tokens[i]));
            }
            i += 1;
        }

        if i >= tokens.len() || tokens[i] != Token::RightParen {
            return Err("Expected ) after patterns".to_string());
        }
        i += 1;

        // Parse commands (the arm body runs until `;;` or `esac`)
        let mut commands_tokens = Vec::new();
        while i < tokens.len() && tokens[i] != Token::DoubleSemicolon && tokens[i] != Token::Esac {
            commands_tokens.push(tokens[i].clone());
            i += 1;
        }

        let commands_ast = parse_slice(&commands_tokens)?;

        if i >= tokens.len() {
            return Err("Unexpected end in case statement".to_string());
        }

        if tokens[i] == Token::DoubleSemicolon {
            i += 1;
            // Check if this is the default case (*)
            // NOTE(review): a `*` arm is hoisted into `default` regardless of
            // its position in the arm list — confirm the executor tries the
            // regular arms before the default, or first-match order may differ
            // from POSIX when `*)` is not last.
            if patterns.len() == 1 && patterns[0] == "*" {
                default = Some(Box::new(commands_ast));
            } else {
                cases.push((patterns, commands_ast));
            }
        } else if tokens[i] == Token::Esac {
            // Last case without ;;
            if patterns.len() == 1 && patterns[0] == "*" {
                default = Some(Box::new(commands_ast));
            } else {
                cases.push((patterns, commands_ast));
            }
            break;
        } else {
            return Err("Expected ;; or esac after commands".to_string());
        }
    }

    Ok(Ast::Case {
        word,
        cases,
        default,
    })
}
1335
1336fn parse_for(tokens: &[Token]) -> Result<Ast, String> {
1337    let mut i = 1; // Skip 'for'
1338
1339    // Parse variable name
1340    if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
1341        return Err("Expected variable name after for".to_string());
1342    }
1343    let variable = if let Token::Word(ref v) = tokens[i] {
1344        v.clone()
1345    } else {
1346        unreachable!()
1347    };
1348    i += 1;
1349
1350    // Expect 'in'
1351    if i >= tokens.len() || tokens[i] != Token::In {
1352        return Err("Expected 'in' after for variable".to_string());
1353    }
1354    i += 1;
1355
1356    // Parse items until we hit 'do' or semicolon/newline
1357    let mut items = Vec::new();
1358    while i < tokens.len() {
1359        match &tokens[i] {
1360            Token::Do => break,
1361            Token::Semicolon | Token::Newline => {
1362                i += 1;
1363                // Check if next token is 'do'
1364                if i < tokens.len() && tokens[i] == Token::Do {
1365                    break;
1366                }
1367            }
1368            Token::Word(word) => {
1369                items.push(word.clone());
1370                i += 1;
1371            }
1372            _ => {
1373                return Err(format!("Unexpected token in for items: {:?}", tokens[i]));
1374            }
1375        }
1376    }
1377
1378    // Skip any newlines before 'do'
1379    while i < tokens.len() && tokens[i] == Token::Newline {
1380        i += 1;
1381    }
1382
1383    // Expect 'do'
1384    if i >= tokens.len() || tokens[i] != Token::Do {
1385        return Err("Expected 'do' in for loop".to_string());
1386    }
1387    i += 1;
1388
1389    // Skip any newlines after 'do'
1390    while i < tokens.len() && tokens[i] == Token::Newline {
1391        i += 1;
1392    }
1393
1394    // Parse body until 'done'
1395    let mut body_tokens = Vec::new();
1396    let mut depth = 0;
1397    while i < tokens.len() {
1398        match &tokens[i] {
1399            Token::For => {
1400                depth += 1;
1401                body_tokens.push(tokens[i].clone());
1402            }
1403            Token::Done => {
1404                if depth > 0 {
1405                    depth -= 1;
1406                    body_tokens.push(tokens[i].clone());
1407                } else {
1408                    break; // This done closes our for loop
1409                }
1410            }
1411            Token::Newline => {
1412                // Skip newlines but check what comes after
1413                let mut j = i + 1;
1414                while j < tokens.len() && tokens[j] == Token::Newline {
1415                    j += 1;
1416                }
1417                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
1418                    i = j; // Skip to done
1419                    break;
1420                }
1421                // Otherwise it's just a newline in the middle of commands
1422                body_tokens.push(tokens[i].clone());
1423            }
1424            _ => {
1425                body_tokens.push(tokens[i].clone());
1426            }
1427        }
1428        i += 1;
1429    }
1430
1431    if i >= tokens.len() || tokens[i] != Token::Done {
1432        return Err("Expected 'done' to close for loop".to_string());
1433    }
1434
1435    // Parse the body
1436    let body_ast = if body_tokens.is_empty() {
1437        // Empty body - create a no-op
1438        create_empty_body_ast()
1439    } else {
1440        parse_commands_sequentially(&body_tokens)?
1441    };
1442
1443    Ok(Ast::For {
1444        variable,
1445        items,
1446        body: Box::new(body_ast),
1447    })
1448}
1449
1450fn parse_while(tokens: &[Token]) -> Result<Ast, String> {
1451    let mut i = 1; // Skip 'while'
1452
1453    // Parse condition until we hit 'do' or semicolon/newline
1454    let mut cond_tokens = Vec::new();
1455    while i < tokens.len() {
1456        match &tokens[i] {
1457            Token::Do => break,
1458            Token::Semicolon | Token::Newline => {
1459                i += 1;
1460                // Check if next token is 'do'
1461                if i < tokens.len() && tokens[i] == Token::Do {
1462                    break;
1463                }
1464            }
1465            _ => {
1466                cond_tokens.push(tokens[i].clone());
1467                i += 1;
1468            }
1469        }
1470    }
1471
1472    if cond_tokens.is_empty() {
1473        return Err("Expected condition after while".to_string());
1474    }
1475
1476    // Skip any newlines before 'do'
1477    while i < tokens.len() && tokens[i] == Token::Newline {
1478        i += 1;
1479    }
1480
1481    // Expect 'do'
1482    if i >= tokens.len() || tokens[i] != Token::Do {
1483        return Err("Expected 'do' in while loop".to_string());
1484    }
1485    i += 1;
1486
1487    // Skip any newlines after 'do'
1488    while i < tokens.len() && tokens[i] == Token::Newline {
1489        i += 1;
1490    }
1491
1492    // Parse body until 'done'
1493    let mut body_tokens = Vec::new();
1494    let mut depth = 0;
1495    while i < tokens.len() {
1496        match &tokens[i] {
1497            Token::While | Token::For => {
1498                depth += 1;
1499                body_tokens.push(tokens[i].clone());
1500            }
1501            Token::Done => {
1502                if depth > 0 {
1503                    depth -= 1;
1504                    body_tokens.push(tokens[i].clone());
1505                } else {
1506                    break; // This done closes our while loop
1507                }
1508            }
1509            Token::Newline => {
1510                // Skip newlines but check what comes after
1511                let mut j = i + 1;
1512                while j < tokens.len() && tokens[j] == Token::Newline {
1513                    j += 1;
1514                }
1515                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
1516                    i = j; // Skip to done
1517                    break;
1518                }
1519                // Otherwise it's just a newline in the middle of commands
1520                body_tokens.push(tokens[i].clone());
1521            }
1522            _ => {
1523                body_tokens.push(tokens[i].clone());
1524            }
1525        }
1526        i += 1;
1527    }
1528
1529    if i >= tokens.len() || tokens[i] != Token::Done {
1530        return Err("Expected 'done' to close while loop".to_string());
1531    }
1532
1533    // Parse the condition
1534    let condition_ast = parse_slice(&cond_tokens)?;
1535
1536    // Parse the body
1537    let body_ast = if body_tokens.is_empty() {
1538        // Empty body - create a no-op
1539        create_empty_body_ast()
1540    } else {
1541        parse_commands_sequentially(&body_tokens)?
1542    };
1543
1544    Ok(Ast::While {
1545        condition: Box::new(condition_ast),
1546        body: Box::new(body_ast),
1547    })
1548}
1549
/// Parses a function definition into `Ast::FunctionDefinition`.
///
/// Two token shapes are accepted:
/// * standard: `Word("name") LeftParen RightParen LeftBrace ... RightBrace`
/// * legacy:   `Word("name()") LeftBrace ... RightBrace`, where the lexer
///   folded the parentheses into the name word (the `()` is stripped here).
///
/// The body scan tracks brace depth and skips wholesale over nested function
/// definitions and `if`/loop/`case` constructs (via the `skip_to_matching_*`
/// helpers) so braces inside them cannot be mistaken for the function's own
/// closing brace.
fn parse_function_definition(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.len() < 2 {
        return Err("Function definition too short".to_string());
    }

    // Extract function name from first token
    let func_name = if let Token::Word(word) = &tokens[0] {
        // Handle legacy format with parentheses in the word (e.g., "legacyfunc()")
        if let Some(paren_pos) = word.find('(') {
            if word.ends_with(')') && paren_pos > 0 {
                word[..paren_pos].to_string()
            } else {
                word.clone()
            }
        } else {
            word.clone()
        }
    } else {
        return Err("Function name must be a word".to_string());
    };

    // Find the opening brace: index 3 for the standard `name ( ) {` shape,
    // index 1 for the legacy `name() {` shape (parens inside the word).
    let brace_pos =
        if tokens.len() >= 4 && tokens[1] == Token::LeftParen && tokens[2] == Token::RightParen {
            // Standard format: name() {
            if tokens[3] != Token::LeftBrace {
                return Err("Expected { after function name".to_string());
            }
            3
        } else if tokens.len() >= 2 && tokens[1] == Token::LeftBrace {
            // Legacy format: name() {
            1
        } else {
            return Err("Expected ( after function name or { for legacy format".to_string());
        };

    // Find the matching closing brace, accounting for nested function definitions and control structures
    let mut brace_depth = 0;
    let mut body_end = 0;
    let mut found_closing = false;
    let mut i = brace_pos + 1;

    while i < tokens.len() {
        // Check if this is the start of a nested function definition
        // Pattern: Word LeftParen RightParen LeftBrace
        if i + 3 < tokens.len()
            && matches!(&tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            // This is a nested function - skip over it entirely
            // Skip to after the opening brace of nested function
            i += 4;
            let mut nested_depth = 1;
            while i < tokens.len() && nested_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => nested_depth += 1,
                    Token::RightBrace => nested_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            // Don't increment i again - continue from current position
            continue;
        }

        match &tokens[i] {
            Token::LeftBrace => {
                brace_depth += 1;
                i += 1;
            }
            Token::RightBrace => {
                if brace_depth == 0 {
                    // This is our matching closing brace
                    body_end = i;
                    found_closing = true;
                    break;
                } else {
                    brace_depth -= 1;
                    i += 1;
                }
            }
            Token::If => {
                // Skip to matching fi (helper advances `i` past the construct)
                skip_to_matching_fi(tokens, &mut i);
            }
            Token::For | Token::While => {
                // Skip to matching done
                skip_to_matching_done(tokens, &mut i);
            }
            Token::Case => {
                // Skip to matching esac
                skip_to_matching_esac(tokens, &mut i);
            }
            _ => {
                i += 1;
            }
        }
    }

    if !found_closing {
        return Err("Missing closing } for function definition".to_string());
    }

    // Extract body tokens (everything between { and })
    let body_tokens = &tokens[brace_pos + 1..body_end];

    // Parse the function body using the existing parser
    let body_ast = if body_tokens.is_empty() {
        // Empty function body
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(body_tokens)?
    };

    Ok(Ast::FunctionDefinition {
        name: func_name,
        body: Box::new(body_ast),
    })
}
1671
1672#[cfg(test)]
1673mod tests {
1674    use super::super::lexer::Token;
1675    use super::*;
1676
1677    #[test]
1678    fn test_single_command() {
1679        let tokens = vec![Token::Word("ls".to_string())];
1680        let result = parse(tokens).unwrap();
1681        assert_eq!(
1682            result,
1683            Ast::Pipeline(vec![ShellCommand {
1684                args: vec!["ls".to_string()],
1685                redirections: Vec::new(),
1686                compound: None,
1687            }])
1688        );
1689    }
1690
1691    #[test]
1692    fn test_command_with_args() {
1693        let tokens = vec![
1694            Token::Word("ls".to_string()),
1695            Token::Word("-la".to_string()),
1696        ];
1697        let result = parse(tokens).unwrap();
1698        assert_eq!(
1699            result,
1700            Ast::Pipeline(vec![ShellCommand {
1701                args: vec!["ls".to_string(), "-la".to_string()],
1702                redirections: Vec::new(),
1703                compound: None,
1704            }])
1705        );
1706    }
1707
1708    #[test]
1709    fn test_pipeline() {
1710        let tokens = vec![
1711            Token::Word("ls".to_string()),
1712            Token::Pipe,
1713            Token::Word("grep".to_string()),
1714            Token::Word("txt".to_string()),
1715        ];
1716        let result = parse(tokens).unwrap();
1717        assert_eq!(
1718            result,
1719            Ast::Pipeline(vec![
1720                ShellCommand {
1721                    args: vec!["ls".to_string()],
1722                    redirections: Vec::new(),
1723                    compound: None,
1724                },
1725                ShellCommand {
1726                    args: vec!["grep".to_string(), "txt".to_string()],
1727                    redirections: Vec::new(),
1728                    compound: None,
1729                }
1730            ])
1731        );
1732    }
1733
1734    #[test]
1735    fn test_input_redirection() {
1736        let tokens = vec![
1737            Token::Word("cat".to_string()),
1738            Token::RedirIn,
1739            Token::Word("input.txt".to_string()),
1740        ];
1741        let result = parse(tokens).unwrap();
1742        assert_eq!(
1743            result,
1744            Ast::Pipeline(vec![ShellCommand {
1745                args: vec!["cat".to_string()],
1746                redirections: vec![Redirection::Input("input.txt".to_string())],
1747                compound: None,
1748            }])
1749        );
1750    }
1751
1752    #[test]
1753    fn test_output_redirection() {
1754        let tokens = vec![
1755            Token::Word("printf".to_string()),
1756            Token::Word("hello".to_string()),
1757            Token::RedirOut,
1758            Token::Word("output.txt".to_string()),
1759        ];
1760        let result = parse(tokens).unwrap();
1761        assert_eq!(
1762            result,
1763            Ast::Pipeline(vec![ShellCommand {
1764                args: vec!["printf".to_string(), "hello".to_string()],
1765                compound: None,
1766                redirections: vec![Redirection::Output("output.txt".to_string())],
1767            }])
1768        );
1769    }
1770
1771    #[test]
1772    fn test_append_redirection() {
1773        let tokens = vec![
1774            Token::Word("printf".to_string()),
1775            Token::Word("hello".to_string()),
1776            Token::RedirAppend,
1777            Token::Word("output.txt".to_string()),
1778        ];
1779        let result = parse(tokens).unwrap();
1780        assert_eq!(
1781            result,
1782            Ast::Pipeline(vec![ShellCommand {
1783                args: vec!["printf".to_string(), "hello".to_string()],
1784                compound: None,
1785                redirections: vec![Redirection::Append("output.txt".to_string())],
1786            }])
1787        );
1788    }
1789
1790    #[test]
1791    fn test_complex_pipeline_with_redirections() {
1792        let tokens = vec![
1793            Token::Word("cat".to_string()),
1794            Token::RedirIn,
1795            Token::Word("input.txt".to_string()),
1796            Token::Pipe,
1797            Token::Word("grep".to_string()),
1798            Token::Word("pattern".to_string()),
1799            Token::Pipe,
1800            Token::Word("sort".to_string()),
1801            Token::RedirOut,
1802            Token::Word("output.txt".to_string()),
1803        ];
1804        let result = parse(tokens).unwrap();
1805        assert_eq!(
1806            result,
1807            Ast::Pipeline(vec![
1808                ShellCommand {
1809                    args: vec!["cat".to_string()],
1810                    compound: None,
1811                    redirections: vec![Redirection::Input("input.txt".to_string())],
1812                },
1813                ShellCommand {
1814                    args: vec!["grep".to_string(), "pattern".to_string()],
1815                    compound: None,
1816                    redirections: Vec::new(),
1817                },
1818                ShellCommand {
1819                    args: vec!["sort".to_string()],
1820                    redirections: vec![Redirection::Output("output.txt".to_string())],
1821                    compound: None,
1822                }
1823            ])
1824        );
1825    }
1826
1827    #[test]
1828    fn test_empty_tokens() {
1829        let tokens = vec![];
1830        let result = parse(tokens);
1831        assert!(result.is_err());
1832        assert_eq!(result.unwrap_err(), "No commands found");
1833    }
1834
1835    #[test]
1836    fn test_only_pipe() {
1837        let tokens = vec![Token::Pipe];
1838        let result = parse(tokens);
1839        assert!(result.is_err());
1840        assert_eq!(result.unwrap_err(), "No commands found");
1841    }
1842
1843    #[test]
1844    fn test_redirection_without_file() {
1845        // Parser doesn't check for missing file, just skips if no token after
1846        let tokens = vec![Token::Word("cat".to_string()), Token::RedirIn];
1847        let result = parse(tokens).unwrap();
1848        assert_eq!(
1849            result,
1850            Ast::Pipeline(vec![ShellCommand {
1851                args: vec!["cat".to_string()],
1852                compound: None,
1853                redirections: Vec::new(),
1854            }])
1855        );
1856    }
1857
1858    #[test]
1859    fn test_multiple_redirections() {
1860        let tokens = vec![
1861            Token::Word("cat".to_string()),
1862            Token::RedirIn,
1863            Token::Word("file1.txt".to_string()),
1864            Token::RedirOut,
1865            Token::Word("file2.txt".to_string()),
1866        ];
1867        let result = parse(tokens).unwrap();
1868        assert_eq!(
1869            result,
1870            Ast::Pipeline(vec![ShellCommand {
1871                args: vec!["cat".to_string()],
1872                redirections: vec![
1873                    Redirection::Input("file1.txt".to_string()),
1874                    Redirection::Output("file2.txt".to_string()),
1875                ],
1876                compound: None,
1877            }])
1878        );
1879    }
1880
1881    #[test]
1882    fn test_parse_if() {
1883        let tokens = vec![
1884            Token::If,
1885            Token::Word("true".to_string()),
1886            Token::Semicolon,
1887            Token::Then,
1888            Token::Word("printf".to_string()),
1889            Token::Word("yes".to_string()),
1890            Token::Semicolon,
1891            Token::Fi,
1892        ];
1893        let result = parse(tokens).unwrap();
1894        if let Ast::If {
1895            branches,
1896            else_branch,
1897        } = result
1898        {
1899            assert_eq!(branches.len(), 1);
1900            let (condition, then_branch) = &branches[0];
1901            if let Ast::Pipeline(cmds) = &**condition {
1902                assert_eq!(cmds[0].args, vec!["true"]);
1903            } else {
1904                panic!("condition not pipeline");
1905            }
1906            if let Ast::Pipeline(cmds) = &**then_branch {
1907                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
1908            } else {
1909                panic!("then_branch not pipeline");
1910            }
1911            assert!(else_branch.is_none());
1912        } else {
1913            panic!("not if");
1914        }
1915    }
1916
1917    #[test]
1918    fn test_parse_if_elif() {
1919        let tokens = vec![
1920            Token::If,
1921            Token::Word("false".to_string()),
1922            Token::Semicolon,
1923            Token::Then,
1924            Token::Word("printf".to_string()),
1925            Token::Word("no".to_string()),
1926            Token::Semicolon,
1927            Token::Elif,
1928            Token::Word("true".to_string()),
1929            Token::Semicolon,
1930            Token::Then,
1931            Token::Word("printf".to_string()),
1932            Token::Word("yes".to_string()),
1933            Token::Semicolon,
1934            Token::Fi,
1935        ];
1936        let result = parse(tokens).unwrap();
1937        if let Ast::If {
1938            branches,
1939            else_branch,
1940        } = result
1941        {
1942            assert_eq!(branches.len(), 2);
1943            // First branch: false -> printf no
1944            let (condition1, then1) = &branches[0];
1945            if let Ast::Pipeline(cmds) = &**condition1 {
1946                assert_eq!(cmds[0].args, vec!["false"]);
1947            }
1948            if let Ast::Pipeline(cmds) = &**then1 {
1949                assert_eq!(cmds[0].args, vec!["printf", "no"]);
1950            }
1951            // Second branch: true -> printf yes
1952            let (condition2, then2) = &branches[1];
1953            if let Ast::Pipeline(cmds) = &**condition2 {
1954                assert_eq!(cmds[0].args, vec!["true"]);
1955            }
1956            if let Ast::Pipeline(cmds) = &**then2 {
1957                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
1958            }
1959            assert!(else_branch.is_none());
1960        } else {
1961            panic!("not if");
1962        }
1963    }
1964
1965    #[test]
1966    fn test_parse_assignment() {
1967        let tokens = vec![Token::Word("MY_VAR=test_value".to_string())];
1968        let result = parse(tokens).unwrap();
1969        if let Ast::Assignment { var, value } = result {
1970            assert_eq!(var, "MY_VAR");
1971            assert_eq!(value, "test_value");
1972        } else {
1973            panic!("not assignment");
1974        }
1975    }
1976
1977    #[test]
1978    fn test_parse_assignment_quoted() {
1979        let tokens = vec![Token::Word("MY_VAR=hello world".to_string())];
1980        let result = parse(tokens).unwrap();
1981        if let Ast::Assignment { var, value } = result {
1982            assert_eq!(var, "MY_VAR");
1983            assert_eq!(value, "hello world");
1984        } else {
1985            panic!("not assignment");
1986        }
1987    }
1988
1989    #[test]
1990    fn test_parse_assignment_invalid() {
1991        // Variable name starting with number should not be parsed as assignment
1992        let tokens = vec![Token::Word("123VAR=value".to_string())];
1993        let result = parse(tokens).unwrap();
1994        if let Ast::Pipeline(cmds) = result {
1995            assert_eq!(cmds[0].args, vec!["123VAR=value"]);
1996        } else {
1997            panic!("should be parsed as pipeline");
1998        }
1999    }
2000
2001    #[test]
2002    fn test_parse_function_definition() {
2003        let tokens = vec![
2004            Token::Word("myfunc".to_string()),
2005            Token::LeftParen,
2006            Token::RightParen,
2007            Token::LeftBrace,
2008            Token::Word("echo".to_string()),
2009            Token::Word("hello".to_string()),
2010            Token::RightBrace,
2011        ];
2012        let result = parse(tokens).unwrap();
2013        if let Ast::FunctionDefinition { name, body } = result {
2014            assert_eq!(name, "myfunc");
2015            // Body should be a pipeline with echo hello
2016            if let Ast::Pipeline(cmds) = *body {
2017                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
2018            } else {
2019                panic!("function body should be a pipeline");
2020            }
2021        } else {
2022            panic!("should be parsed as function definition");
2023        }
2024    }
2025
2026    #[test]
2027    fn test_parse_function_definition_empty() {
2028        let tokens = vec![
2029            Token::Word("emptyfunc".to_string()),
2030            Token::LeftParen,
2031            Token::RightParen,
2032            Token::LeftBrace,
2033            Token::RightBrace,
2034        ];
2035        let result = parse(tokens).unwrap();
2036        if let Ast::FunctionDefinition { name, body } = result {
2037            assert_eq!(name, "emptyfunc");
2038            // Empty body should default to true command
2039            if let Ast::Pipeline(cmds) = *body {
2040                assert_eq!(cmds[0].args, vec!["true"]);
2041            } else {
2042                panic!("function body should be a pipeline");
2043            }
2044        } else {
2045            panic!("should be parsed as function definition");
2046        }
2047    }
2048
2049    #[test]
2050    fn test_parse_function_definition_legacy_format() {
2051        // Test backward compatibility with parentheses in the function name
2052        let tokens = vec![
2053            Token::Word("legacyfunc()".to_string()),
2054            Token::LeftBrace,
2055            Token::Word("echo".to_string()),
2056            Token::Word("hello".to_string()),
2057            Token::RightBrace,
2058        ];
2059        let result = parse(tokens).unwrap();
2060        if let Ast::FunctionDefinition { name, body } = result {
2061            assert_eq!(name, "legacyfunc");
2062            // Body should be a pipeline with echo hello
2063            if let Ast::Pipeline(cmds) = *body {
2064                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
2065            } else {
2066                panic!("function body should be a pipeline");
2067            }
2068        } else {
2069            panic!("should be parsed as function definition");
2070        }
2071    }
2072
2073    #[test]
2074    fn test_parse_local_assignment() {
2075        let tokens = vec![Token::Local, Token::Word("MY_VAR=test_value".to_string())];
2076        let result = parse(tokens).unwrap();
2077        if let Ast::LocalAssignment { var, value } = result {
2078            assert_eq!(var, "MY_VAR");
2079            assert_eq!(value, "test_value");
2080        } else {
2081            panic!("should be parsed as local assignment");
2082        }
2083    }
2084
2085    #[test]
2086    fn test_parse_local_assignment_separate_tokens() {
2087        let tokens = vec![
2088            Token::Local,
2089            Token::Word("MY_VAR".to_string()),
2090            Token::Word("test_value".to_string()),
2091        ];
2092        let result = parse(tokens).unwrap();
2093        if let Ast::LocalAssignment { var, value } = result {
2094            assert_eq!(var, "MY_VAR");
2095            assert_eq!(value, "test_value");
2096        } else {
2097            panic!("should be parsed as local assignment");
2098        }
2099    }
2100
2101    #[test]
2102    fn test_parse_local_assignment_invalid_var_name() {
2103        // Variable name starting with number should not be parsed as local assignment
2104        let tokens = vec![Token::Local, Token::Word("123VAR=value".to_string())];
2105        let result = parse(tokens);
2106        // Should return an error since 123VAR is not a valid variable name
2107        assert!(result.is_err());
2108    }
2109
    #[test]
    fn test_parse_here_document_redirection() {
        // cat <<EOF — the lexer token carries (delimiter, strip_tabs: bool).
        // NOTE(review): the parser is expected to store the bool flag as the
        // String "false" inside Redirection::HereDoc — confirm this textual
        // conversion is intentional rather than a lossy to_string() of the flag.
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirHereDoc("EOF".to_string(), false),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                redirections: vec![Redirection::HereDoc("EOF".to_string(), "false".to_string())],
                compound: None,
            }])
        );
    }

    #[test]
    fn test_parse_here_string_redirection() {
        // grep <<< pattern — a here-string becomes a HereString redirection
        // attached to the command, with the payload preserved verbatim.
        let tokens = vec![
            Token::Word("grep".to_string()),
            Token::RedirHereString("pattern".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["grep".to_string()],
                compound: None,
                redirections: vec![Redirection::HereString("pattern".to_string())],
            }])
        );
    }
2143
2144    #[test]
2145    fn test_parse_mixed_redirections() {
2146        let tokens = vec![
2147            Token::Word("cat".to_string()),
2148            Token::RedirIn,
2149            Token::Word("file.txt".to_string()),
2150            Token::RedirHereString("fallback".to_string()),
2151            Token::RedirOut,
2152            Token::Word("output.txt".to_string()),
2153        ];
2154        let result = parse(tokens).unwrap();
2155        assert_eq!(
2156            result,
2157            Ast::Pipeline(vec![ShellCommand {
2158                args: vec!["cat".to_string()],
2159                compound: None,
2160                redirections: vec![
2161                    Redirection::Input("file.txt".to_string()),
2162                    Redirection::HereString("fallback".to_string()),
2163                    Redirection::Output("output.txt".to_string()),
2164                ],
2165            }])
2166        );
2167    }
2168
2169    // ===== File Descriptor Redirection Tests =====
2170
2171    #[test]
2172    fn test_parse_fd_input_redirection() {
2173        let tokens = vec![
2174            Token::Word("command".to_string()),
2175            Token::RedirectFdIn(3, "input.txt".to_string()),
2176        ];
2177        let result = parse(tokens).unwrap();
2178        assert_eq!(
2179            result,
2180            Ast::Pipeline(vec![ShellCommand {
2181                args: vec!["command".to_string()],
2182                redirections: vec![Redirection::FdInput(3, "input.txt".to_string())],
2183                compound: None,
2184            }])
2185        );
2186    }
2187
2188    #[test]
2189    fn test_parse_fd_output_redirection() {
2190        let tokens = vec![
2191            Token::Word("command".to_string()),
2192            Token::RedirectFdOut(2, "errors.log".to_string()),
2193        ];
2194        let result = parse(tokens).unwrap();
2195        assert_eq!(
2196            result,
2197            Ast::Pipeline(vec![ShellCommand {
2198                args: vec!["command".to_string()],
2199                compound: None,
2200                redirections: vec![Redirection::FdOutput(2, "errors.log".to_string())],
2201            }])
2202        );
2203    }
2204
2205    #[test]
2206    fn test_parse_fd_append_redirection() {
2207        let tokens = vec![
2208            Token::Word("command".to_string()),
2209            Token::RedirectFdAppend(2, "errors.log".to_string()),
2210        ];
2211        let result = parse(tokens).unwrap();
2212        assert_eq!(
2213            result,
2214            Ast::Pipeline(vec![ShellCommand {
2215                args: vec!["command".to_string()],
2216                compound: None,
2217                redirections: vec![Redirection::FdAppend(2, "errors.log".to_string())],
2218            }])
2219        );
2220    }
2221
2222    #[test]
2223    fn test_parse_fd_duplicate() {
2224        let tokens = vec![
2225            Token::Word("command".to_string()),
2226            Token::RedirectFdDup(2, 1),
2227        ];
2228        let result = parse(tokens).unwrap();
2229        assert_eq!(
2230            result,
2231            Ast::Pipeline(vec![ShellCommand {
2232                args: vec!["command".to_string()],
2233                compound: None,
2234                redirections: vec![Redirection::FdDuplicate(2, 1)],
2235            }])
2236        );
2237    }
2238
2239    #[test]
2240    fn test_parse_fd_close() {
2241        let tokens = vec![
2242            Token::Word("command".to_string()),
2243            Token::RedirectFdClose(2),
2244        ];
2245        let result = parse(tokens).unwrap();
2246        assert_eq!(
2247            result,
2248            Ast::Pipeline(vec![ShellCommand {
2249                args: vec!["command".to_string()],
2250                compound: None,
2251                redirections: vec![Redirection::FdClose(2)],
2252            }])
2253        );
2254    }
2255
2256    #[test]
2257    fn test_parse_fd_input_output() {
2258        let tokens = vec![
2259            Token::Word("command".to_string()),
2260            Token::RedirectFdInOut(3, "file.txt".to_string()),
2261        ];
2262        let result = parse(tokens).unwrap();
2263        assert_eq!(
2264            result,
2265            Ast::Pipeline(vec![ShellCommand {
2266                args: vec!["command".to_string()],
2267                compound: None,
2268                redirections: vec![Redirection::FdInputOutput(3, "file.txt".to_string())],
2269            }])
2270        );
2271    }
2272
2273    #[test]
2274    fn test_parse_multiple_fd_redirections() {
2275        let tokens = vec![
2276            Token::Word("command".to_string()),
2277            Token::RedirectFdOut(2, "err.log".to_string()),
2278            Token::RedirectFdIn(3, "input.txt".to_string()),
2279            Token::RedirectFdAppend(4, "append.log".to_string()),
2280        ];
2281        let result = parse(tokens).unwrap();
2282        assert_eq!(
2283            result,
2284            Ast::Pipeline(vec![ShellCommand {
2285                args: vec!["command".to_string()],
2286                compound: None,
2287                redirections: vec![
2288                    Redirection::FdOutput(2, "err.log".to_string()),
2289                    Redirection::FdInput(3, "input.txt".to_string()),
2290                    Redirection::FdAppend(4, "append.log".to_string()),
2291                ],
2292            }])
2293        );
2294    }
2295
2296    #[test]
2297    fn test_parse_fd_swap_pattern() {
2298        let tokens = vec![
2299            Token::Word("command".to_string()),
2300            Token::RedirectFdDup(3, 1),
2301            Token::RedirectFdDup(1, 2),
2302            Token::RedirectFdDup(2, 3),
2303            Token::RedirectFdClose(3),
2304        ];
2305        let result = parse(tokens).unwrap();
2306        assert_eq!(
2307            result,
2308            Ast::Pipeline(vec![ShellCommand {
2309                args: vec!["command".to_string()],
2310                redirections: vec![
2311                    Redirection::FdDuplicate(3, 1),
2312                    Redirection::FdDuplicate(1, 2),
2313                    Redirection::FdDuplicate(2, 3),
2314                    Redirection::FdClose(3),
2315                ],
2316                compound: None,
2317            }])
2318        );
2319    }
2320
2321    #[test]
2322    fn test_parse_mixed_basic_and_fd_redirections() {
2323        let tokens = vec![
2324            Token::Word("command".to_string()),
2325            Token::RedirOut,
2326            Token::Word("output.txt".to_string()),
2327            Token::RedirectFdDup(2, 1),
2328        ];
2329        let result = parse(tokens).unwrap();
2330        assert_eq!(
2331            result,
2332            Ast::Pipeline(vec![ShellCommand {
2333                args: vec!["command".to_string()],
2334                redirections: vec![
2335                    Redirection::Output("output.txt".to_string()),
2336                    Redirection::FdDuplicate(2, 1),
2337                ],
2338                compound: None,
2339            }])
2340        );
2341    }
2342
2343    #[test]
2344    fn test_parse_fd_redirection_ordering() {
2345        // Test that redirections are preserved in left-to-right order
2346        let tokens = vec![
2347            Token::Word("command".to_string()),
2348            Token::RedirectFdOut(2, "first.log".to_string()),
2349            Token::RedirOut,
2350            Token::Word("second.txt".to_string()),
2351            Token::RedirectFdDup(2, 1),
2352        ];
2353        let result = parse(tokens).unwrap();
2354        assert_eq!(
2355            result,
2356            Ast::Pipeline(vec![ShellCommand {
2357                args: vec!["command".to_string()],
2358                redirections: vec![
2359                    Redirection::FdOutput(2, "first.log".to_string()),
2360                    Redirection::Output("second.txt".to_string()),
2361                    Redirection::FdDuplicate(2, 1),
2362                ],
2363                compound: None,
2364            }])
2365        );
2366    }
2367
2368    #[test]
2369    fn test_parse_fd_redirection_with_pipe() {
2370        let tokens = vec![
2371            Token::Word("command".to_string()),
2372            Token::RedirectFdDup(2, 1),
2373            Token::Pipe,
2374            Token::Word("grep".to_string()),
2375            Token::Word("error".to_string()),
2376        ];
2377        let result = parse(tokens).unwrap();
2378        assert_eq!(
2379            result,
2380            Ast::Pipeline(vec![
2381                ShellCommand {
2382                    args: vec!["command".to_string()],
2383                    redirections: vec![Redirection::FdDuplicate(2, 1)],
2384                    compound: None,
2385                },
2386                ShellCommand {
2387                    args: vec!["grep".to_string(), "error".to_string()],
2388                    compound: None,
2389                    redirections: Vec::new(),
2390                }
2391            ])
2392        );
2393    }
2394
2395    #[test]
2396    fn test_parse_all_fd_numbers() {
2397        // Test fd 0
2398        let tokens = vec![
2399            Token::Word("cmd".to_string()),
2400            Token::RedirectFdIn(0, "file".to_string()),
2401        ];
2402        let result = parse(tokens).unwrap();
2403        if let Ast::Pipeline(cmds) = result {
2404            assert_eq!(
2405                cmds[0].redirections[0],
2406                Redirection::FdInput(0, "file".to_string())
2407            );
2408        } else {
2409            panic!("Expected Pipeline");
2410        }
2411
2412        // Test fd 9
2413        let tokens = vec![
2414            Token::Word("cmd".to_string()),
2415            Token::RedirectFdOut(9, "file".to_string()),
2416        ];
2417        let result = parse(tokens).unwrap();
2418        if let Ast::Pipeline(cmds) = result {
2419            assert_eq!(
2420                cmds[0].redirections[0],
2421                Redirection::FdOutput(9, "file".to_string())
2422            );
2423        } else {
2424            panic!("Expected Pipeline");
2425        }
2426    }
2427}