// rush_sh/parser.rs — parser: turns the lexer's token stream into an AST.

1use super::lexer::Token;
2
/// Abstract syntax tree for a parsed shell program.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Ast {
    /// One or more commands connected by pipes: `a | b | c`.
    Pipeline(Vec<ShellCommand>),
    /// Commands executed one after another (separated by `;` or newlines).
    Sequence(Vec<Ast>),
    /// Shell variable assignment: `VAR=VALUE`.
    Assignment {
        var: String,
        value: String,
    },
    /// Function-local variable assignment: `local VAR=VALUE` (value may be empty).
    LocalAssignment {
        var: String,
        value: String,
    },
    /// `if`/`elif`/`else`/`fi` construct.
    If {
        branches: Vec<(Box<Ast>, Box<Ast>)>, // (condition, then_branch)
        else_branch: Option<Box<Ast>>,
    },
    /// `case WORD in ... esac` construct.
    Case {
        word: String,
        // Each entry pairs one arm's patterns with that arm's body.
        cases: Vec<(Vec<String>, Ast)>,
        // Body of the default (`*)`) arm, if present.
        default: Option<Box<Ast>>,
    },
    /// `for VARIABLE in ITEMS; do BODY; done`.
    For {
        variable: String,
        items: Vec<String>,
        body: Box<Ast>,
    },
    /// `while CONDITION; do BODY; done`.
    While {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// `name() { BODY }` — records the body for later invocation.
    FunctionDefinition {
        name: String,
        body: Box<Ast>,
    },
    /// Invocation of a shell function by name with its arguments.
    /// NOTE(review): not constructed in this file's visible parsing code —
    /// presumably produced/resolved at execution time; confirm in the executor.
    FunctionCall {
        name: String,
        args: Vec<String>,
    },
    /// `return` (value `None`) or `return VALUE` inside a function body.
    Return {
        value: Option<String>,
    },
    /// `left && right`.
    And {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `left || right`.
    Or {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// Subshell execution: (commands)
    /// Commands execute in an isolated copy of the shell state
    Subshell {
        body: Box<Ast>,
    },
    /// Command group execution: { commands; }
    /// Commands execute in the current shell state
    CommandGroup {
        body: Box<Ast>,
    },
}
63
/// Represents a single redirection operation
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Redirection {
    /// Input from file: < file or N< file
    Input(String),
    /// Output to file: > file or N> file
    Output(String),
    /// Append to file: >> file or N>> file
    Append(String),
    /// Input from file with explicit fd: N< file
    FdInput(i32, String),
    /// Output to file with explicit fd: N> file
    FdOutput(i32, String),
    /// Append to file with explicit fd: N>> file
    FdAppend(i32, String),
    /// Duplicate file descriptor: N>&M or N<&M
    FdDuplicate(i32, i32),
    /// Close file descriptor: N>&- or N<&-
    FdClose(i32),
    /// Open file for read/write: N<> file
    FdInputOutput(i32, String),
    /// Here-document: << EOF ... EOF
    /// Fields are (delimiter, second marker). NOTE(review): the second field
    /// is built via `to_string()` from the lexer's `RedirHereDoc` payload
    /// (it looks like a quoting flag) — confirm its exact format in the lexer.
    HereDoc(String, String),
    /// Here-string: <<< "string"
    HereString(String),
}
90
/// A single command within a pipeline: its argument words plus any
/// redirections, or alternatively a compound command (see `compound`).
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ShellCommand {
    /// The command's words in order — presumably `args[0]` is the command
    /// name and the rest are its arguments; verify in `parse_pipeline`.
    pub args: Vec<String>,
    /// All redirections in order of appearance (for POSIX left-to-right processing)
    pub redirections: Vec<Redirection>,
    /// Optional compound command (subshell, command group, etc.)
    /// If present, this takes precedence over args
    pub compound: Option<Box<Ast>>,
}
100
/// Helper function to validate if a string is a valid variable name.
/// Only the first character is inspected: it must be a letter or an
/// underscore. An empty string is never valid.
fn is_valid_variable_name(name: &str) -> bool {
    name.chars()
        .next()
        .is_some_and(|c| c.is_alphabetic() || c == '_')
}
110
111/// Helper function to create an empty body AST (a no-op that returns success).
112/// Used for empty then/else branches, empty loop bodies, and empty function bodies.
113fn create_empty_body_ast() -> Ast {
114    Ast::Pipeline(vec![ShellCommand {
115        args: vec!["true".to_string()],
116        redirections: Vec::new(),
117        compound: None,
118    }])
119}
120
121/// Helper function to skip consecutive newline tokens.
122/// Updates the index to point to the first non-newline token.
123fn skip_newlines(tokens: &[Token], i: &mut usize) {
124    while *i < tokens.len() && tokens[*i] == Token::Newline {
125        *i += 1;
126    }
127}
128
129/// Helper function to skip to the matching 'fi' token for an 'if' statement.
130/// Handles nested if statements correctly.
131fn skip_to_matching_fi(tokens: &[Token], i: &mut usize) {
132    let mut if_depth = 1;
133    *i += 1; // Move past the 'if' token
134    while *i < tokens.len() && if_depth > 0 {
135        match tokens[*i] {
136            Token::If => if_depth += 1,
137            Token::Fi => if_depth -= 1,
138            _ => {}
139        }
140        *i += 1;
141    }
142}
143
144/// Helper function to skip to the matching 'done' token for a 'for' or 'while' loop.
145/// Handles nested loops correctly.
146fn skip_to_matching_done(tokens: &[Token], i: &mut usize) {
147    let mut loop_depth = 1;
148    *i += 1; // Move past the 'for' or 'while' token
149    while *i < tokens.len() && loop_depth > 0 {
150        match tokens[*i] {
151            Token::For | Token::While => loop_depth += 1,
152            Token::Done => loop_depth -= 1,
153            _ => {}
154        }
155        *i += 1;
156    }
157}
158
159/// Helper function to skip to the matching 'esac' token for a 'case' statement.
160fn skip_to_matching_esac(tokens: &[Token], i: &mut usize) {
161    *i += 1; // Move past the 'case' token
162    while *i < tokens.len() {
163        if tokens[*i] == Token::Esac {
164            *i += 1;
165            break;
166        }
167        *i += 1;
168    }
169}
170
171pub fn parse(tokens: Vec<Token>) -> Result<Ast, String> {
172    // First, try to detect and parse function definitions that span multiple lines
173    if tokens.len() >= 4
174        && let (Token::Word(_), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
175            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
176    {
177        // Look for the matching RightBrace
178        // Start from the opening brace (token 3) and find its match
179        let mut brace_depth = 1; // We've already seen the opening brace at position 3
180        let mut function_end = tokens.len();
181        let mut j = 4; // Start after the opening brace
182
183        while j < tokens.len() {
184            match &tokens[j] {
185                Token::LeftBrace => {
186                    brace_depth += 1;
187                    j += 1;
188                }
189                Token::RightBrace => {
190                    brace_depth -= 1;
191                    if brace_depth == 0 {
192                        function_end = j + 1; // Include the closing brace
193                        break;
194                    }
195                    j += 1;
196                }
197                Token::If => {
198                    // Skip to matching fi to avoid confusion
199                    let mut if_depth = 1;
200                    j += 1;
201                    while j < tokens.len() && if_depth > 0 {
202                        match tokens[j] {
203                            Token::If => if_depth += 1,
204                            Token::Fi => if_depth -= 1,
205                            _ => {}
206                        }
207                        j += 1;
208                    }
209                }
210                Token::For | Token::While => {
211                    // Skip to matching done
212                    let mut for_depth = 1;
213                    j += 1;
214                    while j < tokens.len() && for_depth > 0 {
215                        match tokens[j] {
216                            Token::For | Token::While => for_depth += 1,
217                            Token::Done => for_depth -= 1,
218                            _ => {}
219                        }
220                        j += 1;
221                    }
222                }
223                Token::Case => {
224                    // Skip to matching esac
225                    j += 1;
226                    while j < tokens.len() {
227                        if tokens[j] == Token::Esac {
228                            j += 1;
229                            break;
230                        }
231                        j += 1;
232                    }
233                }
234                _ => {
235                    j += 1;
236                }
237            }
238        }
239
240        if brace_depth == 0 && function_end <= tokens.len() {
241            // We found the complete function definition
242            let function_tokens = &tokens[0..function_end];
243            let remaining_tokens = &tokens[function_end..];
244
245            let function_ast = parse_function_definition(function_tokens)?;
246
247            return if remaining_tokens.is_empty() {
248                Ok(function_ast)
249            } else {
250                // There are more commands after the function
251                let remaining_ast = parse_commands_sequentially(remaining_tokens)?;
252                Ok(Ast::Sequence(vec![function_ast, remaining_ast]))
253            };
254        }
255    }
256
257    // Also check for legacy function definition format (word with parentheses followed by brace)
258    if tokens.len() >= 2
259        && let Token::Word(ref word) = tokens[0]
260        && let Some(paren_pos) = word.find('(')
261        && word.ends_with(')')
262        && paren_pos > 0
263        && tokens[1] == Token::LeftBrace
264    {
265        return parse_function_definition(&tokens);
266    }
267
268    // Fall back to normal parsing
269    parse_commands_sequentially(&tokens)
270}
271
272fn parse_slice(tokens: &[Token]) -> Result<Ast, String> {
273    if tokens.is_empty() {
274        return Err("No commands found".to_string());
275    }
276
277    // Check if it's an assignment
278    if tokens.len() == 2 {
279        // Check for pattern: VAR= VALUE
280        if let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
281            && let Some(eq_pos) = var_eq.find('=')
282            && eq_pos > 0
283            && eq_pos < var_eq.len()
284        {
285            let var = var_eq[..eq_pos].to_string();
286            let full_value = format!("{}{}", &var_eq[eq_pos + 1..], value);
287            // Basic validation: variable name should start with letter or underscore
288            if is_valid_variable_name(&var) {
289                return Ok(Ast::Assignment {
290                    var,
291                    value: full_value,
292                });
293            }
294        }
295    }
296
297    // Check if it's an assignment (VAR= VALUE)
298    if tokens.len() == 2
299        && let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
300        && let Some(eq_pos) = var_eq.find('=')
301        && eq_pos > 0
302        && eq_pos == var_eq.len() - 1
303    {
304        let var = var_eq[..eq_pos].to_string();
305        // Basic validation: variable name should start with letter or underscore
306        if is_valid_variable_name(&var) {
307            return Ok(Ast::Assignment {
308                var,
309                value: value.clone(),
310            });
311        }
312    }
313
314    // Check if it's a local assignment (local VAR VALUE or local VAR= VALUE)
315    if tokens.len() == 3
316        && let (Token::Local, Token::Word(var), Token::Word(value)) =
317            (&tokens[0], &tokens[1], &tokens[2])
318    {
319        // Strip trailing = if present (handles "local var= value" format)
320        let clean_var = if var.ends_with('=') {
321            &var[..var.len() - 1]
322        } else {
323            var
324        };
325        // Basic validation: variable name should start with letter or underscore
326        if is_valid_variable_name(clean_var) {
327            return Ok(Ast::LocalAssignment {
328                var: clean_var.to_string(),
329                value: value.clone(),
330            });
331        } else {
332            return Err(format!("Invalid variable name: {}", clean_var));
333        }
334    }
335
336    // Check if it's a return statement
337    if !tokens.is_empty()
338        && tokens.len() <= 2
339        && let Token::Return = &tokens[0]
340    {
341        if tokens.len() == 1 {
342            // return (with no value, defaults to 0)
343            return Ok(Ast::Return { value: None });
344        } else if let Token::Word(word) = &tokens[1] {
345            // return value
346            return Ok(Ast::Return {
347                value: Some(word.clone()),
348            });
349        }
350    }
351
352    // Check if it's a local assignment (local VAR=VALUE)
353    if tokens.len() == 2
354        && let (Token::Local, Token::Word(var_eq)) = (&tokens[0], &tokens[1])
355        && let Some(eq_pos) = var_eq.find('=')
356        && eq_pos > 0
357        && eq_pos < var_eq.len()
358    {
359        let var = var_eq[..eq_pos].to_string();
360        let value = var_eq[eq_pos + 1..].to_string();
361        // Basic validation: variable name should start with letter or underscore
362        if is_valid_variable_name(&var) {
363            return Ok(Ast::LocalAssignment { var, value });
364        } else {
365            return Err(format!("Invalid variable name: {}", var));
366        }
367    }
368
369    // Check if it's a local assignment (local VAR) with no initial value
370    if tokens.len() == 2
371        && let (Token::Local, Token::Word(var)) = (&tokens[0], &tokens[1])
372        && !var.contains('=')
373    {
374        // Basic validation: variable name should start with letter or underscore
375        if is_valid_variable_name(var) {
376            return Ok(Ast::LocalAssignment {
377                var: var.clone(),
378                value: String::new(),
379            });
380        } else {
381            return Err(format!("Invalid variable name: {}", var));
382        }
383    }
384
385    // Check if it's an assignment (single token with =)
386    if tokens.len() == 1
387        && let Token::Word(ref word) = tokens[0]
388        && let Some(eq_pos) = word.find('=')
389        && eq_pos > 0
390        && eq_pos < word.len()
391    {
392        let var = word[..eq_pos].to_string();
393        let value = word[eq_pos + 1..].to_string();
394        // Basic validation: variable name should start with letter or underscore
395        if is_valid_variable_name(&var) {
396            return Ok(Ast::Assignment { var, value });
397        }
398    }
399
400    // Check if it's an if statement
401    if let Token::If = tokens[0] {
402        return parse_if(tokens);
403    }
404
405    // Check if it's a case statement
406    if let Token::Case = tokens[0] {
407        return parse_case(tokens);
408    }
409
410    // Check if it's a for loop
411    if let Token::For = tokens[0] {
412        return parse_for(tokens);
413    }
414
415    // Check if it's a while loop
416    if let Token::While = tokens[0] {
417        return parse_while(tokens);
418    }
419
420    // Check if it's a function definition
421    // Pattern: Word LeftParen RightParen LeftBrace
422    if tokens.len() >= 4
423        && let (Token::Word(word), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
424            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
425        && is_valid_variable_name(word)
426    {
427        return parse_function_definition(tokens);
428    }
429
430    // Also check for function definition with parentheses in the word (legacy support)
431    if tokens.len() >= 2
432        && let Token::Word(ref word) = tokens[0]
433        && let Some(paren_pos) = word.find('(')
434        && word.ends_with(')')
435        && paren_pos > 0
436    {
437        let func_name = &word[..paren_pos];
438        if is_valid_variable_name(func_name) && tokens[1] == Token::LeftBrace {
439            return parse_function_definition(tokens);
440        }
441    }
442
443    // Check if it's a function call (word followed by arguments)
444    // For Phase 1, we'll parse as regular pipeline and handle function calls in executor
445
446    // Otherwise, parse as pipeline
447    parse_pipeline(tokens)
448}
449
450fn parse_commands_sequentially(tokens: &[Token]) -> Result<Ast, String> {
451    let mut i = 0;
452    let mut commands = Vec::new();
453
454    while i < tokens.len() {
455        // Skip whitespace and comments
456        while i < tokens.len() {
457            match &tokens[i] {
458                Token::Newline => {
459                    i += 1;
460                }
461                Token::Word(word) if word.starts_with('#') => {
462                    // Skip comment line
463                    while i < tokens.len() && tokens[i] != Token::Newline {
464                        i += 1;
465                    }
466                    if i < tokens.len() {
467                        i += 1; // Skip the newline
468                    }
469                }
470                _ => break,
471            }
472        }
473
474        if i >= tokens.len() {
475            break;
476        }
477
478        // Find the end of this command
479        let start = i;
480
481        // Check for subshell: LeftParen at start of command
482        // Must check BEFORE function definition to avoid ambiguity
483        if tokens[i] == Token::LeftParen {
484            // This is a subshell - find the matching RightParen
485            let mut paren_depth = 1;
486            let mut j = i + 1;
487
488            while j < tokens.len() && paren_depth > 0 {
489                match tokens[j] {
490                    Token::LeftParen => paren_depth += 1,
491                    Token::RightParen => paren_depth -= 1,
492                    _ => {}
493                }
494                j += 1;
495            }
496
497            if paren_depth != 0 {
498                return Err("Unmatched parenthesis in subshell".to_string());
499            }
500
501            // Extract subshell body (tokens between parens)
502            let subshell_tokens = &tokens[i + 1..j - 1];
503
504            // Parse the subshell body recursively
505            // Empty subshells are not allowed
506            let body_ast = if subshell_tokens.is_empty() {
507                return Err("Empty subshell".to_string());
508            } else {
509                parse_commands_sequentially(subshell_tokens)?
510            };
511
512            let mut subshell_ast = Ast::Subshell {
513                body: Box::new(body_ast),
514            };
515
516            i = j; // Move past the closing paren
517
518            // Check for redirections after subshell
519            let mut redirections = Vec::new();
520            while i < tokens.len() {
521                match &tokens[i] {
522                    Token::RedirOut => {
523                        i += 1;
524                        if i < tokens.len() {
525                            if let Token::Word(file) = &tokens[i] {
526                                redirections.push(Redirection::Output(file.clone()));
527                                i += 1;
528                            }
529                        }
530                    }
531                    Token::RedirIn => {
532                        i += 1;
533                        if i < tokens.len() {
534                            if let Token::Word(file) = &tokens[i] {
535                                redirections.push(Redirection::Input(file.clone()));
536                                i += 1;
537                            }
538                        }
539                    }
540                    Token::RedirAppend => {
541                        i += 1;
542                        if i < tokens.len() {
543                            if let Token::Word(file) = &tokens[i] {
544                                redirections.push(Redirection::Append(file.clone()));
545                                i += 1;
546                            }
547                        }
548                    }
549                    Token::RedirectFdOut(fd, file) => {
550                        redirections.push(Redirection::FdOutput(*fd, file.clone()));
551                        i += 1;
552                    }
553                    Token::RedirectFdIn(fd, file) => {
554                        redirections.push(Redirection::FdInput(*fd, file.clone()));
555                        i += 1;
556                    }
557                    Token::RedirectFdAppend(fd, file) => {
558                        redirections.push(Redirection::FdAppend(*fd, file.clone()));
559                        i += 1;
560                    }
561                    Token::RedirectFdDup(from_fd, to_fd) => {
562                        redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
563                        i += 1;
564                    }
565                    Token::RedirectFdClose(fd) => {
566                        redirections.push(Redirection::FdClose(*fd));
567                        i += 1;
568                    }
569                    Token::RedirectFdInOut(fd, file) => {
570                        redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
571                        i += 1;
572                    }
573                    Token::RedirHereDoc(delimiter, quoted) => {
574                        redirections
575                            .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
576                        i += 1;
577                    }
578                    Token::RedirHereString(content) => {
579                        redirections.push(Redirection::HereString(content.clone()));
580                        i += 1;
581                    }
582                    _ => break,
583                }
584            }
585
586            // Check if this subshell is part of a pipeline
587            if i < tokens.len() && tokens[i] == Token::Pipe {
588                // Find end of pipeline
589                let mut end = i;
590                let mut brace_depth = 0;
591                let mut paren_depth = 0;
592                let mut last_was_pipe = true; // Started with a pipe
593                while end < tokens.len() {
594                    match &tokens[end] {
595                        Token::Pipe => last_was_pipe = true,
596                        Token::LeftBrace => {
597                            brace_depth += 1;
598                            last_was_pipe = false;
599                        }
600                        Token::RightBrace => {
601                            if brace_depth > 0 {
602                                brace_depth -= 1;
603                            } else {
604                                break;
605                            }
606                            last_was_pipe = false;
607                        }
608                        Token::LeftParen => {
609                            paren_depth += 1;
610                            last_was_pipe = false;
611                        }
612                        Token::RightParen => {
613                            if paren_depth > 0 {
614                                paren_depth -= 1;
615                            } else {
616                                break;
617                            }
618                            last_was_pipe = false;
619                        }
620                        Token::Newline | Token::Semicolon => {
621                            if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
622                                break;
623                            }
624                        }
625                        Token::Word(_) => last_was_pipe = false,
626                        _ => {}
627                    }
628                    end += 1;
629                }
630
631                let pipeline_ast = parse_pipeline(&tokens[start..end])?;
632                commands.push(pipeline_ast);
633                i = end;
634                continue;
635            }
636
637            // If not part of a pipeline, apply redirections to the subshell itself
638            if !redirections.is_empty() {
639                subshell_ast = Ast::Pipeline(vec![ShellCommand {
640                    args: Vec::new(),
641                    redirections,
642                    compound: Some(Box::new(subshell_ast)),
643                }]);
644            }
645
646            // Handle operators after subshell (&&, ||, ;, newline)
647            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
648                let operator = tokens[i].clone();
649                i += 1; // Skip the operator
650
651                // Skip any newlines after the operator
652                while i < tokens.len() && tokens[i] == Token::Newline {
653                    i += 1;
654                }
655
656                // Parse the right side recursively
657                let remaining_tokens = &tokens[i..];
658                let right_ast = parse_commands_sequentially(remaining_tokens)?;
659
660                // Create And or Or node
661                let combined_ast = match operator {
662                    Token::And => Ast::And {
663                        left: Box::new(subshell_ast),
664                        right: Box::new(right_ast),
665                    },
666                    Token::Or => Ast::Or {
667                        left: Box::new(subshell_ast),
668                        right: Box::new(right_ast),
669                    },
670                    _ => unreachable!(),
671                };
672
673                commands.push(combined_ast);
674                break; // We've consumed the rest of the tokens
675            }
676
677            commands.push(subshell_ast);
678
679            // Skip semicolon or newline after subshell
680            if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
681                i += 1;
682            }
683            continue;
684        }
685
686        // Check for command group: LeftBrace at start of command
687        if tokens[i] == Token::LeftBrace {
688            // This is a command group - find the matching RightBrace
689            let mut brace_depth = 1;
690            let mut j = i + 1;
691
692            while j < tokens.len() && brace_depth > 0 {
693                match tokens[j] {
694                    Token::LeftBrace => brace_depth += 1,
695                    Token::RightBrace => brace_depth -= 1,
696                    _ => {}
697                }
698                j += 1;
699            }
700
701            if brace_depth != 0 {
702                return Err("Unmatched brace in command group".to_string());
703            }
704
705            // Extract group body (tokens between braces)
706            let group_tokens = &tokens[i + 1..j - 1];
707
708            // Parse the group body recursively
709            // Empty groups are not allowed
710            let body_ast = if group_tokens.is_empty() {
711                return Err("Empty command group".to_string());
712            } else {
713                parse_commands_sequentially(group_tokens)?
714            };
715
716            let mut group_ast = Ast::CommandGroup {
717                body: Box::new(body_ast),
718            };
719
720            i = j; // Move past the closing brace
721
722            // Check for redirections after command group
723            let mut redirections = Vec::new();
724            while i < tokens.len() {
725                match &tokens[i] {
726                    Token::RedirOut => {
727                        i += 1;
728                        if i < tokens.len() {
729                            if let Token::Word(file) = &tokens[i] {
730                                redirections.push(Redirection::Output(file.clone()));
731                                i += 1;
732                            }
733                        }
734                    }
735                    Token::RedirIn => {
736                        i += 1;
737                        if i < tokens.len() {
738                            if let Token::Word(file) = &tokens[i] {
739                                redirections.push(Redirection::Input(file.clone()));
740                                i += 1;
741                            }
742                        }
743                    }
744                    Token::RedirAppend => {
745                        i += 1;
746                        if i < tokens.len() {
747                            if let Token::Word(file) = &tokens[i] {
748                                redirections.push(Redirection::Append(file.clone()));
749                                i += 1;
750                            }
751                        }
752                    }
753                    Token::RedirectFdOut(fd, file) => {
754                        redirections.push(Redirection::FdOutput(*fd, file.clone()));
755                        i += 1;
756                    }
757                    Token::RedirectFdIn(fd, file) => {
758                        redirections.push(Redirection::FdInput(*fd, file.clone()));
759                        i += 1;
760                    }
761                    Token::RedirectFdAppend(fd, file) => {
762                        redirections.push(Redirection::FdAppend(*fd, file.clone()));
763                        i += 1;
764                    }
765                    Token::RedirectFdDup(from_fd, to_fd) => {
766                        redirections.push(Redirection::FdDuplicate(*from_fd, *to_fd));
767                        i += 1;
768                    }
769                    Token::RedirectFdClose(fd) => {
770                        redirections.push(Redirection::FdClose(*fd));
771                        i += 1;
772                    }
773                    Token::RedirectFdInOut(fd, file) => {
774                        redirections.push(Redirection::FdInputOutput(*fd, file.clone()));
775                        i += 1;
776                    }
777                    Token::RedirHereDoc(delimiter, quoted) => {
778                        redirections
779                            .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
780                        i += 1;
781                    }
782                    Token::RedirHereString(content) => {
783                        redirections.push(Redirection::HereString(content.clone()));
784                        i += 1;
785                    }
786                    _ => break,
787                }
788            }
789
790            // Check if this group is part of a pipeline
791            if i < tokens.len() && tokens[i] == Token::Pipe {
792                // Find end of pipeline
793                let mut end = i;
794                let mut brace_depth = 0;
795                let mut paren_depth = 0;
796                let mut last_was_pipe = true; // Started with a pipe
797                while end < tokens.len() {
798                    match &tokens[end] {
799                        Token::Pipe => last_was_pipe = true,
800                        Token::LeftBrace => {
801                            brace_depth += 1;
802                            last_was_pipe = false;
803                        }
804                        Token::RightBrace => {
805                            if brace_depth > 0 {
806                                brace_depth -= 1;
807                            } else {
808                                break;
809                            }
810                            last_was_pipe = false;
811                        }
812                        Token::LeftParen => {
813                            paren_depth += 1;
814                            last_was_pipe = false;
815                        }
816                        Token::RightParen => {
817                            if paren_depth > 0 {
818                                paren_depth -= 1;
819                            } else {
820                                break;
821                            }
822                            last_was_pipe = false;
823                        }
824                        Token::Newline | Token::Semicolon => {
825                            if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
826                                break;
827                            }
828                        }
829                        Token::Word(_) => last_was_pipe = false,
830                        _ => {}
831                    }
832                    end += 1;
833                }
834
835                let pipeline_ast = parse_pipeline(&tokens[start..end])?;
836                commands.push(pipeline_ast);
837                i = end;
838                continue;
839            }
840
841            // If not part of a pipeline, apply redirections to the group itself
842            if !redirections.is_empty() {
843                group_ast = Ast::Pipeline(vec![ShellCommand {
844                    args: Vec::new(),
845                    redirections,
846                    compound: Some(Box::new(group_ast)),
847                }]);
848            }
849
850            // Handle operators after group (&&, ||, ;, newline)
851            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
852                let operator = tokens[i].clone();
853                i += 1; // Skip the operator
854
855                // Skip any newlines after the operator
856                while i < tokens.len() && tokens[i] == Token::Newline {
857                    i += 1;
858                }
859
860                // Parse the right side recursively
861                let remaining_tokens = &tokens[i..];
862                let right_ast = parse_commands_sequentially(remaining_tokens)?;
863
864                // Create And or Or node
865                let combined_ast = match operator {
866                    Token::And => Ast::And {
867                        left: Box::new(group_ast),
868                        right: Box::new(right_ast),
869                    },
870                    Token::Or => Ast::Or {
871                        left: Box::new(group_ast),
872                        right: Box::new(right_ast),
873                    },
874                    _ => unreachable!(),
875                };
876
877                commands.push(combined_ast);
878                break; // We've consumed the rest of the tokens
879            }
880
881            commands.push(group_ast);
882
883            // Skip semicolon or newline after group
884            if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
885                i += 1;
886            }
887            continue;
888        }
889
890        // Special handling for compound commands
891        if tokens[i] == Token::If {
892            // For if statements, find the matching fi
893            let mut depth = 0;
894            while i < tokens.len() {
895                match tokens[i] {
896                    Token::If => depth += 1,
897                    Token::Fi => {
898                        depth -= 1;
899                        if depth == 0 {
900                            i += 1; // Include the fi
901                            break;
902                        }
903                    }
904                    _ => {}
905                }
906                i += 1;
907            }
908
909            // If we didn't find a matching fi, include all remaining tokens
910            // This handles the case where the if statement is incomplete
911        } else if tokens[i] == Token::For {
912            // For for loops, find the matching done
913            let mut depth = 1; // Start at 1 because we're already inside the for
914            i += 1; // Move past the 'for' token
915            while i < tokens.len() {
916                match tokens[i] {
917                    Token::For | Token::While => depth += 1,
918                    Token::Done => {
919                        depth -= 1;
920                        if depth == 0 {
921                            i += 1; // Include the done
922                            break;
923                        }
924                    }
925                    _ => {}
926                }
927                i += 1;
928            }
929        } else if tokens[i] == Token::While {
930            // For while loops, find the matching done
931            let mut depth = 1; // Start at 1 because we're already inside the while
932            i += 1; // Move past the 'while' token
933            while i < tokens.len() {
934                match tokens[i] {
935                    Token::While | Token::For => depth += 1,
936                    Token::Done => {
937                        depth -= 1;
938                        if depth == 0 {
939                            i += 1; // Include the done
940                            break;
941                        }
942                    }
943                    _ => {}
944                }
945                i += 1;
946            }
947        } else if tokens[i] == Token::Case {
948            // For case statements, find the matching esac
949            while i < tokens.len() {
950                if tokens[i] == Token::Esac {
951                    i += 1; // Include the esac
952                    break;
953                }
954                i += 1;
955            }
956        } else if i + 3 < tokens.len()
957            && matches!(tokens[i], Token::Word(_))
958            && tokens[i + 1] == Token::LeftParen
959            && tokens[i + 2] == Token::RightParen
960            && tokens[i + 3] == Token::LeftBrace
961        {
962            // This is a function definition - find the matching closing brace
963            let mut brace_depth = 1;
964            i += 4; // Skip to after opening brace
965            while i < tokens.len() && brace_depth > 0 {
966                match tokens[i] {
967                    Token::LeftBrace => brace_depth += 1,
968                    Token::RightBrace => brace_depth -= 1,
969                    _ => {}
970                }
971                i += 1;
972            }
973        } else {
974            // For simple commands, stop at newline, semicolon, &&, or ||
975            // But check if the next token after newline is a control flow keyword
976            let mut brace_depth = 0;
977            let mut paren_depth = 0;
978            let mut last_was_pipe = false;
979            while i < tokens.len() {
980                match &tokens[i] {
981                    Token::LeftBrace => {
982                        brace_depth += 1;
983                        last_was_pipe = false;
984                    }
985                    Token::RightBrace => {
986                        if brace_depth > 0 {
987                            brace_depth -= 1;
988                        } else {
989                            break;
990                        }
991                        last_was_pipe = false;
992                    }
993                    Token::LeftParen => {
994                        paren_depth += 1;
995                        last_was_pipe = false;
996                    }
997                    Token::RightParen => {
998                        if paren_depth > 0 {
999                            paren_depth -= 1;
1000                        } else {
1001                            break;
1002                        }
1003                        last_was_pipe = false;
1004                    }
1005                    Token::Pipe => last_was_pipe = true,
1006                    Token::Newline | Token::Semicolon | Token::And | Token::Or => {
1007                        if brace_depth == 0 && paren_depth == 0 && !last_was_pipe {
1008                            break;
1009                        }
1010                    }
1011                    Token::Word(_) => last_was_pipe = false,
1012                    _ => {}
1013                }
1014                i += 1;
1015            }
1016        }
1017
1018        let command_tokens = &tokens[start..i];
1019        if !command_tokens.is_empty() {
1020            // Don't try to parse orphaned else/elif/fi tokens
1021            if command_tokens.len() == 1 {
1022                match command_tokens[0] {
1023                    Token::Else | Token::Elif | Token::Fi => {
1024                        // Skip orphaned control flow tokens
1025                        if i < tokens.len()
1026                            && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
1027                        {
1028                            i += 1;
1029                        }
1030                        continue;
1031                    }
1032                    _ => {}
1033                }
1034            }
1035
1036            let ast = parse_slice(command_tokens)?;
1037
1038            // Check if the next token is && or ||
1039            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
1040                let operator = tokens[i].clone();
1041                i += 1; // Skip the operator
1042
1043                // Skip any newlines after the operator
1044                while i < tokens.len() && tokens[i] == Token::Newline {
1045                    i += 1;
1046                }
1047
1048                // Parse the right side recursively
1049                let remaining_tokens = &tokens[i..];
1050                let right_ast = parse_commands_sequentially(remaining_tokens)?;
1051
1052                // Create And or Or node
1053                let combined_ast = match operator {
1054                    Token::And => Ast::And {
1055                        left: Box::new(ast),
1056                        right: Box::new(right_ast),
1057                    },
1058                    Token::Or => Ast::Or {
1059                        left: Box::new(ast),
1060                        right: Box::new(right_ast),
1061                    },
1062                    _ => unreachable!(),
1063                };
1064
1065                commands.push(combined_ast);
1066                break; // We've consumed the rest of the tokens
1067            } else {
1068                commands.push(ast);
1069            }
1070        }
1071
1072        if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
1073            i += 1;
1074        }
1075    }
1076
1077    if commands.is_empty() {
1078        return Err("No commands found".to_string());
1079    }
1080
1081    if commands.len() == 1 {
1082        Ok(commands.into_iter().next().unwrap())
1083    } else {
1084        Ok(Ast::Sequence(commands))
1085    }
1086}
1087
1088fn parse_pipeline(tokens: &[Token]) -> Result<Ast, String> {
1089    let mut commands = Vec::new();
1090    let mut current_cmd = ShellCommand::default();
1091
1092    let mut i = 0;
1093    while i < tokens.len() {
1094        let token = &tokens[i];
1095        match token {
1096            Token::LeftBrace => {
1097                // Start of command group in pipeline
1098                // Find matching RightBrace
1099                let mut brace_depth = 1;
1100                let mut j = i + 1;
1101
1102                while j < tokens.len() && brace_depth > 0 {
1103                    match tokens[j] {
1104                        Token::LeftBrace => brace_depth += 1,
1105                        Token::RightBrace => brace_depth -= 1,
1106                        _ => {}
1107                    }
1108                    j += 1;
1109                }
1110
1111                if brace_depth != 0 {
1112                    return Err("Unmatched brace in pipeline".to_string());
1113                }
1114
1115                // Parse group body
1116                let group_tokens = &tokens[i + 1..j - 1];
1117
1118                // Empty groups are valid and equivalent to 'true'
1119                let body_ast = if group_tokens.is_empty() {
1120                    create_empty_body_ast()
1121                } else {
1122                    parse_commands_sequentially(group_tokens)?
1123                };
1124
1125                // Create ShellCommand with compound command group
1126                current_cmd.compound = Some(Box::new(Ast::CommandGroup {
1127                    body: Box::new(body_ast),
1128                }));
1129
1130                i = j; // Move past closing brace
1131
1132                // Check for redirections after command group
1133                while i < tokens.len() {
1134                    match &tokens[i] {
1135                        Token::RedirOut => {
1136                            i += 1;
1137                            if i < tokens.len() {
1138                                if let Token::Word(file) = &tokens[i] {
1139                                    current_cmd
1140                                        .redirections
1141                                        .push(Redirection::Output(file.clone()));
1142                                    i += 1;
1143                                }
1144                            }
1145                        }
1146                        Token::RedirIn => {
1147                            i += 1;
1148                            if i < tokens.len() {
1149                                if let Token::Word(file) = &tokens[i] {
1150                                    current_cmd
1151                                        .redirections
1152                                        .push(Redirection::Input(file.clone()));
1153                                    i += 1;
1154                                }
1155                            }
1156                        }
1157                        Token::RedirAppend => {
1158                            i += 1;
1159                            if i < tokens.len() {
1160                                if let Token::Word(file) = &tokens[i] {
1161                                    current_cmd
1162                                        .redirections
1163                                        .push(Redirection::Append(file.clone()));
1164                                    i += 1;
1165                                }
1166                            }
1167                        }
1168                        Token::RedirectFdOut(fd, file) => {
1169                            current_cmd
1170                                .redirections
1171                                .push(Redirection::FdOutput(*fd, file.clone()));
1172                            i += 1;
1173                        }
1174                        Token::RedirectFdIn(fd, file) => {
1175                            current_cmd
1176                                .redirections
1177                                .push(Redirection::FdInput(*fd, file.clone()));
1178                            i += 1;
1179                        }
1180                        Token::RedirectFdAppend(fd, file) => {
1181                            current_cmd
1182                                .redirections
1183                                .push(Redirection::FdAppend(*fd, file.clone()));
1184                            i += 1;
1185                        }
1186                        Token::RedirectFdDup(from_fd, to_fd) => {
1187                            current_cmd
1188                                .redirections
1189                                .push(Redirection::FdDuplicate(*from_fd, *to_fd));
1190                            i += 1;
1191                        }
1192                        Token::RedirectFdClose(fd) => {
1193                            current_cmd.redirections.push(Redirection::FdClose(*fd));
1194                            i += 1;
1195                        }
1196                        Token::RedirectFdInOut(fd, file) => {
1197                            current_cmd
1198                                .redirections
1199                                .push(Redirection::FdInputOutput(*fd, file.clone()));
1200                            i += 1;
1201                        }
1202                        Token::RedirHereDoc(delimiter, quoted) => {
1203                            current_cmd
1204                                .redirections
1205                                .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
1206                            i += 1;
1207                        }
1208                        Token::RedirHereString(content) => {
1209                            current_cmd
1210                                .redirections
1211                                .push(Redirection::HereString(content.clone()));
1212                            i += 1;
1213                        }
1214                        Token::Pipe => {
1215                            // End of this pipeline stage
1216                            break;
1217                        }
1218                        _ => break,
1219                    }
1220                }
1221
1222                // Stage will be pushed at next | or end of loop
1223                continue;
1224            }
1225            Token::LeftParen => {
1226                // Start of subshell in pipeline
1227                // Find matching RightParen
1228                let mut paren_depth = 1;
1229                let mut j = i + 1;
1230
1231                while j < tokens.len() && paren_depth > 0 {
1232                    match tokens[j] {
1233                        Token::LeftParen => paren_depth += 1,
1234                        Token::RightParen => paren_depth -= 1,
1235                        _ => {}
1236                    }
1237                    j += 1;
1238                }
1239
1240                if paren_depth != 0 {
1241                    return Err("Unmatched parenthesis in pipeline".to_string());
1242                }
1243
1244                // Parse subshell body
1245                let subshell_tokens = &tokens[i + 1..j - 1];
1246
1247                // Empty subshells are valid and equivalent to 'true'
1248                let body_ast = if subshell_tokens.is_empty() {
1249                    create_empty_body_ast()
1250                } else {
1251                    parse_commands_sequentially(subshell_tokens)?
1252                };
1253
1254                // Create ShellCommand with compound subshell
1255                // Create ShellCommand with compound subshell
1256                current_cmd.compound = Some(Box::new(Ast::Subshell {
1257                    body: Box::new(body_ast),
1258                }));
1259
1260                i = j; // Move past closing paren
1261
1262                // Check for redirections after subshell
1263                while i < tokens.len() {
1264                    match &tokens[i] {
1265                        Token::RedirOut => {
1266                            i += 1;
1267                            if i < tokens.len() {
1268                                if let Token::Word(file) = &tokens[i] {
1269                                    current_cmd
1270                                        .redirections
1271                                        .push(Redirection::Output(file.clone()));
1272                                    i += 1;
1273                                }
1274                            }
1275                        }
1276                        Token::RedirIn => {
1277                            i += 1;
1278                            if i < tokens.len() {
1279                                if let Token::Word(file) = &tokens[i] {
1280                                    current_cmd
1281                                        .redirections
1282                                        .push(Redirection::Input(file.clone()));
1283                                    i += 1;
1284                                }
1285                            }
1286                        }
1287                        Token::RedirAppend => {
1288                            i += 1;
1289                            if i < tokens.len() {
1290                                if let Token::Word(file) = &tokens[i] {
1291                                    current_cmd
1292                                        .redirections
1293                                        .push(Redirection::Append(file.clone()));
1294                                    i += 1;
1295                                }
1296                            }
1297                        }
1298                        Token::RedirectFdOut(fd, file) => {
1299                            current_cmd
1300                                .redirections
1301                                .push(Redirection::FdOutput(*fd, file.clone()));
1302                            i += 1;
1303                        }
1304                        Token::RedirectFdIn(fd, file) => {
1305                            current_cmd
1306                                .redirections
1307                                .push(Redirection::FdInput(*fd, file.clone()));
1308                            i += 1;
1309                        }
1310                        Token::RedirectFdAppend(fd, file) => {
1311                            current_cmd
1312                                .redirections
1313                                .push(Redirection::FdAppend(*fd, file.clone()));
1314                            i += 1;
1315                        }
1316                        Token::RedirectFdDup(from_fd, to_fd) => {
1317                            current_cmd
1318                                .redirections
1319                                .push(Redirection::FdDuplicate(*from_fd, *to_fd));
1320                            i += 1;
1321                        }
1322                        Token::RedirectFdClose(fd) => {
1323                            current_cmd.redirections.push(Redirection::FdClose(*fd));
1324                            i += 1;
1325                        }
1326                        Token::RedirectFdInOut(fd, file) => {
1327                            current_cmd
1328                                .redirections
1329                                .push(Redirection::FdInputOutput(*fd, file.clone()));
1330                            i += 1;
1331                        }
1332                        Token::RedirHereDoc(delimiter, quoted) => {
1333                            current_cmd
1334                                .redirections
1335                                .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
1336                            i += 1;
1337                        }
1338                        Token::RedirHereString(content) => {
1339                            current_cmd
1340                                .redirections
1341                                .push(Redirection::HereString(content.clone()));
1342                            i += 1;
1343                        }
1344                        Token::Pipe => {
1345                            // End of this pipeline stage
1346                            break;
1347                        }
1348                        _ => break,
1349                    }
1350                }
1351
1352                // Stage will be pushed at next | or end of loop
1353                continue;
1354            }
1355            Token::Word(word) => {
1356                current_cmd.args.push(word.clone());
1357            }
1358            Token::Local => {
1359                current_cmd.args.push("local".to_string());
1360            }
1361            Token::Return => {
1362                current_cmd.args.push("return".to_string());
1363            }
1364            Token::Pipe => {
1365                if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
1366                    commands.push(current_cmd.clone());
1367                    current_cmd = ShellCommand::default();
1368                }
1369            }
1370            // Basic redirections (backward compatible)
1371            Token::RedirIn => {
1372                i += 1;
1373                if i < tokens.len()
1374                    && let Token::Word(ref file) = tokens[i]
1375                {
1376                    current_cmd
1377                        .redirections
1378                        .push(Redirection::Input(file.clone()));
1379                }
1380            }
1381            Token::RedirOut => {
1382                i += 1;
1383                if i < tokens.len()
1384                    && let Token::Word(ref file) = tokens[i]
1385                {
1386                    current_cmd
1387                        .redirections
1388                        .push(Redirection::Output(file.clone()));
1389                }
1390            }
1391            Token::RedirAppend => {
1392                i += 1;
1393                if i < tokens.len()
1394                    && let Token::Word(ref file) = tokens[i]
1395                {
1396                    current_cmd
1397                        .redirections
1398                        .push(Redirection::Append(file.clone()));
1399                }
1400            }
1401            Token::RedirHereDoc(delimiter, quoted) => {
1402                // Store delimiter and quoted flag - content will be read by executor
1403                current_cmd
1404                    .redirections
1405                    .push(Redirection::HereDoc(delimiter.clone(), quoted.to_string()));
1406            }
1407            Token::RedirHereString(content) => {
1408                current_cmd
1409                    .redirections
1410                    .push(Redirection::HereString(content.clone()));
1411            }
1412            // File descriptor redirections
1413            Token::RedirectFdIn(fd, file) => {
1414                current_cmd
1415                    .redirections
1416                    .push(Redirection::FdInput(*fd, file.clone()));
1417            }
1418            Token::RedirectFdOut(fd, file) => {
1419                current_cmd
1420                    .redirections
1421                    .push(Redirection::FdOutput(*fd, file.clone()));
1422            }
1423            Token::RedirectFdAppend(fd, file) => {
1424                current_cmd
1425                    .redirections
1426                    .push(Redirection::FdAppend(*fd, file.clone()));
1427            }
1428            Token::RedirectFdDup(from_fd, to_fd) => {
1429                current_cmd
1430                    .redirections
1431                    .push(Redirection::FdDuplicate(*from_fd, *to_fd));
1432            }
1433            Token::RedirectFdClose(fd) => {
1434                current_cmd.redirections.push(Redirection::FdClose(*fd));
1435            }
1436            Token::RedirectFdInOut(fd, file) => {
1437                current_cmd
1438                    .redirections
1439                    .push(Redirection::FdInputOutput(*fd, file.clone()));
1440            }
1441            Token::RightParen => {
1442                // Check if this looks like a function call pattern: Word LeftParen ... RightParen
1443                // If so, treat it as a function call even if the function doesn't exist
1444                if !current_cmd.args.is_empty()
1445                    && i > 0
1446                    && let Token::LeftParen = tokens[i - 1]
1447                {
1448                    // This looks like a function call pattern, treat as function call
1449                    // For now, we'll handle this in the executor by checking if it's a function
1450                    // If not a function, the executor will handle the error gracefully
1451                    break;
1452                }
1453                return Err("Unexpected ) in pipeline".to_string());
1454            }
1455            Token::Newline => {
1456                // Ignore newlines in pipelines if they follow a pipe or if we are at the start of a stage
1457                if current_cmd.args.is_empty() && current_cmd.compound.is_none() {
1458                    // This newline is between commands or at the start, skip it
1459                } else {
1460                    break;
1461                }
1462            }
1463            Token::Do
1464            | Token::Done
1465            | Token::Then
1466            | Token::Else
1467            | Token::Elif
1468            | Token::Fi
1469            | Token::Esac => {
1470                // These are control flow keywords that should be handled at a higher level
1471                // If we encounter them here, it means we've reached the end of the current command
1472                break;
1473            }
1474            _ => {
1475                return Err(format!("Unexpected token in pipeline: {:?}", token));
1476            }
1477        }
1478        i += 1;
1479    }
1480
1481    if !current_cmd.args.is_empty() || current_cmd.compound.is_some() {
1482        commands.push(current_cmd);
1483    }
1484
1485    if commands.is_empty() {
1486        return Err("No commands found".to_string());
1487    }
1488
1489    Ok(Ast::Pipeline(commands))
1490}
1491
1492fn parse_if(tokens: &[Token]) -> Result<Ast, String> {
1493    let mut i = 1; // Skip 'if'
1494    let mut branches = Vec::new();
1495
1496    loop {
1497        // Parse condition until ; or newline or then
1498        let mut cond_tokens = Vec::new();
1499        while i < tokens.len()
1500            && tokens[i] != Token::Semicolon
1501            && tokens[i] != Token::Newline
1502            && tokens[i] != Token::Then
1503        {
1504            cond_tokens.push(tokens[i].clone());
1505            i += 1;
1506        }
1507
1508        // Skip ; or newline if present
1509        if i < tokens.len() && (tokens[i] == Token::Semicolon || tokens[i] == Token::Newline) {
1510            i += 1;
1511        }
1512
1513        // Skip any additional newlines
1514        skip_newlines(tokens, &mut i);
1515
1516        if i >= tokens.len() || tokens[i] != Token::Then {
1517            return Err("Expected then after if/elif condition".to_string());
1518        }
1519        i += 1; // Skip then
1520
1521        // Skip any newlines after then
1522        while i < tokens.len() && tokens[i] == Token::Newline {
1523            i += 1;
1524        }
1525
1526        // Parse then branch - collect all tokens until we hit else/elif/fi
1527        // We need to handle nested structures properly
1528        let mut then_tokens = Vec::new();
1529        let mut depth = 0;
1530        while i < tokens.len() {
1531            match &tokens[i] {
1532                Token::If => {
1533                    depth += 1;
1534                    then_tokens.push(tokens[i].clone());
1535                }
1536                Token::Fi => {
1537                    if depth > 0 {
1538                        depth -= 1;
1539                        then_tokens.push(tokens[i].clone());
1540                    } else {
1541                        break; // This fi closes our if
1542                    }
1543                }
1544                Token::Else | Token::Elif if depth == 0 => {
1545                    break; // These belong to our if, not nested ones
1546                }
1547                Token::Newline => {
1548                    // Skip newlines but check what comes after
1549                    let mut j = i + 1;
1550                    while j < tokens.len() && tokens[j] == Token::Newline {
1551                        j += 1;
1552                    }
1553                    if j < tokens.len()
1554                        && depth == 0
1555                        && (tokens[j] == Token::Else
1556                            || tokens[j] == Token::Elif
1557                            || tokens[j] == Token::Fi)
1558                    {
1559                        i = j; // Skip to the keyword
1560                        break;
1561                    }
1562                    // Otherwise it's just a newline in the middle of commands
1563                    then_tokens.push(tokens[i].clone());
1564                }
1565                _ => {
1566                    then_tokens.push(tokens[i].clone());
1567                }
1568            }
1569            i += 1;
1570        }
1571
1572        // Skip any trailing newlines
1573        skip_newlines(tokens, &mut i);
1574
1575        let then_ast = if then_tokens.is_empty() {
1576            // Empty then branch - create a no-op
1577            create_empty_body_ast()
1578        } else {
1579            parse_commands_sequentially(&then_tokens)?
1580        };
1581
1582        let condition = parse_slice(&cond_tokens)?;
1583        branches.push((Box::new(condition), Box::new(then_ast)));
1584
1585        // Check next
1586        if i < tokens.len() && tokens[i] == Token::Elif {
1587            i += 1; // Skip elif, continue loop
1588        } else {
1589            break;
1590        }
1591    }
1592
1593    let else_ast = if i < tokens.len() && tokens[i] == Token::Else {
1594        i += 1; // Skip else
1595
1596        // Skip any newlines after else
1597        while i < tokens.len() && tokens[i] == Token::Newline {
1598            i += 1;
1599        }
1600
1601        let mut else_tokens = Vec::new();
1602        let mut depth = 0;
1603        while i < tokens.len() {
1604            match &tokens[i] {
1605                Token::If => {
1606                    depth += 1;
1607                    else_tokens.push(tokens[i].clone());
1608                }
1609                Token::Fi => {
1610                    if depth > 0 {
1611                        depth -= 1;
1612                        else_tokens.push(tokens[i].clone());
1613                    } else {
1614                        break; // This fi closes our if
1615                    }
1616                }
1617                Token::Newline => {
1618                    // Skip newlines but check what comes after
1619                    let mut j = i + 1;
1620                    while j < tokens.len() && tokens[j] == Token::Newline {
1621                        j += 1;
1622                    }
1623                    if j < tokens.len() && depth == 0 && tokens[j] == Token::Fi {
1624                        i = j; // Skip to fi
1625                        break;
1626                    }
1627                    // Otherwise it's just a newline in the middle of commands
1628                    else_tokens.push(tokens[i].clone());
1629                }
1630                _ => {
1631                    else_tokens.push(tokens[i].clone());
1632                }
1633            }
1634            i += 1;
1635        }
1636
1637        let else_ast = if else_tokens.is_empty() {
1638            // Empty else branch - create a no-op
1639            create_empty_body_ast()
1640        } else {
1641            parse_commands_sequentially(&else_tokens)?
1642        };
1643
1644        Some(Box::new(else_ast))
1645    } else {
1646        None
1647    };
1648
1649    if i >= tokens.len() || tokens[i] != Token::Fi {
1650        return Err("Expected fi".to_string());
1651    }
1652
1653    Ok(Ast::If {
1654        branches,
1655        else_branch: else_ast,
1656    })
1657}
1658
1659fn parse_case(tokens: &[Token]) -> Result<Ast, String> {
1660    let mut i = 1; // Skip 'case'
1661
1662    // Parse word
1663    if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
1664        return Err("Expected word after case".to_string());
1665    }
1666    let word = if let Token::Word(ref w) = tokens[i] {
1667        w.clone()
1668    } else {
1669        unreachable!()
1670    };
1671    i += 1;
1672
1673    if i >= tokens.len() || tokens[i] != Token::In {
1674        return Err("Expected in after case word".to_string());
1675    }
1676    i += 1;
1677
1678    let mut cases = Vec::new();
1679    let mut default = None;
1680
1681    loop {
1682        // Skip newlines
1683        while i < tokens.len() && tokens[i] == Token::Newline {
1684            i += 1;
1685        }
1686
1687        if i >= tokens.len() {
1688            return Err("Unexpected end in case statement".to_string());
1689        }
1690
1691        if tokens[i] == Token::Esac {
1692            break;
1693        }
1694
1695        // Parse patterns
1696        let mut patterns = Vec::new();
1697        while i < tokens.len() && tokens[i] != Token::RightParen {
1698            if let Token::Word(ref p) = tokens[i] {
1699                // Split pattern on |
1700                for pat in p.split('|') {
1701                    patterns.push(pat.to_string());
1702                }
1703            } else if tokens[i] == Token::Pipe {
1704                // Skip | separator
1705            } else if tokens[i] == Token::Newline {
1706                // Skip newlines in patterns
1707            } else {
1708                return Err(format!("Expected pattern, found {:?}", tokens[i]));
1709            }
1710            i += 1;
1711        }
1712
1713        if i >= tokens.len() || tokens[i] != Token::RightParen {
1714            return Err("Expected ) after patterns".to_string());
1715        }
1716        i += 1;
1717
1718        // Parse commands
1719        let mut commands_tokens = Vec::new();
1720        while i < tokens.len() && tokens[i] != Token::DoubleSemicolon && tokens[i] != Token::Esac {
1721            commands_tokens.push(tokens[i].clone());
1722            i += 1;
1723        }
1724
1725        let commands_ast = parse_slice(&commands_tokens)?;
1726
1727        if i >= tokens.len() {
1728            return Err("Unexpected end in case statement".to_string());
1729        }
1730
1731        if tokens[i] == Token::DoubleSemicolon {
1732            i += 1;
1733            // Check if this is the default case (*)
1734            if patterns.len() == 1 && patterns[0] == "*" {
1735                default = Some(Box::new(commands_ast));
1736            } else {
1737                cases.push((patterns, commands_ast));
1738            }
1739        } else if tokens[i] == Token::Esac {
1740            // Last case without ;;
1741            if patterns.len() == 1 && patterns[0] == "*" {
1742                default = Some(Box::new(commands_ast));
1743            } else {
1744                cases.push((patterns, commands_ast));
1745            }
1746            break;
1747        } else {
1748            return Err("Expected ;; or esac after commands".to_string());
1749        }
1750    }
1751
1752    Ok(Ast::Case {
1753        word,
1754        cases,
1755        default,
1756    })
1757}
1758
1759fn parse_for(tokens: &[Token]) -> Result<Ast, String> {
1760    let mut i = 1; // Skip 'for'
1761
1762    // Parse variable name
1763    if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
1764        return Err("Expected variable name after for".to_string());
1765    }
1766    let variable = if let Token::Word(ref v) = tokens[i] {
1767        v.clone()
1768    } else {
1769        unreachable!()
1770    };
1771    i += 1;
1772
1773    // Expect 'in'
1774    if i >= tokens.len() || tokens[i] != Token::In {
1775        return Err("Expected 'in' after for variable".to_string());
1776    }
1777    i += 1;
1778
1779    // Parse items until we hit 'do' or semicolon/newline
1780    let mut items = Vec::new();
1781    while i < tokens.len() {
1782        match &tokens[i] {
1783            Token::Do => break,
1784            Token::Semicolon | Token::Newline => {
1785                i += 1;
1786                // Check if next token is 'do'
1787                if i < tokens.len() && tokens[i] == Token::Do {
1788                    break;
1789                }
1790            }
1791            Token::Word(word) => {
1792                items.push(word.clone());
1793                i += 1;
1794            }
1795            _ => {
1796                return Err(format!("Unexpected token in for items: {:?}", tokens[i]));
1797            }
1798        }
1799    }
1800
1801    // Skip any newlines before 'do'
1802    while i < tokens.len() && tokens[i] == Token::Newline {
1803        i += 1;
1804    }
1805
1806    // Expect 'do'
1807    if i >= tokens.len() || tokens[i] != Token::Do {
1808        return Err("Expected 'do' in for loop".to_string());
1809    }
1810    i += 1;
1811
1812    // Skip any newlines after 'do'
1813    while i < tokens.len() && tokens[i] == Token::Newline {
1814        i += 1;
1815    }
1816
1817    // Parse body until 'done'
1818    let mut body_tokens = Vec::new();
1819    let mut depth = 0;
1820    while i < tokens.len() {
1821        match &tokens[i] {
1822            Token::For => {
1823                depth += 1;
1824                body_tokens.push(tokens[i].clone());
1825            }
1826            Token::Done => {
1827                if depth > 0 {
1828                    depth -= 1;
1829                    body_tokens.push(tokens[i].clone());
1830                } else {
1831                    break; // This done closes our for loop
1832                }
1833            }
1834            Token::Newline => {
1835                // Skip newlines but check what comes after
1836                let mut j = i + 1;
1837                while j < tokens.len() && tokens[j] == Token::Newline {
1838                    j += 1;
1839                }
1840                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
1841                    i = j; // Skip to done
1842                    break;
1843                }
1844                // Otherwise it's just a newline in the middle of commands
1845                body_tokens.push(tokens[i].clone());
1846            }
1847            _ => {
1848                body_tokens.push(tokens[i].clone());
1849            }
1850        }
1851        i += 1;
1852    }
1853
1854    if i >= tokens.len() || tokens[i] != Token::Done {
1855        return Err("Expected 'done' to close for loop".to_string());
1856    }
1857
1858    // Parse the body
1859    let body_ast = if body_tokens.is_empty() {
1860        // Empty body - create a no-op
1861        create_empty_body_ast()
1862    } else {
1863        parse_commands_sequentially(&body_tokens)?
1864    };
1865
1866    Ok(Ast::For {
1867        variable,
1868        items,
1869        body: Box::new(body_ast),
1870    })
1871}
1872
1873fn parse_while(tokens: &[Token]) -> Result<Ast, String> {
1874    let mut i = 1; // Skip 'while'
1875
1876    // Parse condition until we hit 'do' or semicolon/newline
1877    let mut cond_tokens = Vec::new();
1878    while i < tokens.len() {
1879        match &tokens[i] {
1880            Token::Do => break,
1881            Token::Semicolon | Token::Newline => {
1882                i += 1;
1883                // Check if next token is 'do'
1884                if i < tokens.len() && tokens[i] == Token::Do {
1885                    break;
1886                }
1887            }
1888            _ => {
1889                cond_tokens.push(tokens[i].clone());
1890                i += 1;
1891            }
1892        }
1893    }
1894
1895    if cond_tokens.is_empty() {
1896        return Err("Expected condition after while".to_string());
1897    }
1898
1899    // Skip any newlines before 'do'
1900    while i < tokens.len() && tokens[i] == Token::Newline {
1901        i += 1;
1902    }
1903
1904    // Expect 'do'
1905    if i >= tokens.len() || tokens[i] != Token::Do {
1906        return Err("Expected 'do' in while loop".to_string());
1907    }
1908    i += 1;
1909
1910    // Skip any newlines after 'do'
1911    while i < tokens.len() && tokens[i] == Token::Newline {
1912        i += 1;
1913    }
1914
1915    // Parse body until 'done'
1916    let mut body_tokens = Vec::new();
1917    let mut depth = 0;
1918    while i < tokens.len() {
1919        match &tokens[i] {
1920            Token::While | Token::For => {
1921                depth += 1;
1922                body_tokens.push(tokens[i].clone());
1923            }
1924            Token::Done => {
1925                if depth > 0 {
1926                    depth -= 1;
1927                    body_tokens.push(tokens[i].clone());
1928                } else {
1929                    break; // This done closes our while loop
1930                }
1931            }
1932            Token::Newline => {
1933                // Skip newlines but check what comes after
1934                let mut j = i + 1;
1935                while j < tokens.len() && tokens[j] == Token::Newline {
1936                    j += 1;
1937                }
1938                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
1939                    i = j; // Skip to done
1940                    break;
1941                }
1942                // Otherwise it's just a newline in the middle of commands
1943                body_tokens.push(tokens[i].clone());
1944            }
1945            _ => {
1946                body_tokens.push(tokens[i].clone());
1947            }
1948        }
1949        i += 1;
1950    }
1951
1952    if i >= tokens.len() || tokens[i] != Token::Done {
1953        return Err("Expected 'done' to close while loop".to_string());
1954    }
1955
1956    // Parse the condition
1957    let condition_ast = parse_slice(&cond_tokens)?;
1958
1959    // Parse the body
1960    let body_ast = if body_tokens.is_empty() {
1961        // Empty body - create a no-op
1962        create_empty_body_ast()
1963    } else {
1964        parse_commands_sequentially(&body_tokens)?
1965    };
1966
1967    Ok(Ast::While {
1968        condition: Box::new(condition_ast),
1969        body: Box::new(body_ast),
1970    })
1971}
1972
/// Parses a shell function definition: `name() { body }`.
///
/// Two token shapes are accepted:
/// - standard: `Word(name) LeftParen RightParen LeftBrace ... RightBrace`
/// - legacy: the lexer fused the parens into the name word, i.e.
///   `Word("name()") LeftBrace ... RightBrace`
///
/// The matching closing brace is located by a manual scan that skips over
/// nested function definitions and control structures (if/for/while/case)
/// via the skip_to_matching_* helpers, so braces inside those constructs
/// cannot end the body early.
///
/// Returns `Ast::FunctionDefinition` on success, or a descriptive error.
fn parse_function_definition(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.len() < 2 {
        return Err("Function definition too short".to_string());
    }

    // Extract function name from first token
    let func_name = if let Token::Word(word) = &tokens[0] {
        // Handle legacy format with parentheses in the word (e.g., "legacyfunc()"):
        // strip everything from the first '(' when the word ends with ')'.
        if let Some(paren_pos) = word.find('(') {
            if word.ends_with(')') && paren_pos > 0 {
                word[..paren_pos].to_string()
            } else {
                // '(' present but not a trailing "()" pair — keep the word as-is.
                word.clone()
            }
        } else {
            word.clone()
        }
    } else {
        return Err("Function name must be a word".to_string());
    };

    // Locate the opening brace; brace_pos is its index in `tokens`.
    let brace_pos =
        if tokens.len() >= 4 && tokens[1] == Token::LeftParen && tokens[2] == Token::RightParen {
            // Standard format: name ( ) { ...  — brace must follow the parens
            if tokens[3] != Token::LeftBrace {
                return Err("Expected { after function name".to_string());
            }
            3
        } else if tokens.len() >= 2 && tokens[1] == Token::LeftBrace {
            // Legacy format: parens were part of the name word ("name()"),
            // so the brace comes directly after it
            1
        } else {
            return Err("Expected ( after function name or { for legacy format".to_string());
        };

    // Find the matching closing brace, accounting for nested function definitions and control structures
    let mut brace_depth = 0;
    let mut body_end = 0;
    let mut found_closing = false;
    let mut i = brace_pos + 1;

    while i < tokens.len() {
        // Check if this is the start of a nested function definition
        // Pattern: Word LeftParen RightParen LeftBrace
        if i + 3 < tokens.len()
            && matches!(&tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            // This is a nested function - skip over it entirely
            // Skip to after the opening brace of nested function
            i += 4;
            let mut nested_depth = 1;
            // Consume tokens until the nested function's braces balance out.
            while i < tokens.len() && nested_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => nested_depth += 1,
                    Token::RightBrace => nested_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            // Don't increment i again - continue from current position
            continue;
        }

        match &tokens[i] {
            Token::LeftBrace => {
                brace_depth += 1;
                i += 1;
            }
            Token::RightBrace => {
                if brace_depth == 0 {
                    // This is our matching closing brace
                    body_end = i;
                    found_closing = true;
                    break;
                } else {
                    brace_depth -= 1;
                    i += 1;
                }
            }
            Token::If => {
                // Skip to matching fi so braces inside the if can't end our body
                skip_to_matching_fi(tokens, &mut i);
            }
            Token::For | Token::While => {
                // Skip to matching done
                skip_to_matching_done(tokens, &mut i);
            }
            Token::Case => {
                // Skip to matching esac
                skip_to_matching_esac(tokens, &mut i);
            }
            _ => {
                i += 1;
            }
        }
    }

    if !found_closing {
        return Err("Missing closing } for function definition".to_string());
    }

    // Extract body tokens (everything between { and })
    let body_tokens = &tokens[brace_pos + 1..body_end];

    // Parse the function body using the existing parser
    let body_ast = if body_tokens.is_empty() {
        // Empty function body becomes a no-op
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(body_tokens)?
    };

    Ok(Ast::FunctionDefinition {
        name: func_name,
        body: Box::new(body_ast),
    })
}
2094
2095#[cfg(test)]
2096mod tests {
2097    use super::super::lexer::Token;
2098    use super::*;
2099
2100    #[test]
2101    fn test_single_command() {
2102        let tokens = vec![Token::Word("ls".to_string())];
2103        let result = parse(tokens).unwrap();
2104        assert_eq!(
2105            result,
2106            Ast::Pipeline(vec![ShellCommand {
2107                args: vec!["ls".to_string()],
2108                redirections: Vec::new(),
2109                compound: None,
2110            }])
2111        );
2112    }
2113
2114    #[test]
2115    fn test_command_with_args() {
2116        let tokens = vec![
2117            Token::Word("ls".to_string()),
2118            Token::Word("-la".to_string()),
2119        ];
2120        let result = parse(tokens).unwrap();
2121        assert_eq!(
2122            result,
2123            Ast::Pipeline(vec![ShellCommand {
2124                args: vec!["ls".to_string(), "-la".to_string()],
2125                redirections: Vec::new(),
2126                compound: None,
2127            }])
2128        );
2129    }
2130
2131    #[test]
2132    fn test_pipeline() {
2133        let tokens = vec![
2134            Token::Word("ls".to_string()),
2135            Token::Pipe,
2136            Token::Word("grep".to_string()),
2137            Token::Word("txt".to_string()),
2138        ];
2139        let result = parse(tokens).unwrap();
2140        assert_eq!(
2141            result,
2142            Ast::Pipeline(vec![
2143                ShellCommand {
2144                    args: vec!["ls".to_string()],
2145                    redirections: Vec::new(),
2146                    compound: None,
2147                },
2148                ShellCommand {
2149                    args: vec!["grep".to_string(), "txt".to_string()],
2150                    redirections: Vec::new(),
2151                    compound: None,
2152                }
2153            ])
2154        );
2155    }
2156
2157    #[test]
2158    fn test_input_redirection() {
2159        let tokens = vec![
2160            Token::Word("cat".to_string()),
2161            Token::RedirIn,
2162            Token::Word("input.txt".to_string()),
2163        ];
2164        let result = parse(tokens).unwrap();
2165        assert_eq!(
2166            result,
2167            Ast::Pipeline(vec![ShellCommand {
2168                args: vec!["cat".to_string()],
2169                redirections: vec![Redirection::Input("input.txt".to_string())],
2170                compound: None,
2171            }])
2172        );
2173    }
2174
2175    #[test]
2176    fn test_output_redirection() {
2177        let tokens = vec![
2178            Token::Word("printf".to_string()),
2179            Token::Word("hello".to_string()),
2180            Token::RedirOut,
2181            Token::Word("output.txt".to_string()),
2182        ];
2183        let result = parse(tokens).unwrap();
2184        assert_eq!(
2185            result,
2186            Ast::Pipeline(vec![ShellCommand {
2187                args: vec!["printf".to_string(), "hello".to_string()],
2188                compound: None,
2189                redirections: vec![Redirection::Output("output.txt".to_string())],
2190            }])
2191        );
2192    }
2193
2194    #[test]
2195    fn test_append_redirection() {
2196        let tokens = vec![
2197            Token::Word("printf".to_string()),
2198            Token::Word("hello".to_string()),
2199            Token::RedirAppend,
2200            Token::Word("output.txt".to_string()),
2201        ];
2202        let result = parse(tokens).unwrap();
2203        assert_eq!(
2204            result,
2205            Ast::Pipeline(vec![ShellCommand {
2206                args: vec!["printf".to_string(), "hello".to_string()],
2207                compound: None,
2208                redirections: vec![Redirection::Append("output.txt".to_string())],
2209            }])
2210        );
2211    }
2212
2213    #[test]
2214    fn test_complex_pipeline_with_redirections() {
2215        let tokens = vec![
2216            Token::Word("cat".to_string()),
2217            Token::RedirIn,
2218            Token::Word("input.txt".to_string()),
2219            Token::Pipe,
2220            Token::Word("grep".to_string()),
2221            Token::Word("pattern".to_string()),
2222            Token::Pipe,
2223            Token::Word("sort".to_string()),
2224            Token::RedirOut,
2225            Token::Word("output.txt".to_string()),
2226        ];
2227        let result = parse(tokens).unwrap();
2228        assert_eq!(
2229            result,
2230            Ast::Pipeline(vec![
2231                ShellCommand {
2232                    args: vec!["cat".to_string()],
2233                    compound: None,
2234                    redirections: vec![Redirection::Input("input.txt".to_string())],
2235                },
2236                ShellCommand {
2237                    args: vec!["grep".to_string(), "pattern".to_string()],
2238                    compound: None,
2239                    redirections: Vec::new(),
2240                },
2241                ShellCommand {
2242                    args: vec!["sort".to_string()],
2243                    redirections: vec![Redirection::Output("output.txt".to_string())],
2244                    compound: None,
2245                }
2246            ])
2247        );
2248    }
2249
2250    #[test]
2251    fn test_empty_tokens() {
2252        let tokens = vec![];
2253        let result = parse(tokens);
2254        assert!(result.is_err());
2255        assert_eq!(result.unwrap_err(), "No commands found");
2256    }
2257
2258    #[test]
2259    fn test_only_pipe() {
2260        let tokens = vec![Token::Pipe];
2261        let result = parse(tokens);
2262        assert!(result.is_err());
2263        assert_eq!(result.unwrap_err(), "No commands found");
2264    }
2265
2266    #[test]
2267    fn test_redirection_without_file() {
2268        // Parser doesn't check for missing file, just skips if no token after
2269        let tokens = vec![Token::Word("cat".to_string()), Token::RedirIn];
2270        let result = parse(tokens).unwrap();
2271        assert_eq!(
2272            result,
2273            Ast::Pipeline(vec![ShellCommand {
2274                args: vec!["cat".to_string()],
2275                compound: None,
2276                redirections: Vec::new(),
2277            }])
2278        );
2279    }
2280
2281    #[test]
2282    fn test_multiple_redirections() {
2283        let tokens = vec![
2284            Token::Word("cat".to_string()),
2285            Token::RedirIn,
2286            Token::Word("file1.txt".to_string()),
2287            Token::RedirOut,
2288            Token::Word("file2.txt".to_string()),
2289        ];
2290        let result = parse(tokens).unwrap();
2291        assert_eq!(
2292            result,
2293            Ast::Pipeline(vec![ShellCommand {
2294                args: vec!["cat".to_string()],
2295                redirections: vec![
2296                    Redirection::Input("file1.txt".to_string()),
2297                    Redirection::Output("file2.txt".to_string()),
2298                ],
2299                compound: None,
2300            }])
2301        );
2302    }
2303
2304    #[test]
2305    fn test_parse_if() {
2306        let tokens = vec![
2307            Token::If,
2308            Token::Word("true".to_string()),
2309            Token::Semicolon,
2310            Token::Then,
2311            Token::Word("printf".to_string()),
2312            Token::Word("yes".to_string()),
2313            Token::Semicolon,
2314            Token::Fi,
2315        ];
2316        let result = parse(tokens).unwrap();
2317        if let Ast::If {
2318            branches,
2319            else_branch,
2320        } = result
2321        {
2322            assert_eq!(branches.len(), 1);
2323            let (condition, then_branch) = &branches[0];
2324            if let Ast::Pipeline(cmds) = &**condition {
2325                assert_eq!(cmds[0].args, vec!["true"]);
2326            } else {
2327                panic!("condition not pipeline");
2328            }
2329            if let Ast::Pipeline(cmds) = &**then_branch {
2330                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
2331            } else {
2332                panic!("then_branch not pipeline");
2333            }
2334            assert!(else_branch.is_none());
2335        } else {
2336            panic!("not if");
2337        }
2338    }
2339
2340    #[test]
2341    fn test_parse_if_elif() {
2342        let tokens = vec![
2343            Token::If,
2344            Token::Word("false".to_string()),
2345            Token::Semicolon,
2346            Token::Then,
2347            Token::Word("printf".to_string()),
2348            Token::Word("no".to_string()),
2349            Token::Semicolon,
2350            Token::Elif,
2351            Token::Word("true".to_string()),
2352            Token::Semicolon,
2353            Token::Then,
2354            Token::Word("printf".to_string()),
2355            Token::Word("yes".to_string()),
2356            Token::Semicolon,
2357            Token::Fi,
2358        ];
2359        let result = parse(tokens).unwrap();
2360        if let Ast::If {
2361            branches,
2362            else_branch,
2363        } = result
2364        {
2365            assert_eq!(branches.len(), 2);
2366            // First branch: false -> printf no
2367            let (condition1, then1) = &branches[0];
2368            if let Ast::Pipeline(cmds) = &**condition1 {
2369                assert_eq!(cmds[0].args, vec!["false"]);
2370            }
2371            if let Ast::Pipeline(cmds) = &**then1 {
2372                assert_eq!(cmds[0].args, vec!["printf", "no"]);
2373            }
2374            // Second branch: true -> printf yes
2375            let (condition2, then2) = &branches[1];
2376            if let Ast::Pipeline(cmds) = &**condition2 {
2377                assert_eq!(cmds[0].args, vec!["true"]);
2378            }
2379            if let Ast::Pipeline(cmds) = &**then2 {
2380                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
2381            }
2382            assert!(else_branch.is_none());
2383        } else {
2384            panic!("not if");
2385        }
2386    }
2387
2388    #[test]
2389    fn test_parse_assignment() {
2390        let tokens = vec![Token::Word("MY_VAR=test_value".to_string())];
2391        let result = parse(tokens).unwrap();
2392        if let Ast::Assignment { var, value } = result {
2393            assert_eq!(var, "MY_VAR");
2394            assert_eq!(value, "test_value");
2395        } else {
2396            panic!("not assignment");
2397        }
2398    }
2399
2400    #[test]
2401    fn test_parse_assignment_quoted() {
2402        let tokens = vec![Token::Word("MY_VAR=hello world".to_string())];
2403        let result = parse(tokens).unwrap();
2404        if let Ast::Assignment { var, value } = result {
2405            assert_eq!(var, "MY_VAR");
2406            assert_eq!(value, "hello world");
2407        } else {
2408            panic!("not assignment");
2409        }
2410    }
2411
2412    #[test]
2413    fn test_parse_assignment_invalid() {
2414        // Variable name starting with number should not be parsed as assignment
2415        let tokens = vec![Token::Word("123VAR=value".to_string())];
2416        let result = parse(tokens).unwrap();
2417        if let Ast::Pipeline(cmds) = result {
2418            assert_eq!(cmds[0].args, vec!["123VAR=value"]);
2419        } else {
2420            panic!("should be parsed as pipeline");
2421        }
2422    }
2423
2424    #[test]
2425    fn test_parse_function_definition() {
2426        let tokens = vec![
2427            Token::Word("myfunc".to_string()),
2428            Token::LeftParen,
2429            Token::RightParen,
2430            Token::LeftBrace,
2431            Token::Word("echo".to_string()),
2432            Token::Word("hello".to_string()),
2433            Token::RightBrace,
2434        ];
2435        let result = parse(tokens).unwrap();
2436        if let Ast::FunctionDefinition { name, body } = result {
2437            assert_eq!(name, "myfunc");
2438            // Body should be a pipeline with echo hello
2439            if let Ast::Pipeline(cmds) = *body {
2440                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
2441            } else {
2442                panic!("function body should be a pipeline");
2443            }
2444        } else {
2445            panic!("should be parsed as function definition");
2446        }
2447    }
2448
2449    #[test]
2450    fn test_parse_function_definition_empty() {
2451        let tokens = vec![
2452            Token::Word("emptyfunc".to_string()),
2453            Token::LeftParen,
2454            Token::RightParen,
2455            Token::LeftBrace,
2456            Token::RightBrace,
2457        ];
2458        let result = parse(tokens).unwrap();
2459        if let Ast::FunctionDefinition { name, body } = result {
2460            assert_eq!(name, "emptyfunc");
2461            // Empty body should default to true command
2462            if let Ast::Pipeline(cmds) = *body {
2463                assert_eq!(cmds[0].args, vec!["true"]);
2464            } else {
2465                panic!("function body should be a pipeline");
2466            }
2467        } else {
2468            panic!("should be parsed as function definition");
2469        }
2470    }
2471
2472    #[test]
2473    fn test_parse_function_definition_legacy_format() {
2474        // Test backward compatibility with parentheses in the function name
2475        let tokens = vec![
2476            Token::Word("legacyfunc()".to_string()),
2477            Token::LeftBrace,
2478            Token::Word("echo".to_string()),
2479            Token::Word("hello".to_string()),
2480            Token::RightBrace,
2481        ];
2482        let result = parse(tokens).unwrap();
2483        if let Ast::FunctionDefinition { name, body } = result {
2484            assert_eq!(name, "legacyfunc");
2485            // Body should be a pipeline with echo hello
2486            if let Ast::Pipeline(cmds) = *body {
2487                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
2488            } else {
2489                panic!("function body should be a pipeline");
2490            }
2491        } else {
2492            panic!("should be parsed as function definition");
2493        }
2494    }
2495
2496    #[test]
2497    fn test_parse_local_assignment() {
2498        let tokens = vec![Token::Local, Token::Word("MY_VAR=test_value".to_string())];
2499        let result = parse(tokens).unwrap();
2500        if let Ast::LocalAssignment { var, value } = result {
2501            assert_eq!(var, "MY_VAR");
2502            assert_eq!(value, "test_value");
2503        } else {
2504            panic!("should be parsed as local assignment");
2505        }
2506    }
2507
2508    #[test]
2509    fn test_parse_local_assignment_separate_tokens() {
2510        let tokens = vec![
2511            Token::Local,
2512            Token::Word("MY_VAR".to_string()),
2513            Token::Word("test_value".to_string()),
2514        ];
2515        let result = parse(tokens).unwrap();
2516        if let Ast::LocalAssignment { var, value } = result {
2517            assert_eq!(var, "MY_VAR");
2518            assert_eq!(value, "test_value");
2519        } else {
2520            panic!("should be parsed as local assignment");
2521        }
2522    }
2523
2524    #[test]
2525    fn test_parse_local_assignment_invalid_var_name() {
2526        // Variable name starting with number should not be parsed as local assignment
2527        let tokens = vec![Token::Local, Token::Word("123VAR=value".to_string())];
2528        let result = parse(tokens);
2529        // Should return an error since 123VAR is not a valid variable name
2530        assert!(result.is_err());
2531    }
2532
2533    #[test]
2534    fn test_parse_here_document_redirection() {
2535        let tokens = vec![
2536            Token::Word("cat".to_string()),
2537            Token::RedirHereDoc("EOF".to_string(), false),
2538        ];
2539        let result = parse(tokens).unwrap();
2540        assert_eq!(
2541            result,
2542            Ast::Pipeline(vec![ShellCommand {
2543                args: vec!["cat".to_string()],
2544                redirections: vec![Redirection::HereDoc("EOF".to_string(), "false".to_string())],
2545                compound: None,
2546            }])
2547        );
2548    }
2549
2550    #[test]
2551    fn test_parse_here_string_redirection() {
2552        let tokens = vec![
2553            Token::Word("grep".to_string()),
2554            Token::RedirHereString("pattern".to_string()),
2555        ];
2556        let result = parse(tokens).unwrap();
2557        assert_eq!(
2558            result,
2559            Ast::Pipeline(vec![ShellCommand {
2560                args: vec!["grep".to_string()],
2561                compound: None,
2562                redirections: vec![Redirection::HereString("pattern".to_string())],
2563            }])
2564        );
2565    }
2566
2567    #[test]
2568    fn test_parse_mixed_redirections() {
2569        let tokens = vec![
2570            Token::Word("cat".to_string()),
2571            Token::RedirIn,
2572            Token::Word("file.txt".to_string()),
2573            Token::RedirHereString("fallback".to_string()),
2574            Token::RedirOut,
2575            Token::Word("output.txt".to_string()),
2576        ];
2577        let result = parse(tokens).unwrap();
2578        assert_eq!(
2579            result,
2580            Ast::Pipeline(vec![ShellCommand {
2581                args: vec!["cat".to_string()],
2582                compound: None,
2583                redirections: vec![
2584                    Redirection::Input("file.txt".to_string()),
2585                    Redirection::HereString("fallback".to_string()),
2586                    Redirection::Output("output.txt".to_string()),
2587                ],
2588            }])
2589        );
2590    }
2591
2592    // ===== File Descriptor Redirection Tests =====
2593
2594    #[test]
2595    fn test_parse_fd_input_redirection() {
2596        let tokens = vec![
2597            Token::Word("command".to_string()),
2598            Token::RedirectFdIn(3, "input.txt".to_string()),
2599        ];
2600        let result = parse(tokens).unwrap();
2601        assert_eq!(
2602            result,
2603            Ast::Pipeline(vec![ShellCommand {
2604                args: vec!["command".to_string()],
2605                redirections: vec![Redirection::FdInput(3, "input.txt".to_string())],
2606                compound: None,
2607            }])
2608        );
2609    }
2610
2611    #[test]
2612    fn test_parse_fd_output_redirection() {
2613        let tokens = vec![
2614            Token::Word("command".to_string()),
2615            Token::RedirectFdOut(2, "errors.log".to_string()),
2616        ];
2617        let result = parse(tokens).unwrap();
2618        assert_eq!(
2619            result,
2620            Ast::Pipeline(vec![ShellCommand {
2621                args: vec!["command".to_string()],
2622                compound: None,
2623                redirections: vec![Redirection::FdOutput(2, "errors.log".to_string())],
2624            }])
2625        );
2626    }
2627
2628    #[test]
2629    fn test_parse_fd_append_redirection() {
2630        let tokens = vec![
2631            Token::Word("command".to_string()),
2632            Token::RedirectFdAppend(2, "errors.log".to_string()),
2633        ];
2634        let result = parse(tokens).unwrap();
2635        assert_eq!(
2636            result,
2637            Ast::Pipeline(vec![ShellCommand {
2638                args: vec!["command".to_string()],
2639                compound: None,
2640                redirections: vec![Redirection::FdAppend(2, "errors.log".to_string())],
2641            }])
2642        );
2643    }
2644
2645    #[test]
2646    fn test_parse_fd_duplicate() {
2647        let tokens = vec![
2648            Token::Word("command".to_string()),
2649            Token::RedirectFdDup(2, 1),
2650        ];
2651        let result = parse(tokens).unwrap();
2652        assert_eq!(
2653            result,
2654            Ast::Pipeline(vec![ShellCommand {
2655                args: vec!["command".to_string()],
2656                compound: None,
2657                redirections: vec![Redirection::FdDuplicate(2, 1)],
2658            }])
2659        );
2660    }
2661
2662    #[test]
2663    fn test_parse_fd_close() {
2664        let tokens = vec![
2665            Token::Word("command".to_string()),
2666            Token::RedirectFdClose(2),
2667        ];
2668        let result = parse(tokens).unwrap();
2669        assert_eq!(
2670            result,
2671            Ast::Pipeline(vec![ShellCommand {
2672                args: vec!["command".to_string()],
2673                compound: None,
2674                redirections: vec![Redirection::FdClose(2)],
2675            }])
2676        );
2677    }
2678
2679    #[test]
2680    fn test_parse_fd_input_output() {
2681        let tokens = vec![
2682            Token::Word("command".to_string()),
2683            Token::RedirectFdInOut(3, "file.txt".to_string()),
2684        ];
2685        let result = parse(tokens).unwrap();
2686        assert_eq!(
2687            result,
2688            Ast::Pipeline(vec![ShellCommand {
2689                args: vec!["command".to_string()],
2690                compound: None,
2691                redirections: vec![Redirection::FdInputOutput(3, "file.txt".to_string())],
2692            }])
2693        );
2694    }
2695
2696    #[test]
2697    fn test_parse_multiple_fd_redirections() {
2698        let tokens = vec![
2699            Token::Word("command".to_string()),
2700            Token::RedirectFdOut(2, "err.log".to_string()),
2701            Token::RedirectFdIn(3, "input.txt".to_string()),
2702            Token::RedirectFdAppend(4, "append.log".to_string()),
2703        ];
2704        let result = parse(tokens).unwrap();
2705        assert_eq!(
2706            result,
2707            Ast::Pipeline(vec![ShellCommand {
2708                args: vec!["command".to_string()],
2709                compound: None,
2710                redirections: vec![
2711                    Redirection::FdOutput(2, "err.log".to_string()),
2712                    Redirection::FdInput(3, "input.txt".to_string()),
2713                    Redirection::FdAppend(4, "append.log".to_string()),
2714                ],
2715            }])
2716        );
2717    }
2718
2719    #[test]
2720    fn test_parse_fd_swap_pattern() {
2721        let tokens = vec![
2722            Token::Word("command".to_string()),
2723            Token::RedirectFdDup(3, 1),
2724            Token::RedirectFdDup(1, 2),
2725            Token::RedirectFdDup(2, 3),
2726            Token::RedirectFdClose(3),
2727        ];
2728        let result = parse(tokens).unwrap();
2729        assert_eq!(
2730            result,
2731            Ast::Pipeline(vec![ShellCommand {
2732                args: vec!["command".to_string()],
2733                redirections: vec![
2734                    Redirection::FdDuplicate(3, 1),
2735                    Redirection::FdDuplicate(1, 2),
2736                    Redirection::FdDuplicate(2, 3),
2737                    Redirection::FdClose(3),
2738                ],
2739                compound: None,
2740            }])
2741        );
2742    }
2743
2744    #[test]
2745    fn test_parse_mixed_basic_and_fd_redirections() {
2746        let tokens = vec![
2747            Token::Word("command".to_string()),
2748            Token::RedirOut,
2749            Token::Word("output.txt".to_string()),
2750            Token::RedirectFdDup(2, 1),
2751        ];
2752        let result = parse(tokens).unwrap();
2753        assert_eq!(
2754            result,
2755            Ast::Pipeline(vec![ShellCommand {
2756                args: vec!["command".to_string()],
2757                redirections: vec![
2758                    Redirection::Output("output.txt".to_string()),
2759                    Redirection::FdDuplicate(2, 1),
2760                ],
2761                compound: None,
2762            }])
2763        );
2764    }
2765
2766    #[test]
2767    fn test_parse_fd_redirection_ordering() {
2768        // Test that redirections are preserved in left-to-right order
2769        let tokens = vec![
2770            Token::Word("command".to_string()),
2771            Token::RedirectFdOut(2, "first.log".to_string()),
2772            Token::RedirOut,
2773            Token::Word("second.txt".to_string()),
2774            Token::RedirectFdDup(2, 1),
2775        ];
2776        let result = parse(tokens).unwrap();
2777        assert_eq!(
2778            result,
2779            Ast::Pipeline(vec![ShellCommand {
2780                args: vec!["command".to_string()],
2781                redirections: vec![
2782                    Redirection::FdOutput(2, "first.log".to_string()),
2783                    Redirection::Output("second.txt".to_string()),
2784                    Redirection::FdDuplicate(2, 1),
2785                ],
2786                compound: None,
2787            }])
2788        );
2789    }
2790
2791    #[test]
2792    fn test_parse_fd_redirection_with_pipe() {
2793        let tokens = vec![
2794            Token::Word("command".to_string()),
2795            Token::RedirectFdDup(2, 1),
2796            Token::Pipe,
2797            Token::Word("grep".to_string()),
2798            Token::Word("error".to_string()),
2799        ];
2800        let result = parse(tokens).unwrap();
2801        assert_eq!(
2802            result,
2803            Ast::Pipeline(vec![
2804                ShellCommand {
2805                    args: vec!["command".to_string()],
2806                    redirections: vec![Redirection::FdDuplicate(2, 1)],
2807                    compound: None,
2808                },
2809                ShellCommand {
2810                    args: vec!["grep".to_string(), "error".to_string()],
2811                    compound: None,
2812                    redirections: Vec::new(),
2813                }
2814            ])
2815        );
2816    }
2817
2818    #[test]
2819    fn test_parse_all_fd_numbers() {
2820        // Test fd 0
2821        let tokens = vec![
2822            Token::Word("cmd".to_string()),
2823            Token::RedirectFdIn(0, "file".to_string()),
2824        ];
2825        let result = parse(tokens).unwrap();
2826        if let Ast::Pipeline(cmds) = result {
2827            assert_eq!(
2828                cmds[0].redirections[0],
2829                Redirection::FdInput(0, "file".to_string())
2830            );
2831        } else {
2832            panic!("Expected Pipeline");
2833        }
2834
2835        // Test fd 9
2836        let tokens = vec![
2837            Token::Word("cmd".to_string()),
2838            Token::RedirectFdOut(9, "file".to_string()),
2839        ];
2840        let result = parse(tokens).unwrap();
2841        if let Ast::Pipeline(cmds) = result {
2842            assert_eq!(
2843                cmds[0].redirections[0],
2844                Redirection::FdOutput(9, "file".to_string())
2845            );
2846        } else {
2847            panic!("Expected Pipeline");
2848        }
2849    }
2850}