// rush_sh/parser.rs — parses lexer tokens into a shell AST

1use super::lexer::Token;
2
/// Abstract syntax tree produced by the parser.
///
/// Each variant corresponds to one shell construct recognized by this module.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Ast {
    /// One or more commands connected by `|`.
    Pipeline(Vec<ShellCommand>),
    /// Commands executed one after another (newline/`;` separated).
    Sequence(Vec<Ast>),
    /// Shell variable assignment: `VAR=VALUE`.
    Assignment {
        var: String,
        value: String,
    },
    /// Function-scoped assignment: `local VAR=VALUE`.
    LocalAssignment {
        var: String,
        value: String,
    },
    /// `if`/`elif`/`else`/`fi` statement.
    If {
        branches: Vec<(Box<Ast>, Box<Ast>)>, // (condition, then_branch)
        else_branch: Option<Box<Ast>>,
    },
    /// `case WORD in … esac` statement.
    Case {
        word: String,
        // Each entry: the patterns for one arm and the arm's body.
        cases: Vec<(Vec<String>, Ast)>,
        // Body of the `*)` arm, if present.
        default: Option<Box<Ast>>,
    },
    /// `for VAR in ITEMS…; do BODY; done` loop.
    For {
        variable: String,
        items: Vec<String>,
        body: Box<Ast>,
    },
    /// `while CONDITION; do BODY; done` loop.
    While {
        condition: Box<Ast>,
        body: Box<Ast>,
    },
    /// `name() { BODY }` definition.
    FunctionDefinition {
        name: String,
        body: Box<Ast>,
    },
    /// Invocation of a previously defined function.
    FunctionCall {
        name: String,
        args: Vec<String>,
    },
    /// `return [VALUE]`; `None` means no explicit value was given.
    Return {
        value: Option<String>,
    },
    /// `left && right` — run `right` only if `left` succeeds.
    And {
        left: Box<Ast>,
        right: Box<Ast>,
    },
    /// `left || right` — run `right` only if `left` fails.
    Or {
        left: Box<Ast>,
        right: Box<Ast>,
    },
}
53
/// A single command within a pipeline, with optional I/O redirections.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ShellCommand {
    /// Command name followed by its arguments.
    pub args: Vec<String>,
    /// Input redirection target (`< file`), if any.
    pub input: Option<String>,
    /// Output redirection target (`> file`), if any.
    pub output: Option<String>,
    /// Append redirection target (`>> file`), if any.
    pub append: Option<String>,
}
61
/// Reports whether `name` can be used as a variable name.
///
/// Only the first character is inspected: it must be alphabetic or `_`.
/// The empty string is never a valid name.
fn is_valid_variable_name(name: &str) -> bool {
    name.chars()
        .next()
        .is_some_and(|c| c.is_alphabetic() || c == '_')
}
71
72/// Helper function to create an empty body AST (a no-op that returns success).
73/// Used for empty then/else branches, empty loop bodies, and empty function bodies.
74fn create_empty_body_ast() -> Ast {
75    Ast::Pipeline(vec![ShellCommand {
76        args: vec!["true".to_string()],
77        input: None,
78        output: None,
79        append: None,
80    }])
81}
82
83/// Helper function to skip consecutive newline tokens.
84/// Updates the index to point to the first non-newline token.
85fn skip_newlines(tokens: &[Token], i: &mut usize) {
86    while *i < tokens.len() && tokens[*i] == Token::Newline {
87        *i += 1;
88    }
89}
90
91/// Helper function to skip to the matching 'fi' token for an 'if' statement.
92/// Handles nested if statements correctly.
93fn skip_to_matching_fi(tokens: &[Token], i: &mut usize) {
94    let mut if_depth = 1;
95    *i += 1; // Move past the 'if' token
96    while *i < tokens.len() && if_depth > 0 {
97        match tokens[*i] {
98            Token::If => if_depth += 1,
99            Token::Fi => if_depth -= 1,
100            _ => {}
101        }
102        *i += 1;
103    }
104}
105
106/// Helper function to skip to the matching 'done' token for a 'for' or 'while' loop.
107/// Handles nested loops correctly.
108fn skip_to_matching_done(tokens: &[Token], i: &mut usize) {
109    let mut loop_depth = 1;
110    *i += 1; // Move past the 'for' or 'while' token
111    while *i < tokens.len() && loop_depth > 0 {
112        match tokens[*i] {
113            Token::For | Token::While => loop_depth += 1,
114            Token::Done => loop_depth -= 1,
115            _ => {}
116        }
117        *i += 1;
118    }
119}
120
121/// Helper function to skip to the matching 'esac' token for a 'case' statement.
122fn skip_to_matching_esac(tokens: &[Token], i: &mut usize) {
123    *i += 1; // Move past the 'case' token
124    while *i < tokens.len() {
125        if tokens[*i] == Token::Esac {
126            *i += 1;
127            break;
128        }
129        *i += 1;
130    }
131}
132
133pub fn parse(tokens: Vec<Token>) -> Result<Ast, String> {
134    // First, try to detect and parse function definitions that span multiple lines
135    if tokens.len() >= 4
136        && let (Token::Word(_), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
137            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
138    {
139        // Look for the matching RightBrace
140        // Start from the opening brace (token 3) and find its match
141        let mut brace_depth = 1; // We've already seen the opening brace at position 3
142        let mut function_end = tokens.len();
143        let mut j = 4; // Start after the opening brace
144
145        while j < tokens.len() {
146            match &tokens[j] {
147                Token::LeftBrace => {
148                    brace_depth += 1;
149                    j += 1;
150                }
151                Token::RightBrace => {
152                    brace_depth -= 1;
153                    if brace_depth == 0 {
154                        function_end = j + 1; // Include the closing brace
155                        break;
156                    }
157                    j += 1;
158                }
159                Token::If => {
160                    // Skip to matching fi to avoid confusion
161                    let mut if_depth = 1;
162                    j += 1;
163                    while j < tokens.len() && if_depth > 0 {
164                        match tokens[j] {
165                            Token::If => if_depth += 1,
166                            Token::Fi => if_depth -= 1,
167                            _ => {}
168                        }
169                        j += 1;
170                    }
171                }
172                Token::For | Token::While => {
173                    // Skip to matching done
174                    let mut for_depth = 1;
175                    j += 1;
176                    while j < tokens.len() && for_depth > 0 {
177                        match tokens[j] {
178                            Token::For | Token::While => for_depth += 1,
179                            Token::Done => for_depth -= 1,
180                            _ => {}
181                        }
182                        j += 1;
183                    }
184                }
185                Token::Case => {
186                    // Skip to matching esac
187                    j += 1;
188                    while j < tokens.len() {
189                        if tokens[j] == Token::Esac {
190                            j += 1;
191                            break;
192                        }
193                        j += 1;
194                    }
195                }
196                _ => {
197                    j += 1;
198                }
199            }
200        }
201
202        if brace_depth == 0 && function_end <= tokens.len() {
203            // We found the complete function definition
204            let function_tokens = &tokens[0..function_end];
205            let remaining_tokens = &tokens[function_end..];
206
207            let function_ast = parse_function_definition(function_tokens)?;
208
209            return if remaining_tokens.is_empty() {
210                Ok(function_ast)
211            } else {
212                // There are more commands after the function
213                let remaining_ast = parse_commands_sequentially(remaining_tokens)?;
214                Ok(Ast::Sequence(vec![function_ast, remaining_ast]))
215            };
216        }
217    }
218
219    // Also check for legacy function definition format (word with parentheses followed by brace)
220    if tokens.len() >= 2
221        && let Token::Word(ref word) = tokens[0]
222        && let Some(paren_pos) = word.find('(')
223        && word.ends_with(')')
224        && paren_pos > 0
225        && tokens[1] == Token::LeftBrace
226    {
227        return parse_function_definition(&tokens);
228    }
229
230    // Fall back to normal parsing
231    parse_commands_sequentially(&tokens)
232}
233
234fn parse_slice(tokens: &[Token]) -> Result<Ast, String> {
235    if tokens.is_empty() {
236        return Err("No commands found".to_string());
237    }
238
239    // Check if it's an assignment
240    if tokens.len() == 2 {
241        // Check for pattern: VAR= VALUE
242        if let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
243            && let Some(eq_pos) = var_eq.find('=')
244            && eq_pos > 0
245            && eq_pos < var_eq.len()
246        {
247            let var = var_eq[..eq_pos].to_string();
248            let full_value = format!("{}{}", &var_eq[eq_pos + 1..], value);
249            // Basic validation: variable name should start with letter or underscore
250            if is_valid_variable_name(&var) {
251                return Ok(Ast::Assignment {
252                    var,
253                    value: full_value,
254                });
255            }
256        }
257    }
258
259    // Check if it's an assignment (VAR= VALUE)
260    if tokens.len() == 2
261        && let (Token::Word(var_eq), Token::Word(value)) = (&tokens[0], &tokens[1])
262        && let Some(eq_pos) = var_eq.find('=')
263        && eq_pos > 0
264        && eq_pos == var_eq.len() - 1
265    {
266        let var = var_eq[..eq_pos].to_string();
267        // Basic validation: variable name should start with letter or underscore
268        if is_valid_variable_name(&var) {
269            return Ok(Ast::Assignment {
270                var,
271                value: value.clone(),
272            });
273        }
274    }
275
276    // Check if it's a local assignment (local VAR VALUE or local VAR= VALUE)
277    if tokens.len() == 3
278        && let (Token::Local, Token::Word(var), Token::Word(value)) =
279            (&tokens[0], &tokens[1], &tokens[2])
280    {
281        // Strip trailing = if present (handles "local var= value" format)
282        let clean_var = if var.ends_with('=') {
283            &var[..var.len() - 1]
284        } else {
285            var
286        };
287        // Basic validation: variable name should start with letter or underscore
288        if is_valid_variable_name(clean_var) {
289            return Ok(Ast::LocalAssignment {
290                var: clean_var.to_string(),
291                value: value.clone(),
292            });
293        }
294    }
295
296    // Check if it's a return statement
297    if !tokens.is_empty()
298        && tokens.len() <= 2
299        && let Token::Return = &tokens[0]
300    {
301        if tokens.len() == 1 {
302            // return (with no value, defaults to 0)
303            return Ok(Ast::Return { value: None });
304        } else if let Token::Word(word) = &tokens[1] {
305            // return value
306            return Ok(Ast::Return {
307                value: Some(word.clone()),
308            });
309        }
310    }
311
312    // Check if it's a local assignment (local VAR=VALUE)
313    if tokens.len() == 2
314        && let (Token::Local, Token::Word(var_eq)) = (&tokens[0], &tokens[1])
315        && let Some(eq_pos) = var_eq.find('=')
316        && eq_pos > 0
317        && eq_pos < var_eq.len()
318    {
319        let var = var_eq[..eq_pos].to_string();
320        let value = var_eq[eq_pos + 1..].to_string();
321        // Basic validation: variable name should start with letter or underscore
322        if is_valid_variable_name(&var) {
323            return Ok(Ast::LocalAssignment { var, value });
324        }
325    }
326
327    // Check if it's an assignment (single token with =)
328    if tokens.len() == 1
329        && let Token::Word(ref word) = tokens[0]
330        && let Some(eq_pos) = word.find('=')
331        && eq_pos > 0
332        && eq_pos < word.len()
333    {
334        let var = word[..eq_pos].to_string();
335        let value = word[eq_pos + 1..].to_string();
336        // Basic validation: variable name should start with letter or underscore
337        if is_valid_variable_name(&var) {
338            return Ok(Ast::Assignment { var, value });
339        }
340    }
341
342    // Check if it's an if statement
343    if let Token::If = tokens[0] {
344        return parse_if(tokens);
345    }
346
347    // Check if it's a case statement
348    if let Token::Case = tokens[0] {
349        return parse_case(tokens);
350    }
351
352    // Check if it's a for loop
353    if let Token::For = tokens[0] {
354        return parse_for(tokens);
355    }
356
357    // Check if it's a while loop
358    if let Token::While = tokens[0] {
359        return parse_while(tokens);
360    }
361
362    // Check if it's a function definition
363    // Pattern: Word LeftParen RightParen LeftBrace
364    if tokens.len() >= 4
365        && let (Token::Word(word), Token::LeftParen, Token::RightParen, Token::LeftBrace) =
366            (&tokens[0], &tokens[1], &tokens[2], &tokens[3])
367        && is_valid_variable_name(word)
368    {
369        return parse_function_definition(tokens);
370    }
371
372    // Also check for function definition with parentheses in the word (legacy support)
373    if tokens.len() >= 2
374        && let Token::Word(ref word) = tokens[0]
375        && let Some(paren_pos) = word.find('(')
376        && word.ends_with(')')
377        && paren_pos > 0
378    {
379        let func_name = &word[..paren_pos];
380        if is_valid_variable_name(func_name) && tokens[1] == Token::LeftBrace {
381            return parse_function_definition(tokens);
382        }
383    }
384
385    // Check if it's a function call (word followed by arguments)
386    // For Phase 1, we'll parse as regular pipeline and handle function calls in executor
387
388    // Otherwise, parse as pipeline
389    parse_pipeline(tokens)
390}
391
/// Splits a token stream into individual commands and parses each one.
///
/// Commands are normally delimited by newlines and semicolons, but compound
/// statements (`if…fi`, `for`/`while`…`done`, `case…esac`) and brace-bodied
/// function definitions are consumed whole, including their terminator.
/// `&&`/`||` fold the current command with everything that follows into a
/// right-associative `Ast::And`/`Ast::Or` chain.
///
/// Returns a single node when exactly one command was found, otherwise
/// `Ast::Sequence`. Returns `Err` when no command could be parsed.
fn parse_commands_sequentially(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 0;
    let mut commands = Vec::new();

    while i < tokens.len() {
        // Skip whitespace and comments
        while i < tokens.len() {
            match &tokens[i] {
                Token::Newline => {
                    i += 1;
                }
                Token::Word(word) if word.starts_with('#') => {
                    // Skip comment line (everything up to and including the newline)
                    while i < tokens.len() && tokens[i] != Token::Newline {
                        i += 1;
                    }
                    if i < tokens.len() {
                        i += 1; // Skip the newline
                    }
                }
                _ => break,
            }
        }

        if i >= tokens.len() {
            break;
        }

        // Find the end of this command
        let start = i;

        // Special handling for compound commands
        if tokens[i] == Token::If {
            // For if statements, find the matching fi
            // (depth starts at 0 because the loop counts the leading 'if' itself)
            let mut depth = 0;
            while i < tokens.len() {
                match tokens[i] {
                    Token::If => depth += 1,
                    Token::Fi => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1; // Include the fi
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }

            // If we didn't find a matching fi, include all remaining tokens
            // This handles the case where the if statement is incomplete
        } else if tokens[i] == Token::For {
            // For for loops, find the matching done
            let mut depth = 1; // Start at 1 because we're already inside the for
            i += 1; // Move past the 'for' token
            while i < tokens.len() {
                match tokens[i] {
                    Token::For | Token::While => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1; // Include the done
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::While {
            // For while loops, find the matching done
            let mut depth = 1; // Start at 1 because we're already inside the while
            i += 1; // Move past the 'while' token
            while i < tokens.len() {
                match tokens[i] {
                    Token::While | Token::For => depth += 1,
                    Token::Done => {
                        depth -= 1;
                        if depth == 0 {
                            i += 1; // Include the done
                            break;
                        }
                    }
                    _ => {}
                }
                i += 1;
            }
        } else if tokens[i] == Token::Case {
            // For case statements, find the matching esac
            // (no depth tracking: case statements are not treated as nesting here)
            while i < tokens.len() {
                if tokens[i] == Token::Esac {
                    i += 1; // Include the esac
                    break;
                }
                i += 1;
            }
        } else if i + 3 < tokens.len()
            && matches!(tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            // This is a function definition - find the matching closing brace
            // NOTE(review): unlike `parse`, this scan does not skip over nested
            // compound statements; it relies on brace depth alone — confirm the
            // lexer never emits stray braces inside keywords' bodies.
            let mut brace_depth = 1;
            i += 4; // Skip to after opening brace
            while i < tokens.len() && brace_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => brace_depth += 1,
                    Token::RightBrace => brace_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
        } else {
            // For simple commands, stop at newline, semicolon, &&, or ||
            // But check if the next token after newline is a control flow keyword
            while i < tokens.len() {
                if tokens[i] == Token::Newline
                    || tokens[i] == Token::Semicolon
                    || tokens[i] == Token::And
                    || tokens[i] == Token::Or
                {
                    // Look ahead to see if the next non-newline token is else/elif/fi
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    // If we find else/elif/fi, this is likely part of an if statement that wasn't properly detected
                    if j < tokens.len()
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        // Skip this token and continue - it will be handled as a parse error
                        i = j + 1;
                        continue;
                    }
                    break;
                }
                i += 1;
            }
        }

        let command_tokens = &tokens[start..i];
        if !command_tokens.is_empty() {
            // Don't try to parse orphaned else/elif/fi tokens
            if command_tokens.len() == 1 {
                match command_tokens[0] {
                    Token::Else | Token::Elif | Token::Fi => {
                        // Skip orphaned control flow tokens
                        if i < tokens.len()
                            && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon)
                        {
                            i += 1;
                        }
                        continue;
                    }
                    _ => {}
                }
            }

            let ast = parse_slice(command_tokens)?;

            // Check if the next token is && or ||
            if i < tokens.len() && (tokens[i] == Token::And || tokens[i] == Token::Or) {
                let operator = tokens[i].clone();
                i += 1; // Skip the operator

                // Skip any newlines after the operator
                while i < tokens.len() && tokens[i] == Token::Newline {
                    i += 1;
                }

                // Parse the right side recursively, so chains like
                // `a && b && c` nest to the right
                let remaining_tokens = &tokens[i..];
                let right_ast = parse_commands_sequentially(remaining_tokens)?;

                // Create And or Or node
                let combined_ast = match operator {
                    Token::And => Ast::And {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    Token::Or => Ast::Or {
                        left: Box::new(ast),
                        right: Box::new(right_ast),
                    },
                    _ => unreachable!(),
                };

                commands.push(combined_ast);
                break; // We've consumed the rest of the tokens
            } else {
                commands.push(ast);
            }
        }

        // Consume the command separator, if any
        if i < tokens.len() && (tokens[i] == Token::Newline || tokens[i] == Token::Semicolon) {
            i += 1;
        }
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    if commands.len() == 1 {
        Ok(commands.into_iter().next().unwrap())
    } else {
        Ok(Ast::Sequence(commands))
    }
}
605
/// Parses tokens into an `Ast::Pipeline`, splitting commands on `|` and
/// attaching `<`, `>`, and `>>` redirection targets to the current command.
///
/// Stops early at control-flow keywords (`do`, `done`, `then`, `else`,
/// `elif`, `fi`, `esac`) so the caller can handle them, and at a `)` that
/// directly follows `(` (treated as a function-call pattern the executor
/// resolves later). Any other unexpected token is an error.
///
/// Returns `Err` when no command with at least one argument was found.
fn parse_pipeline(tokens: &[Token]) -> Result<Ast, String> {
    let mut commands = Vec::new();
    let mut current_cmd = ShellCommand::default();

    let mut i = 0;
    while i < tokens.len() {
        let token = &tokens[i];
        match token {
            Token::Word(word) => {
                current_cmd.args.push(word.clone());
            }
            Token::Pipe => {
                // A pipe finishes the current command; an empty command
                // before a pipe is silently dropped.
                if !current_cmd.args.is_empty() {
                    commands.push(current_cmd.clone());
                    current_cmd = ShellCommand::default();
                }
            }
            Token::RedirIn => {
                i += 1;
                // NOTE(review): a trailing `<` with no target word is silently
                // ignored rather than reported — confirm this is intentional.
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd.input = Some(file.clone());
                }
            }
            Token::RedirOut => {
                i += 1;
                // Same silent-ignore behavior as RedirIn for a missing target.
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd.output = Some(file.clone());
                }
            }
            Token::RedirAppend => {
                i += 1;
                // Same silent-ignore behavior as RedirIn for a missing target.
                if i < tokens.len()
                    && let Token::Word(ref file) = tokens[i]
                {
                    current_cmd.append = Some(file.clone());
                }
            }
            Token::RightParen => {
                // Check if this looks like a function call pattern: Word LeftParen ... RightParen
                // If so, treat it as a function call even if the function doesn't exist
                if !current_cmd.args.is_empty()
                    && i > 0
                    && let Token::LeftParen = tokens[i - 1]
                {
                    // This looks like a function call pattern, treat as function call
                    // For now, we'll handle this in the executor by checking if it's a function
                    // If not a function, the executor will handle the error gracefully
                    break;
                }
                return Err("Unexpected ) in pipeline".to_string());
            }
            Token::Newline => {
                // Newlines are handled at the sequence level, skip them in pipelines
                // (continue so the trailing `i += 1` below is not run twice)
                i += 1;
                continue;
            }
            Token::Do
            | Token::Done
            | Token::Then
            | Token::Else
            | Token::Elif
            | Token::Fi
            | Token::Esac => {
                // These are control flow keywords that should be handled at a higher level
                // If we encounter them here, it means we've reached the end of the current command
                break;
            }
            _ => {
                return Err(format!("Unexpected token in pipeline: {:?}", token));
            }
        }
        i += 1;
    }

    // Flush the last command in the pipeline.
    if !current_cmd.args.is_empty() {
        commands.push(current_cmd);
    }

    if commands.is_empty() {
        return Err("No commands found".to_string());
    }

    Ok(Ast::Pipeline(commands))
}
694
/// Parses `if COND; then BODY [elif COND; then BODY]* [else BODY] fi`.
///
/// Each (condition, then-branch) pair becomes one entry in `branches`.
/// Nested `if` statements inside a branch are kept intact by tracking an
/// `if`/`fi` depth counter; `else`/`elif`/`fi` only terminate a branch at
/// depth zero. Empty branches become a no-op body (`true`).
///
/// Returns `Err` when `then` is missing after a condition or when no
/// closing `fi` is found.
fn parse_if(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 1; // Skip 'if'
    let mut branches = Vec::new();

    // One iteration per if/elif branch.
    loop {
        // Parse condition until ; or newline or then
        let mut cond_tokens = Vec::new();
        while i < tokens.len()
            && tokens[i] != Token::Semicolon
            && tokens[i] != Token::Newline
            && tokens[i] != Token::Then
        {
            cond_tokens.push(tokens[i].clone());
            i += 1;
        }

        // Skip ; or newline if present
        if i < tokens.len() && (tokens[i] == Token::Semicolon || tokens[i] == Token::Newline) {
            i += 1;
        }

        // Skip any additional newlines
        skip_newlines(tokens, &mut i);

        if i >= tokens.len() || tokens[i] != Token::Then {
            return Err("Expected then after if/elif condition".to_string());
        }
        i += 1; // Skip then

        // Skip any newlines after then
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Parse then branch - collect all tokens until we hit else/elif/fi
        // We need to handle nested structures properly
        let mut then_tokens = Vec::new();
        let mut depth = 0; // depth of nested if statements within this branch
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    then_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        then_tokens.push(tokens[i].clone());
                    } else {
                        break; // This fi closes our if
                    }
                }
                Token::Else | Token::Elif if depth == 0 => {
                    break; // These belong to our if, not nested ones
                }
                Token::Newline => {
                    // Skip newlines but check what comes after
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len()
                        && depth == 0
                        && (tokens[j] == Token::Else
                            || tokens[j] == Token::Elif
                            || tokens[j] == Token::Fi)
                    {
                        i = j; // Skip to the keyword
                        break;
                    }
                    // Otherwise it's just a newline in the middle of commands
                    then_tokens.push(tokens[i].clone());
                }
                _ => {
                    then_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        // Skip any trailing newlines
        skip_newlines(tokens, &mut i);

        let then_ast = if then_tokens.is_empty() {
            // Empty then branch - create a no-op
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&then_tokens)?
        };

        let condition = parse_slice(&cond_tokens)?;
        branches.push((Box::new(condition), Box::new(then_ast)));

        // Check next: another elif restarts the loop, anything else ends it
        if i < tokens.len() && tokens[i] == Token::Elif {
            i += 1; // Skip elif, continue loop
        } else {
            break;
        }
    }

    let else_ast = if i < tokens.len() && tokens[i] == Token::Else {
        i += 1; // Skip else

        // Skip any newlines after else
        while i < tokens.len() && tokens[i] == Token::Newline {
            i += 1;
        }

        // Collect the else body up to the closing fi, tracking nested ifs
        // the same way as the then-branch above.
        let mut else_tokens = Vec::new();
        let mut depth = 0;
        while i < tokens.len() {
            match &tokens[i] {
                Token::If => {
                    depth += 1;
                    else_tokens.push(tokens[i].clone());
                }
                Token::Fi => {
                    if depth > 0 {
                        depth -= 1;
                        else_tokens.push(tokens[i].clone());
                    } else {
                        break; // This fi closes our if
                    }
                }
                Token::Newline => {
                    // Skip newlines but check what comes after
                    let mut j = i + 1;
                    while j < tokens.len() && tokens[j] == Token::Newline {
                        j += 1;
                    }
                    if j < tokens.len() && depth == 0 && tokens[j] == Token::Fi {
                        i = j; // Skip to fi
                        break;
                    }
                    // Otherwise it's just a newline in the middle of commands
                    else_tokens.push(tokens[i].clone());
                }
                _ => {
                    else_tokens.push(tokens[i].clone());
                }
            }
            i += 1;
        }

        let else_ast = if else_tokens.is_empty() {
            // Empty else branch - create a no-op
            create_empty_body_ast()
        } else {
            parse_commands_sequentially(&else_tokens)?
        };

        Some(Box::new(else_ast))
    } else {
        None
    };

    // The closing fi must be present (it is checked but not consumed here).
    if i >= tokens.len() || tokens[i] != Token::Fi {
        return Err("Expected fi".to_string());
    }

    Ok(Ast::If {
        branches,
        else_branch: else_ast,
    })
}
861
862fn parse_case(tokens: &[Token]) -> Result<Ast, String> {
863    let mut i = 1; // Skip 'case'
864
865    // Parse word
866    if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
867        return Err("Expected word after case".to_string());
868    }
869    let word = if let Token::Word(ref w) = tokens[i] {
870        w.clone()
871    } else {
872        unreachable!()
873    };
874    i += 1;
875
876    if i >= tokens.len() || tokens[i] != Token::In {
877        return Err("Expected in after case word".to_string());
878    }
879    i += 1;
880
881    let mut cases = Vec::new();
882    let mut default = None;
883
884    loop {
885        // Skip newlines
886        while i < tokens.len() && tokens[i] == Token::Newline {
887            i += 1;
888        }
889
890        if i >= tokens.len() {
891            return Err("Unexpected end in case statement".to_string());
892        }
893
894        if tokens[i] == Token::Esac {
895            break;
896        }
897
898        // Parse patterns
899        let mut patterns = Vec::new();
900        while i < tokens.len() && tokens[i] != Token::RightParen {
901            if let Token::Word(ref p) = tokens[i] {
902                // Split pattern on |
903                for pat in p.split('|') {
904                    patterns.push(pat.to_string());
905                }
906            } else if tokens[i] == Token::Pipe {
907                // Skip | separator
908            } else if tokens[i] == Token::Newline {
909                // Skip newlines in patterns
910            } else {
911                return Err(format!("Expected pattern, found {:?}", tokens[i]));
912            }
913            i += 1;
914        }
915
916        if i >= tokens.len() || tokens[i] != Token::RightParen {
917            return Err("Expected ) after patterns".to_string());
918        }
919        i += 1;
920
921        // Parse commands
922        let mut commands_tokens = Vec::new();
923        while i < tokens.len() && tokens[i] != Token::DoubleSemicolon && tokens[i] != Token::Esac {
924            commands_tokens.push(tokens[i].clone());
925            i += 1;
926        }
927
928        let commands_ast = parse_slice(&commands_tokens)?;
929
930        if i >= tokens.len() {
931            return Err("Unexpected end in case statement".to_string());
932        }
933
934        if tokens[i] == Token::DoubleSemicolon {
935            i += 1;
936            // Check if this is the default case (*)
937            if patterns.len() == 1 && patterns[0] == "*" {
938                default = Some(Box::new(commands_ast));
939            } else {
940                cases.push((patterns, commands_ast));
941            }
942        } else if tokens[i] == Token::Esac {
943            // Last case without ;;
944            if patterns.len() == 1 && patterns[0] == "*" {
945                default = Some(Box::new(commands_ast));
946            } else {
947                cases.push((patterns, commands_ast));
948            }
949            break;
950        } else {
951            return Err("Expected ;; or esac after commands".to_string());
952        }
953    }
954
955    Ok(Ast::Case {
956        word,
957        cases,
958        default,
959    })
960}
961
962fn parse_for(tokens: &[Token]) -> Result<Ast, String> {
963    let mut i = 1; // Skip 'for'
964
965    // Parse variable name
966    if i >= tokens.len() || !matches!(tokens[i], Token::Word(_)) {
967        return Err("Expected variable name after for".to_string());
968    }
969    let variable = if let Token::Word(ref v) = tokens[i] {
970        v.clone()
971    } else {
972        unreachable!()
973    };
974    i += 1;
975
976    // Expect 'in'
977    if i >= tokens.len() || tokens[i] != Token::In {
978        return Err("Expected 'in' after for variable".to_string());
979    }
980    i += 1;
981
982    // Parse items until we hit 'do' or semicolon/newline
983    let mut items = Vec::new();
984    while i < tokens.len() {
985        match &tokens[i] {
986            Token::Do => break,
987            Token::Semicolon | Token::Newline => {
988                i += 1;
989                // Check if next token is 'do'
990                if i < tokens.len() && tokens[i] == Token::Do {
991                    break;
992                }
993            }
994            Token::Word(word) => {
995                items.push(word.clone());
996                i += 1;
997            }
998            _ => {
999                return Err(format!("Unexpected token in for items: {:?}", tokens[i]));
1000            }
1001        }
1002    }
1003
1004    // Skip any newlines before 'do'
1005    while i < tokens.len() && tokens[i] == Token::Newline {
1006        i += 1;
1007    }
1008
1009    // Expect 'do'
1010    if i >= tokens.len() || tokens[i] != Token::Do {
1011        return Err("Expected 'do' in for loop".to_string());
1012    }
1013    i += 1;
1014
1015    // Skip any newlines after 'do'
1016    while i < tokens.len() && tokens[i] == Token::Newline {
1017        i += 1;
1018    }
1019
1020    // Parse body until 'done'
1021    let mut body_tokens = Vec::new();
1022    let mut depth = 0;
1023    while i < tokens.len() {
1024        match &tokens[i] {
1025            Token::For => {
1026                depth += 1;
1027                body_tokens.push(tokens[i].clone());
1028            }
1029            Token::Done => {
1030                if depth > 0 {
1031                    depth -= 1;
1032                    body_tokens.push(tokens[i].clone());
1033                } else {
1034                    break; // This done closes our for loop
1035                }
1036            }
1037            Token::Newline => {
1038                // Skip newlines but check what comes after
1039                let mut j = i + 1;
1040                while j < tokens.len() && tokens[j] == Token::Newline {
1041                    j += 1;
1042                }
1043                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
1044                    i = j; // Skip to done
1045                    break;
1046                }
1047                // Otherwise it's just a newline in the middle of commands
1048                body_tokens.push(tokens[i].clone());
1049            }
1050            _ => {
1051                body_tokens.push(tokens[i].clone());
1052            }
1053        }
1054        i += 1;
1055    }
1056
1057    if i >= tokens.len() || tokens[i] != Token::Done {
1058        return Err("Expected 'done' to close for loop".to_string());
1059    }
1060
1061    // Parse the body
1062    let body_ast = if body_tokens.is_empty() {
1063        // Empty body - create a no-op
1064        create_empty_body_ast()
1065    } else {
1066        parse_commands_sequentially(&body_tokens)?
1067    };
1068
1069    Ok(Ast::For {
1070        variable,
1071        items,
1072        body: Box::new(body_ast),
1073    })
1074}
1075
/// Parses a `while CONDITION; do BODY; done` loop.
///
/// `tokens` must begin with the `while` keyword. Returns `Ast::While`
/// with the separately parsed condition and body, or a descriptive
/// error string on malformed input.
fn parse_while(tokens: &[Token]) -> Result<Ast, String> {
    let mut i = 1; // Skip 'while'

    // Parse condition until we hit 'do' or semicolon/newline
    // (the separators themselves are dropped from the condition).
    let mut cond_tokens = Vec::new();
    while i < tokens.len() {
        match &tokens[i] {
            Token::Do => break,
            Token::Semicolon | Token::Newline => {
                i += 1;
                // Check if next token is 'do'
                if i < tokens.len() && tokens[i] == Token::Do {
                    break;
                }
            }
            _ => {
                cond_tokens.push(tokens[i].clone());
                i += 1;
            }
        }
    }

    if cond_tokens.is_empty() {
        return Err("Expected condition after while".to_string());
    }

    // Skip any newlines before 'do'
    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    // Expect 'do'
    if i >= tokens.len() || tokens[i] != Token::Do {
        return Err("Expected 'do' in while loop".to_string());
    }
    i += 1;

    // Skip any newlines after 'do'
    while i < tokens.len() && tokens[i] == Token::Newline {
        i += 1;
    }

    // Parse body until 'done'.
    // `depth` counts nested loop openers — both `while` and `for` are
    // terminated by `done` — so an inner loop's `done` is not taken
    // as the one that closes this loop.
    let mut body_tokens = Vec::new();
    let mut depth = 0;
    while i < tokens.len() {
        match &tokens[i] {
            Token::While | Token::For => {
                depth += 1;
                body_tokens.push(tokens[i].clone());
            }
            Token::Done => {
                if depth > 0 {
                    depth -= 1;
                    body_tokens.push(tokens[i].clone());
                } else {
                    break; // This done closes our while loop
                }
            }
            Token::Newline => {
                // Skip newlines but check what comes after:
                // if the next significant token is our closing 'done',
                // jump straight to it.
                let mut j = i + 1;
                while j < tokens.len() && tokens[j] == Token::Newline {
                    j += 1;
                }
                if j < tokens.len() && depth == 0 && tokens[j] == Token::Done {
                    i = j; // Skip to done
                    break;
                }
                // Otherwise it's just a newline in the middle of commands
                body_tokens.push(tokens[i].clone());
            }
            _ => {
                body_tokens.push(tokens[i].clone());
            }
        }
        i += 1;
    }

    if i >= tokens.len() || tokens[i] != Token::Done {
        return Err("Expected 'done' to close while loop".to_string());
    }

    // Parse the condition
    let condition_ast = parse_slice(&cond_tokens)?;

    // Parse the body
    let body_ast = if body_tokens.is_empty() {
        // Empty body - create a no-op
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(&body_tokens)?
    };

    Ok(Ast::While {
        condition: Box::new(condition_ast),
        body: Box::new(body_ast),
    })
}
1175
/// Parses a function definition: `name() { BODY }`, or the legacy form
/// where the lexer produced `name()` as a single word before the brace.
///
/// Returns `Ast::FunctionDefinition` with the function's name and parsed
/// body, or an error string if the tokens do not form a well-delimited
/// definition.
fn parse_function_definition(tokens: &[Token]) -> Result<Ast, String> {
    if tokens.len() < 2 {
        return Err("Function definition too short".to_string());
    }

    // Extract function name from first token
    let func_name = if let Token::Word(word) = &tokens[0] {
        // Handle legacy format with parentheses in the word (e.g., "legacyfunc()")
        if let Some(paren_pos) = word.find('(') {
            if word.ends_with(')') && paren_pos > 0 {
                // Strip the trailing "()" to recover the bare name.
                word[..paren_pos].to_string()
            } else {
                word.clone()
            }
        } else {
            word.clone()
        }
    } else {
        return Err("Function name must be a word".to_string());
    };

    // Find the opening brace and body
    let brace_pos =
        if tokens.len() >= 4 && tokens[1] == Token::LeftParen && tokens[2] == Token::RightParen {
            // Standard format: name ( ) { — brace is the fourth token
            if tokens[3] != Token::LeftBrace {
                return Err("Expected { after function name".to_string());
            }
            3
        } else if tokens.len() >= 2 && tokens[1] == Token::LeftBrace {
            // Legacy format: "name()" lexed as a single word, brace is the second token
            1
        } else {
            return Err("Expected ( after function name or { for legacy format".to_string());
        };

    // Find the matching closing brace, accounting for nested function definitions and control structures
    let mut brace_depth = 0;
    let mut body_end = 0;
    let mut found_closing = false;
    let mut i = brace_pos + 1;

    while i < tokens.len() {
        // Check if this is the start of a nested function definition
        // Pattern: Word LeftParen RightParen LeftBrace
        if i + 3 < tokens.len()
            && matches!(&tokens[i], Token::Word(_))
            && tokens[i + 1] == Token::LeftParen
            && tokens[i + 2] == Token::RightParen
            && tokens[i + 3] == Token::LeftBrace
        {
            // This is a nested function - skip over it entirely
            // Skip to after the opening brace of nested function
            i += 4;
            // Balance braces inside the nested function only.
            let mut nested_depth = 1;
            while i < tokens.len() && nested_depth > 0 {
                match tokens[i] {
                    Token::LeftBrace => nested_depth += 1,
                    Token::RightBrace => nested_depth -= 1,
                    _ => {}
                }
                i += 1;
            }
            // Don't increment i again - continue from current position
            continue;
        }

        match &tokens[i] {
            Token::LeftBrace => {
                brace_depth += 1;
                i += 1;
            }
            Token::RightBrace => {
                if brace_depth == 0 {
                    // This is our matching closing brace
                    body_end = i;
                    found_closing = true;
                    break;
                } else {
                    brace_depth -= 1;
                    i += 1;
                }
            }
            Token::If => {
                // Skip to matching fi
                skip_to_matching_fi(tokens, &mut i);
            }
            Token::For | Token::While => {
                // Skip to matching done
                skip_to_matching_done(tokens, &mut i);
            }
            Token::Case => {
                // Skip to matching esac
                skip_to_matching_esac(tokens, &mut i);
            }
            _ => {
                i += 1;
            }
        }
    }

    if !found_closing {
        return Err("Missing closing } for function definition".to_string());
    }

    // Extract body tokens (everything between { and })
    let body_tokens = &tokens[brace_pos + 1..body_end];

    // Parse the function body using the existing parser;
    // an empty body becomes a no-op so calling the function succeeds.
    let body_ast = if body_tokens.is_empty() {
        // Empty function body
        create_empty_body_ast()
    } else {
        parse_commands_sequentially(body_tokens)?
    };

    Ok(Ast::FunctionDefinition {
        name: func_name,
        body: Box::new(body_ast),
    })
}
1297
// Unit tests for the parser. Each test feeds a hand-built token stream
// to `parse` and asserts on the resulting AST.
#[cfg(test)]
mod tests {
    use super::super::lexer::Token;
    use super::*;

    #[test]
    fn test_single_command() {
        let tokens = vec![Token::Word("ls".to_string())];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["ls".to_string()],
                input: None,
                output: None,
                append: None,
            }])
        );
    }

    #[test]
    fn test_command_with_args() {
        let tokens = vec![
            Token::Word("ls".to_string()),
            Token::Word("-la".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["ls".to_string(), "-la".to_string()],
                input: None,
                output: None,
                append: None,
            }])
        );
    }

    #[test]
    fn test_pipeline() {
        let tokens = vec![
            Token::Word("ls".to_string()),
            Token::Pipe,
            Token::Word("grep".to_string()),
            Token::Word("txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![
                ShellCommand {
                    args: vec!["ls".to_string()],
                    input: None,
                    output: None,
                    append: None,
                },
                ShellCommand {
                    args: vec!["grep".to_string(), "txt".to_string()],
                    input: None,
                    output: None,
                    append: None,
                }
            ])
        );
    }

    #[test]
    fn test_input_redirection() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("input.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                input: Some("input.txt".to_string()),
                output: None,
                append: None,
            }])
        );
    }

    #[test]
    fn test_output_redirection() {
        let tokens = vec![
            Token::Word("printf".to_string()),
            Token::Word("hello".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["printf".to_string(), "hello".to_string()],
                input: None,
                output: Some("output.txt".to_string()),
                append: None,
            }])
        );
    }

    #[test]
    fn test_append_redirection() {
        let tokens = vec![
            Token::Word("printf".to_string()),
            Token::Word("hello".to_string()),
            Token::RedirAppend,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["printf".to_string(), "hello".to_string()],
                input: None,
                output: None,
                append: Some("output.txt".to_string()),
            }])
        );
    }

    #[test]
    fn test_complex_pipeline_with_redirections() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("input.txt".to_string()),
            Token::Pipe,
            Token::Word("grep".to_string()),
            Token::Word("pattern".to_string()),
            Token::Pipe,
            Token::Word("sort".to_string()),
            Token::RedirOut,
            Token::Word("output.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![
                ShellCommand {
                    args: vec!["cat".to_string()],
                    input: Some("input.txt".to_string()),
                    output: None,
                    append: None,
                },
                ShellCommand {
                    args: vec!["grep".to_string(), "pattern".to_string()],
                    input: None,
                    output: None,
                    append: None,
                },
                ShellCommand {
                    args: vec!["sort".to_string()],
                    input: None,
                    output: Some("output.txt".to_string()),
                    append: None,
                }
            ])
        );
    }

    #[test]
    fn test_empty_tokens() {
        let tokens = vec![];
        let result = parse(tokens);
        assert!(result.is_err());
        assert_eq!(result.unwrap_err(), "No commands found");
    }

    #[test]
    fn test_only_pipe() {
        let tokens = vec![Token::Pipe];
        let result = parse(tokens);
        assert!(result.is_err());
        assert_eq!(result.unwrap_err(), "No commands found");
    }

    #[test]
    fn test_redirection_without_file() {
        // Parser doesn't check for missing file, just skips if no token after
        let tokens = vec![Token::Word("cat".to_string()), Token::RedirIn];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                input: None,
                output: None,
                append: None,
            }])
        );
    }

    #[test]
    fn test_multiple_redirections() {
        let tokens = vec![
            Token::Word("cat".to_string()),
            Token::RedirIn,
            Token::Word("file1.txt".to_string()),
            Token::RedirOut,
            Token::Word("file2.txt".to_string()),
        ];
        let result = parse(tokens).unwrap();
        assert_eq!(
            result,
            Ast::Pipeline(vec![ShellCommand {
                args: vec!["cat".to_string()],
                input: Some("file1.txt".to_string()),
                output: Some("file2.txt".to_string()),
                append: None,
            }])
        );
    }

    #[test]
    fn test_parse_if() {
        let tokens = vec![
            Token::If,
            Token::Word("true".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("yes".to_string()),
            Token::Semicolon,
            Token::Fi,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::If {
            branches,
            else_branch,
        } = result
        {
            assert_eq!(branches.len(), 1);
            let (condition, then_branch) = &branches[0];
            if let Ast::Pipeline(cmds) = &**condition {
                assert_eq!(cmds[0].args, vec!["true"]);
            } else {
                panic!("condition not pipeline");
            }
            if let Ast::Pipeline(cmds) = &**then_branch {
                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
            } else {
                panic!("then_branch not pipeline");
            }
            assert!(else_branch.is_none());
        } else {
            panic!("not if");
        }
    }

    #[test]
    fn test_parse_if_elif() {
        let tokens = vec![
            Token::If,
            Token::Word("false".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("no".to_string()),
            Token::Semicolon,
            Token::Elif,
            Token::Word("true".to_string()),
            Token::Semicolon,
            Token::Then,
            Token::Word("printf".to_string()),
            Token::Word("yes".to_string()),
            Token::Semicolon,
            Token::Fi,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::If {
            branches,
            else_branch,
        } = result
        {
            assert_eq!(branches.len(), 2);
            // First branch: false -> printf no
            // BUGFIX: the `if let` checks previously had no `else` arm,
            // so a non-Pipeline node silently skipped the assertions and
            // the test passed vacuously. Now a mismatch fails the test.
            let (condition1, then1) = &branches[0];
            if let Ast::Pipeline(cmds) = &**condition1 {
                assert_eq!(cmds[0].args, vec!["false"]);
            } else {
                panic!("first condition not pipeline");
            }
            if let Ast::Pipeline(cmds) = &**then1 {
                assert_eq!(cmds[0].args, vec!["printf", "no"]);
            } else {
                panic!("first then-branch not pipeline");
            }
            // Second branch: true -> printf yes
            let (condition2, then2) = &branches[1];
            if let Ast::Pipeline(cmds) = &**condition2 {
                assert_eq!(cmds[0].args, vec!["true"]);
            } else {
                panic!("second condition not pipeline");
            }
            if let Ast::Pipeline(cmds) = &**then2 {
                assert_eq!(cmds[0].args, vec!["printf", "yes"]);
            } else {
                panic!("second then-branch not pipeline");
            }
            assert!(else_branch.is_none());
        } else {
            panic!("not if");
        }
    }

    #[test]
    fn test_parse_assignment() {
        let tokens = vec![Token::Word("MY_VAR=test_value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Assignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "test_value");
        } else {
            panic!("not assignment");
        }
    }

    #[test]
    fn test_parse_assignment_quoted() {
        let tokens = vec![Token::Word("MY_VAR=hello world".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Assignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "hello world");
        } else {
            panic!("not assignment");
        }
    }

    #[test]
    fn test_parse_assignment_invalid() {
        // Variable name starting with number should not be parsed as assignment
        let tokens = vec![Token::Word("123VAR=value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::Pipeline(cmds) = result {
            assert_eq!(cmds[0].args, vec!["123VAR=value"]);
        } else {
            panic!("should be parsed as pipeline");
        }
    }

    #[test]
    fn test_parse_function_definition() {
        let tokens = vec![
            Token::Word("myfunc".to_string()),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            Token::Word("echo".to_string()),
            Token::Word("hello".to_string()),
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "myfunc");
            // Body should be a pipeline with echo hello
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }

    #[test]
    fn test_parse_function_definition_empty() {
        let tokens = vec![
            Token::Word("emptyfunc".to_string()),
            Token::LeftParen,
            Token::RightParen,
            Token::LeftBrace,
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "emptyfunc");
            // Empty body should default to true command
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["true"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }

    #[test]
    fn test_parse_function_definition_legacy_format() {
        // Test backward compatibility with parentheses in the function name
        let tokens = vec![
            Token::Word("legacyfunc()".to_string()),
            Token::LeftBrace,
            Token::Word("echo".to_string()),
            Token::Word("hello".to_string()),
            Token::RightBrace,
        ];
        let result = parse(tokens).unwrap();
        if let Ast::FunctionDefinition { name, body } = result {
            assert_eq!(name, "legacyfunc");
            // Body should be a pipeline with echo hello
            if let Ast::Pipeline(cmds) = *body {
                assert_eq!(cmds[0].args, vec!["echo", "hello"]);
            } else {
                panic!("function body should be a pipeline");
            }
        } else {
            panic!("should be parsed as function definition");
        }
    }

    #[test]
    fn test_parse_local_assignment() {
        let tokens = vec![Token::Local, Token::Word("MY_VAR=test_value".to_string())];
        let result = parse(tokens).unwrap();
        if let Ast::LocalAssignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "test_value");
        } else {
            panic!("should be parsed as local assignment");
        }
    }

    #[test]
    fn test_parse_local_assignment_separate_tokens() {
        let tokens = vec![
            Token::Local,
            Token::Word("MY_VAR".to_string()),
            Token::Word("test_value".to_string()),
        ];
        let result = parse(tokens).unwrap();
        if let Ast::LocalAssignment { var, value } = result {
            assert_eq!(var, "MY_VAR");
            assert_eq!(value, "test_value");
        } else {
            panic!("should be parsed as local assignment");
        }
    }

    #[test]
    fn test_parse_local_assignment_invalid_var_name() {
        // Variable name starting with number should not be parsed as local assignment
        let tokens = vec![Token::Local, Token::Word("123VAR=value".to_string())];
        let result = parse(tokens);
        // Should return an error since 123VAR is not a valid variable name
        assert!(result.is_err());
    }
}