espy_ears/
lib.rs

#![doc = include_str!("../README.md")]

use dst_factory::make_dst_factory;
use espy_eyes::{Lexer, Lexigram, Token};
use std::iter::Peekable;

#[cfg(test)]
mod tests;

#[derive(Debug, Eq, PartialEq)]
pub enum Error<'source> {
    /// An invalid token was encountered.
    ///
    /// The AST interprets an erroneous token as `None`,
    /// which may lead to further error diagnostics.
    ///
    /// See: [`espy_eyes::Error`]
    Lexer(espy_eyes::Error<'source>),
    MissingToken {
        /// Must contain at least one element.
        expected: &'static [Lexigram],
        /// A `None` token may have been caused by a lexer error.
        actual: Option<Token<'source>>,
    },
    /// Occurs when an expression is required,
    /// but a token that ends expression context was immediately encountered.
    ExpectedExpression(Option<Token<'source>>),
    /// Occurs when a "root" [`Block`] encounters something which is not a statement or expression.
    ExpectedStatementOrExpression(Token<'source>),
    /// Occurs when parentheses in an expression are unbalanced.
    ///
    /// [`Error::IncompleteExpression`] serves as the opening-parenthesis equivalent.
    UnexpectedCloseParen(Token<'source>),
    /// This error should only ever occur on [`Expression`]s,
    /// so positioning can be derived from surrounding context.
    IncompleteExpression,
}

/// Contains a list of the errors encountered by an AST node.
#[derive(Debug, Default, Eq, PartialEq)]
pub struct Diagnostics<'source> {
    pub errors: Vec<Error<'source>>,
}

impl<'source> Diagnostics<'source> {
    fn expect(
        &mut self,
        t: Option<espy_eyes::Result<'source>>,
        expected: &'static [Lexigram],
    ) -> Option<Token<'source>> {
        let actual = self.wrap(t);
        if actual.is_some_and(|actual| expected.contains(&actual.lexigram)) {
            actual
        } else {
            self.errors.push(Error::MissingToken { expected, actual });
            None
        }
    }

    fn expect_expression(
        &mut self,
        lexer: &mut Peekable<Lexer<'source>>,
    ) -> Option<Box<Expression<'source>>> {
        let expression = Expression::new(lexer);
        if expression.is_none() {
            self.errors.push(Error::ExpectedExpression(
                lexer.peek().copied().transpose().ok().flatten(),
            ));
        }
        expression
    }

    fn next_if(
        &mut self,
        lexer: &mut Peekable<Lexer<'source>>,
        expected: &'static [Lexigram],
    ) -> Option<Token<'source>> {
        self.expect(lexer.peek().copied(), expected).inspect(|_| {
            lexer.next();
        })
    }

    fn wrap(&mut self, t: Option<espy_eyes::Result<'source>>) -> Option<Token<'source>> {
        match t? {
            Ok(t) => Some(t),
            Err(e) => {
                // A reserved symbol can still be treated as an identifier for
                // error recovery, so produce a token alongside the error.
                let t = if let espy_eyes::Error {
                    origin,
                    kind: espy_eyes::ErrorKind::ReservedSymbol,
                } = e
                {
                    Some(Token {
                        origin,
                        lexigram: Lexigram::Ident,
                    })
                } else {
                    None
                };
                self.errors.push(Error::Lexer(e));
                t
            }
        }
    }
}

/// Components of an expression.
///
/// Expression evaluation is stack-based rather than using a syntax tree.
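///
/// For example, `1 + 2 * 3` is emitted in postfix order as
/// `Number(1) Number(2) Number(3) Mul Add`. A minimal evaluator sketch over
/// such a node list (`parse_number` is a hypothetical helper, and most
/// variants are elided):
///
/// ```ignore
/// let mut values: Vec<i64> = Vec::new();
/// for node in &expression.contents {
///     match node {
///         Node::Number(t) => values.push(parse_number(t)),
///         Node::Add(_) => {
///             let (r, l) = (values.pop().unwrap(), values.pop().unwrap());
///             values.push(l + r);
///         }
///         Node::Mul(_) => {
///             let (r, l) = (values.pop().unwrap(), values.pop().unwrap());
///             values.push(l * r);
///         }
///         _ => unimplemented!(),
///     }
/// }
/// ```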
#[derive(Debug, Eq, PartialEq)]
pub enum Node<'source> {
    Unit(Token<'source>, Token<'source>),
    Bool(bool, Token<'source>),
    Number(Token<'source>),
    String(Token<'source>),
    Variable(Token<'source>),
    Block(Box<Block<'source>>),
    If(Box<If<'source>>),
    Match(Box<Match<'source>>),
    Enum(Box<Enum<'source>>),

    Pipe(Token<'source>),
    Call(Token<'source>),
    Bind(Token<'source>),
    Positive(Token<'source>),
    Negative(Token<'source>),
    Annotation(Box<Annotation<'source>>),
    Deref(Token<'source>),
    Mul(Token<'source>),
    Div(Token<'source>),
    Add(Token<'source>),
    Sub(Token<'source>),
    BitwiseAnd(Token<'source>),
    BitwiseOr(Token<'source>),
    BitwiseXor(Token<'source>),
    EqualTo(Token<'source>),
    NotEqualTo(Token<'source>),
    Greater(Token<'source>),
    GreaterEqual(Token<'source>),
    Lesser(Token<'source>),
    LesserEqual(Token<'source>),
    LogicalAnd(Token<'source>),
    LogicalOr(Token<'source>),
    Name {
        name: Token<'source>,
        colon_token: Token<'source>,
    },
    Field {
        dot_token: Token<'source>,
        index: Token<'source>,
    },
    Tuple(Token<'source>),
}

#[derive(Debug, Eq, PartialEq)]
enum Operation<'source> {
    Call(Token<'source>),
    Pipe(Token<'source>),
    Bind(Token<'source>),
    Positive(Token<'source>),
    Negative(Token<'source>),
    Annotation(Box<Annotation<'source>>),
    Deref(Token<'source>),
    Mul(Token<'source>),
    Div(Token<'source>),
    Add(Token<'source>),
    Sub(Token<'source>),
    BitwiseAnd(Token<'source>),
    BitwiseXor(Token<'source>),
    BitwiseOr(Token<'source>),
    EqualTo(Token<'source>),
    NotEqualTo(Token<'source>),
    Greater(Token<'source>),
    GreaterEqual(Token<'source>),
    Lesser(Token<'source>),
    LesserEqual(Token<'source>),
    LogicalAnd(Token<'source>),
    LogicalOr(Token<'source>),
    Name {
        name: Token<'source>,
        colon_token: Token<'source>,
    },
    Field {
        dot_token: Token<'source>,
        index: Token<'source>,
    },
    Tuple(Token<'source>),
    SubExpression(Token<'source>),
}

impl Operation<'_> {
    // NOTE: please keep espybook/precedence.md up to date with this!
    fn precedence(&self) -> usize {
        match self {
            Operation::Field { .. } | Operation::Deref(_) => 13,
            Operation::Positive(_) | Operation::Negative(_) | Operation::Annotation(_) => 12,
            Operation::Mul(_) | Operation::Div(_) => 11,
            Operation::Add(_) | Operation::Sub(_) => 10,
            Operation::BitwiseAnd(_) => 9,
            Operation::BitwiseXor(_) => 8,
            Operation::BitwiseOr(_) => 7,
            Operation::EqualTo(_)
            | Operation::NotEqualTo(_)
            | Operation::Greater(_)
            | Operation::GreaterEqual(_)
            | Operation::Lesser(_)
            | Operation::LesserEqual(_) => 6,
            Operation::LogicalAnd(_) => 5,
            Operation::LogicalOr(_) => 4,
            Operation::Name { .. } => 3,
            Operation::Tuple(_) => 2,
            Operation::Pipe(_) | Operation::Call(_) | Operation::Bind(_) => 1,
            Operation::SubExpression(_) => 0,
        }
    }
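
    // Worked example (illustrative): in `a + b * c`, `Mul` (11) outranks
    // `Add` (10), so `+` waits on the operator stack while `*` is pushed;
    // the postfix output becomes `a b c * +`. Left associativity means
    // `a - b - c` flushes the first `-` when the second arrives, yielding
    // `a b - c -`.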

    // NOTE: please keep espybook/precedence.md up to date with this!
    fn left_associative(&self) -> bool {
        match self {
            Operation::Field { .. }
            | Operation::Positive(_)
            | Operation::Negative(_)
            | Operation::Annotation(_)
            | Operation::Deref(_)
            | Operation::Mul(_)
            | Operation::Div(_)
            | Operation::Add(_)
            | Operation::Sub(_)
            | Operation::BitwiseAnd(_)
            | Operation::BitwiseXor(_)
            | Operation::BitwiseOr(_)
            | Operation::EqualTo(_)
            | Operation::NotEqualTo(_)
            | Operation::Greater(_)
            | Operation::GreaterEqual(_)
            | Operation::Lesser(_)
            | Operation::LesserEqual(_)
            | Operation::LogicalAnd(_)
            | Operation::LogicalOr(_)
            | Operation::Tuple(_)
            | Operation::SubExpression(_)
            | Operation::Call(_)
            | Operation::Bind(_)
            | Operation::Pipe(_) => true,
            Operation::Name { .. } => false,
        }
    }
}

impl<'source> From<Operation<'source>> for Node<'source> {
    fn from(op: Operation<'source>) -> Self {
        match op {
            Operation::Field { dot_token, index } => Node::Field { dot_token, index },
            Operation::Pipe(t) => Node::Pipe(t),
            Operation::Bind(t) => Node::Bind(t),
            Operation::Call(t) => Node::Call(t),
            Operation::Positive(t) => Node::Positive(t),
            Operation::Negative(t) => Node::Negative(t),
            Operation::Annotation(t) => Node::Annotation(t),
            Operation::Deref(t) => Node::Deref(t),
            Operation::Mul(t) => Node::Mul(t),
            Operation::Div(t) => Node::Div(t),
            Operation::Add(t) => Node::Add(t),
            Operation::Sub(t) => Node::Sub(t),
            Operation::BitwiseAnd(t) => Node::BitwiseAnd(t),
            Operation::BitwiseXor(t) => Node::BitwiseXor(t),
            Operation::BitwiseOr(t) => Node::BitwiseOr(t),
            Operation::EqualTo(t) => Node::EqualTo(t),
            Operation::NotEqualTo(t) => Node::NotEqualTo(t),
            Operation::Greater(t) => Node::Greater(t),
            Operation::GreaterEqual(t) => Node::GreaterEqual(t),
            Operation::Lesser(t) => Node::Lesser(t),
            Operation::LesserEqual(t) => Node::LesserEqual(t),
            Operation::LogicalAnd(t) => Node::LogicalAnd(t),
            Operation::LogicalOr(t) => Node::LogicalOr(t),
            Operation::Name { name, colon_token } => Node::Name { name, colon_token },
            Operation::Tuple(t) => Node::Tuple(t),
            Operation::SubExpression(_) => {
                panic!("sub expressions may not enter the output stack")
            }
        }
    }
}

/// This type should not contain an incomplete expression so long as there are no error diagnostics.
#[derive(Debug, Eq, PartialEq)]
#[make_dst_factory(pub)]
pub struct Expression<'source> {
    pub first_token: Option<Token<'source>>,
    pub last_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
    pub contents: [Node<'source>],
}

impl<'source> Expression<'source> {
    /// Parse an expression, stopping when the upcoming (peeked) token can no
    /// longer continue it; the terminating token is left unconsumed.
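    ///
    /// A usage sketch; constructing the lexer is an assumption about
    /// `espy_eyes`' API:
    ///
    /// ```ignore
    /// let mut lexer = espy_eyes::Lexer::new("1 + 2").peekable();
    /// let expression = Expression::new(&mut lexer).expect("expression should be non-empty");
    /// assert!(expression.diagnostics.errors.is_empty());
    /// ```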
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Option<Box<Self>> {
        let first_token = lexer.peek().copied().transpose().ok().flatten();
        let mut last_token = None;
        let mut diagnostics = Diagnostics::default();
        // Shunting yard: `contents` is the postfix output; `stack` holds pending operators.
        let mut contents = Vec::new();
        let mut stack = Vec::new();
        // Check if the last token implies the unary position.
        // This is probably not the best way to do this.
        let unary_position = |last_token| {
            matches!(
                last_token,
                None | Some(Token {
                    lexigram: Lexigram::Plus
                        | Lexigram::Minus
                        | Lexigram::Star
                        | Lexigram::Slash
                        | Lexigram::Ampersand
                        | Lexigram::Caret
                        | Lexigram::Pipe
                        | Lexigram::DoubleEqual
                        | Lexigram::BangEqual
                        | Lexigram::Greater
                        | Lexigram::GreaterEqual
                        | Lexigram::Lesser
                        | Lexigram::LesserEqual
                        | Lexigram::And
                        | Lexigram::Or
                        | Lexigram::Comma
                        | Lexigram::Colon
                        | Lexigram::Triangle
                        | Lexigram::OpenParen
                        | Lexigram::CloseSquare,
                    ..
                })
            )
        };
        let flush = |output: &mut Vec<Node<'source>>, stack: &mut Vec<Operation<'source>>| {
            while let Some(op) = stack.pop_if(|x| !matches!(x, Operation::SubExpression(_))) {
                output.push(op.into());
            }
        };
        let push_with_precedence =
            |output: &mut Vec<Node<'source>>,
             stack: &mut Vec<Operation<'source>>,
             operator: Operation<'source>| {
                while let Some(op) = stack.pop_if(|x| {
                    if operator.left_associative() {
                        x.precedence() >= operator.precedence()
                    } else {
                        x.precedence() > operator.precedence()
                    }
                }) {
                    // SubExpression has the lowest precedence, so this cannot panic.
                    output.push(op.into());
                }
                stack.push(operator);
            };
        loop {
            let unary_position = unary_position(last_token);
            let t = diagnostics.wrap(lexer.peek().copied());
            macro_rules! lexi {
                ($($name:ident)? @ $lexi:ident) => {
                    Some($($name @)? Token {
                        lexigram: Lexigram::$lexi,
                        ..
                    })
                };
            }
            macro_rules! op {
                ($op:ident($inner:expr)) => {
                    push_with_precedence(&mut contents, &mut stack, Operation::$op($inner))
                };
            }
            match t {
                // Terminals
                //
                // A terminal value outside of unary position implies a function call,
                // so flush the operator stack in this case.
                lexi!(number @ Number) => {
                    if !unary_position {
                        op!(Call(number));
                    }
                    contents.push(Node::Number(number));
                }
                lexi!(string @ String) => {
                    if !unary_position {
                        op!(Call(string));
                    }
                    contents.push(Node::String(string));
                }
                lexi!(discard @ Discard) => {
                    last_token = lexer.next().transpose().ok().flatten();
                    let colon_token = diagnostics.next_if(lexer, &[Lexigram::Colon]);
                    if let Some(colon_token) = colon_token {
                        last_token = Some(colon_token);
                        push_with_precedence(
                            &mut contents,
                            &mut stack,
                            Operation::Name {
                                name: discard,
                                colon_token,
                            },
                        );
                    }
                    continue;
                }
                lexi!(ident @ Ident) => {
                    if !unary_position {
                        op!(Call(ident));
                    }
                    last_token = lexer.next().transpose().ok().flatten();
                    if let Some(Ok(
                        colon_token @ Token {
                            lexigram: Lexigram::Colon,
                            ..
                        },
                    )) = lexer.peek().copied()
                    {
                        last_token = lexer.next().transpose().ok().flatten();
                        push_with_precedence(
                            &mut contents,
                            &mut stack,
                            Operation::Name {
                                name: ident,
                                colon_token,
                            },
                        );
                    } else {
                        contents.push(Node::Variable(ident));
                    }
                    continue;
                }
                lexi!(t @ True) => {
                    if !unary_position {
                        push_with_precedence(&mut contents, &mut stack, Operation::Call(t));
                    }
                    contents.push(Node::Bool(true, t));
                }
                lexi!(t @ False) => {
                    if !unary_position {
                        push_with_precedence(&mut contents, &mut stack, Operation::Call(t));
                    }
                    contents.push(Node::Bool(false, t));
                }
                lexi!(t @ OpenParen) => {
                    if !unary_position {
                        push_with_precedence(&mut contents, &mut stack, Operation::Call(t));
                    }
                    stack.push(Operation::SubExpression(t));
                }
                lexi!(t @ OpenBrace) => {
                    if !unary_position {
                        push_with_precedence(&mut contents, &mut stack, Operation::Call(t));
                    }
                    lexer.next();
                    contents.push(Node::Block(Block::child(&mut *lexer)));
                    diagnostics.expect(lexer.peek().copied(), &[Lexigram::CloseBrace]);
                }

                // prefix operators
                lexi!(t @ Plus) if unary_position => op!(Positive(t)),
                lexi!(t @ Minus) if unary_position => op!(Negative(t)),
                lexi!(t @ At) if unary_position => {
                    lexer.next();
                    let annotation = Annotation::new(t, lexer);
                    last_token = annotation.close_square;
                    op!(Annotation(Box::new(annotation)));
                    continue;
                }
                // postfix
                lexi!(dot_token @ Dot) if !unary_position => {
                    last_token = lexer.next().transpose().ok().flatten();
                    if let Some(index) =
                        diagnostics.next_if(lexer, &[Lexigram::Ident, Lexigram::Number])
                    {
                        last_token = Some(index);
                        push_with_precedence(
                            &mut contents,
                            &mut stack,
                            Operation::Field { dot_token, index },
                        );
                    }
                    continue;
                }
                lexi!(t @ DotStar) if !unary_position => op!(Deref(t)),
                lexi!(t @ Bang) if !unary_position => op!(Bind(t)),
                // infix operators
                lexi!(t @ Plus) if !unary_position => op!(Add(t)),
                lexi!(t @ Minus) if !unary_position => op!(Sub(t)),
                lexi!(t @ Star) if !unary_position => op!(Mul(t)),
                lexi!(t @ Slash) if !unary_position => op!(Div(t)),
                lexi!(t @ Ampersand) if !unary_position => op!(BitwiseAnd(t)),
                lexi!(t @ Caret) if !unary_position => op!(BitwiseXor(t)),
                lexi!(t @ Pipe) if !unary_position => op!(BitwiseOr(t)),
                lexi!(t @ DoubleEqual) if !unary_position => op!(EqualTo(t)),
                lexi!(t @ BangEqual) if !unary_position => op!(NotEqualTo(t)),
                lexi!(t @ Greater) if !unary_position => op!(Greater(t)),
                lexi!(t @ GreaterEqual) if !unary_position => op!(GreaterEqual(t)),
                lexi!(t @ Lesser) if !unary_position => op!(Lesser(t)),
                lexi!(t @ LesserEqual) if !unary_position => op!(LesserEqual(t)),
                lexi!(t @ And) if !unary_position => op!(LogicalAnd(t)),
                lexi!(t @ Or) if !unary_position => op!(LogicalOr(t)),
                lexi!(t @ Triangle) if !unary_position => op!(Pipe(t)),
                lexi!(t @ Comma) if !unary_position => op!(Tuple(t)),
                lexi!(  @ If) => contents.push(If::from(&mut *lexer).into()),
                lexi!(  @ Match) => contents.push(Match::new(&mut *lexer).into()),
                lexi!(  @ Enum) => contents.push(Enum::from(&mut *lexer).into()),
                lexi!(t @ CloseParen) if unary_position => {
                    if let Some(
                        last_token @ Token {
                            lexigram: Lexigram::OpenParen,
                            ..
                        },
                    ) = last_token
                    {
                        contents.push(Node::Unit(last_token, t));
                    } else if let Some(Operation::Tuple(..)) = stack.last() {
                        // Special case Tuple (comma) to be ignored when incomplete.
                        stack.pop();
                    } else {
                        diagnostics.errors.push(Error::IncompleteExpression);
                    }
                    if !matches!(stack.pop(), Some(Operation::SubExpression(_))) {
                        diagnostics.errors.push(Error::UnexpectedCloseParen(t));
                    }
                }
                lexi!(t @ CloseParen) if !unary_position => {
                    while let Some(op) = stack.pop_if(|x| !matches!(x, Operation::SubExpression(_)))
                    {
                        contents.push(op.into());
                    }
                    if !matches!(stack.pop(), Some(Operation::SubExpression(_))) {
                        diagnostics.errors.push(Error::UnexpectedCloseParen(t));
                    }
                }
                _ => {
                    if unary_position {
                        if let [Operation::Tuple(..)] = stack.as_slice() {
                            // Special case Tuple (comma) to be ignored when incomplete.
                            stack.pop();
                        } else if !contents.is_empty() || !stack.is_empty() {
                            diagnostics.errors.push(Error::IncompleteExpression);
                        }
                    } else {
                        flush(&mut contents, &mut stack);
                        if !stack.is_empty() {
                            diagnostics.errors.push(Error::IncompleteExpression);
                        }
                    }
                    if contents.is_empty() && diagnostics.errors.is_empty() {
                        return None;
                    }
                    return Some(Expression::build(
                        first_token,
                        last_token,
                        diagnostics,
                        contents,
                    ));
                }
            }
            // This is sometimes skipped with a continue!
            last_token = lexer.next().transpose().unwrap_or(None);
        }
    }
}

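/// An `@name[ … ]` annotation.
///
/// `tokens` holds everything between the square brackets verbatim;
/// nested square brackets are balanced rather than ending the annotation.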
#[derive(Debug, Eq, PartialEq)]
pub struct Annotation<'source> {
    pub at_sign: Token<'source>,
    pub name: Option<Token<'source>>,
    pub open_square: Option<Token<'source>>,
    pub tokens: Box<[Token<'source>]>,
    pub close_square: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Annotation<'source> {
    fn new(at_sign: Token<'source>, lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let name = diagnostics.next_if(lexer, &[Lexigram::Ident]);
        let open_square = diagnostics.next_if(lexer, &[Lexigram::OpenSquare]);
        let mut tokens = Vec::new();
        let mut square_level = 0;
        loop {
            let token = match diagnostics.wrap(lexer.peek().copied()) {
                Some(
                    token @ Token {
                        lexigram: Lexigram::OpenSquare,
                        ..
                    },
                ) => {
                    square_level += 1;
                    token
                }
                Some(
                    token @ Token {
                        lexigram: Lexigram::CloseSquare,
                        ..
                    },
                ) => {
                    if square_level > 0 {
                        square_level -= 1;
                        token
                    } else {
                        break;
                    }
                }
                Some(token) => token,
                None => break,
            };
            lexer.next();
            tokens.push(token);
        }
        let close_square = diagnostics.next_if(lexer, &[Lexigram::CloseSquare]);
        Self {
            at_sign,
            name,
            open_square,
            tokens: tokens.into_boxed_slice(),
            close_square,
            diagnostics,
        }
    }
}

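/// An `if … then … end` conditional with an optional `else then …` or
/// `else if …` continuation; `else_kind` records which form was used.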
#[derive(Debug, Eq, PartialEq)]
pub struct If<'source> {
    pub if_token: Token<'source>,
    pub condition: Option<Box<Expression<'source>>>,
    pub then_token: Option<Token<'source>>,
    pub first: Box<Block<'source>>,
    pub else_token: Option<Token<'source>>,
    pub else_kind: Option<Token<'source>>,
    pub second: Box<Block<'source>>,
    pub end_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> From<If<'source>> for Node<'source> {
    fn from(if_block: If<'source>) -> Self {
        Self::If(Box::new(if_block))
    }
}

impl<'source> From<&mut Peekable<Lexer<'source>>> for If<'source> {
    fn from(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let if_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let mut diagnostics = Diagnostics::default();
        let condition = diagnostics.expect_expression(lexer);
        let then_token = diagnostics.next_if(lexer, &[Lexigram::Then]);
        let first = Block::child(&mut *lexer);
        let (second, else_token, else_kind) = if let else_token @ Some(Token {
            lexigram: Lexigram::Else,
            ..
        }) = diagnostics.wrap(lexer.peek().copied())
        {
            lexer.next();
            let (second, else_kind) = match diagnostics.wrap(lexer.peek().copied()) {
                else_kind @ Some(Token {
                    lexigram: Lexigram::Then,
                    ..
                }) => {
                    lexer.next();
                    (Block::child(&mut *lexer), else_kind)
                }
                else_kind @ Some(Token {
                    lexigram: Lexigram::If,
                    ..
                }) => (
                    Block::build(
                        Expression::build(
                            None,
                            None,
                            Diagnostics::default(),
                            [Self::from(&mut *lexer).into()],
                        )
                        .into(),
                        Diagnostics::default(),
                        [],
                    ),
                    else_kind,
                ),
                _ => {
                    diagnostics.expect(lexer.peek().copied(), &[Lexigram::Then, Lexigram::If]);
                    (Box::default(), None)
                }
            };
            (second, else_token, else_kind)
        } else {
            (Box::default(), None, None)
        };
        let end_token = diagnostics.expect(lexer.peek().copied(), &[Lexigram::End]);
        Self {
            if_token,
            condition,
            then_token,
            first,
            else_token,
            else_kind,
            second,
            end_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct MatchCase<'source> {
    pub let_token: Option<Token<'source>>,
    pub binding: Option<Binding<'source>>,
    pub equals_token: Option<Token<'source>>,
    pub case: Option<Box<Expression<'source>>>,
    pub arrow_token: Option<Token<'source>>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
}

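/// A `match … then … end` expression.
///
/// Each case is either `case => expression;` or
/// `let binding [= case] => expression;`; the semicolon may be omitted
/// on the final case.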
#[derive(Debug, Eq, PartialEq)]
#[make_dst_factory(pub)]
pub struct Match<'source> {
    pub match_token: Token<'source>,
    pub expression: Option<Box<Expression<'source>>>,
    pub then_token: Option<Token<'source>>,
    pub end_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
    pub cases: [MatchCase<'source>],
}

impl<'source> From<Box<Match<'source>>> for Node<'source> {
    fn from(struct_block: Box<Match<'source>>) -> Self {
        Self::Match(struct_block)
    }
}

impl<'source> Match<'source> {
    /// # Panics
    ///
    /// Panics if the lexer returns `None`.
    ///
    /// This function should only be called after successfully peeking a [`Lexigram::Match`].
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Box<Self> {
        let match_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let mut diagnostics = Diagnostics::default();

        let expression = diagnostics.expect_expression(lexer);
        let then_token = diagnostics.next_if(lexer, &[Lexigram::Then]);
        let mut cases = Vec::new();

        // Each case is either a bare `case => expression;`
        // or a `let` binding with an optional `= case` pattern.
        loop {
            let (let_token, binding, equals_token, case) = if let let_token @ Some(Token {
                lexigram: Lexigram::Let,
                ..
            }) =
                diagnostics.wrap(lexer.peek().copied())
            {
                lexer.next();
                let binding = Binding::new(lexer)
                    .map_err(|e| diagnostics.errors.push(e))
                    .ok();
                let (equal_token, case) = if let equal_token @ Some(Token {
                    lexigram: Lexigram::SingleEqual,
                    ..
                }) = diagnostics.wrap(lexer.peek().copied())
                {
                    lexer.next();
                    let case = diagnostics.expect_expression(lexer);
                    (equal_token, case)
                } else {
                    (None, None)
                };
                (let_token, binding, equal_token, case)
            } else {
                let case = diagnostics.expect_expression(lexer);
                (None, None, None, case)
            };
            let arrow_token = diagnostics.next_if(lexer, &[Lexigram::DoubleArrow]);
            let expression = diagnostics.expect_expression(lexer);
            let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);
            cases.push(MatchCase {
                let_token,
                binding,
                equals_token,
                case,
                arrow_token,
                expression,
                semicolon_token,
            });
            if semicolon_token.is_none()
                || diagnostics
                    .wrap(lexer.peek().copied())
                    .is_some_and(|t| t.lexigram == Lexigram::End)
            {
                break;
            }
        }
        let end_token = diagnostics.expect(lexer.peek().copied(), &[Lexigram::End]);
        Match::build(
            match_token,
            expression,
            then_token,
            end_token,
            diagnostics,
            cases,
        )
    }
}

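/// An `enum … end` expression; the variants are parsed as a single
/// expression between the two keywords.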
#[derive(Debug, Eq, PartialEq)]
pub struct Enum<'source> {
    pub enum_token: Token<'source>,
    pub variants: Option<Box<Expression<'source>>>,
    pub end_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> From<Enum<'source>> for Node<'source> {
    fn from(struct_block: Enum<'source>) -> Self {
        Self::Enum(Box::new(struct_block))
    }
}

impl<'source> From<&mut Peekable<Lexer<'source>>> for Enum<'source> {
    fn from(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let enum_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let mut diagnostics = Diagnostics::default();
        let variants = diagnostics.expect_expression(lexer);
        let end_token = diagnostics.expect(lexer.peek().copied(), &[Lexigram::End]);
        Self {
            enum_token,
            variants,
            end_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub enum Statement<'source> {
    Sequence(Sequence<'source>),
    Let(Let<'source>),
    Rebind(Rebind<'source>),
    Set(Set<'source>),
    Use(Use<'source>),
}

#[derive(Debug, Eq, PartialEq)]
pub struct Let<'source> {
    pub let_token: Token<'source>,
    pub binding: Option<Binding<'source>>,
    pub equals_token: Option<Token<'source>>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Let<'source> {
    /// This function should only be called with a `let_token` obtained by
    /// consuming a [`Lexigram::Let`].
    pub fn new(let_token: Token<'source>, lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let binding = Binding::new(lexer)
            .map_err(|e| diagnostics.errors.push(e))
            .ok();
        let equals_token = diagnostics.next_if(lexer, &[Lexigram::SingleEqual]);
        let expression = diagnostics.expect_expression(lexer);
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);

        Let {
            let_token,
            binding,
            equals_token,
            expression,
            semicolon_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct RebindSubject<'source> {
    pub ident_token: Option<Token<'source>>,
    pub comma_token: Option<Token<'source>>,
}

#[derive(Debug, Eq, PartialEq)]
pub enum RebindBy<'source> {
    Glob {
        star_token: Token<'source>,
    },
    Identifiers {
        bindings: Box<[RebindSubject<'source>]>,
    },
}

#[derive(Debug, Eq, PartialEq)]
pub struct Rebind<'source> {
    pub let_token: Token<'source>,
    pub caret_token: Token<'source>,
    pub by: RebindBy<'source>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Rebind<'source> {
    /// # Panics
    ///
    /// Panics if the lexer returns `None`.
    ///
    /// This function should only be called after successfully peeking a [`Lexigram::Caret`].
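    ///
    /// Grammar sketch: `let ^*;` rebinds by glob, while `let ^a, b;`
    /// rebinds the listed identifiers.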
    pub fn new(let_token: Token<'source>, lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let caret_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let by = if let Some(
            star_token @ Token {
                lexigram: Lexigram::Star,
                ..
            },
        ) = diagnostics.wrap(lexer.peek().copied())
        {
            lexer.next();
            RebindBy::Glob { star_token }
        } else {
            let mut bindings = Vec::new();
            if let Some(ident_token) = diagnostics.next_if(lexer, &[Lexigram::Ident]) {
                bindings.push(RebindSubject {
                    ident_token: Some(ident_token),
                    comma_token: None,
                });
            }
            while let comma_token @ Some(Token {
                lexigram: Lexigram::Comma,
                ..
            }) = diagnostics.wrap(lexer.peek().copied())
            {
                lexer.next();
                bindings.push(RebindSubject {
                    ident_token: diagnostics.next_if(lexer, &[Lexigram::Ident]),
                    comma_token,
                });
            }
            RebindBy::Identifiers {
                bindings: bindings.into_boxed_slice(),
            }
        };
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);
        Rebind {
            let_token,
            caret_token,
            by,
            semicolon_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct Sequence<'source> {
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Token<'source>,
}

impl<'source> Sequence<'source> {
    /// # Errors
    ///
    /// Returns a lone [`Expression`] if no semicolon token was encountered.
    pub fn try_sequence(
        lexer: &mut Peekable<Lexer<'source>>,
    ) -> Result<Self, Option<Box<Expression<'source>>>> {
        let expression = Expression::new(&mut *lexer);
        if let Some(Ok(
            semicolon_token @ Token {
                lexigram: Lexigram::Semicolon,
                ..
            },
        )) = lexer.peek().copied()
        {
            lexer.next();
            Ok(Sequence {
                expression,
                semicolon_token,
            })
        } else {
            Err(expression)
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct Set<'source> {
    pub set_token: Token<'source>,
    pub target: Option<Box<Expression<'source>>>,
    pub equals_token: Option<Token<'source>>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Set<'source> {
    /// # Panics
    ///
    /// Panics if the lexer returns `None`.
    ///
    /// This function should only be called after successfully peeking a [`Lexigram::Set`].
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let set_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let target = diagnostics.expect_expression(lexer);
        let equals_token = diagnostics.next_if(lexer, &[Lexigram::SingleEqual]);
        let expression = diagnostics.expect_expression(lexer);
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);

        Set {
            set_token,
            target,
            equals_token,
            expression,
            semicolon_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct Use<'source> {
    pub use_token: Token<'source>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Use<'source> {
    /// # Panics
    ///
    /// Panics if the lexer returns `None`.
    ///
    /// This function should only be called after successfully peeking a [`Lexigram::Use`].
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let use_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let expression = diagnostics.expect_expression(lexer);
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);

        Self {
            use_token,
            expression,
            semicolon_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct NumericBinding<'source> {
    pub binding: Binding<'source>,
    pub comma_token: Option<Token<'source>>,
}

#[derive(Debug, Eq, PartialEq)]
pub struct NamedBinding<'source> {
    pub field: Token<'source>,
    pub binding: Option<NamedSubBinding<'source>>,
    pub comma_token: Option<Token<'source>>,
}

#[derive(Debug, Eq, PartialEq)]
pub struct NamedSubBinding<'source> {
    pub colon_token: Token<'source>,
    pub binding: Binding<'source>,
}

#[derive(Debug, Eq, PartialEq)]
pub enum BindingMethod<'source> {
    Single(Token<'source>),
    Numeric {
        open_paren: Token<'source>,
        bindings: Box<[NumericBinding<'source>]>,
        close_paren: Option<Token<'source>>,
    },
    Named {
        open_brace: Token<'source>,
        bindings: Box<[NamedBinding<'source>]>,
        close_brace: Option<Token<'source>>,
    },
}

#[derive(Debug, Eq, PartialEq)]
pub struct Binding<'source> {
    pub method: BindingMethod<'source>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Binding<'source> {
    /// # Errors
    ///
    /// Returns an error if no valid binding token was encountered.
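    ///
    /// Accepted forms (sketch, assuming `_` lexes as [`Lexigram::Discard`]):
    /// a bare identifier or `_`, a numeric binding like `(a, (b, _))`, or a
    /// named binding like `{ x, y: inner }`; the latter two nest recursively.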
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Result<Self, Error<'source>> {
        match lexer.peek().copied().transpose().map_err(Error::Lexer)? {
            Some(
                t @ Token {
                    lexigram: Lexigram::Ident | Lexigram::Discard,
                    ..
                },
            ) => {
                lexer.next();
                Ok(Binding {
                    method: BindingMethod::Single(t),
                    diagnostics: Diagnostics::default(),
                })
            }
            Some(
                open_paren @ Token {
                    lexigram: Lexigram::OpenParen,
                    ..
                },
            ) => {
                let mut diagnostics = Diagnostics::default();
                let mut bindings = Vec::new();
                lexer.next();
                loop {
                    let t = diagnostics.wrap(lexer.peek().copied());
                    if let Some(Token {
                        lexigram: Lexigram::CloseParen,
                        ..
                    }) = t
                    {
                        break;
                    }
                    if let Ok(binding) = Binding::new(lexer) {
                        let comma_token = diagnostics
                            .wrap(lexer.peek().copied())
                            .filter(|t| t.lexigram == Lexigram::Comma);
                        bindings.push(NumericBinding {
                            binding,
                            comma_token,
                        });
                        if comma_token.is_some() {
                            lexer.next();
                        } else {
                            break;
                        }
                    } else {
                        diagnostics.errors.push(Error::MissingToken {
                            expected: &[
                                Lexigram::Ident,
                                Lexigram::Discard,
                                Lexigram::OpenParen,
                                Lexigram::OpenBrace,
                                Lexigram::CloseParen,
                            ],
                            actual: t,
                        });
                        break;
                    }
                }
                let close_paren = diagnostics.next_if(lexer, &[Lexigram::CloseParen]);
                Ok(Binding {
                    method: BindingMethod::Numeric {
                        open_paren,
                        bindings: bindings.into_boxed_slice(),
                        close_paren,
                    },
                    diagnostics,
                })
            }
            Some(
                open_brace @ Token {
                    lexigram: Lexigram::OpenBrace,
                    ..
                },
            ) => {
                let mut diagnostics = Diagnostics::default();
                let mut bindings = Vec::new();
                lexer.next();
                loop {
                    match diagnostics.wrap(lexer.peek().copied()) {
                        Some(Token {
                            lexigram: Lexigram::CloseBrace,
                            ..
                        }) => break,
                        Some(
                            field @ Token {
                                lexigram: Lexigram::Ident,
                                ..
                            },
                        ) => {
                            lexer.next();
                            match diagnostics.wrap(lexer.peek().copied()) {
                                Some(
                                    colon_token @ Token {
                                        lexigram: Lexigram::Colon,
                                        ..
                                    },
                                ) => {
                                    lexer.next();
                                    match Binding::new(lexer) {
                                        Ok(binding) => {
                                            let comma_token = diagnostics
                                                .wrap(lexer.peek().copied())
                                                .filter(|t| t.lexigram == Lexigram::Comma);
                                            bindings.push(NamedBinding {
                                                field,
                                                binding: Some(NamedSubBinding {
                                                    colon_token,
                                                    binding,
                                                }),
                                                comma_token,
                                            });
                                            if comma_token.is_some() {
                                                lexer.next();
                                            } else {
                                                break;
                                            }
                                        }
                                        Err(e) => {
                                            diagnostics.errors.push(e);
                                            break;
                                        }
                                    }
                                }
                                comma_token @ Some(Token {
                                    lexigram: Lexigram::Comma,
                                    ..
                                }) => {
                                    lexer.next();
                                    bindings.push(NamedBinding {
                                        field,
                                        binding: None,
                                        comma_token,
                                    });
                                }
                                _ => {
                                    bindings.push(NamedBinding {
                                        field,
                                        binding: None,
                                        comma_token: None,
                                    });
                                    break;
                                }
                            }
                        }
                        actual => {
                            diagnostics.errors.push(Error::MissingToken {
                                expected: &[Lexigram::Ident, Lexigram::CloseBrace],
                                actual,
                            });
                            break;
                        }
                    }
                }
                let close_brace = diagnostics.next_if(lexer, &[Lexigram::CloseBrace]);
                Ok(Binding {
                    method: BindingMethod::Named {
                        open_brace,
                        bindings: bindings.into_boxed_slice(),
                        close_brace,
                    },
                    diagnostics,
                })
            }
            actual => Err(Error::MissingToken {
                expected: &[
                    Lexigram::Ident,
                    Lexigram::Discard,
                    Lexigram::OpenParen,
                    Lexigram::OpenBrace,
                ],
                actual,
            }),
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub enum FunctionBody<'source> {
    Block(Box<Block<'source>>),
    /// Occurs when a `with` function's semicolon is absent.
    ///
    /// Used to name only the type of a function.
    Never,
}

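/// A `with` function literal: an argument [`Binding`], an optional
/// [`Lexigram::Colon`] plus input expression, an optional
/// [`Lexigram::SingleArrow`] plus output expression, and an optional
/// semicolon followed by the body block.
///
/// Omitting the semicolon yields [`FunctionBody::Never`], naming only the
/// function's type.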
#[derive(Debug, Eq, PartialEq)]
pub struct Function<'source> {
    pub with_token: Token<'source>,
    pub argument: Option<Binding<'source>>,
    pub colon_token: Option<Token<'source>>,
    pub input: Option<Box<Expression<'source>>>,
    pub single_arrow_token: Option<Token<'source>>,
    pub output: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub body: FunctionBody<'source>,
    pub diagnostics: Diagnostics<'source>,
}

#[derive(Debug, Eq, PartialEq)]
#[expect(
    clippy::large_enum_variant,
    reason = "this is already inside of a (very large) boxed block"
)]
pub enum BlockResult<'source> {
    Expression(Option<Box<Expression<'source>>>),
    Function(Function<'source>),
}

impl BlockResult<'_> {
    #[must_use]
    pub fn is_empty(&self) -> bool {
        match self {
            BlockResult::Expression(expression) => expression.is_none(),
            BlockResult::Function(_) => false,
        }
    }
}

impl Default for BlockResult<'_> {
    fn default() -> Self {
        Self::Expression(None)
    }
}

impl<'source> From<Box<Expression<'source>>> for BlockResult<'source> {
    fn from(expression: Box<Expression<'source>>) -> Self {
        Self::Expression(Some(expression))
    }
}

impl<'source> From<Function<'source>> for BlockResult<'source> {
    fn from(function: Function<'source>) -> Self {
        Self::Function(function)
    }
}

#[derive(Debug, Eq, PartialEq)]
#[make_dst_factory(pub)]
pub struct Block<'source> {
    pub result: BlockResult<'source>,
    pub diagnostics: Diagnostics<'source>,
    pub statements: [Statement<'source>],
}

impl Default for Box<Block<'_>> {
    fn default() -> Self {
        Block::build(BlockResult::Expression(None), Diagnostics::default(), [])
    }
}

impl<'source> Block<'source> {
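    /// Parse a root [`Block`], reporting any trailing token as
    /// [`Error::ExpectedStatementOrExpression`].
    ///
    /// A usage sketch; constructing the lexer is an assumption about
    /// `espy_eyes`' API:
    ///
    /// ```ignore
    /// let mut lexer = espy_eyes::Lexer::new("let x = 1; x + 1").peekable();
    /// let block = Block::new(&mut lexer);
    /// assert!(block.diagnostics.errors.is_empty());
    /// ```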
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Box<Self> {
        Self::parse(lexer, true)
    }

    fn child(lexer: &mut Peekable<Lexer<'source>>) -> Box<Self> {
        Self::parse(lexer, false)
    }

    fn parse(lexer: &mut Peekable<Lexer<'source>>, root: bool) -> Box<Self> {
        let mut diagnostics = Diagnostics::default();
        let mut statements = Vec::new();
        let result = loop {
            let statement = match diagnostics.wrap(lexer.peek().copied()) {
                Some(
                    let_token @ Token {
                        lexigram: Lexigram::Let,
                        ..
                    },
                ) => {
                    lexer.next();
                    if let Some(Ok(Token {
                        lexigram: Lexigram::Caret,
                        ..
                    })) = lexer.peek()
                    {
                        Statement::Rebind(Rebind::new(let_token, lexer))
                    } else {
                        Statement::Let(Let::new(let_token, lexer))
                    }
                }
                Some(Token {
                    lexigram: Lexigram::Set,
                    ..
                }) => Statement::Set(Set::new(lexer)),
                Some(Token {
                    lexigram: Lexigram::Use,
                    ..
                }) => Statement::Use(Use::new(lexer)),
                Some(
                    with_token @ Token {
                        lexigram: Lexigram::With,
                        ..
                    },
                ) => {
                    lexer.next();
                    let mut st_diagnostics = Diagnostics::default();
                    let argument = Binding::new(lexer)
                        .map_err(|e| st_diagnostics.errors.push(e))
                        .ok();
                    let (colon_token, input) = if let Some(
                        t @ Token {
                            lexigram: Lexigram::Colon,
                            ..
                        },
                    ) = st_diagnostics.wrap(lexer.peek().copied())
                    {
                        lexer.next();
                        (Some(t), diagnostics.expect_expression(lexer))
                    } else {
                        (None, None)
                    };
                    let (single_arrow_token, output) = if let Some(
                        t @ Token {
                            lexigram: Lexigram::SingleArrow,
                            ..
                        },
                    ) =
                        st_diagnostics.wrap(lexer.peek().copied())
                    {
                        lexer.next();
                        (Some(t), diagnostics.expect_expression(lexer))
                    } else {
                        (None, None)
                    };
                    let (semicolon_token, body) = if let Some(
                        t @ Token {
                            lexigram: Lexigram::Semicolon,
                            ..
                        },
                    ) = st_diagnostics.wrap(lexer.peek().copied())
                    {
                        lexer.next();
                        (
                            Some(t),
                            FunctionBody::Block(Block::parse(&mut *lexer, root)),
                        )
                    } else {
                        (None, FunctionBody::Never)
                    };

                    break Function {
                        with_token,
                        argument,
                        colon_token,
                        input,
                        single_arrow_token,
                        output,
                        semicolon_token,
                        body,
                        diagnostics: st_diagnostics,
                    }
                    .into();
                }
                _ => match Sequence::try_sequence(&mut *lexer) {
                    Ok(sequence) => Statement::Sequence(sequence),
                    Err(expression) => {
                        break BlockResult::Expression(expression);
                    }
                },
            };
            statements.push(statement);
        };
        if root && let Some(t) = lexer.peek().copied().transpose().ok().flatten() {
            diagnostics
                .errors
                .push(Error::ExpectedStatementOrExpression(t));
        }
        Self::build(result, diagnostics, statements)
    }
}