espy_ears/
lib.rs

#![doc = include_str!("../README.md")]

use dst_factory::make_dst_factory;
use espy_eyes::{Lexer, Lexigram, Token};
use std::iter::Peekable;

#[cfg(test)]
mod tests;

#[derive(Debug, Eq, PartialEq)]
pub enum Error<'source> {
    /// An invalid token was encountered.
    ///
    /// The AST interprets an erroneous token as `None`,
    /// which may lead to further error diagnostics.
    ///
    /// See: [`espy_eyes::Error`]
    Lexer(espy_eyes::Error<'source>),
    MissingToken {
        /// Must contain at least one element.
        expected: &'static [Lexigram],
        /// A `None` token may have been caused by a lexer error.
        actual: Option<Token<'source>>,
    },
    /// Occurs when an expression is required,
    /// but a token that ends expression context was immediately encountered.
    ExpectedExpression(Option<Token<'source>>),
    /// Occurs when a "root" [`Block`] encounters something which is not a statement or expression.
    ExpectedStatementOrExpression(Token<'source>),
    /// Occurs when parentheses in an expression are unbalanced.
    ///
    /// [`Error::IncompleteExpression`] serves as the opening-parenthesis equivalent.
    UnexpectedCloseParen(Token<'source>),
    /// This error should only ever occur on [`Expression`]s,
    /// so positioning can be derived from surrounding context.
    IncompleteExpression,
}

/// Contains a list of the errors encountered by an AST node.
#[derive(Debug, Default, Eq, PartialEq)]
pub struct Diagnostics<'source> {
    pub errors: Vec<Error<'source>>,
}

impl<'source> Diagnostics<'source> {
    fn expect(
        &mut self,
        t: Option<espy_eyes::Result<'source>>,
        expected: &'static [Lexigram],
    ) -> Option<Token<'source>> {
        let actual = self.wrap(t);
        if actual.is_some_and(|actual| expected.contains(&actual.lexigram)) {
            actual
        } else {
            self.errors.push(Error::MissingToken { expected, actual });
            None
        }
    }

    fn expect_expression(
        &mut self,
        lexer: &mut Peekable<Lexer<'source>>,
    ) -> Option<Box<Expression<'source>>> {
        let expression = Expression::new(lexer);
        if expression.is_none() {
            self.errors.push(Error::ExpectedExpression(
                lexer.peek().copied().transpose().ok().flatten(),
            ));
        }
        expression
    }

    fn next_if(
        &mut self,
        lexer: &mut Peekable<Lexer<'source>>,
        expected: &'static [Lexigram],
    ) -> Option<Token<'source>> {
        self.expect(lexer.peek().copied(), expected).inspect(|_| {
            lexer.next();
        })
    }

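    /// Converts a raw lexer result into a token, recording any error.
    ///
    /// A [`espy_eyes::ErrorKind::ReservedSymbol`] error is recovered as an
    /// `Ident` token so that parsing can continue past the reserved symbol.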
    fn wrap(&mut self, t: Option<espy_eyes::Result<'source>>) -> Option<Token<'source>> {
        match t? {
            Ok(t) => Some(t),
            Err(e) => {
                let t = if let espy_eyes::Error {
                    origin,
                    kind: espy_eyes::ErrorKind::ReservedSymbol,
                } = e
                {
                    Some(Token {
                        origin,
                        lexigram: Lexigram::Ident,
                    })
                } else {
                    None
                };
                self.errors.push(Error::Lexer(e));
                t
            }
        }
    }
}

/// Components of an expression.
///
/// Expression evaluation is stack-based rather than using a syntax tree.
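///
/// For example (an illustrative sketch; token payloads depend on the lexer),
/// `1 + 2 * 3` flattens into the postfix node order
///
/// ```text
/// Number(1) Number(2) Number(3) Mul Add
/// ```
///
/// so an evaluator can walk the nodes left to right, pushing terminals onto a
/// value stack and popping operands for each operator.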
#[derive(Debug, Eq, PartialEq)]
pub enum Node<'source> {
    Unit(Token<'source>, Token<'source>),
    Bool(bool, Token<'source>),
    Number(Token<'source>),
    String(Token<'source>),
    Variable(Token<'source>),
    Block(Box<Block<'source>>),
    If(Box<If<'source>>),
    Match(Box<Match<'source>>),
    Enum(Box<Enum<'source>>),

    Pipe(Token<'source>),
    Call(Token<'source>),
    Bind(Token<'source>),
    Positive(Token<'source>),
    Negative(Token<'source>),
    Annotation(Box<Annotation<'source>>),
    Deref(Token<'source>),
    Mul(Token<'source>),
    Div(Token<'source>),
    Add(Token<'source>),
    Sub(Token<'source>),
    BitwiseAnd(Token<'source>),
    BitwiseOr(Token<'source>),
    BitwiseXor(Token<'source>),
    EqualTo(Token<'source>),
    NotEqualTo(Token<'source>),
    Greater(Token<'source>),
    GreaterEqual(Token<'source>),
    Lesser(Token<'source>),
    LesserEqual(Token<'source>),
    LogicalAnd(Token<'source>),
    LogicalOr(Token<'source>),
    Name {
        name: Token<'source>,
        colon_token: Token<'source>,
    },
    Field {
        dot_token: Token<'source>,
        index: Token<'source>,
    },
    Tuple(Token<'source>),
}

#[derive(Debug, Eq, PartialEq)]
enum Operation<'source> {
    Call(Token<'source>),
    Pipe(Token<'source>),
    Bind(Token<'source>),
    Positive(Token<'source>),
    Negative(Token<'source>),
    Annotation(Box<Annotation<'source>>),
    Deref(Token<'source>),
    Mul(Token<'source>),
    Div(Token<'source>),
    Add(Token<'source>),
    Sub(Token<'source>),
    BitwiseAnd(Token<'source>),
    BitwiseXor(Token<'source>),
    BitwiseOr(Token<'source>),
    EqualTo(Token<'source>),
    NotEqualTo(Token<'source>),
    Greater(Token<'source>),
    GreaterEqual(Token<'source>),
    Lesser(Token<'source>),
    LesserEqual(Token<'source>),
    LogicalAnd(Token<'source>),
    LogicalOr(Token<'source>),
    Name {
        name: Token<'source>,
        colon_token: Token<'source>,
    },
    Field {
        dot_token: Token<'source>,
        index: Token<'source>,
    },
    Tuple(Token<'source>),
    SubExpression(Token<'source>),
}

impl Operation<'_> {
    // NOTE: please keep espybook/precedence.md up to date with this!
    fn precedence(&self) -> usize {
        match self {
            Operation::Field { .. } | Operation::Deref(_) => 13,
            Operation::Positive(_) | Operation::Negative(_) | Operation::Annotation(_) => 12,
            Operation::Mul(_) | Operation::Div(_) => 11,
            Operation::Add(_) | Operation::Sub(_) => 10,
            Operation::BitwiseAnd(_) => 9,
            Operation::BitwiseXor(_) => 8,
            Operation::BitwiseOr(_) => 7,
            Operation::EqualTo(_)
            | Operation::NotEqualTo(_)
            | Operation::Greater(_)
            | Operation::GreaterEqual(_)
            | Operation::Lesser(_)
            | Operation::LesserEqual(_) => 6,
            Operation::LogicalAnd(_) => 5,
            Operation::LogicalOr(_) => 4,
            Operation::Name { .. } => 3,
            Operation::Tuple(_) => 2,
            Operation::Pipe(_) | Operation::Call(_) | Operation::Bind(_) => 1,
            Operation::SubExpression(_) => 0,
        }
    }

    // NOTE: please keep espybook/precedence.md up to date with this!
    fn left_associative(&self) -> bool {
        match self {
            Operation::Field { .. }
            | Operation::Deref(_)
            | Operation::Mul(_)
            | Operation::Div(_)
            | Operation::Add(_)
            | Operation::Sub(_)
            | Operation::BitwiseAnd(_)
            | Operation::BitwiseXor(_)
            | Operation::BitwiseOr(_)
            | Operation::EqualTo(_)
            | Operation::NotEqualTo(_)
            | Operation::Greater(_)
            | Operation::GreaterEqual(_)
            | Operation::Lesser(_)
            | Operation::LesserEqual(_)
            | Operation::LogicalAnd(_)
            | Operation::LogicalOr(_)
            | Operation::Tuple(_)
            | Operation::SubExpression(_)
            | Operation::Call(_)
            | Operation::Bind(_)
            | Operation::Pipe(_) => true,
            // Unary operators must be right-associative to avoid miscompilation.
            Operation::Positive(_)
            | Operation::Negative(_)
            | Operation::Annotation(_)
            | Operation::Name { .. } => false,
        }
    }
}
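
// Worked example (illustrative): in `a - b - c`, `Sub` has precedence 10 and is
// left-associative, so when the second `-` arrives the first `Sub` is popped to
// the output before the second is pushed. The resulting postfix order is
// `a b Sub c Sub`, i.e. `(a - b) - c`.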

impl<'source> From<Operation<'source>> for Node<'source> {
    fn from(op: Operation<'source>) -> Self {
        match op {
            Operation::Field { dot_token, index } => Node::Field { dot_token, index },
            Operation::Pipe(t) => Node::Pipe(t),
            Operation::Bind(t) => Node::Bind(t),
            Operation::Call(t) => Node::Call(t),
            Operation::Positive(t) => Node::Positive(t),
            Operation::Negative(t) => Node::Negative(t),
            Operation::Annotation(t) => Node::Annotation(t),
            Operation::Deref(t) => Node::Deref(t),
            Operation::Mul(t) => Node::Mul(t),
            Operation::Div(t) => Node::Div(t),
            Operation::Add(t) => Node::Add(t),
            Operation::Sub(t) => Node::Sub(t),
            Operation::BitwiseAnd(t) => Node::BitwiseAnd(t),
            Operation::BitwiseXor(t) => Node::BitwiseXor(t),
            Operation::BitwiseOr(t) => Node::BitwiseOr(t),
            Operation::EqualTo(t) => Node::EqualTo(t),
            Operation::NotEqualTo(t) => Node::NotEqualTo(t),
            Operation::Greater(t) => Node::Greater(t),
            Operation::GreaterEqual(t) => Node::GreaterEqual(t),
            Operation::Lesser(t) => Node::Lesser(t),
            Operation::LesserEqual(t) => Node::LesserEqual(t),
            Operation::LogicalAnd(t) => Node::LogicalAnd(t),
            Operation::LogicalOr(t) => Node::LogicalOr(t),
            Operation::Name { name, colon_token } => Node::Name { name, colon_token },
            Operation::Tuple(t) => Node::Tuple(t),
            Operation::SubExpression(_) => {
                panic!("sub expressions may not enter the output stack")
            }
        }
    }
}

/// This type should not contain an incomplete expression so long as there are no error diagnostics.
#[derive(Debug, Eq, PartialEq)]
#[make_dst_factory(pub)]
pub struct Expression<'source> {
    pub first_token: Option<Token<'source>>,
    pub last_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
    pub contents: [Node<'source>],
}

impl<'source> Expression<'source> {
    /// Parses an expression until an unexpected token is upcoming (detected via
    /// peek, so the terminating token is not consumed).
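    ///
    /// A minimal usage sketch (hypothetical: the lexer construction shown here
    /// is assumed rather than taken from `espy_eyes`):
    ///
    /// ```ignore
    /// let mut lexer = espy_eyes::Lexer::from("1 + 2 * 3").peekable();
    /// let expression = Expression::new(&mut lexer).expect("expression is non-empty");
    /// assert!(expression.diagnostics.errors.is_empty());
    /// ```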
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Option<Box<Self>> {
        // List of tokens that imply the unary position.
        // This is probably not the best way to do this.
        const UNARY_POSITION: &[Lexigram] = &[
            Lexigram::Plus,
            Lexigram::Minus,
            Lexigram::Star,
            Lexigram::Slash,
            Lexigram::Ampersand,
            Lexigram::Caret,
            Lexigram::Pipe,
            Lexigram::DoubleEqual,
            Lexigram::BangEqual,
            Lexigram::Greater,
            Lexigram::GreaterEqual,
            Lexigram::Lesser,
            Lexigram::LesserEqual,
            Lexigram::And,
            Lexigram::Or,
            Lexigram::Comma,
            Lexigram::Colon,
            Lexigram::Triangle,
            Lexigram::OpenParen,
            Lexigram::CloseSquare,
        ];
        // List of tokens which are well-known to end a (sub-)expression.
        // Technically any token not recognized by the match block later in this
        // function is capable of ending an expression, but only the following
        // list will be considered "end of expression" for the purposes of
        // trailing comma detection.
        const EXPRESSION_TERMINATORS: &[Lexigram] = &[
            // Close paren is an outlier; it ends sub-expressions rather than
            // expressions, but that's still of interest to the comma operator.
            Lexigram::CloseParen,
            Lexigram::CloseBrace,
            Lexigram::Then,
            Lexigram::End,
            Lexigram::SingleEqual,
            Lexigram::Semicolon,
        ];
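        // For example, in `(1, 2,)` the trailing comma is followed by CloseParen,
        // which appears in this list, so no Tuple operation is pushed for it.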
        let first_token = lexer.peek().copied().transpose().ok().flatten();
        let mut last_token: Option<Token> = None;
        let mut diagnostics = Diagnostics::default();
        let mut contents = Vec::new();
        let mut stack = Vec::new();
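        // Pops every pending operator to the output, stopping at a
        // sub-expression boundary (an open parenthesis marker) or an empty stack.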
        let flush = |output: &mut Vec<Node<'source>>, stack: &mut Vec<Operation<'source>>| {
            while let Some(op) = stack.pop_if(|x| !matches!(x, Operation::SubExpression(_))) {
                output.push(op.into());
            }
        };
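        // Standard shunting-yard push: pops operators that bind at least as
        // tightly (or strictly tighter, for right-associative operators) to the
        // output before pushing the incoming operator.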
        let push_with_precedence =
            |output: &mut Vec<Node<'source>>,
             stack: &mut Vec<Operation<'source>>,
             operator: Operation<'source>| {
                while let Some(op) = stack.pop_if(|x| {
                    if operator.left_associative() {
                        x.precedence() >= operator.precedence()
                    } else {
                        x.precedence() > operator.precedence()
                    }
                }) {
                    // SubExpression has the lowest precedence, so this cannot panic.
                    output.push(op.into());
                }
                stack.push(operator);
            };
        loop {
            let unary_position = last_token.is_none_or(|t| UNARY_POSITION.contains(&t.lexigram));
            let t = diagnostics.wrap(lexer.peek().copied());
            macro_rules! lexi {
                ($($name:ident)? @ $lexi:ident) => {
                    Some($($name @)? Token {
                        lexigram: Lexigram::$lexi,
                        ..
                    })
                };
            }
            macro_rules! op {
                ($op:ident($inner:expr)) => {
                    push_with_precedence(&mut contents, &mut stack, Operation::$op($inner))
                };
            }
            match t {
                // Terminals
                //
                // A terminal value outside of unary position implies a function call,
                // so flush the operator stack in this case.
                lexi!(number @ Number) => {
                    if !unary_position {
                        op!(Call(number));
                    }
                    contents.push(Node::Number(number));
                }
                lexi!(string @ String) => {
                    if !unary_position {
                        op!(Call(string));
                    }
                    contents.push(Node::String(string));
                }
                lexi!(discard @ Discard) => {
                    if !unary_position {
                        op!(Call(discard));
                    }
                    last_token = lexer.next().transpose().ok().flatten();
                    let colon_token = diagnostics.next_if(lexer, &[Lexigram::Colon]);
                    if let Some(colon_token) = colon_token {
                        last_token = Some(colon_token);
                        push_with_precedence(
                            &mut contents,
                            &mut stack,
                            Operation::Name {
                                name: discard,
                                colon_token,
                            },
                        );
                    }
                    continue;
                }
                lexi!(ident @ Ident) => {
                    if !unary_position {
                        op!(Call(ident));
                    }
                    last_token = lexer.next().transpose().ok().flatten();
                    if let Some(Ok(
                        colon_token @ Token {
                            lexigram: Lexigram::Colon,
                            ..
                        },
                    )) = lexer.peek().copied()
                    {
                        last_token = lexer.next().transpose().ok().flatten();
                        push_with_precedence(
                            &mut contents,
                            &mut stack,
                            Operation::Name {
                                name: ident,
                                colon_token,
                            },
                        );
                    } else {
                        contents.push(Node::Variable(ident));
                    }
                    continue;
                }
                lexi!(t @ True) => {
                    if !unary_position {
                        push_with_precedence(&mut contents, &mut stack, Operation::Call(t));
                    }
                    contents.push(Node::Bool(true, t));
                }
                lexi!(t @ False) => {
                    if !unary_position {
                        push_with_precedence(&mut contents, &mut stack, Operation::Call(t));
                    }
                    contents.push(Node::Bool(false, t));
                }
                lexi!(t @ OpenParen) => {
                    if !unary_position {
                        push_with_precedence(&mut contents, &mut stack, Operation::Call(t));
                    }
                    stack.push(Operation::SubExpression(t));
                }
                lexi!(t @ OpenBrace) => {
                    if !unary_position {
                        push_with_precedence(&mut contents, &mut stack, Operation::Call(t));
                    }
                    lexer.next();
                    contents.push(Node::Block(Block::child(&mut *lexer)));
                    diagnostics.expect(lexer.peek().copied(), &[Lexigram::CloseBrace]);
                }

                // Prefix operators
                lexi!(t @ Plus) if unary_position => op!(Positive(t)),
                lexi!(t @ Minus) if unary_position => op!(Negative(t)),
                lexi!(t @ At) if unary_position => {
                    lexer.next();
                    let annotation = Annotation::new(t, lexer);
                    last_token = annotation.close_square;
                    op!(Annotation(Box::new(annotation)));
                    continue;
                }
                // Postfix operators
                lexi!(dot_token @ Dot) if !unary_position => {
                    last_token = lexer.next().transpose().ok().flatten();
                    if let Some(index) =
                        diagnostics.next_if(lexer, &[Lexigram::Ident, Lexigram::Number])
                    {
                        last_token = Some(index);
                        push_with_precedence(
                            &mut contents,
                            &mut stack,
                            Operation::Field { dot_token, index },
                        );
                    }
                    continue;
                }
                lexi!(t @ DotStar) if !unary_position => op!(Deref(t)),
                lexi!(t @ Bang) if !unary_position => op!(Bind(t)),
                // Infix operators
                lexi!(t @ Plus) if !unary_position => op!(Add(t)),
                lexi!(t @ Minus) if !unary_position => op!(Sub(t)),
                lexi!(t @ Star) if !unary_position => op!(Mul(t)),
                lexi!(t @ Slash) if !unary_position => op!(Div(t)),
                lexi!(t @ Ampersand) if !unary_position => op!(BitwiseAnd(t)),
                lexi!(t @ Caret) if !unary_position => op!(BitwiseXor(t)),
                lexi!(t @ Pipe) if !unary_position => op!(BitwiseOr(t)),
                lexi!(t @ DoubleEqual) if !unary_position => op!(EqualTo(t)),
                lexi!(t @ BangEqual) if !unary_position => op!(NotEqualTo(t)),
                lexi!(t @ Greater) if !unary_position => op!(Greater(t)),
                lexi!(t @ GreaterEqual) if !unary_position => op!(GreaterEqual(t)),
                lexi!(t @ Lesser) if !unary_position => op!(Lesser(t)),
                lexi!(t @ LesserEqual) if !unary_position => op!(LesserEqual(t)),
                lexi!(t @ And) if !unary_position => op!(LogicalAnd(t)),
                lexi!(t @ Or) if !unary_position => op!(LogicalOr(t)),
                lexi!(t @ Triangle) if !unary_position => op!(Pipe(t)),
                lexi!(t @ Comma) if !unary_position => {
                    // This will be ignored if the comma is trailing.
                    let potential_last_token = lexer.next().transpose().ok().flatten();
                    if !lexer
                        .peek()
                        .copied()
                        .transpose()
                        .ok()
                        .flatten()
                        .is_none_or(|t| EXPRESSION_TERMINATORS.contains(&t.lexigram))
                    {
                        op!(Tuple(t));
                        last_token = potential_last_token;
                    }
                    continue;
                }
                lexi!(  @ If) => contents.push(If::from(&mut *lexer).into()),
                lexi!(  @ Match) => contents.push(Match::new(&mut *lexer).into()),
                lexi!(  @ Enum) => contents.push(Enum::from(&mut *lexer).into()),
                lexi!(t @ CloseParen) if unary_position => {
                    if let Some(
                        last_token @ Token {
                            lexigram: Lexigram::OpenParen,
                            ..
                        },
                    ) = last_token
                    {
                        contents.push(Node::Unit(last_token, t));
                    } else {
                        diagnostics.errors.push(Error::IncompleteExpression);
                    }
                    if !matches!(stack.pop(), Some(Operation::SubExpression(_))) {
                        diagnostics.errors.push(Error::UnexpectedCloseParen(t));
                    }
                }
                lexi!(t @ CloseParen) if !unary_position => {
                    while let Some(op) = stack.pop_if(|x| !matches!(x, Operation::SubExpression(_)))
                    {
                        contents.push(op.into());
                    }
                    if !matches!(stack.pop(), Some(Operation::SubExpression(_))) {
                        diagnostics.errors.push(Error::UnexpectedCloseParen(t));
                    }
                }
                _ => {
                    if unary_position {
                        if !contents.is_empty() || !stack.is_empty() {
                            diagnostics.errors.push(Error::IncompleteExpression);
                        }
                    } else {
                        flush(&mut contents, &mut stack);
                        if !stack.is_empty() {
                            diagnostics.errors.push(Error::IncompleteExpression);
                        }
                    }
                    if contents.is_empty() && diagnostics.errors.is_empty() {
                        return None;
                    }
                    return Some(Expression::build(
                        first_token,
                        last_token,
                        diagnostics,
                        contents,
                    ));
                }
            }
            // This is sometimes skipped with a continue!
            last_token = lexer.next().transpose().unwrap_or(None);
        }
    }
}
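/// An annotation of the form `@name[tokens]` (an illustrative sketch; the
/// bracketed tokens are kept raw, with nested square brackets balanced).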
#[derive(Debug, Eq, PartialEq)]
pub struct Annotation<'source> {
    pub at_sign: Token<'source>,
    pub name: Option<Token<'source>>,
    pub open_square: Option<Token<'source>>,
    pub tokens: Box<[Token<'source>]>,
    pub close_square: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Annotation<'source> {
    fn new(at_sign: Token<'source>, lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let name = diagnostics.next_if(lexer, &[Lexigram::Ident]);
        let open_square = diagnostics.next_if(lexer, &[Lexigram::OpenSquare]);
        let mut tokens = Vec::new();
        let mut square_level = 0;
        loop {
            let token = match diagnostics.wrap(lexer.peek().copied()) {
                Some(
                    token @ Token {
                        lexigram: Lexigram::OpenSquare,
                        ..
                    },
                ) => {
                    square_level += 1;
                    token
                }
                Some(
                    token @ Token {
                        lexigram: Lexigram::CloseSquare,
                        ..
                    },
                ) => {
                    if square_level > 0 {
                        square_level -= 1;
                        token
                    } else {
                        break;
                    }
                }
                Some(token) => token,
                None => break,
            };
            lexer.next();
            tokens.push(token);
        }
        let close_square = diagnostics.next_if(lexer, &[Lexigram::CloseSquare]);
        Self {
            at_sign,
            name,
            open_square,
            tokens: tokens.into_boxed_slice(),
            close_square,
            diagnostics,
        }
    }
}

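/// An `if` expression of the form `if condition then ... else ... end`.
///
/// An `else if` chain is represented by nesting: `second` holds a block whose
/// result expression contains the next [`If`], and `else_kind` records whether
/// the `else` was followed by `then` or by another `if`.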
#[derive(Debug, Eq, PartialEq)]
pub struct If<'source> {
    pub if_token: Token<'source>,
    pub condition: Option<Box<Expression<'source>>>,
    pub then_token: Option<Token<'source>>,
    pub first: Box<Block<'source>>,
    pub else_token: Option<Token<'source>>,
    pub else_kind: Option<Token<'source>>,
    pub second: Box<Block<'source>>,
    pub end_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> From<If<'source>> for Node<'source> {
    fn from(if_block: If<'source>) -> Self {
        Self::If(Box::new(if_block))
    }
}

impl<'source> From<&mut Peekable<Lexer<'source>>> for If<'source> {
    fn from(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let if_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let mut diagnostics = Diagnostics::default();
        let condition = diagnostics.expect_expression(lexer);
        let then_token = diagnostics.next_if(lexer, &[Lexigram::Then]);
        let first = Block::child(&mut *lexer);
        let (second, else_token, else_kind) = if let else_token @ Some(Token {
            lexigram: Lexigram::Else,
            ..
        }) = diagnostics.wrap(lexer.peek().copied())
        {
            lexer.next();
            let (second, else_kind) = match diagnostics.wrap(lexer.peek().copied()) {
                else_kind @ Some(Token {
                    lexigram: Lexigram::Then,
                    ..
                }) => {
                    lexer.next();
                    (Block::child(&mut *lexer), else_kind)
                }
                else_kind @ Some(Token {
                    lexigram: Lexigram::If,
                    ..
                }) => (
                    Block::build(
                        Expression::build(
                            None,
                            None,
                            Diagnostics::default(),
                            [Self::from(&mut *lexer).into()],
                        )
                        .into(),
                        Diagnostics::default(),
                        [],
                    ),
                    else_kind,
                ),
                _ => {
                    diagnostics.expect(lexer.peek().copied(), &[Lexigram::Then, Lexigram::If]);
                    (Box::default(), None)
                }
            };
            (second, else_token, else_kind)
        } else {
            (Box::default(), None, None)
        };
        let end_token = diagnostics.expect(lexer.peek().copied(), &[Lexigram::End]);
        Self {
            if_token,
            condition,
            then_token,
            first,
            else_token,
            else_kind,
            second,
            end_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct MatchCase<'source> {
    pub let_token: Option<Token<'source>>,
    pub binding: Option<Binding<'source>>,
    pub equals_token: Option<Token<'source>>,
    pub case: Option<Box<Expression<'source>>>,
    pub arrow_token: Option<Token<'source>>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
}

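/// A `match` expression of the following form (an illustrative sketch; `=>` is
/// [`Lexigram::DoubleArrow`], and the `let binding =` prefix of a case is
/// optional):
///
/// ```text
/// match scrutinee then
///     case => result;
///     let binding = case => result;
/// end
/// ```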
#[derive(Debug, Eq, PartialEq)]
#[make_dst_factory(pub)]
pub struct Match<'source> {
    pub match_token: Token<'source>,
    pub expression: Option<Box<Expression<'source>>>,
    pub then_token: Option<Token<'source>>,
    pub end_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
    pub cases: [MatchCase<'source>],
}

impl<'source> From<Box<Match<'source>>> for Node<'source> {
    fn from(struct_block: Box<Match<'source>>) -> Self {
        Self::Match(struct_block)
    }
}

impl<'source> Match<'source> {
    /// # Panics
    ///
    /// Panics if the lexer returns `None`.
    ///
    /// This function should only be called after successfully peeking a [`Lexigram::Match`].
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Box<Self> {
        let match_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let mut diagnostics = Diagnostics::default();

        let expression = diagnostics.expect_expression(lexer);
        let then_token = diagnostics.next_if(lexer, &[Lexigram::Then]);
        let mut cases = Vec::new();

        // Each case may optionally begin with `let binding =`,
        // which makes this loop somewhat awkward.
        loop {
            let (let_token, binding, equals_token, case) = if let let_token @ Some(Token {
                lexigram: Lexigram::Let,
                ..
            }) =
                diagnostics.wrap(lexer.peek().copied())
            {
                lexer.next();
                let binding = Binding::new(lexer)
                    .map_err(|e| diagnostics.errors.push(e))
                    .ok();
                let (equals_token, case) = if let equals_token @ Some(Token {
                    lexigram: Lexigram::SingleEqual,
                    ..
                }) = diagnostics.wrap(lexer.peek().copied())
                {
                    lexer.next();
                    let case = diagnostics.expect_expression(lexer);
                    (equals_token, case)
                } else {
                    (None, None)
                };
                (let_token, binding, equals_token, case)
            } else {
                let case = diagnostics.expect_expression(lexer);
                (None, None, None, case)
            };
            let arrow_token = diagnostics.next_if(lexer, &[Lexigram::DoubleArrow]);
            let expression = diagnostics.expect_expression(lexer);
            let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);
            cases.push(MatchCase {
                let_token,
                binding,
                equals_token,
                case,
                arrow_token,
                expression,
                semicolon_token,
            });
            if semicolon_token.is_none()
                || diagnostics
                    .wrap(lexer.peek().copied())
                    .is_some_and(|t| t.lexigram == Lexigram::End)
            {
                break;
            }
        }
        let end_token = diagnostics.expect(lexer.peek().copied(), &[Lexigram::End]);
        Match::build(
            match_token,
            expression,
            then_token,
            end_token,
            diagnostics,
            cases,
        )
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct Enum<'source> {
    pub enum_token: Token<'source>,
    pub variants: Option<Box<Expression<'source>>>,
    pub end_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> From<Enum<'source>> for Node<'source> {
    fn from(struct_block: Enum<'source>) -> Self {
        Self::Enum(Box::new(struct_block))
    }
}

impl<'source> From<&mut Peekable<Lexer<'source>>> for Enum<'source> {
    fn from(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let enum_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let mut diagnostics = Diagnostics::default();
        let variants = diagnostics.expect_expression(lexer);
        let end_token = diagnostics.expect(lexer.peek().copied(), &[Lexigram::End]);
        Self {
            enum_token,
            variants,
            end_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub enum Statement<'source> {
    Sequence(Sequence<'source>),
    Let(Let<'source>),
    Rebind(Rebind<'source>),
    Set(Set<'source>),
    Use(Use<'source>),
}

#[derive(Debug, Eq, PartialEq)]
pub struct Let<'source> {
    pub let_token: Token<'source>,
    pub binding: Option<Binding<'source>>,
    pub equals_token: Option<Token<'source>>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Let<'source> {
    /// This function should only be called after consuming a [`Lexigram::Let`],
    /// which is passed in as `let_token`.
    pub fn new(let_token: Token<'source>, lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let binding = Binding::new(lexer)
            .map_err(|e| diagnostics.errors.push(e))
            .ok();
        let equals_token = diagnostics.next_if(lexer, &[Lexigram::SingleEqual]);
        let expression = diagnostics.expect_expression(lexer);
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);

        Let {
            let_token,
            binding,
            equals_token,
            expression,
            semicolon_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct RebindSubject<'source> {
    pub ident_token: Option<Token<'source>>,
    pub comma_token: Option<Token<'source>>,
}

#[derive(Debug, Eq, PartialEq)]
pub enum RebindBy<'source> {
    Glob {
        star_token: Token<'source>,
    },
    Identifiers {
        bindings: Box<[RebindSubject<'source>]>,
    },
}

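/// A rebinding statement, written either with a glob (`let ^ *;`) or with an
/// explicit identifier list (`let ^ a, b;`); both forms are illustrative
/// sketches of the tokens this parser consumes.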
#[derive(Debug, Eq, PartialEq)]
pub struct Rebind<'source> {
    pub let_token: Token<'source>,
    pub caret_token: Token<'source>,
    pub by: RebindBy<'source>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Rebind<'source> {
    /// # Panics
    ///
    /// Panics if the lexer returns `None`.
    ///
    /// This function should only be called after successfully peeking a [`Lexigram::Caret`].
    pub fn new(let_token: Token<'source>, lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let caret_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let by = if let Some(
            star_token @ Token {
                lexigram: Lexigram::Star,
                ..
            },
        ) = diagnostics.wrap(lexer.peek().copied())
        {
            lexer.next();
            RebindBy::Glob { star_token }
        } else {
            let mut bindings = Vec::new();
            if let Some(ident_token) = diagnostics.next_if(lexer, &[Lexigram::Ident]) {
                bindings.push(RebindSubject {
                    ident_token: Some(ident_token),
                    comma_token: None,
                });
            }
            while let comma_token @ Some(Token {
                lexigram: Lexigram::Comma,
                ..
            }) = diagnostics.wrap(lexer.peek().copied())
            {
                lexer.next();
                bindings.push(RebindSubject {
                    ident_token: diagnostics.next_if(lexer, &[Lexigram::Ident]),
                    comma_token,
                });
            }
            RebindBy::Identifiers {
                bindings: bindings.into_boxed_slice(),
            }
        };
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);
        Rebind {
            let_token,
            caret_token,
            by,
            semicolon_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct Sequence<'source> {
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Token<'source>,
}

impl<'source> Sequence<'source> {
    /// # Errors
    ///
    /// Returns a lone [`Expression`] if no semicolon token was encountered.
    pub fn try_sequence(
        lexer: &mut Peekable<Lexer<'source>>,
    ) -> Result<Self, Option<Box<Expression<'source>>>> {
        let expression = Expression::new(&mut *lexer);
        if let Some(Ok(
            semicolon_token @ Token {
                lexigram: Lexigram::Semicolon,
                ..
            },
        )) = lexer.peek().copied()
        {
            lexer.next();
            Ok(Sequence {
                expression,
                semicolon_token,
            })
        } else {
            Err(expression)
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct Set<'source> {
    pub set_token: Token<'source>,
    pub target: Option<Box<Expression<'source>>>,
    pub equals_token: Option<Token<'source>>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Set<'source> {
    /// # Panics
    ///
    /// Panics if the lexer returns `None`.
    ///
    /// This function should only be called after successfully peeking a [`Lexigram::Set`].
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let set_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let target = diagnostics.expect_expression(lexer);
        let equals_token = diagnostics.next_if(lexer, &[Lexigram::SingleEqual]);
        let expression = diagnostics.expect_expression(lexer);
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);

        Set {
            set_token,
            target,
            equals_token,
            expression,
            semicolon_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct Use<'source> {
    pub use_token: Token<'source>,
    pub expression: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Use<'source> {
    /// # Panics
    ///
    /// Panics if the lexer returns `None`.
    ///
    /// This function should only be called after successfully peeking a [`Lexigram::Use`].
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Self {
        let mut diagnostics = Diagnostics::default();
        let use_token = lexer
            .next()
            .transpose()
            .ok()
            .flatten()
            .expect("caller must have peeked a token");
        let expression = diagnostics.expect_expression(lexer);
        let semicolon_token = diagnostics.next_if(lexer, &[Lexigram::Semicolon]);

        Self {
            use_token,
            expression,
            semicolon_token,
            diagnostics,
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct NumericBinding<'source> {
    pub binding: Binding<'source>,
    pub comma_token: Option<Token<'source>>,
}

#[derive(Debug, Eq, PartialEq)]
pub struct NamedBinding<'source> {
    pub field: Token<'source>,
    pub binding: Option<NamedSubBinding<'source>>,
    pub comma_token: Option<Token<'source>>,
}

#[derive(Debug, Eq, PartialEq)]
pub struct NamedSubBinding<'source> {
    pub colon_token: Token<'source>,
    pub binding: Binding<'source>,
}

#[derive(Debug, Eq, PartialEq)]
pub enum BindingMethod<'source> {
    Single(Token<'source>),
    Numeric {
        open_paren: Token<'source>,
        bindings: Box<[NumericBinding<'source>]>,
        close_paren: Option<Token<'source>>,
    },
    Named {
        open_brace: Token<'source>,
        bindings: Box<[NamedBinding<'source>]>,
        close_brace: Option<Token<'source>>,
    },
}

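/// A destructuring binding: a single identifier (or `_`), a parenthesized
/// positional binding, or a braced named-field binding. Bindings nest, so a
/// form like `(a, {b, c: d}, _)` is representable (illustrative sketch).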
#[derive(Debug, Eq, PartialEq)]
pub struct Binding<'source> {
    pub method: BindingMethod<'source>,
    pub diagnostics: Diagnostics<'source>,
}

impl<'source> Binding<'source> {
    /// # Errors
    ///
    /// Returns an error if no valid binding token was encountered.
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Result<Self, Error<'source>> {
        match lexer.peek().copied().transpose().map_err(Error::Lexer)? {
            Some(
                t @ Token {
                    lexigram: Lexigram::Ident | Lexigram::Discard,
                    ..
                },
            ) => {
                lexer.next();
                Ok(Binding {
                    method: BindingMethod::Single(t),
                    diagnostics: Diagnostics::default(),
                })
            }
            Some(
                open_paren @ Token {
                    lexigram: Lexigram::OpenParen,
                    ..
                },
            ) => {
                let mut diagnostics = Diagnostics::default();
                let mut bindings = Vec::new();
                lexer.next();
                loop {
                    let t = diagnostics.wrap(lexer.peek().copied());
                    if let Some(Token {
                        lexigram: Lexigram::CloseParen,
                        ..
                    }) = t
                    {
                        break;
                    }
                    if let Ok(binding) = Binding::new(lexer) {
                        let comma_token = diagnostics
                            .wrap(lexer.peek().copied())
                            .filter(|t| t.lexigram == Lexigram::Comma);
                        bindings.push(NumericBinding {
                            binding,
                            comma_token,
                        });
                        if comma_token.is_some() {
                            lexer.next();
                        } else {
                            break;
                        }
                    } else {
                        diagnostics.errors.push(Error::MissingToken {
                            expected: &[
                                Lexigram::Ident,
                                Lexigram::Discard,
                                Lexigram::OpenParen,
                                Lexigram::OpenBrace,
                                Lexigram::CloseParen,
                            ],
                            actual: t,
                        });
                        break;
                    }
                }
                let close_paren = diagnostics.next_if(lexer, &[Lexigram::CloseParen]);
                Ok(Binding {
                    method: BindingMethod::Numeric {
                        open_paren,
                        bindings: bindings.into_boxed_slice(),
                        close_paren,
                    },
                    diagnostics,
                })
            }
            Some(
                open_brace @ Token {
                    lexigram: Lexigram::OpenBrace,
                    ..
                },
            ) => {
                let mut diagnostics = Diagnostics::default();
                let mut bindings = Vec::new();
                lexer.next();
                loop {
                    match diagnostics.wrap(lexer.peek().copied()) {
                        Some(Token {
                            lexigram: Lexigram::CloseBrace,
                            ..
                        }) => break,
                        Some(
                            field @ Token {
                                lexigram: Lexigram::Ident,
                                ..
                            },
                        ) => {
                            lexer.next();
                            match diagnostics.wrap(lexer.peek().copied()) {
                                Some(
                                    colon_token @ Token {
                                        lexigram: Lexigram::Colon,
                                        ..
                                    },
                                ) => {
                                    lexer.next();
                                    match Binding::new(lexer) {
                                        Ok(binding) => {
                                            let comma_token = diagnostics
                                                .wrap(lexer.peek().copied())
                                                .filter(|t| t.lexigram == Lexigram::Comma);
                                            bindings.push(NamedBinding {
                                                field,
                                                binding: Some(NamedSubBinding {
                                                    colon_token,
                                                    binding,
                                                }),
                                                comma_token,
                                            });
                                            if comma_token.is_some() {
                                                lexer.next();
                                            } else {
                                                break;
                                            }
                                        }
                                        Err(e) => {
                                            diagnostics.errors.push(e);
                                            break;
                                        }
                                    }
                                }
                                comma_token @ Some(Token {
                                    lexigram: Lexigram::Comma,
                                    ..
                                }) => {
                                    lexer.next();
                                    bindings.push(NamedBinding {
                                        field,
                                        binding: None,
                                        comma_token,
                                    });
                                }
                                _ => {
                                    bindings.push(NamedBinding {
                                        field,
                                        binding: None,
                                        comma_token: None,
                                    });
                                    break;
                                }
                            }
                        }
                        actual => {
                            diagnostics.errors.push(Error::MissingToken {
                                expected: &[Lexigram::Ident, Lexigram::CloseBrace],
                                actual,
                            });
                            break;
                        }
                    }
                }
                let close_brace = diagnostics.next_if(lexer, &[Lexigram::CloseBrace]);
                Ok(Binding {
                    method: BindingMethod::Named {
                        open_brace,
                        bindings: bindings.into_boxed_slice(),
                        close_brace,
                    },
                    diagnostics,
                })
            }
            actual => Err(Error::MissingToken {
                expected: &[
                    Lexigram::Ident,
                    Lexigram::Discard,
                    Lexigram::OpenParen,
                    Lexigram::OpenBrace,
                ],
                actual,
            }),
        }
    }
}

#[derive(Debug, Eq, PartialEq)]
pub enum FunctionBody<'source> {
    Block(Box<Block<'source>>),
    /// Occurs when `with`'s semicolon is absent.
    ///
    /// Used to name only the type of a function.
    Never,
}

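/// A function introduced by `with`, of the form
/// `with binding: input -> output; body` (an illustrative sketch: the
/// `: input` and `-> output` type expressions are each optional, and omitting
/// the semicolon produces a body of [`FunctionBody::Never`]).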
#[derive(Debug, Eq, PartialEq)]
pub struct Function<'source> {
    pub with_token: Token<'source>,
    pub argument: Option<Binding<'source>>,
    pub colon_token: Option<Token<'source>>,
    pub input: Option<Box<Expression<'source>>>,
    pub single_arrow_token: Option<Token<'source>>,
    pub output: Option<Box<Expression<'source>>>,
    pub semicolon_token: Option<Token<'source>>,
    pub body: FunctionBody<'source>,
    pub diagnostics: Diagnostics<'source>,
}

#[derive(Debug, Eq, PartialEq)]
#[expect(
    clippy::large_enum_variant,
    reason = "this is already inside of a (very large) boxed block"
)]
pub enum BlockResult<'source> {
    Expression(Option<Box<Expression<'source>>>),
    Function(Function<'source>),
}

impl BlockResult<'_> {
    #[must_use]
    pub fn is_empty(&self) -> bool {
        match self {
            BlockResult::Expression(expression) => expression.is_none(),
            BlockResult::Function(_) => false,
        }
    }
}

impl Default for BlockResult<'_> {
    fn default() -> Self {
        Self::Expression(None)
    }
}

impl<'source> From<Box<Expression<'source>>> for BlockResult<'source> {
    fn from(expression: Box<Expression<'source>>) -> Self {
        Self::Expression(Some(expression))
    }
}

impl<'source> From<Function<'source>> for BlockResult<'source> {
    fn from(function: Function<'source>) -> Self {
        Self::Function(function)
    }
}

#[derive(Debug, Eq, PartialEq)]
#[make_dst_factory(pub)]
pub struct Block<'source> {
    pub result: BlockResult<'source>,
    pub diagnostics: Diagnostics<'source>,
    pub statements: [Statement<'source>],
}

impl Default for Box<Block<'_>> {
    fn default() -> Self {
        Block::build(BlockResult::Expression(None), Diagnostics::default(), [])
    }
}

impl<'source> Block<'source> {
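    /// Parses a root block. Any trailing token that is not consumed as a
    /// statement or result expression is reported as
    /// [`Error::ExpectedStatementOrExpression`].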
    pub fn new(lexer: &mut Peekable<Lexer<'source>>) -> Box<Self> {
        Self::parse(lexer, true)
    }

    fn child(lexer: &mut Peekable<Lexer<'source>>) -> Box<Self> {
        Self::parse(lexer, false)
    }

    fn parse(lexer: &mut Peekable<Lexer<'source>>, root: bool) -> Box<Self> {
        let mut diagnostics = Diagnostics::default();
        let mut statements = Vec::new();
        let result = loop {
            let statement = match diagnostics.wrap(lexer.peek().copied()) {
                Some(
                    let_token @ Token {
                        lexigram: Lexigram::Let,
                        ..
                    },
                ) => {
                    lexer.next();
                    if let Some(Ok(Token {
                        lexigram: Lexigram::Caret,
                        ..
                    })) = lexer.peek()
                    {
                        Statement::Rebind(Rebind::new(let_token, lexer))
                    } else {
                        Statement::Let(Let::new(let_token, lexer))
                    }
                }
                Some(Token {
                    lexigram: Lexigram::Set,
                    ..
                }) => Statement::Set(Set::new(lexer)),
                Some(Token {
                    lexigram: Lexigram::Use,
                    ..
                }) => Statement::Use(Use::new(lexer)),
                Some(
                    with_token @ Token {
                        lexigram: Lexigram::With,
                        ..
                    },
                ) => {
                    lexer.next();
                    let mut st_diagnostics = Diagnostics::default();
                    let argument = Binding::new(lexer)
                        .map_err(|e| st_diagnostics.errors.push(e))
                        .ok();
                    let (colon_token, input) = if let Some(
                        t @ Token {
                            lexigram: Lexigram::Colon,
                            ..
                        },
                    ) = st_diagnostics.wrap(lexer.peek().copied())
                    {
                        lexer.next();
                        (Some(t), diagnostics.expect_expression(lexer))
                    } else {
                        (None, None)
                    };
                    let (single_arrow_token, output) = if let Some(
                        t @ Token {
                            lexigram: Lexigram::SingleArrow,
                            ..
                        },
                    ) =
                        st_diagnostics.wrap(lexer.peek().copied())
                    {
                        lexer.next();
                        (Some(t), diagnostics.expect_expression(lexer))
                    } else {
                        (None, None)
                    };
                    let (semicolon_token, body) = if let Some(
                        t @ Token {
                            lexigram: Lexigram::Semicolon,
                            ..
                        },
                    ) = st_diagnostics.wrap(lexer.peek().copied())
                    {
                        lexer.next();
                        (
                            Some(t),
                            FunctionBody::Block(Block::parse(&mut *lexer, root)),
                        )
                    } else {
                        (None, FunctionBody::Never)
                    };

                    break Function {
                        with_token,
                        argument,
                        colon_token,
                        input,
                        single_arrow_token,
                        output,
                        semicolon_token,
                        body,
                        diagnostics: st_diagnostics,
                    }
                    .into();
                }
                _ => match Sequence::try_sequence(&mut *lexer) {
                    Ok(sequence) => Statement::Sequence(sequence),
                    Err(expression) => {
                        break BlockResult::Expression(expression);
                    }
                },
            };
            statements.push(statement);
        };
        if root && let Some(t) = lexer.peek().copied().transpose().ok().flatten() {
            diagnostics
                .errors
                .push(Error::ExpectedStatementOrExpression(t));
        }
        Self::build(result, diagnostics, statements)
    }
}