typst_syntax/
parser.rs

1use std::collections::{HashMap, HashSet};
2use std::mem;
3use std::ops::{Index, IndexMut, Range};
4
5use ecow::{eco_format, EcoString};
6use typst_utils::default_math_class;
7use unicode_math_class::MathClass;
8
9use crate::set::{syntax_set, SyntaxSet};
10use crate::{ast, set, LexMode, Lexer, SyntaxError, SyntaxKind, SyntaxNode};
11
12/// Parses a source file as top-level markup.
13pub fn parse(text: &str) -> SyntaxNode {
14    let _scope = typst_timing::TimingScope::new("parse");
15    let mut p = Parser::new(text, 0, LexMode::Markup);
16    markup_exprs(&mut p, true, syntax_set!(End));
17    p.finish_into(SyntaxKind::Markup)
18}
19
20/// Parses top-level code.
21pub fn parse_code(text: &str) -> SyntaxNode {
22    let _scope = typst_timing::TimingScope::new("parse code");
23    let mut p = Parser::new(text, 0, LexMode::Code);
24    code_exprs(&mut p, syntax_set!(End));
25    p.finish_into(SyntaxKind::Code)
26}
27
28/// Parses top-level math.
29pub fn parse_math(text: &str) -> SyntaxNode {
30    let _scope = typst_timing::TimingScope::new("parse math");
31    let mut p = Parser::new(text, 0, LexMode::Math);
32    math_exprs(&mut p, syntax_set!(End));
33    p.finish_into(SyntaxKind::Math)
34}
35
36/// Parses markup expressions until a stop condition is met.
37fn markup(p: &mut Parser, at_start: bool, wrap_trivia: bool, stop_set: SyntaxSet) {
38    let m = if wrap_trivia { p.before_trivia() } else { p.marker() };
39    markup_exprs(p, at_start, stop_set);
40    if wrap_trivia {
41        p.flush_trivia();
42    }
43    p.wrap(m, SyntaxKind::Markup);
44}
45
46/// Parses a sequence of markup expressions.
47fn markup_exprs(p: &mut Parser, mut at_start: bool, stop_set: SyntaxSet) {
48    debug_assert!(stop_set.contains(SyntaxKind::End));
49    at_start |= p.had_newline();
50    let mut nesting: usize = 0;
51    // Keep going if we're at a nested right-bracket regardless of the stop set.
52    while !p.at_set(stop_set) || (nesting > 0 && p.at(SyntaxKind::RightBracket)) {
53        markup_expr(p, at_start, &mut nesting);
54        at_start = p.had_newline();
55    }
56}
57
58/// Reparses a subsection of markup incrementally.
59pub(super) fn reparse_markup(
60    text: &str,
61    range: Range<usize>,
62    at_start: &mut bool,
63    nesting: &mut usize,
64    top_level: bool,
65) -> Option<Vec<SyntaxNode>> {
66    let mut p = Parser::new(text, range.start, LexMode::Markup);
67    *at_start |= p.had_newline();
68    while !p.end() && p.current_start() < range.end {
69        // If not top-level and at a new RightBracket, stop the reparse.
70        if !top_level && *nesting == 0 && p.at(SyntaxKind::RightBracket) {
71            break;
72        }
73        markup_expr(&mut p, *at_start, nesting);
74        *at_start = p.had_newline();
75    }
76    (p.balanced && p.current_start() == range.end).then(|| p.finish())
77}
78
79/// Parses a single markup expression. This includes markup elements like text,
80/// headings, strong/emph, lists/enums, etc. This is also the entry point for
81/// parsing math equations and embedded code expressions.
82fn markup_expr(p: &mut Parser, at_start: bool, nesting: &mut usize) {
83    match p.current() {
84        SyntaxKind::LeftBracket => {
85            *nesting += 1;
86            p.convert_and_eat(SyntaxKind::Text);
87        }
88        SyntaxKind::RightBracket if *nesting > 0 => {
89            *nesting -= 1;
90            p.convert_and_eat(SyntaxKind::Text);
91        }
92        SyntaxKind::RightBracket => {
93            p.unexpected();
94            p.hint("try using a backslash escape: \\]");
95        }
96
97        SyntaxKind::Shebang => p.eat(),
98
99        SyntaxKind::Text
100        | SyntaxKind::Linebreak
101        | SyntaxKind::Escape
102        | SyntaxKind::Shorthand
103        | SyntaxKind::SmartQuote
104        | SyntaxKind::Link
105        | SyntaxKind::Label => p.eat(),
106
107        SyntaxKind::Raw => p.eat(), // Raw is handled entirely in the Lexer.
108
109        SyntaxKind::Hash => embedded_code_expr(p),
110        SyntaxKind::Star => strong(p),
111        SyntaxKind::Underscore => emph(p),
112        SyntaxKind::HeadingMarker if at_start => heading(p),
113        SyntaxKind::ListMarker if at_start => list_item(p),
114        SyntaxKind::EnumMarker if at_start => enum_item(p),
115        SyntaxKind::TermMarker if at_start => term_item(p),
116        SyntaxKind::RefMarker => reference(p),
117        SyntaxKind::Dollar => equation(p),
118
119        SyntaxKind::HeadingMarker
120        | SyntaxKind::ListMarker
121        | SyntaxKind::EnumMarker
122        | SyntaxKind::TermMarker
123        | SyntaxKind::Colon => p.convert_and_eat(SyntaxKind::Text),
124
125        _ => p.unexpected(),
126    }
127}
128
129/// Parses strong content: `*Strong*`.
130fn strong(p: &mut Parser) {
131    p.with_nl_mode(AtNewline::StopParBreak, |p| {
132        let m = p.marker();
133        p.assert(SyntaxKind::Star);
134        markup(p, false, true, syntax_set!(Star, RightBracket, End));
135        p.expect_closing_delimiter(m, SyntaxKind::Star);
136        p.wrap(m, SyntaxKind::Strong);
137    });
138}
139
140/// Parses emphasized content: `_Emphasized_`.
141fn emph(p: &mut Parser) {
142    p.with_nl_mode(AtNewline::StopParBreak, |p| {
143        let m = p.marker();
144        p.assert(SyntaxKind::Underscore);
145        markup(p, false, true, syntax_set!(Underscore, RightBracket, End));
146        p.expect_closing_delimiter(m, SyntaxKind::Underscore);
147        p.wrap(m, SyntaxKind::Emph);
148    });
149}
150
151/// Parses a section heading: `= Introduction`.
152fn heading(p: &mut Parser) {
153    p.with_nl_mode(AtNewline::Stop, |p| {
154        let m = p.marker();
155        p.assert(SyntaxKind::HeadingMarker);
156        markup(p, false, false, syntax_set!(Label, RightBracket, End));
157        p.wrap(m, SyntaxKind::Heading);
158    });
159}
160
161/// Parses an item in a bullet list: `- ...`.
162fn list_item(p: &mut Parser) {
163    p.with_nl_mode(AtNewline::RequireColumn(p.current_column()), |p| {
164        let m = p.marker();
165        p.assert(SyntaxKind::ListMarker);
166        markup(p, true, false, syntax_set!(RightBracket, End));
167        p.wrap(m, SyntaxKind::ListItem);
168    });
169}
170
171/// Parses an item in an enumeration (numbered list): `+ ...` or `1. ...`.
172fn enum_item(p: &mut Parser) {
173    p.with_nl_mode(AtNewline::RequireColumn(p.current_column()), |p| {
174        let m = p.marker();
175        p.assert(SyntaxKind::EnumMarker);
176        markup(p, true, false, syntax_set!(RightBracket, End));
177        p.wrap(m, SyntaxKind::EnumItem);
178    });
179}
180
181/// Parses an item in a term list: `/ Term: Details`.
182fn term_item(p: &mut Parser) {
183    p.with_nl_mode(AtNewline::RequireColumn(p.current_column()), |p| {
184        let m = p.marker();
185        p.with_nl_mode(AtNewline::Stop, |p| {
186            p.assert(SyntaxKind::TermMarker);
187            markup(p, false, false, syntax_set!(Colon, RightBracket, End));
188        });
189        p.expect(SyntaxKind::Colon);
190        markup(p, true, false, syntax_set!(RightBracket, End));
191        p.wrap(m, SyntaxKind::TermItem);
192    });
193}
194
195/// Parses a reference: `@target`, `@target[..]`.
196fn reference(p: &mut Parser) {
197    let m = p.marker();
198    p.assert(SyntaxKind::RefMarker);
199    if p.directly_at(SyntaxKind::LeftBracket) {
200        content_block(p);
201    }
202    p.wrap(m, SyntaxKind::Ref);
203}
204
205/// Parses a mathematical equation: `$x$`, `$ x^2 $`.
206fn equation(p: &mut Parser) {
207    let m = p.marker();
208    p.enter_modes(LexMode::Math, AtNewline::Continue, |p| {
209        p.assert(SyntaxKind::Dollar);
210        math(p, syntax_set!(Dollar, End));
211        p.expect_closing_delimiter(m, SyntaxKind::Dollar);
212    });
213    p.wrap(m, SyntaxKind::Equation);
214}
215
216/// Parses the contents of a mathematical equation: `x^2 + 1`.
217fn math(p: &mut Parser, stop_set: SyntaxSet) {
218    let m = p.marker();
219    math_exprs(p, stop_set);
220    p.wrap(m, SyntaxKind::Math);
221}
222
223/// Parses a sequence of math expressions. Returns the number of expressions
224/// parsed.
225fn math_exprs(p: &mut Parser, stop_set: SyntaxSet) -> usize {
226    debug_assert!(stop_set.contains(SyntaxKind::End));
227    let mut count = 0;
228    while !p.at_set(stop_set) {
229        if p.at_set(set::MATH_EXPR) {
230            math_expr(p);
231            count += 1;
232        } else {
233            p.unexpected();
234        }
235    }
236    count
237}
238
239/// Parses a single math expression: This includes math elements like
240/// attachment, fractions, and roots, and embedded code expressions.
241fn math_expr(p: &mut Parser) {
242    math_expr_prec(p, 0, SyntaxKind::End)
243}
244
245/// Parses a math expression with at least the given precedence.
246fn math_expr_prec(p: &mut Parser, min_prec: usize, stop: SyntaxKind) {
247    let m = p.marker();
248    let mut continuable = false;
249    match p.current() {
250        SyntaxKind::Hash => embedded_code_expr(p),
251        // The lexer manages creating full FieldAccess nodes if needed.
252        SyntaxKind::MathIdent | SyntaxKind::FieldAccess => {
253            continuable = true;
254            p.eat();
255            // Parse a function call for an identifier or field access.
256            if min_prec < 3
257                && p.directly_at(SyntaxKind::MathText)
258                && p.current_text() == "("
259            {
260                math_args(p);
261                p.wrap(m, SyntaxKind::FuncCall);
262                continuable = false;
263            }
264        }
265
266        SyntaxKind::Dot
267        | SyntaxKind::Comma
268        | SyntaxKind::Semicolon
269        | SyntaxKind::RightParen => {
270            p.convert_and_eat(SyntaxKind::MathText);
271        }
272
273        SyntaxKind::Text | SyntaxKind::MathText | SyntaxKind::MathShorthand => {
274            continuable = matches!(
275                math_class(p.current_text()),
276                None | Some(MathClass::Alphabetic)
277            );
278            if !maybe_delimited(p) {
279                p.eat();
280            }
281        }
282
283        SyntaxKind::Linebreak | SyntaxKind::MathAlignPoint => p.eat(),
284        SyntaxKind::Escape | SyntaxKind::Str => {
285            continuable = true;
286            p.eat();
287        }
288
289        SyntaxKind::Root => {
290            if min_prec < 3 {
291                p.eat();
292                let m2 = p.marker();
293                math_expr_prec(p, 2, stop);
294                math_unparen(p, m2);
295                p.wrap(m, SyntaxKind::MathRoot);
296            }
297        }
298
299        SyntaxKind::Prime => {
300            // Means that there is nothing to attach the prime to.
301            continuable = true;
302            while p.at(SyntaxKind::Prime) {
303                let m2 = p.marker();
304                p.eat();
305                // Eat the group until the space.
306                while p.eat_if_direct(SyntaxKind::Prime) {}
307                p.wrap(m2, SyntaxKind::MathPrimes);
308            }
309        }
310
311        _ => p.expected("expression"),
312    }
313
314    if continuable && min_prec < 3 && !p.had_trivia() && maybe_delimited(p) {
315        p.wrap(m, SyntaxKind::Math);
316    }
317
318    // Whether there were _any_ primes in the loop.
319    let mut primed = false;
320
321    while !p.end() && !p.at(stop) {
322        if p.directly_at(SyntaxKind::MathText) && p.current_text() == "!" {
323            p.eat();
324            p.wrap(m, SyntaxKind::Math);
325            continue;
326        }
327
328        let prime_marker = p.marker();
329        if p.eat_if_direct(SyntaxKind::Prime) {
330            // Eat as many primes as possible.
331            while p.eat_if_direct(SyntaxKind::Prime) {}
332            p.wrap(prime_marker, SyntaxKind::MathPrimes);
333
334            // Will not be continued, so need to wrap the prime as attachment.
335            if p.at(stop) {
336                p.wrap(m, SyntaxKind::MathAttach);
337            }
338
339            primed = true;
340            continue;
341        }
342
343        let Some((kind, stop, assoc, mut prec)) = math_op(p.current()) else {
344            // No attachments, so we need to wrap primes as attachment.
345            if primed {
346                p.wrap(m, SyntaxKind::MathAttach);
347            }
348
349            break;
350        };
351
352        if primed && kind == SyntaxKind::MathFrac {
353            p.wrap(m, SyntaxKind::MathAttach);
354        }
355
356        if prec < min_prec {
357            break;
358        }
359
360        match assoc {
361            ast::Assoc::Left => prec += 1,
362            ast::Assoc::Right => {}
363        }
364
365        if kind == SyntaxKind::MathFrac {
366            math_unparen(p, m);
367        }
368
369        p.eat();
370        let m2 = p.marker();
371        math_expr_prec(p, prec, stop);
372        math_unparen(p, m2);
373
374        if p.eat_if(SyntaxKind::Underscore) || p.eat_if(SyntaxKind::Hat) {
375            let m3 = p.marker();
376            math_expr_prec(p, prec, SyntaxKind::End);
377            math_unparen(p, m3);
378        }
379
380        p.wrap(m, kind);
381    }
382}
383
384/// Precedence and wrapper kinds for the binary math operators.
385fn math_op(kind: SyntaxKind) -> Option<(SyntaxKind, SyntaxKind, ast::Assoc, usize)> {
386    match kind {
387        SyntaxKind::Underscore => {
388            Some((SyntaxKind::MathAttach, SyntaxKind::Hat, ast::Assoc::Right, 2))
389        }
390        SyntaxKind::Hat => {
391            Some((SyntaxKind::MathAttach, SyntaxKind::Underscore, ast::Assoc::Right, 2))
392        }
393        SyntaxKind::Slash => {
394            Some((SyntaxKind::MathFrac, SyntaxKind::End, ast::Assoc::Left, 1))
395        }
396        _ => None,
397    }
398}
399
400/// Try to parse delimiters based on the current token's unicode math class.
401fn maybe_delimited(p: &mut Parser) -> bool {
402    let open = math_class(p.current_text()) == Some(MathClass::Opening);
403    if open {
404        math_delimited(p);
405    }
406    open
407}
408
409/// Parse matched delimiters in math: `[x + y]`.
410fn math_delimited(p: &mut Parser) {
411    let m = p.marker();
412    p.eat();
413    let m2 = p.marker();
414    while !p.at_set(syntax_set!(Dollar, End)) {
415        if math_class(p.current_text()) == Some(MathClass::Closing) {
416            p.wrap(m2, SyntaxKind::Math);
417            // We could be at the shorthand `|]`, which shouldn't be converted
418            // to a `Text` kind.
419            if p.at(SyntaxKind::RightParen) {
420                p.convert_and_eat(SyntaxKind::MathText);
421            } else {
422                p.eat();
423            }
424            p.wrap(m, SyntaxKind::MathDelimited);
425            return;
426        }
427
428        if p.at_set(set::MATH_EXPR) {
429            math_expr(p);
430        } else {
431            p.unexpected();
432        }
433    }
434
435    p.wrap(m, SyntaxKind::Math);
436}
437
438/// Remove one set of parentheses (if any) from a previously parsed expression
439/// by converting to non-expression SyntaxKinds.
440fn math_unparen(p: &mut Parser, m: Marker) {
441    let Some(node) = p.nodes.get_mut(m.0) else { return };
442    if node.kind() != SyntaxKind::MathDelimited {
443        return;
444    }
445
446    if let [first, .., last] = node.children_mut() {
447        if first.text() == "(" && last.text() == ")" {
448            first.convert_to_kind(SyntaxKind::LeftParen);
449            last.convert_to_kind(SyntaxKind::RightParen);
450            // Only convert if we did have regular parens.
451            node.convert_to_kind(SyntaxKind::Math);
452        }
453    }
454}
455
456/// The unicode math class of a string. Only returns `Some` if `text` has
457/// exactly one unicode character or is a math shorthand string (currently just
458/// `[|`, `||`, `|]`) and then only returns `Some` if there is a math class
459/// defined for that character.
460fn math_class(text: &str) -> Option<MathClass> {
461    match text {
462        "[|" => return Some(MathClass::Opening),
463        "|]" => return Some(MathClass::Closing),
464        "||" => return Some(MathClass::Fence),
465        _ => {}
466    }
467
468    let mut chars = text.chars();
469    chars
470        .next()
471        .filter(|_| chars.next().is_none())
472        .and_then(default_math_class)
473}
474
475/// Parse an argument list in math: `(a, b; c, d; size: #50%)`.
476fn math_args(p: &mut Parser) {
477    let m = p.marker();
478    p.convert_and_eat(SyntaxKind::LeftParen);
479
480    let mut positional = true;
481    let mut has_arrays = false;
482
483    let mut maybe_array_start = p.marker();
484    let mut seen = HashSet::new();
485    while !p.at_set(syntax_set!(End, Dollar, RightParen)) {
486        positional = math_arg(p, &mut seen);
487
488        match p.current() {
489            SyntaxKind::Comma => {
490                p.eat();
491                if !positional {
492                    maybe_array_start = p.marker();
493                }
494            }
495            SyntaxKind::Semicolon => {
496                if !positional {
497                    maybe_array_start = p.marker();
498                }
499
500                // Parses an array: `a, b, c;`.
501                // The semicolon merges preceding arguments separated by commas
502                // into an array argument.
503                p.wrap(maybe_array_start, SyntaxKind::Array);
504                p.eat();
505                maybe_array_start = p.marker();
506                has_arrays = true;
507            }
508            SyntaxKind::End | SyntaxKind::Dollar | SyntaxKind::RightParen => {}
509            _ => p.expected("comma or semicolon"),
510        }
511    }
512
513    // Check if we need to wrap the preceding arguments in an array.
514    if maybe_array_start != p.marker() && has_arrays && positional {
515        p.wrap(maybe_array_start, SyntaxKind::Array);
516    }
517
518    p.expect_closing_delimiter(m, SyntaxKind::RightParen);
519    p.wrap(m, SyntaxKind::Args);
520}
521
522/// Parses a single argument in a math argument list.
523///
524/// Returns whether the parsed argument was positional or not.
525fn math_arg<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>) -> bool {
526    let m = p.marker();
527    let start = p.current_start();
528
529    if p.at(SyntaxKind::Dot) {
530        // Parses a spread argument: `..args`.
531        if let Some(spread) = p.lexer.maybe_math_spread_arg(start) {
532            p.token.node = spread;
533            p.eat();
534            math_expr(p);
535            p.wrap(m, SyntaxKind::Spread);
536            return true;
537        }
538    }
539
540    let mut positional = true;
541    if p.at_set(syntax_set!(MathText, MathIdent, Underscore)) {
542        // Parses a named argument: `thickness: #12pt`.
543        if let Some(named) = p.lexer.maybe_math_named_arg(start) {
544            p.token.node = named;
545            let text = p.current_text();
546            p.eat();
547            p.convert_and_eat(SyntaxKind::Colon);
548            if !seen.insert(text) {
549                p[m].convert_to_error(eco_format!("duplicate argument: {text}"));
550            }
551            positional = false;
552        }
553    }
554
555    // Parses a normal positional argument.
556    let arg = p.marker();
557    let count = math_exprs(p, syntax_set!(End, Dollar, Comma, Semicolon, RightParen));
558    if count == 0 {
559        // Named argument requires a value.
560        if !positional {
561            p.expected("expression");
562        }
563
564        // Flush trivia so that the new empty Math node will be wrapped _inside_
565        // any `SyntaxKind::Array` elements created in `math_args`.
566        // (And if we don't follow by wrapping in an array, it has no effect.)
567        // The difference in node layout without this would look like:
568        // Expression: `$ mat( ;) $`
569        // - Correct:   [ .., Space(" "), Array[Math[], ], Semicolon(";"), .. ]
570        // - Incorrect: [ .., Math[], Array[], Space(" "), Semicolon(";"), .. ]
571        p.flush_trivia();
572    }
573
574    // Wrap math function arguments to join adjacent math content or create an
575    // empty 'Math' node for when we have 0 args. We don't wrap when
576    // `count == 1`, since wrapping would change the type of the expression
577    // from potentially non-content to content. Ex: `$ func(#12pt) $` would
578    // change the type from size to content if wrapped.
579    if count != 1 {
580        p.wrap(arg, SyntaxKind::Math);
581    }
582
583    if !positional {
584        p.wrap(m, SyntaxKind::Named);
585    }
586    positional
587}
588
589/// Parses the contents of a code block.
590fn code(p: &mut Parser, stop_set: SyntaxSet) {
591    let m = p.marker();
592    code_exprs(p, stop_set);
593    p.wrap(m, SyntaxKind::Code);
594}
595
596/// Parses a sequence of code expressions.
597fn code_exprs(p: &mut Parser, stop_set: SyntaxSet) {
598    debug_assert!(stop_set.contains(SyntaxKind::End));
599    while !p.at_set(stop_set) {
600        p.with_nl_mode(AtNewline::ContextualContinue, |p| {
601            if !p.at_set(set::CODE_EXPR) {
602                p.unexpected();
603                return;
604            }
605            code_expr(p);
606            if !p.at_set(stop_set) && !p.eat_if(SyntaxKind::Semicolon) {
607                p.expected("semicolon or line break");
608                if p.at(SyntaxKind::Label) {
609                    p.hint("labels can only be applied in markup mode");
610                    p.hint("try wrapping your code in a markup block (`[ ]`)");
611                }
612            }
613        });
614    }
615}
616
617/// Parses an atomic code expression embedded in markup or math.
618fn embedded_code_expr(p: &mut Parser) {
619    p.enter_modes(LexMode::Code, AtNewline::Stop, |p| {
620        p.assert(SyntaxKind::Hash);
621        if p.had_trivia() || p.end() {
622            p.expected("expression");
623            return;
624        }
625
626        let stmt = p.at_set(set::STMT);
627        let at = p.at_set(set::ATOMIC_CODE_EXPR);
628        code_expr_prec(p, true, 0);
629
630        // Consume error for things like `#12p` or `#"abc\"`.#
631        if !at {
632            p.unexpected();
633        }
634
635        let semi = (stmt || p.directly_at(SyntaxKind::Semicolon))
636            && p.eat_if(SyntaxKind::Semicolon);
637
638        if stmt && !semi && !p.end() && !p.at(SyntaxKind::RightBracket) {
639            p.expected("semicolon or line break");
640        }
641    });
642}
643
644/// Parses a single code expression.
645fn code_expr(p: &mut Parser) {
646    code_expr_prec(p, false, 0)
647}
648
649/// Parses a code expression with at least the given precedence.
650fn code_expr_prec(p: &mut Parser, atomic: bool, min_prec: usize) {
651    let m = p.marker();
652    if !atomic && p.at_set(set::UNARY_OP) {
653        let op = ast::UnOp::from_kind(p.current()).unwrap();
654        p.eat();
655        code_expr_prec(p, atomic, op.precedence());
656        p.wrap(m, SyntaxKind::Unary);
657    } else {
658        code_primary(p, atomic);
659    }
660
661    loop {
662        if p.directly_at(SyntaxKind::LeftParen) || p.directly_at(SyntaxKind::LeftBracket)
663        {
664            args(p);
665            p.wrap(m, SyntaxKind::FuncCall);
666            continue;
667        }
668
669        let at_field_or_method = p.directly_at(SyntaxKind::Dot)
670            && p.lexer.clone().next().0 == SyntaxKind::Ident;
671
672        if atomic && !at_field_or_method {
673            break;
674        }
675
676        if p.eat_if(SyntaxKind::Dot) {
677            p.expect(SyntaxKind::Ident);
678            p.wrap(m, SyntaxKind::FieldAccess);
679            continue;
680        }
681
682        let binop = if p.at_set(set::BINARY_OP) {
683            ast::BinOp::from_kind(p.current())
684        } else if min_prec <= ast::BinOp::NotIn.precedence() && p.eat_if(SyntaxKind::Not)
685        {
686            if p.at(SyntaxKind::In) {
687                Some(ast::BinOp::NotIn)
688            } else {
689                p.expected("keyword `in`");
690                break;
691            }
692        } else {
693            None
694        };
695
696        if let Some(op) = binop {
697            let mut prec = op.precedence();
698            if prec < min_prec {
699                break;
700            }
701
702            match op.assoc() {
703                ast::Assoc::Left => prec += 1,
704                ast::Assoc::Right => {}
705            }
706
707            p.eat();
708            code_expr_prec(p, false, prec);
709            p.wrap(m, SyntaxKind::Binary);
710            continue;
711        }
712
713        break;
714    }
715}
716
717/// Parses an primary in a code expression. These are the atoms that unary and
718/// binary operations, functions calls, and field accesses start with / are
719/// composed of.
720fn code_primary(p: &mut Parser, atomic: bool) {
721    let m = p.marker();
722    match p.current() {
723        SyntaxKind::Ident => {
724            p.eat();
725            if !atomic && p.at(SyntaxKind::Arrow) {
726                p.wrap(m, SyntaxKind::Params);
727                p.assert(SyntaxKind::Arrow);
728                code_expr(p);
729                p.wrap(m, SyntaxKind::Closure);
730            }
731        }
732        SyntaxKind::Underscore if !atomic => {
733            p.eat();
734            if p.at(SyntaxKind::Arrow) {
735                p.wrap(m, SyntaxKind::Params);
736                p.eat();
737                code_expr(p);
738                p.wrap(m, SyntaxKind::Closure);
739            } else if p.eat_if(SyntaxKind::Eq) {
740                code_expr(p);
741                p.wrap(m, SyntaxKind::DestructAssignment);
742            } else {
743                p[m].expected("expression");
744            }
745        }
746
747        SyntaxKind::LeftBrace => code_block(p),
748        SyntaxKind::LeftBracket => content_block(p),
749        SyntaxKind::LeftParen => expr_with_paren(p, atomic),
750        SyntaxKind::Dollar => equation(p),
751        SyntaxKind::Let => let_binding(p),
752        SyntaxKind::Set => set_rule(p),
753        SyntaxKind::Show => show_rule(p),
754        SyntaxKind::Context => contextual(p, atomic),
755        SyntaxKind::If => conditional(p),
756        SyntaxKind::While => while_loop(p),
757        SyntaxKind::For => for_loop(p),
758        SyntaxKind::Import => module_import(p),
759        SyntaxKind::Include => module_include(p),
760        SyntaxKind::Break => break_stmt(p),
761        SyntaxKind::Continue => continue_stmt(p),
762        SyntaxKind::Return => return_stmt(p),
763
764        SyntaxKind::Raw => p.eat(), // Raw is handled entirely in the Lexer.
765
766        SyntaxKind::None
767        | SyntaxKind::Auto
768        | SyntaxKind::Int
769        | SyntaxKind::Float
770        | SyntaxKind::Bool
771        | SyntaxKind::Numeric
772        | SyntaxKind::Str
773        | SyntaxKind::Label => p.eat(),
774
775        _ => p.expected("expression"),
776    }
777}
778
779/// Reparses a full content or code block.
780pub(super) fn reparse_block(text: &str, range: Range<usize>) -> Option<SyntaxNode> {
781    let mut p = Parser::new(text, range.start, LexMode::Code);
782    assert!(p.at(SyntaxKind::LeftBracket) || p.at(SyntaxKind::LeftBrace));
783    block(&mut p);
784    (p.balanced && p.prev_end() == range.end)
785        .then(|| p.finish().into_iter().next().unwrap())
786}
787
788/// Parses a content or code block.
789fn block(p: &mut Parser) {
790    match p.current() {
791        SyntaxKind::LeftBracket => content_block(p),
792        SyntaxKind::LeftBrace => code_block(p),
793        _ => p.expected("block"),
794    }
795}
796
797/// Parses a code block: `{ let x = 1; x + 2 }`.
798fn code_block(p: &mut Parser) {
799    let m = p.marker();
800    p.enter_modes(LexMode::Code, AtNewline::Continue, |p| {
801        p.assert(SyntaxKind::LeftBrace);
802        code(p, syntax_set!(RightBrace, RightBracket, RightParen, End));
803        p.expect_closing_delimiter(m, SyntaxKind::RightBrace);
804    });
805    p.wrap(m, SyntaxKind::CodeBlock);
806}
807
808/// Parses a content block: `[*Hi* there!]`.
809fn content_block(p: &mut Parser) {
810    let m = p.marker();
811    p.enter_modes(LexMode::Markup, AtNewline::Continue, |p| {
812        p.assert(SyntaxKind::LeftBracket);
813        markup(p, true, true, syntax_set!(RightBracket, End));
814        p.expect_closing_delimiter(m, SyntaxKind::RightBracket);
815    });
816    p.wrap(m, SyntaxKind::ContentBlock);
817}
818
819/// Parses a let binding: `let x = 1`.
820fn let_binding(p: &mut Parser) {
821    let m = p.marker();
822    p.assert(SyntaxKind::Let);
823
824    let m2 = p.marker();
825    let mut closure = false;
826    let mut other = false;
827
828    if p.eat_if(SyntaxKind::Ident) {
829        if p.directly_at(SyntaxKind::LeftParen) {
830            params(p);
831            closure = true;
832        }
833    } else {
834        pattern(p, false, &mut HashSet::new(), None);
835        other = true;
836    }
837
838    let f = if closure || other { Parser::expect } else { Parser::eat_if };
839    if f(p, SyntaxKind::Eq) {
840        code_expr(p);
841    }
842
843    if closure {
844        p.wrap(m2, SyntaxKind::Closure);
845    }
846
847    p.wrap(m, SyntaxKind::LetBinding);
848}
849
850/// Parses a set rule: `set text(...)`.
851fn set_rule(p: &mut Parser) {
852    let m = p.marker();
853    p.assert(SyntaxKind::Set);
854
855    let m2 = p.marker();
856    p.expect(SyntaxKind::Ident);
857    while p.eat_if(SyntaxKind::Dot) {
858        p.expect(SyntaxKind::Ident);
859        p.wrap(m2, SyntaxKind::FieldAccess);
860    }
861
862    args(p);
863    if p.eat_if(SyntaxKind::If) {
864        code_expr(p);
865    }
866    p.wrap(m, SyntaxKind::SetRule);
867}
868
869/// Parses a show rule: `show heading: it => emph(it.body)`.
870fn show_rule(p: &mut Parser) {
871    let m = p.marker();
872    p.assert(SyntaxKind::Show);
873    let m2 = p.before_trivia();
874
875    if !p.at(SyntaxKind::Colon) {
876        code_expr(p);
877    }
878
879    if p.eat_if(SyntaxKind::Colon) {
880        code_expr(p);
881    } else {
882        p.expected_at(m2, "colon");
883    }
884
885    p.wrap(m, SyntaxKind::ShowRule);
886}
887
888/// Parses a contextual expression: `context text.lang`.
889fn contextual(p: &mut Parser, atomic: bool) {
890    let m = p.marker();
891    p.assert(SyntaxKind::Context);
892    code_expr_prec(p, atomic, 0);
893    p.wrap(m, SyntaxKind::Contextual);
894}
895
896/// Parses an if-else conditional: `if x { y } else { z }`.
897fn conditional(p: &mut Parser) {
898    let m = p.marker();
899    p.assert(SyntaxKind::If);
900    code_expr(p);
901    block(p);
902    if p.eat_if(SyntaxKind::Else) {
903        if p.at(SyntaxKind::If) {
904            conditional(p);
905        } else {
906            block(p);
907        }
908    }
909    p.wrap(m, SyntaxKind::Conditional);
910}
911
912/// Parses a while loop: `while x { y }`.
913fn while_loop(p: &mut Parser) {
914    let m = p.marker();
915    p.assert(SyntaxKind::While);
916    code_expr(p);
917    block(p);
918    p.wrap(m, SyntaxKind::WhileLoop);
919}
920
921/// Parses a for loop: `for x in y { z }`.
922fn for_loop(p: &mut Parser) {
923    let m = p.marker();
924    p.assert(SyntaxKind::For);
925
926    let mut seen = HashSet::new();
927    pattern(p, false, &mut seen, None);
928
929    if p.at(SyntaxKind::Comma) {
930        let node = p.eat_and_get();
931        node.unexpected();
932        node.hint("destructuring patterns must be wrapped in parentheses");
933        if p.at_set(set::PATTERN) {
934            pattern(p, false, &mut seen, None);
935        }
936    }
937
938    p.expect(SyntaxKind::In);
939    code_expr(p);
940    block(p);
941    p.wrap(m, SyntaxKind::ForLoop);
942}
943
944/// Parses a module import: `import "utils.typ": a, b, c`.
945fn module_import(p: &mut Parser) {
946    let m = p.marker();
947    p.assert(SyntaxKind::Import);
948    code_expr(p);
949    if p.eat_if(SyntaxKind::As) {
950        // Allow renaming a full module import.
951        // If items are included, both the full module and the items are
952        // imported at the same time.
953        p.expect(SyntaxKind::Ident);
954    }
955
956    if p.eat_if(SyntaxKind::Colon) {
957        if p.at(SyntaxKind::LeftParen) {
958            p.with_nl_mode(AtNewline::Continue, |p| {
959                let m2 = p.marker();
960                p.assert(SyntaxKind::LeftParen);
961
962                import_items(p);
963
964                p.expect_closing_delimiter(m2, SyntaxKind::RightParen);
965            });
966        } else if !p.eat_if(SyntaxKind::Star) {
967            import_items(p);
968        }
969    }
970
971    p.wrap(m, SyntaxKind::ModuleImport);
972}
973
974/// Parses items to import from a module: `a, b, c`.
975fn import_items(p: &mut Parser) {
976    let m = p.marker();
977    while !p.current().is_terminator() {
978        let item_marker = p.marker();
979        if !p.eat_if(SyntaxKind::Ident) {
980            p.unexpected();
981        }
982
983        // Nested import path: `a.b.c`
984        while p.eat_if(SyntaxKind::Dot) {
985            p.expect(SyntaxKind::Ident);
986        }
987
988        p.wrap(item_marker, SyntaxKind::ImportItemPath);
989
990        // Rename imported item.
991        if p.eat_if(SyntaxKind::As) {
992            p.expect(SyntaxKind::Ident);
993            p.wrap(item_marker, SyntaxKind::RenamedImportItem);
994        }
995
996        if !p.current().is_terminator() {
997            p.expect(SyntaxKind::Comma);
998        }
999    }
1000
1001    p.wrap(m, SyntaxKind::ImportItems);
1002}
1003
1004/// Parses a module include: `include "chapter1.typ"`.
1005fn module_include(p: &mut Parser) {
1006    let m = p.marker();
1007    p.assert(SyntaxKind::Include);
1008    code_expr(p);
1009    p.wrap(m, SyntaxKind::ModuleInclude);
1010}
1011
1012/// Parses a break from a loop: `break`.
1013fn break_stmt(p: &mut Parser) {
1014    let m = p.marker();
1015    p.assert(SyntaxKind::Break);
1016    p.wrap(m, SyntaxKind::LoopBreak);
1017}
1018
1019/// Parses a continue in a loop: `continue`.
1020fn continue_stmt(p: &mut Parser) {
1021    let m = p.marker();
1022    p.assert(SyntaxKind::Continue);
1023    p.wrap(m, SyntaxKind::LoopContinue);
1024}
1025
1026/// Parses a return from a function: `return`, `return x + 1`.
1027fn return_stmt(p: &mut Parser) {
1028    let m = p.marker();
1029    p.assert(SyntaxKind::Return);
1030    if p.at_set(set::CODE_EXPR) {
1031        code_expr(p);
1032    }
1033    p.wrap(m, SyntaxKind::FuncReturn);
1034}
1035
1036/// An expression that starts with a parenthesis.
1037fn expr_with_paren(p: &mut Parser, atomic: bool) {
1038    if atomic {
1039        // Atomic expressions aren't modified by operators that follow them, so
1040        // our first guess of array/dict will be correct.
1041        parenthesized_or_array_or_dict(p);
1042        return;
1043    }
1044
1045    // If we've seen this position before and have a memoized result, restore it
1046    // and return. Otherwise, get a key to this position and a checkpoint to
1047    // restart from in case we make a wrong prediction.
1048    let Some((memo_key, checkpoint)) = p.restore_memo_or_checkpoint() else { return };
1049    // The node length from when we restored.
1050    let prev_len = checkpoint.node_len;
1051
1052    // When we reach a '(', we can't be sure what it is. First, we attempt to
1053    // parse as a simple parenthesized expression, array, or dictionary as
1054    // these are the most likely things. We can handle all of those in a single
1055    // pass.
1056    let kind = parenthesized_or_array_or_dict(p);
1057
1058    // If, however, '=>' or '=' follows, we must backtrack and reparse as either
1059    // a parameter list or a destructuring. To be able to do that, we created a
1060    // parser checkpoint before our speculative parse, which we can restore.
1061    //
1062    // However, naive backtracking has a fatal flaw: It can lead to exponential
1063    // parsing time if we are constantly getting things wrong in a nested
1064    // scenario. The particular failure case for parameter parsing is the
1065    // following: `(x: (x: (x) => y) => y) => y`
1066    //
1067    // Such a structure will reparse over and over again recursively, leading to
1068    // a running time of O(2^n) for nesting depth n. To prevent this, we perform
1069    // a simple trick: When we have done the mistake of picking the wrong path
1070    // once and have subsequently parsed correctly, we save the result of that
1071    // correct parsing in the `p.memo` map. When we reach the same position
1072    // again, we can then just restore this result. In this way, no
1073    // parenthesized expression is parsed more than twice, leading to a worst
1074    // case running time of O(2n).
1075    if p.at(SyntaxKind::Arrow) {
1076        p.restore(checkpoint);
1077        let m = p.marker();
1078        params(p);
1079        if !p.expect(SyntaxKind::Arrow) {
1080            return;
1081        }
1082        code_expr(p);
1083        p.wrap(m, SyntaxKind::Closure);
1084    } else if p.at(SyntaxKind::Eq) && kind != SyntaxKind::Parenthesized {
1085        p.restore(checkpoint);
1086        let m = p.marker();
1087        destructuring_or_parenthesized(p, true, &mut HashSet::new());
1088        if !p.expect(SyntaxKind::Eq) {
1089            return;
1090        }
1091        code_expr(p);
1092        p.wrap(m, SyntaxKind::DestructAssignment);
1093    } else {
1094        return;
1095    }
1096
1097    // Memoize result if we backtracked.
1098    p.memoize_parsed_nodes(memo_key, prev_len);
1099}
1100
1101/// Parses either
1102/// - a parenthesized expression: `(1 + 2)`, or
1103/// - an array: `(1, "hi", 12cm)`, or
1104/// - a dictionary: `(thickness: 3pt, dash: "solid")`.
1105fn parenthesized_or_array_or_dict(p: &mut Parser) -> SyntaxKind {
1106    let mut state = GroupState {
1107        count: 0,
1108        maybe_just_parens: true,
1109        kind: None,
1110        seen: HashSet::new(),
1111    };
1112
1113    // An edge case with parens is whether we can interpret a leading spread
1114    // expression as a dictionary, e.g. if we want `(..dict1, ..dict2)` to join
1115    // the two dicts.
1116    //
1117    // The issue is that we decide on the type of the parenthesized expression
1118    // here in the parser by the `SyntaxKind` we wrap with, instead of in eval
1119    // based on the type of the spread item.
1120    //
1121    // The current fix is that we allow a leading colon to force the
1122    // parenthesized value into a dict:
1123    // - `(..arr1, ..arr2)` is wrapped as an `Array`.
1124    // - `(: ..dict1, ..dict2)` is wrapped as a `Dict`.
1125    //
1126    // This does allow some unexpected expressions, such as `(: key: val)`, but
1127    // it's currently intentional.
1128    let m = p.marker();
1129    p.with_nl_mode(AtNewline::Continue, |p| {
1130        p.assert(SyntaxKind::LeftParen);
1131        if p.eat_if(SyntaxKind::Colon) {
1132            state.kind = Some(SyntaxKind::Dict);
1133        }
1134
1135        while !p.current().is_terminator() {
1136            if !p.at_set(set::ARRAY_OR_DICT_ITEM) {
1137                p.unexpected();
1138                continue;
1139            }
1140
1141            array_or_dict_item(p, &mut state);
1142            state.count += 1;
1143
1144            if !p.current().is_terminator() && p.expect(SyntaxKind::Comma) {
1145                state.maybe_just_parens = false;
1146            }
1147        }
1148
1149        p.expect_closing_delimiter(m, SyntaxKind::RightParen);
1150    });
1151
1152    let kind = if state.maybe_just_parens && state.count == 1 {
1153        SyntaxKind::Parenthesized
1154    } else {
1155        state.kind.unwrap_or(SyntaxKind::Array)
1156    };
1157
1158    p.wrap(m, kind);
1159    kind
1160}
1161
1162/// State for array/dictionary parsing.
1163struct GroupState {
1164    count: usize,
1165    /// Whether this is just a single expression in parens: `(a)`. Single
1166    /// element arrays require an explicit comma: `(a,)`, unless we're
1167    /// spreading: `(..a)`.
1168    maybe_just_parens: bool,
1169    /// The `SyntaxKind` to wrap as (if we've figured it out yet).
1170    kind: Option<SyntaxKind>,
1171    /// Store named arguments so we can give an error if they're repeated.
1172    seen: HashSet<EcoString>,
1173}
1174
1175/// Parses a single item in an array or dictionary.
1176fn array_or_dict_item(p: &mut Parser, state: &mut GroupState) {
1177    let m = p.marker();
1178
1179    if p.eat_if(SyntaxKind::Dots) {
1180        // Parses a spread item: `..item`.
1181        code_expr(p);
1182        p.wrap(m, SyntaxKind::Spread);
1183        state.maybe_just_parens = false;
1184        return;
1185    }
1186
1187    code_expr(p);
1188
1189    if p.eat_if(SyntaxKind::Colon) {
1190        // Parses a named/keyed pair: `name: item` or `"key": item`.
1191        code_expr(p);
1192
1193        let node = &mut p[m];
1194        let pair_kind = match node.kind() {
1195            SyntaxKind::Ident => SyntaxKind::Named,
1196            _ => SyntaxKind::Keyed,
1197        };
1198
1199        if let Some(key) = match node.cast::<ast::Expr>() {
1200            Some(ast::Expr::Ident(ident)) => Some(ident.get().clone()),
1201            Some(ast::Expr::Str(s)) => Some(s.get()),
1202            _ => None,
1203        } {
1204            if !state.seen.insert(key.clone()) {
1205                node.convert_to_error(eco_format!("duplicate key: {key}"));
1206            }
1207        }
1208
1209        p.wrap(m, pair_kind);
1210        state.maybe_just_parens = false;
1211
1212        if state.kind == Some(SyntaxKind::Array) {
1213            p[m].expected("expression");
1214        } else {
1215            state.kind = Some(SyntaxKind::Dict);
1216        }
1217    } else {
1218        // Parses a positional item.
1219        if state.kind == Some(SyntaxKind::Dict) {
1220            p[m].expected("named or keyed pair");
1221        } else {
1222            state.kind = Some(SyntaxKind::Array)
1223        }
1224    }
1225}
1226
1227/// Parses a function call's argument list: `(12pt, y)`.
1228fn args(p: &mut Parser) {
1229    if !p.directly_at(SyntaxKind::LeftParen) && !p.directly_at(SyntaxKind::LeftBracket) {
1230        p.expected("argument list");
1231        if p.at(SyntaxKind::LeftParen) || p.at(SyntaxKind::LeftBracket) {
1232            p.hint("there may not be any spaces before the argument list");
1233        }
1234    }
1235
1236    let m = p.marker();
1237    if p.at(SyntaxKind::LeftParen) {
1238        let m2 = p.marker();
1239        p.with_nl_mode(AtNewline::Continue, |p| {
1240            p.assert(SyntaxKind::LeftParen);
1241
1242            let mut seen = HashSet::new();
1243            while !p.current().is_terminator() {
1244                if !p.at_set(set::ARG) {
1245                    p.unexpected();
1246                    continue;
1247                }
1248
1249                arg(p, &mut seen);
1250
1251                if !p.current().is_terminator() {
1252                    p.expect(SyntaxKind::Comma);
1253                }
1254            }
1255
1256            p.expect_closing_delimiter(m2, SyntaxKind::RightParen);
1257        });
1258    }
1259
1260    while p.directly_at(SyntaxKind::LeftBracket) {
1261        content_block(p);
1262    }
1263
1264    p.wrap(m, SyntaxKind::Args);
1265}
1266
1267/// Parses a single argument in an argument list.
1268fn arg<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>) {
1269    let m = p.marker();
1270
1271    // Parses a spread argument: `..args`.
1272    if p.eat_if(SyntaxKind::Dots) {
1273        code_expr(p);
1274        p.wrap(m, SyntaxKind::Spread);
1275        return;
1276    }
1277
1278    // Parses a normal positional argument or an argument name.
1279    let was_at_expr = p.at_set(set::CODE_EXPR);
1280    let text = p.current_text();
1281    code_expr(p);
1282
1283    // Parses a named argument: `thickness: 12pt`.
1284    if p.eat_if(SyntaxKind::Colon) {
1285        // Recover from bad argument name.
1286        if was_at_expr {
1287            if p[m].kind() != SyntaxKind::Ident {
1288                p[m].expected("identifier");
1289            } else if !seen.insert(text) {
1290                p[m].convert_to_error(eco_format!("duplicate argument: {text}"));
1291            }
1292        }
1293
1294        code_expr(p);
1295        p.wrap(m, SyntaxKind::Named);
1296    }
1297}
1298
1299/// Parses a closure's parameters: `(x, y)`.
1300fn params(p: &mut Parser) {
1301    let m = p.marker();
1302    p.with_nl_mode(AtNewline::Continue, |p| {
1303        p.assert(SyntaxKind::LeftParen);
1304
1305        let mut seen = HashSet::new();
1306        let mut sink = false;
1307
1308        while !p.current().is_terminator() {
1309            if !p.at_set(set::PARAM) {
1310                p.unexpected();
1311                continue;
1312            }
1313
1314            param(p, &mut seen, &mut sink);
1315
1316            if !p.current().is_terminator() {
1317                p.expect(SyntaxKind::Comma);
1318            }
1319        }
1320
1321        p.expect_closing_delimiter(m, SyntaxKind::RightParen);
1322    });
1323    p.wrap(m, SyntaxKind::Params);
1324}
1325
1326/// Parses a single parameter in a parameter list.
1327fn param<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>, sink: &mut bool) {
1328    let m = p.marker();
1329
1330    // Parses argument sink: `..sink`.
1331    if p.eat_if(SyntaxKind::Dots) {
1332        if p.at_set(set::PATTERN_LEAF) {
1333            pattern_leaf(p, false, seen, Some("parameter"));
1334        }
1335        p.wrap(m, SyntaxKind::Spread);
1336        if mem::replace(sink, true) {
1337            p[m].convert_to_error("only one argument sink is allowed");
1338        }
1339        return;
1340    }
1341
1342    // Parses a normal positional parameter or a parameter name.
1343    let was_at_pat = p.at_set(set::PATTERN);
1344    pattern(p, false, seen, Some("parameter"));
1345
1346    // Parses a named parameter: `thickness: 12pt`.
1347    if p.eat_if(SyntaxKind::Colon) {
1348        // Recover from bad parameter name.
1349        if was_at_pat && p[m].kind() != SyntaxKind::Ident {
1350            p[m].expected("identifier");
1351        }
1352
1353        code_expr(p);
1354        p.wrap(m, SyntaxKind::Named);
1355    }
1356}
1357
1358/// Parses a binding or reassignment pattern.
1359fn pattern<'s>(
1360    p: &mut Parser<'s>,
1361    reassignment: bool,
1362    seen: &mut HashSet<&'s str>,
1363    dupe: Option<&'s str>,
1364) {
1365    match p.current() {
1366        SyntaxKind::Underscore => p.eat(),
1367        SyntaxKind::LeftParen => destructuring_or_parenthesized(p, reassignment, seen),
1368        _ => pattern_leaf(p, reassignment, seen, dupe),
1369    }
1370}
1371
1372/// Parses a destructuring pattern or just a parenthesized pattern.
1373fn destructuring_or_parenthesized<'s>(
1374    p: &mut Parser<'s>,
1375    reassignment: bool,
1376    seen: &mut HashSet<&'s str>,
1377) {
1378    let mut sink = false;
1379    let mut count = 0;
1380    let mut maybe_just_parens = true;
1381
1382    let m = p.marker();
1383    p.with_nl_mode(AtNewline::Continue, |p| {
1384        p.assert(SyntaxKind::LeftParen);
1385
1386        while !p.current().is_terminator() {
1387            if !p.at_set(set::DESTRUCTURING_ITEM) {
1388                p.unexpected();
1389                continue;
1390            }
1391
1392            destructuring_item(p, reassignment, seen, &mut maybe_just_parens, &mut sink);
1393            count += 1;
1394
1395            if !p.current().is_terminator() && p.expect(SyntaxKind::Comma) {
1396                maybe_just_parens = false;
1397            }
1398        }
1399
1400        p.expect_closing_delimiter(m, SyntaxKind::RightParen);
1401    });
1402
1403    if maybe_just_parens && count == 1 && !sink {
1404        p.wrap(m, SyntaxKind::Parenthesized);
1405    } else {
1406        p.wrap(m, SyntaxKind::Destructuring);
1407    }
1408}
1409
1410/// Parses an item in a destructuring pattern.
1411fn destructuring_item<'s>(
1412    p: &mut Parser<'s>,
1413    reassignment: bool,
1414    seen: &mut HashSet<&'s str>,
1415    maybe_just_parens: &mut bool,
1416    sink: &mut bool,
1417) {
1418    let m = p.marker();
1419
1420    // Parse destructuring sink: `..rest`.
1421    if p.eat_if(SyntaxKind::Dots) {
1422        if p.at_set(set::PATTERN_LEAF) {
1423            pattern_leaf(p, reassignment, seen, None);
1424        }
1425        p.wrap(m, SyntaxKind::Spread);
1426        if mem::replace(sink, true) {
1427            p[m].convert_to_error("only one destructuring sink is allowed");
1428        }
1429        return;
1430    }
1431
1432    // Parse a normal positional pattern or a destructuring key.
1433    let was_at_pat = p.at_set(set::PATTERN);
1434
1435    // We must use a full checkpoint here (can't just clone the lexer) because
1436    // there may be trivia between the identifier and the colon we need to skip.
1437    let checkpoint = p.checkpoint();
1438    if !(p.eat_if(SyntaxKind::Ident) && p.at(SyntaxKind::Colon)) {
1439        p.restore(checkpoint);
1440        pattern(p, reassignment, seen, None);
1441    }
1442
1443    // Parse named destructuring item.
1444    if p.eat_if(SyntaxKind::Colon) {
1445        // Recover from bad named destructuring.
1446        if was_at_pat && p[m].kind() != SyntaxKind::Ident {
1447            p[m].expected("identifier");
1448        }
1449
1450        pattern(p, reassignment, seen, None);
1451        p.wrap(m, SyntaxKind::Named);
1452        *maybe_just_parens = false;
1453    }
1454}
1455
1456/// Parses a leaf in a pattern - either an identifier or an expression
1457/// depending on whether it's a binding or reassignment pattern.
1458fn pattern_leaf<'s>(
1459    p: &mut Parser<'s>,
1460    reassignment: bool,
1461    seen: &mut HashSet<&'s str>,
1462    dupe: Option<&'s str>,
1463) {
1464    if p.current().is_keyword() {
1465        p.eat_and_get().expected("pattern");
1466        return;
1467    } else if !p.at_set(set::PATTERN_LEAF) {
1468        p.expected("pattern");
1469        return;
1470    }
1471
1472    let m = p.marker();
1473    let text = p.current_text();
1474
1475    // We parse an atomic expression even though we only want an identifier for
1476    // better error recovery. We can mark the whole expression as unexpected
1477    // instead of going through its pieces one by one.
1478    code_expr_prec(p, true, 0);
1479
1480    if !reassignment {
1481        let node = &mut p[m];
1482        if node.kind() == SyntaxKind::Ident {
1483            if !seen.insert(text) {
1484                node.convert_to_error(eco_format!(
1485                    "duplicate {}: {text}",
1486                    dupe.unwrap_or("binding"),
1487                ));
1488            }
1489        } else {
1490            node.expected("pattern");
1491        }
1492    }
1493}
1494
1495/// Manages parsing a stream of tokens into a tree of [`SyntaxNode`]s.
1496///
1497/// The implementation presents an interface that investigates a current `token`
1498/// with a [`SyntaxKind`] and can take one of the following actions:
1499///
1500/// 1. Eat a token: push `token` onto the `nodes` vector as a [leaf
1501///    node](`SyntaxNode::leaf`) and prepare a new `token` by calling into the
1502///    lexer.
1503/// 2. Wrap nodes from a marker to the end of `nodes` (excluding `token` and any
1504///    attached trivia) into an [inner node](`SyntaxNode::inner`) of a specific
1505///    `SyntaxKind`.
1506/// 3. Produce or convert nodes into an [error node](`SyntaxNode::error`) when
1507///    something expected is missing or something unexpected is found.
1508///
1509/// Overall the parser produces a nested tree of SyntaxNodes as a "_Concrete_
1510/// Syntax Tree." The raw Concrete Syntax Tree should contain the entire source
1511/// text, and is used as-is for e.g. syntax highlighting and IDE features. In
1512/// `ast.rs` the CST is interpreted as a lazy view over an "_Abstract_ Syntax
1513/// Tree." The AST module skips over irrelevant tokens -- whitespace, comments,
1514/// code parens, commas in function args, etc. -- as it iterates through the
1515/// tree.
1516///
1517/// ### Modes
1518///
1519/// The parser manages the transitions between the three modes of Typst through
1520/// [lexer modes](`LexMode`) and [newline modes](`AtNewline`).
1521///
1522/// The lexer modes map to the three Typst modes and are stored in the lexer,
1523/// changing which`SyntaxKind`s it will generate.
1524///
1525/// The newline mode is used to determine whether a newline should end the
1526/// current expression. If so, the parser temporarily changes `token`'s kind to
1527/// a fake [`SyntaxKind::End`]. When the parser exits the mode the original
1528/// `SyntaxKind` is restored.
1529struct Parser<'s> {
1530    /// The source text shared with the lexer.
1531    text: &'s str,
1532    /// A lexer over the source text with multiple modes. Defines the boundaries
1533    /// of tokens and determines their [`SyntaxKind`]. Contains the [`LexMode`]
1534    /// defining our current Typst mode.
1535    lexer: Lexer<'s>,
1536    /// The newline mode: whether to insert a temporary end at newlines.
1537    nl_mode: AtNewline,
1538    /// The current token under inspection, not yet present in `nodes`. This
1539    /// acts like a single item of lookahead for the parser.
1540    ///
1541    /// When wrapping, this is _not_ included in the wrapped nodes.
1542    token: Token,
1543    /// Whether the parser has the expected set of open/close delimiters. This
1544    /// only ever transitions from `true` to `false`.
1545    balanced: bool,
1546    /// Nodes representing the concrete syntax tree of previously parsed text.
1547    /// In Code and Math, includes previously parsed trivia, but not `token`.
1548    nodes: Vec<SyntaxNode>,
1549    /// Parser checkpoints for a given text index. Used for efficient parser
1550    /// backtracking similar to packrat parsing. See comments above in
1551    /// [`expr_with_paren`].
1552    memo: MemoArena,
1553}
1554
1555/// A single token returned from the lexer with a cached [`SyntaxKind`] and a
1556/// record of preceding trivia.
1557#[derive(Debug, Clone)]
1558struct Token {
1559    /// The [`SyntaxKind`] of the current token.
1560    kind: SyntaxKind,
1561    /// The [`SyntaxNode`] of the current token, ready to be eaten and pushed
1562    /// onto the end of `nodes`.
1563    node: SyntaxNode,
1564    /// The number of preceding trivia before this token.
1565    n_trivia: usize,
1566    /// Whether this token's preceding trivia contained a newline.
1567    newline: Option<Newline>,
1568    /// The index into `text` of the start of our current token (the end is
1569    /// stored as the lexer's cursor).
1570    start: usize,
1571    /// The index into `text` of the end of the previous token.
1572    prev_end: usize,
1573}
1574
1575/// Information about a newline if present (currently only relevant in Markup).
1576#[derive(Debug, Clone, Copy)]
1577struct Newline {
1578    /// The column of the start of our token in its line.
1579    column: Option<usize>,
1580    /// Whether any of our newlines were paragraph breaks.
1581    parbreak: bool,
1582}
1583
1584/// How to proceed with parsing when at a newline.
1585#[derive(Debug, Clone, Copy, PartialEq, Eq)]
1586enum AtNewline {
1587    /// Continue at newlines.
1588    Continue,
1589    /// Stop at any newline.
1590    Stop,
1591    /// Continue only if there is no continuation with `else` or `.` (Code only).
1592    ContextualContinue,
1593    /// Stop only at a parbreak, not normal newlines (Markup only).
1594    StopParBreak,
1595    /// Require that the token's column be greater or equal to a column (Markup
1596    /// only). If this is `0`, acts like `Continue`; if this is `usize::MAX`,
1597    /// acts like `Stop`.
1598    RequireColumn(usize),
1599}
1600
1601impl AtNewline {
1602    /// Whether to stop at a newline or continue based on the current context.
1603    fn stop_at(self, Newline { column, parbreak }: Newline, kind: SyntaxKind) -> bool {
1604        #[allow(clippy::match_like_matches_macro)]
1605        match self {
1606            AtNewline::Continue => false,
1607            AtNewline::Stop => true,
1608            AtNewline::ContextualContinue => match kind {
1609                SyntaxKind::Else | SyntaxKind::Dot => false,
1610                _ => true,
1611            },
1612            AtNewline::StopParBreak => parbreak,
1613            AtNewline::RequireColumn(min_col) => {
1614                // If the column is `None`, the newline doesn't start a
1615                // column and we keep parsing. This can happen at the boundary
1616                // of lexer modes, since we only report a column in Markup.
1617                column.is_some_and(|column| column <= min_col)
1618            }
1619        }
1620    }
1621}
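
// The following is an illustrative sketch added for this listing (not part of
// the upstream parser): a small test that exercises `AtNewline::stop_at`
// exactly as implemented above, to make the behavior of each mode concrete.
#[cfg(test)]
mod at_newline_sketch {
    use super::*;

    #[test]
    fn stop_at_examples() {
        let nl = Newline { column: Some(2), parbreak: false };
        // `Continue` never stops, `Stop` always stops.
        assert!(!AtNewline::Continue.stop_at(nl, SyntaxKind::Ident));
        assert!(AtNewline::Stop.stop_at(nl, SyntaxKind::Ident));
        // `ContextualContinue` keeps going only for an `else`/`.` continuation.
        assert!(!AtNewline::ContextualContinue.stop_at(nl, SyntaxKind::Else));
        assert!(AtNewline::ContextualContinue.stop_at(nl, SyntaxKind::Ident));
        // `StopParBreak` only stops at paragraph breaks.
        assert!(!AtNewline::StopParBreak.stop_at(nl, SyntaxKind::Ident));
        // `RequireColumn(2)` stops since column 2 is not strictly greater.
        assert!(AtNewline::RequireColumn(2).stop_at(nl, SyntaxKind::Ident));
        assert!(!AtNewline::RequireColumn(1).stop_at(nl, SyntaxKind::Ident));
    }
}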
1622
1623/// A marker representing a node's position in the parser. Mainly used for
1624/// wrapping, but can also index into the parser to access the node, like
1625/// `p[m]`.
1626#[derive(Debug, Copy, Clone, Eq, PartialEq)]
1627struct Marker(usize);
1628
1629// Index into the parser with markers.
1630impl Index<Marker> for Parser<'_> {
1631    type Output = SyntaxNode;
1632
1633    fn index(&self, m: Marker) -> &Self::Output {
1634        &self.nodes[m.0]
1635    }
1636}
1637
1638impl IndexMut<Marker> for Parser<'_> {
1639    fn index_mut(&mut self, m: Marker) -> &mut Self::Output {
1640        &mut self.nodes[m.0]
1641    }
1642}
1643
1644/// Creating/Consuming the parser and getting info about the current token.
1645impl<'s> Parser<'s> {
1646    /// Create a new parser starting from the given text offset and lexer mode.
1647    fn new(text: &'s str, offset: usize, mode: LexMode) -> Self {
1648        let mut lexer = Lexer::new(text, mode);
1649        lexer.jump(offset);
1650        let nl_mode = AtNewline::Continue;
1651        let mut nodes = vec![];
1652        let token = Self::lex(&mut nodes, &mut lexer, nl_mode);
1653        Self {
1654            text,
1655            lexer,
1656            nl_mode,
1657            token,
1658            balanced: true,
1659            nodes,
1660            memo: Default::default(),
1661        }
1662    }
1663
1664    /// Consume the parser, yielding the full vector of parsed SyntaxNodes.
1665    fn finish(self) -> Vec<SyntaxNode> {
1666        self.nodes
1667    }
1668
1669    /// Consume the parser, generating a single top-level node.
1670    fn finish_into(self, kind: SyntaxKind) -> SyntaxNode {
1671        assert!(self.at(SyntaxKind::End));
1672        SyntaxNode::inner(kind, self.finish())
1673    }
1674
1675    /// Similar to a `peek()` function: returns the `kind` of the next token to
1676    /// be eaten.
1677    fn current(&self) -> SyntaxKind {
1678        self.token.kind
1679    }
1680
1681    /// Whether the current token is a given [`SyntaxKind`].
1682    fn at(&self, kind: SyntaxKind) -> bool {
1683        self.token.kind == kind
1684    }
1685
1686    /// Whether the current token is contained in a [`SyntaxSet`].
1687    fn at_set(&self, set: SyntaxSet) -> bool {
1688        set.contains(self.token.kind)
1689    }
1690
1691    /// Whether we're at the end of the token stream.
1692    ///
1693    /// Note: This might be a fake end due to the newline mode.
1694    fn end(&self) -> bool {
1695        self.at(SyntaxKind::End)
1696    }
1697
1698    /// If we're at the given `kind` with no preceding trivia tokens.
1699    fn directly_at(&self, kind: SyntaxKind) -> bool {
1700        self.token.kind == kind && !self.had_trivia()
1701    }
1702
1703    /// Whether `token` had any preceding trivia.
1704    fn had_trivia(&self) -> bool {
1705        self.token.n_trivia > 0
1706    }
1707
1708    /// Whether `token` had a newline among any of its preceding trivia.
1709    fn had_newline(&self) -> bool {
1710        self.token.newline.is_some()
1711    }
1712
1713    /// The column of the current token's start: the number of characters since
1714    /// the most recent newline. Uses the value cached in `token.newline` if present.
1715    fn current_column(&self) -> usize {
1716        self.token
1717            .newline
1718            .and_then(|newline| newline.column)
1719            .unwrap_or_else(|| self.lexer.column(self.token.start))
1720    }
1721
1722    /// The current token's text.
1723    fn current_text(&self) -> &'s str {
1724        &self.text[self.token.start..self.current_end()]
1725    }
1726
1727    /// The offset into `text` of the current token's start.
1728    fn current_start(&self) -> usize {
1729        self.token.start
1730    }
1731
1732    /// The offset into `text` of the current token's end.
1733    fn current_end(&self) -> usize {
1734        self.lexer.cursor()
1735    }
1736
1737    /// The offset into `text` of the previous token's end.
1738    fn prev_end(&self) -> usize {
1739        self.token.prev_end
1740    }
1741}
1742
1743/// The main parsing interface for generating tokens and eating/modifying nodes.
1744impl<'s> Parser<'s> {
1745    /// A marker that will point to the current token in the parser once it's
1746    /// been eaten.
1747    fn marker(&self) -> Marker {
1748        Marker(self.nodes.len())
1749    }
1750
1751    /// A marker that will point to the first trivia node before this token in
1752    /// the parser (or to the token itself if no trivia precede it).
1753    fn before_trivia(&self) -> Marker {
1754        Marker(self.nodes.len() - self.token.n_trivia)
1755    }
1756
1757    /// Eat the current node and return a reference for in-place mutation.
1758    #[track_caller]
1759    fn eat_and_get(&mut self) -> &mut SyntaxNode {
1760        let offset = self.nodes.len();
1761        self.eat();
1762        &mut self.nodes[offset]
1763    }
1764
1765    /// Eat the token if at `kind`. Returns `true` if eaten.
1766    ///
1767    /// Note: In Math and Code, this will skip over any trivia in front of the
1768    /// `kind`. To avoid skipping trivia, use `eat_if_direct` instead.
1769    fn eat_if(&mut self, kind: SyntaxKind) -> bool {
1770        let at = self.at(kind);
1771        if at {
1772            self.eat();
1773        }
1774        at
1775    }
1776
1777    /// Eat the token only if at `kind` with no preceding trivia. Returns `true`
1778    /// if eaten.
1779    fn eat_if_direct(&mut self, kind: SyntaxKind) -> bool {
1780        let at = self.directly_at(kind);
1781        if at {
1782            self.eat();
1783        }
1784        at
1785    }
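
    // For example (hypothetical input): in Code, given `f (x)`, the space
    // before the paren is trivia, so `p.eat_if(SyntaxKind::LeftParen)` would
    // consume the paren anyway, while `p.eat_if_direct(SyntaxKind::LeftParen)`
    // would not, since the paren doesn't directly follow the previous token.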
1786
1787    /// Assert that we are at the given [`SyntaxKind`] and eat it. This should
1788    /// be used when moving between functions that expect to start with a
1789    /// specific token.
1790    #[track_caller]
1791    fn assert(&mut self, kind: SyntaxKind) {
1792        assert_eq!(self.token.kind, kind);
1793        self.eat();
1794    }
1795
1796    /// Convert the current token's [`SyntaxKind`] and eat it.
1797    fn convert_and_eat(&mut self, kind: SyntaxKind) {
1798        // Only need to replace the node here.
1799        self.token.node.convert_to_kind(kind);
1800        self.eat();
1801    }
1802
1803    /// Eat the current token by saving it to the `nodes` vector, then move
1804    /// the lexer forward to prepare a new token.
1805    fn eat(&mut self) {
1806        self.nodes.push(std::mem::take(&mut self.token.node));
1807        self.token = Self::lex(&mut self.nodes, &mut self.lexer, self.nl_mode);
1808    }
1809
1810    /// Detach the parsed trivia nodes from this token (but not newline info) so
1811    /// that subsequent wrapping will include the trivia.
1812    fn flush_trivia(&mut self) {
1813        self.token.n_trivia = 0;
1814        self.token.prev_end = self.token.start;
1815    }
1816
1817    /// Wrap the nodes from a marker up to (but excluding) the current token in
1818    /// a new [inner node](`SyntaxNode::inner`) of the given kind. This is an
1819    /// easy interface for creating nested syntax nodes _after_ having parsed
1820    /// their children.
1821    fn wrap(&mut self, from: Marker, kind: SyntaxKind) {
1822        let to = self.before_trivia().0;
1823        let from = from.0.min(to);
1824        let children = self.nodes.drain(from..to).collect();
1825        self.nodes.insert(from, SyntaxNode::inner(kind, children));
1826    }
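
    // A minimal usage sketch (hypothetical call site; the helper and kind
    // below are assumptions for illustration):
    //
    //     let m = p.marker();            // remember where the node starts
    //     p.eat();                       // parse the children first ...
    //     code_expr(p);
    //     p.wrap(m, SyntaxKind::Unary);  // ... then wrap them afterwards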
1827
1828    /// Parse within the [`LexMode`] for subsequent tokens (does not change the
1829    /// current token). This may re-lex the final token on exit.
1830    ///
1831    /// This function effectively repurposes the call stack as a stack of modes.
1832    fn enter_modes(
1833        &mut self,
1834        mode: LexMode,
1835        stop: AtNewline,
1836        func: impl FnOnce(&mut Parser<'s>),
1837    ) {
1838        let previous = self.lexer.mode();
1839        self.lexer.set_mode(mode);
1840        self.with_nl_mode(stop, func);
1841        if mode != previous {
1842            self.lexer.set_mode(previous);
1843            self.lexer.jump(self.token.prev_end);
1844            self.nodes.truncate(self.nodes.len() - self.token.n_trivia);
1845            self.token = Self::lex(&mut self.nodes, &mut self.lexer, self.nl_mode);
1846        }
1847    }
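
    // A hedged usage sketch (hypothetical call site): parse a `[..]` content
    // block in Markup while the surrounding context is Code. The stop set and
    // flags below are assumptions for illustration.
    //
    //     p.enter_modes(LexMode::Markup, AtNewline::Continue, |p| {
    //         markup(p, true, true, syntax_set!(RightBracket, End));
    //     });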
1848
1849    /// Parse within the [`AtNewline`] mode for subsequent tokens (does not
1850    /// change the current token). This may re-lex the final token on exit.
1851    ///
1852    /// This function effectively repurposes the call stack as a stack of modes.
1853    fn with_nl_mode(&mut self, mode: AtNewline, func: impl FnOnce(&mut Parser<'s>)) {
1854        let previous = self.nl_mode;
1855        self.nl_mode = mode;
1856        func(self);
1857        self.nl_mode = previous;
1858        if let Some(newline) = self.token.newline {
1859            if mode != previous {
1860                // Restore our actual token's kind or insert a fake end.
1861                let actual_kind = self.token.node.kind();
1862                if self.nl_mode.stop_at(newline, actual_kind) {
1863                    self.token.kind = SyntaxKind::End;
1864                } else {
1865                    self.token.kind = actual_kind;
1866                }
1867            }
1868        }
1869    }
1870
1871    /// Move the lexer forward and prepare the current token. Depending on the
1872    /// newline mode, this might insert a temporary [`SyntaxKind::End`].
1873    ///
1874    /// This is not a method on `self` because we need a valid token before we
1875    /// can initialize the parser.
1876    fn lex(nodes: &mut Vec<SyntaxNode>, lexer: &mut Lexer, nl_mode: AtNewline) -> Token {
1877        let prev_end = lexer.cursor();
1878        let mut start = prev_end;
1879        let (mut kind, mut node) = lexer.next();
1880        let mut n_trivia = 0;
1881        let mut had_newline = false;
1882        let mut parbreak = false;
1883
1884        while kind.is_trivia() {
1885            had_newline |= lexer.newline(); // Newlines are always trivia.
1886            parbreak |= kind == SyntaxKind::Parbreak;
1887            n_trivia += 1;
1888            nodes.push(node);
1889            start = lexer.cursor();
1890            (kind, node) = lexer.next();
1891        }
1892
1893        let newline = if had_newline {
1894            let column = (lexer.mode() == LexMode::Markup).then(|| lexer.column(start));
1895            let newline = Newline { column, parbreak };
1896            if nl_mode.stop_at(newline, kind) {
1897                // Insert a temporary `SyntaxKind::End` to halt the parser.
1898                // The actual kind will be restored from `node` later.
1899                kind = SyntaxKind::End;
1900            }
1901            Some(newline)
1902        } else {
1903            None
1904        };
1905
1906        Token { kind, node, n_trivia, newline, start, prev_end }
1907    }
1908}
1909
1910/// Extra parser state for efficiently recovering from mispredicted parses.
1911///
1912/// This is the same idea as packrat parsing, but we use it only in the limited
1913/// case of parenthesized structures. See [`expr_with_paren`] for more.
1914#[derive(Default)]
1915struct MemoArena {
1916    /// A single arena of previously parsed nodes (to reduce allocations).
1917    /// Memoized ranges refer to unique sections of the arena.
1918    arena: Vec<SyntaxNode>,
1919    /// A map from the parser's current position to a range of previously parsed
1920    /// nodes in the arena and a checkpoint of the parser's state. These allow
1921    /// us to reset the parser to avoid parsing the same location again.
1922    memo_map: HashMap<MemoKey, (Range<usize>, PartialState)>,
1923}
1924
1925/// A type alias for the memo key so it doesn't get confused with other usizes.
1926///
1927/// The memo is keyed by the index into `text` of the current token's start.
1928type MemoKey = usize;
1929
1930/// A checkpoint of the parser which can fully restore it to a previous state.
1931struct Checkpoint {
1932    node_len: usize,
1933    state: PartialState,
1934}
1935
1936/// State needed to restore the parser's current token and the lexer (but not
1937/// the nodes vector).
1938#[derive(Clone)]
1939struct PartialState {
1940    cursor: usize,
1941    lex_mode: LexMode,
1942    token: Token,
1943}
1944
1945/// The Memoization interface.
1946impl Parser<'_> {
1947    /// Store the already parsed nodes and the parser state into the memo map by
1948    /// extending the arena and storing the extended range and a checkpoint.
1949    fn memoize_parsed_nodes(&mut self, key: MemoKey, prev_len: usize) {
1950        let Checkpoint { state, node_len } = self.checkpoint();
1951        let memo_start = self.memo.arena.len();
1952        self.memo.arena.extend_from_slice(&self.nodes[prev_len..node_len]);
1953        let arena_range = memo_start..self.memo.arena.len();
1954        self.memo.memo_map.insert(key, (arena_range, state));
1955    }
1956
1957    /// Try to load a memoized result. Returns `None` if one was loaded, or
1958    /// `Some` (with a key for the memo map and a checkpoint) if there was none.
1959    fn restore_memo_or_checkpoint(&mut self) -> Option<(MemoKey, Checkpoint)> {
1960        // We use the starting index of the current token as our key.
1961        let key: MemoKey = self.current_start();
1962        match self.memo.memo_map.get(&key).cloned() {
1963            Some((range, state)) => {
1964                self.nodes.extend_from_slice(&self.memo.arena[range]);
1965                // It's important that we don't truncate the nodes vector since
1966                // it may have grown or shrunk (due to other memoization or
1967                // error reporting) since we made this checkpoint.
1968                self.restore_partial(state);
1969                None
1970            }
1971            None => Some((key, self.checkpoint())),
1972        }
1973    }
1974
1975    /// Restore the parser to the state at a checkpoint.
1976    fn restore(&mut self, checkpoint: Checkpoint) {
1977        self.nodes.truncate(checkpoint.node_len);
1978        self.restore_partial(checkpoint.state);
1979    }
1980
1981    /// Restore parts of the checkpoint excluding the nodes vector.
1982    fn restore_partial(&mut self, state: PartialState) {
1983        self.lexer.jump(state.cursor);
1984        self.lexer.set_mode(state.lex_mode);
1985        self.token = state.token;
1986    }
1987
1988    /// Save a checkpoint of the parser state.
1989    fn checkpoint(&self) -> Checkpoint {
1990        let node_len = self.nodes.len();
1991        let state = PartialState {
1992            cursor: self.lexer.cursor(),
1993            lex_mode: self.lexer.mode(),
1994            token: self.token.clone(),
1995        };
1996        Checkpoint { node_len, state }
1997    }
1998}
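
// An illustrative sketch of how the memoization interface above is driven (the
// real caller is `expr_with_paren`; this shape is a simplification, not the
// actual implementation):
//
//     if let Some((key, checkpoint)) = p.restore_memo_or_checkpoint() {
//         let prev_len = checkpoint.node_len;
//         // ... speculatively parse, e.g. a parenthesized expression ...
//         p.memoize_parsed_nodes(key, prev_len);
//         // On a misprediction, `p.restore(checkpoint)` rewinds the parser.
//     }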
1999
2000/// Functions for eating expected or unexpected tokens and generating errors if
2001/// we don't get what we expect.
2002impl Parser<'_> {
2003    /// Consume the given `kind` or produce an error.
2004    fn expect(&mut self, kind: SyntaxKind) -> bool {
2005        let at = self.at(kind);
2006        if at {
2007            self.eat();
2008        } else if kind == SyntaxKind::Ident && self.token.kind.is_keyword() {
2009            self.trim_errors();
2010            self.eat_and_get().expected(kind.name());
2011        } else {
2012            self.balanced &= !kind.is_grouping();
2013            self.expected(kind.name());
2014        }
2015        at
2016    }
2017
2018    /// Consume the given closing delimiter or produce an error for the matching
2019    /// opening delimiter at `open`.
2020    #[track_caller]
2021    fn expect_closing_delimiter(&mut self, open: Marker, kind: SyntaxKind) {
2022        if !self.eat_if(kind) {
2023            self.nodes[open.0].convert_to_error("unclosed delimiter");
2024        }
2025    }
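
    // Hypothetical usage sketch: remember the opening delimiter's marker so it
    // can be converted into an error if the closer never appears.
    //
    //     let open = p.marker();
    //     p.assert(SyntaxKind::LeftParen);
    //     // ... parse the contents ...
    //     p.expect_closing_delimiter(open, SyntaxKind::RightParen);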
2026
2027    /// Produce an error that the given `thing` was expected.
2028    fn expected(&mut self, thing: &str) {
2029        if !self.after_error() {
2030            self.expected_at(self.before_trivia(), thing);
2031        }
2032    }
2033
2034    /// Whether the last non-trivia node is an error.
2035    fn after_error(&mut self) -> bool {
2036        let m = self.before_trivia();
2037        m.0 > 0 && self.nodes[m.0 - 1].kind().is_error()
2038    }
2039
2040    /// Produce an error that the given `thing` was expected at the position
2041    /// of the marker `m`.
2042    fn expected_at(&mut self, m: Marker, thing: &str) {
2043        let error =
2044            SyntaxNode::error(SyntaxError::new(eco_format!("expected {thing}")), "");
2045        self.nodes.insert(m.0, error);
2046    }
2047
2048    /// Add a hint to a trailing error.
2049    fn hint(&mut self, hint: &str) {
2050        let m = self.before_trivia();
2051        if let Some(error) = self.nodes.get_mut(m.0 - 1) {
2052            error.hint(hint);
2053        }
2054    }
2055
2056    /// Consume the next token (if any) and produce an error stating that it was
2057    /// unexpected.
2058    fn unexpected(&mut self) {
2059        self.trim_errors();
2060        self.balanced &= !self.token.kind.is_grouping();
2061        self.eat_and_get().unexpected();
2062    }
2063
2064    /// Remove trailing errors with zero length.
2065    fn trim_errors(&mut self) {
2066        let Marker(end) = self.before_trivia();
2067        let mut start = end;
2068        while start > 0
2069            && self.nodes[start - 1].kind().is_error()
2070            && self.nodes[start - 1].is_empty()
2071        {
2072            start -= 1;
2073        }
2074        self.nodes.drain(start..end);
2075    }
2076}