typst_syntax/parser.rs

use std::mem;
use std::ops::{Index, IndexMut, Range};

use ecow::{EcoString, eco_format};
use rustc_hash::{FxHashMap, FxHashSet};
use typst_utils::default_math_class;
use unicode_math_class::MathClass;

use crate::set::{SyntaxSet, syntax_set};
use crate::{Lexer, SyntaxError, SyntaxKind, SyntaxMode, SyntaxNode, ast, set};

/// Parses a source file as top-level markup.
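///
/// A minimal usage sketch (assuming `parse`, `SyntaxKind`, and
/// `SyntaxNode::kind` as re-exported by the `typst_syntax` crate):
///
/// ```ignore
/// let root = parse("Hello *world*!");
/// assert_eq!(root.kind(), SyntaxKind::Markup);
/// ```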
pub fn parse(text: &str) -> SyntaxNode {
    let _scope = typst_timing::TimingScope::new("parse");
    let mut p = Parser::new(text, 0, SyntaxMode::Markup);
    markup_exprs(&mut p, true, syntax_set!(End));
    p.finish_into(SyntaxKind::Markup)
}

/// Parses top-level code.
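///
/// A small sketch of the expected result (assuming the crate re-exports
/// `parse_code`):
///
/// ```ignore
/// let root = parse_code("let x = 1 + 2");
/// assert_eq!(root.kind(), SyntaxKind::Code);
/// ```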
pub fn parse_code(text: &str) -> SyntaxNode {
    let _scope = typst_timing::TimingScope::new("parse code");
    let mut p = Parser::new(text, 0, SyntaxMode::Code);
    code_exprs(&mut p, syntax_set!(End));
    p.finish_into(SyntaxKind::Code)
}

/// Parses top-level math.
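///
/// Sketch (assuming the crate re-exports `parse_math`):
///
/// ```ignore
/// let root = parse_math("x^2 + 1");
/// assert_eq!(root.kind(), SyntaxKind::Math);
/// ```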
pub fn parse_math(text: &str) -> SyntaxNode {
    let _scope = typst_timing::TimingScope::new("parse math");
    let mut p = Parser::new(text, 0, SyntaxMode::Math);
    math_exprs(&mut p, syntax_set!(End));
    p.finish_into(SyntaxKind::Math)
}

/// Parses markup expressions until a stop condition is met.
fn markup(p: &mut Parser, at_start: bool, wrap_trivia: bool, stop_set: SyntaxSet) {
    let m = if wrap_trivia { p.before_trivia() } else { p.marker() };
    markup_exprs(p, at_start, stop_set);
    if wrap_trivia {
        p.flush_trivia();
    }
    p.wrap(m, SyntaxKind::Markup);
}

/// Parses a sequence of markup expressions.
fn markup_exprs(p: &mut Parser, mut at_start: bool, stop_set: SyntaxSet) {
    debug_assert!(stop_set.contains(SyntaxKind::End));
    at_start |= p.had_newline();
    let mut nesting: usize = 0;
    // Keep going if we're at a nested right-bracket regardless of the stop set.
    while !p.at_set(stop_set) || (nesting > 0 && p.at(SyntaxKind::RightBracket)) {
        markup_expr(p, at_start, &mut nesting);
        at_start = p.had_newline();
    }
}

/// Reparses a subsection of markup incrementally.
pub(super) fn reparse_markup(
    text: &str,
    range: Range<usize>,
    at_start: &mut bool,
    nesting: &mut usize,
    top_level: bool,
) -> Option<Vec<SyntaxNode>> {
    let mut p = Parser::new(text, range.start, SyntaxMode::Markup);
    *at_start |= p.had_newline();
    while !p.end() && p.current_start() < range.end {
        // If not top-level and at a new RightBracket, stop the reparse.
        if !top_level && *nesting == 0 && p.at(SyntaxKind::RightBracket) {
            break;
        }
        markup_expr(&mut p, *at_start, nesting);
        *at_start = p.had_newline();
    }
    (p.balanced && p.current_start() == range.end).then(|| p.finish())
}

/// Parses a single markup expression. This includes markup elements like text,
/// headings, strong/emph, lists/enums, etc. This is also the entry point for
/// parsing math equations and embedded code expressions.
fn markup_expr(p: &mut Parser, at_start: bool, nesting: &mut usize) {
    match p.current() {
        SyntaxKind::LeftBracket => {
            *nesting += 1;
            p.convert_and_eat(SyntaxKind::Text);
        }
        SyntaxKind::RightBracket if *nesting > 0 => {
            *nesting -= 1;
            p.convert_and_eat(SyntaxKind::Text);
        }
        SyntaxKind::RightBracket => {
            p.unexpected();
            p.hint("try using a backslash escape: \\]");
        }

        SyntaxKind::Shebang => p.eat(),

        SyntaxKind::Text
        | SyntaxKind::Linebreak
        | SyntaxKind::Escape
        | SyntaxKind::Shorthand
        | SyntaxKind::SmartQuote
        | SyntaxKind::Link
        | SyntaxKind::Label => p.eat(),

        SyntaxKind::Raw => p.eat(), // Raw is handled entirely in the Lexer.

        SyntaxKind::Hash => embedded_code_expr(p),
        SyntaxKind::Star => strong(p),
        SyntaxKind::Underscore => emph(p),
        SyntaxKind::HeadingMarker if at_start => heading(p),
        SyntaxKind::ListMarker if at_start => list_item(p),
        SyntaxKind::EnumMarker if at_start => enum_item(p),
        SyntaxKind::TermMarker if at_start => term_item(p),
        SyntaxKind::RefMarker => reference(p),
        SyntaxKind::Dollar => equation(p),

        SyntaxKind::HeadingMarker
        | SyntaxKind::ListMarker
        | SyntaxKind::EnumMarker
        | SyntaxKind::TermMarker
        | SyntaxKind::Colon => p.convert_and_eat(SyntaxKind::Text),

        _ => p.unexpected(),
    }
}

/// Parses strong content: `*Strong*`.
fn strong(p: &mut Parser) {
    p.with_nl_mode(AtNewline::StopParBreak, |p| {
        let m = p.marker();
        p.assert(SyntaxKind::Star);
        markup(p, false, true, syntax_set!(Star, RightBracket, End));
        p.expect_closing_delimiter(m, SyntaxKind::Star);
        p.wrap(m, SyntaxKind::Strong);
    });
}

/// Parses emphasized content: `_Emphasized_`.
fn emph(p: &mut Parser) {
    p.with_nl_mode(AtNewline::StopParBreak, |p| {
        let m = p.marker();
        p.assert(SyntaxKind::Underscore);
        markup(p, false, true, syntax_set!(Underscore, RightBracket, End));
        p.expect_closing_delimiter(m, SyntaxKind::Underscore);
        p.wrap(m, SyntaxKind::Emph);
    });
}

/// Parses a section heading: `= Introduction`.
fn heading(p: &mut Parser) {
    p.with_nl_mode(AtNewline::Stop, |p| {
        let m = p.marker();
        p.assert(SyntaxKind::HeadingMarker);
        markup(p, false, false, syntax_set!(Label, RightBracket, End));
        p.wrap(m, SyntaxKind::Heading);
    });
}

/// Parses an item in a bullet list: `- ...`.
fn list_item(p: &mut Parser) {
    p.with_nl_mode(AtNewline::RequireColumn(p.current_column()), |p| {
        let m = p.marker();
        p.assert(SyntaxKind::ListMarker);
        markup(p, true, false, syntax_set!(RightBracket, End));
        p.wrap(m, SyntaxKind::ListItem);
    });
}

/// Parses an item in an enumeration (numbered list): `+ ...` or `1. ...`.
fn enum_item(p: &mut Parser) {
    p.with_nl_mode(AtNewline::RequireColumn(p.current_column()), |p| {
        let m = p.marker();
        p.assert(SyntaxKind::EnumMarker);
        markup(p, true, false, syntax_set!(RightBracket, End));
        p.wrap(m, SyntaxKind::EnumItem);
    });
}

/// Parses an item in a term list: `/ Term: Details`.
fn term_item(p: &mut Parser) {
    p.with_nl_mode(AtNewline::RequireColumn(p.current_column()), |p| {
        let m = p.marker();
        p.with_nl_mode(AtNewline::Stop, |p| {
            p.assert(SyntaxKind::TermMarker);
            markup(p, false, false, syntax_set!(Colon, RightBracket, End));
        });
        p.expect(SyntaxKind::Colon);
        markup(p, true, false, syntax_set!(RightBracket, End));
        p.wrap(m, SyntaxKind::TermItem);
    });
}

/// Parses a reference: `@target`, `@target[..]`.
fn reference(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::RefMarker);
    if p.directly_at(SyntaxKind::LeftBracket) {
        content_block(p);
    }
    p.wrap(m, SyntaxKind::Ref);
}

/// Parses a mathematical equation: `$x$`, `$ x^2 $`.
fn equation(p: &mut Parser) {
    let m = p.marker();
    p.enter_modes(SyntaxMode::Math, AtNewline::Continue, |p| {
        p.assert(SyntaxKind::Dollar);
        math(p, syntax_set!(Dollar, End));
        p.expect_closing_delimiter(m, SyntaxKind::Dollar);
    });
    p.wrap(m, SyntaxKind::Equation);
}

/// Parses the contents of a mathematical equation: `x^2 + 1`.
fn math(p: &mut Parser, stop_set: SyntaxSet) {
    let m = p.marker();
    math_exprs(p, stop_set);
    p.wrap(m, SyntaxKind::Math);
}

/// Parses a sequence of math expressions. Returns the number of expressions
/// parsed.
fn math_exprs(p: &mut Parser, stop_set: SyntaxSet) -> usize {
    debug_assert!(stop_set.contains(SyntaxKind::End));
    let mut count = 0;
    while !p.at_set(stop_set) {
        if p.at_set(set::MATH_EXPR) {
            math_expr(p);
            count += 1;
        } else {
            p.unexpected();
        }
    }
    count
}

/// Parses a single math expression. This includes math elements like
/// attachments, fractions, and roots, and embedded code expressions.
fn math_expr(p: &mut Parser) {
    math_expr_prec(p, 0, SyntaxKind::End)
}

/// Parses a math expression with at least the given precedence.
fn math_expr_prec(p: &mut Parser, min_prec: usize, stop: SyntaxKind) {
    let m = p.marker();
    let mut continuable = false;
    match p.current() {
        SyntaxKind::Hash => embedded_code_expr(p),
        // The lexer manages creating full FieldAccess nodes if needed.
        SyntaxKind::MathIdent | SyntaxKind::FieldAccess => {
            continuable = true;
            p.eat();
            // Parse a function call for an identifier or field access.
            if min_prec < 3
                && p.directly_at(SyntaxKind::MathText)
                && p.current_text() == "("
            {
                math_args(p);
                p.wrap(m, SyntaxKind::FuncCall);
                continuable = false;
            }
        }

        SyntaxKind::Dot
        | SyntaxKind::Comma
        | SyntaxKind::Semicolon
        | SyntaxKind::RightParen => {
            p.convert_and_eat(SyntaxKind::MathText);
        }

        SyntaxKind::Text | SyntaxKind::MathText | SyntaxKind::MathShorthand => {
            // `a(b)/c` parses as `(a(b))/c` if `a` is continuable.
            continuable = math_class(p.current_text()) == Some(MathClass::Alphabetic)
                || p.current_text().chars().all(char::is_alphabetic);
            if !maybe_delimited(p) {
                p.eat();
            }
        }

        SyntaxKind::Linebreak | SyntaxKind::MathAlignPoint => p.eat(),
        SyntaxKind::Escape | SyntaxKind::Str => {
            continuable = true;
            p.eat();
        }

        SyntaxKind::Root => {
            if min_prec < 3 {
                p.eat();
                let m2 = p.marker();
                math_expr_prec(p, 2, stop);
                math_unparen(p, m2);
                p.wrap(m, SyntaxKind::MathRoot);
            }
        }

        SyntaxKind::Prime => {
            // Means that there is nothing to attach the prime to.
            continuable = true;
            while p.at(SyntaxKind::Prime) {
                let m2 = p.marker();
                p.eat();
                // Eat the group until the space.
                while p.eat_if_direct(SyntaxKind::Prime) {}
                p.wrap(m2, SyntaxKind::MathPrimes);
            }
        }

        _ => p.expected("expression"),
    }

    if continuable && min_prec < 3 && !p.had_trivia() && maybe_delimited(p) {
        p.wrap(m, SyntaxKind::Math);
    }

    // Whether there were _any_ primes in the loop.
    let mut primed = false;

    while !p.end() && !p.at(stop) {
        if p.directly_at(SyntaxKind::MathText) && p.current_text() == "!" {
            p.eat();
            p.wrap(m, SyntaxKind::Math);
            continue;
        }

        let prime_marker = p.marker();
        if p.eat_if_direct(SyntaxKind::Prime) {
            // Eat as many primes as possible.
            while p.eat_if_direct(SyntaxKind::Prime) {}
            p.wrap(prime_marker, SyntaxKind::MathPrimes);

            // Will not be continued, so need to wrap the prime as attachment.
            if p.at(stop) {
                p.wrap(m, SyntaxKind::MathAttach);
            }

            primed = true;
            continue;
        }

        let Some((kind, stop, assoc, mut prec)) = math_op(p.current()) else {
            // No attachments, so we need to wrap primes as attachment.
            if primed {
                p.wrap(m, SyntaxKind::MathAttach);
            }

            break;
        };

        if primed && kind == SyntaxKind::MathFrac {
            p.wrap(m, SyntaxKind::MathAttach);
        }

        if prec < min_prec {
            break;
        }

        match assoc {
            ast::Assoc::Left => prec += 1,
            ast::Assoc::Right => {}
        }

        if kind == SyntaxKind::MathFrac {
            math_unparen(p, m);
        }

        p.eat();
        let m2 = p.marker();
        math_expr_prec(p, prec, stop);
        math_unparen(p, m2);

        if p.eat_if(SyntaxKind::Underscore) || p.eat_if(SyntaxKind::Hat) {
            let m3 = p.marker();
            math_expr_prec(p, prec, SyntaxKind::End);
            math_unparen(p, m3);
        }

        p.wrap(m, kind);
    }
}

/// Precedence and wrapper kinds for the binary math operators.
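///
/// As a worked illustration of these precedences: `$ a/b^2 $` parses as
/// `a/(b^2)`, since attachments (precedence 2) bind tighter than fractions
/// (precedence 1); both attach operators are right-associative, while `/` is
/// left-associative.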
fn math_op(kind: SyntaxKind) -> Option<(SyntaxKind, SyntaxKind, ast::Assoc, usize)> {
    match kind {
        SyntaxKind::Underscore => {
            Some((SyntaxKind::MathAttach, SyntaxKind::Hat, ast::Assoc::Right, 2))
        }
        SyntaxKind::Hat => {
            Some((SyntaxKind::MathAttach, SyntaxKind::Underscore, ast::Assoc::Right, 2))
        }
        SyntaxKind::Slash => {
            Some((SyntaxKind::MathFrac, SyntaxKind::End, ast::Assoc::Left, 1))
        }
        _ => None,
    }
}

/// Try to parse delimiters based on the current token's unicode math class.
fn maybe_delimited(p: &mut Parser) -> bool {
    let open = math_class(p.current_text()) == Some(MathClass::Opening);
    if open {
        math_delimited(p);
    }
    open
}

/// Parse matched delimiters in math: `[x + y]`.
fn math_delimited(p: &mut Parser) {
    let m = p.marker();
    p.eat();
    let m2 = p.marker();
    while !p.at_set(syntax_set!(Dollar, End)) {
        if math_class(p.current_text()) == Some(MathClass::Closing) {
            p.wrap(m2, SyntaxKind::Math);
            // We could be at the shorthand `|]`, which shouldn't be converted
            // to a `Text` kind.
            if p.at(SyntaxKind::RightParen) {
                p.convert_and_eat(SyntaxKind::MathText);
            } else {
                p.eat();
            }
            p.wrap(m, SyntaxKind::MathDelimited);
            return;
        }

        if p.at_set(set::MATH_EXPR) {
            math_expr(p);
        } else {
            p.unexpected();
        }
    }

    p.wrap(m, SyntaxKind::Math);
}

/// Remove one set of parentheses (if any) from a previously parsed expression
/// by converting to non-expression SyntaxKinds.
fn math_unparen(p: &mut Parser, m: Marker) {
    let Some(node) = p.nodes.get_mut(m.0) else { return };
    if node.kind() != SyntaxKind::MathDelimited {
        return;
    }

    if let [first, .., last] = node.children_mut()
        && first.text() == "("
        && last.text() == ")"
    {
        first.convert_to_kind(SyntaxKind::LeftParen);
        last.convert_to_kind(SyntaxKind::RightParen);
        // Only convert if we did have regular parens.
        node.convert_to_kind(SyntaxKind::Math);
    }
}

/// The unicode math class of a string. Only returns `Some` if `text` has
/// exactly one unicode character or is a math shorthand string (currently just
/// `[|`, `||`, `|]`) and then only returns `Some` if there is a math class
/// defined for that character.
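///
/// A sketch of the expected behavior (illustrative, not a doctest):
///
/// ```ignore
/// assert_eq!(math_class("("), Some(MathClass::Opening));
/// assert_eq!(math_class("[|"), Some(MathClass::Opening));
/// assert_eq!(math_class("ab"), None); // more than one character
/// ```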
fn math_class(text: &str) -> Option<MathClass> {
    match text {
        "[|" => return Some(MathClass::Opening),
        "|]" => return Some(MathClass::Closing),
        "||" => return Some(MathClass::Fence),
        _ => {}
    }

    let mut chars = text.chars();
    chars
        .next()
        .filter(|_| chars.next().is_none())
        .and_then(default_math_class)
}

/// Parse an argument list in math: `(a, b; c, d; size: #50%)`.
fn math_args(p: &mut Parser) {
    let m = p.marker();
    p.convert_and_eat(SyntaxKind::LeftParen);

    let mut positional = true;
    let mut has_arrays = false;

    let mut maybe_array_start = p.marker();
    let mut seen = FxHashSet::default();
    while !p.at_set(syntax_set!(End, Dollar, RightParen)) {
        positional = math_arg(p, &mut seen);

        match p.current() {
            SyntaxKind::Comma => {
                p.eat();
                if !positional {
                    maybe_array_start = p.marker();
                }
            }
            SyntaxKind::Semicolon => {
                if !positional {
                    maybe_array_start = p.marker();
                }

                // Parses an array: `a, b, c;`.
                // The semicolon merges preceding arguments separated by commas
                // into an array argument.
                p.wrap(maybe_array_start, SyntaxKind::Array);
                p.eat();
                maybe_array_start = p.marker();
                has_arrays = true;
            }
            SyntaxKind::End | SyntaxKind::Dollar | SyntaxKind::RightParen => {}
            _ => p.expected("comma or semicolon"),
        }
    }

    // Check if we need to wrap the preceding arguments in an array.
    if maybe_array_start != p.marker() && has_arrays && positional {
        p.wrap(maybe_array_start, SyntaxKind::Array);
    }

    p.expect_closing_delimiter(m, SyntaxKind::RightParen);
    p.wrap(m, SyntaxKind::Args);
}

/// Parses a single argument in a math argument list.
///
/// Returns whether the parsed argument was positional or not.
fn math_arg<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>) -> bool {
    let m = p.marker();
    let start = p.current_start();

    if p.at(SyntaxKind::Dot) {
        // Parses a spread argument: `..args`.
        if let Some(spread) = p.lexer.maybe_math_spread_arg(start) {
            p.token.node = spread;
            p.eat();
            math_expr(p);
            p.wrap(m, SyntaxKind::Spread);
            return true;
        }
    }

    let mut positional = true;
    if p.at_set(syntax_set!(MathText, MathIdent, Underscore)) {
        // Parses a named argument: `thickness: #12pt`.
        if let Some(named) = p.lexer.maybe_math_named_arg(start) {
            p.token.node = named;
            let text = p.current_text();
            p.eat();
            p.convert_and_eat(SyntaxKind::Colon);
            if !seen.insert(text) {
                p[m].convert_to_error(eco_format!("duplicate argument: {text}"));
            }
            positional = false;
        }
    }

    // Parses a normal positional argument.
    let arg = p.marker();
    let count = math_exprs(p, syntax_set!(End, Dollar, Comma, Semicolon, RightParen));
    if count == 0 {
        // Named argument requires a value.
        if !positional {
            p.expected("expression");
        }

        // Flush trivia so that the new empty Math node will be wrapped _inside_
        // any `SyntaxKind::Array` elements created in `math_args`.
        // (And if we don't follow by wrapping in an array, it has no effect.)
        // The difference in node layout without this would look like:
        // Expression: `$ mat( ;) $`
        // - Correct:   [ .., Space(" "), Array[Math[], ], Semicolon(";"), .. ]
        // - Incorrect: [ .., Math[], Array[], Space(" "), Semicolon(";"), .. ]
        p.flush_trivia();
    }

    // Wrap math function arguments to join adjacent math content or create an
    // empty 'Math' node for when we have 0 args. We don't wrap when
    // `count == 1`, since wrapping would change the type of the expression
    // from potentially non-content to content. Ex: `$ func(#12pt) $` would
    // change the type from size to content if wrapped.
    if count != 1 {
        p.wrap(arg, SyntaxKind::Math);
    }

    if !positional {
        p.wrap(m, SyntaxKind::Named);
    }
    positional
}

/// Parses the contents of a code block.
fn code(p: &mut Parser, stop_set: SyntaxSet) {
    let m = p.marker();
    code_exprs(p, stop_set);
    p.wrap(m, SyntaxKind::Code);
}

/// Parses a sequence of code expressions.
fn code_exprs(p: &mut Parser, stop_set: SyntaxSet) {
    debug_assert!(stop_set.contains(SyntaxKind::End));
    while !p.at_set(stop_set) {
        p.with_nl_mode(AtNewline::ContextualContinue, |p| {
            if !p.at_set(set::CODE_EXPR) {
                p.unexpected();
                return;
            }
            code_expr(p);
            if !p.at_set(stop_set) && !p.eat_if(SyntaxKind::Semicolon) {
                p.expected("semicolon or line break");
                if p.at(SyntaxKind::Label) {
                    p.hint("labels can only be applied in markup mode");
                    p.hint("try wrapping your code in a markup block (`[ ]`)");
                }
            }
        });
    }
}

/// Parses an atomic code expression embedded in markup or math.
fn embedded_code_expr(p: &mut Parser) {
    p.enter_modes(SyntaxMode::Code, AtNewline::Stop, |p| {
        p.assert(SyntaxKind::Hash);
        if p.had_trivia() || p.end() {
            p.expected("expression");
            return;
        }

        let stmt = p.at_set(set::STMT);
        let at = p.at_set(set::ATOMIC_CODE_EXPR);
        code_expr_prec(p, true, 0);

        // Consume error for things like `#12p` or `#"abc\"`.#
        if !at {
            p.unexpected();
        }

        let semi = (stmt || p.directly_at(SyntaxKind::Semicolon))
            && p.eat_if(SyntaxKind::Semicolon);

        if stmt && !semi && !p.end() && !p.at(SyntaxKind::RightBracket) {
            p.expected("semicolon or line break");
        }
    });
}

/// Parses a single code expression.
fn code_expr(p: &mut Parser) {
    code_expr_prec(p, false, 0)
}

/// Parses a code expression with at least the given precedence.
fn code_expr_prec(p: &mut Parser, atomic: bool, min_prec: u8) {
    let m = p.marker();
    if !atomic && p.at_set(set::UNARY_OP) {
        let op = ast::UnOp::from_kind(p.current()).unwrap();
        p.eat();
        code_expr_prec(p, atomic, op.precedence());
        p.wrap(m, SyntaxKind::Unary);
    } else {
        code_primary(p, atomic);
    }

    loop {
        if p.directly_at(SyntaxKind::LeftParen) || p.directly_at(SyntaxKind::LeftBracket)
        {
            args(p);
            p.wrap(m, SyntaxKind::FuncCall);
            continue;
        }

        let at_field_or_method = p.directly_at(SyntaxKind::Dot)
            && p.lexer.clone().next().0 == SyntaxKind::Ident;

        if atomic && !at_field_or_method {
            break;
        }

        if p.eat_if(SyntaxKind::Dot) {
            p.expect(SyntaxKind::Ident);
            p.wrap(m, SyntaxKind::FieldAccess);
            continue;
        }

        let binop = if p.at_set(set::BINARY_OP) {
            ast::BinOp::from_kind(p.current())
        } else if min_prec <= ast::BinOp::NotIn.precedence() && p.eat_if(SyntaxKind::Not)
        {
            if p.at(SyntaxKind::In) {
                Some(ast::BinOp::NotIn)
            } else {
                p.expected("keyword `in`");
                break;
            }
        } else {
            None
        };

        if let Some(op) = binop {
            let mut prec = op.precedence();
            if prec < min_prec {
                break;
            }

            match op.assoc() {
                ast::Assoc::Left => prec += 1,
                ast::Assoc::Right => {}
            }

            p.eat();
            code_expr_prec(p, false, prec);
            p.wrap(m, SyntaxKind::Binary);
            continue;
        }

        break;
    }
}

/// Parses a primary in a code expression. These are the atoms that unary and
/// binary operations, function calls, and field accesses start with / are
/// composed of.
fn code_primary(p: &mut Parser, atomic: bool) {
    let m = p.marker();
    match p.current() {
        SyntaxKind::Ident => {
            p.eat();
            if !atomic && p.at(SyntaxKind::Arrow) {
                p.wrap(m, SyntaxKind::Params);
                p.assert(SyntaxKind::Arrow);
                code_expr(p);
                p.wrap(m, SyntaxKind::Closure);
            }
        }
        SyntaxKind::Underscore if !atomic => {
            p.eat();
            if p.at(SyntaxKind::Arrow) {
                p.wrap(m, SyntaxKind::Params);
                p.eat();
                code_expr(p);
                p.wrap(m, SyntaxKind::Closure);
            } else if p.eat_if(SyntaxKind::Eq) {
                code_expr(p);
                p.wrap(m, SyntaxKind::DestructAssignment);
            } else {
                p[m].expected("expression");
            }
        }

        SyntaxKind::LeftBrace => code_block(p),
        SyntaxKind::LeftBracket => content_block(p),
        SyntaxKind::LeftParen => expr_with_paren(p, atomic),
        SyntaxKind::Dollar => equation(p),
        SyntaxKind::Let => let_binding(p),
        SyntaxKind::Set => set_rule(p),
        SyntaxKind::Show => show_rule(p),
        SyntaxKind::Context => contextual(p, atomic),
        SyntaxKind::If => conditional(p),
        SyntaxKind::While => while_loop(p),
        SyntaxKind::For => for_loop(p),
        SyntaxKind::Import => module_import(p),
        SyntaxKind::Include => module_include(p),
        SyntaxKind::Break => break_stmt(p),
        SyntaxKind::Continue => continue_stmt(p),
        SyntaxKind::Return => return_stmt(p),

        SyntaxKind::Raw => p.eat(), // Raw is handled entirely in the Lexer.

        SyntaxKind::None
        | SyntaxKind::Auto
        | SyntaxKind::Int
        | SyntaxKind::Float
        | SyntaxKind::Bool
        | SyntaxKind::Numeric
        | SyntaxKind::Str
        | SyntaxKind::Label => p.eat(),

        _ => p.expected("expression"),
    }
}

/// Reparses a full content or code block.
pub(super) fn reparse_block(text: &str, range: Range<usize>) -> Option<SyntaxNode> {
    let mut p = Parser::new(text, range.start, SyntaxMode::Code);
    assert!(p.at(SyntaxKind::LeftBracket) || p.at(SyntaxKind::LeftBrace));
    block(&mut p);
    (p.balanced && p.prev_end() == range.end)
        .then(|| p.finish().into_iter().next().unwrap())
}

/// Parses a content or code block.
fn block(p: &mut Parser) {
    match p.current() {
        SyntaxKind::LeftBracket => content_block(p),
        SyntaxKind::LeftBrace => code_block(p),
        _ => p.expected("block"),
    }
}

/// Parses a code block: `{ let x = 1; x + 2 }`.
fn code_block(p: &mut Parser) {
    let m = p.marker();
    p.enter_modes(SyntaxMode::Code, AtNewline::Continue, |p| {
        p.assert(SyntaxKind::LeftBrace);
        code(p, syntax_set!(RightBrace, RightBracket, RightParen, End));
        p.expect_closing_delimiter(m, SyntaxKind::RightBrace);
    });
    p.wrap(m, SyntaxKind::CodeBlock);
}

/// Parses a content block: `[*Hi* there!]`.
fn content_block(p: &mut Parser) {
    let m = p.marker();
    p.enter_modes(SyntaxMode::Markup, AtNewline::Continue, |p| {
        p.assert(SyntaxKind::LeftBracket);
        markup(p, true, true, syntax_set!(RightBracket, End));
        p.expect_closing_delimiter(m, SyntaxKind::RightBracket);
    });
    p.wrap(m, SyntaxKind::ContentBlock);
}

/// Parses a let binding: `let x = 1`.
fn let_binding(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Let);

    let m2 = p.marker();
    let mut closure = false;
    let mut other = false;

    if p.eat_if(SyntaxKind::Ident) {
        if p.directly_at(SyntaxKind::LeftParen) {
            params(p);
            closure = true;
        }
    } else {
        pattern(p, false, &mut FxHashSet::default(), None);
        other = true;
    }

    let f = if closure || other { Parser::expect } else { Parser::eat_if };
    if f(p, SyntaxKind::Eq) {
        code_expr(p);
    }

    if closure {
        p.wrap(m2, SyntaxKind::Closure);
    }

    p.wrap(m, SyntaxKind::LetBinding);
}

/// Parses a set rule: `set text(...)`.
fn set_rule(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Set);

    let m2 = p.marker();
    p.expect(SyntaxKind::Ident);
    while p.eat_if(SyntaxKind::Dot) {
        p.expect(SyntaxKind::Ident);
        p.wrap(m2, SyntaxKind::FieldAccess);
    }

    args(p);
    if p.eat_if(SyntaxKind::If) {
        code_expr(p);
    }
    p.wrap(m, SyntaxKind::SetRule);
}

/// Parses a show rule: `show heading: it => emph(it.body)`.
fn show_rule(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Show);
    let m2 = p.before_trivia();

    if !p.at(SyntaxKind::Colon) {
        code_expr(p);
    }

    if p.eat_if(SyntaxKind::Colon) {
        code_expr(p);
    } else {
        p.expected_at(m2, "colon");
    }

    p.wrap(m, SyntaxKind::ShowRule);
}

/// Parses a contextual expression: `context text.lang`.
fn contextual(p: &mut Parser, atomic: bool) {
    let m = p.marker();
    p.assert(SyntaxKind::Context);
    code_expr_prec(p, atomic, 0);
    p.wrap(m, SyntaxKind::Contextual);
}

/// Parses an if-else conditional: `if x { y } else { z }`.
fn conditional(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::If);
    code_expr(p);
    block(p);
    if p.eat_if(SyntaxKind::Else) {
        if p.at(SyntaxKind::If) {
            conditional(p);
        } else {
            block(p);
        }
    }
    p.wrap(m, SyntaxKind::Conditional);
}

/// Parses a while loop: `while x { y }`.
fn while_loop(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::While);
    code_expr(p);
    block(p);
    p.wrap(m, SyntaxKind::WhileLoop);
}

/// Parses a for loop: `for x in y { z }`.
fn for_loop(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::For);

    let mut seen = FxHashSet::default();
    pattern(p, false, &mut seen, None);

    if p.at(SyntaxKind::Comma) {
        let node = p.eat_and_get();
        node.unexpected();
        node.hint("destructuring patterns must be wrapped in parentheses");
        if p.at_set(set::PATTERN) {
            pattern(p, false, &mut seen, None);
        }
    }

    p.expect(SyntaxKind::In);
    code_expr(p);
    block(p);
    p.wrap(m, SyntaxKind::ForLoop);
}

/// Parses a module import: `import "utils.typ": a, b, c`.
fn module_import(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Import);
    code_expr(p);
    if p.eat_if(SyntaxKind::As) {
        // Allow renaming a full module import.
        // If items are included, both the full module and the items are
        // imported at the same time.
        p.expect(SyntaxKind::Ident);
    }

    if p.eat_if(SyntaxKind::Colon) {
        if p.at(SyntaxKind::LeftParen) {
            p.with_nl_mode(AtNewline::Continue, |p| {
                let m2 = p.marker();
                p.assert(SyntaxKind::LeftParen);

                import_items(p);

                p.expect_closing_delimiter(m2, SyntaxKind::RightParen);
            });
        } else if !p.eat_if(SyntaxKind::Star) {
            import_items(p);
        }
    }

    p.wrap(m, SyntaxKind::ModuleImport);
}

/// Parses items to import from a module: `a, b, c`.
fn import_items(p: &mut Parser) {
    let m = p.marker();
    while !p.current().is_terminator() {
        let item_marker = p.marker();
        if !p.eat_if(SyntaxKind::Ident) {
            p.unexpected();
        }

        // Nested import path: `a.b.c`
        while p.eat_if(SyntaxKind::Dot) {
            p.expect(SyntaxKind::Ident);
        }

        p.wrap(item_marker, SyntaxKind::ImportItemPath);

        // Rename imported item.
        if p.eat_if(SyntaxKind::As) {
            p.expect(SyntaxKind::Ident);
            p.wrap(item_marker, SyntaxKind::RenamedImportItem);
        }

        if !p.current().is_terminator() {
            p.expect(SyntaxKind::Comma);
        }
    }

    p.wrap(m, SyntaxKind::ImportItems);
}

/// Parses a module include: `include "chapter1.typ"`.
fn module_include(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Include);
    code_expr(p);
    p.wrap(m, SyntaxKind::ModuleInclude);
}

/// Parses a break from a loop: `break`.
fn break_stmt(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Break);
    p.wrap(m, SyntaxKind::LoopBreak);
}

/// Parses a continue in a loop: `continue`.
fn continue_stmt(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Continue);
    p.wrap(m, SyntaxKind::LoopContinue);
}

/// Parses a return from a function: `return`, `return x + 1`.
fn return_stmt(p: &mut Parser) {
    let m = p.marker();
    p.assert(SyntaxKind::Return);
    if p.at_set(set::CODE_EXPR) {
        code_expr(p);
    }
    p.wrap(m, SyntaxKind::FuncReturn);
}

/// An expression that starts with a parenthesis.
fn expr_with_paren(p: &mut Parser, atomic: bool) {
    if atomic {
        // Atomic expressions aren't modified by operators that follow them, so
        // our first guess of array/dict will be correct.
        parenthesized_or_array_or_dict(p);
        return;
    }

    // If we've seen this position before and have a memoized result, restore it
    // and return. Otherwise, get a key to this position and a checkpoint to
    // restart from in case we make a wrong prediction.
    let Some((memo_key, checkpoint)) = p.restore_memo_or_checkpoint() else { return };
    // The node length from when we restored.
    let prev_len = checkpoint.node_len;

    // When we reach a '(', we can't be sure what it is. First, we attempt to
    // parse as a simple parenthesized expression, array, or dictionary as
    // these are the most likely things. We can handle all of those in a single
    // pass.
    let kind = parenthesized_or_array_or_dict(p);

    // If, however, '=>' or '=' follows, we must backtrack and reparse as either
    // a parameter list or a destructuring. To be able to do that, we created a
    // parser checkpoint before our speculative parse, which we can restore.
    //
    // However, naive backtracking has a fatal flaw: It can lead to exponential
    // parsing time if we are constantly getting things wrong in a nested
    // scenario. The particular failure case for parameter parsing is the
    // following: `(x: (x: (x) => y) => y) => y`
    //
    // Such a structure will reparse over and over again recursively, leading to
    // a running time of O(2^n) for nesting depth n. To prevent this, we perform
    // a simple trick: When we have done the mistake of picking the wrong path
    // once and have subsequently parsed correctly, we save the result of that
    // correct parsing in the `p.memo` map. When we reach the same position
    // again, we can then just restore this result. In this way, no
    // parenthesized expression is parsed more than twice, leading to a worst
    // case running time of O(2n).
    if p.at(SyntaxKind::Arrow) {
        p.restore(checkpoint);
        let m = p.marker();
        params(p);
        if !p.expect(SyntaxKind::Arrow) {
            return;
        }
        code_expr(p);
        p.wrap(m, SyntaxKind::Closure);
    } else if p.at(SyntaxKind::Eq) && kind != SyntaxKind::Parenthesized {
        p.restore(checkpoint);
        let m = p.marker();
        destructuring_or_parenthesized(p, true, &mut FxHashSet::default());
        if !p.expect(SyntaxKind::Eq) {
            return;
        }
        code_expr(p);
        p.wrap(m, SyntaxKind::DestructAssignment);
    } else {
        return;
    }

    // Memoize result if we backtracked.
    p.memoize_parsed_nodes(memo_key, prev_len);
}

/// Parses either
/// - a parenthesized expression: `(1 + 2)`, or
/// - an array: `(1, "hi", 12cm)`, or
/// - a dictionary: `(thickness: 3pt, dash: "solid")`.
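///
/// A sketch of how a few inputs are classified (illustrative):
/// - `(1 + 2)` wraps as `Parenthesized`
/// - `(1,)` or `(1, 2)` wraps as `Array`
/// - `(a: 1)` or `(:)` wraps as `Dict`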
fn parenthesized_or_array_or_dict(p: &mut Parser) -> SyntaxKind {
    let mut state = GroupState {
        count: 0,
        maybe_just_parens: true,
        kind: None,
        seen: FxHashSet::default(),
    };

    // An edge case with parens is whether we can interpret a leading spread
    // expression as a dictionary, e.g. if we want `(..dict1, ..dict2)` to join
    // the two dicts.
    //
    // The issue is that we decide on the type of the parenthesized expression
    // here in the parser by the `SyntaxKind` we wrap with, instead of in eval
    // based on the type of the spread item.
    //
    // The current fix is that we allow a leading colon to force the
    // parenthesized value into a dict:
    // - `(..arr1, ..arr2)` is wrapped as an `Array`.
    // - `(: ..dict1, ..dict2)` is wrapped as a `Dict`.
    //
    // This does allow some unexpected expressions, such as `(: key: val)`, but
    // it's currently intentional.
    let m = p.marker();
    p.with_nl_mode(AtNewline::Continue, |p| {
        p.assert(SyntaxKind::LeftParen);
        if p.eat_if(SyntaxKind::Colon) {
            state.kind = Some(SyntaxKind::Dict);
        }

        while !p.current().is_terminator() {
            if !p.at_set(set::ARRAY_OR_DICT_ITEM) {
                p.unexpected();
                continue;
            }

            array_or_dict_item(p, &mut state);
            state.count += 1;

            if !p.current().is_terminator() && p.expect(SyntaxKind::Comma) {
                state.maybe_just_parens = false;
            }
        }

        p.expect_closing_delimiter(m, SyntaxKind::RightParen);
    });

    let kind = if state.maybe_just_parens && state.count == 1 {
        SyntaxKind::Parenthesized
    } else {
        state.kind.unwrap_or(SyntaxKind::Array)
    };

    p.wrap(m, kind);
    kind
}

/// State for array/dictionary parsing.
struct GroupState {
    count: usize,
    /// Whether this is just a single expression in parens: `(a)`. Single
    /// element arrays require an explicit comma: `(a,)`, unless we're
    /// spreading: `(..a)`.
    maybe_just_parens: bool,
    /// The `SyntaxKind` to wrap as (if we've figured it out yet).
    kind: Option<SyntaxKind>,
    /// Store named arguments so we can give an error if they're repeated.
    seen: FxHashSet<EcoString>,
}

/// Parses a single item in an array or dictionary.
fn array_or_dict_item(p: &mut Parser, state: &mut GroupState) {
    let m = p.marker();

    if p.eat_if(SyntaxKind::Dots) {
        // Parses a spread item: `..item`.
        code_expr(p);
        p.wrap(m, SyntaxKind::Spread);
        state.maybe_just_parens = false;
        return;
    }

    code_expr(p);

    if p.eat_if(SyntaxKind::Colon) {
        // Parses a named/keyed pair: `name: item` or `"key": item`.
        code_expr(p);

        let node = &mut p[m];
        let pair_kind = match node.kind() {
            SyntaxKind::Ident => SyntaxKind::Named,
            _ => SyntaxKind::Keyed,
        };

        if let Some(key) = match node.cast::<ast::Expr>() {
            Some(ast::Expr::Ident(ident)) => Some(ident.get().clone()),
            Some(ast::Expr::Str(s)) => Some(s.get()),
            _ => None,
        } && !state.seen.insert(key.clone())
        {
            node.convert_to_error(eco_format!("duplicate key: {key}"));
        }

        p.wrap(m, pair_kind);
        state.maybe_just_parens = false;

        if state.kind == Some(SyntaxKind::Array) {
            p[m].expected("expression");
        } else {
            state.kind = Some(SyntaxKind::Dict);
        }
    } else {
        // Parses a positional item.
        if state.kind == Some(SyntaxKind::Dict) {
            p[m].expected("named or keyed pair");
        } else {
            state.kind = Some(SyntaxKind::Array)
        }
    }
}

/// Parses a function call's argument list: `(12pt, y)`.
fn args(p: &mut Parser) {
    if !p.directly_at(SyntaxKind::LeftParen) && !p.directly_at(SyntaxKind::LeftBracket) {
        p.expected("argument list");
        if p.at(SyntaxKind::LeftParen) || p.at(SyntaxKind::LeftBracket) {
            p.hint("there may not be any spaces before the argument list");
        }
    }

    let m = p.marker();
    if p.at(SyntaxKind::LeftParen) {
        let m2 = p.marker();
        p.with_nl_mode(AtNewline::Continue, |p| {
            p.assert(SyntaxKind::LeftParen);

            let mut seen = FxHashSet::default();
            while !p.current().is_terminator() {
                if !p.at_set(set::ARG) {
                    p.unexpected();
                    continue;
                }

                arg(p, &mut seen);

                if !p.current().is_terminator() {
                    p.expect(SyntaxKind::Comma);
                }
            }

            p.expect_closing_delimiter(m2, SyntaxKind::RightParen);
        });
    }

    while p.directly_at(SyntaxKind::LeftBracket) {
        content_block(p);
    }

    p.wrap(m, SyntaxKind::Args);
}

/// Parses a single argument in an argument list.
fn arg<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>) {
    let m = p.marker();

    // Parses a spread argument: `..args`.
    if p.eat_if(SyntaxKind::Dots) {
        code_expr(p);
        p.wrap(m, SyntaxKind::Spread);
        return;
    }

    // Parses a normal positional argument or an argument name.
    let was_at_expr = p.at_set(set::CODE_EXPR);
    let text = p.current_text();
    code_expr(p);

    // Parses a named argument: `thickness: 12pt`.
    if p.eat_if(SyntaxKind::Colon) {
        // Recover from bad argument name.
        if was_at_expr {
            if p[m].kind() != SyntaxKind::Ident {
                p[m].expected("identifier");
            } else if !seen.insert(text) {
                p[m].convert_to_error(eco_format!("duplicate argument: {text}"));
            }
        }

        code_expr(p);
        p.wrap(m, SyntaxKind::Named);
    }
}

/// Parses a closure's parameters: `(x, y)`.
fn params(p: &mut Parser) {
    let m = p.marker();
    p.with_nl_mode(AtNewline::Continue, |p| {
        p.assert(SyntaxKind::LeftParen);

        let mut seen = FxHashSet::default();
        let mut sink = false;

        while !p.current().is_terminator() {
            if !p.at_set(set::PARAM) {
                p.unexpected();
                continue;
            }

            param(p, &mut seen, &mut sink);

            if !p.current().is_terminator() {
                p.expect(SyntaxKind::Comma);
            }
        }

        p.expect_closing_delimiter(m, SyntaxKind::RightParen);
    });
    p.wrap(m, SyntaxKind::Params);
}

/// Parses a single parameter in a parameter list.
fn param<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>, sink: &mut bool) {
    let m = p.marker();

    // Parses argument sink: `..sink`.
    if p.eat_if(SyntaxKind::Dots) {
        if p.at_set(set::PATTERN_LEAF) {
            pattern_leaf(p, false, seen, Some("parameter"));
        }
        p.wrap(m, SyntaxKind::Spread);
        if mem::replace(sink, true) {
            p[m].convert_to_error("only one argument sink is allowed");
        }
        return;
    }

    // Parses a normal positional parameter or a parameter name.
    let was_at_pat = p.at_set(set::PATTERN);
    pattern(p, false, seen, Some("parameter"));

    // Parses a named parameter: `thickness: 12pt`.
    if p.eat_if(SyntaxKind::Colon) {
        // Recover from bad parameter name.
        if was_at_pat && p[m].kind() != SyntaxKind::Ident {
            p[m].expected("identifier");
        }

        code_expr(p);
        p.wrap(m, SyntaxKind::Named);
    }
}

/// Parses a binding or reassignment pattern.
fn pattern<'s>(
    p: &mut Parser<'s>,
    reassignment: bool,
    seen: &mut FxHashSet<&'s str>,
    dupe: Option<&'s str>,
) {
    match p.current() {
        SyntaxKind::Underscore => p.eat(),
        SyntaxKind::LeftParen => destructuring_or_parenthesized(p, reassignment, seen),
        _ => pattern_leaf(p, reassignment, seen, dupe),
    }
}

/// Parses a destructuring pattern or just a parenthesized pattern.
fn destructuring_or_parenthesized<'s>(
    p: &mut Parser<'s>,
    reassignment: bool,
    seen: &mut FxHashSet<&'s str>,
) {
    let mut sink = false;
    let mut count = 0;
    let mut maybe_just_parens = true;

    let m = p.marker();
    p.with_nl_mode(AtNewline::Continue, |p| {
        p.assert(SyntaxKind::LeftParen);

        while !p.current().is_terminator() {
            if !p.at_set(set::DESTRUCTURING_ITEM) {
                p.unexpected();
                continue;
            }

            destructuring_item(p, reassignment, seen, &mut maybe_just_parens, &mut sink);
            count += 1;

            if !p.current().is_terminator() && p.expect(SyntaxKind::Comma) {
                maybe_just_parens = false;
            }
        }

        p.expect_closing_delimiter(m, SyntaxKind::RightParen);
    });

    if maybe_just_parens && count == 1 && !sink {
        p.wrap(m, SyntaxKind::Parenthesized);
    } else {
        p.wrap(m, SyntaxKind::Destructuring);
    }
}

/// Parses an item in a destructuring pattern.
fn destructuring_item<'s>(
    p: &mut Parser<'s>,
    reassignment: bool,
    seen: &mut FxHashSet<&'s str>,
    maybe_just_parens: &mut bool,
    sink: &mut bool,
) {
    let m = p.marker();

    // Parse destructuring sink: `..rest`.
    if p.eat_if(SyntaxKind::Dots) {
        if p.at_set(set::PATTERN_LEAF) {
            pattern_leaf(p, reassignment, seen, None);
        }
        p.wrap(m, SyntaxKind::Spread);
        if mem::replace(sink, true) {
            p[m].convert_to_error("only one destructuring sink is allowed");
        }
        return;
    }

    // Parse a normal positional pattern or a destructuring key.
    let was_at_pat = p.at_set(set::PATTERN);

    // We must use a full checkpoint here (can't just clone the lexer) because
    // there may be trivia between the identifier and the colon we need to skip.
    let checkpoint = p.checkpoint();
    if !(p.eat_if(SyntaxKind::Ident) && p.at(SyntaxKind::Colon)) {
        p.restore(checkpoint);
        pattern(p, reassignment, seen, None);
    }

    // Parse named destructuring item.
    if p.eat_if(SyntaxKind::Colon) {
        // Recover from bad named destructuring.
        if was_at_pat && p[m].kind() != SyntaxKind::Ident {
            p[m].expected("identifier");
        }

        pattern(p, reassignment, seen, None);
        p.wrap(m, SyntaxKind::Named);
        *maybe_just_parens = false;
    }
}

/// Parses a leaf in a pattern - either an identifier or an expression
/// depending on whether it's a binding or reassignment pattern.
fn pattern_leaf<'s>(
    p: &mut Parser<'s>,
    reassignment: bool,
    seen: &mut FxHashSet<&'s str>,
    dupe: Option<&'s str>,
) {
    if p.current().is_keyword() {
        p.eat_and_get().expected("pattern");
        return;
    } else if !p.at_set(set::PATTERN_LEAF) {
        p.expected("pattern");
        return;
    }

    let m = p.marker();
    let text = p.current_text();

    // We parse an atomic expression even though we only want an identifier for
    // better error recovery. We can mark the whole expression as unexpected
    // instead of going through its pieces one by one.
    code_expr_prec(p, true, 0);

    if !reassignment {
        let node = &mut p[m];
        if node.kind() == SyntaxKind::Ident {
            if !seen.insert(text) {
                node.convert_to_error(eco_format!(
                    "duplicate {}: {text}",
                    dupe.unwrap_or("binding"),
                ));
            }
        } else {
            node.expected("pattern");
        }
    }
}

/// Manages parsing a stream of tokens into a tree of [`SyntaxNode`]s.
///
/// The implementation presents an interface that investigates a current `token`
/// with a [`SyntaxKind`] and can take one of the following actions:
///
/// 1. Eat a token: push `token` onto the `nodes` vector as a [leaf
///    node](`SyntaxNode::leaf`) and prepare a new `token` by calling into the
///    lexer.
/// 2. Wrap nodes from a marker to the end of `nodes` (excluding `token` and any
///    attached trivia) into an [inner node](`SyntaxNode::inner`) of a specific
///    `SyntaxKind`.
/// 3. Produce or convert nodes into an [error node](`SyntaxNode::error`) when
///    something expected is missing or something unexpected is found.
///
/// Overall the parser produces a nested tree of SyntaxNodes as a "_Concrete_
/// Syntax Tree." The raw Concrete Syntax Tree should contain the entire source
/// text, and is used as-is for e.g. syntax highlighting and IDE features. In
/// `ast.rs` the CST is interpreted as a lazy view over an "_Abstract_ Syntax
/// Tree." The AST module skips over irrelevant tokens -- whitespace, comments,
/// code parens, commas in function args, etc. -- as it iterates through the
/// tree.
///
/// ### Modes
///
/// The parser manages the transitions between the three modes of Typst through
/// [syntax modes](`SyntaxMode`) and [newline modes](`AtNewline`).
///
/// The syntax modes map to the three Typst modes and are stored in the lexer,
/// changing which `SyntaxKind`s it will generate.
///
/// The newline mode is used to determine whether a newline should end the
/// current expression. If so, the parser temporarily changes `token`'s kind to
/// a fake [`SyntaxKind::End`]. When the parser exits the mode the original
/// `SyntaxKind` is restored.
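///
/// As an illustration of this interface, a parsing function typically follows
/// the marker/wrap pattern sketched below (a simplified sketch, not a real
/// rule from this file):
///
/// ```ignore
/// fn thing(p: &mut Parser) {
///     let m = p.marker();            // remember where this node starts
///     p.assert(SyntaxKind::Star);    // eat the opening token
///     // ... parse the contents ...
///     p.wrap(m, SyntaxKind::Strong); // wrap everything since `m`
/// }
/// ```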
1528struct Parser<'s> {
1529    /// The source text shared with the lexer.
1530    text: &'s str,
1531    /// A lexer over the source text with multiple modes. Defines the boundaries
1532    /// of tokens and determines their [`SyntaxKind`]. Contains the [`SyntaxMode`]
1533    /// defining our current Typst mode.
1534    lexer: Lexer<'s>,
1535    /// The newline mode: whether to insert a temporary end at newlines.
1536    nl_mode: AtNewline,
1537    /// The current token under inspection, not yet present in `nodes`. This
1538    /// acts like a single item of lookahead for the parser.
1539    ///
1540    /// When wrapping, this is _not_ included in the wrapped nodes.
1541    token: Token,
1542    /// Whether the parser has the expected set of open/close delimiters. This
1543    /// only ever transitions from `true` to `false`.
1544    balanced: bool,
1545    /// Nodes representing the concrete syntax tree of previously parsed text.
1546    /// In Code and Math, includes previously parsed trivia, but not `token`.
1547    nodes: Vec<SyntaxNode>,
1548    /// Parser checkpoints for a given text index. Used for efficient parser
1549    /// backtracking similar to packrat parsing. See comments above in
1550    /// [`expr_with_paren`].
1551    memo: MemoArena,
1552}
1553
1554/// A single token returned from the lexer with a cached [`SyntaxKind`] and a
1555/// record of preceding trivia.
1556#[derive(Debug, Clone)]
1557struct Token {
1558    /// The [`SyntaxKind`] of the current token.
1559    kind: SyntaxKind,
1560    /// The [`SyntaxNode`] of the current token, ready to be eaten and pushed
1561    /// onto the end of `nodes`.
1562    node: SyntaxNode,
1563    /// The number of preceding trivia before this token.
1564    n_trivia: usize,
1565    /// Whether this token's preceding trivia contained a newline.
1566    newline: Option<Newline>,
1567    /// The index into `text` of the start of our current token (the end is
1568    /// stored as the lexer's cursor).
1569    start: usize,
1570    /// The index into `text` of the end of the previous token.
1571    prev_end: usize,
1572}
1573
1574/// Information about newlines in a group of trivia.
1575#[derive(Debug, Copy, Clone)]
1576struct Newline {
1577    /// The column of the start of the next token in its line.
1578    column: Option<usize>,
1579    /// Whether any of our newlines were paragraph breaks.
1580    parbreak: bool,
1581}
1582
1583/// How to proceed with parsing when at a newline.
1584#[derive(Debug, Copy, Clone, Eq, PartialEq)]
1585enum AtNewline {
1586    /// Continue at newlines.
1587    Continue,
1588    /// Stop at any newline.
1589    Stop,
1590    /// Continue only if there is a continuation with `else` or `.` (Code only).
1591    ContextualContinue,
1592    /// Stop only at a parbreak, not normal newlines (Markup only).
1593    StopParBreak,
1594    /// Require that the token's column be greater or equal to a column (Markup
1595    /// only). If this is `0`, acts like `Continue`; if this is `usize::MAX`,
1596    /// acts like `Stop`.
1597    RequireColumn(usize),
1598}
1599
1600impl AtNewline {
1601    /// Whether to stop at a newline or continue based on the current context.
1602    fn stop_at(self, Newline { column, parbreak }: Newline, kind: SyntaxKind) -> bool {
1603        #[allow(clippy::match_like_matches_macro)]
1604        match self {
1605            AtNewline::Continue => false,
1606            AtNewline::Stop => true,
1607            AtNewline::ContextualContinue => match kind {
1608                SyntaxKind::Else | SyntaxKind::Dot => false,
1609                _ => true,
1610            },
1611            AtNewline::StopParBreak => parbreak,
1612            AtNewline::RequireColumn(min_col) => {
1613                // When the column is `None`, the newline doesn't start a
1614                // column, and we continue parsing. This may happen on the
1615                // boundary of syntax modes, since we only report a column in
1616                // Markup.
1617                column.is_some_and(|column| column <= min_col)
1618            }
1619        }
1620    }
1621}
1622
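// A minimal sanity-check sketch for the newline modes above. This test module
// is not part of the original suite; it only exercises `stop_at` directly with
// hand-built `Newline` records to illustrate how each mode decides.
#[cfg(test)]
mod at_newline_sketch {
    use super::*;

    #[test]
    fn stop_at_examples() {
        let nl = |column: Option<usize>, parbreak: bool| Newline { column, parbreak };
        let ident = SyntaxKind::Ident;
        // `Continue` never stops; `Stop` always does.
        assert!(!AtNewline::Continue.stop_at(nl(Some(0), true), ident));
        assert!(AtNewline::Stop.stop_at(nl(None, false), ident));
        // `ContextualContinue` keeps going only for `else`/`.` continuations.
        assert!(!AtNewline::ContextualContinue.stop_at(nl(None, false), SyntaxKind::Else));
        assert!(AtNewline::ContextualContinue.stop_at(nl(None, false), ident));
        // `StopParBreak` stops only at paragraph breaks.
        assert!(AtNewline::StopParBreak.stop_at(nl(None, true), ident));
        assert!(!AtNewline::StopParBreak.stop_at(nl(None, false), ident));
        // `RequireColumn(2)` stops once the column drops to 2 or below and
        // continues when no column was recorded for the newline.
        assert!(!AtNewline::RequireColumn(2).stop_at(nl(Some(4), false), ident));
        assert!(AtNewline::RequireColumn(2).stop_at(nl(Some(2), false), ident));
        assert!(!AtNewline::RequireColumn(2).stop_at(nl(None, false), ident));
    }
}
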
/// A marker representing a node's position in the parser. Mainly used for
/// wrapping, but can also index into the parser to access the node, like
/// `p[m]`.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
struct Marker(usize);

// Index into the parser with markers.
impl Index<Marker> for Parser<'_> {
    type Output = SyntaxNode;

    fn index(&self, m: Marker) -> &Self::Output {
        &self.nodes[m.0]
    }
}

impl IndexMut<Marker> for Parser<'_> {
    fn index_mut(&mut self, m: Marker) -> &mut Self::Output {
        &mut self.nodes[m.0]
    }
}

/// Creating/Consuming the parser and getting info about the current token.
impl<'s> Parser<'s> {
    /// Create a new parser starting from the given text offset and syntax mode.
    fn new(text: &'s str, offset: usize, mode: SyntaxMode) -> Self {
        let mut lexer = Lexer::new(text, mode);
        lexer.jump(offset);
        let nl_mode = AtNewline::Continue;
        let mut nodes = vec![];
        let token = Self::lex(&mut nodes, &mut lexer, nl_mode);
        Self {
            text,
            lexer,
            nl_mode,
            token,
            balanced: true,
            nodes,
            memo: Default::default(),
        }
    }

    /// Consume the parser, yielding the full vector of parsed [`SyntaxNode`]s.
    fn finish(self) -> Vec<SyntaxNode> {
        self.nodes
    }

    /// Consume the parser, generating a single top-level node.
    fn finish_into(self, kind: SyntaxKind) -> SyntaxNode {
        assert!(self.at(SyntaxKind::End));
        SyntaxNode::inner(kind, self.finish())
    }

    /// Similar to a `peek()` function: returns the `kind` of the next token to
    /// be eaten.
    fn current(&self) -> SyntaxKind {
        self.token.kind
    }

    /// Whether the current token is a given [`SyntaxKind`].
    fn at(&self, kind: SyntaxKind) -> bool {
        self.token.kind == kind
    }

    /// Whether the current token is contained in a [`SyntaxSet`].
    fn at_set(&self, set: SyntaxSet) -> bool {
        set.contains(self.token.kind)
    }

    /// Whether we're at the end of the token stream.
    ///
    /// Note: This might be a fake end due to the newline mode.
    fn end(&self) -> bool {
        self.at(SyntaxKind::End)
    }

    /// Whether we're at the given `kind` with no preceding trivia tokens.
    fn directly_at(&self, kind: SyntaxKind) -> bool {
        self.token.kind == kind && !self.had_trivia()
    }

    /// Whether `token` had any preceding trivia.
    fn had_trivia(&self) -> bool {
        self.token.n_trivia > 0
    }

    /// Whether `token` had a newline among any of its preceding trivia.
    fn had_newline(&self) -> bool {
        self.token.newline.is_some()
    }

    /// The number of characters between the most recent newline and the start
    /// of the current token. Uses the column cached during lexing if present.
    fn current_column(&self) -> usize {
        self.token
            .newline
            .and_then(|newline| newline.column)
            .unwrap_or_else(|| self.lexer.column(self.token.start))
    }

    /// The current token's text.
    fn current_text(&self) -> &'s str {
        &self.text[self.token.start..self.current_end()]
    }

    /// The offset into `text` of the current token's start.
    fn current_start(&self) -> usize {
        self.token.start
    }

    /// The offset into `text` of the current token's end.
    fn current_end(&self) -> usize {
        self.lexer.cursor()
    }

    /// The offset into `text` of the previous token's end.
    fn prev_end(&self) -> usize {
        self.token.prev_end
    }
}

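// A rough illustration of the inspection methods above, not part of the
// original test suite. It assumes the code-mode lexer reports `SyntaxKind::Let`
// spanning the first three bytes of `"let x = 1"`.
#[cfg(test)]
mod parser_inspection_sketch {
    use super::*;

    #[test]
    fn current_token_info() {
        let p = Parser::new("let x = 1", 0, SyntaxMode::Code);
        assert_eq!(p.current(), SyntaxKind::Let);
        assert!(p.at(SyntaxKind::Let) && !p.end());
        assert_eq!(p.current_text(), "let");
        assert_eq!((p.current_start(), p.current_end()), (0, 3));
        assert!(!p.had_trivia() && !p.had_newline());
    }
}
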
// The main parsing interface for generating tokens and eating/modifying nodes.
impl<'s> Parser<'s> {
    /// A marker that will point to the current token in the parser once it's
    /// been eaten.
    fn marker(&self) -> Marker {
        Marker(self.nodes.len())
    }

    /// A marker that will point to the first trivia node before this token in
    /// the parser (or to the token itself if no trivia precede it).
    fn before_trivia(&self) -> Marker {
        Marker(self.nodes.len() - self.token.n_trivia)
    }

    /// Eat the current node and return a reference for in-place mutation.
    #[track_caller]
    fn eat_and_get(&mut self) -> &mut SyntaxNode {
        let offset = self.nodes.len();
        self.eat();
        &mut self.nodes[offset]
    }

    /// Eat the token if at `kind`. Returns `true` if eaten.
    ///
    /// Note: In Math and Code, this will ignore trivia in front of the
    /// `kind`. To forbid skipping trivia, consider using `eat_if_direct`.
    fn eat_if(&mut self, kind: SyntaxKind) -> bool {
        let at = self.at(kind);
        if at {
            self.eat();
        }
        at
    }

    /// Eat the token only if at `kind` with no preceding trivia. Returns `true`
    /// if eaten.
    fn eat_if_direct(&mut self, kind: SyntaxKind) -> bool {
        let at = self.directly_at(kind);
        if at {
            self.eat();
        }
        at
    }

    /// Assert that we are at the given [`SyntaxKind`] and eat it. This should
    /// be used when moving between functions that expect to start with a
    /// specific token.
    #[track_caller]
    fn assert(&mut self, kind: SyntaxKind) {
        assert_eq!(self.token.kind, kind);
        self.eat();
    }

    /// Convert the current token's [`SyntaxKind`] and eat it.
    fn convert_and_eat(&mut self, kind: SyntaxKind) {
        // Only need to replace the node here.
        self.token.node.convert_to_kind(kind);
        self.eat();
    }

    /// Eat the current token by saving it to the `nodes` vector, then move
    /// the lexer forward to prepare a new token.
    fn eat(&mut self) {
        self.nodes.push(std::mem::take(&mut self.token.node));
        self.token = Self::lex(&mut self.nodes, &mut self.lexer, self.nl_mode);
    }

    /// Detach the parsed trivia nodes from this token (but not newline info) so
    /// that subsequent wrapping will include the trivia.
    fn flush_trivia(&mut self) {
        self.token.n_trivia = 0;
        self.token.prev_end = self.token.start;
    }

    /// Wrap the nodes from a marker up to (but excluding) the current token in
    /// a new [inner node](`SyntaxNode::inner`) of the given kind. This is an
    /// easy interface for creating nested syntax nodes _after_ having parsed
    /// their children.
    fn wrap(&mut self, from: Marker, kind: SyntaxKind) {
        let to = self.before_trivia().0;
        let from = from.0.min(to);
        let children = self.nodes.drain(from..to).collect();
        self.nodes.insert(from, SyntaxNode::inner(kind, children));
    }

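    // A hedged sketch of the typical marker/wrap idiom used by the grammar
    // functions (the `FuncCall` kind here is purely illustrative):
    //
    //     let m = p.marker();              // remember where the subtree starts
    //     p.assert(SyntaxKind::Ident);
    //     // ... parse the remaining children ...
    //     p.wrap(m, SyntaxKind::FuncCall); // nest everything since `m`
    //
    // Because `wrap` stops at `before_trivia`, the lookahead token and its
    // trivia are never captured in the new inner node.
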
    /// Parse within the [`SyntaxMode`] for subsequent tokens (does not change the
    /// current token). This may re-lex the final token on exit.
    ///
    /// This function effectively repurposes the call stack as a stack of modes.
    fn enter_modes(
        &mut self,
        mode: SyntaxMode,
        stop: AtNewline,
        func: impl FnOnce(&mut Parser<'s>),
    ) {
        let previous = self.lexer.mode();
        self.lexer.set_mode(mode);
        self.with_nl_mode(stop, func);
        if mode != previous {
            self.lexer.set_mode(previous);
            self.lexer.jump(self.token.prev_end);
            self.nodes.truncate(self.nodes.len() - self.token.n_trivia);
            self.token = Self::lex(&mut self.nodes, &mut self.lexer, self.nl_mode);
        }
    }

    /// Parse within the [`AtNewline`] mode for subsequent tokens (does not
    /// change the current token). This may re-lex the final token on exit.
    ///
    /// This function effectively repurposes the call stack as a stack of modes.
    fn with_nl_mode(&mut self, mode: AtNewline, func: impl FnOnce(&mut Parser<'s>)) {
        let previous = self.nl_mode;
        self.nl_mode = mode;
        func(self);
        self.nl_mode = previous;
        if let Some(newline) = self.token.newline
            && mode != previous
        {
            // Restore our actual token's kind or insert a fake end.
            let actual_kind = self.token.node.kind();
            if self.nl_mode.stop_at(newline, actual_kind) {
                self.token.kind = SyntaxKind::End;
            } else {
                self.token.kind = actual_kind;
            }
        }
    }

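    // A rough sketch of how the two mode helpers compose; this hypothetical
    // rule is for illustration only and is not part of the original grammar:
    //
    //     p.enter_modes(SyntaxMode::Markup, AtNewline::Continue, |p| {
    //         let m = p.marker();
    //         p.assert(SyntaxKind::LeftBracket);
    //         markup(p, true, false, syntax_set!(RightBracket, End));
    //         p.expect_closing_delimiter(m, SyntaxKind::RightBracket);
    //     });
    //
    // On exit, `enter_modes` re-lexes the lookahead token so it matches the
    // outer mode again, and `with_nl_mode` re-applies the outer newline mode.
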
    /// Move the lexer forward and prepare the current token. Depending on the
    /// newline mode, this might insert a temporary [`SyntaxKind::End`].
    ///
    /// This is not a method on `self` because we need a valid token before we
    /// can initialize the parser.
    fn lex(nodes: &mut Vec<SyntaxNode>, lexer: &mut Lexer, nl_mode: AtNewline) -> Token {
        let prev_end = lexer.cursor();
        let mut start = prev_end;
        let (mut kind, mut node) = lexer.next();
        let mut n_trivia = 0;
        let mut had_newline = false;
        let mut parbreak = false;

        while kind.is_trivia() {
            had_newline |= lexer.newline(); // Newlines are always trivia.
            parbreak |= kind == SyntaxKind::Parbreak;
            n_trivia += 1;
            nodes.push(node);
            start = lexer.cursor();
            (kind, node) = lexer.next();
        }

        let newline = if had_newline {
            let column =
                (lexer.mode() == SyntaxMode::Markup).then(|| lexer.column(start));
            let newline = Newline { column, parbreak };
            if nl_mode.stop_at(newline, kind) {
                // Insert a temporary `SyntaxKind::End` to halt the parser.
                // The actual kind will be restored from `node` later.
                kind = SyntaxKind::End;
            }
            Some(newline)
        } else {
            None
        };

        Token { kind, node, n_trivia, newline, start, prev_end }
    }
}

/// Extra parser state for efficiently recovering from mispredicted parses.
///
/// This is the same idea as packrat parsing, but we use it only in the limited
/// case of parenthesized structures. See [`expr_with_paren`] for more.
#[derive(Default)]
struct MemoArena {
    /// A single arena of previously parsed nodes (to reduce allocations).
    /// Memoized ranges refer to unique sections of the arena.
    arena: Vec<SyntaxNode>,
    /// A map from the parser's current position to a range of previously parsed
    /// nodes in the arena and a checkpoint of the parser's state. These allow
    /// us to reset the parser to avoid parsing the same location again.
    memo_map: FxHashMap<MemoKey, (Range<usize>, PartialState)>,
}

/// A type alias for the memo key so it doesn't get confused with other `usize`
/// values.
///
/// The memo is keyed by the index into `text` of the current token's start.
type MemoKey = usize;

/// A checkpoint of the parser which can fully restore it to a previous state.
struct Checkpoint {
    node_len: usize,
    state: PartialState,
}

/// State needed to restore the parser's current token and the lexer (but not
/// the nodes vector).
#[derive(Clone)]
struct PartialState {
    cursor: usize,
    lex_mode: SyntaxMode,
    token: Token,
}

/// The memoization interface.
impl Parser<'_> {
    /// Store the nodes parsed since `prev_len` together with the current parser
    /// state: extend the arena with those nodes and record the resulting range
    /// plus a checkpoint in the memo map.
    fn memoize_parsed_nodes(&mut self, key: MemoKey, prev_len: usize) {
        let Checkpoint { state, node_len } = self.checkpoint();
        let memo_start = self.memo.arena.len();
        self.memo.arena.extend_from_slice(&self.nodes[prev_len..node_len]);
        let arena_range = memo_start..self.memo.arena.len();
        self.memo.memo_map.insert(key, (arena_range, state));
    }

    /// Try to load a memoized result. Returns `None` if a result was restored,
    /// or `Some` (with a key for the memo map and a checkpoint) if there was
    /// nothing to restore.
    fn restore_memo_or_checkpoint(&mut self) -> Option<(MemoKey, Checkpoint)> {
        // We use the starting index of the current token as our key.
        let key: MemoKey = self.current_start();
        match self.memo.memo_map.get(&key).cloned() {
            Some((range, state)) => {
                self.nodes.extend_from_slice(&self.memo.arena[range]);
                // It's important that we don't truncate the nodes vector since
                // it may have grown or shrunk (due to other memoization or
                // error reporting) since we made this checkpoint.
                self.restore_partial(state);
                None
            }
            None => Some((key, self.checkpoint())),
        }
    }

    /// Restore the parser to the state at a checkpoint.
    fn restore(&mut self, checkpoint: Checkpoint) {
        self.nodes.truncate(checkpoint.node_len);
        self.restore_partial(checkpoint.state);
    }

    /// Restore parts of the checkpoint excluding the nodes vector.
    fn restore_partial(&mut self, state: PartialState) {
        self.lexer.jump(state.cursor);
        self.lexer.set_mode(state.lex_mode);
        self.token = state.token;
    }

    /// Save a checkpoint of the parser state.
    fn checkpoint(&self) -> Checkpoint {
        let node_len = self.nodes.len();
        let state = PartialState {
            cursor: self.lexer.cursor(),
            lex_mode: self.lexer.mode(),
            token: self.token.clone(),
        };
        Checkpoint { node_len, state }
    }
}

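// A sketch of the speculative-parse round trip these helpers support (see
// `expr_with_paren`); `parse_as_parenthesized` and `mispredicted` are
// placeholders, not items from this module:
//
//     if let Some((key, checkpoint)) = p.restore_memo_or_checkpoint() {
//         let prev_len = checkpoint.node_len;
//         parse_as_parenthesized(p);
//         if mispredicted {
//             p.restore(checkpoint); // roll back nodes, lexer, and token
//             // ... reparse with the other interpretation ...
//         } else {
//             p.memoize_parsed_nodes(key, prev_len); // reuse this work later
//         }
//     }
//     // A `None` return means the memoized nodes were already copied into
//     // `p.nodes` and the lexer and token were restored, so parsing simply
//     // continues after them.
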
/// Functions for eating expected or unexpected tokens and generating errors if
/// we don't get what we expect.
impl Parser<'_> {
    /// Consume the given `kind` or produce an error.
    fn expect(&mut self, kind: SyntaxKind) -> bool {
        let at = self.at(kind);
        if at {
            self.eat();
        } else if kind == SyntaxKind::Ident && self.token.kind.is_keyword() {
            self.trim_errors();
            self.eat_and_get().expected(kind.name());
        } else {
            self.balanced &= !kind.is_grouping();
            self.expected(kind.name());
        }
        at
    }

    /// Consume the given closing delimiter or produce an error for the matching
    /// opening delimiter at `open`.
    #[track_caller]
    fn expect_closing_delimiter(&mut self, open: Marker, kind: SyntaxKind) {
        if !self.eat_if(kind) {
            self.nodes[open.0].convert_to_error("unclosed delimiter");
        }
    }

    /// Produce an error that the given `thing` was expected.
    fn expected(&mut self, thing: &str) {
        if !self.after_error() {
            self.expected_at(self.before_trivia(), thing);
        }
    }

    /// Whether the last non-trivia node is an error.
    fn after_error(&mut self) -> bool {
        let m = self.before_trivia();
        m.0 > 0 && self.nodes[m.0 - 1].kind().is_error()
    }

    /// Produce an error that the given `thing` was expected at the position
    /// of the marker `m`.
    fn expected_at(&mut self, m: Marker, thing: &str) {
        let error =
            SyntaxNode::error(SyntaxError::new(eco_format!("expected {thing}")), "");
        self.nodes.insert(m.0, error);
    }

    /// Add a hint to a trailing error.
    fn hint(&mut self, hint: &str) {
        let m = self.before_trivia();
        if let Some(error) = self.nodes.get_mut(m.0 - 1) {
            error.hint(hint);
        }
    }

    /// Consume the next token (if any) and produce an error stating that it was
    /// unexpected.
    fn unexpected(&mut self) {
        self.trim_errors();
        self.balanced &= !self.token.kind.is_grouping();
        self.eat_and_get().unexpected();
    }

    /// Remove trailing errors with zero length.
    fn trim_errors(&mut self) {
        let Marker(end) = self.before_trivia();
        let mut start = end;
        while start > 0
            && self.nodes[start - 1].kind().is_error()
            && self.nodes[start - 1].is_empty()
        {
            start -= 1;
        }
        self.nodes.drain(start..end);
    }
}
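
// A sketch of how these error helpers are typically combined in grammar rules;
// the rule and hint text are illustrative, not taken from the original grammar:
//
//     if !p.expect(SyntaxKind::Ident) {
//         // `expect` already inserted an "expected identifier" error; this
//         // just attaches extra advice to it.
//         p.hint("a binding requires a name");
//     }
//
// Zero-length "expected ..." errors that accumulate at one position are later
// removed by `trim_errors` when an unexpected token is reported there.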