// apollo_parser/parser/mod.rs

mod generated;
mod language;
mod syntax_tree;
mod token_text;

pub(crate) mod grammar;

use crate::cst::Document;
use crate::cst::SelectionSet;
use crate::cst::Type;
use crate::lexer::Lexer;
use crate::Error;
use crate::LimitTracker;
use crate::Token;
use crate::TokenKind;
pub use generated::syntax_kind::SyntaxKind;
pub use language::SyntaxElement;
pub use language::SyntaxNode;
pub use language::SyntaxNodeChildren;
pub use language::SyntaxNodePtr;
pub use language::SyntaxToken;
use std::cell::RefCell;
use std::ops::ControlFlow;
use std::rc::Rc;
pub use syntax_tree::SyntaxTree;
// pub(crate) use language::GraphQLLanguage;
pub(crate) use syntax_tree::SyntaxTreeBuilder;
pub(crate) use token_text::TokenText;

/// Parse GraphQL schemas or queries into a typed CST.
///
/// ## Examples
///
/// The API to parse a query or a schema is the same, as the parser currently
/// accepts a `&str`. Here is an example of parsing a query:
/// ```rust
/// use apollo_parser::Parser;
///
/// let query = "
/// {
///     animal
///     ...snackSelection
///     ... on Pet {
///       playmates {
///         count
///       }
///     }
/// }
/// ";
/// // Create a new instance of a parser given the query above.
/// let parser = Parser::new(query);
/// // Parse the query, and return a SyntaxTree.
/// let cst = parser.parse();
/// // Check that there are no errors. These are not part of the CST.
/// assert_eq!(0, cst.errors().len());
///
/// // Get the document root node
/// let doc = cst.document();
/// // ... continue
/// ```
///
/// Here is how you'd parse a schema:
/// ```rust
/// use apollo_parser::Parser;
/// let core_schema = r#"
/// schema @core(feature: "https://specs.apollo.dev/join/v0.1") {
///   query: Query
///   mutation: Mutation
/// }
///
/// enum join__Graph {
///   ACCOUNTS @join__graph(name: "accounts")
/// }
/// "#;
/// let parser = Parser::new(core_schema);
/// let cst = parser.parse();
///
/// assert_eq!(0, cst.errors().len());
///
/// let document = cst.document();
/// ```
#[derive(Debug)]
pub struct Parser<'input> {
    lexer: Lexer<'input>,
    /// Store one token of lookahead so repeated peeks don't have to re-lex.
    current_token: Option<Token<'input>>,
    /// The in-progress tree.
    builder: Rc<RefCell<SyntaxTreeBuilder>>,
    /// Ignored tokens that should be added to the tree.
    ignored: Vec<Token<'input>>,
    /// The list of syntax errors we've accumulated so far.
    errors: Vec<crate::Error>,
    /// The recursion limit to apply while parsing.
    recursion_limit: LimitTracker,
    /// Whether to accept new parsing errors. Set to false once a limit is reached.
    accept_errors: bool,
}

/// Chosen experimentally with:
///
/// * apollo-parser 0.6.2+ (e05abbf4f)
/// * Rust 1.72.1
/// * aarch64-apple-darwin
/// * Unoptimized (default `cargo test` profile)
///
/// This couldn’t be set to much more than 2000 before the `recursion_limit` test below
/// hit "fatal runtime error: stack overflow"
///
/// Defaulting to around a quarter of that, to keep a comfortable safety margin.
const DEFAULT_RECURSION_LIMIT: usize = 500;

impl<'input> Parser<'input> {
    /// Create a new instance of a parser given an input string.
    pub fn new(input: &'input str) -> Self {
        let lexer = Lexer::new(input);

        Self {
            lexer,
            current_token: None,
            builder: Rc::new(RefCell::new(SyntaxTreeBuilder::new())),
            ignored: vec![],
            errors: Vec::new(),
            recursion_limit: LimitTracker::new(DEFAULT_RECURSION_LIMIT),
            accept_errors: true,
        }
    }

    /// Configure the recursion limit to use while parsing.
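    ///
    /// A minimal sketch of usage; the limit value here is arbitrary and
    /// generous for such a small document:
    /// ```rust
    /// use apollo_parser::Parser;
    ///
    /// let parser = Parser::new("query { a { b } }").recursion_limit(100);
    /// let cst = parser.parse();
    /// assert_eq!(0, cst.errors().len());
    /// ```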
    pub fn recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_limit = LimitTracker::new(recursion_limit);
        self
    }

    /// Configure the limit on the number of tokens to parse. If an input document
    /// is too big, parsing will be aborted.
    ///
    /// By default, there is no limit.
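    ///
    /// A minimal sketch of usage; this document stays well under the limit,
    /// while a document that exceeds it would get a limit error among
    /// `cst.errors()`:
    /// ```rust
    /// use apollo_parser::Parser;
    ///
    /// let parser = Parser::new("query { a }").token_limit(100);
    /// let cst = parser.parse();
    /// assert_eq!(0, cst.errors().len());
    /// ```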
    pub fn token_limit(mut self, token_limit: usize) -> Self {
        self.lexer = self.lexer.with_limit(token_limit);
        self
    }

    /// Parse the current tokens.
    pub fn parse(mut self) -> SyntaxTree<Document> {
        grammar::document::document(&mut self);

        let builder = Rc::try_unwrap(self.builder)
            .expect("More than one reference to builder left")
            .into_inner();
        let builder =
            builder.finish_document(self.errors, self.recursion_limit, self.lexer.limit_tracker);

        match builder {
            syntax_tree::SyntaxTreeWrapper::Document(tree) => tree,
            syntax_tree::SyntaxTreeWrapper::Type(_)
            | syntax_tree::SyntaxTreeWrapper::FieldSet(_) => {
                unreachable!("parse constructor can only construct a document")
            }
        }
    }

    /// Parse a selection set with optional outer braces.
    /// This is the expected format of the string value of the `fields` argument of some directives
    /// like [`@requires`](https://www.apollographql.com/docs/federation/federated-types/federated-directives/#requires).
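    ///
    /// A minimal sketch of usage, with the optional outer braces omitted:
    /// ```rust
    /// use apollo_parser::Parser;
    ///
    /// let parser = Parser::new("id organization { id }");
    /// let cst = parser.parse_selection_set();
    /// assert_eq!(0, cst.errors().len());
    /// ```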
    pub fn parse_selection_set(mut self) -> SyntaxTree<SelectionSet> {
        grammar::selection::field_set(&mut self);

        let builder = Rc::try_unwrap(self.builder)
            .expect("More than one reference to builder left")
            .into_inner();
        let builder = builder.finish_selection_set(
            self.errors,
            self.recursion_limit,
            self.lexer.limit_tracker,
        );

        match builder {
            syntax_tree::SyntaxTreeWrapper::FieldSet(tree) => tree,
            syntax_tree::SyntaxTreeWrapper::Document(_)
            | syntax_tree::SyntaxTreeWrapper::Type(_) => {
                unreachable!("parse_selection_set constructor can only construct a selection set")
            }
        }
    }

    /// Parse a GraphQL type.
    /// This is the expected format of the string value of the `type` argument
    /// of some directives like [`@field`](https://specs.apollo.dev/join/v0.3/#@field).
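    ///
    /// A minimal sketch of usage with a non-null list type:
    /// ```rust
    /// use apollo_parser::Parser;
    ///
    /// let parser = Parser::new("[Int!]!");
    /// let cst = parser.parse_type();
    /// assert_eq!(0, cst.errors().len());
    /// ```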
    pub fn parse_type(mut self) -> SyntaxTree<Type> {
        grammar::ty::ty(&mut self);

        let builder = Rc::try_unwrap(self.builder)
            .expect("More than one reference to builder left")
            .into_inner();
        let builder =
            builder.finish_type(self.errors, self.recursion_limit, self.lexer.limit_tracker);

        match builder {
            syntax_tree::SyntaxTreeWrapper::Type(tree) => tree,
            syntax_tree::SyntaxTreeWrapper::FieldSet(_)
            | syntax_tree::SyntaxTreeWrapper::Document(_) => {
                unreachable!("parse_type constructor can only construct a type")
            }
        }
    }

    /// Check if the current token has the given kind.
    pub(crate) fn at(&mut self, token: TokenKind) -> bool {
        self.peek() == Some(token)
    }

    /// Consume a token and add it to the syntax tree. Queue any ignored tokens that follow.
    pub(crate) fn bump(&mut self, kind: SyntaxKind) {
        self.eat(kind);
        self.skip_ignored();
    }

    /// Consume and skip ignored tokens from the lexer.
    pub(crate) fn skip_ignored(&mut self) {
        while let Some(TokenKind::Comment | TokenKind::Whitespace | TokenKind::Comma) = self.peek()
        {
            let token = self.pop();
            self.ignored.push(token);
        }
    }

    /// Push skipped ignored tokens to the current node.
    pub(crate) fn push_ignored(&mut self) {
        let tokens = std::mem::take(&mut self.ignored);
        for token in tokens {
            let syntax_kind = match token.kind {
                TokenKind::Comment => SyntaxKind::COMMENT,
                TokenKind::Whitespace => SyntaxKind::WHITESPACE,
                TokenKind::Comma => SyntaxKind::COMMA,
                _ => unreachable!(),
            };
            self.push_token(syntax_kind, token);
        }
    }

    /// Get the current token, if any.
    pub(crate) fn current(&mut self) -> Option<&Token<'input>> {
        self.peek_token()
    }

    /// Consume a token from the lexer and add it to the syntax tree.
    fn eat(&mut self, kind: SyntaxKind) {
        self.push_ignored();
        if self.current().is_none() {
            return;
        }

        let token = self.pop();
        self.push_token(kind, token);
    }

    /// Create a parser limit error and push it into the error vector.
    ///
    /// Note: After a limit error is pushed, any further errors pushed
    /// are silently discarded.
    pub(crate) fn limit_err<S: Into<String>>(&mut self, message: S) {
        let Some(current) = self.current() else {
            return;
        };
        // this needs to be the computed location
        let err = Error::limit(message, current.index());
        self.push_err(err);
        self.accept_errors = false;
    }

    /// Create a parser error at a given location and push it into the error vector.
    pub(crate) fn err_at_token(&mut self, current: &Token<'_>, message: &str) {
        let err = if current.kind == TokenKind::Eof {
            Error::eof(message, current.index())
        } else {
            // this needs to be the computed location
            Error::with_loc(message, current.data().to_string(), current.index())
        };
        self.push_err(err);
    }

    /// Create a parser error at the current location and push it into the error vector.
    pub(crate) fn err(&mut self, message: &str) {
        let Some(current) = self.current() else {
            return;
        };
        let err = if current.kind == TokenKind::Eof {
            Error::eof(message, current.index())
        } else {
            // this needs to be the computed location
            Error::with_loc(message, current.data().to_string(), current.index())
        };
        self.push_err(err);
    }

    /// Create a parser error at the current location and eat the responsible token.
    pub(crate) fn err_and_pop(&mut self, message: &str) {
        self.push_ignored();
        if self.current().is_none() {
            return;
        }

        let current = self.pop();
        let err = if current.kind == TokenKind::Eof {
            Error::eof(message, current.index())
        } else {
            // this needs to be the computed location
            Error::with_loc(message, current.data().to_string(), current.index())
        };

        // Keep the error in the parse tree for position information
        self.push_token(SyntaxKind::ERROR, current);
        self.push_err(err);

        // We usually skip ignored tokens after popping each token, so make sure we
        // also do this when we create an error and pop.
        self.skip_ignored();
    }

    /// Consume the next token if it has the expected kind, or emit an error
    /// otherwise.
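    ///
    /// A sketch of a typical call from a grammar rule, assuming `p: &mut
    /// Parser` and that a colon is required at this position:
    /// ```rust,ignore
    /// p.expect(TokenKind::Colon, SyntaxKind::COLON);
    /// ```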
    pub(crate) fn expect(&mut self, token: TokenKind, kind: SyntaxKind) {
        let Some(current) = self.current() else {
            return;
        };
        let is_eof = current.kind == TokenKind::Eof;
        let data = current.data();
        let index = current.index();

        if self.at(token) {
            self.bump(kind);
            return;
        }

        let err = if is_eof {
            let message = format!("expected {kind:?}, got EOF");
            Error::eof(message, index)
        } else {
            let message = format!("expected {kind:?}, got {data}");
            Error::with_loc(message, data.to_string(), index)
        };

        self.push_err(err);
    }

    /// Push an error to the parser's error Vec.
    pub(crate) fn push_err(&mut self, err: crate::error::Error) {
        // If the parser has reached a limit, self.accept_errors will
        // be set to false so that we do not push any more errors.
        //
        // This is because the limit activation will result
        // in an early termination which will cause the parser to
        // report "errors" which aren't really errors and thus
        // must be ignored.
        if self.accept_errors {
            self.errors.push(err);
        }
    }

    /// Gets the next token from the lexer.
    fn next_token(&mut self) -> Option<Token<'input>> {
        for res in &mut self.lexer {
            match res {
                Err(err) => {
                    if err.is_limit() {
                        self.accept_errors = false;
                    }
                    self.errors.push(err);
                }
                Ok(token) => {
                    return Some(token);
                }
            }
        }

        None
    }

    /// Consume a token from the lexer.
    pub(crate) fn pop(&mut self) -> Token<'input> {
        if let Some(token) = self.current_token.take() {
            return token;
        }

        self.next_token()
            .expect("Could not pop a token from the lexer")
    }

    /// Insert a token into the syntax tree.
    pub(crate) fn push_token(&mut self, kind: SyntaxKind, token: Token) {
        self.builder.borrow_mut().token(kind, token.data())
    }

    /// Start a node and make it current.
    ///
    /// This also creates a NodeGuard under the hood that will automatically
    /// close the node (via Drop) when the guard goes out of scope.
    /// This means we don't have to explicitly close every node while parsing
    /// tokens.
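    ///
    /// A sketch of typical use from a grammar rule, assuming `p: &mut Parser`:
    /// ```rust,ignore
    /// let _guard = p.start_node(SyntaxKind::NAME);
    /// // ... bump the tokens that belong to this node ...
    /// // The NAME node is finished when `_guard` drops at the end of scope.
    /// ```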
    pub(crate) fn start_node(&mut self, kind: SyntaxKind) -> NodeGuard {
        self.push_ignored();

        self.builder.borrow_mut().start_node(kind);
        let guard = NodeGuard::new(self.builder.clone());
        self.skip_ignored();

        guard
    }

    /// Set a checkpoint for *maybe* wrapping the following parse tree in some
    /// other node.
    pub(crate) fn checkpoint_node(&mut self) -> Checkpoint {
        // We may start a new node here in the future, so let's process
        // our preceding whitespace first
        self.push_ignored();

        let checkpoint = self.builder.borrow().checkpoint();
        Checkpoint::new(self.builder.clone(), checkpoint)
    }

    /// Peek the next Token and return its TokenKind.
    pub(crate) fn peek(&mut self) -> Option<TokenKind> {
        self.peek_token().map(|token| token.kind())
    }

    /// Repeatedly peek at the next token and call the parse function. The parse function must
    /// advance parsing or break out of the loop.
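    ///
    /// A sketch of a typical call, assuming `p: &mut Parser` and a
    /// hypothetical `definition` grammar rule that consumes tokens:
    /// ```rust,ignore
    /// p.peek_while(|p, kind| {
    ///     if kind == TokenKind::Eof {
    ///         return ControlFlow::Break(());
    ///     }
    ///     definition(p);
    ///     ControlFlow::Continue(())
    /// });
    /// ```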
    pub(crate) fn peek_while(
        &mut self,
        mut run: impl FnMut(&mut Parser, TokenKind) -> ControlFlow<()>,
    ) {
        while let Some(kind) = self.peek() {
            let before = self.current_token.clone();
            match run(self, kind) {
                ControlFlow::Break(()) => break,
                ControlFlow::Continue(()) => {
                    debug_assert!(
                        before != self.current_token,
                        "peek_while() iteration must advance parsing"
                    );
                }
            }
        }
    }

    /// Call the parse function while the next token is of the expected kind. The parse function
    /// must consume the peeked token.
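    ///
    /// A sketch of a typical call, assuming `p: &mut Parser`, a `TokenKind::At`
    /// lexer kind, and a hypothetical `directive` grammar rule that consumes
    /// the peeked `@` token:
    /// ```rust,ignore
    /// p.peek_while_kind(TokenKind::At, |p| directive(p));
    /// ```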
    pub(crate) fn peek_while_kind(&mut self, expect: TokenKind, mut run: impl FnMut(&mut Parser)) {
        while let Some(kind) = self.peek() {
            if kind != expect {
                break;
            }

            let before = self.current_token.clone();
            run(self);
            debug_assert!(
                before != self.current_token,
                "peek_while_kind() iteration must advance parsing"
            );
        }
    }

    /// Call the parse function for items separated by the `separator` token. This parses at least
    /// one item. The first item may optionally be prefixed by an initial separator.
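    ///
    /// A sketch of a typical call, assuming `TokenKind::Pipe`/`SyntaxKind::PIPE`
    /// kinds and a hypothetical `union_member` grammar rule, as in a union
    /// member list like `U = | A | B`:
    /// ```rust,ignore
    /// p.parse_separated_list(TokenKind::Pipe, SyntaxKind::PIPE, union_member);
    /// ```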
    pub(crate) fn parse_separated_list(
        &mut self,
        separator: TokenKind,
        separator_syntax: SyntaxKind,
        mut run: impl FnMut(&mut Parser),
    ) {
        if matches!(self.peek(), Some(kind) if kind == separator) {
            self.bump(separator_syntax);
        }

        run(self);

        self.peek_while_kind(separator, |p| {
            p.bump(separator_syntax);
            run(p);
        });
    }

    /// Peek the next Token and return it.
    pub(crate) fn peek_token(&mut self) -> Option<&Token<'input>> {
        if self.current_token.is_none() {
            self.current_token = self.next_token();
        }
        self.current_token.as_ref()
    }

    /// Peek the `n`th token ahead (1-based, ignoring whitespace and comments)
    /// and return it.
    pub(crate) fn peek_token_n(&self, n: usize) -> Option<Token<'input>> {
        self.peek_n_inner(n)
    }

    /// Peek the `n`th token ahead (1-based, ignoring whitespace and comments)
    /// and return its TokenKind.
    pub(crate) fn peek_n(&self, n: usize) -> Option<TokenKind> {
        self.peek_n_inner(n).map(|token| token.kind())
    }

    fn peek_n_inner(&self, n: usize) -> Option<Token<'input>> {
        self.current_token
            .iter()
            .cloned()
            .map(Result::Ok)
            .chain(self.lexer.clone())
            .filter_map(Result::ok)
            .filter(|token| !matches!(token.kind(), TokenKind::Whitespace | TokenKind::Comment))
            .nth(n - 1)
    }

    /// Peek the next token's `data` property.
    pub(crate) fn peek_data(&mut self) -> Option<&'input str> {
        self.peek_token().map(|token| token.data())
    }

    /// Peek the `n`th token's `data` property (1-based).
    pub(crate) fn peek_data_n(&self, n: usize) -> Option<&'input str> {
        self.peek_token_n(n).map(|token| token.data())
    }
}

/// A wrapper around the SyntaxTreeBuilder used to self-close nodes.
///
/// When the NodeGuard goes out of scope, it automatically runs `finish_node()`
/// on the SyntaxTreeBuilder. This ensures that nodes are never accidentally
/// left unclosed.
#[must_use]
pub(crate) struct NodeGuard {
    builder: Rc<RefCell<SyntaxTreeBuilder>>,
}

impl NodeGuard {
    fn new(builder: Rc<RefCell<SyntaxTreeBuilder>>) -> Self {
        Self { builder }
    }

    pub(crate) fn finish_node(self) {
        drop(self);
    }
}

impl Drop for NodeGuard {
    fn drop(&mut self) {
        self.builder.borrow_mut().finish_node();
    }
}

/// A rowan Checkpoint that can self-close the new wrapper node if required.
pub(crate) struct Checkpoint {
    builder: Rc<RefCell<SyntaxTreeBuilder>>,
    checkpoint: rowan::Checkpoint,
}

impl Checkpoint {
    fn new(builder: Rc<RefCell<SyntaxTreeBuilder>>, checkpoint: rowan::Checkpoint) -> Self {
        Self {
            builder,
            checkpoint,
        }
    }

    /// Wrap the nodes that were parsed since setting this checkpoint in a new
    /// parent node of kind `kind`. Returns a NodeGuard that finishes the new
    /// parent node when dropped. More children can be added to the new node in
    /// the meantime.
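    ///
    /// A sketch of typical use, assuming `p: &mut Parser` and a hypothetical
    /// `name` grammar rule:
    /// ```rust,ignore
    /// let checkpoint = p.checkpoint_node();
    /// name(p);
    /// // Retroactively wrap the parsed name in a NAMED_TYPE parent node.
    /// let _guard = checkpoint.wrap_node(SyntaxKind::NAMED_TYPE);
    /// ```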
    pub(crate) fn wrap_node(self, kind: SyntaxKind) -> NodeGuard {
        self.builder.borrow_mut().wrap_node(self.checkpoint, kind);
        NodeGuard::new(self.builder)
    }
}

#[cfg(test)]
mod tests {
    use super::DEFAULT_RECURSION_LIMIT;
    use crate::cst;
    use crate::Error;
    use crate::Parser;
    use crate::SyntaxTree;
    use expect_test::expect;

    #[test]
    fn limited_mid_node() {
        let source = r#"
            type Query {
                field(arg1: Int, arg2: Int, arg3: Int, arg4: Int, arg5: Int, arg6: Int): Int
            }
        "#;
        let parser = Parser::new(source)
            // Make it stop inside the arguments list
            .token_limit(18);
        let tree = parser.parse();
        let mut errors = tree.errors();
        assert_eq!(
            errors.next(),
            Some(&Error::limit("token limit reached, aborting lexing", 65))
        );
        assert_eq!(errors.next(), None);
    }

    #[test]
    fn multiple_limits() {
        let source = r#"
            query {
                a {
                    a {
                        a {
                            a
                        }
                    }
                }
            }
        "#;

        let parser = Parser::new(source).recursion_limit(10).token_limit(22);
        let cst = parser.parse();
        let errors = cst.errors().collect::<Vec<_>>();
        assert_eq!(
            errors,
            &[&Error::limit("token limit reached, aborting lexing", 170),]
        );

        let parser = Parser::new(source).recursion_limit(3).token_limit(200);
        let cst = parser.parse();
        let errors = cst.errors().collect::<Vec<_>>();
        assert_eq!(
            errors,
            &[&Error::limit("parser recursion limit reached", 121),]
        );
    }

    #[test]
    fn syntax_errors_and_limits() {
        // Syntax errors before and after the limit
        let source = r#"
            type Query {
                field(arg1: Int, missing_arg): Int
                # limit reached here
                field2: !String
            } and then some garbage
        "#;
        let parser = Parser::new(source).token_limit(22);
        let cst = parser.parse();
        let mut errors = cst.errors();
        assert_eq!(
            errors.next(),
            Some(&Error::with_loc("expected a Name", ")".to_string(), 70))
        );
        // index 113 is immediately after the comment, before the newline
        assert_eq!(
            errors.next(),
            Some(&Error::limit("token limit reached, aborting lexing", 113))
        );
        assert_eq!(errors.next(), None);

        let tree = expect![[r##"
            DOCUMENT@0..113
              WHITESPACE@0..13 "\n            "
              OBJECT_TYPE_DEFINITION@13..76
                type_KW@13..17 "type"
                WHITESPACE@17..18 " "
                NAME@18..23
                  IDENT@18..23 "Query"
                WHITESPACE@23..24 " "
                FIELDS_DEFINITION@24..76
                  L_CURLY@24..25 "{"
                  WHITESPACE@25..42 "\n                "
                  FIELD_DEFINITION@42..76
                    NAME@42..47
                      IDENT@42..47 "field"
                    ARGUMENTS_DEFINITION@47..71
                      L_PAREN@47..48 "("
                      INPUT_VALUE_DEFINITION@48..57
                        NAME@48..52
                          IDENT@48..52 "arg1"
                        COLON@52..53 ":"
                        WHITESPACE@53..54 " "
                        NAMED_TYPE@54..57
                          NAME@54..57
                            IDENT@54..57 "Int"
                      COMMA@57..58 ","
                      WHITESPACE@58..59 " "
                      INPUT_VALUE_DEFINITION@59..70
                        NAME@59..70
                          IDENT@59..70 "missing_arg"
                      R_PAREN@70..71 ")"
                    COLON@71..72 ":"
                    WHITESPACE@72..73 " "
                    NAMED_TYPE@73..76
                      NAME@73..76
                        IDENT@73..76 "Int"
              WHITESPACE@76..93 "\n                "
              COMMENT@93..113 "# limit reached here"
        "##]];
        tree.assert_eq(&format!("{:#?}", cst.document().syntax));
    }

    #[test]
    fn tree_with_syntax_errors() {
        use crate::cst::Definition;

        // Some arbitrary token spam in incorrect places; this test uses
        // valid tokens only.
        let source = r#"
            garbage type Query implements X {
                field(arg: Int): Int
            } garbage :,, (|) interface X {}
        "#;
        let cst = Parser::new(source).parse();

        let mut definitions = cst.document().definitions();
        let query_def = definitions.next().unwrap();
        let interface_def = definitions.next().unwrap();
        assert_eq!(definitions.next(), None);
        assert!(matches!(query_def, Definition::ObjectTypeDefinition(_)));
        assert!(matches!(
            interface_def,
            Definition::InterfaceTypeDefinition(_)
        ));
    }

    #[test]
    fn token_limit() {
        let cst = Parser::new("type Query { a a a a a a a a a }")
            .token_limit(100)
            .parse();
        // token count includes EOF token.
        assert_eq!(cst.token_limit().high, 26);
    }

    #[test]
    // Single-char vs. multi-char `push_str` matters less than consistency
    // between consecutive calls:
    #[allow(clippy::single_char_add_str)]
    fn recursion_limit() {
        // A factor of 50 makes this test run in ~1 second on a 2021 laptop,
        // in unoptimized mode
        const SMASH_THE_STACK_FACTOR: usize = 50;

        wide(2, |ast| assert_eq!(ast.errors, []));
        wide(DEFAULT_RECURSION_LIMIT - 2, |ast| {
            assert_eq!(ast.errors.len(), 0, "{:?}", ast.errors[0])
        });
        wide(DEFAULT_RECURSION_LIMIT * SMASH_THE_STACK_FACTOR, |_ast| {
            // TODO: remove use of recursion to parse repetition and uncomment:
            // assert_eq!(ast.errors.len(), 0)
        });

        deep(2, |ast| assert_eq!(ast.errors, []));
        deep(DEFAULT_RECURSION_LIMIT - 2, |ast| {
            assert_eq!(ast.errors.len(), 0, "{:?}", ast.errors[0])
        });
        deep(DEFAULT_RECURSION_LIMIT * SMASH_THE_STACK_FACTOR, |ast| {
            // Parsing nested structures without recursion on the call stack
            // is possible, but not as easy: it would require an explicit stack.

            // The recursion limit triggered and protected against stack overflow.
            assert_eq!(ast.errors.len(), 1);
            assert!(ast.errors[0].message.contains("recursion limit reached"));
        });

        fn deep(count: usize, each: impl Fn(SyntaxTree)) {
            let check = |input: String| each(Parser::new(&input).parse());

            // Nested list type
            let mut doc = String::new();
            doc.push_str("type O { field: ");
            doc.push_str(&"[".repeat(count));
            doc.push_str("Int");
            doc.push_str(&"]".repeat(count));
            doc.push_str(" }");
            check(doc);

            // Nested list value
            let mut doc = String::new();
            doc.push_str("type O { field(arg: T = ");
            doc.push_str(&"[".repeat(count));
            doc.push_str("0");
            doc.push_str(&"]".repeat(count));
            doc.push_str("): Int }");
            check(doc);

            // Nested object value
            let mut doc = String::new();
            doc.push_str("type O { field(arg: T = ");
            doc.push_str(&"{f: ".repeat(count));
            doc.push_str("0");
            doc.push_str(&"}".repeat(count));
            doc.push_str("): Int }");
            check(doc);

            // Nested selection set
            let mut doc = String::new();
            doc.push_str("query { ");
            doc.push_str(&"f { ".repeat(count));
            doc.push_str("f ");
            doc.push_str(&"}".repeat(count));
            doc.push_str("}");
            check(doc);
        }

        fn wide(count: usize, each: impl Fn(SyntaxTree)) {
            let check = |input: String| each(Parser::new(&input).parse());

            // Repeated top-level definitions
            let mut doc = String::new();
            doc.push_str(&"directive @d on FIELD ".repeat(count));
            check(doc);

            // Repeated directive applications
            let mut doc = String::new();
            doc.push_str("scalar Url");
            doc.push_str(&" @d".repeat(count));
            check(doc);

            // Repeated root operation
            let mut doc = String::new();
            doc.push_str("schema {");
            doc.push_str(&" query: Q".repeat(count));
            doc.push_str(" }");
            check(doc);

            // Repeated implements interface
            let mut doc = String::new();
            doc.push_str("type O implements");
            doc.push_str(&" & I".repeat(count));
            check(doc);

            // Repeated object type field
            let mut doc = String::new();
            doc.push_str("type O {");
            doc.push_str(&" f: T".repeat(count));
            doc.push_str("}");
            check(doc);

            // Repeated enum value field
            let mut doc = String::new();
            doc.push_str("enum E {");
            doc.push_str(&" V".repeat(count));
            doc.push_str("}");
            check(doc);

            // Repeated union member
            let mut doc = String::new();
            doc.push_str("union U = ");
            doc.push_str(&" | T".repeat(count));
            check(doc);

            // Repeated input object type field
            let mut doc = String::new();
            doc.push_str("input In {");
            doc.push_str(&" f: T".repeat(count));
            doc.push_str("}");
            check(doc);

            // Repeated input object value field
            let mut doc = String::new();
            doc.push_str("type O { field(arg: T = {");
            doc.push_str(&" f: 0".repeat(count));
            doc.push_str(" }): Int }");
            check(doc);

            // Repeated list value item
            let mut doc = String::new();
            doc.push_str("type O { field(arg: T = [");
            doc.push_str(&" 0,".repeat(count));
            doc.push_str(" ]): Int }");
            check(doc);

            // Repeated field argument definitions
            let mut doc = String::new();
            doc.push_str("type O { field(");
            doc.push_str(&"a: T ".repeat(count));
            doc.push_str("): Int }");
            check(doc);

            // Repeated field selection
            let mut doc = String::new();
            doc.push_str("query {");
            doc.push_str(&" f".repeat(count));
            doc.push_str(" }");
            check(doc);

            // Repeated field argument
            let mut doc = String::new();
            doc.push_str("query { f(");
            doc.push_str(&" a: 0".repeat(count));
            doc.push_str(") }");
            check(doc);

            // Repeated variable definition
            let mut doc = String::new();
            doc.push_str("query Q(");
            doc.push_str(&" $v: Int".repeat(count));
            doc.push_str(" ) { f }");
            check(doc);
        }
    }

    #[test]
    fn parse_field_set() {
        let source = r#"{ a }"#;

        let parser = Parser::new(source);
        let cst: SyntaxTree<cst::SelectionSet> = parser.parse_selection_set();
        let errors = cst.errors().collect::<Vec<_>>();
        assert_eq!(errors.len(), 0);

        let sel_set: cst::SelectionSet = cst.field_set();
        for sel in sel_set.selections() {
            if let cst::Selection::Field(f) = sel {
                assert_eq!(f.name().unwrap().text().as_ref(), "a")
            } else {
                panic!("no field a in field set selection")
            }
        }

        let source = r#"a { a }"#;

        let parser = Parser::new(source);
        let cst: SyntaxTree<cst::SelectionSet> = parser.parse_selection_set();
        let errors = cst.errors().collect::<Vec<_>>();
        assert_eq!(errors.len(), 0);

        let sel_set: cst::SelectionSet = cst.field_set();
        for sel in sel_set.selections() {
            if let cst::Selection::Field(f) = sel {
                assert_eq!(f.name().unwrap().text().as_ref(), "a")
            } else {
                panic!("no field a in field set selection")
            }
        }
    }

    #[test]
    fn no_infinite_loop() {
        let source = r#"{ ..."#;
        let parser = Parser::new(source).token_limit(3);
        let _cst = parser.parse();
    }
}