glast/
parser.rs

1//! Types and functionality related to the parser.
2//!
3//! This module contains the structs and enums used to represent the AST (in the [`ast`] submodule), and the
4//! [`parse_from_str()`]/[`parse_from_token_stream()`] functions that return a [`TokenTree`], which can be used to
5//! parse the token tree into an abstract syntax tree ([`ParseResult`]).
6//!
7//! # Parser
8//! This parser is (aiming to be) 100% specification compliant; that is, all valid source strings are parsed to
9//! produce correct results with no compile-time errors, and all invalid source strings are parsed on a "best
10//! effort" basis to produce some results and the correct compile-time errors.
11//!
12//! ## Macro expansion
13//! This parser correctly deals with all macro expansion, no matter how arbitrarily complex. Macros are expanded in
14//! all of the places that they are allowed to be expanded in. Because of the fact that macros can contain
15//! partially-valid grammar that only becomes fully valid at the call site with surrounding context, the parser
16//! discards information that a macro call site exists and just looks at the result of the expansion. Hence, the
17//! final AST has no information about macro call sites. However, the syntax highlighting spans does correctly
18//! colour macro call sites.
19//!
20//! ## Conditional compilation
21//! This parser fully supports conditional compilation. Because conditional compilation is a pre-pass (part of the
22//! preprocessor) that runs before the main parser, conditional compilation must be applied beforehand. This crate
23//! handles this process through the [`TokenTree`] struct, which allows you to choose how to apply conditional
24//! compilation. The following options are available:
25//! - Conditional compilation is disabled - no branches are included.
26//! - Conditional compilation is evaluated - branches are included according to the evaluation rules.
27//! - Conditional compilation is enabled using a key - branches are included if they are part of a key.
28//!
29//! By default, syntax highlighting spans are only produced for the chosen branches. If you wish to highlight the
30//! entire source string, all parsing functions have a `syntax_highlight_entire_file` boolean parameter.
31//!
32//! # Differences in behaviour
33//! Since this crate is part of a larger effort to provide an LSP implementation, it is designed to handle errors
34//! in a UX friendly manner. Therefore, this parser tries its best to recover from syntax errors in a sensible
35//! manner and provide a "best effort" AST. The AST retains 100% semantic meaning of the token stream only if no
36//! syntax or semantic errors are produced. If any errors are produced, that means some information has been lost
37//! in the token stream-to-ast conversion.
38//!
39//! The GLSL specification does not mention what the result should be if a syntax/semantic error is encountered,
40//! apart from the fact that a compile-time error must be emitted. The [`ParseResult`] contains all detected
41//! compile-time diagnostics.
42
43pub mod ast;
44pub mod conditional_eval;
45mod conditional_expression;
46mod expression;
47mod printing;
48#[cfg(test)]
49mod walker_tests;
50
51use crate::{
52	diag::{
53		ExprDiag, PreprocConditionalDiag, PreprocDefineDiag, PreprocExtDiag,
54		PreprocLineDiag, PreprocVersionDiag, Semantic, StmtDiag, Syntax,
55	},
56	lexer::{
57		self,
58		preprocessor::{
59			ConditionToken, ExtensionToken, LineToken,
60			TokenStream as PreprocStream, VersionToken,
61		},
62		OpTy, Token, TokenStream,
63	},
64	parser::conditional_expression::cond_parser,
65	syntax::*,
66	Either, GlslVersion, Span, SpanEncoding, Spanned,
67};
68use ast::*;
69use expression::{expr_parser, Mode};
70use std::collections::{HashMap, HashSet};
71
72/// The result of a parsed GLSL token tree.
73#[derive(Debug)]
74pub struct ParseResult {
75	/// The abstract syntax tree. By nature of this tree being parsed after having applied conditional compilation,
76	/// it will not contain any conditional compilation directives.
77	pub ast: Vec<Node>,
78	/// All syntax diagnostics.
79	pub syntax_diags: Vec<Syntax>,
80	/// All semantic diagnostics. Since the parser only creates an AST and doesn't perform any further analysis
81	/// (such as name resolution), this vector will only contain semantic errors in relation to macros.
82	pub semantic_diags: Vec<Semantic>,
83	/// The syntax highlighting tokens. If this result is obtained by calling a parsing method without enabling
84	/// entire-file syntax highlighting, the tokens in this vector will only be for the contents of the abstract
85	/// syntax tree. If entire-file syntax highlighting was enabled, the tokens will be for the entire token tree,
86	/// (and will be correctly ordered).
87	pub syntax_tokens: Vec<SyntaxToken>,
88	/// Spans which cover any regions of disabled code. These regions map to conditional branches that have not
89	/// been included. This vector is populated only if entire-file syntax highlighting was enabled, otherwise it
90	/// will be empty.
91	pub disabled_code_regions: Vec<Span>,
92}
93
94/// Parses a GLSL source string into a tree of tokens that can be then parsed into an abstract syntax tree.
95///
96/// This parser returns a [`TokenTree`] rather than the AST itself; this is required to support conditional
97/// compilation. Because conditional compilation is applied through the preprocessor, there are no rules as to
98/// where the parser can branch - a conditional branch could be introduced in the middle of a variable declaration
99/// for instance. This makes it effectively impossible to represent all branches of a source string within a single
100/// AST without greatly overcomplicating the entire parser, so multiple ASTs are needed to represent all the
101/// conditional branch permutations.
102///
103/// The [`TokenTree`] struct allows you to pick which conditional branches to include, and then parse the source
104/// string with that permutation to produce a [`ParseResult`]. Each permutation of all possible ASTs can be
105/// accessed with a key that describes which of the conditional branches to include. The example below illustrates
106/// this:
107/// ```c
108///                         // Order by appearance
109///                         //  0 (root)
110/// foo                     //  │                   
111///                         //  │                   
112/// #ifdef ...              //  │  1                
113///     AAA                 //  │  │                
114///                         //  │  │                
115///         #ifdef ...      //  │  │  2             
116///             50          //  │  │  │             
117///         #else           //  │  │  3             
118///             60          //  │  │  │             
119///         #endif          //  │  │  ┴             
120///                         //  │  │                
121///     BBB                 //  │  │                
122/// #elif ...               //  │  4                
123///     CCC                 //  │  │                
124/// #else                   //  │  5                
125///     DDD                 //  │  │                
126/// #endif                  //  │  ┴                
127///                         //  │                   
128/// #ifdef ...              //  │  6                
129///     EEE                 //  │  │                
130///                         //  │  │                
131///         #ifdef ...      //  │  │  7             
132///             100         //  │  │  │             
133///         #endif          //  │  │  ┴             
134/// #endif                  //  │  ┴                
135///                         //  │                   
136/// bar                     //  │                   
137///                         //  ┴                   
138/// ```
139///
140/// ## Conditional compilation is disabled
141/// There is always a root token stream which has no conditional branches included. This can be accessed through
142/// the [`root()`](TokenTree::root) method.
143///
144/// ## Conditional compilation is evaluated
145/// Conditional branches are included if they evaluate to true according to the evaluation rules. This can be
146/// accessed through the [`evaluate()`](TokenTree::evaluate) method.
147///
148/// ## Conditional compilation is enabled using a key
149/// Conditional branches are included only if they are part of a key. This can be accessed through the
150/// [`with_key`()](TokenTree::with_key) method.
151///
152/// A key is a list of integers which describes a set of conditional branches. Each encountered controlling
153/// conditional directive (`#ifdef`/`#ifndef`/`#if`/`#elif`/`#else`) in the token stream is given an incrementing
154/// number starting at `1`. If a key contains a given number `n`, that is equivalent to including the conditional
155/// branch under the `n`th directive.
156///
157/// Some examples to visualise:
158/// - `[1, 3]` will produce: `foo AAA 60 BBB bar`.
159/// - `[4]` will produce: `foo CCC bar`.
160/// - `[6, 7]` will produce: `foo EEE 100 bar`.
161/// - `[1, 2, 6, 7]` will produce: `foo AAA 50 BBB EEE 100 bar`.
162///
163/// If you pass a key which doesn't form a valid permutation, the method will return an error. If you pass a key
164/// which includes more than one conditional branch from the same block, the method will return an error.
165///
166/// # Examples
167/// Parse a simple GLSL expression:
168/// ```rust
169/// # use glast::parser::{parse_from_str, ParseResult};
170/// let src = r#"
171/// ##version 450 core
172/// int i = 5.0 + 1;
173/// "#;
174/// let tree = parse_from_str(&src).unwrap();
175/// let ParseResult { ast, .. } = tree.root(false); // We don't care about extra
176///                                                 // syntax highlighting information
177/// ```
178///
179/// # Further reading
180/// See the documentation for the [`TokenTree`] struct for a more in-depth explanation about why this seemingly
181/// roundabout way of doing things is necessary.
182pub fn parse_from_str(source: &str) -> Result<TokenTree, lexer::ParseErr> {
183	let (token_stream, metadata) = lexer::parse(source)?;
184	parse_from_token_stream(token_stream, metadata)
185}
186
187/// Parses a token stream into a tree of tokens that can be then parsed into an abstract syntax tree.
188///
189/// # Examples
190/// See the documentation for the [`parse_from_str()`] function.
191pub fn parse_from_token_stream(
192	mut token_stream: TokenStream,
193	metadata: lexer::Metadata,
194) -> Result<TokenTree, lexer::ParseErr> {
195	// Check the GLSL version as detected by the lexer.
196	if metadata.version == GlslVersion::Unsupported && !token_stream.is_empty()
197	{
198		return Err(lexer::ParseErr::UnsupportedVersion(metadata.version));
199	}
200
201	// Skip tree generation if there are no conditional compilation directives, or if the token stream is empty.
202	if !metadata.contains_conditional_directives || token_stream.is_empty() {
203		let span = if !token_stream.is_empty() {
204			Span::new(
205				token_stream.first().unwrap().1.start,
206				token_stream.last().unwrap().1.end,
207			)
208		} else {
209			Span::new(0, 0)
210		};
211		return Ok(TokenTree {
212			arena: vec![token_stream],
213			tree: vec![TreeNode {
214				parent: None,
215				children: vec![Either::Left(TokenTree::ROOT_NODE_ID)],
216				span,
217			}],
218			order_by_appearance: vec![],
219			end_position: span.end,
220			syntax_diags: vec![],
221			contains_conditional_directives: false,
222			span_encoding: metadata.span_encoding,
223		});
224	}
225
226	// Below is a simple arena-based tree structure. Here is an example of how the source would be represented in
227	// the tree:
228	//
229	// foo
230	// #ifdef T
231	//   AAA
232	//     #ifdef Z
233	//       90
234	//
235	//     #endif
236	//   BBB
237	// #else
238	//   EEE
239	// #endif
240	// bar
241	// baz
242	//
243	// Tree representation:
244	//
245	// Node(                                   0
246	//     Tokens[foo],                        |
247	//     Conditional{                        |
248	//         if: Node(                       |  1
249	//             Tokens[AAA],                |  |
250	//             Conditional{                |  |
251	//                 if: Node(               |  |  2
252	//                     Tokens[90],         |  |  |
253	//                 ),                      |  |  |
254	//             },                          |  |
255	//             Tokens[BBB],                |  |
256	//         ),                              |  |
257	//         else: Node(                     |  3
258	//             Tokens[EEE],                |  |
259	//         )                               |  |
260	//     },                                  |
261	//     Tokens[bar, baz],                   |
262	// )
263	//
264	// order-by-appearance: [(0, [0]), (1, [0]), (2, [1, 0]), (3, [0])]
265
266	let token_stream_end = token_stream.last().unwrap().1.end;
267
268	let mut arena = Vec::new();
269	let mut tree = vec![TreeNode {
270		parent: None,
271		children: Vec::new(),
272		span: Span::new(0, 0),
273	}];
274	// A vector which creates a mapping between `order-of-appearance` -> `(node ID, parent node IDs)`. The parent
275	// node IDs are tracked so that in the `with_key()` method we can check whether the key is valid.
276	let mut order_by_appearance = vec![(0, vec![0])];
277	let mut syntax_diags = Vec::new();
278
279	// The current grouping of tokens. This is pushed into the arena whenever we encounter a branch that creates a
280	// new tree node.
281	let mut current_tokens = Vec::with_capacity(100);
282	// The stack representing the IDs of currently nested nodes. The first ID always refers to the root node.
283	// Invariant: Any time this is `pop()`ed a length check is made to ensure that `[0]` is always valid.
284	let mut stack: Vec<NodeId> = vec![0];
285
286	fn top(stack: &[NodeId]) -> NodeId {
287		*stack.last().unwrap()
288	}
289
290	// We consume all of the tokens from the beginning.
291	loop {
292		let (token, token_span) = if !token_stream.is_empty() {
293			token_stream.remove(0)
294		} else {
295			break;
296		};
297
298		match token {
299			Token::Directive(d) => match d {
300				PreprocStream::IfDef {
301					kw: kw_span,
302					tokens,
303				} => {
304					let hash_syntax = SyntaxToken {
305						ty: SyntaxType::DirectiveHash,
306						modifiers: SyntaxModifiers::empty(),
307						span: token_span.first_char(),
308					};
309					let name_syntax = SyntaxToken {
310						ty: SyntaxType::DirectiveName,
311						modifiers: SyntaxModifiers::empty(),
312						span: kw_span,
313					};
314
315					let conditional_content_span = if tokens.is_empty() {
316						syntax_diags.push(Syntax::PreprocConditional(
317							PreprocConditionalDiag::ExpectedNameAfterIfDef(
318								kw_span.next_single_width(),
319							),
320						));
321						kw_span.next_single_width()
322					} else if tokens.len() == 1 {
323						Span::new(tokens[0].1.start, tokens[0].1.end)
324					} else {
325						// We have trailing tokens.
326						let start = tokens[1].1.start;
327						let end = tokens.last().unwrap().1.end;
328						syntax_diags.push(Syntax::PreprocTrailingTokens(
329							Span::new(start, end),
330						));
331						Span::new(start, end)
332					};
333
334					// Finish the current token group.
335					let idx = arena.len();
336					arena.push(std::mem::take(&mut current_tokens));
337					tree.get_mut(top(&stack))
338						.unwrap()
339						.children
340						.push(Either::Left(idx));
341
342					// Create a new condition block, and a new node for the `ifdef` condition.
343					let idx = tree.len();
344					tree.push(TreeNode {
345						parent: Some(top(&stack)),
346						children: Vec::new(),
347						span: token_span,
348					});
349					tree.get_mut(top(&stack)).unwrap().children.push(
350						Either::Right(ConditionalBlock {
351							conditions: vec![(
352								Conditional::IfDef,
353								token_span,
354								tokens,
355								conditional_content_span,
356								idx,
357								hash_syntax,
358								name_syntax,
359							)],
360							end: None,
361						}),
362					);
363					order_by_appearance.push((idx, stack.clone()));
364					stack.push(idx);
365				}
366				PreprocStream::IfNotDef {
367					kw: kw_span,
368					tokens,
369				} => {
370					let hash_syntax = SyntaxToken {
371						ty: SyntaxType::DirectiveHash,
372						modifiers: SyntaxModifiers::empty(),
373						span: token_span.first_char(),
374					};
375					let name_syntax = SyntaxToken {
376						ty: SyntaxType::DirectiveName,
377						modifiers: SyntaxModifiers::empty(),
378						span: kw_span,
379					};
380
381					let conditional_content_span = if tokens.is_empty() {
382						syntax_diags.push(Syntax::PreprocConditional(
383							PreprocConditionalDiag::ExpectedNameAfterIfDef(
384								kw_span.next_single_width(),
385							),
386						));
387						kw_span.next_single_width()
388					} else if tokens.len() == 1 {
389						Span::new(tokens[0].1.start, tokens[0].1.end)
390					} else {
391						// We have trailing tokens.
392						let start = tokens[1].1.start;
393						let end = tokens.last().unwrap().1.end;
394						syntax_diags.push(Syntax::PreprocTrailingTokens(
395							Span::new(start, end),
396						));
397						Span::new(start, end)
398					};
399
400					// Finish the current token group.
401					let idx = arena.len();
402					arena.push(std::mem::take(&mut current_tokens));
403					tree.get_mut(top(&stack))
404						.unwrap()
405						.children
406						.push(Either::Left(idx));
407
408					// Create a new condition block, and a new node for the `ifdef` condition.
409					let idx = tree.len();
410					tree.push(TreeNode {
411						parent: Some(top(&stack)),
412						children: Vec::new(),
413						span: token_span,
414					});
415					tree.get_mut(top(&stack)).unwrap().children.push(
416						Either::Right(ConditionalBlock {
417							conditions: vec![(
418								Conditional::IfNotDef,
419								token_span,
420								tokens,
421								conditional_content_span,
422								idx,
423								hash_syntax,
424								name_syntax,
425							)],
426							end: None,
427						}),
428					);
429					order_by_appearance.push((idx, stack.clone()));
430					stack.push(idx);
431				}
432				PreprocStream::If {
433					kw: kw_span,
434					tokens,
435				} => {
436					let hash_syntax = SyntaxToken {
437						ty: SyntaxType::DirectiveHash,
438						modifiers: SyntaxModifiers::empty(),
439						span: token_span.first_char(),
440					};
441					let name_syntax = SyntaxToken {
442						ty: SyntaxType::DirectiveName,
443						modifiers: SyntaxModifiers::empty(),
444						span: kw_span,
445					};
446
447					let conditional_content_span = if tokens.is_empty() {
448						syntax_diags.push(Syntax::PreprocConditional(
449							PreprocConditionalDiag::ExpectedExprAfterIf(
450								kw_span.next_single_width(),
451							),
452						));
453						kw_span.next_single_width()
454					} else {
455						Span::new(
456							tokens.first().unwrap().1.start,
457							tokens.last().unwrap().1.end,
458						)
459					};
460
461					// Finish the current token group.
462					let idx = arena.len();
463					arena.push(std::mem::take(&mut current_tokens));
464					tree.get_mut(top(&stack))
465						.unwrap()
466						.children
467						.push(Either::Left(idx));
468
469					// Create a new condition block, and a new node for the `if` condition.
470					let idx = tree.len();
471					tree.push(TreeNode {
472						parent: Some(top(&stack)),
473						children: Vec::new(),
474						span: token_span,
475					});
476					tree.get_mut(top(&stack)).unwrap().children.push(
477						Either::Right(ConditionalBlock {
478							conditions: vec![(
479								Conditional::If,
480								token_span,
481								tokens,
482								conditional_content_span,
483								idx,
484								hash_syntax,
485								name_syntax,
486							)],
487							end: None,
488						}),
489					);
490					order_by_appearance.push((idx, stack.clone()));
491					stack.push(idx);
492				}
493				PreprocStream::ElseIf {
494					kw: kw_span,
495					tokens,
496				} => {
497					let hash_syntax = SyntaxToken {
498						ty: SyntaxType::DirectiveHash,
499						modifiers: SyntaxModifiers::empty(),
500						span: token_span.first_char(),
501					};
502					let name_syntax = SyntaxToken {
503						ty: SyntaxType::DirectiveName,
504						modifiers: SyntaxModifiers::empty(),
505						span: kw_span,
506					};
507
508					let conditional_content_span = if tokens.is_empty() {
509						syntax_diags.push(Syntax::PreprocConditional(
510							PreprocConditionalDiag::ExpectedExprAfterElseIf(
511								kw_span.next_single_width(),
512							),
513						));
514						kw_span.next_single_width()
515					} else {
516						Span::new(
517							tokens.first().unwrap().1.start,
518							tokens.last().unwrap().1.end,
519						)
520					};
521
522					if stack.len() > 1 {
523						// Finish the current token group for the previous conditional node.
524						let idx = arena.len();
525						arena.push(std::mem::take(&mut current_tokens));
526						tree.get_mut(top(&stack))
527							.unwrap()
528							.children
529							.push(Either::Left(idx));
530						stack.pop();
531
532						// By popping the stack, we are now pointing to the parent node that is the conditional
533						// block.
534
535						// Create a new node for the `elif` condition.
536						let idx = tree.len();
537						tree.push(TreeNode {
538							parent: Some(top(&stack)),
539							children: Vec::new(),
540							span: token_span,
541						});
542						let node = tree.get_mut(top(&stack)).unwrap();
543						node.span.end = token_span.end;
544						let Either::Right(cond_block) = node.children.last_mut().unwrap() else { unreachable!() };
545						cond_block.conditions.push((
546							Conditional::ElseIf,
547							token_span,
548							tokens,
549							conditional_content_span,
550							idx,
551							hash_syntax,
552							name_syntax,
553						));
554						order_by_appearance.push((idx, stack.clone()));
555						stack.push(idx);
556					} else {
557						syntax_diags.push(Syntax::PreprocConditional(
558							PreprocConditionalDiag::UnmatchedElseIf(token_span),
559						));
560					}
561				}
562				PreprocStream::Else {
563					kw: kw_span,
564					tokens,
565				} => {
566					let hash_syntax = SyntaxToken {
567						ty: SyntaxType::DirectiveHash,
568						modifiers: SyntaxModifiers::empty(),
569						span: token_span.first_char(),
570					};
571					let name_syntax = SyntaxToken {
572						ty: SyntaxType::DirectiveName,
573						modifiers: SyntaxModifiers::empty(),
574						span: kw_span,
575					};
576
577					// We are not expecting anything after `#else`.
578					let conditional_content_span = if tokens.is_empty() {
579						kw_span.next_single_width()
580					} else {
581						let span = Span::new(
582							tokens.first().unwrap().1.start,
583							tokens.last().unwrap().1.end,
584						);
585						syntax_diags.push(Syntax::PreprocTrailingTokens(span));
586						span
587					};
588
589					if stack.len() > 1 {
590						// Finish the current token group for the previous conditional node.
591						let idx = arena.len();
592						arena.push(std::mem::take(&mut current_tokens));
593						tree.get_mut(top(&stack))
594							.unwrap()
595							.children
596							.push(Either::Left(idx));
597						stack.pop();
598
599						// By popping the stack, we are now pointing to the parent node that is the conditional
600						// block.
601
602						// Create a new node for the `else` condition.
603						let idx = tree.len();
604						tree.push(TreeNode {
605							parent: Some(top(&stack)),
606							children: Vec::new(),
607							span: token_span,
608						});
609						let node = tree.get_mut(top(&stack)).unwrap();
610						node.span.end = token_span.end;
611						let Either::Right(cond_block) = node.children.last_mut().unwrap() else { unreachable!() };
612						cond_block.conditions.push((
613							Conditional::Else,
614							token_span,
615							tokens,
616							conditional_content_span,
617							idx,
618							hash_syntax,
619							name_syntax,
620						));
621						order_by_appearance.push((idx, stack.clone()));
622						stack.push(idx);
623					} else {
624						syntax_diags.push(Syntax::PreprocConditional(
625							PreprocConditionalDiag::UnmatchedElse(token_span),
626						));
627					}
628				}
629				PreprocStream::EndIf {
630					kw: kw_span,
631					tokens,
632				} => {
633					let hash_syntax = SyntaxToken {
634						ty: SyntaxType::DirectiveHash,
635						modifiers: SyntaxModifiers::empty(),
636						span: token_span.first_char(),
637					};
638					let name_syntax = SyntaxToken {
639						ty: SyntaxType::DirectiveName,
640						modifiers: SyntaxModifiers::empty(),
641						span: kw_span,
642					};
643
644					// We are not expecting anything after `#endif`.
645					let conditional_content_span = if tokens.is_empty() {
646						kw_span.next_single_width()
647					} else {
648						let span = Span::new(
649							tokens.first().unwrap().1.start,
650							tokens.last().unwrap().1.end,
651						);
652						syntax_diags.push(Syntax::PreprocTrailingTokens(span));
653						span
654					};
655
656					if stack.len() > 1 {
657						let node = tree.get_mut(top(&stack)).unwrap();
658						node.span.end = token_span.end;
659						// Finish the current token group for the previous conditional node.
660						let idx = arena.len();
661						arena.push(std::mem::take(&mut current_tokens));
662						tree.get_mut(top(&stack))
663							.unwrap()
664							.children
665							.push(Either::Left(idx));
666						stack.pop();
667
668						// By popping the stack, we are now pointing to the parent node that is the conditional
669						// block.
670
671						// Close the condition block.
672						let node = tree.get_mut(top(&stack)).unwrap();
673						node.span.end = token_span.end;
674						let Either::Right(cond_block) = node.children.last_mut().unwrap() else { unreachable!() };
675						cond_block.end = Some((
676							Conditional::End,
677							token_span,
678							tokens,
679							conditional_content_span,
680							hash_syntax,
681							name_syntax,
682						));
683					} else {
684						syntax_diags.push(Syntax::PreprocConditional(
685							PreprocConditionalDiag::UnmatchedEndIf(token_span),
686						));
687					}
688				}
689				_ => {
690					let node = tree.get_mut(top(&stack)).unwrap();
691					node.span.end = token_span.end;
692					current_tokens.push((Token::Directive(d), token_span));
693				}
694			},
695			_ => {
696				let node = tree.get_mut(top(&stack)).unwrap();
697				node.span.end = token_span.end;
698				current_tokens.push((token, token_span));
699			}
700		}
701	}
702
703	// Finish the current group of remaining tokens.
704	if !current_tokens.is_empty() {
705		let idx = arena.len();
706		arena.push(std::mem::take(&mut current_tokens));
707		tree.get_mut(top(&stack))
708			.unwrap()
709			.children
710			.push(Either::Left(idx));
711	}
712	stack.pop();
713
714	// If we still have nodes on the stack, that means we have one or more unclosed condition blocks.
715	if stack.len() > 0 {
716		let node = tree.get_mut(top(&stack)).unwrap();
717		node.span.end = token_stream_end;
718		let Either::Right(cond) = node.children.last_mut().unwrap() else { unreachable!(); };
719		syntax_diags.push(Syntax::PreprocConditional(
720			PreprocConditionalDiag::UnclosedBlock(
721				cond.conditions[0].1,
722				Span::new(token_stream_end, token_stream_end),
723			),
724		));
725	}
726
727	// In order to make our job easier later down the line, for each conditional branch node ordered-by-appearance,
728	// we want to know its node ID and the () for the parent nodes. The () consists of:
729	// - The parent's position within `order_by_appearance`. <- We don't have this information yet.
730	// - The parent's node ID.
731	let old_order = order_by_appearance;
732	let mut order_by_appearance = Vec::with_capacity(old_order.len());
733	for (node_id, parents) in old_order.iter() {
734		order_by_appearance.push((
735			*node_id,
736			parents
737				.iter()
738				.map(|node_id| {
739					(
740						old_order.iter().find(|(n, _)| node_id == n).unwrap().0,
741						*node_id,
742					)
743				})
744				.collect::<Vec<_>>(),
745		))
746	}
747
748	Ok(TokenTree {
749		arena,
750		tree,
751		order_by_appearance,
752		end_position: token_stream_end,
753		syntax_diags,
754		contains_conditional_directives: true,
755		span_encoding: metadata.span_encoding,
756	})
757}
758
759/// Pretty-prints the AST.
760///
761/// The output is not stable and can be changed at any time, so the specific formatting should not be relied upon.
762///
763/// # Examples
764/// Print a simple GLSL expression:
765/// ```rust
766/// # use glast::parser::{parse_from_str, print_ast, ParseResult};
767/// let src = r#"
768/// ##version 450 core
769/// int i = 5.0 + 1;
770/// "#;
771/// let tree = parse_from_str(&src).unwrap();
772/// let ParseResult { ast, .. } = tree.root(false);
773/// println!("{}", print_ast(&ast));
774/// ```
775/// Would result in:
776/// ```text
777/// #Version(
778///     version: 450
779///     profile: core
780/// ),
781/// VarDef(
782///     type: int
783///     ident: i
784///     value: BinOp(
785///         op: +
786///         left: 5.0
787///         right: 1
788///     )
789/// )
790/// ```
791pub fn print_ast(ast: &[Node]) -> String {
792	printing::print_ast(ast)
793}
794
795/// The error type for parsing operations.
796#[derive(Debug)]
797pub enum ParseErr {
798	/// This number doesn't map to a controlling conditional directive.
799	InvalidNum(usize),
800	/// This number has a dependent number that was not specified in the key.
801	InvalidChain(usize),
802	/// This tree contains no conditional compilation branches.
803	NoConditionalBranches,
804}
805
806/// A tree of token streams generated from a GLSL source string.
807///
808/// The tree represents all conditional compilation branches. Call the [`root()`](Self::root),
809/// [`evaluate()`](Self::evaluate) or [`with_key()`](Self::with_key) method to parse an abstract syntax tree with
810/// the selected conditional branches into a [`ParseResult`].
811///
812/// # Examples
813/// For a fully detailed example on how to use this struct to create an AST, see the documentation for the
814/// [`parse_from_str()`] function.
815///
816/// # Why is this necessary?
817/// Conditional compilation is implemented through the preprocessor, which sets no rules as to where conditional
818/// branching can take place, (apart from the fact that a preprocessor directive must exist on its own line). This
819/// means that a conditional branch could, for example, completely change the signature of a program:
820/// ```c
821///  1│ void foo() {
822///  2│
823///  3│     int i = 5;
824///  4│
825///  5│     #ifdef TOGGLE
826///  6│     }
827///  7│     void bar() {
828///  8│     #endif
829///  9│
830/// 10│     int p = 0;
831/// 11│ }
832/// ```
833/// In the example above, if `TOGGLE` is not defined, we have a function `foo` who's scope ends on line `11` and
834/// includes two variable definitions `i` and `p`. However, if `TOGGLE` is defined, the function `foo` ends on line
835/// `6` instead and only contains the variable `i`, and we have a completely new function `bar` which has the
836/// variable `p`.
837///
838/// This technically can be representable in the AST, it's just that it would look something like this:
839/// ```text
840/// Root(
841///     Either(
842///         (
843///             Function(
844///                 name="foo"
845///                 start=1
846///                 end=11
847///                 contents(
848///                     Variable(name="i" value=5)
849///                     Variable(name="p" value=0)
850///                 )
851///             )
852///         ),
853///         (
854///             Function(
855///                 name="foo"
856///                 start=1
857///                 end=6
858///                 contents(
859///                     Variable(name="i" value=5)
860///                 )
861///             ),
862///             Function(
863///                 name="bar"
864///                 start=7
865///                 end=11
866///                 contents(
867///                     Variable(name="p" value=0)
868///                 )
869///             ),
870///         )
871///     )
872/// )
873/// ```
874/// Notice how this AST is effectively `Either(AST_with_condition_false, AST_with_condition_true)`. This is because
875/// the function `foo` could potentially be split in the middle, but an AST node cannot have multiple end points,
876/// which means that we can't include both permutations within the function node; we need separate function nodes
877/// instead. And since we have two separate possibilities for `foo`, we need to branch in the node above `foo`,
878/// which in this example is effectively the root node.
879///
880/// It is arguable whether such a representation would be better than the current solution. On one hand all
881/// possibilities are within the single AST, but on the other hand such an AST would quickly become confusing to
882/// work with, manipulate, and analyse in the scenario of complex conditional branching.
883///
884/// The main reason this option wasn't chosen is because it would immensely complicate the parsing logic, and in
885/// turn the maintainability of this project. As with all recursive-descent parsers, the individual parsing
886/// functions hold onto any temporary state. In this case, the function for parsing functions holds information
887/// such as the name, the starting position, the parameters, etc. If we would encounter the conditional branching
888/// within this parsing function, we would somehow need to know ahead-of-time whether this conditional branch will
889/// affect the function node, and if so, be able to return up the call stack to split the parser whilst also
890/// somehow not losing the temporary state. This would require abandoning the recursive-descent approach, which
891/// would greatly complicate the parser and make writing & maintaining the parsing logic itself a convoluted mess,
892/// and that is not a trade-off I'm willing to take.
893///
894/// This complication occurs because the preprocessor is a separate pass ran before the main compiler and does not
895/// follow the GLSL grammar rules, which means that preprocessor directives and macros can be included literally
896/// anywhere and the file *may* still be valid after expansion. In comparison, some newer languages include
897/// conditional compilation as part of the language grammar itself. In Rust for example, conditional compilation is
898/// applied via attributes to entire expressions/statements, which means that you can't run into this mess where a
899/// conditional branch could split a function mid-way through parsing. GLSL unfortunately uses the C preprocessor,
900/// which results in the approach taken by this crate being necessary to achieve 100% specification-compliant
901/// behaviour.
902///
903/// Note that macros can actually be correctly expanded within the same pass as the main parser without introducing
904/// too much complexity, it's just that conditional compilation can't.
905pub struct TokenTree {
906	/// The arena of token streams.
907	///
908	/// # Invariants
909	/// If `contains_conditional_directives` is `false`, this vector is:
910	/// ```ignore
911	/// vec![enitire_token_stream]
912	/// ```
913	arena: Vec<TokenStream>,
914	/// The tree.
915	///
916	/// # Invariants
917	/// `self.[0]` always exists and is the root node.
918	///
919	/// If `contains_conditional_directives` is `false`, this vector is:
920	/// ```ignore
921	/// vec![TreeNode {
922	///     parent: None,
923	///     children: vec![Either::Left(Self::ROOT_NODE_ID)]
924	/// }]
925	/// ```
926	tree: Vec<TreeNode>,
927	/// IDs of the conditional branch nodes ordered by appearance.
928	///
929	/// - `0` - The ID of the `[n]`th conditional branch node.
930	/// - `1` - The `(index into self, node ID)` of the parent nodes which this conditional branch node depends on.
931	///
932	/// # Invariants
933	/// If `contains_conditional_directives` is `false`, this is empty.
934	///
935	/// If this contains entries, each `self[n].1[0]` is guaranteed to exist and be of value `(0,
936	/// Self::ROOT_NODE_ID)`. Also, `self[0]` is guaranteed to exist, (and point to the root node).
937	order_by_appearance: Vec<(NodeId, Vec<(usize, NodeId)>)>,
938	/// The ending position of the last token in the tree.
939	end_position: usize,
940
941	/// Syntax diagnostics related to conditional compilation directives. Note that this vector won't contain any
942	/// syntax diagnostics in relation to conditional expressions, since those are not evaluated here.
943	///
944	/// # Invariants
945	/// If `contains_conditional_directives` is `false`, this is empty.
946	syntax_diags: Vec<Syntax>,
947
948	/// Whether there are any conditional directives.
949	contains_conditional_directives: bool,
950	/// The type of encoding of spans.
951	span_encoding: SpanEncoding,
952}
953
954type NodeId = usize;
955type ArenaId = usize;
956
957/// A node within the token tree.
958#[derive(Debug)]
959struct TreeNode {
960	/// The parent of this node.
961	parent: Option<NodeId>,
962	/// The children/contents of this node. Each entry either points to a token stream (in the arena), or is a
963	/// conditional block which points to child nodes for each conditional branch.
964	children: Vec<Either<ArenaId, ConditionalBlock>>,
965	/// The span of the entire node.
966	///
967	/// If this is a conditional branch node, the span starts from the beginning of the controlling conditional
968	/// directive to the beginning of the next `#elif`/`#else` directive, or to the end of the `#endif` directive.
969	span: Span,
970}
971
972/// A conditional block, part of a `TreeNode`.
973#[derive(Debug)]
974struct ConditionalBlock {
975	/// The individual conditional branches.
976	///
977	/// - `0` - The type of condition.
978	/// - `1` - The span of the entire directive.
979	/// - `2` - The tokens in the directive.
980	/// - `3` - The span of the tokens **only**, this does not include the `#if` part.
981	/// - `4` - The ID of the node that contains the contents of the branch.
982	/// - `5` - The syntax highlighting token for the `#` symbol.
983	/// - `6` - The syntax highlighting token for the name of the directive.
984	///
985	/// # Invariants
986	/// There will always be an entry at `[0]` and it will be a `Conditional::IfDef/IfNotDef/If` variant.
987	///
988	/// This will never contain a `Conditional::End` variant.
989	conditions: Vec<(
990		Conditional,
991		Span,
992		Vec<Spanned<ConditionToken>>,
993		Span,
994		NodeId,
995		SyntaxToken,
996		SyntaxToken,
997	)>,
998	/// The `#endif` directive.
999	///
1000	/// This is separate because the `#endif` doesn't contain any children, (since it ends the conditional block),
1001	/// hence a `NodeId` for this would be semantically nonsensical.
1002	///
1003	/// - `0` - The type of conditional directive.
1004	/// - `1` - The span of the entire directive.
1005	/// - `2` - The tokens in the directive.
1006	/// - `3` - The span of the tokens **only**, this does not include the `#endif` part.
1007	/// - `4` - The syntax highlighting token for the `#` symbol.
1008	/// - `5` - The syntax highlighting token for the `endif` directive name.
1009	///
1010	/// # Invariants
1011	/// This will be a `Conditional::End` variant.
1012	end: Option<(
1013		Conditional,
1014		Span,
1015		Vec<Spanned<ConditionToken>>,
1016		Span,
1017		SyntaxToken,
1018		SyntaxToken,
1019	)>,
1020}
1021
1022/// Describes the type of a conditional directive.
1023#[derive(Debug, Clone, Copy, PartialEq, Eq)]
1024pub enum Conditional {
1025	IfDef,
1026	IfNotDef,
1027	If,
1028	ElseIf,
1029	Else,
1030	End,
1031}
1032
1033impl TokenTree {
1034	/// Node ID of the root node.
1035	const ROOT_NODE_ID: usize = 0;
1036
1037	/// Parses the root token stream; no conditional branches are included.
1038	///
1039	/// Whilst this is guaranteed to succeed, if the entire source string is wrapped within a conditional block
1040	/// this will return an empty AST.
1041	///
1042	/// # Syntax highlighting
1043	/// The `syntax_highlight_entire_source` parameter controls whether to produce syntax tokens for the entire
1044	/// source string, rather than just for the root tokens. This involves parsing all conditional branches in
1045	/// order to produce all the syntax highlighting information. Whilst the implementation of this functionality
1046	/// uses the smallest possible number of permutations that cover the entire source string, if there are a lot
1047	/// of conditional branches that can result in the token tree being parsed many times which may have
1048	/// performance implications.
1049	///
1050	/// The actual syntax highlighting results are based off the chosen permutations which cannot be controlled. If
1051	/// you require more control, you must manually parse the relevant permutations and collect the tokens
1052	/// yourself.
1053	///
1054	/// If there are no conditional branches, this parameter does nothing.
1055	///
1056	/// # Examples
1057	/// For a fully detailed example on how to use this method to create an abstract syntax tree, see the
1058	/// documentation for the [`parse_from_str()`] function.
1059	pub fn root(&self, syntax_highlight_entire_source: bool) -> ParseResult {
1060		// Get the relevant streams for the root branch.
1061		let streams = if !self.contains_conditional_directives {
1062			self.arena.clone()
1063		} else {
1064			let mut streams = Vec::new();
1065			let node = &self.tree[Self::ROOT_NODE_ID];
1066			for child in &node.children {
1067				match child {
1068					Either::Left(idx) => streams.push(self.arena[*idx].clone()),
1069					// Ignore any conditional blocks under the root node.
1070					Either::Right(_) => {}
1071				}
1072			}
1073			streams
1074		};
1075
1076		// Parse the root branch.
1077		let mut walker = Walker::new(
1078			RootTokenStreamProvider::new(streams, self.end_position),
1079			self.span_encoding,
1080		);
1081		let mut nodes = Vec::new();
1082		while !walker.is_done() {
1083			parse_stmt(&mut walker, &mut nodes);
1084		}
1085		walker.syntax_diags.append(&mut self.syntax_diags.clone());
1086		let (ast, syntax_diags, semantic_diags, mut root_tokens) = (
1087			nodes,
1088			walker.syntax_diags,
1089			walker.semantic_diags,
1090			walker.syntax_tokens,
1091		);
1092
1093		if syntax_highlight_entire_source
1094			&& self.contains_conditional_directives
1095		{
1096			let mut merged_syntax_tokens =
1097				Vec::with_capacity(root_tokens.len());
1098			// This will store the regions of the conditional blocks.
1099			let mut conditional_block_regions = Vec::new();
1100
1101			let keys = self
1102				.minimal_no_of_permutations_for_complete_syntax_highlighting();
1103
1104			// Move over any root tokens before any conditional blocks.
1105			let first_node = &self.tree[keys[0][0]];
1106			let span = Span::new(0, first_node.span.start);
1107			loop {
1108				match root_tokens.get(0) {
1109					Some(token) => {
1110						if span.contains(token.span) {
1111							merged_syntax_tokens.push(root_tokens.remove(0));
1112						} else {
1113							break;
1114						}
1115					}
1116					None => break,
1117				}
1118			}
1119
1120			// Deal with all tokens produced from conditional branches, as well as any root tokens in-between
1121			// the conditional blocks.
1122			for (i, key) in keys.iter().enumerate() {
1123				let node = &self.tree[key[0]];
1124				conditional_block_regions.push(node.span);
1125
1126				let (
1127					ParseResult {
1128						syntax_tokens: mut new_tokens,
1129						..
1130					},
1131					_,
1132				) = self.parse_nodes(key);
1133				loop {
1134					let SyntaxToken { span: s, .. } = match new_tokens.get(0) {
1135						Some(t) => t,
1136						None => break,
1137					};
1138
1139					if s.is_before_pos(node.span.start) {
1140						new_tokens.remove(0);
1141						continue;
1142					}
1143
1144					if node.span.contains(*s) {
1145						merged_syntax_tokens.push(new_tokens.remove(0));
1146					} else {
1147						break;
1148					}
1149				}
1150
1151				if let Some(next_key) = keys.get(i + 1) {
1152					let next_node = &self.tree[next_key[0]];
1153					let span = Span::new(node.span.end, next_node.span.start);
1154					if !span.is_zero_width() {
1155						// We have another conditional block after this one; there may be root tokens in-between
1156						// these two blocks which require moving over.
1157						loop {
1158							let SyntaxToken { span: s, .. } =
1159								match root_tokens.get(0) {
1160									Some(t) => t,
1161									None => break,
1162								};
1163
1164							if span.contains(*s) {
1165								merged_syntax_tokens
1166									.push(root_tokens.remove(0));
1167							} else {
1168								break;
1169							}
1170						}
1171					}
1172				}
1173			}
1174
1175			// Append any remaining root tokens.
1176			merged_syntax_tokens.append(&mut root_tokens);
1177
1178			ParseResult {
1179				ast,
1180				syntax_diags,
1181				semantic_diags,
1182				syntax_tokens: merged_syntax_tokens,
1183				disabled_code_regions: conditional_block_regions,
1184			}
1185		} else {
1186			ParseResult {
1187				ast,
1188				syntax_diags,
1189				semantic_diags,
1190				syntax_tokens: root_tokens,
1191				disabled_code_regions: Vec::new(),
1192			}
1193		}
1194	}
1195
1196	/// Parses the token tree by including conditional branches if they evaluate to true.
1197	///
1198	/// Whilst this is guaranteed to succeed, if the entire source string is wrapped within a conditional branch
1199	/// that fails evaluation this will return an empty AST. This method also returns the evaluated key.
1200	///
1201	/// # Syntax highlighting
1202	/// The `syntax_highlight_entire_source` parameter controls whether to produce syntax tokens for the entire
1203	/// source string, rather than just for the included conditional branches. This involves parsing **all**
1204	/// conditional branches in order to produce all the syntax highlighting information. Whilst the implementation
1205	/// of this functionality uses the smallest possible number of permutations that cover the entire source
1206	/// string, if there are a lot of conditional branches that can result in the token tree being parsed many
1207	/// times which may have performance implications.
1208	///
1209	/// The actual syntax highlighting results are based off the chosen permutations which cannot be controlled. If
1210	/// you require more control, you must manually parse the relevant permutations and collect the tokens
1211	/// yourself.
1212	///
1213	/// If there are no conditional branches, or the only conditional branches that exist are also evaluated as
1214	/// true and included in the running of the parser, this parameter does nothing.
1215	///
1216	/// # Examples
1217	/// For a fully detailed example on how to use this method to create an abstract syntax tree, see the
1218	/// documentation for the [`parse_from_str()`] function.
1219	pub fn evaluate(
1220		&self,
1221		syntax_highlight_entire_source: bool,
1222	) -> (ParseResult, Vec<usize>) {
1223		// Parse the token tree, evaluating conditional compilation.
1224		let mut walker = Walker::new(
1225			DynamicTokenStreamProvider::new(
1226				&self.arena,
1227				&self.tree,
1228				self.end_position,
1229			),
1230			self.span_encoding,
1231		);
1232		let mut nodes = Vec::new();
1233		while !walker.is_done() {
1234			parse_stmt(&mut walker, &mut nodes);
1235		}
1236		walker.syntax_diags.append(&mut self.syntax_diags.clone());
1237
1238		let eval_key = walker.token_provider.chosen_key;
1239		let eval_regions = walker.token_provider.chosen_regions;
1240		let (ast, syntax_diags, semantic_diags, eval_tokens) = (
1241			nodes,
1242			walker.syntax_diags,
1243			walker.semantic_diags,
1244			walker.syntax_tokens,
1245		);
1246
1247		let (syntax_tokens, disabled_code_regions) =
1248			if syntax_highlight_entire_source
1249				&& self.contains_conditional_directives
1250			{
1251				self.merge_syntax_tokens(
1252					eval_key.clone(),
1253					eval_regions,
1254					eval_tokens,
1255				)
1256			} else {
1257				(eval_tokens, Vec::new())
1258			};
1259
1260		(
1261			ParseResult {
1262				ast,
1263				syntax_diags,
1264				semantic_diags,
1265				syntax_tokens,
1266				disabled_code_regions,
1267			},
1268			eval_key,
1269		)
1270	}
1271
1272	/// Parses a token tree by including conditional branches if they are part of the provided key.
1273	///
1274	/// This method can return an `Err` in the following cases:
1275	/// - The `key` has a number which doesn't map to a controlling conditional directive.
1276	/// - The `key` has a number which depends on another number that is missing.
1277	///
1278	/// # Syntax highlighting
1279	/// The `syntax_highlight_entire_source` parameter controls whether to produce syntax tokens for the entire
1280	/// source string, rather than just for the selected conditional branches. This involves parsing all
1281	/// conditional branches in order to produce all the syntax highlighting information. Whilst the implementation
1282	/// of this functionality uses the smallest possible number of permutations that cover the entire source
1283	/// string, if there are a lot of conditional branches that can result in the token tree being parsed many
1284	/// times which may have performance implications.
1285	///
1286	/// The actual syntax highlighting results are based off the chosen permutations which cannot be controlled. If
1287	/// you require more control, you must manually parse the relevant permutations and collect the tokens
1288	/// yourself.
1289	///
1290	/// If there are no conditional branches, this parameter does nothing.
1291	///
1292	/// # Examples
1293	/// For a fully detailed example on how to use this method to create an abstract syntax tree, see the
1294	/// documentation for the [`parse_from_str()`] function.
1295	pub fn with_key(
1296		&self,
1297		key: impl AsRef<[usize]>,
1298		syntax_highlight_entire_source: bool,
1299	) -> Result<ParseResult, ParseErr> {
1300		let key = key.as_ref();
1301
1302		if !self.contains_conditional_directives {
1303			return Err(ParseErr::NoConditionalBranches);
1304		}
1305
1306		let mut nodes = Vec::with_capacity(key.len());
1307		// Check that the key is valid.
1308		let mut visited_node_ids = vec![0];
1309		for num in key {
1310			let (id, required_ids) = match self.order_by_appearance.get(*num) {
1311				Some(t) => t,
1312				None => return Err(ParseErr::InvalidNum(*num)),
1313			};
1314
1315			// Panic: See `self.order_by_appearance` invariant.
1316			if !visited_node_ids.contains(&required_ids.last().unwrap().1) {
1317				return Err(ParseErr::InvalidChain(*num));
1318			}
1319
1320			visited_node_ids.push(*id);
1321			nodes.push(*id);
1322		}
1323
1324		let (
1325			ParseResult {
1326				ast,
1327				syntax_diags,
1328				semantic_diags,
1329				syntax_tokens,
1330				disabled_code_regions: _,
1331			},
1332			chosen_regions,
1333		) = self.parse_nodes(&nodes);
1334
1335		let (syntax_tokens, disabled_code_regions) =
1336			if syntax_highlight_entire_source
1337				&& self.contains_conditional_directives
1338			{
1339				self.merge_syntax_tokens(nodes, chosen_regions, syntax_tokens)
1340			} else {
1341				(syntax_tokens, Vec::new())
1342			};
1343
1344		Ok(ParseResult {
1345			ast,
1346			syntax_diags,
1347			semantic_diags,
1348			syntax_tokens,
1349			disabled_code_regions,
1350		})
1351	}
1352
1353	/// Parses the specified nodes.
1354	///
1355	/// Returns a `ParseResult` along with a vector of chosen regions.
1356	///
1357	/// # Invariants
1358	/// At least one node ID needs to be specified.
1359	///
1360	/// The IDs of the nodes need to be in chronological order.
1361	///
1362	/// The IDs need to map to a valid permutation of conditional branches.
1363	fn parse_nodes(&self, nodes: &[NodeId]) -> (ParseResult, Vec<Span>) {
1364		if nodes.is_empty() {
1365			panic!("Expected at least one node to parse");
1366		}
1367
1368		let mut streams = Vec::new();
1369		let mut chosen_regions = Vec::new();
1370		let mut conditional_syntax_tokens = Vec::new();
1371		let mut nodes_idx = 0;
1372		let mut call_stack = vec![(0, 0, 0, -1)];
1373		// Panic: We have at least one node, so at least one iteration of this loop can be performed without
1374		// any panics.
1375		'outer: loop {
1376			let (node_id, child_idx, cond_block_idx, evaluated_cond_block) =
1377				match call_stack.last_mut() {
1378					Some(t) => t,
1379					None => break,
1380				};
1381			let node = &self.tree[*node_id];
1382			let Some(child) = node.children.get(*child_idx) else { break; };
1383
1384			match child {
1385				Either::Left(arena_id) => {
1386					let stream = self.arena[*arena_id].clone();
1387
1388					if let Some((_, span)) = stream.last() {
1389						chosen_regions.push(Span::new(
1390							stream.first().unwrap().1.start,
1391							span.end,
1392						));
1393					}
1394
1395					*child_idx += 1;
1396					if *child_idx == node.children.len() {
1397						// We have gone through all of the children of this node, so we want to pop it from the
1398						// stack.
1399						call_stack.pop();
1400					}
1401
1402					streams.push(stream);
1403				}
1404				Either::Right(cond_block) => {
1405					let matched_condition_node_id;
1406					loop {
1407						if *cond_block_idx == cond_block.conditions.len() {
1408							// We've gone through all of the conditional blocks. We can now push the syntax tokens
1409							// for the `#endif` and move onto the next child of this node.
1410							if let Some((
1411								_,
1412								directive_span,
1413								syntax_tokens,
1414								_,
1415								hash_token,
1416								dir_token,
1417							)) = &cond_block.end
1418							{
1419								let mut tokens = vec![*hash_token, *dir_token];
1420								if !syntax_tokens.is_empty() {
1421									tokens.push(SyntaxToken {
1422										ty: SyntaxType::Invalid,
1423										modifiers: SyntaxModifiers::CONDITIONAL,
1424										span: Span::new(
1425											syntax_tokens
1426												.first()
1427												.unwrap()
1428												.1
1429												.start,
1430											syntax_tokens.last().unwrap().1.end,
1431										),
1432									});
1433								}
1434								conditional_syntax_tokens.push(tokens);
1435
1436								if *evaluated_cond_block
1437									== cond_block.conditions.len() as isize - 1
1438								{
1439									// We have either chosen the final conditional branch, which means we are
1440									// responsible for syntax highlighting the `#endif` directive. (This is only
1441									// relevant if we are syntax highlighting the entire file). The reason we can't
1442									// do this unconditionally is because if the final block wasn't picked, then an
1443									// alternative permutation is responsible for syntax highlighting it, but the
1444									// span of the syntax highlight region stretches to cover the `#endif` part. If
1445									// we declared this as chosen, the other span region wouldn't fit and would
1446									// therefore be discarded, and hence syntax highlighting would be missing for
1447									// the final branch.
1448									chosen_regions.push(*directive_span);
1449								}
1450							}
1451
1452							*cond_block_idx = 0;
1453							*child_idx += 1;
1454							*evaluated_cond_block = -1;
1455							if *child_idx == node.children.len() {
1456								// We have gone through all of the children of this node, so we want to pop it from
1457								// the stack.
1458								call_stack.pop();
1459							}
1460
1461							continue 'outer;
1462						}
1463
1464						let current_cond_block_idx = *cond_block_idx;
1465
1466						let (
1467							_,
1468							directive_span,
1469							syntax_tokens,
1470							_,
1471							branch_node_id,
1472							hash_token,
1473							dir_token,
1474						) = &cond_block.conditions[current_cond_block_idx];
1475
1476						*cond_block_idx += 1;
1477
1478						match nodes.get(nodes_idx) {
1479							Some(n) => {
1480								if *branch_node_id == *n {
1481									// We have found a matching branch.
1482									let mut tokens =
1483										vec![*hash_token, *dir_token];
1484									for (token, span) in syntax_tokens.iter() {
1485										tokens.push(SyntaxToken {
1486											ty: token.non_semantic_colour(),
1487											modifiers:
1488												SyntaxModifiers::CONDITIONAL,
1489											span: *span,
1490										});
1491									}
1492									conditional_syntax_tokens.push(tokens);
1493
1494									matched_condition_node_id = *branch_node_id;
1495									*evaluated_cond_block =
1496										current_cond_block_idx as isize;
1497									chosen_regions.push(*directive_span);
1498									break;
1499								}
1500							}
1501							None => {}
1502						}
1503					}
1504
1505					call_stack.push((matched_condition_node_id, 0, 0, -1));
1506					nodes_idx += 1;
1507					continue;
1508				}
1509			}
1510		}
1511
1512		// Parse the pre-selected branches.
1513		let mut walker = Walker::new(
1514			PreselectedTokenStreamProvider::new(
1515				streams,
1516				conditional_syntax_tokens,
1517				self.end_position,
1518			),
1519			self.span_encoding,
1520		);
1521		let mut nodes = Vec::new();
1522		while !walker.is_done() {
1523			parse_stmt(&mut walker, &mut nodes);
1524		}
1525		walker.syntax_diags.append(&mut self.syntax_diags.clone());
1526
1527		(
1528			ParseResult {
1529				ast: nodes,
1530				syntax_diags: walker.syntax_diags,
1531				semantic_diags: walker.semantic_diags,
1532				syntax_tokens: walker.syntax_tokens,
1533				disabled_code_regions: Vec::new(),
1534			},
1535			chosen_regions,
1536		)
1537	}
1538
1539	/// Merges syntax tokens from other keys to cover the entire file.
1540	///
1541	/// This method takes the chosen key, the chosen regions, and syntax tokens from said chosen key. If there are
1542	/// no other permutations, this will return the syntax tokens verbatim.
1543	fn merge_syntax_tokens(
1544		&self,
1545		chosen_key: Vec<usize>,
1546		chosen_regions: Vec<Span>,
1547		mut chosen_tokens: Vec<SyntaxToken>,
1548	) -> (Vec<SyntaxToken>, Vec<Span>) {
1549		let mut other_keys =
1550			self.minimal_no_of_permutations_for_complete_syntax_highlighting();
1551		// We want to exclude the key that we've already chosen. If that leaves no keys left, we know we've already
1552		// covered the entire tree, so we can return early.
1553		other_keys.retain(|k| k != &chosen_key);
1554		if other_keys.is_empty() {
1555			return (chosen_tokens, Vec::new());
1556		}
1557
1558		// Parse all of the keys and store relevant information.
1559		// `(key, parse_result, chosen_spans)`.
1560		let mut other_keys = other_keys
1561			.into_iter()
1562			.map(|k| {
1563				let (a, b) = self.parse_nodes(&k);
1564				(k, a, b)
1565			})
1566			.collect::<Vec<_>>();
1567
1568		// This will store the calculated regions of disabled code in the context of the chosen key.
1569		let mut disabled_regions_for_chosen_key = Vec::new();
1570		// This will store the regions of tokens (with the key they came from) in a chronological order that covers
1571		// the entire source string.
1572		let mut final_regions_with_key = Vec::new();
1573
1574		let mut span_to_next_chosen_region =
1575			Span::new(0, chosen_regions.first().map(|s| s.start).unwrap_or(0));
1576		let mut chosen_regions_idx = 0;
1577		// We toggle between consuming regions from the chosen key and consuming regions from the other keys on
1578		// each iteration on the loop.
1579		let mut consuming_chosen = false;
1580		loop {
1581			if !consuming_chosen {
1582				// We create a vector of all regions from the other keys that can fit before the next region in the
1583				// chosen key.
1584				let mut regions_that_can_fit = Vec::new();
1585				for (key, _, regions) in other_keys.iter() {
1586					for region in regions {
1587						if span_to_next_chosen_region.contains(*region) {
1588							regions_that_can_fit.push((*region, key.clone()));
1589						}
1590					}
1591				}
1592
1593				// We sort the vector chronologically and remove any duplicates. It doesn't matter which duplicate
1594				// we remove since they will be identical.
1595				regions_that_can_fit.sort_by(|a, b| {
1596					if a.0.is_before(&b.0) {
1597						std::cmp::Ordering::Less
1598					} else if a.0.is_after(&b.0) {
1599						std::cmp::Ordering::Greater
1600					} else {
1601						std::cmp::Ordering::Equal
1602					}
1603				});
1604				regions_that_can_fit.dedup_by(|a, b| a.0 == b.0);
1605
1606				// This vector is also a list of disabled regions from the perspective of the chosen key, so we
1607				// want to append it.
1608				regions_that_can_fit.iter().for_each(|(span, _)| {
1609					disabled_regions_for_chosen_key.push(*span)
1610				});
1611
1612				final_regions_with_key.append(&mut regions_that_can_fit);
1613
1614				if chosen_regions_idx == chosen_regions.len() {
1615					break;
1616				}
1617				consuming_chosen = true;
1618			} else {
1619				// We push the next region from the chosen key.
1620				let current_region = chosen_regions[chosen_regions_idx];
1621				final_regions_with_key
1622					.push((current_region, chosen_key.clone()));
1623				match chosen_regions.get(chosen_regions_idx + 1) {
1624					Some(next) => {
1625						span_to_next_chosen_region =
1626							Span::new(current_region.end, next.start);
1627					}
1628					None => {
1629						span_to_next_chosen_region =
1630							Span::new(current_region.end, self.end_position);
1631					}
1632				}
1633
1634				chosen_regions_idx += 1;
1635				consuming_chosen = false;
1636			}
1637		}
1638
1639		// We now have a vector of chronologically ordered regions along with the key we should take tokens from.
1640		// We can now create a new vector that contains all of these tokens.
1641		let mut merged_syntax_tokens = Vec::with_capacity(chosen_tokens.len());
1642		for (range, key) in final_regions_with_key {
1643			let tokens = if key == chosen_key {
1644				&mut chosen_tokens
1645			} else {
1646				&mut other_keys
1647					.iter_mut()
1648					.find(|(k, _, _)| k == &key)
1649					.unwrap()
1650					.1
1651					.syntax_tokens
1652			};
1653
1654			loop {
1655				let Some(token) = tokens.get(0) else { break; };
1656
1657				if token.span.is_before(&range) {
1658					// This token is before the current range. We clearly have already gone past it, so it can
1659					// safely be discarded.
1660					tokens.remove(0);
1661				} else if range.contains(token.span) {
1662					merged_syntax_tokens.push(tokens.remove(0));
1663				} else {
1664					// This token is after the current range. We haven't gotten there yet, so that means we can
1665					// finish dealing with this token stream for now.
1666					break;
1667				}
1668			}
1669		}
1670
1671		(merged_syntax_tokens, disabled_regions_for_chosen_key)
1672	}
1673
1674	/// Returns all of the keys (**of node IDs, not order-of-appearance numbers**) required to fully syntax
1675	/// highlight the entire tree.
1676	///
1677	/// Each key points to the conditional branch nodes that contain the actual tokens of the conditional branch.
1678	/// To get information about the controlling conditional directive itself, you must look up the parent and find
1679	/// the node ID in one of the child's conditional blocks.
1680	fn minimal_no_of_permutations_for_complete_syntax_highlighting(
1681		&self,
1682	) -> Vec<Vec<NodeId>> {
1683		// TODO: Merge permutations that have no collisions, such as the first branch from the first conditional
1684		// block with the first branch from the second conditional block. It may make sense to replace the
1685		// `order_by_appearance` traversal with a manual stack traversal of the tree.
1686
1687		let mut chains_of_nodes = Vec::new();
1688		for (id, required_ids) in self.order_by_appearance.iter().skip(1) {
1689			let mut new_chain = required_ids[1..]
1690				.iter()
1691				.map(|(_, id)| *id)
1692				.collect::<Vec<_>>();
1693			new_chain.push(*id);
1694
1695			// We may have a chain of nodes which fully fits within this new chain. For example, we could have a
1696			// chain `[0, 4]`, and the new chain we have is `[0, 4, 5]`. In this case, the existing chain is wholly
1697			// unnecessary because all of the lines of code in that chain will be covered in this new chain, (plus
1698			// the lines of code in the new `5` branch). Since we are trying to find the minimal number of
1699			// permutations to cover the whole file, we can discard the existing chain.
1700
1701			// See if any existing chains are contained within the new one.
1702			let idx = chains_of_nodes
1703				.iter()
1704				.position(|v: &Vec<usize>| new_chain.starts_with(v.as_ref()));
1705
1706			if let Some(idx) = idx {
1707				// `idx` points to an existing chain of nodes that is part of the new chain of nodes being added
1708				// right now. That means the existing chain can be removed because this new chain will cover 100%
1709				// of the old chain.
1710				chains_of_nodes.remove(idx);
1711			}
1712
1713			chains_of_nodes.push(new_chain);
1714		}
1715		chains_of_nodes
1716	}
1717
1718	/// Returns a vector of all controlling conditional directives in the tree.
1719	///
1720	/// The return value consists of:
1721	/// - `0` - The conditional directive type. This cannot be `Conditional::End`.
1722	/// - `1` - Span of the directive.
1723	///
1724	/// Note that the first controlling conditional directive (index of `1`) is at the beginning of this vector
1725	/// (index `0`), so an offset must be performed.
1726	pub fn get_all_controlling_conditional_directives(
1727		&self,
1728	) -> Vec<(Conditional, Span)> {
1729		let mut directives = Vec::new();
1730		for (_i, (node_id, _)) in
1731			self.order_by_appearance.iter().enumerate().skip(1)
1732		{
1733			let parent_id = self.tree[*node_id].parent.unwrap();
1734			for child in self.tree[parent_id].children.iter() {
1735				match child {
1736					Either::Left(_) => {}
1737					Either::Right(block) => {
1738						let Some((ty,_, _, span, _, _, _)) = block.conditions.iter().find(|(_,_, _, _, id, _, _)| node_id == id) else { continue; };
1739						directives.push((*ty, *span));
1740					}
1741				}
1742			}
1743		}
1744		directives
1745	}
1746
1747	/// Creates a new key to access the specified controlling conditional directive.
1748	///
1749	/// This method takes the index (of the chronological appearance) of the controlling conditional directive
1750	/// (`#if`/`#ifdef`/`#ifndef`/`#elif`/`#else`), and returns a key that reaches that conditional branch. The new
1751	/// key contains the minimal number of prerequisite branches necessary to reach the chosen directive.
1752	pub fn create_key(
1753		&self,
1754		chosen_conditional_directive: usize,
1755	) -> Vec<usize> {
1756		// There is no existing key, so we need to construct one from scratch. Each node within the vector has
1757		// a list of all prerequisite parents, so we can just use that, (removing the unneeded parent node IDs).
1758		let Some((_new_selection_node_id, parent_info)) = self.order_by_appearance.get(chosen_conditional_directive) else {
1759				return Vec::new();
1760			};
1761
1762		let mut key = parent_info
1763			.iter()
1764			.skip(1)
1765			.map(|(idx, _)| *idx)
1766			.collect::<Vec<_>>();
1767		key.push(chosen_conditional_directive);
1768		key
1769	}
1770
1771	/// Modifies an existing key to access the specified controlling conditional directive.
1772	///
1773	/// This method keeps all existing conditional branches as long as they don't conflict with the newly chosen
1774	/// branch.
1775	pub fn add_selection_to_key(
1776		&self,
1777		existing_key: &Vec<usize>,
1778		chosen_conditional_directive: usize,
1779	) -> Vec<usize> {
1780		let mut call_stack = vec![(0, 0, 0)];
1781		// This map will store a vector of sibling branches. This is a map so that the iteration can access the
1782		// correct vector to push into, but once the iteration is over we only care about the values not the keys.
1783		// Each vector has all of the order-of-appearance indexes of sibling conditional branches.
1784		let mut sibling_map: HashMap<(usize, usize), Vec<usize>> =
1785			HashMap::new();
1786		let mut cond_counter = 0;
1787		'outer: loop {
1788			let (node_id, child_idx, cond_block_idx) =
1789				match call_stack.last_mut() {
1790					Some(t) => t,
1791					None => break,
1792				};
1793			let node = &self.tree[*node_id];
1794			let Some(child) = node.children.get(*child_idx) else { break; };
1795
1796			match child {
1797				Either::Left(_) => {
1798					*child_idx += 1;
1799					if *child_idx == node.children.len() {
1800						// We have gone through all of the children of this node, so we want to pop it from the
1801						// stack.
1802						call_stack.pop();
1803					}
1804				}
1805				Either::Right(cond_block) => {
1806					if *cond_block_idx == cond_block.conditions.len() {
1807						// We have gone through all of the conditional branches.
1808						*cond_block_idx = 0;
1809						*child_idx += 1;
1810						if *child_idx == node.children.len() {
1811							// We have gone through all of the children of this node, so we want to pop it from the
1812							// stack.
1813							call_stack.pop();
1814						}
1815
1816						continue 'outer;
1817					}
1818
1819					cond_counter += 1;
1820					let current_cond_block_idx = *cond_block_idx;
1821					let (_, _, _, _, cond_branch_node_id, _, _) =
1822						&cond_block.conditions[current_cond_block_idx];
1823
1824					match sibling_map.get_mut(&(*node_id, *child_idx)) {
1825						Some(v) => v.push(cond_counter),
1826						None => {
1827							sibling_map.insert(
1828								(*node_id, *child_idx),
1829								vec![cond_counter],
1830							);
1831						}
1832					}
1833
1834					*cond_block_idx += 1;
1835					call_stack.push((*cond_branch_node_id, 0, 0));
1836				}
1837			}
1838		}
1839
1840		let chosen_parents =
1841			match self.order_by_appearance.get(chosen_conditional_directive) {
1842				Some((_, parent_info)) => {
1843					parent_info.iter().map(|(i, _)| *i).collect::<Vec<_>>()
1844				}
1845				None => {
1846					return existing_key.to_vec();
1847				}
1848			};
1849
1850		// Vector of vectors of siblings of nodes that the newly chosen node depends on.
1851		let mut siblings = sibling_map
1852			.values()
1853			.filter(|siblings| {
1854				for parent in chosen_parents.iter() {
1855					if siblings.contains(parent) {
1856						return true;
1857					}
1858				}
1859				false
1860			})
1861			.collect::<Vec<_>>();
1862
1863		match sibling_map
1864			.values()
1865			.find(|siblings| siblings.contains(&chosen_conditional_directive))
1866		{
1867			Some(v) => siblings.push(v),
1868			None => return existing_key.to_vec(),
1869		}
1870
1871		let mut new_key = Vec::with_capacity(existing_key.len());
1872		'outer: for existing in existing_key {
1873			for siblings in siblings.iter() {
1874				if siblings.contains(existing) {
1875					// This node is a sibling of the newly chosen node or one of the parent nodes required by the
1876					// newly chosen node, so we disgard it.
1877					continue 'outer;
1878				}
1879			}
1880
1881			let (_, parent_info) =
1882				self.order_by_appearance.get(*existing).unwrap();
1883			let parent_idx_s =
1884				parent_info.iter().map(|(i, _)| *i).collect::<Vec<_>>();
1885			for siblings in siblings.iter() {
1886				for i in parent_idx_s.iter() {
1887					if siblings.contains(i) {
1888						// This node depends on a parent node that is a sibling of the newly chosen node or one of
1889						// the parent nodes required by the newly chosen node, so we discard it.
1890						continue 'outer;
1891					}
1892				}
1893			}
1894
1895			// This node does not clash, so we can keep it.
1896			new_key.push(*existing);
1897		}
1898
1899		let mut insertion = chosen_parents;
1900		insertion.remove(0); // Remove the `0` root parent, since that's treated implicitly in the key.
1901		insertion.push(chosen_conditional_directive);
1902
1903		if new_key.is_empty() {
1904			return insertion;
1905		} else if new_key.len() == 1 {
1906			if insertion.last().unwrap() < new_key.first().unwrap() {
1907				insertion.append(&mut new_key);
1908				return insertion;
1909			} else {
1910				new_key.append(&mut insertion);
1911				return new_key;
1912			}
1913		}
1914
1915		// We need to insert the new selection. The correct place to insert it will be chronological.
1916		if insertion.last().unwrap() < new_key.first().unwrap() {
1917			insertion.append(&mut new_key);
1918			return insertion;
1919		}
1920
1921		let mut insertion_idx = None;
1922		for (i, val) in new_key.windows(2).enumerate() {
1923			let first = val[0];
1924			let second = val[1];
1925
1926			if first == *insertion.first().unwrap() {
1927				insertion.remove(0);
1928			}
1929
1930			if first < *insertion.first().unwrap()
1931				&& *insertion.last().unwrap() < second
1932			{
1933				// The insertion fits between these two values.
1934				insertion_idx = Some(i + 1);
1935				break;
1936			}
1937		}
1938
1939		if let Some(insertion_idx) = insertion_idx {
1940			for i in insertion.into_iter().rev() {
1941				new_key.insert(insertion_idx, i);
1942			}
1943		} else {
1944			new_key.append(&mut insertion);
1945		}
1946
1947		new_key
1948	}
1949
1950	/// Modifies an existing key to remove access to the specified controlling conditional directive.
1951	///
1952	/// This method keeps all existing conditional branches as long as they don't depend on the specified to-remove
1953	/// branch.
1954	pub fn remove_selection_from_key(
1955		&self,
1956		existing_key: &Vec<usize>,
1957		removed_conditional_directive: usize,
1958	) -> Vec<usize> {
1959		existing_key
1960			.iter()
1961			.filter_map(|node| {
1962				// This node is the to-remove node.
1963				if *node == removed_conditional_directive {
1964					return None;
1965				}
1966
1967				// This node doesn't even exist
1968				let Some((_node_id, parent_info)) = self.order_by_appearance.get(*node) else {
1969				return None;
1970			};
1971
1972				// This node depends on the to-remove node.
1973				if parent_info
1974					.iter()
1975					.find(|(i, _)| *i == removed_conditional_directive)
1976					.is_some()
1977				{
1978					return None;
1979				}
1980
1981				return Some(*node);
1982			})
1983			.collect()
1984	}
1985
1986	/// Returns whether the source string contains any conditional directives.
1987	pub fn contains_conditional_directives(&self) -> bool {
1988		self.contains_conditional_directives
1989	}
1990}
1991
1992/// A token stream provider.
1993trait TokenStreamProvider<'a>: Clone {
1994	/// Returns the next token stream. If the end of the source string has been reached, `None` will be returned.
1995	fn get_next_stream(
1996		&mut self,
1997		macros: &HashMap<String, (Span, Macro)>,
1998		syntax_diags: &mut Vec<Syntax>,
1999		syntax_tokens: &mut Vec<SyntaxToken>,
2000		span_encoding: SpanEncoding,
2001	) -> Option<TokenStream>;
2002
2003	/// Returns the zero-width span of the source string.
2004	fn get_end_span(&self) -> Span;
2005}
2006
2007/// A root token stream provider.
2008#[derive(Debug, Clone)]
2009struct RootTokenStreamProvider<'a> {
2010	/// The source streams in the correct order.
2011	streams: Vec<TokenStream>,
2012	/// Cursor position.
2013	cursor: usize,
2014	/// The zero-width span at the end of the source string.
2015	end_span: Span,
2016	_phantom: std::marker::PhantomData<&'a ()>,
2017}
2018
2019impl<'a> RootTokenStreamProvider<'a> {
2020	/// Constructs a new pre-selected token stream provider.
2021	fn new(streams: Vec<TokenStream>, end_position: usize) -> Self {
2022		Self {
2023			streams,
2024			cursor: 0,
2025			end_span: Span::new(end_position, end_position),
2026			_phantom: std::marker::PhantomData::default(),
2027		}
2028	}
2029}
2030
2031impl<'a> TokenStreamProvider<'a> for RootTokenStreamProvider<'a> {
2032	fn get_next_stream(
2033		&mut self,
2034		_macros: &HashMap<String, (Span, Macro)>,
2035		_syntax_diags: &mut Vec<Syntax>,
2036		_syntax_tokens: &mut Vec<SyntaxToken>,
2037		_span_encoding: SpanEncoding,
2038	) -> Option<TokenStream> {
2039		let v = self.streams.get(self.cursor).map(|v| v.clone());
2040		self.cursor += 1;
2041		v
2042	}
2043
2044	fn get_end_span(&self) -> Span {
2045		self.end_span
2046	}
2047}
2048
2049/// A pre-selected token stream provider.
2050#[derive(Debug, Clone)]
2051struct PreselectedTokenStreamProvider<'a> {
2052	/// The source streams in the correct order.
2053	streams: Vec<TokenStream>,
2054	/// Cursor position.
2055	cursor: usize,
2056	/// The zero-width span at the end of the source string.
2057	end_span: Span,
2058	/// Syntax tokens for each conditional directive that is part of the pre-selected evaluation, in order of
2059	/// appearance.
2060	conditional_syntax_tokens: Vec<Vec<SyntaxToken>>,
2061	_phantom: std::marker::PhantomData<&'a ()>,
2062}
2063
2064impl<'a> PreselectedTokenStreamProvider<'a> {
2065	/// Constructs a new pre-selected token stream provider.
2066	fn new(
2067		streams: Vec<TokenStream>,
2068		conditional_syntax_tokens: Vec<Vec<SyntaxToken>>,
2069		end_position: usize,
2070	) -> Self {
2071		Self {
2072			streams,
2073			cursor: 0,
2074			end_span: Span::new(end_position, end_position),
2075			conditional_syntax_tokens,
2076			_phantom: std::marker::PhantomData::default(),
2077		}
2078	}
2079}
2080
2081impl<'a> TokenStreamProvider<'a> for PreselectedTokenStreamProvider<'a> {
2082	fn get_next_stream(
2083		&mut self,
2084		_macros: &HashMap<String, (Span, Macro)>,
2085		_syntax_diags: &mut Vec<Syntax>,
2086		syntax_tokens: &mut Vec<SyntaxToken>,
2087		_span_encoding: SpanEncoding,
2088	) -> Option<TokenStream> {
2089		match self.streams.get(self.cursor) {
2090			Some(v) => {
2091				if let Some((_, stream_span)) = v.first() {
2092					while let Some(f) = self.conditional_syntax_tokens.first() {
2093						if let Some(SyntaxToken {
2094							span: cond_span, ..
2095						}) = f.first()
2096						{
2097							if cond_span.is_before(stream_span) {
2098								syntax_tokens.append(
2099									&mut self
2100										.conditional_syntax_tokens
2101										.remove(0),
2102								);
2103							} else {
2104								break;
2105							}
2106						} else {
2107							// This vector conditional syntax tokens is empty, so there's no need to keep it
2108							// around. If we didn't remove this, we could theoretically have an infinite loop.
2109							self.conditional_syntax_tokens.remove(0);
2110						}
2111					}
2112				}
2113
2114				self.cursor += 1;
2115				return Some(v.clone());
2116			}
2117			None => {
2118				while !self.conditional_syntax_tokens.is_empty() {
2119					syntax_tokens
2120						.append(&mut self.conditional_syntax_tokens.remove(0));
2121				}
2122				return None;
2123			}
2124		}
2125	}
2126
2127	fn get_end_span(&self) -> Span {
2128		self.end_span
2129	}
2130}
2131
2132/// A dynamic token stream provider. This evaluates conditional directives on-the-fly.
2133#[derive(Debug, Clone)]
2134struct DynamicTokenStreamProvider<'a> {
2135	/// The arena of token streams.
2136	arena: &'a [TokenStream],
2137	/// The tree.
2138	tree: &'a [TreeNode],
2139	/// The current call stack.
2140	///
2141	/// - `0` - The node ID.
2142	/// - `1` - The index into the node's `children`.
2143	/// - `2` - The index into the current child's conditional branches if the child is a conditional block.
2144	/// - `3` - The conditional branch that has been picked, if the child is a conditional block.
2145	ptrs: Vec<(usize, usize, usize, isize)>,
2146	/// The key of node IDs that was chosen in the evaluation.
2147	chosen_key: Vec<usize>,
2148	/// The spans of regions of relevant syntax tokens. This includes all tokens within conditional branches that
2149	/// have been chosen, as well as all tokens for the directives themselves that have been looked at, but not
2150	/// necessarily chosen; i.e. this would include a failed `#elif`/`#else` and the `#endif`.
2151	chosen_regions: Vec<Span>,
2152	/// The zero-width span at the end of the source string.
2153	end_span: Span,
2154}
2155
2156impl<'a> DynamicTokenStreamProvider<'a> {
2157	/// Constructs a new dynamic token stream provider.
2158	fn new(
2159		arena: &'a [TokenStream],
2160		tree: &'a [TreeNode],
2161		end_position: usize,
2162	) -> Self {
2163		Self {
2164			arena,
2165			tree,
2166			ptrs: vec![(TokenTree::ROOT_NODE_ID, 0, 0, -1)],
2167			chosen_key: Vec::new(),
2168			chosen_regions: Vec::new(),
2169			end_span: Span::new(end_position, end_position),
2170		}
2171	}
2172}
2173
2174impl<'a> TokenStreamProvider<'a> for DynamicTokenStreamProvider<'a> {
2175	fn get_next_stream(
2176		&mut self,
2177		macros: &HashMap<String, (Span, Macro)>,
2178		syntax_diags: &mut Vec<Syntax>,
2179		syntax_tokens: &mut Vec<SyntaxToken>,
2180		span_encoding: SpanEncoding,
2181	) -> Option<TokenStream> {
2182		'outer: loop {
2183			let (node_ptr, child_idx, cond_block_idx, evaluated_cond_block) =
2184				match self.ptrs.last_mut() {
2185					// `let-else` breaks `rustfmt`.
2186					Some((
2187						node_ptr,
2188						child_idx,
2189						cond_block_idx,
2190						evaluated_cond_block,
2191					)) => (
2192						node_ptr,
2193						child_idx,
2194						cond_block_idx,
2195						evaluated_cond_block,
2196					),
2197					_ => {
2198						// We have exhausted the token tree; there is nothing left.
2199						return None;
2200					}
2201				};
2202			let node = self.tree.get(*node_ptr).unwrap();
2203			let Some(child) = node.children.get(*child_idx) else { return None; };
2204
2205			match child {
2206				Either::Left(arena_id) => {
2207					let stream = self.arena[*arena_id].clone();
2208
2209					// Update the last span value. This value can't be calculated ahead-of-time since we don't know
2210					// what conditional compilation will evaluate to.
2211					if let Some((_, span)) = stream.last() {
2212						/* self.end_span = *span; */
2213						self.chosen_regions.push(Span::new(
2214							stream.first().unwrap().1.start,
2215							span.end,
2216						));
2217					}
2218
2219					*child_idx += 1;
2220					if *child_idx == node.children.len() {
2221						// We have gone through all of the children of this node, so we want to pop it from the
2222						// stack.
2223						self.ptrs.pop();
2224					}
2225
2226					return Some(stream);
2227				}
2228				Either::Right(cond_block) => {
2229					let matched_condition_node_id;
2230					loop {
2231						if *cond_block_idx == cond_block.conditions.len() {
2232							// We've gone through all of the conditional blocks. We can now push the syntax tokens
2233							// for the `#endif` and move onto the next child of this node.
2234							if let Some((
2235								_,
2236								directive_span,
2237								tokens,
2238								_,
2239								hash_token,
2240								dir_token,
2241							)) = &cond_block.end
2242							{
2243								syntax_tokens.push(*hash_token);
2244								syntax_tokens.push(*dir_token);
2245								if !tokens.is_empty() {
2246									syntax_tokens.push(SyntaxToken {
2247										ty: SyntaxType::Invalid,
2248										modifiers: SyntaxModifiers::CONDITIONAL,
2249										span: Span::new(
2250											tokens.first().unwrap().1.start,
2251											tokens.last().unwrap().1.end,
2252										),
2253									});
2254								}
2255								if *evaluated_cond_block
2256									== cond_block.conditions.len() as isize - 1
2257								{
2258									// We have chosen the final conditional block, which means we are responsible
2259									// for syntax highlighting the `#endif` directive. (This is only relevant if we
2260									// are syntax highlighting the entire file). The reason we can't do this
2261									// unconditionally is because if the final block wasn't picked, then an
2262									// alternative permutation is responsible for syntax highlighting it, but the
2263									// span of the syntax highlight region stretches to cover the `#endif` part. If
2264									// we declared this as chosen, the other span region wouldn't fit and would
2265									// therefore be discarded, and hence syntax highlighting would be missing for
2266									// the final branch.
2267									self.chosen_regions.push(*directive_span);
2268								}
2269							}
2270
2271							*cond_block_idx = 0;
2272							*child_idx += 1;
2273							*evaluated_cond_block = -1;
2274							if *child_idx == node.children.len() {
2275								// We have gone through all of the children of this node, so we want to pop it from
2276								// the stack.
2277								self.ptrs.pop();
2278							}
2279
2280							continue 'outer;
2281						}
2282
2283						let current_cond_block_idx = *cond_block_idx;
2284
2285						let (
2286							condition_ty,
2287							directive_span,
2288							tokens,
2289							_,
2290							node_id,
2291							hash_token,
2292							dir_token,
2293						) = &cond_block.conditions[current_cond_block_idx];
2294
2295						*cond_block_idx += 1;
2296
2297						match condition_ty {
2298							Conditional::IfDef | Conditional::IfNotDef => {
2299								syntax_tokens.push(*hash_token);
2300								syntax_tokens.push(*dir_token);
2301
2302								if !tokens.is_empty() {
2303									let (token, token_span) = &tokens[0];
2304									match token {
2305										ConditionToken::Ident(str) => {
2306											syntax_tokens.push(SyntaxToken {
2307												ty: SyntaxType::Ident,
2308												modifiers:
2309													SyntaxModifiers::CONDITIONAL,
2310												span: *token_span,
2311											});
2312											if tokens.len() > 1 {
2313												syntax_tokens.push(SyntaxToken {
2314													ty: SyntaxType::Invalid,
2315													modifiers:SyntaxModifiers::CONDITIONAL,
2316													span: Span::new(
2317														tokens[1].1.start,
2318														tokens.last().unwrap().1.end
2319													)
2320												});
2321											}
2322											let result =
2323												conditional_eval::evaluate_def(
2324													Ident {
2325														name: str.clone(),
2326														span: *token_span,
2327													},
2328													macros,
2329												);
2330											if result
2331												&& *evaluated_cond_block == -1
2332											{
2333												matched_condition_node_id =
2334													*node_id;
2335												*evaluated_cond_block =
2336													current_cond_block_idx
2337														as isize;
2338												self.chosen_regions
2339													.push(*directive_span);
2340												break;
2341											}
2342										}
2343										_ => {
2344											syntax_tokens.push(SyntaxToken {
2345												ty: SyntaxType::Invalid,
2346												modifiers:
2347													SyntaxModifiers::CONDITIONAL,
2348												span: Span::new(
2349													token_span.start,
2350													tokens
2351														.last()
2352														.unwrap()
2353														.1
2354														.end,
2355												),
2356											});
2357										}
2358									}
2359								}
2360							}
2361							Conditional::If | Conditional::ElseIf => {
2362								syntax_tokens.push(*hash_token);
2363								syntax_tokens.push(*dir_token);
2364
2365								let (expr, mut syntax, mut colours) =
2366									cond_parser(
2367										tokens.clone(),
2368										macros,
2369										span_encoding,
2370									);
2371								syntax_diags.append(&mut syntax);
2372								syntax_tokens.append(&mut colours);
2373
2374								if let Some(expr) = expr {
2375									let result =
2376										conditional_eval::evaluate_expr(
2377											expr, macros,
2378										);
2379									if result && *evaluated_cond_block == -1 {
2380										matched_condition_node_id = *node_id;
2381										*evaluated_cond_block =
2382											current_cond_block_idx as isize;
2383										self.chosen_regions
2384											.push(*directive_span);
2385										break;
2386									}
2387								}
2388							}
2389							Conditional::Else => {
2390								syntax_tokens.push(*hash_token);
2391								syntax_tokens.push(*dir_token);
2392								if !tokens.is_empty() {
2393									syntax_tokens.push(SyntaxToken {
2394										ty: SyntaxType::Invalid,
2395										modifiers: SyntaxModifiers::CONDITIONAL,
2396										span: Span::new(
2397											tokens.first().unwrap().1.start,
2398											tokens.last().unwrap().1.end,
2399										),
2400									});
2401								}
2402
2403								if *evaluated_cond_block == -1 {
2404									// An `else` branch is always unconditionally chosen.
2405									matched_condition_node_id = *node_id;
2406									*evaluated_cond_block =
2407										current_cond_block_idx as isize;
2408									self.chosen_regions.push(*directive_span);
2409									break;
2410								}
2411							}
2412							Conditional::End => unreachable!(),
2413						}
2414					}
2415
2416					self.ptrs.push((matched_condition_node_id, 0, 0, -1));
2417					self.chosen_key.push(matched_condition_node_id);
2418					continue;
2419				}
2420			}
2421		}
2422	}
2423
2424	fn get_end_span(&self) -> Span {
2425		self.end_span
2426	}
2427}
2428
2429/// Allows for stepping through a token stream. Takes care of dealing with irrelevant details from the perspective
2430/// of the parser, such as comments and macro expansion.
2431struct Walker<'a, Provider: TokenStreamProvider<'a>> {
2432	/// The token stream provider.
2433	token_provider: Provider,
2434	_phantom: std::marker::PhantomData<&'a ()>,
2435	/// The active token streams.
2436	///
2437	/// - `0` - The macro identifier, (for the root source stream this is just `""`).
2438	/// - `1` - The token stream.
2439	/// - `2` - The cursor.
2440	streams: Vec<(String, TokenStream, usize)>,
2441
2442	/// The currently defined macros.
2443	///
2444	/// Key: The macro identifier.
2445	///
2446	/// Value:
2447	/// - `0` - The span of the macro signature.
2448	/// - `1` - Macro information.
2449	macros: HashMap<String, (Span, Macro)>,
2450	/// The span of an initial macro call site. Only the first macro call site is registered here.
2451	macro_call_site: Option<Span>,
2452	/// The actively-called macro identifiers.
2453	active_macros: HashSet<String>,
2454
2455	/// Any syntax diagnostics created from the tokens parsed so-far.
2456	syntax_diags: Vec<Syntax>,
2457	/// Any semantic diagnostics created from the tokens parsed so-far.
2458	semantic_diags: Vec<Semantic>,
2459
2460	/// The syntax highlighting tokens created from the tokens parsed so-far.
2461	syntax_tokens: Vec<SyntaxToken>,
2462	/// The type of encoding of spans.
2463	span_encoding: SpanEncoding,
2464}
2465
2466/// Data for a macro.
2467#[derive(Debug, Clone)]
2468enum Macro {
2469	Object(TokenStream),
2470	Function {
2471		params: Vec<Ident>,
2472		body: TokenStream,
2473	},
2474}
2475
2476impl<'a, Provider: TokenStreamProvider<'a>> Walker<'a, Provider> {
2477	/// Constructs a new walker.
2478	fn new(mut token_provider: Provider, span_encoding: SpanEncoding) -> Self {
2479		let macros = HashMap::new();
2480		let mut syntax_diags = Vec::new();
2481		let mut syntax_tokens = Vec::new();
2482
2483		// Get the first stream.
2484		let streams = match token_provider.get_next_stream(
2485			&macros,
2486			&mut syntax_diags,
2487			&mut syntax_tokens,
2488			span_encoding,
2489		) {
2490			Some(stream) => vec![("".into(), stream, 0)],
2491			None => vec![],
2492		};
2493
2494		let mut active_macros = HashSet::new();
2495		// Invariant: A macro cannot have no name (an empty identifier), so this won't cause any hashing clashes
2496		// with valid macros. By using "" we can avoid having a special case for the root source stream.
2497		active_macros.insert("".into());
2498
2499		Self {
2500			token_provider,
2501			_phantom: Default::default(),
2502			streams,
2503			macros,
2504			macro_call_site: None,
2505			active_macros,
2506			syntax_diags,
2507			semantic_diags: Vec::new(),
2508			syntax_tokens,
2509			span_encoding,
2510		}
2511	}
2512
2513	/// Returns a reference to the current token under the cursor, without advancing the cursor.
2514	fn peek(&self) -> Option<Spanned<&Token>> {
2515		if self.streams.is_empty() {
2516			None
2517		} else if self.streams.len() == 1 {
2518			let (_, stream, cursor) = self.streams.last().unwrap();
2519			stream.get(*cursor).map(|(t, s)| (t, *s))
2520		} else {
2521			let (_, stream, cursor) = self.streams.last().unwrap();
2522			match stream.get(*cursor).map(|(t, _)| t) {
2523				Some(token) => Some((
2524					token,
2525					// Panic: This is guaranteed to be some if `self.streams.len() > 1`.
2526					self.macro_call_site.unwrap(),
2527				)),
2528				None => None,
2529			}
2530		}
2531	}
2532
2533	/// Returns the current token under the cursor, without advancing the cursor. (The token gets cloned).
2534	fn get(&self) -> Option<Spanned<Token>> {
2535		if self.streams.is_empty() {
2536			None
2537		} else if self.streams.len() == 1 {
2538			let (_, stream, cursor) = self.streams.last().unwrap();
2539			stream.get(*cursor).cloned()
2540		} else {
2541			let (_, stream, cursor) = self.streams.last().unwrap();
2542			let token = stream.get(*cursor).map(|(t, _)| t).cloned();
2543			token.map(|t| {
2544				(
2545					t,
2546					// Panic: This is guaranteed to be some if `self.streams.len() > 1`.
2547					self.macro_call_site.unwrap(),
2548				)
2549			})
2550		}
2551	}
2552
2553	/// Peeks the next token without advancing the cursor.
2554	///
2555	/// **This method is expensive** to call because it needs to correctly deal with macros. Avoid calling this
2556	/// often.
2557	///
2558	/// This method correctly steps into/out-of macros, jumps between conditional compilation branches, and
2559	/// consumes any comments.
2560	fn lookahead_1(&self) -> Option<Spanned<Token>> {
2561		let mut token_provider = self.token_provider.clone();
2562		let mut streams = self.streams.clone();
2563		let mut macros = self.macros.clone();
2564		let mut active_macros = self.active_macros.clone();
2565		let mut macro_call_site = self.macro_call_site.clone();
2566		let mut syntax_diags = Vec::new();
2567		let mut semantic_diags = Vec::new();
2568		let mut syntax_tokens = Vec::new();
2569		// PERF: Optimize for certain cases to prevent having to clone everything everytime.
2570		Self::_move_cursor(
2571			&mut token_provider,
2572			&mut streams,
2573			&mut macros,
2574			&mut active_macros,
2575			&mut macro_call_site,
2576			&mut syntax_diags,
2577			&mut semantic_diags,
2578			&mut syntax_tokens,
2579			self.span_encoding,
2580		);
2581
2582		// Copy of `Self::get()`.
2583		if streams.is_empty() {
2584			None
2585		} else if streams.len() == 1 {
2586			let (_, stream, cursor) = streams.last().unwrap();
2587			stream.get(*cursor).cloned()
2588		} else {
2589			let (_, stream, cursor) = streams.last().unwrap();
2590			let token = stream.get(*cursor).map(|(t, _)| t).cloned();
2591			token.map(|t| {
2592				(
2593					t,
2594					// Panic: This is guaranteed to be some if `streams.len() > 1`.
2595					macro_call_site.unwrap(),
2596				)
2597			})
2598		}
2599	}
2600
2601	/// Advances the cursor by one.
2602	///
2603	/// This method correctly steps into/out-of macros, jumps between conditional compilation branches, and
2604	/// consumes any comments.
2605	fn advance(&mut self) {
2606		Self::_move_cursor(
2607			&mut self.token_provider,
2608			&mut self.streams,
2609			&mut self.macros,
2610			&mut self.active_macros,
2611			&mut self.macro_call_site,
2612			&mut self.syntax_diags,
2613			&mut self.semantic_diags,
2614			&mut self.syntax_tokens,
2615			self.span_encoding,
2616		);
2617	}
2618
2619	/// Advances the cursor by one.
2620	///
2621	/// This method is identical to `advance()` apart from that diagnostics and syntax highlighting tokens are
2622	/// returned. This is necessary because otherwise the spans could be produced in the wrong order, if, for
2623	/// example, the walker consumes a comment but the expresion syntax tokens are appended after the fact.
2624	fn advance_expr_parser(
2625		&mut self,
2626		syntax_diags: &mut Vec<Syntax>,
2627		semantic_diags: &mut Vec<Semantic>,
2628		syntax_tokens: &mut Vec<SyntaxToken>,
2629	) {
2630		Self::_move_cursor(
2631			&mut self.token_provider,
2632			&mut self.streams,
2633			&mut self.macros,
2634			&mut self.active_macros,
2635			&mut self.macro_call_site,
2636			syntax_diags,
2637			semantic_diags,
2638			syntax_tokens,
2639			self.span_encoding,
2640		);
2641	}
2642
2643	/// Returns whether the walker has reached the end of the token streams.
2644	fn is_done(&self) -> bool {
2645		self.streams.is_empty()
2646	}
2647
2648	/// Returns the span of the last token in the token stream.
2649	fn get_last_span(&self) -> Span {
2650		self.token_provider.get_end_span()
2651	}
2652
2653	/// Moves the cursor to the next token. This function takes all the necessary data by parameter so that the
2654	/// functionality can be re-used between the `Self::advance()` and `Self::lookahead_1()` methods.
2655	fn _move_cursor(
2656		token_provider: &mut Provider,
2657		streams: &mut Vec<(String, TokenStream, usize)>,
2658		macros: &mut HashMap<String, (Span, Macro)>,
2659		active_macros: &mut HashSet<String>,
2660		macro_call_site: &mut Option<Span>,
2661		syntax_diags: &mut Vec<Syntax>,
2662		semantic_diags: &mut Vec<Semantic>,
2663		syntax_tokens: &mut Vec<SyntaxToken>,
2664		span_encoding: SpanEncoding,
2665	) {
2666		let mut dont_increment = false;
2667		'outer: while let Some((identifier, stream, cursor)) =
2668			streams.last_mut()
2669		{
2670			if !dont_increment {
2671				*cursor += 1;
2672			}
2673			dont_increment = false;
2674
2675			if *cursor == stream.len() {
2676				// We have reached the end of this stream. We close it and re-run the loop on the next stream, (if
2677				// there is one).
2678
2679				let ident = identifier.clone();
2680				if streams.len() == 1 {
2681					// If we aren't in a macro, that means we've finished the current source stream. There may
2682					// however be another stream, for which we need to query the provider for.
2683					match token_provider.get_next_stream(
2684						macros,
2685						syntax_diags,
2686						syntax_tokens,
2687						span_encoding,
2688					) {
2689						Some(mut next_stream) => {
2690							let (_, s, c) = &mut streams[0];
2691							std::mem::swap(s, &mut next_stream);
2692							*c = 0;
2693							dont_increment = true;
2694							continue;
2695						}
2696						None => {
2697							// The provider didn't return anything, so that means we have reached the final end.
2698							streams.remove(0);
2699							break;
2700						}
2701					}
2702				} else {
2703					// Panic: Anytime a stream is added the identifier is inserted into the set.
2704					active_macros.remove(&ident);
2705					streams.remove(streams.len() - 1);
2706					continue;
2707				}
2708			}
2709
2710			let (token, token_span) = stream.get(*cursor).unwrap();
2711
2712			match token {
2713				// We check if the new token is a macro call site.
2714				Token::Ident(s) => {
2715					if let Some((signature_span, macro_)) = macros.get(s) {
2716						if active_macros.contains(s) {
2717							// We have already visited a macro with this identifier. Recursion is not supported so
2718							// we don't continue.
2719							break;
2720						}
2721
2722						let ident_span = *token_span;
2723
2724						if let Macro::Function { params, body } = macro_ {
2725							// We have an identifier which matches a function-like macro, so we are expecting a
2726							// parameter list in the current token stream before we do any switching.
2727
2728							// We don't need to worry about having to switch source streams because that would
2729							// imply that a conditional compilation directive is in the middle of a function-like
2730							// macro call site, which isn't valid. A function-like macro call cannot have
2731							// preprocessor directives within, which means that the source stream won't be split up
2732							// by a conditional, which means the entire invocation of the macro will be within this
2733							// stream.
2734
2735							let mut tmp_cursor = *cursor + 1;
2736							let mut syntax_spans = vec![SyntaxToken {
2737								ty: SyntaxType::FunctionMacro,
2738								modifiers: SyntaxModifiers::empty(),
2739								span: ident_span,
2740							}];
2741							loop {
2742								match stream.get(tmp_cursor) {
2743									Some((token, token_span)) => match token {
2744										Token::LineComment(_)
2745										| Token::BlockComment { .. } => {
2746											syntax_spans.push(SyntaxToken {
2747												ty: SyntaxType::Comment,
2748												modifiers:
2749													SyntaxModifiers::empty(),
2750												span: *token_span,
2751											});
2752											tmp_cursor += 1;
2753										}
2754										_ => break,
2755									},
2756									None => break 'outer,
2757								}
2758							}
2759
2760							// Consume the opening `(` parenthesis.
2761							let l_paren_span = match stream.get(tmp_cursor) {
2762								Some((token, token_span)) => match token {
2763									Token::LParen => {
2764										syntax_spans.push(SyntaxToken {
2765											ty: SyntaxType::Punctuation,
2766											modifiers: SyntaxModifiers::empty(),
2767											span: *token_span,
2768										});
2769										*cursor = tmp_cursor + 1;
2770										*token_span
2771									}
2772									_ => {
2773										// We did not immediately encounter a parenthesis, which means that this is
2774										// not a call to a function-like macro even if the names match.
2775										break;
2776									}
2777								},
2778								None => break,
2779							};
2780
2781							// Look for any arguments until we hit a closing `)` parenthesis. The preprocessor
2782							// immediately switches to the next argument when a `,` is encountered, unless we are
2783							// within a parenthesis group.
2784							/* #[derive(PartialEq)]
2785							enum Prev {
2786								None,
2787								Param,
2788								Comma,
2789								Invalid,
2790							}
2791							let mut prev = Prev::None; */
2792							let mut prev_span = l_paren_span;
2793							let mut paren_groups = 0;
2794							let mut args = Vec::new();
2795							let mut arg = Vec::new();
2796							let r_paren_span = loop {
2797								let (token, token_span) =
2798									match stream.get(*cursor) {
2799										Some(t) => t,
2800										None => {
2801											syntax_diags.push(Syntax::PreprocDefine(PreprocDefineDiag::ParamsExpectedRParen(
2802											prev_span.next_single_width()
2803										)));
2804											break 'outer;
2805										}
2806									};
2807
2808								match token {
2809									Token::Comma => {
2810										syntax_spans.push(SyntaxToken {
2811											ty: SyntaxType::Punctuation,
2812											modifiers: SyntaxModifiers::empty(),
2813											span: *token_span,
2814										});
2815										if paren_groups == 0 {
2816											let arg = std::mem::take(&mut arg);
2817											args.push(arg);
2818											/* prev = Prev::Comma; */
2819										}
2820										prev_span = *token_span;
2821										*cursor += 1;
2822										continue;
2823									}
2824									Token::LParen => {
2825										paren_groups += 1;
2826									}
2827									Token::RParen => {
2828										if paren_groups == 0 {
2829											// We have reached the end of this function-like macro call site.
2830											syntax_spans.push(SyntaxToken {
2831												ty: SyntaxType::Punctuation,
2832												modifiers:
2833													SyntaxModifiers::empty(),
2834												span: *token_span,
2835											});
2836											let arg = std::mem::take(&mut arg);
2837											args.push(arg);
2838											// It is important that we don't increment the cursor to the next token
2839											// after the macro call site. This is because once this macro is
2840											// finished, and we return to the previous stream, we will
2841											// automatically increment the cursor onto the next token which will be
2842											// the first token after the macro call site. The object-like macro
2843											// branch also doesn't perform this increment.
2844											// *cursor += 1;
2845											break *token_span;
2846										}
2847										paren_groups -= 1;
2848									}
2849									_ => {}
2850								}
2851								syntax_spans.push(SyntaxToken {
2852									ty: token.non_semantic_colour(),
2853									modifiers: SyntaxModifiers::empty(),
2854									span: *token_span,
2855								});
2856								arg.push((token.clone(), *token_span));
2857								/* prev = Prev::Param; */
2858								*cursor += 1;
2859							};
2860							let call_site_span =
2861								Span::new(ident_span.start, r_paren_span.end);
2862
2863							// We have a set of arguments now.
2864							if params.len() != args.len() {
2865								// If there is a mismatch in the argument/parameter count, we ignore this macro
2866								// call and move onto the next token after the call site.
2867								semantic_diags.push(
2868									Semantic::FunctionMacroMismatchedArgCount(
2869										call_site_span,
2870										*signature_span,
2871									),
2872								);
2873								continue;
2874							}
2875							let mut param_map = HashMap::new();
2876							params.iter().zip(args.into_iter()).for_each(
2877								|(ident, tokens)| {
2878									param_map.insert(&ident.name, tokens);
2879								},
2880							);
2881
2882							// We now go through the replacement token list and replace any identifiers which match
2883							// a parameter name with the relevant argument's tokens.
2884							let mut new_body = Vec::with_capacity(body.len());
2885							for (token, token_span) in body {
2886								match token {
2887									Token::Ident(str) => {
2888										if let Some(arg) = param_map.get(&str) {
2889											for token in arg {
2890												new_body.push(token.clone());
2891											}
2892											continue;
2893										}
2894									}
2895									_ => {}
2896								}
2897								new_body.push((token.clone(), *token_span));
2898							}
2899							// Then, we perform token concatenation.
2900							let (new_body, mut syntax, mut semantic) =
2901								lexer::preprocessor::concat_macro_body(
2902									new_body,
2903									span_encoding,
2904								);
2905							syntax_diags.append(&mut syntax);
2906							semantic_diags.append(&mut semantic);
2907
2908							if body.is_empty() {
2909								// The macro is empty, so we want to move to the next token of the existing stream.
2910								semantic_diags.push(
2911									Semantic::EmptyMacroCallSite(
2912										call_site_span,
2913									),
2914								);
2915								if streams.len() == 1 {
2916									// We only syntax highlight when it is the first macro call.
2917									syntax_tokens.append(&mut syntax_spans);
2918								}
2919								continue;
2920							}
2921
2922							let ident = s.to_owned();
2923
2924							// We only syntax highlight and note the macro call site when it is the first macro
2925							// call.
2926							if streams.len() == 1 {
2927								*macro_call_site = Some(call_site_span);
2928								syntax_tokens.append(&mut syntax_spans);
2929							}
2930
2931							active_macros.insert(ident.clone());
2932							streams.push((ident, new_body, 0));
2933
2934							// The first token in the new stream could be another macro call, so we re-run the loop
2935							// on this new stream in case.
2936							dont_increment = true;
2937							continue;
2938						} else if let Macro::Object(stream) = macro_ {
2939							if stream.is_empty() {
2940								// The macro is empty, so we want to move to the next token of the existing stream.
2941								semantic_diags.push(
2942									Semantic::EmptyMacroCallSite(ident_span),
2943								);
2944								if streams.len() == 1 {
2945									// We only syntax highlight when it is the first macro call.
2946									syntax_tokens.push(SyntaxToken {
2947										ty: SyntaxType::ObjectMacro,
2948										modifiers: SyntaxModifiers::empty(),
2949										span: ident_span,
2950									});
2951								}
2952								continue;
2953							}
2954
2955							let ident = s.to_owned();
2956
2957							// We only syntax highlight and note the macro call site when it is the first macro
2958							// call.
2959							if streams.len() == 1 {
2960								*macro_call_site = Some(ident_span);
2961								syntax_tokens.push(SyntaxToken {
2962									ty: SyntaxType::ObjectMacro,
2963									modifiers: SyntaxModifiers::empty(),
2964									span: ident_span,
2965								});
2966							}
2967
2968							active_macros.insert(ident.clone());
2969							streams.push((ident, stream.clone(), 0));
2970
2971							// The first token in the new stream could be another macro call, so we re-run the loop
2972							// on this new stream in case.
2973							dont_increment = true;
2974							continue;
2975						}
2976					}
2977					break;
2978				}
2979				// We want to consume any comments since they are semantically ignored.
2980				Token::LineComment(_) => {
2981					let token_span = *token_span;
2982					if streams.len() == 1 {
2983						// We only syntax highlight when we are not in a macro call.
2984						syntax_tokens.push(SyntaxToken {
2985							ty: SyntaxType::Comment,
2986							modifiers: SyntaxModifiers::empty(),
2987							span: token_span,
2988						});
2989					}
2990				}
2991				Token::BlockComment { contains_eof, .. } => {
2992					if *contains_eof {
2993						syntax_diags.push(Syntax::BlockCommentMissingEnd(
2994							token_span.end_zero_width(),
2995						));
2996					}
2997					let token_span = *token_span;
2998					if streams.len() == 1 {
2999						// We only syntax highlight when we are not in a macro call.
3000						syntax_tokens.push(SyntaxToken {
3001							ty: SyntaxType::Comment,
3002							modifiers: SyntaxModifiers::empty(),
3003							span: token_span,
3004						});
3005					}
3006				}
3007				_ => break,
3008			}
3009		}
3010
3011		if streams.len() <= 1 {
3012			*macro_call_site = None;
3013		}
3014	}
3015
3016	/// Registers a define macro.
3017	fn register_macro(
3018		&mut self,
3019		ident: String,
3020		signature_span: Span,
3021		macro_: Macro,
3022	) {
3023		if let Some(_prev) = self.macros.insert(ident, (signature_span, macro_))
3024		{
3025			// TODO: Emit error if the macros aren't identical (will require scanning the tokenstream to compare).
3026		}
3027	}
3028
3029	/// Un-registers a defined macro.
3030	fn unregister_macro(&mut self, ident: &str, span: Span) {
3031		match self.macros.remove(ident) {
3032			Some((_, macro_)) => match macro_ {
3033				Macro::Object(_) => self.push_colour_with_modifiers(
3034					span,
3035					SyntaxType::ObjectMacro,
3036					SyntaxModifiers::UNDEFINE,
3037				),
3038				Macro::Function { .. } => self.push_colour_with_modifiers(
3039					span,
3040					SyntaxType::FunctionMacro,
3041					SyntaxModifiers::UNDEFINE,
3042				),
3043			},
3044			None => {
3045				self.push_colour_with_modifiers(
3046					span,
3047					SyntaxType::UnresolvedIdent,
3048					SyntaxModifiers::UNDEFINE,
3049				);
3050				self.push_semantic_diag(Semantic::UndefMacroNameUnresolved(
3051					span,
3052				));
3053			}
3054		}
3055	}
3056
3057	/// Pushes a syntax diagnostic.
3058	fn push_syntax_diag(&mut self, diag: Syntax) {
3059		self.syntax_diags.push(diag);
3060	}
3061
3062	/// Appends a collection of syntax diagnostics.
3063	fn append_syntax_diags(&mut self, syntax: &mut Vec<Syntax>) {
3064		self.syntax_diags.append(syntax);
3065	}
3066
3067	/// Pushes a semantic diagnostic.
3068	fn push_semantic_diag(&mut self, diag: Semantic) {
3069		self.semantic_diags.push(diag);
3070	}
3071
3072	/// Appends a collection of semantic diagnostics.
3073	fn append_semantic_diags(&mut self, semantic: &mut Vec<Semantic>) {
3074		self.semantic_diags.append(semantic);
3075	}
3076
3077	/// Pushes a syntax highlighting token over the given span.
3078	fn push_colour(&mut self, span: Span, token: SyntaxType) {
3079		self.push_colour_with_modifiers(span, token, SyntaxModifiers::empty())
3080	}
3081
3082	/// Pushes a syntax highlighting token with one or more modifiers over the given span.
3083	fn push_colour_with_modifiers(
3084		&mut self,
3085		span: Span,
3086		ty: SyntaxType,
3087		modifiers: SyntaxModifiers,
3088	) {
3089		// When we are within a macro, we don't want to produce syntax tokens.
3090		// Note: This functionality is duplicated in the `ShuntingYard::colour()` method.
3091		if self.streams.len() == 1 {
3092			self.syntax_tokens.push(SyntaxToken {
3093				ty,
3094				modifiers,
3095				span,
3096			});
3097		}
3098	}
3099
3100	/// Appends a collection of syntax highlighting tokens.
3101	fn append_colours(&mut self, colours: &mut Vec<SyntaxToken>) {
3102		self.syntax_tokens.append(colours);
3103	}
3104}
3105
3106/* ACTUAL STATEMENT PARSING LOGIC BELOW */
3107
3108/// Consumes tokens until a beginning of a new statement is reached.
3109///
3110/// This function consumes tokens until a keyword which can begin a statement is found, or until a semi-colon is
3111/// reached (which is consumed). This is used for some instances of error recovery, where no more specific
3112/// strategies can be employed.
3113fn seek_next_stmt<'a, P: TokenStreamProvider<'a>>(walker: &mut Walker<'a, P>) {
3114	loop {
3115		match walker.peek() {
3116			Some((token, span)) => {
3117				if token.can_start_statement() {
3118					return;
3119				} else if *token == Token::Semi {
3120					walker.push_colour(span, SyntaxType::Punctuation);
3121					walker.advance();
3122					return;
3123				} else {
3124					walker.push_colour(span, SyntaxType::Invalid);
3125					walker.advance();
3126					continue;
3127				}
3128			}
3129			None => return,
3130		}
3131	}
3132}
3133
3134/// Invalidates a set of qualifiers.
3135///
3136/// This function is used to emit a diagnostic about the use of qualifiers before a statement which doesn't support
3137/// qualifiers.
3138fn invalidate_qualifiers<'a, P: TokenStreamProvider<'a>>(
3139	walker: &mut Walker<'a, P>,
3140	qualifiers: Vec<Qualifier>,
3141) {
3142	if let Some(q) = qualifiers.last() {
3143		walker.push_syntax_diag(Syntax::Stmt(
3144			StmtDiag::FoundQualifiersBeforeStmt(Span::new(
3145				qualifiers.first().unwrap().span.start,
3146				q.span.end,
3147			)),
3148		));
3149	}
3150}
3151
3152/// Parses an individual statement at the current position.
3153fn parse_stmt<'a, P: TokenStreamProvider<'a>>(
3154	walker: &mut Walker<'a, P>,
3155	nodes: &mut Vec<ast::Node>,
3156) {
3157	let qualifiers = try_parse_qualifiers(walker);
3158
3159	let Some((token, token_span)) = walker.get() else {
3160		return;
3161	};
3162
3163	match token {
3164		Token::LBrace => {
3165			invalidate_qualifiers(walker, qualifiers);
3166			walker.push_colour(token_span, SyntaxType::Punctuation);
3167			walker.advance();
3168			let block = parse_scope(walker, brace_scope, token_span);
3169			nodes.push(Node {
3170				span: block.span,
3171				ty: NodeTy::Block(block),
3172			});
3173		}
3174		Token::Semi => {
3175			walker.push_colour(token_span, SyntaxType::Punctuation);
3176			walker.advance();
3177			if !qualifiers.is_empty() {
3178				nodes.push(Node {
3179					span: Span::new(
3180						qualifiers.first().unwrap().span.start,
3181						qualifiers.last().unwrap().span.end,
3182					),
3183					ty: NodeTy::Qualifiers(qualifiers),
3184				});
3185			} else {
3186				nodes.push(Node {
3187					span: token_span,
3188					ty: NodeTy::Empty,
3189				});
3190			}
3191		}
3192		Token::Struct => parse_struct(walker, nodes, qualifiers, token_span),
3193		Token::Directive(stream) => {
3194			invalidate_qualifiers(walker, qualifiers);
3195			parse_directive(walker, nodes, stream, token_span);
3196			walker.advance();
3197		}
3198		Token::If => parse_if(walker, nodes, token_span),
3199		Token::Switch => parse_switch(walker, nodes, token_span),
3200		Token::For => parse_for_loop(walker, nodes, token_span),
3201		Token::While => parse_while_loop(walker, nodes, token_span),
3202		Token::Do => parse_do_while_loop(walker, nodes, token_span),
3203		Token::Break => {
3204			invalidate_qualifiers(walker, qualifiers);
3205			parse_break_continue_discard(
3206				walker,
3207				nodes,
3208				token_span,
3209				|| NodeTy::Break,
3210				|span| Syntax::Stmt(StmtDiag::BreakExpectedSemiAfterKw(span)),
3211			)
3212		}
3213		Token::Continue => {
3214			invalidate_qualifiers(walker, qualifiers);
3215			parse_break_continue_discard(
3216				walker,
3217				nodes,
3218				token_span,
3219				|| NodeTy::Continue,
3220				|span| {
3221					Syntax::Stmt(StmtDiag::ContinueExpectedSemiAfterKw(span))
3222				},
3223			);
3224		}
3225		Token::Discard => {
3226			invalidate_qualifiers(walker, qualifiers);
3227			parse_break_continue_discard(
3228				walker,
3229				nodes,
3230				token_span,
3231				|| NodeTy::Discard,
3232				|span| Syntax::Stmt(StmtDiag::DiscardExpectedSemiAfterKw(span)),
3233			);
3234		}
3235		Token::Return => {
3236			invalidate_qualifiers(walker, qualifiers);
3237			parse_return(walker, nodes, token_span);
3238		}
3239		Token::RBrace => {
3240			invalidate_qualifiers(walker, qualifiers);
3241			walker.push_colour(token_span, SyntaxType::Punctuation);
3242			walker.push_syntax_diag(Syntax::FoundUnmatchedRBrace(token_span));
3243			walker.advance();
3244		}
3245		Token::Else => {
3246			invalidate_qualifiers(walker, qualifiers);
3247			walker.push_colour(token_span, SyntaxType::Keyword);
3248			walker.push_syntax_diag(Syntax::FoundLonelyElseKw(token_span));
3249			walker.advance();
3250		}
3251		Token::Case => {
3252			invalidate_qualifiers(walker, qualifiers);
3253			walker.push_colour(token_span, SyntaxType::Keyword);
3254			walker.push_syntax_diag(Syntax::FoundLonelyCaseKw(token_span));
3255			walker.advance();
3256		}
3257		Token::Default => {
3258			invalidate_qualifiers(walker, qualifiers);
3259			walker.push_colour(token_span, SyntaxType::Keyword);
3260			walker.push_syntax_diag(Syntax::FoundLonelyDefaultKw(token_span));
3261			walker.advance();
3262		}
3263		Token::Subroutine => {
3264			invalidate_qualifiers(walker, qualifiers);
3265			parse_subroutine(walker, nodes, token_span);
3266		}
3267		Token::Reserved(str) => {
3268			invalidate_qualifiers(walker, qualifiers);
3269			walker.push_colour(token_span, SyntaxType::Invalid);
3270			walker.push_syntax_diag(Syntax::FoundReservedKw(token_span, str));
3271			walker.advance();
3272		}
3273		Token::Invalid(c) => {
3274			invalidate_qualifiers(walker, qualifiers);
3275			walker.push_colour(token_span, SyntaxType::Invalid);
3276			walker.push_syntax_diag(Syntax::FoundIllegalChar(token_span, c));
3277			walker.advance();
3278		}
3279		_ => try_parse_definition_declaration_expr(
3280			walker, nodes, qualifiers, false,
3281		),
3282	}
3283}
3284
3285/// Parses a scope of statements.
3286///
3287/// This function assumes that the opening delimiter is already consumed.
3288fn parse_scope<'a, P: TokenStreamProvider<'a>>(
3289	walker: &mut Walker<'a, P>,
3290	exit_condition: ScopeEnd<'a, P>,
3291	opening_span: Span,
3292) -> Scope {
3293	let mut nodes = Vec::new();
3294	let ending_span = loop {
3295		// Check if we have reached the closing delimiter.
3296		if let Some(span) = exit_condition(walker, opening_span) {
3297			break span;
3298		}
3299		parse_stmt(walker, &mut nodes);
3300	};
3301
3302	Scope {
3303		contents: nodes,
3304		span: Span::new(opening_span.start, ending_span.end),
3305	}
3306}
3307
3308/// A function, which given the `walker`, determines whether to end parsing the current scope of statements and
3309/// return back to the caller. If this returns `Some`, we have reached the end of the scope. If the span returned
3310/// is zero-width, that means we have no closing delimiter.
3311///
3312/// This also takes a span of the opening delimiter which allows for the creation of a syntax error if the function
3313/// never encounters the desired ending delimiter.
3314type ScopeEnd<'a, P> = fn(&mut Walker<'a, P>, Span) -> Option<Span>;
3315
3316fn brace_scope<'a, P: TokenStreamProvider<'a>>(
3317	walker: &mut Walker<'a, P>,
3318	l_brace_span: Span,
3319) -> Option<Span> {
3320	match walker.peek() {
3321		Some((token, span)) => {
3322			if *token == Token::RBrace {
3323				walker.push_colour(span, SyntaxType::Punctuation);
3324				walker.advance();
3325				Some(span)
3326			} else {
3327				None
3328			}
3329		}
3330		None => {
3331			walker.push_syntax_diag(Syntax::Stmt(
3332				StmtDiag::ScopeMissingRBrace(
3333					l_brace_span,
3334					walker.get_last_span().next_single_width(),
3335				),
3336			));
3337			Some(walker.get_last_span().end_zero_width())
3338		}
3339	}
3340}
3341fn switch_case_scope<'a, P: TokenStreamProvider<'a>>(
3342	walker: &mut Walker<'a, P>,
3343	_start_span: Span,
3344) -> Option<Span> {
3345	match walker.peek() {
3346		Some((token, span)) => match token {
3347			Token::Case | Token::Default | Token::RBrace => Some(span),
3348			_ => None,
3349		},
3350		None => {
3351			walker.push_syntax_diag(Syntax::Stmt(
3352				StmtDiag::SwitchExpectedRBrace(
3353					walker.get_last_span().next_single_width(),
3354				),
3355			));
3356			Some(walker.get_last_span().end_zero_width())
3357		}
3358	}
3359}
3360
3361/// Tries to parse one or more qualifiers.
3362///
3363/// This function makes no assumptions as to what the current token is.
3364fn try_parse_qualifiers<'a, P: TokenStreamProvider<'a>>(
3365	walker: &mut Walker<'a, P>,
3366) -> Vec<Qualifier> {
3367	let mut qualifiers = Vec::new();
3368	'outer: loop {
3369		let (token, token_span) = match walker.peek() {
3370			Some(t) => t,
3371			None => break,
3372		};
3373
3374		match token {
3375			Token::Const => {
3376				walker.push_colour(token_span, SyntaxType::Keyword);
3377				qualifiers.push(Qualifier {
3378					span: token_span,
3379					ty: QualifierTy::Const,
3380				});
3381			}
3382			Token::In => {
3383				walker.push_colour(token_span, SyntaxType::Keyword);
3384				qualifiers.push(Qualifier {
3385					span: token_span,
3386					ty: QualifierTy::In,
3387				});
3388			}
3389			Token::Out => {
3390				walker.push_colour(token_span, SyntaxType::Keyword);
3391				qualifiers.push(Qualifier {
3392					span: token_span,
3393					ty: QualifierTy::Out,
3394				});
3395			}
3396			Token::InOut => {
3397				walker.push_colour(token_span, SyntaxType::Keyword);
3398				qualifiers.push(Qualifier {
3399					span: token_span,
3400					ty: QualifierTy::InOut,
3401				});
3402			}
3403			Token::Attribute => {
3404				walker.push_colour(token_span, SyntaxType::Keyword);
3405				qualifiers.push(Qualifier {
3406					span: token_span,
3407					ty: QualifierTy::Attribute,
3408				});
3409			}
3410			Token::Uniform => {
3411				walker.push_colour(token_span, SyntaxType::Keyword);
3412				qualifiers.push(Qualifier {
3413					span: token_span,
3414					ty: QualifierTy::Uniform,
3415				});
3416			}
3417			Token::Varying => {
3418				walker.push_colour(token_span, SyntaxType::Keyword);
3419				qualifiers.push(Qualifier {
3420					span: token_span,
3421					ty: QualifierTy::Varying,
3422				});
3423			}
3424			Token::Buffer => {
3425				walker.push_colour(token_span, SyntaxType::Keyword);
3426				qualifiers.push(Qualifier {
3427					span: token_span,
3428					ty: QualifierTy::Buffer,
3429				});
3430			}
3431			Token::Shared => {
3432				walker.push_colour(token_span, SyntaxType::Keyword);
3433				qualifiers.push(Qualifier {
3434					span: token_span,
3435					ty: QualifierTy::Shared,
3436				});
3437			}
3438			Token::Centroid => {
3439				walker.push_colour(token_span, SyntaxType::Keyword);
3440				qualifiers.push(Qualifier {
3441					span: token_span,
3442					ty: QualifierTy::Centroid,
3443				});
3444			}
3445			Token::Sample => {
3446				walker.push_colour(token_span, SyntaxType::Keyword);
3447				qualifiers.push(Qualifier {
3448					span: token_span,
3449					ty: QualifierTy::Sample,
3450				});
3451			}
3452			Token::Patch => {
3453				walker.push_colour(token_span, SyntaxType::Keyword);
3454				qualifiers.push(Qualifier {
3455					span: token_span,
3456					ty: QualifierTy::Patch,
3457				});
3458			}
3459			Token::Flat => {
3460				walker.push_colour(token_span, SyntaxType::Keyword);
3461				qualifiers.push(Qualifier {
3462					span: token_span,
3463					ty: QualifierTy::Flat,
3464				});
3465			}
3466			Token::Smooth => {
3467				walker.push_colour(token_span, SyntaxType::Keyword);
3468				qualifiers.push(Qualifier {
3469					span: token_span,
3470					ty: QualifierTy::Smooth,
3471				});
3472			}
3473			Token::NoPerspective => {
3474				qualifiers.push(Qualifier {
3475					span: token_span,
3476					ty: QualifierTy::NoPerspective,
3477				});
3478			}
3479			Token::HighP => {
3480				walker.push_colour(token_span, SyntaxType::Keyword);
3481				qualifiers.push(Qualifier {
3482					span: token_span,
3483					ty: QualifierTy::HighP,
3484				});
3485			}
3486			Token::MediumP => {
3487				walker.push_colour(token_span, SyntaxType::Keyword);
3488				qualifiers.push(Qualifier {
3489					span: token_span,
3490					ty: QualifierTy::MediumP,
3491				});
3492			}
3493			Token::LowP => {
3494				walker.push_colour(token_span, SyntaxType::Keyword);
3495				qualifiers.push(Qualifier {
3496					span: token_span,
3497					ty: QualifierTy::LowP,
3498				});
3499			}
3500			Token::Invariant => {
3501				walker.push_colour(token_span, SyntaxType::Keyword);
3502				qualifiers.push(Qualifier {
3503					span: token_span,
3504					ty: QualifierTy::Invariant,
3505				});
3506			}
3507			Token::Precise => {
3508				walker.push_colour(token_span, SyntaxType::Keyword);
3509				qualifiers.push(Qualifier {
3510					span: token_span,
3511					ty: QualifierTy::Precise,
3512				});
3513			}
3514			Token::Coherent => {
3515				walker.push_colour(token_span, SyntaxType::Keyword);
3516				qualifiers.push(Qualifier {
3517					span: token_span,
3518					ty: QualifierTy::Coherent,
3519				});
3520			}
3521			Token::Volatile => {
3522				walker.push_colour(token_span, SyntaxType::Keyword);
3523				qualifiers.push(Qualifier {
3524					span: token_span,
3525					ty: QualifierTy::Volatile,
3526				});
3527			}
3528			Token::Restrict => {
3529				walker.push_colour(token_span, SyntaxType::Keyword);
3530				qualifiers.push(Qualifier {
3531					span: token_span,
3532					ty: QualifierTy::Restrict,
3533				});
3534			}
3535			Token::Readonly => {
3536				walker.push_colour(token_span, SyntaxType::Keyword);
3537				qualifiers.push(Qualifier {
3538					span: token_span,
3539					ty: QualifierTy::Readonly,
3540				});
3541			}
3542			Token::Writeonly => {
3543				walker.push_colour(token_span, SyntaxType::Keyword);
3544				qualifiers.push(Qualifier {
3545					span: token_span,
3546					ty: QualifierTy::Writeonly,
3547				});
3548			}
3549			Token::Layout => {
3550				let kw_span = token_span;
3551				walker.push_colour(kw_span, SyntaxType::Keyword);
3552				walker.advance();
3553
3554				// Consume the `(`.
3555				let (token, token_span) = match walker.peek() {
3556					Some(t) => t,
3557					None => {
3558						// We don't have any layout contents yet.
3559						walker.push_syntax_diag(Syntax::Stmt(
3560							StmtDiag::LayoutExpectedLParenAfterKw(
3561								kw_span.next_single_width(),
3562							),
3563						));
3564						qualifiers.push(Qualifier {
3565							span: kw_span,
3566							ty: QualifierTy::Layout(vec![]),
3567						});
3568						break;
3569					}
3570				};
3571				let l_paren_span = if *token == Token::LParen {
3572					walker.push_colour(token_span, SyntaxType::Punctuation);
3573					walker.advance();
3574					token_span
3575				} else {
3576					// We don't have any layout contents yet.
3577					walker.push_syntax_diag(Syntax::Stmt(
3578						StmtDiag::LayoutExpectedLParenAfterKw(
3579							kw_span.next_single_width(),
3580						),
3581					));
3582					qualifiers.push(Qualifier {
3583						span: kw_span,
3584						ty: QualifierTy::Layout(vec![]),
3585					});
3586					break;
3587				};
3588
3589				// Look for any layouts until we hit a closing `)` parenthesis.
3590				#[derive(PartialEq)]
3591				enum Prev {
3592					None,
3593					Layout,
3594					Comma,
3595					Invalid,
3596				}
3597				let mut prev = Prev::None;
3598				let mut prev_span = l_paren_span;
3599				let mut layouts = Vec::new();
3600				let r_paren_span = loop {
3601					let (token, token_span) = match walker.get() {
3602						Some(t) => t,
3603						None => {
3604							// We have not yet finished parsing the layout list, but we treat this as a valid
3605							// layout qualifier.
3606							let span = Span::new(
3607								kw_span.start,
3608								walker.get_last_span().end,
3609							);
3610							walker.push_syntax_diag(Syntax::Stmt(
3611								StmtDiag::LayoutMissingRParen(
3612									span.next_single_width(),
3613								),
3614							));
3615							qualifiers.push(Qualifier {
3616								span,
3617								ty: QualifierTy::Layout(layouts),
3618							});
3619							break 'outer;
3620						}
3621					};
3622
3623					match token {
3624						Token::Comma => {
3625							walker.push_colour(
3626								token_span,
3627								SyntaxType::Punctuation,
3628							);
3629							walker.advance();
3630							if prev == Prev::Comma {
3631								walker.push_syntax_diag(Syntax::Stmt(
3632									StmtDiag::LayoutExpectedLayoutAfterComma(
3633										Span::new(
3634											prev_span.start,
3635											token_span.end,
3636										),
3637									),
3638								));
3639							} else if prev == Prev::None {
3640								walker.push_syntax_diag(Syntax::Stmt(StmtDiag::LayoutExpectedLayoutBetweenParenComma(
3641									Span::new(prev_span.start, token_span.end)
3642								)));
3643							}
3644							prev = Prev::Comma;
3645							prev_span = token_span;
3646							continue;
3647						}
3648						Token::RParen => {
3649							walker.push_colour(
3650								token_span,
3651								SyntaxType::Punctuation,
3652							);
3653							walker.advance();
3654							break token_span;
3655						}
3656						_ => {}
3657					}
3658
3659					if prev == Prev::Layout {
3660						walker.push_syntax_diag(Syntax::Stmt(
3661							StmtDiag::LayoutExpectedCommaAfterLayout(
3662								prev_span.next_single_width(),
3663							),
3664						));
3665					}
3666					let layout_span_start = token_span.start;
3667
3668					// Consume the layout identifier. This creates a constructor either for a layout which only
3669					// consists of an identifier, or for a layout which expects an expression.
3670					let constructor: Either<
3671						LayoutTy,
3672						fn(Option<Expr>) -> LayoutTy,
3673					> = if let Token::Ident(str) = token {
3674						match str.as_ref() {
3675							"packed" => Either::Left(LayoutTy::Packed),
3676							"std140" => Either::Left(LayoutTy::Std140),
3677							"std430" => Either::Left(LayoutTy::Std430),
3678							"row_major" => Either::Left(LayoutTy::RowMajor),
3679							"column_major" => {
3680								Either::Left(LayoutTy::ColumnMajor)
3681							}
3682							"binding" => Either::Right(LayoutTy::Binding),
3683							"offset" => Either::Right(LayoutTy::Offset),
3684							"align" => Either::Right(LayoutTy::Align),
3685							"location" => Either::Right(LayoutTy::Location),
3686							"component" => Either::Right(LayoutTy::Component),
3687							"index" => Either::Right(LayoutTy::Index),
3688							"points" => Either::Left(LayoutTy::Points),
3689							"lines" => Either::Left(LayoutTy::Lines),
3690							"isolines" => Either::Left(LayoutTy::Isolines),
3691							"triangles" => Either::Left(LayoutTy::Triangles),
3692							"quads" => Either::Left(LayoutTy::Quads),
3693							"equal_spacing" => {
3694								Either::Left(LayoutTy::EqualSpacing)
3695							}
3696							"fractional_even_spacing" => {
3697								Either::Left(LayoutTy::FractionalEvenSpacing)
3698							}
3699							"fractional_odd_spacing" => {
3700								Either::Left(LayoutTy::FractionalOddSpacing)
3701							}
3702							"cw" => Either::Left(LayoutTy::Clockwise),
3703							"ccw" => Either::Left(LayoutTy::CounterClockwise),
3704							"point_mode" => Either::Left(LayoutTy::PointMode),
3705							"line_adjacency" => {
3706								Either::Left(LayoutTy::LineAdjacency)
3707							}
3708							"triangle_adjacency" => {
3709								Either::Left(LayoutTy::TriangleAdjacency)
3710							}
3711							"invocations" => {
3712								Either::Right(LayoutTy::Invocations)
3713							}
3714							"origin_upper_left" => {
3715								Either::Left(LayoutTy::OriginUpperLeft)
3716							}
3717							"pixel_center_integer" => {
3718								Either::Left(LayoutTy::PixelCenterInteger)
3719							}
3720							"early_fragment_tests" => {
3721								Either::Left(LayoutTy::EarlyFragmentTests)
3722							}
3723							"local_size_x" => {
3724								Either::Right(LayoutTy::LocalSizeX)
3725							}
3726							"local_size_y" => {
3727								Either::Right(LayoutTy::LocalSizeY)
3728							}
3729							"local_size_z" => {
3730								Either::Right(LayoutTy::LocalSizeZ)
3731							}
3732							"xfb_buffer" => Either::Right(LayoutTy::XfbBuffer),
3733							"xfb_stride" => Either::Right(LayoutTy::XfbStride),
3734							"xfb_offset" => Either::Right(LayoutTy::XfbOffset),
3735							"vertices" => Either::Right(LayoutTy::Vertices),
3736							"line_strip" => Either::Left(LayoutTy::LineStrip),
3737							"triangle_strip" => {
3738								Either::Left(LayoutTy::TriangleStrip)
3739							}
3740							"max_vertices" => {
3741								Either::Right(LayoutTy::MaxVertices)
3742							}
3743							"stream" => Either::Right(LayoutTy::Stream),
3744							"depth_any" => Either::Left(LayoutTy::DepthAny),
3745							"depth_greater" => {
3746								Either::Left(LayoutTy::DepthGreater)
3747							}
3748							"depth_less" => Either::Left(LayoutTy::DepthLess),
3749							"depth_unchanged" => {
3750								Either::Left(LayoutTy::DepthUnchanged)
3751							}
3752							_ => {
3753								// We have an identifier that is not a valid layout. We ignore it and continue
3754								// for the next layout.
3755								walker.push_colour(
3756									token_span,
3757									SyntaxType::UnresolvedIdent,
3758								);
3759								walker.push_syntax_diag(Syntax::Stmt(
3760									StmtDiag::LayoutInvalidIdent(token_span),
3761								));
3762								walker.advance();
3763								prev = Prev::Invalid;
3764								prev_span = token_span;
3765								continue;
3766							}
3767						}
3768					} else if let Token::Shared = token {
3769						Either::Left(LayoutTy::Shared)
3770					} else {
3771						// We have a token that is not a valid layout. We ignore it and continue for the next
3772						// layout.
3773						walker.push_colour(token_span, SyntaxType::Invalid);
3774						walker.push_syntax_diag(Syntax::Stmt(
3775							StmtDiag::LayoutInvalidIdent(token_span),
3776						));
3777						walker.advance();
3778						prev = Prev::Invalid;
3779						prev_span = token_span;
3780						continue;
3781					};
3782
3783					let (constructor, ident_span) = match constructor {
3784						Either::Left(ty) => {
3785							walker.push_colour(
3786								token_span,
3787								SyntaxType::LayoutQualifier,
3788							);
3789							walker.advance();
3790							layouts.push(Layout {
3791								span: token_span,
3792								ty,
3793							});
3794							prev = Prev::Layout;
3795							prev_span = token_span;
3796							continue;
3797						}
3798						Either::Right(constructor) => {
3799							walker.push_colour(
3800								token_span,
3801								SyntaxType::LayoutQualifier,
3802							);
3803							walker.advance();
3804							(constructor, token_span)
3805						}
3806					};
3807
3808					// We have a layout identifier which expects an expression.
3809
3810					// Consume the `=`.
3811					let (token, token_span) = match walker.peek() {
3812						Some(t) => t,
3813						None => {
3814							// We are missing the equals sign and we don't know what comes after. We ignore this
3815							// layout.
3816							let span = Span::new(
3817								kw_span.start,
3818								walker.get_last_span().end,
3819							);
3820							walker.push_syntax_diag(Syntax::Stmt(
3821								StmtDiag::LayoutExpectedEqAfterIdent(
3822									span.next_single_width(),
3823								),
3824							));
3825							qualifiers.push(Qualifier {
3826								span,
3827								ty: QualifierTy::Layout(layouts),
3828							});
3829							break 'outer;
3830						}
3831					};
3832					let eq_span = if let Token::Op(OpTy::Eq) = token {
3833						walker.push_colour(token_span, SyntaxType::Operator);
3834						walker.advance();
3835						token_span
3836					} else {
3837						// We are missing the equals sign and we don't know what comes after. We ignore this
3838						// layout.
3839						walker.push_syntax_diag(Syntax::Stmt(
3840							StmtDiag::LayoutExpectedEqAfterIdent(
3841								ident_span.next_single_width(),
3842							),
3843						));
3844						prev = Prev::Layout;
3845						prev_span = ident_span;
3846						continue;
3847					};
3848
3849					// Consume the expression.
3850					let value_expr = match expr_parser(
3851						walker,
3852						Mode::DisallowTopLevelList,
3853						[Token::RParen],
3854					) {
3855						(Some(e), mut syntax, mut semantic, mut colours) => {
3856							walker.append_colours(&mut colours);
3857							walker.append_syntax_diags(&mut syntax);
3858							walker.append_semantic_diags(&mut semantic);
3859							e
3860						}
3861						(None, _, _, _) => {
3862							// We are missing the expression.
3863							walker.push_syntax_diag(Syntax::Stmt(
3864								StmtDiag::LayoutExpectedExprAfterEq(
3865									eq_span.next_single_width(),
3866								),
3867							));
3868							layouts.push(Layout {
3869								span: Span::new(layout_span_start, eq_span.end),
3870								ty: constructor(None),
3871							});
3872							prev = Prev::Layout;
3873							prev_span = eq_span;
3874							continue;
3875						}
3876					};
3877
3878					prev = Prev::Layout;
3879					prev_span = value_expr.span;
3880					layouts.push(Layout {
3881						span: Span::new(layout_span_start, value_expr.span.end),
3882						ty: constructor(Some(value_expr)),
3883					});
3884				};
3885
3886				qualifiers.push(Qualifier {
3887					span: Span::new(kw_span.start, r_paren_span.end),
3888					ty: QualifierTy::Layout(layouts),
3889				});
3890				continue;
3891			}
3892			_ => break,
3893		}
3894		walker.advance();
3895	}
3896
3897	qualifiers
3898}
3899
3900/// Tries to parse a variable definition or a function declaration/definition, an expression statement, or an
3901/// interface block.
3902///
3903/// This function attempts to look for a statement at the current position. If this fails, error recovery till the
3904/// next clear statement delineation occurs.
3905///
3906/// - `parsing_last_for_stmt` - Set to `true` if this function is attempting to parse the increment statement in a
3907///   for loop header.
3908fn try_parse_definition_declaration_expr<'a, P: TokenStreamProvider<'a>>(
3909	walker: &mut Walker<'a, P>,
3910	nodes: &mut Vec<Node>,
3911	qualifiers: Vec<Qualifier>,
3912	parsing_last_for_stmt: bool,
3913) {
3914	// We attempt to parse an expression at the current position.
3915	let (start, mut start_syntax, mut start_semantic, mut start_colours) =
3916		match expr_parser(walker, Mode::Default, [Token::Semi]) {
3917			(Some(expr), syntax, semantic, colours) => {
3918				(expr, syntax, semantic, colours)
3919			}
3920			(None, _, _, _) => {
3921				// The current token cannot begin any sort of expression. Since this function gets called if all
3922				// other statement branches have failed to match, it means that whatever we have cannot be a valid
3923				// statement at all.
3924				invalidate_qualifiers(walker, qualifiers);
3925				seek_next_stmt(walker);
3926				return;
3927			}
3928		};
3929
3930	// We test if the expression can be converted into a type.
3931	if let Some(mut type_) = Type::parse(&start) {
3932		// Since we ran the expression parser in the Default mode, what we have so far must be something like
3933		// `foobar`, `int`, `vec2[3]`, `MyStruct` etc. This can be the type part of a declaration/definition, but
3934		// it could be just an expression statement depending on what comes next.
3935
3936		let (token, token_span) = match walker.peek() {
3937			Some(t) => t,
3938			None => {
3939				// We lack any identifiers necessary for a declaration/definition, so this must be an expression
3940				// statement.
3941				invalidate_qualifiers(walker, qualifiers);
3942				walker.append_colours(&mut start_colours);
3943				walker.append_syntax_diags(&mut start_syntax);
3944				walker.append_semantic_diags(&mut start_semantic);
3945				if parsing_last_for_stmt {
3946					walker.push_syntax_diag(Syntax::Stmt(
3947						StmtDiag::ForExpectedRParenAfterStmts(
3948							start.span.next_single_width(),
3949						),
3950					))
3951				} else {
3952					walker.push_syntax_diag(Syntax::Stmt(
3953						StmtDiag::ExprStmtExpectedSemiAfterExpr(
3954							start.span.next_single_width(),
3955						),
3956					));
3957				}
3958				nodes.push(Node {
3959					span: start.span,
3960					ty: NodeTy::Expr(start),
3961				});
3962				return;
3963			}
3964		};
3965
3966		// Check whether we have a function declaration/definition, whether this is an expression immediately
3967		// followed by a semi-colon, or whether this is an expression followed by an opening brace if we have an
3968		// appropriate qualifier to make this an interface block.
3969		match token {
3970			Token::Ident(i) => match walker.lookahead_1() {
3971				Some(next) => match next.0 {
3972					Token::LParen => {
3973						// We have a function declaration/definition.
3974						type_.qualifiers = qualifiers;
3975						let l_paren_span = next.1;
3976						let ident = Ident {
3977							name: i.clone(),
3978							span: token_span,
3979						};
3980						walker.append_colours(&mut start_colours);
3981						start_syntax.retain(|e| {
3982							if let Syntax::Expr(
3983								ExprDiag::FoundOperandAfterOperand(_, _),
3984							) = e
3985							{
3986								false
3987							} else {
3988								true
3989							}
3990						});
3991						walker.append_syntax_diags(&mut start_syntax);
3992						walker.append_semantic_diags(&mut start_semantic);
3993						walker.push_colour(
3994							token_span,
3995							SyntaxType::UncheckedIdent,
3996						);
3997						walker.advance();
3998						walker.push_colour(next.1, SyntaxType::Punctuation);
3999						walker.advance();
4000						parse_function(
4001							walker,
4002							nodes,
4003							type_,
4004							ident,
4005							l_paren_span,
4006						);
4007						return;
4008					}
4009					_ => {}
4010				},
4011				None => {}
4012			},
4013			Token::Semi => {
4014				// We have an expression statement.
4015				invalidate_qualifiers(walker, qualifiers);
4016				let semi_span = token_span;
4017				walker.append_colours(&mut start_colours);
4018				walker.append_syntax_diags(&mut start_syntax);
4019				walker.append_semantic_diags(&mut start_semantic);
4020				walker.push_colour(semi_span, SyntaxType::Punctuation);
4021				walker.advance();
4022				if parsing_last_for_stmt {
4023					walker.push_syntax_diag(Syntax::Stmt(
4024						StmtDiag::ForExpectedRParenAfterStmts(semi_span),
4025					));
4026				}
4027				nodes.push(Node {
4028					span: Span::new(start.span.start, semi_span.end),
4029					ty: NodeTy::Expr(start),
4030				});
4031				return;
4032			}
4033			Token::RParen if parsing_last_for_stmt => {
4034				// We have an expression statement.
4035				invalidate_qualifiers(walker, qualifiers);
4036				walker.append_colours(&mut start_colours);
4037				walker.append_syntax_diags(&mut start_syntax);
4038				walker.append_semantic_diags(&mut start_semantic);
4039				nodes.push(Node {
4040					span: start.span,
4041					ty: NodeTy::Expr(start),
4042				});
4043				return;
4044			}
4045			Token::LBrace => {
4046				// Interface blocks can begin with one of the following:
4047				// in
4048				// out
4049				// patch in
4050				// patch out
4051				// uniform
4052				// buffer
4053				// A layout() qualifier may precede any of these.
4054				if qualifiers.len() == 1 {
4055					match &qualifiers[0].ty {
4056						QualifierTy::In
4057						| QualifierTy::Out
4058						| QualifierTy::Uniform
4059						| QualifierTy::Buffer => {
4060							let l_brace_span = token_span;
4061							walker.append_colours(&mut start_colours);
4062							start_syntax.retain(|e| {
4063								if let Syntax::Expr(
4064									ExprDiag::FoundOperandAfterOperand(_, _),
4065								) = e
4066								{
4067									false
4068								} else {
4069									true
4070								}
4071							});
4072							walker.append_syntax_diags(&mut start_syntax);
4073							walker.append_semantic_diags(&mut start_semantic);
4074							walker.push_colour(
4075								l_brace_span,
4076								SyntaxType::Punctuation,
4077							);
4078							walker.advance();
4079							parse_interface_block(
4080								walker,
4081								nodes,
4082								qualifiers,
4083								start,
4084								l_brace_span,
4085							);
4086							return;
4087						}
4088						_ => {}
4089					}
4090				} else if qualifiers.len() == 2 {
4091					match (&qualifiers[0].ty, &qualifiers[1].ty) {
4092						(QualifierTy::Patch, QualifierTy::In)
4093						| (QualifierTy::Patch, QualifierTy::Out)
4094						| (QualifierTy::Layout(_), QualifierTy::In)
4095						| (QualifierTy::Layout(_), QualifierTy::Out)
4096						| (QualifierTy::Layout(_), QualifierTy::Uniform)
4097						| (QualifierTy::Layout(_), QualifierTy::Buffer) => {
4098							let l_brace_span = token_span;
4099							walker.append_colours(&mut start_colours);
4100							start_syntax.retain(|e| {
4101								if let Syntax::Expr(
4102									ExprDiag::FoundOperandAfterOperand(_, _),
4103								) = e
4104								{
4105									false
4106								} else {
4107									true
4108								}
4109							});
4110							walker.append_syntax_diags(&mut start_syntax);
4111							walker.append_semantic_diags(&mut start_semantic);
4112							walker.push_colour(
4113								l_brace_span,
4114								SyntaxType::Punctuation,
4115							);
4116							walker.advance();
4117							parse_interface_block(
4118								walker,
4119								nodes,
4120								qualifiers,
4121								start,
4122								l_brace_span,
4123							);
4124							return;
4125						}
4126						(_, _) => {}
4127					}
4128				} else if qualifiers.len() == 3 {
4129					match (
4130						&qualifiers[0].ty,
4131						&qualifiers[1].ty,
4132						&qualifiers[2].ty,
4133					) {
4134						(
4135							QualifierTy::Layout(_),
4136							QualifierTy::Patch,
4137							QualifierTy::In,
4138						)
4139						| (
4140							QualifierTy::Layout(_),
4141							QualifierTy::Patch,
4142							QualifierTy::Out,
4143						) => {
4144							let l_brace_span = token_span;
4145							walker.append_colours(&mut start_colours);
4146							start_syntax.retain(|e| {
4147								if let Syntax::Expr(
4148									ExprDiag::FoundOperandAfterOperand(_, _),
4149								) = e
4150								{
4151									false
4152								} else {
4153									true
4154								}
4155							});
4156							walker.append_syntax_diags(&mut start_syntax);
4157							walker.append_semantic_diags(&mut start_semantic);
4158							walker.push_colour(
4159								l_brace_span,
4160								SyntaxType::Punctuation,
4161							);
4162							walker.advance();
4163							parse_interface_block(
4164								walker,
4165								nodes,
4166								qualifiers,
4167								start,
4168								l_brace_span,
4169							);
4170							return;
4171						}
4172						(_, _, _) => {}
4173					}
4174				}
4175			}
4176			_ => {}
4177		}
4178
4179		// We don't have a function declaration/definition, nor an interface block, so this must be a variable
4180		// definition (with possibly an initialization) or an expression statement.
4181
4182		// We attempt to parse an expression for the identifier(s).
4183		let (
4184			ident_expr,
4185			mut ident_syntax,
4186			mut ident_semantic,
4187			mut ident_colours,
4188		) = match expr_parser(walker, Mode::BreakAtEq, [Token::Semi]) {
4189			(Some(e), syntax, semantic, colours) => {
4190				(e, syntax, semantic, colours)
4191			}
4192			(None, _, _, _) => {
4193				// We have an expression followed by neither another expression nor a semi-colon. We treat this
4194				// as an expression statement since that's the closest possible match.
4195				invalidate_qualifiers(walker, qualifiers);
4196				walker.append_colours(&mut start_colours);
4197				walker.append_syntax_diags(&mut start_syntax);
4198				walker.append_semantic_diags(&mut start_semantic);
4199				if parsing_last_for_stmt {
4200					walker.push_syntax_diag(Syntax::Stmt(
4201						StmtDiag::ForExpectedRParenAfterStmts(
4202							start.span.next_single_width(),
4203						),
4204					))
4205				} else {
4206					walker.push_syntax_diag(Syntax::Stmt(
4207						StmtDiag::ExprStmtExpectedSemiAfterExpr(
4208							start.span.next_single_width(),
4209						),
4210					));
4211				}
4212				nodes.push(Node {
4213					span: start.span,
4214					ty: NodeTy::Expr(start),
4215				});
4216				return;
4217			}
4218		};
4219		let ident_span = ident_expr.span;
4220
4221		// We test if the identifier(s) expression can be converted into one or more variable identifiers.
4222		let ident_info = if let Some(i) = Type::parse_var_idents(&ident_expr) {
4223			i
4224		} else {
4225			// We have a second expression after the first expression, but the second expression can't be converted
4226			// to one or more identifiers for a variable definition. We treat the first expression as an expression
4227			// statement, and the second expression as invalid.
4228			invalidate_qualifiers(walker, qualifiers);
4229			walker.append_colours(&mut start_colours);
4230			walker.append_syntax_diags(&mut start_syntax);
4231			walker.append_semantic_diags(&mut start_semantic);
4232			if parsing_last_for_stmt {
4233				walker.push_syntax_diag(Syntax::Stmt(
4234					StmtDiag::ForExpectedRParenAfterStmts(
4235						start.span.next_single_width(),
4236					),
4237				))
4238			} else {
4239				walker.push_syntax_diag(Syntax::Stmt(
4240					StmtDiag::ExprStmtExpectedSemiAfterExpr(
4241						start.span.next_single_width(),
4242					),
4243				));
4244			}
4245			nodes.push(Node {
4246				span: start.span,
4247				ty: NodeTy::Expr(start),
4248			});
4249			for SyntaxToken { span, .. } in ident_colours {
4250				walker.push_colour(span, SyntaxType::Invalid);
4251			}
4252			seek_next_stmt(walker);
4253			return;
4254		};
4255
4256		// We have one expression which can be converted to a type, and a second expression which can be converted
4257		// to one or more identifiers. That means the first expression will have a syntax error about a missing
4258		// operator between the two; we remove that error since in this case it's not applicable.
4259		start_syntax.retain(|e| {
4260			if let Syntax::Expr(ExprDiag::FoundOperandAfterOperand(_, _)) = e {
4261				false
4262			} else {
4263				true
4264			}
4265		});
4266		type_.qualifiers = qualifiers;
4267		walker.append_colours(&mut start_colours);
4268		walker.append_syntax_diags(&mut start_syntax);
4269		walker.append_semantic_diags(&mut start_semantic);
4270		walker.append_colours(&mut ident_colours);
4271		walker.append_syntax_diags(&mut ident_syntax);
4272		walker.append_semantic_diags(&mut ident_semantic);
4273
4274		fn var_def(
4275			type_: Type,
4276			idents: Vec<(Ident, Vec<ArrSize>)>,
4277			end_pos: usize,
4278		) -> Node {
4279			let span = Span::new(type_.span.start, end_pos);
4280			let mut vars = combine_type_with_idents(type_, idents);
4281			match vars.len() {
4282				1 => {
4283					let (type_, ident) = vars.remove(0);
4284					Node {
4285						span,
4286						ty: NodeTy::VarDef { type_, ident },
4287					}
4288				}
4289				_ => Node {
4290					span,
4291					ty: NodeTy::VarDefs(vars),
4292				},
4293			}
4294		}
4295
4296		fn var_def_init(
4297			type_: Type,
4298			idents: Vec<(Ident, Vec<ArrSize>)>,
4299			value: Option<Expr>,
4300			end_pos: usize,
4301		) -> Node {
4302			let span = Span::new(type_.span.start, end_pos);
4303			let mut vars = combine_type_with_idents(type_, idents);
4304			match vars.len() {
4305				1 => {
4306					let (type_, ident) = vars.remove(0);
4307					Node {
4308						span,
4309						ty: NodeTy::VarDefInit {
4310							type_,
4311							ident,
4312							value,
4313						},
4314					}
4315				}
4316				_ => Node {
4317					span,
4318					ty: NodeTy::VarDefInits(vars, value),
4319				},
4320			}
4321		}
4322
4323		// Consume the `;` for a definition, or a `=` for a definition with initialization.
4324		let (token, token_span) = match walker.peek() {
4325			Some(t) => t,
4326			None => {
4327				// We have something that matches the start of a variable definition. Since we have neither the `;`
4328				// or `=`, we assume that this is a definition without initialization that is missing the
4329				// semi-colon.
4330				walker.push_syntax_diag(Syntax::Stmt(
4331					StmtDiag::VarDefExpectedSemiOrEqAfterIdents(
4332						ident_span.next_single_width(),
4333					),
4334				));
4335				nodes.push(var_def(type_, ident_info, ident_span.end));
4336				return;
4337			}
4338		};
4339		if *token == Token::Semi {
4340			// We have a variable definition without initialization.
4341			let semi_span = token_span;
4342			walker.push_colour(semi_span, SyntaxType::Punctuation);
4343			walker.advance();
4344			nodes.push(var_def(type_, ident_info, semi_span.end));
4345			return;
4346		} else if *token == Token::Op(lexer::OpTy::Eq) {
4347			// We have a variable definition with initialization.
4348			let eq_span = token_span;
4349			walker.push_colour(eq_span, SyntaxType::Operator);
4350			walker.advance();
4351
4352			// Consume the value expression.
4353			let value_expr =
4354				match expr_parser(walker, Mode::Default, [Token::Semi]) {
4355					(Some(e), mut syntax, mut semantic, mut colours) => {
4356						walker.append_colours(&mut colours);
4357						walker.append_syntax_diags(&mut syntax);
4358						walker.append_semantic_diags(&mut semantic);
4359						e
4360					}
4361					(None, _, _, _) => {
4362						walker.push_syntax_diag(Syntax::Stmt(
4363							StmtDiag::VarDefInitExpectedValueAfterEq(
4364								eq_span.next_single_width(),
4365							),
4366						));
4367						nodes.push(var_def_init(
4368							type_,
4369							ident_info,
4370							None,
4371							eq_span.end,
4372						));
4373						seek_next_stmt(walker);
4374						return;
4375					}
4376				};
4377
4378			// Consume the semi-colon.
4379			let (token, token_span) = match walker.peek() {
4380				Some(t) => t,
4381				None => {
4382					let value_span = value_expr.span;
4383					walker.push_syntax_diag(Syntax::Stmt(
4384						StmtDiag::VarDefInitExpectedSemiAfterValue(
4385							value_span.next_single_width(),
4386						),
4387					));
4388					nodes.push(var_def_init(
4389						type_,
4390						ident_info,
4391						Some(value_expr),
4392						value_span.end,
4393					));
4394					return;
4395				}
4396			};
4397			if *token == Token::Semi {
4398				let semi_span = token_span;
4399				walker.push_colour(semi_span, SyntaxType::Punctuation);
4400				walker.advance();
4401				nodes.push(var_def_init(
4402					type_,
4403					ident_info,
4404					Some(value_expr),
4405					semi_span.end,
4406				));
4407				return;
4408			} else {
4409				let end_span = token_span;
4410				walker.push_syntax_diag(Syntax::Stmt(
4411					StmtDiag::VarDefInitExpectedSemiAfterValue(
4412						end_span.next_single_width(),
4413					),
4414				));
4415				nodes.push(var_def_init(
4416					type_,
4417					ident_info,
4418					Some(value_expr),
4419					end_span.end,
4420				));
4421				seek_next_stmt(walker);
4422				return;
4423			}
4424		} else {
4425			// We have something that matches the start of a variable definition. Since we have neither the `;` or
4426			// `=`, we assume that this is a definition without initialization which is missing the semi-colon.
4427			walker.push_syntax_diag(Syntax::Stmt(
4428				StmtDiag::VarDefExpectedSemiOrEqAfterIdents(
4429					ident_span.next_single_width(),
4430				),
4431			));
4432			nodes.push(var_def(type_, ident_info, ident_span.end));
4433			seek_next_stmt(walker);
4434			return;
4435		}
4436	}
4437
4438	// We have an expression which cannot be parsed as a type, so this cannot start a declaration/definition nor an
4439	// interface block; it must therefore be a standalone expression statement.
4440	invalidate_qualifiers(walker, qualifiers);
4441	let expr = start;
4442	walker.append_colours(&mut start_colours);
4443	walker.append_syntax_diags(&mut start_syntax);
4444	walker.append_semantic_diags(&mut start_semantic);
4445
4446	// Consume the `;` to end the statement.
4447	let semi_span = match walker.peek() {
4448		Some((token, span)) => {
4449			if *token == Token::Semi {
4450				walker.push_colour(span, SyntaxType::Punctuation);
4451				walker.advance();
4452				Some(span)
4453			} else {
4454				None
4455			}
4456		}
4457		None => None,
4458	};
4459	if semi_span.is_none() {
4460		walker.push_syntax_diag(Syntax::Stmt(
4461			StmtDiag::ExprStmtExpectedSemiAfterExpr(
4462				expr.span.next_single_width(),
4463			),
4464		));
4465		seek_next_stmt(walker);
4466	}
4467
4468	nodes.push(Node {
4469		span: if let Some(semi_span) = semi_span {
4470			Span::new(expr.span.start, semi_span.end)
4471		} else {
4472			expr.span
4473		},
4474		ty: NodeTy::Expr(expr),
4475	});
4476}
4477
4478/// Parses a function declaration/definition.
4479///
4480/// This function assumes that the return type, ident, and opening parenthesis have been consumed.
4481fn parse_function<'a, P: TokenStreamProvider<'a>>(
4482	walker: &mut Walker<'a, P>,
4483	nodes: &mut Vec<Node>,
4484	return_type: Type,
4485	ident: Ident,
4486	l_paren_span: Span,
4487) {
4488	// Look for any parameters until we hit a closing `)` parenthesis.
4489	#[derive(PartialEq)]
4490	enum Prev {
4491		None,
4492		Param,
4493		Comma,
4494		Invalid,
4495	}
4496	let mut prev = Prev::None;
4497	let mut prev_span = l_paren_span;
4498	let mut params = Vec::new();
4499	let param_end_span = loop {
4500		let (token, token_span) = match walker.peek() {
4501			Some(t) => t,
4502			None => {
4503				// We have not yet finished parsing the parameter list, but we treat this as a valid declaration
4504				// since that's the closest match.
4505				let span = Span::new(return_type.span.start, prev_span.end);
4506				walker.push_syntax_diag(Syntax::Stmt(
4507					StmtDiag::ParamsExpectedRParen(
4508						prev_span.next_single_width(),
4509					),
4510				));
4511				nodes.push(Node {
4512					span,
4513					ty: NodeTy::FnDecl {
4514						return_type,
4515						ident,
4516						params,
4517					},
4518				});
4519				return;
4520			}
4521		};
4522
4523		match token {
4524			Token::Comma => {
4525				walker.push_colour(token_span, SyntaxType::Punctuation);
4526				walker.advance();
4527				if prev == Prev::Comma {
4528					walker.push_syntax_diag(Syntax::Stmt(
4529						StmtDiag::ParamsExpectedParamAfterComma(
4530							Span::new_between(prev_span, token_span),
4531						),
4532					));
4533				} else if prev == Prev::None {
4534					walker.push_syntax_diag(Syntax::Stmt(
4535						StmtDiag::ParamsExpectedParamBetweenParenComma(
4536							Span::new_between(l_paren_span, token_span),
4537						),
4538					));
4539				}
4540				prev = Prev::Comma;
4541				prev_span = token_span;
4542				continue;
4543			}
4544			Token::RParen => {
4545				walker.push_colour(token_span, SyntaxType::Punctuation);
4546				walker.advance();
4547				if prev == Prev::Comma {
4548					walker.push_syntax_diag(Syntax::Stmt(
4549						StmtDiag::ParamsExpectedParamAfterComma(
4550							Span::new_between(prev_span, token_span),
4551						),
4552					));
4553				}
4554				break token_span;
4555			}
4556			Token::Semi => {
4557				walker.push_colour(token_span, SyntaxType::Punctuation);
4558				walker.advance();
4559				// We have not yet finished parsing the parameter list but we've encountered a semi-colon. We treat
4560				// this as a valid declaration since that's the closest match.
4561				walker.push_syntax_diag(Syntax::Stmt(
4562					StmtDiag::ParamsExpectedRParen(
4563						prev_span.next_single_width(),
4564					),
4565				));
4566				nodes.push(Node {
4567					span: Span::new(return_type.span.start, token_span.end),
4568					ty: NodeTy::FnDecl {
4569						return_type,
4570						ident,
4571						params,
4572					},
4573				});
4574				return;
4575			}
4576			Token::LBrace => {
4577				walker.push_colour(token_span, SyntaxType::Punctuation);
4578				// We don't advance because the next check after this loop checks for a l-brace.
4579
4580				// We have not yet finished parsing the parameter list but we've encountered a l-brace. We treat
4581				// this as a potentially valid definition since that's the closest match.
4582				walker.push_syntax_diag(Syntax::Stmt(
4583					StmtDiag::ParamsExpectedRParen(
4584						prev_span.next_single_width(),
4585					),
4586				));
4587				break token_span;
4588			}
4589			_ => {}
4590		}
4591
4592		if prev == Prev::Param {
4593			walker.push_syntax_diag(Syntax::Stmt(
4594				StmtDiag::ParamsExpectedCommaAfterParam(
4595					prev_span.next_single_width(),
4596				),
4597			));
4598		}
4599		let param_span_start = token_span.start;
4600
4601		let qualifiers = try_parse_qualifiers(walker);
4602
4603		// Consume the type.
4604		let mut type_ = match expr_parser(
4605			walker,
4606			Mode::TakeOneUnit,
4607			[Token::Semi, Token::LBrace],
4608		) {
4609			(Some(e), _, mut semantic, mut colours) => {
4610				if let Some(type_) = Type::parse(&e) {
4611					walker.append_colours(&mut colours);
4612					walker.append_semantic_diags(&mut semantic);
4613					type_
4614				} else {
4615					// We have an expression which cannot be parsed into a type. We ignore this and continue
4616					// searching for the next parameter.
4617					for SyntaxToken { span, .. } in colours {
4618						walker.push_colour(span, SyntaxType::Invalid);
4619					}
4620					walker.push_syntax_diag(Syntax::Stmt(
4621						StmtDiag::ParamsInvalidTypeExpr(e.span),
4622					));
4623					prev = Prev::Invalid;
4624					prev_span = Span::new(param_span_start, e.span.end);
4625					continue;
4626				}
4627			}
4628			(None, _, _, _) => {
4629				// We immediately have a token that is not an expression. We ignore this and loop until we hit
4630				// something recognizable.
4631				let end_span = loop {
4632					match walker.peek() {
4633						Some((token, span)) => {
4634							if *token == Token::Comma
4635								|| *token == Token::RParen || *token
4636								== Token::Semi || *token == Token::LBrace
4637							{
4638								break span;
4639							} else {
4640								walker.push_colour(span, SyntaxType::Invalid);
4641								walker.advance();
4642								continue;
4643							}
4644						}
4645						None => break walker.get_last_span(),
4646					}
4647				};
4648				walker.push_syntax_diag(Syntax::Stmt(
4649					StmtDiag::ParamsInvalidTypeExpr(Span::new(
4650						param_span_start,
4651						end_span.end,
4652					)),
4653				));
4654				prev = Prev::Invalid;
4655				prev_span = token_span;
4656				continue;
4657			}
4658		};
4659
4660		// Look for the optional ident.
4661		let (ident_expr, ident_colours) = match expr_parser(
4662			walker,
4663			Mode::TakeOneUnit,
4664			[Token::Semi, Token::LBrace],
4665		) {
4666			(Some(e), _, mut semantic, colours) => {
4667				walker.append_semantic_diags(&mut semantic);
4668				(e, colours)
4669			}
4670			(None, _, _, _) => {
4671				// We have a first expression and then something that is not an expression. We treat this as an
4672				// anonymous parameter, whatever the current token is will be dealt with in the next iteration.
4673				type_.qualifiers = qualifiers;
4674				let param_span = Span::new(param_span_start, type_.span.end);
4675				params.push(Param {
4676					span: param_span,
4677					type_,
4678					ident: Omittable::None,
4679				});
4680				prev = Prev::Param;
4681				prev_span = param_span;
4682				continue;
4683			}
4684		};
4685		let ident_span = ident_expr.span;
4686
4687		// Invariant: This vector is guaranteed to have a length of 1 because the `ident_expr` was parsed with the
4688		// `TakeOneUnit` mode which prevents lists.
4689		let ident_info = if let Some(i) = Type::parse_var_idents(&ident_expr) {
4690			i
4691		} else {
4692			// We have a second expression after the first expression, but the second expression can't be converted
4693			// to an identifier for the parameter. We treat the first type expression as an anonymous parameter,
4694			// and the second expression as invalid.
4695			let param_span = Span::new(param_span_start, type_.span.end);
4696			type_.qualifiers = qualifiers;
4697			params.push(Param {
4698				span: Span::new(param_span_start, type_.span.end),
4699				type_,
4700				ident: Omittable::None,
4701			});
4702			walker.push_syntax_diag(Syntax::Stmt(
4703				StmtDiag::ParamsInvalidIdentExpr(ident_expr.span),
4704			));
4705			for SyntaxToken { span, .. } in ident_colours {
4706				walker.push_colour(span, SyntaxType::Invalid);
4707			}
4708			prev = Prev::Param;
4709			prev_span = param_span;
4710			continue;
4711		};
4712
4713		type_.qualifiers = qualifiers;
4714		let (type_, ident) =
4715			combine_type_with_idents(type_, ident_info).remove(0);
4716		let param_span = Span::new(param_span_start, ident_span.end);
4717		params.push(Param {
4718			span: param_span,
4719			type_,
4720			ident: Omittable::Some(ident),
4721		});
4722		prev = Prev::Param;
4723		prev_span = param_span;
4724	};
4725
4726	// Consume the `;` for a declaration or a `{` for a definition.
4727	let (token, token_span) = match walker.peek() {
4728		Some(t) => t,
4729		None => {
4730			// This branch will only be triggered if we exited the param loop with a `)`, it will not trigger if we
4731			// exit with a `{` because that token is not consumed.
4732
4733			// We are missing a `;` for a declaration. We treat this as a declaration since that's the closest
4734			// match.
4735			walker.push_syntax_diag(Syntax::Stmt(
4736				StmtDiag::FnExpectedSemiOrLBraceAfterParams(
4737					param_end_span.next_single_width(),
4738				),
4739			));
4740			nodes.push(Node {
4741				span: Span::new(return_type.span.start, param_end_span.end),
4742				ty: NodeTy::FnDecl {
4743					return_type,
4744					ident,
4745					params,
4746				},
4747			});
4748			return;
4749		}
4750	};
4751	if *token == Token::Semi {
4752		// We have a declaration.
4753		walker.push_colour(token_span, SyntaxType::Punctuation);
4754		walker.advance();
4755		nodes.push(Node {
4756			span: Span::new(return_type.span.start, param_end_span.end),
4757			ty: NodeTy::FnDecl {
4758				return_type,
4759				ident,
4760				params,
4761			},
4762		});
4763	} else if *token == Token::LBrace {
4764		// We have a definition.
4765		let l_brace_span = token_span;
4766		walker.push_colour(l_brace_span, SyntaxType::Punctuation);
4767		walker.advance();
4768		let body = parse_scope(walker, brace_scope, l_brace_span);
4769		nodes.push(Node {
4770			span: Span::new(return_type.span.start, body.span.end),
4771			ty: NodeTy::FnDef {
4772				return_type,
4773				ident,
4774				params,
4775				body,
4776			},
4777		});
4778	} else {
4779		// We are missing a `;` for a declaration. We treat this as a declaration since that's the closest match.
4780		walker.push_syntax_diag(Syntax::Stmt(
4781			StmtDiag::FnExpectedSemiOrLBraceAfterParams(
4782				param_end_span.next_single_width(),
4783			),
4784		));
4785		nodes.push(Node {
4786			span: Span::new(return_type.span.start, param_end_span.end),
4787			ty: NodeTy::FnDecl {
4788				return_type,
4789				ident,
4790				params,
4791			},
4792		});
4793		seek_next_stmt(walker);
4794	}
4795}
4796
4797/// Parses a subroutine type, associated function, or a subroutine uniform.
4798///
4799/// This function assumes that the `subroutine` keyword is not yet consumed.
4800fn parse_subroutine<'a, P: TokenStreamProvider<'a>>(
4801	walker: &mut Walker<'a, P>,
4802	nodes: &mut Vec<Node>,
4803	kw_span: Span,
4804) {
4805	walker.push_colour(kw_span, SyntaxType::Keyword);
4806	walker.advance();
4807
4808	let (token, token_span) = match walker.peek() {
4809		Some(t) => t,
4810		None => {
4811			walker.push_syntax_diag(Syntax::Stmt(
4812				StmtDiag::SubroutineExpectedTypeFuncUniformAfterKw(
4813					kw_span.next_single_width(),
4814				),
4815			));
4816			return;
4817		}
4818	};
4819
4820	if *token == Token::Uniform {
4821		// We have a subroutine uniform definition.
4822		let uniform_kw_span = token_span;
4823		walker.push_colour(uniform_kw_span, SyntaxType::Keyword);
4824		walker.advance();
4825		let mut inner = Vec::new();
4826		try_parse_definition_declaration_expr(
4827			walker,
4828			&mut inner,
4829			vec![],
4830			false,
4831		);
4832
4833		if inner.is_empty() {
4834			walker.push_syntax_diag(Syntax::Stmt(
4835				StmtDiag::SubroutineExpectedVarDefAfterUniformKw(
4836					uniform_kw_span.next_single_width(),
4837				),
4838			));
4839		} else {
4840			let first = inner.remove(0);
4841			match first.ty {
4842				NodeTy::VarDef { type_, ident } => {
4843					nodes.push(Node {
4844						span: Span::new(kw_span.start, first.span.end),
4845						ty: NodeTy::SubroutineUniformDef { type_, ident },
4846					});
4847				}
4848				_ => {
4849					walker.push_syntax_diag(Syntax::Stmt(
4850						StmtDiag::SubroutineExpectedVarDefAfterUniformKw(
4851							uniform_kw_span.next_single_width(),
4852						),
4853					));
4854					nodes.push(first);
4855				}
4856			}
4857			inner.into_iter().for_each(|n| nodes.push(n));
4858		}
4859	} else if *token == Token::LParen {
4860		// We have an associated function definition.
4861		let l_paren_span = token_span;
4862		walker.push_colour(l_paren_span, SyntaxType::Punctuation);
4863		walker.advance();
4864
4865		// Look for any subroutine identifiers until we hit a closing `)` parenthesis.
4866		#[derive(PartialEq)]
4867		enum Prev {
4868			None,
4869			Ident,
4870			Comma,
4871			Invalid,
4872		}
4873		let mut prev = Prev::None;
4874		let mut prev_span = l_paren_span;
4875		let mut associations = Vec::new();
4876		let r_paren_span = loop {
4877			let (token, token_span) = match walker.peek() {
4878				Some(t) => t,
4879				None => {
4880					walker.push_syntax_diag(Syntax::Stmt(
4881						StmtDiag::SubroutineAssociatedListExpectedRParen(
4882							prev_span.next_single_width(),
4883						),
4884					));
4885					return;
4886				}
4887			};
4888
4889			match token {
4890				Token::Comma => {
4891					walker.push_colour(token_span, SyntaxType::Punctuation);
4892					walker.advance();
4893					if prev == Prev::Comma {
4894						walker.push_syntax_diag(Syntax::Stmt(
4895							StmtDiag::SubroutineAssociatedListExpectedIdentAfterComma(
4896								Span::new_between(prev_span, token_span),
4897							),
4898						));
4899					} else if prev == Prev::None {
4900						walker.push_syntax_diag(Syntax::Stmt(
4901							StmtDiag::SubroutineAssociatedListExpectedIdentBetweenParenComma(
4902								Span::new_between(l_paren_span, token_span),
4903							),
4904						));
4905					}
4906					prev = Prev::Comma;
4907					prev_span = token_span;
4908					continue;
4909				}
4910				Token::RParen => {
4911					walker.push_colour(token_span, SyntaxType::Punctuation);
4912					walker.advance();
4913					if prev == Prev::Comma {
4914						walker.push_syntax_diag(Syntax::Stmt(
4915							StmtDiag::SubroutineAssociatedListExpectedIdentAfterComma(
4916								Span::new_between(prev_span, token_span),
4917							),
4918						));
4919					}
4920					break token_span;
4921				}
4922				Token::Ident(str) => {
4923					associations.push(Ident {
4924						name: str.to_owned(),
4925						span: token_span,
4926					});
4927					walker.push_colour(token_span, SyntaxType::UncheckedIdent);
4928					walker.advance();
4929					if prev == Prev::Ident {
4930						walker.push_syntax_diag(Syntax::Stmt(StmtDiag::SubroutineAssociatedListExpectedCommaAfterIdent(
4931							prev_span.next_single_width()
4932						)));
4933					}
4934					prev = Prev::Ident;
4935					prev_span = token_span;
4936					continue;
4937				}
4938				_ => {
4939					walker.push_colour(token_span, SyntaxType::Invalid);
4940					walker.advance();
4941					prev = Prev::Invalid;
4942					prev_span = token_span;
4943				}
4944			}
4945		};
4946
4947		let mut inner = Vec::new();
4948		try_parse_definition_declaration_expr(
4949			walker,
4950			&mut inner,
4951			vec![],
4952			false,
4953		);
4954
4955		if inner.is_empty() {
4956			walker.push_syntax_diag(Syntax::Stmt(
4957				StmtDiag::SubroutineExpectedFnDefAfterAssociatedList(
4958					r_paren_span.next_single_width(),
4959				),
4960			));
4961		} else {
4962			let first = inner.remove(0);
4963			match first.ty {
4964				NodeTy::FnDef {
4965					return_type,
4966					ident,
4967					params,
4968					body,
4969				} => {
4970					nodes.push(Node {
4971						span: Span::new(kw_span.start, first.span.end),
4972						ty: NodeTy::SubroutineFnDef {
4973							associations,
4974							return_type,
4975							ident,
4976							params,
4977							body: Some(body),
4978						},
4979					});
4980				}
4981				NodeTy::FnDecl {
4982					return_type,
4983					ident,
4984					params,
4985				} => {
4986					walker.push_syntax_diag(Syntax::Stmt(
4987						StmtDiag::SubroutineExpectedFnDefAfterAssociatedListFoundDecl(
4988							first.span,
4989						),
4990					));
4991					nodes.push(Node {
4992						span: Span::new(kw_span.start, first.span.end),
4993						ty: NodeTy::SubroutineFnDef {
4994							associations,
4995							return_type,
4996							ident,
4997							params,
4998							body: None,
4999						},
5000					});
5001				}
5002				_ => {
5003					walker.push_syntax_diag(Syntax::Stmt(
5004						StmtDiag::SubroutineExpectedFnDefAfterAssociatedList(
5005							r_paren_span.next_single_width(),
5006						),
5007					));
5008					nodes.push(first);
5009				}
5010			}
5011		}
5012		inner.into_iter().for_each(|n| nodes.push(n));
5013	} else {
5014		// We have a subroutine type declaration.
5015		let mut inner = Vec::new();
5016		try_parse_definition_declaration_expr(
5017			walker,
5018			&mut inner,
5019			vec![],
5020			false,
5021		);
5022
5023		if inner.is_empty() {
5024			walker.push_syntax_diag(Syntax::Stmt(
5025				StmtDiag::SubroutineExpectedTypeFuncUniformAfterKw(
5026					kw_span.next_single_width(),
5027				),
5028			));
5029		} else {
5030			let first = inner.remove(0);
5031			match first.ty {
5032				NodeTy::FnDecl {
5033					return_type,
5034					ident,
5035					params,
5036				} => {
5037					nodes.push(Node {
5038						span: Span::new(kw_span.start, first.span.end),
5039						ty: NodeTy::SubroutineTypeDecl {
5040							return_type,
5041							ident,
5042							params,
5043						},
5044					});
5045				}
5046				NodeTy::FnDef {
5047					return_type,
5048					ident,
5049					params,
5050					body,
5051				} => {
5052					walker.push_syntax_diag(Syntax::Stmt(
5053						StmtDiag::SubroutineMissingAssociationsForFnDef(
5054							Span::new_between(kw_span, return_type.span),
5055						),
5056					));
5057					nodes.push(Node {
5058						span: Span::new(kw_span.start, first.span.end),
5059						ty: NodeTy::SubroutineFnDef {
5060							associations: vec![],
5061							return_type,
5062							ident,
5063							params,
5064							body: Some(body),
5065						},
5066					});
5067				}
5068				NodeTy::VarDef { type_, ident } => {
5069					walker.push_syntax_diag(Syntax::Stmt(
5070						StmtDiag::SubroutineMissingUniformKwForUniformDef(
5071							Span::new_between(kw_span, type_.span),
5072						),
5073					));
5074					nodes.push(Node {
5075						span: Span::new(kw_span.start, first.span.end),
5076						ty: NodeTy::SubroutineUniformDef { type_, ident },
5077					});
5078				}
5079				_ => {
5080					walker.push_syntax_diag(Syntax::Stmt(
5081						StmtDiag::SubroutineExpectedTypeFuncUniformAfterKw(
5082							kw_span.next_single_width(),
5083						),
5084					));
5085					nodes.push(first);
5086				}
5087			}
5088			inner.into_iter().for_each(|n| nodes.push(n));
5089		}
5090	}
5091}
5092
5093/// Parses an interface block.
5094///
5095/// This function assumes that the qualifiers, identifier, and opening brace have been consumed.
5096///
5097/// # Invariants
5098/// `qualifiers` is not empty.
5099fn parse_interface_block<'a, P: TokenStreamProvider<'a>>(
5100	walker: &mut Walker<'a, P>,
5101	nodes: &mut Vec<Node>,
5102	qualifiers: Vec<Qualifier>,
5103	ident_expr: Expr,
5104	l_brace_span: Span,
5105) {
5106	let ident = match ident_expr.ty {
5107		ExprTy::Ident(i) => i,
5108		_ => {
5109			// We do not have an identifier before the opening brace. We consume tokens until we hit a closing
5110			// brace.
5111			loop {
5112				match walker.peek() {
5113					Some((token, span)) => {
5114						if *token == Token::RBrace {
5115							walker.push_colour(span, SyntaxType::Punctuation);
5116							walker.advance();
5117							break;
5118						} else {
5119							walker.push_colour(span, SyntaxType::Invalid);
5120							walker.advance();
5121						}
5122					}
5123					None => break,
5124				}
5125			}
5126			return;
5127		}
5128	};
5129
5130	let interface_span_start = qualifiers.first().unwrap().span.start;
5131
5132	// Parse the contents of the body.
5133	let body = parse_scope(walker, brace_scope, l_brace_span);
5134	if body.contents.is_empty() {
5135		walker.push_syntax_diag(Syntax::Stmt(
5136			StmtDiag::InterfaceExpectedAtLeastOneStmtInBody(body.span),
5137		))
5138	}
5139	for stmt in &body.contents {
5140		match &stmt.ty {
5141			NodeTy::VarDef { .. }
5142			| NodeTy::VarDefInit { .. }
5143			| NodeTy::VarDefs(_)
5144			| NodeTy::VarDefInits(_, _) => {}
5145			_ => {
5146				walker.push_syntax_diag(Syntax::Stmt(
5147					StmtDiag::InterfaceInvalidStmtInBody(stmt.span),
5148				));
5149			}
5150		}
5151	}
5152
5153	// Look for an optional instance definition.
5154	let instance = match expr_parser(walker, Mode::TakeOneUnit, [Token::Semi]) {
5155		(Some(e), mut syntax, mut semantic, mut colours) => {
5156			if let Some(_) = Type::parse(&e) {
5157				// This expression can be a valid instance definition.
5158				walker.append_colours(&mut colours);
5159				walker.append_syntax_diags(&mut syntax);
5160				walker.append_semantic_diags(&mut semantic);
5161				Omittable::Some(e)
5162			} else {
5163				walker.append_colours(&mut colours);
5164				walker.push_syntax_diag(Syntax::Stmt(
5165					StmtDiag::InterfaceExpectedInstanceOrSemiAfterBody(e.span),
5166				));
5167				nodes.push(Node {
5168					span: Span::new(interface_span_start, body.span.end),
5169					ty: NodeTy::InterfaceDef {
5170						qualifiers,
5171						ident,
5172						body,
5173						instance: Omittable::None,
5174					},
5175				});
5176				return;
5177			}
5178		}
5179		_ => Omittable::None,
5180	};
5181
5182	// Consume the `;` to end the statement.
5183	let semi_span = match walker.peek() {
5184		Some((token, span)) => {
5185			if *token == Token::Semi {
5186				walker.push_colour(span, SyntaxType::Punctuation);
5187				walker.advance();
5188				Some(span)
5189			} else {
5190				None
5191			}
5192		}
5193		None => None,
5194	};
5195	if semi_span.is_none() {
5196		if let Omittable::Some(ref i) = instance {
5197			walker.push_syntax_diag(Syntax::Stmt(
5198				StmtDiag::InterfaceExpectedSemiAfterInstance(
5199					i.span.next_single_width(),
5200				),
5201			));
5202		} else {
5203			walker.push_syntax_diag(Syntax::Stmt(
5204				StmtDiag::InterfaceExpectedInstanceOrSemiAfterBody(
5205					body.span.next_single_width(),
5206				),
5207			));
5208		}
5209	}
5210
5211	nodes.push(Node {
5212		span: Span::new(
5213			interface_span_start,
5214			if let Some(semi_span) = semi_span {
5215				semi_span.end
5216			} else {
5217				if let Omittable::Some(ref i) = instance {
5218					i.span.end
5219				} else {
5220					body.span.end
5221				}
5222			},
5223		),
5224		ty: NodeTy::InterfaceDef {
5225			qualifiers,
5226			ident,
5227			body,
5228			instance,
5229		},
5230	});
5231}
5232
5233/// Parses a struct declaration/definition.
5234///
5235/// This function assumes that the keyword is not yet consumed.
5236fn parse_struct<'a, P: TokenStreamProvider<'a>>(
5237	walker: &mut Walker<'a, P>,
5238	nodes: &mut Vec<Node>,
5239	qualifiers: Vec<Qualifier>,
5240	kw_span: Span,
5241) {
5242	walker.push_colour(kw_span, SyntaxType::Keyword);
5243	walker.advance();
5244
5245	// Consume the identifier.
5246	let ident = match expr_parser(
5247		walker,
5248		Mode::TakeOneUnit,
5249		[Token::LBrace, Token::Semi],
5250	) {
5251		(Some(e), _, mut semantic, mut colours) => match e.ty {
5252			ExprTy::Ident(i) => {
5253				walker.append_colours(&mut colours);
5254				walker.append_semantic_diags(&mut semantic);
5255				i
5256			}
5257			_ => {
5258				walker.append_colours(&mut colours);
5259				walker.push_syntax_diag(Syntax::Stmt(
5260					StmtDiag::StructExpectedIdentAfterKw(e.span),
5261				));
5262				return;
5263			}
5264		},
5265		(None, _, _, _) => {
5266			walker.push_syntax_diag(Syntax::Stmt(
5267				StmtDiag::StructExpectedIdentAfterKw(
5268					kw_span.next_single_width(),
5269				),
5270			));
5271			return;
5272		}
5273	};
5274
5275	let struct_span_start = if let Some(q) = qualifiers.first() {
5276		q.span.start
5277	} else {
5278		kw_span.start
5279	};
5280
5281	// Consume the `{`.
5282	let (token, token_span) = match walker.peek() {
5283		Some(t) => t,
5284		None => {
5285			// We don't create a struct declaration because it would result in two errors that would reduce
5286			// clarity.
5287			walker.push_syntax_diag(Syntax::Stmt(
5288				StmtDiag::StructExpectedLBraceAfterIdent(
5289					ident.span.next_single_width(),
5290				),
5291			));
5292			return;
5293		}
5294	};
5295	let l_brace_span = if *token == Token::LBrace {
5296		walker.push_colour(token_span, SyntaxType::Punctuation);
5297		walker.advance();
5298		token_span
5299	} else if *token == Token::Semi {
5300		// We have a declaration, (which is illegal).
5301		let span = Span::new(struct_span_start, token_span.end);
5302		walker.push_colour(token_span, SyntaxType::Punctuation);
5303		walker.push_syntax_diag(Syntax::Stmt(StmtDiag::StructDeclIsIllegal(
5304			span,
5305		)));
5306		walker.advance();
5307		nodes.push(Node {
5308			span,
5309			ty: NodeTy::StructDecl { qualifiers, ident },
5310		});
5311		return;
5312	} else {
5313		// We don't create a struct declaration because it would result in two errors that would reduce clarity.
5314		walker.push_syntax_diag(Syntax::Stmt(
5315			StmtDiag::StructExpectedLBraceAfterIdent(
5316				ident.span.next_single_width(),
5317			),
5318		));
5319		return;
5320	};
5321
5322	// Parse the contents of the body.
5323	let body = parse_scope(walker, brace_scope, l_brace_span);
5324	if body.contents.is_empty() {
5325		walker.push_syntax_diag(Syntax::Stmt(
5326			StmtDiag::StructExpectedAtLeastOneStmtInBody(body.span),
5327		));
5328	}
5329	for stmt in &body.contents {
5330		match &stmt.ty {
5331			NodeTy::VarDef { .. }
5332			| NodeTy::VarDefInit { .. }
5333			| NodeTy::VarDefs(_)
5334			| NodeTy::VarDefInits(_, _) => {}
5335			_ => {
5336				walker.push_syntax_diag(Syntax::Stmt(
5337					StmtDiag::StructInvalidStmtInBody(stmt.span),
5338				));
5339			}
5340		}
5341	}
5342
5343	// Look for an optional instance identifier.
5344	let instance = match expr_parser(walker, Mode::TakeOneUnit, [Token::Semi]) {
5345		(Some(e), _, mut semantic, mut colours) => match e.ty {
5346			ExprTy::Ident(i) => {
5347				walker.append_colours(&mut colours);
5348				walker.append_semantic_diags(&mut semantic);
5349				Omittable::Some(i)
5350			}
5351			_ => {
5352				walker.append_colours(&mut colours);
5353				walker.push_syntax_diag(Syntax::Stmt(
5354					StmtDiag::StructExpectedInstanceOrSemiAfterBody(e.span),
5355				));
5356				nodes.push(Node {
5357					span: Span::new(struct_span_start, body.span.end),
5358					ty: NodeTy::StructDef {
5359						qualifiers,
5360						ident,
5361						body,
5362						instance: Omittable::None,
5363					},
5364				});
5365				return;
5366			}
5367		},
5368		_ => Omittable::None,
5369	};
5370
5371	// Consume the `;` to end the statement.
5372	let semi_span = match walker.peek() {
5373		Some((token, span)) => {
5374			if *token == Token::Semi {
5375				walker.push_colour(span, SyntaxType::Punctuation);
5376				walker.advance();
5377				Some(span)
5378			} else {
5379				None
5380			}
5381		}
5382		None => None,
5383	};
5384	if semi_span.is_none() {
5385		if let Omittable::Some(ref i) = instance {
5386			walker.push_syntax_diag(Syntax::Stmt(
5387				StmtDiag::StructExpectedSemiAfterInstance(
5388					i.span.next_single_width(),
5389				),
5390			));
5391		} else {
5392			walker.push_syntax_diag(Syntax::Stmt(
5393				StmtDiag::StructExpectedInstanceOrSemiAfterBody(
5394					body.span.next_single_width(),
5395				),
5396			));
5397		}
5398	}
5399
5400	nodes.push(Node {
5401		span: Span::new(
5402			struct_span_start,
5403			if let Some(semi_span) = semi_span {
5404				semi_span.end
5405			} else {
5406				if let Omittable::Some(ref i) = instance {
5407					i.span.end
5408				} else {
5409					body.span.end
5410				}
5411			},
5412		),
5413		ty: NodeTy::StructDef {
5414			qualifiers,
5415			ident,
5416			body,
5417			instance,
5418		},
5419	});
5420}
5421
5422/// Parses an if statement.
5423///
5424/// This function assumes that the keyword is not yet consumed.
5425fn parse_if<'a, P: TokenStreamProvider<'a>>(
5426	walker: &mut Walker<'a, P>,
5427	nodes: &mut Vec<Node>,
5428	kw_span: Span,
5429) {
5430	let mut branches = Vec::new();
5431	let mut first_iter = true;
5432	// On the first iteration of this loop, the current token is guaranteed to be `Token::If`.
5433	loop {
5434		let (token, token_span) = match walker.peek() {
5435			Some(t) => t,
5436			None => {
5437				nodes.push(Node {
5438					span: Span::new(kw_span.start, walker.get_last_span().end),
5439					ty: NodeTy::If(branches),
5440				});
5441				return;
5442			}
5443		};
5444
5445		let else_kw_span = if *token != Token::Else && !first_iter {
5446			// We've found a token that is not `else`, which means we've finished the current if statement.
5447			nodes.push(Node {
5448				span: Span::new(
5449					kw_span.start,
5450					if let Some(branch) = branches.last() {
5451						branch.span.end
5452					} else {
5453						kw_span.end
5454					},
5455				),
5456				ty: NodeTy::If(branches),
5457			});
5458			return;
5459		} else if *token == Token::If && first_iter {
5460			// In the first iteration this variable is ignored. This is just to prevent divergence of branches
5461			// which would require a different overall parsing algorithm.
5462			token_span
5463		} else {
5464			// Consume the `else` keyword.
5465			walker.push_colour(token_span, SyntaxType::Keyword);
5466			walker.advance();
5467			token_span
5468		};
5469
5470		let (token, token_span) = match walker.peek() {
5471			Some(t) => t,
5472			None => {
5473				// We have an else keyword followed by nothing.
5474				walker.push_syntax_diag(Syntax::Stmt(
5475					StmtDiag::IfExpectedIfOrLBraceOrStmtAfterElseKw(
5476						walker.get_last_span().next_single_width(),
5477					),
5478				));
5479				nodes.push(Node {
5480					span: Span::new(kw_span.start, walker.get_last_span().end),
5481					ty: NodeTy::If(branches),
5482				});
5483				return;
5484			}
5485		};
5486
5487		if *token == Token::If {
5488			let if_kw_span = token_span;
5489			walker.push_colour(if_kw_span, SyntaxType::Keyword);
5490			walker.advance();
5491
5492			// Consume the `(`.
5493			let l_paren_span = match walker.peek() {
5494				Some((token, span)) => {
5495					if *token == Token::LParen {
5496						walker.push_colour(span, SyntaxType::Punctuation);
5497						walker.advance();
5498						Some(span)
5499					} else {
5500						walker.push_syntax_diag(Syntax::Stmt(
5501							StmtDiag::IfExpectedLParenAfterKw(
5502								if_kw_span.next_single_width(),
5503							),
5504						));
5505						None
5506					}
5507				}
5508				None => {
5509					walker.push_syntax_diag(Syntax::Stmt(
5510						StmtDiag::IfExpectedLParenAfterKw(
5511							if_kw_span.next_single_width(),
5512						),
5513					));
5514					branches.push(IfBranch {
5515						span: if first_iter {
5516							if_kw_span
5517						} else {
5518							Span::new(else_kw_span.start, if_kw_span.end)
5519						},
5520						condition: if first_iter {
5521							(IfCondition::If(None), if_kw_span)
5522						} else {
5523							(
5524								IfCondition::ElseIf(None),
5525								Span::new(else_kw_span.start, if_kw_span.end),
5526							)
5527						},
5528						body: None,
5529					});
5530					nodes.push(Node {
5531						span: Span::new(
5532							kw_span.start,
5533							walker.get_last_span().end,
5534						),
5535						ty: NodeTy::If(branches),
5536					});
5537					return;
5538				}
5539			};
5540
5541			// Consume the condition expression.
5542			let cond_expr = match expr_parser(
5543				walker,
5544				Mode::Default,
5545				[Token::RParen, Token::LBrace],
5546			) {
5547				(Some(e), mut syntax, mut semantic, mut colours) => {
5548					walker.append_colours(&mut colours);
5549					walker.append_syntax_diags(&mut syntax);
5550					walker.append_semantic_diags(&mut semantic);
5551					Some(e)
5552				}
5553				(None, _, _, _) => {
5554					if let Some(l_paren_span) = l_paren_span {
5555						walker.push_syntax_diag(Syntax::Stmt(
5556							StmtDiag::IfExpectedExprAfterLParen(
5557								l_paren_span.next_single_width(),
5558							),
5559						));
5560					}
5561					None
5562				}
5563			};
5564
5565			// Consume the `)`.
5566			let r_paren_span = match walker.peek() {
5567				Some((token, span)) => {
5568					if *token == Token::RParen {
5569						walker.push_colour(span, SyntaxType::Punctuation);
5570						walker.advance();
5571						Some(span)
5572					} else {
5573						if let Some(ref cond_expr) = cond_expr {
5574							walker.push_syntax_diag(Syntax::Stmt(
5575								StmtDiag::IfExpectedRParenAfterExpr(
5576									cond_expr.span.next_single_width(),
5577								),
5578							));
5579						}
5580						None
5581					}
5582				}
5583				None => {
5584					if let Some(ref cond_expr) = cond_expr {
5585						walker.push_syntax_diag(Syntax::Stmt(
5586							StmtDiag::IfExpectedRParenAfterExpr(
5587								cond_expr.span.next_single_width(),
5588							),
5589						));
5590					}
5591					let span = Span::new(
5592						if first_iter {
5593							if_kw_span.start
5594						} else {
5595							else_kw_span.start
5596						},
5597						if let Some(ref cond_expr) = cond_expr {
5598							cond_expr.span.end
5599						} else if let Some(l_paren_span) = l_paren_span {
5600							l_paren_span.end
5601						} else {
5602							if_kw_span.end
5603						},
5604					);
5605					branches.push(IfBranch {
5606						span,
5607						condition: (
5608							if first_iter {
5609								IfCondition::If(cond_expr)
5610							} else {
5611								IfCondition::ElseIf(None)
5612							},
5613							span,
5614						),
5615						body: None,
5616					});
5617					nodes.push(Node {
5618						span: Span::new(
5619							kw_span.start,
5620							walker.get_last_span().end,
5621						),
5622						ty: NodeTy::If(branches),
5623					});
5624					return;
5625				}
5626			};
5627
5628			// Consume either the `{` for a multi-line if body or a statement for a single-statement if body.
5629			match walker.peek() {
5630				Some((token, token_span)) => {
5631					if *token == Token::LBrace {
5632						// We have a multi-line body.
5633						walker.push_colour(token_span, SyntaxType::Punctuation);
5634						walker.advance();
5635						let body = parse_scope(walker, brace_scope, token_span);
5636						let span = Span::new(
5637							if first_iter {
5638								if_kw_span.start
5639							} else {
5640								else_kw_span.start
5641							},
5642							if let Some(r_paren_span) = r_paren_span {
5643								r_paren_span.end
5644							} else if let Some(ref cond_expr) = cond_expr {
5645								cond_expr.span.end
5646							} else if let Some(l_paren_span) = l_paren_span {
5647								l_paren_span.end
5648							} else {
5649								if_kw_span.end
5650							},
5651						);
5652						branches.push(IfBranch {
5653							span: Span::new(if_kw_span.start, body.span.end),
5654							condition: (
5655								if first_iter {
5656									IfCondition::If(cond_expr)
5657								} else {
5658									IfCondition::ElseIf(cond_expr)
5659								},
5660								span,
5661							),
5662							body: Some(body),
5663						});
5664					} else {
5665						// We don't have a multi-line body, so we attempt to parse a single statement.
5666						let mut stmts = Vec::new();
5667						parse_stmt(walker, &mut stmts);
5668
5669						let body = if stmts.is_empty() {
5670							if let Some(r_paren_span) = r_paren_span {
5671								walker.push_syntax_diag(Syntax::Stmt(
5672									StmtDiag::IfExpectedLBraceOrStmtAfterRParen(
5673										r_paren_span,
5674									),
5675								));
5676							}
5677							None
5678						} else {
5679							let stmt = stmts.remove(0);
5680							let body = Scope {
5681								span: stmt.span,
5682								contents: vec![stmt],
5683							};
5684							Some(body)
5685						};
5686
5687						let span = Span::new(
5688							if first_iter {
5689								if_kw_span.start
5690							} else {
5691								else_kw_span.start
5692							},
5693							if let Some(r_paren_span) = r_paren_span {
5694								r_paren_span.end
5695							} else if let Some(ref cond_expr) = cond_expr {
5696								cond_expr.span.end
5697							} else if let Some(l_paren_span) = l_paren_span {
5698								l_paren_span.end
5699							} else {
5700								if_kw_span.end
5701							},
5702						);
5703						branches.push(IfBranch {
5704							span: Span::new(
5705								if_kw_span.start,
5706								if let Some(ref body) = body {
5707									body.span.end
5708								} else {
5709									span.end
5710								},
5711							),
5712							condition: (
5713								if first_iter {
5714									IfCondition::If(cond_expr)
5715								} else {
5716									IfCondition::ElseIf(cond_expr)
5717								},
5718								span,
5719							),
5720							body,
5721						});
5722					}
5723				}
5724				None => {
5725					// We have a if-header but no associated body but we've reached the EOF.
5726					walker.push_syntax_diag(Syntax::Stmt(
5727						StmtDiag::IfExpectedLBraceOrStmtAfterRParen(
5728							walker.get_last_span().next_single_width(),
5729						),
5730					));
5731					let span = Span::new(
5732						if first_iter {
5733							if_kw_span.start
5734						} else {
5735							else_kw_span.start
5736						},
5737						if let Some(r_paren_span) = r_paren_span {
5738							r_paren_span.end
5739						} else if let Some(ref cond_expr) = cond_expr {
5740							cond_expr.span.end
5741						} else if let Some(l_paren_span) = l_paren_span {
5742							l_paren_span.end
5743						} else {
5744							if_kw_span.end
5745						},
5746					);
5747					branches.push(IfBranch {
5748						span,
5749						condition: (
5750							if first_iter {
5751								IfCondition::If(cond_expr)
5752							} else {
5753								IfCondition::ElseIf(cond_expr)
5754							},
5755							span,
5756						),
5757						body: None,
5758					});
5759					nodes.push(Node {
5760						span: Span::new(
5761							kw_span.start,
5762							walker.get_last_span().end,
5763						),
5764						ty: NodeTy::If(branches),
5765					});
5766					return;
5767				}
5768			}
5769		} else {
5770			// Consume either the `{` for a multi-line if body or a statement for a single-statement if body.
5771			match walker.peek() {
5772				Some((token, token_span)) => {
5773					if *token == Token::LBrace {
5774						// We have a multi-line body.
5775						walker.push_colour(token_span, SyntaxType::Punctuation);
5776						walker.advance();
5777						let body = parse_scope(walker, brace_scope, token_span);
5778						branches.push(IfBranch {
5779							span: Span::new(else_kw_span.start, body.span.end),
5780							condition: (IfCondition::Else, else_kw_span),
5781							body: Some(body),
5782						});
5783					} else {
5784						// We don't have a multi-line body, so we attempt to parse a single statement.
5785					}
5786				}
5787				None => {
5788					// We have one else-header but no associated body.
5789					walker.push_syntax_diag(Syntax::Stmt(
5790						StmtDiag::IfExpectedLBraceOrStmtAfterRParen(
5791							walker.get_last_span().next_single_width(),
5792						),
5793					));
5794					branches.push(IfBranch {
5795						span: else_kw_span,
5796						condition: (IfCondition::Else, else_kw_span),
5797						body: None,
5798					});
5799					nodes.push(Node {
5800						span: Span::new(
5801							kw_span.start,
5802							walker.get_last_span().end,
5803						),
5804						ty: NodeTy::If(branches),
5805					});
5806					return;
5807				}
5808			}
5809		}
5810
5811		first_iter = false;
5812	}
5813}
5814
5815/// Parses a switch statement.
5816///
5817/// This function assumes that the keyword is not yet consumed.
5818fn parse_switch<'a, P: TokenStreamProvider<'a>>(
5819	walker: &mut Walker<'a, P>,
5820	nodes: &mut Vec<Node>,
5821	kw_span: Span,
5822) {
5823	walker.push_colour(kw_span, SyntaxType::Keyword);
5824	walker.advance();
5825
5826	// Consume the `(`.
5827	let l_paren_span = match walker.peek() {
5828		Some((token, span)) => {
5829			if *token == Token::LParen {
5830				walker.push_colour(span, SyntaxType::Punctuation);
5831				walker.advance();
5832				Some(span)
5833			} else {
5834				walker.push_syntax_diag(Syntax::Stmt(
5835					StmtDiag::SwitchExpectedLParenAfterKw(
5836						kw_span.next_single_width(),
5837					),
5838				));
5839				None
5840			}
5841		}
5842		None => {
5843			walker.push_syntax_diag(Syntax::Stmt(
5844				StmtDiag::SwitchExpectedLParenAfterKw(
5845					kw_span.next_single_width(),
5846				),
5847			));
5848			return;
5849		}
5850	};
5851
5852	// Consume the condition expression.
5853	let cond_expr = match expr_parser(
5854		walker,
5855		Mode::Default,
5856		[Token::RParen, Token::LBrace],
5857	) {
5858		(Some(e), mut syntax, mut semantic, mut colours) => {
5859			walker.append_colours(&mut colours);
5860			walker.append_syntax_diags(&mut syntax);
5861			walker.append_semantic_diags(&mut semantic);
5862			Some(e)
5863		}
5864		(None, _, _, _) => {
5865			if let Some(l_paren_span) = l_paren_span {
5866				walker.push_syntax_diag(Syntax::Stmt(
5867					StmtDiag::SwitchExpectedExprAfterLParen(
5868						l_paren_span.next_single_width(),
5869					),
5870				));
5871			}
5872			None
5873		}
5874	};
5875
5876	// Consume the `)`.
5877	let r_paren_span = match walker.peek() {
5878		Some((token, span)) => {
5879			if *token == Token::RParen {
5880				walker.push_colour(span, SyntaxType::Punctuation);
5881				walker.advance();
5882				Some(span)
5883			} else {
5884				if let Some(ref cond_expr) = cond_expr {
5885					walker.push_syntax_diag(Syntax::Stmt(
5886						StmtDiag::SwitchExpectedRParenAfterExpr(
5887							cond_expr.span.next_single_width(),
5888						),
5889					));
5890				}
5891				None
5892			}
5893		}
5894		None => {
5895			if let Some(ref cond_expr) = cond_expr {
5896				walker.push_syntax_diag(Syntax::Stmt(
5897					StmtDiag::SwitchExpectedRParenAfterExpr(
5898						cond_expr.span.next_single_width(),
5899					),
5900				));
5901			}
5902			return;
5903		}
5904	};
5905
5906	// Consume the `{`.
5907	let l_brace_span = match walker.peek() {
5908		Some((token, span)) => {
5909			if *token == Token::LBrace {
5910				walker.push_colour(span, SyntaxType::Punctuation);
5911				walker.advance();
5912				span
5913			} else {
5914				if let Some(r_paren_span) = r_paren_span {
5915					walker.push_syntax_diag(Syntax::Stmt(
5916						StmtDiag::SwitchExpectedLBraceAfterCond(
5917							r_paren_span.next_single_width(),
5918						),
5919					));
5920				}
5921				nodes.push(Node {
5922					span: Span::new(
5923						kw_span.start,
5924						if let Some(r_paren_span) = r_paren_span {
5925							r_paren_span.end
5926						} else if let Some(ref cond_expr) = cond_expr {
5927							cond_expr.span.end
5928						} else if let Some(l_paren_span) = l_paren_span {
5929							l_paren_span.end
5930						} else {
5931							kw_span.end
5932						},
5933					),
5934					ty: NodeTy::Switch {
5935						cond: cond_expr,
5936						cases: vec![],
5937					},
5938				});
5939				return;
5940			}
5941		}
5942		None => {
5943			if let Some(r_paren_span) = r_paren_span {
5944				walker.push_syntax_diag(Syntax::Stmt(
5945					StmtDiag::SwitchExpectedLBraceAfterCond(
5946						r_paren_span.next_single_width(),
5947					),
5948				));
5949			}
5950			nodes.push(Node {
5951				span: Span::new(kw_span.start, walker.get_last_span().end),
5952				ty: NodeTy::Switch {
5953					cond: cond_expr,
5954					cases: vec![],
5955				},
5956			});
5957			return;
5958		}
5959	};
5960
5961	// Check if the body is empty.
5962	match walker.peek() {
5963		Some((token, token_span)) => {
5964			if *token == Token::RBrace {
5965				walker.push_syntax_diag(Syntax::Stmt(
5966					StmtDiag::SwitchFoundEmptyBody(Span::new(
5967						l_brace_span.start,
5968						token_span.end,
5969					)),
5970				));
5971				nodes.push(Node {
5972					span: Span::new(kw_span.start, token_span.end),
5973					ty: NodeTy::Switch {
5974						cond: cond_expr,
5975						cases: vec![],
5976					},
5977				});
5978				return;
5979			}
5980		}
5981		None => {
5982			walker.push_syntax_diag(Syntax::Stmt(
5983				StmtDiag::ScopeMissingRBrace(
5984					l_brace_span,
5985					walker.get_last_span().next_single_width(),
5986				),
5987			));
5988			nodes.push(Node {
5989				span: Span::new(kw_span.start, walker.get_last_span().end),
5990				ty: NodeTy::Switch {
5991					cond: cond_expr,
5992					cases: vec![],
5993				},
5994			});
5995			return;
5996		}
5997	}
5998
5999	// Consume cases until we reach the end of the body.
6000	let mut cases = Vec::new();
6001	loop {
6002		let (token, token_span) = match walker.peek() {
6003			Some(t) => t,
6004			None => {
6005				walker.push_syntax_diag(Syntax::Stmt(
6006					StmtDiag::ScopeMissingRBrace(
6007						l_brace_span,
6008						walker.get_last_span().next_single_width(),
6009					),
6010				));
6011				nodes.push(Node {
6012					span: Span::new(kw_span.start, walker.get_last_span().end),
6013					ty: NodeTy::Switch {
6014						cond: cond_expr,
6015						cases,
6016					},
6017				});
6018				return;
6019			}
6020		};
6021
6022		match token {
6023			Token::Case => {
6024				let case_kw_span = token_span;
6025				walker.push_colour(case_kw_span, SyntaxType::Keyword);
6026				walker.advance();
6027
6028				// Consume the expression.
6029				let expr =
6030					match expr_parser(walker, Mode::Default, [Token::Colon]) {
6031						(Some(e), mut syntax, mut semantic, mut colours) => {
6032							walker.append_colours(&mut colours);
6033							walker.append_syntax_diags(&mut syntax);
6034							walker.append_semantic_diags(&mut semantic);
6035							Some(e)
6036						}
6037						(None, _, _, _) => {
6038							walker.push_syntax_diag(Syntax::Stmt(
6039								StmtDiag::SwitchExpectedExprAfterCaseKw(
6040									case_kw_span.next_single_width(),
6041								),
6042							));
6043							None
6044						}
6045					};
6046
6047				// Consume the `:`.
6048				let colon_span = match walker.peek() {
6049					Some((token, token_span)) => {
6050						if *token == Token::Colon {
6051							walker.push_colour(
6052								token_span,
6053								SyntaxType::Punctuation,
6054							);
6055							walker.advance();
6056							Some(token_span)
6057						} else {
6058							if let Some(ref expr) = expr {
6059								walker.push_syntax_diag(Syntax::Stmt(
6060									StmtDiag::SwitchExpectedColonAfterCaseExpr(
6061										expr.span.next_single_width(),
6062									),
6063								));
6064							}
6065							None
6066						}
6067					}
6068					None => {
6069						// We don't have a complete case but we've reached the EOF.
6070						if let Some(ref expr) = expr {
6071							walker.push_syntax_diag(Syntax::Stmt(
6072								StmtDiag::SwitchExpectedColonAfterCaseExpr(
6073									expr.span.next_single_width(),
6074								),
6075							));
6076						}
6077						cases.push(SwitchCase {
6078							span: Span::new(
6079								case_kw_span.start,
6080								walker.get_last_span().end,
6081							),
6082							expr: Either::Left(expr),
6083							body: None,
6084						});
6085						nodes.push(Node {
6086							span: Span::new(
6087								kw_span.start,
6088								walker.get_last_span().end,
6089							),
6090							ty: NodeTy::Switch {
6091								cond: cond_expr,
6092								cases,
6093							},
6094						});
6095						return;
6096					}
6097				};
6098
6099				// Consume the body of the case.
6100				let body = parse_scope(
6101					walker,
6102					switch_case_scope,
6103					colon_span.unwrap_or(if let Some(ref expr) = expr {
6104						expr.span
6105					} else {
6106						case_kw_span
6107					}),
6108				);
6109				cases.push(SwitchCase {
6110					span: Span::new(case_kw_span.start, body.span.end),
6111					expr: Either::Left(expr),
6112					body: Some(body),
6113				});
6114			}
6115			Token::Default => {
6116				let default_kw_span = token_span;
6117				walker.push_colour(default_kw_span, SyntaxType::Keyword);
6118				walker.advance();
6119
6120				// Consume the `:`.
6121				let colon_span = match walker.peek() {
6122					Some((token, token_span)) => {
6123						if *token == Token::Colon {
6124							walker.push_colour(
6125								token_span,
6126								SyntaxType::Punctuation,
6127							);
6128							walker.advance();
6129							Some(token_span)
6130						} else {
6131							walker.push_syntax_diag(Syntax::Stmt(
6132								StmtDiag::SwitchExpectedColonAfterDefaultKw(
6133									default_kw_span.next_single_width(),
6134								),
6135							));
6136							None
6137						}
6138					}
6139					None => {
6140						// We don't have a complete case but we've reached the EOF.
6141						walker.push_syntax_diag(Syntax::Stmt(
6142							StmtDiag::SwitchExpectedColonAfterDefaultKw(
6143								default_kw_span.next_single_width(),
6144							),
6145						));
6146						cases.push(SwitchCase {
6147							span: default_kw_span,
6148							expr: Either::Right(()),
6149							body: None,
6150						});
6151						nodes.push(Node {
6152							span: Span::new(
6153								kw_span.start,
6154								walker.get_last_span().end,
6155							),
6156							ty: NodeTy::Switch {
6157								cond: cond_expr,
6158								cases,
6159							},
6160						});
6161						return;
6162					}
6163				};
6164
6165				// Consume the body of the case.
6166				let body = parse_scope(
6167					walker,
6168					switch_case_scope,
6169					colon_span.unwrap_or(default_kw_span.end_zero_width()),
6170				);
6171				cases.push(SwitchCase {
6172					span: Span::new(default_kw_span.start, body.span.end),
6173					expr: Either::Right(()),
6174					body: Some(body),
6175				});
6176			}
6177			Token::RBrace => {
6178				// If this branch is triggered, this `}` is closing the entire body of the switch statement. In the
6179				// following example:
6180				//
6181				// switch (true) {
6182				//     default: {
6183				//         /*...*/
6184				//     }
6185				// }
6186				//
6187				// the first `}` will be consumed by the `parse_scope()` function of the default case body, whilst
6188				// the second will be consumed by this branch. In the following example:
6189				//
6190				// switch (true) {
6191				//     default:
6192				//         /*...*/
6193				// }
6194				//
6195				// The `}` will close the body of the default case but it won't be consumed, and hence it will be
6196				// consumed by this branch.
6197				walker.push_colour(token_span, SyntaxType::Punctuation);
6198				walker.advance();
6199				nodes.push(Node {
6200					span: Span::new(kw_span.start, token_span.end),
6201					ty: NodeTy::Switch {
6202						cond: cond_expr,
6203						cases,
6204					},
6205				});
6206				return;
6207			}
6208			_ => {
6209				// We have a token which cannot begin a case, nor can finish the switch body. We consume tokens
6210				// until we hit something recognizable.
6211				let invalid_span_start = token_span.start;
6212				let mut invalid_span_end = token_span.end;
6213				loop {
6214					match walker.peek() {
6215						Some((token, token_span)) => {
6216							if *token == Token::Case
6217								|| *token == Token::Default || *token
6218								== Token::RBrace
6219							{
6220								// We don't consume the token because the next iteration of the main loop will deal
6221								// with it appropriately.
6222								walker.push_syntax_diag(Syntax::Stmt(StmtDiag::SwitchExpectedCaseOrDefaultKwOrEnd(
6223									Span::new(invalid_span_start, invalid_span_end)
6224								)));
6225								break;
6226							} else {
6227								invalid_span_end = token_span.end;
6228								walker.push_colour(
6229									token_span,
6230									token.non_semantic_colour(),
6231								);
6232								walker.advance();
6233							}
6234						}
6235						None => {
6236							// We haven't yet hit anything recognizable but we've reached the EOF.
6237							walker.push_syntax_diag(Syntax::Stmt(
6238								StmtDiag::SwitchExpectedCaseOrDefaultKwOrEnd(
6239									Span::new(
6240										invalid_span_start,
6241										walker.get_last_span().end,
6242									),
6243								),
6244							));
6245							nodes.push(Node {
6246								span: Span::new(
6247									kw_span.start,
6248									walker.get_last_span().end,
6249								),
6250								ty: NodeTy::Switch {
6251									cond: cond_expr,
6252									cases,
6253								},
6254							});
6255							return;
6256						}
6257					}
6258				}
6259			}
6260		}
6261	}
6262}
6263
6264/// Parses a for loop statement.
6265///
6266/// This function assumes that the keyword is not yet consumed.
6267fn parse_for_loop<'a, P: TokenStreamProvider<'a>>(
6268	walker: &mut Walker<'a, P>,
6269	nodes: &mut Vec<Node>,
6270	kw_span: Span,
6271) {
6272	walker.push_colour(kw_span, SyntaxType::Keyword);
6273	walker.advance();
6274
6275	// Consume the `(`.
6276	let l_paren_span = match walker.peek() {
6277		Some((token, span)) => {
6278			if *token == Token::LParen {
6279				walker.push_colour(span, SyntaxType::Punctuation);
6280				walker.advance();
6281				Some(span)
6282			} else {
6283				walker.push_syntax_diag(Syntax::Stmt(
6284					StmtDiag::ForExpectedLParenAfterKw(
6285						kw_span.next_single_width(),
6286					),
6287				));
6288				None
6289			}
6290		}
6291		None => {
6292			walker.push_syntax_diag(Syntax::Stmt(
6293				StmtDiag::ForExpectedLParenAfterKw(kw_span.next_single_width()),
6294			));
6295			return;
6296		}
6297	};
6298
6299	// Consume the "expressions" (actually expression/declaration statements).
6300	let mut init: Option<Node> = None;
6301	let mut cond: Option<Node> = None;
6302	let mut inc: Option<Node> = None;
6303	let mut counter = 0;
6304	let r_paren_span = 'outer: loop {
6305		let (token, token_span) = match walker.peek() {
6306			Some(t) => t,
6307			None => {
6308				// We have not encountered a `)` yet.
6309				let span = Span::new(
6310					kw_span.start,
6311					if let Some(ref inc) = inc {
6312						inc.span.end
6313					} else if let Some(ref cond) = cond {
6314						walker.push_syntax_diag(Syntax::Stmt(
6315							StmtDiag::ForExpectedIncStmt(
6316								cond.span.next_single_width(),
6317							),
6318						));
6319						cond.span.end
6320					} else if let Some(ref init) = init {
6321						walker.push_syntax_diag(Syntax::Stmt(
6322							StmtDiag::ForExpectedCondStmt(
6323								init.span.next_single_width(),
6324							),
6325						));
6326						init.span.end
6327					} else if let Some(l_paren_span) = l_paren_span {
6328						walker.push_syntax_diag(Syntax::Stmt(
6329							StmtDiag::ForExpectedInitStmt(
6330								l_paren_span.next_single_width(),
6331							),
6332						));
6333						l_paren_span.end
6334					} else {
6335						kw_span.end
6336					},
6337				);
6338				nodes.push(Node {
6339					span,
6340					ty: NodeTy::For {
6341						init: init.map(|n| Box::from(n)),
6342						cond: cond.map(|n| Box::from(n)),
6343						inc: inc.map(|n| Box::from(n)),
6344						body: None,
6345					},
6346				});
6347				return;
6348			}
6349		};
6350
6351		match token {
6352			Token::RParen => {
6353				if counter < 3 {
6354					walker.push_syntax_diag(Syntax::Stmt(
6355						StmtDiag::ForExpected3Stmts(
6356							token_span.previous_single_width(),
6357						),
6358					));
6359				}
6360				walker.push_colour(token_span, SyntaxType::Punctuation);
6361				walker.advance();
6362				break token_span;
6363			}
6364			_ => {
6365				if counter == 3 {
6366					walker.push_syntax_diag(Syntax::Stmt(
6367						StmtDiag::ForExpectedRParenAfterStmts(
6368							inc.as_ref().unwrap().span.next_single_width(),
6369						),
6370					));
6371
6372					walker.push_colour(token_span, SyntaxType::Invalid);
6373					walker.advance();
6374
6375					loop {
6376						match walker.peek() {
6377							Some((token, span)) => {
6378								if *token == Token::RParen {
6379									walker.push_colour(
6380										span,
6381										SyntaxType::Punctuation,
6382									);
6383									walker.advance();
6384									break 'outer span;
6385								} else {
6386									walker
6387										.push_colour(span, SyntaxType::Invalid);
6388									walker.advance();
6389								}
6390							}
6391							None => break,
6392						}
6393					}
6394
6395					nodes.push(Node {
6396						span: Span::new(
6397							kw_span.start,
6398							inc.as_ref().unwrap().span.end,
6399						),
6400						ty: NodeTy::For {
6401							init: init.map(|n| Box::from(n)),
6402							cond: cond.map(|n| Box::from(n)),
6403							inc: inc.map(|n| Box::from(n)),
6404							body: None,
6405						},
6406					});
6407					return;
6408				}
6409			}
6410		}
6411
6412		let qualifiers = try_parse_qualifiers(walker);
6413		let mut stmt = Vec::new();
6414		try_parse_definition_declaration_expr(
6415			walker,
6416			&mut stmt,
6417			qualifiers,
6418			counter == 2,
6419		);
6420
6421		if !stmt.is_empty() {
6422			if counter == 0 {
6423				init = Some(stmt.remove(0));
6424			} else if counter == 1 {
6425				cond = Some(stmt.remove(0));
6426			} else if counter == 2 {
6427				inc = Some(stmt.remove(0));
6428			}
6429			counter += 1;
6430		}
6431	};
6432
6433	// Consume the `{`.
6434	let l_brace_span = match walker.peek() {
6435		Some((token, span)) => {
6436			if *token == Token::LBrace {
6437				walker.push_colour(span, SyntaxType::Punctuation);
6438				walker.advance();
6439				span
6440			} else {
6441				walker.push_syntax_diag(Syntax::Stmt(
6442					StmtDiag::ForExpectedLBraceAfterHeader(
6443						r_paren_span.next_single_width(),
6444					),
6445				));
6446				nodes.push(Node {
6447					span: Span::new(kw_span.start, r_paren_span.end),
6448					ty: NodeTy::For {
6449						init: init.map(|n| Box::from(n)),
6450						cond: cond.map(|n| Box::from(n)),
6451						inc: inc.map(|n| Box::from(n)),
6452						body: None,
6453					},
6454				});
6455				return;
6456			}
6457		}
6458		None => {
6459			walker.push_syntax_diag(Syntax::Stmt(
6460				StmtDiag::ForExpectedLBraceAfterHeader(
6461					r_paren_span.next_single_width(),
6462				),
6463			));
6464			nodes.push(Node {
6465				span: Span::new(kw_span.start, r_paren_span.end),
6466				ty: NodeTy::For {
6467					init: init.map(|n| Box::from(n)),
6468					cond: cond.map(|n| Box::from(n)),
6469					inc: inc.map(|n| Box::from(n)),
6470					body: None,
6471				},
6472			});
6473			return;
6474		}
6475	};
6476
6477	// Consume the body.
6478	let body = parse_scope(walker, brace_scope, l_brace_span);
6479	nodes.push(Node {
6480		span: Span::new(kw_span.start, body.span.end),
6481		ty: NodeTy::For {
6482			init: init.map(|n| Box::from(n)),
6483			cond: cond.map(|n| Box::from(n)),
6484			inc: inc.map(|n| Box::from(n)),
6485			body: Some(body),
6486		},
6487	});
6488}
6489
6490/// Parses a while loop statement.
6491///
6492/// This function assumes that the keyword is not yet consumed.
6493fn parse_while_loop<'a, P: TokenStreamProvider<'a>>(
6494	walker: &mut Walker<'a, P>,
6495	nodes: &mut Vec<Node>,
6496	kw_span: Span,
6497) {
6498	walker.push_colour(kw_span, SyntaxType::Keyword);
6499	walker.advance();
6500
6501	// Consume the `(`.
6502	let l_paren_span = match walker.peek() {
6503		Some((token, span)) => {
6504			if *token == Token::LParen {
6505				walker.push_colour(span, SyntaxType::Punctuation);
6506				walker.advance();
6507				Some(span)
6508			} else {
6509				walker.push_syntax_diag(Syntax::Stmt(
6510					StmtDiag::WhileExpectedLParenAfterKw(
6511						kw_span.next_single_width(),
6512					),
6513				));
6514				None
6515			}
6516		}
6517		None => {
6518			walker.push_syntax_diag(Syntax::Stmt(
6519				StmtDiag::WhileExpectedLParenAfterKw(
6520					kw_span.next_single_width(),
6521				),
6522			));
6523			return;
6524		}
6525	};
6526
6527	// Consume the condition expression.
6528	let cond_expr = match expr_parser(
6529		walker,
6530		Mode::Default,
6531		[Token::RParen, Token::Semi],
6532	) {
6533		(Some(e), mut syntax, mut semantic, mut colours) => {
6534			walker.append_colours(&mut colours);
6535			walker.append_syntax_diags(&mut syntax);
6536			walker.append_semantic_diags(&mut semantic);
6537			Some(e)
6538		}
6539		(None, _, _, _) => {
6540			if let Some(l_paren_span) = l_paren_span {
6541				walker.push_syntax_diag(Syntax::Stmt(
6542					StmtDiag::WhileExpectedExprAfterLParen(
6543						l_paren_span.next_single_width(),
6544					),
6545				));
6546			}
6547			None
6548		}
6549	};
6550
6551	// Consume the `)`.
6552	let r_paren_span = match walker.peek() {
6553		Some((token, span)) => {
6554			if *token == Token::RParen {
6555				walker.push_colour(span, SyntaxType::Punctuation);
6556				walker.advance();
6557				Some(span)
6558			} else {
6559				if let Some(ref cond_expr) = cond_expr {
6560					walker.push_syntax_diag(Syntax::Stmt(
6561						StmtDiag::WhileExpectedRParenAfterExpr(
6562							cond_expr.span.next_single_width(),
6563						),
6564					));
6565				}
6566				None
6567			}
6568		}
6569		None => {
6570			if let Some(ref cond_expr) = cond_expr {
6571				walker.push_syntax_diag(Syntax::Stmt(
6572					StmtDiag::WhileExpectedRParenAfterExpr(
6573						cond_expr.span.next_single_width(),
6574					),
6575				));
6576			}
6577			return;
6578		}
6579	};
6580
6581	// Consume the `{`.
6582	let l_brace_span = match walker.peek() {
6583		Some((token, span)) => {
6584			if *token == Token::LBrace {
6585				walker.push_colour(span, SyntaxType::Punctuation);
6586				walker.advance();
6587				span
6588			} else {
6589				if let Some(r_paren_span) = r_paren_span {
6590					walker.push_syntax_diag(Syntax::Stmt(
6591						StmtDiag::WhileExpectedLBraceAfterCond(
6592							r_paren_span.next_single_width(),
6593						),
6594					));
6595				}
6596				return;
6597			}
6598		}
6599		None => {
6600			if let Some(r_paren_span) = r_paren_span {
6601				walker.push_syntax_diag(Syntax::Stmt(
6602					StmtDiag::WhileExpectedLBraceAfterCond(
6603						r_paren_span.next_single_width(),
6604					),
6605				));
6606			}
6607			return;
6608		}
6609	};
6610
6611	// Parse the body.
6612	let body = parse_scope(walker, brace_scope, l_brace_span);
6613	nodes.push(Node {
6614		span: Span::new(kw_span.start, body.span.end),
6615		ty: NodeTy::While {
6616			cond: cond_expr,
6617			body,
6618		},
6619	});
6620}
6621
6622/// Parses a do-while loop statement.
6623///
6624/// This function assumes that the keyword is not yet consumed.
6625fn parse_do_while_loop<'a, P: TokenStreamProvider<'a>>(
6626	walker: &mut Walker<'a, P>,
6627	nodes: &mut Vec<Node>,
6628	kw_span: Span,
6629) {
6630	walker.push_colour(kw_span, SyntaxType::Keyword);
6631	walker.advance();
6632
6633	// Consume the `{`.
6634	let l_brace_span = match walker.peek() {
6635		Some((token, span)) => {
6636			if *token == Token::LBrace {
6637				walker.push_colour(span, SyntaxType::Punctuation);
6638				walker.advance();
6639				span
6640			} else {
6641				walker.push_syntax_diag(Syntax::Stmt(
6642					StmtDiag::DoWhileExpectedLBraceAfterKw(
6643						kw_span.next_single_width(),
6644					),
6645				));
6646				return;
6647			}
6648		}
6649		None => {
6650			walker.push_syntax_diag(Syntax::Stmt(
6651				StmtDiag::DoWhileExpectedLBraceAfterKw(
6652					kw_span.next_single_width(),
6653				),
6654			));
6655			return;
6656		}
6657	};
6658
6659	// Parse the body.
6660	let body = parse_scope(walker, brace_scope, l_brace_span);
6661
6662	// Consume the `while` keyword.
6663	let while_kw_span = match walker.peek() {
6664		Some((token, span)) => {
6665			if *token == Token::While {
6666				walker.push_colour(span, SyntaxType::Keyword);
6667				walker.advance();
6668				span
6669			} else {
6670				walker.push_syntax_diag(Syntax::Stmt(
6671					StmtDiag::DoWhileExpectedWhileAfterBody(
6672						body.span.next_single_width(),
6673					),
6674				));
6675				nodes.push(Node {
6676					span: Span::new(kw_span.start, body.span.end),
6677					ty: NodeTy::DoWhile { body, cond: None },
6678				});
6679				return;
6680			}
6681		}
6682		None => {
6683			walker.push_syntax_diag(Syntax::Stmt(
6684				StmtDiag::DoWhileExpectedWhileAfterBody(
6685					body.span.next_single_width(),
6686				),
6687			));
6688			nodes.push(Node {
6689				span: Span::new(kw_span.start, body.span.end),
6690				ty: NodeTy::DoWhile { body, cond: None },
6691			});
6692			return;
6693		}
6694	};
6695
6696	// Consume the `(`.
6697	let l_paren_span = match walker.peek() {
6698		Some((token, span)) => {
6699			if *token == Token::LParen {
6700				walker.push_colour(span, SyntaxType::Punctuation);
6701				walker.advance();
6702				Some(span)
6703			} else {
6704				walker.push_syntax_diag(Syntax::Stmt(
6705					StmtDiag::WhileExpectedLParenAfterKw(
6706						while_kw_span.next_single_width(),
6707					),
6708				));
6709				None
6710			}
6711		}
6712		None => {
6713			walker.push_syntax_diag(Syntax::Stmt(
6714				StmtDiag::WhileExpectedLParenAfterKw(
6715					while_kw_span.next_single_width(),
6716				),
6717			));
6718			nodes.push(Node {
6719				span: Span::new(kw_span.start, while_kw_span.end),
6720				ty: NodeTy::DoWhile { body, cond: None },
6721			});
6722			return;
6723		}
6724	};
6725
6726	// Consume the condition expression.
6727	let cond_expr = match expr_parser(
6728		walker,
6729		Mode::Default,
6730		[Token::RParen, Token::Semi],
6731	) {
6732		(Some(e), mut syntax, mut semantic, mut colours) => {
6733			walker.append_colours(&mut colours);
6734			walker.append_syntax_diags(&mut syntax);
6735			walker.append_semantic_diags(&mut semantic);
6736			Some(e)
6737		}
6738		(None, _, _, _) => {
6739			if let Some(l_paren_span) = l_paren_span {
6740				walker.push_syntax_diag(Syntax::Stmt(
6741					StmtDiag::WhileExpectedExprAfterLParen(
6742						l_paren_span.next_single_width(),
6743					),
6744				));
6745			}
6746			None
6747		}
6748	};
6749
6750	// Consume the `)`.
6751	let r_paren_span = match walker.peek() {
6752		Some((token, span)) => {
6753			if *token == Token::RParen {
6754				walker.push_colour(span, SyntaxType::Punctuation);
6755				walker.advance();
6756				Some(span)
6757			} else {
6758				if let Some(ref cond_expr) = cond_expr {
6759					walker.push_syntax_diag(Syntax::Stmt(
6760						StmtDiag::WhileExpectedRParenAfterExpr(
6761							cond_expr.span.next_single_width(),
6762						),
6763					));
6764				}
6765				None
6766			}
6767		}
6768		None => {
6769			if let Some(ref cond_expr) = cond_expr {
6770				walker.push_syntax_diag(Syntax::Stmt(
6771					StmtDiag::WhileExpectedRParenAfterExpr(
6772						cond_expr.span.next_single_width(),
6773					),
6774				));
6775			}
6776			nodes.push(Node {
6777				span: Span::new(kw_span.start, while_kw_span.end),
6778				ty: NodeTy::DoWhile {
6779					body,
6780					cond: cond_expr,
6781				},
6782			});
6783			return;
6784		}
6785	};
6786
6787	// Consume the `;` to end the statement.
6788	let semi_span = match walker.peek() {
6789		Some((token, span)) => {
6790			if *token == Token::Semi {
6791				walker.push_colour(span, SyntaxType::Punctuation);
6792				walker.advance();
6793				span
6794			} else {
6795				let span = if let Some(r_paren_span) = r_paren_span {
6796					r_paren_span
6797				} else if let Some(ref expr) = cond_expr {
6798					expr.span
6799				} else if let Some(l_paren_span) = l_paren_span {
6800					l_paren_span
6801				} else {
6802					while_kw_span
6803				};
6804				walker.push_syntax_diag(Syntax::Stmt(
6805					StmtDiag::DoWhileExpectedSemiAfterRParen(
6806						span.next_single_width(),
6807					),
6808				));
6809				nodes.push(Node {
6810					span,
6811					ty: NodeTy::DoWhile {
6812						body,
6813						cond: cond_expr,
6814					},
6815				});
6816				return;
6817			}
6818		}
6819		None => {
6820			let span = if let Some(r_paren_span) = r_paren_span {
6821				r_paren_span
6822			} else if let Some(ref expr) = cond_expr {
6823				expr.span
6824			} else if let Some(l_paren_span) = l_paren_span {
6825				l_paren_span
6826			} else {
6827				while_kw_span
6828			};
6829			walker.push_syntax_diag(Syntax::Stmt(
6830				StmtDiag::DoWhileExpectedSemiAfterRParen(
6831					span.next_single_width(),
6832				),
6833			));
6834			nodes.push(Node {
6835				span,
6836				ty: NodeTy::DoWhile {
6837					body,
6838					cond: cond_expr,
6839				},
6840			});
6841			return;
6842		}
6843	};
6844
6845	nodes.push(Node {
6846		span: Span::new(kw_span.start, semi_span.end),
6847		ty: NodeTy::DoWhile {
6848			cond: cond_expr,
6849			body,
6850		},
6851	});
6852}
6853
6854/// Parses a break/continue/discard statement.
6855///
6856/// This function assumes that the keyword is not yet consumed.
6857fn parse_break_continue_discard<'a, P: TokenStreamProvider<'a>>(
6858	walker: &mut Walker<'a, P>,
6859	nodes: &mut Vec<Node>,
6860	kw_span: Span,
6861	ty: impl FnOnce() -> NodeTy,
6862	error: impl FnOnce(Span) -> Syntax,
6863) {
6864	walker.push_colour(kw_span, SyntaxType::Keyword);
6865	walker.advance();
6866
6867	// Consume the `;` to end the statement.
6868	let semi_span = match walker.peek() {
6869		Some((token, span)) => {
6870			if *token == Token::Semi {
6871				walker.push_colour(span, SyntaxType::Punctuation);
6872				walker.advance();
6873				Some(span)
6874			} else {
6875				None
6876			}
6877		}
6878		None => None,
6879	};
6880	if semi_span.is_none() {
6881		walker.push_syntax_diag(error(kw_span.next_single_width()));
6882	}
6883
6884	nodes.push(Node {
6885		span: Span::new(
6886			kw_span.start,
6887			if let Some(s) = semi_span {
6888				s.end
6889			} else {
6890				kw_span.end
6891			},
6892		),
6893		ty: ty(),
6894	});
6895}
6896
6897/// Parses a return statement.
6898///
6899/// This function assumes that the keyword is not yet consumed.
6900fn parse_return<'a, P: TokenStreamProvider<'a>>(
6901	walker: &mut Walker<'a, P>,
6902	nodes: &mut Vec<Node>,
6903	kw_span: Span,
6904) {
6905	walker.push_colour(kw_span, SyntaxType::Keyword);
6906	walker.advance();
6907
6908	// Look for the optional return value expression.
6909	let return_expr = match expr_parser(walker, Mode::Default, [Token::Semi]) {
6910		(Some(expr), mut syntax, mut semantic, mut colours) => {
6911			walker.append_colours(&mut colours);
6912			walker.append_syntax_diags(&mut syntax);
6913			walker.append_semantic_diags(&mut semantic);
6914			Omittable::Some(expr)
6915		}
6916		(None, _, _, _) => Omittable::None,
6917	};
6918
6919	// Consume the `;` to end the statement.
6920	let semi_span = match walker.peek() {
6921		Some((token, span)) => {
6922			if *token == Token::Semi {
6923				walker.push_colour(span, SyntaxType::Punctuation);
6924				walker.advance();
6925				Some(span)
6926			} else {
6927				None
6928			}
6929		}
6930		None => None,
6931	};
6932	if semi_span.is_none() {
6933		if let Omittable::Some(ref return_expr) = return_expr {
6934			walker.push_syntax_diag(Syntax::Stmt(
6935				StmtDiag::ReturnExpectedSemiAfterExpr(
6936					return_expr.span.next_single_width(),
6937				),
6938			));
6939		} else {
6940			walker.push_syntax_diag(Syntax::Stmt(
6941				StmtDiag::ReturnExpectedSemiOrExprAfterKw(
6942					kw_span.next_single_width(),
6943				),
6944			));
6945		}
6946	}
6947
6948	nodes.push(Node {
6949		span: Span::new(
6950			kw_span.start,
6951			if let Some(s) = semi_span {
6952				s.end
6953			} else {
6954				kw_span.end
6955			},
6956		),
6957		ty: NodeTy::Return { value: return_expr },
6958	});
6959}
6960
6961/// Parses a preprocessor directive.
6962///
6963/// This function assumes that the directive has not yet been consumed.
6964fn parse_directive<'a, P: TokenStreamProvider<'a>>(
6965	walker: &mut Walker<'a, P>,
6966	nodes: &mut Vec<Node>,
6967	stream: PreprocStream,
6968	dir_span: Span,
6969) {
6970	use crate::lexer::preprocessor::{self, DefineToken, UndefToken};
6971
6972	match stream {
6973		PreprocStream::Empty => {
6974			walker.push_colour(dir_span, SyntaxType::DirectiveHash);
6975			walker.push_semantic_diag(Semantic::EmptyDirective(dir_span));
6976			nodes.push(Node {
6977				span: dir_span,
6978				ty: NodeTy::EmptyDirective,
6979			});
6980		}
6981		PreprocStream::Custom { kw, content } => {
6982			walker
6983				.push_colour(dir_span.first_char(), SyntaxType::DirectiveHash);
6984			walker.push_colour(kw.1, SyntaxType::DirectiveName);
6985			if let Some(content) = content {
6986				walker.push_colour(content.1, SyntaxType::Directive);
6987			}
6988			walker.push_syntax_diag(Syntax::FoundIllegalPreproc(
6989				dir_span,
6990				Some(kw),
6991			));
6992		}
6993		PreprocStream::Invalid { content } => {
6994			walker
6995				.push_colour(dir_span.first_char(), SyntaxType::DirectiveHash);
6996			walker.push_colour(content.1, SyntaxType::Directive);
6997			walker
6998				.push_syntax_diag(Syntax::FoundIllegalPreproc(dir_span, None));
6999		}
7000		PreprocStream::Version { kw, tokens } => {
7001			parse_version_directive(walker, nodes, dir_span, kw, tokens)
7002		}
7003		PreprocStream::Extension { kw, tokens } => {
7004			parse_extension_directive(walker, nodes, dir_span, kw, tokens)
7005		}
7006		PreprocStream::Line { kw, tokens } => {
7007			parse_line_directive(walker, nodes, dir_span, kw, tokens)
7008		}
7009		PreprocStream::Define {
7010			kw: kw_span,
7011			mut ident_tokens,
7012			body_tokens,
7013		} => {
7014			walker
7015				.push_colour(dir_span.first_char(), SyntaxType::DirectiveHash);
7016			walker.push_colour(kw_span, SyntaxType::DirectiveName);
7017
7018			if ident_tokens.is_empty() {
7019				// We have a macro that's missing a name.
7020
7021				walker.push_syntax_diag(Syntax::PreprocDefine(
7022					PreprocDefineDiag::DefineExpectedMacroName(
7023						kw_span.next_single_width(),
7024					),
7025				));
7026				body_tokens.iter().for_each(|(t, s)| {
7027					walker.push_colour_with_modifiers(
7028						*s,
7029						t.non_semantic_colour(),
7030						SyntaxModifiers::MACRO_BODY,
7031					)
7032				});
7033			} else if ident_tokens.len() == 1 {
7034				// We have an object-like macro.
7035
7036				let ident = match ident_tokens.remove(0) {
7037					(DefineToken::Ident(s), span) => {
7038						walker.push_colour_with_modifiers(
7039							span,
7040							SyntaxType::ObjectMacro,
7041							SyntaxModifiers::MACRO_SIGNATURE,
7042						);
7043						(s, span)
7044					}
7045					_ => unreachable!(),
7046				};
7047
7048				// Since object-like macros don't have parameters, we can perform the concatenation right here
7049				// since we know the contents of the macro body will never change.
7050				let (body_tokens, mut syntax, mut semantic) =
7051					preprocessor::concat_macro_body(
7052						body_tokens,
7053						walker.span_encoding,
7054					);
7055				walker.append_syntax_diags(&mut syntax);
7056				walker.append_semantic_diags(&mut semantic);
7057				body_tokens.iter().for_each(|(t, s)| {
7058					walker.push_colour_with_modifiers(
7059						*s,
7060						t.non_semantic_colour(),
7061						SyntaxModifiers::MACRO_BODY,
7062					)
7063				});
7064
7065				walker.register_macro(
7066					ident.0.clone(),
7067					ident.1,
7068					Macro::Object(body_tokens.clone()),
7069				);
7070				nodes.push(Node {
7071					span: dir_span,
7072					ty: NodeTy::DefineDirective {
7073						macro_: ast::Macro::Object {
7074							ident: Ident {
7075								span: ident.1,
7076								name: ident.0,
7077							},
7078						},
7079						replacement_tokens: body_tokens,
7080					},
7081				});
7082			} else {
7083				// We have a function-like macro.
7084
7085				let ident = match ident_tokens.remove(0) {
7086					(DefineToken::Ident(s), span) => {
7087						walker.push_colour_with_modifiers(
7088							span,
7089							SyntaxType::FunctionMacro,
7090							SyntaxModifiers::MACRO_SIGNATURE,
7091						);
7092						(s, span)
7093					}
7094					_ => unreachable!(),
7095				};
7096
7097				// Consume the `(`.
7098				let l_paren_span = match ident_tokens.remove(0) {
7099					(DefineToken::LParen, span) => {
7100						walker.push_colour_with_modifiers(
7101							span,
7102							SyntaxType::Punctuation,
7103							SyntaxModifiers::MACRO_SIGNATURE,
7104						);
7105						span
7106					}
7107					_ => unreachable!(),
7108				};
7109
7110				// Look for any parameters until we hit a closing `)` parenthesis.
7111				#[derive(PartialEq)]
7112				enum Prev {
7113					None,
7114					Param,
7115					Comma,
7116					Invalid,
7117				}
7118				let mut prev = Prev::None;
7119				let mut prev_span = l_paren_span;
7120				let mut params = Vec::new();
7121				let r_paren_span = loop {
7122					let (token, token_span) = if !ident_tokens.is_empty() {
7123						ident_tokens.remove(0)
7124					} else {
7125						walker.push_syntax_diag(Syntax::PreprocDefine(
7126							PreprocDefineDiag::ParamsExpectedRParen(
7127								prev_span.next_single_width(),
7128							),
7129						));
7130						nodes.push(Node {
7131							span: dir_span,
7132							ty: NodeTy::DefineDirective {
7133								macro_: ast::Macro::Function {
7134									ident: Ident {
7135										span: ident.1,
7136										name: ident.0,
7137									},
7138									params,
7139								},
7140								replacement_tokens: body_tokens,
7141							},
7142						});
7143						return;
7144					};
7145
7146					match token {
7147						DefineToken::Comma => {
7148							walker.push_colour_with_modifiers(
7149								token_span,
7150								SyntaxType::Punctuation,
7151								SyntaxModifiers::MACRO_SIGNATURE,
7152							);
7153							if prev == Prev::Comma {
7154								walker.push_syntax_diag(Syntax::PreprocDefine(
7155									PreprocDefineDiag::ParamsExpectedParamAfterComma(Span::new_between(
7156										prev_span, token_span
7157									))
7158								));
7159							} else if prev == Prev::None {
7160								walker.push_syntax_diag(Syntax::PreprocDefine(
7161									PreprocDefineDiag::ParamsExpectedParamBetweenParenComma(Span::new_between(
7162										l_paren_span, token_span
7163									))
7164								));
7165							}
7166							prev = Prev::Comma;
7167							prev_span = token_span;
7168						}
7169						DefineToken::Ident(str) => {
7170							walker.push_colour_with_modifiers(
7171								token_span,
7172								SyntaxType::Parameter,
7173								SyntaxModifiers::MACRO_SIGNATURE,
7174							);
7175							params.push(Ident {
7176								name: str,
7177								span: token_span,
7178							});
7179							if prev == Prev::Param {
7180								walker.push_syntax_diag(Syntax::PreprocDefine(
7181									PreprocDefineDiag::ParamsExpectedCommaAfterParam(prev_span.next_single_width())
7182								));
7183							}
7184							prev = Prev::Param;
7185							prev_span = token_span;
7186						}
7187						DefineToken::RParen => {
7188							walker.push_colour_with_modifiers(
7189								token_span,
7190								SyntaxType::Punctuation,
7191								SyntaxModifiers::MACRO_SIGNATURE,
7192							);
7193							if prev == Prev::Comma {
7194								walker.push_syntax_diag(Syntax::PreprocDefine(
7195									PreprocDefineDiag::ParamsExpectedParamAfterComma(Span::new_between(
7196										prev_span, token_span
7197									))
7198								));
7199							}
7200							break token_span;
7201						}
7202						DefineToken::Invalid(_) | _ => {
7203							walker.push_colour_with_modifiers(
7204								token_span,
7205								SyntaxType::Invalid,
7206								SyntaxModifiers::MACRO_SIGNATURE,
7207							);
7208							walker.push_syntax_diag(Syntax::PreprocDefine(
7209								PreprocDefineDiag::ParamsExpectedParam(
7210									token_span,
7211								),
7212							));
7213							prev = Prev::Invalid;
7214							prev_span = token_span;
7215						}
7216					}
7217				};
7218
7219				// We can't perform the token concatenation right here since the contents of the macro body will
7220				// change depending on the parameters, but we can still concatenate in order to find any
7221				// syntax/semantic diagnostics.
7222				let (_, mut syntax, mut semantic) =
7223					preprocessor::concat_macro_body(
7224						body_tokens.clone(),
7225						walker.span_encoding,
7226					);
7227				walker.append_syntax_diags(&mut syntax);
7228				walker.append_semantic_diags(&mut semantic);
7229
7230				// Syntax highlight the body. If any identifier matches a parameter, we correctly highlight that.
7231				body_tokens.iter().for_each(|(t, s)| match t {
7232					Token::Ident(str) => {
7233						if let Some(_) =
7234							params.iter().find(|ident| &ident.name == str)
7235						{
7236							walker.push_colour_with_modifiers(
7237								*s,
7238								SyntaxType::Parameter,
7239								SyntaxModifiers::MACRO_BODY,
7240							)
7241						} else {
7242							walker.push_colour_with_modifiers(
7243								*s,
7244								t.non_semantic_colour(),
7245								SyntaxModifiers::MACRO_BODY,
7246							)
7247						}
7248					}
7249					_ => walker.push_colour_with_modifiers(
7250						*s,
7251						t.non_semantic_colour(),
7252						SyntaxModifiers::MACRO_BODY,
7253					),
7254				});
7255
7256				walker.register_macro(
7257					ident.0.clone(),
7258					Span::new(ident.1.start, r_paren_span.end),
7259					Macro::Function {
7260						params: params.clone(),
7261						body: body_tokens.clone(),
7262					},
7263				);
7264				nodes.push(Node {
7265					span: dir_span,
7266					ty: NodeTy::DefineDirective {
7267						macro_: ast::Macro::Function {
7268							ident: Ident {
7269								span: ident.1,
7270								name: ident.0,
7271							},
7272							params,
7273						},
7274						replacement_tokens: body_tokens,
7275					},
7276				});
7277			}
7278		}
7279		PreprocStream::Undef {
7280			kw: kw_span,
7281			mut tokens,
7282		} => {
7283			walker
7284				.push_colour(dir_span.first_char(), SyntaxType::DirectiveHash);
7285			walker.push_colour(kw_span, SyntaxType::DirectiveName);
7286
7287			let ident = if tokens.is_empty() {
7288				walker.push_syntax_diag(Syntax::PreprocDefine(
7289					PreprocDefineDiag::UndefExpectedMacroName(
7290						kw_span.next_single_width(),
7291					),
7292				));
7293				Omittable::None
7294			} else {
7295				let (token, token_span) = tokens.remove(0);
7296				let ident = match token {
7297					UndefToken::Ident(s) => {
7298						walker.unregister_macro(&s, token_span);
7299						Omittable::Some(Ident {
7300							name: s,
7301							span: token_span,
7302						})
7303					}
7304					UndefToken::Invalid(_) => {
7305						walker.push_syntax_diag(Syntax::PreprocDefine(
7306							PreprocDefineDiag::UndefExpectedMacroName(
7307								token_span,
7308							),
7309						));
7310						Omittable::None
7311					}
7312				};
7313
7314				if !tokens.is_empty() {
7315					let (_, first) = tokens.first().unwrap();
7316					let (_, last) = tokens.last().unwrap();
7317					walker.push_colour_with_modifiers(
7318						Span::new(first.start, last.end),
7319						SyntaxType::Invalid,
7320						SyntaxModifiers::UNDEFINE,
7321					);
7322					walker.push_syntax_diag(Syntax::PreprocTrailingTokens(
7323						Span::new(first.start, last.end),
7324					));
7325				}
7326
7327				ident
7328			};
7329
7330			nodes.push(Node {
7331				span: Span::new(
7332					dir_span.start,
7333					if let Omittable::Some(ref ident) = ident {
7334						ident.span.end
7335					} else {
7336						kw_span.end
7337					},
7338				),
7339				ty: NodeTy::UndefDirective { name: ident },
7340			});
7341		}
7342		PreprocStream::Error { kw, message } => {
7343			parse_error_directive(walker, nodes, dir_span, kw, message)
7344		}
7345		PreprocStream::Pragma { kw, options } => {
7346			parse_pragma_directive(walker, nodes, dir_span, kw, options)
7347		}
7348		_ => {}
7349	}
7350}
7351
7352/// Parses a `#version` directive.
7353fn parse_version_directive<'a, P: TokenStreamProvider<'a>>(
7354	walker: &mut Walker<'a, P>,
7355	nodes: &mut Vec<Node>,
7356	dir_span: Span,
7357	kw_span: Span,
7358	tokens: Vec<(VersionToken, Span)>,
7359) {
7360	walker.push_colour(dir_span.first_char(), SyntaxType::DirectiveHash);
7361	walker.push_colour(kw_span, SyntaxType::DirectiveName);
7362
7363	if tokens.is_empty() {
7364		walker.push_syntax_diag(Syntax::PreprocVersion(
7365			PreprocVersionDiag::ExpectedNumber(kw_span.next_single_width()),
7366		));
7367		return;
7368	}
7369	let mut tokens = tokens.into_iter();
7370
7371	/// Consumes the rest of the tokens.
7372	fn seek_end<'a, P: TokenStreamProvider<'a>>(
7373		walker: &mut Walker<'a, P>,
7374		mut tokens: impl Iterator<Item = (VersionToken, Span)>,
7375		emit_diagnostic: bool,
7376	) {
7377		let span_start = match tokens.next() {
7378			Some((_, span)) => span.start,
7379			None => return,
7380		};
7381		let mut span_end = span_start;
7382		for (token, token_span) in tokens {
7383			walker.push_colour(
7384				token_span,
7385				match token {
7386					VersionToken::Invalid(_) => SyntaxType::Invalid,
7387					_ => SyntaxType::Directive,
7388				},
7389			);
7390			span_end = token_span.end;
7391		}
7392		if emit_diagnostic {
7393			walker.push_syntax_diag(Syntax::PreprocTrailingTokens(Span::new(
7394				span_start, span_end,
7395			)));
7396		}
7397	}
7398
7399	/// Parses the version number.
7400	fn parse_version<'a, P: TokenStreamProvider<'a>>(
7401		walker: &mut Walker<'a, P>,
7402		number: usize,
7403		span: Span,
7404	) -> Option<usize> {
7405		match number {
7406			450 => Some(number),
7407			100 | 110 | 120 | 130 | 140 | 150 | 300 | 310 | 320 | 330 | 400
7408			| 410 | 420 | 430 | 460 => {
7409				walker.push_syntax_diag(Syntax::PreprocVersion(
7410					PreprocVersionDiag::UnsupportedVersion(span, number),
7411				));
7412				Some(number)
7413			}
7414			_ => {
7415				walker.push_syntax_diag(Syntax::PreprocVersion(
7416					PreprocVersionDiag::InvalidVersion(span, number),
7417				));
7418				None
7419			}
7420		}
7421	}
7422
7423	/// Parses the profile.
7424	fn parse_profile<'a, P: TokenStreamProvider<'a>>(
7425		walker: &mut Walker<'a, P>,
7426		str: &str,
7427		span: Span,
7428	) -> Option<ProfileTy> {
7429		match str {
7430			"core" => {
7431				walker.push_colour(span, SyntaxType::DirectiveProfile);
7432				Some(ProfileTy::Core)
7433			}
7434			"compatability" => {
7435				walker.push_colour(span, SyntaxType::DirectiveProfile);
7436				Some(ProfileTy::Compatability)
7437			}
7438			"es" => {
7439				walker.push_colour(span, SyntaxType::DirectiveProfile);
7440				Some(ProfileTy::Es)
7441			}
7442			_ => {
7443				let str = str.to_lowercase();
7444				match str.as_ref() {
7445					"core" => {
7446						walker.push_colour(span, SyntaxType::DirectiveProfile);
7447						walker.push_syntax_diag(Syntax::PreprocVersion(
7448							PreprocVersionDiag::InvalidProfileCasing(
7449								span, "core",
7450							),
7451						));
7452						Some(ProfileTy::Core)
7453					}
7454					"compatability" => {
7455						walker.push_colour(span, SyntaxType::DirectiveProfile);
7456						walker.push_syntax_diag(Syntax::PreprocVersion(
7457							PreprocVersionDiag::InvalidProfileCasing(
7458								span,
7459								"compatability",
7460							),
7461						));
7462						Some(ProfileTy::Compatability)
7463					}
7464					"es" => {
7465						walker.push_colour(span, SyntaxType::DirectiveProfile);
7466						walker.push_syntax_diag(Syntax::PreprocVersion(
7467							PreprocVersionDiag::InvalidProfileCasing(
7468								span, "es",
7469							),
7470						));
7471						Some(ProfileTy::Es)
7472					}
7473					_ => None,
7474				}
7475			}
7476		}
7477	}
7478
7479	// Consume the version number.
7480	let version = {
7481		let (token, token_span) = tokens.next().unwrap();
7482		match token {
7483			VersionToken::Num(n) => {
7484				match parse_version(walker, n, token_span) {
7485					Some(n) => {
7486						walker.push_colour(
7487							token_span,
7488							SyntaxType::DirectiveVersion,
7489						);
7490						(n, token_span)
7491					}
7492					None => {
7493						walker.push_colour(token_span, SyntaxType::Directive);
7494						seek_end(walker, tokens, false);
7495						return;
7496					}
7497				}
7498			}
7499			VersionToken::InvalidNum(_) => {
7500				walker.push_colour(token_span, SyntaxType::Invalid);
7501				walker.push_syntax_diag(Syntax::PreprocVersion(
7502					PreprocVersionDiag::InvalidNumber(token_span),
7503				));
7504				seek_end(walker, tokens, false);
7505				return;
7506			}
7507			VersionToken::Invalid(_) => {
7508				walker.push_colour(token_span, SyntaxType::Invalid);
7509				walker.push_syntax_diag(Syntax::PreprocVersion(
7510					PreprocVersionDiag::ExpectedNumber(token_span),
7511				));
7512				seek_end(walker, tokens, false);
7513				return;
7514			}
7515			VersionToken::Word(str) => {
7516				match parse_profile(walker, &str, token_span) {
7517					Some(profile) => {
7518						// We have a profile immediately after the `version` keyword.
7519						walker.push_syntax_diag(Syntax::PreprocVersion(PreprocVersionDiag::MissingNumberBetweenKwAndProfile(
7520								Span::new_between(kw_span, token_span)
7521							)));
7522						seek_end(walker, tokens, true);
7523						nodes.push(Node {
7524							span: Span::new(dir_span.start, token_span.end),
7525							ty: NodeTy::VersionDirective {
7526								version: None,
7527								profile: Omittable::Some((profile, token_span)),
7528							},
7529						});
7530						return;
7531					}
7532					None => {
7533						walker.push_colour(token_span, SyntaxType::Directive);
7534						walker.push_syntax_diag(Syntax::PreprocVersion(
7535							PreprocVersionDiag::ExpectedNumber(token_span),
7536						));
7537						seek_end(walker, tokens, false);
7538						return;
7539					}
7540				}
7541			}
7542		}
7543	};
7544
7545	// Look for an optional profile.
7546	let profile = match tokens.next() {
7547		Some((token, token_span)) => match token {
7548			VersionToken::Word(str) => {
7549				match parse_profile(walker, &str, token_span) {
7550					Some(p) => Omittable::Some((p, token_span)),
7551					None => {
7552						walker.push_syntax_diag(Syntax::PreprocVersion(
7553							PreprocVersionDiag::InvalidProfile(token_span),
7554						));
7555						seek_end(walker, tokens, false);
7556						nodes.push(Node {
7557							span: Span::new(dir_span.start, version.1.end),
7558							ty: NodeTy::VersionDirective {
7559								version: Some(version),
7560								profile: Omittable::None,
7561							},
7562						});
7563						return;
7564					}
7565				}
7566			}
7567			_ => {
7568				walker.push_syntax_diag(Syntax::PreprocVersion(
7569					PreprocVersionDiag::ExpectedProfile(token_span),
7570				));
7571				seek_end(walker, tokens, false);
7572				nodes.push(Node {
7573					span: Span::new(dir_span.start, version.1.end),
7574					ty: NodeTy::VersionDirective {
7575						version: Some(version),
7576						profile: Omittable::None,
7577					},
7578				});
7579				return;
7580			}
7581		},
7582		None => Omittable::None,
7583	};
7584
7585	seek_end(walker, tokens, true);
7586	nodes.push(Node {
7587		span: Span::new(
7588			dir_span.start,
7589			if let Omittable::Some(ref profile) = profile {
7590				profile.1.end
7591			} else {
7592				version.1.end
7593			},
7594		),
7595		ty: NodeTy::VersionDirective {
7596			version: Some(version),
7597			profile,
7598		},
7599	});
7600}
7601
7602/// Parses an `#extension` directive.
7603fn parse_extension_directive<'a, P: TokenStreamProvider<'a>>(
7604	walker: &mut Walker<'a, P>,
7605	nodes: &mut Vec<Node>,
7606	dir_span: Span,
7607	kw_span: Span,
7608	tokens: Vec<(ExtensionToken, Span)>,
7609) {
7610	walker.push_colour(dir_span.first_char(), SyntaxType::DirectiveHash);
7611	walker.push_colour(kw_span, SyntaxType::DirectiveName);
7612
7613	if tokens.is_empty() {
7614		walker.push_syntax_diag(Syntax::PreprocExt(
7615			PreprocExtDiag::ExpectedName(kw_span.next_single_width()),
7616		));
7617		return;
7618	}
7619	let mut tokens = tokens.into_iter();
7620
7621	/// Consumes the rest of the tokens.
7622	fn seek_end<'a, P: TokenStreamProvider<'a>>(
7623		walker: &mut Walker<'a, P>,
7624		mut tokens: impl Iterator<Item = (ExtensionToken, Span)>,
7625		emit_diagnostic: bool,
7626	) {
7627		let span_start = match tokens.next() {
7628			Some((_, span)) => span.start,
7629			None => return,
7630		};
7631		let mut span_end = span_start;
7632		for (token, token_span) in tokens {
7633			walker.push_colour(
7634				token_span,
7635				match token {
7636					ExtensionToken::Invalid(_) => SyntaxType::Invalid,
7637					_ => SyntaxType::Directive,
7638				},
7639			);
7640			span_end = token_span.end;
7641		}
7642		if emit_diagnostic {
7643			walker.push_syntax_diag(Syntax::PreprocTrailingTokens(Span::new(
7644				span_start, span_end,
7645			)));
7646		}
7647	}
7648
7649	/// Parses the behaviour.
7650	fn parse_behaviour(
7651		str: &str,
7652		span: Span,
7653	) -> Option<(BehaviourTy, Option<Syntax>)> {
7654		match str {
7655			"require" => Some((BehaviourTy::Require, None)),
7656			"enable" => Some((BehaviourTy::Enable, None)),
7657			"warn" => Some((BehaviourTy::Warn, None)),
7658			"disable" => Some((BehaviourTy::Disable, None)),
7659			_ => {
7660				let str = str.to_lowercase();
7661				match str.as_ref() {
7662					"require" => Some((
7663						BehaviourTy::Require,
7664						Some(Syntax::PreprocExt(
7665							PreprocExtDiag::InvalidBehaviourCasing(
7666								span, "require",
7667							),
7668						)),
7669					)),
7670					"enable" => Some((
7671						BehaviourTy::Enable,
7672						Some(Syntax::PreprocExt(
7673							PreprocExtDiag::InvalidBehaviourCasing(
7674								span, "enable",
7675							),
7676						)),
7677					)),
7678					"warn" => Some((
7679						BehaviourTy::Warn,
7680						Some(Syntax::PreprocExt(
7681							PreprocExtDiag::InvalidBehaviourCasing(
7682								span, "warn",
7683							),
7684						)),
7685					)),
7686					"disable" => Some((
7687						BehaviourTy::Disable,
7688						Some(Syntax::PreprocExt(
7689							PreprocExtDiag::InvalidBehaviourCasing(
7690								span, "disable",
7691							),
7692						)),
7693					)),
7694					_ => None,
7695				}
7696			}
7697		}
7698	}
7699
7700	// Consume the extension name.
7701	let name = {
7702		let (token, token_span) = tokens.next().unwrap();
7703		match token {
7704			ExtensionToken::Word(str) => {
7705				match parse_behaviour(&str, token_span) {
7706					Some((behaviour, _)) => {
7707						walker.push_colour(
7708							token_span,
7709							SyntaxType::DirectiveExtBehaviour,
7710						);
7711						walker.push_syntax_diag(Syntax::PreprocExt(
7712							PreprocExtDiag::MissingNameBetweenKwAndBehaviour(
7713								Span::new_between(kw_span, token_span),
7714							),
7715						));
7716						seek_end(walker, tokens, false);
7717						nodes.push(Node {
7718							span: Span::new(dir_span.start, token_span.end),
7719							ty: NodeTy::ExtensionDirective {
7720								name: None,
7721								behaviour: Some((behaviour, token_span)),
7722							},
7723						});
7724						return;
7725					}
7726					None => {
7727						walker.push_colour(
7728							token_span,
7729							SyntaxType::DirectiveExtName,
7730						);
7731						(str, token_span)
7732					}
7733				}
7734			}
7735			ExtensionToken::Colon => {
7736				walker.push_colour(token_span, SyntaxType::Directive);
7737				walker.push_syntax_diag(Syntax::PreprocExt(
7738					PreprocExtDiag::MissingNameBetweenKwAndColon(
7739						Span::new_between(kw_span, token_span),
7740					),
7741				));
7742				seek_end(walker, tokens, false);
7743				nodes.push(Node {
7744					span: Span::new(dir_span.start, kw_span.end),
7745					ty: NodeTy::ExtensionDirective {
7746						name: None,
7747						behaviour: None,
7748					},
7749				});
7750				return;
7751			}
7752			ExtensionToken::Invalid(_) => {
7753				walker.push_colour(token_span, SyntaxType::Invalid);
7754				walker.push_syntax_diag(Syntax::PreprocExt(
7755					PreprocExtDiag::ExpectedName(token_span),
7756				));
7757				seek_end(walker, tokens, false);
7758				return;
7759			}
7760		}
7761	};
7762
7763	// Consume the colon.
7764	let colon_span = match tokens.next() {
7765		Some((token, token_span)) => match token {
7766			ExtensionToken::Colon => {
7767				walker.push_colour(token_span, SyntaxType::Directive);
7768				token_span
7769			}
7770			ExtensionToken::Word(str) => {
7771				match parse_behaviour(&str, token_span) {
7772					Some((behaviour, _)) => {
7773						walker.push_colour(
7774							token_span,
7775							SyntaxType::DirectiveExtBehaviour,
7776						);
7777						walker.push_syntax_diag(Syntax::PreprocExt(
7778							PreprocExtDiag::MissingColonBetweenNameAndBehaviour(
7779								Span::new_between(name.1, token_span),
7780							),
7781						));
7782						seek_end(walker, tokens, false);
7783						nodes.push(Node {
7784							span: Span::new(dir_span.start, token_span.end),
7785							ty: NodeTy::ExtensionDirective {
7786								name: Some(name),
7787								behaviour: Some((behaviour, token_span)),
7788							},
7789						});
7790						return;
7791					}
7792					None => {
7793						walker.push_colour(token_span, SyntaxType::Directive);
7794						walker.push_syntax_diag(Syntax::PreprocExt(
7795							PreprocExtDiag::ExpectedColon(token_span),
7796						));
7797						seek_end(walker, tokens, false);
7798						nodes.push(Node {
7799							span: Span::new(dir_span.start, name.1.end),
7800							ty: NodeTy::ExtensionDirective {
7801								name: Some(name),
7802								behaviour: None,
7803							},
7804						});
7805						return;
7806					}
7807				}
7808			}
7809			ExtensionToken::Invalid(_) => {
7810				walker.push_colour(token_span, SyntaxType::Invalid);
7811				walker.push_syntax_diag(Syntax::PreprocExt(
7812					PreprocExtDiag::ExpectedColon(token_span),
7813				));
7814				seek_end(walker, tokens, false);
7815				nodes.push(Node {
7816					span: Span::new(dir_span.start, name.1.end),
7817					ty: NodeTy::ExtensionDirective {
7818						name: Some(name),
7819						behaviour: None,
7820					},
7821				});
7822				return;
7823			}
7824		},
7825		None => {
7826			walker.push_syntax_diag(Syntax::PreprocExt(
7827				PreprocExtDiag::ExpectedColon(name.1.next_single_width()),
7828			));
7829			nodes.push(Node {
7830				span: Span::new(dir_span.start, name.1.end),
7831				ty: NodeTy::ExtensionDirective {
7832					name: Some(name),
7833					behaviour: None,
7834				},
7835			});
7836			return;
7837		}
7838	};
7839
7840	// Consume the behaviour.
7841	let behaviour = match tokens.next() {
7842		Some((token, token_span)) => match token {
7843			ExtensionToken::Word(str) => {
7844				match parse_behaviour(&str, token_span) {
7845					Some((behaviour, diag)) => {
7846						walker.push_colour(
7847							token_span,
7848							SyntaxType::DirectiveExtBehaviour,
7849						);
7850						if let Some(diag) = diag {
7851							walker.push_syntax_diag(diag);
7852						}
7853						(behaviour, token_span)
7854					}
7855					None => {
7856						walker.push_colour(token_span, SyntaxType::Directive);
7857						walker.push_syntax_diag(Syntax::PreprocExt(
7858							PreprocExtDiag::InvalidBehaviour(token_span),
7859						));
7860						seek_end(walker, tokens, false);
7861						nodes.push(Node {
7862							span: Span::new(dir_span.start, colon_span.end),
7863							ty: NodeTy::ExtensionDirective {
7864								name: Some(name),
7865								behaviour: None,
7866							},
7867						});
7868						return;
7869					}
7870				}
7871			}
7872			ExtensionToken::Colon => {
7873				walker.push_colour(token_span, SyntaxType::Directive);
7874				walker.push_syntax_diag(Syntax::PreprocExt(
7875					PreprocExtDiag::ExpectedBehaviour(token_span),
7876				));
7877				seek_end(walker, tokens, false);
7878				nodes.push(Node {
7879					span: Span::new(dir_span.start, colon_span.end),
7880					ty: NodeTy::ExtensionDirective {
7881						name: Some(name),
7882						behaviour: None,
7883					},
7884				});
7885				return;
7886			}
7887			ExtensionToken::Invalid(_) => {
7888				walker.push_colour(token_span, SyntaxType::Invalid);
7889				walker.push_syntax_diag(Syntax::PreprocExt(
7890					PreprocExtDiag::ExpectedBehaviour(token_span),
7891				));
7892				seek_end(walker, tokens, false);
7893				nodes.push(Node {
7894					span: Span::new(dir_span.start, colon_span.end),
7895					ty: NodeTy::ExtensionDirective {
7896						name: Some(name),
7897						behaviour: None,
7898					},
7899				});
7900				return;
7901			}
7902		},
7903		None => {
7904			walker.push_syntax_diag(Syntax::PreprocExt(
7905				PreprocExtDiag::ExpectedBehaviour(name.1.next_single_width()),
7906			));
7907			nodes.push(Node {
7908				span: Span::new(dir_span.start, colon_span.end),
7909				ty: NodeTy::ExtensionDirective {
7910					name: Some(name),
7911					behaviour: None,
7912				},
7913			});
7914			return;
7915		}
7916	};
7917
7918	seek_end(walker, tokens, true);
7919	nodes.push(Node {
7920		span: Span::new(dir_span.start, behaviour.1.end),
7921		ty: NodeTy::ExtensionDirective {
7922			name: Some(name),
7923			behaviour: Some(behaviour),
7924		},
7925	});
7926}
7927
7928/// Parses a `#line` directive.
7929fn parse_line_directive<'a, P: TokenStreamProvider<'a>>(
7930	walker: &mut Walker<'a, P>,
7931	nodes: &mut Vec<Node>,
7932	dir_span: Span,
7933	kw_span: Span,
7934	tokens: Vec<(LineToken, Span)>,
7935) {
7936	walker.push_colour(dir_span.first_char(), SyntaxType::DirectiveHash);
7937	walker.push_colour(kw_span, SyntaxType::DirectiveName);
7938
7939	if tokens.is_empty() {
7940		walker.push_syntax_diag(Syntax::PreprocLine(
7941			PreprocLineDiag::ExpectedNumber(kw_span.next_single_width()),
7942		));
7943		return;
7944	}
7945	let mut tokens = tokens.into_iter();
7946
7947	/// Consumes the rest of the tokens.
7948	fn seek_end<'a, P: TokenStreamProvider<'a>>(
7949		walker: &mut Walker<'a, P>,
7950		mut tokens: impl Iterator<Item = (LineToken, Span)>,
7951		emit_diagnostic: bool,
7952	) {
7953		let span_start = match tokens.next() {
7954			Some((_, span)) => span.start,
7955			None => return,
7956		};
7957		let mut span_end = span_start;
7958		for (token, token_span) in tokens {
7959			walker.push_colour(
7960				token_span,
7961				match token {
7962					LineToken::Invalid(_) => SyntaxType::Invalid,
7963					_ => SyntaxType::Directive,
7964				},
7965			);
7966			span_end = token_span.end;
7967		}
7968		if emit_diagnostic {
7969			walker.push_syntax_diag(Syntax::PreprocTrailingTokens(Span::new(
7970				span_start, span_end,
7971			)));
7972		}
7973	}
7974
7975	let line = {
7976		let (token, token_span) = tokens.next().unwrap();
7977		match token {
7978			LineToken::Num(n) => {
7979				walker.push_colour(token_span, SyntaxType::DirectiveLineNumber);
7980				Some((n, token_span))
7981			}
7982			LineToken::InvalidNum(_) => {
7983				walker.push_colour(token_span, SyntaxType::Invalid);
7984				walker.push_syntax_diag(Syntax::PreprocLine(
7985					PreprocLineDiag::InvalidNumber(token_span),
7986				));
7987				None
7988			}
7989			LineToken::Ident(_str) => {
7990				let _ident_span = token_span;
7991
7992				let line = None;
7993				let src_str_num = Omittable::None;
7994				if src_str_num.is_some() {
7995					seek_end(walker, tokens, true);
7996					nodes.push(Node {
7997						span: Span::new(dir_span.start, kw_span.end),
7998						ty: NodeTy::LineDirective { line, src_str_num },
7999					});
8000					return;
8001				} else {
8002					line
8003				}
8004			}
8005			LineToken::Invalid(_) => {
8006				walker.push_colour(token_span, SyntaxType::Invalid);
8007				walker.push_syntax_diag(Syntax::PreprocLine(
8008					PreprocLineDiag::ExpectedNumber(token_span),
8009				));
8010				seek_end(walker, tokens, false);
8011				nodes.push(Node {
8012					span: Span::new(dir_span.start, kw_span.end),
8013					ty: NodeTy::LineDirective {
8014						line: None,
8015						src_str_num: Omittable::None,
8016					},
8017				});
8018				return;
8019			}
8020		}
8021	};
8022
8023	let src_str_num = match tokens.next() {
8024		Some((token, token_span)) => match token {
8025			LineToken::Num(n) => {
8026				walker.push_colour(token_span, SyntaxType::DirectiveLineNumber);
8027				Omittable::Some((n, token_span))
8028			}
8029			LineToken::InvalidNum(_) => {
8030				walker.push_colour(token_span, SyntaxType::Invalid);
8031				walker.push_syntax_diag(Syntax::PreprocLine(
8032					PreprocLineDiag::InvalidNumber(token_span),
8033				));
8034				Omittable::None
8035			}
8036			LineToken::Ident(_str) => Omittable::None,
8037			LineToken::Invalid(_) => {
8038				walker.push_colour(token_span, SyntaxType::Invalid);
8039				walker.push_syntax_diag(Syntax::PreprocLine(
8040					PreprocLineDiag::ExpectedNumber(token_span),
8041				));
8042				seek_end(walker, tokens, false);
8043				nodes.push(Node {
8044					span: Span::new(
8045						dir_span.start,
8046						if let Some(line) = line {
8047							line.1.end
8048						} else {
8049							kw_span.end
8050						},
8051					),
8052					ty: NodeTy::LineDirective {
8053						line,
8054						src_str_num: Omittable::None,
8055					},
8056				});
8057				return;
8058			}
8059		},
8060		None => Omittable::None,
8061	};
8062
8063	seek_end(walker, tokens, true);
8064	nodes.push(Node {
8065		span: Span::new(
8066			dir_span.start,
8067			if let Omittable::Some(src_str_num) = src_str_num {
8068				src_str_num.1.end
8069			} else if let Some(line) = line {
8070				line.1.end
8071			} else {
8072				kw_span.end
8073			},
8074		),
8075		ty: NodeTy::LineDirective { line, src_str_num },
8076	});
8077}
8078
8079/// Parses an `#error` directive.
8080fn parse_error_directive<'a, P: TokenStreamProvider<'a>>(
8081	walker: &mut Walker<'a, P>,
8082	nodes: &mut Vec<Node>,
8083	dir_span: Span,
8084	kw_span: Span,
8085	message: Option<Spanned<String>>,
8086) {
8087	walker.push_colour(dir_span.first_char(), SyntaxType::DirectiveHash);
8088	walker.push_colour(kw_span, SyntaxType::DirectiveName);
8089	if let Some(ref message) = message {
8090		walker.push_colour(message.1, SyntaxType::DirectiveError);
8091	}
8092	nodes.push(Node {
8093		span: Span::new(
8094			dir_span.start,
8095			if let Some(ref message) = message {
8096				message.1.end
8097			} else {
8098				kw_span.end
8099			},
8100		),
8101		ty: NodeTy::ErrorDirective {
8102			message: message.into(),
8103		},
8104	});
8105}
8106
8107/// Parses a `#pragma` directive.
8108fn parse_pragma_directive<'a, P: TokenStreamProvider<'a>>(
8109	walker: &mut Walker<'a, P>,
8110	nodes: &mut Vec<Node>,
8111	dir_span: Span,
8112	kw_span: Span,
8113	options: Option<Spanned<String>>,
8114) {
8115	walker.push_colour(dir_span.first_char(), SyntaxType::DirectiveHash);
8116	walker.push_colour(kw_span, SyntaxType::DirectiveName);
8117	if let Some(ref options) = options {
8118		walker.push_colour(options.1, SyntaxType::DirectivePragma);
8119	}
8120	nodes.push(Node {
8121		span: Span::new(
8122			dir_span.start,
8123			if let Some(ref options) = options {
8124				options.1.end
8125			} else {
8126				kw_span.end
8127			},
8128		),
8129		ty: NodeTy::PragmaDirective {
8130			options: options.into(),
8131		},
8132	});
8133}
8134
8135/// Combines the ident information with the type to create one or more type-ident pairs. This step is necessary
8136/// because the idents themselves can contain type information, e.g. `int[3] i[9]`.
8137fn combine_type_with_idents(
8138	type_: Type,
8139	ident_info: Vec<(Ident, Vec<ArrSize>)>,
8140) -> Vec<(Type, Ident)> {
8141	let mut vars = Vec::new();
8142	for (ident, sizes) in ident_info {
8143		if sizes.is_empty() {
8144			vars.push((type_.clone(), ident));
8145		} else {
8146			let mut sizes = sizes.clone();
8147			let Type {
8148				ty,
8149				qualifiers,
8150				span,
8151			} = type_.clone();
8152			let primitive = match ty {
8153				TypeTy::Single(p) => p,
8154				TypeTy::Array(p, i) => {
8155					sizes.push(i);
8156					p
8157				}
8158				TypeTy::Array2D(p, i, j) => {
8159					sizes.push(i);
8160					sizes.push(j);
8161					p
8162				}
8163				TypeTy::ArrayND(p, mut v) => {
8164					sizes.append(&mut v);
8165					p
8166				}
8167			};
8168
8169			let type_ = if sizes.len() == 0 {
8170				Type {
8171					span,
8172					ty: TypeTy::Single(primitive),
8173					qualifiers,
8174				}
8175			} else if sizes.len() == 1 {
8176				Type {
8177					span,
8178					ty: TypeTy::Array(primitive, sizes.remove(0)),
8179					qualifiers,
8180				}
8181			} else if sizes.len() == 2 {
8182				Type {
8183					span,
8184					ty: TypeTy::Array2D(
8185						primitive,
8186						sizes.remove(0),
8187						sizes.remove(0),
8188					),
8189					qualifiers,
8190				}
8191			} else {
8192				Type {
8193					span,
8194					ty: TypeTy::ArrayND(primitive, sizes),
8195					qualifiers,
8196				}
8197			};
8198
8199			vars.push((type_, ident))
8200		}
8201	}
8202	vars
8203}