use crate::{Parse, ParseToEnd, Peek};
use core::marker::PhantomData;
use std::cell::RefCell;
use sway_ast::keywords::Keyword;
use sway_ast::literal::Literal;
use sway_ast::token::{
    DocComment, GenericTokenTree, Group, Punct, Spacing, TokenStream, TokenTree,
};
use sway_ast::PubToken;
use sway_error::error::CompileError;
use sway_error::handler::{ErrorEmitted, Handler};
use sway_error::parser_error::{ParseError, ParseErrorKind};
use sway_features::ExperimentalFeatures;
use sway_types::{
    ast::{Delimiter, PunctKind},
    Ident, Span, Spanned,
};

pub struct Parser<'a, 'e> {
    token_trees: &'a [TokenTree],
    full_span: Span,
    handler: &'e Handler,
    pub check_double_underscore: bool,
    pub experimental: ExperimentalFeatures,
}

impl<'a, 'e> Parser<'a, 'e> {
    pub fn new(
        handler: &'e Handler,
        token_stream: &'a TokenStream,
        experimental: ExperimentalFeatures,
    ) -> Parser<'a, 'e> {
        Parser {
            token_trees: token_stream.token_trees(),
            full_span: token_stream.span(),
            handler,
            check_double_underscore: true,
            experimental,
        }
    }

    pub fn emit_error(&mut self, kind: ParseErrorKind) -> ErrorEmitted {
        let span = match self.token_trees {
            [token_tree, ..] => token_tree.span(),
            _ => {
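                // There are no tokens left. Create a one-character span that points
                // just past the last non-whitespace character of the full span (or at
                // its last character if there is no trailing whitespace).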
                let num_trailing_spaces =
                    self.full_span.as_str().len() - self.full_span.as_str().trim_end().len();
                let trim_offset = if num_trailing_spaces == 0 {
                    1
                } else {
                    num_trailing_spaces
                };
                Span::new(
                    self.full_span.src().clone(),
                    self.full_span.end().saturating_sub(trim_offset),
                    (self.full_span.end() + 1).saturating_sub(trim_offset),
                    self.full_span.source_id().cloned(),
                )
                .unwrap_or(Span::dummy())
            }
        };
        self.emit_error_with_span(kind, span)
    }

    pub fn emit_error_with_span(&mut self, kind: ParseErrorKind, span: Span) -> ErrorEmitted {
        let error = ParseError { span, kind };
        self.handler.emit_err(CompileError::Parse { error })
    }

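    /// Consumes a `P` if it is at the head of the token stream, advancing the
    /// parser past it; returns `None` without advancing otherwise.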
    pub fn take<P: Peek>(&mut self) -> Option<P> {
        let (value, tokens) = Peeker::with(self.token_trees)?;
        self.token_trees = tokens;
        Some(value)
    }

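    /// Checks whether a `P` is at the head of the token stream, without consuming it.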
    pub fn peek<P: Peek>(&self) -> Option<P> {
        Peeker::with(self.token_trees).map(|(v, _)| v)
    }

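    /// Checks whether a `P` starts at the second token of the stream, without
    /// consuming anything.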
    pub fn peek_next<P: Peek>(&self) -> Option<P> {
        Peeker::with(&self.token_trees[1..]).map(|(v, _)| v)
    }

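    /// Forks the parser and runs `parsing_function` on the fork.
    ///
    /// On success the original parser is advanced to the fork's position and the
    /// fork's diagnostics are appended to it. On failure the original parser is
    /// left untouched and a [`ParseRecoveryStrategies`] is returned, which can be
    /// used to discard the garbled tokens and resynchronize the original parser.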
    pub fn call_parsing_function_with_recovery<
        'original,
        T,
        F: FnOnce(&mut Parser<'a, '_>) -> ParseResult<T>,
    >(
        &'original mut self,
        parsing_function: F,
    ) -> Result<T, ParseRecoveryStrategies<'original, 'a, 'e>> {
        let handler = Handler::default();
        let mut fork = Parser {
            token_trees: self.token_trees,
            full_span: self.full_span.clone(),
            handler: &handler,
            check_double_underscore: self.check_double_underscore,
            experimental: self.experimental,
        };

        match parsing_function(&mut fork) {
            Ok(result) => {
                self.token_trees = fork.token_trees;
                self.handler.append(handler);
                Ok(result)
            }
            Err(error) => {
                let Parser {
                    token_trees,
                    full_span,
                    ..
                } = fork;
                Err(ParseRecoveryStrategies {
                    original: RefCell::new(self),
                    handler,
                    fork_token_trees: token_trees,
                    fork_full_span: full_span,
                    error,
                })
            }
        }
    }

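    /// Parses a `T` on a fork of the parser, recovering on failure.
    ///
    /// Equivalent to [`Parser::call_parsing_function_with_recovery`] with
    /// [`Parser::parse`] as the parsing function.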
    pub fn parse_with_recovery<'original, T: Parse>(
        &'original mut self,
    ) -> Result<T, ParseRecoveryStrategies<'original, 'a, 'e>> {
        self.call_parsing_function_with_recovery(|p| p.parse())
    }

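    /// Parses a `T` with recovery, but only if the guard `P` is at the head of
    /// the token stream; returns `Ok(None)` without advancing otherwise.
    ///
    /// On success the original parser is advanced past the parsed `T`; on failure
    /// a [`ParseRecoveryStrategies`] is returned, as with
    /// [`Parser::parse_with_recovery`].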
    pub fn guarded_parse_with_recovery<'original, P: Peek, T: Parse>(
        &'original mut self,
    ) -> Result<Option<T>, ParseRecoveryStrategies<'original, 'a, 'e>> {
        if self.peek::<P>().is_none() {
            return Ok(None);
        }

        let handler = Handler::default();
        let mut fork = Parser {
            token_trees: self.token_trees,
            full_span: self.full_span.clone(),
            handler: &handler,
            check_double_underscore: self.check_double_underscore,
            experimental: self.experimental,
        };

        match fork.parse() {
            Ok(result) => {
                self.token_trees = fork.token_trees;
                self.handler.append(handler);
                Ok(Some(result))
            }
            Err(error) => {
                let Parser {
                    token_trees,
                    full_span,
                    ..
                } = fork;
                Err(ParseRecoveryStrategies {
                    original: RefCell::new(self),
                    handler,
                    fork_token_trees: token_trees,
                    fork_full_span: full_span,
                    error,
                })
            }
        }
    }

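    /// Tries to parse a `T` on a fork of the parser; the original parser is only
    /// advanced if parsing succeeds. Diagnostics from the attempt are appended
    /// only when `append_diagnostics` is `true`.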
    pub fn try_parse<T: Parse>(&mut self, append_diagnostics: bool) -> ParseResult<T> {
        let handler = Handler::default();
        let mut fork = Parser {
            token_trees: self.token_trees,
            full_span: self.full_span.clone(),
            handler: &handler,
            check_double_underscore: self.check_double_underscore,
            experimental: self.experimental,
        };
        let r = match T::parse(&mut fork) {
            Ok(result) => {
                self.token_trees = fork.token_trees;
                Ok(result)
            }
            Err(err) => Err(err),
        };
        if append_diagnostics {
            self.handler.append(handler);
        }
        r
    }

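    /// Like [`Parser::try_parse`], but additionally requires the parser to be
    /// fully consumed afterwards; returns `Ok(None)` if tokens remain.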
    pub fn try_parse_and_check_empty<T: Parse>(
        mut self,
        append_diagnostics: bool,
    ) -> ParseResult<Option<(T, ParserConsumed<'a>)>> {
        let value = self.try_parse(append_diagnostics)?;
        match self.check_empty() {
            Some(consumed) => Ok(Some((value, consumed))),
            None => Ok(None),
        }
    }

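    /// Parses a `T` in its canonical way.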
    pub fn parse<T: Parse>(&mut self) -> ParseResult<T> {
        T::parse(self)
    }

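    /// Parses a `T` only if the guard `G` is at the head of the token stream;
    /// returns `Ok(None)` without advancing otherwise.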
    pub fn guarded_parse<G: Peek, T: Parse>(&mut self) -> ParseResult<Option<T>> {
        self.peek::<G>().map(|_| self.parse()).transpose()
    }

    pub fn parse_to_end<T: ParseToEnd>(self) -> ParseResult<(T, ParserConsumed<'a>)> {
        T::parse_to_end(self)
    }

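    /// Tries to parse a `T` to the end of the token stream on a fork of the
    /// parser; the original parser is never advanced. Diagnostics from the
    /// attempt are appended only when `append_diagnostics` is `true`.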
    pub fn try_parse_to_end<T: ParseToEnd>(
        &mut self,
        append_diagnostics: bool,
    ) -> ParseResult<(T, ParserConsumed<'a>)> {
        let handler = Handler::default();
        let fork = Parser {
            token_trees: self.token_trees,
            full_span: self.full_span.clone(),
            handler: &handler,
            check_double_underscore: self.check_double_underscore,
            experimental: self.experimental,
        };
        let r = T::parse_to_end(fork);
        if append_diagnostics {
            self.handler.append(handler);
        }
        r
    }

    pub fn enter_delimited(
        &mut self,
        expected_delimiter: Delimiter,
    ) -> Option<(Parser<'_, '_>, Span)> {
        match self.token_trees {
            [TokenTree::Group(Group {
                delimiter,
                token_stream,
                span,
            }), rest @ ..]
                if *delimiter == expected_delimiter =>
            {
                self.token_trees = rest;
                let parser = Parser {
                    token_trees: token_stream.token_trees(),
                    full_span: token_stream.span(),
                    handler: self.handler,
                    check_double_underscore: self.check_double_underscore,
                    experimental: self.experimental,
                };
                Some((parser, span.clone()))
            }
            _ => None,
        }
    }

    pub fn is_empty(&self) -> bool {
        self.token_trees.is_empty()
    }

    pub fn check_empty(&self) -> Option<ParserConsumed<'a>> {
        self.is_empty()
            .then_some(ParserConsumed { _priv: PhantomData })
    }

    pub fn debug_tokens(&self) -> &[TokenTree] {
        let len = std::cmp::min(5, self.token_trees.len());
        &self.token_trees[..len]
    }

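    /// Emits an error if a `pub` visibility qualifier is present where none is allowed.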
    pub fn ban_visibility_qualifier(&mut self, vis: &Option<PubToken>) -> ParseResult<()> {
        if let Some(token) = vis {
            return Err(self.emit_error_with_span(
                ParseErrorKind::UnnecessaryVisibilityQualifier {
                    visibility: token.ident(),
                },
                token.span(),
            ));
        }
        Ok(())
    }

    pub fn full_span(&self) -> &Span {
        &self.full_span
    }

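    /// Consumes tokens for as long as they start on the given line.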
    pub fn consume_while_line_equals(&mut self, line: usize) {
        loop {
            let Some(current_token) = self.token_trees.first() else {
                break;
            };

            let current_span = current_token.span();
            let current_span_line = current_span.start_line_col_one_index().line;

            if current_span_line != line {
                break;
            } else {
                self.token_trees = &self.token_trees[1..];
            }
        }
    }

    pub fn has_errors(&self) -> bool {
        self.handler.has_errors()
    }

    pub fn has_warnings(&self) -> bool {
        self.handler.has_warnings()
    }
}

pub struct Peeker<'a> {
    pub token_trees: &'a [TokenTree],
    num_tokens: &'a mut usize,
}

impl<'a> Peeker<'a> {
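    /// Runs `P::peek` over `token_trees`, returning the peeked value together
    /// with the tokens remaining after the ones the peek consumed.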
    pub fn with<P: Peek>(token_trees: &'a [TokenTree]) -> Option<(P, &'a [TokenTree])> {
        let mut num_tokens = 0;
        let peeker = Peeker {
            token_trees,
            num_tokens: &mut num_tokens,
        };
        let value = P::peek(peeker)?;
        Some((value, &token_trees[num_tokens..]))
    }

    pub fn peek_ident(self) -> Result<&'a Ident, Self> {
        match self.token_trees {
            [TokenTree::Ident(ident), ..] => {
                *self.num_tokens = 1;
                Ok(ident)
            }
            _ => Err(self),
        }
    }

    pub fn peek_literal(self) -> Result<&'a Literal, Self> {
        match self.token_trees {
            [TokenTree::Literal(literal), ..] => {
                *self.num_tokens = 1;
                Ok(literal)
            }
            _ => Err(self),
        }
    }

    pub fn peek_punct_kinds(
        self,
        punct_kinds: &[PunctKind],
        not_followed_by: &[PunctKind],
    ) -> Result<Span, Self> {
        let (last_punct_kind, first_punct_kinds) = punct_kinds
            .split_last()
            .unwrap_or_else(|| panic!("peek_punct_kinds called with empty slice"));
        if self.token_trees.len() < punct_kinds.len() {
            return Err(self);
        }
        for (punct_kind, tt) in first_punct_kinds.iter().zip(self.token_trees.iter()) {
            match tt {
                TokenTree::Punct(Punct {
                    kind,
                    spacing: Spacing::Joint,
                    ..
                }) if *kind == *punct_kind => {}
                _ => return Err(self),
            }
        }
        let span_end = match &self.token_trees[punct_kinds.len() - 1] {
            TokenTree::Punct(Punct {
                kind,
                spacing,
                span,
            }) if *kind == *last_punct_kind => match spacing {
                Spacing::Alone => span,
                Spacing::Joint => match &self.token_trees.get(punct_kinds.len()) {
                    Some(TokenTree::Punct(Punct { kind, .. })) => {
                        if not_followed_by.contains(kind) {
                            return Err(self);
                        }
                        span
                    }
                    _ => span,
                },
            },
            _ => return Err(self),
        };
        let span_start = match &self.token_trees[0] {
            TokenTree::Punct(Punct { span, .. }) => span,
            _ => unreachable!(),
        };
        let span = Span::join(span_start.clone(), span_end);
        *self.num_tokens = punct_kinds.len();
        Ok(span)
    }

    pub fn peek_delimiter(self) -> Result<Delimiter, Self> {
        match self.token_trees {
            [TokenTree::Group(Group { delimiter, .. }), ..] => {
                *self.num_tokens = 1;
                Ok(*delimiter)
            }
            _ => Err(self),
        }
    }

    pub fn peek_doc_comment(self) -> Result<&'a DocComment, Self> {
        match self.token_trees {
            [TokenTree::DocComment(doc_comment), ..] => {
                *self.num_tokens = 1;
                Ok(doc_comment)
            }
            _ => Err(self),
        }
    }
}

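/// Returned by the `*_with_recovery` parsing methods when parsing fails.
///
/// It holds the forked parser state and the emitted error, and provides
/// strategies for discarding the garbled tokens and resynchronizing the
/// original parser so that parsing can continue.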
pub struct ParseRecoveryStrategies<'original, 'a, 'e> {
    original: RefCell<&'original mut Parser<'a, 'e>>,
    handler: Handler,
    fork_token_trees: &'a [TokenTree],
    fork_full_span: Span,
    error: ErrorEmitted,
}

impl<'a> ParseRecoveryStrategies<'_, 'a, '_> {
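    /// Consumes the remaining tokens on the line where parsing stopped and, if no
    /// error has been recorded yet, emits `kind` spanning the discarded tokens.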
    pub fn recover_at_next_line_with_fallback_error(
        &self,
        kind: ParseErrorKind,
    ) -> (Box<[Span]>, ErrorEmitted) {
        let line = if self.fork_token_trees.is_empty() {
            None
        } else {
            self.last_consumed_token()
                .map(|x| x.span())
                .or_else(|| self.fork_token_trees.first().map(|x| x.span()))
                .map(|x| x.start_line_col_one_index().line)
        };

        self.start(|p| {
            if let Some(line) = line {
                p.consume_while_line_equals(line);
            }
            if !p.has_errors() {
                p.emit_error_with_span(kind, self.diff_span(p));
            }
        })
    }

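    /// Runs the recovery closure `f` on a parser positioned where the fork
    /// stopped, then syncs the original parser to the recovered position and
    /// returns the spans of the discarded tokens together with the original error.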
    pub fn start<'this>(
        &'this self,
        f: impl FnOnce(&mut Parser<'a, 'this>),
    ) -> (Box<[Span]>, ErrorEmitted) {
        let mut p = {
            let original = self.original.borrow();
            Parser {
                token_trees: self.fork_token_trees,
                full_span: self.fork_full_span.clone(),
                handler: &self.handler,
                check_double_underscore: original.check_double_underscore,
                experimental: original.experimental,
            }
        };
        f(&mut p);
        self.finish(p)
    }

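    /// Returns the first not-yet-consumed token of the original parser, i.e. the
    /// token at which the failed parse began.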
    pub fn starting_token(&self) -> &GenericTokenTree<TokenStream> {
        let original = self.original.borrow();
        &original.token_trees[0]
    }

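    /// Returns the last token the fork consumed before failing, i.e. the token
    /// immediately preceding the fork's current head in the original token stream.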
    pub fn last_consumed_token(&self) -> Option<&GenericTokenTree<TokenStream>> {
        let fork_head_span = self.fork_token_trees.first()?.span();

        let original = self.original.borrow();
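        // Find where the fork's head sits inside the original token stream.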
        let fork_pos = original
            .token_trees
            .iter()
            .position(|x| x.span() == fork_head_span)?;

        let before_fork_pos = fork_pos.checked_sub(1)?;
        original.token_trees.get(before_fork_pos)
    }

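    /// Returns the span covering all tokens of the original parser that the
    /// recovery parser `p` has already moved past, i.e. the tokens that will be
    /// reported as garbage.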
    pub fn diff_span<'this>(&self, p: &Parser<'a, 'this>) -> Span {
        let original = self.original.borrow_mut();

        let qty = if let Some(first_fork_tt) = p.token_trees.first() {
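            // Count how many original tokens precede the recovery parser's head.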
            original
                .token_trees
                .iter()
                .position(|tt| tt.span() == first_fork_tt.span())
                .expect("not finding fork head")
        } else {
            original.token_trees.len()
        };

        let garbage: Vec<_> = original
            .token_trees
            .iter()
            .take(qty)
            .map(|x| x.span())
            .collect();

        Span::join_all(garbage)
    }

    fn finish(&self, p: Parser<'a, '_>) -> (Box<[Span]>, ErrorEmitted) {
        let mut original = self.original.borrow_mut();

        let qty = if let Some(first_fork_tt) = p.token_trees.first() {
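            // Count how many original tokens the recovery parser moved past.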
            original
                .token_trees
                .iter()
                .position(|tt| tt.span() == first_fork_tt.span())
                .expect("not finding fork head")
        } else {
            original.token_trees.len()
        };

        let garbage: Vec<_> = original
            .token_trees
            .iter()
            .take(qty)
            .map(|x| x.span())
            .collect();

        original.token_trees = p.token_trees;
        original.handler.append(self.handler.clone());

        (garbage.into_boxed_slice(), self.error)
    }
}

pub struct ParserConsumed<'a> {
    _priv: PhantomData<fn(&'a ()) -> &'a ()>,
}

pub type ParseResult<T> = Result<T, ErrorEmitted>;