moore_vhdl_syntax/parser/basic.rs

// Copyright (c) 2016-2021 Fabian Schuiki

//! This module implements a basic parser that accepts tokens from the VHDL
//! lexer and emits errors back to it.

6use crate::lexer::token::Token;
7use crate::lexer::Lexer;
8use crate::parser::TokenStream;
9use moore_common::errors::*;
10use moore_common::grind::Grinder;
11use moore_common::source::*;
12use std::collections::VecDeque;
13
/// A basic token-stream parser over the VHDL lexer.
///
/// Pulls tokens from a [`Lexer`], buffers them in a lookahead queue, and
/// tracks the span of the last consumed token plus the worst severity of
/// any diagnostic emitted through it.
pub struct BasicParser<T>
where
    T: Grinder<Item = Option<u8>, Error = DiagBuilder2>,
{
    /// The lexer tokens are pulled from.
    input: Lexer<T>,
    /// Lookahead buffer of not-yet-consumed tokens; once the lexer is
    /// exhausted, the queue ends in a `Token::Eof`.
    queue: VecDeque<Spanned<Token>>,
    /// Span of the most recently consumed token (`INVALID_SPAN` until the
    /// first `bump`).
    last_span: Span,
    /// Highest severity among the diagnostics emitted so far.
    severity: Severity,
    /// Number of tokens consumed so far.
    consumed: usize,
}
24
25impl<T> TokenStream<Token> for BasicParser<T>
26where
27    T: Grinder<Item = Option<u8>, Error = DiagBuilder2>,
28{
29    fn peek(&mut self, offset: usize) -> Spanned<Token> {
30        self.ensure_queue_filled(offset);
31        if offset < self.queue.len() {
32            self.queue[offset]
33        } else {
34            *self
35                .queue
36                .back()
37                .expect("At least an Eof token should be in the queue")
38        }
39    }
40
41    fn bump(&mut self) {
42        if self.queue.is_empty() {
43            self.ensure_queue_filled(1);
44        }
45        if let Some(Spanned { value, span }) = self.queue.pop_front() {
46            assert!(value != Token::Eof);
47            self.last_span = span;
48            self.consumed += 1;
49        }
50    }
51
52    fn consumed(&self) -> usize {
53        self.consumed
54    }
55
56    fn last_span(&self) -> Span {
57        self.last_span
58    }
59
60    fn emit(&mut self, diag: DiagBuilder2) {
61        use std::cmp::max;
62        self.severity = max(self.severity, diag.get_severity());
63        self.input.emit(diag);
64    }
65
66    fn severity(&self) -> Severity {
67        self.severity
68    }
69}
70
71impl<T> BasicParser<T>
72where
73    T: Grinder<Item = Option<u8>, Error = DiagBuilder2>,
74{
75    /// Create a new parser which consumes input from the given lexer.
76    pub fn new(input: Lexer<T>) -> BasicParser<T> {
77        BasicParser {
78            input: input,
79            queue: VecDeque::new(),
80            last_span: INVALID_SPAN,
81            severity: Severity::Note,
82            consumed: 0,
83        }
84    }
85
86    /// Ensure that either the end of file has been reached, or at least
87    /// `min_tokens` tokens are in the queue.
88    fn ensure_queue_filled(&mut self, min_tokens: usize) {
89        if let Some(&Spanned {
90            value: Token::Eof, ..
91        }) = self.queue.back()
92        {
93            return;
94        }
95        while self.queue.len() <= min_tokens {
96            match self.input.next() {
97                Some(t) => self.queue.push_back(t),
98                None => {
99                    self.queue
100                        .push_back(Spanned::new(Token::Eof, self.last_span.end().into()));
101                    break;
102                }
103            }
104        }
105    }
106}