use crate::lexer::token::LexToken;
use crate::macros::EntityMacroDefinition;
use crate::syntax::NonEmptySyntax;
use crate::syntax::Syntax;
use lark_collections::{FxIndexMap, Seq};
use lark_debug_with::DebugWith;
use lark_entity::EntityTables;
use lark_error::{Diagnostic, ErrorReported, WithError};
use lark_span::{FileName, Span, Spanned};
use lark_string::{GlobalIdentifier, GlobalIdentifierTables, Text};
use std::sync::Arc;
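
/// Parser for a single file's token stream. It tracks the current
/// lookahead token (transparently skipping whitespace and comments) and
/// accumulates the diagnostics that are returned in the final
/// `WithError` result.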
pub struct Parser<'parse> {
/// The source file name for the file being parsed; used in error reporting
file_name: FileName,
/// Tables for interning global identifiers; extracted from the database.
global_identifier_tables: &'parse GlobalIdentifierTables,
/// Tables for interning entities; extracted from the database.
entity_tables: &'parse EntityTables,
/// Set of macro definitions in scope.
entity_macro_definitions: &'parse FxIndexMap<GlobalIdentifier, Arc<dyn EntityMacroDefinition>>,
/// Complete input; needed to extract the full text of tokens.
input: &'parse Text,
/// List of all tokens.
tokens: &'parse Seq<Spanned<LexToken, FileName>>,
/// Index of the token *after* the current token.
next_lookahead_token: usize,
/// Span of the last consumed token (ignoring whitespace and
/// comments); see the `last_span()` method below.
last_span: Span<FileName>,
/// Current lookahead token.
lookahead_token: Spanned<LexToken, FileName>,
/// Errors reported during parsing; these will be converted into
    /// the final `WithError` result.
errors: Vec<Diagnostic>,
}
impl Parser<'parse> {
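    /// Creates a parser positioned at the first non-whitespace,
    /// non-comment token at or after `start_token`. A minimal
    /// construction sketch (the variable names are illustrative):
    ///
    /// ```ignore
    /// let parser = Parser::new(file_name, db, macro_defs, &input, &tokens, 0);
    /// ```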
crate fn new(
file_name: FileName,
db: &'parse (impl AsRef<GlobalIdentifierTables> + AsRef<EntityTables> + ?Sized),
entity_macro_definitions: &'parse FxIndexMap<
GlobalIdentifier,
Arc<dyn EntityMacroDefinition>,
>,
input: &'parse Text,
tokens: &'parse Seq<Spanned<LexToken, FileName>>,
start_token: usize,
) -> Self {
// Subtle: the start token may be whitespace etc. So we actually have to invoke
// `advance_next_token` to advance.
let mut next_lookahead_token = start_token;
let lookahead_token =
advance_next_token(input, tokens, &mut next_lookahead_token, file_name);
Parser {
file_name,
global_identifier_tables: db.as_ref(),
entity_tables: db.as_ref(),
entity_macro_definitions,
input,
tokens,
next_lookahead_token,
lookahead_token,
errors: vec![],
last_span: Span::initial(file_name),
}
}
/// Clones the parser to produce a "checkpoint". You can go on
/// using this checkpoint, but any changes to the current token
/// (as well as any reported errors!) will be ignored and will not
/// affect the main parser. This is intended to enable "limited
/// lookahead" of more than one token, e.g. skipping upcoming
/// newlines.
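    ///
    /// A sketch of looking past newlines without committing, assuming a
    /// hypothetical `Syntax` implementation `OpenBrace`:
    ///
    /// ```ignore
    /// let mut probe = parser.checkpoint();
    /// probe.skip_newlines();
    /// if probe.test(OpenBrace) {
    ///     // commit: replay the skip on the real parser
    ///     parser.skip_newlines();
    /// }
    /// // dropping `probe` discards its position and its errors
    /// ```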
crate fn checkpoint(&self) -> Self {
Parser {
errors: vec![],
..*self
}
}
/// Parse all the instances of `syntax` that we can, stopping only
/// at EOF. Returns a vector of the results plus any parse errors
/// we encountered.
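    ///
    /// ```ignore
    /// // Hypothetical `EntitySyntax: NonEmptySyntax`; parse errors are
    /// // carried alongside the parsed values, not returned as `Err`.
    /// let WithError { value: entities, errors } = parser.parse_until_eof(EntitySyntax);
    /// ```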
crate fn parse_until_eof<S>(mut self, mut syntax: S) -> WithError<Seq<S::Data>>
where
S: NonEmptySyntax<'parse>,
{
let mut entities = vec![];
loop {
self.skip_newlines();
if self.is(LexToken::EOF) {
break;
}
if self.test(&mut syntax) {
match self.expect(&mut syntax) {
Ok(e) => entities.push(e),
Err(ErrorReported(_)) => (),
}
} else {
let Spanned { span, .. } = self.shift();
self.report_error("unexpected character", span);
}
}
self.into_with_error(Seq::from(entities))
}
crate fn into_with_error<T>(self, value: T) -> WithError<T> {
WithError {
value,
errors: self.errors,
}
}
/// Consume the current token and load the next one. Return the
/// old token.
crate fn shift(&mut self) -> Spanned<LexToken, FileName> {
assert!(!self.is(LexToken::EOF));
self.last_span = self.lookahead_token.span;
let last_token = self.lookahead_token;
self.lookahead_token = advance_next_token(
self.input,
self.tokens,
&mut self.next_lookahead_token,
self.file_name,
);
log::trace!(
"shift: new lookahead token = {}, consumed token = {}",
self.lookahead_token.debug_with(self),
last_token.debug_with(self),
);
last_token
}
/// Extract the complete input
crate fn input(&self) -> &'parse Text {
self.input
}
    /// Extract the set of macro definitions in scope.
crate fn entity_macro_definitions(
&self,
) -> &'parse FxIndexMap<GlobalIdentifier, Arc<dyn EntityMacroDefinition>> {
self.entity_macro_definitions
}
/// Peek at the index of the current lookahead token in the token
/// list. If this is the EOF token, then the index returned is the
/// length of the token list.
crate fn peek_index(&self) -> usize {
self.next_lookahead_token - 1
}
/// Peek at the current lookahead token.
crate fn peek(&self) -> Spanned<LexToken, FileName> {
self.lookahead_token
}
/// Span covering the space *in between* the previous token
/// and the current token. This is the span where something
/// elided would go.
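    ///
    /// For example, if the parser has just consumed `fn f` and the
    /// lookahead is `(`, the elided span is the (possibly empty) region
    /// between `f` and `(`.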
crate fn elided_span(&self) -> Span<FileName> {
// FIXME -- what should we do regarding whitespace etc?
Span::new(
self.file_name,
self.last_span.end(),
self.peek_span().start(),
)
}
/// Span of the current lookahead token.
crate fn peek_span(&self) -> Span<FileName> {
self.peek().span
}
/// Span of the last consumed token, ignoring whitespace and
/// comments. This is very handy when constructing the span of
/// things we are looking at. You basically consume tokens until
/// the lookahead tells you that you are at the end, and then you
    /// can look at the `last_span`.
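    ///
    /// A typical pattern, sketched:
    ///
    /// ```ignore
    /// let start_span = parser.peek_span();
    /// // ... shift the tokens making up the construct ...
    /// let full_span = start_span.extended_until_end_of(parser.last_span());
    /// ```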
crate fn last_span(&self) -> Span<FileName> {
self.last_span
}
    /// Peek at the string representation of the current token.
crate fn peek_str(&self) -> &'parse str {
&self.input[self.peek_span()]
}
/// Test if the current token is of the given kind.
crate fn is(&self, kind: LexToken) -> bool {
kind == self.lookahead_token.value
}
/// If at EOF, returns `None`. Otherwise, shifts all remaining
/// tokens out and returns the span that covers them.
crate fn parse_extra_input(&mut self) -> Option<Span<FileName>> {
if self.is(LexToken::EOF) {
return None;
}
let start = self.shift();
while !self.is(LexToken::EOF) {
self.shift();
}
let span = start.span.extended_until_end_of(self.peek_span());
        Some(span)
}
    /// Consumes all subsequent newline tokens, returning true if
    /// at least one newline was found.
crate fn skip_newlines(&mut self) -> bool {
let mut count = 0;
while self.is(LexToken::Newline) {
self.shift();
count += 1;
}
count > 0
}
/// Tests whether the syntax applies at the current point.
crate fn test(&self, mut syntax: impl Syntax<'parse>) -> bool {
log::trace!(
"test({}) at token `{}({})`",
syntax.debug_with(self),
self.lookahead_token.value.debug_with(self),
self.peek_str().debug_with(self),
);
if syntax.test(self) {
log::trace!("test: passed");
true
} else {
false
}
}
    /// Parses a `T` and returns the parsed data; if parsing fails,
    /// reports an error and returns `Err`.
    crate fn expect<T>(&mut self, mut syntax: T) -> Result<T::Data, ErrorReported>
where
T: Syntax<'parse>,
{
log::debug!(
"expect({}) at token `{}({})`",
syntax.debug_with(self),
self.lookahead_token.value.debug_with(self),
self.peek_str().debug_with(self),
);
syntax.expect(self)
}
    /// Parses a piece of syntax if it is present; otherwise returns
    /// `None`. A combination of `test` and `expect`.
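    ///
    /// ```ignore
    /// // Hypothetical optional `WhereClause` syntax:
    /// match parser.parse_if_present(WhereClause) {
    ///     Some(Ok(clause)) => { /* present and parsed */ }
    ///     Some(Err(ErrorReported(_))) => { /* present but malformed; already reported */ }
    ///     None => { /* not present; nothing consumed */ }
    /// }
    /// ```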
crate fn parse_if_present<T>(&mut self, mut syntax: T) -> Option<Result<T::Data, ErrorReported>>
where
T: Syntax<'parse>,
{
log::trace!(
"eat({}) at token `{}({})`",
syntax.debug_with(self),
self.lookahead_token.value.debug_with(self),
self.peek_str().debug_with(self),
);
if self.test(&mut syntax) {
Some(self.expect(syntax))
} else {
None
}
}
/// Report an error with the given message at the given span.
crate fn report_error(
&mut self,
message: impl Into<String>,
span: Span<FileName>,
) -> ErrorReported {
report_error(&mut self.errors, message, span)
}
}
impl AsRef<GlobalIdentifierTables> for Parser<'_> {
fn as_ref(&self) -> &GlobalIdentifierTables {
self.global_identifier_tables
}
}
impl AsRef<EntityTables> for Parser<'_> {
fn as_ref(&self) -> &EntityTables {
self.entity_tables
}
}
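
/// Returns the token at `*next_token`, skipping over whitespace and
/// comment tokens and advancing `*next_token` past everything consumed.
/// Once the tokens are exhausted, returns a synthetic EOF token and
/// leaves `*next_token` at `tokens.len() + 1`.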
fn advance_next_token(
input: &Text,
tokens: &[Spanned<LexToken, FileName>],
next_token: &mut usize,
file_name: FileName,
) -> Spanned<LexToken, FileName> {
loop {
if *next_token >= tokens.len() {
*next_token = tokens.len() + 1;
return Spanned {
value: LexToken::EOF,
span: Span::eof(file_name, input),
};
}
let token = tokens[*next_token];
        // Advance past this token (the EOF case returned above).
*next_token += 1;
// Skip over whitespace/comments automatically (but not
// newlines).
match token.value {
LexToken::Whitespace | LexToken::Comment => continue,
_ => return token,
}
}
}
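
/// Records `message` at `span` in `errors` and returns the
/// `ErrorReported` token proving that a diagnostic was emitted.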
fn report_error(
errors: &mut Vec<Diagnostic>,
message: impl Into<String>,
span: Span<FileName>,
) -> ErrorReported {
let message: String = message.into();
let diagnostic = crate::diagnostic(message, span);
errors.push(diagnostic);
ErrorReported::at_diagnostic(errors.last().unwrap())
}