hemtt_preprocessor/lib.rs

#![deny(clippy::all, clippy::nursery, missing_docs)]
#![warn(clippy::pedantic)]

//! HEMTT - Arma 3 Preprocessor

use std::path::PathBuf;

use hemtt_tokens::whitespace;
use hemtt_tokens::{Symbol, Token};
use ifstate::IfState;

mod context;
mod defines;
mod error;
mod ifstate;
mod map;
mod parse;
mod resolver;

pub use context::{Context, Definition, FunctionDefinition};
pub use defines::{Defines, DefinitionLibrary};
pub use error::Error;
pub use map::{Mapping, Processed};
use peekmore::{PeekMore, PeekMoreIterator};
pub use resolver::resolvers;
pub use resolver::{
    resolvers::{LocalResolver, NoResolver},
    Resolver,
};

/// Preprocesses a config file.
///
/// # Errors
/// Returns an [`Error`] if the file cannot be resolved, parsed, or preprocessed.
///
/// # Panics
/// Panics if `entry` has no parent directory or file name, or if either is not valid UTF-8.
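///
/// # Example
/// A minimal sketch, not compiled as a doc-test; it assumes `LocalResolver` can be
/// created with `LocalResolver::new()` and that the entry file exists on disk:
/// ```ignore
/// use hemtt_preprocessor::{preprocess_file, LocalResolver};
///
/// let resolver = LocalResolver::new();
/// let tokens = preprocess_file("addons/main/config.cpp", &resolver)?;
/// println!("preprocessed into {} tokens", tokens.len());
/// ```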
pub fn preprocess_file<R>(entry: &str, resolver: &R) -> Result<Vec<Token>, Error>
where
    R: Resolver,
{
    let mut context = Context::new(entry.to_string());
    let source = resolver.find_include(
        &context,
        PathBuf::from(entry).parent().unwrap().to_str().unwrap(),
        entry,
        PathBuf::from(entry).file_name().unwrap().to_str().unwrap(),
        vec![Token::builtin(None)],
    )?;
    let mut tokens = crate::parse::parse(entry, &source.1, &None)?;
    let eoi = tokens.pop().unwrap();
    tokens.push(Token::ending_newline(None));
    tokens.push(eoi);
    let mut tokenstream = tokens.into_iter().peekmore();
    root_preprocess(resolver, &mut context, &mut tokenstream, false)
}

/// Preprocesses a config string with no file resolver ([`NoResolver`]).
///
/// # Errors
/// Returns an [`Error`] if the source cannot be parsed or preprocessed.
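///
/// # Example
/// A minimal sketch, not compiled as a doc-test:
/// ```ignore
/// use hemtt_preprocessor::preprocess_string;
///
/// let tokens = preprocess_string("#define VALUE 1\nvalue = VALUE;\n")?;
/// assert!(!tokens.is_empty());
/// ```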
pub fn preprocess_string(source: &str) -> Result<Vec<Token>, Error> {
    let tokens = crate::parse::parse("%anonymous%", source, &None)?;
    let mut context = Context::new(String::from("%anonymous%"));
    let mut tokenstream = tokens.into_iter().peekmore();
    root_preprocess(&NoResolver::new(), &mut context, &mut tokenstream, false)
}

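/// Top-level preprocessing loop: expands directives and macros from the token stream
/// until it is exhausted. `allow_quote` is passed through to `directive_preprocess`
/// so that `#param` stringification is permitted inside macro bodies.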
fn root_preprocess<R>(
    resolver: &R,
    context: &mut Context,
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
    allow_quote: bool,
) -> Result<Vec<Token>, Error>
where
    R: Resolver,
{
    let mut output = Vec::new();
    while let Some(token) = tokenstream.peek() {
        match token.symbol() {
            Symbol::Directive => {
                let token = token.clone();
                output.append(&mut directive_preprocess(
                    resolver,
                    context,
                    tokenstream,
                    allow_quote,
                    token,
                )?);
            }
            Symbol::Comment(_) | Symbol::Whitespace(_) => {
                tokenstream.next();
            }
            Symbol::Slash => {
                if matches!(
                    tokenstream.peek_forward(1).map(Token::symbol),
                    Some(Symbol::Slash)
                ) {
                    whitespace::skip_comment(tokenstream);
                } else if context.ifstates().reading() {
                    output.push(tokenstream.next().unwrap());
                }
            }
            _ => {
                if context.ifstates().reading() {
                    output.append(&mut walk_line(resolver, context, tokenstream)?);
                } else {
                    tokenstream.next();
                }
            }
        }
    }
    Ok(output)
}

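/// Dispatches a `#` directive (`include`, `define`, `undef`, `if`, `ifdef`, `ifndef`,
/// `else`, `endif`). While inside a passing `#if`/`#ifdef` branch only `ifdef`/`ifndef`
/// (pushed as `IfState::PassingChild`), `else`, and `endif` are handled. With
/// `allow_quote` set, an unknown `#word` acts as the stringize operator and is
/// wrapped in double quotes, expanding any macro it names.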
#[allow(clippy::too_many_lines)]
fn directive_preprocess<R>(
    resolver: &R,
    context: &mut Context,
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
    allow_quote: bool,
    from: Token,
) -> Result<Vec<Token>, Error>
where
    R: Resolver,
{
    let Some(token) = tokenstream.peek() else {
        return Err(Error::UnexpectedEOF {
            token: Box::new(from),
        });
    };
    let token = token.clone();
    match token.symbol() {
        Symbol::Directive => {}
        _ => {
            return Err(Error::UnexpectedToken {
                token: Box::new(token.clone()),
                expected: vec![Symbol::Directive],
                trace: context.trace(),
            })
        }
    }
    let mut output = Vec::new();
    tokenstream.next();
    if let Some(token) = tokenstream.next() {
        if let Symbol::Word(command) = token.symbol() {
            match (command.as_str(), context.ifstates().reading()) {
                ("include", true) => {
                    whitespace::skip(tokenstream);
                    context.push(token.clone());
                    output.append(&mut directive_include_preprocess(
                        resolver,
                        context,
                        tokenstream,
                        token,
                    )?);
                    context.pop();
                }
                ("define", true) => {
                    whitespace::skip(tokenstream);
                    directive_define_preprocess(resolver, context, tokenstream, token)?;
                }
                ("undef", true) => {
                    whitespace::skip(tokenstream);
                    directive_undef_preprocess(context, tokenstream, token)?;
                }
                ("if", true) => {
                    whitespace::skip(tokenstream);
                    directive_if_preprocess(context, tokenstream, token)?;
                }
                ("ifdef", true) => {
                    whitespace::skip(tokenstream);
                    directive_ifdef_preprocess(context, tokenstream, true, token)?;
                }
                ("ifndef", true) => {
                    whitespace::skip(tokenstream);
                    directive_ifdef_preprocess(context, tokenstream, false, token)?;
                }
                ("ifdef" | "ifndef", false) => {
                    context.ifstates_mut().push(IfState::PassingChild);
                    whitespace::skip(tokenstream);
                    tokenstream.next();
                    eat_newline(tokenstream, context, &token)?;
                }
                ("else", _) => {
                    context.ifstates_mut().flip();
                    eat_newline(tokenstream, context, &token)?;
                }
                ("endif", _) => {
                    context.ifstates_mut().pop();
                    eat_newline(tokenstream, context, &token)?;
                }
                (_, true) => {
                    if allow_quote {
                        let source = token.source().clone();
                        output.push(Token::new(
                            Symbol::DoubleQuote,
                            source.clone(),
                            Some(Box::new(token.clone())),
                        ));
                        if let Symbol::Word(word) = token.symbol() {
                            if let Some((_source, definition)) = context.get(word, &token) {
                                output.append(
                                    &mut walk_definition(
                                        resolver,
                                        context,
                                        tokenstream,
                                        token.clone(),
                                        definition,
                                    )?
                                    .into_iter()
                                    .filter(|t| t.symbol() != &Symbol::Join)
                                    .collect(),
                                );
                            } else {
                                output.push(token.clone());
                            }
                        } else {
                            output.push(token.clone());
                        }
                        output.push(Token::new(
                            Symbol::DoubleQuote,
                            source,
                            Some(Box::new(token)),
                        ));
                    } else {
                        return Err(Error::UnknownDirective {
                            directive: Box::new(token),
                            trace: context.trace(),
                        });
                    }
                }
                _ => {}
            }
        }
    } else if !allow_quote {
        return Err(Error::UnexpectedEOF {
            token: Box::new(from),
        });
    } else {
        output.push(token);
    }
    Ok(output)
}

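/// Handles `#include`: reads the path between `""`, `''`, or `<>`, resolves it with the
/// `Resolver`, parses the included file, and preprocesses it with `current_file`
/// temporarily switched to the resolved path.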
fn directive_include_preprocess<R>(
    resolver: &R,
    context: &mut Context,
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
    from: Token,
) -> Result<Vec<Token>, Error>
where
    R: Resolver,
{
    let encased_in = match tokenstream.peek().unwrap().symbol() {
        Symbol::DoubleQuote | Symbol::SingleQuote => tokenstream.next().unwrap().symbol().clone(),
        Symbol::LeftAngle => {
            tokenstream.next();
            Symbol::RightAngle
        }
        _ => {
            return Err(Error::UnexpectedToken {
                token: Box::new(tokenstream.peek().unwrap().clone()),
                expected: vec![Symbol::DoubleQuote, Symbol::SingleQuote, Symbol::LeftAngle],
                trace: context.trace(),
            })
        }
    };
    let mut path = String::new();
    let mut path_tokens = Vec::new();
    let mut last = None;
    while let Some(token) = tokenstream.peek() {
        if token.symbol() == &encased_in {
            tokenstream.next();
            break;
        }
        if token.symbol() == &Symbol::Newline {
            return Err(Error::UnexpectedToken {
                token: Box::new(token.clone()),
                expected: vec![encased_in],
                trace: context.trace(),
            });
        }
        path.push_str(token.to_string().as_str());
        path_tokens.push(token.clone());
        last = tokenstream.next();
    }
    if tokenstream.peek().is_none() {
        return Err(Error::UnexpectedEOF {
            token: Box::new(last.unwrap_or_else(|| from.clone())),
        });
    }
    let (pathbuf, mut tokens) = {
        let (resolved_path, source) = resolver.find_include(
            context,
            context.entry(),
            context.current_file(),
            &path,
            path_tokens,
        )?;
        let parsed = crate::parse::parse(
            &resolved_path.display().to_string(),
            &source,
            &Some(Box::new(from)),
        )?;
        (resolved_path, parsed)
    };
    // Remove EOI token
    tokens.pop().unwrap();
    tokens.push(Token::ending_newline(None));
    let mut tokenstream = tokens.into_iter().peekmore();
    let current = context.current_file().clone();
    context.set_current_file(pathbuf.display().to_string());
    let output = root_preprocess(resolver, context, &mut tokenstream, false);
    context.set_current_file(current);
    output
}

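/// Handles `#define`: a `(` immediately after the name starts a function definition,
/// a bare newline creates a unit (flag) definition, and anything else is read as a
/// value definition.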
fn directive_define_preprocess<R>(
    resolver: &R,
    context: &mut Context,
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
    from: Token,
) -> Result<(), Error>
where
    R: Resolver,
{
    let (ident_token, ident) = if let Some(token) = tokenstream.next() {
        match token.symbol() {
            Symbol::Word(ident) => {
                let ident = ident.to_string();
                (token, ident)
            }
            _ => {
                return Err(Error::ExpectedIdent {
                    token: Box::new(token),
                    trace: context.trace(),
                })
            }
        }
    } else {
        return Err(Error::UnexpectedEOF {
            token: Box::new(from),
        });
    };
    let mut skipped = Vec::new();
    if let Some(token) = tokenstream.peek() {
        if let Symbol::Whitespace(_) | Symbol::Comment(_) = token.symbol() {
            skipped = whitespace::skip(tokenstream);
        }
    }
    // check directive type
    if let Some(token) = tokenstream.peek() {
        match (token.symbol(), !skipped.is_empty()) {
            (Symbol::LeftParenthesis, false) => {
                let token = token.clone();
                let args = read_args(resolver, context, tokenstream, &token, false)?;
                whitespace::skip(tokenstream);
                if args.iter().any(|arg| arg.len() != 1) {
                    return Err(Error::DefineMultiTokenArgument {
                        token: Box::new(ident_token),
                        trace: context.trace(),
                    });
                }
                let def = FunctionDefinition::new(
                    args.into_iter()
                        .map(|a| a.first().unwrap().clone())
                        .collect(),
                    directive_define_read_body(tokenstream),
                );
                context.define(ident, ident_token, Definition::Function(def))?;
            }
            (Symbol::Newline, _) => {
                context.define(ident, ident_token, Definition::Unit(skipped))?;
            }
            (_, _) => {
                let val = directive_define_read_body(tokenstream);
                context.define(ident, ident_token, Definition::Value(val))?;
            }
        }
    } else {
        let last = skipped.last().cloned();
        return Err(Error::UnexpectedEOF {
            token: Box::new(last.unwrap_or_else(|| from.clone())),
        });
    }
    Ok(())
}

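/// Handles `#undef`: removes the named definition and expects nothing but whitespace
/// before the end of the line.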
fn directive_undef_preprocess(
    context: &mut Context,
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
    from: Token,
) -> Result<(), Error> {
    if let Some(token) = tokenstream.next() {
        match token.symbol() {
            Symbol::Word(ident) => {
                context.undefine(ident, &token)?;
                whitespace::skip(tokenstream);
                if matches!(tokenstream.peek().unwrap().symbol(), Symbol::Newline) {
                    tokenstream.next();
                } else {
                    return Err(Error::UnexpectedToken {
                        token: Box::new(tokenstream.next().unwrap()),
                        expected: vec![Symbol::Newline],
                        trace: context.trace(),
                    });
                }
            }
            _ => {
                return Err(Error::ExpectedIdent {
                    token: Box::new(token.clone()),
                    trace: context.trace(),
                })
            }
        }
    } else {
        return Err(Error::UnexpectedEOF {
            token: Box::new(from),
        });
    }
    Ok(())
}

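/// Handles `#if IDENT`: the identifier must be defined as a value definition, and the
/// branch is read only when that value's first token is `1`; unit or function
/// definitions and undefined identifiers are errors.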
fn directive_if_preprocess(
    context: &mut Context,
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
    from: Token,
) -> Result<(), Error> {
    let (ident_token, ident) = if let Some(token) = tokenstream.next() {
        match token.symbol() {
            Symbol::Word(ident) => {
                let ident = ident.to_string();
                (token, ident)
            }
            _ => {
                return Err(Error::ExpectedIdent {
                    token: Box::new(token.clone()),
                    trace: context.trace(),
                })
            }
        }
    } else {
        return Err(Error::UnexpectedEOF {
            token: Box::new(from),
        });
    };
    if let Some((_, definition)) = context.get(&ident, &ident_token) {
        if let Definition::Value(tokens) = definition {
            let read = [Symbol::Digit(1), Symbol::Word("1".to_string())]
                .contains(tokens.first().unwrap().symbol());
            context.ifstates_mut().push(if read {
                IfState::ReadingIf
            } else {
                IfState::PassingIf
            });
        } else {
            return Err(Error::IfUnitOrFunction {
                token: Box::new(ident_token),
                trace: context.trace(),
            });
        }
    } else {
        return Err(Error::IfUndefined {
            token: Box::new(ident_token),
            trace: context.trace(),
        });
    }
    eat_newline(tokenstream, context, &ident_token)
}

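/// Handles `#ifdef` (`has == true`) and `#ifndef` (`has == false`): pushes
/// `IfState::ReadingIf` when the check matches, `IfState::PassingIf` otherwise.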
fn directive_ifdef_preprocess(
    context: &mut Context,
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
    has: bool,
    from: Token,
) -> Result<(), Error> {
    let (ident_token, ident) = if let Some(token) = tokenstream.next() {
        match token.symbol() {
            Symbol::Word(ident) => {
                let ident = ident.to_string();
                (token, ident)
            }
            _ => {
                return Err(Error::ExpectedIdent {
                    token: Box::new(token.clone()),
                    trace: context.trace(),
                })
            }
        }
    } else {
        return Err(Error::UnexpectedEOF {
            token: Box::new(from),
        });
    };
    let has = context.has(&ident) == has;
    context.ifstates_mut().push(if has {
        IfState::ReadingIf
    } else {
        IfState::PassingIf
    });
    eat_newline(tokenstream, context, &ident_token)
}

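/// Collects a `#define` body up to the first newline, honouring `\` line continuations
/// (the escape is dropped and the newline kept in the body).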
fn directive_define_read_body(
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
) -> Vec<Token> {
    let mut output: Vec<Token> = Vec::new();
    while let Some(token) = tokenstream.peek() {
        if matches!(token.symbol(), Symbol::Newline) {
            let builtin = Token::builtin(Some(Box::new(token.clone())));
            if output.last().unwrap_or(&builtin).symbol() == &Symbol::Escape {
                output.pop();
                output.push(tokenstream.next().unwrap());
            } else {
                tokenstream.next();
                break;
            }
        } else {
            output.push(tokenstream.next().unwrap());
        }
    }
    output
}

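/// Reads a parenthesised, comma-separated argument list, tracking nested parentheses
/// and double-quoted strings; when `recursive` is set, macros inside the arguments are
/// expanded before the argument is captured.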
#[allow(clippy::too_many_lines)]
fn read_args<R>(
    resolver: &R,
    context: &mut Context,
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
    from: &Token,
    recursive: bool,
) -> Result<Vec<Vec<Token>>, Error>
where
    R: Resolver,
{
    let mut args = Vec::new();
    let mut arg: Vec<Token> = Vec::new();
    if let Some(token) = tokenstream.next() {
        match token.symbol() {
            Symbol::LeftParenthesis => {}
            _ => {
                return Err(Error::UnexpectedToken {
                    token: Box::new(token.clone()),
                    expected: vec![Symbol::LeftParenthesis],
                    trace: context.trace(),
                })
            }
        }
    } else {
        return Err(Error::UnexpectedEOF {
            token: Box::new(from.clone()),
        });
    }
    let mut depth = 0;
    let mut quote = false;
    while let Some(token) = tokenstream.peek() {
        match token.symbol() {
            Symbol::DoubleQuote => {
                quote = !quote;
                arg.push(tokenstream.next().unwrap());
            }
            Symbol::Comma => {
                if quote {
                    arg.push(tokenstream.next().unwrap());
                    continue;
                }
                tokenstream.next();
                while let Symbol::Whitespace(_) = arg.last().unwrap().symbol() {
                    arg.pop();
                }
                args.push(arg);
                arg = Vec::new();
                whitespace::skip(tokenstream);
            }
            Symbol::LeftParenthesis => {
                if quote {
                    arg.push(tokenstream.next().unwrap());
                    continue;
                }
                depth += 1;
                arg.push(tokenstream.next().unwrap());
                whitespace::skip(tokenstream);
            }
            Symbol::RightParenthesis => {
                if quote {
                    arg.push(tokenstream.next().unwrap());
                    continue;
                }
                if depth == 0 {
                    tokenstream.next();
                    if !arg.is_empty() {
                        while let Symbol::Whitespace(_) = arg.last().unwrap().symbol() {
                            arg.pop();
                        }
                    }
                    args.push(arg);
                    break;
                }
                depth -= 1;
                arg.push(tokenstream.next().unwrap());
            }
            Symbol::Word(word) => {
                if quote {
                    arg.push(tokenstream.next().unwrap());
                    continue;
                }
                if recursive {
                    if let Some((source, definition)) = context.get(word, token) {
                        let token = token.clone();
                        tokenstream.next();
                        if definition.is_function()
                            && tokenstream.peek().unwrap().symbol() != &Symbol::LeftParenthesis
                        {
                            arg.push(tokenstream.next().unwrap());
                            continue;
                        }
                        context.push(source);
                        arg.append(&mut walk_definition(
                            resolver,
                            context,
                            tokenstream,
                            token,
                            definition,
                        )?);
                        context.pop();
                        continue;
                    }
                }
                arg.push(tokenstream.next().unwrap());
            }
            Symbol::Newline => {
                let builtin = Token::builtin(Some(Box::new(token.clone())));
                if arg.last().unwrap_or(&builtin).symbol() == &Symbol::Escape {
                    arg.pop();
                }
                arg.push(tokenstream.next().unwrap());
            }
            _ => {
                arg.push(tokenstream.next().unwrap());
            }
        }
    }
    Ok(args)
}

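/// Processes tokens up to and including the next unescaped newline, expanding macros,
/// copying double-quoted strings verbatim, handling nested directives (with
/// stringification allowed), and skipping `//` comments.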
fn walk_line<R>(
    resolver: &R,
    context: &mut Context,
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
) -> Result<Vec<Token>, Error>
where
    R: Resolver,
{
    let mut output = Vec::new();
    while let Some(token) = tokenstream.peek() {
        if matches!(token.symbol(), Symbol::Newline) {
            // check if last token was an escape
            let builtin = Token::builtin(Some(Box::new(token.clone())));
            if output.last().unwrap_or(&builtin).symbol() == &Symbol::Escape {
                output.pop();
                tokenstream.next();
            } else {
                output.push(tokenstream.next().unwrap());
            }
            break;
        }
        match token.symbol() {
            Symbol::Word(word) => {
                if let Some((source, definition)) = context.get(word, token) {
                    let token = token.clone();
                    context.push(source);
                    tokenstream.next();
                    output.append(&mut walk_definition(
                        resolver,
                        context,
                        tokenstream,
                        token,
                        definition,
                    )?);
                    context.pop();
                } else {
                    output.push(tokenstream.next().unwrap());
                }
            }
            Symbol::DoubleQuote => {
                output.push(tokenstream.next().unwrap());
                while let Some(token) = tokenstream.peek() {
                    if matches!(token.symbol(), Symbol::DoubleQuote) {
                        output.push(tokenstream.next().unwrap());
                        break;
                    }
                    output.push(tokenstream.next().unwrap());
                }
            }
            Symbol::Directive => {
                let token = token.clone();
                output.append(&mut directive_preprocess(
                    resolver,
                    context,
                    tokenstream,
                    true,
                    token,
                )?);
            }
            Symbol::Slash => {
                if matches!(
                    tokenstream.peek_forward(1).map(Token::symbol),
                    Some(Symbol::Slash)
                ) {
                    whitespace::skip_comment(tokenstream);
                } else {
                    tokenstream.move_cursor_back().unwrap();
                    output.push(tokenstream.next().unwrap());
                }
            }
            _ => output.push(tokenstream.next().unwrap()),
        }
    }
    Ok(output)
}

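/// Expands a macro usage. Value bodies are re-preprocessed in place; function bodies
/// have each argument preprocessed and bound on a new context stack before expansion;
/// unit definitions cannot be expanded here and produce an error.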
fn walk_definition<R>(
    resolver: &R,
    context: &mut Context,
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
    from: Token,
    definition: Definition,
) -> Result<Vec<Token>, Error>
where
    R: Resolver,
{
    let mut output = Vec::new();
    match definition {
        Definition::Value(tokens) => {
            let parent = Some(Box::new(from));
            let mut tokenstream = tokens
                .into_iter()
                .map(|mut t| {
                    t.set_parent(parent.clone());
                    t
                })
                .collect::<Vec<_>>()
                .into_iter()
                .peekmore();
            while tokenstream.peek().is_some() {
                output.append(&mut root_preprocess(
                    resolver,
                    context,
                    &mut tokenstream,
                    true,
                )?);
            }
        }
        Definition::Function(func) => {
            let args = read_args(resolver, context, tokenstream, &from, true)?;
            if args.len() != func.parameters().len() {
                return Err(Error::FunctionCallArgumentCount {
                    token: Box::new(from),
                    expected: func.parameters().len(),
                    got: args.len(),
                    trace: context.trace(),
                    defines: context.definitions().clone(),
                });
            }
            let mut stack = context.stack(from.clone());
            for (param, arg) in func.parameters().iter().zip(args.into_iter()) {
                let def = Definition::Value(root_preprocess(
                    resolver,
                    &mut stack,
                    &mut arg.into_iter().peekmore(),
                    true,
                )?);
                stack.define(param.word().unwrap().to_string(), param.clone(), def)?;
            }
            let parent = Some(Box::new(from));
            let mut tokenstream = func
                .body()
                .iter()
                .cloned()
                .map(|mut t| {
                    t.set_parent(parent.clone());
                    t
                })
                .collect::<Vec<_>>()
                .into_iter()
                .peekmore();
            while tokenstream.peek().is_some() {
                output.append(&mut root_preprocess(
                    resolver,
                    &mut stack,
                    &mut tokenstream,
                    true,
                )?);
            }
        }
        Definition::Unit(skipped) => {
            return Err(Error::ExpectedFunctionOrValue {
                token: Box::new(from),
                trace: context.trace(),
                skipped,
            });
        }
    }
    Ok(output)
}

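/// Skips optional whitespace and consumes the newline that terminates a directive,
/// erroring on any other token or on end of input.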
fn eat_newline(
    tokenstream: &mut PeekMoreIterator<impl Iterator<Item = Token>>,
    context: &mut Context,
    from: &Token,
) -> Result<(), Error> {
    let skipped = whitespace::skip(tokenstream);
    if let Some(token) = tokenstream.peek() {
        if matches!(token.symbol(), Symbol::Newline) {
            tokenstream.next();
        } else {
            return Err(Error::UnexpectedToken {
                token: Box::new(token.clone()),
                expected: vec![Symbol::Newline],
                trace: context.trace(),
            });
        }
    } else {
        let last = skipped.last().cloned();
        return Err(Error::UnexpectedEOF {
            token: Box::new(last.unwrap_or_else(|| from.clone())),
        });
    }
    Ok(())
}