use erl_tokenize::LexicalToken;
use std::marker::PhantomData;

use crate::traits::{Preprocessor, TokenRead};
use crate::{Error, Result};

/// A [`TokenRead`] implementation that wraps a preprocessed token stream
/// and supports pushing tokens back for lookahead.
#[derive(Debug)]
pub struct TokenReader<T, E> {
    inner: T,
    unread: Vec<LexicalToken>, // pushed-back tokens, served in LIFO order
    _phantom: PhantomData<E>,
}
impl<T, E> TokenReader<T, E>
where
    T: Iterator<Item = std::result::Result<LexicalToken, E>> + Preprocessor,
    Error: From<E>,
{
    /// Makes a new `TokenReader` that reads tokens from `inner`.
    pub fn new(inner: T) -> Self {
        TokenReader {
            inner,
            unread: Vec::new(),
            _phantom: PhantomData,
        }
    }
    /// Returns a reference to the inner token stream.
    pub fn inner(&self) -> &T {
        &self.inner
    }
    /// Returns a mutable reference to the inner token stream.
    pub fn inner_mut(&mut self) -> &mut T {
        &mut self.inner
    }
    /// Consumes this reader and returns the inner token stream.
    pub fn into_inner(self) -> T {
        self.inner
    }
}
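// `Preprocessor` is implemented by delegating to the wrapped stream, so
// macro definitions and undefinitions take effect in `inner` itself.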
impl<T, E> Preprocessor for TokenReader<T, E>
where
    T: Preprocessor,
{
    fn define_macro(&mut self, name: &str, replacement: Vec<LexicalToken>) {
        self.inner.define_macro(name, replacement);
    }
    fn undef_macro(&mut self, name: &str) {
        self.inner.undef_macro(name);
    }
}
impl<T, E> TokenRead for TokenReader<T, E>
where
    T: Iterator<Item = std::result::Result<LexicalToken, E>> + Preprocessor,
    Error: From<E>,
{
    fn try_read_token(&mut self) -> Result<Option<LexicalToken>> {
        // Serve pushed-back tokens first; otherwise pull the next token
        // from the inner stream. `None` means the end of input.
        match self.unread.pop().map(Ok).or_else(|| self.inner.next()) {
            None => Ok(None),
            Some(Err(e)) => Err(e.into()),
            Some(Ok(t)) => Ok(Some(t)),
        }
    }
    fn unread_token(&mut self, token: LexicalToken) {
        self.unread.push(token);
    }
}
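
// A usage sketch in test form (illustrative, not part of the crate's API).
// It exercises the read/unread round trip through `TokenReader`.
// `NoopPreprocessor` is a hypothetical stand-in that satisfies the
// `Preprocessor` bound without expanding anything; the test further assumes
// that `Error: From<erl_tokenize::Error>` holds in this crate and that
// `erl_tokenize::Lexer` yields `Result<LexicalToken, erl_tokenize::Error>`
// items, as in erl_tokenize's documented API.
#[cfg(test)]
mod tests {
    use super::*;
    use erl_tokenize::Lexer;

    // A pass-through token stream that ignores macro directives, just
    // enough to satisfy the `Preprocessor` trait bound.
    struct NoopPreprocessor<I>(I);
    impl<I: Iterator> Iterator for NoopPreprocessor<I> {
        type Item = I::Item;
        fn next(&mut self) -> Option<Self::Item> {
            self.0.next()
        }
    }
    impl<I> Preprocessor for NoopPreprocessor<I> {
        fn define_macro(&mut self, _name: &str, _replacement: Vec<LexicalToken>) {}
        fn undef_macro(&mut self, _name: &str) {}
    }

    #[test]
    fn read_and_unread_round_trip() -> Result<()> {
        let lexer = Lexer::new("foo() -> ok.");
        let mut reader = TokenReader::new(NoopPreprocessor(lexer));

        // The first read pulls a token from the inner stream.
        let token = reader.try_read_token()?.expect("a token");
        let before = format!("{:?}", token);

        // A pushed-back token is served again before the inner stream
        // is consulted.
        reader.unread_token(token);
        let after = format!("{:?}", reader.try_read_token()?.expect("a token"));
        assert_eq!(before, after);
        Ok(())
    }
}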