// combine_proc_macro — input.rs

1//! Wrappers and transforms to around `proc_macro` types to implement `combine` traits.
2
3use combine::{Positioned, StreamOnce};
4use combine::stream::StreamErrorFor;
5use combine::stream::easy::Error;
6use combine::stream::buffered::BufferedStream;
7use combine::stream::state::{DefaultPositioned, Positioner, State};
8use proc_macro::{TokenStream as TokenStreamBuiltin};
9use proc_macro2::{Delimiter, Ident, Punct, Literal, Span, TokenStream, TokenTree};
10use proc_macro2::token_stream::IntoIter;
11use std::cmp::Ordering;
12use std::convert::TryFrom;
13
/// Token source for `combine` parsers that flattens nested groups on the fly.
pub struct Input {
    // Stack of token iterators. Each entry pairs a stream of tokens with the
    // close-delimiter token to emit once that iterator is exhausted (`None`
    // for the top-level stream and for delimiter-less groups).
    source_stack: Vec<(IntoIter, Option<Token>)>,
    // Number of tokens consumed so far; used as the stream position.
    source_pos: usize,
}
18
19impl Input {
20    /// Returns `true` if the input contains no more tokens.
21    pub fn is_empty(&self) -> bool {
22        self.source_stack.is_empty()
23    }
24
25    /// Wraps the input in a BufferedStream that supports lookahead grammars.
26    ///
27    /// By default `combine` produces an LL(1) parser, unless the `attempt`
28    /// combinator is used, so `1` is the recommended default value for `k`.
29    pub fn with_lookahead(self, k: usize) -> BufferedStream<State<Input, SpanPosition>> {
30        BufferedStream::new(State::new(self), k)
31    }
32
33    fn next(&mut self) -> Option<Token> {
34        if self.source_stack.is_empty() {
35            return None;
36        }
37
38        while !self.source_stack.is_empty() {
39            let next = self.source_stack.last_mut().and_then(|(iter, _)| iter.next());
40            let next = match next {
41                Some(tt) => self.ungroup(tt),
42                None => None,
43            };
44            if let Some(tok) = next {
45                return Some(tok);
46            }
47            let (_empty, close) = self.source_stack.pop().unwrap();
48            if let Some(tok) = close {
49                return Some(tok);
50            }
51        }
52
53        // We're finally at the end of input Bob...
54        None
55    }
56
57    fn ungroup(&mut self, tt: TokenTree) -> Option<Token> {
58        match tt {
59            TokenTree::Punct(tok) => Some(Token::Punct(tok)),
60            TokenTree::Ident(tok) => Some(Token::Ident(tok)),
61            TokenTree::Literal(tok) => Some(Token::Literal(tok)),
62            TokenTree::Group(group) => {
63                let (open, close) = match group.delimiter() {
64                    Delimiter::Parenthesis => (Some('('), Some(')')),
65                    Delimiter::Brace => (Some('{'), Some('}')),
66                    Delimiter::Bracket => (Some('['), Some(']')),
67                    Delimiter::None => (None, None),
68                };
69                self.source_stack.push((
70                    group.stream().into_iter(),
71                    close.map(|c| Token::Delim(c, group.span())),
72                ));
73                match open {
74                    Some(c) => Some(Token::Delim(c, group.span())),
75                    None => None,
76                }
77            }
78        }
79    }
80}
81impl From<TokenStreamBuiltin> for Input {
82    fn from(stream: TokenStreamBuiltin) -> Input {
83        let stream: TokenStream = stream.to_string().parse().unwrap();
84        Input {
85            source_stack: vec![(stream.into_iter(), None)],
86            source_pos: 0,
87        }
88    }
89}
90impl From<TokenStream> for Input {
91    fn from(stream: TokenStream) -> Input {
92        Input {
93            source_stack: vec![(stream.into_iter(), None)],
94            source_pos: 0,
95        }
96    }
97}
impl From<Input> for TokenStream {
    /// Collects the unconsumed tokens back into a `TokenStream`, flattening
    /// remaining group contents into the top level from innermost outwards.
    ///
    /// NOTE(review): a pending `close` token is always `Token::Delim` (see
    /// `Input::ungroup`), and `TokenTree::try_from` returns `Err` for
    /// `Delim`, so this `unwrap` panics whenever a close delimiter is still
    /// pending — i.e. whenever the conversion happens mid-group. Confirm
    /// callers only convert at group boundaries, or handle `Delim`
    /// explicitly here.
    fn from(input: Input) -> TokenStream {
        let mut rem = TokenStream::new();
        for (source, close) in input.source_stack.into_iter().rev() {
            rem.extend(source);
            rem.extend(close.into_iter().map(|tok| TokenTree::try_from(tok).unwrap()));
        }
        rem
    }
}
108
109impl StreamOnce for Input {
110    type Item = Token;
111    type Range =  Self::Item;
112    type Position = usize;
113    type Error = Error<Self::Item, Self::Range>;
114
115    fn uncons(&mut self) -> Result<Self::Item, StreamErrorFor<Self>> {
116        match self.next() {
117            None => Err(Error::end_of_input()),
118            Some(tok) => {
119                self.source_pos += 1;
120                Ok(tok)
121            }
122        }
123    }
124
125    fn is_partial(&self) -> bool {
126        false
127    }
128}
129
impl Positioned for Input {
    /// Current position: the number of tokens consumed so far.
    fn position(&self) -> Self::Position {
        self.source_pos
    }
}
135
// Lets `State::new(input)` select `SpanPosition` as the positioner
// automatically via `combine`'s `DefaultPositioned` machinery.
impl DefaultPositioned for Input {
    type Positioner = SpanPosition;
}
139
/// Stream position that tracks both a token offset and the current `Span`.
#[derive(Clone, Debug)]
pub struct SpanPosition {
    // Number of tokens consumed; drives ordering and equality.
    pos: usize,
    // Span of the most recently consumed token (call-site span initially).
    span: Span,
}
145
impl SpanPosition {
    /// Returns the span of the most recently consumed token.
    ///
    /// NOTE(review): despite the `into_` prefix this borrows `self` and
    /// copies the span; by Rust naming convention it would be `span()` or
    /// `to_span()`. Renaming would break the public API, so it stays.
    pub fn into_span(&self) -> Span {
        self.span
    }
}
151
152impl Default for SpanPosition {
153    fn default() -> Self {
154        SpanPosition {
155            pos: 0,
156            span: Span::call_site()
157        }
158    }
159}
160
// Ordering and equality for `SpanPosition` consider only `pos`; the `span`
// field is ignored.
impl PartialOrd for SpanPosition {
    fn partial_cmp(&self, other: &SpanPosition) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for SpanPosition {
    fn cmp(&self, other: &SpanPosition) -> Ordering {
        // Compare by stream offset only.
        self.pos.cmp(&other.pos)
    }
}

impl PartialEq for SpanPosition {
    fn eq(&self, other: &SpanPosition) -> bool {
        self.pos == other.pos
    }
}

impl Eq for SpanPosition {}
180
181impl Positioner<Token> for SpanPosition {
182    type Position = Self;
183
184    #[inline(always)]
185    fn position(&self) -> Self::Position {
186        self.clone()
187    }
188
189    #[inline]
190    fn update(&mut self, item: &Token) {
191        self.pos += 1;
192        self.span = item.span();
193    }
194}
195
/// A single flattened token produced by `Input`.
#[derive(Clone, Debug)]
pub enum Token {
    /// A group's open or close delimiter character (`(`/`)`, `{`/`}`,
    /// `[`/`]`) together with the group's span.
    Delim(char, Span),
    /// A punctuation character.
    Punct(Punct),
    /// An identifier or keyword.
    Ident(Ident),
    /// A literal (string, number, character, ...).
    Literal(Literal),
}
203
204impl Token {
205    pub fn to_char(&self) -> Option<char> {
206        match self {
207            Token::Delim(ch, _) => Some(*ch),
208            Token::Punct(punct) => Some(punct.as_char()),
209            _ => None,
210        }
211    }
212
213    pub fn span(&self) -> Span {
214        match self {
215            Token::Delim(_, span) => span.clone(),
216            Token::Punct(tok) => tok.span(),
217            Token::Ident(tok) => tok.span(),
218            Token::Literal(tok) => tok.span(),
219        }
220    }
221}
222
223impl PartialEq for Token {
224    fn eq(&self, other: &Self) -> bool {
225        match (self, other) {
226            (Token::Delim(l, _), Token::Delim(r, _)) => l == r,
227            (Token::Punct(l), Token::Punct(r)) => l.as_char() == r.as_char(),
228            (Token::Ident(l), Token::Ident(r)) => l.to_string() == r.to_string(),
229            (Token::Literal(l), Token::Literal(r)) => l.to_string() == r.to_string(),
230            _ => false,
231        }
232    }
233}
234
235impl TryFrom<Token> for TokenTree {
236    type Error = ();
237
238    fn try_from(tok: Token) -> Result<TokenTree, Self::Error> {
239        match tok {
240            Token::Delim(_, _) => Err(()),
241            Token::Punct(tok) => Ok(TokenTree::Punct(tok)),
242            Token::Ident(tok) => Ok(TokenTree::Ident(tok)),
243            Token::Literal(tok) => Ok(TokenTree::Literal(tok)),
244        }
245    }
246}