proc_macro_utils/
parser.rs

1use std::ops::{Bound, RangeBounds};
2use std::str::FromStr;
3use std::{iter, mem};
4
5#[cfg(doc)]
6use proc_macro2::Spacing;
7use proc_macro2::{token_stream, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
8use smallvec::{smallvec, SmallVec};
9
10use crate::{Delimited, TokenStream2Ext, TokenTree2Ext, TokenTreeLiteral, TokenTreePunct};
11
// TODO move implementation in a trait implemented on both
// Peekable<token_stream::IntoIter>s
/// Trait that allows to peek a constant number of tokens.
pub trait Peeker {
    /// Number of tokens this peeker checks.
    fn len(&self) -> usize;

    /// Tests whether the tokens match this peeker.
    ///
    /// # Panics
    ///
    /// Implementations can panic if `tokens.len() < self.len()`.
    #[must_use]
    fn peek(self, tokens: &[TokenTree]) -> bool;
}
27
28impl<T: FnOnce(&TokenTree) -> bool> Peeker for T {
29    fn len(&self) -> usize {
30        1
31    }
32
33    #[must_use]
34    fn peek(self, parser: &[TokenTree]) -> bool {
35        self(&parser[0])
36    }
37}
38
/// Implements [`Peeker`] for tuples of single-token predicates, so e.g.
/// `(is_minus, is_greater_than)` peeks two consecutive tokens at once.
///
/// Input shape: `(T1 0, T2 1, ...), len;` — predicate type idents paired with
/// their tuple indices, followed by the tuple arity as a literal.
macro_rules! impl_peeker {
    ($(($($T:ident $idx:tt),+$(,)?),$len:literal;)*) => {
        $(
            impl<$($T: FnOnce(&TokenTree) -> bool),+> Peeker for ($($T,)+) {
                fn len(&self) -> usize { $len }
                fn peek(self, parser: &[TokenTree]) -> bool {
                    // Every predicate must accept the token at its own index.
                    $(self.$idx(&parser[$idx]))&&+
                }
            }
        )*
    };
}
51
// Implement `Peeker` for predicate tuples of arity 1 through 3.
impl_peeker![
    (T1 0,), 1;
    (T1 0, T2 1), 2;
    (T1 0, T2 1, T3 2), 3;
];
57
/// [`Peeker`] that unconditionally matches any `n` tokens; used to implement
/// the count-based methods (`next_n`, `peek_range`, ...).
struct PeekLen(usize);

impl Peeker for PeekLen {
    fn len(&self) -> usize {
        self.0
    }

    // Always matches: only token *presence* is checked (by the caller filling
    // the peek buffer), not token contents.
    fn peek(self, _: &[TokenTree]) -> bool {
        true
    }
}
69
/// Wrapper for [`TokenStream::into_iter`] allowing not only to iterate on
/// tokens but also to parse simple structures like types or expressions, though
/// it does not make any claims about their correctness.
///
/// ```
/// # use proc_macro2::TokenStream;
/// # use proc_macro_utils::{TokenParser, assert_tokens};
/// # use quote::quote;
/// let mut token_parser = TokenParser::new(quote! {a + b, c});
/// assert_tokens!(token_parser.next_expression().unwrap(), { a + b });
/// ```
///
/// # Construction
///
/// In most cases use [`new()`](TokenParser::new) to avoid specifying the
/// generics. To change the on-stack size of the peek-buffer use
/// [`new_generic()`](TokenParser::new_generic) or
/// [`From::from`](#impl-From<T>-for-TokenParser<I,+PEEKER_LEN>).
///
/// # Peeking
///
/// The `TokenParser` allows peeking an arbitrary amount of tokens using
/// [`peek_n()`](Self::peek_n) and the token specific variants. This uses a
/// [`SmallVec`] with its capacity specified via `PEEKER_LEN` (default is 6).
/// This means peeking up to `6` tokens ahead happens without heap allocation.
/// Token groups can need up to `3` tokens of additional space, e.g.
/// [`peek_n_tt_dot_dot_eq()`](Self::peek_n_tt_dot_dot_eq) can, with the
/// default `PEEKER_LEN`, be called allocation-free with `n` up to `3`, and
/// [`peek_n_tt_plus_eq()`](Self::peek_n_tt_plus_eq) with `n` up to `4`.
///
/// **Warning**: Setting `PEEKER_LEN = 0` means even
/// [`is_empty()`](Self::is_empty) and [`peek()`](Self::peek) allocate, and a
/// value below `3` will make some of the
/// [`peek_{punctuation}`](#impl-TokenParser<I,+PEEKER_LEN>-3) allocate
/// additionally. But do also refrain from setting `PEEKER_LEN` too high, as
/// this is the stack allocation used.
#[allow(clippy::module_name_repetitions)]
#[derive(Clone)]
#[must_use]
pub struct TokenParser<
    I: Iterator<Item = TokenTree> = token_stream::IntoIter,
    const PEEKER_LEN: usize = 6,
> {
    // Already-peeked tokens; drained front-first before touching `iter`.
    peek: SmallVec<[TokenTree; PEEKER_LEN]>,
    // Remaining, not-yet-peeked tokens.
    iter: I,
}
116
117impl TokenParser {
118    /// Creates a new [`TokenParser`] from a [`TokenTree`] iterator.
119    ///
120    /// This sets the default length for the peeker buffer. Use
121    /// [`new_generic()`](Self::new_generic) to change it.
122    pub fn new<T, I>(value: T) -> TokenParser<I, 6>
123    where
124        T: IntoIterator<Item = TokenTree, IntoIter = I>,
125        I: Iterator<Item = TokenTree>,
126    {
127        TokenParser::new_generic(value)
128    }
129
130    /// Creates a new [`TokenParser`] from a [`TokenTree`] iterator, allowing
131    /// to specify the size of the peeker buffer.
132    ///
133    /// See [Peeking](#Peeking) for implications.
134    pub fn new_generic<const PEEKER_LEN: usize, T, I>(value: T) -> TokenParser<I, PEEKER_LEN>
135    where
136        T: IntoIterator<Item = TokenTree, IntoIter = I>,
137        I: Iterator<Item = TokenTree>,
138    {
139        TokenParser {
140            peek: smallvec![],
141            iter: value.into_iter(),
142        }
143    }
144}
145
/// Anything convertible into a [`TokenTree`] iterator can be converted into a
/// parser; `PEEKER_LEN` is chosen by inference or turbofish.
impl<T, I, const PEEKER_LEN: usize> From<T> for TokenParser<I, PEEKER_LEN>
where
    T: IntoIterator<Item = TokenTree, IntoIter = I>,
    I: Iterator<Item = TokenTree>,
{
    fn from(value: T) -> Self {
        TokenParser::new_generic(value)
    }
}
155
156impl<I, const PEEKER_LEN: usize> From<TokenParser<I, PEEKER_LEN>> for TokenStream
157where
158    I: Iterator<Item = TokenTree>,
159{
160    #[must_use]
161    fn from(value: TokenParser<I, PEEKER_LEN>) -> Self {
162        value.peek.into_iter().chain(value.iter).collect()
163    }
164}
165
166impl<I, const PEEKER_LEN: usize> Iterator for TokenParser<I, PEEKER_LEN>
167where
168    I: Iterator<Item = TokenTree>,
169{
170    type Item = TokenTree;
171
172    #[must_use]
173    fn next(&mut self) -> Option<Self::Item> {
174        if self.peek.is_empty() {
175            self.iter.next()
176        } else {
177            Some(self.peek.remove(0))
178        }
179    }
180}
181
182impl FromStr for TokenParser {
183    type Err = <TokenStream as FromStr>::Err;
184
185    fn from_str(s: &str) -> Result<Self, Self::Err> {
186        TokenStream::from_str(s).map(Self::new)
187    }
188}
189
#[cfg(feature = "quote")]
impl<I, const PEEKER_LEN: usize> quote::ToTokens for TokenParser<I, PEEKER_LEN>
where
    I: Clone + Iterator<Item = TokenTree>,
{
    // Clones the parser so that emitting the tokens does not consume it.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend(self.clone());
    }

    #[must_use]
    fn to_token_stream(&self) -> TokenStream {
        self.clone().collect()
    }

    // Overridden to avoid the clone `to_token_stream` would do.
    #[must_use]
    fn into_token_stream(self) -> TokenStream
    where
        Self: Sized,
    {
        self.collect()
    }
}
212
/// Consumes a (possibly multi-character) punctuation sequence from `$self`.
///
/// Single-ident arm: takes one matching token and normalizes its spacing to
/// `Alone`. Multi-ident arms recurse, requiring every token but the last to be
/// spacing-`Joint`; the optional `!$not` list rejects sequences whose last
/// token is joint-followed by one of the given puncts (so `..` does not match
/// the start of `..=`).
macro_rules! next_punct {
    ($self:ident, $only:ident) => {
        $self.next_if(TokenTree::$only).map(TokenTree::alone).map(iter::once).map(Iterator::collect)
    };
    // Entry point for multi-token puncts: start at index 0 with a `true` guard.
    ($self:ident, $($joint:ident),+ $(!$($not:ident),+)?) => {
        next_punct!($self, 0, $($joint),+ $(!$($not),+)?;true)
    };
    // Recursive step: token at `$idx` must match `$first` and be joint.
    ($self:ident, $idx:expr, $first:ident, $($joint:ident),+ $(!$($not:ident),*)?;$($cond:tt)*) => {
        next_punct!($self, $idx+1, $($joint),+ $(!$($not),+)?; $($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$first() && tt.is_joint()))
    };
    // Final token, no exclusions: consume `$idx + 1` tokens when all matched.
    ($self:ident, $idx:expr, $last:ident;$($cond:tt)*) => {
        ($($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$last())).then(|| $self.next_n_alone($idx+1).expect("peeked n"))
    };
    // Final token with exclusions: additionally reject when the last token is
    // joint and followed by one of the `$not` puncts.
    ($self:ident, $idx:expr, $last:ident !$($not:ident),+;$($cond:tt)*) => {
        ($($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$last())
         && (matches!($self.peek_n($idx), Some(tt) if tt.is_alone()) ||
         !(matches!($self.peek_n($idx+1), Some(tt) if false $(|| tt.$not())*))))
            .then(|| $self.next_n_alone($idx+1).expect("peeked n"))
    };
}
233
/// Non-consuming counterpart of [`next_punct!`]: matches a (possibly
/// multi-character) punctuation sequence starting at token `$offset` without
/// advancing the parser, returning the cloned tokens with the last one's
/// spacing normalized to `Alone`.
macro_rules! peek_punct {
    ($offset:expr, $self:ident, $only:ident) => {
        $self.peek_n($offset).filter(|t| t.$only()).cloned().map(TokenTree::alone).map(iter::once).map(Iterator::collect)
    };
    // Entry point for multi-token puncts: start at `$offset` with a `true` guard.
    ($offset:expr, $self:ident, $($joint:ident),+ $(!$($not:ident),+)?) => {
        peek_punct!($offset, $self, $offset, $($joint),+ $(!$($not),+)?;true)
    };
    // Recursive step: token at `$idx` must match `$first` and be joint.
    ($offset:expr, $self:ident, $idx:expr, $first:ident, $($joint:ident),+ $(!$($not:ident),*)?;$($cond:tt)*) => {
        peek_punct!($offset, $self, $idx+1, $($joint),+ $(!$($not),+)?; $($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$first() && tt.is_joint()))
    };
    // Final token, no exclusions: clone the matched range.
    ($offset:expr, $self:ident, $idx:expr, $last:ident;$($cond:tt)*) => {
        ($($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$last())).then(|| $self.peek_range_alone($offset..$idx+1).expect("peeked n"))
    };
    // Final token with exclusions: reject when the last token is joint and
    // followed by one of the `$not` puncts.
    ($offset:expr, $self:ident, $idx:expr, $last:ident !$($not:ident),+;$($cond:tt)*) => {
        ($($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$last())
         && (matches!($self.peek_n($idx), Some(tt) if tt.is_alone()) ||
         !(matches!($self.peek_n($idx+1), Some(tt) if false $(|| tt.$not())*))))
            .then(|| $self.peek_range_alone($offset..$idx+1).expect("peeked n"))
    };
}
254
/// Generates the `next_tt_*`/`peek_tt_*`/`peek_n_tt_*` punctuation methods,
/// plus the generic `next_tt`/`peek_tt`/`peek_n_tt` that also accept groups
/// and literals.
macro_rules! punct_tt {
    ($($punct:literal, [$($cond:tt)*], $peek:ident, $peek_n:ident, $name:ident);*$(;)?) => {
        $(#[doc = concat!("Returns the next token if it is a [punctuation token tree](https://doc.rust-lang.org/reference/tokens.html#punctuation) `", $punct ,"` following the same rules as [macro_rule's tt](https://doc.rust-lang.org/reference/macros-by-example.html#metavariables).")]
        #[doc = concat!("```
use proc_macro_utils::{assert_tokens, TokenParser};
use quote::quote;
let mut parser = TokenParser::new(quote!(", $punct, " 1 b));
assert_tokens!(parser.", stringify!($name), "().unwrap(), { ", $punct, " });
assert_tokens!(parser, { 1 b });
```")]
        #[must_use]
        pub fn $name(&mut self) -> Option<TokenStream> {
            next_punct!(self, $($cond)*)
        }
        #[doc = concat!("Returns the next token if it is a [punctuation token tree](https://doc.rust-lang.org/reference/tokens.html#punctuation) `", $punct ,"` following the same rules as [macro_rule's tt](https://doc.rust-lang.org/reference/macros-by-example.html#metavariables) without advancing the parser")]
        #[doc = concat!("```
use proc_macro_utils::{assert_tokens, TokenParser};
use quote::quote;
let mut parser = TokenParser::new(quote!(", $punct, " 1 b));
assert_tokens!(parser.", stringify!($peek), "().unwrap(), { ", $punct, " });
```")]
        #[must_use]
        pub fn $peek(&mut self) -> Option<TokenStream> {
            peek_punct!(0, self, $($cond)*)
        }
        #[doc = concat!("Returns the `n`th token if it is a [punctuation token tree](https://doc.rust-lang.org/reference/tokens.html#punctuation) `", $punct ,"` following the same rules as [macro_rule's tt](https://doc.rust-lang.org/reference/macros-by-example.html#metavariables) without advancing the parser")]
        #[doc = concat!("```
use proc_macro_utils::{assert_tokens, TokenParser};
use quote::quote;
let mut parser = TokenParser::new(quote!(b ", $punct, " 1));
assert_tokens!(parser.", stringify!($peek_n), "(1).unwrap(), { ", $punct, " });
```")]
        #[must_use]
        pub fn $peek_n(&mut self, n: usize) -> Option<TokenStream> {
            peek_punct!(n, self, $($cond)*)
        })*
        /// Returns the next token tree as interpreted by the `tt` type in `macro_rules`, i.e., any literal, group,
        /// or [composed punctuation](https://doc.rust-lang.org/reference/tokens.html#punctuation).
        pub fn next_tt(&mut self) -> Option<TokenStream> {
            self.next_if_each(TokenTree::is_group)
                .or_else(|| self.next_if_each(TokenTree::is_literal))
                $(.or_else(|| self.$name()))*
        }
        /// Peeks the next token tree as interpreted by the `tt` type in `macro_rules`, i.e., any literal, group,
        /// or [composed punctuation](https://doc.rust-lang.org/reference/tokens.html#punctuation).
        pub fn peek_tt(&mut self) -> Option<TokenStream> {
            self.peek_n_tt(0)
        }
        /// Peeks the next token tree from the `n`th token as interpreted by the `tt` type in `macro_rules`, i.e., any literal, group,
        /// or [composed punctuation](https://doc.rust-lang.org/reference/tokens.html#punctuation).
        pub fn peek_n_tt(&mut self, n: usize) -> Option<TokenStream> {
            // The group/literal checks must honor `n` like the punctuation
            // arms do; `peek_if_each` would always inspect offset 0.
            self.peek_n_if_each(n, TokenTree::is_group)
                .or_else(|| self.peek_n_if_each(n, TokenTree::is_literal))
            $(.or_else(|| self.$peek_n(n)))*
        }
    };
    ([$test:ident $($tests:ident)*]) => {
        matches!(self.peek(), Some(token) if token.$test()) && punct!([$($tests)*])
    }
}
315
/// Generates typed `next_*`/`peek_*`/`peek_n_*` accessors for a concrete
/// token kind (`Group`, `Ident`, `Punct`, `Literal`), delegating the peeking
/// to the matching `TokenTree::$peek_as` projection.
macro_rules! token_tree {
    ($($a:literal, $test:ident, $peek_as:ident, $as:ident, $peek:ident, $peek_n:ident, $name:ident, $token:ident);*$(;)?) => {
        $(#[doc = concat!("Returns the next token if it is ", $a, " [`", stringify!($token) ,"`].")]
        #[must_use]
        pub fn $name(&mut self) -> Option<$token> {
            // Peek first so a non-matching token is left in place.
            self.$peek().is_some().then(|| self.next().expect("token should be present").$as().expect(concat!("should be ", stringify!($token))))
        })*

        $(#[doc = concat!("Returns the next token if it is ", $a, " [`", stringify!($token) ,"`] without advancing the parser.")]
        #[must_use]
        pub fn $peek(&mut self) -> Option<&$token> {
            self.$peek_n(0)
        })*

        $(#[doc = concat!("Returns the `n`th token if it is ", $a, " [`", stringify!($token) ,"`] without advancing the parser.")]
        #[must_use]
        pub fn $peek_n(&mut self, n: usize) -> Option<&$token> {
            self.peek_n(n).and_then(TokenTree::$peek_as)
        })*
    };
}
337
/// Generates `next_*`/`peek_*`/`peek_n_*` accessors for groups with a specific
/// delimiter (parenthesized, braced, bracketed, ...).
macro_rules! delimited {
    ($($test:ident, $peek:ident, $peek_n:ident, $name:ident, $doc:literal;)*) => {
        $(#[doc = concat!("Returns the next token if it is a ", $doc ," group.")]
        #[must_use]
        pub fn $name(&mut self) -> Option<Group> {
            // Peek first so a non-matching token is left in place.
            self.$peek().is_some().then(|| {
                self.next_group().unwrap()
            })
        })*
        // NOTE: the doc string previously read `"…it is a"` with no trailing
        // space, generating e.g. "is aparenthesized".
        $(#[doc = concat!("Returns the next token if it is a ", $doc ," group, without advancing the parser.")]
        #[must_use]
        pub fn $peek(&mut self) -> Option<&Group> {
            self.$peek_n(0)
        })*
        $(#[doc = concat!("Returns the `n`th token if it is a ", $doc ," group, without advancing the parser.")]
        #[must_use]
        pub fn $peek_n(&mut self, n: usize) -> Option<&Group> {
            self.peek_n_group(n).filter(|g| g.$test())
        })*
    };
}
359
/// Some Iterator utilities
impl<I, const PEEKER_LEN: usize> TokenParser<I, PEEKER_LEN>
where
    I: Iterator<Item = TokenTree>,
{
    /// Returns the span of the next token, or [`Span::call_site()`] when the
    /// parser is exhausted.
    #[must_use]
    pub fn span(&mut self) -> Span {
        match self.peek() {
            Some(token) => token.span(),
            None => Span::call_site(),
        }
    }
370
    /// Checks if there are remaining tokens
    /// ```
    /// use proc_macro_utils::TokenParser;
    /// use quote::quote;
    ///
    /// let mut parser = TokenParser::new(quote!(token));
    /// assert!(!parser.is_empty());
    /// _ = parser.next();
    /// assert!(parser.is_empty())
    /// ```
    #[must_use]
    pub fn is_empty(&mut self) -> bool {
        // `&mut self` because this may pull one token into the peek buffer.
        self.peek().is_none()
    }
385
386    /// Peeks the next token without advancing the parser
387    /// ```
388    /// use proc_macro_utils::{assert_tokens, TokenParser};
389    /// use quote::quote;
390    ///
391    /// let mut parser = TokenParser::new(quote!(token));
392    /// assert_tokens!(parser.peek().cloned(), { token });
393    /// _ = parser.next();
394    /// assert!(parser.peek().is_none())
395    /// ```
396    #[must_use]
397    pub fn peek(&mut self) -> Option<&TokenTree> {
398        if self.peek.is_empty() {
399            self.peek.push(self.iter.next()?);
400        }
401        self.peek.first()
402    }
403
404    /// Peeks the `n`th token without advancing the parser
405    /// ```
406    /// use proc_macro_utils::{assert_tokens, TokenParser};
407    /// use quote::quote;
408    ///
409    /// let mut parser = TokenParser::new(quote!(token , third));
410    /// assert_tokens!(parser.peek_n(2).cloned(), { third });
411    /// assert_tokens!(parser.peek_n(1).cloned(), { , });
412    /// assert!(parser.peek_n(3).is_none())
413    /// ```
414    #[must_use]
415    pub fn peek_n(&mut self, n: usize) -> Option<&TokenTree> {
416        for _ in self.peek.len()..=n {
417            self.peek.push(self.iter.next()?);
418        }
419        self.peek.get(n)
420    }
421
422    /// Returns the next token if it fulfills the condition otherwise returns
423    /// None and doesn't advance the parser
424    /// ```
425    /// use proc_macro_utils::{assert_tokens, TokenParser, TokenTreePunct};
426    /// use quote::quote;
427    ///
428    /// let mut parser = TokenParser::new(quote!(::));
429    /// assert!(parser.next_if(TokenTreePunct::is_alone).is_none());
430    /// _ = parser.next();
431    /// assert_tokens!(parser.next_if(TokenTreePunct::is_alone), { : });
432    /// ```
433    #[must_use]
434    #[allow(clippy::missing_panics_doc)]
435    pub fn next_if(&mut self, test: impl FnOnce(&TokenTree) -> bool) -> Option<TokenTree> {
436        test(self.peek()?).then(|| self.next().expect("was peeked"))
437    }
438
439    /// Returns the next tokens if they fulfill the conditions
440    /// otherwise returns None and doesn't advance the parser.
441    /// ```
442    /// use proc_macro_utils::{assert_tokens, TokenParser, TokenTreePunct};
443    /// use quote::quote;
444    ///
445    /// let mut parser = TokenParser::new(quote!( -->));
446    /// assert!(parser.next_if_each((TokenTreePunct::is_minus, TokenTreePunct::is_greater_than)).is_none());
447    /// _ = parser.next();
448    /// assert_tokens!(parser.next_if_each((TokenTreePunct::is_minus, TokenTreePunct::is_greater_than)).unwrap(), { -> });
449    /// ```
450    #[must_use]
451    pub fn next_if_each<P: Peeker>(&mut self, tests: P) -> Option<TokenStream> {
452        let len = tests.len();
453        // Ensure peek is filled;
454        self.peek_n(len - 1)?;
455        tests
456            .peek(&self.peek[..len])
457            .then(|| self.peek.drain(0..len).collect())
458    }
459
460    /// Returns the next tokens if they fulfill the conditions
461    /// otherwise returns None and doesn't advance the parser. If the last token
462    /// is a punct it's [`spacing`](Punct::spacing()) is set to
463    /// [`Alone`](Spacing::Alone).
464    #[must_use]
465    pub fn next_if_each_alone<P: Peeker>(&mut self, tests: P) -> Option<TokenStream> {
466        let len = tests.len();
467        // Ensure peek is filled;
468        self.peek_n(len - 1)?;
469        tests.peek(&self.peek[..len]).then(|| {
470            if self.peek[len - 1].is_punct() {
471                self.peek[len - 1] = self.peek[len - 1].clone().alone();
472            }
473            self.peek.drain(0..len).collect()
474        })
475    }
476
    /// Returns the next tokens if they fulfill the conditions
    /// otherwise returns None, without advancing the parser
    #[must_use]
    pub fn peek_if_each<P: Peeker>(&mut self, tests: P) -> Option<TokenStream> {
        // Delegates to the offset variant, starting at the first token.
        self.peek_n_if_each(0, tests)
    }
484
485    /// Returns the next tokens from `n` if they fulfill the
486    /// conditions otherwise returns None, without advancing the parser
487    #[must_use]
488    pub fn peek_n_if_each<P: Peeker>(&mut self, n: usize, tests: P) -> Option<TokenStream> {
489        let len = tests.len();
490        // Ensure peek is filled;
491        self.peek_n(len + n)?;
492        let peeked = &self.peek[n..len + n];
493        tests.peek(peeked).then(|| peeked.iter().cloned().collect())
494    }
495
    /// Returns the next tokens from `n` if they fulfill the conditions
    /// otherwise returns None, without advancing the parser. If the last token
    /// is a punct its [`spacing`](Punct::spacing()) is set to
    /// [`Alone`](Spacing::Alone).
    #[must_use]
    pub fn peek_n_if_each_alone<P: Peeker>(&mut self, n: usize, tests: P) -> Option<TokenStream> {
        let len = tests.len();
        // An empty peeker trivially matches (and `len - 1` below would underflow).
        if len == 0 {
            return Some(TokenStream::new());
        }
        // Ensure peek is filled; the last inspected index is `n + len - 1`.
        self.peek_n(len + n - 1)?;
        let peeked = &self.peek[n..len + n];
        tests.peek(peeked).then(|| {
            // Clone all but the last token verbatim, normalizing the last
            // one's spacing to `Alone`.
            peeked[..len - 1]
                .iter()
                .cloned()
                .chain(iter::once(peeked[len - 1].clone().alone()))
                .collect()
        })
    }
517
518    /// Returns all tokens while `test` evaluates to true.
519    ///
520    /// Returns `None` if empty or `test(first_token) == false`
521    #[must_use]
522    #[allow(clippy::missing_panics_doc)]
523    pub fn next_while(&mut self, mut test: impl FnMut(&TokenTree) -> bool) -> Option<TokenStream> {
524        if self.peek().is_none() || !test(self.peek().expect("was peeked")) {
525            None
526        } else {
527            let mut token_stream = TokenStream::new();
528            token_stream.push(self.next().expect("was peeked"));
529            while let Some(token) = self.next_if(&mut test) {
530                token_stream.push(token);
531            }
532            Some(token_stream)
533        }
534    }
535
536    /// Returns all tokens while `test` evaluates to true. If the last token
537    /// is a punct it's [`spacing`](Punct::spacing()) is set to
538    /// [`Alone`](Spacing::Alone).
539    ///
540    /// Returns `None` if empty or `test(first_token) == false`
541    #[must_use]
542    #[allow(clippy::missing_panics_doc)]
543    pub fn next_while_alone(
544        &mut self,
545        mut test: impl FnMut(&TokenTree) -> bool,
546    ) -> Option<TokenStream> {
547        if self.peek().is_none() || !test(self.peek().expect("was peeked")) {
548            None
549        } else {
550            let mut token_stream = TokenStream::new();
551            let mut last = self.next().expect("was peeked");
552            while let Some(token) = self.next_if(&mut test) {
553                token_stream.push(mem::replace(&mut last, token));
554            }
555            token_stream.push(last.alone());
556            Some(token_stream)
557        }
558    }
559
    /// Returns all tokens while `test` evaluates to false.
    ///
    /// Returns `None` if empty or `test(first_token) == true`.
    #[must_use]
    pub fn next_until(&mut self, mut test: impl FnMut(&TokenTree) -> bool) -> Option<TokenStream> {
        // `next_while` with the predicate inverted.
        self.next_while(|token| !test(token))
    }
567
    /// Returns all tokens while `test` evaluates to false. If the last token is
    /// a punct its [`spacing`](Punct::spacing()) is set to
    /// [`Alone`](Spacing::Alone).
    ///
    /// Returns `None` if empty or `test(first_token) == true`.
    #[must_use]
    pub fn next_until_alone(
        &mut self,
        mut test: impl FnMut(&TokenTree) -> bool,
    ) -> Option<TokenStream> {
        // `next_while_alone` with the predicate inverted.
        self.next_while_alone(|token| !test(token))
    }
580
    /// Returns the next `n` tokens.
    ///
    /// Returns `None` if the parser contains less than `n` tokens.
    ///
    /// **Note:** This should only be used for small `n` ideally less than
    /// `PEEKER_LEN`. Otherwise, something like this would be more performant:
    /// ```
    /// use proc_macro2::TokenStream;
    /// use proc_macro_utils::{TokenParser, assert_tokens};
    /// use quote::quote;
    ///
    /// let mut parser = TokenParser::new(quote!(1 2 3 /*...*/ 1000 1001 1002 1003));
    /// let n = 1000;
    /// # let n = 4;
    /// // This does not ensure that `next_up_to_n` contains exactly n tokens
    /// let next_up_to_n: TokenStream = parser.by_ref().take(n).collect();
    /// assert_tokens!(next_up_to_n, { 1 2 3 /* ...*/ 1000 });
    /// assert_tokens!(parser, { 1001 1002 1003 });
    /// ```
    #[must_use]
    pub fn next_n(&mut self, n: usize) -> Option<TokenStream> {
        // `PeekLen` matches unconditionally, so this just takes `n` tokens
        // all-or-nothing.
        self.next_if_each(PeekLen(n))
    }
604
    /// Returns the next `n` tokens. If the last token is a punct its
    /// [`spacing`](Punct::spacing()) is set to [`Alone`](Spacing::Alone).
    ///
    /// Returns `None` if the parser contains less than `n` tokens.
    ///
    /// **Note:** This should only be used for small `n` ideally less than
    /// `PEEKER_LEN`. Otherwise, something like this would be more performant:
    /// ```
    /// use proc_macro2::TokenStream;
    /// use proc_macro_utils::{TokenParser, assert_tokens, TokenTreePunct};
    /// use quote::quote;
    ///
    /// let mut parser = TokenParser::new(quote!(1 2 3 /*...*/ 1000 1001 1002 1003));
    /// let n = 1000;
    /// # let n = 4;
    /// // This does not ensure that `next_up_to_n` contains exactly n tokens
    /// let mut next_up_to_n: TokenStream = parser.by_ref().take(n - 1).collect();
    /// next_up_to_n.extend(parser.next().map(TokenTreePunct::alone));
    /// assert_tokens!(next_up_to_n, { 1 2 3 /* ...*/ 1000 });
    /// assert_tokens!(parser, { 1001 1002 1003 });
    /// ```
    #[must_use]
    pub fn next_n_alone(&mut self, n: usize) -> Option<TokenStream> {
        // `PeekLen` matches unconditionally, so this just takes `n` tokens
        // all-or-nothing, normalizing the final punct's spacing.
        self.next_if_each_alone(PeekLen(n))
    }
630
631    /// Returns the specified `range` of tokens.
632    ///
633    /// Returns `None` if the parser does not contain these `range` tokens.
634    ///
635    /// **Note:** This should only be used for small and close to start `range`s
636    /// ideally less than `PEEKER_LEN`. Otherwise, something like this could be
637    /// more performant:
638    /// ```
639    /// use proc_macro2::TokenStream;
640    /// use proc_macro_utils::{TokenParser, assert_tokens};
641    /// use quote::quote;
642    ///
643    /// let parser = TokenParser::new(quote!(0 1 2 3 /*...*/ 1000 1001 1002 1003));
644    /// let start = 1000;
645    /// # let start = 4;
646    /// let end = 1003;
647    /// # let end = 7;
648    /// // This does not ensure that `peeked_range` contains any tokens
649    /// let peeked_range: TokenStream = parser.clone().skip(start).take(end -
650    /// start).collect();
651    /// assert_tokens!(peeked_range, { 1000 1001 1002 });
652    /// assert_tokens!(parser, { 0 1 2 3 /*...*/ 1000 1001 1002 1003 });
653    /// ```
654    ///
655    /// # Panics
656    ///
657    /// Panics if used without upper bound i.e. `start..`.
658    #[must_use]
659    pub fn peek_range(&mut self, range: impl RangeBounds<usize>) -> Option<TokenStream> {
660        let start = match range.start_bound() {
661            Bound::Included(&n) => n,
662            Bound::Excluded(&n) => n + 1,
663            Bound::Unbounded => 0,
664        };
665        let len = match range.end_bound() {
666            Bound::Included(&n) if n < start => return None,
667            Bound::Included(&n) => n - start + 1,
668            Bound::Excluded(&n) if n <= start => return None,
669            Bound::Excluded(&n) => n - start,
670            Bound::Unbounded => {
671                panic!("unbounded range not supported, use `clone().skip()` instead")
672            }
673        };
674
675        self.peek_n_if_each(start, PeekLen(len))
676    }
677
678    /// Returns the specified `range` of tokens. If the last token is a punct
679    /// it's [`spacing`](Punct::spacing()) is set to
680    /// [`Alone`](Spacing::Alone).
681    ///
682    /// Returns `None` if the parser does not contain these `range` tokens.
683    ///
684    /// **Note:** This should only be used for small and close to start `range`s
685    /// ideally less than `PEEKER_LEN`. Otherwise, something like this could be
686    /// more performant:
687    ///
688    /// ```
689    /// use proc_macro2::TokenStream;
690    /// use proc_macro_utils::{assert_tokens, TokenParser, TokenTreePunct};
691    /// use quote::quote;
692    ///
693    /// let parser = TokenParser::new(quote!(0 1 2 3 /*...*/ 1000 1001 1002 1003));
694    /// let start = 1000;
695    /// # let start = 4;
696    /// let end = 1003;
697    /// # let end = 7;
698    /// // This does not ensure that `peeked_range` contains any tokens
699    /// let mut cloned = parser.clone().skip(start);
700    /// let mut peeked_range: TokenStream = cloned.by_ref().take(end - start - 1).collect();
701    /// peeked_range.extend(cloned.next().map(TokenTreePunct::alone));
702    ///
703    /// assert_tokens!(peeked_range, { 1000 1001 1002 });
704    /// assert_tokens!(parser, { 0 1 2 3 /*...*/ 1000 1001 1002 1003 });
705    /// ```
706    ///
707    /// # Panics
708    ///
709    /// Panics if used without upper bound i.e. `start..`.
710    #[must_use]
711    pub fn peek_range_alone(&mut self, range: impl RangeBounds<usize>) -> Option<TokenStream> {
712        let start = match range.start_bound() {
713            Bound::Included(&n) => n,
714            Bound::Excluded(&n) => n + 1,
715            Bound::Unbounded => 0,
716        };
717        let len = match range.end_bound() {
718            Bound::Included(&n) if n < start => return None,
719            Bound::Included(&n) => n - start + 1,
720            Bound::Excluded(&n) if n <= start => return None,
721            Bound::Excluded(&n) => n - start,
722            Bound::Unbounded => {
723                panic!("unbounded range not supported, use `clone().skip()` instead")
724            }
725        };
726
727        self.peek_n_if_each_alone(start, PeekLen(len))
728    }
729}
730
impl<I, const PEEKER_LEN: usize> TokenParser<I, PEEKER_LEN>
where
    I: Iterator<Item = TokenTree>,
{
    /// Collects remaining tokens back into a [`TokenStream`]
    #[must_use]
    pub fn into_token_stream(self) -> TokenStream {
        // Delegates to the `From<TokenParser> for TokenStream` impl.
        TokenStream::from(self)
    }
740
    /// Returns the next group of punctuation with [`Punct::spacing`]
    /// [`Spacing::Joint`]
    #[must_use]
    pub fn next_punctuation_group(&mut self) -> Option<TokenStream> {
        // `joined` carries whether the *previous* token was spacing-joint, so
        // the run ends right after the first `Alone` punct.
        let mut joined = true;
        self.next_while(move |token| {
            let ret = joined && token.is_punct();
            joined = token.is_joint();
            ret
        })
    }
752
    /// Returns the next ident if it matches the specified keyword without
    /// advancing the parser.
    ///
    /// While this is called `peek_keyword` it is not restricted to rust
    /// keywords, it can be used with any ident.
    /// ```
    /// # use proc_macro_utils::TokenParser;
    /// # use quote::quote;
    /// let mut parser = TokenParser::new(quote!( in out ));
    /// assert_eq!(parser.peek_keyword("in").unwrap().to_string(), "in");
    /// assert_eq!(parser.peek_keyword("in").unwrap().to_string(), "in");
    /// assert!(parser.peek_keyword("out").is_none());
    /// parser.next().unwrap();
    /// assert_eq!(parser.peek_keyword("out").unwrap().to_string(), "out");
    /// ```
    #[must_use]
    pub fn peek_keyword<K: ?Sized>(&mut self, keyword: &K) -> Option<&Ident>
    where
        Ident: PartialEq<K>,
    {
        // Delegates to the offset variant, starting at the first token.
        self.peek_n_keyword(0, keyword)
    }
775
    /// Returns the nth token if it matches the specified keyword without
    /// advancing the parser.
    ///
    /// While this is called `peek_n_keyword` it is not restricted to rust
    /// keywords, it can be used with any ident.
    /// ```
    /// # use proc_macro_utils::TokenParser;
    /// # use quote::quote;
    /// let mut parser = TokenParser::new(quote!( in out ));
    /// assert_eq!(parser.peek_keyword("in").unwrap().to_string(), "in");
    /// assert_eq!(parser.peek_n_keyword(1, "out").unwrap().to_string(), "out");
    /// assert!(parser.peek_keyword("out").is_none());
    /// ```
    #[must_use]
    pub fn peek_n_keyword<K: ?Sized>(&mut self, n: usize, keyword: &K) -> Option<&Ident>
    where
        Ident: PartialEq<K>,
    {
        // Only idents can match a keyword; compare via `Ident: PartialEq<K>`.
        self.peek_n_ident(n).filter(|&ident| ident == keyword)
    }
796
797    /// Returns the next ident if it matches the specified keyword.
798    ///
799    /// While this is called `next_keyword` it is not restricted to rust
800    /// keywords, it can be used with any ident.
801    /// ```
802    /// # use proc_macro_utils::TokenParser;
803    /// # use quote::quote;
804    /// let mut parser = TokenParser::new(quote!( in out ));
805    /// assert_eq!(parser.next_keyword("in").unwrap().to_string(), "in");
806    /// assert!(parser.next_keyword("in").is_none());
807    /// assert_eq!(parser.next_keyword("out").unwrap().to_string(), "out");
808    /// assert!(parser.next_keyword("anything").is_none());
809    /// ```
810    #[must_use]
811    #[allow(clippy::missing_panics_doc)]
812    pub fn next_keyword<K: ?Sized>(&mut self, keyword: &K) -> Option<Ident>
813    where
814        Ident: PartialEq<K>,
815    {
816        self.next_if(|token| matches!(token.ident(), Some(ident) if ident == keyword))
817            .map(|token| token.into_ident().expect("is ident"))
818    }
819
820    /// "Parses" a type expression
821    ///
822    /// This just means it collects all the tokens that should belong to the
823    /// type, until it reaches either:
824    /// - a `;`
825    /// - a `,` or `>` and all `<>` pairs are closed
826    /// - the end of the token stream
827    ///
828    /// If the token stream is empty, or starts with `,`, `>` or `;` [`None`] is
829    /// returned otherwise, [`Some(TokenStream)`](TokenStream) containing
830    /// every token up to but excluding the terminator.
831    ///
832    /// ```
833    /// # use proc_macro_utils::{TokenParser, assert_tokens};
834    /// # use proc_macro2::TokenStream;
835    /// # use quote::quote;
836    ///
837    /// let mut tokens = TokenParser::new(quote! {A<Test, B>, remainder});
838    /// assert_tokens!(tokens.next_type().unwrap(), { A<Test, B> });
839    /// assert!(tokens.next_type().is_none());
840    /// assert_tokens!(tokens, { , remainder });
841    /// ```
842    #[must_use]
843    pub fn next_type(&mut self) -> Option<TokenStream> {
844        let first = self.peek()?;
845        if first.is_comma() || first.is_semi() {
846            return None;
847        };
848
849        let mut chevron_level: u32 = 0;
850
851        self.next_while_alone(|token| {
852            if token.is_less_than() {
853                chevron_level += 1;
854            } else if token.is_greater_than() {
855                if chevron_level == 0 {
856                    return false;
857                }
858                chevron_level -= 1;
859            }
860            !(chevron_level == 0 && token.is_comma() || token.is_semi())
861        })
862    }
863
864    /// "Parses" an expression
865    ///
866    /// This just means it collects all the tokens that should belong to the
867    /// expression, until it reaches (outside a group like `()` or `{}`) either:
868    /// - a `=>`
869    /// - a `;`
870    /// - a `,` outside a type
871    /// - the end of the token stream
872    ///
873    /// If the token stream is empty, or starts with `=>`, `,` or `;` [`None`]
874    /// is returned otherwise, [`Some(TokenStream)`](TokenStream) containing
875    /// every token up to but excluding the terminator.
876    ///
877    /// ```
878    /// # use proc_macro_utils::{TokenParser, assert_tokens};
879    /// # use proc_macro2::TokenStream;
880    /// # use quote::quote;
881    ///
882    /// let mut tokens = TokenParser::new(quote! {A + c ::<a, b>::a < b + <C as Trait<A, B>::C>::nice(), next_token});
883    /// assert_tokens!(tokens.next_expression().unwrap(), { A + c::<a, b>::a < b + <C as Trait<A, B>::C>::nice()});
884    /// assert!(tokens.next_expression().is_none());
885    /// assert_tokens!(tokens, { , next_token });
886    /// ```
887    #[must_use]
888    #[allow(clippy::missing_panics_doc)]
889    pub fn next_expression(&mut self) -> Option<TokenStream> {
890        if self.peek().is_none()
891            || matches!(self.peek(), Some(token) if token.is_comma() || token.is_semi())
892            || self.peek_tt_fat_arrow().is_some()
893        {
894            return None;
895        }
896
897        let mut start = true;
898
899        let mut tokens = TokenStream::new();
900        let mut last = None;
901
902        // <a> * <a>
903        // <a> => <a>
904        'outer: while let Some(token) = self.peek() {
905            if token.is_semi() || token.is_comma() || self.peek_tt_fat_arrow().is_some() {
906                break;
907            }
908            let token = self.peek().unwrap();
909            if start && token.is_less_than() {
910                tokens.extend(mem::replace(
911                    &mut last,
912                    Some(self.next().expect("token was peeked")),
913                ));
914                loop {
915                    if let Some(ty) = self.next_type() {
916                        for token in ty {
917                            tokens.extend(mem::replace(&mut last, Some(token)));
918                        }
919                    }
920                    // next token can only be `,;>` or None
921                    let Some(token) = self.peek() else {
922                        break 'outer;
923                    }; // Invalid expression
924                    if token.is_semi() {
925                        break 'outer;
926                    }
927                    if token.is_greater_than() {
928                        tokens.extend(mem::replace(
929                            &mut last,
930                            Some(self.next().expect("token was peeked")),
931                        ));
932                        break;
933                    } else if token.is_comma() {
934                        tokens.extend(mem::replace(
935                            &mut last,
936                            Some(self.next().expect("token was peeked")),
937                        ));
938                        continue; // Another type
939                    };
940                }
941            }
942            if let Some(token) = self.next() {
943                // TODO this might be too simplistic
944                start = token.is_punct();
945                tokens.extend(mem::replace(&mut last, Some(token)));
946            }
947        }
948
949        // ensure that the last punctuation is not joined (i.e. was touching the
950        // terminator, mainly possible in `1..,`)
951        tokens.extend(last.map(TokenTree::alone));
952
953        Some(tokens.into_iter().collect())
954    }
955
956    /// Returns the next string literal
957    #[must_use]
958    pub fn next_string(&mut self) -> Option<String> {
959        let lit = self.peek().and_then(TokenTreeLiteral::string)?;
960        self.next();
961        Some(lit)
962    }
963
964    /// Returns the next boolean literal
965    #[must_use]
966    pub fn next_bool(&mut self) -> Option<bool> {
967        self.next_if(|t| {
968            t.ident()
969                .map_or(false, |ident| ident == "true" || ident == "false")
970        })
971        .map(|t| matches!(t.ident(), Some(ident) if ident == "true"))
972    }
973}
974
impl<I, const PEEKER_LEN: usize> TokenParser<I, PEEKER_LEN>
where
    I: Iterator<Item = TokenTree>,
{
    // Generates `peek`/`peek_n`/`next` accessors for each of the four
    // `TokenTree` variants. Each row: the article used in the generated docs,
    // the variant predicate/accessor/converter names, the method names to
    // generate, and the variant type. (See the `token_tree!` macro definition
    // elsewhere in this file.)
    token_tree!(
        "a", is_group, group, into_group, peek_group, peek_n_group, next_group, Group;
        "an", is_ident, ident, into_ident, peek_ident, peek_n_ident, next_ident, Ident;
        "a", is_punct, punct, into_punct, peek_punct, peek_n_punct, next_punct, Punct;
        "a", is_literal, literal, into_literal, peek_literal, peek_n_literal, next_literal, Literal;
    );

    // Generates `peek`/`peek_n`/`next` helpers for the three delimited group
    // kinds: `(...)`, `{...}` and `[...]`.
    delimited!(
        is_parenthesized, peek_parenthesized, peek_n_parenthesized, next_parenthesized, "parenthesized";
        is_braced, peek_braced, peek_n_braced, next_braced, "braced";
        is_bracketed, peek_bracketed, peek_n_bracketed, next_bracketed, "bracketed";
    );
}
/// For now the naming of the tokens follow the names used in the
/// [rust reference](https://doc.rust-lang.org/reference/tokens.html#punctuation)
/// even though they diverge from the names used at [`TokenTreePunct`].
///
/// Note that they only match the token with correct [spacing](Spacing), i.e.
/// [`next_tt_plus`](Self::next_tt_plus) will match `+ =` and `+a` but not `+=`.
// TODO figure out what the single token ones should return, TokenStream or
// TokenTree
impl<I, const PEEKER_LEN: usize> TokenParser<I, PEEKER_LEN>
where
    I: Iterator<Item = TokenTree>,
{
    // Each row: the token's textual form, the per-character punct matchers
    // (a trailing `!is_x` appears to be a negative look-ahead ruling out
    // longer composites such as `+=` when matching `+` — confirm against the
    // `punct_tt!` macro definition), and the peek/peek_n/next method names to
    // generate.
    punct_tt!(
        "+", [is_plus !is_equals], peek_tt_plus, peek_n_tt_plus, next_tt_plus;
        "-", [is_minus !is_equals], peek_tt_minus, peek_n_tt_minus, next_tt_minus;
        "*", [is_asterix !is_equals], peek_tt_star, peek_n_tt_star, next_tt_star;
        "/", [is_slash !is_equals], peek_tt_slash, peek_n_tt_slash, next_tt_slash;
        "%", [is_percent !is_equals], peek_tt_percent, peek_n_tt_percent, next_tt_percent;
        "^", [is_caret !is_equals], peek_tt_caret, peek_n_tt_caret, next_tt_caret;
        "!", [is_exclamation !is_equals], peek_tt_not, peek_n_tt_not, next_tt_not;
        "&", [is_and !is_equals, is_and], peek_tt_and, peek_n_tt_and, next_tt_and;
        "|", [is_pipe !is_equals, is_pipe], peek_tt_or, peek_n_tt_or, next_tt_or;
        "&&", [is_and, is_and !is_equals], peek_tt_and_and, peek_n_tt_and_and, next_tt_and_and;
        "||", [is_pipe, is_pipe !is_equals], peek_tt_or_or, peek_n_tt_or_or, next_tt_or_or;
        "<<", [is_less_than, is_less_than !is_equals], peek_tt_shl, peek_n_tt_shl, next_tt_shl;
        ">>", [is_greater_than, is_greater_than !is_equals], peek_tt_shr, peek_n_tt_shr, next_tt_shr;
        "+=", [is_plus, is_equals], peek_tt_plus_eq, peek_n_tt_plus_eq, next_tt_plus_eq;
        "-=", [is_minus, is_equals], peek_tt_minus_eq, peek_n_tt_minus_eq, next_tt_minus_eq;
        "*=", [is_asterix, is_equals], peek_tt_star_eq, peek_n_tt_star_eq, next_tt_star_eq;
        "/=", [is_slash, is_equals], peek_tt_slash_eq, peek_n_tt_slash_eq, next_tt_slash_eq;
        "%=", [is_percent, is_equals], peek_tt_percent_eq, peek_n_tt_percent_eq, next_tt_percent_eq;
        "^=", [is_caret, is_equals], peek_tt_caret_eq, peek_n_tt_caret_eq, next_tt_caret_eq;
        "&=", [is_and, is_equals], peek_tt_and_eq, peek_n_tt_and_eq, next_tt_and_eq;
        "|=", [is_pipe, is_equals], peek_tt_or_eq, peek_n_tt_or_eq, next_tt_or_eq;
        "<<=", [is_less_than, is_less_than, is_equals], peek_tt_shl_eq, peek_n_tt_shl_eq, next_tt_shl_eq;
        ">>=", [is_greater_than, is_greater_than, is_equals], peek_tt_shr_eq, peek_n_tt_shr_eq, next_tt_shr_eq;
        "=", [is_equals !is_equals], peek_tt_eq, peek_n_tt_eq, next_tt_eq;
        "==", [is_equals, is_equals], peek_tt_eq_eq, peek_n_tt_eq_eq, next_tt_eq_eq;
        "!=", [is_exclamation, is_equals], peek_tt_ne, peek_n_tt_ne, next_tt_ne;
        ">", [is_greater_than !is_equals], peek_tt_gt, peek_n_tt_gt, next_tt_gt;
        "<", [is_less_than !is_equals], peek_tt_lt, peek_n_tt_lt, next_tt_lt;
        ">=", [is_greater_than, is_equals], peek_tt_ge, peek_n_tt_ge, next_tt_ge;
        "<=", [is_less_than, is_equals], peek_tt_le, peek_n_tt_le, next_tt_le;
        "@", [is_at], peek_tt_at, peek_n_tt_at, next_tt_at;
        ".", [is_dot !is_dot], peek_tt_dot, peek_n_tt_dot, next_tt_dot;
        "..", [is_dot, is_dot !is_dot, is_equals], peek_tt_dot_dot, peek_n_tt_dot_dot, next_tt_dot_dot;
        "...", [is_dot, is_dot, is_dot], peek_tt_dot_dot_dot, peek_n_tt_dot_dot_dot, next_tt_dot_dot_dot;
        "..=", [is_dot, is_dot, is_equals], peek_tt_dot_dot_eq, peek_n_tt_dot_dot_eq, next_tt_dot_dot_eq;
        ",", [is_comma], peek_tt_comma, peek_n_tt_comma, next_tt_comma;
        ";", [is_semi], peek_tt_semi, peek_n_tt_semi, next_tt_semi;
        ":", [is_colon !is_colon], peek_tt_colon, peek_n_tt_colon, next_tt_colon;
        "::", [is_colon, is_colon], peek_tt_path_sep, peek_n_tt_path_sep, next_tt_path_sep;
        "->", [is_minus, is_greater_than], peek_tt_r_arrow, peek_n_tt_r_arrow, next_tt_r_arrow;
        "=>", [is_equals, is_greater_than], peek_tt_fat_arrow, peek_n_tt_fat_arrow, next_tt_fat_arrow;
        "#", [is_pound], peek_tt_pound, peek_n_tt_pound, next_tt_pound;
        "$", [is_dollar], peek_tt_dollar, peek_n_tt_dollar, next_tt_dollar;
        "?", [is_question], peek_tt_question, peek_n_tt_question, next_tt_question;
        "~", [is_tilde], peek_tt_tilde, peek_n_tt_tilde, next_tt_tilde;
    );

    /// Returns the next token if it is a [punctuation token tree](https://doc.rust-lang.org/reference/tokens.html#punctuation) following the same rules as [macro_rule's `tt`](https://doc.rust-lang.org/reference/macros-by-example.html#metavariables).
    ///
    /// ```
    /// use proc_macro_utils::{assert_tokens, TokenParser};
    /// use quote::quote;
    /// let mut parser = TokenParser::new(quote!(.. =. 1 b));
    /// assert_tokens!(parser.next_macro_rules_tt().unwrap(), { .. });
    /// assert_tokens!(parser.next_macro_rules_tt().unwrap(), { = });
    /// assert_tokens!(parser, { . 1 b });
    /// ```
    #[must_use]
    #[allow(clippy::missing_panics_doc)]
    pub fn next_macro_rules_tt(&mut self) -> Option<TokenStream> {
        // ensure that the next 3 tokens are peeked if possible
        _ = self.peek_n(2);
        let first = self.peek.first().and_then(TokenTree::punct)?;
        // Only look at a following punct while the previous one is joint
        // (touching its successor); an alone punct ends the composite token.
        let second = first
            .is_joint()
            .then(|| self.peek.get(1).and_then(TokenTree::punct))
            .flatten();
        let third = second
            .is_some_and(TokenTreePunct::is_joint)
            .then(|| self.peek.get(2).and_then(TokenTree::punct))
            .flatten();
        // `'_'` fills missing positions; it is not a Rust punctuation
        // character, so it can never match in the patterns below.
        let chars = [
            first.as_char(),
            second.map_or('_', Punct::as_char),
            third.map_or('_', Punct::as_char),
        ];
        // Longest match first: three-character tokens (`...`, `..=`, `<<=`,
        // `>>=`) ...
        if matches!(
            chars,
            ['.', '.', '.' | '='] | ['<', '<', '='] | ['>', '>', '=']
        ) {
            self.next_n_alone(3)
        } else if matches!(
            &chars[0..2],
            ['&', '&' | '=']
                | ['|', '|' | '=']
                | ['<', '<' | '=']
                | ['>' | '-' | '=', '>']
                | ['+' | '-' | '*' | '/' | '%' | '^' | '=' | '!' | '>', '=']
                | ['.', '.']
                | [':', ':']
        ) {
            // ... then two-character tokens ...
            self.next_n_alone(2)
        } else {
            // ... falling back to a single punctuation character.
            self.next_n_alone(1)
        }
    }
}
1102
#[cfg(test)]
mod test {
    use quote::quote;

    use super::*;
    use crate::assert_tokens;

    /// `next_type` stops at a top-level `,` but keeps commas inside `<>`.
    #[test]
    fn ty() {
        let mut p = TokenParser::new(quote! {Name, <Some, Generic, Type>});
        assert_tokens!(p.next_type().unwrap(), { Name });
        // Skip the separating comma.
        p.next();
        assert_tokens!(p.next_type().unwrap(), { < Some , Generic , Type > });
    }

    /// `next_expression` handles generics, comparisons and literals, stopping
    /// at top-level `,` and `=>`.
    #[test]
    fn expr() {
        let mut p = TokenParser::new(
            quote! {a + b, <Some, Generic, Type>::something + <a,b> * a < b, "hi" => hello},
        );
        assert_tokens!(p.next_expression().unwrap(), { a + b });
        p.next();
        assert_tokens!(
            p.next_expression().unwrap(), { <Some, Generic, Type>::something + <a,b> * a < b }
        );
        p.next();
        assert_tokens!(p.next_expression().unwrap(), { "hi" });
        // Skip both tokens of the `=>`.
        p.next();
        p.next();
        assert_tokens!(p.next_expression().unwrap(), { hello });

        // The trailing `..` touched the `,` terminator; its last punct must
        // come out with alone spacing.
        let mut p = TokenParser::from_str("1..,").unwrap();
        let expr: Vec<_> = p.next_expression().unwrap().into_iter().collect();
        assert!(expr.last().unwrap().is_alone());
        assert_tokens!(expr, { 1.. });
    }

    /// Composite punctuation tokens are split correctly by spacing.
    #[test]
    fn combined_tokens() {
        // using from_str to be able to verify behavior of splitting the input correctly
        // into tts
        let mut p = TokenParser::from_str("->&&..=>=+,-..,+=").unwrap();
        assert_tokens!(p.next_tt_r_arrow().unwrap(), { -> });
        assert_tokens!(p.next_tt_and_and().unwrap(), { && });
        assert_tokens!(p.next_tt_dot_dot_eq().unwrap(), { ..= });
        assert_tokens!(p.next_tt_ge().unwrap(), { >= });
        assert_tokens!(p.next_tt_plus().unwrap(), { + });
        assert_tokens!(p.next_tt_comma().unwrap(), { , });
        assert_tokens!(p.next_tt_minus().unwrap(), { - });
        assert_tokens!(p.next_tt_dot_dot().unwrap(), { .. });
        assert_tokens!(p.next_tt_comma().unwrap(), { , });
        assert_tokens!(p.next_tt_plus_eq().unwrap(), { += });
    }

    /// Positional and typed peeking does not consume tokens.
    #[test]
    fn peek() {
        let mut p = TokenParser::new(quote! {
            0 {} 2 3 += .. =
        });
        assert_eq!(p.peek().unwrap().to_string(), "0");
        assert_eq!(p.peek_n(0).unwrap().to_string(), "0");
        assert_eq!(p.peek_n(1).unwrap().to_string().replace(' ', ""), "{}");
        assert_eq!(p.peek_n(2).unwrap().to_string(), "2");

        assert_eq!(p.peek_literal().unwrap().to_string(), "0");
        assert!(p.peek_group().is_none());
        // Consume the literal; the group is now at the front.
        p.next().unwrap();
        assert!(p.peek_group().is_some());
        assert!(p.peek_n_tt_plus_eq(3).is_some());
        assert!(p.peek_n_tt_dot_dot(5).is_some());
    }

    /// `next_keyword` only consumes a matching ident.
    #[test]
    fn keyword() {
        let mut p: TokenParser<_, 4> = TokenParser::from(quote! {
            in out and or
        });
        assert_eq!(p.next_keyword("in").unwrap().to_string(), "in");
        assert_eq!(p.next_keyword("out").unwrap().to_string(), "out");
        assert!(p.next_keyword("or").is_none());
        assert_eq!(p.next_keyword("and").unwrap().to_string(), "and");
        assert_eq!(p.next_keyword("or").unwrap().to_string(), "or");
        assert!(p.next_keyword("or").is_none());
    }
}