1use std::ops::{Bound, RangeBounds};
2use std::str::FromStr;
3use std::{iter, mem};
4
5#[cfg(doc)]
6use proc_macro2::Spacing;
7use proc_macro2::{token_stream, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
8use smallvec::{smallvec, SmallVec};
9
10use crate::{Delimited, TokenStream2Ext, TokenTree2Ext, TokenTreeLiteral, TokenTreePunct};
11
/// Allows peeking a constant number of tokens at once; used by
/// [`TokenParser::next_if_each`] and related methods.
///
/// Implemented for closures `FnOnce(&TokenTree) -> bool` and for tuples of
/// such closures (up to length 3, via `impl_peeker!`).
pub trait Peeker {
    /// Number of tokens this peeker inspects; callers hand `peek` exactly
    /// this many tokens.
    fn len(&self) -> usize;

    /// Tests the first `self.len()` tokens of `tokens`.
    #[must_use]
    fn peek(self, tokens: &[TokenTree]) -> bool;
}
27
28impl<T: FnOnce(&TokenTree) -> bool> Peeker for T {
29 fn len(&self) -> usize {
30 1
31 }
32
33 #[must_use]
34 fn peek(self, parser: &[TokenTree]) -> bool {
35 self(&parser[0])
36 }
37}
38
/// Implements [`Peeker`] for tuples of token predicates, e.g. a 2-tuple peeks
/// two tokens, applying the first closure to the first token and the second
/// closure to the second.
macro_rules! impl_peeker {
    ($(($($T:ident $idx:tt),+$(,)?),$len:literal;)*) => {
        $(
            impl<$($T: FnOnce(&TokenTree) -> bool),+> Peeker for ($($T,)+) {
                fn len(&self) -> usize { $len }
                fn peek(self, parser: &[TokenTree]) -> bool {
                    // Every predicate must accept its corresponding token.
                    $(self.$idx(&parser[$idx]))&&+
                }
            }
        )*
    };
}
51
// Peeker implementations for 1-, 2- and 3-tuples of predicates.
impl_peeker![
    (T1 0,), 1;
    (T1 0, T2 1), 2;
    (T1 0, T2 1, T3 2), 3;
];
57
/// [`Peeker`] that unconditionally matches any `n` tokens; used to implement
/// [`TokenParser::next_n`] and the range-based peek methods.
struct PeekLen(usize);

impl Peeker for PeekLen {
    fn len(&self) -> usize {
        self.0
    }

    // Always matches; only the token count (enforced by the caller) matters.
    fn peek(self, _: &[TokenTree]) -> bool {
        true
    }
}
69
/// Wrapper for [`TokenStream`]-like [`TokenTree`] iterators, adding peeking
/// and token-matching utilities.
///
/// `PEEKER_LEN` is the inline capacity of the peek buffer; peeking further
/// ahead than `PEEKER_LEN` tokens spills the buffer to the heap.
#[allow(clippy::module_name_repetitions)]
#[derive(Clone)]
#[must_use]
pub struct TokenParser<
    I: Iterator<Item = TokenTree> = token_stream::IntoIter,
    const PEEKER_LEN: usize = 6,
> {
    // Tokens already pulled from `iter` but not yet consumed; always served
    // before `iter` is advanced again.
    peek: SmallVec<[TokenTree; PEEKER_LEN]>,
    iter: I,
}
116
impl TokenParser {
    /// Creates a new [`TokenParser`] from a [`TokenTree`] iterator with the
    /// default peek-buffer capacity of 6.
    pub fn new<T, I>(value: T) -> TokenParser<I, 6>
    where
        T: IntoIterator<Item = TokenTree, IntoIter = I>,
        I: Iterator<Item = TokenTree>,
    {
        TokenParser::new_generic(value)
    }

    /// Creates a new [`TokenParser`] with a caller-chosen peek-buffer
    /// capacity `PEEKER_LEN`.
    pub fn new_generic<const PEEKER_LEN: usize, T, I>(value: T) -> TokenParser<I, PEEKER_LEN>
    where
        T: IntoIterator<Item = TokenTree, IntoIter = I>,
        I: Iterator<Item = TokenTree>,
    {
        TokenParser {
            peek: smallvec![],
            iter: value.into_iter(),
        }
    }
}
145
/// Any [`TokenTree`] iterable (e.g. a [`TokenStream`]) converts into a
/// parser.
impl<T, I, const PEEKER_LEN: usize> From<T> for TokenParser<I, PEEKER_LEN>
where
    T: IntoIterator<Item = TokenTree, IntoIter = I>,
    I: Iterator<Item = TokenTree>,
{
    fn from(value: T) -> Self {
        TokenParser::new_generic(value)
    }
}
155
156impl<I, const PEEKER_LEN: usize> From<TokenParser<I, PEEKER_LEN>> for TokenStream
157where
158 I: Iterator<Item = TokenTree>,
159{
160 #[must_use]
161 fn from(value: TokenParser<I, PEEKER_LEN>) -> Self {
162 value.peek.into_iter().chain(value.iter).collect()
163 }
164}
165
166impl<I, const PEEKER_LEN: usize> Iterator for TokenParser<I, PEEKER_LEN>
167where
168 I: Iterator<Item = TokenTree>,
169{
170 type Item = TokenTree;
171
172 #[must_use]
173 fn next(&mut self) -> Option<Self::Item> {
174 if self.peek.is_empty() {
175 self.iter.next()
176 } else {
177 Some(self.peek.remove(0))
178 }
179 }
180}
181
182impl FromStr for TokenParser {
183 type Err = <TokenStream as FromStr>::Err;
184
185 fn from_str(s: &str) -> Result<Self, Self::Err> {
186 TokenStream::from_str(s).map(Self::new)
187 }
188}
189
#[cfg(feature = "quote")]
impl<I, const PEEKER_LEN: usize> quote::ToTokens for TokenParser<I, PEEKER_LEN>
where
    I: Clone + Iterator<Item = TokenTree>,
{
    // Appends a clone of the remaining tokens; does not consume the parser.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend(self.clone());
    }

    // `#[must_use]` attributes removed from the two overrides below: the
    // attribute has no effect on trait-impl methods (quote's trait already
    // carries the relevant annotations).
    fn to_token_stream(&self) -> TokenStream {
        self.clone().collect()
    }

    fn into_token_stream(self) -> TokenStream
    where
        Self: Sized,
    {
        self.collect()
    }
}
212
/// Consumes a (possibly multi-character) punctuation token such as `+=` or
/// `..=` from `$self`.
///
/// All but the last punct must have `Joint` spacing; with a `!$not` suffix
/// the sequence additionally must not be followed by one of the excluded
/// puncts (so e.g. `..` does not match the prefix of `..=`). The returned
/// stream's final punct is converted to `Alone` spacing.
macro_rules! next_punct {
    ($self:ident, $only:ident) => {
        $self.next_if(TokenTree::$only).map(TokenTree::alone).map(iter::once).map(Iterator::collect)
    };
    ($self:ident, $($joint:ident),+ $(!$($not:ident),+)?) => {
        next_punct!($self, 0, $($joint),+ $(!$($not),+)?;true)
    };
    // Recursive case: token `$idx` must match `$first` and be joint.
    ($self:ident, $idx:expr, $first:ident, $($joint:ident),+ $(!$($not:ident),*)?;$($cond:tt)*) => {
        next_punct!($self, $idx+1, $($joint),+ $(!$($not),+)?; $($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$first() && tt.is_joint()))
    };
    // Base case without exclusions: match the last punct and consume.
    ($self:ident, $idx:expr, $last:ident;$($cond:tt)*) => {
        ($($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$last())).then(|| $self.next_n_alone($idx+1).expect("peeked n"))
    };
    // Base case with exclusions: the last punct must be `Alone`, or not be
    // followed by any excluded punct.
    ($self:ident, $idx:expr, $last:ident !$($not:ident),+;$($cond:tt)*) => {
        ($($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$last())
            && (matches!($self.peek_n($idx), Some(tt) if tt.is_alone()) ||
                !(matches!($self.peek_n($idx+1), Some(tt) if false $(|| tt.$not())*))))
        .then(|| $self.next_n_alone($idx+1).expect("peeked n"))
    };
}
233
/// Peeks a (possibly multi-character) punctuation token starting at token
/// offset `$offset`, without advancing the parser. Matching rules are the
/// same as in `next_punct!`.
macro_rules! peek_punct {
    ($offset:expr, $self:ident, $only:ident) => {
        $self.peek_n($offset).filter(|t| t.$only()).cloned().map(TokenTree::alone).map(iter::once).map(Iterator::collect)
    };
    ($offset:expr, $self:ident, $($joint:ident),+ $(!$($not:ident),+)?) => {
        peek_punct!($offset, $self, $offset, $($joint),+ $(!$($not),+)?;true)
    };
    // Recursive case: token `$idx` must match `$first` and be joint.
    ($offset:expr, $self:ident, $idx:expr, $first:ident, $($joint:ident),+ $(!$($not:ident),*)?;$($cond:tt)*) => {
        peek_punct!($offset, $self, $idx+1, $($joint),+ $(!$($not),+)?; $($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$first() && tt.is_joint()))
    };
    // Base case without exclusions.
    ($offset:expr, $self:ident, $idx:expr, $last:ident;$($cond:tt)*) => {
        ($($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$last())).then(|| $self.peek_range_alone($offset..$idx+1).expect("peeked n"))
    };
    // Base case with exclusions (see `next_punct!`).
    ($offset:expr, $self:ident, $idx:expr, $last:ident !$($not:ident),+;$($cond:tt)*) => {
        ($($cond)* && matches!($self.peek_n($idx), Some(tt) if tt.$last())
            && (matches!($self.peek_n($idx), Some(tt) if tt.is_alone()) ||
                !(matches!($self.peek_n($idx+1), Some(tt) if false $(|| tt.$not())*))))
        .then(|| $self.peek_range_alone($offset..$idx+1).expect("peeked n"))
    };
}
254
/// Generates the `next_tt_*`, `peek_tt_*` and `peek_n_tt_*` punctuation
/// matchers, plus the generic `next_tt`/`peek_tt`/`peek_n_tt` methods.
///
/// (An unreachable second macro arm that referenced an undefined `punct!`
/// macro was removed.)
macro_rules! punct_tt {
    ($($punct:literal, [$($cond:tt)*], $peek:ident, $peek_n:ident, $name:ident);*$(;)?) => {
        $(#[doc = concat!("Returns the next token if it is a [punctuation token tree](https://doc.rust-lang.org/reference/tokens.html#punctuation) `", $punct ,"` following the same rules as [macro_rule's tt](https://doc.rust-lang.org/reference/macros-by-example.html#metavariables).")]
        #[doc = concat!("```
use proc_macro_utils::{assert_tokens, TokenParser};
use quote::quote;
let mut parser = TokenParser::new(quote!(", $punct, " 1 b));
assert_tokens!(parser.", stringify!($name), "().unwrap(), { ", $punct, " });
assert_tokens!(parser, { 1 b });
```")]
        #[must_use]
        pub fn $name(&mut self) -> Option<TokenStream> {
            next_punct!(self, $($cond)*)
        }
        #[doc = concat!("Returns the next token if it is a [punctuation token tree](https://doc.rust-lang.org/reference/tokens.html#punctuation) `", $punct ,"` following the same rules as [macro_rule's tt](https://doc.rust-lang.org/reference/macros-by-example.html#metavariables) without advancing the parser")]
        #[doc = concat!("```
use proc_macro_utils::{assert_tokens, TokenParser};
use quote::quote;
let mut parser = TokenParser::new(quote!(", $punct, " 1 b));
assert_tokens!(parser.", stringify!($peek), "().unwrap(), { ", $punct, " });
```")]
        #[must_use]
        pub fn $peek(&mut self) -> Option<TokenStream> {
            peek_punct!(0, self, $($cond)*)
        }
        #[doc = concat!("Returns the `n`th token if it is a [punctuation token tree](https://doc.rust-lang.org/reference/tokens.html#punctuation) `", $punct ,"` following the same rules as [macro_rule's tt](https://doc.rust-lang.org/reference/macros-by-example.html#metavariables) without advancing the parser")]
        #[doc = concat!("```
use proc_macro_utils::{assert_tokens, TokenParser};
use quote::quote;
let mut parser = TokenParser::new(quote!(b ", $punct, " 1));
assert_tokens!(parser.", stringify!($peek_n), "(1).unwrap(), { ", $punct, " });
```")]
        #[must_use]
        pub fn $peek_n(&mut self, n: usize) -> Option<TokenStream> {
            peek_punct!(n, self, $($cond)*)
        })*
        /// Returns the next token, matching groups, literals and any of the
        /// punctuation sequences above.
        // NOTE(review): idents (and any other token kind) fall through to
        // `None` here — confirm this is intended.
        pub fn next_tt(&mut self) -> Option<TokenStream> {
            self.next_if_each(TokenTree::is_group)
                .or_else(|| self.next_if_each(TokenTree::is_literal))
                $(.or_else(|| self.$name()))*
        }
        /// Like `next_tt`, without advancing the parser.
        pub fn peek_tt(&mut self) -> Option<TokenStream> {
            self.peek_n_tt(0)
        }
        /// Like `peek_tt`, starting at token offset `n`.
        pub fn peek_n_tt(&mut self, n: usize) -> Option<TokenStream> {
            // Fix: the group/literal checks previously used `peek_if_each`,
            // which always inspects offset 0 and therefore ignored `n`.
            self.peek_n_if_each(n, TokenTree::is_group)
                .or_else(|| self.peek_n_if_each(n, TokenTree::is_literal))
                $(.or_else(|| self.$peek_n(n)))*
        }
    };
}
315
/// Generates typed `next_*`, `peek_*` and `peek_n_*` accessors for each
/// [`TokenTree`] variant (`Group`, `Ident`, `Punct`, `Literal`).
macro_rules! token_tree {
    ($($a:literal, $test:ident, $peek_as:ident, $as:ident, $peek:ident, $peek_n:ident, $name:ident, $token:ident);*$(;)?) => {
        $(#[doc = concat!("Returns the next token if it is ", $a, " [`", stringify!($token) ,"`].")]
        #[must_use]
        pub fn $name(&mut self) -> Option<$token> {
            // Peek first so a non-matching token is not consumed.
            self.$peek().is_some().then(|| self.next().expect("token should be present").$as().expect(concat!("should be ", stringify!($token))))
        })*

        $(#[doc = concat!("Returns the next token if it is ", $a, " [`", stringify!($token) ,"`] without advancing the parser.")]
        #[must_use]
        pub fn $peek(&mut self) -> Option<&$token> {
            self.$peek_n(0)
        })*

        $(#[doc = concat!("Returns the `n`th token if it is ", $a, " [`", stringify!($token) ,"`] without advancing the parser.")]
        #[must_use]
        pub fn $peek_n(&mut self, n: usize) -> Option<&$token> {
            self.peek_n(n).and_then(TokenTree::$peek_as)
        })*
    };
}
337
/// Generates accessors for groups with a specific delimiter (`(...)`,
/// `{...}`, `[...]`).
macro_rules! delimited {
    ($($test:ident, $peek:ident, $peek_n:ident, $name:ident, $doc:literal;)*) => {
        $(#[doc = concat!("Returns the next token if it is a ", $doc ," group.")]
        #[must_use]
        pub fn $name(&mut self) -> Option<Group> {
            // Peek first so a non-matching token is not consumed.
            self.$peek().is_some().then(|| {
                self.next_group().unwrap()
            })
        })*
        // Fix: restored the missing space after "a" — the generated doc
        // previously rendered e.g. "it is aparenthesized group".
        $(#[doc = concat!("Returns the next token if it is a ", $doc ," group, without advancing the parser.")]
        #[must_use]
        pub fn $peek(&mut self) -> Option<&Group> {
            self.$peek_n(0)
        })*
        $(#[doc = concat!("Returns the `n`th token if it is a ", $doc ," group, without advancing the parser.")]
        #[must_use]
        pub fn $peek_n(&mut self, n: usize) -> Option<&Group> {
            self.peek_n_group(n).filter(|g| g.$test())
        })*
    };
}
359
360impl<I, const PEEKER_LEN: usize> TokenParser<I, PEEKER_LEN>
362where
363 I: Iterator<Item = TokenTree>,
364{
365 #[must_use]
367 pub fn span(&mut self) -> Span {
368 self.peek().map_or_else(Span::call_site, TokenTree::span)
369 }
370
371 #[must_use]
382 pub fn is_empty(&mut self) -> bool {
383 self.peek().is_none()
384 }
385
386 #[must_use]
397 pub fn peek(&mut self) -> Option<&TokenTree> {
398 if self.peek.is_empty() {
399 self.peek.push(self.iter.next()?);
400 }
401 self.peek.first()
402 }
403
404 #[must_use]
415 pub fn peek_n(&mut self, n: usize) -> Option<&TokenTree> {
416 for _ in self.peek.len()..=n {
417 self.peek.push(self.iter.next()?);
418 }
419 self.peek.get(n)
420 }
421
422 #[must_use]
434 #[allow(clippy::missing_panics_doc)]
435 pub fn next_if(&mut self, test: impl FnOnce(&TokenTree) -> bool) -> Option<TokenTree> {
436 test(self.peek()?).then(|| self.next().expect("was peeked"))
437 }
438
439 #[must_use]
451 pub fn next_if_each<P: Peeker>(&mut self, tests: P) -> Option<TokenStream> {
452 let len = tests.len();
453 self.peek_n(len - 1)?;
455 tests
456 .peek(&self.peek[..len])
457 .then(|| self.peek.drain(0..len).collect())
458 }
459
460 #[must_use]
465 pub fn next_if_each_alone<P: Peeker>(&mut self, tests: P) -> Option<TokenStream> {
466 let len = tests.len();
467 self.peek_n(len - 1)?;
469 tests.peek(&self.peek[..len]).then(|| {
470 if self.peek[len - 1].is_punct() {
471 self.peek[len - 1] = self.peek[len - 1].clone().alone();
472 }
473 self.peek.drain(0..len).collect()
474 })
475 }
476
    /// Peeks the next `tests.len()` tokens, returning them when every test
    /// matches, without advancing the parser.
    #[must_use]
    pub fn peek_if_each<P: Peeker>(&mut self, tests: P) -> Option<TokenStream> {
        self.peek_n_if_each(0, tests)
    }
484
485 #[must_use]
488 pub fn peek_n_if_each<P: Peeker>(&mut self, n: usize, tests: P) -> Option<TokenStream> {
489 let len = tests.len();
490 self.peek_n(len + n)?;
492 let peeked = &self.peek[n..len + n];
493 tests.peek(peeked).then(|| peeked.iter().cloned().collect())
494 }
495
    /// Like [`Self::peek_n_if_each`], but the last returned token, when it is
    /// a punct, has its spacing converted to `Alone`.
    #[must_use]
    pub fn peek_n_if_each_alone<P: Peeker>(&mut self, n: usize, tests: P) -> Option<TokenStream> {
        let len = tests.len();
        // A zero-length peeker trivially matches.
        if len == 0 {
            return Some(TokenStream::new());
        }
        self.peek_n(len + n - 1)?;
        let peeked = &self.peek[n..len + n];
        tests.peek(peeked).then(|| {
            // Clone all but the last token as-is; the last one gets `Alone`
            // spacing.
            peeked[..len - 1]
                .iter()
                .cloned()
                .chain(iter::once(peeked[len - 1].clone().alone()))
                .collect()
        })
    }
517
518 #[must_use]
522 #[allow(clippy::missing_panics_doc)]
523 pub fn next_while(&mut self, mut test: impl FnMut(&TokenTree) -> bool) -> Option<TokenStream> {
524 if self.peek().is_none() || !test(self.peek().expect("was peeked")) {
525 None
526 } else {
527 let mut token_stream = TokenStream::new();
528 token_stream.push(self.next().expect("was peeked"));
529 while let Some(token) = self.next_if(&mut test) {
530 token_stream.push(token);
531 }
532 Some(token_stream)
533 }
534 }
535
    /// Like [`Self::next_while`], but the final collected token's spacing is
    /// converted to `Alone`.
    #[must_use]
    #[allow(clippy::missing_panics_doc)]
    pub fn next_while_alone(
        &mut self,
        mut test: impl FnMut(&TokenTree) -> bool,
    ) -> Option<TokenStream> {
        if self.peek().is_none() || !test(self.peek().expect("was peeked")) {
            None
        } else {
            let mut token_stream = TokenStream::new();
            // The newest token is withheld in `last` so that, after the loop,
            // the final one can be pushed with `alone` spacing.
            let mut last = self.next().expect("was peeked");
            while let Some(token) = self.next_if(&mut test) {
                token_stream.push(mem::replace(&mut last, token));
            }
            token_stream.push(last.alone());
            Some(token_stream)
        }
    }
559
560 #[must_use]
564 pub fn next_until(&mut self, mut test: impl FnMut(&TokenTree) -> bool) -> Option<TokenStream> {
565 self.next_while(|token| !test(token))
566 }
567
568 #[must_use]
574 pub fn next_until_alone(
575 &mut self,
576 mut test: impl FnMut(&TokenTree) -> bool,
577 ) -> Option<TokenStream> {
578 self.next_while_alone(|token| !test(token))
579 }
580
    /// Consumes and returns the next `n` tokens; `None` (without advancing)
    /// when fewer than `n` tokens remain.
    #[must_use]
    pub fn next_n(&mut self, n: usize) -> Option<TokenStream> {
        self.next_if_each(PeekLen(n))
    }
604
    /// Like [`Self::next_n`], but a trailing punct's spacing is converted to
    /// `Alone`.
    #[must_use]
    pub fn next_n_alone(&mut self, n: usize) -> Option<TokenStream> {
        self.next_if_each_alone(PeekLen(n))
    }
630
631 #[must_use]
659 pub fn peek_range(&mut self, range: impl RangeBounds<usize>) -> Option<TokenStream> {
660 let start = match range.start_bound() {
661 Bound::Included(&n) => n,
662 Bound::Excluded(&n) => n + 1,
663 Bound::Unbounded => 0,
664 };
665 let len = match range.end_bound() {
666 Bound::Included(&n) if n < start => return None,
667 Bound::Included(&n) => n - start + 1,
668 Bound::Excluded(&n) if n <= start => return None,
669 Bound::Excluded(&n) => n - start,
670 Bound::Unbounded => {
671 panic!("unbounded range not supported, use `clone().skip()` instead")
672 }
673 };
674
675 self.peek_n_if_each(start, PeekLen(len))
676 }
677
678 #[must_use]
711 pub fn peek_range_alone(&mut self, range: impl RangeBounds<usize>) -> Option<TokenStream> {
712 let start = match range.start_bound() {
713 Bound::Included(&n) => n,
714 Bound::Excluded(&n) => n + 1,
715 Bound::Unbounded => 0,
716 };
717 let len = match range.end_bound() {
718 Bound::Included(&n) if n < start => return None,
719 Bound::Included(&n) => n - start + 1,
720 Bound::Excluded(&n) if n <= start => return None,
721 Bound::Excluded(&n) => n - start,
722 Bound::Unbounded => {
723 panic!("unbounded range not supported, use `clone().skip()` instead")
724 }
725 };
726
727 self.peek_n_if_each_alone(start, PeekLen(len))
728 }
729}
730
731impl<I, const PEEKER_LEN: usize> TokenParser<I, PEEKER_LEN>
732where
733 I: Iterator<Item = TokenTree>,
734{
735 #[must_use]
737 pub fn into_token_stream(self) -> TokenStream {
738 self.into()
739 }
740
    /// Collects a run of adjacent punctuation: consumes punct tokens for as
    /// long as the *previous* punct had `Joint` spacing.
    #[must_use]
    pub fn next_punctuation_group(&mut self) -> Option<TokenStream> {
        // `joined` carries the previous token's spacing into the next
        // iteration; the first token is always accepted if it is a punct.
        let mut joined = true;
        self.next_while(move |token| {
            let ret = joined && token.is_punct();
            joined = token.is_joint();
            ret
        })
    }
752
    /// Returns the next token as an [`Ident`] when it equals `keyword`,
    /// without advancing the parser.
    #[must_use]
    pub fn peek_keyword<K: ?Sized>(&mut self, keyword: &K) -> Option<&Ident>
    where
        Ident: PartialEq<K>,
    {
        self.peek_n_keyword(0, keyword)
    }
775
776 #[must_use]
790 pub fn peek_n_keyword<K: ?Sized>(&mut self, n: usize, keyword: &K) -> Option<&Ident>
791 where
792 Ident: PartialEq<K>,
793 {
794 self.peek_n_ident(n).filter(|&ident| ident == keyword)
795 }
796
    /// Consumes and returns the next token when it is an [`Ident`] equal to
    /// `keyword`; leaves the parser untouched otherwise.
    #[must_use]
    #[allow(clippy::missing_panics_doc)]
    pub fn next_keyword<K: ?Sized>(&mut self, keyword: &K) -> Option<Ident>
    where
        Ident: PartialEq<K>,
    {
        self.next_if(|token| matches!(token.ident(), Some(ident) if ident == keyword))
            .map(|token| token.into_ident().expect("is ident"))
    }
819
    /// Consumes one type expression, stopping before a top-level `,` (i.e.
    /// outside `<...>`) or any `;`.
    ///
    /// Returns `None` when the parser is empty or directly at `,`/`;`.
    #[must_use]
    pub fn next_type(&mut self) -> Option<TokenStream> {
        let first = self.peek()?;
        if first.is_comma() || first.is_semi() {
            return None;
        };

        // Tracks `<`/`>` nesting so commas inside generic arguments are kept.
        let mut chevron_level: u32 = 0;

        self.next_while_alone(|token| {
            if token.is_less_than() {
                chevron_level += 1;
            } else if token.is_greater_than() {
                // An unmatched `>` ends the type.
                if chevron_level == 0 {
                    return false;
                }
                chevron_level -= 1;
            }
            // Precedence note: `&&` binds tighter than `||`, so `;`
            // terminates at *any* nesting level while `,` only terminates at
            // level 0.
            !(chevron_level == 0 && token.is_comma() || token.is_semi())
        })
    }
863
    /// Consumes one expression, stopping before a top-level `,`, `;` or `=>`.
    ///
    /// `<` at the start of a (sub)expression is treated as opening a
    /// qualified path / generic argument list (parsed via
    /// [`Self::next_type`]) so that `a < b` comparisons elsewhere are not
    /// mistaken for generics. Returns `None` when the parser is empty or
    /// directly at `,`/`;`/`=>`.
    #[must_use]
    #[allow(clippy::missing_panics_doc)]
    pub fn next_expression(&mut self) -> Option<TokenStream> {
        if self.peek().is_none()
            || matches!(self.peek(), Some(token) if token.is_comma() || token.is_semi())
            || self.peek_tt_fat_arrow().is_some()
        {
            return None;
        }

        // True when the next token can begin an expression (i.e. at the start
        // or right after a punct); only then may `<` introduce generics.
        let mut start = true;

        let mut tokens = TokenStream::new();
        // The newest token is withheld in `last` so the final token can be
        // emitted with `alone` spacing at the end.
        let mut last = None;

        'outer: while let Some(token) = self.peek() {
            if token.is_semi() || token.is_comma() || self.peek_tt_fat_arrow().is_some() {
                break;
            }
            let token = self.peek().unwrap();
            if start && token.is_less_than() {
                // Consume `<`, then a comma-separated list of types up to the
                // matching `>`.
                tokens.extend(mem::replace(
                    &mut last,
                    Some(self.next().expect("token was peeked")),
                ));
                loop {
                    if let Some(ty) = self.next_type() {
                        for token in ty {
                            tokens.extend(mem::replace(&mut last, Some(token)));
                        }
                    }
                    let Some(token) = self.peek() else {
                        break 'outer;
                    };
                    if token.is_semi() {
                        break 'outer;
                    }
                    if token.is_greater_than() {
                        tokens.extend(mem::replace(
                            &mut last,
                            Some(self.next().expect("token was peeked")),
                        ));
                        break;
                    } else if token.is_comma() {
                        tokens.extend(mem::replace(
                            &mut last,
                            Some(self.next().expect("token was peeked")),
                        ));
                        continue;
                    };
                }
            }
            if let Some(token) = self.next() {
                // After a non-punct token, `<` would be a comparison.
                start = token.is_punct();
                tokens.extend(mem::replace(&mut last, Some(token)));
            }
        }

        // Emit the withheld final token with `alone` spacing.
        tokens.extend(last.map(TokenTree::alone));

        Some(tokens.into_iter().collect())
    }
955
956 #[must_use]
958 pub fn next_string(&mut self) -> Option<String> {
959 let lit = self.peek().and_then(TokenTreeLiteral::string)?;
960 self.next();
961 Some(lit)
962 }
963
964 #[must_use]
966 pub fn next_bool(&mut self) -> Option<bool> {
967 self.next_if(|t| {
968 t.ident()
969 .map_or(false, |ident| ident == "true" || ident == "false")
970 })
971 .map(|t| matches!(t.ident(), Some(ident) if ident == "true"))
972 }
973}
974
impl<I, const PEEKER_LEN: usize> TokenParser<I, PEEKER_LEN>
where
    I: Iterator<Item = TokenTree>,
{
    // Typed `next_*`/`peek_*`/`peek_n_*` accessors for each token tree
    // variant.
    token_tree!(
        "a", is_group, group, into_group, peek_group, peek_n_group, next_group, Group;
        "an", is_ident, ident, into_ident, peek_ident, peek_n_ident, next_ident, Ident;
        "a", is_punct, punct, into_punct, peek_punct, peek_n_punct, next_punct, Punct;
        "a", is_literal, literal, into_literal, peek_literal, peek_n_literal, next_literal, Literal;
    );

    // Accessors for `(...)`, `{...}` and `[...]` groups.
    delimited!(
        is_parenthesized, peek_parenthesized, peek_n_parenthesized, next_parenthesized, "parenthesized";
        is_braced, peek_braced, peek_n_braced, next_braced, "braced";
        is_bracketed, peek_bracketed, peek_n_bracketed, next_bracketed, "bracketed";
    );
}
992impl<I, const PEEKER_LEN: usize> TokenParser<I, PEEKER_LEN>
1001where
1002 I: Iterator<Item = TokenTree>,
1003{
    // One entry per punctuation "tt": the literal text, the matching rules
    // (with `!` exclusions preventing a short punct from matching the prefix
    // of a longer one, e.g. `+` does not match inside `+=`), and the
    // generated peek/peek_n/next method names.
    punct_tt!(
        "+", [is_plus !is_equals], peek_tt_plus, peek_n_tt_plus, next_tt_plus;
        "-", [is_minus !is_equals], peek_tt_minus, peek_n_tt_minus, next_tt_minus;
        "*", [is_asterix !is_equals], peek_tt_star, peek_n_tt_star, next_tt_star;
        "/", [is_slash !is_equals], peek_tt_slash, peek_n_tt_slash, next_tt_slash;
        "%", [is_percent !is_equals], peek_tt_percent, peek_n_tt_percent, next_tt_percent;
        "^", [is_caret !is_equals], peek_tt_caret, peek_n_tt_caret, next_tt_caret;
        "!", [is_exclamation !is_equals], peek_tt_not, peek_n_tt_not, next_tt_not;
        "&", [is_and !is_equals, is_and], peek_tt_and, peek_n_tt_and, next_tt_and;
        "|", [is_pipe !is_equals, is_pipe], peek_tt_or, peek_n_tt_or, next_tt_or;
        "&&", [is_and, is_and !is_equals], peek_tt_and_and, peek_n_tt_and_and, next_tt_and_and;
        "||", [is_pipe, is_pipe !is_equals], peek_tt_or_or, peek_n_tt_or_or, next_tt_or_or;
        "<<", [is_less_than, is_less_than !is_equals], peek_tt_shl, peek_n_tt_shl, next_tt_shl;
        ">>", [is_greater_than, is_greater_than !is_equals], peek_tt_shr, peek_n_tt_shr, next_tt_shr;
        "+=", [is_plus, is_equals], peek_tt_plus_eq, peek_n_tt_plus_eq, next_tt_plus_eq;
        "-=", [is_minus, is_equals], peek_tt_minus_eq, peek_n_tt_minus_eq, next_tt_minus_eq;
        "*=", [is_asterix, is_equals], peek_tt_star_eq, peek_n_tt_star_eq, next_tt_star_eq;
        "/=", [is_slash, is_equals], peek_tt_slash_eq, peek_n_tt_slash_eq, next_tt_slash_eq;
        "%=", [is_percent, is_equals], peek_tt_percent_eq, peek_n_tt_percent_eq, next_tt_percent_eq;
        "^=", [is_caret, is_equals], peek_tt_caret_eq, peek_n_tt_caret_eq, next_tt_caret_eq;
        "&=", [is_and, is_equals], peek_tt_and_eq, peek_n_tt_and_eq, next_tt_and_eq;
        "|=", [is_pipe, is_equals], peek_tt_or_eq, peek_n_tt_or_eq, next_tt_or_eq;
        "<<=", [is_less_than, is_less_than, is_equals], peek_tt_shl_eq, peek_n_tt_shl_eq, next_tt_shl_eq;
        ">>=", [is_greater_than, is_greater_than, is_equals], peek_tt_shr_eq, peek_n_tt_shr_eq, next_tt_shr_eq;
        "=", [is_equals !is_equals], peek_tt_eq, peek_n_tt_eq, next_tt_eq;
        "==", [is_equals, is_equals], peek_tt_eq_eq, peek_n_tt_eq_eq, next_tt_eq_eq;
        "!=", [is_exclamation, is_equals], peek_tt_ne, peek_n_tt_ne, next_tt_ne;
        ">", [is_greater_than !is_equals], peek_tt_gt, peek_n_tt_gt, next_tt_gt;
        "<", [is_less_than !is_equals], peek_tt_lt, peek_n_tt_lt, next_tt_lt;
        ">=", [is_greater_than, is_equals], peek_tt_ge, peek_n_tt_ge, next_tt_ge;
        "<=", [is_less_than, is_equals], peek_tt_le, peek_n_tt_le, next_tt_le;
        "@", [is_at], peek_tt_at, peek_n_tt_at, next_tt_at;
        ".", [is_dot !is_dot], peek_tt_dot, peek_n_tt_dot, next_tt_dot;
        "..", [is_dot, is_dot !is_dot, is_equals], peek_tt_dot_dot, peek_n_tt_dot_dot, next_tt_dot_dot;
        "...", [is_dot, is_dot, is_dot], peek_tt_dot_dot_dot, peek_n_tt_dot_dot_dot, next_tt_dot_dot_dot;
        "..=", [is_dot, is_dot, is_equals], peek_tt_dot_dot_eq, peek_n_tt_dot_dot_eq, next_tt_dot_dot_eq;
        ",", [is_comma], peek_tt_comma, peek_n_tt_comma, next_tt_comma;
        ";", [is_semi], peek_tt_semi, peek_n_tt_semi, next_tt_semi;
        ":", [is_colon !is_colon], peek_tt_colon, peek_n_tt_colon, next_tt_colon;
        "::", [is_colon, is_colon], peek_tt_path_sep, peek_n_tt_path_sep, next_tt_path_sep;
        "->", [is_minus, is_greater_than], peek_tt_r_arrow, peek_n_tt_r_arrow, next_tt_r_arrow;
        "=>", [is_equals, is_greater_than], peek_tt_fat_arrow, peek_n_tt_fat_arrow, next_tt_fat_arrow;
        "#", [is_pound], peek_tt_pound, peek_n_tt_pound, next_tt_pound;
        "$", [is_dollar], peek_tt_dollar, peek_n_tt_dollar, next_tt_dollar;
        "?", [is_question], peek_tt_question, peek_n_tt_question, next_tt_question;
        "~", [is_tilde], peek_tt_tilde, peek_n_tt_tilde, next_tt_tilde;
    );
1051
    /// Consumes the longest punctuation sequence that macro_rules' `tt`
    /// matcher would treat as a single token (three, two, or one punct
    /// characters); `None` when the next token is not a punct.
    #[must_use]
    #[allow(clippy::missing_panics_doc)]
    pub fn next_macro_rules_tt(&mut self) -> Option<TokenStream> {
        // Make up to three tokens available in the peek buffer; the result is
        // intentionally ignored (fewer tokens may remain).
        _ = self.peek_n(2);
        let first = self.peek.first().and_then(TokenTree::punct)?;
        // Only consider follow-up puncts while spacing stays `Joint`.
        let second = first
            .is_joint()
            .then(|| self.peek.get(1).and_then(TokenTree::punct))
            .flatten();
        let third = second
            .is_some_and(TokenTreePunct::is_joint)
            .then(|| self.peek.get(2).and_then(TokenTree::punct))
            .flatten();
        // '_' marks "no punct here"; it cannot occur as a real punct char.
        let chars = [
            first.as_char(),
            second.map_or('_', Punct::as_char),
            third.map_or('_', Punct::as_char),
        ];
        if matches!(
            chars,
            ['.', '.', '.' | '='] | ['<', '<', '='] | ['>', '>', '=']
        ) {
            // Three-character puncts: `...`, `..=`, `<<=`, `>>=`.
            self.next_n_alone(3)
        } else if matches!(
            &chars[0..2],
            ['&', '&' | '=']
                | ['|', '|' | '=']
                | ['<', '<' | '=']
                | ['>' | '-' | '=', '>']
                | ['+' | '-' | '*' | '/' | '%' | '^' | '=' | '!' | '>', '=']
                | ['.', '.']
                | [':', ':']
        ) {
            // Two-character puncts (e.g. `&&`, `->`, `+=`, `::`).
            self.next_n_alone(2)
        } else {
            // A single punctuation character.
            self.next_n_alone(1)
        }
    }
1101}
1102
#[cfg(test)]
mod test {
    use quote::quote;

    use super::*;
    use crate::assert_tokens;

    // `next_type` splits on top-level commas but keeps commas inside
    // chevrons.
    #[test]
    fn ty() {
        let mut at = TokenParser::new(quote! {Name, <Some, Generic, Type>});
        assert_tokens!(at.next_type().unwrap(), { Name });
        at.next();
        assert_tokens!(
            at.next_type().unwrap(),
            { < Some , Generic , Type > }
        );
    }

    // `next_expression` stops at top-level `,`/`=>` and must not confuse
    // comparison operators with generic argument lists.
    #[test]
    fn expr() {
        let mut at = TokenParser::new(
            quote! {a + b, <Some, Generic, Type>::something + <a,b> * a < b, "hi" => hello},
        );
        assert_tokens!(at.next_expression().unwrap(), { a + b });
        at.next();
        assert_tokens!(
            at.next_expression().unwrap(), { <Some, Generic, Type>::something + <a,b> * a < b }
        );
        at.next();
        assert_tokens!(at.next_expression().unwrap(), { "hi" });
        at.next();
        at.next();
        assert_tokens!(at.next_expression().unwrap(), { hello });

        // The final token of an expression must come out with alone spacing.
        let mut at = TokenParser::from_str("1..,").unwrap();
        let expr: Vec<_> = at.next_expression().unwrap().into_iter().collect();
        assert!(expr.last().unwrap().is_alone());
        assert_tokens!(expr, { 1.. });
    }

    // Multi-character puncts are matched greedily but correctly: `..` must
    // not consume the prefix of `..=`, etc.
    #[test]
    fn combined_tokens() {
        let mut parser = TokenParser::from_str("->&&..=>=+,-..,+=").unwrap();
        assert_tokens!(parser.next_tt_r_arrow().unwrap(), { -> });
        assert_tokens!(parser.next_tt_and_and().unwrap(), { && });
        assert_tokens!(parser.next_tt_dot_dot_eq().unwrap(), { ..= });
        assert_tokens!(parser.next_tt_ge().unwrap(), { >= });
        assert_tokens!(parser.next_tt_plus().unwrap(), { + });
        assert_tokens!(parser.next_tt_comma().unwrap(), { , });
        assert_tokens!(parser.next_tt_minus().unwrap(), { - });
        assert_tokens!(parser.next_tt_dot_dot().unwrap(), { .. });
        assert_tokens!(parser.next_tt_comma().unwrap(), { , });
        assert_tokens!(parser.next_tt_plus_eq().unwrap(), { += });
    }

    // Peeking never advances the parser; typed peeks only match their
    // variant.
    #[test]
    fn peek() {
        let mut parser = TokenParser::new(quote! {
            0 {} 2 3 += .. =
        });
        assert_eq!(parser.peek().unwrap().to_string(), "0");
        assert_eq!(parser.peek_n(0).unwrap().to_string(), "0");
        assert_eq!(parser.peek_n(1).unwrap().to_string().replace(' ', ""), "{}");
        assert_eq!(parser.peek_n(2).unwrap().to_string(), "2");

        assert_eq!(parser.peek_literal().unwrap().to_string(), "0");
        assert!(parser.peek_group().is_none());
        parser.next().unwrap();
        assert!(parser.peek_group().is_some());
        assert!(parser.peek_n_tt_plus_eq(3).is_some());
        assert!(parser.peek_n_tt_dot_dot(5).is_some());
    }

    // `next_keyword` only consumes on a match, leaving the parser untouched
    // otherwise.
    #[test]
    fn keyword() {
        let mut parser: TokenParser<_, 4> = TokenParser::from(quote! {
            in out and or
        });
        assert_eq!(parser.next_keyword("in").unwrap().to_string(), "in");
        assert_eq!(parser.next_keyword("out").unwrap().to_string(), "out");
        assert!(parser.next_keyword("or").is_none());
        assert_eq!(parser.next_keyword("and").unwrap().to_string(), "and");
        assert_eq!(parser.next_keyword("or").unwrap().to_string(), "or");
        assert!(parser.next_keyword("or").is_none());
    }
}