1#[cfg(feature = "proc_macro2")]
16pub use proc_macro2::{Group, Ident, Literal, Punct, TokenStream, TokenTree};
17
18#[cfg(not(feature = "proc_macro2"))]
19pub use proc_macro::{Group, Ident, Literal, Punct, TokenStream, TokenTree};
20
21#[allow(clippy::wildcard_imports)]
22use crate::*;
23
24use std::marker::PhantomData;
25use std::ops::{Deref, DerefMut};
26
27fn count_nested_tokens(stream: &TokenStream) -> usize {
30 stream
31 .clone()
32 .into_iter()
33 .map(|tt| match tt {
34 TokenTree::Group(g) => count_tokens_recursive(g.stream()),
36 _ => 0, })
38 .sum()
39}
40
41pub(crate) fn count_tokens_recursive(stream: TokenStream) -> usize {
44 stream
45 .into_iter()
46 .map(|tt| match tt {
47 TokenTree::Group(g) => 1 + count_tokens_recursive(g.stream()),
48 _ => 1,
49 })
50 .sum()
51}
52
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_count_tokens_recursive_basic() {
        // Three plain tokens, no nesting.
        let tokens: TokenStream = "a b c".parse().unwrap();
        assert_eq!(count_tokens_recursive(tokens), 3);
    }

    #[test]
    fn test_count_tokens_recursive_with_group() {
        // `a`, the braces (1), `b`, `c`, `d` -> 5 tokens.
        let tokens: TokenStream = "a { b c } d".parse().unwrap();
        assert_eq!(count_tokens_recursive(tokens), 5);
    }

    #[test]
    fn test_count_tokens_recursive_nested_groups() {
        // Each group counts as one token plus its contents.
        let tokens: TokenStream = "a { b { c } d } e".parse().unwrap();
        assert_eq!(count_tokens_recursive(tokens), 7);
    }

    #[test]
    fn test_count_tokens_recursive_empty() {
        // No tokens at all.
        let tokens: TokenStream = "".parse().unwrap();
        assert_eq!(count_tokens_recursive(tokens), 0);
    }

    #[test]
    fn test_count_tokens_recursive_empty_group() {
        // An empty group still counts as one token.
        let tokens: TokenStream = "a { } b".parse().unwrap();
        assert_eq!(count_tokens_recursive(tokens), 3);
    }

    #[test]
    fn test_count_tokens_recursive_multiple_groups() {
        // Three groups with one token each: 3 * (1 + 1).
        let tokens: TokenStream = "{ a } { b } { c }".parse().unwrap();
        assert_eq!(count_tokens_recursive(tokens), 6);
    }

    #[test]
    fn test_count_nested_tokens() {
        // Only `b` and `c` sit inside a group.
        let tokens: TokenStream = "a { b c } d".parse().unwrap();
        assert_eq!(count_nested_tokens(&tokens), 2);
    }

    #[test]
    fn test_count_nested_tokens_nested() {
        // `b`, the inner group, `c`, and `d` are nested.
        let tokens: TokenStream = "a { b { c } d } e".parse().unwrap();
        assert_eq!(count_nested_tokens(&tokens), 4);
    }

    #[test]
    fn test_count_nested_tokens_empty() {
        // No groups, hence nothing is nested.
        let tokens: TokenStream = "a b c".parse().unwrap();
        assert_eq!(count_nested_tokens(&tokens), 0);
    }

    #[test]
    fn test_count_nested_tokens_empty_group() {
        // Empty groups contribute no nested tokens.
        let tokens: TokenStream = "a { } b".parse().unwrap();
        assert_eq!(count_nested_tokens(&tokens), 0);
    }

    #[test]
    fn test_count_nested_tokens_multiple_groups() {
        // One nested token per group.
        let tokens: TokenStream = "{ a } { b } { c }".parse().unwrap();
        assert_eq!(count_nested_tokens(&tokens), 3);
    }
}
143
144impl Parser for TokenStream {
148 fn parser(tokens: &mut TokenIter) -> Result<Self> {
149 let mut output = TokenStream::new();
150 output.extend(&mut *tokens);
151
152 let nested_count = count_nested_tokens(&output);
156 tokens.add(nested_count);
157
158 Ok(output)
159 }
160}
161
162impl ToTokens for TokenStream {
163 fn to_tokens(&self, tokens: &mut TokenStream) {
164 tokens.extend(self.clone());
165 }
166}
167
/// A [`TokenStream`] that is guaranteed to contain at least one token.
pub struct NonEmptyTokenStream(pub TokenStream);
171
172impl TryFrom<TokenStream> for NonEmptyTokenStream {
173 type Error = Error;
174
175 fn try_from(value: TokenStream) -> Result<Self> {
176 if value.is_empty() {
177 Error::unexpected_end()
178 } else {
179 Ok(Self(value))
180 }
181 }
182}
183
impl Parser for NonEmptyTokenStream {
    /// Parses the remaining tokens, failing when none are available.
    fn parser(tokens: &mut TokenIter) -> Result<Self> {
        // Lookahead: require at least one token without consuming it.
        tokens.parse::<Expect<TokenTree>>().refine_err::<Self>()?;
        // Parsing a `TokenStream` never fails (see its `Parser` impl),
        // so the unwrap cannot panic here.
        #[allow(clippy::unwrap_used)]
        Ok(Self(TokenStream::parser(tokens).unwrap()))
    }
}
192
193impl ToTokens for NonEmptyTokenStream {
194 fn to_tokens(&self, tokens: &mut TokenStream) {
195 tokens.extend(self.0.clone());
196 }
197}
198
#[test]
#[cfg(feature = "proc_macro2")]
fn test_non_empty_token_stream() {
    // A single identifier satisfies the non-empty requirement.
    let mut iter = "ident".to_token_iter();
    let _ = NonEmptyTokenStream::parser(&mut iter).unwrap();
}
205
#[test]
#[cfg(feature = "proc_macro2")]
fn test_empty_token_stream() {
    // Empty input must be rejected.
    let mut iter = "".to_token_iter();
    assert!(NonEmptyTokenStream::parser(&mut iter).is_err());
}
212
213impl Parser for TokenTree {
214 fn parser(tokens: &mut TokenIter) -> Result<Self> {
215 match tokens.next() {
216 Some(token) => Ok(token),
217 None => Error::unexpected_end(),
218 }
219 }
220}
221
222impl ToTokens for TokenTree {
223 #[inline]
224 fn to_tokens(&self, tokens: &mut TokenStream) {
225 tokens.extend(std::iter::once(self.clone()));
226 }
227}
228
229impl Parser for Group {
230 fn parser(tokens: &mut TokenIter) -> Result<Self> {
231 match tokens.next() {
232 Some(TokenTree::Group(group)) => {
233 let nested_count = count_tokens_recursive(group.stream());
235 tokens.add(nested_count);
236 Ok(group)
237 }
238 at => Error::unexpected_token(at, tokens),
239 }
240 }
241}
242
243impl ToTokens for Group {
244 #[inline]
245 fn to_tokens(&self, tokens: &mut TokenStream) {
246 tokens.extend(std::iter::once(TokenTree::Group(self.clone())));
247 }
248}
249
250impl Parser for Ident {
251 fn parser(tokens: &mut TokenIter) -> Result<Self> {
252 match tokens.next() {
253 Some(TokenTree::Ident(ident)) => Ok(ident),
254 at => Error::unexpected_token(at, tokens),
255 }
256 }
257}
258
259impl ToTokens for Ident {
260 #[inline]
261 fn to_tokens(&self, tokens: &mut TokenStream) {
262 tokens.extend(std::iter::once(TokenTree::Ident(self.clone())));
263 }
264}
265
266impl Parser for Punct {
267 fn parser(tokens: &mut TokenIter) -> Result<Self> {
268 match tokens.next() {
269 Some(TokenTree::Punct(punct)) => Ok(punct),
270 at => Error::unexpected_token(at, tokens),
271 }
272 }
273}
274
275impl ToTokens for Punct {
276 #[inline]
277 fn to_tokens(&self, tokens: &mut TokenStream) {
278 tokens.extend(std::iter::once(TokenTree::Punct(self.clone())));
279 }
280}
281
282impl Parser for Literal {
283 fn parser(tokens: &mut TokenIter) -> Result<Self> {
284 match tokens.next() {
285 Some(TokenTree::Literal(literal)) => Ok(literal),
286 at => Error::unexpected_token(at, tokens),
287 }
288 }
289}
290
291impl ToTokens for Literal {
292 #[inline]
293 fn to_tokens(&self, tokens: &mut TokenStream) {
294 tokens.extend(std::iter::once(TokenTree::Literal(self.clone())));
295 }
296}
297
/// Wraps a parsed value of type `T` together with its string
/// representation, so repeated stringification is free.
#[derive(Clone)]
pub struct Cached<T> {
    /// The parsed value.
    value: T,
    /// String form of `value`; kept in sync with it.
    string: String,
}
317
318impl<T: Parse + ToTokens> Parser for Cached<T> {
319 fn parser(tokens: &mut TokenIter) -> Result<Self> {
320 let value = T::parser(tokens).refine_err::<Self>()?;
321 let string = value.tokens_to_string();
322 Ok(Self { value, string })
323 }
324}
325
326impl<T: Parse + ToTokens> ToTokens for Cached<T> {
327 #[inline]
328 fn to_tokens(&self, tokens: &mut TokenStream) {
329 self.value.to_tokens(tokens);
330 }
331}
332
333impl<T: Parse + ToTokens> Cached<T> {
334 pub fn set(&mut self, value: T) {
336 self.value = value;
337 self.string = self.value.tokens_to_string();
338 }
339}
340
341impl<T: Parse> Cached<T> {
342 pub fn into_inner(self) -> T {
344 self.value
345 }
346
347 pub fn into_string(self) -> String {
349 self.string
350 }
351
352 #[allow(clippy::missing_const_for_fn)] pub fn as_str(&self) -> &str {
355 &self.string
356 }
357}
358
#[cfg(feature = "proc_macro2")]
impl<T: Parse> Cached<T> {
    /// Parses `s` into a `T`, caching `s` as the string form.
    ///
    /// # Panics
    ///
    /// Panics when `s` does not parse as a `T`.
    #[must_use]
    pub fn new(s: &str) -> Self {
        let value = s.into_token_iter().parse().expect("Valid token");
        Self {
            value,
            string: s.to_string(),
        }
    }

    /// Fallible variant of [`Cached::new`] that takes ownership of `s`.
    ///
    /// # Errors
    ///
    /// Returns an error when `s` does not parse as a `T`.
    pub fn from_string(s: String) -> Result<Self> {
        let value = s.to_token_iter().parse()?;
        Ok(Self { value, string: s })
    }
}
401
402impl<T: Parse> Deref for Cached<T> {
403 type Target = T;
404
405 fn deref(&self) -> &Self::Target {
406 &self.value
407 }
408}
409
410impl<T: Parse> PartialEq<&str> for Cached<T> {
411 fn eq(&self, other: &&str) -> bool {
412 self.as_str() == *other
413 }
414}
415
416impl<T: Parse> PartialEq for Cached<T> {
417 fn eq(&self, other: &Self) -> bool {
418 self.as_str() == other.as_str()
419 }
420}
421
// String-based equality is a total equivalence relation, so `Eq` holds.
impl<T: Parse> Eq for Cached<T> {}
423
424impl<T: Parse> std::hash::Hash for Cached<T> {
425 fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
426 self.as_str().hash(state);
427 }
428}
429
430impl<T: Parse> AsRef<T> for Cached<T> {
431 fn as_ref(&self) -> &T {
432 &self.value
433 }
434}
435
436impl<T: Parse> AsRef<str> for Cached<T> {
437 fn as_ref(&self) -> &str {
438 self.as_str()
439 }
440}
441
442#[mutants::skip]
443impl<T: Parse + std::fmt::Debug> std::fmt::Debug for Cached<T> {
444 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
445 f.debug_struct(&format!("Cached<{}>", std::any::type_name::<T>()))
446 .field("value", &self.value)
447 .field("string", &self.string)
448 .finish()
449 }
450}
451
452impl<T: Into<TokenTree>> From<Cached<T>> for TokenTree {
454 fn from(cached: Cached<T>) -> Self {
455 cached.value.into()
456 }
457}
458
#[cfg(feature = "proc_macro2")]
impl<T: Parse> TryFrom<String> for Cached<T> {
    type Error = Error;

    /// Parses the string into a `T`, keeping `value` as the cached form.
    fn try_from(value: String) -> Result<Self> {
        let mut iter = value.to_token_iter();
        let parsed = T::parser(&mut iter).refine_err::<Self>()?;
        Ok(Self {
            value: parsed,
            string: value,
        })
    }
}
472
#[cfg(feature = "proc_macro2")]
impl<T: Parse> TryFrom<&str> for Cached<T> {
    type Error = Error;

    /// Delegates to the owned-`String` conversion.
    fn try_from(value: &str) -> Result<Self> {
        Self::try_from(String::from(value))
    }
}
481
#[test]
#[cfg(feature = "proc_macro2")]
fn test_cached_into_tt() {
    // A cached identifier must convert into a plain `TokenTree`.
    let mut iter = "ident".to_token_iter();
    let cached = Cached::<Ident>::parser(&mut iter).unwrap();
    let _tt: TokenTree = cached.into();
}
489
// Generates, for each `Alias = Base` pair: a `pub type Alias = Cached<Base>`
// alias plus a `From<Alias> for Base` conversion that unwraps the value.
macro_rules! gen_cached_types {
    ($($cached:ident = $basic:ident);* $(;)?) => {
        $(
            #[doc = concat!("[`", stringify!($basic), "`] with cached string representation.")]
            pub type $cached = Cached<$basic>;

            #[doc = concat!("Convert `", stringify!($cached), " into a `", stringify!($basic), "`.")]
            impl From<$cached> for $basic {
                fn from(cached: $cached) -> Self {
                    cached.value
                }
            }
        )*
    }
}
505
// Cached variants of the basic token types.
gen_cached_types! {
    CachedGroup = Group;
    CachedIdent = Ident;
    CachedPunct = Punct;
    CachedLiteral = Literal;
    CachedLiteralString = LiteralString;
    CachedLiteralInteger = LiteralInteger;
}
514
/// [`TokenTree`] with cached string representation.
pub type CachedTokenTree = Cached<TokenTree>;
518
/// Creates an [`Ident`] from `format!`-style arguments.
///
/// # Panics
///
/// Panics when the formatted text is not a valid identifier.
#[cfg(feature = "proc_macro2")]
#[macro_export]
macro_rules! format_ident {
    ($($args:tt)*) => {
        <$crate::Ident as $crate::Parse>::parse(&mut format!($($args)*).into_token_iter()).expect("Not a valid identifier")
    };
}
539
/// Creates a `CachedIdent` from `format!`-style arguments.
///
/// # Panics
///
/// Panics when the formatted text is not a valid identifier.
#[cfg(feature = "proc_macro2")]
#[macro_export]
macro_rules! format_cached_ident {
    ($($args:tt)*) => {
        $crate::CachedIdent::from_string(format!($($args)*)).expect("Not a valid identifier")
    };
}
560
/// Creates a `LiteralString` from `format!`-style arguments; the format
/// string is wrapped in double quotes before parsing.
///
/// # Panics
///
/// Panics when the quoted, formatted text is not a valid string literal
/// (e.g. when the expansion contains unescaped `"`).
#[cfg(feature = "proc_macro2")]
#[macro_export]
macro_rules! format_literal_string {
    ($fmt:literal $(, $($args:tt)*)?) => {
        <$crate::LiteralString as $crate::Parse>::parse(&mut format!(concat!("\"",$fmt,"\"") $(, $($args)*)?)
            .into_token_iter())
            .expect("Not a valid string literal")
    };
}
584
/// Creates a [`Literal`] from `format!`-style arguments.
///
/// # Panics
///
/// Panics when the formatted text is not a valid literal.
#[cfg(feature = "proc_macro2")]
#[macro_export]
macro_rules! format_literal{
    ($($args:tt)*) => {
        <$crate::Literal as $crate::Parse>::parse(&mut format!($($args)*)
            .into_token_iter())
            .expect("Not a valid literal")
    };
}
608
/// Unit type that always parses successfully without consuming any tokens
/// and emits nothing.
#[derive(Debug, Clone, Default)]
pub struct Nothing;
615
impl Parser for Nothing {
    /// Always succeeds; consumes no input.
    #[inline]
    #[mutants::skip]
    fn parser(_tokens: &mut TokenIter) -> Result<Self> {
        Ok(Self)
    }
}
623
impl ToTokens for Nothing {
    /// Emits no tokens.
    #[inline]
    fn to_tokens(&self, _tokens: &mut TokenStream) {
    }
}
630
/// Unit type that never parses: its parser always returns an
/// "unexpected token" error, and converting it to tokens panics.
#[derive(Debug, Clone)]
pub struct Invalid;
639
impl Parser for Invalid {
    /// Always fails without consuming input.
    fn parser(tokens: &mut TokenIter) -> Result<Self> {
        Error::unexpected_token(None, tokens)
    }
}
645
impl ToTokens for Invalid {
    /// # Panics
    ///
    /// Always panics: `Invalid` has no token representation.
    #[inline]
    fn to_tokens(&self, _tokens: &mut TokenStream) {
        unimplemented!("`Invalid` can not be converted to tokens")
    }
}
652
/// Marker type whose `Parser`/`ToTokens` impls (behind the `nonparseable`
/// feature) unconditionally panic.
#[derive(Debug, Clone)]
pub struct NonParseable;
677
#[cfg(feature = "nonparseable")]
impl Parser for NonParseable {
    /// # Panics
    ///
    /// Always panics: `NonParseable` must never be parsed.
    #[inline]
    fn parser(_tokens: &mut TokenIter) -> Result<Self> {
        unimplemented!("`NonParseable` can not be parsed")
    }
}
685
#[cfg(feature = "nonparseable")]
impl ToTokens for NonParseable {
    /// # Panics
    ///
    /// Always panics: `NonParseable` has no token representation.
    #[mutants::skip]
    #[inline]
    fn to_tokens(&self, _tokens: &mut TokenStream) {
        unimplemented!("`NonParseable` can not be converted to tokens")
    }
}
694
/// Negative lookahead: parses successfully exactly when `T` does **not**
/// parse at the current position. Consumes no tokens either way.
#[derive(Clone)]
pub struct Except<T>(PhantomData<T>);
709
710impl<T: Parse> Parser for Except<T> {
711 fn parser(tokens: &mut TokenIter) -> Result<Self> {
712 let mut ptokens = tokens.clone();
713 match T::parser(&mut ptokens) {
714 Ok(_) => Error::unexpected_token(tokens.clone().next(), tokens),
715 Err(_) => Ok(Self(PhantomData)),
716 }
717 }
718}
719
impl<T> ToTokens for Except<T> {
    /// Emits no tokens; `Except` is a zero-width assertion.
    #[inline]
    fn to_tokens(&self, _tokens: &mut TokenStream) {
    }
}
726
727#[mutants::skip]
728impl<T: std::fmt::Debug> std::fmt::Debug for Except<T> {
729 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
730 f.debug_struct(&format!("Except<{}>", std::any::type_name::<T>()))
731 .finish()
732 }
733}
734
/// Positive lookahead: parses successfully exactly when `T` parses at the
/// current position. Consumes no tokens either way.
#[derive(Clone)]
pub struct Expect<T>(PhantomData<T>);
748
749impl<T: Parse> Parser for Expect<T> {
750 fn parser(tokens: &mut TokenIter) -> Result<Self> {
751 let mut ptokens = tokens.clone();
752 match T::parser(&mut ptokens) {
753 Ok(_) => Ok(Self(PhantomData)),
754 Err(e) => Err(e),
755 }
756 }
757}
758
impl<T> ToTokens for Expect<T> {
    /// Emits no tokens; `Expect` is a zero-width assertion.
    #[inline]
    fn to_tokens(&self, _tokens: &mut TokenStream) {
    }
}
765
766#[mutants::skip]
767impl<T: std::fmt::Debug> std::fmt::Debug for Expect<T> {
768 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
769 f.debug_struct(&format!("Expect<{}>", std::any::type_name::<T>()))
770 .finish()
771 }
772}
773
/// Matches only when the token iterator is exhausted.
#[derive(Debug, Clone)]
pub struct EndOfStream;
786
787impl Parser for EndOfStream {
788 fn parser(tokens: &mut TokenIter) -> Result<Self> {
789 match tokens.next() {
790 None => Ok(Self),
791 at => Error::unexpected_token(at, tokens),
792 }
793 }
794}
795
impl ToTokens for EndOfStream {
    /// Emits no tokens; end-of-stream has no representation.
    #[inline]
    fn to_tokens(&self, _tokens: &mut TokenStream) {
    }
}
802
/// Carries auxiliary state of type `T` that takes no part in parsing:
/// parsing always yields `T::default()` and it emits no tokens.
#[derive(Clone)]
pub struct HiddenState<T: Default>(pub T);
812
813impl<T: Default> Deref for HiddenState<T> {
814 type Target = T;
815
816 fn deref(&self) -> &Self::Target {
817 &self.0
818 }
819}
820
821impl<T: Default> DerefMut for HiddenState<T> {
822 fn deref_mut(&mut self) -> &mut Self::Target {
823 &mut self.0
824 }
825}
826
827impl<T: Default> Parser for HiddenState<T> {
828 #[inline]
829 #[mutants::skip]
830 fn parser(_ctokens: &mut TokenIter) -> Result<Self> {
831 Ok(Self(T::default()))
832 }
833}
834
impl<T: Default> ToTokens for HiddenState<T> {
    /// Emits no tokens; hidden state has no representation.
    #[inline]
    fn to_tokens(&self, _tokens: &mut TokenStream) {
    }
}
841
842impl<T: Default> Default for HiddenState<T> {
843 fn default() -> Self {
844 Self(Default::default())
845 }
846}
847
848#[mutants::skip]
849impl<T: Default + std::fmt::Debug> std::fmt::Debug for HiddenState<T> {
850 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
851 f.debug_tuple(&format!("HiddenState<{}>", std::any::type_name::<T>()))
852 .field(&self.0)
853 .finish()
854 }
855}