unsynn/
fundamental.rs

1//! This module contains the fundamental parsers. These are the basic tokens from
2//! [`proc_macro2`](https://docs.rs/proc-macro2/latest/proc_macro2/)/[`proc_macro`](https://doc.rust-lang.org/proc_macro/index.html)
3//! and a few other ones defined by unsynn. These are the terminal entities when parsing tokens.
4//! Being able to parse [`TokenTree`] and [`TokenStream`] allows one to parse opaque entities where
5//! internal details are left out. The [`Cached`] type is used to cache the string representation
6//! of the parsed entity. The [`Nothing`] type is used to match without consuming any tokens.
7//! The [`Except`] type is used to match when the next token does not match the given type.
8//! The [`EndOfStream`] type is used to match the end of the stream when no tokens are left.
9//! The [`HiddenState`] type is used to hold additional information that is not part of the parsed syntax.
10//!
11//! **Note**: When the `proc_macro2` feature is disabled, format macros (`format_ident!`,
12//! `format_literal!`) are unavailable, but `Cached<T>` remains fully functional using
13//! `.to_string()` from `proc_macro` types.
14
15#[cfg(feature = "proc_macro2")]
16pub use proc_macro2::{Group, Ident, Literal, Punct, TokenStream, TokenTree};
17
18#[cfg(not(feature = "proc_macro2"))]
19pub use proc_macro::{Group, Ident, Literal, Punct, TokenStream, TokenTree};
20
21#[allow(clippy::wildcard_imports)]
22use crate::*;
23
24use std::marker::PhantomData;
25use std::ops::{Deref, DerefMut};
26
27/// Helper function to count only the tokens INSIDE groups (not the groups themselves).
28/// This is used to adjust the shadow counter after `extend()` which only counts outer-level tokens.
29fn count_nested_tokens(stream: &TokenStream) -> usize {
30    stream
31        .clone()
32        .into_iter()
33        .map(|tt| match tt {
34            // For a group, count all tokens inside it recursively
35            TokenTree::Group(g) => count_tokens_recursive(g.stream()),
36            _ => 0, // Non-group tokens are already counted by extend()
37        })
38        .sum()
39}
40
41/// Helper function to recursively count all tokens in a `TokenStream`, including nested groups.
42/// A Group token counts as 1, plus all tokens inside it (recursively).
43pub(crate) fn count_tokens_recursive(stream: TokenStream) -> usize {
44    stream
45        .into_iter()
46        .map(|tt| match tt {
47            TokenTree::Group(g) => 1 + count_tokens_recursive(g.stream()),
48            _ => 1,
49        })
50        .sum()
51}
52
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_count_tokens_recursive_basic() {
        let ts: TokenStream = "a b c".parse().unwrap();
        assert_eq!(count_tokens_recursive(ts), 3);
    }

    #[test]
    fn test_count_tokens_recursive_with_group() {
        // a, the group token itself, b, c and d each count once: 5 total
        let ts: TokenStream = "a { b c } d".parse().unwrap();
        assert_eq!(count_tokens_recursive(ts), 5);
    }

    #[test]
    fn test_count_tokens_recursive_nested_groups() {
        // outer level: a, group, e (3); first group: b, group, d (3); innermost: c (1)
        let ts: TokenStream = "a { b { c } d } e".parse().unwrap();
        assert_eq!(count_tokens_recursive(ts), 7);
    }

    #[test]
    fn test_count_tokens_recursive_empty() {
        let ts: TokenStream = "".parse().unwrap();
        assert_eq!(count_tokens_recursive(ts), 0);
    }

    #[test]
    fn test_count_tokens_recursive_empty_group() {
        // a, the (empty) group and b: 3 total, nothing inside the braces
        let ts: TokenStream = "a { } b".parse().unwrap();
        assert_eq!(count_tokens_recursive(ts), 3);
    }

    #[test]
    fn test_count_tokens_recursive_multiple_groups() {
        // three group tokens plus one token inside each: 6 total
        let ts: TokenStream = "{ a } { b } { c }".parse().unwrap();
        assert_eq!(count_tokens_recursive(ts), 6);
    }

    #[test]
    fn test_count_nested_tokens() {
        // only b and c live inside a group
        let ts: TokenStream = "a { b c } d".parse().unwrap();
        assert_eq!(count_nested_tokens(&ts), 2);
    }

    #[test]
    fn test_count_nested_tokens_nested() {
        // inside the outer group: b, the inner group token, c and d
        let ts: TokenStream = "a { b { c } d } e".parse().unwrap();
        assert_eq!(count_nested_tokens(&ts), 4);
    }

    #[test]
    fn test_count_nested_tokens_empty() {
        // no groups at all, hence nothing nested
        let ts: TokenStream = "a b c".parse().unwrap();
        assert_eq!(count_nested_tokens(&ts), 0);
    }

    #[test]
    fn test_count_nested_tokens_empty_group() {
        // the braces are empty, so the nested count is zero
        let ts: TokenStream = "a { } b".parse().unwrap();
        assert_eq!(count_nested_tokens(&ts), 0);
    }

    #[test]
    fn test_count_nested_tokens_multiple_groups() {
        // one token inside each of the three groups
        let ts: TokenStream = "{ a } { b } { c }".parse().unwrap();
        assert_eq!(count_nested_tokens(&ts), 3);
    }
}
143
144/// Parses a [`TokenStream`] from the input tokens. This is the primary entity to parse when
145/// dealing with opaque entities where internal details are left out.
146/// Note that this matches a empty stream (see [`EndOfStream`]) as well.
147impl Parser for TokenStream {
148    fn parser(tokens: &mut TokenIter) -> Result<Self> {
149        let mut output = TokenStream::new();
150        output.extend(&mut *tokens);
151
152        // Count tokens INSIDE groups and adjust the shadow counter
153        // `extend()` already counted outer-level tokens (a, Group, b)
154        // but didn't count tokens inside the Groups
155        let nested_count = count_nested_tokens(&output);
156        tokens.add(nested_count);
157
158        Ok(output)
159    }
160}
161
162impl ToTokens for TokenStream {
163    fn to_tokens(&self, tokens: &mut TokenStream) {
164        tokens.extend(self.clone());
165    }
166}
167
/// Since parsing a [`TokenStream`] succeeds even when no tokens are left, this type is used to
/// parse a [`TokenStream`] that is not empty. The inner stream is public; when obtained via
/// [`Parser`] or the `TryFrom<TokenStream>` impl it is guaranteed to contain at least one token.
pub struct NonEmptyTokenStream(pub TokenStream);
171
172impl TryFrom<TokenStream> for NonEmptyTokenStream {
173    type Error = Error;
174
175    fn try_from(value: TokenStream) -> Result<Self> {
176        if value.is_empty() {
177            Error::unexpected_end()
178        } else {
179            Ok(Self(value))
180        }
181    }
182}
183
184impl Parser for NonEmptyTokenStream {
185    fn parser(tokens: &mut TokenIter) -> Result<Self> {
186        tokens.parse::<Expect<TokenTree>>().refine_err::<Self>()?;
187        // A TokenStream will always match, so we can safely unwrap here.
188        #[allow(clippy::unwrap_used)]
189        Ok(Self(TokenStream::parser(tokens).unwrap()))
190    }
191}
192
193impl ToTokens for NonEmptyTokenStream {
194    fn to_tokens(&self, tokens: &mut TokenStream) {
195        tokens.extend(self.0.clone());
196    }
197}
198
#[test]
#[cfg(feature = "proc_macro2")]
fn test_non_empty_token_stream() {
    // A single identifier is a non-empty stream and must parse.
    let mut iter = "ident".to_token_iter();
    let _ = NonEmptyTokenStream::parser(&mut iter).unwrap();
}

#[test]
#[cfg(feature = "proc_macro2")]
fn test_empty_token_stream() {
    // An empty input must be rejected.
    let mut iter = "".to_token_iter();
    assert!(NonEmptyTokenStream::parser(&mut iter).is_err());
}
212
213impl Parser for TokenTree {
214    fn parser(tokens: &mut TokenIter) -> Result<Self> {
215        match tokens.next() {
216            Some(token) => Ok(token),
217            None => Error::unexpected_end(),
218        }
219    }
220}
221
222impl ToTokens for TokenTree {
223    #[inline]
224    fn to_tokens(&self, tokens: &mut TokenStream) {
225        tokens.extend(std::iter::once(self.clone()));
226    }
227}
228
229impl Parser for Group {
230    fn parser(tokens: &mut TokenIter) -> Result<Self> {
231        match tokens.next() {
232            Some(TokenTree::Group(group)) => {
233                // Count tokens inside the group and advance the token counter
234                let nested_count = count_tokens_recursive(group.stream());
235                tokens.add(nested_count);
236                Ok(group)
237            }
238            at => Error::unexpected_token(at, tokens),
239        }
240    }
241}
242
243impl ToTokens for Group {
244    #[inline]
245    fn to_tokens(&self, tokens: &mut TokenStream) {
246        tokens.extend(std::iter::once(TokenTree::Group(self.clone())));
247    }
248}
249
250impl Parser for Ident {
251    fn parser(tokens: &mut TokenIter) -> Result<Self> {
252        match tokens.next() {
253            Some(TokenTree::Ident(ident)) => Ok(ident),
254            at => Error::unexpected_token(at, tokens),
255        }
256    }
257}
258
259impl ToTokens for Ident {
260    #[inline]
261    fn to_tokens(&self, tokens: &mut TokenStream) {
262        tokens.extend(std::iter::once(TokenTree::Ident(self.clone())));
263    }
264}
265
266impl Parser for Punct {
267    fn parser(tokens: &mut TokenIter) -> Result<Self> {
268        match tokens.next() {
269            Some(TokenTree::Punct(punct)) => Ok(punct),
270            at => Error::unexpected_token(at, tokens),
271        }
272    }
273}
274
275impl ToTokens for Punct {
276    #[inline]
277    fn to_tokens(&self, tokens: &mut TokenStream) {
278        tokens.extend(std::iter::once(TokenTree::Punct(self.clone())));
279    }
280}
281
282impl Parser for Literal {
283    fn parser(tokens: &mut TokenIter) -> Result<Self> {
284        match tokens.next() {
285            Some(TokenTree::Literal(literal)) => Ok(literal),
286            at => Error::unexpected_token(at, tokens),
287        }
288    }
289}
290
291impl ToTokens for Literal {
292    #[inline]
293    fn to_tokens(&self, tokens: &mut TokenStream) {
294        tokens.extend(std::iter::once(TokenTree::Literal(self.clone())));
295    }
296}
297
/// Getting the underlying string is expensive as it always allocates a new [`String`].
/// This type caches the string representation of a given entity. Note that this is
/// only reliable for fundamental entities that represent a single token. Spacing between
/// composed tokens is not stable and should be considered informal only.
///
/// # Example
///
/// ```
/// use unsynn::*;
/// let mut token_iter = "ident 1234".to_token_iter();
///
/// let cached_ident = Cached::<Ident>::parse(&mut token_iter).unwrap();
/// assert!(cached_ident == "ident");
/// ```
#[derive(Clone)]
pub struct Cached<T> {
    // The parsed entity itself.
    value: T,
    // Its cached textual form, kept in sync by `Parser`, `set()` and the constructors.
    string: String,
}
317
318impl<T: Parse + ToTokens> Parser for Cached<T> {
319    fn parser(tokens: &mut TokenIter) -> Result<Self> {
320        let value = T::parser(tokens).refine_err::<Self>()?;
321        let string = value.tokens_to_string();
322        Ok(Self { value, string })
323    }
324}
325
impl<T: Parse + ToTokens> ToTokens for Cached<T> {
    #[inline]
    fn to_tokens(&self, tokens: &mut TokenStream) {
        // Delegates to the inner value; the cached string plays no role here.
        self.value.to_tokens(tokens);
    }
}
332
333impl<T: Parse + ToTokens> Cached<T> {
334    /// Sets the value and updates the string representation.
335    pub fn set(&mut self, value: T) {
336        self.value = value;
337        self.string = self.value.tokens_to_string();
338    }
339}
340
impl<T: Parse> Cached<T> {
    /// Deconstructs self and returns the inner value, discarding the cached string.
    pub fn into_inner(self) -> T {
        self.value
    }

    /// Deconstructs self and returns the contained `String` representation,
    /// discarding the inner value.
    pub fn into_string(self) -> String {
        self.string
    }

    /// Gets the cached string representation without allocating.
    #[allow(clippy::missing_const_for_fn)] // bug in clippy
    pub fn as_str(&self) -> &str {
        &self.string
    }
}
358
#[cfg(feature = "proc_macro2")]
impl<T: Parse> Cached<T> {
    /// Creates a new `Cached<T>` from a `&str`.
    ///
    /// # Panics
    ///
    /// Panics when `s` can't be parsed.
    ///
    /// # Example
    ///
    /// ```
    /// use unsynn::*;
    /// let cached_ident = Cached::<Ident>::new("ident");
    /// assert!(cached_ident == "ident");
    /// ```
    #[must_use]
    pub fn new(s: &str) -> Self {
        Self {
            value: s.into_token_iter().parse().expect("Valid token"),
            string: s.to_owned(),
        }
    }

    /// Creates a new `Cached<T>` from an owned `String`.
    ///
    /// # Errors
    ///
    /// Returns `Err` when `s` can't be parsed.
    ///
    /// # Example
    ///
    /// ```
    /// use unsynn::*;
    /// let cached_ident = Cached::<Ident>::from_string("ident".into()).unwrap();
    /// assert!(cached_ident == "ident");
    /// ```
    pub fn from_string(s: String) -> Result<Self> {
        // Parse first so `s` can be moved into the cache afterwards.
        let value = s.to_token_iter().parse()?;
        Ok(Self { string: s, value })
    }
}
401
/// Dereferences to the inner `T`, so a `Cached<T>` can be used wherever a
/// reference to the underlying value is expected.
impl<T: Parse> Deref for Cached<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.value
    }
}
409
410impl<T: Parse> PartialEq<&str> for Cached<T> {
411    fn eq(&self, other: &&str) -> bool {
412        self.as_str() == *other
413    }
414}
415
416impl<T: Parse> PartialEq for Cached<T> {
417    fn eq(&self, other: &Self) -> bool {
418        self.as_str() == other.as_str()
419    }
420}
421
422impl<T: Parse> Eq for Cached<T> {}
423
424impl<T: Parse> std::hash::Hash for Cached<T> {
425    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
426        self.as_str().hash(state);
427    }
428}
429
430impl<T: Parse> AsRef<T> for Cached<T> {
431    fn as_ref(&self) -> &T {
432        &self.value
433    }
434}
435
436impl<T: Parse> AsRef<str> for Cached<T> {
437    fn as_ref(&self) -> &str {
438        self.as_str()
439    }
440}
441
#[mutants::skip]
impl<T: Parse + std::fmt::Debug> std::fmt::Debug for Cached<T> {
    // Includes the concrete type name of `T` in the output for easier diagnostics.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct(&format!("Cached<{}>", std::any::type_name::<T>()))
            .field("value", &self.value)
            .field("string", &self.string)
            .finish()
    }
}
451
452/// Convert a `Cached<T: Into<TokenTree>>` object into a `TokenTree`.
453impl<T: Into<TokenTree>> From<Cached<T>> for TokenTree {
454    fn from(cached: Cached<T>) -> Self {
455        cached.value.into()
456    }
457}
458
#[cfg(feature = "proc_macro2")]
impl<T: Parse> TryFrom<String> for Cached<T> {
    type Error = Error;

    /// Parses `value` and keeps the original string as the cached representation.
    fn try_from(value: String) -> Result<Self> {
        let parsed = T::parser(&mut value.to_token_iter()).refine_err::<Self>()?;
        Ok(Self {
            value: parsed,
            string: value,
        })
    }
}
472
#[cfg(feature = "proc_macro2")]
impl<T: Parse> TryFrom<&str> for Cached<T> {
    type Error = Error;

    /// Delegates to the `TryFrom<String>` implementation.
    fn try_from(value: &str) -> Result<Self> {
        Self::try_from(String::from(value))
    }
}
481
#[test]
#[cfg(feature = "proc_macro2")]
fn test_cached_into_tt() {
    // A cached Ident must convert into a plain TokenTree.
    let mut iter = "ident".to_token_iter();
    let cached = Cached::<Ident>::parser(&mut iter).unwrap();
    let _tt: TokenTree = cached.into();
}
489
// Generates the `Cached*` type aliases and the matching unwrap conversions
// (`From<Cached<T>> for T`) for the fundamental token types below.
macro_rules! gen_cached_types {
    ($($cached:ident = $basic:ident);* $(;)?) => {
        $(
        #[doc = concat!("[`", stringify!($basic), "`] with cached string representation.")]
        pub type $cached = Cached<$basic>;

        #[doc = concat!("Convert a `", stringify!($cached), "` into a `", stringify!($basic), "`.")]
        impl From<$cached> for $basic {
            fn from(cached: $cached) -> Self {
                cached.value
            }
        }
        )*
    }
}

gen_cached_types! {
    CachedGroup = Group;
    CachedIdent = Ident;
    CachedPunct = Punct;
    CachedLiteral = Literal;
    CachedLiteralString = LiteralString;
    CachedLiteralInteger = LiteralInteger;
}

// Can't use the macro here: the TokenTree conversion is already covered by the
// generic `From<Cached<T: Into<TokenTree>>>` impl defined above.
/// [`TokenTree`] (any token) with cached string representation.
pub type CachedTokenTree = Cached<TokenTree>;
518
/// Generates a `Ident` from a format specification.
/// See [`format_cached_ident!`] for the variant that also caches the string form.
///
/// # Panics
///
/// Panics when the formatted string is not a valid identifier.
///
/// # Example
///
/// ```
/// use unsynn::*;
/// let ident = format_ident!("my_{}", "identifier");
/// assert_tokens_eq!(ident, "my_identifier");
/// ```
#[cfg(feature = "proc_macro2")]
#[macro_export]
macro_rules! format_ident {
    ($($args:tt)*) => {
        <$crate::Ident as $crate::Parse>::parse(&mut format!($($args)*).into_token_iter()).expect("Not a valid identifier")
    };
}
539
/// Generates a `CachedIdent` from a format specification. Delegates to
/// [`CachedIdent::from_string`], so the formatted text becomes the cached representation.
///
/// # Panics
///
/// Panics when the formatted string is not a valid identifier.
///
/// # Example
///
/// ```
/// use unsynn::*;
/// let cached_ident = format_cached_ident!("my_{}", "identifier");
/// assert_tokens_eq!(cached_ident, "my_identifier");
/// ```
#[cfg(feature = "proc_macro2")]
#[macro_export]
macro_rules! format_cached_ident {
    ($($args:tt)*) => {
        $crate::CachedIdent::from_string(format!($($args)*)).expect("Not a valid identifier")
    };
}
560
/// Generates a `LiteralString` from a format specification. Quote characters around the
/// string are automatically added (the format string must be a literal so the quotes
/// can be attached with `concat!` at compile time).
///
/// # Panics
///
/// Panics when the formatted string is not a valid literal string.
///
/// # Example
///
/// ```
/// use unsynn::*;
/// let literal_string = format_literal_string!("my_{}", "literal_string");
/// assert_tokens_eq!(literal_string, r#" "my_literal_string" "#);
/// ```
#[cfg(feature = "proc_macro2")]
#[macro_export]
macro_rules! format_literal_string {
    ($fmt:literal $(, $($args:tt)*)?) => {
        <$crate::LiteralString as $crate::Parse>::parse(&mut format!(concat!("\"",$fmt,"\"") $(, $($args)*)?)
            .into_token_iter())
        .expect("Not a valid string literal")
    };
}
584
/// Generates a `Literal` from a format specification. Unlike [`format_literal_string!`], this does not
/// add quotes and can be used to create any kind of literal, such as integers or floats.
///
/// # Panics
///
/// Panics when the formatted string is not a valid literal.
///
/// # Example
///
/// ```
/// use unsynn::*;
/// let literal = format_literal!("123{}", ".456");
/// assert_tokens_eq!(literal, str "123.456");
/// ```
#[cfg(feature = "proc_macro2")]
#[macro_export]
macro_rules! format_literal {
    ($($args:tt)*) => {
        // Parse the formatted text directly; no quoting is applied.
        <$crate::Literal as $crate::Parse>::parse(&mut format!($($args)*)
            .into_token_iter())
        .expect("Not a valid literal")
    };
}
608
/// A unit that always matches without consuming any tokens.  This is required when one wants
/// to parse a [`Repeats`] without a delimiter.  Note that using [`Nothing`] as primary entity
/// in a [`Vec`], [`LazyVec`], [`DelimitedVec`] or [`Repeats`] will result in an infinite
/// loop.
#[derive(Debug, Clone, Default)]
pub struct Nothing;

impl Parser for Nothing {
    #[inline]
    #[mutants::skip]
    // Always succeeds without touching the token iterator.
    fn parser(_tokens: &mut TokenIter) -> Result<Self> {
        Ok(Self)
    }
}

impl ToTokens for Nothing {
    #[inline]
    fn to_tokens(&self, _tokens: &mut TokenStream) {
        /* NOP: matches zero tokens, so nothing is emitted */
    }
}
630
/// A unit that always fails to match. This is useful as default for generics.
/// See how [`Either<A, B, C, D>`] uses this for unused alternatives.
///
/// # Panics
///
/// `Invalid` tokens can not be emitted and will panic when calling [`ToTokens::to_tokens()`].
#[derive(Debug, Clone)]
pub struct Invalid;

impl Parser for Invalid {
    // Unconditionally reports an unexpected-token error without consuming input.
    fn parser(tokens: &mut TokenIter) -> Result<Self> {
        Error::unexpected_token(None, tokens)
    }
}

impl ToTokens for Invalid {
    #[inline]
    fn to_tokens(&self, _tokens: &mut TokenStream) {
        unimplemented!("`Invalid` can not be converted to tokens")
    }
}
652
/// A unit that can not be parsed. This is useful as diagnostic placeholder for parsers that
/// are (yet) unimplemented. The `nonparseable` feature flag controls if `Parser` and `ToTokens`
/// will be implemented for it. This is useful in release builds that should not have any
/// `NonParseable` left behind.
///
/// # Panics
///
/// Only when the `nonparseable` feature flag is set:
///
/// * `NonParseable` will panic when calling [`Parser::parser()`].
/// * `NonParseable` tokens can not be emitted and will panic when calling [`ToTokens::to_tokens()`].
///
/// Otherwise `Parser` and `ToTokens` are not implemented and will result in a compile time error.
///
/// # Example
///
/// ```should_panic
/// # use unsynn::*;
/// let mut tokens = "something".to_token_iter();
/// let nonparseable: NonParseable = tokens.parse().unwrap();
/// ```
#[derive(Debug, Clone)]
pub struct NonParseable;

#[cfg(feature = "nonparseable")]
impl Parser for NonParseable {
    #[inline]
    // Deliberately panics: a left-over `NonParseable` is a programming error.
    fn parser(_tokens: &mut TokenIter) -> Result<Self> {
        unimplemented!("`NonParseable` can not be parsed")
    }
}

#[cfg(feature = "nonparseable")]
impl ToTokens for NonParseable {
    #[mutants::skip]
    #[inline]
    fn to_tokens(&self, _tokens: &mut TokenStream) {
        unimplemented!("`NonParseable` can not be converted to tokens")
    }
}
694
695/// Succeeds when the next token does not match `T`. **Will not consume any tokens.** Usually
696/// this has to be followed with a conjunctive match such as `Cons<Except<T>, U>` or followed
697/// by another entry in a struct or tuple.
698///
699/// # Example
700///
701/// ```
702/// # use unsynn::*;
703/// let mut token_iter = "ident".to_token_iter();
704///
705/// let _ = Except::<Punct>::parser(&mut token_iter).unwrap();
706/// ```
707#[derive(Clone)]
708pub struct Except<T>(PhantomData<T>);
709
710impl<T: Parse> Parser for Except<T> {
711    fn parser(tokens: &mut TokenIter) -> Result<Self> {
712        let mut ptokens = tokens.clone();
713        match T::parser(&mut ptokens) {
714            Ok(_) => Error::unexpected_token(tokens.clone().next(), tokens),
715            Err(_) => Ok(Self(PhantomData)),
716        }
717    }
718}
719
720impl<T> ToTokens for Except<T> {
721    #[inline]
722    fn to_tokens(&self, _tokens: &mut TokenStream) {
723        /*NOP*/
724    }
725}
726
727#[mutants::skip]
728impl<T: std::fmt::Debug> std::fmt::Debug for Except<T> {
729    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
730        f.debug_struct(&format!("Except<{}>", std::any::type_name::<T>()))
731            .finish()
732    }
733}
734
735/// Succeeds when the next token would match `T`. **Will not consume any tokens.**
736/// This is similar to peeking.
737///
738/// # Example
739///
740/// ```
741/// # use unsynn::*;
742/// let mut token_iter = "ident".to_token_iter();
743///
744/// let _ = Expect::<Ident>::parser(&mut token_iter).unwrap();
745/// ```
746#[derive(Clone)]
747pub struct Expect<T>(PhantomData<T>);
748
749impl<T: Parse> Parser for Expect<T> {
750    fn parser(tokens: &mut TokenIter) -> Result<Self> {
751        let mut ptokens = tokens.clone();
752        match T::parser(&mut ptokens) {
753            Ok(_) => Ok(Self(PhantomData)),
754            Err(e) => Err(e),
755        }
756    }
757}
758
759impl<T> ToTokens for Expect<T> {
760    #[inline]
761    fn to_tokens(&self, _tokens: &mut TokenStream) {
762        /*NOP*/
763    }
764}
765
766#[mutants::skip]
767impl<T: std::fmt::Debug> std::fmt::Debug for Expect<T> {
768    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
769        f.debug_struct(&format!("Expect<{}>", std::any::type_name::<T>()))
770            .finish()
771    }
772}
773
774/// Matches the end of the stream when no tokens are left.
775///
776/// # Example
777///
778/// ```
779/// # use unsynn::*;
780/// let mut token_iter = "".to_token_iter();
781///
782/// let _end_ = EndOfStream::parser(&mut token_iter).unwrap();
783/// ```
784#[derive(Debug, Clone)]
785pub struct EndOfStream;
786
787impl Parser for EndOfStream {
788    fn parser(tokens: &mut TokenIter) -> Result<Self> {
789        match tokens.next() {
790            None => Ok(Self),
791            at => Error::unexpected_token(at, tokens),
792        }
793    }
794}
795
796impl ToTokens for EndOfStream {
797    #[inline]
798    fn to_tokens(&self, _tokens: &mut TokenStream) {
799        /*NOP*/
800    }
801}
802
803/// Sometimes one want to compose types or create structures for unsynn that have members that
804/// are not part of the parsed syntax but add some additional information. This struct can be
805/// used to hold such members while still using the [`Parser`] and [`ToTokens`] trait
806/// implementations automatically generated by the [`unsynn!{}`] macro or composition syntax.
807/// [`HiddenState`] will not consume any tokens when parsing and will not emit any tokens when
808/// generating a [`TokenStream`]. On parsing it is initialized with a default value. It has
809/// [`Deref`] and [`DerefMut`] implemented to access the inner value.
810#[derive(Clone)]
811pub struct HiddenState<T: Default>(pub T);
812
813impl<T: Default> Deref for HiddenState<T> {
814    type Target = T;
815
816    fn deref(&self) -> &Self::Target {
817        &self.0
818    }
819}
820
821impl<T: Default> DerefMut for HiddenState<T> {
822    fn deref_mut(&mut self) -> &mut Self::Target {
823        &mut self.0
824    }
825}
826
827impl<T: Default> Parser for HiddenState<T> {
828    #[inline]
829    #[mutants::skip]
830    fn parser(_ctokens: &mut TokenIter) -> Result<Self> {
831        Ok(Self(T::default()))
832    }
833}
834
835impl<T: Default> ToTokens for HiddenState<T> {
836    #[inline]
837    fn to_tokens(&self, _tokens: &mut TokenStream) {
838        /*NOP*/
839    }
840}
841
842impl<T: Default> Default for HiddenState<T> {
843    fn default() -> Self {
844        Self(Default::default())
845    }
846}
847
848#[mutants::skip]
849impl<T: Default + std::fmt::Debug> std::fmt::Debug for HiddenState<T> {
850    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
851        f.debug_tuple(&format!("HiddenState<{}>", std::any::type_name::<T>()))
852            .field(&self.0)
853            .finish()
854    }
855}