#![doc = include_str!("../README.md")]
pub use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, TokenStream, TokenTree};
/// Type alias for the iterator type we use for parsing. This iterator is `Clone` and
/// produces owned `TokenTree` items (it is `<TokenStream as IntoIterator>::IntoIter`,
/// which yields `TokenTree` by value).
pub type TokenIter = <TokenStream as IntoIterator>::IntoIter;
/// The `Parser` trait that must be implemented by anything we want to parse. We are parsing
/// over a `proc_macro2::TokenStream` iterator.
pub trait Parser
where
    Self: Sized,
{
    /// The actual parsing function that must be implemented. This mutates the `tokens`
    /// iterator directly without a transaction. This should not be called from user code
    /// except for implementing parsers itself and then only when the rules below are
    /// followed.
    ///
    /// # Implementing Parsers
    ///
    /// The parsers for `TokenStream`, `TokenTree`, `Group`, `Ident`, `Punct`, `Literal`,
    /// `Except` and `Nothing` are the fundamental parsers. Any other parser is composed from
    /// those. This composition is done by calling other `parse()` (or `parser()`)
    /// implementations until eventually one of the above fundamental parsers is called.
    ///
    /// Calling another `parser()` from a `parser()` implementation is only valid when this
    /// is a conjunctive operation and a failure is returned immediately by the `?`
    /// operator. Failing to do so will leave the iterator in a consumed state which breaks
    /// further parsing. When in doubt use `parse()` which is never wrong.
    ///
    /// # Errors
    ///
    /// The `parser()` implementation must return an error if it cannot parse the input. This
    /// error must be an `unsynn::Error`. User code will call `parser()` through
    /// `Parse::parse`, which runs it inside a transaction and rolls back on error.
    fn parser(tokens: &mut TokenIter) -> Result<Self>;
}
/// This trait provides the user facing API to parse grammatical entities. It is implemented
/// for anything that implements the `Parser` trait. The methods here putting the iterator
/// that is used for parsing into a transaction. This iterator is always `Copy`. Instead using
/// a peekable iterator or implementing deeper peeking parsers clone this iterator when
/// necessary, operate on that clone and commit changes back to the original iterator when
/// successful. This trait cannot be implemented by user code. It is bound to `ToTokens` as
/// well to ensure that everything that can be parsed can be generated as well.
/// The user facing API for parsing grammatical entities. It is implemented for anything
/// that implements the `Parser` trait. Each method here runs its parser inside a
/// transaction: the `TokenIter` is `Clone`, so we clone it, parse on the clone, and only
/// write the clone back to the caller's iterator when parsing succeeded. On failure the
/// caller's iterator is left untouched. This trait cannot be implemented by user code; it
/// is also bound to `ToTokens` so that everything that can be parsed can be emitted again.
pub trait Parse
where
    Self: Parser + ToTokens,
{
    /// Parse a grammatical entity. Runs `parser()` within a transaction and commits the
    /// consumed tokens only on success.
    ///
    /// # Errors
    ///
    /// When the parser returns an error the transaction is rolled back and the error is
    /// returned.
    fn parse(tokens: &mut TokenIter) -> Result<Self> {
        let mut attempt = tokens.clone();
        Self::parser(&mut attempt).map(|value| {
            // Commit: replace the caller's iterator with the advanced clone.
            *tokens = attempt;
            value
        })
    }

    /// Exhaustive parsing within a transaction. Convenience method that implies an
    /// `EndOfStream` after `Self`, so it errors unless the whole stream is consumed.
    ///
    /// # Errors
    ///
    /// When the parser returns an error, or tokens remain in the stream, the transaction
    /// is rolled back and an error is returned.
    fn parse_all(tokens: &mut TokenIter) -> Result<Self> {
        let mut attempt = tokens.clone();
        Cons::<Self, EndOfStream>::parser(&mut attempt).map(|exhaustive| {
            *tokens = attempt;
            // Discard the `EndOfStream` marker, keep only the parsed entity.
            exhaustive.0
        })
    }

    /// Parse a value in a transaction and pass it to a closure which may modify it or
    /// return an error. The closure's `Ok(Self)` result is returned.
    ///
    /// # Errors
    ///
    /// When the parser or the closure returns an error, the transaction is rolled back
    /// and the error is returned.
    fn parse_with(tokens: &mut TokenIter, f: impl FnOnce(Self) -> Result<Self>) -> Result<Self> {
        let mut attempt = tokens.clone();
        // Only commit when both the parser and the user closure succeeded.
        Self::parser(&mut attempt).and_then(f).map(|value| {
            *tokens = attempt;
            value
        })
    }
}
// Blanket implementation: anything that implements both `Parser` and `ToTokens`
// automatically gets the transactional `Parse` API; user code cannot implement
// `Parse` directly.
impl<T: Parser + ToTokens> Parse for T {}
/// We need our own `ToTokens` to be able to implement it for std container types
/// Our own `ToTokens` trait so we are able to implement it for std container types.
pub trait ToTokens {
    /// Append the token representation of `self` to the given `TokenStream`.
    fn to_tokens(&self, tokens: &mut TokenStream);

    /// Render `self` as a freshly created `TokenStream`.
    fn to_token_stream(&self) -> TokenStream {
        let mut stream = TokenStream::new();
        self.to_tokens(&mut stream);
        stream
    }
}
// Result and error type
mod error;
pub use error::*;
// various declarative macros
mod macros;
// Parsers for the `proc_macro2` entities and other fundamental types
pub mod fundamental;
#[doc(inline)]
pub use fundamental::*;
// Groups by explicit bracket types
pub mod group;
#[doc(inline)]
pub use group::*;
// Punctuation, delimiters and operators
pub mod punct;
#[doc(inline)]
pub use punct::*;
// Literals
pub mod literal;
#[doc(inline)]
pub use literal::*;
// Delimited sequences
pub mod delimited;
#[doc(inline)]
pub use delimited::*;
// containers and smart pointers
pub mod container;
#[doc(inline)]
pub use container::*;
// combinators
pub mod combinator;
#[doc(inline)]
pub use combinator::*;
// Sealed-trait pattern: `Sealed` lives in a private module, so downstream crates can
// name traits bound on it but can never implement them themselves.
mod private {
    /// Marker trait used to seal public traits of this crate against external impls.
    pub trait Sealed {}
}