#![doc = include_str!("../README.md")]
#![warn(clippy::pedantic, missing_docs)]
use std::{
self,
any::Any,
collections::{VecDeque, vec_deque},
fmt::Debug,
iter::{self},
marker::PhantomData,
mem,
ops::{Deref, DerefMut},
panic::{AssertUnwindSafe, UnwindSafe, catch_unwind},
vec,
};
use error_priorities::{UNCONSUMED_AFTER_REPEATS, UNCONSUMED_INPUT};
use proc_macro2::{Literal, Span, TokenStream, TokenTree};
mod proc_macro2_impls;
#[deprecated = "The `rust_grammar` module has been spun out into the separate crates `loess-rust` and `loess-rust-opaque`."]
#[cfg(any(doc, feature = "rust_grammar"))]
pub mod rust_grammar;
mod macros;
pub use macros::__;
/// A single diagnostic produced while parsing.
///
/// Carries a priority used by [`Errors`] to decide which diagnostics are
/// actually emitted (only the highest-priority ones survive).
#[derive(Debug, Clone)]
pub struct Error {
    // Higher priority wins; ties are all emitted together.
    priority: ErrorPriority,
    // Human-readable message; also rendered to tokens by `IntoTokens`.
    message: String,
    // Source locations; joined into one span where `Span::join` allows.
    spans: Vec<Span>,
}
impl Error {
    #[allow(missing_docs)]
    pub fn new(
        priority: ErrorPriority,
        message: impl Into<String>,
        spans: impl IntoIterator<Item = Span>,
    ) -> Self {
        Self {
            priority,
            message: message.into(),
            spans: spans.into_iter().collect(),
        }
    }

    /// The human-readable error text.
    pub fn message(&self) -> &str {
        &self.message
    }

    /// Joins all recorded spans into one, if possible.
    ///
    /// Returns `None` when there are no spans or when any pairwise
    /// [`Span::join`] fails (spans that cannot be joined, e.g. from
    /// different files).
    pub fn span(&self) -> Option<Span> {
        self.spans
            .iter()
            // `Span` is `Copy`; `copied` avoids the needless `cloned`.
            .copied()
            .map(Some)
            // `Span::join` itself returns `Option`, so fold with `and_then`
            // instead of the previous `.map(..).flatten()`.
            .reduce(|a, b| a.zip(b).and_then(|(a, b)| a.join(b)))
            .flatten()
    }
}
impl IntoTokens for Error {
    fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) {
        let message = Literal::string(&self.message);
        // Pick a span for the emitted error: try to join all spans pairwise
        // (`Span::join` can fail, e.g. across files), otherwise fall back to
        // the first recorded span, and finally to `Span::mixed_site()`.
        let span = self
            .spans
            .iter()
            .copied()
            .map(Some)
            .reduce(|a, b| a.as_ref().zip(b).map(|(a, b)| a.join(b)).flatten())
            .flatten()
            .or_else(|| self.spans.first().copied())
            .unwrap_or_else(Span::mixed_site);
        #[allow(unused_variables, unused_mut)]
        {
            // NOTE(review): `quote_into_with_exact_span!` comes from the
            // private `macros` module; presumably `{#error ...}` expands to a
            // `compile_error!`-style invocation at `span` — confirm there.
            quote_into_with_exact_span! (span, root, tokens, {
                {#error { {#(message)} }};
            });
        }
    }
}
/// A collection of [`Error`]s; only the highest-priority entries are kept
/// when emitting (see [`Errors::into_of_highest_priority`]).
#[derive(Debug, Clone)]
pub struct Errors {
    errors: Vec<Error>,
}
/// Relative importance of an [`Error`]. Larger values take precedence.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
pub struct ErrorPriority(f64);
/// A type-level [`ErrorPriority`], used as a generic parameter (see the
/// [`error_priorities`] module for the provided implementors).
pub trait ConstErrorPriority {
    #[allow(missing_docs)]
    const PRIORITY: ErrorPriority;
}
impl ErrorPriority {
    // Private constructor: rejects NaN so the `Ord` impl below is total.
    const fn new(value: f64) -> Self {
        assert!(!value.is_nan());
        Self(value)
    }
    /// The closest representable priority strictly above `self`.
    pub const fn next_higher(&self) -> Self {
        Self(self.0.next_up())
    }
    /// The closest representable priority strictly below `self`.
    pub const fn next_lower(&self) -> Self {
        Self(self.0.next_down())
    }
    // NOTE(review): `PANIC`, `TOKEN` and `GRAMMAR` all share the value `0.`,
    // so errors of these kinds are emitted together by
    // `Errors::into_of_highest_priority` — confirm this is intended.
    #[allow(missing_docs)]
    pub const PANIC: Self = Self::new(0.);
    #[allow(missing_docs)]
    pub const TOKEN: Self = Self::new(0.);
    #[allow(missing_docs)]
    pub const GRAMMAR: Self = Self::new(0.);
    #[allow(missing_docs)]
    pub const UNCONSUMED_AFTER_REPEATS: Self = Self::new(-1.);
    #[allow(missing_docs)]
    pub const UNCONSUMED_IN_DELIMITER: Self = Self::new(-2.);
    #[allow(missing_docs)]
    pub const UNCONSUMED_INPUT: Self = Self::new(-3.);
}
/// Type-level stand-ins for the [`ErrorPriority`] constants, usable as
/// generic arguments wherever a [`ConstErrorPriority`] bound appears.
pub mod error_priorities {
    #![allow(non_camel_case_types)]
    use crate::{ConstErrorPriority, ErrorPriority};
    /// Type-level [`ErrorPriority::PANIC`].
    #[derive(Clone)]
    pub enum PANIC {}
    impl ConstErrorPriority for PANIC {
        // Fixed: this previously read `ErrorPriority::TOKEN` (copy-paste
        // slip). Behaviour is unchanged today because both constants are
        // `0.`, but the `PANIC` marker must track the `PANIC` priority.
        const PRIORITY: ErrorPriority = ErrorPriority::PANIC;
    }
    /// Type-level [`ErrorPriority::TOKEN`].
    #[derive(Clone)]
    pub enum TOKEN {}
    impl ConstErrorPriority for TOKEN {
        const PRIORITY: ErrorPriority = ErrorPriority::TOKEN;
    }
    /// Type-level [`ErrorPriority::GRAMMAR`].
    #[derive(Clone)]
    pub enum GRAMMAR {}
    impl ConstErrorPriority for GRAMMAR {
        const PRIORITY: ErrorPriority = ErrorPriority::GRAMMAR;
    }
    /// Type-level [`ErrorPriority::UNCONSUMED_AFTER_REPEATS`].
    #[derive(Clone)]
    pub enum UNCONSUMED_AFTER_REPEATS {}
    impl ConstErrorPriority for UNCONSUMED_AFTER_REPEATS {
        const PRIORITY: ErrorPriority = ErrorPriority::UNCONSUMED_AFTER_REPEATS;
    }
    /// Type-level [`ErrorPriority::UNCONSUMED_IN_DELIMITER`].
    #[derive(Clone)]
    pub enum UNCONSUMED_IN_DELIMITER {}
    impl ConstErrorPriority for UNCONSUMED_IN_DELIMITER {
        const PRIORITY: ErrorPriority = ErrorPriority::UNCONSUMED_IN_DELIMITER;
    }
    /// Type-level [`ErrorPriority::UNCONSUMED_INPUT`].
    #[derive(Clone)]
    pub enum UNCONSUMED_INPUT {}
    impl ConstErrorPriority for UNCONSUMED_INPUT {
        const PRIORITY: ErrorPriority = ErrorPriority::UNCONSUMED_INPUT;
    }
}
impl Eq for ErrorPriority {}
impl Ord for ErrorPriority {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        // `ErrorPriority::new` rejects NaN, so the partial order is in fact
        // total and the `expect` can never fire.
        PartialOrd::partial_cmp(self, other).expect("total")
    }
}
impl Errors {
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Self { errors: vec![] }
    }
    #[allow(missing_docs)]
    pub fn push(&mut self, error: Error) {
        self.errors.push(error);
    }
    /// Consumes the collection, yielding only the errors whose priority
    /// equals the maximum priority present. Yields nothing when empty.
    pub fn into_of_highest_priority(self) -> impl Iterator<Item = Error> {
        let highest_priority = self.errors.iter().map(|error| error.priority).max();
        // Compare against the `Option` instead of calling `.unwrap()` inside
        // the closure: same result for non-empty collections, and no latent
        // panic path.
        self.errors
            .into_iter()
            .filter(move |e| Some(e.priority) == highest_priority)
    }
}
/// Conversion of a parsed construct back into tokens.
pub trait IntoTokens {
    /// Appends `self`'s token representation to `tokens`.
    /// `root` is threaded through unchanged to nested conversions;
    /// presumably consumed by the crate's quoting macros — TODO confirm.
    fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>);
    /// Convenience wrapper: converts `self` into a freshly-defaulted
    /// collection of tokens.
    fn collect_tokens<T: Default + Extend<TokenTree>>(self, root: &TokenStream) -> T
    where
        Self: Sized,
    {
        let mut tokens = T::default();
        self.into_tokens(root, &mut tokens);
        tokens
    }
}
// A reference converts by cloning the referent first.
impl<T: IntoTokens + Clone> IntoTokens for &T {
    fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) {
        <T as Clone>::clone(self).into_tokens(root, tokens);
    }
}
// `None` contributes no tokens at all.
impl<T: IntoTokens> IntoTokens for Option<T> {
    fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) {
        match self {
            Some(value) => value.into_tokens(root, tokens),
            None => {}
        }
    }
}
// Elements are emitted in order.
impl<T: IntoTokens> IntoTokens for Vec<T> {
    fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) {
        self.into_iter()
            .for_each(|value| value.into_tokens(root, tokens));
    }
}
impl IntoTokens for Errors {
    fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) {
        // Reuse `into_of_highest_priority` instead of duplicating the
        // max-priority selection here; an empty collection emits nothing,
        // exactly as before.
        for error in self.into_of_highest_priority() {
            error.into_tokens(root, tokens);
        }
    }
}
/// A parser input: a queue of tokens, plus a span to report against once the
/// queue is empty.
#[derive(Clone)]
pub struct Input {
    #[allow(missing_docs)]
    pub tokens: VecDeque<TokenTree>,
    /// Span used for diagnostics when no tokens remain (see `front_span`).
    pub end: Span,
}
impl Input {
    /// Non-consuming look-ahead: calls `f` with references to the first `N`
    /// tokens and an iterator over the remainder. Returns `false` without
    /// calling `f` when fewer than `N` tokens remain.
    pub fn peek<'a, const N: usize>(
        &'a self,
        f: impl FnOnce([&TokenTree; N], vec_deque::Iter<'a, TokenTree>) -> bool,
    ) -> bool {
        if self.len() < N {
            false
        } else {
            let mut iter = self.tokens.iter();
            f(
                // The length check above guarantees `N` items are available.
                std::array::from_fn(|_| iter.next().expect("due to !(self.len() < N)")),
                iter,
            )
        }
    }
    /// Pops the first `N` tokens and hands them to `f` (together with the
    /// rest of the input).
    ///
    /// On `Err`, `f` must hand the tokens back; they are re-prepended so the
    /// input is restored, and their spans are returned. When fewer than `N`
    /// tokens are available, `f` is not called and the spans of whatever
    /// remains (plus `self.end`) are returned instead.
    pub fn pop_or_replace<'a, T, const N: usize>(
        &'a mut self,
        f: impl FnOnce([TokenTree; N], &mut Self) -> Result<T, [TokenTree; N]>,
    ) -> Result<T, impl 'a + IntoIterator<Item = Span>> {
        if self.tokens.len() < N {
            Err(self
                .tokens
                .iter()
                .map(|t| t.span())
                .chain(iter::once(self.end))
                .collect::<Vec<_>>())
        } else {
            match f(
                // The length check above guarantees `N` pops succeed.
                [(); N].map(|()| self.tokens.pop_front().expect("unreachable")),
                self,
            ) {
                Ok(value) => Ok(value),
                Err(tts) => {
                    // Capture the spans before handing the tokens back.
                    let spans = tts.iter().map(|t| t.span()).collect();
                    self.prepend(tts);
                    Err(spans)
                }
            }
        }
    }
    #[allow(missing_docs)]
    pub fn is_empty(&self) -> bool {
        self.tokens.is_empty()
    }
    #[allow(missing_docs)]
    pub fn len(&self) -> usize {
        self.tokens.len()
    }
    #[allow(missing_docs)]
    pub fn front(&self) -> Option<&TokenTree> {
        self.tokens.front()
    }
    /// Span of the next token, or `self.end` when the input is empty.
    pub fn front_span(&self) -> Span {
        self.tokens.front().map(TokenTree::span).unwrap_or(self.end)
    }
    #[allow(missing_docs)]
    pub fn push_front(&mut self, t: TokenTree) {
        self.tokens.push_front(t)
    }
    /// Pushes `tokens` back onto the front, preserving their order
    /// (hence the reverse iteration).
    #[allow(missing_docs)]
    pub fn prepend(
        &mut self,
        tokens: impl IntoIterator<Item = TokenTree, IntoIter: DoubleEndedIterator>,
    ) {
        for t in tokens.into_iter().rev() {
            self.push_front(t);
        }
    }
}
/// Parsing a value off the front of an [`Input`].
pub trait PopFrom {
    /// Pops a `Self` from `input`. On `Err(())`, diagnostics have already
    /// been pushed into `errors`; how much input was consumed is
    /// implementation-specific.
    fn pop_from(input: &mut Input, errors: &mut Errors) -> Result<Self, ()>
    where
        Self: Sized;
    /// Peeks first, then pops: `Ok(None)` when `Self` does not start here
    /// (leaving the input untouched — see the `Option` impl of `PopFrom`).
    fn peek_pop_from(input: &mut Input, errors: &mut Errors) -> Result<Option<Self>, ()>
    where
        Self: PeekFrom + Sized,
    {
        Option::<Self>::pop_from(input, errors)
    }
}
// A boxed value parses exactly like the inner value.
impl<T: PopFrom> PopFrom for Box<T> {
    fn pop_from(input: &mut Input, errors: &mut Errors) -> Result<Self, ()>
    where
        Self: Sized,
    {
        T::pop_from(input, errors).map(Box::new)
    }
}
// Unbox, then delegate to the inner value's conversion.
impl<T: IntoTokens> IntoTokens for Box<T> {
    fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) {
        let inner = *self;
        inner.into_tokens(root, tokens);
    }
}
impl<T: PeekFrom + PopFrom> PopFrom for Option<T> {
    fn pop_from(input: &mut Input, errors: &mut Errors) -> Result<Self, ()>
    where
        Self: Sized,
    {
        // Only attempt the parse when the look-ahead matches; otherwise the
        // input is left untouched and `Ok(None)` is returned.
        if T::peek_from(input) {
            T::pop_from(input, errors).map(Some)
        } else {
            Ok(None)
        }
    }
}
/// Cheap, non-consuming check whether a `Self` could start at the front of
/// `input`.
pub trait PeekFrom {
    /// `true` when the front of `input` looks like the start of a `Self`.
    fn peek_from(input: &Input) -> bool;
}
// A repetition also matches the empty input (zero items).
impl<T: PeekFrom> PeekFrom for Vec<T> {
    fn peek_from(input: &Input) -> bool {
        if input.is_empty() {
            return true;
        }
        T::peek_from(input)
    }
}
// Same rule as for `Vec`: the empty repetition always matches.
impl<T: PeekFrom> PeekFrom for VecDeque<T> {
    fn peek_from(input: &Input) -> bool {
        if input.is_empty() {
            return true;
        }
        T::peek_from(input)
    }
}
const _: () = {
    use std::collections::VecDeque;
    use crate::{EndOfInput, Errors, PopFrom};
    // Repeats `T` until the input is exhausted or `T` fails to parse.
    impl<T: PopFrom> PopFrom for Vec<T> {
        fn pop_from(input: &mut Input, errors: &mut Errors) -> Result<Self, ()> {
            let mut this = vec![];
            while !input.is_empty() {
                let before_len = input.len();
                match T::pop_from(input, errors) {
                    Ok(item) => this.extend([item]),
                    Err(()) => {
                        // On failure, report any leftovers (low priority) but
                        // still return the items parsed so far.
                        EndOfInput::<UNCONSUMED_AFTER_REPEATS>::pop_from(input, errors).ok();
                        return Ok(this);
                    }
                }
                if input.len() == before_len {
                    // `T` succeeded without consuming anything: stop to avoid
                    // an infinite loop. The input is non-empty here (loop
                    // condition + unchanged length), so this `EndOfInput`
                    // must fail — recording the leftovers as a side effect.
                    assert!(
                        EndOfInput::<UNCONSUMED_AFTER_REPEATS>::pop_from(input, errors).is_err()
                    );
                    break;
                }
            }
            Ok(this)
        }
    }
    // Identical repetition logic, collecting into a `VecDeque` instead.
    impl<T: PopFrom> PopFrom for VecDeque<T> {
        fn pop_from(input: &mut Input, errors: &mut Errors) -> Result<Self, ()> {
            let mut this = Self::default();
            while !input.is_empty() {
                let before_len = input.len();
                match T::pop_from(input, errors) {
                    Ok(item) => this.extend([item]),
                    Err(()) => {
                        // Same leftover-reporting as the `Vec` impl above.
                        EndOfInput::<UNCONSUMED_AFTER_REPEATS>::pop_from(input, errors).ok();
                        return Ok(this);
                    }
                }
                if input.len() == before_len {
                    // Same no-progress guard as the `Vec` impl above.
                    assert!(
                        EndOfInput::<UNCONSUMED_AFTER_REPEATS>::pop_from(input, errors).is_err()
                    );
                    break;
                }
            }
            Ok(this)
        }
    }
};
/// Parses a `T` and then requires the input to be exhausted; any leftover
/// tokens are reported with priority `P`.
#[derive(Clone)]
pub struct Exhaustive<T, P: ConstErrorPriority>(pub T, PhantomData<P>);
impl<T: PopFrom, P: ConstErrorPriority> PopFrom for Exhaustive<T, P> {
    fn pop_from(input: &mut Input, errors: &mut Errors) -> Result<Self, ()> {
        let parsed = T::pop_from(input, errors);
        // Report leftovers even when the inner parse failed.
        EndOfInput::<P>::pop_from(input, errors).ok();
        parsed.map(|value| Self(value, PhantomData))
    }
}
impl<T: IntoTokens, P: ConstErrorPriority> IntoTokens for Exhaustive<T, P> {
    fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) {
        // Only the parsed value carries tokens; the marker is zero-sized.
        let Self(value, PhantomData) = self;
        value.into_tokens(root, tokens);
    }
}
/// Parses successfully only at the end of input; otherwise records the
/// unconsumed tokens as an error of priority `P` and fails.
#[derive(Clone)]
pub struct EndOfInput<P: ConstErrorPriority>(PhantomData<P>);
impl<P: ConstErrorPriority> PopFrom for EndOfInput<P> {
fn pop_from(input: &mut Input, errors: &mut Errors) -> Result<Self, ()> {
input
.is_empty()
.then_some(Self(PhantomData))
.ok_or_else(|| {
let rest = input.tokens.iter().cloned().collect::<TokenStream>();
errors.push(Error::new(
P::PRIORITY,
format!("Unconsumed tokens: `{rest}`"),
rest.into_iter().map(|t| t.span()),
));
})
}
}
/// Span access for constructs that carry a single [`Span`].
pub trait SimpleSpanned {
    #[allow(missing_docs)]
    fn span(&self) -> Span;
    #[allow(missing_docs)]
    fn set_span(&mut self, span: Span);
    /// Builder-style variant of [`SimpleSpanned::set_span`].
    #[allow(missing_docs)]
    fn with_span(mut self, span: Span) -> Self
    where
        Self: Sized,
    {
        self.set_span(span);
        self
    }
}
/// Transparent wrapper; presumably marks a collection to be parsed eagerly,
/// item by item, while the next item still peeks successfully (see the
/// `PopFrom` impl below) — TODO confirm intended semantics.
pub struct Eager<T: ?Sized>(pub T);
impl<T: ?Sized> Deref for Eager<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // Transparent access to the wrapped value.
        &self.0
    }
}
impl<T: ?Sized> DerefMut for Eager<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl<T: FromIterator<A>, A> FromIterator<A> for Eager<T> {
    fn from_iter<I: IntoIterator<Item = A>>(iter: I) -> Self {
        // Collect into the inner collection, then wrap.
        let inner: T = iter.into_iter().collect();
        Self(inner)
    }
}
impl<T: ?Sized + PeekFrom> PeekFrom for Eager<T> {
    fn peek_from(input: &Input) -> bool {
        // Peeking is delegated unchanged to the wrapped collection type.
        <T as PeekFrom>::peek_from(input)
    }
}
impl<T: IntoIterator<Item: PeekFrom + PopFrom> + FromIterator<T::Item>> PopFrom for Eager<T> {
    fn pop_from(input: &mut Input, errors: &mut Errors) -> Result<Self, ()>
    where
        Self: Sized,
    {
        // Pop items while the next one peeks successfully; the first hard
        // parse error aborts the whole repetition.
        let mut items = Vec::new();
        while let Some(item) = T::Item::peek_pop_from(input, errors)? {
            items.push(item);
        }
        Ok(items.into_iter().collect())
    }
}
impl<T: IntoTokens> IntoTokens for Eager<T> {
    fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) {
        // The wrapper itself contributes no tokens.
        let Self(inner) = self;
        inner.into_tokens(root, tokens);
    }
}
/// Panic payload marker: signals that the panic was already converted into
/// an [`Error`], so the catch site must not record another one
/// (see `parse_once_with_infallible_impl`).
pub struct HandledPanic;
/// Runs `f` once under `catch_unwind`, converting panics into entries in
/// `errors`; `Err(())` means `f` panicked.
pub fn parse_once_with_infallible<'a, T>(
    input: &'a mut Input,
    errors: &'a mut Errors,
    f: impl 'a + UnwindSafe + FnOnce(&mut Input, &mut Errors) -> T,
) -> Result<T, ()> {
    parse_once_with_infallible_impl(input, errors, f)
}
/// Crate-internal variant of `parse_once_with_infallible` without the
/// `UnwindSafe` bound; wraps `f` in `AssertUnwindSafe` and turns any panic
/// into an [`Error`] of priority [`ErrorPriority::PANIC`].
pub(crate) fn parse_once_with_infallible_impl<'a, T>(
    input: &mut Input,
    errors: &mut Errors,
    f: impl 'a + FnOnce(&mut Input, &mut Errors) -> T,
) -> Result<T, ()> {
    // Converts a caught panic payload into an `Error` located at the input's
    // front span. Restructured from the original, which buried `return`
    // statements inside a `format!` argument; messages and pushes are
    // byte-identical.
    fn handle_panic(input: &mut Input, errors: &mut Errors, panic: Box<dyn Any + Send>) {
        // A `HandledPanic` payload means an error was already recorded.
        if panic.as_ref().is::<HandledPanic>() {
            return;
        }
        // `panic!` payloads are `String` or `&'static str`; anything else
        // (e.g. `panic_any`) gets the generic message. `{:?}` of `&str` and
        // `String` render identically (quoted + escaped).
        let message = if let Some(message) = panic.as_ref().downcast_ref::<String>() {
            format!("proc macro panicked: {message:?}")
        } else if let Some(message) = panic.as_ref().downcast_ref::<&'static str>() {
            format!("proc macro panicked: {message:?}")
        } else {
            "proc macro panicked".to_string()
        };
        errors.push(Error::new(
            ErrorPriority::PANIC,
            message,
            [input.front_span()],
        ));
    }
    catch_unwind(AssertUnwindSafe(|| f(input, errors))).map_err(|panic| {
        handle_panic(input, errors, panic);
    })
}
/// Like [`parse_once_with_infallible`], but for a fallible `f`: both a panic
/// and a parse failure collapse into `Err(())`.
pub fn parse_once_with<'a, T>(
    input: &'a mut Input,
    errors: &'a mut Errors,
    f: impl 'a + UnwindSafe + FnOnce(&mut Input, &mut Errors) -> Result<T, ()>,
) -> Result<T, ()> {
    parse_once_with_impl(input, errors, f)
}
/// Crate-internal variant of [`parse_once_with`] without the `UnwindSafe`
/// bound.
pub(crate) fn parse_once_with_impl<'a, T>(
    input: &mut Input,
    errors: &mut Errors,
    f: impl 'a + FnOnce(&mut Input, &mut Errors) -> Result<T, ()>,
) -> Result<T, ()> {
    // `?` flattens the nested result (outer `Err` = panicked, inner = parse
    // failure), replacing the previous manual match that re-wrapped `Err`.
    parse_once_with_infallible_impl(input, errors, f)?
}
/// Pops a single `T` from `input`, converting panics into errors.
pub fn parse_once<'a, T: PopFrom>(input: &'a mut Input, errors: &'a mut Errors) -> Result<T, ()> {
    parse_once_with_impl(input, errors, T::pop_from)
}
/// Repeatedly runs `f` (panic-safe) until the input is exhausted or no
/// progress is made; see `parse_all_with_infallible_impl` for the exact
/// iteration and leftover-reporting semantics.
pub fn parse_all_with_infallible<'a, T>(
    input: &'a mut Input,
    errors: &'a mut Errors,
    f: impl 'a + UnwindSafe + FnMut(&mut Input, &mut Errors) -> T,
) -> impl 'a + Iterator<Item = T> {
    parse_all_with_infallible_impl(input, errors, f)
}
/// Crate-internal variant of `parse_all_with_infallible` (no `UnwindSafe`
/// bound on the caller's side beyond what the iterator needs).
pub(crate) fn parse_all_with_infallible_impl<'a, T>(
    input: &'a mut Input,
    errors: &'a mut Errors,
    f: impl 'a + UnwindSafe + FnMut(&mut Input, &mut Errors) -> T,
) -> impl 'a + Iterator<Item = T> {
    struct Iter<'a, F> {
        input: &'a mut Input,
        errors: &'a mut Errors,
        f: F,
        // Set when the previous call consumed nothing; the iterator then
        // ends on the following `next` call to avoid looping forever.
        stalled: bool,
    }
    impl<'a, T, F: 'a + UnwindSafe + FnMut(&mut Input, &mut Errors) -> T> Iterator for Iter<'a, F> {
        type Item = T;
        fn next(&mut self) -> Option<Self::Item> {
            if mem::take(&mut self.stalled) || self.input.is_empty() {
                None
            } else {
                let before_len = self.input.len();
                // Panics inside `f` are converted into errors and end the
                // iteration (as `None`).
                let next =
                    match parse_once_with_infallible_impl(self.input, self.errors, &mut self.f) {
                        Ok(ok) => Some(ok),
                        Err(()) => None,
                    };
                self.stalled = self.input.len() == before_len;
                next
            }
        }
    }
    impl<'a, F> Drop for Iter<'a, F> {
        fn drop(&mut self) {
            // Whatever is still unconsumed when the iterator is dropped is
            // reported with the lowest priority, even if iteration was
            // abandoned early.
            EndOfInput::<UNCONSUMED_INPUT>::pop_from(self.input, self.errors).ok();
        }
    }
    Iter {
        input,
        errors,
        f,
        stalled: false,
    }
}
/// Like [`parse_all_with_infallible`], but stops at the first `Err(())`
/// produced by `f`.
pub fn parse_all_with<'a, T: 'a>(
    input: &'a mut Input,
    errors: &'a mut Errors,
    f: impl 'a + UnwindSafe + FnMut(&mut Input, &mut Errors) -> Result<T, ()>,
) -> impl 'a + Iterator<Item = T> {
    // `Result::ok` is the idiomatic shorthand for the previous manual
    // `match` that mapped `Ok` to `Some` and `Err` to `None`.
    parse_all_with_infallible_impl(input, errors, f).map_while(Result::ok)
}
/// Pops `T`s until the input is exhausted, a parse fails, or no progress is
/// made; any unconsumed remainder is reported when the returned iterator is
/// dropped.
pub fn parse_all<'a, T: 'a + PopFrom>(
    input: &'a mut Input,
    errors: &'a mut Errors,
) -> impl 'a + Iterator<Item = T> {
    parse_all_with(input, errors, T::pop_from)
}