use core::cmp::max_by_key;
use core::ops::Range;
use core::fmt::{self, Debug, Display, Formatter, Write};
use proc_macro2::{TokenStream, TokenTree, Spacing, Delimiter};
use syn::parse::{ParseStream, discouraged::Speculative};
use quote::ToTokens;
use crate::{Error, Result, Span, Spanned, LineColumn};
/// Parses the result of a `quote!` invocation into the inferred AST node
/// type, propagating any parse error to the caller via `?`.
///
/// Consequently, this must be used inside a function whose return type can
/// absorb a `parsel::Error` through the `?` operator.
#[macro_export]
macro_rules! try_parse_quote {
    ($($tt:tt)*) => {
        ::parsel::parse2(::parsel::quote::quote!($($tt)*))?
    }
}
/// Like [`try_parse_quote!`], but forwards the given span to
/// `quote_spanned!` so every generated token carries `$span`.
///
/// Propagates parse errors via `?`, so it must be used inside a function
/// whose return type can absorb a `parsel::Error`.
#[macro_export]
macro_rules! try_parse_quote_spanned {
    ($span:expr => $($tt:tt)*) => {
        ::parsel::parse2(::parsel::quote::quote_spanned!($span => $($tt)*))?
    }
}
/// Extension methods for locating a spanned item inside its source text.
pub trait SpannedExt {
    /// The span of this item.
    fn span(&self) -> Span;

    /// Wraps the span in a human-readable `Display` adapter.
    fn format_span(&self) -> SpanDisplay {
        self.span().into()
    }

    /// Returns the slice of `source` that this item's span covers.
    fn source_substring<'s>(&self, source: &'s str) -> &'s str {
        let range = self.byte_range(source);
        &source[range]
    }

    /// Half-open range of byte offsets of the span within `source`.
    fn byte_range(&self, source: &str) -> Range<usize> {
        let span = self.span();
        byte_offset(source, span.start())..byte_offset(source, span.end())
    }

    /// Half-open range of character offsets of the span within `source`.
    fn char_range(&self, source: &str) -> Range<usize> {
        let span = self.span();
        char_offset(source, span.start())..char_offset(source, span.end())
    }
}
/// Blanket impl: every `Spanned` type gets the `SpannedExt` conveniences.
impl<T: ?Sized + Spanned> SpannedExt for T {
    fn span(&self) -> Span {
        <T as Spanned>::span(self)
    }
}
/// Converts a 1-based line / 0-based column location to a byte offset
/// within `source`.
///
/// A column past the end of its line clamps to the end of that line, and a
/// line past the end of the input clamps to `source.len()`.
fn byte_offset(source: &str, loc: LineColumn) -> usize {
    let mut lines = source.split_inclusive('\n');

    // Bytes occupied by every line preceding the target line.
    let preceding: usize = lines
        .by_ref()
        .take(loc.line.saturating_sub(1))
        .map(str::len)
        .sum();

    // Byte index of the target column inside the target line, if there is one.
    let within = match lines.next() {
        Some(line) => line
            .char_indices()
            .nth(loc.column)
            .map_or(line.len(), |(index, _)| index),
        None => 0,
    };

    preceding + within
}
/// Converts a 1-based line / 0-based column location to a character (not
/// byte) offset within `source`.
///
/// The column is added as-is, i.e. it is not clamped against the length of
/// the target line.
fn char_offset(source: &str, loc: LineColumn) -> usize {
    let preceding: usize = source
        .split_inclusive('\n')
        .take(loc.line.saturating_sub(1))
        .map(|line| line.chars().count())
        .sum();

    preceding + loc.column
}
/// Adapter that renders a `Span` in `line:col..line:col` form through its
/// `Display` impl.
#[derive(Clone, Copy, Debug)]
pub struct SpanDisplay {
    span: Span,
}
impl SpanDisplay {
pub const fn new(span: Span) -> Self {
SpanDisplay { span }
}
}
impl From<Span> for SpanDisplay {
fn from(span: Span) -> Self {
SpanDisplay::new(span)
}
}
impl Display for SpanDisplay {
    /// Formats the span as `start_line:start_col..end_line:end_col`,
    /// converting BOTH 0-based columns to 1-based for human consumption.
    fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {
        write!(
            formatter,
            "{start_line}:{start_col}..{end_line}:{end_col}",
            start_line = self.span.start().line,
            start_col = self.span.start().column + 1,
            end_line = self.span.end().line,
            // BUG FIX: the end column previously lacked the `+ 1` applied
            // to the start column, so the printed range mixed 1-based and
            // 0-based column indices.
            end_col = self.span.end().column + 1,
        )
    }
}
/// Wraps a parse `cause` in a new error that names the AST path
/// (`[enum_::]ctor::field`) being parsed when the failure occurred.
/// The new error reuses the span of `cause`.
#[doc(hidden)]
pub fn chain_error<T: Display>(
    cause: Error,
    enum_: &str,
    ctor: &str,
    field: T,
) -> Error {
    // Only prefix the enum name when the constructor actually belongs to one.
    let path = if enum_.is_empty() {
        format!("{}::{}", ctor, field)
    } else {
        format!("{}::{}::{}", enum_, ctor, field)
    };
    let message = format!("error parsing {}, caused by:\n{}", path, cause);

    Error::new(cause.span(), message)
}
/// Speculatively parses one variant of an alternation.
///
/// On success, `input` is advanced past the consumed tokens. On failure,
/// `input` is left untouched and the returned error is whichever of
/// `error_acc` / the fresh error reached farther into the input, so that
/// after trying every variant the caller can report the most specific
/// failure.
#[doc(hidden)]
pub fn try_parse_variant<T, F>(
    input: ParseStream<'_>,
    error_acc: Option<Error>,
    parser: F,
) -> Result<T>
where
    F: FnOnce(ParseStream<'_>) -> Result<T>,
{
    // Parse against a fork so that a failed attempt consumes nothing.
    let fork = input.fork();
    match parser(&fork) {
        Ok(value) => {
            // Commit: fast-forward the real stream to where the fork ended.
            input.advance_to(&fork);
            Ok(value)
        }
        Err(error) => {
            // Keep the error whose span ends farthest into the source.
            // `max_by_key` returns its second argument on ties, so the
            // newer error wins when both stopped at the same position.
            let farthest = match error_acc {
                Some(acc) => max_by_key(acc, error, |e| e.span().end()),
                None => error,
            };
            Err(farthest)
        }
    }
}
/// Writes a `TokenStream` to a `writer`, either pretty-printed over
/// multiple indented lines or compacted onto a single line.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct TokenStreamFormatter<S, W> {
    // Current nesting depth; one copy of `indent_string` per level.
    indent_level: usize,
    // `Some` => pretty-print using this indent unit; `None` => compact.
    indent_string: Option<S>,
    writer: W,
}
impl<S, W> TokenStreamFormatter<S, W>
where
S: AsRef<str>,
{
pub fn with_indent(indent_string: S, writer: W) -> Result<Self> {
if indent_string.as_ref().trim().is_empty() {
Ok(TokenStreamFormatter {
indent_level: 0,
indent_string: Some(indent_string),
writer,
})
} else {
Err(Error::new(Span::call_site(), "indentation contains non-whitespace characters"))
}
}
}
impl<S, W> TokenStreamFormatter<S, W>
where
    S: AsRef<str>,
    W: Write,
{
    /// Writes `stream` to the underlying writer.
    ///
    /// In pretty mode, lines are broken after free-standing `,`/`;` and
    /// around delimited groups, indenting one level per nesting depth.
    /// In compact mode the same break points degrade to a single space
    /// (or nothing, when the preceding token's spacing is `Joint`).
    pub fn write(&mut self, stream: TokenStream) -> fmt::Result {
        self.write_indent()?;
        // `spacing` records the previous token's spacing. `Joint` means
        // "suppress the separating space before the next token"; starting
        // with `Joint` avoids a leading space.
        let mut spacing = Spacing::Joint;
        let mut iter = stream.into_iter().peekable();
        while let Some(tt) = iter.next() {
            if spacing == Spacing::Joint {
                spacing = Spacing::Alone;
            } else {
                self.writer.write_char(' ')?;
            }
            match tt {
                TokenTree::Literal(lit) => write!(self.writer, "{}", lit)?,
                TokenTree::Ident(ident) => write!(self.writer, "{}", ident)?,
                TokenTree::Punct(punct) => {
                    write!(self.writer, "{}", punct)?;
                    spacing = punct.spacing();
                    // Break the line after a free-standing `,` or `;`,
                    // unless it is the very last token of the stream.
                    if matches!(
                        (punct.as_char(), spacing, iter.peek()),
                        (',' | ';', Spacing::Alone, Some(_))
                    ) {
                        self.write_newline_and_indent(spacing)?;
                        spacing = Spacing::Joint;
                    }
                }
                TokenTree::Group(group) => {
                    let inner_stream = group.stream();
                    let (open, close) = match group.delimiter() {
                        Delimiter::None => {
                            // Invisible delimiters: flatten the contents
                            // into the current line and indent level.
                            self.write(inner_stream)?;
                            continue;
                        }
                        Delimiter::Parenthesis => ('(', ')'),
                        Delimiter::Bracket => ('[', ']'),
                        Delimiter::Brace => ('{', '}'),
                    };
                    self.writer.write_char(open)?;
                    if !inner_stream.is_empty() {
                        // Non-empty group: body goes on its own, deeper
                        // indented line(s); the closing delimiter returns
                        // to the outer level.
                        self.indent_level += 1;
                        self.write_newline(Spacing::Joint)?;
                        self.write(inner_stream)?;
                        self.indent_level -= 1;
                        self.write_newline_and_indent(Spacing::Joint)?;
                    }
                    self.writer.write_char(close)?;
                    if iter.peek().is_some() {
                        self.write_newline_and_indent(spacing)?;
                        spacing = Spacing::Joint;
                    }
                }
            }
        }
        Ok(())
    }

    /// Writes the token stream of the AST node `node`.
    pub fn write_ast_node<T>(&mut self, node: &T) -> fmt::Result
    where
        T: ?Sized + ToTokens,
    {
        self.write(node.to_token_stream())
    }

    /// Emits `indent_string` once per current indent level.
    /// No-op in compact mode (`indent_string` is `None`).
    fn write_indent(&mut self) -> fmt::Result {
        if let Some(indent_string) = self.indent_string.as_ref() {
            let indent_string: &str = indent_string.as_ref();
            for _ in 0..self.indent_level {
                self.writer.write_str(indent_string)?;
            }
        }
        Ok(())
    }

    /// Line break in pretty mode; in compact mode, a single space when the
    /// preceding token's `spacing` was `Alone`, otherwise nothing.
    fn write_newline(&mut self, spacing: Spacing) -> fmt::Result {
        if self.indent_string.is_some() {
            writeln!(self.writer)
        } else if spacing == Spacing::Alone {
            self.writer.write_char(' ')
        } else {
            Ok(())
        }
    }

    /// `write_newline` followed by indentation for the current level.
    fn write_newline_and_indent(&mut self, spacing: Spacing) -> fmt::Result {
        self.write_newline(spacing)?;
        self.write_indent()
    }
}
impl<W> TokenStreamFormatter<&'static str, W> {
pub const fn pretty(writer: W) -> Self {
TokenStreamFormatter {
indent_level: 0,
indent_string: Some(" "),
writer,
}
}
pub const fn compact(writer: W) -> Self {
TokenStreamFormatter {
indent_level: 0,
indent_string: None,
writer,
}
}
}
/// Pretty-prints the tokens of the AST node `node` to `writer`.
pub fn format_ast_node_pretty<T, W>(node: &T, writer: W) -> fmt::Result
where
    T: ?Sized + ToTokens,
    W: Write,
{
    let mut fmt = TokenStreamFormatter::pretty(writer);
    fmt.write_ast_node(node)
}
/// Writes the tokens of the AST node `node` to `writer` on a single line.
pub fn format_ast_node_compact<T, W>(node: &T, writer: W) -> fmt::Result
where
    T: ?Sized + ToTokens,
    W: Write,
{
    let mut fmt = TokenStreamFormatter::compact(writer);
    fmt.write_ast_node(node)
}
/// Pretty-prints `tokens` to `writer`.
pub fn format_tokens_pretty<W: Write>(tokens: TokenStream, writer: W) -> fmt::Result {
    let mut fmt = TokenStreamFormatter::pretty(writer);
    fmt.write(tokens)
}
/// Writes `tokens` to `writer` on a single line.
pub fn format_tokens_compact<W: Write>(tokens: TokenStream, writer: W) -> fmt::Result {
    let mut fmt = TokenStreamFormatter::compact(writer);
    fmt.write(tokens)
}
/// Writes the tokens of the AST node `node` to `formatter`: compact in
/// alternate (`{:#}`) mode, pretty-printed otherwise.
pub fn format_ast_node<T>(node: &T, formatter: &mut Formatter<'_>) -> fmt::Result
where
    T: ?Sized + ToTokens,
{
    match formatter.alternate() {
        true => format_ast_node_compact(node, formatter),
        false => format_ast_node_pretty(node, formatter),
    }
}
/// Writes `tokens` to `formatter`: compact in alternate (`{:#}`) mode,
/// pretty-printed otherwise.
pub fn format_tokens(tokens: TokenStream, formatter: &mut Formatter<'_>) -> fmt::Result {
    match formatter.alternate() {
        true => format_tokens_compact(tokens, formatter),
        false => format_tokens_pretty(tokens, formatter),
    }
}