use std::fmt;
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
use crate::IntoTokens;
/// An ordered queue of token [`Chunk`]s that is lazily assembled into a
/// [`TokenStream`].
#[derive(Debug, Clone)]
pub struct TokenQueue {
    // The queued chunks, in output order.
    chunks: Vec<Chunk>,
    // Index into `chunks` of the most recently opened (not yet closed)
    // substream marker, if any. See `open_substream`/`close_substream`.
    substream_stack_top_ptr: Option<usize>,
    // Bookkeeping for the span stamped onto individually pushed tokens.
    span_tracking: SpanTracking,
}
/// Tracks the span applied to newly pushed tokens, counting nested `set`
/// calls so that only the outermost span takes effect.
#[derive(Debug, Clone, Default)]
struct SpanTracking {
    // The active span, if any.
    current: Option<Span>,
    // Number of nested `set` calls ignored while `current` was occupied.
    ignored: usize,
}
impl SpanTracking {
    /// An empty tracker: no active span, no ignored nesting levels.
    const fn none() -> SpanTracking {
        SpanTracking {
            current: None,
            ignored: 0,
        }
    }
    /// Installs `next` as the tracked span unless one is already active, in
    /// which case the call is merely counted so `unset` can unwind it.
    fn set(&mut self, next: Span) {
        if self.current.is_none() {
            self.current = Some(next);
        } else {
            self.ignored += 1;
        }
    }
    /// Reverses one `set`: pops an ignored nesting level first, and only
    /// clears the active span once every nested `set` has been undone.
    fn unset(&mut self) {
        if self.ignored == 0 {
            self.current = None;
        } else {
            self.ignored -= 1;
        }
    }
    /// The currently active span, if any.
    fn current(&self) -> Option<Span> {
        self.current
    }
}
/// One queued unit of output.
#[derive(Debug, Clone)]
enum Chunk {
    // A single token of each kind, emitted as-is.
    PutGroup(Group),
    PutIdent(Ident),
    PutLiteral(Literal),
    PutPunct(Punct),
    // A whole stream spliced into the output without being split apart.
    Embed(TokenStream),
    // Marker opening a nested substream; carries the previous stack top.
    OpenSubstream(StackParent),
}
/// Compact encoding of the previous substream stack top, stored as an offset
/// relative to the marker's own index. A value of `0` means "no parent".
#[derive(Debug, Default, Clone, Copy)]
struct StackParent(u32);
impl StackParent {
    /// Encodes `ptr` (the previous stack top) relative to `addr` (this
    /// marker's index). `ptr` is expected to be strictly less than `addr`
    /// when present — an equal value would collide with the "no parent"
    /// encoding.
    #[inline]
    pub fn new(addr: usize, ptr: Option<usize>) -> StackParent {
        match ptr {
            None => StackParent(0),
            Some(i) => StackParent(u32::try_from(addr - i).unwrap()),
        }
    }
    /// Decodes the previous stack top given `addr`, this marker's index.
    #[inline]
    pub fn get(self, addr: usize) -> Option<usize> {
        match self.0 {
            0 => None,
            offset => Some(addr - usize::try_from(offset).unwrap()),
        }
    }
}
/// Debug-friendly renderer for a slice of chunks: `{:#}` puts one token per
/// line with indentation, `{}` separates nested tokens with single spaces.
struct DisplayChunks<'a>(&'a [Chunk]);
impl<'a> fmt::Display for DisplayChunks<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Opening/closing text for each group delimiter ("∅" marks the
        // invisible `Delimiter::None`).
        fn wings(delim: Delimiter) -> [&'static str; 2] {
            match delim {
                Delimiter::Parenthesis => ["(", ")"],
                Delimiter::Brace => ["{", "}"],
                Delimiter::Bracket => ["[", "]"],
                Delimiter::None => ["∅", "∅"],
            }
        }
        // Separator written before every token inside a stream: an indented
        // newline in alternate mode, a single space otherwise.
        fn separator(f: &mut fmt::Formatter<'_>, depth: usize) -> fmt::Result {
            if !f.alternate() {
                return f.write_str(" ");
            }
            f.write_str("\n")?;
            for _ in 0..=depth {
                f.write_str("  ")?;
            }
            Ok(())
        }
        fn write_group(f: &mut fmt::Formatter<'_>, group: &Group, depth: usize) -> fmt::Result {
            let [open, close] = wings(group.delimiter());
            f.write_str(open)?;
            write_stream(f, group.stream(), depth + 1)?;
            f.write_str(close)
        }
        fn write_stream(f: &mut fmt::Formatter<'_>, ts: TokenStream, depth: usize) -> fmt::Result {
            for tt in ts {
                separator(f, depth)?;
                match tt {
                    TokenTree::Group(group) => write_group(f, &group, depth)?,
                    TokenTree::Ident(id) => write!(f, "{id}")?,
                    TokenTree::Punct(p) => write!(f, "{p}")?,
                    TokenTree::Literal(lit) => write!(f, "{lit}")?,
                }
            }
            Ok(())
        }
        // Top-level chunks are written back to back (no separator between
        // them); separators only appear between the tokens of a stream.
        let mut depth = 0;
        for chunk in self.0 {
            match chunk {
                Chunk::Embed(ts) => write_stream(f, ts.clone(), depth)?,
                Chunk::PutGroup(group) => write_group(f, group, depth)?,
                Chunk::PutIdent(id) => write!(f, "{id}")?,
                Chunk::PutPunct(p) => write!(f, "{p}")?,
                Chunk::PutLiteral(lit) => write!(f, "{lit}")?,
                Chunk::OpenSubstream(_) => {
                    // Substream markers render as "∅" and indent what follows.
                    f.write_str("∅")?;
                    depth += 1;
                }
            }
        }
        Ok(())
    }
}
/// A value that can become a single [`Chunk`], optionally re-spanned first.
trait ChunkLike: Clone {
    /// Wraps `self` in its corresponding `Chunk` variant.
    fn into_chunk(self) -> Chunk;
    /// Overrides the token's span.
    fn set_span(&mut self, span: Span);
    /// Converts to a chunk, first applying `span` when one is given.
    #[inline]
    fn into_chunk_with_span(mut self, span: Option<Span>) -> Chunk {
        if let Some(span) = span {
            self.set_span(span);
        }
        self.into_chunk()
    }
}
// Implements `ChunkLike` for each plain token type by wrapping it in the
// matching `Chunk` variant and delegating `set_span` to the type's inherent
// method of the same name.
macro_rules! impl_chunklike_for_tt {
    ($($var:ident($ty:ty),)*) => {
        $(
            impl ChunkLike for $ty {
                #[inline]
                fn into_chunk(self) -> Chunk {
                    Chunk::$var(self.into())
                }
                #[inline]
                fn set_span(&mut self, span: Span) {
                    // Inherent `set_span` wins over the trait method here.
                    self.set_span(span);
                }
            }
        )*
    };
}
impl_chunklike_for_tt! {
    PutPunct(Punct),
    PutIdent(Ident),
    PutLiteral(Literal),
    PutGroup(Group),
}
/// A `TokenTree` maps to the `Chunk` variant of its inner token kind.
impl ChunkLike for TokenTree {
    fn into_chunk(self) -> Chunk {
        match self {
            TokenTree::Ident(tt) => Chunk::PutIdent(tt),
            TokenTree::Punct(tt) => Chunk::PutPunct(tt),
            TokenTree::Literal(tt) => Chunk::PutLiteral(tt),
            TokenTree::Group(tt) => Chunk::PutGroup(tt),
        }
    }
    fn set_span(&mut self, span: Span) {
        // Delegates to the inherent `TokenTree::set_span`.
        self.set_span(span);
    }
}
impl From<TokenStream> for Chunk {
    /// Streams are always embedded whole rather than split into tokens.
    #[inline]
    fn from(ts: TokenStream) -> Self {
        Chunk::Embed(ts)
    }
}
// Implements `IntoTokens` for single-token types: each value enqueues exactly
// one chunk, stamped with the queue's currently tracked span (if any).
macro_rules! impl_into_tokens_for_tt {
    ($($t:ty),*) => {
        $(
            impl IntoTokens for $t {
                fn extend_tokens(self, q: &mut TokenQueue) {
                    q.chunks.push(self.into_chunk_with_span(q.tracked_span()));
                }
                fn queue_size_hint(&self) -> (usize, Option<usize>) {
                    // Exactly one chunk per token.
                    (1, Some(1))
                }
            }
        )*
    };
}
impl_into_tokens_for_tt! { TokenTree, Punct, Ident, Group, Literal }
impl IntoTokens for TokenStream {
fn extend_tokens(self, q: &mut TokenQueue) {
q.chunks.push(self.into());
}
fn into_tokens(self) -> TokenQueue {
self.into()
}
fn queue_size_hint(&self) -> (usize, Option<usize>) {
(!self.is_empty() as usize, None)
}
}
// Invokes `$macro` once with the list of types that implement `ChunkLike`
// directly, so several impl-generating macros can share the same list.
macro_rules! enumerate_into_chunk_implementors {
    ($macro:ident) => {
        $macro! { Punct, Ident, Literal, Group }
    };
}
/// Marker trait for values accepted by [`TokenQueue::push`].
pub trait PushToken: IntoTokens {}
macro_rules! impl_push_token_for_into_chunk {
    ($($ty:ty),*) => {
        $(
            impl PushToken for $ty {}
        )*
    };
}
enumerate_into_chunk_implementors!(impl_push_token_for_into_chunk);
impl PushToken for TokenStream {}
impl PushToken for TokenTree {}
// NOTE(review): relies on an `IntoTokens` impl for `Option<T>` presumably
// provided elsewhere in the crate — not visible in this file.
impl<T: PushToken> PushToken for Option<T> {}
impl Default for TokenQueue {
fn default() -> Self {
TokenQueue::new()
}
}
// Panic messages for mismatched `open_substream`/`close_substream` pairs.
const POP_NO_PUSH_MSG: &str = "found a pop with no push";
const PUSH_NO_POP_MSG: &str = "found a push with no pop";
impl TokenQueue {
    /// Creates an empty queue.
    pub const fn new() -> TokenQueue {
        TokenQueue {
            chunks: Vec::new(),
            substream_stack_top_ptr: None,
            span_tracking: SpanTracking::none(),
        }
    }
    /// Creates an empty queue with capacity for `n` chunks.
    pub fn with_capacity(n: usize) -> TokenQueue {
        TokenQueue {
            chunks: Vec::with_capacity(n),
            substream_stack_top_ptr: None,
            span_tracking: SpanTracking::none(),
        }
    }
    /// Number of queued *chunks* — not tokens: an embedded stream counts as
    /// one chunk regardless of how many tokens it contains.
    #[must_use]
    pub fn len(&self) -> usize {
        self.chunks.len()
    }
    /// `true` when no chunks are queued.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.chunks.is_empty()
    }
    /// Lower/upper bounds on the number of tokens this queue will produce.
    #[must_use]
    pub fn token_size_hint(&self) -> (usize, Option<usize>) {
        token_size_hint_for_chunks(&self.chunks)
    }
    /// Reserves room for at least `n` additional chunks.
    pub fn reserve(&mut self, n: usize) {
        self.chunks.reserve(n);
    }
    /// Enqueues a single pushable value (token, tree, stream, or `Option`
    /// thereof).
    pub fn push<T: PushToken>(&mut self, t: T) {
        self.extend_from(t);
    }
    /// Consumes the queue, appending all of its tokens to `ts`.
    pub fn extend_stream(mut self, ts: &mut TokenStream) {
        self.chunks.drain(..).collect_by(ts)
    }
    /// Enqueues anything that implements [`IntoTokens`].
    pub fn extend_from<T: IntoTokens>(&mut self, t: T) {
        t.extend_tokens(self)
    }
    /// Opens a nested substream: subsequently pushed chunks are captured
    /// until the matching [`close_substream`](TokenQueue::close_substream).
    pub fn open_substream(&mut self) {
        let ptr = self.chunks.len();
        // Record the previous top-of-stack (as a relative offset inside the
        // marker) so it can be restored when this substream is closed.
        let parent = StackParent::new(ptr, self.substream_stack_top_ptr);
        self.chunks.push(Chunk::OpenSubstream(parent));
        self.substream_stack_top_ptr = Some(ptr);
    }
    /// Closes the innermost open substream and returns its tokens.
    ///
    /// # Panics
    /// Panics if no substream is currently open.
    #[must_use = "`close_substream` returns a `TokenStream` and does not enqueue anything."]
    #[track_caller]
    pub fn close_substream(&mut self) -> TokenStream {
        let Some(ptr) = self.substream_stack_top_ptr else {
            panic!("{POP_NO_PUSH_MSG}")
        };
        // Drain from the marker onward: the first drained chunk must be the
        // `OpenSubstream` marker itself; the rest are the substream's chunks.
        let mut drain = self.chunks.drain(ptr..);
        let Some(Chunk::OpenSubstream(parent)) = drain.next() else {
            unreachable!(
                "`close_substream`: expected chunk at index {ptr} to be a `OpenSubstream`"
            );
        };
        // Restore the enclosing substream (if any) as the new stack top.
        self.substream_stack_top_ptr = parent.get(ptr);
        drain.collect_by(())
    }
    /// Closes the innermost substream and enqueues it as a delimited group,
    /// optionally overriding the group's span.
    pub fn close_substream_and_push_as_group_with_span(
        &mut self,
        delim: Delimiter,
        span: Option<Span>,
    ) {
        let mut group = Group::new(delim, self.close_substream());
        if let Some(span) = span {
            group.set_span(span);
        }
        self.push(group);
    }
    /// Closes the innermost substream and enqueues it as a delimited group.
    pub fn close_substream_and_push_as_group(&mut self, delim: Delimiter) {
        self.close_substream_and_push_as_group_with_span(delim, None);
    }
    /// Starts applying `span` to subsequently pushed tokens. Nested calls are
    /// counted, but only the outermost span takes effect.
    pub fn set_tracked_span(&mut self, span: Span) {
        self.span_tracking.set(span);
    }
    /// Undoes the most recent [`set_tracked_span`](TokenQueue::set_tracked_span).
    pub fn unset_tracked_span(&mut self) {
        self.span_tracking.unset();
    }
    /// The span currently applied to pushed tokens, if any.
    pub fn tracked_span(&self) -> Option<Span> {
        self.span_tracking.current()
    }
    /// A human-readable rendering of the queued chunks (for debugging).
    pub fn display(&self) -> impl fmt::Display {
        DisplayChunks(&self.chunks)
    }
}
/// Bounds on the number of tokens the given chunks will yield: each plain
/// token chunk contributes exactly one; an embedded stream invalidates the
/// upper bound; counting stops at the first open-substream marker.
fn token_size_hint_for_chunks(chunks: &[Chunk]) -> (usize, Option<usize>) {
    let mut lower = 0;
    let mut upper_known = true;
    for chunk in chunks {
        match chunk {
            Chunk::OpenSubstream(_) => break,
            Chunk::Embed(_) => upper_known = false,
            Chunk::PutGroup(_)
            | Chunk::PutIdent(_)
            | Chunk::PutLiteral(_)
            | Chunk::PutPunct(_) => lower += 1,
        }
    }
    let upper = if upper_known { Some(lower) } else { None };
    (lower, upper)
}
// Implements `Extend` and `FromIterator` for each plain token type.
// `Extend` applies the queue's tracked span to every pushed token;
// `FromIterator` builds a fresh queue, so there is no span to apply.
macro_rules! impl_extend_for_into_chunk {
    ($($ty:ty),*) => {
        $(
            impl Extend<$ty> for TokenQueue {
                fn extend<T: IntoIterator<Item = $ty>>(&mut self, tcs: T) {
                    // Capture the span once; it cannot change mid-iteration.
                    let span = self.tracked_span();
                    self.chunks.extend(tcs.into_iter().map(move |tc| tc.into_chunk_with_span(span)));
                }
            }
            impl FromIterator<$ty> for TokenQueue {
                fn from_iter<I: IntoIterator<Item = $ty>>(tcs: I) -> TokenQueue {
                    TokenQueue {
                        chunks: tcs.into_iter().map(move |tc| tc.into_chunk()).collect(),
                        substream_stack_top_ptr: None,
                        span_tracking: SpanTracking::none(),
                    }
                }
            }
        )*
    };
}
enumerate_into_chunk_implementors!(impl_extend_for_into_chunk);
impl From<TokenStream> for TokenQueue {
fn from(ts: TokenStream) -> TokenQueue {
TokenQueue {
chunks: vec![Chunk::Embed(ts)],
substream_stack_top_ptr: None,
span_tracking: SpanTracking::none(),
}
}
}
impl IntoTokens for TokenQueue {
    /// Moves all of this queue's chunks onto the end of `q`.
    fn extend_tokens(mut self, q: &mut TokenQueue) {
        q.chunks.append(&mut self.chunks);
    }
    fn into_tokens(self) -> TokenQueue {
        self
    }
    /// Exact: a queue contributes precisely its chunk count.
    fn queue_size_hint(&self) -> (usize, Option<usize>) {
        let count = self.chunks.len();
        (count, Some(count))
    }
}
impl From<TokenQueue> for TokenStream {
    /// Drains the queue and assembles its chunks into a single stream.
    fn from(mut q: TokenQueue) -> TokenStream {
        q.chunks.drain(..).collect_by(())
    }
}
/// Internal cursor over a sequence of [`Chunk`]s being converted into tokens.
///
/// Implemented for draining iterators so that conversion code can inspect the
/// chunks still pending (`remaining`) and choose the cheapest strategy.
trait ChunkBuf: Iterator<Item = Chunk> + Sized {
    /// The chunks not yet yielded by this iterator.
    fn remaining(&self) -> &[Chunk];
    /// A reference to the next chunk without consuming it.
    #[inline]
    fn peek(&self) -> Option<&Chunk> {
        self.remaining().first()
    }
    /// When *every* remaining chunk is an embedded [`TokenStream`], returns an
    /// iterator over those streams; otherwise returns `None` and consumes
    /// nothing.
    #[inline]
    fn as_unwrapped_streams(&mut self) -> Option<impl Iterator<Item = TokenStream> + '_> {
        let possible = self
            .remaining()
            .iter()
            .all(|tc| matches!(tc, Chunk::Embed(_)));
        if !possible {
            return None;
        }
        let ts = self.by_ref().filter_map(|chunk| match chunk {
            Chunk::Embed(ts) => Some(ts),
            _ => None,
        });
        Some(ts)
    }
    /// Takes the embedded stream when it is the *only* remaining chunk;
    /// otherwise consumes nothing and returns `None`.
    ///
    /// Only firing on exactly one remaining chunk is essential: taking a
    /// leading `Embed` while more chunks follow would return the first stream
    /// and silently drop every chunk after it when the drain is dropped.
    #[inline]
    fn take_as_single_stream(&mut self) -> Option<TokenStream> {
        if !matches!(self.remaining(), [Chunk::Embed(_)]) {
            return None;
        }
        let Some(Chunk::Embed(ts)) = self.next() else {
            unreachable!();
        };
        Some(ts)
    }
    /// Converts the remaining chunks into `B`, choosing the cheapest path:
    /// pass a lone stream through untouched, concatenate pure streams, or
    /// fall back to token-by-token assembly via [`TokenStreamBuilder`].
    #[inline]
    fn collect_by<B: FromTokens<T>, T>(mut self, t: T) -> B {
        if let Some(ts) = self.take_as_single_stream() {
            B::from_lone(t, ts)
        } else if let Some(tss) = self.as_unwrapped_streams() {
            B::from_streams(t, tss)
        } else {
            let builder = TokenStreamBuilder {
                chunks: self,
                ts_queue: None,
            };
            B::from_tokens(t, builder)
        }
    }
}
impl<'a> ChunkBuf for std::vec::Drain<'a, Chunk> {
    fn remaining(&self) -> &[Chunk] {
        // `Drain::as_slice` exposes the elements not yet removed.
        self.as_slice()
    }
}
/// Flattens a chunk source into a `TokenTree` iterator, expanding embedded
/// streams one token at a time.
struct TokenStreamBuilder<S> {
    // Iterator over the embedded stream currently being expanded, if any.
    ts_queue: Option<proc_macro::token_stream::IntoIter>,
    // Remaining chunks to expand once `ts_queue` runs dry.
    chunks: S,
}
impl<S: ChunkBuf> fmt::Debug for TokenStreamBuilder<S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The stream iterator is not `Debug`; clone it and materialize its
        // pending tokens into a `Vec` instead.
        let pending: Option<Vec<TokenTree>> =
            self.ts_queue.as_ref().map(|it| it.clone().collect());
        f.debug_struct("TokenStreamBuilder")
            .field("ts_queue", &pending)
            .field("chunks", &self.chunks.remaining())
            .finish()
    }
}
/// Builds a value of type `Self` from tokens, threading through an extra
/// argument `t` (e.g. `()` or a `&mut TokenStream` to append to).
trait FromTokens<T> {
    /// Consumes a lone, already-assembled stream.
    fn from_lone(t: T, ts: TokenStream) -> Self;
    /// Consumes a sequence of streams to be concatenated.
    fn from_streams(t: T, tss: impl Iterator<Item = TokenStream>) -> Self;
    /// Consumes individual token trees.
    fn from_tokens(t: T, tts: impl Iterator<Item = TokenTree>) -> Self;
}
/// Produces a fresh `TokenStream`; the `()` argument carries no state.
impl FromTokens<()> for TokenStream {
    #[inline]
    fn from_lone(_: (), ts: TokenStream) -> TokenStream {
        // A lone stream passes through untouched.
        ts
    }
    #[inline]
    fn from_streams(_: (), tss: impl Iterator<Item = TokenStream>) -> TokenStream {
        let mut out = TokenStream::new();
        out.extend(tss);
        out
    }
    #[inline]
    fn from_tokens(_: (), tts: impl Iterator<Item = TokenTree>) -> TokenStream {
        let mut out = TokenStream::new();
        out.extend(tts);
        out
    }
}
/// Appends tokens to an existing stream instead of building a new one.
impl<'a> FromTokens<&'a mut TokenStream> for () {
    #[inline]
    fn from_lone(s: &'a mut TokenStream, ts: TokenStream) {
        s.extend(std::iter::once(ts));
    }
    #[inline]
    fn from_streams(s: &'a mut TokenStream, tss: impl Iterator<Item = TokenStream>) {
        tss.for_each(|ts| s.extend(std::iter::once(ts)));
    }
    #[inline]
    fn from_tokens(s: &'a mut TokenStream, tts: impl Iterator<Item = TokenTree>) {
        s.extend(tts);
    }
}
impl<S: ChunkBuf> Iterator for TokenStreamBuilder<S> {
    type Item = TokenTree;
    /// Bounds on the tokens still to be produced: the hint for the remaining
    /// chunks plus whatever is still buffered in the partially-drained
    /// embedded stream. (Previously the buffered tokens were ignored, which
    /// could make the reported upper bound smaller than the true count.)
    fn size_hint(&self) -> (usize, Option<usize>) {
        let (lower, upper) = token_size_hint_for_chunks(self.chunks.remaining());
        match self.ts_queue.as_ref() {
            None => (lower, upper),
            Some(it) => {
                let (q_lower, q_upper) = it.size_hint();
                (lower + q_lower, upper.zip(q_upper).map(|(a, b)| a + b))
            }
        }
    }
    /// Yields the next token, draining the current embedded stream before
    /// moving on to the next chunk.
    ///
    /// Iterative rather than recursive: the previous version recursed once
    /// per `Embed` chunk, so a long run of empty embedded streams could
    /// overflow the stack.
    ///
    /// # Panics
    /// Panics if an unclosed substream marker is encountered.
    fn next(&mut self) -> Option<TokenTree> {
        loop {
            if let Some(tt) = self.ts_queue.as_mut().and_then(Iterator::next) {
                return Some(tt);
            }
            match self.chunks.next()? {
                Chunk::Embed(ts) => self.ts_queue = Some(ts.into_iter()),
                Chunk::PutGroup(tt) => return Some(tt.into()),
                Chunk::PutIdent(tt) => return Some(tt.into()),
                Chunk::PutPunct(tt) => return Some(tt.into()),
                Chunk::PutLiteral(tt) => return Some(tt.into()),
                Chunk::OpenSubstream(_) => panic!("{PUSH_NO_POP_MSG}"),
            }
        }
    }
}