extern crate proc_macro;
use ::proc_macro::{*,
TokenTree as TT,
};
#[macro_use]
mod utils;
/// Parsed form of a `let $metavar = unstringify!( ... ) in { ... }` invocation.
struct Input {
/// The re-tokenized contents of the string expression inside `unstringify!( ... )`.
tokenized: TokenStream,
/// The metavariable name (the identifier after the `$`).
metavar: Ident,
/// The template tokens in which every `$metavar` occurrence gets substituted.
template: TokenStream,
}
/// Entry point of the `unstringify!` procedural macro.
///
/// Two accepted shapes:
///   1. `unstringify!(<string expr>)` — when the whole input is a single
///      string literal / verbatim `stringify!` / verbatim `concat!` call,
///      its re-tokenized contents are returned directly.
///   2. `let $x = unstringify!(<string expr>) in { <template> }` — the
///      template is emitted with every `$x` replaced by the tokenized
///      contents.
///
/// On a parse error, expands to a `compile_error!("...")` whose tokens are
/// spanned at the offending input token.
#[proc_macro] pub
fn unstringify (input: TokenStream)
  -> TokenStream
{
    // Fast path: the entire input is the expression to unstringify.
    if let Ok((tokenized, mut rest)) = tokenize_string_literal_or_concat_or_stringify(
        input.clone().into_iter().peekable(),
    )
    {
        if rest.next().is_none() {
            return tokenized;
        }
        // Trailing tokens left over: fall through to the `let ... in` form.
    }
    let Input {
        tokenized,
        metavar,
        template,
    } = match let_unstringify(input) {
        Ok(parsed) => parsed,
        Err((span, err_msg)) => {
            // Re-span each emitted token so the compiler error points at
            // the culprit token rather than at the macro call site.
            macro_rules! with_span {($expr:expr) => (
                match $expr { mut tt => {
                    tt.set_span(span);
                    tt
                }}
            )}
            // Expand to: `compile_error!("<err_msg>");`
            return ts![
                Ident::new("compile_error", span),
                with_span!(Punct::new('!', Spacing::Alone)),
                with_span!(ts![ (
                    Literal::string(&*err_msg),
                )]),
                with_span!(Punct::new(';', Spacing::Alone)),
            ];
        },
    };
    map_replace(&metavar.to_string(), &tokenized, template)
}
/// Parses the `let $metavar = unstringify!( <expr> ) in { <template> }`
/// form of the macro input.
///
/// Returns the re-tokenized `<expr>` contents, the metavariable identifier,
/// and the template tokens; on failure, returns the span of the first
/// offending token together with a human-readable message.
fn let_unstringify (input: TokenStream)
  -> Result<
        Input,
        (Span, ::std::borrow::Cow<'static, str>),
    >
{
    let mut tokens = input.into_iter().peekable();
    // `let`
    unwrap_next_token! {
        if let TT::Ident(ident) = tokens.next(),
        if (ident.to_string() == "let")
        {} else {
            failwith!("expected `let`");
        }
    }
    // `$`
    unwrap_next_token! {
        if let TT::Punct(p) = tokens.next(),
        if (p.as_char() == '$')
        {} else {
            failwith!("expected `$`");
        }
    }
    // `<metavar>`
    let metavar = unwrap_next_token! {
        if let TT::Ident(it) = tokens.next(), { it } else {
            failwith!("expected an identifier");
        }
    };
    // `=`
    unwrap_next_token! {
        if let TT::Punct(p) = tokens.next(),
        if (p.as_char() == '=')
        {} else {
            failwith!("expected `=`");
        }
    }
    // `unstringify`
    unwrap_next_token! {
        if let TT::Ident(ident) = tokens.next(),
        if (ident.to_string() == "unstringify")
        {} else {
            failwith!("expected `unstringify`");
        }
    }
    // `!`
    unwrap_next_token! {
        if let TT::Punct(p) = tokens.next(),
        if (p.as_char() == '!')
        {} else {
            failwith!("expected `!`");
        }
    }
    // `( <expr> )` — tokenize the string expression inside the parentheses.
    let tokenized: TokenStream = {
        let tokenize_args = unwrap_next_token! {
            if let TT::Group(group) = tokens.next(),
            if (matches!(group.delimiter(), Delimiter::Parenthesis))
            {
                // `group.stream()` already yields the inner tokens.
                group.stream().into_iter()
            } else {
                failwith!("expected `( ... )`");
            }
        };
        let (tokenized, mut remaining) =
            tokenize_string_literal_or_concat_or_stringify(
                tokenize_args.peekable(),
            )?
        ;
        // The parenthesized group must contain exactly one string expression.
        if let Some(extraneous_tt) = remaining.next() {
            return Err((
                extraneous_tt.span(),
                "extraneous token(s)".into(),
            ));
        }
        tokenized
    };
    // `in`
    unwrap_next_token! {
        if let TT::Ident(in_) = tokens.next(),
        if (in_.to_string() == "in")
        {} else {
            // Fixed: previously reported "expected `;`" although the parser
            // actually requires the `in` keyword here.
            failwith!("expected `in`");
        }
    }
    // `{ <template> }` (any delimiter is accepted).
    let rest = unwrap_next_token! {
        if let TT::Group(group) = tokens.next(),
        {
            group.stream()
        } else {
            failwith!("expected `{ ... }` or `( ... )` or `[ ... ]`");
        }
    };
    // Nothing may follow the template group.
    if let Some(extraneous_tt) = tokens.next() {
        return Err((
            extraneous_tt.span(),
            "extraneous token(s)".into(),
        ));
    }
    Ok(Input {
        tokenized,
        metavar,
        template: rest,
    })
}
/// Replaces every `$<metavar>` occurrence in `tokens` with a copy of
/// `tokenized`, recursing into delimited groups (`( )`, `[ ]`, `{ }`).
///
/// Takes `&str` rather than `&String` (clippy `ptr_arg`); existing callers
/// passing `&String` still compile via deref coercion.
fn map_replace (
    metavar: &str,
    tokenized: &'_ TokenStream,
    tokens: TokenStream
) -> TokenStream
{
    let mut tokens = tokens.into_iter().peekable();
    let mut ret = TokenStream::new();
    loop {
        match (tokens.next(), tokens.peek()) {
            // `$ metavar` => splice in the tokenized expression.
            | (
                Some(TT::Punct(dollar)),
                Some(TT::Ident(ident)),
            )
                if dollar.as_char() == '$'
                && ident.to_string() == metavar
            => {
                // Also consume the peeked identifier.
                drop(tokens.next());
                ret.extend(tokenized.clone());
            },
            // Recurse into groups so nested occurrences get replaced too.
            | (Some(TT::Group(group)), _) => {
                ret.extend(Some(TT::Group(Group::new(
                    group.delimiter(),
                    map_replace(metavar, tokenized, group.stream()),
                ))));
            },
            | (None, _) => break,
            // Anything else (including a lone `$`) passes through unchanged.
            | (tt, _) => ret.extend(tt),
        }
    }
    ret
}
/// Peekable iterator over the token trees of a `TokenStream`; `peek()` is
/// needed for one-token lookahead (e.g. `$ ident`, `ident !`).
type Tokens = ::core::iter::Peekable<token_stream::IntoIter>;
/// Parses the head of `tokens` as one "unstringifiable" expression:
/// a string literal, a verbatim `stringify!( ... )` call, or a verbatim
/// `concat!( ... )` call whose arguments are themselves parsed recursively
/// and concatenated.
///
/// On success, returns the re-tokenized contents paired with the remaining
/// (unconsumed) tokens; on failure, returns the offending span and an error
/// message.
fn tokenize_string_literal_or_concat_or_stringify (
mut tokens: Tokens,
) -> Result<
(TokenStream, Tokens),
(Span, ::std::borrow::Cow<'static, str>),
>
{Ok({
// Shorthand for recursive calls (on flattened input and `concat!` args).
let recurse = tokenize_string_literal_or_concat_or_stringify;
macro_rules! err_msg {() => (
"expected \
a string literal, \
a verbatim `stringify!` call, \
or a verbatim `concat!` call.\
"
)}
// Scratch buffer written by the literal match guard below.
let mut s: String;
let ret = match tokens.next() {
// A None-delimited group typically wraps macro-interpolated tokens:
// flatten it and restart the parse on the flattened stream.
| Some(TT::Group(group))
if matches!(group.delimiter(), Delimiter::None)
=> {
let mut flattened = group.stream();
flattened.extend(tokens);
return recurse(flattened.into_iter().peekable());
},
// A literal token: the guard stores its display form into `s` and
// accepts only if it is a string literal (`extracted_string_literal`
// presumably strips the quotes in place — defined in utils; TODO confirm).
| Some(TT::Literal(lit))
if {
s = lit.to_string();
utils::extracted_string_literal(&mut s)
}
// Re-parse the extracted string contents as Rust tokens.
=> match s.parse::<TokenStream>() {
| Ok(ts) => ts,
| Err(err) => return Err((
lit.span(),
format!("Invalid tokens: {}", err).into(),
)),
},
// An identifier followed by `!`: a macro call — only `stringify!` and
// `concat!` are understood.
| Some(TT::Ident(ident))
if matches!(
tokens.peek(),
Some(TT::Punct(p)) if p.as_char() == '!'
)
=> {
// Consume the peeked `!`.
drop(tokens.next());
let group_contents = unwrap_next_token! {
if let TT::Group(group) = tokens.next(), {
group.stream()
} else {
failwith!("\
expected `{ ... }` or `( ... )` or `[ ... ]`\
");
}
};
match ident.to_string().as_str() {
// `stringify!(tokens)` unstringifies back to the tokens themselves.
| "stringify" => group_contents,
// `concat!(a, b, ...)`: parse each comma-separated argument
// recursively and concatenate the resulting token streams.
| "concat" => {
let mut ret = TokenStream::new();
let mut current = group_contents.into_iter().peekable();
loop {
let (parsed, mut remaining) = recurse(current)?;
ret.extend(parsed);
if remaining.peek().is_none() {
break ret;
}
// Arguments must be separated by `,`.
unwrap_next_token! {
if let TT::Punct(p) = remaining.next(),
if (p.as_char() == ',')
{} else {
failwith!("expected nothing or `,`");
}
}
// A trailing comma is allowed.
if remaining.peek().is_none() {
break ret;
}
current = remaining;
}
},
| _ => return Err((
ident.span(),
"expected `stringify` or `concat`".into(),
)),
}
},
// Any other token cannot start an unstringifiable expression.
| Some(bad_tt) => return Err((
bad_tt.span(),
err_msg!().into(),
)),
| None => return Err((
Span::call_site(),
concat!("Unexpected end of input: ", err_msg!()).into(),
)),
};
(ret, tokens)
})}