mod template;
#[cfg(test)]
mod test;
use convert_case::{Case, Casing};
use proc_macro::TokenStream;
use proc_macro2::{Delimiter, Spacing, Span, TokenStream as TokenStream2, TokenTree};
use quote::ToTokens;
use std::sync::atomic::{AtomicUsize, Ordering};
use syn::parse::{Parse, ParseStream};
use syn::{Expr, Ident, Type, parse_macro_input, parse_str};
/// Monotonic counter used to mint unique interpolation binding names
/// (`__ts_bind_{n}_0`) across all macro expansions in a compilation.
static COUNTER: AtomicUsize = AtomicUsize::new(0);
/// Parsed input of the `ts_quote!` macro: the raw template tokens plus an
/// optional trailing `as Type` output-type annotation.
struct TsQuoteInput {
    // Template body with the trailing `as Type` suffix (if any) removed.
    template_tokens: TokenStream2,
    // Target AST node type taken from a trailing `as Type`, when present
    // and parseable as a `syn::Type`.
    output_type: Option<Type>,
}
impl Parse for TsQuoteInput {
    /// Collects every token of the macro input, then splits off a trailing
    /// `as Type` suffix — recognized only when the type is a single token
    /// that parses as a `syn::Type`. Everything else stays template body.
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let mut tokens: Vec<TokenTree> = Vec::new();
        while !input.is_empty() {
            tokens.push(input.parse()?);
        }

        let mut output_type = None;
        // Look at the last two tokens: `as` followed by a type candidate.
        if let [.., TokenTree::Ident(kw), last] = tokens.as_slice()
            && *kw == "as"
        {
            let candidate: TokenStream2 = last.clone().into();
            if let Ok(ty) = parse_str::<Type>(&candidate.to_string()) {
                output_type = Some(ty);
            }
        }
        // Drop the `as Type` pair from the template only when it parsed.
        if output_type.is_some() {
            let keep = tokens.len() - 2;
            tokens.truncate(keep);
        }

        Ok(TsQuoteInput {
            template_tokens: tokens.into_iter().collect(),
            output_type,
        })
    }
}
#[proc_macro]
pub fn ts_quote(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as TsQuoteInput);
let (template_str, bindings) = process_tokens(input.template_tokens);
let mut final_str = template_str;
let is_expr_target = if let Some(syn::Type::Path(ref type_path)) = input.output_type {
type_path.path.is_ident("Expr")
} else {
false
};
let trimmed = final_str.trim();
if is_expr_target && trimmed.starts_with('{') && trimmed.ends_with('}') {
final_str = format!("({})", final_str);
}
let mut output = TokenStream2::new();
output.extend(quote_ident("swc_core"));
output.extend(quote_punct("::"));
output.extend(quote_ident("quote"));
output.extend(quote_punct("!"));
let mut args = TokenStream2::new();
let clean_lit = syn::LitStr::new(&final_str, Span::call_site());
args.extend(clean_lit.to_token_stream());
if let Some(ty) = input.output_type {
args.extend(quote_ident("as"));
args.extend(ty.to_token_stream());
}
if !bindings.is_empty() {
args.extend(quote_punct(","));
for (i, binding) in bindings.iter().enumerate() {
if i > 0 {
args.extend(quote_punct(","));
}
args.extend(binding.clone());
}
}
output.extend(TokenStream2::from(TokenTree::Group(
proc_macro2::Group::new(Delimiter::Parenthesis, args),
)));
TokenStream::from(output)
}
/// Wraps `s` in a single identifier token spanned at the call site.
fn quote_ident(s: &str) -> TokenStream2 {
    TokenTree::Ident(Ident::new(s, Span::call_site())).into()
}
/// Renders `s` as a run of punctuation tokens.
///
/// All characters except the last are marked `Spacing::Joint` so that
/// multi-character operators such as `::` stay glued together; the final
/// character is `Spacing::Alone` so it cannot fuse with whatever token is
/// appended next. (Previously every character was `Joint`, which let two
/// consecutive calls — e.g. `quote_punct(">")` followed by
/// `quote_punct("=")` when building `Vec<Stmt> = expr` — be re-read as a
/// single `>=` operator by downstream parsers.)
fn quote_punct(s: &str) -> TokenStream2 {
    use proc_macro2::Punct;
    let last = s.chars().count().saturating_sub(1);
    s.chars()
        .enumerate()
        .map(|(i, c)| {
            let spacing = if i == last { Spacing::Alone } else { Spacing::Joint };
            TokenTree::Punct(Punct::new(c, spacing))
        })
        .collect()
}
/// Flattens a template token stream into TypeScript source text, turning
/// each `$( ... )` interpolation into a `$name` (or `*$name` for vector
/// expansion) placeholder and collecting the matching `quote!` bindings.
///
/// Returns the rendered template string and the bindings in order of
/// appearance (including bindings found inside nested groups).
fn process_tokens(tokens: TokenStream2) -> (String, Vec<TokenStream2>) {
    let mut text = String::new();
    let mut bindings: Vec<TokenStream2> = Vec::new();
    let mut stream = tokens.into_iter().peekable();

    while let Some(tree) = stream.next() {
        match tree {
            // A `$` directly followed by a parenthesized group is an
            // interpolation; a bare `$` is copied through literally.
            TokenTree::Punct(ref dollar) if dollar.as_char() == '$' => {
                let is_interpolation = matches!(
                    stream.peek(),
                    Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Parenthesis
                );
                if !is_interpolation {
                    text.push('$');
                    continue;
                }
                let Some(TokenTree::Group(group)) = stream.next() else {
                    unreachable!("peeked a parenthesized group");
                };
                let (name, code, expands) = parse_interpolation(group.stream());
                if !code.is_empty() {
                    bindings.push(code);
                }
                text.push_str(if expands { "*$" } else { "$" });
                text.push_str(&name);
            }
            // Recurse into groups, re-emitting the matching delimiters.
            TokenTree::Group(group) => {
                let (inner, nested) = process_tokens(group.stream());
                bindings.extend(nested);
                let (open, close) = match group.delimiter() {
                    Delimiter::Parenthesis => ("(", ")"),
                    Delimiter::Brace => ("{", "}"),
                    Delimiter::Bracket => ("[", "]"),
                    Delimiter::None => ("", ""),
                };
                text.push_str(open);
                text.push_str(&inner);
                text.push_str(close);
            }
            // Identifiers always get a trailing space so adjacent words
            // do not run together in the rendered source.
            TokenTree::Ident(ident) => {
                text.push_str(&ident.to_string());
                text.push(' ');
            }
            // Punctuation keeps a space only when the token stream says it
            // stands alone (preserves multi-char operators like `=>`).
            TokenTree::Punct(p) => {
                text.push(p.as_char());
                if p.spacing() == Spacing::Alone {
                    text.push(' ');
                }
            }
            TokenTree::Literal(lit) => {
                text.push_str(&lit.to_string());
            }
        }
    }
    (text, bindings)
}
/// Translates the contents of one `$( ... )` interpolation into a fresh
/// binding for swc's `quote!` macro.
///
/// Returns `(bind_name, binding_tokens, is_vec_expansion)`:
/// - `bind_name`: unique placeholder name spliced into the template string;
/// - `binding_tokens`: the `name [: Type] = expr` argument handed to
///   `quote!` (may be empty, in which case the caller skips it);
/// - `is_vec_expansion`: whether the placeholder must be spliced with the
///   `*$name` statement-vector expansion syntax.
fn parse_interpolation(tokens: TokenStream2) -> (String, TokenStream2, bool) {
    // Unique name per interpolation; the trailing `_0` keeps the two-part
    // naming scheme stable.
    let bind_name = format!(
        "__ts_bind_{}_{}",
        COUNTER.fetch_add(1, Ordering::Relaxed),
        0
    );
    let bind_ident = Ident::new(&bind_name, Span::call_site());
    let token_vec: Vec<TokenTree> = tokens.clone().into_iter().collect();
    // Case 1: wrapper-macro shape `wrapper!(args)`.
    // NOTE(review): only the first three tokens are inspected — anything
    // after the `(...)` group is silently ignored; confirm that is intended.
    if token_vec.len() >= 3
        && let TokenTree::Ident(ref wrapper) = token_vec[0]
        && let TokenTree::Punct(ref p) = token_vec[1]
        && p.as_char() == '!'
        && let TokenTree::Group(ref g) = token_vec[2]
    {
        let wrapper_name = wrapper.to_string();
        let args = g.stream();
        // `stmt_vec!(expr)` — bind the expression directly and splice it
        // with `*$name` expansion.
        if wrapper_name == "stmt_vec" {
            let expr: Expr = syn::parse2(args).expect("Invalid expression in stmt_vec!()");
            let mut binding = TokenStream2::new();
            binding.extend(bind_ident.to_token_stream());
            binding.extend(quote_punct("="));
            binding.extend(expr.to_token_stream());
            return (bind_name, binding, true);
        }
        // `ident!(fmt_args)` — build
        // `swc_core::ecma::ast::Ident::new_no_ctxt(format!(fmt_args).into(),
        //  swc_core::common::DUMMY_SP)` as the bound value.
        if wrapper_name == "ident" {
            // Argument list of the `new_no_ctxt` call:
            // `format!(args).into(), swc_core::common::DUMMY_SP`.
            let mut fn_args = TokenStream2::new();
            fn_args.extend(quote_ident("format"));
            fn_args.extend(quote_punct("!"));
            fn_args.extend(TokenStream2::from(TokenTree::Group(
                proc_macro2::Group::new(Delimiter::Parenthesis, args.clone()),
            )));
            fn_args.extend(quote_punct("."));
            fn_args.extend(quote_ident("into"));
            fn_args.extend(TokenStream2::from(TokenTree::Group(
                proc_macro2::Group::new(Delimiter::Parenthesis, TokenStream2::new()),
            )));
            fn_args.extend(quote_punct(","));
            fn_args.extend(quote_ident("swc_core"));
            fn_args.extend(quote_punct("::"));
            fn_args.extend(quote_ident("common"));
            fn_args.extend(quote_punct("::"));
            fn_args.extend(quote_ident("DUMMY_SP"));
            let mut binding = TokenStream2::new();
            binding.extend(bind_ident.to_token_stream());
            binding.extend(quote_punct("="));
            binding.extend(quote_ident("swc_core"));
            binding.extend(quote_punct("::"));
            binding.extend(quote_ident("ecma"));
            binding.extend(quote_punct("::"));
            binding.extend(quote_ident("ast"));
            binding.extend(quote_punct("::"));
            binding.extend(quote_ident("Ident"));
            binding.extend(quote_punct("::"));
            binding.extend(quote_ident("new_no_ctxt"));
            binding.extend(TokenStream2::from(TokenTree::Group(
                proc_macro2::Group::new(Delimiter::Parenthesis, fn_args),
            )));
            return (bind_name, binding, false);
        }
        // Any other `snake_case!(args)` becomes a typed binding
        // `name: SnakeCase = args` (wrapper name converted to PascalCase).
        let type_name = wrapper_name.to_case(Case::Pascal);
        let type_ident = Ident::new(&type_name, Span::call_site());
        let mut binding = TokenStream2::new();
        binding.extend(bind_ident.to_token_stream());
        binding.extend(quote_punct(":"));
        binding.extend(type_ident.to_token_stream());
        binding.extend(quote_punct("="));
        binding.extend(args);
        return (bind_name, binding, false);
    }
    // Case 2: `expr : Type` — split on the first `:` punct.
    // NOTE(review): this matches the first `:` token, so an expression
    // containing a path (`a::b`) would be split inside the `::`; confirm
    // callers never put paths before the annotation.
    let mut split_idx = None;
    for (i, tt) in token_vec.iter().enumerate() {
        if let TokenTree::Punct(p) = tt
            && p.as_char() == ':'
        {
            split_idx = Some(i);
            break;
        }
    }
    if let Some(idx) = split_idx {
        let expr_tokens: TokenStream2 = token_vec[0..idx].iter().cloned().collect();
        let type_tokens: TokenStream2 = token_vec[idx + 1..].iter().cloned().collect();
        let type_str = type_tokens.to_string();
        // `expr: Vec<Stmt>` (detected by substring match on the rendered
        // type) binds as a statement vector and splices with `*$name`.
        if type_str.contains("Vec") && type_str.contains("Stmt") {
            let expr: Expr =
                syn::parse2(expr_tokens).expect("Invalid expression in $(expr: Vec<Stmt>)");
            let mut binding = TokenStream2::new();
            binding.extend(bind_ident.to_token_stream());
            binding.extend(quote_punct(":"));
            binding.extend(quote_ident("Vec"));
            binding.extend(quote_punct("<"));
            binding.extend(quote_ident("Stmt"));
            binding.extend(quote_punct(">"));
            binding.extend(quote_punct("="));
            binding.extend(expr.to_token_stream());
            return (bind_name, binding, true);
        }
        // Any other annotated interpolation: `name: Type = expr`.
        let ty: Type = parse_str(&type_str).expect("Invalid type in $(expr: Type)");
        let expr: Expr = syn::parse2(expr_tokens).expect("Invalid expression in $(expr: Type)");
        let mut binding = TokenStream2::new();
        binding.extend(bind_ident.to_token_stream());
        binding.extend(quote_punct(":"));
        binding.extend(ty.to_token_stream());
        binding.extend(quote_punct("="));
        binding.extend(expr.to_token_stream());
        return (bind_name, binding, false);
    }
    // Case 3: a bare expression. A call whose path mentions `StmtVec` with
    // exactly one argument is unwrapped and spliced as a statement vector.
    let expr: Expr = syn::parse2(tokens.clone()).expect("Invalid expression in $()");
    if let Expr::Call(call) = &expr
        && let Expr::Path(path) = &*call.func
    {
        let path_str = path
            .path
            .segments
            .iter()
            .map(|s| s.ident.to_string())
            .collect::<Vec<_>>()
            .join("::");
        if path_str.contains("StmtVec") && call.args.len() == 1 {
            let inner_arg = &call.args[0];
            let mut binding = TokenStream2::new();
            binding.extend(bind_ident.to_token_stream());
            binding.extend(quote_punct("="));
            binding.extend(inner_arg.to_token_stream());
            return (bind_name, binding, true);
        }
    }
    // Default: plain `name = expr` binding, no expansion.
    let mut binding = TokenStream2::new();
    binding.extend(bind_ident.to_token_stream());
    binding.extend(quote_punct("="));
    binding.extend(expr.to_token_stream());
    (bind_name, binding, false)
}
#[proc_macro]
pub fn ts_template(input: TokenStream) -> TokenStream {
let input = TokenStream2::from(input);
let template_builder = match template::parse_template(input) {
Ok(s) => s,
Err(e) => return e.to_compile_error().into(),
};
let output = quote::quote! {
{
let (__ts_code, __collected_patches) = #template_builder;
let mut __stream = macroforge_ts::ts_syn::TsStream::from_string(__ts_code);
__stream.runtime_patches = __collected_patches;
__stream
}
};
TokenStream::from(output)
}
/// Template macro whose output is placed above the annotated item
/// (marked with `/* @macroforge:above */`).
#[proc_macro]
pub fn above(input: TokenStream) -> TokenStream {
    generate_scoped_template(input.into(), "/* @macroforge:above */")
}
/// Template macro whose output is placed below the annotated item
/// (marked with `/* @macroforge:below */`).
#[proc_macro]
pub fn below(input: TokenStream) -> TokenStream {
    generate_scoped_template(input.into(), "/* @macroforge:below */")
}
/// Template macro whose output replaces the annotated item's body
/// (marked with `/* @macroforge:body */`).
#[proc_macro]
pub fn body(input: TokenStream) -> TokenStream {
    generate_scoped_template(input.into(), "/* @macroforge:body */")
}
/// Template macro whose output targets the annotated item's signature
/// (marked with `/* @macroforge:signature */`).
#[proc_macro]
pub fn signature(input: TokenStream) -> TokenStream {
    generate_scoped_template(input.into(), "/* @macroforge:signature */")
}
/// Shared implementation behind `above!` / `below!` / `body!` /
/// `signature!`: renders the template and prefixes the generated code with
/// the given placement-marker comment before wrapping it in a `TsStream`.
fn generate_scoped_template(input: TokenStream2, marker: &str) -> TokenStream {
    match template::parse_template(input) {
        Err(e) => e.to_compile_error().into(),
        Ok(builder) => {
            let expanded = quote::quote! {
                {
                    let mut __ts_code = String::from(#marker);
                    let (__content, __collected_patches) = #builder;
                    __ts_code.push_str(&__content);
                    let mut __stream = macroforge_ts::ts_syn::TsStream::from_string(__ts_code);
                    __stream.runtime_patches = __collected_patches;
                    __stream
                }
            };
            expanded.into()
        }
    }
}