use proc_macro2::{Delimiter, Group, Span, TokenStream as TokenStream2, TokenTree};
use quote::{ToTokens, quote};
use std::iter::Peekable;
/// Builds a `syn::Error` at `span`, optionally appending a
/// `--> in: <context>` hint line to the message.
fn template_error(span: Span, message: &str, context: Option<&str>) -> syn::Error {
    let full_message = match context {
        Some(ctx) => format!("{}\n --> in: {}", message, ctx),
        None => message.to_string(),
    };
    syn::Error::new(span, full_message)
}
/// Entry point: parses the whole macro input and wraps the generated
/// statements in a block expression that evaluates to
/// `(String, Vec<Patch>)` — the rendered text plus collected patches.
pub fn parse_template(input: TokenStream2) -> syn::Result<TokenStream2> {
    let mut tokens = input.into_iter().peekable();
    let (body, _terminator) = parse_fragment(&mut tokens, None)?;
    Ok(quote! {
        {
            let mut __out = String::new();
            let mut __patches: Vec<macroforge_ts::ts_syn::abi::Patch> = Vec::new();
            #body
            (__out, __patches)
        }
    })
}
/// Closing/continuation tags that `parse_fragment` may stop at while parsing
/// the body of a control-flow block; returned so the caller knows which tag
/// ended its fragment.
#[derive(Debug, Clone)]
enum Terminator {
    /// `{:else}` inside an `{#if}` / `{#if let}` block.
    Else,
    /// `{:else if cond}` — carries the new condition's tokens.
    ElseIf(TokenStream2),
    /// `{/if}`
    EndIf,
    /// `{/for}`
    EndFor,
    /// `{/while}`
    EndWhile,
    /// `{:case pattern}` inside `{#match}` — carries the pattern tokens.
    Case(TokenStream2),
    /// `{/match}`
    EndMatch,
}
/// Classification of a brace-delimited group in the template, as decided by
/// `analyze_tag`.
enum TagType {
    /// `{#if cond}`
    If(TokenStream2),
    /// `{#if let pattern = expr}` — (pattern, expr).
    IfLet(TokenStream2, TokenStream2),
    /// `{#while cond}`
    While(TokenStream2),
    /// `{#while let pattern = expr}` — (pattern, expr).
    WhileLet(TokenStream2, TokenStream2),
    /// `{#for item in collection}` — (item, collection).
    For(TokenStream2, TokenStream2),
    /// `{#match expr}`
    Match(TokenStream2),
    /// `{:else}`
    Else,
    /// `{:else if cond}`
    ElseIf(TokenStream2),
    /// `{:case pattern}`
    Case(TokenStream2),
    /// `{/if}`
    EndIf,
    /// `{/for}`
    EndFor,
    /// `{/while}`
    EndWhile,
    /// `{/match}`
    EndMatch,
    /// `{$let binding}`
    Let(TokenStream2),
    /// `{$let mut binding}`
    LetMut(TokenStream2),
    /// `{$do expr}` — execute a statement for its side effects.
    Do(TokenStream2),
    /// `{$typescript expr}` — splice a rendered TS stream and its patches.
    Typescript(TokenStream2),
    /// `{|tokens|}` — render tokens without automatic spacing.
    IdentBlock,
    /// `{> "text" <}` — emitted as `/* text */`.
    BlockComment(String),
    /// `{>> "text" <<}` — emitted as `/** text */`.
    DocComment(String),
    /// Plain `{ ... }` — rendered as literal braces around nested content.
    Block,
}
fn analyze_tag(g: &Group) -> TagType {
let tokens: Vec<TokenTree> = g.stream().into_iter().collect();
if tokens.len() >= 2
&& let (Some(TokenTree::Punct(first)), Some(TokenTree::Punct(last))) =
(tokens.first(), tokens.last())
&& first.as_char() == '|'
&& last.as_char() == '|'
{
return TagType::IdentBlock;
}
if tokens.len() >= 5
&& let (Some(TokenTree::Punct(p1)), Some(TokenTree::Punct(p2))) =
(tokens.first(), tokens.get(1))
&& p1.as_char() == '>'
&& p2.as_char() == '>'
&& let (Some(TokenTree::Punct(p3)), Some(TokenTree::Punct(p4))) =
(tokens.get(tokens.len() - 2), tokens.last())
&& p3.as_char() == '<'
&& p4.as_char() == '<'
{
if let Some(TokenTree::Literal(lit)) = tokens.get(2) {
let content = extract_string_literal(lit);
return TagType::DocComment(content);
}
let content = tokens_to_spaced_string(&tokens[2..tokens.len() - 2]);
return TagType::DocComment(content);
}
if tokens.len() >= 3
&& let (Some(TokenTree::Punct(first)), Some(TokenTree::Punct(last))) =
(tokens.first(), tokens.last())
&& first.as_char() == '>'
&& last.as_char() == '<'
{
if let Some(TokenTree::Literal(lit)) = tokens.get(1) {
let content = extract_string_literal(lit);
return TagType::BlockComment(content);
}
let content = tokens_to_spaced_string(&tokens[1..tokens.len() - 1]);
return TagType::BlockComment(content);
}
if tokens.len() < 2 {
return TagType::Block;
}
if let (TokenTree::Punct(p), TokenTree::Ident(i)) = (&tokens[0], &tokens[1])
&& p.as_char() == '#'
{
if i == "if" {
if let Some(TokenTree::Ident(let_kw)) = tokens.get(2)
&& let_kw == "let"
{
let mut pattern = TokenStream2::new();
let mut expr = TokenStream2::new();
let mut seen_eq = false;
for t in tokens.iter().skip(3) {
if let TokenTree::Punct(eq) = t
&& eq.as_char() == '='
&& !seen_eq
{
seen_eq = true;
continue;
}
if !seen_eq {
t.to_tokens(&mut pattern);
} else {
t.to_tokens(&mut expr);
}
}
return TagType::IfLet(pattern, expr);
}
let cond: TokenStream2 = tokens.iter().skip(2).map(|t| t.to_token_stream()).collect();
return TagType::If(cond);
}
if i == "match" {
let expr: TokenStream2 = tokens.iter().skip(2).map(|t| t.to_token_stream()).collect();
return TagType::Match(expr);
}
if i == "while" {
if let Some(TokenTree::Ident(let_kw)) = tokens.get(2)
&& let_kw == "let"
{
let mut pattern = TokenStream2::new();
let mut expr = TokenStream2::new();
let mut seen_eq = false;
for t in tokens.iter().skip(3) {
if let TokenTree::Punct(eq) = t
&& eq.as_char() == '='
&& !seen_eq
{
seen_eq = true;
continue;
}
if !seen_eq {
t.to_tokens(&mut pattern);
} else {
t.to_tokens(&mut expr);
}
}
return TagType::WhileLet(pattern, expr);
}
let cond: TokenStream2 = tokens.iter().skip(2).map(|t| t.to_token_stream()).collect();
return TagType::While(cond);
}
if i == "for" {
let mut item = TokenStream2::new();
let mut list = TokenStream2::new();
let mut seen_in = false;
for t in tokens.iter().skip(2) {
if let TokenTree::Ident(id) = t
&& id == "in"
&& !seen_in
{
seen_in = true;
continue;
}
if !seen_in {
t.to_tokens(&mut item);
} else {
t.to_tokens(&mut list);
}
}
return TagType::For(item, list);
}
}
if let (TokenTree::Punct(p), TokenTree::Ident(i)) = (&tokens[0], &tokens[1])
&& p.as_char() == '$'
{
if i == "let" {
if let Some(TokenTree::Ident(mut_kw)) = tokens.get(2)
&& mut_kw == "mut"
{
let body: TokenStream2 =
tokens.iter().skip(3).map(|t| t.to_token_stream()).collect();
return TagType::LetMut(body);
}
let body: TokenStream2 = tokens.iter().skip(2).map(|t| t.to_token_stream()).collect();
return TagType::Let(body);
}
if i == "do" {
let expr: TokenStream2 = tokens.iter().skip(2).map(|t| t.to_token_stream()).collect();
return TagType::Do(expr);
}
if i == "typescript" {
let expr: TokenStream2 = tokens.iter().skip(2).map(|t| t.to_token_stream()).collect();
return TagType::Typescript(expr);
}
}
if let (TokenTree::Punct(p), TokenTree::Ident(i)) = (&tokens[0], &tokens[1])
&& p.as_char() == ':'
{
if i == "else" {
if let Some(TokenTree::Ident(next)) = tokens.get(2)
&& next == "if"
{
let cond: TokenStream2 =
tokens.iter().skip(3).map(|t| t.to_token_stream()).collect();
return TagType::ElseIf(cond);
}
return TagType::Else;
}
if i == "case" {
let pattern: TokenStream2 =
tokens.iter().skip(2).map(|t| t.to_token_stream()).collect();
return TagType::Case(pattern);
}
}
if let (TokenTree::Punct(p), TokenTree::Ident(i)) = (&tokens[0], &tokens[1])
&& p.as_char() == '/'
{
if i == "if" {
return TagType::EndIf;
}
if i == "for" {
return TagType::EndFor;
}
if i == "while" {
return TagType::EndWhile;
}
if i == "match" {
return TagType::EndMatch;
}
}
TagType::Block
}
/// Parses the body of an `{#if}` tag up to its `{:else}`, `{:else if}` or
/// `{/if}` terminator and emits the corresponding Rust `if`/`else if` chain.
fn parse_if_chain(
    iter: &mut Peekable<proc_macro2::token_stream::IntoIter>,
    initial_cond: TokenStream2,
    open_span: Span,
) -> syn::Result<TokenStream2> {
    let stops = [
        Terminator::Else,
        Terminator::ElseIf(TokenStream2::new()),
        Terminator::EndIf,
    ];
    let (then_body, stop) = parse_fragment(iter, Some(&stops))?;
    match stop {
        // Simple `{#if}...{/if}` — no else branch.
        Some(Terminator::EndIf) => Ok(quote! {
            if #initial_cond {
                #then_body
            }
        }),
        // `{:else}` — parse the else body, which must end at `{/if}`.
        Some(Terminator::Else) => {
            let (else_body, stop) = parse_fragment(iter, Some(&[Terminator::EndIf]))?;
            if !matches!(stop, Some(Terminator::EndIf)) {
                return Err(template_error(
                    open_span,
                    "Unclosed {:else} block: Missing {/if}",
                    Some("{:else}..."),
                ));
            }
            Ok(quote! {
                if #initial_cond {
                    #then_body
                } else {
                    #else_body
                }
            })
        }
        // `{:else if cond}` — recurse to build the rest of the chain.
        Some(Terminator::ElseIf(next_cond)) => {
            let rest = parse_if_chain(iter, next_cond, open_span)?;
            Ok(quote! {
                if #initial_cond {
                    #then_body
                } else {
                    #rest
                }
            })
        }
        None => Err(template_error(
            open_span,
            "Unclosed {#if} block: Missing {/if}",
            Some("{#if condition}..."),
        )),
        _ => unreachable!(),
    }
}
/// Parses the body of `{#if let pattern = expr}` up to `{:else}` or `{/if}`
/// and emits the corresponding `if let` (with an optional `else` branch).
fn parse_if_let_chain(
    iter: &mut Peekable<proc_macro2::token_stream::IntoIter>,
    pattern: TokenStream2,
    expr: TokenStream2,
    open_span: Span,
) -> syn::Result<TokenStream2> {
    let stops = [Terminator::Else, Terminator::EndIf];
    let (then_body, stop) = parse_fragment(iter, Some(&stops))?;
    match stop {
        // `{#if let}...{/if}` — no else branch.
        Some(Terminator::EndIf) => Ok(quote! {
            if let #pattern = #expr {
                #then_body
            }
        }),
        // `{:else}` — the else body must end at `{/if}`.
        Some(Terminator::Else) => {
            let (else_body, stop) = parse_fragment(iter, Some(&[Terminator::EndIf]))?;
            if !matches!(stop, Some(Terminator::EndIf)) {
                return Err(template_error(
                    open_span,
                    "Unclosed {:else} block in {#if let}: Missing {/if}",
                    Some("{#if let pattern = expr}{:else}..."),
                ));
            }
            Ok(quote! {
                if let #pattern = #expr {
                    #then_body
                } else {
                    #else_body
                }
            })
        }
        None => Err(template_error(
            open_span,
            "Unclosed {#if let} block: Missing {/if}",
            Some("{#if let pattern = expr}..."),
        )),
        _ => unreachable!(),
    }
}
/// Parses `{#while cond} ... {/while}` into a Rust `while` loop.
fn parse_while_loop(
    iter: &mut Peekable<proc_macro2::token_stream::IntoIter>,
    cond: TokenStream2,
    open_span: Span,
) -> syn::Result<TokenStream2> {
    let (loop_body, stop) = parse_fragment(iter, Some(&[Terminator::EndWhile]))?;
    match stop {
        Some(Terminator::EndWhile) => Ok(quote! {
            while #cond {
                #loop_body
            }
        }),
        _ => Err(template_error(
            open_span,
            "Unclosed {#while} block: Missing {/while}",
            Some("{#while condition}..."),
        )),
    }
}
/// Parses `{#while let pattern = expr} ... {/while}` into a `while let` loop.
fn parse_while_let_loop(
    iter: &mut Peekable<proc_macro2::token_stream::IntoIter>,
    pattern: TokenStream2,
    expr: TokenStream2,
    open_span: Span,
) -> syn::Result<TokenStream2> {
    let (loop_body, stop) = parse_fragment(iter, Some(&[Terminator::EndWhile]))?;
    match stop {
        Some(Terminator::EndWhile) => Ok(quote! {
            while let #pattern = #expr {
                #loop_body
            }
        }),
        _ => Err(template_error(
            open_span,
            "Unclosed {#while let} block: Missing {/while}",
            Some("{#while let pattern = expr}..."),
        )),
    }
}
/// Parses the arms of a `{#match expr}` block into a Rust `match` expression.
///
/// Bodies between `{:case pattern}` tags become match arms. Note that any
/// content before the first `{:case}` is parsed but dropped, because
/// `current_pattern` is still `None` when that first fragment is flushed.
fn parse_match_arms(
    iter: &mut Peekable<proc_macro2::token_stream::IntoIter>,
    match_expr: TokenStream2,
    open_span: Span,
) -> syn::Result<TokenStream2> {
    let mut arms = TokenStream2::new();
    // Pattern of the arm whose body is currently being collected; `None`
    // until the first `{:case}` is seen.
    let mut current_pattern: Option<TokenStream2> = None;
    loop {
        // Each fragment ends at the next `{:case}` or at `{/match}`.
        let (body, terminator) = parse_fragment(
            iter,
            Some(&[Terminator::Case(TokenStream2::new()), Terminator::EndMatch]),
        )?;
        match terminator {
            Some(Terminator::Case(pattern)) => {
                // Flush the previous arm (if any), then start collecting the
                // new one.
                if let Some(prev_pattern) = current_pattern.take() {
                    arms.extend(quote! {
                        #prev_pattern => {
                            #body
                        }
                    });
                }
                current_pattern = Some(pattern);
            }
            Some(Terminator::EndMatch) => {
                // Flush the final arm before closing the match.
                if let Some(prev_pattern) = current_pattern.take() {
                    arms.extend(quote! {
                        #prev_pattern => {
                            #body
                        }
                    });
                }
                break;
            }
            None => {
                return Err(template_error(
                    open_span,
                    "Unclosed {#match} block: Missing {/match}",
                    Some("{#match expr}{:case pattern}...{/match}"),
                ));
            }
            _ => unreachable!(),
        }
    }
    Ok(quote! {
        match #match_expr {
            #arms
        }
    })
}
/// Renders tokens verbatim with no automatic inter-token spacing — used for
/// the contents of `{|...|}` identifier blocks. `@{expr}` interpolation is
/// still honored; a lone `@` is emitted literally.
fn parse_fragment_no_spacing(
    iter: &mut Peekable<proc_macro2::token_stream::IntoIter>,
) -> syn::Result<TokenStream2> {
    let mut out = TokenStream2::new();
    while let Some(tok) = iter.peek().cloned() {
        match &tok {
            TokenTree::Punct(p) if p.as_char() == '@' => {
                iter.next();
                let followed_by_brace = matches!(
                    iter.peek(),
                    Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace
                );
                if !followed_by_brace {
                    // Bare `@` with no `{...}`: emit it literally.
                    out.extend(quote! { __out.push_str("@"); });
                } else if let Some(TokenTree::Group(g)) = iter.next() {
                    // `@{expr}` — render the expression's Display output.
                    let content = g.stream();
                    out.extend(quote! {
                        __out.push_str(&#content.to_string());
                    });
                }
            }
            TokenTree::Group(g) => {
                iter.next();
                // Reproduce the delimiters and recurse into the group.
                let (open, close) = match g.delimiter() {
                    Delimiter::Parenthesis => ("(", ")"),
                    Delimiter::Bracket => ("[", "]"),
                    Delimiter::Brace => ("{", "}"),
                    Delimiter::None => ("", ""),
                };
                out.extend(quote! { __out.push_str(#open); });
                out.extend(parse_fragment_no_spacing(
                    &mut g.stream().into_iter().peekable(),
                )?);
                out.extend(quote! { __out.push_str(#close); });
            }
            _ => {
                // Ident, literal, or other punct: emit its display form.
                let tok = iter.next().unwrap();
                let text = tok.to_string();
                out.extend(quote! { __out.push_str(#text); });
            }
        }
    }
    Ok(out)
}
/// Core recursive parser: walks the template token stream and emits Rust
/// statements that append rendered TypeScript text to `__out`.
///
/// `stop_at` lists the terminator tags (e.g. `{/if}`, `{:else}`) legal at
/// this nesting level; hitting one returns the code generated so far plus
/// that terminator. Returns `None` as the terminator when the stream ends.
///
/// Spacing heuristic: a single space normally follows each emitted token,
/// suppressed before punctuation in `.,;:?()[]{}<>!`, before call/index
/// groups `(...)`/`[...]`, after joint puncts, and before `@{...}`.
fn parse_fragment(
    iter: &mut Peekable<proc_macro2::token_stream::IntoIter>,
    stop_at: Option<&[Terminator]>,
) -> syn::Result<(TokenStream2, Option<Terminator>)> {
    let mut output = TokenStream2::new();
    while let Some(token) = iter.peek().cloned() {
        match &token {
            // `@` — interpolation sigil. `@{expr}` renders the expression's
            // Display output; a bare `@` is emitted literally.
            TokenTree::Punct(p) if p.as_char() == '@' => {
                let p_clone = p.clone();
                iter.next();
                let is_group = matches!(iter.peek(), Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace);
                if is_group {
                    if let Some(TokenTree::Group(g)) = iter.next() {
                        let content = g.stream();
                        output.extend(quote! {
                            __out.push_str(&#content.to_string());
                        });
                    }
                } else {
                    let s = p_clone.to_string();
                    output.extend(quote! { __out.push_str(#s); });
                }
                // Decide whether a space should follow the interpolated value.
                let next = iter.peek();
                let next_char = match next {
                    Some(TokenTree::Punct(p)) => Some(p.as_char()),
                    _ => None,
                };
                let mut add_space = true;
                if next.is_none() {
                    add_space = false;
                }
                if matches!(next_char, Some(c) if ".,;:?()[]{}<>!".contains(c)) {
                    add_space = false;
                }
                if let Some(TokenTree::Group(g)) = next {
                    match g.delimiter() {
                        Delimiter::Parenthesis | Delimiter::Bracket => add_space = false,
                        _ => {}
                    }
                }
                // No space before an immediately following `@{...}` either
                // (look two tokens ahead on a cloned iterator).
                if let Some(TokenTree::Punct(p)) = next
                    && p.as_char() == '@'
                {
                    let mut peek_iter = iter.clone();
                    peek_iter.next();
                    if matches!(peek_iter.peek(), Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace)
                    {
                        add_space = false;
                    }
                }
                if add_space {
                    output.extend(quote! { __out.push_str(" "); });
                }
            }
            // Brace group — either a template tag or literal `{...}` output;
            // `analyze_tag` decides which.
            TokenTree::Group(g) if g.delimiter() == Delimiter::Brace => {
                let tag = analyze_tag(g);
                let span = g.span();
                match tag {
                    TagType::If(cond) => {
                        iter.next();
                        output.extend(parse_if_chain(iter, cond, span)?);
                    }
                    TagType::IfLet(pattern, expr) => {
                        iter.next();
                        output.extend(parse_if_let_chain(iter, pattern, expr, span)?);
                    }
                    TagType::For(item, list) => {
                        iter.next();
                        let (body, terminator) = parse_fragment(iter, Some(&[Terminator::EndFor]))?;
                        if !matches!(terminator, Some(Terminator::EndFor)) {
                            return Err(template_error(
                                span,
                                "Unclosed {#for} block: Missing {/for}",
                                Some("{#for item in collection}..."),
                            ));
                        }
                        output.extend(quote! {
                            for #item in #list {
                                #body
                            }
                        });
                    }
                    TagType::Match(expr) => {
                        iter.next();
                        output.extend(parse_match_arms(iter, expr, span)?);
                    }
                    TagType::While(cond) => {
                        iter.next();
                        output.extend(parse_while_loop(iter, cond, span)?);
                    }
                    TagType::WhileLet(pattern, expr) => {
                        iter.next();
                        output.extend(parse_while_let_loop(iter, pattern, expr, span)?);
                    }
                    // Continuation/closing tags: legal only if the caller
                    // listed them in `stop_at`; otherwise they are orphans.
                    TagType::Else => {
                        if let Some(stops) = stop_at
                            && stops.iter().any(|s| matches!(s, Terminator::Else))
                        {
                            iter.next();
                            return Ok((output, Some(Terminator::Else)));
                        }
                        return Err(template_error(
                            span,
                            "Unexpected {:else} - not inside an {#if} block",
                            None,
                        ));
                    }
                    TagType::ElseIf(cond) => {
                        if let Some(stops) = stop_at
                            && stops.iter().any(|s| matches!(s, Terminator::ElseIf(_)))
                        {
                            iter.next();
                            return Ok((output, Some(Terminator::ElseIf(cond))));
                        }
                        return Err(template_error(
                            span,
                            "Unexpected {:else if} - not inside an {#if} block",
                            None,
                        ));
                    }
                    TagType::EndIf => {
                        if let Some(stops) = stop_at
                            && stops.iter().any(|s| matches!(s, Terminator::EndIf))
                        {
                            iter.next();
                            return Ok((output, Some(Terminator::EndIf)));
                        }
                        return Err(template_error(
                            span,
                            "Unexpected {/if} - no matching {#if} block",
                            None,
                        ));
                    }
                    TagType::EndFor => {
                        if let Some(stops) = stop_at
                            && stops.iter().any(|s| matches!(s, Terminator::EndFor))
                        {
                            iter.next();
                            return Ok((output, Some(Terminator::EndFor)));
                        }
                        return Err(template_error(
                            span,
                            "Unexpected {/for} - no matching {#for} block",
                            None,
                        ));
                    }
                    TagType::EndWhile => {
                        if let Some(stops) = stop_at
                            && stops.iter().any(|s| matches!(s, Terminator::EndWhile))
                        {
                            iter.next();
                            return Ok((output, Some(Terminator::EndWhile)));
                        }
                        return Err(template_error(
                            span,
                            "Unexpected {/while} - no matching {#while} block",
                            None,
                        ));
                    }
                    TagType::Case(pattern) => {
                        if let Some(stops) = stop_at
                            && stops.iter().any(|s| matches!(s, Terminator::Case(_)))
                        {
                            iter.next();
                            return Ok((output, Some(Terminator::Case(pattern))));
                        }
                        return Err(template_error(
                            span,
                            "Unexpected {:case} - not inside a {#match} block",
                            None,
                        ));
                    }
                    TagType::EndMatch => {
                        if let Some(stops) = stop_at
                            && stops.iter().any(|s| matches!(s, Terminator::EndMatch))
                        {
                            iter.next();
                            return Ok((output, Some(Terminator::EndMatch)));
                        }
                        return Err(template_error(
                            span,
                            "Unexpected {/match} - no matching {#match} block",
                            None,
                        ));
                    }
                    // `{$let ...}` — emit a Rust let binding into the
                    // generated code (not the rendered output).
                    TagType::Let(body) => {
                        iter.next();
                        output.extend(quote! {
                            let #body;
                        });
                    }
                    TagType::LetMut(body) => {
                        iter.next();
                        output.extend(quote! {
                            let mut #body;
                        });
                    }
                    // `{$do expr}` — run a statement for its side effects.
                    TagType::Do(expr) => {
                        iter.next();
                        output.extend(quote! {
                            #expr;
                        });
                    }
                    // `{$typescript expr}` — splice the stream's source and
                    // collect its runtime patches.
                    TagType::Typescript(expr) => {
                        iter.next();
                        output.extend(quote! {
                            {
                                let __ts_stream = #expr;
                                __out.push_str(__ts_stream.source());
                                __patches.extend(__ts_stream.runtime_patches);
                            }
                        });
                    }
                    // `{|tokens|}` — render the inner tokens (between the
                    // `|` delimiters) without automatic spacing.
                    TagType::IdentBlock => {
                        iter.next();
                        let inner_tokens: Vec<TokenTree> = g.stream().into_iter().collect();
                        if inner_tokens.len() >= 2 {
                            let content: TokenStream2 = inner_tokens[1..inner_tokens.len() - 1]
                                .iter()
                                .map(|t| t.to_token_stream())
                                .collect();
                            let inner_output =
                                parse_fragment_no_spacing(&mut content.into_iter().peekable())?;
                            output.extend(inner_output);
                        }
                        // Same trailing-space heuristic as for `@` above.
                        let next = iter.peek();
                        let next_char = match next {
                            Some(TokenTree::Punct(p)) => Some(p.as_char()),
                            _ => None,
                        };
                        let mut add_space = true;
                        if next.is_none() {
                            add_space = false;
                        }
                        if matches!(next_char, Some(c) if ".,;:?()[]{}<>!".contains(c)) {
                            add_space = false;
                        }
                        if let Some(TokenTree::Group(g)) = next {
                            match g.delimiter() {
                                Delimiter::Parenthesis | Delimiter::Bracket => add_space = false,
                                _ => {}
                            }
                        }
                        if let Some(TokenTree::Punct(p)) = next
                            && p.as_char() == '@'
                        {
                            let mut peek_iter = iter.clone();
                            peek_iter.next();
                            if matches!(peek_iter.peek(), Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Brace)
                            {
                                add_space = false;
                            }
                        }
                        if add_space {
                            output.extend(quote! { __out.push_str(" "); });
                        }
                    }
                    // `{> ... <}` → `/* ... */`
                    TagType::BlockComment(content) => {
                        iter.next();
                        output.extend(quote! { __out.push_str("/* "); });
                        output.extend(quote! { __out.push_str(#content); });
                        output.extend(quote! { __out.push_str(" */"); });
                    }
                    // `{>> ... <<}` → `/** ... */`
                    TagType::DocComment(content) => {
                        iter.next();
                        output.extend(quote! { __out.push_str("/** "); });
                        output.extend(quote! { __out.push_str(#content); });
                        output.extend(quote! { __out.push_str(" */"); });
                    }
                    // Plain `{ ... }` — literal braces with nested content
                    // parsed recursively (inner terminators not propagated).
                    TagType::Block => {
                        iter.next();
                        let inner_stream = g.stream();
                        output.extend(quote! { __out.push_str("{"); });
                        let (inner_parsed, _) =
                            parse_fragment(&mut inner_stream.into_iter().peekable(), None)?;
                        output.extend(inner_parsed);
                        output.extend(quote! { __out.push_str("}"); });
                    }
                }
            }
            // Non-brace groups: emit delimiters literally and recurse.
            TokenTree::Group(g) => {
                iter.next();
                let (open, close) = match g.delimiter() {
                    Delimiter::Parenthesis => ("(", ")"),
                    Delimiter::Bracket => ("[", "]"),
                    Delimiter::Brace => ("{", "}"),
                    Delimiter::None => ("", ""),
                };
                output.extend(quote! { __out.push_str(#open); });
                let (inner_parsed, _) =
                    parse_fragment(&mut g.stream().into_iter().peekable(), None)?;
                output.extend(inner_parsed);
                output.extend(quote! { __out.push_str(#close); });
            }
            // `"'^...^'"` backtick templates — expand to a JS template literal.
            TokenTree::Literal(lit) if is_backtick_template(lit) => {
                iter.next();
                let processed = process_backtick_template(lit)?;
                output.extend(processed);
                output.extend(quote! { __out.push_str(" "); });
            }
            // Ordinary string/char literals — expand `@{...}` interpolations.
            TokenTree::Literal(lit) if is_string_literal(lit) => {
                iter.next();
                let interpolated = interpolate_string_literal(lit)?;
                output.extend(interpolated);
                output.extend(quote! { __out.push_str(" "); });
            }
            // Everything else (idents, other literals, puncts): emit the
            // token's display form, then apply the spacing heuristics.
            _ => {
                let t = iter.next().unwrap();
                let s = t.to_string();
                let is_ident = matches!(&t, TokenTree::Ident(_));
                let punct_char = if let TokenTree::Punct(p) = &t {
                    Some(p.as_char())
                } else {
                    None
                };
                // Joint spacing means this punct glues to the next token
                // (e.g. the first `=` of `=>`), so no space after it.
                let is_joint = if let TokenTree::Punct(p) = &t {
                    p.spacing() == proc_macro2::Spacing::Joint
                } else {
                    false
                };
                let next = iter.peek();
                let next_char = match next {
                    Some(TokenTree::Punct(p)) => Some(p.as_char()),
                    _ => None,
                };
                output.extend(quote! {
                    __out.push_str(#s);
                });
                let mut add_space = true;
                if next.is_none() || is_joint {
                    add_space = false;
                } else if is_ident {
                    // No space between an ident and trailing punctuation or a
                    // call/index group: `foo.bar`, `foo(...)`, `foo[...]`.
                    if matches!(next_char, Some(c) if ".,;:?()[]{}<>!".contains(c)) {
                        add_space = false;
                    } else if let Some(TokenTree::Group(g)) = next {
                        match g.delimiter() {
                            Delimiter::Parenthesis | Delimiter::Bracket => add_space = false,
                            _ => {}
                        }
                    }
                } else if let Some(c) = punct_char {
                    // These puncts bind tightly to whatever follows.
                    match c {
                        '.' => add_space = false,
                        '!' => add_space = false,
                        '(' | '[' | '{' => add_space = false,
                        '<' | '>' => add_space = false,
                        '@' => add_space = false,
                        '$' => add_space = false,
                        _ => {}
                    }
                    // And nothing binds loosely to closing punctuation.
                    if matches!(next_char, Some(nc) if ".,;)]}>".contains(nc)) {
                        add_space = false;
                    }
                } else {
                    if matches!(next_char, Some(nc) if ".,;)]}>".contains(nc)) {
                        add_space = false;
                    }
                }
                if add_space {
                    output.extend(quote! { __out.push_str(" "); });
                }
            }
        }
    }
    Ok((output, None))
}
/// Joins the display forms of `tokens` with single spaces.
fn tokens_to_spaced_string(tokens: &[TokenTree]) -> String {
    tokens
        .iter()
        .map(|token| token.to_string())
        .collect::<Vec<_>>()
        .join(" ")
}
/// Returns the text content of a string literal token.
///
/// For `"..."` the quotes are stripped and escape sequences resolved via
/// `unescape_string`. Raw strings with any number of `#` guards
/// (`r"..."`, `r#"..."#`, `r##"..."##`, …) are returned verbatim with the
/// delimiters stripped — the original code only handled zero or one `#`.
/// Anything else (e.g. numeric literals) is returned as its raw display form.
fn extract_string_literal(lit: &proc_macro2::Literal) -> String {
    let s = lit.to_string();
    // Ordinary quoted string: strip quotes and process escapes.
    if s.starts_with('"') && s.ends_with('"') && s.len() >= 2 {
        return unescape_string(&s[1..s.len() - 1]);
    }
    // Raw string with N >= 0 hash guards: r"..." / r#"..."# / r##"..."## ...
    if let Some(rest) = s.strip_prefix('r') {
        let hashes = rest.chars().take_while(|&c| c == '#').count();
        let body = &rest[hashes..];
        let closer: String = std::iter::once('"').chain(std::iter::repeat_n('#', hashes)).collect();
        if body.len() >= 2 + hashes && body.starts_with('"') && body.ends_with(&closer) {
            return body[1..body.len() - 1 - hashes].to_string();
        }
    }
    // Not a recognized string form; return the raw token text.
    s
}
/// Resolves Rust string-literal escape sequences in `s`.
///
/// Handles `\n`, `\r`, `\t`, `\0`, `\\`, `\"`, `\'`, and `\u{HEX}` — the
/// original version was missing `\0` and `\u{...}`, which are legal in Rust
/// string literals and would previously pass through with a literal
/// backslash. Unknown escapes (e.g. `\x41`) and malformed `\u{...}` forms
/// are preserved verbatim, as before. A trailing lone backslash is kept.
fn unescape_string(s: &str) -> String {
    let mut result = String::with_capacity(s.len());
    let mut chars = s.chars().peekable();
    while let Some(c) = chars.next() {
        if c != '\\' {
            result.push(c);
            continue;
        }
        match chars.next() {
            Some('n') => result.push('\n'),
            Some('r') => result.push('\r'),
            Some('t') => result.push('\t'),
            Some('0') => result.push('\0'),
            Some('\\') => result.push('\\'),
            Some('"') => result.push('"'),
            Some('\'') => result.push('\''),
            Some('u') if chars.peek() == Some(&'{') => {
                // `\u{HEX}` — collect up to the closing brace.
                chars.next();
                let mut hex = String::new();
                while let Some(&h) = chars.peek() {
                    if h == '}' {
                        break;
                    }
                    hex.push(h);
                    chars.next();
                }
                let closed = chars.next() == Some('}');
                match u32::from_str_radix(&hex, 16).ok().and_then(char::from_u32) {
                    Some(ch) if closed => result.push(ch),
                    _ => {
                        // Malformed or unclosed: keep the text literally.
                        result.push_str("\\u{");
                        result.push_str(&hex);
                        if closed {
                            result.push('}');
                        }
                    }
                }
            }
            Some(other) => {
                // Unknown escape: preserve backslash + char unchanged.
                result.push('\\');
                result.push(other);
            }
            None => result.push('\\'),
        }
    }
    result
}
/// True for any string-shaped literal token: `"..."`, `'...'` char literals,
/// or raw strings (`r"..."` / `r#...`).
fn is_string_literal(lit: &proc_macro2::Literal) -> bool {
    let repr = lit.to_string();
    ["\"", "'", "r\"", "r#"]
        .iter()
        .any(|prefix| repr.starts_with(prefix))
}
/// Detects the backtick-template marker `'^ ... ^'` inside a string literal
/// (plain, `r"..."`, or `r#"..."#` forms).
///
/// The minimum-length guards ensure the opening and closing markers cannot
/// overlap in a short literal; the original only guarded the plain form, so
/// e.g. the 6-char token `r"'^'"` matched and made
/// `process_backtick_template` slice a reversed range (panic).
fn is_backtick_template(lit: &proc_macro2::Literal) -> bool {
    let s = lit.to_string();
    // "'^...^'" — 3-char prefix + 3-char suffix.
    if s.len() >= 6 && s.starts_with("\"'^") && s.ends_with("^'\"") {
        return true;
    }
    // r"'^...^'" — 4-char prefix + 3-char suffix.
    if s.len() >= 7 && s.starts_with("r\"'^") && s.ends_with("^'\"") {
        return true;
    }
    // r#"'^...^'"# — 5-char prefix + 4-char suffix.
    if s.len() >= 9 && s.starts_with("r#\"'^") && s.ends_with("^'\"#") {
        return true;
    }
    false
}
/// Expands a backtick template literal (`"'^ ... ^'"`, also accepted in
/// `r"..."` / `r#"..."#` form) into code that writes a JS template literal
/// (`` ` ... ` ``) to `__out`, expanding `@{expr}` interpolations via
/// `syn::parse_str`. `@@` escapes a literal `@`.
///
/// # Errors
/// Rejects template control-flow tags inside the literal, an unclosed
/// `@{`, and interpolations that fail to parse as a Rust expression.
fn process_backtick_template(lit: &proc_macro2::Literal) -> syn::Result<TokenStream2> {
    let raw = lit.to_string();
    let span = lit.span();
    // Strip the quoting plus the `'^ ... ^'` markers. The length guards keep
    // the start/end marker slices from overlapping in degenerate short
    // literals, which would otherwise produce a reversed range and panic.
    let content = if raw.len() >= 6 && raw.starts_with("\"'^") && raw.ends_with("^'\"") {
        &raw[3..raw.len() - 3]
    } else if raw.len() >= 7 && raw.starts_with("r\"'^") && raw.ends_with("^'\"") {
        &raw[4..raw.len() - 3]
    } else if raw.len() >= 9 && raw.starts_with("r#\"'^") && raw.ends_with("^'\"#") {
        &raw[5..raw.len() - 4]
    } else {
        // Not actually a backtick template: emit the raw token unchanged.
        return Ok(quote! { __out.push_str(#raw); });
    };
    if content.contains("{#") || content.contains("{/") || content.contains("{:") {
        return Err(template_error(
            span,
            "Template control flow tags cannot be used inside backtick template literals",
            Some(&format!(
                "\"'^{}...^'\"",
                content.chars().take(40).collect::<String>()
            )),
        ));
    }
    // Fast path: no interpolation marker, emit the content verbatim.
    if !content.contains('@') {
        let mut output = TokenStream2::new();
        output.extend(quote! { __out.push_str("`"); });
        output.extend(quote! { __out.push_str(#content); });
        output.extend(quote! { __out.push_str("`"); });
        return Ok(output);
    }
    let mut output = TokenStream2::new();
    output.extend(quote! { __out.push_str("`"); });
    let mut chars = content.chars().peekable();
    let mut current_literal = String::new();
    // Character position, tracked only for error reporting.
    let mut char_pos = 0usize;
    while let Some(c) = chars.next() {
        char_pos += 1;
        if c == '@' {
            match chars.peek() {
                Some(&'@') => {
                    // `@@` → one literal `@`.
                    chars.next();
                    char_pos += 1;
                    current_literal.push('@');
                }
                Some(&'{') => {
                    // `@{expr}` interpolation: flush pending literal text…
                    if !current_literal.is_empty() {
                        output.extend(quote! { __out.push_str(#current_literal); });
                        current_literal.clear();
                    }
                    chars.next();
                    char_pos += 1;
                    let expr_start_pos = char_pos;
                    // …then scan to the matching `}` (tracking nesting).
                    let mut expr_str = String::new();
                    let mut brace_depth = 1;
                    for ec in chars.by_ref() {
                        char_pos += 1;
                        if ec == '{' {
                            brace_depth += 1;
                            expr_str.push(ec);
                        } else if ec == '}' {
                            brace_depth -= 1;
                            if brace_depth == 0 {
                                break;
                            }
                            expr_str.push(ec);
                        } else {
                            expr_str.push(ec);
                        }
                    }
                    if brace_depth != 0 {
                        return Err(template_error(
                            span,
                            &format!(
                                "Unclosed @{{}} interpolation at position {}",
                                expr_start_pos
                            ),
                            Some(&format!("@{{{}", expr_str)),
                        ));
                    }
                    match syn::parse_str::<syn::Expr>(&expr_str) {
                        Ok(expr) => {
                            output.extend(quote! {
                                __out.push_str(&#expr.to_string());
                            });
                        }
                        Err(parse_err) => {
                            return Err(template_error(
                                span,
                                &format!(
                                    "Invalid Rust expression in backtick template interpolation: {}",
                                    parse_err
                                ),
                                Some(&format!("@{{{}}}", expr_str)),
                            ));
                        }
                    }
                }
                _ => {
                    // Lone `@` not followed by `{` — keep it literally.
                    current_literal.push('@');
                }
            }
        } else {
            current_literal.push(c);
        }
    }
    if !current_literal.is_empty() {
        output.extend(quote! { __out.push_str(#current_literal); });
    }
    output.extend(quote! { __out.push_str("`"); });
    Ok(output)
}
/// Expands an ordinary string/char literal, rewriting `@{expr}`
/// interpolations into `.to_string()` appends while re-emitting the
/// surrounding quote characters. `@@` escapes a literal `@`; backslash
/// escape sequences are copied through untouched.
///
/// # Errors
/// Fails on template control-flow tags inside the literal, an unclosed
/// `@{`, or an interpolation that does not parse as a Rust expression.
fn interpolate_string_literal(lit: &proc_macro2::Literal) -> syn::Result<TokenStream2> {
    let raw = lit.to_string();
    let span = lit.span();
    // Pick the output quote character and slice off the literal's delimiters.
    let (quote_char, content) = if raw.starts_with('"') {
        ('"', &raw[1..raw.len() - 1])
    } else if raw.starts_with('\'') {
        ('\'', &raw[1..raw.len() - 1])
    } else if raw.starts_with("r\"") {
        ('"', &raw[2..raw.len() - 1])
    } else if raw.starts_with("r#") {
        // Raw string with N `#` guards: strip `r`, the hashes, and quotes.
        let hash_count = raw[1..].chars().take_while(|&c| c == '#').count();
        let start = 2 + hash_count;
        let end = raw.len() - 1 - hash_count;
        ('"', &raw[start..end])
    } else {
        // Not a string-shaped literal; emit the raw token verbatim.
        return Ok(quote! { __out.push_str(#raw); });
    };
    // Fast path: nothing to interpolate — emit the original literal text.
    if !content.contains('@') {
        return Ok(quote! { __out.push_str(#raw); });
    }
    if content.contains("{#") || content.contains("{/") || content.contains("{:") {
        return Err(template_error(
            span,
            "Template control flow tags cannot be used inside string literals",
            Some(&format!(
                "\"{}...\"",
                content.chars().take(40).collect::<String>()
            )),
        ));
    }
    let mut output = TokenStream2::new();
    let quote_str = quote_char.to_string();
    output.extend(quote! { __out.push_str(#quote_str); });
    let mut chars = content.chars().peekable();
    let mut current_literal = String::new();
    // Character position, tracked only for error reporting.
    let mut char_pos = 0usize;
    while let Some(c) = chars.next() {
        char_pos += 1;
        if c == '@' {
            match chars.peek() {
                Some(&'@') => {
                    // `@@` → one literal `@`.
                    chars.next();
                    char_pos += 1;
                    current_literal.push('@');
                }
                Some(&'{') => {
                    // `@{expr}` interpolation: flush pending literal text…
                    if !current_literal.is_empty() {
                        output.extend(quote! { __out.push_str(#current_literal); });
                        current_literal.clear();
                    }
                    chars.next();
                    char_pos += 1;
                    let expr_start_pos = char_pos;
                    // …then scan to the matching `}` (tracking nesting).
                    let mut expr_str = String::new();
                    let mut brace_depth = 1;
                    for ec in chars.by_ref() {
                        char_pos += 1;
                        if ec == '{' {
                            brace_depth += 1;
                            expr_str.push(ec);
                        } else if ec == '}' {
                            brace_depth -= 1;
                            if brace_depth == 0 {
                                break;
                            }
                            expr_str.push(ec);
                        } else {
                            expr_str.push(ec);
                        }
                    }
                    if brace_depth != 0 {
                        return Err(template_error(
                            span,
                            &format!(
                                "Unclosed @{{}} interpolation at position {}",
                                expr_start_pos
                            ),
                            Some(&format!("@{{{}", expr_str)),
                        ));
                    }
                    match syn::parse_str::<syn::Expr>(&expr_str) {
                        Ok(expr) => {
                            output.extend(quote! {
                                __out.push_str(&#expr.to_string());
                            });
                        }
                        Err(parse_err) => {
                            return Err(template_error(
                                span,
                                &format!(
                                    "Invalid Rust expression in string interpolation: {}",
                                    parse_err
                                ),
                                Some(&format!("@{{{}}}", expr_str)),
                            ));
                        }
                    }
                }
                _ => {
                    // Lone `@` not followed by `{` — keep it literally.
                    current_literal.push('@');
                }
            }
        } else if c == '\\' {
            // Preserve escape sequences verbatim: copy `\` plus the next char.
            current_literal.push(c);
            if chars.peek().is_some() {
                current_literal.push(chars.next().unwrap());
                char_pos += 1;
            }
        } else {
            current_literal.push(c);
        }
    }
    if !current_literal.is_empty() {
        output.extend(quote! { __out.push_str(#current_literal); });
    }
    output.extend(quote! { __out.push_str(#quote_str); });
    Ok(output)
}