use std::collections::HashMap;
use once_cell::sync::Lazy;
use proc_macro::{Group, Ident, Literal, TokenStream, TokenTree};
use regex::Regex;
static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"_int(?:_([^_ \s]+))?_").unwrap());
/// Rewrites one token tree, substituting `_int_` / `_int_<id>_` markers with
/// per-id counter values.
///
/// Behavior:
/// * An identifier that consists *entirely* of a marker is replaced by an
///   unsuffixed integer literal holding the counter's current value.
/// * Markers embedded in a longer identifier are substituted textually and
///   the result is re-emitted as an identifier (original span preserved).
/// * Groups are rewritten recursively; every other token passes through.
///
/// Each distinct `<id>` owns its own counter (starting at 0); the bare
/// `_int_` form uses the reserved key `"!@"`, which can never be produced
/// by the capture group, so it cannot collide with a user id.
fn map_tokens(token: TokenTree, counters: &mut HashMap<String, usize>) -> TokenTree {
    match token {
        TokenTree::Ident(v) => {
            let mut ident = v.to_string();
        let mut has_changed = false;
            // Iterate over a snapshot: `ident` is mutated inside the loop,
            // so we cannot hold a borrow of it across iterations.
            let snapshot = ident.clone();
            for captures in RE.captures_iter(&snapshot) {
                let id = captures
                    .get(1)
                    .map_or_else(|| "!@".to_string(), |m| m.as_str().to_string());
                // Single lookup: create the counter on first use.
                let counter = counters.entry(id).or_insert(0);
                let full_match = &captures[0];
                if full_match == ident {
                    // The whole identifier is the marker: emit a literal.
                    let literal = TokenTree::Literal(Literal::usize_unsuffixed(*counter));
                    *counter += 1;
                    return literal;
                }
                has_changed = true;
                // Replace only the first remaining occurrence so repeated
                // identical markers each receive their own counter value;
                // plain `replace` would rewrite all of them with the first
                // value while still bumping the counter once per match.
                ident = ident.replacen(full_match, &counter.to_string(), 1);
                *counter += 1;
            }
            if has_changed {
                return TokenTree::Ident(Ident::new(&ident, v.span()));
            }
            TokenTree::Ident(v)
        }
        TokenTree::Group(v) => TokenTree::Group(Group::new(
            v.delimiter(),
            v.stream()
                .into_iter()
                .map(|token| map_tokens(token, counters))
                .collect(),
        )),
        other => other,
    }
}
#[proc_macro]
pub fn count(item: TokenStream) -> TokenStream {
let mut counters = HashMap::new();
item.into_iter()
.map(|token| map_tokens(token, &mut counters))
.collect()
}
/// Wrapping variant of `map_tokens`: identical marker substitution, but the
/// per-id counters advance with `wrapping_add`, so reaching `usize::MAX`
/// rolls over to 0 instead of panicking in debug builds.
///
/// Behavior:
/// * An identifier that consists *entirely* of a marker becomes an
///   unsuffixed integer literal holding the counter's current value.
/// * Embedded markers are substituted textually and re-emitted as an
///   identifier (original span preserved).
/// * Groups are rewritten recursively; every other token passes through.
fn wrapping_map_tokens(token: TokenTree, counters: &mut HashMap<String, usize>) -> TokenTree {
    match token {
        TokenTree::Ident(v) => {
            let mut ident = v.to_string();
            let mut has_changed = false;
            // Iterate over a snapshot: `ident` is mutated inside the loop,
            // so we cannot hold a borrow of it across iterations.
            let snapshot = ident.clone();
            for captures in RE.captures_iter(&snapshot) {
                let id = captures
                    .get(1)
                    .map_or_else(|| "!@".to_string(), |m| m.as_str().to_string());
                // Single lookup: create the counter on first use.
                let counter = counters.entry(id).or_insert(0);
                let full_match = &captures[0];
                if full_match == ident {
                    // The whole identifier is the marker: emit a literal.
                    let literal = TokenTree::Literal(Literal::usize_unsuffixed(*counter));
                    *counter = counter.wrapping_add(1);
                    return literal;
                }
                has_changed = true;
                // Replace only the first remaining occurrence so repeated
                // identical markers each receive their own counter value;
                // plain `replace` would rewrite all of them with the first
                // value while still bumping the counter once per match.
                ident = ident.replacen(full_match, &counter.to_string(), 1);
                *counter = counter.wrapping_add(1);
            }
            if has_changed {
                return TokenTree::Ident(Ident::new(&ident, v.span()));
            }
            TokenTree::Ident(v)
        }
        // BUG FIX: this previously recursed into `map_tokens`, so markers
        // inside any group used the non-wrapping (panicking) increment.
        TokenTree::Group(v) => TokenTree::Group(Group::new(
            v.delimiter(),
            v.stream()
                .into_iter()
                .map(|token| wrapping_map_tokens(token, counters))
                .collect(),
        )),
        other => other,
    }
}
#[proc_macro]
pub fn wrapping_count(item: TokenStream) -> TokenStream {
let mut counters = HashMap::new();
item.into_iter()
.map(|token| wrapping_map_tokens(token, &mut counters))
.collect()
}