use proc_macro::TokenStream;
use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::{format_ident, quote};
use std::{
collections::HashSet,
fs,
path::{Path, PathBuf},
};
use syn::{Attribute, ItemEnum, parse_macro_input};
/// Conversion of slash-separated, path-like strings into valid Rust
/// identifier text.
trait ToValidIdent {
    /// Converts `self` into a single valid Rust identifier:
    /// each `/`-segment is PascalCased (split on `-`, `_`, ` `),
    /// `&` becomes `And`, `.` becomes `・`, segments are joined with `ノ`,
    /// and a leading `_` is prepended when the result would not start with
    /// `_` or an alphabetic character.
    fn to_valid_rust_ident_with_no(&self) -> String;
}

/// Ensures `s` starts like a Rust identifier: kept as-is when it begins with
/// `_` or an alphabetic character, prefixed with `_` otherwise, and an empty
/// string becomes `"_"`. (Replaces two duplicated inline matches, one of
/// which had a dead digit-specific arm producing the same `_`-prefix.)
fn ensure_ident_start(s: String) -> String {
    match s.chars().next() {
        Some('_') => s,
        Some(c) if c.is_alphabetic() => s,
        Some(_) => format!("_{s}"),
        None => "_".to_string(),
    }
}

impl ToValidIdent for str {
    fn to_valid_rust_ident_with_no(&self) -> String {
        let mut segments: Vec<String> = Vec::new();
        for part in self.split('/') {
            // '&' -> "And"; '.' -> '・' so the separator survives inside a
            // single identifier token.
            let replaced = part.replace('&', "And").replace('.', "・");
            let mut pascal = String::new();
            for word in replaced
                .split(&['-', '_', ' '][..])
                .filter(|w| !w.is_empty())
            {
                let mut chars = word.chars();
                if let Some(first) = chars.next() {
                    // `to_uppercase` may expand to several chars (e.g. 'ß').
                    pascal.extend(first.to_uppercase());
                    pascal.push_str(chars.as_str());
                }
            }
            segments.push(ensure_ident_start(pascal));
        }
        // 'ノ' stands in for the '/' separator between path segments.
        ensure_ident_start(segments.join("ノ"))
    }
}
/// Recursively walks `dir`, pushing one `(ident, logical_path)` pair per
/// directory and per file whose extension is in `allowed_exts`.
///
/// * `current_rel_path` — path of `dir` relative to the scan root (`""` at
///   the top level); used both for recursion and for building logical paths.
/// * `logical_prefix` — user-supplied prefix prepended to every logical path.
/// * `seen` — logical paths already emitted; guarantees each variant is
///   pushed at most once.
///
/// Unreadable directories are skipped silently (best-effort scan).
fn collect_paths(
    dir: &Path,
    allowed_exts: &[String],
    variants: &mut Vec<(proc_macro2::Ident, String)>,
    current_rel_path: &str,
    logical_prefix: &str,
    seen: &mut HashSet<String>,
) {
    let Ok(entries) = fs::read_dir(dir) else {
        return;
    };
    for entry in entries.filter_map(Result::ok) {
        let path = entry.path();
        let file_name = entry.file_name();
        let name = file_name.to_string_lossy();
        let rel_path = if current_rel_path.is_empty() {
            name.to_string()
        } else {
            format!("{current_rel_path}/{name}")
        };
        let is_dir = path.is_dir();
        // Only directories and files with an allowed extension become
        // variants; everything else (incl. non-file entries) is skipped.
        if !is_dir && !(path.is_file() && has_allowed_extension(&name, allowed_exts)) {
            continue;
        }
        let logical_path = if logical_prefix.is_empty() {
            rel_path.clone()
        } else {
            format!("{logical_prefix}/{rel_path}")
        };
        // `insert` returns true only for new entries — one hash lookup
        // instead of the previous `contains` + `insert` pair.
        if seen.insert(logical_path.clone()) {
            let ident = format_ident!("{}", logical_path.to_valid_rust_ident_with_no());
            variants.push((ident, logical_path));
        }
        if is_dir {
            collect_paths(&path, allowed_exts, variants, &rel_path, logical_prefix, seen);
        }
    }
}
/// Returns true when `file_name` ends with `.{ext}` for any `ext` in
/// `allowed_exts`. Multi-dot extensions (e.g. `"tar.gz"`) match as suffixes,
/// and a bare name equal to an extension (e.g. `"svg"`) does not match.
/// Uses `strip_suffix` instead of `format!(".{ext}")` to avoid allocating a
/// `String` per comparison.
fn has_allowed_extension(file_name: &str, allowed_exts: &[String]) -> bool {
    allowed_exts.iter().any(|ext| {
        file_name
            .strip_suffix(ext.as_str())
            .is_some_and(|rest| rest.ends_with('.'))
    })
}
#[proc_macro_attribute]
pub fn magic(attr: TokenStream, item: TokenStream) -> TokenStream {
let input_enum = parse_macro_input!(item as ItemEnum);
let attr_ts2: TokenStream2 = attr.into();
let attr: Attribute = syn::parse_quote!(#[magic(#attr_ts2)]);
let mut root = None;
let mut ext: Option<Vec<String>> = None;
let mut prefix = String::new();
let _ = attr.parse_nested_meta(|meta| {
if meta.path.is_ident("path") {
let value = meta.value()?.parse::<syn::LitStr>()?;
root = Some(value.value());
Ok(())
} else if meta.path.is_ident("ext") {
let value = meta.value()?.parse::<syn::LitStr>()?;
let exts = value
.value()
.split(',')
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect::<Vec<_>>();
ext = Some(exts);
Ok(())
} else if meta.path.is_ident("prefix") {
let value = meta.value()?.parse::<syn::LitStr>()?;
prefix = value.value();
Ok(())
} else {
Err(meta.error("Only `path`, `ext`, and `prefix` are supported"))
}
});
let root = root.unwrap_or_else(|| ".".to_string());
let ext = ext.unwrap_or_else(|| vec!["svg".to_string()]);
let root_path = PathBuf::from(&root);
let enum_ident = &input_enum.ident;
let mut variants = Vec::new();
let mut seen: HashSet<String> = HashSet::new();
collect_paths(&root_path, &ext, &mut variants, "", &prefix, &mut seen);
variants.sort_by(|a, b| a.0.to_string().cmp(&b.0.to_string()));
let variant_defs = variants.iter().map(|(ident, _)| quote! { #ident, });
let match_arms = variants.iter().map(|(ident, original_path)| {
let lit = syn::LitStr::new(original_path, Span::call_site());
quote! {
Self::#ident => #lit,
}
});
let expanded = quote! {
#[allow(mixed_script_confusables)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum #enum_ident {
#(#variant_defs)*
}
impl #enum_ident {
pub fn to_str(&self) -> &'static str {
match self {
#(#match_arms)*
_ => unreachable!("Unrecognized variant in generated enum {}", stringify!(#enum_ident)),
}
}
pub fn to_string(&self) -> String {
self.to_str().to_string()
}
}
};
TokenStream::from(expanded)
}