extern crate glob;
extern crate proc_macro;

use self::glob::{glob, Paths};
use proc_macro::TokenStream;
use quote::quote;
use std::path::{Path, PathBuf};
use syn::parse::{Parse, ParseStream, Result};
use syn::{parse_macro_input, Expr, ExprLit, Ident, Lit, Token};
/// Maximum number of characters of stringified content embedded in a
/// generated function name (keeps identifiers a manageable length).
const CONTENT_MAX_LEN: usize = 100;
/// Map a free-form string to an identifier-safe form by replacing every
/// character that cannot appear in a Rust function name (quotes, whitespace,
/// separators, glob characters) with an underscore.
fn canonical_fn_name(s: &str) -> String {
    const FORBIDDEN: [char; 11] = ['"', ' ', '.', ':', '-', '*', '/', '\\', '\n', '\t', '\r'];
    s.chars()
        .map(|c| if FORBIDDEN.contains(&c) { '_' } else { c })
        .collect()
}
/// Append `other` after `accu`, yielding a single token stream.
/// Used as the fold step when stitching generated test functions together.
fn concat_ts(
    accu: proc_macro2::TokenStream,
    other: proc_macro2::TokenStream,
) -> proc_macro2::TokenStream {
    let mut combined = accu;
    combined.extend(other);
    combined
}
/// Parsed input of `glob_expand!`: `"<glob pattern>" ; <lambda ident>`.
struct GlobExpand {
    // Glob pattern literal; validated to be `Lit::Str` at expansion time.
    glob_pattern: Lit,
    // Identifier of the closure/function invoked with each matched path.
    lambda: Ident,
}
impl Parse for GlobExpand {
    /// Parses macro input of the form `<glob literal> ; <lambda ident>`.
    fn parse(input: ParseStream) -> Result<Self> {
        let glob_pattern: Lit = input.parse()?;
        // `;` separates the pattern from the lambda name.
        input.parse::<Token![;]>()?;
        let lambda: Ident = input.parse()?;
        Ok(GlobExpand {
            glob_pattern,
            lambda,
        })
    }
}
/// Prefix applied to every function name generated by `glob_expand!`.
const PREFIX: &str = "gen_";
fn fn_ident_from_path(fn_ident: &Ident, path: &PathBuf) -> Ident {
let path_as_str = path
.clone()
.into_os_string()
.into_string()
.expect("bad encoding");
let stringified = format!("{}_{}", fn_ident.to_string(), &path_as_str);
let gen_fn_ident = proc_macro2::Ident::new(
&canonical_fn_name(&stringified),
proc_macro2::Span::call_site(),
);
gen_fn_ident
}
/// Derive an identifier-safe function name from `fn_ident` and arbitrary
/// string content, truncating the content to `CONTENT_MAX_LEN` characters.
///
/// Truncation is done by *characters*, not bytes: the original
/// `&name[0..safe_len]` byte-slice panics when the cut lands inside a
/// multi-byte UTF-8 sequence. For pure-ASCII input the result is unchanged.
fn fn_ident_from_string(fn_ident: &Ident, name: &str) -> Ident {
    // Char-based truncation can never split a UTF-8 code point.
    let safe_name: String = name.chars().take(CONTENT_MAX_LEN).collect();
    let stringified = format!("{}_{}", fn_ident, safe_name);
    proc_macro2::Ident::new(
        &canonical_fn_name(&stringified),
        proc_macro2::Span::call_site(),
    )
}
/// Render an expression as a deterministic string fragment for use in a
/// generated function name (later passed through `canonical_fn_name`).
///
/// Supported forms: int/char/str/float literals, array literals, paths, and
/// references. `int_as_hex` selects hex formatting for integer literals; it
/// is forced on for array elements so byte arrays read like a hex dump.
///
/// # Panics
/// Panics (aborting macro expansion) on any unsupported literal kind or
/// expression form.
fn expr_stringified(expr: &Expr, int_as_hex: bool) -> String {
    let stringified = match expr {
        Expr::Lit(lit) => match lit {
            ExprLit {
                lit: litval,
                attrs: _,
            } => match litval {
                Lit::Int(lit) => {
                    // NOTE(review): `lit.value()` is the pre-1.0 syn API; this
                    // file targets an old syn release (syn 1.x uses base10_parse).
                    let val = lit.value();
                    if int_as_hex {
                        if val > 255 {
                            // Wide values: fixed 16-digit zero-padded hex.
                            format!("{:016x}", val)
                        } else {
                            // Byte-sized values: exactly two hex digits.
                            format!("{:02x}", val as u8)
                        }
                    } else {
                        // Decimal, zero-padded to a fixed width of 10.
                        format!("{:010}", val)
                    }
                }
                Lit::Char(lit) => {
                    let val = lit.value();
                    format!("{}", val)
                }
                Lit::Str(lit) => {
                    let val = lit.value();
                    val
                }
                Lit::Float(lit) => {
                    let val = lit.value();
                    format!("{}", val)
                }
                // Unsupported literal kinds (byte strings, bools, ...) abort.
                _ => panic!(),
            },
        },
        Expr::Array(ref array_expr) => {
            // Concatenate elements in hex, inserting '_' every 8 elements
            // purely for readability of the resulting name.
            let elems = &array_expr.elems;
            let mut composed = String::new();
            let mut cnt: usize = 0;
            for expr in elems.iter() {
                if cnt > 0 && cnt % 8 == 0 {
                    composed.push_str("_");
                }
                cnt = cnt + 1;
                let expr_str = expr_stringified(&expr, true);
                composed.push_str(&expr_str);
            }
            composed
        }
        Expr::Path(ref expr_path) => {
            // Join path segments with '_' (e.g. `a::b::C` -> "a_b_C");
            // a leading `::` also contributes a leading '_'.
            let path = &expr_path.path;
            let leading_colon = path.leading_colon.is_some();
            let mut composed = String::new();
            for segment in &path.segments {
                if !composed.is_empty() || leading_colon {
                    composed.push_str("_")
                }
                let ident = &segment.ident;
                composed.push_str(&ident.to_string());
            }
            composed
        }
        Expr::Reference(ref reference) => {
            // `&expr` stringifies the same as `expr`.
            let ref_expr = &reference.expr;
            expr_stringified(&ref_expr, int_as_hex)
        }
        // Any other expression form is unsupported for name generation.
        _ => panic!(),
    };
    stringified
}
/// Derive an identifier-safe function name from `fn_ident` and an expression
/// (decimal formatting for integers at this top level).
fn fn_ident_from_expr(fn_ident: &Ident, expr: &Expr) -> Ident {
    // `expr_stringified` already returns a String; the original wrapped it in
    // a redundant `format!("{}", ..)`, allocating a second copy for nothing.
    fn_ident_from_string(fn_ident, &expr_stringified(expr, false))
}
#[proc_macro]
pub fn glob_expand(item: TokenStream) -> TokenStream {
let GlobExpand {
glob_pattern,
lambda,
} = parse_macro_input!(item as GlobExpand);
let pattern = if let Lit::Str(s) = glob_pattern {
s.value()
} else {
panic!();
};
let empty_ts: proc_macro2::TokenStream = "".parse().unwrap();
let paths: Paths = glob(&pattern).expect("Failed to read testdata dir.");
fn concat(
accu: proc_macro2::TokenStream,
ts: proc_macro2::TokenStream,
) -> proc_macro2::TokenStream {
quote! { # accu # ts }
}
let result = paths
.map(|path| {
let path_as_str = path
.expect("No such file or directory")
.into_os_string()
.into_string()
.expect("bad encoding");
let canonical_name = path_as_str
.replace("\"", " ")
.replace(" ", "_")
.replace("-", "_")
.replace("*", "_")
.replace("/", "_");
let mut func_name = PREFIX.to_string();
func_name.push_str(&canonical_name);
let func_ident = proc_macro2::Ident::new(&func_name, proc_macro2::Span::call_site());
let item = quote! {
# [test]
fn # func_ident () {
let f = #lambda;
f( #path_as_str );
}
};
item
})
.fold(empty_ts, concat);
result.into()
}
/// Parsed input of `test_expand_paths!` / `bench_expand_paths!`:
/// `<fn ident> ; "<glob pattern>"`.
struct ExpandPaths {
    // Function invoked from each generated #[test] / #[bench].
    fn_ident: Ident,
    // Glob pattern literal selecting the input files.
    glob_pattern: Lit,
}
impl Parse for ExpandPaths {
    /// Parses `<fn ident> ; <glob literal>`.
    fn parse(input: ParseStream) -> Result<Self> {
        let fn_ident = input.parse::<Ident>()?;
        // Discard the `;` separator.
        input.parse::<Token![;]>()?;
        let glob_pattern = input.parse::<Lit>()?;
        Ok(ExpandPaths {
            fn_ident,
            glob_pattern,
        })
    }
}
/// `test_expand_paths!(test_fn ; "glob/pattern/*")`
///
/// Generates one `#[test]` per file matching the glob; each test calls
/// `test_fn("<path>")`. Function names are derived from the path via
/// `fn_ident_from_path`, so every matched file gets a distinct test.
#[proc_macro]
pub fn test_expand_paths(item: TokenStream) -> TokenStream {
    let ExpandPaths {
        fn_ident,
        glob_pattern,
    } = parse_macro_input!(item as ExpandPaths);
    let pattern = if let Lit::Str(s) = glob_pattern {
        s.value()
    } else {
        // The original bare `panic!()` produced an unhelpful expansion error.
        panic!("test_expand_paths: glob pattern must be a string literal");
    };
    let empty_ts: proc_macro2::TokenStream = "".parse().unwrap();
    let paths: Paths = glob(&pattern).expect("Invalid 'paths' pattern.");
    let result = paths
        .map(|path| {
            let path = path.expect("No such file or directory");
            // Unique, identifier-safe name per matched file.
            let gen_fn_ident = fn_ident_from_path(&fn_ident, &path);
            let path_as_str = path.into_os_string().into_string().expect("bad encoding");
            quote! {
                #[test]
                fn #gen_fn_ident() {
                    #fn_ident(#path_as_str);
                }
            }
        })
        .fold(empty_ts, concat_ts);
    result.into()
}
/// `bench_expand_paths!(bench_fn ; "glob/pattern/*")`
///
/// Generates one `#[bench]` per file matching the glob; each calls
/// `bench_fn(bencher, "<path>")`. The caller's crate must have the nightly
/// `test` crate in scope for `test::Bencher`.
#[proc_macro]
pub fn bench_expand_paths(item: TokenStream) -> TokenStream {
    let ExpandPaths {
        fn_ident,
        glob_pattern,
    } = parse_macro_input!(item as ExpandPaths);
    let pattern = if let Lit::Str(s) = glob_pattern {
        s.value()
    } else {
        // The original bare `panic!()` produced an unhelpful expansion error.
        panic!("bench_expand_paths: glob pattern must be a string literal");
    };
    let empty_ts: proc_macro2::TokenStream = "".parse().unwrap();
    let paths: Paths = glob(&pattern).expect("Invalid 'paths' pattern.");
    let result = paths
        .map(|path| {
            let path = path.expect("No such file or directory");
            // Unique, identifier-safe name per matched file.
            let gen_fn_ident = fn_ident_from_path(&fn_ident, &path);
            let path_as_str = path.into_os_string().into_string().expect("bad encoding");
            quote! {
                #[bench]
                fn #gen_fn_ident(bencher: &mut test::Bencher) {
                    #fn_ident(bencher, #path_as_str);
                }
            }
        })
        .fold(empty_ts, concat_ts);
    result.into()
}
/// Parsed input of `test_expand_list!` / `bench_expand_list!`:
/// `<fn ident> ; [elem, elem, ...]`.
struct ExpandList {
    // Function invoked from each generated #[test] / #[bench].
    fn_ident: Ident,
    // Expression expected to be an array literal of inputs.
    listing: Expr,
}
impl Parse for ExpandList {
    /// Parses `<fn ident> ; <array expression>`.
    fn parse(input: ParseStream) -> Result<Self> {
        let fn_ident = input.parse::<Ident>()?;
        // Discard the `;` separator.
        input.parse::<Token![;]>()?;
        let listing = input.parse::<syn::Expr>()?;
        Ok(ExpandList { fn_ident, listing })
    }
}
/// `test_expand_list!(test_fn ; [input1, input2, ...])`
///
/// Generates one `#[test]` per array element; each calls `test_fn(&element)`.
/// Elements that are already references are passed through without an extra `&`.
#[proc_macro]
pub fn test_expand_list(item: TokenStream) -> TokenStream {
    let ExpandList { fn_ident, listing } = parse_macro_input!(item as ExpandList);
    let expr_array = if let Expr::Array(expr_array) = listing {
        expr_array
    } else {
        // The original bare `panic!()` produced an unhelpful expansion error.
        panic!("test_expand_list: expected an array literal of inputs");
    };
    let empty_ts: proc_macro2::TokenStream = "".parse().unwrap();
    let item = expr_array
        .elems
        .iter()
        .map(|expr| {
            // Unique, identifier-safe name derived from the element's content.
            let gen_fn_ident = fn_ident_from_expr(&fn_ident, expr);
            // Only borrow if the element is not already a reference.
            let ref_symbol_ts: proc_macro2::TokenStream = match expr {
                Expr::Reference(_) => "".parse().unwrap(),
                _ => "&".parse().unwrap(),
            };
            quote! {
                #[test]
                fn #gen_fn_ident() {
                    let local = #ref_symbol_ts #expr;
                    #fn_ident(local);
                }
            }
        })
        .fold(empty_ts, concat_ts);
    item.into()
}
/// `bench_expand_list!(bench_fn ; [input1, input2, ...])`
///
/// Generates one `#[bench]` per array element; each calls
/// `bench_fn(bencher, &element)` (elements that are already references are
/// passed through without an extra `&`). Requires the nightly `test` crate
/// in the caller's scope for `test::Bencher`.
#[proc_macro]
pub fn bench_expand_list(item: TokenStream) -> TokenStream {
    let ExpandList { fn_ident, listing } = parse_macro_input!(item as ExpandList);
    let expr_array = if let Expr::Array(expr_array) = listing {
        expr_array
    } else {
        // The original bare `panic!()` produced an unhelpful expansion error.
        panic!("bench_expand_list: expected an array literal of inputs");
    };
    let empty_ts: proc_macro2::TokenStream = "".parse().unwrap();
    let item = expr_array
        .elems
        .iter()
        .map(|expr| {
            // Unique, identifier-safe name derived from the element's content.
            let gen_fn_ident = fn_ident_from_expr(&fn_ident, expr);
            // Only borrow if the element is not already a reference.
            let ref_symbol_ts: proc_macro2::TokenStream = match expr {
                Expr::Reference(_) => "".parse().unwrap(),
                _ => "&".parse().unwrap(),
            };
            quote! {
                #[bench]
                fn #gen_fn_ident(bencher: &mut test::Bencher) {
                    let local = #ref_symbol_ts #expr;
                    #fn_ident(bencher, local);
                }
            }
        })
        .fold(empty_ts, concat_ts);
    item.into()
}