use quote::{ToTokens, quote};
use syn::{Attribute, GenericParam, Signature, Type, parse::ParseStream, token};
/// A lightly-parsed function item: outer attributes, visibility, and signature
/// are fully parsed, but the body is kept as a raw `TokenStream` rather than a
/// `syn::Block` (the statements are never walked, which keeps parsing cheap
/// and lossless for re-emission).
#[derive(Clone)]
pub(crate) struct LightFn {
    // Outer `#[...]` attributes preceding the function.
    pub attrs: Vec<Attribute>,
    // `pub`, `pub(crate)`, or inherited visibility.
    pub vis: syn::Visibility,
    // Full signature: fn name, generics, inputs, output, etc.
    pub sig: Signature,
    // The `{ }` delimiter; retained so re-emitted tokens keep its span.
    pub brace_token: token::Brace,
    // Body statements, verbatim and unparsed.
    pub body: proc_macro2::TokenStream,
}
impl syn::parse::Parse for LightFn {
    /// Parses `attrs vis signature { body }`, capturing the body verbatim
    /// instead of descending into its statements.
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let attrs = input.call(Attribute::parse_outer)?;
        let vis = input.parse::<syn::Visibility>()?;
        let sig = input.parse::<Signature>()?;
        // `braced!` consumes the `{ ... }` group, exposing its interior.
        let content;
        let brace_token = syn::braced!(content in input);
        let body = content.parse::<proc_macro2::TokenStream>()?;
        Ok(Self { attrs, vis, sig, brace_token, body })
    }
}
impl ToTokens for LightFn {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
for attr in &self.attrs {
attr.to_tokens(tokens);
}
self.vis.to_tokens(tokens);
self.sig.to_tokens(tokens);
self.brace_token.surround(tokens, |tokens| {
self.body.to_tokens(tokens);
});
}
}
/// Returns every attribute except `#[inline]` / `#[inline(...)]`.
pub(crate) fn filter_inline_attrs(attrs: &[Attribute]) -> Vec<&Attribute> {
    let mut kept = Vec::with_capacity(attrs.len());
    for attr in attrs {
        if !attr.path().is_ident("inline") {
            kept.push(attr);
        }
    }
    kept
}
/// True when `attr` is one of the built-in lint-level attributes
/// (`allow`, `expect`, `deny`, `warn`, `forbid`).
pub(crate) fn is_lint_attr(attr: &Attribute) -> bool {
    const LINT_LEVELS: [&str; 5] = ["allow", "expect", "deny", "warn", "forbid"];
    LINT_LEVELS.iter().any(|level| attr.path().is_ident(level))
}
/// Returns only the lint-level attributes (see `is_lint_attr`).
pub(crate) fn filter_lint_attrs(attrs: &[Attribute]) -> Vec<&Attribute> {
    let mut lints = Vec::new();
    for attr in attrs {
        if is_lint_attr(attr) {
            lints.push(attr);
        }
    }
    lints
}
/// Builds a `#[cfg(...)]` attribute guarding on target arch and/or a cargo
/// feature. With both set, the conditions are combined with `all(...)`;
/// with neither, an empty stream is returned (no guard emitted).
pub(crate) fn gen_cfg_guard(
    target_arch: Option<&str>,
    cfg_feature: Option<&str>,
) -> proc_macro2::TokenStream {
    if let Some(arch) = target_arch {
        if let Some(feat) = cfg_feature {
            quote! { #[cfg(all(target_arch = #arch, feature = #feat))] }
        } else {
            quote! { #[cfg(target_arch = #arch)] }
        }
    } else if let Some(feat) = cfg_feature {
        quote! { #[cfg(feature = #feat)] }
    } else {
        proc_macro2::TokenStream::new()
    }
}
/// Builds a turbofish (`::<T, N, ...>`) from the type and const parameters of
/// `generics`. Lifetimes are skipped; when there are no type/const parameters
/// an empty stream is returned so call sites need no turbofish at all.
pub(crate) fn build_turbofish(generics: &syn::Generics) -> proc_macro2::TokenStream {
    let mut idents = Vec::new();
    for param in &generics.params {
        match param {
            GenericParam::Type(tp) => idents.push(tp.ident.clone()),
            GenericParam::Const(cp) => idents.push(cp.ident.clone()),
            // Lifetimes are elided at the call site.
            GenericParam::Lifetime(_) => {}
        }
    }
    if idents.is_empty() {
        proc_macro2::TokenStream::new()
    } else {
        quote! { ::<#(#idents),*> }
    }
}
/// Replaces every bare identifier equal to `target` in `tokens` with
/// `replacement`, recursing into delimited groups. Group delimiters and spans
/// are preserved; non-matching tokens pass through untouched.
pub(crate) fn replace_ident_in_tokens(
    tokens: proc_macro2::TokenStream,
    target: &str,
    replacement: &proc_macro2::TokenStream,
) -> proc_macro2::TokenStream {
    tokens
        .into_iter()
        .map(|tt| match tt {
            proc_macro2::TokenTree::Ident(ident) if ident == target => replacement.clone(),
            proc_macro2::TokenTree::Group(group) => {
                // Recurse so idents nested in (), [], {} are rewritten too.
                let rewritten = replace_ident_in_tokens(group.stream(), target, replacement);
                let mut rebuilt = proc_macro2::Group::new(group.delimiter(), rewritten);
                rebuilt.set_span(group.span());
                proc_macro2::TokenTree::Group(rebuilt).into()
            }
            other => other.into(),
        })
        .collect()
}
/// Substitutes every bare `Self` identifier in `tokens` with the concrete
/// type `replacement`, recursing into groups.
pub(crate) fn replace_self_in_tokens(
    tokens: proc_macro2::TokenStream,
    replacement: &Type,
) -> proc_macro2::TokenStream {
    let concrete = replacement.to_token_stream();
    replace_ident_in_tokens(tokens, "Self", &concrete)
}
pub(crate) fn generate_imports(
target_arch: Option<&str>,
magetypes_namespace: Option<&str>,
import_intrinsics: bool,
import_magetypes: bool,
) -> proc_macro2::TokenStream {
let mut imports = proc_macro2::TokenStream::new();
if import_intrinsics && let Some(arch) = target_arch {
let arch_ident = quote::format_ident!("{}", arch);
imports.extend(quote! {
#[allow(unused_imports)]
use archmage::intrinsics::#arch_ident::*;
});
}
if import_magetypes && let Some(ns) = magetypes_namespace {
let ns_ident = quote::format_ident!("{}", ns);
imports.extend(quote! {
#[allow(unused_imports)]
use magetypes::simd::#ns_ident::*;
#[allow(unused_imports)]
use magetypes::simd::backends::*;
});
}
imports
}
/// Crate-visible wrapper around `is_bare_ident`: true when `expr` is a
/// path expression with no qself and exactly one segment whose ident
/// equals `name`.
pub(crate) fn is_bare_ident_pub(expr: &syn::Expr, name: &str) -> bool {
    is_bare_ident(expr, name)
}
/// True when `expr` is a path expression with no qself and exactly one
/// segment whose ident equals `name`.
/// NOTE(review): a leading `::` is not checked, so `::Foo` also matches —
/// confirm that is acceptable for callers.
fn is_bare_ident(expr: &syn::Expr, name: &str) -> bool {
    let syn::Expr::Path(path_expr) = expr else {
        return false;
    };
    path_expr.qself.is_none()
        && path_expr.path.segments.len() == 1
        && path_expr.path.segments[0].ident == name
}
/// Where the capability token appears in a call's argument list
/// (see `find_token_placement`).
pub(crate) enum TokenPlacement {
    // Some argument is the bare ident `Token`.
    Explicit,
    // The argument at this index is the caller's own token variable.
    Variable(usize),
    // No token argument was found.
    None,
}
/// Locates the capability token among `args`. A bare `Token` ident anywhere
/// wins (`Explicit`); otherwise, if the caller supplied its own token variable
/// name, the first argument matching that name wins (`Variable(index)`);
/// otherwise `None`.
pub(crate) fn find_token_placement(
    args: &[syn::Expr],
    caller_token_ident: Option<&str>,
) -> TokenPlacement {
    if args.iter().any(|arg| is_bare_ident(arg, "Token")) {
        return TokenPlacement::Explicit;
    }
    if let Some(name) = caller_token_ident {
        if let Some(idx) = args.iter().position(|arg| is_bare_ident(arg, name)) {
            return TokenPlacement::Variable(idx);
        }
    }
    TokenPlacement::None
}
/// Builds the argument list for a generated call, inserting `token_expr` as
/// the capability token. Convenience wrapper over
/// `build_call_args_with_ident` with no caller-side token variable name.
pub(crate) fn build_call_args(
    args: &[syn::Expr],
    token_expr: &proc_macro2::TokenStream,
) -> proc_macro2::TokenStream {
    build_call_args_with_ident(args, token_expr, None)
}
/// Builds a comma-separated argument list, splicing `token_expr` into the
/// position dictated by `find_token_placement`:
/// - `Explicit`: every bare `Token` ident inside the args is rewritten to
///   `token_expr` (including ones nested in groups);
/// - `Variable(i)`: argument `i` is replaced wholesale by `token_expr`;
/// - `None`: `token_expr` is prepended before the original args.
pub(crate) fn build_call_args_with_ident(
    args: &[syn::Expr],
    token_expr: &proc_macro2::TokenStream,
    caller_token_ident: Option<&str>,
) -> proc_macro2::TokenStream {
    match find_token_placement(args, caller_token_ident) {
        TokenPlacement::Explicit => {
            let rewritten = args
                .iter()
                .map(|arg| replace_ident_in_tokens(arg.to_token_stream(), "Token", token_expr));
            quote! { #(#rewritten),* }
        }
        TokenPlacement::Variable(token_idx) => {
            let rewritten = args.iter().enumerate().map(|(idx, arg)| {
                if idx == token_idx {
                    token_expr.clone()
                } else {
                    arg.to_token_stream()
                }
            });
            quote! { #(#rewritten),* }
        }
        TokenPlacement::None => quote! { #token_expr, #(#args),* },
    }
}
/// Builds call arguments with `archmage::ScalarToken` as the capability token.
pub(crate) fn build_scalar_call_args(args: &[syn::Expr]) -> proc_macro2::TokenStream {
    build_call_args(args, &quote! { archmage::ScalarToken })
}
/// Clones `path` with `_{suffix}` appended to its final segment's ident
/// (e.g. `foo::bar` + `"avx2"` -> `foo::bar_avx2`). A path with no segments
/// is returned unchanged.
pub(crate) fn suffix_path(path: &syn::Path, suffix: &str) -> syn::Path {
    let mut renamed = path.clone();
    if let Some(tail) = renamed.segments.last_mut() {
        tail.ident = quote::format_ident!("{}_{}", tail.ident, suffix);
    }
    renamed
}