#![warn(missing_docs)]
use std::sync::atomic::{AtomicUsize, Ordering};
use const_random::const_random;
use derive_syn_parse::Parse;
use macro_magic_core_macros::*;
use proc_macro2::{Delimiter, Group, Punct, Spacing, Span, TokenStream as TokenStream2, TokenTree};
use quote::{ToTokens, TokenStreamExt, format_ident, quote};
use syn::{
Attribute, Error, Expr, FnArg, Ident, Item, ItemFn, Pat, Path, Result, Token, Visibility,
parse::{Nothing, ParseStream},
parse_quote, parse2,
spanned::Spanned,
token::{Brace, Comma},
};
/// Root path of the `macro_magic` crate as seen by generated code; resolved at
/// compile time by `get_macro_magic_root!` (overridable via environment).
pub const MACRO_MAGIC_ROOT: &str = get_macro_magic_root!();
// Monotonic counter used to make generated `macro_rules!` idents unique within one compilation.
static COUNTER: AtomicUsize = AtomicUsize::new(0);
// Random per-compilation tag that disambiguates generated idents across separate compilations.
const COMPILATION_TAG: u32 = const_random!(u32);
/// Custom `syn` keywords used to recognize proc-macro attributes and the internal
/// forwarding marker token.
mod keywords {
    use syn::custom_keyword;

    custom_keyword!(proc_macro_attribute);
    custom_keyword!(proc_macro);
    custom_keyword!(proc_macro_derive);
    // Marker prepended to forwarded token streams so a receiving macro can tell a
    // forwarded invocation apart from a direct user invocation.
    custom_keyword!(__private_macro_magic_tokens_forwarded);
}
/// A brace-delimited stream of extra tokens passed through `forward_tokens!` verbatim.
#[derive(Parse)]
pub struct ForwardTokensExtraArg {
    #[brace]
    _brace: Brace,
    /// The raw tokens that appeared inside the braces.
    #[inside(_brace)]
    pub stream: TokenStream2,
}
impl ToTokens for ForwardTokensExtraArg {
    /// Re-emits the stored stream wrapped in braces, mirroring how it was parsed.
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        tokens.append(Group::new(Delimiter::Brace, self.stream.clone()));
    }
}
/// Arguments accepted by `forward_tokens!`:
/// `source, target [, mm_path [, { extra tokens }]]`.
#[derive(Parse)]
pub struct ForwardTokensArgs {
    /// Path of the `#[export_tokens]`-generated macro whose tokens are forwarded.
    pub source: Path,
    _comma1: Comma,
    /// Path of the macro that will receive the forwarded tokens.
    pub target: Path,
    _comma2: Option<Comma>,
    /// Optional override for the `macro_magic` crate root path.
    #[parse_if(_comma2.is_some())]
    pub mm_path: Option<Path>,
    _comma3: Option<Comma>,
    /// Optional brace-delimited extra tokens passed along verbatim.
    #[parse_if(_comma3.is_some())]
    pub extra: Option<ForwardTokensExtraArg>,
}
/// The payload received by `forward_tokens_inner`: the target macro path, the
/// exported item, and any extra tokens that were attached.
#[derive(Parse)]
pub struct ForwardedTokens {
    /// Path of the macro to invoke with the forwarded item.
    pub target_path: Path,
    _comma1: Comma,
    /// The item whose tokens were exported.
    pub item: Item,
    _comma2: Option<Comma>,
    /// Optional brace-delimited extra tokens.
    #[parse_if(_comma2.is_some())]
    pub extra: Option<ForwardTokensExtraArg>,
}
/// The combined argument bundle received by the outer macro generated by
/// `#[import_tokens_attr]`: the imported item plus a braced triple of
/// `{ tokens_ident }, { source_path }, { custom_tokens }`.
#[derive(Parse)]
pub struct AttrItemWithExtra {
    /// The foreign item whose tokens were imported.
    pub imported_item: Item,
    _comma1: Comma,
    #[brace]
    _brace: Brace,
    #[brace]
    #[inside(_brace)]
    _tokens_ident_brace: Brace,
    /// Tokens of the item the attribute was originally attached to.
    #[inside(_tokens_ident_brace)]
    pub tokens_ident: TokenStream2,
    #[inside(_brace)]
    _comma2: Comma,
    #[brace]
    #[inside(_brace)]
    _source_path_brace: Brace,
    /// Tokens of the path the item was imported from.
    #[inside(_source_path_brace)]
    pub source_path: TokenStream2,
    #[inside(_brace)]
    _comma3: Comma,
    #[brace]
    #[inside(_brace)]
    _custom_tokens_brace: Brace,
    /// Tokens produced by an optional `#[with_custom_parsing]` struct.
    #[inside(_custom_tokens_brace)]
    pub custom_tokens: TokenStream2,
}
/// Arguments to `import_tokens!`, of the form `let <ident> = <path>`.
#[derive(Parse)]
pub struct ImportTokensArgs {
    _let: Token![let],
    /// The variable that will be bound to the imported `TokenStream2`.
    pub tokens_var_ident: Ident,
    _eq: Token![=],
    /// Path of the `#[export_tokens]`-marked item to import.
    pub source_path: Path,
}
/// The payload received by `import_tokens_inner`: the variable ident to bind plus the
/// exported item itself.
#[derive(Parse)]
pub struct ImportedTokens {
    /// The variable the caller asked to bind the tokens to.
    pub tokens_var_ident: Ident,
    _comma: Comma,
    /// The item whose tokens were exported.
    pub item: Item,
}
/// The three flavors of procedural macro a `fn` can define.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum ProcMacroType {
    /// A function-like `#[proc_macro]`.
    Normal,
    /// An `#[proc_macro_attribute]`.
    Attribute,
    /// A `#[proc_macro_derive]`.
    Derive,
}
impl ProcMacroType {
pub fn to_str(&self) -> &'static str {
match self {
ProcMacroType::Normal => "#[proc_macro]",
ProcMacroType::Attribute => "#[proc_macro_attribute]",
ProcMacroType::Derive => "#[proc_macro_derive]",
}
}
pub fn to_attr(&self) -> Attribute {
match self {
ProcMacroType::Normal => parse_quote!(#[proc_macro]),
ProcMacroType::Attribute => parse_quote!(#[proc_macro_attribute]),
ProcMacroType::Derive => parse_quote!(#[proc_macro_derive]),
}
}
}
/// Implemented by custom-parsing structs used with `#[with_custom_parsing]` so the
/// attribute machinery can extract the path of the foreign item being imported.
pub trait ForeignPath {
    /// Returns the path of the external item whose tokens should be imported.
    fn foreign_path(&self) -> &syn::Path;
}
/// A parsed proc-macro `fn` definition along with its classification and the idents
/// of its token-stream parameters.
#[derive(Clone)]
pub struct ProcMacro {
    /// The underlying function definition.
    pub proc_fn: ItemFn,
    /// Which flavor of proc macro this fn defines.
    pub macro_type: ProcMacroType,
    /// Ident of the (last) `TokenStream` parameter.
    pub tokens_ident: Ident,
    /// Ident of the attr `TokenStream` parameter (attribute macros only).
    pub attr_ident: Option<Ident>,
}
impl ProcMacro {
    /// Parses `tokens` as a public proc-macro `fn` definition, classifying it as
    /// `#[proc_macro]`, `#[proc_macro_attribute]`, or `#[proc_macro_derive]`, and
    /// extracting the idents of its token-stream argument(s).
    ///
    /// Errors if the fn is not `pub` or carries none of the three proc-macro attributes.
    pub fn from<T: Into<TokenStream2>>(tokens: T) -> Result<Self> {
        let proc_fn = parse2::<ItemFn>(tokens.into())?;
        let Visibility::Public(_) = proc_fn.vis else {
            return Err(Error::new(proc_fn.vis.span(), "Visibility must be public"));
        };
        let mut macro_type: Option<ProcMacroType> = None;
        // Scan attributes until one of the three proc-macro markers is found.
        let classified = proc_fn.attrs.iter().any(|attr| {
            let path = attr.path().to_token_stream();
            if syn::parse2::<keywords::proc_macro>(path.clone()).is_ok() {
                macro_type = Some(ProcMacroType::Normal);
            } else if syn::parse2::<keywords::proc_macro_attribute>(path.clone()).is_ok() {
                macro_type = Some(ProcMacroType::Attribute);
            } else if syn::parse2::<keywords::proc_macro_derive>(path).is_ok() {
                // Fixed: this branch previously re-checked `keywords::proc_macro`,
                // making the `Derive` classification unreachable.
                macro_type = Some(ProcMacroType::Derive);
            }
            macro_type.is_some()
        });
        if !classified {
            return Err(Error::new(
                proc_fn.sig.ident.span(),
                "can only be attached to a proc macro function definition",
            ));
        }
        // Safe: `classified` guarantees `macro_type` was set above.
        let macro_type = macro_type.unwrap();
        // All proc macro flavors take their item tokens as the final argument.
        let Some(FnArg::Typed(tokens_arg)) = proc_fn.sig.inputs.last() else {
            unreachable!("missing tokens arg");
        };
        let Pat::Ident(tokens_ident) = *tokens_arg.pat.clone() else {
            unreachable!("invalid tokens arg");
        };
        let tokens_ident = tokens_ident.ident;
        // Attribute macros additionally take the attr tokens as the first argument.
        let attr_ident = match macro_type {
            ProcMacroType::Attribute => {
                let Some(FnArg::Typed(attr_arg)) = proc_fn.sig.inputs.first() else {
                    unreachable!("missing attr arg");
                };
                let Pat::Ident(attr_ident) = *attr_arg.pat.clone() else {
                    unreachable!("invalid attr arg");
                };
                Some(attr_ident.ident)
            }
            _ => None,
        };
        Ok(ProcMacro {
            proc_fn,
            macro_type,
            tokens_ident,
            attr_ident,
        })
    }
}
/// Parses `tokens` as a [`ProcMacro`] and verifies it is of the expected
/// `macro_type`, returning a spanned error describing the mismatch otherwise.
pub fn parse_proc_macro_variant<T: Into<TokenStream2>>(
    tokens: T,
    macro_type: ProcMacroType,
) -> Result<ProcMacro> {
    let proc_macro = ProcMacro::from(tokens.into())?;
    if proc_macro.macro_type != macro_type {
        let actual = proc_macro.macro_type.to_str();
        let desired = macro_type.to_str();
        // Fixed: the format arguments were swapped, reporting the found attribute
        // as the expected one and vice versa.
        return Err(Error::new(
            proc_macro.proc_fn.sig.ident.span(),
            format!(
                "expected a function definition with {} but found {} instead",
                desired, actual
            ),
        ));
    }
    Ok(proc_macro)
}
/// Resolves [`MACRO_MAGIC_ROOT`] into a `syn::Path` for use in generated code.
///
/// Panics only if the configured root is not a valid token stream / path, which
/// indicates a misconfiguration rather than a user error.
pub fn macro_magic_root() -> Path {
    let root_tokens = MACRO_MAGIC_ROOT
        .parse::<TokenStream2>()
        .expect("environment var `MACRO_MAGIC_ROOT` must parse to a valid TokenStream2");
    parse2::<Path>(root_tokens)
        .expect("environment variable `MACRO_MAGIC_ROOT` must parse to a valid syn::Path")
}
/// Prefixes `subpath` with `<macro_magic root>::__private::`, yielding a full path
/// into the crate's hidden support module.
pub fn private_path<T: Into<TokenStream2> + Clone>(subpath: &T) -> Path {
    let root = macro_magic_root();
    let subpath: TokenStream2 = subpath.clone().into();
    parse_quote!(#root::__private::#subpath)
}
/// Prefixes `subpath` with the resolved `macro_magic` root path.
pub fn macro_magic_path<T: Into<TokenStream2> + Clone>(subpath: &T) -> Path {
    let root = macro_magic_root();
    let subpath: TokenStream2 = subpath.clone().into();
    parse_quote!(#root::#subpath)
}
/// Converts `input` to `snake_case`.
///
/// Characters that are neither ASCII alphanumerics, underscores, nor whitespace are
/// dropped; runs of whitespace collapse to a single `_`; literal underscores pass
/// through unchanged; an `_` separator is inserted before each non-lowercase
/// character that follows a word character.
pub fn to_snake_case(input: impl Into<String>) -> String {
    let input: String = input.into();
    if input.is_empty() {
        return input;
    }
    // at_break: we just emitted/saw a word break ('_' or whitespace).
    // at_start: no word character has been processed yet.
    let mut at_break = true;
    let mut at_start = true;
    let mut out = String::new();
    for ch in input.chars() {
        // Literal underscores pass through and count as a break. Note: intentionally
        // does NOT clear `at_start`, matching the original state machine.
        if ch == '_' {
            at_break = true;
            out.push('_');
            continue;
        }
        // Drop punctuation/symbols entirely (also without clearing `at_start`).
        if !ch.is_ascii_alphanumeric() && !ch.is_whitespace() {
            continue;
        }
        if !at_start && ch.is_whitespace() {
            // Collapse whitespace runs into a single separator.
            if !at_break {
                out.push('_');
            }
            at_break = true;
        } else {
            // Separator before any non-lowercase char (uppercase or digit), except at
            // the start or immediately after a break. (This is the original two-term
            // boolean condition, simplified: it reduces to `!ch.is_lowercase()`.)
            if !ch.is_lowercase() && !at_start && !at_break {
                out.push('_');
            }
            out.push(ch.to_ascii_lowercase());
            at_break = false;
        }
        at_start = false;
    }
    out
}
/// Returns `ident` lower-snake-cased, preserving its original span.
pub fn flatten_ident(ident: &Ident) -> Ident {
    let snake = to_snake_case(ident.to_string());
    Ident::new(&snake, ident.span())
}
/// Derives the well-known `__export_tokens_tt_*` macro name for an exported item's ident.
pub fn export_tokens_macro_ident(ident: &Ident) -> Ident {
    let flattened = flatten_ident(ident);
    let name = format!("__export_tokens_tt_{}", flattened.to_token_stream());
    Ident::new(&name, Span::call_site())
}
/// Rewrites the final segment of `item_path` into the corresponding
/// `__export_tokens_tt_*` macro name, leaving any leading segments intact.
pub fn export_tokens_macro_path(item_path: &Path) -> Path {
    let mut macro_path = item_path.clone();
    let Some(seg) = macro_path.segments.pop() else {
        unreachable!("must have at least one segment")
    };
    let macro_ident = export_tokens_macro_ident(&seg.into_value().ident);
    macro_path.segments.push(macro_ident.into());
    macro_path
}
/// Generates a globally unique `__export_tokens_tt_*` ident by combining the
/// per-compilation random tag, the flattened item ident, and a monotonic counter.
fn new_unique_export_tokens_ident(ident: &Ident) -> Ident {
    let unique_id = COUNTER.fetch_add(1, Ordering::SeqCst);
    let flattened = flatten_ident(ident).to_token_stream().to_string();
    let name = format!(
        "__export_tokens_tt_{}_{}_{}",
        COMPILATION_TAG, flattened, unique_id
    );
    Ident::new(&name, Span::call_site())
}
/// Core implementation of `#[export_tokens]`.
///
/// Parses `tokens` as a `syn::Item`, determines the name to export under (the item's
/// own ident, or an override supplied in `attr` — required for items with no ident,
/// such as `impl`s), and generates a hidden `#[macro_export]` `macro_rules!` that
/// hands the item's tokens to a caller-supplied callback macro.
///
/// * `attr` - optional override ident.
/// * `tokens` - the item whose tokens are being exported.
/// * `emit` - when true, the original item is also re-emitted in place.
/// * `hide_exported_ident` - when true, the `pub use` alias uses the mangled
///   `__export_tokens_tt_*` name instead of the item's verbatim ident.
pub fn export_tokens_internal<T: Into<TokenStream2>, E: Into<TokenStream2>>(
    attr: T,
    tokens: E,
    emit: bool,
    hide_exported_ident: bool,
) -> Result<TokenStream2> {
    let attr = attr.into();
    let item: Item = parse2(tokens.into())?;
    // Items that intrinsically carry an ident can be exported without a name override.
    let ident = match item.clone() {
        Item::Const(item_const) => Some(item_const.ident),
        Item::Enum(item_enum) => Some(item_enum.ident),
        Item::ExternCrate(item_extern_crate) => Some(item_extern_crate.ident),
        Item::Fn(item_fn) => Some(item_fn.sig.ident),
        Item::Macro(item_macro) => item_macro.ident,
        Item::Mod(item_mod) => Some(item_mod.ident),
        Item::Static(item_static) => Some(item_static.ident),
        Item::Struct(item_struct) => Some(item_struct.ident),
        Item::Trait(item_trait) => Some(item_trait.ident),
        Item::TraitAlias(item_trait_alias) => Some(item_trait_alias.ident),
        Item::Type(item_type) => Some(item_type.ident),
        Item::Union(item_union) => Some(item_union.ident),
        // e.g. `impl` blocks: no intrinsic ident, so `attr` must supply one.
        _ => None,
    };
    // An explicit `attr` ident always wins; otherwise fall back to the item's ident.
    let ident = match ident {
        Some(ident) => {
            if parse2::<Nothing>(attr.clone()).is_ok() {
                ident
            } else {
                parse2::<Ident>(attr)?
            }
        }
        None => parse2::<Ident>(attr)?,
    };
    // The macro_rules! itself gets a globally unique name; the stable name is exposed
    // via `pub use` so downstream crates can find it.
    let macro_ident = new_unique_export_tokens_ident(&ident);
    let ident = if hide_exported_ident {
        export_tokens_macro_ident(&ident)
    } else {
        ident
    };
    let item_emit = match emit {
        true => quote! {
            #[allow(unused)]
            #item
        },
        false => quote!(),
    };
    // Two arms: with and without brace-delimited extra tokens. Both invoke the
    // callback macro with the exported item's tokens.
    let output = quote! {
        #[doc(hidden)]
        #[macro_export]
        macro_rules! #macro_ident {
            (
                $(::)?$($tokens_var:ident)::*,
                $(::)?$($callback:ident)::*,
                { $( $extra:tt )* }
            ) => {
                $($callback)::*! {
                    $($tokens_var)::*,
                    #item,
                    { $( $extra )* }
                }
            };
            ($(::)?$($tokens_var:ident)::*, $(::)?$($callback:ident)::*) => {
                $($callback)::*! {
                    $($tokens_var)::*,
                    #item
                }
            };
        }
        pub use #macro_ident as #ident;
        #item_emit
    };
    Ok(output)
}
pub fn export_tokens_alias_internal<T: Into<TokenStream2>>(
tokens: T,
emit: bool,
hide_exported_ident: bool,
) -> Result<TokenStream2> {
let alias = parse2::<Ident>(tokens.into())?;
let export_tokens_internal_path = macro_magic_path("e!(mm_core::export_tokens_internal));
Ok(quote! {
#[proc_macro_attribute]
pub fn #alias(attr: proc_macro::TokenStream, tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {
match #export_tokens_internal_path(attr, tokens, #emit, #hide_exported_ident) {
Ok(tokens) => tokens.into(),
Err(err) => err.to_compile_error().into(),
}
}
})
}
pub fn import_tokens_internal<T: Into<TokenStream2>>(tokens: T) -> Result<TokenStream2> {
let args = parse2::<ImportTokensArgs>(tokens.into())?;
let source_path = export_tokens_macro_path(&args.source_path);
let inner_macro_path = private_path("e!(import_tokens_inner));
let tokens_var_ident = args.tokens_var_ident;
Ok(quote! {
#source_path! { #tokens_var_ident, #inner_macro_path }
})
}
pub fn import_tokens_inner_internal<T: Into<TokenStream2>>(tokens: T) -> Result<TokenStream2> {
let parsed = parse2::<ImportedTokens>(tokens.into())?;
let tokens_string = parsed.item.to_token_stream().to_string();
let ident = parsed.tokens_var_ident;
let token_stream_2 = private_path("e!(TokenStream2));
Ok(quote! {
let #ident = #tokens_string.parse::<#token_stream_2>().expect("failed to parse quoted tokens");
})
}
pub fn forward_tokens_internal<T: Into<TokenStream2>>(
tokens: T,
hidden_source_path: bool,
) -> Result<TokenStream2> {
let args = parse2::<ForwardTokensArgs>(tokens.into())?;
let mm_path = match args.mm_path {
Some(path) => path,
None => macro_magic_root(),
};
let source_path = if hidden_source_path {
export_tokens_macro_path(&args.source)
} else {
args.source
};
let target_path = args.target;
if let Some(extra) = args.extra {
Ok(quote! {
#source_path! {
#target_path,
#mm_path::__private::forward_tokens_inner,
#extra
}
})
} else {
Ok(quote! {
#source_path! { #target_path, #mm_path::__private::forward_tokens_inner }
})
}
}
/// Second stage of `forward_tokens!`: receives the target macro path, the exported
/// item, and optional extra tokens, and re-invokes the target with a forwarding
/// marker prepended so the target can distinguish forwarded from direct invocations.
pub fn forward_tokens_inner_internal<T: Into<TokenStream2>>(tokens: T) -> Result<TokenStream2> {
    let parsed = parse2::<ForwardedTokens>(tokens.into())?;
    let target_path = parsed.target_path;
    let imported_tokens = parsed.item;
    let tokens_forwarded_keyword = keywords::__private_macro_magic_tokens_forwarded::default();
    // Emitted literally so the generated code contains `#[...]` attribute syntax.
    let pound = Punct::new('#', Spacing::Alone);
    match parsed.extra {
        // With extra tokens, the target is invoked as an attribute macro attached to a
        // throwaway item, which the target is expected to discard.
        Some(extra) => Ok(quote! {
            #pound [#target_path(
                #tokens_forwarded_keyword
                #imported_tokens,
                #extra
            )] type __Discarded = ();
        }),
        // Without extras, the target is invoked as a plain function-like macro.
        None => Ok(quote! {
            #target_path! {
                #tokens_forwarded_keyword
                #imported_tokens
            }
        }),
    }
}
/// Implementation for `#[with_custom_parsing(..)]`.
///
/// Verifies `tokens` is an attribute proc-macro `fn` already marked with the
/// `import_tokens_attr_name` attribute, rejects duplicate usage, and re-emits the fn
/// tagged with `#[with_custom_parsing(<custom struct path>)]` for later processing by
/// `import_tokens_attr_internal`.
pub fn with_custom_parsing_internal<T1: Into<TokenStream2>, T2: Into<TokenStream2>>(
    attr: T1,
    tokens: T2,
    import_tokens_attr_name: &'static str,
) -> Result<TokenStream2> {
    let proc_macro = parse_proc_macro_variant(tokens, ProcMacroType::Attribute)?;
    // True when any attached attribute's final path segment matches `name`.
    let has_attr = |name: &str| {
        proc_macro.proc_fn.attrs.iter().any(|attr| {
            attr.meta
                .path()
                .segments
                .last()
                .map(|seg| seg.ident == name)
                .unwrap_or(false)
        })
    };
    if !has_attr(import_tokens_attr_name) {
        return Err(Error::new(
            Span::call_site(),
            format!(
                "Can only be attached to an attribute proc macro marked with `#[{}]`",
                import_tokens_attr_name
            ),
        ));
    }
    // Fixed: this previously compared against "with_custom_parsing_internal" (this
    // function's own name), which can never match the user-facing attribute, so
    // duplicate attachments were silently accepted.
    if has_attr("with_custom_parsing") {
        return Err(Error::new(
            Span::call_site(),
            "Only one instance of #[with_custom_parsing] can be attached at a time.",
        ));
    }
    let custom_path = parse2::<Path>(attr.into())?;
    let mut item_fn = proc_macro.proc_fn;
    item_fn
        .attrs
        .push(parse_quote!(#[with_custom_parsing(#custom_path)]));
    Ok(quote!(#item_fn))
}
/// An optional override for the `macro_magic` root: either a literal path, or an
/// expression evaluating to something convertible into a `String` path.
enum OverridePath {
    Path(Path),
    Expr(Expr),
}
impl syn::parse::Parse for OverridePath {
    /// Parses either a `Path` or an arbitrary `Expr`; empty input defaults to the
    /// statically resolved macro_magic root.
    fn parse(input: ParseStream) -> Result<Self> {
        if input.is_empty() {
            return Ok(OverridePath::Path(macro_magic_root()));
        }
        // Drain all remaining tokens so both parse attempts see the same stream.
        let mut collected = TokenStream2::new();
        while !input.is_empty() {
            let tt: TokenTree = input.parse()?;
            collected.extend(tt.to_token_stream());
        }
        // Prefer a plain path; fall back to a general expression.
        if let Ok(path) = parse2::<Path>(collected.clone()) {
            return Ok(OverridePath::Path(path));
        }
        parse2::<Expr>(collected)
            .map(OverridePath::Expr)
            .map_err(|mut err| {
                err.combine(Error::new(
                    input.span(),
                    "Expected either a `Path` or an `Expr` that evaluates to something compatible with `Into<String>`."
                ));
                err
            })
    }
}
impl ToTokens for OverridePath {
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        let rendered = match self {
            // Paths are emitted as string literals so generated code can feed them
            // through `String::from` and re-parse them later.
            OverridePath::Path(path) => {
                let as_string = path.to_token_stream().to_string();
                quote!(#as_string)
            }
            OverridePath::Expr(expr) => quote!(#expr),
        };
        tokens.extend(rendered);
    }
}
/// Core implementation of `#[import_tokens_attr]`.
///
/// Rewrites the annotated attribute proc-macro `fn` into a two-stage macro:
/// on a direct invocation it resolves the foreign item's path (optionally via a
/// `#[with_custom_parsing]` struct) and expands to a `forward_tokens!` call that
/// routes the foreign item's tokens back into this macro; on the forwarded
/// (second) invocation it unpacks the combined arguments and runs the user's
/// original macro body with the imported item bound to the attr parameter.
pub fn import_tokens_attr_internal<T1: Into<TokenStream2>, T2: Into<TokenStream2>>(
    attr: T1,
    tokens: T2,
    hidden_source_path: bool,
) -> Result<TokenStream2> {
    let attr = attr.into();
    let mm_override_path = parse2::<OverridePath>(attr)?;
    let mm_path = macro_magic_root();
    let mut proc_macro = parse_proc_macro_variant(tokens, ProcMacroType::Attribute)?;
    // Safe: `parse_proc_macro_variant` guarantees an Attribute macro, which always
    // has an attr argument, so `attr_ident` is `Some`.
    let attr_ident = proc_macro.attr_ident.unwrap();
    let tokens_ident = proc_macro.tokens_ident;
    // If `#[with_custom_parsing(..)]` is attached, the foreign path is extracted via
    // the user's parsing struct (which must implement `ForeignPath`); otherwise the
    // attr tokens are parsed directly as a `syn::Path`.
    let path_resolver = if let Some(index) = proc_macro.proc_fn.attrs.iter().position(|attr| {
        if let Some(seg) = attr.meta.path().segments.last() {
            return seg.ident == "with_custom_parsing";
        }
        false
    }) {
        let custom_attr = &proc_macro.proc_fn.attrs[index];
        let custom_struct_path: Path = custom_attr.parse_args()?;
        // Strip the marker attribute so it is not re-emitted on the generated fn.
        proc_macro.proc_fn.attrs.remove(index);
        quote! {
            let custom_parsed = syn::parse_macro_input!(#attr_ident as #custom_struct_path);
            let path = (&custom_parsed as &dyn ForeignPath).foreign_path();
            let _ = (&custom_parsed as &dyn quote::ToTokens);
        }
    } else {
        quote! {
            let custom_parsed = quote::quote!();
            let path = syn::parse_macro_input!(#attr_ident as syn::Path);
        }
    };
    let orig_sig = proc_macro.proc_fn.sig;
    let orig_stmts = proc_macro.proc_fn.block.stmts;
    let orig_attrs = proc_macro.proc_fn.attrs;
    let orig_sig_ident = &orig_sig.ident;
    let inner_macro_ident = format_ident!("__import_tokens_attr_{}_inner", orig_sig.ident);
    // The inner fn takes only the (combined) attr argument; the tokens argument is
    // reconstructed from the forwarded payload.
    let mut inner_sig = orig_sig.clone();
    inner_sig.ident = inner_macro_ident.clone();
    inner_sig.inputs.pop().unwrap();
    // Emitted literally so `#`-interpolations survive into the generated `quote!` calls.
    let pound = Punct::new('#', Spacing::Alone);
    let output = quote! {
        #(#orig_attrs)
        *
        pub #orig_sig {
            // Inner stage: runs the user's original body once tokens have been forwarded.
            pub #inner_sig {
                let __combined_args = #mm_path::__private::syn::parse_macro_input!(#attr_ident as #mm_path::mm_core::AttrItemWithExtra);
                let #attr_ident: proc_macro::TokenStream = __combined_args.imported_item.to_token_stream().into();
                let #tokens_ident: proc_macro::TokenStream = __combined_args.tokens_ident.into();
                let __source_path: proc_macro::TokenStream = __combined_args.source_path.into();
                let __custom_tokens: proc_macro::TokenStream = __combined_args.custom_tokens.into();
                #(#orig_stmts)
                *
            }
            fn isolated_mm_override_path() -> String {
                String::from(#mm_override_path)
            }
            use #mm_path::__private::*;
            use #mm_path::__private::quote::ToTokens;
            use #mm_path::mm_core::*;
            syn::custom_keyword!(__private_macro_magic_tokens_forwarded);
            // Detect the forwarding marker prepended by `forward_tokens_inner`.
            let mut cloned_attr = #attr_ident.clone().into_iter();
            let first_attr_token = cloned_attr.next();
            let attr_minus_first_token = proc_macro::TokenStream::from_iter(cloned_attr);
            let forwarded = first_attr_token.map_or(false, |token| {
                syn::parse::<__private_macro_magic_tokens_forwarded>(token.into()).is_ok()
            });
            if forwarded {
                // Second invocation: the payload is already here, run the inner stage.
                #inner_macro_ident(attr_minus_first_token)
            } else {
                // First invocation: resolve the foreign path and expand to a
                // `forward_tokens!` call that will re-enter this macro.
                let attached_item = syn::parse_macro_input!(#tokens_ident as syn::Item);
                let attached_item = attached_item.to_token_stream();
                #path_resolver
                let path = path.to_token_stream();
                let custom_parsed = custom_parsed.to_token_stream();
                let mm_override_tokenstream = isolated_mm_override_path().parse().unwrap();
                let resolved_mm_override_path = match syn::parse2::<syn::Path>(mm_override_tokenstream) {
                    Ok(res) => res,
                    Err(err) => return err.to_compile_error().into()
                };
                if #hidden_source_path {
                    quote::quote! {
                        #pound resolved_mm_override_path::forward_tokens! {
                            #pound path,
                            #orig_sig_ident,
                            #pound resolved_mm_override_path,
                            {
                                { #pound attached_item },
                                { #pound path },
                                { #pound custom_parsed }
                            }
                        }
                    }.into()
                } else {
                    quote::quote! {
                        #pound resolved_mm_override_path::forward_tokens_verbatim! {
                            #pound path,
                            #orig_sig_ident,
                            #pound resolved_mm_override_path,
                            {
                                { #pound attached_item },
                                { #pound path },
                                { #pound custom_parsed }
                            }
                        }
                    }.into()
                }
            }
        }
    };
    Ok(output)
}
/// Core implementation of `#[import_tokens_proc]`.
///
/// Rewrites the annotated function-like proc macro into a two-stage macro: on a
/// direct invocation it parses its input as a `syn::Path` and expands to a
/// `forward_tokens!` call routing the foreign item's tokens back into this macro;
/// on the forwarded (second) invocation it runs the user's original body on the
/// imported tokens.
pub fn import_tokens_proc_internal<T1: Into<TokenStream2>, T2: Into<TokenStream2>>(
    attr: T1,
    tokens: T2,
) -> Result<TokenStream2> {
    let attr = attr.into();
    let mm_override_path = parse2::<OverridePath>(attr)?;
    let mm_path = macro_magic_root();
    let proc_macro = parse_proc_macro_variant(tokens, ProcMacroType::Normal)?;
    let orig_sig = proc_macro.proc_fn.sig;
    let orig_stmts = proc_macro.proc_fn.block.stmts;
    let orig_attrs = proc_macro.proc_fn.attrs;
    let orig_sig_ident = &orig_sig.ident;
    let inner_macro_ident = format_ident!("__import_tokens_proc_{}_inner", orig_sig.ident);
    let mut inner_sig = orig_sig.clone();
    inner_sig.ident = inner_macro_ident.clone();
    // NOTE(review): a `#[proc_macro]` fn has exactly one input, so this reversal is
    // effectively a no-op — presumably kept for signature-cloning symmetry; confirm.
    inner_sig.inputs = inner_sig.inputs.iter().rev().cloned().collect();
    let tokens_ident = proc_macro.tokens_ident;
    // Emitted literally so `#`-interpolations survive into the generated `quote!` calls.
    let pound = Punct::new('#', Spacing::Alone);
    Ok(quote! {
        #(#orig_attrs)
        *
        pub #orig_sig {
            // Inner stage: runs the user's original body on the forwarded tokens.
            #inner_sig {
                #(#orig_stmts)
                *
            }
            fn isolated_mm_override_path() -> String {
                String::from(#mm_override_path)
            }
            use #mm_path::__private::*;
            use #mm_path::__private::quote::ToTokens;
            syn::custom_keyword!(__private_macro_magic_tokens_forwarded);
            // Detect the forwarding marker prepended by `forward_tokens_inner`.
            let mut cloned_tokens = #tokens_ident.clone().into_iter();
            let first_token = cloned_tokens.next();
            let tokens_minus_first = proc_macro::TokenStream::from_iter(cloned_tokens);
            let forwarded = first_token.map_or(false, |token| {
                syn::parse::<__private_macro_magic_tokens_forwarded>(token.into()).is_ok()
            });
            if forwarded {
                // Second invocation: run the inner stage on the forwarded payload.
                #inner_macro_ident(tokens_minus_first)
            } else {
                // First invocation: the input must be the path of an exported item.
                use #mm_path::__private::*;
                use #mm_path::__private::quote::ToTokens;
                let source_path = match syn::parse::<syn::Path>(#tokens_ident) {
                    Ok(path) => path,
                    Err(e) => return e.to_compile_error().into(),
                };
                let mm_override_tokenstream = isolated_mm_override_path().parse().unwrap();
                let resolved_mm_override_path = match syn::parse2::<syn::Path>(mm_override_tokenstream) {
                    Ok(res) => res,
                    Err(err) => return err.to_compile_error().into()
                };
                quote::quote! {
                    #pound resolved_mm_override_path::forward_tokens! {
                        #pound source_path,
                        #orig_sig_ident,
                        #pound resolved_mm_override_path
                    }
                }.into()
            }
        }
    })
}
#[cfg(test)]
mod tests {
    //! Unit tests exercising the internal expansion helpers directly on token streams.
    use super::*;

    // An `impl` has no intrinsic ident and no override is supplied, so export fails.
    #[test]
    fn export_tokens_internal_missing_ident() {
        assert!(
            export_tokens_internal(quote!(), quote!(impl MyTrait for Something), true, true)
                .is_err()
        );
    }

    #[test]
    fn export_tokens_internal_normal_no_ident() {
        assert!(
            export_tokens_internal(
                quote!(),
                quote!(
                    struct MyStruct {}
                ),
                true,
                true
            )
            .unwrap()
            .to_string()
            .contains("my_struct")
        );
    }

    #[test]
    fn export_tokens_internal_normal_ident() {
        assert!(
            export_tokens_internal(
                quote!(some_name),
                quote!(
                    struct Something {}
                ),
                true,
                true
            )
            .unwrap()
            .to_string()
            .contains("some_name")
        );
    }

    #[test]
    fn export_tokens_internal_generics_no_ident() {
        assert!(
            export_tokens_internal(
                quote!(),
                quote!(
                    struct MyStruct<T> {}
                ),
                true,
                true
            )
            .unwrap()
            .to_string()
            .contains("__export_tokens_tt_my_struct")
        );
    }

    // Override idents must be bare idents: generics and paths are rejected.
    #[test]
    fn export_tokens_internal_bad_ident() {
        assert!(
            export_tokens_internal(
                quote!(Something<T>),
                quote!(
                    struct MyStruct {}
                ),
                true,
                true
            )
            .is_err()
        );
        assert!(
            export_tokens_internal(
                quote!(some::path),
                quote!(
                    struct MyStruct {}
                ),
                true,
                true
            )
            .is_err()
        );
    }

    #[test]
    fn test_export_tokens_no_emit() {
        assert!(
            export_tokens_internal(
                quote!(some_name),
                quote!(
                    struct Something {}
                ),
                false,
                true
            )
            .unwrap()
            .to_string()
            .contains("some_name")
        );
    }

    // With `hide_exported_ident = false`, the original ident appears verbatim.
    #[test]
    fn export_tokens_internal_verbatim_ident() {
        assert!(
            export_tokens_internal(
                quote!(),
                quote!(
                    struct MyStruct<T> {}
                ),
                true,
                false
            )
            .unwrap()
            .to_string()
            .contains("MyStruct")
        );
    }

    #[test]
    fn import_tokens_internal_simple_path() {
        assert!(
            import_tokens_internal(quote!(let tokens = my_crate::SomethingCool))
                .unwrap()
                .to_string()
                .contains("__export_tokens_tt_something_cool")
        );
    }

    // Only the final path segment is mangled; leading segments are preserved.
    #[test]
    fn import_tokens_internal_flatten_long_paths() {
        assert!(
            import_tokens_internal(quote!(let tokens = my_crate::some_mod::complex::SomethingElse))
                .unwrap()
                .to_string()
                .contains("__export_tokens_tt_something_else")
        );
    }

    #[test]
    fn import_tokens_internal_invalid_token_ident() {
        assert!(import_tokens_internal(quote!(let 3 * 2 = my_crate::something)).is_err());
    }

    #[test]
    fn import_tokens_internal_invalid_path() {
        assert!(import_tokens_internal(quote!(let my_tokens = 2 - 2)).is_err());
    }

    #[test]
    fn import_tokens_inner_internal_basic() {
        assert!(
            import_tokens_inner_internal(quote! {
                my_ident,
                fn my_function() -> u32 {
                    33
                }
            })
            .unwrap()
            .to_string()
            .contains("my_ident")
        );
    }

    #[test]
    fn import_tokens_inner_internal_impl() {
        assert!(
            import_tokens_inner_internal(quote! {
                another_ident,
                impl Something for MyThing {
                    fn something() -> CoolStuff {
                        CoolStuff {}
                    }
                }
            })
            .unwrap()
            .to_string()
            .contains("something ()")
        );
    }

    #[test]
    fn import_tokens_inner_internal_missing_comma() {
        assert!(
            import_tokens_inner_internal(quote! {
                {
                    another_ident
                    impl Something for MyThing {
                        fn something() -> CoolStuff {
                            CoolStuff {}
                        }
                    }
                }
            })
            .is_err()
        );
    }

    #[test]
    fn import_tokens_inner_internal_non_item() {
        assert!(
            import_tokens_inner_internal(quote! {
                {
                    another_ident,
                    2 + 2
                }
            })
            .is_err()
        );
    }

    // Covers camel case, acronym splitting, whitespace collapse, punctuation
    // stripping, underscore preservation, and the empty string.
    #[test]
    fn test_snake_case() {
        assert_eq!(to_snake_case("ThisIsATriumph"), "this_is_a_triumph");
        assert_eq!(
            to_snake_case("IAmMakingANoteHere"),
            "i_am_making_a_note_here"
        );
        assert_eq!(to_snake_case("huge_success"), "huge_success");
        assert_eq!(
            to_snake_case("It's hard to Overstate my satisfaction!!!"),
            "its_hard_to_overstate_my_satisfaction"
        );
        assert_eq!(
            to_snake_case("__aperature_science__"),
            "__aperature_science__"
        );
        assert_eq!(
            to_snake_case("WeDoWhatWeMustBecause!<We, Can>()"),
            "we_do_what_we_must_because_we_can"
        );
        assert_eq!(
            to_snake_case("For_The_Good_of_all_of_us_Except_TheOnes_Who Are Dead".to_string()),
            "for_the_good_of_all_of_us_except_the_ones_who_are_dead"
        );
        assert_eq!(to_snake_case("".to_string()), "");
    }
}