use crate::prelude::*;
use beet_core::prelude::*;
use proc_macro2::TokenStream;
use proc_macro2::TokenTree;
use quote::ToTokens;
use std::hash::Hash;
use std::hash::Hasher;
/// Hashes the Rust tokens of a source file while skipping the bodies of
/// template ("snippet") macro invocations, so that edits inside those
/// macros do not change the resulting hash.
pub(super) struct HashNonSnippetRust<'a, H> {
// Names of the template macros whose invocations should be skipped.
pub macros: &'a TemplateMacros,
// Destination hasher that accumulates every non-snippet token.
pub hasher: &'a mut H,
}
impl<H: Hasher> HashNonSnippetRust<'_, H> {
	/// Parse `file` as Rust source and feed every token *outside* of
	/// template-macro invocations into `self.hasher`.
	///
	/// Files without a `.rs` extension are ignored and return `Ok(())`
	/// without hashing anything.
	///
	/// # Errors
	/// Returns an error if the file cannot be read or fails to parse
	/// as Rust.
	pub fn hash(&mut self, file: &SourceFile) -> Result<()> {
		match file.extension() {
			Some(ex) if ex == "rs" => {
				let file_content = fs_ext::read_to_string(file)?;
				let parsed_file =
					syn::parse_file(&file_content).map_err(|err| {
						bevyhow!(
							"Failed to parse file: {}\n{}",
							file.display(),
							err
						)
					})?;
				self.walk_tokens(parsed_file.to_token_stream())?;
				Ok(())
			}
			// Not a Rust file: nothing to hash.
			_ => Ok(()),
		}
	}
	/// Recursively hash `tokens`, skipping entire `name ! ( .. )`
	/// invocations of the configured template macro.
	///
	/// Fixes two token-drop bugs in the previous version:
	/// - an ident matching the macro name followed by a punct other
	///   than `!` (e.g. `rsx = 5;`) was never hashed;
	/// - `ident !` without a following group consumed and dropped both
	///   the ident and the `!` (and the token after it) unhashed.
	fn walk_tokens(&mut self, tokens: TokenStream) -> Result<()> {
		let mut iter = tokens.into_iter().peekable();
		while let Some(tree) = iter.next() {
			match &tree {
				TokenTree::Ident(ident)
					if ident.to_string() == self.macros.rstml =>
				{
					let bang_next = matches!(
						iter.peek(),
						Some(TokenTree::Punct(p)) if p.as_char() == '!'
					);
					if bang_next {
						let bang = iter.next().expect("peeked above");
						if matches!(iter.peek(), Some(TokenTree::Group(_))) {
							// Full snippet invocation `rstml! { .. }`:
							// skip the ident, the bang and the body.
							iter.next();
							continue;
						}
						// A bang without a following group is not a
						// macro invocation: hash both tokens normally.
						ident.to_string().hash(self.hasher);
						bang.to_string().hash(self.hasher);
					} else {
						// Bare ident that merely shares the macro name.
						ident.to_string().hash(self.hasher);
					}
				}
				TokenTree::Group(group) => {
					// Recurse so nested snippet macros are also skipped.
					// NOTE(review): the group's delimiters are not hashed,
					// so e.g. `(a)` and `[a]` collide — confirm intended.
					self.walk_tokens(group.stream())?;
				}
				tree => {
					tree.to_string().hash(self.hasher);
				}
			}
		}
		Ok(())
	}
}
#[cfg(test)]
mod test {
	use super::*;
	use proc_macro2::TokenStream;
	use quote::quote;
	use std::hash::Hasher;

	/// Hash `tokens` with the default template macros and return the digest.
	fn hash(tokens: TokenStream) -> u64 {
		let mut hasher = FixedHasher::default().build_hasher();
		let mut walker = HashNonSnippetRust {
			macros: &TemplateMacros::default(),
			hasher: &mut hasher,
		};
		walker.walk_tokens(tokens).unwrap();
		hasher.finish()
	}

	#[test]
	#[rustfmt::skip]
	fn works() {
		// Snippet macro bodies are ignored: changing them keeps the hash stable.
		hash(quote! {rsx!{1}}).xpect_eq(hash(quote! {rsx!{2}}));
		hash(quote! {rsx!{1} rsx!{1}}).xpect_eq(hash(quote! {rsx!{2} rsx!{2}}));
		// Ordinary tokens still affect the hash.
		hash(quote! {foo}).xpect_not_eq(hash(quote! {bar}));
		hash(quote! {println!(foo)}).xpect_not_eq(hash(quote! {println!(bar)}));
	}
}