use proc_macro::{Delimiter, Group, Literal, TokenStream, TokenTree};
use std::{env::current_dir, fs::read_to_string};
/// Attribute macro: rewrites every `include_str!("file")` invocation found
/// inside the annotated item into an inline string literal containing the
/// file's contents, read at macro-expansion time from `src/` under the
/// current working directory (see `include_strings`).
///
/// The attribute's own arguments (`_attr`) are ignored.
#[proc_macro_attribute]
pub fn include_first(_attr: TokenStream, item: TokenStream) -> TokenStream {
include_strings(item)
}
/// Recursively walks `stream` and replaces each three-token sequence
/// `include_str` `!` `("literal")` with a string literal holding the named
/// file's contents.
///
/// The path inside the call is resolved as `src/<path>` relative to the
/// current working directory at expansion time — unlike the real
/// `include_str!`, which resolves relative to the invoking source file.
/// Tokens inside nested groups (braces, brackets, parentheses) are
/// processed recursively; all other tokens pass through unchanged.
///
/// # Panics
/// Panics (aborting macro expansion with an error) if the referenced file
/// cannot be read.
fn include_strings(stream: TokenStream) -> TokenStream {
    let input = stream.into_iter().collect::<Vec<_>>();
    let mut output = TokenStream::new();
    let mut i = 0;
    while i < input.len() {
        // Match `include_str` `!` followed by a parenthesized group whose
        // entire content is a single literal token. `Ident::to_string` is
        // used instead of `span().source_text()`, which returns `None` for
        // tokens that did not come directly from source text.
        if let TokenTree::Ident(ident) = &input[i]
            && ident.to_string() == "include_str"
            && let Some(TokenTree::Punct(bang)) = input.get(i + 1)
            && bang.as_char() == '!'
            && let Some(TokenTree::Group(args)) = input.get(i + 2)
            && args.delimiter() == Delimiter::Parenthesis
            && let [TokenTree::Literal(filename)] =
                args.stream().into_iter().collect::<Vec<_>>().as_slice()
        {
            // `Literal::to_string` yields the token's source form including
            // the surrounding quotes, which we strip off. (No panic path,
            // unlike `span().source_text().unwrap()`.)
            let filename = filename.to_string();
            let filename = format!("src/{}", filename.trim_matches('"'));
            let file_contents = match read_to_string(&filename) {
                Err(e) => panic!(
                    "Error reading {filename} from {:?}: {e}",
                    current_dir().unwrap()
                ),
                Ok(contents) => contents,
            };
            output.extend(Some(TokenTree::Literal(Literal::string(&file_contents))));
            i += 3; // consume the ident, the `!`, and the argument group
            continue;
        }
        if let TokenTree::Group(group) = &input[i] {
            // Recurse so invocations nested inside any delimiter are also
            // rewritten. NOTE: `Group::new` does not carry over the original
            // group's span (same as the original implementation).
            let stream = include_strings(group.stream());
            output.extend(Some(TokenTree::Group(Group::new(
                group.delimiter(),
                stream,
            ))));
        } else {
            output.extend(Some(input[i].clone()));
        }
        i += 1;
    }
    output
}