include_glob/
lib.rs

use std::{fs, path::PathBuf};

use glob::glob as glob_inner;
use proc_macro::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree};

/// Includes a file as a reference to a byte array via a glob pattern.
///
/// # Examples
///
/// Assume there is a file `file.123.txt` with contents `hello world`:
///
/// ```rust
/// use include_glob::include_glob_bytes;
///
/// static FILE: &[u8] = include_glob_bytes!("file.*.txt");
/// assert_eq!(FILE, b"hello world");
/// ```
#[proc_macro]
pub fn include_glob_bytes(input: TokenStream) -> TokenStream {
    bytes_to_token_stream(include_glob_inner(input))
}

/// Includes a UTF-8 encoded file as a string via a glob pattern.
///
/// # Examples
///
/// Assume there is a file `file.123.txt` with contents `hello world`:
///
/// ```rust
/// use include_glob::include_glob_str;
///
/// static FILE: &str = include_glob_str!("file.*.txt");
/// assert_eq!(FILE, "hello world");
/// ```
#[proc_macro]
pub fn include_glob_str(input: TokenStream) -> TokenStream {
    str_to_token_stream(include_glob_inner(input))
}
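/// Expands to the file name of the single file matching a glob pattern, as a
/// string literal.
///
/// # Examples
///
/// Assume there is a file `file.123.txt` (the same fixture as in the examples above):
///
/// ```rust
/// use include_glob::glob;
///
/// static FILE_NAME: &str = glob!("file.*.txt");
/// assert_eq!(FILE_NAME, "file.123.txt");
/// ```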
#[proc_macro]
pub fn glob(input: TokenStream) -> TokenStream {
    let path = get_path(input);

    let string = match path.file_name() {
        Some(name) => match name.to_str() {
            Some(string) => string,
            None => panic!("file name is not valid UTF-8"),
        },
        None => panic!("couldn't get file name"),
    };

    TokenStream::from(TokenTree::Literal(Literal::string(string)))
}
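/// Shared body of `include_glob_bytes!` and `include_glob_str!`: resolves the
/// glob pattern in `input` to a single file and returns that file's bytes.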
fn include_glob_inner(input: TokenStream) -> Vec<u8> {
    let path = get_path(input);

    match fs::read(&path) {
        Ok(bytes) => bytes,
        Err(e) => panic!("couldn't read {path}: {e}", path = path.display()),
    }
}
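/// Parses the macro input as a single string literal, interprets it as a glob
/// pattern, and returns the path of the unique file it matches. Panics if the
/// input is not a single string literal, if the pattern is invalid, or if it
/// matches no file or more than one file.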
fn get_path(input: TokenStream) -> PathBuf {
    let mut iter = input.into_iter();

    let pattern = match iter.next() {
        Some(TokenTree::Literal(lit)) => lit.to_string(),
        _ => panic!("input must be a string literal"),
    };

    if iter.next().is_some() {
        panic!("input must be a single string literal");
    }

    if !pattern.starts_with('"') || !pattern.ends_with('"') {
        panic!("this macro only accepts a string argument");
    }

    // Strip the surrounding quotes from the literal's source representation.
    let pattern: &str = &pattern[1..(pattern.len() - 1)];

    let mut files = match glob_inner(pattern) {
        Ok(files) => files,
        Err(e) => panic!("invalid glob pattern: {e}"),
    };

    let path = match files.next() {
        Some(Ok(file)) => file,
        Some(Err(e)) => panic!("couldn't read {path}: {e}", path = e.path().display()),
        None => panic!("no file found that matches pattern {pattern}"),
    };

    // Only one file may match the pattern so that builds are deterministic.
    if files.next().is_some() {
        panic!("pattern matches multiple files");
    }

    path
}
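/// Renders the bytes as a borrowed array expression of unsuffixed byte
/// literals, e.g. `&[104, 101, 108,]`, so the macro output can be used
/// wherever a `&[u8]` is expected.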
fn bytes_to_token_stream(bytes: Vec<u8>) -> TokenStream {
    let mut tt: Vec<TokenTree> = Vec::with_capacity(bytes.len() * 2);

    for byte in bytes {
        tt.push(TokenTree::Literal(Literal::u8_unsuffixed(byte)));
        tt.push(TokenTree::Punct(Punct::new(',', Spacing::Alone)));
    }

    let res: [TokenTree; 2] = [
        TokenTree::Punct(Punct::new('&', Spacing::Alone)),
        TokenTree::Group(Group::new(Delimiter::Bracket, TokenStream::from_iter(tt))),
    ];

    TokenStream::from_iter(res)
}
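/// Renders the bytes as a string literal, panicking if they are not valid UTF-8.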
fn str_to_token_stream(bytes: Vec<u8>) -> TokenStream {
    let string: String = match String::from_utf8(bytes) {
        Ok(string) => string,
        Err(e) => panic!("file is not valid UTF-8: {e}"),
    };

    TokenStream::from(TokenTree::Literal(Literal::string(&string)))
}