use proc_macro::*;
use yaml_rust::{ YamlLoader, Yaml };
use yaml_rust::yaml::Hash;
use quote::quote;
use quote::format_ident;
use quote::ToTokens;
use proc_macro2::TokenStream as TokenStream2;
use proc_macro2::TokenTree as TokenTree2;
use proc_macro2::Literal as Literal2;
use proc_macro2::Ident as Ident2;
use proc_macro2::Group as Group2;
use syn::{ Result, Path, Expr, Token, LitStr };
use syn::parse::{ ParseStream, Parser };
use syn::punctuated::Punctuated;

macro_rules! panick {
    ($($arg:tt)*) => {
        return quote!(compile_error!($($arg)*)).into()
    }
}

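// Parses the macro input `"path/to/keys.yaml" as table_name` into the file
// path literal and the identifier of the module to generate.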
fn parse_include_args(input: ParseStream) -> Result<(LitStr, syn::Ident)> {
    let path = input.parse()?;
    input.parse::<Token![as]>()?;
    let table_name = input.parse()?;
    Ok((path, table_name))
}

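/// Reads a YAML file at compile time and generates a module of localization
/// tables under the given name.
///
/// The file must contain two YAML documents: an array of language codes and a
/// mapping of keys. A sketch of the intended usage (the file name and key
/// names below are illustrative, not part of this crate):
///
/// ```ignore
/// // keys.yaml:
/// // - en-US
/// // - de-DE
/// // ---
/// // greeting:
/// //   en-US: Hello
/// //   de-DE: Hallo
/// include_keys!("keys.yaml" as loc);
///
/// let greeting = loc::Localizable::localize(&loc::keys::greeting, loc::Language::en_US);
/// ```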
#[proc_macro]
pub fn include_keys(input: TokenStream) -> TokenStream {

    let (path, table_name) = syn::parse_macro_input!(input with parse_include_args);

    let yaml = std::fs::read_to_string(path.value()).expect("Arg 1 to include_keys! should be a valid file!");
    let docs = YamlLoader::load_from_str(&yaml).expect("Keys should be valid YAML!");

    let (langs, keys) = match docs.as_slice() {
        [Yaml::Array(raw_langs), Yaml::Hash(keys)] => {
            let langs = raw_langs.into_iter()
                .map(|s| s.as_str().map(format_lang_code))
                .collect::<Option<Vec<Ident2>>>()
                .expect("All language codes should be strings!");
            (langs, keys)
        },
        _ => panick!("Keys should contain a langs array and a keys mapping in separate YAML documents!")
    };

    let enum_annotations = if cfg!(feature = "serde") {
        quote!( #[derive(serde::Deserialize, serde::Serialize)] )
    } else {
        quote!()
    };

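    // Generated once per table: a `Language` enum with one variant per language
    // code, and a `Localizable` trait whose associated constants hold the
    // per-language values and are selected at runtime by `localize`.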
    let langs_defs = quote! {
        #enum_annotations
        pub enum Language {
            #(#langs),*
        }

        pub trait Localizable<T: 'static + ?Sized> {
            #(const #langs: &'static T;)*
            fn localize(&self, lang: Language) -> &'static T {
                match lang {
                    #(Language::#langs => Self::#langs),*
                }
            }
        }
    };

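    // Recursively turn the YAML key tree into nested modules of `Localizable` tables.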
    let keys_defs = tabulate(format_ident!("keys"), keys);

    quote! {
        mod #table_name {
            #langs_defs
            #keys_defs
        }
    }.into()
}

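// Language codes such as `en-US` are not valid Rust identifiers, so hyphens
// are replaced with underscores (`en_US`).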
fn format_lang_code(code: &str) -> Ident2 {
    let sanitized = code.replace("-", "_");
    format_ident!("{sanitized}")
}

fn key_ident(key: &Yaml) -> Ident2 {
    let key_str = key.as_str().expect("All localization keys should be strings!");
    format_lang_code(key_str)
}

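/// Recursively converts a YAML mapping into Rust items. A mapping whose values
/// are all mappings becomes a nested `pub mod`; a mapping whose values are all
/// strings (or all arrays of strings) becomes a unit struct implementing
/// `Localizable<str>` (or `Localizable<[&'static str]>`), with one associated
/// constant per language.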
fn tabulate(name: Ident2, hash: &Hash) -> TokenStream2 {
    if let Some(hashes) = hash.into_iter().map(
        |(key, value)|
        value.as_hash().map(|h| (key_ident(key), h))
    ).collect::<Option<Vec<(Ident2, &Hash)>>>() {
        let children = hashes.into_iter().map(
            |(key, value)|
            tabulate(key, value)
        );
        quote! {
            pub mod #name {
                use super::Localizable; #(#children)*
            }
        }
    } else if let Some(str_keys) = hash.into_iter().map(
        |(key, value)|
        value.as_str().map(|s| (key_ident(key), s))
    ).collect::<Option<Vec<(Ident2, &str)>>>() {
        let mappings = str_keys.into_iter().map(
            |(lang, string)| {
                let lit = Literal2::string(string);
                quote! {
                    const #lang: &str = #lit;
                }
            }
        );
        quote! {
            pub struct #name;
            impl Localizable<str> for #name {
                #(#mappings)*
            }
        }
    } else if let Some(arr_keys) = hash.into_iter().map(
        |(key, value)|
        value.as_vec().map(
            |a| {
                let strs = a.into_iter()
                    .map(Yaml::as_str)
                    .collect::<Option<Vec<&str>>>()
                    .expect("Arrays should only contain strings!");
                (key_ident(key), strs)
            })
    ).collect::<Option<Vec<(Ident2, Vec<&str>)>>>() {
        let mappings = arr_keys.into_iter().map(
            |(lang, array)| {
                let str_lits = array.into_iter().map(Literal2::string);
                quote! {
                    const #lang: &'static [&'static str] = &[#(#str_lits),*];
                }
            }
        );
        quote! {
            pub struct #name;
            impl Localizable<[&'static str]> for #name {
                #(#mappings)*
            }
        }
    } else {
        panick!("Keyed values should contain either all maps, all strings, or all arrays!")
    }
}

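/// Attribute macro that rewrites `$key.path` substitutions inside the annotated
/// item into `localize` calls against a table generated by `include_keys!`.
///
/// Hedged usage sketch (the `loc` table and `greeting` key are illustrative):
///
/// ```ignore
/// include_keys!("keys.yaml" as loc);
///
/// #[localize(loc[loc::Language::en_US])]
/// fn greet() {
///     println!("{}", $greeting);
/// }
/// ```
///
/// The attribute takes a module path to the table followed by a bracketed
/// expression that evaluates to a `Language`; every `$`-prefixed, dot-separated
/// key path in the body is replaced with the localized value for that language.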
#[proc_macro_attribute]
pub fn localize(args: TokenStream, input: TokenStream) -> TokenStream {
    let (table_path, lang_expr) = syn::parse_macro_input!(args with parse_localize_args);
    let parse_body = LocContextParser { table_path: &table_path, lang_expr: &lang_expr }.parser();
    syn::parse_macro_input!(input with parse_body).into()
}

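/// Function-like counterpart to the `localize` attribute, for use inside a
/// body rather than on an item.
///
/// Hedged usage sketch (assuming a `loc` table with a `greeting` key and a
/// `lang: loc::Language` value in scope):
///
/// ```ignore
/// localize_block! {
///     use loc[lang];
///     println!("{}", $greeting);
/// }
/// ```
///
/// The header accepts either `use table[lang_expr];` or `use table[lang_expr] in`
/// before the tokens to rewrite.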
#[proc_macro]
pub fn localize_block(input: TokenStream) -> TokenStream {
    syn::parse_macro_input!(input with parse_localize_block)
}

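// Parses `use table[lang_expr]` followed by either `;` or `in`, then rewrites
// the remaining tokens in that localization context.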
fn parse_localize_block(stream: ParseStream) -> Result<TokenStream> {
    stream.parse::<Token![use]>()?;
    let (table_path, lang_expr) = stream.call(parse_localize_args)?;
    stream.parse::<Token![;]>().map(|_| ()).or_else(|_| stream.parse::<Token![in]>().map(|_| ()))?;
    let parse_body = LocContextParser { table_path: &table_path, lang_expr: &lang_expr };
    parse_body.parse_from_stream(stream).map(Into::into)
}

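// Parses the shared argument form `path::to::table[lang_expr]`.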
fn parse_localize_args(stream: ParseStream) -> Result<(Path, Expr)> {
    let table_path = Path::parse_mod_style(stream)?;
    let raw_lang_expr; syn::bracketed!(raw_lang_expr in stream);
    let lang_expr = raw_lang_expr.parse()?;
    Ok((table_path, lang_expr))
}

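// `syn::custom_punctuation!` expands to a `pub` struct, so it is wrapped in a
// private module (presumably to keep it out of the crate root, since proc-macro
// crates may only export macros). `LocPrefix` is the `$` token that marks a
// localization substitution.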
mod why_is_this_pub {
    syn::custom_punctuation!(LocPrefix, $);
}

use why_is_this_pub::*;

fn parse_loc_substitution(stream: ParseStream) -> Result<Punctuated<syn::Ident, Token![.]>> {
    stream.parse::<LocPrefix>()?;
    stream.call(Punctuated::parse_separated_nonempty)
}

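// Walks a token stream, replacing every `$a.b.c` substitution with a call to
// `Localizable::localize` on `<table_path>::keys::a::b::c` and recursing into
// nested groups; all other tokens pass through unchanged.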
#[derive(Clone)]
struct LocContextParser<'a, T, L> {
    table_path: &'a T,
    lang_expr: &'a L,
}

impl<'a, T: ToTokens + Clone, L: ToTokens + Clone> LocContextParser<'a, T, L> {
    fn parse_from_stream(&self, stream: ParseStream) -> Result<TokenStream2> {
        let mut built = Vec::new();
        while !stream.is_empty() {
            if let Ok(punctuated) = parse_loc_substitution(stream) {
                let hierarchy = punctuated.iter();
                let table_path = self.table_path;
                let lang_expr = self.lang_expr;
                built.extend(
                    quote!( #table_path::Localizable::localize(
                        &#table_path::keys::#(#hierarchy)::*, #lang_expr
                    ) )
                )
            } else {
                let tt = stream.step(
                    |cursor| cursor.token_tree().ok_or_else(|| stream.error("literally how?"))
                )?;
                built.push(
                    if let TokenTree2::Group(group) = tt {
                        let deep = self.clone().parser().parse2(group.stream())?;
                        Group2::new(group.delimiter(), deep).into()
                    } else {
                        tt
                    }
                )
            }
        }
        Ok(TokenStream2::from_iter(built))
    }

    fn parser(self) -> impl 'a + FnOnce(ParseStream) -> Result<TokenStream2> {
        move |stream: ParseStream| self.parse_from_stream(stream)
    }
}