1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
mod load;
mod t;
use std::collections::HashMap;

use load::{default_locale, get_locale};
use proc_macro2::{Literal, TokenStream, TokenTree};
use quote::quote;
use t::{parse_t, RawTokenStream};

/// Emits statements that declare a local `HashMap` named `kv` and insert
/// every key/value pair from `h` into it.
///
/// The generated code is meant to be spliced into a block expression; later
/// generated code looks translations up in `kv`.
fn hashmap_to_tokens(h: &HashMap<String, String>) -> TokenStream {
    // Start with the map declaration, then append one insert per entry.
    let mut out = quote! {
        use std::collections::HashMap;
        let mut kv = HashMap::new();
    };
    for (k, v) in h {
        let k = Literal::string(k);
        let v = Literal::string(v);
        out.extend(quote! { kv.insert(#k, #v); });
    }
    out
}

/// Re-wraps a string literal so its content is surrounded by `{{` … `}}`.
///
/// `l.to_string()` renders the literal *including* its surrounding quotes,
/// so the slice below strips the first and last character before wrapping.
/// The doubled braces survive a later `format!` in the generated code as
/// single literal braces, matching the `{placeholder}` syntax.
fn append(l: Literal) -> Literal {
    let rendered = l.to_string();
    let inner = &rendered[1..rendered.len() - 1];
    // `{{{{` / `}}}}` in the format string emit the literal `{{` / `}}`.
    Literal::string(&format!("{{{{{}}}}}", inner))
}

/// Flattens a token stream into a single string literal.
///
/// Literal tokens are passed through `t::literal_trim` (which strips their
/// surrounding quotes — see the `t` module) so their text is embedded
/// verbatim; every other token kind is rendered via its `Display` impl.
fn into_literal(ts: &TokenStream) -> Literal {
    let mut s = String::new();
    // `TokenStream` is `IntoIterator`; a `for` loop replaces the manual
    // `while let Some(..) = iter.next()` of the original.
    for item in ts.clone() {
        match item {
            // Strip quotes so nested string literals don't get re-quoted.
            TokenTree::Literal(l) => s.push_str(&t::literal_trim(l)),
            // Punct, Ident and Group all render identically through the
            // `TokenTree` Display impl, which delegates to the inner token.
            other => s.push_str(&other.to_string()),
        }
    }
    Literal::string(&s)
}

/// Emits one `value = value.replace(..)` statement per interpolation pair.
///
/// Each pair is `(placeholder_tokens, optional_value_tokens)`. When the
/// value is absent the placeholder tokens double as the value expression
/// (e.g. `t!(.., name)` replaces `{name}` with the variable `name`).
///
/// Takes `&[..]` rather than `&Vec<..>` (idiomatic; callers passing
/// `&vec` still work via deref coercion).
fn replacement_to_tokens(r: &[(TokenStream, Option<TokenStream>)]) -> TokenStream {
    let mut tokens = TokenStream::new();
    for (key, value) in r {
        // Fall back to the key tokens when no explicit value was given.
        let value = value.as_ref().unwrap_or(key);
        // `append(into_literal(..))` turns the key tokens into the
        // `"{{key}}"` literal that the generated `replace` searches for.
        let key = append(into_literal(key));
        tokens.extend(quote! {
            value = value.replace(
                #key,
                &format!("{}", #value)
            );
        });
    }
    tokens
}

/// Expands to a block producing a `String` localized for the given locale,
/// falling back to the default locale's text when no translation exists.
///
/// Trailing arguments are interpolated: each `{name}` placeholder in the
/// localized text is replaced with the formatted argument value.
///
/// # Panics
/// Panics during macro expansion (i.e. at compile time of the caller) when
/// the key is unknown or the default-locale text for the key is missing.
///
/// # Example
/// ```
/// use localization::t;
/// fn main() {
///   let name = "John";
///   let age = 42;
///   let s = t!("ja-JP","default:hello", name, age);
///   println!("{}", s);
/// }
/// ```
#[proc_macro]
pub fn t(item: RawTokenStream) -> RawTokenStream {
    // parse_t splits the macro input into the locale, the translation key,
    // and the (placeholder, optional value) replacement pairs.
    let (locale, key, replacement) = parse_t(item);
    // All translations for this key, keyed by locale — presumably loaded
    // from locale files by the `load` module; see `get_locale`.
    let map = match get_locale().get(&key) {
        Some(map) => map,
        None => panic!("Key not found: {}", key),
    };
    // The default-locale text is the runtime fallback; its absence is a
    // hard error at expansion time.
    let default = match map.get(&default_locale()) {
        Some(default) => default,
        None => panic!("Default locale text not found: {} {}", locale, key),
    };
    let default = Literal::string(default);
    let replacement = replacement_to_tokens(&replacement);
    let map = hashmap_to_tokens(map);
    // Generated code: build the translation map, look up the requested
    // locale at runtime, fall back to the default text, then apply the
    // placeholder replacements and yield the final String.
    quote!(
        {
            #map;
            let value = kv.get(#locale).cloned();
            let mut value = value.unwrap_or(#default).to_string();
            #replacement
            value
        }
    )
    .into()
}