// jomini/binary/resolver.rs

1use std::{collections::HashMap, io::BufRead};
2
3use crate::Error;
4
/// Resolves binary 16bit tokens to field names
///
/// One can create their own `TokenResolver` or rely on the HashMap implementation
///
/// ```
/// use std::collections::HashMap;
/// use jomini::binary::TokenResolver;
///
/// let mut map = HashMap::new();
/// map.insert(0x2d82, String::from("field1"));
///
/// assert_eq!(map.resolve(0x2d82), Some("field1"));
/// ```
///
/// The HashMap implementation works with string slices as well
///
/// ```
/// use std::collections::HashMap;
/// use jomini::binary::TokenResolver;
///
/// let mut map = HashMap::new();
/// map.insert(0x2d82, "field1");
///
/// assert_eq!(map.resolve(0x0000), None);
/// ```
pub trait TokenResolver {
    /// Return the string field name of the 16bit token if found, else `None`
    fn resolve(&self, token: u16) -> Option<&str>;

    /// Return whether [`TokenResolver::resolve`] will always return `None`.
    ///
    /// By default this returns `false`
    ///
    /// This method is not used by jomini itself, but rather targeted at
    /// downstream save file libraries, who accept an application configured
    /// [`TokenResolver`]. If the application is not configured for ironman
    /// support, save file parsers can still handle plain text files, so
    /// `is_empty` allows the save parsers to lazily check the validity of a
    /// [`TokenResolver`] when the binary format is encountered. Thus, allowing
    /// for better error messages. Instead of "missing field" errors, the save
    /// file libraries can raise a more descriptive "binary file encountered but
    /// tokens are not configured", as only they know if a non-zero amount of
    /// tokens need to be resolved for a successful deserialization.
    ///
    /// There's not a way for jomini to know whether an empty [`TokenResolver`]
    /// constitutes an error, as the client may only be deserializing data from
    /// keys that are already strings. Or, alternatively, direct token
    /// deserialization is exclusively used.
    fn is_empty(&self) -> bool {
        false
    }
}
57
58impl<S, V> TokenResolver for HashMap<u16, V, S>
59where
60    S: ::std::hash::BuildHasher,
61    V: AsRef<str>,
62{
63    fn resolve(&self, token: u16) -> Option<&str> {
64        self.get(&token).map(|x| x.as_ref())
65    }
66
67    fn is_empty(&self) -> bool {
68        self.is_empty()
69    }
70}
71
72impl<T: TokenResolver> TokenResolver for &'_ T {
73    fn resolve(&self, token: u16) -> Option<&str> {
74        (**self).resolve(token)
75    }
76
77    fn is_empty(&self) -> bool {
78        (**self).is_empty()
79    }
80}
81
82impl<T: TokenResolver + ?Sized> TokenResolver for Box<T> {
83    fn resolve(&self, token: u16) -> Option<&str> {
84        (**self).resolve(token)
85    }
86
87    fn is_empty(&self) -> bool {
88        (**self).is_empty()
89    }
90}
91
/// Customize how the deserializer reacts when a token can't be resolved
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum FailedResolveStrategy {
    /// Stop parsing and return an error
    Error,

    /// Stringify the token as hexadecimal
    Stringify,

    /// Ignore the token entirely
    Ignore,
}
104
/// A basic token resolver that facilitates loading tokens from an external
/// source.
///
/// This token resolver is geared towards testing use cases and iteration.
///
/// It is recommended to use a different implementation if performance is a
/// concern.
pub struct BasicTokenResolver {
    // Maps a 16-bit binary token to its resolved field name.
    lookup: HashMap<u16, String>,
}
115
116impl BasicTokenResolver {
117    /// Create resolver from a `BufRead` implementation over a space delimited
118    /// text format:
119    ///
120    /// ```plain
121    /// 0xffff my_test_token
122    /// 0xeeee my_test_token2
123    /// ```
124    pub fn from_text_lines<T>(mut reader: T) -> Result<Self, Error>
125    where
126        T: BufRead,
127    {
128        let mut lookup = HashMap::new();
129        let mut line = String::new();
130        let mut pos = 0;
131        while reader.read_line(&mut line)? != 0 {
132            let (num, text) = line
133                .split_once(' ')
134                .ok_or_else(|| Error::invalid_syntax("expected to split line", pos))?;
135
136            let z = u16::from_str_radix(num.trim_start_matches("0x"), 16)
137                .map_err(|_| Error::invalid_syntax("invalid ironman token", pos))?;
138
139            pos += line.len();
140            lookup.insert(z, String::from(text.trim_ascii_end()));
141            line.clear();
142        }
143
144        Ok(Self { lookup })
145    }
146}
147
148impl TokenResolver for BasicTokenResolver {
149    fn resolve(&self, token: u16) -> Option<&str> {
150        self.lookup.get(&token).map(|x| x.as_str())
151    }
152
153    fn is_empty(&self) -> bool {
154        self.lookup.is_empty()
155    }
156}
157
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn can_create_resolve() {
        let data = b"0xffff my_test_token\n0xeeee my_test_token2";
        let resolver = BasicTokenResolver::from_text_lines(&data[..]).unwrap();
        assert_eq!(resolver.resolve(0xffff), Some("my_test_token"));
        assert_eq!(resolver.resolve(0xeeee), Some("my_test_token2"));
        // Tokens absent from the input are not resolved.
        assert_eq!(resolver.resolve(0x0000), None);
        assert!(!resolver.is_empty());
    }

    #[test]
    fn empty_input_yields_empty_resolver() {
        let resolver = BasicTokenResolver::from_text_lines(&b""[..]).unwrap();
        assert!(resolver.is_empty());
        assert_eq!(resolver.resolve(0xffff), None);
    }
}