// rust_tokenizers/vocab/reformer_vocab.rs
use crate::error::TokenizerError;
use crate::vocab::base_vocab::{
    read_protobuf_file, read_special_token_mapping_file, swap_key_values, SpecialTokenMap,
};
use crate::vocab::Vocab;
use std::collections::HashMap;
use std::path::Path;

/// # ReformerVocab
/// Vocabulary for the Reformer tokenizer. Contains the following special values:
/// - EOS token
///
/// Expects a SentencePiece protobuf file when created from file.
#[derive(Debug, Clone)]
pub struct ReformerVocab {
    /// A mapping of tokens as strings to indices (i.e. the encoder base)
    pub values: HashMap<String, i64>,

    /// A mapping of token ids to strings (i.e. the decoder base)
    pub indices: HashMap<i64, String>,

    /// Special tokens used by the vocabulary
    pub special_token_map: SpecialTokenMap,

    /// A mapping of special tokens as strings to indices (i.e. the encoder base for special tokens)
    pub special_values: HashMap<String, i64>,

    /// A mapping of special token ids to strings (i.e. the decoder base for special tokens)
    pub special_indices: HashMap<i64, String>,
}

const DEFAULT_UNK_TOKEN: &str = "<unk>";
const DEFAULT_EOS_TOKEN: &str = "</s>";

impl ReformerVocab {
    /// Returns the EOS token for this vocabulary, falling back to the default
    /// `</s>` if no EOS token was registered in the special token map.
    pub fn get_eos_value(&self) -> &str {
        self.special_token_map
            .eos_token
            .as_deref()
            .unwrap_or(DEFAULT_EOS_TOKEN)
    }
}

impl Vocab for ReformerVocab {
    fn get_unknown_value(&self) -> &str {
        &self.special_token_map.unk_token
    }

    fn values(&self) -> &HashMap<String, i64> {
        &self.values
    }

    fn indices(&self) -> &HashMap<i64, String> {
        &self.indices
    }

    fn special_values(&self) -> &HashMap<String, i64> {
        &self.special_values
    }

    fn special_indices(&self) -> &HashMap<i64, String> {
        &self.special_indices
    }

    fn values_mut(&mut self) -> &mut HashMap<String, i64> {
        &mut self.values
    }

    fn indices_mut(&mut self) -> &mut HashMap<i64, String> {
        &mut self.indices
    }

    fn special_values_mut(&mut self) -> &mut HashMap<String, i64> {
        &mut self.special_values
    }

    fn special_indices_mut(&mut self) -> &mut HashMap<i64, String> {
        &mut self.special_indices
    }

    fn from_file<P: AsRef<Path>>(path: P) -> Result<ReformerVocab, TokenizerError> {
        let values = read_protobuf_file(path)?;

        // Reformer registers only an unknown token and an EOS token by default.
        let special_token_map = SpecialTokenMap {
            unk_token: DEFAULT_UNK_TOKEN.to_string(),
            pad_token: None,
            bos_token: None,
            sep_token: None,
            cls_token: None,
            eos_token: Some(DEFAULT_EOS_TOKEN.to_string()),
            mask_token: None,
            additional_special_tokens: None,
        };
        Self::from_values_and_special_token_map(values, special_token_map)
    }

    fn from_file_with_special_token_mapping<P: AsRef<Path>, S: AsRef<Path>>(
        path: P,
        special_token_mapping_path: S,
    ) -> Result<Self, TokenizerError> {
        let values = read_protobuf_file(path)?;
        let special_token_map = read_special_token_mapping_file(special_token_mapping_path)?;
        Self::from_values_and_special_token_map(values, special_token_map)
    }

    fn from_values_and_special_token_map(
        values: HashMap<String, i64>,
        special_token_map: SpecialTokenMap,
    ) -> Result<Self, TokenizerError>
    where
        Self: Sized,
    {
        // Collect the ids of the special tokens into a dedicated map.
        let mut special_values = HashMap::new();
        special_token_map.register_special_values(&values, &mut special_values)?;

        // Build the reverse (id -> token) lookup maps.
        let indices = swap_key_values(&values);
        let special_indices = swap_key_values(&special_values);
        Ok(Self {
            values,
            indices,
            special_token_map,
            special_values,
            special_indices,
        })
    }

    fn token_to_id(&self, token: &str) -> i64 {
        self._token_to_id(
            token,
            &self.values,
            &self.special_values,
            self.get_unknown_value(),
        )
    }

    fn id_to_token(&self, id: &i64) -> String {
        self._id_to_token(
            id,
            &self.indices,
            &self.special_indices,
            self.get_unknown_value(),
        )
    }
}
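
// The sketch below is not part of the original file: it is a minimal usage example,
// assuming a SentencePiece protobuf vocabulary exists at a placeholder path
// ("path/to/spiece.model"); the `example_usage` helper is purely illustrative.
#[allow(dead_code)]
fn example_usage() -> Result<(), TokenizerError> {
    // Load the vocabulary from a SentencePiece protobuf file (placeholder path).
    let vocab = ReformerVocab::from_file("path/to/spiece.model")?;

    // Tokens not present in the vocabulary resolve to the id of the unknown token
    // (`<unk>` by default).
    let id = vocab.token_to_id("hello");
    let _token = vocab.id_to_token(&id);

    // The EOS token defaults to `</s>` unless a special token mapping overrides it.
    let _eos = vocab.get_eos_value();
    Ok(())
}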