rust_tokenizers/vocab/reformer_vocab.rs

// Copyright 2020 The Trax Authors and The HuggingFace Inc. team.
// Copyright (c) 2018, NVIDIA CORPORATION.  All rights reserved.
// Copyright 2020 Guillaume Becquin
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//     http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use crate::error::TokenizerError;
use crate::vocab::base_vocab::{
    read_protobuf_file, read_special_token_mapping_file, swap_key_values, SpecialTokenMap,
};
use crate::vocab::Vocab;
use std::collections::HashMap;
use std::path::Path;

/// # ReformerVocab
/// Vocabulary for the Reformer tokenizer. Contains the following special values:
/// - EOS token
///
/// Expects a SentencePiece protobuf file when created from file.
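///
/// ## Example
///
/// A minimal usage sketch (the model path below is hypothetical):
///
/// ```no_run
/// use rust_tokenizers::vocab::{ReformerVocab, Vocab};
///
/// let vocab = ReformerVocab::from_file("path/to/spiece.model").unwrap();
/// let id = vocab.token_to_id("hello");
/// let token = vocab.id_to_token(&id);
/// ```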
#[derive(Debug, Clone)]
pub struct ReformerVocab {
    /// A mapping of tokens as strings to indices (i.e. the encoder base)
    pub values: HashMap<String, i64>,

    /// A mapping of token IDs to strings (i.e. the decoder base)
    pub indices: HashMap<i64, String>,

    /// Special tokens used by the vocabulary
    pub special_token_map: SpecialTokenMap,

    /// A mapping of special value tokens as strings to IDs (i.e. the encoder base for special
    /// values). Special values typically include BOS/EOS markers, class markers, mask markers
    /// and padding markers.
    pub special_values: HashMap<String, i64>,

    /// A mapping of special value tokens as IDs to strings (i.e. the decoder base for special values)
    pub special_indices: HashMap<i64, String>,
}

const DEFAULT_UNK_TOKEN: &str = "<unk>";
const DEFAULT_EOS_TOKEN: &str = "</s>";

impl ReformerVocab {
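    /// Returns the EOS token for this vocabulary: the value registered in the
    /// special token map when present, otherwise the default `</s>`.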
    pub fn get_eos_value(&self) -> &str {
        self.special_token_map
            .eos_token
            .as_deref()
            .unwrap_or(DEFAULT_EOS_TOKEN)
    }
}

impl Vocab for ReformerVocab {
    fn get_unknown_value(&self) -> &str {
        &self.special_token_map.unk_token
    }

    fn values(&self) -> &HashMap<String, i64> {
        &self.values
    }

    fn indices(&self) -> &HashMap<i64, String> {
        &self.indices
    }

    fn special_values(&self) -> &HashMap<String, i64> {
        &self.special_values
    }

    fn special_indices(&self) -> &HashMap<i64, String> {
        &self.special_indices
    }

    fn values_mut(&mut self) -> &mut HashMap<String, i64> {
        &mut self.values
    }

    fn indices_mut(&mut self) -> &mut HashMap<i64, String> {
        &mut self.indices
    }

    fn special_values_mut(&mut self) -> &mut HashMap<String, i64> {
        &mut self.special_values
    }

    fn special_indices_mut(&mut self) -> &mut HashMap<i64, String> {
        &mut self.special_indices
    }

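    // Reads the vocabulary from a SentencePiece protobuf file and attaches the
    // default special token map (unknown and EOS tokens only).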
    fn from_file<P: AsRef<Path>>(path: P) -> Result<ReformerVocab, TokenizerError> {
        let values = read_protobuf_file(path)?;

        let special_token_map = SpecialTokenMap {
            unk_token: DEFAULT_UNK_TOKEN.to_string(),
            pad_token: None,
            bos_token: None,
            sep_token: None,
            cls_token: None,
            eos_token: Some(DEFAULT_EOS_TOKEN.to_string()),
            mask_token: None,
            additional_special_tokens: None,
        };
        Self::from_values_and_special_token_map(values, special_token_map)
    }

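    // As `from_file`, but reads the special token map from a separate mapping
    // file. A hedged sketch of that file's content (the exact schema is defined
    // by `read_special_token_mapping_file`; a JSON object keyed by the
    // `SpecialTokenMap` field names is assumed here):
    //
    //     {"unk_token": "<unk>", "eos_token": "</s>"}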
    fn from_file_with_special_token_mapping<P: AsRef<Path>, S: AsRef<Path>>(
        path: P,
        special_token_mapping_path: S,
    ) -> Result<Self, TokenizerError> {
        let values = read_protobuf_file(path)?;
        let special_token_map = read_special_token_mapping_file(special_token_mapping_path)?;
        Self::from_values_and_special_token_map(values, special_token_map)
    }

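    // Shared constructor: registers the special tokens against the raw values,
    // then derives the reverse (ID -> token) mappings for both the main and
    // special-value tables.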
    fn from_values_and_special_token_map(
        values: HashMap<String, i64>,
        special_token_map: SpecialTokenMap,
    ) -> Result<Self, TokenizerError>
    where
        Self: Sized,
    {
        let mut special_values = HashMap::new();
        special_token_map.register_special_values(&values, &mut special_values)?;

        let indices = swap_key_values(&values);
        let special_indices = swap_key_values(&special_values);
        Ok(Self {
            values,
            indices,
            special_token_map,
            special_values,
            special_indices,
        })
    }

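    // Token/ID conversions delegate to the base `Vocab` helpers, which are
    // expected to fall back to the unknown token for out-of-vocabulary input.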
    fn token_to_id(&self, token: &str) -> i64 {
        self._token_to_id(
            token,
            &self.values,
            &self.special_values,
            self.get_unknown_value(),
        )
    }

    fn id_to_token(&self, id: &i64) -> String {
        self._id_to_token(
            id,
            &self.indices,
            &self.special_indices,
            self.get_unknown_value(),
        )
    }
}
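
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal sketch of the expected round-trip behaviour, built from a
    // hand-crafted vocabulary rather than a real SentencePiece protobuf file.
    // It assumes the base `Vocab` helpers fall back to the unknown token for
    // out-of-vocabulary inputs.
    #[test]
    fn token_id_round_trip() {
        let mut values = HashMap::new();
        for (id, token) in ["<unk>", "</s>", "hello"].iter().enumerate() {
            values.insert(token.to_string(), id as i64);
        }
        let special_token_map = SpecialTokenMap {
            unk_token: DEFAULT_UNK_TOKEN.to_string(),
            pad_token: None,
            bos_token: None,
            sep_token: None,
            cls_token: None,
            eos_token: Some(DEFAULT_EOS_TOKEN.to_string()),
            mask_token: None,
            additional_special_tokens: None,
        };
        let vocab =
            ReformerVocab::from_values_and_special_token_map(values, special_token_map).unwrap();

        assert_eq!(vocab.token_to_id("hello"), 2);
        assert_eq!(vocab.token_to_id("never-seen"), 0); // falls back to `<unk>`
        assert_eq!(vocab.id_to_token(&1), "</s>");
        assert_eq!(vocab.get_eos_value(), "</s>");
    }
}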