use crate::error::TokenizerError;
use crate::vocab::sentencepiece_proto::sentencepiece_model::ModelProto;
use protobuf::Message;
use serde::Deserialize;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::hash::Hash;
use std::io::{BufRead, BufReader, Read};
use std::path::Path;
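/// Inverts a map, producing a new `HashMap` from values to keys. Assumes the
/// input values are unique: duplicate values would collapse into a single
/// entry with an arbitrary surviving key.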
pub(crate) fn swap_key_values<T: Clone, U: Hash + Eq + Copy>(
input_hashmap: &HashMap<T, U>,
) -> HashMap<U, T> {
input_hashmap
.iter()
.map(|(key, &value)| (value, key.clone()))
.collect()
}
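/// Reads a flat (one token per line) vocabulary file. Each trimmed line is
/// mapped to its zero-based line index as the token id.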
pub(crate) fn read_flat_file<P: AsRef<Path>>(
path: P,
) -> Result<HashMap<String, i64>, TokenizerError> {
let f = File::open(&path).map_err(|e| {
TokenizerError::FileNotFound(format!(
"{} vocabulary file not found :{}",
path.as_ref().display(),
e
))
})?;
let br = BufReader::new(f);
let mut data = HashMap::new();
for (index, line) in br.lines().enumerate() {
let line = match line {
Ok(value) => value,
Err(e) => {
return Err(TokenizerError::VocabularyParsingError(e.to_string()));
}
};
data.insert(line.trim().to_owned(), index as i64);
}
Ok(data)
}
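/// Reads a JSON vocabulary file holding a single object that maps tokens to
/// ids, e.g. `{"hello": 0, "world": 1}`.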
pub(crate) fn read_json_file<P: AsRef<Path>>(
path: P,
) -> Result<HashMap<String, i64>, TokenizerError> {
let f = File::open(&path).map_err(|e| {
TokenizerError::FileNotFound(format!(
"{} vocabulary file not found :{}",
path.as_ref().display(),
e
))
})?;
let br = BufReader::new(f);
let values: HashMap<String, i64> = match serde_json::from_reader(br) {
Ok(value) => value,
Err(e) => {
return Err(TokenizerError::VocabularyParsingError(e.to_string()));
}
};
Ok(values)
}
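/// Opens a SentencePiece model file and parses its protobuf contents into a
/// `ModelProto`.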
pub(crate) fn open_protobuf_file<P: AsRef<Path>>(path: P) -> Result<ModelProto, TokenizerError> {
let mut f = File::open(&path).map_err(|e| {
TokenizerError::FileNotFound(format!(
"{} vocabulary file not found :{}",
path.as_ref().display(),
e
))
})?;
    let mut contents = Vec::new();
    f.read_to_end(&mut contents)
        .map_err(|e| TokenizerError::VocabularyParsingError(e.to_string()))?;
    ModelProto::parse_from_bytes(contents.as_slice())
        .map_err(|e| TokenizerError::VocabularyParsingError(e.to_string()))
}
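/// Reads a SentencePiece model file and returns its pieces as a token-to-id
/// map, ids being assigned from the order of the pieces in the model.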
pub(crate) fn read_protobuf_file<P: AsRef<Path>>(
path: P,
) -> Result<HashMap<String, i64>, TokenizerError> {
let proto = open_protobuf_file(path)?;
let mut values = HashMap::new();
for (idx, piece) in proto.get_pieces().iter().enumerate() {
values.insert(piece.get_piece().to_owned(), idx as i64);
}
Ok(values)
}
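/// Reads a JSON special token mapping file and deserializes it into a
/// `SpecialTokenMap`.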
pub(crate) fn read_special_token_mapping_file<P: AsRef<Path>>(
path: P,
) -> Result<SpecialTokenMap, TokenizerError> {
let f = File::open(&path).map_err(|e| {
TokenizerError::FileNotFound(format!(
"{} vocabulary file not found :{}",
path.as_ref().display(),
e
))
})?;
let br = BufReader::new(f);
    serde_json::from_reader(br).map_err(|e| {
        TokenizerError::VocabularyParsingError(format!(
            "Invalid special token mapping file: {e}"
        ))
    })
}
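/// Registers `token` as a special value, copying its id from `values` into
/// `special_values`. Returns a `TokenNotFound` error if the token is missing
/// from the vocabulary.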
pub(crate) fn register_as_special_value(
token: &str,
values: &HashMap<String, i64>,
special_values: &mut HashMap<String, i64>,
) -> Result<(), TokenizerError> {
let token_id = match values.get(token) {
Some(index) => *index,
None => {
return Err(TokenizerError::TokenNotFound(format!(
"The special value {token} could not be found in the vocabulary"
)));
}
};
special_values.insert(String::from(token), token_id);
Ok(())
}
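/// Set of special tokens for a vocabulary. Only the unknown token is
/// mandatory; all other special tokens are optional.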
#[derive(Debug, Default, Clone, Deserialize)]
pub struct SpecialTokenMap {
pub unk_token: String,
pub pad_token: Option<String>,
pub bos_token: Option<String>,
pub sep_token: Option<String>,
pub cls_token: Option<String>,
pub eos_token: Option<String>,
pub mask_token: Option<String>,
pub additional_special_tokens: Option<HashSet<String>>,
}
impl SpecialTokenMap {
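    /// Registers every special token defined in this map as a special value,
    /// propagating an error if any of them is missing from the vocabulary.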
pub(crate) fn register_special_values(
&self,
values: &HashMap<String, i64>,
special_values: &mut HashMap<String, i64>,
) -> Result<(), TokenizerError> {
register_as_special_value(self.unk_token.as_str(), values, special_values)?;
if let Some(pad_token) = &self.pad_token {
register_as_special_value(pad_token, values, special_values)?;
}
if let Some(bos_token) = &self.bos_token {
register_as_special_value(bos_token, values, special_values)?;
}
if let Some(sep_token) = &self.sep_token {
register_as_special_value(sep_token, values, special_values)?;
}
if let Some(cls_token) = &self.cls_token {
register_as_special_value(cls_token, values, special_values)?;
}
if let Some(eos_token) = &self.eos_token {
register_as_special_value(eos_token, values, special_values)?;
}
if let Some(mask_token) = &self.mask_token {
register_as_special_value(mask_token, values, special_values)?;
}
if let Some(additional_special_tokens) = &self.additional_special_tokens {
for token in additional_special_tokens {
register_as_special_value(token, values, special_values)?;
}
}
Ok(())
}
}
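/// Base vocabulary trait: bidirectional token/id lookup falling back to the
/// unknown token, plus constructors from vocabulary files or in-memory values.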
pub trait Vocab {
fn get_unknown_value(&self) -> &str;
fn values(&self) -> &HashMap<String, i64>;
fn indices(&self) -> &HashMap<i64, String>;
fn special_values(&self) -> &HashMap<String, i64>;
fn special_indices(&self) -> &HashMap<i64, String>;
fn values_mut(&mut self) -> &mut HashMap<String, i64>;
fn indices_mut(&mut self) -> &mut HashMap<i64, String>;
fn special_values_mut(&mut self) -> &mut HashMap<String, i64>;
fn special_indices_mut(&mut self) -> &mut HashMap<i64, String>;
fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, TokenizerError>
where
Self: Sized;
fn from_file_with_special_token_mapping<P: AsRef<Path>, S: AsRef<Path>>(
path: P,
special_token_mapping_path: S,
) -> Result<Self, TokenizerError>
where
Self: Sized;
fn from_values_and_special_token_map(
values: HashMap<String, i64>,
special_token_map: SpecialTokenMap,
) -> Result<Self, TokenizerError>
where
Self: Sized;
fn _token_to_id(
&self,
token: &str,
values: &HashMap<String, i64>,
special_values: &HashMap<String, i64>,
unknown_value: &str,
) -> i64 {
match special_values.get(token) {
Some(index) => *index,
None => match values.get(token) {
Some(index) => *index,
                // The unknown token is guaranteed to be present:
                // `register_special_values` fails at construction time if it
                // is missing from the vocabulary.
                None => *values.get(unknown_value).unwrap(),
},
}
}
fn _id_to_token(
&self,
id: &i64,
indices: &HashMap<i64, String>,
special_indices: &HashMap<i64, String>,
unknown_value: &str,
) -> String {
match special_indices.get(id) {
Some(token) => token.clone(),
None => match indices.get(id) {
Some(token) => token.clone(),
None => unknown_value.to_owned(),
},
}
}
fn token_to_id(&self, token: &str) -> i64;
fn id_to_token(&self, id: &i64) -> String;
fn convert_tokens_to_ids(&self, tokens: &[&str]) -> Vec<i64> {
tokens.iter().map(|v| self.token_to_id(v)).collect()
}
fn add_extra_ids(&mut self, num_extra_ids: i64) {
let mut additional_special_tokens: Vec<String> = Vec::with_capacity(num_extra_ids as usize);
for extra_id in 0..num_extra_ids {
additional_special_tokens.push(format!("<extra_id_{extra_id}>"));
}
self.add_tokens(
additional_special_tokens
.iter()
.map(AsRef::as_ref)
.collect::<Vec<&str>>()
.as_slice(),
);
}
fn add_tokens(&mut self, tokens: &[&str]) {
let mut tokens_to_add: Vec<&str> = Vec::with_capacity(tokens.len());
for token in tokens {
if !self.values().contains_key(*token) {
tokens_to_add.push(token);
}
}
let mut current_index = self.values().len() as i64;
for token in tokens_to_add {
self.values_mut().insert(token.to_string(), current_index);
self.indices_mut().insert(current_index, token.to_string());
self.special_values_mut()
.insert(token.to_string(), current_index);
self.special_indices_mut()
.insert(current_index, token.to_string());
current_index += 1;
}
}
}
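/// Base vocabulary backed by a flat token file, using `[UNK]` as the default
/// unknown token.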
#[derive(Debug, Clone)]
pub struct BaseVocab {
pub values: HashMap<String, i64>,
pub indices: HashMap<i64, String>,
pub special_token_map: SpecialTokenMap,
pub special_values: HashMap<String, i64>,
pub special_indices: HashMap<i64, String>,
}
const DEFAULT_UNK_TOKEN: &str = "[UNK]";
impl Vocab for BaseVocab {
fn get_unknown_value(&self) -> &str {
&self.special_token_map.unk_token
}
fn values(&self) -> &HashMap<String, i64> {
&self.values
}
fn indices(&self) -> &HashMap<i64, String> {
&self.indices
}
fn special_values(&self) -> &HashMap<String, i64> {
&self.special_values
}
fn special_indices(&self) -> &HashMap<i64, String> {
&self.special_indices
}
fn values_mut(&mut self) -> &mut HashMap<String, i64> {
&mut self.values
}
fn indices_mut(&mut self) -> &mut HashMap<i64, String> {
&mut self.indices
}
fn special_values_mut(&mut self) -> &mut HashMap<String, i64> {
&mut self.special_values
}
fn special_indices_mut(&mut self) -> &mut HashMap<i64, String> {
&mut self.special_indices
}
fn from_file<P: AsRef<Path>>(path: P) -> Result<BaseVocab, TokenizerError> {
let values = read_flat_file(path)?;
        let special_token_map = SpecialTokenMap {
            unk_token: DEFAULT_UNK_TOKEN.to_string(),
            ..Default::default()
        };
Self::from_values_and_special_token_map(values, special_token_map)
}
fn from_file_with_special_token_mapping<P: AsRef<Path>, S: AsRef<Path>>(
path: P,
special_token_mapping_path: S,
) -> Result<Self, TokenizerError> {
let values = read_flat_file(path)?;
let special_token_map = read_special_token_mapping_file(special_token_mapping_path)?;
Self::from_values_and_special_token_map(values, special_token_map)
}
fn from_values_and_special_token_map(
values: HashMap<String, i64>,
special_token_map: SpecialTokenMap,
) -> Result<Self, TokenizerError>
where
Self: Sized,
{
let mut special_values = HashMap::new();
special_token_map.register_special_values(&values, &mut special_values)?;
let indices = swap_key_values(&values);
let special_indices = swap_key_values(&special_values);
Ok(Self {
values,
indices,
special_token_map,
special_values,
special_indices,
})
}
fn token_to_id(&self, token: &str) -> i64 {
self._token_to_id(
token,
&self.values,
&self.special_values,
self.get_unknown_value(),
)
}
fn id_to_token(&self, id: &i64) -> String {
self._id_to_token(
id,
&self.indices,
&self.special_indices,
self.get_unknown_value(),
)
}
}
#[cfg(test)]
mod tests {
extern crate anyhow;
use super::*;
use std::io::Write;
#[test]
fn test_create_object() {
let values: HashMap<String, i64> = HashMap::new();
let special_values: HashMap<String, i64> = HashMap::new();
let indices: HashMap<i64, String> = HashMap::new();
let special_indices: HashMap<i64, String> = HashMap::new();
        let special_token_map = SpecialTokenMap {
            unk_token: "[UNK]".to_string(),
            ..Default::default()
        };
let base_vocab = BaseVocab {
values,
indices,
special_token_map,
special_values,
special_indices,
};
assert_eq!(base_vocab.get_unknown_value(), "[UNK]");
assert_eq!(base_vocab.values, *base_vocab.values());
assert_eq!(base_vocab.special_values, *base_vocab.special_values());
}
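    // Illustrative test (added): checks that `swap_key_values` inverts a map,
    // using a small hand-written example vocabulary.
    #[test]
    fn test_swap_key_values() {
        let values: HashMap<String, i64> = [("hello".to_owned(), 0), ("world".to_owned(), 1)]
            .iter()
            .cloned()
            .collect();

        let indices = swap_key_values(&values);

        assert_eq!(indices.get(&0), Some(&"hello".to_owned()));
        assert_eq!(indices.get(&1), Some(&"world".to_owned()));
    }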
#[test]
fn test_create_object_from_file() -> anyhow::Result<()> {
let mut vocab_file = tempfile::NamedTempFile::new()?;
write!(vocab_file, "hello \n world \n [UNK] \n !")?;
let path = vocab_file.into_temp_path();
let target_values: HashMap<String, i64> = [
("hello".to_owned(), 0),
("world".to_owned(), 1),
("[UNK]".to_owned(), 2),
("!".to_owned(), 3),
]
.iter()
.cloned()
.collect();
let special_values: HashMap<String, i64> =
[("[UNK]".to_owned(), 2)].iter().cloned().collect();
let base_vocab = BaseVocab::from_file(&path)?;
assert_eq!(base_vocab.get_unknown_value(), "[UNK]");
assert_eq!(base_vocab.values, target_values);
assert_eq!(base_vocab.special_values, special_values);
drop(path);
Ok(())
}
#[test]
#[should_panic]
fn test_create_object_from_file_without_unknown_token() {
let mut vocab_file = tempfile::NamedTempFile::new().unwrap();
write!(vocab_file, "hello \n world \n !").unwrap();
let path = vocab_file.into_temp_path();
let _base_vocab = BaseVocab::from_file(&path).unwrap();
}
#[test]
fn test_encode_tokens() -> anyhow::Result<()> {
let mut vocab_file = tempfile::NamedTempFile::new()?;
write!(vocab_file, "hello \n world \n [UNK] \n !")?;
let path = vocab_file.into_temp_path();
let base_vocab = BaseVocab::from_file(&path)?;
assert_eq!(base_vocab.token_to_id("hello"), 0);
assert_eq!(base_vocab.token_to_id("world"), 1);
assert_eq!(base_vocab.token_to_id("!"), 3);
assert_eq!(base_vocab.token_to_id("[UNK]"), 2);
assert_eq!(base_vocab.token_to_id("oov_value"), 2);
drop(path);
Ok(())
}
#[test]
fn test_decode_tokens() -> anyhow::Result<()> {
let mut vocab_file = tempfile::NamedTempFile::new()?;
write!(vocab_file, "hello \n world \n [UNK] \n !")?;
let path = vocab_file.into_temp_path();
let base_vocab = BaseVocab::from_file(&path)?;
assert_eq!(base_vocab.id_to_token(&(0_i64)), "hello");
assert_eq!(base_vocab.id_to_token(&(1_i64)), "world");
assert_eq!(base_vocab.id_to_token(&(3_i64)), "!");
assert_eq!(base_vocab.id_to_token(&(2_i64)), "[UNK]");
drop(path);
Ok(())
}
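    // Illustrative test (added): exercises the default `add_tokens` and
    // `convert_tokens_to_ids` implementations. The token "<special>" is an
    // arbitrary example value, not part of the original fixtures.
    #[test]
    fn test_add_tokens() -> anyhow::Result<()> {
        let mut vocab_file = tempfile::NamedTempFile::new()?;
        write!(vocab_file, "hello \n world \n [UNK] \n !")?;
        let path = vocab_file.into_temp_path();
        let mut base_vocab = BaseVocab::from_file(&path)?;

        // A new token receives the next free id and is registered both as a
        // regular and as a special value.
        base_vocab.add_tokens(&["<special>"]);

        assert_eq!(base_vocab.token_to_id("<special>"), 4);
        assert_eq!(base_vocab.id_to_token(&(4_i64)), "<special>");
        assert_eq!(
            base_vocab.convert_tokens_to_ids(&["hello", "<special>", "oov_value"]),
            vec![0, 4, 2]
        );
        drop(path);
        Ok(())
    }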
}