pub struct FullTokenizer { /* private fields */ }
Expand description
A FullTokenizer that runs basic tokenization and WordPiece tokenization.
§Example
A full tokenizer can be built from a vocabulary as HashMap.
use bert_tokenizer::{FullTokenizer, Tokenizer, Vocab};
let mut vocab = Vocab::new();
vocab.insert("hello".to_string(), 0);
vocab.insert("world".to_string(), 1);
vocab.insert("!".to_string(), 2);
vocab.insert(",".to_string(), 3);
vocab.insert("##,".to_string(), 4);
vocab.insert("##!".to_string(), 5);
vocab.insert("##world".to_string(), 6);
vocab.insert("##hello".to_string(), 7);
let tokenizer = FullTokenizer::new().vocab(vocab).do_lower_case(true).build();
let tokens = tokenizer.tokenize("Hello, World!");
assert_eq!(tokens, vec!["hello", ",", "world", "!"]);
Or from a vocabulary file.
use bert_tokenizer::{FullTokenizer, Tokenizer};
let tokenizer = FullTokenizer::new().vocab_from_file("tests/cased_L-12_H-768_A-12/vocab.txt").build();
let tokens = tokenizer.tokenize("Hello, World!");
assert_eq!(tokens, vec!["Hello", ",", "World", "!"]);
You can also specify whether to do lower case.
use bert_tokenizer::{FullTokenizer, Tokenizer};
let tokenizer = FullTokenizer::new().vocab_from_file("tests/uncased_L-12_H-768_A-12/vocab.txt").do_lower_case(true).build();
let tokens = tokenizer.tokenize("Hello, World!");
assert_eq!(tokens, vec!["hello", ",", "world", "!"]);
Implementations§
Source§impl FullTokenizer
impl FullTokenizer
pub fn new() -> FullTokenizerBuilder
Source§
pub fn convert_tokens_to_ids(&self, tokens: &Vec<String>) -> Vec<u32>
pub fn convert_tokens_to_ids(&self, tokens: &Vec<String>) -> Vec<u32>
Converts a sequence of tokens to a sequence of ids.
§Example
use bert_tokenizer::{FullTokenizer, Tokenizer, Vocab};
let mut vocab = Vocab::new();
vocab.insert("hello".to_string(), 0);
vocab.insert("world".to_string(), 1);
vocab.insert("!".to_string(), 2);
vocab.insert("##!".to_string(), 3);
vocab.insert("##world".to_string(), 4);
vocab.insert("##hello".to_string(), 5);
let tokenizer = FullTokenizer::new().vocab(vocab).build();
let ids = tokenizer.convert_tokens_to_ids(&vec!["hello".to_string(), "!".to_string(), "world".to_string(), "##!".to_string()]);
assert_eq!(ids, vec![0, 2, 1, 3]);
§Arguments
tokens- A sequence of tokens.
§Returns
A sequence of ids.
Source§
pub fn convert_ids_to_tokens(&self, ids: &Ids) -> Vec<String>
pub fn convert_ids_to_tokens(&self, ids: &Ids) -> Vec<String>
Converts a sequence of ids to a sequence of tokens.
§Example
use bert_tokenizer::{FullTokenizer, Tokenizer, Vocab};
let mut vocab = Vocab::new();
vocab.insert("hello".to_string(), 0);
vocab.insert("world".to_string(), 1);
vocab.insert("!".to_string(), 2);
vocab.insert("##!".to_string(), 3);
vocab.insert("##world".to_string(), 4);
vocab.insert("##hello".to_string(), 5);
let tokenizer = FullTokenizer::new().vocab(vocab).build();
let tokens = tokenizer.convert_ids_to_tokens(&vec![0, 2, 4, 3]);
assert_eq!(tokens, vec!["hello", "!", "##world", "##!"]);
§Arguments
ids- A sequence of ids.
§Returns
A sequence of tokens.
Source§
pub fn convert_tokens_to_string(&self, tokens: &[String]) -> String
pub fn convert_tokens_to_string(&self, tokens: &[String]) -> String
Converts a sequence of subword tokens to a single text string.
§Example
use bert_tokenizer::{FullTokenizer, Tokenizer, Vocab};
let mut vocab = Vocab::new();
vocab.insert("hello".to_string(), 0);
vocab.insert("world".to_string(), 1);
vocab.insert("!".to_string(), 2);
vocab.insert("##!".to_string(), 3);
vocab.insert("##world".to_string(), 4);
vocab.insert("##hello".to_string(), 5);
let tokenizer = FullTokenizer::new().vocab(vocab).build();
let text = "hello, world!";
let tokens = tokenizer.tokenize(text);
let text2 = tokenizer.convert_tokens_to_string(&tokens);
println!("Before: {} -> Tokens: {:?} -> After: {}", text, tokens, text2);
§Arguments
tokens- A sequence of tokens.
§Returns
A single text string.
Source§
pub fn get_vocab_words(&self) -> Vec<String>
pub fn get_vocab_words(&self) -> Vec<String>
Get subword tokens from the vocabulary.
§Example
use bert_tokenizer::{FullTokenizer, Tokenizer, Vocab};
let mut vocab = Vocab::new();
vocab.insert("hello".to_string(), 0);
vocab.insert("world".to_string(), 1);
vocab.insert("!".to_string(), 2);
vocab.insert("##!".to_string(), 3);
vocab.insert("##world".to_string(), 4);
vocab.insert("##hello".to_string(), 5);
let tokenizer = FullTokenizer::new().vocab(vocab).build();
let tokens = tokenizer.get_vocab_words();
assert_eq!(tokens, vec!["hello", "world", "!", "##!", "##world", "##hello"]);
§Returns
A sequence of subword tokens.
Trait Implementations§
Source§impl Tokenizer for FullTokenizer
impl Tokenizer for FullTokenizer
Source§fn tokenize(&self, text: &str) -> Vec<String>
fn tokenize(&self, text: &str) -> Vec<String>
Tokenize by applying basic and wordpiece tokenization.
§Example
use bert_tokenizer::{FullTokenizer, Tokenizer, Vocab};
let mut vocab = Vocab::new();
vocab.insert("hello".to_string(), 0);
vocab.insert("world".to_string(), 1);
vocab.insert("!".to_string(), 2);
vocab.insert("##!".to_string(), 3);
vocab.insert("##world".to_string(), 4);
vocab.insert("##hello".to_string(), 5);
let tokenizer = FullTokenizer::new().vocab(vocab).build();
let text = "hello, world!";
let tokens = tokenizer.tokenize(text);
println!("Text: {} -> Tokens: {:?}", text, tokens);
Auto Trait Implementations§
impl Freeze for FullTokenizer
impl RefUnwindSafe for FullTokenizer
impl Send for FullTokenizer
impl Sync for FullTokenizer
impl Unpin for FullTokenizer
impl UnwindSafe for FullTokenizer
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more