use {
    std::{
        fs::File,
        io::{BufRead, BufReader},
    },
    token_dict::TokenDict,
};

pub fn main() {
    // Build the dictionary from one word per line in `words.txt`.
    let tokenizer: TokenDict = BufReader::new(File::open("words.txt").unwrap())
        .lines()
        .filter_map(Result::ok)
        .collect();

    // Round-trip a sample string through the tokenizer.
    let tokens: Vec<u32> = tokenizer.tokenize_str("some text to tokenize").collect();
    let detokens: String = tokenizer.detokenize_str(&tokens).collect();

    // Print the token ids as `[id, id, ...]`, then the reconstructed text.
    print!("[");
    for id in tokens.iter().take(tokens.len().saturating_sub(1)) {
        print!("{id}, ");
    }
    if let Some(id) = tokens.last() {
        print!("{id}");
    }
    println!("]");
    println!("\"{detokens}\"");
}