Crate chinese_segmenter
Description
About
Segment Chinese sentences into component words using a dictionary-driven, largest-first matching approach.
Usage
extern crate chinese_segmenter;
use chinese_segmenter::{initialize, tokenize};
let sentence = "今天晚上想吃羊肉吗?";
initialize(); // Optional initialization to load data
let result: Vec<&str> = tokenize(sentence);
println!("{:?}", result); // --> ["今天", "晚上", "想", "吃", "羊肉", "吗"]