# kham-core
Pure Rust Thai word segmentation engine. no_std compatible (requires alloc).
## Quick start
```rust
use kham_core::Tokenizer;

let tokenizer = Tokenizer::new();
let tokens = tokenizer.segment("กินข้าวกับปลา");
for token in &tokens {
    println!("{} ({:?})", token.text, token.kind);
}
```