pub fn tokenize_stemmed_sentence(sentence: &str) -> Vec<String>
Expand description

Converts a sentence into a vector of lowercased, stemmed tokens, splitting on whitespace and discarding punctuation (e.g. "General" becomes "gener").

# Examples

use rnltk::token;
 
let text = "Why hello there. General Kenobi!";
let tokens = vec!["why", "hello", "there", "gener", "kenobi"];
let tokenized_text = token::tokenize_stemmed_sentence(text);

assert_eq!(tokens, tokenized_text);