pub trait SearchTokenizer {
    // Required methods

    /// Tokenizes the query characters in the range
    /// `[query_chars_offset, query_chars_offset_end)`, taking escaped
    /// positions and the dictionary into account.
    fn search_tokenize(
        &mut self,
        query_chars: &[char],
        query_chars_offset: usize,
        query_chars_offset_end: usize,
        escape_indices: &[usize],
        dict: &Dictionary,
    ) -> SearchTokenizeResult;

    /// Returns `true` if `term` should be treated as a stop word.
    fn is_stop_word(&self, term: &str) -> bool;

    /// Returns `true` if `c` may terminate a prefix operator in the query.
    fn is_valid_prefix_op_terminator(&self, c: char) -> bool;
}
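
Below is a minimal sketch of an implementor, shown only to illustrate the shape of the trait. It assumes `SearchTokenizeResult` carries the produced tokens and `Dictionary` is opaque to the tokenizer, and that `escape_indices` holds positions into `query_chars`; both types are replaced here by local placeholder stand-ins so the sketch is self-contained, and they are not the crate's real definitions.

use std::collections::HashSet;

// Placeholder stand-ins for the crate's real types (assumptions for this sketch).
struct Dictionary;
struct SearchTokenizeResult {
    tokens: Vec<String>,
}

struct WhitespaceSearchTokenizer {
    stop_words: HashSet<String>,
}

impl SearchTokenizer for WhitespaceSearchTokenizer {
    fn search_tokenize(
        &mut self,
        query_chars: &[char],
        query_chars_offset: usize,
        query_chars_offset_end: usize,
        escape_indices: &[usize],
        _dict: &Dictionary,
    ) -> SearchTokenizeResult {
        let mut tokens = Vec::new();
        let mut current = String::new();
        let end = query_chars_offset_end.min(query_chars.len());
        for i in query_chars_offset..end {
            let c = query_chars[i];
            // An escaped character is kept literally rather than treated as a separator.
            let escaped = escape_indices.contains(&i);
            if c.is_whitespace() && !escaped {
                if !current.is_empty() {
                    tokens.push(std::mem::take(&mut current));
                }
            } else {
                current.push(c);
            }
        }
        if !current.is_empty() {
            tokens.push(current);
        }
        // Drop stop words before handing the tokens back.
        tokens.retain(|t| !self.is_stop_word(t));
        SearchTokenizeResult { tokens }
    }

    fn is_stop_word(&self, term: &str) -> bool {
        self.stop_words.contains(term)
    }

    fn is_valid_prefix_op_terminator(&self, c: char) -> bool {
        // Treat whitespace and a closing parenthesis as valid ends of a prefix term.
        c.is_whitespace() || c == ')'
    }
}

The split on whitespace, the escape handling, and the choice of prefix terminators are illustrative decisions of this sketch, not behavior mandated by the trait; a real implementor is free to tokenize however it likes as long as it returns a `SearchTokenizeResult`.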