//! The tokenizer module contains all of the tools used to process
//! text in `tantivy`.
use tokenizer_api::{BoxTokenStream, TokenFilter, Tokenizer};

use crate::tokenizer::empty_tokenizer::EmptyTokenizer;

/// `TextAnalyzer` tokenizes an input text into tokens and modifies the resulting `TokenStream`.
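///
/// # Example
///
/// A minimal sketch of the typical flow, using the same building blocks as the
/// example on [`TextAnalyzerBuilder::filter`]:
///
/// ```rust
/// use tantivy::tokenizer::*;
///
/// let mut analyzer = TextAnalyzer::builder(SimpleTokenizer::default())
///     .filter(LowerCaser)
///     .build();
/// let mut stream = analyzer.token_stream("Hello, happy tax payer!");
/// assert_eq!(stream.next().unwrap().text, "hello");
/// assert_eq!(stream.next().unwrap().text, "happy");
/// ```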
#[derive(Clone)]
pub struct TextAnalyzer {
    tokenizer: Box<dyn BoxableTokenizer>,
}

impl Tokenizer for Box<dyn BoxableTokenizer> {
    type TokenStream<'a> = BoxTokenStream<'a>;

    // Note: we want to call `box_token_stream` on the concrete `Tokenizer`
    // implementation behind the box. Calling the `BoxableTokenizer` method on
    // the box itself would recurse back into this impl and overflow the stack.
    fn token_stream<'a>(&'a mut self, text: &'a str) -> Self::TokenStream<'a> {
        (**self).box_token_stream(text)
    }
}

impl Clone for Box<dyn BoxableTokenizer> {
    // Note: we want to call `box_clone` on the concrete `Tokenizer`
    // implementation behind the box, so that the concrete `Tokenizer` is what
    // gets cloned.
    fn clone(&self) -> Self {
        (**self).box_clone()
    }
}

/// A boxable `Tokenizer`, with its `TokenStream` type erased.
pub trait BoxableTokenizer: 'static + Send + Sync {
    /// Creates a boxed token stream for a given `str`.
    fn box_token_stream<'a>(&'a mut self, text: &'a str) -> BoxTokenStream<'a>;
    /// Clone this tokenizer.
    fn box_clone(&self) -> Box<dyn BoxableTokenizer>;
}

impl<T: Tokenizer> BoxableTokenizer for T {
    fn box_token_stream<'a>(&'a mut self, text: &'a str) -> BoxTokenStream<'a> {
        BoxTokenStream::new(self.token_stream(text))
    }
    fn box_clone(&self) -> Box<dyn BoxableTokenizer> {
        Box::new(self.clone())
    }
}

impl Default for TextAnalyzer {
    fn default() -> TextAnalyzer {
        TextAnalyzer::from(EmptyTokenizer)
    }
}

impl<T: Tokenizer + Clone> From<T> for TextAnalyzer {
    fn from(tokenizer: T) -> Self {
        TextAnalyzer::builder(tokenizer).build()
    }
}

impl TextAnalyzer {
    /// Creates a new [`TextAnalyzerBuilder`].
    pub fn builder<T: Tokenizer>(tokenizer: T) -> TextAnalyzerBuilder<T> {
        TextAnalyzerBuilder { tokenizer }
    }

    /// Creates a token stream for a given `str`.
    pub fn token_stream<'a>(&'a mut self, text: &'a str) -> BoxTokenStream<'a> {
        self.tokenizer.token_stream(text)
    }
}

/// Builder helper for [`TextAnalyzer`]
pub struct TextAnalyzerBuilder<T = Box<dyn BoxableTokenizer>> {
    tokenizer: T,
}

impl<T: Tokenizer> TextAnalyzerBuilder<T> {
    /// Appends a token filter to the current builder.
    ///
    /// # Example
    ///
    /// ```rust
    /// use tantivy::tokenizer::*;
    ///
    /// let en_stem = TextAnalyzer::builder(SimpleTokenizer::default())
    ///     .filter(RemoveLongFilter::limit(40))
    ///     .filter(LowerCaser)
    ///     .filter(Stemmer::default())
    ///     .build();
    /// ```
    pub fn filter<F: TokenFilter>(self, token_filter: F) -> TextAnalyzerBuilder<F::Tokenizer<T>> {
        TextAnalyzerBuilder {
            tokenizer: token_filter.transform(self.tokenizer),
        }
    }

    /// Boxes the internal tokenizer. This is useful for adding filters dynamically.
    ///
    /// Note: this will be less performant than the non-boxed version.
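    ///
    /// # Example
    ///
    /// A sketch of composing filters statically first, then switching to the
    /// boxed builder to keep adding filters at runtime:
    ///
    /// ```rust
    /// use tantivy::tokenizer::*;
    ///
    /// let builder = TextAnalyzer::builder(SimpleTokenizer::default())
    ///     .filter(LowerCaser)
    ///     .dynamic();
    /// let mut analyzer = builder.filter_dynamic(RemoveLongFilter::limit(40)).build();
    /// let mut stream = analyzer.token_stream("Some Text");
    /// assert_eq!(stream.next().unwrap().text, "some");
    /// ```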
    pub fn dynamic(self) -> TextAnalyzerBuilder {
        let boxed_tokenizer = Box::new(self.tokenizer);
        TextAnalyzerBuilder {
            tokenizer: boxed_tokenizer,
        }
    }

    /// Appends a token filter to the current builder and returns a builder with
    /// the resulting tokenizer boxed. This is useful when you want to build a
    /// `TextAnalyzer` dynamically.
    ///
    /// Prefer `TextAnalyzer::builder(tokenizer).filter(token_filter).build()` when
    /// possible, as it is more performant and creates fewer boxes.
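    ///
    /// # Example
    ///
    /// A sketch of a pipeline where the filters could just as well come from
    /// runtime configuration:
    ///
    /// ```rust
    /// use tantivy::tokenizer::*;
    ///
    /// let mut analyzer = TextAnalyzer::builder(SimpleTokenizer::default())
    ///     .filter_dynamic(RemoveLongFilter::limit(40))
    ///     .filter_dynamic(LowerCaser)
    ///     .build();
    /// let mut stream = analyzer.token_stream("Dynamic TextAnalyzer");
    /// assert_eq!(stream.next().unwrap().text, "dynamic");
    /// ```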
    pub fn filter_dynamic<F: TokenFilter>(self, token_filter: F) -> TextAnalyzerBuilder {
        self.filter(token_filter).dynamic()
    }

    /// Finalizes building the [`TextAnalyzer`].
    pub fn build(self) -> TextAnalyzer {
        TextAnalyzer {
            tokenizer: Box::new(self.tokenizer),
        }
    }
}

#[cfg(test)]
mod tests {

    use super::*;
    use crate::tokenizer::{LowerCaser, RemoveLongFilter, SimpleTokenizer};

    #[test]
    fn test_text_analyzer_builder() {
        let mut analyzer = TextAnalyzer::builder(SimpleTokenizer::default())
            .filter(RemoveLongFilter::limit(40))
            .filter(LowerCaser)
            .build();
        let mut stream = analyzer.token_stream("- first bullet point");
        assert_eq!(stream.next().unwrap().text, "first");
        assert_eq!(stream.next().unwrap().text, "bullet");
    }
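
    #[test]
    fn test_boxable_tokenizer() {
        // A small sketch exercising the type-erased `BoxableTokenizer` plumbing
        // that `TextAnalyzer` relies on internally.
        let mut boxed: Box<dyn BoxableTokenizer> = Box::new(SimpleTokenizer::default());
        // `box_clone` yields an independent boxed tokenizer.
        let mut cloned = boxed.box_clone();
        let mut stream = boxed.box_token_stream("hello world");
        assert_eq!(stream.next().unwrap().text, "hello");
        assert_eq!(stream.next().unwrap().text, "world");
        let mut cloned_stream = cloned.box_token_stream("hello again");
        assert_eq!(cloned_stream.next().unwrap().text, "hello");
    }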

    #[test]
    fn test_text_analyzer_with_filters_boxed() {
        // This test shows how one can build a TextAnalyzer dynamically by stacking a
        // list of parametrizable token filters.
        //
        // The following enum is what would actually be serialized in a real application.
        // Note that token filters can have their own parameters too, like the RemoveLongFilter.
        enum SerializableTokenFilterEnum {
            LowerCaser(LowerCaser),
            RemoveLongFilter(RemoveLongFilter),
        }
        // Note that everything below is dynamic.
        let filters: Vec<SerializableTokenFilterEnum> = vec![
            SerializableTokenFilterEnum::LowerCaser(LowerCaser),
            SerializableTokenFilterEnum::RemoveLongFilter(RemoveLongFilter::limit(12)),
        ];
        let mut analyzer_builder: TextAnalyzerBuilder =
            TextAnalyzer::builder(SimpleTokenizer::default())
                .filter_dynamic(RemoveLongFilter::limit(40))
                .filter_dynamic(LowerCaser);
        for filter in filters {
            analyzer_builder = match filter {
                SerializableTokenFilterEnum::LowerCaser(lower_caser) => {
                    analyzer_builder.filter_dynamic(lower_caser)
                }
                SerializableTokenFilterEnum::RemoveLongFilter(remove_long_filter) => {
                    analyzer_builder.filter_dynamic(remove_long_filter)
                }
            }
        }
        let mut analyzer = analyzer_builder.build();
        let mut stream = analyzer.token_stream("first bullet point");
        assert_eq!(stream.next().unwrap().text, "first");
        assert_eq!(stream.next().unwrap().text, "bullet");
    }
}