//! Tokenizer implementations for text analysis.
//!
//! This module provides the tokenization strategies used by the text analysis
//! pipeline. Tokenizers are the first step of that pipeline, responsible for
//! splitting input text into meaningful units (tokens).
//!
//! # Available Tokenizers
//!
//! - [`whitespace::WhitespaceTokenizer`] - Splits on whitespace characters
//! - [`unicode_word::UnicodeWordTokenizer`] - Uses Unicode word boundaries
//! - [`regex::RegexTokenizer`] - Custom regex-based tokenization
//! - [`ngram::NgramTokenizer`] - Character n-gram tokenization
//! - [`lindera::LinderaTokenizer`] - Japanese morphological analysis (requires `lindera` feature)
//! - [`whole::WholeTokenizer`] - Treats the entire input as a single token
//!
//! # Examples
//!
//! ```
//! use laurus::analysis::tokenizer::Tokenizer;
//! use laurus::analysis::tokenizer::whitespace::WhitespaceTokenizer;
//!
//! let tokenizer = WhitespaceTokenizer::new();
//! let tokens: Vec<_> = tokenizer.tokenize("Hello world").unwrap().collect();
//! assert_eq!(tokens.len(), 2);
//! ```

use crate::analysis::token::TokenStream;
use crate::Result;

/// Trait for tokenizers that convert text into tokens.
///
/// All tokenizers must implement this trait to be used in the analysis pipeline.
/// The trait requires `Send + Sync` to allow use in concurrent contexts.
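///
/// Because the trait is `Send + Sync`, one tokenizer instance can be shared
/// across threads, for example behind an `Arc`. A minimal sketch, assuming the
/// `WhitespaceTokenizer` from the module-level example:
///
/// ```
/// use std::sync::Arc;
/// use std::thread;
/// use laurus::analysis::tokenizer::Tokenizer;
/// use laurus::analysis::tokenizer::whitespace::WhitespaceTokenizer;
///
/// let tokenizer: Arc<dyn Tokenizer> = Arc::new(WhitespaceTokenizer::new());
/// let handles: Vec<_> = (0..2)
///     .map(|_| {
///         let t = Arc::clone(&tokenizer);
///         // Each thread tokenizes independently through the shared instance.
///         thread::spawn(move || t.tokenize("Hello world").unwrap().count())
///     })
///     .collect();
/// for handle in handles {
///     assert_eq!(handle.join().unwrap(), 2);
/// }
/// ```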
///
/// # Examples
///
/// Implementing a custom tokenizer:
///
/// ```
/// use laurus::analysis::token::{Token, TokenStream};
/// use laurus::analysis::tokenizer::Tokenizer;
/// use laurus::Result;
///
/// struct CustomTokenizer;
///
/// impl Tokenizer for CustomTokenizer {
///     fn tokenize(&self, text: &str) -> Result<TokenStream> {
///         let tokens: Vec<Token> = text
///             .split(',')
///             .enumerate()
///             .map(|(i, s)| Token::new(s.trim(), i))
///             .collect();
///         Ok(Box::new(tokens.into_iter()))
///     }
///
///     fn name(&self) -> &'static str {
///         "custom"
///     }
/// }
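///
/// // A quick usage check for the sketch above (assumes `TokenStream`
/// // yields `Token` items, as in the module-level example):
/// let tokenizer = CustomTokenizer;
/// let tokens: Vec<Token> = tokenizer.tokenize("a, b, c").unwrap().collect();
/// assert_eq!(tokens.len(), 3);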
/// ```
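pub trait Tokenizer: Send + Sync {
    /// Splits `text` into a stream of tokens.
    fn tokenize(&self, text: &str) -> Result<TokenStream>;

    /// Returns a short, static name identifying this tokenizer.
    fn name(&self) -> &'static str;
}
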
// Individual tokenizer modules