pub struct FrequencyFilter {
    pub min_count: usize,
    pub max_count: Option<usize>,
    pub max_freq: Option<f64>,
    /* private fields */
}
Filter tokens by frequency in a corpus
Fields

min_count: usize
Minimum token frequency

max_count: Option<usize>
Maximum token frequency (absolute count)

max_freq: Option<f64>
Maximum token frequency (as a fraction of total tokens)
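Together these thresholds bound which tokens survive: a token presumably needs at least min_count occurrences and is dropped once it exceeds max_count or max_freq when those caps are set. A minimal sketch of the intended usage, assuming FrequencyFilter and the crate's Result alias are in scope, and that apply returns the surviving tokens (as the repository example further below suggests):

use std::collections::HashMap;

fn drop_dominant_tokens(tokens: Vec<String>) -> Result<Vec<String>> {
    // Count every token in the input.
    let mut counts: HashMap<String, usize> = HashMap::new();
    for t in &tokens {
        *counts.entry(t.clone()).or_insert(0) += 1;
    }
    // Keep tokens seen at least once, but drop any token that accounts for
    // more than a quarter of the corpus (max_freq = 0.25).
    let filter = FrequencyFilter::from_counts(counts, 1).with_max_freq(0.25)?;
    Ok(filter.apply(&tokens))
}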
Implementations

impl FrequencyFilter

pub fn from_tokens_with_vocabulary(
    tokens: &[String],
    vocabulary: &Vocabulary,
    min_count: usize,
) -> Self
Create a new frequency filter from tokens with a vocabulary for reference
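A brief sketch of calling this constructor, assuming a Vocabulary has already been built elsewhere (its construction is not documented on this page):

// `tokens` and `vocabulary` are assumed to come from earlier processing steps.
fn build_filter(tokens: &[String], vocabulary: &Vocabulary) -> FrequencyFilter {
    // Count frequencies over `tokens`, keeping only tokens that occur at least 3 times.
    FrequencyFilter::from_tokens_with_vocabulary(tokens, vocabulary, 3)
}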
pub fn from_counts(
    _token_counts: HashMap<String, usize>,
    mincount: usize,
) -> Self
Create a new frequency filter from token counts
Examples found in repository: examples/token_filtering_demo.rs (line 48)
fn main() -> Result<()> {
    println!("Token Filtering Demo");
    println!("===================\n");

    // Create a sample text
    let text = "The quick brown fox jumps over the lazy dog. The fox is quick and brown.";
    println!("Original text: {text}\n");

    // Create a tokenizer
    let tokenizer = WordTokenizer::default();
    let tokens = tokenizer.tokenize(text)?;
    println!("Tokenized: {tokens:?}\n");

    // 1. Filter by length
    println!("1. Length Filtering");
    println!("------------------");
    let length_filter = LengthFilter::new(4, 6);
    let filtered = length_filter.apply(&tokens);
    println!("Tokens with length 4-6: {filtered:?}");

    let filteredtext = length_filter.filtertext(text, &tokenizer)?;
    println!("Filtered text: {filteredtext}\n");

    // 2. Filter by frequency
    println!("2. Frequency Filtering");
    println!("---------------------");

    // Create token counts
    let mut counts = HashMap::new();
    for token in &tokens {
        *counts.entry(token.clone()).or_insert(0) += 1;
    }

    // Print counts
    println!("Token counts:");
    for (token, count) in &counts {
        println!("  {token} : {count}");
    }

    // Filter tokens that appear more than once
    let freq_filter = FrequencyFilter::from_counts(counts.clone(), 2);
    let filtered = freq_filter.apply(&tokens);
    println!("\nTokens that appear 2+ times: {filtered:?}");

    let filteredtext = freq_filter.filtertext(text, &tokenizer)?;
    println!("Filtered text: {filteredtext}\n");

    // 3. Filter by regex pattern
    println!("3. Regex Filtering");
    println!("-----------------");

    // Keep only tokens that contain a vowel followed by 'w' or 'r'
    let regex_filter = RegexFilter::new("[aeiou][wr]", true)?;
    let filtered = regex_filter.apply(&tokens);
    println!("Tokens containing a vowel followed by 'w' or 'r': {filtered:?}");

    let filteredtext = regex_filter.filtertext(text, &tokenizer)?;
    println!("Filtered text: {filteredtext}\n");

    // 4. Stopwords filtering
    println!("4. Stopwords Filtering");
    println!("---------------------");

    // Define common stopwords
    let stopwords = vec![
        "the".to_string(),
        "is".to_string(),
        "and".to_string(),
        "over".to_string(),
        "a".to_string(),
        "an".to_string(),
    ];

    let stopwords_filter = StopwordsFilter::new(stopwords, true);
    let filtered = stopwords_filter.apply(&tokens);
    println!("Tokens with stopwords removed: {filtered:?}");

    let filteredtext = stopwords_filter.filtertext(text, &tokenizer)?;
    println!("Filtered text: {filteredtext}\n");

    // 5. Composite filtering
    println!("5. Composite Filtering");
    println!("---------------------");

    // Create separate filters
    let length_filter = LengthFilter::new(3, 5);
    let regex_filter = RegexFilter::new("^[a-z]", true)?;

    // Apply filters sequentially
    let filtered_by_length = length_filter.apply(&tokens);
    let filtered = regex_filter.apply(&filtered_by_length);
    println!("Tokens with length 3-5 AND starting with lowercase letter: {filtered:?}");

    // First filter by length
    let text_with_length = length_filter.filtertext(text, &tokenizer)?;

    // Then apply regex filter to the already filtered text
    let filteredtext = regex_filter.filtertext(&text_with_length, &tokenizer)?;

    // We should see only words that are 3-5 chars AND start with lowercase
    println!("Filtered text: {filteredtext}\n");

    Ok(())
}

pub fn learn_from_corpus(
    texts: &[&str],
    tokenizer: &dyn Tokenizer,
    min_count: usize,
) -> Result<Self>
Learn token frequencies from a corpus
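A short sketch of learning frequencies directly from raw texts, assuming WordTokenizer (used in the example above) implements Tokenizer and that the crate's Result alias is in scope:

fn filter_from_corpus() -> Result<FrequencyFilter> {
    let texts = ["the cat sat on the mat", "the dog sat on the rug"];
    let tokenizer = WordTokenizer::default();
    // Tokenize each text, count tokens across the corpus, and keep tokens seen at least twice.
    FrequencyFilter::learn_from_corpus(&texts, &tokenizer, 2)
}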
pub fn with_max_count(self, maxcount: usize) -> Self
Set the maximum count threshold
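Since this setter returns Self, it chains directly onto a constructor; a sketch with arbitrarily chosen thresholds:

use std::collections::HashMap;

fn capped_filter(counts: HashMap<String, usize>) -> FrequencyFilter {
    // Keep tokens seen at least 2 times, but drop any token seen more than 100 times.
    FrequencyFilter::from_counts(counts, 2).with_max_count(100)
}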
pub fn with_max_freq(self, maxfreq: f64) -> Result<Self>
Set the maximum frequency threshold (0.0 to 1.0)
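Because the fraction must lie between 0.0 and 1.0, this setter is fallible; a sketch of the expected handling, assuming the crate's Result alias and that out-of-range values are rejected:

use std::collections::HashMap;

fn relative_filter(counts: HashMap<String, usize>) -> Result<FrequencyFilter> {
    // Drop tokens that account for more than 10% of all tokens in the corpus.
    FrequencyFilter::from_counts(counts, 2).with_max_freq(0.1)
    // A value such as 1.5 would presumably return an Err instead.
}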
Trait Implementations

impl Clone for FrequencyFilter

fn clone(&self) -> FrequencyFilter
Returns a duplicate of the value.

fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source.

impl Debug for FrequencyFilter
Auto Trait Implementations
impl Freeze for FrequencyFilter
impl RefUnwindSafe for FrequencyFilter
impl Send for FrequencyFilter
impl Sync for FrequencyFilter
impl Unpin for FrequencyFilter
impl UnwindSafe for FrequencyFilter
Blanket Implementations

impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.

impl<T> CloneToUninit for T
where
    T: Clone,

impl<T> IntoEither for T

fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true.
Converts self into a Right variant of Either<Self, Self> otherwise.

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self> otherwise.

impl<T> Pointable for T

impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,

fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct self from the equivalent element of its superset.

fn is_in_subset(&self) -> bool
Checks if self is actually part of its subset T (and can be converted to it).

fn to_subset_unchecked(&self) -> SS
Use with care! Same as self.to_subset but without any property checks. Always succeeds.

fn from_subset(element: &SS) -> SP
The inclusion map: converts self to the equivalent element of its superset.