harper_core/token_string_ext.rs

use crate::{Span, Token};
use itertools::Itertools;
use paste::paste;

macro_rules! create_decl_for {
    ($thing:ident) => {
        paste! {
            fn [< first_ $thing >](&self) -> Option<&Token>;

            fn [< last_ $thing >](&self) -> Option<&Token>;

            fn [< last_ $thing _index >](&self) -> Option<usize>;

            fn [<iter_ $thing _indices>](&self) -> impl Iterator<Item = usize> + '_;

            fn [<iter_ $thing s>](&self) -> impl Iterator<Item = &Token> + '_;
        }
    };
}
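
// For each token kind passed in, `create_decl_for!` declares five trait methods.
// `create_decl_for!(comma)`, for instance, declares `first_comma`, `last_comma`,
// `last_comma_index`, `iter_comma_indices`, and `iter_commas`.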

macro_rules! create_fns_for {
    ($thing:ident) => {
        paste! {
            fn [< first_ $thing >](&self) -> Option<&Token> {
                self.iter().find(|v| v.kind.[<is_ $thing>]())
            }

            fn [< last_ $thing >](&self) -> Option<&Token> {
                self.iter().rev().find(|v| v.kind.[<is_ $thing>]())
            }

            fn [< last_ $thing _index >](&self) -> Option<usize> {
                self.iter().rev().position(|v| v.kind.[<is_ $thing>]()).map(|i| self.len() - i - 1)
            }

            fn [<iter_ $thing _indices>](&self) -> impl Iterator<Item = usize> + '_ {
                self.iter()
                    .enumerate()
                    .filter(|(_, t)| t.kind.[<is_ $thing>]())
                    .map(|(i, _)| i)
            }

            fn [<iter_ $thing s>](&self) -> impl Iterator<Item = &Token> + '_ {
                self.[<iter_ $thing _indices>]().map(|i| &self[i])
            }
        }
    };
}
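
// The matching definitions come from `create_fns_for!`, which dispatches to the
// corresponding `is_*` predicate on each token's `kind`. As a concrete example,
// `create_fns_for!(comma)` expands (roughly) to:
//
//     fn first_comma(&self) -> Option<&Token> {
//         self.iter().find(|v| v.kind.is_comma())
//     }
//
// plus the analogous `last_comma`, `last_comma_index`, `iter_comma_indices`,
// and `iter_commas` definitions.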

/// Extension methods for [`Token`] sequences that make them easier to wrangle and query.
pub trait TokenStringExt {
    fn first_sentence_word(&self) -> Option<&Token>;
    fn first_non_whitespace(&self) -> Option<&Token>;
    /// Grab the span that represents the beginning of the first element and the
    /// end of the last element.
    fn span(&self) -> Option<Span>;

    create_decl_for!(word);
    create_decl_for!(word_like);
    create_decl_for!(conjunction);
    create_decl_for!(space);
    create_decl_for!(apostrophe);
    create_decl_for!(pipe);
    create_decl_for!(quote);
    create_decl_for!(number);
    create_decl_for!(at);
    create_decl_for!(ellipsis);
    create_decl_for!(hostname);
    create_decl_for!(unlintable);
    create_decl_for!(sentence_terminator);
    create_decl_for!(paragraph_break);
    create_decl_for!(chunk_terminator);
    create_decl_for!(punctuation);
    create_decl_for!(currency);
    create_decl_for!(likely_homograph);
    create_decl_for!(comma);
    create_decl_for!(adjective);
    create_decl_for!(verb);
    create_decl_for!(preposition);

    fn iter_linking_verb_indices(&self) -> impl Iterator<Item = usize> + '_;
    fn iter_linking_verbs(&self) -> impl Iterator<Item = &Token> + '_;

    /// Iterate over chunks.
    ///
    /// For example, the following sentence contains two chunks separated by a
    /// comma:
    ///
    /// ```text
    /// Here is an example, it is short.
    /// ```
    fn iter_chunks(&self) -> impl Iterator<Item = &'_ [Token]> + '_;

    /// Get an iterator over token slices that represent the individual
    /// paragraphs in a document.
    fn iter_paragraphs(&self) -> impl Iterator<Item = &'_ [Token]> + '_;

    /// Get an iterator over token slices that represent the individual
    /// sentences in a document.
    fn iter_sentences(&self) -> impl Iterator<Item = &'_ [Token]> + '_;
}

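// The implementation below targets `[Token]`, so anything that derefs to a token
// slice (e.g. `Vec<Token>`) picks these methods up as well.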
impl TokenStringExt for [Token] {
    create_fns_for!(word);
    create_fns_for!(word_like);
    create_fns_for!(hostname);
    create_fns_for!(conjunction);
    create_fns_for!(space);
    create_fns_for!(apostrophe);
    create_fns_for!(pipe);
    create_fns_for!(quote);
    create_fns_for!(number);
    create_fns_for!(at);
    create_fns_for!(punctuation);
    create_fns_for!(ellipsis);
    create_fns_for!(unlintable);
    create_fns_for!(sentence_terminator);
    create_fns_for!(paragraph_break);
    create_fns_for!(chunk_terminator);
    create_fns_for!(currency);
    create_fns_for!(likely_homograph);
    create_fns_for!(comma);
    create_fns_for!(adjective);
    create_fns_for!(verb);
    create_fns_for!(preposition);

    fn first_non_whitespace(&self) -> Option<&Token> {
        self.iter().find(|t| !t.kind.is_whitespace())
    }

    fn first_sentence_word(&self) -> Option<&Token> {
        let (w_idx, word) = self.iter().find_position(|v| v.kind.is_word())?;

        // If there is no unlintable token at all, the first word can be reported directly.
        let Some(u_idx) = self.iter().position(|v| v.kind.is_unlintable()) else {
            return Some(word);
        };

        // Otherwise, only report the word when it precedes the first unlintable token.
        if w_idx < u_idx { Some(word) } else { None }
    }

    fn span(&self) -> Option<Span> {
        let min_max = self
            .iter()
            .flat_map(|v| [v.span.start, v.span.end].into_iter())
            .minmax();

        match min_max {
            itertools::MinMaxResult::NoElements => None,
            itertools::MinMaxResult::OneElement(min) => Some(Span::new(min, min)),
            itertools::MinMaxResult::MinMax(min, max) => Some(Span::new(min, max)),
        }
    }

    fn iter_linking_verb_indices(&self) -> impl Iterator<Item = usize> + '_ {
        self.iter_word_indices().filter(|idx| {
            let word = &self[*idx];
            let Some(Some(meta)) = word.kind.as_word() else {
                return false;
            };

            meta.is_linking_verb()
        })
    }

    fn iter_linking_verbs(&self) -> impl Iterator<Item = &Token> + '_ {
        self.iter_linking_verb_indices().map(|idx| &self[idx])
    }

    fn iter_chunks(&self) -> impl Iterator<Item = &'_ [Token]> + '_ {
        // Everything up to and including the first chunk terminator.
        let first_chunk = self
            .iter_chunk_terminator_indices()
            .next()
            .map(|first_term| &self[0..=first_term]);

        // Slices between each pair of consecutive chunk terminators.
        let rest = self
            .iter_chunk_terminator_indices()
            .tuple_windows()
            .map(move |(a, b)| &self[a + 1..=b]);

        // Whatever trails the last terminator, or the whole slice if there is none.
        let last = if let Some(last_i) = self.last_chunk_terminator_index() {
            if last_i + 1 < self.len() {
                Some(&self[last_i + 1..])
            } else {
                None
            }
        } else {
            Some(self)
        };

        first_chunk.into_iter().chain(rest).chain(last)
    }

    fn iter_paragraphs(&self) -> impl Iterator<Item = &'_ [Token]> + '_ {
        let first_pg = self
            .iter_paragraph_break_indices()
            .next()
            .map(|first_term| &self[0..=first_term]);

        let rest = self
            .iter_paragraph_break_indices()
            .tuple_windows()
            .map(move |(a, b)| &self[a + 1..=b]);

        let last_pg = if let Some(last_i) = self.last_paragraph_break_index() {
            if last_i + 1 < self.len() {
                Some(&self[last_i + 1..])
            } else {
                None
            }
        } else {
            Some(self)
        };

        first_pg.into_iter().chain(rest).chain(last_pg)
    }

    fn iter_sentences(&self) -> impl Iterator<Item = &'_ [Token]> + '_ {
        let first_sentence = self
            .iter_sentence_terminator_indices()
            .next()
            .map(|first_term| &self[0..=first_term]);

        let rest = self
            .iter_sentence_terminator_indices()
            .tuple_windows()
            .map(move |(a, b)| &self[a + 1..=b]);

        let last_sentence = if let Some(last_i) = self.last_sentence_terminator_index() {
            if last_i + 1 < self.len() {
                Some(&self[last_i + 1..])
            } else {
                None
            }
        } else {
            Some(self)
        };

        first_sentence.into_iter().chain(rest).chain(last_sentence)
    }
}
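
// A minimal usage sketch (hypothetical helper, not part of the original file or the
// public API): it only composes methods defined above, counting the words that
// appear before the first comma in a token slice.
#[allow(dead_code)]
fn words_before_first_comma(tokens: &[Token]) -> usize {
    // Treat "no comma" as the end of the slice.
    let boundary = tokens
        .iter_comma_indices()
        .next()
        .unwrap_or(tokens.len());

    tokens
        .iter_word_indices()
        .take_while(|&i| i < boundary)
        .count()
}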