use crate::recursive_parser::{Lexer, Token};

pub struct TextNavigator;

impl TextNavigator {
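    /// Returns the 1-based index of the token at `cursor_pos` together with
    /// the total number of tokens in `query`. Returns `(0, 0)` for an empty
    /// query and `(0, len)` when the cursor is at the start of the input.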
    #[must_use]
    pub fn get_cursor_token_position(query: &str, cursor_pos: usize) -> (usize, usize) {
        if query.is_empty() {
            return (0, 0);
        }

        let mut lexer = Lexer::new(query);
        let tokens = lexer.tokenize_all_with_positions();

        if tokens.is_empty() {
            return (0, 0);
        }

        if cursor_pos == 0 {
            return (0, tokens.len());
        }

        let mut current_token = 0;
        for (i, (start, end, _)) in tokens.iter().enumerate() {
            if cursor_pos >= *start && cursor_pos <= *end {
                current_token = i + 1;
                break;
            } else if cursor_pos < *start {
                current_token = i;
                break;
            }
        }

        if current_token == 0 && cursor_pos > 0 {
            current_token = tokens.len();
        }

        (current_token, tokens.len())
    }

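    /// Returns the text of the token under `cursor_pos`, or `None` if the
    /// query is empty or the cursor is not positioned on any token.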
    #[must_use]
    pub fn get_token_at_cursor(query: &str, cursor_pos: usize) -> Option<String> {
        if query.is_empty() {
            return None;
        }

        let mut lexer = Lexer::new(query);
        let tokens = lexer.tokenize_all_with_positions();

        for (start, end, token) in &tokens {
            if cursor_pos >= *start && cursor_pos <= *end {
                let token_str = Self::format_token(token);
                return Some(token_str.to_string());
            }
        }

        None
    }

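    /// Returns the position the cursor should jump to when moving one token
    /// to the left: the start of the token under the cursor, or the start of
    /// the previous token. Returns `None` if there is no earlier position.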
    #[must_use]
    pub fn calculate_prev_token_position(query: &str, cursor_pos: usize) -> Option<usize> {
        if cursor_pos == 0 {
            return None;
        }

        let mut lexer = Lexer::new(query);
        let tokens = lexer.tokenize_all_with_positions();

        let mut in_token = false;
        let mut current_token_start = 0;
        for (start, end, _) in &tokens {
            if cursor_pos > *start && cursor_pos <= *end {
                in_token = true;
                current_token_start = *start;
                break;
            }
        }

        let target_pos = if in_token && cursor_pos > current_token_start {
            current_token_start
        } else {
            let mut prev_start = 0;
            for (start, _, _) in tokens.iter().rev() {
                if *start < cursor_pos {
                    prev_start = *start;
                    break;
                }
            }
            prev_start
        };

        if target_pos < cursor_pos {
            Some(target_pos)
        } else {
            None
        }
    }

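    /// Returns the position the cursor should jump to when moving one token
    /// to the right: the start of the next token, or the end of the query if
    /// there is no further token. Returns `None` if the cursor is already at
    /// or past the end of the query.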
    #[must_use]
    pub fn calculate_next_token_position(query: &str, cursor_pos: usize) -> Option<usize> {
        let query_len = query.len();
        if cursor_pos >= query_len {
            return None;
        }

        let mut lexer = Lexer::new(query);
        let tokens = lexer.tokenize_all_with_positions();

        let mut in_token = false;
        let mut current_token_end = query_len;
        for (start, end, _) in &tokens {
            if cursor_pos >= *start && cursor_pos < *end {
                in_token = true;
                current_token_end = *end;
                break;
            }
        }

        let target_pos = if in_token && cursor_pos < current_token_end {
            let mut next_start = query_len;
            for (start, _, _) in &tokens {
                if *start > current_token_end {
                    next_start = *start;
                    break;
                }
            }
            next_start
        } else {
            let mut next_start = query_len;
            for (start, _, _) in &tokens {
                if *start > cursor_pos {
                    next_start = *start;
                    break;
                }
            }
            next_start
        };

        if target_pos > cursor_pos && target_pos <= query_len {
            Some(target_pos)
        } else {
            None
        }
    }

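    /// Renders a token back into its textual form for display.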
    fn format_token(token: &Token) -> &str {
        match token {
            Token::Select => "SELECT",
            Token::From => "FROM",
            Token::Where => "WHERE",
            Token::With => "WITH",
            Token::GroupBy => "GROUP BY",
            Token::OrderBy => "ORDER BY",
            Token::Having => "HAVING",
            Token::As => "AS",
            Token::Asc => "ASC",
            Token::Desc => "DESC",
            Token::And => "AND",
            Token::Or => "OR",
            Token::In => "IN",
            Token::DateTime => "DateTime",
            Token::Case => "CASE",
            Token::When => "WHEN",
            Token::Then => "THEN",
            Token::Else => "ELSE",
            Token::End => "END",
            Token::Distinct => "DISTINCT",
            Token::Exclude => "EXCLUDE",
            Token::Over => "OVER",
            Token::Partition => "PARTITION",
            Token::By => "BY",
            Token::Rows => "ROWS",
            Token::Range => "RANGE",
            Token::Unbounded => "UNBOUNDED",
            Token::Preceding => "PRECEDING",
            Token::Following => "FOLLOWING",
            Token::Current => "CURRENT",
            Token::Row => "ROW",
            Token::Union => "UNION",
            Token::Intersect => "INTERSECT",
            Token::Except => "EXCEPT",
            Token::Web => "WEB",
            Token::Unnest => "UNNEST",
            Token::Identifier(s) => s,
            Token::QuotedIdentifier(s) => s,
            Token::StringLiteral(s) => s,
            Token::JsonBlock(s) => s,
            Token::NumberLiteral(s) => s,
            Token::Star => "*",
            Token::Comma => ",",
            Token::Colon => ":",
            Token::Dot => ".",
            Token::LeftParen => "(",
            Token::RightParen => ")",
            Token::Equal => "=",
            Token::NotEqual => "!=",
            Token::LessThan => "<",
            Token::LessThanOrEqual => "<=",
            Token::GreaterThan => ">",
            Token::GreaterThanOrEqual => ">=",
            Token::Like => "LIKE",
            Token::ILike => "ILIKE",
            Token::Not => "NOT",
            Token::Is => "IS",
            Token::Null => "NULL",
            Token::Between => "BETWEEN",
            Token::Limit => "LIMIT",
            Token::Offset => "OFFSET",
            Token::Into => "INTO",
            Token::Plus => "+",
            Token::Minus => "-",
            Token::Divide => "/",
            Token::Modulo => "%",
            Token::Concat => "||",
            Token::Join => "JOIN",
            Token::Inner => "INNER",
            Token::Left => "LEFT",
            Token::Right => "RIGHT",
            Token::Full => "FULL",
            Token::Cross => "CROSS",
            Token::Outer => "OUTER",
            Token::On => "ON",
            Token::LineComment(text) => text,
            Token::BlockComment(text) => text,
            Token::Eof => "EOF",
            Token::Qualify => "QUALIFY",
        }
    }
}

pub struct TextEditor;

impl TextEditor {
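    /// Removes everything before `cursor_pos`, returning the killed text and
    /// the remaining text, or `None` if the cursor is at the start.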
    #[must_use]
    pub fn kill_line_backward(text: &str, cursor_pos: usize) -> Option<(String, String)> {
        if cursor_pos == 0 {
            return None;
        }

        let killed_text = text.chars().take(cursor_pos).collect::<String>();
        let remaining_text = text.chars().skip(cursor_pos).collect::<String>();

        Some((killed_text, remaining_text))
    }

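    /// Removes everything from `cursor_pos` to the end of the text, returning
    /// the killed text and the remaining text, or `None` if the cursor is
    /// already at or past the end.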
    #[must_use]
    pub fn kill_line_forward(text: &str, cursor_pos: usize) -> Option<(String, String)> {
        if cursor_pos >= text.len() {
            return None;
        }

        let remaining_text = text.chars().take(cursor_pos).collect::<String>();
        let killed_text = text.chars().skip(cursor_pos).collect::<String>();

        Some((killed_text, remaining_text))
    }

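    /// Deletes the word immediately before `cursor_pos`, along with the
    /// whitespace around it. Returns the deleted text, the remaining text, and
    /// the updated cursor position, or `None` if the cursor is at the start.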
    #[must_use]
    pub fn delete_word_backward(text: &str, cursor_pos: usize) -> Option<(String, String, usize)> {
        if cursor_pos == 0 {
            return None;
        }

        let before_cursor = &text[..cursor_pos];
        let after_cursor = &text[cursor_pos..];

        let mut word_start = before_cursor.len();
        let mut chars = before_cursor.chars().rev().peekable();

        // Skip any whitespace immediately before the cursor.
        while let Some(&ch) = chars.peek() {
            if ch.is_whitespace() {
                word_start -= ch.len_utf8();
                chars.next();
            } else {
                break;
            }
        }

        // Consume the word itself (alphanumeric characters and underscores).
        while let Some(&ch) = chars.peek() {
            if !ch.is_alphanumeric() && ch != '_' {
                break;
            }
            word_start -= ch.len_utf8();
            chars.next();
        }

        // Also absorb the whitespace that precedes the word.
        while let Some(&ch) = chars.peek() {
            if ch.is_whitespace() {
                word_start -= ch.len_utf8();
                chars.next();
            } else {
                break;
            }
        }

        let deleted_text = text[word_start..cursor_pos].to_string();
        let remaining_text = format!("{}{}", &text[..word_start], after_cursor);

        Some((deleted_text, remaining_text, word_start))
    }

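    /// Deletes from `cursor_pos` through the end of the following word,
    /// returning the deleted text and the remaining text, or `None` if the
    /// cursor is at or past the end of the text.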
    #[must_use]
    pub fn delete_word_forward(text: &str, cursor_pos: usize) -> Option<(String, String)> {
        if cursor_pos >= text.len() {
            return None;
        }

        let before_cursor = &text[..cursor_pos];
        let after_cursor = &text[cursor_pos..];

        let mut chars = after_cursor.chars();
        let mut word_end = 0;

        // Advance over any non-word characters, then consume the word that follows.
        while let Some(ch) = chars.next() {
            word_end += ch.len_utf8();
            if ch.is_alphanumeric() || ch == '_' {
                for ch in chars.by_ref() {
                    if !ch.is_alphanumeric() && ch != '_' {
                        break;
                    }
                    word_end += ch.len_utf8();
                }
                break;
            }
        }

        let deleted_text = text[cursor_pos..cursor_pos + word_end].to_string();
        let remaining_text = format!("{}{}", before_cursor, &after_cursor[word_end..]);

        Some((deleted_text, remaining_text))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_cursor_token_position() {
        let query = "SELECT * FROM users WHERE id = 1";

        assert_eq!(TextNavigator::get_cursor_token_position(query, 0), (0, 8));

        assert_eq!(TextNavigator::get_cursor_token_position(query, 3), (1, 8));

        assert_eq!(TextNavigator::get_cursor_token_position(query, 7), (2, 8));
    }
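
    // Additional coverage for the empty-query paths: the expected values
    // follow directly from the guards at the top of each navigator method
    // rather than from any particular lexer behavior.
    #[test]
    fn test_empty_query_navigation() {
        assert_eq!(TextNavigator::get_cursor_token_position("", 0), (0, 0));
        assert_eq!(TextNavigator::get_token_at_cursor("", 0), None);
        assert_eq!(TextNavigator::calculate_prev_token_position("", 0), None);
        assert_eq!(TextNavigator::calculate_next_token_position("", 0), None);
    }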

    #[test]
    fn test_kill_line_backward() {
        let text = "SELECT * FROM users";

        let result = TextEditor::kill_line_backward(text, 8);
        assert_eq!(
            result,
            Some(("SELECT *".to_string(), " FROM users".to_string()))
        );

        let result = TextEditor::kill_line_backward(text, 0);
        assert_eq!(result, None);
    }
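
    // Mirrors the backward test above for the forward variant; the expected
    // tuple order follows from kill_line_forward returning (killed, remaining).
    #[test]
    fn test_kill_line_forward() {
        let text = "SELECT * FROM users";

        let result = TextEditor::kill_line_forward(text, 8);
        assert_eq!(
            result,
            Some((" FROM users".to_string(), "SELECT *".to_string()))
        );

        let result = TextEditor::kill_line_forward(text, text.len());
        assert_eq!(result, None);
    }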

    #[test]
    fn test_delete_word_backward() {
        let text = "SELECT * FROM users";

        let result = TextEditor::delete_word_backward(text, 13);
        assert_eq!(
            result,
            Some((" FROM".to_string(), "SELECT * users".to_string(), 8))
        );
    }
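
    // Forward counterpart of the test above: deleting at the space before
    // "FROM" removes the space together with the word that follows it.
    #[test]
    fn test_delete_word_forward() {
        let text = "SELECT * FROM users";

        let result = TextEditor::delete_word_forward(text, 8);
        assert_eq!(
            result,
            Some((" FROM".to_string(), "SELECT * users".to_string()))
        );
    }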
}