sql_cli/text_navigation.rs

use crate::recursive_parser::{Lexer, Token};

/// Manages text navigation and token-based movement
/// Extracted from the monolithic `enhanced_tui.rs`
pub struct TextNavigator;

impl TextNavigator {
    /// Get the cursor's position in terms of tokens (`current_token`, `total_tokens`)
    #[must_use]
    pub fn get_cursor_token_position(query: &str, cursor_pos: usize) -> (usize, usize) {
        if query.is_empty() {
            return (0, 0);
        }

        // Use lexer to tokenize the query
        let mut lexer = Lexer::new(query);
        let tokens = lexer.tokenize_all_with_positions();

        if tokens.is_empty() {
            return (0, 0);
        }

        // Special case: cursor at position 0 is always before the first token
        if cursor_pos == 0 {
            return (0, tokens.len());
        }

        // Find which token the cursor is in
        let mut current_token = 0;
        let mut found = false;
        for (i, (start, end, _)) in tokens.iter().enumerate() {
            if cursor_pos >= *start && cursor_pos <= *end {
                current_token = i + 1;
                found = true;
                break;
            } else if cursor_pos < *start {
                // Cursor is in a gap between tokens, before this one
                current_token = i;
                found = true;
                break;
            }
        }

        // If no token matched, the cursor lies past the last token
        if !found {
            current_token = tokens.len();
        }

        (current_token, tokens.len())
    }

    /// Get the token at the cursor position
    #[must_use]
    pub fn get_token_at_cursor(query: &str, cursor_pos: usize) -> Option<String> {
        if query.is_empty() {
            return None;
        }

        // Use lexer to tokenize the query
        let mut lexer = Lexer::new(query);
        let tokens = lexer.tokenize_all_with_positions();

        // Find the token at cursor position
        for (start, end, token) in &tokens {
            if cursor_pos >= *start && cursor_pos <= *end {
                // Format token nicely
                let token_str = Self::format_token(token);
                return Some(token_str.to_string());
            }
        }

        None
    }

    /// Calculate the target position for jumping to the previous token
    #[must_use]
    pub fn calculate_prev_token_position(query: &str, cursor_pos: usize) -> Option<usize> {
        if cursor_pos == 0 {
            return None;
        }

        let mut lexer = Lexer::new(query);
        let tokens = lexer.tokenize_all_with_positions();

        // Find current token position
        let mut in_token = false;
        let mut current_token_start = 0;
        for (start, end, _) in &tokens {
            if cursor_pos > *start && cursor_pos <= *end {
                in_token = true;
                current_token_start = *start;
                break;
            }
        }

        // Find the previous token start
        let target_pos = if in_token && cursor_pos > current_token_start {
            // If we're in the middle of a token, go to its start
            current_token_start
        } else {
            // Otherwise, find the previous token
            let mut prev_start = 0;
            for (start, _, _) in tokens.iter().rev() {
                if *start < cursor_pos {
                    prev_start = *start;
                    break;
                }
            }
            prev_start
        };

        if target_pos < cursor_pos {
            Some(target_pos)
        } else {
            None
        }
    }

    /// Calculate the target position for jumping to the next token
    #[must_use]
    pub fn calculate_next_token_position(query: &str, cursor_pos: usize) -> Option<usize> {
        let query_len = query.len();
        if cursor_pos >= query_len {
            return None;
        }

        let mut lexer = Lexer::new(query);
        let tokens = lexer.tokenize_all_with_positions();

        // Find current token position
        let mut in_token = false;
        let mut current_token_end = query_len;
        for (start, end, _) in &tokens {
            if cursor_pos >= *start && cursor_pos < *end {
                in_token = true;
                current_token_end = *end;
                break;
            }
        }

        // Find the next token start
        let target_pos = if in_token && cursor_pos < current_token_end {
            // If we're in a token, go to the start of the next token
            let mut next_start = query_len;
            for (start, _, _) in &tokens {
                if *start > current_token_end {
                    next_start = *start;
                    break;
                }
            }
            next_start
        } else {
            // Otherwise, find the next token from current position
            let mut next_start = query_len;
            for (start, _, _) in &tokens {
                if *start > cursor_pos {
                    next_start = *start;
                    break;
                }
            }
            next_start
        };

        if target_pos > cursor_pos && target_pos <= query_len {
            Some(target_pos)
        } else {
            None
        }
    }

    /// Format a token for display
    fn format_token(token: &Token) -> &str {
        match token {
            Token::Select => "SELECT",
            Token::From => "FROM",
            Token::Where => "WHERE",
            Token::With => "WITH",
            Token::GroupBy => "GROUP BY",
            Token::OrderBy => "ORDER BY",
            Token::Having => "HAVING",
            Token::As => "AS",
            Token::Asc => "ASC",
            Token::Desc => "DESC",
            Token::And => "AND",
            Token::Or => "OR",
            Token::In => "IN",
            Token::DateTime => "DateTime",
            Token::Case => "CASE",
            Token::When => "WHEN",
            Token::Then => "THEN",
            Token::Else => "ELSE",
            Token::End => "END",
            Token::Distinct => "DISTINCT",
            Token::Over => "OVER",
            Token::Partition => "PARTITION",
            Token::By => "BY",
            Token::Identifier(s) => s,
            Token::QuotedIdentifier(s) => s,
            Token::StringLiteral(s) => s,
            Token::NumberLiteral(s) => s,
            Token::Star => "*",
            Token::Comma => ",",
            Token::Dot => ".",
            Token::LeftParen => "(",
            Token::RightParen => ")",
            Token::Equal => "=",
            Token::NotEqual => "!=",
            Token::LessThan => "<",
            Token::LessThanOrEqual => "<=",
            Token::GreaterThan => ">",
            Token::GreaterThanOrEqual => ">=",
            Token::Like => "LIKE",
            Token::Not => "NOT",
            Token::Is => "IS",
            Token::Null => "NULL",
            Token::Between => "BETWEEN",
            Token::Limit => "LIMIT",
            Token::Offset => "OFFSET",
            Token::Plus => "+",
            Token::Minus => "-",
            Token::Divide => "/",
            Token::Modulo => "%",
            Token::Join => "JOIN",
            Token::Inner => "INNER",
            Token::Left => "LEFT",
            Token::Right => "RIGHT",
            Token::Full => "FULL",
            Token::Cross => "CROSS",
            Token::Outer => "OUTER",
            Token::On => "ON",
            Token::Eof => "EOF",
        }
    }
}

/// Text editing utilities
pub struct TextEditor;

impl TextEditor {
    /// Kill text from beginning of line to cursor position
    /// Returns (`killed_text`, `remaining_text`)
    #[must_use]
    pub fn kill_line_backward(text: &str, cursor_pos: usize) -> Option<(String, String)> {
        if cursor_pos == 0 {
            return None;
        }

        let killed_text = text.chars().take(cursor_pos).collect::<String>();
        let remaining_text = text.chars().skip(cursor_pos).collect::<String>();

        Some((killed_text, remaining_text))
    }

    /// Kill text from cursor position to end of line
    /// Returns (`killed_text`, `remaining_text`)
    #[must_use]
    pub fn kill_line_forward(text: &str, cursor_pos: usize) -> Option<(String, String)> {
        if cursor_pos >= text.len() {
            return None;
        }

        let remaining_text = text.chars().take(cursor_pos).collect::<String>();
        let killed_text = text.chars().skip(cursor_pos).collect::<String>();

        Some((killed_text, remaining_text))
    }

    /// Delete word backward from cursor position
    /// Returns (`deleted_text`, `remaining_text`, `new_cursor_pos`)
    #[must_use]
    pub fn delete_word_backward(text: &str, cursor_pos: usize) -> Option<(String, String, usize)> {
        if cursor_pos == 0 {
            return None;
        }

        let before_cursor = &text[..cursor_pos];
        let after_cursor = &text[cursor_pos..];

        // Find word boundary, including leading whitespace before the word
        let mut word_start = before_cursor.len();
        let mut chars = before_cursor.chars().rev().peekable();

        // Step 1: Skip trailing whitespace (if any)
        while let Some(&ch) = chars.peek() {
            if ch.is_whitespace() {
                word_start -= ch.len_utf8();
                chars.next();
            } else {
                break;
            }
        }

        // Step 2: Skip the word itself
        while let Some(&ch) = chars.peek() {
            if !ch.is_alphanumeric() && ch != '_' {
                break;
            }
            word_start -= ch.len_utf8();
            chars.next();
        }

        // Step 3: Include any whitespace before the word (so deleting at a word boundary includes the space)
        while let Some(&ch) = chars.peek() {
            if ch.is_whitespace() {
                word_start -= ch.len_utf8();
                chars.next();
            } else {
                break;
            }
        }

        let deleted_text = text[word_start..cursor_pos].to_string();
        let remaining_text = format!("{}{}", &text[..word_start], after_cursor);

        Some((deleted_text, remaining_text, word_start))
    }

    /// Delete word forward from cursor position
    /// Returns (`deleted_text`, `remaining_text`)
    #[must_use]
    pub fn delete_word_forward(text: &str, cursor_pos: usize) -> Option<(String, String)> {
        if cursor_pos >= text.len() {
            return None;
        }

        let before_cursor = &text[..cursor_pos];
        let after_cursor = &text[cursor_pos..];

        // Find word boundary
        let mut chars = after_cursor.chars();
        let mut word_end = 0;

        // Skip any non-alphanumeric chars at the beginning
        while let Some(ch) = chars.next() {
            word_end += ch.len_utf8();
            if ch.is_alphanumeric() || ch == '_' {
                // Found start of word, now skip the rest of it
                for ch in chars.by_ref() {
                    if !ch.is_alphanumeric() && ch != '_' {
                        break;
                    }
                    word_end += ch.len_utf8();
                }
                break;
            }
        }

        let deleted_text = text[cursor_pos..cursor_pos + word_end].to_string();
        let remaining_text = format!("{}{}", before_cursor, &after_cursor[word_end..]);

        Some((deleted_text, remaining_text))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_cursor_token_position() {
        let query = "SELECT * FROM users WHERE id = 1";

        // Cursor at beginning
        assert_eq!(TextNavigator::get_cursor_token_position(query, 0), (0, 8));

        // Cursor in SELECT
        assert_eq!(TextNavigator::get_cursor_token_position(query, 3), (1, 8));

        // Cursor after SELECT
        assert_eq!(TextNavigator::get_cursor_token_position(query, 7), (2, 8));
    }
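
    // The remaining navigation helpers are exercised below. These cases are a
    // sketch: the expected offsets assume the lexer reports byte spans for
    // "SELECT * FROM users" (SELECT at 0, `*` at 7, FROM at 9), consistent with
    // the positions relied on in `test_cursor_token_position` above.
    #[test]
    fn test_token_at_cursor() {
        let query = "SELECT * FROM users";

        // Cursor inside the first keyword
        assert_eq!(
            TextNavigator::get_token_at_cursor(query, 2),
            Some("SELECT".to_string())
        );

        // An empty query has no token under the cursor
        assert_eq!(TextNavigator::get_token_at_cursor("", 0), None);
    }

    #[test]
    fn test_next_and_prev_token_position() {
        let query = "SELECT * FROM users";

        // From the start of SELECT, the next jump target is `*` at byte 7
        assert_eq!(
            TextNavigator::calculate_next_token_position(query, 0),
            Some(7)
        );

        // From just after FROM, jumping back lands on its start at byte 9
        assert_eq!(
            TextNavigator::calculate_prev_token_position(query, 13),
            Some(9)
        );

        // No previous token before position 0, no next token at the end
        assert_eq!(TextNavigator::calculate_prev_token_position(query, 0), None);
        assert_eq!(
            TextNavigator::calculate_next_token_position(query, query.len()),
            None
        );
    }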

    #[test]
    fn test_kill_line_backward() {
        let text = "SELECT * FROM users";

        // Kill from middle
        let result = TextEditor::kill_line_backward(text, 8);
        assert_eq!(
            result,
            Some(("SELECT *".to_string(), " FROM users".to_string()))
        );

        // Kill from beginning (no-op)
        let result = TextEditor::kill_line_backward(text, 0);
        assert_eq!(result, None);
    }
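
    // A sketch mirroring `test_kill_line_backward`; note the return order is
    // (`killed_text`, `remaining_text`) for both kill directions.
    #[test]
    fn test_kill_line_forward() {
        let text = "SELECT * FROM users";

        // Kill from middle: everything after the cursor is killed
        let result = TextEditor::kill_line_forward(text, 8);
        assert_eq!(
            result,
            Some((" FROM users".to_string(), "SELECT *".to_string()))
        );

        // Kill from the end (no-op)
        let result = TextEditor::kill_line_forward(text, text.len());
        assert_eq!(result, None);
    }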

    #[test]
    fn test_delete_word_backward() {
        let text = "SELECT * FROM users";

        // Delete "FROM"
        let result = TextEditor::delete_word_backward(text, 13);
        assert_eq!(
            result,
            Some((" FROM".to_string(), "SELECT * users".to_string(), 8))
        );
    }
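
    // A sketch of the forward variant: deleting forward from just after `*`
    // removes the separator plus the following word.
    #[test]
    fn test_delete_word_forward() {
        let text = "SELECT * FROM users";

        // Delete " FROM" with the cursor positioned after "*"
        let result = TextEditor::delete_word_forward(text, 8);
        assert_eq!(
            result,
            Some((" FROM".to_string(), "SELECT * users".to_string()))
        );

        // Cursor at end of text (no-op)
        let result = TextEditor::delete_word_forward(text, text.len());
        assert_eq!(result, None);
    }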
}