// xtask_todo_lib/devshell/completion/context.rs
//! Completion context: tokenization and `CompletionKind` from `(line, pos)`.

/// Classifies the word at the cursor for completion-source selection:
/// a command name, a filesystem path, or nothing completable.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CompletionKind {
    /// The word is in command position (first word of the line, or the word
    /// right after a `|`).
    Command,
    /// The word is a path (argument of a path-taking command, or a redirect
    /// target).
    Path,
    /// The previous token is neither a pipe nor a path-taking token; no
    /// completion is offered.
    Other,
}
11
/// Completion context parsed from `(line, pos)`: the prefix of the word being
/// typed and whether it completes as a command or a path.
#[derive(Debug)]
pub struct CompletionContext {
    /// What the user has typed of the current word so far (`line[start..pos]`).
    pub prefix: String,
    /// How the current word should be completed.
    pub kind: CompletionKind,
    /// Byte offset in `line` where the current word starts (used as
    /// rustyline's replacement start).
    pub start: usize,
}
20
21/// Tokenize line[..pos] by spaces and delimiters |, <, >, and "2>" as one token.
22/// Returns list of (`token_string`, `start_index`).
23pub(super) fn tokenize(line: &str, pos: usize) -> Vec<(String, usize)> {
24    let slice = line.get(..pos).unwrap_or("");
25    let mut tokens = Vec::new();
26    let mut i = 0;
27    let bytes = slice.as_bytes();
28
29    while i < bytes.len() {
30        // Skip whitespace
31        while i < bytes.len() && bytes[i].is_ascii_whitespace() {
32            i += 1;
33        }
34        if i >= bytes.len() {
35            break;
36        }
37        let token_start = i;
38
39        // Delimiter "2>" as one token
40        if i + 1 < bytes.len() && bytes[i] == b'2' && bytes[i + 1] == b'>' {
41            tokens.push(("2>".to_string(), token_start));
42            i += 2;
43            continue;
44        }
45        // Single-char delimiters
46        if bytes[i] == b'|' || bytes[i] == b'<' || bytes[i] == b'>' {
47            let ch = char::from(bytes[i]);
48            tokens.push((ch.to_string(), token_start));
49            i += 1;
50            continue;
51        }
52
53        // Collect run of non-delimiter, non-whitespace (stop before "2>", |, <, >, or space)
54        let start = i;
55        while i < bytes.len() {
56            if bytes[i].is_ascii_whitespace() {
57                break;
58            }
59            if bytes[i] == b'|' || bytes[i] == b'<' || bytes[i] == b'>' {
60                break;
61            }
62            if bytes[i] == b'2' && i + 1 < bytes.len() && bytes[i + 1] == b'>' {
63                break;
64            }
65            i += 1;
66        }
67        let token = slice[start..i].to_string();
68        if !token.is_empty() {
69            tokens.push((token, start));
70        }
71    }
72
73    tokens
74}
75
/// Tokens after which the next word is completed as a path: built-in commands
/// that take a filesystem argument, plus the redirect operators.
const PATH_TRIGGER_TOKENS: &[&str] = &[
    "cd",
    "ls",
    "cat",
    "mkdir",
    "touch",
    "export-readonly",
    "export_readonly",
    "source",
    ".",
    ">",
    "2>",
    "<",
];
91
92/// Returns the (prefix, start) for the token that contains the cursor at `pos`.
93/// Prefix is the part of the token from start up to pos (what the user has typed so far).
94pub(super) fn token_at_cursor(
95    line: &str,
96    tokens: &[(String, usize)],
97    pos: usize,
98) -> Option<(String, usize)> {
99    if pos > line.len() {
100        return None;
101    }
102    for (token, start) in tokens {
103        let end = start + token.len();
104        if *start <= pos && end >= pos {
105            let prefix = line.get(*start..pos).unwrap_or("").to_string();
106            return Some((prefix, *start));
107        }
108    }
109    // Cursor in trailing whitespace: prefix empty, start at pos
110    if !tokens.is_empty() {
111        let (last_token, last_start) = tokens.last().unwrap();
112        let last_end = last_start + last_token.len();
113        if pos >= last_end {
114            return Some((String::new(), pos));
115        }
116    }
117    None
118}
119
120/// Parse completion context at (line, pos). Returns None if line empty or pos out of bounds.
121#[must_use]
122pub fn completion_context(line: &str, pos: usize) -> Option<CompletionContext> {
123    if line.is_empty() {
124        return None;
125    }
126    let line_len = line.len();
127    if pos > line_len {
128        return None;
129    }
130
131    let tokens = tokenize(line, pos);
132    let (prefix, start) = token_at_cursor(line, &tokens, pos)?;
133
134    // If we got empty prefix with start == pos, we're in trailing space; still return context with kind
135    let prefix = if prefix.is_empty() && start == pos && !tokens.is_empty() {
136        String::new()
137    } else if prefix.is_empty() && start == pos {
138        return None;
139    } else {
140        prefix
141    };
142
143    let token_index = tokens
144        .iter()
145        .position(|(t, s)| *s == start && t.as_str() == prefix.as_str())
146        .or({
147            if prefix.is_empty() {
148                Some(tokens.len())
149            } else {
150                None
151            }
152        });
153
154    let idx = token_index.unwrap_or_else(|| tokens.iter().take_while(|(_, s)| *s < start).count());
155
156    let kind = if idx == 0 {
157        CompletionKind::Command
158    } else {
159        let prev = tokens.get(idx.wrapping_sub(1)).map(|(t, _)| t.as_str());
160        if prev == Some("|") {
161            CompletionKind::Command
162        } else if prev.is_some_and(|p| PATH_TRIGGER_TOKENS.contains(&p)) {
163            CompletionKind::Path
164        } else {
165            CompletionKind::Other
166        }
167    };
168
169    Some(CompletionContext {
170        prefix,
171        kind,
172        start,
173    })
174}