intelli_shell/service/ai.rs

use std::sync::LazyLock;

use futures_util::{Stream, stream};
use regex::{Captures, Regex};
use tokio::io::{AsyncRead, AsyncReadExt};
use tracing::instrument;

use super::{IntelliShellService, import_export::add_tags_to_description};
use crate::{
    ai::CommandFix,
    errors::{Result, UserFacingError},
    model::{CATEGORY_USER, Command, SOURCE_AI},
    utils::{
        execute_shell_command_capture, generate_working_dir_tree, get_executable_version, get_os_info, get_shell_info,
    },
};

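/// Maximum depth of the working directory tree included in AI prompts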
const WD_MAX_DEPTH: usize = 5;
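/// Maximum number of entries included when generating that working directory tree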
const WD_ENTRY_LIMIT: usize = 30;

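/// Matches `##NAME##` placeholders inside the configured prompt templates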
static PROMPT_PLACEHOLDER_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"##([A-Z_]+)##").unwrap());

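/// Progress events reported while fixing a command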
#[derive(Debug)]
pub enum AiFixProgress {
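    /// The failing command has been re-executed and the AI is now being asked for a fix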
    Thinking,
}

impl IntelliShellService {
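    /// Re-runs a failing command and asks the AI for a fixed version.
    ///
    /// Returns `Ok(None)` if the command actually succeeds or its execution is interrupted with Ctrl+C.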
    #[instrument(skip_all)]
    pub async fn fix_command<F>(
        &self,
        command: &str,
        history: Option<&str>,
        mut on_progress: F,
    ) -> Result<Option<CommandFix>>
    where
        F: FnMut(AiFixProgress),
    {
        if !self.ai.enabled {
            return Err(UserFacingError::AiRequired.into());
        }

        if command.trim().is_empty() {
            return Err(UserFacingError::AiEmptyCommand.into());
        }

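        // Re-run the command to capture its exit status and output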
        let (status, output, terminated_by_ctrl_c) = execute_shell_command_capture(command, true).await?;

        if terminated_by_ctrl_c {
            tracing::info!("Command execution was interrupted by user (Ctrl+C), skipping fix");
            return Ok(None);
        }

        if status.success() {
            tracing::info!("The command to fix was successfully executed, skipping fix");
            return Ok(None);
        }

        on_progress(AiFixProgress::Thinking);

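        // Build the prompts, enriching the configured template with execution context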
        let root_cmd = command.split_whitespace().next();
        let sys_prompt = replace_prompt_placeholders(&self.ai.prompts.fix, root_cmd, history);
        let user_prompt = format!(
            "I've run a command but it failed, help me fix it.\n\ncommand: \
             {command}\n{status}\noutput:\n```\n{output}\n```"
        );

        tracing::trace!("System Prompt:\n{sys_prompt}");
        tracing::trace!("User Prompt:\n{user_prompt}");

        let fix = self
            .ai
            .fix_client()?
            .generate_command_fix(&sys_prompt, &user_prompt)
            .await?;

        Ok(Some(fix))
    }

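    /// Generates command suggestions from a natural language prompt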
    #[instrument(skip_all)]
    pub async fn suggest_commands(&self, prompt: &str) -> Result<Vec<Command>> {
        if !self.ai.enabled {
            return Err(UserFacingError::AiRequired.into());
        }

        let sys_prompt = replace_prompt_placeholders(&self.ai.prompts.suggest, None, None);

        tracing::trace!("System Prompt:\n{sys_prompt}");

        let res = self
            .ai
            .suggest_client()?
            .generate_command_suggestions(&sys_prompt, prompt)
            .await?;

        Ok(res
            .suggestions
            .into_iter()
            .map(|s| Command::new(CATEGORY_USER, SOURCE_AI, s.command).with_description(Some(s.description)))
            .collect())
    }

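    /// Reads arbitrary text content and asks the AI to extract importable commands from it,
    /// returning them as a stream tagged with the given category, source and tags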
    #[instrument(skip_all)]
    pub(super) async fn prompt_commands_import(
        &self,
        mut content: impl AsyncRead + Unpin + Send,
        tags: Vec<String>,
        category: impl Into<String>,
        source: impl Into<String>,
    ) -> Result<impl Stream<Item = Result<Command>> + Send + 'static> {
        if !self.ai.enabled {
            return Err(UserFacingError::AiRequired.into());
        }

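        // Read the whole input; an empty input yields no suggestions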
        let mut prompt = String::new();
        content.read_to_string(&mut prompt).await?;

        let suggestions = if prompt.is_empty() {
            Vec::new()
        } else {
            let sys_prompt = replace_prompt_placeholders(&self.ai.prompts.import, None, None);

            tracing::trace!("System Prompt:\n{sys_prompt}");

            let res = self
                .ai
                .suggest_client()?
                .generate_command_suggestions(&sys_prompt, &prompt)
                .await?;

            res.suggestions
        };

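        // Tag every suggestion with the requested category, source and tags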
        let category = category.into();
        let source = source.into();
        Ok(stream::iter(
            suggestions
                .into_iter()
                .map(move |s| {
                    let mut description = s.description;
                    if !tags.is_empty() {
                        description = add_tags_to_description(&tags, description);
                    }
                    Command::new(category.clone(), source.clone(), s.command).with_description(Some(description))
                })
                .map(Ok),
        ))
    }
}

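/// Replaces `##NAME##` placeholders in a prompt template with runtime context
/// (OS and shell information, a working directory tree, or the user's shell history)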
fn replace_prompt_placeholders(prompt: &str, root_cmd: Option<&str>, history: Option<&str>) -> String {
    PROMPT_PLACEHOLDER_RE
        .replace_all(prompt, |caps: &Captures| match &caps[1] {
            "OS_SHELL_INFO" => {
                let shell_info = get_shell_info();
                let os_info = get_os_info();
                format!(
                    "### Context:\n- {os_info}\n- {}{}\n",
                    shell_info
                        .version
                        .clone()
                        .unwrap_or_else(|| shell_info.kind.to_string()),
                    root_cmd
                        .and_then(get_executable_version)
                        .map(|v| format!("\n- {v}"))
                        .unwrap_or_default(),
                )
            }
            "WORKING_DIR" => generate_working_dir_tree(WD_MAX_DEPTH, WD_ENTRY_LIMIT).unwrap_or_default(),
            "SHELL_HISTORY" => history
                .map(|h| format!("### User Shell History (oldest to newest):\n{h}\n"))
                .unwrap_or_default(),
            _ => {
                tracing::warn!("Prompt placeholder '{}' not recognized", &caps[0]);
                String::default()
            }
        })
        .to_string()
}
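
#[cfg(test)]
mod tests {
    use super::*;

    // Illustrative sketch of the placeholder expansion above: the `SHELL_HISTORY`
    // marker is replaced with the provided history, and templates without any
    // recognized placeholder pass through unchanged.
    #[test]
    fn replaces_known_placeholders() {
        let expanded = replace_prompt_placeholders("##SHELL_HISTORY##", None, Some("git status"));
        assert_eq!(expanded, "### User Shell History (oldest to newest):\ngit status\n");

        let untouched = replace_prompt_placeholders("plain prompt", None, None);
        assert_eq!(untouched, "plain prompt");
    }
}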