// syncable_cli/agent/mod.rs
pub mod commands;
34pub mod history;
35pub mod ide;
36pub mod prompts;
37pub mod session;
38pub mod tools;
39pub mod ui;
40
41use colored::Colorize;
42use history::{ConversationHistory, ToolCallRecord};
43use ide::IdeClient;
44use rig::{
45 client::{CompletionClient, ProviderClient},
46 completion::Prompt,
47 providers::{anthropic, openai},
48};
49use session::ChatSession;
50use commands::TokenUsage;
51use std::path::Path;
52use std::sync::Arc;
53use tokio::sync::Mutex as TokioMutex;
54use ui::{ResponseFormatter, ToolDisplayHook};
55
/// LLM backends the agent can talk to.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum ProviderType {
    /// OpenAI models (the default provider).
    #[default]
    OpenAI,
    /// Anthropic Claude models.
    Anthropic,
}
63
64impl std::fmt::Display for ProviderType {
65 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
66 match self {
67 ProviderType::OpenAI => write!(f, "openai"),
68 ProviderType::Anthropic => write!(f, "anthropic"),
69 }
70 }
71}
72
73impl std::str::FromStr for ProviderType {
74 type Err = String;
75
76 fn from_str(s: &str) -> Result<Self, Self::Err> {
77 match s.to_lowercase().as_str() {
78 "openai" => Ok(ProviderType::OpenAI),
79 "anthropic" => Ok(ProviderType::Anthropic),
80 _ => Err(format!("Unknown provider: {}", s)),
81 }
82 }
83}
84
/// Errors surfaced by the agent layer.
#[derive(Debug, thiserror::Error)]
pub enum AgentError {
    /// No API key is configured; the payload names the environment variable to set.
    #[error("Missing API key. Set {0} environment variable.")]
    MissingApiKey(String),

    /// The LLM provider call failed (stringified provider error).
    #[error("Provider error: {0}")]
    ProviderError(String),

    /// A tool invocation failed (stringified tool error).
    #[error("Tool error: {0}")]
    ToolError(String),
}
97
98pub type AgentResult<T> = Result<T, AgentError>;
99
100fn get_system_prompt(project_path: &Path, query: Option<&str>) -> String {
102 if let Some(q) = query {
104 if prompts::is_generation_query(q) {
105 return prompts::get_devops_prompt(project_path);
106 }
107 }
108 prompts::get_analysis_prompt(project_path)
110}
111
/// Interactive REPL-style chat loop against the configured LLM provider.
///
/// Builds a chat session, optionally connects to an IDE companion, ensures
/// an API key is available, then loops: read input, handle slash commands,
/// compact conversation history when needed, and run the prompt through a
/// tool-equipped agent. Analysis tools are always attached; write/shell
/// tools are added only for generation-style queries.
///
/// # Errors
/// Propagates failures from the initial API-key prompt. Per-request provider
/// errors are printed and the loop continues.
pub async fn run_interactive(
    project_path: &Path,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<()> {
    use tools::*;

    let mut session = ChatSession::new(project_path, provider, model);

    // Cross-turn memory; compacted below once it reports needs_compaction().
    let mut conversation_history = ConversationHistory::new();

    // Best-effort IDE companion connection. Connection failures are swallowed
    // deliberately: the CLI must keep working without an editor attached.
    let ide_client: Option<Arc<TokioMutex<IdeClient>>> = {
        let mut client = IdeClient::new().await;
        if client.is_ide_available() {
            match client.connect().await {
                Ok(()) => {
                    println!(
                        "{} Connected to {} IDE companion",
                        "✓".green(),
                        client.ide_name().unwrap_or("VS Code")
                    );
                    Some(Arc::new(TokioMutex::new(client)))
                }
                Err(_) => {
                    None
                }
            }
        } else {
            None
        }
    };

    // Pull any stored key into the environment, then prompt if still absent.
    ChatSession::load_api_key_to_env(session.provider);

    if !ChatSession::has_api_key(session.provider) {
        ChatSession::prompt_api_key(session.provider)?;
    }

    session.print_banner();

    loop {
        // Show a context indicator once there is history to summarize.
        if !conversation_history.is_empty() {
            println!("{}", format!(" 💬 Context: {}", conversation_history.status()).dimmed());
        }

        let input = match session.read_input() {
            Ok(input) => input,
            Err(_) => break, // EOF / read failure ends the session
        };

        if input.is_empty() {
            continue;
        }

        // Slash commands are handled locally and never sent to the model.
        if ChatSession::is_command(&input) {
            // /clear (or /c) also wipes the agent-side conversation memory.
            if input.trim().to_lowercase() == "/clear" || input.trim().to_lowercase() == "/c" {
                conversation_history.clear();
            }
            match session.process_command(&input) {
                Ok(true) => continue,
                Ok(false) => break,
                Err(e) => {
                    eprintln!("{}", format!("Error: {}", e).red());
                    continue;
                }
            }
        }

        if !ChatSession::has_api_key(session.provider) {
            eprintln!("{}", "No API key configured. Use /provider to set one.".yellow());
            continue;
        }

        // Compress old turns before they overflow the model's context window.
        if conversation_history.needs_compaction() {
            println!("{}", " 📦 Compacting conversation history...".dimmed());
            if let Some(summary) = conversation_history.compact() {
                println!("{}", format!(" ✓ Compressed {} turns", summary.matches("Turn").count()).dimmed());
            }
        }

        // Hook that renders tool invocations live and records them so they
        // can be folded into the conversation history afterwards.
        let hook = ToolDisplayHook::new();

        let project_path_buf = session.project_path.clone();
        // Generation-style queries get the DevOps preamble and, below, the
        // file-writing / shell tools.
        let preamble = get_system_prompt(&session.project_path, Some(&input));
        let is_generation = prompts::is_generation_query(&input);

        let mut chat_history = conversation_history.to_messages();

        let response = match session.provider {
            ProviderType::OpenAI => {
                let client = openai::Client::from_env();
                // Reasoning parameters are only sent for models that accept
                // them (gpt-5* / o1* prefixes).
                let reasoning_params = if session.model.starts_with("gpt-5") || session.model.starts_with("o1") {
                    Some(serde_json::json!({
                        "reasoning": {
                            "effort": "medium",
                            "summary": "detailed"
                        }
                    }))
                } else {
                    None
                };

                let mut builder = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf.clone()));

                if is_generation {
                    // Route file writes through the IDE when connected so
                    // changes surface in the editor.
                    let write_file_tool = if let Some(ref client) = ide_client {
                        WriteFileTool::new(project_path_buf.clone())
                            .with_ide_client(client.clone())
                    } else {
                        WriteFileTool::new(project_path_buf.clone())
                    };
                    builder = builder
                        .tool(write_file_tool)
                        .tool(WriteFilesTool::new(project_path_buf.clone()))
                        .tool(ShellTool::new(project_path_buf.clone()));
                }

                if let Some(params) = reasoning_params {
                    builder = builder.additional_params(params);
                }

                let agent = builder.build();
                // Allow up to 50 agent/tool turns per user prompt; the error
                // branch below translates the limit error into a hint.
                agent.prompt(&input)
                    .with_history(&mut chat_history)
                    .with_hook(hook.clone())
                    .multi_turn(50)
                    .await
            }
            ProviderType::Anthropic => {
                let client = anthropic::Client::from_env();
                let mut builder = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf.clone()));

                if is_generation {
                    let write_file_tool = if let Some(ref client) = ide_client {
                        WriteFileTool::new(project_path_buf.clone())
                            .with_ide_client(client.clone())
                    } else {
                        WriteFileTool::new(project_path_buf.clone())
                    };
                    builder = builder
                        .tool(write_file_tool)
                        .tool(WriteFilesTool::new(project_path_buf.clone()))
                        .tool(ShellTool::new(project_path_buf.clone()));
                }

                let agent = builder.build();

                agent.prompt(&input)
                    .with_history(&mut chat_history)
                    .with_hook(hook.clone())
                    .multi_turn(50)
                    .await
            }
        };

        match response {
            Ok(text) => {
                println!();
                ResponseFormatter::print_response(&text);

                // Token counts are estimates, not provider-reported usage.
                let prompt_tokens = TokenUsage::estimate_tokens(&input);
                let completion_tokens = TokenUsage::estimate_tokens(&text);
                session.token_usage.add_request(prompt_tokens, completion_tokens);

                // Record the full turn (including tool calls) for future context.
                let tool_calls = extract_tool_calls_from_hook(&hook).await;

                conversation_history.add_turn(input.clone(), text.clone(), tool_calls);

                session.history.push(("user".to_string(), input));
                session.history.push(("assistant".to_string(), text));
            }
            Err(e) => {
                let err_str = e.to_string();
                println!();
                // Hitting the multi-turn cap is expected on long tasks; show
                // a resume hint instead of a raw error. NOTE(review): the
                // substring matching assumes rig's error text — verify on upgrade.
                if err_str.contains("MaxDepth") || err_str.contains("max_depth") || err_str.contains("reached limit") {
                    eprintln!("{}", "Reached tool call limit (50 turns).".yellow());
                    eprintln!("{}", "Type 'continue' to resume, or ask a new question.".dimmed());
                } else {
                    eprintln!("{}", format!("Error: {}", e).red());
                }
            }
        }
        println!();
    }

    Ok(())
}
348
349async fn extract_tool_calls_from_hook(hook: &ToolDisplayHook) -> Vec<ToolCallRecord> {
351 let state = hook.state();
352 let guard = state.lock().await;
353
354 guard.tool_calls.iter().map(|tc| {
355 ToolCallRecord {
356 tool_name: tc.name.clone(),
357 args_summary: truncate_string(&tc.args, 100),
358 result_summary: tc.output.as_ref()
359 .map(|o| truncate_string(o, 200))
360 .unwrap_or_else(|| "completed".to_string()),
361 }
362 }).collect()
363}
364
/// Truncates `s` to at most `max_len` bytes, appending "..." when the
/// string had to be cut.
///
/// The cut point is backed off to a UTF-8 char boundary: the original
/// `&s[..n]` slice panicked when `n` landed inside a multi-byte character,
/// which is common for tool outputs containing non-ASCII text.
fn truncate_string(s: &str, max_len: usize) -> String {
    if s.len() <= max_len {
        return s.to_string();
    }
    // Reserve 3 bytes for the "..." suffix (saturates for tiny max_len).
    let budget = max_len.saturating_sub(3);
    // Walk back to the nearest char boundary so slicing cannot panic.
    let mut cut = budget;
    while cut > 0 && !s.is_char_boundary(cut) {
        cut -= 1;
    }
    format!("{}...", &s[..cut])
}
373
/// Runs a single non-interactive query and returns the model's final text.
///
/// The same tool set as the interactive loop is attached: analysis tools
/// always, plus write/shell tools when the query looks like a generation
/// request. No conversation history, IDE client, or display hooks are used.
///
/// # Errors
/// Returns [`AgentError::ProviderError`] when the provider call fails.
pub async fn run_query(
    project_path: &Path,
    query: &str,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<String> {
    use tools::*;

    let project_path_buf = project_path.to_path_buf();
    let preamble = get_system_prompt(project_path, Some(query));
    let is_generation = prompts::is_generation_query(query);

    match provider {
        ProviderType::OpenAI => {
            let client = openai::Client::from_env();
            // NOTE(review): verify this default model id is one the provider
            // actually serves; it differs from the session default path.
            let model_name = model.as_deref().unwrap_or("gpt-5.2");

            // Reasoning parameters are only sent for models that accept them
            // (gpt-5* / o1* prefixes).
            let reasoning_params = if model_name.starts_with("gpt-5") || model_name.starts_with("o1") {
                Some(serde_json::json!({
                    "reasoning": {
                        "effort": "medium",
                        "summary": "detailed"
                    }
                }))
            } else {
                None
            };

            let mut builder = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf.clone()));

            if is_generation {
                // Generation queries may create files and run shell commands.
                builder = builder
                    .tool(WriteFileTool::new(project_path_buf.clone()))
                    .tool(WriteFilesTool::new(project_path_buf.clone()))
                    .tool(ShellTool::new(project_path_buf.clone()));
            }

            if let Some(params) = reasoning_params {
                builder = builder.additional_params(params);
            }

            let agent = builder.build();

            agent
                .prompt(query)
                .multi_turn(50)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
        ProviderType::Anthropic => {
            let client = anthropic::Client::from_env();
            let model_name = model.as_deref().unwrap_or("claude-sonnet-4-20250514");

            let mut builder = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf.clone()));

            if is_generation {
                builder = builder
                    .tool(WriteFileTool::new(project_path_buf.clone()))
                    .tool(WriteFilesTool::new(project_path_buf.clone()))
                    .tool(ShellTool::new(project_path_buf.clone()));
            }

            let agent = builder.build();

            agent
                .prompt(query)
                .multi_turn(50)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
    }
}