syncable_cli/agent/mod.rs

pub mod commands;
pub mod history;
pub mod prompts;
pub mod session;
pub mod tools;
pub mod ui;

use colored::Colorize;
use commands::TokenUsage;
use history::{ConversationHistory, ToolCallRecord};
use rig::{
    client::{CompletionClient, ProviderClient},
    completion::Prompt,
    providers::{anthropic, openai},
};
use session::ChatSession;
use std::path::Path;
use ui::{ResponseFormatter, ToolDisplayHook};

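/// LLM provider backing the agent. Defaults to OpenAI; parsed
/// case-insensitively by the `FromStr` impl below.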
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum ProviderType {
    #[default]
    OpenAI,
    Anthropic,
}

impl std::fmt::Display for ProviderType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ProviderType::OpenAI => write!(f, "openai"),
            ProviderType::Anthropic => write!(f, "anthropic"),
        }
    }
}

impl std::str::FromStr for ProviderType {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "openai" => Ok(ProviderType::OpenAI),
            "anthropic" => Ok(ProviderType::Anthropic),
            _ => Err(format!("Unknown provider: {}", s)),
        }
    }
}

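/// Errors surfaced by the agent layer, wrapping configuration, provider,
/// and tool failures.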
#[derive(Debug, thiserror::Error)]
pub enum AgentError {
    #[error("Missing API key. Set {0} environment variable.")]
    MissingApiKey(String),

    #[error("Provider error: {0}")]
    ProviderError(String),

    #[error("Tool error: {0}")]
    ToolError(String),
}

pub type AgentResult<T> = Result<T, AgentError>;

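/// Selects the system prompt for a query: queries that
/// `prompts::is_generation_query` classifies as generation requests get the
/// DevOps prompt; everything else gets the analysis prompt.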
fn get_system_prompt(project_path: &Path, query: Option<&str>) -> String {
    if let Some(q) = query {
        if prompts::is_generation_query(q) {
            return prompts::get_devops_prompt(project_path);
        }
    }
    prompts::get_analysis_prompt(project_path)
}

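/// Runs the interactive chat loop: reads user input, handles `/` commands,
/// compacts conversation history when it grows too large, and sends each
/// prompt to the configured provider with the project tools attached.
/// Returns when the user quits or input can no longer be read.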
pub async fn run_interactive(
    project_path: &Path,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<()> {
    use tools::*;

    let mut session = ChatSession::new(project_path, provider, model);

    let mut conversation_history = ConversationHistory::new();

    ChatSession::load_api_key_to_env(session.provider);

    if !ChatSession::has_api_key(session.provider) {
        ChatSession::prompt_api_key(session.provider)?;
    }

    session.print_banner();

    loop {
        if !conversation_history.is_empty() {
            println!("{}", format!(" 💬 Context: {}", conversation_history.status()).dimmed());
        }

        let input = match session.read_input() {
            Ok(input) => input,
            Err(_) => break,
        };

        if input.is_empty() {
            continue;
        }

        if ChatSession::is_command(&input) {
            if input.trim().to_lowercase() == "/clear" || input.trim().to_lowercase() == "/c" {
                conversation_history.clear();
            }
            match session.process_command(&input) {
                Ok(true) => continue,
                Ok(false) => break,
                Err(e) => {
                    eprintln!("{}", format!("Error: {}", e).red());
                    continue;
                }
            }
        }

        if !ChatSession::has_api_key(session.provider) {
            eprintln!("{}", "No API key configured. Use /provider to set one.".yellow());
            continue;
        }

        if conversation_history.needs_compaction() {
            println!("{}", " 📦 Compacting conversation history...".dimmed());
            if let Some(summary) = conversation_history.compact() {
                println!("{}", format!(" ✓ Compressed {} turns", summary.matches("Turn").count()).dimmed());
            }
        }

        let hook = ToolDisplayHook::new();

        let project_path_buf = session.project_path.clone();
        let preamble = get_system_prompt(&session.project_path, Some(&input));
        let is_generation = prompts::is_generation_query(&input);

        let mut chat_history = conversation_history.to_messages();

        let response = match session.provider {
            ProviderType::OpenAI => {
                let client = openai::Client::from_env();

                // Reasoning-capable models take extra reasoning parameters.
                let reasoning_params = if session.model.starts_with("gpt-5")
                    || session.model.starts_with("o1")
                {
                    Some(serde_json::json!({
                        "reasoning": {
                            "effort": "medium",
                            "summary": "detailed"
                        }
                    }))
                } else {
                    None
                };

                let mut builder = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf.clone()));

                // Write/exec tools are only attached for generation-style queries.
                if is_generation {
                    builder = builder
                        .tool(WriteFileTool::new(project_path_buf.clone()))
                        .tool(WriteFilesTool::new(project_path_buf.clone()))
                        .tool(ShellTool::new(project_path_buf.clone()));
                }

                if let Some(params) = reasoning_params {
                    builder = builder.additional_params(params);
                }

                let agent = builder.build();
                agent
                    .prompt(&input)
                    .with_history(&mut chat_history)
                    .with_hook(hook.clone())
                    .multi_turn(50)
                    .await
            }
            ProviderType::Anthropic => {
                let client = anthropic::Client::from_env();
                let mut builder = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf.clone()));

                if is_generation {
                    builder = builder
                        .tool(WriteFileTool::new(project_path_buf.clone()))
                        .tool(WriteFilesTool::new(project_path_buf.clone()))
                        .tool(ShellTool::new(project_path_buf.clone()));
                }

                let agent = builder.build();

                agent
                    .prompt(&input)
                    .with_history(&mut chat_history)
                    .with_hook(hook.clone())
                    .multi_turn(50)
                    .await
            }
        };

        match response {
            Ok(text) => {
                println!();
                ResponseFormatter::print_response(&text);

                // Rough token accounting based on text-length estimates.
                let prompt_tokens = TokenUsage::estimate_tokens(&input);
                let completion_tokens = TokenUsage::estimate_tokens(&text);
                session.token_usage.add_request(prompt_tokens, completion_tokens);

                let tool_calls = extract_tool_calls_from_hook(&hook).await;

                conversation_history.add_turn(input.clone(), text.clone(), tool_calls);

                session.history.push(("user".to_string(), input));
                session.history.push(("assistant".to_string(), text));
            }
            Err(e) => {
                let err_str = e.to_string();
                println!();
                // Detect hitting the multi-turn depth limit by message text,
                // since the error is only available here as a string.
                if err_str.contains("MaxDepth")
                    || err_str.contains("max_depth")
                    || err_str.contains("reached limit")
                {
                    eprintln!("{}", "Reached tool call limit (50 turns).".yellow());
                    eprintln!("{}", "Type 'continue' to resume, or ask a new question.".dimmed());
                } else {
                    eprintln!("{}", format!("Error: {}", e).red());
                }
            }
        }
        println!();
    }

    Ok(())
}

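/// Snapshots the tool calls recorded by the display hook into
/// `ToolCallRecord`s, truncating arguments and results so the conversation
/// history stays compact.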
async fn extract_tool_calls_from_hook(hook: &ToolDisplayHook) -> Vec<ToolCallRecord> {
    let state = hook.state();
    let guard = state.lock().await;

    guard
        .tool_calls
        .iter()
        .map(|tc| ToolCallRecord {
            tool_name: tc.name.clone(),
            args_summary: truncate_string(&tc.args, 100),
            result_summary: tc
                .output
                .as_ref()
                .map(|o| truncate_string(o, 200))
                .unwrap_or_else(|| "completed".to_string()),
        })
        .collect()
}

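/// Truncates `s` to at most `max_len` bytes, appending "..." when cut.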
fn truncate_string(s: &str, max_len: usize) -> String {
    if s.len() <= max_len {
        s.to_string()
    } else {
        // Back up to a char boundary so slicing can't panic on multi-byte UTF-8.
        let mut end = max_len.saturating_sub(3);
        while !s.is_char_boundary(end) {
            end -= 1;
        }
        format!("{}...", &s[..end])
    }
}

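/// Runs a single one-shot query against the project and returns the final
/// response text. Unlike [`run_interactive`], no session state or
/// conversation history is kept between calls.
///
/// A minimal sketch of a call site (assumes a Tokio runtime and that the
/// provider's API key is already exported in the environment):
///
/// ```ignore
/// let report = run_query(
///     Path::new("."),
///     "Which services does this project expose?",
///     ProviderType::Anthropic,
///     None, // use the provider's default model
/// )
/// .await?;
/// println!("{report}");
/// ```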
pub async fn run_query(
    project_path: &Path,
    query: &str,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<String> {
    use tools::*;

    let project_path_buf = project_path.to_path_buf();
    let preamble = get_system_prompt(project_path, Some(query));
    let is_generation = prompts::is_generation_query(query);

    match provider {
        ProviderType::OpenAI => {
            let client = openai::Client::from_env();
            let model_name = model.as_deref().unwrap_or("gpt-5.2");

            let reasoning_params = if model_name.starts_with("gpt-5") || model_name.starts_with("o1") {
                Some(serde_json::json!({
                    "reasoning": {
                        "effort": "medium",
                        "summary": "detailed"
                    }
                }))
            } else {
                None
            };

            let mut builder = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf.clone()));

            if is_generation {
                builder = builder
                    .tool(WriteFileTool::new(project_path_buf.clone()))
                    .tool(WriteFilesTool::new(project_path_buf.clone()))
                    .tool(ShellTool::new(project_path_buf.clone()));
            }

            if let Some(params) = reasoning_params {
                builder = builder.additional_params(params);
            }

            let agent = builder.build();

            agent
                .prompt(query)
                .multi_turn(50)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
        ProviderType::Anthropic => {
            let client = anthropic::Client::from_env();
            let model_name = model.as_deref().unwrap_or("claude-sonnet-4-20250514");

            let mut builder = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf.clone()));

            if is_generation {
                builder = builder
                    .tool(WriteFileTool::new(project_path_buf.clone()))
                    .tool(WriteFilesTool::new(project_path_buf.clone()))
                    .tool(ShellTool::new(project_path_buf.clone()));
            }

            let agent = builder.build();

            agent
                .prompt(query)
                .multi_turn(50)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
    }
}