syncable_cli/agent/
mod.rs

//! Agent module for interactive AI-powered CLI assistance
//!
//! This module provides an agent layer using the Rig library that allows users
//! to interact with the CLI through natural language conversations.
//!
//! # Features
//!
//! - **Conversation History**: Maintains context across multiple turns
//! - **Automatic Compaction**: Compresses old history when the token count exceeds a threshold
//! - **Tool Tracking**: Records tool calls for better context preservation
//!
//! # Usage
//!
//! ```bash
//! # Interactive mode
//! sync-ctl chat
//!
//! # With a specific provider
//! sync-ctl chat --provider openai --model gpt-5.2
//!
//! # Single query
//! sync-ctl chat --query "What security issues does this project have?"
//! ```
//!
//! # Interactive Commands
//!
//! - `/model` - Switch to a different AI model
//! - `/provider` - Switch provider (prompts for API key if needed)
//! - `/help` - Show available commands
//! - `/clear` - Clear conversation history
//! - `/exit` - Exit the chat
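//!
//! # Programmatic Usage
//!
//! The agent can also be driven from Rust via [`run_query`]. A minimal,
//! illustrative sketch (assumes a Tokio runtime and a configured API key for
//! the chosen provider; marked `ignore` because it is a sketch, not a doc test):
//!
//! ```ignore
//! use std::path::Path;
//!
//! let answer = run_query(
//!     Path::new("."),
//!     "What security issues does this project have?",
//!     ProviderType::Anthropic,
//!     None, // fall back to the provider's default model
//! )
//! .await?;
//! println!("{answer}");
//! ```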

pub mod commands;
pub mod history;
pub mod ide;
pub mod prompts;
pub mod session;
pub mod tools;
pub mod ui;

use colored::Colorize;
use commands::TokenUsage;
use history::{ConversationHistory, ToolCallRecord};
use ide::IdeClient;
use rig::{
    client::{CompletionClient, ProviderClient},
    completion::Prompt,
    providers::{anthropic, openai},
};
use session::ChatSession;
use std::path::Path;
use std::sync::Arc;
use tokio::sync::Mutex as TokioMutex;
use ui::{ResponseFormatter, ToolDisplayHook};

/// Provider type for the agent.
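///
/// Parsing is case-insensitive and `Display` renders the lowercase name; a
/// minimal illustration of the `FromStr`/`Display` pair implemented below:
///
/// ```ignore
/// let p: ProviderType = "OpenAI".parse().unwrap();
/// assert_eq!(p, ProviderType::OpenAI);
/// assert_eq!(p.to_string(), "openai");
/// ```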
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum ProviderType {
    #[default]
    OpenAI,
    Anthropic,
}

impl std::fmt::Display for ProviderType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ProviderType::OpenAI => write!(f, "openai"),
            ProviderType::Anthropic => write!(f, "anthropic"),
        }
    }
}

impl std::str::FromStr for ProviderType {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "openai" => Ok(ProviderType::OpenAI),
            "anthropic" => Ok(ProviderType::Anthropic),
            _ => Err(format!("Unknown provider: {}", s)),
        }
    }
}

/// Error types for the agent
#[derive(Debug, thiserror::Error)]
pub enum AgentError {
    #[error("Missing API key. Set the {0} environment variable.")]
    MissingApiKey(String),

    #[error("Provider error: {0}")]
    ProviderError(String),

    #[error("Tool error: {0}")]
    ToolError(String),
}

pub type AgentResult<T> = Result<T, AgentError>;

/// Get the system prompt for the agent based on the query type.
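///
/// Queries that look like generation requests ("create a Dockerfile",
/// "write Terraform for this service", ...) route to the DevOps prompt;
/// everything else falls back to the analysis prompt. The example queries
/// here are illustrative; the actual heuristic lives in
/// `prompts::is_generation_query`.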
fn get_system_prompt(project_path: &Path, query: Option<&str>) -> String {
    // If the query suggests generation (Docker, Terraform, Helm), use the DevOps prompt
    if let Some(q) = query {
        if prompts::is_generation_query(q) {
            return prompts::get_devops_prompt(project_path);
        }
    }
    // Default to the analysis prompt
    prompts::get_analysis_prompt(project_path)
}

/// Run the agent in interactive mode with a custom REPL that supports the
/// `/model` and `/provider` commands
pub async fn run_interactive(
    project_path: &Path,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<()> {
    use tools::*;

    let mut session = ChatSession::new(project_path, provider, model);

    // Initialize conversation history with compaction support
    let mut conversation_history = ConversationHistory::new();

    // Initialize IDE client for native diff viewing
    let ide_client: Option<Arc<TokioMutex<IdeClient>>> = {
        let mut client = IdeClient::new().await;
        if client.is_ide_available() {
            match client.connect().await {
                Ok(()) => {
                    println!(
                        "{} Connected to {} IDE companion",
                        "โœ“".green(),
                        client.ide_name().unwrap_or("VS Code")
                    );
                    Some(Arc::new(TokioMutex::new(client)))
                }
                Err(e) => {
                    // IDE detected but companion not running or connection failed
                    println!(
                        "{} IDE companion not connected: {}",
                        "!".yellow(),
                        e
                    );
                    None
                }
            }
        } else {
            println!(
                "{} No IDE detected (TERM_PROGRAM={})",
                "ยท".dimmed(),
                std::env::var("TERM_PROGRAM").unwrap_or_default()
            );
            None
        }
    };

    // Load API key from config file to env if not already set
    ChatSession::load_api_key_to_env(session.provider);

    // Check if API key is configured, prompt if not
    if !ChatSession::has_api_key(session.provider) {
        ChatSession::prompt_api_key(session.provider)?;
    }

    session.print_banner();

    loop {
        // Show conversation status if we have history
        if !conversation_history.is_empty() {
            println!("{}", format!("  ๐Ÿ’ฌ Context: {}", conversation_history.status()).dimmed());
        }

        // Read user input
        let input = match session.read_input() {
            Ok(input) => input,
            Err(_) => break,
        };

        if input.is_empty() {
            continue;
        }

        // Check for commands
        if ChatSession::is_command(&input) {
            // Special handling for /clear to also clear conversation history
            if input.trim().to_lowercase() == "/clear" || input.trim().to_lowercase() == "/c" {
                conversation_history.clear();
            }
            match session.process_command(&input) {
                Ok(true) => continue,
                Ok(false) => break, // /exit
                Err(e) => {
                    eprintln!("{}", format!("Error: {}", e).red());
                    continue;
                }
            }
        }

        // Check API key before making a request (in case the provider changed)
        if !ChatSession::has_api_key(session.provider) {
            eprintln!("{}", "No API key configured. Use /provider to set one.".yellow());
            continue;
        }

        // Check if compaction is needed before making the request
        if conversation_history.needs_compaction() {
            println!("{}", "  ๐Ÿ“ฆ Compacting conversation history...".dimmed());
            if let Some(summary) = conversation_history.compact() {
                println!("{}", format!("  โœ“ Compressed {} turns", summary.matches("Turn").count()).dimmed());
            }
        }

        // Create hook for Claude Code style tool display
        let hook = ToolDisplayHook::new();

        let project_path_buf = session.project_path.clone();
        // Select prompt based on query type (analysis vs generation)
        let preamble = get_system_prompt(&session.project_path, Some(&input));
        let is_generation = prompts::is_generation_query(&input);

        // Convert conversation history to Rig Message format
        let mut chat_history = conversation_history.to_messages();

        let response = match session.provider {
            ProviderType::OpenAI => {
                let client = openai::Client::from_env();
                // For reasoning models (GPT-5.x and the o1 series), enable reasoning
                // with summary output so we can see the model's thinking process
                let reasoning_params = if session.model.starts_with("gpt-5") || session.model.starts_with("o1") {
                    Some(serde_json::json!({
                        "reasoning": {
                            "effort": "medium",
                            "summary": "detailed"
                        }
                    }))
                } else {
                    None
                };

                let mut builder = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf.clone()));

                // Add generation tools if this is a generation query
                if is_generation {
                    // Create file tools with IDE client if connected
                    let (write_file_tool, write_files_tool) = if let Some(ref client) = ide_client {
                        (
                            WriteFileTool::new(project_path_buf.clone())
                                .with_ide_client(client.clone()),
                            WriteFilesTool::new(project_path_buf.clone())
                                .with_ide_client(client.clone()),
                        )
                    } else {
                        (
                            WriteFileTool::new(project_path_buf.clone()),
                            WriteFilesTool::new(project_path_buf.clone()),
                        )
                    };
                    builder = builder
                        .tool(write_file_tool)
                        .tool(write_files_tool)
                        .tool(ShellTool::new(project_path_buf.clone()));
                }

                if let Some(params) = reasoning_params {
                    builder = builder.additional_params(params);
                }

                let agent = builder.build();
                // Allow up to 50 tool call turns for complex generation tasks
                // Use hook to display tool calls as they happen
                // Pass conversation history for context continuity
                agent.prompt(&input)
                    .with_history(&mut chat_history)
                    .with_hook(hook.clone())
                    .multi_turn(50)
                    .await
            }
            ProviderType::Anthropic => {
                let client = anthropic::Client::from_env();
                let mut builder = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf.clone()));

                // Add generation tools if this is a generation query
                if is_generation {
                    // Create file tools with IDE client if connected
                    let (write_file_tool, write_files_tool) = if let Some(ref client) = ide_client {
                        (
                            WriteFileTool::new(project_path_buf.clone())
                                .with_ide_client(client.clone()),
                            WriteFilesTool::new(project_path_buf.clone())
                                .with_ide_client(client.clone()),
                        )
                    } else {
                        (
                            WriteFileTool::new(project_path_buf.clone()),
                            WriteFilesTool::new(project_path_buf.clone()),
                        )
                    };
                    builder = builder
                        .tool(write_file_tool)
                        .tool(write_files_tool)
                        .tool(ShellTool::new(project_path_buf.clone()));
                }

                let agent = builder.build();

                // Allow up to 50 tool call turns for complex generation tasks
                // Use hook to display tool calls as they happen
                // Pass conversation history for context continuity
                agent.prompt(&input)
                    .with_history(&mut chat_history)
                    .with_hook(hook.clone())
                    .multi_turn(50)
                    .await
            }
        };

        match response {
            Ok(text) => {
                // Show final response
                println!();
                ResponseFormatter::print_response(&text);

                // Track token usage (estimated, since Rig doesn't expose exact counts)
                let prompt_tokens = TokenUsage::estimate_tokens(&input);
                let completion_tokens = TokenUsage::estimate_tokens(&text);
                session.token_usage.add_request(prompt_tokens, completion_tokens);

                // Extract tool calls from the hook state for history tracking
                let tool_calls = extract_tool_calls_from_hook(&hook).await;

                // Add to conversation history with tool call records
                conversation_history.add_turn(input.clone(), text.clone(), tool_calls);

                // Also update the legacy session history for compatibility
                session.history.push(("user".to_string(), input));
                session.history.push(("assistant".to_string(), text));
            }
            Err(e) => {
                let err_str = e.to_string();
                println!();
                // Check if this is a max depth error
                if err_str.contains("MaxDepth") || err_str.contains("max_depth") || err_str.contains("reached limit") {
                    eprintln!("{}", "Reached tool call limit (50 turns).".yellow());
                    eprintln!("{}", "Type 'continue' to resume, or ask a new question.".dimmed());
                } else {
                    eprintln!("{}", format!("Error: {}", e).red());
                }
            }
        }
        println!();
    }

    Ok(())
}

/// Extract tool call records from the hook state for history tracking
async fn extract_tool_calls_from_hook(hook: &ToolDisplayHook) -> Vec<ToolCallRecord> {
    let state = hook.state();
    let guard = state.lock().await;

    guard.tool_calls.iter().map(|tc| {
        ToolCallRecord {
            tool_name: tc.name.clone(),
            args_summary: truncate_string(&tc.args, 100),
            result_summary: tc.output.as_ref()
                .map(|o| truncate_string(o, 200))
                .unwrap_or_else(|| "completed".to_string()),
        }
    }).collect()
}

/// Helper to truncate strings for summaries.
///
/// Truncates on `char` boundaries so multi-byte UTF-8 input cannot panic the
/// byte slice, e.g. `truncate_string("hello world", 8) == "hello..."`.
fn truncate_string(s: &str, max_len: usize) -> String {
    if s.chars().count() <= max_len {
        s.to_string()
    } else {
        let truncated: String = s.chars().take(max_len.saturating_sub(3)).collect();
        format!("{}...", truncated)
    }
}

/// Run a single query and return the response
pub async fn run_query(
    project_path: &Path,
    query: &str,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<String> {
    use tools::*;

    let project_path_buf = project_path.to_path_buf();
    // Select prompt based on query type (analysis vs generation)
    let preamble = get_system_prompt(project_path, Some(query));
    let is_generation = prompts::is_generation_query(query);

    match provider {
        ProviderType::OpenAI => {
            let client = openai::Client::from_env();
            let model_name = model.as_deref().unwrap_or("gpt-5.2");

            // For reasoning models (GPT-5.x and the o1 series), enable reasoning with summary output
            let reasoning_params = if model_name.starts_with("gpt-5") || model_name.starts_with("o1") {
                Some(serde_json::json!({
                    "reasoning": {
                        "effort": "medium",
                        "summary": "detailed"
                    }
                }))
            } else {
                None
            };

            let mut builder = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf.clone()));

            // Add generation tools if this is a generation query
            if is_generation {
                builder = builder
                    .tool(WriteFileTool::new(project_path_buf.clone()))
                    .tool(WriteFilesTool::new(project_path_buf.clone()))
                    .tool(ShellTool::new(project_path_buf.clone()));
            }

            if let Some(params) = reasoning_params {
                builder = builder.additional_params(params);
            }

            let agent = builder.build();

            agent
                .prompt(query)
                .multi_turn(50)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
        ProviderType::Anthropic => {
            let client = anthropic::Client::from_env();
            let model_name = model.as_deref().unwrap_or("claude-sonnet-4-20250514");

            let mut builder = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf.clone()));

            // Add generation tools if this is a generation query
            if is_generation {
                builder = builder
                    .tool(WriteFileTool::new(project_path_buf.clone()))
                    .tool(WriteFilesTool::new(project_path_buf.clone()))
                    .tool(ShellTool::new(project_path_buf.clone()));
            }

            let agent = builder.build();

            agent
                .prompt(query)
                .multi_turn(50)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
    }
}
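
// A small, self-contained sanity suite for the pure helpers above. This is a
// sketch rather than exhaustive coverage; it only exercises behavior that the
// functions in this module already guarantee.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn provider_type_parses_case_insensitively_and_round_trips() {
        assert_eq!("OpenAI".parse::<ProviderType>(), Ok(ProviderType::OpenAI));
        assert_eq!(
            ProviderType::Anthropic.to_string().parse::<ProviderType>(),
            Ok(ProviderType::Anthropic)
        );
        assert!("gemini".parse::<ProviderType>().is_err());
    }

    #[test]
    fn truncate_string_is_char_boundary_safe() {
        // Short input comes back unchanged.
        assert_eq!(truncate_string("short", 100), "short");
        // Long input is cut to max_len, including the "..." suffix.
        assert_eq!(truncate_string("hello world", 8), "hello...");
        // Multi-byte input must not panic mid-code-point.
        assert_eq!(truncate_string("๐Ÿ”๐Ÿ”๐Ÿ”๐Ÿ”๐Ÿ”๐Ÿ”", 5), "๐Ÿ”๐Ÿ”...");
    }
}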