1pub mod commands;
34pub mod history;
35pub mod ide;
36pub mod prompts;
37pub mod session;
38pub mod tools;
39pub mod ui;
40
41use colored::Colorize;
42use history::{ConversationHistory, ToolCallRecord};
43use ide::IdeClient;
44use rig::{
45 client::{CompletionClient, ProviderClient},
46 completion::Prompt,
47 providers::{anthropic, openai},
48};
49use session::ChatSession;
50use commands::TokenUsage;
51use std::path::Path;
52use std::sync::Arc;
53use tokio::sync::Mutex as TokioMutex;
54use ui::{ResponseFormatter, ToolDisplayHook};
55
/// Which LLM backend the agent talks to. `OpenAI` is the default.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum ProviderType {
    #[default]
    OpenAI,
    Anthropic,
}
63
64impl std::fmt::Display for ProviderType {
65 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
66 match self {
67 ProviderType::OpenAI => write!(f, "openai"),
68 ProviderType::Anthropic => write!(f, "anthropic"),
69 }
70 }
71}
72
73impl std::str::FromStr for ProviderType {
74 type Err = String;
75
76 fn from_str(s: &str) -> Result<Self, Self::Err> {
77 match s.to_lowercase().as_str() {
78 "openai" => Ok(ProviderType::OpenAI),
79 "anthropic" => Ok(ProviderType::Anthropic),
80 _ => Err(format!("Unknown provider: {}", s)),
81 }
82 }
83}
84
/// Errors surfaced by the agent entry points ([`run_interactive`], [`run_query`]).
#[derive(Debug, thiserror::Error)]
pub enum AgentError {
    // Payload is the name of the environment variable the user must set.
    #[error("Missing API key. Set {0} environment variable.")]
    MissingApiKey(String),

    // Stringified error from the underlying LLM provider call.
    #[error("Provider error: {0}")]
    ProviderError(String),

    // Stringified error from a tool invocation.
    #[error("Tool error: {0}")]
    ToolError(String),
}
97
/// Convenience alias for results produced by this module.
pub type AgentResult<T> = Result<T, AgentError>;
99
100fn get_system_prompt(project_path: &Path, query: Option<&str>) -> String {
102 if let Some(q) = query {
104 if prompts::is_generation_query(q) {
105 return prompts::get_devops_prompt(project_path);
106 }
107 }
108 prompts::get_analysis_prompt(project_path)
110}
111
/// Runs the interactive chat REPL against the selected provider.
///
/// Flow: create the [`ChatSession`] and [`ConversationHistory`], optionally
/// connect to an IDE companion, make sure an API key is configured, then
/// loop: read input, handle slash commands, compact history when needed,
/// send the prompt to the provider's agent with the tool set and a 50-turn
/// tool-call budget, and record the exchange plus estimated token usage.
///
/// # Errors
/// Returns an [`AgentError`] if prompting for a missing API key fails.
/// Per-request provider errors are printed and the loop continues.
pub async fn run_interactive(
    project_path: &Path,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<()> {
    use tools::*;

    let mut session = ChatSession::new(project_path, provider, model);

    let mut conversation_history = ConversationHistory::new();

    // Try to attach to a running IDE companion; when connected, the write
    // tools are routed through it (see the generation branches below).
    // Failure to connect is non-fatal.
    let ide_client: Option<Arc<TokioMutex<IdeClient>>> = {
        let mut client = IdeClient::new().await;
        if client.is_ide_available() {
            match client.connect().await {
                Ok(()) => {
                    println!(
                        "{} Connected to {} IDE companion",
                        "โ".green(),
                        client.ide_name().unwrap_or("VS Code")
                    );
                    Some(Arc::new(TokioMutex::new(client)))
                }
                Err(e) => {
                    println!(
                        "{} IDE companion not connected: {}",
                        "!".yellow(),
                        e
                    );
                    None
                }
            }
        } else {
            println!("{} No IDE detected (TERM_PROGRAM={})", "ยท".dimmed(), std::env::var("TERM_PROGRAM").unwrap_or_default());
            None
        }
    };

    // Load any stored key into the environment first, then prompt
    // interactively only if the provider still has no key.
    ChatSession::load_api_key_to_env(session.provider);

    if !ChatSession::has_api_key(session.provider) {
        ChatSession::prompt_api_key(session.provider)?;
    }

    session.print_banner();

    loop {
        // Show a context status line once there is history to summarize.
        if !conversation_history.is_empty() {
            println!("{}", format!(" ๐ฌ Context: {}", conversation_history.status()).dimmed());
        }

        // A read error (e.g. EOF / Ctrl-D) ends the session.
        let input = match session.read_input() {
            Ok(input) => input,
            Err(_) => break,
        };

        if input.is_empty() {
            continue;
        }

        if ChatSession::is_command(&input) {
            // /clear (or /c) also wipes the in-memory conversation history,
            // in addition to whatever the session command itself does.
            if input.trim().to_lowercase() == "/clear" || input.trim().to_lowercase() == "/c" {
                conversation_history.clear();
            }
            match session.process_command(&input) {
                Ok(true) => continue, // command handled, keep looping
                Ok(false) => break,   // command requested exit
                Err(e) => {
                    eprintln!("{}", format!("Error: {}", e).red());
                    continue;
                }
            }
        }

        // The key may have been cleared via commands; re-check every turn.
        if !ChatSession::has_api_key(session.provider) {
            eprintln!("{}", "No API key configured. Use /provider to set one.".yellow());
            continue;
        }

        // Summarize older turns when the history grows past its budget.
        if conversation_history.needs_compaction() {
            println!("{}", " ๐ฆ Compacting conversation history...".dimmed());
            if let Some(summary) = conversation_history.compact() {
                println!("{}", format!(" โ Compressed {} turns", summary.matches("Turn").count()).dimmed());
            }
        }

        // The hook both displays tool activity live and records it so the
        // calls can be folded into the conversation history afterwards.
        let hook = ToolDisplayHook::new();

        let project_path_buf = session.project_path.clone();
        let preamble = get_system_prompt(&session.project_path, Some(&input));
        let is_generation = prompts::is_generation_query(&input);

        let mut chat_history = conversation_history.to_messages();

        let response = match session.provider {
            ProviderType::OpenAI => {
                let client = openai::Client::from_env();
                // Extra reasoning params only for reasoning-capable models.
                let reasoning_params = if session.model.starts_with("gpt-5") || session.model.starts_with("o1") {
                    Some(serde_json::json!({
                        "reasoning": {
                            "effort": "medium",
                            "summary": "detailed"
                        }
                    }))
                } else {
                    None
                };

                // Read-only analysis tools are always available.
                let mut builder = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf.clone()));

                // Write/shell tools only for generation queries; route file
                // writes through the IDE when one is connected.
                if is_generation {
                    let (write_file_tool, write_files_tool) = if let Some(ref client) = ide_client {
                        (
                            WriteFileTool::new(project_path_buf.clone())
                                .with_ide_client(client.clone()),
                            WriteFilesTool::new(project_path_buf.clone())
                                .with_ide_client(client.clone()),
                        )
                    } else {
                        (
                            WriteFileTool::new(project_path_buf.clone()),
                            WriteFilesTool::new(project_path_buf.clone()),
                        )
                    };
                    builder = builder
                        .tool(write_file_tool)
                        .tool(write_files_tool)
                        .tool(ShellTool::new(project_path_buf.clone()));
                }

                if let Some(params) = reasoning_params {
                    builder = builder.additional_params(params);
                }

                let agent = builder.build();
                agent.prompt(&input)
                    .with_history(&mut chat_history)
                    .with_hook(hook.clone())
                    .multi_turn(50)
                    .await
            }
            ProviderType::Anthropic => {
                let client = anthropic::Client::from_env();
                let mut builder = client
                    .agent(&session.model)
                    .preamble(&preamble)
                    .max_tokens(4096)
                    .tool(AnalyzeTool::new(project_path_buf.clone()))
                    .tool(SecurityScanTool::new(project_path_buf.clone()))
                    .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                    .tool(ReadFileTool::new(project_path_buf.clone()))
                    .tool(ListDirectoryTool::new(project_path_buf.clone()));

                // Same generation-tool gating as the OpenAI arm.
                if is_generation {
                    let (write_file_tool, write_files_tool) = if let Some(ref client) = ide_client {
                        (
                            WriteFileTool::new(project_path_buf.clone())
                                .with_ide_client(client.clone()),
                            WriteFilesTool::new(project_path_buf.clone())
                                .with_ide_client(client.clone()),
                        )
                    } else {
                        (
                            WriteFileTool::new(project_path_buf.clone()),
                            WriteFilesTool::new(project_path_buf.clone()),
                        )
                    };
                    builder = builder
                        .tool(write_file_tool)
                        .tool(write_files_tool)
                        .tool(ShellTool::new(project_path_buf.clone()));
                }

                let agent = builder.build();

                agent.prompt(&input)
                    .with_history(&mut chat_history)
                    .with_hook(hook.clone())
                    .multi_turn(50)
                    .await
            }
        };

        match response {
            Ok(text) => {
                println!();
                ResponseFormatter::print_response(&text);

                // Heuristic token accounting (local estimate, not the
                // provider-reported usage).
                let prompt_tokens = TokenUsage::estimate_tokens(&input);
                let completion_tokens = TokenUsage::estimate_tokens(&text);
                session.token_usage.add_request(prompt_tokens, completion_tokens);

                // Fold the tool calls the hook observed into this turn.
                let tool_calls = extract_tool_calls_from_hook(&hook).await;

                conversation_history.add_turn(input.clone(), text.clone(), tool_calls);

                session.history.push(("user".to_string(), input));
                session.history.push(("assistant".to_string(), text));
            }
            Err(e) => {
                let err_str = e.to_string();
                println!();
                // Distinguish "ran out of tool turns" (string-match on the
                // provider error; brittle but all we have here) from real
                // errors so the user knows they can just continue.
                if err_str.contains("MaxDepth") || err_str.contains("max_depth") || err_str.contains("reached limit") {
                    eprintln!("{}", "Reached tool call limit (50 turns).".yellow());
                    eprintln!("{}", "Type 'continue' to resume, or ask a new question.".dimmed());
                } else {
                    eprintln!("{}", format!("Error: {}", e).red());
                }
            }
        }
        println!();
    }

    Ok(())
}
368
369async fn extract_tool_calls_from_hook(hook: &ToolDisplayHook) -> Vec<ToolCallRecord> {
371 let state = hook.state();
372 let guard = state.lock().await;
373
374 guard.tool_calls.iter().map(|tc| {
375 ToolCallRecord {
376 tool_name: tc.name.clone(),
377 args_summary: truncate_string(&tc.args, 100),
378 result_summary: tc.output.as_ref()
379 .map(|o| truncate_string(o, 200))
380 .unwrap_or_else(|| "completed".to_string()),
381 }
382 }).collect()
383}
384
/// Truncates `s` to at most `max_len` bytes, appending "..." when it was cut.
///
/// The cut point is backed up to the nearest UTF-8 character boundary before
/// slicing: the previous implementation sliced at a raw byte offset
/// (`&s[..max_len - 3]`), which panics when that offset lands inside a
/// multi-byte character — reachable here because tool args/outputs are
/// arbitrary text.
fn truncate_string(s: &str, max_len: usize) -> String {
    if s.len() <= max_len {
        return s.to_string();
    }
    // Reserve 3 bytes for the ellipsis, then retreat to a char boundary so
    // the slice below can never panic.
    let mut cut = max_len.saturating_sub(3);
    while cut > 0 && !s.is_char_boundary(cut) {
        cut -= 1;
    }
    format!("{}...", &s[..cut])
}
393
/// Runs a single one-shot query (non-interactive) and returns the response text.
///
/// Builds a provider agent with the read-only analysis tools, adding the
/// write/shell tools only when the query reads as a generation request.
/// When `model` is `None`, falls back to a per-provider default
/// ("gpt-5.2" / "claude-sonnet-4-20250514"). Allows up to 50 tool-call turns.
///
/// # Errors
/// Returns [`AgentError::ProviderError`] when the underlying prompt fails.
pub async fn run_query(
    project_path: &Path,
    query: &str,
    provider: ProviderType,
    model: Option<String>,
) -> AgentResult<String> {
    use tools::*;

    let project_path_buf = project_path.to_path_buf();
    let preamble = get_system_prompt(project_path, Some(query));
    let is_generation = prompts::is_generation_query(query);

    match provider {
        ProviderType::OpenAI => {
            let client = openai::Client::from_env();
            let model_name = model.as_deref().unwrap_or("gpt-5.2");

            // Extra reasoning params only for reasoning-capable models.
            let reasoning_params = if model_name.starts_with("gpt-5") || model_name.starts_with("o1") {
                Some(serde_json::json!({
                    "reasoning": {
                        "effort": "medium",
                        "summary": "detailed"
                    }
                }))
            } else {
                None
            };

            // Read-only analysis tools are always available.
            let mut builder = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf.clone()));

            // Write/shell tools only for generation-style queries.
            if is_generation {
                builder = builder
                    .tool(WriteFileTool::new(project_path_buf.clone()))
                    .tool(WriteFilesTool::new(project_path_buf.clone()))
                    .tool(ShellTool::new(project_path_buf.clone()));
            }

            if let Some(params) = reasoning_params {
                builder = builder.additional_params(params);
            }

            let agent = builder.build();

            agent
                .prompt(query)
                .multi_turn(50)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
        ProviderType::Anthropic => {
            let client = anthropic::Client::from_env();
            let model_name = model.as_deref().unwrap_or("claude-sonnet-4-20250514");

            let mut builder = client
                .agent(model_name)
                .preamble(&preamble)
                .max_tokens(4096)
                .tool(AnalyzeTool::new(project_path_buf.clone()))
                .tool(SecurityScanTool::new(project_path_buf.clone()))
                .tool(VulnerabilitiesTool::new(project_path_buf.clone()))
                .tool(ReadFileTool::new(project_path_buf.clone()))
                .tool(ListDirectoryTool::new(project_path_buf.clone()));

            // Same generation-tool gating as the OpenAI arm.
            if is_generation {
                builder = builder
                    .tool(WriteFileTool::new(project_path_buf.clone()))
                    .tool(WriteFilesTool::new(project_path_buf.clone()))
                    .tool(ShellTool::new(project_path_buf.clone()));
            }

            let agent = builder.build();

            agent
                .prompt(query)
                .multi_turn(50)
                .await
                .map_err(|e| AgentError::ProviderError(e.to_string()))
        }
    }
}