use crate::agent::commands::{TokenUsage, SLASH_COMMANDS};
use crate::agent::{AgentError, AgentResult, ProviderType};
use crate::agent::ui::{SlashCommandAutocomplete, ansi};
use crate::config::{load_agent_config, save_agent_config};
use colored::Colorize;
use inquire::Text;
use std::io::{self, Write};
use std::path::Path;

const ROBOT: &str = "🤖";

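/// Returns the selectable models for `provider` as `(model_id, description)` pairs.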
pub fn get_available_models(provider: ProviderType) -> Vec<(&'static str, &'static str)> {
    match provider {
        ProviderType::OpenAI => vec![
            ("gpt-5.2", "GPT-5.2 - Latest reasoning model (Dec 2025)"),
            ("gpt-5.2-mini", "GPT-5.2 Mini - Fast and affordable"),
            ("gpt-4o", "GPT-4o - Multimodal workhorse"),
            ("o1-preview", "o1-preview - Advanced reasoning"),
        ],
        ProviderType::Anthropic => vec![
            ("claude-sonnet-4-20250514", "Claude 4 Sonnet - Latest (May 2025)"),
            ("claude-3-5-sonnet-latest", "Claude 3.5 Sonnet - Previous gen"),
            ("claude-3-opus-latest", "Claude 3 Opus - Most capable"),
            ("claude-3-haiku-latest", "Claude 3 Haiku - Fast and cheap"),
        ],
    }
}

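/// An interactive chat session: the active provider and model, the project being
/// analyzed, the conversation history, and accumulated token usage.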
pub struct ChatSession {
    pub provider: ProviderType,
    pub model: String,
    pub project_path: std::path::PathBuf,
    pub history: Vec<(String, String)>,
    pub token_usage: TokenUsage,
}

impl ChatSession {
    pub fn new(project_path: &Path, provider: ProviderType, model: Option<String>) -> Self {
        let default_model = match provider {
            ProviderType::OpenAI => "gpt-5.2".to_string(),
            ProviderType::Anthropic => "claude-sonnet-4-20250514".to_string(),
        };

        Self {
            provider,
            model: model.unwrap_or(default_model),
            project_path: project_path.to_path_buf(),
            history: Vec::new(),
            token_usage: TokenUsage::new(),
        }
    }

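    /// Returns true if an API key for `provider` is available, either in the
    /// process environment or in the saved agent config.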
    pub fn has_api_key(provider: ProviderType) -> bool {
        let env_key = match provider {
            ProviderType::OpenAI => std::env::var("OPENAI_API_KEY").ok(),
            ProviderType::Anthropic => std::env::var("ANTHROPIC_API_KEY").ok(),
        };

        if env_key.is_some() {
            return true;
        }

        let agent_config = load_agent_config();
        match provider {
            ProviderType::OpenAI => agent_config.openai_api_key.is_some(),
            ProviderType::Anthropic => agent_config.anthropic_api_key.is_some(),
        }
    }

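    /// If the provider's key is absent from the environment but present in the
    /// saved agent config, exports it into the process environment.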
    pub fn load_api_key_to_env(provider: ProviderType) {
        let env_var = match provider {
            ProviderType::OpenAI => "OPENAI_API_KEY",
            ProviderType::Anthropic => "ANTHROPIC_API_KEY",
        };

        if std::env::var(env_var).is_ok() {
            return;
        }

        let agent_config = load_agent_config();
        let key = match provider {
            ProviderType::OpenAI => agent_config.openai_api_key,
            ProviderType::Anthropic => agent_config.anthropic_api_key,
        };

        if let Some(key) = key {
            // `set_var` is unsafe in recent editions because mutating the process
            // environment is not thread-safe.
            unsafe {
                std::env::set_var(env_var, &key);
            }
        }
    }

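    /// Returns every provider that currently has an API key configured.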
    pub fn get_configured_providers() -> Vec<ProviderType> {
        let mut providers = Vec::new();
        if Self::has_api_key(ProviderType::OpenAI) {
            providers.push(ProviderType::OpenAI);
        }
        if Self::has_api_key(ProviderType::Anthropic) {
            providers.push(ProviderType::Anthropic);
        }
        providers
    }

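    /// Prompts for an API key on stdin, exports it to the environment, and
    /// persists it to the agent config. Returns the entered key.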
    pub fn prompt_api_key(provider: ProviderType) -> AgentResult<String> {
        let env_var = match provider {
            ProviderType::OpenAI => "OPENAI_API_KEY",
            ProviderType::Anthropic => "ANTHROPIC_API_KEY",
        };

        println!("\n{}", format!("🔑 No API key found for {}", provider).yellow());
        println!("Please enter your {} API key:", provider);
        print!("> ");
        io::stdout().flush().unwrap();

        let mut key = String::new();
        io::stdin().read_line(&mut key).map_err(|e| AgentError::ToolError(e.to_string()))?;
        let key = key.trim().to_string();

        if key.is_empty() {
            return Err(AgentError::MissingApiKey(env_var.to_string()));
        }

        unsafe {
            std::env::set_var(env_var, &key);
        }

        let mut agent_config = load_agent_config();
        match provider {
            ProviderType::OpenAI => agent_config.openai_api_key = Some(key.clone()),
            ProviderType::Anthropic => agent_config.anthropic_api_key = Some(key.clone()),
        }

        if let Err(e) = save_agent_config(&agent_config) {
            eprintln!("{}", format!("Warning: Could not save config: {}", e).yellow());
        } else {
            println!("{}", "✓ API key saved to ~/.syncable.toml".green());
        }

        Ok(key)
    }

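    /// Handles `/model`: lists the models available for the current provider and
    /// switches to the selection (or to a custom model id typed by the user).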
    pub fn handle_model_command(&mut self) -> AgentResult<()> {
        let models = get_available_models(self.provider);

        println!("\n{}", format!("📋 Available models for {}:", self.provider).cyan().bold());
        println!();

        for (i, (id, desc)) in models.iter().enumerate() {
            let marker = if *id == self.model { "→ " } else { "  " };
            let num = format!("[{}]", i + 1);
            println!(" {} {} {} - {}", marker, num.dimmed(), id.white().bold(), desc.dimmed());
        }

        println!();
        println!("Enter number to select, or press Enter to keep current:");
        print!("> ");
        io::stdout().flush().unwrap();

        let mut input = String::new();
        io::stdin().read_line(&mut input).ok();
        let input = input.trim();

        if input.is_empty() {
            println!("{}", format!("Keeping model: {}", self.model).dimmed());
            return Ok(());
        }

        if let Ok(num) = input.parse::<usize>() {
            if num >= 1 && num <= models.len() {
                let (id, desc) = models[num - 1];
                self.model = id.to_string();
                println!("{}", format!("✓ Switched to {} - {}", id, desc).green());
            } else {
                println!("{}", "Invalid selection".red());
            }
        } else {
            self.model = input.to_string();
            println!("{}", format!("✓ Set model to: {}", input).green());
        }

        Ok(())
    }

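    /// Handles `/provider`: switches providers, prompting for an API key if the
    /// new provider has none, and resets the model to that provider's default.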
    pub fn handle_provider_command(&mut self) -> AgentResult<()> {
        let providers = [ProviderType::OpenAI, ProviderType::Anthropic];

        println!("\n{}", "🔄 Available providers:".cyan().bold());
        println!();

        for (i, provider) in providers.iter().enumerate() {
            let marker = if *provider == self.provider { "→ " } else { "  " };
            let has_key = if Self::has_api_key(*provider) {
                "✓ API key configured".green()
            } else {
                "⚠ No API key".yellow()
            };
            let num = format!("[{}]", i + 1);
            println!(" {} {} {} - {}", marker, num.dimmed(), provider.to_string().white().bold(), has_key);
        }

        println!();
        println!("Enter number to select:");
        print!("> ");
        io::stdout().flush().unwrap();

        let mut input = String::new();
        io::stdin().read_line(&mut input).ok();
        let input = input.trim();

        if let Ok(num) = input.parse::<usize>() {
            if num >= 1 && num <= providers.len() {
                let new_provider = providers[num - 1];

                if !Self::has_api_key(new_provider) {
                    Self::prompt_api_key(new_provider)?;
                }

                self.provider = new_provider;

                let default_model = match new_provider {
                    ProviderType::OpenAI => "gpt-5.2",
                    ProviderType::Anthropic => "claude-sonnet-4-20250514",
                };
                self.model = default_model.to_string();

                println!("{}", format!("✓ Switched to {} with model {}", new_provider, default_model).green());
            } else {
                println!("{}", "Invalid selection".red());
            }
        }

        Ok(())
    }

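    /// Prints the list of slash commands with their aliases and descriptions.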
    pub fn print_help() {
        println!();
        println!(" {}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━{}", ansi::PURPLE, ansi::RESET);
        println!(" {}📖 Available Commands{}", ansi::PURPLE, ansi::RESET);
        println!(" {}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━{}", ansi::PURPLE, ansi::RESET);
        println!();

        for cmd in SLASH_COMMANDS.iter() {
            let alias = cmd.alias.map(|a| format!(" ({})", a)).unwrap_or_default();
            println!(" {}/{:<12}{}{} - {}{}{}",
                ansi::CYAN, cmd.name, alias, ansi::RESET,
                ansi::DIM, cmd.description, ansi::RESET
            );
        }

        println!();
        println!(" {}Tip: Type / to see interactive command picker!{}", ansi::DIM, ansi::RESET);
        println!();
    }

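    /// Prints the Syncable ASCII-art logo.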
    pub fn print_logo() {
        let purple = "\x1b[38;5;141m";
        let orange = "\x1b[38;5;216m";
        let pink = "\x1b[38;5;212m";
        let magenta = "\x1b[38;5;207m";
        let reset = "\x1b[0m";

        println!();
        println!(
            "{} ███████╗{}{} ██╗   ██╗{}{}███╗   ██╗{}{}  ██████╗{}{}  █████╗ {}{}██████╗ {}{}██╗     {}{}███████╗{}",
            purple, reset, purple, reset, orange, reset, orange, reset, pink, reset, pink, reset, magenta, reset, magenta, reset
        );
        println!(
            "{} ██╔════╝{}{} ╚██╗ ██╔╝{}{}████╗  ██║{}{} ██╔════╝{}{} ██╔══██╗{}{}██╔══██╗{}{}██║     {}{}██╔════╝{}",
            purple, reset, purple, reset, orange, reset, orange, reset, pink, reset, pink, reset, magenta, reset, magenta, reset
        );
        println!(
            "{} ███████╗{}{}  ╚████╔╝ {}{}██╔██╗ ██║{}{} ██║     {}{} ███████║{}{}██████╔╝{}{}██║     {}{}█████╗  {}",
            purple, reset, purple, reset, orange, reset, orange, reset, pink, reset, pink, reset, magenta, reset, magenta, reset
        );
        println!(
            "{} ╚════██║{}{}   ╚██╔╝  {}{}██║╚██╗██║{}{} ██║     {}{} ██╔══██║{}{}██╔══██╗{}{}██║     {}{}██╔══╝  {}",
            purple, reset, purple, reset, orange, reset, orange, reset, pink, reset, pink, reset, magenta, reset, magenta, reset
        );
        println!(
            "{} ███████║{}{}    ██║   {}{}██║ ╚████║{}{} ╚██████╗{}{} ██║  ██║{}{}██████╔╝{}{}███████╗{}{}███████╗{}",
            purple, reset, purple, reset, orange, reset, orange, reset, pink, reset, pink, reset, magenta, reset, magenta, reset
        );
        println!(
            "{} ╚══════╝{}{}    ╚═╝   {}{}╚═╝  ╚═══╝{}{}  ╚═════╝{}{} ╚═╝  ╚═╝{}{}╚═════╝ {}{}╚══════╝{}{}╚══════╝{}",
            purple, reset, purple, reset, orange, reset, orange, reset, pink, reset, pink, reset, magenta, reset, magenta, reset
        );
        println!();
    }

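    /// Prints the startup banner: the logo plus the active provider and model.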
    pub fn print_banner(&self) {
        Self::print_logo();

        println!(
            " {} {} powered by {}: {}",
            ROBOT,
            "Syncable Agent".white().bold(),
            self.provider.to_string().cyan(),
            self.model.cyan()
        );
        println!(
            " {}",
            "Your AI-powered code analysis assistant".dimmed()
        );
        println!();
        println!(
            " {} Type your questions. Use {} to exit.\n",
            "→".cyan(),
            "exit".yellow().bold()
        );
    }

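    /// Dispatches a slash command. Returns `Ok(false)` when the session should
    /// end (e.g. `/exit`), otherwise `Ok(true)`.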
    pub fn process_command(&mut self, input: &str) -> AgentResult<bool> {
        let cmd = input.trim().to_lowercase();

        if cmd == "/" {
            Self::print_help();
            return Ok(true);
        }

        match cmd.as_str() {
            "/exit" | "/quit" | "/q" => {
                println!("\n{}", "👋 Goodbye!".green());
                return Ok(false);
            }
            "/help" | "/h" | "/?" => {
                Self::print_help();
            }
            "/model" | "/m" => {
                self.handle_model_command()?;
            }
            "/provider" | "/p" => {
                self.handle_provider_command()?;
            }
            "/cost" => {
                self.token_usage.print_report(&self.model);
            }
            "/clear" | "/c" => {
                self.history.clear();
                println!("{}", "✓ Conversation history cleared".green());
            }
            _ => {
                if cmd.starts_with('/') {
                    println!("{}", format!("Unknown command: {}. Type /help for available commands.", cmd).yellow());
                }
            }
        }

        Ok(true)
    }

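    /// Returns true if the input looks like a slash command.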
    pub fn is_command(input: &str) -> bool {
        input.trim().starts_with('/')
    }

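    /// Reads a line of user input with slash-command autocompletion. Cancelling
    /// or interrupting the prompt is treated as "exit"; a slash command followed
    /// by arguments is reduced to just the command name.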
    pub fn read_input(&self) -> io::Result<String> {
        let input = Text::new("You:")
            .with_autocomplete(SlashCommandAutocomplete::new())
            .with_help_message("Type / for commands, or ask a question")
            .prompt();

        match input {
            Ok(text) => {
                let trimmed = text.trim();
                if trimmed.starts_with('/') && trimmed.contains(' ') {
                    if let Some(cmd) = trimmed.split_whitespace().next() {
                        return Ok(cmd.to_string());
                    }
                }
                Ok(trimmed.to_string())
            }
            Err(inquire::InquireError::OperationCanceled) => Ok("exit".to_string()),
            Err(inquire::InquireError::OperationInterrupted) => Ok("exit".to_string()),
            Err(e) => Err(io::Error::new(io::ErrorKind::Other, e.to_string())),
        }
    }
}