use anyhow::Result;
use colored::Colorize;
use dialoguer::{theme::ColorfulTheme, Confirm, Input, Select};

use crate::auth::oauth::OAuthClient;
use crate::auth::token_storage;
use crate::cli::{AuthAction, AuthCommand};
use crate::config::Config;
use crate::output::progress;

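/// Helper for consistently styled terminal output in the auth subcommands.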
struct AuthOutput;

impl AuthOutput {
    fn header(&self, text: &str) {
        println!("\n{}", text.cyan().bold());
    }

    fn subheader(&self, text: &str) {
        println!("{}", text.dimmed());
    }

    fn success(&self, message: &str) {
        println!("{}", format!("✓ {}", message).green().bold());
    }

    fn warning(&self, message: &str) {
        println!("{}", format!("⚠ {}", message).yellow());
    }

    fn error(&self, message: &str) {
        println!("{}", format!("✗ {}", message).red().bold());
    }

    fn info(&self, message: &str) {
        println!("{}", message.cyan());
    }

    fn divider(&self) {
        println!("{}", "─".repeat(40).dimmed());
    }

    fn section(&self, title: &str) {
        self.divider();
        println!("{}", title.cyan().bold());
        self.divider();
    }

    fn key_value(&self, key: &str, value: &str) {
        println!(" {}: {}", key.dimmed(), value);
    }
}

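/// Entry point for the `auth` subcommand: dispatches to login, logout, or status.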
pub async fn execute(cmd: AuthCommand) -> Result<()> {
    match cmd.action {
        AuthAction::Login => login().await,
        AuthAction::Logout => logout().await,
        AuthAction::Status => status().await,
    }
}

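/// Interactive login flow: pick a provider, then run its authentication handler.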
async fn login() -> Result<()> {
    let out = AuthOutput;
    out.header("🚀 Welcome to Rusty Commit Authentication");
    out.divider();

    if token_storage::has_valid_token() {
        let should_reauth = Confirm::with_theme(&ColorfulTheme::default())
            .with_prompt("You are already authenticated. Do you want to re-authenticate?")
            .default(false)
            .interact()?;

        if !should_reauth {
            out.success("Authentication unchanged");
            return Ok(());
        }
    }

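    // Providers shown in the interactive selector; keep this order in sync
    // with the index match below.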
    let providers = vec![
        (
            "Anthropic Claude",
            "Use Claude Pro/Max subscription or API key",
        ),
        (
            "OpenAI Codex (ChatGPT)",
            "Use ChatGPT Pro/Plus subscription via OAuth",
        ),
        (
            "GitHub Copilot",
            "Use GitHub Copilot subscription (recommended)",
        ),
        ("OpenAI", "GPT models with OpenAI API key"),
        ("Google Gemini", "Google Gemini and Vertex AI models"),
        ("OpenRouter", "Access 200+ models via OpenRouter"),
        (
            "Perplexity",
            "Cost-effective AI models with web search capabilities",
        ),
        ("Groq", "Fast inference with Groq API"),
        ("DeepSeek", "DeepSeek models and API"),
        ("Mistral", "Mistral AI models and API"),
        ("AWS Bedrock", "Amazon Bedrock AI models"),
        ("Azure OpenAI", "Azure-hosted OpenAI models"),
        ("Together AI", "Together AI platform"),
        ("DeepInfra", "DeepInfra hosted models"),
        ("Hugging Face", "Hugging Face Inference API"),
        ("GitHub Models", "GitHub hosted AI models"),
        ("Qwen (Alibaba)", "Qwen AI models via DashScope"),
        ("Ollama", "Local Ollama instance"),
        ("Other", "Custom OpenAI-compatible provider"),
    ];

    let selection = Select::with_theme(&ColorfulTheme::default())
        .with_prompt("🤖 Select AI provider")
        .items(
            &providers
                .iter()
                .map(|(name, desc)| format!("{:<20} {}", name, desc.dimmed()))
                .collect::<Vec<_>>(),
        )
        .default(0)
        .interact()?;

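    // Dispatch to the handler for the selected provider.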
    match selection {
        0 => handle_anthropic_auth().await,
        1 => handle_codex_auth().await,
        2 => handle_github_copilot_auth().await,
        3 => handle_openai_auth().await,
        4 => handle_gemini_auth().await,
        5 => handle_openrouter_auth().await,
        6 => handle_perplexity_auth().await,
        7 => handle_groq_auth().await,
        8 => handle_deepseek_auth().await,
        9 => handle_mistral_auth().await,
        10 => handle_aws_bedrock_auth().await,
        11 => handle_azure_auth().await,
        12 => handle_together_auth().await,
        13 => handle_deepinfra_auth().await,
        14 => handle_huggingface_auth().await,
        15 => handle_github_models_auth().await,
        16 => handle_qwen_auth().await,
        17 => handle_ollama_auth().await,
        18 => handle_manual_auth().await,
        _ => unreachable!(),
    }
}

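/// Anthropic Claude: choose between OAuth (Pro/Max) and API-key authentication.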
async fn handle_anthropic_auth() -> Result<()> {
    println!("\n{}", "🧠 Anthropic Claude Authentication".cyan().bold());

    let auth_methods = vec![
        "Claude Pro/Max (OAuth) - Recommended",
        "API Key (Console) - Create new key",
        "API Key (Manual) - Enter existing key",
    ];

    let method = Select::with_theme(&ColorfulTheme::default())
        .with_prompt("Choose authentication method")
        .items(&auth_methods)
        .default(0)
        .interact()?;

    match method {
        0 => handle_claude_oauth().await,
        1 => handle_claude_api_key_creation().await,
        2 => handle_manual_api_key("anthropic").await,
        _ => unreachable!(),
    }
}

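/// OAuth flow against a Claude Pro/Max subscription: opens the browser,
/// waits for the local callback, then stores the returned tokens.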
async fn handle_claude_oauth() -> Result<()> {
    println!("\n{}", "🔐 Starting Claude OAuth authentication...".cyan());
    println!(
        "{}",
        "This will use your Claude Pro/Max subscription".dimmed()
    );

    let oauth_client = OAuthClient::new();
    let (auth_url, verifier) = oauth_client.get_authorization_url()?;

    println!(
        "\n{}",
        "Please visit the following URL to authenticate:".bold()
    );
    println!("{}", auth_url.blue().underline());

    if webbrowser::open(&auth_url).is_ok() {
        println!("\n{}", "✓ Browser opened automatically".green());
    } else {
        println!(
            "\n{}",
            "⚠ Could not open browser automatically. Please visit the URL above.".yellow()
        );
    }

    let pb = progress::oauth_wait_spinner();

    match oauth_client.start_callback_server(verifier).await {
        Ok(token_response) => {
            pb.finish_and_clear();

            token_storage::store_tokens(
                &token_response.access_token,
                token_response.refresh_token.as_deref(),
                token_response.expires_in,
            )?;

            println!("{}", "✓ Authentication successful!".green().bold());
            println!(" You can now use Rusty Commit with your Claude account.");

            let mut config = Config::load()?;
            config.ai_provider = Some("anthropic".to_string());
            config.save()?;

            Ok(())
        }
        Err(e) => {
            pb.finish_and_clear();
            println!("{}", format!("✗ Authentication failed: {}", e).red().bold());
            Err(e)
        }
    }
}

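/// Placeholder for automatic Console API key creation; currently falls back
/// to manual entry.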
async fn handle_claude_api_key_creation() -> Result<()> {
    println!("\n{}", "🔑 Creating Claude API Key".cyan());
    println!(
        "{}",
        "This will create a new API key in your Claude Console".dimmed()
    );

    println!(
        "{}",
        "⚠️ Automatic API key creation not yet implemented".yellow()
    );
    println!(
        "{}",
        "Please create an API key manually at: https://console.anthropic.com/settings/keys".cyan()
    );

    handle_manual_api_key("anthropic").await
}

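/// OAuth flow against a ChatGPT Pro/Plus subscription (Codex backend).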
async fn handle_codex_auth() -> Result<()> {
    use crate::auth::codex_oauth::CodexOAuthClient;

    println!(
        "\n{}",
        "🔐 OpenAI Codex (ChatGPT) Authentication".cyan().bold()
    );
    println!(
        "{}",
        "This will use your ChatGPT Pro/Plus subscription".dimmed()
    );

    let oauth_client = CodexOAuthClient::new();
    let (auth_url, verifier) = oauth_client.get_authorization_url()?;

    println!(
        "\n{}",
        "Please visit the following URL to authenticate:".bold()
    );
    println!("{}", auth_url.blue().underline());

    if webbrowser::open(&auth_url).is_ok() {
        println!("\n{}", "✓ Browser opened automatically".green());
    } else {
        println!(
            "\n{}",
            "⚠ Could not open browser automatically. Please visit the URL above.".yellow()
        );
    }

    let pb = progress::oauth_wait_spinner();

    match oauth_client.start_callback_server(verifier).await {
        Ok(token_response) => {
            pb.finish_and_clear();

            let expires_in = token_response.expires_in.unwrap_or(3600);
            crate::auth::token_storage::store_tokens(
                &token_response.access_token,
                Some(&token_response.refresh_token),
                Some(expires_in),
            )?;

            println!("{}", "✓ Authentication successful!".green().bold());
            println!(" You can now use Rusty Commit with your ChatGPT account.");

            let mut config = Config::load()?;
            config.ai_provider = Some("codex".to_string());
            config.model = Some("gpt-5.1-codex".to_string());
            config.api_url = Some("https://chatgpt.com/backend-api/codex/responses".to_string());
            config.save()?;

            Ok(())
        }
        Err(e) => {
            pb.finish_and_clear();
            println!("{}", format!("✗ Authentication failed: {}", e).red().bold());
            Err(e)
        }
    }
}

async fn handle_openai_auth() -> Result<()> {
    println!("\n{}", "🤖 OpenAI Authentication".cyan().bold());
    println!(
        "{}",
        "Get your API key from: https://platform.openai.com/api-keys".cyan()
    );

    handle_manual_api_key("openai").await
}

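/// Ollama: configure a local or remote instance URL and a model name.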
async fn handle_ollama_auth() -> Result<()> {
    println!("\n{}", "🦙 Ollama Configuration".cyan().bold());

    let use_local = Confirm::with_theme(&ColorfulTheme::default())
        .with_prompt("Are you using a local Ollama instance?")
        .default(true)
        .interact()?;

    let mut config = Config::load()?;
    config.ai_provider = Some("ollama".to_string());

    if use_local {
        config.api_url = Some("http://localhost:11434".to_string());
        println!(
            "{}",
            "✓ Configured for local Ollama (http://localhost:11434)".green()
        );
        eprintln!(
            "{}",
            "Warning: Using HTTP for local Ollama. This is acceptable for localhost connections."
                .yellow()
        );
    } else {
        let url: String = Input::with_theme(&ColorfulTheme::default())
            .with_prompt("Enter Ollama URL")
            .default("http://localhost:11434".to_string())
            .interact_text()?;

        config.api_url = Some(url.clone());

        if url.starts_with("http://") && !url.contains("localhost") && !url.contains("127.0.0.1") {
            eprintln!(
                "{}",
                "Warning: Using insecure HTTP connection. Your API requests may be intercepted. \
                 Consider using HTTPS for production use."
                    .red()
            );
        }

        println!(
            "{}",
            format!("✓ Configured for remote Ollama ({})", url).green()
        );
    }

    let model: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt("Enter model name")
        .default("mistral".to_string())
        .interact_text()?;

    config.model = Some(model.clone());
    config.save()?;

    println!(
        "{}",
        format!("✓ Ollama configured with model: {}", model)
            .green()
            .bold()
    );
    Ok(())
}

async fn handle_gemini_auth() -> Result<()> {
    println!("\n{}", "💎 Google Gemini Authentication".cyan().bold());
    println!(
        "{}",
        "Get your API key from: https://aistudio.google.com/app/apikey".cyan()
    );

    handle_manual_api_key("gemini").await
}

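/// Azure OpenAI: prompts for API key, resource endpoint, and deployment name.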
async fn handle_azure_auth() -> Result<()> {
    println!("\n{}", "☁️ Azure OpenAI Configuration".cyan().bold());

    let mut config = Config::load()?;
    config.ai_provider = Some("azure".to_string());

    let api_key: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt("Enter Azure OpenAI API key")
        .interact_text()?;

    let endpoint: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt("Enter Azure OpenAI endpoint")
        .default("https://your-resource.openai.azure.com".to_string())
        .interact_text()?;

    let deployment: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt("Enter deployment name")
        .default("gpt-35-turbo".to_string())
        .interact_text()?;

    config.api_key = Some(api_key);
    config.api_url = Some(endpoint);
    config.model = Some(deployment);
    config.save()?;

    println!(
        "{}",
        "✓ Azure OpenAI configured successfully".green().bold()
    );
    Ok(())
}

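/// Prompt for an API key, store it in the config, and apply sensible
/// per-provider defaults.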
async fn handle_manual_api_key(provider: &str) -> Result<()> {
    let api_key: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt(format!("Enter {} API key", provider))
        .interact_text()?;

    if api_key.trim().is_empty() {
        println!("{}", "❌ API key cannot be empty".red());
        return Ok(());
    }

    let mut config = Config::load()?;
    config.api_key = Some(api_key);
    config.ai_provider = Some(provider.to_string());

460 match provider {
462 "anthropic" => {
463 config.model = Some("claude-3-5-haiku-20241022".to_string());
464 }
465 "openai" => {
466 config.model = Some("gpt-4o-mini".to_string());
467 }
468 "gemini" => {
469 config.model = Some("gemini-1.5-pro".to_string());
470 config.api_url = Some("https://generativelanguage.googleapis.com/v1beta".to_string());
471 }
472 "openrouter" => {
473 config.model = Some("openai/gpt-4o-mini".to_string());
474 config.api_url = Some("https://openrouter.ai/api/v1".to_string());
475 }
476 "perplexity" => {
477 config.model = Some("llama-3.1-sonar-small-128k-online".to_string());
478 config.api_url = Some("https://api.perplexity.ai".to_string());
479 }
480 "groq" => {
481 config.model = Some("llama-3.1-70b-versatile".to_string());
482 config.api_url = Some("https://api.groq.com/openai/v1".to_string());
483 }
484 "deepseek" => {
485 config.model = Some("deepseek-chat".to_string());
486 config.api_url = Some("https://api.deepseek.com".to_string());
487 }
488 "mistral" => {
489 config.model = Some("mistral-large-latest".to_string());
490 config.api_url = Some("https://api.mistral.ai/v1".to_string());
491 }
492 "together" => {
493 config.model = Some("meta-llama/Llama-3.2-3B-Instruct-Turbo".to_string());
494 config.api_url = Some("https://api.together.xyz/v1".to_string());
495 }
496 "deepinfra" => {
497 config.model = Some("meta-llama/Llama-3.2-3B-Instruct".to_string());
498 config.api_url = Some("https://api.deepinfra.com/v1/openai".to_string());
499 }
500 "huggingface" => {
501 config.model = Some("meta-llama/Llama-3.2-3B-Instruct".to_string());
502 config.api_url = Some("https://api-inference.huggingface.co/v1".to_string());
503 }
504 "github-models" => {
505 config.model = Some("gpt-4o".to_string());
506 config.api_url = Some("https://models.inference.ai.azure.com".to_string());
507 }
508 _ => {}
509 }
510
511 config.save()?;
512
513 println!(
514 "{}",
515 format!("✓ {} API key configured successfully", provider)
516 .green()
517 .bold()
518 );
519 Ok(())
520}
521
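/// GitHub Copilot: device-flow OAuth is not implemented yet, so this only
/// records the provider/model and defers authentication to the GitHub CLI.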
async fn handle_github_copilot_auth() -> Result<()> {
    println!("\n{}", "🐙 GitHub Copilot Authentication".cyan().bold());
    println!(
        "{}",
        "GitHub Copilot provides free AI assistance to subscribers".dimmed()
    );

    println!(
        "{}",
        "⚠️ GitHub OAuth device flow not yet implemented".yellow()
    );
    println!("{}", "Please use GitHub CLI: gh auth login".cyan());

    let mut config = Config::load()?;
    config.ai_provider = Some("github-copilot".to_string());
    config.model = Some("gpt-4o".to_string());
    config.save()?;

    println!(
        "{}",
        "✓ GitHub Copilot configured (requires GitHub CLI auth)"
            .green()
            .bold()
    );
    Ok(())
}

async fn handle_openrouter_auth() -> Result<()> {
    println!("\n{}", "🔄 OpenRouter Authentication".cyan().bold());
    println!(
        "{}",
        "Get your API key from: https://openrouter.ai/keys".cyan()
    );

    let mut config = Config::load()?;
    config.ai_provider = Some("openrouter".to_string());
    config.model = Some("openai/gpt-4o".to_string());
    config.api_url = Some("https://openrouter.ai/api/v1".to_string());

    handle_manual_api_key("openrouter").await
}

async fn handle_groq_auth() -> Result<()> {
    println!("\n{}", "⚡ Groq Authentication".cyan().bold());
    println!(
        "{}",
        "Get your API key from: https://console.groq.com/keys".cyan()
    );

    let mut config = Config::load()?;
    config.ai_provider = Some("groq".to_string());
    config.model = Some("llama-3.1-70b-versatile".to_string());
    config.api_url = Some("https://api.groq.com/openai/v1".to_string());

    handle_manual_api_key("groq").await
}

async fn handle_deepseek_auth() -> Result<()> {
    println!("\n{}", "🧠 DeepSeek Authentication".cyan().bold());
    println!(
        "{}",
        "Get your API key from: https://platform.deepseek.com/api_keys".cyan()
    );

    let mut config = Config::load()?;
    config.ai_provider = Some("deepseek".to_string());
    config.model = Some("deepseek-chat".to_string());
    config.api_url = Some("https://api.deepseek.com".to_string());

    handle_manual_api_key("deepseek").await
}

async fn handle_mistral_auth() -> Result<()> {
    println!("\n{}", "🌪️ Mistral AI Authentication".cyan().bold());
    println!(
        "{}",
        "Get your API key from: https://console.mistral.ai/".cyan()
    );

    let mut config = Config::load()?;
    config.ai_provider = Some("mistral".to_string());
    config.model = Some("mistral-large-latest".to_string());
    config.api_url = Some("https://api.mistral.ai/v1".to_string());

    handle_manual_api_key("mistral").await
}

async fn handle_qwen_auth() -> Result<()> {
    println!(
        "\n{}",
        "🌟 Qwen AI (Alibaba DashScope) Authentication"
            .cyan()
            .bold()
    );
    println!(
        "{}",
        "Get your API key from: https://dashscope.console.aliyun.com/".cyan()
    );

    let mut config = Config::load()?;
    config.ai_provider = Some("qwen".to_string());
    config.model = Some("qwen3-coder:480b".to_string());
    config.api_url = Some("https://dashscope.aliyuncs.com/compatible-mode/v1".to_string());

    handle_manual_api_key("qwen").await
}

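/// AWS Bedrock: several credential options (Bedrock API key, AWS profile,
/// environment variables, or an IAM role).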
async fn handle_aws_bedrock_auth() -> Result<()> {
    println!("\n{}", "☁️ AWS Bedrock Authentication".cyan().bold());
    println!(
        "{}",
        "AWS Bedrock supports multiple authentication methods".dimmed()
    );

    let auth_methods = vec![
        "API Key (Bedrock) - Recommended for quick setup",
        "AWS Profile - Use configured AWS profile",
        "Environment Variables - AWS_ACCESS_KEY_ID & AWS_SECRET_ACCESS_KEY",
        "IAM Role - For EC2/Lambda environments",
    ];

    let method = Select::with_theme(&ColorfulTheme::default())
        .with_prompt("Choose AWS authentication method")
        .items(&auth_methods)
        .default(0)
        .interact()?;

    let mut config = Config::load()?;
    config.ai_provider = Some("amazon-bedrock".to_string());
    config.model = Some("us.anthropic.claude-3-5-haiku-20241022-v1:0".to_string());

    match method {
        0 => {
            println!(
                "{}",
                "Enter your AWS Bedrock API key (new feature in 2025)".cyan()
            );
            println!(
                "{}",
                "This will be stored in AWS_BEARER_TOKEN_BEDROCK".dimmed()
            );
            handle_manual_api_key_with_env("amazon-bedrock", "AWS_BEARER_TOKEN_BEDROCK").await
        }
        1 => {
            let profile: String = Input::with_theme(&ColorfulTheme::default())
                .with_prompt("Enter AWS profile name")
                .default("default".to_string())
                .interact_text()?;

            std::env::set_var("AWS_PROFILE", &profile);
            println!(
                "{}",
                format!("✓ AWS Bedrock configured with profile: {}", profile)
                    .green()
                    .bold()
            );
            config.save()?;
            Ok(())
        }
        2 => {
            println!("{}", "Please set these environment variables:".cyan());
            println!(" export AWS_ACCESS_KEY_ID=your_access_key");
            println!(" export AWS_SECRET_ACCESS_KEY=your_secret_key");
            println!(" export AWS_REGION=us-east-1 # optional");
            println!(
                "{}",
                "✓ AWS Bedrock configured for environment variables"
                    .green()
                    .bold()
            );
            config.save()?;
            Ok(())
        }
        3 => {
            println!("{}", "✓ AWS Bedrock configured for IAM role".green().bold());
            println!(" Ensure your EC2/Lambda role has bedrock:InvokeModel permissions");
            config.save()?;
            Ok(())
        }
        _ => unreachable!(),
    }
}

async fn handle_together_auth() -> Result<()> {
    println!("\n{}", "🤝 Together AI Authentication".cyan().bold());
    println!(
        "{}",
        "Get your API key from: https://api.together.xyz/settings/api-keys".cyan()
    );

    let mut config = Config::load()?;
    config.ai_provider = Some("together".to_string());
    config.model = Some("meta-llama/Llama-3.2-3B-Instruct-Turbo".to_string());
    config.api_url = Some("https://api.together.xyz/v1".to_string());

    handle_manual_api_key("together").await
}

async fn handle_deepinfra_auth() -> Result<()> {
    println!("\n{}", "🏗️ DeepInfra Authentication".cyan().bold());
    println!(
        "{}",
        "Get your API key from: https://deepinfra.com/dash/api_keys".cyan()
    );

    let mut config = Config::load()?;
    config.ai_provider = Some("deepinfra".to_string());
    config.model = Some("meta-llama/Llama-3.2-3B-Instruct".to_string());
    config.api_url = Some("https://api.deepinfra.com/v1/openai".to_string());

    handle_manual_api_key("deepinfra").await
}

async fn handle_huggingface_auth() -> Result<()> {
    println!("\n{}", "🤗 Hugging Face Authentication".cyan().bold());
    println!(
        "{}",
        "Get your API key from: https://huggingface.co/settings/tokens".cyan()
    );

    let mut config = Config::load()?;
    config.ai_provider = Some("huggingface".to_string());
    config.model = Some("meta-llama/Llama-3.2-3B-Instruct".to_string());
    config.api_url = Some("https://api-inference.huggingface.co/v1".to_string());

    handle_manual_api_key("huggingface").await
}

async fn handle_github_models_auth() -> Result<()> {
    println!("\n{}", "🐙 GitHub Models Authentication".cyan().bold());
    println!(
        "{}",
        "Get your token from: https://github.com/settings/personal-access-tokens".cyan()
    );
    println!("{}", "Requires 'Model Inference' permission".dimmed());

    let mut config = Config::load()?;
    config.ai_provider = Some("github-models".to_string());
    config.model = Some("gpt-4o".to_string());
    config.api_url = Some("https://models.inference.ai.azure.com".to_string());

    handle_manual_api_key("github-models").await
}

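/// Like `handle_manual_api_key`, but the key is exported as an environment
/// variable instead of being written to the config file.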
async fn handle_manual_api_key_with_env(provider: &str, env_var: &str) -> Result<()> {
    let api_key: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt(format!("Enter {} API key", provider))
        .interact_text()?;

    if api_key.trim().is_empty() {
        println!("{}", "❌ API key cannot be empty".red());
        return Ok(());
    }

    std::env::set_var(env_var, &api_key);

    let mut config = Config::load()?;
    config.ai_provider = Some(provider.to_string());
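    // The key itself lives only in the environment variable for this session;
    // the config records just the provider.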
    config.save()?;

    println!(
        "{}",
        format!(
            "✓ {} configured with environment variable {}",
            provider, env_var
        )
        .green()
        .bold()
    );
    println!(
        "{}",
        format!(
            " Environment variable {} has been set for this session",
            env_var
        )
        .dimmed()
    );
    Ok(())
}

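/// "Other": configure any OpenAI-compatible provider by name, base URL, and model.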
async fn handle_manual_auth() -> Result<()> {
    println!("\n{}", "🔧 Custom Provider Configuration".cyan().bold());
    println!("{}", "Configure any OpenAI-compatible provider".dimmed());

    let provider: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt("Enter provider name")
        .interact_text()?;

    let api_url: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt("Enter API base URL")
        .default("https://api.openai.com/v1".to_string())
        .interact_text()?;

    let model: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt("Enter default model name")
        .default("gpt-3.5-turbo".to_string())
        .interact_text()?;

    let mut config = Config::load()?;
    config.ai_provider = Some(provider.clone());
    config.api_url = Some(api_url);
    config.model = Some(model);
    config.save()?;

    handle_manual_api_key(&provider).await
}

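/// Remove any stored OAuth tokens.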
async fn logout() -> Result<()> {
    let out = AuthOutput;
    out.info("Logging out...");

    token_storage::delete_tokens()?;

    out.success("Successfully logged out");
    println!(" Your authentication tokens have been removed.");

    Ok(())
}

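/// Show the current authentication state: configured API key, OAuth tokens
/// (with expiry), and where credentials are stored.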
async fn status() -> Result<()> {
    let out = AuthOutput;
    out.section("Authentication Status");

    let config = Config::load()?;

    if config.api_key.is_some() {
        out.success("API Key configured");
        out.key_value(
            "Provider",
            config.ai_provider.as_deref().unwrap_or("openai"),
        );
        return Ok(());
    }

    if let Some(tokens) = token_storage::get_tokens()? {
        out.success("Authenticated with Claude OAuth");

        if tokens.is_expired() {
            out.warning("Token expired - please re-authenticate");
        } else if let Some(expires_at) = tokens.expires_at {
            let now = std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .unwrap()
                .as_secs();

            let remaining = expires_at - now;
            let hours = remaining / 3600;
            let minutes = (remaining % 3600) / 60;
            out.key_value("Token expires in", &format!("{}h {}m", hours, minutes));
        }

        if tokens.refresh_token.is_some() {
            out.key_value("Refresh token", "Available");
        }

        #[cfg(feature = "secure-storage")]
        if crate::config::secure_storage::is_available() {
            out.key_value("Storage", "System Keychain");
        } else {
            out.key_value("Storage", "~/.config/rustycommit/auth.json");
        }

        #[cfg(not(feature = "secure-storage"))]
        {
            out.key_value("Storage", "~/.config/rustycommit/auth.json");
        }
    } else {
        out.error("Not authenticated");
        out.subheader("To authenticate, run one of:");
        println!(
            " • {} - Use Claude OAuth (recommended for Pro/Max users)",
            "rco auth login".cyan()
        );
        println!(
            " • {} - Use API key",
            "rco config set RCO_API_KEY=<your_key>".cyan()
        );
    }

    println!("\n{}", "Storage Information:".bold());
    #[cfg(feature = "secure-storage")]
    println!(" {}", crate::config::secure_storage::status_message());

    #[cfg(not(feature = "secure-storage"))]
    println!(" Using file-based storage at ~/.config/rustycommit/auth.json");

    Ok(())
}

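/// Refresh the stored OAuth access token if it is about to expire.
/// Called opportunistically; failures only emit a warning.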
#[allow(dead_code)]
pub async fn auto_refresh_token() -> Result<()> {
    if let Some(tokens) = token_storage::get_tokens()? {
        if tokens.expires_soon() {
            if let Some(refresh_token) = &tokens.refresh_token {
                let oauth_client = OAuthClient::new();
                match oauth_client.refresh_token(refresh_token).await {
                    Ok(token_response) => {
                        token_storage::store_tokens(
                            &token_response.access_token,
                            token_response
                                .refresh_token
                                .as_deref()
                                .or(Some(refresh_token)),
                            token_response.expires_in,
                        )?;

                        tracing::debug!("Successfully refreshed OAuth token");
                    }
                    Err(e) => {
                        tracing::warn!(
                            "Failed to refresh token: {}. User may need to re-authenticate.",
                            e
                        );
                        eprintln!(
                            "{}",
                            format!(
                                "Warning: Failed to refresh OAuth token. You may need to run 'rco auth login' again. Error: {}",
                                e
                            ).red()
                        );
                    }
                }
            }
        }
    }

    Ok(())
}

async fn handle_perplexity_auth() -> Result<()> {
    println!("\n{}", "🔍 Perplexity Authentication".cyan().bold());
    println!(
        "{}",
        "Get your API key from: https://www.perplexity.ai/settings/api".cyan()
    );

    let mut config = Config::load()?;
    config.ai_provider = Some("perplexity".to_string());
    config.model = Some("llama-3.1-sonar-small-128k-online".to_string());
    config.api_url = Some("https://api.perplexity.ai".to_string());

    handle_manual_api_key("perplexity").await
}