use anyhow::Result;
use colored::Colorize;
use dialoguer::{Input, Select};
use std::path::PathBuf;

use crate::commands::helpers::is_interactive;
use crate::config::{Config, LLMConfig};
use crate::storage::Storage;

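/// Initializes SCUD in the given project root. The LLM provider and model come
/// from the explicit provider argument when present, from an interactive prompt
/// when running in a terminal, and otherwise from the Anthropic default.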
pub fn run(project_root: Option<PathBuf>, provider_arg: Option<String>) -> Result<()> {
    let storage = Storage::new(project_root);

    if storage.is_initialized() {
        println!("{}", "✓ SCUD is already initialized".green());
        return Ok(());
    }

    println!("{}", "Initializing SCUD...".blue());
    println!();

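    // Resolve the (provider, model) pair: an explicit provider argument wins,
    // otherwise prompt interactively, otherwise fall back to the default.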
    let (provider, model) = if let Some(provider_name) = provider_arg {
        let provider = provider_name.to_lowercase();
        if !matches!(
            provider.as_str(),
            "xai" | "anthropic" | "openai" | "openrouter"
        ) {
            anyhow::bail!(
                "Invalid provider: {}. Valid options: xai, anthropic, openai, openrouter",
                provider
            );
        }
        let model = Config::default_model_for_provider(&provider).to_string();
        (provider, model)
    } else if is_interactive() {
        let providers = vec![
            "xAI (Grok)",
            "Anthropic (Claude)",
            "OpenAI (GPT)",
            "OpenRouter",
        ];
        let provider_selection = Select::new()
            .with_prompt("Select your LLM provider")
            .items(&providers)
            .default(0)
            .interact()?;

        let provider = match provider_selection {
            0 => "xai",
            1 => "anthropic",
            2 => "openai",
            3 => "openrouter",
            _ => "anthropic",
        };

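        // Offer the provider's suggested models plus a free-form "Custom" entry.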
        let suggested = Config::suggested_models_for_provider(provider);
        let mut model_options: Vec<String> = suggested.iter().map(|s| s.to_string()).collect();
        model_options.push("Custom (enter model name)".to_string());

        let model_selection = Select::new()
            .with_prompt("Select model (or choose Custom to enter any model)")
            .items(&model_options)
            .default(0)
            .interact()?;

        let model = if model_selection == model_options.len() - 1 {
            Input::<String>::new()
                .with_prompt("Enter model name")
                .interact_text()?
        } else {
            suggested[model_selection].to_string()
        };

        (provider.to_string(), model)
    } else {
        let provider = "anthropic";
        let model = Config::default_model_for_provider(provider);
        (provider.to_string(), model.to_string())
    };

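    // Build the configuration from the chosen provider and model and write it to storage.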
    let config = Config {
        llm: LLMConfig {
            provider,
            model,
            max_tokens: 4096,
        },
    };

    storage.initialize_with_config(&config)?;

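    // Summarize the configuration and point out the API key variable to export.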
    println!("\n{}", "✅ SCUD initialized successfully!".green().bold());
    println!("\n{}", "Configuration:".blue());
    println!(" Provider: {}", config.llm.provider.yellow());
    println!(" Model: {}", config.llm.model.yellow());
    println!("\n{}", "Environment variable required:".blue());
    println!(
        " export {}=your-api-key",
        config.api_key_env_var().yellow()
    );
    println!("\n{}", "Next steps:".blue());
    println!(" 1. Set your API key environment variable");
    println!(" 2. Run: scud tags");
    println!(" 3. Create or import tasks, then use: /scud:task-next\n");

    Ok(())
}