//! `rusty_commit/commands/model.rs` — the `model` subcommand: list the
//! known models for the configured AI provider, or pick one interactively.
1use anyhow::Result;
2use colored::Colorize;
3use dialoguer::{theme::ColorfulTheme, FuzzySelect, Input};
4
5use crate::cli::ModelCommand;
6use crate::config::Config;
7
8pub async fn execute(cmd: ModelCommand) -> Result<()> {
9    let mut config = Config::load()?;
10
11    if cmd.list {
12        list_models(&config, cmd.provider.as_deref()).await?;
13        return Ok(());
14    }
15
16    // Interactive model selection
17    select_model_interactive(&mut config).await?;
18
19    Ok(())
20}
21
22async fn list_models(config: &Config, provider_override: Option<&str>) -> Result<()> {
23    let provider = provider_override
24        .unwrap_or(config.ai_provider.as_deref().unwrap_or("openai"))
25        .to_lowercase();
26
27    println!(
28        "{}",
29        format!("Available models for provider: {}", provider).green()
30    );
31    println!("{}", "─".repeat(50).dimmed());
32
33    let models = match provider.as_str() {
34        "openai" | "deepseek" | "groq" | "openrouter" | "together" | "deepinfra"
35        | "huggingface" | "mistral" | "fireworks" | "moonshot" | "qwen" | "qwen-coder"
36        | "amazon-bedrock" | "github-models" => vec![
37            "gpt-4o",
38            "gpt-4o-mini",
39            "gpt-4-turbo",
40            "gpt-4",
41            "gpt-3.5-turbo",
42        ],
43        "anthropic" | "claude" => vec![
44            "claude-sonnet-4-20250514",
45            "claude-opus-4-20250514",
46            "claude-sonnet-4",
47            "claude-opus-4",
48            "claude-3-5-sonnet-20241022",
49            "claude-3-5-haiku-20241022",
50            "claude-3-haiku-20240307",
51            "claude-3-sonnet-20240229",
52            "claude-3-opus-20240229",
53        ],
54        "ollama" => vec![
55            "llama3.3",
56            "llama3.2",
57            "llama3.1",
58            "llama3",
59            "mistral",
60            "mixtral",
61            "qwen2.5",
62            "codellama",
63            "deepseek-coder",
64            "starcoder2",
65        ],
66        "gemini" | "vertex" | "google-vertex" => vec![
67            "gemini-2.0-flash-exp",
68            "gemini-1.5-pro",
69            "gemini-1.5-flash",
70            "gemini-1.0-pro",
71        ],
72        "azure" | "azure-openai" => vec!["gpt-4o", "gpt-4o-mini", "gpt-4-turbo", "gpt-35-turbo"],
73        "perplexity" => vec!["sonar-reasoning", "sonar", "r1-1776", "doctl"],
74        _ => vec!["gpt-3.5-turbo", "gpt-4", "gpt-4o", "claude-3-5-sonnet"],
75    };
76
77    for (i, model) in models.iter().enumerate() {
78        let marker = if config.model.as_deref() == Some(&model[..]) {
79            "✓"
80        } else {
81            " "
82        };
83        println!("{}. {} {}", i + 1, marker, model);
84    }
85
86    println!();
87    println!("{}", "To set a model:".yellow());
88    println!("  rco config set RCO_MODEL=<model_name>");
89    println!("  rco model  # interactive selection");
90
91    Ok(())
92}
93
94async fn select_model_interactive(config: &mut Config) -> Result<()> {
95    let provider = config
96        .ai_provider
97        .as_deref()
98        .unwrap_or("openai")
99        .to_lowercase();
100
101    println!("{}", "🤖 Interactive Model Selection".green().bold());
102    println!("Current provider: {}", provider.cyan());
103    if let Some(current_model) = &config.model {
104        println!("Current model: {}", current_model.cyan());
105    }
106    println!("{}", "─".repeat(50).dimmed());
107
108    // Get model list for provider
109    let models = get_provider_models(&provider);
110
111    // Add "Custom model" option
112    let mut options = models.clone();
113    options.push("Enter custom model name".to_string());
114
115    let selection = FuzzySelect::with_theme(&ColorfulTheme::default())
116        .with_prompt("Select a model")
117        .items(&options)
118        .default(0)
119        .interact()?;
120
121    if selection == options.len() - 1 {
122        // Custom model
123        let custom_model: String = Input::with_theme(&ColorfulTheme::default())
124            .with_prompt("Enter model name")
125            .interact_text()?;
126
127        config.model = Some(custom_model);
128    } else {
129        config.model = Some(options[selection].clone());
130    }
131
132    // Save config
133    config.save()?;
134
135    println!();
136    if let Some(model) = &config.model {
137        println!("{}", format!("✅ Model set to: {}", model).green());
138    } else {
139        println!("{}", "✅ Model configured".green());
140    }
141
142    Ok(())
143}
144
/// Built-in model suggestions for the interactive picker, keyed by
/// provider name (callers pass it lowercased).
///
/// Unknown providers fall back to a small generic mixed list. The result
/// is owned `String`s so the caller can append its own entries.
fn get_provider_models(provider: &str) -> Vec<String> {
    // Static name tables; converted to owned Strings once at the end.
    let names: &[&str] = match provider {
        "openai" | "deepseek" | "groq" | "openrouter" | "together" | "deepinfra"
        | "huggingface" | "mistral" | "fireworks" | "moonshot" | "amazon-bedrock"
        | "github-models" => &[
            "gpt-4o",
            "gpt-4o-mini",
            "gpt-4-turbo",
            "gpt-4",
            "gpt-3.5-turbo",
        ],
        "anthropic" | "claude" => &[
            "claude-sonnet-4-20250514",
            "claude-opus-4-20250514",
            "claude-3-5-sonnet-20241022",
            "claude-3-5-haiku-20241022",
        ],
        "qwen" | "qwen-coder" | "dashscope" | "alibaba" => &[
            "qwen3-coder:480b",
            "qwen3-coder:30b-a3b",
            "qwen3-vl-235b-instruct",
            "qwen-turbo",
            "qwen-plus",
            "qwen-max",
        ],
        "ollama" => &[
            "llama3.3",
            "llama3.2",
            "llama3.1",
            "mistral",
            "mixtral",
            "qwen2.5",
            "codellama",
            "deepseek-coder",
        ],
        "gemini" | "vertex" | "google-vertex" => {
            &["gemini-2.0-flash-exp", "gemini-1.5-pro", "gemini-1.5-flash"]
        }
        "xai" | "grok" | "x-ai" => &[
            "grok-2-1212",
            "grok-2",
            "grok-beta",
            "grok-2-vision-1212",
        ],
        "codex" => &["gpt-5.1-codex", "gpt-5.1-codex-mini", "gpt-5.1-codex-max"],
        "perplexity" => &["sonar-reasoning", "sonar", "r1-1776"],
        _ => &["gpt-3.5-turbo", "gpt-4", "gpt-4o", "claude-3-5-sonnet"],
    };

    names.iter().map(|name| name.to_string()).collect()
}