//! Provider enumeration for supported AI model backends, with string
//! conversions (`Display`/`AsRef<str>`/`FromStr`) and capability queries.
1use serde::{Deserialize, Serialize};
2use std::fmt;
3use std::str::FromStr;
4
5use super::{ModelId, ModelParseError};
6
/// Supported AI model providers.
///
/// The canonical lowercase string form of each variant (used in config files
/// and serialization) is defined by the `Display`/`FromStr` impls below.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum Provider {
    /// Google Gemini models
    Gemini,
    /// OpenAI GPT models (the default provider)
    #[default]
    OpenAI,
    /// Anthropic Claude models
    Anthropic,
    /// GitHub Copilot preview integration (managed auth, no API key env var)
    Copilot,
    /// DeepSeek native models
    DeepSeek,
    /// OpenRouter marketplace models
    OpenRouter,
    /// Local Ollama models
    Ollama,
    /// LM Studio local models
    LmStudio,
    /// Moonshot.ai models
    Moonshot,
    /// Z.AI GLM models
    ZAI,
    /// MiniMax models
    Minimax,
    /// Hugging Face Inference Providers
    HuggingFace,
    /// OpenCode Zen gateway (pay-as-you-go)
    OpenCodeZen,
    /// OpenCode Go subscription
    OpenCodeGo,
}
41
42impl Provider {
43    /// Get the default API key environment variable for this provider
44    pub fn default_api_key_env(&self) -> &'static str {
45        match self {
46            Provider::Gemini => "GEMINI_API_KEY",
47            Provider::OpenAI => "OPENAI_API_KEY",
48            Provider::Anthropic => "ANTHROPIC_API_KEY",
49            Provider::Copilot => "",
50            Provider::DeepSeek => "DEEPSEEK_API_KEY",
51            Provider::OpenRouter => "OPENROUTER_API_KEY",
52            Provider::Ollama => "OLLAMA_API_KEY",
53            Provider::LmStudio => "LMSTUDIO_API_KEY",
54            Provider::Moonshot => "MOONSHOT_API_KEY",
55            Provider::ZAI => "ZAI_API_KEY",
56            Provider::Minimax => "MINIMAX_API_KEY",
57            Provider::HuggingFace => "HF_TOKEN",
58            Provider::OpenCodeZen => "OPENCODE_ZEN_API_KEY",
59            Provider::OpenCodeGo => "OPENCODE_GO_API_KEY",
60        }
61    }
62
63    /// Get all supported providers
64    pub fn all_providers() -> Vec<Provider> {
65        vec![
66            Provider::OpenAI,
67            Provider::Anthropic,
68            Provider::Copilot,
69            Provider::Minimax,
70            Provider::Gemini,
71            Provider::DeepSeek,
72            Provider::HuggingFace,
73            Provider::OpenRouter,
74            Provider::Ollama,
75            Provider::LmStudio,
76            Provider::Moonshot,
77            Provider::ZAI,
78            Provider::OpenCodeZen,
79            Provider::OpenCodeGo,
80        ]
81    }
82
83    /// Human-friendly label for display purposes
84    pub fn label(&self) -> &'static str {
85        match self {
86            Provider::Gemini => "Gemini",
87            Provider::OpenAI => "OpenAI",
88            Provider::Anthropic => "Anthropic",
89            Provider::Copilot => "GitHub Copilot",
90            Provider::DeepSeek => "DeepSeek",
91            Provider::OpenRouter => "OpenRouter",
92            Provider::Ollama => "Ollama",
93            Provider::LmStudio => "LM Studio",
94            Provider::Moonshot => "Moonshot",
95            Provider::ZAI => "Z.AI",
96            Provider::Minimax => "MiniMax",
97            Provider::HuggingFace => "Hugging Face",
98            Provider::OpenCodeZen => "OpenCode Zen",
99            Provider::OpenCodeGo => "OpenCode Go",
100        }
101    }
102
103    pub fn is_dynamic(&self) -> bool {
104        matches!(self, Provider::Copilot) || self.is_local()
105    }
106
107    pub fn is_local(&self) -> bool {
108        matches!(self, Provider::Ollama | Provider::LmStudio)
109    }
110
111    pub fn local_install_instructions(&self) -> Option<&'static str> {
112        match self {
113            Provider::Ollama => Some(
114                "Ollama server is not running. To start:\n  1. Install Ollama from https://ollama.com\n  2. Run 'ollama serve' in a terminal\n  3. Pull models using 'ollama pull <model-name>' (e.g., 'ollama pull gpt-oss:20b')",
115            ),
116            Provider::LmStudio => Some(
117                "LM Studio server is not running. To start:\n  1. Install LM Studio from https://lmstudio.ai\n  2. Open LM Studio and start the Local Server on port 1234\n  3. Load the model you want to use",
118            ),
119            _ => None,
120        }
121    }
122
123    /// Determine if the provider supports configurable reasoning effort for the model
124    pub fn supports_reasoning_effort(&self, model: &str) -> bool {
125        use crate::constants::models;
126
127        match self {
128            Provider::Gemini => models::google::REASONING_MODELS.contains(&model),
129            Provider::OpenAI => models::openai::REASONING_MODELS.contains(&model),
130            Provider::Anthropic => models::anthropic::REASONING_MODELS.contains(&model),
131            Provider::Copilot => false,
132            Provider::DeepSeek => model == models::deepseek::DEEPSEEK_REASONER,
133            Provider::OpenRouter => {
134                if let Ok(model_id) = ModelId::from_str(model) {
135                    if let Some(meta) = crate::models::openrouter_generated::metadata_for(model_id)
136                    {
137                        return meta.reasoning;
138                    }
139                    return matches!(
140                        model_id,
141                        ModelId::OpenRouterMinimaxM25 | ModelId::OpenRouterQwen3CoderNext
142                    );
143                }
144                models::openrouter::REASONING_MODELS.contains(&model)
145            }
146            Provider::Ollama => models::ollama::REASONING_LEVEL_MODELS.contains(&model),
147            Provider::LmStudio => models::lmstudio::REASONING_MODELS.contains(&model),
148            Provider::Moonshot => models::moonshot::REASONING_MODELS.contains(&model),
149            Provider::ZAI => models::zai::REASONING_MODELS.contains(&model),
150            Provider::Minimax => models::minimax::SUPPORTED_MODELS.contains(&model),
151            Provider::HuggingFace => models::huggingface::REASONING_MODELS.contains(&model),
152            Provider::OpenCodeZen => {
153                if models::opencode_zen::OPENAI_MODELS.contains(&model) {
154                    Provider::OpenAI.supports_reasoning_effort(model)
155                } else if models::opencode_zen::ANTHROPIC_MODELS.contains(&model) {
156                    Provider::Anthropic.supports_reasoning_effort(model)
157                } else {
158                    false
159                }
160            }
161            Provider::OpenCodeGo => false,
162        }
163    }
164
165    /// Determine if the provider supports the `service_tier` request parameter for the model.
166    pub fn supports_service_tier(&self, model: &str) -> bool {
167        use crate::constants::models;
168
169        match self {
170            Provider::OpenAI => models::openai::SERVICE_TIER_MODELS.contains(&model),
171            _ => false,
172        }
173    }
174
175    pub fn uses_managed_auth(&self) -> bool {
176        matches!(self, Provider::Copilot)
177    }
178}
179
180impl fmt::Display for Provider {
181    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
182        match self {
183            Provider::Gemini => write!(f, "gemini"),
184            Provider::OpenAI => write!(f, "openai"),
185            Provider::Anthropic => write!(f, "anthropic"),
186            Provider::Copilot => write!(f, "copilot"),
187            Provider::DeepSeek => write!(f, "deepseek"),
188            Provider::OpenRouter => write!(f, "openrouter"),
189            Provider::Ollama => write!(f, "ollama"),
190            Provider::LmStudio => write!(f, "lmstudio"),
191            Provider::Moonshot => write!(f, "moonshot"),
192            Provider::ZAI => write!(f, "zai"),
193            Provider::Minimax => write!(f, "minimax"),
194            Provider::HuggingFace => write!(f, "huggingface"),
195            Provider::OpenCodeZen => write!(f, "opencode-zen"),
196            Provider::OpenCodeGo => write!(f, "opencode-go"),
197        }
198    }
199}
200
201impl AsRef<str> for Provider {
202    fn as_ref(&self) -> &str {
203        match self {
204            Provider::Gemini => "gemini",
205            Provider::OpenAI => "openai",
206            Provider::Anthropic => "anthropic",
207            Provider::Copilot => "copilot",
208            Provider::DeepSeek => "deepseek",
209            Provider::OpenRouter => "openrouter",
210            Provider::Ollama => "ollama",
211            Provider::LmStudio => "lmstudio",
212            Provider::Moonshot => "moonshot",
213            Provider::ZAI => "zai",
214            Provider::Minimax => "minimax",
215            Provider::HuggingFace => "huggingface",
216            Provider::OpenCodeZen => "opencode-zen",
217            Provider::OpenCodeGo => "opencode-go",
218        }
219    }
220}
221
222impl FromStr for Provider {
223    type Err = ModelParseError;
224
225    fn from_str(s: &str) -> Result<Self, Self::Err> {
226        match s.to_lowercase().as_str() {
227            "gemini" => Ok(Provider::Gemini),
228            "openai" => Ok(Provider::OpenAI),
229            "anthropic" => Ok(Provider::Anthropic),
230            "copilot" => Ok(Provider::Copilot),
231            "deepseek" => Ok(Provider::DeepSeek),
232            "openrouter" => Ok(Provider::OpenRouter),
233            "ollama" => Ok(Provider::Ollama),
234            "lmstudio" => Ok(Provider::LmStudio),
235            "moonshot" => Ok(Provider::Moonshot),
236            "zai" => Ok(Provider::ZAI),
237            "minimax" => Ok(Provider::Minimax),
238            "huggingface" => Ok(Provider::HuggingFace),
239            "opencode-zen" | "opencodezen" => Ok(Provider::OpenCodeZen),
240            "opencode-go" | "opencodego" => Ok(Provider::OpenCodeGo),
241            _ => Err(ModelParseError::InvalidProvider(s.to_string())),
242        }
243    }
244}