vtcode_config/models/provider.rs

use serde::{Deserialize, Serialize};
use std::fmt;
use std::str::FromStr;

use super::{ModelId, ModelParseError};

/// Supported AI model providers
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum Provider {
    /// Google Gemini models
    Gemini,
    /// OpenAI GPT models
    #[default]
    OpenAI,
    /// Anthropic Claude models
    Anthropic,
    /// GitHub Copilot preview integration
    Copilot,
    /// DeepSeek native models
    DeepSeek,
    /// OpenRouter marketplace models
    OpenRouter,
    /// Local Ollama models
    Ollama,
    /// LM Studio local models
    LmStudio,
    /// Moonshot.ai models
    Moonshot,
    /// Z.AI GLM models
    ZAI,
    /// MiniMax models
    Minimax,
    /// Hugging Face Inference Providers
    HuggingFace,
    /// LiteLLM proxy models
    LiteLLM,
}

impl Provider {
    /// Get the default API key environment variable for this provider
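    ///
    /// A minimal usage sketch (the `vtcode_config::models` crate path is
    /// assumed from this file's location; adjust the import to your setup):
    ///
    /// ```ignore
    /// use vtcode_config::models::Provider;
    ///
    /// assert_eq!(Provider::Gemini.default_api_key_env(), "GEMINI_API_KEY");
    /// assert_eq!(Provider::HuggingFace.default_api_key_env(), "HF_TOKEN");
    /// ```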
    pub fn default_api_key_env(&self) -> &'static str {
        match self {
            Provider::Gemini => "GEMINI_API_KEY",
            Provider::OpenAI => "OPENAI_API_KEY",
            Provider::Anthropic => "ANTHROPIC_API_KEY",
            // Copilot authenticates through managed auth (see `uses_managed_auth`),
            // so it has no API key environment variable.
            Provider::Copilot => "",
            Provider::DeepSeek => "DEEPSEEK_API_KEY",
            Provider::OpenRouter => "OPENROUTER_API_KEY",
            Provider::Ollama => "OLLAMA_API_KEY",
            Provider::LmStudio => "LMSTUDIO_API_KEY",
            Provider::Moonshot => "MOONSHOT_API_KEY",
            Provider::ZAI => "ZAI_API_KEY",
            Provider::Minimax => "MINIMAX_API_KEY",
            Provider::HuggingFace => "HF_TOKEN",
            Provider::LiteLLM => "LITELLM_API_KEY",
        }
    }

    /// Get all supported providers
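    ///
    /// A short sketch of expected use (crate path assumed, as above):
    ///
    /// ```ignore
    /// use vtcode_config::models::Provider;
    ///
    /// for provider in Provider::all_providers() {
    ///     println!("{} -> {}", provider.label(), provider);
    /// }
    /// ```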
    pub fn all_providers() -> Vec<Provider> {
        vec![
            Provider::OpenAI,
            Provider::Anthropic,
            Provider::Copilot,
            Provider::Minimax,
            Provider::Gemini,
            Provider::DeepSeek,
            Provider::HuggingFace,
            Provider::OpenRouter,
            Provider::Ollama,
            Provider::LmStudio,
            Provider::Moonshot,
            Provider::ZAI,
            Provider::LiteLLM,
        ]
    }

    /// Human-friendly label for display purposes
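    ///
    /// Unlike the lowercase `Display` form, labels are cased for UI display.
    /// A minimal sketch (crate path assumed):
    ///
    /// ```ignore
    /// use vtcode_config::models::Provider;
    ///
    /// assert_eq!(Provider::LmStudio.label(), "LM Studio");
    /// assert_eq!(Provider::LmStudio.to_string(), "lmstudio");
    /// ```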
    pub fn label(&self) -> &'static str {
        match self {
            Provider::Gemini => "Gemini",
            Provider::OpenAI => "OpenAI",
            Provider::Anthropic => "Anthropic",
            Provider::Copilot => "GitHub Copilot",
            Provider::DeepSeek => "DeepSeek",
            Provider::OpenRouter => "OpenRouter",
            Provider::Ollama => "Ollama",
            Provider::LmStudio => "LM Studio",
            Provider::Moonshot => "Moonshot",
            Provider::ZAI => "Z.AI",
            Provider::Minimax => "MiniMax",
            Provider::HuggingFace => "Hugging Face",
            Provider::LiteLLM => "LiteLLM",
        }
    }

    /// Whether the provider's models are discovered dynamically
    /// (GitHub Copilot or any local provider).
    pub fn is_dynamic(&self) -> bool {
        matches!(self, Provider::Copilot) || self.is_local()
    }

    /// Whether the provider runs locally (Ollama or LM Studio).
    pub fn is_local(&self) -> bool {
        matches!(self, Provider::Ollama | Provider::LmStudio)
    }

    /// Setup instructions to show when a local provider's server is not
    /// reachable; `None` for remote providers.
    pub fn local_install_instructions(&self) -> Option<&'static str> {
        match self {
            Provider::Ollama => Some(
                "Ollama server is not running. To start:\n  1. Install Ollama from https://ollama.com\n  2. Run 'ollama serve' in a terminal\n  3. Pull models using 'ollama pull <model-name>' (e.g., 'ollama pull gpt-oss:20b')",
            ),
            Provider::LmStudio => Some(
                "LM Studio server is not running. To start:\n  1. Install LM Studio from https://lmstudio.ai\n  2. Open LM Studio and start the Local Server on port 1234\n  3. Load the model you want to use",
            ),
            _ => None,
        }
    }

    /// Determine if the provider supports configurable reasoning effort for the given model
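    ///
    /// Providers such as Copilot and LiteLLM never support it; for the rest
    /// the answer depends on the model lists in `crate::constants::models`.
    /// A sketch (crate path assumed):
    ///
    /// ```ignore
    /// use vtcode_config::models::Provider;
    ///
    /// assert!(!Provider::Copilot.supports_reasoning_effort("any-model"));
    /// assert!(!Provider::LiteLLM.supports_reasoning_effort("any-model"));
    /// ```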
    pub fn supports_reasoning_effort(&self, model: &str) -> bool {
        use crate::constants::models;

        match self {
            Provider::Gemini => models::google::REASONING_MODELS.contains(&model),
            Provider::OpenAI => models::openai::REASONING_MODELS.contains(&model),
            Provider::Anthropic => models::anthropic::REASONING_MODELS.contains(&model),
            Provider::Copilot => false,
            Provider::DeepSeek => model == models::deepseek::DEEPSEEK_REASONER,
            Provider::OpenRouter => {
                // Prefer generated metadata when the model parses to a known id,
                // then fall back to a hardcoded allowlist for ids without
                // metadata, and finally to the static reasoning-model list for
                // unparsed model strings.
                if let Ok(model_id) = ModelId::from_str(model) {
                    if let Some(meta) = crate::models::openrouter_generated::metadata_for(model_id)
                    {
                        return meta.reasoning;
                    }
                    return matches!(
                        model_id,
                        ModelId::OpenRouterMinimaxM25 | ModelId::OpenRouterQwen3CoderNext
                    );
                }
                models::openrouter::REASONING_MODELS.contains(&model)
            }
            Provider::Ollama => models::ollama::REASONING_LEVEL_MODELS.contains(&model),
            Provider::LmStudio => models::lmstudio::REASONING_MODELS.contains(&model),
            Provider::Moonshot => models::moonshot::REASONING_MODELS.contains(&model),
            Provider::ZAI => models::zai::REASONING_MODELS.contains(&model),
            Provider::Minimax => models::minimax::SUPPORTED_MODELS.contains(&model),
            Provider::HuggingFace => models::huggingface::REASONING_MODELS.contains(&model),
            Provider::LiteLLM => false,
        }
    }

    /// Determine if the provider supports the `service_tier` request parameter for the model.
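    ///
    /// Only OpenAI models can qualify; every other provider returns `false`
    /// regardless of the model string. A sketch (crate path assumed):
    ///
    /// ```ignore
    /// use vtcode_config::models::Provider;
    ///
    /// assert!(!Provider::Anthropic.supports_service_tier("any-model"));
    /// ```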
    pub fn supports_service_tier(&self, model: &str) -> bool {
        use crate::constants::models;

        match self {
            Provider::OpenAI => models::openai::SERVICE_TIER_MODELS.contains(&model),
            _ => false,
        }
    }

    /// Whether authentication is managed externally (currently only GitHub
    /// Copilot), in which case no API key environment variable applies.
    pub fn uses_managed_auth(&self) -> bool {
        matches!(self, Provider::Copilot)
    }
}

impl fmt::Display for Provider {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Provider::Gemini => write!(f, "gemini"),
            Provider::OpenAI => write!(f, "openai"),
            Provider::Anthropic => write!(f, "anthropic"),
            Provider::Copilot => write!(f, "copilot"),
            Provider::DeepSeek => write!(f, "deepseek"),
            Provider::OpenRouter => write!(f, "openrouter"),
            Provider::Ollama => write!(f, "ollama"),
            Provider::LmStudio => write!(f, "lmstudio"),
            Provider::Moonshot => write!(f, "moonshot"),
            Provider::ZAI => write!(f, "zai"),
            Provider::Minimax => write!(f, "minimax"),
            Provider::HuggingFace => write!(f, "huggingface"),
            Provider::LiteLLM => write!(f, "litellm"),
        }
    }
}

impl AsRef<str> for Provider {
    fn as_ref(&self) -> &str {
        match self {
            Provider::Gemini => "gemini",
            Provider::OpenAI => "openai",
            Provider::Anthropic => "anthropic",
            Provider::Copilot => "copilot",
            Provider::DeepSeek => "deepseek",
            Provider::OpenRouter => "openrouter",
            Provider::Ollama => "ollama",
            Provider::LmStudio => "lmstudio",
            Provider::Moonshot => "moonshot",
            Provider::ZAI => "zai",
            Provider::Minimax => "minimax",
            Provider::HuggingFace => "huggingface",
            Provider::LiteLLM => "litellm",
        }
    }
}

impl FromStr for Provider {
    type Err = ModelParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "gemini" => Ok(Provider::Gemini),
            "openai" => Ok(Provider::OpenAI),
            "anthropic" => Ok(Provider::Anthropic),
            "copilot" => Ok(Provider::Copilot),
            "deepseek" => Ok(Provider::DeepSeek),
            "openrouter" => Ok(Provider::OpenRouter),
            "ollama" => Ok(Provider::Ollama),
            "lmstudio" => Ok(Provider::LmStudio),
            "moonshot" => Ok(Provider::Moonshot),
            "zai" => Ok(Provider::ZAI),
            "minimax" => Ok(Provider::Minimax),
            "huggingface" => Ok(Provider::HuggingFace),
            "litellm" => Ok(Provider::LiteLLM),
            _ => Err(ModelParseError::InvalidProvider(s.to_string())),
        }
    }
}
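
// A small test sketch, not part of the original file: it exercises only
// invariants visible above (Display/FromStr round-trip, case-insensitive
// parsing, and the local/dynamic provider flags).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn display_and_from_str_round_trip() {
        // Every variant appears in `all_providers`, and each Display string
        // matches a FromStr arm, so the round trip should hold for all.
        for provider in Provider::all_providers() {
            let text = provider.to_string();
            assert_eq!(text.parse::<Provider>().ok(), Some(provider));
            let name: &str = provider.as_ref();
            assert_eq!(name, text);
        }
    }

    #[test]
    fn parsing_is_case_insensitive_and_rejects_unknowns() {
        // FromStr lowercases its input before matching.
        assert_eq!("OpenAI".parse::<Provider>().ok(), Some(Provider::OpenAI));
        assert!("not-a-provider".parse::<Provider>().is_err());
    }

    #[test]
    fn local_providers_are_dynamic() {
        assert!(Provider::Ollama.is_local());
        assert!(Provider::LmStudio.is_dynamic());
        assert!(!Provider::OpenAI.is_dynamic());
    }
}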