//! Provider definitions for `vtcode_config/models` (`provider.rs`).
1use serde::{Deserialize, Serialize};
2use std::fmt;
3use std::str::FromStr;
4
5use super::{ModelId, ModelParseError};
6
7/// Supported AI model providers
/// Supported AI model providers.
///
/// Variants serialize/deserialize by name via `serde`; `OpenAI` is the
/// `Default` provider. The canonical lowercase identifier for each variant
/// lives in the `AsRef<str>`/`Display`/`FromStr` impls below.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum Provider {
    /// Google Gemini models
    Gemini,
    /// OpenAI GPT models (the default provider)
    #[default]
    OpenAI,
    /// Anthropic Claude models
    Anthropic,
    /// GitHub Copilot preview integration
    Copilot,
    /// DeepSeek native models
    DeepSeek,
    /// OpenRouter marketplace models
    OpenRouter,
    /// Local Ollama models
    Ollama,
    /// LM Studio local models
    LmStudio,
    /// Moonshot.ai models
    Moonshot,
    /// Z.AI GLM models
    ZAI,
    /// MiniMax models
    Minimax,
    /// Hugging Face Inference Providers
    HuggingFace,
}
37
38impl Provider {
39    /// Get the default API key environment variable for this provider
40    pub fn default_api_key_env(&self) -> &'static str {
41        match self {
42            Provider::Gemini => "GEMINI_API_KEY",
43            Provider::OpenAI => "OPENAI_API_KEY",
44            Provider::Anthropic => "ANTHROPIC_API_KEY",
45            Provider::Copilot => "",
46            Provider::DeepSeek => "DEEPSEEK_API_KEY",
47            Provider::OpenRouter => "OPENROUTER_API_KEY",
48            Provider::Ollama => "OLLAMA_API_KEY",
49            Provider::LmStudio => "LMSTUDIO_API_KEY",
50            Provider::Moonshot => "MOONSHOT_API_KEY",
51            Provider::ZAI => "ZAI_API_KEY",
52            Provider::Minimax => "MINIMAX_API_KEY",
53            Provider::HuggingFace => "HF_TOKEN",
54        }
55    }
56
57    /// Get all supported providers
58    pub fn all_providers() -> Vec<Provider> {
59        vec![
60            Provider::OpenAI,
61            Provider::Anthropic,
62            Provider::Copilot,
63            Provider::Minimax,
64            Provider::Gemini,
65            Provider::DeepSeek,
66            Provider::HuggingFace,
67            Provider::OpenRouter,
68            Provider::Ollama,
69            Provider::LmStudio,
70            Provider::Moonshot,
71            Provider::ZAI,
72        ]
73    }
74
75    /// Human-friendly label for display purposes
76    pub fn label(&self) -> &'static str {
77        match self {
78            Provider::Gemini => "Gemini",
79            Provider::OpenAI => "OpenAI",
80            Provider::Anthropic => "Anthropic",
81            Provider::Copilot => "GitHub Copilot",
82            Provider::DeepSeek => "DeepSeek",
83            Provider::OpenRouter => "OpenRouter",
84            Provider::Ollama => "Ollama",
85            Provider::LmStudio => "LM Studio",
86            Provider::Moonshot => "Moonshot",
87            Provider::ZAI => "Z.AI",
88            Provider::Minimax => "MiniMax",
89            Provider::HuggingFace => "Hugging Face",
90        }
91    }
92
93    pub fn is_dynamic(&self) -> bool {
94        matches!(self, Provider::Copilot) || self.is_local()
95    }
96
97    pub fn is_local(&self) -> bool {
98        matches!(self, Provider::Ollama | Provider::LmStudio)
99    }
100
101    pub fn local_install_instructions(&self) -> Option<&'static str> {
102        match self {
103            Provider::Ollama => Some(
104                "Ollama server is not running. To start:\n  1. Install Ollama from https://ollama.com\n  2. Run 'ollama serve' in a terminal\n  3. Pull models using 'ollama pull <model-name>' (e.g., 'ollama pull gpt-oss:20b')",
105            ),
106            Provider::LmStudio => Some(
107                "LM Studio server is not running. To start:\n  1. Install LM Studio from https://lmstudio.ai\n  2. Open LM Studio and start the Local Server on port 1234\n  3. Load the model you want to use",
108            ),
109            _ => None,
110        }
111    }
112
113    /// Determine if the provider supports configurable reasoning effort for the model
114    pub fn supports_reasoning_effort(&self, model: &str) -> bool {
115        use crate::constants::models;
116
117        match self {
118            Provider::Gemini => models::google::REASONING_MODELS.contains(&model),
119            Provider::OpenAI => models::openai::REASONING_MODELS.contains(&model),
120            Provider::Anthropic => models::anthropic::REASONING_MODELS.contains(&model),
121            Provider::Copilot => false,
122            Provider::DeepSeek => model == models::deepseek::DEEPSEEK_REASONER,
123            Provider::OpenRouter => {
124                if let Ok(model_id) = ModelId::from_str(model) {
125                    if let Some(meta) = crate::models::openrouter_generated::metadata_for(model_id)
126                    {
127                        return meta.reasoning;
128                    }
129                    return matches!(
130                        model_id,
131                        ModelId::OpenRouterMinimaxM25 | ModelId::OpenRouterQwen3CoderNext
132                    );
133                }
134                models::openrouter::REASONING_MODELS.contains(&model)
135            }
136            Provider::Ollama => models::ollama::REASONING_LEVEL_MODELS.contains(&model),
137            Provider::LmStudio => models::lmstudio::REASONING_MODELS.contains(&model),
138            Provider::Moonshot => models::moonshot::REASONING_MODELS.contains(&model),
139            Provider::ZAI => models::zai::REASONING_MODELS.contains(&model),
140            Provider::Minimax => models::minimax::SUPPORTED_MODELS.contains(&model),
141            Provider::HuggingFace => models::huggingface::REASONING_MODELS.contains(&model),
142        }
143    }
144
145    /// Determine if the provider supports the `service_tier` request parameter for the model.
146    pub fn supports_service_tier(&self, model: &str) -> bool {
147        use crate::constants::models;
148
149        match self {
150            Provider::OpenAI => models::openai::SERVICE_TIER_MODELS.contains(&model),
151            _ => false,
152        }
153    }
154
155    pub fn uses_managed_auth(&self) -> bool {
156        matches!(self, Provider::Copilot)
157    }
158}
159
160impl fmt::Display for Provider {
161    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
162        match self {
163            Provider::Gemini => write!(f, "gemini"),
164            Provider::OpenAI => write!(f, "openai"),
165            Provider::Anthropic => write!(f, "anthropic"),
166            Provider::Copilot => write!(f, "copilot"),
167            Provider::DeepSeek => write!(f, "deepseek"),
168            Provider::OpenRouter => write!(f, "openrouter"),
169            Provider::Ollama => write!(f, "ollama"),
170            Provider::LmStudio => write!(f, "lmstudio"),
171            Provider::Moonshot => write!(f, "moonshot"),
172            Provider::ZAI => write!(f, "zai"),
173            Provider::Minimax => write!(f, "minimax"),
174            Provider::HuggingFace => write!(f, "huggingface"),
175        }
176    }
177}
178
179impl AsRef<str> for Provider {
180    fn as_ref(&self) -> &str {
181        match self {
182            Provider::Gemini => "gemini",
183            Provider::OpenAI => "openai",
184            Provider::Anthropic => "anthropic",
185            Provider::Copilot => "copilot",
186            Provider::DeepSeek => "deepseek",
187            Provider::OpenRouter => "openrouter",
188            Provider::Ollama => "ollama",
189            Provider::LmStudio => "lmstudio",
190            Provider::Moonshot => "moonshot",
191            Provider::ZAI => "zai",
192            Provider::Minimax => "minimax",
193            Provider::HuggingFace => "huggingface",
194        }
195    }
196}
197
198impl FromStr for Provider {
199    type Err = ModelParseError;
200
201    fn from_str(s: &str) -> Result<Self, Self::Err> {
202        match s.to_lowercase().as_str() {
203            "gemini" => Ok(Provider::Gemini),
204            "openai" => Ok(Provider::OpenAI),
205            "anthropic" => Ok(Provider::Anthropic),
206            "copilot" => Ok(Provider::Copilot),
207            "deepseek" => Ok(Provider::DeepSeek),
208            "openrouter" => Ok(Provider::OpenRouter),
209            "ollama" => Ok(Provider::Ollama),
210            "lmstudio" => Ok(Provider::LmStudio),
211            "moonshot" => Ok(Provider::Moonshot),
212            "zai" => Ok(Provider::ZAI),
213            "minimax" => Ok(Provider::Minimax),
214            "huggingface" => Ok(Provider::HuggingFace),
215            _ => Err(ModelParseError::InvalidProvider(s.to_string())),
216        }
217    }
218}