//! vtcode_config/models/provider.rs — definitions for supported AI model providers.
1use serde::{Deserialize, Serialize};
2use std::fmt;
3use std::str::FromStr;
4
5use super::{ModelId, ModelParseError};
6
/// Supported AI model providers.
///
/// Serde serializes by variant name; the canonical lowercase string form is
/// produced by the `Display`/`AsRef<str>` impls and parsed by `FromStr`.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum Provider {
    /// Google Gemini models
    Gemini,
    /// OpenAI GPT models (the default provider when none is configured)
    #[default]
    OpenAI,
    /// Anthropic Claude models
    Anthropic,
    /// DeepSeek native models
    DeepSeek,
    /// OpenRouter marketplace models
    OpenRouter,
    /// Local Ollama models
    Ollama,
    /// LM Studio local models
    LmStudio,
    /// Moonshot.ai models
    Moonshot,
    /// Z.AI GLM models
    ZAI,
    /// MiniMax models
    Minimax,
    /// Hugging Face Inference Providers
    HuggingFace,
    /// LiteLLM proxy models
    LiteLLM,
}
37
38impl Provider {
39    /// Get the default API key environment variable for this provider
40    pub fn default_api_key_env(&self) -> &'static str {
41        match self {
42            Provider::Gemini => "GEMINI_API_KEY",
43            Provider::OpenAI => "OPENAI_API_KEY",
44            Provider::Anthropic => "ANTHROPIC_API_KEY",
45            Provider::DeepSeek => "DEEPSEEK_API_KEY",
46            Provider::OpenRouter => "OPENROUTER_API_KEY",
47            Provider::Ollama => "OLLAMA_API_KEY",
48            Provider::LmStudio => "LMSTUDIO_API_KEY",
49            Provider::Moonshot => "MOONSHOT_API_KEY",
50            Provider::ZAI => "ZAI_API_KEY",
51            Provider::Minimax => "MINIMAX_API_KEY",
52            Provider::HuggingFace => "HF_TOKEN",
53            Provider::LiteLLM => "LITELLM_API_KEY",
54        }
55    }
56
57    /// Get all supported providers
58    pub fn all_providers() -> Vec<Provider> {
59        vec![
60            Provider::OpenAI,
61            Provider::Anthropic,
62            Provider::Minimax,
63            Provider::Gemini,
64            Provider::DeepSeek,
65            Provider::HuggingFace,
66            Provider::OpenRouter,
67            Provider::Ollama,
68            Provider::LmStudio,
69            Provider::Moonshot,
70            Provider::ZAI,
71            Provider::LiteLLM,
72        ]
73    }
74
75    /// Human-friendly label for display purposes
76    pub fn label(&self) -> &'static str {
77        match self {
78            Provider::Gemini => "Gemini",
79            Provider::OpenAI => "OpenAI",
80            Provider::Anthropic => "Anthropic",
81            Provider::DeepSeek => "DeepSeek",
82            Provider::OpenRouter => "OpenRouter",
83            Provider::Ollama => "Ollama",
84            Provider::LmStudio => "LM Studio",
85            Provider::Moonshot => "Moonshot",
86            Provider::ZAI => "Z.AI",
87            Provider::Minimax => "MiniMax",
88            Provider::HuggingFace => "Hugging Face",
89            Provider::LiteLLM => "LiteLLM",
90        }
91    }
92
93    pub fn is_dynamic(&self) -> bool {
94        self.is_local()
95    }
96
97    pub fn is_local(&self) -> bool {
98        matches!(self, Provider::Ollama | Provider::LmStudio)
99    }
100
101    pub fn local_install_instructions(&self) -> Option<&'static str> {
102        match self {
103            Provider::Ollama => Some(
104                "Ollama server is not running. To start:\n  1. Install Ollama from https://ollama.com\n  2. Run 'ollama serve' in a terminal\n  3. Pull models using 'ollama pull <model-name>' (e.g., 'ollama pull gpt-oss:20b')",
105            ),
106            Provider::LmStudio => Some(
107                "LM Studio server is not running. To start:\n  1. Install LM Studio from https://lmstudio.ai\n  2. Open LM Studio and start the Local Server on port 1234\n  3. Load the model you want to use",
108            ),
109            _ => None,
110        }
111    }
112
113    /// Determine if the provider supports configurable reasoning effort for the model
114    pub fn supports_reasoning_effort(&self, model: &str) -> bool {
115        use crate::constants::models;
116
117        match self {
118            Provider::Gemini => models::google::REASONING_MODELS.contains(&model),
119            Provider::OpenAI => models::openai::REASONING_MODELS.contains(&model),
120            Provider::Anthropic => models::anthropic::REASONING_MODELS.contains(&model),
121            Provider::DeepSeek => model == models::deepseek::DEEPSEEK_REASONER,
122            Provider::OpenRouter => {
123                if let Ok(model_id) = ModelId::from_str(model) {
124                    if let Some(meta) = crate::models::openrouter_generated::metadata_for(model_id)
125                    {
126                        return meta.reasoning;
127                    }
128                    return matches!(
129                        model_id,
130                        ModelId::OpenRouterMinimaxM25 | ModelId::OpenRouterQwen3CoderNext
131                    );
132                }
133                models::openrouter::REASONING_MODELS.contains(&model)
134            }
135            Provider::Ollama => models::ollama::REASONING_LEVEL_MODELS.contains(&model),
136            Provider::LmStudio => models::lmstudio::REASONING_MODELS.contains(&model),
137            Provider::Moonshot => models::moonshot::REASONING_MODELS.contains(&model),
138            Provider::ZAI => models::zai::REASONING_MODELS.contains(&model),
139            Provider::Minimax => models::minimax::SUPPORTED_MODELS.contains(&model),
140            Provider::HuggingFace => models::huggingface::REASONING_MODELS.contains(&model),
141            Provider::LiteLLM => false,
142        }
143    }
144
145    /// Determine if the provider supports the `service_tier` request parameter for the model.
146    pub fn supports_service_tier(&self, model: &str) -> bool {
147        use crate::constants::models;
148
149        match self {
150            Provider::OpenAI => models::openai::SERVICE_TIER_MODELS.contains(&model),
151            _ => false,
152        }
153    }
154}
155
156impl fmt::Display for Provider {
157    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
158        match self {
159            Provider::Gemini => write!(f, "gemini"),
160            Provider::OpenAI => write!(f, "openai"),
161            Provider::Anthropic => write!(f, "anthropic"),
162            Provider::DeepSeek => write!(f, "deepseek"),
163            Provider::OpenRouter => write!(f, "openrouter"),
164            Provider::Ollama => write!(f, "ollama"),
165            Provider::LmStudio => write!(f, "lmstudio"),
166            Provider::Moonshot => write!(f, "moonshot"),
167            Provider::ZAI => write!(f, "zai"),
168            Provider::Minimax => write!(f, "minimax"),
169            Provider::HuggingFace => write!(f, "huggingface"),
170            Provider::LiteLLM => write!(f, "litellm"),
171        }
172    }
173}
174
175impl AsRef<str> for Provider {
176    fn as_ref(&self) -> &str {
177        match self {
178            Provider::Gemini => "gemini",
179            Provider::OpenAI => "openai",
180            Provider::Anthropic => "anthropic",
181            Provider::DeepSeek => "deepseek",
182            Provider::OpenRouter => "openrouter",
183            Provider::Ollama => "ollama",
184            Provider::LmStudio => "lmstudio",
185            Provider::Moonshot => "moonshot",
186            Provider::ZAI => "zai",
187            Provider::Minimax => "minimax",
188            Provider::HuggingFace => "huggingface",
189            Provider::LiteLLM => "litellm",
190        }
191    }
192}
193
194impl FromStr for Provider {
195    type Err = ModelParseError;
196
197    fn from_str(s: &str) -> Result<Self, Self::Err> {
198        match s.to_lowercase().as_str() {
199            "gemini" => Ok(Provider::Gemini),
200            "openai" => Ok(Provider::OpenAI),
201            "anthropic" => Ok(Provider::Anthropic),
202            "deepseek" => Ok(Provider::DeepSeek),
203            "openrouter" => Ok(Provider::OpenRouter),
204            "ollama" => Ok(Provider::Ollama),
205            "lmstudio" => Ok(Provider::LmStudio),
206            "moonshot" => Ok(Provider::Moonshot),
207            "zai" => Ok(Provider::ZAI),
208            "minimax" => Ok(Provider::Minimax),
209            "huggingface" => Ok(Provider::HuggingFace),
210            "litellm" => Ok(Provider::LiteLLM),
211            _ => Err(ModelParseError::InvalidProvider(s.to_string())),
212        }
213    }
214}