
vtcode_config/models/provider.rs

use serde::{Deserialize, Serialize};
use std::fmt;
use std::str::FromStr;

use super::{ModelId, ModelParseError};

/// Supported AI model providers
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum Provider {
    /// Google Gemini models
    Gemini,
    /// OpenAI GPT models
    #[default]
    OpenAI,
    /// Anthropic Claude models
    Anthropic,
    /// DeepSeek native models
    DeepSeek,
    /// OpenRouter marketplace models
    OpenRouter,
    /// Local Ollama models
    Ollama,
    /// LM Studio local models
    LmStudio,
    /// Moonshot.ai models
    Moonshot,
    /// Z.AI GLM models
    ZAI,
    /// MiniMax models
    Minimax,
    /// Hugging Face Inference Providers
    HuggingFace,
}

impl Provider {
    /// Get the default API key environment variable for this provider
    pub fn default_api_key_env(&self) -> &'static str {
        match self {
            Provider::Gemini => "GEMINI_API_KEY",
            Provider::OpenAI => "OPENAI_API_KEY",
            Provider::Anthropic => "ANTHROPIC_API_KEY",
            Provider::DeepSeek => "DEEPSEEK_API_KEY",
            Provider::OpenRouter => "OPENROUTER_API_KEY",
            Provider::Ollama => "OLLAMA_API_KEY",
            Provider::LmStudio => "LMSTUDIO_API_KEY",
            Provider::Moonshot => "MOONSHOT_API_KEY",
            Provider::ZAI => "ZAI_API_KEY",
            Provider::Minimax => "MINIMAX_API_KEY",
            Provider::HuggingFace => "HF_TOKEN",
        }
    }

    /// Get all supported providers
    pub fn all_providers() -> Vec<Provider> {
        vec![
            Provider::OpenAI,
            Provider::Anthropic,
            Provider::Minimax,
            Provider::Gemini,
            Provider::DeepSeek,
            Provider::HuggingFace,
            Provider::OpenRouter,
            Provider::Ollama,
            Provider::LmStudio,
            Provider::Moonshot,
            Provider::ZAI,
        ]
    }

    /// Human-friendly label for display purposes
    pub fn label(&self) -> &'static str {
        match self {
            Provider::Gemini => "Gemini",
            Provider::OpenAI => "OpenAI",
            Provider::Anthropic => "Anthropic",
            Provider::DeepSeek => "DeepSeek",
            Provider::OpenRouter => "OpenRouter",
            Provider::Ollama => "Ollama",
            Provider::LmStudio => "LM Studio",
            Provider::Moonshot => "Moonshot",
            Provider::ZAI => "Z.AI",
            Provider::Minimax => "MiniMax",
            Provider::HuggingFace => "Hugging Face",
        }
    }

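    /// Whether the provider is considered dynamic; currently equivalent to [`Self::is_local`].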
    pub fn is_dynamic(&self) -> bool {
        self.is_local()
    }

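    /// Whether the provider is served from a local runtime (Ollama or LM Studio).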
    pub fn is_local(&self) -> bool {
        matches!(self, Provider::Ollama | Provider::LmStudio)
    }

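    /// Setup instructions surfaced when a local provider's server is not running;
    /// `None` for hosted providers.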
    pub fn local_install_instructions(&self) -> Option<&'static str> {
        match self {
            Provider::Ollama => Some(
                "Ollama server is not running. To start:\n  1. Install Ollama from https://ollama.com\n  2. Run 'ollama serve' in a terminal\n  3. Pull models using 'ollama pull <model-name>' (e.g., 'ollama pull gpt-oss:20b')",
            ),
            Provider::LmStudio => Some(
                "LM Studio server is not running. To start:\n  1. Install LM Studio from https://lmstudio.ai\n  2. Open LM Studio and start the Local Server on port 1234\n  3. Load the model you want to use",
            ),
            _ => None,
        }
    }

    /// Determine if the provider supports configurable reasoning effort for the model
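    ///
    /// Most providers check the model against a per-provider list of reasoning models;
    /// OpenRouter consults generated model metadata first and only falls back to the
    /// static list when the identifier cannot be parsed into a [`ModelId`].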
    pub fn supports_reasoning_effort(&self, model: &str) -> bool {
        use crate::constants::models;

        match self {
            Provider::Gemini => models::google::REASONING_MODELS.contains(&model),
            Provider::OpenAI => models::openai::REASONING_MODELS.contains(&model),
            Provider::Anthropic => models::anthropic::REASONING_MODELS.contains(&model),
            Provider::DeepSeek => model == models::deepseek::DEEPSEEK_REASONER,
            Provider::OpenRouter => {
                if let Ok(model_id) = ModelId::from_str(model) {
                    if let Some(meta) = crate::models::openrouter_generated::metadata_for(model_id)
                    {
                        return meta.reasoning;
                    }
                    return matches!(
                        model_id,
                        ModelId::OpenRouterMinimaxM25 | ModelId::OpenRouterQwen3CoderNext
                    );
                }
                models::openrouter::REASONING_MODELS.contains(&model)
            }
            Provider::Ollama => models::ollama::REASONING_LEVEL_MODELS.contains(&model),
            Provider::LmStudio => models::lmstudio::REASONING_MODELS.contains(&model),
            Provider::Moonshot => models::moonshot::REASONING_MODELS.contains(&model),
            Provider::ZAI => models::zai::REASONING_MODELS.contains(&model),
            Provider::Minimax => {
                model == models::minimax::MINIMAX_M2_5 || model == models::minimax::MINIMAX_M2
            }
            Provider::HuggingFace => models::huggingface::REASONING_MODELS.contains(&model),
        }
    }
}

impl fmt::Display for Provider {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Provider::Gemini => write!(f, "gemini"),
            Provider::OpenAI => write!(f, "openai"),
            Provider::Anthropic => write!(f, "anthropic"),
            Provider::DeepSeek => write!(f, "deepseek"),
            Provider::OpenRouter => write!(f, "openrouter"),
            Provider::Ollama => write!(f, "ollama"),
            Provider::LmStudio => write!(f, "lmstudio"),
            Provider::Moonshot => write!(f, "moonshot"),
            Provider::ZAI => write!(f, "zai"),
            Provider::Minimax => write!(f, "minimax"),
            Provider::HuggingFace => write!(f, "huggingface"),
        }
    }
}

impl FromStr for Provider {
    type Err = ModelParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "gemini" => Ok(Provider::Gemini),
            "openai" => Ok(Provider::OpenAI),
            "anthropic" => Ok(Provider::Anthropic),
            "deepseek" => Ok(Provider::DeepSeek),
            "openrouter" => Ok(Provider::OpenRouter),
            "ollama" => Ok(Provider::Ollama),
            "lmstudio" => Ok(Provider::LmStudio),
            "moonshot" => Ok(Provider::Moonshot),
            "zai" => Ok(Provider::ZAI),
            "minimax" => Ok(Provider::Minimax),
            "huggingface" => Ok(Provider::HuggingFace),
            _ => Err(ModelParseError::InvalidProvider(s.to_string())),
        }
    }
}
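
A minimal usage sketch of the API above, assuming the enum is re-exported as `vtcode_config::models::Provider` (that import path and the standalone `main` are illustrative, not part of the file):

use std::str::FromStr;

// Hypothetical import path; adjust to the crate's actual module layout.
use vtcode_config::models::Provider;

fn main() {
    // Round-trip the lowercase identifier through FromStr and Display.
    let provider = Provider::from_str("openai").expect("recognized provider id");
    assert_eq!(provider, Provider::OpenAI);
    assert_eq!(provider.to_string(), "openai");

    // OpenAI is the #[default] variant.
    assert_eq!(Provider::default(), Provider::OpenAI);

    // Display label and the environment variable consulted for credentials.
    println!("{} -> {}", provider.label(), provider.default_api_key_env());

    // Ollama and LM Studio are the local providers.
    assert!(Provider::Ollama.is_local());
    assert!(!Provider::Anthropic.is_local());

    // Unrecognized identifiers produce ModelParseError::InvalidProvider.
    assert!(Provider::from_str("not-a-provider").is_err());
}

Note that the lowercase strings written by `Display` are exactly the ones accepted by `FromStr` (which lowercases its input), so serialized provider names round-trip cleanly.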