1use serde::{Deserialize, Serialize};
2use std::fmt;
3use std::str::FromStr;
4
5use super::{ModelId, ModelParseError};
6
/// An LLM backend the application can talk to.
///
/// `Copy` + `Eq` + `Hash`: the enum is a pure tag, safe to pass by value and
/// use as a map key. Serde's derived representation uses the variant names as
/// written (e.g. `"OpenAI"`); the lowercase wire names (e.g. `"openai"`) come
/// from the `Display`/`AsRef<str>`/`FromStr` impls below instead —
/// NOTE(review): confirm which representation external configs rely on.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum Provider {
    Gemini,
    /// The default provider when none is specified.
    #[default]
    OpenAI,
    Anthropic,
    /// GitHub Copilot — authenticates via managed auth, not an API key.
    Copilot,
    DeepSeek,
    OpenRouter,
    /// Local server (see `local_install_instructions`).
    Ollama,
    /// Local server (see `local_install_instructions`).
    LmStudio,
    Moonshot,
    ZAI,
    Minimax,
    HuggingFace,
    OpenCodeZen,
    OpenCodeGo,
}
41
42impl Provider {
43 pub fn default_api_key_env(&self) -> &'static str {
45 match self {
46 Provider::Gemini => "GEMINI_API_KEY",
47 Provider::OpenAI => "OPENAI_API_KEY",
48 Provider::Anthropic => "ANTHROPIC_API_KEY",
49 Provider::Copilot => "",
50 Provider::DeepSeek => "DEEPSEEK_API_KEY",
51 Provider::OpenRouter => "OPENROUTER_API_KEY",
52 Provider::Ollama => "OLLAMA_API_KEY",
53 Provider::LmStudio => "LMSTUDIO_API_KEY",
54 Provider::Moonshot => "MOONSHOT_API_KEY",
55 Provider::ZAI => "ZAI_API_KEY",
56 Provider::Minimax => "MINIMAX_API_KEY",
57 Provider::HuggingFace => "HF_TOKEN",
58 Provider::OpenCodeZen => "OPENCODE_ZEN_API_KEY",
59 Provider::OpenCodeGo => "OPENCODE_GO_API_KEY",
60 }
61 }
62
63 pub fn all_providers() -> Vec<Provider> {
65 vec![
66 Provider::OpenAI,
67 Provider::Anthropic,
68 Provider::Copilot,
69 Provider::Minimax,
70 Provider::Gemini,
71 Provider::DeepSeek,
72 Provider::HuggingFace,
73 Provider::OpenRouter,
74 Provider::Ollama,
75 Provider::LmStudio,
76 Provider::Moonshot,
77 Provider::ZAI,
78 Provider::OpenCodeZen,
79 Provider::OpenCodeGo,
80 ]
81 }
82
83 pub fn label(&self) -> &'static str {
85 match self {
86 Provider::Gemini => "Gemini",
87 Provider::OpenAI => "OpenAI",
88 Provider::Anthropic => "Anthropic",
89 Provider::Copilot => "GitHub Copilot",
90 Provider::DeepSeek => "DeepSeek",
91 Provider::OpenRouter => "OpenRouter",
92 Provider::Ollama => "Ollama",
93 Provider::LmStudio => "LM Studio",
94 Provider::Moonshot => "Moonshot",
95 Provider::ZAI => "Z.AI",
96 Provider::Minimax => "MiniMax",
97 Provider::HuggingFace => "Hugging Face",
98 Provider::OpenCodeZen => "OpenCode Zen",
99 Provider::OpenCodeGo => "OpenCode Go",
100 }
101 }
102
103 pub fn is_dynamic(&self) -> bool {
104 matches!(self, Provider::Copilot) || self.is_local()
105 }
106
107 pub fn is_local(&self) -> bool {
108 matches!(self, Provider::Ollama | Provider::LmStudio)
109 }
110
111 pub fn local_install_instructions(&self) -> Option<&'static str> {
112 match self {
113 Provider::Ollama => Some(
114 "Ollama server is not running. To start:\n 1. Install Ollama from https://ollama.com\n 2. Run 'ollama serve' in a terminal\n 3. Pull models using 'ollama pull <model-name>' (e.g., 'ollama pull gpt-oss:20b')",
115 ),
116 Provider::LmStudio => Some(
117 "LM Studio server is not running. To start:\n 1. Install LM Studio from https://lmstudio.ai\n 2. Open LM Studio and start the Local Server on port 1234\n 3. Load the model you want to use",
118 ),
119 _ => None,
120 }
121 }
122
123 pub fn supports_reasoning_effort(&self, model: &str) -> bool {
125 use crate::constants::models;
126
127 match self {
128 Provider::Gemini => models::google::REASONING_MODELS.contains(&model),
129 Provider::OpenAI => models::openai::REASONING_MODELS.contains(&model),
130 Provider::Anthropic => models::anthropic::REASONING_MODELS.contains(&model),
131 Provider::Copilot => false,
132 Provider::DeepSeek => {
133 model == models::deepseek::DEEPSEEK_V4_PRO || model == "deepseek-reasoner"
134 }
135 Provider::OpenRouter => {
136 if let Ok(model_id) = ModelId::from_str(model) {
137 if let Some(meta) = crate::models::openrouter_generated::metadata_for(model_id)
138 {
139 return meta.reasoning;
140 }
141 return matches!(
142 model_id,
143 ModelId::OpenRouterMinimaxM25 | ModelId::OpenRouterQwen3CoderNext
144 );
145 }
146 models::openrouter::REASONING_MODELS.contains(&model)
147 }
148 Provider::Ollama => models::ollama::REASONING_LEVEL_MODELS.contains(&model),
149 Provider::LmStudio => models::lmstudio::REASONING_MODELS.contains(&model),
150 Provider::Moonshot => models::moonshot::REASONING_MODELS.contains(&model),
151 Provider::ZAI => models::zai::REASONING_MODELS.contains(&model),
152 Provider::Minimax => models::minimax::SUPPORTED_MODELS.contains(&model),
153 Provider::HuggingFace => models::huggingface::REASONING_MODELS.contains(&model),
154 Provider::OpenCodeZen => {
155 if models::opencode_zen::OPENAI_MODELS.contains(&model) {
156 Provider::OpenAI.supports_reasoning_effort(model)
157 } else if models::opencode_zen::ANTHROPIC_MODELS.contains(&model) {
158 Provider::Anthropic.supports_reasoning_effort(model)
159 } else {
160 false
161 }
162 }
163 Provider::OpenCodeGo => false,
164 }
165 }
166
167 pub fn supports_service_tier(&self, model: &str) -> bool {
169 use crate::constants::models;
170
171 match self {
172 Provider::OpenAI => models::openai::SERVICE_TIER_MODELS.contains(&model),
173 _ => false,
174 }
175 }
176
177 pub fn uses_managed_auth(&self) -> bool {
178 matches!(self, Provider::Copilot)
179 }
180}
181
182impl fmt::Display for Provider {
183 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
184 match self {
185 Provider::Gemini => write!(f, "gemini"),
186 Provider::OpenAI => write!(f, "openai"),
187 Provider::Anthropic => write!(f, "anthropic"),
188 Provider::Copilot => write!(f, "copilot"),
189 Provider::DeepSeek => write!(f, "deepseek"),
190 Provider::OpenRouter => write!(f, "openrouter"),
191 Provider::Ollama => write!(f, "ollama"),
192 Provider::LmStudio => write!(f, "lmstudio"),
193 Provider::Moonshot => write!(f, "moonshot"),
194 Provider::ZAI => write!(f, "zai"),
195 Provider::Minimax => write!(f, "minimax"),
196 Provider::HuggingFace => write!(f, "huggingface"),
197 Provider::OpenCodeZen => write!(f, "opencode-zen"),
198 Provider::OpenCodeGo => write!(f, "opencode-go"),
199 }
200 }
201}
202
impl AsRef<str> for Provider {
    /// Canonical lowercase identifier for the provider.
    ///
    /// This is the authoritative name table: the `Display` and `FromStr`
    /// impls in this file round-trip through these exact strings
    /// (`FromStr` additionally accepts hyphen-less OpenCode aliases).
    fn as_ref(&self) -> &str {
        match self {
            Provider::Gemini => "gemini",
            Provider::OpenAI => "openai",
            Provider::Anthropic => "anthropic",
            Provider::Copilot => "copilot",
            Provider::DeepSeek => "deepseek",
            Provider::OpenRouter => "openrouter",
            Provider::Ollama => "ollama",
            Provider::LmStudio => "lmstudio",
            Provider::Moonshot => "moonshot",
            Provider::ZAI => "zai",
            Provider::Minimax => "minimax",
            Provider::HuggingFace => "huggingface",
            Provider::OpenCodeZen => "opencode-zen",
            Provider::OpenCodeGo => "opencode-go",
        }
    }
}
223
224impl FromStr for Provider {
225 type Err = ModelParseError;
226
227 fn from_str(s: &str) -> Result<Self, Self::Err> {
228 match s.to_lowercase().as_str() {
229 "gemini" => Ok(Provider::Gemini),
230 "openai" => Ok(Provider::OpenAI),
231 "anthropic" => Ok(Provider::Anthropic),
232 "copilot" => Ok(Provider::Copilot),
233 "deepseek" => Ok(Provider::DeepSeek),
234 "openrouter" => Ok(Provider::OpenRouter),
235 "ollama" => Ok(Provider::Ollama),
236 "lmstudio" => Ok(Provider::LmStudio),
237 "moonshot" => Ok(Provider::Moonshot),
238 "zai" => Ok(Provider::ZAI),
239 "minimax" => Ok(Provider::Minimax),
240 "huggingface" => Ok(Provider::HuggingFace),
241 "opencode-zen" | "opencodezen" => Ok(Provider::OpenCodeZen),
242 "opencode-go" | "opencodego" => Ok(Provider::OpenCodeGo),
243 _ => Err(ModelParseError::InvalidProvider(s.to_string())),
244 }
245 }
246}