use serde::{Deserialize, Serialize};
use std::fmt;
use std::str::FromStr;

use super::{ModelId, ModelParseError};

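/// Supported model providers. [`Provider::OpenAI`] is the default.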
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum Provider {
    Gemini,
    #[default]
    OpenAI,
    Anthropic,
    DeepSeek,
    OpenRouter,
    Ollama,
    LmStudio,
    Moonshot,
    ZAI,
    Minimax,
    HuggingFace,
    LiteLLM,
}

impl Provider {
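    /// Name of the environment variable conventionally checked for this
    /// provider's API key.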
    pub fn default_api_key_env(&self) -> &'static str {
        match self {
            Provider::Gemini => "GEMINI_API_KEY",
            Provider::OpenAI => "OPENAI_API_KEY",
            Provider::Anthropic => "ANTHROPIC_API_KEY",
            Provider::DeepSeek => "DEEPSEEK_API_KEY",
            Provider::OpenRouter => "OPENROUTER_API_KEY",
            Provider::Ollama => "OLLAMA_API_KEY",
            Provider::LmStudio => "LMSTUDIO_API_KEY",
            Provider::Moonshot => "MOONSHOT_API_KEY",
            Provider::ZAI => "ZAI_API_KEY",
            Provider::Minimax => "MINIMAX_API_KEY",
            Provider::HuggingFace => "HF_TOKEN",
            Provider::LiteLLM => "LITELLM_API_KEY",
        }
    }

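    /// Every supported provider.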
    pub fn all_providers() -> Vec<Provider> {
        vec![
            Provider::OpenAI,
            Provider::Anthropic,
            Provider::Minimax,
            Provider::Gemini,
            Provider::DeepSeek,
            Provider::HuggingFace,
            Provider::OpenRouter,
            Provider::Ollama,
            Provider::LmStudio,
            Provider::Moonshot,
            Provider::ZAI,
            Provider::LiteLLM,
        ]
    }

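    /// Human-readable display name for the provider.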
    pub fn label(&self) -> &'static str {
        match self {
            Provider::Gemini => "Gemini",
            Provider::OpenAI => "OpenAI",
            Provider::Anthropic => "Anthropic",
            Provider::DeepSeek => "DeepSeek",
            Provider::OpenRouter => "OpenRouter",
            Provider::Ollama => "Ollama",
            Provider::LmStudio => "LM Studio",
            Provider::Moonshot => "Moonshot",
            Provider::ZAI => "Z.AI",
            Provider::Minimax => "MiniMax",
            Provider::HuggingFace => "Hugging Face",
            Provider::LiteLLM => "LiteLLM",
        }
    }

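    /// Whether the provider is treated as dynamic; currently this is true
    /// exactly for local providers.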
    pub fn is_dynamic(&self) -> bool {
        self.is_local()
    }

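    /// Whether the provider runs against a local server (Ollama or LM Studio).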
    pub fn is_local(&self) -> bool {
        matches!(self, Provider::Ollama | Provider::LmStudio)
    }

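    /// Setup instructions for local providers whose server is not running;
    /// `None` for remote providers.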
    pub fn local_install_instructions(&self) -> Option<&'static str> {
        match self {
            Provider::Ollama => Some(
                "Ollama server is not running. To start:\n 1. Install Ollama from https://ollama.com\n 2. Run 'ollama serve' in a terminal\n 3. Pull models using 'ollama pull <model-name>' (e.g., 'ollama pull gpt-oss:20b')",
            ),
            Provider::LmStudio => Some(
                "LM Studio server is not running. To start:\n 1. Install LM Studio from https://lmstudio.ai\n 2. Open LM Studio and start the Local Server on port 1234\n 3. Load the model you want to use",
            ),
            _ => None,
        }
    }

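    /// Whether the given model supports a configurable reasoning effort on
    /// this provider.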
    pub fn supports_reasoning_effort(&self, model: &str) -> bool {
        use crate::constants::models;

        match self {
            Provider::Gemini => models::google::REASONING_MODELS.contains(&model),
            Provider::OpenAI => models::openai::REASONING_MODELS.contains(&model),
            Provider::Anthropic => models::anthropic::REASONING_MODELS.contains(&model),
            Provider::DeepSeek => model == models::deepseek::DEEPSEEK_REASONER,
            Provider::OpenRouter => {
                if let Ok(model_id) = ModelId::from_str(model) {
                    if let Some(meta) = crate::models::openrouter_generated::metadata_for(model_id)
                    {
                        return meta.reasoning;
                    }
                    return matches!(
                        model_id,
                        ModelId::OpenRouterMinimaxM25 | ModelId::OpenRouterQwen3CoderNext
                    );
                }
                models::openrouter::REASONING_MODELS.contains(&model)
            }
            Provider::Ollama => models::ollama::REASONING_LEVEL_MODELS.contains(&model),
            Provider::LmStudio => models::lmstudio::REASONING_MODELS.contains(&model),
            Provider::Moonshot => models::moonshot::REASONING_MODELS.contains(&model),
            Provider::ZAI => models::zai::REASONING_MODELS.contains(&model),
            Provider::Minimax => model == models::minimax::MINIMAX_M2_5,
            Provider::HuggingFace => models::huggingface::REASONING_MODELS.contains(&model),
            Provider::LiteLLM => false,
        }
    }

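    /// Whether the given model supports selecting a service tier; currently
    /// only some OpenAI models do.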
    pub fn supports_service_tier(&self, model: &str) -> bool {
        use crate::constants::models;

        match self {
            Provider::OpenAI => models::openai::SERVICE_TIER_MODELS.contains(&model),
            _ => false,
        }
    }
}

impl fmt::Display for Provider {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Provider::Gemini => write!(f, "gemini"),
            Provider::OpenAI => write!(f, "openai"),
            Provider::Anthropic => write!(f, "anthropic"),
            Provider::DeepSeek => write!(f, "deepseek"),
            Provider::OpenRouter => write!(f, "openrouter"),
            Provider::Ollama => write!(f, "ollama"),
            Provider::LmStudio => write!(f, "lmstudio"),
            Provider::Moonshot => write!(f, "moonshot"),
            Provider::ZAI => write!(f, "zai"),
            Provider::Minimax => write!(f, "minimax"),
            Provider::HuggingFace => write!(f, "huggingface"),
            Provider::LiteLLM => write!(f, "litellm"),
        }
    }
}

impl FromStr for Provider {
    type Err = ModelParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "gemini" => Ok(Provider::Gemini),
            "openai" => Ok(Provider::OpenAI),
            "anthropic" => Ok(Provider::Anthropic),
            "deepseek" => Ok(Provider::DeepSeek),
            "openrouter" => Ok(Provider::OpenRouter),
            "ollama" => Ok(Provider::Ollama),
            "lmstudio" => Ok(Provider::LmStudio),
            "moonshot" => Ok(Provider::Moonshot),
            "zai" => Ok(Provider::ZAI),
            "minimax" => Ok(Provider::Minimax),
            "huggingface" => Ok(Provider::HuggingFace),
            "litellm" => Ok(Provider::LiteLLM),
            _ => Err(ModelParseError::InvalidProvider(s.to_string())),
        }
    }
}
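
// A minimal round-trip sanity check, sketched against only the items defined
// above: each provider's lowercase Display form should parse back to the same
// variant via FromStr.
#[cfg(test)]
mod tests {
    use super::*;
    use std::str::FromStr;

    #[test]
    fn display_and_from_str_round_trip() {
        for provider in Provider::all_providers() {
            let rendered = provider.to_string();
            let parsed = Provider::from_str(&rendered)
                .unwrap_or_else(|_| panic!("'{rendered}' should parse back into a Provider"));
            assert_eq!(parsed, provider);
        }
    }
}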