// vtcode_config/models/model_id/capabilities.rs

use crate::models::Provider;

use super::ModelId;
/// Model capability catalog generated at build time by the build script and
/// written to `OUT_DIR/model_capabilities.rs`.
#[cfg(not(docsrs))]
#[allow(dead_code)]
mod capability_generated {
    include!(concat!(env!("OUT_DIR"), "/model_capabilities.rs"));
}
10
/// Stub used when building documentation on docs.rs, where the build-script
/// output is unavailable. Mirrors the generated module's public surface with
/// empty data so the rest of this file still compiles.
#[cfg(docsrs)]
#[allow(dead_code)]
mod capability_generated {
    /// Pricing rates for one model; mirrors the generated `Pricing` type.
    #[derive(Clone, Copy)]
    pub struct Pricing {
        pub input: Option<f64>,
        pub output: Option<f64>,
        pub cache_read: Option<f64>,
        pub cache_write: Option<f64>,
    }

    /// One model's capability record; mirrors the generated `Entry` type.
    #[derive(Clone, Copy)]
    pub struct Entry {
        pub provider: &'static str,
        pub id: &'static str,
        pub display_name: &'static str,
        pub description: &'static str,
        pub context_window: usize,
        pub max_output_tokens: Option<usize>,
        pub reasoning: bool,
        pub tool_call: bool,
        pub vision: bool,
        pub input_modalities: &'static [&'static str],
        pub caching: bool,
        pub structured_output: bool,
        pub pricing: Pricing,
    }

    // The stub carries no catalog data.
    pub const ENTRIES: &[Entry] = &[];
    pub const PROVIDERS: &[&str] = &[];

    /// Always `None` in the docs.rs stub.
    pub fn metadata_for(_provider: &str, _id: &str) -> Option<Entry> {
        None
    }

    /// Always `None` in the docs.rs stub.
    pub fn models_for_provider(_provider: &str) -> Option<&'static [&'static str]> {
        None
    }
}
50
/// Pricing rates for a model. Each field is `None` when the catalog does not
/// publish that rate. Units come from the generated catalog — presumably USD
/// per token volume; confirm against the build script that emits it.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct ModelPricing {
    /// Rate for input (prompt) tokens.
    pub input: Option<f64>,
    /// Rate for output (completion) tokens.
    pub output: Option<f64>,
    /// Rate for reading prompt tokens from the provider cache.
    pub cache_read: Option<f64>,
    /// Rate for writing prompt tokens into the provider cache.
    pub cache_write: Option<f64>,
}
59
/// A single model's capability record, resolved from the build-time catalog.
/// Field semantics are defined by the generating build script; the flags below
/// are passed through verbatim.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct ModelCatalogEntry {
    /// Canonical provider key (e.g. "openai", "gemini").
    pub provider: &'static str,
    /// Provider-scoped model identifier.
    pub id: &'static str,
    /// Human-readable model name.
    pub display_name: &'static str,
    /// Short description of the model.
    pub description: &'static str,
    /// Context window size, in tokens.
    pub context_window: usize,
    /// Maximum output tokens, when the provider publishes a limit.
    pub max_output_tokens: Option<usize>,
    /// Catalog flag: model exposes reasoning.
    pub reasoning: bool,
    /// Catalog flag: model supports tool/function calling.
    pub tool_call: bool,
    /// Catalog flag: model accepts visual input.
    pub vision: bool,
    /// Accepted input modalities (e.g. "text", "image").
    pub input_modalities: &'static [&'static str],
    /// Catalog flag: prompt caching supported.
    pub caching: bool,
    /// Catalog flag: structured (schema-constrained) output supported.
    pub structured_output: bool,
    /// Pricing rates for this model.
    pub pricing: ModelPricing,
}
76
/// Normalize a provider label to the canonical lowercase key used by the
/// generated capability catalog. Matching is ASCII case-insensitive, and
/// "google" is accepted as an alias for "gemini". Labels that are not
/// recognized pass through unchanged.
fn catalog_provider_key(provider: &str) -> &str {
    // Keys that normalize to themselves (compared case-insensitively).
    const CANONICAL: [&str; 11] = [
        "openai",
        "anthropic",
        "deepseek",
        "openrouter",
        "ollama",
        "lmstudio",
        "moonshot",
        "zai",
        "minimax",
        "huggingface",
        "litellm",
    ];

    if provider.eq_ignore_ascii_case("google") || provider.eq_ignore_ascii_case("gemini") {
        return "gemini";
    }

    CANONICAL
        .into_iter()
        .find(|key| provider.eq_ignore_ascii_case(key))
        .unwrap_or(provider)
}
106
/// Map a strongly-typed `Provider` to its lowercase key in the generated
/// capability catalog. Exhaustive on purpose: adding a `Provider` variant
/// forces an update here.
fn capability_provider_key(provider: Provider) -> &'static str {
    match provider {
        Provider::Gemini => "gemini",
        Provider::OpenAI => "openai",
        Provider::Anthropic => "anthropic",
        Provider::Copilot => "copilot",
        Provider::DeepSeek => "deepseek",
        Provider::OpenRouter => "openrouter",
        Provider::Ollama => "ollama",
        Provider::LmStudio => "lmstudio",
        Provider::Moonshot => "moonshot",
        Provider::ZAI => "zai",
        Provider::Minimax => "minimax",
        Provider::HuggingFace => "huggingface",
        Provider::LiteLLM => "litellm",
    }
}
124
125fn generated_catalog_entry(provider: &str, id: &str) -> Option<ModelCatalogEntry> {
126 capability_generated::metadata_for(catalog_provider_key(provider), id).map(|entry| {
127 ModelCatalogEntry {
128 provider: entry.provider,
129 id: entry.id,
130 display_name: entry.display_name,
131 description: entry.description,
132 context_window: entry.context_window,
133 max_output_tokens: entry.max_output_tokens,
134 reasoning: entry.reasoning,
135 tool_call: entry.tool_call,
136 vision: entry.vision,
137 input_modalities: entry.input_modalities,
138 caching: entry.caching,
139 structured_output: entry.structured_output,
140 pricing: ModelPricing {
141 input: entry.pricing.input,
142 output: entry.pricing.output,
143 cache_read: entry.pricing.cache_read,
144 cache_write: entry.pricing.cache_write,
145 },
146 }
147 })
148}
149
/// Public lookup: the catalog entry for a (provider, model id) pair, or
/// `None` when the generated catalog has no record for it.
pub fn model_catalog_entry(provider: &str, id: &str) -> Option<ModelCatalogEntry> {
    generated_catalog_entry(provider, id)
}
153
/// All model ids the generated catalog lists for a provider (label is
/// normalized first), or `None` for an unknown provider.
pub fn supported_models_for_provider(provider: &str) -> Option<&'static [&'static str]> {
    capability_generated::models_for_provider(catalog_provider_key(provider))
}
157
/// Every provider key present in the generated catalog (empty in the docs.rs
/// stub build).
pub fn catalog_provider_keys() -> &'static [&'static str] {
    capability_generated::PROVIDERS
}
161
162impl ModelId {
    /// This model's entry in the build-time generated capability catalog,
    /// keyed by its provider and string id.
    fn generated_capabilities(&self) -> Option<ModelCatalogEntry> {
        generated_catalog_entry(capability_provider_key(self.provider()), self.as_str())
    }
166
167 pub fn non_reasoning_variant(&self) -> Option<Self> {
169 if let Some(meta) = self.openrouter_metadata() {
170 if !meta.reasoning {
171 return None;
172 }
173
174 let vendor = meta.vendor;
175 let mut candidates: Vec<Self> = Self::openrouter_vendor_groups()
176 .into_iter()
177 .find(|(candidate_vendor, _)| *candidate_vendor == vendor)
178 .map(|(_, models)| {
179 models
180 .iter()
181 .copied()
182 .filter(|candidate| candidate != self)
183 .filter(|candidate| {
184 candidate
185 .openrouter_metadata()
186 .map(|other| !other.reasoning)
187 .unwrap_or(false)
188 })
189 .collect()
190 })
191 .unwrap_or_default();
192
193 if candidates.is_empty() {
194 return None;
195 }
196
197 candidates.sort_by_key(|candidate| {
198 candidate
199 .openrouter_metadata()
200 .map(|data| (!data.efficient, data.display))
201 .unwrap_or((true, ""))
202 });
203
204 return candidates.into_iter().next();
205 }
206
207 let direct = match self {
208 ModelId::Gemini31ProPreview
209 | ModelId::Gemini31ProPreviewCustomTools
210 | ModelId::Gemini31FlashLitePreview => Some(ModelId::Gemini3FlashPreview),
211 ModelId::GPT52
212 | ModelId::GPT54
213 | ModelId::GPT54Pro
214 | ModelId::GPT54Nano
215 | ModelId::GPT54Mini
216 | ModelId::GPT5 => Some(ModelId::GPT5Mini),
217 ModelId::CopilotGPT52Codex | ModelId::CopilotGPT54 => Some(ModelId::CopilotGPT54Mini),
218 ModelId::DeepSeekReasoner => Some(ModelId::DeepSeekChat),
219 ModelId::ZaiGlm5 => Some(ModelId::OllamaGlm5Cloud),
220 ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => Some(ModelId::ClaudeSonnet46),
221 ModelId::MinimaxM27 | ModelId::MinimaxM25 => None,
222 _ => None,
223 };
224
225 direct.and_then(|candidate| {
226 if candidate.supports_reasoning_effort() {
227 None
228 } else {
229 Some(candidate)
230 }
231 })
232 }
233
    /// Whether this model belongs to the hand-maintained list of "flash"
    /// (latency-optimized) variants.
    pub fn is_flash_variant(&self) -> bool {
        matches!(
            self,
            ModelId::Gemini3FlashPreview
                | ModelId::Gemini31FlashLitePreview
                | ModelId::OpenRouterStepfunStep35FlashFree
                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
                | ModelId::OllamaGemini3FlashPreviewCloud
                | ModelId::HuggingFaceStep35Flash
        )
    }
246
    /// Whether this model belongs to the hand-maintained list of "pro"
    /// (full-capability) variants.
    /// NOTE(review): some ids in this list also appear in `is_flash_variant`
    /// (e.g. `OpenRouterStepfunStep35FlashFree`) — confirm the overlap is
    /// intentional.
    pub fn is_pro_variant(&self) -> bool {
        matches!(
            self,
            ModelId::Gemini31ProPreview
                | ModelId::Gemini31ProPreviewCustomTools
                | ModelId::OpenRouterGoogleGemini31ProPreview
                | ModelId::GPT5
                | ModelId::GPT52
                | ModelId::GPT52Codex
                | ModelId::GPT54
                | ModelId::GPT54Pro
                | ModelId::GPT53Codex
                | ModelId::GPT51Codex
                | ModelId::GPT51CodexMax
                | ModelId::CopilotGPT52Codex
                | ModelId::CopilotGPT51CodexMax
                | ModelId::CopilotGPT54
                | ModelId::CopilotClaudeSonnet46
                | ModelId::GPT5Codex
                | ModelId::ClaudeOpus46
                | ModelId::ClaudeSonnet46
                | ModelId::DeepSeekReasoner
                | ModelId::ZaiGlm5
                | ModelId::OpenRouterStepfunStep35FlashFree
                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
                | ModelId::MinimaxM27
                | ModelId::MinimaxM25
                | ModelId::OllamaGlm5Cloud
                | ModelId::OllamaNemotron3SuperCloud
                | ModelId::OllamaMinimaxM25Cloud
                | ModelId::HuggingFaceQwen3CoderNextNovita
                | ModelId::HuggingFaceQwen35397BA17BTogether
        )
    }
282
283 pub fn is_efficient_variant(&self) -> bool {
285 if let Some(meta) = self.openrouter_metadata() {
286 return meta.efficient;
287 }
288 matches!(
289 self,
290 ModelId::Gemini3FlashPreview
291 | ModelId::Gemini31FlashLitePreview
292 | ModelId::GPT5Mini
293 | ModelId::GPT5Nano
294 | ModelId::CopilotGPT54Mini
295 | ModelId::ClaudeHaiku45
296 | ModelId::DeepSeekChat
297 | ModelId::HuggingFaceStep35Flash
298 )
299 }
300
301 pub fn is_top_tier(&self) -> bool {
303 if let Some(meta) = self.openrouter_metadata() {
304 return meta.top_tier;
305 }
306 matches!(
307 self,
308 ModelId::Gemini31ProPreview
309 | ModelId::Gemini31ProPreviewCustomTools
310 | ModelId::OpenRouterGoogleGemini31ProPreview
311 | ModelId::Gemini3FlashPreview
312 | ModelId::Gemini31FlashLitePreview
313 | ModelId::GPT5
314 | ModelId::GPT52
315 | ModelId::GPT52Codex
316 | ModelId::GPT54
317 | ModelId::GPT54Pro
318 | ModelId::GPT53Codex
319 | ModelId::GPT51Codex
320 | ModelId::GPT51CodexMax
321 | ModelId::GPT5Codex
322 | ModelId::ClaudeOpus46
323 | ModelId::ClaudeSonnet46
324 | ModelId::DeepSeekReasoner
325 | ModelId::ZaiGlm5
326 | ModelId::OpenRouterStepfunStep35FlashFree
327 | ModelId::HuggingFaceQwen3CoderNextNovita
328 | ModelId::HuggingFaceQwen35397BA17BTogether
329 )
330 }
331
    /// Whether this model exposes reasoning behavior: OpenRouter metadata
    /// when available, else the provider's reasoning-effort support for this
    /// model id.
    pub fn is_reasoning_variant(&self) -> bool {
        if let Some(meta) = self.openrouter_metadata() {
            return meta.reasoning;
        }
        self.provider().supports_reasoning_effort(self.as_str())
    }
339
    /// Whether this model supports tool/function calling. Consults the
    /// generated catalog first, then OpenRouter metadata, and optimistically
    /// defaults to `true` when neither source knows the model.
    pub fn supports_tool_calls(&self) -> bool {
        if let Some(meta) = self.generated_capabilities() {
            return meta.tool_call;
        }
        if let Some(meta) = self.openrouter_metadata() {
            return meta.tool_call;
        }
        true
    }
350
351 pub fn input_modalities(&self) -> &'static [&'static str] {
353 self.generated_capabilities()
354 .map(|meta| meta.input_modalities)
355 .unwrap_or(&[])
356 }
357
    /// Generation/version label for this model (e.g. "3.1", "5.2",
    /// "V3.2-Exp"). OpenRouter metadata wins when present; unrecognized
    /// models report "unknown". Label formats vary by provider family.
    pub fn generation(&self) -> &'static str {
        if let Some(meta) = self.openrouter_metadata() {
            return meta.generation;
        }
        match self {
            ModelId::Gemini31ProPreview | ModelId::Gemini31ProPreviewCustomTools => "3.1",
            ModelId::Gemini31FlashLitePreview => "3.1-lite",
            ModelId::Gemini3FlashPreview => "3",
            ModelId::GPT52 | ModelId::GPT52Codex => "5.2",
            ModelId::GPT54 | ModelId::GPT54Pro | ModelId::GPT54Nano | ModelId::GPT54Mini => "5.4",
            ModelId::GPT53Codex => "5.3",
            ModelId::GPT51Codex | ModelId::GPT51CodexMax => "5.1",
            ModelId::GPT5
            | ModelId::GPT5Codex
            | ModelId::GPT5Mini
            | ModelId::GPT5Nano
            | ModelId::OpenAIGptOss20b
            | ModelId::OpenAIGptOss120b => "5",
            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => "4.6",
            ModelId::ClaudeHaiku45 => "4.5",
            ModelId::DeepSeekChat | ModelId::DeepSeekReasoner => "V3.2-Exp",
            ModelId::ZaiGlm5 => "5",
            ModelId::OllamaGptOss20b => "oss",
            ModelId::OllamaGptOss20bCloud => "oss-cloud",
            ModelId::OllamaGptOss120bCloud => "oss-cloud",
            ModelId::OllamaQwen317b => "oss",
            ModelId::OllamaQwen3CoderNext => "qwen3-coder-next:cloud",
            ModelId::OllamaDeepseekV32Cloud => "deepseek-v3.2",
            ModelId::OllamaQwen3Next80bCloud => "qwen3-next",
            ModelId::OllamaMinimaxM2Cloud => "minimax-m2",
            ModelId::OllamaMinimaxM27Cloud => "minimax-m2.7",
            ModelId::OllamaGlm5Cloud => "glm-5",
            ModelId::OllamaMinimaxM25Cloud => "minimax-m2.5",
            ModelId::OllamaNemotron3SuperCloud => "nemotron-3",
            ModelId::OllamaGemini3FlashPreviewCloud => "gemini-3",
            ModelId::MinimaxM27 => "M2.7",
            ModelId::MinimaxM25 => "M2.5",
            ModelId::MoonshotKimiK25 => "k2.5",
            ModelId::HuggingFaceDeepseekV32 => "V3.2-Exp",
            ModelId::HuggingFaceOpenAIGptOss20b => "oss",
            ModelId::HuggingFaceOpenAIGptOss120b => "oss",
            ModelId::HuggingFaceMinimaxM25Novita => "m2.5",
            ModelId::HuggingFaceDeepseekV32Novita => "v3.2",
            ModelId::HuggingFaceXiaomiMimoV2FlashNovita => "v2-flash",
            ModelId::HuggingFaceGlm5Novita => "5",
            ModelId::HuggingFaceStep35Flash => "3.5",
            ModelId::HuggingFaceQwen3CoderNextNovita | ModelId::OpenRouterQwen3CoderNext => {
                "qwen3-coder-next"
            }
            _ => "unknown",
        }
    }
419
    /// Whether this model supports the native shell tool. Currently limited
    /// to the OpenAI GPT-5.x family.
    pub fn supports_shell_tool(&self) -> bool {
        matches!(
            self,
            ModelId::GPT52
                | ModelId::GPT52Codex
                | ModelId::GPT54
                | ModelId::GPT54Pro
                | ModelId::GPT53Codex
                | ModelId::GPT51Codex
                | ModelId::GPT51CodexMax
                | ModelId::GPT5Codex
        )
    }
434
435 pub fn supports_apply_patch_tool(&self) -> bool {
437 false }
439}