// vtcode_config/models/model_id/capabilities.rs
use crate::models::Provider;

use super::ModelId;

// Real builds: splice in the capability table emitted by the build script
// into OUT_DIR. `dead_code` is allowed because not every generated item is
// referenced by every feature combination.
#[cfg(not(docsrs))]
#[allow(dead_code)]
mod capability_generated {
    include!(concat!(env!("OUT_DIR"), "/model_capabilities.rs"));
}
10
// docs.rs builds cannot rely on the build-script output, so this stub mirrors
// the generated module's public surface with an empty catalog.
// NOTE(review): must stay in sync with the shapes emitted by the build script.
#[cfg(docsrs)]
#[allow(dead_code)]
mod capability_generated {
    // Mirror of the generated `Pricing` row; every rate is optional.
    #[derive(Clone, Copy)]
    pub struct Pricing {
        pub input: Option<f64>,
        pub output: Option<f64>,
        pub cache_read: Option<f64>,
        pub cache_write: Option<f64>,
    }

    // Mirror of the generated catalog `Entry` row.
    #[derive(Clone, Copy)]
    pub struct Entry {
        pub provider: &'static str,
        pub id: &'static str,
        pub display_name: &'static str,
        pub description: &'static str,
        pub context_window: usize,
        pub max_output_tokens: Option<usize>,
        pub reasoning: bool,
        pub tool_call: bool,
        pub vision: bool,
        pub input_modalities: &'static [&'static str],
        pub caching: bool,
        pub structured_output: bool,
        pub pricing: Pricing,
    }

    // Empty catalog: docs builds expose the API but report no models.
    pub const ENTRIES: &[Entry] = &[];
    pub const PROVIDERS: &[&str] = &[];

    // Always `None` on docs builds — there is no generated table to consult.
    pub fn metadata_for(_provider: &str, _id: &str) -> Option<Entry> {
        None
    }

    // Always `None` on docs builds.
    pub fn models_for_provider(_provider: &str) -> Option<&'static [&'static str]> {
        None
    }
}
50
/// Per-token pricing for a model as published in the capability catalog.
/// A `None` rate means the catalog does not list a price for that dimension.
/// NOTE(review): the unit (e.g. USD per 1M tokens) is defined by the build
/// script that generates the table — confirm there before displaying values.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct ModelPricing {
    pub input: Option<f64>,
    pub output: Option<f64>,
    pub cache_read: Option<f64>,
    pub cache_write: Option<f64>,
}
59
/// Public view of one row of the generated model-capability catalog.
/// All string/slice fields borrow from the `'static` generated table.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct ModelCatalogEntry {
    pub provider: &'static str,
    pub id: &'static str,
    pub display_name: &'static str,
    pub description: &'static str,
    pub context_window: usize,
    pub max_output_tokens: Option<usize>,
    pub reasoning: bool,
    pub tool_call: bool,
    pub vision: bool,
    pub input_modalities: &'static [&'static str],
    pub caching: bool,
    pub structured_output: bool,
    pub pricing: ModelPricing,
}
76
/// Normalize a free-form provider name to the lowercase key used by the
/// generated capability catalog.
///
/// Matching is ASCII-case-insensitive. "google" is an alias for "gemini".
/// Unknown providers are passed through unchanged so callers can still probe
/// the catalog with custom keys.
fn catalog_provider_key(provider: &str) -> &str {
    // Canonical catalog keys; each matches its own name case-insensitively.
    // "copilot" is included for consistency with `capability_provider_key`.
    const CANONICAL: &[&str] = &[
        "gemini",
        "openai",
        "anthropic",
        "copilot",
        "deepseek",
        "openrouter",
        "ollama",
        "lmstudio",
        "moonshot",
        "zai",
        "minimax",
        "huggingface",
    ];

    // Special alias: Google's models live under the "gemini" key.
    if provider.eq_ignore_ascii_case("google") {
        return "gemini";
    }

    CANONICAL
        .iter()
        .copied()
        .find(|key| provider.eq_ignore_ascii_case(key))
        .unwrap_or(provider)
}
104
/// Map a `Provider` variant to its lowercase catalog key.
///
/// Exhaustive match (no `_` arm) so adding a `Provider` variant forces an
/// update here at compile time.
fn capability_provider_key(provider: Provider) -> &'static str {
    match provider {
        Provider::Gemini => "gemini",
        Provider::OpenAI => "openai",
        Provider::Anthropic => "anthropic",
        Provider::Copilot => "copilot",
        Provider::DeepSeek => "deepseek",
        Provider::OpenRouter => "openrouter",
        Provider::Ollama => "ollama",
        Provider::LmStudio => "lmstudio",
        Provider::Moonshot => "moonshot",
        Provider::ZAI => "zai",
        Provider::Minimax => "minimax",
        Provider::HuggingFace => "huggingface",
    }
}
121
122fn generated_catalog_entry(provider: &str, id: &str) -> Option<ModelCatalogEntry> {
123 capability_generated::metadata_for(catalog_provider_key(provider), id).map(|entry| {
124 ModelCatalogEntry {
125 provider: entry.provider,
126 id: entry.id,
127 display_name: entry.display_name,
128 description: entry.description,
129 context_window: entry.context_window,
130 max_output_tokens: entry.max_output_tokens,
131 reasoning: entry.reasoning,
132 tool_call: entry.tool_call,
133 vision: entry.vision,
134 input_modalities: entry.input_modalities,
135 caching: entry.caching,
136 structured_output: entry.structured_output,
137 pricing: ModelPricing {
138 input: entry.pricing.input,
139 output: entry.pricing.output,
140 cache_read: entry.pricing.cache_read,
141 cache_write: entry.pricing.cache_write,
142 },
143 }
144 })
145}
146
/// Public lookup: catalog entry for a `(provider, id)` pair, if any.
pub fn model_catalog_entry(provider: &str, id: &str) -> Option<ModelCatalogEntry> {
    generated_catalog_entry(provider, id)
}
150
/// All model ids the catalog lists for `provider` (case-insensitive name),
/// or `None` when the provider is not in the catalog.
pub fn supported_models_for_provider(provider: &str) -> Option<&'static [&'static str]> {
    capability_generated::models_for_provider(catalog_provider_key(provider))
}
154
/// All provider keys present in the generated catalog (empty on docs builds).
pub fn catalog_provider_keys() -> &'static [&'static str] {
    capability_generated::PROVIDERS
}
158
159impl ModelId {
160 fn generated_capabilities(&self) -> Option<ModelCatalogEntry> {
161 generated_catalog_entry(capability_provider_key(self.provider()), self.as_str())
162 }
163
164 pub fn non_reasoning_variant(&self) -> Option<Self> {
166 if let Some(meta) = self.openrouter_metadata() {
167 if !meta.reasoning {
168 return None;
169 }
170
171 let vendor = meta.vendor;
172 let mut candidates: Vec<Self> = Self::openrouter_vendor_groups()
173 .into_iter()
174 .find(|(candidate_vendor, _)| *candidate_vendor == vendor)
175 .map(|(_, models)| {
176 models
177 .iter()
178 .copied()
179 .filter(|candidate| candidate != self)
180 .filter(|candidate| {
181 candidate
182 .openrouter_metadata()
183 .map(|other| !other.reasoning)
184 .unwrap_or(false)
185 })
186 .collect()
187 })
188 .unwrap_or_default();
189
190 if candidates.is_empty() {
191 return None;
192 }
193
194 candidates.sort_by_key(|candidate| {
195 candidate
196 .openrouter_metadata()
197 .map(|data| (!data.efficient, data.display))
198 .unwrap_or((true, ""))
199 });
200
201 return candidates.into_iter().next();
202 }
203
204 let direct = match self {
205 ModelId::Gemini31ProPreview
206 | ModelId::Gemini31ProPreviewCustomTools
207 | ModelId::Gemini31FlashLitePreview => Some(ModelId::Gemini3FlashPreview),
208 ModelId::GPT52
209 | ModelId::GPT54
210 | ModelId::GPT54Pro
211 | ModelId::GPT54Nano
212 | ModelId::GPT54Mini
213 | ModelId::GPT5 => Some(ModelId::GPT5Mini),
214 ModelId::CopilotGPT52Codex | ModelId::CopilotGPT54 => Some(ModelId::CopilotGPT54Mini),
215 ModelId::DeepSeekReasoner => Some(ModelId::DeepSeekChat),
216 ModelId::ZaiGlm5 => Some(ModelId::OllamaGlm5Cloud),
217 ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => Some(ModelId::ClaudeSonnet46),
218 ModelId::MinimaxM27 | ModelId::MinimaxM25 => None,
219 _ => None,
220 };
221
222 direct.and_then(|candidate| {
223 if candidate.supports_reasoning_effort() {
224 None
225 } else {
226 Some(candidate)
227 }
228 })
229 }
230
231 pub fn is_flash_variant(&self) -> bool {
233 matches!(
234 self,
235 ModelId::Gemini3FlashPreview
236 | ModelId::Gemini31FlashLitePreview
237 | ModelId::OpenRouterStepfunStep35FlashFree
238 | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
239 | ModelId::OllamaGemini3FlashPreviewCloud
240 | ModelId::HuggingFaceStep35Flash
241 )
242 }
243
244 pub fn is_pro_variant(&self) -> bool {
246 matches!(
247 self,
248 ModelId::Gemini31ProPreview
249 | ModelId::Gemini31ProPreviewCustomTools
250 | ModelId::OpenRouterGoogleGemini31ProPreview
251 | ModelId::GPT5
252 | ModelId::GPT52
253 | ModelId::GPT52Codex
254 | ModelId::GPT54
255 | ModelId::GPT54Pro
256 | ModelId::GPT53Codex
257 | ModelId::GPT51Codex
258 | ModelId::GPT51CodexMax
259 | ModelId::CopilotGPT52Codex
260 | ModelId::CopilotGPT51CodexMax
261 | ModelId::CopilotGPT54
262 | ModelId::CopilotClaudeSonnet46
263 | ModelId::GPT5Codex
264 | ModelId::ClaudeOpus46
265 | ModelId::ClaudeSonnet46
266 | ModelId::DeepSeekReasoner
267 | ModelId::ZaiGlm5
268 | ModelId::OpenRouterStepfunStep35FlashFree
269 | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
270 | ModelId::MinimaxM27
271 | ModelId::MinimaxM25
272 | ModelId::OllamaGlm5Cloud
273 | ModelId::OllamaNemotron3SuperCloud
274 | ModelId::OllamaMinimaxM25Cloud
275 | ModelId::HuggingFaceQwen3CoderNextNovita
276 | ModelId::HuggingFaceQwen35397BA17BTogether
277 )
278 }
279
280 pub fn is_efficient_variant(&self) -> bool {
282 if let Some(meta) = self.openrouter_metadata() {
283 return meta.efficient;
284 }
285 matches!(
286 self,
287 ModelId::Gemini3FlashPreview
288 | ModelId::Gemini31FlashLitePreview
289 | ModelId::GPT5Mini
290 | ModelId::GPT5Nano
291 | ModelId::CopilotGPT54Mini
292 | ModelId::ClaudeHaiku45
293 | ModelId::DeepSeekChat
294 | ModelId::HuggingFaceStep35Flash
295 )
296 }
297
298 pub fn is_top_tier(&self) -> bool {
300 if let Some(meta) = self.openrouter_metadata() {
301 return meta.top_tier;
302 }
303 matches!(
304 self,
305 ModelId::Gemini31ProPreview
306 | ModelId::Gemini31ProPreviewCustomTools
307 | ModelId::OpenRouterGoogleGemini31ProPreview
308 | ModelId::Gemini3FlashPreview
309 | ModelId::Gemini31FlashLitePreview
310 | ModelId::GPT5
311 | ModelId::GPT52
312 | ModelId::GPT52Codex
313 | ModelId::GPT54
314 | ModelId::GPT54Pro
315 | ModelId::GPT53Codex
316 | ModelId::GPT51Codex
317 | ModelId::GPT51CodexMax
318 | ModelId::GPT5Codex
319 | ModelId::ClaudeOpus46
320 | ModelId::ClaudeSonnet46
321 | ModelId::DeepSeekReasoner
322 | ModelId::ZaiGlm5
323 | ModelId::OpenRouterStepfunStep35FlashFree
324 | ModelId::HuggingFaceQwen3CoderNextNovita
325 | ModelId::HuggingFaceQwen35397BA17BTogether
326 )
327 }
328
329 pub fn is_reasoning_variant(&self) -> bool {
331 if let Some(meta) = self.openrouter_metadata() {
332 return meta.reasoning;
333 }
334 self.provider().supports_reasoning_effort(self.as_str())
335 }
336
337 pub fn supports_tool_calls(&self) -> bool {
339 if let Some(meta) = self.generated_capabilities() {
340 return meta.tool_call;
341 }
342 if let Some(meta) = self.openrouter_metadata() {
343 return meta.tool_call;
344 }
345 true
346 }
347
348 pub fn input_modalities(&self) -> &'static [&'static str] {
350 self.generated_capabilities()
351 .map(|meta| meta.input_modalities)
352 .unwrap_or(&[])
353 }
354
355 pub fn generation(&self) -> &'static str {
357 if let Some(meta) = self.openrouter_metadata() {
358 return meta.generation;
359 }
360 match self {
361 ModelId::Gemini31ProPreview | ModelId::Gemini31ProPreviewCustomTools => "3.1",
363 ModelId::Gemini31FlashLitePreview => "3.1-lite",
364 ModelId::Gemini3FlashPreview => "3",
365 ModelId::GPT52 | ModelId::GPT52Codex => "5.2",
367 ModelId::GPT54 | ModelId::GPT54Pro | ModelId::GPT54Nano | ModelId::GPT54Mini => "5.4",
368 ModelId::GPT53Codex => "5.3",
369 ModelId::GPT51Codex | ModelId::GPT51CodexMax => "5.1",
370 ModelId::GPT5
371 | ModelId::GPT5Codex
372 | ModelId::GPT5Mini
373 | ModelId::GPT5Nano
374 | ModelId::OpenAIGptOss20b
375 | ModelId::OpenAIGptOss120b => "5",
376 ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => "4.6",
378 ModelId::ClaudeHaiku45 => "4.5",
379 ModelId::DeepSeekChat | ModelId::DeepSeekReasoner => "V3.2-Exp",
381 ModelId::ZaiGlm5 => "5",
383 ModelId::OllamaGptOss20b => "oss",
384 ModelId::OllamaGptOss20bCloud => "oss-cloud",
385 ModelId::OllamaGptOss120bCloud => "oss-cloud",
386 ModelId::OllamaQwen317b => "oss",
387 ModelId::OllamaQwen3CoderNext => "qwen3-coder-next:cloud",
388 ModelId::OllamaDeepseekV32Cloud => "deepseek-v3.2",
389 ModelId::OllamaQwen3Next80bCloud => "qwen3-next",
390 ModelId::OllamaMinimaxM2Cloud => "minimax-m2",
391 ModelId::OllamaMinimaxM27Cloud => "minimax-m2.7",
392 ModelId::OllamaGlm5Cloud => "glm-5",
393 ModelId::OllamaMinimaxM25Cloud => "minimax-m2.5",
394 ModelId::OllamaNemotron3SuperCloud => "nemotron-3",
395 ModelId::OllamaGemini3FlashPreviewCloud => "gemini-3",
396 ModelId::MinimaxM27 => "M2.7",
398 ModelId::MinimaxM25 => "M2.5",
399 ModelId::MoonshotKimiK25 => "k2.5",
401 ModelId::HuggingFaceDeepseekV32 => "V3.2-Exp",
403 ModelId::HuggingFaceOpenAIGptOss20b => "oss",
404 ModelId::HuggingFaceOpenAIGptOss120b => "oss",
405 ModelId::HuggingFaceMinimaxM25Novita => "m2.5",
406 ModelId::HuggingFaceDeepseekV32Novita => "v3.2",
407 ModelId::HuggingFaceXiaomiMimoV2FlashNovita => "v2-flash",
408 ModelId::HuggingFaceGlm5Novita => "5",
409 ModelId::HuggingFaceStep35Flash => "3.5",
410 ModelId::HuggingFaceQwen3CoderNextNovita | ModelId::OpenRouterQwen3CoderNext => {
411 "qwen3-coder-next"
412 }
413 _ => "unknown",
414 }
415 }
416
417 pub fn supports_shell_tool(&self) -> bool {
419 matches!(
420 self,
421 ModelId::GPT52
422 | ModelId::GPT52Codex
423 | ModelId::GPT54
424 | ModelId::GPT54Pro
425 | ModelId::GPT53Codex
426 | ModelId::GPT51Codex
427 | ModelId::GPT51CodexMax
428 | ModelId::GPT5Codex
429 )
430 }
431
432 pub fn supports_apply_patch_tool(&self) -> bool {
434 false }
436}