//! Model capability metadata.
//!
//! Source: `vtcode_config/models/model_id/capabilities.rs`.

1use crate::models::Provider;
2
3use super::ModelId;
4
/// Capability tables generated at build time (by the build script) and
/// included from `OUT_DIR/model_capabilities.rs`.
#[cfg(not(docsrs))]
#[allow(dead_code)]
mod capability_generated {
    include!(concat!(env!("OUT_DIR"), "/model_capabilities.rs"));
}
10
/// Stub capability tables for docs.rs builds, where the build-script output
/// in `OUT_DIR` is not available. Mirrors the generated module's interface.
#[cfg(docsrs)]
#[allow(dead_code)]
mod capability_generated {
    /// Mirror of the generated entry record; field set matches the real table.
    #[derive(Clone, Copy)]
    pub struct Entry {
        pub provider: &'static str,
        pub id: &'static str,
        pub context_window: usize,
        pub tool_call: bool,
        pub input_modalities: &'static [&'static str],
    }

    /// Empty table: no models are known in the stub.
    pub const ENTRIES: &[Entry] = &[];
    /// Empty provider list in the stub.
    pub const PROVIDERS: &[&str] = &[];

    /// Always `None`: the stub carries no metadata.
    pub fn metadata_for(_provider: &str, _id: &str) -> Option<Entry> {
        None
    }

    /// Always `None`: the stub carries no per-provider model lists.
    pub fn models_for_provider(_provider: &str) -> Option<&'static [&'static str]> {
        None
    }
}
34
/// Catalog metadata generated from `docs/models.json`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct ModelCatalogEntry {
    /// Canonical provider key (e.g. "gemini", "openai").
    pub provider: &'static str,
    /// Provider-specific model identifier string.
    pub id: &'static str,
    /// Context window size (per `docs/models.json`; presumably tokens — confirm).
    pub context_window: usize,
    /// Whether the model supports tool/function calling.
    pub tool_call: bool,
    /// Ordered list of supported input modalities (e.g. "text", "image").
    pub input_modalities: &'static [&'static str],
}
44
/// Normalize a provider name (any casing) to its canonical catalog key.
///
/// "google" and "gemini" both map to `"gemini"`; every other known provider
/// maps to its lowercase name. Unknown providers are returned unchanged so
/// lookups simply fail to match rather than panic.
fn catalog_provider_key(provider: &str) -> &str {
    // Special case: two accepted aliases share the "gemini" catalog key.
    if provider.eq_ignore_ascii_case("google") || provider.eq_ignore_ascii_case("gemini") {
        return "gemini";
    }
    // Providers whose canonical key is simply their lowercase name.
    const CANONICAL: &[&str] = &[
        "openai",
        "anthropic",
        "deepseek",
        "openrouter",
        "ollama",
        "lmstudio",
        "moonshot",
        "zai",
        "minimax",
        "huggingface",
        "litellm",
    ];
    CANONICAL
        .iter()
        .copied()
        .find(|key| provider.eq_ignore_ascii_case(key))
        // Unknown provider: pass through verbatim.
        .unwrap_or(provider)
}
74
/// Map a strongly-typed [`Provider`] to the canonical key string used in the
/// generated capability tables. Exhaustive: adding a `Provider` variant
/// forces a compile error here until a key is chosen.
fn capability_provider_key(provider: Provider) -> &'static str {
    match provider {
        Provider::Gemini => "gemini",
        Provider::OpenAI => "openai",
        Provider::Anthropic => "anthropic",
        Provider::DeepSeek => "deepseek",
        Provider::OpenRouter => "openrouter",
        Provider::Ollama => "ollama",
        Provider::LmStudio => "lmstudio",
        Provider::Moonshot => "moonshot",
        Provider::ZAI => "zai",
        Provider::Minimax => "minimax",
        Provider::HuggingFace => "huggingface",
        Provider::LiteLLM => "litellm",
    }
}
91
92fn generated_catalog_entry(provider: &str, id: &str) -> Option<ModelCatalogEntry> {
93    capability_generated::metadata_for(catalog_provider_key(provider), id).map(|entry| {
94        ModelCatalogEntry {
95            provider: entry.provider,
96            id: entry.id,
97            context_window: entry.context_window,
98            tool_call: entry.tool_call,
99            input_modalities: entry.input_modalities,
100        }
101    })
102}
103
/// Public lookup of catalog metadata by provider name (any casing) and model id.
/// Returns `None` when the generated tables have no entry for the pair.
pub fn model_catalog_entry(provider: &str, id: &str) -> Option<ModelCatalogEntry> {
    generated_catalog_entry(provider, id)
}
107
/// List the model ids known for a provider (name accepted in any casing),
/// or `None` when the provider has no entry in the generated tables.
pub fn supported_models_for_provider(provider: &str) -> Option<&'static [&'static str]> {
    capability_generated::models_for_provider(catalog_provider_key(provider))
}
111
/// All canonical provider keys present in the generated capability tables.
pub fn catalog_provider_keys() -> &'static [&'static str] {
    capability_generated::PROVIDERS
}
115
116impl ModelId {
    /// Look up this model's build-time capability metadata, if the generated
    /// catalog contains an entry for its provider/id pair.
    fn generated_capabilities(&self) -> Option<ModelCatalogEntry> {
        generated_catalog_entry(capability_provider_key(self.provider()), self.as_str())
    }
120
    /// Attempt to find a non-reasoning variant for this model.
    ///
    /// Returns `None` when the model is already non-reasoning or when no
    /// suitable replacement exists.
    pub fn non_reasoning_variant(&self) -> Option<Self> {
        // OpenRouter models: pick a sibling from the same vendor group.
        if let Some(meta) = self.openrouter_metadata() {
            // Already a non-reasoning model — nothing to switch to.
            if !meta.reasoning {
                return None;
            }

            let vendor = meta.vendor;
            // Collect same-vendor siblings (excluding self) that are known
            // to be non-reasoning; models without metadata are excluded.
            let mut candidates: Vec<Self> = Self::openrouter_vendor_groups()
                .into_iter()
                .find(|(candidate_vendor, _)| *candidate_vendor == vendor)
                .map(|(_, models)| {
                    models
                        .iter()
                        .copied()
                        .filter(|candidate| candidate != self)
                        .filter(|candidate| {
                            candidate
                                .openrouter_metadata()
                                .map(|other| !other.reasoning)
                                .unwrap_or(false)
                        })
                        .collect()
                })
                .unwrap_or_default();

            if candidates.is_empty() {
                return None;
            }

            // Prefer efficient candidates first (false sorts before true),
            // breaking ties by display name.
            candidates.sort_by_key(|candidate| {
                candidate
                    .openrouter_metadata()
                    .map(|data| (!data.efficient, data.display))
                    .unwrap_or((true, ""))
            });

            return candidates.into_iter().next();
        }

        // Non-OpenRouter models: hand-maintained downgrade table.
        let direct = match self {
            ModelId::Gemini31ProPreview
            | ModelId::Gemini31ProPreviewCustomTools
            | ModelId::Gemini31FlashLitePreview => Some(ModelId::Gemini3FlashPreview),
            ModelId::GPT52
            | ModelId::GPT54
            | ModelId::GPT54Pro
            | ModelId::GPT54Nano
            | ModelId::GPT54Mini
            | ModelId::GPT5 => Some(ModelId::GPT5Mini),
            ModelId::DeepSeekReasoner => Some(ModelId::DeepSeekChat),
            ModelId::ZaiGlm5 => Some(ModelId::OllamaGlm5Cloud),
            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => Some(ModelId::ClaudeSonnet46),
            ModelId::MinimaxM27 | ModelId::MinimaxM25 => None,
            _ => None,
        };

        // Discard a chosen fallback if it itself supports reasoning effort.
        direct.and_then(|candidate| {
            if candidate.supports_reasoning_effort() {
                None
            } else {
                Some(candidate)
            }
        })
    }
186
187    /// Check if this is a "flash" variant (optimized for speed)
188    pub fn is_flash_variant(&self) -> bool {
189        matches!(
190            self,
191            ModelId::Gemini3FlashPreview
192                | ModelId::Gemini31FlashLitePreview
193                | ModelId::OpenRouterStepfunStep35FlashFree
194                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
195                | ModelId::OllamaGemini3FlashPreviewCloud
196                | ModelId::HuggingFaceStep35Flash
197        )
198    }
199
200    /// Check if this is a "pro" variant (optimized for capability)
201    pub fn is_pro_variant(&self) -> bool {
202        matches!(
203            self,
204            ModelId::Gemini31ProPreview
205                | ModelId::Gemini31ProPreviewCustomTools
206                | ModelId::OpenRouterGoogleGemini31ProPreview
207                | ModelId::GPT5
208                | ModelId::GPT52
209                | ModelId::GPT52Codex
210                | ModelId::GPT54
211                | ModelId::GPT54Pro
212                | ModelId::GPT53Codex
213                | ModelId::GPT51Codex
214                | ModelId::GPT51CodexMax
215                | ModelId::GPT5Codex
216                | ModelId::ClaudeOpus46
217                | ModelId::ClaudeSonnet46
218                | ModelId::DeepSeekReasoner
219                | ModelId::ZaiGlm5
220                | ModelId::OpenRouterStepfunStep35FlashFree
221                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
222                | ModelId::MinimaxM27
223                | ModelId::MinimaxM25
224                | ModelId::OllamaGlm5Cloud
225                | ModelId::OllamaNemotron3SuperCloud
226                | ModelId::OllamaMinimaxM25Cloud
227                | ModelId::HuggingFaceQwen3CoderNextNovita
228                | ModelId::HuggingFaceQwen35397BA17BTogether
229        )
230    }
231
232    /// Check if this is an optimized/efficient variant
233    pub fn is_efficient_variant(&self) -> bool {
234        if let Some(meta) = self.openrouter_metadata() {
235            return meta.efficient;
236        }
237        matches!(
238            self,
239            ModelId::Gemini3FlashPreview
240                | ModelId::Gemini31FlashLitePreview
241                | ModelId::GPT5Mini
242                | ModelId::GPT5Nano
243                | ModelId::ClaudeHaiku45
244                | ModelId::DeepSeekChat
245                | ModelId::HuggingFaceStep35Flash
246        )
247    }
248
249    /// Check if this is a top-tier model
250    pub fn is_top_tier(&self) -> bool {
251        if let Some(meta) = self.openrouter_metadata() {
252            return meta.top_tier;
253        }
254        matches!(
255            self,
256            ModelId::Gemini31ProPreview
257                | ModelId::Gemini31ProPreviewCustomTools
258                | ModelId::OpenRouterGoogleGemini31ProPreview
259                | ModelId::Gemini3FlashPreview
260                | ModelId::Gemini31FlashLitePreview
261                | ModelId::GPT5
262                | ModelId::GPT52
263                | ModelId::GPT52Codex
264                | ModelId::GPT54
265                | ModelId::GPT54Pro
266                | ModelId::GPT53Codex
267                | ModelId::GPT51Codex
268                | ModelId::GPT51CodexMax
269                | ModelId::GPT5Codex
270                | ModelId::ClaudeOpus46
271                | ModelId::ClaudeSonnet46
272                | ModelId::DeepSeekReasoner
273                | ModelId::ZaiGlm5
274                | ModelId::OpenRouterStepfunStep35FlashFree
275                | ModelId::HuggingFaceQwen3CoderNextNovita
276                | ModelId::HuggingFaceQwen35397BA17BTogether
277        )
278    }
279
280    /// Determine whether the model is a reasoning-capable variant
281    pub fn is_reasoning_variant(&self) -> bool {
282        if let Some(meta) = self.openrouter_metadata() {
283            return meta.reasoning;
284        }
285        self.provider().supports_reasoning_effort(self.as_str())
286    }
287
288    /// Determine whether the model supports tool calls/function execution
289    pub fn supports_tool_calls(&self) -> bool {
290        if let Some(meta) = self.generated_capabilities() {
291            return meta.tool_call;
292        }
293        if let Some(meta) = self.openrouter_metadata() {
294            return meta.tool_call;
295        }
296        true
297    }
298
299    /// Ordered list of supported input modalities when VT Code has metadata for this model.
300    pub fn input_modalities(&self) -> &'static [&'static str] {
301        self.generated_capabilities()
302            .map(|meta| meta.input_modalities)
303            .unwrap_or(&[])
304    }
305
    /// Get the generation/version string for this model.
    ///
    /// OpenRouter models report the generation from their metadata; all other
    /// models are mapped through the hand-maintained table below. Ids without
    /// a mapping yield `"unknown"`.
    pub fn generation(&self) -> &'static str {
        if let Some(meta) = self.openrouter_metadata() {
            return meta.generation;
        }
        match self {
            // Gemini generations
            ModelId::Gemini31ProPreview | ModelId::Gemini31ProPreviewCustomTools => "3.1",
            ModelId::Gemini31FlashLitePreview => "3.1-lite",
            ModelId::Gemini3FlashPreview => "3",
            // OpenAI generations
            ModelId::GPT52 | ModelId::GPT52Codex => "5.2",
            ModelId::GPT54 | ModelId::GPT54Pro | ModelId::GPT54Nano | ModelId::GPT54Mini => "5.4",
            ModelId::GPT53Codex => "5.3",
            ModelId::GPT51Codex | ModelId::GPT51CodexMax => "5.1",
            ModelId::GPT5
            | ModelId::GPT5Codex
            | ModelId::GPT5Mini
            | ModelId::GPT5Nano
            | ModelId::OpenAIGptOss20b
            | ModelId::OpenAIGptOss120b => "5",
            // Anthropic generations
            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => "4.6",
            ModelId::ClaudeHaiku45 => "4.5",
            // DeepSeek generations
            ModelId::DeepSeekChat | ModelId::DeepSeekReasoner => "V3.2-Exp",
            // Z.AI generations
            ModelId::ZaiGlm5 => "5",
            // Ollama models use descriptive labels rather than numeric generations
            ModelId::OllamaGptOss20b => "oss",
            ModelId::OllamaGptOss20bCloud => "oss-cloud",
            ModelId::OllamaGptOss120bCloud => "oss-cloud",
            ModelId::OllamaQwen317b => "oss",
            ModelId::OllamaQwen3CoderNext => "qwen3-coder-next:cloud",
            ModelId::OllamaDeepseekV32Cloud => "deepseek-v3.2",
            ModelId::OllamaQwen3Next80bCloud => "qwen3-next",
            ModelId::OllamaMinimaxM2Cloud => "minimax-m2",
            ModelId::OllamaGlm5Cloud => "glm-5",
            ModelId::OllamaMinimaxM25Cloud => "minimax-m2.5",
            ModelId::OllamaNemotron3SuperCloud => "nemotron-3",
            ModelId::OllamaGemini3FlashPreviewCloud => "gemini-3",
            // MiniMax models
            ModelId::MinimaxM27 => "M2.7",
            ModelId::MinimaxM25 => "M2.5",
            // Moonshot models
            ModelId::MoonshotKimiK25 => "k2.5",
            // Hugging Face generations
            ModelId::HuggingFaceDeepseekV32 => "V3.2-Exp",
            ModelId::HuggingFaceOpenAIGptOss20b => "oss",
            ModelId::HuggingFaceOpenAIGptOss120b => "oss",
            ModelId::HuggingFaceMinimaxM25Novita => "m2.5",
            ModelId::HuggingFaceDeepseekV32Novita => "v3.2",
            ModelId::HuggingFaceXiaomiMimoV2FlashNovita => "v2-flash",
            ModelId::HuggingFaceGlm5Novita => "5",
            ModelId::HuggingFaceStep35Flash => "3.5",
            ModelId::HuggingFaceQwen3CoderNextNovita | ModelId::OpenRouterQwen3CoderNext => {
                "qwen3-coder-next"
            }
            // Any id not listed above — keep this table in sync with ModelId.
            _ => "unknown",
        }
    }
366
367    /// Determine if this model supports GPT-5.1+/5.2+/5.3+ shell tool type
368    pub fn supports_shell_tool(&self) -> bool {
369        matches!(
370            self,
371            ModelId::GPT52
372                | ModelId::GPT52Codex
373                | ModelId::GPT54
374                | ModelId::GPT54Pro
375                | ModelId::GPT53Codex
376                | ModelId::GPT51Codex
377                | ModelId::GPT51CodexMax
378                | ModelId::GPT5Codex
379        )
380    }
381
    /// Determine if this model supports the optimized apply_patch tool.
    ///
    /// Currently always `false` — a placeholder until per-model support is
    /// wired up.
    pub fn supports_apply_patch_tool(&self) -> bool {
        false // Placeholder for future optimization
    }
386}