Skip to main content

vtcode_config/models/model_id/
capabilities.rs

1use crate::models::Provider;
2
3use super::ModelId;
4
// Normal builds: include the capability table emitted by the build script
// into `$OUT_DIR/model_capabilities.rs` (generated from `docs/models.json`;
// see the `ModelCatalogEntry` doc below).
#[cfg(not(docsrs))]
#[allow(dead_code)]
mod capability_generated {
    include!(concat!(env!("OUT_DIR"), "/model_capabilities.rs"));
}
10
// docs.rs builds: the build script's output is unavailable, so provide an
// empty stub that mirrors the generated module's public API exactly
// (`Entry`, `ENTRIES`, `PROVIDERS`, `metadata_for`, `models_for_provider`).
#[cfg(docsrs)]
#[allow(dead_code)]
mod capability_generated {
    // Must stay field-for-field identical to the generated `Entry` type.
    #[derive(Clone, Copy)]
    pub struct Entry {
        pub provider: &'static str,
        pub id: &'static str,
        pub context_window: usize,
        pub tool_call: bool,
        pub input_modalities: &'static [&'static str],
    }

    // Empty tables: no capability data exists on docs.rs.
    pub const ENTRIES: &[Entry] = &[];
    pub const PROVIDERS: &[&str] = &[];

    // Always misses — callers already treat `None` as "no metadata known".
    pub fn metadata_for(_provider: &str, _id: &str) -> Option<Entry> {
        None
    }

    pub fn models_for_provider(_provider: &str) -> Option<&'static [&'static str]> {
        None
    }
}
34
/// Catalog metadata generated from `docs/models.json`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct ModelCatalogEntry {
    /// Canonical lowercase provider key (e.g. "gemini", "openai").
    pub provider: &'static str,
    /// Provider-specific model identifier string.
    pub id: &'static str,
    /// Context window size — presumably measured in tokens; TODO confirm
    /// against the generator in `docs/models.json`.
    pub context_window: usize,
    /// Whether the model supports tool/function calling
    /// (consulted by `ModelId::supports_tool_calls`).
    pub tool_call: bool,
    /// Ordered list of supported input modality names.
    pub input_modalities: &'static [&'static str],
}
44
/// Normalize a provider name to the lowercase key used by the generated
/// capability catalog. "google" and "gemini" both map to "gemini";
/// anything unrecognized is passed through unchanged.
fn catalog_provider_key(provider: &str) -> &str {
    // (accepted alias, canonical catalog key) — matched ASCII case-insensitively,
    // in order, exactly like the original if/else chain.
    const KEYS: &[(&str, &str)] = &[
        ("google", "gemini"),
        ("gemini", "gemini"),
        ("openai", "openai"),
        ("anthropic", "anthropic"),
        ("deepseek", "deepseek"),
        ("openrouter", "openrouter"),
        ("ollama", "ollama"),
        ("lmstudio", "lmstudio"),
        ("moonshot", "moonshot"),
        ("zai", "zai"),
        ("minimax", "minimax"),
        ("huggingface", "huggingface"),
        ("litellm", "litellm"),
    ];

    KEYS.iter()
        .find(|(alias, _)| provider.eq_ignore_ascii_case(alias))
        .map(|&(_, key)| key)
        .unwrap_or(provider)
}
74
/// Map a `Provider` enum variant to the lowercase key used by the generated
/// capability catalog. Exhaustive on purpose: adding a `Provider` variant
/// forces a compile error here until a key is chosen.
/// NOTE(review): `Provider::Copilot` maps to "copilot", which the string-based
/// `catalog_provider_key` does not recognize — confirm the generated catalog
/// actually contains a "copilot" provider section.
fn capability_provider_key(provider: Provider) -> &'static str {
    match provider {
        Provider::Gemini => "gemini",
        Provider::OpenAI => "openai",
        Provider::Anthropic => "anthropic",
        Provider::Copilot => "copilot",
        Provider::DeepSeek => "deepseek",
        Provider::OpenRouter => "openrouter",
        Provider::Ollama => "ollama",
        Provider::LmStudio => "lmstudio",
        Provider::Moonshot => "moonshot",
        Provider::ZAI => "zai",
        Provider::Minimax => "minimax",
        Provider::HuggingFace => "huggingface",
        Provider::LiteLLM => "litellm",
    }
}
92
93fn generated_catalog_entry(provider: &str, id: &str) -> Option<ModelCatalogEntry> {
94    capability_generated::metadata_for(catalog_provider_key(provider), id).map(|entry| {
95        ModelCatalogEntry {
96            provider: entry.provider,
97            id: entry.id,
98            context_window: entry.context_window,
99            tool_call: entry.tool_call,
100            input_modalities: entry.input_modalities,
101        }
102    })
103}
104
/// Public lookup of catalog metadata for a provider/model pair.
/// Thin wrapper over the generated table; `None` when the pair is unknown.
pub fn model_catalog_entry(provider: &str, id: &str) -> Option<ModelCatalogEntry> {
    generated_catalog_entry(provider, id)
}
108
/// List the model ids the generated catalog knows for `provider`
/// (after alias normalization). `None` when the provider is unknown.
pub fn supported_models_for_provider(provider: &str) -> Option<&'static [&'static str]> {
    capability_generated::models_for_provider(catalog_provider_key(provider))
}
112
/// All provider keys present in the generated capability catalog.
pub fn catalog_provider_keys() -> &'static [&'static str] {
    capability_generated::PROVIDERS
}
116
impl ModelId {
    /// Capability-catalog row for this model from the build-generated table,
    /// if `docs/models.json` carries an entry for it.
    fn generated_capabilities(&self) -> Option<ModelCatalogEntry> {
        generated_catalog_entry(capability_provider_key(self.provider()), self.as_str())
    }

    /// Attempt to find a non-reasoning variant for this model.
    ///
    /// Returns `None` when the model is already non-reasoning or when no
    /// suitable sibling is known.
    pub fn non_reasoning_variant(&self) -> Option<Self> {
        // OpenRouter models: choose a sibling from the same vendor group
        // using the per-model metadata.
        if let Some(meta) = self.openrouter_metadata() {
            // Already non-reasoning — nothing to switch to.
            if !meta.reasoning {
                return None;
            }

            let vendor = meta.vendor;
            // Same-vendor models (excluding `self`) that are positively known
            // to be non-reasoning; models without metadata are filtered out.
            let mut candidates: Vec<Self> = Self::openrouter_vendor_groups()
                .into_iter()
                .find(|(candidate_vendor, _)| *candidate_vendor == vendor)
                .map(|(_, models)| {
                    models
                        .iter()
                        .copied()
                        .filter(|candidate| candidate != self)
                        .filter(|candidate| {
                            candidate
                                .openrouter_metadata()
                                .map(|other| !other.reasoning)
                                .unwrap_or(false)
                        })
                        .collect()
                })
                .unwrap_or_default();

            if candidates.is_empty() {
                return None;
            }

            // Prefer efficient variants (`!data.efficient` sorts them first),
            // then break ties by display name; missing metadata sorts last.
            candidates.sort_by_key(|candidate| {
                candidate
                    .openrouter_metadata()
                    .map(|data| (!data.efficient, data.display))
                    .unwrap_or((true, ""))
            });

            return candidates.into_iter().next();
        }

        // Non-OpenRouter models: hard-coded downgrade table.
        let direct = match self {
            ModelId::Gemini31ProPreview
            | ModelId::Gemini31ProPreviewCustomTools
            | ModelId::Gemini31FlashLitePreview => Some(ModelId::Gemini3FlashPreview),
            ModelId::GPT52
            | ModelId::GPT54
            | ModelId::GPT54Pro
            | ModelId::GPT54Nano
            | ModelId::GPT54Mini
            | ModelId::GPT5 => Some(ModelId::GPT5Mini),
            ModelId::CopilotGPT52Codex | ModelId::CopilotGPT54 => Some(ModelId::CopilotGPT54Mini),
            ModelId::DeepSeekReasoner => Some(ModelId::DeepSeekChat),
            ModelId::ZaiGlm5 => Some(ModelId::OllamaGlm5Cloud),
            // NOTE(review): `ClaudeSonnet46` maps to itself here; only the
            // `supports_reasoning_effort` filter below can reject it.
            // Confirm this self-mapping is intentional.
            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => Some(ModelId::ClaudeSonnet46),
            ModelId::MinimaxM27 | ModelId::MinimaxM25 => None,
            _ => None,
        };

        // Drop any candidate that itself supports reasoning effort — the
        // caller asked for a strictly non-reasoning model.
        direct.and_then(|candidate| {
            if candidate.supports_reasoning_effort() {
                None
            } else {
                Some(candidate)
            }
        })
    }

    /// Check if this is a "flash" variant (optimized for speed)
    pub fn is_flash_variant(&self) -> bool {
        matches!(
            self,
            ModelId::Gemini3FlashPreview
                | ModelId::Gemini31FlashLitePreview
                | ModelId::OpenRouterStepfunStep35FlashFree
                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
                | ModelId::OllamaGemini3FlashPreviewCloud
                | ModelId::HuggingFaceStep35Flash
        )
    }

    /// Check if this is a "pro" variant (optimized for capability)
    pub fn is_pro_variant(&self) -> bool {
        matches!(
            self,
            ModelId::Gemini31ProPreview
                | ModelId::Gemini31ProPreviewCustomTools
                | ModelId::OpenRouterGoogleGemini31ProPreview
                | ModelId::GPT5
                | ModelId::GPT52
                | ModelId::GPT52Codex
                | ModelId::GPT54
                | ModelId::GPT54Pro
                | ModelId::GPT53Codex
                | ModelId::GPT51Codex
                | ModelId::GPT51CodexMax
                | ModelId::CopilotGPT52Codex
                | ModelId::CopilotGPT51CodexMax
                | ModelId::CopilotGPT54
                | ModelId::CopilotClaudeSonnet46
                | ModelId::GPT5Codex
                | ModelId::ClaudeOpus46
                | ModelId::ClaudeSonnet46
                | ModelId::DeepSeekReasoner
                | ModelId::ZaiGlm5
                // NOTE(review): the two OpenRouter "flash" models below are
                // also listed in `is_flash_variant`; "pro" and "flash" are
                // evidently not mutually exclusive — confirm this is intended.
                | ModelId::OpenRouterStepfunStep35FlashFree
                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
                | ModelId::MinimaxM27
                | ModelId::MinimaxM25
                | ModelId::OllamaGlm5Cloud
                | ModelId::OllamaNemotron3SuperCloud
                | ModelId::OllamaMinimaxM25Cloud
                | ModelId::HuggingFaceQwen3CoderNextNovita
                | ModelId::HuggingFaceQwen35397BA17BTogether
        )
    }

    /// Check if this is an optimized/efficient variant
    pub fn is_efficient_variant(&self) -> bool {
        // OpenRouter metadata, when present, is authoritative.
        if let Some(meta) = self.openrouter_metadata() {
            return meta.efficient;
        }
        matches!(
            self,
            ModelId::Gemini3FlashPreview
                | ModelId::Gemini31FlashLitePreview
                | ModelId::GPT5Mini
                | ModelId::GPT5Nano
                | ModelId::CopilotGPT54Mini
                | ModelId::ClaudeHaiku45
                | ModelId::DeepSeekChat
                | ModelId::HuggingFaceStep35Flash
        )
    }

    /// Check if this is a top-tier model
    pub fn is_top_tier(&self) -> bool {
        // OpenRouter metadata, when present, is authoritative.
        if let Some(meta) = self.openrouter_metadata() {
            return meta.top_tier;
        }
        matches!(
            self,
            ModelId::Gemini31ProPreview
                | ModelId::Gemini31ProPreviewCustomTools
                | ModelId::OpenRouterGoogleGemini31ProPreview
                | ModelId::Gemini3FlashPreview
                | ModelId::Gemini31FlashLitePreview
                | ModelId::GPT5
                | ModelId::GPT52
                | ModelId::GPT52Codex
                | ModelId::GPT54
                | ModelId::GPT54Pro
                | ModelId::GPT53Codex
                | ModelId::GPT51Codex
                | ModelId::GPT51CodexMax
                | ModelId::GPT5Codex
                | ModelId::ClaudeOpus46
                | ModelId::ClaudeSonnet46
                | ModelId::DeepSeekReasoner
                | ModelId::ZaiGlm5
                | ModelId::OpenRouterStepfunStep35FlashFree
                | ModelId::HuggingFaceQwen3CoderNextNovita
                | ModelId::HuggingFaceQwen35397BA17BTogether
        )
    }

    /// Determine whether the model is a reasoning-capable variant
    pub fn is_reasoning_variant(&self) -> bool {
        // OpenRouter metadata wins; otherwise defer to the provider-level
        // per-model-id check.
        if let Some(meta) = self.openrouter_metadata() {
            return meta.reasoning;
        }
        self.provider().supports_reasoning_effort(self.as_str())
    }

    /// Determine whether the model supports tool calls/function execution
    ///
    /// Precedence: generated capability catalog, then OpenRouter metadata,
    /// then an optimistic default of `true` when nothing is known.
    pub fn supports_tool_calls(&self) -> bool {
        if let Some(meta) = self.generated_capabilities() {
            return meta.tool_call;
        }
        if let Some(meta) = self.openrouter_metadata() {
            return meta.tool_call;
        }
        true
    }

    /// Ordered list of supported input modalities when VT Code has metadata for this model.
    ///
    /// Returns an empty slice when the catalog has no entry for this model.
    pub fn input_modalities(&self) -> &'static [&'static str] {
        self.generated_capabilities()
            .map(|meta| meta.input_modalities)
            .unwrap_or(&[])
    }

    /// Get the generation/version string for this model
    ///
    /// OpenRouter metadata takes precedence; otherwise a hard-coded table is
    /// used, falling back to `"unknown"` for unlisted models.
    pub fn generation(&self) -> &'static str {
        if let Some(meta) = self.openrouter_metadata() {
            return meta.generation;
        }
        match self {
            // Gemini generations
            ModelId::Gemini31ProPreview | ModelId::Gemini31ProPreviewCustomTools => "3.1",
            ModelId::Gemini31FlashLitePreview => "3.1-lite",
            ModelId::Gemini3FlashPreview => "3",
            // OpenAI generations
            ModelId::GPT52 | ModelId::GPT52Codex => "5.2",
            ModelId::GPT54 | ModelId::GPT54Pro | ModelId::GPT54Nano | ModelId::GPT54Mini => "5.4",
            ModelId::GPT53Codex => "5.3",
            ModelId::GPT51Codex | ModelId::GPT51CodexMax => "5.1",
            ModelId::GPT5
            | ModelId::GPT5Codex
            | ModelId::GPT5Mini
            | ModelId::GPT5Nano
            | ModelId::OpenAIGptOss20b
            | ModelId::OpenAIGptOss120b => "5",
            // Anthropic generations
            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => "4.6",
            ModelId::ClaudeHaiku45 => "4.5",
            // DeepSeek generations
            ModelId::DeepSeekChat | ModelId::DeepSeekReasoner => "V3.2-Exp",
            // Z.AI generations
            ModelId::ZaiGlm5 => "5",
            // Ollama generations — NOTE(review): the strings below mix
            // generation labels with model-ish slugs; confirm downstream
            // consumers only display these and never parse them.
            ModelId::OllamaGptOss20b => "oss",
            ModelId::OllamaGptOss20bCloud => "oss-cloud",
            ModelId::OllamaGptOss120bCloud => "oss-cloud",
            ModelId::OllamaQwen317b => "oss",
            ModelId::OllamaQwen3CoderNext => "qwen3-coder-next:cloud",
            ModelId::OllamaDeepseekV32Cloud => "deepseek-v3.2",
            ModelId::OllamaQwen3Next80bCloud => "qwen3-next",
            ModelId::OllamaMinimaxM2Cloud => "minimax-m2",
            ModelId::OllamaGlm5Cloud => "glm-5",
            ModelId::OllamaMinimaxM25Cloud => "minimax-m2.5",
            ModelId::OllamaNemotron3SuperCloud => "nemotron-3",
            ModelId::OllamaGemini3FlashPreviewCloud => "gemini-3",
            // MiniMax models
            ModelId::MinimaxM27 => "M2.7",
            ModelId::MinimaxM25 => "M2.5",
            // Moonshot models
            ModelId::MoonshotKimiK25 => "k2.5",
            // Hugging Face generations
            ModelId::HuggingFaceDeepseekV32 => "V3.2-Exp",
            ModelId::HuggingFaceOpenAIGptOss20b => "oss",
            ModelId::HuggingFaceOpenAIGptOss120b => "oss",
            ModelId::HuggingFaceMinimaxM25Novita => "m2.5",
            ModelId::HuggingFaceDeepseekV32Novita => "v3.2",
            ModelId::HuggingFaceXiaomiMimoV2FlashNovita => "v2-flash",
            ModelId::HuggingFaceGlm5Novita => "5",
            ModelId::HuggingFaceStep35Flash => "3.5",
            ModelId::HuggingFaceQwen3CoderNextNovita | ModelId::OpenRouterQwen3CoderNext => {
                "qwen3-coder-next"
            }
            _ => "unknown",
        }
    }

    /// Determine if this model supports GPT-5.1+/5.2+/5.3+ shell tool type
    pub fn supports_shell_tool(&self) -> bool {
        matches!(
            self,
            ModelId::GPT52
                | ModelId::GPT52Codex
                | ModelId::GPT54
                | ModelId::GPT54Pro
                | ModelId::GPT53Codex
                | ModelId::GPT51Codex
                | ModelId::GPT51CodexMax
                | ModelId::GPT5Codex
        )
    }

    /// Determine if this model supports optimized apply_patch tool
    pub fn supports_apply_patch_tool(&self) -> bool {
        false // Placeholder for future optimization
    }
}
393}