//! Capability metadata helpers for `ModelId`.
//!
//! File: vtcode_config/models/model_id/capabilities.rs
use crate::models::Provider;

use super::ModelId;
5#[cfg(not(docsrs))]
6mod capability_generated {
7    include!(concat!(env!("OUT_DIR"), "/model_capabilities.rs"));
8}
9
/// Stub used for docs.rs builds, where the build-script output
/// (`OUT_DIR/model_capabilities.rs`) is not available.
///
/// Mirrors the generated module's public interface so documentation
/// builds compile; all lookups report "no metadata".
#[cfg(docsrs)]
mod capability_generated {
    /// Mirror of the generated capability record.
    #[derive(Clone, Copy)]
    pub struct Entry {
        pub provider: &'static str,
        pub id: &'static str,
        pub tool_call: bool,
        pub input_modalities: &'static [&'static str],
    }

    /// Empty table: no capability entries exist in the docs-only stub.
    pub const ENTRIES: &[Entry] = &[];

    /// Always returns `None`, forcing callers onto their fallback paths.
    pub fn metadata_for(_provider: &str, _id: &str) -> Option<Entry> {
        None
    }
}
26
27fn capability_provider_key(provider: Provider) -> &'static str {
28    match provider {
29        Provider::Gemini => "gemini",
30        Provider::OpenAI => "openai",
31        Provider::Anthropic => "anthropic",
32        Provider::DeepSeek => "deepseek",
33        Provider::OpenRouter => "openrouter",
34        Provider::Ollama => "ollama",
35        Provider::LmStudio => "lmstudio",
36        Provider::Moonshot => "moonshot",
37        Provider::ZAI => "zai",
38        Provider::Minimax => "minimax",
39        Provider::HuggingFace => "huggingface",
40        Provider::LiteLLM => "litellm",
41    }
42}
43
44impl ModelId {
45    fn generated_capabilities(&self) -> Option<capability_generated::Entry> {
46        capability_generated::metadata_for(capability_provider_key(self.provider()), self.as_str())
47    }
48
49    /// Attempt to find a non-reasoning variant for this model.
50    pub fn non_reasoning_variant(&self) -> Option<Self> {
51        if let Some(meta) = self.openrouter_metadata() {
52            if !meta.reasoning {
53                return None;
54            }
55
56            let vendor = meta.vendor;
57            let mut candidates: Vec<Self> = Self::openrouter_vendor_groups()
58                .into_iter()
59                .find(|(candidate_vendor, _)| *candidate_vendor == vendor)
60                .map(|(_, models)| {
61                    models
62                        .iter()
63                        .copied()
64                        .filter(|candidate| candidate != self)
65                        .filter(|candidate| {
66                            candidate
67                                .openrouter_metadata()
68                                .map(|other| !other.reasoning)
69                                .unwrap_or(false)
70                        })
71                        .collect()
72                })
73                .unwrap_or_default();
74
75            if candidates.is_empty() {
76                return None;
77            }
78
79            candidates.sort_by_key(|candidate| {
80                candidate
81                    .openrouter_metadata()
82                    .map(|data| (!data.efficient, data.display))
83                    .unwrap_or((true, ""))
84            });
85
86            return candidates.into_iter().next();
87        }
88
89        let direct = match self {
90            ModelId::Gemini31ProPreview
91            | ModelId::Gemini31ProPreviewCustomTools
92            | ModelId::Gemini31FlashLitePreview => Some(ModelId::Gemini3FlashPreview),
93            ModelId::GPT | ModelId::GPT52 | ModelId::GPT54 | ModelId::GPT54Pro | ModelId::GPT5 => {
94                Some(ModelId::GPT5Mini)
95            }
96            ModelId::DeepSeekReasoner => Some(ModelId::DeepSeekChat),
97            ModelId::ZaiGlm5 => Some(ModelId::OllamaGlm5Cloud),
98            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => Some(ModelId::ClaudeSonnet46),
99            ModelId::MinimaxM25 => None,
100            _ => None,
101        };
102
103        direct.and_then(|candidate| {
104            if candidate.supports_reasoning_effort() {
105                None
106            } else {
107                Some(candidate)
108            }
109        })
110    }
111
112    /// Check if this is a "flash" variant (optimized for speed)
113    pub fn is_flash_variant(&self) -> bool {
114        matches!(
115            self,
116            ModelId::Gemini3FlashPreview
117                | ModelId::Gemini31FlashLitePreview
118                | ModelId::OpenRouterStepfunStep35FlashFree
119                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
120                | ModelId::OllamaGemini3FlashPreviewCloud
121                | ModelId::HuggingFaceStep35Flash
122        )
123    }
124
125    /// Check if this is a "pro" variant (optimized for capability)
126    pub fn is_pro_variant(&self) -> bool {
127        matches!(
128            self,
129            ModelId::Gemini31ProPreview
130                | ModelId::Gemini31ProPreviewCustomTools
131                | ModelId::OpenRouterGoogleGemini31ProPreview
132                | ModelId::GPT
133                | ModelId::GPT5
134                | ModelId::GPT52
135                | ModelId::GPT54
136                | ModelId::GPT54Pro
137                | ModelId::GPT53Codex
138                | ModelId::ClaudeOpus46
139                | ModelId::ClaudeSonnet46
140                | ModelId::DeepSeekReasoner
141                | ModelId::ZaiGlm5
142                | ModelId::OpenRouterStepfunStep35FlashFree
143                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
144                | ModelId::MinimaxM25
145                | ModelId::OllamaGlm5Cloud
146                | ModelId::OllamaNemotron3SuperCloud
147                | ModelId::OllamaMinimaxM25Cloud
148                | ModelId::HuggingFaceQwen3CoderNextNovita
149                | ModelId::HuggingFaceQwen35397BA17BTogether
150        )
151    }
152
153    /// Check if this is an optimized/efficient variant
154    pub fn is_efficient_variant(&self) -> bool {
155        if let Some(meta) = self.openrouter_metadata() {
156            return meta.efficient;
157        }
158        matches!(
159            self,
160            ModelId::Gemini3FlashPreview
161                | ModelId::Gemini31FlashLitePreview
162                | ModelId::GPT5Mini
163                | ModelId::GPT5Nano
164                | ModelId::ClaudeHaiku45
165                | ModelId::DeepSeekChat
166                | ModelId::HuggingFaceStep35Flash
167        )
168    }
169
170    /// Check if this is a top-tier model
171    pub fn is_top_tier(&self) -> bool {
172        if let Some(meta) = self.openrouter_metadata() {
173            return meta.top_tier;
174        }
175        matches!(
176            self,
177            ModelId::Gemini31ProPreview
178                | ModelId::Gemini31ProPreviewCustomTools
179                | ModelId::OpenRouterGoogleGemini31ProPreview
180                | ModelId::Gemini3FlashPreview
181                | ModelId::Gemini31FlashLitePreview
182                | ModelId::GPT
183                | ModelId::GPT5
184                | ModelId::GPT52
185                | ModelId::GPT54
186                | ModelId::GPT54Pro
187                | ModelId::GPT53Codex
188                | ModelId::ClaudeOpus46
189                | ModelId::ClaudeSonnet46
190                | ModelId::DeepSeekReasoner
191                | ModelId::ZaiGlm5
192                | ModelId::OpenRouterStepfunStep35FlashFree
193                | ModelId::HuggingFaceQwen3CoderNextNovita
194                | ModelId::HuggingFaceQwen35397BA17BTogether
195        )
196    }
197
198    /// Determine whether the model is a reasoning-capable variant
199    pub fn is_reasoning_variant(&self) -> bool {
200        if let Some(meta) = self.openrouter_metadata() {
201            return meta.reasoning;
202        }
203        self.provider().supports_reasoning_effort(self.as_str())
204    }
205
206    /// Determine whether the model supports tool calls/function execution
207    pub fn supports_tool_calls(&self) -> bool {
208        if let Some(meta) = self.generated_capabilities() {
209            return meta.tool_call;
210        }
211        if let Some(meta) = self.openrouter_metadata() {
212            return meta.tool_call;
213        }
214        true
215    }
216
217    /// Ordered list of supported input modalities when VT Code has metadata for this model.
218    pub fn input_modalities(&self) -> &'static [&'static str] {
219        self.generated_capabilities()
220            .map(|meta| meta.input_modalities)
221            .unwrap_or(&[])
222    }
223
224    /// Get the generation/version string for this model
225    pub fn generation(&self) -> &'static str {
226        if let Some(meta) = self.openrouter_metadata() {
227            return meta.generation;
228        }
229        match self {
230            // Gemini generations
231            ModelId::Gemini31ProPreview | ModelId::Gemini31ProPreviewCustomTools => "3.1",
232            ModelId::Gemini31FlashLitePreview => "3.1-lite",
233            ModelId::Gemini3FlashPreview => "3",
234            // OpenAI generations
235            ModelId::GPT => "5.4",
236            ModelId::GPT52 => "5.2",
237            ModelId::GPT54 | ModelId::GPT54Pro => "5.4",
238            ModelId::GPT53Codex => "5.3",
239            ModelId::GPT5
240            | ModelId::GPT5Mini
241            | ModelId::GPT5Nano
242            | ModelId::OpenAIGptOss20b
243            | ModelId::OpenAIGptOss120b => "5",
244            // Anthropic generations
245            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => "4.6",
246            ModelId::ClaudeHaiku45 => "4.5",
247            // DeepSeek generations
248            ModelId::DeepSeekChat | ModelId::DeepSeekReasoner => "V3.2-Exp",
249            // Z.AI generations
250            ModelId::ZaiGlm5 => "5",
251            ModelId::OllamaGptOss20b => "oss",
252            ModelId::OllamaGptOss20bCloud => "oss-cloud",
253            ModelId::OllamaGptOss120bCloud => "oss-cloud",
254            ModelId::OllamaQwen317b => "oss",
255            ModelId::OllamaQwen3CoderNext => "qwen3-coder-next:cloud",
256            ModelId::OllamaDeepseekV32Cloud => "deepseek-v3.2",
257            ModelId::OllamaQwen3Next80bCloud => "qwen3-next",
258            ModelId::OllamaMinimaxM2Cloud => "minimax-m2",
259            ModelId::OllamaGlm5Cloud => "glm-5",
260            ModelId::OllamaMinimaxM25Cloud => "minimax-m2.5",
261            ModelId::OllamaNemotron3SuperCloud => "nemotron-3",
262            ModelId::OllamaGemini3FlashPreviewCloud => "gemini-3",
263            // MiniMax models
264            ModelId::MinimaxM25 => "M2.5",
265            // Moonshot models
266            ModelId::MoonshotKimiK25 => "k2.5",
267            // Hugging Face generations
268            ModelId::HuggingFaceDeepseekV32 => "V3.2-Exp",
269            ModelId::HuggingFaceOpenAIGptOss20b => "oss",
270            ModelId::HuggingFaceOpenAIGptOss120b => "oss",
271            ModelId::HuggingFaceMinimaxM25Novita => "m2.5",
272            ModelId::HuggingFaceDeepseekV32Novita => "v3.2",
273            ModelId::HuggingFaceXiaomiMimoV2FlashNovita => "v2-flash",
274            ModelId::HuggingFaceGlm5Novita => "5",
275            ModelId::HuggingFaceStep35Flash => "3.5",
276            ModelId::HuggingFaceQwen3CoderNextNovita | ModelId::OpenRouterQwen3CoderNext => {
277                "qwen3-coder-next"
278            }
279            _ => "unknown",
280        }
281    }
282
283    /// Determine if this model supports GPT-5.1+/5.2+/5.3+ shell tool type
284    pub fn supports_shell_tool(&self) -> bool {
285        matches!(
286            self,
287            ModelId::GPT
288                | ModelId::GPT52
289                | ModelId::GPT54
290                | ModelId::GPT54Pro
291                | ModelId::GPT53Codex
292        )
293    }
294
295    /// Determine if this model supports optimized apply_patch tool
296    pub fn supports_apply_patch_tool(&self) -> bool {
297        false // Placeholder for future optimization
298    }
299}