Skip to main content

vtcode_config/models/model_id/capabilities.rs

1use crate::models::Provider;
2
3use super::ModelId;
4
// Build-time generated capability table: the build script writes
// `model_capabilities.rs` into `OUT_DIR`, and this shim splices it in as a
// private module exposing `Entry`, `ENTRIES`, and `metadata_for(provider, id)`
// (the docsrs stub below documents the expected shape).
#[cfg(not(docsrs))]
mod capability_generated {
    include!(concat!(env!("OUT_DIR"), "/model_capabilities.rs"));
}
9
10#[cfg(docsrs)]
/// Stub compiled only for docs.rs builds, where the build script's generated
/// capability table is unavailable. Mirrors the generated module's API
/// exactly so the rest of the file compiles unchanged.
mod capability_generated {
    /// One capability record, keyed by provider slug plus model id.
    #[derive(Clone, Copy)]
    pub struct Entry {
        pub provider: &'static str,
        pub id: &'static str,
        pub tool_call: bool,
        pub input_modalities: &'static [&'static str],
    }

    /// Empty on docs.rs: no capability metadata is baked into the docs build.
    pub const ENTRIES: &[Entry] = &[];

    /// Look up a capability record by provider key and model id. The table
    /// is empty here, so every lookup resolves to `None`.
    pub fn metadata_for(provider: &str, id: &str) -> Option<Entry> {
        ENTRIES
            .iter()
            .find(|entry| entry.provider == provider && entry.id == id)
            .copied()
    }
}
26
/// Map a [`Provider`] to the lowercase slug passed as the `provider` key to
/// `capability_generated::metadata_for` when looking up model capabilities.
///
/// The match is deliberately exhaustive (no `_` arm) so that adding a new
/// `Provider` variant produces a compile error here, forcing the slug
/// mapping to be kept in sync with the generated table.
fn capability_provider_key(provider: Provider) -> &'static str {
    match provider {
        Provider::Gemini => "gemini",
        Provider::OpenAI => "openai",
        Provider::Anthropic => "anthropic",
        Provider::DeepSeek => "deepseek",
        Provider::OpenRouter => "openrouter",
        Provider::Ollama => "ollama",
        Provider::LmStudio => "lmstudio",
        Provider::Moonshot => "moonshot",
        Provider::ZAI => "zai",
        Provider::Minimax => "minimax",
        Provider::HuggingFace => "huggingface",
        Provider::LiteLLM => "litellm",
    }
}
43
impl ModelId {
    /// Look up this model's entry in the build-generated capability table,
    /// keyed by provider slug plus the model's canonical id string.
    fn generated_capabilities(&self) -> Option<capability_generated::Entry> {
        capability_generated::metadata_for(capability_provider_key(self.provider()), self.as_str())
    }

    /// Attempt to find a non-reasoning variant for this model.
    ///
    /// Returns `None` when the model is already non-reasoning, or when no
    /// suitable non-reasoning sibling is known.
    pub fn non_reasoning_variant(&self) -> Option<Self> {
        // OpenRouter models: pick a sibling from the same vendor group using
        // catalog metadata rather than the hand-maintained table below.
        if let Some(meta) = self.openrouter_metadata() {
            if !meta.reasoning {
                // Already non-reasoning; there is nothing to downgrade to.
                return None;
            }

            let vendor = meta.vendor;
            // Same-vendor candidates, excluding self, that are known to be
            // non-reasoning per their own metadata (models without metadata
            // are excluded via `unwrap_or(false)`).
            let mut candidates: Vec<Self> = Self::openrouter_vendor_groups()
                .into_iter()
                .find(|(candidate_vendor, _)| *candidate_vendor == vendor)
                .map(|(_, models)| {
                    models
                        .iter()
                        .copied()
                        .filter(|candidate| candidate != self)
                        .filter(|candidate| {
                            candidate
                                .openrouter_metadata()
                                .map(|other| !other.reasoning)
                                .unwrap_or(false)
                        })
                        .collect()
                })
                .unwrap_or_default();

            if candidates.is_empty() {
                return None;
            }

            // Sort key: efficient variants first (`!efficient` puts `false`
            // before `true`), then by display name; candidates lacking
            // metadata sort last among the non-efficient group.
            candidates.sort_by_key(|candidate| {
                candidate
                    .openrouter_metadata()
                    .map(|data| (!data.efficient, data.display))
                    .unwrap_or((true, ""))
            });

            return candidates.into_iter().next();
        }

        // Non-OpenRouter models: hand-maintained downgrade table.
        let direct = match self {
            ModelId::Gemini31ProPreview
            | ModelId::Gemini31ProPreviewCustomTools
            | ModelId::Gemini31FlashLitePreview => Some(ModelId::Gemini3FlashPreview),
            ModelId::GPT52 | ModelId::GPT54 | ModelId::GPT54Pro | ModelId::GPT5 => {
                Some(ModelId::GPT5Mini)
            }
            ModelId::DeepSeekReasoner => Some(ModelId::DeepSeekChat),
            // NOTE(review): cross-provider fallback (Z.AI -> Ollama cloud);
            // confirm this is intentional rather than a copy-paste slip.
            ModelId::ZaiGlm5 => Some(ModelId::OllamaGlm5Cloud),
            // NOTE(review): ClaudeSonnet46 maps to itself here. The
            // `supports_reasoning_effort()` filter below may reject it
            // anyway — confirm whether a distinct fallback was intended.
            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => Some(ModelId::ClaudeSonnet46),
            // Explicitly no fallback (redundant with the catch-all arm, but
            // kept as documentation of a deliberate decision).
            ModelId::MinimaxM25 => None,
            _ => None,
        };

        // Reject a candidate that itself supports reasoning effort — the
        // caller asked specifically for a non-reasoning model.
        direct.and_then(|candidate| {
            if candidate.supports_reasoning_effort() {
                None
            } else {
                Some(candidate)
            }
        })
    }

    /// Check if this is a "flash" variant (optimized for speed)
    pub fn is_flash_variant(&self) -> bool {
        matches!(
            self,
            ModelId::Gemini3FlashPreview
                | ModelId::Gemini31FlashLitePreview
                | ModelId::OpenRouterStepfunStep35FlashFree
                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
                | ModelId::OllamaGemini3FlashPreviewCloud
                | ModelId::HuggingFaceStep35Flash
        )
    }

    /// Check if this is a "pro" variant (optimized for capability)
    pub fn is_pro_variant(&self) -> bool {
        matches!(
            self,
            ModelId::Gemini31ProPreview
                | ModelId::Gemini31ProPreviewCustomTools
                | ModelId::OpenRouterGoogleGemini31ProPreview
                | ModelId::GPT5
                | ModelId::GPT52
                | ModelId::GPT52Codex
                | ModelId::GPT54
                | ModelId::GPT54Pro
                | ModelId::GPT53Codex
                | ModelId::GPT51Codex
                | ModelId::GPT51CodexMax
                | ModelId::GPT5Codex
                | ModelId::ClaudeOpus46
                | ModelId::ClaudeSonnet46
                | ModelId::DeepSeekReasoner
                | ModelId::ZaiGlm5
                | ModelId::OpenRouterStepfunStep35FlashFree
                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
                | ModelId::MinimaxM25
                | ModelId::OllamaGlm5Cloud
                | ModelId::OllamaNemotron3SuperCloud
                | ModelId::OllamaMinimaxM25Cloud
                | ModelId::HuggingFaceQwen3CoderNextNovita
                | ModelId::HuggingFaceQwen35397BA17BTogether
        )
    }

    /// Check if this is an optimized/efficient variant.
    ///
    /// OpenRouter metadata, when present, is authoritative; otherwise a
    /// hard-coded list is consulted.
    pub fn is_efficient_variant(&self) -> bool {
        if let Some(meta) = self.openrouter_metadata() {
            return meta.efficient;
        }
        matches!(
            self,
            ModelId::Gemini3FlashPreview
                | ModelId::Gemini31FlashLitePreview
                | ModelId::GPT5Mini
                | ModelId::GPT5Nano
                | ModelId::ClaudeHaiku45
                | ModelId::DeepSeekChat
                | ModelId::HuggingFaceStep35Flash
        )
    }

    /// Check if this is a top-tier model.
    ///
    /// OpenRouter metadata, when present, is authoritative; otherwise a
    /// hard-coded list is consulted.
    pub fn is_top_tier(&self) -> bool {
        if let Some(meta) = self.openrouter_metadata() {
            return meta.top_tier;
        }
        matches!(
            self,
            ModelId::Gemini31ProPreview
                | ModelId::Gemini31ProPreviewCustomTools
                | ModelId::OpenRouterGoogleGemini31ProPreview
                | ModelId::Gemini3FlashPreview
                | ModelId::Gemini31FlashLitePreview
                | ModelId::GPT5
                | ModelId::GPT52
                | ModelId::GPT52Codex
                | ModelId::GPT54
                | ModelId::GPT54Pro
                | ModelId::GPT53Codex
                | ModelId::GPT51Codex
                | ModelId::GPT51CodexMax
                | ModelId::GPT5Codex
                | ModelId::ClaudeOpus46
                | ModelId::ClaudeSonnet46
                | ModelId::DeepSeekReasoner
                | ModelId::ZaiGlm5
                | ModelId::OpenRouterStepfunStep35FlashFree
                | ModelId::HuggingFaceQwen3CoderNextNovita
                | ModelId::HuggingFaceQwen35397BA17BTogether
        )
    }

    /// Determine whether the model is a reasoning-capable variant.
    ///
    /// OpenRouter metadata takes precedence; otherwise defer to the
    /// provider-level check against the raw model id string.
    pub fn is_reasoning_variant(&self) -> bool {
        if let Some(meta) = self.openrouter_metadata() {
            return meta.reasoning;
        }
        self.provider().supports_reasoning_effort(self.as_str())
    }

    /// Determine whether the model supports tool calls/function execution.
    ///
    /// Precedence: build-generated capability table, then OpenRouter
    /// metadata, then an optimistic `true` for unknown models.
    pub fn supports_tool_calls(&self) -> bool {
        if let Some(meta) = self.generated_capabilities() {
            return meta.tool_call;
        }
        if let Some(meta) = self.openrouter_metadata() {
            return meta.tool_call;
        }
        true
    }

    /// Ordered list of supported input modalities when VT Code has metadata for this model.
    ///
    /// Returns an empty slice when no generated metadata exists for this
    /// model — callers cannot distinguish "no modalities" from "unknown".
    pub fn input_modalities(&self) -> &'static [&'static str] {
        self.generated_capabilities()
            .map(|meta| meta.input_modalities)
            .unwrap_or(&[])
    }

    /// Get the generation/version string for this model.
    ///
    /// OpenRouter metadata takes precedence; unmapped models fall back to
    /// the literal `"unknown"`.
    pub fn generation(&self) -> &'static str {
        if let Some(meta) = self.openrouter_metadata() {
            return meta.generation;
        }
        match self {
            // Gemini generations
            ModelId::Gemini31ProPreview | ModelId::Gemini31ProPreviewCustomTools => "3.1",
            ModelId::Gemini31FlashLitePreview => "3.1-lite",
            ModelId::Gemini3FlashPreview => "3",
            // OpenAI generations
            ModelId::GPT52 | ModelId::GPT52Codex => "5.2",
            ModelId::GPT54 | ModelId::GPT54Pro => "5.4",
            ModelId::GPT53Codex => "5.3",
            ModelId::GPT51Codex | ModelId::GPT51CodexMax => "5.1",
            ModelId::GPT5
            | ModelId::GPT5Codex
            | ModelId::GPT5Mini
            | ModelId::GPT5Nano
            | ModelId::OpenAIGptOss20b
            | ModelId::OpenAIGptOss120b => "5",
            // Anthropic generations
            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => "4.6",
            ModelId::ClaudeHaiku45 => "4.5",
            // DeepSeek generations
            ModelId::DeepSeekChat | ModelId::DeepSeekReasoner => "V3.2-Exp",
            // Z.AI generations
            ModelId::ZaiGlm5 => "5",
            // Ollama generations — NOTE(review): the labels here mix styles
            // (tiers like "oss-cloud" vs model slugs like "glm-5"); confirm
            // consumers only treat these as opaque display strings.
            ModelId::OllamaGptOss20b => "oss",
            ModelId::OllamaGptOss20bCloud => "oss-cloud",
            ModelId::OllamaGptOss120bCloud => "oss-cloud",
            ModelId::OllamaQwen317b => "oss",
            ModelId::OllamaQwen3CoderNext => "qwen3-coder-next:cloud",
            ModelId::OllamaDeepseekV32Cloud => "deepseek-v3.2",
            ModelId::OllamaQwen3Next80bCloud => "qwen3-next",
            ModelId::OllamaMinimaxM2Cloud => "minimax-m2",
            ModelId::OllamaGlm5Cloud => "glm-5",
            ModelId::OllamaMinimaxM25Cloud => "minimax-m2.5",
            ModelId::OllamaNemotron3SuperCloud => "nemotron-3",
            ModelId::OllamaGemini3FlashPreviewCloud => "gemini-3",
            // MiniMax models
            ModelId::MinimaxM25 => "M2.5",
            // Moonshot models
            ModelId::MoonshotKimiK25 => "k2.5",
            // Hugging Face generations
            ModelId::HuggingFaceDeepseekV32 => "V3.2-Exp",
            ModelId::HuggingFaceOpenAIGptOss20b => "oss",
            ModelId::HuggingFaceOpenAIGptOss120b => "oss",
            ModelId::HuggingFaceMinimaxM25Novita => "m2.5",
            ModelId::HuggingFaceDeepseekV32Novita => "v3.2",
            ModelId::HuggingFaceXiaomiMimoV2FlashNovita => "v2-flash",
            ModelId::HuggingFaceGlm5Novita => "5",
            ModelId::HuggingFaceStep35Flash => "3.5",
            ModelId::HuggingFaceQwen3CoderNextNovita | ModelId::OpenRouterQwen3CoderNext => {
                "qwen3-coder-next"
            }
            // Fallback for models with no known generation label.
            _ => "unknown",
        }
    }

    /// Determine if this model supports GPT-5.1+/5.2+/5.3+ shell tool type
    pub fn supports_shell_tool(&self) -> bool {
        matches!(
            self,
            ModelId::GPT52
                | ModelId::GPT52Codex
                | ModelId::GPT54
                | ModelId::GPT54Pro
                | ModelId::GPT53Codex
                | ModelId::GPT51Codex
                | ModelId::GPT51CodexMax
                | ModelId::GPT5Codex
        )
    }

    /// Determine if this model supports optimized apply_patch tool
    pub fn supports_apply_patch_tool(&self) -> bool {
        false // Placeholder for future optimization
    }
}
309}