//! vtcode_config/models/model_id/capabilities.rs
//!
//! Capability metadata lookups for [`ModelId`].
1use crate::models::Provider;
2
3use super::ModelId;
4
// Real builds: include the capability table generated by the build script.
// The generated file is expected to supply `Entry`, `ENTRIES`, and
// `metadata_for` — the docs.rs stub below mirrors that interface.
#[cfg(not(docsrs))]
mod capability_generated {
    include!(concat!(env!("OUT_DIR"), "/model_capabilities.rs"));
}
9
#[cfg(docsrs)]
mod capability_generated {
    //! Stub used for docs.rs builds, where the build script's generated
    //! capability table is unavailable. Must mirror the generated interface.

    /// One capability record; mirrors the build-script-generated `Entry`.
    #[derive(Clone, Copy)]
    pub struct Entry {
        /// Lowercase provider key (e.g. "openai"); see `capability_provider_key`.
        pub provider: &'static str,
        /// Provider-specific model identifier string.
        pub id: &'static str,
        /// Whether the model supports tool/function calls.
        pub tool_call: bool,
        /// Ordered list of supported input modalities.
        pub input_modalities: &'static [&'static str],
    }

    /// Empty on docs.rs builds — no generated entries exist.
    pub const ENTRIES: &[Entry] = &[];

    /// Always `None` on docs.rs builds — no metadata is available.
    pub fn metadata_for(_provider: &str, _id: &str) -> Option<Entry> {
        None
    }
}
26
27fn capability_provider_key(provider: Provider) -> &'static str {
28    match provider {
29        Provider::Gemini => "gemini",
30        Provider::OpenAI => "openai",
31        Provider::Anthropic => "anthropic",
32        Provider::DeepSeek => "deepseek",
33        Provider::OpenRouter => "openrouter",
34        Provider::Ollama => "ollama",
35        Provider::LmStudio => "lmstudio",
36        Provider::Moonshot => "moonshot",
37        Provider::ZAI => "zai",
38        Provider::Minimax => "minimax",
39        Provider::HuggingFace => "huggingface",
40        Provider::LiteLLM => "litellm",
41    }
42}
43
44impl ModelId {
45    fn generated_capabilities(&self) -> Option<capability_generated::Entry> {
46        capability_generated::metadata_for(capability_provider_key(self.provider()), self.as_str())
47    }
48
49    /// Attempt to find a non-reasoning variant for this model.
50    pub fn non_reasoning_variant(&self) -> Option<Self> {
51        if let Some(meta) = self.openrouter_metadata() {
52            if !meta.reasoning {
53                return None;
54            }
55
56            let vendor = meta.vendor;
57            let mut candidates: Vec<Self> = Self::openrouter_vendor_groups()
58                .into_iter()
59                .find(|(candidate_vendor, _)| *candidate_vendor == vendor)
60                .map(|(_, models)| {
61                    models
62                        .iter()
63                        .copied()
64                        .filter(|candidate| candidate != self)
65                        .filter(|candidate| {
66                            candidate
67                                .openrouter_metadata()
68                                .map(|other| !other.reasoning)
69                                .unwrap_or(false)
70                        })
71                        .collect()
72                })
73                .unwrap_or_default();
74
75            if candidates.is_empty() {
76                return None;
77            }
78
79            candidates.sort_by_key(|candidate| {
80                candidate
81                    .openrouter_metadata()
82                    .map(|data| (!data.efficient, data.display))
83                    .unwrap_or((true, ""))
84            });
85
86            return candidates.into_iter().next();
87        }
88
89        let direct = match self {
90            ModelId::Gemini31ProPreview
91            | ModelId::Gemini31ProPreviewCustomTools
92            | ModelId::Gemini31FlashLitePreview => Some(ModelId::Gemini3FlashPreview),
93            ModelId::GPT52
94            | ModelId::GPT54
95            | ModelId::GPT54Pro
96            | ModelId::GPT54Nano
97            | ModelId::GPT54Mini
98            | ModelId::GPT5 => Some(ModelId::GPT5Mini),
99            ModelId::DeepSeekReasoner => Some(ModelId::DeepSeekChat),
100            ModelId::ZaiGlm5 => Some(ModelId::OllamaGlm5Cloud),
101            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => Some(ModelId::ClaudeSonnet46),
102            ModelId::MinimaxM25 => None,
103            _ => None,
104        };
105
106        direct.and_then(|candidate| {
107            if candidate.supports_reasoning_effort() {
108                None
109            } else {
110                Some(candidate)
111            }
112        })
113    }
114
115    /// Check if this is a "flash" variant (optimized for speed)
116    pub fn is_flash_variant(&self) -> bool {
117        matches!(
118            self,
119            ModelId::Gemini3FlashPreview
120                | ModelId::Gemini31FlashLitePreview
121                | ModelId::OpenRouterStepfunStep35FlashFree
122                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
123                | ModelId::OllamaGemini3FlashPreviewCloud
124                | ModelId::HuggingFaceStep35Flash
125        )
126    }
127
128    /// Check if this is a "pro" variant (optimized for capability)
129    pub fn is_pro_variant(&self) -> bool {
130        matches!(
131            self,
132            ModelId::Gemini31ProPreview
133                | ModelId::Gemini31ProPreviewCustomTools
134                | ModelId::OpenRouterGoogleGemini31ProPreview
135                | ModelId::GPT5
136                | ModelId::GPT52
137                | ModelId::GPT52Codex
138                | ModelId::GPT54
139                | ModelId::GPT54Pro
140                | ModelId::GPT53Codex
141                | ModelId::GPT51Codex
142                | ModelId::GPT51CodexMax
143                | ModelId::GPT5Codex
144                | ModelId::ClaudeOpus46
145                | ModelId::ClaudeSonnet46
146                | ModelId::DeepSeekReasoner
147                | ModelId::ZaiGlm5
148                | ModelId::OpenRouterStepfunStep35FlashFree
149                | ModelId::OpenRouterNvidiaNemotron3Super120bA12bFree
150                | ModelId::MinimaxM25
151                | ModelId::OllamaGlm5Cloud
152                | ModelId::OllamaNemotron3SuperCloud
153                | ModelId::OllamaMinimaxM25Cloud
154                | ModelId::HuggingFaceQwen3CoderNextNovita
155                | ModelId::HuggingFaceQwen35397BA17BTogether
156        )
157    }
158
159    /// Check if this is an optimized/efficient variant
160    pub fn is_efficient_variant(&self) -> bool {
161        if let Some(meta) = self.openrouter_metadata() {
162            return meta.efficient;
163        }
164        matches!(
165            self,
166            ModelId::Gemini3FlashPreview
167                | ModelId::Gemini31FlashLitePreview
168                | ModelId::GPT5Mini
169                | ModelId::GPT5Nano
170                | ModelId::ClaudeHaiku45
171                | ModelId::DeepSeekChat
172                | ModelId::HuggingFaceStep35Flash
173        )
174    }
175
176    /// Check if this is a top-tier model
177    pub fn is_top_tier(&self) -> bool {
178        if let Some(meta) = self.openrouter_metadata() {
179            return meta.top_tier;
180        }
181        matches!(
182            self,
183            ModelId::Gemini31ProPreview
184                | ModelId::Gemini31ProPreviewCustomTools
185                | ModelId::OpenRouterGoogleGemini31ProPreview
186                | ModelId::Gemini3FlashPreview
187                | ModelId::Gemini31FlashLitePreview
188                | ModelId::GPT5
189                | ModelId::GPT52
190                | ModelId::GPT52Codex
191                | ModelId::GPT54
192                | ModelId::GPT54Pro
193                | ModelId::GPT53Codex
194                | ModelId::GPT51Codex
195                | ModelId::GPT51CodexMax
196                | ModelId::GPT5Codex
197                | ModelId::ClaudeOpus46
198                | ModelId::ClaudeSonnet46
199                | ModelId::DeepSeekReasoner
200                | ModelId::ZaiGlm5
201                | ModelId::OpenRouterStepfunStep35FlashFree
202                | ModelId::HuggingFaceQwen3CoderNextNovita
203                | ModelId::HuggingFaceQwen35397BA17BTogether
204        )
205    }
206
207    /// Determine whether the model is a reasoning-capable variant
208    pub fn is_reasoning_variant(&self) -> bool {
209        if let Some(meta) = self.openrouter_metadata() {
210            return meta.reasoning;
211        }
212        self.provider().supports_reasoning_effort(self.as_str())
213    }
214
215    /// Determine whether the model supports tool calls/function execution
216    pub fn supports_tool_calls(&self) -> bool {
217        if let Some(meta) = self.generated_capabilities() {
218            return meta.tool_call;
219        }
220        if let Some(meta) = self.openrouter_metadata() {
221            return meta.tool_call;
222        }
223        true
224    }
225
226    /// Ordered list of supported input modalities when VT Code has metadata for this model.
227    pub fn input_modalities(&self) -> &'static [&'static str] {
228        self.generated_capabilities()
229            .map(|meta| meta.input_modalities)
230            .unwrap_or(&[])
231    }
232
233    /// Get the generation/version string for this model
234    pub fn generation(&self) -> &'static str {
235        if let Some(meta) = self.openrouter_metadata() {
236            return meta.generation;
237        }
238        match self {
239            // Gemini generations
240            ModelId::Gemini31ProPreview | ModelId::Gemini31ProPreviewCustomTools => "3.1",
241            ModelId::Gemini31FlashLitePreview => "3.1-lite",
242            ModelId::Gemini3FlashPreview => "3",
243            // OpenAI generations
244            ModelId::GPT52 | ModelId::GPT52Codex => "5.2",
245            ModelId::GPT54 | ModelId::GPT54Pro | ModelId::GPT54Nano | ModelId::GPT54Mini => "5.4",
246            ModelId::GPT53Codex => "5.3",
247            ModelId::GPT51Codex | ModelId::GPT51CodexMax => "5.1",
248            ModelId::GPT5
249            | ModelId::GPT5Codex
250            | ModelId::GPT5Mini
251            | ModelId::GPT5Nano
252            | ModelId::OpenAIGptOss20b
253            | ModelId::OpenAIGptOss120b => "5",
254            // Anthropic generations
255            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => "4.6",
256            ModelId::ClaudeHaiku45 => "4.5",
257            // DeepSeek generations
258            ModelId::DeepSeekChat | ModelId::DeepSeekReasoner => "V3.2-Exp",
259            // Z.AI generations
260            ModelId::ZaiGlm5 => "5",
261            ModelId::OllamaGptOss20b => "oss",
262            ModelId::OllamaGptOss20bCloud => "oss-cloud",
263            ModelId::OllamaGptOss120bCloud => "oss-cloud",
264            ModelId::OllamaQwen317b => "oss",
265            ModelId::OllamaQwen3CoderNext => "qwen3-coder-next:cloud",
266            ModelId::OllamaDeepseekV32Cloud => "deepseek-v3.2",
267            ModelId::OllamaQwen3Next80bCloud => "qwen3-next",
268            ModelId::OllamaMinimaxM2Cloud => "minimax-m2",
269            ModelId::OllamaGlm5Cloud => "glm-5",
270            ModelId::OllamaMinimaxM25Cloud => "minimax-m2.5",
271            ModelId::OllamaNemotron3SuperCloud => "nemotron-3",
272            ModelId::OllamaGemini3FlashPreviewCloud => "gemini-3",
273            // MiniMax models
274            ModelId::MinimaxM25 => "M2.5",
275            // Moonshot models
276            ModelId::MoonshotKimiK25 => "k2.5",
277            // Hugging Face generations
278            ModelId::HuggingFaceDeepseekV32 => "V3.2-Exp",
279            ModelId::HuggingFaceOpenAIGptOss20b => "oss",
280            ModelId::HuggingFaceOpenAIGptOss120b => "oss",
281            ModelId::HuggingFaceMinimaxM25Novita => "m2.5",
282            ModelId::HuggingFaceDeepseekV32Novita => "v3.2",
283            ModelId::HuggingFaceXiaomiMimoV2FlashNovita => "v2-flash",
284            ModelId::HuggingFaceGlm5Novita => "5",
285            ModelId::HuggingFaceStep35Flash => "3.5",
286            ModelId::HuggingFaceQwen3CoderNextNovita | ModelId::OpenRouterQwen3CoderNext => {
287                "qwen3-coder-next"
288            }
289            _ => "unknown",
290        }
291    }
292
293    /// Determine if this model supports GPT-5.1+/5.2+/5.3+ shell tool type
294    pub fn supports_shell_tool(&self) -> bool {
295        matches!(
296            self,
297            ModelId::GPT52
298                | ModelId::GPT52Codex
299                | ModelId::GPT54
300                | ModelId::GPT54Pro
301                | ModelId::GPT53Codex
302                | ModelId::GPT51Codex
303                | ModelId::GPT51CodexMax
304                | ModelId::GPT5Codex
305        )
306    }
307
308    /// Determine if this model supports optimized apply_patch tool
309    pub fn supports_apply_patch_tool(&self) -> bool {
310        false // Placeholder for future optimization
311    }
312}