//! Capability helpers for [`ModelId`].
//!
//! Path: vtcode_config/models/model_id/capabilities.rs

use super::ModelId;

3impl ModelId {
4    /// Attempt to find a non-reasoning variant for this model.
5    pub fn non_reasoning_variant(&self) -> Option<Self> {
6        if let Some(meta) = self.openrouter_metadata() {
7            if !meta.reasoning {
8                return None;
9            }
10
11            let vendor = meta.vendor;
12            let mut candidates: Vec<Self> = Self::openrouter_vendor_groups()
13                .into_iter()
14                .find(|(candidate_vendor, _)| *candidate_vendor == vendor)
15                .map(|(_, models)| {
16                    models
17                        .iter()
18                        .copied()
19                        .filter(|candidate| candidate != self)
20                        .filter(|candidate| {
21                            candidate
22                                .openrouter_metadata()
23                                .map(|other| !other.reasoning)
24                                .unwrap_or(false)
25                        })
26                        .collect()
27                })
28                .unwrap_or_default();
29
30            if candidates.is_empty() {
31                return None;
32            }
33
34            candidates.sort_by_key(|candidate| {
35                candidate
36                    .openrouter_metadata()
37                    .map(|data| (!data.efficient, data.display))
38                    .unwrap_or((true, ""))
39            });
40
41            return candidates.into_iter().next();
42        }
43
44        let direct = match self {
45            ModelId::Gemini31ProPreview
46            | ModelId::Gemini31ProPreviewCustomTools
47            | ModelId::Gemini31FlashLitePreview => Some(ModelId::Gemini3FlashPreview),
48            ModelId::GPT | ModelId::GPT52 | ModelId::GPT54 | ModelId::GPT54Pro | ModelId::GPT5 => {
49                Some(ModelId::GPT5Mini)
50            }
51            ModelId::DeepSeekReasoner => Some(ModelId::DeepSeekChat),
52            ModelId::ZaiGlm5 => Some(ModelId::OllamaGlm5Cloud),
53            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => Some(ModelId::ClaudeSonnet46),
54            ModelId::MinimaxM25 => None,
55            _ => None,
56        };
57
58        direct.and_then(|candidate| {
59            if candidate.supports_reasoning_effort() {
60                None
61            } else {
62                Some(candidate)
63            }
64        })
65    }
66
67    /// Check if this is a "flash" variant (optimized for speed)
68    pub fn is_flash_variant(&self) -> bool {
69        matches!(
70            self,
71            ModelId::Gemini3FlashPreview
72                | ModelId::Gemini31FlashLitePreview
73                | ModelId::OpenRouterStepfunStep35FlashFree
74                | ModelId::OllamaGemini3FlashPreviewCloud
75                | ModelId::HuggingFaceStep35Flash
76        )
77    }
78
79    /// Check if this is a "pro" variant (optimized for capability)
80    pub fn is_pro_variant(&self) -> bool {
81        matches!(
82            self,
83            ModelId::Gemini31ProPreview
84                | ModelId::Gemini31ProPreviewCustomTools
85                | ModelId::OpenRouterGoogleGemini31ProPreview
86                | ModelId::GPT
87                | ModelId::GPT5
88                | ModelId::GPT52
89                | ModelId::GPT54
90                | ModelId::GPT54Pro
91                | ModelId::GPT53Codex
92                | ModelId::ClaudeOpus46
93                | ModelId::ClaudeSonnet46
94                | ModelId::DeepSeekReasoner
95                | ModelId::ZaiGlm5
96                | ModelId::OpenRouterStepfunStep35FlashFree
97                | ModelId::MinimaxM25
98                | ModelId::OllamaGlm5Cloud
99                | ModelId::OllamaMinimaxM25Cloud
100                | ModelId::HuggingFaceQwen3CoderNextNovita
101                | ModelId::HuggingFaceQwen35397BA17BTogether
102        )
103    }
104
105    /// Check if this is an optimized/efficient variant
106    pub fn is_efficient_variant(&self) -> bool {
107        if let Some(meta) = self.openrouter_metadata() {
108            return meta.efficient;
109        }
110        matches!(
111            self,
112            ModelId::Gemini3FlashPreview
113                | ModelId::Gemini31FlashLitePreview
114                | ModelId::GPT5Mini
115                | ModelId::GPT5Nano
116                | ModelId::ClaudeHaiku45
117                | ModelId::DeepSeekChat
118                | ModelId::HuggingFaceStep35Flash
119        )
120    }
121
122    /// Check if this is a top-tier model
123    pub fn is_top_tier(&self) -> bool {
124        if let Some(meta) = self.openrouter_metadata() {
125            return meta.top_tier;
126        }
127        matches!(
128            self,
129            ModelId::Gemini31ProPreview
130                | ModelId::Gemini31ProPreviewCustomTools
131                | ModelId::OpenRouterGoogleGemini31ProPreview
132                | ModelId::Gemini3FlashPreview
133                | ModelId::Gemini31FlashLitePreview
134                | ModelId::GPT
135                | ModelId::GPT5
136                | ModelId::GPT52
137                | ModelId::GPT54
138                | ModelId::GPT54Pro
139                | ModelId::GPT53Codex
140                | ModelId::ClaudeOpus46
141                | ModelId::ClaudeSonnet46
142                | ModelId::DeepSeekReasoner
143                | ModelId::ZaiGlm5
144                | ModelId::OpenRouterStepfunStep35FlashFree
145                | ModelId::HuggingFaceQwen3CoderNextNovita
146                | ModelId::HuggingFaceQwen35397BA17BTogether
147        )
148    }
149
150    /// Determine whether the model is a reasoning-capable variant
151    pub fn is_reasoning_variant(&self) -> bool {
152        if let Some(meta) = self.openrouter_metadata() {
153            return meta.reasoning;
154        }
155        self.provider().supports_reasoning_effort(self.as_str())
156    }
157
158    /// Determine whether the model supports tool calls/function execution
159    pub fn supports_tool_calls(&self) -> bool {
160        if let Some(meta) = self.openrouter_metadata() {
161            return meta.tool_call;
162        }
163        true
164    }
165
166    /// Get the generation/version string for this model
167    pub fn generation(&self) -> &'static str {
168        if let Some(meta) = self.openrouter_metadata() {
169            return meta.generation;
170        }
171        match self {
172            // Gemini generations
173            ModelId::Gemini31ProPreview | ModelId::Gemini31ProPreviewCustomTools => "3.1",
174            ModelId::Gemini31FlashLitePreview => "3.1-lite",
175            ModelId::Gemini3FlashPreview => "3",
176            // OpenAI generations
177            ModelId::GPT => "5.4",
178            ModelId::GPT52 => "5.2",
179            ModelId::GPT54 | ModelId::GPT54Pro => "5.4",
180            ModelId::GPT53Codex => "5.3",
181            ModelId::GPT5
182            | ModelId::GPT5Mini
183            | ModelId::GPT5Nano
184            | ModelId::OpenAIGptOss20b
185            | ModelId::OpenAIGptOss120b => "5",
186            // Anthropic generations
187            ModelId::ClaudeOpus46 | ModelId::ClaudeSonnet46 => "4.6",
188            ModelId::ClaudeHaiku45 => "4.5",
189            // DeepSeek generations
190            ModelId::DeepSeekChat | ModelId::DeepSeekReasoner => "V3.2-Exp",
191            // Z.AI generations
192            ModelId::ZaiGlm5 => "5",
193            ModelId::OllamaGptOss20b => "oss",
194            ModelId::OllamaGptOss20bCloud => "oss-cloud",
195            ModelId::OllamaGptOss120bCloud => "oss-cloud",
196            ModelId::OllamaQwen317b => "oss",
197            ModelId::OllamaQwen3CoderNext => "qwen3-coder-next:cloud",
198            ModelId::OllamaDeepseekV32Cloud => "deepseek-v3.2",
199            ModelId::OllamaQwen3Next80bCloud => "qwen3-next",
200            ModelId::OllamaMinimaxM2Cloud => "minimax-m2",
201            ModelId::OllamaGlm5Cloud => "glm-5",
202            ModelId::OllamaMinimaxM25Cloud => "minimax-m2.5",
203            ModelId::OllamaGemini3FlashPreviewCloud => "gemini-3",
204            // MiniMax models
205            ModelId::MinimaxM25 => "M2.5",
206            // Moonshot models
207            ModelId::MoonshotKimiK25 => "k2.5",
208            // Hugging Face generations
209            ModelId::HuggingFaceDeepseekV32 => "V3.2-Exp",
210            ModelId::HuggingFaceOpenAIGptOss20b => "oss",
211            ModelId::HuggingFaceOpenAIGptOss120b => "oss",
212            ModelId::HuggingFaceMinimaxM25Novita => "m2.5",
213            ModelId::HuggingFaceDeepseekV32Novita => "v3.2",
214            ModelId::HuggingFaceXiaomiMimoV2FlashNovita => "v2-flash",
215            ModelId::HuggingFaceGlm5Novita => "5",
216            ModelId::HuggingFaceStep35Flash => "3.5",
217            ModelId::HuggingFaceQwen3CoderNextNovita | ModelId::OpenRouterQwen3CoderNext => {
218                "qwen3-coder-next"
219            }
220            _ => "unknown",
221        }
222    }
223
224    /// Determine if this model supports GPT-5.1+/5.2+/5.3+ shell tool type
225    pub fn supports_shell_tool(&self) -> bool {
226        matches!(
227            self,
228            ModelId::GPT
229                | ModelId::GPT52
230                | ModelId::GPT54
231                | ModelId::GPT54Pro
232                | ModelId::GPT53Codex
233        )
234    }
235
236    /// Determine if this model supports optimized apply_patch tool
237    pub fn supports_apply_patch_tool(&self) -> bool {
238        false // Placeholder for future optimization
239    }
240}