use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;

/// Identifier of the model to use.
///
/// Known models get dedicated variants; any other non-empty id is carried
/// verbatim in [`Model::Custom`] (see the `FromStr` impl below).
/// Serialization round-trips through the canonical string ids returned by
/// [`Model::as_str`].
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub enum Model {
    /// "gpt-5.4"
    Gpt54,
    /// "gpt-5.3-codex" — the default model.
    #[default]
    Gpt53Codex,
    /// "gpt-5.3-codex-spark"
    Gpt53CodexSpark,
    /// "gpt-5.3"
    Gpt53,
    /// "gpt-5.2-codex"
    Gpt52Codex,
    /// "gpt-5.2"
    Gpt52,
    /// "zai-coding-plan/glm-4.7"
    Glm47,
    /// Any other model id, stored verbatim (trimmed, non-empty — enforced by `FromStr`).
    Custom(String),
}
27
28impl Serialize for Model {
29 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
30 where
31 S: serde::Serializer,
32 {
33 serializer.serialize_str(self.as_str())
34 }
35}
36
37impl<'de> Deserialize<'de> for Model {
38 fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
39 where
40 D: serde::Deserializer<'de>,
41 {
42 let value = String::deserialize(deserializer)?;
43 value.parse().map_err(serde::de::Error::custom)
44 }
45}
46
47impl Model {
48 pub fn as_str(&self) -> &str {
49 match self {
50 Model::Gpt54 => "gpt-5.4",
51 Model::Gpt53Codex => "gpt-5.3-codex",
52 Model::Gpt53CodexSpark => "gpt-5.3-codex-spark",
53 Model::Gpt53 => "gpt-5.3",
54 Model::Gpt52Codex => "gpt-5.2-codex",
55 Model::Gpt52 => "gpt-5.2",
56 Model::Glm47 => "zai-coding-plan/glm-4.7",
57 Model::Custom(value) => value.as_str(),
58 }
59 }
60}
61
62impl std::str::FromStr for Model {
63 type Err = &'static str;
64
65 fn from_str(value: &str) -> Result<Self, Self::Err> {
66 let trimmed = value.trim();
67 if trimmed.is_empty() {
68 return Err("model cannot be empty");
69 }
70 Ok(match trimmed {
71 "gpt-5.4" => Model::Gpt54,
72 "gpt-5.3-codex" => Model::Gpt53Codex,
73 "gpt-5.3-codex-spark" => Model::Gpt53CodexSpark,
74 "gpt-5.3" => Model::Gpt53,
75 "gpt-5.2-codex" => Model::Gpt52Codex,
76 "gpt-5.2" => Model::Gpt52,
77 "zai-coding-plan/glm-4.7" => Model::Glm47,
78 other => Model::Custom(other.to_string()),
79 })
80 }
81}
82
83impl schemars::JsonSchema for Model {
85 fn schema_name() -> Cow<'static, str> {
86 "Model".into()
87 }
88
89 fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
90 schemars::json_schema!({
91 "oneOf": [
92 {
93 "type": "string",
94 "const": "gpt-5.4",
95 "description": "OpenAI GPT-5.4 (default Codex model)"
96 },
97 {
98 "type": "string",
99 "const": "gpt-5.3-codex",
100 "description": "OpenAI GPT-5.3 Codex"
101 },
102 {
103 "type": "string",
104 "const": "gpt-5.3-codex-spark",
105 "description": "OpenAI GPT-5.3 Codex Spark (fast)"
106 },
107 {
108 "type": "string",
109 "const": "gpt-5.3",
110 "description": "OpenAI GPT-5.3"
111 },
112 {
113 "type": "string",
114 "const": "gpt-5.2-codex",
115 "description": "OpenAI GPT-5.2 Codex"
116 },
117 {
118 "type": "string",
119 "const": "gpt-5.2",
120 "description": "OpenAI GPT-5.2"
121 },
122 {
123 "type": "string",
124 "const": "zai-coding-plan/glm-4.7",
125 "description": "ZhipuAI GLM-4.7"
126 },
127 {
128 "type": "string",
129 "description": "Custom model identifier",
130 "minLength": 1
131 }
132 ]
133 })
134 }
135}
136
/// Reasoning effort level.
///
/// Serialized in snake_case. `XHigh` is explicitly renamed to "xhigh"
/// because `rename_all = "snake_case"` alone would emit "x_high".
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningEffort {
    Low,
    /// Default effort level.
    #[default]
    Medium,
    High,
    #[serde(rename = "xhigh")]
    #[schemars(rename = "xhigh")]
    XHigh,
}
148
/// Effort setting for a model, including an explicit `Default` choice.
///
/// Unlike [`ReasoningEffort`], this has a `Default` variant which maps to
/// `None` in [`ModelEffort::as_reasoning_effort`] — i.e. no explicit
/// effort is sent. Serialized in snake_case; `XHigh` is renamed to
/// "xhigh" (snake_case alone would emit "x_high").
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum ModelEffort {
    #[default]
    Default,
    Low,
    Medium,
    High,
    #[serde(rename = "xhigh")]
    #[schemars(rename = "xhigh")]
    XHigh,
}
161
162impl ModelEffort {
163 pub fn as_reasoning_effort(self) -> Option<ReasoningEffort> {
164 match self {
165 ModelEffort::Default => None,
166 ModelEffort::Low => Some(ReasoningEffort::Low),
167 ModelEffort::Medium => Some(ReasoningEffort::Medium),
168 ModelEffort::High => Some(ReasoningEffort::High),
169 ModelEffort::XHigh => Some(ReasoningEffort::XHigh),
170 }
171 }
172}
173
#[cfg(test)]
mod tests {
    use super::{Model, ModelEffort, ReasoningEffort};

    #[test]
    fn model_parses_known_variants() {
        assert_eq!("gpt-5.4".parse::<Model>().unwrap(), Model::Gpt54);
        assert_eq!("gpt-5.3-codex".parse::<Model>().unwrap(), Model::Gpt53Codex);
        assert_eq!(
            "gpt-5.3-codex-spark".parse::<Model>().unwrap(),
            Model::Gpt53CodexSpark
        );
        assert_eq!("gpt-5.3".parse::<Model>().unwrap(), Model::Gpt53);
        assert_eq!("gpt-5.2-codex".parse::<Model>().unwrap(), Model::Gpt52Codex);
        assert_eq!("gpt-5.2".parse::<Model>().unwrap(), Model::Gpt52);
        assert_eq!(
            "zai-coding-plan/glm-4.7".parse::<Model>().unwrap(),
            Model::Glm47
        );
    }

    #[test]
    fn model_parses_custom_values() {
        let custom = "claude-opus-4".parse::<Model>().unwrap();
        assert_eq!(custom, Model::Custom("claude-opus-4".to_string()));
        assert_eq!(custom.as_str(), "claude-opus-4");
    }

    // New: pin the trim() behavior of FromStr, which was previously untested.
    #[test]
    fn model_trims_surrounding_whitespace() {
        assert_eq!("  gpt-5.4\n".parse::<Model>().unwrap(), Model::Gpt54);
        let custom = " my-model ".parse::<Model>().unwrap();
        assert_eq!(custom, Model::Custom("my-model".to_string()));
    }

    #[test]
    fn model_rejects_empty_string() {
        let result = "".parse::<Model>();
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("cannot be empty"));
        // Whitespace-only input trims to empty and must also fail.
        assert!("   ".parse::<Model>().is_err());
    }

    #[test]
    fn model_serializes_to_string() {
        let model = Model::Gpt54;
        let json = serde_json::to_string(&model).unwrap();
        assert_eq!(json, "\"gpt-5.4\"");

        let model = Model::Gpt53Codex;
        let json = serde_json::to_string(&model).unwrap();
        assert_eq!(json, "\"gpt-5.3-codex\"");

        let model = Model::Gpt53CodexSpark;
        let json = serde_json::to_string(&model).unwrap();
        assert_eq!(json, "\"gpt-5.3-codex-spark\"");

        let model = Model::Gpt52Codex;
        let json = serde_json::to_string(&model).unwrap();
        assert_eq!(json, "\"gpt-5.2-codex\"");
    }

    #[test]
    fn model_deserializes_from_string() {
        let model: Model = serde_json::from_str("\"sonnet\"").unwrap();
        assert_eq!(model, Model::Custom("sonnet".to_string()));
    }

    #[test]
    fn reasoning_effort_parses_snake_case() {
        let effort: ReasoningEffort = serde_json::from_str("\"low\"").unwrap();
        assert_eq!(effort, ReasoningEffort::Low);
        let effort: ReasoningEffort = serde_json::from_str("\"medium\"").unwrap();
        assert_eq!(effort, ReasoningEffort::Medium);
        let effort: ReasoningEffort = serde_json::from_str("\"high\"").unwrap();
        assert_eq!(effort, ReasoningEffort::High);
        let effort: ReasoningEffort = serde_json::from_str("\"xhigh\"").unwrap();
        assert_eq!(effort, ReasoningEffort::XHigh);
    }

    #[test]
    fn model_effort_converts_to_reasoning_effort() {
        assert_eq!(ModelEffort::Default.as_reasoning_effort(), None);
        assert_eq!(
            ModelEffort::Low.as_reasoning_effort(),
            Some(ReasoningEffort::Low)
        );
        assert_eq!(
            ModelEffort::Medium.as_reasoning_effort(),
            Some(ReasoningEffort::Medium)
        );
        assert_eq!(
            ModelEffort::High.as_reasoning_effort(),
            Some(ReasoningEffort::High)
        );
        assert_eq!(
            ModelEffort::XHigh.as_reasoning_effort(),
            Some(ReasoningEffort::XHigh)
        );
    }

    #[test]
    fn model_json_schema_includes_known_models() {
        use schemars::JsonSchema;

        let schema = Model::json_schema(&mut schemars::SchemaGenerator::default());
        let schema_json = serde_json::to_string(&schema).unwrap();

        // Assert on the exact `"const"` entries rather than bare substrings:
        // a plain contains("gpt-5.3") is vacuously satisfied by the
        // "gpt-5.3-codex" entry and proves nothing about the shorter id.
        // serde_json::to_string emits compact JSON ("key":"value", no spaces),
        // so these needles are stable.
        for expected in [
            "gpt-5.4",
            "gpt-5.3-codex",
            "gpt-5.3-codex-spark",
            "gpt-5.3",
            "gpt-5.2-codex",
            "gpt-5.2",
            "zai-coding-plan/glm-4.7",
        ] {
            let needle = format!("\"const\":\"{expected}\"");
            assert!(
                schema_json.contains(&needle),
                "schema should list {expected}"
            );
        }

        assert!(schema_json.contains("oneOf"), "schema should use oneOf");

        assert!(
            schema_json.contains("Custom model identifier"),
            "schema should have custom fallback"
        );
    }
}