//! Model listing and retrieval endpoints, plus the `Model` identifier type.

use crate::api::get_json;
use crate::config::OpenAIClient;
use crate::error::OpenAIError;
use serde::{Deserialize, Serialize};

/// Metadata for a single model, as returned by the models endpoints.
#[derive(Debug, Deserialize)]
pub struct ModelInfo {
    /// Model identifier, e.g. `"gpt-4o"`.
    pub id: String,
    /// Object type reported by the API (typically `"model"`).
    pub object: String,
    /// Unix timestamp (in seconds) of when the model was created, if reported.
    #[serde(default)]
    pub created: Option<u64>,
    /// Organization that owns the model.
    pub owned_by: String,
    /// Permissions attached to the model; may be absent or empty.
    #[serde(default)]
    pub permission: Vec<ModelPermission>,
    /// Root model this model is derived from, if any.
    #[serde(default)]
    pub root: Option<String>,
    /// Parent model, if any.
    #[serde(default)]
    pub parent: Option<String>,
}

/// A permission entry attached to a model.
#[derive(Debug, Deserialize)]
pub struct ModelPermission {
    /// Permission identifier.
    pub id: String,
    /// Object type reported by the API (typically `"model_permission"`).
    pub object: String,
    /// Unix timestamp (in seconds) of when the permission was created.
    pub created: u64,
    pub allow_create_engine: bool,
    pub allow_sampling: bool,
    pub allow_logprobs: bool,
    pub allow_search_indices: bool,
    pub allow_view: bool,
    pub allow_fine_tuning: bool,
    /// Organization the permission applies to.
    pub organization: String,
    /// Group the permission applies to, if any.
    #[serde(default)]
    pub group: Option<String>,
    pub is_blocking: bool,
}

/// Wire format of the list-models response; callers only see `data`.
#[derive(Debug, Deserialize)]
struct ModelList {
    #[serde(rename = "object")]
    _object: String,
    data: Vec<ModelInfo>,
}

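/// Lists the models currently available to the configured API key.
///
/// A minimal usage sketch (marked `ignore`, so it is not compiled or run as a
/// doctest); it assumes a client built the same way as in the tests below:
///
/// ```ignore
/// let client = OpenAIClient::builder()
///     .with_api_key("sk-...")
///     .build()?;
/// let models = list_models(&client).await?;
/// for model in &models {
///     println!("{} (owned by {})", model.id, model.owned_by);
/// }
/// ```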
pub async fn list_models(client: &OpenAIClient) -> Result<Vec<ModelInfo>, OpenAIError> {
    let endpoint = "models";
    let response: ModelList = get_json(client, endpoint).await?;
    Ok(response.data)
}

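/// Retrieves metadata for a single model by its identifier.
///
/// A minimal usage sketch (marked `ignore`, not compiled as a doctest),
/// assuming the same client setup as above:
///
/// ```ignore
/// let info = retrieve_model(&client, "gpt-4o").await?;
/// assert_eq!(info.id, "gpt-4o");
/// ```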
pub async fn retrieve_model(
    client: &OpenAIClient,
    model_id: &str,
) -> Result<ModelInfo, OpenAIError> {
    let endpoint = format!("models/{}", model_id);
    get_json(client, &endpoint).await
}

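/// Known OpenAI model identifiers, with an `Other` variant as a fallback for
/// identifiers this crate does not recognize.
///
/// Conversions are purely string-based in both directions, so unknown
/// identifiers are preserved rather than rejected. A small sketch of the
/// intended behaviour (marked `ignore`, not compiled as a doctest):
///
/// ```ignore
/// let model = Model::from("gpt-4o");
/// assert_eq!(model, Model::Gpt4o);
/// assert_eq!(model.as_str(), "gpt-4o");
///
/// // Unrecognized identifiers round-trip through `Other`.
/// let custom = Model::from("my-fine-tuned-model");
/// assert_eq!(custom, Model::Other("my-fine-tuned-model".to_string()));
/// ```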
#[derive(Default, Debug, Clone, PartialEq)]
pub enum Model {
    Gpt45Preview,
    Gpt45Preview2025_02_27,
    Gpt4oMiniAudioPreview,
    Gpt4oMiniAudioPreview2024_12_17,
    Gpt4oMiniRealtimePreview,
    DallE2,
    Gpt4o2024_11_20,
    O1Mini2024_09_12,
    O1Preview2024_09_12,
    #[default]
    O1Mini,
    O1Preview,
    ChatGpt4oLatest,
    Whisper1,
    DallE3,
    Gpt4Turbo,
    Gpt4TurboPreview,
    Gpt4oAudioPreview,
    Gpt4oAudioPreview2024_10_01,
    Babbage002,
    OmniModerationLatest,
    OmniModeration2024_09_26,
    Tts1Hd1106,
    Gpt4o2024_08_06,
    Gpt4o,
    Gpt4o2024_05_13,
    Tts1Hd,
    Gpt4Turbo2024_04_09,
    Tts1,
    Gpt3_5Turbo16k,
    Tts1_1106,
    Davinci002,
    Gpt3_5Turbo1106,
    Gpt4oMiniRealtimePreview2024_12_17,
    Gpt3_5TurboInstruct,
    Gpt4oRealtimePreview2024_10_01,
    Gpt3_5TurboInstruct0914,
    Gpt3_5Turbo0125,
    Gpt4oAudioPreview2024_12_17,
    Gpt4oRealtimePreview2024_12_17,
    Gpt3_5Turbo,
    TextEmbedding3Large,
    Gpt4oRealtimePreview,
    TextEmbedding3Small,
    Gpt40125Preview,
    Gpt4,
    TextEmbeddingAda002,
    Gpt40106Preview,
    Gpt4oMini,
    Gpt40613,
    Gpt4oMini2024_07_18,
    Gpt41Nano,
    Gpt41Nano2025_04_14,
    Gpt41Mini,
    Gpt41Mini2025_04_14,
    Gpt41,
    Gpt41_2025_04_14,
    Gpt4oMiniSearchPreview,
    Gpt4oMiniSearchPreview2025_03_11,
    Gpt4oSearchPreview,
    Gpt4oSearchPreview2025_03_11,
    Gpt4oMiniTts,
    Gpt4oMiniTranscribe,
    Gpt4oTranscribe,
    GptImage1,
    O12024_12_17,
    O1,
    O1Pro,
    O1Pro2025_03_19,
    O3Mini,
    O3Mini2025_01_31,
    O4Mini,
    O4Mini2025_04_16,

    /// Any model identifier not covered by the variants above.
    Other(String),
}

/// Maps a model identifier string to its `Model` variant, falling back to
/// `Model::Other` for anything unrecognized.
fn parse_model_str(s: &str) -> Model {
    match s {
        "gpt-4.5-preview" => Model::Gpt45Preview,
        "gpt-4.5-preview-2025-02-27" => Model::Gpt45Preview2025_02_27,
        "gpt-4o-mini-audio-preview" => Model::Gpt4oMiniAudioPreview,
        "gpt-4o-mini-audio-preview-2024-12-17" => Model::Gpt4oMiniAudioPreview2024_12_17,
        "gpt-4o-mini-realtime-preview" => Model::Gpt4oMiniRealtimePreview,
        "dall-e-2" => Model::DallE2,
        "gpt-4o-2024-11-20" => Model::Gpt4o2024_11_20,
        "o1-mini-2024-09-12" => Model::O1Mini2024_09_12,
        "o1-preview-2024-09-12" => Model::O1Preview2024_09_12,
        "o1-mini" => Model::O1Mini,
        "o1-preview" => Model::O1Preview,
        "chatgpt-4o-latest" => Model::ChatGpt4oLatest,
        "whisper-1" => Model::Whisper1,
        "dall-e-3" => Model::DallE3,
        "gpt-4-turbo" => Model::Gpt4Turbo,
        "gpt-4-turbo-preview" => Model::Gpt4TurboPreview,
        "gpt-4o-audio-preview" => Model::Gpt4oAudioPreview,
        "gpt-4o-audio-preview-2024-10-01" => Model::Gpt4oAudioPreview2024_10_01,
        "babbage-002" => Model::Babbage002,
        "omni-moderation-latest" => Model::OmniModerationLatest,
        "omni-moderation-2024-09-26" => Model::OmniModeration2024_09_26,
        "tts-1-hd-1106" => Model::Tts1Hd1106,
        "gpt-4o-2024-08-06" => Model::Gpt4o2024_08_06,
        "gpt-4o" => Model::Gpt4o,
        "gpt-4o-2024-05-13" => Model::Gpt4o2024_05_13,
        "tts-1-hd" => Model::Tts1Hd,
        "gpt-4-turbo-2024-04-09" => Model::Gpt4Turbo2024_04_09,
        "tts-1" => Model::Tts1,
        "gpt-3.5-turbo-16k" => Model::Gpt3_5Turbo16k,
        "tts-1-1106" => Model::Tts1_1106,
        "davinci-002" => Model::Davinci002,
        "gpt-3.5-turbo-1106" => Model::Gpt3_5Turbo1106,
        "gpt-4o-mini-realtime-preview-2024-12-17" => Model::Gpt4oMiniRealtimePreview2024_12_17,
        "gpt-3.5-turbo-instruct" => Model::Gpt3_5TurboInstruct,
        "gpt-4o-realtime-preview-2024-10-01" => Model::Gpt4oRealtimePreview2024_10_01,
        "gpt-3.5-turbo-instruct-0914" => Model::Gpt3_5TurboInstruct0914,
        "gpt-3.5-turbo-0125" => Model::Gpt3_5Turbo0125,
        "gpt-4o-audio-preview-2024-12-17" => Model::Gpt4oAudioPreview2024_12_17,
        "gpt-4o-realtime-preview-2024-12-17" => Model::Gpt4oRealtimePreview2024_12_17,
        "gpt-3.5-turbo" => Model::Gpt3_5Turbo,
        "text-embedding-3-large" => Model::TextEmbedding3Large,
        "gpt-4o-realtime-preview" => Model::Gpt4oRealtimePreview,
        "text-embedding-3-small" => Model::TextEmbedding3Small,
        "gpt-4-0125-preview" => Model::Gpt40125Preview,
        "gpt-4" => Model::Gpt4,
        "text-embedding-ada-002" => Model::TextEmbeddingAda002,
        "gpt-4-1106-preview" => Model::Gpt40106Preview,
        "gpt-4o-mini" => Model::Gpt4oMini,
        "gpt-4-0613" => Model::Gpt40613,
        "gpt-4o-mini-2024-07-18" => Model::Gpt4oMini2024_07_18,
        "gpt-4.1-nano" => Model::Gpt41Nano,
        "gpt-4.1-nano-2025-04-14" => Model::Gpt41Nano2025_04_14,
        "gpt-4.1-mini" => Model::Gpt41Mini,
        "gpt-4.1-mini-2025-04-14" => Model::Gpt41Mini2025_04_14,
        "gpt-4.1" => Model::Gpt41,
        "gpt-4.1-2025-04-14" => Model::Gpt41_2025_04_14,
        "gpt-4o-mini-search-preview" => Model::Gpt4oMiniSearchPreview,
        "gpt-4o-mini-search-preview-2025-03-11" => Model::Gpt4oMiniSearchPreview2025_03_11,
        "gpt-4o-search-preview" => Model::Gpt4oSearchPreview,
        "gpt-4o-search-preview-2025-03-11" => Model::Gpt4oSearchPreview2025_03_11,
        "gpt-4o-mini-tts" => Model::Gpt4oMiniTts,
        "gpt-4o-mini-transcribe" => Model::Gpt4oMiniTranscribe,
        "gpt-4o-transcribe" => Model::Gpt4oTranscribe,
        "gpt-image-1" => Model::GptImage1,
        "o1-2024-12-17" => Model::O12024_12_17,
        "o1" => Model::O1,
        "o1-pro" => Model::O1Pro,
        "o1-pro-2025-03-19" => Model::O1Pro2025_03_19,
        "o3-mini" => Model::O3Mini,
        "o3-mini-2025-01-31" => Model::O3Mini2025_01_31,
        "o4-mini" => Model::O4Mini,
        "o4-mini-2025-04-16" => Model::O4Mini2025_04_16,
        _ => Model::Other(s.to_owned()),
    }
}

impl From<&str> for Model {
    fn from(s: &str) -> Self {
        parse_model_str(s)
    }
}

impl From<String> for Model {
    fn from(s: String) -> Self {
        parse_model_str(&s)
    }
}

impl From<&String> for Model {
    fn from(s: &String) -> Self {
        parse_model_str(s)
    }
}

impl Model {
    /// Returns the canonical string identifier for this model.
    pub fn as_str(&self) -> &str {
        match self {
            Model::Gpt45Preview => "gpt-4.5-preview",
            Model::Gpt45Preview2025_02_27 => "gpt-4.5-preview-2025-02-27",
            Model::Gpt4oMiniAudioPreview => "gpt-4o-mini-audio-preview",
            Model::Gpt4oMiniAudioPreview2024_12_17 => "gpt-4o-mini-audio-preview-2024-12-17",
            Model::Gpt4oMiniRealtimePreview => "gpt-4o-mini-realtime-preview",
            Model::DallE2 => "dall-e-2",
            Model::Gpt4o2024_11_20 => "gpt-4o-2024-11-20",
            Model::O1Mini2024_09_12 => "o1-mini-2024-09-12",
            Model::O1Preview2024_09_12 => "o1-preview-2024-09-12",
            Model::O1Mini => "o1-mini",
            Model::O1Preview => "o1-preview",
            Model::ChatGpt4oLatest => "chatgpt-4o-latest",
            Model::Whisper1 => "whisper-1",
            Model::DallE3 => "dall-e-3",
            Model::Gpt4Turbo => "gpt-4-turbo",
            Model::Gpt4TurboPreview => "gpt-4-turbo-preview",
            Model::Gpt4oAudioPreview => "gpt-4o-audio-preview",
            Model::Gpt4oAudioPreview2024_10_01 => "gpt-4o-audio-preview-2024-10-01",
            Model::Babbage002 => "babbage-002",
            Model::OmniModerationLatest => "omni-moderation-latest",
            Model::OmniModeration2024_09_26 => "omni-moderation-2024-09-26",
            Model::Tts1Hd1106 => "tts-1-hd-1106",
            Model::Gpt4o2024_08_06 => "gpt-4o-2024-08-06",
            Model::Gpt4o => "gpt-4o",
            Model::Gpt4o2024_05_13 => "gpt-4o-2024-05-13",
            Model::Tts1Hd => "tts-1-hd",
            Model::Gpt4Turbo2024_04_09 => "gpt-4-turbo-2024-04-09",
            Model::Tts1 => "tts-1",
            Model::Gpt3_5Turbo16k => "gpt-3.5-turbo-16k",
            Model::Tts1_1106 => "tts-1-1106",
            Model::Davinci002 => "davinci-002",
            Model::Gpt3_5Turbo1106 => "gpt-3.5-turbo-1106",
            Model::Gpt4oMiniRealtimePreview2024_12_17 => "gpt-4o-mini-realtime-preview-2024-12-17",
            Model::Gpt3_5TurboInstruct => "gpt-3.5-turbo-instruct",
            Model::Gpt4oRealtimePreview2024_10_01 => "gpt-4o-realtime-preview-2024-10-01",
            Model::Gpt3_5TurboInstruct0914 => "gpt-3.5-turbo-instruct-0914",
            Model::Gpt3_5Turbo0125 => "gpt-3.5-turbo-0125",
            Model::Gpt4oAudioPreview2024_12_17 => "gpt-4o-audio-preview-2024-12-17",
            Model::Gpt4oRealtimePreview2024_12_17 => "gpt-4o-realtime-preview-2024-12-17",
            Model::Gpt3_5Turbo => "gpt-3.5-turbo",
            Model::TextEmbedding3Large => "text-embedding-3-large",
            Model::Gpt4oRealtimePreview => "gpt-4o-realtime-preview",
            Model::TextEmbedding3Small => "text-embedding-3-small",
            Model::Gpt40125Preview => "gpt-4-0125-preview",
            Model::Gpt4 => "gpt-4",
            Model::TextEmbeddingAda002 => "text-embedding-ada-002",
            Model::Gpt40106Preview => "gpt-4-1106-preview",
            Model::Gpt4oMini => "gpt-4o-mini",
            Model::Gpt40613 => "gpt-4-0613",
            Model::Gpt4oMini2024_07_18 => "gpt-4o-mini-2024-07-18",
            Model::Gpt41Nano => "gpt-4.1-nano",
            Model::Gpt41Nano2025_04_14 => "gpt-4.1-nano-2025-04-14",
            Model::Gpt41Mini => "gpt-4.1-mini",
            Model::Gpt41Mini2025_04_14 => "gpt-4.1-mini-2025-04-14",
            Model::Gpt41 => "gpt-4.1",
            Model::Gpt41_2025_04_14 => "gpt-4.1-2025-04-14",
            Model::Gpt4oMiniSearchPreview => "gpt-4o-mini-search-preview",
            Model::Gpt4oMiniSearchPreview2025_03_11 => "gpt-4o-mini-search-preview-2025-03-11",
            Model::Gpt4oSearchPreview => "gpt-4o-search-preview",
            Model::Gpt4oSearchPreview2025_03_11 => "gpt-4o-search-preview-2025-03-11",
            Model::Gpt4oMiniTts => "gpt-4o-mini-tts",
            Model::Gpt4oMiniTranscribe => "gpt-4o-mini-transcribe",
            Model::Gpt4oTranscribe => "gpt-4o-transcribe",
            Model::GptImage1 => "gpt-image-1",
            Model::O12024_12_17 => "o1-2024-12-17",
            Model::O1 => "o1",
            Model::O1Pro => "o1-pro",
            Model::O1Pro2025_03_19 => "o1-pro-2025-03-19",
            Model::O3Mini => "o3-mini",
            Model::O3Mini2025_01_31 => "o3-mini-2025-01-31",
            Model::O4Mini => "o4-mini",
            Model::O4Mini2025_04_16 => "o4-mini-2025-04-16",
            Model::Other(s) => s.as_str(),
        }
    }
}

impl Serialize for Model {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(self.as_str())
    }
}

impl<'de> Deserialize<'de> for Model {
    fn deserialize<D>(deserializer: D) -> Result<Model, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Ok(parse_model_str(&s))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::OpenAIClient;
    use crate::error::OpenAIError;
    use serde_json::json;
    use wiremock::matchers::{method, path, path_regex};
    use wiremock::{Mock, MockServer, ResponseTemplate};

    #[tokio::test]
    async fn test_list_models_success() {
        let mock_server = MockServer::start().await;

        let success_body = json!({
            "object": "list",
            "data": [
                {
                    "id": "text-davinci-003",
                    "object": "model",
                    "created": 1673643147,
                    "owned_by": "openai",
                    "permission": [
                        {
                            "id": "modelperm-abc123",
                            "object": "model_permission",
                            "created": 1673643000,
                            "allow_create_engine": true,
                            "allow_sampling": true,
                            "allow_logprobs": true,
                            "allow_search_indices": true,
                            "allow_view": true,
                            "allow_fine_tuning": true,
                            "organization": "openai",
                            "group": null,
                            "is_blocking": false
                        }
                    ],
                    "root": "text-davinci-003",
                    "parent": null
                }
            ]
        });

        Mock::given(method("GET"))
            .and(path("/models"))
            .respond_with(ResponseTemplate::new(200).set_body_json(success_body))
            .mount(&mock_server)
            .await;

        let client = OpenAIClient::builder()
            .with_api_key("test-key")
            .with_base_url(&mock_server.uri())
            .build()
            .unwrap();

        let result = list_models(&client).await;
        assert!(result.is_ok(), "Expected Ok, got: {:?}", result);

        let models = result.unwrap();
        assert_eq!(models.len(), 1);
        let first_model = &models[0];
        assert_eq!(first_model.id, "text-davinci-003");
        assert_eq!(first_model.object, "model");
        assert_eq!(first_model.owned_by, "openai");
        assert!(!first_model.permission.is_empty());
        assert_eq!(first_model.root.as_deref(), Some("text-davinci-003"));
    }

    #[tokio::test]
    async fn test_list_models_api_error() {
        let mock_server = MockServer::start().await;

        let error_body = json!({
            "error": {
                "message": "Could not list models",
                "type": "server_error",
                "code": null
            }
        });

        Mock::given(method("GET"))
            .and(path("/models"))
            .respond_with(ResponseTemplate::new(500).set_body_json(error_body))
            .mount(&mock_server)
            .await;

        let client = OpenAIClient::builder()
            .with_api_key("test-key")
            .with_base_url(&mock_server.uri())
            .build()
            .unwrap();

        let result = list_models(&client).await;
        match result {
            Err(OpenAIError::APIError { message, .. }) => {
                assert!(message.contains("Could not list models"));
            }
            other => panic!("Expected APIError, got: {:?}", other),
        }
    }

    #[tokio::test]
    async fn test_retrieve_model_success() {
        let mock_server = MockServer::start().await;

        let success_body = json!({
            "id": "text-curie-001",
            "object": "model",
            "created": 1673645000,
            "owned_by": "openai",
            "permission": [],
            "root": "text-curie-001",
            "parent": null
        });

        Mock::given(method("GET"))
            .and(path_regex(r"^/models/text-curie-001$"))
            .respond_with(ResponseTemplate::new(200).set_body_json(success_body))
            .mount(&mock_server)
            .await;

        let client = OpenAIClient::builder()
            .with_api_key("test-key")
            .with_base_url(&mock_server.uri())
            .build()
            .unwrap();

        let result = retrieve_model(&client, "text-curie-001").await;
        assert!(result.is_ok(), "Expected Ok, got: {:?}", result);

        let model = result.unwrap();
        assert_eq!(model.id, "text-curie-001");
        assert_eq!(model.object, "model");
        assert_eq!(model.owned_by, "openai");
        assert!(model.permission.is_empty());
        assert_eq!(model.root.as_deref(), Some("text-curie-001"));
    }

    #[tokio::test]
    async fn test_retrieve_model_api_error() {
        let mock_server = MockServer::start().await;

        let error_body = json!({
            "error": {
                "message": "Model not found",
                "type": "invalid_request_error",
                "code": null
            }
        });

        Mock::given(method("GET"))
            .and(path_regex(r"^/models/does-not-exist$"))
            .respond_with(ResponseTemplate::new(404).set_body_json(error_body))
            .mount(&mock_server)
            .await;

        let client = OpenAIClient::builder()
            .with_api_key("test-key")
            .with_base_url(&mock_server.uri())
            .build()
            .unwrap();

        let result = retrieve_model(&client, "does-not-exist").await;
        match result {
            Err(OpenAIError::APIError { message, .. }) => {
                assert!(message.contains("Model not found"));
            }
            other => panic!("Expected APIError, got {:?}", other),
        }
    }

    #[test]
    fn test_model_round_trip() {
        let all_known_variants = vec![
            Model::Gpt45Preview,
            Model::Gpt45Preview2025_02_27,
            Model::Gpt4oMiniAudioPreview,
            Model::Gpt4oMiniAudioPreview2024_12_17,
            Model::Gpt4oMiniRealtimePreview,
            Model::DallE2,
            Model::Gpt4o2024_11_20,
            Model::O1Mini2024_09_12,
            Model::O1Preview2024_09_12,
            Model::O1Mini,
            Model::O1Preview,
            Model::ChatGpt4oLatest,
            Model::Whisper1,
            Model::DallE3,
            Model::Gpt4Turbo,
            Model::Gpt4TurboPreview,
            Model::Gpt4oAudioPreview,
            Model::Gpt4oAudioPreview2024_10_01,
            Model::Babbage002,
            Model::OmniModerationLatest,
            Model::OmniModeration2024_09_26,
            Model::Tts1Hd1106,
            Model::Gpt4o2024_08_06,
            Model::Gpt4o,
            Model::Gpt4o2024_05_13,
            Model::Tts1Hd,
            Model::Gpt4Turbo2024_04_09,
            Model::Tts1,
            Model::Gpt3_5Turbo16k,
            Model::Tts1_1106,
            Model::Davinci002,
            Model::Gpt3_5Turbo1106,
            Model::Gpt4oMiniRealtimePreview2024_12_17,
            Model::Gpt3_5TurboInstruct,
            Model::Gpt4oRealtimePreview2024_10_01,
            Model::Gpt3_5TurboInstruct0914,
            Model::Gpt3_5Turbo0125,
            Model::Gpt4oAudioPreview2024_12_17,
            Model::Gpt4oRealtimePreview2024_12_17,
            Model::Gpt3_5Turbo,
            Model::TextEmbedding3Large,
            Model::Gpt4oRealtimePreview,
            Model::TextEmbedding3Small,
            Model::Gpt40125Preview,
            Model::Gpt4,
            Model::TextEmbeddingAda002,
            Model::Gpt40106Preview,
            Model::Gpt4oMini,
            Model::Gpt40613,
            Model::Gpt4oMini2024_07_18,
            Model::Gpt41Nano,
            Model::Gpt41Nano2025_04_14,
            Model::Gpt41Mini,
            Model::Gpt41Mini2025_04_14,
            Model::Gpt41,
            Model::Gpt41_2025_04_14,
            Model::Gpt4oMiniSearchPreview,
            Model::Gpt4oMiniSearchPreview2025_03_11,
            Model::Gpt4oSearchPreview,
            Model::Gpt4oSearchPreview2025_03_11,
            Model::Gpt4oMiniTts,
            Model::Gpt4oMiniTranscribe,
            Model::Gpt4oTranscribe,
            Model::GptImage1,
            Model::O12024_12_17,
            Model::O1,
            Model::O1Pro,
            Model::O1Pro2025_03_19,
            Model::O3Mini,
            Model::O3Mini2025_01_31,
            Model::O4Mini,
            Model::O4Mini2025_04_16,
        ];

        for variant in &all_known_variants {
            let string_id = variant.as_str();
            let parsed_variant = Model::from(string_id);
            assert_eq!(
                *variant, parsed_variant,
                "Expected round trip to yield {:?}, but got {:?}",
                variant, parsed_variant
            );
        }

        let unknown_str = "my-awesome-custom-model";
        let other_variant = Model::Other(unknown_str.to_string());
        let from_str = Model::from(other_variant.as_str());
        assert_eq!(
            other_variant, from_str,
            "Custom model string did not properly round-trip."
        );
    }
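
    // An extra illustrative check (a minimal sketch, using the serde_json
    // crate already pulled in above): `Model` serializes to its string id and
    // deserializes back through the same string-based parsing.
    #[test]
    fn test_model_serde_string_round_trip() {
        let json = serde_json::to_string(&Model::Gpt4oMini).unwrap();
        assert_eq!(json, "\"gpt-4o-mini\"");

        let parsed: Model = serde_json::from_str(&json).unwrap();
        assert_eq!(parsed, Model::Gpt4oMini);

        // Unrecognized identifiers survive as `Model::Other`.
        let custom: Model = serde_json::from_str("\"my-custom-model\"").unwrap();
        assert_eq!(custom, Model::Other("my-custom-model".to_string()));
    }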
}