use serde::{Deserialize, Serialize};

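/// How a single request parameter may be used with a given model: any value
/// (`Any`), only one API-enforced value (`FixedValue`), or not at all
/// (`NotSupported`).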
#[derive(Debug, Clone, PartialEq)]
pub enum ParameterRestriction {
    Any,
    FixedValue(f64),
    NotSupported,
}

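/// Which sampling, penalty, and logprob parameters a chat model accepts, and
/// whether it takes a reasoning parameter. Obtained via
/// [`ChatModel::parameter_support`].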
#[derive(Debug, Clone)]
pub struct ParameterSupport {
    pub temperature: ParameterRestriction,
    pub frequency_penalty: ParameterRestriction,
    pub presence_penalty: ParameterRestriction,
    pub logprobs: bool,
    pub top_logprobs: bool,
    pub logit_bias: bool,
    pub n_multiple: bool,
    pub top_p: ParameterRestriction,
    pub reasoning: bool,
}

impl ParameterSupport {
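    /// Parameter support for standard (non-reasoning) chat models: every
    /// parameter is unrestricted and no reasoning parameter is accepted.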
    pub fn standard_model() -> Self {
        Self {
            temperature: ParameterRestriction::Any,
            frequency_penalty: ParameterRestriction::Any,
            presence_penalty: ParameterRestriction::Any,
            logprobs: true,
            top_logprobs: true,
            logit_bias: true,
            n_multiple: true,
            top_p: ParameterRestriction::Any,
            reasoning: false,
        }
    }

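    /// Parameter support for reasoning models: temperature and top_p are fixed
    /// at 1.0, penalties at 0.0, logprobs/logit_bias/multiple completions are
    /// unavailable, and the reasoning parameter is accepted.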
    pub fn reasoning_model() -> Self {
        Self {
            temperature: ParameterRestriction::FixedValue(1.0),
            frequency_penalty: ParameterRestriction::FixedValue(0.0),
            presence_penalty: ParameterRestriction::FixedValue(0.0),
            logprobs: false,
            top_logprobs: false,
            logit_bias: false,
            n_multiple: false,
            top_p: ParameterRestriction::FixedValue(1.0),
            reasoning: true,
        }
    }
}

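/// Known chat completion model identifiers, plus a `Custom` variant that
/// preserves any other model string (for example fine-tuned model IDs).
/// Serializes to and deserializes from the raw model ID string.
///
/// # Examples
///
/// A minimal sketch; `openai_models` is a placeholder for this crate's actual
/// name, so the block is marked `ignore` and is not compiled as a doc test.
///
/// ```ignore
/// use openai_models::ChatModel;
///
/// let model = ChatModel::from("gpt-4o-mini");
/// assert_eq!(model, ChatModel::Gpt4oMini);
/// assert_eq!(model.as_str(), "gpt-4o-mini");
/// assert!(!model.is_reasoning_model());
/// ```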
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum ChatModel {
    #[serde(rename = "gpt-5.2")]
    Gpt5_2,

    #[serde(rename = "gpt-5.2-chat-latest")]
    Gpt5_2ChatLatest,

    #[serde(rename = "gpt-5.2-pro")]
    Gpt5_2Pro,

    #[serde(rename = "gpt-5.1")]
    Gpt5_1,

    #[serde(rename = "gpt-5.1-chat-latest")]
    Gpt5_1ChatLatest,

    #[serde(rename = "gpt-5.1-codex-max")]
    Gpt5_1CodexMax,

    #[serde(rename = "gpt-5-mini")]
    Gpt5Mini,

    #[serde(rename = "gpt-4.1")]
    Gpt4_1,

    #[serde(rename = "gpt-4.1-mini")]
    Gpt4_1Mini,

    #[serde(rename = "gpt-4.1-nano")]
    Gpt4_1Nano,

    #[serde(rename = "gpt-4o")]
    Gpt4o,

    #[serde(rename = "gpt-4o-mini")]
    #[default]
    Gpt4oMini,

    #[serde(rename = "gpt-4o-audio-preview")]
    Gpt4oAudioPreview,

    #[serde(rename = "gpt-4-turbo")]
    Gpt4Turbo,

    #[serde(rename = "gpt-4")]
    Gpt4,

    #[serde(rename = "gpt-3.5-turbo")]
    Gpt3_5Turbo,

    #[serde(rename = "o1")]
    O1,

    #[serde(rename = "o1-pro")]
    O1Pro,

    #[serde(rename = "o3")]
    O3,

    #[serde(rename = "o3-mini")]
    O3Mini,

    #[serde(rename = "o4-mini")]
    O4Mini,

    #[serde(untagged)]
    Custom(String),
}

impl ChatModel {
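    /// Returns the model identifier string sent to the API.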
    pub fn as_str(&self) -> &str {
        match self {
            Self::Gpt5_2 => "gpt-5.2",
            Self::Gpt5_2ChatLatest => "gpt-5.2-chat-latest",
            Self::Gpt5_2Pro => "gpt-5.2-pro",
            Self::Gpt5_1 => "gpt-5.1",
            Self::Gpt5_1ChatLatest => "gpt-5.1-chat-latest",
            Self::Gpt5_1CodexMax => "gpt-5.1-codex-max",
            Self::Gpt5Mini => "gpt-5-mini",
            Self::Gpt4_1 => "gpt-4.1",
            Self::Gpt4_1Mini => "gpt-4.1-mini",
            Self::Gpt4_1Nano => "gpt-4.1-nano",
            Self::Gpt4o => "gpt-4o",
            Self::Gpt4oMini => "gpt-4o-mini",
            Self::Gpt4oAudioPreview => "gpt-4o-audio-preview",
            Self::Gpt4Turbo => "gpt-4-turbo",
            Self::Gpt4 => "gpt-4",
            Self::Gpt3_5Turbo => "gpt-3.5-turbo",
            Self::O1 => "o1",
            Self::O1Pro => "o1-pro",
            Self::O3 => "o3",
            Self::O3Mini => "o3-mini",
            Self::O4Mini => "o4-mini",
            Self::Custom(s) => s.as_str(),
        }
    }

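    /// Returns `true` for GPT-5 and o-series models. Custom model IDs are
    /// treated as reasoning models when they start with `gpt-5`, `o1`, `o3`,
    /// or `o4`.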
    pub fn is_reasoning_model(&self) -> bool {
        matches!(
            self,
            Self::Gpt5_2 | Self::Gpt5_2ChatLatest | Self::Gpt5_2Pro
                | Self::Gpt5_1 | Self::Gpt5_1ChatLatest | Self::Gpt5_1CodexMax
                | Self::Gpt5Mini
                | Self::O1 | Self::O1Pro | Self::O3 | Self::O3Mini | Self::O4Mini
        ) || matches!(
            self,
            Self::Custom(s) if s.starts_with("gpt-5")
                || s.starts_with("o1")
                || s.starts_with("o3")
                || s.starts_with("o4")
        )
    }

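    /// Returns the [`ParameterSupport`] profile for this model: the reasoning
    /// profile when [`ChatModel::is_reasoning_model`] is true, otherwise the
    /// standard profile.
    ///
    /// A minimal sketch (marked `ignore` because the surrounding imports are
    /// omitted):
    ///
    /// ```ignore
    /// let support = ChatModel::O3Mini.parameter_support();
    /// assert_eq!(support.temperature, ParameterRestriction::FixedValue(1.0));
    /// assert!(!support.logit_bias);
    /// ```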
    pub fn parameter_support(&self) -> ParameterSupport {
        if self.is_reasoning_model() {
            ParameterSupport::reasoning_model()
        } else {
            ParameterSupport::standard_model()
        }
    }

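    /// Wraps an arbitrary model ID, such as a fine-tuned model, in
    /// [`ChatModel::Custom`].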
    pub fn custom(model_id: impl Into<String>) -> Self {
        Self::Custom(model_id.into())
    }
}

impl std::fmt::Display for ChatModel {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

impl From<&str> for ChatModel {
    fn from(s: &str) -> Self {
        match s {
            "gpt-5.2" => Self::Gpt5_2,
            "gpt-5.2-chat-latest" => Self::Gpt5_2ChatLatest,
            "gpt-5.2-pro" => Self::Gpt5_2Pro,
            "gpt-5.1" => Self::Gpt5_1,
            "gpt-5.1-chat-latest" => Self::Gpt5_1ChatLatest,
            "gpt-5.1-codex-max" => Self::Gpt5_1CodexMax,
            "gpt-5-mini" => Self::Gpt5Mini,
            "gpt-4.1" => Self::Gpt4_1,
            "gpt-4.1-mini" => Self::Gpt4_1Mini,
            "gpt-4.1-nano" => Self::Gpt4_1Nano,
            "gpt-4o" => Self::Gpt4o,
            "gpt-4o-mini" => Self::Gpt4oMini,
            "gpt-4o-audio-preview" => Self::Gpt4oAudioPreview,
            "gpt-4-turbo" => Self::Gpt4Turbo,
            "gpt-4" => Self::Gpt4,
            "gpt-3.5-turbo" => Self::Gpt3_5Turbo,
            "o1" => Self::O1,
            "o1-pro" => Self::O1Pro,
            "o3" => Self::O3,
            "o3-mini" => Self::O3Mini,
            "o4-mini" => Self::O4Mini,
            other => Self::Custom(other.to_string()),
        }
    }
}

impl From<String> for ChatModel {
    fn from(s: String) -> Self {
        Self::from(s.as_str())
    }
}

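/// Embedding model identifiers. Unlike [`ChatModel`], there is no custom
/// variant; `From<&str>` falls back to
/// [`EmbeddingModel::TextEmbedding3Small`] for unrecognized identifiers.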
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum EmbeddingModel {
    #[serde(rename = "text-embedding-3-small")]
    #[default]
    TextEmbedding3Small,

    #[serde(rename = "text-embedding-3-large")]
    TextEmbedding3Large,

    #[serde(rename = "text-embedding-ada-002")]
    TextEmbeddingAda002,
}

impl EmbeddingModel {
    pub fn as_str(&self) -> &str {
        match self {
            Self::TextEmbedding3Small => "text-embedding-3-small",
            Self::TextEmbedding3Large => "text-embedding-3-large",
            Self::TextEmbeddingAda002 => "text-embedding-ada-002",
        }
    }

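    /// Number of dimensions in the embedding vectors this model produces by
    /// default.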
    pub fn dimensions(&self) -> usize {
        match self {
            Self::TextEmbedding3Small => 1536,
            Self::TextEmbedding3Large => 3072,
            Self::TextEmbeddingAda002 => 1536,
        }
    }
}

impl std::fmt::Display for EmbeddingModel {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

impl From<&str> for EmbeddingModel {
    fn from(s: &str) -> Self {
        match s {
            "text-embedding-3-small" => Self::TextEmbedding3Small,
            "text-embedding-3-large" => Self::TextEmbedding3Large,
            "text-embedding-ada-002" => Self::TextEmbeddingAda002,
            _ => Self::TextEmbedding3Small,
        }
    }
}

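/// Realtime API model identifiers, with a `Custom` variant that preserves any
/// other model string.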
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum RealtimeModel {
    #[serde(rename = "gpt-4o-realtime-preview")]
    #[default]
    Gpt4oRealtimePreview,

    #[serde(rename = "gpt-4o-mini-realtime-preview")]
    Gpt4oMiniRealtimePreview,

    #[serde(untagged)]
    Custom(String),
}

impl RealtimeModel {
    pub fn as_str(&self) -> &str {
        match self {
            Self::Gpt4oRealtimePreview => "gpt-4o-realtime-preview",
            Self::Gpt4oMiniRealtimePreview => "gpt-4o-mini-realtime-preview",
            Self::Custom(s) => s.as_str(),
        }
    }

    pub fn custom(model_id: impl Into<String>) -> Self {
        Self::Custom(model_id.into())
    }
}

impl std::fmt::Display for RealtimeModel {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

impl From<&str> for RealtimeModel {
    fn from(s: &str) -> Self {
        match s {
            "gpt-4o-realtime-preview" => Self::Gpt4oRealtimePreview,
            "gpt-4o-mini-realtime-preview" => Self::Gpt4oMiniRealtimePreview,
            other => Self::Custom(other.to_string()),
        }
    }
}

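/// Dated snapshot model identifiers accepted as base models for fine-tuning
/// jobs.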
#[allow(non_camel_case_types)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum FineTuningModel {
    #[serde(rename = "gpt-4.1-2025-04-14")]
    Gpt41_2025_04_14,

    #[serde(rename = "gpt-4.1-mini-2025-04-14")]
    Gpt41Mini_2025_04_14,

    #[serde(rename = "gpt-4.1-nano-2025-04-14")]
    Gpt41Nano_2025_04_14,

    #[serde(rename = "gpt-4o-mini-2024-07-18")]
    #[default]
    Gpt4oMini_2024_07_18,

    #[serde(rename = "gpt-4o-2024-08-06")]
    Gpt4o_2024_08_06,

    #[serde(rename = "gpt-4-0613")]
    Gpt4_0613,

    #[serde(rename = "gpt-3.5-turbo-0125")]
    Gpt35Turbo_0125,

    #[serde(rename = "gpt-3.5-turbo-1106")]
    Gpt35Turbo_1106,

    #[serde(rename = "gpt-3.5-turbo-0613")]
    Gpt35Turbo_0613,
}

impl FineTuningModel {
    pub fn as_str(&self) -> &str {
        match self {
            Self::Gpt41_2025_04_14 => "gpt-4.1-2025-04-14",
            Self::Gpt41Mini_2025_04_14 => "gpt-4.1-mini-2025-04-14",
            Self::Gpt41Nano_2025_04_14 => "gpt-4.1-nano-2025-04-14",
            Self::Gpt4oMini_2024_07_18 => "gpt-4o-mini-2024-07-18",
            Self::Gpt4o_2024_08_06 => "gpt-4o-2024-08-06",
            Self::Gpt4_0613 => "gpt-4-0613",
            Self::Gpt35Turbo_0125 => "gpt-3.5-turbo-0125",
            Self::Gpt35Turbo_1106 => "gpt-3.5-turbo-1106",
            Self::Gpt35Turbo_0613 => "gpt-3.5-turbo-0613",
        }
    }
}

impl std::fmt::Display for FineTuningModel {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_chat_model_as_str() {
        assert_eq!(ChatModel::Gpt4oMini.as_str(), "gpt-4o-mini");
        assert_eq!(ChatModel::O3Mini.as_str(), "o3-mini");
        assert_eq!(ChatModel::Gpt4_1.as_str(), "gpt-4.1");
        assert_eq!(ChatModel::Gpt5_2.as_str(), "gpt-5.2");
        assert_eq!(ChatModel::Gpt5_2ChatLatest.as_str(), "gpt-5.2-chat-latest");
        assert_eq!(ChatModel::Gpt5_2Pro.as_str(), "gpt-5.2-pro");
        assert_eq!(ChatModel::Gpt5_1.as_str(), "gpt-5.1");
        assert_eq!(ChatModel::Gpt5_1CodexMax.as_str(), "gpt-5.1-codex-max");
        assert_eq!(ChatModel::Gpt5Mini.as_str(), "gpt-5-mini");
    }

    #[test]
    fn test_chat_model_is_reasoning() {
        assert!(ChatModel::O1.is_reasoning_model());
        assert!(ChatModel::O3.is_reasoning_model());
        assert!(ChatModel::O3Mini.is_reasoning_model());
        assert!(ChatModel::O4Mini.is_reasoning_model());
        assert!(ChatModel::Gpt5_2.is_reasoning_model());
        assert!(ChatModel::Gpt5_2ChatLatest.is_reasoning_model());
        assert!(ChatModel::Gpt5_2Pro.is_reasoning_model());
        assert!(ChatModel::Gpt5_1.is_reasoning_model());
        assert!(ChatModel::Gpt5_1CodexMax.is_reasoning_model());
        assert!(ChatModel::Gpt5Mini.is_reasoning_model());
        assert!(!ChatModel::Gpt4oMini.is_reasoning_model());
        assert!(!ChatModel::Gpt4_1.is_reasoning_model());
    }

    #[test]
    fn test_chat_model_from_str() {
        assert_eq!(ChatModel::from("gpt-4o-mini"), ChatModel::Gpt4oMini);
        assert_eq!(ChatModel::from("o3-mini"), ChatModel::O3Mini);
        assert_eq!(ChatModel::from("gpt-5.2"), ChatModel::Gpt5_2);
        assert_eq!(ChatModel::from("gpt-5.2-chat-latest"), ChatModel::Gpt5_2ChatLatest);
        assert_eq!(ChatModel::from("gpt-5.2-pro"), ChatModel::Gpt5_2Pro);
        assert_eq!(ChatModel::from("gpt-5.1"), ChatModel::Gpt5_1);
        assert_eq!(ChatModel::from("gpt-5.1-codex-max"), ChatModel::Gpt5_1CodexMax);
        assert_eq!(ChatModel::from("gpt-5-mini"), ChatModel::Gpt5Mini);
        assert!(matches!(ChatModel::from("unknown-model"), ChatModel::Custom(_)));
    }

    #[test]
    fn test_chat_model_custom() {
        let custom = ChatModel::custom("ft:gpt-4o-mini:org::123");
        assert_eq!(custom.as_str(), "ft:gpt-4o-mini:org::123");
    }

    #[test]
    fn test_chat_model_custom_gpt5_is_reasoning() {
        let custom_gpt5 = ChatModel::custom("gpt-5.3-preview");
        assert!(custom_gpt5.is_reasoning_model());
    }

    #[test]
    fn test_embedding_model_dimensions() {
        assert_eq!(EmbeddingModel::TextEmbedding3Small.dimensions(), 1536);
        assert_eq!(EmbeddingModel::TextEmbedding3Large.dimensions(), 3072);
    }

    #[test]
    fn test_realtime_model_as_str() {
        assert_eq!(RealtimeModel::Gpt4oRealtimePreview.as_str(), "gpt-4o-realtime-preview");
    }

    #[test]
    fn test_fine_tuning_model_as_str() {
        assert_eq!(FineTuningModel::Gpt4oMini_2024_07_18.as_str(), "gpt-4o-mini-2024-07-18");
        assert_eq!(FineTuningModel::Gpt41_2025_04_14.as_str(), "gpt-4.1-2025-04-14");
    }

    #[test]
    fn test_chat_model_serialization() {
        let model = ChatModel::Gpt4oMini;
        let json = serde_json::to_string(&model).unwrap();
        assert_eq!(json, "\"gpt-4o-mini\"");
        let gpt52 = ChatModel::Gpt5_2;
        let json = serde_json::to_string(&gpt52).unwrap();
        assert_eq!(json, "\"gpt-5.2\"");
    }

    #[test]
    fn test_chat_model_deserialization() {
        let model: ChatModel = serde_json::from_str("\"gpt-4o-mini\"").unwrap();
        assert_eq!(model, ChatModel::Gpt4oMini);
        let gpt52: ChatModel = serde_json::from_str("\"gpt-5.2\"").unwrap();
        assert_eq!(gpt52, ChatModel::Gpt5_2);
    }

    #[test]
    fn test_parameter_support_standard_model() {
        let model = ChatModel::Gpt4oMini;
        let support = model.parameter_support();

        assert_eq!(support.temperature, ParameterRestriction::Any);
        assert_eq!(support.frequency_penalty, ParameterRestriction::Any);
        assert_eq!(support.presence_penalty, ParameterRestriction::Any);
        assert_eq!(support.top_p, ParameterRestriction::Any);
        assert!(support.logprobs);
        assert!(support.top_logprobs);
        assert!(support.logit_bias);
        assert!(support.n_multiple);
        assert!(!support.reasoning);
    }

    #[test]
    fn test_parameter_support_reasoning_model() {
        let model = ChatModel::O3Mini;
        let support = model.parameter_support();

        assert_eq!(support.temperature, ParameterRestriction::FixedValue(1.0));
        assert_eq!(support.frequency_penalty, ParameterRestriction::FixedValue(0.0));
        assert_eq!(support.presence_penalty, ParameterRestriction::FixedValue(0.0));
        assert_eq!(support.top_p, ParameterRestriction::FixedValue(1.0));
        assert!(!support.logprobs);
        assert!(!support.top_logprobs);
        assert!(!support.logit_bias);
        assert!(!support.n_multiple);
        assert!(support.reasoning);
    }

    #[test]
    fn test_parameter_support_gpt5_model() {
        let model = ChatModel::Gpt5_2;
        let support = model.parameter_support();

        assert_eq!(support.temperature, ParameterRestriction::FixedValue(1.0));
        assert!(!support.logprobs);
        assert!(support.reasoning);
    }

    #[test]
    fn test_all_o_series_models_are_reasoning() {
        let o_series = vec![ChatModel::O1, ChatModel::O1Pro, ChatModel::O3, ChatModel::O3Mini, ChatModel::O4Mini];

        for model in o_series {
            assert!(model.is_reasoning_model(), "Expected {} to be a reasoning model", model.as_str());
        }
    }

    #[test]
    fn test_all_gpt5_models_are_reasoning() {
        let gpt5_series = vec![
            ChatModel::Gpt5_2,
            ChatModel::Gpt5_2ChatLatest,
            ChatModel::Gpt5_2Pro,
            ChatModel::Gpt5_1,
            ChatModel::Gpt5_1ChatLatest,
            ChatModel::Gpt5_1CodexMax,
            ChatModel::Gpt5Mini,
        ];

        for model in gpt5_series {
            assert!(model.is_reasoning_model(), "Expected {} to be a reasoning model", model.as_str());
        }
    }

    #[test]
    fn test_all_standard_models_are_not_reasoning() {
        let standard_models = vec![
            ChatModel::Gpt4oMini,
            ChatModel::Gpt4o,
            ChatModel::Gpt4oAudioPreview,
            ChatModel::Gpt4Turbo,
            ChatModel::Gpt4,
            ChatModel::Gpt3_5Turbo,
            ChatModel::Gpt4_1,
            ChatModel::Gpt4_1Mini,
            ChatModel::Gpt4_1Nano,
        ];

        for model in standard_models {
            assert!(!model.is_reasoning_model(), "Expected {} to NOT be a reasoning model", model.as_str());
        }
    }

    #[test]
    fn test_custom_o1_models_are_reasoning() {
        let custom_o1_variants = vec!["o1-mini", "o1-preview", "o1-pro-2025", "o1-high"];

        for model_str in custom_o1_variants {
            let model = ChatModel::custom(model_str);
            assert!(model.is_reasoning_model(), "Expected custom model '{}' to be a reasoning model", model_str);
        }
    }

    #[test]
    fn test_custom_o3_models_are_reasoning() {
        let custom_o3_variants = vec!["o3-preview", "o3-high", "o3-2025-01-15"];

        for model_str in custom_o3_variants {
            let model = ChatModel::custom(model_str);
            assert!(model.is_reasoning_model(), "Expected custom model '{}' to be a reasoning model", model_str);
        }
    }

    #[test]
    fn test_custom_o4_models_are_reasoning() {
        let custom_o4_variants = vec!["o4-preview", "o4-mini-2025", "o4-high"];

        for model_str in custom_o4_variants {
            let model = ChatModel::custom(model_str);
            assert!(model.is_reasoning_model(), "Expected custom model '{}' to be a reasoning model", model_str);
        }
    }

    #[test]
    fn test_custom_gpt5_models_are_reasoning() {
        let custom_gpt5_variants = vec!["gpt-5.3", "gpt-5.3-preview", "gpt-5-turbo", "gpt-5.0"];

        for model_str in custom_gpt5_variants {
            let model = ChatModel::custom(model_str);
            assert!(model.is_reasoning_model(), "Expected custom model '{}' to be a reasoning model", model_str);
        }
    }

    #[test]
    fn test_custom_standard_models_are_not_reasoning() {
        let custom_standard_variants = vec![
            "ft:gpt-4o-mini:org::123",
            "gpt-4o-2025-01-15",
            "gpt-4-turbo-preview",
            "gpt-3.5-turbo-instruct",
            "text-davinci-003",
            "claude-3-opus",
        ];

        for model_str in custom_standard_variants {
            let model = ChatModel::custom(model_str);
            assert!(!model.is_reasoning_model(), "Expected custom model '{}' to NOT be a reasoning model", model_str);
        }
    }

    #[test]
    fn test_parameter_support_all_o_series() {
        let o_series = vec![ChatModel::O1, ChatModel::O1Pro, ChatModel::O3, ChatModel::O3Mini, ChatModel::O4Mini];

        for model in o_series {
            let support = model.parameter_support();

            assert_eq!(support.temperature, ParameterRestriction::FixedValue(1.0), "{} should only support temperature=1.0", model.as_str());
            assert_eq!(
                support.frequency_penalty,
                ParameterRestriction::FixedValue(0.0),
                "{} should only support frequency_penalty=0.0",
                model.as_str()
            );
            assert_eq!(
                support.presence_penalty,
                ParameterRestriction::FixedValue(0.0),
                "{} should only support presence_penalty=0.0",
                model.as_str()
            );
            assert_eq!(support.top_p, ParameterRestriction::FixedValue(1.0), "{} should only support top_p=1.0", model.as_str());
            assert!(!support.logprobs, "{} should not support logprobs", model.as_str());
            assert!(!support.top_logprobs, "{} should not support top_logprobs", model.as_str());
            assert!(!support.logit_bias, "{} should not support logit_bias", model.as_str());
            assert!(!support.n_multiple, "{} should only support n=1", model.as_str());
            assert!(support.reasoning, "{} should support reasoning parameter", model.as_str());
        }
    }

    #[test]
    fn test_parameter_support_all_gpt5_series() {
        let gpt5_series = vec![
            ChatModel::Gpt5_2,
            ChatModel::Gpt5_2ChatLatest,
            ChatModel::Gpt5_2Pro,
            ChatModel::Gpt5_1,
            ChatModel::Gpt5_1ChatLatest,
            ChatModel::Gpt5_1CodexMax,
            ChatModel::Gpt5Mini,
        ];

        for model in gpt5_series {
            let support = model.parameter_support();

            assert_eq!(support.temperature, ParameterRestriction::FixedValue(1.0), "{} should only support temperature=1.0", model.as_str());
            assert!(support.reasoning, "{} should support reasoning parameter", model.as_str());
        }
    }

    #[test]
    fn test_parameter_support_all_standard_gpt4_series() {
        let gpt4_series = vec![
            ChatModel::Gpt4oMini,
            ChatModel::Gpt4o,
            ChatModel::Gpt4Turbo,
            ChatModel::Gpt4,
            ChatModel::Gpt4_1,
            ChatModel::Gpt4_1Mini,
            ChatModel::Gpt4_1Nano,
        ];

        for model in gpt4_series {
            let support = model.parameter_support();

            assert_eq!(support.temperature, ParameterRestriction::Any, "{} should support any temperature", model.as_str());
            assert_eq!(support.frequency_penalty, ParameterRestriction::Any, "{} should support any frequency_penalty", model.as_str());
            assert_eq!(support.presence_penalty, ParameterRestriction::Any, "{} should support any presence_penalty", model.as_str());
            assert!(support.logprobs, "{} should support logprobs", model.as_str());
            assert!(support.top_logprobs, "{} should support top_logprobs", model.as_str());
            assert!(support.logit_bias, "{} should support logit_bias", model.as_str());
            assert!(support.n_multiple, "{} should support n > 1", model.as_str());
            assert!(!support.reasoning, "{} should NOT support reasoning parameter", model.as_str());
        }
    }

    #[test]
    fn test_parameter_restriction_equality() {
        assert_eq!(ParameterRestriction::Any, ParameterRestriction::Any);
        assert_eq!(ParameterRestriction::NotSupported, ParameterRestriction::NotSupported);
        assert_eq!(ParameterRestriction::FixedValue(1.0), ParameterRestriction::FixedValue(1.0));

        assert_ne!(ParameterRestriction::Any, ParameterRestriction::NotSupported);
        assert_ne!(ParameterRestriction::FixedValue(1.0), ParameterRestriction::FixedValue(0.0));
    }

    #[test]
    fn test_parameter_support_factory_methods() {
        let standard = ParameterSupport::standard_model();
        assert_eq!(standard.temperature, ParameterRestriction::Any);
        assert!(standard.logprobs);
        assert!(!standard.reasoning);

        let reasoning = ParameterSupport::reasoning_model();
        assert_eq!(reasoning.temperature, ParameterRestriction::FixedValue(1.0));
        assert!(!reasoning.logprobs);
        assert!(reasoning.reasoning);
    }

    #[test]
    fn test_all_gpt5_model_string_roundtrip() {
        let gpt5_models = vec![
            ("gpt-5.2", ChatModel::Gpt5_2),
            ("gpt-5.2-chat-latest", ChatModel::Gpt5_2ChatLatest),
            ("gpt-5.2-pro", ChatModel::Gpt5_2Pro),
            ("gpt-5.1", ChatModel::Gpt5_1),
            ("gpt-5.1-chat-latest", ChatModel::Gpt5_1ChatLatest),
            ("gpt-5.1-codex-max", ChatModel::Gpt5_1CodexMax),
            ("gpt-5-mini", ChatModel::Gpt5Mini),
        ];

        for (model_str, expected_model) in gpt5_models {
            let parsed = ChatModel::from(model_str);
            assert_eq!(parsed, expected_model, "Failed to parse '{}'", model_str);

            assert_eq!(expected_model.as_str(), model_str, "Failed to convert {:?} to string", expected_model);

            let json = serde_json::to_string(&expected_model).unwrap();
            let deserialized: ChatModel = serde_json::from_str(&json).unwrap();
            assert_eq!(deserialized, expected_model, "Serialization roundtrip failed for {}", model_str);
        }
    }

    #[test]
    fn test_all_o_series_model_string_roundtrip() {
        let o_series_models = vec![
            ("o1", ChatModel::O1),
            ("o1-pro", ChatModel::O1Pro),
            ("o3", ChatModel::O3),
            ("o3-mini", ChatModel::O3Mini),
            ("o4-mini", ChatModel::O4Mini),
        ];

        for (model_str, expected_model) in o_series_models {
            let parsed = ChatModel::from(model_str);
            assert_eq!(parsed, expected_model, "Failed to parse '{}'", model_str);
            assert_eq!(expected_model.as_str(), model_str, "Failed to convert {:?} to string", expected_model);
        }
    }

    #[test]
    fn test_embedding_model_string_roundtrip() {
        let embedding_models = vec![
            ("text-embedding-3-small", EmbeddingModel::TextEmbedding3Small),
            ("text-embedding-3-large", EmbeddingModel::TextEmbedding3Large),
            ("text-embedding-ada-002", EmbeddingModel::TextEmbeddingAda002),
        ];

        for (model_str, expected_model) in embedding_models {
            let parsed = EmbeddingModel::from(model_str);
            assert_eq!(parsed, expected_model, "Failed to parse '{}'", model_str);
            assert_eq!(expected_model.as_str(), model_str, "Failed to convert {:?} to string", expected_model);
        }
    }

    #[test]
    fn test_embedding_model_all_dimensions() {
        assert_eq!(EmbeddingModel::TextEmbedding3Small.dimensions(), 1536);
        assert_eq!(EmbeddingModel::TextEmbedding3Large.dimensions(), 3072);
        assert_eq!(EmbeddingModel::TextEmbeddingAda002.dimensions(), 1536);
    }

    #[test]
    fn test_realtime_model_string_roundtrip() {
        let realtime_models = vec![
            ("gpt-4o-realtime-preview", RealtimeModel::Gpt4oRealtimePreview),
            ("gpt-4o-mini-realtime-preview", RealtimeModel::Gpt4oMiniRealtimePreview),
        ];

        for (model_str, expected_model) in realtime_models {
            let parsed = RealtimeModel::from(model_str);
            assert_eq!(parsed, expected_model, "Failed to parse '{}'", model_str);
            assert_eq!(expected_model.as_str(), model_str, "Failed to convert {:?} to string", expected_model);
        }
    }

    #[test]
    fn test_realtime_model_custom() {
        let custom = RealtimeModel::custom("gpt-4o-realtime-2025");
        assert_eq!(custom.as_str(), "gpt-4o-realtime-2025");
        assert!(matches!(custom, RealtimeModel::Custom(_)));
    }

    #[test]
    fn test_fine_tuning_model_as_str_all_variants() {
        let fine_tuning_models = vec![
            ("gpt-4.1-2025-04-14", FineTuningModel::Gpt41_2025_04_14),
            ("gpt-4.1-mini-2025-04-14", FineTuningModel::Gpt41Mini_2025_04_14),
            ("gpt-4.1-nano-2025-04-14", FineTuningModel::Gpt41Nano_2025_04_14),
            ("gpt-4o-mini-2024-07-18", FineTuningModel::Gpt4oMini_2024_07_18),
            ("gpt-4o-2024-08-06", FineTuningModel::Gpt4o_2024_08_06),
            ("gpt-4-0613", FineTuningModel::Gpt4_0613),
            ("gpt-3.5-turbo-0125", FineTuningModel::Gpt35Turbo_0125),
            ("gpt-3.5-turbo-1106", FineTuningModel::Gpt35Turbo_1106),
            ("gpt-3.5-turbo-0613", FineTuningModel::Gpt35Turbo_0613),
        ];

        for (model_str, expected_model) in fine_tuning_models {
            assert_eq!(expected_model.as_str(), model_str, "Failed to convert {:?} to string", expected_model);
        }
    }

    #[test]
    fn test_fine_tuning_model_serialization_roundtrip() {
        let models = vec![FineTuningModel::Gpt41_2025_04_14, FineTuningModel::Gpt4oMini_2024_07_18, FineTuningModel::Gpt35Turbo_0125];

        for model in models {
            let json = serde_json::to_string(&model).unwrap();
            let deserialized: FineTuningModel = serde_json::from_str(&json).unwrap();
            assert_eq!(deserialized, model, "Serialization roundtrip failed for {:?}", model);
        }
    }
}