use serde::{Deserialize, Serialize};
/// Restriction on a single request parameter (e.g. `temperature`) for a
/// given model.
///
/// `Eq` cannot be derived because `FixedValue` carries an `f64`.
#[derive(Debug, Clone, PartialEq)]
pub enum ParameterRestriction {
    /// The parameter accepts any valid value.
    Any,
    /// The parameter is only accepted at exactly this value
    /// (e.g. `temperature = 1.0` on reasoning models).
    FixedValue(f64),
    /// The parameter is not supported by the model.
    NotSupported,
}
/// Per-model description of which chat-completion request parameters the
/// model accepts, and with what restrictions.
///
/// Built via [`ParameterSupport::standard_model`] or
/// [`ParameterSupport::reasoning_model`]; see [`ChatModel::parameter_support`].
#[derive(Debug, Clone)]
pub struct ParameterSupport {
    /// Restriction on the `temperature` sampling parameter.
    pub temperature: ParameterRestriction,
    /// Restriction on the `frequency_penalty` parameter.
    pub frequency_penalty: ParameterRestriction,
    /// Restriction on the `presence_penalty` parameter.
    pub presence_penalty: ParameterRestriction,
    /// Whether the model supports `logprobs`.
    pub logprobs: bool,
    /// Whether the model supports `top_logprobs`.
    pub top_logprobs: bool,
    /// Whether the model supports `logit_bias`.
    pub logit_bias: bool,
    /// Whether the model supports requesting multiple completions (`n > 1`).
    pub n_multiple: bool,
    /// Restriction on the `top_p` nucleus-sampling parameter.
    pub top_p: ParameterRestriction,
    /// Whether the model accepts reasoning-specific parameters.
    pub reasoning: bool,
}
impl ParameterSupport {
    /// Support profile for conventional chat models: every sampling knob is
    /// freely tunable and no reasoning-specific parameters are accepted.
    pub fn standard_model() -> Self {
        Self {
            temperature: ParameterRestriction::Any,
            top_p: ParameterRestriction::Any,
            frequency_penalty: ParameterRestriction::Any,
            presence_penalty: ParameterRestriction::Any,
            logprobs: true,
            top_logprobs: true,
            logit_bias: true,
            n_multiple: true,
            reasoning: false,
        }
    }

    /// Support profile for reasoning models: sampling parameters are pinned
    /// to their API defaults (`temperature`/`top_p` at 1.0, penalties at
    /// 0.0), the logprob/bias/multi-completion features are unavailable, and
    /// reasoning-specific parameters are accepted.
    pub fn reasoning_model() -> Self {
        Self {
            temperature: ParameterRestriction::FixedValue(1.0),
            top_p: ParameterRestriction::FixedValue(1.0),
            frequency_penalty: ParameterRestriction::FixedValue(0.0),
            presence_penalty: ParameterRestriction::FixedValue(0.0),
            logprobs: false,
            top_logprobs: false,
            logit_bias: false,
            n_multiple: false,
            reasoning: true,
        }
    }
}
/// Chat-completion model identifier.
///
/// Known models are explicit variants serialized via their
/// `#[serde(rename = ...)]` string; any other identifier round-trips through
/// [`ChatModel::Custom`], whose `#[serde(untagged)]` attribute makes it the
/// deserialization fallback for unrecognized strings.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum ChatModel {
    #[serde(rename = "gpt-5.2")]
    Gpt5_2,
    #[serde(rename = "gpt-5.2-chat-latest")]
    Gpt5_2ChatLatest,
    #[serde(rename = "gpt-5.2-pro")]
    Gpt5_2Pro,
    #[serde(rename = "gpt-5.1")]
    Gpt5_1,
    #[serde(rename = "gpt-5.1-chat-latest")]
    Gpt5_1ChatLatest,
    #[serde(rename = "gpt-5.1-codex-max")]
    Gpt5_1CodexMax,
    #[serde(rename = "gpt-5-mini")]
    Gpt5Mini,
    #[serde(rename = "gpt-4.1")]
    Gpt4_1,
    #[serde(rename = "gpt-4.1-mini")]
    Gpt4_1Mini,
    #[serde(rename = "gpt-4.1-nano")]
    Gpt4_1Nano,
    #[serde(rename = "gpt-4o")]
    Gpt4o,
    /// Default model when none is specified.
    #[serde(rename = "gpt-4o-mini")]
    #[default]
    Gpt4oMini,
    #[serde(rename = "gpt-4o-audio-preview")]
    Gpt4oAudioPreview,
    #[serde(rename = "gpt-4-turbo")]
    Gpt4Turbo,
    #[serde(rename = "gpt-4")]
    Gpt4,
    #[serde(rename = "gpt-3.5-turbo")]
    Gpt3_5Turbo,
    #[serde(rename = "o1")]
    O1,
    #[serde(rename = "o1-pro")]
    O1Pro,
    #[serde(rename = "o3")]
    O3,
    #[serde(rename = "o3-mini")]
    O3Mini,
    #[serde(rename = "o4-mini")]
    O4Mini,
    /// Any model id not listed above (e.g. a fine-tuned model such as
    /// `ft:gpt-4o-mini:org::123`).
    #[serde(untagged)]
    Custom(String),
}
impl ChatModel {
    /// Returns the canonical API identifier for this model.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Gpt5_2 => "gpt-5.2",
            Self::Gpt5_2ChatLatest => "gpt-5.2-chat-latest",
            Self::Gpt5_2Pro => "gpt-5.2-pro",
            Self::Gpt5_1 => "gpt-5.1",
            Self::Gpt5_1ChatLatest => "gpt-5.1-chat-latest",
            Self::Gpt5_1CodexMax => "gpt-5.1-codex-max",
            Self::Gpt5Mini => "gpt-5-mini",
            Self::Gpt4_1 => "gpt-4.1",
            Self::Gpt4_1Mini => "gpt-4.1-mini",
            Self::Gpt4_1Nano => "gpt-4.1-nano",
            Self::Gpt4o => "gpt-4o",
            Self::Gpt4oMini => "gpt-4o-mini",
            Self::Gpt4oAudioPreview => "gpt-4o-audio-preview",
            Self::Gpt4Turbo => "gpt-4-turbo",
            Self::Gpt4 => "gpt-4",
            Self::Gpt3_5Turbo => "gpt-3.5-turbo",
            Self::O1 => "o1",
            Self::O1Pro => "o1-pro",
            Self::O3 => "o3",
            Self::O3Mini => "o3-mini",
            Self::O4Mini => "o4-mini",
            Self::Custom(id) => id.as_str(),
        }
    }

    /// Returns `true` when this model belongs to a reasoning family
    /// (GPT-5.x or the o-series), including custom identifiers from those
    /// families.
    pub fn is_reasoning_model(&self) -> bool {
        // Every reasoning-family identifier — built-in variant or custom —
        // starts with one of these prefixes, and no standard model's
        // identifier does (gpt-4*, gpt-3.5*, etc.).
        const REASONING_PREFIXES: [&str; 4] = ["gpt-5", "o1", "o3", "o4"];
        let id = self.as_str();
        REASONING_PREFIXES.iter().any(|prefix| id.starts_with(prefix))
    }

    /// Returns the parameter restrictions appropriate for this model.
    pub fn parameter_support(&self) -> ParameterSupport {
        if self.is_reasoning_model() {
            return ParameterSupport::reasoning_model();
        }
        ParameterSupport::standard_model()
    }

    /// Wraps an arbitrary model identifier (e.g. a fine-tuned model id).
    pub fn custom(model_id: impl Into<String>) -> Self {
        Self::Custom(model_id.into())
    }
}
impl std::fmt::Display for ChatModel {
    /// Formats as the canonical API identifier (same as [`ChatModel::as_str`]).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
impl From<&str> for ChatModel {
    /// Parses a model identifier string.
    ///
    /// Recognized identifiers map to their dedicated variants; anything else
    /// becomes [`ChatModel::Custom`], so this conversion is infallible.
    fn from(s: &str) -> Self {
        match s {
            "gpt-5.2" => Self::Gpt5_2,
            "gpt-5.2-chat-latest" => Self::Gpt5_2ChatLatest,
            "gpt-5.2-pro" => Self::Gpt5_2Pro,
            "gpt-5.1" => Self::Gpt5_1,
            "gpt-5.1-chat-latest" => Self::Gpt5_1ChatLatest,
            "gpt-5.1-codex-max" => Self::Gpt5_1CodexMax,
            "gpt-5-mini" => Self::Gpt5Mini,
            "gpt-4.1" => Self::Gpt4_1,
            "gpt-4.1-mini" => Self::Gpt4_1Mini,
            "gpt-4.1-nano" => Self::Gpt4_1Nano,
            "gpt-4o" => Self::Gpt4o,
            "gpt-4o-mini" => Self::Gpt4oMini,
            "gpt-4o-audio-preview" => Self::Gpt4oAudioPreview,
            "gpt-4-turbo" => Self::Gpt4Turbo,
            "gpt-4" => Self::Gpt4,
            "gpt-3.5-turbo" => Self::Gpt3_5Turbo,
            "o1" => Self::O1,
            "o1-pro" => Self::O1Pro,
            "o3" => Self::O3,
            "o3-mini" => Self::O3Mini,
            "o4-mini" => Self::O4Mini,
            // Unknown identifiers (fine-tunes, previews, etc.) are preserved
            // verbatim rather than rejected.
            other => Self::Custom(other.to_string()),
        }
    }
}
impl From<String> for ChatModel {
    /// Converts an owned identifier by delegating to the `&str` parser.
    fn from(s: String) -> Self {
        s.as_str().into()
    }
}
/// Embedding model identifier.
///
/// Unlike [`ChatModel`] there is no `Custom` variant: only the models listed
/// here are representable, and unknown strings fall back to the default in
/// the `From<&str>` impl below.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum EmbeddingModel {
    /// Default embedding model (1536 dimensions).
    #[serde(rename = "text-embedding-3-small")]
    #[default]
    TextEmbedding3Small,
    /// Larger embedding model (3072 dimensions).
    #[serde(rename = "text-embedding-3-large")]
    TextEmbedding3Large,
    /// Legacy ada embedding model (1536 dimensions).
    #[serde(rename = "text-embedding-ada-002")]
    TextEmbeddingAda002,
}
impl EmbeddingModel {
    /// Returns the canonical API identifier for this model.
    ///
    /// The return type is generalized to `&'static str`: every variant maps
    /// to a string literal, so the result does not need to borrow `self`.
    /// Callers expecting `&str` are unaffected (`&'static str` coerces).
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::TextEmbedding3Small => "text-embedding-3-small",
            Self::TextEmbedding3Large => "text-embedding-3-large",
            Self::TextEmbeddingAda002 => "text-embedding-ada-002",
        }
    }

    /// Returns the dimensionality of the vectors this model produces.
    pub fn dimensions(&self) -> usize {
        match self {
            // small and ada-002 share the 1536-dimension output size.
            Self::TextEmbedding3Small | Self::TextEmbeddingAda002 => 1536,
            Self::TextEmbedding3Large => 3072,
        }
    }
}
impl std::fmt::Display for EmbeddingModel {
    /// Formats as the canonical API identifier (same as [`EmbeddingModel::as_str`]).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
impl From<&str> for EmbeddingModel {
    /// Parses an embedding model identifier.
    ///
    /// NOTE(review): unrecognized identifiers silently fall back to
    /// `TextEmbedding3Small` (the default) rather than erroring — confirm
    /// this lossy behavior is intended by callers.
    fn from(s: &str) -> Self {
        match s {
            "text-embedding-3-large" => Self::TextEmbedding3Large,
            "text-embedding-ada-002" => Self::TextEmbeddingAda002,
            // The default model doubles as the fallback, so "text-embedding-
            // 3-small" and any unknown id both land here.
            _ => Self::TextEmbedding3Small,
        }
    }
}
/// Realtime (speech/streaming) model identifier.
// Variant names embed snapshot dates, hence the naming-lint allowance.
#[allow(non_camel_case_types)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum RealtimeModel {
    /// Default realtime model snapshot.
    #[serde(rename = "gpt-realtime-2025-08-28")]
    #[default]
    GptRealtime_2025_08_28,
    /// Any other realtime model id; `#[serde(untagged)]` makes this the
    /// deserialization fallback for unrecognized strings.
    #[serde(untagged)]
    Custom(String),
}
impl RealtimeModel {
    /// Returns the canonical API identifier for this model.
    pub fn as_str(&self) -> &str {
        match self {
            Self::Custom(id) => id.as_str(),
            Self::GptRealtime_2025_08_28 => "gpt-realtime-2025-08-28",
        }
    }

    /// Wraps an arbitrary realtime model identifier.
    pub fn custom(model_id: impl Into<String>) -> Self {
        Self::Custom(model_id.into())
    }
}
impl std::fmt::Display for RealtimeModel {
    /// Formats as the canonical API identifier (same as [`RealtimeModel::as_str`]).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
impl From<&str> for RealtimeModel {
    /// Parses a realtime model identifier; unknown ids are preserved
    /// verbatim in [`RealtimeModel::Custom`], so this is infallible.
    fn from(s: &str) -> Self {
        if s == "gpt-realtime-2025-08-28" {
            Self::GptRealtime_2025_08_28
        } else {
            Self::Custom(s.to_string())
        }
    }
}
/// Base models accepted by the fine-tuning API.
///
/// A closed set (no `Custom` variant): only dated snapshots listed here can
/// be named as fine-tuning bases.
// Variant names embed snapshot dates, hence the naming-lint allowance.
#[allow(non_camel_case_types)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
pub enum FineTuningModel {
    #[serde(rename = "gpt-4.1-2025-04-14")]
    Gpt41_2025_04_14,
    #[serde(rename = "gpt-4.1-mini-2025-04-14")]
    Gpt41Mini_2025_04_14,
    #[serde(rename = "gpt-4.1-nano-2025-04-14")]
    Gpt41Nano_2025_04_14,
    /// Default fine-tuning base model.
    #[serde(rename = "gpt-4o-mini-2024-07-18")]
    #[default]
    Gpt4oMini_2024_07_18,
    #[serde(rename = "gpt-4o-2024-08-06")]
    Gpt4o_2024_08_06,
    #[serde(rename = "gpt-4-0613")]
    Gpt4_0613,
    #[serde(rename = "gpt-3.5-turbo-0125")]
    Gpt35Turbo_0125,
    #[serde(rename = "gpt-3.5-turbo-1106")]
    Gpt35Turbo_1106,
    #[serde(rename = "gpt-3.5-turbo-0613")]
    Gpt35Turbo_0613,
}
impl FineTuningModel {
    /// Returns the canonical API identifier for this model.
    ///
    /// The return type is generalized to `&'static str`: every variant maps
    /// to a string literal, so the result does not need to borrow `self`.
    /// Callers expecting `&str` are unaffected (`&'static str` coerces).
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Gpt41_2025_04_14 => "gpt-4.1-2025-04-14",
            Self::Gpt41Mini_2025_04_14 => "gpt-4.1-mini-2025-04-14",
            Self::Gpt41Nano_2025_04_14 => "gpt-4.1-nano-2025-04-14",
            Self::Gpt4oMini_2024_07_18 => "gpt-4o-mini-2024-07-18",
            Self::Gpt4o_2024_08_06 => "gpt-4o-2024-08-06",
            Self::Gpt4_0613 => "gpt-4-0613",
            Self::Gpt35Turbo_0125 => "gpt-3.5-turbo-0125",
            Self::Gpt35Turbo_1106 => "gpt-3.5-turbo-1106",
            Self::Gpt35Turbo_0613 => "gpt-3.5-turbo-0613",
        }
    }
}
impl std::fmt::Display for FineTuningModel {
    /// Formats as the canonical API identifier (same as [`FineTuningModel::as_str`]).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Fixture lists that are only iterated are plain arrays instead of
    // `vec![...]` (clippy::useless_vec — no heap allocation needed).

    // ---- string conversion ------------------------------------------------

    #[test]
    fn test_chat_model_as_str() {
        assert_eq!(ChatModel::Gpt4oMini.as_str(), "gpt-4o-mini");
        assert_eq!(ChatModel::O3Mini.as_str(), "o3-mini");
        assert_eq!(ChatModel::Gpt4_1.as_str(), "gpt-4.1");
        assert_eq!(ChatModel::Gpt5_2.as_str(), "gpt-5.2");
        assert_eq!(ChatModel::Gpt5_2ChatLatest.as_str(), "gpt-5.2-chat-latest");
        assert_eq!(ChatModel::Gpt5_2Pro.as_str(), "gpt-5.2-pro");
        assert_eq!(ChatModel::Gpt5_1.as_str(), "gpt-5.1");
        assert_eq!(ChatModel::Gpt5_1CodexMax.as_str(), "gpt-5.1-codex-max");
        assert_eq!(ChatModel::Gpt5Mini.as_str(), "gpt-5-mini");
    }

    #[test]
    fn test_chat_model_is_reasoning() {
        assert!(ChatModel::O1.is_reasoning_model());
        assert!(ChatModel::O3.is_reasoning_model());
        assert!(ChatModel::O3Mini.is_reasoning_model());
        assert!(ChatModel::O4Mini.is_reasoning_model());
        assert!(ChatModel::Gpt5_2.is_reasoning_model());
        assert!(ChatModel::Gpt5_2ChatLatest.is_reasoning_model());
        assert!(ChatModel::Gpt5_2Pro.is_reasoning_model());
        assert!(ChatModel::Gpt5_1.is_reasoning_model());
        assert!(ChatModel::Gpt5_1CodexMax.is_reasoning_model());
        assert!(ChatModel::Gpt5Mini.is_reasoning_model());
        assert!(!ChatModel::Gpt4oMini.is_reasoning_model());
        assert!(!ChatModel::Gpt4_1.is_reasoning_model());
    }

    #[test]
    fn test_chat_model_from_str() {
        assert_eq!(ChatModel::from("gpt-4o-mini"), ChatModel::Gpt4oMini);
        assert_eq!(ChatModel::from("o3-mini"), ChatModel::O3Mini);
        assert_eq!(ChatModel::from("gpt-5.2"), ChatModel::Gpt5_2);
        assert_eq!(ChatModel::from("gpt-5.2-chat-latest"), ChatModel::Gpt5_2ChatLatest);
        assert_eq!(ChatModel::from("gpt-5.2-pro"), ChatModel::Gpt5_2Pro);
        assert_eq!(ChatModel::from("gpt-5.1"), ChatModel::Gpt5_1);
        assert_eq!(ChatModel::from("gpt-5.1-codex-max"), ChatModel::Gpt5_1CodexMax);
        assert_eq!(ChatModel::from("gpt-5-mini"), ChatModel::Gpt5Mini);
        assert!(matches!(ChatModel::from("unknown-model"), ChatModel::Custom(_)));
    }

    #[test]
    fn test_chat_model_custom() {
        let custom = ChatModel::custom("ft:gpt-4o-mini:org::123");
        assert_eq!(custom.as_str(), "ft:gpt-4o-mini:org::123");
    }

    #[test]
    fn test_chat_model_custom_gpt5_is_reasoning() {
        let custom_gpt5 = ChatModel::custom("gpt-5.3-preview");
        assert!(custom_gpt5.is_reasoning_model());
    }

    #[test]
    fn test_embedding_model_dimensions() {
        assert_eq!(EmbeddingModel::TextEmbedding3Small.dimensions(), 1536);
        assert_eq!(EmbeddingModel::TextEmbedding3Large.dimensions(), 3072);
    }

    #[test]
    fn test_realtime_model_as_str() {
        assert_eq!(RealtimeModel::GptRealtime_2025_08_28.as_str(), "gpt-realtime-2025-08-28");
    }

    #[test]
    fn test_fine_tuning_model_as_str() {
        assert_eq!(FineTuningModel::Gpt4oMini_2024_07_18.as_str(), "gpt-4o-mini-2024-07-18");
        assert_eq!(FineTuningModel::Gpt41_2025_04_14.as_str(), "gpt-4.1-2025-04-14");
    }

    // ---- serde ------------------------------------------------------------

    #[test]
    fn test_chat_model_serialization() {
        let model = ChatModel::Gpt4oMini;
        let json = serde_json::to_string(&model).unwrap();
        assert_eq!(json, "\"gpt-4o-mini\"");
        let gpt52 = ChatModel::Gpt5_2;
        let json = serde_json::to_string(&gpt52).unwrap();
        assert_eq!(json, "\"gpt-5.2\"");
    }

    #[test]
    fn test_chat_model_deserialization() {
        let model: ChatModel = serde_json::from_str("\"gpt-4o-mini\"").unwrap();
        assert_eq!(model, ChatModel::Gpt4oMini);
        let gpt52: ChatModel = serde_json::from_str("\"gpt-5.2\"").unwrap();
        assert_eq!(gpt52, ChatModel::Gpt5_2);
    }

    // ---- parameter support ------------------------------------------------

    #[test]
    fn test_parameter_support_standard_model() {
        let model = ChatModel::Gpt4oMini;
        let support = model.parameter_support();
        assert_eq!(support.temperature, ParameterRestriction::Any);
        assert_eq!(support.frequency_penalty, ParameterRestriction::Any);
        assert_eq!(support.presence_penalty, ParameterRestriction::Any);
        assert_eq!(support.top_p, ParameterRestriction::Any);
        assert!(support.logprobs);
        assert!(support.top_logprobs);
        assert!(support.logit_bias);
        assert!(support.n_multiple);
        assert!(!support.reasoning);
    }

    #[test]
    fn test_parameter_support_reasoning_model() {
        let model = ChatModel::O3Mini;
        let support = model.parameter_support();
        assert_eq!(support.temperature, ParameterRestriction::FixedValue(1.0));
        assert_eq!(support.frequency_penalty, ParameterRestriction::FixedValue(0.0));
        assert_eq!(support.presence_penalty, ParameterRestriction::FixedValue(0.0));
        assert_eq!(support.top_p, ParameterRestriction::FixedValue(1.0));
        assert!(!support.logprobs);
        assert!(!support.top_logprobs);
        assert!(!support.logit_bias);
        assert!(!support.n_multiple);
        assert!(support.reasoning);
    }

    #[test]
    fn test_parameter_support_gpt5_model() {
        let model = ChatModel::Gpt5_2;
        let support = model.parameter_support();
        assert_eq!(support.temperature, ParameterRestriction::FixedValue(1.0));
        assert!(!support.logprobs);
        assert!(support.reasoning);
    }

    #[test]
    fn test_all_o_series_models_are_reasoning() {
        let o_series = [ChatModel::O1, ChatModel::O1Pro, ChatModel::O3, ChatModel::O3Mini, ChatModel::O4Mini];
        for model in o_series {
            assert!(model.is_reasoning_model(), "Expected {} to be a reasoning model", model.as_str());
        }
    }

    #[test]
    fn test_all_gpt5_models_are_reasoning() {
        let gpt5_series = [
            ChatModel::Gpt5_2,
            ChatModel::Gpt5_2ChatLatest,
            ChatModel::Gpt5_2Pro,
            ChatModel::Gpt5_1,
            ChatModel::Gpt5_1ChatLatest,
            ChatModel::Gpt5_1CodexMax,
            ChatModel::Gpt5Mini,
        ];
        for model in gpt5_series {
            assert!(model.is_reasoning_model(), "Expected {} to be a reasoning model", model.as_str());
        }
    }

    #[test]
    fn test_all_standard_models_are_not_reasoning() {
        let standard_models = [
            ChatModel::Gpt4oMini,
            ChatModel::Gpt4o,
            ChatModel::Gpt4oAudioPreview,
            ChatModel::Gpt4Turbo,
            ChatModel::Gpt4,
            ChatModel::Gpt3_5Turbo,
            ChatModel::Gpt4_1,
            ChatModel::Gpt4_1Mini,
            ChatModel::Gpt4_1Nano,
        ];
        for model in standard_models {
            assert!(!model.is_reasoning_model(), "Expected {} to NOT be a reasoning model", model.as_str());
        }
    }

    #[test]
    fn test_custom_o1_models_are_reasoning() {
        let custom_o1_variants = ["o1-mini", "o1-preview", "o1-pro-2025", "o1-high"];
        for model_str in custom_o1_variants {
            let model = ChatModel::custom(model_str);
            assert!(model.is_reasoning_model(), "Expected custom model '{}' to be a reasoning model", model_str);
        }
    }

    #[test]
    fn test_custom_o3_models_are_reasoning() {
        let custom_o3_variants = ["o3-preview", "o3-high", "o3-2025-01-15"];
        for model_str in custom_o3_variants {
            let model = ChatModel::custom(model_str);
            assert!(model.is_reasoning_model(), "Expected custom model '{}' to be a reasoning model", model_str);
        }
    }

    #[test]
    fn test_custom_o4_models_are_reasoning() {
        let custom_o4_variants = ["o4-preview", "o4-mini-2025", "o4-high"];
        for model_str in custom_o4_variants {
            let model = ChatModel::custom(model_str);
            assert!(model.is_reasoning_model(), "Expected custom model '{}' to be a reasoning model", model_str);
        }
    }

    #[test]
    fn test_custom_gpt5_models_are_reasoning() {
        let custom_gpt5_variants = ["gpt-5.3", "gpt-5.3-preview", "gpt-5-turbo", "gpt-5.0"];
        for model_str in custom_gpt5_variants {
            let model = ChatModel::custom(model_str);
            assert!(model.is_reasoning_model(), "Expected custom model '{}' to be a reasoning model", model_str);
        }
    }

    #[test]
    fn test_custom_standard_models_are_not_reasoning() {
        let custom_standard_variants = [
            "ft:gpt-4o-mini:org::123",
            "gpt-4o-2025-01-15",
            "gpt-4-turbo-preview",
            "gpt-3.5-turbo-instruct",
            "text-davinci-003",
            "claude-3-opus",
        ];
        for model_str in custom_standard_variants {
            let model = ChatModel::custom(model_str);
            assert!(!model.is_reasoning_model(), "Expected custom model '{}' to NOT be a reasoning model", model_str);
        }
    }

    #[test]
    fn test_parameter_support_all_o_series() {
        let o_series = [ChatModel::O1, ChatModel::O1Pro, ChatModel::O3, ChatModel::O3Mini, ChatModel::O4Mini];
        for model in o_series {
            let support = model.parameter_support();
            assert_eq!(support.temperature, ParameterRestriction::FixedValue(1.0), "{} should only support temperature=1.0", model.as_str());
            assert_eq!(
                support.frequency_penalty,
                ParameterRestriction::FixedValue(0.0),
                "{} should only support frequency_penalty=0.0",
                model.as_str()
            );
            assert_eq!(
                support.presence_penalty,
                ParameterRestriction::FixedValue(0.0),
                "{} should only support presence_penalty=0.0",
                model.as_str()
            );
            assert_eq!(support.top_p, ParameterRestriction::FixedValue(1.0), "{} should only support top_p=1.0", model.as_str());
            assert!(!support.logprobs, "{} should not support logprobs", model.as_str());
            assert!(!support.top_logprobs, "{} should not support top_logprobs", model.as_str());
            assert!(!support.logit_bias, "{} should not support logit_bias", model.as_str());
            assert!(!support.n_multiple, "{} should only support n=1", model.as_str());
            assert!(support.reasoning, "{} should support reasoning parameter", model.as_str());
        }
    }

    #[test]
    fn test_parameter_support_all_gpt5_series() {
        let gpt5_series = [
            ChatModel::Gpt5_2,
            ChatModel::Gpt5_2ChatLatest,
            ChatModel::Gpt5_2Pro,
            ChatModel::Gpt5_1,
            ChatModel::Gpt5_1ChatLatest,
            ChatModel::Gpt5_1CodexMax,
            ChatModel::Gpt5Mini,
        ];
        for model in gpt5_series {
            let support = model.parameter_support();
            assert_eq!(support.temperature, ParameterRestriction::FixedValue(1.0), "{} should only support temperature=1.0", model.as_str());
            assert!(support.reasoning, "{} should support reasoning parameter", model.as_str());
        }
    }

    #[test]
    fn test_parameter_support_all_standard_gpt4_series() {
        let gpt4_series = [
            ChatModel::Gpt4oMini,
            ChatModel::Gpt4o,
            ChatModel::Gpt4Turbo,
            ChatModel::Gpt4,
            ChatModel::Gpt4_1,
            ChatModel::Gpt4_1Mini,
            ChatModel::Gpt4_1Nano,
        ];
        for model in gpt4_series {
            let support = model.parameter_support();
            assert_eq!(support.temperature, ParameterRestriction::Any, "{} should support any temperature", model.as_str());
            assert_eq!(support.frequency_penalty, ParameterRestriction::Any, "{} should support any frequency_penalty", model.as_str());
            assert_eq!(support.presence_penalty, ParameterRestriction::Any, "{} should support any presence_penalty", model.as_str());
            assert!(support.logprobs, "{} should support logprobs", model.as_str());
            assert!(support.top_logprobs, "{} should support top_logprobs", model.as_str());
            assert!(support.logit_bias, "{} should support logit_bias", model.as_str());
            assert!(support.n_multiple, "{} should support n > 1", model.as_str());
            assert!(!support.reasoning, "{} should NOT support reasoning parameter", model.as_str());
        }
    }

    #[test]
    fn test_parameter_restriction_equality() {
        assert_eq!(ParameterRestriction::Any, ParameterRestriction::Any);
        assert_eq!(ParameterRestriction::NotSupported, ParameterRestriction::NotSupported);
        assert_eq!(ParameterRestriction::FixedValue(1.0), ParameterRestriction::FixedValue(1.0));
        assert_ne!(ParameterRestriction::Any, ParameterRestriction::NotSupported);
        assert_ne!(ParameterRestriction::FixedValue(1.0), ParameterRestriction::FixedValue(0.0));
    }

    #[test]
    fn test_parameter_support_factory_methods() {
        let standard = ParameterSupport::standard_model();
        assert_eq!(standard.temperature, ParameterRestriction::Any);
        assert!(standard.logprobs);
        assert!(!standard.reasoning);
        let reasoning = ParameterSupport::reasoning_model();
        assert_eq!(reasoning.temperature, ParameterRestriction::FixedValue(1.0));
        assert!(!reasoning.logprobs);
        assert!(reasoning.reasoning);
    }

    // ---- roundtrips -------------------------------------------------------

    #[test]
    fn test_all_gpt5_model_string_roundtrip() {
        let gpt5_models = [
            ("gpt-5.2", ChatModel::Gpt5_2),
            ("gpt-5.2-chat-latest", ChatModel::Gpt5_2ChatLatest),
            ("gpt-5.2-pro", ChatModel::Gpt5_2Pro),
            ("gpt-5.1", ChatModel::Gpt5_1),
            ("gpt-5.1-chat-latest", ChatModel::Gpt5_1ChatLatest),
            ("gpt-5.1-codex-max", ChatModel::Gpt5_1CodexMax),
            ("gpt-5-mini", ChatModel::Gpt5Mini),
        ];
        for (model_str, expected_model) in gpt5_models {
            let parsed = ChatModel::from(model_str);
            assert_eq!(parsed, expected_model, "Failed to parse '{}'", model_str);
            assert_eq!(expected_model.as_str(), model_str, "Failed to convert {:?} to string", expected_model);
            let json = serde_json::to_string(&expected_model).unwrap();
            let deserialized: ChatModel = serde_json::from_str(&json).unwrap();
            assert_eq!(deserialized, expected_model, "Serialization roundtrip failed for {}", model_str);
        }
    }

    #[test]
    fn test_all_o_series_model_string_roundtrip() {
        let o_series_models = [
            ("o1", ChatModel::O1),
            ("o1-pro", ChatModel::O1Pro),
            ("o3", ChatModel::O3),
            ("o3-mini", ChatModel::O3Mini),
            ("o4-mini", ChatModel::O4Mini),
        ];
        for (model_str, expected_model) in o_series_models {
            let parsed = ChatModel::from(model_str);
            assert_eq!(parsed, expected_model, "Failed to parse '{}'", model_str);
            assert_eq!(expected_model.as_str(), model_str, "Failed to convert {:?} to string", expected_model);
        }
    }

    #[test]
    fn test_embedding_model_string_roundtrip() {
        let embedding_models = [
            ("text-embedding-3-small", EmbeddingModel::TextEmbedding3Small),
            ("text-embedding-3-large", EmbeddingModel::TextEmbedding3Large),
            ("text-embedding-ada-002", EmbeddingModel::TextEmbeddingAda002),
        ];
        for (model_str, expected_model) in embedding_models {
            let parsed = EmbeddingModel::from(model_str);
            assert_eq!(parsed, expected_model, "Failed to parse '{}'", model_str);
            assert_eq!(expected_model.as_str(), model_str, "Failed to convert {:?} to string", expected_model);
        }
    }

    #[test]
    fn test_embedding_model_all_dimensions() {
        assert_eq!(EmbeddingModel::TextEmbedding3Small.dimensions(), 1536);
        assert_eq!(EmbeddingModel::TextEmbedding3Large.dimensions(), 3072);
        assert_eq!(EmbeddingModel::TextEmbeddingAda002.dimensions(), 1536);
    }

    #[test]
    fn test_realtime_model_string_roundtrip() {
        let realtime_models = [("gpt-realtime-2025-08-28", RealtimeModel::GptRealtime_2025_08_28)];
        for (model_str, expected_model) in realtime_models {
            let parsed = RealtimeModel::from(model_str);
            assert_eq!(parsed, expected_model, "Failed to parse '{}'", model_str);
            assert_eq!(expected_model.as_str(), model_str, "Failed to convert {:?} to string", expected_model);
        }
    }

    #[test]
    fn test_realtime_model_custom() {
        let custom = RealtimeModel::custom("gpt-4o-realtime-2025");
        assert_eq!(custom.as_str(), "gpt-4o-realtime-2025");
        assert!(matches!(custom, RealtimeModel::Custom(_)));
    }

    #[test]
    fn test_fine_tuning_model_as_str_all_variants() {
        let fine_tuning_models = [
            ("gpt-4.1-2025-04-14", FineTuningModel::Gpt41_2025_04_14),
            ("gpt-4.1-mini-2025-04-14", FineTuningModel::Gpt41Mini_2025_04_14),
            ("gpt-4.1-nano-2025-04-14", FineTuningModel::Gpt41Nano_2025_04_14),
            ("gpt-4o-mini-2024-07-18", FineTuningModel::Gpt4oMini_2024_07_18),
            ("gpt-4o-2024-08-06", FineTuningModel::Gpt4o_2024_08_06),
            ("gpt-4-0613", FineTuningModel::Gpt4_0613),
            ("gpt-3.5-turbo-0125", FineTuningModel::Gpt35Turbo_0125),
            ("gpt-3.5-turbo-1106", FineTuningModel::Gpt35Turbo_1106),
            ("gpt-3.5-turbo-0613", FineTuningModel::Gpt35Turbo_0613),
        ];
        for (model_str, expected_model) in fine_tuning_models {
            assert_eq!(expected_model.as_str(), model_str, "Failed to convert {:?} to string", expected_model);
        }
    }

    #[test]
    fn test_fine_tuning_model_serialization_roundtrip() {
        let models = [FineTuningModel::Gpt41_2025_04_14, FineTuningModel::Gpt4oMini_2024_07_18, FineTuningModel::Gpt35Turbo_0125];
        for model in models {
            let json = serde_json::to_string(&model).unwrap();
            let deserialized: FineTuningModel = serde_json::from_str(&json).unwrap();
            assert_eq!(deserialized, model, "Serialization roundtrip failed for {:?}", model);
        }
    }
}