use std::collections::HashMap;
use super::{
config::{OpenAIConfig, OpenAIFeature},
models::{OpenAIModelFamily, OpenAIModelFeature},
};
/// Answers "what can this client do?" questions for a given OpenAI
/// configuration: which capabilities are usable, which models support
/// them, and what preconditions gate them.
pub struct OpenAICapabilities {
    // Holds the feature flags consulted by `is_available` and friends.
    config: OpenAIConfig,
}
impl OpenAICapabilities {
    /// Creates a capability inspector backed by `config`.
    pub fn new(config: OpenAIConfig) -> Self {
        Self { config }
    }

    /// Returns `true` when `capability` is usable under the current
    /// configuration.
    ///
    /// Core chat-path capabilities are unconditionally available; every
    /// other capability maps onto an `OpenAIFeature` configuration flag.
    pub fn is_available(&self, capability: OpenAICapability) -> bool {
        match capability {
            // Always available, regardless of configuration.
            OpenAICapability::ChatCompletion
            | OpenAICapability::Streaming
            | OpenAICapability::FunctionCalling
            | OpenAICapability::VisionSupport
            | OpenAICapability::Embeddings => true,
            // Gated behind configuration feature flags.
            OpenAICapability::ImageGeneration => self
                .config
                .is_feature_enabled(OpenAIFeature::ImageGeneration),
            OpenAICapability::AudioTranscription => self
                .config
                .is_feature_enabled(OpenAIFeature::AudioTranscription),
            // Audio output rides on the broader "audio models" flag.
            OpenAICapability::AudioOutput => {
                self.config.is_feature_enabled(OpenAIFeature::AudioModels)
            }
            OpenAICapability::FineTuning => {
                self.config.is_feature_enabled(OpenAIFeature::FineTuning)
            }
            OpenAICapability::VectorStores => {
                self.config.is_feature_enabled(OpenAIFeature::VectorStores)
            }
            OpenAICapability::RealtimeAudio => {
                self.config.is_feature_enabled(OpenAIFeature::RealtimeAudio)
            }
            OpenAICapability::OSeriesOptimizations => self
                .config
                .is_feature_enabled(OpenAIFeature::OSeriesOptimizations),
            OpenAICapability::GPT5Features => {
                self.config.is_feature_enabled(OpenAIFeature::GPT5Features)
            }
        }
    }

    /// All capabilities currently usable, in the canonical order defined
    /// by `OpenAICapability::all()`.
    pub fn get_available_capabilities(&self) -> Vec<OpenAICapability> {
        OpenAICapability::all()
            .into_iter()
            .filter(|cap| self.is_available(*cap))
            .collect()
    }

    /// Maps every registered model id to the model-level features its
    /// spec declares.
    pub fn get_model_capability_matrix(&self) -> HashMap<String, Vec<OpenAIModelFeature>> {
        use super::models::get_openai_registry;
        let registry = get_openai_registry();
        let models = registry.get_all_models();
        let mut matrix = HashMap::with_capacity(models.len());
        for model_info in models {
            // Models without a registered spec are silently skipped —
            // they simply don't appear in the matrix.
            if let Some(spec) = registry.get_model_spec(&model_info.id) {
                matrix.insert(model_info.id.clone(), spec.features.clone());
            }
        }
        matrix
    }

    /// Returns the ids of all registered models that support `capability`.
    ///
    /// Account-level capabilities with no model-level equivalent
    /// (vector stores, GPT-5 features) yield an empty list.
    pub fn get_models_for_capability(&self, capability: OpenAICapability) -> Vec<String> {
        use super::models::get_openai_registry;
        let registry = get_openai_registry();
        // Exhaustive on purpose: adding an OpenAICapability variant forces
        // a decision here instead of silently matching no models.
        let feature = match capability {
            OpenAICapability::ChatCompletion => OpenAIModelFeature::ChatCompletion,
            OpenAICapability::Streaming => OpenAIModelFeature::StreamingSupport,
            OpenAICapability::FunctionCalling => OpenAIModelFeature::FunctionCalling,
            OpenAICapability::VisionSupport => OpenAIModelFeature::VisionSupport,
            OpenAICapability::ImageGeneration => OpenAIModelFeature::ImageGeneration,
            OpenAICapability::AudioTranscription => OpenAIModelFeature::AudioTranscription,
            OpenAICapability::AudioOutput => OpenAIModelFeature::AudioOutput,
            OpenAICapability::Embeddings => OpenAIModelFeature::Embeddings,
            OpenAICapability::FineTuning => OpenAIModelFeature::FineTuning,
            OpenAICapability::RealtimeAudio => OpenAIModelFeature::RealtimeAudio,
            OpenAICapability::OSeriesOptimizations => OpenAIModelFeature::ReasoningMode,
            OpenAICapability::VectorStores | OpenAICapability::GPT5Features => {
                return Vec::new();
            }
        };
        registry.get_models_with_feature(&feature)
    }

    /// Capabilities conventionally associated with a model family.
    ///
    /// This is a static lookup; it does not consult the configuration.
    pub fn get_family_capabilities(&self, family: OpenAIModelFamily) -> Vec<OpenAICapability> {
        match family {
            OpenAIModelFamily::GPT4 | OpenAIModelFamily::GPT4Turbo | OpenAIModelFamily::GPT4O => {
                vec![
                    OpenAICapability::ChatCompletion,
                    OpenAICapability::Streaming,
                    OpenAICapability::FunctionCalling,
                    OpenAICapability::VisionSupport,
                ]
            }
            OpenAIModelFamily::GPT35 => vec![
                OpenAICapability::ChatCompletion,
                OpenAICapability::Streaming,
                OpenAICapability::FunctionCalling,
            ],
            OpenAIModelFamily::O1 => vec![
                OpenAICapability::ChatCompletion,
                OpenAICapability::Streaming,
                OpenAICapability::OSeriesOptimizations,
            ],
            OpenAIModelFamily::DALLE2 | OpenAIModelFamily::DALLE3 => {
                vec![OpenAICapability::ImageGeneration]
            }
            OpenAIModelFamily::Whisper => vec![OpenAICapability::AudioTranscription],
            OpenAIModelFamily::TTS => vec![OpenAICapability::AudioOutput],
            OpenAIModelFamily::Embedding => vec![OpenAICapability::Embeddings],
            // OpenAIModelFamily is declared outside this file, so a
            // catch-all is kept for families added elsewhere.
            _ => Vec::new(),
        }
    }

    /// Describes the preconditions for `capability`.
    ///
    /// Kept in sync with `is_available`: every feature-gated capability
    /// lists its gating `OpenAIFeature` so callers can explain *why*
    /// something is unavailable.
    pub fn get_capability_requirements(
        &self,
        capability: OpenAICapability,
    ) -> CapabilityRequirements {
        match capability {
            // Always-on capabilities carry no requirements.
            OpenAICapability::ChatCompletion
            | OpenAICapability::Streaming
            | OpenAICapability::FunctionCalling
            | OpenAICapability::VisionSupport
            | OpenAICapability::Embeddings => CapabilityRequirements::default(),
            OpenAICapability::ImageGeneration => CapabilityRequirements {
                required_features: vec![OpenAIFeature::ImageGeneration],
                ..Default::default()
            },
            // NOTE(review): the next four arms previously fell through to
            // `default()` (no requirements), contradicting the feature
            // gates applied in `is_available`. Aligned them with those
            // gates — confirm the intended flag mapping.
            OpenAICapability::AudioTranscription => CapabilityRequirements {
                required_features: vec![OpenAIFeature::AudioTranscription],
                ..Default::default()
            },
            OpenAICapability::AudioOutput => CapabilityRequirements {
                required_features: vec![OpenAIFeature::AudioModels],
                ..Default::default()
            },
            OpenAICapability::VectorStores => CapabilityRequirements {
                required_features: vec![OpenAIFeature::VectorStores],
                ..Default::default()
            },
            OpenAICapability::OSeriesOptimizations => CapabilityRequirements {
                required_features: vec![OpenAIFeature::OSeriesOptimizations],
                ..Default::default()
            },
            OpenAICapability::FineTuning => CapabilityRequirements {
                required_features: vec![OpenAIFeature::FineTuning],
                enterprise_only: true,
                ..Default::default()
            },
            OpenAICapability::RealtimeAudio => CapabilityRequirements {
                min_api_version: Some("2024-10-01".to_string()),
                required_features: vec![OpenAIFeature::RealtimeAudio],
                beta_feature: true,
                ..Default::default()
            },
            OpenAICapability::GPT5Features => CapabilityRequirements {
                min_api_version: Some("2024-12-01".to_string()),
                required_features: vec![OpenAIFeature::GPT5Features],
                enterprise_only: true,
                beta_feature: true,
            },
        }
    }
}
/// A user-visible OpenAI platform capability.
///
/// Variants span both always-available features (chat, streaming) and
/// feature-flag-gated ones (fine-tuning, realtime audio, GPT-5).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OpenAICapability {
    ChatCompletion,
    Streaming,
    FunctionCalling,
    VisionSupport,
    ImageGeneration,
    AudioTranscription,
    AudioOutput,
    Embeddings,
    FineTuning,
    VectorStores,
    RealtimeAudio,
    OSeriesOptimizations,
    GPT5Features,
}

impl OpenAICapability {
    /// Every capability, in canonical display order.
    pub fn all() -> Vec<Self> {
        vec![
            Self::ChatCompletion,
            Self::Streaming,
            Self::FunctionCalling,
            Self::VisionSupport,
            Self::ImageGeneration,
            Self::AudioTranscription,
            Self::AudioOutput,
            Self::Embeddings,
            Self::FineTuning,
            Self::VectorStores,
            Self::RealtimeAudio,
            Self::OSeriesOptimizations,
            Self::GPT5Features,
        ]
    }

    /// Human-readable display name for this capability.
    pub fn name(&self) -> &'static str {
        self.meta().0
    }

    /// One-line description suitable for tooltips or docs.
    pub fn description(&self) -> &'static str {
        self.meta().1
    }

    // Single source of truth for per-variant (name, description) text,
    // so the two public accessors can never drift apart.
    fn meta(self) -> (&'static str, &'static str) {
        match self {
            Self::ChatCompletion => ("Chat Completion", "Basic text generation and conversation"),
            Self::Streaming => ("Streaming Responses", "Real-time streaming of response tokens"),
            Self::FunctionCalling => ("Function Calling", "Structured function and tool calling"),
            Self::VisionSupport => ("Vision Support", "Image and multimodal input processing"),
            Self::ImageGeneration => ("Image Generation", "AI-powered image creation with DALL-E"),
            Self::AudioTranscription => {
                ("Audio Transcription", "Speech-to-text conversion with Whisper")
            }
            Self::AudioOutput => ("Audio Output", "Text-to-speech synthesis"),
            Self::Embeddings => ("Text Embeddings", "Vector representations for semantic search"),
            Self::FineTuning => ("Model Fine-tuning", "Custom model training and optimization"),
            Self::VectorStores => ("Vector Stores", "Integrated vector database operations"),
            Self::RealtimeAudio => ("Real-time Audio", "Low-latency audio processing"),
            Self::OSeriesOptimizations => {
                ("O-series Optimizations", "Advanced reasoning optimizations for O1 models")
            }
            Self::GPT5Features => ("GPT-5 Features", "Next-generation GPT-5 capabilities"),
        }
    }
}
/// Preconditions a capability needs before it can be used.
///
/// `default()` means "no requirements at all".
#[derive(Debug, Clone, Default)]
pub struct CapabilityRequirements {
    /// Minimum API version required, if any (e.g. "2024-10-01").
    pub min_api_version: Option<String>,
    /// Configuration feature flags that must be enabled.
    pub required_features: Vec<OpenAIFeature>,
    /// Restricted to enterprise accounts when `true`.
    pub enterprise_only: bool,
    /// Still in beta and subject to change when `true`.
    pub beta_feature: bool,
}
/// Outcome of validating a capability against the current configuration.
#[derive(Debug)]
pub struct CapabilityValidation {
    /// Whether the capability can be used.
    pub available: bool,
    /// Human-readable notes accompanying the result (e.g. disabled
    /// feature flags, missing API key, beta-status notices).
    pub reasons: Vec<String>,
}
impl OpenAICapabilities {
    /// Validates `capability` against the current configuration.
    ///
    /// `available` is `false` only for genuinely blocking problems
    /// (disabled feature flags, missing API key for enterprise-only
    /// features). The beta-status notice is still appended to `reasons`
    /// but is informational and no longer blocks availability.
    ///
    /// Fix: previously `available` was computed as `reasons.is_empty()`
    /// *after* the unconditional beta notice was pushed, so every beta
    /// capability (e.g. RealtimeAudio) was reported unavailable even
    /// when fully enabled.
    pub fn validate_capability(&self, capability: OpenAICapability) -> CapabilityValidation {
        let requirements = self.get_capability_requirements(capability);
        // Collect only the reasons that actually block usage.
        let mut blocking = Vec::new();
        if !self.is_available(capability) {
            blocking.push(format!(
                "Feature {} is disabled in configuration",
                capability.name()
            ));
        }
        for required_feature in &requirements.required_features {
            if !self.config.is_feature_enabled(required_feature.clone()) {
                blocking.push(format!(
                    "Required feature {:?} is not enabled",
                    required_feature
                ));
            }
        }
        // NOTE(review): an API key is used as a proxy for enterprise
        // access here — confirm this matches the account model.
        if requirements.enterprise_only && self.config.base.api_key.is_none() {
            blocking.push("Enterprise feature requires valid API key".to_string());
        }
        let available = blocking.is_empty();
        // Append the informational beta notice after deciding availability.
        let mut reasons = blocking;
        if requirements.beta_feature {
            reasons.push(format!(
                "{} is a beta feature and may change",
                capability.name()
            ));
        }
        CapabilityValidation { available, reasons }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_capability_detection() {
        let caps = OpenAICapabilities::new(OpenAIConfig::default());
        // Core chat-path capabilities are always on.
        assert!(caps.is_available(OpenAICapability::ChatCompletion));
        assert!(caps.is_available(OpenAICapability::Streaming));
        assert!(caps.is_available(OpenAICapability::FunctionCalling));
        // With the default config, image generation is enabled while
        // fine-tuning remains opt-in.
        assert!(caps.is_available(OpenAICapability::ImageGeneration));
        assert!(!caps.is_available(OpenAICapability::FineTuning));
    }

    #[test]
    fn test_capability_validation() {
        let caps = OpenAICapabilities::new(OpenAIConfig::default());

        let chat = caps.validate_capability(OpenAICapability::ChatCompletion);
        assert!(chat.available);
        assert!(chat.reasons.is_empty());

        let fine_tuning = caps.validate_capability(OpenAICapability::FineTuning);
        assert!(!fine_tuning.available);
        assert!(!fine_tuning.reasons.is_empty());
    }

    #[test]
    fn test_model_recommendations() {
        let caps = OpenAICapabilities::new(OpenAIConfig::default());
        // At least one registered model must support basic chat.
        let chat_models = caps.get_models_for_capability(OpenAICapability::ChatCompletion);
        assert!(!chat_models.is_empty());
        // The vision list may legitimately be empty; just exercise the call.
        let _ = caps.get_models_for_capability(OpenAICapability::VisionSupport);
    }

    #[test]
    fn test_capability_names() {
        assert_eq!(OpenAICapability::ChatCompletion.name(), "Chat Completion");
        assert_eq!(OpenAICapability::ImageGeneration.name(), "Image Generation");
        assert!(!OpenAICapability::ChatCompletion.description().is_empty());
        assert!(!OpenAICapability::VisionSupport.description().is_empty());
    }
}