pub mod capabilities;
pub mod client;
pub mod conversions;
pub mod embedding_model;
pub mod language_model;
pub mod settings;
use crate::core::DynamicModel;
use crate::core::capabilities::ModelName;
use crate::core::utils::validate_base_url;
use crate::error::Error;
use crate::providers::openai::client::{OpenAIEmbeddingOptions, OpenAILanguageModelOptions};
use crate::providers::openai::settings::OpenAIProviderSettings;
/// OpenAI-compatible provider handle, parameterized by a model marker type `M`
/// (see [`ModelName`]); `OpenAI<DynamicModel>` allows a runtime-chosen model name.
#[derive(Debug, Clone)]
pub struct OpenAI<M: ModelName> {
    /// Connection settings: base URL, API key, provider name, optional path.
    pub settings: OpenAIProviderSettings,
    // Options sent with language-model (chat/completion) requests; the model
    // field is seeded from `M::MODEL_NAME` (or the dynamic name) on construction.
    pub(crate) lm_options: OpenAILanguageModelOptions,
    // Options sent with embedding requests; `input` starts empty and is filled
    // per request elsewhere — TODO confirm against embedding_model.rs.
    pub(crate) embedding_options: OpenAIEmbeddingOptions,
    // Zero-sized marker tying this instance to the model type `M`.
    pub(crate) _phantom: std::marker::PhantomData<M>,
}
impl<M: ModelName> OpenAI<M> {
    /// Starts a builder for an `OpenAI<M>` provider, pre-seeded with
    /// `M::MODEL_NAME` and default settings.
    pub fn builder() -> OpenAIBuilder<M> {
        Default::default()
    }
}
impl<M: ModelName> Default for OpenAI<M> {
    /// Builds a provider for model `M` using default [`OpenAIProviderSettings`]
    /// and option sets whose model field is `M::MODEL_NAME`.
    ///
    /// Unlike [`OpenAIBuilder::build`], this performs no base-URL or API-key
    /// validation — the defaults are assumed usable as-is.
    fn default() -> Self {
        let settings = OpenAIProviderSettings::default();
        // NOTE(review): the options builder is assumed to only fail when no
        // model is set, and one is always set here — confirm against the
        // builder's validation rules.
        let lm_options = OpenAILanguageModelOptions::builder()
            .model(M::MODEL_NAME.to_string())
            .build()
            .expect("language-model options builder cannot fail once a model is set");
        // Reuse the model string already held by lm_options instead of
        // allocating a second copy from M::MODEL_NAME.
        let embedding_options = OpenAIEmbeddingOptions {
            input: vec![],
            model: lm_options.model.clone(),
            user: None,
            dimensions: None,
            encoding_format: None,
        };
        Self {
            settings,
            lm_options,
            embedding_options,
            _phantom: std::marker::PhantomData,
        }
    }
}
impl OpenAI<DynamicModel> {
    /// Builds a provider for a model chosen at runtime by `name`, with default
    /// [`OpenAIProviderSettings`] and no validation (mirrors the `Default` impl
    /// for statically-named models).
    pub fn model_name(name: impl Into<String>) -> Self {
        let settings = OpenAIProviderSettings::default();
        let model_name = name.into();
        // NOTE(review): the options builder is assumed to only fail when no
        // model is set, and one is always set here — confirm against the
        // builder's validation rules.
        let lm_options = OpenAILanguageModelOptions::builder()
            .model(model_name.clone())
            .build()
            .expect("language-model options builder cannot fail once a model is set");
        // `model_name` is moved (not cloned) into the embedding options.
        let embedding_options = OpenAIEmbeddingOptions {
            input: vec![],
            model: model_name,
            user: None,
            dimensions: None,
            encoding_format: None,
        };
        OpenAI {
            settings,
            lm_options,
            embedding_options,
            _phantom: std::marker::PhantomData,
        }
    }
}
/// Builder for [`OpenAI<M>`]; obtain one via `OpenAI::<M>::builder()`.
/// Embedding options are derived from the language-model options at
/// `build()` time, so only settings and LM options are held here.
pub struct OpenAIBuilder<M: ModelName> {
    // Provider connection settings, mutated in place by the setter methods.
    settings: OpenAIProviderSettings,
    // Language-model options; `model` is pre-seeded with `M::MODEL_NAME`.
    options: OpenAILanguageModelOptions,
    // Zero-sized marker tying the builder to the model type `M`.
    _phantom: std::marker::PhantomData<M>,
}
impl OpenAIBuilder<DynamicModel> {
    /// Overrides the model name; only available when the model is chosen
    /// dynamically (`DynamicModel`), not for statically-typed models.
    pub fn model_name(self, model_name: impl Into<String>) -> Self {
        let mut builder = self;
        builder.options.model = model_name.into();
        builder
    }
}
impl<M: ModelName> Default for OpenAIBuilder<M> {
    /// Creates a builder with default settings and language-model options
    /// whose model field is `M::MODEL_NAME`.
    fn default() -> Self {
        let settings = OpenAIProviderSettings::default();
        // NOTE(review): the options builder is assumed to only fail when no
        // model is set, and one is always set here — confirm against the
        // builder's validation rules.
        let options = OpenAILanguageModelOptions::builder()
            .model(M::MODEL_NAME.to_string())
            .build()
            .expect("language-model options builder cannot fail once a model is set");
        Self {
            settings,
            options,
            _phantom: std::marker::PhantomData,
        }
    }
}
impl<M: ModelName> OpenAIBuilder<M> {
pub fn base_url(mut self, base_url: impl Into<String>) -> Self {
self.settings.base_url = base_url.into();
self
}
pub fn api_key(mut self, api_key: impl Into<String>) -> Self {
self.settings.api_key = api_key.into();
self
}
pub fn provider_name(mut self, provider_name: impl Into<String>) -> Self {
self.settings.provider_name = provider_name.into();
self
}
pub fn path(mut self, path: impl Into<String>) -> Self {
self.settings.path = Some(path.into());
self
}
pub fn build(self) -> Result<OpenAI<M>, Error> {
let base_url = validate_base_url(&self.settings.base_url)?;
if self.settings.api_key.is_empty() {
return Err(Error::MissingField("api_key".to_string()));
}
let lm_options = self.options;
let model_name = lm_options.model.clone();
let embedding_options = OpenAIEmbeddingOptions {
input: vec![],
model: model_name,
user: None,
dimensions: None,
encoding_format: None,
};
Ok(OpenAI {
settings: OpenAIProviderSettings {
base_url,
..self.settings
},
lm_options,
embedding_options,
_phantom: std::marker::PhantomData,
})
}
}
pub use capabilities::*;