use anyhow::{Error, Result};
use async_openai::Client;
use async_openai::config::OpenAIConfig;
use crate::traits::{AsyncGenerateJSON, GenerateJSON, IsLLM};
/// An LLM backend that talks to an OpenAI-compatible HTTP API through the
/// `async_openai` client. Construct with [`OpenAILLM::new`].
#[derive(Debug, Clone)]
pub struct OpenAILLM {
    // Model identifier sent with each request.
    model: String,
    // Pre-configured client carrying the API base URL and key.
    client: Client<OpenAIConfig>,
}
impl OpenAILLM {
pub fn new(api_base: &str, api_key: &str, model: &str) -> Result<Self, Error> {
let llm_configuration: OpenAIConfig = OpenAIConfig::default()
.with_api_key(api_key)
.with_api_base(api_base);
let client: Client<OpenAIConfig> = async_openai::Client::with_config(llm_configuration);
Ok(Self {
model: model.to_string(),
client,
})
}
}
impl IsLLM for OpenAILLM {
    /// Borrows the underlying `async_openai` client.
    fn access_client(&self) -> &Client<impl async_openai::config::Config> {
        let client_ref = &self.client;
        client_ref
    }

    /// Borrows the configured model identifier.
    fn access_model(&self) -> &str {
        self.model.as_str()
    }
}
// Marker impls: both traits are satisfied entirely by their default methods,
// which build on the `IsLLM` accessors above — no overrides needed here.
impl GenerateJSON for OpenAILLM {}
impl AsyncGenerateJSON for OpenAILLM {}