use anyhow::{Error, Result};
use async_openai::Client;
use async_openai::config::AzureConfig;
use crate::traits::{AsyncGenerateJSON, GenerateJSON, IsLLM};
/// LLM client backed by an Azure OpenAI deployment.
///
/// Wraps an `async_openai` [`Client`] configured with [`AzureConfig`]; the
/// deployment id doubles as the reported model name (see the `new`
/// constructor). Cloning is cheap enough to share across call sites since
/// the derive covers both fields.
#[derive(Debug, Clone)]
pub struct AzureOpenAILLM {
// Model identifier surfaced to callers; set from the Azure deployment id.
model: String,
// Configured async-openai client used for all API calls.
client: Client<AzureConfig>,
}
impl AzureOpenAILLM {
pub fn new(api_base: &str, api_key: &str, deployment_id: &str, api_version: &str) -> Result<Self, Error> {
let llm_configuration: AzureConfig = AzureConfig::default()
.with_deployment_id(deployment_id)
.with_api_version(api_version)
.with_api_key(api_key)
.with_api_base(api_base);
let client: Client<AzureConfig> = async_openai::Client::with_config(llm_configuration);
Ok(Self {
model: deployment_id.to_string(),
client,
})
}
}
// Accessor glue for the crate's `IsLLM` trait: exposes the configured
// client and the model (deployment) name without transferring ownership.
impl IsLLM for AzureOpenAILLM {
// Borrow the underlying async-openai client; `impl Config` here resolves
// to `AzureConfig` for this implementation.
fn access_client(&self) -> &Client<impl async_openai::config::Config> {
&self.client
}
// Borrow the model identifier (the Azure deployment id passed to `new`).
fn access_model(&self) -> &str {
&self.model
}
}
// Marker impls: both traits supply default method bodies (presumably built
// on top of `IsLLM` — confirm in crate::traits), so no overrides are needed.
impl GenerateJSON for AzureOpenAILLM {}
impl AsyncGenerateJSON for AzureOpenAILLM {}