use crate::{
agent::AgentBuilder,
completion::{self, CompletionError, CompletionRequest},
embeddings::{self, EmbeddingError},
extractor::ExtractorBuilder,
json_utils,
model::ModelBuilder,
rag::RagAgentBuilder,
vector_store::{NoIndex, VectorStoreIndex},
};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::json;
const OPENAI_API_BASE_URL: &str = "https://api.openai.com";
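/// Client for the OpenAI API.
///
/// A minimal construction sketch (illustrative only; the model constants used
/// below are defined later in this module):
///
/// ```ignore
/// // Read the key from `OPENAI_API_KEY`, or pass one explicitly via `Client::new`.
/// let client = Client::from_env();
///
/// let gpt4o = client.completion_model(GPT_4O);
/// let embedder = client.embedding_model(TEXT_EMBEDDING_3_SMALL);
/// ```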
#[derive(Clone)]
pub struct Client {
base_url: String,
http_client: reqwest::Client,
}
impl Client {
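    /// Creates a new OpenAI client with the given API key, pointed at the
    /// default OpenAI API base URL.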
pub fn new(api_key: &str) -> Self {
Self::from_url(api_key, OPENAI_API_BASE_URL)
}
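    /// Creates a new OpenAI client with the given API key and a custom base
    /// URL (e.g. to target an OpenAI-compatible endpoint).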
pub fn from_url(api_key: &str, base_url: &str) -> Self {
Self {
base_url: base_url.to_string(),
http_client: reqwest::Client::builder()
.default_headers({
let mut headers = reqwest::header::HeaderMap::new();
headers.insert(
"Authorization",
format!("Bearer {}", api_key)
.parse()
.expect("Bearer token should parse"),
);
headers
})
.build()
.expect("OpenAI reqwest client should build"),
}
}
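    /// Creates a new OpenAI client from the `OPENAI_API_KEY` environment
    /// variable. Panics if the variable is not set.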
pub fn from_env() -> Self {
let api_key = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not set");
Self::new(&api_key)
}
    fn post(&self, path: &str) -> reqwest::RequestBuilder {
        // Join base URL and path with a single slash, leaving the scheme's "//" intact.
        let url = format!("{}/{}", self.base_url.trim_end_matches('/'), path.trim_start_matches('/'));
        self.http_client.post(url)
    }
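    /// Creates an embedding model handle for the given model name
    /// (e.g. [`TEXT_EMBEDDING_3_SMALL`]).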
pub fn embedding_model(&self, model: &str) -> EmbeddingModel {
EmbeddingModel::new(self.clone(), model)
}
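    /// Returns an [`embeddings::EmbeddingsBuilder`] that uses the given
    /// embedding model.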
pub fn embeddings(&self, model: &str) -> embeddings::EmbeddingsBuilder<EmbeddingModel> {
embeddings::EmbeddingsBuilder::new(self.embedding_model(model))
}
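    /// Creates a completion model handle for the given model name
    /// (e.g. [`GPT_4O`]).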
pub fn completion_model(&self, model: &str) -> CompletionModel {
CompletionModel::new(self.clone(), model)
}
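    /// Returns a [`ModelBuilder`] wrapping the given completion model.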
pub fn model(&self, model: &str) -> ModelBuilder<CompletionModel> {
ModelBuilder::new(self.completion_model(model))
}
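    /// Returns an [`AgentBuilder`] backed by the given completion model.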
pub fn agent(&self, model: &str) -> AgentBuilder<CompletionModel> {
AgentBuilder::new(self.completion_model(model))
}
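    /// Returns an [`ExtractorBuilder`] for extracting values of type `T` using
    /// the given completion model.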
pub fn extractor<T: JsonSchema + for<'a> Deserialize<'a> + Serialize + Send + Sync>(
&self,
model: &str,
) -> ExtractorBuilder<T, CompletionModel> {
ExtractorBuilder::new(self.completion_model(model))
}
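    /// Returns a [`RagAgentBuilder`] parameterized over a context index `C` and
    /// a tool index `T`, backed by the given completion model.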
pub fn rag_agent<C: VectorStoreIndex, T: VectorStoreIndex>(
&self,
model: &str,
) -> RagAgentBuilder<CompletionModel, C, T> {
RagAgentBuilder::new(self.completion_model(model))
}
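    /// Returns a [`RagAgentBuilder`] with [`NoIndex`] for context, i.e. a RAG
    /// agent that only uses a dynamic tool index `T`.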
pub fn tool_rag_agent<T: VectorStoreIndex>(
&self,
model: &str,
) -> RagAgentBuilder<CompletionModel, NoIndex, T> {
RagAgentBuilder::new(self.completion_model(model))
}
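    /// Returns a [`RagAgentBuilder`] with [`NoIndex`] for tools, i.e. a RAG
    /// agent that only uses a dynamic context index `C`.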
pub fn context_rag_agent<C: VectorStoreIndex>(
&self,
model: &str,
) -> RagAgentBuilder<CompletionModel, C, NoIndex> {
RagAgentBuilder::new(self.completion_model(model))
}
}
#[derive(Debug, Deserialize)]
struct ApiErrorResponse {
message: String,
}
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ApiResponse<T> {
Ok(T),
Err(ApiErrorResponse),
}
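// OpenAI embedding model names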
pub const TEXT_EMBEDDING_3_LARGE: &str = "text-embedding-3-large";
pub const TEXT_EMBEDDING_3_SMALL: &str = "text-embedding-3-small";
pub const TEXT_EMBEDDING_ADA_002: &str = "text-embedding-ada-002";
#[derive(Debug, Deserialize)]
pub struct EmbeddingResponse {
pub object: String,
pub data: Vec<EmbeddingData>,
pub model: String,
pub usage: Usage,
}
impl From<ApiErrorResponse> for EmbeddingError {
fn from(err: ApiErrorResponse) -> Self {
EmbeddingError::ProviderError(err.message)
}
}
impl From<ApiResponse<EmbeddingResponse>> for Result<EmbeddingResponse, EmbeddingError> {
fn from(value: ApiResponse<EmbeddingResponse>) -> Self {
match value {
ApiResponse::Ok(response) => Ok(response),
ApiResponse::Err(err) => Err(EmbeddingError::ProviderError(err.message)),
}
}
}
#[derive(Debug, Deserialize)]
pub struct EmbeddingData {
pub object: String,
pub embedding: Vec<f64>,
pub index: usize,
}
#[derive(Debug, Deserialize)]
pub struct Usage {
pub prompt_tokens: usize,
pub total_tokens: usize,
}
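/// An OpenAI embedding model, created via [`Client::embedding_model`].
///
/// A minimal usage sketch, assuming the `embeddings::EmbeddingModel` trait is
/// in scope and an async context (error handling elided):
///
/// ```ignore
/// let model = Client::from_env().embedding_model(TEXT_EMBEDDING_3_SMALL);
/// let embeddings = model
///     .embed_documents(vec!["first document".to_string(), "second document".to_string()])
///     .await?;
/// ```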
#[derive(Clone)]
pub struct EmbeddingModel {
client: Client,
pub model: String,
}
impl embeddings::EmbeddingModel for EmbeddingModel {
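    /// Maximum number of documents that can be embedded in a single request.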
const MAX_DOCUMENTS: usize = 1024;
async fn embed_documents(
&self,
documents: Vec<String>,
) -> Result<Vec<embeddings::Embedding>, EmbeddingError> {
let response = self
.client
.post("/v1/embeddings")
.json(&json!({
"model": self.model,
"input": documents,
}))
.send()
.await?
.error_for_status()?
.json::<ApiResponse<EmbeddingResponse>>()
.await?;
match response {
ApiResponse::Ok(response) => {
if response.data.len() != documents.len() {
return Err(EmbeddingError::ResponseError(
"Response data length does not match input length".into(),
));
}
Ok(response
.data
.into_iter()
.zip(documents.into_iter())
.map(|(embedding, document)| embeddings::Embedding {
document,
vec: embedding.embedding,
})
.collect())
}
ApiResponse::Err(err) => Err(EmbeddingError::ProviderError(err.message)),
}
}
}
impl EmbeddingModel {
pub fn new(client: Client, model: &str) -> Self {
Self {
client,
model: model.to_string(),
}
}
}
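// OpenAI completion model names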
pub const O1_PREVIEW: &str = "o1-preview";
pub const O1_PREVIEW_2024_09_12: &str = "o1-preview-2024-09-12";
pub const O1_MINI: &str = "o1-mini";
pub const O1_MINI_2024_09_12: &str = "o1-mini-2024-09-12";
pub const GPT_4O: &str = "gpt-4o";
pub const GPT_4O_2024_05_13: &str = "gpt-4o-2024-05-13";
pub const GPT_4_TURBO: &str = "gpt-4-turbo";
pub const GPT_4_TURBO_2024_04_09: &str = "gpt-4-turbo-2024-04-09";
pub const GPT_4_TURBO_PREVIEW: &str = "gpt-4-turbo-preview";
pub const GPT_4_0125_PREVIEW: &str = "gpt-4-0125-preview";
pub const GPT_4_1106_PREVIEW: &str = "gpt-4-1106-preview";
pub const GPT_4_VISION_PREVIEW: &str = "gpt-4-vision-preview";
pub const GPT_4_1106_VISION_PREVIEW: &str = "gpt-4-1106-vision-preview";
pub const GPT_4: &str = "gpt-4";
pub const GPT_4_0613: &str = "gpt-4-0613";
pub const GPT_4_32K: &str = "gpt-4-32k";
pub const GPT_4_32K_0613: &str = "gpt-4-32k-0613";
pub const GPT_35_TURBO: &str = "gpt-3.5-turbo";
pub const GPT_35_TURBO_0125: &str = "gpt-3.5-turbo-0125";
pub const GPT_35_TURBO_1106: &str = "gpt-3.5-turbo-1106";
pub const GPT_35_TURBO_INSTRUCT: &str = "gpt-3.5-turbo-instruct";
#[derive(Debug, Deserialize)]
pub struct CompletionResponse {
pub id: String,
pub object: String,
pub created: u64,
pub model: String,
pub system_fingerprint: Option<String>,
pub choices: Vec<Choice>,
pub usage: Option<Usage>,
}
impl From<ApiErrorResponse> for CompletionError {
fn from(err: ApiErrorResponse) -> Self {
CompletionError::ProviderError(err.message)
}
}
impl TryFrom<CompletionResponse> for completion::CompletionResponse<CompletionResponse> {
type Error = CompletionError;
    fn try_from(value: CompletionResponse) -> Result<Self, Self::Error> {
match value.choices.as_slice() {
[Choice {
message:
Message {
content: Some(content),
..
},
..
}, ..] => Ok(completion::CompletionResponse {
choice: completion::ModelChoice::Message(content.to_string()),
raw_response: value,
}),
[Choice {
message:
Message {
tool_calls: Some(calls),
..
},
..
}, ..] => {
let call = calls.first().ok_or(CompletionError::ResponseError(
"Tool selection is empty".into(),
))?;
Ok(completion::CompletionResponse {
choice: completion::ModelChoice::ToolCall(
call.function.name.clone(),
serde_json::from_str(&call.function.arguments)?,
),
raw_response: value,
})
}
_ => Err(CompletionError::ResponseError(
"Response did not contain a message or tool call".into(),
)),
}
}
}
#[derive(Debug, Deserialize)]
pub struct Choice {
pub index: usize,
pub message: Message,
pub logprobs: Option<serde_json::Value>,
pub finish_reason: String,
}
#[derive(Debug, Deserialize)]
pub struct Message {
pub role: String,
pub content: Option<String>,
pub tool_calls: Option<Vec<ToolCall>>,
}
#[derive(Debug, Deserialize)]
pub struct ToolCall {
pub id: String,
pub r#type: String,
pub function: Function,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ToolDefinition {
pub r#type: String,
pub function: completion::ToolDefinition,
}
impl From<completion::ToolDefinition> for ToolDefinition {
fn from(tool: completion::ToolDefinition) -> Self {
Self {
r#type: "function".into(),
function: tool,
}
}
}
#[derive(Debug, Deserialize)]
pub struct Function {
pub name: String,
pub arguments: String,
}
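/// An OpenAI completion (chat) model, created via [`Client::completion_model`]
/// and typically driven through the agent, model, or extractor builders above.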
#[derive(Clone)]
pub struct CompletionModel {
client: Client,
pub model: String,
}
impl CompletionModel {
pub fn new(client: Client, model: &str) -> Self {
Self {
client,
model: model.to_string(),
}
}
}
impl completion::CompletionModel for CompletionModel {
type Response = CompletionResponse;
async fn completion(
&self,
mut completion_request: CompletionRequest,
) -> Result<completion::CompletionResponse<CompletionResponse>, CompletionError> {
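        // Assemble the message history sent to OpenAI: an optional system
        // preamble, any documents (as extra system messages), the prior chat
        // history, and finally the user prompt.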
let mut full_history = if let Some(preamble) = &completion_request.preamble {
vec![completion::Message {
role: "system".into(),
content: preamble.clone(),
}]
} else {
vec![]
};
        // Inject any provided documents as additional system messages.
        full_history.extend(completion_request.documents.into_iter().map(|doc| {
            completion::Message {
                role: "system".into(),
                content: serde_json::to_string(&doc).expect("Document should serialize"),
            }
        }));
full_history.append(&mut completion_request.chat_history);
full_history.push(completion::Message {
role: "user".into(),
content: completion_request.prompt,
});
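        // Only include `tools` and `tool_choice` when the request supplies tools.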
let request = if completion_request.tools.is_empty() {
json!({
"model": self.model,
"messages": full_history,
"temperature": completion_request.temperature,
})
} else {
json!({
"model": self.model,
"messages": full_history,
"temperature": completion_request.temperature,
"tools": completion_request.tools.into_iter().map(ToolDefinition::from).collect::<Vec<_>>(),
"tool_choice": "auto",
})
};
let response = self
.client
.post("/v1/chat/completions")
.json(
&if let Some(params) = completion_request.additional_params {
json_utils::merge(request, params)
} else {
request
},
)
.send()
.await?
.error_for_status()?
.json::<ApiResponse<CompletionResponse>>()
.await?;
match response {
ApiResponse::Ok(response) => response.try_into(),
ApiResponse::Err(err) => Err(CompletionError::ProviderError(err.message)),
}
}
}