#![allow(clippy::too_many_arguments)]
use crate::interceptor::{
AfterResponseContext, BeforeRequestContext, ErrorContext, InterceptorChain,
};
use crate::semantic_conventions::operation_names;
use crate::{
builders::{
assistants::{AssistantBuilder, MessageBuilder, RunBuilder},
audio::{
SpeechBuilder, TranscriptionBuilder, TranscriptionRequest, TranslationBuilder,
TranslationRequest,
},
completions::CompletionsBuilder,
embeddings::EmbeddingsBuilder,
files::{FileDeleteBuilder, FileListBuilder, FileRetrievalBuilder, FileUploadBuilder},
images::{
ImageEditBuilder, ImageEditRequest, ImageGenerationBuilder, ImageVariationBuilder,
ImageVariationRequest,
},
models::{ModelDeleteBuilder, ModelRetrievalBuilder},
moderations::ModerationBuilder,
threads::ThreadRequestBuilder,
uploads::UploadBuilder,
usage::UsageBuilder,
Builder, ChatCompletionBuilder, ResponsesBuilder,
},
config::Config,
errors::Result,
responses::ChatCompletionResponseWrapper,
Error, UploadPurpose,
};
use openai_client_base::apis::Error as ApiError;
use openai_client_base::{
apis::{
assistants_api, audio_api, batch_api, chat_api, completions_api,
configuration::Configuration, embeddings_api, files_api, fine_tuning_api, images_api,
models_api, moderations_api, uploads_api, usage_api, vector_stores_api,
},
models::{
AssistantObject, Batch, CreateBatchRequest, CreateChatCompletionRequest,
CreateCompletionResponse, CreateEmbeddingResponse, CreateFineTuningJobRequest,
CreateModerationResponse, CreateTranscription200Response, CreateTranslation200Response,
DeleteAssistantResponse, DeleteFileResponse, DeleteModelResponse,
DeleteVectorStoreFileResponse, DeleteVectorStoreResponse, FineTuningJob, ImagesResponse,
ListAssistantsResponse, ListBatchesResponse, ListFilesResponse,
ListFineTuningJobCheckpointsResponse, ListFineTuningJobEventsResponse,
ListMessagesResponse, ListModelsResponse, ListPaginatedFineTuningJobsResponse,
ListRunStepsResponse, ListRunsResponse, ListVectorStoreFilesResponse,
ListVectorStoresResponse, MessageObject, Model, OpenAiFile, RunObject, RunStepObject,
SubmitToolOutputsRunRequestToolOutputsInner, ThreadObject, Upload, UsageResponse,
VectorStoreFileObject, VectorStoreObject, VectorStoreSearchResultsPage,
},
};
use reqwest_middleware::ClientWithMiddleware as HttpClient;
use std::sync::Arc;
use std::time::Instant;
use tokio::time::Duration;
/// Generates the three interceptor plumbing helpers for a sub-client type
/// (any struct holding a `client` reference to the root [`Client`]):
///
/// * `call_before_request` — runs `before_request` interceptors and, if one
///   fails, reports the failure via `on_error` before propagating it.
/// * `handle_api_error` — maps a generated-client error into [`Error`] and
///   notifies `on_error` interceptors.
/// * `call_after_response` — serializes the response and runs
///   `after_response` interceptors; failures are logged, never propagated.
///
/// NOTE(review): these bodies intentionally mirror the inherent helpers on
/// `Client<T>` further below, differing only in the `self.client.` access
/// path — keep the two in sync when editing either.
macro_rules! impl_interceptor_helpers {
    ($client_type:ty) => {
        impl<T: Default + Send + Sync> $client_type {
            /// Run `before_request` interceptors; on failure, notify
            /// `on_error` interceptors and return the error.
            async fn call_before_request(
                &self,
                operation: &str,
                model: &str,
                request_json: &str,
                state: &mut T,
            ) -> Result<()> {
                // Skip all work when no interceptors are registered.
                if !self.client.interceptors.is_empty() {
                    let mut ctx = BeforeRequestContext {
                        operation,
                        model,
                        request_json,
                        state,
                    };
                    if let Err(e) = self.client.interceptors.before_request(&mut ctx).await {
                        let error_ctx = ErrorContext {
                            operation,
                            model: Some(model),
                            request_json: Some(request_json),
                            error: &e,
                            state: Some(state),
                        };
                        self.client.interceptors.on_error(&error_ctx).await;
                        return Err(e);
                    }
                }
                Ok(())
            }
            /// Convert a generated-API error into [`Error`] and notify
            /// `on_error` interceptors.
            async fn handle_api_error<E>(
                &self,
                error: openai_client_base::apis::Error<E>,
                operation: &str,
                model: &str,
                request_json: &str,
                state: &T,
            ) -> Error {
                let error = map_api_error(error);
                if !self.client.interceptors.is_empty() {
                    let error_ctx = ErrorContext {
                        operation,
                        model: Some(model),
                        request_json: Some(request_json),
                        error: &error,
                        state: Some(state),
                    };
                    self.client.interceptors.on_error(&error_ctx).await;
                }
                error
            }
            /// Run `after_response` interceptors with the serialized
            /// response and timing/token metadata. Interceptor failures are
            /// logged at `warn` and swallowed so they cannot fail the call.
            async fn call_after_response<R>(
                &self,
                response: &R,
                operation: &str,
                model: &str,
                request_json: &str,
                state: &T,
                duration: std::time::Duration,
                input_tokens: Option<i64>,
                output_tokens: Option<i64>,
            ) where
                R: serde::Serialize + Sync,
            {
                if !self.client.interceptors.is_empty() {
                    // Serialization failure degrades to an empty string
                    // rather than aborting the (already successful) call.
                    let response_json = serde_json::to_string(response).unwrap_or_default();
                    let ctx = AfterResponseContext {
                        operation,
                        model,
                        request_json,
                        response_json: &response_json,
                        duration,
                        input_tokens,
                        output_tokens,
                        state,
                    };
                    if let Err(e) = self.client.interceptors.after_response(&ctx).await {
                        tracing::warn!("Interceptor after_response failed: {}", e);
                    }
                }
            }
        }
    };
}
/// Builder for [`Client`].
///
/// Collects the resolved configuration, the (middleware-wrapped) HTTP
/// client, the generated-client [`Configuration`], and an interceptor
/// chain before [`ClientBuilder::build`] freezes them into a [`Client`].
/// `T` is the per-request interceptor state type (defaults to `()`).
pub struct ClientBuilder<T = ()> {
    config: Arc<Config>,
    http: HttpClient,
    base_configuration: Configuration,
    interceptors: InterceptorChain<T>,
}
/// Ergonomic OpenAI API client.
///
/// Cheap to clone: configuration and the interceptor chain are shared via
/// [`Arc`], and the HTTP client is internally reference-counted. `T` is
/// the per-request interceptor state type (defaults to `()`).
#[derive(Clone)]
pub struct Client<T = ()> {
    config: Arc<Config>,
    http: HttpClient,
    base_configuration: Configuration,
    interceptors: Arc<InterceptorChain<T>>,
}
impl<T> std::fmt::Debug for Client<T> {
    /// Manual `Debug`: only `config` produces its own debug output; the
    /// HTTP client, base configuration, and interceptor chain are opaque
    /// (and not `Debug`), so they render as fixed placeholder strings.
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut debug = formatter.debug_struct("Client");
        debug.field("config", &self.config);
        debug.field("http", &"<HttpClient>");
        debug.field("base_configuration", &"<Configuration>");
        debug.field("interceptors", &"<InterceptorChain>");
        debug.finish()
    }
}
impl ClientBuilder {
    /// Create a builder from an explicit [`Config`].
    ///
    /// Builds (or reuses) the HTTP client, wiring in Azure auth middleware
    /// when the config targets Azure, and prepares the generated-client
    /// [`Configuration`]: bearer token (non-Azure only), base URL override,
    /// and an organization-tagged user agent when an org id is set.
    ///
    /// # Errors
    /// Returns [`Error::Http`] if the underlying `reqwest` client cannot be
    /// constructed, and propagates nothing else.
    pub fn new(config: Config) -> Result<Self> {
        let is_azure = config.is_azure();
        // Prefer a caller-supplied HTTP client; otherwise build one with a
        // 120 s timeout and a crate-versioned user agent.
        let http_client = if let Some(client) = config.http_client() {
            client.clone()
        } else {
            let reqwest_client = reqwest::Client::builder()
                .timeout(Duration::from_secs(120))
                .user_agent(format!("openai-ergonomic/{}", env!("CARGO_PKG_VERSION")))
                .build()
                .map_err(Error::Http)?;
            let mut client_builder = reqwest_middleware::ClientBuilder::new(reqwest_client);
            if is_azure {
                // Azure authenticates via middleware (api-key header and
                // deployment routing) rather than a bearer token.
                let azure_middleware = crate::azure_middleware::AzureAuthMiddleware::new(
                    config.api_key().to_string(),
                    config.azure_api_version().map(String::from),
                    config.azure_deployment().map(String::from),
                );
                client_builder = client_builder.with(azure_middleware);
            }
            client_builder.build()
        };
        let mut base_configuration = Configuration::new();
        base_configuration.client = http_client.clone();
        if !is_azure {
            base_configuration.bearer_access_token = Some(config.api_key().to_string());
        }
        if let Some(base_url) = config.base_url() {
            base_configuration.base_path = base_url.to_string();
        }
        if let Some(org_id) = config.organization_id() {
            // NOTE(review): the organization id is only surfaced via the
            // user agent here — confirm whether the generated client also
            // needs an `OpenAI-Organization` header for non-streaming calls.
            base_configuration.user_agent = Some(format!(
                "openai-ergonomic/{} org/{}",
                env!("CARGO_PKG_VERSION"),
                org_id
            ));
        }
        Ok(Self {
            config: Arc::new(config),
            http: http_client,
            base_configuration,
            interceptors: InterceptorChain::new(),
        })
    }
    /// Create a builder from environment variables (see [`Config::from_env`]).
    ///
    /// # Errors
    /// Propagates configuration errors from [`Config::from_env`] and the
    /// construction errors of [`ClientBuilder::new`].
    pub fn from_env() -> Result<Self> {
        Self::new(Config::from_env()?)
    }
}
impl<T> ClientBuilder<T> {
    /// Replace the interceptor chain with a single interceptor whose state
    /// type is `U`, changing the builder's state type accordingly.
    ///
    /// NOTE(review): interceptors previously added (for state type `T`) are
    /// discarded here — this is forced by the state-type change, but callers
    /// mixing `with_interceptor` and `add_interceptor` must call the
    /// type-changing one first.
    #[must_use]
    pub fn with_interceptor<U>(
        self,
        interceptor: Box<dyn crate::interceptor::Interceptor<U>>,
    ) -> ClientBuilder<U> {
        let mut new_chain = InterceptorChain::new();
        new_chain.add(interceptor);
        ClientBuilder {
            config: self.config,
            http: self.http,
            base_configuration: self.base_configuration,
            interceptors: new_chain,
        }
    }
    /// Append an interceptor sharing the current state type `T`.
    #[must_use]
    pub fn add_interceptor(
        mut self,
        interceptor: Box<dyn crate::interceptor::Interceptor<T>>,
    ) -> Self {
        self.interceptors.add(interceptor);
        self
    }
    /// Finalize the builder into a [`Client`], freezing the interceptor
    /// chain behind an [`Arc`] so the client stays cheaply cloneable.
    #[must_use]
    pub fn build(self) -> Client<T> {
        Client {
            config: self.config,
            http: self.http,
            base_configuration: self.base_configuration,
            interceptors: Arc::new(self.interceptors),
        }
    }
}
impl Client {
    /// Convenience shortcut for [`ClientBuilder::new`].
    ///
    /// # Errors
    /// Propagates HTTP-client construction errors.
    pub fn builder(config: Config) -> Result<ClientBuilder> {
        ClientBuilder::new(config)
    }
    /// Convenience shortcut for [`ClientBuilder::from_env`].
    ///
    /// # Errors
    /// Propagates configuration and HTTP-client construction errors.
    pub fn from_env() -> Result<ClientBuilder> {
        ClientBuilder::from_env()
    }
}
impl<T> Client<T> {
    /// The resolved configuration this client was built with.
    pub fn config(&self) -> &Config {
        &self.config
    }
    /// The underlying middleware-wrapped HTTP client.
    pub fn http_client(&self) -> &HttpClient {
        &self.http
    }
}
// Interceptor plumbing used by the chat/responses paths on the root client.
// NOTE(review): these bodies duplicate what `impl_interceptor_helpers!`
// generates for sub-clients (the only difference is `self.` vs
// `self.client.`) — keep both in sync when editing either.
impl<T: Default + Send + Sync> Client<T> {
    /// Run `before_request` interceptors; on failure, notify `on_error`
    /// interceptors and propagate the error.
    async fn call_before_request(
        &self,
        operation: &str,
        model: &str,
        request_json: &str,
        state: &mut T,
    ) -> Result<()> {
        // Skip all work when no interceptors are registered.
        if !self.interceptors.is_empty() {
            let mut ctx = BeforeRequestContext {
                operation,
                model,
                request_json,
                state,
            };
            if let Err(e) = self.interceptors.before_request(&mut ctx).await {
                let error_ctx = ErrorContext {
                    operation,
                    model: Some(model),
                    request_json: Some(request_json),
                    error: &e,
                    state: Some(state),
                };
                self.interceptors.on_error(&error_ctx).await;
                return Err(e);
            }
        }
        Ok(())
    }
    /// Convert a generated-API error into [`Error`] and notify `on_error`
    /// interceptors.
    async fn handle_api_error<E>(
        &self,
        error: openai_client_base::apis::Error<E>,
        operation: &str,
        model: &str,
        request_json: &str,
        state: &T,
    ) -> Error {
        let error = map_api_error(error);
        if !self.interceptors.is_empty() {
            let error_ctx = ErrorContext {
                operation,
                model: Some(model),
                request_json: Some(request_json),
                error: &error,
                state: Some(state),
            };
            self.interceptors.on_error(&error_ctx).await;
        }
        error
    }
    /// Run `after_response` interceptors with the serialized response plus
    /// timing/token metadata. Interceptor failures are logged at `warn` and
    /// swallowed so they cannot fail an already-successful call.
    async fn call_after_response<R>(
        &self,
        response: &R,
        operation: &str,
        model: &str,
        request_json: &str,
        state: &T,
        duration: std::time::Duration,
        input_tokens: Option<i64>,
        output_tokens: Option<i64>,
    ) where
        R: serde::Serialize + Sync,
    {
        if !self.interceptors.is_empty() {
            // Serialization failure degrades to an empty string rather
            // than aborting the call.
            let response_json = serde_json::to_string(response).unwrap_or_default();
            let ctx = AfterResponseContext {
                operation,
                model,
                request_json,
                response_json: &response_json,
                duration,
                input_tokens,
                output_tokens,
                state,
            };
            if let Err(e) = self.interceptors.after_response(&ctx).await {
                tracing::warn!("Interceptor after_response failed: {}", e);
            }
        }
    }
}
impl<T: Default + Send + Sync + 'static> Client<T> {
    /// Chat-completion builder seeded with the configured default model
    /// (falling back to "gpt-4").
    pub fn chat(&self) -> ChatCompletionBuilder {
        let model = self.config.default_model().unwrap_or("gpt-4");
        ChatCompletionBuilder::new(model)
    }
    /// Chat builder pre-populated with a single user message.
    pub fn chat_simple(&self, message: impl Into<String>) -> ChatCompletionBuilder {
        self.chat().user(message)
    }
    /// Chat builder pre-populated with a system prompt and a user message.
    pub fn chat_with_system(
        &self,
        system: impl Into<String>,
        user: impl Into<String>,
    ) -> ChatCompletionBuilder {
        self.chat().system(system).user(user)
    }
    /// Execute a fully-built chat-completion request, running the full
    /// interceptor lifecycle (before / after / error) around the API call.
    ///
    /// # Errors
    /// Returns the mapped API error, or a `before_request` interceptor
    /// failure.
    pub async fn execute_chat(
        &self,
        request: CreateChatCompletionRequest,
    ) -> Result<ChatCompletionResponseWrapper> {
        let mut state = T::default();
        let operation = operation_names::CHAT;
        let model = request.model.clone();
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, &model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match chat_api::create_chat_completion()
            .configuration(&self.base_configuration)
            .create_chat_completion_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, &model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        // Token counts are taken from the API usage block when present.
        self.call_after_response(
            &response,
            operation,
            &model,
            &request_json,
            &state,
            duration,
            response.usage.as_ref().map(|u| i64::from(u.prompt_tokens)),
            response
                .usage
                .as_ref()
                .map(|u| i64::from(u.completion_tokens)),
        )
        .await;
        Ok(ChatCompletionResponseWrapper::new(response))
    }
    /// Build the request from `builder` and execute it.
    pub async fn send_chat(
        &self,
        builder: ChatCompletionBuilder,
    ) -> Result<ChatCompletionResponseWrapper> {
        let request = builder.build()?;
        self.execute_chat(request).await
    }
    /// Build the request from `builder` and execute it as a streaming call.
    pub async fn send_chat_stream(
        &self,
        mut builder: ChatCompletionBuilder,
    ) -> Result<crate::streaming::BoxedChatStream> {
        builder = builder.stream(true);
        let mut request = builder.build()?;
        // Belt-and-braces: force the stream flag even if the builder did
        // not propagate it.
        request.stream = Some(true);
        self.execute_chat_stream(request, crate::semantic_conventions::operation_names::CHAT)
            .await
    }
    /// Low-level streaming execution: the generated client does not expose
    /// SSE streaming, so the request is issued manually against
    /// `{api_base}/chat/completions` and the byte stream is wrapped in a
    /// chat-completion chunk stream (interceptor-wrapped when interceptors
    /// are registered).
    ///
    /// NOTE(review): unlike `execute_chat`, the error paths here (request
    /// build, transport failure, non-2xx status) return without notifying
    /// `on_error` interceptors — confirm whether that asymmetry is
    /// intentional. Also, `bearer_auth` is applied even for Azure configs;
    /// presumably the Azure middleware overrides auth — verify.
    async fn execute_chat_stream(
        &self,
        request: CreateChatCompletionRequest,
        operation: &str,
    ) -> Result<crate::streaming::BoxedChatStream> {
        let uri_str = format!("{}/chat/completions", self.config.api_base());
        let mut req_builder = self
            .http_client()
            .request(reqwest::Method::POST, &uri_str)
            .bearer_auth(self.config.api_key())
            .json(&request);
        if let Some(org_id) = self.config.organization_id() {
            req_builder = req_builder.header("OpenAI-Organization", org_id);
        }
        if let Some(project_id) = self.config.project() {
            req_builder = req_builder.header("OpenAI-Project", project_id);
        }
        let req = req_builder.build()?;
        let request_json = serde_json::to_string(&request).unwrap_or_else(|_| "{}".to_string());
        let model = request.model.clone();
        let mut state = T::default();
        if !self.interceptors.is_empty() {
            let mut ctx = crate::interceptor::BeforeRequestContext {
                operation,
                model: &model,
                request_json: &request_json,
                state: &mut state,
            };
            self.interceptors.before_request(&mut ctx).await?;
        }
        let response = self.http_client().execute(req).await?;
        let status = response.status();
        if !status.is_success() {
            // Non-2xx: surface the body text as the API error message.
            let error_text = response.text().await?;
            return Err(Error::Api {
                status: status.as_u16(),
                message: error_text,
                error_type: None,
                error_code: None,
            });
        }
        let stream = crate::streaming::ChatCompletionStream::new(response);
        if self.interceptors.is_empty() {
            Ok(Box::pin(stream))
        } else {
            // With interceptors registered, wrap the stream so that
            // after-response/error hooks fire as the stream is consumed.
            let intercepted = crate::streaming::InterceptedStream::new(
                stream,
                std::sync::Arc::clone(&self.interceptors),
                operation.to_string(),
                model,
                request_json,
                state,
            );
            Ok(Box::pin(intercepted))
        }
    }
}
impl<T: Default + Send + Sync + 'static> Client<T> {
    /// Responses builder seeded with the configured default model
    /// (falling back to "gpt-4").
    pub fn responses(&self) -> ResponsesBuilder {
        let model = self.config.default_model().unwrap_or("gpt-4");
        ResponsesBuilder::new(model)
    }
    /// Responses builder pre-populated with a single user message.
    pub fn responses_simple(&self, message: impl Into<String>) -> ResponsesBuilder {
        self.responses().user(message)
    }
    /// Execute a responses request.
    ///
    /// NOTE(review): this delegates to `execute_chat`, so interceptors see
    /// the CHAT operation name for non-streaming responses calls, while
    /// the streaming variant below reports RESPONSES — confirm whether
    /// that asymmetry is intentional.
    pub async fn execute_responses(
        &self,
        request: CreateChatCompletionRequest,
    ) -> Result<ChatCompletionResponseWrapper> {
        self.execute_chat(request).await
    }
    /// Build the request from `builder` and execute it.
    pub async fn send_responses(
        &self,
        builder: ResponsesBuilder,
    ) -> Result<ChatCompletionResponseWrapper> {
        let request = builder.build()?;
        self.execute_responses(request).await
    }
    /// Build the request from `builder` and execute it as a streaming call.
    pub async fn send_responses_stream(
        &self,
        mut builder: ResponsesBuilder,
    ) -> Result<crate::streaming::BoxedChatStream> {
        builder = builder.stream(true);
        let mut request = builder.build()?;
        // Force the stream flag even if the builder did not propagate it.
        request.stream = Some(true);
        self.execute_chat_stream(
            request,
            crate::semantic_conventions::operation_names::RESPONSES,
        )
        .await
    }
}
// Borrowing accessors for the per-endpoint sub-clients; each sub-client is
// a thin wrapper holding a reference back to this client.
impl<T: Default + Send + Sync> Client<T> {
    /// Assistants API sub-client.
    #[must_use]
    pub fn assistants(&self) -> AssistantsClient<'_, T> {
        AssistantsClient { client: self }
    }
    /// Audio (speech / transcription / translation) sub-client.
    #[must_use]
    pub fn audio(&self) -> AudioClient<'_, T> {
        AudioClient { client: self }
    }
    /// Embeddings sub-client.
    #[must_use]
    pub fn embeddings(&self) -> EmbeddingsClient<'_, T> {
        EmbeddingsClient { client: self }
    }
    /// Image generation / edit / variation sub-client.
    #[must_use]
    pub fn images(&self) -> ImagesClient<'_, T> {
        ImagesClient { client: self }
    }
    /// Files sub-client.
    #[must_use]
    pub fn files(&self) -> FilesClient<'_, T> {
        FilesClient { client: self }
    }
    /// Fine-tuning sub-client.
    #[must_use]
    pub fn fine_tuning(&self) -> FineTuningClient<'_, T> {
        FineTuningClient { client: self }
    }
    /// Batch API sub-client.
    #[must_use]
    pub fn batch(&self) -> BatchClient<'_, T> {
        BatchClient { client: self }
    }
    /// Vector stores sub-client.
    #[must_use]
    pub fn vector_stores(&self) -> VectorStoresClient<'_, T> {
        VectorStoresClient { client: self }
    }
    /// Moderations sub-client.
    #[must_use]
    pub fn moderations(&self) -> ModerationsClient<'_, T> {
        ModerationsClient { client: self }
    }
    /// Threads sub-client.
    #[must_use]
    pub fn threads(&self) -> ThreadsClient<'_, T> {
        ThreadsClient { client: self }
    }
    /// Uploads (multipart upload) sub-client.
    #[must_use]
    pub fn uploads(&self) -> UploadsClient<'_, T> {
        UploadsClient { client: self }
    }
    /// Models sub-client.
    #[must_use]
    pub fn models(&self) -> ModelsClient<'_, T> {
        ModelsClient { client: self }
    }
    /// Legacy completions sub-client.
    #[must_use]
    pub fn completions(&self) -> CompletionsClient<'_, T> {
        CompletionsClient { client: self }
    }
    /// Usage reporting sub-client.
    #[must_use]
    pub fn usage(&self) -> UsageClient<'_, T> {
        UsageClient { client: self }
    }
}
impl<T: Default + Send + Sync> AudioClient<'_, T> {
    /// Text-to-speech builder.
    #[must_use]
    pub fn speech(
        &self,
        model: impl Into<String>,
        input: impl Into<String>,
        voice: impl Into<String>,
    ) -> SpeechBuilder {
        SpeechBuilder::new(model, input, voice)
    }
    /// Execute a speech request, returning the raw audio bytes.
    ///
    /// The after-response interceptor receives a synthetic `{"size": N}`
    /// payload since the audio body is not JSON-serializable.
    ///
    /// # Errors
    /// Returns builder validation errors, mapped API errors, [`Error::Http`]
    /// while reading the body, or a `before_request` interceptor failure.
    pub async fn create_speech(&self, builder: SpeechBuilder) -> Result<Vec<u8>> {
        let request = builder.build()?;
        let mut state = T::default();
        let operation = operation_names::AUDIO_SPEECH;
        let model = request.model.clone();
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, &model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match audio_api::create_speech()
            .configuration(&self.client.base_configuration)
            .create_speech_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, &model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let bytes = response.bytes().await.map_err(Error::Http)?;
        let duration = start_time.elapsed();
        // Report only the payload size to interceptors (audio is binary).
        let response_json = format!("{{\"size\": {}}}", bytes.len());
        self.call_after_response(
            &response_json,
            operation,
            &model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(bytes.to_vec())
    }
    /// Transcription builder for an audio file.
    #[must_use]
    pub fn transcription(
        &self,
        file: impl AsRef<std::path::Path>,
        model: impl Into<String>,
    ) -> TranscriptionBuilder {
        TranscriptionBuilder::new(file, model)
    }
    /// Execute a transcription request.
    ///
    /// Interceptors see a stub request JSON (`"file":"<audio_file>"`)
    /// because the audio payload is not embeddable in a log line.
    ///
    /// # Errors
    /// Returns builder validation errors, mapped API errors, or a
    /// `before_request` interceptor failure.
    pub async fn create_transcription(
        &self,
        builder: TranscriptionBuilder,
    ) -> Result<CreateTranscription200Response> {
        let request = builder.build()?;
        let model_str = request.model.clone();
        let mut state = T::default();
        let operation = operation_names::AUDIO_TRANSCRIPTION;
        let request_json = format!(r#"{{"model":"{model_str}","file":"<audio_file>"}}"#);
        self.call_before_request(operation, &model_str, &request_json, &mut state)
            .await?;
        // Destructure so each field can be handed to the generated builder.
        let TranscriptionRequest {
            file,
            model,
            language,
            prompt,
            response_format,
            temperature,
            stream,
            chunking_strategy,
            timestamp_granularities,
            include,
        } = request;
        // The generated API takes granularities as plain strings.
        let timestamp_strings = timestamp_granularities.as_ref().map(|values| {
            values
                .iter()
                .map(|granularity| granularity.as_str().to_string())
                .collect::<Vec<_>>()
        });
        let start_time = Instant::now();
        let response = match audio_api::create_transcription()
            .configuration(&self.client.base_configuration)
            .file(file)
            .model(&model)
            .maybe_language(language.as_deref())
            .maybe_prompt(prompt.as_deref())
            .maybe_response_format(response_format)
            .maybe_temperature(temperature)
            .maybe_stream(stream)
            .maybe_chunking_strategy(chunking_strategy)
            .maybe_timestamp_granularities(timestamp_strings)
            .maybe_include(include)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, &model_str, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            &model_str,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Translation builder for an audio file.
    #[must_use]
    pub fn translation(
        &self,
        file: impl AsRef<std::path::Path>,
        model: impl Into<String>,
    ) -> TranslationBuilder {
        TranslationBuilder::new(file, model)
    }
    /// Execute a translation request (audio in, translated text out).
    ///
    /// # Errors
    /// Returns builder validation errors, mapped API errors, or a
    /// `before_request` interceptor failure.
    pub async fn create_translation(
        &self,
        builder: TranslationBuilder,
    ) -> Result<CreateTranslation200Response> {
        let request = builder.build()?;
        let model_str = request.model.clone();
        let mut state = T::default();
        let operation = operation_names::AUDIO_TRANSLATION;
        let request_json = format!(r#"{{"model":"{model_str}","file":"<audio_file>"}}"#);
        self.call_before_request(operation, &model_str, &request_json, &mut state)
            .await?;
        let TranslationRequest {
            file,
            model,
            prompt,
            response_format,
            temperature,
        } = request;
        // The generated API wants the response format as a string.
        let response_format_owned = response_format.map(|format| format.to_string());
        let start_time = Instant::now();
        let response = match audio_api::create_translation()
            .configuration(&self.client.base_configuration)
            .file(file)
            .model(&model)
            .maybe_prompt(prompt.as_deref())
            .maybe_response_format(response_format_owned.as_deref())
            .maybe_temperature(temperature)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, &model_str, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            &model_str,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
}
impl<T: Default + Send + Sync> EmbeddingsClient<'_, T> {
    /// Start an embeddings builder for `model`.
    #[must_use]
    pub fn builder(&self, model: impl Into<String>) -> EmbeddingsBuilder {
        EmbeddingsBuilder::new(model)
    }
    /// Embeddings builder pre-populated with a single text input.
    #[must_use]
    pub fn text(&self, model: impl Into<String>, input: impl Into<String>) -> EmbeddingsBuilder {
        self.builder(model).input_text(input)
    }
    /// Embeddings builder pre-populated with pre-tokenized input.
    #[must_use]
    pub fn tokens<I>(&self, model: impl Into<String>, tokens: I) -> EmbeddingsBuilder
    where
        I: IntoIterator<Item = i32>,
    {
        self.builder(model).input_tokens(tokens)
    }
    /// Execute the embeddings request with the interceptor lifecycle.
    ///
    /// # Errors
    /// Returns builder validation errors, mapped API errors, or a
    /// `before_request` interceptor failure.
    pub async fn create(&self, builder: EmbeddingsBuilder) -> Result<CreateEmbeddingResponse> {
        let request = builder.build()?;
        let mut state = T::default();
        let operation = operation_names::EMBEDDINGS;
        let model = request.model.clone();
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, &model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match embeddings_api::create_embedding()
            .configuration(&self.client.base_configuration)
            .create_embedding_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, &model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        // NOTE(review): `total_tokens` is passed as the output-token count;
        // embeddings usage only exposes prompt/total, so confirm this is
        // the intended mapping rather than `total - prompt` or `None`.
        self.call_after_response(
            &response,
            operation,
            &model,
            &request_json,
            &state,
            duration,
            Some(i64::from(response.usage.prompt_tokens)),
            Some(i64::from(response.usage.total_tokens)),
        )
        .await;
        Ok(response)
    }
}
impl<T: Default + Send + Sync> ImagesClient<'_, T> {
    /// Image-generation builder for `prompt`.
    #[must_use]
    pub fn generate(&self, prompt: impl Into<String>) -> ImageGenerationBuilder {
        ImageGenerationBuilder::new(prompt)
    }
    /// Execute an image-generation request.
    ///
    /// # Errors
    /// Returns builder validation errors, mapped API errors, or a
    /// `before_request` interceptor failure.
    pub async fn create(&self, builder: ImageGenerationBuilder) -> Result<ImagesResponse> {
        let request = builder.build()?;
        let mut state = T::default();
        let operation = operation_names::IMAGE_GENERATION;
        // For interceptor reporting, fall back to "dall-e-2" when no model
        // was set on the request.
        let model = request
            .model
            .as_ref()
            .map_or_else(|| "dall-e-2".to_string(), ToString::to_string);
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, &model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match images_api::create_image()
            .configuration(&self.client.base_configuration)
            .create_image_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, &model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            &model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Image-edit builder for an input image and prompt.
    #[must_use]
    pub fn edit(
        &self,
        image: impl AsRef<std::path::Path>,
        prompt: impl Into<String>,
    ) -> ImageEditBuilder {
        ImageEditBuilder::new(image, prompt)
    }
    /// Execute an image-edit request.
    ///
    /// Interceptors see a synthetic prompt/model JSON (the image payload
    /// is not embeddable).
    ///
    /// # Errors
    /// Returns builder validation errors, mapped API errors, or a
    /// `before_request` interceptor failure.
    pub async fn create_edit(&self, builder: ImageEditBuilder) -> Result<ImagesResponse> {
        let request = builder.build()?;
        let model_str = request
            .model
            .as_ref()
            .map_or_else(|| "dall-e-2".to_string(), ToString::to_string);
        let mut state = T::default();
        let operation = operation_names::IMAGE_EDIT;
        let request_json = format!(
            r#"{{"prompt":"{}","model":"{}"}}"#,
            request.prompt, model_str
        );
        self.call_before_request(operation, &model_str, &request_json, &mut state)
            .await?;
        // Destructure so each field can be handed to the generated builder.
        let ImageEditRequest {
            image,
            prompt,
            mask,
            background,
            model,
            n,
            size,
            response_format,
            output_format,
            output_compression,
            user,
            input_fidelity,
            stream,
            partial_images,
            quality,
        } = request;
        let start_time = Instant::now();
        let response = match images_api::create_image_edit()
            .configuration(&self.client.base_configuration)
            .image(image)
            .prompt(&prompt)
            .maybe_mask(mask)
            .maybe_background(background.as_deref())
            .maybe_model(model.as_deref())
            .maybe_n(n)
            .maybe_size(size.as_deref())
            .maybe_response_format(response_format.as_deref())
            .maybe_output_format(output_format.as_deref())
            .maybe_output_compression(output_compression)
            .maybe_user(user.as_deref())
            .maybe_input_fidelity(input_fidelity)
            .maybe_stream(stream)
            .maybe_partial_images(partial_images)
            .maybe_quality(quality.as_deref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, &model_str, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            &model_str,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Image-variation builder for an input image.
    #[must_use]
    pub fn variation(&self, image: impl AsRef<std::path::Path>) -> ImageVariationBuilder {
        ImageVariationBuilder::new(image)
    }
    /// Execute an image-variation request.
    ///
    /// # Errors
    /// Returns builder validation errors, mapped API errors, or a
    /// `before_request` interceptor failure.
    pub async fn create_variation(&self, builder: ImageVariationBuilder) -> Result<ImagesResponse> {
        let request = builder.build()?;
        let model_str = request
            .model
            .as_ref()
            .map_or_else(|| "dall-e-2".to_string(), ToString::to_string);
        let mut state = T::default();
        let operation = operation_names::IMAGE_VARIATION;
        let request_json = format!(r#"{{"model":"{model_str}"}}"#);
        self.call_before_request(operation, &model_str, &request_json, &mut state)
            .await?;
        let ImageVariationRequest {
            image,
            model,
            n,
            response_format,
            size,
            user,
        } = request;
        let start_time = Instant::now();
        let response = match images_api::create_image_variation()
            .configuration(&self.client.base_configuration)
            .image(image)
            .maybe_model(model.as_deref())
            .maybe_n(n)
            .maybe_response_format(response_format.as_deref())
            .maybe_size(size.as_deref())
            .maybe_user(user.as_deref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, &model_str, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            &model_str,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
}
impl<T: Default + Send + Sync> ThreadsClient<'_, T> {
    /// Empty thread-creation builder.
    #[must_use]
    pub fn builder(&self) -> ThreadRequestBuilder {
        ThreadRequestBuilder::new()
    }
    /// Create a thread.
    ///
    /// Threads have no model, so interceptors receive the placeholder
    /// model name "thread".
    ///
    /// # Errors
    /// Returns builder validation errors, mapped API errors, or a
    /// `before_request` interceptor failure.
    pub async fn create(&self, builder: ThreadRequestBuilder) -> Result<ThreadObject> {
        let request = builder.build()?;
        let mut state = T::default();
        let operation = operation_names::THREAD_CREATE;
        let model = "thread";
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match assistants_api::create_thread()
            .configuration(&self.client.base_configuration)
            .maybe_create_thread_request(Some(request))
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
}
impl<T: Default + Send + Sync> UploadsClient<'_, T> {
    /// Multipart-upload builder.
    ///
    /// `bytes` is the declared total size of the upload; `mime_type` is the
    /// content type reported to the API.
    #[must_use]
    pub fn builder(
        &self,
        filename: impl Into<String>,
        purpose: UploadPurpose,
        bytes: i32,
        mime_type: impl Into<String>,
    ) -> UploadBuilder {
        UploadBuilder::new(filename, purpose, bytes, mime_type)
    }
    /// Create (start) a multipart upload.
    ///
    /// Uploads have no model, so interceptors receive the placeholder
    /// model name "upload".
    ///
    /// # Errors
    /// Returns builder validation errors, mapped API errors, or a
    /// `before_request` interceptor failure.
    pub async fn create(&self, builder: UploadBuilder) -> Result<Upload> {
        let request = builder.build()?;
        let mut state = T::default();
        let operation = operation_names::UPLOAD_CREATE;
        let model = "upload";
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match uploads_api::create_upload()
            .configuration(&self.client.base_configuration)
            .create_upload_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
}
impl<T: Default + Send + Sync> ModerationsClient<'_, T> {
    /// Moderation builder for `input`.
    #[must_use]
    pub fn builder(&self, input: impl Into<String>) -> ModerationBuilder {
        ModerationBuilder::new(input)
    }
    /// Alias for [`Self::builder`] with friendlier naming for the common
    /// "check this content" use case.
    #[must_use]
    pub fn check(&self, input: impl Into<String>) -> ModerationBuilder {
        ModerationBuilder::new(input)
    }
    /// Execute the moderation request.
    ///
    /// # Errors
    /// Returns builder validation errors, mapped API errors, or a
    /// `before_request` interceptor failure.
    pub async fn create(&self, builder: ModerationBuilder) -> Result<CreateModerationResponse> {
        let request = builder.build()?;
        let mut state = T::default();
        let operation = operation_names::MODERATION;
        // Fall back to "text-moderation-latest" for interceptor reporting
        // when no model was set on the request.
        let model = request
            .model
            .as_ref()
            .map_or_else(|| "text-moderation-latest".to_string(), ToString::to_string);
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, &model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match moderations_api::create_moderation()
            .configuration(&self.client.base_configuration)
            .create_moderation_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, &model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            &model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
}
impl<T: Default + Send + Sync> FilesClient<'_, T> {
/// Upload a file by writing the builder's content to a temp file and
/// handing that path to the generated multipart API.
///
/// The temp file is removed on every exit path (interceptor rejection,
/// API error, success).
///
/// NOTE(review): `std::fs::write`/`remove_file` are blocking calls inside
/// an async fn (consider `spawn_blocking`/`tokio::fs`), and the temp path
/// reuses the original filename, so concurrent uploads of the same name
/// would collide — the filename presumably feeds the multipart part name,
/// so fixing this needs care; confirm against the generated API.
///
/// # Errors
/// Returns [`Error::File`] on temp-file I/O failure, mapped API errors,
/// or a `before_request` interceptor failure.
pub async fn upload(&self, builder: FileUploadBuilder) -> Result<OpenAiFile> {
    let temp_dir = std::env::temp_dir();
    let temp_file_path = temp_dir.join(builder.filename());
    std::fs::write(&temp_file_path, builder.content()).map_err(Error::File)?;
    // Map the crate-level purpose string onto the generated enum; any
    // unrecognized purpose falls back to Assistants.
    let purpose = match builder.purpose().to_string().as_str() {
        "fine-tune" => openai_client_base::models::FilePurpose::FineTune,
        "vision" => openai_client_base::models::FilePurpose::Vision,
        "batch" => openai_client_base::models::FilePurpose::Batch,
        _ => openai_client_base::models::FilePurpose::Assistants,
    };
    let mut state = T::default();
    let operation = operation_names::FILE_UPLOAD;
    // Files have no model; "file-upload" is the interceptor placeholder.
    let model = "file-upload";
    let request_json = format!(
        r#"{{"filename":"{}","purpose":"{}","size":{}}}"#,
        builder.filename(),
        builder.purpose(),
        builder.content().len()
    );
    if let Err(e) = self
        .call_before_request(operation, model, &request_json, &mut state)
        .await
    {
        // Best-effort cleanup before bailing out.
        let _ = std::fs::remove_file(&temp_file_path);
        return Err(e);
    }
    let start_time = Instant::now();
    let result = match files_api::create_file()
        .configuration(&self.client.base_configuration)
        .file(temp_file_path.clone())
        .purpose(purpose)
        .call()
        .await
    {
        Ok(resp) => resp,
        Err(e) => {
            let _ = std::fs::remove_file(&temp_file_path);
            let error = self
                .handle_api_error(e, operation, model, &request_json, &state)
                .await;
            return Err(error);
        }
    };
    let _ = std::fs::remove_file(temp_file_path);
    let duration = start_time.elapsed();
    self.call_after_response(
        &result,
        operation,
        model,
        &request_json,
        &state,
        duration,
        None,
        None,
    )
    .await;
    Ok(result)
}
/// Alias for [`Self::upload`].
pub async fn create(&self, builder: FileUploadBuilder) -> Result<OpenAiFile> {
    self.upload(builder).await
}
/// Builder for uploading UTF-8 text content under `filename`.
#[must_use]
pub fn upload_text(
    &self,
    filename: impl Into<String>,
    purpose: crate::builders::files::FilePurpose,
    text: impl Into<String>,
) -> FileUploadBuilder {
    FileUploadBuilder::from_text(filename, purpose, text)
}
/// Builder for uploading raw bytes under `filename`.
#[must_use]
pub fn upload_bytes(
    &self,
    filename: impl Into<String>,
    purpose: crate::builders::files::FilePurpose,
    content: Vec<u8>,
) -> FileUploadBuilder {
    FileUploadBuilder::new(filename, purpose, content)
}
/// Builder for uploading an existing file from disk.
///
/// # Errors
/// Returns [`Error::File`] if the file cannot be read.
pub fn upload_from_path(
    &self,
    path: impl AsRef<std::path::Path>,
    purpose: crate::builders::files::FilePurpose,
) -> Result<FileUploadBuilder> {
    FileUploadBuilder::from_path(path, purpose).map_err(Error::File)
}
/// List files, optionally filtered/paged by the builder's purpose, limit,
/// and order.
///
/// The interceptor request JSON substitutes display defaults (empty
/// purpose, limit 10000, order "desc") for unset options; the actual API
/// call sends only the options that were set.
///
/// # Errors
/// Returns mapped API errors or a `before_request` interceptor failure.
pub async fn list(&self, builder: FileListBuilder) -> Result<ListFilesResponse> {
    let purpose = builder.purpose_ref().map(ToString::to_string);
    let limit = builder.limit_ref();
    let order = builder.order_ref().map(ToString::to_string);
    let mut state = T::default();
    let operation = operation_names::FILE_LIST;
    // Files have no model; "files" is the interceptor placeholder.
    let model = "files";
    let request_json = format!(
        r#"{{"purpose":"{}","limit":{},"order":"{}"}}"#,
        purpose.as_deref().unwrap_or(""),
        limit.unwrap_or(10000),
        order.as_deref().unwrap_or("desc")
    );
    self.call_before_request(operation, model, &request_json, &mut state)
        .await?;
    let start_time = Instant::now();
    let response = match files_api::list_files()
        .configuration(&self.client.base_configuration)
        .maybe_purpose(purpose.as_deref())
        .maybe_limit(limit)
        .maybe_order(order.as_deref())
        .call()
        .await
    {
        Ok(resp) => resp,
        Err(e) => {
            let error = self
                .handle_api_error(e, operation, model, &request_json, &state)
                .await;
            return Err(error);
        }
    };
    let duration = start_time.elapsed();
    self.call_after_response(
        &response,
        operation,
        model,
        &request_json,
        &state,
        duration,
        None,
        None,
    )
    .await;
    Ok(response)
}
/// Empty file-list builder.
#[must_use]
pub fn list_builder(&self) -> FileListBuilder {
    FileListBuilder::new()
}
/// Retrieves metadata for a single file by id.
///
/// # Errors
/// Returns an error if a `before_request` interceptor rejects the call or the
/// underlying API request fails.
pub async fn retrieve(&self, file_id: impl Into<String>) -> Result<OpenAiFile> {
    let file_id = file_id.into();
    let mut state = T::default();
    let operation = operation_names::FILE_RETRIEVE;
    let model = "files";
    // NOTE(review): the id is interpolated without JSON escaping; fine for
    // API-issued ids, but a quote in the value would break this payload.
    let request_json = format!(r#"{{"file_id":"{file_id}"}}"#);
    self.call_before_request(operation, model, &request_json, &mut state)
        .await?;
    let start_time = Instant::now();
    let response = match files_api::retrieve_file()
        .configuration(&self.client.base_configuration)
        .file_id(&file_id)
        .call()
        .await
    {
        Ok(resp) => resp,
        Err(e) => {
            let error = self
                .handle_api_error(e, operation, model, &request_json, &state)
                .await;
            return Err(error);
        }
    };
    let duration = start_time.elapsed();
    self.call_after_response(
        &response,
        operation,
        model,
        &request_json,
        &state,
        duration,
        None,
        None,
    )
    .await;
    Ok(response)
}
/// Builder-based convenience wrapper around [`Self::retrieve`].
pub async fn get(&self, builder: FileRetrievalBuilder) -> Result<OpenAiFile> {
    let file_id = builder.file_id();
    self.retrieve(file_id).await
}
/// Downloads a file's content as text.
///
/// NOTE(review): the generated API returns `String`, so this assumes the file
/// content is valid UTF-8 text — confirm for binary purposes.
///
/// # Errors
/// Returns an error if a `before_request` interceptor rejects the call or the
/// underlying API request fails.
pub async fn download(&self, file_id: impl Into<String>) -> Result<String> {
    let file_id = file_id.into();
    let mut state = T::default();
    let operation = operation_names::FILE_DOWNLOAD;
    let model = "files";
    let request_json = format!(r#"{{"file_id":"{file_id}"}}"#);
    self.call_before_request(operation, model, &request_json, &mut state)
        .await?;
    let start_time = Instant::now();
    let response = match files_api::download_file()
        .configuration(&self.client.base_configuration)
        .file_id(&file_id)
        .call()
        .await
    {
        Ok(resp) => resp,
        Err(e) => {
            let error = self
                .handle_api_error(e, operation, model, &request_json, &state)
                .await;
            return Err(error);
        }
    };
    let duration = start_time.elapsed();
    // Report only the payload size to interceptors, not the content itself.
    let response_size = format!(r#"{{"size":{}}}"#, response.len());
    self.call_after_response(
        &response_size,
        operation,
        model,
        &request_json,
        &state,
        duration,
        None,
        None,
    )
    .await;
    Ok(response)
}
/// Downloads a file's content and returns it as raw bytes.
///
/// Delegates to [`Self::download`] and re-encodes the text payload, so the
/// same UTF-8 caveat applies.
pub async fn download_bytes(&self, file_id: impl Into<String>) -> Result<Vec<u8>> {
    self.download(file_id).await.map(String::into_bytes)
}
/// Deletes a file by id.
///
/// # Errors
/// Returns an error if a `before_request` interceptor rejects the call or the
/// underlying API request fails.
pub async fn delete(&self, file_id: impl Into<String>) -> Result<DeleteFileResponse> {
    let file_id = file_id.into();
    let mut state = T::default();
    let operation = operation_names::FILE_DELETE;
    let model = "files";
    let request_json = format!(r#"{{"file_id":"{file_id}"}}"#);
    self.call_before_request(operation, model, &request_json, &mut state)
        .await?;
    let start_time = Instant::now();
    let response = match files_api::delete_file()
        .configuration(&self.client.base_configuration)
        .file_id(&file_id)
        .call()
        .await
    {
        Ok(resp) => resp,
        Err(e) => {
            let error = self
                .handle_api_error(e, operation, model, &request_json, &state)
                .await;
            return Err(error);
        }
    };
    let duration = start_time.elapsed();
    self.call_after_response(
        &response,
        operation,
        model,
        &request_json,
        &state,
        duration,
        None,
        None,
    )
    .await;
    Ok(response)
}
/// Builder-based convenience wrapper around [`Self::delete`].
pub async fn remove(&self, builder: FileDeleteBuilder) -> Result<DeleteFileResponse> {
    let id = builder.file_id();
    self.delete(id).await
}
}
impl<T: Default + Send + Sync> VectorStoresClient<'_, T> {
    /// Creates a vector store from the settings captured in `builder`.
    ///
    /// # Errors
    /// Returns an error if a `before_request` interceptor rejects the call or
    /// the underlying API request fails.
    pub async fn create(
        &self,
        builder: crate::builders::vector_stores::VectorStoreBuilder,
    ) -> Result<VectorStoreObject> {
        use openai_client_base::models::{CreateVectorStoreRequest, VectorStoreExpirationAfter};
        let mut request = CreateVectorStoreRequest::new();
        request.name = builder.name_ref().map(String::from);
        // Only send `file_ids` when the builder actually holds files.
        request.file_ids = if builder.has_files() {
            Some(builder.file_ids_ref().to_vec())
        } else {
            None
        };
        if let Some(expires_after) = builder.expires_after_ref() {
            use openai_client_base::models::vector_store_expiration_after::Anchor;
            // Expiration is always anchored to `last_active_at` in this wrapper.
            request.expires_after = Some(Box::new(VectorStoreExpirationAfter::new(
                Anchor::LastActiveAt,
                expires_after.days,
            )));
        }
        if !builder.metadata_ref().is_empty() {
            request.metadata = Some(Some(builder.metadata_ref().clone()));
        }
        let mut state = T::default();
        let operation = operation_names::VECTOR_STORE_CREATE;
        let model = "vector-store";
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match vector_stores_api::create_vector_store()
            .configuration(&self.client.base_configuration)
            .create_vector_store_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                // Route failures through the error interceptors before returning.
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Lists vector stores with optional pagination parameters.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn list(
        &self,
        limit: Option<i32>,
        order: Option<&str>,
        after: Option<&str>,
        before: Option<&str>,
    ) -> Result<ListVectorStoresResponse> {
        let mut state = T::default();
        let operation = operation_names::VECTOR_STORE_LIST;
        let model = "vector-store";
        // Interceptor summary only covers limit/order; after/before are omitted.
        let request_json = format!(
            r#"{{"limit":{},"order":"{}"}}"#,
            limit.unwrap_or(20),
            order.unwrap_or("desc")
        );
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match vector_stores_api::list_vector_stores()
            .configuration(&self.client.base_configuration)
            .maybe_limit(limit)
            .maybe_order(order)
            .maybe_after(after)
            .maybe_before(before)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Retrieves a single vector store by id.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn get(&self, vector_store_id: impl Into<String>) -> Result<VectorStoreObject> {
        let id = vector_store_id.into();
        let mut state = T::default();
        let operation = operation_names::VECTOR_STORE_RETRIEVE;
        let model = "vector-store";
        let request_json = format!(r#"{{"vector_store_id":"{id}"}}"#);
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match vector_stores_api::get_vector_store()
            .configuration(&self.client.base_configuration)
            .vector_store_id(&id)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Updates an existing vector store from the (reused) create builder.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn update(
        &self,
        vector_store_id: impl Into<String>,
        builder: crate::builders::vector_stores::VectorStoreBuilder,
    ) -> Result<VectorStoreObject> {
        use openai_client_base::models::{UpdateVectorStoreRequest, VectorStoreExpirationAfter};
        let id = vector_store_id.into();
        let mut request = UpdateVectorStoreRequest::new();
        request.name = builder.name_ref().map(String::from);
        if let Some(expires_after) = builder.expires_after_ref() {
            use openai_client_base::models::vector_store_expiration_after::Anchor;
            request.expires_after = Some(Box::new(VectorStoreExpirationAfter::new(
                Anchor::LastActiveAt,
                expires_after.days,
            )));
        }
        if !builder.metadata_ref().is_empty() {
            request.metadata = Some(Some(builder.metadata_ref().clone()));
        }
        let mut state = T::default();
        let operation = operation_names::VECTOR_STORE_UPDATE;
        let model = "vector-store";
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match vector_stores_api::modify_vector_store()
            .configuration(&self.client.base_configuration)
            .vector_store_id(&id)
            .update_vector_store_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Deletes a vector store by id.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn delete(
        &self,
        vector_store_id: impl Into<String>,
    ) -> Result<DeleteVectorStoreResponse> {
        let id = vector_store_id.into();
        let mut state = T::default();
        let operation = operation_names::VECTOR_STORE_DELETE;
        let model = "vector-store";
        let request_json = format!(r#"{{"vector_store_id":"{id}"}}"#);
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match vector_stores_api::delete_vector_store()
            .configuration(&self.client.base_configuration)
            .vector_store_id(&id)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Attaches an existing file to a vector store.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn add_file(
        &self,
        vector_store_id: impl Into<String>,
        file_id: impl Into<String>,
    ) -> Result<VectorStoreFileObject> {
        use openai_client_base::models::CreateVectorStoreFileRequest;
        let vs_id = vector_store_id.into();
        let f_id = file_id.into();
        let request = CreateVectorStoreFileRequest::new(f_id.clone());
        let mut state = T::default();
        let operation = operation_names::VECTOR_STORE_FILE_ADD;
        let model = "vector-store";
        let request_json = format!(r#"{{"vector_store_id":"{vs_id}","file_id":"{f_id}"}}"#);
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match vector_stores_api::create_vector_store_file()
            .configuration(&self.client.base_configuration)
            .vector_store_id(&vs_id)
            .create_vector_store_file_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Lists files attached to a vector store, with pagination and filtering.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn list_files(
        &self,
        vector_store_id: impl Into<String>,
        limit: Option<i32>,
        order: Option<&str>,
        after: Option<&str>,
        before: Option<&str>,
        filter: Option<&str>,
    ) -> Result<ListVectorStoreFilesResponse> {
        let id = vector_store_id.into();
        let mut state = T::default();
        let operation = operation_names::VECTOR_STORE_FILE_LIST;
        let model = "vector-store";
        // Interceptor summary includes only the store id, not the paging args.
        let request_json = format!(r#"{{"vector_store_id":"{id}"}}"#);
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match vector_stores_api::list_vector_store_files()
            .configuration(&self.client.base_configuration)
            .vector_store_id(&id)
            .maybe_limit(limit)
            .maybe_order(order)
            .maybe_after(after)
            .maybe_before(before)
            .maybe_filter(filter)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Retrieves a single vector-store file association.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn get_file(
        &self,
        vector_store_id: impl Into<String>,
        file_id: impl Into<String>,
    ) -> Result<VectorStoreFileObject> {
        let vs_id = vector_store_id.into();
        let f_id = file_id.into();
        let mut state = T::default();
        let operation = operation_names::VECTOR_STORE_FILE_RETRIEVE;
        let model = "vector-store";
        let request_json = format!(r#"{{"vector_store_id":"{vs_id}","file_id":"{f_id}"}}"#);
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match vector_stores_api::get_vector_store_file()
            .configuration(&self.client.base_configuration)
            .vector_store_id(&vs_id)
            .file_id(&f_id)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Detaches a file from a vector store (the file itself is not deleted).
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn delete_file(
        &self,
        vector_store_id: impl Into<String>,
        file_id: impl Into<String>,
    ) -> Result<DeleteVectorStoreFileResponse> {
        let vs_id = vector_store_id.into();
        let f_id = file_id.into();
        let mut state = T::default();
        let operation = operation_names::VECTOR_STORE_FILE_DELETE;
        let model = "vector-store";
        let request_json = format!(r#"{{"vector_store_id":"{vs_id}","file_id":"{f_id}"}}"#);
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match vector_stores_api::delete_vector_store_file()
            .configuration(&self.client.base_configuration)
            .vector_store_id(&vs_id)
            .file_id(&f_id)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Runs a text search against a vector store.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn search(
        &self,
        builder: crate::builders::vector_stores::VectorStoreSearchBuilder,
    ) -> Result<VectorStoreSearchResultsPage> {
        use openai_client_base::models::{VectorStoreSearchRequest, VectorStoreSearchRequestQuery};
        let query = VectorStoreSearchRequestQuery::new_text(builder.query().to_string());
        let mut request = VectorStoreSearchRequest::new(query);
        // Builder `limit` maps onto the API's `max_num_results`.
        if let Some(limit) = builder.limit_ref() {
            request.max_num_results = Some(limit);
        }
        let vs_id = builder.vector_store_id().to_string();
        let mut state = T::default();
        let operation = operation_names::VECTOR_STORE_SEARCH;
        let model = "vector-store";
        // NOTE(review): the query text is interpolated without JSON escaping;
        // a quote in the query would make this interceptor payload invalid JSON.
        let request_json = format!(
            r#"{{"vector_store_id":"{}","query":"{}"}}"#,
            vs_id,
            builder.query()
        );
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match vector_stores_api::search_vector_store()
            .configuration(&self.client.base_configuration)
            .vector_store_id(&vs_id)
            .vector_store_search_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
}
impl<T: Default + Send + Sync> BatchClient<'_, T> {
    /// Creates a batch job; the completion window is fixed at 24 hours.
    ///
    /// # Errors
    /// Returns an error if a `before_request` interceptor rejects the call or
    /// the underlying API request fails.
    pub async fn create(&self, builder: crate::builders::batch::BatchJobBuilder) -> Result<Batch> {
        use openai_client_base::models::create_batch_request::{CompletionWindow, Endpoint};
        // Map the crate's endpoint enum onto the generated client's variants.
        let endpoint = match builder.endpoint() {
            crate::builders::batch::BatchEndpoint::ChatCompletions => {
                Endpoint::SlashV1SlashChatSlashCompletions
            }
            crate::builders::batch::BatchEndpoint::Embeddings => Endpoint::SlashV1SlashEmbeddings,
            crate::builders::batch::BatchEndpoint::Completions => Endpoint::SlashV1SlashCompletions,
        };
        let mut request = CreateBatchRequest::new(
            builder.input_file_id().to_string(),
            endpoint,
            CompletionWindow::Variant24h,
        );
        if builder.has_metadata() {
            request.metadata = Some(Some(builder.metadata_ref().clone()));
        }
        let mut state = T::default();
        let operation = operation_names::BATCH_CREATE;
        let model = "batch";
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match batch_api::create_batch()
            .configuration(&self.client.base_configuration)
            .create_batch_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                // Route failures through the error interceptors before returning.
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Lists batch jobs with optional cursor/limit pagination.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn list(
        &self,
        after: Option<&str>,
        limit: Option<i32>,
    ) -> Result<ListBatchesResponse> {
        let mut state = T::default();
        let operation = operation_names::BATCH_LIST;
        let model = "batch";
        // Debug-formatted options: this summary is for interceptors, not the API.
        let request_json = format!("{{\"after\":{after:?},\"limit\":{limit:?}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match batch_api::list_batches()
            .configuration(&self.client.base_configuration)
            .maybe_after(after)
            .maybe_limit(limit)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Retrieves a batch job by id.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn get(&self, batch_id: impl Into<String>) -> Result<Batch> {
        let id = batch_id.into();
        let mut state = T::default();
        let operation = operation_names::BATCH_RETRIEVE;
        let model = "batch";
        let request_json = format!("{{\"batch_id\":\"{id}\"}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match batch_api::retrieve_batch()
            .configuration(&self.client.base_configuration)
            .batch_id(&id)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Cancels an in-progress batch job by id.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn cancel(&self, batch_id: impl Into<String>) -> Result<Batch> {
        let id = batch_id.into();
        let mut state = T::default();
        let operation = operation_names::BATCH_CANCEL;
        let model = "batch";
        let request_json = format!("{{\"batch_id\":\"{id}\"}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match batch_api::cancel_batch()
            .configuration(&self.client.base_configuration)
            .batch_id(&id)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
}
impl<T: Default + Send + Sync> FineTuningClient<'_, T> {
    /// Creates a fine-tuning job from the builder's model and training file.
    ///
    /// # Errors
    /// Returns an error if a `before_request` interceptor rejects the call or
    /// the underlying API request fails.
    pub async fn create_job(
        &self,
        builder: crate::builders::fine_tuning::FineTuningJobBuilder,
    ) -> Result<FineTuningJob> {
        let mut request = CreateFineTuningJobRequest::new(
            builder.model().to_string(),
            builder.training_file().to_string(),
        );
        if let Some(validation_file) = builder.validation_file_ref() {
            request.validation_file = Some(validation_file.to_string());
        }
        if let Some(suffix) = builder.suffix_ref() {
            request.suffix = Some(suffix.to_string());
        }
        let mut state = T::default();
        let operation = operation_names::FINE_TUNING_CREATE;
        // Unlike the other methods in this impl, the actual model is known here.
        let model = builder.model();
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match fine_tuning_api::create_fine_tuning_job()
            .configuration(&self.client.base_configuration)
            .create_fine_tuning_job_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                // Route failures through the error interceptors before returning.
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Lists fine-tuning jobs with optional cursor/limit pagination.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn list_jobs(
        &self,
        after: Option<&str>,
        limit: Option<i32>,
    ) -> Result<ListPaginatedFineTuningJobsResponse> {
        let mut state = T::default();
        let operation = operation_names::FINE_TUNING_LIST;
        let model = "fine-tuning";
        // Debug-formatted options: this summary is for interceptors, not the API.
        let request_json = format!("{{\"after\":{after:?},\"limit\":{limit:?}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match fine_tuning_api::list_paginated_fine_tuning_jobs()
            .configuration(&self.client.base_configuration)
            .maybe_after(after)
            .maybe_limit(limit)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Retrieves a fine-tuning job by id.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn get_job(&self, job_id: impl Into<String>) -> Result<FineTuningJob> {
        let id = job_id.into();
        let mut state = T::default();
        let operation = operation_names::FINE_TUNING_RETRIEVE;
        let model = "fine-tuning";
        let request_json = format!("{{\"job_id\":\"{id}\"}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match fine_tuning_api::retrieve_fine_tuning_job()
            .configuration(&self.client.base_configuration)
            .fine_tuning_job_id(&id)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Cancels an in-progress fine-tuning job by id.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn cancel_job(&self, job_id: impl Into<String>) -> Result<FineTuningJob> {
        let id = job_id.into();
        let mut state = T::default();
        let operation = operation_names::FINE_TUNING_CANCEL;
        let model = "fine-tuning";
        let request_json = format!("{{\"job_id\":\"{id}\"}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match fine_tuning_api::cancel_fine_tuning_job()
            .configuration(&self.client.base_configuration)
            .fine_tuning_job_id(&id)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Lists events for a fine-tuning job, with pagination.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn list_events(
        &self,
        job_id: impl Into<String>,
        after: Option<&str>,
        limit: Option<i32>,
    ) -> Result<ListFineTuningJobEventsResponse> {
        let id = job_id.into();
        let mut state = T::default();
        let operation = operation_names::FINE_TUNING_LIST_EVENTS;
        let model = "fine-tuning";
        let request_json =
            format!("{{\"job_id\":\"{id}\",\"after\":{after:?},\"limit\":{limit:?}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match fine_tuning_api::list_fine_tuning_events()
            .configuration(&self.client.base_configuration)
            .fine_tuning_job_id(&id)
            .maybe_after(after)
            .maybe_limit(limit)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Lists checkpoints for a fine-tuning job, with pagination.
    ///
    /// # Errors
    /// Returns an error if an interceptor rejects the call or the request fails.
    pub async fn list_checkpoints(
        &self,
        job_id: impl Into<String>,
        after: Option<&str>,
        limit: Option<i32>,
    ) -> Result<ListFineTuningJobCheckpointsResponse> {
        let id = job_id.into();
        let mut state = T::default();
        let operation = operation_names::FINE_TUNING_LIST_CHECKPOINTS;
        let model = "fine-tuning";
        let request_json =
            format!("{{\"job_id\":\"{id}\",\"after\":{after:?},\"limit\":{limit:?}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match fine_tuning_api::list_fine_tuning_job_checkpoints()
            .configuration(&self.client.base_configuration)
            .fine_tuning_job_id(&id)
            .maybe_after(after)
            .maybe_limit(limit)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
}
fn map_api_error<T>(error: ApiError<T>) -> Error {
match error {
ApiError::Reqwest(err) => Error::Http(err),
ApiError::ReqwestMiddleware(err) => {
Error::Internal(format!("reqwest middleware error: {err}"))
}
ApiError::Serde(err) => Error::Json(err),
ApiError::Io(err) => Error::File(err),
ApiError::ResponseError(response) => Error::Api {
status: response.status.as_u16(),
message: response.content,
error_type: None,
error_code: None,
},
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use openai_client_base::apis::{Error as BaseError, ResponseContent};
    // `map_api_error` should surface HTTP error responses as `Error::Api`
    // with the status code and raw body preserved.
    #[test]
    fn map_api_error_converts_response() {
        let response = ResponseContent {
            status: reqwest::StatusCode::BAD_REQUEST,
            content: "bad request".to_string(),
            entity: Option::<()>::None,
        };
        let error = map_api_error(BaseError::ResponseError(response));
        match error {
            Error::Api {
                status, message, ..
            } => {
                assert_eq!(status, 400);
                assert!(message.contains("bad request"));
            }
            other => panic!("expected API error, got {other:?}"),
        }
    }
    // Moderation builder defaults: input is kept verbatim, model unset.
    #[test]
    fn test_moderation_builder_creation() {
        use crate::builders::moderations::ModerationBuilder;
        let builder = ModerationBuilder::new("Test content");
        let request = builder.build().unwrap();
        assert_eq!(request.input, "Test content");
        assert!(request.model.is_none());
    }
    // `model(...)` should carry through to the built request.
    #[test]
    fn test_moderation_builder_with_model() {
        use crate::builders::moderations::ModerationBuilder;
        let builder = ModerationBuilder::new("Test content").model("text-moderation-stable");
        let request = builder.build().unwrap();
        assert_eq!(request.input, "Test content");
        assert_eq!(request.model, Some("text-moderation-stable".to_string()));
    }
    // Array inputs are flattened into a single newline-joined string.
    #[test]
    fn test_moderation_builder_array_input() {
        use crate::builders::moderations::ModerationBuilder;
        let inputs = vec!["First text".to_string(), "Second text".to_string()];
        let builder = ModerationBuilder::new_array(inputs);
        let request = builder.build().unwrap();
        assert_eq!(request.input, "First text\nSecond text");
    }
    // Upload builder exposes filename, content, and size accessors.
    #[test]
    fn test_file_upload_builder_creation() {
        use crate::builders::files::{FilePurpose, FileUploadBuilder};
        let content = b"test content".to_vec();
        let builder = FileUploadBuilder::new("test.txt", FilePurpose::Assistants, content.clone());
        assert_eq!(builder.filename(), "test.txt");
        assert_eq!(builder.content(), content.as_slice());
        assert_eq!(builder.content_size(), content.len());
        assert!(!builder.is_empty());
    }
    // `from_text` round-trips the text through `content_as_string`.
    #[test]
    fn test_file_upload_builder_from_text() {
        use crate::builders::files::{FilePurpose, FileUploadBuilder};
        let builder =
            FileUploadBuilder::from_text("hello.txt", FilePurpose::FineTune, "Hello, world!");
        assert_eq!(builder.filename(), "hello.txt");
        assert_eq!(
            builder.content_as_string(),
            Some("Hello, world!".to_string())
        );
        assert!(!builder.is_empty());
    }
    // List builder stores all three optional filters.
    #[test]
    fn test_file_list_builder() {
        use crate::builders::files::{FileListBuilder, FileOrder, FilePurpose};
        let builder = FileListBuilder::new()
            .purpose(FilePurpose::Assistants)
            .limit(10)
            .order(FileOrder::Desc);
        assert!(builder.purpose_ref().is_some());
        assert_eq!(builder.limit_ref(), Some(10));
        assert!(builder.order_ref().is_some());
    }
    #[test]
    fn test_file_retrieval_builder() {
        use crate::builders::files::FileRetrievalBuilder;
        let builder = FileRetrievalBuilder::new("file-123");
        assert_eq!(builder.file_id(), "file-123");
    }
    #[test]
    fn test_file_delete_builder() {
        use crate::builders::files::FileDeleteBuilder;
        let builder = FileDeleteBuilder::new("file-456");
        assert_eq!(builder.file_id(), "file-456");
    }
    // Display impl must match the API's expected purpose strings.
    #[test]
    fn test_file_purpose_display() {
        use crate::builders::files::FilePurpose;
        assert_eq!(FilePurpose::FineTune.to_string(), "fine-tune");
        assert_eq!(FilePurpose::Assistants.to_string(), "assistants");
        assert_eq!(FilePurpose::Vision.to_string(), "vision");
        assert_eq!(FilePurpose::Batch.to_string(), "batch");
    }
    #[test]
    fn test_vector_store_builder_basic() {
        use crate::builders::vector_stores::VectorStoreBuilder;
        let builder = VectorStoreBuilder::new()
            .name("Test Store")
            .add_file("file-1")
            .metadata("key", "value");
        assert_eq!(builder.name_ref(), Some("Test Store"));
        assert_eq!(builder.file_count(), 1);
        assert!(builder.has_files());
        assert_eq!(builder.metadata_ref().len(), 1);
    }
    #[test]
    fn test_vector_store_builder_with_expiration() {
        use crate::builders::vector_stores::VectorStoreBuilder;
        let builder = VectorStoreBuilder::new()
            .name("Temp Store")
            .expires_after_days(30);
        assert_eq!(builder.name_ref(), Some("Temp Store"));
        assert!(builder.expires_after_ref().is_some());
        assert_eq!(builder.expires_after_ref().unwrap().days, 30);
    }
    // Bulk `file_ids` replaces the file list wholesale.
    #[test]
    fn test_vector_store_builder_multiple_files() {
        use crate::builders::vector_stores::VectorStoreBuilder;
        let files = vec!["file-1".to_string(), "file-2".to_string()];
        let builder = VectorStoreBuilder::new()
            .name("Multi-File Store")
            .file_ids(files.clone());
        assert_eq!(builder.file_ids_ref(), files.as_slice());
        assert_eq!(builder.file_count(), 2);
    }
    #[test]
    fn test_vector_store_file_builder() {
        use crate::builders::vector_stores::VectorStoreFileBuilder;
        let builder = VectorStoreFileBuilder::new("vs-123", "file-456");
        assert_eq!(builder.vector_store_id(), "vs-123");
        assert_eq!(builder.file_id(), "file-456");
    }
    #[test]
    fn test_vector_store_search_builder() {
        use crate::builders::vector_stores::VectorStoreSearchBuilder;
        let builder = VectorStoreSearchBuilder::new("vs-123", "test query")
            .limit(10)
            .filter("category", "docs");
        assert_eq!(builder.vector_store_id(), "vs-123");
        assert_eq!(builder.query(), "test query");
        assert_eq!(builder.limit_ref(), Some(10));
        assert_eq!(builder.filter_ref().len(), 1);
    }
    // Fresh search builder has no limit and no filters.
    #[test]
    fn test_vector_store_search_builder_default() {
        use crate::builders::vector_stores::VectorStoreSearchBuilder;
        let builder = VectorStoreSearchBuilder::new("vs-123", "query");
        assert!(builder.limit_ref().is_none());
        assert!(builder.filter_ref().is_empty());
    }
}
/// Sub-client for the Assistants API; a cheap `Copy` view borrowing the
/// parent [`Client`]'s configuration and interceptor state.
#[derive(Debug, Clone, Copy)]
pub struct AssistantsClient<'a, T = ()> {
    /// Parent client supplying `base_configuration` and interceptors.
    client: &'a Client<T>,
}
impl<T: Default + Send + Sync> AssistantsClient<'_, T> {
pub async fn create(&self, builder: AssistantBuilder) -> Result<AssistantObject> {
let request = builder.build()?;
let mut state = T::default();
let operation = operation_names::ASSISTANT_CREATE;
let model = request.model.clone();
let request_json = serde_json::to_string(&request).unwrap_or_default();
self.call_before_request(operation, &model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::create_assistant()
.configuration(&self.client.base_configuration)
.create_assistant_request(request)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, &model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
&model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn list(
&self,
limit: Option<i32>,
order: Option<&str>,
after: Option<&str>,
before: Option<&str>,
) -> Result<ListAssistantsResponse> {
let mut state = T::default();
let operation = operation_names::ASSISTANT_LIST;
let model = "assistants";
let request_json = format!(
"{{\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?}}}"
);
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::list_assistants()
.configuration(&self.client.base_configuration)
.maybe_limit(limit)
.maybe_order(order)
.maybe_after(after)
.maybe_before(before)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn get(&self, assistant_id: impl Into<String>) -> Result<AssistantObject> {
let id = assistant_id.into();
let mut state = T::default();
let operation = operation_names::ASSISTANT_RETRIEVE;
let model = "assistants";
let request_json = format!("{{\"assistant_id\":\"{id}\"}}");
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::get_assistant()
.configuration(&self.client.base_configuration)
.assistant_id(&id)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
    /// Updates an existing assistant.
    ///
    /// The `AssistantBuilder` payload (a create-request shape) is converted
    /// field-by-field into a `ModifyAssistantRequest` before dispatch;
    /// interceptors observe the serialized request and the response.
    pub async fn update(
        &self,
        assistant_id: impl Into<String>,
        builder: AssistantBuilder,
    ) -> Result<AssistantObject> {
        use openai_client_base::models::ModifyAssistantRequest;
        let id = assistant_id.into();
        let request_data = builder.build()?;
        let mut request = ModifyAssistantRequest::new();
        request.model = Some(request_data.model);
        // NOTE(review): for name/description/instructions the `Null` variant is
        // mapped to `None` (field omitted from the PATCH) rather than
        // `Some(None)` (explicit JSON null). If callers use `Null` to clear a
        // field on the server, that intent is silently dropped — confirm the
        // intended semantics against the Assistants API.
        request.name = request_data.name.and_then(|n| match *n {
            openai_client_base::models::CreateAssistantRequestName::Text(text) => Some(Some(text)),
            openai_client_base::models::CreateAssistantRequestName::Null => None,
        });
        request.description = request_data.description.and_then(|d| match *d {
            openai_client_base::models::CreateAssistantRequestDescription::Text(text) => {
                Some(Some(text))
            }
            openai_client_base::models::CreateAssistantRequestDescription::Null => None,
        });
        request.instructions = request_data.instructions.and_then(|i| match *i {
            openai_client_base::models::CreateAssistantRequestInstructions::Text(text) => {
                Some(Some(text))
            }
            openai_client_base::models::CreateAssistantRequestInstructions::Null => None,
        });
        request.tools = request_data.tools;
        request.metadata = request_data.metadata;
        let mut state = T::default();
        let operation = operation_names::ASSISTANT_UPDATE;
        // Model label for telemetry: the request's model if set, else a fallback.
        let model = request
            .model
            .as_ref()
            .map_or_else(|| "assistants".to_string(), Clone::clone);
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        self.call_before_request(operation, &model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match assistants_api::modify_assistant()
            .configuration(&self.client.base_configuration)
            .assistant_id(&id)
            .modify_assistant_request(request)
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                // Give error interceptors a chance to observe/transform the failure.
                let error = self
                    .handle_api_error(e, operation, &model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            &model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
pub async fn delete(&self, assistant_id: impl Into<String>) -> Result<DeleteAssistantResponse> {
let id = assistant_id.into();
let mut state = T::default();
let operation = operation_names::ASSISTANT_DELETE;
let model = "assistants";
let request_json = format!("{{\"assistant_id\":\"{id}\"}}");
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::delete_assistant()
.configuration(&self.client.base_configuration)
.assistant_id(&id)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn create_run(
&self,
thread_id: impl Into<String>,
builder: RunBuilder,
) -> Result<RunObject> {
let thread_id = thread_id.into();
let request = builder.build()?;
let mut state = T::default();
let operation = operation_names::RUN_CREATE;
let model = request
.model
.as_ref()
.map_or_else(|| "runs".to_string(), Clone::clone);
let request_json = serde_json::to_string(&request).unwrap_or_default();
self.call_before_request(operation, &model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::create_run()
.configuration(&self.client.base_configuration)
.thread_id(&thread_id)
.create_run_request(request)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, &model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
&model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn list_runs(
&self,
thread_id: impl Into<String>,
limit: Option<i32>,
order: Option<&str>,
after: Option<&str>,
before: Option<&str>,
) -> Result<ListRunsResponse> {
let thread_id = thread_id.into();
let mut state = T::default();
let operation = operation_names::RUN_LIST;
let model = "runs";
let request_json = format!(
"{{\"thread_id\":\"{thread_id}\",\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?}}}"
);
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::list_runs()
.configuration(&self.client.base_configuration)
.thread_id(&thread_id)
.maybe_limit(limit)
.maybe_order(order)
.maybe_after(after)
.maybe_before(before)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn get_run(
&self,
thread_id: impl Into<String>,
run_id: impl Into<String>,
) -> Result<RunObject> {
let thread_id = thread_id.into();
let run_id = run_id.into();
let mut state = T::default();
let operation = operation_names::RUN_RETRIEVE;
let model = "runs";
let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\"}}");
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::get_run()
.configuration(&self.client.base_configuration)
.thread_id(&thread_id)
.run_id(&run_id)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn cancel_run(
&self,
thread_id: impl Into<String>,
run_id: impl Into<String>,
) -> Result<RunObject> {
let thread_id = thread_id.into();
let run_id = run_id.into();
let mut state = T::default();
let operation = operation_names::RUN_CANCEL;
let model = "runs";
let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\"}}");
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::cancel_run()
.configuration(&self.client.base_configuration)
.thread_id(&thread_id)
.run_id(&run_id)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn submit_tool_outputs(
&self,
thread_id: impl Into<String>,
run_id: impl Into<String>,
tool_outputs: Vec<SubmitToolOutputsRunRequestToolOutputsInner>,
) -> Result<RunObject> {
use openai_client_base::models::SubmitToolOutputsRunRequest;
let thread_id = thread_id.into();
let run_id = run_id.into();
let request = SubmitToolOutputsRunRequest::new(tool_outputs);
let mut state = T::default();
let operation = operation_names::RUN_SUBMIT_TOOL_OUTPUTS;
let model = "runs";
let request_json = serde_json::to_string(&request).unwrap_or_default();
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::submit_tool_ouputs_to_run()
.configuration(&self.client.base_configuration)
.thread_id(&thread_id)
.run_id(&run_id)
.submit_tool_outputs_run_request(request)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn create_message(
&self,
thread_id: impl Into<String>,
builder: MessageBuilder,
) -> Result<MessageObject> {
let thread_id = thread_id.into();
let request = builder.build()?;
let mut state = T::default();
let operation = operation_names::MESSAGE_CREATE;
let model = "messages";
let request_json = serde_json::to_string(&request).unwrap_or_default();
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::create_message()
.configuration(&self.client.base_configuration)
.thread_id(&thread_id)
.create_message_request(request)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn list_messages(
&self,
thread_id: impl Into<String>,
limit: Option<i32>,
order: Option<&str>,
after: Option<&str>,
before: Option<&str>,
run_id: Option<&str>,
) -> Result<ListMessagesResponse> {
let thread_id = thread_id.into();
let mut state = T::default();
let operation = operation_names::MESSAGE_LIST;
let model = "messages";
let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?},\"run_id\":{run_id:?}}}");
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::list_messages()
.configuration(&self.client.base_configuration)
.thread_id(&thread_id)
.maybe_limit(limit)
.maybe_order(order)
.maybe_after(after)
.maybe_before(before)
.maybe_run_id(run_id)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn get_message(
&self,
thread_id: impl Into<String>,
message_id: impl Into<String>,
) -> Result<MessageObject> {
let thread_id = thread_id.into();
let message_id = message_id.into();
let mut state = T::default();
let operation = operation_names::MESSAGE_RETRIEVE;
let model = "messages";
let request_json =
format!("{{\"thread_id\":\"{thread_id}\",\"message_id\":\"{message_id}\"}}");
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::get_message()
.configuration(&self.client.base_configuration)
.thread_id(&thread_id)
.message_id(&message_id)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
#[allow(clippy::too_many_arguments)]
pub async fn list_run_steps(
&self,
thread_id: impl Into<String>,
run_id: impl Into<String>,
limit: Option<i32>,
order: Option<&str>,
after: Option<&str>,
before: Option<&str>,
include: Option<Vec<String>>,
) -> Result<ListRunStepsResponse> {
let thread_id = thread_id.into();
let run_id = run_id.into();
let mut state = T::default();
let operation = operation_names::RUN_STEP_LIST;
let model = "run_steps";
let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\",\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?},\"include\":{include:?}}}");
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::list_run_steps()
.configuration(&self.client.base_configuration)
.thread_id(&thread_id)
.run_id(&run_id)
.maybe_limit(limit)
.maybe_order(order)
.maybe_after(after)
.maybe_before(before)
.maybe_include_left_square_bracket_right_square_bracket(include)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
pub async fn get_run_step(
&self,
thread_id: impl Into<String>,
run_id: impl Into<String>,
step_id: impl Into<String>,
include: Option<Vec<String>>,
) -> Result<RunStepObject> {
let thread_id = thread_id.into();
let run_id = run_id.into();
let step_id = step_id.into();
let mut state = T::default();
let operation = operation_names::RUN_STEP_RETRIEVE;
let model = "run_steps";
let request_json = format!(
"{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\",\"step_id\":\"{step_id}\",\"include\":{include:?}}}"
);
self.call_before_request(operation, model, &request_json, &mut state)
.await?;
let start_time = Instant::now();
let response = match assistants_api::get_run_step()
.configuration(&self.client.base_configuration)
.thread_id(&thread_id)
.run_id(&run_id)
.step_id(&step_id)
.maybe_include_left_square_bracket_right_square_bracket(include)
.call()
.await
{
Ok(resp) => resp,
Err(e) => {
let error = self
.handle_api_error(e, operation, model, &request_json, &state)
.await;
return Err(error);
}
};
let duration = start_time.elapsed();
self.call_after_response(
&response,
operation,
model,
&request_json,
&state,
duration,
None,
None,
)
.await;
Ok(response)
}
}
// Scoped sub-client handles. Each is a zero-cost wrapper over a shared
// `&Client<T>` that groups the methods of one API area; the
// `impl_interceptor_helpers!` invocations below give every handle the
// before-request / after-response / error-handling plumbing its methods use.
/// Audio endpoints (speech, transcription, translation).
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
pub struct AudioClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Embeddings endpoints.
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
pub struct EmbeddingsClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Image generation/edit/variation endpoints.
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
pub struct ImagesClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// File upload/list/retrieve/delete endpoints.
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
pub struct FilesClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Fine-tuning job endpoints.
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
pub struct FineTuningClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Batch API endpoints.
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
pub struct BatchClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Vector store endpoints.
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
pub struct VectorStoresClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Moderation endpoints.
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
pub struct ModerationsClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Thread endpoints (Assistants API).
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
pub struct ThreadsClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Multipart upload endpoints.
#[derive(Debug, Clone, Copy)]
#[allow(dead_code)]
pub struct UploadsClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Model listing/retrieval/deletion endpoints.
#[derive(Debug, Clone, Copy)]
pub struct ModelsClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Legacy text-completions endpoints.
#[derive(Debug, Clone, Copy)]
pub struct CompletionsClient<'a, T = ()> {
    client: &'a Client<T>,
}
/// Organization usage/cost reporting endpoints.
#[derive(Debug, Clone, Copy)]
pub struct UsageClient<'a, T = ()> {
    client: &'a Client<T>,
}
impl_interceptor_helpers!(AssistantsClient<'_, T>);
impl_interceptor_helpers!(AudioClient<'_, T>);
impl_interceptor_helpers!(EmbeddingsClient<'_, T>);
impl_interceptor_helpers!(ImagesClient<'_, T>);
impl_interceptor_helpers!(FilesClient<'_, T>);
impl_interceptor_helpers!(FineTuningClient<'_, T>);
impl_interceptor_helpers!(BatchClient<'_, T>);
impl_interceptor_helpers!(VectorStoresClient<'_, T>);
impl_interceptor_helpers!(ModerationsClient<'_, T>);
impl_interceptor_helpers!(ThreadsClient<'_, T>);
impl_interceptor_helpers!(UploadsClient<'_, T>);
impl_interceptor_helpers!(ModelsClient<'_, T>);
impl_interceptor_helpers!(CompletionsClient<'_, T>);
impl_interceptor_helpers!(UsageClient<'_, T>);
impl<T: Default + Send + Sync> ModelsClient<'_, T> {
    /// Lists all models available to the account.
    pub async fn list(&self) -> Result<ListModelsResponse> {
        let operation = operation_names::MODEL_LIST;
        let model = "models";
        let request_json = "{}".to_string();
        let mut state = T::default();
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let started = Instant::now();
        let api_result = models_api::list_models()
            .configuration(&self.client.base_configuration)
            .call()
            .await;
        let response = match api_result {
            Ok(ok) => ok,
            Err(err) => {
                return Err(self
                    .handle_api_error(err, operation, model, &request_json, &state)
                    .await);
            }
        };
        self.call_after_response(
            &response, operation, model, &request_json, &state, started.elapsed(), None, None,
        )
        .await;
        Ok(response)
    }
    /// Retrieves metadata for a single model by id.
    pub async fn get(&self, model_id: impl Into<String>) -> Result<Model> {
        let id = model_id.into();
        let operation = operation_names::MODEL_RETRIEVE;
        let model = "models";
        let request_json = format!("{{\"model_id\":\"{id}\"}}");
        let mut state = T::default();
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let started = Instant::now();
        let api_result = models_api::retrieve_model()
            .configuration(&self.client.base_configuration)
            .model(&id)
            .call()
            .await;
        let response = match api_result {
            Ok(ok) => ok,
            Err(err) => {
                return Err(self
                    .handle_api_error(err, operation, model, &request_json, &state)
                    .await);
            }
        };
        self.call_after_response(
            &response, operation, model, &request_json, &state, started.elapsed(), None, None,
        )
        .await;
        Ok(response)
    }
    /// Builder-style alias for [`Self::get`].
    pub async fn retrieve(&self, builder: ModelRetrievalBuilder) -> Result<Model> {
        self.get(builder.model_id()).await
    }
    /// Deletes a fine-tuned model by id.
    pub async fn delete(&self, model_id: impl Into<String>) -> Result<DeleteModelResponse> {
        let id = model_id.into();
        let operation = operation_names::MODEL_DELETE;
        let model = "models";
        let request_json = format!("{{\"model_id\":\"{id}\"}}");
        let mut state = T::default();
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let started = Instant::now();
        let api_result = models_api::delete_model()
            .configuration(&self.client.base_configuration)
            .model(&id)
            .call()
            .await;
        let response = match api_result {
            Ok(ok) => ok,
            Err(err) => {
                return Err(self
                    .handle_api_error(err, operation, model, &request_json, &state)
                    .await);
            }
        };
        self.call_after_response(
            &response, operation, model, &request_json, &state, started.elapsed(), None, None,
        )
        .await;
        Ok(response)
    }
    /// Builder-style alias for [`Self::delete`].
    pub async fn remove(&self, builder: ModelDeleteBuilder) -> Result<DeleteModelResponse> {
        self.delete(builder.model_id()).await
    }
}
impl<T: Default + Send + Sync> CompletionsClient<'_, T> {
    /// Returns a builder for the legacy text-completions endpoint.
    #[must_use]
    pub fn builder(&self, model: impl Into<String>) -> CompletionsBuilder {
        CompletionsBuilder::new(model)
    }
    /// Executes a completion request; prompt/completion token counts from the
    /// response usage block are forwarded to after-response interceptors.
    pub async fn create(&self, builder: CompletionsBuilder) -> Result<CreateCompletionResponse> {
        let request = builder.build()?;
        let operation = operation_names::TEXT_COMPLETION;
        let model = request.model.clone();
        let request_json = serde_json::to_string(&request).unwrap_or_default();
        let mut state = T::default();
        self.call_before_request(operation, &model, &request_json, &mut state)
            .await?;
        let started = Instant::now();
        let api_result = completions_api::create_completion()
            .configuration(&self.client.base_configuration)
            .create_completion_request(request)
            .call()
            .await;
        let response = match api_result {
            Ok(ok) => ok,
            Err(err) => {
                return Err(self
                    .handle_api_error(err, operation, &model, &request_json, &state)
                    .await);
            }
        };
        let usage = response.usage.as_ref();
        let prompt_tokens = usage.map(|u| i64::from(u.prompt_tokens));
        let completion_tokens = usage.map(|u| i64::from(u.completion_tokens));
        self.call_after_response(
            &response,
            operation,
            &model,
            &request_json,
            &state,
            started.elapsed(),
            prompt_tokens,
            completion_tokens,
        )
        .await;
        Ok(response)
    }
}
// Organization usage/cost reporting. Every method follows the same protocol:
// summarize the builder's start_time for interceptors, fire before-request,
// time the generated-client call, route failures through handle_api_error,
// and report the elapsed duration via after-response. The query endpoints
// differ only in which filter setters (user_ids / api_key_ids / models) the
// API supports.
impl<T: Default + Send + Sync> UsageClient<'_, T> {
    /// Usage buckets for audio speech (TTS) requests.
    pub async fn audio_speeches(&self, builder: UsageBuilder) -> Result<UsageResponse> {
        let mut state = T::default();
        let operation = operation_names::USAGE_AUDIO_SPEECHES;
        let model = "usage";
        // Only start_time is surfaced to interceptors; other filters are not
        // included in the telemetry payload.
        let start_time = builder.start_time();
        let request_json = format!("{{\"start_time\":{start_time}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        // Shadows the builder's epoch value with a wall-clock timer.
        let start_time = Instant::now();
        let response = match usage_api::usage_audio_speeches()
            .configuration(&self.client.base_configuration)
            .start_time(builder.start_time())
            .maybe_end_time(builder.end_time())
            .maybe_bucket_width(builder.bucket_width_str())
            .maybe_project_ids(builder.project_ids_option())
            .maybe_user_ids(builder.user_ids_option())
            .maybe_api_key_ids(builder.api_key_ids_option())
            .maybe_models(builder.models_option())
            .maybe_group_by(builder.group_by_option())
            .maybe_limit(builder.limit_ref())
            .maybe_page(builder.page_ref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Usage buckets for audio transcription requests.
    pub async fn audio_transcriptions(&self, builder: UsageBuilder) -> Result<UsageResponse> {
        let mut state = T::default();
        let operation = operation_names::USAGE_AUDIO_TRANSCRIPTIONS;
        let model = "usage";
        let start_time = builder.start_time();
        let request_json = format!("{{\"start_time\":{start_time}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match usage_api::usage_audio_transcriptions()
            .configuration(&self.client.base_configuration)
            .start_time(builder.start_time())
            .maybe_end_time(builder.end_time())
            .maybe_bucket_width(builder.bucket_width_str())
            .maybe_project_ids(builder.project_ids_option())
            .maybe_user_ids(builder.user_ids_option())
            .maybe_api_key_ids(builder.api_key_ids_option())
            .maybe_models(builder.models_option())
            .maybe_group_by(builder.group_by_option())
            .maybe_limit(builder.limit_ref())
            .maybe_page(builder.page_ref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Usage buckets for code interpreter sessions.
    /// This endpoint does not accept user/api-key/model filters.
    pub async fn code_interpreter_sessions(&self, builder: UsageBuilder) -> Result<UsageResponse> {
        let mut state = T::default();
        let operation = operation_names::USAGE_CODE_INTERPRETER;
        let model = "usage";
        let start_time = builder.start_time();
        let request_json = format!("{{\"start_time\":{start_time}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match usage_api::usage_code_interpreter_sessions()
            .configuration(&self.client.base_configuration)
            .start_time(builder.start_time())
            .maybe_end_time(builder.end_time())
            .maybe_bucket_width(builder.bucket_width_str())
            .maybe_project_ids(builder.project_ids_option())
            .maybe_group_by(builder.group_by_option())
            .maybe_limit(builder.limit_ref())
            .maybe_page(builder.page_ref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Usage buckets for chat/text completion requests.
    pub async fn completions(&self, builder: UsageBuilder) -> Result<UsageResponse> {
        let mut state = T::default();
        let operation = operation_names::USAGE_COMPLETIONS;
        let model = "usage";
        let start_time = builder.start_time();
        let request_json = format!("{{\"start_time\":{start_time}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match usage_api::usage_completions()
            .configuration(&self.client.base_configuration)
            .start_time(builder.start_time())
            .maybe_end_time(builder.end_time())
            .maybe_bucket_width(builder.bucket_width_str())
            .maybe_project_ids(builder.project_ids_option())
            .maybe_user_ids(builder.user_ids_option())
            .maybe_api_key_ids(builder.api_key_ids_option())
            .maybe_models(builder.models_option())
            .maybe_group_by(builder.group_by_option())
            .maybe_limit(builder.limit_ref())
            .maybe_page(builder.page_ref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Usage buckets for embeddings requests.
    pub async fn embeddings(&self, builder: UsageBuilder) -> Result<UsageResponse> {
        let mut state = T::default();
        let operation = operation_names::USAGE_EMBEDDINGS;
        let model = "usage";
        let start_time = builder.start_time();
        let request_json = format!("{{\"start_time\":{start_time}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match usage_api::usage_embeddings()
            .configuration(&self.client.base_configuration)
            .start_time(builder.start_time())
            .maybe_end_time(builder.end_time())
            .maybe_bucket_width(builder.bucket_width_str())
            .maybe_project_ids(builder.project_ids_option())
            .maybe_user_ids(builder.user_ids_option())
            .maybe_api_key_ids(builder.api_key_ids_option())
            .maybe_models(builder.models_option())
            .maybe_group_by(builder.group_by_option())
            .maybe_limit(builder.limit_ref())
            .maybe_page(builder.page_ref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Usage buckets for image generation requests.
    pub async fn images(&self, builder: UsageBuilder) -> Result<UsageResponse> {
        let mut state = T::default();
        let operation = operation_names::USAGE_IMAGES;
        let model = "usage";
        let start_time = builder.start_time();
        let request_json = format!("{{\"start_time\":{start_time}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match usage_api::usage_images()
            .configuration(&self.client.base_configuration)
            .start_time(builder.start_time())
            .maybe_end_time(builder.end_time())
            .maybe_bucket_width(builder.bucket_width_str())
            .maybe_project_ids(builder.project_ids_option())
            .maybe_user_ids(builder.user_ids_option())
            .maybe_api_key_ids(builder.api_key_ids_option())
            .maybe_models(builder.models_option())
            .maybe_group_by(builder.group_by_option())
            .maybe_limit(builder.limit_ref())
            .maybe_page(builder.page_ref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Usage buckets for moderation requests.
    pub async fn moderations(&self, builder: UsageBuilder) -> Result<UsageResponse> {
        let mut state = T::default();
        let operation = operation_names::USAGE_MODERATIONS;
        let model = "usage";
        let start_time = builder.start_time();
        let request_json = format!("{{\"start_time\":{start_time}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match usage_api::usage_moderations()
            .configuration(&self.client.base_configuration)
            .start_time(builder.start_time())
            .maybe_end_time(builder.end_time())
            .maybe_bucket_width(builder.bucket_width_str())
            .maybe_project_ids(builder.project_ids_option())
            .maybe_user_ids(builder.user_ids_option())
            .maybe_api_key_ids(builder.api_key_ids_option())
            .maybe_models(builder.models_option())
            .maybe_group_by(builder.group_by_option())
            .maybe_limit(builder.limit_ref())
            .maybe_page(builder.page_ref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Usage buckets for vector store storage.
    /// This endpoint does not accept user/api-key/model filters.
    pub async fn vector_stores(&self, builder: UsageBuilder) -> Result<UsageResponse> {
        let mut state = T::default();
        let operation = operation_names::USAGE_VECTOR_STORES;
        let model = "usage";
        let start_time = builder.start_time();
        let request_json = format!("{{\"start_time\":{start_time}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match usage_api::usage_vector_stores()
            .configuration(&self.client.base_configuration)
            .start_time(builder.start_time())
            .maybe_end_time(builder.end_time())
            .maybe_bucket_width(builder.bucket_width_str())
            .maybe_project_ids(builder.project_ids_option())
            .maybe_group_by(builder.group_by_option())
            .maybe_limit(builder.limit_ref())
            .maybe_page(builder.page_ref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
    /// Aggregated cost buckets for the organization.
    /// This endpoint does not accept user/api-key/model filters.
    pub async fn costs(&self, builder: UsageBuilder) -> Result<UsageResponse> {
        let mut state = T::default();
        let operation = operation_names::USAGE_COSTS;
        let model = "usage";
        let start_time = builder.start_time();
        let request_json = format!("{{\"start_time\":{start_time}}}");
        self.call_before_request(operation, model, &request_json, &mut state)
            .await?;
        let start_time = Instant::now();
        let response = match usage_api::usage_costs()
            .configuration(&self.client.base_configuration)
            .start_time(builder.start_time())
            .maybe_end_time(builder.end_time())
            .maybe_bucket_width(builder.bucket_width_str())
            .maybe_project_ids(builder.project_ids_option())
            .maybe_group_by(builder.group_by_option())
            .maybe_limit(builder.limit_ref())
            .maybe_page(builder.page_ref())
            .call()
            .await
        {
            Ok(resp) => resp,
            Err(e) => {
                let error = self
                    .handle_api_error(e, operation, model, &request_json, &state)
                    .await;
                return Err(error);
            }
        };
        let duration = start_time.elapsed();
        self.call_after_response(
            &response,
            operation,
            model,
            &request_json,
            &state,
            duration,
            None,
            None,
        )
        .await;
        Ok(response)
    }
}