1#![allow(clippy::too_many_arguments)]
9
10use crate::interceptor::{
11 AfterResponseContext, BeforeRequestContext, ErrorContext, InterceptorChain,
12};
13use crate::semantic_conventions::operation_names;
14use crate::{
15 builders::{
16 assistants::{AssistantBuilder, MessageBuilder, RunBuilder},
17 audio::{
18 SpeechBuilder, TranscriptionBuilder, TranscriptionRequest, TranslationBuilder,
19 TranslationRequest,
20 },
21 completions::CompletionsBuilder,
22 embeddings::EmbeddingsBuilder,
23 files::{FileDeleteBuilder, FileListBuilder, FileRetrievalBuilder, FileUploadBuilder},
24 images::{
25 ImageEditBuilder, ImageEditRequest, ImageGenerationBuilder, ImageVariationBuilder,
26 ImageVariationRequest,
27 },
28 models::{ModelDeleteBuilder, ModelRetrievalBuilder},
29 moderations::ModerationBuilder,
30 threads::ThreadRequestBuilder,
31 uploads::UploadBuilder,
32 usage::UsageBuilder,
33 Builder, ChatCompletionBuilder, ResponsesBuilder,
34 },
35 config::Config,
36 errors::Result,
37 responses::ChatCompletionResponseWrapper,
38 Error, UploadPurpose,
39};
40use openai_client_base::apis::Error as ApiError;
41use openai_client_base::{
42 apis::{
43 assistants_api, audio_api, batch_api, chat_api, completions_api,
44 configuration::Configuration, embeddings_api, files_api, fine_tuning_api, images_api,
45 models_api, moderations_api, uploads_api, usage_api, vector_stores_api,
46 },
47 models::{
48 AssistantObject, Batch, CreateBatchRequest, CreateChatCompletionRequest,
49 CreateCompletionResponse, CreateEmbeddingResponse, CreateFineTuningJobRequest,
50 CreateModerationResponse, CreateTranscription200Response, CreateTranslation200Response,
51 DeleteAssistantResponse, DeleteFileResponse, DeleteModelResponse,
52 DeleteVectorStoreFileResponse, DeleteVectorStoreResponse, FineTuningJob, ImagesResponse,
53 ListAssistantsResponse, ListBatchesResponse, ListFilesResponse,
54 ListFineTuningJobCheckpointsResponse, ListFineTuningJobEventsResponse,
55 ListMessagesResponse, ListModelsResponse, ListPaginatedFineTuningJobsResponse,
56 ListRunStepsResponse, ListRunsResponse, ListVectorStoreFilesResponse,
57 ListVectorStoresResponse, MessageObject, Model, OpenAiFile, RunObject, RunStepObject,
58 SubmitToolOutputsRunRequestToolOutputsInner, ThreadObject, Upload, UsageResponse,
59 VectorStoreFileObject, VectorStoreObject, VectorStoreSearchResultsPage,
60 },
61};
62use reqwest_middleware::ClientWithMiddleware as HttpClient;
63use std::sync::Arc;
64use std::time::Instant;
65use tokio::time::Duration;
66
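// Generates the interceptor plumbing (`call_before_request`, `handle_api_error`,
// `call_after_response`) for the capability sub-clients (audio, files, images, ...),
// which each hold a `client` reference back to the main `Client<T>`. The generated
// bodies mirror the inherent helpers on `Client<T>` below, but route through
// `self.client`. A hypothetical invocation (the real call sites live elsewhere in
// this module) would look like `impl_interceptor_helpers!(AudioClient<'_, T>);`.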
67macro_rules! impl_interceptor_helpers {
69 ($client_type:ty) => {
70 impl<T: Default + Send + Sync> $client_type {
71 async fn call_before_request(
73 &self,
74 operation: &str,
75 model: &str,
76 request_json: &str,
77 state: &mut T,
78 ) -> Result<()> {
79 if !self.client.interceptors.is_empty() {
80 let mut ctx = BeforeRequestContext {
81 operation,
82 model,
83 request_json,
84 state,
85 };
86 if let Err(e) = self.client.interceptors.before_request(&mut ctx).await {
87 let error_ctx = ErrorContext {
88 operation,
89 model: Some(model),
90 request_json: Some(request_json),
91 error: &e,
92 state: Some(state),
93 };
94 self.client.interceptors.on_error(&error_ctx).await;
95 return Err(e);
96 }
97 }
98 Ok(())
99 }
100
101 async fn handle_api_error<E>(
103 &self,
104 error: openai_client_base::apis::Error<E>,
105 operation: &str,
106 model: &str,
107 request_json: &str,
108 state: &T,
109 ) -> Error {
110 let error = map_api_error(error);
111
112 if !self.client.interceptors.is_empty() {
113 let error_ctx = ErrorContext {
114 operation,
115 model: Some(model),
116 request_json: Some(request_json),
117 error: &error,
118 state: Some(state),
119 };
120 self.client.interceptors.on_error(&error_ctx).await;
121 }
122
123 error
124 }
125
126 async fn call_after_response<R>(
128 &self,
129 response: &R,
130 operation: &str,
131 model: &str,
132 request_json: &str,
133 state: &T,
134 duration: std::time::Duration,
135 input_tokens: Option<i64>,
136 output_tokens: Option<i64>,
137 ) where
138 R: serde::Serialize + Sync,
139 {
140 if !self.client.interceptors.is_empty() {
141 let response_json = serde_json::to_string(response).unwrap_or_default();
142 let ctx = AfterResponseContext {
143 operation,
144 model,
145 request_json,
146 response_json: &response_json,
147 duration,
148 input_tokens,
149 output_tokens,
150 state,
151 };
152 if let Err(e) = self.client.interceptors.after_response(&ctx).await {
153 tracing::warn!("Interceptor after_response failed: {}", e);
154 }
155 }
156 }
157 }
158 };
159}
160
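/// Staged configuration used to construct a [`Client`], obtained from
/// [`Client::builder`] or [`ClientBuilder::from_env`].
///
/// Illustrative sketch (not compiled here; assumes the crate is exposed as
/// `openai_ergonomic` and that `OPENAI_API_KEY` is set in the environment):
///
/// ```ignore
/// let client = openai_ergonomic::Client::from_env()?.build();
/// ```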
161pub struct ClientBuilder<T = ()> {
175 config: Arc<Config>,
176 http: HttpClient,
177 base_configuration: Configuration,
178 interceptors: InterceptorChain<T>,
179}
180
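/// Handle to the OpenAI-compatible API. Cloning is inexpensive: the configuration and
/// interceptor chain are reference-counted, and the underlying HTTP client is reused.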
181#[derive(Clone)]
202pub struct Client<T = ()> {
203 config: Arc<Config>,
204 http: HttpClient,
205 base_configuration: Configuration,
206 interceptors: Arc<InterceptorChain<T>>,
207}
208
209impl<T> std::fmt::Debug for Client<T> {
211 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
212 f.debug_struct("Client")
213 .field("config", &self.config)
214 .field("http", &"<HttpClient>")
215 .field("base_configuration", &"<Configuration>")
216 .field("interceptors", &"<InterceptorChain>")
217 .finish()
218 }
219}
220
221impl ClientBuilder {
223 pub fn new(config: Config) -> Result<Self> {
225 let is_azure = config.is_azure();
227
228 let http_client = if let Some(client) = config.http_client() {
230 client.clone()
231 } else {
232 let reqwest_client = reqwest::Client::builder()
                .timeout(Duration::from_secs(120))
                .user_agent(format!("openai-ergonomic/{}", env!("CARGO_PKG_VERSION")))
235 .build()
236 .map_err(Error::Http)?;
237
238 let mut client_builder = reqwest_middleware::ClientBuilder::new(reqwest_client);
239
240 if is_azure {
242 let azure_middleware = crate::azure_middleware::AzureAuthMiddleware::new(
243 config.api_key().to_string(),
244 config.azure_api_version().map(String::from),
245 config.azure_deployment().map(String::from),
246 );
247 client_builder = client_builder.with(azure_middleware);
248 }
249
250 client_builder.build()
251 };
252
253 let mut base_configuration = Configuration::new();
255
256 base_configuration.client = http_client.clone();
258
259 if !is_azure {
262 base_configuration.bearer_access_token = Some(config.api_key().to_string());
263 }
264
265 if let Some(base_url) = config.base_url() {
266 base_configuration.base_path = base_url.to_string();
267 }
268
269 if let Some(org_id) = config.organization_id() {
270 base_configuration.user_agent = Some(format!(
271 "openai-ergonomic/{} org/{}",
272 env!("CARGO_PKG_VERSION"),
273 org_id
274 ));
275 }
276
277 Ok(Self {
278 config: Arc::new(config),
279 http: http_client,
280 base_configuration,
281 interceptors: InterceptorChain::new(),
282 })
283 }
284
285 pub fn from_env() -> Result<Self> {
287 Self::new(Config::from_env()?)
288 }
289}
290
291impl<T> ClientBuilder<T> {
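    /// Replaces the interceptor chain with a fresh chain containing only `interceptor`,
    /// switching the builder's state type parameter from `T` to `U`.
    ///
    /// Illustrative sketch (not compiled here; `LoggingInterceptor` is a hypothetical
    /// [`crate::interceptor::Interceptor`] implementation):
    ///
    /// ```ignore
    /// let client = Client::from_env()?
    ///     .with_interceptor(Box::new(LoggingInterceptor::default()))
    ///     .build();
    /// ```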
293 #[must_use]
334 pub fn with_interceptor<U>(
335 self,
336 interceptor: Box<dyn crate::interceptor::Interceptor<U>>,
337 ) -> ClientBuilder<U> {
338 let mut new_chain = InterceptorChain::new();
339 new_chain.add(interceptor);
340
341 ClientBuilder {
342 config: self.config,
343 http: self.http,
344 base_configuration: self.base_configuration,
345 interceptors: new_chain,
346 }
347 }
348
349 #[must_use]
363 pub fn add_interceptor(
364 mut self,
365 interceptor: Box<dyn crate::interceptor::Interceptor<T>>,
366 ) -> Self {
367 self.interceptors.add(interceptor);
368 self
369 }
370
371 #[must_use]
375 pub fn build(self) -> Client<T> {
376 Client {
377 config: self.config,
378 http: self.http,
379 base_configuration: self.base_configuration,
380 interceptors: Arc::new(self.interceptors),
381 }
382 }
383}
384
385impl Client {
387 pub fn builder(config: Config) -> Result<ClientBuilder> {
389 ClientBuilder::new(config)
390 }
391
392 pub fn from_env() -> Result<ClientBuilder> {
394 ClientBuilder::from_env()
395 }
396}
397
398impl<T> Client<T> {
399 pub fn config(&self) -> &Config {
401 &self.config
402 }
403
404 pub fn http_client(&self) -> &HttpClient {
406 &self.http
407 }
408}
409
410impl<T: Default + Send + Sync> Client<T> {
412 async fn call_before_request(
414 &self,
415 operation: &str,
416 model: &str,
417 request_json: &str,
418 state: &mut T,
419 ) -> Result<()> {
420 if !self.interceptors.is_empty() {
421 let mut ctx = BeforeRequestContext {
422 operation,
423 model,
424 request_json,
425 state,
426 };
427 if let Err(e) = self.interceptors.before_request(&mut ctx).await {
428 let error_ctx = ErrorContext {
429 operation,
430 model: Some(model),
431 request_json: Some(request_json),
432 error: &e,
433 state: Some(state),
434 };
435 self.interceptors.on_error(&error_ctx).await;
436 return Err(e);
437 }
438 }
439 Ok(())
440 }
441
442 async fn handle_api_error<E>(
444 &self,
445 error: openai_client_base::apis::Error<E>,
446 operation: &str,
447 model: &str,
448 request_json: &str,
449 state: &T,
450 ) -> Error {
451 let error = map_api_error(error);
452
453 if !self.interceptors.is_empty() {
454 let error_ctx = ErrorContext {
455 operation,
456 model: Some(model),
457 request_json: Some(request_json),
458 error: &error,
459 state: Some(state),
460 };
461 self.interceptors.on_error(&error_ctx).await;
462 }
463
464 error
465 }
466
467 async fn call_after_response<R>(
469 &self,
470 response: &R,
471 operation: &str,
472 model: &str,
473 request_json: &str,
474 state: &T,
475 duration: std::time::Duration,
476 input_tokens: Option<i64>,
477 output_tokens: Option<i64>,
478 ) where
479 R: serde::Serialize + Sync,
480 {
481 if !self.interceptors.is_empty() {
482 let response_json = serde_json::to_string(response).unwrap_or_default();
483 let ctx = AfterResponseContext {
484 operation,
485 model,
486 request_json,
487 response_json: &response_json,
488 duration,
489 input_tokens,
490 output_tokens,
491 state,
492 };
493 if let Err(e) = self.interceptors.after_response(&ctx).await {
494 tracing::warn!("Interceptor after_response failed: {}", e);
495 }
496 }
497 }
498}
499
500impl<T: Default + Send + Sync + 'static> Client<T> {
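    /// Starts a chat completion builder using the configured default model, falling
    /// back to `"gpt-4"` when none is set.
    ///
    /// Illustrative sketch (not compiled here; assumes the crate is exposed as
    /// `openai_ergonomic`):
    ///
    /// ```ignore
    /// let client = openai_ergonomic::Client::from_env()?.build();
    /// let builder = client.chat().system("You are terse.").user("Summarize Rust in one line.");
    /// let response = client.send_chat(builder).await?;
    /// ```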
502 pub fn chat(&self) -> ChatCompletionBuilder {
504 let model = self.config.default_model().unwrap_or("gpt-4");
505 ChatCompletionBuilder::new(model)
506 }
507
508 pub fn chat_simple(&self, message: impl Into<String>) -> ChatCompletionBuilder {
510 self.chat().user(message)
511 }
512
513 pub fn chat_with_system(
515 &self,
516 system: impl Into<String>,
517 user: impl Into<String>,
518 ) -> ChatCompletionBuilder {
519 self.chat().system(system).user(user)
520 }
521
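    /// Executes a prepared chat completion request: serializes it for the interceptor
    /// chain, runs `before_request`, times the underlying API call, routes failures
    /// through `on_error`, and reports prompt/completion token usage to
    /// `after_response` before wrapping the result.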
522 pub async fn execute_chat(
524 &self,
525 request: CreateChatCompletionRequest,
526 ) -> Result<ChatCompletionResponseWrapper> {
527 let mut state = T::default();
528 let operation = operation_names::CHAT;
529 let model = request.model.clone();
530 let request_json = serde_json::to_string(&request).unwrap_or_default();
531
532 self.call_before_request(operation, &model, &request_json, &mut state)
534 .await?;
535
536 let start_time = Instant::now();
537
538 let response = match chat_api::create_chat_completion()
540 .configuration(&self.base_configuration)
541 .create_chat_completion_request(request)
542 .call()
543 .await
544 {
545 Ok(resp) => resp,
546 Err(e) => {
547 let error = self
548 .handle_api_error(e, operation, &model, &request_json, &state)
549 .await;
550 return Err(error);
551 }
552 };
553
554 let duration = start_time.elapsed();
555
556 self.call_after_response(
558 &response,
559 operation,
560 &model,
561 &request_json,
562 &state,
563 duration,
564 response.usage.as_ref().map(|u| i64::from(u.prompt_tokens)),
565 response
566 .usage
567 .as_ref()
568 .map(|u| i64::from(u.completion_tokens)),
569 )
570 .await;
571
572 Ok(ChatCompletionResponseWrapper::new(response))
573 }
574
575 pub async fn send_chat(
577 &self,
578 builder: ChatCompletionBuilder,
579 ) -> Result<ChatCompletionResponseWrapper> {
580 let request = builder.build()?;
581 self.execute_chat(request).await
582 }
583
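    /// Sends a chat completion request with streaming enabled and returns the response
    /// stream.
    ///
    /// Illustrative sketch (not compiled here; assumes the boxed stream yields chunk
    /// results that can be drained with `futures::StreamExt`):
    ///
    /// ```ignore
    /// use futures::StreamExt;
    ///
    /// let mut stream = client.send_chat_stream(client.chat_simple("stream this")).await?;
    /// while let Some(chunk) = stream.next().await {
    ///     let chunk = chunk?;
    ///     // inspect the delta content in `chunk` here
    /// }
    /// ```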
584 pub async fn send_chat_stream(
613 &self,
614 mut builder: ChatCompletionBuilder,
615 ) -> Result<crate::streaming::BoxedChatStream> {
616 builder = builder.stream(true);
618 let mut request = builder.build()?;
619 request.stream = Some(true);
620
621 self.execute_chat_stream(request, crate::semantic_conventions::operation_names::CHAT)
622 .await
623 }
624
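    /// Streams a chat completion by issuing the HTTP request directly (bypassing the
    /// generated API surface), adding organization/project headers when configured, and
    /// wrapping the response body in `ChatCompletionStream`. When interceptors are
    /// registered, the stream is further wrapped in `InterceptedStream` so the chain can
    /// observe the streamed response.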
625 async fn execute_chat_stream(
629 &self,
630 request: CreateChatCompletionRequest,
631 operation: &str,
632 ) -> Result<crate::streaming::BoxedChatStream> {
633 let uri_str = format!("{}/chat/completions", self.config.api_base());
634
635 let mut req_builder = self
636 .http_client()
637 .request(reqwest::Method::POST, &uri_str)
638 .bearer_auth(self.config.api_key())
639 .json(&request);
640
641 if let Some(org_id) = self.config.organization_id() {
643 req_builder = req_builder.header("OpenAI-Organization", org_id);
644 }
645
646 if let Some(project_id) = self.config.project() {
648 req_builder = req_builder.header("OpenAI-Project", project_id);
649 }
650
651 let req = req_builder.build()?;
652
653 let request_json = serde_json::to_string(&request).unwrap_or_else(|_| "{}".to_string());
655 let model = request.model.clone();
656
657 let mut state = T::default();
659 if !self.interceptors.is_empty() {
660 let mut ctx = crate::interceptor::BeforeRequestContext {
661 operation,
662 model: &model,
663 request_json: &request_json,
664 state: &mut state,
665 };
666 self.interceptors.before_request(&mut ctx).await?;
667 }
668
669 let response = self.http_client().execute(req).await?;
670
671 let status = response.status();
672 if !status.is_success() {
673 let error_text = response.text().await?;
674 return Err(Error::Api {
675 status: status.as_u16(),
676 message: error_text,
677 error_type: None,
678 error_code: None,
679 });
680 }
681
682 let stream = crate::streaming::ChatCompletionStream::new(response);
683
684 if self.interceptors.is_empty() {
686 Ok(Box::pin(stream))
687 } else {
688 let intercepted = crate::streaming::InterceptedStream::new(
689 stream,
690 std::sync::Arc::clone(&self.interceptors),
691 operation.to_string(),
692 model,
693 request_json,
694 state,
695 );
696 Ok(Box::pin(intercepted))
697 }
698 }
699}
700
701impl<T: Default + Send + Sync + 'static> Client<T> {
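    /// Starts a Responses-style builder using the configured default model (falling back
    /// to `"gpt-4"`). Execution currently delegates to the chat completions pipeline via
    /// [`Self::execute_responses`].
    ///
    /// Illustrative sketch (not compiled here):
    ///
    /// ```ignore
    /// let response = client.send_responses(client.responses_simple("Hello")).await?;
    /// ```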
703 pub fn responses(&self) -> ResponsesBuilder {
705 let model = self.config.default_model().unwrap_or("gpt-4");
706 ResponsesBuilder::new(model)
707 }
708
709 pub fn responses_simple(&self, message: impl Into<String>) -> ResponsesBuilder {
711 self.responses().user(message)
712 }
713
714 pub async fn execute_responses(
716 &self,
717 request: CreateChatCompletionRequest,
718 ) -> Result<ChatCompletionResponseWrapper> {
719 self.execute_chat(request).await
721 }
722
723 pub async fn send_responses(
725 &self,
726 builder: ResponsesBuilder,
727 ) -> Result<ChatCompletionResponseWrapper> {
728 let request = builder.build()?;
729 self.execute_responses(request).await
730 }
731
732 pub async fn send_responses_stream(
737 &self,
738 mut builder: ResponsesBuilder,
739 ) -> Result<crate::streaming::BoxedChatStream> {
740 builder = builder.stream(true);
742 let mut request = builder.build()?;
743 request.stream = Some(true);
744
745 self.execute_chat_stream(
747 request,
748 crate::semantic_conventions::operation_names::RESPONSES,
749 )
750 .await
751 }
752}
753
754impl<T: Default + Send + Sync> Client<T> {
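    // Capability-scoped sub-clients: each accessor below borrows `self` and returns a
    // thin wrapper (AssistantsClient, AudioClient, FilesClient, ...) that reuses this
    // client's configuration and interceptor chain.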
756 #[must_use]
758 pub fn assistants(&self) -> AssistantsClient<'_, T> {
759 AssistantsClient { client: self }
760 }
761
762 #[must_use]
764 pub fn audio(&self) -> AudioClient<'_, T> {
765 AudioClient { client: self }
766 }
767
768 #[must_use]
770 pub fn embeddings(&self) -> EmbeddingsClient<'_, T> {
771 EmbeddingsClient { client: self }
772 }
773
774 #[must_use]
776 pub fn images(&self) -> ImagesClient<'_, T> {
777 ImagesClient { client: self }
778 }
779
780 #[must_use]
782 pub fn files(&self) -> FilesClient<'_, T> {
783 FilesClient { client: self }
784 }
785
786 #[must_use]
788 pub fn fine_tuning(&self) -> FineTuningClient<'_, T> {
789 FineTuningClient { client: self }
790 }
791
792 #[must_use]
794 pub fn batch(&self) -> BatchClient<'_, T> {
795 BatchClient { client: self }
796 }
797
798 #[must_use]
800 pub fn vector_stores(&self) -> VectorStoresClient<'_, T> {
801 VectorStoresClient { client: self }
802 }
803
804 #[must_use]
806 pub fn moderations(&self) -> ModerationsClient<'_, T> {
807 ModerationsClient { client: self }
808 }
809
810 #[must_use]
812 pub fn threads(&self) -> ThreadsClient<'_, T> {
813 ThreadsClient { client: self }
814 }
815
816 #[must_use]
818 pub fn uploads(&self) -> UploadsClient<'_, T> {
819 UploadsClient { client: self }
820 }
821
822 #[must_use]
824 pub fn models(&self) -> ModelsClient<'_, T> {
825 ModelsClient { client: self }
826 }
827
828 #[must_use]
830 pub fn completions(&self) -> CompletionsClient<'_, T> {
831 CompletionsClient { client: self }
832 }
833
834 #[must_use]
836 pub fn usage(&self) -> UsageClient<'_, T> {
837 UsageClient { client: self }
838 }
839}
840
841impl<T: Default + Send + Sync> AudioClient<'_, T> {
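    /// Builds a text-to-speech request for the given model, input text and voice.
    ///
    /// Illustrative sketch (not compiled here; the model and voice names are examples):
    ///
    /// ```ignore
    /// let audio = client.audio();
    /// let bytes = audio
    ///     .create_speech(audio.speech("tts-1", "Hello there", "alloy"))
    ///     .await?;
    /// std::fs::write("hello.mp3", &bytes)?;
    /// ```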
842 #[must_use]
844 pub fn speech(
845 &self,
846 model: impl Into<String>,
847 input: impl Into<String>,
848 voice: impl Into<String>,
849 ) -> SpeechBuilder {
850 SpeechBuilder::new(model, input, voice)
851 }
852
853 pub async fn create_speech(&self, builder: SpeechBuilder) -> Result<Vec<u8>> {
855 let request = builder.build()?;
856 let mut state = T::default();
857 let operation = operation_names::AUDIO_SPEECH;
858 let model = request.model.clone();
859 let request_json = serde_json::to_string(&request).unwrap_or_default();
860
861 self.call_before_request(operation, &model, &request_json, &mut state)
863 .await?;
864
865 let start_time = Instant::now();
866
867 let response = match audio_api::create_speech()
869 .configuration(&self.client.base_configuration)
870 .create_speech_request(request)
871 .call()
872 .await
873 {
874 Ok(resp) => resp,
875 Err(e) => {
876 let error = self
877 .handle_api_error(e, operation, &model, &request_json, &state)
878 .await;
879 return Err(error);
880 }
881 };
882
883 let bytes = response.bytes().await.map_err(Error::Http)?;
884 let duration = start_time.elapsed();
885
886 let response_json = format!("{{\"size\": {}}}", bytes.len());
888 self.call_after_response(
889 &response_json,
890 operation,
891 &model,
892 &request_json,
893 &state,
894 duration,
895 None,
896 None,
897 )
898 .await;
899
900 Ok(bytes.to_vec())
901 }
902
903 #[must_use]
905 pub fn transcription(
906 &self,
907 file: impl AsRef<std::path::Path>,
908 model: impl Into<String>,
909 ) -> TranscriptionBuilder {
910 TranscriptionBuilder::new(file, model)
911 }
912
913 pub async fn create_transcription(
915 &self,
916 builder: TranscriptionBuilder,
917 ) -> Result<CreateTranscription200Response> {
918 let request = builder.build()?;
919 let model_str = request.model.clone();
920 let mut state = T::default();
921 let operation = operation_names::AUDIO_TRANSCRIPTION;
922 let request_json = format!(r#"{{"model":"{model_str}","file":"<audio_file>"}}"#);
924
925 self.call_before_request(operation, &model_str, &request_json, &mut state)
927 .await?;
928
929 let TranscriptionRequest {
930 file,
931 model,
932 language,
933 prompt,
934 response_format,
935 temperature,
936 stream,
937 chunking_strategy,
938 timestamp_granularities,
939 include,
940 } = request;
941
942 let timestamp_strings = timestamp_granularities.as_ref().map(|values| {
943 values
944 .iter()
945 .map(|granularity| granularity.as_str().to_string())
946 .collect::<Vec<_>>()
947 });
948
949 let start_time = Instant::now();
950
951 let response = match audio_api::create_transcription()
953 .configuration(&self.client.base_configuration)
954 .file(file)
955 .model(&model)
956 .maybe_language(language.as_deref())
957 .maybe_prompt(prompt.as_deref())
958 .maybe_response_format(response_format)
959 .maybe_temperature(temperature)
960 .maybe_stream(stream)
961 .maybe_chunking_strategy(chunking_strategy)
962 .maybe_timestamp_granularities(timestamp_strings)
963 .maybe_include(include)
964 .call()
965 .await
966 {
967 Ok(resp) => resp,
968 Err(e) => {
969 let error = self
970 .handle_api_error(e, operation, &model_str, &request_json, &state)
971 .await;
972 return Err(error);
973 }
974 };
975
976 let duration = start_time.elapsed();
977
978 self.call_after_response(
980 &response,
981 operation,
982 &model_str,
983 &request_json,
984 &state,
985 duration,
986 None,
987 None,
988 )
989 .await;
990
991 Ok(response)
992 }
993
994 #[must_use]
996 pub fn translation(
997 &self,
998 file: impl AsRef<std::path::Path>,
999 model: impl Into<String>,
1000 ) -> TranslationBuilder {
1001 TranslationBuilder::new(file, model)
1002 }
1003
1004 pub async fn create_translation(
1006 &self,
1007 builder: TranslationBuilder,
1008 ) -> Result<CreateTranslation200Response> {
1009 let request = builder.build()?;
1010 let model_str = request.model.clone();
1011
1012 let mut state = T::default();
1014 let operation = operation_names::AUDIO_TRANSLATION;
1015 let request_json = format!(r#"{{"model":"{model_str}","file":"<audio_file>"}}"#);
1016
1017 self.call_before_request(operation, &model_str, &request_json, &mut state)
1019 .await?;
1020
1021 let TranslationRequest {
1022 file,
1023 model,
1024 prompt,
1025 response_format,
1026 temperature,
1027 } = request;
1028
1029 let response_format_owned = response_format.map(|format| format.to_string());
1030
1031 let start_time = Instant::now();
1032
1033 let response = match audio_api::create_translation()
1035 .configuration(&self.client.base_configuration)
1036 .file(file)
1037 .model(&model)
1038 .maybe_prompt(prompt.as_deref())
1039 .maybe_response_format(response_format_owned.as_deref())
1040 .maybe_temperature(temperature)
1041 .call()
1042 .await
1043 {
1044 Ok(resp) => resp,
1045 Err(e) => {
1046 let error = self
1047 .handle_api_error(e, operation, &model_str, &request_json, &state)
1048 .await;
1049 return Err(error);
1050 }
1051 };
1052
1053 let duration = start_time.elapsed();
1054
1055 self.call_after_response(
1057 &response,
1058 operation,
1059 &model_str,
1060 &request_json,
1061 &state,
1062 duration,
1063 None,
1064 None,
1065 )
1066 .await;
1067
1068 Ok(response)
1069 }
1070}
1071
1072impl<T: Default + Send + Sync> EmbeddingsClient<'_, T> {
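    /// Starts an embeddings request builder for the given model.
    ///
    /// Illustrative sketch (not compiled here; the model name is an example):
    ///
    /// ```ignore
    /// let embeddings = client.embeddings();
    /// let response = embeddings
    ///     .create(embeddings.text("text-embedding-3-small", "hello world"))
    ///     .await?;
    /// let vector = &response.data[0].embedding;
    /// ```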
1073 #[must_use]
1075 pub fn builder(&self, model: impl Into<String>) -> EmbeddingsBuilder {
1076 EmbeddingsBuilder::new(model)
1077 }
1078
1079 #[must_use]
1081 pub fn text(&self, model: impl Into<String>, input: impl Into<String>) -> EmbeddingsBuilder {
1082 self.builder(model).input_text(input)
1083 }
1084
1085 #[must_use]
1087 pub fn tokens<I>(&self, model: impl Into<String>, tokens: I) -> EmbeddingsBuilder
1088 where
1089 I: IntoIterator<Item = i32>,
1090 {
1091 self.builder(model).input_tokens(tokens)
1092 }
1093
1094 pub async fn create(&self, builder: EmbeddingsBuilder) -> Result<CreateEmbeddingResponse> {
1096 let request = builder.build()?;
1097
1098 let mut state = T::default();
1100 let operation = operation_names::EMBEDDINGS;
1101 let model = request.model.clone();
1102 let request_json = serde_json::to_string(&request).unwrap_or_default();
1103
1104 self.call_before_request(operation, &model, &request_json, &mut state)
1106 .await?;
1107
1108 let start_time = Instant::now();
1109
1110 let response = match embeddings_api::create_embedding()
1112 .configuration(&self.client.base_configuration)
1113 .create_embedding_request(request)
1114 .call()
1115 .await
1116 {
1117 Ok(resp) => resp,
1118 Err(e) => {
1119 let error = self
1120 .handle_api_error(e, operation, &model, &request_json, &state)
1121 .await;
1122 return Err(error);
1123 }
1124 };
1125
1126 let duration = start_time.elapsed();
1127
1128 self.call_after_response(
1130 &response,
1131 operation,
1132 &model,
1133 &request_json,
1134 &state,
1135 duration,
1136 Some(i64::from(response.usage.prompt_tokens)),
1137 Some(i64::from(response.usage.total_tokens)),
1138 )
1139 .await;
1140
1141 Ok(response)
1142 }
1143}
1144
1145impl<T: Default + Send + Sync> ImagesClient<'_, T> {
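    /// Starts an image generation builder for the given prompt.
    ///
    /// Illustrative sketch (not compiled here):
    ///
    /// ```ignore
    /// let images = client.images();
    /// let result = images.create(images.generate("a watercolor fox")).await?;
    /// ```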
1146 #[must_use]
1148 pub fn generate(&self, prompt: impl Into<String>) -> ImageGenerationBuilder {
1149 ImageGenerationBuilder::new(prompt)
1150 }
1151
1152 pub async fn create(&self, builder: ImageGenerationBuilder) -> Result<ImagesResponse> {
1154 let request = builder.build()?;
1155
1156 let mut state = T::default();
1158 let operation = operation_names::IMAGE_GENERATION;
1159 let model = request
1160 .model
1161 .as_ref()
1162 .map_or_else(|| "dall-e-2".to_string(), ToString::to_string);
1163 let request_json = serde_json::to_string(&request).unwrap_or_default();
1164
1165 self.call_before_request(operation, &model, &request_json, &mut state)
1167 .await?;
1168
1169 let start_time = Instant::now();
1170
1171 let response = match images_api::create_image()
1173 .configuration(&self.client.base_configuration)
1174 .create_image_request(request)
1175 .call()
1176 .await
1177 {
1178 Ok(resp) => resp,
1179 Err(e) => {
1180 let error = self
1181 .handle_api_error(e, operation, &model, &request_json, &state)
1182 .await;
1183 return Err(error);
1184 }
1185 };
1186
1187 let duration = start_time.elapsed();
1188
1189 self.call_after_response(
1191 &response,
1192 operation,
1193 &model,
1194 &request_json,
1195 &state,
1196 duration,
1197 None,
1198 None,
1199 )
1200 .await;
1201
1202 Ok(response)
1203 }
1204
1205 #[must_use]
1207 pub fn edit(
1208 &self,
1209 image: impl AsRef<std::path::Path>,
1210 prompt: impl Into<String>,
1211 ) -> ImageEditBuilder {
1212 ImageEditBuilder::new(image, prompt)
1213 }
1214
1215 pub async fn create_edit(&self, builder: ImageEditBuilder) -> Result<ImagesResponse> {
1217 let request = builder.build()?;
1218 let model_str = request
1219 .model
1220 .as_ref()
1221 .map_or_else(|| "dall-e-2".to_string(), ToString::to_string);
1222
1223 let mut state = T::default();
1225 let operation = operation_names::IMAGE_EDIT;
1226 let request_json = format!(
1227 r#"{{"prompt":"{}","model":"{}"}}"#,
1228 request.prompt, model_str
1229 );
1230
1231 self.call_before_request(operation, &model_str, &request_json, &mut state)
1233 .await?;
1234
1235 let ImageEditRequest {
1236 image,
1237 prompt,
1238 mask,
1239 background,
1240 model,
1241 n,
1242 size,
1243 response_format,
1244 output_format,
1245 output_compression,
1246 user,
1247 input_fidelity,
1248 stream,
1249 partial_images,
1250 quality,
1251 } = request;
1252
1253 let start_time = Instant::now();
1254
1255 let response = match images_api::create_image_edit()
1257 .configuration(&self.client.base_configuration)
1258 .image(image)
1259 .prompt(&prompt)
1260 .maybe_mask(mask)
1261 .maybe_background(background.as_deref())
1262 .maybe_model(model.as_deref())
1263 .maybe_n(n)
1264 .maybe_size(size.as_deref())
1265 .maybe_response_format(response_format.as_deref())
1266 .maybe_output_format(output_format.as_deref())
1267 .maybe_output_compression(output_compression)
1268 .maybe_user(user.as_deref())
1269 .maybe_input_fidelity(input_fidelity)
1270 .maybe_stream(stream)
1271 .maybe_partial_images(partial_images)
1272 .maybe_quality(quality.as_deref())
1273 .call()
1274 .await
1275 {
1276 Ok(resp) => resp,
1277 Err(e) => {
1278 let error = self
1279 .handle_api_error(e, operation, &model_str, &request_json, &state)
1280 .await;
1281 return Err(error);
1282 }
1283 };
1284
1285 let duration = start_time.elapsed();
1286
1287 self.call_after_response(
1289 &response,
1290 operation,
1291 &model_str,
1292 &request_json,
1293 &state,
1294 duration,
1295 None,
1296 None,
1297 )
1298 .await;
1299
1300 Ok(response)
1301 }
1302
1303 #[must_use]
1305 pub fn variation(&self, image: impl AsRef<std::path::Path>) -> ImageVariationBuilder {
1306 ImageVariationBuilder::new(image)
1307 }
1308
1309 pub async fn create_variation(&self, builder: ImageVariationBuilder) -> Result<ImagesResponse> {
1311 let request = builder.build()?;
1312 let model_str = request
1313 .model
1314 .as_ref()
1315 .map_or_else(|| "dall-e-2".to_string(), ToString::to_string);
1316
1317 let mut state = T::default();
1319 let operation = operation_names::IMAGE_VARIATION;
1320 let request_json = format!(r#"{{"model":"{model_str}"}}"#);
1321
1322 self.call_before_request(operation, &model_str, &request_json, &mut state)
1324 .await?;
1325
1326 let ImageVariationRequest {
1327 image,
1328 model,
1329 n,
1330 response_format,
1331 size,
1332 user,
1333 } = request;
1334
1335 let start_time = Instant::now();
1336
1337 let response = match images_api::create_image_variation()
1339 .configuration(&self.client.base_configuration)
1340 .image(image)
1341 .maybe_model(model.as_deref())
1342 .maybe_n(n)
1343 .maybe_response_format(response_format.as_deref())
1344 .maybe_size(size.as_deref())
1345 .maybe_user(user.as_deref())
1346 .call()
1347 .await
1348 {
1349 Ok(resp) => resp,
1350 Err(e) => {
1351 let error = self
1352 .handle_api_error(e, operation, &model_str, &request_json, &state)
1353 .await;
1354 return Err(error);
1355 }
1356 };
1357
1358 let duration = start_time.elapsed();
1359
1360 self.call_after_response(
1362 &response,
1363 operation,
1364 &model_str,
1365 &request_json,
1366 &state,
1367 duration,
1368 None,
1369 None,
1370 )
1371 .await;
1372
1373 Ok(response)
1374 }
1375}
1376
1377impl<T: Default + Send + Sync> ThreadsClient<'_, T> {
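    /// Starts a thread creation request builder.
    ///
    /// Illustrative sketch (not compiled here):
    ///
    /// ```ignore
    /// let threads = client.threads();
    /// let thread = threads.create(threads.builder()).await?;
    /// println!("created thread {}", thread.id);
    /// ```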
1378 #[must_use]
1380 pub fn builder(&self) -> ThreadRequestBuilder {
1381 ThreadRequestBuilder::new()
1382 }
1383
1384 pub async fn create(&self, builder: ThreadRequestBuilder) -> Result<ThreadObject> {
1386 let request = builder.build()?;
1387
1388 let mut state = T::default();
1390 let operation = operation_names::THREAD_CREATE;
        let model = "thread";
        let request_json = serde_json::to_string(&request).unwrap_or_default();
1393
1394 self.call_before_request(operation, model, &request_json, &mut state)
1396 .await?;
1397
1398 let start_time = Instant::now();
1399
1400 let response = match assistants_api::create_thread()
1402 .configuration(&self.client.base_configuration)
1403 .maybe_create_thread_request(Some(request))
1404 .call()
1405 .await
1406 {
1407 Ok(resp) => resp,
1408 Err(e) => {
1409 let error = self
1410 .handle_api_error(e, operation, model, &request_json, &state)
1411 .await;
1412 return Err(error);
1413 }
1414 };
1415
1416 let duration = start_time.elapsed();
1417
1418 self.call_after_response(
1420 &response,
1421 operation,
1422 model,
1423 &request_json,
1424 &state,
1425 duration,
1426 None,
1427 None,
1428 )
1429 .await;
1430
1431 Ok(response)
1432 }
1433}
1434
1435impl<T: Default + Send + Sync> UploadsClient<'_, T> {
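    /// Starts an upload descriptor for a file of `bytes` total size with the given MIME
    /// type.
    ///
    /// Illustrative sketch (not compiled here; the [`UploadPurpose`] variant name is an
    /// assumption):
    ///
    /// ```ignore
    /// let uploads = client.uploads();
    /// let upload = uploads
    ///     .create(uploads.builder("data.jsonl", UploadPurpose::Batch, 1024, "application/jsonl"))
    ///     .await?;
    /// ```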
1436 #[must_use]
1438 pub fn builder(
1439 &self,
1440 filename: impl Into<String>,
1441 purpose: UploadPurpose,
1442 bytes: i32,
1443 mime_type: impl Into<String>,
1444 ) -> UploadBuilder {
1445 UploadBuilder::new(filename, purpose, bytes, mime_type)
1446 }
1447
1448 pub async fn create(&self, builder: UploadBuilder) -> Result<Upload> {
1450 let request = builder.build()?;
1451
1452 let mut state = T::default();
1454 let operation = operation_names::UPLOAD_CREATE;
        let model = "upload";
        let request_json = serde_json::to_string(&request).unwrap_or_default();
1457
1458 self.call_before_request(operation, model, &request_json, &mut state)
1460 .await?;
1461
1462 let start_time = Instant::now();
1463
1464 let response = match uploads_api::create_upload()
1466 .configuration(&self.client.base_configuration)
1467 .create_upload_request(request)
1468 .call()
1469 .await
1470 {
1471 Ok(resp) => resp,
1472 Err(e) => {
1473 let error = self
1474 .handle_api_error(e, operation, model, &request_json, &state)
1475 .await;
1476 return Err(error);
1477 }
1478 };
1479
1480 let duration = start_time.elapsed();
1481
1482 self.call_after_response(
1484 &response,
1485 operation,
1486 model,
1487 &request_json,
1488 &state,
1489 duration,
1490 None,
1491 None,
1492 )
1493 .await;
1494
1495 Ok(response)
1496 }
1497}
1498
1499impl<T: Default + Send + Sync> ModerationsClient<'_, T> {
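    /// Starts a moderation request builder for the given input text.
    ///
    /// Illustrative sketch (not compiled here):
    ///
    /// ```ignore
    /// let moderations = client.moderations();
    /// let verdict = moderations.create(moderations.check("some user text")).await?;
    /// let flagged = verdict.results.iter().any(|r| r.flagged);
    /// ```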
1500 #[must_use]
1516 pub fn builder(&self, input: impl Into<String>) -> ModerationBuilder {
1517 ModerationBuilder::new(input)
1518 }
1519
1520 #[must_use]
1539 pub fn check(&self, input: impl Into<String>) -> ModerationBuilder {
1540 ModerationBuilder::new(input)
1541 }
1542
1543 pub async fn create(&self, builder: ModerationBuilder) -> Result<CreateModerationResponse> {
1574 let request = builder.build()?;
1575
1576 let mut state = T::default();
1578 let operation = operation_names::MODERATION;
1579 let model = request
1580 .model
1581 .as_ref()
1582 .map_or_else(|| "text-moderation-latest".to_string(), ToString::to_string);
1583 let request_json = serde_json::to_string(&request).unwrap_or_default();
1584
1585 self.call_before_request(operation, &model, &request_json, &mut state)
1587 .await?;
1588
1589 let start_time = Instant::now();
1590
1591 let response = match moderations_api::create_moderation()
1593 .configuration(&self.client.base_configuration)
1594 .create_moderation_request(request)
1595 .call()
1596 .await
1597 {
1598 Ok(resp) => resp,
1599 Err(e) => {
1600 let error = self
1601 .handle_api_error(e, operation, &model, &request_json, &state)
1602 .await;
1603 return Err(error);
1604 }
1605 };
1606
1607 let duration = start_time.elapsed();
1608
1609 self.call_after_response(
1611 &response,
1612 operation,
1613 &model,
1614 &request_json,
1615 &state,
1616 duration,
1617 None,
1618 None,
1619 )
1620 .await;
1621
1622 Ok(response)
1623 }
1624}
1625
1626impl<T: Default + Send + Sync> FilesClient<'_, T> {
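    /// Uploads the builder's content by writing it to a temporary file and sending it
    /// through the multipart files endpoint; the temporary file is removed afterwards.
    ///
    /// Illustrative sketch (not compiled here; the `Assistants` variant of the crate's
    /// `FilePurpose` is an assumption):
    ///
    /// ```ignore
    /// use openai_ergonomic::builders::files::FilePurpose;
    ///
    /// let files = client.files();
    /// let uploaded = files
    ///     .upload(files.upload_text("notes.txt", FilePurpose::Assistants, "hello"))
    ///     .await?;
    /// ```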
1627 pub async fn upload(&self, builder: FileUploadBuilder) -> Result<OpenAiFile> {
1646 let temp_dir = std::env::temp_dir();
1648 let temp_file_path = temp_dir.join(builder.filename());
1649 std::fs::write(&temp_file_path, builder.content()).map_err(Error::File)?;
1650
1651 let purpose = match builder.purpose().to_string().as_str() {
1653 "fine-tune" => openai_client_base::models::FilePurpose::FineTune,
1654 "vision" => openai_client_base::models::FilePurpose::Vision,
1655 "batch" => openai_client_base::models::FilePurpose::Batch,
            _ => openai_client_base::models::FilePurpose::Assistants,
        };
1658
1659 let mut state = T::default();
1661 let operation = operation_names::FILE_UPLOAD;
        let model = "file-upload";
        let request_json = format!(
1664 r#"{{"filename":"{}","purpose":"{}","size":{}}}"#,
1665 builder.filename(),
1666 builder.purpose(),
1667 builder.content().len()
1668 );
1669
1670 if let Err(e) = self
1672 .call_before_request(operation, model, &request_json, &mut state)
1673 .await
1674 {
1675 let _ = std::fs::remove_file(&temp_file_path);
1677 return Err(e);
1678 }
1679
1680 let start_time = Instant::now();
1681
1682 let result = match files_api::create_file()
1684 .configuration(&self.client.base_configuration)
1685 .file(temp_file_path.clone())
1686 .purpose(purpose)
1687 .call()
1688 .await
1689 {
1690 Ok(resp) => resp,
1691 Err(e) => {
1692 let _ = std::fs::remove_file(&temp_file_path);
1694 let error = self
1695 .handle_api_error(e, operation, model, &request_json, &state)
1696 .await;
1697 return Err(error);
1698 }
1699 };
1700
1701 let _ = std::fs::remove_file(temp_file_path);
1703
1704 let duration = start_time.elapsed();
1705
1706 self.call_after_response(
1708 &result,
1709 operation,
1710 model,
1711 &request_json,
1712 &state,
1713 duration,
1714 None,
1715 None,
1716 )
1717 .await;
1718
1719 Ok(result)
1720 }
1721
1722 pub async fn create(&self, builder: FileUploadBuilder) -> Result<OpenAiFile> {
1741 self.upload(builder).await
1742 }
1743
1744 #[must_use]
1746 pub fn upload_text(
1747 &self,
1748 filename: impl Into<String>,
1749 purpose: crate::builders::files::FilePurpose,
1750 text: impl Into<String>,
1751 ) -> FileUploadBuilder {
1752 FileUploadBuilder::from_text(filename, purpose, text)
1753 }
1754
1755 #[must_use]
1757 pub fn upload_bytes(
1758 &self,
1759 filename: impl Into<String>,
1760 purpose: crate::builders::files::FilePurpose,
1761 content: Vec<u8>,
1762 ) -> FileUploadBuilder {
1763 FileUploadBuilder::new(filename, purpose, content)
1764 }
1765
1766 pub fn upload_from_path(
1768 &self,
1769 path: impl AsRef<std::path::Path>,
1770 purpose: crate::builders::files::FilePurpose,
1771 ) -> Result<FileUploadBuilder> {
1772 FileUploadBuilder::from_path(path, purpose).map_err(Error::File)
1773 }
1774
1775 pub async fn list(&self, builder: FileListBuilder) -> Result<ListFilesResponse> {
1791 let purpose = builder.purpose_ref().map(ToString::to_string);
1792 let limit = builder.limit_ref();
1793 let order = builder.order_ref().map(ToString::to_string);
1794
1795 let mut state = T::default();
1797 let operation = operation_names::FILE_LIST;
1798 let model = "files";
1799 let request_json = format!(
1800 r#"{{"purpose":"{}","limit":{},"order":"{}"}}"#,
1801 purpose.as_deref().unwrap_or(""),
1802 limit.unwrap_or(10000),
1803 order.as_deref().unwrap_or("desc")
1804 );
1805
1806 self.call_before_request(operation, model, &request_json, &mut state)
1808 .await?;
1809
1810 let start_time = Instant::now();
1811
1812 let response = match files_api::list_files()
1814 .configuration(&self.client.base_configuration)
1815 .maybe_purpose(purpose.as_deref())
1816 .maybe_limit(limit)
1817 .maybe_order(order.as_deref())
1818 .call()
1819 .await
1820 {
1821 Ok(resp) => resp,
1822 Err(e) => {
1823 let error = self
1824 .handle_api_error(e, operation, model, &request_json, &state)
1825 .await;
1826 return Err(error);
1827 }
1828 };
1829
1830 let duration = start_time.elapsed();
1831
1832 self.call_after_response(
1834 &response,
1835 operation,
1836 model,
1837 &request_json,
1838 &state,
1839 duration,
1840 None,
1841 None,
1842 )
1843 .await;
1844
1845 Ok(response)
1846 }
1847
1848 #[must_use]
1850 pub fn list_builder(&self) -> FileListBuilder {
1851 FileListBuilder::new()
1852 }
1853
1854 pub async fn retrieve(&self, file_id: impl Into<String>) -> Result<OpenAiFile> {
1869 let file_id = file_id.into();
1870
1871 let mut state = T::default();
1873 let operation = operation_names::FILE_RETRIEVE;
1874 let model = "files";
1875 let request_json = format!(r#"{{"file_id":"{file_id}"}}"#);
1876
1877 self.call_before_request(operation, model, &request_json, &mut state)
1879 .await?;
1880
1881 let start_time = Instant::now();
1882
1883 let response = match files_api::retrieve_file()
1885 .configuration(&self.client.base_configuration)
1886 .file_id(&file_id)
1887 .call()
1888 .await
1889 {
1890 Ok(resp) => resp,
1891 Err(e) => {
1892 let error = self
1893 .handle_api_error(e, operation, model, &request_json, &state)
1894 .await;
1895 return Err(error);
1896 }
1897 };
1898
1899 let duration = start_time.elapsed();
1900
1901 self.call_after_response(
1903 &response,
1904 operation,
1905 model,
1906 &request_json,
1907 &state,
1908 duration,
1909 None,
1910 None,
1911 )
1912 .await;
1913
1914 Ok(response)
1915 }
1916
1917 pub async fn get(&self, builder: FileRetrievalBuilder) -> Result<OpenAiFile> {
1919 self.retrieve(builder.file_id()).await
1920 }
1921
1922 pub async fn download(&self, file_id: impl Into<String>) -> Result<String> {
1937 let file_id = file_id.into();
1938
1939 let mut state = T::default();
1941 let operation = operation_names::FILE_DOWNLOAD;
1942 let model = "files";
1943 let request_json = format!(r#"{{"file_id":"{file_id}"}}"#);
1944
1945 self.call_before_request(operation, model, &request_json, &mut state)
1947 .await?;
1948
1949 let start_time = Instant::now();
1950
1951 let response = match files_api::download_file()
1953 .configuration(&self.client.base_configuration)
1954 .file_id(&file_id)
1955 .call()
1956 .await
1957 {
1958 Ok(resp) => resp,
1959 Err(e) => {
1960 let error = self
1961 .handle_api_error(e, operation, model, &request_json, &state)
1962 .await;
1963 return Err(error);
1964 }
1965 };
1966
1967 let duration = start_time.elapsed();
1968
1969 let response_size = format!(r#"{{"size":{}}}"#, response.len());
1971 self.call_after_response(
1972 &response_size,
1973 operation,
1974 model,
1975 &request_json,
1976 &state,
1977 duration,
1978 None,
1979 None,
1980 )
1981 .await;
1982
1983 Ok(response)
1984 }
1985
1986 pub async fn download_bytes(&self, file_id: impl Into<String>) -> Result<Vec<u8>> {
1988 let content = self.download(file_id).await?;
1989 Ok(content.into_bytes())
1990 }
1991
1992 pub async fn delete(&self, file_id: impl Into<String>) -> Result<DeleteFileResponse> {
2007 let file_id = file_id.into();
2008
2009 let mut state = T::default();
2011 let operation = operation_names::FILE_DELETE;
2012 let model = "files";
2013 let request_json = format!(r#"{{"file_id":"{file_id}"}}"#);
2014
2015 self.call_before_request(operation, model, &request_json, &mut state)
2017 .await?;
2018
2019 let start_time = Instant::now();
2020
2021 let response = match files_api::delete_file()
2023 .configuration(&self.client.base_configuration)
2024 .file_id(&file_id)
2025 .call()
2026 .await
2027 {
2028 Ok(resp) => resp,
2029 Err(e) => {
2030 let error = self
2031 .handle_api_error(e, operation, model, &request_json, &state)
2032 .await;
2033 return Err(error);
2034 }
2035 };
2036
2037 let duration = start_time.elapsed();
2038
2039 self.call_after_response(
2041 &response,
2042 operation,
2043 model,
2044 &request_json,
2045 &state,
2046 duration,
2047 None,
2048 None,
2049 )
2050 .await;
2051
2052 Ok(response)
2053 }
2054
2055 pub async fn remove(&self, builder: FileDeleteBuilder) -> Result<DeleteFileResponse> {
2057 self.delete(builder.file_id()).await
2058 }
2059}
2060
2061impl<T: Default + Send + Sync> VectorStoresClient<'_, T> {
2062 pub async fn create(
2081 &self,
2082 builder: crate::builders::vector_stores::VectorStoreBuilder,
2083 ) -> Result<VectorStoreObject> {
2084 use openai_client_base::models::{CreateVectorStoreRequest, VectorStoreExpirationAfter};
2085
2086 let mut request = CreateVectorStoreRequest::new();
2087 request.name = builder.name_ref().map(String::from);
2088 request.file_ids = if builder.has_files() {
2089 Some(builder.file_ids_ref().to_vec())
2090 } else {
2091 None
2092 };
2093
2094 if let Some(expires_after) = builder.expires_after_ref() {
2095 use openai_client_base::models::vector_store_expiration_after::Anchor;
2096 request.expires_after = Some(Box::new(VectorStoreExpirationAfter::new(
2097 Anchor::LastActiveAt,
2098 expires_after.days,
2099 )));
2100 }
2101
2102 if !builder.metadata_ref().is_empty() {
2103 request.metadata = Some(Some(builder.metadata_ref().clone()));
2104 }
2105
2106 let mut state = T::default();
2108 let operation = operation_names::VECTOR_STORE_CREATE;
2109 let model = "vector-store";
2110 let request_json = serde_json::to_string(&request).unwrap_or_default();
2111
2112 self.call_before_request(operation, model, &request_json, &mut state)
2114 .await?;
2115
2116 let start_time = Instant::now();
2117
2118 let response = match vector_stores_api::create_vector_store()
2120 .configuration(&self.client.base_configuration)
2121 .create_vector_store_request(request)
2122 .call()
2123 .await
2124 {
2125 Ok(resp) => resp,
2126 Err(e) => {
2127 let error = self
2128 .handle_api_error(e, operation, model, &request_json, &state)
2129 .await;
2130 return Err(error);
2131 }
2132 };
2133
2134 let duration = start_time.elapsed();
2135
2136 self.call_after_response(
2138 &response,
2139 operation,
2140 model,
2141 &request_json,
2142 &state,
2143 duration,
2144 None,
2145 None,
2146 )
2147 .await;
2148
2149 Ok(response)
2150 }
2151
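    /// Lists vector stores with optional pagination (`limit`, `order`, `after`,
    /// `before`).
    ///
    /// Illustrative sketch (not compiled here):
    ///
    /// ```ignore
    /// let stores = client.vector_stores();
    /// let page = stores.list(Some(20), Some("desc"), None, None).await?;
    /// for store in &page.data {
    ///     println!("{}", store.id);
    /// }
    /// ```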
2152 pub async fn list(
2167 &self,
2168 limit: Option<i32>,
2169 order: Option<&str>,
2170 after: Option<&str>,
2171 before: Option<&str>,
2172 ) -> Result<ListVectorStoresResponse> {
2173 let mut state = T::default();
2175 let operation = operation_names::VECTOR_STORE_LIST;
2176 let model = "vector-store";
2177 let request_json = format!(
2178 r#"{{"limit":{},"order":"{}"}}"#,
2179 limit.unwrap_or(20),
2180 order.unwrap_or("desc")
2181 );
2182
2183 self.call_before_request(operation, model, &request_json, &mut state)
2185 .await?;
2186
2187 let start_time = Instant::now();
2188
2189 let response = match vector_stores_api::list_vector_stores()
2191 .configuration(&self.client.base_configuration)
2192 .maybe_limit(limit)
2193 .maybe_order(order)
2194 .maybe_after(after)
2195 .maybe_before(before)
2196 .call()
2197 .await
2198 {
2199 Ok(resp) => resp,
2200 Err(e) => {
2201 let error = self
2202 .handle_api_error(e, operation, model, &request_json, &state)
2203 .await;
2204 return Err(error);
2205 }
2206 };
2207
2208 let duration = start_time.elapsed();
2209
2210 self.call_after_response(
2212 &response,
2213 operation,
2214 model,
2215 &request_json,
2216 &state,
2217 duration,
2218 None,
2219 None,
2220 )
2221 .await;
2222
2223 Ok(response)
2224 }
2225
2226 pub async fn get(&self, vector_store_id: impl Into<String>) -> Result<VectorStoreObject> {
2241 let id = vector_store_id.into();
2242
2243 let mut state = T::default();
2245 let operation = operation_names::VECTOR_STORE_RETRIEVE;
2246 let model = "vector-store";
2247 let request_json = format!(r#"{{"vector_store_id":"{id}"}}"#);
2248
2249 self.call_before_request(operation, model, &request_json, &mut state)
2251 .await?;
2252
2253 let start_time = Instant::now();
2254
2255 let response = match vector_stores_api::get_vector_store()
2257 .configuration(&self.client.base_configuration)
2258 .vector_store_id(&id)
2259 .call()
2260 .await
2261 {
2262 Ok(resp) => resp,
2263 Err(e) => {
2264 let error = self
2265 .handle_api_error(e, operation, model, &request_json, &state)
2266 .await;
2267 return Err(error);
2268 }
2269 };
2270
2271 let duration = start_time.elapsed();
2272
2273 self.call_after_response(
2275 &response,
2276 operation,
2277 model,
2278 &request_json,
2279 &state,
2280 duration,
2281 None,
2282 None,
2283 )
2284 .await;
2285
2286 Ok(response)
2287 }
2288
2289 pub async fn update(
2308 &self,
2309 vector_store_id: impl Into<String>,
2310 builder: crate::builders::vector_stores::VectorStoreBuilder,
2311 ) -> Result<VectorStoreObject> {
2312 use openai_client_base::models::{UpdateVectorStoreRequest, VectorStoreExpirationAfter};
2313
2314 let id = vector_store_id.into();
2315 let mut request = UpdateVectorStoreRequest::new();
2316 request.name = builder.name_ref().map(String::from);
2317
2318 if let Some(expires_after) = builder.expires_after_ref() {
2319 use openai_client_base::models::vector_store_expiration_after::Anchor;
2320 request.expires_after = Some(Box::new(VectorStoreExpirationAfter::new(
2321 Anchor::LastActiveAt,
2322 expires_after.days,
2323 )));
2324 }
2325
2326 if !builder.metadata_ref().is_empty() {
2327 request.metadata = Some(Some(builder.metadata_ref().clone()));
2328 }
2329
2330 let mut state = T::default();
2332 let operation = operation_names::VECTOR_STORE_UPDATE;
2333 let model = "vector-store";
2334 let request_json = serde_json::to_string(&request).unwrap_or_default();
2335
2336 self.call_before_request(operation, model, &request_json, &mut state)
2338 .await?;
2339
2340 let start_time = Instant::now();
2341
2342 let response = match vector_stores_api::modify_vector_store()
2344 .configuration(&self.client.base_configuration)
2345 .vector_store_id(&id)
2346 .update_vector_store_request(request)
2347 .call()
2348 .await
2349 {
2350 Ok(resp) => resp,
2351 Err(e) => {
2352 let error = self
2353 .handle_api_error(e, operation, model, &request_json, &state)
2354 .await;
2355 return Err(error);
2356 }
2357 };
2358
2359 let duration = start_time.elapsed();
2360
2361 self.call_after_response(
2363 &response,
2364 operation,
2365 model,
2366 &request_json,
2367 &state,
2368 duration,
2369 None,
2370 None,
2371 )
2372 .await;
2373
2374 Ok(response)
2375 }
2376
2377 pub async fn delete(
2392 &self,
2393 vector_store_id: impl Into<String>,
2394 ) -> Result<DeleteVectorStoreResponse> {
2395 let id = vector_store_id.into();
2396
2397 let mut state = T::default();
2399 let operation = operation_names::VECTOR_STORE_DELETE;
2400 let model = "vector-store";
2401 let request_json = format!(r#"{{"vector_store_id":"{id}"}}"#);
2402
2403 self.call_before_request(operation, model, &request_json, &mut state)
2405 .await?;
2406
2407 let start_time = Instant::now();
2408
2409 let response = match vector_stores_api::delete_vector_store()
2411 .configuration(&self.client.base_configuration)
2412 .vector_store_id(&id)
2413 .call()
2414 .await
2415 {
2416 Ok(resp) => resp,
2417 Err(e) => {
2418 let error = self
2419 .handle_api_error(e, operation, model, &request_json, &state)
2420 .await;
2421 return Err(error);
2422 }
2423 };
2424
2425 let duration = start_time.elapsed();
2426
2427 self.call_after_response(
2429 &response,
2430 operation,
2431 model,
2432 &request_json,
2433 &state,
2434 duration,
2435 None,
2436 None,
2437 )
2438 .await;
2439
2440 Ok(response)
2441 }
2442
2443 pub async fn add_file(
2458 &self,
2459 vector_store_id: impl Into<String>,
2460 file_id: impl Into<String>,
2461 ) -> Result<VectorStoreFileObject> {
2462 use openai_client_base::models::CreateVectorStoreFileRequest;
2463
2464 let vs_id = vector_store_id.into();
2465 let f_id = file_id.into();
2466 let request = CreateVectorStoreFileRequest::new(f_id.clone());
2467
2468 let mut state = T::default();
2470 let operation = operation_names::VECTOR_STORE_FILE_ADD;
2471 let model = "vector-store";
2472 let request_json = format!(r#"{{"vector_store_id":"{vs_id}","file_id":"{f_id}"}}"#);
2473
2474 self.call_before_request(operation, model, &request_json, &mut state)
2476 .await?;
2477
2478 let start_time = Instant::now();
2479
2480 let response = match vector_stores_api::create_vector_store_file()
2482 .configuration(&self.client.base_configuration)
2483 .vector_store_id(&vs_id)
2484 .create_vector_store_file_request(request)
2485 .call()
2486 .await
2487 {
2488 Ok(resp) => resp,
2489 Err(e) => {
2490 let error = self
2491 .handle_api_error(e, operation, model, &request_json, &state)
2492 .await;
2493 return Err(error);
2494 }
2495 };
2496
2497 let duration = start_time.elapsed();
2498
2499 self.call_after_response(
2501 &response,
2502 operation,
2503 model,
2504 &request_json,
2505 &state,
2506 duration,
2507 None,
2508 None,
2509 )
2510 .await;
2511
2512 Ok(response)
2513 }
2514
2515 pub async fn list_files(
2530 &self,
2531 vector_store_id: impl Into<String>,
2532 limit: Option<i32>,
2533 order: Option<&str>,
2534 after: Option<&str>,
2535 before: Option<&str>,
2536 filter: Option<&str>,
2537 ) -> Result<ListVectorStoreFilesResponse> {
2538 let id = vector_store_id.into();
2539
2540 let mut state = T::default();
2542 let operation = operation_names::VECTOR_STORE_FILE_LIST;
2543 let model = "vector-store";
2544 let request_json = format!(r#"{{"vector_store_id":"{id}"}}"#);
2545
2546 self.call_before_request(operation, model, &request_json, &mut state)
2548 .await?;
2549
2550 let start_time = Instant::now();
2551
2552 let response = match vector_stores_api::list_vector_store_files()
2554 .configuration(&self.client.base_configuration)
2555 .vector_store_id(&id)
2556 .maybe_limit(limit)
2557 .maybe_order(order)
2558 .maybe_after(after)
2559 .maybe_before(before)
2560 .maybe_filter(filter)
2561 .call()
2562 .await
2563 {
2564 Ok(resp) => resp,
2565 Err(e) => {
2566 let error = self
2567 .handle_api_error(e, operation, model, &request_json, &state)
2568 .await;
2569 return Err(error);
2570 }
2571 };
2572
2573 let duration = start_time.elapsed();
2574
2575 self.call_after_response(
2577 &response,
2578 operation,
2579 model,
2580 &request_json,
2581 &state,
2582 duration,
2583 None,
2584 None,
2585 )
2586 .await;
2587
2588 Ok(response)
2589 }
2590
2591 pub async fn get_file(
2606 &self,
2607 vector_store_id: impl Into<String>,
2608 file_id: impl Into<String>,
2609 ) -> Result<VectorStoreFileObject> {
2610 let vs_id = vector_store_id.into();
2611 let f_id = file_id.into();
2612
2613 let mut state = T::default();
2615 let operation = operation_names::VECTOR_STORE_FILE_RETRIEVE;
2616 let model = "vector-store";
2617 let request_json = format!(r#"{{"vector_store_id":"{vs_id}","file_id":"{f_id}"}}"#);
2618
2619 self.call_before_request(operation, model, &request_json, &mut state)
2621 .await?;
2622
2623 let start_time = Instant::now();
2624
2625 let response = match vector_stores_api::get_vector_store_file()
2627 .configuration(&self.client.base_configuration)
2628 .vector_store_id(&vs_id)
2629 .file_id(&f_id)
2630 .call()
2631 .await
2632 {
2633 Ok(resp) => resp,
2634 Err(e) => {
2635 let error = self
2636 .handle_api_error(e, operation, model, &request_json, &state)
2637 .await;
2638 return Err(error);
2639 }
2640 };
2641
2642 let duration = start_time.elapsed();
2643
2644 self.call_after_response(
2646 &response,
2647 operation,
2648 model,
2649 &request_json,
2650 &state,
2651 duration,
2652 None,
2653 None,
2654 )
2655 .await;
2656
2657 Ok(response)
2658 }
2659
2660 pub async fn delete_file(
2675 &self,
2676 vector_store_id: impl Into<String>,
2677 file_id: impl Into<String>,
2678 ) -> Result<DeleteVectorStoreFileResponse> {
2679 let vs_id = vector_store_id.into();
2680 let f_id = file_id.into();
2681
2682 let mut state = T::default();
2684 let operation = operation_names::VECTOR_STORE_FILE_DELETE;
2685 let model = "vector-store";
2686 let request_json = format!(r#"{{"vector_store_id":"{vs_id}","file_id":"{f_id}"}}"#);
2687
2688 self.call_before_request(operation, model, &request_json, &mut state)
2690 .await?;
2691
2692 let start_time = Instant::now();
2693
2694 let response = match vector_stores_api::delete_vector_store_file()
2696 .configuration(&self.client.base_configuration)
2697 .vector_store_id(&vs_id)
2698 .file_id(&f_id)
2699 .call()
2700 .await
2701 {
2702 Ok(resp) => resp,
2703 Err(e) => {
2704 let error = self
2705 .handle_api_error(e, operation, model, &request_json, &state)
2706 .await;
2707 return Err(error);
2708 }
2709 };
2710
2711 let duration = start_time.elapsed();
2712
2713 self.call_after_response(
2715 &response,
2716 operation,
2717 model,
2718 &request_json,
2719 &state,
2720 duration,
2721 None,
2722 None,
2723 )
2724 .await;
2725
2726 Ok(response)
2727 }
2728
2729 pub async fn search(
2746 &self,
2747 builder: crate::builders::vector_stores::VectorStoreSearchBuilder,
2748 ) -> Result<VectorStoreSearchResultsPage> {
2749 use openai_client_base::models::{VectorStoreSearchRequest, VectorStoreSearchRequestQuery};
2750
2751 let query = VectorStoreSearchRequestQuery::new_text(builder.query().to_string());
2752 let mut request = VectorStoreSearchRequest::new(query);
2753
2754 if let Some(limit) = builder.limit_ref() {
2755 request.max_num_results = Some(limit);
2756 }
2757
2758 let vs_id = builder.vector_store_id().to_string();
2759
2760 let mut state = T::default();
2762 let operation = operation_names::VECTOR_STORE_SEARCH;
2763 let model = "vector-store";
2764 let request_json = format!(
2765 r#"{{"vector_store_id":"{}","query":"{}"}}"#,
2766 vs_id,
2767 builder.query()
2768 );
2769
2770 self.call_before_request(operation, model, &request_json, &mut state)
2772 .await?;
2773
2774 let start_time = Instant::now();
2775
2776 let response = match vector_stores_api::search_vector_store()
2778 .configuration(&self.client.base_configuration)
2779 .vector_store_id(&vs_id)
2780 .vector_store_search_request(request)
2781 .call()
2782 .await
2783 {
2784 Ok(resp) => resp,
2785 Err(e) => {
2786 let error = self
2787 .handle_api_error(e, operation, model, &request_json, &state)
2788 .await;
2789 return Err(error);
2790 }
2791 };
2792
2793 let duration = start_time.elapsed();
2794
2795 self.call_after_response(
2797 &response,
2798 operation,
2799 model,
2800 &request_json,
2801 &state,
2802 duration,
2803 None,
2804 None,
2805 )
2806 .await;
2807
2808 Ok(response)
2809 }
2810}
2811
2812impl<T: Default + Send + Sync> BatchClient<'_, T> {
2813 pub async fn create(&self, builder: crate::builders::batch::BatchJobBuilder) -> Result<Batch> {
2830 use openai_client_base::models::create_batch_request::{CompletionWindow, Endpoint};
2831
2832 let endpoint = match builder.endpoint() {
2834 crate::builders::batch::BatchEndpoint::ChatCompletions => {
2835 Endpoint::SlashV1SlashChatSlashCompletions
2836 }
2837 crate::builders::batch::BatchEndpoint::Embeddings => Endpoint::SlashV1SlashEmbeddings,
2838 crate::builders::batch::BatchEndpoint::Completions => Endpoint::SlashV1SlashCompletions,
2839 };
2840
2841 let mut request = CreateBatchRequest::new(
2842 builder.input_file_id().to_string(),
2843 endpoint,
2844 CompletionWindow::Variant24h,
2845 );
2846
2847 if builder.has_metadata() {
2848 request.metadata = Some(Some(builder.metadata_ref().clone()));
2849 }
2850
2851 let mut state = T::default();
2853 let operation = operation_names::BATCH_CREATE;
2854 let model = "batch";
2855 let request_json = serde_json::to_string(&request).unwrap_or_default();
2856
2857 self.call_before_request(operation, model, &request_json, &mut state)
2859 .await?;
2860
2861 let start_time = Instant::now();
2862
2863 let response = match batch_api::create_batch()
2865 .configuration(&self.client.base_configuration)
2866 .create_batch_request(request)
2867 .call()
2868 .await
2869 {
2870 Ok(resp) => resp,
2871 Err(e) => {
2872 let error = self
2873 .handle_api_error(e, operation, model, &request_json, &state)
2874 .await;
2875 return Err(error);
2876 }
2877 };
2878
2879 let duration = start_time.elapsed();
2880
2881 self.call_after_response(
2883 &response,
2884 operation,
2885 model,
2886 &request_json,
2887 &state,
2888 duration,
2889 None,
2890 None,
2891 )
2892 .await;
2893
2894 Ok(response)
2895 }
2896
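    /// Lists batch jobs, optionally paginated with `after` and `limit`.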
2897 pub async fn list(
2912 &self,
2913 after: Option<&str>,
2914 limit: Option<i32>,
2915 ) -> Result<ListBatchesResponse> {
2916 let mut state = T::default();
2918 let operation = operation_names::BATCH_LIST;
2919 let model = "batch";
2920 let request_json = format!("{{\"after\":{after:?},\"limit\":{limit:?}}}");
2921
2922 self.call_before_request(operation, model, &request_json, &mut state)
2924 .await?;
2925
2926 let start_time = Instant::now();
2927
2928 let response = match batch_api::list_batches()
2930 .configuration(&self.client.base_configuration)
2931 .maybe_after(after)
2932 .maybe_limit(limit)
2933 .call()
2934 .await
2935 {
2936 Ok(resp) => resp,
2937 Err(e) => {
2938 let error = self
2939 .handle_api_error(e, operation, model, &request_json, &state)
2940 .await;
2941 return Err(error);
2942 }
2943 };
2944
2945 let duration = start_time.elapsed();
2946
2947 self.call_after_response(
2949 &response,
2950 operation,
2951 model,
2952 &request_json,
2953 &state,
2954 duration,
2955 None,
2956 None,
2957 )
2958 .await;
2959
2960 Ok(response)
2961 }
2962
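    /// Retrieves a batch job by ID.
    ///
    /// # Example
    ///
    /// Illustrative sketch only (not compiled as a doctest); it assumes a
    /// `batches()` accessor on `Client`.
    ///
    /// ```ignore
    /// let batch = client.batches().get("batch_abc123").await?;
    /// ```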
2963 pub async fn get(&self, batch_id: impl Into<String>) -> Result<Batch> {
2978 let id = batch_id.into();
2979
2980 let mut state = T::default();
2982 let operation = operation_names::BATCH_RETRIEVE;
2983 let model = "batch";
2984 let request_json = format!("{{\"batch_id\":\"{id}\"}}");
2985
2986 self.call_before_request(operation, model, &request_json, &mut state)
2988 .await?;
2989
2990 let start_time = Instant::now();
2991
2992 let response = match batch_api::retrieve_batch()
2994 .configuration(&self.client.base_configuration)
2995 .batch_id(&id)
2996 .call()
2997 .await
2998 {
2999 Ok(resp) => resp,
3000 Err(e) => {
3001 let error = self
3002 .handle_api_error(e, operation, model, &request_json, &state)
3003 .await;
3004 return Err(error);
3005 }
3006 };
3007
3008 let duration = start_time.elapsed();
3009
3010 self.call_after_response(
3012 &response,
3013 operation,
3014 model,
3015 &request_json,
3016 &state,
3017 duration,
3018 None,
3019 None,
3020 )
3021 .await;
3022
3023 Ok(response)
3024 }
3025
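    /// Cancels an in-progress batch job by ID.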
3026 pub async fn cancel(&self, batch_id: impl Into<String>) -> Result<Batch> {
3041 let id = batch_id.into();
3042
3043 let mut state = T::default();
3045 let operation = operation_names::BATCH_CANCEL;
3046 let model = "batch";
3047 let request_json = format!("{{\"batch_id\":\"{id}\"}}");
3048
3049 self.call_before_request(operation, model, &request_json, &mut state)
3051 .await?;
3052
3053 let start_time = Instant::now();
3054
3055 let response = match batch_api::cancel_batch()
3057 .configuration(&self.client.base_configuration)
3058 .batch_id(&id)
3059 .call()
3060 .await
3061 {
3062 Ok(resp) => resp,
3063 Err(e) => {
3064 let error = self
3065 .handle_api_error(e, operation, model, &request_json, &state)
3066 .await;
3067 return Err(error);
3068 }
3069 };
3070
3071 let duration = start_time.elapsed();
3072
3073 self.call_after_response(
3075 &response,
3076 operation,
3077 model,
3078 &request_json,
3079 &state,
3080 duration,
3081 None,
3082 None,
3083 )
3084 .await;
3085
3086 Ok(response)
3087 }
3088}
3089
3090impl<T: Default + Send + Sync> FineTuningClient<'_, T> {
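    /// Creates a fine-tuning job for the builder's base model and training
    /// file, forwarding the optional validation file and suffix.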
3091 pub async fn create_job(
3108 &self,
3109 builder: crate::builders::fine_tuning::FineTuningJobBuilder,
3110 ) -> Result<FineTuningJob> {
3111 let mut request = CreateFineTuningJobRequest::new(
3112 builder.model().to_string(),
3113 builder.training_file().to_string(),
3114 );
3115
3116 if let Some(validation_file) = builder.validation_file_ref() {
3117 request.validation_file = Some(validation_file.to_string());
3118 }
3119
3120 if let Some(suffix) = builder.suffix_ref() {
3121 request.suffix = Some(suffix.to_string());
3122 }
3123
3124 let mut state = T::default();
3131 let operation = operation_names::FINE_TUNING_CREATE;
3132 let model = builder.model();
3133 let request_json = serde_json::to_string(&request).unwrap_or_default();
3134
3135 self.call_before_request(operation, model, &request_json, &mut state)
3137 .await?;
3138
3139 let start_time = Instant::now();
3140
3141 let response = match fine_tuning_api::create_fine_tuning_job()
3143 .configuration(&self.client.base_configuration)
3144 .create_fine_tuning_job_request(request)
3145 .call()
3146 .await
3147 {
3148 Ok(resp) => resp,
3149 Err(e) => {
3150 let error = self
3151 .handle_api_error(e, operation, model, &request_json, &state)
3152 .await;
3153 return Err(error);
3154 }
3155 };
3156
3157 let duration = start_time.elapsed();
3158
3159 self.call_after_response(
3161 &response,
3162 operation,
3163 model,
3164 &request_json,
3165 &state,
3166 duration,
3167 None,
3168 None,
3169 )
3170 .await;
3171
3172 Ok(response)
3173 }
3174
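    /// Lists fine-tuning jobs, optionally paginated with `after` and `limit`.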
3175 pub async fn list_jobs(
3190 &self,
3191 after: Option<&str>,
3192 limit: Option<i32>,
3193 ) -> Result<ListPaginatedFineTuningJobsResponse> {
3194 let mut state = T::default();
3196 let operation = operation_names::FINE_TUNING_LIST;
3197 let model = "fine-tuning";
3198 let request_json = format!("{{\"after\":{after:?},\"limit\":{limit:?}}}");
3199
3200 self.call_before_request(operation, model, &request_json, &mut state)
3202 .await?;
3203
3204 let start_time = Instant::now();
3205
3206 let response = match fine_tuning_api::list_paginated_fine_tuning_jobs()
3208 .configuration(&self.client.base_configuration)
3209 .maybe_after(after)
3210 .maybe_limit(limit)
3211 .call()
3212 .await
3213 {
3214 Ok(resp) => resp,
3215 Err(e) => {
3216 let error = self
3217 .handle_api_error(e, operation, model, &request_json, &state)
3218 .await;
3219 return Err(error);
3220 }
3221 };
3222
3223 let duration = start_time.elapsed();
3224
3225 self.call_after_response(
3227 &response,
3228 operation,
3229 model,
3230 &request_json,
3231 &state,
3232 duration,
3233 None,
3234 None,
3235 )
3236 .await;
3237
3238 Ok(response)
3239 }
3240
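    /// Retrieves a fine-tuning job by ID.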
3241 pub async fn get_job(&self, job_id: impl Into<String>) -> Result<FineTuningJob> {
3256 let id = job_id.into();
3257
3258 let mut state = T::default();
3260 let operation = operation_names::FINE_TUNING_RETRIEVE;
3261 let model = "fine-tuning";
3262 let request_json = format!("{{\"job_id\":\"{id}\"}}");
3263
3264 self.call_before_request(operation, model, &request_json, &mut state)
3266 .await?;
3267
3268 let start_time = Instant::now();
3269
3270 let response = match fine_tuning_api::retrieve_fine_tuning_job()
3272 .configuration(&self.client.base_configuration)
3273 .fine_tuning_job_id(&id)
3274 .call()
3275 .await
3276 {
3277 Ok(resp) => resp,
3278 Err(e) => {
3279 let error = self
3280 .handle_api_error(e, operation, model, &request_json, &state)
3281 .await;
3282 return Err(error);
3283 }
3284 };
3285
3286 let duration = start_time.elapsed();
3287
3288 self.call_after_response(
3290 &response,
3291 operation,
3292 model,
3293 &request_json,
3294 &state,
3295 duration,
3296 None,
3297 None,
3298 )
3299 .await;
3300
3301 Ok(response)
3302 }
3303
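    /// Cancels a running fine-tuning job by ID.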
3304 pub async fn cancel_job(&self, job_id: impl Into<String>) -> Result<FineTuningJob> {
3319 let id = job_id.into();
3320
3321 let mut state = T::default();
3323 let operation = operation_names::FINE_TUNING_CANCEL;
3324 let model = "fine-tuning";
3325 let request_json = format!("{{\"job_id\":\"{id}\"}}");
3326
3327 self.call_before_request(operation, model, &request_json, &mut state)
3329 .await?;
3330
3331 let start_time = Instant::now();
3332
3333 let response = match fine_tuning_api::cancel_fine_tuning_job()
3335 .configuration(&self.client.base_configuration)
3336 .fine_tuning_job_id(&id)
3337 .call()
3338 .await
3339 {
3340 Ok(resp) => resp,
3341 Err(e) => {
3342 let error = self
3343 .handle_api_error(e, operation, model, &request_json, &state)
3344 .await;
3345 return Err(error);
3346 }
3347 };
3348
3349 let duration = start_time.elapsed();
3350
3351 self.call_after_response(
3353 &response,
3354 operation,
3355 model,
3356 &request_json,
3357 &state,
3358 duration,
3359 None,
3360 None,
3361 )
3362 .await;
3363
3364 Ok(response)
3365 }
3366
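    /// Lists events for a fine-tuning job, optionally paginated with `after`
    /// and `limit`.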
3367 pub async fn list_events(
3382 &self,
3383 job_id: impl Into<String>,
3384 after: Option<&str>,
3385 limit: Option<i32>,
3386 ) -> Result<ListFineTuningJobEventsResponse> {
3387 let id = job_id.into();
3388
3389 let mut state = T::default();
3391 let operation = operation_names::FINE_TUNING_LIST_EVENTS;
3392 let model = "fine-tuning";
3393 let request_json =
3394 format!("{{\"job_id\":\"{id}\",\"after\":{after:?},\"limit\":{limit:?}}}");
3395
3396 self.call_before_request(operation, model, &request_json, &mut state)
3398 .await?;
3399
3400 let start_time = Instant::now();
3401
3402 let response = match fine_tuning_api::list_fine_tuning_events()
3404 .configuration(&self.client.base_configuration)
3405 .fine_tuning_job_id(&id)
3406 .maybe_after(after)
3407 .maybe_limit(limit)
3408 .call()
3409 .await
3410 {
3411 Ok(resp) => resp,
3412 Err(e) => {
3413 let error = self
3414 .handle_api_error(e, operation, model, &request_json, &state)
3415 .await;
3416 return Err(error);
3417 }
3418 };
3419
3420 let duration = start_time.elapsed();
3421
3422 self.call_after_response(
3424 &response,
3425 operation,
3426 model,
3427 &request_json,
3428 &state,
3429 duration,
3430 None,
3431 None,
3432 )
3433 .await;
3434
3435 Ok(response)
3436 }
3437
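    /// Lists checkpoints for a fine-tuning job, optionally paginated with
    /// `after` and `limit`.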
3438 pub async fn list_checkpoints(
3453 &self,
3454 job_id: impl Into<String>,
3455 after: Option<&str>,
3456 limit: Option<i32>,
3457 ) -> Result<ListFineTuningJobCheckpointsResponse> {
3458 let id = job_id.into();
3459
3460 let mut state = T::default();
3462 let operation = operation_names::FINE_TUNING_LIST_CHECKPOINTS;
3463 let model = "fine-tuning";
3464 let request_json =
3465 format!("{{\"job_id\":\"{id}\",\"after\":{after:?},\"limit\":{limit:?}}}");
3466
3467 self.call_before_request(operation, model, &request_json, &mut state)
3469 .await?;
3470
3471 let start_time = Instant::now();
3472
3473 let response = match fine_tuning_api::list_fine_tuning_job_checkpoints()
3475 .configuration(&self.client.base_configuration)
3476 .fine_tuning_job_id(&id)
3477 .maybe_after(after)
3478 .maybe_limit(limit)
3479 .call()
3480 .await
3481 {
3482 Ok(resp) => resp,
3483 Err(e) => {
3484 let error = self
3485 .handle_api_error(e, operation, model, &request_json, &state)
3486 .await;
3487 return Err(error);
3488 }
3489 };
3490
3491 let duration = start_time.elapsed();
3492
3493 self.call_after_response(
3495 &response,
3496 operation,
3497 model,
3498 &request_json,
3499 &state,
3500 duration,
3501 None,
3502 None,
3503 )
3504 .await;
3505
3506 Ok(response)
3507 }
3508}
3509
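/// Converts an error from the generated `openai_client_base` APIs into this
/// crate's `Error` type, preserving the HTTP status and body for API errors.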
3510fn map_api_error<T>(error: ApiError<T>) -> Error {
3511 match error {
3512 ApiError::Reqwest(err) => Error::Http(err),
3513 ApiError::ReqwestMiddleware(err) => {
3514 Error::Internal(format!("reqwest middleware error: {err}"))
3515 }
3516 ApiError::Serde(err) => Error::Json(err),
3517 ApiError::Io(err) => Error::File(err),
3518 ApiError::ResponseError(response) => Error::Api {
3519 status: response.status.as_u16(),
3520 message: response.content,
3521 error_type: None,
3522 error_code: None,
3523 },
3524 }
3525}
3526
3527#[cfg(test)]
3528mod tests {
3529 use super::*;
3530 use openai_client_base::apis::{Error as BaseError, ResponseContent};
3531
3532 #[test]
3533 fn map_api_error_converts_response() {
3534 let response = ResponseContent {
3535 status: reqwest::StatusCode::BAD_REQUEST,
3536 content: "bad request".to_string(),
3537 entity: Option::<()>::None,
3538 };
3539
3540 let error = map_api_error(BaseError::ResponseError(response));
3541 match error {
3542 Error::Api {
3543 status, message, ..
3544 } => {
3545 assert_eq!(status, 400);
3546 assert!(message.contains("bad request"));
3547 }
3548 other => panic!("expected API error, got {other:?}"),
3549 }
3550 }
3551
3552 #[test]
3553 fn test_moderation_builder_creation() {
3554 use crate::builders::moderations::ModerationBuilder;
3555
3556 let builder = ModerationBuilder::new("Test content");
3557 let request = builder.build().unwrap();
3558
3559 assert_eq!(request.input, "Test content");
3560 assert!(request.model.is_none());
3561 }
3562
3563 #[test]
3564 fn test_moderation_builder_with_model() {
3565 use crate::builders::moderations::ModerationBuilder;
3566
3567 let builder = ModerationBuilder::new("Test content").model("text-moderation-stable");
3568 let request = builder.build().unwrap();
3569
3570 assert_eq!(request.input, "Test content");
3571 assert_eq!(request.model, Some("text-moderation-stable".to_string()));
3572 }
3573
3574 #[test]
3575 fn test_moderation_builder_array_input() {
3576 use crate::builders::moderations::ModerationBuilder;
3577
3578 let inputs = vec!["First text".to_string(), "Second text".to_string()];
3579 let builder = ModerationBuilder::new_array(inputs);
3580 let request = builder.build().unwrap();
3581
3582 assert_eq!(request.input, "First text\nSecond text");
3584 }
3585
3586 #[test]
3587 fn test_file_upload_builder_creation() {
3588 use crate::builders::files::{FilePurpose, FileUploadBuilder};
3589
3590 let content = b"test content".to_vec();
3591 let builder = FileUploadBuilder::new("test.txt", FilePurpose::Assistants, content.clone());
3592
3593 assert_eq!(builder.filename(), "test.txt");
3594 assert_eq!(builder.content(), content.as_slice());
3595 assert_eq!(builder.content_size(), content.len());
3596 assert!(!builder.is_empty());
3597 }
3598
3599 #[test]
3600 fn test_file_upload_builder_from_text() {
3601 use crate::builders::files::{FilePurpose, FileUploadBuilder};
3602
3603 let builder =
3604 FileUploadBuilder::from_text("hello.txt", FilePurpose::FineTune, "Hello, world!");
3605
3606 assert_eq!(builder.filename(), "hello.txt");
3607 assert_eq!(
3608 builder.content_as_string(),
3609 Some("Hello, world!".to_string())
3610 );
3611 assert!(!builder.is_empty());
3612 }
3613
3614 #[test]
3615 fn test_file_list_builder() {
3616 use crate::builders::files::{FileListBuilder, FileOrder, FilePurpose};
3617
3618 let builder = FileListBuilder::new()
3619 .purpose(FilePurpose::Assistants)
3620 .limit(10)
3621 .order(FileOrder::Desc);
3622
3623 assert!(builder.purpose_ref().is_some());
3624 assert_eq!(builder.limit_ref(), Some(10));
3625 assert!(builder.order_ref().is_some());
3626 }
3627
3628 #[test]
3629 fn test_file_retrieval_builder() {
3630 use crate::builders::files::FileRetrievalBuilder;
3631
3632 let builder = FileRetrievalBuilder::new("file-123");
3633 assert_eq!(builder.file_id(), "file-123");
3634 }
3635
3636 #[test]
3637 fn test_file_delete_builder() {
3638 use crate::builders::files::FileDeleteBuilder;
3639
3640 let builder = FileDeleteBuilder::new("file-456");
3641 assert_eq!(builder.file_id(), "file-456");
3642 }
3643
3644 #[test]
3645 fn test_file_purpose_display() {
3646 use crate::builders::files::FilePurpose;
3647
3648 assert_eq!(FilePurpose::FineTune.to_string(), "fine-tune");
3649 assert_eq!(FilePurpose::Assistants.to_string(), "assistants");
3650 assert_eq!(FilePurpose::Vision.to_string(), "vision");
3651 assert_eq!(FilePurpose::Batch.to_string(), "batch");
3652 }
3653
3654 #[test]
3655 fn test_vector_store_builder_basic() {
3656 use crate::builders::vector_stores::VectorStoreBuilder;
3657
3658 let builder = VectorStoreBuilder::new()
3659 .name("Test Store")
3660 .add_file("file-1")
3661 .metadata("key", "value");
3662
3663 assert_eq!(builder.name_ref(), Some("Test Store"));
3664 assert_eq!(builder.file_count(), 1);
3665 assert!(builder.has_files());
3666 assert_eq!(builder.metadata_ref().len(), 1);
3667 }
3668
3669 #[test]
3670 fn test_vector_store_builder_with_expiration() {
3671 use crate::builders::vector_stores::VectorStoreBuilder;
3672
3673 let builder = VectorStoreBuilder::new()
3674 .name("Temp Store")
3675 .expires_after_days(30);
3676
3677 assert_eq!(builder.name_ref(), Some("Temp Store"));
3678 assert!(builder.expires_after_ref().is_some());
3679 assert_eq!(builder.expires_after_ref().unwrap().days, 30);
3680 }
3681
3682 #[test]
3683 fn test_vector_store_builder_multiple_files() {
3684 use crate::builders::vector_stores::VectorStoreBuilder;
3685
3686 let files = vec!["file-1".to_string(), "file-2".to_string()];
3687 let builder = VectorStoreBuilder::new()
3688 .name("Multi-File Store")
3689 .file_ids(files.clone());
3690
3691 assert_eq!(builder.file_ids_ref(), files.as_slice());
3692 assert_eq!(builder.file_count(), 2);
3693 }
3694
3695 #[test]
3696 fn test_vector_store_file_builder() {
3697 use crate::builders::vector_stores::VectorStoreFileBuilder;
3698
3699 let builder = VectorStoreFileBuilder::new("vs-123", "file-456");
3700 assert_eq!(builder.vector_store_id(), "vs-123");
3701 assert_eq!(builder.file_id(), "file-456");
3702 }
3703
3704 #[test]
3705 fn test_vector_store_search_builder() {
3706 use crate::builders::vector_stores::VectorStoreSearchBuilder;
3707
3708 let builder = VectorStoreSearchBuilder::new("vs-123", "test query")
3709 .limit(10)
3710 .filter("category", "docs");
3711
3712 assert_eq!(builder.vector_store_id(), "vs-123");
3713 assert_eq!(builder.query(), "test query");
3714 assert_eq!(builder.limit_ref(), Some(10));
3715 assert_eq!(builder.filter_ref().len(), 1);
3716 }
3717
3718 #[test]
3719 fn test_vector_store_search_builder_default() {
3720 use crate::builders::vector_stores::VectorStoreSearchBuilder;
3721
3722 let builder = VectorStoreSearchBuilder::new("vs-123", "query");
3723 assert!(builder.limit_ref().is_none());
3724 assert!(builder.filter_ref().is_empty());
3725 }
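
    // Sketch of an additional mapping check alongside map_api_error_converts_response:
    // it exercises the Io variant, which map_api_error converts into Error::File.
    #[test]
    fn map_api_error_converts_io() {
        let io_err = std::io::Error::new(std::io::ErrorKind::Other, "disk failure");
        let error = map_api_error::<()>(BaseError::Io(io_err));
        assert!(matches!(error, Error::File(_)));
    }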
3726}
3727
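/// Client for the Assistants API (assistants, runs, messages, and run steps),
/// borrowing a reference to the parent `Client`.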
3728#[derive(Debug, Clone, Copy)]
3733pub struct AssistantsClient<'a, T = ()> {
3734 client: &'a Client<T>,
3735}
3736
3737impl<T: Default + Send + Sync> AssistantsClient<'_, T> {
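    /// Creates an assistant from the builder's configuration.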
3738 pub async fn create(&self, builder: AssistantBuilder) -> Result<AssistantObject> {
3757 let request = builder.build()?;
3758
3759 let mut state = T::default();
3761 let operation = operation_names::ASSISTANT_CREATE;
3762 let model = request.model.clone();
3763 let request_json = serde_json::to_string(&request).unwrap_or_default();
3764
3765 self.call_before_request(operation, &model, &request_json, &mut state)
3767 .await?;
3768
3769 let start_time = Instant::now();
3770
3771 let response = match assistants_api::create_assistant()
3773 .configuration(&self.client.base_configuration)
3774 .create_assistant_request(request)
3775 .call()
3776 .await
3777 {
3778 Ok(resp) => resp,
3779 Err(e) => {
3780 let error = self
3781 .handle_api_error(e, operation, &model, &request_json, &state)
3782 .await;
3783 return Err(error);
3784 }
3785 };
3786
3787 let duration = start_time.elapsed();
3788
3789 self.call_after_response(
3791 &response,
3792 operation,
3793 &model,
3794 &request_json,
3795 &state,
3796 duration,
3797 None,
3798 None,
3799 )
3800 .await;
3801
3802 Ok(response)
3803 }
3804
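    /// Lists assistants with optional pagination and ordering.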
3805 pub async fn list(
3820 &self,
3821 limit: Option<i32>,
3822 order: Option<&str>,
3823 after: Option<&str>,
3824 before: Option<&str>,
3825 ) -> Result<ListAssistantsResponse> {
3826 let mut state = T::default();
3828 let operation = operation_names::ASSISTANT_LIST;
3829 let model = "assistants";
3830 let request_json = format!(
3831 "{{\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?}}}"
3832 );
3833
3834 self.call_before_request(operation, model, &request_json, &mut state)
3836 .await?;
3837
3838 let start_time = Instant::now();
3839
3840 let response = match assistants_api::list_assistants()
3842 .configuration(&self.client.base_configuration)
3843 .maybe_limit(limit)
3844 .maybe_order(order)
3845 .maybe_after(after)
3846 .maybe_before(before)
3847 .call()
3848 .await
3849 {
3850 Ok(resp) => resp,
3851 Err(e) => {
3852 let error = self
3853 .handle_api_error(e, operation, model, &request_json, &state)
3854 .await;
3855 return Err(error);
3856 }
3857 };
3858
3859 let duration = start_time.elapsed();
3860
3861 self.call_after_response(
3863 &response,
3864 operation,
3865 model,
3866 &request_json,
3867 &state,
3868 duration,
3869 None,
3870 None,
3871 )
3872 .await;
3873
3874 Ok(response)
3875 }
3876
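    /// Retrieves an assistant by ID.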
3877 pub async fn get(&self, assistant_id: impl Into<String>) -> Result<AssistantObject> {
3892 let id = assistant_id.into();
3893
3894 let mut state = T::default();
3896 let operation = operation_names::ASSISTANT_RETRIEVE;
3897 let model = "assistants";
3898 let request_json = format!("{{\"assistant_id\":\"{id}\"}}");
3899
3900 self.call_before_request(operation, model, &request_json, &mut state)
3902 .await?;
3903
3904 let start_time = Instant::now();
3905
3906 let response = match assistants_api::get_assistant()
3908 .configuration(&self.client.base_configuration)
3909 .assistant_id(&id)
3910 .call()
3911 .await
3912 {
3913 Ok(resp) => resp,
3914 Err(e) => {
3915 let error = self
3916 .handle_api_error(e, operation, model, &request_json, &state)
3917 .await;
3918 return Err(error);
3919 }
3920 };
3921
3922 let duration = start_time.elapsed();
3923
3924 self.call_after_response(
3926 &response,
3927 operation,
3928 model,
3929 &request_json,
3930 &state,
3931 duration,
3932 None,
3933 None,
3934 )
3935 .await;
3936
3937 Ok(response)
3938 }
3939
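    /// Updates an existing assistant, carrying over the builder's model, name,
    /// description, instructions, tools, and metadata.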
3940 pub async fn update(
3959 &self,
3960 assistant_id: impl Into<String>,
3961 builder: AssistantBuilder,
3962 ) -> Result<AssistantObject> {
3963 use openai_client_base::models::ModifyAssistantRequest;
3964
3965 let id = assistant_id.into();
3966 let request_data = builder.build()?;
3967
3968 let mut request = ModifyAssistantRequest::new();
3970 request.model = Some(request_data.model);
3971 request.name = request_data.name.and_then(|n| match *n {
3973 openai_client_base::models::CreateAssistantRequestName::Text(text) => Some(Some(text)),
3974 openai_client_base::models::CreateAssistantRequestName::Null => None,
3975 });
3976 request.description = request_data.description.and_then(|d| match *d {
3977 openai_client_base::models::CreateAssistantRequestDescription::Text(text) => {
3978 Some(Some(text))
3979 }
3980 openai_client_base::models::CreateAssistantRequestDescription::Null => None,
3981 });
3982 request.instructions = request_data.instructions.and_then(|i| match *i {
3983 openai_client_base::models::CreateAssistantRequestInstructions::Text(text) => {
3984 Some(Some(text))
3985 }
3986 openai_client_base::models::CreateAssistantRequestInstructions::Null => None,
3987 });
3988 request.tools = request_data.tools;
3989 request.metadata = request_data.metadata;
3990
3991 let mut state = T::default();
3993 let operation = operation_names::ASSISTANT_UPDATE;
3994 let model = request
3995 .model
3996 .as_ref()
3997 .map_or_else(|| "assistants".to_string(), Clone::clone);
3998 let request_json = serde_json::to_string(&request).unwrap_or_default();
3999
4000 self.call_before_request(operation, &model, &request_json, &mut state)
4002 .await?;
4003
4004 let start_time = Instant::now();
4005
4006 let response = match assistants_api::modify_assistant()
4008 .configuration(&self.client.base_configuration)
4009 .assistant_id(&id)
4010 .modify_assistant_request(request)
4011 .call()
4012 .await
4013 {
4014 Ok(resp) => resp,
4015 Err(e) => {
4016 let error = self
4017 .handle_api_error(e, operation, &model, &request_json, &state)
4018 .await;
4019 return Err(error);
4020 }
4021 };
4022
4023 let duration = start_time.elapsed();
4024
4025 self.call_after_response(
4027 &response,
4028 operation,
4029 &model,
4030 &request_json,
4031 &state,
4032 duration,
4033 None,
4034 None,
4035 )
4036 .await;
4037
4038 Ok(response)
4039 }
4040
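    /// Deletes an assistant by ID.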
4041 pub async fn delete(&self, assistant_id: impl Into<String>) -> Result<DeleteAssistantResponse> {
4056 let id = assistant_id.into();
4057
4058 let mut state = T::default();
4060 let operation = operation_names::ASSISTANT_DELETE;
4061 let model = "assistants";
4062 let request_json = format!("{{\"assistant_id\":\"{id}\"}}");
4063
4064 self.call_before_request(operation, model, &request_json, &mut state)
4066 .await?;
4067
4068 let start_time = Instant::now();
4069
4070 let response = match assistants_api::delete_assistant()
4072 .configuration(&self.client.base_configuration)
4073 .assistant_id(&id)
4074 .call()
4075 .await
4076 {
4077 Ok(resp) => resp,
4078 Err(e) => {
4079 let error = self
4080 .handle_api_error(e, operation, model, &request_json, &state)
4081 .await;
4082 return Err(error);
4083 }
4084 };
4085
4086 let duration = start_time.elapsed();
4087
4088 self.call_after_response(
4090 &response,
4091 operation,
4092 model,
4093 &request_json,
4094 &state,
4095 duration,
4096 None,
4097 None,
4098 )
4099 .await;
4100
4101 Ok(response)
4102 }
4103
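    /// Creates a run on the given thread from the builder's configuration.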
4104 pub async fn create_run(
4121 &self,
4122 thread_id: impl Into<String>,
4123 builder: RunBuilder,
4124 ) -> Result<RunObject> {
4125 let thread_id = thread_id.into();
4126 let request = builder.build()?;
4127
4128 let mut state = T::default();
4130 let operation = operation_names::RUN_CREATE;
4131 let model = request
4132 .model
4133 .as_ref()
4134 .map_or_else(|| "runs".to_string(), Clone::clone);
4135 let request_json = serde_json::to_string(&request).unwrap_or_default();
4136
4137 self.call_before_request(operation, &model, &request_json, &mut state)
4139 .await?;
4140
4141 let start_time = Instant::now();
4142
4143 let response = match assistants_api::create_run()
4145 .configuration(&self.client.base_configuration)
4146 .thread_id(&thread_id)
4147 .create_run_request(request)
4148 .call()
4149 .await
4150 {
4151 Ok(resp) => resp,
4152 Err(e) => {
4153 let error = self
4154 .handle_api_error(e, operation, &model, &request_json, &state)
4155 .await;
4156 return Err(error);
4157 }
4158 };
4159
4160 let duration = start_time.elapsed();
4161
4162 self.call_after_response(
4164 &response,
4165 operation,
4166 &model,
4167 &request_json,
4168 &state,
4169 duration,
4170 None,
4171 None,
4172 )
4173 .await;
4174
4175 Ok(response)
4176 }
4177
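    /// Lists runs for a thread with optional pagination and ordering.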
4178 pub async fn list_runs(
4193 &self,
4194 thread_id: impl Into<String>,
4195 limit: Option<i32>,
4196 order: Option<&str>,
4197 after: Option<&str>,
4198 before: Option<&str>,
4199 ) -> Result<ListRunsResponse> {
4200 let thread_id = thread_id.into();
4201
4202 let mut state = T::default();
4204 let operation = operation_names::RUN_LIST;
4205 let model = "runs";
4206 let request_json = format!(
4207 "{{\"thread_id\":\"{thread_id}\",\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?}}}"
4208 );
4209
4210 self.call_before_request(operation, model, &request_json, &mut state)
4212 .await?;
4213
4214 let start_time = Instant::now();
4215
4216 let response = match assistants_api::list_runs()
4218 .configuration(&self.client.base_configuration)
4219 .thread_id(&thread_id)
4220 .maybe_limit(limit)
4221 .maybe_order(order)
4222 .maybe_after(after)
4223 .maybe_before(before)
4224 .call()
4225 .await
4226 {
4227 Ok(resp) => resp,
4228 Err(e) => {
4229 let error = self
4230 .handle_api_error(e, operation, model, &request_json, &state)
4231 .await;
4232 return Err(error);
4233 }
4234 };
4235
4236 let duration = start_time.elapsed();
4237
4238 self.call_after_response(
4240 &response,
4241 operation,
4242 model,
4243 &request_json,
4244 &state,
4245 duration,
4246 None,
4247 None,
4248 )
4249 .await;
4250
4251 Ok(response)
4252 }
4253
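    /// Retrieves a run by thread and run ID.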
4254 pub async fn get_run(
4269 &self,
4270 thread_id: impl Into<String>,
4271 run_id: impl Into<String>,
4272 ) -> Result<RunObject> {
4273 let thread_id = thread_id.into();
4274 let run_id = run_id.into();
4275
4276 let mut state = T::default();
4278 let operation = operation_names::RUN_RETRIEVE;
4279 let model = "runs";
4280 let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\"}}");
4281
4282 self.call_before_request(operation, model, &request_json, &mut state)
4284 .await?;
4285
4286 let start_time = Instant::now();
4287
4288 let response = match assistants_api::get_run()
4290 .configuration(&self.client.base_configuration)
4291 .thread_id(&thread_id)
4292 .run_id(&run_id)
4293 .call()
4294 .await
4295 {
4296 Ok(resp) => resp,
4297 Err(e) => {
4298 let error = self
4299 .handle_api_error(e, operation, model, &request_json, &state)
4300 .await;
4301 return Err(error);
4302 }
4303 };
4304
4305 let duration = start_time.elapsed();
4306
4307 self.call_after_response(
4309 &response,
4310 operation,
4311 model,
4312 &request_json,
4313 &state,
4314 duration,
4315 None,
4316 None,
4317 )
4318 .await;
4319
4320 Ok(response)
4321 }
4322
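    /// Cancels an in-progress run by thread and run ID.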
4323 pub async fn cancel_run(
4338 &self,
4339 thread_id: impl Into<String>,
4340 run_id: impl Into<String>,
4341 ) -> Result<RunObject> {
4342 let thread_id = thread_id.into();
4343 let run_id = run_id.into();
4344
4345 let mut state = T::default();
4347 let operation = operation_names::RUN_CANCEL;
4348 let model = "runs";
4349 let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\"}}");
4350
4351 self.call_before_request(operation, model, &request_json, &mut state)
4353 .await?;
4354
4355 let start_time = Instant::now();
4356
4357 let response = match assistants_api::cancel_run()
4359 .configuration(&self.client.base_configuration)
4360 .thread_id(&thread_id)
4361 .run_id(&run_id)
4362 .call()
4363 .await
4364 {
4365 Ok(resp) => resp,
4366 Err(e) => {
4367 let error = self
4368 .handle_api_error(e, operation, model, &request_json, &state)
4369 .await;
4370 return Err(error);
4371 }
4372 };
4373
4374 let duration = start_time.elapsed();
4375
4376 self.call_after_response(
4378 &response,
4379 operation,
4380 model,
4381 &request_json,
4382 &state,
4383 duration,
4384 None,
4385 None,
4386 )
4387 .await;
4388
4389 Ok(response)
4390 }
4391
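    /// Submits tool outputs to a run that is waiting on tool calls.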
4392 pub async fn submit_tool_outputs(
4410 &self,
4411 thread_id: impl Into<String>,
4412 run_id: impl Into<String>,
4413 tool_outputs: Vec<SubmitToolOutputsRunRequestToolOutputsInner>,
4414 ) -> Result<RunObject> {
4415 use openai_client_base::models::SubmitToolOutputsRunRequest;
4416
4417 let thread_id = thread_id.into();
4418 let run_id = run_id.into();
4419 let request = SubmitToolOutputsRunRequest::new(tool_outputs);
4420
4421 let mut state = T::default();
4423 let operation = operation_names::RUN_SUBMIT_TOOL_OUTPUTS;
4424 let model = "runs";
4425 let request_json = serde_json::to_string(&request).unwrap_or_default();
4426
4427 self.call_before_request(operation, model, &request_json, &mut state)
4429 .await?;
4430
4431 let start_time = Instant::now();
4432
4433 let response = match assistants_api::submit_tool_ouputs_to_run()
4435 .configuration(&self.client.base_configuration)
4436 .thread_id(&thread_id)
4437 .run_id(&run_id)
4438 .submit_tool_outputs_run_request(request)
4439 .call()
4440 .await
4441 {
4442 Ok(resp) => resp,
4443 Err(e) => {
4444 let error = self
4445 .handle_api_error(e, operation, model, &request_json, &state)
4446 .await;
4447 return Err(error);
4448 }
4449 };
4450
4451 let duration = start_time.elapsed();
4452
4453 self.call_after_response(
4455 &response,
4456 operation,
4457 model,
4458 &request_json,
4459 &state,
4460 duration,
4461 None,
4462 None,
4463 )
4464 .await;
4465
4466 Ok(response)
4467 }
4468
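    /// Creates a message on the given thread from the builder's configuration.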
4469 pub async fn create_message(
4486 &self,
4487 thread_id: impl Into<String>,
4488 builder: MessageBuilder,
4489 ) -> Result<MessageObject> {
4490 let thread_id = thread_id.into();
4491 let request = builder.build()?;
4492
4493 let mut state = T::default();
4495 let operation = operation_names::MESSAGE_CREATE;
4496 let model = "messages";
4497 let request_json = serde_json::to_string(&request).unwrap_or_default();
4498
4499 self.call_before_request(operation, model, &request_json, &mut state)
4501 .await?;
4502
4503 let start_time = Instant::now();
4504
4505 let response = match assistants_api::create_message()
4507 .configuration(&self.client.base_configuration)
4508 .thread_id(&thread_id)
4509 .create_message_request(request)
4510 .call()
4511 .await
4512 {
4513 Ok(resp) => resp,
4514 Err(e) => {
4515 let error = self
4516 .handle_api_error(e, operation, model, &request_json, &state)
4517 .await;
4518 return Err(error);
4519 }
4520 };
4521
4522 let duration = start_time.elapsed();
4523
4524 self.call_after_response(
4526 &response,
4527 operation,
4528 model,
4529 &request_json,
4530 &state,
4531 duration,
4532 None,
4533 None,
4534 )
4535 .await;
4536
4537 Ok(response)
4538 }
4539
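    /// Lists messages for a thread with optional pagination, ordering, and
    /// run filter.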
4540 pub async fn list_messages(
4555 &self,
4556 thread_id: impl Into<String>,
4557 limit: Option<i32>,
4558 order: Option<&str>,
4559 after: Option<&str>,
4560 before: Option<&str>,
4561 run_id: Option<&str>,
4562 ) -> Result<ListMessagesResponse> {
4563 let thread_id = thread_id.into();
4564
4565 let mut state = T::default();
4567 let operation = operation_names::MESSAGE_LIST;
4568 let model = "messages";
4569 let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?},\"run_id\":{run_id:?}}}");
4570
4571 self.call_before_request(operation, model, &request_json, &mut state)
4573 .await?;
4574
4575 let start_time = Instant::now();
4576
4577 let response = match assistants_api::list_messages()
4579 .configuration(&self.client.base_configuration)
4580 .thread_id(&thread_id)
4581 .maybe_limit(limit)
4582 .maybe_order(order)
4583 .maybe_after(after)
4584 .maybe_before(before)
4585 .maybe_run_id(run_id)
4586 .call()
4587 .await
4588 {
4589 Ok(resp) => resp,
4590 Err(e) => {
4591 let error = self
4592 .handle_api_error(e, operation, model, &request_json, &state)
4593 .await;
4594 return Err(error);
4595 }
4596 };
4597
4598 let duration = start_time.elapsed();
4599
4600 self.call_after_response(
4602 &response,
4603 operation,
4604 model,
4605 &request_json,
4606 &state,
4607 duration,
4608 None,
4609 None,
4610 )
4611 .await;
4612
4613 Ok(response)
4614 }
4615
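    /// Retrieves a message by thread and message ID.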
4616 pub async fn get_message(
4631 &self,
4632 thread_id: impl Into<String>,
4633 message_id: impl Into<String>,
4634 ) -> Result<MessageObject> {
4635 let thread_id = thread_id.into();
4636 let message_id = message_id.into();
4637
4638 let mut state = T::default();
4640 let operation = operation_names::MESSAGE_RETRIEVE;
4641 let model = "messages";
4642 let request_json =
4643 format!("{{\"thread_id\":\"{thread_id}\",\"message_id\":\"{message_id}\"}}");
4644
4645 self.call_before_request(operation, model, &request_json, &mut state)
4647 .await?;
4648
4649 let start_time = Instant::now();
4650
4651 let response = match assistants_api::get_message()
4653 .configuration(&self.client.base_configuration)
4654 .thread_id(&thread_id)
4655 .message_id(&message_id)
4656 .call()
4657 .await
4658 {
4659 Ok(resp) => resp,
4660 Err(e) => {
4661 let error = self
4662 .handle_api_error(e, operation, model, &request_json, &state)
4663 .await;
4664 return Err(error);
4665 }
4666 };
4667
4668 let duration = start_time.elapsed();
4669
4670 self.call_after_response(
4672 &response,
4673 operation,
4674 model,
4675 &request_json,
4676 &state,
4677 duration,
4678 None,
4679 None,
4680 )
4681 .await;
4682
4683 Ok(response)
4684 }
4685
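    /// Lists steps for a run with optional pagination, ordering, and
    /// `include` fields.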
4686 #[allow(clippy::too_many_arguments)]
4701 pub async fn list_run_steps(
4702 &self,
4703 thread_id: impl Into<String>,
4704 run_id: impl Into<String>,
4705 limit: Option<i32>,
4706 order: Option<&str>,
4707 after: Option<&str>,
4708 before: Option<&str>,
4709 include: Option<Vec<String>>,
4710 ) -> Result<ListRunStepsResponse> {
4711 let thread_id = thread_id.into();
4712 let run_id = run_id.into();
4713
4714 let mut state = T::default();
4716 let operation = operation_names::RUN_STEP_LIST;
4717 let model = "run_steps";
4718 let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\",\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?},\"include\":{include:?}}}");
4719
4720 self.call_before_request(operation, model, &request_json, &mut state)
4722 .await?;
4723
4724 let start_time = Instant::now();
4725
4726 let response = match assistants_api::list_run_steps()
4728 .configuration(&self.client.base_configuration)
4729 .thread_id(&thread_id)
4730 .run_id(&run_id)
4731 .maybe_limit(limit)
4732 .maybe_order(order)
4733 .maybe_after(after)
4734 .maybe_before(before)
4735 .maybe_include_left_square_bracket_right_square_bracket(include)
4736 .call()
4737 .await
4738 {
4739 Ok(resp) => resp,
4740 Err(e) => {
4741 let error = self
4742 .handle_api_error(e, operation, model, &request_json, &state)
4743 .await;
4744 return Err(error);
4745 }
4746 };
4747
4748 let duration = start_time.elapsed();
4749
4750 self.call_after_response(
4752 &response,
4753 operation,
4754 model,
4755 &request_json,
4756 &state,
4757 duration,
4758 None,
4759 None,
4760 )
4761 .await;
4762
4763 Ok(response)
4764 }
4765
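    /// Retrieves a single run step, optionally expanding the given `include`
    /// fields.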
4766 pub async fn get_run_step(
4781 &self,
4782 thread_id: impl Into<String>,
4783 run_id: impl Into<String>,
4784 step_id: impl Into<String>,
4785 include: Option<Vec<String>>,
4786 ) -> Result<RunStepObject> {
4787 let thread_id = thread_id.into();
4788 let run_id = run_id.into();
4789 let step_id = step_id.into();
4790
4791 let mut state = T::default();
4793 let operation = operation_names::RUN_STEP_RETRIEVE;
4794 let model = "run_steps";
4795 let request_json = format!(
4796 "{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\",\"step_id\":\"{step_id}\",\"include\":{include:?}}}"
4797 );
4798
4799 self.call_before_request(operation, model, &request_json, &mut state)
4801 .await?;
4802
4803 let start_time = Instant::now();
4804
4805 let response = match assistants_api::get_run_step()
4807 .configuration(&self.client.base_configuration)
4808 .thread_id(&thread_id)
4809 .run_id(&run_id)
4810 .step_id(&step_id)
4811 .maybe_include_left_square_bracket_right_square_bracket(include)
4812 .call()
4813 .await
4814 {
4815 Ok(resp) => resp,
4816 Err(e) => {
4817 let error = self
4818 .handle_api_error(e, operation, model, &request_json, &state)
4819 .await;
4820 return Err(error);
4821 }
4822 };
4823
4824 let duration = start_time.elapsed();
4825
4826 self.call_after_response(
4828 &response,
4829 operation,
4830 model,
4831 &request_json,
4832 &state,
4833 duration,
4834 None,
4835 None,
4836 )
4837 .await;
4838
4839 Ok(response)
4840 }
4841}
4842
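/// Client for the Audio API (speech, transcription, and translation),
/// borrowing a reference to the parent `Client`.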
4843#[derive(Debug, Clone, Copy)]
4845#[allow(dead_code)]
4846pub struct AudioClient<'a, T = ()> {
4847 client: &'a Client<T>,
4848}
4849
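/// Client for the Embeddings API, borrowing a reference to the parent `Client`.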
4850#[derive(Debug, Clone, Copy)]
4852#[allow(dead_code)]
4853pub struct EmbeddingsClient<'a, T = ()> {
4854 client: &'a Client<T>,
4855}
4856
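/// Client for the Images API (generation, edits, and variations), borrowing a
/// reference to the parent `Client`.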
4857#[derive(Debug, Clone, Copy)]
4859#[allow(dead_code)]
4860pub struct ImagesClient<'a, T = ()> {
4861 client: &'a Client<T>,
4862}
4863
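/// Client for the Files API, borrowing a reference to the parent `Client`.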
4864#[derive(Debug, Clone, Copy)]
4866#[allow(dead_code)]
4867pub struct FilesClient<'a, T = ()> {
4868 client: &'a Client<T>,
4869}
4870
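/// Client for the Fine-tuning API, borrowing a reference to the parent `Client`.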
4871#[derive(Debug, Clone, Copy)]
4873#[allow(dead_code)]
4874pub struct FineTuningClient<'a, T = ()> {
4875 client: &'a Client<T>,
4876}
4877
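/// Client for the Batch API, borrowing a reference to the parent `Client`.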
4878#[derive(Debug, Clone, Copy)]
4880#[allow(dead_code)]
4881pub struct BatchClient<'a, T = ()> {
4882 client: &'a Client<T>,
4883}
4884
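/// Client for the Vector Stores API, borrowing a reference to the parent `Client`.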
4885#[derive(Debug, Clone, Copy)]
4887#[allow(dead_code)]
4888pub struct VectorStoresClient<'a, T = ()> {
4889 client: &'a Client<T>,
4890}
4891
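/// Client for the Moderations API, borrowing a reference to the parent `Client`.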
4892#[derive(Debug, Clone, Copy)]
4894#[allow(dead_code)]
4895pub struct ModerationsClient<'a, T = ()> {
4896 client: &'a Client<T>,
4897}
4898
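/// Client for the Threads API, borrowing a reference to the parent `Client`.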
4899#[derive(Debug, Clone, Copy)]
4901#[allow(dead_code)]
4902pub struct ThreadsClient<'a, T = ()> {
4903 client: &'a Client<T>,
4904}
4905
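/// Client for the Uploads API, borrowing a reference to the parent `Client`.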
4906#[derive(Debug, Clone, Copy)]
4908#[allow(dead_code)]
4909pub struct UploadsClient<'a, T = ()> {
4910 client: &'a Client<T>,
4911}
4912
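/// Client for the Models API, borrowing a reference to the parent `Client`.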
4913#[derive(Debug, Clone, Copy)]
4915pub struct ModelsClient<'a, T = ()> {
4916 client: &'a Client<T>,
4917}
4918
4919#[derive(Debug, Clone, Copy)]
4921pub struct CompletionsClient<'a, T = ()> {
4922 client: &'a Client<T>,
4923}
4924
4925#[derive(Debug, Clone, Copy)]
4927pub struct UsageClient<'a, T = ()> {
4928 client: &'a Client<T>,
4929}
4930
4931impl_interceptor_helpers!(AssistantsClient<'_, T>);
4933impl_interceptor_helpers!(AudioClient<'_, T>);
4934impl_interceptor_helpers!(EmbeddingsClient<'_, T>);
4935impl_interceptor_helpers!(ImagesClient<'_, T>);
4936impl_interceptor_helpers!(FilesClient<'_, T>);
4937impl_interceptor_helpers!(FineTuningClient<'_, T>);
4938impl_interceptor_helpers!(BatchClient<'_, T>);
4939impl_interceptor_helpers!(VectorStoresClient<'_, T>);
4940impl_interceptor_helpers!(ModerationsClient<'_, T>);
4941impl_interceptor_helpers!(ThreadsClient<'_, T>);
4942impl_interceptor_helpers!(UploadsClient<'_, T>);
4943impl_interceptor_helpers!(ModelsClient<'_, T>);
4944impl_interceptor_helpers!(CompletionsClient<'_, T>);
4945impl_interceptor_helpers!(UsageClient<'_, T>);
4946
4947impl<T: Default + Send + Sync> ModelsClient<'_, T> {
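    /// Lists the models available to the configured API key.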
4948 pub async fn list(&self) -> Result<ListModelsResponse> {
4963 let mut state = T::default();
4965 let operation = operation_names::MODEL_LIST;
4966 let model = "models";
4967 let request_json = "{}".to_string();
4968
4969 self.call_before_request(operation, model, &request_json, &mut state)
4971 .await?;
4972
4973 let start_time = Instant::now();
4974
4975 let response = match models_api::list_models()
4977 .configuration(&self.client.base_configuration)
4978 .call()
4979 .await
4980 {
4981 Ok(resp) => resp,
4982 Err(e) => {
4983 let error = self
4984 .handle_api_error(e, operation, model, &request_json, &state)
4985 .await;
4986 return Err(error);
4987 }
4988 };
4989
4990 let duration = start_time.elapsed();
4991
4992 self.call_after_response(
4994 &response,
4995 operation,
4996 model,
4997 &request_json,
4998 &state,
4999 duration,
5000 None,
5001 None,
5002 )
5003 .await;
5004
5005 Ok(response)
5006 }
5007
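    /// Retrieves a single model by ID.
    ///
    /// # Example
    ///
    /// Illustrative sketch only (not compiled as a doctest); it assumes a
    /// `models()` accessor on `Client`.
    ///
    /// ```ignore
    /// let model = client.models().get("gpt-4o").await?;
    /// ```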
5008 pub async fn get(&self, model_id: impl Into<String>) -> Result<Model> {
5023 let id = model_id.into();
5024
5025 let mut state = T::default();
5027 let operation = operation_names::MODEL_RETRIEVE;
5028 let model = "models";
5029 let request_json = format!("{{\"model_id\":\"{id}\"}}");
5030
5031 self.call_before_request(operation, model, &request_json, &mut state)
5033 .await?;
5034
5035 let start_time = Instant::now();
5036
5037 let response = match models_api::retrieve_model()
5039 .configuration(&self.client.base_configuration)
5040 .model(&id)
5041 .call()
5042 .await
5043 {
5044 Ok(resp) => resp,
5045 Err(e) => {
5046 let error = self
5047 .handle_api_error(e, operation, model, &request_json, &state)
5048 .await;
5049 return Err(error);
5050 }
5051 };
5052
5053 let duration = start_time.elapsed();
5054
5055 self.call_after_response(
5057 &response,
5058 operation,
5059 model,
5060 &request_json,
5061 &state,
5062 duration,
5063 None,
5064 None,
5065 )
5066 .await;
5067
5068 Ok(response)
5069 }
5070
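    /// Retrieves the model named by the builder; a convenience wrapper around
    /// `get`.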
5071 pub async fn retrieve(&self, builder: ModelRetrievalBuilder) -> Result<Model> {
5073 self.get(builder.model_id()).await
5074 }
5075
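    /// Deletes a model by ID (typically a fine-tuned model owned by your
    /// organization).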
5076 pub async fn delete(&self, model_id: impl Into<String>) -> Result<DeleteModelResponse> {
5093 let id = model_id.into();
5094
5095 let mut state = T::default();
5097 let operation = operation_names::MODEL_DELETE;
5098 let model = "models";
5099 let request_json = format!("{{\"model_id\":\"{id}\"}}");
5100
5101 self.call_before_request(operation, model, &request_json, &mut state)
5103 .await?;
5104
5105 let start_time = Instant::now();
5106
5107 let response = match models_api::delete_model()
5109 .configuration(&self.client.base_configuration)
5110 .model(&id)
5111 .call()
5112 .await
5113 {
5114 Ok(resp) => resp,
5115 Err(e) => {
5116 let error = self
5117 .handle_api_error(e, operation, model, &request_json, &state)
5118 .await;
5119 return Err(error);
5120 }
5121 };
5122
5123 let duration = start_time.elapsed();
5124
5125 self.call_after_response(
5127 &response,
5128 operation,
5129 model,
5130 &request_json,
5131 &state,
5132 duration,
5133 None,
5134 None,
5135 )
5136 .await;
5137
5138 Ok(response)
5139 }
5140
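    /// Deletes the model named by the builder; a convenience wrapper around
    /// `delete`.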
5141 pub async fn remove(&self, builder: ModelDeleteBuilder) -> Result<DeleteModelResponse> {
5143 self.delete(builder.model_id()).await
5144 }
5145}
5146
5147impl<T: Default + Send + Sync> CompletionsClient<'_, T> {
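    /// Returns a `CompletionsBuilder` for the given model.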
5148 #[must_use]
5162 pub fn builder(&self, model: impl Into<String>) -> CompletionsBuilder {
5163 CompletionsBuilder::new(model)
5164 }
5165
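    /// Executes a text completion request from the builder and reports
    /// prompt/completion token usage to any registered interceptors.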
5166 pub async fn create(&self, builder: CompletionsBuilder) -> Result<CreateCompletionResponse> {
5185 let request = builder.build()?;
5186
5187 let mut state = T::default();
5189 let operation = operation_names::TEXT_COMPLETION;
5190 let model = request.model.clone();
5191 let request_json = serde_json::to_string(&request).unwrap_or_default();
5192
5193 self.call_before_request(operation, &model, &request_json, &mut state)
5195 .await?;
5196
5197 let start_time = Instant::now();
5198
5199 let response = match completions_api::create_completion()
5201 .configuration(&self.client.base_configuration)
5202 .create_completion_request(request)
5203 .call()
5204 .await
5205 {
5206 Ok(resp) => resp,
5207 Err(e) => {
5208 let error = self
5209 .handle_api_error(e, operation, &model, &request_json, &state)
5210 .await;
5211 return Err(error);
5212 }
5213 };
5214
5215 let duration = start_time.elapsed();
5216
5217 self.call_after_response(
5219 &response,
5220 operation,
5221 &model,
5222 &request_json,
5223 &state,
5224 duration,
5225 response.usage.as_ref().map(|u| i64::from(u.prompt_tokens)),
5226 response
5227 .usage
5228 .as_ref()
5229 .map(|u| i64::from(u.completion_tokens)),
5230 )
5231 .await;
5232
5233 Ok(response)
5234 }
5235}
5236
5237impl<T: Default + Send + Sync> UsageClient<'_, T> {
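    /// Returns audio speech usage for the builder's time range and filters.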
5238 pub async fn audio_speeches(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5255 let mut state = T::default();
5257 let operation = operation_names::USAGE_AUDIO_SPEECHES;
5258 let model = "usage";
5259         let usage_start_time = builder.start_time();
5260         let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5261
5262 self.call_before_request(operation, model, &request_json, &mut state)
5264 .await?;
5265
5266 let start_time = Instant::now();
5267
5268 let response = match usage_api::usage_audio_speeches()
5270 .configuration(&self.client.base_configuration)
5271 .start_time(builder.start_time())
5272 .maybe_end_time(builder.end_time())
5273 .maybe_bucket_width(builder.bucket_width_str())
5274 .maybe_project_ids(builder.project_ids_option())
5275 .maybe_user_ids(builder.user_ids_option())
5276 .maybe_api_key_ids(builder.api_key_ids_option())
5277 .maybe_models(builder.models_option())
5278 .maybe_group_by(builder.group_by_option())
5279 .maybe_limit(builder.limit_ref())
5280 .maybe_page(builder.page_ref())
5281 .call()
5282 .await
5283 {
5284 Ok(resp) => resp,
5285 Err(e) => {
5286 let error = self
5287 .handle_api_error(e, operation, model, &request_json, &state)
5288 .await;
5289 return Err(error);
5290 }
5291 };
5292
5293 let duration = start_time.elapsed();
5294
5295 self.call_after_response(
5297 &response,
5298 operation,
5299 model,
5300 &request_json,
5301 &state,
5302 duration,
5303 None,
5304 None,
5305 )
5306 .await;
5307
5308 Ok(response)
5309 }
5310
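    /// Returns audio transcription usage for the builder's time range and filters.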
5311 pub async fn audio_transcriptions(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5313 let mut state = T::default();
5315 let operation = operation_names::USAGE_AUDIO_TRANSCRIPTIONS;
5316 let model = "usage";
5317         let usage_start_time = builder.start_time();
5318         let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5319
5320 self.call_before_request(operation, model, &request_json, &mut state)
5322 .await?;
5323
5324 let start_time = Instant::now();
5325
5326 let response = match usage_api::usage_audio_transcriptions()
5328 .configuration(&self.client.base_configuration)
5329 .start_time(builder.start_time())
5330 .maybe_end_time(builder.end_time())
5331 .maybe_bucket_width(builder.bucket_width_str())
5332 .maybe_project_ids(builder.project_ids_option())
5333 .maybe_user_ids(builder.user_ids_option())
5334 .maybe_api_key_ids(builder.api_key_ids_option())
5335 .maybe_models(builder.models_option())
5336 .maybe_group_by(builder.group_by_option())
5337 .maybe_limit(builder.limit_ref())
5338 .maybe_page(builder.page_ref())
5339 .call()
5340 .await
5341 {
5342 Ok(resp) => resp,
5343 Err(e) => {
5344 let error = self
5345 .handle_api_error(e, operation, model, &request_json, &state)
5346 .await;
5347 return Err(error);
5348 }
5349 };
5350
5351 let duration = start_time.elapsed();
5352
5353 self.call_after_response(
5355 &response,
5356 operation,
5357 model,
5358 &request_json,
5359 &state,
5360 duration,
5361 None,
5362 None,
5363 )
5364 .await;
5365
5366 Ok(response)
5367 }
5368
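    /// Returns code interpreter session usage for the builder's time range and filters.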
5369 pub async fn code_interpreter_sessions(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5371 let mut state = T::default();
5373 let operation = operation_names::USAGE_CODE_INTERPRETER;
5374 let model = "usage";
5375         let usage_start_time = builder.start_time();
5376         let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5377
5378 self.call_before_request(operation, model, &request_json, &mut state)
5380 .await?;
5381
5382 let start_time = Instant::now();
5383
5384 let response = match usage_api::usage_code_interpreter_sessions()
5386 .configuration(&self.client.base_configuration)
5387 .start_time(builder.start_time())
5388 .maybe_end_time(builder.end_time())
5389 .maybe_bucket_width(builder.bucket_width_str())
5390 .maybe_project_ids(builder.project_ids_option())
5391 .maybe_group_by(builder.group_by_option())
5392 .maybe_limit(builder.limit_ref())
5393 .maybe_page(builder.page_ref())
5394 .call()
5395 .await
5396 {
5397 Ok(resp) => resp,
5398 Err(e) => {
5399 let error = self
5400 .handle_api_error(e, operation, model, &request_json, &state)
5401 .await;
5402 return Err(error);
5403 }
5404 };
5405
5406 let duration = start_time.elapsed();
5407
5408 self.call_after_response(
5410 &response,
5411 operation,
5412 model,
5413 &request_json,
5414 &state,
5415 duration,
5416 None,
5417 None,
5418 )
5419 .await;
5420
5421 Ok(response)
5422 }
5423
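    /// Returns completions usage for the builder's time range and filters.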
5424 pub async fn completions(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5426 let mut state = T::default();
5428 let operation = operation_names::USAGE_COMPLETIONS;
5429 let model = "usage";
5430         let usage_start_time = builder.start_time();
5431         let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5432
5433 self.call_before_request(operation, model, &request_json, &mut state)
5435 .await?;
5436
5437 let start_time = Instant::now();
5438
5439 let response = match usage_api::usage_completions()
5441 .configuration(&self.client.base_configuration)
5442 .start_time(builder.start_time())
5443 .maybe_end_time(builder.end_time())
5444 .maybe_bucket_width(builder.bucket_width_str())
5445 .maybe_project_ids(builder.project_ids_option())
5446 .maybe_user_ids(builder.user_ids_option())
5447 .maybe_api_key_ids(builder.api_key_ids_option())
5448 .maybe_models(builder.models_option())
5449 .maybe_group_by(builder.group_by_option())
5450 .maybe_limit(builder.limit_ref())
5451 .maybe_page(builder.page_ref())
5452 .call()
5453 .await
5454 {
5455 Ok(resp) => resp,
5456 Err(e) => {
5457 let error = self
5458 .handle_api_error(e, operation, model, &request_json, &state)
5459 .await;
5460 return Err(error);
5461 }
5462 };
5463
5464 let duration = start_time.elapsed();
5465
5466 self.call_after_response(
5468 &response,
5469 operation,
5470 model,
5471 &request_json,
5472 &state,
5473 duration,
5474 None,
5475 None,
5476 )
5477 .await;
5478
5479 Ok(response)
5480 }
5481
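    /// Returns embeddings usage for the builder's time range and filters.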
5482 pub async fn embeddings(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5484 let mut state = T::default();
5486 let operation = operation_names::USAGE_EMBEDDINGS;
5487 let model = "usage";
5488         let usage_start_time = builder.start_time();
5489         let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5490
5491 self.call_before_request(operation, model, &request_json, &mut state)
5493 .await?;
5494
5495 let start_time = Instant::now();
5496
5497 let response = match usage_api::usage_embeddings()
5499 .configuration(&self.client.base_configuration)
5500 .start_time(builder.start_time())
5501 .maybe_end_time(builder.end_time())
5502 .maybe_bucket_width(builder.bucket_width_str())
5503 .maybe_project_ids(builder.project_ids_option())
5504 .maybe_user_ids(builder.user_ids_option())
5505 .maybe_api_key_ids(builder.api_key_ids_option())
5506 .maybe_models(builder.models_option())
5507 .maybe_group_by(builder.group_by_option())
5508 .maybe_limit(builder.limit_ref())
5509 .maybe_page(builder.page_ref())
5510 .call()
5511 .await
5512 {
5513 Ok(resp) => resp,
5514 Err(e) => {
5515 let error = self
5516 .handle_api_error(e, operation, model, &request_json, &state)
5517 .await;
5518 return Err(error);
5519 }
5520 };
5521
5522 let duration = start_time.elapsed();
5523
5524 self.call_after_response(
5526 &response,
5527 operation,
5528 model,
5529 &request_json,
5530 &state,
5531 duration,
5532 None,
5533 None,
5534 )
5535 .await;
5536
5537 Ok(response)
5538 }
5539
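    /// Returns image usage for the builder's time range and filters.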
5540 pub async fn images(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5542 let mut state = T::default();
5544 let operation = operation_names::USAGE_IMAGES;
5545 let model = "usage";
5546         let usage_start_time = builder.start_time();
5547         let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5548
5549 self.call_before_request(operation, model, &request_json, &mut state)
5551 .await?;
5552
5553 let start_time = Instant::now();
5554
5555 let response = match usage_api::usage_images()
5557 .configuration(&self.client.base_configuration)
5558 .start_time(builder.start_time())
5559 .maybe_end_time(builder.end_time())
5560 .maybe_bucket_width(builder.bucket_width_str())
5561 .maybe_project_ids(builder.project_ids_option())
5562 .maybe_user_ids(builder.user_ids_option())
5563 .maybe_api_key_ids(builder.api_key_ids_option())
5564 .maybe_models(builder.models_option())
5565 .maybe_group_by(builder.group_by_option())
5566 .maybe_limit(builder.limit_ref())
5567 .maybe_page(builder.page_ref())
5568 .call()
5569 .await
5570 {
5571 Ok(resp) => resp,
5572 Err(e) => {
5573 let error = self
5574 .handle_api_error(e, operation, model, &request_json, &state)
5575 .await;
5576 return Err(error);
5577 }
5578 };
5579
5580 let duration = start_time.elapsed();
5581
5582 self.call_after_response(
5584 &response,
5585 operation,
5586 model,
5587 &request_json,
5588 &state,
5589 duration,
5590 None,
5591 None,
5592 )
5593 .await;
5594
5595 Ok(response)
5596 }
5597
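    /// Returns moderations usage for the builder's time range and filters.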
5598 pub async fn moderations(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5600 let mut state = T::default();
5602 let operation = operation_names::USAGE_MODERATIONS;
5603 let model = "usage";
5604         let usage_start_time = builder.start_time();
5605         let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5606
5607 self.call_before_request(operation, model, &request_json, &mut state)
5609 .await?;
5610
5611 let start_time = Instant::now();
5612
5613 let response = match usage_api::usage_moderations()
5615 .configuration(&self.client.base_configuration)
5616 .start_time(builder.start_time())
5617 .maybe_end_time(builder.end_time())
5618 .maybe_bucket_width(builder.bucket_width_str())
5619 .maybe_project_ids(builder.project_ids_option())
5620 .maybe_user_ids(builder.user_ids_option())
5621 .maybe_api_key_ids(builder.api_key_ids_option())
5622 .maybe_models(builder.models_option())
5623 .maybe_group_by(builder.group_by_option())
5624 .maybe_limit(builder.limit_ref())
5625 .maybe_page(builder.page_ref())
5626 .call()
5627 .await
5628 {
5629 Ok(resp) => resp,
5630 Err(e) => {
5631 let error = self
5632 .handle_api_error(e, operation, model, &request_json, &state)
5633 .await;
5634 return Err(error);
5635 }
5636 };
5637
5638 let duration = start_time.elapsed();
5639
5640 self.call_after_response(
5642 &response,
5643 operation,
5644 model,
5645 &request_json,
5646 &state,
5647 duration,
5648 None,
5649 None,
5650 )
5651 .await;
5652
5653 Ok(response)
5654 }
5655
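    /// Returns vector store usage for the builder's time range and filters.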
5656 pub async fn vector_stores(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5658 let mut state = T::default();
5660 let operation = operation_names::USAGE_VECTOR_STORES;
5661 let model = "usage";
5662         let usage_start_time = builder.start_time();
5663         let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5664
5665 self.call_before_request(operation, model, &request_json, &mut state)
5667 .await?;
5668
5669 let start_time = Instant::now();
5670
5671 let response = match usage_api::usage_vector_stores()
5673 .configuration(&self.client.base_configuration)
5674 .start_time(builder.start_time())
5675 .maybe_end_time(builder.end_time())
5676 .maybe_bucket_width(builder.bucket_width_str())
5677 .maybe_project_ids(builder.project_ids_option())
5678 .maybe_group_by(builder.group_by_option())
5679 .maybe_limit(builder.limit_ref())
5680 .maybe_page(builder.page_ref())
5681 .call()
5682 .await
5683 {
5684 Ok(resp) => resp,
5685 Err(e) => {
5686 let error = self
5687 .handle_api_error(e, operation, model, &request_json, &state)
5688 .await;
5689 return Err(error);
5690 }
5691 };
5692
5693 let duration = start_time.elapsed();
5694
5695 self.call_after_response(
5697 &response,
5698 operation,
5699 model,
5700 &request_json,
5701 &state,
5702 duration,
5703 None,
5704 None,
5705 )
5706 .await;
5707
5708 Ok(response)
5709 }
5710
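    /// Returns cost data for the builder's time range and filters.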
5711 pub async fn costs(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5713 let mut state = T::default();
5715 let operation = operation_names::USAGE_COSTS;
5716 let model = "usage";
5717         let usage_start_time = builder.start_time();
5718         let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5719
5720 self.call_before_request(operation, model, &request_json, &mut state)
5722 .await?;
5723
5724 let start_time = Instant::now();
5725
5726 let response = match usage_api::usage_costs()
5728 .configuration(&self.client.base_configuration)
5729 .start_time(builder.start_time())
5730 .maybe_end_time(builder.end_time())
5731 .maybe_bucket_width(builder.bucket_width_str())
5732 .maybe_project_ids(builder.project_ids_option())
5733 .maybe_group_by(builder.group_by_option())
5734 .maybe_limit(builder.limit_ref())
5735 .maybe_page(builder.page_ref())
5736 .call()
5737 .await
5738 {
5739 Ok(resp) => resp,
5740 Err(e) => {
5741 let error = self
5742 .handle_api_error(e, operation, model, &request_json, &state)
5743 .await;
5744 return Err(error);
5745 }
5746 };
5747
5748 let duration = start_time.elapsed();
5749
5750 self.call_after_response(
5752 &response,
5753 operation,
5754 model,
5755 &request_json,
5756 &state,
5757 duration,
5758 None,
5759 None,
5760 )
5761 .await;
5762
5763 Ok(response)
5764 }
5765}