1#![allow(clippy::too_many_arguments)]
9
10use crate::interceptor::{
11 AfterResponseContext, BeforeRequestContext, ErrorContext, InterceptorChain,
12};
13use crate::semantic_conventions::operation_names;
14use crate::{
15 builders::{
16 assistants::{AssistantBuilder, MessageBuilder, RunBuilder},
17 audio::{
18 SpeechBuilder, TranscriptionBuilder, TranscriptionRequest, TranslationBuilder,
19 TranslationRequest,
20 },
21 completions::CompletionsBuilder,
22 embeddings::EmbeddingsBuilder,
23 files::{FileDeleteBuilder, FileListBuilder, FileRetrievalBuilder, FileUploadBuilder},
24 images::{
25 ImageEditBuilder, ImageEditRequest, ImageGenerationBuilder, ImageVariationBuilder,
26 ImageVariationRequest,
27 },
28 models::{ModelDeleteBuilder, ModelRetrievalBuilder},
29 moderations::ModerationBuilder,
30 threads::ThreadRequestBuilder,
31 uploads::UploadBuilder,
32 usage::UsageBuilder,
33 Builder, ChatCompletionBuilder, ResponsesBuilder,
34 },
35 config::Config,
36 errors::Result,
37 responses::ChatCompletionResponseWrapper,
38 Error, UploadPurpose,
39};
40use openai_client_base::apis::Error as ApiError;
41use openai_client_base::{
42 apis::{
43 assistants_api, audio_api, batch_api, chat_api, completions_api,
44 configuration::Configuration, embeddings_api, files_api, fine_tuning_api, images_api,
45 models_api, moderations_api, uploads_api, usage_api, vector_stores_api,
46 },
47 models::{
48 AssistantObject, Batch, CreateBatchRequest, CreateChatCompletionRequest,
49 CreateCompletionResponse, CreateEmbeddingResponse, CreateFineTuningJobRequest,
50 CreateModerationResponse, CreateTranscription200Response, CreateTranslation200Response,
51 DeleteAssistantResponse, DeleteFileResponse, DeleteModelResponse,
52 DeleteVectorStoreFileResponse, DeleteVectorStoreResponse, FineTuningJob, ImagesResponse,
53 ListAssistantsResponse, ListBatchesResponse, ListFilesResponse,
54 ListFineTuningJobCheckpointsResponse, ListFineTuningJobEventsResponse,
55 ListMessagesResponse, ListModelsResponse, ListPaginatedFineTuningJobsResponse,
56 ListRunStepsResponse, ListRunsResponse, ListVectorStoreFilesResponse,
57 ListVectorStoresResponse, MessageObject, Model, OpenAiFile, RunObject, RunStepObject,
58 SubmitToolOutputsRunRequestToolOutputsInner, ThreadObject, Upload, UsageResponse,
59 VectorStoreFileObject, VectorStoreObject, VectorStoreSearchResultsPage,
60 },
61};
62use reqwest_middleware::ClientWithMiddleware as HttpClient;
63use std::sync::Arc;
64use std::time::Instant;
65use tokio::time::Duration;
66
67macro_rules! impl_interceptor_helpers {
69 ($client_type:ty) => {
70 impl<T: Default + Send + Sync> $client_type {
71 async fn call_before_request(
73 &self,
74 operation: &str,
75 model: &str,
76 request_json: &str,
77 state: &mut T,
78 ) -> Result<()> {
79 if !self.client.interceptors.is_empty() {
80 let mut ctx = BeforeRequestContext {
81 operation,
82 model,
83 request_json,
84 state,
85 };
86 if let Err(e) = self.client.interceptors.before_request(&mut ctx).await {
87 let error_ctx = ErrorContext {
88 operation,
89 model: Some(model),
90 request_json: Some(request_json),
91 error: &e,
92 state: Some(state),
93 };
94 self.client.interceptors.on_error(&error_ctx).await;
95 return Err(e);
96 }
97 }
98 Ok(())
99 }
100
101 async fn handle_api_error<E>(
103 &self,
104 error: openai_client_base::apis::Error<E>,
105 operation: &str,
106 model: &str,
107 request_json: &str,
108 state: &T,
109 ) -> Error {
110 let error = map_api_error(error);
111
112 if !self.client.interceptors.is_empty() {
113 let error_ctx = ErrorContext {
114 operation,
115 model: Some(model),
116 request_json: Some(request_json),
117 error: &error,
118 state: Some(state),
119 };
120 self.client.interceptors.on_error(&error_ctx).await;
121 }
122
123 error
124 }
125
126 async fn call_after_response<R>(
128 &self,
129 response: &R,
130 operation: &str,
131 model: &str,
132 request_json: &str,
133 state: &T,
134 duration: std::time::Duration,
135 input_tokens: Option<i64>,
136 output_tokens: Option<i64>,
137 ) where
138 R: serde::Serialize + Sync,
139 {
140 if !self.client.interceptors.is_empty() {
141 let response_json = serde_json::to_string(response).unwrap_or_default();
142 let ctx = AfterResponseContext {
143 operation,
144 model,
145 request_json,
146 response_json: &response_json,
147 duration,
148 input_tokens,
149 output_tokens,
150 state,
151 };
152 if let Err(e) = self.client.interceptors.after_response(&ctx).await {
153 tracing::warn!("Interceptor after_response failed: {}", e);
154 }
155 }
156 }
157 }
158 };
159}
160
161pub struct ClientBuilder<T = ()> {
175 config: Arc<Config>,
176 http: HttpClient,
177 base_configuration: Configuration,
178 interceptors: InterceptorChain<T>,
179}
180
181#[derive(Clone)]
202pub struct Client<T = ()> {
203 config: Arc<Config>,
204 http: HttpClient,
205 base_configuration: Configuration,
206 interceptors: Arc<InterceptorChain<T>>,
207}
208
209impl<T> std::fmt::Debug for Client<T> {
211 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
212 f.debug_struct("Client")
213 .field("config", &self.config)
214 .field("http", &"<HttpClient>")
215 .field("base_configuration", &"<Configuration>")
216 .field("interceptors", &"<InterceptorChain>")
217 .finish()
218 }
219}
220
221impl ClientBuilder {
223 pub fn new(config: Config) -> Result<Self> {
225 let http_client = if let Some(client) = config.http_client() {
227 client.clone()
228 } else {
229 let reqwest_client = reqwest::Client::builder()
230 .timeout(Duration::from_secs(120)) .user_agent(format!("openai-ergonomic/{}", env!("CARGO_PKG_VERSION")))
232 .build()
233 .map_err(Error::Http)?;
234 reqwest_middleware::ClientBuilder::new(reqwest_client).build()
235 };
236
237 let mut base_configuration = Configuration::new();
239 base_configuration.bearer_access_token = Some(config.api_key().to_string());
240 if let Some(base_url) = config.base_url() {
241 base_configuration.base_path = base_url.to_string();
242 }
243 if let Some(org_id) = config.organization_id() {
244 base_configuration.user_agent = Some(format!(
245 "openai-ergonomic/{} org/{}",
246 env!("CARGO_PKG_VERSION"),
247 org_id
248 ));
249 }
250
251 Ok(Self {
252 config: Arc::new(config),
253 http: http_client,
254 base_configuration,
255 interceptors: InterceptorChain::new(),
256 })
257 }
258
259 pub fn from_env() -> Result<Self> {
261 Self::new(Config::from_env()?)
262 }
263}
264
265impl<T> ClientBuilder<T> {
267 #[must_use]
308 pub fn with_interceptor<U>(
309 self,
310 interceptor: Box<dyn crate::interceptor::Interceptor<U>>,
311 ) -> ClientBuilder<U> {
312 let mut new_chain = InterceptorChain::new();
313 new_chain.add(interceptor);
314
315 ClientBuilder {
316 config: self.config,
317 http: self.http,
318 base_configuration: self.base_configuration,
319 interceptors: new_chain,
320 }
321 }
322
323 #[must_use]
337 pub fn add_interceptor(
338 mut self,
339 interceptor: Box<dyn crate::interceptor::Interceptor<T>>,
340 ) -> Self {
341 self.interceptors.add(interceptor);
342 self
343 }
344
345 #[must_use]
349 pub fn build(self) -> Client<T> {
350 Client {
351 config: self.config,
352 http: self.http,
353 base_configuration: self.base_configuration,
354 interceptors: Arc::new(self.interceptors),
355 }
356 }
357}
358
359impl Client {
361 pub fn builder(config: Config) -> Result<ClientBuilder> {
363 ClientBuilder::new(config)
364 }
365
366 pub fn from_env() -> Result<ClientBuilder> {
368 ClientBuilder::from_env()
369 }
370}
371
372impl<T> Client<T> {
373 pub fn config(&self) -> &Config {
375 &self.config
376 }
377
378 pub fn http_client(&self) -> &HttpClient {
380 &self.http
381 }
382}
383
384impl<T: Default + Send + Sync> Client<T> {
386 async fn call_before_request(
388 &self,
389 operation: &str,
390 model: &str,
391 request_json: &str,
392 state: &mut T,
393 ) -> Result<()> {
394 if !self.interceptors.is_empty() {
395 let mut ctx = BeforeRequestContext {
396 operation,
397 model,
398 request_json,
399 state,
400 };
401 if let Err(e) = self.interceptors.before_request(&mut ctx).await {
402 let error_ctx = ErrorContext {
403 operation,
404 model: Some(model),
405 request_json: Some(request_json),
406 error: &e,
407 state: Some(state),
408 };
409 self.interceptors.on_error(&error_ctx).await;
410 return Err(e);
411 }
412 }
413 Ok(())
414 }
415
416 async fn handle_api_error<E>(
418 &self,
419 error: openai_client_base::apis::Error<E>,
420 operation: &str,
421 model: &str,
422 request_json: &str,
423 state: &T,
424 ) -> Error {
425 let error = map_api_error(error);
426
427 if !self.interceptors.is_empty() {
428 let error_ctx = ErrorContext {
429 operation,
430 model: Some(model),
431 request_json: Some(request_json),
432 error: &error,
433 state: Some(state),
434 };
435 self.interceptors.on_error(&error_ctx).await;
436 }
437
438 error
439 }
440
441 async fn call_after_response<R>(
443 &self,
444 response: &R,
445 operation: &str,
446 model: &str,
447 request_json: &str,
448 state: &T,
449 duration: std::time::Duration,
450 input_tokens: Option<i64>,
451 output_tokens: Option<i64>,
452 ) where
453 R: serde::Serialize + Sync,
454 {
455 if !self.interceptors.is_empty() {
456 let response_json = serde_json::to_string(response).unwrap_or_default();
457 let ctx = AfterResponseContext {
458 operation,
459 model,
460 request_json,
461 response_json: &response_json,
462 duration,
463 input_tokens,
464 output_tokens,
465 state,
466 };
467 if let Err(e) = self.interceptors.after_response(&ctx).await {
468 tracing::warn!("Interceptor after_response failed: {}", e);
469 }
470 }
471 }
472}
473
474impl<T: Default + Send + Sync> Client<T> {
476 pub fn chat(&self) -> ChatCompletionBuilder {
478 let model = self.config.default_model().unwrap_or("gpt-4");
479 ChatCompletionBuilder::new(model)
480 }
481
482 pub fn chat_simple(&self, message: impl Into<String>) -> ChatCompletionBuilder {
484 self.chat().user(message)
485 }
486
487 pub fn chat_with_system(
489 &self,
490 system: impl Into<String>,
491 user: impl Into<String>,
492 ) -> ChatCompletionBuilder {
493 self.chat().system(system).user(user)
494 }
495
496 pub async fn execute_chat(
498 &self,
499 request: CreateChatCompletionRequest,
500 ) -> Result<ChatCompletionResponseWrapper> {
501 let mut state = T::default();
502 let operation = operation_names::CHAT;
503 let model = request.model.clone();
504 let request_json = serde_json::to_string(&request).unwrap_or_default();
505
506 self.call_before_request(operation, &model, &request_json, &mut state)
508 .await?;
509
510 let start_time = Instant::now();
511
512 let response = match chat_api::create_chat_completion()
514 .configuration(&self.base_configuration)
515 .create_chat_completion_request(request)
516 .call()
517 .await
518 {
519 Ok(resp) => resp,
520 Err(e) => {
521 let error = self
522 .handle_api_error(e, operation, &model, &request_json, &state)
523 .await;
524 return Err(error);
525 }
526 };
527
528 let duration = start_time.elapsed();
529
530 self.call_after_response(
532 &response,
533 operation,
534 &model,
535 &request_json,
536 &state,
537 duration,
538 response.usage.as_ref().map(|u| i64::from(u.prompt_tokens)),
539 response
540 .usage
541 .as_ref()
542 .map(|u| i64::from(u.completion_tokens)),
543 )
544 .await;
545
546 Ok(ChatCompletionResponseWrapper::new(response))
547 }
548
549 pub async fn send_chat(
551 &self,
552 builder: ChatCompletionBuilder,
553 ) -> Result<ChatCompletionResponseWrapper> {
554 let request = builder.build()?;
555 self.execute_chat(request).await
556 }
557}
558
559impl<T: Default + Send + Sync> Client<T> {
561 pub fn responses(&self) -> ResponsesBuilder {
563 let model = self.config.default_model().unwrap_or("gpt-4");
564 ResponsesBuilder::new(model)
565 }
566
567 pub fn responses_simple(&self, message: impl Into<String>) -> ResponsesBuilder {
569 self.responses().user(message)
570 }
571
572 pub async fn execute_responses(
574 &self,
575 request: CreateChatCompletionRequest,
576 ) -> Result<ChatCompletionResponseWrapper> {
577 self.execute_chat(request).await
579 }
580
581 pub async fn send_responses(
583 &self,
584 builder: ResponsesBuilder,
585 ) -> Result<ChatCompletionResponseWrapper> {
586 let request = builder.build()?;
587 self.execute_responses(request).await
588 }
589}
590
591impl<T: Default + Send + Sync> Client<T> {
593 #[must_use]
595 pub fn assistants(&self) -> AssistantsClient<'_, T> {
596 AssistantsClient { client: self }
597 }
598
599 #[must_use]
601 pub fn audio(&self) -> AudioClient<'_, T> {
602 AudioClient { client: self }
603 }
604
605 #[must_use]
607 pub fn embeddings(&self) -> EmbeddingsClient<'_, T> {
608 EmbeddingsClient { client: self }
609 }
610
611 #[must_use]
613 pub fn images(&self) -> ImagesClient<'_, T> {
614 ImagesClient { client: self }
615 }
616
617 #[must_use]
619 pub fn files(&self) -> FilesClient<'_, T> {
620 FilesClient { client: self }
621 }
622
623 #[must_use]
625 pub fn fine_tuning(&self) -> FineTuningClient<'_, T> {
626 FineTuningClient { client: self }
627 }
628
629 #[must_use]
631 pub fn batch(&self) -> BatchClient<'_, T> {
632 BatchClient { client: self }
633 }
634
635 #[must_use]
637 pub fn vector_stores(&self) -> VectorStoresClient<'_, T> {
638 VectorStoresClient { client: self }
639 }
640
641 #[must_use]
643 pub fn moderations(&self) -> ModerationsClient<'_, T> {
644 ModerationsClient { client: self }
645 }
646
647 #[must_use]
649 pub fn threads(&self) -> ThreadsClient<'_, T> {
650 ThreadsClient { client: self }
651 }
652
653 #[must_use]
655 pub fn uploads(&self) -> UploadsClient<'_, T> {
656 UploadsClient { client: self }
657 }
658
659 #[must_use]
661 pub fn models(&self) -> ModelsClient<'_, T> {
662 ModelsClient { client: self }
663 }
664
665 #[must_use]
667 pub fn completions(&self) -> CompletionsClient<'_, T> {
668 CompletionsClient { client: self }
669 }
670
671 #[must_use]
673 pub fn usage(&self) -> UsageClient<'_, T> {
674 UsageClient { client: self }
675 }
676}
677
678impl<T: Default + Send + Sync> AudioClient<'_, T> {
679 #[must_use]
681 pub fn speech(
682 &self,
683 model: impl Into<String>,
684 input: impl Into<String>,
685 voice: impl Into<String>,
686 ) -> SpeechBuilder {
687 SpeechBuilder::new(model, input, voice)
688 }
689
690 pub async fn create_speech(&self, builder: SpeechBuilder) -> Result<Vec<u8>> {
692 let request = builder.build()?;
693 let mut state = T::default();
694 let operation = operation_names::AUDIO_SPEECH;
695 let model = request.model.clone();
696 let request_json = serde_json::to_string(&request).unwrap_or_default();
697
698 self.call_before_request(operation, &model, &request_json, &mut state)
700 .await?;
701
702 let start_time = Instant::now();
703
704 let response = match audio_api::create_speech()
706 .configuration(&self.client.base_configuration)
707 .create_speech_request(request)
708 .call()
709 .await
710 {
711 Ok(resp) => resp,
712 Err(e) => {
713 let error = self
714 .handle_api_error(e, operation, &model, &request_json, &state)
715 .await;
716 return Err(error);
717 }
718 };
719
720 let bytes = response.bytes().await.map_err(Error::Http)?;
721 let duration = start_time.elapsed();
722
723 let response_json = format!("{{\"size\": {}}}", bytes.len());
725 self.call_after_response(
726 &response_json,
727 operation,
728 &model,
729 &request_json,
730 &state,
731 duration,
732 None,
733 None,
734 )
735 .await;
736
737 Ok(bytes.to_vec())
738 }
739
740 #[must_use]
742 pub fn transcription(
743 &self,
744 file: impl AsRef<std::path::Path>,
745 model: impl Into<String>,
746 ) -> TranscriptionBuilder {
747 TranscriptionBuilder::new(file, model)
748 }
749
750 pub async fn create_transcription(
752 &self,
753 builder: TranscriptionBuilder,
754 ) -> Result<CreateTranscription200Response> {
755 let request = builder.build()?;
756 let model_str = request.model.clone();
757 let mut state = T::default();
758 let operation = operation_names::AUDIO_TRANSCRIPTION;
759 let request_json = format!(r#"{{"model":"{model_str}","file":"<audio_file>"}}"#);
761
762 self.call_before_request(operation, &model_str, &request_json, &mut state)
764 .await?;
765
766 let TranscriptionRequest {
767 file,
768 model,
769 language,
770 prompt,
771 response_format,
772 temperature,
773 stream,
774 chunking_strategy,
775 timestamp_granularities,
776 include,
777 } = request;
778
779 let timestamp_strings = timestamp_granularities.as_ref().map(|values| {
780 values
781 .iter()
782 .map(|granularity| granularity.as_str().to_string())
783 .collect::<Vec<_>>()
784 });
785
786 let start_time = Instant::now();
787
788 let response = match audio_api::create_transcription()
790 .configuration(&self.client.base_configuration)
791 .file(file)
792 .model(&model)
793 .maybe_language(language.as_deref())
794 .maybe_prompt(prompt.as_deref())
795 .maybe_response_format(response_format)
796 .maybe_temperature(temperature)
797 .maybe_stream(stream)
798 .maybe_chunking_strategy(chunking_strategy)
799 .maybe_timestamp_granularities(timestamp_strings)
800 .maybe_include(include)
801 .call()
802 .await
803 {
804 Ok(resp) => resp,
805 Err(e) => {
806 let error = self
807 .handle_api_error(e, operation, &model_str, &request_json, &state)
808 .await;
809 return Err(error);
810 }
811 };
812
813 let duration = start_time.elapsed();
814
815 self.call_after_response(
817 &response,
818 operation,
819 &model_str,
820 &request_json,
821 &state,
822 duration,
823 None,
824 None,
825 )
826 .await;
827
828 Ok(response)
829 }
830
831 #[must_use]
833 pub fn translation(
834 &self,
835 file: impl AsRef<std::path::Path>,
836 model: impl Into<String>,
837 ) -> TranslationBuilder {
838 TranslationBuilder::new(file, model)
839 }
840
841 pub async fn create_translation(
843 &self,
844 builder: TranslationBuilder,
845 ) -> Result<CreateTranslation200Response> {
846 let request = builder.build()?;
847 let model_str = request.model.clone();
848
849 let mut state = T::default();
851 let operation = operation_names::AUDIO_TRANSLATION;
852 let request_json = format!(r#"{{"model":"{model_str}","file":"<audio_file>"}}"#);
853
854 self.call_before_request(operation, &model_str, &request_json, &mut state)
856 .await?;
857
858 let TranslationRequest {
859 file,
860 model,
861 prompt,
862 response_format,
863 temperature,
864 } = request;
865
866 let response_format_owned = response_format.map(|format| format.to_string());
867
868 let start_time = Instant::now();
869
870 let response = match audio_api::create_translation()
872 .configuration(&self.client.base_configuration)
873 .file(file)
874 .model(&model)
875 .maybe_prompt(prompt.as_deref())
876 .maybe_response_format(response_format_owned.as_deref())
877 .maybe_temperature(temperature)
878 .call()
879 .await
880 {
881 Ok(resp) => resp,
882 Err(e) => {
883 let error = self
884 .handle_api_error(e, operation, &model_str, &request_json, &state)
885 .await;
886 return Err(error);
887 }
888 };
889
890 let duration = start_time.elapsed();
891
892 self.call_after_response(
894 &response,
895 operation,
896 &model_str,
897 &request_json,
898 &state,
899 duration,
900 None,
901 None,
902 )
903 .await;
904
905 Ok(response)
906 }
907}
908
909impl<T: Default + Send + Sync> EmbeddingsClient<'_, T> {
910 #[must_use]
912 pub fn builder(&self, model: impl Into<String>) -> EmbeddingsBuilder {
913 EmbeddingsBuilder::new(model)
914 }
915
916 #[must_use]
918 pub fn text(&self, model: impl Into<String>, input: impl Into<String>) -> EmbeddingsBuilder {
919 self.builder(model).input_text(input)
920 }
921
922 #[must_use]
924 pub fn tokens<I>(&self, model: impl Into<String>, tokens: I) -> EmbeddingsBuilder
925 where
926 I: IntoIterator<Item = i32>,
927 {
928 self.builder(model).input_tokens(tokens)
929 }
930
931 pub async fn create(&self, builder: EmbeddingsBuilder) -> Result<CreateEmbeddingResponse> {
933 let request = builder.build()?;
934
935 let mut state = T::default();
937 let operation = operation_names::EMBEDDINGS;
938 let model = request.model.clone();
939 let request_json = serde_json::to_string(&request).unwrap_or_default();
940
941 self.call_before_request(operation, &model, &request_json, &mut state)
943 .await?;
944
945 let start_time = Instant::now();
946
947 let response = match embeddings_api::create_embedding()
949 .configuration(&self.client.base_configuration)
950 .create_embedding_request(request)
951 .call()
952 .await
953 {
954 Ok(resp) => resp,
955 Err(e) => {
956 let error = self
957 .handle_api_error(e, operation, &model, &request_json, &state)
958 .await;
959 return Err(error);
960 }
961 };
962
963 let duration = start_time.elapsed();
964
965 self.call_after_response(
967 &response,
968 operation,
969 &model,
970 &request_json,
971 &state,
972 duration,
973 Some(i64::from(response.usage.prompt_tokens)),
974 Some(i64::from(response.usage.total_tokens)),
975 )
976 .await;
977
978 Ok(response)
979 }
980}
981
982impl<T: Default + Send + Sync> ImagesClient<'_, T> {
983 #[must_use]
985 pub fn generate(&self, prompt: impl Into<String>) -> ImageGenerationBuilder {
986 ImageGenerationBuilder::new(prompt)
987 }
988
989 pub async fn create(&self, builder: ImageGenerationBuilder) -> Result<ImagesResponse> {
991 let request = builder.build()?;
992
993 let mut state = T::default();
995 let operation = operation_names::IMAGE_GENERATION;
996 let model = request
997 .model
998 .as_ref()
999 .map_or_else(|| "dall-e-2".to_string(), ToString::to_string);
1000 let request_json = serde_json::to_string(&request).unwrap_or_default();
1001
1002 self.call_before_request(operation, &model, &request_json, &mut state)
1004 .await?;
1005
1006 let start_time = Instant::now();
1007
1008 let response = match images_api::create_image()
1010 .configuration(&self.client.base_configuration)
1011 .create_image_request(request)
1012 .call()
1013 .await
1014 {
1015 Ok(resp) => resp,
1016 Err(e) => {
1017 let error = self
1018 .handle_api_error(e, operation, &model, &request_json, &state)
1019 .await;
1020 return Err(error);
1021 }
1022 };
1023
1024 let duration = start_time.elapsed();
1025
1026 self.call_after_response(
1028 &response,
1029 operation,
1030 &model,
1031 &request_json,
1032 &state,
1033 duration,
1034 None,
1035 None,
1036 )
1037 .await;
1038
1039 Ok(response)
1040 }
1041
1042 #[must_use]
1044 pub fn edit(
1045 &self,
1046 image: impl AsRef<std::path::Path>,
1047 prompt: impl Into<String>,
1048 ) -> ImageEditBuilder {
1049 ImageEditBuilder::new(image, prompt)
1050 }
1051
1052 pub async fn create_edit(&self, builder: ImageEditBuilder) -> Result<ImagesResponse> {
1054 let request = builder.build()?;
1055 let model_str = request
1056 .model
1057 .as_ref()
1058 .map_or_else(|| "dall-e-2".to_string(), ToString::to_string);
1059
1060 let mut state = T::default();
1062 let operation = operation_names::IMAGE_EDIT;
1063 let request_json = format!(
1064 r#"{{"prompt":"{}","model":"{}"}}"#,
1065 request.prompt, model_str
1066 );
1067
1068 self.call_before_request(operation, &model_str, &request_json, &mut state)
1070 .await?;
1071
1072 let ImageEditRequest {
1073 image,
1074 prompt,
1075 mask,
1076 background,
1077 model,
1078 n,
1079 size,
1080 response_format,
1081 output_format,
1082 output_compression,
1083 user,
1084 input_fidelity,
1085 stream,
1086 partial_images,
1087 quality,
1088 } = request;
1089
1090 let start_time = Instant::now();
1091
1092 let response = match images_api::create_image_edit()
1094 .configuration(&self.client.base_configuration)
1095 .image(image)
1096 .prompt(&prompt)
1097 .maybe_mask(mask)
1098 .maybe_background(background.as_deref())
1099 .maybe_model(model.as_deref())
1100 .maybe_n(n)
1101 .maybe_size(size.as_deref())
1102 .maybe_response_format(response_format.as_deref())
1103 .maybe_output_format(output_format.as_deref())
1104 .maybe_output_compression(output_compression)
1105 .maybe_user(user.as_deref())
1106 .maybe_input_fidelity(input_fidelity)
1107 .maybe_stream(stream)
1108 .maybe_partial_images(partial_images)
1109 .maybe_quality(quality.as_deref())
1110 .call()
1111 .await
1112 {
1113 Ok(resp) => resp,
1114 Err(e) => {
1115 let error = self
1116 .handle_api_error(e, operation, &model_str, &request_json, &state)
1117 .await;
1118 return Err(error);
1119 }
1120 };
1121
1122 let duration = start_time.elapsed();
1123
1124 self.call_after_response(
1126 &response,
1127 operation,
1128 &model_str,
1129 &request_json,
1130 &state,
1131 duration,
1132 None,
1133 None,
1134 )
1135 .await;
1136
1137 Ok(response)
1138 }
1139
1140 #[must_use]
1142 pub fn variation(&self, image: impl AsRef<std::path::Path>) -> ImageVariationBuilder {
1143 ImageVariationBuilder::new(image)
1144 }
1145
1146 pub async fn create_variation(&self, builder: ImageVariationBuilder) -> Result<ImagesResponse> {
1148 let request = builder.build()?;
1149 let model_str = request
1150 .model
1151 .as_ref()
1152 .map_or_else(|| "dall-e-2".to_string(), ToString::to_string);
1153
1154 let mut state = T::default();
1156 let operation = operation_names::IMAGE_VARIATION;
1157 let request_json = format!(r#"{{"model":"{model_str}"}}"#);
1158
1159 self.call_before_request(operation, &model_str, &request_json, &mut state)
1161 .await?;
1162
1163 let ImageVariationRequest {
1164 image,
1165 model,
1166 n,
1167 response_format,
1168 size,
1169 user,
1170 } = request;
1171
1172 let start_time = Instant::now();
1173
1174 let response = match images_api::create_image_variation()
1176 .configuration(&self.client.base_configuration)
1177 .image(image)
1178 .maybe_model(model.as_deref())
1179 .maybe_n(n)
1180 .maybe_response_format(response_format.as_deref())
1181 .maybe_size(size.as_deref())
1182 .maybe_user(user.as_deref())
1183 .call()
1184 .await
1185 {
1186 Ok(resp) => resp,
1187 Err(e) => {
1188 let error = self
1189 .handle_api_error(e, operation, &model_str, &request_json, &state)
1190 .await;
1191 return Err(error);
1192 }
1193 };
1194
1195 let duration = start_time.elapsed();
1196
1197 self.call_after_response(
1199 &response,
1200 operation,
1201 &model_str,
1202 &request_json,
1203 &state,
1204 duration,
1205 None,
1206 None,
1207 )
1208 .await;
1209
1210 Ok(response)
1211 }
1212}
1213
1214impl<T: Default + Send + Sync> ThreadsClient<'_, T> {
1215 #[must_use]
1217 pub fn builder(&self) -> ThreadRequestBuilder {
1218 ThreadRequestBuilder::new()
1219 }
1220
1221 pub async fn create(&self, builder: ThreadRequestBuilder) -> Result<ThreadObject> {
1223 let request = builder.build()?;
1224
1225 let mut state = T::default();
1227 let operation = operation_names::THREAD_CREATE;
1228 let model = "thread"; let request_json = serde_json::to_string(&request).unwrap_or_default();
1230
1231 self.call_before_request(operation, model, &request_json, &mut state)
1233 .await?;
1234
1235 let start_time = Instant::now();
1236
1237 let response = match assistants_api::create_thread()
1239 .configuration(&self.client.base_configuration)
1240 .maybe_create_thread_request(Some(request))
1241 .call()
1242 .await
1243 {
1244 Ok(resp) => resp,
1245 Err(e) => {
1246 let error = self
1247 .handle_api_error(e, operation, model, &request_json, &state)
1248 .await;
1249 return Err(error);
1250 }
1251 };
1252
1253 let duration = start_time.elapsed();
1254
1255 self.call_after_response(
1257 &response,
1258 operation,
1259 model,
1260 &request_json,
1261 &state,
1262 duration,
1263 None,
1264 None,
1265 )
1266 .await;
1267
1268 Ok(response)
1269 }
1270}
1271
1272impl<T: Default + Send + Sync> UploadsClient<'_, T> {
1273 #[must_use]
1275 pub fn builder(
1276 &self,
1277 filename: impl Into<String>,
1278 purpose: UploadPurpose,
1279 bytes: i32,
1280 mime_type: impl Into<String>,
1281 ) -> UploadBuilder {
1282 UploadBuilder::new(filename, purpose, bytes, mime_type)
1283 }
1284
1285 pub async fn create(&self, builder: UploadBuilder) -> Result<Upload> {
1287 let request = builder.build()?;
1288
1289 let mut state = T::default();
1291 let operation = operation_names::UPLOAD_CREATE;
1292 let model = "upload"; let request_json = serde_json::to_string(&request).unwrap_or_default();
1294
1295 self.call_before_request(operation, model, &request_json, &mut state)
1297 .await?;
1298
1299 let start_time = Instant::now();
1300
1301 let response = match uploads_api::create_upload()
1303 .configuration(&self.client.base_configuration)
1304 .create_upload_request(request)
1305 .call()
1306 .await
1307 {
1308 Ok(resp) => resp,
1309 Err(e) => {
1310 let error = self
1311 .handle_api_error(e, operation, model, &request_json, &state)
1312 .await;
1313 return Err(error);
1314 }
1315 };
1316
1317 let duration = start_time.elapsed();
1318
1319 self.call_after_response(
1321 &response,
1322 operation,
1323 model,
1324 &request_json,
1325 &state,
1326 duration,
1327 None,
1328 None,
1329 )
1330 .await;
1331
1332 Ok(response)
1333 }
1334}
1335
1336impl<T: Default + Send + Sync> ModerationsClient<'_, T> {
1337 #[must_use]
1353 pub fn builder(&self, input: impl Into<String>) -> ModerationBuilder {
1354 ModerationBuilder::new(input)
1355 }
1356
1357 #[must_use]
1376 pub fn check(&self, input: impl Into<String>) -> ModerationBuilder {
1377 ModerationBuilder::new(input)
1378 }
1379
1380 pub async fn create(&self, builder: ModerationBuilder) -> Result<CreateModerationResponse> {
1411 let request = builder.build()?;
1412
1413 let mut state = T::default();
1415 let operation = operation_names::MODERATION;
1416 let model = request
1417 .model
1418 .as_ref()
1419 .map_or_else(|| "text-moderation-latest".to_string(), ToString::to_string);
1420 let request_json = serde_json::to_string(&request).unwrap_or_default();
1421
1422 self.call_before_request(operation, &model, &request_json, &mut state)
1424 .await?;
1425
1426 let start_time = Instant::now();
1427
1428 let response = match moderations_api::create_moderation()
1430 .configuration(&self.client.base_configuration)
1431 .create_moderation_request(request)
1432 .call()
1433 .await
1434 {
1435 Ok(resp) => resp,
1436 Err(e) => {
1437 let error = self
1438 .handle_api_error(e, operation, &model, &request_json, &state)
1439 .await;
1440 return Err(error);
1441 }
1442 };
1443
1444 let duration = start_time.elapsed();
1445
1446 self.call_after_response(
1448 &response,
1449 operation,
1450 &model,
1451 &request_json,
1452 &state,
1453 duration,
1454 None,
1455 None,
1456 )
1457 .await;
1458
1459 Ok(response)
1460 }
1461}
1462
1463impl<T: Default + Send + Sync> FilesClient<'_, T> {
1464 pub async fn upload(&self, builder: FileUploadBuilder) -> Result<OpenAiFile> {
1483 let temp_dir = std::env::temp_dir();
1485 let temp_file_path = temp_dir.join(builder.filename());
1486 std::fs::write(&temp_file_path, builder.content()).map_err(Error::File)?;
1487
1488 let purpose = match builder.purpose().to_string().as_str() {
1490 "fine-tune" => openai_client_base::models::FilePurpose::FineTune,
1491 "vision" => openai_client_base::models::FilePurpose::Vision,
1492 "batch" => openai_client_base::models::FilePurpose::Batch,
1493 _ => openai_client_base::models::FilePurpose::Assistants, };
1495
1496 let mut state = T::default();
1498 let operation = operation_names::FILE_UPLOAD;
1499 let model = "file-upload"; let request_json = format!(
1501 r#"{{"filename":"{}","purpose":"{}","size":{}}}"#,
1502 builder.filename(),
1503 builder.purpose(),
1504 builder.content().len()
1505 );
1506
1507 if let Err(e) = self
1509 .call_before_request(operation, model, &request_json, &mut state)
1510 .await
1511 {
1512 let _ = std::fs::remove_file(&temp_file_path);
1514 return Err(e);
1515 }
1516
1517 let start_time = Instant::now();
1518
1519 let result = match files_api::create_file()
1521 .configuration(&self.client.base_configuration)
1522 .file(temp_file_path.clone())
1523 .purpose(purpose)
1524 .call()
1525 .await
1526 {
1527 Ok(resp) => resp,
1528 Err(e) => {
1529 let _ = std::fs::remove_file(&temp_file_path);
1531 let error = self
1532 .handle_api_error(e, operation, model, &request_json, &state)
1533 .await;
1534 return Err(error);
1535 }
1536 };
1537
1538 let _ = std::fs::remove_file(temp_file_path);
1540
1541 let duration = start_time.elapsed();
1542
1543 self.call_after_response(
1545 &result,
1546 operation,
1547 model,
1548 &request_json,
1549 &state,
1550 duration,
1551 None,
1552 None,
1553 )
1554 .await;
1555
1556 Ok(result)
1557 }
1558
1559 pub async fn create(&self, builder: FileUploadBuilder) -> Result<OpenAiFile> {
1578 self.upload(builder).await
1579 }
1580
1581 #[must_use]
1583 pub fn upload_text(
1584 &self,
1585 filename: impl Into<String>,
1586 purpose: crate::builders::files::FilePurpose,
1587 text: impl Into<String>,
1588 ) -> FileUploadBuilder {
1589 FileUploadBuilder::from_text(filename, purpose, text)
1590 }
1591
1592 #[must_use]
1594 pub fn upload_bytes(
1595 &self,
1596 filename: impl Into<String>,
1597 purpose: crate::builders::files::FilePurpose,
1598 content: Vec<u8>,
1599 ) -> FileUploadBuilder {
1600 FileUploadBuilder::new(filename, purpose, content)
1601 }
1602
1603 pub fn upload_from_path(
1605 &self,
1606 path: impl AsRef<std::path::Path>,
1607 purpose: crate::builders::files::FilePurpose,
1608 ) -> Result<FileUploadBuilder> {
1609 FileUploadBuilder::from_path(path, purpose).map_err(Error::File)
1610 }
1611
1612 pub async fn list(&self, builder: FileListBuilder) -> Result<ListFilesResponse> {
1628 let purpose = builder.purpose_ref().map(ToString::to_string);
1629 let limit = builder.limit_ref();
1630 let order = builder.order_ref().map(ToString::to_string);
1631
1632 let mut state = T::default();
1634 let operation = operation_names::FILE_LIST;
1635 let model = "files";
1636 let request_json = format!(
1637 r#"{{"purpose":"{}","limit":{},"order":"{}"}}"#,
1638 purpose.as_deref().unwrap_or(""),
1639 limit.unwrap_or(10000),
1640 order.as_deref().unwrap_or("desc")
1641 );
1642
1643 self.call_before_request(operation, model, &request_json, &mut state)
1645 .await?;
1646
1647 let start_time = Instant::now();
1648
1649 let response = match files_api::list_files()
1651 .configuration(&self.client.base_configuration)
1652 .maybe_purpose(purpose.as_deref())
1653 .maybe_limit(limit)
1654 .maybe_order(order.as_deref())
1655 .call()
1656 .await
1657 {
1658 Ok(resp) => resp,
1659 Err(e) => {
1660 let error = self
1661 .handle_api_error(e, operation, model, &request_json, &state)
1662 .await;
1663 return Err(error);
1664 }
1665 };
1666
1667 let duration = start_time.elapsed();
1668
1669 self.call_after_response(
1671 &response,
1672 operation,
1673 model,
1674 &request_json,
1675 &state,
1676 duration,
1677 None,
1678 None,
1679 )
1680 .await;
1681
1682 Ok(response)
1683 }
1684
1685 #[must_use]
1687 pub fn list_builder(&self) -> FileListBuilder {
1688 FileListBuilder::new()
1689 }
1690
1691 pub async fn retrieve(&self, file_id: impl Into<String>) -> Result<OpenAiFile> {
1706 let file_id = file_id.into();
1707
1708 let mut state = T::default();
1710 let operation = operation_names::FILE_RETRIEVE;
1711 let model = "files";
1712 let request_json = format!(r#"{{"file_id":"{file_id}"}}"#);
1713
1714 self.call_before_request(operation, model, &request_json, &mut state)
1716 .await?;
1717
1718 let start_time = Instant::now();
1719
1720 let response = match files_api::retrieve_file()
1722 .configuration(&self.client.base_configuration)
1723 .file_id(&file_id)
1724 .call()
1725 .await
1726 {
1727 Ok(resp) => resp,
1728 Err(e) => {
1729 let error = self
1730 .handle_api_error(e, operation, model, &request_json, &state)
1731 .await;
1732 return Err(error);
1733 }
1734 };
1735
1736 let duration = start_time.elapsed();
1737
1738 self.call_after_response(
1740 &response,
1741 operation,
1742 model,
1743 &request_json,
1744 &state,
1745 duration,
1746 None,
1747 None,
1748 )
1749 .await;
1750
1751 Ok(response)
1752 }
1753
1754 pub async fn get(&self, builder: FileRetrievalBuilder) -> Result<OpenAiFile> {
1756 self.retrieve(builder.file_id()).await
1757 }
1758
1759 pub async fn download(&self, file_id: impl Into<String>) -> Result<String> {
1774 let file_id = file_id.into();
1775
1776 let mut state = T::default();
1778 let operation = operation_names::FILE_DOWNLOAD;
1779 let model = "files";
1780 let request_json = format!(r#"{{"file_id":"{file_id}"}}"#);
1781
1782 self.call_before_request(operation, model, &request_json, &mut state)
1784 .await?;
1785
1786 let start_time = Instant::now();
1787
1788 let response = match files_api::download_file()
1790 .configuration(&self.client.base_configuration)
1791 .file_id(&file_id)
1792 .call()
1793 .await
1794 {
1795 Ok(resp) => resp,
1796 Err(e) => {
1797 let error = self
1798 .handle_api_error(e, operation, model, &request_json, &state)
1799 .await;
1800 return Err(error);
1801 }
1802 };
1803
1804 let duration = start_time.elapsed();
1805
1806 let response_size = format!(r#"{{"size":{}}}"#, response.len());
1808 self.call_after_response(
1809 &response_size,
1810 operation,
1811 model,
1812 &request_json,
1813 &state,
1814 duration,
1815 None,
1816 None,
1817 )
1818 .await;
1819
1820 Ok(response)
1821 }
1822
1823 pub async fn download_bytes(&self, file_id: impl Into<String>) -> Result<Vec<u8>> {
1825 let content = self.download(file_id).await?;
1826 Ok(content.into_bytes())
1827 }
1828
1829 pub async fn delete(&self, file_id: impl Into<String>) -> Result<DeleteFileResponse> {
1844 let file_id = file_id.into();
1845
1846 let mut state = T::default();
1848 let operation = operation_names::FILE_DELETE;
1849 let model = "files";
1850 let request_json = format!(r#"{{"file_id":"{file_id}"}}"#);
1851
1852 self.call_before_request(operation, model, &request_json, &mut state)
1854 .await?;
1855
1856 let start_time = Instant::now();
1857
1858 let response = match files_api::delete_file()
1860 .configuration(&self.client.base_configuration)
1861 .file_id(&file_id)
1862 .call()
1863 .await
1864 {
1865 Ok(resp) => resp,
1866 Err(e) => {
1867 let error = self
1868 .handle_api_error(e, operation, model, &request_json, &state)
1869 .await;
1870 return Err(error);
1871 }
1872 };
1873
1874 let duration = start_time.elapsed();
1875
1876 self.call_after_response(
1878 &response,
1879 operation,
1880 model,
1881 &request_json,
1882 &state,
1883 duration,
1884 None,
1885 None,
1886 )
1887 .await;
1888
1889 Ok(response)
1890 }
1891
1892 pub async fn remove(&self, builder: FileDeleteBuilder) -> Result<DeleteFileResponse> {
1894 self.delete(builder.file_id()).await
1895 }
1896}
1897
1898impl<T: Default + Send + Sync> VectorStoresClient<'_, T> {
1899 pub async fn create(
1918 &self,
1919 builder: crate::builders::vector_stores::VectorStoreBuilder,
1920 ) -> Result<VectorStoreObject> {
1921 use openai_client_base::models::{CreateVectorStoreRequest, VectorStoreExpirationAfter};
1922
1923 let mut request = CreateVectorStoreRequest::new();
1924 request.name = builder.name_ref().map(String::from);
1925 request.file_ids = if builder.has_files() {
1926 Some(builder.file_ids_ref().to_vec())
1927 } else {
1928 None
1929 };
1930
1931 if let Some(expires_after) = builder.expires_after_ref() {
1932 use openai_client_base::models::vector_store_expiration_after::Anchor;
1933 request.expires_after = Some(Box::new(VectorStoreExpirationAfter::new(
1934 Anchor::LastActiveAt,
1935 expires_after.days,
1936 )));
1937 }
1938
1939 if !builder.metadata_ref().is_empty() {
1940 request.metadata = Some(Some(builder.metadata_ref().clone()));
1941 }
1942
1943 let mut state = T::default();
1945 let operation = operation_names::VECTOR_STORE_CREATE;
1946 let model = "vector-store";
1947 let request_json = serde_json::to_string(&request).unwrap_or_default();
1948
1949 self.call_before_request(operation, model, &request_json, &mut state)
1951 .await?;
1952
1953 let start_time = Instant::now();
1954
1955 let response = match vector_stores_api::create_vector_store()
1957 .configuration(&self.client.base_configuration)
1958 .create_vector_store_request(request)
1959 .call()
1960 .await
1961 {
1962 Ok(resp) => resp,
1963 Err(e) => {
1964 let error = self
1965 .handle_api_error(e, operation, model, &request_json, &state)
1966 .await;
1967 return Err(error);
1968 }
1969 };
1970
1971 let duration = start_time.elapsed();
1972
1973 self.call_after_response(
1975 &response,
1976 operation,
1977 model,
1978 &request_json,
1979 &state,
1980 duration,
1981 None,
1982 None,
1983 )
1984 .await;
1985
1986 Ok(response)
1987 }
1988
1989 pub async fn list(
2004 &self,
2005 limit: Option<i32>,
2006 order: Option<&str>,
2007 after: Option<&str>,
2008 before: Option<&str>,
2009 ) -> Result<ListVectorStoresResponse> {
2010 let mut state = T::default();
2012 let operation = operation_names::VECTOR_STORE_LIST;
2013 let model = "vector-store";
2014 let request_json = format!(
2015 r#"{{"limit":{},"order":"{}"}}"#,
2016 limit.unwrap_or(20),
2017 order.unwrap_or("desc")
2018 );
2019
2020 self.call_before_request(operation, model, &request_json, &mut state)
2022 .await?;
2023
2024 let start_time = Instant::now();
2025
2026 let response = match vector_stores_api::list_vector_stores()
2028 .configuration(&self.client.base_configuration)
2029 .maybe_limit(limit)
2030 .maybe_order(order)
2031 .maybe_after(after)
2032 .maybe_before(before)
2033 .call()
2034 .await
2035 {
2036 Ok(resp) => resp,
2037 Err(e) => {
2038 let error = self
2039 .handle_api_error(e, operation, model, &request_json, &state)
2040 .await;
2041 return Err(error);
2042 }
2043 };
2044
2045 let duration = start_time.elapsed();
2046
2047 self.call_after_response(
2049 &response,
2050 operation,
2051 model,
2052 &request_json,
2053 &state,
2054 duration,
2055 None,
2056 None,
2057 )
2058 .await;
2059
2060 Ok(response)
2061 }
2062
2063 pub async fn get(&self, vector_store_id: impl Into<String>) -> Result<VectorStoreObject> {
2078 let id = vector_store_id.into();
2079
2080 let mut state = T::default();
2082 let operation = operation_names::VECTOR_STORE_RETRIEVE;
2083 let model = "vector-store";
2084 let request_json = format!(r#"{{"vector_store_id":"{id}"}}"#);
2085
2086 self.call_before_request(operation, model, &request_json, &mut state)
2088 .await?;
2089
2090 let start_time = Instant::now();
2091
2092 let response = match vector_stores_api::get_vector_store()
2094 .configuration(&self.client.base_configuration)
2095 .vector_store_id(&id)
2096 .call()
2097 .await
2098 {
2099 Ok(resp) => resp,
2100 Err(e) => {
2101 let error = self
2102 .handle_api_error(e, operation, model, &request_json, &state)
2103 .await;
2104 return Err(error);
2105 }
2106 };
2107
2108 let duration = start_time.elapsed();
2109
2110 self.call_after_response(
2112 &response,
2113 operation,
2114 model,
2115 &request_json,
2116 &state,
2117 duration,
2118 None,
2119 None,
2120 )
2121 .await;
2122
2123 Ok(response)
2124 }
2125
2126 pub async fn update(
2145 &self,
2146 vector_store_id: impl Into<String>,
2147 builder: crate::builders::vector_stores::VectorStoreBuilder,
2148 ) -> Result<VectorStoreObject> {
2149 use openai_client_base::models::{UpdateVectorStoreRequest, VectorStoreExpirationAfter};
2150
2151 let id = vector_store_id.into();
2152 let mut request = UpdateVectorStoreRequest::new();
2153 request.name = builder.name_ref().map(String::from);
2154
2155 if let Some(expires_after) = builder.expires_after_ref() {
2156 use openai_client_base::models::vector_store_expiration_after::Anchor;
2157 request.expires_after = Some(Box::new(VectorStoreExpirationAfter::new(
2158 Anchor::LastActiveAt,
2159 expires_after.days,
2160 )));
2161 }
2162
2163 if !builder.metadata_ref().is_empty() {
2164 request.metadata = Some(Some(builder.metadata_ref().clone()));
2165 }
2166
2167 let mut state = T::default();
2169 let operation = operation_names::VECTOR_STORE_UPDATE;
2170 let model = "vector-store";
2171 let request_json = serde_json::to_string(&request).unwrap_or_default();
2172
2173 self.call_before_request(operation, model, &request_json, &mut state)
2175 .await?;
2176
2177 let start_time = Instant::now();
2178
2179 let response = match vector_stores_api::modify_vector_store()
2181 .configuration(&self.client.base_configuration)
2182 .vector_store_id(&id)
2183 .update_vector_store_request(request)
2184 .call()
2185 .await
2186 {
2187 Ok(resp) => resp,
2188 Err(e) => {
2189 let error = self
2190 .handle_api_error(e, operation, model, &request_json, &state)
2191 .await;
2192 return Err(error);
2193 }
2194 };
2195
2196 let duration = start_time.elapsed();
2197
2198 self.call_after_response(
2200 &response,
2201 operation,
2202 model,
2203 &request_json,
2204 &state,
2205 duration,
2206 None,
2207 None,
2208 )
2209 .await;
2210
2211 Ok(response)
2212 }
2213
2214 pub async fn delete(
2229 &self,
2230 vector_store_id: impl Into<String>,
2231 ) -> Result<DeleteVectorStoreResponse> {
2232 let id = vector_store_id.into();
2233
2234 let mut state = T::default();
2236 let operation = operation_names::VECTOR_STORE_DELETE;
2237 let model = "vector-store";
2238 let request_json = format!(r#"{{"vector_store_id":"{id}"}}"#);
2239
2240 self.call_before_request(operation, model, &request_json, &mut state)
2242 .await?;
2243
2244 let start_time = Instant::now();
2245
2246 let response = match vector_stores_api::delete_vector_store()
2248 .configuration(&self.client.base_configuration)
2249 .vector_store_id(&id)
2250 .call()
2251 .await
2252 {
2253 Ok(resp) => resp,
2254 Err(e) => {
2255 let error = self
2256 .handle_api_error(e, operation, model, &request_json, &state)
2257 .await;
2258 return Err(error);
2259 }
2260 };
2261
2262 let duration = start_time.elapsed();
2263
2264 self.call_after_response(
2266 &response,
2267 operation,
2268 model,
2269 &request_json,
2270 &state,
2271 duration,
2272 None,
2273 None,
2274 )
2275 .await;
2276
2277 Ok(response)
2278 }
2279
2280 pub async fn add_file(
2295 &self,
2296 vector_store_id: impl Into<String>,
2297 file_id: impl Into<String>,
2298 ) -> Result<VectorStoreFileObject> {
2299 use openai_client_base::models::CreateVectorStoreFileRequest;
2300
2301 let vs_id = vector_store_id.into();
2302 let f_id = file_id.into();
2303 let request = CreateVectorStoreFileRequest::new(f_id.clone());
2304
2305 let mut state = T::default();
2307 let operation = operation_names::VECTOR_STORE_FILE_ADD;
2308 let model = "vector-store";
2309 let request_json = format!(r#"{{"vector_store_id":"{vs_id}","file_id":"{f_id}"}}"#);
2310
2311 self.call_before_request(operation, model, &request_json, &mut state)
2313 .await?;
2314
2315 let start_time = Instant::now();
2316
2317 let response = match vector_stores_api::create_vector_store_file()
2319 .configuration(&self.client.base_configuration)
2320 .vector_store_id(&vs_id)
2321 .create_vector_store_file_request(request)
2322 .call()
2323 .await
2324 {
2325 Ok(resp) => resp,
2326 Err(e) => {
2327 let error = self
2328 .handle_api_error(e, operation, model, &request_json, &state)
2329 .await;
2330 return Err(error);
2331 }
2332 };
2333
2334 let duration = start_time.elapsed();
2335
2336 self.call_after_response(
2338 &response,
2339 operation,
2340 model,
2341 &request_json,
2342 &state,
2343 duration,
2344 None,
2345 None,
2346 )
2347 .await;
2348
2349 Ok(response)
2350 }
2351
2352 pub async fn list_files(
2367 &self,
2368 vector_store_id: impl Into<String>,
2369 limit: Option<i32>,
2370 order: Option<&str>,
2371 after: Option<&str>,
2372 before: Option<&str>,
2373 filter: Option<&str>,
2374 ) -> Result<ListVectorStoreFilesResponse> {
2375 let id = vector_store_id.into();
2376
2377 let mut state = T::default();
2379 let operation = operation_names::VECTOR_STORE_FILE_LIST;
2380 let model = "vector-store";
2381 let request_json = format!(r#"{{"vector_store_id":"{id}"}}"#);
2382
2383 self.call_before_request(operation, model, &request_json, &mut state)
2385 .await?;
2386
2387 let start_time = Instant::now();
2388
2389 let response = match vector_stores_api::list_vector_store_files()
2391 .configuration(&self.client.base_configuration)
2392 .vector_store_id(&id)
2393 .maybe_limit(limit)
2394 .maybe_order(order)
2395 .maybe_after(after)
2396 .maybe_before(before)
2397 .maybe_filter(filter)
2398 .call()
2399 .await
2400 {
2401 Ok(resp) => resp,
2402 Err(e) => {
2403 let error = self
2404 .handle_api_error(e, operation, model, &request_json, &state)
2405 .await;
2406 return Err(error);
2407 }
2408 };
2409
2410 let duration = start_time.elapsed();
2411
2412 self.call_after_response(
2414 &response,
2415 operation,
2416 model,
2417 &request_json,
2418 &state,
2419 duration,
2420 None,
2421 None,
2422 )
2423 .await;
2424
2425 Ok(response)
2426 }
2427
2428 pub async fn get_file(
2443 &self,
2444 vector_store_id: impl Into<String>,
2445 file_id: impl Into<String>,
2446 ) -> Result<VectorStoreFileObject> {
2447 let vs_id = vector_store_id.into();
2448 let f_id = file_id.into();
2449
2450 let mut state = T::default();
2452 let operation = operation_names::VECTOR_STORE_FILE_RETRIEVE;
2453 let model = "vector-store";
2454 let request_json = format!(r#"{{"vector_store_id":"{vs_id}","file_id":"{f_id}"}}"#);
2455
2456 self.call_before_request(operation, model, &request_json, &mut state)
2458 .await?;
2459
2460 let start_time = Instant::now();
2461
2462 let response = match vector_stores_api::get_vector_store_file()
2464 .configuration(&self.client.base_configuration)
2465 .vector_store_id(&vs_id)
2466 .file_id(&f_id)
2467 .call()
2468 .await
2469 {
2470 Ok(resp) => resp,
2471 Err(e) => {
2472 let error = self
2473 .handle_api_error(e, operation, model, &request_json, &state)
2474 .await;
2475 return Err(error);
2476 }
2477 };
2478
2479 let duration = start_time.elapsed();
2480
2481 self.call_after_response(
2483 &response,
2484 operation,
2485 model,
2486 &request_json,
2487 &state,
2488 duration,
2489 None,
2490 None,
2491 )
2492 .await;
2493
2494 Ok(response)
2495 }
2496
2497 pub async fn delete_file(
2512 &self,
2513 vector_store_id: impl Into<String>,
2514 file_id: impl Into<String>,
2515 ) -> Result<DeleteVectorStoreFileResponse> {
2516 let vs_id = vector_store_id.into();
2517 let f_id = file_id.into();
2518
2519 let mut state = T::default();
2521 let operation = operation_names::VECTOR_STORE_FILE_DELETE;
2522 let model = "vector-store";
2523 let request_json = format!(r#"{{"vector_store_id":"{vs_id}","file_id":"{f_id}"}}"#);
2524
2525 self.call_before_request(operation, model, &request_json, &mut state)
2527 .await?;
2528
2529 let start_time = Instant::now();
2530
2531 let response = match vector_stores_api::delete_vector_store_file()
2533 .configuration(&self.client.base_configuration)
2534 .vector_store_id(&vs_id)
2535 .file_id(&f_id)
2536 .call()
2537 .await
2538 {
2539 Ok(resp) => resp,
2540 Err(e) => {
2541 let error = self
2542 .handle_api_error(e, operation, model, &request_json, &state)
2543 .await;
2544 return Err(error);
2545 }
2546 };
2547
2548 let duration = start_time.elapsed();
2549
2550 self.call_after_response(
2552 &response,
2553 operation,
2554 model,
2555 &request_json,
2556 &state,
2557 duration,
2558 None,
2559 None,
2560 )
2561 .await;
2562
2563 Ok(response)
2564 }
2565
2566 pub async fn search(
2583 &self,
2584 builder: crate::builders::vector_stores::VectorStoreSearchBuilder,
2585 ) -> Result<VectorStoreSearchResultsPage> {
2586 use openai_client_base::models::{VectorStoreSearchRequest, VectorStoreSearchRequestQuery};
2587
2588 let query = VectorStoreSearchRequestQuery::new_text(builder.query().to_string());
2589 let mut request = VectorStoreSearchRequest::new(query);
2590
2591 if let Some(limit) = builder.limit_ref() {
2592 request.max_num_results = Some(limit);
2593 }
2594
2595 let vs_id = builder.vector_store_id().to_string();
2596
2597 let mut state = T::default();
2599 let operation = operation_names::VECTOR_STORE_SEARCH;
2600 let model = "vector-store";
2601 let request_json = format!(
2602 r#"{{"vector_store_id":"{}","query":"{}"}}"#,
2603 vs_id,
2604 builder.query()
2605 );
2606
2607 self.call_before_request(operation, model, &request_json, &mut state)
2609 .await?;
2610
2611 let start_time = Instant::now();
2612
2613 let response = match vector_stores_api::search_vector_store()
2615 .configuration(&self.client.base_configuration)
2616 .vector_store_id(&vs_id)
2617 .vector_store_search_request(request)
2618 .call()
2619 .await
2620 {
2621 Ok(resp) => resp,
2622 Err(e) => {
2623 let error = self
2624 .handle_api_error(e, operation, model, &request_json, &state)
2625 .await;
2626 return Err(error);
2627 }
2628 };
2629
2630 let duration = start_time.elapsed();
2631
2632 self.call_after_response(
2634 &response,
2635 operation,
2636 model,
2637 &request_json,
2638 &state,
2639 duration,
2640 None,
2641 None,
2642 )
2643 .await;
2644
2645 Ok(response)
2646 }
2647}
2648
2649impl<T: Default + Send + Sync> BatchClient<'_, T> {
2650 pub async fn create(&self, builder: crate::builders::batch::BatchJobBuilder) -> Result<Batch> {
2667 use openai_client_base::models::create_batch_request::{CompletionWindow, Endpoint};
2668
2669 let endpoint = match builder.endpoint() {
2671 crate::builders::batch::BatchEndpoint::ChatCompletions => {
2672 Endpoint::SlashV1SlashChatSlashCompletions
2673 }
2674 crate::builders::batch::BatchEndpoint::Embeddings => Endpoint::SlashV1SlashEmbeddings,
2675 crate::builders::batch::BatchEndpoint::Completions => Endpoint::SlashV1SlashCompletions,
2676 };
2677
2678 let mut request = CreateBatchRequest::new(
2679 builder.input_file_id().to_string(),
2680 endpoint,
2681 CompletionWindow::Variant24h,
2682 );
2683
2684 if builder.has_metadata() {
2685 request.metadata = Some(Some(builder.metadata_ref().clone()));
2686 }
2687
2688 let mut state = T::default();
2690 let operation = operation_names::BATCH_CREATE;
2691 let model = "batch";
2692 let request_json = serde_json::to_string(&request).unwrap_or_default();
2693
2694 self.call_before_request(operation, model, &request_json, &mut state)
2696 .await?;
2697
2698 let start_time = Instant::now();
2699
2700 let response = match batch_api::create_batch()
2702 .configuration(&self.client.base_configuration)
2703 .create_batch_request(request)
2704 .call()
2705 .await
2706 {
2707 Ok(resp) => resp,
2708 Err(e) => {
2709 let error = self
2710 .handle_api_error(e, operation, model, &request_json, &state)
2711 .await;
2712 return Err(error);
2713 }
2714 };
2715
2716 let duration = start_time.elapsed();
2717
2718 self.call_after_response(
2720 &response,
2721 operation,
2722 model,
2723 &request_json,
2724 &state,
2725 duration,
2726 None,
2727 None,
2728 )
2729 .await;
2730
2731 Ok(response)
2732 }
2733
2734 pub async fn list(
2749 &self,
2750 after: Option<&str>,
2751 limit: Option<i32>,
2752 ) -> Result<ListBatchesResponse> {
2753 let mut state = T::default();
2755 let operation = operation_names::BATCH_LIST;
2756 let model = "batch";
2757 let request_json = format!("{{\"after\":{after:?},\"limit\":{limit:?}}}");
2758
2759 self.call_before_request(operation, model, &request_json, &mut state)
2761 .await?;
2762
2763 let start_time = Instant::now();
2764
2765 let response = match batch_api::list_batches()
2767 .configuration(&self.client.base_configuration)
2768 .maybe_after(after)
2769 .maybe_limit(limit)
2770 .call()
2771 .await
2772 {
2773 Ok(resp) => resp,
2774 Err(e) => {
2775 let error = self
2776 .handle_api_error(e, operation, model, &request_json, &state)
2777 .await;
2778 return Err(error);
2779 }
2780 };
2781
2782 let duration = start_time.elapsed();
2783
2784 self.call_after_response(
2786 &response,
2787 operation,
2788 model,
2789 &request_json,
2790 &state,
2791 duration,
2792 None,
2793 None,
2794 )
2795 .await;
2796
2797 Ok(response)
2798 }
2799
2800 pub async fn get(&self, batch_id: impl Into<String>) -> Result<Batch> {
2815 let id = batch_id.into();
2816
2817 let mut state = T::default();
2819 let operation = operation_names::BATCH_RETRIEVE;
2820 let model = "batch";
2821 let request_json = format!("{{\"batch_id\":\"{id}\"}}");
2822
2823 self.call_before_request(operation, model, &request_json, &mut state)
2825 .await?;
2826
2827 let start_time = Instant::now();
2828
2829 let response = match batch_api::retrieve_batch()
2831 .configuration(&self.client.base_configuration)
2832 .batch_id(&id)
2833 .call()
2834 .await
2835 {
2836 Ok(resp) => resp,
2837 Err(e) => {
2838 let error = self
2839 .handle_api_error(e, operation, model, &request_json, &state)
2840 .await;
2841 return Err(error);
2842 }
2843 };
2844
2845 let duration = start_time.elapsed();
2846
2847 self.call_after_response(
2849 &response,
2850 operation,
2851 model,
2852 &request_json,
2853 &state,
2854 duration,
2855 None,
2856 None,
2857 )
2858 .await;
2859
2860 Ok(response)
2861 }
2862
2863 pub async fn cancel(&self, batch_id: impl Into<String>) -> Result<Batch> {
2878 let id = batch_id.into();
2879
2880 let mut state = T::default();
2882 let operation = operation_names::BATCH_CANCEL;
2883 let model = "batch";
2884 let request_json = format!("{{\"batch_id\":\"{id}\"}}");
2885
2886 self.call_before_request(operation, model, &request_json, &mut state)
2888 .await?;
2889
2890 let start_time = Instant::now();
2891
2892 let response = match batch_api::cancel_batch()
2894 .configuration(&self.client.base_configuration)
2895 .batch_id(&id)
2896 .call()
2897 .await
2898 {
2899 Ok(resp) => resp,
2900 Err(e) => {
2901 let error = self
2902 .handle_api_error(e, operation, model, &request_json, &state)
2903 .await;
2904 return Err(error);
2905 }
2906 };
2907
2908 let duration = start_time.elapsed();
2909
2910 self.call_after_response(
2912 &response,
2913 operation,
2914 model,
2915 &request_json,
2916 &state,
2917 duration,
2918 None,
2919 None,
2920 )
2921 .await;
2922
2923 Ok(response)
2924 }
2925}
2926
2927impl<T: Default + Send + Sync> FineTuningClient<'_, T> {
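    /// Creates a fine-tuning job from the builder, forwarding the optional
    /// validation file and suffix when they are set.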
2928 pub async fn create_job(
2945 &self,
2946 builder: crate::builders::fine_tuning::FineTuningJobBuilder,
2947 ) -> Result<FineTuningJob> {
2948 let mut request = CreateFineTuningJobRequest::new(
2949 builder.model().to_string(),
2950 builder.training_file().to_string(),
2951 );
2952
2953 if let Some(validation_file) = builder.validation_file_ref() {
2954 request.validation_file = Some(validation_file.to_string());
2955 }
2956
2957 if let Some(suffix) = builder.suffix_ref() {
2958 request.suffix = Some(suffix.to_string());
2959 }
2960
2961 let mut state = T::default();
2968 let operation = operation_names::FINE_TUNING_CREATE;
2969 let model = builder.model();
2970 let request_json = serde_json::to_string(&request).unwrap_or_default();
2971
2972 self.call_before_request(operation, model, &request_json, &mut state)
2974 .await?;
2975
2976 let start_time = Instant::now();
2977
2978 let response = match fine_tuning_api::create_fine_tuning_job()
2980 .configuration(&self.client.base_configuration)
2981 .create_fine_tuning_job_request(request)
2982 .call()
2983 .await
2984 {
2985 Ok(resp) => resp,
2986 Err(e) => {
2987 let error = self
2988 .handle_api_error(e, operation, model, &request_json, &state)
2989 .await;
2990 return Err(error);
2991 }
2992 };
2993
2994 let duration = start_time.elapsed();
2995
2996 self.call_after_response(
2998 &response,
2999 operation,
3000 model,
3001 &request_json,
3002 &state,
3003 duration,
3004 None,
3005 None,
3006 )
3007 .await;
3008
3009 Ok(response)
3010 }
3011
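    /// Lists fine-tuning jobs, optionally paginated with `after` and `limit`.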
3012 pub async fn list_jobs(
3027 &self,
3028 after: Option<&str>,
3029 limit: Option<i32>,
3030 ) -> Result<ListPaginatedFineTuningJobsResponse> {
3031 let mut state = T::default();
3033 let operation = operation_names::FINE_TUNING_LIST;
3034 let model = "fine-tuning";
3035 let request_json = format!("{{\"after\":{after:?},\"limit\":{limit:?}}}");
3036
3037 self.call_before_request(operation, model, &request_json, &mut state)
3039 .await?;
3040
3041 let start_time = Instant::now();
3042
3043 let response = match fine_tuning_api::list_paginated_fine_tuning_jobs()
3045 .configuration(&self.client.base_configuration)
3046 .maybe_after(after)
3047 .maybe_limit(limit)
3048 .call()
3049 .await
3050 {
3051 Ok(resp) => resp,
3052 Err(e) => {
3053 let error = self
3054 .handle_api_error(e, operation, model, &request_json, &state)
3055 .await;
3056 return Err(error);
3057 }
3058 };
3059
3060 let duration = start_time.elapsed();
3061
3062 self.call_after_response(
3064 &response,
3065 operation,
3066 model,
3067 &request_json,
3068 &state,
3069 duration,
3070 None,
3071 None,
3072 )
3073 .await;
3074
3075 Ok(response)
3076 }
3077
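    /// Retrieves a fine-tuning job by its ID.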
3078 pub async fn get_job(&self, job_id: impl Into<String>) -> Result<FineTuningJob> {
3093 let id = job_id.into();
3094
3095 let mut state = T::default();
3097 let operation = operation_names::FINE_TUNING_RETRIEVE;
3098 let model = "fine-tuning";
3099 let request_json = format!("{{\"job_id\":\"{id}\"}}");
3100
3101 self.call_before_request(operation, model, &request_json, &mut state)
3103 .await?;
3104
3105 let start_time = Instant::now();
3106
3107 let response = match fine_tuning_api::retrieve_fine_tuning_job()
3109 .configuration(&self.client.base_configuration)
3110 .fine_tuning_job_id(&id)
3111 .call()
3112 .await
3113 {
3114 Ok(resp) => resp,
3115 Err(e) => {
3116 let error = self
3117 .handle_api_error(e, operation, model, &request_json, &state)
3118 .await;
3119 return Err(error);
3120 }
3121 };
3122
3123 let duration = start_time.elapsed();
3124
3125 self.call_after_response(
3127 &response,
3128 operation,
3129 model,
3130 &request_json,
3131 &state,
3132 duration,
3133 None,
3134 None,
3135 )
3136 .await;
3137
3138 Ok(response)
3139 }
3140
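    /// Cancels a running fine-tuning job by its ID.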
3141 pub async fn cancel_job(&self, job_id: impl Into<String>) -> Result<FineTuningJob> {
3156 let id = job_id.into();
3157
3158 let mut state = T::default();
3160 let operation = operation_names::FINE_TUNING_CANCEL;
3161 let model = "fine-tuning";
3162 let request_json = format!("{{\"job_id\":\"{id}\"}}");
3163
3164 self.call_before_request(operation, model, &request_json, &mut state)
3166 .await?;
3167
3168 let start_time = Instant::now();
3169
3170 let response = match fine_tuning_api::cancel_fine_tuning_job()
3172 .configuration(&self.client.base_configuration)
3173 .fine_tuning_job_id(&id)
3174 .call()
3175 .await
3176 {
3177 Ok(resp) => resp,
3178 Err(e) => {
3179 let error = self
3180 .handle_api_error(e, operation, model, &request_json, &state)
3181 .await;
3182 return Err(error);
3183 }
3184 };
3185
3186 let duration = start_time.elapsed();
3187
3188 self.call_after_response(
3190 &response,
3191 operation,
3192 model,
3193 &request_json,
3194 &state,
3195 duration,
3196 None,
3197 None,
3198 )
3199 .await;
3200
3201 Ok(response)
3202 }
3203
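    /// Lists events for a fine-tuning job, optionally paginated with `after` and `limit`.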
3204 pub async fn list_events(
3219 &self,
3220 job_id: impl Into<String>,
3221 after: Option<&str>,
3222 limit: Option<i32>,
3223 ) -> Result<ListFineTuningJobEventsResponse> {
3224 let id = job_id.into();
3225
3226 let mut state = T::default();
3228 let operation = operation_names::FINE_TUNING_LIST_EVENTS;
3229 let model = "fine-tuning";
3230 let request_json =
3231 format!("{{\"job_id\":\"{id}\",\"after\":{after:?},\"limit\":{limit:?}}}");
3232
3233 self.call_before_request(operation, model, &request_json, &mut state)
3235 .await?;
3236
3237 let start_time = Instant::now();
3238
3239 let response = match fine_tuning_api::list_fine_tuning_events()
3241 .configuration(&self.client.base_configuration)
3242 .fine_tuning_job_id(&id)
3243 .maybe_after(after)
3244 .maybe_limit(limit)
3245 .call()
3246 .await
3247 {
3248 Ok(resp) => resp,
3249 Err(e) => {
3250 let error = self
3251 .handle_api_error(e, operation, model, &request_json, &state)
3252 .await;
3253 return Err(error);
3254 }
3255 };
3256
3257 let duration = start_time.elapsed();
3258
3259 self.call_after_response(
3261 &response,
3262 operation,
3263 model,
3264 &request_json,
3265 &state,
3266 duration,
3267 None,
3268 None,
3269 )
3270 .await;
3271
3272 Ok(response)
3273 }
3274
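    /// Lists checkpoints for a fine-tuning job, optionally paginated with `after` and `limit`.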
3275 pub async fn list_checkpoints(
3290 &self,
3291 job_id: impl Into<String>,
3292 after: Option<&str>,
3293 limit: Option<i32>,
3294 ) -> Result<ListFineTuningJobCheckpointsResponse> {
3295 let id = job_id.into();
3296
3297 let mut state = T::default();
3299 let operation = operation_names::FINE_TUNING_LIST_CHECKPOINTS;
3300 let model = "fine-tuning";
3301 let request_json =
3302 format!("{{\"job_id\":\"{id}\",\"after\":{after:?},\"limit\":{limit:?}}}");
3303
3304 self.call_before_request(operation, model, &request_json, &mut state)
3306 .await?;
3307
3308 let start_time = Instant::now();
3309
3310 let response = match fine_tuning_api::list_fine_tuning_job_checkpoints()
3312 .configuration(&self.client.base_configuration)
3313 .fine_tuning_job_id(&id)
3314 .maybe_after(after)
3315 .maybe_limit(limit)
3316 .call()
3317 .await
3318 {
3319 Ok(resp) => resp,
3320 Err(e) => {
3321 let error = self
3322 .handle_api_error(e, operation, model, &request_json, &state)
3323 .await;
3324 return Err(error);
3325 }
3326 };
3327
3328 let duration = start_time.elapsed();
3329
3330 self.call_after_response(
3332 &response,
3333 operation,
3334 model,
3335 &request_json,
3336 &state,
3337 duration,
3338 None,
3339 None,
3340 )
3341 .await;
3342
3343 Ok(response)
3344 }
3345}
3346
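/// Maps an error from the generated `openai_client_base` API into this crate's [`Error`].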
3347fn map_api_error<T>(error: ApiError<T>) -> Error {
3348 match error {
3349 ApiError::Reqwest(err) => Error::Http(err),
3350 ApiError::ReqwestMiddleware(err) => {
3351 Error::Internal(format!("reqwest middleware error: {err}"))
3352 }
3353 ApiError::Serde(err) => Error::Json(err),
3354 ApiError::Io(err) => Error::File(err),
3355 ApiError::ResponseError(response) => Error::Api {
3356 status: response.status.as_u16(),
3357 message: response.content,
3358 error_type: None,
3359 error_code: None,
3360 },
3361 }
3362}
3363
3364#[cfg(test)]
3365mod tests {
3366 use super::*;
3367 use openai_client_base::apis::{Error as BaseError, ResponseContent};
3368
3369 #[test]
3370 fn map_api_error_converts_response() {
3371 let response = ResponseContent {
3372 status: reqwest::StatusCode::BAD_REQUEST,
3373 content: "bad request".to_string(),
3374 entity: Option::<()>::None,
3375 };
3376
3377 let error = map_api_error(BaseError::ResponseError(response));
3378 match error {
3379 Error::Api {
3380 status, message, ..
3381 } => {
3382 assert_eq!(status, 400);
3383 assert!(message.contains("bad request"));
3384 }
3385 other => panic!("expected API error, got {other:?}"),
3386 }
3387 }
3388
3389 #[test]
3390 fn test_moderation_builder_creation() {
3391 use crate::builders::moderations::ModerationBuilder;
3392
3393 let builder = ModerationBuilder::new("Test content");
3394 let request = builder.build().unwrap();
3395
3396 assert_eq!(request.input, "Test content");
3397 assert!(request.model.is_none());
3398 }
3399
3400 #[test]
3401 fn test_moderation_builder_with_model() {
3402 use crate::builders::moderations::ModerationBuilder;
3403
3404 let builder = ModerationBuilder::new("Test content").model("text-moderation-stable");
3405 let request = builder.build().unwrap();
3406
3407 assert_eq!(request.input, "Test content");
3408 assert_eq!(request.model, Some("text-moderation-stable".to_string()));
3409 }
3410
3411 #[test]
3412 fn test_moderation_builder_array_input() {
3413 use crate::builders::moderations::ModerationBuilder;
3414
3415 let inputs = vec!["First text".to_string(), "Second text".to_string()];
3416 let builder = ModerationBuilder::new_array(inputs);
3417 let request = builder.build().unwrap();
3418
3419 assert_eq!(request.input, "First text\nSecond text");
3421 }
3422
3423 #[test]
3424 fn test_file_upload_builder_creation() {
3425 use crate::builders::files::{FilePurpose, FileUploadBuilder};
3426
3427 let content = b"test content".to_vec();
3428 let builder = FileUploadBuilder::new("test.txt", FilePurpose::Assistants, content.clone());
3429
3430 assert_eq!(builder.filename(), "test.txt");
3431 assert_eq!(builder.content(), content.as_slice());
3432 assert_eq!(builder.content_size(), content.len());
3433 assert!(!builder.is_empty());
3434 }
3435
3436 #[test]
3437 fn test_file_upload_builder_from_text() {
3438 use crate::builders::files::{FilePurpose, FileUploadBuilder};
3439
3440 let builder =
3441 FileUploadBuilder::from_text("hello.txt", FilePurpose::FineTune, "Hello, world!");
3442
3443 assert_eq!(builder.filename(), "hello.txt");
3444 assert_eq!(
3445 builder.content_as_string(),
3446 Some("Hello, world!".to_string())
3447 );
3448 assert!(!builder.is_empty());
3449 }
3450
3451 #[test]
3452 fn test_file_list_builder() {
3453 use crate::builders::files::{FileListBuilder, FileOrder, FilePurpose};
3454
3455 let builder = FileListBuilder::new()
3456 .purpose(FilePurpose::Assistants)
3457 .limit(10)
3458 .order(FileOrder::Desc);
3459
3460 assert!(builder.purpose_ref().is_some());
3461 assert_eq!(builder.limit_ref(), Some(10));
3462 assert!(builder.order_ref().is_some());
3463 }
3464
3465 #[test]
3466 fn test_file_retrieval_builder() {
3467 use crate::builders::files::FileRetrievalBuilder;
3468
3469 let builder = FileRetrievalBuilder::new("file-123");
3470 assert_eq!(builder.file_id(), "file-123");
3471 }
3472
3473 #[test]
3474 fn test_file_delete_builder() {
3475 use crate::builders::files::FileDeleteBuilder;
3476
3477 let builder = FileDeleteBuilder::new("file-456");
3478 assert_eq!(builder.file_id(), "file-456");
3479 }
3480
3481 #[test]
3482 fn test_file_purpose_display() {
3483 use crate::builders::files::FilePurpose;
3484
3485 assert_eq!(FilePurpose::FineTune.to_string(), "fine-tune");
3486 assert_eq!(FilePurpose::Assistants.to_string(), "assistants");
3487 assert_eq!(FilePurpose::Vision.to_string(), "vision");
3488 assert_eq!(FilePurpose::Batch.to_string(), "batch");
3489 }
3490
3491 #[test]
3492 fn test_vector_store_builder_basic() {
3493 use crate::builders::vector_stores::VectorStoreBuilder;
3494
3495 let builder = VectorStoreBuilder::new()
3496 .name("Test Store")
3497 .add_file("file-1")
3498 .metadata("key", "value");
3499
3500 assert_eq!(builder.name_ref(), Some("Test Store"));
3501 assert_eq!(builder.file_count(), 1);
3502 assert!(builder.has_files());
3503 assert_eq!(builder.metadata_ref().len(), 1);
3504 }
3505
3506 #[test]
3507 fn test_vector_store_builder_with_expiration() {
3508 use crate::builders::vector_stores::VectorStoreBuilder;
3509
3510 let builder = VectorStoreBuilder::new()
3511 .name("Temp Store")
3512 .expires_after_days(30);
3513
3514 assert_eq!(builder.name_ref(), Some("Temp Store"));
3515 assert!(builder.expires_after_ref().is_some());
3516 assert_eq!(builder.expires_after_ref().unwrap().days, 30);
3517 }
3518
3519 #[test]
3520 fn test_vector_store_builder_multiple_files() {
3521 use crate::builders::vector_stores::VectorStoreBuilder;
3522
3523 let files = vec!["file-1".to_string(), "file-2".to_string()];
3524 let builder = VectorStoreBuilder::new()
3525 .name("Multi-File Store")
3526 .file_ids(files.clone());
3527
3528 assert_eq!(builder.file_ids_ref(), files.as_slice());
3529 assert_eq!(builder.file_count(), 2);
3530 }
3531
3532 #[test]
3533 fn test_vector_store_file_builder() {
3534 use crate::builders::vector_stores::VectorStoreFileBuilder;
3535
3536 let builder = VectorStoreFileBuilder::new("vs-123", "file-456");
3537 assert_eq!(builder.vector_store_id(), "vs-123");
3538 assert_eq!(builder.file_id(), "file-456");
3539 }
3540
3541 #[test]
3542 fn test_vector_store_search_builder() {
3543 use crate::builders::vector_stores::VectorStoreSearchBuilder;
3544
3545 let builder = VectorStoreSearchBuilder::new("vs-123", "test query")
3546 .limit(10)
3547 .filter("category", "docs");
3548
3549 assert_eq!(builder.vector_store_id(), "vs-123");
3550 assert_eq!(builder.query(), "test query");
3551 assert_eq!(builder.limit_ref(), Some(10));
3552 assert_eq!(builder.filter_ref().len(), 1);
3553 }
3554
3555 #[test]
3556 fn test_vector_store_search_builder_default() {
3557 use crate::builders::vector_stores::VectorStoreSearchBuilder;
3558
3559 let builder = VectorStoreSearchBuilder::new("vs-123", "query");
3560 assert!(builder.limit_ref().is_none());
3561 assert!(builder.filter_ref().is_empty());
3562 }
3563}
3564
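/// Client for assistant, run, run-step, and message operations.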
3565#[derive(Debug, Clone, Copy)]
3570pub struct AssistantsClient<'a, T = ()> {
3571 client: &'a Client<T>,
3572}
3573
3574impl<T: Default + Send + Sync> AssistantsClient<'_, T> {
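    /// Creates an assistant from the given builder.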
3575 pub async fn create(&self, builder: AssistantBuilder) -> Result<AssistantObject> {
3594 let request = builder.build()?;
3595
3596 let mut state = T::default();
3598 let operation = operation_names::ASSISTANT_CREATE;
3599 let model = request.model.clone();
3600 let request_json = serde_json::to_string(&request).unwrap_or_default();
3601
3602 self.call_before_request(operation, &model, &request_json, &mut state)
3604 .await?;
3605
3606 let start_time = Instant::now();
3607
3608 let response = match assistants_api::create_assistant()
3610 .configuration(&self.client.base_configuration)
3611 .create_assistant_request(request)
3612 .call()
3613 .await
3614 {
3615 Ok(resp) => resp,
3616 Err(e) => {
3617 let error = self
3618 .handle_api_error(e, operation, &model, &request_json, &state)
3619 .await;
3620 return Err(error);
3621 }
3622 };
3623
3624 let duration = start_time.elapsed();
3625
3626 self.call_after_response(
3628 &response,
3629 operation,
3630 &model,
3631 &request_json,
3632 &state,
3633 duration,
3634 None,
3635 None,
3636 )
3637 .await;
3638
3639 Ok(response)
3640 }
3641
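    /// Lists assistants with optional pagination and ordering.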
3642 pub async fn list(
3657 &self,
3658 limit: Option<i32>,
3659 order: Option<&str>,
3660 after: Option<&str>,
3661 before: Option<&str>,
3662 ) -> Result<ListAssistantsResponse> {
3663 let mut state = T::default();
3665 let operation = operation_names::ASSISTANT_LIST;
3666 let model = "assistants";
3667 let request_json = format!(
3668 "{{\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?}}}"
3669 );
3670
3671 self.call_before_request(operation, model, &request_json, &mut state)
3673 .await?;
3674
3675 let start_time = Instant::now();
3676
3677 let response = match assistants_api::list_assistants()
3679 .configuration(&self.client.base_configuration)
3680 .maybe_limit(limit)
3681 .maybe_order(order)
3682 .maybe_after(after)
3683 .maybe_before(before)
3684 .call()
3685 .await
3686 {
3687 Ok(resp) => resp,
3688 Err(e) => {
3689 let error = self
3690 .handle_api_error(e, operation, model, &request_json, &state)
3691 .await;
3692 return Err(error);
3693 }
3694 };
3695
3696 let duration = start_time.elapsed();
3697
3698 self.call_after_response(
3700 &response,
3701 operation,
3702 model,
3703 &request_json,
3704 &state,
3705 duration,
3706 None,
3707 None,
3708 )
3709 .await;
3710
3711 Ok(response)
3712 }
3713
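    /// Retrieves an assistant by its ID.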
3714 pub async fn get(&self, assistant_id: impl Into<String>) -> Result<AssistantObject> {
3729 let id = assistant_id.into();
3730
3731 let mut state = T::default();
3733 let operation = operation_names::ASSISTANT_RETRIEVE;
3734 let model = "assistants";
3735 let request_json = format!("{{\"assistant_id\":\"{id}\"}}");
3736
3737 self.call_before_request(operation, model, &request_json, &mut state)
3739 .await?;
3740
3741 let start_time = Instant::now();
3742
3743 let response = match assistants_api::get_assistant()
3745 .configuration(&self.client.base_configuration)
3746 .assistant_id(&id)
3747 .call()
3748 .await
3749 {
3750 Ok(resp) => resp,
3751 Err(e) => {
3752 let error = self
3753 .handle_api_error(e, operation, model, &request_json, &state)
3754 .await;
3755 return Err(error);
3756 }
3757 };
3758
3759 let duration = start_time.elapsed();
3760
3761 self.call_after_response(
3763 &response,
3764 operation,
3765 model,
3766 &request_json,
3767 &state,
3768 duration,
3769 None,
3770 None,
3771 )
3772 .await;
3773
3774 Ok(response)
3775 }
3776
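    /// Updates an assistant, rebuilding a `ModifyAssistantRequest` from the
    /// builder's output.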
3777 pub async fn update(
3796 &self,
3797 assistant_id: impl Into<String>,
3798 builder: AssistantBuilder,
3799 ) -> Result<AssistantObject> {
3800 use openai_client_base::models::ModifyAssistantRequest;
3801
3802 let id = assistant_id.into();
3803 let request_data = builder.build()?;
3804
3805 let mut request = ModifyAssistantRequest::new();
3807 request.model = Some(request_data.model);
3808 request.name = request_data.name.and_then(|n| match *n {
3810 openai_client_base::models::CreateAssistantRequestName::Text(text) => Some(Some(text)),
3811 openai_client_base::models::CreateAssistantRequestName::Null => None,
3812 });
3813 request.description = request_data.description.and_then(|d| match *d {
3814 openai_client_base::models::CreateAssistantRequestDescription::Text(text) => {
3815 Some(Some(text))
3816 }
3817 openai_client_base::models::CreateAssistantRequestDescription::Null => None,
3818 });
3819 request.instructions = request_data.instructions.and_then(|i| match *i {
3820 openai_client_base::models::CreateAssistantRequestInstructions::Text(text) => {
3821 Some(Some(text))
3822 }
3823 openai_client_base::models::CreateAssistantRequestInstructions::Null => None,
3824 });
3825 request.tools = request_data.tools;
3826 request.metadata = request_data.metadata;
3827
3828 let mut state = T::default();
3830 let operation = operation_names::ASSISTANT_UPDATE;
3831 let model = request
3832 .model
3833 .as_ref()
3834 .map_or_else(|| "assistants".to_string(), Clone::clone);
3835 let request_json = serde_json::to_string(&request).unwrap_or_default();
3836
3837 self.call_before_request(operation, &model, &request_json, &mut state)
3839 .await?;
3840
3841 let start_time = Instant::now();
3842
3843 let response = match assistants_api::modify_assistant()
3845 .configuration(&self.client.base_configuration)
3846 .assistant_id(&id)
3847 .modify_assistant_request(request)
3848 .call()
3849 .await
3850 {
3851 Ok(resp) => resp,
3852 Err(e) => {
3853 let error = self
3854 .handle_api_error(e, operation, &model, &request_json, &state)
3855 .await;
3856 return Err(error);
3857 }
3858 };
3859
3860 let duration = start_time.elapsed();
3861
3862 self.call_after_response(
3864 &response,
3865 operation,
3866 &model,
3867 &request_json,
3868 &state,
3869 duration,
3870 None,
3871 None,
3872 )
3873 .await;
3874
3875 Ok(response)
3876 }
3877
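    /// Deletes an assistant by its ID.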
3878 pub async fn delete(&self, assistant_id: impl Into<String>) -> Result<DeleteAssistantResponse> {
3893 let id = assistant_id.into();
3894
3895 let mut state = T::default();
3897 let operation = operation_names::ASSISTANT_DELETE;
3898 let model = "assistants";
3899 let request_json = format!("{{\"assistant_id\":\"{id}\"}}");
3900
3901 self.call_before_request(operation, model, &request_json, &mut state)
3903 .await?;
3904
3905 let start_time = Instant::now();
3906
3907 let response = match assistants_api::delete_assistant()
3909 .configuration(&self.client.base_configuration)
3910 .assistant_id(&id)
3911 .call()
3912 .await
3913 {
3914 Ok(resp) => resp,
3915 Err(e) => {
3916 let error = self
3917 .handle_api_error(e, operation, model, &request_json, &state)
3918 .await;
3919 return Err(error);
3920 }
3921 };
3922
3923 let duration = start_time.elapsed();
3924
3925 self.call_after_response(
3927 &response,
3928 operation,
3929 model,
3930 &request_json,
3931 &state,
3932 duration,
3933 None,
3934 None,
3935 )
3936 .await;
3937
3938 Ok(response)
3939 }
3940
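    /// Creates a run on the given thread from the builder.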
3941 pub async fn create_run(
3958 &self,
3959 thread_id: impl Into<String>,
3960 builder: RunBuilder,
3961 ) -> Result<RunObject> {
3962 let thread_id = thread_id.into();
3963 let request = builder.build()?;
3964
3965 let mut state = T::default();
3967 let operation = operation_names::RUN_CREATE;
3968 let model = request
3969 .model
3970 .as_ref()
3971 .map_or_else(|| "runs".to_string(), Clone::clone);
3972 let request_json = serde_json::to_string(&request).unwrap_or_default();
3973
3974 self.call_before_request(operation, &model, &request_json, &mut state)
3976 .await?;
3977
3978 let start_time = Instant::now();
3979
3980 let response = match assistants_api::create_run()
3982 .configuration(&self.client.base_configuration)
3983 .thread_id(&thread_id)
3984 .create_run_request(request)
3985 .call()
3986 .await
3987 {
3988 Ok(resp) => resp,
3989 Err(e) => {
3990 let error = self
3991 .handle_api_error(e, operation, &model, &request_json, &state)
3992 .await;
3993 return Err(error);
3994 }
3995 };
3996
3997 let duration = start_time.elapsed();
3998
3999 self.call_after_response(
4001 &response,
4002 operation,
4003 &model,
4004 &request_json,
4005 &state,
4006 duration,
4007 None,
4008 None,
4009 )
4010 .await;
4011
4012 Ok(response)
4013 }
4014
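    /// Lists runs for a thread with optional pagination and ordering.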
4015 pub async fn list_runs(
4030 &self,
4031 thread_id: impl Into<String>,
4032 limit: Option<i32>,
4033 order: Option<&str>,
4034 after: Option<&str>,
4035 before: Option<&str>,
4036 ) -> Result<ListRunsResponse> {
4037 let thread_id = thread_id.into();
4038
4039 let mut state = T::default();
4041 let operation = operation_names::RUN_LIST;
4042 let model = "runs";
4043 let request_json = format!(
4044 "{{\"thread_id\":\"{thread_id}\",\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?}}}"
4045 );
4046
4047 self.call_before_request(operation, model, &request_json, &mut state)
4049 .await?;
4050
4051 let start_time = Instant::now();
4052
4053 let response = match assistants_api::list_runs()
4055 .configuration(&self.client.base_configuration)
4056 .thread_id(&thread_id)
4057 .maybe_limit(limit)
4058 .maybe_order(order)
4059 .maybe_after(after)
4060 .maybe_before(before)
4061 .call()
4062 .await
4063 {
4064 Ok(resp) => resp,
4065 Err(e) => {
4066 let error = self
4067 .handle_api_error(e, operation, model, &request_json, &state)
4068 .await;
4069 return Err(error);
4070 }
4071 };
4072
4073 let duration = start_time.elapsed();
4074
4075 self.call_after_response(
4077 &response,
4078 operation,
4079 model,
4080 &request_json,
4081 &state,
4082 duration,
4083 None,
4084 None,
4085 )
4086 .await;
4087
4088 Ok(response)
4089 }
4090
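    /// Retrieves a run by thread and run ID.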
4091 pub async fn get_run(
4106 &self,
4107 thread_id: impl Into<String>,
4108 run_id: impl Into<String>,
4109 ) -> Result<RunObject> {
4110 let thread_id = thread_id.into();
4111 let run_id = run_id.into();
4112
4113 let mut state = T::default();
4115 let operation = operation_names::RUN_RETRIEVE;
4116 let model = "runs";
4117 let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\"}}");
4118
4119 self.call_before_request(operation, model, &request_json, &mut state)
4121 .await?;
4122
4123 let start_time = Instant::now();
4124
4125 let response = match assistants_api::get_run()
4127 .configuration(&self.client.base_configuration)
4128 .thread_id(&thread_id)
4129 .run_id(&run_id)
4130 .call()
4131 .await
4132 {
4133 Ok(resp) => resp,
4134 Err(e) => {
4135 let error = self
4136 .handle_api_error(e, operation, model, &request_json, &state)
4137 .await;
4138 return Err(error);
4139 }
4140 };
4141
4142 let duration = start_time.elapsed();
4143
4144 self.call_after_response(
4146 &response,
4147 operation,
4148 model,
4149 &request_json,
4150 &state,
4151 duration,
4152 None,
4153 None,
4154 )
4155 .await;
4156
4157 Ok(response)
4158 }
4159
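    /// Cancels a run by thread and run ID.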
4160 pub async fn cancel_run(
4175 &self,
4176 thread_id: impl Into<String>,
4177 run_id: impl Into<String>,
4178 ) -> Result<RunObject> {
4179 let thread_id = thread_id.into();
4180 let run_id = run_id.into();
4181
4182 let mut state = T::default();
4184 let operation = operation_names::RUN_CANCEL;
4185 let model = "runs";
4186 let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\"}}");
4187
4188 self.call_before_request(operation, model, &request_json, &mut state)
4190 .await?;
4191
4192 let start_time = Instant::now();
4193
4194 let response = match assistants_api::cancel_run()
4196 .configuration(&self.client.base_configuration)
4197 .thread_id(&thread_id)
4198 .run_id(&run_id)
4199 .call()
4200 .await
4201 {
4202 Ok(resp) => resp,
4203 Err(e) => {
4204 let error = self
4205 .handle_api_error(e, operation, model, &request_json, &state)
4206 .await;
4207 return Err(error);
4208 }
4209 };
4210
4211 let duration = start_time.elapsed();
4212
4213 self.call_after_response(
4215 &response,
4216 operation,
4217 model,
4218 &request_json,
4219 &state,
4220 duration,
4221 None,
4222 None,
4223 )
4224 .await;
4225
4226 Ok(response)
4227 }
4228
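    /// Submits tool outputs for the given run.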
4229 pub async fn submit_tool_outputs(
4247 &self,
4248 thread_id: impl Into<String>,
4249 run_id: impl Into<String>,
4250 tool_outputs: Vec<SubmitToolOutputsRunRequestToolOutputsInner>,
4251 ) -> Result<RunObject> {
4252 use openai_client_base::models::SubmitToolOutputsRunRequest;
4253
4254 let thread_id = thread_id.into();
4255 let run_id = run_id.into();
4256 let request = SubmitToolOutputsRunRequest::new(tool_outputs);
4257
4258 let mut state = T::default();
4260 let operation = operation_names::RUN_SUBMIT_TOOL_OUTPUTS;
4261 let model = "runs";
4262 let request_json = serde_json::to_string(&request).unwrap_or_default();
4263
4264 self.call_before_request(operation, model, &request_json, &mut state)
4266 .await?;
4267
4268 let start_time = Instant::now();
4269
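        // NOTE: "ouputs" (sic) is the spelling used by the upstream assistants_api binding.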
4270 let response = match assistants_api::submit_tool_ouputs_to_run()
4272 .configuration(&self.client.base_configuration)
4273 .thread_id(&thread_id)
4274 .run_id(&run_id)
4275 .submit_tool_outputs_run_request(request)
4276 .call()
4277 .await
4278 {
4279 Ok(resp) => resp,
4280 Err(e) => {
4281 let error = self
4282 .handle_api_error(e, operation, model, &request_json, &state)
4283 .await;
4284 return Err(error);
4285 }
4286 };
4287
4288 let duration = start_time.elapsed();
4289
4290 self.call_after_response(
4292 &response,
4293 operation,
4294 model,
4295 &request_json,
4296 &state,
4297 duration,
4298 None,
4299 None,
4300 )
4301 .await;
4302
4303 Ok(response)
4304 }
4305
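    /// Creates a message on the given thread from the builder.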
4306 pub async fn create_message(
4323 &self,
4324 thread_id: impl Into<String>,
4325 builder: MessageBuilder,
4326 ) -> Result<MessageObject> {
4327 let thread_id = thread_id.into();
4328 let request = builder.build()?;
4329
4330 let mut state = T::default();
4332 let operation = operation_names::MESSAGE_CREATE;
4333 let model = "messages";
4334 let request_json = serde_json::to_string(&request).unwrap_or_default();
4335
4336 self.call_before_request(operation, model, &request_json, &mut state)
4338 .await?;
4339
4340 let start_time = Instant::now();
4341
4342 let response = match assistants_api::create_message()
4344 .configuration(&self.client.base_configuration)
4345 .thread_id(&thread_id)
4346 .create_message_request(request)
4347 .call()
4348 .await
4349 {
4350 Ok(resp) => resp,
4351 Err(e) => {
4352 let error = self
4353 .handle_api_error(e, operation, model, &request_json, &state)
4354 .await;
4355 return Err(error);
4356 }
4357 };
4358
4359 let duration = start_time.elapsed();
4360
4361 self.call_after_response(
4363 &response,
4364 operation,
4365 model,
4366 &request_json,
4367 &state,
4368 duration,
4369 None,
4370 None,
4371 )
4372 .await;
4373
4374 Ok(response)
4375 }
4376
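    /// Lists messages in a thread with optional pagination, ordering, and run filter.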
4377 pub async fn list_messages(
4392 &self,
4393 thread_id: impl Into<String>,
4394 limit: Option<i32>,
4395 order: Option<&str>,
4396 after: Option<&str>,
4397 before: Option<&str>,
4398 run_id: Option<&str>,
4399 ) -> Result<ListMessagesResponse> {
4400 let thread_id = thread_id.into();
4401
4402 let mut state = T::default();
4404 let operation = operation_names::MESSAGE_LIST;
4405 let model = "messages";
4406 let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?},\"run_id\":{run_id:?}}}");
4407
4408 self.call_before_request(operation, model, &request_json, &mut state)
4410 .await?;
4411
4412 let start_time = Instant::now();
4413
4414 let response = match assistants_api::list_messages()
4416 .configuration(&self.client.base_configuration)
4417 .thread_id(&thread_id)
4418 .maybe_limit(limit)
4419 .maybe_order(order)
4420 .maybe_after(after)
4421 .maybe_before(before)
4422 .maybe_run_id(run_id)
4423 .call()
4424 .await
4425 {
4426 Ok(resp) => resp,
4427 Err(e) => {
4428 let error = self
4429 .handle_api_error(e, operation, model, &request_json, &state)
4430 .await;
4431 return Err(error);
4432 }
4433 };
4434
4435 let duration = start_time.elapsed();
4436
4437 self.call_after_response(
4439 &response,
4440 operation,
4441 model,
4442 &request_json,
4443 &state,
4444 duration,
4445 None,
4446 None,
4447 )
4448 .await;
4449
4450 Ok(response)
4451 }
4452
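    /// Retrieves a message by thread and message ID.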
4453 pub async fn get_message(
4468 &self,
4469 thread_id: impl Into<String>,
4470 message_id: impl Into<String>,
4471 ) -> Result<MessageObject> {
4472 let thread_id = thread_id.into();
4473 let message_id = message_id.into();
4474
4475 let mut state = T::default();
4477 let operation = operation_names::MESSAGE_RETRIEVE;
4478 let model = "messages";
4479 let request_json =
4480 format!("{{\"thread_id\":\"{thread_id}\",\"message_id\":\"{message_id}\"}}");
4481
4482 self.call_before_request(operation, model, &request_json, &mut state)
4484 .await?;
4485
4486 let start_time = Instant::now();
4487
4488 let response = match assistants_api::get_message()
4490 .configuration(&self.client.base_configuration)
4491 .thread_id(&thread_id)
4492 .message_id(&message_id)
4493 .call()
4494 .await
4495 {
4496 Ok(resp) => resp,
4497 Err(e) => {
4498 let error = self
4499 .handle_api_error(e, operation, model, &request_json, &state)
4500 .await;
4501 return Err(error);
4502 }
4503 };
4504
4505 let duration = start_time.elapsed();
4506
4507 self.call_after_response(
4509 &response,
4510 operation,
4511 model,
4512 &request_json,
4513 &state,
4514 duration,
4515 None,
4516 None,
4517 )
4518 .await;
4519
4520 Ok(response)
4521 }
4522
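    /// Lists steps for a run with optional pagination, ordering, and `include` fields.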
4523 #[allow(clippy::too_many_arguments)]
4538 pub async fn list_run_steps(
4539 &self,
4540 thread_id: impl Into<String>,
4541 run_id: impl Into<String>,
4542 limit: Option<i32>,
4543 order: Option<&str>,
4544 after: Option<&str>,
4545 before: Option<&str>,
4546 include: Option<Vec<String>>,
4547 ) -> Result<ListRunStepsResponse> {
4548 let thread_id = thread_id.into();
4549 let run_id = run_id.into();
4550
4551 let mut state = T::default();
4553 let operation = operation_names::RUN_STEP_LIST;
4554 let model = "run_steps";
4555 let request_json = format!("{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\",\"limit\":{limit:?},\"order\":{order:?},\"after\":{after:?},\"before\":{before:?},\"include\":{include:?}}}");
4556
4557 self.call_before_request(operation, model, &request_json, &mut state)
4559 .await?;
4560
4561 let start_time = Instant::now();
4562
4563 let response = match assistants_api::list_run_steps()
4565 .configuration(&self.client.base_configuration)
4566 .thread_id(&thread_id)
4567 .run_id(&run_id)
4568 .maybe_limit(limit)
4569 .maybe_order(order)
4570 .maybe_after(after)
4571 .maybe_before(before)
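            // Setter name generated from the `include[]` parameter.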
4572 .maybe_include_left_square_bracket_right_square_bracket(include)
4573 .call()
4574 .await
4575 {
4576 Ok(resp) => resp,
4577 Err(e) => {
4578 let error = self
4579 .handle_api_error(e, operation, model, &request_json, &state)
4580 .await;
4581 return Err(error);
4582 }
4583 };
4584
4585 let duration = start_time.elapsed();
4586
4587 self.call_after_response(
4589 &response,
4590 operation,
4591 model,
4592 &request_json,
4593 &state,
4594 duration,
4595 None,
4596 None,
4597 )
4598 .await;
4599
4600 Ok(response)
4601 }
4602
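    /// Retrieves a single run step by thread, run, and step ID.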
4603 pub async fn get_run_step(
4618 &self,
4619 thread_id: impl Into<String>,
4620 run_id: impl Into<String>,
4621 step_id: impl Into<String>,
4622 include: Option<Vec<String>>,
4623 ) -> Result<RunStepObject> {
4624 let thread_id = thread_id.into();
4625 let run_id = run_id.into();
4626 let step_id = step_id.into();
4627
4628 let mut state = T::default();
4630 let operation = operation_names::RUN_STEP_RETRIEVE;
4631 let model = "run_steps";
4632 let request_json = format!(
4633 "{{\"thread_id\":\"{thread_id}\",\"run_id\":\"{run_id}\",\"step_id\":\"{step_id}\",\"include\":{include:?}}}"
4634 );
4635
4636 self.call_before_request(operation, model, &request_json, &mut state)
4638 .await?;
4639
4640 let start_time = Instant::now();
4641
4642 let response = match assistants_api::get_run_step()
4644 .configuration(&self.client.base_configuration)
4645 .thread_id(&thread_id)
4646 .run_id(&run_id)
4647 .step_id(&step_id)
4648 .maybe_include_left_square_bracket_right_square_bracket(include)
4649 .call()
4650 .await
4651 {
4652 Ok(resp) => resp,
4653 Err(e) => {
4654 let error = self
4655 .handle_api_error(e, operation, model, &request_json, &state)
4656 .await;
4657 return Err(error);
4658 }
4659 };
4660
4661 let duration = start_time.elapsed();
4662
4663 self.call_after_response(
4665 &response,
4666 operation,
4667 model,
4668 &request_json,
4669 &state,
4670 duration,
4671 None,
4672 None,
4673 )
4674 .await;
4675
4676 Ok(response)
4677 }
4678}
4679
4680#[derive(Debug, Clone, Copy)]
4682#[allow(dead_code)]
4683pub struct AudioClient<'a, T = ()> {
4684 client: &'a Client<T>,
4685}
4686
4687#[derive(Debug, Clone, Copy)]
4689#[allow(dead_code)]
4690pub struct EmbeddingsClient<'a, T = ()> {
4691 client: &'a Client<T>,
4692}
4693
4694#[derive(Debug, Clone, Copy)]
4696#[allow(dead_code)]
4697pub struct ImagesClient<'a, T = ()> {
4698 client: &'a Client<T>,
4699}
4700
4701#[derive(Debug, Clone, Copy)]
4703#[allow(dead_code)]
4704pub struct FilesClient<'a, T = ()> {
4705 client: &'a Client<T>,
4706}
4707
4708#[derive(Debug, Clone, Copy)]
4710#[allow(dead_code)]
4711pub struct FineTuningClient<'a, T = ()> {
4712 client: &'a Client<T>,
4713}
4714
4715#[derive(Debug, Clone, Copy)]
4717#[allow(dead_code)]
4718pub struct BatchClient<'a, T = ()> {
4719 client: &'a Client<T>,
4720}
4721
4722#[derive(Debug, Clone, Copy)]
4724#[allow(dead_code)]
4725pub struct VectorStoresClient<'a, T = ()> {
4726 client: &'a Client<T>,
4727}
4728
4729#[derive(Debug, Clone, Copy)]
4731#[allow(dead_code)]
4732pub struct ModerationsClient<'a, T = ()> {
4733 client: &'a Client<T>,
4734}
4735
4736#[derive(Debug, Clone, Copy)]
4738#[allow(dead_code)]
4739pub struct ThreadsClient<'a, T = ()> {
4740 client: &'a Client<T>,
4741}
4742
4743#[derive(Debug, Clone, Copy)]
4745#[allow(dead_code)]
4746pub struct UploadsClient<'a, T = ()> {
4747 client: &'a Client<T>,
4748}
4749
4750#[derive(Debug, Clone, Copy)]
4752pub struct ModelsClient<'a, T = ()> {
4753 client: &'a Client<T>,
4754}
4755
4756#[derive(Debug, Clone, Copy)]
4758pub struct CompletionsClient<'a, T = ()> {
4759 client: &'a Client<T>,
4760}
4761
4762#[derive(Debug, Clone, Copy)]
4764pub struct UsageClient<'a, T = ()> {
4765 client: &'a Client<T>,
4766}
4767
4768impl_interceptor_helpers!(AssistantsClient<'_, T>);
4770impl_interceptor_helpers!(AudioClient<'_, T>);
4771impl_interceptor_helpers!(EmbeddingsClient<'_, T>);
4772impl_interceptor_helpers!(ImagesClient<'_, T>);
4773impl_interceptor_helpers!(FilesClient<'_, T>);
4774impl_interceptor_helpers!(FineTuningClient<'_, T>);
4775impl_interceptor_helpers!(BatchClient<'_, T>);
4776impl_interceptor_helpers!(VectorStoresClient<'_, T>);
4777impl_interceptor_helpers!(ModerationsClient<'_, T>);
4778impl_interceptor_helpers!(ThreadsClient<'_, T>);
4779impl_interceptor_helpers!(UploadsClient<'_, T>);
4780impl_interceptor_helpers!(ModelsClient<'_, T>);
4781impl_interceptor_helpers!(CompletionsClient<'_, T>);
4782impl_interceptor_helpers!(UsageClient<'_, T>);
4783
4784impl<T: Default + Send + Sync> ModelsClient<'_, T> {
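    /// Lists the available models.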
4785 pub async fn list(&self) -> Result<ListModelsResponse> {
4800 let mut state = T::default();
4802 let operation = operation_names::MODEL_LIST;
4803 let model = "models";
4804 let request_json = "{}".to_string();
4805
4806 self.call_before_request(operation, model, &request_json, &mut state)
4808 .await?;
4809
4810 let start_time = Instant::now();
4811
4812 let response = match models_api::list_models()
4814 .configuration(&self.client.base_configuration)
4815 .call()
4816 .await
4817 {
4818 Ok(resp) => resp,
4819 Err(e) => {
4820 let error = self
4821 .handle_api_error(e, operation, model, &request_json, &state)
4822 .await;
4823 return Err(error);
4824 }
4825 };
4826
4827 let duration = start_time.elapsed();
4828
4829 self.call_after_response(
4831 &response,
4832 operation,
4833 model,
4834 &request_json,
4835 &state,
4836 duration,
4837 None,
4838 None,
4839 )
4840 .await;
4841
4842 Ok(response)
4843 }
4844
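    /// Retrieves a model by its ID.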
4845 pub async fn get(&self, model_id: impl Into<String>) -> Result<Model> {
4860 let id = model_id.into();
4861
4862 let mut state = T::default();
4864 let operation = operation_names::MODEL_RETRIEVE;
4865 let model = "models";
4866 let request_json = format!("{{\"model_id\":\"{id}\"}}");
4867
4868 self.call_before_request(operation, model, &request_json, &mut state)
4870 .await?;
4871
4872 let start_time = Instant::now();
4873
4874 let response = match models_api::retrieve_model()
4876 .configuration(&self.client.base_configuration)
4877 .model(&id)
4878 .call()
4879 .await
4880 {
4881 Ok(resp) => resp,
4882 Err(e) => {
4883 let error = self
4884 .handle_api_error(e, operation, model, &request_json, &state)
4885 .await;
4886 return Err(error);
4887 }
4888 };
4889
4890 let duration = start_time.elapsed();
4891
4892 self.call_after_response(
4894 &response,
4895 operation,
4896 model,
4897 &request_json,
4898 &state,
4899 duration,
4900 None,
4901 None,
4902 )
4903 .await;
4904
4905 Ok(response)
4906 }
4907
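    /// Convenience wrapper that retrieves the model named by the builder.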
4908 pub async fn retrieve(&self, builder: ModelRetrievalBuilder) -> Result<Model> {
4910 self.get(builder.model_id()).await
4911 }
4912
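    /// Deletes a model by its ID.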
4913 pub async fn delete(&self, model_id: impl Into<String>) -> Result<DeleteModelResponse> {
4930 let id = model_id.into();
4931
4932 let mut state = T::default();
4934 let operation = operation_names::MODEL_DELETE;
4935 let model = "models";
4936 let request_json = format!("{{\"model_id\":\"{id}\"}}");
4937
4938 self.call_before_request(operation, model, &request_json, &mut state)
4940 .await?;
4941
4942 let start_time = Instant::now();
4943
4944 let response = match models_api::delete_model()
4946 .configuration(&self.client.base_configuration)
4947 .model(&id)
4948 .call()
4949 .await
4950 {
4951 Ok(resp) => resp,
4952 Err(e) => {
4953 let error = self
4954 .handle_api_error(e, operation, model, &request_json, &state)
4955 .await;
4956 return Err(error);
4957 }
4958 };
4959
4960 let duration = start_time.elapsed();
4961
4962 self.call_after_response(
4964 &response,
4965 operation,
4966 model,
4967 &request_json,
4968 &state,
4969 duration,
4970 None,
4971 None,
4972 )
4973 .await;
4974
4975 Ok(response)
4976 }
4977
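    /// Convenience wrapper that deletes the model named by the builder.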
4978 pub async fn remove(&self, builder: ModelDeleteBuilder) -> Result<DeleteModelResponse> {
4980 self.delete(builder.model_id()).await
4981 }
4982}
4983
4984impl<T: Default + Send + Sync> CompletionsClient<'_, T> {
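    /// Returns a `CompletionsBuilder` for the given model.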
4985 #[must_use]
4999 pub fn builder(&self, model: impl Into<String>) -> CompletionsBuilder {
5000 CompletionsBuilder::new(model)
5001 }
5002
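    /// Creates a text completion from the builder and reports prompt and
    /// completion token counts to the interceptors.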
5003 pub async fn create(&self, builder: CompletionsBuilder) -> Result<CreateCompletionResponse> {
5022 let request = builder.build()?;
5023
5024 let mut state = T::default();
5026 let operation = operation_names::TEXT_COMPLETION;
5027 let model = request.model.clone();
5028 let request_json = serde_json::to_string(&request).unwrap_or_default();
5029
5030 self.call_before_request(operation, &model, &request_json, &mut state)
5032 .await?;
5033
5034 let start_time = Instant::now();
5035
5036 let response = match completions_api::create_completion()
5038 .configuration(&self.client.base_configuration)
5039 .create_completion_request(request)
5040 .call()
5041 .await
5042 {
5043 Ok(resp) => resp,
5044 Err(e) => {
5045 let error = self
5046 .handle_api_error(e, operation, &model, &request_json, &state)
5047 .await;
5048 return Err(error);
5049 }
5050 };
5051
5052 let duration = start_time.elapsed();
5053
5054 self.call_after_response(
5056 &response,
5057 operation,
5058 &model,
5059 &request_json,
5060 &state,
5061 duration,
5062 response.usage.as_ref().map(|u| i64::from(u.prompt_tokens)),
5063 response
5064 .usage
5065 .as_ref()
5066 .map(|u| i64::from(u.completion_tokens)),
5067 )
5068 .await;
5069
5070 Ok(response)
5071 }
5072}
5073
5074impl<T: Default + Send + Sync> UsageClient<'_, T> {
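    /// Retrieves audio speech usage for the time range and filters in the builder.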
5075 pub async fn audio_speeches(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5092 let mut state = T::default();
5094 let operation = operation_names::USAGE_AUDIO_SPEECHES;
5095 let model = "usage";
5096 let usage_start_time = builder.start_time();
5097 let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5098
5099 self.call_before_request(operation, model, &request_json, &mut state)
5101 .await?;
5102
5103 let start_time = Instant::now();
5104
5105 let response = match usage_api::usage_audio_speeches()
5107 .configuration(&self.client.base_configuration)
5108 .start_time(builder.start_time())
5109 .maybe_end_time(builder.end_time())
5110 .maybe_bucket_width(builder.bucket_width_str())
5111 .maybe_project_ids(builder.project_ids_option())
5112 .maybe_user_ids(builder.user_ids_option())
5113 .maybe_api_key_ids(builder.api_key_ids_option())
5114 .maybe_models(builder.models_option())
5115 .maybe_group_by(builder.group_by_option())
5116 .maybe_limit(builder.limit_ref())
5117 .maybe_page(builder.page_ref())
5118 .call()
5119 .await
5120 {
5121 Ok(resp) => resp,
5122 Err(e) => {
5123 let error = self
5124 .handle_api_error(e, operation, model, &request_json, &state)
5125 .await;
5126 return Err(error);
5127 }
5128 };
5129
5130 let duration = start_time.elapsed();
5131
5132 self.call_after_response(
5134 &response,
5135 operation,
5136 model,
5137 &request_json,
5138 &state,
5139 duration,
5140 None,
5141 None,
5142 )
5143 .await;
5144
5145 Ok(response)
5146 }
5147
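    /// Retrieves audio transcription usage for the time range and filters in the builder.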
5148 pub async fn audio_transcriptions(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5150 let mut state = T::default();
5152 let operation = operation_names::USAGE_AUDIO_TRANSCRIPTIONS;
5153 let model = "usage";
5154 let usage_start_time = builder.start_time();
5155 let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5156
5157 self.call_before_request(operation, model, &request_json, &mut state)
5159 .await?;
5160
5161 let start_time = Instant::now();
5162
5163 let response = match usage_api::usage_audio_transcriptions()
5165 .configuration(&self.client.base_configuration)
5166 .start_time(builder.start_time())
5167 .maybe_end_time(builder.end_time())
5168 .maybe_bucket_width(builder.bucket_width_str())
5169 .maybe_project_ids(builder.project_ids_option())
5170 .maybe_user_ids(builder.user_ids_option())
5171 .maybe_api_key_ids(builder.api_key_ids_option())
5172 .maybe_models(builder.models_option())
5173 .maybe_group_by(builder.group_by_option())
5174 .maybe_limit(builder.limit_ref())
5175 .maybe_page(builder.page_ref())
5176 .call()
5177 .await
5178 {
5179 Ok(resp) => resp,
5180 Err(e) => {
5181 let error = self
5182 .handle_api_error(e, operation, model, &request_json, &state)
5183 .await;
5184 return Err(error);
5185 }
5186 };
5187
5188 let duration = start_time.elapsed();
5189
5190 self.call_after_response(
5192 &response,
5193 operation,
5194 model,
5195 &request_json,
5196 &state,
5197 duration,
5198 None,
5199 None,
5200 )
5201 .await;
5202
5203 Ok(response)
5204 }
5205
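    /// Retrieves code interpreter session usage for the time range and filters in the builder.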
5206 pub async fn code_interpreter_sessions(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5208 let mut state = T::default();
5210 let operation = operation_names::USAGE_CODE_INTERPRETER;
5211 let model = "usage";
5212 let usage_start_time = builder.start_time();
5213 let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5214
5215 self.call_before_request(operation, model, &request_json, &mut state)
5217 .await?;
5218
5219 let start_time = Instant::now();
5220
5221 let response = match usage_api::usage_code_interpreter_sessions()
5223 .configuration(&self.client.base_configuration)
5224 .start_time(builder.start_time())
5225 .maybe_end_time(builder.end_time())
5226 .maybe_bucket_width(builder.bucket_width_str())
5227 .maybe_project_ids(builder.project_ids_option())
5228 .maybe_group_by(builder.group_by_option())
5229 .maybe_limit(builder.limit_ref())
5230 .maybe_page(builder.page_ref())
5231 .call()
5232 .await
5233 {
5234 Ok(resp) => resp,
5235 Err(e) => {
5236 let error = self
5237 .handle_api_error(e, operation, model, &request_json, &state)
5238 .await;
5239 return Err(error);
5240 }
5241 };
5242
5243 let duration = start_time.elapsed();
5244
5245 self.call_after_response(
5247 &response,
5248 operation,
5249 model,
5250 &request_json,
5251 &state,
5252 duration,
5253 None,
5254 None,
5255 )
5256 .await;
5257
5258 Ok(response)
5259 }
5260
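    /// Retrieves completions usage for the time range and filters in the builder.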
5261 pub async fn completions(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5263 let mut state = T::default();
5265 let operation = operation_names::USAGE_COMPLETIONS;
5266 let model = "usage";
5267 let usage_start_time = builder.start_time();
5268 let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5269
5270 self.call_before_request(operation, model, &request_json, &mut state)
5272 .await?;
5273
5274 let start_time = Instant::now();
5275
5276 let response = match usage_api::usage_completions()
5278 .configuration(&self.client.base_configuration)
5279 .start_time(builder.start_time())
5280 .maybe_end_time(builder.end_time())
5281 .maybe_bucket_width(builder.bucket_width_str())
5282 .maybe_project_ids(builder.project_ids_option())
5283 .maybe_user_ids(builder.user_ids_option())
5284 .maybe_api_key_ids(builder.api_key_ids_option())
5285 .maybe_models(builder.models_option())
5286 .maybe_group_by(builder.group_by_option())
5287 .maybe_limit(builder.limit_ref())
5288 .maybe_page(builder.page_ref())
5289 .call()
5290 .await
5291 {
5292 Ok(resp) => resp,
5293 Err(e) => {
5294 let error = self
5295 .handle_api_error(e, operation, model, &request_json, &state)
5296 .await;
5297 return Err(error);
5298 }
5299 };
5300
5301 let duration = start_time.elapsed();
5302
5303 self.call_after_response(
5305 &response,
5306 operation,
5307 model,
5308 &request_json,
5309 &state,
5310 duration,
5311 None,
5312 None,
5313 )
5314 .await;
5315
5316 Ok(response)
5317 }
5318
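    /// Retrieves embeddings usage for the time range and filters in the builder.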
5319 pub async fn embeddings(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5321 let mut state = T::default();
5323 let operation = operation_names::USAGE_EMBEDDINGS;
5324 let model = "usage";
5325 let usage_start_time = builder.start_time();
5326 let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5327
5328 self.call_before_request(operation, model, &request_json, &mut state)
5330 .await?;
5331
5332 let start_time = Instant::now();
5333
5334 let response = match usage_api::usage_embeddings()
5336 .configuration(&self.client.base_configuration)
5337 .start_time(builder.start_time())
5338 .maybe_end_time(builder.end_time())
5339 .maybe_bucket_width(builder.bucket_width_str())
5340 .maybe_project_ids(builder.project_ids_option())
5341 .maybe_user_ids(builder.user_ids_option())
5342 .maybe_api_key_ids(builder.api_key_ids_option())
5343 .maybe_models(builder.models_option())
5344 .maybe_group_by(builder.group_by_option())
5345 .maybe_limit(builder.limit_ref())
5346 .maybe_page(builder.page_ref())
5347 .call()
5348 .await
5349 {
5350 Ok(resp) => resp,
5351 Err(e) => {
5352 let error = self
5353 .handle_api_error(e, operation, model, &request_json, &state)
5354 .await;
5355 return Err(error);
5356 }
5357 };
5358
5359 let duration = start_time.elapsed();
5360
5361 self.call_after_response(
5363 &response,
5364 operation,
5365 model,
5366 &request_json,
5367 &state,
5368 duration,
5369 None,
5370 None,
5371 )
5372 .await;
5373
5374 Ok(response)
5375 }
5376
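    /// Retrieves image usage for the time range and filters in the builder.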
5377 pub async fn images(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5379 let mut state = T::default();
5381 let operation = operation_names::USAGE_IMAGES;
5382 let model = "usage";
5383 let usage_start_time = builder.start_time();
5384 let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5385
5386 self.call_before_request(operation, model, &request_json, &mut state)
5388 .await?;
5389
5390 let start_time = Instant::now();
5391
5392 let response = match usage_api::usage_images()
5394 .configuration(&self.client.base_configuration)
5395 .start_time(builder.start_time())
5396 .maybe_end_time(builder.end_time())
5397 .maybe_bucket_width(builder.bucket_width_str())
5398 .maybe_project_ids(builder.project_ids_option())
5399 .maybe_user_ids(builder.user_ids_option())
5400 .maybe_api_key_ids(builder.api_key_ids_option())
5401 .maybe_models(builder.models_option())
5402 .maybe_group_by(builder.group_by_option())
5403 .maybe_limit(builder.limit_ref())
5404 .maybe_page(builder.page_ref())
5405 .call()
5406 .await
5407 {
5408 Ok(resp) => resp,
5409 Err(e) => {
5410 let error = self
5411 .handle_api_error(e, operation, model, &request_json, &state)
5412 .await;
5413 return Err(error);
5414 }
5415 };
5416
5417 let duration = start_time.elapsed();
5418
5419 self.call_after_response(
5421 &response,
5422 operation,
5423 model,
5424 &request_json,
5425 &state,
5426 duration,
5427 None,
5428 None,
5429 )
5430 .await;
5431
5432 Ok(response)
5433 }
5434
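    /// Retrieves moderations usage for the time range and filters in the builder.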
5435 pub async fn moderations(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5437 let mut state = T::default();
5439 let operation = operation_names::USAGE_MODERATIONS;
5440 let model = "usage";
5441 let usage_start_time = builder.start_time();
5442 let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5443
5444 self.call_before_request(operation, model, &request_json, &mut state)
5446 .await?;
5447
5448 let start_time = Instant::now();
5449
5450 let response = match usage_api::usage_moderations()
5452 .configuration(&self.client.base_configuration)
5453 .start_time(builder.start_time())
5454 .maybe_end_time(builder.end_time())
5455 .maybe_bucket_width(builder.bucket_width_str())
5456 .maybe_project_ids(builder.project_ids_option())
5457 .maybe_user_ids(builder.user_ids_option())
5458 .maybe_api_key_ids(builder.api_key_ids_option())
5459 .maybe_models(builder.models_option())
5460 .maybe_group_by(builder.group_by_option())
5461 .maybe_limit(builder.limit_ref())
5462 .maybe_page(builder.page_ref())
5463 .call()
5464 .await
5465 {
5466 Ok(resp) => resp,
5467 Err(e) => {
5468 let error = self
5469 .handle_api_error(e, operation, model, &request_json, &state)
5470 .await;
5471 return Err(error);
5472 }
5473 };
5474
5475 let duration = start_time.elapsed();
5476
5477 self.call_after_response(
5479 &response,
5480 operation,
5481 model,
5482 &request_json,
5483 &state,
5484 duration,
5485 None,
5486 None,
5487 )
5488 .await;
5489
5490 Ok(response)
5491 }
5492
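    /// Retrieves vector store usage for the time range and filters in the builder.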
5493 pub async fn vector_stores(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5495 let mut state = T::default();
5497 let operation = operation_names::USAGE_VECTOR_STORES;
5498 let model = "usage";
5499 let usage_start_time = builder.start_time();
5500 let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5501
5502 self.call_before_request(operation, model, &request_json, &mut state)
5504 .await?;
5505
5506 let start_time = Instant::now();
5507
5508 let response = match usage_api::usage_vector_stores()
5510 .configuration(&self.client.base_configuration)
5511 .start_time(builder.start_time())
5512 .maybe_end_time(builder.end_time())
5513 .maybe_bucket_width(builder.bucket_width_str())
5514 .maybe_project_ids(builder.project_ids_option())
5515 .maybe_group_by(builder.group_by_option())
5516 .maybe_limit(builder.limit_ref())
5517 .maybe_page(builder.page_ref())
5518 .call()
5519 .await
5520 {
5521 Ok(resp) => resp,
5522 Err(e) => {
5523 let error = self
5524 .handle_api_error(e, operation, model, &request_json, &state)
5525 .await;
5526 return Err(error);
5527 }
5528 };
5529
5530 let duration = start_time.elapsed();
5531
5532 self.call_after_response(
5534 &response,
5535 operation,
5536 model,
5537 &request_json,
5538 &state,
5539 duration,
5540 None,
5541 None,
5542 )
5543 .await;
5544
5545 Ok(response)
5546 }
5547
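    /// Retrieves cost data for the time range and filters in the builder.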
5548 pub async fn costs(&self, builder: UsageBuilder) -> Result<UsageResponse> {
5550 let mut state = T::default();
5552 let operation = operation_names::USAGE_COSTS;
5553 let model = "usage";
5554 let usage_start_time = builder.start_time();
5555 let request_json = format!("{{\"start_time\":{usage_start_time}}}");
5556
5557 self.call_before_request(operation, model, &request_json, &mut state)
5559 .await?;
5560
5561 let start_time = Instant::now();
5562
5563 let response = match usage_api::usage_costs()
5565 .configuration(&self.client.base_configuration)
5566 .start_time(builder.start_time())
5567 .maybe_end_time(builder.end_time())
5568 .maybe_bucket_width(builder.bucket_width_str())
5569 .maybe_project_ids(builder.project_ids_option())
5570 .maybe_group_by(builder.group_by_option())
5571 .maybe_limit(builder.limit_ref())
5572 .maybe_page(builder.page_ref())
5573 .call()
5574 .await
5575 {
5576 Ok(resp) => resp,
5577 Err(e) => {
5578 let error = self
5579 .handle_api_error(e, operation, model, &request_json, &state)
5580 .await;
5581 return Err(error);
5582 }
5583 };
5584
5585 let duration = start_time.elapsed();
5586
5587 self.call_after_response(
5589 &response,
5590 operation,
5591 model,
5592 &request_json,
5593 &state,
5594 duration,
5595 None,
5596 None,
5597 )
5598 .await;
5599
5600 Ok(response)
5601 }
5602}