// batch_mode_batch_client/openai_client_handle.rs
crate::ix!();
3
// Trait alias over `async_openai`'s configuration trait, so downstream code
// can bound generics on `OpenAIConfigInterface` instead of naming the
// third-party trait directly.
// NOTE(review): trait aliases (`trait A = B;`) are an unstable nightly
// feature — confirm the crate enables `#![feature(trait_alias)]`.
pub trait OpenAIConfigInterface = async_openai::config::Config;
5
/// Handle wrapping an `async_openai` client configured for the OpenAI API.
///
/// Constructed via [`OpenAIClientHandle::new`], which returns it inside an
/// [`Arc`] so it can be shared across tasks.
pub struct OpenAIClientHandle {
    // The underlying HTTP client; all batch/file operations below go
    // through the `batches()` / `files()` sub-APIs delegated from it.
    client: async_openai::Client<OpenAIConfig>,
}
9
10impl OpenAIClientHandle {
11
12 pub fn new() -> Arc<Self> {
13
14 info!("creating new OpenAI Client Handle");
15
16 let openai_api_key
17 = std::env::var("OPENAI_API_KEY")
18 .expect("OPENAI_API_KEY environment variable not set");
19
20 let config = OpenAIConfig::new().with_api_key(openai_api_key);
22
23 let client = async_openai::Client::with_config(config);
24
25 Arc::new(Self { client })
26 }
27
28 delegate!{
29 to self.client {
30 fn batches(&self) -> async_openai::Batches<OpenAIConfig>;
31 fn files(&self) -> async_openai::Files<OpenAIConfig>;
32 }
33 }
34
35 pub async fn retrieve_batch(&self, batch_id: &str)
36 -> Result<Batch,OpenAIClientError>
37 {
38 info!("retrieving batch {} from online", batch_id);
39
40 Ok(self.batches().retrieve(batch_id).await?)
41 }
42
43 pub async fn file_content(&self, file_id: &str) -> Result<Bytes,OpenAIClientError> {
44
45 info!("retrieving file {} content from online", file_id);
46
47 let file_content = self.files().content(file_id).await?;
48 Ok(file_content)
49 }
50
51 pub async fn upload_batch_file(
52 &self,
53 file_path: impl AsRef<Path>,
54
55 ) -> Result<OpenAIFile, OpenAIClientError> {
56
57 info!("uploading batch file at path={:?} to online", file_path.as_ref());
58
59 let create_file_request = CreateFileRequest {
60 file: file_path.into(),
61 purpose: FilePurpose::Batch,
62 };
63
64 let file = self.files().create(create_file_request).await?;
65 Ok(file)
66 }
67
68 pub async fn create_batch(
69 &self,
70 input_file_id: &str,
71 ) -> Result<Batch, OpenAIClientError> {
72
73 info!("creating batch with input_file_id={}", input_file_id);
74
75 let batch_request = BatchRequest {
76 input_file_id: input_file_id.to_string(),
77 endpoint: BatchEndpoint::V1ChatCompletions,
78 completion_window: BatchCompletionWindow::W24H,
79 metadata: None,
80 };
81
82 let batch = self.batches().create(batch_request).await?;
83
84 Ok(batch)
85 }
86
87 pub async fn wait_for_batch_completion(
88 &self,
89 batch_id: &str,
90 ) -> Result<Batch, OpenAIClientError> {
91
92 info!("waiting for batch completion");
93
94 loop {
95 let batch = self.retrieve_batch(&batch_id).await?;
96 match batch.status {
97 BatchStatus::Completed => return Ok(batch),
98 BatchStatus::Failed => {
99 return Err(OpenAIClientError::ApiError(OpenAIApiError {
100 message: "Batch failed".to_string(),
101 r#type: None,
102 param: None,
103 code: None,
104 }))
105 }
106 _ => {
107 println!("Batch status: {:?}", batch.status);
108 sleep(Duration::from_secs(20)).await; }
110 }
111 }
112 }
113}