clia_async_openai/types/batch.rs

use std::collections::HashMap;

use derive_builder::Builder;
use serde::{Deserialize, Serialize};

use crate::error::OpenAIError;

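/// Request to create and execute a batch from an uploaded file of requests.
///
/// # Example
///
/// A minimal sketch of building a request with the generated [`BatchRequestArgs`]
/// builder; the import path assumes this module is re-exported as
/// `clia_async_openai::types`, and the file id is a placeholder.
///
/// ```ignore
/// use clia_async_openai::types::{BatchCompletionWindow, BatchEndpoint, BatchRequestArgs};
///
/// let request = BatchRequestArgs::default()
///     .input_file_id("file-abc123") // id of an uploaded JSONL file (placeholder)
///     .endpoint(BatchEndpoint::V1ChatCompletions)
///     .completion_window(BatchCompletionWindow::W24H)
///     .build()
///     .expect("a valid BatchRequest");
/// ```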
#[derive(Debug, Serialize, Default, Clone, Builder, PartialEq, Deserialize)]
#[builder(name = "BatchRequestArgs")]
#[builder(pattern = "mutable")]
#[builder(setter(into, strip_option), default)]
#[builder(derive(Debug))]
#[builder(build_fn(error = "OpenAIError"))]
pub struct BatchRequest {
    /// The ID of an uploaded file that contains requests for the new batch.
    ///
    /// See [upload file](https://platform.openai.com/docs/api-reference/files/create) for how to upload a file.
    ///
    /// Your input file must be formatted as a [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), and must be uploaded with the purpose `batch`. The file can contain up to 50,000 requests, and can be up to 100 MB in size.
    pub input_file_id: String,

    /// The endpoint to be used for all requests in the batch. Currently `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 embedding inputs across all requests in the batch.
    pub endpoint: BatchEndpoint,

    /// The time frame within which the batch should be processed. Currently only `24h` is supported.
    pub completion_window: BatchCompletionWindow,

    /// Optional custom metadata for the batch.
    pub metadata: Option<HashMap<String, serde_json::Value>>,
}

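/// The OpenAI API endpoint targeted by a batch.
///
/// Serializes to the literal path: for example, `BatchEndpoint::V1ChatCompletions`
/// is represented on the wire as `"/v1/chat/completions"`.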
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Default)]
pub enum BatchEndpoint {
    #[default]
    #[serde(rename = "/v1/chat/completions")]
    V1ChatCompletions,
    #[serde(rename = "/v1/embeddings")]
    V1Embeddings,
    #[serde(rename = "/v1/completions")]
    V1Completions,
}

#[derive(Debug, Clone, PartialEq, Serialize, Default, Deserialize)]
pub enum BatchCompletionWindow {
    #[default]
    #[serde(rename = "24h")]
    W24H,
}

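/// A batch object as returned by the Batch API, e.g. when creating, retrieving,
/// cancelling, or listing batches.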
#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
pub struct Batch {
    pub id: String,
    /// The object type, which is always `batch`.
    pub object: String,
    /// The OpenAI API endpoint used by the batch.
    pub endpoint: String,
    pub errors: Option<BatchErrors>,
    /// The ID of the input file for the batch.
    pub input_file_id: String,
    /// The time frame within which the batch should be processed.
    pub completion_window: String,
    /// The current status of the batch.
    pub status: BatchStatus,
    /// The ID of the file containing the outputs of successfully executed requests.
    pub output_file_id: Option<String>,
    /// The ID of the file containing the outputs of requests with errors.
    pub error_file_id: Option<String>,
    /// The Unix timestamp (in seconds) for when the batch was created.
    pub created_at: u32,
    /// The Unix timestamp (in seconds) for when the batch started processing.
    pub in_progress_at: Option<u32>,
    /// The Unix timestamp (in seconds) for when the batch will expire.
    pub expires_at: Option<u32>,
    /// The Unix timestamp (in seconds) for when the batch started finalizing.
    pub finalizing_at: Option<u32>,
    /// The Unix timestamp (in seconds) for when the batch was completed.
    pub completed_at: Option<u32>,
    /// The Unix timestamp (in seconds) for when the batch failed.
    pub failed_at: Option<u32>,
    /// The Unix timestamp (in seconds) for when the batch expired.
    pub expired_at: Option<u32>,
    /// The Unix timestamp (in seconds) for when the batch started cancelling.
    pub cancelling_at: Option<u32>,
    /// The Unix timestamp (in seconds) for when the batch was cancelled.
    pub cancelled_at: Option<u32>,
    /// The request counts for different statuses within the batch.
    pub request_counts: Option<BatchRequestCounts>,
    /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
    pub metadata: Option<HashMap<String, serde_json::Value>>,
}

#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
pub struct BatchErrors {
    /// The object type, which is always `list`.
    pub object: String,
    pub data: Vec<BatchError>,
}

#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
pub struct BatchError {
    /// An error code identifying the error type.
    pub code: String,
    /// A human-readable message providing more details about the error.
    pub message: String,
    /// The name of the parameter that caused the error, if applicable.
    pub param: Option<String>,
    /// The line number of the input file where the error occurred, if applicable.
    pub line: Option<u32>,
}

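/// The current status of a batch.
///
/// Serialized in `snake_case`: for example, `BatchStatus::InProgress` is
/// represented as `"in_progress"`.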
#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum BatchStatus {
    Validating,
    Failed,
    InProgress,
    Finalizing,
    Completed,
    Expired,
    Cancelling,
    Cancelled,
}

#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
pub struct BatchRequestCounts {
    /// Total number of requests in the batch.
    pub total: u32,
    /// Number of requests that have been completed successfully.
    pub completed: u32,
    /// Number of requests that have failed.
    pub failed: u32,
}

#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
pub struct ListBatchesResponse {
    pub data: Vec<Batch>,
    pub first_id: Option<String>,
    pub last_id: Option<String>,
    pub has_more: bool,
    pub object: String,
}

#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
#[serde(rename_all = "UPPERCASE")]
pub enum BatchRequestInputMethod {
    POST,
}

/// The per-line object of the batch input file.
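///
/// # Example
///
/// A sketch of producing one line of the JSONL input file; the chat-completion
/// body is illustrative, and the import path assumes this module is re-exported
/// as `clia_async_openai::types`.
///
/// ```ignore
/// use clia_async_openai::types::{BatchEndpoint, BatchRequestInput, BatchRequestInputMethod};
///
/// let line = BatchRequestInput {
///     custom_id: "request-1".to_string(),
///     method: BatchRequestInputMethod::POST,
///     url: BatchEndpoint::V1ChatCompletions,
///     body: Some(serde_json::json!({
///         "model": "gpt-4o-mini",
///         "messages": [{"role": "user", "content": "Hello!"}]
///     })),
/// };
///
/// // Each line of the input file is the JSON serialization of one request.
/// let jsonl_line = serde_json::to_string(&line).expect("a serializable input line");
/// ```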
#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
pub struct BatchRequestInput {
    /// A developer-provided per-request id that will be used to match outputs to inputs. Must be unique for each request in a batch.
    pub custom_id: String,
    /// The HTTP method to be used for the request. Currently only `POST` is supported.
    pub method: BatchRequestInputMethod,
    /// The OpenAI API relative URL to be used for the request. Currently `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported.
    pub url: BatchEndpoint,
    /// The JSON body of the request.
    pub body: Option<serde_json::Value>,
}

#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
pub struct BatchRequestOutputResponse {
    /// The HTTP status code of the response
    pub status_code: u16,
    /// A unique identifier for the OpenAI API request. Please include this request ID when contacting support.
    pub request_id: String,
    /// The JSON body of the response
    pub body: serde_json::Value,
}

#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
pub struct BatchRequestOutputError {
    /// A machine-readable error code.
    pub code: String,
    /// A human-readable error message.
    pub message: String,
}

/// The per-line object of the batch output and error files.
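///
/// # Example
///
/// A sketch of parsing a downloaded output file line by line; the import path
/// assumes this module is re-exported as `clia_async_openai::types`.
///
/// ```ignore
/// use clia_async_openai::types::BatchRequestOutput;
///
/// let file_contents = "..."; // contents of the downloaded output (or error) file
/// for line in file_contents.lines().filter(|l| !l.trim().is_empty()) {
///     let output: BatchRequestOutput =
///         serde_json::from_str(line).expect("a valid output line");
///     println!("{} -> {:?}", output.custom_id, output.response);
/// }
/// ```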
#[derive(Debug, Deserialize, Clone, PartialEq, Serialize)]
pub struct BatchRequestOutput {
    pub id: String,
    /// A developer-provided per-request id that will be used to match outputs to inputs.
    pub custom_id: String,
    pub response: Option<BatchRequestOutputResponse>,
    /// For requests that failed with a non-HTTP error, this will contain more information on the cause of the failure.
    pub error: Option<BatchRequestOutputError>,
}
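
// A small test module (a sketch) exercising the serde attributes declared above;
// it relies only on `serde_json`, which this module already depends on.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn endpoint_and_completion_window_wire_format() {
        assert_eq!(
            serde_json::to_string(&BatchEndpoint::V1ChatCompletions).unwrap(),
            "\"/v1/chat/completions\""
        );
        assert_eq!(
            serde_json::to_string(&BatchCompletionWindow::W24H).unwrap(),
            "\"24h\""
        );
    }

    #[test]
    fn batch_status_uses_snake_case() {
        let status: BatchStatus = serde_json::from_str("\"in_progress\"").unwrap();
        assert_eq!(status, BatchStatus::InProgress);
    }
}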