openai_tools/models/request.rs

//! OpenAI Models API Request Module
//!
//! This module provides the functionality to interact with the OpenAI Models API.
//! It allows you to list, retrieve, and delete models available on the OpenAI platform.
//!
//! # Key Features
//!
//! - **List Models**: Retrieve all available models
//! - **Retrieve Model**: Get details of a specific model
//! - **Delete Model**: Delete a fine-tuned model (only for models you own)
//!
//! # Quick Start
//!
//! ```rust,no_run
//! use openai_tools::models::request::Models;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     let models = Models::new()?;
//!
//!     // List all available models
//!     let response = models.list().await?;
//!     for model in &response.data {
//!         println!("{}: owned by {}", model.id, model.owned_by);
//!     }
//!
//!     Ok(())
//! }
//! ```

use crate::common::auth::AuthProvider;
use crate::common::client::create_http_client;
use crate::common::errors::{ErrorResponse, OpenAIToolError, Result};
use crate::models::response::{DeleteResponse, Model, ModelsListResponse};
use std::time::Duration;

/// Default API path for Models
const MODELS_PATH: &str = "models";

/// Client for interacting with the OpenAI Models API.
///
/// This struct provides methods to list, retrieve, and delete models.
/// Use [`Models::new()`] to create a new instance.
///
/// # Example
///
/// ```rust,no_run
/// use openai_tools::models::request::Models;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn std::error::Error>> {
///     let models = Models::new()?;
///
///     // Get details of a specific model
///     let model = models.retrieve("gpt-4o-mini").await?;
///     println!("Model: {} (created: {})", model.id, model.created);
///
///     Ok(())
/// }
/// ```
pub struct Models {
    /// Authentication provider (OpenAI or Azure)
    auth: AuthProvider,
    /// Optional request timeout duration
    timeout: Option<Duration>,
}

impl Models {
    /// Creates a new Models client for the OpenAI API.
    ///
    /// Initializes the client by loading the OpenAI API key from
    /// the environment variable `OPENAI_API_KEY`. Supports `.env` file loading
    /// via dotenvy.
    ///
    /// # Returns
    ///
    /// * `Ok(Models)` - A new Models client ready for use
    /// * `Err(OpenAIToolError)` - If the API key is not found in the environment
    ///
    /// # Example
    ///
    /// ```rust,no_run
    /// use openai_tools::models::request::Models;
    ///
    /// let models = Models::new().expect("API key should be set");
    /// ```
    pub fn new() -> Result<Self> {
        let auth = AuthProvider::openai_from_env()?;
        Ok(Self { auth, timeout: None })
    }

    /// Creates a new Models client with a custom authentication provider.
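    ///
    /// # Example
    ///
    /// A minimal sketch; it assumes `AuthProvider` is publicly reachable at
    /// `openai_tools::common::auth` and builds it from the environment, but any
    /// other way of constructing an `AuthProvider` works the same.
    ///
    /// ```rust,no_run
    /// use openai_tools::common::auth::AuthProvider;
    /// use openai_tools::models::request::Models;
    ///
    /// // Build an auth provider from the environment (OPENAI_API_KEY).
    /// let auth = AuthProvider::openai_from_env().expect("OPENAI_API_KEY should be set");
    /// let models = Models::with_auth(auth);
    /// ```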
    pub fn with_auth(auth: AuthProvider) -> Self {
        Self { auth, timeout: None }
    }

    /// Creates a new Models client for Azure OpenAI API
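    ///
    /// # Example
    ///
    /// A minimal sketch; it assumes the Azure OpenAI credentials expected by
    /// `AuthProvider::azure_from_env` are already present in the environment.
    ///
    /// ```rust,no_run
    /// use openai_tools::models::request::Models;
    ///
    /// let models = Models::azure().expect("Azure OpenAI credentials should be set");
    /// ```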
    pub fn azure() -> Result<Self> {
        let auth = AuthProvider::azure_from_env()?;
        Ok(Self { auth, timeout: None })
    }

    /// Creates a new Models client by auto-detecting the provider
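    ///
    /// # Example
    ///
    /// A minimal sketch; it assumes either OpenAI or Azure credentials are set in
    /// the environment so `AuthProvider::from_env` can pick the provider.
    ///
    /// ```rust,no_run
    /// use openai_tools::models::request::Models;
    ///
    /// let models = Models::detect_provider().expect("provider credentials should be set");
    /// ```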
    pub fn detect_provider() -> Result<Self> {
        let auth = AuthProvider::from_env()?;
        Ok(Self { auth, timeout: None })
    }

    /// Creates a new Models client with URL-based provider detection
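    ///
    /// # Example
    ///
    /// A minimal sketch; the base URL below is illustrative and the API key is a
    /// placeholder, not a working credential.
    ///
    /// ```rust,no_run
    /// use openai_tools::models::request::Models;
    ///
    /// let models = Models::with_url("https://api.openai.com/v1", "sk-example-key");
    /// ```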
    pub fn with_url<S: Into<String>>(base_url: S, api_key: S) -> Self {
        let auth = AuthProvider::from_url_with_key(base_url, api_key);
        Self { auth, timeout: None }
    }

    /// Creates a new Models client from a URL, resolving credentials from environment variables.
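    ///
    /// # Example
    ///
    /// A minimal sketch; it assumes the matching API key is available from the
    /// environment, as `AuthProvider::from_url` expects, and the URL is illustrative.
    ///
    /// ```rust,no_run
    /// use openai_tools::models::request::Models;
    ///
    /// let models = Models::from_url("https://api.openai.com/v1")
    ///     .expect("API key should be set in the environment");
    /// ```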
    pub fn from_url<S: Into<String>>(url: S) -> Result<Self> {
        let auth = AuthProvider::from_url(url)?;
        Ok(Self { auth, timeout: None })
    }

    /// Returns the authentication provider
    pub fn auth(&self) -> &AuthProvider {
        &self.auth
    }

    /// Sets the request timeout duration.
    ///
    /// # Arguments
    ///
    /// * `timeout` - The maximum time to wait for a response
    ///
    /// # Returns
    ///
    /// A mutable reference to self for method chaining
    ///
    /// # Example
    ///
    /// ```rust,no_run
    /// use std::time::Duration;
    /// use openai_tools::models::request::Models;
    ///
    /// let mut models = Models::new().unwrap();
    /// models.timeout(Duration::from_secs(30));
    /// ```
    pub fn timeout(&mut self, timeout: Duration) -> &mut Self {
        self.timeout = Some(timeout);
        self
    }

    /// Creates the HTTP client with default headers.
    fn create_client(&self) -> Result<(reqwest::Client, reqwest::header::HeaderMap)> {
        let client = create_http_client(self.timeout)?;
        let mut headers = reqwest::header::HeaderMap::new();
        self.auth.apply_headers(&mut headers)?;
        headers.insert("User-Agent", reqwest::header::HeaderValue::from_static("openai-tools-rust"));
        Ok((client, headers))
    }

    /// Lists all available models.
    ///
    /// Returns a list of models that are currently available in the OpenAI API.
    ///
    /// # Returns
    ///
    /// * `Ok(ModelsListResponse)` - The list of available models
    /// * `Err(OpenAIToolError)` - If the request fails or response parsing fails
    ///
    /// # Example
    ///
    /// ```rust,no_run
    /// use openai_tools::models::request::Models;
    ///
    /// #[tokio::main]
    /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
    ///     let models = Models::new()?;
    ///     let response = models.list().await?;
    ///
    ///     println!("Found {} models", response.data.len());
    ///     for model in &response.data {
    ///         println!("- {}", model.id);
    ///     }
    ///     Ok(())
    /// }
    /// ```
    pub async fn list(&self) -> Result<ModelsListResponse> {
        let (client, headers) = self.create_client()?;

        let url = self.auth.endpoint(MODELS_PATH);
        let response = client.get(&url).headers(headers).send().await.map_err(OpenAIToolError::RequestError)?;

        let status = response.status();
        let content = response.text().await.map_err(OpenAIToolError::RequestError)?;

        if cfg!(test) {
            tracing::info!("Response content: {}", content);
        }

        if !status.is_success() {
            if let Ok(error_resp) = serde_json::from_str::<ErrorResponse>(&content) {
                return Err(OpenAIToolError::Error(error_resp.error.message.unwrap_or_default()));
            }
            return Err(OpenAIToolError::Error(format!("API error ({}): {}", status, content)));
        }

        serde_json::from_str::<ModelsListResponse>(&content).map_err(OpenAIToolError::SerdeJsonError)
    }

    /// Retrieves details of a specific model.
    ///
    /// Gets information about a model by its ID, including when it was created
    /// and who owns it.
    ///
    /// # Arguments
    ///
    /// * `model_id` - The ID of the model to retrieve (e.g., "gpt-4o-mini")
    ///
    /// # Returns
    ///
    /// * `Ok(Model)` - The model details
    /// * `Err(OpenAIToolError)` - If the model is not found or the request fails
    ///
    /// # Example
    ///
    /// ```rust,no_run
    /// use openai_tools::models::request::Models;
    ///
    /// #[tokio::main]
    /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
    ///     let models = Models::new()?;
    ///     let model = models.retrieve("gpt-4o-mini").await?;
    ///
    ///     println!("Model: {}", model.id);
    ///     println!("Owned by: {}", model.owned_by);
    ///     println!("Created: {}", model.created);
    ///     Ok(())
    /// }
    /// ```
    pub async fn retrieve(&self, model_id: &str) -> Result<Model> {
        let (client, headers) = self.create_client()?;
        let url = format!("{}/{}", self.auth.endpoint(MODELS_PATH), model_id);

        let response = client.get(&url).headers(headers).send().await.map_err(OpenAIToolError::RequestError)?;

        let status = response.status();
        let content = response.text().await.map_err(OpenAIToolError::RequestError)?;

        if cfg!(test) {
            tracing::info!("Response content: {}", content);
        }

        if !status.is_success() {
            if let Ok(error_resp) = serde_json::from_str::<ErrorResponse>(&content) {
                return Err(OpenAIToolError::Error(error_resp.error.message.unwrap_or_default()));
            }
            return Err(OpenAIToolError::Error(format!("API error ({}): {}", status, content)));
        }

        serde_json::from_str::<Model>(&content).map_err(OpenAIToolError::SerdeJsonError)
    }

    /// Deletes a fine-tuned model.
    ///
    /// You must have the Owner role in your organization, or otherwise be permitted to
    /// delete models. This only works for fine-tuned models that you have created.
    ///
    /// # Arguments
    ///
    /// * `model_id` - The ID of the fine-tuned model to delete
    ///
    /// # Returns
    ///
    /// * `Ok(DeleteResponse)` - Confirmation of deletion
    /// * `Err(OpenAIToolError)` - If the model cannot be deleted or the request fails
    ///
    /// # Example
    ///
    /// ```rust,no_run
    /// use openai_tools::models::request::Models;
    ///
    /// #[tokio::main]
    /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
    ///     let models = Models::new()?;
    ///
    ///     // Delete a fine-tuned model
    ///     let result = models.delete("ft:gpt-4o-mini:my-org:my-model:abc123").await?;
    ///     if result.deleted {
    ///         println!("Model {} was deleted", result.id);
    ///     }
    ///     Ok(())
    /// }
    /// ```
    pub async fn delete(&self, model_id: &str) -> Result<DeleteResponse> {
        let (client, headers) = self.create_client()?;
        let url = format!("{}/{}", self.auth.endpoint(MODELS_PATH), model_id);

        let response = client.delete(&url).headers(headers).send().await.map_err(OpenAIToolError::RequestError)?;

        let status = response.status();
        let content = response.text().await.map_err(OpenAIToolError::RequestError)?;

        if cfg!(test) {
            tracing::info!("Response content: {}", content);
        }

        if !status.is_success() {
            if let Ok(error_resp) = serde_json::from_str::<ErrorResponse>(&content) {
                return Err(OpenAIToolError::Error(error_resp.error.message.unwrap_or_default()));
            }
            return Err(OpenAIToolError::Error(format!("API error ({}): {}", status, content)));
        }

        serde_json::from_str::<DeleteResponse>(&content).map_err(OpenAIToolError::SerdeJsonError)
    }
}