// openai_tools/conversations/request.rs
1//! OpenAI Conversations API Request Module
2//!
3//! This module provides the functionality to interact with the OpenAI Conversations API.
4//! The Conversations API allows you to create and manage long-running conversations
5//! with the Responses API.
6//!
7//! # Key Features
8//!
9//! - **Create Conversations**: Create new conversations with optional metadata and items
10//! - **Retrieve Conversations**: Get details of a specific conversation
11//! - **Update Conversations**: Modify conversation metadata
12//! - **Delete Conversations**: Remove conversations
13//! - **Manage Items**: Add and list conversation items
14//!
15//! # Quick Start
16//!
17//! ```rust,no_run
18//! use openai_tools::conversations::request::Conversations;
19//! use openai_tools::conversations::response::InputItem;
20//! use std::collections::HashMap;
21//!
22//! #[tokio::main]
23//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
24//! let conversations = Conversations::new()?;
25//!
26//! // Create a new conversation
27//! let mut metadata = HashMap::new();
28//! metadata.insert("topic".to_string(), "demo".to_string());
29//!
30//! let conversation = conversations.create(Some(metadata), None).await?;
31//! println!("Created conversation: {}", conversation.id);
32//!
33//! // Add items to the conversation
34//! let items = vec![InputItem::user_message("Hello!")];
35//! let added_items = conversations.create_items(&conversation.id, items).await?;
36//!
37//! Ok(())
38//! }
39//! ```
40
41use crate::common::auth::AuthProvider;
42use crate::common::client::create_http_client;
43use crate::common::errors::{ErrorResponse, OpenAIToolError, Result};
44use crate::conversations::response::{Conversation, ConversationItemListResponse, ConversationListResponse, DeleteConversationResponse, InputItem};
45use serde::{Deserialize, Serialize};
46use std::collections::HashMap;
47use std::time::Duration;
48
/// Default API path segment for the Conversations endpoints; appended to the
/// provider base URL via `AuthProvider::endpoint`.
const CONVERSATIONS_PATH: &str = "conversations";
51
52/// Specifies additional data to include in conversation item responses.
53///
54/// This enum defines various types of additional information that can be
55/// included when listing conversation items.
56#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
57pub enum ConversationInclude {
58 /// Include web search call action sources
59 #[serde(rename = "web_search_call.action.sources")]
60 WebSearchCallSources,
61 /// Include code interpreter call outputs
62 #[serde(rename = "code_interpreter_call.outputs")]
63 CodeInterpreterCallOutputs,
64 /// Include file search call results
65 #[serde(rename = "file_search_call.results")]
66 FileSearchCallResults,
67 /// Include image URLs from input messages
68 #[serde(rename = "message.input_image.image_url")]
69 MessageInputImageUrl,
70 /// Include encrypted reasoning content
71 #[serde(rename = "reasoning.encrypted_content")]
72 ReasoningEncryptedContent,
73}
74
75impl ConversationInclude {
76 /// Returns the string representation for API requests.
77 pub fn as_str(&self) -> &'static str {
78 match self {
79 ConversationInclude::WebSearchCallSources => "web_search_call.action.sources",
80 ConversationInclude::CodeInterpreterCallOutputs => "code_interpreter_call.outputs",
81 ConversationInclude::FileSearchCallResults => "file_search_call.results",
82 ConversationInclude::MessageInputImageUrl => "message.input_image.image_url",
83 ConversationInclude::ReasoningEncryptedContent => "reasoning.encrypted_content",
84 }
85 }
86}
87
/// Request body for creating a conversation.
///
/// Both fields are optional; `None` fields are omitted from the serialized
/// JSON entirely rather than sent as `null`.
#[derive(Debug, Clone, Serialize)]
struct CreateConversationRequest {
    // Optional key-value metadata to attach to the new conversation.
    #[serde(skip_serializing_if = "Option::is_none")]
    metadata: Option<HashMap<String, String>>,
    // Optional initial items to seed the conversation with.
    #[serde(skip_serializing_if = "Option::is_none")]
    items: Option<Vec<InputItem>>,
}
96
/// Request body for updating a conversation.
///
/// Unlike creation, the metadata here is required — an update always carries
/// a metadata map.
#[derive(Debug, Clone, Serialize)]
struct UpdateConversationRequest {
    // The metadata map to store on the conversation.
    metadata: HashMap<String, String>,
}
102
/// Request body for creating conversation items.
#[derive(Debug, Clone, Serialize)]
struct CreateItemsRequest {
    // Items to append to the conversation (API limit: up to 20 per call).
    items: Vec<InputItem>,
}
108
/// Client for interacting with the OpenAI Conversations API.
///
/// This struct provides methods to create, retrieve, update, delete conversations,
/// and manage conversation items. Use [`Conversations::new()`] to create a new instance,
/// or one of the alternative constructors ([`Conversations::azure()`],
/// [`Conversations::with_auth()`], etc.) for other providers.
///
/// # Example
///
/// ```rust,no_run
/// use openai_tools::conversations::request::Conversations;
/// use std::collections::HashMap;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn std::error::Error>> {
///     let conversations = Conversations::new()?;
///
///     // Create a conversation with metadata
///     let mut metadata = HashMap::new();
///     metadata.insert("user_id".to_string(), "user123".to_string());
///
///     let conv = conversations.create(Some(metadata), None).await?;
///     println!("Created: {}", conv.id);
///
///     // Retrieve the conversation
///     let retrieved = conversations.retrieve(&conv.id).await?;
///     println!("Retrieved: {:?}", retrieved.metadata);
///
///     Ok(())
/// }
/// ```
pub struct Conversations {
    /// Authentication provider (OpenAI or Azure)
    auth: AuthProvider,
    /// Optional request timeout duration; applied to each HTTP client built per request
    timeout: Option<Duration>,
}
144
145impl Conversations {
146 /// Creates a new Conversations client for OpenAI API.
147 ///
148 /// Initializes the client by loading the OpenAI API key from
149 /// the environment variable `OPENAI_API_KEY`. Supports `.env` file loading
150 /// via dotenvy.
151 ///
152 /// # Returns
153 ///
154 /// * `Ok(Conversations)` - A new Conversations client ready for use
155 /// * `Err(OpenAIToolError)` - If the API key is not found in the environment
156 ///
157 /// # Example
158 ///
159 /// ```rust,no_run
160 /// use openai_tools::conversations::request::Conversations;
161 ///
162 /// let conversations = Conversations::new().expect("API key should be set");
163 /// ```
164 pub fn new() -> Result<Self> {
165 let auth = AuthProvider::openai_from_env()?;
166 Ok(Self { auth, timeout: None })
167 }
168
    /// Creates a new Conversations client with a custom authentication provider.
    ///
    /// Use this when you have already constructed an [`AuthProvider`]
    /// (e.g. with explicit credentials) instead of reading the environment.
    ///
    /// # Arguments
    ///
    /// * `auth` - The authentication provider (OpenAI or Azure) to use for all requests
    pub fn with_auth(auth: AuthProvider) -> Self {
        Self { auth, timeout: None }
    }
173
    /// Creates a new Conversations client for Azure OpenAI API.
    ///
    /// Loads credentials via [`AuthProvider::azure_from_env`].
    ///
    /// # Returns
    ///
    /// * `Ok(Conversations)` - A client configured for Azure OpenAI
    /// * `Err(OpenAIToolError)` - If the Azure configuration cannot be loaded from the environment
    pub fn azure() -> Result<Self> {
        let auth = AuthProvider::azure_from_env()?;
        Ok(Self { auth, timeout: None })
    }
179
    /// Creates a new Conversations client by auto-detecting the provider.
    ///
    /// Delegates detection to [`AuthProvider::from_env`], which inspects the
    /// environment to decide between providers.
    ///
    /// # Returns
    ///
    /// * `Ok(Conversations)` - A client for the detected provider
    /// * `Err(OpenAIToolError)` - If no usable configuration is found in the environment
    pub fn detect_provider() -> Result<Self> {
        let auth = AuthProvider::from_env()?;
        Ok(Self { auth, timeout: None })
    }
185
    /// Creates a new Conversations client with URL-based provider detection.
    ///
    /// # Arguments
    ///
    /// * `base_url` - The API base URL; the provider kind is inferred from it
    /// * `api_key` - The API key to authenticate with
    pub fn with_url<S: Into<String>>(base_url: S, api_key: S) -> Self {
        let auth = AuthProvider::from_url_with_key(base_url, api_key);
        Self { auth, timeout: None }
    }
191
    /// Creates a new Conversations client from a URL, taking credentials from
    /// environment variables.
    ///
    /// # Arguments
    ///
    /// * `url` - The API base URL; the provider kind is inferred from it
    ///
    /// # Returns
    ///
    /// * `Ok(Conversations)` - A client for the given URL
    /// * `Err(OpenAIToolError)` - If the matching credentials cannot be loaded from the environment
    pub fn from_url<S: Into<String>>(url: S) -> Result<Self> {
        let auth = AuthProvider::from_url(url)?;
        Ok(Self { auth, timeout: None })
    }
197
    /// Returns a reference to the configured authentication provider.
    pub fn auth(&self) -> &AuthProvider {
        &self.auth
    }
202
    /// Sets the request timeout duration.
    ///
    /// The timeout is passed to the HTTP client that is built for each
    /// subsequent request made by this instance.
    ///
    /// # Arguments
    ///
    /// * `timeout` - The maximum time to wait for a response
    ///
    /// # Returns
    ///
    /// A mutable reference to self for method chaining
    pub fn timeout(&mut self, timeout: Duration) -> &mut Self {
        self.timeout = Some(timeout);
        self
    }
216
217 /// Creates the HTTP client with default headers.
218 fn create_client(&self) -> Result<(request::Client, request::header::HeaderMap)> {
219 let client = create_http_client(self.timeout)?;
220 let mut headers = request::header::HeaderMap::new();
221 self.auth.apply_headers(&mut headers)?;
222 headers.insert("Content-Type", request::header::HeaderValue::from_static("application/json"));
223 headers.insert("User-Agent", request::header::HeaderValue::from_static("openai-tools-rust"));
224 Ok((client, headers))
225 }
226
227 /// Handles API error responses.
228 fn handle_error(status: request::StatusCode, content: &str) -> OpenAIToolError {
229 if let Ok(error_resp) = serde_json::from_str::<ErrorResponse>(content) {
230 OpenAIToolError::Error(error_resp.error.message.unwrap_or_default())
231 } else {
232 OpenAIToolError::Error(format!("API error ({}): {}", status, content))
233 }
234 }
235
236 /// Creates a new conversation.
237 ///
238 /// You can optionally provide metadata and initial items to include
239 /// in the conversation.
240 ///
241 /// # Arguments
242 ///
243 /// * `metadata` - Optional key-value pairs for storing additional information
244 /// * `items` - Optional initial items to add to the conversation (up to 20 items)
245 ///
246 /// # Returns
247 ///
248 /// * `Ok(Conversation)` - The created conversation object
249 /// * `Err(OpenAIToolError)` - If the request fails
250 ///
251 /// # Example
252 ///
253 /// ```rust,no_run
254 /// use openai_tools::conversations::request::Conversations;
255 /// use openai_tools::conversations::response::InputItem;
256 /// use std::collections::HashMap;
257 ///
258 /// #[tokio::main]
259 /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
260 /// let conversations = Conversations::new()?;
261 ///
262 /// // Create with metadata and initial message
263 /// let mut metadata = HashMap::new();
264 /// metadata.insert("topic".to_string(), "greeting".to_string());
265 ///
266 /// let items = vec![InputItem::user_message("Hello!")];
267 ///
268 /// let conv = conversations.create(Some(metadata), Some(items)).await?;
269 /// println!("Created conversation: {}", conv.id);
270 /// Ok(())
271 /// }
272 /// ```
273 pub async fn create(&self, metadata: Option<HashMap<String, String>>, items: Option<Vec<InputItem>>) -> Result<Conversation> {
274 let (client, headers) = self.create_client()?;
275
276 let request_body = CreateConversationRequest { metadata, items };
277 let body = serde_json::to_string(&request_body)?;
278
279 let url = self.auth.endpoint(CONVERSATIONS_PATH);
280 let response = client.post(&url).headers(headers).body(body).send().await.map_err(OpenAIToolError::RequestError)?;
281
282 let status = response.status();
283 let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
284
285 if cfg!(test) {
286 tracing::info!("Response content: {}", content);
287 }
288
289 if !status.is_success() {
290 return Err(Self::handle_error(status, &content));
291 }
292
293 serde_json::from_str::<Conversation>(&content).map_err(OpenAIToolError::SerdeJsonError)
294 }
295
296 /// Retrieves a specific conversation.
297 ///
298 /// # Arguments
299 ///
300 /// * `conversation_id` - The ID of the conversation to retrieve
301 ///
302 /// # Returns
303 ///
304 /// * `Ok(Conversation)` - The conversation object
305 /// * `Err(OpenAIToolError)` - If the conversation is not found or the request fails
306 ///
307 /// # Example
308 ///
309 /// ```rust,no_run
310 /// use openai_tools::conversations::request::Conversations;
311 ///
312 /// #[tokio::main]
313 /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
314 /// let conversations = Conversations::new()?;
315 /// let conv = conversations.retrieve("conv_abc123").await?;
316 ///
317 /// println!("Conversation: {}", conv.id);
318 /// println!("Created at: {}", conv.created_at);
319 /// Ok(())
320 /// }
321 /// ```
322 pub async fn retrieve(&self, conversation_id: &str) -> Result<Conversation> {
323 let (client, headers) = self.create_client()?;
324 let url = format!("{}/{}", self.auth.endpoint(CONVERSATIONS_PATH), conversation_id);
325
326 let response = client.get(&url).headers(headers).send().await.map_err(OpenAIToolError::RequestError)?;
327
328 let status = response.status();
329 let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
330
331 if cfg!(test) {
332 tracing::info!("Response content: {}", content);
333 }
334
335 if !status.is_success() {
336 return Err(Self::handle_error(status, &content));
337 }
338
339 serde_json::from_str::<Conversation>(&content).map_err(OpenAIToolError::SerdeJsonError)
340 }
341
342 /// Updates a conversation's metadata.
343 ///
344 /// # Arguments
345 ///
346 /// * `conversation_id` - The ID of the conversation to update
347 /// * `metadata` - The new metadata to set
348 ///
349 /// # Returns
350 ///
351 /// * `Ok(Conversation)` - The updated conversation object
352 /// * `Err(OpenAIToolError)` - If the request fails
353 ///
354 /// # Example
355 ///
356 /// ```rust,no_run
357 /// use openai_tools::conversations::request::Conversations;
358 /// use std::collections::HashMap;
359 ///
360 /// #[tokio::main]
361 /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
362 /// let conversations = Conversations::new()?;
363 ///
364 /// let mut metadata = HashMap::new();
365 /// metadata.insert("topic".to_string(), "updated-topic".to_string());
366 ///
367 /// let conv = conversations.update("conv_abc123", metadata).await?;
368 /// println!("Updated: {:?}", conv.metadata);
369 /// Ok(())
370 /// }
371 /// ```
372 pub async fn update(&self, conversation_id: &str, metadata: HashMap<String, String>) -> Result<Conversation> {
373 let (client, headers) = self.create_client()?;
374 let url = format!("{}/{}", self.auth.endpoint(CONVERSATIONS_PATH), conversation_id);
375
376 let request_body = UpdateConversationRequest { metadata };
377 let body = serde_json::to_string(&request_body)?;
378
379 let response = client.post(&url).headers(headers).body(body).send().await.map_err(OpenAIToolError::RequestError)?;
380
381 let status = response.status();
382 let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
383
384 if cfg!(test) {
385 tracing::info!("Response content: {}", content);
386 }
387
388 if !status.is_success() {
389 return Err(Self::handle_error(status, &content));
390 }
391
392 serde_json::from_str::<Conversation>(&content).map_err(OpenAIToolError::SerdeJsonError)
393 }
394
395 /// Deletes a conversation.
396 ///
397 /// # Arguments
398 ///
399 /// * `conversation_id` - The ID of the conversation to delete
400 ///
401 /// # Returns
402 ///
403 /// * `Ok(DeleteConversationResponse)` - Confirmation of deletion
404 /// * `Err(OpenAIToolError)` - If the request fails
405 ///
406 /// # Example
407 ///
408 /// ```rust,no_run
409 /// use openai_tools::conversations::request::Conversations;
410 ///
411 /// #[tokio::main]
412 /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
413 /// let conversations = Conversations::new()?;
414 /// let result = conversations.delete("conv_abc123").await?;
415 ///
416 /// if result.deleted {
417 /// println!("Conversation {} was deleted", result.id);
418 /// }
419 /// Ok(())
420 /// }
421 /// ```
422 pub async fn delete(&self, conversation_id: &str) -> Result<DeleteConversationResponse> {
423 let (client, headers) = self.create_client()?;
424 let url = format!("{}/{}", self.auth.endpoint(CONVERSATIONS_PATH), conversation_id);
425
426 let response = client.delete(&url).headers(headers).send().await.map_err(OpenAIToolError::RequestError)?;
427
428 let status = response.status();
429 let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
430
431 if cfg!(test) {
432 tracing::info!("Response content: {}", content);
433 }
434
435 if !status.is_success() {
436 return Err(Self::handle_error(status, &content));
437 }
438
439 serde_json::from_str::<DeleteConversationResponse>(&content).map_err(OpenAIToolError::SerdeJsonError)
440 }
441
442 /// Creates items in a conversation.
443 ///
444 /// You can add up to 20 items at a time.
445 ///
446 /// # Arguments
447 ///
448 /// * `conversation_id` - The ID of the conversation
449 /// * `items` - The items to add to the conversation
450 ///
451 /// # Returns
452 ///
453 /// * `Ok(ConversationItemListResponse)` - The created items
454 /// * `Err(OpenAIToolError)` - If the request fails
455 ///
456 /// # Example
457 ///
458 /// ```rust,no_run
459 /// use openai_tools::conversations::request::Conversations;
460 /// use openai_tools::conversations::response::InputItem;
461 ///
462 /// #[tokio::main]
463 /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
464 /// let conversations = Conversations::new()?;
465 ///
466 /// let items = vec![
467 /// InputItem::user_message("What is the weather like?"),
468 /// InputItem::assistant_message("I'd be happy to help with weather information!"),
469 /// ];
470 ///
471 /// let result = conversations.create_items("conv_abc123", items).await?;
472 /// println!("Added {} items", result.data.len());
473 /// Ok(())
474 /// }
475 /// ```
476 pub async fn create_items(&self, conversation_id: &str, items: Vec<InputItem>) -> Result<ConversationItemListResponse> {
477 let (client, headers) = self.create_client()?;
478 let url = format!("{}/{}/items", self.auth.endpoint(CONVERSATIONS_PATH), conversation_id);
479
480 let request_body = CreateItemsRequest { items };
481 let body = serde_json::to_string(&request_body)?;
482
483 let response = client.post(&url).headers(headers).body(body).send().await.map_err(OpenAIToolError::RequestError)?;
484
485 let status = response.status();
486 let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
487
488 if cfg!(test) {
489 tracing::info!("Response content: {}", content);
490 }
491
492 if !status.is_success() {
493 return Err(Self::handle_error(status, &content));
494 }
495
496 serde_json::from_str::<ConversationItemListResponse>(&content).map_err(OpenAIToolError::SerdeJsonError)
497 }
498
499 /// Lists items in a conversation.
500 ///
501 /// # Arguments
502 ///
503 /// * `conversation_id` - The ID of the conversation
504 /// * `limit` - Maximum number of items to return (1-100, default 20)
505 /// * `after` - Cursor for pagination (item ID to start after)
506 /// * `order` - Sort order ("asc" or "desc", default "desc")
507 /// * `include` - Additional data to include in the response
508 ///
509 /// # Returns
510 ///
511 /// * `Ok(ConversationItemListResponse)` - The list of items
512 /// * `Err(OpenAIToolError)` - If the request fails
513 ///
514 /// # Example
515 ///
516 /// ```rust,no_run
517 /// use openai_tools::conversations::request::{Conversations, ConversationInclude};
518 ///
519 /// #[tokio::main]
520 /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
521 /// let conversations = Conversations::new()?;
522 ///
523 /// // List items with pagination
524 /// let items = conversations.list_items(
525 /// "conv_abc123",
526 /// Some(20),
527 /// None,
528 /// Some("desc"),
529 /// None,
530 /// ).await?;
531 ///
532 /// for item in &items.data {
533 /// println!("Item: {} ({})", item.id, item.item_type);
534 /// }
535 /// Ok(())
536 /// }
537 /// ```
538 pub async fn list_items(
539 &self,
540 conversation_id: &str,
541 limit: Option<u32>,
542 after: Option<&str>,
543 order: Option<&str>,
544 include: Option<Vec<ConversationInclude>>,
545 ) -> Result<ConversationItemListResponse> {
546 let (client, headers) = self.create_client()?;
547
548 // Build query parameters
549 let mut params = Vec::new();
550 if let Some(l) = limit {
551 params.push(format!("limit={}", l));
552 }
553 if let Some(a) = after {
554 params.push(format!("after={}", a));
555 }
556 if let Some(o) = order {
557 params.push(format!("order={}", o));
558 }
559 if let Some(inc) = include {
560 for i in inc {
561 params.push(format!("include[]={}", i.as_str()));
562 }
563 }
564
565 let url = if params.is_empty() {
566 format!("{}/{}/items", self.auth.endpoint(CONVERSATIONS_PATH), conversation_id)
567 } else {
568 format!("{}/{}/items?{}", self.auth.endpoint(CONVERSATIONS_PATH), conversation_id, params.join("&"))
569 };
570
571 let response = client.get(&url).headers(headers).send().await.map_err(OpenAIToolError::RequestError)?;
572
573 let status = response.status();
574 let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
575
576 if cfg!(test) {
577 tracing::info!("Response content: {}", content);
578 }
579
580 if !status.is_success() {
581 return Err(Self::handle_error(status, &content));
582 }
583
584 serde_json::from_str::<ConversationItemListResponse>(&content).map_err(OpenAIToolError::SerdeJsonError)
585 }
586
587 /// Lists all conversations (if available).
588 ///
589 /// Note: This endpoint may not be available in all API versions.
590 ///
591 /// # Arguments
592 ///
593 /// * `limit` - Maximum number of conversations to return (1-100, default 20)
594 /// * `after` - Cursor for pagination (conversation ID to start after)
595 ///
596 /// # Returns
597 ///
598 /// * `Ok(ConversationListResponse)` - The list of conversations
599 /// * `Err(OpenAIToolError)` - If the request fails
600 ///
601 /// # Example
602 ///
603 /// ```rust,no_run
604 /// use openai_tools::conversations::request::Conversations;
605 ///
606 /// #[tokio::main]
607 /// async fn main() -> Result<(), Box<dyn std::error::Error>> {
608 /// let conversations = Conversations::new()?;
609 ///
610 /// let response = conversations.list(Some(10), None).await?;
611 /// for conv in &response.data {
612 /// println!("Conversation: {} (created: {})", conv.id, conv.created_at);
613 /// }
614 /// Ok(())
615 /// }
616 /// ```
617 pub async fn list(&self, limit: Option<u32>, after: Option<&str>) -> Result<ConversationListResponse> {
618 let (client, headers) = self.create_client()?;
619
620 // Build query parameters
621 let mut params = Vec::new();
622 if let Some(l) = limit {
623 params.push(format!("limit={}", l));
624 }
625 if let Some(a) = after {
626 params.push(format!("after={}", a));
627 }
628
629 let url = if params.is_empty() {
630 self.auth.endpoint(CONVERSATIONS_PATH)
631 } else {
632 format!("{}?{}", self.auth.endpoint(CONVERSATIONS_PATH), params.join("&"))
633 };
634
635 let response = client.get(&url).headers(headers).send().await.map_err(OpenAIToolError::RequestError)?;
636
637 let status = response.status();
638 let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
639
640 if cfg!(test) {
641 tracing::info!("Response content: {}", content);
642 }
643
644 if !status.is_success() {
645 return Err(Self::handle_error(status, &content));
646 }
647
648 serde_json::from_str::<ConversationListResponse>(&content).map_err(OpenAIToolError::SerdeJsonError)
649 }
650}