openai_ergonomic/responses/
mod.rs

1//! Response type wrappers and ergonomic helpers.
2//!
3//! This module provides ergonomic wrappers around `OpenAI` API responses with
4//! convenient methods for common operations. The responses-first approach
5//! makes it easy to work with structured outputs and tool calling.
6//!
7//! # Example
8//!
9//! ```rust,ignore
10//! # use openai_ergonomic::responses::*;
11//! let response = client.responses()
12//!     .model("gpt-4")
13//!     .user("What is the weather?")
14//!     .tool(tool_web_search())
15//!     .send()
16//!     .await?;
17//!
18//! // Access response content
19//! if let Some(content) = response.content() {
20//!     println!("{}", content);
21//! }
22//!
23//! // Handle tool calls
24//! for tool_call in response.tool_calls() {
25//!     println!("Tool: {} Args: {}", tool_call.name(), tool_call.arguments());
26//! }
27//! ```
28
29use openai_client_base::models::{
30    AssistantsNamedToolChoiceFunction, ChatCompletionTool, ChatCompletionToolChoiceOption,
31    CompletionUsage, CreateChatCompletionResponse, CreateChatCompletionStreamResponse,
32    FunctionObject,
33};
34
35pub mod assistants;
36pub mod audio;
37pub mod batch;
38pub mod chat;
39pub mod embeddings;
40pub mod files;
41pub mod fine_tuning;
42pub mod images;
43pub mod moderations;
44pub mod threads;
45pub mod uploads;
46pub mod vector_stores;
47
48// Re-export response types for convenience
49// NOTE: Re-exports will be enabled as modules are implemented
50// pub use assistants::*;
51// pub use audio::*;
52// pub use batch::*;
53pub use chat::*; // Has implementation
54                 // pub use embeddings::*;
55                 // pub use files::*;
56                 // pub use fine_tuning::*;
57                 // pub use images::*;
58                 // pub use moderations::*;
59                 // pub use threads::*;
60                 // pub use uploads::*;
61                 // pub use vector_stores::*;
62
/// Common trait for all response types to provide consistent access patterns.
///
/// Implemented by wrapper types such as `ChatCompletionResponseWrapper` so
/// callers can read identity, model, and usage information without knowing
/// the concrete response shape. All accessors return `Option` because not
/// every API response carries every field.
pub trait Response {
    /// Get the unique identifier for this response, if available.
    fn id(&self) -> Option<&str>;

    /// Get the model that generated this response, if available.
    fn model(&self) -> Option<&str>;

    /// Get any usage information from the response, if available.
    fn usage(&self) -> Option<&CompletionUsage>;
}
74
/// Wrapper for chat completion responses with ergonomic helpers.
///
/// Wraps a `CreateChatCompletionResponse` and exposes convenience accessors
/// for the first choice's content, tool calls, refusal state, and finish
/// reason, plus an optional link-building base URL.
#[derive(Debug, Clone)]
pub struct ChatCompletionResponseWrapper {
    // The raw response returned by openai-client-base.
    inner: CreateChatCompletionResponse,
    // Optional base URL consumed by `url()` to build a link to this response.
    base_url: Option<String>,
}
81
82impl ChatCompletionResponseWrapper {
83    /// Create a new response wrapper.
84    pub fn new(response: CreateChatCompletionResponse) -> Self {
85        Self {
86            inner: response,
87            base_url: None,
88        }
89    }
90
91    /// Create a response wrapper with a base URL for generating links.
92    pub fn with_base_url(response: CreateChatCompletionResponse, base_url: String) -> Self {
93        Self {
94            inner: response,
95            base_url: Some(base_url),
96        }
97    }
98
99    /// Get the first message content from the response.
100    pub fn content(&self) -> Option<&str> {
101        self.inner.choices.first()?.message.content.as_deref()
102    }
103
104    /// Get all choices from the response.
105    pub fn choices(
106        &self,
107    ) -> &[openai_client_base::models::CreateChatCompletionResponseChoicesInner] {
108        &self.inner.choices
109    }
110
111    /// Get tool calls from the first choice, if any.
112    pub fn tool_calls(
113        &self,
114    ) -> Vec<&openai_client_base::models::ChatCompletionMessageToolCallsInner> {
115        self.inner
116            .choices
117            .first()
118            .and_then(|c| c.message.tool_calls.as_ref())
119            .map(|calls| calls.iter().collect())
120            .unwrap_or_default()
121    }
122
123    /// Check if the response was refused.
124    pub fn is_refusal(&self) -> bool {
125        self.inner
126            .choices
127            .first()
128            .and_then(|c| c.message.refusal.as_ref())
129            .is_some()
130    }
131
132    /// Get the refusal message if the response was refused.
133    pub fn refusal(&self) -> Option<&str> {
134        self.inner
135            .choices
136            .first()
137            .and_then(|c| c.message.refusal.as_ref())
138            .map(std::string::String::as_str)
139    }
140
141    /// Get the finish reason for the first choice.
142    pub fn finish_reason(&self) -> Option<String> {
143        use openai_client_base::models::create_chat_completion_response_choices_inner::FinishReason;
144        self.inner.choices.first().map(|c| match &c.finish_reason {
145            FinishReason::Stop => "stop".to_string(),
146            FinishReason::Length => "length".to_string(),
147            FinishReason::ToolCalls => "tool_calls".to_string(),
148            FinishReason::ContentFilter => "content_filter".to_string(),
149            FinishReason::FunctionCall => "function_call".to_string(),
150        })
151    }
152
153    /// Generate a URL for this response if `base_url` was provided.
154    pub fn url(&self) -> Option<String> {
155        self.base_url
156            .as_ref()
157            .map(|base| format!("{}/chat/{}", base, self.inner.id))
158    }
159
160    /// Get the inner response object.
161    pub fn inner(&self) -> &CreateChatCompletionResponse {
162        &self.inner
163    }
164}
165
166impl Response for ChatCompletionResponseWrapper {
167    fn id(&self) -> Option<&str> {
168        Some(&self.inner.id)
169    }
170
171    fn model(&self) -> Option<&str> {
172        Some(&self.inner.model)
173    }
174
175    fn usage(&self) -> Option<&CompletionUsage> {
176        self.inner.usage.as_deref()
177    }
178}
179
/// Wrapper for streaming chat completion responses.
///
/// Wraps a single `CreateChatCompletionStreamResponse` chunk and exposes
/// helpers for reading the incremental delta content and tool-call
/// fragments carried by that chunk.
#[derive(Debug, Clone)]
pub struct ChatCompletionStreamResponseWrapper {
    // One streamed chunk of the completion.
    inner: CreateChatCompletionStreamResponse,
}
185
186impl ChatCompletionStreamResponseWrapper {
187    /// Create a new stream response wrapper.
188    pub fn new(response: CreateChatCompletionStreamResponse) -> Self {
189        Self { inner: response }
190    }
191
192    /// Get the delta content from this chunk.
193    pub fn delta_content(&self) -> Option<&str> {
194        self.inner
195            .choices
196            .first()
197            .and_then(|c| c.delta.content.as_ref())
198            .and_then(|c| c.as_ref())
199            .map(std::string::String::as_str)
200    }
201
202    /// Get tool call deltas from this chunk.
203    pub fn delta_tool_calls(
204        &self,
205    ) -> Vec<&openai_client_base::models::ChatCompletionMessageToolCallChunk> {
206        self.inner
207            .choices
208            .first()
209            .and_then(|c| c.delta.tool_calls.as_ref())
210            .map(|calls| calls.iter().collect())
211            .unwrap_or_default()
212    }
213
214    /// Check if this is the final chunk.
215    pub fn is_finished(&self) -> bool {
216        use openai_client_base::models::create_chat_completion_stream_response_choices_inner::FinishReason;
217        self.inner.choices.first().is_none_or(|c| {
218            !matches!(
219                c.finish_reason,
220                FinishReason::Stop
221                    | FinishReason::Length
222                    | FinishReason::ToolCalls
223                    | FinishReason::ContentFilter
224                    | FinishReason::FunctionCall
225            )
226        })
227    }
228
229    /// Get the inner stream response object.
230    pub fn inner(&self) -> &CreateChatCompletionStreamResponse {
231        &self.inner
232    }
233}
234
235// Helper functions for creating tools
236
237/// Create a function tool definition.
238#[must_use]
239pub fn tool_function(
240    name: impl Into<String>,
241    description: impl Into<String>,
242    parameters: serde_json::Value,
243) -> ChatCompletionTool {
244    use std::collections::HashMap;
245
246    // Convert Value to HashMap<String, Value>
247    let params_map = if let serde_json::Value::Object(map) = parameters {
248        map.into_iter()
249            .collect::<HashMap<String, serde_json::Value>>()
250    } else {
251        HashMap::new()
252    };
253
254    ChatCompletionTool {
255        r#type: openai_client_base::models::chat_completion_tool::Type::Function,
256        function: Box::new(FunctionObject {
257            name: name.into(),
258            description: Some(description.into()),
259            parameters: Some(params_map),
260            strict: None,
261        }),
262    }
263}
264
265/// Create a web search tool definition.
266#[must_use]
267pub fn tool_web_search() -> ChatCompletionTool {
268    tool_function(
269        "web_search",
270        "Search the web for current information",
271        serde_json::json!({
272            "type": "object",
273            "properties": {
274                "query": {
275                    "type": "string",
276                    "description": "The search query"
277                }
278            },
279            "required": ["query"],
280            "additionalProperties": false
281        }),
282    )
283}
284
285/// Helper for creating tool choice options.
286pub struct ToolChoiceHelper;
287
288impl ToolChoiceHelper {
289    /// Let the model automatically decide whether to use tools.
290    pub fn auto() -> ChatCompletionToolChoiceOption {
291        use openai_client_base::models::chat_completion_tool_choice_option::ChatCompletionToolChoiceOptionAutoEnum;
292        ChatCompletionToolChoiceOption::Auto(ChatCompletionToolChoiceOptionAutoEnum::Auto)
293    }
294
295    /// Prevent the model from using any tools.
296    pub fn none() -> ChatCompletionToolChoiceOption {
297        use openai_client_base::models::chat_completion_tool_choice_option::ChatCompletionToolChoiceOptionAutoEnum;
298        ChatCompletionToolChoiceOption::Auto(ChatCompletionToolChoiceOptionAutoEnum::None)
299    }
300
301    /// Require the model to use a tool.
302    pub fn required() -> ChatCompletionToolChoiceOption {
303        use openai_client_base::models::chat_completion_tool_choice_option::ChatCompletionToolChoiceOptionAutoEnum;
304        ChatCompletionToolChoiceOption::Auto(ChatCompletionToolChoiceOptionAutoEnum::Required)
305    }
306
307    /// Require the model to use a specific tool.
308    pub fn specific(name: impl Into<String>) -> ChatCompletionToolChoiceOption {
309        ChatCompletionToolChoiceOption::Chatcompletionnamedtoolchoice(
310            openai_client_base::models::ChatCompletionNamedToolChoice {
311                r#type:
312                    openai_client_base::models::chat_completion_named_tool_choice::Type::Function,
313                function: Box::new(AssistantsNamedToolChoiceFunction { name: name.into() }),
314            },
315        )
316    }
317}
318
319/// Re-export commonly used types from openai-client-base for convenience
320pub use openai_client_base::models::{
321    ChatCompletionMessageToolCall as ToolCall,
322    ChatCompletionResponseMessageFunctionCall as FunctionCall, ChatCompletionTool as Tool,
323    ChatCompletionToolChoiceOption as ToolChoice, CompletionUsage as Usage,
324    CreateChatCompletionResponse as ChatResponse,
325    CreateChatCompletionStreamResponse as StreamResponse,
326};
327
/// Placeholder for the `ResponseBuilder` until client is ready.
///
/// NOTE(review): intentionally an empty unit struct; replace with the real
/// builder once the client surface is implemented.
#[derive(Debug, Clone)]
pub struct ResponseBuilder;
331
/// Placeholder for the Response struct.
///
/// NOTE(review): intentionally an empty unit struct; replace with the real
/// response type once the client surface is implemented.
#[derive(Debug, Clone)]
pub struct ResponsePlaceholder;