Skip to main content

rig/providers/openai/responses_api/
mod.rs

1//! The OpenAI Responses API.
2//!
3//! By default when creating a completion client, this is the API that gets used.
4//!
5//! If you'd like to switch back to the regular Completions API, you can do so by using the `.completions_api()` function - see below for an example:
6//! ```rust
7//! let openai_client = rig::providers::openai::Client::from_env();
8//! let model = openai_client.completion_model("gpt-4o").completions_api();
9//! ```
10use super::InputAudio;
11use super::completion::ToolChoice;
12use super::{Client, responses_api::streaming::StreamingCompletionResponse};
13use crate::completion::CompletionError;
14use crate::http_client;
15use crate::http_client::HttpClientExt;
16use crate::json_utils;
17use crate::message::{
18    AudioMediaType, Document, DocumentMediaType, DocumentSourceKind, ImageDetail, MessageError,
19    MimeType, Text,
20};
21use crate::one_or_many::string_or_one_or_many;
22
23use crate::wasm_compat::{WasmCompatSend, WasmCompatSync};
24use crate::{OneOrMany, completion, message};
25use serde::{Deserialize, Serialize};
26use serde_json::{Map, Value};
27use tracing::{Instrument, Level, enabled, info_span};
28
29use std::convert::Infallible;
30use std::ops::Add;
31use std::str::FromStr;
32
33pub mod streaming;
34#[cfg(not(target_family = "wasm"))]
35pub mod websocket;
36
37/// The completion request type for OpenAI's Response API: <https://platform.openai.com/docs/api-reference/responses/create>
38/// Intended to be derived from [`crate::completion::request::CompletionRequest`].
39#[derive(Debug, Deserialize, Serialize, Clone)]
40pub struct CompletionRequest {
41    /// Message inputs
42    pub input: OneOrMany<InputItem>,
43    /// The model name
44    pub model: String,
45    /// Instructions (also referred to as preamble, although in other APIs this would be the "system prompt")
46    #[serde(skip_serializing_if = "Option::is_none")]
47    pub instructions: Option<String>,
48    /// The maximum number of output tokens.
49    #[serde(skip_serializing_if = "Option::is_none")]
50    pub max_output_tokens: Option<u64>,
51    /// Toggle to true for streaming responses.
52    #[serde(skip_serializing_if = "Option::is_none")]
53    pub stream: Option<bool>,
54    /// The temperature. Set higher (up to a max of 1.0) for more creative responses.
55    #[serde(skip_serializing_if = "Option::is_none")]
56    pub temperature: Option<f64>,
57    /// Whether the LLM should be forced to use a tool before returning a response.
58    /// If none provided, the default option is "auto".
59    #[serde(skip_serializing_if = "Option::is_none")]
60    tool_choice: Option<ToolChoice>,
61    /// The tools you want to use. This supports both function tools and hosted tools
62    /// such as `web_search`, `file_search`, and `computer_use`.
63    #[serde(skip_serializing_if = "Vec::is_empty")]
64    pub tools: Vec<ResponsesToolDefinition>,
65    /// Additional parameters
66    #[serde(flatten)]
67    pub additional_parameters: AdditionalParameters,
68}
69
70impl CompletionRequest {
71    pub fn with_structured_outputs<S>(mut self, schema_name: S, schema: serde_json::Value) -> Self
72    where
73        S: Into<String>,
74    {
75        self.additional_parameters.text = Some(TextConfig::structured_output(schema_name, schema));
76
77        self
78    }
79
80    pub fn with_reasoning(mut self, reasoning: Reasoning) -> Self {
81        self.additional_parameters.reasoning = Some(reasoning);
82
83        self
84    }
85
86    /// Adds a provider-native hosted tool (e.g. `web_search`, `file_search`, `computer_use`)
87    /// to the request. These tools are executed by OpenAI's infrastructure, not by Rig's
88    /// agent loop.
89    pub fn with_tool(mut self, tool: impl Into<ResponsesToolDefinition>) -> Self {
90        self.tools.push(tool.into());
91        self
92    }
93
94    /// Adds multiple provider-native hosted tools to the request. These tools are executed
95    /// by OpenAI's infrastructure, not by Rig's agent loop.
96    pub fn with_tools<I, Tool>(mut self, tools: I) -> Self
97    where
98        I: IntoIterator<Item = Tool>,
99        Tool: Into<ResponsesToolDefinition>,
100    {
101        self.tools.extend(tools.into_iter().map(Into::into));
102        self
103    }
104}
105
/// An input item for [`CompletionRequest`].
#[derive(Debug, Deserialize, Clone)]
pub struct InputItem {
    /// The role of an input item/message.
    /// Input messages should be Some(Role::User), and output messages should be Some(Role::Assistant).
    /// Everything else should be None.
    // NOTE(review): `skip_serializing_if` has no effect here — this struct only derives
    // `Deserialize`; serialization goes through the manual `Serialize` impl for `InputItem`.
    #[serde(skip_serializing_if = "Option::is_none")]
    role: Option<Role>,
    /// The input content itself.
    /// Flattened so the content's own `"type"` tag and fields sit at the top level.
    #[serde(flatten)]
    input: InputContent,
}
118
119impl Serialize for InputItem {
120    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
121    where
122        S: serde::Serializer,
123    {
124        let mut value = serde_json::to_value(&self.input).map_err(serde::ser::Error::custom)?;
125        let map = value.as_object_mut().ok_or_else(|| {
126            serde::ser::Error::custom("Input content must serialize to an object")
127        })?;
128
129        if let Some(role) = &self.role
130            && !map.contains_key("role")
131        {
132            map.insert(
133                "role".to_string(),
134                serde_json::to_value(role).map_err(serde::ser::Error::custom)?,
135            );
136        }
137
138        value.serialize(serializer)
139    }
140}
141
142impl InputItem {
143    pub fn system_message(content: impl Into<String>) -> Self {
144        Self {
145            role: Some(Role::System),
146            input: InputContent::Message(Message::System {
147                content: OneOrMany::one(SystemContent::InputText {
148                    text: content.into(),
149                }),
150                name: None,
151            }),
152        }
153    }
154}
155
/// Message roles. Used by OpenAI Responses API to determine who created a given message.
/// Serialized in lowercase (`"user"`, `"assistant"`, `"system"`).
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    /// End-user input messages.
    User,
    /// Model-generated output messages.
    Assistant,
    /// System/preamble instruction messages.
    System,
}
164
/// The type of content used in an [`InputItem`]. Additionally holds data for each type of input content.
/// Serialized with a snake_case `"type"` tag (e.g. `"function_call_output"`).
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum InputContent {
    /// A user/assistant/system chat message.
    Message(Message),
    /// A reasoning item produced by the model.
    Reasoning(OpenAIReasoning),
    /// A function/tool call emitted by the model.
    FunctionCall(OutputFunctionCall),
    /// The result of a previously issued function call.
    FunctionCallOutput(ToolResult),
}
174
/// A reasoning item returned by (and echoed back to) the OpenAI Responses API.
#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)]
pub struct OpenAIReasoning {
    // OpenAI-generated item ID; required when replaying reasoning in a request
    // (see `openai_reasoning_from_core`, which errors when it is missing).
    id: String,
    /// Human-readable summary blocks of the model's reasoning.
    pub summary: Vec<ReasoningSummary>,
    /// Opaque reasoning payload (encrypted or redacted data).
    // NOTE(review): unlike `status`, this has no `skip_serializing_if`, so `None`
    // serializes as an explicit `null` — confirm the API accepts that.
    pub encrypted_content: Option<String>,
    /// Status of the reasoning item, when reported.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status: Option<ToolStatus>,
}
183
/// A single summary block within a reasoning item.
#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ReasoningSummary {
    /// Plain-text summary content (tagged `"summary_text"` on the wire).
    SummaryText { text: String },
}
189
190impl ReasoningSummary {
191    fn new(input: &str) -> Self {
192        Self::SummaryText {
193            text: input.to_string(),
194        }
195    }
196
197    pub fn text(&self) -> String {
198        let ReasoningSummary::SummaryText { text } = self;
199        text.clone()
200    }
201}
202
/// A tool result.
/// Fields are private; instances are built by the message-conversion impls in this module.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ToolResult {
    /// The call ID of a tool (this should be linked to the call ID for a tool call, otherwise an error will be received)
    call_id: String,
    /// The result of a tool call.
    output: String,
    /// The status of a tool call (if used in a completion request, this should always be Completed)
    status: ToolStatus,
}
213
214impl From<Message> for InputItem {
215    fn from(value: Message) -> Self {
216        match value {
217            Message::User { .. } => Self {
218                role: Some(Role::User),
219                input: InputContent::Message(value),
220            },
221            Message::Assistant { ref content, .. } => {
222                let role = if content
223                    .iter()
224                    .any(|x| matches!(x, AssistantContentType::Reasoning(_)))
225                {
226                    None
227                } else {
228                    Some(Role::Assistant)
229                };
230                Self {
231                    role,
232                    input: InputContent::Message(value),
233                }
234            }
235            Message::System { .. } => Self {
236                role: Some(Role::System),
237                input: InputContent::Message(value),
238            },
239            Message::ToolResult {
240                tool_call_id,
241                output,
242            } => Self {
243                role: None,
244                input: InputContent::FunctionCallOutput(ToolResult {
245                    call_id: tool_call_id,
246                    output,
247                    status: ToolStatus::Completed,
248                }),
249            },
250        }
251    }
252}
253
impl TryFrom<crate::completion::Message> for Vec<InputItem> {
    type Error = CompletionError;

    /// Converts a provider-agnostic Rig message into one or more Responses API
    /// input items. A single Rig message can fan out into several items (one per
    /// user content part, one per tool-result text block, etc.).
    fn try_from(value: crate::completion::Message) -> Result<Self, Self::Error> {
        match value {
            crate::completion::Message::System { content } => Ok(vec![InputItem {
                role: Some(Role::System),
                input: InputContent::Message(Message::System {
                    content: OneOrMany::one(content.into()),
                    name: None,
                }),
            }]),
            crate::completion::Message::User { content } => {
                let mut items = Vec::new();

                for user_content in content {
                    match user_content {
                        crate::message::UserContent::Text(Text { text }) => {
                            items.push(InputItem {
                                role: Some(Role::User),
                                input: InputContent::Message(Message::User {
                                    content: OneOrMany::one(UserContent::InputText { text }),
                                    name: None,
                                }),
                            });
                        }
                        // Tool results become `function_call_output` items, one per
                        // text part, all sharing the original call ID.
                        crate::message::UserContent::ToolResult(
                            crate::completion::message::ToolResult {
                                call_id,
                                content: tool_content,
                                ..
                            },
                        ) => {
                            for tool_result_content in tool_content {
                                let crate::completion::message::ToolResultContent::Text(Text {
                                    text,
                                }) = tool_result_content
                                else {
                                    return Err(CompletionError::ProviderError(
                                        "This thing only supports text!".to_string(),
                                    ));
                                };
                                items.push(InputItem {
                                    role: None,
                                    input: InputContent::FunctionCallOutput(ToolResult {
                                        call_id: require_call_id(call_id.clone(), "Tool result")?,
                                        output: text,
                                        status: ToolStatus::Completed,
                                    }),
                                });
                            }
                        }
                        // PDFs become `input_file` parts: base64 payloads are wrapped
                        // in a data URL, plain URLs pass through unchanged.
                        crate::message::UserContent::Document(Document {
                            data,
                            media_type: Some(DocumentMediaType::PDF),
                            ..
                        }) => {
                            let (file_data, file_url) = match data {
                                DocumentSourceKind::Base64(data) => {
                                    (Some(format!("data:application/pdf;base64,{data}")), None)
                                }
                                DocumentSourceKind::Url(url) => (None, Some(url)),
                                DocumentSourceKind::Raw(_) => {
                                    return Err(CompletionError::RequestError(
                                        "Raw file data not supported, encode as base64 first"
                                            .into(),
                                    ));
                                }
                                doc => {
                                    return Err(CompletionError::RequestError(
                                        format!("Unsupported document type: {doc}").into(),
                                    ));
                                }
                            };

                            items.push(InputItem {
                                role: Some(Role::User),
                                input: InputContent::Message(Message::User {
                                    content: OneOrMany::one(UserContent::InputFile {
                                        file_data,
                                        file_url,
                                        // No original filename is available here, so a
                                        // fixed placeholder is used.
                                        filename: Some("document.pdf".to_string()),
                                    }),
                                    name: None,
                                }),
                            })
                        }
                        // Non-PDF documents are forwarded as plain input text.
                        crate::message::UserContent::Document(Document {
                            data:
                                DocumentSourceKind::Base64(text) | DocumentSourceKind::String(text),
                            ..
                        }) => items.push(InputItem {
                            role: Some(Role::User),
                            input: InputContent::Message(Message::User {
                                content: OneOrMany::one(UserContent::InputText { text }),
                                name: None,
                            }),
                        }),
                        crate::message::UserContent::Image(crate::message::Image {
                            data,
                            media_type,
                            detail,
                            ..
                        }) => {
                            let url = match data {
                                DocumentSourceKind::Base64(data) => {
                                    // A missing media type yields "data:;base64,...".
                                    // NOTE(review): confirm the API tolerates an empty MIME.
                                    let media_type = if let Some(media_type) = media_type {
                                        media_type.to_mime_type().to_string()
                                    } else {
                                        String::new()
                                    };
                                    format!("data:{media_type};base64,{data}")
                                }
                                DocumentSourceKind::Url(url) => url,
                                DocumentSourceKind::Raw(_) => {
                                    return Err(CompletionError::RequestError(
                                        "Raw file data not supported, encode as base64 first"
                                            .into(),
                                    ));
                                }
                                doc => {
                                    return Err(CompletionError::RequestError(
                                        format!("Unsupported document type: {doc}").into(),
                                    ));
                                }
                            };
                            items.push(InputItem {
                                role: Some(Role::User),
                                input: InputContent::Message(Message::User {
                                    content: OneOrMany::one(UserContent::InputImage {
                                        image_url: url,
                                        detail: detail.unwrap_or_default(),
                                    }),
                                    name: None,
                                }),
                            });
                        }
                        message => {
                            return Err(CompletionError::ProviderError(format!(
                                "Unsupported message: {message:?}"
                            )));
                        }
                    }
                }

                Ok(items)
            }
            crate::completion::Message::Assistant { id, content } => {
                let mut reasoning_items = Vec::new();
                let mut other_items = Vec::new();

                for assistant_content in content {
                    match assistant_content {
                        crate::message::AssistantContent::Text(Text { text }) => {
                            // Falls back to an empty ID when the message has none.
                            let id = id.as_ref().unwrap_or(&String::default()).clone();
                            other_items.push(InputItem {
                                role: Some(Role::Assistant),
                                input: InputContent::Message(Message::Assistant {
                                    content: OneOrMany::one(AssistantContentType::Text(
                                        AssistantContent::OutputText(Text { text }),
                                    )),
                                    id,
                                    name: None,
                                    status: ToolStatus::Completed,
                                }),
                            });
                        }
                        crate::message::AssistantContent::ToolCall(crate::message::ToolCall {
                            id: tool_id,
                            call_id,
                            function,
                            ..
                        }) => {
                            other_items.push(InputItem {
                                role: None,
                                input: InputContent::FunctionCall(OutputFunctionCall {
                                    arguments: function.arguments,
                                    call_id: require_call_id(call_id, "Assistant tool call")?,
                                    id: tool_id,
                                    name: function.name,
                                    status: ToolStatus::Completed,
                                }),
                            });
                        }
                        crate::message::AssistantContent::Reasoning(reasoning) => {
                            let openai_reasoning = openai_reasoning_from_core(&reasoning)
                                .map_err(|err| CompletionError::ProviderError(err.to_string()))?;
                            reasoning_items.push(InputItem {
                                role: None,
                                input: InputContent::Reasoning(openai_reasoning),
                            });
                        }
                        crate::message::AssistantContent::Image(_) => {
                            return Err(CompletionError::ProviderError(
                                "Assistant image content is not supported in OpenAI Responses API"
                                    .to_string(),
                            ));
                        }
                    }
                }

                // Reasoning items are emitted before all other assistant items,
                // regardless of their position in the source message.
                let mut items = reasoning_items;
                items.extend(other_items);
                Ok(items)
            }
        }
    }
}
463
464impl From<OneOrMany<String>> for Vec<ReasoningSummary> {
465    fn from(value: OneOrMany<String>) -> Self {
466        value.iter().map(|x| ReasoningSummary::new(x)).collect()
467    }
468}
469
470fn require_call_id(call_id: Option<String>, context: &str) -> Result<String, CompletionError> {
471    call_id.ok_or_else(|| {
472        CompletionError::RequestError(
473            format!("{context} `call_id` is required for OpenAI Responses API").into(),
474        )
475    })
476}
477
478fn openai_reasoning_from_core(
479    reasoning: &crate::message::Reasoning,
480) -> Result<OpenAIReasoning, MessageError> {
481    let id = reasoning.id.clone().ok_or_else(|| {
482        MessageError::ConversionError(
483            "An OpenAI-generated ID is required when using OpenAI reasoning items".to_string(),
484        )
485    })?;
486    let mut summary = Vec::new();
487    let mut encrypted_content = None;
488    for content in &reasoning.content {
489        match content {
490            crate::message::ReasoningContent::Text { text, .. }
491            | crate::message::ReasoningContent::Summary(text) => {
492                summary.push(ReasoningSummary::new(text));
493            }
494            // OpenAI reasoning input has one opaque payload field; preserve either
495            // encrypted or redacted blocks there, preferring the first one seen.
496            crate::message::ReasoningContent::Encrypted(data)
497            | crate::message::ReasoningContent::Redacted { data } => {
498                encrypted_content.get_or_insert_with(|| data.clone());
499            }
500        }
501    }
502
503    Ok(OpenAIReasoning {
504        id,
505        summary,
506        encrypted_content,
507        status: None,
508    })
509}
510
/// The definition of a tool response, repurposed for OpenAI's Responses API.
/// Covers both function tools (with a JSON-schema `parameters`) and hosted tools
/// (where `kind` names the tool and extra settings live in `config`).
#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)]
pub struct ResponsesToolDefinition {
    /// The type of tool.
    #[serde(rename = "type")]
    pub kind: String,
    /// Tool name
    #[serde(default, skip_serializing_if = "String::is_empty")]
    pub name: String,
    /// Parameters - this should be a JSON schema. Tools should additionally ensure an "additionalParameters" field has been added with the value set to false, as this is required if using OpenAI's strict mode (enabled by default).
    #[serde(default, skip_serializing_if = "is_json_null")]
    pub parameters: serde_json::Value,
    /// Whether to use strict mode. Enabled by default as it allows for improved efficiency.
    #[serde(default, skip_serializing_if = "is_false")]
    pub strict: bool,
    /// Tool description.
    #[serde(default, skip_serializing_if = "String::is_empty")]
    pub description: String,
    /// Additional provider-specific configuration for hosted tools.
    /// Flattened: unknown top-level JSON keys are captured here on deserialize
    /// and written back at the top level on serialize.
    #[serde(flatten, default, skip_serializing_if = "Map::is_empty")]
    pub config: Map<String, Value>,
}
533
534fn is_json_null(value: &Value) -> bool {
535    value.is_null()
536}
537
/// Serde helper: skip serializing a `bool` field when it is `false`.
fn is_false(value: &bool) -> bool {
    !(*value)
}
541
542impl ResponsesToolDefinition {
543    /// Creates a function tool definition.
544    pub fn function(
545        name: impl Into<String>,
546        description: impl Into<String>,
547        mut parameters: serde_json::Value,
548    ) -> Self {
549        super::sanitize_schema(&mut parameters);
550
551        Self {
552            kind: "function".to_string(),
553            name: name.into(),
554            parameters,
555            strict: true,
556            description: description.into(),
557            config: Map::new(),
558        }
559    }
560
561    /// Creates a hosted tool definition for an arbitrary hosted tool type.
562    pub fn hosted(kind: impl Into<String>) -> Self {
563        Self {
564            kind: kind.into(),
565            name: String::new(),
566            parameters: Value::Null,
567            strict: false,
568            description: String::new(),
569            config: Map::new(),
570        }
571    }
572
573    /// Creates a hosted `web_search` tool definition.
574    pub fn web_search() -> Self {
575        Self::hosted("web_search")
576    }
577
578    /// Creates a hosted `file_search` tool definition.
579    pub fn file_search() -> Self {
580        Self::hosted("file_search")
581    }
582
583    /// Creates a hosted `computer_use` tool definition.
584    pub fn computer_use() -> Self {
585        Self::hosted("computer_use")
586    }
587
588    /// Adds hosted-tool configuration fields.
589    pub fn with_config(mut self, key: impl Into<String>, value: Value) -> Self {
590        self.config.insert(key.into(), value);
591        self
592    }
593
594    fn normalize(mut self) -> Self {
595        if self.kind == "function" {
596            super::sanitize_schema(&mut self.parameters);
597            self.strict = true;
598        }
599        self
600    }
601}
602
603impl From<completion::ToolDefinition> for ResponsesToolDefinition {
604    fn from(value: completion::ToolDefinition) -> Self {
605        let completion::ToolDefinition {
606            name,
607            parameters,
608            description,
609        } = value;
610
611        Self::function(name, description, parameters)
612    }
613}
614
/// Token usage.
/// Token usage from the OpenAI Responses API generally shows the input tokens and output tokens (both with more in-depth details) as well as a total tokens field.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ResponsesUsage {
    /// Input tokens
    pub input_tokens: u64,
    /// In-depth detail on input tokens (cached tokens).
    /// Optional because the type tolerates responses that omit it.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_tokens_details: Option<InputTokensDetails>,
    /// Output tokens
    pub output_tokens: u64,
    /// In-depth detail on output tokens (reasoning tokens)
    pub output_tokens_details: OutputTokensDetails,
    /// Total tokens used (for a given prompt)
    pub total_tokens: u64,
}
631
632impl ResponsesUsage {
633    /// Create a new ResponsesUsage instance
634    pub(crate) fn new() -> Self {
635        Self {
636            input_tokens: 0,
637            input_tokens_details: Some(InputTokensDetails::new()),
638            output_tokens: 0,
639            output_tokens_details: OutputTokensDetails::new(),
640            total_tokens: 0,
641        }
642    }
643}
644
645impl Add for ResponsesUsage {
646    type Output = Self;
647
648    fn add(self, rhs: Self) -> Self::Output {
649        let input_tokens = self.input_tokens + rhs.input_tokens;
650        let input_tokens_details = self.input_tokens_details.map(|lhs| {
651            if let Some(tokens) = rhs.input_tokens_details {
652                lhs + tokens
653            } else {
654                lhs
655            }
656        });
657        let output_tokens = self.output_tokens + rhs.output_tokens;
658        let output_tokens_details = self.output_tokens_details + rhs.output_tokens_details;
659        let total_tokens = self.total_tokens + rhs.total_tokens;
660        Self {
661            input_tokens,
662            input_tokens_details,
663            output_tokens,
664            output_tokens_details,
665            total_tokens,
666        }
667    }
668}
669
670/// In-depth details on input tokens.
671#[derive(Clone, Debug, Serialize, Deserialize)]
672pub struct InputTokensDetails {
673    /// Cached tokens from OpenAI
674    pub cached_tokens: u64,
675}
676
677impl InputTokensDetails {
678    pub(crate) fn new() -> Self {
679        Self { cached_tokens: 0 }
680    }
681}
682
683impl Add for InputTokensDetails {
684    type Output = Self;
685    fn add(self, rhs: Self) -> Self::Output {
686        Self {
687            cached_tokens: self.cached_tokens + rhs.cached_tokens,
688        }
689    }
690}
691
692/// In-depth details on output tokens.
693#[derive(Clone, Debug, Serialize, Deserialize)]
694pub struct OutputTokensDetails {
695    /// Reasoning tokens
696    pub reasoning_tokens: u64,
697}
698
699impl OutputTokensDetails {
700    pub(crate) fn new() -> Self {
701        Self {
702            reasoning_tokens: 0,
703        }
704    }
705}
706
707impl Add for OutputTokensDetails {
708    type Output = Self;
709    fn add(self, rhs: Self) -> Self::Output {
710        Self {
711            reasoning_tokens: self.reasoning_tokens + rhs.reasoning_tokens,
712        }
713    }
714}
715
/// Occasionally, when using OpenAI's Responses API you may get an incomplete response. This struct holds the reason as to why it happened.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct IncompleteDetailsReason {
    /// The reason for an incomplete [`CompletionResponse`].
    /// Free-form text as returned by the API.
    pub reason: String,
}
722
/// A response error from OpenAI's Response API.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct ResponseError {
    /// Error code (machine-readable identifier).
    pub code: String,
    /// Error message (human-readable description).
    pub message: String,
}
731
/// A response object as an enum (ensures type validation)
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum ResponseObject {
    /// The only accepted value; serialized as the literal `"response"`.
    Response,
}
738
/// The response status as an enum (ensures type validation)
/// Serialized in snake_case (e.g. `"in_progress"`).
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ResponseStatus {
    /// Generation is still running.
    InProgress,
    /// Generation finished successfully.
    Completed,
    /// Generation failed.
    Failed,
    /// Generation was cancelled.
    Cancelled,
    /// Request is queued and has not started yet.
    Queued,
    /// Generation stopped before completing (see [`IncompleteDetailsReason`]).
    Incomplete,
}
750
/// Builds a Responses API [`CompletionRequest`] from a model name plus the
/// provider-agnostic [`crate::completion::CompletionRequest`].
///
/// Handles: preamble-as-system-message, document/chat-history flattening,
/// `stream`/`tools` extraction from `additional_params`, and mapping
/// `output_schema` into structured outputs.
impl TryFrom<(String, crate::completion::CompletionRequest)> for CompletionRequest {
    type Error = CompletionError;
    fn try_from(
        (model, mut req): (String, crate::completion::CompletionRequest),
    ) -> Result<Self, Self::Error> {
        // A model name set on the request itself takes priority over the one passed in.
        let model = req.model.clone().unwrap_or(model);
        let input = {
            let mut partial_history = vec![];
            if let Some(docs) = req.normalized_documents() {
                partial_history.push(docs);
            }
            partial_history.extend(req.chat_history);

            // Initialize full history with preamble (or empty if non-existent)
            // Some "Responses API compatible" providers don't support `instructions` field
            // so we need to add a system message until further notice
            let mut full_history: Vec<InputItem> = if let Some(content) = req.preamble {
                vec![InputItem::system_message(content)]
            } else {
                Vec::new()
            };

            for history_item in partial_history {
                full_history.extend(<Vec<InputItem>>::try_from(history_item)?);
            }

            full_history
        };

        // The API requires at least one input item.
        let input = OneOrMany::many(input).map_err(|_| {
            CompletionError::RequestError(
                "OpenAI Responses request input must contain at least one item".into(),
            )
        })?;

        // `additional_params` may be a bare boolean (interpreted as the `stream`
        // flag) or an object that can carry `stream`, `tools` and any other
        // Responses API parameters.
        let mut additional_params_payload = req.additional_params.take().unwrap_or(Value::Null);
        let stream = match &additional_params_payload {
            Value::Bool(stream) => Some(*stream),
            Value::Object(map) => map.get("stream").and_then(Value::as_bool),
            _ => None,
        };

        // Pull provider-native tool definitions (and drop `stream`) out of the
        // payload so they aren't re-parsed as part of `AdditionalParameters` below.
        let mut additional_tools = Vec::new();
        if let Some(additional_params_map) = additional_params_payload.as_object_mut() {
            if let Some(raw_tools) = additional_params_map.remove("tools") {
                additional_tools = serde_json::from_value::<Vec<ResponsesToolDefinition>>(
                    raw_tools,
                )
                .map_err(|err| {
                    CompletionError::RequestError(
                        format!(
                            "Invalid OpenAI Responses tools payload in additional_params: {err}"
                        )
                        .into(),
                    )
                })?;
            }
            additional_params_map.remove("stream");
        }

        // A bare-boolean payload only carried the stream flag; nothing left to parse.
        if additional_params_payload.is_boolean() {
            additional_params_payload = Value::Null;
        }

        additional_tools = additional_tools
            .into_iter()
            .map(ResponsesToolDefinition::normalize)
            .collect();

        let mut additional_parameters = if additional_params_payload.is_null() {
            // If there's no additional parameters, initialise an empty object
            AdditionalParameters::default()
        } else {
            serde_json::from_value::<AdditionalParameters>(additional_params_payload).map_err(
                |err| {
                    CompletionError::RequestError(
                        format!("Invalid OpenAI Responses additional_params payload: {err}").into(),
                    )
                },
            )?
        };
        // When reasoning is requested, also ask for encrypted reasoning content so
        // reasoning items can be replayed in later turns (without duplicating the entry).
        if additional_parameters.reasoning.is_some() {
            let include = additional_parameters.include.get_or_insert_with(Vec::new);
            if !include
                .iter()
                .any(|item| matches!(item, Include::ReasoningEncryptedContent))
            {
                include.push(Include::ReasoningEncryptedContent);
            }
        }

        // Apply output_schema as structured output if not already configured via additional_params
        if additional_parameters.text.is_none()
            && let Some(schema) = req.output_schema
        {
            // Prefer the schema's `title` as the structured-output name when present.
            let name = schema
                .as_object()
                .and_then(|o| o.get("title"))
                .and_then(|v| v.as_str())
                .unwrap_or("response_schema")
                .to_string();
            let mut schema_value = schema.to_value();
            super::sanitize_schema(&mut schema_value);
            additional_parameters.text = Some(TextConfig::structured_output(name, schema_value));
        }

        let tool_choice = req.tool_choice.map(ToolChoice::try_from).transpose()?;
        // Request-level tools first, then tools supplied via additional_params.
        let mut tools: Vec<ResponsesToolDefinition> = req
            .tools
            .into_iter()
            .map(ResponsesToolDefinition::from)
            .collect();
        tools.append(&mut additional_tools);

        Ok(Self {
            input,
            model,
            instructions: None, // is currently None due to lack of support in compliant providers
            max_output_tokens: req.max_tokens,
            stream,
            tool_choice,
            tools,
            temperature: req.temperature,
            additional_parameters,
        })
    }
}
879
/// The completion model struct for OpenAI's response API.
/// Generic over the underlying HTTP client `T` (defaults to [`reqwest::Client`]).
#[derive(Clone)]
pub struct ResponsesCompletionModel<T = reqwest::Client> {
    /// The OpenAI client
    pub(crate) client: Client<T>,
    /// Name of the model (e.g.: gpt-3.5-turbo-1106)
    pub model: String,
    /// Model-level default tools that are always added to outgoing requests.
    pub tools: Vec<ResponsesToolDefinition>,
}
890
891impl<T> ResponsesCompletionModel<T>
892where
893    T: HttpClientExt + Clone + Default + std::fmt::Debug + 'static,
894{
895    /// Creates a new [`ResponsesCompletionModel`].
896    pub fn new(client: Client<T>, model: impl Into<String>) -> Self {
897        Self {
898            client,
899            model: model.into(),
900            tools: Vec::new(),
901        }
902    }
903
904    pub fn with_model(client: Client<T>, model: &str) -> Self {
905        Self {
906            client,
907            model: model.to_string(),
908            tools: Vec::new(),
909        }
910    }
911
912    /// Adds a default tool to all requests from this model.
913    pub fn with_tool(mut self, tool: impl Into<ResponsesToolDefinition>) -> Self {
914        self.tools.push(tool.into());
915        self
916    }
917
918    /// Adds default tools to all requests from this model.
919    pub fn with_tools<I, Tool>(mut self, tools: I) -> Self
920    where
921        I: IntoIterator<Item = Tool>,
922        Tool: Into<ResponsesToolDefinition>,
923    {
924        self.tools.extend(tools.into_iter().map(Into::into));
925        self
926    }
927
928    /// Use the Completions API instead of Responses.
929    pub fn completions_api(self) -> crate::providers::openai::completion::CompletionModel<T> {
930        super::completion::CompletionModel::with_model(self.client.completions_api(), &self.model)
931    }
932
933    /// Attempt to create a completion request from [`crate::completion::CompletionRequest`].
934    pub(crate) fn create_completion_request(
935        &self,
936        completion_request: crate::completion::CompletionRequest,
937    ) -> Result<CompletionRequest, CompletionError> {
938        let mut req = CompletionRequest::try_from((self.model.clone(), completion_request))?;
939        req.tools.extend(self.tools.clone());
940
941        Ok(req)
942    }
943}
944
/// The standard response format from OpenAI's Responses API.
/// See <https://platform.openai.com/docs/api-reference/responses/object> for the wire format.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CompletionResponse {
    /// The ID of a completion response.
    pub id: String,
    /// The type of the object.
    pub object: ResponseObject,
    /// The time at which a given response has been created, in seconds from the UNIX epoch (01/01/1970 00:00:00).
    pub created_at: u64,
    /// The status of the response.
    pub status: ResponseStatus,
    /// Response error (optional)
    pub error: Option<ResponseError>,
    /// Incomplete response details (optional)
    pub incomplete_details: Option<IncompleteDetailsReason>,
    /// System prompt/preamble
    pub instructions: Option<String>,
    /// The maximum number of tokens the model should output
    pub max_output_tokens: Option<u64>,
    /// The model name
    pub model: String,
    /// Token usage
    pub usage: Option<ResponsesUsage>,
    /// The model output (messages, etc will go here)
    pub output: Vec<Output>,
    /// Tools (defaults to an empty list when absent from the payload)
    #[serde(default)]
    pub tools: Vec<ResponsesToolDefinition>,
    /// Additional parameters, deserialized inline from the remaining top-level fields.
    #[serde(flatten)]
    pub additional_parameters: AdditionalParameters,
}
977
/// Additional parameters for the completion request type for OpenAI's Response API: <https://platform.openai.com/docs/api-reference/responses/create>
/// Intended to be derived from [`crate::completion::request::CompletionRequest`].
/// Serialized inline (flattened) into requests/responses; unset fields are omitted.
#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct AdditionalParameters {
    /// Whether or not a given model task should run in the background (ie a detached process).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub background: Option<bool>,
    /// The text response format. This is where you would add structured outputs (if you want them).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub text: Option<TextConfig>,
    /// What types of extra data you would like to include. This is mostly useless at the moment since the types of extra data to add is currently unsupported, but this will be coming soon!
    #[serde(skip_serializing_if = "Option::is_none")]
    pub include: Option<Vec<Include>>,
    /// `top_p`. Mutually exclusive with the `temperature` argument.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f64>,
    /// Whether or not the response should be truncated.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub truncation: Option<TruncationStrategy>,
    /// The username of the user (that you want to use).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
    /// Any additional metadata you'd like to add. This will additionally be returned by the response.
    /// An empty map is skipped on serialization and tolerated when deserializing.
    #[serde(skip_serializing_if = "Map::is_empty", default)]
    pub metadata: serde_json::Map<String, serde_json::Value>,
    /// Whether or not you want tool calls to run in parallel.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parallel_tool_calls: Option<bool>,
    /// Previous response ID. If you are not sending a full conversation, this can help to track the message flow.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub previous_response_id: Option<String>,
    /// Add thinking/reasoning to your response. The response will be emitted as a list member of the `output` field.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<Reasoning>,
    /// The service tier you're using.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<OpenAIServiceTier>,
    /// Whether or not to store the response for later retrieval by API.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub store: Option<bool>,
}
1019
1020impl AdditionalParameters {
1021    pub fn to_json(self) -> serde_json::Value {
1022        serde_json::to_value(self).unwrap_or_else(|_| serde_json::Value::Object(Map::new()))
1023    }
1024}
1025
/// The truncation strategy.
/// When using auto, if the context of this response and previous ones exceeds the model's context window size, the model will truncate the response to fit the context window by dropping input items in the middle of the conversation.
/// Otherwise, does nothing (and is disabled by default).
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum TruncationStrategy {
    /// Let the API drop middle-of-conversation input items to fit the context window.
    Auto,
    /// Never truncate (the default).
    #[default]
    Disabled,
}
1036
/// The model output format configuration.
/// You can either have plain text by default, or attach a JSON schema for the purposes of structured outputs.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TextConfig {
    /// The desired output format; see [`TextFormat`].
    pub format: TextFormat,
}
1043
1044impl TextConfig {
1045    pub(crate) fn structured_output<S>(name: S, schema: serde_json::Value) -> Self
1046    where
1047        S: Into<String>,
1048    {
1049        Self {
1050            format: TextFormat::JsonSchema(StructuredOutputsInput {
1051                name: name.into(),
1052                schema,
1053                strict: true,
1054            }),
1055        }
1056    }
1057}
1058
/// The text format (contained by [`TextConfig`]).
/// You can either have plain text by default, or attach a JSON schema for the purposes of structured outputs.
/// Internally tagged by `type` in snake_case (`json_schema` / `text`).
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum TextFormat {
    /// Structured output constrained by a JSON schema.
    JsonSchema(StructuredOutputsInput),
    /// Plain text output (the default).
    #[default]
    Text,
}
1069
/// The inputs required for adding structured outputs.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct StructuredOutputsInput {
    /// The name of your schema.
    pub name: String,
    /// Your required output schema. It is recommended that you use the JsonSchema macro, which you can check out at <https://docs.rs/schemars/latest/schemars/trait.JsonSchema.html>.
    pub schema: serde_json::Value,
    /// Enable strict output. If you are using your AI agent in a data pipeline or another scenario that requires the data to be absolutely fixed to a given schema, it is recommended to set this to true.
    /// Defaults to `false` when absent during deserialization.
    #[serde(default)]
    pub strict: bool,
}
1081
/// Add reasoning to a [`CompletionRequest`].
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct Reasoning {
    /// How much effort you want the model to put into thinking/reasoning.
    // NOTE(review): unlike `summary`, this field has no `skip_serializing_if`,
    // so an unset effort serializes as `"effort": null` — confirm intentional.
    pub effort: Option<ReasoningEffort>,
    /// How much effort you want the model to put into writing the reasoning summary.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub summary: Option<ReasoningSummaryLevel>,
}
1091
1092impl Reasoning {
1093    /// Creates a new Reasoning instantiation (with empty values).
1094    pub fn new() -> Self {
1095        Self {
1096            effort: None,
1097            summary: None,
1098        }
1099    }
1100
1101    /// Adds reasoning effort.
1102    pub fn with_effort(mut self, reasoning_effort: ReasoningEffort) -> Self {
1103        self.effort = Some(reasoning_effort);
1104
1105        self
1106    }
1107
1108    /// Adds summary level (how detailed the reasoning summary will be).
1109    pub fn with_summary_level(mut self, reasoning_summary_level: ReasoningSummaryLevel) -> Self {
1110        self.summary = Some(reasoning_summary_level);
1111
1112        self
1113    }
1114}
1115
/// The billing service tier that will be used. On auto by default.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIServiceTier {
    /// Let OpenAI choose the tier (the default).
    #[default]
    Auto,
    /// The standard tier.
    Default,
    /// The flex processing tier.
    Flex,
}
1125
/// The amount of reasoning effort that will be used by a given model.
/// Serialized in snake_case; defaults to `medium`.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningEffort {
    None,
    Minimal,
    Low,
    #[default]
    Medium,
    High,
    Xhigh,
}
1138
/// The amount of effort that will go into a reasoning summary by a given model.
/// Serialized in snake_case; defaults to `auto`.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningSummaryLevel {
    /// Let the model pick the summary detail level (the default).
    #[default]
    Auto,
    Concise,
    Detailed,
}
1148
/// Results to additionally include in the OpenAI Responses API.
/// Note that most of these are currently unsupported, but have been added for completeness.
/// Each variant serializes to the API's dotted identifier via the explicit renames.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum Include {
    /// Include file-search call results.
    #[serde(rename = "file_search_call.results")]
    FileSearchCallResults,
    /// Include image URLs from input messages.
    #[serde(rename = "message.input_image.image_url")]
    MessageInputImageImageUrl,
    /// Include image URLs from computer-call outputs.
    // NOTE(review): the variant name repeats "Output" while the wire name has a
    // single `.output.`; renaming would break the public API, so it stays as-is.
    #[serde(rename = "computer_call.output.image_url")]
    ComputerCallOutputOutputImageUrl,
    /// Include encrypted reasoning content. Added automatically by the request
    /// conversion when reasoning is enabled.
    #[serde(rename = "reasoning.encrypted_content")]
    ReasoningEncryptedContent,
    /// Include code-interpreter call outputs.
    #[serde(rename = "code_interpreter_call.outputs")]
    CodeInterpreterCallOutputs,
}
1164
/// A currently non-exhaustive list of output types.
/// Internally tagged by `type` in snake_case.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum Output {
    /// An assistant message (text and/or refusals).
    Message(OutputMessage),
    /// A tool/function call requested by the model.
    #[serde(alias = "function_call")]
    FunctionCall(OutputFunctionCall),
    /// A reasoning output item.
    Reasoning {
        id: String,
        summary: Vec<ReasoningSummary>,
        // Present when `reasoning.encrypted_content` was included in the request.
        #[serde(default)]
        encrypted_content: Option<String>,
        #[serde(default)]
        status: Option<ToolStatus>,
    },
}
1182
1183impl From<Output> for Vec<completion::AssistantContent> {
1184    fn from(value: Output) -> Self {
1185        let res: Vec<completion::AssistantContent> = match value {
1186            Output::Message(OutputMessage { content, .. }) => content
1187                .into_iter()
1188                .map(completion::AssistantContent::from)
1189                .collect(),
1190            Output::FunctionCall(OutputFunctionCall {
1191                id,
1192                arguments,
1193                call_id,
1194                name,
1195                ..
1196            }) => vec![completion::AssistantContent::tool_call_with_call_id(
1197                id, call_id, name, arguments,
1198            )],
1199            Output::Reasoning {
1200                id,
1201                summary,
1202                encrypted_content,
1203                ..
1204            } => {
1205                let mut content = summary
1206                    .into_iter()
1207                    .map(|summary| match summary {
1208                        ReasoningSummary::SummaryText { text } => {
1209                            message::ReasoningContent::Summary(text)
1210                        }
1211                    })
1212                    .collect::<Vec<_>>();
1213                if let Some(encrypted_content) = encrypted_content {
1214                    content.push(message::ReasoningContent::Encrypted(encrypted_content));
1215                }
1216                vec![completion::AssistantContent::Reasoning(
1217                    message::Reasoning {
1218                        id: Some(id),
1219                        content,
1220                    },
1221                )]
1222            }
1223        };
1224
1225        res
1226    }
1227}
1228
/// A reasoning output item with its summary parts and status.
// NOTE(review): `Output::Reasoning` declares its fields inline rather than using
// this struct, so this type appears unused within this view — confirm before removing.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct OutputReasoning {
    // The reasoning item ID.
    id: String,
    // Summary parts of the reasoning.
    summary: Vec<ReasoningSummary>,
    // Completion status of the item.
    status: ToolStatus,
}
1235
/// An OpenAI Responses API tool call. A call ID will be returned that must be used when creating a tool result to send back to OpenAI as a message input, otherwise an error will be received.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct OutputFunctionCall {
    /// The output item ID.
    pub id: String,
    /// Tool arguments; stored as a JSON string on the wire but exposed as a `Value` here.
    #[serde(with = "json_utils::stringified_json")]
    pub arguments: serde_json::Value,
    /// The call ID that a matching tool result must echo back.
    pub call_id: String,
    /// The name of the tool being called.
    pub name: String,
    /// Completion status of the call.
    pub status: ToolStatus,
}
1246
/// The status of a given tool.
/// Serialized in snake_case (e.g. `in_progress`).
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ToolStatus {
    InProgress,
    Completed,
    Incomplete,
}
1255
/// An output message from OpenAI's Responses API.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct OutputMessage {
    /// The message ID. Must be included when sending the message back to OpenAI
    pub id: String,
    /// The role (currently only Assistant is available as this struct is only created when receiving an LLM message as a response)
    pub role: OutputRole,
    /// The status of the response
    pub status: ResponseStatus,
    /// The actual message content (text parts and/or refusals)
    pub content: Vec<AssistantContent>,
}
1268
/// The role of an output message.
/// Only `assistant` exists since output messages always come from the model.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum OutputRole {
    Assistant,
}
1275
/// [`completion::CompletionModel`] implementation backed by the `/responses` endpoint.
impl<T> completion::CompletionModel for ResponsesCompletionModel<T>
where
    T: HttpClientExt
        + Clone
        + std::fmt::Debug
        + Default
        + WasmCompatSend
        + WasmCompatSync
        + 'static,
{
    type Response = CompletionResponse;
    type StreamingResponse = StreamingCompletionResponse;

    type Client = super::Client<T>;

    /// Constructs this model from a client handle and a model name.
    fn make(client: &Self::Client, model: impl Into<String>) -> Self {
        Self::new(client.clone(), model)
    }

    /// Sends a non-streaming completion request to `/responses` and converts the
    /// result into Rig's generic completion response, recording GenAI tracing
    /// fields along the way.
    async fn completion(
        &self,
        completion_request: crate::completion::CompletionRequest,
    ) -> Result<completion::CompletionResponse<Self::Response>, CompletionError> {
        // Reuse an already-active span if present; otherwise open a fresh
        // GenAI-convention span with fields filled in as data becomes available.
        let span = if tracing::Span::current().is_disabled() {
            info_span!(
                target: "rig::completions",
                "chat",
                gen_ai.operation.name = "chat",
                gen_ai.provider.name = tracing::field::Empty,
                gen_ai.request.model = tracing::field::Empty,
                gen_ai.response.id = tracing::field::Empty,
                gen_ai.response.model = tracing::field::Empty,
                gen_ai.usage.output_tokens = tracing::field::Empty,
                gen_ai.usage.input_tokens = tracing::field::Empty,
                gen_ai.usage.cached_tokens = tracing::field::Empty,
                gen_ai.input.messages = tracing::field::Empty,
                gen_ai.output.messages = tracing::field::Empty,
            )
        } else {
            tracing::Span::current()
        };

        span.record("gen_ai.provider.name", "openai");
        span.record("gen_ai.request.model", &self.model);
        let request = self.create_completion_request(completion_request)?;
        let body = serde_json::to_vec(&request)?;

        // Only pretty-print the request when TRACE logging is actually enabled.
        if enabled!(Level::TRACE) {
            tracing::trace!(
                target: "rig::completions",
                "OpenAI Responses completion request: {request}",
                request = serde_json::to_string_pretty(&request)?
            );
        }

        let req = self
            .client
            .post("/responses")?
            .body(body)
            .map_err(|e| CompletionError::HttpError(e.into()))?;

        // Instrumented so the whole send/parse runs inside the span above.
        async move {
            let response = self.client.send(req).await?;

            if response.status().is_success() {
                let t = http_client::text(response).await?;
                let response = serde_json::from_str::<Self::Response>(&t)?;
                // Record response-side telemetry on the (now current) span.
                let span = tracing::Span::current();
                span.record("gen_ai.response.id", &response.id);
                span.record("gen_ai.response.model", &response.model);
                if let Some(ref usage) = response.usage {
                    span.record("gen_ai.usage.output_tokens", usage.output_tokens);
                    span.record("gen_ai.usage.input_tokens", usage.input_tokens);
                    span.record(
                        "gen_ai.usage.cached_tokens",
                        usage
                            .input_tokens_details
                            .as_ref()
                            .map(|d| d.cached_tokens)
                            .unwrap_or(0),
                    );
                }
                if enabled!(Level::TRACE) {
                    tracing::trace!(
                        target: "rig::completions",
                        "OpenAI Responses completion response: {response}",
                        response = serde_json::to_string_pretty(&response)?
                    );
                }
                response.try_into()
            } else {
                // Non-2xx: surface the raw body as a provider error.
                let text = http_client::text(response).await?;
                Err(CompletionError::ProviderError(text))
            }
        }
        .instrument(span)
        .await
    }

    /// Streams a completion; delegates to the inherent `stream` implementation
    /// (defined in the `streaming` submodule — see `streaming.rs`).
    async fn stream(
        &self,
        request: crate::completion::CompletionRequest,
    ) -> Result<
        crate::streaming::StreamingCompletionResponse<Self::StreamingResponse>,
        CompletionError,
    > {
        ResponsesCompletionModel::stream(self, request).await
    }
}
1385
1386impl TryFrom<CompletionResponse> for completion::CompletionResponse<CompletionResponse> {
1387    type Error = CompletionError;
1388
1389    fn try_from(response: CompletionResponse) -> Result<Self, Self::Error> {
1390        if response.output.is_empty() {
1391            return Err(CompletionError::ResponseError(
1392                "Response contained no parts".to_owned(),
1393            ));
1394        }
1395
1396        // Extract the msg_ ID from the first Output::Message item
1397        let message_id = response.output.iter().find_map(|item| match item {
1398            Output::Message(msg) => Some(msg.id.clone()),
1399            _ => None,
1400        });
1401
1402        let content: Vec<completion::AssistantContent> = response
1403            .output
1404            .iter()
1405            .cloned()
1406            .flat_map(<Vec<completion::AssistantContent>>::from)
1407            .collect();
1408
1409        let choice = OneOrMany::many(content).map_err(|_| {
1410            CompletionError::ResponseError(
1411                "Response contained no message or tool call (empty)".to_owned(),
1412            )
1413        })?;
1414
1415        let usage = response
1416            .usage
1417            .as_ref()
1418            .map(|usage| completion::Usage {
1419                input_tokens: usage.input_tokens,
1420                output_tokens: usage.output_tokens,
1421                total_tokens: usage.total_tokens,
1422                cached_input_tokens: usage
1423                    .input_tokens_details
1424                    .as_ref()
1425                    .map(|d| d.cached_tokens)
1426                    .unwrap_or(0),
1427            })
1428            .unwrap_or_default();
1429
1430        Ok(completion::CompletionResponse {
1431            choice,
1432            usage,
1433            raw_response: response,
1434            message_id,
1435        })
1436    }
1437}
1438
/// An OpenAI Responses API message.
/// The `role` field is the serde tag; `developer` is accepted as an alias for `system`.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum Message {
    /// A system (or `developer`) prompt message.
    #[serde(alias = "developer")]
    System {
        #[serde(deserialize_with = "string_or_one_or_many")]
        content: OneOrMany<SystemContent>,
        #[serde(skip_serializing_if = "Option::is_none")]
        name: Option<String>,
    },
    /// A user-authored message.
    User {
        #[serde(deserialize_with = "string_or_one_or_many")]
        content: OneOrMany<UserContent>,
        #[serde(skip_serializing_if = "Option::is_none")]
        name: Option<String>,
    },
    /// An assistant message (text, tool calls and/or reasoning).
    Assistant {
        content: OneOrMany<AssistantContentType>,
        // Empty IDs are omitted on serialization.
        #[serde(skip_serializing_if = "String::is_empty")]
        id: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        name: Option<String>,
        status: ToolStatus,
    },
    /// The result of an earlier tool call; `tool_call_id` must echo the
    /// `call_id` from the corresponding [`OutputFunctionCall`].
    #[serde(rename = "tool")]
    ToolResult {
        tool_call_id: String,
        output: String,
    },
}
1470
/// The type of a tool result content item.
/// Only text is currently supported.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ToolResultContentType {
    #[default]
    Text,
}
1478
1479impl Message {
1480    pub fn system(content: &str) -> Self {
1481        Message::System {
1482            content: OneOrMany::one(content.to_owned().into()),
1483            name: None,
1484        }
1485    }
1486}
1487
/// Text assistant content.
/// Note that the text type in comparison to the Completions API is actually `output_text` rather than `text`.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum AssistantContent {
    /// Generated text (`output_text` on the wire).
    OutputText(Text),
    /// The model declined to answer.
    Refusal { refusal: String },
}
1496
1497impl From<AssistantContent> for completion::AssistantContent {
1498    fn from(value: AssistantContent) -> Self {
1499        match value {
1500            AssistantContent::Refusal { refusal } => {
1501                completion::AssistantContent::Text(Text { text: refusal })
1502            }
1503            AssistantContent::OutputText(Text { text }) => {
1504                completion::AssistantContent::Text(Text { text })
1505            }
1506        }
1507    }
1508}
1509
/// The type of assistant content.
/// Untagged: serde tries each variant's shape in declaration order when deserializing.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(untagged)]
pub enum AssistantContentType {
    Text(AssistantContent),
    ToolCall(OutputFunctionCall),
    Reasoning(OpenAIReasoning),
}
1518
/// System content for the OpenAI Responses API.
/// Uses `input_text` type to match the Responses API format.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum SystemContent {
    InputText { text: String },
}
1526
1527impl From<String> for SystemContent {
1528    fn from(s: String) -> Self {
1529        SystemContent::InputText { text: s }
1530    }
1531}
1532
1533impl std::str::FromStr for SystemContent {
1534    type Err = std::convert::Infallible;
1535
1536    fn from_str(s: &str) -> Result<Self, Self::Err> {
1537        Ok(SystemContent::InputText {
1538            text: s.to_string(),
1539        })
1540    }
1541}
1542
/// Different types of user content.
/// Internally tagged by `type` in snake_case to match the Responses API input format.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum UserContent {
    /// Plain text input.
    InputText {
        text: String,
    },
    /// An image, referenced by URL (including base64 data URLs).
    InputImage {
        image_url: String,
        #[serde(default)]
        detail: ImageDetail,
    },
    /// A file input, supplied either by URL or as inline data.
    // NOTE(review): nothing here enforces that exactly one of `file_url` /
    // `file_data` is set — presumably the API validates this; confirm.
    InputFile {
        #[serde(skip_serializing_if = "Option::is_none")]
        file_url: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        file_data: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        filename: Option<String>,
    },
    /// Audio input.
    Audio {
        input_audio: InputAudio,
    },
    /// A tool result; `tool_call_id` must echo the `call_id` from the
    /// corresponding function call output.
    #[serde(rename = "tool")]
    ToolResult {
        tool_call_id: String,
        output: String,
    },
}
1572
1573impl TryFrom<message::Message> for Vec<Message> {
1574    type Error = message::MessageError;
1575
1576    fn try_from(message: message::Message) -> Result<Self, Self::Error> {
1577        match message {
1578            message::Message::System { content } => Ok(vec![Message::System {
1579                content: OneOrMany::one(content.into()),
1580                name: None,
1581            }]),
1582            message::Message::User { content } => {
1583                let (tool_results, other_content): (Vec<_>, Vec<_>) = content
1584                    .into_iter()
1585                    .partition(|content| matches!(content, message::UserContent::ToolResult(_)));
1586
1587                // If there are messages with both tool results and user content, openai will only
1588                //  handle tool results. It's unlikely that there will be both.
1589                if !tool_results.is_empty() {
1590                    tool_results
1591                        .into_iter()
1592                        .map(|content| match content {
1593                            message::UserContent::ToolResult(message::ToolResult {
1594                                call_id,
1595                                content,
1596                                ..
1597                            }) => Ok::<_, message::MessageError>(Message::ToolResult {
1598                                tool_call_id: call_id.ok_or_else(|| {
1599                                    MessageError::ConversionError(
1600                                        "Tool result `call_id` is required for OpenAI Responses API"
1601                                            .into(),
1602                                    )
1603                                })?,
1604                                output: {
1605                                    let res = content.first();
1606                                    match res {
1607                                        completion::message::ToolResultContent::Text(Text {
1608                                            text,
1609                                        }) => text,
1610                                        _ => return  Err(MessageError::ConversionError("This API only currently supports text tool results".into()))
1611                                    }
1612                                },
1613                            }),
1614                            _ => unreachable!(),
1615                        })
1616                        .collect::<Result<Vec<_>, _>>()
1617                } else {
1618                    let other_content = other_content
1619                        .into_iter()
1620                        .map(|content| match content {
1621                            message::UserContent::Text(message::Text { text }) => {
1622                                Ok(UserContent::InputText { text })
1623                            }
1624                            message::UserContent::Image(message::Image {
1625                                data,
1626                                detail,
1627                                media_type,
1628                                ..
1629                            }) => {
1630                                let url = match data {
1631                                    DocumentSourceKind::Base64(data) => {
1632                                        let media_type = if let Some(media_type) = media_type {
1633                                            media_type.to_mime_type().to_string()
1634                                        } else {
1635                                            String::new()
1636                                        };
1637                                        format!("data:{media_type};base64,{data}")
1638                                    }
1639                                    DocumentSourceKind::Url(url) => url,
1640                                    DocumentSourceKind::Raw(_) => {
1641                                        return Err(MessageError::ConversionError(
1642                                            "Raw files not supported, encode as base64 first"
1643                                                .into(),
1644                                        ));
1645                                    }
1646                                    doc => {
1647                                        return Err(MessageError::ConversionError(format!(
1648                                            "Unsupported document type: {doc}"
1649                                        )));
1650                                    }
1651                                };
1652
1653                                Ok(UserContent::InputImage {
1654                                    image_url: url,
1655                                    detail: detail.unwrap_or_default(),
1656                                })
1657                            }
1658                            message::UserContent::Document(message::Document {
1659                                media_type: Some(DocumentMediaType::PDF),
1660                                data,
1661                                ..
1662                            }) => {
1663                                let (file_data, file_url, filename) = match data {
1664                                    DocumentSourceKind::Base64(data) => (
1665                                        Some(format!("data:application/pdf;base64,{data}")),
1666                                        None,
1667                                        Some("document.pdf".to_string()),
1668                                    ),
1669                                    DocumentSourceKind::Url(url) => (None, Some(url), None),
1670                                    DocumentSourceKind::Raw(_) => {
1671                                        return Err(MessageError::ConversionError(
1672                                            "Raw files not supported, encode as base64 first"
1673                                                .into(),
1674                                        ));
1675                                    }
1676                                    doc => {
1677                                        return Err(MessageError::ConversionError(format!(
1678                                            "Unsupported document type: {doc}"
1679                                        )));
1680                                    }
1681                                };
1682
1683                                Ok(UserContent::InputFile {
1684                                    file_url,
1685                                    file_data,
1686                                    filename,
1687                                })
1688                            }
1689                            message::UserContent::Document(message::Document {
1690                                data: DocumentSourceKind::Base64(text),
1691                                ..
1692                            }) => Ok(UserContent::InputText { text }),
1693                            message::UserContent::Audio(message::Audio {
1694                                data: DocumentSourceKind::Base64(data),
1695                                media_type,
1696                                ..
1697                            }) => Ok(UserContent::Audio {
1698                                input_audio: InputAudio {
1699                                    data,
1700                                    format: match media_type {
1701                                        Some(media_type) => media_type,
1702                                        None => AudioMediaType::MP3,
1703                                    },
1704                                },
1705                            }),
1706                            message::UserContent::Audio(_) => Err(MessageError::ConversionError(
1707                                "Audio must be base64 encoded data".into(),
1708                            )),
1709                            _ => unreachable!(),
1710                        })
1711                        .collect::<Result<Vec<_>, _>>()?;
1712
1713                    let other_content = OneOrMany::many(other_content).map_err(|_| {
1714                        MessageError::ConversionError(
1715                            "User message did not contain OpenAI Responses-compatible content"
1716                                .to_string(),
1717                        )
1718                    })?;
1719
1720                    Ok(vec![Message::User {
1721                        content: other_content,
1722                        name: None,
1723                    }])
1724                }
1725            }
1726            message::Message::Assistant { content, id } => {
1727                let assistant_message_id = id.ok_or_else(|| {
1728                    MessageError::ConversionError(
1729                        "Assistant message ID is required for OpenAI Responses API".into(),
1730                    )
1731                })?;
1732
1733                match content.first() {
1734                    crate::message::AssistantContent::Text(Text { text }) => {
1735                        Ok(vec![Message::Assistant {
1736                            id: assistant_message_id.clone(),
1737                            status: ToolStatus::Completed,
1738                            content: OneOrMany::one(AssistantContentType::Text(
1739                                AssistantContent::OutputText(Text { text }),
1740                            )),
1741                            name: None,
1742                        }])
1743                    }
1744                    crate::message::AssistantContent::ToolCall(crate::message::ToolCall {
1745                        id,
1746                        call_id,
1747                        function,
1748                        ..
1749                    }) => Ok(vec![Message::Assistant {
1750                        content: OneOrMany::one(AssistantContentType::ToolCall(
1751                            OutputFunctionCall {
1752                                call_id: call_id.ok_or_else(|| {
1753                                    MessageError::ConversionError(
1754                                        "Tool call `call_id` is required for OpenAI Responses API"
1755                                            .into(),
1756                                    )
1757                                })?,
1758                                arguments: function.arguments,
1759                                id,
1760                                name: function.name,
1761                                status: ToolStatus::Completed,
1762                            },
1763                        )),
1764                        id: assistant_message_id.clone(),
1765                        name: None,
1766                        status: ToolStatus::Completed,
1767                    }]),
1768                    crate::message::AssistantContent::Reasoning(reasoning) => {
1769                        let openai_reasoning = openai_reasoning_from_core(&reasoning)?;
1770                        Ok(vec![Message::Assistant {
1771                            content: OneOrMany::one(AssistantContentType::Reasoning(
1772                                openai_reasoning,
1773                            )),
1774                            id: assistant_message_id,
1775                            name: None,
1776                            status: ToolStatus::Completed,
1777                        }])
1778                    }
1779                    crate::message::AssistantContent::Image(_) => {
1780                        Err(MessageError::ConversionError(
1781                            "Assistant image content is not supported in OpenAI Responses API"
1782                                .into(),
1783                        ))
1784                    }
1785                }
1786            }
1787        }
1788    }
1789}
1790
1791impl FromStr for UserContent {
1792    type Err = Infallible;
1793
1794    fn from_str(s: &str) -> Result<Self, Self::Err> {
1795        Ok(UserContent::InputText {
1796            text: s.to_string(),
1797        })
1798    }
1799}