use crate::adapter::adapters::support::get_api_key;
use crate::adapter::openai::OpenAIAdapter;
use crate::adapter::openai_resp::OpenAIRespStreamer;
use crate::adapter::openai_resp::resp_types::RespResponse;
use crate::adapter::{Adapter, AdapterDispatcher, AdapterKind, ServiceType, WebRequestData};
use crate::chat::{
ChatOptionsSet, ChatRequest, ChatResponse, ChatResponseFormat, ChatRole, ChatStream, ChatStreamResponse,
ContentPart, MessageContent, ReasoningEffort, StopReason, Tool, ToolConfig, ToolName, Usage,
};
use crate::resolver::{AuthData, Endpoint};
use crate::webc::{EventSourceStream, WebResponse};
use crate::{Error, Headers, Result};
use crate::{ModelIden, ServiceTarget};
use reqwest::RequestBuilder;
use serde_json::{Map, Value, json};
use value_ext::JsonValueExt;
/// Adapter for the OpenAI Responses API (the `/v1/responses` endpoint),
/// as opposed to the legacy Chat Completions endpoint handled by `OpenAIAdapter`.
pub struct OpenAIRespAdapter;

impl OpenAIRespAdapter {
	/// Environment variable consulted by default for the API key.
	pub const API_KEY_DEFAULT_ENV_NAME: &str = "OPENAI_API_KEY";
}
impl Adapter for OpenAIRespAdapter {
	const DEFAULT_API_KEY_ENV_NAME: Option<&'static str> = Some(Self::API_KEY_DEFAULT_ENV_NAME);

	/// Default auth reads the API key from the default env name (when one is defined).
	fn default_auth() -> AuthData {
		match Self::DEFAULT_API_KEY_ENV_NAME {
			Some(env_name) => AuthData::from_env(env_name),
			None => AuthData::None,
		}
	}

	fn default_endpoint() -> Endpoint {
		const BASE_URL: &str = "https://api.openai.com/v1/";
		Endpoint::from_static(BASE_URL)
	}

	/// Model listing is shared with the standard OpenAI adapter (same models endpoint).
	async fn all_model_names(kind: AdapterKind, endpoint: Endpoint, auth: AuthData) -> Result<Vec<String>> {
		OpenAIAdapter::list_model_names_for_end_target(kind, endpoint, auth).await
	}

	fn get_service_url(model: &ModelIden, service_type: ServiceType, endpoint: Endpoint) -> Result<String> {
		Self::util_get_service_url(model, service_type, endpoint)
	}

	/// Builds the `WebRequestData` (url, headers, JSON payload) for a Responses API call.
	fn to_web_request_data(
		target: ServiceTarget,
		service_type: ServiceType,
		chat_req: ChatRequest,
		chat_options: ChatOptionsSet<'_, '_>,
	) -> Result<WebRequestData> {
		let ServiceTarget { model, auth, endpoint } = target;
		let (_, model_name) = model.model_name.namespace_and_name();
		let adapter_kind = model.adapter_kind;

		// -- api_key
		let api_key = get_api_key(auth, &model)?;

		// -- url & headers
		let url = AdapterDispatcher::get_service_url(&model, service_type, endpoint)?;
		let headers = Headers::from(("Authorization".to_string(), format!("Bearer {api_key}")));

		let stream = matches!(service_type, ServiceType::ChatStream);

		// -- Reasoning effort
		// An explicit chat option wins; otherwise it may be derived from the model name
		// (in which case `from_model_name` also returns the name to send to the provider).
		let (reasoning_effort, model_name): (Option<ReasoningEffort>, &str) =
			if matches!(adapter_kind, AdapterKind::OpenAIResp) {
				let (reasoning_effort, model_name) = chat_options
					.reasoning_effort()
					.cloned()
					.map(|v| (Some(v), model_name))
					.unwrap_or_else(|| ReasoningEffort::from_model_name(model_name));
				(reasoning_effort, model_name)
			} else {
				(None, model_name)
			};

		// -- Build the base payload
		let OpenAIRespRequestParts {
			input_items: messages,
			tools,
		} = Self::into_openai_request_parts(&model, chat_req)?;
		let mut payload = json!({
			"store": false,
			"model": model_name,
			"input": messages,
			"stream": stream,
		});

		// -- Reasoning
		if let Some(reasoning_effort) = reasoning_effort
			&& let Some(keyword) = reasoning_effort.as_keyword()
		{
			payload.x_insert("reasoning", json!({"effort": keyword}))?;
		}

		// -- Tools
		if let Some(tools) = tools {
			payload.x_insert("/tools", tools)?;
		}

		// -- Response format
		let response_format = if let Some(response_format) = chat_options.response_format() {
			match response_format {
				ChatResponseFormat::JsonMode => Some(json!({"type": "json_object"})),
				ChatResponseFormat::JsonSpec(st_json) => {
					// Strict structured output requires `additionalProperties: false`
					// on every `"type": "object"` node of the schema.
					let mut schema = st_json.schema.clone();
					schema.x_walk(|parent_map, name| {
						if name == "type" {
							let typ = parent_map.get("type").and_then(|v| v.as_str()).unwrap_or("");
							if typ == "object" {
								parent_map.insert("additionalProperties".to_string(), false.into());
							}
						}
						true
					});
					Some(json!({
						"type": "json_schema",
						"name": st_json.name.clone(),
						"strict": true,
						"schema": schema,
					}))
				}
			}
		} else {
			None
		};

		// -- text options
		// In the Responses API, both the format and verbosity live under the `text` object.
		let verbosity = chat_options.verbosity().and_then(|v| v.as_keyword());
		if response_format.is_some() || verbosity.is_some() {
			let mut value_map = Map::new();
			if let Some(verbosity) = verbosity {
				value_map.insert("verbosity".into(), verbosity.into());
			}
			if let Some(response_format) = response_format {
				value_map.insert("format".into(), response_format);
			}
			payload.x_insert("text", value_map)?;
		}

		// -- Sampling and limit options
		if let Some(temperature) = chat_options.temperature() {
			payload.x_insert("temperature", temperature)?;
		}
		if !chat_options.stop_sequences().is_empty() {
			// NOTE(review): confirm the Responses API accepts a `stop` parameter;
			// the sequences are forwarded here as-is from the chat options.
			payload.x_insert("stop", chat_options.stop_sequences())?;
		}
		if let Some(max_tokens) = chat_options.max_tokens() {
			// FIX: the Responses API parameter is `max_output_tokens`;
			// `max_tokens` is a Chat Completions parameter and is rejected by this endpoint.
			payload.x_insert("max_output_tokens", max_tokens)?;
		}
		if let Some(top_p) = chat_options.top_p() {
			payload.x_insert("top_p", top_p)?;
		}
		if let Some(seed) = chat_options.seed() {
			payload.x_insert("seed", seed)?;
		}

		Ok(WebRequestData { url, headers, payload })
	}

	/// Parses a full (non-streaming) Responses API body into a `ChatResponse`.
	fn to_chat_response(
		model_iden: ModelIden,
		web_response: WebResponse,
		options_set: ChatOptionsSet<'_, '_>,
	) -> Result<ChatResponse> {
		let WebResponse { body, .. } = web_response;

		// Keep a copy of the raw body only when the caller asked for it.
		let captured_raw_body = options_set.capture_raw_body().unwrap_or_default().then(|| body.clone());

		let resp: RespResponse = serde_json::from_value(body)?;

		let provider_model_iden = model_iden.from_name(&resp.model);
		let usage = resp.usage.map(Usage::from).unwrap_or_default();

		// Fold every output item into the message content.
		let mut content: MessageContent = MessageContent::default();
		// NOTE(review): reasoning content is not currently extracted from output items.
		let reasoning_content: Option<String> = None;
		for output_item in resp.output {
			let parts = ContentPart::from_resp_output_item(output_item)?;
			content.extend(parts);
		}

		Ok(ChatResponse {
			content,
			reasoning_content,
			model_iden,
			provider_model_iden,
			stop_reason: Some(StopReason::from(resp.status)),
			usage,
			captured_raw_body,
		})
	}

	/// Wraps the pending request into an SSE-backed `ChatStream`.
	fn to_chat_stream(
		model_iden: ModelIden,
		reqwest_builder: RequestBuilder,
		options_sets: ChatOptionsSet<'_, '_>,
	) -> Result<ChatStreamResponse> {
		let event_source = EventSourceStream::new(reqwest_builder);
		let openai_stream = OpenAIRespStreamer::new(event_source, model_iden.clone(), options_sets);
		let chat_stream = ChatStream::from_inter_stream(openai_stream);
		Ok(ChatStreamResponse {
			model_iden,
			stream: chat_stream,
		})
	}

	/// Embeddings are not supported by the Responses endpoint.
	fn to_embed_request_data(
		_service_target: ServiceTarget,
		_embed_req: crate::embed::EmbedRequest,
		_options_set: crate::embed::EmbedOptionsSet<'_, '_>,
	) -> Result<WebRequestData> {
		Err(crate::Error::AdapterNotSupported {
			adapter_kind: crate::adapter::AdapterKind::OpenAIResp,
			feature: "embeddings".to_string(),
		})
	}

	/// Embeddings are not supported by the Responses endpoint.
	fn to_embed_response(
		_model_iden: ModelIden,
		_web_response: WebResponse,
		_options_set: crate::embed::EmbedOptionsSet<'_, '_>,
	) -> Result<crate::embed::EmbedResponse> {
		Err(crate::Error::AdapterNotSupported {
			adapter_kind: crate::adapter::AdapterKind::OpenAIResp,
			feature: "embeddings".to_string(),
		})
	}
}
impl OpenAIRespAdapter {
	/// Resolves the service URL for the given service type, preserving any
	/// query parameters already present on the configured base URL.
	pub(in crate::adapter::adapters) fn util_get_service_url(
		_model: &ModelIden,
		service_type: ServiceType,
		default_endpoint: Endpoint,
	) -> Result<String> {
		let base_url = default_endpoint.base_url();
		let base_url = reqwest::Url::parse(base_url)
			.map_err(|err| Error::Internal(format!("Cannot parse url: {base_url}. Cause:\n{err}")))?;
		// `Url::join` drops the query string, so carry it over manually afterward.
		let original_query_params = base_url.query().to_owned();

		let suffix = match service_type {
			ServiceType::Chat | ServiceType::ChatStream => "responses",
			ServiceType::Embed => "embeddings",
		};

		let mut full_url = base_url.join(suffix).map_err(|err| {
			// FIX: corrected "joing" -> "join" typo in the error message.
			Error::Internal(format!(
				"Cannot join url suffix '{suffix}' for base_url '{base_url}'. Cause:\n{err}"
			))
		})?;
		full_url.set_query(original_query_params);

		Ok(full_url.to_string())
	}

	/// Converts a `ChatRequest` into Responses API `input` items plus the `tools` array.
	fn into_openai_request_parts(_model_iden: &ModelIden, chat_req: ChatRequest) -> Result<OpenAIRespRequestParts> {
		let mut input_items: Vec<Value> = Vec::new();

		// -- Process the request-level system
		if let Some(system_msg) = chat_req.system {
			input_items.push(json!({"role": "system", "content": system_msg}));
		}

		// Counter used to synthesize a filename for binary parts that have none.
		// (renamed from `unamed_file_count` for spelling)
		let mut unnamed_file_count = 0;

		// -- Process the messages
		for msg in chat_req.messages {
			match msg.role {
				ChatRole::System => {
					// Only the joined text parts are sent for system messages.
					if let Some(content) = msg.content.into_joined_texts() {
						input_items.push(json!({"role": "system", "content": content}))
					}
				}
				ChatRole::User => {
					if msg.content.is_text_only() {
						// Simple case: a single text content string.
						let content = json!(msg.content.joined_texts().unwrap_or_else(String::new));
						input_items.push(json!({"role": "user", "content": content}));
					} else {
						// Multi-part case: build an array of `input_*` parts.
						let mut values: Vec<Value> = Vec::new();
						for part in msg.content {
							match part {
								ContentPart::Text(content) => {
									values.push(json!({"type": "input_text", "text": content}))
								}
								ContentPart::Binary(mut binary) => {
									let is_image = binary.is_image();
									if is_image {
										let image_url = binary.into_url();
										let input_image = json!({
											"type": "input_image",
											"detail": "auto",
											"image_url": image_url
										});
										values.push(input_image);
									} else {
										let mut input_file = Map::new();
										input_file.insert("type".into(), "input_file".into());
										if let Some(file_name) = binary.name.take() {
											input_file.insert("filename".into(), file_name.into());
										} else {
											unnamed_file_count += 1;
											input_file
												.insert("filename".into(), format!("file-{unnamed_file_count}").into());
										}
										// Data URLs go into `file_data`; regular URLs into `file_url`.
										let file_url = binary.into_url();
										if file_url.starts_with("data") {
											input_file.insert("file_data".into(), file_url.into());
										} else {
											input_file.insert("file_url".into(), file_url.into());
										}
										let input_file: Value = input_file.into();
										values.push(input_file);
									}
								}
								// These parts do not belong in a user message; skip them.
								ContentPart::ToolCall(_) => (),
								ContentPart::ToolResponse(_) => (),
								ContentPart::ThoughtSignature(_) => (),
								ContentPart::ReasoningContent(_) => (),
								ContentPart::Custom(_) => {}
							}
						}
						input_items.push(json!({"role": "user", "content": values}));
					}
				}
				ChatRole::Assistant => {
					// Text parts accumulate into an assistant `message` item; tool calls
					// become top-level `function_call` items (flushing any pending text first
					// to keep the original ordering).
					let mut item_message_content: Vec<Value> = Vec::new();
					for part in msg.content {
						match part {
							ContentPart::Text(text) => {
								item_message_content.push(json!({
									"type": "output_text",
									"text": text
								}));
							}
							ContentPart::ToolCall(tool_call) => {
								if !item_message_content.is_empty() {
									input_items.push(json!({
										"type": "message",
										"role": "assistant",
										"content": item_message_content
									}));
									item_message_content = Vec::new();
								}
								input_items.push(json!({
									"type": "function_call",
									"call_id": tool_call.call_id,
									"name": tool_call.fn_name,
									"arguments": tool_call.fn_arguments.to_string(),
								}))
							}
							// Unsupported parts in an assistant history message; skip them.
							ContentPart::Binary(_) => {}
							ContentPart::ToolResponse(_) => {}
							ContentPart::ThoughtSignature(_) => {}
							ContentPart::ReasoningContent(_) => {}
							ContentPart::Custom(_) => {}
						}
					}
					// Flush any trailing text content.
					if !item_message_content.is_empty() {
						input_items.push(json!({
							"type": "message",
							"role": "assistant",
							"content": item_message_content
						}));
					}
				}
				ChatRole::Tool => {
					// Each tool response becomes a `function_call_output` item.
					for part in msg.content {
						if let ContentPart::ToolResponse(tool_response) = part {
							input_items.push(json!({
								"type": "function_call_output",
								"call_id": tool_response.call_id,
								"output": tool_response.content,
							}))
						}
					}
				}
			}
		}

		// -- Process the tools
		let tools = chat_req
			.tools
			.map(|tools| tools.into_iter().map(Self::tool_to_openai_tool).collect::<Result<Vec<Value>>>())
			.transpose()?;

		Ok(OpenAIRespRequestParts { input_items, tools })
	}

	/// Serializes one `Tool` into the Responses API tool JSON
	/// (built-in `web_search`, or a `function` tool with its schema).
	fn tool_to_openai_tool(tool: Tool) -> Result<Value> {
		let Tool {
			name,
			description,
			schema,
			config,
		} = tool;

		let name = match name {
			ToolName::WebSearch => "web_search".to_string(),
			ToolName::Custom(name) => name,
		};

		let tool_value = match name.as_ref() {
			"web_search" => {
				let mut tool_value = json!({"type": "web_search"});
				match config {
					Some(ToolConfig::WebSearch(_ws_config)) => {
						// No dedicated mapping for the typed web-search config yet.
					}
					Some(ToolConfig::Custom(config_value)) => {
						// Custom config values are merged verbatim into the tool object.
						tool_value.x_merge(config_value)?;
					}
					None => (),
				};
				tool_value
			}
			name => {
				json!({
					"type": "function",
					"name": name,
					"description": description,
					"parameters": schema,
					"strict": false,
				})
			}
		};

		Ok(tool_value)
	}
}
/// Intermediate pieces extracted from a `ChatRequest`, ready to be placed in the payload.
struct OpenAIRespRequestParts {
	// Items for the Responses API `input` array (messages, function calls/outputs).
	input_items: Vec<Value>,
	// Serialized `tools` array, when the request defines tools.
	tools: Option<Vec<Value>>,
}
#[cfg(test)]
mod tests {
	use super::*;
	use crate::adapter::AdapterKind;
	use crate::chat::ChatMessage;

	/// Assistant history messages must serialize their text parts as `output_text`
	/// (not `input_text`, which is reserved for user input).
	#[test]
	fn test_assistant_message_uses_output_text_content_type() {
		// -- Setup & Fixtures
		let model_iden = ModelIden::new(AdapterKind::OpenAIResp, "gpt-5-codex");
		let chat_req = ChatRequest::default()
			.with_system("You are a helpful assistant.")
			.append_message(ChatMessage::user("What's the weather?"))
			.append_message(ChatMessage::assistant("The weather is sunny."));

		// -- Exec
		let parts = OpenAIRespAdapter::into_openai_request_parts(&model_iden, chat_req)
			.expect("Should serialize successfully");

		// -- Check
		let is_assistant_message = |item: &&Value| {
			let item_type = item.get("type").and_then(Value::as_str);
			let item_role = item.get("role").and_then(Value::as_str);
			item_type == Some("message") && item_role == Some("assistant")
		};
		let assistant_msg = parts
			.input_items
			.iter()
			.find(is_assistant_message)
			.expect("Should have an assistant message");

		let content_parts = assistant_msg
			.get("content")
			.and_then(Value::as_array)
			.expect("Assistant message should have content array");
		assert!(!content_parts.is_empty(), "Content should not be empty");

		let first_part_type = content_parts[0]
			.get("type")
			.and_then(Value::as_str)
			.expect("Content should have a type");
		assert_eq!(
			first_part_type, "output_text",
			"Assistant message content should use 'output_text' type, not 'input_text'"
		);
	}
}