use crate::{
common::{
auth::{AuthProvider, OpenAIAuth},
client::create_http_client,
errors::{OpenAIToolError, Result},
message::Message,
models::{ChatModel, ParameterRestriction},
structured_output::Schema,
tool::Tool,
},
responses::response::{CompactedResponse, DeleteResponseResult, InputItemsListResponse, InputTokensResponse, Response},
};
use derive_new::new;
use reqwest;
use serde::{ser::SerializeStruct, Serialize};
use std::collections::HashMap;
use std::time::Duration;
use strum::{Display, EnumString};
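/// Extra output to request via the `include` parameter of the Responses API,
/// e.g. web-search results, code-interpreter outputs, or encrypted reasoning content.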
#[derive(Debug, Clone, EnumString, Display, Serialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum Include {
#[strum(serialize = "web_search_call.results")]
#[serde(rename = "web_search_call.results")]
WebSearchCall,
#[strum(serialize = "code_interpreter_call.outputs")]
#[serde(rename = "code_interpreter_call.outputs")]
CodeInterpreterCall,
#[strum(serialize = "computer_call_output.output.image_url")]
#[serde(rename = "computer_call_output.output.image_url")]
ImageUrlInComputerCallOutput,
#[strum(serialize = "file_search_call.results")]
#[serde(rename = "file_search_call.results")]
FileSearchCall,
#[strum(serialize = "message.input_image.image_url")]
#[serde(rename = "message.input_image.image_url")]
ImageUrlInInputMessages,
#[strum(serialize = "message.output_text.logprobs")]
#[serde(rename = "message.output_text.logprobs")]
LogprobsInOutput,
#[strum(serialize = "reasoning.encrypted_content")]
#[serde(rename = "reasoning.encrypted_content")]
ReasoningEncryptedContent,
}
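/// How much reasoning effort the model should spend, from `none` to `xhigh`.
/// Not every model accepts every level; unsupported values are rejected server-side.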
#[derive(Debug, Clone, Serialize, EnumString, Display, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningEffort {
#[strum(serialize = "none")]
#[serde(rename = "none")]
None,
#[strum(serialize = "minimal")]
#[serde(rename = "minimal")]
Minimal,
#[strum(serialize = "low")]
#[serde(rename = "low")]
Low,
#[strum(serialize = "medium")]
#[serde(rename = "medium")]
Medium,
#[strum(serialize = "high")]
#[serde(rename = "high")]
High,
#[strum(serialize = "xhigh")]
#[serde(rename = "xhigh")]
Xhigh,
}
#[derive(Debug, Clone, Serialize, EnumString, Display, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningSummary {
#[strum(serialize = "auto")]
#[serde(rename = "auto")]
Auto,
#[strum(serialize = "concise")]
#[serde(rename = "concise")]
Concise,
#[strum(serialize = "detailed")]
#[serde(rename = "detailed")]
Detailed,
}
#[derive(Debug, Clone, Serialize)]
pub struct Reasoning {
pub effort: Option<ReasoningEffort>,
pub summary: Option<ReasoningSummary>,
}
#[derive(Debug, Clone, Serialize, EnumString, Display, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum TextVerbosity {
#[strum(serialize = "low")]
#[serde(rename = "low")]
Low,
#[strum(serialize = "medium")]
#[serde(rename = "medium")]
Medium,
#[strum(serialize = "high")]
#[serde(rename = "high")]
High,
}
#[derive(Debug, Clone, Serialize)]
pub struct TextConfig {
pub verbosity: Option<TextVerbosity>,
}
#[derive(Debug, Clone, Serialize, EnumString, Display, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ToolChoiceMode {
#[strum(serialize = "none")]
#[serde(rename = "none")]
None,
#[strum(serialize = "auto")]
#[serde(rename = "auto")]
Auto,
#[strum(serialize = "required")]
#[serde(rename = "required")]
Required,
}
#[derive(Debug, Clone, Serialize, EnumString, Display, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum Truncation {
#[strum(serialize = "auto")]
#[serde(rename = "auto")]
Auto,
#[strum(serialize = "disabled")]
#[serde(rename = "disabled")]
Disabled,
}
#[derive(Debug, Clone, Serialize)]
pub struct NamedFunctionChoice {
#[serde(rename = "type")]
pub type_name: String,
pub name: String,
}
impl NamedFunctionChoice {
pub fn new<S: AsRef<str>>(name: S) -> Self {
Self { type_name: "function".to_string(), name: name.as_ref().to_string() }
}
}
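/// Tool-choice setting: either a plain mode (`none` / `auto` / `required`) or a
/// specific function by name, serialized untagged.
///
/// A minimal sketch (the function name is illustrative):
/// ```ignore
/// let choice = ToolChoice::Function(NamedFunctionChoice::new("get_weather"));
/// // serializes as {"type":"function","name":"get_weather"}
/// ```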
#[derive(Debug, Clone, Serialize)]
#[serde(untagged)]
pub enum ToolChoice {
Simple(ToolChoiceMode),
Function(NamedFunctionChoice),
}
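/// Reference to a stored prompt template by ID, optionally with substitution variables.
///
/// A sketch (the prompt ID is illustrative):
/// ```ignore
/// let mut vars = HashMap::new();
/// vars.insert("city".to_string(), "Tokyo".to_string());
/// let prompt = Prompt::with_variables("pmpt_abc123", vars);
/// ```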
#[derive(Debug, Clone, Serialize)]
pub struct Prompt {
pub id: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub variables: Option<HashMap<String, String>>,
}
impl Prompt {
pub fn new<S: AsRef<str>>(id: S) -> Self {
Self { id: id.as_ref().to_string(), variables: None }
}
pub fn with_variables<S: AsRef<str>>(id: S, variables: HashMap<String, String>) -> Self {
Self { id: id.as_ref().to_string(), variables: Some(variables) }
}
}
#[derive(Debug, Clone, Serialize)]
pub struct StreamOptions {
pub include_obfuscation: bool,
}
#[derive(Debug, Clone, Default, Serialize, new)]
pub struct Format {
pub format: Schema,
}
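/// Request body for `POST /responses`. Serialization is implemented manually
/// below because several builder fields map onto shared API fields.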
#[derive(Debug, Clone, Default, new)]
#[allow(clippy::too_many_arguments)]
pub struct Body {
pub model: ChatModel,
pub instructions: Option<String>,
pub plain_text_input: Option<String>,
pub messages_input: Option<Vec<Message>>,
pub tools: Option<Vec<Tool>>,
pub tool_choice: Option<ToolChoice>,
pub prompt: Option<Prompt>,
pub prompt_cache_key: Option<String>,
pub prompt_cache_retention: Option<String>,
pub structured_output: Option<Format>,
pub temperature: Option<f64>,
pub max_output_tokens: Option<usize>,
pub max_tool_calls: Option<usize>,
pub metadata: Option<HashMap<String, serde_json::Value>>,
pub parallel_tool_calls: Option<bool>,
pub include: Option<Vec<Include>>,
pub background: Option<bool>,
pub conversation: Option<String>,
pub previous_response_id: Option<String>,
pub reasoning: Option<Reasoning>,
pub text: Option<TextConfig>,
pub safety_identifier: Option<String>,
pub service_tier: Option<String>,
pub store: Option<bool>,
pub stream: Option<bool>,
pub stream_options: Option<StreamOptions>,
pub top_logprobs: Option<usize>,
pub top_p: Option<f64>,
pub truncation: Option<Truncation>,
}
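// Manual impl: `plain_text_input` and `messages_input` both serialize to the API's
// single `input` field, and `structured_output` is emitted under `text`, which a
// derived implementation cannot express.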
impl Serialize for Body {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
// Field-count hint for `serialize_struct`; serde_json ignores it, so it need not be exact.
let mut state = serializer.serialize_struct("ResponsesBody", 4)?;
state.serialize_field("model", &self.model)?;
if let Some(input) = &self.plain_text_input {
state.serialize_field("input", input)?;
} else if let Some(messages) = &self.messages_input {
state.serialize_field("input", messages)?;
} else {
return Err(serde::ser::Error::custom("Either plain_text_input or messages_input must be set."));
}
if self.temperature.is_some() {
state.serialize_field("temperature", &self.temperature)?;
}
if self.instructions.is_some() {
state.serialize_field("instructions", &self.instructions)?;
}
if self.tools.is_some() {
state.serialize_field("tools", &self.tools)?;
}
if self.tool_choice.is_some() {
state.serialize_field("tool_choice", &self.tool_choice)?;
}
if self.prompt.is_some() {
state.serialize_field("prompt", &self.prompt)?;
}
if self.prompt_cache_key.is_some() {
state.serialize_field("prompt_cache_key", &self.prompt_cache_key)?;
}
if self.prompt_cache_retention.is_some() {
state.serialize_field("prompt_cache_retention", &self.prompt_cache_retention)?;
}
// `structured_output` is sent under the API's `text` field.
if self.structured_output.is_some() {
state.serialize_field("text", &self.structured_output)?;
}
if self.max_output_tokens.is_some() {
state.serialize_field("max_output_tokens", &self.max_output_tokens)?;
}
if self.max_tool_calls.is_some() {
state.serialize_field("max_tool_calls", &self.max_tool_calls)?;
}
if self.metadata.is_some() {
state.serialize_field("metadata", &self.metadata)?;
}
if self.parallel_tool_calls.is_some() {
state.serialize_field("parallel_tool_calls", &self.parallel_tool_calls)?;
}
if self.include.is_some() {
state.serialize_field("include", &self.include)?;
}
if self.background.is_some() {
state.serialize_field("background", &self.background)?;
}
if self.conversation.is_some() {
state.serialize_field("conversation", &self.conversation)?;
}
if self.previous_response_id.is_some() {
state.serialize_field("previous_response_id", &self.previous_response_id)?;
}
if self.reasoning.is_some() {
state.serialize_field("reasoning", &self.reasoning)?;
}
// Skipped when `structured_output` is set, since that already emitted a `text` field.
if self.text.is_some() && self.structured_output.is_none() {
state.serialize_field("text", &self.text)?;
}
if self.safety_identifier.is_some() {
state.serialize_field("safety_identifier", &self.safety_identifier)?;
}
if self.service_tier.is_some() {
state.serialize_field("service_tier", &self.service_tier)?;
}
if self.store.is_some() {
state.serialize_field("store", &self.store)?;
}
if self.stream.is_some() {
state.serialize_field("stream", &self.stream)?;
}
if self.stream_options.is_some() {
state.serialize_field("stream_options", &self.stream_options)?;
}
if self.top_logprobs.is_some() {
state.serialize_field("top_logprobs", &self.top_logprobs)?;
}
if self.top_p.is_some() {
state.serialize_field("top_p", &self.top_p)?;
}
if self.truncation.is_some() {
state.serialize_field("truncation", &self.truncation)?;
}
state.end()
}
}
const RESPONSES_PATH: &str = "responses";
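/// Client for the OpenAI Responses API.
///
/// A minimal usage sketch (the model name is illustrative; assumes
/// `OPENAI_API_KEY` is set in the environment):
/// ```ignore
/// let mut responses = Responses::new();
/// responses
///     .model(ChatModel::from("gpt-4.1-mini"))
///     .instructions("You are a helpful assistant.")
///     .str_message("Hello!");
/// let response = responses.complete().await?;
/// ```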
#[derive(Debug, Clone)]
pub struct Responses {
auth: AuthProvider,
user_agent: String,
pub request_body: Body,
timeout: Option<Duration>,
}
impl Default for Responses {
fn default() -> Self {
Self::new()
}
}
impl Responses {
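/// Creates a client with OpenAI credentials read from the environment.
///
/// Panics if the credentials cannot be loaded; prefer [`Self::with_auth`] or
/// [`Self::detect_provider`] when fallible construction is needed.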
pub fn new() -> Self {
let auth = AuthProvider::openai_from_env().expect("Failed to load OpenAI auth from environment");
Self { auth, user_agent: "".into(), request_body: Body::default(), timeout: None }
}
#[deprecated(since = "0.3.0", note = "Use `with_auth()` with custom OpenAIAuth for custom endpoints")]
pub fn from_endpoint<T: AsRef<str>>(endpoint: T) -> Self {
let auth = AuthProvider::openai_from_env().expect("Failed to load OpenAI auth from environment");
let mut responses = Self { auth, user_agent: "".into(), request_body: Body::default(), timeout: None };
responses.base_url(endpoint.as_ref().trim_end_matches("/responses"));
responses
}
pub fn with_model(model: ChatModel) -> Self {
let auth = AuthProvider::openai_from_env().expect("Failed to load OpenAI auth from environment");
Self { auth, user_agent: "".into(), request_body: Body { model, ..Default::default() }, timeout: None }
}
pub fn with_auth(auth: AuthProvider) -> Self {
Self { auth, user_agent: "".into(), request_body: Body::default(), timeout: None }
}
pub fn azure() -> Result<Self> {
let auth = AuthProvider::azure_from_env()?;
Ok(Self { auth, user_agent: "".into(), request_body: Body::default(), timeout: None })
}
pub fn detect_provider() -> Result<Self> {
let auth = AuthProvider::from_env()?;
Ok(Self { auth, user_agent: "".into(), request_body: Body::default(), timeout: None })
}
pub fn with_url<S: Into<String>>(base_url: S, api_key: S) -> Self {
let auth = AuthProvider::from_url_with_key(base_url, api_key);
Self { auth, user_agent: "".into(), request_body: Body::default(), timeout: None }
}
pub fn from_url<S: Into<String>>(url: S) -> Result<Self> {
let auth = AuthProvider::from_url(url)?;
Ok(Self { auth, user_agent: "".into(), request_body: Body::default(), timeout: None })
}
pub fn auth(&self) -> &AuthProvider {
&self.auth
}
pub fn base_url<T: AsRef<str>>(&mut self, url: T) -> &mut Self {
if let AuthProvider::OpenAI(ref openai_auth) = self.auth {
let new_auth = OpenAIAuth::new(openai_auth.api_key()).with_base_url(url.as_ref());
self.auth = AuthProvider::OpenAI(new_auth);
} else {
tracing::warn!("base_url() is only supported for OpenAI provider. Use azure() or with_auth() for Azure.");
}
self
}
pub fn model(&mut self, model: ChatModel) -> &mut Self {
self.request_body.model = model;
self
}
#[deprecated(since = "0.2.0", note = "Use `model(ChatModel)` instead for type safety")]
pub fn model_id<T: AsRef<str>>(&mut self, model_id: T) -> &mut Self {
self.request_body.model = ChatModel::from(model_id.as_ref());
self
}
pub fn timeout(&mut self, timeout: Duration) -> &mut Self {
self.timeout = Some(timeout);
self
}
pub fn user_agent<T: AsRef<str>>(&mut self, user_agent: T) -> &mut Self {
self.user_agent = user_agent.as_ref().to_string();
self
}
pub fn instructions<T: AsRef<str>>(&mut self, instructions: T) -> &mut Self {
self.request_body.instructions = Some(instructions.as_ref().to_string());
self
}
pub fn str_message<T: AsRef<str>>(&mut self, input: T) -> &mut Self {
self.request_body.plain_text_input = Some(input.as_ref().to_string());
self
}
pub fn messages(&mut self, messages: Vec<Message>) -> &mut Self {
self.request_body.messages_input = Some(messages);
self
}
pub fn tools(&mut self, tools: Vec<Tool>) -> &mut Self {
self.request_body.tools = Some(tools);
self
}
pub fn tool_choice(&mut self, tool_choice: ToolChoice) -> &mut Self {
self.request_body.tool_choice = Some(tool_choice);
self
}
pub fn prompt(&mut self, prompt: Prompt) -> &mut Self {
self.request_body.prompt = Some(prompt);
self
}
pub fn prompt_cache_key<T: AsRef<str>>(&mut self, key: T) -> &mut Self {
self.request_body.prompt_cache_key = Some(key.as_ref().to_string());
self
}
pub fn prompt_cache_retention<T: AsRef<str>>(&mut self, retention: T) -> &mut Self {
self.request_body.prompt_cache_retention = Some(retention.as_ref().to_string());
self
}
pub fn structured_output(&mut self, text_format: Schema) -> &mut Self {
self.request_body.structured_output = Option::from(Format::new(text_format));
self
}
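/// Sets the sampling temperature. Panics if outside `0.0..=2.0`; values the
/// selected model does not support are ignored with a warning.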
pub fn temperature(&mut self, temperature: f64) -> &mut Self {
assert!((0.0..=2.0).contains(&temperature), "Temperature must be between 0.0 and 2.0, got {}", temperature);
let support = self.request_body.model.parameter_support();
match support.temperature {
ParameterRestriction::FixedValue(fixed) => {
if (temperature - fixed).abs() > f64::EPSILON {
tracing::warn!("Model '{}' only supports temperature={}. Ignoring temperature={}.", self.request_body.model, fixed, temperature);
return self;
}
}
ParameterRestriction::NotSupported => {
tracing::warn!("Model '{}' does not support temperature parameter. Ignoring.", self.request_body.model);
return self;
}
ParameterRestriction::Any => {}
}
self.request_body.temperature = Some(temperature);
self
}
pub fn max_output_tokens(&mut self, max_tokens: usize) -> &mut Self {
self.request_body.max_output_tokens = Some(max_tokens);
self
}
pub fn max_tool_calls(&mut self, max_calls: usize) -> &mut Self {
self.request_body.max_tool_calls = Some(max_calls);
self
}
pub fn metadata(&mut self, key: String, value: serde_json::Value) -> &mut Self {
// `HashMap::insert` replaces any existing value for the key, so no separate removal is needed.
self.request_body.metadata.get_or_insert_with(HashMap::new).insert(key, value);
self
}
pub fn parallel_tool_calls(&mut self, enable: bool) -> &mut Self {
self.request_body.parallel_tool_calls = Some(enable);
self
}
pub fn include(&mut self, includes: Vec<Include>) -> &mut Self {
self.request_body.include = Some(includes);
self
}
pub fn background(&mut self, enable: bool) -> &mut Self {
self.request_body.background = Some(enable);
self
}
pub fn conversation<T: AsRef<str>>(&mut self, conversation_id: T) -> &mut Self {
self.request_body.conversation = Some(conversation_id.as_ref().to_string());
self
}
pub fn previous_response_id<T: AsRef<str>>(&mut self, response_id: T) -> &mut Self {
self.request_body.previous_response_id = Some(response_id.as_ref().to_string());
self
}
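/// Configures reasoning effort and summary style for reasoning-capable models.
///
/// A sketch:
/// ```ignore
/// responses.reasoning(ReasoningEffort::Medium, ReasoningSummary::Auto);
/// ```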
pub fn reasoning(&mut self, effort: ReasoningEffort, summary: ReasoningSummary) -> &mut Self {
self.request_body.reasoning = Some(Reasoning { effort: Some(effort), summary: Some(summary) });
self
}
pub fn text_verbosity(&mut self, verbosity: TextVerbosity) -> &mut Self {
self.request_body.text = Some(TextConfig { verbosity: Some(verbosity) });
self
}
pub fn safety_identifier<T: AsRef<str>>(&mut self, safety_id: T) -> &mut Self {
self.request_body.safety_identifier = Some(safety_id.as_ref().to_string());
self
}
pub fn service_tier<T: AsRef<str>>(&mut self, tier: T) -> &mut Self {
self.request_body.service_tier = Some(tier.as_ref().to_string());
self
}
pub fn store(&mut self, enable: bool) -> &mut Self {
self.request_body.store = Some(enable);
self
}
pub fn stream(&mut self, enable: bool) -> &mut Self {
self.request_body.stream = Some(enable);
self
}
pub fn stream_options(&mut self, include_obfuscation: bool) -> &mut Self {
self.request_body.stream_options = Some(StreamOptions { include_obfuscation });
self
}
pub fn top_logprobs(&mut self, n: usize) -> &mut Self {
let support = self.request_body.model.parameter_support();
if !support.top_logprobs {
tracing::warn!("Model '{}' does not support top_logprobs parameter. Ignoring.", self.request_body.model);
return self;
}
self.request_body.top_logprobs = Some(n);
self
}
pub fn top_p(&mut self, p: f64) -> &mut Self {
let support = self.request_body.model.parameter_support();
match support.top_p {
ParameterRestriction::FixedValue(fixed) => {
if (p - fixed).abs() > f64::EPSILON {
tracing::warn!("Model '{}' only supports top_p={}. Ignoring top_p={}.", self.request_body.model, fixed, p);
return self;
}
}
ParameterRestriction::NotSupported => {
tracing::warn!("Model '{}' does not support top_p parameter. Ignoring.", self.request_body.model);
return self;
}
ParameterRestriction::Any => {}
}
self.request_body.top_p = Some(p);
self
}
pub fn truncation(&mut self, truncation: Truncation) -> &mut Self {
self.request_body.truncation = Some(truncation);
self
}
fn is_reasoning_model(&self) -> bool {
self.request_body.model.is_reasoning_model()
}
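/// Sends the request and parses the result into a [`Response`].
///
/// Exactly one of `str_message` / `messages` must have been set. For reasoning
/// models, unsupported sampling parameters (`temperature`, `top_p`,
/// `top_logprobs`) are dropped with a warning before the request is sent.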
pub async fn complete(&self) -> Result<Response> {
if self.request_body.plain_text_input.is_none() && self.request_body.messages_input.is_none() {
return Err(OpenAIToolError::Error("Neither plain text input nor messages are set.".into()));
} else if self.request_body.plain_text_input.is_some() && self.request_body.messages_input.is_some() {
return Err(OpenAIToolError::Error("Both plain text input and messages are set. Please use one of them.".into()));
}
let mut request_body = self.request_body.clone();
if self.is_reasoning_model() {
let model = &self.request_body.model;
if let Some(temp) = request_body.temperature {
if (temp - 1.0).abs() > f64::EPSILON {
tracing::warn!(
"Reasoning model '{}' does not support custom temperature. \
Ignoring temperature={} and using default (1.0).",
model,
temp
);
request_body.temperature = None;
}
}
if let Some(top_p) = request_body.top_p {
if (top_p - 1.0).abs() > f64::EPSILON {
tracing::warn!(
"Reasoning model '{}' does not support custom top_p. \
Ignoring top_p={} and using default (1.0).",
model,
top_p
);
request_body.top_p = None;
}
}
if request_body.top_logprobs.is_some() {
tracing::warn!("Reasoning model '{}' does not support top_logprobs. Ignoring top_logprobs parameter.", model);
request_body.top_logprobs = None;
}
}
let body = serde_json::to_string(&request_body)?;
let (client, headers) = self.create_api_client()?;
let endpoint = self.auth.endpoint(RESPONSES_PATH);
if cfg!(test) {
tracing::info!("Endpoint: {}", endpoint);
let mut body_for_debug = serde_json::to_string_pretty(&request_body).unwrap();
// Guard against an empty API key: `str::replace` with an empty pattern would garble the output.
if !self.auth.api_key().is_empty() {
body_for_debug = body_for_debug.replace(self.auth.api_key(), "*************");
}
tracing::info!("Request body: {}", body_for_debug);
}
match client.post(&endpoint).headers(headers).body(body).send().await.map_err(OpenAIToolError::RequestError) {
Err(e) => {
tracing::error!("Request error: {}", e);
Err(e)
}
Ok(response) if !response.status().is_success() => {
let status = response.status();
let error_text = response.text().await.unwrap_or_else(|_| "Failed to read error response".to_string());
tracing::error!("API error (status: {}): {}", status, error_text);
Err(OpenAIToolError::Error(format!("API request failed with status {}: {}", status, error_text)))
}
Ok(response) => {
let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
if cfg!(test) {
tracing::info!("Response content: {}", content);
}
serde_json::from_str::<Response>(&content).map_err(OpenAIToolError::SerdeJsonError)
}
}
}
fn create_api_client(&self) -> Result<(reqwest::Client, reqwest::header::HeaderMap)> {
let client = create_http_client(self.timeout)?;
let mut headers = reqwest::header::HeaderMap::new();
headers.insert("Content-Type", reqwest::header::HeaderValue::from_static("application/json"));
if !self.user_agent.is_empty() {
headers.insert(
"User-Agent",
reqwest::header::HeaderValue::from_str(&self.user_agent).map_err(|e| OpenAIToolError::Error(format!("Invalid user agent: {}", e)))?,
);
}
self.auth.apply_headers(&mut headers)?;
Ok((client, headers))
}
fn handle_api_error(status: reqwest::StatusCode, content: &str) -> OpenAIToolError {
tracing::error!("API error (status: {}): {}", status, content);
OpenAIToolError::Error(format!("API request failed with status {}: {}", status, content))
}
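/// Retrieves a previously created response by ID.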
pub async fn retrieve(&self, response_id: &str) -> Result<Response> {
let (client, headers) = self.create_api_client()?;
let endpoint = format!("{}/{}", self.auth.endpoint(RESPONSES_PATH), response_id);
match client.get(&endpoint).headers(headers).send().await.map_err(OpenAIToolError::RequestError) {
Err(e) => {
tracing::error!("Request error: {}", e);
Err(e)
}
Ok(response) if !response.status().is_success() => {
let status = response.status();
let error_text = response.text().await.unwrap_or_else(|_| "Failed to read error response".to_string());
Err(Self::handle_api_error(status, &error_text))
}
Ok(response) => {
let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
serde_json::from_str::<Response>(&content).map_err(OpenAIToolError::SerdeJsonError)
}
}
}
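/// Deletes a stored response by ID.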
pub async fn delete(&self, response_id: &str) -> Result<DeleteResponseResult> {
let (client, headers) = self.create_api_client()?;
let endpoint = format!("{}/{}", self.auth.endpoint(RESPONSES_PATH), response_id);
match client.delete(&endpoint).headers(headers).send().await.map_err(OpenAIToolError::RequestError) {
Err(e) => {
tracing::error!("Request error: {}", e);
Err(e)
}
Ok(response) if !response.status().is_success() => {
let status = response.status();
let error_text = response.text().await.unwrap_or_else(|_| "Failed to read error response".to_string());
Err(Self::handle_api_error(status, &error_text))
}
Ok(response) => {
let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
serde_json::from_str::<DeleteResponseResult>(&content).map_err(OpenAIToolError::SerdeJsonError)
}
}
}
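/// Cancels an in-progress response. The API only cancels responses created
/// with `background` set to `true`.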
pub async fn cancel(&self, response_id: &str) -> Result<Response> {
let (client, headers) = self.create_api_client()?;
let endpoint = format!("{}/{}/cancel", self.auth.endpoint(RESPONSES_PATH), response_id);
match client.post(&endpoint).headers(headers).send().await.map_err(OpenAIToolError::RequestError) {
Err(e) => {
tracing::error!("Request error: {}", e);
Err(e)
}
Ok(response) if !response.status().is_success() => {
let status = response.status();
let error_text = response.text().await.unwrap_or_else(|_| "Failed to read error response".to_string());
Err(Self::handle_api_error(status, &error_text))
}
Ok(response) => {
let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
serde_json::from_str::<Response>(&content).map_err(OpenAIToolError::SerdeJsonError)
}
}
}
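/// Lists the input items of a response, with optional `limit` and
/// `after`/`before` cursor pagination.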
pub async fn list_input_items(
&self,
response_id: &str,
limit: Option<u32>,
after: Option<&str>,
before: Option<&str>,
) -> Result<InputItemsListResponse> {
let (client, headers) = self.create_api_client()?;
let base_endpoint = format!("{}/{}/input_items", self.auth.endpoint(RESPONSES_PATH), response_id);
let mut query_params = Vec::new();
if let Some(limit) = limit {
query_params.push(format!("limit={}", limit));
}
if let Some(after) = after {
query_params.push(format!("after={}", after));
}
if let Some(before) = before {
query_params.push(format!("before={}", before));
}
let endpoint = if query_params.is_empty() { base_endpoint } else { format!("{}?{}", base_endpoint, query_params.join("&")) };
match client.get(&endpoint).headers(headers).send().await.map_err(OpenAIToolError::RequestError) {
Err(e) => {
tracing::error!("Request error: {}", e);
Err(e)
}
Ok(response) if !response.status().is_success() => {
let status = response.status();
let error_text = response.text().await.unwrap_or_else(|_| "Failed to read error response".to_string());
Err(Self::handle_api_error(status, &error_text))
}
Ok(response) => {
let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
serde_json::from_str::<InputItemsListResponse>(&content).map_err(OpenAIToolError::SerdeJsonError)
}
}
}
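/// Compacts a conversation via the `compact` endpoint constructed here,
/// optionally overriding the model used for compaction.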
pub async fn compact(&self, previous_response_id: &str, model: Option<&str>) -> Result<CompactedResponse> {
let (client, headers) = self.create_api_client()?;
let endpoint = format!("{}/compact", self.auth.endpoint(RESPONSES_PATH));
let mut body = serde_json::json!({
"previous_response_id": previous_response_id
});
if let Some(model) = model {
body["model"] = serde_json::json!(model);
}
match client.post(&endpoint).headers(headers).body(serde_json::to_string(&body)?).send().await.map_err(OpenAIToolError::RequestError) {
Err(e) => {
tracing::error!("Request error: {}", e);
Err(e)
}
Ok(response) if !response.status().is_success() => {
let status = response.status();
let error_text = response.text().await.unwrap_or_else(|_| "Failed to read error response".to_string());
Err(Self::handle_api_error(status, &error_text))
}
Ok(response) => {
let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
serde_json::from_str::<CompactedResponse>(&content).map_err(OpenAIToolError::SerdeJsonError)
}
}
}
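/// Counts the input tokens a request would consume via the `input_tokens`
/// endpoint, without creating a response.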
pub async fn get_input_tokens(&self, model: &str, input: serde_json::Value) -> Result<InputTokensResponse> {
let (client, headers) = self.create_api_client()?;
let endpoint = format!("{}/input_tokens", self.auth.endpoint(RESPONSES_PATH));
let body = serde_json::json!({
"model": model,
"input": input
});
match client.post(&endpoint).headers(headers).body(serde_json::to_string(&body)?).send().await.map_err(OpenAIToolError::RequestError) {
Err(e) => {
tracing::error!("Request error: {}", e);
Err(e)
}
Ok(response) if !response.status().is_success() => {
let status = response.status();
let error_text = response.text().await.unwrap_or_else(|_| "Failed to read error response".to_string());
Err(Self::handle_api_error(status, &error_text))
}
Ok(response) => {
let content = response.text().await.map_err(OpenAIToolError::RequestError)?;
serde_json::from_str::<InputTokensResponse>(&content).map_err(OpenAIToolError::SerdeJsonError)
}
}
}
}