use serde::{Deserialize, Serialize};
use crate::llm::ToolCall;
/// Request body for an OpenAI-style chat-completions call.
///
/// Every optional field is dropped from the serialized JSON entirely
/// (`skip_serializing_if`) so the server's own defaults apply instead of
/// an explicit `null`.
#[derive(Debug, Serialize)]
pub struct OpenAIRequest {
    /// Model identifier to route the request to.
    pub model: String,
    /// Conversation history, in the order the model should read it.
    pub messages: Vec<OpenAIMessage>,
    /// Tool definitions the model is allowed to call; omitted when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<OpenAITool>>,
    /// Tool-choice directive; kept as free-form JSON since the API accepts
    /// both a string ("auto"/"none") and a structured object.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<serde_json::Value>,
    /// Sampling temperature; omitted to use the API default.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    /// Cap on generated tokens (the newer `max_completion_tokens` parameter).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_completion_tokens: Option<u64>,
    /// Request server-sent-event streaming when `Some(true)`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
}
/// One chat message in the outgoing request.
///
/// Optional fields are omitted from the JSON when `None`, which matters
/// because the API rejects messages carrying fields that don't apply to
/// their role (e.g. `tool_call_id` on a non-tool message).
#[derive(Debug, Serialize)]
pub struct OpenAIMessage {
    /// Message role, e.g. "system", "user", "assistant", or "tool".
    pub role: String,
    /// Message content; free-form JSON because the API accepts either a
    /// plain string or an array of content parts.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub content: Option<serde_json::Value>,
    /// Optional participant name attached to the message.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Tool calls previously emitted by the assistant (assistant messages only).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<ToolCall>>,
    /// Id of the tool call this message answers (tool-role messages only).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_call_id: Option<String>,
}
/// A tool made available to the model in the request.
#[derive(Debug, Serialize)]
pub struct OpenAITool {
    /// Tool kind; serialized as the JSON key `"type"` (presumably always
    /// "function" — confirm against the call sites that build this).
    #[serde(rename = "type")]
    pub tool_type: String,
    /// The function definition the model may invoke.
    pub function: OpenAIFunction,
}
/// Function schema advertised to the model inside an [`OpenAITool`].
#[derive(Debug, Serialize)]
pub struct OpenAIFunction {
    /// Function name the model will reference in tool calls.
    pub name: String,
    /// Human-readable description shown to the model.
    pub description: String,
    /// JSON-Schema object describing the function's parameters.
    pub parameters: serde_json::Map<String, serde_json::Value>,
    /// Structured-outputs strict mode; always serialized (no skip), so
    /// `false` is sent explicitly rather than omitted.
    pub strict: bool,
}
/// Top-level (non-streaming) chat-completions response.
#[derive(Debug, Deserialize)]
pub struct OpenAIResponse {
    /// Generated completions; typically a single choice.
    pub choices: Vec<OpenAIChoice>,
    /// Token accounting; `Option` so responses without a usage block
    /// still deserialize.
    pub usage: Option<OpenAIUsage>,
}
/// One completion choice within an [`OpenAIResponse`].
#[derive(Debug, Deserialize)]
pub struct OpenAIChoice {
    /// The assistant message produced for this choice.
    pub message: OpenAIMessageResponse,
    /// Why generation stopped (e.g. "stop", "tool_calls"); `Option` so a
    /// missing field doesn't fail deserialization.
    pub finish_reason: Option<String>,
}
/// Assistant message returned inside a choice.
///
/// All fields are `Option` because any of them can be absent: content is
/// missing on pure tool-call turns, `tool_calls` on plain text turns.
#[derive(Debug, Deserialize)]
pub struct OpenAIMessageResponse {
    /// Text content of the reply, if any.
    pub content: Option<String>,
    /// Tool invocations requested by the model, if any.
    pub tool_calls: Option<Vec<ToolCall>>,
    /// Reasoning trace emitted by some OpenAI-compatible providers
    /// (non-standard field; absent on stock OpenAI responses).
    pub reasoning_content: Option<String>,
}
/// Token accounting attached to a response.
#[derive(Debug, Deserialize)]
pub struct OpenAIUsage {
    /// Tokens consumed by the prompt.
    pub prompt_tokens: u64,
    /// Tokens generated in the completion.
    pub completion_tokens: u64,
    /// Sum of prompt and completion tokens.
    pub total_tokens: u64,
    /// Prompt-token breakdown (cache hits etc.); defaulted to `None` so
    /// providers that omit the object still deserialize.
    #[serde(default)]
    pub prompt_tokens_details: Option<OpenAIPromptTokenDetails>,
}
#[derive(Deserialize, Default)]
pub struct OpenAIPromptTokenDetails {
pub cached_tokens: u64,
}