use reqwest::ClientBuilder; use serde::{Deserialize, Serialize}; use std::time::Duration;
/// JSON payload sent to the chat-completions endpoint.
///
/// Field order is preserved as declared so the serialized JSON layout
/// stays stable.
#[derive(Serialize, Debug)]
pub struct Request {
    /// Model identifier (e.g. `"gpt-4o"`).
    pub model: String,
    /// Messages making up the conversation sent to the model.
    pub messages: Vec<Message>,
    /// Sampling temperature forwarded verbatim in the request body.
    pub temperature: f64,
    /// `response_format` field, passed through as raw JSON.
    pub response_format: serde_json::Value,
}
/// A single chat message: the speaker (`role`) and its text (`content`).
///
/// Used both in outgoing requests and in deserialized responses, hence
/// the dual `Serialize`/`Deserialize` derive.
#[derive(Serialize, Deserialize, Debug)]
pub struct Message {
    /// Message author, e.g. `"user"`.
    pub role: String,
    /// Message body text.
    pub content: String,
}
/// Top-level chat-completion response body.
#[derive(Deserialize, Debug)]
// Several fields are deserialized for completeness but never read.
#[allow(dead_code)]
pub struct Response {
    /// Server-assigned completion id.
    pub id: String,
    /// Object type tag returned by the API.
    pub object: String,
    /// Creation timestamp (seconds — presumably Unix epoch; not read here).
    pub created: u64,
    /// Model that produced the completion.
    pub model: String,
    /// Generated completions; the first one is what `send_request` returns.
    pub choices: Vec<Choice>,
    /// Token accounting for this request.
    pub usage: Usage,
}
/// One generated completion inside a `Response`.
#[derive(Deserialize, Debug)]
// `finish_reason` and `index` are deserialized but currently unused.
#[allow(dead_code)]
pub struct Choice {
    /// The assistant message produced by the model.
    pub message: Message,
    /// Why generation stopped, when the API reports it.
    pub finish_reason: Option<String>,
    /// Position of this choice in the response's choice list.
    pub index: usize,
}
/// Token-usage statistics reported with a completion.
#[derive(Serialize, Deserialize, Debug)]
pub struct Usage {
    /// Tokens consumed by the prompt.
    pub prompt_tokens: u32,
    /// Tokens generated in the completion.
    pub completion_tokens: u32,
    /// Sum of prompt and completion tokens.
    pub total_tokens: u32,
}
/// Sends a chat-completion request and returns the content of the first
/// choice in the response.
///
/// # Arguments
/// * `model` – model identifier; models whose name starts with `"gpt-5"`
///   are forced to temperature `1.0` (see comment below).
/// * `prompt` – text sent as a single `"user"` message.
/// * `timeout_in_sec` – overall HTTP timeout for the request, in seconds.
/// * `token` – API bearer token placed in the `Authorization` header.
/// * `response_format` – value forwarded verbatim as the request's
///   `response_format` field.
///
/// # Errors
/// Returns an error if the client cannot be built, the HTTP request fails
/// or times out, the body is not the expected JSON shape (the raw body is
/// embedded in the error message), or the response contains no choices.
pub async fn send_request(
    // `&str` instead of `&String`: callers passing `&String` still work via
    // deref coercion, and `&str` is the idiomatic borrowed-string parameter.
    model: &str,
    prompt: &str,
    timeout_in_sec: u64,
    token: &str,
    response_format: serde_json::Value,
) -> Result<String, Box<dyn std::error::Error>> {
    let client = ClientBuilder::new()
        .timeout(Duration::from_secs(timeout_in_sec))
        .build()?;
    // gpt-5-family models only accept the default temperature of 1.0;
    // everything else uses the project-wide default.
    let temperature = if model.starts_with("gpt-5") {
        1.0
    } else {
        crate::DEFAULT_TEMPERATURE
    };
    let request = Request {
        model: model.to_owned(),
        messages: vec![Message {
            role: String::from("user"),
            content: prompt.to_owned(),
        }],
        temperature,
        // Owned value moved in directly — the previous `.clone()` here was a
        // redundant allocation, since the argument is never used afterwards.
        response_format,
    };
    let http_resp = client
        .post("https://api.openai.com/v1/chat/completions")
        .header("Authorization", format!("Bearer {}", token))
        .header("Content-Type", "application/json")
        .json(&request)
        .send()
        .await?;
    // Read the raw bytes first so a parse failure can echo the body back.
    let body_bytes = http_resp.bytes().await?;
    let response: Response = serde_json::from_slice(&body_bytes).map_err(|e| {
        let raw = String::from_utf8_lossy(&body_bytes);
        format!("Failed to parse JSON: {}\nRaw response:\n{}", e, raw)
    })?;
    // Take ownership of the first choice instead of `.get(0)` + `.clone()`,
    // avoiding a full copy of the content string.
    let first_choice = response
        .choices
        .into_iter()
        .next()
        .ok_or("No ChatGPT Response Index 0 Choice.")?;
    Ok(first_choice.message.content)
}