use std::env;
use serde_derive::{Deserialize, Serialize};
use crate::openai::get_client;
/// JSON request body for the image-generation endpoint.
///
/// Built and POSTed by `call_gpt_image_model` below.
#[derive(Debug, Serialize, Clone)]
pub struct ImageCompletion {
// Model identifier — presumably an OpenAI image model name (e.g. "dall-e-3"); confirm with callers.
pub model: String,
// Text prompt describing the image to generate.
pub prompt: String,
// Number of images requested.
pub n: usize,
// Image dimensions as a string — presumably "WIDTHxHEIGHT" (e.g. "1024x1024"); TODO confirm accepted values.
pub size: String,
}
/// Top-level JSON response from the image-generation endpoint,
/// deserialized in `call_gpt_image_model`.
#[derive(Debug, Deserialize)]
pub struct ImageResponse {
// Creation timestamp — presumably Unix epoch seconds; confirm against API docs.
pub created: u64,
// One entry per generated image; `call_gpt_image_model` reads only the first.
pub data: Vec<ImageData>,
}
/// A single generated image inside `ImageResponse::data`.
#[derive(Debug, Deserialize)]
pub struct ImageData {
// NOTE(review): declared required — if the API can omit this field,
// deserialization will fail; consider Option<String>. Confirm against API docs.
pub revised_prompt: String,
// URL where the generated image can be downloaded.
pub url: String,
}
/// JSON request body for an embeddings endpoint.
///
/// Not used in this part of the file — presumably serialized by an
/// embeddings helper elsewhere; verify against callers.
#[derive(Debug, Serialize, Clone)]
pub struct Embedding {
// Texts to embed; one embedding is produced per input string.
pub input: Vec<String>,
// Embedding model identifier.
pub model: String,
// Requested dimensionality of the output vectors.
pub dimensions: usize,
}
/// Top-level JSON response from an embeddings endpoint.
#[derive(Debug, Deserialize)]
pub struct EmbeddingResponse {
// One entry per input string in the request.
pub data: Vec<EmbeddingData>,
// Model that actually produced the embeddings.
pub model: String,
}
/// A single embedding inside `EmbeddingResponse::data`.
#[derive(Debug, Deserialize)]
pub struct EmbeddingData {
// Object type tag from the API — presumably always "embedding"; confirm against API docs.
pub object: String,
// Position of the corresponding input string in the request.
pub index: usize,
// The embedding vector itself.
pub embedding: Vec<f32>,
}
/// Calls the image-generation endpoint and returns the URL of the first
/// generated image.
///
/// The endpoint URL is read from the `GPT_IMAGE_URL` environment variable.
///
/// # Errors
/// Returns `Err` if `GPT_IMAGE_URL` is unset, the HTTP request or JSON
/// deserialization fails, or the response contains no image entries.
/// (Previously this function panicked on a missing env var and on an
/// empty `data` array; both are now surfaced as errors so callers can
/// recover.)
pub async fn call_gpt_image_model(
    model: &str,
    prompt: &str,
    size: &str,
    n: usize,
) -> Result<String, Box<dyn std::error::Error + Send>> {
    // Propagate a missing env var instead of panicking — the signature
    // already promises a Result. VarError implements Error + Send.
    let url: String = env::var("GPT_IMAGE_URL")
        .map_err(|e| -> Box<dyn std::error::Error + Send> { Box::new(e) })?;

    let client = get_client().await?;

    let image_completion = ImageCompletion {
        model: model.into(),
        prompt: prompt.into(),
        n,
        size: size.into(),
    };

    let response: ImageResponse = client
        .post(url)
        .json(&image_completion)
        .send()
        .await
        .map_err(|e| -> Box<dyn std::error::Error + Send> { Box::new(e) })?
        .json()
        .await
        .map_err(|e| -> Box<dyn std::error::Error + Send> { Box::new(e) })?;

    // Guard against an empty `data` array — indexing with [0] would panic.
    response
        .data
        .first()
        .map(|image| image.url.clone())
        .ok_or_else(|| -> Box<dyn std::error::Error + Send> {
            Box::new(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "image response contained no data entries",
            ))
        })
}