llmclient 0.3.2

Rust LLM client - Gemini, OpenAI, Claude, Mistral, DeepSeek, Groq
Documentation

The module below defines the request and response types for OpenAI image generation and embeddings, together with a helper for calling the image model.
use std::env;
use serde_derive::{Deserialize, Serialize};
use crate::openai::get_client;

// Input structures

// Image
#[derive(Debug, Serialize, Clone)]
pub struct ImageCompletion {
    pub model: String,
    pub prompt: String,
    pub n: usize,
    pub size: String,
}

#[derive(Debug, Deserialize)]
pub struct ImageResponse {
    pub created: u64,
    pub data: Vec<ImageData>,
}

#[derive(Debug, Deserialize)]
pub struct ImageData {
    pub revised_prompt: String,
    pub url: String,
}
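
// For reference, these types match the OpenAI images API response shape: a
// `created` timestamp plus a `data` array with one entry per image. The
// sketch below is illustrative only (sample values, not real API output) and
// assumes serde_json is available alongside the types defined above.
fn parse_sample_image_response() -> Result<(), serde_json::Error> {
    let body = r#"{
        "created": 1700000000,
        "data": [
            { "revised_prompt": "A watercolor fox", "url": "https://example.com/img.png" }
        ]
    }"#;
    let res: ImageResponse = serde_json::from_str(body)?;
    println!("{} image(s), first URL: {}", res.data.len(), res.data[0].url);
    Ok(())
}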

// Embeddings

#[derive(Debug, Serialize, Clone)]
pub struct Embedding {
    pub input: Vec<String>,
    pub model: String,
    pub dimensions: usize,
}

#[derive(Debug, Deserialize)]
pub struct EmbeddingResponse {
    pub data: Vec<EmbeddingData>,
    pub model: String,
}

#[derive(Debug, Deserialize)]
pub struct EmbeddingData {
    pub object: String,
    pub index: usize,
    pub embedding: Vec<f32>,
}
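
// The embeddings call itself is not shown in this listing. A hypothetical
// helper in the same style as call_gpt_image_model below might look like
// this; the GPT_EMBEDDING_URL variable name and the function are assumptions
// made for illustration, not part of the documented API.
pub async fn call_gpt_embedding_model(
    model: &str,
    input: Vec<String>,
    dimensions: usize,
) -> Result<Vec<Vec<f32>>, Box<dyn std::error::Error + Send>> {
    // Assumed endpoint variable, mirroring GPT_IMAGE_URL below
    let url: String = env::var("GPT_EMBEDDING_URL")
        .expect("GPT_EMBEDDING_URL not found in environment variables");

    let client = get_client().await?;

    let request = Embedding {
        input,
        model: model.into(),
        dimensions,
    };

    let res: EmbeddingResponse = client
        .post(url)
        .json(&request)
        .send()
        .await
        .map_err(|e| -> Box<dyn std::error::Error + Send> { Box::new(e) })?
        .json()
        .await
        .map_err(|e| -> Box<dyn std::error::Error + Send> { Box::new(e) })?;

    // One embedding vector per input string, in request order
    Ok(res.data.into_iter().map(|d| d.embedding).collect())
}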

pub async fn call_gpt_image_model(model: &str, prompt: &str, size: &str, n: usize) -> Result<String, Box<dyn std::error::Error + Send>> {
    // Confirm endpoint
    let url: String =
        env::var("GPT_IMAGE_URL").expect("GPT_IMAGE_URL not found in enviroment variables");

    // Create HTTP client and build the image generation request
    let client = get_client().await?;

    let image_completion: ImageCompletion = ImageCompletion {
        model: model.into(),
        prompt: prompt.into(),
        n,
        size: size.into(),
    };

//println!("{:?}", serde_json::to_string(&chat_completion));
    // Extract API Response
    let res = client
        .post(url)
        .json(&image_completion)
        .send()
        .await;
//println!("### {:?}", res);
    let res: ImageResponse = res
        .map_err(|e| -> Box<dyn std::error::Error + Send> { Box::new(e) })?
        .json()
        .await
        .map_err(|e| -> Box<dyn std::error::Error + Send> { Box::new(e) })?;
//println!("### {:?}", res);

    // Return the URL of the first generated image
    Ok(res.data[0].url.clone())
}
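
// Example caller with illustrative values ("dall-e-3" and "1024x1024" are
// placeholders, not prescribed by the crate). Assumes a tokio runtime with
// the macros feature, plus GPT_IMAGE_URL and whatever credentials
// get_client() reads from the environment.
#[tokio::main]
async fn main() {
    match call_gpt_image_model("dall-e-3", "A watercolor fox", "1024x1024", 1).await {
        Ok(url) => println!("Generated image: {}", url),
        Err(e) => eprintln!("Image request failed: {}", e),
    }
}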