//! openai-client-base 0.12.0
//!
//! Auto-generated Rust client for the OpenAI API.
/*
 * OpenAI API
 *
 * The OpenAI REST API. Please see https://platform.openai.com/docs/api-reference for more details.
 *
 * The version of the OpenAPI document: 2.3.0
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::{Deserialize, Serialize};

/// Eval : An Eval object with a data source config and testing criteria. An Eval represents a task to be done for your LLM integration, for example:  - Improve the quality of my chatbot  - See how well my chatbot handles customer support  - Check if o4-mini is better at my use case than gpt-4o
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, bon::Builder)]
pub struct Eval {
    /// The object type. Always serialized as the JSON string `"eval"`.
    #[serde(rename = "object")]
    pub object: Object,
    /// Unique identifier for the evaluation.
    #[serde(rename = "id")]
    pub id: String,
    /// The name of the evaluation.
    #[serde(rename = "name")]
    pub name: String,
    /// Configuration of the eval's data source (boxed to keep `Eval` itself small).
    #[serde(rename = "data_source_config")]
    pub data_source_config: Box<models::EvalDataSourceConfig>,
    /// A list of testing criteria.
    #[serde(rename = "testing_criteria")]
    pub testing_criteria: Vec<models::EvalTestingCriteriaInner>,
    /// The Unix timestamp (in seconds) for when the eval was created.
    #[serde(rename = "created_at")]
    pub created_at: i32,
    /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard.  Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters.
    // `deserialize_with = "Option::deserialize"` makes the field required in the
    // JSON payload while still allowing an explicit `null` (required-but-nullable),
    // instead of serde's default of treating a missing key as `None`.
    #[serde(rename = "metadata", deserialize_with = "Option::deserialize")]
    pub metadata: Option<std::collections::HashMap<String, String>>,
}

impl Eval {
    /// Constructs a new [`Eval`] from all of its required parts.
    ///
    /// An Eval object with a data source config and testing criteria. An Eval
    /// represents a task to be done for your LLM integration, for example:
    /// - Improve the quality of my chatbot
    /// - See how well my chatbot handles customer support
    /// - Check if o4-mini is better at my use case than gpt-4o
    pub fn new(
        object: Object,
        id: String,
        name: String,
        data_source_config: models::EvalDataSourceConfig,
        testing_criteria: Vec<models::EvalTestingCriteriaInner>,
        created_at: i32,
        metadata: Option<std::collections::HashMap<String, String>>,
    ) -> Eval {
        // The data source config is stored boxed on the struct, so wrap it here
        // rather than forcing every caller to allocate the Box themselves.
        let data_source_config = Box::new(data_source_config);
        Self {
            metadata,
            created_at,
            testing_criteria,
            data_source_config,
            name,
            id,
            object,
        }
    }
}
/// The object type.
///
/// Always `"eval"` for this resource; serialized as the JSON string `"eval"`.
// `Default` is derived with the `#[default]` variant attribute (stable since
// Rust 1.62) instead of a hand-written `impl Default`, which Clippy's
// `derivable_impls` lint would otherwise flag.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Object {
    /// The literal string `eval`.
    #[default]
    #[serde(rename = "eval")]
    Eval,
}

impl std::fmt::Display for Eval {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match serde_json::to_string(self) {
            Ok(s) => write!(f, "{}", s),
            Err(_) => Err(std::fmt::Error),
        }
    }
}