openai-client-base 0.12.0

Auto-generated Rust client for the OpenAI API
/*
 * OpenAI API
 *
 * The OpenAI REST API. Please see https://platform.openai.com/docs/api-reference for more details.
 *
 * The version of the OpenAPI document: 2.3.0
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::{Deserialize, Serialize};

/// Request body for creating an eval: a data-source configuration plus the
/// graders (testing criteria) applied to every run in the group.
/// Serialized/deserialized with serde; `bon::Builder` provides an ergonomic
/// builder alternative to [`CreateEvalRequest::new`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, bon::Builder)]
pub struct CreateEvalRequest {
    /// The name of the evaluation.
    #[serde(rename = "name", skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard.  Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters.
    // Double Option via `serde_with::rust::double_option` distinguishes
    // "field absent" (outer None, omitted from JSON) from "explicit null"
    // (Some(None), serialized as `"metadata": null`).
    #[serde(
        rename = "metadata",
        default,
        with = "::serde_with::rust::double_option",
        skip_serializing_if = "Option::is_none"
    )]
    pub metadata: Option<Option<std::collections::HashMap<String, String>>>,
    // Boxed to keep this struct small; the config enum/struct lives on the heap.
    #[serde(rename = "data_source_config")]
    pub data_source_config: Box<models::CreateEvalRequestDataSourceConfig>,
    /// A list of graders for all eval runs in this group. Graders can reference variables in the data source using double curly braces notation, like `{{item.variable_name}}`. To reference the model's output, use the `sample` namespace (ie, `{{sample.output_text}}`).
    #[serde(rename = "testing_criteria")]
    pub testing_criteria: Vec<models::CreateEvalRequestTestingCriteriaInner>,
}

impl CreateEvalRequest {
    /// Builds a request from the two required fields; the optional `name`
    /// and `metadata` start out unset (omitted from the serialized JSON).
    pub fn new(
        data_source_config: models::CreateEvalRequestDataSourceConfig,
        testing_criteria: Vec<models::CreateEvalRequestTestingCriteriaInner>,
    ) -> CreateEvalRequest {
        Self {
            // Box the config here so callers pass it by value.
            data_source_config: Box::new(data_source_config),
            testing_criteria,
            name: None,
            metadata: None,
        }
    }
}

impl std::fmt::Display for CreateEvalRequest {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match serde_json::to_string(self) {
            Ok(s) => write!(f, "{}", s),
            Err(_) => Err(std::fmt::Error),
        }
    }
}