//! openai-client-base 0.12.0
//!
//! Auto-generated Rust client for the OpenAI API
/*
 * OpenAI API
 *
 * The OpenAI REST API. Please see https://platform.openai.com/docs/api-reference for more details.
 *
 * The version of the OpenAPI document: 2.3.0
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::{Deserialize, Serialize};

/// Request body for searching a vector store.
///
/// Serialized as JSON via serde; optional fields are omitted from the payload
/// entirely (`skip_serializing_if = "Option::is_none"`) rather than sent as
/// `null`. Instances can be built with [`VectorStoreSearchRequest::new`] or
/// the derived `bon` builder.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, bon::Builder)]
pub struct VectorStoreSearchRequest {
    /// The search query. Boxed to keep this struct small; the concrete shape
    /// is defined by `models::VectorStoreSearchRequestQuery` (not visible here).
    #[serde(rename = "query")]
    pub query: Box<models::VectorStoreSearchRequestQuery>,
    /// Whether to rewrite the natural language query for vector search.
    #[serde(rename = "rewrite_query", skip_serializing_if = "Option::is_none")]
    pub rewrite_query: Option<bool>,
    /// The maximum number of results to return. This number should be between 1 and 50 inclusive.
    #[serde(rename = "max_num_results", skip_serializing_if = "Option::is_none")]
    pub max_num_results: Option<i32>,
    /// Optional filter to restrict which stored items are searched.
    /// Semantics live in `models::VectorStoreSearchRequestFilters` — see that model.
    #[serde(rename = "filters", skip_serializing_if = "Option::is_none")]
    pub filters: Option<Box<models::VectorStoreSearchRequestFilters>>,
    /// Optional ranking configuration for the returned results
    /// (see `models::VectorStoreSearchRequestRankingOptions`).
    #[serde(rename = "ranking_options", skip_serializing_if = "Option::is_none")]
    pub ranking_options: Option<models::VectorStoreSearchRequestRankingOptions>,
}

impl VectorStoreSearchRequest {
    /// Builds a search request for `query`, leaving every optional field
    /// (`rewrite_query`, `max_num_results`, `filters`, `ranking_options`)
    /// unset so it is omitted from the serialized payload.
    pub fn new(query: models::VectorStoreSearchRequestQuery) -> VectorStoreSearchRequest {
        Self {
            // Only the required field gets a value; the query is boxed to
            // match the struct's storage.
            rewrite_query: None,
            max_num_results: None,
            filters: None,
            ranking_options: None,
            query: Box::new(query),
        }
    }
}

impl std::fmt::Display for VectorStoreSearchRequest {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match serde_json::to_string(self) {
            Ok(s) => write!(f, "{}", s),
            Err(_) => Err(std::fmt::Error),
        }
    }
}