openai-client-base 0.12.0

Auto-generated Rust client for the OpenAI API
/*
 * OpenAI API
 *
 * The OpenAI REST API. Please see https://platform.openai.com/docs/api-reference for more details.
 *
 * The version of the OpenAPI document: 2.3.0
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::{Deserialize, Serialize};

/// One element of the `choices` array in a streaming chat completion response.
///
/// Instances are deserialized from / serialized to the wire JSON via serde;
/// a builder is also derived via `bon::Builder`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, bon::Builder)]
pub struct CreateChatCompletionStreamResponseChoicesInner {
    /// The incremental content delta for this choice (boxed to keep this struct small).
    #[serde(rename = "delta")]
    pub delta: Box<models::ChatCompletionStreamResponseDelta>,
    /// Log probability information for the choice; omitted from the JSON output when `None`.
    #[serde(rename = "logprobs", skip_serializing_if = "Option::is_none")]
    pub logprobs: Option<Box<models::CreateChatCompletionStreamResponseChoicesInnerLogprobs>>,
    /// The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model called a function.
    #[serde(rename = "finish_reason")]
    pub finish_reason: FinishReason,
    /// The index of the choice in the list of choices.
    #[serde(rename = "index")]
    pub index: i32,
}

impl CreateChatCompletionStreamResponseChoicesInner {
    /// Constructs a choice entry from the required fields.
    ///
    /// `logprobs` starts out as `None`; set it afterwards (or use the derived
    /// builder) if log probability data is available.
    pub fn new(
        delta: models::ChatCompletionStreamResponseDelta,
        finish_reason: FinishReason,
        index: i32,
    ) -> CreateChatCompletionStreamResponseChoicesInner {
        Self {
            // Delta is stored boxed to keep the containing struct small.
            delta: Box::new(delta),
            finish_reason,
            index,
            logprobs: None,
        }
    }
}
/// The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model called a function.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum FinishReason {
    /// The model hit a natural stop point or a provided stop sequence.
    #[serde(rename = "stop")]
    Stop,
    /// The maximum number of tokens specified in the request was reached.
    #[serde(rename = "length")]
    Length,
    /// The model called a tool.
    #[serde(rename = "tool_calls")]
    ToolCalls,
    /// Content was omitted due to a flag from the content filters.
    #[serde(rename = "content_filter")]
    ContentFilter,
    /// The model called a function (deprecated in favor of tool calls).
    #[serde(rename = "function_call")]
    FunctionCall,
}

impl Default for FinishReason {
    fn default() -> FinishReason {
        Self::Stop
    }
}

impl std::fmt::Display for CreateChatCompletionStreamResponseChoicesInner {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match serde_json::to_string(self) {
            Ok(s) => write!(f, "{}", s),
            Err(_) => Err(std::fmt::Error),
        }
    }
}