1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
/*
* OpenAI API
*
* The OpenAI REST API. Please see https://platform.openai.com/docs/api-reference for more details.
*
* The version of the OpenAPI document: 2.3.0
*
* Generated by: https://openapi-generator.tech
*/
use crate::models;
use serde::{Deserialize, Serialize};
/// A single streamed choice from a chat-completion chunk.
///
/// Mirrors one element of the `choices` array in a streaming chat completion
/// response. Serialized field names are pinned via `#[serde(rename = ...)]`
/// so the Rust names can stay idiomatic without breaking the wire format.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, bon::Builder)]
pub struct CreateChatCompletionStreamResponseChoicesInner {
    // Boxed to keep this struct small; the delta payload is a larger generated model.
    #[serde(rename = "delta")]
    pub delta: Box<models::ChatCompletionStreamResponseDelta>,
    // Optional; omitted from JSON entirely when `None` (skip_serializing_if).
    #[serde(rename = "logprobs", skip_serializing_if = "Option::is_none")]
    pub logprobs: Option<Box<models::CreateChatCompletionStreamResponseChoicesInnerLogprobs>>,
    /// The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model called a function.
    #[serde(rename = "finish_reason")]
    pub finish_reason: FinishReason,
    /// The index of the choice in the list of choices.
    #[serde(rename = "index")]
    pub index: i32,
}
impl CreateChatCompletionStreamResponseChoicesInner {
    /// Builds a choice from its required parts.
    ///
    /// `delta` is boxed internally; `logprobs` starts as `None` and can be
    /// set afterwards (or via the generated builder).
    pub fn new(
        delta: models::ChatCompletionStreamResponseDelta,
        finish_reason: FinishReason,
        index: i32,
    ) -> CreateChatCompletionStreamResponseChoicesInner {
        Self {
            delta: Box::new(delta),
            finish_reason,
            index,
            logprobs: None,
        }
    }
}
/// The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model called a function.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum FinishReason {
    /// The model hit a natural stop point or a provided stop sequence.
    #[serde(rename = "stop")]
    Stop,
    /// The request's maximum token count was reached.
    #[serde(rename = "length")]
    Length,
    /// The model called one or more tools.
    #[serde(rename = "tool_calls")]
    ToolCalls,
    /// Content was omitted due to a content-filter flag.
    #[serde(rename = "content_filter")]
    ContentFilter,
    /// The model called a function (deprecated API surface).
    #[serde(rename = "function_call")]
    FunctionCall,
}
impl Default for FinishReason {
fn default() -> FinishReason {
Self::Stop
}
}
impl std::fmt::Display for CreateChatCompletionStreamResponseChoicesInner {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match serde_json::to_string(self) {
Ok(s) => write!(f, "{}", s),
Err(_) => Err(std::fmt::Error),
}
}
}