/*
* OpenAI API
*
* The OpenAI REST API. Please see https://platform.openai.com/docs/api-reference for more details.
*
* The version of the OpenAPI document: 2.3.0
*
* Generated by: https://openapi-generator.tech
*/
use crate::models;
use serde::{Deserialize, Serialize};
/// One completion choice from a chat-completions response.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, bon::Builder)]
pub struct CreateChatCompletionResponseChoicesInner {
    /// The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model called a function.
    #[serde(rename = "finish_reason")]
    pub finish_reason: FinishReason,
    /// The index of the choice in the list of choices.
    #[serde(rename = "index")]
    pub index: i32,
    /// The message generated by the model for this choice (boxed).
    #[serde(rename = "message")]
    pub message: Box<models::ChatCompletionResponseMessage>,
    /// Log-probability information for this choice. The explicit
    /// `deserialize_with = "Option::deserialize"` opts out of serde's
    /// missing-field-means-`None` handling for `Option`, so the key must be
    /// present in the JSON (its value may still be `null`).
    #[serde(rename = "logprobs", deserialize_with = "Option::deserialize")]
    pub logprobs: Option<Box<models::Object05>>,
}
impl CreateChatCompletionResponseChoicesInner {
    /// Builds a choice from owned values, boxing `message` and (when present)
    /// `logprobs` to match the struct's heap-allocated field types.
    pub fn new(
        finish_reason: FinishReason,
        index: i32,
        message: models::ChatCompletionResponseMessage,
        logprobs: Option<models::Object05>,
    ) -> CreateChatCompletionResponseChoicesInner {
        // Box the large payloads up front, then assemble with field shorthand.
        let message = Box::new(message);
        let logprobs = logprobs.map(Box::new);
        Self {
            finish_reason,
            index,
            message,
            logprobs,
        }
    }
}
/// The reason the model stopped generating tokens. This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model called a function.
// NOTE: the derived `Ord`/`PartialOrd` follow variant declaration order, so do
// not reorder variants without checking callers that compare `FinishReason`s.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum FinishReason {
    /// The model hit a natural stop point or a provided stop sequence.
    #[serde(rename = "stop")]
    Stop,
    /// The request's maximum token count was reached.
    #[serde(rename = "length")]
    Length,
    /// The model called a tool.
    #[serde(rename = "tool_calls")]
    ToolCalls,
    /// Content was omitted due to a content-filter flag.
    #[serde(rename = "content_filter")]
    ContentFilter,
    /// The model called a function (deprecated upstream in favor of tools).
    #[serde(rename = "function_call")]
    FunctionCall,
}
impl Default for FinishReason {
fn default() -> FinishReason {
Self::Stop
}
}
impl std::fmt::Display for CreateChatCompletionResponseChoicesInner {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match serde_json::to_string(self) {
Ok(s) => write!(f, "{}", s),
Err(_) => Err(std::fmt::Error),
}
}
}