1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
use serde::{Deserialize, Serialize};
use crate::chat::AssistantMessage;
use crate::chat::ChatModel;
use crate::chat::Logprobs;
use crate::macros::impl_display_for_serialize;
/// The chat completion object, i.e. the `chat.completion` response body
/// returned by the API.
///
/// NOTE(review): serde serializes fields in declaration order, and the unit
/// test in this file asserts that exact order — do not reorder fields.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ChatCompletionObject {
/// A unique identifier for the chat completion.
pub id: String,
/// A list of chat completion choices. Can be more than one if `n` is greater than 1.
pub choices: Vec<ChatCompletionChoice>,
/// The Unix timestamp (in seconds) of when the chat completion was created.
pub created: u64,
/// The model used for the chat completion.
pub model: ChatModel,
/// This fingerprint represents the backend configuration that the model runs with.
///
/// Can be used in conjunction with the seed request parameter to understand when backend changes have been made that might impact determinism.
// Omitted from the serialized output entirely when `None` (rather than
// emitting `"system_fingerprint": null`).
#[serde(skip_serializing_if = "Option::is_none")]
pub system_fingerprint: Option<String>,
/// The object type, which is always `chat.completion`.
pub object: String,
/// Usage statistics for the completion request.
pub usage: Usage,
}
impl_display_for_serialize!(ChatCompletionObject);
/// A single choice within a [`ChatCompletionObject`].
///
/// NOTE(review): serde serializes fields in declaration order, and the unit
/// test in this file asserts that exact order — do not reorder fields.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ChatCompletionChoice {
/// The reason the model stopped generating tokens.
///
/// This will be `stop` if the model hit a natural stop point or a provided stop sequence, `length` if the maximum number of tokens specified in the request was reached, `content_filter` if content was omitted due to a flag from our content filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model called a function.
// Kept as a free-form `String` rather than an enum, so unknown future
// values deserialize without error.
pub finish_reason: String,
/// The index of the choice in the list of choices.
pub index: u32,
/// A chat completion message generated by the model.
pub message: AssistantMessage,
/// Log probability information for the choice.
// Omitted from the serialized output entirely when `None`.
#[serde(skip_serializing_if = "Option::is_none")]
pub logprobs: Option<Logprobs>,
}
impl_display_for_serialize!(ChatCompletionChoice);
/// Token usage statistics for a completion request.
///
/// Invariant per the API: `total_tokens == prompt_tokens + completion_tokens`
/// (not enforced here — this is a plain data carrier).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Usage {
/// Number of tokens in the generated completion.
pub completion_tokens: u32,
/// Number of tokens in the prompt.
pub prompt_tokens: u32,
/// Total number of tokens used in the request (prompt + completion).
pub total_tokens: u32,
}
impl_display_for_serialize!(Usage);
#[cfg(test)]
mod test {
use super::*;
use crate::chat::AssistantMessage;

/// Round-trips a representative API response: pretty JSON must deserialize
/// into the expected struct, and re-serializing must produce the exact
/// compact JSON with fields in declaration order.
#[test]
fn serialize() {
let raw = r#"{
"id": "chatcmpl-8km4YC01Ve1RijCTeBE7e3Gh7PVDR",
"object": "chat.completion",
"created": 1706158570,
"model": "gpt-4-1106-vision-preview",
"usage": {
"prompt_tokens": 277,
"completion_tokens": 10,
"total_tokens": 287
},
"choices": [
{
"message": {
"role": "assistant",
"content": "The animal in the image is a domestic cat."
},
"finish_reason": "stop",
"index": 0
}
]
}"#;

let message = AssistantMessage::new(
Some("The animal in the image is a domestic cat.".to_string()),
None,
None,
)
.into();
let expected = ChatCompletionObject {
id: "chatcmpl-8km4YC01Ve1RijCTeBE7e3Gh7PVDR".to_string(),
choices: vec![ChatCompletionChoice {
finish_reason: "stop".to_string(),
index: 0,
message,
logprobs: None,
}],
created: 1706158570,
model: ChatModel::Gpt41106VisionPreview,
system_fingerprint: None,
object: "chat.completion".to_string(),
usage: Usage {
completion_tokens: 10,
prompt_tokens: 277,
total_tokens: 287,
},
};

// JSON -> struct.
let parsed = serde_json::from_str::<ChatCompletionObject>(raw).unwrap();
assert_eq!(parsed, expected);

// struct -> compact JSON (field order follows struct declaration order).
let encoded = serde_json::to_string(&expected).unwrap();
assert_eq!(
encoded,
r#"{"id":"chatcmpl-8km4YC01Ve1RijCTeBE7e3Gh7PVDR","choices":[{"finish_reason":"stop","index":0,"message":{"content":"The animal in the image is a domestic cat.","role":"assistant"}}],"created":1706158570,"model":"gpt-4-1106-vision-preview","object":"chat.completion","usage":{"completion_tokens":10,"prompt_tokens":277,"total_tokens":287}}"#
);
}
}