openai_protocol/builders/chat/response.rs

use crate::{chat::*, common::Usage};

/// Builder for constructing a [`ChatCompletionResponse`] step by step.
#[must_use = "Builder does nothing until .build() is called"]
#[derive(Clone, Debug)]
pub struct ChatCompletionResponseBuilder {
    id: String,
    object: String,
    created: u64,
    model: String,
    choices: Vec<ChatChoice>,
    usage: Option<Usage>,
    system_fingerprint: Option<String>,
}

impl ChatCompletionResponseBuilder {
    /// Creates a builder for the given response `id` and `model`.
    ///
    /// `object` defaults to `"chat.completion"` and `created` to the current
    /// Unix timestamp; all other fields start out empty.
    pub fn new(id: impl Into<String>, model: impl Into<String>) -> Self {
        Self {
            id: id.into(),
            object: "chat.completion".to_string(),
            created: chrono::Utc::now().timestamp() as u64,
            model: model.into(),
            choices: Vec::new(),
            usage: None,
            system_fingerprint: None,
        }
    }

    /// Copies response-relevant fields (currently just `model`) from the
    /// originating request.
    pub fn copy_from_request(mut self, request: &ChatCompletionRequest) -> Self {
        self.model = request.model.clone();
        self
    }

    pub fn object(mut self, object: impl Into<String>) -> Self {
        self.object = object.into();
        self
    }

    pub fn created(mut self, timestamp: u64) -> Self {
        self.created = timestamp;
        self
    }

    pub fn choices(mut self, choices: Vec<ChatChoice>) -> Self {
        self.choices = choices;
        self
    }

    pub fn add_choice(mut self, choice: ChatChoice) -> Self {
        self.choices.push(choice);
        self
    }

    pub fn usage(mut self, usage: Usage) -> Self {
        self.usage = Some(usage);
        self
    }

    /// Sets the usage only when `Some`; `None` leaves the current value unchanged.
    pub fn maybe_usage(mut self, usage: Option<Usage>) -> Self {
        if let Some(u) = usage {
            self.usage = Some(u);
        }
        self
    }

    /// Sets the system fingerprint only when `Some`; `None` leaves it unchanged.
    pub fn maybe_system_fingerprint(mut self, fingerprint: Option<impl Into<String>>) -> Self {
        if let Some(fp) = fingerprint {
            self.system_fingerprint = Some(fp.into());
        }
        self
    }

    /// Consumes the builder and produces the final `ChatCompletionResponse`.
    pub fn build(self) -> ChatCompletionResponse {
        ChatCompletionResponse {
            id: self.id,
            object: self.object,
            created: self.created,
            model: self.model,
            choices: self.choices,
            usage: self.usage,
            system_fingerprint: self.system_fingerprint,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_build_minimal() {
        let response = ChatCompletionResponse::builder("chatcmpl_123", "gpt-4").build();

        assert_eq!(response.id, "chatcmpl_123");
        assert_eq!(response.model, "gpt-4");
        assert_eq!(response.object, "chat.completion");
        assert!(response.choices.is_empty());
        assert!(response.usage.is_none());
        assert!(response.system_fingerprint.is_none());
    }

    #[test]
    fn test_build_complete() {
        let choice = ChatChoice {
            index: 0,
            message: ChatCompletionMessage {
                role: "assistant".to_string(),
                content: Some("Hello!".to_string()),
                tool_calls: None,
                reasoning_content: None,
            },
            logprobs: None,
            finish_reason: Some("stop".to_string()),
            matched_stop: None,
            hidden_states: None,
        };

        let usage = Usage {
            prompt_tokens: 10,
            completion_tokens: 20,
            total_tokens: 30,
            completion_tokens_details: None,
        };

        let response = ChatCompletionResponse::builder("chatcmpl_456", "gpt-4")
            .choices(vec![choice.clone()])
            .maybe_usage(Some(usage))
            .maybe_system_fingerprint(Some("fp_123abc"))
            .build();

        assert_eq!(response.id, "chatcmpl_456");
        assert_eq!(response.choices.len(), 1);
        assert_eq!(response.choices[0].index, 0);
        assert!(response.usage.is_some());
        assert_eq!(response.system_fingerprint.as_ref().unwrap(), "fp_123abc");
    }

    #[test]
    fn test_add_multiple_choices() {
        let choice1 = ChatChoice {
            index: 0,
            message: ChatCompletionMessage {
                role: "assistant".to_string(),
                content: Some("Option 1".to_string()),
                tool_calls: None,
                reasoning_content: None,
            },
            logprobs: None,
            finish_reason: Some("stop".to_string()),
            matched_stop: None,
            hidden_states: None,
        };

        let choice2 = ChatChoice {
            index: 1,
            message: ChatCompletionMessage {
                role: "assistant".to_string(),
                content: Some("Option 2".to_string()),
                tool_calls: None,
                reasoning_content: None,
            },
            logprobs: None,
            finish_reason: Some("stop".to_string()),
            matched_stop: None,
            hidden_states: None,
        };

        let response = ChatCompletionResponse::builder("chatcmpl_789", "gpt-4")
            .add_choice(choice1)
            .add_choice(choice2)
            .build();

        assert_eq!(response.choices.len(), 2);
        assert_eq!(response.choices[0].index, 0);
        assert_eq!(response.choices[1].index, 1);
    }

    #[test]
    fn test_copy_from_request() {
        let request = ChatCompletionRequest {
            messages: vec![],
            model: "gpt-3.5-turbo".to_string(),
            ..Default::default()
        };

        let response = ChatCompletionResponse::builder("chatcmpl_101", "gpt-4")
            .copy_from_request(&request)
            .build();

        assert_eq!(response.model, "gpt-3.5-turbo");
    }
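
    // Illustrative additions (not part of the original test suite): they exercise
    // builder paths the tests above do not cover, using only types and methods
    // already shown in this file.

    // `maybe_usage` / `maybe_system_fingerprint` with `None` should leave the
    // builder's defaults untouched. The turbofish on `None::<String>` is needed
    // because the parameter is generic over `impl Into<String>`.
    #[test]
    fn test_maybe_none_keeps_defaults() {
        let response = ChatCompletionResponse::builder("chatcmpl_202", "gpt-4")
            .maybe_usage(None)
            .maybe_system_fingerprint(None::<String>)
            .build();

        assert!(response.usage.is_none());
        assert!(response.system_fingerprint.is_none());
    }

    // `object` and `created` can override the defaults set by `new`
    // ("chat.completion" and the current Unix timestamp).
    #[test]
    fn test_override_object_and_created() {
        let response = ChatCompletionResponse::builder("chatcmpl_303", "gpt-4")
            .object("chat.completion.custom")
            .created(1_700_000_000)
            .build();

        assert_eq!(response.object, "chat.completion.custom");
        assert_eq!(response.created, 1_700_000_000);
    }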
}