use std::{collections::VecDeque, str::FromStr};

use serde::{Deserialize, Serialize};
use thiserror::Error;

use super::{
    request::{FinishReason, ToolCall, ToolCallFunction, ToolCallFunctionObj},
    response::{
        Annotation, ChatCompletionAudio, Choice as ChatCompletionChoice, Logprobs, Message,
        Response as ChatCompletionResponse, Role, ServiceTier, Usage,
    },
};

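/// Errors that can occur while parsing a streamed chunk.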
#[derive(Debug, Error)]
pub enum ParserError {
    #[error("Failed to parse JSON: {0}")]
    JsonError(#[from] serde_json::Error),
}

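/// A single streamed payload: either the terminating `[DONE]` sentinel or a
/// JSON data chunk.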
#[derive(Debug, Clone, PartialEq)]
pub enum Chunk {
    Done,
    Data(ChunkResponse),
}

impl FromStr for Chunk {
    type Err = serde_json::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "[DONE]" => Ok(Chunk::Done),
            _ => {
                let response = serde_json::from_str::<ChunkResponse>(s)?;
                Ok(Chunk::Data(response))
            }
        }
    }
}

impl Chunk {
    pub fn try_to_string(&self) -> Result<String, serde_json::Error> {
        match self {
            Chunk::Done => Ok("[DONE]".to_string()),
            Chunk::Data(response) => serde_json::to_string(response),
        }
    }
}

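/// A deserialized `chat.completion.chunk` object from an OpenAI-compatible
/// streaming response.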
#[derive(Debug, Default, Deserialize, Clone, PartialEq, Serialize)]
pub struct ChunkResponse {
    pub id: String,
    pub choices: Vec<Choice>,
    pub created: u64,

    pub model: String,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub system_fingerprint: Option<String>,

    pub object: String,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<ServiceTier>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage: Option<Usage>,
}

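/// One streamed choice; `delta` carries the incremental part of the message.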
#[derive(Debug, Default, Deserialize, Serialize, Clone, PartialEq)]
pub struct Choice {
    pub index: usize,
    pub delta: DeltaMessage,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub finish_reason: Option<FinishReason>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub logprobs: Option<Logprobs>,
}

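/// The incremental message fragment carried by a streamed choice. Every field
/// is optional because each chunk only contains what changed.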
#[derive(Debug, Deserialize, Serialize, Default, Clone, PartialEq)]
pub struct DeltaMessage {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub content: Option<String>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<String>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<ToolCallChunk>>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub role: Option<Role>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub refusal: Option<String>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub annotations: Option<Vec<Annotation>>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub audio: Option<ChatCompletionAudio>,
}

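/// A partial tool call. The `id`, `type`, and function name typically arrive
/// on the first chunk of a call; later chunks stream only argument fragments.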
#[derive(Debug, Deserialize, Serialize, Default, Clone, PartialEq)]
pub struct ToolCallChunk {
    pub index: usize,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub r#type: Option<String>,
    pub function: ToolCallFunctionObjChunk,
}

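/// Partial function payload of a streamed tool call; `arguments` holds a
/// fragment of the JSON argument string.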
#[derive(Debug, Deserialize, Default, Serialize, Clone, PartialEq)]
pub struct ToolCallFunctionObjChunk {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    pub arguments: String,
}

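/// Accumulates streamed chunks into a complete [`ChatCompletionResponse`].
///
/// Feed each streamed payload (typically the part after `data:` in an SSE
/// stream) to [`OpenAIChunkParser::parse`]; completed tool calls are surfaced
/// as soon as they can be assembled, and the full response is returned when
/// the `[DONE]` sentinel arrives.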
#[derive(Debug, Default, Clone, PartialEq)]
pub struct OpenAIChunkParser {
    pub id: String,
    tool_call_chunk: Option<ToolCallChunk>,
    pub object: String,
    pub created: u64,
    pub model: String,
    system_fingerprint: Option<String>,
    tool_calls: VecDeque<ToolCall>,
    pub content: String,
    refusal: Option<String>,
    annotations: Option<Vec<Annotation>>,
    audio: Option<ChatCompletionAudio>,
    logprobs: Option<Logprobs>,
    finish_reason: Option<FinishReason>,
    pub think_content: String,
}

impl OpenAIChunkParser {
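    /// Consumes one streamed payload.
    ///
    /// For a data chunk this returns `(None, maybe_tool_call)`, where the tool
    /// call is present only when a pending call was just completed. For the
    /// `[DONE]` sentinel it returns the assembled final response.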
    pub fn parse(
        &mut self,
        data: &str,
    ) -> Result<(Option<ChatCompletionResponse>, Option<ToolCall>), ParserError> {
        let chunk = Chunk::from_str(data)?;

        match chunk {
            Chunk::Data(response) => {
                self.update_basic_info(&response);
                let tool_call = if let Some(choice) = response.choices.first() {
                    self.logprobs.clone_from(&choice.logprobs);

                    if let Some(reason) = choice.finish_reason {
                        self.finish_reason = Some(reason);
                    }

                    if let Some(c) = choice.delta.content.as_ref() {
                        self.content.push_str(c);
                    }
                    if let Some(c) = choice.delta.reasoning.as_ref() {
                        self.think_content.push_str(c);
                    }

                    if let Some(refusal) = choice.delta.refusal.as_ref() {
                        self.refusal = Some(refusal.clone());
                    }

                    if let Some(annotations) = choice.delta.annotations.as_ref() {
                        self.annotations = Some(annotations.clone());
                    }

                    if let Some(audio) = choice.delta.audio.as_ref() {
                        self.audio = Some(audio.clone());
                    }

                    if let Some(tool_calls) = choice.delta.tool_calls.as_ref() {
                        self.parse_tool_call_chunk(tool_calls.first())
                    } else {
                        self.parse_tool_call_chunk(None)
                    }
                } else {
                    None
                };

                Ok((None, tool_call))
            }
            Chunk::Done => {
                let response = self.clone().response();
                Ok((Some(response), None))
            }
        }
    }

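    /// Builds the final [`ChatCompletionResponse`] from the accumulated state,
    /// consuming the parser.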
    pub fn response(self) -> ChatCompletionResponse {
        let mut m = Message {
            role: Role::Assistant,
            ..Default::default()
        };
        if !self.think_content.is_empty() {
            m.reasoning = Some(self.think_content);
        }
        if !self.content.is_empty() {
            m.content = Some(self.content);
        }
        if let Some(refusal) = self.refusal {
            m.refusal = Some(refusal);
        }
        if let Some(annotations) = self.annotations {
            m.annotations = Some(annotations);
        }
        if let Some(audio) = self.audio {
            m.audio = Some(audio);
        }
        if !self.tool_calls.is_empty() {
            m.tool_calls = Some(self.tool_calls.into());
        }
        ChatCompletionResponse {
            id: self.id,
            object: self.object,
            created: self.created,
            model: self.model,
            system_fingerprint: self.system_fingerprint,
            service_tier: None,
            choices: vec![ChatCompletionChoice {
                index: 0,
                message: m,
                finish_reason: self.finish_reason,
                logprobs: self.logprobs,
            }],
            usage: Usage::default(),
        }
    }

    pub fn update_id_if_empty(&mut self, id: &str) {
        if !self.id.is_empty() {
            return;
        }
        self.id = id.to_string();
    }

    pub fn update_model_if_empty(&mut self, model: &str) {
        if !self.model.is_empty() {
            return;
        }
        self.model = model.to_string();
    }

    pub fn set_system_fingerprint(&mut self, system_fingerprint: Option<String>) {
        self.system_fingerprint = system_fingerprint;
    }

    pub fn set_finish_reason(&mut self, finish_reason: Option<FinishReason>) {
        self.finish_reason = finish_reason;
    }

    pub fn push_content(&mut self, content: &str) {
        self.content.push_str(content);
    }

    pub fn push_thinking(&mut self, content: &str) {
        self.think_content.push_str(content);
    }

    pub fn push_tool_call(&mut self, tool_call: ToolCall) {
        self.tool_calls.push_back(tool_call);
    }
}

impl OpenAIChunkParser {
    fn update_basic_info(&mut self, response: &ChunkResponse) {
        if self.id.is_empty() {
            self.id = response.id.clone();
        }
        self.object = response.object.clone();
        self.created = response.created;
        self.model = response.model.clone();
        self.system_fingerprint = response.system_fingerprint.clone();
    }

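    /// Merges an incoming tool-call fragment into the call being assembled.
    ///
    /// Argument fragments are appended to the pending call. A fragment with a
    /// new `id` (or the absence of any fragment) completes the pending call:
    /// it is pushed onto `tool_calls` and returned to the caller.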
    fn parse_tool_call_chunk(
        &mut self,
        tool_call_chunk: Option<&ToolCallChunk>,
    ) -> Option<ToolCall> {
        match (self.tool_call_chunk.take(), tool_call_chunk) {
            (Some(mut prev_tool_call), Some(new_tool_call)) => {
                if new_tool_call.id.is_some() && prev_tool_call.id != new_tool_call.id {
                    let tool_call = ToolCall::Function(ToolCallFunction {
                        id: prev_tool_call.id.unwrap_or_default(),
                        function: ToolCallFunctionObj {
                            name: prev_tool_call.function.name.unwrap_or_default(),
                            arguments: prev_tool_call.function.arguments.clone(),
                        },
                    });
                    self.tool_calls.push_back(tool_call.clone());
                    self.tool_call_chunk = Some(new_tool_call.clone());

                    return Some(tool_call);
                }
                prev_tool_call
                    .function
                    .arguments
                    .push_str(new_tool_call.function.arguments.as_str());
                self.tool_call_chunk = Some(prev_tool_call);
                None
            }
            (None, Some(tool)) => {
                self.tool_call_chunk = Some(tool.clone());
                None
            }
            (Some(prev_tool_call), None) => {
                let tool_call = ToolCall::Function(ToolCallFunction {
                    id: prev_tool_call.id.unwrap_or_default(),
                    function: ToolCallFunctionObj {
                        name: prev_tool_call.function.name.unwrap_or_default(),
                        arguments: prev_tool_call.function.arguments.clone(),
                    },
                });
                self.tool_calls.push_back(tool_call.clone());
                Some(tool_call)
            }
            (None, None) => None,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn serde() {
        let tests = vec![
            (
                "start",
                r#"{"id":"chatcmpl-123","object":"chat.completion.chunk","created":1694268190,"model":"gpt-3.5-turbo-0613", "system_fingerprint": "fp_44709d6fcb", "choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}"#,
                ChunkResponse {
                    id: "chatcmpl-123".to_string(),
                    object: "chat.completion.chunk".to_string(),
                    created: 1694268190,
                    model: "gpt-3.5-turbo-0613".to_string(),
                    system_fingerprint: Some("fp_44709d6fcb".to_string()),
                    choices: vec![Choice {
                        index: 0,
                        delta: DeltaMessage {
                            role: Some(Role::Assistant),
                            content: Some("".to_string()),
                            ..Default::default()
                        },
                        ..Default::default()
                    }],
                    service_tier: None,
                    usage: None,
                },
            ),
            (
                "data",
                r#"{"id":"chatcmpl-123","object":"chat.completion.chunk","created":1694268190,"model":"gpt-3.5-turbo-0613", "system_fingerprint": "fp_44709d6fcb", "choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}"#,
                ChunkResponse {
                    id: "chatcmpl-123".to_string(),
                    object: "chat.completion.chunk".to_string(),
                    created: 1694268190,
                    model: "gpt-3.5-turbo-0613".to_string(),
                    system_fingerprint: Some("fp_44709d6fcb".to_string()),
                    service_tier: None,
                    choices: vec![Choice {
                        index: 0,
                        delta: DeltaMessage {
                            content: Some("!".to_string()),
                            ..Default::default()
                        },
                        ..Default::default()
                    }],
                    usage: None,
                },
            ),
            (
                "end",
                r#"{"id":"chatcmpl-123","object":"chat.completion.chunk","created":1694268190,"model":"gpt-3.5-turbo-0613", "system_fingerprint": "fp_44709d6fcb", "choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}"#,
                ChunkResponse {
                    id: "chatcmpl-123".to_string(),
                    object: "chat.completion.chunk".to_string(),
                    created: 1694268190,
                    model: "gpt-3.5-turbo-0613".to_string(),
                    system_fingerprint: Some("fp_44709d6fcb".to_string()),
                    service_tier: None,
                    choices: vec![Choice {
                        index: 0,
                        delta: DeltaMessage {
                            ..Default::default()
                        },
                        finish_reason: Some(FinishReason::Stop),
                        ..Default::default()
                    }],
                    usage: None,
                },
            ),
            (
                "function_call",
                r#"{"id":"chatcmpl-8v4PobBwtSalCtjghlORb2l72yfPM","object":"chat.completion.chunk","created":1708612360,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_cbdb91ce3f","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_UjeNA45J26mfwbeEXi3AfNL1","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}"#,
                ChunkResponse {
                    id: "chatcmpl-8v4PobBwtSalCtjghlORb2l72yfPM".to_string(),
                    object: "chat.completion.chunk".to_string(),
                    created: 1708612360,
                    model: "gpt-3.5-turbo-0125".to_string(),
                    system_fingerprint: Some("fp_cbdb91ce3f".to_string()),
                    service_tier: None,
                    choices: vec![Choice {
                        index: 0,
                        delta: DeltaMessage {
                            tool_calls: Some(vec![ToolCallChunk {
                                index: 0,
                                id: Some("call_UjeNA45J26mfwbeEXi3AfNL1".to_string()),
                                r#type: Some("function".to_string()),
                                function: ToolCallFunctionObjChunk {
                                    name: Some("get_current_weather".to_string()),
                                    arguments: "".to_string(),
                                },
                            }]),
                            ..Default::default()
                        },
                        ..Default::default()
                    }],
                    usage: None,
                },
            ),
        ];
        for (name, json, expected) in tests {
            let actual: ChunkResponse = serde_json::from_str(json).unwrap();
            assert_eq!(actual, expected, "deserialize test failed: {}", name);
            let serialized = serde_json::to_string(&expected).unwrap();
            let actual: ChunkResponse = serde_json::from_str(&serialized).unwrap();
            assert_eq!(actual, expected, "serialize test failed: {}", name);

            let got: Chunk = json.parse().unwrap();
            let want = Chunk::Data(expected);
            assert_eq!(got, want, "enum test failed: {}", name)
        }
    }

    #[test]
    fn test_done() {
        let input = "[DONE]";
        let want = Chunk::Done;
        let got: Chunk = input.parse().unwrap();
        assert_eq!(want, got, "test [DONE]");
    }

    #[test]
    fn test_parser_for_tool_call() {
        let test_cases = vec![
            (
                "start",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"role":"assistant","content":null},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data1",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_UjeNA45J26mfwbeEXi3AfNL1","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data2",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data3",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"locatio"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data4",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n\": \"N"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data5",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"ew Y"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data6",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"ork\","}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data7",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" \"unit"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data8",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\": \""}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data9",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"celsi"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data10",
                r#"{"nonce": "3cc0e9", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"us\"}"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data11",
                r#"{"nonce": "1684c1", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"id":"call_7e8O5F7pyvxpqYLPiiIL2FMH","type":"function","function":{"name":"get_current_weather","arguments":""}}]},"logprobs":null,"finish_reason":null}]}"#,
                Some(ToolCall::Function(ToolCallFunction {
                    id: "call_UjeNA45J26mfwbeEXi3AfNL1".to_string(),
                    function: ToolCallFunctionObj {
                        name: "get_current_weather".to_string(),
                        arguments: "{\"location\": \"New York\", \"unit\": \"celsius\"}"
                            .to_string(),
                    },
                })),
            ),
            (
                "data12",
                r#"{"nonce": "1684c1", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"{\"lo"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data13",
                r#"{"nonce": "1684c1", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"catio"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data14",
                r#"{"nonce": "1684c1", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"n\": \"T"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data15",
                r#"{"nonce": "1684c1", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"okyo"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data16",
                r#"{"nonce": "1684c1", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"\", \"u"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data17",
                r#"{"nonce": "1684c1", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"nit\": "}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data18",
                r#"{"nonce": "1684c1", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"\"cel"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data19",
                r#"{"nonce": "1684c1", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"sius\""}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data20",
                r#"{"nonce": "1684c1", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"tool_calls":[{"index":1,"function":{"arguments":"}"}}]},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data21",
                r#"{"nonce": "dd25883214", "id":"chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU","object":"chat.completion.chunk","created":1710744883,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]}"#,
                Some(ToolCall::Function(ToolCallFunction {
                    id: "call_7e8O5F7pyvxpqYLPiiIL2FMH".to_string(),
                    function: ToolCallFunctionObj {
                        name: "get_current_weather".to_string(),
                        arguments: "{\"location\": \"Tokyo\", \"unit\": \"celsius\"}".to_string(),
                    },
                })),
            ),
            ("Done", "[DONE]", None),
        ];
        let mut parser = OpenAIChunkParser::default();
        let mut final_response = None;
        for (name, data, want) in test_cases {
            let (response, tool_call) = parser
                .parse(data)
                .map_err(|e| {
                    panic!("test_parser failed: {} with err: {}", name, e);
                })
                .unwrap();

            if let Some(resp) = response {
                final_response = Some(resp);
            }
            assert_eq!(tool_call, want, "test_parser failed: {}", name);
        }
        let res = final_response.expect("Expected final response from [DONE]");
        assert_eq!(
            res,
            ChatCompletionResponse {
                id: "chatcmpl-941BLfWSKMsoPyCbL3UpYjvQG3oTU".to_string(),
                object: "chat.completion.chunk".to_string(),
                created: 1710744883,
                model: "gpt-3.5-turbo-0125".to_string(),
                system_fingerprint: Some("fp_4f2ebda25a".to_string()),
                service_tier: None,
                choices: vec![ChatCompletionChoice {
                    index: 0,
                    message: Message {
                        role: Role::Assistant,
                        content: None,
                        tool_calls: Some(vec![
                            ToolCall::Function(ToolCallFunction {
                                id: "call_UjeNA45J26mfwbeEXi3AfNL1".to_string(),
                                function: ToolCallFunctionObj {
                                    name: "get_current_weather".to_string(),
                                    arguments:
                                        "{\"location\": \"New York\", \"unit\": \"celsius\"}"
                                            .to_string(),
                                }
                            }),
                            ToolCall::Function(ToolCallFunction {
                                id: "call_7e8O5F7pyvxpqYLPiiIL2FMH".to_string(),
                                function: ToolCallFunctionObj {
                                    name: "get_current_weather".to_string(),
                                    arguments: "{\"location\": \"Tokyo\", \"unit\": \"celsius\"}"
                                        .to_string(),
                                }
                            })
                        ]),
                        refusal: None,
                        annotations: None,
                        audio: None,
                        reasoning: None,
                    },
                    finish_reason: Some(FinishReason::ToolCalls),
                    logprobs: None,
                },],
                usage: Usage::default(),
            }
        )
    }

    #[test]
    fn test_last_chunk_with_content_and_finish_reason() {
        let test_cases = vec![
            (
                "start",
                r#"{"id":"chatcmpl-123","object":"chat.completion.chunk","created":1694268190,"model":"gpt-3.5-turbo-0613", "system_fingerprint": "fp_44709d6fcb", "choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-123","object":"chat.completion.chunk","created":1694268190,"model":"gpt-3.5-turbo-0613", "system_fingerprint": "fp_44709d6fcb", "choices":[{"index":0,"delta":{"content":"Hello"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "last_chunk_with_content_and_finish",
                r#"{"id":"chatcmpl-123","object":"chat.completion.chunk","created":1694268190,"model":"gpt-3.5-turbo-0613", "system_fingerprint": "fp_44709d6fcb", "choices":[{"index":0,"delta":{"content":" world!"},"logprobs":null,"finish_reason":"stop"}]}"#,
                None,
            ),
            ("Done", "[DONE]", None),
        ];

        let mut parser = OpenAIChunkParser::default();
        let mut final_response = None;
        for (name, data, want) in test_cases {
            let (response, tool_call) = parser
                .parse(data)
                .map_err(|e| {
                    panic!("test_parser failed: {} with err: {}", name, e);
                })
                .unwrap();

            if let Some(resp) = response {
                final_response = Some(resp);
            }
            assert_eq!(tool_call, want, "test_parser failed: {}", name);
        }

        let res = final_response.expect("Expected final response from [DONE]");
        assert_eq!(
            res,
            ChatCompletionResponse {
                id: "chatcmpl-123".to_string(),
                object: "chat.completion.chunk".to_string(),
                created: 1694268190,
                model: "gpt-3.5-turbo-0613".to_string(),
                system_fingerprint: Some("fp_44709d6fcb".to_string()),
                service_tier: None,
                choices: vec![ChatCompletionChoice {
                    index: 0,
                    message: Message {
                        role: Role::Assistant,
                        content: Some("Hello world!".to_string()),
                        tool_calls: None,
                        refusal: None,
                        annotations: None,
                        audio: None,
                        reasoning: None,
                    },
                    finish_reason: Some(FinishReason::Stop),
                    logprobs: None,
                },],
                usage: Usage::default(),
            }
        );
    }

    #[test]
    fn test_parser_for_content() {
        let test_cases = vec![
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":"Hello"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":"'m"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" just"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" computer"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" program"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" so"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" don"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":"'t"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" have"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" feelings"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" but"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":"'m"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" ready"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" to"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" assist"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" with"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" anything"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" need"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" How"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" help"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":" today"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}"#,
                None,
            ),
            (
                "data",
                r#"{"id":"chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L","object":"chat.completion.chunk","created":1710814154,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_4f2ebda25a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}"#,
                None,
            ),
            ("data", r#"[DONE]"#, None),
        ];
        let mut parser = OpenAIChunkParser::default();
        let mut final_response = None;
        for (name, data, want) in test_cases {
            let (response, tool_call) = parser
                .parse(data)
                .map_err(|e| {
                    panic!("test_parser failed: {} with err: {}", name, e);
                })
                .unwrap();

            if let Some(resp) = response {
                final_response = Some(resp);
            }
            assert_eq!(tool_call, want, "test_parser failed: {}", name);
        }
        let res = final_response.expect("Expected final response from [DONE]");
        let want_res = ChatCompletionResponse {
            id: "chatcmpl-94JCcQJ9TY5hHx1el8uXAzojc511L".to_string(),
            object: "chat.completion.chunk".to_string(),
            created: 1710814154,
            model: "gpt-3.5-turbo-0125".to_string(),
            system_fingerprint: Some("fp_4f2ebda25a".to_string()),
            service_tier: None,
            choices: vec![ChatCompletionChoice {
                index: 0,
                message: Message {
                    role: Role::Assistant,
                    content: Some("Hello! I'm just a computer program, so I don't have feelings, but I'm ready to assist you with anything you need. How can I help you today?".to_string()),
                    tool_calls: None,
                    refusal: None,
                    annotations: None,
                    audio: None,
                    reasoning: None,
                },
                finish_reason: Some(FinishReason::Stop),
                logprobs: None,
            }],
            usage: Usage::default(),
        };
        assert_eq!(res, want_res, "get_response failed")
    }
}