use std::collections::HashMap;

use async_stream::stream;
use futures::StreamExt;
use http::Request;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tracing::info_span;
use tracing_futures::Instrument;

use crate::completion::{CompletionError, CompletionRequest, GetTokenUsage};
use crate::http_client::HttpClientExt;
use crate::http_client::sse::{Event, GenericEventSource};
use crate::json_utils;
use crate::providers::openrouter::{
    OpenRouterRequestParams, OpenrouterCompletionRequest, ReasoningDetails,
};
use crate::streaming;

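/// Final payload of an OpenRouter stream, yielded once the SSE stream ends; carries the
/// token usage reported by the provider (zeroed if no usage chunk arrived).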
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct StreamingCompletionResponse {
    pub usage: Usage,
}

impl GetTokenUsage for StreamingCompletionResponse {
    fn token_usage(&self) -> Option<crate::completion::Usage> {
        let mut usage = crate::completion::Usage::new();

        usage.input_tokens = self.usage.prompt_tokens as u64;
        usage.output_tokens = self.usage.completion_tokens as u64;
        usage.total_tokens = self.usage.total_tokens as u64;

        Some(usage)
    }
}

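/// Finish reasons reported by OpenRouter. Values not listed here deserialize into the
/// untagged `Other` variant rather than failing.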
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum FinishReason {
    ToolCalls,
    Stop,
    Error,
    ContentFilter,
    Length,
    #[serde(untagged)]
    Other(String),
}

#[derive(Deserialize, Debug)]
#[allow(dead_code)]
struct StreamingChoice {
    pub finish_reason: Option<FinishReason>,
    pub native_finish_reason: Option<String>,
    pub logprobs: Option<Value>,
    pub index: usize,
    pub delta: StreamingDelta,
}

#[derive(Deserialize, Debug)]
struct StreamingFunction {
    pub name: Option<String>,
    pub arguments: Option<String>,
}

#[derive(Deserialize, Debug)]
#[allow(dead_code)]
struct StreamingToolCall {
    pub index: usize,
    pub id: Option<String>,
    pub r#type: Option<String>,
    pub function: StreamingFunction,
}

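/// Token counts as reported by OpenRouter on streaming chunks.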
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
pub struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

#[derive(Deserialize, Debug)]
#[allow(dead_code)]
struct ErrorResponse {
    pub code: i32,
    pub message: String,
}

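/// Incremental message delta carried by a streaming choice.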
#[derive(Deserialize, Debug)]
#[allow(dead_code)]
struct StreamingDelta {
    pub role: Option<String>,
    pub content: Option<String>,
    #[serde(default, deserialize_with = "json_utils::null_or_vec")]
    pub tool_calls: Vec<StreamingToolCall>,
    pub reasoning: Option<String>,
    #[serde(default, deserialize_with = "json_utils::null_or_vec")]
    pub reasoning_details: Vec<ReasoningDetails>,
}

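/// A single SSE chunk from the OpenRouter chat completions endpoint.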
#[derive(Deserialize, Debug)]
#[allow(dead_code)]
struct StreamingCompletionChunk {
    id: String,
    model: String,
    choices: Vec<StreamingChoice>,
    usage: Option<Usage>,
    error: Option<ErrorResponse>,
}

impl<T> super::CompletionModel<T>
where
    T: HttpClientExt + Clone + std::fmt::Debug + Default + 'static,
{
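    /// Builds the streaming `/chat/completions` request (merging `"stream": true` into any
    /// additional params) and hands it to [`send_compatible_streaming_request`], instrumented
    /// with a `chat_streaming` span when no span is currently active.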
    pub(crate) async fn stream(
        &self,
        completion_request: CompletionRequest,
    ) -> Result<streaming::StreamingCompletionResponse<StreamingCompletionResponse>, CompletionError>
    {
        let request_model = completion_request
            .model
            .clone()
            .unwrap_or_else(|| self.model.clone());
        let preamble = completion_request.preamble.clone();
        let mut request = OpenrouterCompletionRequest::try_from(OpenRouterRequestParams {
            model: request_model.as_ref(),
            request: completion_request,
            strict_tools: self.strict_tools,
        })?;

        let params = json_utils::merge(
            request.additional_params.unwrap_or(serde_json::json!({})),
            serde_json::json!({"stream": true }),
        );

        request.additional_params = Some(params);

        let body = serde_json::to_vec(&request)?;

        let req = self
            .client
            .post("/chat/completions")?
            .body(body)
            .map_err(|x| CompletionError::HttpError(x.into()))?;

        let span = if tracing::Span::current().is_disabled() {
            info_span!(
                target: "rig::completions",
                "chat_streaming",
                gen_ai.operation.name = "chat_streaming",
                gen_ai.provider.name = "openrouter",
                gen_ai.request.model = &request_model,
                gen_ai.system_instructions = preamble,
                gen_ai.response.id = tracing::field::Empty,
                gen_ai.response.model = tracing::field::Empty,
                gen_ai.usage.output_tokens = tracing::field::Empty,
                gen_ai.usage.input_tokens = tracing::field::Empty,
            )
        } else {
            tracing::Span::current()
        };

        tracing::Instrument::instrument(
            send_compatible_streaming_request(self.client.clone(), req),
            span,
        )
        .await
    }
}

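/// Drives the SSE stream for an OpenAI-compatible chat completions request, forwarding
/// message and reasoning deltas as they arrive, accumulating tool-call fragments per index
/// (flushed on a `tool_calls` finish reason or when the stream ends), and finishing with a
/// [`StreamingCompletionResponse`] that carries the reported [`Usage`].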
pub async fn send_compatible_streaming_request<T>(
    http_client: T,
    req: Request<Vec<u8>>,
) -> Result<streaming::StreamingCompletionResponse<StreamingCompletionResponse>, CompletionError>
where
    T: HttpClientExt + Clone + 'static,
{
    let span = tracing::Span::current();
    let mut event_source = GenericEventSource::new(http_client, req);

    let stream = stream! {
        // Tool-call fragments arrive split across chunks; accumulate them by index.
        let mut tool_calls: HashMap<usize, streaming::RawStreamingToolCall> = HashMap::new();
        let mut final_usage = None;

        while let Some(event_result) = event_source.next().await {
            match event_result {
                Ok(Event::Open) => {
                    tracing::trace!("SSE connection opened");
                    continue;
                }

                Ok(Event::Message(message)) => {
                    if message.data.trim().is_empty() || message.data == "[DONE]" {
                        continue;
                    }

                    let data = match serde_json::from_str::<StreamingCompletionChunk>(&message.data) {
                        Ok(data) => data,
                        Err(error) => {
                            tracing::error!(?error, message = message.data, "Failed to parse SSE message");
                            continue;
                        }
                    };

                    let Some(choice) = data.choices.first() else {
                        tracing::debug!("There is no choice");
                        continue;
                    };
                    let delta = &choice.delta;

                    if !delta.tool_calls.is_empty() {
                        for tool_call in &delta.tool_calls {
                            let index = tool_call.index;

                            let existing_tool_call = tool_calls.entry(index).or_insert_with(streaming::RawStreamingToolCall::empty);

                            if let Some(id) = &tool_call.id && !id.is_empty() {
                                existing_tool_call.id = id.clone();
                            }

                            if let Some(name) = &tool_call.function.name && !name.is_empty() {
                                existing_tool_call.name = name.clone();
                                yield Ok(streaming::RawStreamingChoice::ToolCallDelta {
                                    id: existing_tool_call.id.clone(),
                                    internal_call_id: existing_tool_call.internal_call_id.clone(),
                                    content: streaming::ToolCallDeltaContent::Name(name.clone()),
                                });
                            }

                            if let Some(chunk) = &tool_call.function.arguments && !chunk.is_empty() {
                                // Arguments stream in as string fragments; keep concatenating and
                                // only parse once the buffer looks like a complete JSON object.
                                let current_args = match &existing_tool_call.arguments {
                                    serde_json::Value::Null => String::new(),
                                    serde_json::Value::String(s) => s.clone(),
                                    v => v.to_string(),
                                };

                                let combined = format!("{current_args}{chunk}");

                                if combined.trim_start().starts_with('{') && combined.trim_end().ends_with('}') {
                                    match serde_json::from_str(&combined) {
                                        Ok(parsed) => existing_tool_call.arguments = parsed,
                                        Err(_) => existing_tool_call.arguments = serde_json::Value::String(combined),
                                    }
                                } else {
                                    existing_tool_call.arguments = serde_json::Value::String(combined);
                                }

                                yield Ok(streaming::RawStreamingChoice::ToolCallDelta {
                                    id: existing_tool_call.id.clone(),
                                    internal_call_id: existing_tool_call.internal_call_id.clone(),
                                    content: streaming::ToolCallDeltaContent::Delta(chunk.clone()),
                                });
                            }
                        }

                        // Attach encrypted reasoning details to the tool call they reference.
                        for reasoning_detail in &delta.reasoning_details {
                            if let ReasoningDetails::Encrypted { id, data, .. } = reasoning_detail
                                && let Some(id) = id
                                && let Some(tool_call) = tool_calls.values_mut().find(|tool_call| tool_call.id.eq(id))
                                && let Ok(additional_params) = serde_json::to_value(reasoning_detail) {
                                tool_call.signature = Some(data.clone());
                                tool_call.additional_params = Some(additional_params);
                            }
                        }
                    }

                    if let Some(reasoning) = &delta.reasoning && !reasoning.is_empty() {
                        yield Ok(streaming::RawStreamingChoice::ReasoningDelta {
                            reasoning: reasoning.clone(),
                            id: None,
                        });
                    }

                    if let Some(content) = &delta.content && !content.is_empty() {
                        yield Ok(streaming::RawStreamingChoice::Message(content.clone()));
                    }

                    if let Some(usage) = data.usage {
                        final_usage = Some(usage);
                    }

                    // Once the provider signals a tool_calls finish, flush the accumulated calls.
                    if let Some(finish_reason) = &choice.finish_reason && *finish_reason == FinishReason::ToolCalls {
                        for (_idx, tool_call) in tool_calls.into_iter() {
                            yield Ok(streaming::RawStreamingChoice::ToolCall(tool_call));
                        }
                        tool_calls = HashMap::new();
                    }
                }
                Err(crate::http_client::Error::StreamEnded) => {
                    break;
                }
                Err(error) => {
                    tracing::error!(?error, "SSE error");
                    yield Err(CompletionError::ProviderError(error.to_string()));
                    break;
                }
            }
        }

        event_source.close();

        // Flush any tool calls that never received a tool_calls finish reason.
        for (_idx, tool_call) in tool_calls.into_iter() {
            yield Ok(streaming::RawStreamingChoice::ToolCall(tool_call));
        }

        yield Ok(streaming::RawStreamingChoice::FinalResponse(StreamingCompletionResponse {
            usage: final_usage.unwrap_or_default(),
        }));
    }.instrument(span);

    Ok(streaming::StreamingCompletionResponse::stream(Box::pin(
        stream,
    )))
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn test_streaming_completion_chunk_deserialization() {
        let json = json!({
            "id": "gen-abc123",
            "choices": [{
                "index": 0,
                "delta": {
                    "role": "assistant",
                    "content": "Hello"
                }
            }],
            "created": 1234567890u64,
            "model": "gpt-3.5-turbo",
            "object": "chat.completion.chunk"
        });

        let response: StreamingCompletionChunk = serde_json::from_value(json).unwrap();
        assert_eq!(response.id, "gen-abc123");
        assert_eq!(response.model, "gpt-3.5-turbo");
        assert_eq!(response.choices.len(), 1);
    }

    #[test]
    fn test_delta_with_content() {
        let json = json!({
            "role": "assistant",
            "content": "Hello, world!"
        });

        let delta: StreamingDelta = serde_json::from_value(json).unwrap();
        assert_eq!(delta.role, Some("assistant".to_string()));
        assert_eq!(delta.content, Some("Hello, world!".to_string()));
    }

    #[test]
    fn test_delta_with_tool_call() {
        let json = json!({
            "role": "assistant",
            "tool_calls": [{
                "index": 0,
                "id": "call_abc",
                "type": "function",
                "function": {
                    "name": "get_weather",
                    "arguments": "{\"location\":"
                }
            }]
        });

        let delta: StreamingDelta = serde_json::from_value(json).unwrap();
        assert_eq!(delta.tool_calls.len(), 1);
        assert_eq!(delta.tool_calls[0].index, 0);
        assert_eq!(delta.tool_calls[0].id, Some("call_abc".to_string()));
    }

    #[test]
    fn test_tool_call_with_partial_arguments() {
        let json = json!({
            "index": 0,
            "id": null,
            "type": null,
            "function": {
                "name": null,
                "arguments": "Paris"
            }
        });

        let tool_call: StreamingToolCall = serde_json::from_value(json).unwrap();
        assert_eq!(tool_call.index, 0);
        assert!(tool_call.id.is_none());
        assert_eq!(tool_call.function.arguments, Some("Paris".to_string()));
    }

    #[test]
    fn test_streaming_with_usage() {
        let json = json!({
            "id": "gen-xyz",
            "choices": [{
                "index": 0,
                "delta": {
                    "content": null
                }
            }],
            "created": 1234567890u64,
            "model": "gpt-4",
            "object": "chat.completion.chunk",
            "usage": {
                "prompt_tokens": 100,
                "completion_tokens": 50,
                "total_tokens": 150
            }
        });

        let response: StreamingCompletionChunk = serde_json::from_value(json).unwrap();
        assert!(response.usage.is_some());
        let usage = response.usage.unwrap();
        assert_eq!(usage.prompt_tokens, 100);
        assert_eq!(usage.completion_tokens, 50);
        assert_eq!(usage.total_tokens, 150);
    }

    #[test]
    fn test_multiple_tool_call_deltas() {
        let start_json = json!({
            "id": "gen-1",
            "choices": [{
                "index": 0,
                "delta": {
                    "tool_calls": [{
                        "index": 0,
                        "id": "call_123",
                        "type": "function",
                        "function": {
                            "name": "search",
                            "arguments": ""
                        }
                    }]
                }
            }],
            "created": 1234567890u64,
            "model": "gpt-4",
            "object": "chat.completion.chunk"
        });

        let delta1_json = json!({
            "id": "gen-2",
            "choices": [{
                "index": 0,
                "delta": {
                    "tool_calls": [{
                        "index": 0,
                        "function": {
                            "arguments": "{\"query\":"
                        }
                    }]
                }
            }],
            "created": 1234567890u64,
            "model": "gpt-4",
            "object": "chat.completion.chunk"
        });

        let delta2_json = json!({
            "id": "gen-3",
            "choices": [{
                "index": 0,
                "delta": {
                    "tool_calls": [{
                        "index": 0,
                        "function": {
                            "arguments": "\"Rust programming\"}"
                        }
                    }]
                }
            }],
            "created": 1234567890u64,
            "model": "gpt-4",
            "object": "chat.completion.chunk"
        });

        let start: StreamingCompletionChunk = serde_json::from_value(start_json).unwrap();
        assert_eq!(
            start.choices[0].delta.tool_calls[0].id,
            Some("call_123".to_string())
        );

        let delta1: StreamingCompletionChunk = serde_json::from_value(delta1_json).unwrap();
        assert_eq!(
            delta1.choices[0].delta.tool_calls[0].function.arguments,
            Some("{\"query\":".to_string())
        );

        let delta2: StreamingCompletionChunk = serde_json::from_value(delta2_json).unwrap();
        assert_eq!(
            delta2.choices[0].delta.tool_calls[0].function.arguments,
            Some("\"Rust programming\"}".to_string())
        );
    }

    #[test]
    fn test_response_with_error() {
        let json = json!({
            "id": "cmpl-abc123",
            "object": "chat.completion.chunk",
            "created": 1234567890,
            "model": "gpt-3.5-turbo",
            "provider": "openai",
            "error": { "code": 500, "message": "Provider disconnected" },
            "choices": [
                { "index": 0, "delta": { "content": "" }, "finish_reason": "error" }
            ]
        });

        let response: StreamingCompletionChunk = serde_json::from_value(json).unwrap();
        assert!(response.error.is_some());
        let error = response.error.as_ref().unwrap();
        assert_eq!(error.code, 500);
        assert_eq!(error.message, "Provider disconnected");
    }
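
    // A small additional check (not part of the original suite) for the assumption that
    // the `#[serde(untagged)]` `Other` variant absorbs finish reasons this enum does not
    // model; the "some_future_reason" value is made up for illustration.
    #[test]
    fn test_finish_reason_falls_back_to_other() {
        let known: FinishReason = serde_json::from_value(json!("tool_calls")).unwrap();
        assert_eq!(known, FinishReason::ToolCalls);

        let unknown: FinishReason = serde_json::from_value(json!("some_future_reason")).unwrap();
        assert_eq!(
            unknown,
            FinishReason::Other("some_future_reason".to_string())
        );
    }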
}