use std::collections::HashMap;

use async_stream::stream;
use futures::StreamExt;
use http::Request;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tracing::info_span;
use tracing_futures::Instrument;

use crate::completion::{CompletionError, CompletionRequest, GetTokenUsage};
use crate::http_client::HttpClientExt;
use crate::http_client::sse::{Event, GenericEventSource};
use crate::json_utils;
use crate::providers::openrouter::{
    OpenRouterRequestParams, OpenrouterCompletionRequest, ReasoningDetails,
};
use crate::streaming;

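/// Final payload yielded once the stream has completed, carrying the token usage reported by OpenRouter.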
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct StreamingCompletionResponse {
    pub usage: Usage,
}

impl GetTokenUsage for StreamingCompletionResponse {
    fn token_usage(&self) -> Option<crate::completion::Usage> {
        let mut usage = crate::completion::Usage::new();

        usage.input_tokens = self.usage.prompt_tokens as u64;
        usage.output_tokens = self.usage.completion_tokens as u64;
        usage.total_tokens = self.usage.total_tokens as u64;

        Some(usage)
    }
}

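/// Finish reason reported by OpenRouter for a streamed choice; values not listed here are captured by `Other`.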
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum FinishReason {
    ToolCalls,
    Stop,
    Error,
    ContentFilter,
    Length,
    #[serde(untagged)]
    Other(String),
}

#[derive(Deserialize, Debug)]
#[allow(dead_code)]
struct StreamingChoice {
    pub finish_reason: Option<FinishReason>,
    pub native_finish_reason: Option<String>,
    pub logprobs: Option<Value>,
    pub index: usize,
    pub delta: StreamingDelta,
}

#[derive(Deserialize, Debug)]
struct StreamingFunction {
    pub name: Option<String>,
    pub arguments: Option<String>,
}

#[derive(Deserialize, Debug)]
#[allow(dead_code)]
struct StreamingToolCall {
    pub index: usize,
    pub id: Option<String>,
    pub r#type: Option<String>,
    pub function: StreamingFunction,
}

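/// Token counts reported by OpenRouter; the last usage block seen is surfaced in the final response.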
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
pub struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

#[derive(Deserialize, Debug)]
#[allow(dead_code)]
struct ErrorResponse {
    pub code: i32,
    pub message: String,
}

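/// Incremental message delta within a chunk: text content, tool-call fragments, and optional reasoning.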
#[derive(Deserialize, Debug)]
#[allow(dead_code)]
struct StreamingDelta {
    pub role: Option<String>,
    pub content: Option<String>,
    #[serde(default, deserialize_with = "json_utils::null_or_vec")]
    pub tool_calls: Vec<StreamingToolCall>,
    pub reasoning: Option<String>,
    #[serde(default, deserialize_with = "json_utils::null_or_vec")]
    pub reasoning_details: Vec<ReasoningDetails>,
}

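/// A single server-sent chunk of the streamed completion response.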
#[derive(Deserialize, Debug)]
#[allow(dead_code)]
struct StreamingCompletionChunk {
    id: String,
    model: String,
    choices: Vec<StreamingChoice>,
    usage: Option<Usage>,
    error: Option<ErrorResponse>,
}

impl<T> super::CompletionModel<T>
where
    T: HttpClientExt + Clone + std::fmt::Debug + Default + 'static,
{
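    /// Builds the OpenRouter request, merges `"stream": true` into the additional params,
    /// and forwards the call to `send_compatible_streaming_request`, creating a
    /// `chat_streaming` span if no span is currently active.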
    pub(crate) async fn stream(
        &self,
        completion_request: CompletionRequest,
    ) -> Result<streaming::StreamingCompletionResponse<StreamingCompletionResponse>, CompletionError>
    {
        let preamble = completion_request.preamble.clone();
        let mut request = OpenrouterCompletionRequest::try_from(OpenRouterRequestParams {
            model: self.model.as_ref(),
            request: completion_request,
            strict_tools: self.strict_tools,
        })?;

        let params = json_utils::merge(
            request.additional_params.unwrap_or(serde_json::json!({})),
            serde_json::json!({"stream": true }),
        );

        request.additional_params = Some(params);

        let body = serde_json::to_vec(&request)?;

        let req = self
            .client
            .post("/chat/completions")?
            .body(body)
            .map_err(|x| CompletionError::HttpError(x.into()))?;

        let span = if tracing::Span::current().is_disabled() {
            info_span!(
                target: "rig::completions",
                "chat_streaming",
                gen_ai.operation.name = "chat_streaming",
                gen_ai.provider.name = "openrouter",
                gen_ai.request.model = self.model,
                gen_ai.system_instructions = preamble,
                gen_ai.response.id = tracing::field::Empty,
                gen_ai.response.model = tracing::field::Empty,
                gen_ai.usage.output_tokens = tracing::field::Empty,
                gen_ai.usage.input_tokens = tracing::field::Empty,
            )
        } else {
            tracing::Span::current()
        };

        tracing::Instrument::instrument(
            send_compatible_streaming_request(self.client.clone(), req),
            span,
        )
        .await
    }
}

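/// Drives the SSE stream for a prepared `/chat/completions` request and adapts each chunk
/// into `RawStreamingChoice` items: text deltas, reasoning deltas, incremental tool calls,
/// and a final usage summary.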
pub async fn send_compatible_streaming_request<T>(
    http_client: T,
    req: Request<Vec<u8>>,
) -> Result<streaming::StreamingCompletionResponse<StreamingCompletionResponse>, CompletionError>
where
    T: HttpClientExt + Clone + 'static,
{
    let span = tracing::Span::current();
    let mut event_source = GenericEventSource::new(http_client, req);

    let stream = stream! {
        let mut tool_calls: HashMap<usize, streaming::RawStreamingToolCall> = HashMap::new();
        let mut final_usage = None;

        while let Some(event_result) = event_source.next().await {
            match event_result {
                Ok(Event::Open) => {
                    tracing::trace!("SSE connection opened");
                    continue;
                }

                Ok(Event::Message(message)) => {
                    if message.data.trim().is_empty() || message.data == "[DONE]" {
                        continue;
                    }

                    let data = match serde_json::from_str::<StreamingCompletionChunk>(&message.data) {
                        Ok(data) => data,
                        Err(error) => {
                            tracing::error!(?error, message = message.data, "Failed to parse SSE message");
                            continue;
                        }
                    };

                    let Some(choice) = data.choices.first() else {
                        tracing::debug!("There is no choice");
                        continue;
                    };
                    let delta = &choice.delta;

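                    // Tool-call fragments arrive spread across deltas: accumulate the id, name,
                    // and argument chunks per index until the call is complete.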
                    if !delta.tool_calls.is_empty() {
                        for tool_call in &delta.tool_calls {
                            let index = tool_call.index;

                            let existing_tool_call = tool_calls.entry(index).or_insert_with(streaming::RawStreamingToolCall::empty);

                            if let Some(id) = &tool_call.id && !id.is_empty() {
                                existing_tool_call.id = id.clone();
                            }

                            if let Some(name) = &tool_call.function.name && !name.is_empty() {
                                existing_tool_call.name = name.clone();
                            }

                            if let Some(chunk) = &tool_call.function.arguments {
                                let current_args = match &existing_tool_call.arguments {
                                    serde_json::Value::Null => String::new(),
                                    serde_json::Value::String(s) => s.clone(),
                                    v => v.to_string(),
                                };

                                let combined = format!("{current_args}{chunk}");

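                                // Only attempt a JSON parse once the accumulated text looks like a
                                // complete object; otherwise keep it as a raw string until more chunks arrive.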
                                if combined.trim_start().starts_with('{') && combined.trim_end().ends_with('}') {
                                    match serde_json::from_str(&combined) {
                                        Ok(parsed) => existing_tool_call.arguments = parsed,
                                        Err(_) => existing_tool_call.arguments = serde_json::Value::String(combined),
                                    }
                                } else {
                                    existing_tool_call.arguments = serde_json::Value::String(combined);
                                }

                                yield Ok(streaming::RawStreamingChoice::ToolCallDelta {
                                    id: existing_tool_call.id.clone(),
                                    delta: chunk.clone(),
                                });
                            }
                        }

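                        // Encrypted reasoning details reference a tool call by id; store their
                        // signature and raw value on that call.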
                        for reasoning_detail in &delta.reasoning_details {
                            if let ReasoningDetails::Encrypted { id, data, .. } = reasoning_detail
                                && let Some(id) = id
                                && let Some(tool_call) = tool_calls.values_mut().find(|tool_call| tool_call.id.eq(id))
                                && let Ok(additional_params) = serde_json::to_value(reasoning_detail) {
                                tool_call.signature = Some(data.clone());
                                tool_call.additional_params = Some(additional_params);
                            }
                        }
                    }

                    if let Some(reasoning) = &delta.reasoning && !reasoning.is_empty() {
                        yield Ok(streaming::RawStreamingChoice::ReasoningDelta {
                            reasoning: reasoning.clone(),
                            id: None,
                        });
                    }

                    if let Some(content) = &delta.content && !content.is_empty() {
                        yield Ok(streaming::RawStreamingChoice::Message(content.clone()));
                    }

                    if let Some(usage) = data.usage {
                        final_usage = Some(usage);
                    }

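                    // A `tool_calls` finish reason means the accumulated calls are complete: emit them now.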
                    if let Some(finish_reason) = &choice.finish_reason && *finish_reason == FinishReason::ToolCalls {
                        for (_idx, tool_call) in tool_calls.into_iter() {
                            yield Ok(streaming::RawStreamingChoice::ToolCall(tool_call));
                        }
                        tool_calls = HashMap::new();
                    }
                }
                Err(crate::http_client::Error::StreamEnded) => {
                    break;
                }
                Err(error) => {
                    tracing::error!(?error, "SSE error");
                    yield Err(CompletionError::ProviderError(error.to_string()));
                    break;
                }
            }
        }

        event_source.close();

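        // Emit any tool calls that were still being accumulated when the stream ended.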
        for (_idx, tool_call) in tool_calls.into_iter() {
            yield Ok(streaming::RawStreamingChoice::ToolCall(tool_call));
        }

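        // Finish with the last usage payload seen, or zeroed usage if the provider never sent one.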
        yield Ok(streaming::RawStreamingChoice::FinalResponse(StreamingCompletionResponse {
            usage: final_usage.unwrap_or_default(),
        }));
    }.instrument(span);

    Ok(streaming::StreamingCompletionResponse::stream(Box::pin(
        stream,
    )))
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn test_streaming_completion_response_deserialization() {
        let json = json!({
            "id": "gen-abc123",
            "choices": [{
                "index": 0,
                "delta": {
                    "role": "assistant",
                    "content": "Hello"
                }
            }],
            "created": 1234567890u64,
            "model": "gpt-3.5-turbo",
            "object": "chat.completion.chunk"
        });

        let response: StreamingCompletionChunk = serde_json::from_value(json).unwrap();
        assert_eq!(response.id, "gen-abc123");
        assert_eq!(response.model, "gpt-3.5-turbo");
        assert_eq!(response.choices.len(), 1);
    }

    #[test]
    fn test_delta_with_content() {
        let json = json!({
            "role": "assistant",
            "content": "Hello, world!"
        });

        let delta: StreamingDelta = serde_json::from_value(json).unwrap();
        assert_eq!(delta.role, Some("assistant".to_string()));
        assert_eq!(delta.content, Some("Hello, world!".to_string()));
    }

    #[test]
    fn test_delta_with_tool_call() {
        let json = json!({
            "role": "assistant",
            "tool_calls": [{
                "index": 0,
                "id": "call_abc",
                "type": "function",
                "function": {
                    "name": "get_weather",
                    "arguments": "{\"location\":"
                }
            }]
        });

        let delta: StreamingDelta = serde_json::from_value(json).unwrap();
        assert_eq!(delta.tool_calls.len(), 1);
        assert_eq!(delta.tool_calls[0].index, 0);
        assert_eq!(delta.tool_calls[0].id, Some("call_abc".to_string()));
    }

    #[test]
    fn test_tool_call_with_partial_arguments() {
        let json = json!({
            "index": 0,
            "id": null,
            "type": null,
            "function": {
                "name": null,
                "arguments": "Paris"
            }
        });

        let tool_call: StreamingToolCall = serde_json::from_value(json).unwrap();
        assert_eq!(tool_call.index, 0);
        assert!(tool_call.id.is_none());
        assert_eq!(tool_call.function.arguments, Some("Paris".to_string()));
    }

    #[test]
    fn test_streaming_with_usage() {
        let json = json!({
            "id": "gen-xyz",
            "choices": [{
                "index": 0,
                "delta": {
                    "content": null
                }
            }],
            "created": 1234567890u64,
            "model": "gpt-4",
            "object": "chat.completion.chunk",
            "usage": {
                "prompt_tokens": 100,
                "completion_tokens": 50,
                "total_tokens": 150
            }
        });

        let response: StreamingCompletionChunk = serde_json::from_value(json).unwrap();
        assert!(response.usage.is_some());
        let usage = response.usage.unwrap();
        assert_eq!(usage.prompt_tokens, 100);
        assert_eq!(usage.completion_tokens, 50);
        assert_eq!(usage.total_tokens, 150);
    }

    #[test]
    fn test_multiple_tool_call_deltas() {
        let start_json = json!({
            "id": "gen-1",
            "choices": [{
                "index": 0,
                "delta": {
                    "tool_calls": [{
                        "index": 0,
                        "id": "call_123",
                        "type": "function",
                        "function": {
                            "name": "search",
                            "arguments": ""
                        }
                    }]
                }
            }],
            "created": 1234567890u64,
            "model": "gpt-4",
            "object": "chat.completion.chunk"
        });

        let delta1_json = json!({
            "id": "gen-2",
            "choices": [{
                "index": 0,
                "delta": {
                    "tool_calls": [{
                        "index": 0,
                        "function": {
                            "arguments": "{\"query\":"
                        }
                    }]
                }
            }],
            "created": 1234567890u64,
            "model": "gpt-4",
            "object": "chat.completion.chunk"
        });

        let delta2_json = json!({
            "id": "gen-3",
            "choices": [{
                "index": 0,
                "delta": {
                    "tool_calls": [{
                        "index": 0,
                        "function": {
                            "arguments": "\"Rust programming\"}"
                        }
                    }]
                }
            }],
            "created": 1234567890u64,
            "model": "gpt-4",
            "object": "chat.completion.chunk"
        });

        let start: StreamingCompletionChunk = serde_json::from_value(start_json).unwrap();
        assert_eq!(
            start.choices[0].delta.tool_calls[0].id,
            Some("call_123".to_string())
        );

        let delta1: StreamingCompletionChunk = serde_json::from_value(delta1_json).unwrap();
        assert_eq!(
            delta1.choices[0].delta.tool_calls[0].function.arguments,
            Some("{\"query\":".to_string())
        );

        let delta2: StreamingCompletionChunk = serde_json::from_value(delta2_json).unwrap();
        assert_eq!(
            delta2.choices[0].delta.tool_calls[0].function.arguments,
            Some("\"Rust programming\"}".to_string())
        );
    }

    #[test]
    fn test_response_with_error() {
        let json = json!({
            "id": "cmpl-abc123",
            "object": "chat.completion.chunk",
            "created": 1234567890,
            "model": "gpt-3.5-turbo",
            "provider": "openai",
            "error": { "code": 500, "message": "Provider disconnected" },
            "choices": [
                { "index": 0, "delta": { "content": "" }, "finish_reason": "error" }
            ]
        });

        let response: StreamingCompletionChunk = serde_json::from_value(json).unwrap();
        assert!(response.error.is_some());
        let error = response.error.as_ref().unwrap();
        assert_eq!(error.code, 500);
        assert_eq!(error.message, "Provider disconnected");
    }
}
528}