rig/providers/openai/completion/
mod.rs

1// ================================================================
2// OpenAI Completion API
3// ================================================================
4
5use super::{ApiErrorResponse, ApiResponse, Client, streaming::StreamingCompletionResponse};
6use crate::completion::{CompletionError, CompletionRequest};
7use crate::message::{AudioMediaType, ImageDetail};
8use crate::one_or_many::string_or_one_or_many;
9use crate::{OneOrMany, completion, json_utils, message};
10use serde::{Deserialize, Serialize};
11use serde_json::{Value, json};
12use std::convert::Infallible;
13use std::fmt;
14
15use std::str::FromStr;
16
17pub mod streaming;
18
/// `o4-mini-2025-04-16` completion model
pub const O4_MINI_2025_04_16: &str = "o4-mini-2025-04-16";
/// `o4-mini` completion model
pub const O4_MINI: &str = "o4-mini";
/// `o3` completion model
pub const O3: &str = "o3";
/// `o3-mini` completion model
pub const O3_MINI: &str = "o3-mini";
/// `o3-mini-2025-01-31` completion model
pub const O3_MINI_2025_01_31: &str = "o3-mini-2025-01-31";
/// `o1-pro` completion model
pub const O1_PRO: &str = "o1-pro";
/// `o1` completion model
pub const O1: &str = "o1";
/// `o1-2024-12-17` completion model
pub const O1_2024_12_17: &str = "o1-2024-12-17";
/// `o1-preview` completion model
pub const O1_PREVIEW: &str = "o1-preview";
/// `o1-preview-2024-09-12` completion model
pub const O1_PREVIEW_2024_09_12: &str = "o1-preview-2024-09-12";
/// `o1-mini` completion model
pub const O1_MINI: &str = "o1-mini";
/// `o1-mini-2024-09-12` completion model
pub const O1_MINI_2024_09_12: &str = "o1-mini-2024-09-12";

/// `gpt-4.1-mini` completion model
pub const GPT_4_1_MINI: &str = "gpt-4.1-mini";
/// `gpt-4.1-nano` completion model
pub const GPT_4_1_NANO: &str = "gpt-4.1-nano";
/// `gpt-4.1-2025-04-14` completion model
pub const GPT_4_1_2025_04_14: &str = "gpt-4.1-2025-04-14";
/// `gpt-4.1` completion model
pub const GPT_4_1: &str = "gpt-4.1";
/// `gpt-4.5-preview` completion model
pub const GPT_4_5_PREVIEW: &str = "gpt-4.5-preview";
/// `gpt-4.5-preview-2025-02-27` completion model
pub const GPT_4_5_PREVIEW_2025_02_27: &str = "gpt-4.5-preview-2025-02-27";
/// `gpt-4o-2024-11-20` completion model (this is newer than 4o)
pub const GPT_4O_2024_11_20: &str = "gpt-4o-2024-11-20";
/// `gpt-4o` completion model
pub const GPT_4O: &str = "gpt-4o";
/// `gpt-4o-mini` completion model
pub const GPT_4O_MINI: &str = "gpt-4o-mini";
/// `gpt-4o-2024-05-13` completion model
pub const GPT_4O_2024_05_13: &str = "gpt-4o-2024-05-13";
/// `gpt-4-turbo` completion model
pub const GPT_4_TURBO: &str = "gpt-4-turbo";
/// `gpt-4-turbo-2024-04-09` completion model
pub const GPT_4_TURBO_2024_04_09: &str = "gpt-4-turbo-2024-04-09";
/// `gpt-4-turbo-preview` completion model
pub const GPT_4_TURBO_PREVIEW: &str = "gpt-4-turbo-preview";
/// `gpt-4-0125-preview` completion model
pub const GPT_4_0125_PREVIEW: &str = "gpt-4-0125-preview";
/// `gpt-4-1106-preview` completion model
pub const GPT_4_1106_PREVIEW: &str = "gpt-4-1106-preview";
/// `gpt-4-vision-preview` completion model
pub const GPT_4_VISION_PREVIEW: &str = "gpt-4-vision-preview";
/// `gpt-4-1106-vision-preview` completion model
pub const GPT_4_1106_VISION_PREVIEW: &str = "gpt-4-1106-vision-preview";
/// `gpt-4` completion model
pub const GPT_4: &str = "gpt-4";
/// `gpt-4-0613` completion model
pub const GPT_4_0613: &str = "gpt-4-0613";
/// `gpt-4-32k` completion model
pub const GPT_4_32K: &str = "gpt-4-32k";
/// `gpt-4-32k-0613` completion model
pub const GPT_4_32K_0613: &str = "gpt-4-32k-0613";
/// `gpt-3.5-turbo` completion model
pub const GPT_35_TURBO: &str = "gpt-3.5-turbo";
/// `gpt-3.5-turbo-0125` completion model
pub const GPT_35_TURBO_0125: &str = "gpt-3.5-turbo-0125";
/// `gpt-3.5-turbo-1106` completion model
pub const GPT_35_TURBO_1106: &str = "gpt-3.5-turbo-1106";
/// `gpt-3.5-turbo-instruct` completion model
pub const GPT_35_TURBO_INSTRUCT: &str = "gpt-3.5-turbo-instruct";
94
95impl From<ApiErrorResponse> for CompletionError {
96    fn from(err: ApiErrorResponse) -> Self {
97        CompletionError::ProviderError(err.message)
98    }
99}
100
/// A chat message in the OpenAI Chat Completions wire format, tagged by `role`.
///
/// Serializes with a lowercase `role` field (`system`, `user`, `assistant`,
/// `tool`); the `developer` role deserializes into the `System` variant.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum Message {
    /// System (or `developer`) instructions for the model.
    #[serde(alias = "developer")]
    System {
        // Accepts either a bare string or an array of content parts on input.
        #[serde(deserialize_with = "string_or_one_or_many")]
        content: OneOrMany<SystemContent>,
        #[serde(skip_serializing_if = "Option::is_none")]
        name: Option<String>,
    },
    /// End-user input: text, images, and/or audio.
    User {
        // Accepts either a bare string or an array of content parts on input.
        #[serde(deserialize_with = "string_or_one_or_many")]
        content: OneOrMany<UserContent>,
        #[serde(skip_serializing_if = "Option::is_none")]
        name: Option<String>,
    },
    /// Model output; may carry text, a refusal, audio, and/or tool calls.
    Assistant {
        // May be absent or a bare string on input; normalized to a vec.
        #[serde(default, deserialize_with = "json_utils::string_or_vec")]
        content: Vec<AssistantContent>,
        #[serde(skip_serializing_if = "Option::is_none")]
        refusal: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        audio: Option<AudioAssistant>,
        #[serde(skip_serializing_if = "Option::is_none")]
        name: Option<String>,
        // `null` deserializes to an empty vec; empty vecs are omitted on output.
        #[serde(
            default,
            deserialize_with = "json_utils::null_or_vec",
            skip_serializing_if = "Vec::is_empty"
        )]
        tool_calls: Vec<ToolCall>,
    },
    /// The result of a tool call, serialized with role `tool` and keyed back
    /// to the originating call via `tool_call_id`.
    #[serde(rename = "tool")]
    ToolResult {
        tool_call_id: String,
        content: OneOrMany<ToolResultContent>,
    },
}
139
140impl Message {
141    pub fn system(content: &str) -> Self {
142        Message::System {
143            content: OneOrMany::one(content.to_owned().into()),
144            name: None,
145        }
146    }
147}
148
/// Reference to assistant-generated audio.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct AudioAssistant {
    // Identifier of the audio — presumably the id returned by the API for a
    // prior audio response; TODO confirm against the OpenAI audio docs.
    id: String,
}
153
/// One part of a system message; currently always `text`-typed.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct SystemContent {
    // Defaults to `text` when the `type` field is absent.
    #[serde(default)]
    r#type: SystemContentType,
    text: String,
}
160
/// Discriminator for [`SystemContent`]; only `text` exists today.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum SystemContentType {
    #[default]
    Text,
}
167
/// One part of an assistant message: plain text or a refusal, tagged by `type`.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum AssistantContent {
    Text { text: String },
    Refusal { refusal: String },
}
174
175impl From<AssistantContent> for completion::AssistantContent {
176    fn from(value: AssistantContent) -> Self {
177        match value {
178            AssistantContent::Text { text } => completion::AssistantContent::text(text),
179            AssistantContent::Refusal { refusal } => completion::AssistantContent::text(refusal),
180        }
181    }
182}
183
/// One part of a user message, tagged by `type`: text, an image, or audio.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum UserContent {
    Text {
        text: String,
    },
    // Wire tag is `image_url`, matching the nested field name.
    #[serde(rename = "image_url")]
    Image {
        image_url: ImageUrl,
    },
    Audio {
        input_audio: InputAudio,
    },
}
198
/// Image reference for a user message.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct ImageUrl {
    // Image location — callers in this module pass raw image `data` through
    // here, so this may be a URL or inline (e.g. data-URL) content.
    pub url: String,
    // Detail/fidelity hint; falls back to `ImageDetail::default()` when absent.
    #[serde(default)]
    pub detail: ImageDetail,
}
205
/// Audio input for a user message.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct InputAudio {
    // Audio payload — presumably base64-encoded per the OpenAI input_audio
    // schema; TODO confirm. This module passes message audio data through as-is.
    pub data: String,
    // Audio format (e.g. MP3); see the conversion default below.
    pub format: AudioMediaType,
}
211
/// One part of a tool-result message; currently always `text`-typed.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct ToolResultContent {
    // Defaults to `text` when the `type` field is absent.
    #[serde(default)]
    r#type: ToolResultContentType,
    pub text: String,
}
218
/// Discriminator for [`ToolResultContent`]; only `text` exists today.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ToolResultContentType {
    #[default]
    Text,
}
225
226impl FromStr for ToolResultContent {
227    type Err = Infallible;
228
229    fn from_str(s: &str) -> Result<Self, Self::Err> {
230        Ok(s.to_owned().into())
231    }
232}
233
234impl From<String> for ToolResultContent {
235    fn from(s: String) -> Self {
236        ToolResultContent {
237            r#type: ToolResultContentType::default(),
238            text: s,
239        }
240    }
241}
242
/// A tool invocation requested by the model.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct ToolCall {
    pub id: String,
    // Defaults to `function` — the only variant of `ToolType`.
    #[serde(default)]
    pub r#type: ToolType,
    pub function: Function,
}
250
/// Discriminator for [`ToolCall`]; only `function` exists today.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ToolType {
    #[default]
    Function,
}
257
/// OpenAI-shaped tool definition: a `type` tag wrapping the shared
/// [`completion::ToolDefinition`] under `function`.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ToolDefinition {
    pub r#type: String,
    pub function: completion::ToolDefinition,
}
263
264impl From<completion::ToolDefinition> for ToolDefinition {
265    fn from(tool: completion::ToolDefinition) -> Self {
266        Self {
267            r#type: "function".into(),
268            function: tool,
269        }
270    }
271}
272
/// The function portion of a tool call.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct Function {
    pub name: String,
    // On the wire, OpenAI sends `arguments` as a JSON-encoded *string*; the
    // `stringified_json` helper (de)serializes it to/from a structured value.
    #[serde(with = "json_utils::stringified_json")]
    pub arguments: serde_json::Value,
}
279
impl TryFrom<message::Message> for Vec<Message> {
    type Error = message::MessageError;

    /// Converts one internal message into one or more OpenAI messages.
    ///
    /// A `User` message carrying tool results fans out into one `tool`-role
    /// message per result (each keyed by its `tool_call_id`); otherwise it
    /// becomes a single `user` message. An `Assistant` message becomes a single
    /// `assistant` message with text and tool calls split into their fields.
    ///
    /// # Errors
    /// Returns `MessageError::ConversionError` for non-text tool-result
    /// content. Panics (`unimplemented!`) on assistant `Reasoning` content,
    /// which this API does not support.
    fn try_from(message: message::Message) -> Result<Self, Self::Error> {
        match message {
            message::Message::User { content } => {
                // Split tool results from everything else so each group can be
                // mapped to its distinct OpenAI role.
                let (tool_results, other_content): (Vec<_>, Vec<_>) = content
                    .into_iter()
                    .partition(|content| matches!(content, message::UserContent::ToolResult(_)));

                // If there are messages with both tool results and user content, openai will only
                //  handle tool results. It's unlikely that there will be both.
                if !tool_results.is_empty() {
                    tool_results
                        .into_iter()
                        .map(|content| match content {
                            message::UserContent::ToolResult(message::ToolResult {
                                id,
                                content,
                                ..
                            }) => Ok::<_, message::MessageError>(Message::ToolResult {
                                tool_call_id: id,
                                // Only text results are representable in this
                                // format; anything else is a conversion error.
                                content: content.try_map(|content| match content {
                                    message::ToolResultContent::Text(message::Text { text }) => {
                                        Ok(text.into())
                                    }
                                    _ => Err(message::MessageError::ConversionError(
                                        "Tool result content does not support non-text".into(),
                                    )),
                                })?,
                            }),
                            // The partition above guarantees only ToolResult
                            // variants reach this arm.
                            _ => unreachable!(),
                        })
                        .collect::<Result<Vec<_>, _>>()
                } else {
                    let other_content = OneOrMany::many(other_content).expect(
                        "There must be other content here if there were no tool result content",
                    );

                    Ok(vec![Message::User {
                        content: other_content.map(|content| match content {
                            message::UserContent::Text(message::Text { text }) => {
                                UserContent::Text { text }
                            }
                            message::UserContent::Image(message::Image {
                                data, detail, ..
                            }) => UserContent::Image {
                                image_url: ImageUrl {
                                    url: data,
                                    detail: detail.unwrap_or_default(),
                                },
                            },
                            // Documents have no dedicated OpenAI content type;
                            // their data is sent as plain text.
                            message::UserContent::Document(message::Document { data, .. }) => {
                                UserContent::Text { text: data }
                            }
                            message::UserContent::Audio(message::Audio {
                                data,
                                media_type,
                                ..
                            }) => UserContent::Audio {
                                input_audio: InputAudio {
                                    data,
                                    // Default to MP3 when no format was given.
                                    format: match media_type {
                                        Some(media_type) => media_type,
                                        None => AudioMediaType::MP3,
                                    },
                                },
                            },
                            // Tool results were partitioned out above.
                            _ => unreachable!(),
                        }),
                        name: None,
                    }])
                }
            }
            message::Message::Assistant { content, .. } => {
                // Separate plain text from tool calls: OpenAI represents them
                // as distinct fields on the assistant message.
                let (text_content, tool_calls) = content.into_iter().fold(
                    (Vec::new(), Vec::new()),
                    |(mut texts, mut tools), content| {
                        match content {
                            message::AssistantContent::Text(text) => texts.push(text),
                            message::AssistantContent::ToolCall(tool_call) => tools.push(tool_call),
                            message::AssistantContent::Reasoning(_) => {
                                unimplemented!(
                                    "The OpenAI Completions API doesn't support reasoning!"
                                );
                            }
                        }
                        (texts, tools)
                    },
                );

                // `OneOrMany` ensures at least one `AssistantContent::Text` or `ToolCall` exists,
                //  so either `content` or `tool_calls` will have some content.
                Ok(vec![Message::Assistant {
                    content: text_content
                        .into_iter()
                        .map(|content| content.text.into())
                        .collect::<Vec<_>>(),
                    refusal: None,
                    audio: None,
                    name: None,
                    tool_calls: tool_calls
                        .into_iter()
                        .map(|tool_call| tool_call.into())
                        .collect::<Vec<_>>(),
                }])
            }
        }
    }
}
390
391impl From<message::ToolCall> for ToolCall {
392    fn from(tool_call: message::ToolCall) -> Self {
393        Self {
394            id: tool_call.id,
395            r#type: ToolType::default(),
396            function: Function {
397                name: tool_call.function.name,
398                arguments: tool_call.function.arguments,
399            },
400        }
401    }
402}
403
404impl From<ToolCall> for message::ToolCall {
405    fn from(tool_call: ToolCall) -> Self {
406        Self {
407            id: tool_call.id,
408            call_id: None,
409            function: message::ToolFunction {
410                name: tool_call.function.name,
411                arguments: tool_call.function.arguments,
412            },
413        }
414    }
415}
416
417impl TryFrom<Message> for message::Message {
418    type Error = message::MessageError;
419
420    fn try_from(message: Message) -> Result<Self, Self::Error> {
421        Ok(match message {
422            Message::User { content, .. } => message::Message::User {
423                content: content.map(|content| content.into()),
424            },
425            Message::Assistant {
426                content,
427                tool_calls,
428                ..
429            } => {
430                let mut content = content
431                    .into_iter()
432                    .map(|content| match content {
433                        AssistantContent::Text { text } => message::AssistantContent::text(text),
434
435                        // TODO: Currently, refusals are converted into text, but should be
436                        //  investigated for generalization.
437                        AssistantContent::Refusal { refusal } => {
438                            message::AssistantContent::text(refusal)
439                        }
440                    })
441                    .collect::<Vec<_>>();
442
443                content.extend(
444                    tool_calls
445                        .into_iter()
446                        .map(|tool_call| Ok(message::AssistantContent::ToolCall(tool_call.into())))
447                        .collect::<Result<Vec<_>, _>>()?,
448                );
449
450                message::Message::Assistant {
451                    id: None,
452                    content: OneOrMany::many(content).map_err(|_| {
453                        message::MessageError::ConversionError(
454                            "Neither `content` nor `tool_calls` was provided to the Message"
455                                .to_owned(),
456                        )
457                    })?,
458                }
459            }
460
461            Message::ToolResult {
462                tool_call_id,
463                content,
464            } => message::Message::User {
465                content: OneOrMany::one(message::UserContent::tool_result(
466                    tool_call_id,
467                    content.map(|content| message::ToolResultContent::text(content.text)),
468                )),
469            },
470
471            // System messages should get stripped out when converting messages, this is just a
472            // stop gap to avoid obnoxious error handling or panic occurring.
473            Message::System { content, .. } => message::Message::User {
474                content: content.map(|content| message::UserContent::text(content.text)),
475            },
476        })
477    }
478}
479
480impl From<UserContent> for message::UserContent {
481    fn from(content: UserContent) -> Self {
482        match content {
483            UserContent::Text { text } => message::UserContent::text(text),
484            UserContent::Image { image_url } => message::UserContent::image(
485                image_url.url,
486                Some(message::ContentFormat::default()),
487                None,
488                Some(image_url.detail),
489            ),
490            UserContent::Audio { input_audio } => message::UserContent::audio(
491                input_audio.data,
492                Some(message::ContentFormat::default()),
493                Some(input_audio.format),
494            ),
495        }
496    }
497}
498
499impl From<String> for UserContent {
500    fn from(s: String) -> Self {
501        UserContent::Text { text: s }
502    }
503}
504
505impl FromStr for UserContent {
506    type Err = Infallible;
507
508    fn from_str(s: &str) -> Result<Self, Self::Err> {
509        Ok(UserContent::Text {
510            text: s.to_string(),
511        })
512    }
513}
514
515impl From<String> for AssistantContent {
516    fn from(s: String) -> Self {
517        AssistantContent::Text { text: s }
518    }
519}
520
521impl FromStr for AssistantContent {
522    type Err = Infallible;
523
524    fn from_str(s: &str) -> Result<Self, Self::Err> {
525        Ok(AssistantContent::Text {
526            text: s.to_string(),
527        })
528    }
529}
530impl From<String> for SystemContent {
531    fn from(s: String) -> Self {
532        SystemContent {
533            r#type: SystemContentType::default(),
534            text: s,
535        }
536    }
537}
538
539impl FromStr for SystemContent {
540    type Err = Infallible;
541
542    fn from_str(s: &str) -> Result<Self, Self::Err> {
543        Ok(SystemContent {
544            r#type: SystemContentType::default(),
545            text: s.to_string(),
546        })
547    }
548}
549
/// Top-level response body from the `/chat/completions` endpoint.
#[derive(Debug, Deserialize, Serialize)]
pub struct CompletionResponse {
    pub id: String,
    pub object: String,
    // Creation time as a Unix timestamp.
    pub created: u64,
    pub model: String,
    pub system_fingerprint: Option<String>,
    // Candidate completions; only the first is consumed by the conversion below.
    pub choices: Vec<Choice>,
    // Token accounting; absent for some responses.
    pub usage: Option<Usage>,
}
560
561impl TryFrom<CompletionResponse> for completion::CompletionResponse<CompletionResponse> {
562    type Error = CompletionError;
563
564    fn try_from(response: CompletionResponse) -> Result<Self, Self::Error> {
565        let choice = response.choices.first().ok_or_else(|| {
566            CompletionError::ResponseError("Response contained no choices".to_owned())
567        })?;
568
569        let content = match &choice.message {
570            Message::Assistant {
571                content,
572                tool_calls,
573                ..
574            } => {
575                let mut content = content
576                    .iter()
577                    .filter_map(|c| {
578                        let s = match c {
579                            AssistantContent::Text { text } => text,
580                            AssistantContent::Refusal { refusal } => refusal,
581                        };
582                        if s.is_empty() {
583                            None
584                        } else {
585                            Some(completion::AssistantContent::text(s))
586                        }
587                    })
588                    .collect::<Vec<_>>();
589
590                content.extend(
591                    tool_calls
592                        .iter()
593                        .map(|call| {
594                            completion::AssistantContent::tool_call(
595                                &call.id,
596                                &call.function.name,
597                                call.function.arguments.clone(),
598                            )
599                        })
600                        .collect::<Vec<_>>(),
601                );
602                Ok(content)
603            }
604            _ => Err(CompletionError::ResponseError(
605                "Response did not contain a valid message or tool call".into(),
606            )),
607        }?;
608
609        let choice = OneOrMany::many(content).map_err(|_| {
610            CompletionError::ResponseError(
611                "Response contained no message or tool call (empty)".to_owned(),
612            )
613        })?;
614
615        let usage = response
616            .usage
617            .as_ref()
618            .map(|usage| completion::Usage {
619                input_tokens: usage.prompt_tokens as u64,
620                output_tokens: (usage.total_tokens - usage.prompt_tokens) as u64,
621                total_tokens: usage.total_tokens as u64,
622            })
623            .unwrap_or_default();
624
625        Ok(completion::CompletionResponse {
626            choice,
627            usage,
628            raw_response: response,
629        })
630    }
631}
632
/// One candidate completion within a [`CompletionResponse`].
#[derive(Debug, Serialize, Deserialize)]
pub struct Choice {
    pub index: usize,
    pub message: Message,
    // Left as raw JSON; log-probabilities are not interpreted by this module.
    pub logprobs: Option<serde_json::Value>,
    pub finish_reason: String,
}
640
/// Token accounting reported by the API.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Usage {
    pub prompt_tokens: usize,
    // Total of prompt + completion tokens; completion tokens are derived as
    // `total_tokens - prompt_tokens` where needed.
    pub total_tokens: usize,
}
646
647impl fmt::Display for Usage {
648    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
649        let Usage {
650            prompt_tokens,
651            total_tokens,
652        } = self;
653        write!(
654            f,
655            "Prompt tokens: {prompt_tokens} Total tokens: {total_tokens}"
656        )
657    }
658}
659
/// An OpenAI completion model bound to a configured [`Client`].
#[derive(Clone)]
pub struct CompletionModel {
    pub(crate) client: Client,
    /// Name of the model (e.g.: gpt-3.5-turbo-1106)
    pub model: String,
}
666
impl CompletionModel {
    /// Creates a completion model bound to `client` with the given model name.
    pub fn new(client: Client, model: &str) -> Self {
        Self {
            client,
            model: model.to_string(),
        }
    }

    /// Consumes the model and wraps it in an agent builder.
    pub fn into_agent_builder(self) -> crate::agent::AgentBuilder<Self> {
        crate::agent::AgentBuilder::new(self)
    }

    /// Assembles the JSON request body for the `/chat/completions` endpoint.
    ///
    /// Message order: optional system preamble first, then normalized context
    /// documents (if any), then the chat history. `tools`/`tool_choice`,
    /// `temperature`, and `additional_params` are merged in only when present.
    pub(crate) fn create_completion_request(
        &self,
        completion_request: CompletionRequest,
    ) -> Result<Value, CompletionError> {
        // Build up the order of messages (context, chat_history)
        let mut partial_history = vec![];
        if let Some(docs) = completion_request.normalized_documents() {
            partial_history.push(docs);
        }
        partial_history.extend(completion_request.chat_history);

        // Initialize full history with preamble (or empty if non-existent)
        let mut full_history: Vec<Message> = completion_request
            .preamble
            .map_or_else(Vec::new, |preamble| vec![Message::system(&preamble)]);

        // Convert and extend the rest of the history. Each internal message may
        // fan out into several provider messages (see `TryFrom`), so flatten.
        full_history.extend(
            partial_history
                .into_iter()
                .map(message::Message::try_into)
                .collect::<Result<Vec<Vec<Message>>, _>>()?
                .into_iter()
                .flatten()
                .collect::<Vec<_>>(),
        );

        // Only send `tools`/`tool_choice` when tools are actually configured.
        let request = if completion_request.tools.is_empty() {
            serde_json::json!({
                "model": self.model,
                "messages": full_history,

            })
        } else {
            json!({
                "model": self.model,
                "messages": full_history,
                "tools": completion_request.tools.into_iter().map(ToolDefinition::from).collect::<Vec<_>>(),
                "tool_choice": "auto",
            })
        };

        // only include temperature if it exists
        // because some models don't support temperature
        let request = if let Some(temperature) = completion_request.temperature {
            json_utils::merge(
                request,
                json!({
                    "temperature": temperature,
                }),
            )
        } else {
            request
        };

        // Caller-supplied extras are merged last so they can override defaults.
        let request = if let Some(params) = completion_request.additional_params {
            json_utils::merge(request, params)
        } else {
            request
        };

        Ok(request)
    }
}
743
744impl completion::CompletionModel for CompletionModel {
745    type Response = CompletionResponse;
746    type StreamingResponse = StreamingCompletionResponse;
747
748    #[cfg_attr(feature = "worker", worker::send)]
749    async fn completion(
750        &self,
751        completion_request: CompletionRequest,
752    ) -> Result<completion::CompletionResponse<CompletionResponse>, CompletionError> {
753        let request = self.create_completion_request(completion_request)?;
754
755        tracing::debug!(
756            "OpenAI request: {request}",
757            request = serde_json::to_string_pretty(&request).unwrap()
758        );
759
760        let response = self
761            .client
762            .post("/chat/completions")
763            .json(&request)
764            .send()
765            .await?;
766
767        if response.status().is_success() {
768            let t = response.text().await?;
769            tracing::debug!(target: "rig", "OpenAI completion error: {}", t);
770
771            match serde_json::from_str::<ApiResponse<CompletionResponse>>(&t)? {
772                ApiResponse::Ok(response) => {
773                    tracing::info!(target: "rig",
774                        "OpenAI completion token usage: {:?}",
775                        response.usage.clone().map(|usage| format!("{}", usage.total_tokens)).unwrap_or("N/A".to_string())
776                    );
777                    response.try_into()
778                }
779                ApiResponse::Err(err) => Err(CompletionError::ProviderError(err.message)),
780            }
781        } else {
782            Err(CompletionError::ProviderError(response.text().await?))
783        }
784    }
785
786    #[cfg_attr(feature = "worker", worker::send)]
787    async fn stream(
788        &self,
789        request: CompletionRequest,
790    ) -> Result<
791        crate::streaming::StreamingCompletionResponse<Self::StreamingResponse>,
792        CompletionError,
793    > {
794        CompletionModel::stream(self, request).await
795    }
796}