// steer_core/api/openai/client.rs
use async_trait::async_trait;
use steer_tools::ToolSchema;
use tokio_util::sync::CancellationToken;

use crate::api::error::ApiError;
use crate::api::provider::{CompletionResponse, CompletionStream, Provider};
use crate::app::SystemContext;
use crate::app::conversation::Message;
use crate::auth::OpenAiResponsesAuth;
use crate::config::model::{ModelId, ModelParameters};

use super::OpenAIMode;
use super::chat;
use super::responses;
13
14pub struct OpenAIClient {
19 responses_client: responses::Client,
20 chat_client: Option<chat::Client>,
21 default_mode: OpenAIMode,
22}
23
24impl OpenAIClient {
25 pub fn with_mode(api_key: String, mode: OpenAIMode) -> Result<Self, ApiError> {
27 Ok(Self {
28 responses_client: responses::Client::new(api_key.clone())?,
29 chat_client: Some(chat::Client::new(api_key)?),
30 default_mode: mode,
31 })
32 }
33
34 pub fn with_base_url_mode(
36 api_key: String,
37 base_url: Option<String>,
38 mode: OpenAIMode,
39 ) -> Result<Self, ApiError> {
40 Ok(Self {
41 responses_client: responses::Client::with_base_url(api_key.clone(), base_url.clone())?,
42 chat_client: Some(chat::Client::with_base_url(api_key, base_url)?),
43 default_mode: mode,
44 })
45 }
46
47 pub fn with_directive(
48 directive: OpenAiResponsesAuth,
49 base_url: Option<String>,
50 ) -> Result<Self, ApiError> {
51 Ok(Self {
52 responses_client: responses::Client::with_directive(directive, base_url)?,
53 chat_client: None,
54 default_mode: OpenAIMode::Responses,
55 })
56 }
57}
58
59#[async_trait]
60impl Provider for OpenAIClient {
61 fn name(&self) -> &'static str {
62 "openai"
63 }
64
65 async fn complete(
66 &self,
67 model_id: &ModelId,
68 messages: Vec<Message>,
69 system: Option<SystemContext>,
70 tools: Option<Vec<ToolSchema>>,
71 call_options: Option<ModelParameters>,
72 token: CancellationToken,
73 ) -> Result<CompletionResponse, ApiError> {
74 match self.default_mode {
75 OpenAIMode::Responses => {
76 self.responses_client
77 .complete(model_id, messages, system, tools, call_options, token)
78 .await
79 }
80 OpenAIMode::Chat => {
81 let chat_client = self.chat_client.as_ref().ok_or_else(|| {
82 ApiError::Configuration(
83 "OpenAI chat mode is not available with OAuth authentication".to_string(),
84 )
85 })?;
86 chat_client
87 .complete(model_id, messages, system, tools, call_options, token)
88 .await
89 }
90 }
91 }
92
93 async fn stream_complete(
94 &self,
95 model_id: &ModelId,
96 messages: Vec<Message>,
97 system: Option<SystemContext>,
98 tools: Option<Vec<ToolSchema>>,
99 call_options: Option<ModelParameters>,
100 token: CancellationToken,
101 ) -> Result<CompletionStream, ApiError> {
102 match self.default_mode {
103 OpenAIMode::Responses => {
104 self.responses_client
105 .stream_complete(model_id, messages, system, tools, call_options, token)
106 .await
107 }
108 OpenAIMode::Chat => {
109 let chat_client = self.chat_client.as_ref().ok_or_else(|| {
110 ApiError::Configuration(
111 "OpenAI chat mode is not available with OAuth authentication".to_string(),
112 )
113 })?;
114 chat_client
115 .stream_complete(model_id, messages, system, tools, call_options, token)
116 .await
117 }
118 }
119 }
120}