1use super::mappers::{map_messages, map_tools};
2use super::oauth::CodexTokenManager;
3use super::streaming::process_response_stream;
4use crate::oauth::credential_store::OAuthCredentialStore;
5use crate::provider::{LlmResponseStream, ProviderFactory, StreamingModelProvider, get_context_window};
6use crate::{Context, LlmError, Result};
7use async_openai::types::responses::{
8 CreateResponse, IncludeEnum, InputParam, Reasoning, ReasoningEffort, ReasoningSummary, ResponseStreamEvent,
9 ResponseTextParam, TextResponseFormatConfiguration, Verbosity,
10};
11use eventsource_stream::Eventsource;
12use futures::StreamExt;
13use reqwest::header::{AUTHORIZATION, HeaderMap, HeaderValue};
14use std::sync::Arc;
15use tracing::debug;
16
/// Base URL of the ChatGPT backend used by Codex (not the public OpenAI API host).
const CODEX_API_BASE: &str = "https://chatgpt.com/backend-api/codex";
18
/// Streaming LLM provider backed by the ChatGPT Codex backend.
///
/// Authenticates with OAuth credentials via [`CodexTokenManager`] rather than
/// an API key, and speaks the OpenAI Responses API over server-sent events.
/// Cloning is cheap: the HTTP client and token manager are shared handles.
#[derive(Clone)]
pub struct CodexProvider {
    /// Shared HTTP client used for all requests.
    client: reqwest::Client,
    /// Model identifier sent with each request (default "gpt-5.4").
    model: String,
    /// Shared token manager; supplies access token + account id per request.
    token_manager: Arc<CodexTokenManager<OAuthCredentialStore>>,
}
25
impl CodexProvider {
    /// Creates a provider with a fresh HTTP client and the default model.
    pub fn new(token_manager: CodexTokenManager<OAuthCredentialStore>) -> Self {
        Self { client: reqwest::Client::new(), model: "gpt-5.4".to_string(), token_manager: Arc::new(token_manager) }
    }

    /// Builds the Responses API request body from the conversation context.
    ///
    /// Always requests a streamed (`stream: true`), non-persisted
    /// (`store: false`) response, and asks the API to include encrypted
    /// reasoning content so it can be carried across turns (the context is
    /// filtered per-model in `stream_response`). Reasoning effort falls back
    /// to `Medium` when the context does not specify one.
    ///
    /// # Errors
    /// Propagates failures from `map_messages` / `map_tools`.
    fn build_request(&self, context: &Context) -> Result<CreateResponse> {
        let (system_prompt, input) = map_messages(context.messages())?;
        // Omit the tools field entirely when the context declares no tools.
        let tools = if context.tools().is_empty() { None } else { Some(map_tools(context.tools())?) };

        let codex_effort = context.reasoning_effort().map_or(ReasoningEffort::Medium, to_codex_effort);

        Ok(CreateResponse {
            model: Some(self.model.clone()),
            input: InputParam::Items(input),
            instructions: system_prompt,
            tools,
            store: Some(false),
            stream: Some(true),
            reasoning: Some(Reasoning { effort: Some(codex_effort), summary: Some(ReasoningSummary::Auto) }),
            include: Some(vec![IncludeEnum::ReasoningEncryptedContent]),
            text: Some(ResponseTextParam {
                format: TextResponseFormatConfiguration::Text,
                verbosity: Some(Verbosity::Medium),
            }),
            prompt_cache_key: context.prompt_cache_key().map(String::from),
            ..Default::default()
        })
    }

    /// Assembles the auth and protocol headers the Codex backend requires.
    ///
    /// Obtains a currently-valid OAuth access token and the ChatGPT account
    /// id from the token manager, then adds the experimental-responses beta
    /// flag and the `originator` identifier expected by the backend.
    ///
    /// # Errors
    /// Returns `LlmError::InvalidApiKey` when a credential contains bytes
    /// that are not valid inside an HTTP header value.
    async fn build_headers(&self) -> Result<HeaderMap> {
        let (access_token, account_id) = self.token_manager.get_valid_token().await?;

        let mut headers = HeaderMap::new();
        headers.insert(
            AUTHORIZATION,
            HeaderValue::from_str(&format!("Bearer {access_token}"))
                .map_err(|e| LlmError::InvalidApiKey(e.to_string()))?,
        );
        headers.insert(
            "chatgpt-account-id",
            HeaderValue::from_str(&account_id).map_err(|e| LlmError::InvalidApiKey(e.to_string()))?,
        );
        headers.insert("OpenAI-Beta", HeaderValue::from_static("responses=experimental"));
        headers.insert("originator", HeaderValue::from_static("codex_cli_rs"));

        Ok(headers)
    }

    /// POSTs the request and converts the SSE body into a typed event stream.
    ///
    /// On a non-success status the body is read as text and wrapped in
    /// `LlmError::ApiError`; a 401/403 additionally clears the cached token
    /// so the next call re-authenticates. In the returned stream, events
    /// that fail to deserialize are logged at debug level and skipped rather
    /// than terminating the stream, and the `[DONE]` sentinel is dropped;
    /// transport errors are surfaced as `LlmError::IoError` items.
    async fn send_request(
        &self,
        request: CreateResponse,
        headers: HeaderMap,
    ) -> Result<impl futures::Stream<Item = Result<ResponseStreamEvent>>> {
        let url = format!("{CODEX_API_BASE}/responses");

        debug!("Sending request to Codex API: {url}");
        // NOTE: this logs the full request body (conversation content) when
        // debug logging is enabled.
        debug!(
            "Codex request body: {}",
            serde_json::to_string(&request).unwrap_or_else(|_| "<failed to serialize>".to_string())
        );

        let response = self
            .client
            .post(&url)
            .headers(headers)
            .json(&request)
            .send()
            .await
            .map_err(|e| LlmError::ApiRequest(e.to_string()))?;

        if !response.status().is_success() {
            let status = response.status();
            let error_text = response.text().await.unwrap_or_else(|_| "Unknown error".to_string());

            // Auth failures invalidate the cached token so a retry refreshes it.
            if status.as_u16() == 401 || status.as_u16() == 403 {
                self.token_manager.clear_cache().await;
            }

            return Err(LlmError::ApiError(format!("Codex API request failed with status {status}: {error_text}")));
        }

        let event_stream = response.bytes_stream().eventsource().filter_map(|result| {
            // filter_map expects a future; ready() wraps the sync match.
            std::future::ready(match result {
                Ok(event) if event.data == "[DONE]" => None,
                Ok(event) => match serde_json::from_str::<ResponseStreamEvent>(&event.data) {
                    Ok(parsed) => Some(Ok(parsed)),
                    Err(e) => {
                        // Tolerate unknown/garbled events instead of failing the turn.
                        debug!("Failed to parse Codex SSE line: {} - Error: {e}", event.data);
                        None
                    }
                },
                Err(e) => Some(Err(LlmError::IoError(e.to_string()))),
            })
        });

        Ok(event_stream)
    }
}
129
130impl ProviderFactory for CodexProvider {
131 async fn from_env() -> Result<Self> {
132 let store = OAuthCredentialStore::new(super::PROVIDER_ID);
133 let token_manager = CodexTokenManager::new(store, super::PROVIDER_ID);
134 Ok(Self::new(token_manager))
135 }
136
137 fn with_model(mut self, model: &str) -> Self {
138 self.model = model.to_string();
139 self
140 }
141}
142
impl StreamingModelProvider for CodexProvider {
    /// Parses "<provider-id>:<model>" into an `LlmModel`; `None` if unrecognized.
    fn model(&self) -> Option<crate::LlmModel> {
        format!("{}:{}", super::PROVIDER_ID, self.model).parse().ok()
    }

    /// Looks up the context window for the configured model, if known.
    fn context_window(&self) -> Option<u32> {
        get_context_window(super::PROVIDER_ID, &self.model)
    }

    /// Streams a model response for `context`.
    ///
    /// The provider and context are cloned into the async stream so the
    /// returned stream owns its state. Failures from header construction,
    /// request building, or the HTTP send are yielded as a single error item
    /// and the stream ends; otherwise SSE events are forwarded through
    /// `process_response_stream`.
    fn stream_response(&self, context: &Context) -> LlmResponseStream {
        let provider = self.clone();
        // Strip encrypted reasoning blocks that don't match this model; if
        // the model id can't be parsed, pass the context through unchanged.
        let context = match self.model() {
            Some(model) => context.filter_encrypted_reasoning(&model),
            None => context.clone(),
        };

        Box::pin(async_stream::stream! {
            let headers = match provider.build_headers().await {
                Ok(h) => h,
                Err(e) => {
                    yield Err(e);
                    return;
                }
            };

            let request = match provider.build_request(&context) {
                Ok(r) => r,
                Err(e) => {
                    yield Err(e);
                    return;
                }
            };

            let event_stream = match provider.send_request(request, headers).await {
                Ok(s) => s,
                Err(e) => {
                    yield Err(e);
                    return;
                }
            };

            // Pin required: process_response_stream returns an unnameable stream.
            let mut response_stream = Box::pin(process_response_stream(event_stream));
            while let Some(result) = response_stream.next().await {
                yield result;
            }
        })
    }

    /// Human-readable provider label, e.g. "Codex (gpt-5.4)".
    fn display_name(&self) -> String {
        format!("Codex ({})", self.model)
    }
}
195
196fn to_codex_effort(effort: crate::ReasoningEffort) -> ReasoningEffort {
197 match effort {
198 crate::ReasoningEffort::Low => ReasoningEffort::Low,
199 crate::ReasoningEffort::Medium => ReasoningEffort::Medium,
200 crate::ReasoningEffort::High => ReasoningEffort::High,
201 crate::ReasoningEffort::Xhigh => ReasoningEffort::Xhigh,
202 }
203}
204
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ChatMessage;
    use crate::ContentBlock;
    use crate::ToolDefinition;
    use crate::types::IsoString;

    // Builds a provider against a throwaway credential-store id. Safe for
    // request-construction tests: build_request never touches the network
    // or the token manager.
    fn create_test_provider() -> CodexProvider {
        let store = OAuthCredentialStore::new("codex-test");
        let tm = CodexTokenManager::new(store, "codex-test");
        CodexProvider::new(tm).with_model("gpt-5.4")
    }

    // A bare user message yields the fixed flags (no store, streaming) with
    // no tools and no system instructions.
    #[test]
    fn build_request_simple() {
        let provider = create_test_provider();
        let context = Context::new(
            vec![ChatMessage::User { content: vec![ContentBlock::text("Hello")], timestamp: IsoString::now() }],
            vec![],
        );

        let request = provider.build_request(&context).unwrap();
        assert_eq!(request.model.as_deref(), Some("gpt-5.4"));
        assert_eq!(request.store, Some(false));
        assert_eq!(request.stream, Some(true));
        assert!(request.tools.is_none());
        assert!(request.instructions.is_none());
        if let InputParam::Items(items) = &request.input {
            assert_eq!(items.len(), 1);
        } else {
            panic!("Expected InputParam::Items");
        }
    }

    // System message maps to `instructions` (leaving one input item) and
    // tool definitions map into the request's tools list.
    #[test]
    fn build_request_with_system_and_tools() {
        let provider = create_test_provider();
        let context = Context::new(
            vec![
                ChatMessage::System { content: "You are helpful".to_string(), timestamp: IsoString::now() },
                ChatMessage::User { content: vec![ContentBlock::text("Hello")], timestamp: IsoString::now() },
            ],
            vec![ToolDefinition {
                name: "bash".to_string(),
                description: "Run a command".to_string(),
                parameters: r#"{"type": "object", "properties": {"cmd": {"type": "string"}}}"#.to_string(),
                server: None,
            }],
        );

        let request = provider.build_request(&context).unwrap();
        assert!(request.instructions.is_some());
        if let InputParam::Items(items) = &request.input {
            assert_eq!(items.len(), 1);
        } else {
            panic!("Expected InputParam::Items");
        }
        assert!(request.tools.is_some());
        assert_eq!(request.tools.as_ref().unwrap().len(), 1);
    }

    #[test]
    fn display_name_includes_model() {
        let provider = create_test_provider();
        assert_eq!(provider.display_name(), "Codex (gpt-5.4)");
    }

    // No effort on the context -> build_request falls back to Medium.
    #[test]
    fn build_request_defaults_to_medium_effort() {
        let provider = create_test_provider();
        let context = Context::new(
            vec![ChatMessage::User { content: vec![ContentBlock::text("Hi")], timestamp: IsoString::now() }],
            vec![],
        );

        let request = provider.build_request(&context).unwrap();
        let json = serde_json::to_value(&request).unwrap();
        assert_eq!(json["reasoning"]["effort"], "medium");
    }

    // An explicit effort on the context overrides the Medium default.
    #[test]
    fn build_request_uses_context_reasoning_effort() {
        let provider = create_test_provider();
        let mut context = Context::new(
            vec![ChatMessage::User { content: vec![ContentBlock::text("Think hard")], timestamp: IsoString::now() }],
            vec![],
        );
        context.set_reasoning_effort(Some(crate::ReasoningEffort::High));

        let request = provider.build_request(&context).unwrap();
        let json = serde_json::to_value(&request).unwrap();
        assert_eq!(json["reasoning"]["effort"], "high");
    }

    // Checks the on-the-wire JSON shape, including the encrypted-reasoning
    // include entry and text verbosity.
    #[test]
    fn build_request_serializes_correctly() {
        let provider = create_test_provider();
        let context = Context::new(
            vec![ChatMessage::User { content: vec![ContentBlock::text("Hi")], timestamp: IsoString::now() }],
            vec![],
        );

        let request = provider.build_request(&context).unwrap();
        let json = serde_json::to_value(&request).unwrap();

        assert_eq!(json["model"], "gpt-5.4");
        assert_eq!(json["store"], false);
        assert_eq!(json["stream"], true);
        assert_eq!(json["reasoning"]["effort"], "medium");
        assert_eq!(json["text"]["verbosity"], "medium");
        assert_eq!(json["include"][0], "reasoning.encrypted_content");
    }

    // prompt_cache_key is forwarded verbatim from the context when set...
    #[test]
    fn build_request_includes_prompt_cache_key_when_set() {
        let provider = create_test_provider();
        let mut context = Context::new(
            vec![ChatMessage::User { content: vec![ContentBlock::text("Hi")], timestamp: IsoString::now() }],
            vec![],
        );
        context.set_prompt_cache_key(Some("session-abc".to_string()));

        let request = provider.build_request(&context).unwrap();
        assert_eq!(request.prompt_cache_key.as_deref(), Some("session-abc"));
    }

    // ...and omitted entirely when the context has none.
    #[test]
    fn build_request_omits_prompt_cache_key_when_unset() {
        let provider = create_test_provider();
        let context = Context::new(
            vec![ChatMessage::User { content: vec![ContentBlock::text("Hi")], timestamp: IsoString::now() }],
            vec![],
        );

        let request = provider.build_request(&context).unwrap();
        assert!(request.prompt_cache_key.is_none());
    }
}