vtcode_core/llm/providers/openai.rs

use crate::config::constants::{models, urls};
use crate::llm::client::LLMClient;
use crate::llm::error_display;
use crate::llm::provider::{
    FinishReason, LLMError, LLMProvider, LLMRequest, LLMResponse, Message, MessageRole, ToolCall,
    ToolChoice, ToolDefinition,
};
use crate::llm::types as llm_types;
use async_trait::async_trait;
use reqwest::Client as HttpClient;
use serde_json::{Value, json};

use super::{extract_reasoning_trace, gpt5_codex_developer_prompt};

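/// Provider implementation for the OpenAI API.
///
/// Holds the API key, a shared `reqwest` client, the base URL (overridable
/// for proxies or OpenAI-compatible gateways), and the default model used
/// when a request does not name one.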
pub struct OpenAIProvider {
    api_key: String,
    http_client: HttpClient,
    base_url: String,
    model: String,
}

impl OpenAIProvider {
    fn is_gpt5_codex_model(model: &str) -> bool {
        model == models::openai::GPT_5_CODEX
    }

    fn is_reasoning_model(model: &str) -> bool {
        models::openai::REASONING_MODELS
            .iter()
            .any(|candidate| *candidate == model)
    }

    fn uses_responses_api(model: &str) -> bool {
        Self::is_gpt5_codex_model(model)
    }

    pub fn new(api_key: String) -> Self {
        Self::with_model(api_key, models::openai::DEFAULT_MODEL.to_string())
    }

    pub fn with_model(api_key: String, model: String) -> Self {
        Self {
            api_key,
            http_client: HttpClient::new(),
            base_url: urls::OPENAI_API_BASE.to_string(),
            model,
        }
    }

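    /// Builds a provider from optional config values, falling back to the
    /// default model and base URL when they are absent.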
    pub fn from_config(
        api_key: Option<String>,
        model: Option<String>,
        base_url: Option<String>,
    ) -> Self {
        let api_key_value = api_key.unwrap_or_default();
        let mut provider = if let Some(model_value) = model {
            Self::with_model(api_key_value, model_value)
        } else {
            Self::new(api_key_value)
        };
        if let Some(base) = base_url {
            provider.base_url = base;
        }
        provider
    }

    fn supports_temperature_parameter(model: &str) -> bool {
        // GPT-5 variants and the GPT-5 Codex model don't support the
        // temperature parameter; all other OpenAI models generally do.
        !Self::is_gpt5_codex_model(model)
            && model != models::openai::GPT_5
            && model != models::openai::GPT_5_MINI
            && model != models::openai::GPT_5_NANO
    }

    fn default_request(&self, prompt: &str) -> LLMRequest {
        LLMRequest {
            messages: vec![Message::user(prompt.to_string())],
            system_prompt: None,
            tools: None,
            model: self.model.clone(),
            max_tokens: None,
            temperature: None,
            stream: false,
            tool_choice: None,
            parallel_tool_calls: None,
            parallel_tool_config: None,
            reasoning_effort: None,
        }
    }

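    /// Interprets a raw prompt from the `LLMClient` interface. If the prompt
    /// looks like a serialized chat payload (it starts with `{` and parses as
    /// JSON with a usable `messages` array), it is converted into a
    /// structured request; otherwise it is wrapped as a single user message.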
    fn parse_client_prompt(&self, prompt: &str) -> LLMRequest {
        let trimmed = prompt.trim_start();
        if trimmed.starts_with('{') {
            if let Ok(value) = serde_json::from_str::<Value>(trimmed) {
                if let Some(request) = self.parse_chat_request(&value) {
                    return request;
                }
            }
        }

        self.default_request(prompt)
    }

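    /// Converts an OpenAI-style chat payload (`messages`, `tools`,
    /// `tool_choice`, sampling options) into an `LLMRequest`. Returns `None`
    /// when the payload has no usable messages.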
    fn parse_chat_request(&self, value: &Value) -> Option<LLMRequest> {
        let messages_value = value.get("messages")?.as_array()?;
        let mut system_prompt = None;
        let mut messages = Vec::new();

        for entry in messages_value {
            let role = entry
                .get("role")
                .and_then(|r| r.as_str())
                .unwrap_or(crate::config::constants::message_roles::USER);
            let content = entry.get("content");
            let text_content = content.map(Self::extract_content_text).unwrap_or_default();

            match role {
                "system" => {
                    if system_prompt.is_none() && !text_content.is_empty() {
                        system_prompt = Some(text_content);
                    }
                }
                "assistant" => {
                    let tool_calls = entry
                        .get("tool_calls")
                        .and_then(|tc| tc.as_array())
                        .map(|calls| {
                            calls
                                .iter()
                                .filter_map(|call| {
                                    let id = call.get("id").and_then(|v| v.as_str())?;
                                    let function = call.get("function")?;
                                    let name = function.get("name").and_then(|v| v.as_str())?;
                                    let arguments = function.get("arguments");
                                    let serialized = arguments.map_or("{}".to_string(), |value| {
                                        if value.is_string() {
                                            value.as_str().unwrap_or("").to_string()
                                        } else {
                                            value.to_string()
                                        }
                                    });
                                    Some(ToolCall::function(
                                        id.to_string(),
                                        name.to_string(),
                                        serialized,
                                    ))
                                })
                                .collect::<Vec<_>>()
                        })
                        .filter(|calls| !calls.is_empty());

                    let message = if let Some(calls) = tool_calls {
                        Message {
                            role: MessageRole::Assistant,
                            content: text_content,
                            tool_calls: Some(calls),
                            tool_call_id: None,
                        }
                    } else {
                        Message::assistant(text_content)
                    };
                    messages.push(message);
                }
                "tool" => {
                    let tool_call_id = entry
                        .get("tool_call_id")
                        .and_then(|id| id.as_str())
                        .map(|s| s.to_string());
                    let content_value = entry
                        .get("content")
                        .map(|value| {
                            if text_content.is_empty() {
                                value.to_string()
                            } else {
                                text_content.clone()
                            }
                        })
                        .unwrap_or_else(|| text_content.clone());
                    messages.push(Message {
                        role: MessageRole::Tool,
                        content: content_value,
                        tool_calls: None,
                        tool_call_id,
                    });
                }
                _ => {
                    messages.push(Message::user(text_content));
                }
            }
        }

        if messages.is_empty() {
            return None;
        }

        let tools = value.get("tools").and_then(|tools_value| {
            let tools_array = tools_value.as_array()?;
            let converted: Vec<_> = tools_array
                .iter()
                .filter_map(|tool| {
                    let function = tool.get("function")?;
                    let name = function.get("name").and_then(|n| n.as_str())?;
                    let description = function
                        .get("description")
                        .and_then(|d| d.as_str())
                        .unwrap_or("")
                        .to_string();
                    let parameters = function
                        .get("parameters")
                        .cloned()
                        .unwrap_or_else(|| json!({}));
                    Some(ToolDefinition::function(
                        name.to_string(),
                        description,
                        parameters,
                    ))
                })
                .collect();

            if converted.is_empty() {
                None
            } else {
                Some(converted)
            }
        });
        let temperature = value
            .get("temperature")
            .and_then(|v| v.as_f64())
            .map(|v| v as f32);
        let max_tokens = value
            .get("max_tokens")
            .and_then(|v| v.as_u64())
            .map(|v| v as u32);
        let stream = value
            .get("stream")
            .and_then(|v| v.as_bool())
            .unwrap_or(false);
        let tool_choice = value.get("tool_choice").and_then(Self::parse_tool_choice);
        let parallel_tool_calls = value.get("parallel_tool_calls").and_then(|v| v.as_bool());
        let reasoning_effort = value
            .get("reasoning_effort")
            .and_then(|v| v.as_str())
            .map(|s| s.to_string())
            .or_else(|| {
                value
                    .get("reasoning")
                    .and_then(|r| r.get("effort"))
                    .and_then(|effort| effort.as_str())
                    .map(|s| s.to_string())
            });

        let model = value
            .get("model")
            .and_then(|m| m.as_str())
            .unwrap_or(&self.model)
            .to_string();

        Some(LLMRequest {
            messages,
            system_prompt,
            tools,
            model,
            max_tokens,
            temperature,
            stream,
            tool_choice,
            parallel_tool_calls,
            parallel_tool_config: None,
            reasoning_effort,
        })
    }

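    /// Flattens a `content` value into plain text. OpenAI content may be a
    /// bare string or an array of parts such as
    /// `[{"type": "text", "text": "..."}]`; non-text parts are skipped.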
    fn extract_content_text(content: &Value) -> String {
        match content {
            Value::String(text) => text.to_string(),
            Value::Array(parts) => parts
                .iter()
                .filter_map(|part| {
                    if let Some(text) = part.get("text").and_then(|t| t.as_str()) {
                        Some(text.to_string())
                    } else if let Some(Value::String(text)) = part.get("content") {
                        Some(text.clone())
                    } else {
                        None
                    }
                })
                .collect::<Vec<_>>()
                .join(""),
            _ => String::new(),
        }
    }

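    /// Maps an OpenAI `tool_choice` value (string form or object form) onto
    /// the provider-neutral `ToolChoice` type.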
    fn parse_tool_choice(choice: &Value) -> Option<ToolChoice> {
        match choice {
            Value::String(value) => match value.as_str() {
                "auto" => Some(ToolChoice::auto()),
                "none" => Some(ToolChoice::none()),
                "required" => Some(ToolChoice::any()),
                _ => None,
            },
            Value::Object(map) => {
                let choice_type = map.get("type").and_then(|t| t.as_str())?;
                match choice_type {
                    "function" => map
                        .get("function")
                        .and_then(|f| f.get("name"))
                        .and_then(|n| n.as_str())
                        .map(|name| ToolChoice::function(name.to_string())),
                    "auto" => Some(ToolChoice::auto()),
                    "none" => Some(ToolChoice::none()),
                    "any" | "required" => Some(ToolChoice::any()),
                    _ => None,
                }
            }
            _ => None,
        }
    }

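    /// Builds a Chat Completions request body. The output mirrors the
    /// `POST /chat/completions` schema: `model`, `messages`, `stream`, plus
    /// optional `max_tokens`, `temperature`, `tools`, `tool_choice`,
    /// `parallel_tool_calls`, and reasoning settings.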
    fn convert_to_openai_format(&self, request: &LLMRequest) -> Result<Value, LLMError> {
        let mut messages = Vec::new();

        if let Some(system_prompt) = &request.system_prompt {
            messages.push(json!({
                "role": crate::config::constants::message_roles::SYSTEM,
                "content": system_prompt
            }));
        }

        for msg in &request.messages {
            let role = msg.role.as_openai_str();
            let mut message = json!({
                "role": role,
                "content": msg.content
            });

            if msg.role == MessageRole::Assistant {
                if let Some(tool_calls) = &msg.tool_calls {
                    if !tool_calls.is_empty() {
                        let tool_calls_json: Vec<Value> = tool_calls
                            .iter()
                            .map(|tc| {
                                json!({
                                    "id": tc.id,
                                    "type": "function",
                                    "function": {
                                        "name": tc.function.name,
                                        "arguments": tc.function.arguments
                                    }
                                })
                            })
                            .collect();
                        message["tool_calls"] = Value::Array(tool_calls_json);
                    }
                }
            }

            if msg.role == MessageRole::Tool {
                if let Some(tool_call_id) = &msg.tool_call_id {
                    message["tool_call_id"] = Value::String(tool_call_id.clone());
                }
            }

            messages.push(message);
        }

        if messages.is_empty() {
            let formatted_error = error_display::format_llm_error("OpenAI", "No messages provided");
            return Err(LLMError::InvalidRequest(formatted_error));
        }

        let mut openai_request = json!({
            "model": request.model,
            "messages": messages,
            "stream": request.stream
        });

        if let Some(max_tokens) = request.max_tokens {
            openai_request["max_tokens"] = json!(max_tokens);
        }

        if request.temperature.is_some() && Self::supports_temperature_parameter(&request.model) {
            if let Some(temperature) = request.temperature {
                openai_request["temperature"] = json!(temperature);
            }
        }

        if let Some(tools) = &request.tools {
            if !tools.is_empty() {
                let tools_json: Vec<Value> = tools
                    .iter()
                    .map(|tool| {
                        // Chat Completions expects function tools nested under
                        // a "function" key; the flat form belongs to the
                        // Responses API.
                        json!({
                            "type": "function",
                            "function": {
                                "name": tool.function.name,
                                "description": tool.function.description,
                                "parameters": tool.function.parameters
                            }
                        })
                    })
                    .collect();
                openai_request["tools"] = Value::Array(tools_json);
            }
        }

        if let Some(tool_choice) = &request.tool_choice {
            openai_request["tool_choice"] = tool_choice.to_provider_format("openai");
        }

        if let Some(parallel) = request.parallel_tool_calls {
            openai_request["parallel_tool_calls"] = Value::Bool(parallel);
        }

        if let Some(effort) = request.reasoning_effort.as_deref() {
            if self.supports_reasoning_effort(&request.model) {
                // Chat Completions takes a top-level `reasoning_effort`
                // string; the nested `reasoning.effort` object is the
                // Responses API form.
                openai_request["reasoning_effort"] = json!(effort);
            }
        }

        Ok(openai_request)
    }

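    /// Builds a Responses API request body (`POST /responses`): `model`,
    /// `input`, `stream`, plus optional `max_output_tokens`, `tools`,
    /// `tool_choice`, and a `reasoning.effort` object for reasoning models.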
    fn convert_to_openai_responses_format(&self, request: &LLMRequest) -> Result<Value, LLMError> {
        let input = if Self::is_gpt5_codex_model(&request.model) {
            build_codex_responses_input_openai(request)?
        } else {
            build_standard_responses_input_openai(request)?
        };

        if input.is_empty() {
            let formatted_error =
                error_display::format_llm_error("OpenAI", "No messages provided for Responses API");
            return Err(LLMError::InvalidRequest(formatted_error));
        }

        let mut openai_request = json!({
            "model": request.model,
            "input": input,
            "stream": request.stream
        });

        if let Some(max_tokens) = request.max_tokens {
            openai_request["max_output_tokens"] = json!(max_tokens);
        }

        if request.temperature.is_some() && Self::supports_temperature_parameter(&request.model) {
            if let Some(temperature) = request.temperature {
                openai_request["temperature"] = json!(temperature);
            }
        }

        if let Some(tools) = &request.tools {
            if !tools.is_empty() {
                let tools_json: Vec<Value> = tools
                    .iter()
                    .map(|tool| {
                        // The Responses API takes function tools in flat form:
                        // name/description/parameters alongside "type".
                        json!({
                            "type": "function",
                            "name": tool.function.name,
                            "description": tool.function.description,
                            "parameters": tool.function.parameters
                        })
                    })
                    .collect();
                openai_request["tools"] = Value::Array(tools_json);
            }
        }

        if let Some(tool_choice) = &request.tool_choice {
            openai_request["tool_choice"] = tool_choice.to_provider_format("openai");
        }

        if let Some(parallel) = request.parallel_tool_calls {
            openai_request["parallel_tool_calls"] = Value::Bool(parallel);
        }

        if let Some(effort) = request.reasoning_effort.as_deref() {
            if self.supports_reasoning_effort(&request.model) {
                openai_request["reasoning"] = json!({ "effort": effort });
            }
        }

        // Default reasoning models to medium effort without clobbering an
        // effort that was set explicitly above.
        if Self::is_reasoning_model(&request.model) && openai_request.get("reasoning").is_none() {
            openai_request["reasoning"] = json!({ "effort": "medium" });
        }

        Ok(openai_request)
    }

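    /// Parses a Chat Completions response: extracts the first choice's
    /// message content, tool calls, optional reasoning trace, finish reason,
    /// and token usage.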
    fn parse_openai_response(&self, response_json: Value) -> Result<LLMResponse, LLMError> {
        let choices = response_json
            .get("choices")
            .and_then(|c| c.as_array())
            .ok_or_else(|| {
                let formatted_error = error_display::format_llm_error(
                    "OpenAI",
                    "Invalid response format: missing choices",
                );
                LLMError::Provider(formatted_error)
            })?;

        if choices.is_empty() {
            let formatted_error =
                error_display::format_llm_error("OpenAI", "No choices in response");
            return Err(LLMError::Provider(formatted_error));
        }

        let choice = &choices[0];
        let message = choice.get("message").ok_or_else(|| {
            let formatted_error = error_display::format_llm_error(
                "OpenAI",
                "Invalid response format: missing message",
            );
            LLMError::Provider(formatted_error)
        })?;

        let content = match message.get("content") {
            Some(Value::String(text)) => Some(text.to_string()),
            Some(Value::Array(parts)) => {
                let text = parts
                    .iter()
                    .filter_map(|part| part.get("text").and_then(|t| t.as_str()))
                    .collect::<Vec<_>>()
                    .join("");
                if text.is_empty() { None } else { Some(text) }
            }
            _ => None,
        };

        let tool_calls = message
            .get("tool_calls")
            .and_then(|tc| tc.as_array())
            .map(|calls| {
                calls
                    .iter()
                    .filter_map(|call| {
                        let id = call.get("id").and_then(|v| v.as_str())?;
                        let function = call.get("function")?;
                        let name = function.get("name").and_then(|v| v.as_str())?;
                        let arguments = function.get("arguments");
                        let serialized = arguments.map_or("{}".to_string(), |value| {
                            if value.is_string() {
                                value.as_str().unwrap_or("").to_string()
                            } else {
                                value.to_string()
                            }
                        });
                        Some(ToolCall::function(
                            id.to_string(),
                            name.to_string(),
                            serialized,
                        ))
                    })
                    .collect::<Vec<_>>()
            })
            .filter(|calls| !calls.is_empty());

        let reasoning = message
            .get("reasoning")
            .and_then(extract_reasoning_trace)
            .or_else(|| choice.get("reasoning").and_then(extract_reasoning_trace));

        let finish_reason = choice
            .get("finish_reason")
            .and_then(|fr| fr.as_str())
            .map(|fr| match fr {
                "stop" => FinishReason::Stop,
                "length" => FinishReason::Length,
                "tool_calls" => FinishReason::ToolCalls,
                "content_filter" => FinishReason::ContentFilter,
                other => FinishReason::Error(other.to_string()),
            })
            .unwrap_or(FinishReason::Stop);

        let usage = response_json
            .get("usage")
            .map(|usage_value| crate::llm::provider::Usage {
                prompt_tokens: usage_value
                    .get("prompt_tokens")
                    .and_then(|pt| pt.as_u64())
                    .unwrap_or(0) as u32,
                completion_tokens: usage_value
                    .get("completion_tokens")
                    .and_then(|ct| ct.as_u64())
                    .unwrap_or(0) as u32,
                total_tokens: usage_value
                    .get("total_tokens")
                    .and_then(|tt| tt.as_u64())
                    .unwrap_or(0) as u32,
            });

        Ok(LLMResponse {
            content,
            tool_calls,
            usage,
            finish_reason,
            reasoning,
        })
    }

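    /// Parses a Responses API payload: walks the `output` array, collecting
    /// `output_text`/`text` fragments, `reasoning` fragments, and `tool_call`
    /// entries, then maps `stop_reason` and token usage.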
    fn parse_openai_responses_response(
        &self,
        response_json: Value,
    ) -> Result<LLMResponse, LLMError> {
        let output = response_json
            .get("output")
            .and_then(|value| value.as_array())
            .ok_or_else(|| {
                let formatted_error = error_display::format_llm_error(
                    "OpenAI",
                    "Invalid response format: missing output",
                );
                LLMError::Provider(formatted_error)
            })?;

        if output.is_empty() {
            let formatted_error =
                error_display::format_llm_error("OpenAI", "No output in response");
            return Err(LLMError::Provider(formatted_error));
        }

        let mut content_fragments = Vec::new();
        let mut reasoning_fragments = Vec::new();
        let mut tool_calls_vec = Vec::new();

        for item in output {
            let item_type = item
                .get("type")
                .and_then(|value| value.as_str())
                .unwrap_or("");
            if item_type != "message" {
                continue;
            }

            if let Some(content_array) = item.get("content").and_then(|value| value.as_array()) {
                for entry in content_array {
                    let entry_type = entry
                        .get("type")
                        .and_then(|value| value.as_str())
                        .unwrap_or("");
                    match entry_type {
                        "output_text" | "text" => {
                            if let Some(text) = entry.get("text").and_then(|value| value.as_str()) {
                                if !text.is_empty() {
                                    content_fragments.push(text.to_string());
                                }
                            }
                        }
                        "reasoning" => {
                            if let Some(text) = entry.get("text").and_then(|value| value.as_str()) {
                                if !text.is_empty() {
                                    reasoning_fragments.push(text.to_string());
                                }
                            }
                        }
                        "tool_call" => {
                            let (name_value, arguments_value) = if let Some(function) =
                                entry.get("function").and_then(|value| value.as_object())
                            {
                                let name = function.get("name").and_then(|value| value.as_str());
                                let arguments = function.get("arguments");
                                (name, arguments)
                            } else {
                                let name = entry.get("name").and_then(|value| value.as_str());
                                let arguments = entry.get("arguments");
                                (name, arguments)
                            };

                            if let Some(name) = name_value {
                                let id = entry
                                    .get("id")
                                    .and_then(|value| value.as_str())
                                    .unwrap_or("");
                                let serialized =
                                    arguments_value.map_or("{}".to_string(), |value| {
                                        if value.is_string() {
                                            value.as_str().unwrap_or("").to_string()
                                        } else {
                                            value.to_string()
                                        }
                                    });
                                tool_calls_vec.push(ToolCall::function(
                                    id.to_string(),
                                    name.to_string(),
                                    serialized,
                                ));
                            }
                        }
                        _ => {}
                    }
                }
            }
        }

        let content = if content_fragments.is_empty() {
            None
        } else {
            Some(content_fragments.join(""))
        };

        let reasoning = if reasoning_fragments.is_empty() {
            None
        } else {
            Some(reasoning_fragments.join(""))
        };

        let tool_calls = if tool_calls_vec.is_empty() {
            None
        } else {
            Some(tool_calls_vec)
        };

        let usage = response_json
            .get("usage")
            .map(|usage_value| crate::llm::provider::Usage {
                prompt_tokens: usage_value
                    .get("input_tokens")
                    .and_then(|pt| pt.as_u64())
                    .unwrap_or(0) as u32,
                completion_tokens: usage_value
                    .get("output_tokens")
                    .and_then(|ct| ct.as_u64())
                    .unwrap_or(0) as u32,
                total_tokens: usage_value
                    .get("total_tokens")
                    .and_then(|tt| tt.as_u64())
                    .unwrap_or(0) as u32,
            });

        let stop_reason = response_json
            .get("stop_reason")
            .and_then(|value| value.as_str())
            .or_else(|| {
                output
                    .iter()
                    .find_map(|item| item.get("stop_reason").and_then(|value| value.as_str()))
            })
            .unwrap_or("stop");

        let finish_reason = match stop_reason {
            "stop" => FinishReason::Stop,
            "max_output_tokens" | "length" => FinishReason::Length,
            "tool_use" | "tool_calls" => FinishReason::ToolCalls,
            other => FinishReason::Error(other.to_string()),
        };

        Ok(LLMResponse {
            content,
            tool_calls,
            usage,
            finish_reason,
            reasoning,
        })
    }
}

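/// Converts provider-neutral messages into the Responses API `input` array.
/// System prompts become `developer` entries; user text becomes `input_text`
/// parts; assistant turns carry `output_text` and `tool_call` parts; tool
/// results become `tool_result` entries keyed by `tool_call_id`.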
fn build_standard_responses_input_openai(request: &LLMRequest) -> Result<Vec<Value>, LLMError> {
    let mut input = Vec::new();

    if let Some(system_prompt) = &request.system_prompt {
        if !system_prompt.trim().is_empty() {
            input.push(json!({
                "role": "developer",
                "content": [{
                    "type": "input_text",
                    "text": system_prompt.clone()
                }]
            }));
        }
    }

    for msg in &request.messages {
        match msg.role {
            MessageRole::System => {
                if !msg.content.trim().is_empty() {
                    input.push(json!({
                        "role": "developer",
                        "content": [{
                            "type": "input_text",
                            "text": msg.content.clone()
                        }]
                    }));
                }
            }
            MessageRole::User => {
                input.push(json!({
                    "role": "user",
                    "content": [{
                        "type": "input_text",
                        "text": msg.content.clone()
                    }]
                }));
            }
            MessageRole::Assistant => {
                let mut content_parts = Vec::new();
                if !msg.content.is_empty() {
                    content_parts.push(json!({
                        "type": "output_text",
                        "text": msg.content.clone()
                    }));
                }

                if let Some(tool_calls) = &msg.tool_calls {
                    for call in tool_calls {
                        content_parts.push(json!({
                            "type": "tool_call",
                            "id": call.id.clone(),
                            "function": {
                                "name": call.function.name.clone(),
                                "arguments": call.function.arguments.clone()
                            }
                        }));
                    }
                }

                if !content_parts.is_empty() {
                    input.push(json!({
                        "role": "assistant",
                        "content": content_parts
                    }));
                }
            }
            MessageRole::Tool => {
                let tool_call_id = msg.tool_call_id.clone().ok_or_else(|| {
                    let formatted_error = error_display::format_llm_error(
                        "OpenAI",
                        "Tool messages must include tool_call_id for Responses API",
                    );
                    LLMError::InvalidRequest(formatted_error)
                })?;

                let mut tool_content = Vec::new();
                if !msg.content.trim().is_empty() {
                    tool_content.push(json!({
                        "type": "output_text",
                        "text": msg.content.clone()
                    }));
                }

                let mut tool_result = json!({
                    "type": "tool_result",
                    "tool_call_id": tool_call_id
                });

                if !tool_content.is_empty() {
                    if let Value::Object(ref mut map) = tool_result {
                        map.insert("content".to_string(), json!(tool_content));
                    }
                }

                input.push(json!({
                    "role": "tool",
                    "content": [tool_result]
                }));
            }
        }
    }

    Ok(input)
}

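/// Same conversion as the standard builder, except system prompts are folded
/// into the GPT-5 Codex developer prompt, which is prepended as the first
/// `developer` input entry.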
fn build_codex_responses_input_openai(request: &LLMRequest) -> Result<Vec<Value>, LLMError> {
    let mut additional_guidance = Vec::new();

    if let Some(system_prompt) = &request.system_prompt {
        let trimmed = system_prompt.trim();
        if !trimmed.is_empty() {
            additional_guidance.push(trimmed.to_string());
        }
    }

    let mut input = Vec::new();

    for msg in &request.messages {
        match msg.role {
            MessageRole::System => {
                let trimmed = msg.content.trim();
                if !trimmed.is_empty() {
                    additional_guidance.push(trimmed.to_string());
                }
            }
            MessageRole::User => {
                input.push(json!({
                    "role": "user",
                    "content": [{
                        "type": "input_text",
                        "text": msg.content.clone()
                    }]
                }));
            }
            MessageRole::Assistant => {
                let mut content_parts = Vec::new();
                if !msg.content.is_empty() {
                    content_parts.push(json!({
                        "type": "output_text",
                        "text": msg.content.clone()
                    }));
                }

                if let Some(tool_calls) = &msg.tool_calls {
                    for call in tool_calls {
                        content_parts.push(json!({
                            "type": "tool_call",
                            "id": call.id.clone(),
                            "function": {
                                "name": call.function.name.clone(),
                                "arguments": call.function.arguments.clone()
                            }
                        }));
                    }
                }

                if !content_parts.is_empty() {
                    input.push(json!({
                        "role": "assistant",
                        "content": content_parts
                    }));
                }
            }
            MessageRole::Tool => {
                let tool_call_id = msg.tool_call_id.clone().ok_or_else(|| {
                    let formatted_error = error_display::format_llm_error(
                        "OpenAI",
                        "Tool messages must include tool_call_id for Responses API",
                    );
                    LLMError::InvalidRequest(formatted_error)
                })?;

                let mut tool_content = Vec::new();
                if !msg.content.trim().is_empty() {
                    tool_content.push(json!({
                        "type": "output_text",
                        "text": msg.content.clone()
                    }));
                }

                let mut tool_result = json!({
                    "type": "tool_result",
                    "tool_call_id": tool_call_id
                });

                if !tool_content.is_empty() {
                    if let Value::Object(ref mut map) = tool_result {
                        map.insert("content".to_string(), json!(tool_content));
                    }
                }

                input.push(json!({
                    "role": "tool",
                    "content": [tool_result]
                }));
            }
        }
    }

    let developer_prompt = gpt5_codex_developer_prompt(&additional_guidance);
    input.insert(
        0,
        json!({
            "role": "developer",
            "content": [{
                "type": "input_text",
                "text": developer_prompt
            }]
        }),
    );

    Ok(input)
}

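// `LLMProvider` is the structured interface: requests targeting the GPT-5
// Codex model are routed to the Responses API, everything else to Chat
// Completions.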
#[async_trait]
impl LLMProvider for OpenAIProvider {
    fn name(&self) -> &str {
        "openai"
    }

    fn supports_reasoning(&self, _model: &str) -> bool {
        false
    }

    fn supports_reasoning_effort(&self, model: &str) -> bool {
        let requested = if model.trim().is_empty() {
            self.model.as_str()
        } else {
            model
        };
        models::openai::REASONING_MODELS
            .iter()
            .any(|candidate| *candidate == requested)
    }

    async fn generate(&self, request: LLMRequest) -> Result<LLMResponse, LLMError> {
        let mut request = request;
        if request.model.trim().is_empty() {
            request.model = self.model.clone();
        }

        if Self::uses_responses_api(&request.model) {
            let openai_request = self.convert_to_openai_responses_format(&request)?;
            let url = format!("{}/responses", self.base_url);

            let response = self
                .http_client
                .post(&url)
                .bearer_auth(&self.api_key)
                .json(&openai_request)
                .send()
                .await
                .map_err(|e| {
                    let formatted_error =
                        error_display::format_llm_error("OpenAI", &format!("Network error: {}", e));
                    LLMError::Network(formatted_error)
                })?;

            if !response.status().is_success() {
                let status = response.status();
                let error_text = response.text().await.unwrap_or_default();

                if status.as_u16() == 429
                    || error_text.contains("insufficient_quota")
                    || error_text.contains("quota")
                    || error_text.contains("rate limit")
                {
                    return Err(LLMError::RateLimit);
                }

                let formatted_error = error_display::format_llm_error(
                    "OpenAI",
                    &format!("HTTP {}: {}", status, error_text),
                );
                return Err(LLMError::Provider(formatted_error));
            }

            let openai_response: Value = response.json().await.map_err(|e| {
                let formatted_error = error_display::format_llm_error(
                    "OpenAI",
                    &format!("Failed to parse response: {}", e),
                );
                LLMError::Provider(formatted_error)
            })?;

            self.parse_openai_responses_response(openai_response)
        } else {
            let openai_request = self.convert_to_openai_format(&request)?;
            let url = format!("{}/chat/completions", self.base_url);

            let response = self
                .http_client
                .post(&url)
                .bearer_auth(&self.api_key)
                .json(&openai_request)
                .send()
                .await
                .map_err(|e| {
                    let formatted_error =
                        error_display::format_llm_error("OpenAI", &format!("Network error: {}", e));
                    LLMError::Network(formatted_error)
                })?;

            if !response.status().is_success() {
                let status = response.status();
                let error_text = response.text().await.unwrap_or_default();

                if status.as_u16() == 429
                    || error_text.contains("insufficient_quota")
                    || error_text.contains("quota")
                    || error_text.contains("rate limit")
                {
                    return Err(LLMError::RateLimit);
                }

                let formatted_error = error_display::format_llm_error(
                    "OpenAI",
                    &format!("HTTP {}: {}", status, error_text),
                );
                return Err(LLMError::Provider(formatted_error));
            }

            let openai_response: Value = response.json().await.map_err(|e| {
                let formatted_error = error_display::format_llm_error(
                    "OpenAI",
                    &format!("Failed to parse response: {}", e),
                );
                LLMError::Provider(formatted_error)
            })?;

            self.parse_openai_response(openai_response)
        }
    }

    fn supported_models(&self) -> Vec<String> {
        models::openai::SUPPORTED_MODELS
            .iter()
            .map(|s| s.to_string())
            .collect()
    }

    fn validate_request(&self, request: &LLMRequest) -> Result<(), LLMError> {
        if request.messages.is_empty() {
            let formatted_error =
                error_display::format_llm_error("OpenAI", "Messages cannot be empty");
            return Err(LLMError::InvalidRequest(formatted_error));
        }

        if !self.supported_models().contains(&request.model) {
            let formatted_error = error_display::format_llm_error(
                "OpenAI",
                &format!("Unsupported model: {}", request.model),
            );
            return Err(LLMError::InvalidRequest(formatted_error));
        }

        for message in &request.messages {
            if let Err(err) = message.validate_for_provider("openai") {
                let formatted = error_display::format_llm_error("OpenAI", &err);
                return Err(LLMError::InvalidRequest(formatted));
            }
        }

        Ok(())
    }
}

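// `LLMClient` is the legacy string-prompt interface; prompts are parsed via
// `parse_client_prompt` and delegated to the `LLMProvider` implementation.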
#[async_trait]
impl LLMClient for OpenAIProvider {
    async fn generate(&mut self, prompt: &str) -> Result<llm_types::LLMResponse, LLMError> {
        let request = self.parse_client_prompt(prompt);
        let request_model = request.model.clone();
        let response = LLMProvider::generate(self, request).await?;

        Ok(llm_types::LLMResponse {
            content: response.content.unwrap_or_default(),
            model: request_model,
            usage: response.usage.map(|u| llm_types::Usage {
                prompt_tokens: u.prompt_tokens as usize,
                completion_tokens: u.completion_tokens as usize,
                total_tokens: u.total_tokens as usize,
            }),
            reasoning: response.reasoning,
        })
    }

    fn backend_kind(&self) -> llm_types::BackendKind {
        llm_types::BackendKind::OpenAI
    }

    fn model_id(&self) -> &str {
        &self.model
    }
}
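
// A minimal test sketch (not part of the original file) exercising the prompt
// parsing path; it only touches behavior defined above in this module.
#[cfg(test)]
mod tests {
    use super::*;

    // A JSON payload with a `messages` array should be parsed into a
    // structured request instead of being wrapped as a plain user prompt.
    #[test]
    fn json_prompt_parses_into_chat_request() {
        let provider = OpenAIProvider::new(String::new());
        let prompt = r#"{"messages":[{"role":"user","content":"hi"}],"temperature":0.2}"#;
        let request = provider.parse_client_prompt(prompt);
        assert_eq!(request.messages.len(), 1);
        assert_eq!(request.temperature, Some(0.2));
    }

    // A plain string prompt falls back to a single user message with no
    // system prompt, addressed to the provider's default model.
    #[test]
    fn plain_prompt_becomes_user_message() {
        let provider = OpenAIProvider::new(String::new());
        let request = provider.parse_client_prompt("hello");
        assert_eq!(request.messages.len(), 1);
        assert!(request.system_prompt.is_none());
    }
}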