ferrous_llm_openai/
types.rs

//! OpenAI-specific request and response types.

use chrono::{DateTime, Utc};
use ferrous_llm_core::{
    ChatResponse, CompletionResponse, FinishReason, FunctionCall, Metadata, ToolCall, Usage,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// OpenAI chat completion request.
#[derive(Debug, Clone, Serialize)]
pub struct OpenAIChatRequest {
    pub model: String,
    pub messages: Vec<OpenAIMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub stop: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<OpenAITool>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
}
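
// A minimal sketch (as a test, with an assumed "gpt-4o" model name and a dummy
// message) of how this request type is expected to serialize: optional fields set
// to `None`, and an empty `stop` list, are omitted from the JSON body entirely.
#[cfg(test)]
mod chat_request_serialization_sketch {
    use super::*;

    #[test]
    fn none_fields_are_omitted() {
        let request = OpenAIChatRequest {
            model: "gpt-4o".to_string(),
            messages: vec![OpenAIMessage {
                role: "user".to_string(),
                content: Some(serde_json::Value::String("Hello!".to_string())),
                name: None,
                tool_calls: None,
                tool_call_id: None,
            }],
            temperature: Some(0.7),
            max_tokens: None,
            top_p: None,
            frequency_penalty: None,
            presence_penalty: None,
            stop: Vec::new(),
            stream: None,
            tools: None,
            tool_choice: None,
            user: None,
        };

        let json = serde_json::to_value(&request).unwrap();
        // Skipped fields do not appear as keys at all.
        assert!(json.get("max_tokens").is_none());
        assert!(json.get("stop").is_none());
        assert!(json.get("temperature").is_some());
    }
}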

/// OpenAI message format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIMessage {
    pub role: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub content: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<OpenAIToolCall>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_call_id: Option<String>,
}

/// OpenAI tool call format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIToolCall {
    pub id: String,
    #[serde(rename = "type")]
    pub call_type: String,
    pub function: OpenAIFunctionCall,
}

/// OpenAI function call format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIFunctionCall {
    pub name: String,
    pub arguments: String,
}

/// OpenAI tool definition.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAITool {
    #[serde(rename = "type")]
    pub tool_type: String,
    pub function: OpenAIFunction,
}

/// OpenAI function definition.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIFunction {
    pub name: String,
    pub description: String,
    pub parameters: serde_json::Value,
}
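
// A sketch of building a tool definition by hand; `parameters` is a JSON Schema
// object. The weather-lookup function here is purely illustrative.
#[cfg(test)]
mod tool_definition_sketch {
    use super::*;

    #[test]
    fn tool_serializes_with_type_tag() {
        let tool = OpenAITool {
            tool_type: "function".to_string(),
            function: OpenAIFunction {
                name: "get_weather".to_string(),
                description: "Look up the current weather for a city.".to_string(),
                parameters: serde_json::json!({
                    "type": "object",
                    "properties": {
                        "city": { "type": "string" }
                    },
                    "required": ["city"]
                }),
            },
        };

        let json = serde_json::to_value(&tool).unwrap();
        // `tool_type` is renamed to `type` on the wire.
        assert_eq!(json["type"], "function");
        assert_eq!(json["function"]["name"], "get_weather");
    }
}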

/// OpenAI chat completion response.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIChatResponse {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<OpenAIChatChoice>,
    pub usage: Option<OpenAIUsage>,
    pub system_fingerprint: Option<String>,
}

/// OpenAI chat choice.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIChatChoice {
    pub index: u32,
    pub message: OpenAIMessage,
    pub finish_reason: Option<String>,
    pub logprobs: Option<serde_json::Value>,
}

/// OpenAI usage statistics.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIUsage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}
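
// A sketch of deserializing a hand-written (not captured) chat completion body
// that mirrors the shape of an API response.
#[cfg(test)]
mod chat_response_deserialization_sketch {
    use super::*;

    #[test]
    fn parses_minimal_response() {
        let body = r#"{
            "id": "chatcmpl-123",
            "object": "chat.completion",
            "created": 1700000000,
            "model": "gpt-4o",
            "choices": [{
                "index": 0,
                "message": { "role": "assistant", "content": "Hi there!" },
                "finish_reason": "stop",
                "logprobs": null
            }],
            "usage": { "prompt_tokens": 5, "completion_tokens": 3, "total_tokens": 8 }
        }"#;

        let response: OpenAIChatResponse = serde_json::from_str(body).unwrap();
        assert_eq!(response.choices.len(), 1);
        assert_eq!(response.choices[0].finish_reason.as_deref(), Some("stop"));
        assert_eq!(response.usage.as_ref().unwrap().total_tokens, 8);
    }
}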

/// OpenAI embeddings usage statistics (no completion_tokens).
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIEmbeddingsUsage {
    pub prompt_tokens: u32,
    pub total_tokens: u32,
}

/// OpenAI completion request.
#[derive(Debug, Clone, Serialize)]
pub struct OpenAICompletionRequest {
    pub model: String,
    pub prompt: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f32>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub stop: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
}

/// OpenAI completion response.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAICompletionResponse {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<OpenAICompletionChoice>,
    pub usage: Option<OpenAIUsage>,
}

/// OpenAI completion choice.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAICompletionChoice {
    pub index: u32,
    pub text: String,
    pub finish_reason: Option<String>,
    pub logprobs: Option<serde_json::Value>,
}

/// OpenAI embeddings request.
#[derive(Debug, Clone, Serialize)]
pub struct OpenAIEmbeddingsRequest {
    pub model: String,
    pub input: serde_json::Value, // Can be string or array of strings
    #[serde(skip_serializing_if = "Option::is_none")]
    pub encoding_format: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dimensions: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user: Option<String>,
}
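
// A sketch showing why `input` is typed as `serde_json::Value`: a single string and
// a batch of strings both serialize as valid `input` values. Model name and texts
// are illustrative only.
#[cfg(test)]
mod embeddings_request_sketch {
    use super::*;

    #[test]
    fn input_accepts_string_or_array() {
        let single = OpenAIEmbeddingsRequest {
            model: "text-embedding-3-small".to_string(),
            input: serde_json::Value::String("hello world".to_string()),
            encoding_format: None,
            dimensions: None,
            user: None,
        };
        let batch = OpenAIEmbeddingsRequest {
            input: serde_json::json!(["first text", "second text"]),
            ..single.clone()
        };

        assert!(serde_json::to_value(&single).unwrap()["input"].is_string());
        assert!(serde_json::to_value(&batch).unwrap()["input"].is_array());
    }
}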

/// OpenAI embeddings response.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIEmbeddingsResponse {
    pub object: String,
    pub data: Vec<OpenAIEmbedding>,
    pub model: String,
    pub usage: OpenAIEmbeddingsUsage,
}

/// OpenAI embedding data.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIEmbedding {
    pub object: String,
    pub index: usize,
    pub embedding: Vec<f32>,
}
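
// A sketch of deserializing a hand-written embeddings body; the vector values and
// model name are dummies.
#[cfg(test)]
mod embeddings_response_sketch {
    use super::*;

    #[test]
    fn parses_embedding_vectors() {
        let body = r#"{
            "object": "list",
            "data": [
                { "object": "embedding", "index": 0, "embedding": [0.1, -0.2, 0.3] }
            ],
            "model": "text-embedding-3-small",
            "usage": { "prompt_tokens": 4, "total_tokens": 4 }
        }"#;

        let response: OpenAIEmbeddingsResponse = serde_json::from_str(body).unwrap();
        assert_eq!(response.data[0].embedding.len(), 3);
        assert_eq!(response.usage.total_tokens, 4);
    }
}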

/// OpenAI streaming response chunk.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIStreamChunk {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<OpenAIStreamChoice>,
}

/// OpenAI streaming choice.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIStreamChoice {
    pub index: u32,
    pub delta: OpenAIStreamDelta,
    pub finish_reason: Option<String>,
}

/// OpenAI streaming delta.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIStreamDelta {
    pub role: Option<String>,
    pub content: Option<String>,
    pub tool_calls: Option<Vec<OpenAIStreamToolCall>>,
}

/// OpenAI streaming tool call.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIStreamToolCall {
    pub index: u32,
    pub id: Option<String>,
    #[serde(rename = "type")]
    pub call_type: Option<String>,
    pub function: Option<OpenAIStreamFunction>,
}

/// OpenAI streaming function.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAIStreamFunction {
    pub name: Option<String>,
    pub arguments: Option<String>,
}
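
// A sketch of parsing one streaming JSON payload (the body of a single SSE `data:`
// line) into a chunk. SSE framing and the terminal `[DONE]` marker are assumed to
// be handled elsewhere; the delta content is illustrative.
#[cfg(test)]
mod stream_chunk_sketch {
    use super::*;

    #[test]
    fn parses_content_delta() {
        let payload = r#"{
            "id": "chatcmpl-123",
            "object": "chat.completion.chunk",
            "created": 1700000000,
            "model": "gpt-4o",
            "choices": [{
                "index": 0,
                "delta": { "content": "Hel" },
                "finish_reason": null
            }]
        }"#;

        let chunk: OpenAIStreamChunk = serde_json::from_str(payload).unwrap();
        assert_eq!(chunk.choices[0].delta.content.as_deref(), Some("Hel"));
        assert!(chunk.choices[0].finish_reason.is_none());
    }
}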

/// Wrapper for OpenAI chat response that includes converted generic data.
#[derive(Debug, Clone)]
pub struct OpenAIChatResponseWrapper {
    pub response: OpenAIChatResponse,
    pub converted_usage: Option<Usage>,
    pub converted_metadata: Metadata,
    pub converted_tool_calls: Option<Vec<ToolCall>>,
}

/// Wrapper for OpenAI completion response that includes converted generic data.
#[derive(Debug, Clone)]
pub struct OpenAICompletionResponseWrapper {
    pub response: OpenAICompletionResponse,
    pub converted_usage: Option<Usage>,
    pub converted_metadata: Metadata,
}

impl OpenAIChatResponseWrapper {
    pub fn new(response: OpenAIChatResponse, request_id: Option<String>) -> Self {
        let converted_usage = response.usage.as_ref().map(|usage| Usage {
            prompt_tokens: usage.prompt_tokens,
            completion_tokens: usage.completion_tokens,
            total_tokens: usage.total_tokens,
        });

        let converted_metadata = Metadata {
            extensions: HashMap::new(),
            request_id,
            user_id: None,
            created_at: DateTime::from_timestamp(response.created as i64, 0)
                .unwrap_or_else(Utc::now),
        };

        let converted_tool_calls = response
            .choices
            .first()
            .and_then(|choice| choice.message.tool_calls.as_ref())
            .map(|tool_calls| {
                tool_calls
                    .iter()
                    .map(|tc| ToolCall {
                        id: tc.id.clone(),
                        call_type: tc.call_type.clone(),
                        function: FunctionCall {
                            name: tc.function.name.clone(),
                            arguments: tc.function.arguments.clone(),
                        },
                    })
                    .collect()
            });

        Self {
            response,
            converted_usage,
            converted_metadata,
            converted_tool_calls,
        }
    }
}

impl OpenAICompletionResponseWrapper {
    pub fn new(response: OpenAICompletionResponse, request_id: Option<String>) -> Self {
        let converted_usage = response.usage.as_ref().map(|usage| Usage {
            prompt_tokens: usage.prompt_tokens,
            completion_tokens: usage.completion_tokens,
            total_tokens: usage.total_tokens,
        });

        let converted_metadata = Metadata {
            extensions: HashMap::new(),
            request_id,
            user_id: None,
            created_at: DateTime::from_timestamp(response.created as i64, 0)
                .unwrap_or_else(Utc::now),
        };

        Self {
            response,
            converted_usage,
            converted_metadata,
        }
    }
}

// Implement ChatResponse for OpenAIChatResponseWrapper
impl ChatResponse for OpenAIChatResponseWrapper {
    fn content(&self) -> String {
        self.response
            .choices
            .first()
            .and_then(|choice| match &choice.message.content {
                Some(serde_json::Value::String(s)) => Some(s.clone()),
                _ => None,
            })
            .unwrap_or_default()
    }

    fn usage(&self) -> Option<Usage> {
        self.converted_usage.clone()
    }

    fn finish_reason(&self) -> Option<FinishReason> {
        self.response
            .choices
            .first()
            .and_then(|choice| choice.finish_reason.as_ref())
            .and_then(|reason| match reason.as_str() {
                "stop" => Some(FinishReason::Stop),
                "length" => Some(FinishReason::Length),
                "tool_calls" => Some(FinishReason::ToolCalls),
                "content_filter" => Some(FinishReason::ContentFilter),
                _ => None,
            })
    }

    fn metadata(&self) -> Metadata {
        self.converted_metadata.clone()
    }

    fn tool_calls(&self) -> Option<Vec<ToolCall>> {
        self.converted_tool_calls.clone()
    }
}
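
// A sketch of how the wrapper is meant to be used: the converted usage, metadata,
// and tool calls are computed once in `new`, then read back through the generic
// `ChatResponse` trait. The response value is hand-built for illustration.
#[cfg(test)]
mod chat_wrapper_sketch {
    use super::*;

    #[test]
    fn wrapper_exposes_converted_data() {
        let response = OpenAIChatResponse {
            id: "chatcmpl-123".to_string(),
            object: "chat.completion".to_string(),
            created: 1_700_000_000,
            model: "gpt-4o".to_string(),
            choices: vec![OpenAIChatChoice {
                index: 0,
                message: OpenAIMessage {
                    role: "assistant".to_string(),
                    content: Some(serde_json::Value::String("Hi there!".to_string())),
                    name: None,
                    tool_calls: None,
                    tool_call_id: None,
                },
                finish_reason: Some("stop".to_string()),
                logprobs: None,
            }],
            usage: Some(OpenAIUsage {
                prompt_tokens: 5,
                completion_tokens: 3,
                total_tokens: 8,
            }),
            system_fingerprint: None,
        };

        let wrapper = OpenAIChatResponseWrapper::new(response, Some("req-1".to_string()));
        assert_eq!(wrapper.content(), "Hi there!");
        assert!(matches!(wrapper.finish_reason(), Some(FinishReason::Stop)));
        assert_eq!(wrapper.usage().map(|u| u.total_tokens), Some(8));
    }
}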

// Implement CompletionResponse for OpenAICompletionResponseWrapper
impl CompletionResponse for OpenAICompletionResponseWrapper {
    fn text(&self) -> String {
        self.response
            .choices
            .first()
            .map(|choice| choice.text.clone())
            .unwrap_or_default()
    }

    fn usage(&self) -> Option<Usage> {
        self.converted_usage.clone()
    }

    fn finish_reason(&self) -> Option<FinishReason> {
        self.response
            .choices
            .first()
            .and_then(|choice| choice.finish_reason.as_ref())
            .and_then(|reason| match reason.as_str() {
                "stop" => Some(FinishReason::Stop),
                "length" => Some(FinishReason::Length),
                _ => None,
            })
    }

    fn metadata(&self) -> Metadata {
        self.converted_metadata.clone()
    }
}

// Implement ChatResponse for OpenAIChatResponse
impl ChatResponse for OpenAIChatResponse {
    fn content(&self) -> String {
        self.choices
            .first()
            .and_then(|choice| choice.message.content.as_ref())
            .and_then(|content| content.as_str())
            .unwrap_or("")
            .to_string()
    }

    fn usage(&self) -> Option<Usage> {
        self.usage.as_ref().map(|usage| Usage {
            prompt_tokens: usage.prompt_tokens,
            completion_tokens: usage.completion_tokens,
            total_tokens: usage.total_tokens,
        })
    }

    fn finish_reason(&self) -> Option<FinishReason> {
        self.choices
            .first()
            .and_then(|choice| choice.finish_reason.as_ref())
            .and_then(|reason| match reason.as_str() {
                "stop" => Some(FinishReason::Stop),
                "length" => Some(FinishReason::Length),
                "tool_calls" => Some(FinishReason::ToolCalls),
                "content_filter" => Some(FinishReason::ContentFilter),
                _ => None,
            })
    }

    fn metadata(&self) -> Metadata {
        Metadata {
            extensions: HashMap::new(),
            request_id: Some(self.id.clone()),
            user_id: None,
            created_at: DateTime::from_timestamp(self.created as i64, 0).unwrap_or_else(Utc::now),
        }
    }

    fn tool_calls(&self) -> Option<Vec<ToolCall>> {
        self.choices
            .first()
            .and_then(|choice| choice.message.tool_calls.as_ref())
            .map(|tool_calls| {
                tool_calls
                    .iter()
                    .map(|tc| ToolCall {
                        id: tc.id.clone(),
                        call_type: tc.call_type.clone(),
                        function: FunctionCall {
                            name: tc.function.name.clone(),
                            arguments: tc.function.arguments.clone(),
                        },
                    })
                    .collect()
            })
    }
}

// Implement CompletionResponse for OpenAICompletionResponse
impl CompletionResponse for OpenAICompletionResponse {
    fn text(&self) -> String {
        self.choices
            .first()
            .map(|choice| choice.text.clone())
            .unwrap_or_default()
    }

    fn usage(&self) -> Option<Usage> {
        self.usage.as_ref().map(|usage| Usage {
            prompt_tokens: usage.prompt_tokens,
            completion_tokens: usage.completion_tokens,
            total_tokens: usage.total_tokens,
        })
    }

    fn finish_reason(&self) -> Option<FinishReason> {
        self.choices
            .first()
            .and_then(|choice| choice.finish_reason.as_ref())
            .and_then(|reason| match reason.as_str() {
                "stop" => Some(FinishReason::Stop),
                "length" => Some(FinishReason::Length),
                _ => None,
            })
    }

    fn metadata(&self) -> Metadata {
        Metadata {
            extensions: HashMap::new(),
            request_id: Some(self.id.clone()),
            user_id: None,
            created_at: DateTime::from_timestamp(self.created as i64, 0).unwrap_or_else(Utc::now),
        }
    }
}

// Conversion utilities
impl From<&ferrous_llm_core::Message> for OpenAIMessage {
    fn from(message: &ferrous_llm_core::Message) -> Self {
        let role = match message.role {
            ferrous_llm_core::Role::User => "user".to_string(),
            ferrous_llm_core::Role::Assistant => "assistant".to_string(),
            ferrous_llm_core::Role::System => "system".to_string(),
            ferrous_llm_core::Role::Tool => "tool".to_string(),
        };

        let content = match &message.content {
            ferrous_llm_core::MessageContent::Text(text) => {
                Some(serde_json::Value::String(text.clone()))
            }
            ferrous_llm_core::MessageContent::Multimodal(parts) => {
                let content_array: Vec<serde_json::Value> = parts
                    .iter()
                    .map(|part| match part {
                        ferrous_llm_core::ContentPart::Text { text } => serde_json::json!({
                            "type": "text",
                            "text": text
                        }),
                        ferrous_llm_core::ContentPart::Image {
                            image_source,
                            detail,
                        } => {
                            let url: String = image_source.clone().into();
                            serde_json::json!({
                                "type": "image_url",
                                "image_url": {
                                    "url": url,
                                    "detail": detail.as_deref().unwrap_or("auto")
                                }
                            })
                        }
                        ferrous_llm_core::ContentPart::Audio { audio_url, format } => {
                            serde_json::json!({
                                "type": "audio",
                                "audio": {
                                    "mime_type": format
                                        .as_deref()
                                        .map(|f| format!("audio/{f}"))
                                        .unwrap_or_else(|| "audio/mpeg".to_string()),
                                    "segments": [
                                        {
                                            "url": audio_url,
                                            // Add `"caption": "<caption>"` here if available
                                        }
                                    ]
                                }
                            })
                        }
                    })
                    .collect();
                Some(serde_json::Value::Array(content_array))
            }
            ferrous_llm_core::MessageContent::Tool(tool_content) => {
                // Handle tool content - use text if available, otherwise create a placeholder
                let text = tool_content.text.as_deref().unwrap_or("[Tool response]");
                Some(serde_json::Value::String(text.to_string()))
            }
        };

        // Extract tool information from MessageContent::Tool if present
        let (tool_calls, tool_call_id) = match &message.content {
            ferrous_llm_core::MessageContent::Tool(tool_content) => {
                let tool_calls = tool_content.tool_calls.as_ref().map(|calls| {
                    calls
                        .iter()
                        .map(|call| OpenAIToolCall {
                            id: call.id.clone(),
                            call_type: call.call_type.clone(),
                            function: OpenAIFunctionCall {
                                name: call.function.name.clone(),
                                arguments: call.function.arguments.clone(),
                            },
                        })
                        .collect()
                });
                (tool_calls, tool_content.tool_call_id.clone())
            }
            _ => (None, None),
        };

        Self {
            role,
            content,
            name: None, // Name field removed from core Message
            tool_calls,
            tool_call_id,
        }
    }
}

impl From<&ferrous_llm_core::Tool> for OpenAITool {
    fn from(tool: &ferrous_llm_core::Tool) -> Self {
        Self {
            tool_type: tool.tool_type.clone(),
            function: OpenAIFunction {
                name: tool.function.name.clone(),
                description: tool.function.description.clone(),
                parameters: tool.function.parameters.clone(),
            },
        }
    }
}

// Conversion from OpenAI types to core types
impl From<OpenAIUsage> for Usage {
    fn from(openai_usage: OpenAIUsage) -> Self {
        Self {
            prompt_tokens: openai_usage.prompt_tokens,
            completion_tokens: openai_usage.completion_tokens,
            total_tokens: openai_usage.total_tokens,
        }
    }
}

impl From<&OpenAIUsage> for Usage {
    fn from(openai_usage: &OpenAIUsage) -> Self {
        Self {
            prompt_tokens: openai_usage.prompt_tokens,
            completion_tokens: openai_usage.completion_tokens,
            total_tokens: openai_usage.total_tokens,
        }
    }
}

impl From<OpenAIToolCall> for ToolCall {
    fn from(openai_tool_call: OpenAIToolCall) -> Self {
        Self {
            id: openai_tool_call.id,
            call_type: openai_tool_call.call_type,
            function: FunctionCall {
                name: openai_tool_call.function.name,
                arguments: openai_tool_call.function.arguments,
            },
        }
    }
}

impl From<&OpenAIToolCall> for ToolCall {
    fn from(openai_tool_call: &OpenAIToolCall) -> Self {
        Self {
            id: openai_tool_call.id.clone(),
            call_type: openai_tool_call.call_type.clone(),
            function: FunctionCall {
                name: openai_tool_call.function.name.clone(),
                arguments: openai_tool_call.function.arguments.clone(),
            },
        }
    }
}
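
// A sketch of converting a provider tool call into the generic core type via the
// `From<&OpenAIToolCall>` impl above; field values are illustrative.
#[cfg(test)]
mod tool_call_conversion_sketch {
    use super::*;

    #[test]
    fn converts_by_reference() {
        let openai_call = OpenAIToolCall {
            id: "call_abc".to_string(),
            call_type: "function".to_string(),
            function: OpenAIFunctionCall {
                name: "get_weather".to_string(),
                arguments: r#"{"city":"Berlin"}"#.to_string(),
            },
        };

        let core_call: ToolCall = (&openai_call).into();
        assert_eq!(core_call.id, "call_abc");
        assert_eq!(core_call.function.name, "get_weather");
    }
}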