Skip to main content

provider/adapters/openai/
types.rs

1//! OpenAI adapter types and provider-agnostic conversion logic.
2
3use std::fmt::Formatter;
4
5use crate::{
6    Message, ModelResponse, OutputItem, ProviderId, Role, SecretString, StopReason, StreamEvent,
7    TokenUsage, ToolCall, ToolDefinition, ToolResult,
8};
9
/// Request payload in OpenAI wire shape, built from provider-agnostic inputs.
///
/// `Eq` cannot be derived because `temperature` is an `f32`.
#[derive(Debug, Clone, PartialEq)]
pub struct OpenAiRequest {
    // Target model identifier.
    pub model: String,
    // Conversation history converted into OpenAI's message shape.
    pub messages: Vec<OpenAiMessage>,
    // Tools the model may call; empty when none are offered.
    pub tools: Vec<OpenAiTool>,
    // Sampling temperature; `None` leaves the provider default in effect.
    pub temperature: Option<f32>,
    // Completion-token cap; `None` leaves the provider default in effect.
    pub max_tokens: Option<u32>,
    // Whether the response should be delivered as a stream.
    pub stream: bool,
}
19
/// A single chat message in OpenAI wire shape.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OpenAiMessage {
    pub role: OpenAiRole,
    pub content: String,
    // Set only for `Tool`-role messages (see `OpenAiMessage::tool_result`),
    // linking a tool output back to the originating call; `None` otherwise.
    pub tool_call_id: Option<String>,
}
26
27impl OpenAiMessage {
28    pub(crate) fn tool_result(tool_result: ToolResult) -> Self {
29        Self {
30            role: OpenAiRole::Tool,
31            content: tool_result.output,
32            tool_call_id: Some(tool_result.tool_call_id),
33        }
34    }
35}
36
37impl From<Message> for OpenAiMessage {
38    fn from(value: Message) -> Self {
39        Self {
40            role: value.role.into(),
41            content: value.content,
42            tool_call_id: None,
43        }
44    }
45}
46
/// Chat roles recognized by the OpenAI API.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OpenAiRole {
    System,
    User,
    Assistant,
    /// Carries a tool's output back to the model (see `OpenAiMessage::tool_result`).
    Tool,
}

impl OpenAiRole {
    /// Returns the lowercase wire-format string for this role.
    pub(crate) fn as_str(self) -> &'static str {
        match self {
            OpenAiRole::Assistant => "assistant",
            OpenAiRole::System => "system",
            OpenAiRole::Tool => "tool",
            OpenAiRole::User => "user",
        }
    }
}
65
66impl From<Role> for OpenAiRole {
67    fn from(value: Role) -> Self {
68        match value {
69            Role::System => Self::System,
70            Role::User => Self::User,
71            Role::Assistant => Self::Assistant,
72            Role::Tool => Self::Tool,
73        }
74    }
75}
76
/// Tool definition in OpenAI wire shape.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OpenAiTool {
    pub name: String,
    pub description: String,
    // Input schema carried as a raw string, mirroring
    // `ToolDefinition::input_schema` — presumably JSON Schema; not parsed here.
    pub input_schema: String,
}
83
84impl From<ToolDefinition> for OpenAiTool {
85    fn from(value: ToolDefinition) -> Self {
86        Self {
87            name: value.name,
88            description: value.description,
89            input_schema: value.input_schema,
90        }
91    }
92}
93
/// A complete (non-streaming) response in OpenAI wire shape.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OpenAiResponse {
    // Model that produced the response, as reported by the provider.
    pub model: String,
    pub message: OpenAiAssistantMessage,
    pub finish_reason: OpenAiFinishReason,
    pub usage: OpenAiUsage,
}
101
102impl OpenAiResponse {
103    pub(crate) fn into_model_response(self) -> ModelResponse {
104        let mut output = Vec::new();
105        if !self.message.content.is_empty() {
106            output.push(OutputItem::Message(Message::new(
107                Role::Assistant,
108                self.message.content,
109            )));
110        }
111
112        output.extend(
113            self.message
114                .tool_calls
115                .into_iter()
116                .map(|tool_call| OutputItem::ToolCall(ToolCall::from(tool_call))),
117        );
118
119        ModelResponse {
120            provider: ProviderId::OpenAi,
121            model: self.model,
122            output,
123            stop_reason: self.finish_reason.into(),
124            usage: self.usage.into(),
125        }
126    }
127}
128
/// The assistant's portion of an OpenAI response: text plus requested tool calls.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OpenAiAssistantMessage {
    // May be empty when the model only issued tool calls
    // (see `OpenAiResponse::into_model_response`).
    pub content: String,
    pub tool_calls: Vec<OpenAiToolCall>,
}
134
/// A tool invocation requested by the model, in OpenAI wire shape.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OpenAiToolCall {
    // Provider-assigned id; echoed back via `OpenAiMessage::tool_result`.
    pub id: String,
    pub name: String,
    // Call arguments as a raw string — presumably JSON; not parsed here.
    pub arguments: String,
}
141
142impl From<OpenAiToolCall> for ToolCall {
143    fn from(value: OpenAiToolCall) -> Self {
144        Self {
145            id: value.id,
146            name: value.name,
147            arguments: value.arguments,
148        }
149    }
150}
151
/// Why generation stopped, in OpenAI terms.
///
/// Mapped onto the provider-agnostic [`StopReason`] by the `From` impl below.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum OpenAiFinishReason {
    Stop,
    Length,
    ToolCalls,
    Cancelled,
    // Catch-all for finish reasons not covered by the variants above.
    Other,
}
160
161impl From<OpenAiFinishReason> for StopReason {
162    fn from(value: OpenAiFinishReason) -> Self {
163        match value {
164            OpenAiFinishReason::Stop => Self::EndTurn,
165            OpenAiFinishReason::Length => Self::MaxTokens,
166            OpenAiFinishReason::ToolCalls => Self::ToolUse,
167            OpenAiFinishReason::Cancelled => Self::Cancelled,
168            OpenAiFinishReason::Other => Self::Other,
169        }
170    }
171}
172
/// Token accounting in OpenAI's naming (prompt/completion).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct OpenAiUsage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    // Total as reported by the provider; not recomputed locally.
    pub total_tokens: u32,
}
179
180impl From<OpenAiUsage> for TokenUsage {
181    fn from(value: OpenAiUsage) -> Self {
182        Self {
183            input_tokens: value.prompt_tokens,
184            output_tokens: value.completion_tokens,
185            total_tokens: value.total_tokens,
186        }
187    }
188}
189
/// Credentials for the OpenAI API.
///
/// `Debug` is implemented by hand (below) so the wrapped [`SecretString`]
/// is always redacted and never reaches logs; deriving it here would defeat
/// that.
#[derive(Clone, PartialEq, Eq)]
pub enum OpenAiAuth {
    // Standard API-key authentication.
    ApiKey(SecretString),
    // Browser-session credential — exact semantics not visible in this file;
    // NOTE(review): confirm against the adapter's auth flow.
    BrowserSession(SecretString),
}
195
196impl std::fmt::Debug for OpenAiAuth {
197    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
198        match self {
199            Self::ApiKey(_) => f.write_str("OpenAiAuth::ApiKey([REDACTED])"),
200            Self::BrowserSession(_) => f.write_str("OpenAiAuth::BrowserSession([REDACTED])"),
201        }
202    }
203}
204
/// Incremental events from an OpenAI streaming response.
///
/// Converted to the provider-agnostic [`StreamEvent`] by the `From` impl below.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum OpenAiStreamChunk {
    // A fragment of assistant text.
    TextDelta(String),
    // A fragment of a tool call.
    ToolCallDelta(OpenAiToolCall),
    // The assistant message is complete.
    MessageComplete(OpenAiAssistantMessage),
    // The whole response (including usage and finish reason) is complete.
    ResponseComplete(OpenAiResponse),
}
212
213impl From<OpenAiStreamChunk> for StreamEvent {
214    fn from(value: OpenAiStreamChunk) -> Self {
215        match value {
216            OpenAiStreamChunk::TextDelta(delta) => Self::TextDelta(delta),
217            OpenAiStreamChunk::ToolCallDelta(tool_call) => Self::ToolCallDelta(tool_call.into()),
218            OpenAiStreamChunk::MessageComplete(message) => {
219                Self::MessageComplete(Message::new(Role::Assistant, message.content))
220            }
221            OpenAiStreamChunk::ResponseComplete(response) => {
222                Self::ResponseComplete(response.into_model_response())
223            }
224        }
225    }
226}