// proxy/openai_compat.rs — OpenAI-compatible request/response wire types.
use crate::proxy::{LlmMessage, LlmRole};
use serde::{Deserialize, Serialize};

11#[derive(Debug, Deserialize)]
13pub struct OpenAiRequest {
14 pub model: String,
15 pub messages: Vec<OpenAiMessage>,
16 pub temperature: Option<f32>,
17 #[serde(rename = "max_tokens")]
18 pub max_tokens: Option<usize>,
19 pub stream: Option<bool>,
20 pub user: Option<String>,
22}
23
/// A single chat message as received on the wire.
#[derive(Debug, Deserialize)]
pub struct OpenAiMessage {
    /// Role string (e.g. "system", "user", "assistant"); mapped to
    /// `LlmRole` by the `From<OpenAiMessage> for LlmMessage` impl.
    pub role: String,
    /// Message text.
    pub content: String,
}
29
30impl From<OpenAiMessage> for LlmMessage {
31 fn from(msg: OpenAiMessage) -> Self {
32 Self {
33 role: match msg.role.as_str() {
34 "system" => LlmRole::System,
35 "assistant" => LlmRole::Assistant,
36 _ => LlmRole::User,
37 },
38 content: msg.content,
39 }
40 }
41}
42
/// Top-level chat-completion response body serialized back to the client.
#[derive(Debug, Serialize)]
pub struct OpenAiResponse {
    /// Response identifier, supplied by the constructor of this value.
    pub id: String,
    /// Object type tag (value set by the caller; presumably
    /// "chat.completion" per the OpenAI format — not enforced here).
    pub object: String,
    /// Creation timestamp (caller-supplied; presumably Unix seconds).
    pub created: u64,
    /// Model identifier echoed back to the client.
    pub model: String,
    /// One or more generated completions.
    pub choices: Vec<OpenAiChoice>,
    /// Token accounting; omitted from the JSON entirely when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub usage: Option<OpenAiUsage>,
}
54
/// Error envelope: the OpenAI wire format nests the error object under
/// a top-level `"error"` key.
#[derive(Debug, Serialize)]
pub struct OpenAiErrorResponse {
    pub error: OpenAiError,
}
60
/// Error payload serialized inside `OpenAiErrorResponse`.
#[derive(Debug, Serialize)]
pub struct OpenAiError {
    /// Human-readable description of the failure.
    pub message: String,
    /// Error category; serialized as `"type"` (`type` is a Rust keyword,
    /// hence the rename).
    #[serde(rename = "type")]
    pub error_type: String,
    /// Optional machine-readable error code; serialized as `null` when
    /// `None` (no skip attribute on this field).
    pub code: Option<String>,
}
68
/// One generated completion within an `OpenAiResponse`.
#[derive(Debug, Serialize)]
pub struct OpenAiChoice {
    /// Position of this choice in the `choices` array.
    pub index: usize,
    /// The generated assistant message.
    pub message: OpenAiResponseMessage,
    /// Why generation stopped (caller-supplied string, e.g. "stop" —
    /// not constrained by this type).
    pub finish_reason: String,
}
75
/// Outbound message inside a choice. Kept separate from the inbound
/// `OpenAiMessage` because this one only derives `Serialize`.
#[derive(Debug, Serialize)]
pub struct OpenAiResponseMessage {
    /// Role string for the generated message (set by the caller).
    pub role: String,
    /// Generated message text.
    pub content: String,
}
81
/// Token accounting attached to a response when available.
#[derive(Debug, Serialize)]
pub struct OpenAiUsage {
    /// Tokens consumed by the prompt.
    pub prompt_tokens: usize,
    /// Tokens produced in the completion.
    pub completion_tokens: usize,
    /// Sum of prompt and completion tokens (caller-computed; this type
    /// does not enforce the invariant).
    pub total_tokens: usize,
}