//! systemprompt_ai/models/message_converters.rs
//!
//! Converters from the internal `AiMessage` type to provider-specific
//! message formats (OpenAI, Anthropic, Gemini).
use crate::models::ai::{AiContentPart, AiMessage, MessageRole};
2use crate::models::providers::anthropic::AnthropicMessage;
3use crate::models::providers::gemini::GeminiContent;
4use crate::models::providers::openai::{
5 OpenAiContentPart, OpenAiImageUrl, OpenAiMessage, OpenAiMessageContent,
6};
7
8impl From<&AiMessage> for OpenAiMessage {
9 fn from(message: &AiMessage) -> Self {
10 let role = match message.role {
11 MessageRole::System => "system",
12 MessageRole::User => "user",
13 MessageRole::Assistant => "assistant",
14 }
15 .to_string();
16
17 let content = if message.parts.is_empty() {
18 OpenAiMessageContent::Text(message.content.clone())
19 } else {
20 OpenAiMessageContent::Parts(convert_to_openai_parts(message))
21 };
22
23 Self { role, content }
24 }
25}
26
27fn convert_to_openai_parts(message: &AiMessage) -> Vec<OpenAiContentPart> {
28 let mut parts = Vec::new();
29
30 if !message.content.is_empty() {
31 parts.push(OpenAiContentPart::Text {
32 text: message.content.clone(),
33 });
34 }
35
36 for part in &message.parts {
37 match part {
38 AiContentPart::Text { text } => {
39 parts.push(OpenAiContentPart::Text { text: text.clone() });
40 },
41 AiContentPart::Image { mime_type, data } => {
42 let data_uri = format!("data:{mime_type};base64,{data}");
43 parts.push(OpenAiContentPart::ImageUrl {
44 image_url: OpenAiImageUrl {
45 url: data_uri,
46 detail: None,
47 },
48 });
49 },
50 AiContentPart::Audio { .. } => {
51 tracing::warn!("Audio content not supported by OpenAI vision, skipping");
52 },
53 AiContentPart::Video { .. } => {
54 tracing::warn!("Video content not supported by OpenAI vision, skipping");
55 },
56 }
57 }
58
59 parts
60}
61
62impl From<&AiMessage> for AnthropicMessage {
63 fn from(message: &AiMessage) -> Self {
64 use crate::models::providers::anthropic::AnthropicContent;
65
66 let role = match message.role {
67 MessageRole::System | MessageRole::Assistant => "assistant",
68 MessageRole::User => "user",
69 }
70 .to_string();
71
72 let content = if message.parts.is_empty() {
73 AnthropicContent::Text(message.content.clone())
74 } else {
75 AnthropicContent::Blocks(convert_to_anthropic_blocks(message))
76 };
77
78 Self { role, content }
79 }
80}
81
82fn convert_to_anthropic_blocks(
83 message: &AiMessage,
84) -> Vec<crate::models::providers::anthropic::AnthropicContentBlock> {
85 use crate::models::providers::anthropic::{AnthropicContentBlock, AnthropicImageSource};
86
87 let mut blocks = Vec::new();
88
89 if !message.content.is_empty() {
90 blocks.push(AnthropicContentBlock::Text {
91 text: message.content.clone(),
92 });
93 }
94
95 for part in &message.parts {
96 match part {
97 AiContentPart::Text { text } => {
98 blocks.push(AnthropicContentBlock::Text { text: text.clone() });
99 },
100 AiContentPart::Image { mime_type, data } => {
101 blocks.push(AnthropicContentBlock::Image {
102 source: AnthropicImageSource::Base64 {
103 media_type: mime_type.clone(),
104 data: data.clone(),
105 },
106 });
107 },
108 AiContentPart::Audio { .. } => {
109 tracing::warn!("Audio content not supported by Anthropic, skipping");
110 },
111 AiContentPart::Video { .. } => {
112 tracing::warn!("Video content not supported by Anthropic, skipping");
113 },
114 }
115 }
116
117 blocks
118}
119
120impl From<&AiMessage> for GeminiContent {
121 fn from(message: &AiMessage) -> Self {
122 use crate::models::providers::gemini::GeminiPart;
123
124 Self {
125 role: match message.role {
126 MessageRole::System | MessageRole::User => "user",
127 MessageRole::Assistant => "model",
128 }
129 .to_string(),
130 parts: vec![GeminiPart::Text {
131 text: message.content.clone(),
132 }],
133 }
134 }
135}