use serde::{Deserialize, Serialize};
use serde_json::json;
use crate::config::LogLevel;
use crate::error::{ProxyError, Result};
/// Serde `skip_serializing_if` predicate for `AnthropicRequest::tools`:
/// `true` when there is nothing to serialize — the field is `None` or the
/// tool list is empty — so `tools` is omitted from the JSON body.
fn skip_empty_tools(tools: &Option<Vec<AnthropicTool>>) -> bool {
    tools.as_ref().map_or(true, |list| list.is_empty())
}
/// Chat-completion request body in OpenAI wire format.
#[derive(Debug, Deserialize)]
pub struct OpenAiRequest {
    /// Model identifier sent by the client; not consumed by the converter itself.
    pub model: Option<String>,
    /// Conversation history (system / user / assistant / tool roles).
    pub messages: Vec<OpenAiMessage>,
    /// Optional completion cap; `DEFAULT_MAX_TOKENS` is used when absent.
    pub max_tokens: Option<u32>,
    /// Optional sampling temperature; `DEFAULT_TEMPERATURE` is used when absent.
    pub temperature: Option<f64>,
    /// Whether the client requested streaming; treated as `false` when absent.
    pub stream: Option<bool>,
    /// Tool (function) definitions exposed to the model.
    pub tools: Option<Vec<OpenAiTool>>,
    /// Tool-selection policy: "auto", "none", or a forced function object.
    pub tool_choice: Option<OpenAiToolChoice>,
}
/// One entry of the OpenAI `messages` array.
#[derive(Debug, Deserialize)]
pub struct OpenAiMessage {
    /// "system", "user", "assistant" or "tool"; any other value is rejected
    /// during conversion with `ProxyError::Conversion`.
    pub role: String,
    /// Plain string or multi-part content; may be absent (e.g. an assistant
    /// message that only carries tool calls).
    pub content: Option<OpenAiContent>,
    /// Tool invocations attached to an assistant message.
    pub tool_calls: Option<Vec<OpenAiToolCall>>,
    /// For `role == "tool"`: id of the tool call this message answers.
    /// (Removed a `#[serde(rename = "tool_call_id")]` that was a no-op —
    /// it renamed the field to its own name.)
    pub tool_call_id: Option<String>,
}
/// OpenAI message content: either a bare string or a list of typed content
/// blocks. `untagged` makes serde try each representation in declaration order.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub enum OpenAiContent {
    /// Plain-text content.
    String(String),
    /// Multi-part content (text and/or image blocks).
    Array(Vec<OpenAiContentBlock>),
}
/// A single typed block inside multi-part OpenAI content.
#[derive(Debug, Deserialize)]
pub struct OpenAiContentBlock {
    /// Block discriminator, e.g. "text" or "image_url".
    #[serde(rename = "type")]
    pub block_type: String,
    /// Present when `block_type == "text"`.
    pub text: Option<String>,
    /// Present when `block_type == "image_url"`. (Removed a
    /// `#[serde(rename = "image_url")]` that matched the field name and was
    /// therefore a no-op.)
    pub image_url: Option<ImageUrl>,
}
/// The `image_url` payload of an OpenAI image content block.
#[derive(Debug, Deserialize)]
pub struct ImageUrl {
    /// Image location; forwarded verbatim into the Anthropic `ImageSource`.
    pub url: String,
}
/// A tool invocation attached to an OpenAI assistant message.
#[derive(Debug, Deserialize)]
pub struct OpenAiToolCall {
    /// Call id; becomes the Anthropic `tool_use` block id and is matched
    /// against `tool_call_id` on subsequent `role == "tool"` messages.
    pub id: String,
    /// Call discriminator ("function" in the OpenAI API); parsed but not
    /// read anywhere in this file, hence the `dead_code` allow.
    #[serde(rename = "type")]
    #[allow(dead_code)]
    pub call_type: String,
    /// The function name + arguments being invoked.
    pub function: OpenAiFunction,
}
/// The function part of an OpenAI tool call.
#[derive(Debug, Deserialize)]
pub struct OpenAiFunction {
    /// Name of the invoked function.
    pub name: String,
    /// Call arguments. Kept as a raw `Value` because OpenAI delivers this as
    /// a JSON-encoded *string*; `parse_tool_arguments` decodes that case.
    pub arguments: serde_json::Value,
}
/// A tool definition from the OpenAI request.
#[derive(Debug, Deserialize)]
pub struct OpenAiTool {
    /// Tool discriminator ("function" in the OpenAI API); parsed but unused.
    #[serde(rename = "type")]
    #[allow(dead_code)]
    pub tool_type: String,
    /// The function declaration (name, description, parameter schema).
    pub function: OpenAiToolFunction,
}
#[derive(Debug, Deserialize)]
pub struct OpenAiToolFunction {
pub name: String,
pub description: String,
pub parameters: serde_json::Value,
}
/// OpenAI `tool_choice`: either a bare mode string ("auto", "none", ...) or
/// an object forcing a specific function. `untagged` tries both shapes.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub enum OpenAiToolChoice {
    /// Mode string such as "auto" or "none".
    String(String),
    /// Forced-function object.
    Object(OpenAiToolChoiceObject),
}
/// Object form of OpenAI `tool_choice`.
#[derive(Debug, Deserialize)]
pub struct OpenAiToolChoiceObject {
    /// Discriminator ("function" in the OpenAI API); parsed but unused.
    #[serde(rename = "type")]
    #[allow(dead_code)]
    pub choice_type: String,
    /// The function being forced; conversion yields `None` when absent.
    pub function: Option<OpenAiToolChoiceFunction>,
}
/// Name of the function a forced `tool_choice` selects.
#[derive(Debug, Deserialize)]
pub struct OpenAiToolChoiceFunction {
    /// Function name, copied into `AnthropicToolChoice::Tool`.
    pub name: String,
}
/// Outgoing request body in Anthropic (Vertex) wire format.
///
/// Removed three no-op `#[serde(rename = ...)]` attributes that renamed
/// `anthropic_version`, `max_tokens` and `tool_choice` to their own names.
#[derive(Debug, Serialize)]
pub struct AnthropicRequest {
    /// API version marker; always set to `ANTHROPIC_VERSION`.
    pub anthropic_version: String,
    /// Converted conversation. Contains only user/assistant roles; system
    /// text is folded into the first user message by the converter.
    pub messages: Vec<AnthropicMessage>,
    /// Completion cap (client value or `DEFAULT_MAX_TOKENS`).
    pub max_tokens: u32,
    /// Sampling temperature (client value or `DEFAULT_TEMPERATURE`).
    pub temperature: f64,
    /// Streaming flag (client value or `false`).
    pub stream: bool,
    /// Omitted from JSON when `None` *or* empty (see `skip_empty_tools`).
    #[serde(skip_serializing_if = "skip_empty_tools")]
    pub tools: Option<Vec<AnthropicTool>>,
    /// Omitted from JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<AnthropicToolChoice>,
}
/// A single message in the Anthropic request.
#[derive(Debug, Serialize)]
pub struct AnthropicMessage {
    /// "user" or "assistant" — the only roles this converter emits.
    pub role: String,
    /// Ordered content blocks (text, tool_use, tool_result, image).
    pub content: Vec<AnthropicContentBlock>,
}
/// Content block in an Anthropic message, internally tagged via a `"type"`
/// field. `rename_all = "snake_case"` yields the wire names `text`,
/// `tool_use`, `tool_result` and `image`, replacing four per-variant
/// `rename` attributes; a no-op rename on `tool_use_id` was also dropped.
#[derive(Debug, Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum AnthropicContentBlock {
    /// Plain text block.
    Text {
        text: String,
    },
    /// Assistant tool invocation (mirrors an OpenAI tool call).
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
    /// Result answering a prior `tool_use` block with the same id.
    ToolResult {
        tool_use_id: String,
        content: AnthropicToolResultContent,
    },
    /// Image block with a URL source.
    Image {
        source: ImageSource,
    },
}
/// Payload of a `tool_result` block: plain string or a list of raw JSON
/// content blocks. `untagged`, so it serializes as whichever shape it holds.
#[derive(Debug, Serialize)]
#[serde(untagged)]
pub enum AnthropicToolResultContent {
    /// Plain-string result (also used for an absent tool message body).
    String(String),
    /// Multi-part result built from the OpenAI content blocks.
    Array(Vec<serde_json::Value>),
}
/// Source of an Anthropic image block.
#[derive(Debug, Serialize)]
pub struct ImageSource {
    /// Source kind; the converter always sets "url" here.
    #[serde(rename = "type")]
    pub source_type: String,
    /// Image location copied from the OpenAI `image_url.url`.
    pub url: String,
}
/// Tool definition in Anthropic format. (Removed a no-op
/// `#[serde(rename = "input_schema")]` that matched the field name.)
#[derive(Debug, Serialize)]
pub struct AnthropicTool {
    /// Tool name (OpenAI function name).
    pub name: String,
    /// Tool description (OpenAI function description).
    pub description: String,
    /// JSON schema of the input (OpenAI `parameters`, unchanged).
    pub input_schema: serde_json::Value,
}
/// Tool-choice directive in Anthropic format, tagged by `"type"`.
/// `rename_all = "snake_case"` produces `auto` / `tool`, replacing the two
/// per-variant `rename` attributes.
#[derive(Debug, Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum AnthropicToolChoice {
    /// Model decides whether to call a tool: `{"type":"auto"}`.
    Auto,
    /// Force a specific tool: `{"type":"tool","name":...}`.
    Tool {
        name: String,
    },
}
/// Stateless converter from OpenAI chat requests to Anthropic requests;
/// holds only the log level used to gate trace output (see `debug`).
pub struct OpenAiToAnthropicConverter {
    // Controls whether `debug()` emits trace lines.
    log_level: LogLevel,
}
/// Anthropic API version string sent in every converted request
/// (the Vertex AI flavor of the Messages API).
const ANTHROPIC_VERSION: &str = "vertex-2023-10-16";
/// Fallback completion cap when the client omits `max_tokens`.
const DEFAULT_MAX_TOKENS: u32 = 8000;
/// Fallback sampling temperature when the client omits `temperature`.
/// NOTE(review): 0.9 is a fairly high default — confirm it is intentional.
const DEFAULT_TEMPERATURE: f64 = 0.9;
impl OpenAiToAnthropicConverter {
pub fn new(log_level: LogLevel) -> Self {
Self { log_level }
}
/// Converts a complete OpenAI chat request into an Anthropic request.
///
/// Pipeline, in order:
/// 1. Walk the messages, dispatching per role (`process_messages`); this
///    buffers system text and tool results along the way.
/// 2. Flush any tool results still pending after the last message.
/// 3. Fold the collected system messages into the first user message.
/// 4. Map tools and tool_choice, and apply defaults for `max_tokens`,
///    `temperature` and `stream` when the client omitted them.
///
/// # Errors
/// Returns `ProxyError::Conversion` when a message has an unknown role.
pub fn convert(&self, request: OpenAiRequest) -> Result<AnthropicRequest> {
    self.debug(&format!(
        "Converting {} message(s) from OpenAI to Anthropic format",
        request.messages.len()
    ));
    // Working state threaded through the per-role processors.
    let mut anthropic_messages = Vec::new();
    let mut pending_tool_results = Vec::new();
    let mut last_assistant_message: Option<&'_ OpenAiMessage> = None;
    let mut system_messages = Vec::new();
    self.process_messages(
        &request.messages,
        &mut anthropic_messages,
        &mut pending_tool_results,
        &mut last_assistant_message,
        &mut system_messages,
    )?;
    // Tool results arriving after the final assistant message still need a
    // trailing user message to carry them.
    self.handle_remaining_tool_results(
        &mut anthropic_messages,
        &mut pending_tool_results,
        last_assistant_message,
    )?;
    self.prepend_system_messages(&mut anthropic_messages, system_messages);
    let tools = self.convert_tools(request.tools);
    let tool_choice = self.convert_tool_choice(request.tool_choice);
    let anthropic_request = AnthropicRequest {
        anthropic_version: ANTHROPIC_VERSION.to_string(),
        messages: anthropic_messages,
        max_tokens: request.max_tokens.unwrap_or(DEFAULT_MAX_TOKENS),
        temperature: request.temperature.unwrap_or(DEFAULT_TEMPERATURE),
        stream: request.stream.unwrap_or(false),
        tools,
        tool_choice,
    };
    self.debug(&format!(
        "Converted Anthropic request with {} messages",
        anthropic_request.messages.len()
    ));
    Ok(anthropic_request)
}
/// Dispatches each OpenAI message to its role-specific processor.
///
/// The `'a` lifetime ties `last_assistant_message` to the input slice so a
/// borrow of the most recent assistant message can outlive one iteration.
///
/// # Errors
/// Returns `ProxyError::Conversion` for any role other than
/// "system" / "assistant" / "tool" / "user".
fn process_messages<'a>(
    &self,
    messages: &'a [OpenAiMessage],
    anthropic_messages: &mut Vec<AnthropicMessage>,
    pending_tool_results: &mut Vec<(String, AnthropicToolResultContent)>,
    last_assistant_message: &mut Option<&'a OpenAiMessage>,
    system_messages: &mut Vec<String>,
) -> Result<()> {
    for msg in messages {
        self.debug(&format!("Processing message with role: {}", msg.role));
        match msg.role.as_str() {
            "system" => {
                // Buffered here; prepended to the first user message later.
                self.process_system_message(msg, system_messages);
            }
            "assistant" => {
                self.process_assistant_message(
                    msg,
                    anthropic_messages,
                    pending_tool_results,
                    last_assistant_message,
                )?;
            }
            "tool" => {
                // Collected and attached as a user message after the
                // assistant message that issued the calls.
                self.process_tool_message(msg, pending_tool_results);
            }
            "user" => {
                self.process_user_message(
                    msg,
                    anthropic_messages,
                    pending_tool_results,
                    *last_assistant_message,
                )?;
            }
            _ => {
                return Err(ProxyError::Conversion(format!(
                    "Unknown message role: {}",
                    msg.role
                )));
            }
        }
    }
    Ok(())
}
/// Collects system-message text for later prepending to the first user
/// message.
///
/// Fix: the previous version only captured plain-string content, silently
/// dropping multi-part (array) system content, which the OpenAI API allows.
/// Text blocks inside an array are now collected too; non-text blocks are
/// still ignored.
fn process_system_message(&self, msg: &OpenAiMessage, system_messages: &mut Vec<String>) {
    match &msg.content {
        Some(OpenAiContent::String(content)) => system_messages.push(content.clone()),
        Some(OpenAiContent::Array(blocks)) => {
            for block in blocks {
                if block.block_type == "text" {
                    if let Some(text) = &block.text {
                        system_messages.push(text.clone());
                    }
                }
            }
        }
        None => {}
    }
}
/// Handles an assistant message: flushes any tool results that belong to
/// the *previous* assistant turn, converts this message, and records it as
/// the most recent assistant message.
fn process_assistant_message<'a>(
    &self,
    msg: &'a OpenAiMessage,
    anthropic_messages: &mut Vec<AnthropicMessage>,
    pending_tool_results: &mut Vec<(String, AnthropicToolResultContent)>,
    last_assistant_message: &mut Option<&'a OpenAiMessage>,
) -> Result<()> {
    // Results collected since the previous assistant turn must land before
    // a new assistant message is appended.
    let must_flush = !pending_tool_results.is_empty() && last_assistant_message.is_some();
    if must_flush {
        self.attach_tool_results(anthropic_messages, pending_tool_results)?;
    }
    let converted = self.convert_assistant_message(msg)?;
    anthropic_messages.push(converted);
    last_assistant_message.replace(msg);
    Ok(())
}
/// Buffers a tool-result message until it can be attached after the
/// assistant turn that issued the call. Messages without a `tool_call_id`
/// are ignored, since the result cannot be matched to a call.
fn process_tool_message(
    &self,
    msg: &OpenAiMessage,
    pending_tool_results: &mut Vec<(String, AnthropicToolResultContent)>,
) {
    let Some(tool_call_id) = msg.tool_call_id.as_ref() else {
        return;
    };
    let content = self.convert_tool_result_content(&msg.content);
    pending_tool_results.push((tool_call_id.clone(), content));
    self.debug(&format!("Collected tool result for tool_call_id: {}", tool_call_id));
}
/// Handles a user message: first flushes any buffered tool results (they
/// must appear directly after the assistant turn that requested them),
/// then appends the converted user message.
fn process_user_message<'a>(
    &self,
    msg: &'a OpenAiMessage,
    anthropic_messages: &mut Vec<AnthropicMessage>,
    pending_tool_results: &mut Vec<(String, AnthropicToolResultContent)>,
    last_assistant_message: Option<&'a OpenAiMessage>,
) -> Result<()> {
    let must_flush = !pending_tool_results.is_empty() && last_assistant_message.is_some();
    if must_flush {
        self.debug(&format!(
            "Attaching {} tool result(s) before user message",
            pending_tool_results.len()
        ));
        self.attach_tool_results(anthropic_messages, pending_tool_results)?;
    }
    let converted = self.convert_user_message(msg)?;
    anthropic_messages.push(converted);
    Ok(())
}
fn convert_tool_result_content(
&self,
content: &Option<OpenAiContent>,
) -> AnthropicToolResultContent {
match content {
Some(OpenAiContent::String(s)) => AnthropicToolResultContent::String(s.clone()),
Some(OpenAiContent::Array(arr)) => {
let mut json_blocks = Vec::new();
for block in arr {
match block.block_type.as_str() {
"text" => {
if let Some(text) = &block.text {
json_blocks.push(json!({ "type": "text", "text": text }));
}
}
"image_url" => {
if let Some(img) = &block.image_url {
json_blocks.push(
json!({ "type": "image_url", "image_url": { "url": img.url } }),
);
}
}
_ => {}
}
}
AnthropicToolResultContent::Array(json_blocks)
}
None => AnthropicToolResultContent::String(String::new()),
}
}
/// Final flush: if tool results are still buffered once every message has
/// been processed (and an assistant turn exists to answer), attach them as
/// a trailing user message.
fn handle_remaining_tool_results(
    &self,
    anthropic_messages: &mut Vec<AnthropicMessage>,
    pending_tool_results: &mut Vec<(String, AnthropicToolResultContent)>,
    last_assistant_message: Option<&OpenAiMessage>,
) -> Result<()> {
    if pending_tool_results.is_empty() || last_assistant_message.is_none() {
        return Ok(());
    }
    self.attach_tool_results(anthropic_messages, pending_tool_results)
}
/// Folds all collected system messages (joined with blank lines) into the
/// first user message, since the target format here carries no separate
/// system field.
///
/// NOTE(review): if the conversation contains no user message (or no
/// messages at all), the system text is silently discarded — confirm this
/// is acceptable for upstream callers.
fn prepend_system_messages(
    &self,
    anthropic_messages: &mut [AnthropicMessage],
    system_messages: Vec<String>,
) {
    if !system_messages.is_empty() && !anthropic_messages.is_empty() {
        let system_text = system_messages.join("\n\n");
        if let Some(first_user_msg) = anthropic_messages.iter_mut().find(|m| m.role == "user") {
            self.prepend_system_text(first_user_msg, &system_text);
        }
    }
}
/// Maps OpenAI tool definitions to Anthropic tools one-for-one:
/// `parameters` becomes `input_schema`; name and description are carried
/// over unchanged. Returns `None` when the client sent no tools.
fn convert_tools(&self, tools: Option<Vec<OpenAiTool>>) -> Option<Vec<AnthropicTool>> {
    let tools = tools?;
    self.debug(&format!(
        "Converting {} tool(s) from OpenAI to Anthropic format",
        tools.len()
    ));
    let mut converted = Vec::with_capacity(tools.len());
    for tool in tools {
        let function = tool.function;
        converted.push(AnthropicTool {
            name: function.name,
            description: function.description,
            input_schema: function.parameters,
        });
    }
    Some(converted)
}
/// Maps OpenAI `tool_choice` onto the Anthropic equivalent:
/// "auto" → `Auto`; "none" → omitted (no Anthropic counterpart); a forced
/// function object → `Tool { name }`. Anything else yields `None`.
fn convert_tool_choice(
    &self,
    tool_choice: Option<OpenAiToolChoice>,
) -> Option<AnthropicToolChoice> {
    let choice = tool_choice?;
    self.debug(&format!("Tool choice: {:?}", choice));
    match choice {
        OpenAiToolChoice::String(mode) => match mode.as_str() {
            "auto" => Some(AnthropicToolChoice::Auto),
            "none" => {
                self.debug("Tool choice 'none' not supported by Anthropic, omitting");
                None
            }
            _ => None,
        },
        OpenAiToolChoice::Object(obj) => obj.function.map(|function| {
            self.debug(&format!("Forced tool choice: {}", function.name));
            AnthropicToolChoice::Tool { name: function.name }
        }),
    }
}
/// Builds an Anthropic assistant message: text blocks first, then one
/// `tool_use` block per tool call. An otherwise-empty message gets a
/// single empty text block so the content array is never empty.
fn convert_assistant_message(&self, msg: &OpenAiMessage) -> Result<AnthropicMessage> {
    let mut blocks = Vec::new();
    self.add_text_content(&mut blocks, &msg.content);
    self.add_tool_calls(&mut blocks, &msg.tool_calls)?;
    if blocks.is_empty() {
        blocks.push(AnthropicContentBlock::Text { text: String::new() });
    }
    Ok(AnthropicMessage { role: String::from("assistant"), content: blocks })
}
/// Appends the textual part of OpenAI content as Anthropic text blocks.
/// Empty-string content and non-text array blocks contribute nothing;
/// absent content is a no-op.
fn add_text_content(
    &self,
    content: &mut Vec<AnthropicContentBlock>,
    openai_content: &Option<OpenAiContent>,
) {
    let Some(source) = openai_content else {
        return;
    };
    match source {
        OpenAiContent::String(text) => {
            if !text.is_empty() {
                content.push(AnthropicContentBlock::Text { text: text.clone() });
            }
        }
        OpenAiContent::Array(blocks) => {
            let texts = blocks
                .iter()
                .filter(|block| block.block_type == "text")
                .filter_map(|block| block.text.as_ref());
            for text in texts {
                content.push(AnthropicContentBlock::Text { text: text.clone() });
            }
        }
    }
}
/// Appends one Anthropic `tool_use` block per OpenAI tool call, decoding
/// string-encoded argument payloads via `parse_tool_arguments`. Absent
/// tool calls are a no-op.
fn add_tool_calls(
    &self,
    content: &mut Vec<AnthropicContentBlock>,
    tool_calls: &Option<Vec<OpenAiToolCall>>,
) -> Result<()> {
    let Some(calls) = tool_calls else {
        return Ok(());
    };
    self.debug(&format!(
        "Converting {} tool call(s) from assistant message",
        calls.len()
    ));
    for call in calls {
        let input = self.parse_tool_arguments(&call.function.arguments);
        content.push(AnthropicContentBlock::ToolUse {
            id: call.id.clone(),
            name: call.function.name.clone(),
            input,
        });
    }
    Ok(())
}
/// Normalizes tool-call arguments: OpenAI delivers them as a JSON-encoded
/// string, so decode that case; if decoding fails — or the value is not a
/// string — hand it through unchanged.
fn parse_tool_arguments(&self, arguments: &serde_json::Value) -> serde_json::Value {
    if let serde_json::Value::String(raw) = arguments {
        serde_json::from_str(raw).unwrap_or_else(|_| arguments.clone())
    } else {
        arguments.clone()
    }
}
/// Builds an Anthropic user message. String content maps to one text
/// block, array content is translated block-by-block, and absent content
/// becomes a single empty text block.
fn convert_user_message(&self, msg: &OpenAiMessage) -> Result<AnthropicMessage> {
    let content = match msg.content.as_ref() {
        None => vec![AnthropicContentBlock::Text { text: String::new() }],
        Some(OpenAiContent::String(text)) => {
            vec![AnthropicContentBlock::Text { text: text.clone() }]
        }
        Some(OpenAiContent::Array(blocks)) => self.convert_content_blocks(blocks),
    };
    Ok(AnthropicMessage { role: String::from("user"), content })
}
/// Translates OpenAI content blocks into Anthropic ones: "text" → `Text`,
/// "image_url" → `Image` with a URL source. Blocks of other types, or
/// blocks missing their expected payload, are skipped.
fn convert_content_blocks(&self, blocks: &[OpenAiContentBlock]) -> Vec<AnthropicContentBlock> {
    let mut converted = Vec::new();
    for block in blocks {
        match block.block_type.as_str() {
            "text" => {
                if let Some(text) = &block.text {
                    converted.push(AnthropicContentBlock::Text { text: text.clone() });
                }
            }
            "image_url" => {
                if let Some(img) = &block.image_url {
                    converted.push(AnthropicContentBlock::Image {
                        source: ImageSource {
                            source_type: "url".to_string(),
                            url: img.url.clone(),
                        },
                    });
                }
            }
            _ => {}
        }
    }
    converted
}
/// Drains the buffered tool results into a new user message containing one
/// `tool_result` block each, appended directly after the assistant message
/// that issued the calls. If the last message is not an assistant turn a
/// warning is traced and the buffer is left untouched; an empty message
/// list is a silent no-op.
fn attach_tool_results(
    &self,
    anthropic_messages: &mut Vec<AnthropicMessage>,
    pending_tool_results: &mut Vec<(String, AnthropicToolResultContent)>,
) -> Result<()> {
    match anthropic_messages.last() {
        Some(last) if last.role == "assistant" => {
            let mut blocks = Vec::with_capacity(pending_tool_results.len());
            for (tool_use_id, content) in pending_tool_results.drain(..) {
                blocks.push(AnthropicContentBlock::ToolResult { tool_use_id, content });
            }
            self.debug(&format!(
                "Adding tool results user message with {} result(s)",
                blocks.len()
            ));
            anthropic_messages.push(AnthropicMessage {
                role: "user".to_string(),
                content: blocks,
            });
        }
        Some(_) => {
            self.debug("WARNING: Last message is not assistant, cannot attach tool results");
        }
        None => {}
    }
    Ok(())
}
/// Splices system text into a message: prefix it (separated by a blank
/// line) onto the first text block, or — if the message has no text block —
/// insert a new text block at the front.
fn prepend_system_text(&self, msg: &mut AnthropicMessage, system_text: &str) {
    for block in msg.content.iter_mut() {
        if let AnthropicContentBlock::Text { text } = block {
            *text = format!("{}\n\n{}", system_text, text);
            return;
        }
    }
    msg.content.insert(
        0,
        AnthropicContentBlock::Text { text: system_text.to_string() },
    );
}
/// Emits a trace line via `tracing::debug!`, but only when the configured
/// log level has tracing enabled.
pub(crate) fn debug(&self, msg: &str) {
    if !self.log_level.is_trace_enabled() {
        return;
    }
    tracing::debug!("[TRACE] {}", msg);
}
}