use super::types::{
AnthropicAuth, AnthropicCacheControl, AnthropicConfig, AnthropicContent, AnthropicMessage,
AnthropicMessageContent, AnthropicRequest, AnthropicResponse, AnthropicSource,
AnthropicSystemBlock, AnthropicSystemContent, AnthropicThinkingConfig as AnthropicThinking,
CLAUDE_CODE_SYSTEM_PREFIX, infer_max_tokens,
};
use crate::error::{Error, Result};
use crate::types::{
CacheContext, CacheControlValidator, CacheWarning, ContentPart, FinishReason, FinishReasonKind,
GenerateRequest, GenerateResponse, InputTokenDetails, Message, OutputTokenDetails,
ResponseContent, Role, Usage,
};
use serde_json::json;
/// Outcome of converting a provider-agnostic request into an Anthropic request.
pub struct AnthropicConversionResult {
// The fully converted request body, ready to serialize.
pub request: AnthropicRequest,
// Cache-control validation warnings gathered during conversion.
pub warnings: Vec<CacheWarning>,
// True when at least one cache breakpoint was actually placed.
pub has_cache_control: bool,
}
/// Converts a `GenerateRequest` into an `AnthropicRequest`, applying the
/// configured prompt-caching strategy.
///
/// Cache breakpoints are allocated through `validator` in a fixed order —
/// tools first, then system content, then tail messages — and the tail
/// budget is clamped to whatever remains of the hard-coded 4-breakpoint
/// limit. Returns the request together with any cache warnings and a flag
/// indicating whether any breakpoint was placed.
pub fn to_anthropic_request(
req: &GenerateRequest,
config: &AnthropicConfig,
stream: bool,
) -> Result<AnthropicConversionResult> {
let mut validator = CacheControlValidator::new();
// Per-request strategy overrides the provider-level default.
let cache_strategy = req
.options
.cache_strategy
.clone()
.unwrap_or_else(|| config.default_cache_strategy.clone());
let cache_config = cache_strategy.to_anthropic_config();
let has_tools = req.options.tools.as_ref().is_some_and(|t| !t.is_empty());
// Tool definitions may consume breakpoints before system/messages do.
let tools = build_tools_with_caching(
&req.options.tools,
&mut validator,
cache_config
.as_ref()
.is_some_and(|c| c.cache_tools && has_tools),
)?;
let system = build_system_content_with_caching(
&req.messages,
&config.auth,
&mut validator,
cache_config.as_ref().is_some_and(|c| c.cache_system),
)?;
// Remaining breakpoints (out of the hard limit of 4) bound how many tail
// messages may be auto-cached.
let tail_budget = cache_config.as_ref().map_or(0, |c| {
let used = validator.breakpoint_count();
let max = 4usize; let remaining = max.saturating_sub(used);
c.tail_message_count.min(remaining)
});
let messages = build_messages_with_caching(&req.messages, &mut validator, tail_budget)?;
// Fall back to a model-specific default when the caller sets no limit.
let max_tokens = req
.options
.max_tokens
.unwrap_or_else(|| infer_max_tokens(&req.model.id));
let tool_choice = req.options.tool_choice.as_ref().map(|choice| match choice {
crate::types::ToolChoice::Auto => json!({"type": "auto"}),
crate::types::ToolChoice::None => json!({"type": "none"}),
crate::types::ToolChoice::Required { name } => json!({
"type": "tool",
"name": name
}),
});
// Extended thinking: the budget is clamped to a minimum of 1024 tokens.
let thinking = req.provider_options.as_ref().and_then(|opts| {
if let crate::types::ProviderOptions::Anthropic(anthropic) = opts {
anthropic.thinking.as_ref().map(|t| AnthropicThinking {
type_: "enabled".to_string(),
budget_tokens: t.budget_tokens.max(1024),
})
} else {
None
}
});
let has_cache_control = validator.breakpoint_count() > 0;
let warnings = validator.take_warnings();
Ok(AnthropicConversionResult {
request: AnthropicRequest {
model: req.model.id.clone(),
messages,
max_tokens,
system,
temperature: req.options.temperature,
top_p: req.options.top_p,
top_k: None,
metadata: None,
stop_sequences: req.options.stop_sequences.clone(),
// Omit `stream` entirely when not streaming.
stream: if stream { Some(true) } else { None },
thinking,
tools,
tool_choice,
},
warnings,
has_cache_control,
})
}
/// Builds the Anthropic `system` field from the request's system messages.
///
/// OAuth (Claude Code) auth always emits a block list headed by the
/// `CLAUDE_CODE_SYSTEM_PREFIX` block, cached with a 1h TTL. Otherwise a plain
/// string is used unless any block needs a cache_control marker (explicit or
/// auto-applied to the last system message), in which case blocks are emitted.
///
/// # Errors
/// Currently infallible; the `Result` mirrors the sibling builders.
fn build_system_content_with_caching(
    messages: &[Message],
    auth: &AnthropicAuth,
    validator: &mut CacheControlValidator,
    auto_cache_last: bool,
) -> Result<Option<AnthropicSystemContent>> {
    let system_messages: Vec<&Message> =
        messages.iter().filter(|m| m.role == Role::System).collect();
    let is_oauth = matches!(auth, AnthropicAuth::OAuth { .. });
    if system_messages.is_empty() && !is_oauth {
        return Ok(None);
    }
    let msg_count = system_messages.len();
    if is_oauth {
        // The spoofed Claude Code prefix is always the first block and is
        // cached with a 1h TTL; record that breakpoint with the validator.
        let mut blocks = vec![AnthropicSystemBlock {
            type_: "text".to_string(),
            text: CLAUDE_CODE_SYSTEM_PREFIX.to_string(),
            cache_control: Some(AnthropicCacheControl::ephemeral_with_ttl("1h")),
        }];
        validator.validate(
            Some(&crate::types::CacheControl::ephemeral_with_ttl("1h")),
            CacheContext::system_message(),
        );
        blocks.extend(system_messages.iter().enumerate().filter_map(|(i, msg)| {
            system_block(msg, i == msg_count - 1, auto_cache_last, validator)
        }));
        return Ok(Some(AnthropicSystemContent::Blocks(blocks)));
    }
    let has_explicit_cache = system_messages.iter().any(|m| m.cache_control().is_some());
    if !has_explicit_cache && !auto_cache_last {
        // No cache markers needed anywhere: collapse to a single string.
        let combined = system_messages
            .iter()
            .filter_map(|m| m.text())
            .collect::<Vec<_>>()
            .join("\n\n");
        return Ok(Some(AnthropicSystemContent::String(combined)));
    }
    let blocks: Vec<AnthropicSystemBlock> = system_messages
        .iter()
        .enumerate()
        .filter_map(|(i, msg)| system_block(msg, i == msg_count - 1, auto_cache_last, validator))
        .collect();
    if blocks.is_empty() {
        Ok(None)
    } else {
        Ok(Some(AnthropicSystemContent::Blocks(blocks)))
    }
}

/// Converts one system message into a system block. Explicit cache control on
/// the message wins; otherwise the last message gets an auto-applied 1h
/// ephemeral marker when `auto_cache_last` is set. The (possibly downgraded)
/// cache control is recorded through `validator`. Returns `None` for
/// messages without text content.
fn system_block(
    msg: &Message,
    is_last: bool,
    auto_cache_last: bool,
    validator: &mut CacheControlValidator,
) -> Option<AnthropicSystemBlock> {
    let text = msg.text()?;
    let cache_control = msg.cache_control().cloned().or_else(|| {
        if is_last && auto_cache_last {
            Some(crate::types::CacheControl::ephemeral_with_ttl("1h"))
        } else {
            None
        }
    });
    let validated_cache =
        validator.validate(cache_control.as_ref(), CacheContext::system_message());
    Some(AnthropicSystemBlock {
        type_: "text".to_string(),
        text,
        cache_control: validated_cache.map(|c| AnthropicCacheControl::from(&c)),
    })
}
/// Converts the request's tool definitions into Anthropic tool JSON objects,
/// recording cache-control decisions with `validator`. When `auto_cache_last`
/// is set and the final tool has no explicit cache control, a 1h ephemeral
/// breakpoint is attached to it. Returns `None` when there are no tools.
fn build_tools_with_caching(
    tools: &Option<Vec<crate::types::Tool>>,
    validator: &mut CacheControlValidator,
    auto_cache_last: bool,
) -> Result<Option<Vec<serde_json::Value>>> {
    let Some(tool_list) = tools.as_ref().filter(|t| !t.is_empty()) else {
        return Ok(None);
    };
    let last_index = tool_list.len() - 1;
    let mut converted = Vec::with_capacity(tool_list.len());
    for (idx, tool) in tool_list.iter().enumerate() {
        // Explicit per-tool cache control wins over the auto-cache fallback.
        let effective = tool.cache_control().cloned().or_else(|| {
            (auto_cache_last && idx == last_index)
                .then(|| crate::types::CacheControl::ephemeral_with_ttl("1h"))
        });
        let validated = validator.validate(effective.as_ref(), CacheContext::tool_definition());
        let mut entry = json!({
            "name": tool.function.name,
            "description": tool.function.description,
            "input_schema": tool.function.parameters,
        });
        if let Some(cache) = validated {
            entry["cache_control"] = json!(AnthropicCacheControl::from(&cache));
        }
        converted.push(entry);
    }
    Ok(Some(converted))
}
/// Converts all non-system messages into Anthropic messages, auto-caching the
/// final `tail_count` of them (subject to `validator`'s breakpoint budget).
fn build_messages_with_caching(
    messages: &[Message],
    validator: &mut CacheControlValidator,
    tail_count: usize,
) -> Result<Vec<AnthropicMessage>> {
    // System messages are carried separately via the `system` field.
    let conversational: Vec<&Message> =
        messages.iter().filter(|m| m.role != Role::System).collect();
    // First index eligible for automatic tail caching.
    let first_cached = conversational.len().saturating_sub(tail_count);
    let mut out = Vec::with_capacity(conversational.len());
    for (idx, msg) in conversational.iter().enumerate() {
        let auto_cache = tail_count > 0 && idx >= first_cached;
        out.push(to_anthropic_message_with_caching(msg, validator, auto_cache)?);
    }
    Ok(out)
}
/// Converts one non-system `Message` into an `AnthropicMessage`, threading
/// cache-control decisions through `validator`.
///
/// Tool-role messages are sent under the `user` role (Anthropic has no
/// dedicated tool role) and always as content blocks. When `auto_cache` is
/// set and the message has no explicit cache control, an ephemeral marker is
/// applied; message-level cache control lands on the final content part.
///
/// # Errors
/// Returns `Error::invalid_response` if a system message reaches this point.
fn to_anthropic_message_with_caching(
    msg: &Message,
    validator: &mut CacheControlValidator,
    auto_cache: bool,
) -> Result<AnthropicMessage> {
    let role = match msg.role {
        Role::User | Role::Tool => "user",
        Role::Assistant => "assistant",
        Role::System => {
            return Err(Error::invalid_response(
                "System messages should be filtered out",
            ));
        }
    };
    let msg_cache_control = msg
        .cache_control()
        .cloned()
        .or_else(|| auto_cache.then(crate::types::CacheControl::ephemeral));
    let parts = msg.parts();
    let any_cache = msg_cache_control.is_some()
        || parts.iter().any(|p| p.cache_control().is_some());
    // Tool results must be sent as blocks even when there is only one part;
    // likewise any part that carries cache control.
    let needs_blocks = any_cache || msg.role == Role::Tool || parts.len() != 1;
    let content = if !needs_blocks {
        if let ContentPart::Text { text, .. } = &parts[0] {
            AnthropicMessageContent::String(text.clone())
        } else {
            AnthropicMessageContent::Blocks(vec![to_anthropic_content_part(
                &parts[0], None, validator, true,
            )?])
        }
    } else {
        let last = parts.len().saturating_sub(1);
        let blocks = parts
            .iter()
            .enumerate()
            .map(|(i, part)| {
                // Only the final part inherits the message-level cache control.
                let fallback = (i == last).then_some(msg_cache_control.as_ref()).flatten();
                to_anthropic_content_part(part, fallback, validator, i == last)
            })
            .collect::<Result<Vec<_>>>()?;
        AnthropicMessageContent::Blocks(blocks)
    };
    Ok(AnthropicMessage {
        role: role.to_string(),
        content,
    })
}
/// Test-only convenience wrapper: converts a message with auto-caching
/// disabled.
#[cfg(test)]
fn to_anthropic_message(
msg: &Message,
validator: &mut CacheControlValidator,
) -> Result<AnthropicMessage> {
to_anthropic_message_with_caching(msg, validator, false)
}
/// Converts a single `ContentPart` into an `AnthropicContent` block,
/// validating any applicable cache control against the part's context.
///
/// Part-level cache control takes precedence; the message-level
/// `fallback_cache` applies only to the final part (`is_last_part`).
///
/// # Errors
/// Propagates `parse_image_source` errors for image parts.
fn to_anthropic_content_part(
    part: &ContentPart,
    fallback_cache: Option<&crate::types::CacheControl>,
    validator: &mut CacheControlValidator,
    is_last_part: bool,
) -> Result<AnthropicContent> {
    let effective_cache = part
        .cache_control()
        .or(if is_last_part { fallback_cache } else { None });
    // Each part kind has its own validation context.
    let context = match part {
        ContentPart::Text { .. } => CacheContext::user_message_part(),
        ContentPart::Image { .. } => CacheContext::image_content(),
        ContentPart::ToolCall { .. } => CacheContext::assistant_message_part(),
        ContentPart::ToolResult { .. } => CacheContext::tool_result(),
    };
    let cache_control = validator
        .validate(effective_cache, context)
        .map(|c| AnthropicCacheControl::from(&c));
    match part {
        ContentPart::Text { text, .. } => Ok(AnthropicContent::Text {
            text: text.clone(),
            cache_control,
        }),
        ContentPart::Image { url, .. } => Ok(AnthropicContent::Image {
            source: parse_image_source(url)?,
            cache_control,
        }),
        ContentPart::ToolCall {
            id,
            name,
            arguments,
            ..
        } => Ok(AnthropicContent::ToolUse {
            id: id.clone(),
            name: name.clone(),
            input: arguments.clone(),
            cache_control,
        }),
        ContentPart::ToolResult {
            tool_call_id,
            content,
            ..
        } => Ok(AnthropicContent::ToolResult {
            tool_use_id: tool_call_id.clone(),
            // Tool result payloads are serialized to their JSON text form.
            content: Some(AnthropicMessageContent::String(content.to_string())),
            is_error: None,
            cache_control,
        }),
    }
}
fn parse_image_source(url: &str) -> Result<AnthropicSource> {
if url.starts_with("data:") {
let parts: Vec<&str> = url.splitn(2, ',').collect();
if parts.len() != 2 {
return Err(Error::invalid_response("Invalid data URL format"));
}
let media_type = parts[0]
.strip_prefix("data:")
.and_then(|s| s.strip_suffix(";base64"))
.ok_or_else(|| Error::invalid_response("Invalid data URL media type"))?;
Ok(AnthropicSource {
type_: "base64".to_string(),
media_type: media_type.to_string(),
data: parts[1].to_string(),
})
} else {
Err(Error::invalid_response(
"Anthropic requires base64-encoded images, not URLs",
))
}
}
/// Converts a raw `AnthropicResponse` into the provider-agnostic
/// `GenerateResponse`, attaching the cache warnings collected during request
/// conversion.
///
/// # Errors
/// Returns `Error::invalid_response` when no content block converts to a
/// `ResponseContent`.
pub fn from_anthropic_response_with_warnings(
resp: AnthropicResponse,
warnings: Vec<CacheWarning>,
) -> Result<GenerateResponse> {
use crate::types::{ResponseWarning, ToolCall};
// Map text / thinking / tool_use blocks; any other block kind is dropped.
let content: Vec<ResponseContent> = resp
.content
.iter()
.filter_map(|c| match c {
AnthropicContent::Text { text, .. } => {
Some(ResponseContent::Text { text: text.clone() })
}
AnthropicContent::Thinking { thinking, .. } => Some(ResponseContent::Reasoning {
reasoning: thinking.clone(),
}),
AnthropicContent::ToolUse {
id, name, input, ..
} => Some(ResponseContent::ToolCall(ToolCall {
id: id.clone(),
name: name.clone(),
arguments: input.clone(),
metadata: None,
})),
_ => None,
})
.collect();
if content.is_empty() {
return Err(Error::invalid_response("No content in response"));
}
// Any tool call forces a ToolCalls finish reason regardless of stop_reason.
let finish_reason = if content
.iter()
.any(|c| matches!(c, ResponseContent::ToolCall(_)))
{
FinishReason::with_raw(FinishReasonKind::ToolCalls, "tool_use")
} else {
parse_stop_reason(&resp.stop_reason)
};
// Total input is reported as the sum of uncached, cache-write, and
// cache-read tokens; the zero buckets are omitted from the details.
let cache_creation = resp.usage.cache_creation_input_tokens.unwrap_or(0);
let cache_read = resp.usage.cache_read_input_tokens.unwrap_or(0);
let input_tokens = resp.usage.input_tokens;
let output_tokens = resp.usage.output_tokens;
let total_input = input_tokens + cache_creation + cache_read;
let usage = Usage::with_details(
InputTokenDetails {
total: Some(total_input),
no_cache: Some(input_tokens),
cache_read: if cache_read > 0 {
Some(cache_read)
} else {
None
},
cache_write: if cache_creation > 0 {
Some(cache_creation)
} else {
None
},
},
OutputTokenDetails {
total: Some(output_tokens),
text: None, reasoning: None, },
// Keep the raw usage payload for provider-specific consumers.
Some(serde_json::to_value(&resp.usage).unwrap_or_default()),
);
let response_warnings: Option<Vec<ResponseWarning>> = if warnings.is_empty() {
None
} else {
Some(warnings.into_iter().map(ResponseWarning::from).collect())
};
Ok(GenerateResponse {
content,
usage,
finish_reason,
metadata: Some(json!({
"id": resp.id,
"model": resp.model,
})),
warnings: response_warnings,
})
}
/// Maps Anthropic's `stop_reason` string onto the normalized `FinishReason`,
/// preserving the raw value; an absent reason maps to `FinishReason::other()`.
fn parse_stop_reason(reason: &Option<String>) -> FinishReason {
    let Some(raw) = reason.as_deref() else {
        return FinishReason::other();
    };
    let kind = match raw {
        "end_turn" | "stop_sequence" => FinishReasonKind::Stop,
        "max_tokens" => FinishReasonKind::Length,
        "tool_use" => FinishReasonKind::ToolCalls,
        _ => FinishReasonKind::Other,
    };
    FinishReason::with_raw(kind, raw)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::types::MessageContent;
// Model-specific default output-token limits.
#[test]
fn test_infer_max_tokens() {
assert_eq!(infer_max_tokens("claude-opus-4-5"), 64000);
assert_eq!(infer_max_tokens("claude-sonnet-4"), 64000);
assert_eq!(infer_max_tokens("claude-opus-4"), 32000);
assert_eq!(infer_max_tokens("claude-3-5-sonnet"), 8192);
assert_eq!(infer_max_tokens("claude-3-opus"), 4096);
}
// A well-formed data URL splits into media type and base64 payload.
#[test]
fn test_parse_image_source() {
let data_url = "data:image/png;base64,iVBORw0KGgoAAAANS";
let result = parse_image_source(data_url).unwrap();
assert_eq!(result.type_, "base64");
assert_eq!(result.media_type, "image/png");
assert_eq!(result.data, "iVBORw0KGgoAAAANS");
}
// Tool-role messages must surface as `user` messages holding a
// tool_result block, with the JSON payload stringified.
#[test]
fn test_tool_role_message_converted_to_user_with_tool_result() {
let mut validator = CacheControlValidator::new();
let tool_msg = Message {
role: Role::Tool,
content: MessageContent::Parts(vec![ContentPart::ToolResult {
tool_call_id: "toolu_01Abc123".to_string(),
content: serde_json::json!("Tool execution result"),
provider_options: None,
}]),
name: None,
provider_options: None,
};
let result = to_anthropic_message(&tool_msg, &mut validator).unwrap();
assert_eq!(
result.role, "user",
"Tool role should be converted to user for Anthropic"
);
match result.content {
AnthropicMessageContent::Blocks(blocks) => {
assert_eq!(blocks.len(), 1, "Should have exactly one content block");
match &blocks[0] {
AnthropicContent::ToolResult {
tool_use_id,
content,
..
} => {
assert_eq!(tool_use_id, "toolu_01Abc123");
match content {
Some(AnthropicMessageContent::String(s)) => {
// A JSON string payload keeps its quotes when stringified.
assert_eq!(s, "\"Tool execution result\"");
}
_ => panic!("Expected string content in tool result"),
}
}
_ => panic!("Expected ToolResult content block, got {:?}", blocks[0]),
}
}
_ => panic!("Expected Blocks content, got {:?}", result.content),
}
}
// Object payloads in tool results are serialized to JSON text.
#[test]
fn test_tool_role_message_with_text_content() {
let mut validator = CacheControlValidator::new();
let tool_msg = Message {
role: Role::Tool,
content: MessageContent::Parts(vec![ContentPart::ToolResult {
tool_call_id: "toolu_02Xyz789".to_string(),
content: serde_json::json!({"temperature": 22, "unit": "celsius"}),
provider_options: None,
}]),
name: None,
provider_options: None,
};
let result = to_anthropic_message(&tool_msg, &mut validator).unwrap();
assert_eq!(result.role, "user");
match result.content {
AnthropicMessageContent::Blocks(blocks) => {
assert_eq!(blocks.len(), 1);
match &blocks[0] {
AnthropicContent::ToolResult {
tool_use_id,
content,
..
} => {
assert_eq!(tool_use_id, "toolu_02Xyz789");
match content {
Some(AnthropicMessageContent::String(s)) => {
assert!(s.contains("temperature"));
assert!(s.contains("22"));
}
_ => panic!("Expected string content"),
}
}
_ => panic!("Expected ToolResult"),
}
}
_ => panic!("Expected Blocks"),
}
}
// Simple assistant text stays a plain string and keeps its role.
#[test]
fn test_assistant_message_not_affected_by_tool_conversion() {
let mut validator = CacheControlValidator::new();
let assistant_msg = Message {
role: Role::Assistant,
content: MessageContent::Text("I'll help you with that.".to_string()),
name: None,
provider_options: None,
};
let result = to_anthropic_message(&assistant_msg, &mut validator).unwrap();
assert_eq!(result.role, "assistant");
match result.content {
AnthropicMessageContent::String(s) => {
assert_eq!(s, "I'll help you with that.");
}
_ => panic!("Expected string content for simple assistant message"),
}
}
// Simple user text stays a plain string and keeps its role.
#[test]
fn test_user_message_not_affected_by_tool_conversion() {
let mut validator = CacheControlValidator::new();
let user_msg = Message {
role: Role::User,
content: MessageContent::Text("Hello!".to_string()),
name: None,
provider_options: None,
};
let result = to_anthropic_message(&user_msg, &mut validator).unwrap();
assert_eq!(result.role, "user");
match result.content {
AnthropicMessageContent::String(s) => {
assert_eq!(s, "Hello!");
}
_ => panic!("Expected string content for simple user message"),
}
}
}