use async_trait::async_trait;
use futures::stream::BoxStream;
use futures::StreamExt;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use tracing::{debug, instrument, warn};
use crate::error::{LlmError, Result};
use crate::traits::{
ChatMessage, ChatRole, CompletionOptions, FunctionCall, LLMProvider, LLMResponse, StreamChunk,
StreamUsage, ToolCall, ToolChoice, ToolDefinition,
};
/// Base URL of the hosted Anthropic API (overridable via `with_base_url`).
const ANTHROPIC_API_BASE: &str = "https://api.anthropic.com";
/// Value sent in the `anthropic-version` request header.
const ANTHROPIC_API_VERSION: &str = "2023-06-01";
/// Model used when the caller does not configure one explicitly.
const DEFAULT_MODEL: &str = "claude-sonnet-4-6";
/// A single conversation turn in Anthropic's Messages API wire format.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct AnthropicMessage {
/// "user" or "assistant" — system text travels in the request's
/// top-level `system` field instead (see `convert_messages`).
role: String,
content: AnthropicContent,
}
/// Message content: either a bare string or a list of typed blocks.
/// `untagged` makes serde emit/accept whichever shape matches.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
enum AnthropicContent {
/// Plain-text shorthand.
Text(String),
/// Structured content (text, images, tool use/results).
Blocks(Vec<ContentBlock>),
}
/// Inline image payload attached to a user message.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ImageSource {
    /// Wire field `type`; this module always writes "base64".
    #[serde(rename = "type")]
    source_type: String,
    /// MIME type of the image, e.g. "image/png".
    media_type: String,
    /// Base64-encoded image bytes.
    data: String,
}
/// One typed content block inside a message. Which optional fields are
/// populated depends on `content_type` ("text", "image", "tool_use",
/// "tool_result", ...); unset fields are omitted from the JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ContentBlock {
/// Discriminator, serialized as `type`.
#[serde(rename = "type")]
content_type: String,
/// Text payload (content_type = "text").
#[serde(skip_serializing_if = "Option::is_none")]
text: Option<String>,
/// Tool-call id (content_type = "tool_use").
#[serde(skip_serializing_if = "Option::is_none")]
id: Option<String>,
/// Tool name (content_type = "tool_use").
#[serde(skip_serializing_if = "Option::is_none")]
name: Option<String>,
/// Tool input as JSON (content_type = "tool_use").
#[serde(skip_serializing_if = "Option::is_none")]
input: Option<serde_json::Value>,
/// Id of the tool_use block being answered (content_type = "tool_result").
#[serde(skip_serializing_if = "Option::is_none")]
tool_use_id: Option<String>,
/// Tool result payload (content_type = "tool_result").
#[serde(skip_serializing_if = "Option::is_none")]
content: Option<String>,
/// Error flag for tool results; never set by this module, kept for
/// wire-format completeness.
#[serde(skip_serializing_if = "Option::is_none")]
is_error: Option<bool>,
/// Image payload (content_type = "image").
#[serde(skip_serializing_if = "Option::is_none")]
source: Option<ImageSource>,
}
/// Tool definition in Anthropic's schema.
#[derive(Debug, Clone, Serialize)]
struct AnthropicTool {
name: String,
description: String,
/// JSON Schema describing the tool's parameters.
input_schema: serde_json::Value,
}
/// Request body for `POST /v1/messages`. `None` fields are omitted
/// from the serialized JSON.
#[derive(Debug, Clone, Serialize)]
struct MessagesRequest {
model: String,
/// Required by the API; this module defaults it to 4096 when the
/// caller does not specify a limit.
max_tokens: u32,
messages: Vec<AnthropicMessage>,
/// System prompt, sent as a top-level field rather than a message.
#[serde(skip_serializing_if = "Option::is_none")]
system: Option<String>,
/// Set to `Some(true)` to request an SSE streaming response.
#[serde(skip_serializing_if = "Option::is_none")]
stream: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
tools: Option<Vec<AnthropicTool>>,
/// Pre-built JSON from `convert_tool_choice`.
#[serde(skip_serializing_if = "Option::is_none")]
tool_choice: Option<serde_json::Value>,
#[serde(skip_serializing_if = "Option::is_none")]
temperature: Option<f32>,
#[serde(skip_serializing_if = "Option::is_none")]
top_p: Option<f32>,
#[serde(skip_serializing_if = "Option::is_none")]
stop_sequences: Option<Vec<String>>,
}
/// Successful response body from the Messages endpoint; also embedded
/// in streaming `message_start` events.
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)] struct MessagesResponse {
id: String,
#[serde(rename = "type")]
response_type: String,
role: String,
/// Ordered content blocks (text and/or tool_use).
content: Vec<ContentBlock>,
model: String,
/// Why generation stopped (e.g. "end_turn", "tool_use") when present.
stop_reason: Option<String>,
usage: AnthropicUsage,
}
/// Token accounting attached to responses and stream events.
#[derive(Debug, Clone, Deserialize, Default)]
#[allow(dead_code)] struct AnthropicUsage {
input_tokens: u32,
output_tokens: u32,
/// Tokens written to the prompt cache, when the API reports it.
#[serde(default)]
cache_creation_input_tokens: Option<u32>,
/// Tokens served from the prompt cache, when the API reports it.
#[serde(default)]
cache_read_input_tokens: Option<u32>,
}
/// Top-level error envelope returned on non-2xx responses.
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)] struct AnthropicErrorResponse {
#[serde(rename = "type")]
error_type: String,
error: AnthropicError,
}
/// Error detail; `message` is surfaced to callers via `handle_error`
/// and the streaming error paths.
#[derive(Debug, Clone, Deserialize)]
struct AnthropicError {
#[serde(rename = "type")]
#[allow(dead_code)]
error_type: String,
message: String,
}
/// Server-sent events emitted by the streaming Messages API,
/// discriminated by the JSON `type` field.
#[derive(Debug, Clone, Deserialize)]
#[serde(tag = "type")]
#[allow(dead_code)] enum StreamEvent {
/// First event; carries the message shell including prompt-side usage.
#[serde(rename = "message_start")]
MessageStart { message: MessagesResponse },
/// Opens content block `index` (text, tool_use, ...).
#[serde(rename = "content_block_start")]
ContentBlockStart {
index: usize,
content_block: ContentBlock,
},
/// Incremental payload for an open content block.
#[serde(rename = "content_block_delta")]
ContentBlockDelta { index: usize, delta: DeltaBlock },
/// Closes content block `index`.
#[serde(rename = "content_block_stop")]
ContentBlockStop { index: usize },
/// Message-level update: stop reason and/or refreshed usage.
#[serde(rename = "message_delta")]
MessageDelta {
delta: MessageDeltaData,
usage: Option<DeltaUsage>,
},
/// Final event of the stream.
#[serde(rename = "message_stop")]
MessageStop,
/// Keep-alive; ignored by consumers in this module.
#[serde(rename = "ping")]
Ping,
/// Mid-stream API error.
#[serde(rename = "error")]
Error { error: AnthropicError },
}
/// Payload of a `content_block_delta` event; which field is populated
/// depends on `delta_type` ("text_delta", "input_json_delta",
/// "thinking_delta", ...).
#[derive(Debug, Clone, Deserialize)]
struct DeltaBlock {
#[serde(rename = "type")]
delta_type: String,
/// Text fragment (text_delta).
text: Option<String>,
/// Partial JSON of tool-call arguments (input_json_delta).
partial_json: Option<String>,
/// Thinking-text fragment (thinking_delta).
thinking: Option<String>,
/// Signature fragment; not consumed by this module.
#[allow(dead_code)]
signature: Option<String>,
}
/// `delta` payload of a `message_delta` event.
#[derive(Debug, Clone, Deserialize)]
struct MessageDeltaData {
/// Stop reason, once the model decides to finish.
stop_reason: Option<String>,
}
/// Usage payload carried by `message_delta` events.
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)] struct DeltaUsage {
/// Output-token count reported by this event.
output_tokens: u32,
}
/// Anthropic Messages API client implementing [`LLMProvider`].
///
/// Can also target Anthropic-compatible endpoints (e.g. a local Ollama
/// server) via the `for_ollama*` constructors.
#[derive(Debug, Clone)]
pub struct AnthropicProvider {
/// Shared reqwest HTTP client.
client: Client,
/// Value sent in the `x-api-key` header.
api_key: String,
/// Model identifier sent with every request.
model: String,
/// API root, without the `/v1/messages` suffix.
base_url: String,
/// Cached context window for `model`, in tokens.
max_context_length: usize,
/// Value sent in the `anthropic-version` header.
api_version: String,
}
impl AnthropicProvider {
/// Creates a provider for the hosted Anthropic API with the default
/// model, base URL, and API version.
pub fn new(api_key: impl Into<String>) -> Self {
    let model = DEFAULT_MODEL.to_string();
    let max_context_length = Self::context_length_for_model(&model);
    Self {
        client: Client::new(),
        api_key: api_key.into(),
        max_context_length,
        model,
        base_url: ANTHROPIC_API_BASE.to_string(),
        api_version: ANTHROPIC_API_VERSION.to_string(),
    }
}
/// Builds a provider from environment variables.
///
/// Reads the API key from `ANTHROPIC_API_KEY` (falling back to
/// `ANTHROPIC_AUTH_TOKEN`) and honors the optional `ANTHROPIC_BASE_URL`
/// and `ANTHROPIC_MODEL` overrides.
///
/// # Errors
/// Returns [`LlmError::ConfigError`] when neither key variable is set.
pub fn from_env() -> Result<Self> {
    let api_key = std::env::var("ANTHROPIC_API_KEY")
        .or_else(|_| std::env::var("ANTHROPIC_AUTH_TOKEN"))
        .map_err(|_| {
            LlmError::ConfigError(
                "ANTHROPIC_API_KEY or ANTHROPIC_AUTH_TOKEN environment variable not set"
                    .to_string(),
            )
        })?;
    let mut provider = Self::new(api_key);
    if let Some(base_url) = std::env::var("ANTHROPIC_BASE_URL").ok() {
        provider = provider.with_base_url(base_url);
    }
    if let Some(model) = std::env::var("ANTHROPIC_MODEL").ok() {
        provider = provider.with_model(model);
    }
    Ok(provider)
}
/// Targets a local Ollama server with the default local model.
pub fn for_ollama() -> Self {
    Self::for_ollama_with_model("qwen3-coder")
}
/// Targets a local Ollama server with a caller-chosen model.
pub fn for_ollama_with_model(model: impl Into<String>) -> Self {
    Self::for_ollama_at("http://localhost:11434", model)
}
/// Targets an Ollama server at an arbitrary host with a chosen model.
/// The "ollama" key is a placeholder — Ollama ignores authentication.
pub fn for_ollama_at(host: impl Into<String>, model: impl Into<String>) -> Self {
    Self::new("ollama").with_model(model).with_base_url(host)
}
/// Replaces the API key, returning the modified provider.
pub fn with_api_key(mut self, api_key: impl Into<String>) -> Self {
    let key = api_key.into();
    self.api_key = key;
    self
}
/// Sets the model and refreshes the cached context-window size to match.
pub fn with_model(mut self, model: impl Into<String>) -> Self {
    self.model = model.into();
    self.max_context_length = Self::context_length_for_model(&self.model);
    self
}
/// Overrides the API root URL (no trailing `/v1/messages`).
pub fn with_base_url(mut self, url: impl Into<String>) -> Self {
    let base = url.into();
    self.base_url = base;
    self
}
/// Overrides the `anthropic-version` header value.
pub fn with_api_version(mut self, version: impl Into<String>) -> Self {
    let v = version.into();
    self.api_version = v;
    self
}
/// Returns the configured API key.
pub fn api_key(&self) -> &str {
    self.api_key.as_str()
}
/// Returns the configured API root URL.
pub fn base_url(&self) -> &str {
    self.base_url.as_str()
}
/// Returns the configured Anthropic API version string.
pub fn api_version(&self) -> &str {
    self.api_version.as_str()
}
/// Full URL of the Messages endpoint under the configured base URL.
pub fn endpoint(&self) -> String {
    let mut url = self.base_url.clone();
    url.push_str("/v1/messages");
    url
}
/// Best-effort context-window size (in tokens) for a model identifier.
///
/// Matching is substring-based so dated variants (e.g.
/// `claude-3-5-sonnet-20241022`) resolve the same as their base names.
/// Unknown models fall back to 200k.
pub fn context_length_for_model(model: &str) -> usize {
    // Claude 3.x / 4.x families all ship a 200k window. These markers
    // are checked before the legacy ones below, preserving the original
    // match-arm ordering for strings that could match both groups.
    const LARGE_WINDOW_MARKERS: [&str; 13] = [
        "claude-opus-4-6",
        "claude-sonnet-4-6",
        "claude-opus-4-5",
        "opus-4.5",
        "claude-sonnet-4-5",
        "sonnet-4.5",
        "claude-haiku-4-5",
        "haiku-4.5",
        "claude-3-5-sonnet",
        "claude-3-5-haiku",
        "claude-3-opus",
        "claude-3-sonnet",
        "claude-3-haiku",
    ];
    // Older generations with a 100k window.
    const LEGACY_MARKERS: [&str; 2] = ["claude-2", "claude-instant"];
    if LARGE_WINDOW_MARKERS.iter().any(|m| model.contains(m)) {
        200_000
    } else if LEGACY_MARKERS.iter().any(|m| model.contains(m)) {
        100_000
    } else {
        // Unknown/new models: assume the modern 200k window.
        200_000
    }
}
/// Builds the per-request header set: auth key, API version, and a JSON
/// content type.
///
/// # Panics
/// Panics when the configured key or version contains bytes that are
/// not valid in an HTTP header value.
fn headers(&self) -> reqwest::header::HeaderMap {
    let mut headers = reqwest::header::HeaderMap::new();
    let key = self.api_key.parse().expect("Invalid API key");
    headers.insert("x-api-key", key);
    let version = self.api_version.parse().expect("Invalid API version");
    headers.insert("anthropic-version", version);
    let content_type = "application/json".parse().unwrap();
    headers.insert(reqwest::header::CONTENT_TYPE, content_type);
    headers
}
fn convert_messages(messages: &[ChatMessage]) -> (Option<String>, Vec<AnthropicMessage>) {
let mut system_parts: Vec<String> = Vec::new();
let mut anthropic_messages = Vec::new();
for msg in messages {
match msg.role {
ChatRole::System => {
system_parts.push(msg.content.clone());
}
ChatRole::User => {
if msg.has_images() {
let mut blocks = Vec::new();
if !msg.content.is_empty() {
blocks.push(ContentBlock {
content_type: "text".to_string(),
text: Some(msg.content.clone()),
id: None,
name: None,
input: None,
tool_use_id: None,
content: None,
is_error: None,
source: None,
});
}
if let Some(ref images) = msg.images {
for img in images {
blocks.push(ContentBlock {
content_type: "image".to_string(),
text: None,
id: None,
name: None,
input: None,
tool_use_id: None,
content: None,
is_error: None,
source: Some(ImageSource {
source_type: "base64".to_string(),
media_type: img.mime_type.clone(),
data: img.data.clone(),
}),
});
}
}
anthropic_messages.push(AnthropicMessage {
role: "user".to_string(),
content: AnthropicContent::Blocks(blocks),
});
} else {
anthropic_messages.push(AnthropicMessage {
role: "user".to_string(),
content: AnthropicContent::Text(msg.content.clone()),
});
}
}
ChatRole::Assistant => {
if let Some(ref tool_calls) = msg.tool_calls {
if !tool_calls.is_empty() {
let mut blocks = Vec::new();
if !msg.content.is_empty() {
blocks.push(ContentBlock {
content_type: "text".to_string(),
text: Some(msg.content.clone()),
id: None,
name: None,
input: None,
tool_use_id: None,
content: None,
is_error: None,
source: None,
});
}
for tc in tool_calls {
let input: serde_json::Value =
serde_json::from_str(&tc.function.arguments)
.unwrap_or(serde_json::Value::Object(Default::default()));
blocks.push(ContentBlock {
content_type: "tool_use".to_string(),
text: None,
id: Some(tc.id.clone()),
name: Some(tc.function.name.clone()),
input: Some(input),
tool_use_id: None,
content: None,
is_error: None,
source: None,
});
}
anthropic_messages.push(AnthropicMessage {
role: "assistant".to_string(),
content: AnthropicContent::Blocks(blocks),
});
continue;
}
}
anthropic_messages.push(AnthropicMessage {
role: "assistant".to_string(),
content: AnthropicContent::Text(msg.content.clone()),
});
}
ChatRole::Tool => {
if let Some(tool_call_id) = &msg.tool_call_id {
anthropic_messages.push(AnthropicMessage {
role: "user".to_string(),
content: AnthropicContent::Blocks(vec![ContentBlock {
content_type: "tool_result".to_string(),
tool_use_id: Some(tool_call_id.clone()),
content: Some(msg.content.clone()),
text: None,
id: None,
name: None,
input: None,
is_error: None,
source: None,
}]),
});
} else {
anthropic_messages.push(AnthropicMessage {
role: "user".to_string(),
content: AnthropicContent::Text(msg.content.clone()),
});
}
}
ChatRole::Function => {
anthropic_messages.push(AnthropicMessage {
role: "user".to_string(),
content: AnthropicContent::Text(msg.content.clone()),
});
}
}
}
let system_prompt = if system_parts.is_empty() {
None
} else {
Some(system_parts.join("\n\n"))
};
(system_prompt, anthropic_messages)
}
/// Maps generic tool definitions onto Anthropic's tool schema.
fn convert_tools(tools: &[ToolDefinition]) -> Vec<AnthropicTool> {
    let mut converted = Vec::with_capacity(tools.len());
    for tool in tools {
        converted.push(AnthropicTool {
            name: tool.function.name.clone(),
            description: tool.function.description.clone(),
            input_schema: tool.function.parameters.clone(),
        });
    }
    converted
}
/// Converts the generic tool-choice setting into Anthropic's
/// `tool_choice` JSON: "auto", "none", "any" (required), or a specific
/// named tool.
fn convert_tool_choice(choice: &ToolChoice) -> serde_json::Value {
    match choice {
        ToolChoice::Auto(mode) => {
            let kind = if mode == "none" { "none" } else { "auto" };
            serde_json::json!({"type": kind})
        }
        ToolChoice::Required(_) => serde_json::json!({"type": "any"}),
        ToolChoice::Function { function, .. } => {
            serde_json::json!({"type": "tool", "name": function.name})
        }
    }
}
/// Flattens an Anthropic response into the provider-agnostic
/// [`LLMResponse`]: text blocks are concatenated, `tool_use` blocks
/// become [`ToolCall`]s, and usage counters are copied over.
fn parse_response(response: MessagesResponse) -> LLMResponse {
    let mut content = String::new();
    let mut tool_calls = Vec::new();
    for block in &response.content {
        match block.content_type.as_str() {
            "text" => {
                if let Some(text) = &block.text {
                    content.push_str(text);
                }
            }
            "tool_use" => {
                // A tool_use block missing id/name/input is malformed;
                // skip it rather than fabricating a call.
                if let (Some(id), Some(name), Some(input)) =
                    (&block.id, &block.name, &block.input)
                {
                    tool_calls.push(ToolCall {
                        id: id.clone(),
                        call_type: "function".to_string(),
                        function: FunctionCall {
                            name: name.clone(),
                            arguments: input.to_string(),
                        },
                        thought_signature: None,
                    });
                }
            }
            _ => {
                debug!("Unknown content block type: {}", block.content_type);
            }
        }
    }
    let prompt_tokens = response.usage.input_tokens as usize;
    let completion_tokens = response.usage.output_tokens as usize;
    let cache_hit_tokens = response.usage.cache_read_input_tokens.map(|t| t as usize);
    let mut metadata = HashMap::new();
    metadata.insert("response_id".to_string(), serde_json::json!(response.id));
    LLMResponse {
        content,
        prompt_tokens,
        completion_tokens,
        total_tokens: prompt_tokens + completion_tokens,
        model: response.model,
        finish_reason: response.stop_reason,
        tool_calls,
        metadata,
        cache_hit_tokens,
        thinking_tokens: None,
        thinking_content: None,
    }
}
/// Maps an HTTP error response onto the closest [`LlmError`] variant,
/// preferring the structured Anthropic error message when the body
/// parses as an error envelope.
fn handle_error(status: reqwest::StatusCode, body: &str) -> LlmError {
    let message = match serde_json::from_str::<AnthropicErrorResponse>(body) {
        Ok(parsed) => parsed.error.message,
        Err(_) => body.to_string(),
    };
    match status.as_u16() {
        400 => LlmError::InvalidRequest(message),
        401 => LlmError::AuthError(message),
        402 => LlmError::ApiError(format!("Billing error: {}", message)),
        403 => LlmError::AuthError(format!("Permission denied: {}", message)),
        404 => LlmError::ModelNotFound(message),
        // The API does not echo the limits back, so report zeros.
        413 => LlmError::TokenLimitExceeded { max: 0, got: 0 },
        429 => LlmError::RateLimited(message),
        500 => LlmError::ApiError(format!("Anthropic internal error: {}", message)),
        // 529 is Anthropic's "overloaded" status; treat like rate limiting.
        529 => LlmError::RateLimited(format!("Service overloaded: {}", message)),
        _ => LlmError::ApiError(format!("HTTP {}: {}", status, message)),
    }
}
/// POSTs a non-streaming Messages request and deserializes the reply.
///
/// # Errors
/// - [`LlmError::NetworkError`] on transport or deserialization failure.
/// - The mapped API error (see `handle_error`) on non-2xx statuses.
#[instrument(skip(self, request))]
async fn send_request(&self, request: &MessagesRequest) -> Result<MessagesResponse> {
    debug!("Sending request to Anthropic API: model={}", request.model);
    let http_response = self
        .client
        .post(self.endpoint())
        .headers(self.headers())
        .json(request)
        .send()
        .await
        .map_err(|e| LlmError::NetworkError(e.to_string()))?;
    let status = http_response.status();
    if !status.is_success() {
        let body = http_response
            .text()
            .await
            .unwrap_or_else(|_| "Unknown error".to_string());
        return Err(Self::handle_error(status, &body));
    }
    let body = http_response
        .text()
        .await
        .map_err(|e| LlmError::NetworkError(e.to_string()))?;
    debug!("Anthropic response received: {} bytes", body.len());
    serde_json::from_str(&body)
        .map_err(|e| LlmError::NetworkError(format!("Failed to parse response: {}", e)))
}
}
#[async_trait]
impl LLMProvider for AnthropicProvider {
/// Provider identifier used in logs and registry lookups.
fn name(&self) -> &str {
"anthropic"
}
/// Currently configured model identifier.
fn model(&self) -> &str {
&self.model
}
/// Context window (tokens) cached for the configured model.
fn max_context_length(&self) -> usize {
self.max_context_length
}
/// Completes a single prompt with default options; thin wrapper over
/// `complete_with_options`.
#[instrument(skip(self, prompt))]
async fn complete(&self, prompt: &str) -> Result<LLMResponse> {
self.complete_with_options(prompt, &CompletionOptions::default())
.await
}
#[instrument(skip(self, prompt, options))]
async fn complete_with_options(
&self,
prompt: &str,
options: &CompletionOptions,
) -> Result<LLMResponse> {
let mut messages = Vec::new();
if let Some(system) = &options.system_prompt {
messages.push(ChatMessage::system(system));
}
messages.push(ChatMessage::user(prompt));
self.chat(&messages, Some(options)).await
}
/// Runs a multi-turn conversation without tools.
#[instrument(skip(self, messages, options))]
async fn chat(
    &self,
    messages: &[ChatMessage],
    options: Option<&CompletionOptions>,
) -> Result<LLMResponse> {
    let opts = options.cloned().unwrap_or_default();
    let (system, anthropic_messages) = Self::convert_messages(messages);
    let request = MessagesRequest {
        model: self.model.clone(),
        // max_tokens is mandatory for this API; default to 4096.
        max_tokens: opts.max_tokens.unwrap_or(4096) as u32,
        messages: anthropic_messages,
        system,
        stream: None,
        tools: None,
        tool_choice: None,
        temperature: opts.temperature,
        top_p: opts.top_p,
        stop_sequences: opts.stop.clone(),
    };
    self.send_request(&request).await.map(Self::parse_response)
}
#[instrument(skip(self, prompt))]
async fn stream(&self, prompt: &str) -> Result<BoxStream<'static, Result<String>>> {
let messages = vec![ChatMessage::user(prompt)];
let (system, anthropic_messages) = Self::convert_messages(&messages);
let request = MessagesRequest {
model: self.model.clone(),
max_tokens: 4096,
messages: anthropic_messages,
system,
stream: Some(true),
tools: None,
tool_choice: None,
temperature: None,
top_p: None,
stop_sequences: None,
};
let response = self
.client
.post(self.endpoint())
.headers(self.headers())
.json(&request)
.send()
.await
.map_err(|e| LlmError::NetworkError(e.to_string()))?;
if !response.status().is_success() {
let status = response.status();
let body = response.text().await.unwrap_or_default();
return Err(Self::handle_error(status, &body));
}
let mut line_buffer = String::new();
let stream = response
.bytes_stream()
.map(move |chunk| {
let chunk = chunk.map_err(|e| LlmError::NetworkError(e.to_string()))?;
let text = String::from_utf8_lossy(&chunk);
line_buffer.push_str(&text);
let mut result = String::new();
while let Some(newline_idx) = line_buffer.find('\n') {
let line = line_buffer[..newline_idx].trim().to_string();
line_buffer.drain(..=newline_idx);
if line.is_empty() || line.starts_with(':') {
continue;
}
if let Some(data) = line.strip_prefix("data: ") {
if let Ok(event) = serde_json::from_str::<StreamEvent>(data) {
match event {
StreamEvent::ContentBlockDelta { delta, .. } => {
if delta.delta_type == "text_delta" {
if let Some(text) = delta.text {
result.push_str(&text);
}
}
}
StreamEvent::Error { error } => {
warn!("Stream error: {}", error.message);
}
_ => {}
}
}
}
}
Ok(result)
})
.filter(|r| {
let keep = match r {
Ok(s) => !s.is_empty(),
Err(_) => true,
};
futures::future::ready(keep)
});
Ok(stream.boxed())
}
/// SSE streaming is supported (see `stream`).
fn supports_streaming(&self) -> bool {
true
}
/// Anthropic tool use maps onto function calling.
fn supports_function_calling(&self) -> bool {
true
}
/// Tool-call deltas can be streamed (see `chat_with_tools_stream`).
fn supports_tool_streaming(&self) -> bool {
true
}
/// Runs a conversation with tool definitions and an optional
/// tool-choice constraint.
#[instrument(skip(self, messages, tools, options))]
async fn chat_with_tools(
    &self,
    messages: &[ChatMessage],
    tools: &[ToolDefinition],
    tool_choice: Option<ToolChoice>,
    options: Option<&CompletionOptions>,
) -> Result<LLMResponse> {
    let opts = options.cloned().unwrap_or_default();
    let (system, anthropic_messages) = Self::convert_messages(messages);
    // Omit the tools field entirely when no tools are supplied.
    let tools_payload = if tools.is_empty() {
        None
    } else {
        Some(Self::convert_tools(tools))
    };
    let request = MessagesRequest {
        model: self.model.clone(),
        max_tokens: opts.max_tokens.unwrap_or(4096) as u32,
        messages: anthropic_messages,
        system,
        stream: None,
        tools: tools_payload,
        tool_choice: tool_choice.as_ref().map(Self::convert_tool_choice),
        temperature: opts.temperature,
        top_p: opts.top_p,
        stop_sequences: opts.stop.clone(),
    };
    let response = self.send_request(&request).await?;
    Ok(Self::parse_response(response))
}
/// Streams a tool-enabled conversation as [`StreamChunk`] items.
///
/// Emits:
/// - `Content` for text deltas,
/// - `ToolCallDelta` for tool-use starts and argument JSON fragments,
/// - `ThinkingContent` for thinking deltas,
/// - exactly one `Finished` chunk (guarded by `finished_emitted`)
///   carrying usage accumulated from `message_start`/`message_delta`.
///
/// API `error` events abort the stream with `Err(LlmError::ApiError)`.
#[instrument(skip(self, messages, tools, options))]
async fn chat_with_tools_stream(
&self,
messages: &[ChatMessage],
tools: &[ToolDefinition],
tool_choice: Option<ToolChoice>,
options: Option<&CompletionOptions>,
) -> Result<BoxStream<'static, Result<StreamChunk>>> {
let (system, anthropic_messages) = Self::convert_messages(messages);
let anthropic_tools = Self::convert_tools(tools);
let options = options.cloned().unwrap_or_default();
let request = MessagesRequest {
model: self.model.clone(),
// max_tokens is mandatory for this API; default to 4096.
max_tokens: options.max_tokens.unwrap_or(4096) as u32,
messages: anthropic_messages,
system,
stream: Some(true),
// Omit the field entirely when the caller passed no tools.
tools: if tools.is_empty() {
None
} else {
Some(anthropic_tools)
},
tool_choice: tool_choice.map(|tc| Self::convert_tool_choice(&tc)),
temperature: options.temperature,
top_p: options.top_p,
stop_sequences: options.stop.clone(),
};
let response = self
.client
.post(self.endpoint())
.headers(self.headers())
.json(&request)
.send()
.await
.map_err(|e| LlmError::NetworkError(e.to_string()))?;
if !response.status().is_success() {
let status = response.status();
let body = response.text().await.unwrap_or_default();
return Err(Self::handle_error(status, &body));
}
// State captured by the SSE-parsing closure below.
let mut line_buffer = String::new();
// Guarantees a single Finished chunk even when both message_delta
// (with stop_reason) and message_stop arrive.
let mut finished_emitted = false;
let mut prompt_tokens = 0usize;
let mut cache_hit_tokens = None;
let mut latest_output_tokens = 0usize;
let stream = response
.bytes_stream()
// Each network chunk may contain zero or more complete SSE lines;
// translate them into a batch of StreamChunks.
.map(move |chunk| -> Result<Vec<StreamChunk>> {
let chunk = chunk.map_err(|e| LlmError::NetworkError(e.to_string()))?;
let text = String::from_utf8_lossy(&chunk);
line_buffer.push_str(&text);
let mut chunks: Vec<StreamChunk> = Vec::new();
// Consume complete lines; partial trailing data stays buffered.
while let Some(newline_idx) = line_buffer.find('\n') {
let line = line_buffer[..newline_idx].trim().to_string();
line_buffer.drain(..=newline_idx);
// Skip SSE keep-alives and comment lines.
if line.is_empty() || line.starts_with(':') {
continue;
}
if let Some(data) = line.strip_prefix("data: ") {
if let Ok(event) = serde_json::from_str::<StreamEvent>(data) {
match event {
// Carries prompt-side usage for the whole message.
StreamEvent::MessageStart { message } => {
prompt_tokens = message.usage.input_tokens as usize;
cache_hit_tokens =
message.usage.cache_read_input_tokens.map(|t| t as usize);
latest_output_tokens = message.usage.output_tokens as usize;
}
StreamEvent::ContentBlockStart {
index,
content_block,
} => {
// Announce a new tool call; its arguments follow
// as input_json_delta fragments on the same index.
if content_block.content_type == "tool_use" {
if let (Some(id), Some(name)) =
(content_block.id, content_block.name)
{
chunks.push(StreamChunk::ToolCallDelta {
index,
id: Some(id),
function_name: Some(name),
function_arguments: None,
thought_signature: None,
});
}
}
}
StreamEvent::ContentBlockDelta { index, delta } => {
match delta.delta_type.as_str() {
"text_delta" => {
if let Some(text) = delta.text {
chunks.push(StreamChunk::Content(text));
}
}
// Partial JSON of the current tool call's arguments.
"input_json_delta" => {
if let Some(json) = delta.partial_json {
chunks.push(StreamChunk::ToolCallDelta {
index,
id: None,
function_name: None,
function_arguments: Some(json),
thought_signature: None,
});
}
}
"thinking_delta" => {
if let Some(thinking) = delta.thinking {
chunks.push(StreamChunk::ThinkingContent {
text: thinking,
tokens_used: None,
budget_total: None,
});
}
}
_ => {}
}
}
// No chunk to emit when a block closes.
StreamEvent::ContentBlockStop { .. } => {
}
// Updates output-token usage and may carry the stop reason.
StreamEvent::MessageDelta { delta, usage } => {
if let Some(usage) = usage {
latest_output_tokens = usage.output_tokens as usize;
}
if let Some(reason) = delta.stop_reason {
if !finished_emitted {
finished_emitted = true;
let mut usage = StreamUsage::new(
prompt_tokens,
latest_output_tokens,
);
if let Some(tokens) = cache_hit_tokens {
usage = usage.with_cache_hit_tokens(tokens);
}
chunks.push(StreamChunk::Finished {
reason,
ttft_ms: None,
usage: Some(usage),
});
}
}
}
// Fallback terminator when no stop_reason was seen.
StreamEvent::MessageStop => {
if !finished_emitted {
finished_emitted = true;
let mut usage =
StreamUsage::new(prompt_tokens, latest_output_tokens);
if let Some(tokens) = cache_hit_tokens {
usage = usage.with_cache_hit_tokens(tokens);
}
chunks.push(StreamChunk::Finished {
reason: "stop".to_string(),
ttft_ms: None,
usage: Some(usage),
});
}
}
// Surface API stream errors to the consumer.
StreamEvent::Error { error } => {
return Err(LlmError::ApiError(error.message));
}
_ => {}
}
}
}
}
Ok(chunks)
})
// Flatten each batch into individual stream items.
.flat_map(|result: Result<Vec<StreamChunk>>| {
futures::stream::iter(match result {
Ok(chunks) => chunks.into_iter().map(Ok).collect::<Vec<_>>(),
Err(e) => vec![Err(e)],
})
});
Ok(stream.boxed())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_new_provider() {
let provider = AnthropicProvider::new("test-api-key");
assert_eq!(provider.name(), "anthropic");
assert_eq!(provider.model(), DEFAULT_MODEL);
}
#[test]
fn test_with_model() {
let provider =
AnthropicProvider::new("test-api-key").with_model("claude-3-5-sonnet-20241022");
assert_eq!(provider.model(), "claude-3-5-sonnet-20241022");
assert_eq!(provider.max_context_length(), 200_000);
}
#[test]
fn test_context_length_for_model() {
assert_eq!(
AnthropicProvider::context_length_for_model("claude-opus-4-6"),
200_000
);
assert_eq!(
AnthropicProvider::context_length_for_model("claude-sonnet-4-6"),
200_000
);
assert_eq!(
AnthropicProvider::context_length_for_model("claude-opus-4-5-20250929"),
200_000
);
assert_eq!(
AnthropicProvider::context_length_for_model("claude-sonnet-4-5-20250929"),
200_000
);
assert_eq!(
AnthropicProvider::context_length_for_model("claude-haiku-4-5"),
200_000
);
assert_eq!(
AnthropicProvider::context_length_for_model("claude-3-5-sonnet-20241022"),
200_000
);
assert_eq!(
AnthropicProvider::context_length_for_model("claude-2.1"),
100_000
);
}
#[test]
fn test_convert_messages_with_system() {
let messages = vec![
ChatMessage::system("You are a helpful assistant."),
ChatMessage::user("Hello!"),
ChatMessage::assistant("Hi there!"),
];
let (system, anthropic_messages) = AnthropicProvider::convert_messages(&messages);
assert_eq!(system, Some("You are a helpful assistant.".to_string()));
assert_eq!(anthropic_messages.len(), 2);
assert_eq!(anthropic_messages[0].role, "user");
assert_eq!(anthropic_messages[1].role, "assistant");
}
#[test]
fn test_convert_messages_without_system() {
let messages = vec![ChatMessage::user("Hello!")];
let (system, anthropic_messages) = AnthropicProvider::convert_messages(&messages);
assert_eq!(system, None);
assert_eq!(anthropic_messages.len(), 1);
}
#[test]
fn test_convert_tools() {
use crate::traits::FunctionDefinition;
let tools = vec![ToolDefinition {
tool_type: "function".to_string(),
function: FunctionDefinition {
name: "get_weather".to_string(),
description: "Get the weather for a location".to_string(),
parameters: serde_json::json!({
"type": "object",
"properties": {
"location": {"type": "string"}
}
}),
strict: None,
},
}];
let anthropic_tools = AnthropicProvider::convert_tools(&tools);
assert_eq!(anthropic_tools.len(), 1);
assert_eq!(anthropic_tools[0].name, "get_weather");
assert_eq!(
anthropic_tools[0].description,
"Get the weather for a location"
);
}
#[test]
fn test_convert_tool_choice() {
let auto = AnthropicProvider::convert_tool_choice(&ToolChoice::auto());
assert_eq!(auto, serde_json::json!({"type": "auto"}));
let required = AnthropicProvider::convert_tool_choice(&ToolChoice::required());
assert_eq!(required, serde_json::json!({"type": "any"}));
let none = AnthropicProvider::convert_tool_choice(&ToolChoice::none());
assert_eq!(none, serde_json::json!({"type": "none"}));
let specific = AnthropicProvider::convert_tool_choice(&ToolChoice::function("my_tool"));
assert_eq!(
specific,
serde_json::json!({"type": "tool", "name": "my_tool"})
);
}
#[test]
fn test_headers() {
let provider = AnthropicProvider::new("test-key");
let headers = provider.headers();
assert!(headers.contains_key("x-api-key"));
assert!(headers.contains_key("anthropic-version"));
assert!(headers.contains_key(reqwest::header::CONTENT_TYPE));
}
#[test]
fn test_endpoint() {
let provider = AnthropicProvider::new("test-key");
assert_eq!(provider.endpoint(), "https://api.anthropic.com/v1/messages");
let custom = provider.with_base_url("https://custom.api.com");
assert_eq!(custom.endpoint(), "https://custom.api.com/v1/messages");
}
#[test]
fn test_parse_response() {
let response = MessagesResponse {
id: "msg_123".to_string(),
response_type: "message".to_string(),
role: "assistant".to_string(),
content: vec![ContentBlock {
content_type: "text".to_string(),
text: Some("Hello, world!".to_string()),
id: None,
name: None,
input: None,
tool_use_id: None,
content: None,
is_error: None,
source: None,
}],
model: "claude-3-5-sonnet-20241022".to_string(),
stop_reason: Some("end_turn".to_string()),
usage: AnthropicUsage {
input_tokens: 10,
output_tokens: 5,
cache_creation_input_tokens: None,
cache_read_input_tokens: None,
},
};
let llm_response = AnthropicProvider::parse_response(response);
assert_eq!(llm_response.content, "Hello, world!");
assert_eq!(llm_response.prompt_tokens, 10);
assert_eq!(llm_response.completion_tokens, 5);
assert_eq!(llm_response.total_tokens, 15);
assert_eq!(llm_response.model, "claude-3-5-sonnet-20241022");
assert_eq!(llm_response.finish_reason, Some("end_turn".to_string()));
assert!(llm_response.tool_calls.is_empty());
}
#[test]
fn test_parse_response_with_tool_calls() {
let response = MessagesResponse {
id: "msg_456".to_string(),
response_type: "message".to_string(),
role: "assistant".to_string(),
content: vec![ContentBlock {
content_type: "tool_use".to_string(),
text: None,
id: Some("tool_1".to_string()),
name: Some("get_weather".to_string()),
input: Some(serde_json::json!({"location": "Paris"})),
tool_use_id: None,
content: None,
is_error: None,
source: None,
}],
model: "claude-3-5-sonnet-20241022".to_string(),
stop_reason: Some("tool_use".to_string()),
usage: AnthropicUsage {
input_tokens: 20,
output_tokens: 10,
cache_creation_input_tokens: None,
cache_read_input_tokens: None,
},
};
let llm_response = AnthropicProvider::parse_response(response);
assert_eq!(llm_response.tool_calls.len(), 1);
assert_eq!(llm_response.tool_calls[0].id, "tool_1");
assert_eq!(llm_response.tool_calls[0].name(), "get_weather");
assert!(llm_response.tool_calls[0].arguments().contains("Paris"));
}
#[tokio::test]
#[ignore]
async fn test_chat_completion_live() {
let provider = AnthropicProvider::from_env().expect("ANTHROPIC_API_KEY not set");
let messages = vec![ChatMessage::user("Say 'hello' and nothing else.")];
let response = provider.chat(&messages, None).await;
assert!(response.is_ok());
let response = response.unwrap();
assert!(!response.content.is_empty());
assert!(response.prompt_tokens > 0);
assert!(response.completion_tokens > 0);
}
#[test]
fn test_for_ollama_defaults() {
let provider = AnthropicProvider::for_ollama();
assert_eq!(provider.base_url, "http://localhost:11434");
assert_eq!(provider.api_key, "ollama");
assert_eq!(provider.model, "qwen3-coder");
}
#[test]
fn test_for_ollama_with_model() {
let provider = AnthropicProvider::for_ollama_with_model("gpt-oss:20b");
assert_eq!(provider.model, "gpt-oss:20b");
assert_eq!(provider.base_url, "http://localhost:11434");
assert_eq!(provider.api_key, "ollama");
}
#[test]
fn test_for_ollama_at_custom_host() {
let provider = AnthropicProvider::for_ollama_at("http://192.168.1.100:11434", "llama3");
assert_eq!(provider.base_url, "http://192.168.1.100:11434");
assert_eq!(provider.model, "llama3");
assert_eq!(provider.api_key, "ollama");
}
#[test]
fn test_for_ollama_endpoint() {
let provider = AnthropicProvider::for_ollama();
assert_eq!(provider.endpoint(), "http://localhost:11434/v1/messages");
}
#[test]
fn test_with_base_url_chain() {
let provider = AnthropicProvider::new("test-key")
.with_base_url("http://localhost:11434")
.with_model("qwen3-coder");
assert_eq!(provider.base_url, "http://localhost:11434");
assert_eq!(provider.model, "qwen3-coder");
assert_eq!(provider.api_key, "test-key");
}
#[tokio::test]
#[ignore]
async fn test_ollama_chat_completion_live() {
let provider = AnthropicProvider::for_ollama();
let messages = vec![ChatMessage::user("Say 'hello' and nothing else.")];
let response = provider.chat(&messages, None).await;
assert!(response.is_ok(), "Ollama chat failed: {:?}", response.err());
let response = response.unwrap();
assert!(!response.content.is_empty());
}
#[test]
fn test_convert_messages_text_only() {
let messages = vec![ChatMessage::user("Hello, world!")];
let (_, anthropic_messages) = AnthropicProvider::convert_messages(&messages);
assert_eq!(anthropic_messages.len(), 1);
let json = serde_json::to_value(&anthropic_messages[0]).unwrap();
assert_eq!(json["content"], "Hello, world!");
}
#[test]
fn test_convert_messages_with_images() {
use crate::traits::ImageData;
let images = vec![ImageData::new("base64data", "image/png")];
let messages = vec![ChatMessage::user_with_images("What's this?", images)];
let (_, anthropic_messages) = AnthropicProvider::convert_messages(&messages);
assert_eq!(anthropic_messages.len(), 1);
let json = serde_json::to_value(&anthropic_messages[0]).unwrap();
let content = &json["content"];
assert!(content.is_array(), "Content should be an array for images");
assert_eq!(content.as_array().unwrap().len(), 2);
assert_eq!(content[0]["type"], "text");
assert_eq!(content[0]["text"], "What's this?");
assert_eq!(content[1]["type"], "image");
assert!(
content[1]["source"].is_object(),
"Image should have source object"
);
assert_eq!(content[1]["source"]["type"], "base64");
assert_eq!(content[1]["source"]["media_type"], "image/png");
assert_eq!(content[1]["source"]["data"], "base64data");
}
// Each attached image gets its own block, in attachment order, after the
// leading text block.
#[test]
fn test_convert_messages_multiple_images() {
    use crate::traits::ImageData;
    let msg = ChatMessage::user_with_images(
        "Compare these",
        vec![
            ImageData::new("img1data", "image/png"),
            ImageData::new("img2data", "image/jpeg"),
        ],
    );
    let (_, converted) = AnthropicProvider::convert_messages(&[msg]);
    let json = serde_json::to_value(&converted[0]).unwrap();
    let content = &json["content"];
    assert_eq!(content.as_array().unwrap().len(), 3);
    assert_eq!(content[1]["source"]["media_type"], "image/png");
    assert_eq!(content[2]["source"]["media_type"], "image/jpeg");
}
// A thinking_delta event must populate only the `thinking` field of DeltaBlock.
#[test]
fn test_delta_block_parses_thinking_delta() {
    let raw = r#"{"type":"thinking_delta","thinking":"Let me analyze step by step..."}"#;
    let delta: DeltaBlock = serde_json::from_str(raw).unwrap();
    assert_eq!(delta.delta_type, "thinking_delta");
    assert_eq!(
        delta.thinking.as_deref(),
        Some("Let me analyze step by step...")
    );
    assert!(delta.text.is_none());
    assert!(delta.partial_json.is_none());
}
// A text_delta event must populate only the `text` field of DeltaBlock.
#[test]
fn test_delta_block_parses_text_delta() {
    let raw = r#"{"type":"text_delta","text":"Hello world"}"#;
    let delta: DeltaBlock = serde_json::from_str(raw).unwrap();
    assert_eq!(delta.delta_type, "text_delta");
    assert_eq!(delta.text.as_deref(), Some("Hello world"));
    assert!(delta.thinking.is_none());
}
// An input_json_delta event carries an incremental tool-call JSON fragment in
// `partial_json`; the other delta fields stay unset.
#[test]
fn test_delta_block_parses_input_json_delta() {
    let raw = r#"{"type":"input_json_delta","partial_json":"{\"name\":"}"#;
    let delta: DeltaBlock = serde_json::from_str(raw).unwrap();
    assert_eq!(delta.delta_type, "input_json_delta");
    assert_eq!(delta.partial_json.as_deref(), Some("{\"name\":"));
    assert!(delta.thinking.is_none());
    assert!(delta.text.is_none());
}
// Pins the API endpoint, version header value, and default model id so an
// accidental edit to these constants fails loudly in CI.
#[test]
fn test_constants() {
    assert_eq!(ANTHROPIC_API_BASE, "https://api.anthropic.com");
    assert_eq!(ANTHROPIC_API_VERSION, "2023-06-01");
    assert_eq!(DEFAULT_MODEL, "claude-sonnet-4-6");
}
// The provider must advertise SSE streaming support.
#[test]
fn test_supports_streaming() {
    assert!(AnthropicProvider::new("key").supports_streaming());
}
// The provider must advertise streaming of tool-call deltas.
#[test]
fn test_supports_tool_streaming() {
    assert!(AnthropicProvider::new("key").supports_tool_streaming());
}
// AnthropicUsage must deserialize the optional prompt-cache counters
// (cache_creation_input_tokens / cache_read_input_tokens) when the API
// response includes them alongside the standard token counts.
#[test]
fn test_anthropic_usage_with_cache_tokens() {
    let json = r#"{
"input_tokens": 100,
"output_tokens": 50,
"cache_creation_input_tokens": 25,
"cache_read_input_tokens": 10
}"#;
    let usage: AnthropicUsage = serde_json::from_str(json).unwrap();
    // Required counters.
    assert_eq!(usage.input_tokens, 100);
    assert_eq!(usage.output_tokens, 50);
    // Optional cache counters surface as Some(_) when present in the payload.
    assert_eq!(usage.cache_creation_input_tokens, Some(25));
    assert_eq!(usage.cache_read_input_tokens, Some(10));
}
// Anthropic's error envelope ({"type":"error","error":{...}}) must map onto
// AnthropicErrorResponse with both the outer and inner type tags and the
// human-readable message preserved.
#[test]
fn test_anthropic_error_response_deserialization() {
    let json = r#"{
"type": "error",
"error": {
"type": "invalid_request_error",
"message": "messages: Required field missing"
}
}"#;
    let error: AnthropicErrorResponse = serde_json::from_str(json).unwrap();
    assert_eq!(error.error_type, "error");
    assert_eq!(error.error.error_type, "invalid_request_error");
    assert_eq!(error.error.message, "messages: Required field missing");
}
// A message_start SSE event must deserialize into StreamEvent::MessageStart
// with the embedded message metadata (id, role) accessible.
#[test]
fn test_stream_event_message_start() {
    let json = r#"{
"type": "message_start",
"message": {
"id": "msg_123",
"type": "message",
"role": "assistant",
"content": [],
"model": "claude-3-5-sonnet",
"stop_reason": null,
"usage": {"input_tokens": 10, "output_tokens": 0}
}
}"#;
    let event: StreamEvent = serde_json::from_str(json).unwrap();
    match event {
        StreamEvent::MessageStart { message } => {
            assert_eq!(message.id, "msg_123");
            assert_eq!(message.role, "assistant");
        }
        _ => panic!("Expected MessageStart event"),
    }
}
// A bare `ping` keep-alive event must deserialize to StreamEvent::Ping.
#[test]
fn test_stream_event_ping() {
    let json = r#"{"type": "ping"}"#;
    let event: StreamEvent = serde_json::from_str(json).unwrap();
    // BUG FIX: the original called `matches!(event, StreamEvent::Ping);` and
    // discarded the resulting bool, so the test could never fail on a wrong
    // variant. Wrap it in assert! so a mismatch actually fails the test.
    assert!(matches!(event, StreamEvent::Ping), "Expected Ping event");
}
// ImageSource's `source_type` field must serialize under the wire name "type"
// (via the serde rename), with media_type and data passed through verbatim.
#[test]
fn test_image_source_serialization() {
    let source = ImageSource {
        source_type: "base64".into(),
        media_type: "image/png".into(),
        data: "aGVsbG8=".into(),
    };
    let json = serde_json::to_value(&source).unwrap();
    assert_eq!(json["type"], "base64");
    assert_eq!(json["media_type"], "image/png");
    assert_eq!(json["data"], "aGVsbG8=");
}
// A tool_use ContentBlock serializes its id, name, and input object; all the
// None-valued fields are skipped on the wire.
#[test]
fn test_content_block_tool_use() {
    let block = ContentBlock {
        content_type: "tool_use".into(),
        id: Some("tool_123".into()),
        name: Some("get_weather".into()),
        input: Some(serde_json::json!({"location": "NYC"})),
        text: None,
        tool_use_id: None,
        content: None,
        is_error: None,
        source: None,
    };
    let json = serde_json::to_value(&block).unwrap();
    assert_eq!(json["type"], "tool_use");
    assert_eq!(json["id"], "tool_123");
    assert_eq!(json["name"], "get_weather");
    assert_eq!(json["input"]["location"], "NYC");
}
// Anthropic takes a single top-level system prompt, so successive system
// messages are joined with a blank line and removed from the message list.
#[test]
fn test_multiple_system_messages_are_concatenated() {
    let messages = vec![
        ChatMessage::system("You are a helpful assistant."),
        ChatMessage::system("Always respond in JSON."),
        ChatMessage::user("Hello!"),
    ];
    let (system, converted) = AnthropicProvider::convert_messages(&messages);
    assert_eq!(
        system.as_deref(),
        Some("You are a helpful assistant.\n\nAlways respond in JSON."),
        "Multiple system messages must be joined with \\n\\n"
    );
    assert_eq!(converted.len(), 1);
    assert_eq!(converted[0].role, "user");
}
// An assistant turn that issued tool calls must serialize as a content block
// array: its text first, then one tool_use block per call, with the JSON
// arguments string decoded into the block's `input` object.
#[test]
fn test_assistant_message_with_tool_calls_serializes_as_blocks() {
    use crate::traits::{FunctionCall, ToolCall};
    let mut msg = ChatMessage::assistant("Let me check the weather.");
    msg.tool_calls = Some(vec![ToolCall {
        id: "toolu_01".to_string(),
        call_type: "function".to_string(),
        function: FunctionCall {
            name: "get_weather".to_string(),
            arguments: r#"{"location":"Paris"}"#.to_string(),
        },
        thought_signature: None,
    }]);
    let (_, converted) = AnthropicProvider::convert_messages(&[msg]);
    assert_eq!(converted.len(), 1);
    let json = serde_json::to_value(&converted[0]).unwrap();
    assert!(
        json["content"].is_array(),
        "Assistant message with tool calls must serialize as a block array"
    );
    let blocks = json["content"].as_array().unwrap();
    assert_eq!(blocks.len(), 2, "Expect one text block + one tool_use block");
    assert_eq!(blocks[0]["type"], "text");
    assert_eq!(blocks[0]["text"], "Let me check the weather.");
    assert_eq!(blocks[1]["type"], "tool_use");
    assert_eq!(blocks[1]["id"], "toolu_01");
    assert_eq!(blocks[1]["name"], "get_weather");
    assert_eq!(blocks[1]["input"]["location"], "Paris");
}
// Without tool calls, assistant content stays a plain JSON string.
#[test]
fn test_assistant_message_without_tool_calls_serializes_as_text() {
    let (_, converted) =
        AnthropicProvider::convert_messages(&[ChatMessage::assistant("Hello!")]);
    let json = serde_json::to_value(&converted[0]).unwrap();
    assert_eq!(
        json["content"], "Hello!",
        "Plain assistant message must serialize as a JSON string"
    );
}
// A failed tool_result block must carry `is_error: true` alongside its
// tool_use_id and payload on the wire.
#[test]
fn test_content_block_is_error_field() {
    let block = ContentBlock {
        content_type: "tool_result".into(),
        tool_use_id: Some("toolu_01".into()),
        content: Some("File not found".into()),
        is_error: Some(true),
        text: None,
        id: None,
        name: None,
        input: None,
        source: None,
    };
    let json = serde_json::to_value(&block).unwrap();
    assert_eq!(json["type"], "tool_result");
    assert_eq!(json["tool_use_id"], "toolu_01");
    assert_eq!(json["content"], "File not found");
    assert_eq!(json["is_error"], true);
}
// skip_serializing_if must drop a None `is_error` from the JSON entirely
// rather than emitting `"is_error": null`.
#[test]
fn test_content_block_is_error_not_serialized_when_none() {
    let block = ContentBlock {
        content_type: "tool_result".into(),
        tool_use_id: Some("toolu_01".into()),
        content: Some("42°C".into()),
        is_error: None,
        text: None,
        id: None,
        name: None,
        input: None,
        source: None,
    };
    let json = serde_json::to_value(&block).unwrap();
    assert!(
        json.get("is_error").is_none(),
        "is_error must be omitted from JSON when None"
    );
}
// Explicit JSON nulls for the optional fields must deserialize as None
// rather than causing a parse error.
#[test]
fn test_delta_block_no_spurious_serialize_attrs() {
    let raw = r#"{"type":"text_delta","text":"hello","partial_json":null,"thinking":null}"#;
    let delta: DeltaBlock = serde_json::from_str(raw).unwrap();
    assert_eq!(delta.delta_type, "text_delta");
    assert_eq!(delta.text.as_deref(), Some("hello"));
    assert!(delta.partial_json.is_none());
    assert!(delta.thinking.is_none());
}
// A signature_delta event carries the thinking-block signature string.
#[test]
fn test_delta_block_parses_signature_delta() {
    let raw = r#"{"type":"signature_delta","signature":"EqQBCgIYAhIM1gbcDa9GJwZA2b3hGg=="}"#;
    let delta: DeltaBlock = serde_json::from_str(raw).unwrap();
    assert_eq!(delta.delta_type, "signature_delta");
    assert_eq!(
        delta.signature.as_deref(),
        Some("EqQBCgIYAhIM1gbcDa9GJwZA2b3hGg==")
    );
}
// The provider must advertise tool/function-calling support.
#[test]
fn test_supports_function_calling() {
    assert!(
        AnthropicProvider::new("key").supports_function_calling(),
        "AnthropicProvider must advertise function-calling support"
    );
}
// HTTP 401 with an authentication_error body maps to LlmError::AuthError.
#[test]
fn test_handle_error_401() {
    let body = r#"{"type":"error","error":{"type":"authentication_error","message":"Invalid API key"}}"#;
    let err = AnthropicProvider::handle_error(reqwest::StatusCode::UNAUTHORIZED, body);
    assert!(matches!(err, LlmError::AuthError(_)));
}
// HTTP 400 with an invalid_request_error body maps to LlmError::InvalidRequest.
#[test]
fn test_handle_error_400() {
    let body = r#"{"type":"error","error":{"type":"invalid_request_error","message":"Bad request"}}"#;
    let err = AnthropicProvider::handle_error(reqwest::StatusCode::BAD_REQUEST, body);
    assert!(matches!(err, LlmError::InvalidRequest(_)));
}
// HTTP 402 has no dedicated variant; it maps to ApiError whose message
// mentions billing.
#[test]
fn test_handle_error_402() {
    let body = r#"{"type":"error","error":{"type":"billing_error","message":"Payment required"}}"#;
    match AnthropicProvider::handle_error(reqwest::StatusCode::PAYMENT_REQUIRED, body) {
        LlmError::ApiError(msg) => assert!(msg.contains("Billing error")),
        other => panic!("expected ApiError, got {:?}", other),
    }
}
// HTTP 403 (permission_error) maps to AuthError with a "Permission denied"
// message, distinguishing it from a bad-key 401.
#[test]
fn test_handle_error_403() {
    let body = r#"{"type":"error","error":{"type":"permission_error","message":"No permission"}}"#;
    match AnthropicProvider::handle_error(reqwest::StatusCode::FORBIDDEN, body) {
        LlmError::AuthError(msg) => assert!(msg.contains("Permission denied")),
        other => panic!("expected AuthError, got {:?}", other),
    }
}
// HTTP 404 with a not_found_error body maps to LlmError::ModelNotFound.
#[test]
fn test_handle_error_404() {
    let body = r#"{"type":"error","error":{"type":"not_found_error","message":"Model not found"}}"#;
    let err = AnthropicProvider::handle_error(reqwest::StatusCode::NOT_FOUND, body);
    assert!(matches!(err, LlmError::ModelNotFound(_)));
}
// HTTP 413 (request_too_large) maps to LlmError::TokenLimitExceeded.
#[test]
fn test_handle_error_413() {
    let body = r#"{"type":"error","error":{"type":"request_too_large","message":"Request too large"}}"#;
    assert!(matches!(
        AnthropicProvider::handle_error(reqwest::StatusCode::PAYLOAD_TOO_LARGE, body),
        LlmError::TokenLimitExceeded { .. }
    ));
}
// HTTP 429 (rate_limit_error) maps to LlmError::RateLimited.
#[test]
fn test_handle_error_429() {
    let body = r#"{"type":"error","error":{"type":"rate_limit_error","message":"Too many requests"}}"#;
    assert!(matches!(
        AnthropicProvider::handle_error(reqwest::StatusCode::TOO_MANY_REQUESTS, body),
        LlmError::RateLimited(_)
    ));
}
// HTTP 500 (api_error) maps to ApiError whose message flags an Anthropic
// internal failure.
#[test]
fn test_handle_error_500() {
    let body = r#"{"type":"error","error":{"type":"api_error","message":"Internal error"}}"#;
    match AnthropicProvider::handle_error(reqwest::StatusCode::INTERNAL_SERVER_ERROR, body) {
        LlmError::ApiError(msg) => assert!(msg.contains("Anthropic internal error")),
        other => panic!("expected ApiError, got {:?}", other),
    }
}
// Anthropic's nonstandard 529 "overloaded" status must be treated as
// RateLimited so callers back off and retry.
#[test]
fn test_handle_error_529_overloaded() {
    let body = r#"{"type":"error","error":{"type":"overloaded_error","message":"Overloaded"}}"#;
    let status = reqwest::StatusCode::from_u16(529).expect("529 is a valid status code");
    match AnthropicProvider::handle_error(status, body) {
        LlmError::RateLimited(msg) => assert!(msg.contains("overloaded")),
        other => panic!("529 overloaded_error should map to RateLimited, got {:?}", other),
    }
}
// A non-JSON error body must still produce an ApiError rather than a parse
// failure — proxies and gateways often return plain-text bodies.
#[test]
fn test_handle_error_fallback_non_json_body() {
    let err = AnthropicProvider::handle_error(
        reqwest::StatusCode::INTERNAL_SERVER_ERROR,
        "Service temporarily unavailable",
    );
    assert!(matches!(err, LlmError::ApiError(_)));
}
// Exercises the SSE line-buffering pattern used by the streaming path: a
// `data:` line split across two network chunks must only parse once the
// terminating newline arrives.
#[test]
fn test_sse_line_buffer_logic() {
    let first = "data: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"hel";
    let second = "lo\"}}\n";
    let mut buffer = String::from(first);
    // No newline yet, so there is no complete line to parse.
    assert!(buffer.find('\n').is_none());
    buffer.push_str(second);
    let newline_idx = buffer.find('\n').expect("line is complete after second chunk");
    let line = buffer[..newline_idx].trim().to_string();
    buffer.drain(..=newline_idx);
    let payload = line.strip_prefix("data: ").unwrap();
    match serde_json::from_str::<StreamEvent>(payload).unwrap() {
        StreamEvent::ContentBlockDelta { delta, .. } => {
            assert_eq!(delta.delta_type, "text_delta");
            assert_eq!(delta.text.as_deref(), Some("hello"));
        }
        _ => panic!("Expected ContentBlockDelta"),
    }
}
// Two complete SSE `data:` lines delivered in a single chunk must yield two
// parsed events, in order.
#[test]
fn test_sse_multiple_events_in_one_chunk() {
    let mut buffer =
        String::from("data: {\"type\":\"ping\"}\ndata: {\"type\":\"message_stop\"}\n");
    let mut events: Vec<StreamEvent> = Vec::new();
    while let Some(idx) = buffer.find('\n') {
        let line = buffer[..idx].trim().to_string();
        buffer.drain(..=idx);
        if let Some(payload) = line.strip_prefix("data: ") {
            if let Ok(event) = serde_json::from_str::<StreamEvent>(payload) {
                events.push(event);
            }
        }
    }
    assert_eq!(events.len(), 2);
    assert!(matches!(events[0], StreamEvent::Ping));
    assert!(matches!(events[1], StreamEvent::MessageStop));
}
// A Tool-role message with no tool_use id must degrade to a plain user
// message rather than producing an invalid tool_result block.
#[test]
fn test_convert_messages_tool_role_without_id_falls_back_to_user() {
    let mut msg = ChatMessage::user("result of tool");
    msg.role = ChatRole::Tool;
    let (_, converted) = AnthropicProvider::convert_messages(&[msg]);
    assert_eq!(converted.len(), 1);
    assert_eq!(converted[0].role, "user");
    let json = serde_json::to_value(&converted[0]).unwrap();
    assert_eq!(json["content"], "result of tool");
}
}