use crate::core::Protocol;
use crate::types::{ChatRequest, ChatResponse, Message, Role, Choice, Usage};
use crate::error::LlmConnectorError;
use async_trait::async_trait;
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug)]
pub struct AnthropicProtocol {
    api_key: String,
}

impl AnthropicProtocol {
    pub fn new(api_key: &str) -> Self {
        Self {
            api_key: api_key.to_string(),
        }
    }

    pub fn api_key(&self) -> &str {
        &self.api_key
    }
}
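
// Quick usage sketch (the key and model are placeholders; the URL shown is
// Anthropic's public endpoint, and `chat_endpoint` comes from the `Protocol`
// impl below):
//
//     let protocol = AnthropicProtocol::new("sk-ant-...");
//     assert_eq!(
//         protocol.chat_endpoint("https://api.anthropic.com"),
//         "https://api.anthropic.com/v1/messages",
//     );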

#[async_trait]
impl Protocol for AnthropicProtocol {
    type Request = AnthropicRequest;
    type Response = AnthropicResponse;

    fn name(&self) -> &str {
        "anthropic"
    }

    fn chat_endpoint(&self, base_url: &str) -> String {
        format!("{}/v1/messages", base_url.trim_end_matches('/'))
    }

    fn build_request(&self, request: &ChatRequest) -> Result<Self::Request, LlmConnectorError> {
        let mut system_message = None;
        let mut messages = Vec::new();

        for msg in &request.messages {
            match msg.role {
                Role::System => {
                    // Anthropic takes a single top-level `system` string, so
                    // multiple system messages are concatenated.
                    let text = msg.content_as_text();
                    system_message = Some(match system_message.take() {
                        Some(existing) => format!("{}\n\n{}", existing, text),
                        None => text,
                    });
                }
                Role::User => {
                    let content = serde_json::to_value(&msg.content).map_err(|e| {
                        LlmConnectorError::InvalidRequest(format!(
                            "Failed to serialize message content: {}",
                            e
                        ))
                    })?;
                    messages.push(AnthropicMessage {
                        role: "user".to_string(),
                        content,
                    });
                }
                Role::Assistant => {
                    let content = serde_json::to_value(&msg.content).map_err(|e| {
                        LlmConnectorError::InvalidRequest(format!(
                            "Failed to serialize message content: {}",
                            e
                        ))
                    })?;
                    messages.push(AnthropicMessage {
                        role: "assistant".to_string(),
                        content,
                    });
                }
                Role::Tool => {
                    // The Messages API has no dedicated tool role, so tool
                    // output is forwarded as a user text block.
                    let text = format!("Tool result: {}", msg.content_as_text());
                    messages.push(AnthropicMessage {
                        role: "user".to_string(),
                        content: serde_json::json!([{"type": "text", "text": text}]),
                    });
                }
            }
        }

        Ok(AnthropicRequest {
            model: request.model.clone(),
            max_tokens: request.max_tokens.unwrap_or(1024),
            messages,
            system: system_message,
            temperature: request.temperature,
            top_p: request.top_p,
            stream: request.stream,
        })
    }
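
    // For reference, a request built above serializes (via the derives at the
    // bottom of this file) along these lines; values are illustrative, and the
    // exact `content` encoding follows `MessageBlock`'s Serialize impl:
    //
    //     {"model":"claude-3-5-sonnet-latest","max_tokens":1024,
    //      "messages":[{"role":"user","content":[{"type":"text","text":"Hi"}]}],
    //      "system":"You are helpful."}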

    fn parse_response(&self, response: &str) -> Result<ChatResponse, LlmConnectorError> {
        let anthropic_response: AnthropicResponse = serde_json::from_str(response)
            .map_err(|e| LlmConnectorError::ParseError(format!("Failed to parse Anthropic response: {}", e)))?;

        let message_blocks: Vec<crate::types::MessageBlock> = anthropic_response.content.iter()
            .map(|c| crate::types::MessageBlock::text(&c.text))
            .collect();

        // Flatten every text block into the convenience `content` field rather
        // than only the first, so multi-block replies are not truncated.
        let content = anthropic_response.content.iter()
            .map(|c| c.text.as_str())
            .collect::<String>();

        let choices = vec![Choice {
            index: 0,
            message: Message {
                role: Role::Assistant,
                content: message_blocks,
                name: None,
                tool_calls: None,
                tool_call_id: None,
                reasoning_content: None,
                reasoning: None,
                thought: None,
                thinking: None,
            },
            finish_reason: Some(anthropic_response.stop_reason.unwrap_or_else(|| "stop".to_string())),
            logprobs: None,
        }];

        let usage = Some(Usage {
            prompt_tokens: anthropic_response.usage.input_tokens,
            completion_tokens: anthropic_response.usage.output_tokens,
            total_tokens: anthropic_response.usage.input_tokens + anthropic_response.usage.output_tokens,
            completion_tokens_details: None,
            prompt_cache_hit_tokens: None,
            prompt_cache_miss_tokens: None,
            prompt_tokens_details: None,
        });

        Ok(ChatResponse {
            id: anthropic_response.id,
            object: "chat.completion".to_string(),
            created: std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .unwrap_or_default()
                .as_secs(),
            model: anthropic_response.model,
            choices,
            content,
            reasoning_content: None,
            usage,
            system_fingerprint: None,
        })
    }
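
    // Non-streaming bodies are expected in the shape modeled by
    // `AnthropicResponse` below; a minimal illustrative example:
    //
    //     {"id":"msg_01...","model":"claude-3-5-sonnet-latest",
    //      "content":[{"type":"text","text":"Hello!"}],
    //      "stop_reason":"end_turn",
    //      "usage":{"input_tokens":10,"output_tokens":25}}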

    fn map_error(&self, status: u16, body: &str) -> LlmConnectorError {
        let error_info = serde_json::from_str::<serde_json::Value>(body)
            .ok()
            .and_then(|v| v.get("error").cloned())
            .unwrap_or_else(|| serde_json::json!({"message": body}));

        let message = error_info.get("message")
            .and_then(|m| m.as_str())
            .unwrap_or("Unknown Anthropic error");

        match status {
            400 => LlmConnectorError::InvalidRequest(format!("Anthropic: {}", message)),
            401 => LlmConnectorError::AuthenticationError(format!("Anthropic: {}", message)),
            403 => LlmConnectorError::PermissionError(format!("Anthropic: {}", message)),
            429 => LlmConnectorError::RateLimitError(format!("Anthropic: {}", message)),
            500..=599 => LlmConnectorError::ServerError(format!("Anthropic: {}", message)),
            _ => LlmConnectorError::ApiError(format!("Anthropic HTTP {}: {}", status, message)),
        }
    }
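
    // Error bodies typically follow Anthropic's envelope, e.g. (illustrative):
    //
    //     {"type":"error","error":{"type":"rate_limit_error","message":"..."}}
    //
    // Anything unparseable falls back to using the raw body as the message.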

    fn auth_headers(&self) -> Vec<(String, String)> {
        vec![
            ("x-api-key".to_string(), self.api_key.clone()),
            ("Content-Type".to_string(), "application/json".to_string()),
            ("anthropic-version".to_string(), "2023-06-01".to_string()),
        ]
    }

    #[cfg(feature = "streaming")]
    async fn parse_stream_response(&self, response: reqwest::Response) -> Result<crate::types::ChatStream, LlmConnectorError> {
        use crate::types::{StreamingResponse, StreamingChoice, Delta, Usage};
        use futures_util::StreamExt;
        use std::sync::{Arc, Mutex};

        let events_stream = crate::sse::sse_events(response);

        let message_id = Arc::new(Mutex::new(String::new()));

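        // Anthropic's SSE stream interleaves several event types; only three
        // matter for OpenAI-style chunks, and the rest are dropped:
        //
        //   message_start        -> capture the message id for later chunks
        //   content_block_delta  -> emit a content chunk
        //   message_delta        -> emit the final chunk with stop_reason/usage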
        let response_stream = events_stream.filter_map(move |result| {
            let message_id = message_id.clone();
            async move {
                match result {
                    Ok(json_str) => {
                        match serde_json::from_str::<serde_json::Value>(&json_str) {
                            Ok(event) => {
                                let event_type = event.get("type").and_then(|t| t.as_str()).unwrap_or("");

                                match event_type {
                                    "message_start" => {
                                        if let Some(msg_id) = event.get("message")
                                            .and_then(|m| m.get("id"))
                                            .and_then(|id| id.as_str()) {
                                            if let Ok(mut id) = message_id.lock() {
                                                *id = msg_id.to_string();
                                            }
                                        }
                                        None
                                    }
230 "content_block_delta" => {
231 if let Some(text) = event.get("delta")
233 .and_then(|d| d.get("text"))
234 .and_then(|t| t.as_str()) {
235
236 let id = message_id.lock().ok()
237 .map(|id| id.clone())
238 .unwrap_or_default();
239
240 Some(Ok(StreamingResponse {
242 id,
243 object: "chat.completion.chunk".to_string(),
244 created: std::time::SystemTime::now()
245 .duration_since(std::time::UNIX_EPOCH)
246 .unwrap_or_default()
247 .as_secs(),
248 model: "anthropic".to_string(),
249 choices: vec![StreamingChoice {
250 index: 0,
251 delta: Delta {
252 role: Some(crate::types::Role::Assistant),
253 content: Some(text.to_string()),
254 tool_calls: None,
255 reasoning_content: None,
256 reasoning: None,
257 thought: None,
258 thinking: None,
259 },
260 finish_reason: None,
261 logprobs: None,
262 }],
263 content: text.to_string(),
264 reasoning_content: None,
265 usage: None,
266 system_fingerprint: None,
267 }))
268 } else {
269 None
270 }
271 }
272 "message_delta" => {
273 let stop_reason = event.get("delta")
275 .and_then(|d| d.get("stop_reason"))
276 .and_then(|s| s.as_str())
277 .map(|s| s.to_string());
278
279 let usage = event.get("usage").and_then(|u| {
280 let input_tokens = u.get("input_tokens").and_then(|t| t.as_u64()).unwrap_or(0) as u32;
281 let output_tokens = u.get("output_tokens").and_then(|t| t.as_u64()).unwrap_or(0) as u32;
282 Some(Usage {
283 prompt_tokens: input_tokens,
284 completion_tokens: output_tokens,
285 total_tokens: input_tokens + output_tokens,
286 completion_tokens_details: None,
287 prompt_cache_hit_tokens: None,
288 prompt_cache_miss_tokens: None,
289 prompt_tokens_details: None,
290 })
291 });

                                        let id = message_id.lock().ok()
                                            .map(|id| id.clone())
                                            .unwrap_or_default();

                                        Some(Ok(StreamingResponse {
                                            id,
                                            object: "chat.completion.chunk".to_string(),
                                            created: std::time::SystemTime::now()
                                                .duration_since(std::time::UNIX_EPOCH)
                                                .unwrap_or_default()
                                                .as_secs(),
                                            model: "anthropic".to_string(),
                                            choices: vec![StreamingChoice {
                                                index: 0,
                                                delta: Delta {
                                                    role: None,
                                                    content: None,
                                                    tool_calls: None,
                                                    reasoning_content: None,
                                                    reasoning: None,
                                                    thought: None,
                                                    thinking: None,
                                                },
                                                finish_reason: stop_reason,
                                                logprobs: None,
                                            }],
                                            content: String::new(),
                                            reasoning_content: None,
                                            usage,
                                            system_fingerprint: None,
                                        }))
                                    }
                                    _ => None,
                                }
                            }
                            Err(e) => {
                                Some(Err(LlmConnectorError::ParseError(format!(
                                    "Failed to parse Anthropic streaming event: {}. JSON: {}",
                                    e, json_str
                                ))))
                            }
                        }
                    }
                    Err(e) => Some(Err(e)),
                }
            }
        });

        Ok(Box::pin(response_stream))
    }
}

#[derive(Serialize, Debug)]
pub struct AnthropicRequest {
    pub model: String,
    pub max_tokens: u32,
    pub messages: Vec<AnthropicMessage>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub system: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
}

#[derive(Serialize, Debug)]
pub struct AnthropicMessage {
    pub role: String,
    /// Kept as raw JSON so both plain-string and block-array content
    /// serialize unchanged.
    pub content: serde_json::Value,
}

#[derive(Deserialize, Debug)]
pub struct AnthropicResponse {
    pub id: String,
    pub model: String,
    pub content: Vec<AnthropicContent>,
    pub stop_reason: Option<String>,
    pub usage: AnthropicUsage,
}

#[derive(Deserialize, Debug)]
pub struct AnthropicContent {
    #[serde(rename = "type")]
    pub content_type: String,
    pub text: String,
}

#[derive(Deserialize, Debug)]
pub struct AnthropicUsage {
    pub input_tokens: u32,
    pub output_tokens: u32,
}
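
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal sketch of `parse_response` against a hand-written body that
    // mirrors the `AnthropicResponse` shape above; the id, model name, and
    // token counts are made up for the test.
    #[test]
    fn parses_minimal_response() {
        let protocol = AnthropicProtocol::new("test-key");
        let body = r#"{
            "id": "msg_test",
            "model": "claude-test",
            "content": [
                {"type": "text", "text": "Hello"},
                {"type": "text", "text": ", world"}
            ],
            "stop_reason": "end_turn",
            "usage": {"input_tokens": 3, "output_tokens": 5}
        }"#;

        let response = protocol.parse_response(body).expect("body should parse");
        assert_eq!(response.content, "Hello, world");
        assert_eq!(response.choices.len(), 1);
        assert_eq!(response.usage.expect("usage is set").total_tokens, 8);
    }

    #[test]
    fn maps_rate_limit_errors() {
        let protocol = AnthropicProtocol::new("test-key");
        let err = protocol.map_error(
            429,
            r#"{"type":"error","error":{"type":"rate_limit_error","message":"slow down"}}"#,
        );
        assert!(matches!(err, LlmConnectorError::RateLimitError(_)));
    }
}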