ricecoder_providers/providers/
anthropic.rs

1//! Anthropic provider implementation
2//!
3//! Supports Claude 3, 3.5, and 4 family models via the Anthropic API.
4
5use async_trait::async_trait;
6use reqwest::Client;
7use serde::{Deserialize, Serialize};
8use std::sync::Arc;
9use tracing::{debug, error, warn};
10
11use crate::error::ProviderError;
12use crate::models::{Capability, ChatRequest, ChatResponse, FinishReason, ModelInfo, TokenUsage};
13use crate::provider::Provider;
14use crate::token_counter::TokenCounter;
15
/// Anthropic provider implementation
///
/// Holds the credentials and HTTP client used to talk to the Anthropic
/// Messages API. Construct via [`AnthropicProvider::new`] or
/// [`AnthropicProvider::with_base_url`].
pub struct AnthropicProvider {
    /// API key sent in the `x-api-key` header on every request.
    api_key: String,
    /// Shared HTTP client reused across all requests.
    client: Arc<Client>,
    /// Base URL for the Anthropic API; overridable for proxies or tests.
    base_url: String,
    /// Cached token counter backing `count_tokens`.
    token_counter: Arc<TokenCounter>,
}
23
24impl AnthropicProvider {
25    /// Create a new Anthropic provider instance
26    pub fn new(api_key: String) -> Result<Self, ProviderError> {
27        if api_key.is_empty() {
28            return Err(ProviderError::ConfigError(
29                "Anthropic API key is required".to_string(),
30            ));
31        }
32
33        Ok(Self {
34            api_key,
35            client: Arc::new(Client::new()),
36            base_url: "https://api.anthropic.com/v1".to_string(),
37            token_counter: Arc::new(TokenCounter::new()),
38        })
39    }
40
41    /// Create a new Anthropic provider with a custom base URL
42    pub fn with_base_url(api_key: String, base_url: String) -> Result<Self, ProviderError> {
43        if api_key.is_empty() {
44            return Err(ProviderError::ConfigError(
45                "Anthropic API key is required".to_string(),
46            ));
47        }
48
49        Ok(Self {
50            api_key,
51            client: Arc::new(Client::new()),
52            base_url,
53            token_counter: Arc::new(TokenCounter::new()),
54        })
55    }
56
57    /// Convert Anthropic API response to our ChatResponse
58    fn convert_response(
59        response: AnthropicChatResponse,
60        model: String,
61    ) -> Result<ChatResponse, ProviderError> {
62        let content = response
63            .content
64            .first()
65            .map(|c| c.text.clone())
66            .ok_or_else(|| ProviderError::ProviderError("No content in response".to_string()))?;
67
68        let finish_reason = match response.stop_reason.as_deref() {
69            Some("end_turn") => FinishReason::Stop,
70            Some("max_tokens") => FinishReason::Length,
71            Some("stop_sequence") => FinishReason::Stop,
72            _ => FinishReason::Stop,
73        };
74
75        Ok(ChatResponse {
76            content,
77            model,
78            usage: TokenUsage {
79                prompt_tokens: response.usage.input_tokens,
80                completion_tokens: response.usage.output_tokens,
81                total_tokens: response.usage.input_tokens + response.usage.output_tokens,
82            },
83            finish_reason,
84        })
85    }
86}
87
#[async_trait]
impl Provider for AnthropicProvider {
    /// Stable machine-readable identifier for this provider.
    fn id(&self) -> &str {
        "anthropic"
    }

    /// Human-readable display name.
    fn name(&self) -> &str {
        "Anthropic"
    }

    /// Static catalog of supported models with context windows, capabilities,
    /// and pricing (USD per 1k tokens). A fresh `Vec` is built on every call.
    ///
    /// NOTE(review): the Opus id "claude-3-opus-20250219" looks off —
    /// Anthropic's published Claude 3 Opus id is "claude-3-opus-20240229".
    /// The unit tests pin the current string, so confirm and change both
    /// together if it is wrong.
    fn models(&self) -> Vec<ModelInfo> {
        vec![
            ModelInfo {
                id: "claude-3-opus-20250219".to_string(),
                name: "Claude 3 Opus".to_string(),
                provider: "anthropic".to_string(),
                context_window: 200000,
                capabilities: vec![Capability::Chat, Capability::Code, Capability::Streaming],
                pricing: Some(crate::models::Pricing {
                    input_per_1k_tokens: 0.015,
                    output_per_1k_tokens: 0.075,
                }),
            },
            ModelInfo {
                id: "claude-3-5-sonnet-20241022".to_string(),
                name: "Claude 3.5 Sonnet".to_string(),
                provider: "anthropic".to_string(),
                context_window: 200000,
                capabilities: vec![Capability::Chat, Capability::Code, Capability::Streaming],
                pricing: Some(crate::models::Pricing {
                    input_per_1k_tokens: 0.003,
                    output_per_1k_tokens: 0.015,
                }),
            },
            ModelInfo {
                id: "claude-3-5-haiku-20241022".to_string(),
                name: "Claude 3.5 Haiku".to_string(),
                provider: "anthropic".to_string(),
                context_window: 200000,
                capabilities: vec![Capability::Chat, Capability::Code, Capability::Streaming],
                pricing: Some(crate::models::Pricing {
                    input_per_1k_tokens: 0.0008,
                    output_per_1k_tokens: 0.004,
                }),
            },
            ModelInfo {
                id: "claude-3-haiku-20240307".to_string(),
                name: "Claude 3 Haiku".to_string(),
                provider: "anthropic".to_string(),
                context_window: 200000,
                capabilities: vec![Capability::Chat, Capability::Code, Capability::Streaming],
                pricing: Some(crate::models::Pricing {
                    input_per_1k_tokens: 0.00025,
                    output_per_1k_tokens: 0.00125,
                }),
            },
        ]
    }

    /// Send a non-streaming chat request to the Anthropic Messages API
    /// (`POST {base_url}/messages`).
    ///
    /// # Errors
    ///
    /// - `InvalidModel` if `request.model` is not in [`Self::models`].
    /// - `AuthError` on HTTP 401; `RateLimited(60)` on 429 — the retry-after
    ///   of 60s is hard-coded, not read from response headers.
    /// - `ProviderError` for other non-success statuses, an unparseable body,
    ///   or a response with no content.
    async fn chat(&self, request: ChatRequest) -> Result<ChatResponse, ProviderError> {
        // Validate model against the static catalog before spending a request
        let model_id = &request.model;
        if !self.models().iter().any(|m| m.id == *model_id) {
            return Err(ProviderError::InvalidModel(model_id.clone()));
        }

        // Convert messages to Anthropic format.
        // NOTE(review): roles are forwarded verbatim; the Anthropic Messages
        // API expects a system prompt in a top-level `system` field, not as a
        // message with role "system" — confirm callers never send one.
        let messages: Vec<AnthropicMessage> = request
            .messages
            .iter()
            .map(|m| AnthropicMessage {
                role: m.role.clone(),
                content: m.content.clone(),
            })
            .collect();

        let anthropic_request = AnthropicChatRequest {
            model: request.model.clone(),
            // Anthropic requires max_tokens; default to 1024 when unset
            max_tokens: request.max_tokens.unwrap_or(1024),
            messages,
            temperature: request.temperature,
        };

        debug!(
            "Sending chat request to Anthropic for model: {}",
            request.model
        );

        let response = self
            .client
            .post(format!("{}/messages", self.base_url))
            .header("x-api-key", &self.api_key)
            .header("anthropic-version", "2023-06-01")
            .header("Content-Type", "application/json")
            .json(&anthropic_request)
            .send()
            .await
            .map_err(|e| {
                error!("Anthropic API request failed: {}", e);
                ProviderError::from(e)
            })?;

        // Map HTTP error statuses onto typed provider errors; log the raw
        // body for diagnostics since it is consumed here.
        let status = response.status();
        if !status.is_success() {
            let error_text = response.text().await.unwrap_or_default();
            error!("Anthropic API error ({}): {}", status, error_text);

            return match status.as_u16() {
                401 => Err(ProviderError::AuthError),
                429 => Err(ProviderError::RateLimited(60)),
                _ => Err(ProviderError::ProviderError(format!(
                    "Anthropic API error: {}",
                    status
                ))),
            };
        }

        let anthropic_response: AnthropicChatResponse = response.json().await?;
        Self::convert_response(anthropic_response, request.model)
    }

    /// Streaming chat — always errors for now.
    async fn chat_stream(
        &self,
        _request: ChatRequest,
    ) -> Result<crate::provider::ChatStream, ProviderError> {
        // Streaming support will be implemented in a future iteration
        Err(ProviderError::ProviderError(
            "Streaming not yet implemented for Anthropic".to_string(),
        ))
    }

    /// Count tokens in `content` for the given model.
    ///
    /// # Errors
    ///
    /// Returns `InvalidModel` if `model` is not in [`Self::models`].
    fn count_tokens(&self, content: &str, model: &str) -> Result<usize, ProviderError> {
        // Validate model
        if !self.models().iter().any(|m| m.id == model) {
            return Err(ProviderError::InvalidModel(model.to_string()));
        }

        // Use token counter with caching for performance.
        // NOTE(review): this reuses the OpenAI tokenizer as an approximation;
        // Anthropic's actual tokenization may differ — counts are estimates.
        let tokens = self.token_counter.count_tokens_openai(content, model);
        Ok(tokens)
    }

    /// Probe the API by listing models. Returns `Ok(true)` on 200,
    /// `Err(AuthError)` on 401, and `Ok(false)` for any other status so that
    /// transient failures are reported as unhealthy rather than fatal.
    async fn health_check(&self) -> Result<bool, ProviderError> {
        debug!("Performing health check for Anthropic provider");

        // Try to get models list as a health check
        let response = self
            .client
            .get(format!("{}/models", self.base_url))
            .header("x-api-key", &self.api_key)
            .header("anthropic-version", "2023-06-01")
            .send()
            .await
            .map_err(|e| {
                warn!("Anthropic health check failed: {}", e);
                ProviderError::from(e)
            })?;

        match response.status().as_u16() {
            200 => {
                debug!("Anthropic health check passed");
                Ok(true)
            }
            401 => {
                error!("Anthropic health check failed: authentication error");
                Err(ProviderError::AuthError)
            }
            _ => {
                warn!(
                    "Anthropic health check failed with status: {}",
                    response.status()
                );
                Ok(false)
            }
        }
    }
}
266
/// Anthropic API request format (body of `POST /v1/messages`)
#[derive(Debug, Serialize)]
struct AnthropicChatRequest {
    /// Model id, e.g. "claude-3-5-sonnet-20241022".
    model: String,
    /// Maximum number of tokens to generate (required by the Anthropic API).
    max_tokens: usize,
    /// Conversation turns in Anthropic's role/content shape.
    messages: Vec<AnthropicMessage>,
    /// Sampling temperature; omitted from the JSON body when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f32>,
}
276
/// Anthropic API message format (a single conversation turn)
#[derive(Debug, Serialize, Deserialize)]
struct AnthropicMessage {
    /// Message role as forwarded from the incoming request (e.g. "user",
    /// "assistant").
    role: String,
    /// Plain-text message content.
    content: String,
}
283
/// Anthropic API response format (body of a successful `POST /v1/messages`);
/// fields not listed here are ignored during deserialization.
#[derive(Debug, Deserialize)]
struct AnthropicChatResponse {
    /// Content blocks; only the first one is consumed by `convert_response`.
    content: Vec<AnthropicContent>,
    /// Token usage reported by the API.
    usage: AnthropicUsage,
    /// Why generation stopped ("end_turn", "max_tokens", "stop_sequence");
    /// may be absent.
    stop_reason: Option<String>,
}
291
/// Anthropic API content format — a single content block; only the `text`
/// field is deserialized, any other fields are ignored.
#[derive(Debug, Deserialize)]
struct AnthropicContent {
    /// Text payload of the block.
    text: String,
}
297
/// Anthropic API usage format — token counts reported by the API
#[derive(Debug, Deserialize)]
struct AnthropicUsage {
    /// Tokens consumed by the prompt.
    input_tokens: usize,
    /// Tokens produced in the completion.
    output_tokens: usize,
}
304
#[cfg(test)]
mod tests {
    use super::*;

    /// Shorthand for building a provider with a dummy key.
    fn provider() -> AnthropicProvider {
        AnthropicProvider::new("test-key".to_string()).unwrap()
    }

    #[test]
    fn test_anthropic_provider_creation() {
        assert!(AnthropicProvider::new("test-key".to_string()).is_ok());
    }

    #[test]
    fn test_anthropic_provider_creation_empty_key() {
        // An empty API key must be rejected at construction time.
        assert!(AnthropicProvider::new(String::new()).is_err());
    }

    #[test]
    fn test_anthropic_provider_id() {
        assert_eq!(provider().id(), "anthropic");
    }

    #[test]
    fn test_anthropic_provider_name() {
        assert_eq!(provider().name(), "Anthropic");
    }

    #[test]
    fn test_anthropic_models() {
        let models = provider().models();
        assert_eq!(models.len(), 4);

        // Every expected model id must appear in the catalog.
        let expected = [
            "claude-3-opus-20250219",
            "claude-3-5-sonnet-20241022",
            "claude-3-5-haiku-20241022",
            "claude-3-haiku-20240307",
        ];
        for id in expected {
            assert!(models.iter().any(|m| m.id == id), "missing model: {}", id);
        }
    }

    #[test]
    fn test_token_counting() {
        let tokens = provider()
            .count_tokens("Hello, world!", "claude-3-opus-20250219")
            .expect("known model should produce a token count");
        assert!(tokens > 0);
    }

    #[test]
    fn test_token_counting_invalid_model() {
        let result = provider().count_tokens("Hello, world!", "invalid-model");
        assert!(result.is_err());
    }
}