// devsper_providers/anthropic.rs
1use devsper_core::{LlmProvider, LlmRequest, LlmResponse, LlmRole, StopReason};
2use anyhow::{anyhow, Result};
3use async_trait::async_trait;
4use reqwest::Client;
5use serde::{Deserialize, Serialize};
6use tracing::debug;
7
/// Anthropic Claude API provider (Messages API).
pub struct AnthropicProvider {
    // Reused reqwest client; one instance shares its connection pool
    // across all requests made by this provider.
    client: Client,
    // Secret API key, sent as the `x-api-key` request header.
    api_key: String,
    // API origin (default "https://api.anthropic.com"); overridable via
    // `with_base_url` for proxies or test servers.
    base_url: String,
}
14
15impl AnthropicProvider {
16    pub fn new(api_key: impl Into<String>) -> Self {
17        Self {
18            client: Client::new(),
19            api_key: api_key.into(),
20            base_url: "https://api.anthropic.com".to_string(),
21        }
22    }
23
24    pub fn with_base_url(mut self, url: impl Into<String>) -> Self {
25        self.base_url = url.into();
26        self
27    }
28}
29
/// JSON body for POST /v1/messages — only the fields this crate sends.
/// Borrows from the incoming `LlmRequest` to avoid copying message text.
#[derive(Serialize)]
struct AnthropicRequest<'a> {
    model: &'a str,
    messages: Vec<AnthropicMessage<'a>>,
    // Required by the Messages API; the caller supplies a default when the
    // incoming request leaves it unset.
    max_tokens: u32,
    // Top-level system prompt; omitted from the JSON entirely when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    system: Option<&'a str>,
}
38
/// One conversation turn; `role` is "user" or "assistant" as produced by
/// `role_to_str` (the Messages API accepts no other roles here).
#[derive(Serialize)]
struct AnthropicMessage<'a> {
    role: &'a str,
    content: &'a str,
}
44
/// Subset of the Messages API response this crate consumes; unknown JSON
/// fields are ignored by serde's default behavior.
#[derive(Deserialize)]
struct AnthropicResponse {
    // Ordered content blocks; only blocks of type "text" are extracted.
    content: Vec<AnthropicContent>,
    usage: AnthropicUsage,
    // Model id reported by the server.
    model: String,
    stop_reason: Option<String>,
}
52
/// A single response content block. The JSON field is `type`, renamed
/// because `type` is a Rust keyword.
#[derive(Deserialize)]
struct AnthropicContent {
    #[serde(rename = "type")]
    content_type: String,
    // Present for "text" blocks; other block kinds may omit it, hence Option.
    text: Option<String>,
}
59
/// Token accounting returned by the API and surfaced on `LlmResponse`.
#[derive(Deserialize)]
struct AnthropicUsage {
    input_tokens: u32,
    output_tokens: u32,
}
65
66fn role_to_str(role: &LlmRole) -> &'static str {
67    match role {
68        LlmRole::User | LlmRole::Tool => "user",
69        LlmRole::Assistant => "assistant",
70        LlmRole::System => "user", // system goes in separate field
71    }
72}
73
74#[async_trait]
75impl LlmProvider for AnthropicProvider {
76    async fn generate(&self, req: LlmRequest) -> Result<LlmResponse> {
77        use tracing::Instrument;
78
79        let span = tracing::info_span!(
80            "gen_ai.chat",
81            "gen_ai.system" = self.name(),
82            "gen_ai.operation.name" = "chat",
83            "gen_ai.request.model" = req.model.as_str(),
84            "gen_ai.request.max_tokens" = req.max_tokens,
85            "gen_ai.response.model" = tracing::field::Empty,
86            "gen_ai.usage.input_tokens" = tracing::field::Empty,
87            "gen_ai.usage.output_tokens" = tracing::field::Empty,
88        );
89
90        let messages: Vec<AnthropicMessage> = req
91            .messages
92            .iter()
93            .filter(|m| !matches!(m.role, LlmRole::System))
94            .map(|m| AnthropicMessage {
95                role: role_to_str(&m.role),
96                content: &m.content,
97            })
98            .collect();
99
100        let system_from_messages = req
101            .messages
102            .iter()
103            .find(|m| matches!(m.role, LlmRole::System))
104            .map(|m| m.content.as_str());
105
106        let system = req.system.as_deref().or(system_from_messages);
107
108        let body = AnthropicRequest {
109            model: &req.model,
110            messages,
111            max_tokens: req.max_tokens.unwrap_or(4096),
112            system,
113        };
114
115        debug!(model = %req.model, "Anthropic request");
116
117        let result = async {
118            let resp = self
119                .client
120                .post(format!("{}/v1/messages", self.base_url))
121                .header("x-api-key", &self.api_key)
122                .header("anthropic-version", "2023-06-01")
123                .header("content-type", "application/json")
124                .json(&body)
125                .send()
126                .await?;
127
128            if !resp.status().is_success() {
129                let status = resp.status();
130                let text = resp.text().await.unwrap_or_default();
131                return Err(anyhow!("Anthropic API error {status}: {text}"));
132            }
133
134            let data: AnthropicResponse = resp.json().await?;
135
136            let content = data
137                .content
138                .iter()
139                .filter_map(|c| {
140                    if c.content_type == "text" {
141                        c.text.clone()
142                    } else {
143                        None
144                    }
145                })
146                .collect::<Vec<_>>()
147                .join("");
148
149            let stop_reason = match data.stop_reason.as_deref() {
150                Some("end_turn") => StopReason::EndTurn,
151                Some("tool_use") => StopReason::ToolUse,
152                Some("max_tokens") => StopReason::MaxTokens,
153                _ => StopReason::EndTurn,
154            };
155
156            Ok(LlmResponse {
157                content,
158                tool_calls: vec![],
159                input_tokens: data.usage.input_tokens,
160                output_tokens: data.usage.output_tokens,
161                model: data.model,
162                stop_reason,
163            })
164        }
165        .instrument(span.clone())
166        .await;
167
168        if let Ok(ref resp) = result {
169            span.record("gen_ai.response.model", resp.model.as_str());
170            span.record("gen_ai.usage.input_tokens", resp.input_tokens);
171            span.record("gen_ai.usage.output_tokens", resp.output_tokens);
172        }
173        result
174    }
175
176    fn name(&self) -> &str {
177        "anthropic"
178    }
179
180    fn supports_model(&self, model: &str) -> bool {
181        model.starts_with("claude-")
182    }
183}