// agents_runtime/providers/anthropic.rs
use agents_core::llm::{LanguageModel, LlmRequest, LlmResponse};
2use agents_core::messaging::{AgentMessage, MessageContent, MessageRole};
3use agents_core::tools::ToolSchema;
4use async_trait::async_trait;
5use reqwest::Client;
6use serde::{Deserialize, Serialize};
7use serde_json::Value;
8
/// Connection settings for the Anthropic Messages API.
#[derive(Clone)]
pub struct AnthropicConfig {
    /// API key sent in the `x-api-key` request header.
    pub api_key: String,
    /// Model identifier placed in the request body.
    pub model: String,
    /// Upper bound on generated tokens (`max_tokens` in the request body).
    pub max_output_tokens: u32,
    /// Endpoint override; `None` uses the public Messages endpoint.
    pub api_url: Option<String>,
    /// `anthropic-version` header override; `None` uses "2023-06-01".
    pub api_version: Option<String>,
}
17
/// [`LanguageModel`] implementation backed by Anthropic's Messages API.
pub struct AnthropicMessagesModel {
    // Reused HTTP client so connections can be pooled across requests.
    client: Client,
    config: AnthropicConfig,
}
22
23impl AnthropicMessagesModel {
24 pub fn new(config: AnthropicConfig) -> anyhow::Result<Self> {
25 Ok(Self {
26 client: Client::builder()
27 .user_agent("rust-deep-agents-sdk/0.1")
28 .build()?,
29 config,
30 })
31 }
32}
33
/// Request body for `POST /v1/messages` (serialization-only mirror of the API).
#[derive(Serialize)]
struct AnthropicRequest {
    model: String,
    max_tokens: u32,
    system: String,
    messages: Vec<AnthropicMessage>,
    // Omitted from the JSON entirely when no tools are configured.
    #[serde(skip_serializing_if = "Option::is_none")]
    tools: Option<Vec<AnthropicTool>>,
}
43
/// Tool definition advertised to the model in the request's `tools` array.
#[derive(Serialize)]
struct AnthropicTool {
    name: String,
    description: String,
    // JSON Schema for the tool's input, serialized from `ToolSchema::parameters`.
    input_schema: Value,
}
50
/// One chat turn ("user" or "assistant") with its content blocks.
#[derive(Serialize)]
struct AnthropicMessage {
    role: String,
    content: Vec<AnthropicContentBlock>,
}
56
/// A single content block; this client only ever emits `"type": "text"` blocks.
#[derive(Serialize)]
struct AnthropicContentBlock {
    // Serialized as `type`; named `kind` to avoid the Rust keyword.
    #[serde(rename = "type")]
    kind: &'static str,
    text: String,
    // Present only when the source message carried cache-control metadata.
    #[serde(skip_serializing_if = "Option::is_none")]
    cache_control: Option<AnthropicCacheControl>,
}
65
/// Prompt-caching directive attached to a content block.
#[derive(Serialize)]
struct AnthropicCacheControl {
    // Serialized as `type`; the value is copied verbatim from message metadata.
    #[serde(rename = "type")]
    cache_type: String,
}
71
/// The subset of the Messages API response that this client consumes.
#[derive(Deserialize)]
struct AnthropicResponse {
    content: Vec<AnthropicResponseBlock>,
}
76
/// One content block from the response; `text` is only expected to be
/// populated when `kind` is `"text"`.
#[derive(Deserialize)]
struct AnthropicResponseBlock {
    #[serde(rename = "type")]
    kind: String,
    text: Option<String>,
}
83
84fn to_anthropic_messages(request: &LlmRequest) -> (String, Vec<AnthropicMessage>) {
85 let mut system_prompt = request.system_prompt.clone();
86 let mut messages = Vec::new();
87
88 for message in &request.messages {
89 let text = match &message.content {
90 MessageContent::Text(text) => text.clone(),
91 MessageContent::Json(value) => value.to_string(),
92 };
93
94 if matches!(message.role, MessageRole::System) {
96 if !system_prompt.is_empty() {
97 system_prompt.push_str("\n\n");
98 }
99 system_prompt.push_str(&text);
100 continue;
101 }
102
103 let role = match message.role {
104 MessageRole::User => "user",
105 MessageRole::Agent => "assistant",
106 MessageRole::Tool => "user",
107 MessageRole::System => unreachable!(), };
109
110 let cache_control = message
112 .metadata
113 .as_ref()
114 .and_then(|meta| meta.cache_control.as_ref())
115 .map(|cc| AnthropicCacheControl {
116 cache_type: cc.cache_type.clone(),
117 });
118
119 messages.push(AnthropicMessage {
120 role: role.to_string(),
121 content: vec![AnthropicContentBlock {
122 kind: "text",
123 text,
124 cache_control,
125 }],
126 });
127 }
128
129 (system_prompt, messages)
130}
131
132fn to_anthropic_tools(tools: &[ToolSchema]) -> Option<Vec<AnthropicTool>> {
134 if tools.is_empty() {
135 return None;
136 }
137
138 Some(
139 tools
140 .iter()
141 .map(|tool| AnthropicTool {
142 name: tool.name.clone(),
143 description: tool.description.clone(),
144 input_schema: serde_json::to_value(&tool.parameters)
145 .unwrap_or_else(|_| serde_json::json!({})),
146 })
147 .collect(),
148 )
149}
150
151#[async_trait]
152impl LanguageModel for AnthropicMessagesModel {
153 async fn generate(&self, request: LlmRequest) -> anyhow::Result<LlmResponse> {
154 let (system_prompt, messages) = to_anthropic_messages(&request);
155 let tools = to_anthropic_tools(&request.tools);
156
157 let body = AnthropicRequest {
158 model: self.config.model.clone(),
159 max_tokens: self.config.max_output_tokens,
160 system: system_prompt,
161 messages,
162 tools,
163 };
164
165 let url = self
166 .config
167 .api_url
168 .as_deref()
169 .unwrap_or("https://api.anthropic.com/v1/messages");
170 let version = self.config.api_version.as_deref().unwrap_or("2023-06-01");
171
172 let response = self
173 .client
174 .post(url)
175 .header("x-api-key", &self.config.api_key)
176 .header("anthropic-version", version)
177 .json(&body)
178 .send()
179 .await?
180 .error_for_status()?;
181
182 let data: AnthropicResponse = response.json().await?;
183 let text = data
184 .content
185 .into_iter()
186 .find_map(|block| (block.kind == "text").then(|| block.text.unwrap_or_default()))
187 .unwrap_or_default();
188
189 Ok(LlmResponse {
190 message: AgentMessage {
191 role: MessageRole::Agent,
192 content: MessageContent::Text(text),
193 metadata: None,
194 },
195 })
196 }
197}
198
#[cfg(test)]
mod tests {
    use super::*;

    /// The system prompt must land in the `system` field while the user turn
    /// becomes the sole chat message.
    #[test]
    fn anthropic_message_conversion_includes_system_prompt() {
        let user_turn = AgentMessage {
            role: MessageRole::User,
            content: MessageContent::Text("Hello".into()),
            metadata: None,
        };
        let request = LlmRequest::new("You are helpful", vec![user_turn]);

        let (system, messages) = to_anthropic_messages(&request);

        assert_eq!(system, "You are helpful");
        assert_eq!(messages.len(), 1);
        assert_eq!(messages[0].role, "user");
        assert_eq!(messages[0].content[0].text, "Hello");
    }
}
219}