// agents_runtime/providers/anthropic.rs
use agents_core::llm::{LanguageModel, LlmRequest, LlmResponse};
2use agents_core::messaging::{AgentMessage, MessageContent, MessageRole};
3use async_trait::async_trait;
4use reqwest::Client;
5use serde::{Deserialize, Serialize};
6
/// Configuration for [`AnthropicMessagesModel`].
///
/// NOTE: deliberately does not derive `Debug` — `api_key` is a secret and
/// must not end up in logs via `{:?}` formatting.
#[derive(Clone)]
pub struct AnthropicConfig {
    /// Secret key sent in the `x-api-key` request header.
    pub api_key: String,
    /// Name of the Anthropic model to invoke.
    pub model: String,
    /// Value used for the request's `max_tokens` field.
    pub max_output_tokens: u32,
    /// Optional endpoint override; `None` falls back to the public
    /// `https://api.anthropic.com/v1/messages` URL.
    pub api_url: Option<String>,
    /// Optional `anthropic-version` header override; `None` falls back to
    /// "2023-06-01".
    pub api_version: Option<String>,
}
15
/// [`LanguageModel`] implementation backed by the Anthropic Messages API.
pub struct AnthropicMessagesModel {
    // Shared HTTP client reused across requests.
    client: Client,
    // Credentials, model choice, and optional endpoint/version overrides.
    config: AnthropicConfig,
}
20
21impl AnthropicMessagesModel {
22 pub fn new(config: AnthropicConfig) -> anyhow::Result<Self> {
23 Ok(Self {
24 client: Client::builder()
25 .user_agent("rust-deep-agents-sdk/0.1")
26 .build()?,
27 config,
28 })
29 }
30}
31
/// JSON request body for a Messages API call.
#[derive(Serialize)]
struct AnthropicRequest {
    model: String,
    max_tokens: u32,
    /// Top-level system prompt; the Messages API keeps this outside of
    /// `messages`.
    system: String,
    messages: Vec<AnthropicMessage>,
}
39
/// One conversation turn in the wire format ("user" or "assistant").
#[derive(Serialize)]
struct AnthropicMessage {
    role: String,
    content: Vec<AnthropicContentBlock>,
}
45
/// A single content block inside a message; this provider only emits
/// `type: "text"` blocks.
#[derive(Serialize)]
struct AnthropicContentBlock {
    // Serialized as "type"; `type` is a Rust keyword, hence the rename.
    #[serde(rename = "type")]
    kind: &'static str,
    text: String,
    // Omitted from the JSON entirely when no cache hint is attached.
    #[serde(skip_serializing_if = "Option::is_none")]
    cache_control: Option<AnthropicCacheControl>,
}
54
/// Prompt-caching hint forwarded from message metadata (serialized as
/// `{"type": ...}`).
#[derive(Serialize)]
struct AnthropicCacheControl {
    #[serde(rename = "type")]
    cache_type: String,
}
60
/// Minimal deserialization target for the Messages API response; only the
/// `content` array is consumed.
#[derive(Deserialize)]
struct AnthropicResponse {
    content: Vec<AnthropicResponseBlock>,
}
65
/// One content block from the response. `text` is `Option` because non-text
/// block types carry no `text` field.
#[derive(Deserialize)]
struct AnthropicResponseBlock {
    #[serde(rename = "type")]
    kind: String,
    text: Option<String>,
}
72
73fn to_anthropic_messages(request: &LlmRequest) -> (String, Vec<AnthropicMessage>) {
74 let mut system_prompt = request.system_prompt.clone();
75 let mut messages = Vec::new();
76
77 for message in &request.messages {
78 let text = match &message.content {
79 MessageContent::Text(text) => text.clone(),
80 MessageContent::Json(value) => value.to_string(),
81 };
82
83 if matches!(message.role, MessageRole::System) {
85 if !system_prompt.is_empty() {
86 system_prompt.push_str("\n\n");
87 }
88 system_prompt.push_str(&text);
89 continue;
90 }
91
92 let role = match message.role {
93 MessageRole::User => "user",
94 MessageRole::Agent => "assistant",
95 MessageRole::Tool => "user",
96 MessageRole::System => unreachable!(), };
98
99 let cache_control = message
101 .metadata
102 .as_ref()
103 .and_then(|meta| meta.cache_control.as_ref())
104 .map(|cc| AnthropicCacheControl {
105 cache_type: cc.cache_type.clone(),
106 });
107
108 messages.push(AnthropicMessage {
109 role: role.to_string(),
110 content: vec![AnthropicContentBlock {
111 kind: "text",
112 text,
113 cache_control,
114 }],
115 });
116 }
117
118 (system_prompt, messages)
119}
120
121#[async_trait]
122impl LanguageModel for AnthropicMessagesModel {
123 async fn generate(&self, request: LlmRequest) -> anyhow::Result<LlmResponse> {
124 let (system_prompt, messages) = to_anthropic_messages(&request);
125 let body = AnthropicRequest {
126 model: self.config.model.clone(),
127 max_tokens: self.config.max_output_tokens,
128 system: system_prompt,
129 messages,
130 };
131
132 let url = self
133 .config
134 .api_url
135 .as_deref()
136 .unwrap_or("https://api.anthropic.com/v1/messages");
137 let version = self.config.api_version.as_deref().unwrap_or("2023-06-01");
138
139 let response = self
140 .client
141 .post(url)
142 .header("x-api-key", &self.config.api_key)
143 .header("anthropic-version", version)
144 .json(&body)
145 .send()
146 .await?
147 .error_for_status()?;
148
149 let data: AnthropicResponse = response.json().await?;
150 let text = data
151 .content
152 .into_iter()
153 .find_map(|block| (block.kind == "text").then(|| block.text.unwrap_or_default()))
154 .unwrap_or_default();
155
156 Ok(LlmResponse {
157 message: AgentMessage {
158 role: MessageRole::Agent,
159 content: MessageContent::Text(text),
160 metadata: None,
161 },
162 })
163 }
164}
165
#[cfg(test)]
mod tests {
    use super::*;

    /// The conversion must surface the system prompt unchanged and turn a
    /// lone user message into exactly one "user" turn with its text intact.
    #[test]
    fn anthropic_message_conversion_includes_system_prompt() {
        let request = LlmRequest {
            system_prompt: String::from("You are helpful"),
            messages: vec![AgentMessage {
                role: MessageRole::User,
                content: MessageContent::Text(String::from("Hello")),
                metadata: None,
            }],
        };

        let (system, converted) = to_anthropic_messages(&request);

        assert_eq!(system, "You are helpful");
        assert_eq!(converted.len(), 1);
        let turn = &converted[0];
        assert_eq!(turn.role, "user");
        assert_eq!(turn.content[0].text, "Hello");
    }
}
186}