use reqwest::Client;
use serde_json::{Value, json};
use crate::{
EmbeddingModel, Error, FinishReason, LanguageModel, ModelMessage, ModelRequest, ModelResponse,
Part, ProviderRegistration, Result, Role, ToolChoice,
};
/// A language model served by Anthropic's Messages API.
///
/// Holds only the model identifier; the API key is read from the
/// environment at request time (see `anthropic_api_key`).
#[derive(Clone, Debug)]
pub struct AnthropicLanguageModel {
    // Bare model name, without the "anthropic/" provider prefix.
    model_id: String,
}
impl LanguageModel for AnthropicLanguageModel {
fn model_id(&self) -> &str {
&self.model_id
}
fn generate<'a>(&'a self, request: &'a ModelRequest) -> crate::ModelFuture<'a, ModelResponse> {
Box::pin(async move {
let (status, body) = anthropic_post_json(
"https://api.anthropic.com/v1/messages",
anthropic_request(&self.model_id, request),
)
.await?;
if !(200..300).contains(&status) {
return Err(Error::Api(anthropic_error_message(&body)));
}
anthropic_response_to_model_response(&self.model_id, &body)
})
}
}
/// Provider constructor for Anthropic language models.
///
/// An empty id means the caller supplied the "anthropic/" prefix with
/// nothing after the slash, which is reported as an unsupported model.
fn anthropic_language_model(model_id: &str) -> Result<Box<dyn LanguageModel>> {
    if model_id.is_empty() {
        return Err(Error::UnsupportedModel("anthropic/".to_string()));
    }
    let model = AnthropicLanguageModel {
        model_id: model_id.to_string(),
    };
    Ok(Box::new(model))
}
fn anthropic_embedding_model(_model_id: &str) -> Result<Box<dyn EmbeddingModel>> {
Err(Error::UnsupportedModel(
"anthropic embeddings are not supported".to_string(),
))
}
// Register this provider with the global `inventory` registry so the
// "anthropic" provider id resolves to the constructors above at runtime.
inventory::submit! {
    ProviderRegistration {
        id: "anthropic",
        language_model: anthropic_language_model,
        embedding_model: anthropic_embedding_model,
    }
}
fn anthropic_api_key() -> Result<String> {
std::env::var("ANTHROPIC_API_KEY")
.map_err(|_| Error::MissingEnvironmentVariable("ANTHROPIC_API_KEY"))
}
/// POSTs `body` as JSON to `url` with Anthropic auth headers and returns the
/// HTTP status together with the parsed JSON response body.
///
/// Transport failures become [`Error::Http`]; a body that is not valid JSON
/// becomes [`Error::Json`]. Missing credentials short-circuit before any
/// network activity.
async fn anthropic_post_json(url: &'static str, body: Value) -> Result<(u16, Value)> {
    let api_key = anthropic_api_key()?;
    let client = Client::builder()
        .build()
        .map_err(|error| Error::Http(error.to_string()))?;
    let response = client
        .post(url)
        .header("x-api-key", api_key)
        // Pinned API revision required by Anthropic on every request.
        .header("anthropic-version", "2023-06-01")
        .json(&body)
        .send()
        .await
        .map_err(|error| Error::Http(error.to_string()))?;
    let status = response.status().as_u16();
    let json = response
        .json::<Value>()
        .await
        .map_err(|error| Error::Json(error.to_string()))?;
    Ok((status, json))
}
/// Builds the JSON payload for the Anthropic Messages endpoint.
///
/// System messages are pulled out of the history and concatenated into the
/// top-level `system` field (Anthropic does not accept a system role inside
/// `messages`). Optional settings and tool definitions are attached only
/// when present; `tool_choice` is only meaningful alongside `tools`, so it
/// is set solely in that branch.
fn anthropic_request(model_id: &str, request: &ModelRequest) -> Value {
    let system_chunks: Vec<_> = request
        .messages
        .iter()
        .filter(|message| message.role == Role::System)
        .map(ModelMessage::text)
        .collect();
    let system = system_chunks.join("\n\n");

    let mut body = json!({
        "model": model_id,
        "messages": anthropic_messages(&request.messages),
        // `max_tokens` is mandatory for this endpoint; fall back to 1024
        // when the caller did not set a limit.
        "max_tokens": request.settings.max_output_tokens.unwrap_or(1024),
    });
    if !system.is_empty() {
        body["system"] = Value::String(system);
    }
    if let Some(temperature) = request.settings.temperature {
        body["temperature"] = json!(temperature);
    }
    if request.tools.is_empty() {
        return body;
    }

    let tools: Vec<Value> = request
        .tools
        .iter()
        .map(|tool| {
            json!({
                "name": tool.name,
                "description": tool.description,
                "input_schema": super::openai::tool_schema_json(&tool.input_schema),
            })
        })
        .collect();
    body["tools"] = Value::Array(tools);
    if let ToolChoice::Required(name) = &request.tool_choice {
        body["tool_choice"] = json!({ "type": "tool", "name": name });
    }
    body
}
/// Converts the chat history into Anthropic `messages` entries.
///
/// System messages are excluded here (they travel in the top-level `system`
/// field instead). User messages are flattened to a single text part;
/// assistant messages keep their non-empty text and tool-call parts; tool
/// messages are rendered as user-role `tool_result` entries, which is the
/// shape the Messages API expects for tool output.
fn anthropic_messages(messages: &[ModelMessage]) -> Vec<Value> {
    messages
        .iter()
        .filter(|message| message.role != Role::System)
        .map(|message| match message.role {
            Role::User => json!({
                "role": "user",
                "content": [{ "type": "text", "text": message.text() }],
            }),
            Role::Assistant => json!({
                "role": "assistant",
                // Keep non-empty text and tool calls; every other part kind is
                // dropped from the assistant turn.
                "content": message
                    .parts
                    .iter()
                    .filter_map(|part| match part {
                        Part::Text(text) if !text.is_empty() => Some(json!({ "type": "text", "text": text })),
                        Part::ToolCall(call) => Some(json!({
                            "type": "tool_use",
                            "id": call.id,
                            "name": call.name,
                            // The stored tool input is a JSON string; if it does not
                            // parse, wrap the raw text so the request stays valid JSON.
                            "input": serde_json::from_str::<Value>(&call.input).unwrap_or_else(|_| json!({ "input": call.input })),
                        })),
                        _ => None,
                    })
                    .collect::<Vec<_>>(),
            }),
            Role::Tool => {
                // Only the first ToolResult part is forwarded.
                // NOTE(review): additional ToolResult parts in the same message
                // are silently dropped — confirm callers emit one result per
                // tool message.
                let result = message.parts.iter().find_map(|part| match part {
                    Part::ToolResult(result) => Some(result),
                    _ => None,
                });
                match result {
                    Some(result) => json!({
                        "role": "user",
                        "content": [{
                            "type": "tool_result",
                            "tool_use_id": result.call_id,
                            "content": result.output,
                            "is_error": result.is_error,
                        }],
                    }),
                    // No ToolResult part present: fall back to plain text.
                    None => json!({
                        "role": "user",
                        "content": [{ "type": "text", "text": message.text() }],
                    }),
                }
            }
            // System messages were filtered out above.
            Role::System => unreachable!(),
        })
        .collect()
}
/// Parses a successful Messages API body into a [`ModelResponse`].
///
/// Walks the `content` array, collecting non-empty `text` blocks and
/// `tool_use` blocks (whose `input` object is re-serialized to a JSON
/// string for [`crate::ToolCall`]). Unrecognized block types are ignored.
///
/// # Errors
/// Returns [`Error::Parse`] when `content` is absent or a `tool_use` block
/// is missing its `id`, `name`, or `input`.
fn anthropic_response_to_model_response(model_id: &str, body: &Value) -> Result<ModelResponse> {
    let content = body
        .get("content")
        .and_then(Value::as_array)
        .ok_or_else(|| Error::Parse("missing content".to_string()))?;
    let mut parts = Vec::new();
    for item in content {
        match item.get("type").and_then(Value::as_str) {
            Some("text") => {
                // Skip empty text blocks so callers never see blank parts.
                if let Some(text) = item.get("text").and_then(Value::as_str) {
                    if !text.is_empty() {
                        parts.push(Part::Text(text.to_string()));
                    }
                }
            }
            Some("tool_use") => {
                let id = item
                    .get("id")
                    .and_then(Value::as_str)
                    .ok_or_else(|| Error::Parse("missing tool use id".to_string()))?;
                let name = item
                    .get("name")
                    .and_then(Value::as_str)
                    .ok_or_else(|| Error::Parse("missing tool use name".to_string()))?;
                // `input` arrives as a JSON object; store its serialized form.
                let input = item
                    .get("input")
                    .map(Value::to_string)
                    .ok_or_else(|| Error::Parse("missing tool use input".to_string()))?;
                parts.push(Part::ToolCall(crate::ToolCall {
                    id: id.to_string(),
                    name: name.to_string(),
                    input,
                }));
            }
            _ => {}
        }
    }
    Ok(ModelResponse {
        parts,
        finish_reason: anthropic_finish_reason(body.get("stop_reason").and_then(Value::as_str)),
        // NOTE(review): this reuses the OpenAI usage parser, but Anthropic
        // reports token counts as `usage.input_tokens`/`usage.output_tokens`,
        // not OpenAI's key names — verify `openai_usage` handles both shapes,
        // otherwise usage is likely always empty here.
        usage: super::openai::openai_usage(body),
        response_metadata: crate::metadata_with_provider("anthropic", model_id),
    })
}
/// Maps Anthropic's `stop_reason` onto the crate-level [`FinishReason`].
///
/// The Messages API documents `stop_reason` as one of `end_turn`,
/// `max_tokens`, `stop_sequence`, or `tool_use` (it is null mid-stream).
/// `stop_sequence` means generation halted on a caller-supplied stop
/// string — a normal completion, not a failure — so it maps to `Stop`;
/// previously it fell into the catch-all `Error` arm. Unknown values still
/// map to `Error`.
fn anthropic_finish_reason(reason: Option<&str>) -> FinishReason {
    match reason {
        Some("tool_use") => FinishReason::ToolCalls,
        Some("max_tokens") => FinishReason::Length,
        Some("end_turn") | Some("stop_sequence") | None => FinishReason::Stop,
        _ => FinishReason::Error,
    }
}
/// Extracts a human-readable message from an Anthropic error body.
///
/// Error bodies have the shape `{"error": {"message": "..."}}`; any body
/// that does not match yields the generic fallback text.
fn anthropic_error_message(body: &Value) -> String {
    let message = body
        .get("error")
        .and_then(|error| error.get("message"))
        .and_then(Value::as_str);
    match message {
        Some(text) => text.to_string(),
        None => "unknown Anthropic error".to_string(),
    }
}