//! agent 0.0.1
//!
//! A flexible AI Agent SDK for building intelligent agents.
//!
//! See the crate documentation for details.
use crate::error::{AgentError, Result};
use crate::message::{Message, MessageRole};
use crate::provider::{ModelConfig, ModelProvider, ModelResponse, Usage};
use async_trait::async_trait;
use serde::Deserialize;
use serde_json::json;

/// Model provider backed by the Anthropic Messages API.
pub struct AnthropicProvider {
    // API key sent on each request via the `x-api-key` header.
    api_key: String,
    // Base URL for the API (set to the public Anthropic endpoint by `new`).
    base_url: String,
    // HTTP client reused across requests (connection pooling).
    client: reqwest::Client,
}

impl AnthropicProvider {
    /// Creates a provider that talks to the public Anthropic API endpoint
    /// with the given API key.
    pub fn new(api_key: impl Into<String>) -> Self {
        Self {
            api_key: api_key.into(),
            base_url: String::from("https://api.anthropic.com/v1"),
            client: reqwest::Client::new(),
        }
    }

    /// Splits out the system prompt (the Messages API takes it as a
    /// top-level field, not a message role) and converts the remaining
    /// messages into Anthropic's wire format.
    ///
    /// Tool messages are relayed with the `user` role. If several system
    /// messages are present, the last one wins.
    fn convert_messages(&self, messages: Vec<Message>) -> (Option<String>, Vec<serde_json::Value>) {
        let mut system_prompt = None;
        let mut converted = Vec::with_capacity(messages.len());

        for msg in messages {
            let role = match msg.role {
                MessageRole::System => {
                    // Captured separately; not part of the messages array.
                    system_prompt = Some(msg.content);
                    continue;
                }
                MessageRole::Assistant => "assistant",
                MessageRole::User | MessageRole::Tool => "user",
            };
            converted.push(json!({
                "role": role,
                "content": msg.content
            }));
        }

        (system_prompt, converted)
    }
}

/// Deserialized body of a successful Anthropic Messages API response.
/// Only the fields this provider reads are declared; serde ignores the rest.
#[derive(Debug, Deserialize)]
struct AnthropicResponse {
    // Ordered content blocks of the assistant reply.
    content: Vec<ContentBlock>,
    // Model identifier echoed back by the API.
    model: String,
    // Token accounting for this request.
    usage: AnthropicUsage,
    // Why generation stopped (e.g. end of turn) — passed through as-is.
    stop_reason: Option<String>,
}

#[derive(Debug, Deserialize)]
struct ContentBlock {
    text: String,
}

/// Token usage figures reported by the Anthropic API.
#[derive(Debug, Deserialize)]
struct AnthropicUsage {
    // Tokens consumed by the prompt.
    input_tokens: usize,
    // Tokens generated in the completion.
    output_tokens: usize,
}

#[async_trait]
impl ModelProvider for AnthropicProvider {
    fn name(&self) -> &str {
        "anthropic"
    }

    /// Sends a non-streaming completion request to the Anthropic Messages API.
    ///
    /// # Errors
    /// Returns `AgentError::ExecutionError` when the HTTP request fails, the
    /// API responds with a non-success status, or the body cannot be parsed.
    async fn complete(&self, messages: Vec<Message>, config: &ModelConfig) -> Result<ModelResponse> {
        let url = format!("{}/messages", self.base_url);
        let (system_prompt, converted_messages) = self.convert_messages(messages);

        let mut body = json!({
            "model": config.model,
            "messages": converted_messages,
            // max_tokens is required by the Messages API; default when unset.
            "max_tokens": config.max_tokens.unwrap_or(4096),
            "temperature": config.temperature,
        });

        // The system prompt is a top-level field, not a message role.
        if let Some(system) = system_prompt {
            body["system"] = json!(system);
        }

        let response = self
            .client
            .post(&url)
            .header("x-api-key", &self.api_key)
            .header("anthropic-version", "2023-06-01")
            .header("Content-Type", "application/json")
            .json(&body)
            .send()
            .await
            .map_err(|e| AgentError::ExecutionError(format!("Anthropic API request failed: {}", e)))?;

        // Capture the status before `text()` consumes the response, so the
        // error is diagnosable even when the error body is empty.
        let status = response.status();
        if !status.is_success() {
            let error_text = response.text().await.unwrap_or_default();
            return Err(AgentError::ExecutionError(format!(
                "Anthropic API error ({}): {}",
                status, error_text
            )));
        }

        let api_response: AnthropicResponse = response
            .json()
            .await
            .map_err(|e| AgentError::ExecutionError(format!("Failed to parse Anthropic response: {}", e)))?;

        // Concatenate every content block's text rather than taking only the
        // first: the API may split a reply across multiple blocks.
        let content: String = api_response
            .content
            .iter()
            .map(|c| c.text.as_str())
            .collect();

        Ok(ModelResponse {
            content,
            model: api_response.model,
            usage: Some(Usage {
                prompt_tokens: api_response.usage.input_tokens,
                completion_tokens: api_response.usage.output_tokens,
                total_tokens: api_response.usage.input_tokens + api_response.usage.output_tokens,
            }),
            finish_reason: api_response.stop_reason,
        })
    }

    /// Streaming is not implemented for this provider yet; always errors.
    async fn stream_complete(
        &self,
        _messages: Vec<Message>,
        _config: &ModelConfig,
    ) -> Result<Box<dyn futures::Stream<Item = Result<String>> + Unpin + Send>> {
        Err(AgentError::ExecutionError(
            "Streaming not yet implemented for Anthropic".to_string(),
        ))
    }
}