// llm_kit_anthropic/lib.rs

//! # Anthropic Provider for LLM Kit
//!
//! This crate provides a comprehensive Rust implementation of the Anthropic API provider
//! for the LLM Kit. It enables building AI-powered applications using Claude models with
//! full support for text generation, streaming, tool calling, and advanced features like
//! extended thinking, citations, and prompt caching.
//!
//! ## Features
//!
//! - **Text Generation**: Generate text using Claude models with `GenerateText` builder
//! - **Streaming**: Stream responses in real-time with `StreamText`
//! - **Tool Calling**: Support for both custom tools and Anthropic provider-defined tools
//! - **Extended Thinking**: Enable Claude's reasoning process with thinking blocks
//! - **Citations**: Enable source citations for generated content
//! - **Prompt Caching**: Reduce costs with automatic prompt caching
//! - **Vision**: Support for image inputs in prompts
//! - **Multiple Models**: Support for all Claude models (Opus, Sonnet, Haiku)
//!
//! ## Quick Start
//!
//! ### Using the Client Builder (Recommended)
//!
//! ```rust,no_run
//! use llm_kit_anthropic::AnthropicClient;
//! use llm_kit_provider::LanguageModel;
//! use llm_kit_provider::language_model::call_options::LanguageModelCallOptions;
//! use llm_kit_provider::language_model::prompt::LanguageModelMessage;
//! use llm_kit_provider::language_model::content::LanguageModelContent;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     // Create provider using the client builder
//!     let provider = AnthropicClient::new()
//!         .api_key("your-api-key")
//!         .build();
//!
//!     // Create a language model
//!     let model = provider.language_model("claude-3-5-sonnet-20241022".to_string());
//!
//!     // Generate text
//!     let options = LanguageModelCallOptions::new(
//!         vec![LanguageModelMessage::user_text("Hello, Claude!")]
//!     )
//!     .with_temperature(0.7)
//!     .with_max_output_tokens(100);
//!
//!     let result = model.do_generate(options).await?;
//!
//!     // Print every text content block
//!     for content in &result.content {
//!         if let LanguageModelContent::Text(text_content) = content {
//!             println!("{}", text_content.text);
//!         }
//!     }
//!     Ok(())
//! }
//! ```
//!
//! ### Using Settings Directly (Alternative)
//!
//! ```rust,no_run
//! use llm_kit_anthropic::{AnthropicProvider, AnthropicProviderSettings};
//! use llm_kit_provider::{Provider, LanguageModel};
//! use llm_kit_provider::language_model::call_options::LanguageModelCallOptions;
//! use llm_kit_provider::language_model::prompt::LanguageModelMessage;
//! use llm_kit_provider::language_model::content::LanguageModelContent;
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     // Create provider (uses ANTHROPIC_API_KEY env var)
//!     let provider = AnthropicProvider::new(AnthropicProviderSettings::default());
//!
//!     // Create a language model
//!     let model = provider.language_model("claude-3-5-sonnet-20241022".to_string());
//!
//!     // Generate text
//!     let options = LanguageModelCallOptions::new(
//!         vec![LanguageModelMessage::user_text("Hello, Claude!")]
//!     )
//!     .with_temperature(0.7)
//!     .with_max_output_tokens(100);
//!
//!     let result = model.do_generate(options).await?;
//!
//!     // Print every text content block
//!     for content in &result.content {
//!         if let LanguageModelContent::Text(text_content) = content {
//!             println!("{}", text_content.text);
//!         }
//!     }
//!     Ok(())
//! }
//! ```
//!
//! ## Tool Calling
//!
//! The Anthropic provider supports both custom tools and provider-defined tools.
//! For tool calling examples, see the `anthropic_tools` module documentation and
//! the high-level `GenerateText` API from `llm-kit-core`.
//!
//! ## Provider-Defined Tools
//!
//! Anthropic provides several powerful provider-defined tools:
//!
//! - **Bash**: Execute shell commands (`bash_20241022`, `bash_20250124`)
//! - **Computer Use**: Control desktop environments (`computer_20241022`, `computer_20250124`)
//! - **Code Execution**: Run Python/Bash code (`code_execution_20250522`, `code_execution_20250825`)
//! - **Text Editor**: Edit files (`text_editor_20241022`, `text_editor_20250124`, `text_editor_20250728`)
//! - **Web Search**: Search the web with citations (`web_search_20250305`)
//! - **Web Fetch**: Fetch web content (`web_fetch_20250910`)
//! - **Memory**: Persistent memory across conversations (`memory_20250818`)
//!
//! ## Advanced Features
//!
//! ### Extended Thinking
//!
//! Extended thinking is supported through provider options (implementation in progress).
//!
//! ### Streaming
//!
//! Stream responses for real-time output:
//!
//! ```rust,no_run
//! use llm_kit_anthropic::{AnthropicProvider, AnthropicProviderSettings};
//! use llm_kit_provider::{Provider, LanguageModel};
//! use llm_kit_provider::language_model::call_options::LanguageModelCallOptions;
//! use llm_kit_provider::language_model::prompt::LanguageModelMessage;
//! use futures_util::StreamExt;
//!
//! # async fn example() -> Result<(), Box<dyn std::error::Error>> {
//! let provider = AnthropicProvider::new(AnthropicProviderSettings::default());
//! let model = provider.language_model("claude-3-5-sonnet-20241022".to_string());
//!
//! let options = LanguageModelCallOptions::new(
//!     vec![LanguageModelMessage::user_text("Write a story")]
//! )
//! .with_temperature(0.8);
//!
//! let result = model.do_stream(options).await?;
//!
//! let mut stream = result.stream;
//! while let Some(part) = stream.next().await {
//!     // Handle stream part
//! }
//! # Ok(())
//! # }
//! ```
//!
//! ## Configuration
//!
//! ### Using the Client Builder (Recommended)
//!
//! ```rust
//! use llm_kit_anthropic::AnthropicClient;
//!
//! let provider = AnthropicClient::new()
//!     .api_key("your-api-key")
//!     .base_url("https://api.anthropic.com/v1")
//!     .header("Custom-Header", "value")
//!     .name("my-anthropic-provider")
//!     .build();
//! ```
//!
//! ### Using Settings Directly (Alternative)
//!
//! ```rust
//! use llm_kit_anthropic::{AnthropicProvider, AnthropicProviderSettings};
//! use std::collections::HashMap;
//!
//! let settings = AnthropicProviderSettings::new()
//!     .with_api_key("your-api-key")
//!     .with_base_url("https://api.anthropic.com/v1")
//!     .add_header("Custom-Header", "value")
//!     .with_name("my-anthropic-provider");
//!
//! let provider = AnthropicProvider::new(settings);
//! ```
//!
//! ## Environment Variables
//!
//! - `ANTHROPIC_API_KEY`: API key for authentication (required)
//! - `ANTHROPIC_BASE_URL`: Custom base URL (optional, defaults to `https://api.anthropic.com/v1`)
//!
//! ## Error Handling
//!
//! The provider uses the `AnthropicError` type for error handling:
//!
//! ```rust,no_run
//! use llm_kit_anthropic::{AnthropicProvider, AnthropicProviderSettings, AnthropicError};
//! use llm_kit_provider::{Provider, LanguageModel};
//! use llm_kit_provider::language_model::call_options::LanguageModelCallOptions;
//! use llm_kit_provider::language_model::prompt::LanguageModelMessage;
//! use llm_kit_provider::language_model::content::LanguageModelContent;
//!
//! # async fn example() {
//! let provider = AnthropicProvider::new(AnthropicProviderSettings::default());
//! let model = provider.language_model("claude-3-5-sonnet-20241022".to_string());
//!
//! let options = LanguageModelCallOptions::new(
//!     vec![LanguageModelMessage::user_text("Hello")]
//! );
//!
//! match model.do_generate(options).await {
//!     Ok(result) => {
//!         for content in &result.content {
//!             if let LanguageModelContent::Text(text_content) = content {
//!                 println!("{}", text_content.text);
//!             }
//!         }
//!     },
//!     Err(e) => eprintln!("Error: {}", e),
//! }
//! # }
//! ```

216/// Anthropic provider-defined tools namespace.
217pub mod anthropic_tools;
218/// Client builder for creating Anthropic providers.
219pub mod client;
220/// Internal module for converting prompts to Anthropic message format.
221mod convert_to_message_prompt;
222/// Error types for Anthropic provider.
223pub mod error;
224/// Utilities for cache control breakpoint management.
225pub mod get_cache_control;
226/// Language model implementation for Anthropic.
227pub mod language_model;
228/// Utilities for mapping stop reasons.
229pub mod map_stop_reason;
230/// Options and settings for Anthropic models.
231pub mod options;
232/// Tool preparation utilities.
233pub mod prepare_tools;
234/// Prompt and message types.
235pub mod prompt;
236/// Provider implementation.
237pub mod provider;
238/// Provider metadata utilities.
239pub mod provider_metadata_utils;
240/// Provider-defined tool factory functions.
241pub mod provider_tool;
242/// Settings and configuration for Anthropic providers.
243pub mod settings;
244
245// Re-export main types for convenience
246pub use client::AnthropicClient;
247pub use error::{AnthropicError, AnthropicErrorData, AnthropicErrorDetails, parse_anthropic_error};
248pub use language_model::{
249    response_schema::{AnthropicMessagesResponse, ContentBlock, Usage},
250    stream_schema::{AnthropicChunk, ContentBlockDelta, ContentBlockStart},
251};
252pub use provider::AnthropicProvider;
253pub use settings::AnthropicProviderSettings;