// adk_model/lib.rs
//! # adk-model
//!
//! LLM model integrations for ADK (Gemini, OpenAI, Anthropic, DeepSeek, etc.).
//!
//! ## Overview
//!
//! This crate provides LLM implementations for ADK agents. Currently supports:
//!
//! - [`GeminiModel`] - Google's Gemini models (2.0 Flash, Pro, etc.)
//! - `OpenAIClient` - OpenAI models (GPT-4o, GPT-4o-mini, etc.) - requires `openai` feature
//! - `AzureOpenAIClient` - Azure OpenAI Service - requires `openai` feature
//! - `AnthropicClient` - Anthropic Claude models (Claude 4, Claude 3.5, etc.) - requires `anthropic` feature
//! - `DeepSeekClient` - DeepSeek models (deepseek-chat, deepseek-reasoner) - requires `deepseek` feature
//! - [`MockLlm`] - Mock LLM for testing
//!
//! ## Quick Start
//!
//! ### Gemini
//!
//! ```rust,no_run
//! use adk_model::GeminiModel;
//! use std::sync::Arc;
//!
//! let api_key = std::env::var("GOOGLE_API_KEY").unwrap();
//! let model = GeminiModel::new(&api_key, "gemini-2.5-flash").unwrap();
//! ```
//!
//! ### OpenAI
//!
//! ```rust,ignore
//! use adk_model::openai::{OpenAIClient, OpenAIConfig};
//!
//! let model = OpenAIClient::new(OpenAIConfig::new(
//!     std::env::var("OPENAI_API_KEY").unwrap(),
//!     "gpt-4o-mini",
//! )).unwrap();
//! ```
//!
//! ### Anthropic
//!
//! ```rust,ignore
//! use adk_model::anthropic::{AnthropicClient, AnthropicConfig};
//!
//! let model = AnthropicClient::new(AnthropicConfig::new(
//!     std::env::var("ANTHROPIC_API_KEY").unwrap(),
//!     "claude-sonnet-4-20250514",
//! )).unwrap();
//! ```
//!
//! ### DeepSeek
//!
//! ```rust,ignore
//! use adk_model::deepseek::{DeepSeekClient, DeepSeekConfig};
//!
//! // Chat model
//! let chat = DeepSeekClient::chat(std::env::var("DEEPSEEK_API_KEY").unwrap()).unwrap();
//!
//! // Reasoner with thinking mode
//! let reasoner = DeepSeekClient::reasoner(std::env::var("DEEPSEEK_API_KEY").unwrap()).unwrap();
//! ```
//!
//! ## Supported Models
//!
//! ### Gemini
//! | Model | Description |
//! |-------|-------------|
//! | `gemini-2.5-flash` | Fast, efficient model (recommended) |
//! | `gemini-1.5-pro` | Most capable model |
//! | `gemini-1.5-flash` | Balanced speed/capability |
//!
//! ### OpenAI
//! | Model | Description |
//! |-------|-------------|
//! | `gpt-4o` | Most capable model |
//! | `gpt-4o-mini` | Fast, cost-effective |
//! | `gpt-4-turbo` | Previous generation flagship |
//!
//! ### Anthropic
//! | Model | Description |
//! |-------|-------------|
//! | `claude-sonnet-4-20250514` | Latest Claude 4 Sonnet |
//! | `claude-3-5-sonnet-20241022` | Claude 3.5 Sonnet |
//! | `claude-3-opus-20240229` | Most capable Claude 3 |
//!
//! ### DeepSeek
//! | Model | Description |
//! |-------|-------------|
//! | `deepseek-chat` | Fast, capable chat model |
//! | `deepseek-reasoner` | Reasoning model with thinking mode |
//!
//! ## Features
//!
//! - Async streaming with backpressure
//! - Tool/function calling support
//! - Multimodal input (text, images, audio, video, PDF)
//! - Generation configuration (temperature, top_p, etc.)
//! - OpenAI-compatible APIs (Ollama, vLLM, etc.)

99#[cfg(feature = "anthropic")]
100pub mod anthropic;
101#[cfg(feature = "deepseek")]
102pub mod deepseek;
103#[cfg(feature = "gemini")]
104pub mod gemini;
105pub mod mock;
106#[cfg(feature = "openai")]
107pub mod openai;
108
109#[cfg(feature = "anthropic")]
110pub use anthropic::AnthropicClient;
111#[cfg(feature = "deepseek")]
112pub use deepseek::{DeepSeekClient, DeepSeekConfig};
113#[cfg(feature = "gemini")]
114pub use gemini::GeminiModel;
115pub use mock::MockLlm;
116#[cfg(feature = "openai")]
117pub use openai::{AzureConfig, AzureOpenAIClient, OpenAIClient, OpenAIConfig};