adk_model/
lib.rs

1//! # adk-model
2//!
3//! LLM model integrations for ADK (Gemini, OpenAI, Anthropic, DeepSeek, etc.).
4//!
5//! ## Overview
6//!
7//! This crate provides LLM implementations for ADK agents. Currently supports:
8//!
//! - [`GeminiModel`] - Google's Gemini models (2.5 Flash, 1.5 Pro, etc.)
10//! - `OpenAIClient` - OpenAI models (GPT-4o, GPT-4o-mini, etc.) - requires `openai` feature
11//! - `AzureOpenAIClient` - Azure OpenAI Service - requires `openai` feature
12//! - `AnthropicClient` - Anthropic Claude models (Claude 4, Claude 3.5, etc.) - requires `anthropic` feature
13//! - `DeepSeekClient` - DeepSeek models (deepseek-chat, deepseek-reasoner) - requires `deepseek` feature
14//! - `OllamaModel` - Local LLMs via Ollama (LLaMA, Mistral, Qwen, etc.) - requires `ollama` feature
15//! - `GroqClient` - Groq ultra-fast inference (LLaMA, Mixtral, Gemma) - requires `groq` feature
16//! - [`MockLlm`] - Mock LLM for testing
17//!
18//! ## Quick Start
19//!
20//! ### Gemini
21//!
22//! ```rust,no_run
23//! use adk_model::GeminiModel;
24//! use std::sync::Arc;
25//!
26//! let api_key = std::env::var("GOOGLE_API_KEY").unwrap();
27//! let model = GeminiModel::new(&api_key, "gemini-2.5-flash").unwrap();
28//! ```
29//!
30//! ### OpenAI
31//!
32//! ```rust,ignore
33//! use adk_model::openai::{OpenAIClient, OpenAIConfig};
34//!
35//! let model = OpenAIClient::new(OpenAIConfig::new(
36//!     std::env::var("OPENAI_API_KEY").unwrap(),
37//!     "gpt-4o-mini",
38//! )).unwrap();
39//! ```
40//!
41//! ### Anthropic
42//!
43//! ```rust,ignore
44//! use adk_model::anthropic::{AnthropicClient, AnthropicConfig};
45//!
46//! let model = AnthropicClient::new(AnthropicConfig::new(
47//!     std::env::var("ANTHROPIC_API_KEY").unwrap(),
48//!     "claude-sonnet-4-20250514",
49//! )).unwrap();
50//! ```
51//!
52//! ### DeepSeek
53//!
54//! ```rust,ignore
55//! use adk_model::deepseek::{DeepSeekClient, DeepSeekConfig};
56//!
57//! // Chat model
58//! let chat = DeepSeekClient::chat(std::env::var("DEEPSEEK_API_KEY").unwrap()).unwrap();
59//!
60//! // Reasoner with thinking mode
61//! let reasoner = DeepSeekClient::reasoner(std::env::var("DEEPSEEK_API_KEY").unwrap()).unwrap();
62//! ```
63//!
64//! ## Supported Models
65//!
66//! ### Gemini
67//! | Model | Description |
68//! |-------|-------------|
69//! | `gemini-2.5-flash` | Fast, efficient model (recommended) |
70//! | `gemini-1.5-pro` | Most capable model |
71//! | `gemini-1.5-flash` | Balanced speed/capability |
72//!
73//! ### OpenAI
74//! | Model | Description |
75//! |-------|-------------|
76//! | `gpt-4o` | Most capable model |
77//! | `gpt-4o-mini` | Fast, cost-effective |
78//! | `gpt-4-turbo` | Previous generation flagship |
79//!
80//! ### Anthropic
81//! | Model | Description |
82//! |-------|-------------|
83//! | `claude-sonnet-4-20250514` | Latest Claude 4 Sonnet |
84//! | `claude-3-5-sonnet-20241022` | Claude 3.5 Sonnet |
85//! | `claude-3-opus-20240229` | Most capable Claude 3 |
86//!
87//! ### DeepSeek
88//! | Model | Description |
89//! |-------|-------------|
90//! | `deepseek-chat` | Fast, capable chat model |
91//! | `deepseek-reasoner` | Reasoning model with thinking mode |
92//!
93//! ## Features
94//!
95//! - Async streaming with backpressure
96//! - Tool/function calling support
97//! - Multimodal input (text, images, audio, video, PDF)
98//! - Generation configuration (temperature, top_p, etc.)
99//! - OpenAI-compatible APIs (Ollama, vLLM, etc.)
100//!
101//! ### Ollama (Local)
102//!
103//! ```rust,ignore
104//! use adk_model::ollama::{OllamaModel, OllamaConfig};
105//!
106//! // Default: localhost:11434
107//! let model = OllamaModel::new(OllamaConfig::new("llama3.2")).unwrap();
108//! ```
109
/// Anthropic Claude models (requires the `anthropic` feature).
#[cfg(feature = "anthropic")]
pub mod anthropic;
/// DeepSeek chat and reasoner models (requires the `deepseek` feature).
#[cfg(feature = "deepseek")]
pub mod deepseek;
/// Google Gemini models (requires the `gemini` feature).
#[cfg(feature = "gemini")]
pub mod gemini;
/// Groq fast-inference models (requires the `groq` feature).
#[cfg(feature = "groq")]
pub mod groq;
/// Mock LLM for testing; always compiled in (no feature gate).
pub mod mock;
/// Local LLMs via Ollama (requires the `ollama` feature).
#[cfg(feature = "ollama")]
pub mod ollama;
/// OpenAI and Azure OpenAI models (requires the `openai` feature).
#[cfg(feature = "openai")]
pub mod openai;
123
124#[cfg(feature = "anthropic")]
125pub use anthropic::AnthropicClient;
126#[cfg(feature = "deepseek")]
127pub use deepseek::{DeepSeekClient, DeepSeekConfig};
128#[cfg(feature = "gemini")]
129pub use gemini::GeminiModel;
130#[cfg(feature = "groq")]
131pub use groq::{GroqClient, GroqConfig};
132pub use mock::MockLlm;
133#[cfg(feature = "ollama")]
134pub use ollama::{OllamaConfig, OllamaModel};
135#[cfg(feature = "openai")]
136pub use openai::{AzureConfig, AzureOpenAIClient, OpenAIClient, OpenAIConfig};