serdes_ai_providers/lib.rs

//! Provider abstractions for serdes-ai.
//!
//! This crate provides a unified interface for different AI providers:
//!
//! - **OpenAI** - GPT-4o, GPT-4 Turbo, o1, o3-mini
//! - **Anthropic** - Claude 3.5 Sonnet, Claude 3 Opus
//! - **Google** - Gemini 2.0 Flash, Gemini Pro
//! - **Azure** - Azure OpenAI Service
//! - **Groq** - Ultra-fast Llama, Mixtral
//! - **Mistral** - Mistral Large, Codestral
//! - **Ollama** - Local models
//! - **Together AI** - Open models
//! - **Fireworks** - Fast inference
//! - **DeepSeek** - DeepSeek Chat, DeepSeek R1
//! - **OpenRouter** - Multi-provider routing
//! - **Cohere** - Command R+
//! - **Gateway** - AI gateways (Portkey, LiteLLM, Helicone, Cloudflare)
//!
//! ## Example
//!
//! ```rust,ignore
//! use serdes_ai_providers::{ProviderRegistry, OpenAIProvider};
//! use std::sync::Arc;
//!
//! // Create a registry
//! let registry = ProviderRegistry::new();
//!
//! // Register providers
//! registry.register(Arc::new(OpenAIProvider::new("sk-...")));
//!
//! // Infer provider from model string
//! let (provider, model) = registry.infer_provider("openai:gpt-4o")?;
//! ```
//!
//! ## Model Strings
//!
//! Models can be specified with provider prefixes:
//!
//! - `openai:gpt-4o` - OpenAI GPT-4o
//! - `anthropic:claude-3-5-sonnet-20241022` - Anthropic Claude
//! - `google:gemini-2.0-flash` - Google Gemini
//! - `groq:llama-3.3-70b-versatile` - Groq Llama
//! - `ollama:llama3.2` - Local Ollama model
//!
//! Alternatively, the provider can be inferred from the model name alone:
//!
//! - `gpt-4o` → OpenAI
//! - `claude-3-5-sonnet-20241022` → Anthropic
//! - `gemini-2.0-flash` → Google
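//!
//! For example, the crate-level [`infer`] helper accepts either form (a minimal
//! sketch; it assumes the matching provider is configured via environment variables):
//!
//! ```rust,ignore
//! // Explicit provider prefix.
//! let (provider, model) = serdes_ai_providers::infer("openai:gpt-4o")?;
//!
//! // Provider inferred from the bare model name.
//! let (provider, model) = serdes_ai_providers::infer("gemini-2.0-flash")?;
//! ```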

mod provider;
mod registry;

// Provider implementations
#[cfg(feature = "anthropic")]
mod anthropic;
#[cfg(feature = "azure")]
mod azure;
#[cfg(feature = "google")]
mod google;
#[cfg(feature = "groq")]
mod groq;
#[cfg(feature = "mistral")]
mod mistral;
#[cfg(feature = "ollama")]
mod ollama;
#[cfg(feature = "openai")]
mod openai;

// OpenAI-compatible providers
mod compatible;

// Gateway providers
mod gateway;

// OAuth utilities
pub mod oauth;
pub use oauth::config::{chatgpt_oauth_config, claude_code_oauth_config};
pub use oauth::{
    refresh_token, run_pkce_flow, OAuthConfig, OAuthContext, OAuthError, TokenResponse,
};

// Re-exports
pub use provider::*;
pub use registry::*;

#[cfg(feature = "anthropic")]
pub use anthropic::AnthropicProvider;
#[cfg(feature = "azure")]
pub use azure::AzureProvider;
#[cfg(feature = "google")]
pub use google::{GoogleProvider, VertexAIProvider};
#[cfg(feature = "groq")]
pub use groq::GroqProvider;
#[cfg(feature = "mistral")]
pub use mistral::MistralProvider;
#[cfg(feature = "ollama")]
pub use ollama::OllamaProvider;
#[cfg(feature = "openai")]
pub use openai::OpenAIProvider;

// Compatible providers
pub use compatible::{
    CohereProvider, DeepSeekProvider, FireworksProvider, OpenRouterProvider, TogetherProvider,
};

// Gateway providers
pub use gateway::{GatewayConfig, GatewayProvider};

use std::sync::Arc;

/// Create a provider registry configured from environment variables.
///
/// This checks for API keys and creates a provider for each configured service.
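///
/// # Example
///
/// A minimal usage sketch (it assumes at least one provider's key, e.g.
/// `OPENAI_API_KEY`, is present in the environment so that provider is registered):
///
/// ```rust,ignore
/// let registry = serdes_ai_providers::from_env();
/// let (provider, model) = registry.infer_provider("openai:gpt-4o")?;
/// ```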
pub fn from_env() -> ProviderRegistry {
    let registry = ProviderRegistry::new();

    #[cfg(feature = "openai")]
    if let Ok(provider) = OpenAIProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    #[cfg(feature = "anthropic")]
    if let Ok(provider) = AnthropicProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    #[cfg(feature = "google")]
    if let Ok(provider) = GoogleProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    #[cfg(feature = "azure")]
    if let Ok(provider) = AzureProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    #[cfg(feature = "groq")]
    if let Ok(provider) = GroqProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    #[cfg(feature = "mistral")]
    if let Ok(provider) = MistralProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    #[cfg(feature = "ollama")]
    if let Ok(provider) = OllamaProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    // Compatible providers
    if let Ok(provider) = TogetherProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    if let Ok(provider) = FireworksProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    if let Ok(provider) = DeepSeekProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    if let Ok(provider) = OpenRouterProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    if let Ok(provider) = CohereProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    // Gateway providers
    if let Ok(provider) = GatewayProvider::portkey_from_env() {
        registry.register(Arc::new(provider));
    }

    if let Ok(provider) = GatewayProvider::litellm_from_env() {
        registry.register(Arc::new(provider));
    }

    if let Ok(provider) = GatewayProvider::helicone_from_env() {
        registry.register(Arc::new(provider));
    }

    if let Ok(provider) = GatewayProvider::from_env() {
        registry.register(Arc::new(provider));
    }

    registry
}

/// Infer provider and model from a model string.
///
/// Supports formats like:
/// - `openai:gpt-4o` (explicit provider)
/// - `gpt-4o` (inferred from model name)
///
/// Returns a tuple of (provider, model_name).
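///
/// # Example
///
/// A minimal sketch; this helper builds a registry via [`from_env`] internally,
/// so it assumes the relevant provider is configured in the environment:
///
/// ```rust,ignore
/// // Explicit provider prefix.
/// let (provider, model) = serdes_ai_providers::infer("openai:gpt-4o")?;
///
/// // Provider inferred from the bare model name.
/// let (provider, model) = serdes_ai_providers::infer("claude-3-5-sonnet-20241022")?;
/// ```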
pub fn infer(model_string: &str) -> Result<(BoxedProvider, String), ProviderError> {
    let registry = from_env();
    registry.infer_provider(model_string)
}

/// Prelude for common imports.
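///
/// A minimal usage sketch (the glob import is the usual convention for preludes;
/// only items re-exported below are relied on):
///
/// ```rust,ignore
/// use serdes_ai_providers::prelude::*;
///
/// let registry = from_env();
/// let (provider, model) = registry.infer_provider("openai:gpt-4o")?;
/// ```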
pub mod prelude {
    pub use crate::{
        from_env, global_registry, infer, BoxedProvider, GatewayProvider, Provider, ProviderConfig,
        ProviderError, ProviderRegistry,
    };

    #[cfg(feature = "anthropic")]
    pub use crate::AnthropicProvider;
    #[cfg(feature = "groq")]
    pub use crate::GroqProvider;
    #[cfg(feature = "ollama")]
    pub use crate::OllamaProvider;
    #[cfg(feature = "openai")]
    pub use crate::OpenAIProvider;
    #[cfg(feature = "google")]
    pub use crate::{GoogleProvider, VertexAIProvider};
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_from_env_creates_registry() {
        let registry = from_env();
        // The exact providers depend on env vars (and enabled features),
        // so just verify the registry exists and can list them.
        let providers = registry.list();
        let _ = providers;
    }

    #[test]
    fn test_global_registry() {
        let registry = global_registry();
        // Just verify global registry is accessible
        let _ = registry.list();
    }
}