// brainwires_providers/src/lib.rs
1#![deny(missing_docs)]
2//! Provider layer for the Brainwires Agent Framework.
3//!
4//! Contains both low-level API client structs (HTTP transport, auth, rate
5//! limiting, serde) and high-level chat provider implementations that wrap
6//! them with the `brainwires_core::Provider` trait.
7
8// Re-export core traits for convenience
9pub use brainwires_core::provider::{ChatOptions, Provider};
10
11// Rate limiting and HTTP client
12#[cfg(feature = "native")]
13pub mod http_client;
14#[cfg(feature = "native")]
15pub mod rate_limiter;
16
17#[cfg(feature = "native")]
18pub use http_client::RateLimitedClient;
19#[cfg(feature = "native")]
20pub use rate_limiter::RateLimiter;
21
22// ── Protocol directories ──────────────────────────────────────────────
23
24/// OpenAI Chat Completions protocol (also used by Groq, Together, Fireworks, Anyscale).
25#[cfg(feature = "native")]
26pub mod openai_chat;
27
28/// OpenAI Responses API protocol (`/v1/responses`).
29#[cfg(feature = "native")]
30pub mod openai_responses;
31
32/// Anthropic Messages protocol (also used by Bedrock, Vertex AI).
33#[cfg(feature = "native")]
34pub mod anthropic;
35
36/// Google Gemini generateContent protocol.
37#[cfg(feature = "native")]
38pub mod gemini;
39
40/// Ollama native chat protocol.
41#[cfg(feature = "native")]
42pub mod ollama;
43
44/// Brainwires HTTP relay protocol.
45#[cfg(feature = "native")]
46pub mod brainwires_http;
47#[cfg(feature = "native")]
48pub use brainwires_http::{DEFAULT_BACKEND_URL, DEV_BACKEND_URL, get_backend_from_api_key};
49
50// ── Audio/speech API clients ──────────────────────────────────────────
51
52/// Azure Cognitive Services Speech API client.
53#[cfg(feature = "native")]
54pub mod azure_speech;
55/// Cartesia TTS API client.
56#[cfg(feature = "native")]
57pub mod cartesia;
58/// Deepgram TTS/STT API client.
59#[cfg(feature = "native")]
60pub mod deepgram;
61/// ElevenLabs TTS/STT API client.
62#[cfg(feature = "native")]
63pub mod elevenlabs;
64/// Fish Audio TTS/ASR API client.
65#[cfg(feature = "native")]
66pub mod fish;
67/// Google Cloud Text-to-Speech API client.
68#[cfg(feature = "native")]
69pub mod google_tts;
70/// Murf AI TTS API client.
71#[cfg(feature = "native")]
72pub mod murf;
73
74// ── Registry ──────────────────────────────────────────────────────────
75
76/// Provider registry — protocol, auth, and endpoint metadata for all known providers.
77pub mod registry;
78
79// ── Model listing ─────────────────────────────────────────────────────
80
81/// Model listing — query available models from provider APIs.
82#[cfg(feature = "native")]
83pub mod model_listing;
84
85/// Chat provider factory — registry-driven protocol dispatch.
86#[cfg(feature = "native")]
87pub mod chat_factory;
88
89// ── Local LLM ─────────────────────────────────────────────────────────
90
91/// Local LLM inference (always compiled, llama.cpp behind feature flag).
92pub mod local_llm;
93
94// ── Re-exports ────────────────────────────────────────────────────────
95
96// Chat-capable API clients
97#[cfg(feature = "native")]
98pub use anthropic::AnthropicClient;
99#[cfg(feature = "native")]
100pub use brainwires_http::BrainwiresHttpProvider;
101#[cfg(feature = "native")]
102pub use gemini::GoogleClient;
103#[cfg(feature = "native")]
104pub use ollama::OllamaProvider;
105#[cfg(feature = "native")]
106pub use openai_chat::OpenAiClient;
107
108// Chat providers
109#[cfg(feature = "native")]
110pub use anthropic::chat::AnthropicChatProvider;
111#[cfg(feature = "native")]
112pub use gemini::chat::GoogleChatProvider;
113#[cfg(feature = "native")]
114pub use ollama::chat::OllamaChatProvider;
115#[cfg(feature = "native")]
116pub use openai_chat::chat::OpenAiChatProvider;
117#[cfg(feature = "native")]
118pub use openai_responses::OpenAiResponsesProvider;
119
120// Audio API clients
121#[cfg(feature = "native")]
122pub use azure_speech::AzureSpeechClient;
123#[cfg(feature = "native")]
124pub use cartesia::CartesiaClient;
125#[cfg(feature = "native")]
126pub use deepgram::DeepgramClient;
127#[cfg(feature = "native")]
128pub use elevenlabs::ElevenLabsClient;
129#[cfg(feature = "native")]
130pub use fish::FishClient;
131#[cfg(feature = "native")]
132pub use google_tts::GoogleTtsClient;
133#[cfg(feature = "native")]
134pub use murf::MurfClient;
135
136// Model listing
137#[cfg(feature = "native")]
138pub use model_listing::{AvailableModel, ModelCapability, ModelLister, create_model_lister};
139
140// Factory
141#[cfg(feature = "native")]
142pub use chat_factory::ChatProviderFactory;
143
144// Local LLM
145pub use local_llm::*;
146
147use serde::{Deserialize, Serialize};
148use std::fmt;
149use std::str::FromStr;
150
/// AI provider types.
///
/// NOTE(review): serde uses `rename_all = "lowercase"`, so the wire form of a
/// variant is its name lowercased (e.g. `VertexAI` → `"vertexai"`,
/// `OpenAiResponses` → `"openairesponses"`). This differs from `as_str`,
/// which returns hyphenated names for multi-word variants
/// (`"vertex-ai"`, `"openai-responses"`).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum ProviderType {
    /// Anthropic (Claude).
    Anthropic,
    /// OpenAI (GPT).
    OpenAI,
    /// Google (Gemini).
    Google,
    /// Groq inference.
    Groq,
    /// Ollama local models.
    Ollama,
    /// Brainwires HTTP relay.
    Brainwires,
    /// Together AI.
    Together,
    /// Fireworks AI.
    Fireworks,
    /// Anyscale.
    Anyscale,
    /// Amazon Bedrock (Anthropic Messages via AWS SigV4).
    Bedrock,
    /// Google Vertex AI (Anthropic Messages via OAuth2).
    VertexAI,
    /// ElevenLabs.
    ElevenLabs,
    /// Deepgram.
    Deepgram,
    /// Azure Speech.
    Azure,
    /// Fish Audio.
    Fish,
    /// Cartesia.
    Cartesia,
    /// Murf AI.
    Murf,
    /// OpenAI Responses API.
    OpenAiResponses,
    /// Custom / user-defined provider.
    Custom,
}
194
195impl ProviderType {
196    /// Get the default model for this provider
197    pub fn default_model(&self) -> &'static str {
198        match self {
199            Self::Anthropic => "claude-sonnet-4-20250514",
200            Self::OpenAI => "gpt-5-mini",
201            Self::Google => "gemini-2.5-flash",
202            Self::Groq => "llama-3.3-70b-versatile",
203            Self::Ollama => "llama3.3",
204            Self::Brainwires => "gpt-5-mini",
205            Self::Together => "meta-llama/Llama-3.1-8B-Instruct",
206            Self::Fireworks => "accounts/fireworks/models/llama-v3p1-8b-instruct",
207            Self::Anyscale => "meta-llama/Meta-Llama-3.1-8B-Instruct",
208            Self::Bedrock => "anthropic.claude-sonnet-4-20250514-v1:0",
209            Self::VertexAI => "claude-sonnet-4@20250514",
210            Self::ElevenLabs => "eleven_multilingual_v2",
211            Self::Deepgram => "nova-2",
212            Self::Azure => "en-US-JennyNeural",
213            Self::Fish => "default",
214            Self::Cartesia => "sonic-english",
215            Self::Murf => "en-US-natalie",
216            Self::OpenAiResponses => "gpt-5-mini",
217            Self::Custom => "claude-sonnet-4-20250514",
218        }
219    }
220
221    /// Parse from string
222    pub fn from_str_opt(s: &str) -> Option<Self> {
223        match s.to_lowercase().as_str() {
224            "anthropic" => Some(Self::Anthropic),
225            "openai" => Some(Self::OpenAI),
226            "google" | "gemini" => Some(Self::Google),
227            "groq" => Some(Self::Groq),
228            "ollama" => Some(Self::Ollama),
229            "brainwires" => Some(Self::Brainwires),
230            "together" => Some(Self::Together),
231            "fireworks" => Some(Self::Fireworks),
232            "anyscale" => Some(Self::Anyscale),
233            "bedrock" => Some(Self::Bedrock),
234            "vertex-ai" | "vertexai" | "vertex_ai" => Some(Self::VertexAI),
235            "elevenlabs" => Some(Self::ElevenLabs),
236            "deepgram" => Some(Self::Deepgram),
237            "azure" => Some(Self::Azure),
238            "fish" => Some(Self::Fish),
239            "cartesia" => Some(Self::Cartesia),
240            "murf" => Some(Self::Murf),
241            "openai-responses" | "openai_responses" => Some(Self::OpenAiResponses),
242            "custom" => Some(Self::Custom),
243            _ => None,
244        }
245    }
246
247    /// Convert to string
248    pub fn as_str(&self) -> &'static str {
249        match self {
250            Self::Anthropic => "anthropic",
251            Self::OpenAI => "openai",
252            Self::Google => "google",
253            Self::Groq => "groq",
254            Self::Ollama => "ollama",
255            Self::Brainwires => "brainwires",
256            Self::Together => "together",
257            Self::Fireworks => "fireworks",
258            Self::Anyscale => "anyscale",
259            Self::Bedrock => "bedrock",
260            Self::VertexAI => "vertex-ai",
261            Self::ElevenLabs => "elevenlabs",
262            Self::Deepgram => "deepgram",
263            Self::Azure => "azure",
264            Self::Fish => "fish",
265            Self::Cartesia => "cartesia",
266            Self::Murf => "murf",
267            Self::OpenAiResponses => "openai-responses",
268            Self::Custom => "custom",
269        }
270    }
271
272    /// Whether this provider requires an API key
273    pub fn requires_api_key(&self) -> bool {
274        !matches!(self, Self::Ollama | Self::Bedrock | Self::VertexAI)
275    }
276}
277
278impl fmt::Display for ProviderType {
279    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
280        write!(f, "{}", self.as_str())
281    }
282}
283
284impl FromStr for ProviderType {
285    type Err = anyhow::Error;
286
287    fn from_str(s: &str) -> Result<Self, Self::Err> {
288        Self::from_str_opt(s).ok_or_else(|| anyhow::anyhow!("Unknown provider: {}", s))
289    }
290}
291
/// Provider configuration.
///
/// Serializes flat: `options` is `#[serde(flatten)]`ed, so any JSON keys
/// other than `provider`/`model`/`api_key`/`base_url` round-trip through it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderConfig {
    /// Provider type
    pub provider: ProviderType,
    /// Model name
    pub model: String,
    /// API key (if required); omitted from serialized output when `None`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub api_key: Option<String>,
    /// Base URL (for custom endpoints); omitted from serialized output when `None`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub base_url: Option<String>,
    /// Additional provider-specific options (e.g. `region`, `project_id`),
    /// flattened into the top level on (de)serialization
    #[serde(flatten)]
    pub options: std::collections::HashMap<String, serde_json::Value>,
}
309
310impl ProviderConfig {
311    /// Create a new provider config
312    pub fn new(provider: ProviderType, model: String) -> Self {
313        Self {
314            provider,
315            model,
316            api_key: None,
317            base_url: None,
318            options: std::collections::HashMap::new(),
319        }
320    }
321
322    /// Set API key
323    pub fn with_api_key<S: Into<String>>(mut self, api_key: S) -> Self {
324        self.api_key = Some(api_key.into());
325        self
326    }
327
328    /// Set base URL
329    pub fn with_base_url<S: Into<String>>(mut self, base_url: S) -> Self {
330        self.base_url = Some(base_url.into());
331        self
332    }
333
334    /// Set a provider-specific option.
335    pub fn with_option(mut self, key: impl Into<String>, value: serde_json::Value) -> Self {
336        self.options.insert(key.into(), value);
337        self
338    }
339
340    /// Set the AWS region (for Bedrock) or GCP region (for Vertex AI).
341    pub fn with_region(self, region: impl Into<String>) -> Self {
342        self.with_option("region", serde_json::Value::String(region.into()))
343    }
344
345    /// Set the GCP project ID (for Vertex AI).
346    pub fn with_project_id(self, project_id: impl Into<String>) -> Self {
347        self.with_option("project_id", serde_json::Value::String(project_id.into()))
348    }
349}
350
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_provider_type_default_model() {
        assert_eq!(
            ProviderType::Anthropic.default_model(),
            "claude-sonnet-4-20250514"
        );
        assert_eq!(ProviderType::OpenAI.default_model(), "gpt-5-mini");
        assert_eq!(ProviderType::Google.default_model(), "gemini-2.5-flash");
        assert_eq!(
            ProviderType::Groq.default_model(),
            "llama-3.3-70b-versatile"
        );
        assert_eq!(ProviderType::Ollama.default_model(), "llama3.3");
        assert_eq!(ProviderType::Brainwires.default_model(), "gpt-5-mini");
    }

    #[test]
    fn test_provider_type_from_str() {
        assert_eq!(
            ProviderType::from_str_opt("anthropic"),
            Some(ProviderType::Anthropic)
        );
        assert_eq!(
            ProviderType::from_str_opt("openai"),
            Some(ProviderType::OpenAI)
        );
        assert_eq!(
            ProviderType::from_str_opt("google"),
            Some(ProviderType::Google)
        );
        assert_eq!(
            ProviderType::from_str_opt("gemini"),
            Some(ProviderType::Google)
        );
        assert_eq!(ProviderType::from_str_opt("groq"), Some(ProviderType::Groq));
        assert_eq!(
            ProviderType::from_str_opt("ollama"),
            Some(ProviderType::Ollama)
        );
        assert_eq!(
            ProviderType::from_str_opt("brainwires"),
            Some(ProviderType::Brainwires)
        );
        assert_eq!(
            ProviderType::from_str_opt("together"),
            Some(ProviderType::Together)
        );
        assert_eq!(
            ProviderType::from_str_opt("fireworks"),
            Some(ProviderType::Fireworks)
        );
        assert_eq!(
            ProviderType::from_str_opt("anyscale"),
            Some(ProviderType::Anyscale)
        );
        assert_eq!(
            ProviderType::from_str_opt("elevenlabs"),
            Some(ProviderType::ElevenLabs)
        );
        assert_eq!(
            ProviderType::from_str_opt("deepgram"),
            Some(ProviderType::Deepgram)
        );
        assert_eq!(
            ProviderType::from_str_opt("custom"),
            Some(ProviderType::Custom)
        );
        assert_eq!(ProviderType::from_str_opt("unknown"), None);
    }

    // Every canonical name from `as_str` must parse back to the same variant.
    #[test]
    fn test_as_str_from_str_roundtrip() {
        let all = [
            ProviderType::Anthropic,
            ProviderType::OpenAI,
            ProviderType::Google,
            ProviderType::Groq,
            ProviderType::Ollama,
            ProviderType::Brainwires,
            ProviderType::Together,
            ProviderType::Fireworks,
            ProviderType::Anyscale,
            ProviderType::Bedrock,
            ProviderType::VertexAI,
            ProviderType::ElevenLabs,
            ProviderType::Deepgram,
            ProviderType::Azure,
            ProviderType::Fish,
            ProviderType::Cartesia,
            ProviderType::Murf,
            ProviderType::OpenAiResponses,
            ProviderType::Custom,
        ];
        for p in all {
            assert_eq!(ProviderType::from_str_opt(p.as_str()), Some(p));
        }
    }

    // `from_str_opt` lowercases its input, so mixed case must parse.
    #[test]
    fn test_from_str_case_insensitive() {
        assert_eq!(
            ProviderType::from_str_opt("ANTHROPIC"),
            Some(ProviderType::Anthropic)
        );
        assert_eq!(
            ProviderType::from_str_opt("Vertex-AI"),
            Some(ProviderType::VertexAI)
        );
    }

    // `Display` is defined in terms of `as_str`.
    #[test]
    fn test_display_matches_as_str() {
        assert_eq!(ProviderType::Bedrock.to_string(), "bedrock");
        assert_eq!(ProviderType::OpenAiResponses.to_string(), "openai-responses");
    }

    #[test]
    fn test_provider_type_requires_api_key() {
        assert!(ProviderType::Anthropic.requires_api_key());
        assert!(ProviderType::OpenAI.requires_api_key());
        assert!(!ProviderType::Ollama.requires_api_key());
        assert!(!ProviderType::Bedrock.requires_api_key());
        assert!(!ProviderType::VertexAI.requires_api_key());
        assert!(ProviderType::ElevenLabs.requires_api_key());
    }

    #[test]
    fn test_provider_config() {
        let config = ProviderConfig::new(ProviderType::Anthropic, "claude-3".to_string())
            .with_api_key("sk-test")
            .with_base_url("https://api.example.com");
        assert_eq!(config.provider, ProviderType::Anthropic);
        assert_eq!(config.api_key, Some("sk-test".to_string()));
        assert_eq!(config.base_url, Some("https://api.example.com".to_string()));
    }
}