use std::collections::HashMap;
use std::pin::Pin;
use std::sync::Arc;
use async_trait::async_trait;
use futures::Stream;
use tokio::time::sleep;
use crate::audio::{
SpeechProvider, SpeechRequest, SpeechResponse, TranscriptionProvider, TranscriptionRequest,
TranscriptionResponse,
};
use crate::embedding::{EmbeddingProvider, EmbeddingRequest, EmbeddingResponse};
use crate::error::{Error, Result};
use crate::image::{ImageGenerationRequest, ImageGenerationResponse, ImageProvider};
use crate::provider::{Provider, ProviderConfig};
use crate::retry::RetryConfig;
use crate::specialized::{
ClassificationProvider, ClassificationRequest, ClassificationResponse, ModerationProvider,
ModerationRequest, ModerationResponse, RankingProvider, RankingRequest, RankingResponse,
};
use crate::types::{
BatchJob, BatchRequest, BatchResult, CompletionRequest, CompletionResponse, StreamChunk,
TokenCountRequest, TokenCountResult,
};
use crate::video::{
VideoGenerationRequest, VideoGenerationResponse, VideoJobStatus, VideoProvider,
};
/// Decorator that wraps another [`Provider`] and transparently retries
/// retryable failures according to the configured [`RetryConfig`].
struct DynamicRetryingProvider {
    /// The provider that actually performs the work.
    inner: Arc<dyn Provider>,
    /// Retry limit / backoff policy consulted between attempts.
    config: RetryConfig,
}
impl DynamicRetryingProvider {
    /// Runs `f` up to `config.max_retries + 1` times (one initial attempt
    /// plus `max_retries` retries) and returns the first success or the
    /// last error observed.
    ///
    /// Behavior, in order:
    /// * a success after at least one retry is logged at `info`;
    /// * a non-retryable error (per `Error::is_retryable`) aborts immediately;
    /// * a retryable error before the final attempt sleeps for either the
    ///   server-suggested `retry_after()` duration or the configured backoff
    ///   (`delay_for_attempt`), then tries again;
    /// * once all attempts are exhausted, the last error is returned.
    async fn execute_with_retry<T, F, Fut>(&self, operation_name: &str, mut f: F) -> Result<T>
    where
        F: FnMut() -> Fut,
        Fut: std::future::Future<Output = Result<T>>,
    {
        let mut last_error: Option<Error> = None;
        // Inclusive range: attempt 0 is the initial try, 1..=max_retries are retries.
        for attempt in 0..=self.config.max_retries {
            match f().await {
                Ok(result) => {
                    // Only log recoveries — a first-try success stays quiet.
                    if attempt > 0 {
                        tracing::info!(
                            provider = %self.inner.name(),
                            operation = %operation_name,
                            attempt = attempt + 1,
                            "Operation succeeded after retry"
                        );
                    }
                    return Ok(result);
                }
                Err(e) => {
                    if !e.is_retryable() {
                        tracing::debug!(
                            provider = %self.inner.name(),
                            operation = %operation_name,
                            error = %e,
                            "Non-retryable error, failing immediately"
                        );
                        return Err(e);
                    }
                    // Sleep only when another attempt will actually follow.
                    if attempt < self.config.max_retries {
                        // Prefer the delay the error carries (e.g. a server
                        // Retry-After hint) over our own backoff schedule.
                        let delay = e
                            .retry_after()
                            .unwrap_or_else(|| self.config.delay_for_attempt(attempt));
                        tracing::warn!(
                            provider = %self.inner.name(),
                            operation = %operation_name,
                            attempt = attempt + 1,
                            max_retries = self.config.max_retries,
                            delay_ms = delay.as_millis(),
                            error = %e,
                            "Retryable error, will retry after delay"
                        );
                        sleep(delay).await;
                    }
                    last_error = Some(e);
                }
            }
        }
        tracing::error!(
            provider = %self.inner.name(),
            operation = %operation_name,
            max_retries = self.config.max_retries,
            "All retry attempts exhausted"
        );
        // `last_error` is always `Some` when the loop body ran at least once;
        // the fallback is defensive rather than an `unwrap`.
        Err(last_error.unwrap_or_else(|| Error::other("Unknown retry failure")))
    }
}
#[async_trait]
impl Provider for DynamicRetryingProvider {
    fn name(&self) -> &str {
        self.inner.name()
    }

    /// Non-streaming completion with retry; the request is cloned for each
    /// attempt so every retry replays the original, unmodified payload.
    async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse> {
        let request = Arc::new(request);
        self.execute_with_retry("complete", || {
            let request = (*request).clone();
            let inner = Arc::clone(&self.inner);
            async move { inner.complete(request).await }
        })
        .await
    }

    /// Streaming completion with retry.
    ///
    /// NOTE(review): only *establishing* the stream is retried; errors
    /// yielded by the stream itself after it has been returned to the caller
    /// pass through unretried.
    async fn complete_stream(
        &self,
        request: CompletionRequest,
    ) -> Result<Pin<Box<dyn Stream<Item = Result<StreamChunk>> + Send>>> {
        let request = Arc::new(request);
        self.execute_with_retry("complete_stream", || {
            let request = (*request).clone();
            let inner = Arc::clone(&self.inner);
            async move { inner.complete_stream(request).await }
        })
        .await
    }

    // Capability queries: pure delegation, nothing to retry.
    fn supports_tools(&self) -> bool {
        self.inner.supports_tools()
    }
    fn supports_vision(&self) -> bool {
        self.inner.supports_vision()
    }
    fn supports_streaming(&self) -> bool {
        self.inner.supports_streaming()
    }

    /// Token counting with retry (same per-attempt clone pattern as above).
    async fn count_tokens(&self, request: TokenCountRequest) -> Result<TokenCountResult> {
        let request = Arc::new(request);
        self.execute_with_retry("count_tokens", || {
            let request = (*request).clone();
            let inner = Arc::clone(&self.inner);
            async move { inner.count_tokens(request).await }
        })
        .await
    }
    fn supports_token_counting(&self) -> bool {
        self.inner.supports_token_counting()
    }

    // Batch operations are delegated WITHOUT retry — presumably because
    // `create_batch` is not idempotent (a retried call could enqueue the
    // batch twice); confirm before routing these through `execute_with_retry`.
    async fn create_batch(&self, requests: Vec<BatchRequest>) -> Result<BatchJob> {
        self.inner.create_batch(requests).await
    }
    async fn get_batch(&self, batch_id: &str) -> Result<BatchJob> {
        self.inner.get_batch(batch_id).await
    }
    async fn get_batch_results(&self, batch_id: &str) -> Result<Vec<BatchResult>> {
        self.inner.get_batch_results(batch_id).await
    }
    async fn cancel_batch(&self, batch_id: &str) -> Result<BatchJob> {
        self.inner.cancel_batch(batch_id).await
    }
    async fn list_batches(&self, limit: Option<u32>) -> Result<Vec<BatchJob>> {
        self.inner.list_batches(limit).await
    }
    fn supports_batch(&self) -> bool {
        self.inner.supports_batch()
    }
}
/// Splits a `provider/model` identifier at its first `/` into
/// `(provider, model)`.
///
/// The provider segment must be non-empty and contain none of `-`, `.`, `:`
/// — presumably so that bare model names such as `meta-llama/Llama-3`
/// (no provider prefix) are rejected with a clear error rather than being
/// misrouted; confirm against callers.
fn parse_model_identifier(model: &str) -> Result<(&str, &str)> {
    if let Some((provider, model_name)) = model.split_once('/') {
        let looks_like_provider =
            !provider.is_empty() && provider.chars().all(|c| !matches!(c, '-' | '.' | ':'));
        if looks_like_provider {
            return Ok((provider, model_name));
        }
    }
    Err(Error::InvalidRequest(format!(
        "Model must be in 'provider/model' format (e.g., 'openai/gpt-4o'), got: {}",
        model
    )))
}
/// Multi-modality, multi-provider client. Each request is routed to a
/// registered provider based on the `provider/model` prefix of the
/// request's model string (see `parse_model_identifier`).
pub struct LLMKitClient {
    /// Chat/completion providers, keyed by provider name.
    providers: HashMap<String, Arc<dyn Provider>>,
    /// Embedding providers, keyed by provider name.
    embedding_providers: HashMap<String, Arc<dyn EmbeddingProvider>>,
    /// Text-to-speech providers.
    speech_providers: HashMap<String, Arc<dyn SpeechProvider>>,
    /// Speech-to-text providers.
    transcription_providers: HashMap<String, Arc<dyn TranscriptionProvider>>,
    /// Image-generation providers.
    image_providers: HashMap<String, Arc<dyn ImageProvider>>,
    /// Video-generation providers.
    video_providers: HashMap<String, Arc<dyn VideoProvider>>,
    /// Reranking providers.
    ranking_providers: HashMap<String, Arc<dyn RankingProvider>>,
    /// Content-moderation providers.
    moderation_providers: HashMap<String, Arc<dyn ModerationProvider>>,
    /// Classification providers.
    classification_providers: HashMap<String, Arc<dyn ClassificationProvider>>,
    /// Name of the chat provider returned by `default_provider()`, if set.
    default_provider: Option<String>,
}
impl LLMKitClient {
    /// Starts building a client; see [`ClientBuilder`].
    pub fn builder() -> ClientBuilder {
        ClientBuilder::new()
    }

    /// Returns the chat provider registered under `name`, if any.
    pub fn provider(&self, name: &str) -> Option<Arc<dyn Provider>> {
        self.providers.get(name).cloned()
    }

    /// Returns the configured default chat provider, if one was set.
    pub fn default_provider(&self) -> Option<Arc<dyn Provider>> {
        self.default_provider
            .as_ref()
            .and_then(|name| self.providers.get(name).cloned())
    }

    /// Names of all registered chat providers.
    pub fn providers(&self) -> Vec<&str> {
        self.providers.keys().map(|s| s.as_str()).collect()
    }

    /// Sends a completion request. `request.model` must be in
    /// `provider/model` form; the provider prefix is stripped before the
    /// request reaches the provider.
    pub async fn complete(&self, mut request: CompletionRequest) -> Result<CompletionResponse> {
        let (provider, model_name) = self.resolve_provider(&request.model)?;
        request.model = model_name;
        provider.complete(request).await
    }

    /// Streaming variant of [`Self::complete`]; same model routing.
    pub async fn complete_stream(
        &self,
        mut request: CompletionRequest,
    ) -> Result<Pin<Box<dyn Stream<Item = Result<StreamChunk>> + Send>>> {
        let (provider, model_name) = self.resolve_provider(&request.model)?;
        request.model = model_name;
        provider.complete_stream(request).await
    }

    /// Sends a completion to an explicitly named provider. Note that
    /// `request.model` is passed through unmodified here — no prefix
    /// stripping is performed.
    pub async fn complete_with_provider(
        &self,
        provider_name: &str,
        request: CompletionRequest,
    ) -> Result<CompletionResponse> {
        let provider = self
            .providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.complete(request).await
    }

    /// Streaming variant of [`Self::complete_with_provider`].
    pub async fn complete_stream_with_provider(
        &self,
        provider_name: &str,
        request: CompletionRequest,
    ) -> Result<Pin<Box<dyn Stream<Item = Result<StreamChunk>> + Send>>> {
        let provider = self
            .providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.complete_stream(request).await
    }

    /// Counts tokens, routed via the `provider/model` prefix.
    pub async fn count_tokens(&self, mut request: TokenCountRequest) -> Result<TokenCountResult> {
        let (provider, model_name) = self.resolve_provider(&request.model)?;
        request.model = model_name;
        provider.count_tokens(request).await
    }

    /// Counts tokens against an explicitly named provider (no prefix stripping).
    pub async fn count_tokens_with_provider(
        &self,
        provider_name: &str,
        request: TokenCountRequest,
    ) -> Result<TokenCountResult> {
        let provider = self
            .providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.count_tokens(request).await
    }

    /// Creates a batch job. The target provider is resolved from the FIRST
    /// request's model; every request's model then has its provider prefix
    /// stripped before submission.
    ///
    /// NOTE(review): requests after the first are not checked to name the
    /// same provider — a mixed-provider batch is silently sent to the first
    /// request's provider. Also, the final assignment to
    /// `requests[0].request.model` is redundant: the loop above has already
    /// stripped it to the same value.
    pub async fn create_batch(&self, mut requests: Vec<BatchRequest>) -> Result<BatchJob> {
        if requests.is_empty() {
            return Err(Error::invalid_request(
                "Batch must contain at least one request",
            ));
        }
        let (provider, model_name) = self.resolve_provider(&requests[0].request.model)?;
        for req in &mut requests {
            let (_, req_model) = parse_model_identifier(&req.request.model)?;
            req.request.model = req_model.to_string();
        }
        requests[0].request.model = model_name;
        provider.create_batch(requests).await
    }

    /// Creates a batch on an explicitly named provider; request models are
    /// passed through unmodified.
    pub async fn create_batch_with_provider(
        &self,
        provider_name: &str,
        requests: Vec<BatchRequest>,
    ) -> Result<BatchJob> {
        let provider = self
            .providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.create_batch(requests).await
    }

    /// Fetches the status of a batch job from the named provider.
    pub async fn get_batch(&self, provider_name: &str, batch_id: &str) -> Result<BatchJob> {
        let provider = self
            .providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.get_batch(batch_id).await
    }

    /// Fetches the per-request results of a batch job.
    pub async fn get_batch_results(
        &self,
        provider_name: &str,
        batch_id: &str,
    ) -> Result<Vec<BatchResult>> {
        let provider = self
            .providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.get_batch_results(batch_id).await
    }

    /// Cancels a batch job on the named provider.
    pub async fn cancel_batch(&self, provider_name: &str, batch_id: &str) -> Result<BatchJob> {
        let provider = self
            .providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.cancel_batch(batch_id).await
    }

    /// Lists batch jobs on the named provider, optionally capped at `limit`.
    pub async fn list_batches(
        &self,
        provider_name: &str,
        limit: Option<u32>,
    ) -> Result<Vec<BatchJob>> {
        let provider = self
            .providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.list_batches(limit).await
    }

    /// Generates embeddings, routed via the `provider/model` prefix.
    pub async fn embed(&self, mut request: EmbeddingRequest) -> Result<EmbeddingResponse> {
        let (provider, model_name) = self.resolve_embedding_provider(&request.model)?;
        request.model = model_name;
        provider.embed(request).await
    }

    /// Generates embeddings on an explicitly named provider.
    pub async fn embed_with_provider(
        &self,
        provider_name: &str,
        request: EmbeddingRequest,
    ) -> Result<EmbeddingResponse> {
        let provider = self
            .embedding_providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.embed(request).await
    }

    /// Names of all registered embedding providers.
    pub fn embedding_providers(&self) -> Vec<&str> {
        self.embedding_providers
            .keys()
            .map(|s| s.as_str())
            .collect()
    }

    /// Whether an embedding provider is registered under `provider_name`.
    pub fn supports_embeddings(&self, provider_name: &str) -> bool {
        self.embedding_providers.contains_key(provider_name)
    }

    /// Synthesizes speech, routed via the `provider/model` prefix.
    pub async fn speech(&self, mut request: SpeechRequest) -> Result<SpeechResponse> {
        let (provider, model_name) = self.resolve_speech_provider(&request.model)?;
        request.model = model_name;
        provider.speech(request).await
    }

    /// Synthesizes speech on an explicitly named provider.
    pub async fn speech_with_provider(
        &self,
        provider_name: &str,
        request: SpeechRequest,
    ) -> Result<SpeechResponse> {
        let provider = self
            .speech_providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.speech(request).await
    }

    /// Names of all registered speech providers.
    pub fn speech_providers(&self) -> Vec<&str> {
        self.speech_providers.keys().map(|s| s.as_str()).collect()
    }

    /// Transcribes audio, routed via the `provider/model` prefix.
    pub async fn transcribe(
        &self,
        mut request: TranscriptionRequest,
    ) -> Result<TranscriptionResponse> {
        let (provider, model_name) = self.resolve_transcription_provider(&request.model)?;
        request.model = model_name;
        provider.transcribe(request).await
    }

    /// Transcribes audio on an explicitly named provider.
    pub async fn transcribe_with_provider(
        &self,
        provider_name: &str,
        request: TranscriptionRequest,
    ) -> Result<TranscriptionResponse> {
        let provider = self
            .transcription_providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.transcribe(request).await
    }

    /// Names of all registered transcription providers.
    pub fn transcription_providers(&self) -> Vec<&str> {
        self.transcription_providers
            .keys()
            .map(|s| s.as_str())
            .collect()
    }

    /// Generates images, routed via the `provider/model` prefix.
    pub async fn generate_image(
        &self,
        mut request: ImageGenerationRequest,
    ) -> Result<ImageGenerationResponse> {
        let (provider, model_name) = self.resolve_image_provider(&request.model)?;
        request.model = model_name;
        provider.generate_image(request).await
    }

    /// Generates images on an explicitly named provider.
    pub async fn generate_image_with_provider(
        &self,
        provider_name: &str,
        request: ImageGenerationRequest,
    ) -> Result<ImageGenerationResponse> {
        let provider = self
            .image_providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.generate_image(request).await
    }

    /// Names of all registered image providers.
    pub fn image_providers(&self) -> Vec<&str> {
        self.image_providers.keys().map(|s| s.as_str()).collect()
    }

    /// Starts a video generation job, routed via the `provider/model` prefix.
    pub async fn generate_video(
        &self,
        mut request: VideoGenerationRequest,
    ) -> Result<VideoGenerationResponse> {
        let (provider, model_name) = self.resolve_video_provider(&request.model)?;
        request.model = model_name;
        provider.generate_video(request).await
    }

    /// Starts a video generation job on an explicitly named provider.
    pub async fn generate_video_with_provider(
        &self,
        provider_name: &str,
        request: VideoGenerationRequest,
    ) -> Result<VideoGenerationResponse> {
        let provider = self
            .video_providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.generate_video(request).await
    }

    /// Polls the status of a video generation job.
    pub async fn get_video_status(
        &self,
        provider_name: &str,
        job_id: &str,
    ) -> Result<VideoJobStatus> {
        let provider = self
            .video_providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.get_video_status(job_id).await
    }

    /// Names of all registered video providers.
    pub fn video_providers(&self) -> Vec<&str> {
        self.video_providers.keys().map(|s| s.as_str()).collect()
    }

    /// Reranks documents, routed via the `provider/model` prefix.
    pub async fn rank(&self, mut request: RankingRequest) -> Result<RankingResponse> {
        let (provider, model_name) = self.resolve_ranking_provider(&request.model)?;
        request.model = model_name;
        provider.rank(request).await
    }

    /// Reranks documents on an explicitly named provider.
    pub async fn rank_with_provider(
        &self,
        provider_name: &str,
        request: RankingRequest,
    ) -> Result<RankingResponse> {
        let provider = self
            .ranking_providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.rank(request).await
    }

    /// Names of all registered ranking providers.
    pub fn ranking_providers(&self) -> Vec<&str> {
        self.ranking_providers.keys().map(|s| s.as_str()).collect()
    }

    /// Moderates content, routed via the `provider/model` prefix.
    pub async fn moderate(&self, mut request: ModerationRequest) -> Result<ModerationResponse> {
        let (provider, model_name) = self.resolve_moderation_provider(&request.model)?;
        request.model = model_name;
        provider.moderate(request).await
    }

    /// Moderates content on an explicitly named provider.
    pub async fn moderate_with_provider(
        &self,
        provider_name: &str,
        request: ModerationRequest,
    ) -> Result<ModerationResponse> {
        let provider = self
            .moderation_providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.moderate(request).await
    }

    /// Names of all registered moderation providers.
    pub fn moderation_providers(&self) -> Vec<&str> {
        self.moderation_providers
            .keys()
            .map(|s| s.as_str())
            .collect()
    }

    /// Classifies input, routed via the `provider/model` prefix.
    pub async fn classify(
        &self,
        mut request: ClassificationRequest,
    ) -> Result<ClassificationResponse> {
        let (provider, model_name) = self.resolve_classification_provider(&request.model)?;
        request.model = model_name;
        provider.classify(request).await
    }

    /// Classifies input on an explicitly named provider.
    pub async fn classify_with_provider(
        &self,
        provider_name: &str,
        request: ClassificationRequest,
    ) -> Result<ClassificationResponse> {
        let provider = self
            .classification_providers
            .get(provider_name)
            .ok_or_else(|| Error::ProviderNotFound(provider_name.to_string()))?;
        provider.classify(request).await
    }

    /// Names of all registered classification providers.
    pub fn classification_providers(&self) -> Vec<&str> {
        self.classification_providers
            .keys()
            .map(|s| s.as_str())
            .collect()
    }

    // The resolve_* helpers below all follow the same shape: parse the
    // `provider/model` identifier, look the provider up in the matching map,
    // and return (provider, bare model name) or a ProviderNotFound error
    // listing the providers actually registered for that modality.

    /// Resolves an embedding provider from a `provider/model` string.
    fn resolve_embedding_provider(
        &self,
        model: &str,
    ) -> Result<(Arc<dyn EmbeddingProvider>, String)> {
        let (provider_name, model_name) = parse_model_identifier(model)?;
        self.embedding_providers
            .get(provider_name)
            .cloned()
            .map(|p| (p, model_name.to_string()))
            .ok_or_else(|| {
                Error::ProviderNotFound(format!(
                    "Embedding provider '{}' not configured. Available providers: {:?}",
                    provider_name,
                    self.embedding_providers.keys().collect::<Vec<_>>()
                ))
            })
    }

    /// Resolves a chat provider from a `provider/model` string.
    fn resolve_provider(&self, model: &str) -> Result<(Arc<dyn Provider>, String)> {
        let (provider_name, model_name) = parse_model_identifier(model)?;
        self.providers
            .get(provider_name)
            .cloned()
            .map(|p| (p, model_name.to_string()))
            .ok_or_else(|| {
                Error::ProviderNotFound(format!(
                    "Provider '{}' not configured. Available providers: {:?}",
                    provider_name,
                    self.providers.keys().collect::<Vec<_>>()
                ))
            })
    }

    /// Resolves a speech provider from a `provider/model` string.
    fn resolve_speech_provider(&self, model: &str) -> Result<(Arc<dyn SpeechProvider>, String)> {
        let (provider_name, model_name) = parse_model_identifier(model)?;
        self.speech_providers
            .get(provider_name)
            .cloned()
            .map(|p| (p, model_name.to_string()))
            .ok_or_else(|| {
                Error::ProviderNotFound(format!(
                    "Speech provider '{}' not configured. Available providers: {:?}",
                    provider_name,
                    self.speech_providers.keys().collect::<Vec<_>>()
                ))
            })
    }

    /// Resolves a transcription provider from a `provider/model` string.
    fn resolve_transcription_provider(
        &self,
        model: &str,
    ) -> Result<(Arc<dyn TranscriptionProvider>, String)> {
        let (provider_name, model_name) = parse_model_identifier(model)?;
        self.transcription_providers
            .get(provider_name)
            .cloned()
            .map(|p| (p, model_name.to_string()))
            .ok_or_else(|| {
                Error::ProviderNotFound(format!(
                    "Transcription provider '{}' not configured. Available providers: {:?}",
                    provider_name,
                    self.transcription_providers.keys().collect::<Vec<_>>()
                ))
            })
    }

    /// Resolves an image provider from a `provider/model` string.
    fn resolve_image_provider(&self, model: &str) -> Result<(Arc<dyn ImageProvider>, String)> {
        let (provider_name, model_name) = parse_model_identifier(model)?;
        self.image_providers
            .get(provider_name)
            .cloned()
            .map(|p| (p, model_name.to_string()))
            .ok_or_else(|| {
                Error::ProviderNotFound(format!(
                    "Image provider '{}' not configured. Available providers: {:?}",
                    provider_name,
                    self.image_providers.keys().collect::<Vec<_>>()
                ))
            })
    }

    /// Resolves a video provider from a `provider/model` string.
    fn resolve_video_provider(&self, model: &str) -> Result<(Arc<dyn VideoProvider>, String)> {
        let (provider_name, model_name) = parse_model_identifier(model)?;
        self.video_providers
            .get(provider_name)
            .cloned()
            .map(|p| (p, model_name.to_string()))
            .ok_or_else(|| {
                Error::ProviderNotFound(format!(
                    "Video provider '{}' not configured. Available providers: {:?}",
                    provider_name,
                    self.video_providers.keys().collect::<Vec<_>>()
                ))
            })
    }

    /// Resolves a ranking provider from a `provider/model` string.
    fn resolve_ranking_provider(&self, model: &str) -> Result<(Arc<dyn RankingProvider>, String)> {
        let (provider_name, model_name) = parse_model_identifier(model)?;
        self.ranking_providers
            .get(provider_name)
            .cloned()
            .map(|p| (p, model_name.to_string()))
            .ok_or_else(|| {
                Error::ProviderNotFound(format!(
                    "Ranking provider '{}' not configured. Available providers: {:?}",
                    provider_name,
                    self.ranking_providers.keys().collect::<Vec<_>>()
                ))
            })
    }

    /// Resolves a moderation provider from a `provider/model` string.
    fn resolve_moderation_provider(
        &self,
        model: &str,
    ) -> Result<(Arc<dyn ModerationProvider>, String)> {
        let (provider_name, model_name) = parse_model_identifier(model)?;
        self.moderation_providers
            .get(provider_name)
            .cloned()
            .map(|p| (p, model_name.to_string()))
            .ok_or_else(|| {
                Error::ProviderNotFound(format!(
                    "Moderation provider '{}' not configured. Available providers: {:?}",
                    provider_name,
                    self.moderation_providers.keys().collect::<Vec<_>>()
                ))
            })
    }

    /// Resolves a classification provider from a `provider/model` string.
    fn resolve_classification_provider(
        &self,
        model: &str,
    ) -> Result<(Arc<dyn ClassificationProvider>, String)> {
        let (provider_name, model_name) = parse_model_identifier(model)?;
        self.classification_providers
            .get(provider_name)
            .cloned()
            .map(|p| (p, model_name.to_string()))
            .ok_or_else(|| {
                Error::ProviderNotFound(format!(
                    "Classification provider '{}' not configured. Available providers: {:?}",
                    provider_name,
                    self.classification_providers.keys().collect::<Vec<_>>()
                ))
            })
    }
}
/// Vertex AI configuration recorded by the builder for deferred
/// construction — presumably resolved later during `build()`, which is not
/// visible in this chunk; confirm at the resolution site.
#[cfg(feature = "vertex")]
#[derive(Clone)]
enum PendingVertexConfig {
    /// Resolve all settings from the environment.
    FromEnv,
    /// Authenticate with an explicit service-account key file.
    ServiceAccount {
        path: std::path::PathBuf,
        project_id: String,
        location: String,
    },
    /// Use a specific model publisher namespace.
    WithPublisher { publisher: String },
}
/// AWS Bedrock configuration recorded by the builder for deferred
/// construction (Bedrock client setup is async — see `with_bedrock`);
/// presumably resolved during `build()`, which is not visible in this chunk.
#[cfg(feature = "bedrock")]
#[derive(Clone)]
enum PendingBedrockConfig {
    /// Resolve region/credentials from the environment.
    FromEnv,
    /// Use an explicitly chosen AWS region.
    WithRegion { region: String },
}
/// Builder for [`LLMKitClient`], mirroring the client's per-modality
/// provider maps plus builder-only state (retry policy, deferred
/// Vertex/Bedrock configs).
pub struct ClientBuilder {
    /// Chat providers to install, keyed by name.
    providers: HashMap<String, Arc<dyn Provider>>,
    embedding_providers: HashMap<String, Arc<dyn EmbeddingProvider>>,
    speech_providers: HashMap<String, Arc<dyn SpeechProvider>>,
    transcription_providers: HashMap<String, Arc<dyn TranscriptionProvider>>,
    image_providers: HashMap<String, Arc<dyn ImageProvider>>,
    video_providers: HashMap<String, Arc<dyn VideoProvider>>,
    ranking_providers: HashMap<String, Arc<dyn RankingProvider>>,
    moderation_providers: HashMap<String, Arc<dyn ModerationProvider>>,
    classification_providers: HashMap<String, Arc<dyn ClassificationProvider>>,
    /// Defaults to the first chat provider registered (see `with_provider`).
    default_provider: Option<String>,
    /// When set, chat providers are wrapped for retry — TODO confirm: the
    /// wrapping site (`build()`) is outside this chunk.
    retry_config: Option<RetryConfig>,
    /// Vertex configs deferred until build.
    #[cfg(feature = "vertex")]
    pending_vertex: Vec<(String, PendingVertexConfig)>,
    /// Bedrock configs deferred until build.
    #[cfg(feature = "bedrock")]
    pending_bedrock: Vec<(String, PendingBedrockConfig)>,
}
impl ClientBuilder {
pub fn new() -> Self {
Self {
providers: HashMap::new(),
embedding_providers: HashMap::new(),
speech_providers: HashMap::new(),
transcription_providers: HashMap::new(),
image_providers: HashMap::new(),
video_providers: HashMap::new(),
ranking_providers: HashMap::new(),
moderation_providers: HashMap::new(),
classification_providers: HashMap::new(),
default_provider: None,
retry_config: None,
#[cfg(feature = "vertex")]
pending_vertex: Vec::new(),
#[cfg(feature = "bedrock")]
pending_bedrock: Vec::new(),
}
}
pub fn with_embedding_provider(
mut self,
name: impl Into<String>,
provider: Arc<dyn EmbeddingProvider>,
) -> Self {
self.embedding_providers.insert(name.into(), provider);
self
}
pub fn with_speech_provider(
mut self,
name: impl Into<String>,
provider: Arc<dyn SpeechProvider>,
) -> Self {
self.speech_providers.insert(name.into(), provider);
self
}
pub fn with_transcription_provider(
mut self,
name: impl Into<String>,
provider: Arc<dyn TranscriptionProvider>,
) -> Self {
self.transcription_providers.insert(name.into(), provider);
self
}
pub fn with_image_provider(
mut self,
name: impl Into<String>,
provider: Arc<dyn ImageProvider>,
) -> Self {
self.image_providers.insert(name.into(), provider);
self
}
pub fn with_video_provider(
mut self,
name: impl Into<String>,
provider: Arc<dyn VideoProvider>,
) -> Self {
self.video_providers.insert(name.into(), provider);
self
}
pub fn with_ranking_provider(
mut self,
name: impl Into<String>,
provider: Arc<dyn RankingProvider>,
) -> Self {
self.ranking_providers.insert(name.into(), provider);
self
}
pub fn with_moderation_provider(
mut self,
name: impl Into<String>,
provider: Arc<dyn ModerationProvider>,
) -> Self {
self.moderation_providers.insert(name.into(), provider);
self
}
pub fn with_classification_provider(
mut self,
name: impl Into<String>,
provider: Arc<dyn ClassificationProvider>,
) -> Self {
self.classification_providers.insert(name.into(), provider);
self
}
pub fn with_retry(mut self, config: RetryConfig) -> Self {
self.retry_config = Some(config);
self
}
pub fn with_default_retry(mut self) -> Self {
self.retry_config = Some(RetryConfig::default());
self
}
/// Registers a chat provider under `name`.
///
/// Side effect: the FIRST provider registered becomes the default; later
/// registrations do not change it (use `with_default` to override).
/// Registering the same name twice replaces the earlier provider.
pub fn with_provider(mut self, name: impl Into<String>, provider: Arc<dyn Provider>) -> Self {
    let name = name.into();
    // First registration wins the default slot.
    if self.default_provider.is_none() {
        self.default_provider = Some(name.clone());
    }
    self.providers.insert(name, provider);
    self
}
pub fn with_default(mut self, name: impl Into<String>) -> Self {
self.default_provider = Some(name.into());
self
}
#[cfg(feature = "anthropic")]
pub fn with_anthropic_from_env(self) -> Self {
match crate::providers::chat::anthropic::AnthropicProvider::from_env() {
Ok(provider) => self.with_provider("anthropic", Arc::new(provider)),
Err(_) => self, }
}
#[cfg(feature = "anthropic")]
pub fn with_anthropic(self, api_key: impl Into<String>) -> Result<Self> {
let provider = crate::providers::chat::anthropic::AnthropicProvider::with_api_key(api_key)?;
Ok(self.with_provider("anthropic", Arc::new(provider)))
}
#[cfg(feature = "anthropic")]
pub fn with_anthropic_config(self, config: ProviderConfig) -> Result<Self> {
let provider = crate::providers::chat::anthropic::AnthropicProvider::new(config)?;
Ok(self.with_provider("anthropic", Arc::new(provider)))
}
#[cfg(feature = "openai")]
pub fn with_openai_from_env(mut self) -> Self {
match crate::providers::chat::openai::OpenAIProvider::from_env() {
Ok(provider) => {
let provider = Arc::new(provider);
self.embedding_providers.insert(
"openai".to_string(),
Arc::clone(&provider) as Arc<dyn EmbeddingProvider>,
);
self.with_provider("openai", provider)
}
Err(_) => self, }
}
#[cfg(feature = "openai")]
pub fn with_openai(mut self, api_key: impl Into<String>) -> Result<Self> {
let provider =
Arc::new(crate::providers::chat::openai::OpenAIProvider::with_api_key(api_key)?);
self.embedding_providers.insert(
"openai".to_string(),
Arc::clone(&provider) as Arc<dyn EmbeddingProvider>,
);
Ok(self.with_provider("openai", provider))
}
#[cfg(feature = "openai")]
pub fn with_openai_config(mut self, config: ProviderConfig) -> Result<Self> {
let provider = Arc::new(crate::providers::chat::openai::OpenAIProvider::new(config)?);
self.embedding_providers.insert(
"openai".to_string(),
Arc::clone(&provider) as Arc<dyn EmbeddingProvider>,
);
Ok(self.with_provider("openai", provider))
}
#[cfg(feature = "groq")]
pub fn with_groq_from_env(self) -> Self {
match crate::providers::chat::groq::GroqProvider::from_env() {
Ok(provider) => self.with_provider("groq", Arc::new(provider)),
Err(_) => self, }
}
#[cfg(feature = "groq")]
pub fn with_groq(self, api_key: impl Into<String>) -> Result<Self> {
let provider = crate::providers::chat::groq::GroqProvider::with_api_key(api_key)?;
Ok(self.with_provider("groq", Arc::new(provider)))
}
#[cfg(feature = "groq")]
pub fn with_groq_config(self, config: ProviderConfig) -> Result<Self> {
let provider = crate::providers::chat::groq::GroqProvider::new(config)?;
Ok(self.with_provider("groq", Arc::new(provider)))
}
#[cfg(feature = "mistral")]
pub fn with_mistral_from_env(self) -> Self {
match crate::providers::chat::mistral::MistralProvider::from_env() {
Ok(provider) => self.with_provider("mistral", Arc::new(provider)),
Err(_) => self, }
}
#[cfg(feature = "mistral")]
pub fn with_mistral(self, api_key: impl Into<String>) -> Result<Self> {
let provider = crate::providers::chat::mistral::MistralProvider::with_api_key(api_key)?;
Ok(self.with_provider("mistral", Arc::new(provider)))
}
#[cfg(feature = "mistral")]
/// NOTE(review): the `config` argument is ignored (`_config`) and the
/// provider is built from environment variables instead — inconsistent with
/// the other `with_*_config` builders, which honor their config. Confirm
/// whether `MistralProvider` supports explicit configuration; if it does,
/// this should construct from `_config` rather than the environment.
pub fn with_mistral_config(self, _config: ProviderConfig) -> Result<Self> {
    let provider = crate::providers::chat::mistral::MistralProvider::from_env()?;
    Ok(self.with_provider("mistral", Arc::new(provider)))
}
#[cfg(feature = "azure")]
pub fn with_azure_from_env(self) -> Self {
match crate::providers::chat::azure::AzureOpenAIProvider::from_env() {
Ok(provider) => self.with_provider("azure", Arc::new(provider)),
Err(_) => self, }
}
#[cfg(feature = "azure")]
pub fn with_azure(self, config: crate::providers::chat::azure::AzureConfig) -> Result<Self> {
let provider = crate::providers::chat::azure::AzureOpenAIProvider::new(config)?;
Ok(self.with_provider("azure", Arc::new(provider)))
}
#[cfg(feature = "bedrock")]
pub fn with_bedrock_from_env(mut self) -> Self {
self.pending_bedrock
.push(("bedrock".to_string(), PendingBedrockConfig::FromEnv));
self
}
#[cfg(feature = "bedrock")]
pub fn with_bedrock_region(mut self, region: impl Into<String>) -> Self {
self.pending_bedrock.push((
"bedrock".to_string(),
PendingBedrockConfig::WithRegion {
region: region.into(),
},
));
self
}
#[cfg(feature = "bedrock")]
pub async fn with_bedrock(
self,
builder: crate::providers::chat::bedrock::BedrockBuilder,
) -> Result<Self> {
let provider = builder.build().await?;
Ok(self.with_provider("bedrock", Arc::new(provider)))
}
#[cfg(feature = "openai-compatible")]
pub fn with_together_from_env(self) -> Self {
match crate::providers::chat::openai_compatible::OpenAICompatibleProvider::together_from_env(
) {
Ok(provider) => self.with_provider("together", Arc::new(provider)),
Err(_) => self,
}
}
#[cfg(feature = "openai-compatible")]
pub fn with_together(self, api_key: impl Into<String>) -> Result<Self> {
let provider =
crate::providers::chat::openai_compatible::OpenAICompatibleProvider::together(api_key)?;
Ok(self.with_provider("together", Arc::new(provider)))
}
#[cfg(all(feature = "openai-compatible", not(feature = "fireworks")))]
pub fn with_fireworks_from_env(self) -> Self {
match crate::providers::chat::openai_compatible::OpenAICompatibleProvider::fireworks_from_env() {
Ok(provider) => self.with_provider("fireworks", Arc::new(provider)),
Err(_) => self,
}
}
#[cfg(all(feature = "openai-compatible", not(feature = "fireworks")))]
pub fn with_fireworks(self, api_key: impl Into<String>) -> Result<Self> {
let provider =
crate::providers::chat::openai_compatible::OpenAICompatibleProvider::fireworks(
api_key,
)?;
Ok(self.with_provider("fireworks", Arc::new(provider)))
}
#[cfg(feature = "fireworks")]
pub fn with_fireworks_from_env(self) -> Self {
match crate::providers::chat::fireworks::FireworksProvider::from_env() {
Ok(provider) => self.with_provider("fireworks", Arc::new(provider)),
Err(_) => self,
}
}
#[cfg(feature = "fireworks")]
pub fn with_fireworks(self, api_key: impl Into<String>) -> Result<Self> {
let provider = crate::providers::chat::fireworks::FireworksProvider::with_api_key(api_key)?;
Ok(self.with_provider("fireworks", Arc::new(provider)))
}
#[cfg(all(feature = "openai-compatible", not(feature = "deepseek")))]
pub fn with_deepseek_from_env(self) -> Self {
match crate::providers::chat::openai_compatible::OpenAICompatibleProvider::deepseek_from_env(
) {
Ok(provider) => self.with_provider("deepseek", Arc::new(provider)),
Err(_) => self,
}
}
#[cfg(all(feature = "openai-compatible", not(feature = "deepseek")))]
pub fn with_deepseek(self, api_key: impl Into<String>) -> Result<Self> {
let provider =
crate::providers::chat::openai_compatible::OpenAICompatibleProvider::deepseek(api_key)?;
Ok(self.with_provider("deepseek", Arc::new(provider)))
}
#[cfg(feature = "deepseek")]
pub fn with_deepseek_from_env(self) -> Self {
match crate::providers::chat::deepseek::DeepSeekProvider::from_env() {
Ok(provider) => self.with_provider("deepseek", Arc::new(provider)),
Err(_) => self,
}
}
#[cfg(feature = "deepseek")]
pub fn with_deepseek(self, api_key: impl Into<String>) -> Result<Self> {
let provider = crate::providers::chat::deepseek::DeepSeekProvider::with_api_key(api_key)?;
Ok(self.with_provider("deepseek", Arc::new(provider)))
}
#[cfg(feature = "openai-compatible")]
pub fn with_perplexity_from_env(self) -> Self {
match crate::providers::chat::openai_compatible::OpenAICompatibleProvider::perplexity_from_env() {
Ok(provider) => self.with_provider("perplexity", Arc::new(provider)),
Err(_) => self,
}
}
#[cfg(feature = "openai-compatible")]
pub fn with_perplexity(self, api_key: impl Into<String>) -> Result<Self> {
let provider =
crate::providers::chat::openai_compatible::OpenAICompatibleProvider::perplexity(
api_key,
)?;
Ok(self.with_provider("perplexity", Arc::new(provider)))
}
/// Registers the Anyscale provider from the environment; the builder is
/// returned unchanged when configuration is unavailable.
#[cfg(feature = "openai-compatible")]
pub fn with_anyscale_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::anyscale_from_env() {
        return self.with_provider("anyscale", Arc::new(p));
    }
    self
}
/// Registers the Anyscale provider with an explicit API key.
#[cfg(feature = "openai-compatible")]
pub fn with_anyscale(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("anyscale", Arc::new(Compat::anyscale(api_key)?)))
}
/// Registers the DeepInfra provider from the environment; no-op on failure.
#[cfg(feature = "openai-compatible")]
pub fn with_deepinfra_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::deepinfra_from_env() {
        return self.with_provider("deepinfra", Arc::new(p));
    }
    self
}
/// Registers the DeepInfra provider with an explicit API key.
#[cfg(feature = "openai-compatible")]
pub fn with_deepinfra(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("deepinfra", Arc::new(Compat::deepinfra(api_key)?)))
}
/// Registers the Novita provider from the environment; no-op on failure.
#[cfg(feature = "openai-compatible")]
pub fn with_novita_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::novita_from_env() {
        return self.with_provider("novita", Arc::new(p));
    }
    self
}
/// Registers the Novita provider with an explicit API key.
#[cfg(feature = "openai-compatible")]
pub fn with_novita(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("novita", Arc::new(Compat::novita(api_key)?)))
}
/// Registers the Hyperbolic provider from the environment; no-op on failure.
#[cfg(feature = "openai-compatible")]
pub fn with_hyperbolic_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::hyperbolic_from_env() {
        return self.with_provider("hyperbolic", Arc::new(p));
    }
    self
}
/// Registers the Hyperbolic provider with an explicit API key.
#[cfg(feature = "openai-compatible")]
pub fn with_hyperbolic(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("hyperbolic", Arc::new(Compat::hyperbolic(api_key)?)))
}
// Cerebras comes in two mutually exclusive flavors selected by feature flags:
// - When the dedicated `cerebras` feature is OFF but `openai-compatible` is ON,
//   the generic OpenAI-compatible client is used.
// - When the `cerebras` feature is ON, the dedicated `CerebrasProvider` is used.
// The `not(feature = "cerebras")` guard prevents duplicate method definitions
// when both features are enabled.
/// Registers Cerebras via the OpenAI-compatible client from the environment;
/// the builder is returned unchanged when configuration is unavailable.
#[cfg(all(feature = "openai-compatible", not(feature = "cerebras")))]
pub fn with_cerebras_from_env(self) -> Self {
match crate::providers::chat::openai_compatible::OpenAICompatibleProvider::cerebras_from_env(
) {
Ok(provider) => self.with_provider("cerebras", Arc::new(provider)),
Err(_) => self,
}
}
/// Registers Cerebras via the OpenAI-compatible client with an explicit API key.
#[cfg(all(feature = "openai-compatible", not(feature = "cerebras")))]
pub fn with_cerebras(self, api_key: impl Into<String>) -> Result<Self> {
let provider =
crate::providers::chat::openai_compatible::OpenAICompatibleProvider::cerebras(api_key)?;
Ok(self.with_provider("cerebras", Arc::new(provider)))
}
/// Registers the dedicated Cerebras provider from the environment; no-op on failure.
#[cfg(feature = "cerebras")]
pub fn with_cerebras_from_env(self) -> Self {
match crate::providers::chat::cerebras::CerebrasProvider::from_env() {
Ok(provider) => self.with_provider("cerebras", Arc::new(provider)),
Err(_) => self,
}
}
/// Registers the dedicated Cerebras provider with an explicit API key.
#[cfg(feature = "cerebras")]
pub fn with_cerebras(self, api_key: impl Into<String>) -> Result<Self> {
let provider = crate::providers::chat::cerebras::CerebrasProvider::with_api_key(api_key)?;
Ok(self.with_provider("cerebras", Arc::new(provider)))
}
/// Registers the Reka provider from the environment; no-op on failure.
#[cfg(feature = "reka")]
pub fn with_reka_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::reka_from_env() {
        return self.with_provider("reka", Arc::new(p));
    }
    self
}
/// Registers the Reka provider with an explicit API key.
#[cfg(feature = "reka")]
pub fn with_reka(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("reka", Arc::new(Compat::reka(api_key)?)))
}
/// Registers the Reka provider from a caller-supplied configuration.
#[cfg(feature = "reka")]
pub fn with_reka_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("reka", Arc::new(Compat::reka_config(config)?)))
}
/// Registers the NVIDIA NIM provider from the environment; no-op on failure.
#[cfg(feature = "nvidia-nim")]
pub fn with_nvidia_nim_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::nvidia_nim_from_env() {
        return self.with_provider("nvidia_nim", Arc::new(p));
    }
    self
}
/// Registers the NVIDIA NIM provider with an explicit API key.
#[cfg(feature = "nvidia-nim")]
pub fn with_nvidia_nim(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("nvidia_nim", Arc::new(Compat::nvidia_nim(api_key)?)))
}
/// Registers the NVIDIA NIM provider from a caller-supplied configuration.
#[cfg(feature = "nvidia-nim")]
pub fn with_nvidia_nim_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("nvidia_nim", Arc::new(Compat::nvidia_nim_config(config)?)))
}
/// Registers the Xinference provider from the environment; no-op on failure.
#[cfg(feature = "xinference")]
pub fn with_xinference_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::xinference_from_env() {
        return self.with_provider("xinference", Arc::new(p));
    }
    self
}
/// Registers the Xinference provider with an explicit API key.
#[cfg(feature = "xinference")]
pub fn with_xinference(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("xinference", Arc::new(Compat::xinference(api_key)?)))
}
/// Registers the Xinference provider from a caller-supplied configuration.
#[cfg(feature = "xinference")]
pub fn with_xinference_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("xinference", Arc::new(Compat::xinference_config(config)?)))
}
/// Registers the PublicAI provider from the environment; no-op on failure.
#[cfg(feature = "public-ai")]
pub fn with_public_ai_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::public_ai_from_env() {
        return self.with_provider("public_ai", Arc::new(p));
    }
    self
}
/// Registers the PublicAI provider with an explicit API key.
#[cfg(feature = "public-ai")]
pub fn with_public_ai(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("public_ai", Arc::new(Compat::public_ai(api_key)?)))
}
/// Registers the PublicAI provider from a caller-supplied configuration.
#[cfg(feature = "public-ai")]
pub fn with_public_ai_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("public_ai", Arc::new(Compat::public_ai_config(config)?)))
}
/// Registers the Bytez provider from the environment; no-op on failure.
#[cfg(feature = "bytez")]
pub fn with_bytez_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::bytez_from_env() {
        return self.with_provider("bytez", Arc::new(p));
    }
    self
}
/// Registers the Bytez provider with an explicit API key.
#[cfg(feature = "bytez")]
pub fn with_bytez(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("bytez", Arc::new(Compat::bytez(api_key)?)))
}
/// Registers the Bytez provider from a caller-supplied configuration.
#[cfg(feature = "bytez")]
pub fn with_bytez_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("bytez", Arc::new(Compat::bytez_config(config)?)))
}
/// Registers the Chutes provider from the environment; no-op on failure.
#[cfg(feature = "chutes")]
pub fn with_chutes_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::chutes_from_env() {
        return self.with_provider("chutes", Arc::new(p));
    }
    self
}
/// Registers the Chutes provider with an explicit API key.
#[cfg(feature = "chutes")]
pub fn with_chutes(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("chutes", Arc::new(Compat::chutes(api_key)?)))
}
/// Registers the Chutes provider from a caller-supplied configuration.
#[cfg(feature = "chutes")]
pub fn with_chutes_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("chutes", Arc::new(Compat::chutes_config(config)?)))
}
/// Registers the CometAPI provider from the environment; no-op on failure.
#[cfg(feature = "comet-api")]
pub fn with_comet_api_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::comet_api_from_env() {
        return self.with_provider("comet_api", Arc::new(p));
    }
    self
}
/// Registers the CometAPI provider with an explicit API key.
#[cfg(feature = "comet-api")]
pub fn with_comet_api(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("comet_api", Arc::new(Compat::comet_api(api_key)?)))
}
/// Registers the CometAPI provider from a caller-supplied configuration.
#[cfg(feature = "comet-api")]
pub fn with_comet_api_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("comet_api", Arc::new(Compat::comet_api_config(config)?)))
}
/// Registers the CompactifAI provider from the environment; no-op on failure.
#[cfg(feature = "compactifai")]
pub fn with_compactifai_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::compactifai_from_env() {
        return self.with_provider("compactifai", Arc::new(p));
    }
    self
}
/// Registers the CompactifAI provider with an explicit API key.
#[cfg(feature = "compactifai")]
pub fn with_compactifai(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("compactifai", Arc::new(Compat::compactifai(api_key)?)))
}
/// Registers the CompactifAI provider from a caller-supplied configuration.
#[cfg(feature = "compactifai")]
pub fn with_compactifai_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("compactifai", Arc::new(Compat::compactifai_config(config)?)))
}
/// Registers the Synthetic provider from the environment; no-op on failure.
#[cfg(feature = "synthetic")]
pub fn with_synthetic_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::synthetic_from_env() {
        return self.with_provider("synthetic", Arc::new(p));
    }
    self
}
/// Registers the Synthetic provider with an explicit API key.
#[cfg(feature = "synthetic")]
pub fn with_synthetic(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("synthetic", Arc::new(Compat::synthetic(api_key)?)))
}
/// Registers the Synthetic provider from a caller-supplied configuration.
#[cfg(feature = "synthetic")]
pub fn with_synthetic_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("synthetic", Arc::new(Compat::synthetic_config(config)?)))
}
/// Registers the Morph provider from the environment; no-op on failure.
#[cfg(feature = "morph")]
pub fn with_morph_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::morph_from_env() {
        return self.with_provider("morph", Arc::new(p));
    }
    self
}
/// Registers the Morph provider with an explicit API key.
#[cfg(feature = "morph")]
pub fn with_morph(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("morph", Arc::new(Compat::morph(api_key)?)))
}
/// Registers the Morph provider from a caller-supplied configuration.
#[cfg(feature = "morph")]
pub fn with_morph_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("morph", Arc::new(Compat::morph_config(config)?)))
}
/// Registers the Heroku AI provider from the environment; no-op on failure.
#[cfg(feature = "heroku-ai")]
pub fn with_heroku_ai_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::heroku_ai_from_env() {
        return self.with_provider("heroku_ai", Arc::new(p));
    }
    self
}
/// Registers the Heroku AI provider with an explicit API key.
#[cfg(feature = "heroku-ai")]
pub fn with_heroku_ai(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("heroku_ai", Arc::new(Compat::heroku_ai(api_key)?)))
}
/// Registers the Heroku AI provider from a caller-supplied configuration.
#[cfg(feature = "heroku-ai")]
pub fn with_heroku_ai_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("heroku_ai", Arc::new(Compat::heroku_ai_config(config)?)))
}
/// Registers the v0 provider from the environment; no-op on failure.
#[cfg(feature = "v0")]
pub fn with_v0_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::v0_from_env() {
        return self.with_provider("v0", Arc::new(p));
    }
    self
}
/// Registers the v0 provider with an explicit API key.
#[cfg(feature = "v0")]
pub fn with_v0(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("v0", Arc::new(Compat::v0(api_key)?)))
}
/// Registers the v0 provider from a caller-supplied configuration.
#[cfg(feature = "v0")]
pub fn with_v0_config(self, config: crate::provider::ProviderConfig) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("v0", Arc::new(Compat::v0_config(config)?)))
}
/// Registers an arbitrary OpenAI-compatible endpoint under the caller-chosen
/// `name`. The same name is used both for the provider's identity and for the
/// registry key.
#[cfg(feature = "openai-compatible")]
pub fn with_openai_compatible(
    self,
    name: impl Into<String>,
    base_url: impl Into<String>,
    api_key: Option<String>,
) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    let name = name.into();
    let provider = Compat::custom(name.clone(), base_url, api_key)?;
    Ok(self.with_provider(name, Arc::new(provider)))
}
/// Registers the Google provider from the environment; no-op on failure.
#[cfg(feature = "google")]
pub fn with_google_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::google::GoogleProvider::from_env() {
        return self.with_provider("google", Arc::new(p));
    }
    self
}
/// Registers the Google provider with an explicit API key.
#[cfg(feature = "google")]
pub fn with_google(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::chat::google::GoogleProvider::with_api_key(api_key)?;
    Ok(self.with_provider("google", Arc::new(p)))
}
/// Registers the Google provider from a caller-supplied configuration.
#[cfg(feature = "google")]
pub fn with_google_config(self, config: ProviderConfig) -> Result<Self> {
    let p = crate::providers::chat::google::GoogleProvider::new(config)?;
    Ok(self.with_provider("google", Arc::new(p)))
}
/// Defers Vertex AI registration to `build()`, where credentials are
/// resolved from the environment.
#[cfg(feature = "vertex")]
pub fn with_vertex_from_env(mut self) -> Self {
    let entry = ("vertex".to_string(), PendingVertexConfig::FromEnv);
    self.pending_vertex.push(entry);
    self
}
/// Defers Vertex AI registration to `build()` using a service-account file
/// plus an explicit project and location.
#[cfg(feature = "vertex")]
pub fn with_vertex_service_account(
    mut self,
    path: impl AsRef<std::path::Path>,
    project_id: impl Into<String>,
    location: impl Into<String>,
) -> Self {
    let cfg = PendingVertexConfig::ServiceAccount {
        path: path.as_ref().to_path_buf(),
        project_id: project_id.into(),
        location: location.into(),
    };
    self.pending_vertex.push(("vertex".to_string(), cfg));
    self
}
/// Registers a Vertex AI provider immediately from a prepared configuration.
#[cfg(feature = "vertex")]
pub fn with_vertex_config(
    self,
    config: crate::providers::chat::vertex::VertexConfig,
) -> Result<Self> {
    let p = crate::providers::chat::vertex::VertexProvider::with_config(config)?;
    Ok(self.with_provider("vertex", Arc::new(p)))
}
/// Queues a deferred Vertex AI registration for `name` that `build()` will
/// resolve from the environment with the given model `publisher`.
///
/// Private helper: the five publisher-specific `*_from_env` methods below were
/// byte-identical apart from the name/publisher pair, so the shared logic
/// lives here.
#[cfg(feature = "vertex")]
fn queue_vertex_publisher(mut self, name: &str, publisher: &str) -> Self {
    self.pending_vertex.push((
        name.to_string(),
        PendingVertexConfig::WithPublisher {
            publisher: publisher.to_string(),
        },
    ));
    self
}
/// Registers a Vertex AI provider for `name` immediately, forcing `publisher`
/// onto the supplied configuration.
///
/// Private helper shared by the five publisher-specific `*_config` methods.
#[cfg(feature = "vertex")]
fn add_vertex_publisher_config(
    self,
    name: &str,
    publisher: &str,
    mut config: crate::providers::chat::vertex::VertexConfig,
) -> Result<Self> {
    config.set_publisher(publisher);
    let provider = crate::providers::chat::vertex::VertexProvider::with_config(config)?;
    Ok(self.with_provider(name, Arc::new(provider)))
}
/// Queues Anthropic-on-Vertex registration, resolved at `build()` time.
#[cfg(feature = "vertex")]
pub fn with_vertex_anthropic_from_env(self) -> Self {
    self.queue_vertex_publisher("vertex-anthropic", "anthropic")
}
/// Registers Anthropic-on-Vertex immediately from a prepared configuration.
#[cfg(feature = "vertex")]
pub fn with_vertex_anthropic_config(
    self,
    config: crate::providers::chat::vertex::VertexConfig,
) -> Result<Self> {
    self.add_vertex_publisher_config("vertex-anthropic", "anthropic", config)
}
/// Queues DeepSeek-on-Vertex registration, resolved at `build()` time.
#[cfg(feature = "vertex")]
pub fn with_vertex_deepseek_from_env(self) -> Self {
    self.queue_vertex_publisher("vertex-deepseek", "deepseek")
}
/// Registers DeepSeek-on-Vertex immediately from a prepared configuration.
#[cfg(feature = "vertex")]
pub fn with_vertex_deepseek_config(
    self,
    config: crate::providers::chat::vertex::VertexConfig,
) -> Result<Self> {
    self.add_vertex_publisher_config("vertex-deepseek", "deepseek", config)
}
/// Queues Llama-on-Vertex (publisher "meta") registration, resolved at `build()`.
#[cfg(feature = "vertex")]
pub fn with_vertex_llama_from_env(self) -> Self {
    self.queue_vertex_publisher("vertex-llama", "meta")
}
/// Registers Llama-on-Vertex immediately from a prepared configuration.
#[cfg(feature = "vertex")]
pub fn with_vertex_llama_config(
    self,
    config: crate::providers::chat::vertex::VertexConfig,
) -> Result<Self> {
    self.add_vertex_publisher_config("vertex-llama", "meta", config)
}
/// Queues Mistral-on-Vertex (publisher "mistralai") registration, resolved at `build()`.
#[cfg(feature = "vertex")]
pub fn with_vertex_mistral_from_env(self) -> Self {
    self.queue_vertex_publisher("vertex-mistral", "mistralai")
}
/// Registers Mistral-on-Vertex immediately from a prepared configuration.
#[cfg(feature = "vertex")]
pub fn with_vertex_mistral_config(
    self,
    config: crate::providers::chat::vertex::VertexConfig,
) -> Result<Self> {
    self.add_vertex_publisher_config("vertex-mistral", "mistralai", config)
}
/// Queues AI21-on-Vertex (publisher "ai21labs") registration, resolved at `build()`.
#[cfg(feature = "vertex")]
pub fn with_vertex_ai21_from_env(self) -> Self {
    self.queue_vertex_publisher("vertex-ai21", "ai21labs")
}
/// Registers AI21-on-Vertex immediately from a prepared configuration.
#[cfg(feature = "vertex")]
pub fn with_vertex_ai21_config(
    self,
    config: crate::providers::chat::vertex::VertexConfig,
) -> Result<Self> {
    self.add_vertex_publisher_config("vertex-ai21", "ai21labs", config)
}
/// Installs `provider` as both the "cohere" chat provider and the "cohere"
/// embedding provider (the same provider instance backs both registries).
///
/// Private helper: the three public `with_cohere*` methods previously
/// duplicated this dual registration verbatim.
#[cfg(feature = "cohere")]
fn install_cohere(mut self, provider: crate::providers::chat::cohere::CohereProvider) -> Self {
    let provider = Arc::new(provider);
    self.embedding_providers.insert(
        "cohere".to_string(),
        Arc::clone(&provider) as Arc<dyn EmbeddingProvider>,
    );
    self.with_provider("cohere", provider)
}
/// Registers Cohere (chat + embeddings) from the environment; the builder is
/// returned unchanged when configuration is unavailable.
#[cfg(feature = "cohere")]
pub fn with_cohere_from_env(self) -> Self {
    match crate::providers::chat::cohere::CohereProvider::from_env() {
        Ok(provider) => self.install_cohere(provider),
        Err(_) => self,
    }
}
/// Registers Cohere (chat + embeddings) with an explicit API key.
#[cfg(feature = "cohere")]
pub fn with_cohere(self, api_key: impl Into<String>) -> Result<Self> {
    let provider = crate::providers::chat::cohere::CohereProvider::with_api_key(api_key)?;
    Ok(self.install_cohere(provider))
}
/// Registers Cohere (chat + embeddings) from a caller-supplied configuration.
#[cfg(feature = "cohere")]
pub fn with_cohere_config(self, config: ProviderConfig) -> Result<Self> {
    let provider = crate::providers::chat::cohere::CohereProvider::new(config)?;
    Ok(self.install_cohere(provider))
}
/// Registers the AI21 provider from the environment; no-op on failure.
#[cfg(feature = "ai21")]
pub fn with_ai21_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::ai21::AI21Provider::from_env() {
        return self.with_provider("ai21", Arc::new(p));
    }
    self
}
/// Registers the AI21 provider with an explicit API key.
#[cfg(feature = "ai21")]
pub fn with_ai21(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::chat::ai21::AI21Provider::with_api_key(api_key)?;
    Ok(self.with_provider("ai21", Arc::new(p)))
}
/// Registers the AI21 provider from a caller-supplied configuration.
#[cfg(feature = "ai21")]
pub fn with_ai21_config(self, config: ProviderConfig) -> Result<Self> {
    let p = crate::providers::chat::ai21::AI21Provider::new(config)?;
    Ok(self.with_provider("ai21", Arc::new(p)))
}
/// Registers the Hugging Face provider from the environment; no-op on failure.
#[cfg(feature = "huggingface")]
pub fn with_huggingface_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::huggingface::HuggingFaceProvider::from_env() {
        return self.with_provider("huggingface", Arc::new(p));
    }
    self
}
/// Registers the Hugging Face provider with an explicit API key.
#[cfg(feature = "huggingface")]
pub fn with_huggingface(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::chat::huggingface::HuggingFaceProvider::with_api_key(api_key)?;
    Ok(self.with_provider("huggingface", Arc::new(p)))
}
/// Registers a Hugging Face dedicated inference endpoint.
#[cfg(feature = "huggingface")]
pub fn with_huggingface_endpoint(
    self,
    endpoint_url: impl Into<String>,
    api_key: impl Into<String>,
) -> Result<Self> {
    let p =
        crate::providers::chat::huggingface::HuggingFaceProvider::endpoint(endpoint_url, api_key)?;
    Ok(self.with_provider("huggingface", Arc::new(p)))
}
/// Registers the Hugging Face provider from a caller-supplied configuration.
#[cfg(feature = "huggingface")]
pub fn with_huggingface_config(self, config: ProviderConfig) -> Result<Self> {
    let p = crate::providers::chat::huggingface::HuggingFaceProvider::new(config)?;
    Ok(self.with_provider("huggingface", Arc::new(p)))
}
/// Registers the Replicate provider from the environment; no-op on failure.
#[cfg(feature = "replicate")]
pub fn with_replicate_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::replicate::ReplicateProvider::from_env() {
        return self.with_provider("replicate", Arc::new(p));
    }
    self
}
/// Registers the Replicate provider with an explicit API key.
#[cfg(feature = "replicate")]
pub fn with_replicate(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::chat::replicate::ReplicateProvider::with_api_key(api_key)?;
    Ok(self.with_provider("replicate", Arc::new(p)))
}
/// Registers the Replicate provider from a caller-supplied configuration.
#[cfg(feature = "replicate")]
pub fn with_replicate_config(self, config: ProviderConfig) -> Result<Self> {
    let p = crate::providers::chat::replicate::ReplicateProvider::new(config)?;
    Ok(self.with_provider("replicate", Arc::new(p)))
}
/// Registers the Baseten provider from the environment; no-op on failure.
#[cfg(feature = "baseten")]
pub fn with_baseten_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::baseten::BasetenProvider::from_env() {
        return self.with_provider("baseten", Arc::new(p));
    }
    self
}
/// Registers the Baseten provider with an explicit API key.
#[cfg(feature = "baseten")]
pub fn with_baseten(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::chat::baseten::BasetenProvider::with_api_key(api_key)?;
    Ok(self.with_provider("baseten", Arc::new(p)))
}
/// Registers the Baseten provider bound to a specific deployed model.
#[cfg(feature = "baseten")]
pub fn with_baseten_model(
    self,
    model_id: impl Into<String>,
    api_key: impl Into<String>,
) -> Result<Self> {
    let p = crate::providers::chat::baseten::BasetenProvider::with_model(model_id, api_key)?;
    Ok(self.with_provider("baseten", Arc::new(p)))
}
/// Registers the Baseten provider from a caller-supplied configuration.
#[cfg(feature = "baseten")]
pub fn with_baseten_config(self, config: ProviderConfig) -> Result<Self> {
    let p = crate::providers::chat::baseten::BasetenProvider::new(config)?;
    Ok(self.with_provider("baseten", Arc::new(p)))
}
/// Registers the RunPod provider from the environment; no-op on failure.
#[cfg(feature = "runpod")]
pub fn with_runpod_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::runpod::RunPodProvider::from_env() {
        return self.with_provider("runpod", Arc::new(p));
    }
    self
}
/// Registers the RunPod provider for a specific serverless endpoint.
#[cfg(feature = "runpod")]
pub fn with_runpod(
    self,
    endpoint_id: impl Into<String>,
    api_key: impl Into<String>,
) -> Result<Self> {
    let p = crate::providers::chat::runpod::RunPodProvider::new(endpoint_id, api_key)?;
    Ok(self.with_provider("runpod", Arc::new(p)))
}
/// Registers the Cloudflare Workers AI provider from the environment; no-op on failure.
#[cfg(feature = "cloudflare")]
pub fn with_cloudflare_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::cloudflare::CloudflareProvider::from_env() {
        return self.with_provider("cloudflare", Arc::new(p));
    }
    self
}
/// Registers the Cloudflare provider with explicit account and token.
#[cfg(feature = "cloudflare")]
pub fn with_cloudflare(
    self,
    account_id: impl Into<String>,
    api_token: impl Into<String>,
) -> Result<Self> {
    let p = crate::providers::chat::cloudflare::CloudflareProvider::new(account_id, api_token)?;
    Ok(self.with_provider("cloudflare", Arc::new(p)))
}
/// Registers the IBM watsonx provider from the environment; no-op on failure.
#[cfg(feature = "watsonx")]
pub fn with_watsonx_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::watsonx::WatsonxProvider::from_env() {
        return self.with_provider("watsonx", Arc::new(p));
    }
    self
}
/// Registers the watsonx provider with explicit API key and project id.
#[cfg(feature = "watsonx")]
pub fn with_watsonx(
    self,
    api_key: impl Into<String>,
    project_id: impl Into<String>,
) -> Result<Self> {
    let p = crate::providers::chat::watsonx::WatsonxProvider::new(api_key, project_id)?;
    Ok(self.with_provider("watsonx", Arc::new(p)))
}
/// Registers the Databricks provider from the environment; no-op on failure.
#[cfg(feature = "databricks")]
pub fn with_databricks_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::databricks::DatabricksProvider::from_env() {
        return self.with_provider("databricks", Arc::new(p));
    }
    self
}
/// Registers the Databricks provider with explicit host and token.
#[cfg(feature = "databricks")]
pub fn with_databricks(
    self,
    host: impl Into<String>,
    token: impl Into<String>,
) -> Result<Self> {
    let p = crate::providers::chat::databricks::DatabricksProvider::new(host, token)?;
    Ok(self.with_provider("databricks", Arc::new(p)))
}
/// Registers the SambaNova provider from the environment; no-op on failure.
#[cfg(feature = "sambanova")]
pub fn with_sambanova_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::sambanova::SambaNovaProvider::from_env() {
        return self.with_provider("sambanova", Arc::new(p));
    }
    self
}
/// Registers the SambaNova provider with an explicit API key.
#[cfg(feature = "sambanova")]
pub fn with_sambanova(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::chat::sambanova::SambaNovaProvider::with_api_key(api_key)?;
    Ok(self.with_provider("sambanova", Arc::new(p)))
}
/// Registers the xAI provider from the environment; no-op on failure.
#[cfg(feature = "openai-compatible")]
pub fn with_xai_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::xai_from_env() {
        return self.with_provider("xai", Arc::new(p));
    }
    self
}
/// Registers the xAI provider with an explicit API key.
#[cfg(feature = "openai-compatible")]
pub fn with_xai(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("xai", Arc::new(Compat::xai(api_key)?)))
}
/// Registers the Lambda provider from the environment; no-op on failure.
#[cfg(feature = "openai-compatible")]
pub fn with_lambda_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::lambda_from_env() {
        return self.with_provider("lambda", Arc::new(p));
    }
    self
}
/// Registers the Lambda provider with an explicit API key.
#[cfg(feature = "openai-compatible")]
pub fn with_lambda(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("lambda", Arc::new(Compat::lambda(api_key)?)))
}
/// Registers the Friendli provider from the environment; no-op on failure.
#[cfg(feature = "openai-compatible")]
pub fn with_friendli_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::friendli_from_env() {
        return self.with_provider("friendli", Arc::new(p));
    }
    self
}
/// Registers the Friendli provider with an explicit API key.
#[cfg(feature = "openai-compatible")]
pub fn with_friendli(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("friendli", Arc::new(Compat::friendli(api_key)?)))
}
/// Registers the Volcengine provider from the environment; no-op on failure.
#[cfg(feature = "openai-compatible")]
pub fn with_volcengine_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::volcengine_from_env() {
        return self.with_provider("volcengine", Arc::new(p));
    }
    self
}
/// Registers the Volcengine provider with an explicit API key.
#[cfg(feature = "openai-compatible")]
pub fn with_volcengine(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("volcengine", Arc::new(Compat::volcengine(api_key)?)))
}
/// Registers the Meta Llama provider from the environment; no-op on failure.
#[cfg(feature = "openai-compatible")]
pub fn with_meta_llama_from_env(self) -> Self {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    if let Ok(p) = Compat::meta_llama_from_env() {
        return self.with_provider("meta_llama", Arc::new(p));
    }
    self
}
/// Registers the Meta Llama provider with an explicit API key.
#[cfg(feature = "openai-compatible")]
pub fn with_meta_llama(self, api_key: impl Into<String>) -> Result<Self> {
    use crate::providers::chat::openai_compatible::OpenAICompatibleProvider as Compat;
    Ok(self.with_provider("meta_llama", Arc::new(Compat::meta_llama(api_key)?)))
}
/// Registers the DataRobot provider from the environment; no-op on failure.
#[cfg(feature = "datarobot")]
pub fn with_datarobot_from_env(self) -> Self {
    if let Ok(p) = crate::providers::DataRobotProvider::from_env() {
        return self.with_provider("datarobot", Arc::new(p));
    }
    self
}
/// Registers the DataRobot provider with an explicit API key.
#[cfg(feature = "datarobot")]
pub fn with_datarobot(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::DataRobotProvider::with_api_key(api_key)?;
    Ok(self.with_provider("datarobot", Arc::new(p)))
}
/// Registers the Stability provider from the environment; no-op on failure.
#[cfg(feature = "stability")]
pub fn with_stability_from_env(self) -> Self {
    if let Ok(p) = crate::providers::StabilityProvider::from_env() {
        return self.with_provider("stability", Arc::new(p));
    }
    self
}
/// Registers the Stability provider with an explicit API key.
#[cfg(feature = "stability")]
pub fn with_stability(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::StabilityProvider::with_api_key(api_key)?;
    Ok(self.with_provider("stability", Arc::new(p)))
}
/// Registers the RunwayML provider from the environment; no-op on failure.
#[cfg(feature = "runwayml")]
pub fn with_runwayml_from_env(self) -> Self {
    if let Ok(p) = crate::providers::RunwayMLProvider::from_env() {
        return self.with_provider("runwayml", Arc::new(p));
    }
    self
}
/// Registers the RunwayML provider with an explicit API key.
#[cfg(feature = "runwayml")]
pub fn with_runwayml(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::RunwayMLProvider::with_api_key(api_key)?;
    Ok(self.with_provider("runwayml", Arc::new(p)))
}
/// Registers the Recraft provider from the environment; no-op on failure.
#[cfg(feature = "recraft")]
pub fn with_recraft_from_env(self) -> Self {
    if let Ok(p) = crate::providers::RecraftProvider::from_env() {
        return self.with_provider("recraft", Arc::new(p));
    }
    self
}
/// Registers the Recraft provider with an explicit API key.
#[cfg(feature = "recraft")]
pub fn with_recraft(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::RecraftProvider::with_api_key(api_key)?;
    Ok(self.with_provider("recraft", Arc::new(p)))
}
/// Registers the Voyage provider from the environment; no-op on failure.
#[cfg(feature = "voyage")]
pub fn with_voyage_from_env(self) -> Self {
    if let Ok(p) = crate::providers::VoyageProvider::from_env() {
        return self.with_provider("voyage", Arc::new(p));
    }
    self
}
/// Registers the Voyage provider with an explicit API key.
#[cfg(feature = "voyage")]
pub fn with_voyage(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::VoyageProvider::with_api_key(api_key)?;
    Ok(self.with_provider("voyage", Arc::new(p)))
}
/// Registers the Jina provider from the environment; no-op on failure.
#[cfg(feature = "jina")]
pub fn with_jina_from_env(self) -> Self {
    if let Ok(p) = crate::providers::JinaProvider::from_env() {
        return self.with_provider("jina", Arc::new(p));
    }
    self
}
/// Registers the Jina provider with an explicit API key.
#[cfg(feature = "jina")]
pub fn with_jina(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::JinaProvider::with_api_key(api_key)?;
    Ok(self.with_provider("jina", Arc::new(p)))
}
/// Registers the SageMaker provider from the environment. Async because
/// provider construction itself is awaited; unlike the sync `*_from_env`
/// builders this propagates the error instead of silently skipping.
#[cfg(feature = "sagemaker")]
pub async fn with_sagemaker_from_env(self) -> Result<Self> {
    let provider = crate::providers::SageMakerProvider::from_env().await?;
    Ok(self.with_provider("sagemaker", Arc::new(provider)))
}
/// Registers the SageMaker provider for an explicit region and endpoint.
#[cfg(feature = "sagemaker")]
pub async fn with_sagemaker(self, region: &str, endpoint_name: &str) -> Result<Self> {
    let provider = crate::providers::SageMakerProvider::new(region, endpoint_name).await?;
    Ok(self.with_provider("sagemaker", Arc::new(provider)))
}
/// Registers the Snowflake provider from the environment (async construction).
#[cfg(feature = "snowflake")]
pub async fn with_snowflake_from_env(self) -> Result<Self> {
    let provider = crate::providers::SnowflakeProvider::from_env().await?;
    Ok(self.with_provider("snowflake", Arc::new(provider)))
}
/// Registers the Snowflake provider with explicit connection parameters.
#[cfg(feature = "snowflake")]
pub async fn with_snowflake(
    self,
    account: &str,
    user: &str,
    password: &str,
    database: &str,
    schema: &str,
    warehouse: &str,
) -> Result<Self> {
    let provider =
        crate::providers::SnowflakeProvider::new(account, user, password, database, schema, warehouse)
            .await?;
    Ok(self.with_provider("snowflake", Arc::new(provider)))
}
/// Creates a standalone OpenAI realtime provider from the environment.
/// Note: unlike `with_*` builders, this returns the provider directly
/// instead of registering it on the builder.
#[cfg(feature = "openai-realtime")]
pub fn openai_realtime_from_env(&self) -> Result<crate::providers::RealtimeProvider> {
    crate::providers::RealtimeProvider::from_env()
}
/// Creates a standalone realtime provider using the default realtime model.
#[cfg(feature = "openai-realtime")]
pub fn openai_realtime(&self, api_key: &str) -> crate::providers::RealtimeProvider {
    self.openai_realtime_with_model(api_key, "gpt-4o-realtime-preview")
}
/// Creates a standalone realtime provider for a caller-chosen model.
#[cfg(feature = "openai-realtime")]
pub fn openai_realtime_with_model(
    &self,
    api_key: &str,
    model: &str,
) -> crate::providers::RealtimeProvider {
    crate::providers::RealtimeProvider::new(api_key, model)
}
/// Registers the Baidu provider from the environment; no-op on failure.
#[cfg(feature = "baidu")]
pub fn with_baidu_from_env(self) -> Self {
    if let Ok(p) = crate::providers::BaiduProvider::from_env() {
        return self.with_provider("baidu", Arc::new(p));
    }
    self
}
/// Registers the Baidu provider with an explicit API key and secret key.
#[cfg(feature = "baidu")]
pub fn with_baidu(
    self,
    api_key: impl Into<String>,
    secret_key: impl Into<String>,
) -> Result<Self> {
    let (key, secret) = (api_key.into(), secret_key.into());
    let provider = crate::providers::BaiduProvider::new(&key, &secret)?;
    Ok(self.with_provider("baidu", Arc::new(provider)))
}
/// Registers the Alibaba provider from the environment; no-op on failure.
#[cfg(feature = "alibaba")]
pub fn with_alibaba_from_env(self) -> Self {
    if let Ok(p) = crate::providers::AlibabaProvider::from_env() {
        return self.with_provider("alibaba", Arc::new(p));
    }
    self
}
/// Registers the Alibaba provider with an explicit API key.
#[cfg(feature = "alibaba")]
pub fn with_alibaba(self, api_key: impl Into<String>) -> Result<Self> {
    let key = api_key.into();
    let provider = crate::providers::AlibabaProvider::new(&key)?;
    Ok(self.with_provider("alibaba", Arc::new(provider)))
}
/// Registers the OpenRouter provider from the environment; no-op on failure.
#[cfg(feature = "openrouter")]
pub fn with_openrouter_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::openrouter::OpenRouterProvider::from_env() {
        return self.with_provider("openrouter", Arc::new(p));
    }
    self
}
/// Registers the OpenRouter provider with an explicit API key.
#[cfg(feature = "openrouter")]
pub fn with_openrouter(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::chat::openrouter::OpenRouterProvider::with_api_key(api_key)?;
    Ok(self.with_provider("openrouter", Arc::new(p)))
}
/// Registers a local Ollama provider with the default configuration
/// (no API key required).
#[cfg(feature = "ollama")]
pub fn with_ollama(self) -> Result<Self> {
    let p = crate::providers::chat::ollama::OllamaProvider::new(Default::default())?;
    Ok(self.with_provider("ollama", Arc::new(p)))
}
/// Registers an Ollama provider pointed at a custom base URL.
#[cfg(feature = "ollama")]
pub fn with_ollama_url(self, base_url: impl Into<String>) -> Result<Self> {
    let mut config = crate::provider::ProviderConfig::default();
    config.base_url = Some(base_url.into());
    let p = crate::providers::chat::ollama::OllamaProvider::new(config)?;
    Ok(self.with_provider("ollama", Arc::new(p)))
}
/// Registers the Maritaca provider from the environment; no-op on failure.
#[cfg(feature = "maritaca")]
pub fn with_maritaca_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::maritaca::MaritacaProvider::from_env() {
        return self.with_provider("maritaca", Arc::new(p));
    }
    self
}
/// Registers the Maritaca provider with an explicit API key.
#[cfg(feature = "maritaca")]
pub fn with_maritaca(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::chat::maritaca::MaritacaProvider::with_api_key(api_key)?;
    Ok(self.with_provider("maritaca", Arc::new(p)))
}
/// Registers the LightOn provider from the environment; no-op on failure.
#[cfg(feature = "lighton")]
pub fn with_lighton_from_env(self) -> Self {
    if let Ok(p) = crate::providers::chat::lighton::LightOnProvider::from_env() {
        return self.with_provider("lighton", Arc::new(p));
    }
    self
}
/// Registers the LightOn provider with an explicit API key.
#[cfg(feature = "lighton")]
pub fn with_lighton(self, api_key: impl Into<String>) -> Result<Self> {
    let p = crate::providers::chat::lighton::LightOnProvider::with_api_key(api_key)?;
    Ok(self.with_provider("lighton", Arc::new(p)))
}
#[allow(unused_mut)] pub async fn build(mut self) -> Result<LLMKitClient> {
#[cfg(feature = "vertex")]
{
for (name, config) in self.pending_vertex {
let result = match config {
PendingVertexConfig::FromEnv => {
match crate::providers::chat::vertex::VertexConfig::from_env().await {
Ok(cfg) => {
crate::providers::chat::vertex::VertexProvider::with_config(cfg)
}
Err(_) => continue, }
}
PendingVertexConfig::ServiceAccount {
path,
project_id,
location,
} => {
match crate::providers::chat::vertex::VertexConfig::from_service_account_file(
&path,
&project_id,
&location,
)
.await
{
Ok(cfg) => {
crate::providers::chat::vertex::VertexProvider::with_config(cfg)
}
Err(_) => continue, }
}
PendingVertexConfig::WithPublisher { publisher } => {
match crate::providers::chat::vertex::VertexConfig::from_env_with_publisher(
&publisher,
)
.await
{
Ok(cfg) => {
crate::providers::chat::vertex::VertexProvider::with_config(cfg)
}
Err(_) => continue, }
}
};
if let Ok(provider) = result {
self.providers.insert(name, Arc::new(provider));
}
}
}
#[cfg(feature = "bedrock")]
{
for (name, config) in self.pending_bedrock {
let result = match config {
PendingBedrockConfig::FromEnv => {
crate::providers::chat::bedrock::BedrockProvider::from_env_region().await
}
PendingBedrockConfig::WithRegion { region } => {
crate::providers::chat::bedrock::BedrockProvider::from_env(®ion).await
}
};
if let Ok(provider) = result {
self.providers.insert(name, Arc::new(provider));
}
}
}
if self.providers.is_empty() {
return Err(Error::config("No providers configured"));
}
let providers = if let Some(retry_config) = self.retry_config {
self.providers
.into_iter()
.map(|(name, provider)| {
let retrying = DynamicRetryingProvider {
inner: provider,
config: retry_config.clone(),
};
(name, Arc::new(retrying) as Arc<dyn Provider>)
})
.collect()
} else {
self.providers
};
Ok(LLMKitClient {
providers,
embedding_providers: self.embedding_providers,
speech_providers: self.speech_providers,
transcription_providers: self.transcription_providers,
image_providers: self.image_providers,
video_providers: self.video_providers,
ranking_providers: self.ranking_providers,
moderation_providers: self.moderation_providers,
classification_providers: self.classification_providers,
default_provider: self.default_provider,
})
}
}
impl Default for ClientBuilder {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Well-formed "provider/model" identifiers split into the expected pair.
    #[test]
    fn test_model_parsing_valid_format() {
        let cases = [
            (
                "anthropic/claude-sonnet-4-20250514",
                "anthropic",
                "claude-sonnet-4-20250514",
            ),
            ("openai/gpt-4o", "openai", "gpt-4o"),
            ("groq/llama-3.3-70b-versatile", "groq", "llama-3.3-70b-versatile"),
            ("vertex/gemini-pro", "vertex", "gemini-pro"),
            ("mistral/mistral-large-latest", "mistral", "mistral-large-latest"),
            ("baidu/ERNIE-Bot-Ultra", "baidu", "ERNIE-Bot-Ultra"),
            ("alibaba/qwen-max", "alibaba", "qwen-max"),
            ("runwayml/gen4_turbo", "runwayml", "gen4_turbo"),
            ("recraft/recraft-v3", "recraft", "recraft-v3"),
        ];
        for (input, expected_provider, expected_model) in cases {
            let (provider, model) = parse_model_identifier(input).unwrap();
            assert_eq!(provider, expected_provider, "input: {input}");
            assert_eq!(model, expected_model, "input: {input}");
        }
    }

    /// Bare model names without a "provider/" prefix are rejected.
    #[test]
    fn test_model_parsing_requires_provider() {
        let rejected = [
            "claude-sonnet-4-20250514",
            "gpt-4o",
            "mistral-large",
            "model",
            "",
        ];
        for input in rejected {
            assert!(parse_model_identifier(input).is_err(), "input: {input}");
        }
    }

    /// Provider segments containing '-' ("meta-llama"), '.' ("v1.2.3"), or
    /// ':' ("namespace:tag") are rejected.
    #[test]
    fn test_model_parsing_invalid_provider_format() {
        let rejected = [
            "meta-llama/Llama-3.2-3B-Instruct",
            "v1.2.3/model",
            "namespace:tag/model",
        ];
        for input in rejected {
            assert!(parse_model_identifier(input).is_err(), "input: {input}");
        }
    }

    /// Plain alphanumeric provider names that resemble real providers parse fine.
    #[test]
    fn test_model_parsing_valid_provider_like_names() {
        let (provider, model) = parse_model_identifier("mistralai/Mistral-7B-v0.1").unwrap();
        assert_eq!(provider, "mistralai");
        assert_eq!(model, "Mistral-7B-v0.1");
    }
}