pub struct AiClientBuilder { /* private fields */ }
AI client builder with progressive custom configuration
Usage examples:
use ai_lib::{AiClientBuilder, Provider};
// Simplest usage - automatic environment variable detection
let client = AiClientBuilder::new(Provider::Groq).build()?;
// Custom base_url and proxy
let client = AiClientBuilder::new(Provider::Groq)
    .with_base_url("https://custom.groq.com")
    .with_proxy(Some("http://proxy.example.com:8080"))
    .build()?;
// Full custom configuration
let client = AiClientBuilder::new(Provider::Groq)
    .with_base_url("https://custom.groq.com")
    .with_proxy(Some("http://proxy.example.com:8080"))
    .with_timeout(std::time::Duration::from_secs(60))
    .with_pool_config(32, std::time::Duration::from_secs(90))
    .build()?;
Implementations§
impl AiClientBuilder
pub fn with_base_url(self, base_url: &str) -> Self
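Set a custom base URL for the provider endpoint, as in the struct-level examples above. A minimal sketch isolating just this option (the URL is illustrative):
use ai_lib::{AiClientBuilder, Provider};
let client = AiClientBuilder::new(Provider::Groq)
    .with_base_url("https://custom.groq.com")
    .build()?;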
pub fn with_proxy(self, proxy_url: Option<&str>) -> Self
Set custom proxy URL
§Arguments
proxy_url - Custom proxy URL, or None to use AI_PROXY_URL environment variable
§Returns
Self - Builder instance for method chaining
§Examples
use ai_lib::{AiClientBuilder, Provider};
// Use specific proxy URL
let client = AiClientBuilder::new(Provider::Groq)
    .with_proxy(Some("http://proxy.example.com:8080"))
    .build()?;
// Use AI_PROXY_URL environment variable
let client = AiClientBuilder::new(Provider::Groq)
    .with_proxy(None)
    .build()?;
pub fn without_proxy(self) -> Self
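By the name, this presumably disables proxy usage even if AI_PROXY_URL is set in the environment; a minimal sketch under that assumption:
use ai_lib::{AiClientBuilder, Provider};
// Connect directly, ignoring any AI_PROXY_URL setting (assumed behavior of without_proxy).
let client = AiClientBuilder::new(Provider::Groq)
    .without_proxy()
    .build()?;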
pub fn with_timeout(self, timeout: Duration) -> Self
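Set the request timeout, as also shown in the struct-level example; a minimal sketch (the value is illustrative):
use ai_lib::{AiClientBuilder, Provider};
// Fail requests that take longer than 30 seconds.
let client = AiClientBuilder::new(Provider::Groq)
    .with_timeout(std::time::Duration::from_secs(30))
    .build()?;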
pub fn with_pool_config(self, max_idle: usize, idle_timeout: Duration) -> Self
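Configure the connection pool; judging by the parameter names, the arguments are the maximum number of idle connections and their idle timeout. A sketch under that reading (values are illustrative):
use ai_lib::{AiClientBuilder, Provider};
// Keep up to 16 idle connections, dropping each after 60 seconds of inactivity.
let client = AiClientBuilder::new(Provider::Groq)
    .with_pool_config(16, std::time::Duration::from_secs(60))
    .build()?;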
pub fn with_metrics(self, metrics: Arc<dyn Metrics>) -> Self
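Attach a metrics implementation behind Arc<dyn Metrics>. The sketch below assumes you already have a type implementing the crate's Metrics trait; MyMetrics and the trait's import path are hypothetical:
use std::sync::Arc;
use ai_lib::{AiClientBuilder, Provider};
// `MyMetrics` stands in for your own Metrics implementation; the trait's import path
// depends on how ai_lib exposes it.
let metrics: Arc<dyn Metrics> = Arc::new(MyMetrics::default());
let client = AiClientBuilder::new(Provider::Groq)
    .with_metrics(metrics)
    .build()?;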
pub fn with_default_chat_model(self, model: &str) -> Self
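Set the default chat model used when a request does not specify one (the model name below is illustrative):
use ai_lib::{AiClientBuilder, Provider};
let client = AiClientBuilder::new(Provider::Groq)
    .with_default_chat_model("llama-3.1-8b-instant")
    .build()?;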
pub fn with_default_multimodal_model(self, model: &str) -> Self
Set default multimodal model for the client
§Arguments
model - Default multimodal model name
§Returns
Self - Builder instance for method chaining
§Example
use ai_lib::{AiClientBuilder, Provider};
let client = AiClientBuilder::new(Provider::Groq)
    .with_default_multimodal_model("llama-3.2-11b-vision")
    .build()?;
pub fn with_smart_defaults(self) -> Self
Enable smart defaults for resilience features
This method enables reasonable default configurations for circuit breaker, rate limiting, and error handling without requiring detailed configuration.
§Returns
Self - Builder instance for method chaining
§Example
use ai_lib::{AiClientBuilder, Provider};
let client = AiClientBuilder::new(Provider::Groq)
    .with_smart_defaults()
    .build()?;
pub fn for_production(self) -> Self
Configure for production environment
This method applies production-ready configurations for all resilience features with conservative settings for maximum reliability.
§Returns
Self - Builder instance for method chaining
§Example
use ai_lib::{AiClientBuilder, Provider};
let client = AiClientBuilder::new(Provider::Groq)
    .for_production()
    .build()?;
pub fn for_development(self) -> Self
Configure for development environment
This method applies development-friendly configurations with more lenient settings for easier debugging and testing.
§Returns
Self - Builder instance for method chaining
§Example
use ai_lib::{AiClientBuilder, Provider};
let client = AiClientBuilder::new(Provider::Groq)
    .for_development()
    .build()?;
pub fn with_max_concurrency(self, max_concurrent_requests: usize) -> Self
Configure a simple backpressure guard that caps concurrent requests
This provides a convenient way to set a global concurrency cap using a semaphore. It is equivalent to setting ResilienceConfig.backpressure.max_concurrent_requests.
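A minimal sketch capping in-flight requests (the limit is illustrative):
use ai_lib::{AiClientBuilder, Provider};
// Cap the client at 8 concurrent requests via the internal semaphore described above.
let client = AiClientBuilder::new(Provider::Groq)
    .with_max_concurrency(8)
    .build()?;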
pub fn with_resilience_config(self, config: ResilienceConfig) -> Self
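Apply a complete ResilienceConfig in one call. The sketch below assumes ResilienceConfig is exported at the crate root and implements Default; check that type's own docs for its actual fields and constructors:
use ai_lib::{AiClientBuilder, Provider, ResilienceConfig};
// Start from the type's defaults (assumes ResilienceConfig: Default) and pass it through unchanged.
let config = ResilienceConfig::default();
let client = AiClientBuilder::new(Provider::Groq)
    .with_resilience_config(config)
    .build()?;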
pub fn build(self) -> Result<AiClient, AiLibError>
Build AiClient instance
The build process applies configuration in the following priority order:
1. Explicitly set configuration (via with_* methods)
2. Environment variable configuration
3. Default configuration
§Returns
Result<AiClient, AiLibError> - Returns the client instance on success, or an error on failure
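Where the ? operator is not available, the Result can be handled explicitly; a minimal sketch (assumes AiLibError derives Debug):
use ai_lib::{AiClientBuilder, Provider};
// Handle the build Result explicitly instead of propagating with `?`.
match AiClientBuilder::new(Provider::Groq).build() {
    Ok(_client) => { /* use the client */ }
    Err(err) => eprintln!("failed to build AI client: {err:?}"),
}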