pub struct InferenceConfig { /* private fields */ }

Expand description
Provider configuration for Inference client
Allows configuring multiple AI providers with their API keys and base URLs.
§Example
use stakai::{Inference, InferenceConfig};
let client = Inference::with_config(
InferenceConfig::new()
.openai("sk-...", None)
.anthropic("sk-ant-...", None)
.gemini("your-key", None)
);

Implementations§
Source§
impl InferenceConfig
impl InferenceConfig
Source
pub fn openai(
self,
api_key: impl Into<String>,
base_url: Option<String>,
) -> Self
pub fn openai( self, api_key: impl Into<String>, base_url: Option<String>, ) -> Self
Configure OpenAI provider with API key and optional base URL
§Example
let config = InferenceConfig::new()
.openai("sk-...", None);
// With custom base URL (e.g., Azure OpenAI)
let config = InferenceConfig::new()
    .openai("sk-...", Some("https://your-endpoint.openai.azure.com/v1".to_string()));

Source
pub fn openai_config(self, config: OpenAIConfig) -> Self
pub fn openai_config(self, config: OpenAIConfig) -> Self
Configure OpenAI provider with full OpenAIConfig
§Example
let openai_config = OpenAIConfig::new("sk-...")
.with_base_url("https://custom.com/v1")
.with_organization("org-123");
let config = InferenceConfig::new()
    .openai_config(openai_config);

Source
pub fn anthropic(
self,
api_key: impl Into<String>,
base_url: Option<String>,
) -> Self
pub fn anthropic( self, api_key: impl Into<String>, base_url: Option<String>, ) -> Self
Configure Anthropic provider with API key and optional base URL
§Example
let config = InferenceConfig::new()
.anthropic("sk-ant-...", None);
// With custom base URL
let config = InferenceConfig::new()
    .anthropic("sk-ant-...", Some("https://custom-anthropic.com/v1".to_string()));

Source
pub fn anthropic_config(self, config: AnthropicConfig) -> Self
pub fn anthropic_config(self, config: AnthropicConfig) -> Self
Configure Anthropic provider with full AnthropicConfig
§Example
let anthropic_config = AnthropicConfig::new("sk-ant-...")
.with_version("2023-06-01")
.with_beta_feature("prompt-caching-2024-07-31");
let config = InferenceConfig::new()
    .anthropic_config(anthropic_config);

Source
pub fn gemini(
self,
api_key: impl Into<String>,
base_url: Option<String>,
) -> Self
pub fn gemini( self, api_key: impl Into<String>, base_url: Option<String>, ) -> Self
Configure Gemini provider with API key and optional base URL
§Example
let config = InferenceConfig::new()
.gemini("your-api-key", None);
// With custom base URL
let config = InferenceConfig::new()
    .gemini("your-key", Some("https://custom-gemini.com/v1beta".to_string()));

Source
pub fn gemini_config(self, config: GeminiConfig) -> Self
pub fn gemini_config(self, config: GeminiConfig) -> Self
Configure Gemini provider with full GeminiConfig
§Example
let gemini_config = GeminiConfig::new("your-key")
.with_base_url("https://custom.com/v1beta");
let config = InferenceConfig::new()
    .gemini_config(gemini_config);

Source
pub fn temperature(self, temperature: f32) -> Self
pub fn temperature(self, temperature: f32) -> Self
Set default temperature for all requests
Source
pub fn max_tokens(self, max_tokens: u32) -> Self
pub fn max_tokens(self, max_tokens: u32) -> Self
Set default max tokens for all requests
Trait Implementations§
Source§
impl Debug for InferenceConfig
impl Debug for InferenceConfig
Source§
impl Default for InferenceConfig
impl Default for InferenceConfig
Source§
fn default() -> InferenceConfig
fn default() -> InferenceConfig
Returns the “default value” for a type. Read more
Auto Trait Implementations§
impl Freeze for InferenceConfig
impl RefUnwindSafe for InferenceConfig
impl Send for InferenceConfig
impl Sync for InferenceConfig
impl Unpin for InferenceConfig
impl UnwindSafe for InferenceConfig
Blanket Implementations§
Source§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

impl<T> BorrowMut<T> for T
where
    T: ?Sized,
Source§
fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value. Read more