pub struct LLMConfig {
pub model: String,
pub api_key: Option<String>,
pub base_url: Option<String>,
pub max_tokens: Option<u32>,
pub temperature: Option<f32>,
pub top_p: Option<f32>,
pub top_k: Option<u32>,
pub system: Option<String>,
pub timeout_seconds: Option<u64>,
pub embedding_encoding_format: Option<String>,
pub embedding_dimensions: Option<u32>,
pub enable_parallel_tool_use: Option<bool>,
pub reasoning: Option<bool>,
pub reasoning_effort: Option<String>,
pub reasoning_budget_tokens: Option<u32>,
pub api_version: Option<String>,
pub deployment_id: Option<String>,
pub voice: Option<String>,
pub xai_search_mode: Option<String>,
pub xai_search_source_type: Option<String>,
pub xai_search_excluded_websites: Option<Vec<String>>,
pub xai_search_max_results: Option<u32>,
pub xai_search_from_date: Option<String>,
pub xai_search_to_date: Option<String>,
pub openai_enable_web_search: Option<bool>,
pub openai_web_search_context_size: Option<String>,
pub openai_web_search_user_location_type: Option<String>,
pub openai_web_search_user_location_approximate_country: Option<String>,
pub openai_web_search_user_location_approximate_city: Option<String>,
pub openai_web_search_user_location_approximate_region: Option<String>,
pub resilient_enable: Option<bool>,
pub resilient_attempts: Option<usize>,
pub resilient_base_delay_ms: Option<u64>,
pub resilient_max_delay_ms: Option<u64>,
pub resilient_jitter: Option<bool>,
}
Configuration for LLM providers with all builder options.
This struct provides comprehensive configuration for any LLM provider, matching all options available in the LLMBuilder.
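A minimal usage sketch, chaining the builder-style setters documented under Implementations (the model name is a placeholder, and reading the key from an environment variable is just one way to supply it):

// Sketch: build a config by chaining the with_* setters, each of which
// consumes self and returns the updated LLMConfig.
let config = LLMConfig::new("gpt-4o")
    .with_api_key(std::env::var("OPENAI_API_KEY").unwrap_or_default())
    .with_temperature(0.7)
    .with_max_tokens(1024)
    .with_system("You are a helpful assistant.")
    .with_timeout_seconds(30);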
Implementations
impl LLMConfig

pub fn new(model: impl Into<String>) -> LLMConfig
Create a new LLMConfig with just the model name
pub fn with_api_key(self, api_key: impl Into<String>) -> LLMConfig
Set API key
pub fn with_base_url(self, base_url: impl Into<String>) -> LLMConfig
Set base URL
pub fn with_max_tokens(self, max_tokens: u32) -> LLMConfig
Set max tokens
pub fn with_temperature(self, temperature: f32) -> LLMConfig
Set temperature
pub fn with_top_p(self, top_p: f32) -> LLMConfig
Set top_p
pub fn with_top_k(self, top_k: u32) -> LLMConfig
Set top_k
pub fn with_system(self, system: impl Into<String>) -> LLMConfig
Set system prompt
pub fn with_timeout_seconds(self, timeout: u64) -> LLMConfig
Set timeout in seconds
pub fn with_reasoning(self, enabled: bool) -> LLMConfig
Enable reasoning (for supported providers)
pub fn with_reasoning_effort(self, effort: impl Into<String>) -> LLMConfig
Set reasoning effort
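A short sketch of combining the reasoning options (the "high" effort string is illustrative; accepted values are provider-specific):

// Illustrative values: model name and effort level are placeholders.
let config = LLMConfig::new("o3-mini")
    .with_reasoning(true)
    .with_reasoning_effort("high");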
pub fn with_deployment_id(self, deployment_id: impl Into<String>) -> LLMConfig
Set Azure deployment ID
pub fn with_api_version(self, api_version: impl Into<String>) -> LLMConfig
Set Azure API version
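For Azure OpenAI, the deployment ID and API version are typically set alongside the base URL; a sketch with placeholder values:

// All values below are placeholders for an Azure OpenAI resource.
let config = LLMConfig::new("gpt-4o")
    .with_base_url("https://my-resource.openai.azure.com")
    .with_deployment_id("my-gpt4o-deployment")
    .with_api_version("2024-06-01");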
pub fn with_openai_web_search(self, enabled: bool) -> LLMConfig
Enable OpenAI web search
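A sketch of enabling web search; the related openai_web_search_* fields have no dedicated setters listed here, but they are public and can be assigned directly (the "medium" value is illustrative):

// Enable web search, then set an optional field directly (illustrative value).
let mut config = LLMConfig::new("gpt-4o").with_openai_web_search(true);
config.openai_web_search_context_size = Some("medium".to_string());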
pub fn with_resilience(self, enabled: bool, attempts: usize) -> LLMConfig
Enable resilience with retry/backoff
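A sketch of enabling retries with three attempts; the backoff delays and jitter are public fields without dedicated setters listed here, so they are assigned directly:

// Allow 3 attempts with backoff between 250 ms and 5 s
// (field names taken from the struct definition above).
let mut config = LLMConfig::new("gpt-4o").with_resilience(true, 3);
config.resilient_base_delay_ms = Some(250);
config.resilient_max_delay_ms = Some(5_000);
config.resilient_jitter = Some(true);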
Trait Implementations
impl<'de> Deserialize<'de> for LLMConfig

fn deserialize<__D>(__deserializer: __D) -> Result<LLMConfig, <__D as Deserializer<'de>>::Error>
where
    __D: Deserializer<'de>,
Deserialize this value from the given Serde deserializer.
impl Serialize for LLMConfig

fn serialize<__S>(&self, __serializer: __S) -> Result<<__S as Serializer>::Ok, <__S as Serializer>::Error>
where
    __S: Serializer,
Serialize this value into the given Serde serializer.
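Because LLMConfig implements Serialize and Deserialize, it can be round-tripped through any serde format; a minimal sketch assuming serde_json is available in the dependency tree:

// Round-trip the config through JSON, e.g. for loading it from a config file.
let config = LLMConfig::new("gpt-4o").with_max_tokens(2048);
let json = serde_json::to_string(&config).expect("serialize");
let restored: LLMConfig = serde_json::from_str(&json).expect("deserialize");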
Auto Trait Implementations
impl Freeze for LLMConfig
impl RefUnwindSafe for LLMConfig
impl Send for LLMConfig
impl Sync for LLMConfig
impl Unpin for LLMConfig
impl UnwindSafe for LLMConfig
Blanket Implementations
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.
impl<T> CloneToUninit for T
where
    T: Clone,
impl<T> Instrument for T

fn instrument(self, span: Span) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
impl<T> IntoEither for T

fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise.

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise.