pub struct LLMSessionConfig {Show 17 fields
pub provider: LLMProvider,
pub api_key: String,
pub model: String,
pub base_url: Option<String>,
pub max_tokens: Option<u32>,
pub system_prompt: Option<String>,
pub temperature: Option<f32>,
pub streaming: bool,
pub context_limit: i32,
pub compaction: Option<CompactorType>,
pub azure_resource: Option<String>,
pub azure_deployment: Option<String>,
pub azure_api_version: Option<String>,
pub bedrock_region: Option<String>,
pub bedrock_access_key_id: Option<String>,
pub bedrock_secret_access_key: Option<String>,
pub bedrock_session_token: Option<String>,
}Expand description
Configuration for creating an LLM session.

Fields

provider: LLMProvider — The LLM provider to use.
api_key: String — API key for the provider.
model: String — Model to use (e.g., "claude-3-sonnet-20240229", "gpt-4").
base_url: Option<String> — Custom base URL for OpenAI-compatible providers. Only used when the provider is OpenAI. If None, uses the default OpenAI endpoint.
max_tokens: Option<u32> — Default maximum tokens for responses.
system_prompt: Option<String> — Default system prompt.
temperature: Option<f32> — Default temperature.
streaming: bool — Enable streaming responses.
context_limit: i32 — Model's context window size (for compaction decisions).
compaction: Option<CompactorType> — Compaction configuration (None to disable compaction).
azure_resource: Option<String> — Azure OpenAI resource name (e.g., "my-resource"). When set, the provider uses Azure OpenAI instead of standard OpenAI.
azure_deployment: Option<String> — Azure OpenAI deployment name (e.g., "gpt-4-deployment").
azure_api_version: Option<String> — Azure OpenAI API version (e.g., "2024-10-21").
bedrock_region: Option<String> — AWS region for Bedrock (e.g., "us-east-1").
bedrock_access_key_id: Option<String> — AWS access key ID for Bedrock.
bedrock_secret_access_key: Option<String> — AWS secret access key for Bedrock.
bedrock_session_token: Option<String> — AWS session token for Bedrock (optional, for temporary credentials).
Implementations

impl LLMSessionConfig

pub fn anthropic(api_key: impl Into<String>, model: impl Into<String>) -> LLMSessionConfig
Creates a new Anthropic session config.

pub fn openai(api_key: impl Into<String>, model: impl Into<String>) -> LLMSessionConfig
Creates a new OpenAI session config.

pub fn openai_compatible(api_key: impl Into<String>, model: impl Into<String>, base_url: impl Into<String>, context_limit: i32) -> LLMSessionConfig
Creates a new OpenAI-compatible session config with a custom base URL.
Use this for providers like Groq, Together, Fireworks, etc. that have OpenAI-compatible APIs.

pub fn google(api_key: impl Into<String>, model: impl Into<String>) -> LLMSessionConfig
Creates a new Google (Gemini) session config.

pub fn azure_openai(api_key: impl Into<String>, resource: impl Into<String>, deployment: impl Into<String>) -> LLMSessionConfig
Creates a new Azure OpenAI session config.
Azure OpenAI uses a different URL format and authentication method. The endpoint is: https://{resource}.openai.azure.com/openai/deployments/{deployment}/chat/completions?api-version={version}

Arguments
- api_key — Azure OpenAI API key
- resource — Azure resource name (e.g., "my-openai-resource")
- deployment — Deployment name (e.g., "gpt-4-deployment")
pub fn with_azure_api_version(self, version: impl Into<String>) -> LLMSessionConfig
Sets the Azure API version.

pub fn cohere(api_key: impl Into<String>, model: impl Into<String>) -> LLMSessionConfig
Creates a new Cohere session config.

pub fn bedrock(access_key_id: impl Into<String>, secret_access_key: impl Into<String>, region: impl Into<String>, model: impl Into<String>) -> LLMSessionConfig
Creates a new Amazon Bedrock session config.

Arguments
- access_key_id — AWS access key ID
- secret_access_key — AWS secret access key
- region — AWS region (e.g., "us-east-1")
- model — Bedrock model ID (e.g., "anthropic.claude-3-sonnet-20240229-v1:0")

pub fn with_bedrock_session_token(self, token: impl Into<String>) -> LLMSessionConfig
Sets the Bedrock session token for temporary credentials.
pub fn with_streaming(self, streaming: bool) -> LLMSessionConfig
Enables or disables streaming.

pub fn with_max_tokens(self, max_tokens: u32) -> LLMSessionConfig
Sets the default max tokens.

pub fn with_system_prompt(self, prompt: impl Into<String>) -> LLMSessionConfig
Sets the default system prompt.

pub fn with_temperature(self, temperature: f32) -> LLMSessionConfig
Sets the default temperature.

pub fn with_context_limit(self, context_limit: i32) -> LLMSessionConfig
Sets the model's context window size.

pub fn with_base_url(self, base_url: impl Into<String>) -> LLMSessionConfig
Sets a custom base URL for OpenAI-compatible providers.

pub fn with_threshold_compaction(self, config: CompactionConfig) -> LLMSessionConfig
Enables threshold compaction with custom configuration.

pub fn with_llm_compaction(self, config: LLMCompactorConfig) -> LLMSessionConfig
Enables LLM-based compaction with custom configuration.

pub fn with_compaction(self, compactor_type: CompactorType) -> LLMSessionConfig
Enables compaction with the specified compactor type.

pub fn without_compaction(self) -> LLMSessionConfig
Disables compaction.
Trait Implementations

impl Clone for LLMSessionConfig
fn clone(&self) -> LLMSessionConfig
fn clone_from(&mut self, source: &Self) (since 1.0.0)
Performs copy-assignment from source.

Auto Trait Implementations

impl Freeze for LLMSessionConfig
impl RefUnwindSafe for LLMSessionConfig
impl Send for LLMSessionConfig
impl Sync for LLMSessionConfig
impl Unpin for LLMSessionConfig
impl UnwindSafe for LLMSessionConfig
Blanket Implementations§
Source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Source§impl<T> CloneToUninit for Twhere
T: Clone,
impl<T> CloneToUninit for Twhere
T: Clone,
Source§impl<T> Instrument for T
impl<T> Instrument for T
Source§fn instrument(self, span: Span) -> Instrumented<Self>
fn instrument(self, span: Span) -> Instrumented<Self>
Source§fn in_current_span(self) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
self into a Left variant of Either<Self, Self>
if into_left is true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read moreSource§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more