pub struct LLMSessionConfig {Show 17 fields
pub provider: LLMProvider,
pub api_key: String,
pub model: String,
pub base_url: Option<String>,
pub max_tokens: Option<u32>,
pub system_prompt: Option<String>,
pub temperature: Option<f32>,
pub streaming: bool,
pub context_limit: i32,
pub compaction: Option<CompactorType>,
pub azure_resource: Option<String>,
pub azure_deployment: Option<String>,
pub azure_api_version: Option<String>,
pub bedrock_region: Option<String>,
pub bedrock_access_key_id: Option<String>,
pub bedrock_secret_access_key: Option<String>,
pub bedrock_session_token: Option<String>,
}Expand description
Configuration for creating an LLM session
Fields

provider: LLMProvider — The LLM provider to use.
api_key: String — API key for the provider.
model: String — Model to use (e.g., "claude-3-sonnet-20240229", "gpt-4").
base_url: Option<String> — Custom base URL for OpenAI-compatible providers. Only used when the provider is OpenAI; if None, the default OpenAI endpoint is used.
max_tokens: Option<u32> — Default maximum tokens for responses.
system_prompt: Option<String> — Default system prompt.
temperature: Option<f32> — Default temperature.
streaming: bool — Enable streaming responses.
context_limit: i32 — Model's context window size (used for compaction decisions).
compaction: Option<CompactorType> — Compaction configuration (None disables compaction).
azure_resource: Option<String> — Azure OpenAI resource name (e.g., "my-resource"). When set, the provider uses Azure OpenAI instead of standard OpenAI.
azure_deployment: Option<String> — Azure OpenAI deployment name (e.g., "gpt-4-deployment").
azure_api_version: Option<String> — Azure OpenAI API version (e.g., "2024-10-21").
bedrock_region: Option<String> — AWS region for Bedrock (e.g., "us-east-1").
bedrock_access_key_id: Option<String> — AWS access key ID for Bedrock.
bedrock_secret_access_key: Option<String> — AWS secret access key for Bedrock.
bedrock_session_token: Option<String> — AWS session token for Bedrock (optional, for temporary credentials).
Implementations

impl LLMSessionConfig

pub fn anthropic(api_key: impl Into<String>, model: impl Into<String>) -> Self
Creates a new Anthropic session config.

pub fn openai(api_key: impl Into<String>, model: impl Into<String>) -> Self
Creates a new OpenAI session config.
pub fn openai_compatible(
    api_key: impl Into<String>,
    model: impl Into<String>,
    base_url: impl Into<String>,
    context_limit: i32,
) -> Self
Creates a new OpenAI-compatible session config with a custom base URL.
Use this for providers like Groq, Together, Fireworks, etc. that have OpenAI-compatible APIs.

pub fn google(api_key: impl Into<String>, model: impl Into<String>) -> Self
Creates a new Google (Gemini) session config.
pub fn azure_openai(
    api_key: impl Into<String>,
    resource: impl Into<String>,
    deployment: impl Into<String>,
) -> Self
Creates a new Azure OpenAI session config.
Azure OpenAI uses a different URL format and authentication method. The endpoint is: https://{resource}.openai.azure.com/openai/deployments/{deployment}/chat/completions?api-version={version}

Arguments
- api_key — Azure OpenAI API key
- resource — Azure resource name (e.g., "my-openai-resource")
- deployment — Deployment name (e.g., "gpt-4-deployment")

pub fn with_azure_api_version(self, version: impl Into<String>) -> Self
Sets the Azure API version.

pub fn cohere(api_key: impl Into<String>, model: impl Into<String>) -> Self
Creates a new Cohere session config.
pub fn bedrock(
    access_key_id: impl Into<String>,
    secret_access_key: impl Into<String>,
    region: impl Into<String>,
    model: impl Into<String>,
) -> Self
Creates a new Amazon Bedrock session config.

Arguments
- access_key_id — AWS access key ID
- secret_access_key — AWS secret access key
- region — AWS region (e.g., "us-east-1")
- model — Bedrock model ID (e.g., "anthropic.claude-3-sonnet-20240229-v1:0")

pub fn with_bedrock_session_token(self, token: impl Into<String>) -> Self
Sets the Bedrock session token for temporary credentials.
pub fn with_streaming(self, streaming: bool) -> Self
Enable or disable streaming.

pub fn with_max_tokens(self, max_tokens: u32) -> Self
Sets the default max tokens.

pub fn with_system_prompt(self, prompt: impl Into<String>) -> Self
Sets the default system prompt.

pub fn with_temperature(self, temperature: f32) -> Self
Sets the default temperature.

pub fn with_context_limit(self, context_limit: i32) -> Self
Sets the model's context window size.

pub fn with_base_url(self, base_url: impl Into<String>) -> Self
Sets a custom base URL for OpenAI-compatible providers.

pub fn with_threshold_compaction(self, config: CompactionConfig) -> Self
Enables threshold compaction with custom configuration.

pub fn with_llm_compaction(self, config: LLMCompactorConfig) -> Self
Enables LLM-based compaction with custom configuration.

pub fn with_compaction(self, compactor_type: CompactorType) -> Self
Enables compaction with the specified compactor type.

pub fn without_compaction(self) -> Self
Disables compaction.
Trait Implementations

impl Clone for LLMSessionConfig

fn clone(&self) -> LLMSessionConfig
Returns a copy of the value.

fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source.