agent_core/controller/session/config.rs

use super::compactor::{LLMCompactorConfig, ToolCompaction};

/// Supported LLM backends.
#[derive(Debug, Clone, PartialEq)]
pub enum LLMProvider {
    Anthropic,
    OpenAI,
}

/// Settings for threshold-based context compaction.
#[derive(Debug, Clone)]
pub struct CompactionConfig {
    /// Context usage fraction (e.g. 0.75) at which compaction triggers.
    pub threshold: f64,
    /// Number of most recent conversation turns to keep intact.
    pub keep_recent_turns: usize,
    /// How tool call content is compacted.
    pub tool_compaction: ToolCompaction,
}

impl Default for CompactionConfig {
    fn default() -> Self {
        Self {
            threshold: 0.75,
            keep_recent_turns: 5,
            tool_compaction: ToolCompaction::Summarize,
        }
    }
}

/// Strategy used to compact the session context.
#[derive(Debug, Clone)]
pub enum CompactorType {
    /// Rule-based compaction once a usage threshold is crossed.
    Threshold(CompactionConfig),
    /// LLM-driven summarization of older context.
    LLM(LLMCompactorConfig),
}

impl Default for CompactorType {
    fn default() -> Self {
        CompactorType::Threshold(CompactionConfig::default())
    }
}

/// Configuration for an LLM-backed session.
#[derive(Debug, Clone)]
pub struct LLMSessionConfig {
    /// Which provider backend to use.
    pub provider: LLMProvider,
    /// API key for the provider.
    pub api_key: String,
    /// Model identifier, e.g. "claude-3-sonnet" or "gpt-4".
    pub model: String,
    /// Maximum tokens to generate per response, if capped.
    pub max_tokens: Option<u32>,
    /// Optional system prompt for the conversation.
    pub system_prompt: Option<String>,
    /// Sampling temperature; provider default when `None`.
    pub temperature: Option<f32>,
    /// Whether responses are streamed.
    pub streaming: bool,
    /// Context window size in tokens.
    pub context_limit: i32,
    /// Compaction strategy; `None` disables compaction.
    pub compaction: Option<CompactorType>,
}

impl LLMSessionConfig {
    /// Creates an Anthropic configuration with streaming enabled and a
    /// 200k-token context window.
    pub fn anthropic(api_key: impl Into<String>, model: impl Into<String>) -> Self {
        Self {
            provider: LLMProvider::Anthropic,
            api_key: api_key.into(),
            model: model.into(),
            max_tokens: Some(4096),
            system_prompt: None,
            temperature: None,
            streaming: true,
            context_limit: 200_000,
            compaction: Some(CompactorType::default()),
        }
    }

    /// Creates an OpenAI configuration with streaming disabled and a
    /// 128k-token context window.
    pub fn openai(api_key: impl Into<String>, model: impl Into<String>) -> Self {
        Self {
            provider: LLMProvider::OpenAI,
            api_key: api_key.into(),
            model: model.into(),
            max_tokens: Some(4096),
            system_prompt: None,
            temperature: None,
            streaming: false,
            context_limit: 128_000,
            compaction: Some(CompactorType::default()),
        }
    }

    pub fn with_streaming(mut self, streaming: bool) -> Self {
        self.streaming = streaming;
        self
    }

    pub fn with_max_tokens(mut self, max_tokens: u32) -> Self {
        self.max_tokens = Some(max_tokens);
        self
    }

    pub fn with_system_prompt(mut self, prompt: impl Into<String>) -> Self {
        self.system_prompt = Some(prompt.into());
        self
    }

    pub fn with_temperature(mut self, temperature: f32) -> Self {
        self.temperature = Some(temperature);
        self
    }

    pub fn with_context_limit(mut self, context_limit: i32) -> Self {
        self.context_limit = context_limit;
        self
    }

    /// Uses threshold-based compaction with the given settings.
    pub fn with_threshold_compaction(mut self, config: CompactionConfig) -> Self {
        self.compaction = Some(CompactorType::Threshold(config));
        self
    }

    /// Uses LLM-driven compaction with the given settings.
    pub fn with_llm_compaction(mut self, config: LLMCompactorConfig) -> Self {
        self.compaction = Some(CompactorType::LLM(config));
        self
    }

    /// Sets the compaction strategy directly.
    pub fn with_compaction(mut self, compactor_type: CompactorType) -> Self {
        self.compaction = Some(compactor_type);
        self
    }

    /// Disables context compaction entirely.
    pub fn without_compaction(mut self) -> Self {
        self.compaction = None;
        self
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_anthropic_config() {
        let config = LLMSessionConfig::anthropic("test-key", "claude-3-sonnet")
            .with_max_tokens(2048)
            .with_system_prompt("You are helpful.");

        assert_eq!(config.provider, LLMProvider::Anthropic);
        assert_eq!(config.api_key, "test-key");
        assert_eq!(config.model, "claude-3-sonnet");
        assert_eq!(config.max_tokens, Some(2048));
        assert_eq!(config.system_prompt, Some("You are helpful.".to_string()));
    }
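
    // Added test sketch: checks the defaults set by the anthropic()
    // constructor above (streaming on, 200k context, threshold compaction).
    // It uses only items defined in this module.
    #[test]
    fn test_anthropic_defaults() {
        let config = LLMSessionConfig::anthropic("test-key", "claude-3-sonnet");

        assert!(config.streaming);
        assert_eq!(config.context_limit, 200_000);
        assert!(matches!(
            config.compaction,
            Some(CompactorType::Threshold(_))
        ));
    }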

    #[test]
    fn test_openai_config() {
        let config = LLMSessionConfig::openai("test-key", "gpt-4");

        assert_eq!(config.provider, LLMProvider::OpenAI);
        assert_eq!(config.model, "gpt-4");
    }
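
    // Added test sketch: exercises the compaction builders defined above,
    // overriding only the threshold and taking the rest from Default.
    #[test]
    fn test_compaction_builders() {
        let config = LLMSessionConfig::openai("test-key", "gpt-4")
            .with_threshold_compaction(CompactionConfig {
                threshold: 0.9,
                ..CompactionConfig::default()
            });
        assert!(matches!(
            config.compaction,
            Some(CompactorType::Threshold(_))
        ));

        // without_compaction() clears the strategy entirely.
        let config = config.without_compaction();
        assert!(config.compaction.is_none());
    }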
}