Skip to main content

offline_intelligence/context_engine/
context_builder.rs

1//! Builds optimal context from multiple memory sources
2
3use crate::memory::Message;
4use crate::memory_db::StoredMessage;
5use tracing::{info, debug};
6
/// Builds context from multiple memory sources
pub struct ContextBuilder {
    /// Limits and feature flags governing how the context is assembled.
    config: ContextBuilderConfig,
}
11
/// Configuration for context building
#[derive(Debug, Clone)]
pub struct ContextBuilderConfig {
    /// Hard ceiling on the estimated token count of the built context
    /// (estimated at ~4 characters per token by `trim_to_token_limit`).
    pub max_total_tokens: usize,
    /// Fraction of the current conversation kept when no Tier 1 content is
    /// available (see `select_recent_messages`).
    pub min_current_context_ratio: f32,
    /// When true, system messages from the current conversation are always
    /// copied into the built context first.
    pub preserve_system_messages: bool,
    /// Master switch for injecting specific details from cold storage.
    pub enable_detail_injection: bool,
    /// NOTE(review): not read anywhere in this file — presumably a relevance
    /// cutoff for detail injection; confirm against other consumers.
    pub detail_injection_threshold: f32,
}
21
22impl Default for ContextBuilderConfig {
23    fn default() -> Self {
24        Self {
25            max_total_tokens: 4000,
26            min_current_context_ratio: 0.4,
27            preserve_system_messages: true,
28            enable_detail_injection: true,
29            detail_injection_threshold: 0.7,
30        }
31    }
32}
33
34impl ContextBuilderConfig {
35    /// Derive the hard token ceiling from the model's context window.
36    /// Mirrors OrchestratorConfig: 75% of CTX_SIZE as the total token cap.
37    pub fn from_ctx_size(ctx_size: u32) -> Self {
38        Self {
39            max_total_tokens: (ctx_size as f32 * 0.75) as usize,
40            ..Self::default()
41        }
42    }
43}
44
45impl ContextBuilder {
46    /// Create a new context builder
47    pub fn new(config: ContextBuilderConfig) -> Self {
48        Self {
49            config,
50        }
51    }
52    
53    /// Build optimal context from Tier 1 (hot cache) and Tier 3 (cold storage)
54    pub async fn build_context(
55        &mut self,
56        current_messages: &[Message],
57        tier1_content: Option<Vec<Message>>,
58        tier3_messages: Option<Vec<StoredMessage>>,
59        cross_session_messages: Option<Vec<StoredMessage>>,
60        user_query: Option<&str>,
61    ) -> anyhow::Result<Vec<Message>> {
62        info!("Building context from {} current messages", current_messages.len());
63
64        // Start with current messages (incorporates tier1 hot cache if available)
65        let mut context = self.prepare_context_with_tier1(current_messages, tier1_content);
66
67        // Add cross-session messages if available
68        if let Some(ref cross_messages) = cross_session_messages {
69            self.add_cross_session_context(&mut context, cross_messages, user_query)
70                .await?;
71        }
72
73        // Add specific details from cold storage (Tier 3) if needed
74        if let Some(ref full_messages) = tier3_messages {
75            self.add_specific_details(&mut context, full_messages, user_query)
76                .await?;
77        }
78
79        // Ensure we don't exceed token limits
80        self.trim_to_token_limit(&mut context);
81
82        debug!("Built context with {} messages", context.len());
83
84        Ok(context)
85    }
86
87    /// Add historical messages from other sessions to the current context
88    async fn add_cross_session_context(
89        &mut self,
90        context: &mut Vec<Message>,
91        cross_messages: &[StoredMessage],
92        _user_query: Option<&str>,
93    ) -> anyhow::Result<()> {
94        if cross_messages.is_empty() {
95            return Ok(());
96        }
97        
98        // Create a bridging message to inform the model of the source
99        let bridge = Message {
100            role: "system".to_string(),
101            content: "[Context from previous conversations]".to_string(),
102        };
103        context.insert(0, bridge);
104        
105        // Add relevant cross-session messages (limit to 3 to avoid context bloat)
106        for message in cross_messages.iter().take(3) {
107            let cross_msg = Message {
108                role: message.role.clone(),
109                content: format!("[From earlier: {}]", message.content),
110            };
111            context.insert(1, cross_msg); // Insert after bridge
112        }
113        
114        Ok(())
115    }
116    
117    /// Prepare context incorporating Tier 1 content if available
118    fn prepare_context_with_tier1(
119        &self, 
120        current_messages: &[Message], 
121        tier1_content: Option<Vec<Message>>
122    ) -> Vec<Message> {
123        let mut context = Vec::new();
124        
125        // Always preserve system messages from current
126        if self.config.preserve_system_messages {
127            for message in current_messages.iter().filter(|m| m.role == "system") {
128                context.push(message.clone());
129            }
130        }
131        
132        // Use Tier 1 content if available, otherwise use recent current messages
133        if let Some(tier1_messages) = tier1_content {
134            context.extend(tier1_messages);
135        } else {
136            let recent_messages = self.select_recent_messages(current_messages);
137            context.extend(recent_messages);
138        }
139        
140        context
141    }
142    
143    /// Select recent messages to keep
144    fn select_recent_messages(&self, messages: &[Message]) -> Vec<Message> {
145        if messages.is_empty() {
146            return Vec::new();
147        }
148        
149        let target_count = (messages.len() as f32 * self.config.min_current_context_ratio).ceil() as usize;
150        let target_count = target_count.max(1).min(messages.len());
151        
152        messages.iter()
153            .rev()
154            .take(target_count)
155            .rev()
156            .cloned()
157            .collect()
158    }
159    
160    async fn add_specific_details(
161        &mut self, 
162        context: &mut Vec<Message>, 
163        full_messages: &[StoredMessage], 
164        user_query: Option<&str>
165    ) -> anyhow::Result<()> {
166        if !self.config.enable_detail_injection || full_messages.is_empty() {
167            return Ok(());
168        }
169        
170        let detail_requests = self.extract_detail_requests(user_query);
171        if detail_requests.is_empty() { 
172            return Ok(()); 
173        }
174        
175        let relevant_messages = self.find_relevant_details(full_messages, &detail_requests);
176        for message in &relevant_messages {
177            let detail_message = Message {
178                role: message.role.clone(),
179                content: format!("[Earlier detail: {}]", message.content),
180            };
181            
182            // Insert details before the last user message if possible
183            if let Some(pos) = context.iter().rposition(|m| m.role == "user") {
184                context.insert(pos, detail_message);
185            } else {
186                context.insert(0, detail_message);
187            }
188        }
189        
190        Ok(())
191    }
192
193    fn extract_detail_requests(&self, user_query: Option<&str>) -> Vec<String> {
194        let mut requests = Vec::new();
195        if let Some(query) = user_query {
196            let query_lower = query.to_lowercase();
197            let words: Vec<&str> = query_lower.split_whitespace().collect();
198            
199            for i in 0..words.len().saturating_sub(1) {
200                if ["the", "that", "those", "specific", "exact"].contains(&words[i]) {
201                    let potential = words[i + 1..].iter()
202                        .take(3)
203                        .copied()
204                        .collect::<Vec<&str>>()
205                        .join(" ");
206                    
207                    if !potential.is_empty() { 
208                        requests.push(potential); 
209                    }
210                }
211            }
212        }
213        
214        requests.dedup();
215        requests
216    }
217
218    fn find_relevant_details<'a>(
219        &self, 
220        messages: &'a [StoredMessage], 
221        detail_requests: &[String]
222    ) -> Vec<&'a StoredMessage> {
223        let mut relevant = Vec::new();
224        
225        for message in messages {
226            let content_lower = message.content.to_lowercase();
227            
228            for request in detail_requests {
229                if content_lower.contains(&request.to_lowercase()) {
230                    relevant.push(message);
231                    break;
232                }
233            }
234            
235            if relevant.len() >= 3 { 
236                break; 
237            }
238        }
239        
240        relevant
241    }
242
243    fn trim_to_token_limit(&self, context: &mut Vec<Message>) {
244        let mut total_tokens = 0;
245        let mut to_remove = Vec::new();
246        
247        for (idx, message) in context.iter().enumerate() {
248            let message_tokens = message.content.len() / 4;
249            
250            if total_tokens + message_tokens > self.config.max_total_tokens {
251                to_remove.push(idx);
252            } else {
253                total_tokens += message_tokens;
254            }
255        }
256        
257        // Remove from end to preserve order
258        for idx in to_remove.iter().rev() {
259            context.remove(*idx);
260        }
261    }
262
263    fn extract_topics(&self, messages: &[Message]) -> Vec<String> {
264        let mut topics = Vec::new();
265        
266        for message in messages.iter().rev().take(5) {
267            let words: Vec<&str> = message.content.split_whitespace().collect();
268            
269            for i in 0..words.len().saturating_sub(2) {
270                let word_lower = words[i].to_lowercase();
271                
272                if word_lower == "about" || word_lower == "regarding" {
273                    let topic = words[i + 1..].iter()
274                        .take(3)
275                        .copied()
276                        .collect::<Vec<&str>>()
277                        .join(" ");
278                    
279                    if !topic.is_empty() { 
280                        topics.push(topic); 
281                    }
282                }
283                
284                if ["what", "how", "why", "when", "where", "who", "which"].contains(&word_lower.as_str()) {
285                    let topic = words[i + 1..].iter()
286                        .take(4)
287                        .copied()
288                        .collect::<Vec<&str>>()
289                        .join(" ");
290                    
291                    if !topic.is_empty() { 
292                        topics.push(topic); 
293                    }
294                }
295            }
296        }
297        
298        topics.dedup();
299        topics.truncate(3);
300        topics
301    }
302}
303
impl Clone for ContextBuilder {
    // NOTE(review): equivalent to `#[derive(Clone)]` on the struct, since
    // `config` is the only field and `ContextBuilderConfig` already derives
    // `Clone`; consider replacing this manual impl with the derive.
    fn clone(&self) -> Self {
        Self {
            config: self.config.clone(),
        }
    }
}