// engine/routing.rs

//! Semantic Routing Engine for Dakera AI Agent Memory Platform.
//!
//! Agents query Dakera without knowing which namespace holds the answer.
//! Dakera figures it out by comparing the query embedding against cached
//! namespace centroids (averaged embeddings sampled from each namespace).
//!
//! The centroid cache is refreshed periodically in the background.

9use std::collections::HashMap;
10use std::sync::Arc;
11
12use parking_lot::RwLock;
13use storage::VectorStorage;
14
15use crate::distance::calculate_distance;
16use common::DistanceMetric;
17
18/// A route result: which namespace matched and how strongly.
19#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
20pub struct RouteMatch {
21    pub namespace: String,
22    pub similarity: f32,
23    pub memory_count: usize,
24}
25
/// Configuration for the semantic router.
pub struct SemanticRouterConfig {
    /// Maximum number of memories to sample per namespace when computing
    /// a centroid.
    pub sample_size: usize,
    /// How often the background task refreshes centroids, in seconds.
    pub refresh_interval_secs: u64,
}
33
34impl Default for SemanticRouterConfig {
35    fn default() -> Self {
36        Self {
37            sample_size: 20,
38            refresh_interval_secs: 1800, // 30 minutes
39        }
40    }
41}
42
43impl SemanticRouterConfig {
44    pub fn from_env() -> Self {
45        let sample_size: usize = std::env::var("DAKERA_ROUTE_SAMPLE_SIZE")
46            .ok()
47            .and_then(|v| v.parse().ok())
48            .unwrap_or(20);
49
50        let refresh_interval_secs: u64 = std::env::var("DAKERA_ROUTE_REFRESH_SECS")
51            .ok()
52            .and_then(|v| v.parse().ok())
53            .unwrap_or(1800);
54
55        Self {
56            sample_size,
57            refresh_interval_secs,
58        }
59    }
60}
61
/// Cached centroid for a namespace: average embedding + vector count.
#[derive(Clone)]
struct CentroidEntry {
    /// Averaged (and L2-normalized at refresh time) embedding for the namespace.
    centroid: Vec<f32>,
    /// Total number of vectors the namespace held when the centroid was built.
    count: usize,
}
68
69/// Semantic router that maintains a centroid cache per namespace.
70pub struct SemanticRouter {
71    config: SemanticRouterConfig,
72    /// Namespace → averaged centroid embedding + count
73    cache: RwLock<HashMap<String, CentroidEntry>>,
74}
75
76impl SemanticRouter {
77    pub fn new(config: SemanticRouterConfig) -> Self {
78        Self {
79            config,
80            cache: RwLock::new(HashMap::new()),
81        }
82    }
83
84    /// Route a query embedding to the most relevant namespaces.
85    ///
86    /// Returns namespaces sorted by similarity (descending), filtered
87    /// by `min_similarity`.
88    pub fn route(&self, query: &[f32], top_k: usize, min_similarity: f32) -> Vec<RouteMatch> {
89        let cache = self.cache.read();
90        let mut matches: Vec<RouteMatch> = cache
91            .iter()
92            .filter_map(|(ns, entry)| {
93                if entry.centroid.len() != query.len() {
94                    return None; // dimension mismatch, skip
95                }
96                let sim = calculate_distance(query, &entry.centroid, DistanceMetric::Cosine);
97                if sim >= min_similarity {
98                    Some(RouteMatch {
99                        namespace: ns.clone(),
100                        similarity: sim,
101                        memory_count: entry.count,
102                    })
103                } else {
104                    None
105                }
106            })
107            .collect();
108
109        matches.sort_by(|a, b| {
110            b.similarity
111                .partial_cmp(&a.similarity)
112                .unwrap_or(std::cmp::Ordering::Equal)
113        });
114        matches.truncate(top_k);
115        matches
116    }
117
118    /// Refresh the centroid cache by sampling memories from each agent namespace.
119    ///
120    /// For each `_dakera_agent_*` namespace, sample up to `sample_size` vectors,
121    /// average their embeddings into a single centroid.
122    pub async fn refresh_centroids(&self, storage: &Arc<dyn VectorStorage>) {
123        let namespaces = match storage.list_namespaces().await {
124            Ok(ns) => ns,
125            Err(e) => {
126                tracing::warn!(error = %e, "Failed to list namespaces for centroid refresh");
127                return;
128            }
129        };
130
131        let mut new_cache: HashMap<String, CentroidEntry> = HashMap::new();
132
133        for namespace in &namespaces {
134            if !namespace.starts_with("_dakera_agent_") {
135                continue;
136            }
137
138            let vectors = match storage.get_all(namespace).await {
139                Ok(v) => v,
140                Err(_) => continue,
141            };
142
143            if vectors.is_empty() {
144                continue;
145            }
146
147            let count = vectors.len();
148
149            // Sample up to sample_size vectors (take first N — they're stored in insertion order)
150            let sample: Vec<&Vec<f32>> = vectors
151                .iter()
152                .filter(|v| !v.values.is_empty())
153                .take(self.config.sample_size)
154                .map(|v| &v.values)
155                .collect();
156
157            if sample.is_empty() {
158                continue;
159            }
160
161            // Compute centroid (average embedding)
162            let dim = sample[0].len();
163            let mut centroid = vec![0.0f32; dim];
164            let mut valid = 0usize;
165            for embedding in &sample {
166                if embedding.len() == dim {
167                    for (i, val) in embedding.iter().enumerate() {
168                        centroid[i] += val;
169                    }
170                    valid += 1;
171                }
172            }
173
174            if valid > 0 {
175                for val in &mut centroid {
176                    *val /= valid as f32;
177                }
178                // Normalize centroid for cosine comparison
179                let norm: f32 = centroid.iter().map(|x| x * x).sum::<f32>().sqrt();
180                if norm > 1e-8 {
181                    for val in &mut centroid {
182                        *val /= norm;
183                    }
184                }
185                new_cache.insert(namespace.clone(), CentroidEntry { centroid, count });
186            }
187        }
188
189        let refreshed_count = new_cache.len();
190        *self.cache.write() = new_cache;
191
192        tracing::info!(
193            namespaces_cached = refreshed_count,
194            "Semantic router centroid cache refreshed"
195        );
196    }
197
198    /// Spawn the centroid refresh as a background tokio task.
199    pub fn spawn_refresh(
200        router: Arc<SemanticRouter>,
201        storage: Arc<dyn VectorStorage>,
202    ) -> tokio::task::JoinHandle<()> {
203        let interval_secs = router.config.refresh_interval_secs;
204        tokio::spawn(async move {
205            // Initial refresh on startup (small delay to let storage warm up)
206            tokio::time::sleep(std::time::Duration::from_secs(5)).await;
207            router.refresh_centroids(&storage).await;
208
209            let mut interval = tokio::time::interval(std::time::Duration::from_secs(interval_secs));
210            loop {
211                interval.tick().await;
212                router.refresh_centroids(&storage).await;
213            }
214        })
215    }
216}
217
// ============================================================================
// CE-12a: Query Classifier for smart routing
// ============================================================================

/// Inferred query kind used for smart routing decisions.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum QueryKind {
    /// Short / keyword-based query → prefer BM25 full-text search
    Keyword,
    /// Long / natural-language query → prefer vector similarity search
    Semantic,
    /// Mixed signal → hybrid (vector + BM25)
    Hybrid,
}

/// Heuristic classifier that determines the best retrieval strategy for a
/// free-text query without any model inference.
pub struct QueryClassifier;

impl QueryClassifier {
    /// Lowercase prefixes that mark a natural-language question.
    /// Note: "explain"/"describe" have no trailing space, so e.g.
    /// "explained..." also matches — acceptable for a heuristic.
    const QUESTION_PREFIXES: [&'static str; 9] = [
        "what ", "how ", "why ", "when ", "where ", "who ", "tell me", "explain", "describe",
    ];

    /// Classify a raw query string into a [`QueryKind`].
    ///
    /// Heuristics (in priority order):
    /// 1. Natural language question (has `?` or starts with a question word) → [`QueryKind::Hybrid`]
    ///    BM25 finds exact names/dates while vector captures semantic intent.
    ///    This covers all LoCoMo benchmark queries which are long NL questions.
    /// 2. ≥ 8 words **or** contains a `.` anywhere (prose, no question) → [`QueryKind::Semantic`]
    ///    (Doc previously said "ends in `.`"; the implemented check is
    ///    `contains`, so e.g. "v2.0 changelog" classifies as Semantic.)
    /// 3. ≤ 3 words with no sentence structure → [`QueryKind::Keyword`]
    /// 4. Everything else → [`QueryKind::Hybrid`]
    pub fn classify(query: &str) -> QueryKind {
        let trimmed = query.trim();
        let word_count = trimmed.split_whitespace().count();
        let lower = trimmed.to_lowercase();

        // Natural language questions benefit from both BM25 (named entities,
        // dates) and vector search (semantic meaning).
        let is_question = trimmed.contains('?')
            || Self::QUESTION_PREFIXES.iter().any(|p| lower.starts_with(p));

        if is_question {
            QueryKind::Hybrid
        } else if word_count >= 8 || trimmed.contains('.') {
            QueryKind::Semantic
        } else if word_count <= 3 {
            QueryKind::Keyword
        } else {
            QueryKind::Hybrid
        }
    }
}
276
#[cfg(test)]
mod tests {
    use super::*;

    /// Test helper: insert a centroid entry straight into the router's cache.
    fn seed(router: &SemanticRouter, namespace: &str, centroid: Vec<f32>, count: usize) {
        router
            .cache
            .write()
            .insert(namespace.to_string(), CentroidEntry { centroid, count });
    }

    #[test]
    fn test_route_empty_cache() {
        let router = SemanticRouter::new(SemanticRouterConfig::default());
        assert!(router.route(&[1.0, 0.0, 0.0], 3, 0.5).is_empty());
    }

    #[test]
    fn test_route_with_cached_centroids() {
        let router = SemanticRouter::new(SemanticRouterConfig::default());
        seed(&router, "_dakera_agent_dev", vec![1.0, 0.0, 0.0], 100);
        seed(&router, "_dakera_agent_ops", vec![0.0, 1.0, 0.0], 50);
        seed(&router, "_dakera_agent_sec", vec![0.707, 0.707, 0.0], 30);

        // Query aligned with the "dev" centroid should rank it first.
        let hits = router.route(&[1.0, 0.0, 0.0], 3, 0.0);
        assert_eq!(hits.len(), 3);
        assert_eq!(hits[0].namespace, "_dakera_agent_dev");
        assert!(hits[0].similarity > hits[1].similarity);
    }

    #[test]
    fn test_route_min_similarity_filter() {
        let router = SemanticRouter::new(SemanticRouterConfig::default());
        seed(&router, "_dakera_agent_a", vec![1.0, 0.0, 0.0], 10);
        seed(&router, "_dakera_agent_b", vec![0.0, 1.0, 0.0], 10);

        // High min_similarity should filter out the orthogonal namespace.
        let hits = router.route(&[1.0, 0.0, 0.0], 5, 0.9);
        assert_eq!(hits.len(), 1);
        assert_eq!(hits[0].namespace, "_dakera_agent_a");
    }

    #[test]
    fn test_route_top_k_truncation() {
        let router = SemanticRouter::new(SemanticRouterConfig::default());
        for i in 0..10 {
            // Unit vectors fanning out from the x axis toward the y axis.
            let x = 1.0 - i as f32 * 0.05;
            let y = i as f32 * 0.05;
            let norm = (x * x + y * y).sqrt();
            seed(
                &router,
                &format!("_dakera_agent_{}", i),
                vec![x / norm, y / norm, 0.0],
                10,
            );
        }

        assert_eq!(router.route(&[1.0, 0.0, 0.0], 3, 0.0).len(), 3);
    }

    #[test]
    fn test_route_dimension_mismatch_skipped() {
        let router = SemanticRouter::new(SemanticRouterConfig::default());
        seed(&router, "_dakera_agent_3d", vec![1.0, 0.0, 0.0], 10);
        seed(&router, "_dakera_agent_5d", vec![1.0, 0.0, 0.0, 0.0, 0.0], 10);

        // Query is 3D, should only match the 3D centroid
        let hits = router.route(&[1.0, 0.0, 0.0], 5, 0.0);
        assert_eq!(hits.len(), 1);
        assert_eq!(hits[0].namespace, "_dakera_agent_3d");
    }

    #[test]
    fn test_config_defaults() {
        let cfg = SemanticRouterConfig::default();
        assert_eq!(cfg.sample_size, 20);
        assert_eq!(cfg.refresh_interval_secs, 1800);
    }

    // --- QueryClassifier tests ---

    #[test]
    fn test_classify_keyword_short() {
        for query in ["rust async", "HNSW", "memory importance"].iter() {
            assert_eq!(QueryClassifier::classify(query), QueryKind::Keyword);
        }
    }

    #[test]
    fn test_classify_question_routes_hybrid() {
        // Natural language questions → Hybrid (BM25 + vector, covers LoCoMo benchmark queries)
        let questions = [
            "what is the best way to store long term memories in an AI system",
            "tell me about the agent memory architecture",
            "how does HNSW work?",
            "What sport did Sarah's brother play in high school?",
        ];
        for query in questions.iter() {
            assert_eq!(QueryClassifier::classify(query), QueryKind::Hybrid);
        }
    }

    #[test]
    fn test_classify_semantic_long_prose() {
        // Long prose without question structure → Semantic
        assert_eq!(
            QueryClassifier::classify(
                "the agent memory platform stores embeddings with adaptive decay weighting"
            ),
            QueryKind::Semantic
        );
    }

    #[test]
    fn test_classify_hybrid_middle() {
        assert_eq!(
            QueryClassifier::classify("vector search memory agent"),
            QueryKind::Hybrid
        );
    }
}