ruvector_core/agenticdb.rs

//! AgenticDB API Compatibility Layer
//!
//! Provides a drop-in replacement for agenticDB with a 5-table schema:
//! - vectors_table: Core embeddings + metadata
//! - reflexion_episodes: Self-critique memories
//! - skills_library: Consolidated patterns
//! - causal_edges: Cause-effect relationships with hypergraphs
//! - learning_sessions: RL training data
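//!
//! ## Example
//!
//! A minimal usage sketch (not compiled as a doc-test); it assumes the crate is
//! imported as `ruvector_core` and that `DbOptions::default()` points at a
//! writable `storage_path`:
//!
//! ```ignore
//! use ruvector_core::agenticdb::AgenticDB;
//!
//! let db = AgenticDB::with_dimensions(128)?;
//!
//! // Record a self-critique episode, then pull back similar ones.
//! db.store_episode(
//!     "Summarize a document".to_string(),
//!     vec!["read".to_string(), "draft summary".to_string()],
//!     vec!["summary too long".to_string()],
//!     "Keep summaries under five sentences".to_string(),
//! )?;
//! let similar = db.retrieve_similar_episodes("summarization mistakes", 5)?;
//! ```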

use crate::error::{Result, RuvectorError};
use crate::types::*;
use crate::vector_db::VectorDB;
use redb::{Database, TableDefinition};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;

// Table definitions
const REFLEXION_TABLE: TableDefinition<&str, &[u8]> = TableDefinition::new("reflexion_episodes");
const SKILLS_TABLE: TableDefinition<&str, &[u8]> = TableDefinition::new("skills_library");
const CAUSAL_TABLE: TableDefinition<&str, &[u8]> = TableDefinition::new("causal_edges");
const LEARNING_TABLE: TableDefinition<&str, &[u8]> = TableDefinition::new("learning_sessions");

/// Reflexion episode for self-critique memory
/// Note: Serialized using JSON (not bincode) due to serde_json::Value in metadata field
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReflexionEpisode {
    pub id: String,
    pub task: String,
    pub actions: Vec<String>,
    pub observations: Vec<String>,
    pub critique: String,
    pub embedding: Vec<f32>,
    pub timestamp: i64,
    pub metadata: Option<HashMap<String, serde_json::Value>>,
}

/// Skill definition in the library
#[derive(Debug, Clone, Serialize, Deserialize, bincode::Encode, bincode::Decode)]
pub struct Skill {
    pub id: String,
    pub name: String,
    pub description: String,
    pub parameters: HashMap<String, String>,
    pub examples: Vec<String>,
    pub embedding: Vec<f32>,
    pub usage_count: usize,
    pub success_rate: f64,
    pub created_at: i64,
    pub updated_at: i64,
}

/// Causal edge in the hypergraph
#[derive(Debug, Clone, Serialize, Deserialize, bincode::Encode, bincode::Decode)]
pub struct CausalEdge {
    pub id: String,
    pub causes: Vec<String>,  // Hypergraph: multiple causes
    pub effects: Vec<String>, // Hypergraph: multiple effects
    pub confidence: f64,
    pub context: String,
    pub embedding: Vec<f32>,
    pub observations: usize,
    pub timestamp: i64,
}

/// Learning session for RL training
#[derive(Debug, Clone, Serialize, Deserialize, bincode::Encode, bincode::Decode)]
pub struct LearningSession {
    pub id: String,
    pub algorithm: String, // Q-Learning, DQN, PPO, etc
    pub state_dim: usize,
    pub action_dim: usize,
    pub experiences: Vec<Experience>,
    pub model_params: Option<Vec<u8>>, // Serialized model
    pub created_at: i64,
    pub updated_at: i64,
}

/// Single RL experience
#[derive(Debug, Clone, Serialize, Deserialize, bincode::Encode, bincode::Decode)]
pub struct Experience {
    pub state: Vec<f32>,
    pub action: Vec<f32>,
    pub reward: f64,
    pub next_state: Vec<f32>,
    pub done: bool,
    pub timestamp: i64,
}

/// Prediction with confidence interval
#[derive(Debug, Clone, Serialize, Deserialize, bincode::Encode, bincode::Decode)]
pub struct Prediction {
    pub action: Vec<f32>,
    pub confidence_lower: f64,
    pub confidence_upper: f64,
    pub mean_confidence: f64,
}

/// Query result with utility score
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UtilitySearchResult {
    pub result: SearchResult,
    pub utility_score: f64,
    pub similarity_score: f64,
    pub causal_uplift: f64,
    pub latency_penalty: f64,
}

/// Main AgenticDB interface
pub struct AgenticDB {
    vector_db: Arc<VectorDB>,
    db: Arc<Database>,
    dimensions: usize,
}

impl AgenticDB {
    /// Create a new AgenticDB with the given options
    pub fn new(options: DbOptions) -> Result<Self> {
        // Create vector DB for core vector operations
        let vector_db = Arc::new(VectorDB::new(options.clone())?);

        // Create separate database for AgenticDB tables
        let agentic_path = format!("{}.agentic", options.storage_path);
        let db = Arc::new(Database::create(&agentic_path)?);

        // Initialize tables
        let write_txn = db.begin_write()?;
        {
            let _ = write_txn.open_table(REFLEXION_TABLE)?;
            let _ = write_txn.open_table(SKILLS_TABLE)?;
            let _ = write_txn.open_table(CAUSAL_TABLE)?;
            let _ = write_txn.open_table(LEARNING_TABLE)?;
        }
        write_txn.commit()?;

        Ok(Self {
            vector_db,
            db,
            dimensions: options.dimensions,
        })
    }

    /// Create with default options
    pub fn with_dimensions(dimensions: usize) -> Result<Self> {
        let mut options = DbOptions::default();
        options.dimensions = dimensions;
        Self::new(options)
    }

    // ============ Vector DB Core Methods ============

    /// Insert a vector entry
    pub fn insert(&self, entry: VectorEntry) -> Result<VectorId> {
        self.vector_db.insert(entry)
    }

    /// Insert multiple vectors in a batch
    pub fn insert_batch(&self, entries: Vec<VectorEntry>) -> Result<Vec<VectorId>> {
        self.vector_db.insert_batch(entries)
    }

    /// Search for similar vectors
    pub fn search(&self, query: SearchQuery) -> Result<Vec<SearchResult>> {
        self.vector_db.search(query)
    }

    /// Delete a vector by ID
    pub fn delete(&self, id: &str) -> Result<bool> {
        self.vector_db.delete(id)
    }

    /// Get a vector by ID
    pub fn get(&self, id: &str) -> Result<Option<VectorEntry>> {
        self.vector_db.get(id)
    }

    // ============ Reflexion Memory API ============

    /// Store a reflexion episode with self-critique
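    ///
    /// The episode is written to the `reflexion_episodes` table (JSON-encoded) and
    /// mirrored into the vector index under the id `reflexion_{id}` so it can be
    /// found later by embedding similarity.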
    pub fn store_episode(
        &self,
        task: String,
        actions: Vec<String>,
        observations: Vec<String>,
        critique: String,
    ) -> Result<String> {
        let id = uuid::Uuid::new_v4().to_string();

        // Generate embedding from critique for similarity search
        let embedding = self.generate_text_embedding(&critique)?;

        let episode = ReflexionEpisode {
            id: id.clone(),
            task,
            actions,
            observations,
            critique,
            embedding: embedding.clone(),
            timestamp: chrono::Utc::now().timestamp(),
            metadata: None,
        };

        // Store in reflexion table
        let write_txn = self.db.begin_write()?;
        {
            let mut table = write_txn.open_table(REFLEXION_TABLE)?;
            // Use JSON encoding for ReflexionEpisode (contains serde_json::Value which isn't bincode-compatible)
            let json = serde_json::to_vec(&episode)
                .map_err(|e| RuvectorError::SerializationError(e.to_string()))?;
            table.insert(id.as_str(), json.as_slice())?;
        }
        write_txn.commit()?;

        // Also index in vector DB for fast similarity search
        self.vector_db.insert(VectorEntry {
            id: Some(format!("reflexion_{}", id)),
            vector: embedding,
            metadata: Some({
                let mut meta = HashMap::new();
                meta.insert("type".to_string(), serde_json::json!("reflexion"));
                meta.insert("episode_id".to_string(), serde_json::json!(id.clone()));
                meta
            }),
        })?;

        Ok(id)
    }

    /// Retrieve similar reflexion episodes
    pub fn retrieve_similar_episodes(
        &self,
        query: &str,
        k: usize,
    ) -> Result<Vec<ReflexionEpisode>> {
        // Generate embedding for query
        let query_embedding = self.generate_text_embedding(query)?;

        // Search in vector DB
        let results = self.vector_db.search(SearchQuery {
            vector: query_embedding,
            k,
            filter: Some({
                let mut filter = HashMap::new();
                filter.insert("type".to_string(), serde_json::json!("reflexion"));
                filter
            }),
            ef_search: None,
        })?;

        // Retrieve full episodes
        let mut episodes = Vec::new();
        let read_txn = self.db.begin_read()?;
        let table = read_txn.open_table(REFLEXION_TABLE)?;

        for result in results {
            if let Some(metadata) = result.metadata {
                if let Some(episode_id) = metadata.get("episode_id") {
                    let id = episode_id.as_str().unwrap();
                    if let Some(data) = table.get(id)? {
                        // Use JSON decoding for ReflexionEpisode (contains serde_json::Value which isn't bincode-compatible)
                        let episode: ReflexionEpisode = serde_json::from_slice(data.value())
                            .map_err(|e| RuvectorError::SerializationError(e.to_string()))?;
                        episodes.push(episode);
                    }
                }
            }
        }

        Ok(episodes)
    }

    // ============ Skill Library API ============

    /// Create a new skill in the library
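    ///
    /// The skill is keyed by a generated UUID, embedded from `description`, stored
    /// bincode-encoded in the `skills_library` table, and mirrored into the vector
    /// index under the id `skill_{id}`.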
    pub fn create_skill(
        &self,
        name: String,
        description: String,
        parameters: HashMap<String, String>,
        examples: Vec<String>,
    ) -> Result<String> {
        let id = uuid::Uuid::new_v4().to_string();

        // Generate embedding from description
        let embedding = self.generate_text_embedding(&description)?;

        let skill = Skill {
            id: id.clone(),
            name,
            description,
            parameters,
            examples,
            embedding: embedding.clone(),
            usage_count: 0,
            success_rate: 0.0,
            created_at: chrono::Utc::now().timestamp(),
            updated_at: chrono::Utc::now().timestamp(),
        };

        // Store in skills table
        let write_txn = self.db.begin_write()?;
        {
            let mut table = write_txn.open_table(SKILLS_TABLE)?;
            let data = bincode::encode_to_vec(&skill, bincode::config::standard())
                .map_err(|e| RuvectorError::SerializationError(e.to_string()))?;
            table.insert(id.as_str(), data.as_slice())?;
        }
        write_txn.commit()?;

        // Index in vector DB
        self.vector_db.insert(VectorEntry {
            id: Some(format!("skill_{}", id)),
            vector: embedding,
            metadata: Some({
                let mut meta = HashMap::new();
                meta.insert("type".to_string(), serde_json::json!("skill"));
                meta.insert("skill_id".to_string(), serde_json::json!(id.clone()));
                meta
            }),
        })?;

        Ok(id)
    }

    /// Search skills by description
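    ///
    /// Example (illustrative): `db.search_skills("parse json", 5)?` returns up to
    /// five `Skill`s ranked by embedding similarity between the query text and each
    /// skill's description.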
    pub fn search_skills(&self, query_description: &str, k: usize) -> Result<Vec<Skill>> {
        let query_embedding = self.generate_text_embedding(query_description)?;

        let results = self.vector_db.search(SearchQuery {
            vector: query_embedding,
            k,
            filter: Some({
                let mut filter = HashMap::new();
                filter.insert("type".to_string(), serde_json::json!("skill"));
                filter
            }),
            ef_search: None,
        })?;

        let mut skills = Vec::new();
        let read_txn = self.db.begin_read()?;
        let table = read_txn.open_table(SKILLS_TABLE)?;

        for result in results {
            if let Some(metadata) = result.metadata {
                if let Some(skill_id) = metadata.get("skill_id") {
                    let id = skill_id.as_str().unwrap();
                    if let Some(data) = table.get(id)? {
                        let (skill, _): (Skill, usize) =
                            bincode::decode_from_slice(data.value(), bincode::config::standard())
                                .map_err(|e| RuvectorError::SerializationError(e.to_string()))?;
                        skills.push(skill);
                    }
                }
            }
        }

        Ok(skills)
    }

    /// Auto-consolidate action sequences into skills
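    ///
    /// In this simplified form, `success_threshold` acts as a minimum sequence
    /// length: every sequence with at least that many actions is turned into a
    /// skill whose examples are the actions themselves.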
    pub fn auto_consolidate(
        &self,
        action_sequences: Vec<Vec<String>>,
        success_threshold: usize,
    ) -> Result<Vec<String>> {
        let mut skill_ids = Vec::new();

        // Group similar sequences (simplified - would use clustering in production)
        for sequence in action_sequences {
            if sequence.len() >= success_threshold {
                let description = format!("Skill: {}", sequence.join(" -> "));
                let skill_id = self.create_skill(
                    format!("Auto-Skill-{}", uuid::Uuid::new_v4()),
                    description,
                    HashMap::new(),
                    sequence.clone(),
                )?;
                skill_ids.push(skill_id);
            }
        }

        Ok(skill_ids)
    }

    // ============ Causal Memory with Hypergraphs ============

    /// Add a causal edge (supporting hypergraphs with multiple causes/effects)
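    ///
    /// A single edge may connect several causes to several effects, e.g.
    /// (illustrative values): causes `["rain", "blocked drain"]`, effects
    /// `["flooded street"]`, confidence `0.8`.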
    pub fn add_causal_edge(
        &self,
        causes: Vec<String>,
        effects: Vec<String>,
        confidence: f64,
        context: String,
    ) -> Result<String> {
        let id = uuid::Uuid::new_v4().to_string();

        // Generate embedding from context
        let embedding = self.generate_text_embedding(&context)?;

        let edge = CausalEdge {
            id: id.clone(),
            causes,
            effects,
            confidence,
            context,
            embedding: embedding.clone(),
            observations: 1,
            timestamp: chrono::Utc::now().timestamp(),
        };

        // Store in causal table
        let write_txn = self.db.begin_write()?;
        {
            let mut table = write_txn.open_table(CAUSAL_TABLE)?;
            let data = bincode::encode_to_vec(&edge, bincode::config::standard())
                .map_err(|e| RuvectorError::SerializationError(e.to_string()))?;
            table.insert(id.as_str(), data.as_slice())?;
        }
        write_txn.commit()?;

        // Index in vector DB
        self.vector_db.insert(VectorEntry {
            id: Some(format!("causal_{}", id)),
            vector: embedding,
            metadata: Some({
                let mut meta = HashMap::new();
                meta.insert("type".to_string(), serde_json::json!("causal"));
                meta.insert("causal_id".to_string(), serde_json::json!(id.clone()));
                meta.insert("confidence".to_string(), serde_json::json!(confidence));
                meta
            }),
        })?;

        Ok(id)
    }

    /// Query with utility function: U = α·similarity + β·causal_uplift − γ·latency
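    ///
    /// Here `similarity` is derived from the vector distance as `1 / (1 + distance)`
    /// and `causal_uplift` is the stored edge confidence. Illustrative numbers
    /// (not defaults): with α = 0.7, similarity = 0.8, β = 0.2, causal_uplift = 0.9,
    /// γ = 0.1 and 0.05 s elapsed, U = 0.56 + 0.18 − 0.005 = 0.735.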
    pub fn query_with_utility(
        &self,
        query: &str,
        k: usize,
        alpha: f64,
        beta: f64,
        gamma: f64,
    ) -> Result<Vec<UtilitySearchResult>> {
        let start_time = std::time::Instant::now();
        let query_embedding = self.generate_text_embedding(query)?;

        // Get all causal edges
        let results = self.vector_db.search(SearchQuery {
            vector: query_embedding,
            k: k * 2, // Get more results for utility ranking
            filter: Some({
                let mut filter = HashMap::new();
                filter.insert("type".to_string(), serde_json::json!("causal"));
                filter
            }),
            ef_search: None,
        })?;

        let mut utility_results = Vec::new();

        for result in results {
            let similarity_score = 1.0 / (1.0 + result.score as f64); // Convert distance to similarity

            // Get causal uplift from metadata
            let causal_uplift = if let Some(ref metadata) = result.metadata {
                metadata
                    .get("confidence")
                    .and_then(|v| v.as_f64())
                    .unwrap_or(0.0)
            } else {
                0.0
            };

            let latency = start_time.elapsed().as_secs_f64();
            let latency_penalty = latency * gamma;

            // Calculate utility: U = α·similarity + β·causal_uplift − γ·latency
            let utility_score = alpha * similarity_score + beta * causal_uplift - latency_penalty;

            utility_results.push(UtilitySearchResult {
                result,
                utility_score,
                similarity_score,
                causal_uplift,
                latency_penalty,
            });
        }

        // Sort by utility score (descending)
        utility_results.sort_by(|a, b| b.utility_score.total_cmp(&a.utility_score));
        utility_results.truncate(k);

        Ok(utility_results)
    }

    // ============ Learning Sessions API ============

    /// Start a new learning session
    pub fn start_session(
        &self,
        algorithm: String,
        state_dim: usize,
        action_dim: usize,
    ) -> Result<String> {
        let id = uuid::Uuid::new_v4().to_string();

        let session = LearningSession {
            id: id.clone(),
            algorithm,
            state_dim,
            action_dim,
            experiences: Vec::new(),
            model_params: None,
            created_at: chrono::Utc::now().timestamp(),
            updated_at: chrono::Utc::now().timestamp(),
        };

        let write_txn = self.db.begin_write()?;
        {
            let mut table = write_txn.open_table(LEARNING_TABLE)?;
            let data = bincode::encode_to_vec(&session, bincode::config::standard())
                .map_err(|e| RuvectorError::SerializationError(e.to_string()))?;
            table.insert(id.as_str(), data.as_slice())?;
        }
        write_txn.commit()?;

        Ok(id)
    }

    /// Add an experience to a learning session
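    ///
    /// Note: the session is read in one transaction, updated in memory, and written
    /// back in a separate write transaction, so concurrent writers could interleave
    /// between the read and the write.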
    pub fn add_experience(
        &self,
        session_id: &str,
        state: Vec<f32>,
        action: Vec<f32>,
        reward: f64,
        next_state: Vec<f32>,
        done: bool,
    ) -> Result<()> {
        let read_txn = self.db.begin_read()?;
        let table = read_txn.open_table(LEARNING_TABLE)?;

        let data = table
            .get(session_id)?
            .ok_or_else(|| RuvectorError::VectorNotFound(session_id.to_string()))?;

        let (mut session, _): (LearningSession, usize) =
            bincode::decode_from_slice(data.value(), bincode::config::standard())
                .map_err(|e| RuvectorError::SerializationError(e.to_string()))?;

        drop(table);
        drop(read_txn);

        // Add experience
        session.experiences.push(Experience {
            state,
            action,
            reward,
            next_state,
            done,
            timestamp: chrono::Utc::now().timestamp(),
        });
        session.updated_at = chrono::Utc::now().timestamp();

        // Update session
        let write_txn = self.db.begin_write()?;
        {
            let mut table = write_txn.open_table(LEARNING_TABLE)?;
            let data = bincode::encode_to_vec(&session, bincode::config::standard())
                .map_err(|e| RuvectorError::SerializationError(e.to_string()))?;
            table.insert(session_id, data.as_slice())?;
        }
        write_txn.commit()?;

        Ok(())
    }

    /// Predict action with confidence interval
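    ///
    /// The prediction is a heuristic: actions from stored experiences whose state is
    /// within Euclidean distance 1.0 of `state` are averaged, weighted by reward, and
    /// the bounds are a normal-approximation 95% interval (±1.96 σ) over those rewards.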
    pub fn predict_with_confidence(&self, session_id: &str, state: Vec<f32>) -> Result<Prediction> {
        let read_txn = self.db.begin_read()?;
        let table = read_txn.open_table(LEARNING_TABLE)?;

        let data = table
            .get(session_id)?
            .ok_or_else(|| RuvectorError::VectorNotFound(session_id.to_string()))?;

        let (session, _): (LearningSession, usize) =
            bincode::decode_from_slice(data.value(), bincode::config::standard())
                .map_err(|e| RuvectorError::SerializationError(e.to_string()))?;

        // Simple prediction based on similar states (would use actual RL model in production)
        let mut similar_actions = Vec::new();
        let mut rewards = Vec::new();

        for exp in &session.experiences {
            let distance = euclidean_distance(&state, &exp.state);
            if distance < 1.0 {
                // Similarity threshold
                similar_actions.push(exp.action.clone());
                rewards.push(exp.reward);
            }
        }

        if similar_actions.is_empty() {
            // No similar states recorded: return a zero action with zero confidence
            return Ok(Prediction {
                action: vec![0.0; session.action_dim],
                confidence_lower: 0.0,
                confidence_upper: 0.0,
                mean_confidence: 0.0,
            });
        }

        // Average actions weighted by rewards
        let total_reward: f64 = rewards.iter().sum();
        let mut action = vec![0.0; session.action_dim];

        for (act, reward) in similar_actions.iter().zip(rewards.iter()) {
            let weight = reward / total_reward;
            for (i, val) in act.iter().enumerate() {
                action[i] += val * weight as f32;
            }
        }

        // Calculate confidence interval (simplified)
        let mean_reward = total_reward / rewards.len() as f64;
        let std_dev = calculate_std_dev(&rewards, mean_reward);

        Ok(Prediction {
            action,
            confidence_lower: mean_reward - 1.96 * std_dev,
            confidence_upper: mean_reward + 1.96 * std_dev,
            mean_confidence: mean_reward,
        })
    }

    /// Get learning session by ID
    pub fn get_session(&self, session_id: &str) -> Result<Option<LearningSession>> {
        let read_txn = self.db.begin_read()?;
        let table = read_txn.open_table(LEARNING_TABLE)?;

        if let Some(data) = table.get(session_id)? {
            let (session, _): (LearningSession, usize) =
                bincode::decode_from_slice(data.value(), bincode::config::standard())
                    .map_err(|e| RuvectorError::SerializationError(e.to_string()))?;
            Ok(Some(session))
        } else {
            Ok(None)
        }
    }

    // ============ Helper Methods ============

    /// Generate text embedding (placeholder - would use actual embedding model)
    fn generate_text_embedding(&self, text: &str) -> Result<Vec<f32>> {
        // Simple hash-based embedding for demonstration
        // In production, use actual embedding models like sentence-transformers
        let mut embedding = vec![0.0; self.dimensions];
        let bytes = text.as_bytes();

        for (i, byte) in bytes.iter().enumerate() {
            embedding[i % self.dimensions] += (*byte as f32) / 255.0;
        }

        // Normalize
        let norm: f32 = embedding.iter().map(|x| x * x).sum::<f32>().sqrt();
        if norm > 0.0 {
            for val in &mut embedding {
                *val /= norm;
            }
        }

        Ok(embedding)
    }
}

// Helper functions
fn euclidean_distance(a: &[f32], b: &[f32]) -> f32 {
    a.iter()
        .zip(b.iter())
        .map(|(x, y)| (x - y).powi(2))
        .sum::<f32>()
        .sqrt()
}

fn calculate_std_dev(values: &[f64], mean: f64) -> f64 {
    let variance = values.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / values.len() as f64;
    variance.sqrt()
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    fn create_test_db() -> Result<AgenticDB> {
        // Persist the temp directory; dropping the TempDir guard here would delete
        // the database files while the test is still using them.
        let dir = tempdir().unwrap().into_path();
        let mut options = DbOptions::default();
        options.storage_path = dir.join("test.db").to_string_lossy().to_string();
        options.dimensions = 128;
        AgenticDB::new(options)
    }

    #[test]
    fn test_reflexion_episode() -> Result<()> {
        let db = create_test_db()?;

        let id = db.store_episode(
            "Solve math problem".to_string(),
            vec!["read problem".to_string(), "calculate".to_string()],
            vec!["got 42".to_string()],
            "Should have shown work".to_string(),
        )?;

        let episodes = db.retrieve_similar_episodes("math problem solving", 5)?;
        assert!(!episodes.is_empty());
        assert_eq!(episodes[0].id, id);

        Ok(())
    }

    #[test]
    fn test_skill_library() -> Result<()> {
        let db = create_test_db()?;

        let mut params = HashMap::new();
        params.insert("input".to_string(), "string".to_string());

        let _skill_id = db.create_skill(
            "Parse JSON".to_string(),
            "Parse JSON from string".to_string(),
            params,
            vec!["json.parse()".to_string()],
        )?;

        let skills = db.search_skills("parse json data", 5)?;
        assert!(!skills.is_empty());

        Ok(())
    }

    #[test]
    fn test_causal_edge() -> Result<()> {
        let db = create_test_db()?;

        let _edge_id = db.add_causal_edge(
            vec!["rain".to_string()],
            vec!["wet ground".to_string()],
            0.95,
            "Weather observation".to_string(),
        )?;

        let results = db.query_with_utility("weather patterns", 5, 0.7, 0.2, 0.1)?;
        assert!(!results.is_empty());

        Ok(())
    }

    #[test]
    fn test_learning_session() -> Result<()> {
        let db = create_test_db()?;

        let session_id = db.start_session("Q-Learning".to_string(), 4, 2)?;

        db.add_experience(
            &session_id,
            vec![1.0, 0.0, 0.0, 0.0],
            vec![1.0, 0.0],
            1.0,
            vec![0.0, 1.0, 0.0, 0.0],
            false,
        )?;

        let prediction = db.predict_with_confidence(&session_id, vec![1.0, 0.0, 0.0, 0.0])?;
        assert_eq!(prediction.action.len(), 2);

        Ok(())
    }

    #[test]
    fn test_auto_consolidate() -> Result<()> {
        let db = create_test_db()?;

        let sequences = vec![
            vec![
                "step1".to_string(),
                "step2".to_string(),
                "step3".to_string(),
            ],
            vec![
                "action1".to_string(),
                "action2".to_string(),
                "action3".to_string(),
            ],
        ];

        let skill_ids = db.auto_consolidate(sequences, 3)?;
        assert_eq!(skill_ids.len(), 2);

        Ok(())
    }
}