// shodh_memory/memory/learning_history.rs
//! Learning History Storage
//!
//! Persistent storage for significant learning events. This enables:
//! - Recency-weighted retrieval (recently learned = more relevant)
//! - Learning velocity tracking (rapidly reinforced = more relevant)
//! - Session coherence (current session learning prioritized)
//! - Audit trail (what did the system learn and when)
//!
//! Storage schema:
//! - `learning:{user_id}:{timestamp_nanos:020}` - Primary event storage (time-ordered)
//! - `learning_by_memory:{user_id}:{memory_id}:{timestamp}` - Index by memory
//! - `learning_by_fact:{user_id}:{fact_id}:{timestamp}` - Index by fact
//! - `learning_by_type:{user_id}:{event_type}:{timestamp}` - Index by event type
//! - `learning_stats:{user_id}` - Aggregated learning statistics

use std::collections::HashMap;
use std::sync::Arc;

use anyhow::Result;
use chrono::{DateTime, Duration, Utc};
use rocksdb::{IteratorMode, DB};
use serde::{Deserialize, Serialize};

use super::introspection::ConsolidationEvent;

/// Stored learning event with full event fidelity.
///
/// Stores the complete ConsolidationEvent (including its serde tag) using
/// MessagePack (rmp-serde), which is a binary format that supports tagged enums.
/// This preserves all event metadata for accurate retrieval and reporting.
///
/// Additional index fields enable efficient queries without deserializing
/// the full event.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StoredLearningEvent {
    /// The full consolidation event (stored with rmp-serde, supports tagged enums).
    pub event: ConsolidationEvent,
    /// Event type for indexing and queries (denormalized for fast filtering).
    pub event_type: LearningEventType,
    /// Primary memory ID involved (if any) — drives the `learning_by_memory` index.
    pub memory_id: Option<String>,
    /// Secondary memory ID (for edges/interference) — also indexed under `learning_by_memory`.
    pub related_memory_id: Option<String>,
    /// Fact ID (if fact-related) — drives the `learning_by_fact` index.
    pub fact_id: Option<String>,
}

/// Categorized learning event types for indexing and querying.
///
/// Serialized with serde (snake_case, matching [`LearningEventType::as_str`],
/// which builds the `learning_by_type` index keys) and with bincode.
#[derive(
    Debug,
    Clone,
    Copy,
    PartialEq,
    Eq,
    Hash,
    Serialize,
    Deserialize,
    bincode::Encode,
    bincode::Decode,
)]
#[serde(rename_all = "snake_case")]
pub enum LearningEventType {
    /// Edge became permanent (LTP)
    EdgePotentiated,
    /// New semantic fact extracted
    FactExtracted,
    /// Fact was deleted (knowledge lost)
    FactDeleted,
    /// Fact was reinforced
    FactReinforced,
    /// Memory interference detected
    InterferenceDetected,
    /// Memory was replayed during consolidation
    MemoryReplayed,
    /// Memory promoted to higher tier
    MemoryPromoted,
    /// Replay cycle completed (summary)
    ReplayCycleCompleted,
    /// Maintenance cycle completed (summary); also used as the catch-all
    /// bucket for minor consolidation events — see `classify_event`.
    MaintenanceCycleCompleted,
}

81impl LearningEventType {
82    pub fn as_str(&self) -> &'static str {
83        match self {
84            Self::EdgePotentiated => "edge_potentiated",
85            Self::FactExtracted => "fact_extracted",
86            Self::FactDeleted => "fact_deleted",
87            Self::FactReinforced => "fact_reinforced",
88            Self::InterferenceDetected => "interference_detected",
89            Self::MemoryReplayed => "memory_replayed",
90            Self::MemoryPromoted => "memory_promoted",
91            Self::ReplayCycleCompleted => "replay_cycle_completed",
92            Self::MaintenanceCycleCompleted => "maintenance_cycle_completed",
93        }
94    }
95}
96
/// Learning velocity for a memory or fact.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct LearningVelocity {
    /// Number of learning events in the window.
    pub event_count: usize,
    /// Types of events that occurred (one entry per event; may repeat).
    pub event_types: Vec<LearningEventType>,
    /// Most recent event timestamp, if any event fell in the window.
    pub last_event: Option<DateTime<Utc>>,
    /// Velocity score (events per day, weighted by recency and event type).
    pub velocity_score: f32,
}

/// Aggregated learning statistics (computed over up to the last year of events).
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct LearningStats {
    // Total number of events scanned.
    pub total_events: usize,
    // Event counts keyed by `LearningEventType::as_str()` name.
    pub events_by_type: HashMap<String, usize>,
    // Events in the last 24 hours.
    pub events_last_24h: usize,
    // Events in the last 7 days.
    pub events_last_7d: usize,
    // Up to 10 `(memory_id, event_count)` pairs, descending by count.
    pub most_active_memories: Vec<(String, usize)>,
    // Number of EdgePotentiated events.
    pub potentiation_count: usize,
    // Number of InterferenceDetected events.
    pub interference_count: usize,
}

/// Persistent storage for learning history, backed by RocksDB.
pub struct LearningHistoryStore {
    // Shared RocksDB handle; all keys are namespaced by `user_id`.
    db: Arc<DB>,
}

impl LearningHistoryStore {
    /// Create a new learning history store over the shared RocksDB handle.
    pub fn new(db: Arc<DB>) -> Self {
        Self { db }
    }

    /// Record a significant learning event.
    ///
    /// Writes the primary time-ordered record plus secondary index entries
    /// (by memory, related memory, fact, and event type) in one atomic
    /// RocksDB batch so a crash cannot leave orphaned index entries.
    pub fn record(&self, user_id: &str, event: &ConsolidationEvent) -> Result<()> {
        // Derive the denormalized index fields once so storage and indexes agree.
        let (event_type, memory_id, related_memory_id, fact_id) = Self::classify_event(event);

        let stored = StoredLearningEvent {
            event: event.clone(),
            event_type,
            memory_id: memory_id.clone(),
            related_memory_id: related_memory_id.clone(),
            fact_id: fact_id.clone(),
        };

        let timestamp = event.timestamp();
        // Nanosecond precision keeps same-second events ordered; timestamps
        // outside the i64-nanos range degrade to 0 rather than failing.
        let timestamp_nanos = timestamp.timestamp_nanos_opt().unwrap_or_else(|| {
            tracing::warn!("learning event timestamp outside i64 nanos range, using 0");
            0
        });

        // Primary storage (time-ordered; {:020} zero-padding keeps keys sortable)
        // Use MessagePack (rmp-serde) - binary format that supports serde tagged enums
        let key = format!("learning:{}:{:020}", user_id, timestamp_nanos);
        let value = rmp_serde::to_vec(&stored)?;

        // Batch all writes atomically — avoids orphaned index entries on crash
        let mut batch = rocksdb::WriteBatch::default();
        batch.put(key.as_bytes(), &value);

        // Index by memory ID (index value is the primary key, not the event)
        if let Some(ref mem_id) = memory_id {
            let mem_key = format!(
                "learning_by_memory:{}:{}:{:020}",
                user_id, mem_id, timestamp_nanos
            );
            batch.put(mem_key.as_bytes(), key.as_bytes());
        }

        // Index related memory too (for edges/interference)
        if let Some(ref related_id) = related_memory_id {
            let related_key = format!(
                "learning_by_memory:{}:{}:{:020}",
                user_id, related_id, timestamp_nanos
            );
            batch.put(related_key.as_bytes(), key.as_bytes());
        }

        // Index by fact ID
        if let Some(ref f_id) = fact_id {
            let fact_key = format!(
                "learning_by_fact:{}:{}:{:020}",
                user_id, f_id, timestamp_nanos
            );
            batch.put(fact_key.as_bytes(), key.as_bytes());
        }

        // Index by event type
        let type_key = format!(
            "learning_by_type:{}:{}:{:020}",
            user_id,
            event_type.as_str(),
            timestamp_nanos
        );
        batch.put(type_key.as_bytes(), key.as_bytes());

        self.db.write(batch)?;

        Ok(())
    }

    /// Classify a consolidation event for indexing.
    ///
    /// Returns `(event_type, memory_id, related_memory_id, fact_id)` — the
    /// denormalized fields stored on `StoredLearningEvent` and used by
    /// `record` to build secondary index keys.
    fn classify_event(
        event: &ConsolidationEvent,
    ) -> (
        LearningEventType,
        Option<String>,
        Option<String>,
        Option<String>,
    ) {
        match event {
            // Edge potentiation links two memories — index under both.
            ConsolidationEvent::EdgePotentiated {
                from_memory_id,
                to_memory_id,
                ..
            } => (
                LearningEventType::EdgePotentiated,
                Some(from_memory_id.clone()),
                Some(to_memory_id.clone()),
                None,
            ),
            ConsolidationEvent::FactExtracted { fact_id, .. } => (
                LearningEventType::FactExtracted,
                None,
                None,
                Some(fact_id.clone()),
            ),
            ConsolidationEvent::FactDeleted { fact_id, .. } => (
                LearningEventType::FactDeleted,
                None,
                None,
                Some(fact_id.clone()),
            ),
            ConsolidationEvent::FactReinforced { fact_id, .. } => (
                LearningEventType::FactReinforced,
                None,
                None,
                Some(fact_id.clone()),
            ),
            // Interference involves a new and an old memory — index under both.
            ConsolidationEvent::InterferenceDetected {
                new_memory_id,
                old_memory_id,
                ..
            } => (
                LearningEventType::InterferenceDetected,
                Some(new_memory_id.clone()),
                Some(old_memory_id.clone()),
                None,
            ),
            ConsolidationEvent::MemoryReplayed { memory_id, .. } => (
                LearningEventType::MemoryReplayed,
                Some(memory_id.clone()),
                None,
                None,
            ),
            ConsolidationEvent::MemoryPromoted { memory_id, .. } => (
                LearningEventType::MemoryPromoted,
                Some(memory_id.clone()),
                None,
                None,
            ),
            ConsolidationEvent::ReplayCycleCompleted { .. } => {
                (LearningEventType::ReplayCycleCompleted, None, None, None)
            }
            ConsolidationEvent::MaintenanceCycleCompleted { .. } => (
                LearningEventType::MaintenanceCycleCompleted,
                None,
                None,
                None,
            ),
            // Remaining variants are not significant learning events in their
            // own right; they are bucketed under MaintenanceCycleCompleted but
            // still indexed by the memory/fact they touch, where applicable.
            // Matched exhaustively so new variants cause a compile error.
            ConsolidationEvent::MemoryStrengthened { memory_id, .. } => (
                LearningEventType::MaintenanceCycleCompleted,
                Some(memory_id.clone()),
                None,
                None,
            ),
            ConsolidationEvent::MemoryDecayed { memory_id, .. }
            | ConsolidationEvent::MemoryWeakened { memory_id, .. }
            | ConsolidationEvent::SalienceSpikeDetected { memory_id, .. } => (
                LearningEventType::MaintenanceCycleCompleted,
                Some(memory_id.clone()),
                None,
                None,
            ),
            // Edge lifecycle events touch two memories — index under both.
            ConsolidationEvent::EdgeFormed {
                from_memory_id,
                to_memory_id,
                ..
            }
            | ConsolidationEvent::EdgeStrengthened {
                from_memory_id,
                to_memory_id,
                ..
            }
            | ConsolidationEvent::EdgePruned {
                from_memory_id,
                to_memory_id,
                ..
            } => (
                LearningEventType::MaintenanceCycleCompleted,
                Some(from_memory_id.clone()),
                Some(to_memory_id.clone()),
                None,
            ),
            ConsolidationEvent::FactDecayed { fact_id, .. } => (
                LearningEventType::MaintenanceCycleCompleted,
                None,
                None,
                Some(fact_id.clone()),
            ),
            // Pattern/cluster events carry no single primary memory or fact.
            ConsolidationEvent::RetrievalCompetition { .. }
            | ConsolidationEvent::PatternTriggeredReplay { .. }
            | ConsolidationEvent::EntityPatternDetected { .. }
            | ConsolidationEvent::SemanticClusterFormed { .. }
            | ConsolidationEvent::TemporalClusterFormed { .. }
            | ConsolidationEvent::BehavioralChangeDetected { .. }
            | ConsolidationEvent::PatternDetected { .. } => (
                LearningEventType::MaintenanceCycleCompleted,
                None,
                None,
                None,
            ),
            // Memory-Edge Tier Coupling events
            ConsolidationEvent::EdgePromotionBoostApplied { memory_id, .. } => (
                LearningEventType::MaintenanceCycleCompleted,
                Some(memory_id.clone()),
                None,
                None,
            ),
            ConsolidationEvent::GraphOrphanDetected { memory_id, .. } => (
                LearningEventType::MaintenanceCycleCompleted,
                Some(memory_id.clone()),
                None,
                None,
            ),
            ConsolidationEvent::GraphAdjustedPromotion { memory_id, .. } => (
                LearningEventType::MaintenanceCycleCompleted,
                Some(memory_id.clone()),
                None,
                None,
            ),
            ConsolidationEvent::GraphDecayConsolidated { .. } => (
                LearningEventType::MaintenanceCycleCompleted,
                None,
                None,
                None,
            ),
        }
    }

352    /// Query learning events since a timestamp
353    pub fn events_since(
354        &self,
355        user_id: &str,
356        since: DateTime<Utc>,
357    ) -> Result<Vec<StoredLearningEvent>> {
358        let since_nanos = since.timestamp_nanos_opt().unwrap_or_else(|| {
359            tracing::warn!("events_since timestamp outside i64 nanos range, using 0");
360            0
361        });
362        let prefix = format!("learning:{}:", user_id);
363        let start_key = format!("learning:{}:{:020}", user_id, since_nanos);
364
365        let mut events = Vec::new();
366        let iter = self.db.iterator(IteratorMode::From(
367            start_key.as_bytes(),
368            rocksdb::Direction::Forward,
369        ));
370
371        for item in iter {
372            let (key, value) = item?;
373            let key_str = String::from_utf8_lossy(&key);
374
375            // Stop if we've moved past this user's events
376            if !key_str.starts_with(&prefix) {
377                break;
378            }
379
380            if let Ok(event) = rmp_serde::from_slice::<StoredLearningEvent>(&value) {
381                events.push(event);
382            }
383        }
384
385        Ok(events)
386    }
387
388    /// Query learning events in a time range
389    pub fn events_in_range(
390        &self,
391        user_id: &str,
392        since: DateTime<Utc>,
393        until: DateTime<Utc>,
394    ) -> Result<Vec<StoredLearningEvent>> {
395        let since_nanos = since.timestamp_nanos_opt().unwrap_or_else(|| {
396            tracing::warn!("events_in_range since timestamp outside i64 nanos range, using 0");
397            0
398        });
399        let until_nanos = until.timestamp_nanos_opt().unwrap_or_else(|| {
400            tracing::warn!(
401                "events_in_range until timestamp outside i64 nanos range, using i64::MAX"
402            );
403            i64::MAX
404        });
405        let prefix = format!("learning:{}:", user_id);
406        let start_key = format!("learning:{}:{:020}", user_id, since_nanos);
407        let end_key = format!("learning:{}:{:020}", user_id, until_nanos);
408
409        let mut events = Vec::new();
410        let iter = self.db.iterator(IteratorMode::From(
411            start_key.as_bytes(),
412            rocksdb::Direction::Forward,
413        ));
414
415        for item in iter {
416            let (key, value) = item?;
417            let key_str = String::from_utf8_lossy(&key);
418
419            if !key_str.starts_with(&prefix) {
420                break;
421            }
422            // Stop once we pass the upper bound key
423            if key_str.as_ref() > end_key.as_str() {
424                break;
425            }
426
427            if let Ok(event) = rmp_serde::from_slice::<StoredLearningEvent>(&value) {
428                events.push(event);
429            }
430        }
431
432        Ok(events)
433    }
434
435    /// Get learning events for a specific memory
436    pub fn events_for_memory(
437        &self,
438        user_id: &str,
439        memory_id: &str,
440        limit: usize,
441    ) -> Result<Vec<StoredLearningEvent>> {
442        let prefix = format!("learning_by_memory:{}:{}:", user_id, memory_id);
443        let mut events = Vec::new();
444
445        let iter = self.db.prefix_iterator(prefix.as_bytes());
446        for item in iter.take(limit) {
447            let (key, value) = item?;
448            let key_str = String::from_utf8_lossy(&key);
449
450            if !key_str.starts_with(&prefix) {
451                break;
452            }
453
454            // Value is the primary key - fetch the actual event
455            let primary_key = String::from_utf8_lossy(&value);
456            if let Some(event_data) = self.db.get(primary_key.as_bytes())? {
457                if let Ok(event) = rmp_serde::from_slice::<StoredLearningEvent>(&event_data) {
458                    events.push(event);
459                }
460            }
461        }
462
463        Ok(events)
464    }
465
466    /// Get learning events for a specific fact
467    pub fn events_for_fact(
468        &self,
469        user_id: &str,
470        fact_id: &str,
471        limit: usize,
472    ) -> Result<Vec<StoredLearningEvent>> {
473        let prefix = format!("learning_by_fact:{}:{}:", user_id, fact_id);
474        let mut events = Vec::new();
475
476        let iter = self.db.prefix_iterator(prefix.as_bytes());
477        for item in iter.take(limit) {
478            let (key, value) = item?;
479            let key_str = String::from_utf8_lossy(&key);
480
481            if !key_str.starts_with(&prefix) {
482                break;
483            }
484
485            let primary_key = String::from_utf8_lossy(&value);
486            if let Some(event_data) = self.db.get(primary_key.as_bytes())? {
487                if let Ok(event) = rmp_serde::from_slice::<StoredLearningEvent>(&event_data) {
488                    events.push(event);
489                }
490            }
491        }
492
493        Ok(events)
494    }
495
496    /// Calculate learning velocity for a memory
497    ///
498    /// Returns a velocity score based on recent learning activity.
499    /// Higher velocity = more actively being learned = more relevant.
500    pub fn memory_learning_velocity(
501        &self,
502        user_id: &str,
503        memory_id: &str,
504        window_hours: i64,
505    ) -> Result<LearningVelocity> {
506        let since = Utc::now() - Duration::hours(window_hours);
507        let events = self.events_for_memory(user_id, memory_id, 100)?;
508
509        let recent_events: Vec<_> = events
510            .into_iter()
511            .filter(|e| e.event.timestamp() >= since)
512            .collect();
513
514        if recent_events.is_empty() {
515            return Ok(LearningVelocity::default());
516        }
517
518        let event_count = recent_events.len();
519        let event_types: Vec<_> = recent_events.iter().map(|e| e.event_type).collect();
520        let last_event = recent_events.iter().map(|e| e.event.timestamp()).max();
521
522        // Calculate velocity score with recency weighting
523        let now = Utc::now();
524        let mut weighted_sum = 0.0;
525        for event in &recent_events {
526            let age_hours = (now - event.event.timestamp()).num_hours() as f32;
527            // Exponential decay: recent events weighted more heavily
528            let weight = (-age_hours / window_hours as f32).exp();
529            // Potentiation and interference events are more significant
530            let type_weight = match event.event_type {
531                LearningEventType::EdgePotentiated => 2.0,
532                LearningEventType::InterferenceDetected => 1.5,
533                LearningEventType::FactExtracted => 1.5,
534                _ => 1.0,
535            };
536            weighted_sum += weight * type_weight;
537        }
538
539        // Normalize to events per day
540        let velocity_score = weighted_sum * (24.0 / window_hours as f32);
541
542        Ok(LearningVelocity {
543            event_count,
544            event_types,
545            last_event,
546            velocity_score,
547        })
548    }
549
550    /// Calculate learning velocity for a fact
551    pub fn fact_learning_velocity(
552        &self,
553        user_id: &str,
554        fact_id: &str,
555        window_days: i64,
556    ) -> Result<LearningVelocity> {
557        let since = Utc::now() - Duration::days(window_days);
558        let events = self.events_for_fact(user_id, fact_id, 100)?;
559
560        let recent_events: Vec<_> = events
561            .into_iter()
562            .filter(|e| e.event.timestamp() >= since)
563            .collect();
564
565        if recent_events.is_empty() {
566            return Ok(LearningVelocity::default());
567        }
568
569        let event_count = recent_events.len();
570        let event_types: Vec<_> = recent_events.iter().map(|e| e.event_type).collect();
571        let last_event = recent_events.iter().map(|e| e.event.timestamp()).max();
572
573        // Reinforcement events are the key signal for facts
574        let reinforcement_count = recent_events
575            .iter()
576            .filter(|e| e.event_type == LearningEventType::FactReinforced)
577            .count();
578
579        // Velocity = reinforcements per day
580        let velocity_score = reinforcement_count as f32 / window_days as f32;
581
582        Ok(LearningVelocity {
583            event_count,
584            event_types,
585            last_event,
586            velocity_score,
587        })
588    }
589
590    /// Get aggregated learning statistics
591    pub fn stats(&self, user_id: &str) -> Result<LearningStats> {
592        let now = Utc::now();
593        let day_ago = now - Duration::hours(24);
594        let week_ago = now - Duration::days(7);
595
596        // Get all events (could be optimized with counters)
597        let all_events = self.events_since(user_id, now - Duration::days(365))?;
598
599        let mut stats = LearningStats::default();
600        stats.total_events = all_events.len();
601
602        let mut memory_event_counts: HashMap<String, usize> = HashMap::new();
603
604        for event in &all_events {
605            // Count by type
606            *stats
607                .events_by_type
608                .entry(event.event_type.as_str().to_string())
609                .or_insert(0) += 1;
610
611            // Count recent
612            let ts = event.event.timestamp();
613            if ts >= day_ago {
614                stats.events_last_24h += 1;
615            }
616            if ts >= week_ago {
617                stats.events_last_7d += 1;
618            }
619
620            // Track memory activity
621            if let Some(ref mem_id) = event.memory_id {
622                *memory_event_counts.entry(mem_id.clone()).or_insert(0) += 1;
623            }
624
625            // Count specific types
626            match event.event_type {
627                LearningEventType::EdgePotentiated => stats.potentiation_count += 1,
628                LearningEventType::InterferenceDetected => stats.interference_count += 1,
629                _ => {}
630            }
631        }
632
633        // Find most active memories
634        let mut memory_counts: Vec<_> = memory_event_counts.into_iter().collect();
635        memory_counts.sort_by(|a, b| b.1.cmp(&a.1));
636        stats.most_active_memories = memory_counts.into_iter().take(10).collect();
637
638        Ok(stats)
639    }
640
641    /// Check if a memory has recent learning activity
642    ///
643    /// Used for retrieval boosting - recently learned memories are more relevant
644    pub fn has_recent_learning(&self, user_id: &str, memory_id: &str, hours: i64) -> Result<bool> {
645        let velocity = self.memory_learning_velocity(user_id, memory_id, hours)?;
646        Ok(velocity.event_count > 0)
647    }
648
649    /// Get the recency boost factor for a memory
650    ///
651    /// Returns a multiplier (1.0 = no boost, up to ~1.3 for very active learning)
652    pub fn recency_boost(&self, user_id: &str, memory_id: &str) -> Result<f32> {
653        // Check 24-hour window
654        let velocity = self.memory_learning_velocity(user_id, memory_id, 24)?;
655
656        // Base boost from having any recent learning
657        let mut boost = 1.0;
658
659        if velocity.event_count > 0 {
660            // Base boost for any recent activity
661            boost += 0.05;
662
663            // Additional boost based on velocity (capped)
664            boost += (velocity.velocity_score * 0.05).min(0.15);
665
666            // Extra boost for potentiation (permanent learning)
667            if velocity
668                .event_types
669                .contains(&LearningEventType::EdgePotentiated)
670            {
671                boost += 0.1;
672            }
673        }
674
675        Ok(boost.min(1.3)) // Cap at 30% boost
676    }
677
678    /// Get recency boost for a fact
679    pub fn fact_recency_boost(&self, user_id: &str, fact_id: &str) -> Result<f32> {
680        let velocity = self.fact_learning_velocity(user_id, fact_id, 7)?;
681
682        let mut boost = 1.0;
683
684        if velocity.event_count > 0 {
685            boost += 0.05;
686            // Reinforcement velocity is key for facts
687            boost += (velocity.velocity_score * 0.1).min(0.2);
688        }
689
690        Ok(boost.min(1.25))
691    }
692
693    /// Prune old learning history (keep last N days)
694    pub fn prune_old_events(&self, user_id: &str, keep_days: i64) -> Result<usize> {
695        let cutoff = Utc::now() - Duration::days(keep_days);
696        let cutoff_nanos = cutoff.timestamp_nanos_opt().unwrap_or_else(|| {
697            tracing::warn!("prune cutoff timestamp outside i64 nanos range, using 0");
698            0
699        });
700
701        let prefix = format!("learning:{}:", user_id);
702        let mut batch = rocksdb::WriteBatch::default();
703        let mut deleted = 0;
704
705        let iter = self.db.prefix_iterator(prefix.as_bytes());
706        for item in iter {
707            let (key, value) = item?;
708            let key_str = String::from_utf8_lossy(&key);
709
710            if !key_str.starts_with(&prefix) {
711                break;
712            }
713
714            // Extract timestamp from key
715            if let Some(ts_str) = key_str.strip_prefix(&prefix) {
716                if let Ok(ts) = ts_str.parse::<i64>() {
717                    if ts < cutoff_nanos {
718                        // Delete primary key
719                        batch.delete(&key);
720
721                        // Also delete secondary index entries for this event
722                        if let Ok(stored) = rmp_serde::from_slice::<StoredLearningEvent>(&value) {
723                            let ts_fmt = format!("{:020}", ts);
724                            if let Some(ref mem_id) = stored.memory_id {
725                                batch.delete(
726                                    format!("learning_by_memory:{}:{}:{}", user_id, mem_id, ts_fmt)
727                                        .as_bytes(),
728                                );
729                            }
730                            if let Some(ref related_id) = stored.related_memory_id {
731                                batch.delete(
732                                    format!(
733                                        "learning_by_memory:{}:{}:{}",
734                                        user_id, related_id, ts_fmt
735                                    )
736                                    .as_bytes(),
737                                );
738                            }
739                            if let Some(ref f_id) = stored.fact_id {
740                                batch.delete(
741                                    format!("learning_by_fact:{}:{}:{}", user_id, f_id, ts_fmt)
742                                        .as_bytes(),
743                                );
744                            }
745                            batch.delete(
746                                format!(
747                                    "learning_by_type:{}:{}:{}",
748                                    user_id,
749                                    stored.event_type.as_str(),
750                                    ts_fmt
751                                )
752                                .as_bytes(),
753                            );
754                        }
755
756                        deleted += 1;
757                    }
758                }
759            }
760        }
761
762        if deleted > 0 {
763            self.db.write(batch)?;
764        }
765
766        Ok(deleted)
767    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// Open a fresh store on a temp RocksDB; the TempDir must outlive the store.
    fn create_test_store() -> (LearningHistoryStore, TempDir) {
        let temp_dir = TempDir::new().unwrap();
        let db = Arc::new(DB::open_default(temp_dir.path()).unwrap());
        (LearningHistoryStore::new(db), temp_dir)
    }

    /// A recorded event is visible to a time-windowed query with the
    /// correct denormalized event type.
    #[test]
    fn test_record_and_query() {
        let (store, _dir) = create_test_store();

        let event = ConsolidationEvent::EdgePotentiated {
            from_memory_id: "mem-1".to_string(),
            to_memory_id: "mem-2".to_string(),
            final_strength: 0.95,
            total_co_activations: 15,
            timestamp: Utc::now(),
        };

        store.record("user-1", &event).unwrap();

        let events = store
            .events_since("user-1", Utc::now() - Duration::hours(1))
            .unwrap();
        assert_eq!(events.len(), 1);
        assert_eq!(events[0].event_type, LearningEventType::EdgePotentiated);
    }

    /// Several replay events inside the window produce a positive velocity
    /// with the full event count.
    #[test]
    fn test_memory_velocity() {
        let (store, _dir) = create_test_store();

        // Record several events for a memory, spread over recent hours.
        for i in 0..5 {
            let event = ConsolidationEvent::MemoryReplayed {
                memory_id: "mem-1".to_string(),
                content_preview: "test".to_string(),
                activation_before: 0.5,
                activation_after: 0.55,
                replay_priority: 0.8,
                connected_memories_replayed: 2,
                timestamp: Utc::now() - Duration::hours(i),
            };
            store.record("user-1", &event).unwrap();
        }

        let velocity = store
            .memory_learning_velocity("user-1", "mem-1", 24)
            .unwrap();
        assert_eq!(velocity.event_count, 5);
        assert!(velocity.velocity_score > 0.0);
    }

    /// The boost is exactly 1.0 with no history and strictly above 1.0
    /// after any recent learning event.
    #[test]
    fn test_recency_boost() {
        let (store, _dir) = create_test_store();

        // No events = no boost
        let boost = store.recency_boost("user-1", "mem-1").unwrap();
        assert_eq!(boost, 1.0);

        // Add an event
        let event = ConsolidationEvent::MemoryReplayed {
            memory_id: "mem-1".to_string(),
            content_preview: "test".to_string(),
            activation_before: 0.5,
            activation_after: 0.55,
            replay_priority: 0.8,
            connected_memories_replayed: 2,
            timestamp: Utc::now(),
        };
        store.record("user-1", &event).unwrap();

        let boost = store.recency_boost("user-1", "mem-1").unwrap();
        assert!(boost > 1.0);
    }
}