//! conch_core — crate root (lib.rs): high-level memory database API.
1pub mod memory;
2pub mod store;
3pub mod embed;
4pub mod decay;
5pub mod recall;
6pub mod consolidate;
7pub mod importance;
8
9pub use memory::{Episode, ExportData, Fact, GraphNode, MemoryKind, MemoryRecord, MemoryStats, ProvenanceInfo, RememberResult, AuditEntry, VerifyResult, CorruptedMemory};
10pub use store::MemoryStore;
11pub use embed::{Embedder, EmbedError, FastEmbedder, SharedEmbedder, cosine_similarity};
12pub use decay::{run_decay, DecayResult};
13pub use recall::{recall, recall_with_tag_filter, RecallResult, RecallError};
14pub use consolidate::{consolidate, find_clusters, ConsolidateResult, ConsolidateCluster};
15pub use importance::{compute_importance, score_all as score_importance, list_importance, ImportanceInfo};
16
17use chrono::Duration;
18
/// High-level API wrapping storage + embeddings.
pub struct ConchDB {
    // SQLite-backed persistence layer (see the `store` module).
    store: MemoryStore,
    // Pluggable embedding backend; boxed so tests can inject mocks
    // (see `open_in_memory_with`).
    embedder: Box<dyn Embedder>,
    // Namespace that scopes every read/write issued through this handle.
    namespace: String,
}
25
/// Errors surfaced by the high-level [`ConchDB`] API.
#[derive(Debug, thiserror::Error)]
pub enum ConchError {
    /// Propagated storage/SQLite failure.
    #[error("database error: {0}")]
    Db(#[from] rusqlite::Error),
    /// Failure while computing an embedding.
    #[error("embedding error: {0}")]
    Embed(#[from] EmbedError),
    /// Caller supplied an invalid argument (e.g. a non-positive duration).
    #[error("invalid argument: {0}")]
    InvalidArgument(String),
}
35
36impl ConchDB {
37    pub fn open(path: &str) -> Result<Self, ConchError> {
38        Self::open_with_namespace(path, "default")
39    }
40
41    pub fn open_with_namespace(path: &str, namespace: &str) -> Result<Self, ConchError> {
42        let store = MemoryStore::open(path)?;
43        let embedder = embed::FastEmbedder::new()?;
44        Ok(Self { store, embedder: Box::new(embedder), namespace: namespace.to_string() })
45    }
46
47    pub fn open_in_memory_with(embedder: Box<dyn Embedder>) -> Result<Self, ConchError> {
48        Self::open_in_memory_with_namespace(embedder, "default")
49    }
50
51    pub fn open_in_memory_with_namespace(embedder: Box<dyn Embedder>, namespace: &str) -> Result<Self, ConchError> {
52        let store = MemoryStore::open_in_memory()?;
53        Ok(Self { store, embedder, namespace: namespace.to_string() })
54    }
55
    /// The namespace this handle scopes all reads and writes to.
    pub fn namespace(&self) -> &str {
        &self.namespace
    }

    /// Borrow the underlying [`MemoryStore`] (used by tests and advanced callers).
    pub fn store(&self) -> &MemoryStore {
        &self.store
    }
63
    /// Store a `subject relation object` fact with no tags.
    pub fn remember_fact(&self, subject: &str, relation: &str, object: &str) -> Result<MemoryRecord, ConchError> {
        self.remember_fact_with_tags(subject, relation, object, &[])
    }

    /// Store a fact with tags but no source tracking.
    pub fn remember_fact_with_tags(&self, subject: &str, relation: &str, object: &str, tags: &[String]) -> Result<MemoryRecord, ConchError> {
        self.remember_fact_full(subject, relation, object, tags, None, None, None)
    }
71
72    pub fn remember_fact_full(
73        &self, subject: &str, relation: &str, object: &str, tags: &[String],
74        source: Option<&str>, session_id: Option<&str>, channel: Option<&str>,
75    ) -> Result<MemoryRecord, ConchError> {
76        let text = format!("{subject} {relation} {object}");
77        let embedding = self.embedder.embed_one(&text)?;
78        let id = self.store.remember_fact_ns(subject, relation, object, Some(&embedding), tags, source, session_id, channel, &self.namespace)?;
79        Ok(self.store.get_memory(id)?.expect("just inserted"))
80    }
81
82    /// Upsert a fact: if a fact with the same subject+relation exists, update
83    /// its object. Otherwise insert a new fact.
84    /// Returns `(record, was_updated)`.
85    pub fn upsert_fact(&self, subject: &str, relation: &str, object: &str) -> Result<(MemoryRecord, bool), ConchError> {
86        self.upsert_fact_with_tags(subject, relation, object, &[])
87    }
88
89    pub fn upsert_fact_with_tags(&self, subject: &str, relation: &str, object: &str, tags: &[String]) -> Result<(MemoryRecord, bool), ConchError> {
90        let text = format!("{subject} {relation} {object}");
91        let embedding = self.embedder.embed_one(&text)?;
92        let (id, was_updated) = self.store.upsert_fact_ns(subject, relation, object, Some(&embedding), tags, None, None, None, &self.namespace)?;
93        Ok((self.store.get_memory(id)?.expect("just upserted"), was_updated))
94    }
95
    /// Store a free-text episode with no tags.
    pub fn remember_episode(&self, text: &str) -> Result<MemoryRecord, ConchError> {
        self.remember_episode_with_tags(text, &[])
    }

    /// Store an episode with tags but no source tracking.
    pub fn remember_episode_with_tags(&self, text: &str, tags: &[String]) -> Result<MemoryRecord, ConchError> {
        self.remember_episode_full(text, tags, None, None, None)
    }
103
104    pub fn remember_episode_full(
105        &self, text: &str, tags: &[String],
106        source: Option<&str>, session_id: Option<&str>, channel: Option<&str>,
107    ) -> Result<MemoryRecord, ConchError> {
108        let embedding = self.embedder.embed_one(text)?;
109        let id = self.store.remember_episode_ns(text, Some(&embedding), tags, source, session_id, channel, &self.namespace)?;
110        Ok(self.store.get_memory(id)?.expect("just inserted"))
111    }
112
    // ── Dedup-aware remember ──────────────────────────────────

    /// Cosine similarity threshold for dedup. Memories with similarity > this
    /// value are considered duplicates and merged instead of inserted.
    const DEDUP_SIMILARITY_THRESHOLD: f32 = 0.95;

    /// Strength boost applied when reinforcing a duplicate memory.
    const DEDUP_REINFORCE_BOOST: f64 = 0.10;

    /// Check if a new embedding is a duplicate of any existing memory.
    /// Returns the (id, similarity) of the best match above threshold, if any.
    fn find_duplicate(&self, embedding: &[f32]) -> Result<Option<(i64, f32)>, ConchError> {
        // Sentinel -1 excludes no real row (ids are assumed positive —
        // TODO confirm against the store's id allocation).
        self.find_duplicate_excluding(embedding, -1)
    }
127
128    fn find_duplicate_excluding(&self, embedding: &[f32], exclude_id: i64) -> Result<Option<(i64, f32)>, ConchError> {
129        let all = self.store.all_embeddings_ns(&self.namespace)?;
130        let mut best: Option<(i64, f32)> = None;
131        for (id, existing_emb) in &all {
132            if *id == exclude_id {
133                continue;
134            }
135            let sim = cosine_similarity(embedding, existing_emb);
136            if sim > Self::DEDUP_SIMILARITY_THRESHOLD {
137                if best.is_none() || sim > best.unwrap().1 {
138                    best = Some((*id, sim));
139                }
140            }
141        }
142        Ok(best)
143    }
144
    /// Store a fact with dedup check. If a near-duplicate exists (cosine sim > 0.95),
    /// the existing memory is reinforced instead of creating a new one.
    /// See [`ConchDB::remember_fact_dedup_full`] for the full pipeline.
    pub fn remember_fact_dedup(&self, subject: &str, relation: &str, object: &str) -> Result<RememberResult, ConchError> {
        self.remember_fact_dedup_with_tags(subject, relation, object, &[])
    }

    /// Store a fact with dedup check and tags (no source tracking).
    pub fn remember_fact_dedup_with_tags(&self, subject: &str, relation: &str, object: &str, tags: &[String]) -> Result<RememberResult, ConchError> {
        self.remember_fact_dedup_full(subject, relation, object, tags, None, None, None)
    }
155
    /// Store a fact with upsert + dedup check, tags, and source tracking.
    ///
    /// Pipeline:
    /// 1. If a fact with the same subject+relation exists, update its object (upsert).
    /// 2. Otherwise, check for near-duplicate embeddings (cosine sim > 0.95).
    /// 3. If neither, create a new fact.
    pub fn remember_fact_dedup_full(
        &self, subject: &str, relation: &str, object: &str, tags: &[String],
        source: Option<&str>, session_id: Option<&str>, channel: Option<&str>,
    ) -> Result<RememberResult, ConchError> {
        let text = format!("{subject} {relation} {object}");
        let embedding = self.embedder.embed_one(&text)?;

        // Step 1: Upsert — check for existing fact with same subject+relation
        let (id, was_updated) = self.store.upsert_fact_ns(
            subject, relation, object, Some(&embedding), tags, source, session_id, channel, &self.namespace,
        )?;
        if was_updated {
            let record = self.store.get_memory(id)?.expect("just upserted");
            return Ok(RememberResult::Updated(record));
        }
        // upsert_fact inserted a new row — but we should still check for dedup
        // against other memories. If we find a near-duplicate, delete the just-inserted
        // row and reinforce the duplicate instead.
        // NOTE(review): the insert above and the compensating delete below are
        // separate statements, not one transaction — a crash between them would
        // leave both rows. Confirm this is acceptable or wrap in a transaction.
        if let Some((existing_id, similarity)) = self.find_duplicate_excluding(&embedding, id)? {
            // Remove the just-inserted row and reinforce the duplicate
            self.store.forget_by_id(&id.to_string())?;
            self.store.reinforce_memory(existing_id, Self::DEDUP_REINFORCE_BOOST)?;
            let existing = self.store.get_memory(existing_id)?.expect("just reinforced");
            return Ok(RememberResult::Duplicate { existing, similarity });
        }

        let record = self.store.get_memory(id)?.expect("just inserted");
        Ok(RememberResult::Created(record))
    }
191
    /// Store an episode with dedup check. If a near-duplicate exists (cosine sim > 0.95),
    /// the existing memory is reinforced instead of creating a new one.
    pub fn remember_episode_dedup(&self, text: &str) -> Result<RememberResult, ConchError> {
        self.remember_episode_dedup_with_tags(text, &[])
    }

    /// Store an episode with dedup check and tags.
    /// See [`ConchDB::remember_episode_dedup_full`].
    pub fn remember_episode_dedup_with_tags(&self, text: &str, tags: &[String]) -> Result<RememberResult, ConchError> {
        self.remember_episode_dedup_full(text, tags, None, None, None)
    }
202
203    /// Store an episode with dedup check, tags, and source tracking.
204    pub fn remember_episode_dedup_full(
205        &self, text: &str, tags: &[String],
206        source: Option<&str>, session_id: Option<&str>, channel: Option<&str>,
207    ) -> Result<RememberResult, ConchError> {
208        let embedding = self.embedder.embed_one(text)?;
209
210        if let Some((existing_id, similarity)) = self.find_duplicate(&embedding)? {
211            self.store.reinforce_memory(existing_id, Self::DEDUP_REINFORCE_BOOST)?;
212            let existing = self.store.get_memory(existing_id)?.expect("just reinforced");
213            return Ok(RememberResult::Duplicate { existing, similarity });
214        }
215
216        let id = self.store.remember_episode_ns(text, Some(&embedding), tags, source, session_id, channel, &self.namespace)?;
217        let record = self.store.get_memory(id)?.expect("just inserted");
218        Ok(RememberResult::Created(record))
219    }
220
221    pub fn recall(&self, query: &str, limit: usize) -> Result<Vec<RecallResult>, ConchError> {
222        self.recall_with_tag(query, limit, None)
223    }
224
225    pub fn recall_with_tag(&self, query: &str, limit: usize, tag: Option<&str>) -> Result<Vec<RecallResult>, ConchError> {
226        recall::recall_with_tag_filter_ns(&self.store, query, self.embedder.as_ref(), limit, tag, &self.namespace)
227            .map_err(|e| match e {
228                RecallError::Db(e) => ConchError::Db(e),
229                RecallError::Embedding(msg) => ConchError::Embed(EmbedError::Other(msg)),
230            })
231    }
232
233    pub fn forget_by_subject(&self, subject: &str) -> Result<usize, ConchError> {
234        Ok(self.store.forget_by_subject_ns(subject, &self.namespace)?)
235    }
236
237    pub fn forget_by_id(&self, id: &str) -> Result<usize, ConchError> {
238        Ok(self.store.forget_by_id(id)?)
239    }
240
241    pub fn forget_older_than(&self, secs: i64) -> Result<usize, ConchError> {
242        if secs <= 0 {
243            return Err(ConchError::InvalidArgument(format!(
244                "older_than duration must be positive, got {secs}s"
245            )));
246        }
247        Ok(self.store.forget_older_than_ns(Duration::seconds(secs), &self.namespace)?)
248    }
249
    /// Run a decay pass over this namespace with default parameters
    /// (both tuning arguments left as `None`).
    pub fn decay(&self) -> Result<DecayResult, ConchError> {
        Ok(decay::run_decay_ns(&self.store, None, None, &self.namespace)?)
    }

    /// Aggregate statistics for this namespace.
    pub fn stats(&self) -> Result<MemoryStats, ConchError> {
        Ok(self.store.stats_ns(&self.namespace)?)
    }
257
258    pub fn embed_all(&self) -> Result<usize, ConchError> {
259        let missing = self.store.memories_missing_embeddings()?;
260        if missing.is_empty() {
261            return Ok(0);
262        }
263        let texts: Vec<String> = missing.iter().map(|m| m.text_for_embedding()).collect();
264        let text_refs: Vec<&str> = texts.iter().map(|s| s.as_str()).collect();
265        let embeddings = self.embedder.embed(&text_refs)?;
266        for (mem, emb) in missing.iter().zip(embeddings.iter()) {
267            self.store.update_embedding(mem.id, emb)?;
268        }
269        Ok(missing.len())
270    }
271
272    // ── Graph traversal ──────────────────────────────────────
273
274    /// Find all facts related to a subject via graph traversal up to `max_depth` hops.
275    /// Returns a list of GraphNodes with hop distance.
276    pub fn related(&self, subject: &str, max_depth: usize) -> Result<Vec<GraphNode>, ConchError> {
277        let max_depth = max_depth.min(3);
278        let mut result: Vec<GraphNode> = Vec::new();
279        let mut seen_ids = std::collections::HashSet::new();
280        // Entities to explore at each depth level
281        let mut frontier = vec![subject.to_string()];
282
283        for depth in 0..max_depth {
284            let mut next_frontier = Vec::new();
285            for entity in &frontier {
286                let facts = self.store.facts_involving(entity)?;
287                for fact in facts {
288                    if seen_ids.contains(&fact.id) {
289                        continue;
290                    }
291                    seen_ids.insert(fact.id);
292                    // Determine the connecting entity and the "other" entity for next hop
293                    let (connected_via, other_entity) = match &fact.kind {
294                        MemoryKind::Fact(f) => {
295                            if f.subject == *entity {
296                                (entity.clone(), f.object.clone())
297                            } else {
298                                (entity.clone(), f.subject.clone())
299                            }
300                        }
301                        _ => continue,
302                    };
303                    next_frontier.push(other_entity);
304                    result.push(GraphNode {
305                        memory: fact,
306                        depth,
307                        connected_via,
308                    });
309                }
310            }
311            frontier = next_frontier;
312        }
313
314        Ok(result)
315    }
316
    // ── Provenance ──────────────────────────────────────────

    /// Get provenance information for a memory by ID, including metadata and 1-hop related facts.
    ///
    /// Returns `Ok(None)` when no memory with `id` exists.
    pub fn why(&self, id: i64) -> Result<Option<ProvenanceInfo>, ConchError> {
        let mem = match self.store.get_memory(id)? {
            Some(m) => m,
            None => return Ok(None),
        };

        // Get 1-hop related facts if it's a fact
        let related = if let MemoryKind::Fact(ref f) = mem.kind {
            let mut nodes = Vec::new();
            let mut seen = std::collections::HashSet::new();
            // Exclude the memory itself from its own related list.
            seen.insert(mem.id);
            // Look outward from both endpoints of the fact.
            for entity in [&f.subject, &f.object] {
                let facts = self.store.facts_involving(entity)?;
                for fact in facts {
                    if seen.contains(&fact.id) {
                        continue;
                    }
                    seen.insert(fact.id);
                    nodes.push(GraphNode {
                        memory: fact,
                        depth: 0,
                        connected_via: entity.clone(),
                    });
                }
            }
            nodes
        } else {
            vec![]
        };

        // Ordering matters: metadata fields are copied/cloned out of `mem`
        // before `mem` itself is moved into the `memory` field.
        Ok(Some(ProvenanceInfo {
            created_at: mem.created_at.to_rfc3339(),
            last_accessed_at: mem.last_accessed_at.to_rfc3339(),
            access_count: mem.access_count,
            strength: mem.strength,
            source: mem.source.clone(),
            session_id: mem.session_id.clone(),
            channel: mem.channel.clone(),
            related,
            memory: mem,
        }))
    }
362
363    pub fn export(&self) -> Result<ExportData, ConchError> {
364        let memories = self.store.all_memories_ns(&self.namespace)?;
365        Ok(ExportData { memories })
366    }
367
368    pub fn consolidate(&self, dry_run: bool) -> Result<ConsolidateResult, ConchError> {
369        if dry_run {
370            let clusters = find_clusters(&self.store, None)?;
371            Ok(ConsolidateResult {
372                clusters: clusters.len(),
373                archived: clusters.iter().map(|c| c.duplicates.len()).sum(),
374                boosted: clusters.len(),
375            })
376        } else {
377            Ok(consolidate::consolidate(&self.store, None)?)
378        }
379    }
380
381    pub fn consolidate_clusters(&self) -> Result<Vec<ConsolidateCluster>, ConchError> {
382        Ok(find_clusters(&self.store, None)?)
383    }
384
    /// Recompute importance scores via `importance::score_all`; returns its count.
    pub fn score_importance(&self) -> Result<usize, ConchError> {
        Ok(importance::score_all(&self.store)?)
    }

    /// List importance information via `importance::list_importance`.
    pub fn list_importance(&self) -> Result<Vec<ImportanceInfo>, ConchError> {
        Ok(importance::list_importance(&self.store)?)
    }

    /// Manually override a memory's importance score.
    pub fn set_importance(&self, id: i64, importance: f64) -> Result<(), ConchError> {
        self.store.update_importance(id, importance)?;
        Ok(())
    }
397
398    pub fn import(&self, data: &ExportData) -> Result<usize, ConchError> {
399        let mut count = 0;
400        for mem in &data.memories {
401            let created = mem.created_at.to_rfc3339();
402            let accessed = mem.last_accessed_at.to_rfc3339();
403            match &mem.kind {
404                MemoryKind::Fact(f) => {
405                    self.store.import_fact_ns(
406                        &f.subject, &f.relation, &f.object,
407                        mem.strength, mem.embedding.as_deref(),
408                        &created, &accessed, mem.access_count,
409                        &mem.tags,
410                        mem.source.as_deref(),
411                        mem.session_id.as_deref(),
412                        mem.channel.as_deref(),
413                        &self.namespace,
414                    )?;
415                }
416                MemoryKind::Episode(e) => {
417                    self.store.import_episode_ns(
418                        &e.text, mem.strength, mem.embedding.as_deref(),
419                        &created, &accessed, mem.access_count,
420                        &mem.tags,
421                        mem.source.as_deref(),
422                        mem.session_id.as_deref(),
423                        mem.channel.as_deref(),
424                        &self.namespace,
425                    )?;
426                }
427            }
428            count += 1;
429        }
430        Ok(count)
431    }
432
    // ── Security: Audit Log ─────────────────────────────────

    /// Fetch up to `limit` audit-log entries, optionally filtered by memory id
    /// and/or actor.
    pub fn audit_log(&self, limit: usize, memory_id: Option<i64>, actor: Option<&str>) -> Result<Vec<AuditEntry>, ConchError> {
        Ok(self.store.get_audit_log(limit, memory_id, actor)?)
    }

    // ── Security: Verify ────────────────────────────────────

    /// Run the store's integrity verification for this namespace.
    pub fn verify(&self) -> Result<VerifyResult, ConchError> {
        Ok(self.store.verify_integrity_ns(&self.namespace)?)
    }
}
445
446#[cfg(test)]
447mod tests {
448    use super::*;
449    use crate::embed::{EmbedError, Embedding};
450
451
    /// Mock embedder where all texts produce the exact same embedding.
    /// This guarantees cosine similarity = 1.0 for any pair of texts.
    struct IdenticalEmbedder;

    impl Embedder for IdenticalEmbedder {
        // Every text maps to the same unit basis vector, so any two
        // embeddings compare as exact duplicates under cosine similarity.
        fn embed(&self, texts: &[&str]) -> Result<Vec<Embedding>, EmbedError> {
            Ok(texts.iter().map(|_| vec![1.0, 0.0, 0.0, 0.0]).collect())
        }

        fn dimension(&self) -> usize { 4 }
    }
463
    /// Mock embedder that produces orthogonal embeddings for each call.
    /// This guarantees cosine similarity = 0.0 between different texts.
    struct OrthogonalEmbedder {
        // Monotonic counter selecting which basis vector the next call returns.
        counter: std::sync::atomic::AtomicUsize,
    }

    impl OrthogonalEmbedder {
        fn new() -> Self {
            Self { counter: std::sync::atomic::AtomicUsize::new(0) }
        }
    }

    impl Embedder for OrthogonalEmbedder {
        // Each call yields a distinct standard basis vector in 8-dim space.
        // NOTE: after 8 embeddings the basis wraps (`i % 8`) and vectors repeat,
        // so orthogonality only holds for the first 8 texts.
        fn embed(&self, texts: &[&str]) -> Result<Vec<Embedding>, EmbedError> {
            Ok(texts.iter().map(|_| {
                let i = self.counter.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
                let mut emb = vec![0.0; 8];
                emb[i % 8] = 1.0;
                emb
            }).collect())
        }

        fn dimension(&self) -> usize { 8 }
    }
488
    #[test]
    fn dedup_detects_identical_embedding() {
        // All texts produce the same embedding. For facts with same subject+relation,
        // upsert takes priority over dedup.
        let db = ConchDB::open_in_memory_with(Box::new(IdenticalEmbedder)).unwrap();

        let r1 = db.remember_fact_dedup("Jared", "likes", "Rust").unwrap();
        assert!(!r1.is_duplicate(), "first insert should not be duplicate");
        assert!(!r1.is_updated(), "first insert should not be updated");

        // Same subject+relation → upsert (not dedup)
        let r2 = db.remember_fact_dedup("Jared", "likes", "Rust").unwrap();
        assert!(r2.is_updated(), "second identical fact should be upserted");

        // Only 1 memory should exist in the database
        let stats = db.stats().unwrap();
        assert_eq!(stats.total_memories, 1, "should have 1 memory, not 2");
    }

    #[test]
    fn dedup_detects_identical_episode_embedding() {
        // Episodes don't have upsert, so dedup should still work.
        let db = ConchDB::open_in_memory_with(Box::new(IdenticalEmbedder)).unwrap();

        let r1 = db.remember_episode_dedup("Meeting notes from standup").unwrap();
        assert!(!r1.is_duplicate(), "first insert should not be duplicate");

        let r2 = db.remember_episode_dedup("Meeting notes from standup").unwrap();
        assert!(r2.is_duplicate(), "second identical episode should be duplicate");

        // Destructure to inspect the reported similarity.
        if let RememberResult::Duplicate { similarity, .. } = r2 {
            assert!(similarity > 0.95, "similarity should be > 0.95, got {similarity}");
        }

        let stats = db.stats().unwrap();
        assert_eq!(stats.total_memories, 1, "should have 1 memory, not 2");
    }

    #[test]
    fn dedup_allows_different_memories() {
        // Orthogonal embeddings → cosine sim = 0.0 → both should be inserted.
        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();

        let r1 = db.remember_fact_dedup("Jared", "likes", "Rust").unwrap();
        assert!(!r1.is_duplicate());

        let r2 = db.remember_episode_dedup("Had coffee this morning").unwrap();
        assert!(!r2.is_duplicate());

        let stats = db.stats().unwrap();
        assert_eq!(stats.total_memories, 2, "both memories should be stored");
    }

    #[test]
    fn dedup_reinforces_strength_and_bumps_access_count() {
        let db = ConchDB::open_in_memory_with(Box::new(IdenticalEmbedder)).unwrap();

        // First insert: strength = 1.0, access_count = 0
        let r1 = db.remember_episode_dedup("Meeting notes from standup").unwrap();
        let initial = r1.memory().clone();
        assert_eq!(initial.access_count, 0);

        // Second insert: duplicate detected → reinforced
        let r2 = db.remember_episode_dedup("Meeting notes from standup").unwrap();
        assert!(r2.is_duplicate());
        let reinforced = r2.memory();
        assert_eq!(reinforced.id, initial.id, "should reinforce same memory");
        assert_eq!(reinforced.access_count, initial.access_count + 1);
        // strength should still be 1.0 (was 1.0 + 0.10 clamped to 1.0)
        assert!((reinforced.strength - 1.0).abs() < f64::EPSILON);
    }
560
    #[test]
    fn dedup_reinforces_decayed_memory() {
        let db = ConchDB::open_in_memory_with(Box::new(IdenticalEmbedder)).unwrap();

        let r1 = db.remember_episode_dedup("Important project context").unwrap();
        let id = r1.memory().id;

        // Manually decay the memory's strength (direct SQL, bypassing the API)
        db.store().conn().execute(
            "UPDATE memories SET strength = 0.5 WHERE id = ?1",
            rusqlite::params![id],
        ).unwrap();

        // Second insert should reinforce (0.5 + 0.10 = 0.6)
        let r2 = db.remember_episode_dedup("Important project context").unwrap();
        assert!(r2.is_duplicate());
        let reinforced = r2.memory();
        assert!((reinforced.strength - 0.6).abs() < 0.01,
            "strength should be ~0.6 after reinforcement, got {}", reinforced.strength);
    }

    #[test]
    fn dedup_episode_detected_as_duplicate_of_fact() {
        // With IdenticalEmbedder, even a fact and episode will have same embedding
        let db = ConchDB::open_in_memory_with(Box::new(IdenticalEmbedder)).unwrap();

        let r1 = db.remember_fact_dedup("Jared", "prefers", "Rust").unwrap();
        assert!(!r1.is_duplicate());

        // Episode with same embedding should be detected as duplicate
        let r2 = db.remember_episode_dedup("Jared prefers Rust").unwrap();
        assert!(r2.is_duplicate(), "episode matching a fact should be detected as duplicate");
        assert_eq!(r2.memory().id, r1.memory().id);
    }

    #[test]
    fn dedup_with_empty_db_always_creates() {
        let db = ConchDB::open_in_memory_with(Box::new(IdenticalEmbedder)).unwrap();

        let r1 = db.remember_fact_dedup("first", "memory", "ever").unwrap();
        assert!(!r1.is_duplicate(), "first memory in empty DB should always be created");
    }

    #[test]
    fn remember_result_memory_accessor() {
        // memory() must return a record regardless of Created/Updated variant.
        let db = ConchDB::open_in_memory_with(Box::new(IdenticalEmbedder)).unwrap();

        let r1 = db.remember_fact_dedup("A", "B", "C").unwrap();
        assert!(r1.memory().id > 0);

        let r2 = db.remember_fact_dedup("A", "B", "C").unwrap();
        assert!(r2.memory().id > 0);
        assert_eq!(r1.memory().id, r2.memory().id);
    }
615
    // The following tests exercise MemoryStore directly, bypassing ConchDB.

    #[test]
    fn store_all_embeddings_returns_correct_count() {
        let store = MemoryStore::open_in_memory().unwrap();
        store.remember_fact("A", "B", "C", Some(&[1.0, 0.0])).unwrap();
        store.remember_episode("test", Some(&[0.0, 1.0])).unwrap();
        store.remember_episode("no embedding", None).unwrap();

        let embeddings = store.all_embeddings().unwrap();
        assert_eq!(embeddings.len(), 2, "should only return memories with embeddings");
    }

    #[test]
    fn store_reinforce_memory_boosts_strength() {
        let store = MemoryStore::open_in_memory().unwrap();
        let id = store.remember_fact("A", "B", "C", Some(&[1.0, 0.0])).unwrap();

        // Manually set low strength
        store.conn().execute(
            "UPDATE memories SET strength = 0.3 WHERE id = ?1",
            rusqlite::params![id],
        ).unwrap();

        store.reinforce_memory(id, 0.10).unwrap();

        let mem = store.get_memory(id).unwrap().unwrap();
        assert!((mem.strength - 0.4).abs() < 0.01, "strength should be ~0.4, got {}", mem.strength);
        assert_eq!(mem.access_count, 1);
    }

    #[test]
    fn store_reinforce_memory_clamps_to_1() {
        let store = MemoryStore::open_in_memory().unwrap();
        let id = store.remember_fact("A", "B", "C", Some(&[1.0, 0.0])).unwrap();

        // strength starts at 1.0, boost by 0.5 should still be 1.0
        store.reinforce_memory(id, 0.5).unwrap();

        let mem = store.get_memory(id).unwrap().unwrap();
        assert!((mem.strength - 1.0).abs() < f64::EPSILON, "strength should be clamped to 1.0");
    }
656
    // ── Upsert integration tests ────────────────────────────

    #[test]
    fn upsert_via_dedup_updates_existing_fact() {
        // Orthogonal embedder ensures dedup doesn't fire; only upsert should trigger
        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();

        let r1 = db.remember_fact_dedup_full("Jared", "favorite_color", "blue", &[], None, None, None).unwrap();
        assert!(!r1.is_duplicate());
        assert!(!r1.is_updated());

        let r2 = db.remember_fact_dedup_full("Jared", "favorite_color", "green", &[], None, None, None).unwrap();
        assert!(r2.is_updated(), "same subject+relation should trigger upsert");

        // The upsert replaces the object while keeping the same row.
        let mem = r2.memory();
        if let MemoryKind::Fact(f) = &mem.kind {
            assert_eq!(f.object, "green", "object should be updated to green");
        } else { panic!("expected fact"); }

        // Should still only have 1 memory
        let stats = db.stats().unwrap();
        assert_eq!(stats.total_memories, 1);
    }

    #[test]
    fn upsert_different_subject_creates_new() {
        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();

        db.remember_fact_dedup_full("Jared", "likes", "Rust", &[], None, None, None).unwrap();
        let r2 = db.remember_fact_dedup_full("Alice", "likes", "Python", &[], None, None, None).unwrap();
        assert!(!r2.is_updated(), "different subject should not trigger upsert");
        assert!(!r2.is_duplicate(), "orthogonal embeddings should not trigger dedup");

        assert_eq!(db.stats().unwrap().total_memories, 2);
    }
692
    // ── Graph traversal tests ───────────────────────────────

    /// Helper: create a ConchDB with OrthogonalEmbedder and insert a chain of facts.
    fn setup_graph_db() -> ConchDB {
        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
        // Chain: Alice -> knows -> Bob -> works_at -> Acme -> located_in -> NYC
        db.remember_fact("Alice", "knows", "Bob").unwrap();
        db.remember_fact("Bob", "works_at", "Acme").unwrap();
        db.remember_fact("Acme", "located_in", "NYC").unwrap();
        // Extra connection: Alice -> lives_in -> NYC (creates a cycle)
        db.remember_fact("Alice", "lives_in", "NYC").unwrap();
        db
    }

    #[test]
    fn related_finds_direct_connections() {
        let db = setup_graph_db();
        let nodes = db.related("Alice", 1).unwrap();
        // Depth 1: Alice -> knows -> Bob, Alice -> lives_in -> NYC
        assert_eq!(nodes.len(), 2, "Alice should have 2 direct connections, got {}", nodes.len());
        for node in &nodes {
            // `depth` is 0-based: direct neighbors report hop 0.
            assert_eq!(node.depth, 0, "all nodes at depth 1 traversal should be hop 0");
        }
    }

    #[test]
    fn related_finds_2hop_chain() {
        let db = setup_graph_db();
        let nodes = db.related("Alice", 2).unwrap();
        let nodes_1 = db.related("Alice", 1).unwrap();
        assert!(nodes.len() > nodes_1.len(), "depth 2 should find more nodes than depth 1");

        // Verify we have both depth 0 and depth 1 nodes
        let hop0: Vec<_> = nodes.iter().filter(|n| n.depth == 0).collect();
        let hop1: Vec<_> = nodes.iter().filter(|n| n.depth == 1).collect();
        assert!(!hop0.is_empty(), "should have hop 0 nodes");
        assert!(!hop1.is_empty(), "should have hop 1 nodes");
    }

    #[test]
    fn related_respects_max_depth_cap() {
        let db = setup_graph_db();
        // Max depth is capped at 3
        let nodes_4 = db.related("Alice", 4).unwrap();
        let nodes_3 = db.related("Alice", 3).unwrap();
        assert_eq!(nodes_4.len(), nodes_3.len(), "depth 4 should be capped to 3");
    }

    #[test]
    fn related_no_duplicates() {
        // The cycle through NYC must not produce the same fact twice.
        let db = setup_graph_db();
        let nodes = db.related("Alice", 3).unwrap();
        let ids: Vec<i64> = nodes.iter().map(|n| n.memory.id).collect();
        let unique: std::collections::HashSet<i64> = ids.iter().cloned().collect();
        assert_eq!(ids.len(), unique.len(), "should have no duplicate memory IDs");
    }

    #[test]
    fn related_empty_for_unknown_subject() {
        let db = setup_graph_db();
        let nodes = db.related("UnknownEntity", 2).unwrap();
        assert!(nodes.is_empty(), "unknown entity should yield no results");
    }
756
757    #[test]
758    fn related_finds_reverse_connections() {
759        let db = setup_graph_db();
760        // Bob appears as object of "Alice knows Bob"
761        // and subject of "Bob works_at Acme"
762        let nodes = db.related("Bob", 1).unwrap();
763        assert!(nodes.len() >= 2, "Bob should be found as both subject and object, got {}", nodes.len());
764    }
765
766    // ── Provenance tests ────────────────────────────────────
767
768    #[test]
769    fn why_returns_full_provenance() {
770        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
771        let mem = db.remember_fact_full("Jared", "uses", "Rust", &["technical".to_string()],
772            Some("cli"), Some("sess-42"), Some("#dev")).unwrap();
773
774        let info = db.why(mem.id).unwrap().expect("should find memory");
775        assert_eq!(info.memory.id, mem.id);
776        assert_eq!(info.source.as_deref(), Some("cli"));
777        assert_eq!(info.session_id.as_deref(), Some("sess-42"));
778        assert_eq!(info.channel.as_deref(), Some("#dev"));
779        assert_eq!(info.access_count, 0);
780        assert!((info.strength - 1.0).abs() < f64::EPSILON);
781        assert_eq!(info.memory.tags, vec!["technical"]);
782    }
783
784    #[test]
785    fn why_includes_related_facts() {
786        let db = setup_graph_db();
787        // Get the "Alice knows Bob" fact
788        let nodes = db.related("Alice", 1).unwrap();
789        let alice_knows_bob = nodes.iter()
790            .find(|n| {
791                if let MemoryKind::Fact(f) = &n.memory.kind {
792                    f.subject == "Alice" && f.relation == "knows"
793                } else { false }
794            })
795            .expect("should find Alice knows Bob");
796
797        let info = db.why(alice_knows_bob.memory.id).unwrap().expect("should find memory");
798        // "Alice knows Bob" should have related facts via "Alice" and "Bob"
799        assert!(!info.related.is_empty(), "should have related facts");
800    }
801
802    #[test]
803    fn why_returns_none_for_missing_id() {
804        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
805        let result = db.why(99999).unwrap();
806        assert!(result.is_none(), "should return None for non-existent ID");
807    }
808
809    #[test]
810    fn why_episode_has_no_related() {
811        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
812        let mem = db.remember_episode("Had a meeting").unwrap();
813        let info = db.why(mem.id).unwrap().expect("should find episode");
814        assert!(info.related.is_empty(), "episodes should have no graph-related facts");
815    }
816
817    #[test]
818    fn provenance_json_serializable() {
819        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
820        db.remember_fact("A", "r", "B").unwrap();
821        db.remember_fact("B", "r", "C").unwrap();
822        let nodes = db.related("A", 1).unwrap();
823        let a_r_b = &nodes[0];
824        let info = db.why(a_r_b.memory.id).unwrap().unwrap();
825        let json = serde_json::to_string_pretty(&info).unwrap();
826        assert!(json.contains("memory"), "JSON should contain memory field");
827        assert!(json.contains("created_at"), "JSON should contain created_at");
828        assert!(json.contains("strength"), "JSON should contain strength");
829    }
830
831    // ── Security: Namespace isolation tests ──────────────────
832
833    #[test]
834    fn namespace_isolation_facts() {
835        // Test at the store level since we can't share store between ConchDB instances
836        let store = MemoryStore::open_in_memory().unwrap();
837        store.remember_fact_ns("X", "is", "A", None, &[], None, None, None, "ns-a").unwrap();
838        store.remember_fact_ns("Y", "is", "B", None, &[], None, None, None, "ns-b").unwrap();
839
840        let stats_a = store.stats_ns("ns-a").unwrap();
841        let stats_b = store.stats_ns("ns-b").unwrap();
842        assert_eq!(stats_a.total_memories, 1);
843        assert_eq!(stats_b.total_memories, 1);
844
845        // Default namespace should be empty
846        let stats_default = store.stats_ns("default").unwrap();
847        assert_eq!(stats_default.total_memories, 0, "default namespace should be empty");
848
849        // Namespace-scoped queries only return their own memories
850        let ns_a_mems = store.all_memories_ns("ns-a").unwrap();
851        let ns_b_mems = store.all_memories_ns("ns-b").unwrap();
852        assert_eq!(ns_a_mems.len(), 1);
853        assert_eq!(ns_b_mems.len(), 1);
854        assert_ne!(ns_a_mems[0].id, ns_b_mems[0].id);
855    }
856
857    #[test]
858    fn namespace_isolation_recall() {
859        let store = MemoryStore::open_in_memory().unwrap();
860        store.remember_fact_ns("Jared", "likes", "Rust", Some(&[1.0, 0.0]), &[], None, None, None, "ns-a").unwrap();
861        store.remember_fact_ns("Alice", "likes", "Python", Some(&[0.0, 1.0]), &[], None, None, None, "ns-b").unwrap();
862
863        let memories_a = store.all_memories_with_text_ns("ns-a").unwrap();
864        let memories_b = store.all_memories_with_text_ns("ns-b").unwrap();
865        assert_eq!(memories_a.len(), 1);
866        assert_eq!(memories_b.len(), 1);
867        assert_ne!(memories_a[0].0.id, memories_b[0].0.id);
868    }
869
870    #[test]
871    fn namespace_upsert_scoped() {
872        let store = MemoryStore::open_in_memory().unwrap();
873        store.upsert_fact_ns("Jared", "color", "blue", None, &[], None, None, None, "ns-a").unwrap();
874        store.upsert_fact_ns("Jared", "color", "red", None, &[], None, None, None, "ns-b").unwrap();
875
876        // Both should exist (different namespaces)
877        let all_a = store.all_memories_ns("ns-a").unwrap();
878        let all_b = store.all_memories_ns("ns-b").unwrap();
879        assert_eq!(all_a.len(), 1);
880        assert_eq!(all_b.len(), 1);
881        if let MemoryKind::Fact(f) = &all_a[0].kind { assert_eq!(f.object, "blue"); } else { panic!(); }
882        if let MemoryKind::Fact(f) = &all_b[0].kind { assert_eq!(f.object, "red"); } else { panic!(); }
883
884        // Upsert within ns-a should update only ns-a
885        store.upsert_fact_ns("Jared", "color", "green", None, &[], None, None, None, "ns-a").unwrap();
886        let all_a = store.all_memories_ns("ns-a").unwrap();
887        assert_eq!(all_a.len(), 1);
888        if let MemoryKind::Fact(f) = &all_a[0].kind { assert_eq!(f.object, "green"); } else { panic!(); }
889        // ns-b unchanged
890        let all_b = store.all_memories_ns("ns-b").unwrap();
891        if let MemoryKind::Fact(f) = &all_b[0].kind { assert_eq!(f.object, "red"); } else { panic!(); }
892    }
893
894    // ── Security: Audit log tests ───────────────────────────
895
896    #[test]
897    fn audit_log_records_remember() {
898        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
899        db.remember_fact("Jared", "likes", "Rust").unwrap();
900
901        let log = db.audit_log(10, None, None).unwrap();
902        assert!(!log.is_empty(), "audit log should have entries");
903        assert!(log.iter().any(|e| e.action == "remember"), "should have a remember action");
904    }
905
906    #[test]
907    fn audit_log_records_forget() {
908        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
909        let mem = db.remember_fact("Jared", "likes", "Rust").unwrap();
910        db.forget_by_id(&mem.id.to_string()).unwrap();
911
912        let log = db.audit_log(10, None, None).unwrap();
913        assert!(log.iter().any(|e| e.action == "forget"), "should have a forget action");
914    }
915
916    #[test]
917    fn audit_log_filter_by_memory_id() {
918        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
919        let m1 = db.remember_fact("A", "B", "C").unwrap();
920        db.remember_fact("D", "E", "F").unwrap();
921
922        let log = db.audit_log(10, Some(m1.id), None).unwrap();
923        for entry in &log {
924            assert_eq!(entry.memory_id, Some(m1.id));
925        }
926    }
927
928    // ── Security: Checksum & verify tests ───────────────────
929
930    #[test]
931    fn checksum_stored_on_remember() {
932        let store = MemoryStore::open_in_memory().unwrap();
933        let id = store.remember_fact("Jared", "likes", "Rust", None).unwrap();
934        let mem = store.get_memory(id).unwrap().unwrap();
935        assert!(mem.checksum.is_some(), "checksum should be set on remember");
936    }
937
938    #[test]
939    fn verify_passes_for_clean_data() {
940        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
941        db.remember_fact("Jared", "likes", "Rust").unwrap();
942        db.remember_episode("had coffee").unwrap();
943
944        let result = db.verify().unwrap();
945        assert_eq!(result.total_checked, 2);
946        assert_eq!(result.valid, 2);
947        assert!(result.corrupted.is_empty());
948        assert_eq!(result.missing_checksum, 0);
949    }
950
951    #[test]
952    fn verify_detects_corruption() {
953        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
954        let mem = db.remember_fact("Jared", "likes", "Rust").unwrap();
955
956        // Corrupt the data by changing the object directly in SQL
957        db.store().conn().execute(
958            "UPDATE memories SET object = 'Python' WHERE id = ?1",
959            rusqlite::params![mem.id],
960        ).unwrap();
961
962        let result = db.verify().unwrap();
963        assert_eq!(result.corrupted.len(), 1);
964        assert_eq!(result.corrupted[0].id, mem.id);
965    }
966
967    #[test]
968    fn verify_reports_missing_checksums() {
969        let store = MemoryStore::open_in_memory().unwrap();
970        store.remember_fact("Jared", "likes", "Rust", None).unwrap();
971        // Null out the checksum directly
972        store.conn().execute("UPDATE memories SET checksum = NULL", []).unwrap();
973
974        let result = store.verify_integrity().unwrap();
975        assert_eq!(result.missing_checksum, 1);
976    }
977
978    // ── Reliability regression tests (KR3) ─────────────────────────────────
979
980    /// Regression for issue #8: negative older_than_secs wiped all memories.
981    /// The core library must reject negative values and return an error.
982    #[test]
983    fn regression_forget_older_than_negative_secs_is_rejected() {
984        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
985        db.remember_fact("Jared", "builds", "Gen").unwrap();
986        db.remember_episode("important context that must survive").unwrap();
987
988        let result = db.forget_older_than(-100);
989        assert!(
990            result.is_err(),
991            "negative older_than secs must return Err, not silently delete memories"
992        );
993        match result.unwrap_err() {
994            ConchError::InvalidArgument(_) => {}
995            e => panic!("expected InvalidArgument, got {e:?}"),
996        }
997
998        // Memories must be untouched
999        let stats = db.stats().unwrap();
1000        assert_eq!(stats.total_memories, 2, "no memories should have been deleted");
1001    }
1002
1003    /// Boundary: zero seconds is also nonsensical and must be rejected.
1004    #[test]
1005    fn regression_forget_older_than_zero_secs_is_rejected() {
1006        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
1007        db.remember_fact("Alice", "knows", "Bob").unwrap();
1008
1009        let result = db.forget_older_than(0);
1010        assert!(result.is_err(), "zero older_than secs must return Err");
1011        match result.unwrap_err() {
1012            ConchError::InvalidArgument(_) => {}
1013            e => panic!("expected InvalidArgument, got {e:?}"),
1014        }
1015
1016        let stats = db.stats().unwrap();
1017        assert_eq!(stats.total_memories, 1, "memory must survive a rejected forget call");
1018    }
1019
1020    /// Positive duration must work normally (not a regression, just a sanity gate).
1021    #[test]
1022    fn forget_older_than_positive_secs_deletes_old_memories() {
1023        let db = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
1024        let id = db.remember_fact("Jared", "used", "Python").unwrap().id;
1025
1026        // Back-date the memory to 10 days ago
1027        let old_time = (chrono::Utc::now() - chrono::Duration::days(10)).to_rfc3339();
1028        db.store().conn().execute(
1029            "UPDATE memories SET created_at = ?1 WHERE id = ?2",
1030            rusqlite::params![old_time, id],
1031        ).unwrap();
1032
1033        // Forget memories older than 1 day (86400s)
1034        let deleted = db.forget_older_than(86_400).unwrap();
1035        assert_eq!(deleted, 1, "one old memory should be deleted");
1036        assert_eq!(db.stats().unwrap().total_memories, 0);
1037    }
1038
1039    /// P1 reliability: a stored fact must appear in recall results (false negative = P1 bug).
1040    #[test]
1041    fn recall_stored_fact_has_no_false_negative() {
1042        let db = ConchDB::open_in_memory_with(Box::new(IdenticalEmbedder)).unwrap();
1043        db.remember_fact("Jared", "plays", "trumpet").unwrap();
1044
1045        let results = db.recall("trumpet", 10).unwrap();
1046        assert!(
1047            !results.is_empty(),
1048            "recall must return at least one result for a stored fact"
1049        );
1050        let found = results.iter().any(|r| {
1051            matches!(&r.memory.kind, MemoryKind::Fact(f) if f.subject == "Jared" && f.object == "trumpet")
1052        });
1053        assert!(found, "the stored fact must appear in recall results — false negative is a P1 bug");
1054    }
1055
1056    /// P1 reliability: a stored episode must appear in recall results.
1057    #[test]
1058    fn recall_stored_episode_has_no_false_negative() {
1059        let db = ConchDB::open_in_memory_with(Box::new(IdenticalEmbedder)).unwrap();
1060        db.remember_episode("Jared submitted Gen to Y Combinator").unwrap();
1061
1062        let results = db.recall("Y Combinator", 10).unwrap();
1063        assert!(
1064            !results.is_empty(),
1065            "recall must return at least one result for a stored episode"
1066        );
1067        let found = results.iter().any(|r| {
1068            matches!(&r.memory.kind, MemoryKind::Episode(e) if e.text.contains("Y Combinator"))
1069        });
1070        assert!(found, "the stored episode must appear in recall results — false negative is a P1 bug");
1071    }
1072
1073    /// Export/import round-trip must preserve memory count across fact and episode kinds.
1074    #[test]
1075    fn export_import_round_trip_preserves_count() {
1076        let source = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
1077        source.remember_fact("Jared", "builds", "Gen").unwrap();
1078        source.remember_fact("Claw", "is", "a lobster").unwrap();
1079        source.remember_episode("Shipped conch v0.2 with 117 tests").unwrap();
1080
1081        let export = source.export().unwrap();
1082        assert_eq!(export.memories.len(), 3);
1083
1084        let dest = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
1085        let imported = dest.import(&export).unwrap();
1086        assert_eq!(imported, 3, "all 3 memories must be imported");
1087        assert_eq!(dest.stats().unwrap().total_memories, 3);
1088    }
1089
1090    /// Export/import round-trip must preserve field values faithfully.
1091    #[test]
1092    fn export_import_round_trip_preserves_field_values() {
1093        let source = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
1094        source.remember_fact("Jared", "works_at", "Microsoft").unwrap();
1095        source.remember_episode("Tortellini is Jared's dog").unwrap();
1096
1097        let export = source.export().unwrap();
1098        let dest = ConchDB::open_in_memory_with(Box::new(OrthogonalEmbedder::new())).unwrap();
1099        dest.import(&export).unwrap();
1100
1101        let all = dest.store().all_memories().unwrap();
1102        let fact = all.iter().find(|m| matches!(&m.kind, MemoryKind::Fact(f) if f.subject == "Jared")).unwrap();
1103        let episode = all.iter().find(|m| matches!(&m.kind, MemoryKind::Episode(e) if e.text.contains("Tortellini"))).unwrap();
1104
1105        if let MemoryKind::Fact(f) = &fact.kind {
1106            assert_eq!(f.subject, "Jared");
1107            assert_eq!(f.relation, "works_at");
1108            assert_eq!(f.object, "Microsoft");
1109        }
1110        if let MemoryKind::Episode(e) = &episode.kind {
1111            assert!(e.text.contains("Tortellini"), "episode text must survive round-trip");
1112        }
1113        // Strength must survive (default is 1.0)
1114        assert!((fact.strength - 1.0).abs() < f64::EPSILON, "strength must be preserved through export/import");
1115    }
1116}