use alaya::*;
mod common;
/// Scripted `ConsolidationProvider` for tests: returns canned knowledge
/// and impressions regardless of the input it is given.
struct TestProvider {
// Semantic nodes returned verbatim by `extract_knowledge`.
knowledge: Vec<NewSemanticNode>,
// Impressions returned verbatim by `extract_impressions`.
impressions: Vec<NewImpression>,
}
impl TestProvider {
/// Provider that emits the given semantic nodes and no impressions.
fn with_knowledge(knowledge: Vec<NewSemanticNode>) -> Self {
Self {
knowledge,
impressions: Vec::new(),
}
}
/// Provider that emits the given impressions and no semantic nodes.
fn with_impressions(impressions: Vec<NewImpression>) -> Self {
Self {
impressions,
knowledge: Vec::new(),
}
}
}
impl ConsolidationProvider for TestProvider {
/// Ignores the episodes and returns the canned knowledge list.
fn extract_knowledge(&self, _episodes: &[Episode]) -> alaya::Result<Vec<NewSemanticNode>> {
Ok(self.knowledge.clone())
}
/// Ignores the interaction and returns the canned impressions.
fn extract_impressions(&self, _interaction: &Interaction) -> alaya::Result<Vec<NewImpression>> {
Ok(self.impressions.clone())
}
/// Tests never exercise contradiction handling: always reports none.
fn detect_contradiction(&self, _a: &SemanticNode, _b: &SemanticNode) -> alaya::Result<bool> {
Ok(false)
}
}
/// Build a user-role episode with default context and no embedding.
fn episode(content: &str, session: &str, ts: i64) -> NewEpisode {
NewEpisode {
content: String::from(content),
role: Role::User,
session_id: String::from(session),
timestamp: ts,
context: EpisodeContext::default(),
embedding: None,
}
}
/// Store `count` episodes in `session`, spaced 100 timestamp units apart
/// starting at `base_ts`, returning the ids in insertion order.
fn store_n_episodes(store: &Alaya, session: &str, count: usize, base_ts: i64) -> Vec<EpisodeId> {
let mut ids = Vec::with_capacity(count);
for i in 0..count {
let content = format!("Episode {i} in session {session} about Rust programming");
let new_episode = episode(&content, session, base_ts + (i as i64) * 100);
ids.push(store.episodes().store(&new_episode).unwrap());
}
ids
}
/// Full lifecycle smoke test: 3 sessions of episodes, then query,
/// consolidate, perfume, transform and forget, asserting each report.
#[test]
fn test_multi_session_lifecycle() {
let store = Alaya::open_in_memory().unwrap();
// 4 episodes per session, 12 total, with distinct timestamp bands.
let _s1_ids = store_n_episodes(&store, "session-1", 4, 1_000);
let _s2_ids = store_n_episodes(&store, "session-2", 4, 2_000);
let _s3_ids = store_n_episodes(&store, "session-3", 4, 3_000);
let status = store.admin().status().unwrap();
assert_eq!(
status.episode_count, 12,
"should have 12 episodes across 3 sessions"
);
// Every stored episode mentions "Rust programming" (see the helper),
// so a simple query should find matches.
let results = store
.knowledge()
.query(&Query::simple("Rust programming"))
.unwrap();
assert!(!results.is_empty(), "query should return matching episodes");
// Consolidate with the NoOp provider: the batch is processed but no
// semantic nodes come out of it.
let cr = store.lifecycle().consolidate(&NoOpProvider).unwrap();
assert_eq!(
cr.nodes_created, 0,
"NoOp provider creates no semantic nodes"
);
assert!(
cr.episodes_processed > 0,
"consolidation should process the unconsolidated batch (got {})",
cr.episodes_processed
);
// Perfume one interaction through a provider that emits exactly one
// canned impression.
let perfume_provider = TestProvider::with_impressions(vec![NewImpression {
domain: "lifecycle".to_string(),
observation: "user tests full lifecycle".to_string(),
valence: 0.8,
}]);
let interaction = Interaction {
text: "lifecycle test interaction".to_string(),
role: Role::User,
session_id: "session-1".to_string(),
timestamp: 5000,
context: EpisodeContext::default(),
};
let pr = store
.lifecycle()
.perfume(&interaction, &perfume_provider)
.unwrap();
assert_eq!(
pr.impressions_stored, 1,
"perfume should store 1 impression"
);
// Transform: with no semantic nodes created there is nothing to merge.
let tr = store.lifecycle().transform().unwrap();
assert_eq!(tr.duplicates_merged, 0);
// Forget decays strength records but must not delete the episodes.
let fr = store.lifecycle().forget().unwrap();
assert!(
fr.nodes_decayed > 0,
"forget should decay the 12 episode strength records (got {})",
fr.nodes_decayed
);
let final_status = store.admin().status().unwrap();
assert_eq!(
final_status.episode_count, 12,
"episodes should survive a single forget pass"
);
}
/// Purging one session must not touch episodes from other sessions, and
/// a timestamp-based purge must only hit episodes below the cutoff.
#[test]
fn test_multi_session_purge_isolation() {
let store = Alaya::open_in_memory().unwrap();
// 3 sessions x 3 episodes; each session gets its own timestamp band
// (alpha: 1000-1200, beta: 2000-2200, gamma: 3000-3200).
let _alpha_ids = store_n_episodes(&store, "alpha", 3, 1_000);
let _beta_ids = store_n_episodes(&store, "beta", 3, 2_000);
let _gamma_ids = store_n_episodes(&store, "gamma", 3, 3_000);
assert_eq!(store.admin().status().unwrap().episode_count, 9);
// Purge by session id: exactly the 3 'beta' episodes go.
let purge_report = store
.admin()
.purge(PurgeFilter::Session("beta".to_string()))
.unwrap();
assert_eq!(
purge_report.episodes_deleted, 3,
"purging 'beta' should delete its 3 episodes"
);
let status = store.admin().status().unwrap();
assert_eq!(
status.episode_count, 6,
"6 episodes should remain after purging 'beta'"
);
let alpha_results = store
.knowledge()
.query(&Query::simple("session alpha"))
.unwrap();
assert!(
!alpha_results.is_empty(),
"'alpha' episodes should survive the purge of 'beta'"
);
// Purge by age: 'alpha' timestamps (1000-1200) are below 2500; 'beta'
// is already gone and 'gamma' (3000+) is above the cutoff.
let purge_report2 = store.admin().purge(PurgeFilter::OlderThan(2500)).unwrap();
assert_eq!(
purge_report2.episodes_deleted, 3,
"OlderThan(2500) should remove the 3 'alpha' episodes"
);
let final_status = store.admin().status().unwrap();
assert_eq!(
final_status.episode_count, 3,
"only 'gamma' episodes should remain"
);
let gamma_results = store
.knowledge()
.query(&Query::simple("session gamma"))
.unwrap();
assert!(
!gamma_results.is_empty(),
"'gamma' episodes should still be queryable"
);
}
/// Lifecycle phases must be safe to run repeatedly: zero-work no-ops on
/// an empty store, and stable results when re-run over the same data.
#[test]
fn test_lifecycle_idempotence() {
let store = Alaya::open_in_memory().unwrap();
// On an empty store every phase must report zero work, on both passes.
for pass in 0..2 {
let cr = store.lifecycle().consolidate(&NoOpProvider).unwrap();
assert_eq!(cr.episodes_processed, 0, "empty consolidate pass {pass}");
assert_eq!(cr.nodes_created, 0, "empty consolidate pass {pass}");
let tr = store.lifecycle().transform().unwrap();
assert_eq!(tr.duplicates_merged, 0, "empty transform pass {pass}");
assert_eq!(tr.links_pruned, 0, "empty transform pass {pass}");
let fr = store.lifecycle().forget().unwrap();
assert_eq!(fr.nodes_decayed, 0, "empty forget pass {pass}");
assert_eq!(fr.nodes_archived, 0, "empty forget pass {pass}");
}
// Now with data: two full lifecycle passes over the same 6 episodes.
let _ids = store_n_episodes(&store, "idempotence", 6, 1_000);
let status_before = store.admin().status().unwrap();
assert_eq!(status_before.episode_count, 6);
let cr1 = store.lifecycle().consolidate(&NoOpProvider).unwrap();
let _tr1 = store.lifecycle().transform().unwrap();
let fr1 = store.lifecycle().forget().unwrap();
assert!(
cr1.episodes_processed > 0,
"first consolidate should process episodes"
);
assert!(fr1.nodes_decayed > 0, "first forget should decay nodes");
// NOTE(review): NoOpProvider leaves no consolidation links, so the
// second pass presumably sees the batch as still unconsolidated —
// confirm against the consolidation bookkeeping.
let cr2 = store.lifecycle().consolidate(&NoOpProvider).unwrap();
let _tr2 = store.lifecycle().transform().unwrap();
let fr2 = store.lifecycle().forget().unwrap();
assert!(
cr2.episodes_processed > 0,
"second consolidate should re-process (NoOp leaves no links)"
);
assert!(
fr2.nodes_decayed > 0,
"second forget should still decay nodes"
);
let status_after = store.admin().status().unwrap();
assert_eq!(
status_after.episode_count, 6,
"episodes should survive two lifecycle passes"
);
// Back-to-back transforms must report identical merge counts.
let tr_a = store.lifecycle().transform().unwrap();
let tr_b = store.lifecycle().transform().unwrap();
assert_eq!(tr_a.duplicates_merged, tr_b.duplicates_merged);
}
/// Preference crystallization end-to-end: repeated impressions in one
/// domain stay latent below the threshold, crystallize into a preference
/// on the 5th impression, and afterwards reinforce rather than duplicate.
#[test]
fn test_preference_crystallization_e2e() {
    let store = Alaya::open_in_memory().unwrap();
    // Provider that emits the same code_style impression per interaction.
    let provider = TestProvider::with_impressions(vec![NewImpression {
        domain: "code_style".to_string(),
        observation: "prefers functional style".to_string(),
        valence: 0.9,
    }]);
    // Local builder — the only fields that vary across this test are
    // `text` and `timestamp`, so don't repeat the literal three times.
    let interaction = |text: &str, ts: i64| Interaction {
        text: text.to_string(),
        role: Role::User,
        session_id: "crystal-s1".to_string(),
        timestamp: ts,
        context: EpisodeContext::default(),
    };
    // Four impressions: still below the crystallization threshold.
    for i in 0..4 {
        let report = store
            .lifecycle()
            .perfume(
                &interaction(&format!("I like map/filter/fold {i}"), 1000 + i * 100),
                &provider,
            )
            .unwrap();
        assert_eq!(
            report.preferences_crystallized, 0,
            "pass {i}: should not crystallize below threshold"
        );
    }
    let prefs = store.admin().preferences(Some("code_style")).unwrap();
    assert!(
        prefs.is_empty(),
        "no preference should exist before threshold"
    );
    // The 5th impression crosses the threshold.
    let report = store
        .lifecycle()
        .perfume(&interaction("I like map/filter/fold 4", 1400), &provider)
        .unwrap();
    assert_eq!(
        report.preferences_crystallized, 1,
        "5th impression should trigger crystallization"
    );
    let prefs = store.admin().preferences(Some("code_style")).unwrap();
    assert_eq!(prefs.len(), 1);
    assert_eq!(prefs[0].domain, "code_style");
    // Domain filtering: other domains are empty, None returns everything.
    let other = store.admin().preferences(Some("other_domain")).unwrap();
    assert!(other.is_empty());
    let all = store.admin().preferences(None).unwrap();
    assert!(!all.is_empty());
    // A 6th impression reinforces the existing preference instead of
    // creating a second one.
    let report = store
        .lifecycle()
        .perfume(&interaction("functional style is great", 1500), &provider)
        .unwrap();
    assert_eq!(
        report.preferences_crystallized, 0,
        "should reinforce, not re-crystallize"
    );
    assert_eq!(
        report.preferences_reinforced, 1,
        "should reinforce existing preference"
    );
    let prefs = store.admin().preferences(Some("code_style")).unwrap();
    assert_eq!(prefs.len(), 1);
}
/// Repeated forget passes decay strength but must never delete episodes,
/// and decayed memories must stay retrievable.
#[test]
fn test_memory_decay_and_revival() {
let store = Alaya::open_in_memory().unwrap();
// Three related episodes in one session.
store
.episodes()
.store(&episode("Rust async runtime uses tokio", "decay-s1", 1000))
.unwrap();
store
.episodes()
.store(&episode(
"Tokio has a multi-threaded scheduler",
"decay-s1",
2000,
))
.unwrap();
store
.episodes()
.store(&episode("Async functions return futures", "decay-s1", 3000))
.unwrap();
let status = store.admin().status().unwrap();
assert_eq!(status.episode_count, 3);
// Hammer decay: 10 consecutive forget passes.
for _ in 0..10 {
let report = store.lifecycle().forget().unwrap();
assert!(
report.nodes_decayed > 0,
"should decay strength records each pass"
);
}
assert_eq!(
store.admin().status().unwrap().episode_count,
3,
"episodes should survive 10 decay passes"
);
// NOTE(review): no explicit revival API is called here — the later
// assertions presumably rely on this query boosting/reviving strength;
// confirm against the retrieval implementation.
let results = store
.knowledge()
.query(&Query::simple("Rust async tokio"))
.unwrap();
assert!(
!results.is_empty(),
"decayed memories should still be retrievable (they're latent, not gone)"
);
// Further decay after the (presumed) revival.
for _ in 0..5 {
store.lifecycle().forget().unwrap();
}
assert_eq!(
store.admin().status().unwrap().episode_count,
3,
"revived memories should survive additional decay passes"
);
let results = store.knowledge().query(&Query::simple("tokio")).unwrap();
assert!(
!results.is_empty(),
"revived memories should still be queryable after further decay"
);
}
/// Categories should emerge from `transform` clustering similar semantic
/// nodes, and a later `consolidate` should assign a new similar node to
/// the existing category.
#[test]
fn test_emergent_category_lifecycle() {
    let store = Alaya::open_in_memory().unwrap();
    // Seed cooking-themed episodes via the shared `episode` helper
    // (identical fields to the previous hand-rolled NewEpisode literals).
    let dishes = ["pasta", "risotto", "gnocchi", "lasagna", "ravioli"];
    for (i, dish) in dishes.iter().enumerate() {
        store
            .episodes()
            .store(&episode(
                &format!("I made {dish} for dinner"),
                "s1",
                1000 + (i as i64) * 100,
            ))
            .unwrap();
    }
    // Four nodes with nearby embeddings so transform can cluster them.
    let provider = TestProvider::with_knowledge(vec![
        NewSemanticNode {
            content: "User cooks pasta dishes".to_string(),
            node_type: SemanticType::Fact,
            confidence: 0.9,
            source_episodes: vec![EpisodeId(1)],
            embedding: Some(vec![0.8, 0.3, 0.1]),
        },
        NewSemanticNode {
            content: "User likes Italian food".to_string(),
            node_type: SemanticType::Fact,
            confidence: 0.85,
            source_episodes: vec![EpisodeId(2)],
            embedding: Some(vec![0.4, 0.8, 0.2]),
        },
        NewSemanticNode {
            content: "User knows many Italian recipes".to_string(),
            node_type: SemanticType::Concept,
            confidence: 0.8,
            source_episodes: vec![EpisodeId(3)],
            embedding: Some(vec![0.6, 0.5, 0.5]),
        },
        NewSemanticNode {
            content: "User enjoys cooking dinner".to_string(),
            node_type: SemanticType::Fact,
            confidence: 0.75,
            source_episodes: vec![EpisodeId(4)],
            embedding: Some(vec![0.3, 0.6, 0.7]),
        },
    ]);
    let cr = store.lifecycle().consolidate(&provider).unwrap();
    assert_eq!(cr.nodes_created, 4);
    // No categories exist yet, so consolidate cannot assign any.
    assert_eq!(cr.categories_assigned, 0);
    assert!(
        store.admin().categories(None).unwrap().is_empty(),
        "no categories should exist before transform"
    );
    let tr = store.lifecycle().transform().unwrap();
    assert!(
        tr.categories_discovered >= 1,
        "transform should discover at least 1 category from 4 similar nodes"
    );
    let cats = store.admin().categories(None).unwrap();
    assert!(!cats.is_empty(), "should have categories after transform");
    let cat = &cats[0];
    assert!(
        cat.member_count >= 3,
        "category should have at least 3 members (cluster minimum)"
    );
    // More episodes so the next consolidation has an unprocessed batch.
    for i in 5..10 {
        store
            .episodes()
            .store(&episode(
                &format!("cooking episode {i}"),
                "s2",
                2000 + (i as i64) * 100,
            ))
            .unwrap();
    }
    // One new node whose embedding sits inside the existing cluster.
    let provider2 = TestProvider::with_knowledge(vec![NewSemanticNode {
        content: "User experiments with Italian recipes".to_string(),
        node_type: SemanticType::Fact,
        confidence: 0.85,
        source_episodes: vec![EpisodeId(6), EpisodeId(7)],
        embedding: Some(vec![0.55, 0.55, 0.35]),
    }]);
    let cr2 = store.lifecycle().consolidate(&provider2).unwrap();
    assert_eq!(cr2.nodes_created, 1);
    assert_eq!(
        cr2.categories_assigned, 1,
        "new node with similar embedding should be assigned to existing cooking category"
    );
    // Verify the assignment is visible through the admin API.
    let knowledge = store.knowledge().filter(None).unwrap();
    let new_node = knowledge
        .iter()
        .find(|n| n.content == "User experiments with Italian recipes")
        .unwrap();
    let node_cat = store.admin().node_category(new_node.id).unwrap();
    assert!(node_cat.is_some(), "new node should have a category");
}
/// A category's stability should increase each time it survives a
/// transform cycle with its cluster intact.
#[test]
fn test_category_survives_transform_cycles() {
    let store = Alaya::open_in_memory().unwrap();
    // Seed episodes via the shared `episode` helper (identical fields to
    // the previous hand-rolled NewEpisode literal).
    for i in 0..5 {
        store
            .episodes()
            .store(&episode(
                &format!("Rust memory management topic {i}"),
                "s1",
                1000 + (i as i64) * 100,
            ))
            .unwrap();
    }
    // Three concept nodes with nearby embeddings — enough to cluster.
    let provider = TestProvider::with_knowledge(vec![
        NewSemanticNode {
            content: "Rust ownership".to_string(),
            node_type: SemanticType::Concept,
            confidence: 0.9,
            source_episodes: vec![EpisodeId(1)],
            embedding: Some(vec![0.3, 0.8, 0.1]),
        },
        NewSemanticNode {
            content: "Rust borrowing".to_string(),
            node_type: SemanticType::Concept,
            confidence: 0.85,
            source_episodes: vec![EpisodeId(2)],
            embedding: Some(vec![0.1, 0.7, 0.6]),
        },
        NewSemanticNode {
            content: "Rust lifetimes".to_string(),
            node_type: SemanticType::Concept,
            confidence: 0.8,
            source_episodes: vec![EpisodeId(3)],
            embedding: Some(vec![0.4, 0.6, 0.5]),
        },
    ]);
    store.lifecycle().consolidate(&provider).unwrap();
    store.lifecycle().transform().unwrap();
    let cats = store.admin().categories(None).unwrap();
    assert!(!cats.is_empty());
    let initial_stability = cats[0].stability;
    // Second transform: the cluster is unchanged, so the category should
    // persist and gain stability.
    store.lifecycle().transform().unwrap();
    let cats = store.admin().categories(None).unwrap();
    assert!(!cats.is_empty());
    assert!(
        cats[0].stability > initial_stability,
        "stability should increase after surviving a transform cycle"
    );
}