use crate::scoring::compute_score;
use crate::types::ToolResult;
use crate::McpServer;
use codemem_core::{
Edge, GraphBackend, GraphNode, MemoryNode, MemoryType, NodeKind, RelationshipType,
SearchResult, VectorBackend,
};
use codemem_vector::HnswIndex;
use serde_json::{json, Value};
use std::collections::{HashMap, HashSet};
impl McpServer {
pub(crate) fn tool_consolidate_decay(&self, args: &Value) -> ToolResult {
let threshold_days = args
.get("threshold_days")
.and_then(|v| v.as_u64())
.unwrap_or(30) as i64;
let now = chrono::Utc::now();
let threshold_ts = (now - chrono::Duration::days(threshold_days)).timestamp();
let affected = match self.storage.decay_stale_memories(threshold_ts, 0.9) {
Ok(count) => count,
Err(e) => return ToolResult::tool_error(format!("Decay failed: {e}")),
};
if let Err(e) = self.storage.insert_consolidation_log("decay", affected) {
tracing::warn!("Failed to log decay consolidation: {e}");
}
ToolResult::text(
json!({
"cycle": "decay",
"affected": affected,
"threshold_days": threshold_days,
})
.to_string(),
)
}
pub(crate) fn tool_consolidate_creative(&self, args: &Value) -> ToolResult {
let _ = args;
let parsed = match self.storage.list_memories_for_creative() {
Ok(rows) => rows,
Err(e) => return ToolResult::tool_error(format!("Creative cycle failed: {e}")),
};
let ids_refs: Vec<&str> = parsed.iter().map(|(id, _, _)| id.as_str()).collect();
let memories = match self.storage.get_memories_batch(&ids_refs) {
Ok(m) => m,
Err(e) => return ToolResult::tool_error(format!("Creative cycle failed: {e}")),
};
let memory_info: Vec<(String, String, Vec<String>)> = memories
.iter()
.map(|m| (m.id.clone(), m.memory_type.to_string(), m.tags.clone()))
.collect();
let all_edges = match self.storage.all_graph_edges() {
Ok(e) => e,
Err(e) => return ToolResult::tool_error(format!("Creative cycle failed: {e}")),
};
let existing_edges: HashSet<(String, String)> = all_edges
.iter()
.filter(|e| e.relationship == RelationshipType::RelatesTo)
.map(|e| (e.src.clone(), e.dst.clone()))
.collect();
let mut new_connections = 0usize;
let now = chrono::Utc::now();
let mut graph = match self.lock_graph() {
Ok(g) => g,
Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
};
for i in 0..memory_info.len() {
for j in (i + 1)..memory_info.len() {
let (ref id_a, ref type_a, ref tags_a) = memory_info[i];
let (ref id_b, ref type_b, ref tags_b) = memory_info[j];
if type_a == type_b {
continue;
}
let has_common_tag = tags_a.iter().any(|t| tags_b.contains(t));
if !has_common_tag {
continue;
}
if existing_edges.contains(&(id_a.clone(), id_b.clone()))
|| existing_edges.contains(&(id_b.clone(), id_a.clone()))
{
continue;
}
let node_a = GraphNode {
id: id_a.clone(),
kind: NodeKind::Memory,
label: id_a.clone(),
payload: HashMap::new(),
centrality: 0.0,
memory_id: Some(id_a.clone()),
namespace: None,
};
let node_b = GraphNode {
id: id_b.clone(),
kind: NodeKind::Memory,
label: id_b.clone(),
payload: HashMap::new(),
centrality: 0.0,
memory_id: Some(id_b.clone()),
namespace: None,
};
let _ = self.storage.insert_graph_node(&node_a);
let _ = self.storage.insert_graph_node(&node_b);
let edge_id = format!("{id_a}-RELATES_TO-{id_b}");
let edge = Edge {
id: edge_id,
src: id_a.clone(),
dst: id_b.clone(),
relationship: RelationshipType::RelatesTo,
weight: 1.0,
properties: HashMap::new(),
created_at: now,
};
if self.storage.insert_graph_edge(&edge).is_ok() {
let _ = graph.add_edge(edge);
new_connections += 1;
}
}
}
if let Err(e) = self
.storage
.insert_consolidation_log("creative", new_connections)
{
tracing::warn!("Failed to log creative consolidation: {e}");
}
ToolResult::text(
json!({
"cycle": "creative",
"new_connections": new_connections,
})
.to_string(),
)
}
/// Consolidation cycle "cluster": de-duplicates memories whose content hashes
/// share the same 8-character prefix. The most important member of each
/// bucket survives; the rest are deleted from storage, embeddings, graph,
/// and the vector index, which is then rebuilt.
pub(crate) fn tool_consolidate_cluster(&self, args: &Value) -> ToolResult {
    let _ = args; // this cycle takes no parameters
    let ids = match self.storage.list_memory_ids() {
        Ok(ids) => ids,
        Err(e) => return ToolResult::tool_error(format!("Cluster cycle failed: {e}")),
    };
    let refs: Vec<&str> = ids.iter().map(String::as_str).collect();
    let memories = match self.storage.get_memories_batch(&refs) {
        Ok(m) => m,
        Err(e) => return ToolResult::tool_error(format!("Cluster cycle failed: {e}")),
    };
    // Bucket memories by content-hash prefix; members carry (id, importance).
    let mut buckets: HashMap<String, Vec<(String, f64)>> = HashMap::new();
    for m in &memories {
        let key = if m.content_hash.len() >= 8 {
            m.content_hash[..8].to_string()
        } else {
            m.content_hash.clone()
        };
        buckets
            .entry(key)
            .or_default()
            .push((m.id.clone(), m.importance));
    }
    let mut merged_count = 0usize;
    let mut kept_count = 0usize;
    let mut doomed: Vec<String> = Vec::new();
    for (_key, mut members) in buckets {
        if members.len() <= 1 {
            kept_count += 1;
            continue;
        }
        // Highest importance first: the head survives, the tail is merged away.
        members.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
        kept_count += 1;
        for (id, _importance) in &members[1..] {
            doomed.push(id.clone());
            merged_count += 1;
        }
    }
    let mut vector = match self.lock_vector() {
        Ok(v) => v,
        Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
    };
    let mut graph = match self.lock_graph() {
        Ok(g) => g,
        Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
    };
    for id in &doomed {
        // Best-effort removal from every backing store.
        let _ = self.storage.delete_memory(id);
        let _ = self.storage.delete_embedding(id);
        let _ = self.storage.delete_graph_edges_for_node(id);
        let _ = self.storage.delete_graph_node(id);
        let _ = vector.remove(id);
        let _ = graph.remove_node(id);
    }
    if merged_count > 0 {
        self.rebuild_vector_index_internal(&mut vector);
    }
    // Release the guards before persisting the index.
    drop(vector);
    drop(graph);
    self.save_index();
    if let Err(e) = self
        .storage
        .insert_consolidation_log("cluster", merged_count)
    {
        tracing::warn!("Failed to log cluster consolidation: {e}");
    }
    ToolResult::text(
        json!({
            "cycle": "cluster",
            "merged": merged_count,
            "kept": kept_count,
        })
        .to_string(),
    )
}
pub(crate) fn tool_consolidate_forget(&self, args: &Value) -> ToolResult {
let importance_threshold = args
.get("importance_threshold")
.and_then(|v| v.as_f64())
.unwrap_or(0.1);
let ids = match self.storage.find_forgettable(importance_threshold) {
Ok(ids) => ids,
Err(e) => return ToolResult::tool_error(format!("Forget cycle failed: {e}")),
};
let deleted = ids.len();
let mut vector = match self.lock_vector() {
Ok(v) => v,
Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
};
let mut graph = match self.lock_graph() {
Ok(g) => g,
Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
};
let mut bm25 = match self.lock_bm25() {
Ok(b) => b,
Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
};
for id in &ids {
let _ = self.storage.delete_memory(id);
let _ = self.storage.delete_embedding(id);
let _ = self.storage.delete_graph_edges_for_node(id);
let _ = self.storage.delete_graph_node(id);
let _ = vector.remove(id);
let _ = graph.remove_node(id);
bm25.remove_document(id);
}
if deleted > 0 {
self.rebuild_vector_index_internal(&mut vector);
}
drop(vector);
drop(graph);
drop(bm25);
self.save_index();
if let Err(e) = self.storage.insert_consolidation_log("forget", deleted) {
tracing::warn!("Failed to log forget consolidation: {e}");
}
ToolResult::text(
json!({
"cycle": "forget",
"deleted": deleted,
"threshold": importance_threshold,
})
.to_string(),
)
}
/// Report the most recent run of each consolidation cycle as a JSON object
/// keyed by cycle type, with a human-readable UTC timestamp and the number
/// of rows that run affected.
pub(crate) fn tool_consolidation_status(&self) -> ToolResult {
    let runs = match self.storage.last_consolidation_runs() {
        Ok(r) => r,
        Err(e) => return ToolResult::tool_error(format!("Failed to query status: {e}")),
    };
    let mut cycles = json!({});
    for entry in &runs {
        // An out-of-range epoch second renders as "unknown" rather than failing.
        let last_run = chrono::DateTime::from_timestamp(entry.run_at, 0).map_or_else(
            || "unknown".to_string(),
            |t| t.format("%Y-%m-%d %H:%M:%S UTC").to_string(),
        );
        cycles[&entry.cycle_type] = json!({
            "last_run": last_run,
            "affected": entry.affected_count,
        });
    }
    ToolResult::text(json!({ "cycles": cycles }).to_string())
}
/// Recall memories for `query` and annotate each hit with "impact" data
/// derived from the knowledge graph: PageRank, betweenness centrality,
/// connected Decision memories, and related file nodes.
///
/// Candidates come from the vector index (2k, rescored and trimmed to `k`);
/// when no embedder is available or embedding fails, falls back to a full
/// storage scan scored without vector similarity.
pub(crate) fn tool_recall_with_impact(&self, args: &Value) -> ToolResult {
    let query = match args.get("query").and_then(|v| v.as_str()) {
        Some(q) if !q.is_empty() => q,
        _ => return ToolResult::tool_error("Missing or empty 'query' parameter"),
    };
    let k = args.get("k").and_then(|v| v.as_u64()).unwrap_or(10) as usize;
    let namespace_filter: Option<&str> = args.get("namespace").and_then(|v| v.as_str());
    let query_lower = query.to_lowercase();
    let query_tokens: Vec<&str> = query_lower.split_whitespace().collect();
    let vector_results: Vec<(String, f32)> = if let Some(emb_guard) =
        match self.lock_embeddings() {
            Ok(g) => g,
            Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
        } {
        match emb_guard.embed(query) {
            Ok(query_embedding) => {
                // Release the embedder before taking the vector lock.
                drop(emb_guard);
                let vec = match self.lock_vector() {
                    Ok(v) => v,
                    Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
                };
                vec.search(&query_embedding, k * 2).unwrap_or_default()
            }
            Err(e) => {
                // Embedding failure degrades to the storage-scan fallback.
                tracing::warn!("Query embedding failed: {e}");
                vec![]
            }
        }
    } else {
        vec![]
    };
    let graph = match self.lock_graph() {
        Ok(g) => g,
        Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
    };
    let bm25 = match self.lock_bm25() {
        Ok(b) => b,
        Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
    };
    // PERF: fetch the scoring weights once. They are loop-invariant, and the
    // original re-acquired them (and dropped them) for every single candidate.
    let weights = match self.scoring_weights() {
        Ok(w) => w,
        Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
    };
    let mut results: Vec<SearchResult> = Vec::new();
    if vector_results.is_empty() {
        // Fallback: scan every memory; vector similarity contributes 0.0.
        let ids = match self.storage.list_memory_ids() {
            Ok(ids) => ids,
            Err(e) => return ToolResult::tool_error(format!("Storage error: {e}")),
        };
        for id in &ids {
            if let Ok(Some(memory)) = self.storage.get_memory(id) {
                if let Some(ns) = namespace_filter {
                    if memory.namespace.as_deref() != Some(ns) {
                        continue;
                    }
                }
                let breakdown =
                    compute_score(&memory, query, &query_tokens, 0.0, &graph, &bm25);
                let score = breakdown.total_with_weights(&weights);
                // Drop near-zero matches so the scan doesn't return the whole store.
                if score > 0.01 {
                    results.push(SearchResult {
                        memory,
                        score,
                        score_breakdown: breakdown,
                    });
                }
            }
        }
    } else {
        for (id, distance) in &vector_results {
            if let Ok(Some(memory)) = self.storage.get_memory(id) {
                if let Some(ns) = namespace_filter {
                    if memory.namespace.as_deref() != Some(ns) {
                        continue;
                    }
                }
                // HNSW returns a distance; convert to a similarity in [0, 1].
                let similarity = 1.0 - (*distance as f64);
                let breakdown =
                    compute_score(&memory, query, &query_tokens, similarity, &graph, &bm25);
                let score = breakdown.total_with_weights(&weights);
                results.push(SearchResult {
                    memory,
                    score,
                    score_breakdown: breakdown,
                });
            }
        }
    }
    drop(weights);
    results.sort_by(|a, b| {
        b.score
            .partial_cmp(&a.score)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    results.truncate(k);
    if results.is_empty() {
        return ToolResult::text("No matching memories found.");
    }
    let output: Vec<Value> = results
        .iter()
        .map(|r| {
            let memory_id = &r.memory.id;
            let pagerank = graph.get_pagerank(memory_id);
            let centrality = graph.get_betweenness(memory_id);
            // PERF: fetch the edge list once and derive both impact lists
            // from it; the original queried the graph twice per result.
            let edges = graph.get_edges(memory_id).unwrap_or_default();
            // Neighbouring memories of type Decision.
            let connected_decisions: Vec<String> = edges
                .iter()
                .filter_map(|e| {
                    let other_id = if e.src == *memory_id { &e.dst } else { &e.src };
                    self.storage
                        .get_memory(other_id)
                        .ok()
                        .flatten()
                        .and_then(|m| (m.memory_type == MemoryType::Decision).then(|| m.id))
                })
                .collect();
            // Neighbouring File nodes (or nodes carrying a "file_path" payload).
            let dependent_files: Vec<String> = edges
                .iter()
                .filter_map(|e| {
                    let other_id = if e.src == *memory_id { &e.dst } else { &e.src };
                    graph.get_node(other_id).ok().flatten().and_then(|n| {
                        if n.kind == NodeKind::File {
                            Some(n.label.clone())
                        } else {
                            n.payload
                                .get("file_path")
                                .and_then(|v| v.as_str().map(String::from))
                        }
                    })
                })
                .collect();
            json!({
                "id": r.memory.id,
                "content": r.memory.content,
                "memory_type": r.memory.memory_type.to_string(),
                "score": format!("{:.4}", r.score),
                "importance": r.memory.importance,
                "tags": r.memory.tags,
                "access_count": r.memory.access_count,
                "impact": {
                    "pagerank": format!("{:.6}", pagerank),
                    "centrality": format!("{:.6}", centrality),
                    "connected_decisions": connected_decisions,
                    "dependent_files": dependent_files,
                    "modification_count": r.memory.access_count,
                }
            })
        })
        .collect();
    ToolResult::text(
        serde_json::to_string_pretty(&output).expect("JSON serialization of literal"),
    )
}
/// Reconstruct the chain of Decision memories related to a file path or topic.
///
/// Seed decisions are found by case-insensitive substring match against
/// memory content, tags, and the "file_path" metadata field; the chain is
/// then expanded transitively along EVOLVED_INTO / LEADS_TO / DERIVED_FROM
/// edges to other Decision memories and returned in chronological order.
pub(crate) fn tool_get_decision_chain(&self, args: &Value) -> ToolResult {
    let file_path: Option<&str> = args.get("file_path").and_then(|v| v.as_str());
    let topic: Option<&str> = args.get("topic").and_then(|v| v.as_str());
    if file_path.is_none() && topic.is_none() {
        return ToolResult::tool_error("Must provide either 'file_path' or 'topic' parameter");
    }
    let graph = match self.lock_graph() {
        Ok(g) => g,
        Err(e) => return ToolResult::tool_error(format!("Lock error: {e}")),
    };
    let ids = match self.storage.list_memory_ids() {
        Ok(ids) => ids,
        Err(e) => return ToolResult::tool_error(format!("Storage error: {e}")),
    };
    // Relationship kinds that link one decision to another in a chain.
    let decision_edge_types = [
        RelationshipType::EvolvedInto,
        RelationshipType::LeadsTo,
        RelationshipType::DerivedFrom,
    ];
    // PERF: lowercase the filters once; the original recomputed
    // `fp.to_lowercase()` up to three times per candidate memory.
    let fp_lower = file_path.map(str::to_lowercase);
    let topic_lower = topic.map(str::to_lowercase);
    let mut decision_memories: Vec<MemoryNode> = Vec::new();
    for id in &ids {
        if let Ok(Some(memory)) = self.storage.get_memory(id) {
            if memory.memory_type != MemoryType::Decision {
                continue;
            }
            let content_lower = memory.content.to_lowercase();
            let tags_lower: String = memory.tags.join(" ").to_lowercase();
            let matches = if let Some(fp) = fp_lower.as_deref() {
                content_lower.contains(fp)
                    || tags_lower.contains(fp)
                    || memory
                        .metadata
                        .get("file_path")
                        .and_then(|v| v.as_str())
                        .map(|v| v.to_lowercase().contains(fp))
                        .unwrap_or(false)
            } else if let Some(t) = topic_lower.as_deref() {
                content_lower.contains(t) || tags_lower.contains(t)
            } else {
                false
            };
            if matches {
                decision_memories.push(memory);
            }
        }
    }
    if decision_memories.is_empty() {
        return ToolResult::text("No decision memories found matching the criteria.");
    }
    // Transitive closure (DFS) over decision-to-decision edges.
    let mut chain_ids: HashSet<String> = HashSet::new();
    let mut to_explore: Vec<String> = decision_memories.iter().map(|m| m.id.clone()).collect();
    while let Some(current_id) = to_explore.pop() {
        if !chain_ids.insert(current_id.clone()) {
            continue; // already visited
        }
        // BUG FIX: this argument was corrupted to "¤t_id" (HTML-entity
        // mangling of `&current_id`), which does not compile.
        if let Ok(edges) = graph.get_edges(&current_id) {
            for edge in &edges {
                if decision_edge_types.contains(&edge.relationship) {
                    let other_id = if edge.src == current_id {
                        &edge.dst
                    } else {
                        &edge.src
                    };
                    if !chain_ids.contains(other_id) {
                        // Only follow the edge if the neighbour is itself a Decision.
                        if let Ok(Some(m)) = self.storage.get_memory(other_id) {
                            if m.memory_type == MemoryType::Decision {
                                to_explore.push(other_id.clone());
                            }
                        }
                    }
                }
            }
        }
    }
    let mut chain: Vec<Value> = Vec::new();
    for id in &chain_ids {
        if let Ok(Some(memory)) = self.storage.get_memory(id) {
            // Only report edges whose endpoints are both inside the chain.
            let connections: Vec<Value> = graph
                .get_edges(id)
                .unwrap_or_default()
                .iter()
                .filter(|e| {
                    decision_edge_types.contains(&e.relationship)
                        && chain_ids.contains(&e.src)
                        && chain_ids.contains(&e.dst)
                })
                .map(|e| {
                    json!({
                        "relationship": e.relationship.to_string(),
                        "source": e.src,
                        "target": e.dst,
                    })
                })
                .collect();
            chain.push(json!({
                "id": memory.id,
                "content": memory.content,
                "importance": memory.importance,
                "tags": memory.tags,
                "created_at": memory.created_at.to_rfc3339(),
                "connections": connections,
            }));
        }
    }
    // RFC 3339 timestamps sort lexicographically into chronological order.
    chain.sort_by(|a, b| {
        let a_dt = a["created_at"].as_str().unwrap_or("");
        let b_dt = b["created_at"].as_str().unwrap_or("");
        a_dt.cmp(b_dt)
    });
    let response = json!({
        "chain_length": chain.len(),
        "filter": {
            "file_path": file_path,
            "topic": topic,
        },
        "decisions": chain,
    });
    ToolResult::text(
        serde_json::to_string_pretty(&response).expect("JSON serialization of literal"),
    )
}
/// Rebuild the in-memory HNSW index from the embeddings persisted in storage,
/// replacing `vector` wholesale on success.
///
/// Used after bulk deletions. All failures are logged and leave the existing
/// index untouched.
pub(crate) fn rebuild_vector_index_internal(&self, vector: &mut HnswIndex) {
    let embeddings = match self.storage.list_all_embeddings() {
        Ok(e) => e,
        Err(e) => {
            tracing::warn!("Failed to rebuild vector index: {e}");
            return;
        }
    };
    match HnswIndex::with_defaults() {
        Ok(mut fresh) => {
            for (id, floats) in &embeddings {
                // Individual insert failures are tolerated; the rebuild is best effort.
                let _ = fresh.insert(id, floats);
            }
            *vector = fresh;
        }
        // FIX: this failure used to be silently swallowed (`if let Ok(...)`),
        // leaving a stale index with no trace in the logs.
        Err(e) => tracing::warn!("Failed to create fresh vector index: {e}"),
    }
}
pub(crate) fn tool_detect_patterns(&self, args: &Value) -> ToolResult {
let min_frequency = args
.get("min_frequency")
.and_then(|v| v.as_u64())
.unwrap_or(3) as usize;
let namespace = args.get("namespace").and_then(|v| v.as_str());
match crate::patterns::detect_patterns(&*self.storage, namespace, min_frequency) {
Ok(detected) => {
let json_patterns: Vec<Value> = detected
.iter()
.map(|p| {
json!({
"pattern_type": p.pattern_type.to_string(),
"description": p.description,
"frequency": p.frequency,
"confidence": p.confidence,
"related_memories": p.related_memories,
})
})
.collect();
ToolResult::text(
serde_json::to_string_pretty(&json!({
"patterns": json_patterns,
"count": detected.len(),
}))
.expect("JSON serialization of literal"),
)
}
Err(e) => ToolResult::tool_error(format!("Pattern detection error: {e}")),
}
}
pub(crate) fn tool_pattern_insights(&self, args: &Value) -> ToolResult {
let min_frequency = args
.get("min_frequency")
.and_then(|v| v.as_u64())
.unwrap_or(2) as usize;
let namespace = args.get("namespace").and_then(|v| v.as_str());
match crate::patterns::detect_patterns(&*self.storage, namespace, min_frequency) {
Ok(detected) => {
let markdown = crate::patterns::generate_insights(&detected);
ToolResult::text(markdown)
}
Err(e) => ToolResult::tool_error(format!("Pattern insights error: {e}")),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test_helpers::*;
use codemem_storage::Storage;
fn call_tool(server: &McpServer, tool_name: &str, arguments: Value) -> Value {
let params = json!({"name": tool_name, "arguments": arguments});
let resp = server.handle_request("tools/call", Some(¶ms), json!("req"));
assert!(
resp.error.is_none(),
"Unexpected error calling {tool_name}: {:?}",
resp.error
);
resp.result.unwrap()
}
/// Call a tool and parse its text payload as JSON, falling back to a plain
/// string `Value` when the payload is not valid JSON.
fn call_tool_parse(server: &McpServer, tool_name: &str, arguments: Value) -> Value {
    let result = call_tool(server, tool_name, arguments);
    let text = result["content"][0]["text"].as_str().unwrap();
    serde_json::from_str(text).unwrap_or_else(|_| Value::String(text.to_owned()))
}
/// Store a memory with an explicit namespace and return the parsed response.
fn store_ns(
    server: &McpServer,
    content: &str,
    namespace: &str,
    memory_type: &str,
    tags: &[&str],
) -> Value {
    let args = json!({
        "content": content,
        "memory_type": memory_type,
        "tags": tags,
        "namespace": namespace,
    });
    call_tool_parse(server, "store_memory", args)
}
#[test]
fn consolidate_decay_reduces_importance() {
    let server = test_server();
    let now = chrono::Utc::now();
    let sixty_days_ago = now - chrono::Duration::days(60);
    let id = uuid::Uuid::new_v4().to_string();
    let content = "old memory that should decay";
    let hash = Storage::content_hash(content);
    // A memory last touched 60 days ago — well past the 30-day default threshold.
    let memory = MemoryNode {
        id: id.clone(),
        content: content.to_string(),
        memory_type: MemoryType::Context,
        importance: 0.8,
        confidence: 1.0,
        access_count: 0,
        content_hash: hash,
        tags: vec![],
        metadata: HashMap::new(),
        namespace: None,
        created_at: sixty_days_ago,
        updated_at: sixty_days_ago,
        last_accessed_at: sixty_days_ago,
    };
    server.storage.insert_memory(&memory).unwrap();
    let params = json!({"name": "consolidate_decay", "arguments": {}});
    // FIX: `&params` had been corrupted to "¶ms" (HTML-entity mangling).
    let resp = server.handle_request("tools/call", Some(&params), json!(1));
    let result = resp.result.unwrap();
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    assert_eq!(parsed["cycle"], "decay");
    assert_eq!(parsed["affected"], 1);
    assert_eq!(parsed["threshold_days"], 30);
    // 0.8 importance with the 0.9 decay multiplier => 0.72.
    let retrieved = server.storage.get_memory(&id).unwrap().unwrap();
    assert!((retrieved.importance - 0.72).abs() < 0.01);
}
#[test]
fn consolidate_decay_skips_recent_memories() {
    let server = test_server();
    store_memory(&server, "recently accessed memory", "context", &[]);
    let params = json!({"name": "consolidate_decay", "arguments": {"threshold_days": 30}});
    // FIX: `&params` had been corrupted to "¶ms" (HTML-entity mangling).
    let resp = server.handle_request("tools/call", Some(&params), json!(1));
    let result = resp.result.unwrap();
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    // A just-stored memory is inside the 30-day window and must not decay.
    assert_eq!(parsed["affected"], 0);
}
#[test]
fn consolidate_creative_creates_edges() {
    let server = test_server();
    // Different types sharing the "rust" tag — exactly one creative link expected.
    store_memory(
        &server,
        "insight about rust safety",
        "insight",
        &["rust", "safety"],
    );
    store_memory(
        &server,
        "pattern for error handling",
        "pattern",
        &["rust", "error"],
    );
    let params = json!({"name": "consolidate_creative", "arguments": {}});
    // FIX: `&params` had been corrupted to "¶ms" (HTML-entity mangling).
    let resp = server.handle_request("tools/call", Some(&params), json!(1));
    let result = resp.result.unwrap();
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    assert_eq!(parsed["cycle"], "creative");
    assert_eq!(parsed["new_connections"], 1);
}
#[test]
fn consolidate_creative_skips_same_type() {
    let server = test_server();
    // Same type + same tag: the creative cycle must not link these.
    store_memory(&server, "insight one about rust", "insight", &["rust"]);
    store_memory(&server, "insight two about rust", "insight", &["rust"]);
    let params = json!({"name": "consolidate_creative", "arguments": {}});
    // FIX: `&params` had been corrupted to "¶ms" (HTML-entity mangling).
    let resp = server.handle_request("tools/call", Some(&params), json!(1));
    let result = resp.result.unwrap();
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    assert_eq!(parsed["new_connections"], 0);
}
#[test]
fn consolidate_forget_deletes_low_importance() {
    let server = test_server();
    let now = chrono::Utc::now();
    let id = uuid::Uuid::new_v4().to_string();
    let content = "forgettable memory";
    let hash = Storage::content_hash(content);
    // Importance 0.05 < default threshold 0.1 and never accessed => forgettable.
    let memory = MemoryNode {
        id: id.clone(),
        content: content.to_string(),
        memory_type: MemoryType::Context,
        importance: 0.05,
        confidence: 1.0,
        access_count: 0,
        content_hash: hash,
        tags: vec![],
        metadata: HashMap::new(),
        namespace: None,
        created_at: now,
        updated_at: now,
        last_accessed_at: now,
    };
    server.storage.insert_memory(&memory).unwrap();
    assert_eq!(server.storage.memory_count().unwrap(), 1);
    let params = json!({"name": "consolidate_forget", "arguments": {}});
    // FIX: `&params` had been corrupted to "¶ms" (HTML-entity mangling).
    let resp = server.handle_request("tools/call", Some(&params), json!(1));
    let result = resp.result.unwrap();
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    assert_eq!(parsed["cycle"], "forget");
    assert_eq!(parsed["deleted"], 1);
    assert_eq!(parsed["threshold"], 0.1);
    assert_eq!(server.storage.memory_count().unwrap(), 0);
}
#[test]
fn consolidate_forget_keeps_accessed_memories() {
    let server = test_server();
    let now = chrono::Utc::now();
    // Low importance but access_count > 0 — must survive the forget cycle.
    let memory = MemoryNode {
        id: uuid::Uuid::new_v4().to_string(),
        content: "low importance but accessed".to_string(),
        memory_type: MemoryType::Context,
        importance: 0.05,
        confidence: 1.0,
        access_count: 5,
        content_hash: Storage::content_hash("low importance but accessed"),
        tags: vec![],
        metadata: HashMap::new(),
        namespace: None,
        created_at: now,
        updated_at: now,
        last_accessed_at: now,
    };
    server.storage.insert_memory(&memory).unwrap();
    let params = json!({"name": "consolidate_forget", "arguments": {}});
    // FIX: `&params` had been corrupted to "¶ms" (HTML-entity mangling).
    let resp = server.handle_request("tools/call", Some(&params), json!(1));
    let result = resp.result.unwrap();
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    assert_eq!(parsed["deleted"], 0);
    assert_eq!(server.storage.memory_count().unwrap(), 1);
}
#[test]
fn consolidation_status_shows_last_run() {
    let server = test_server();
    // FIX (three call sites below): `&params` had been corrupted to "¶ms".
    let params = json!({"name": "consolidation_status", "arguments": {}});
    let resp = server.handle_request("tools/call", Some(&params), json!(1));
    let result = resp.result.unwrap();
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    // No cycle has run yet.
    assert_eq!(parsed["cycles"], json!({}));
    // Run one decay cycle, then the status must report it.
    let params = json!({"name": "consolidate_decay", "arguments": {}});
    server.handle_request("tools/call", Some(&params), json!(2));
    let params = json!({"name": "consolidation_status", "arguments": {}});
    let resp = server.handle_request("tools/call", Some(&params), json!(3));
    let result = resp.result.unwrap();
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    assert!(parsed["cycles"]["decay"].is_object());
    assert!(parsed["cycles"]["decay"]["last_run"].is_string());
    assert!(parsed["cycles"]["decay"]["affected"].is_number());
}
#[test]
fn consolidate_forget_custom_threshold() {
    let server = test_server();
    let now = chrono::Utc::now();
    // Both memories sit below the custom 0.5 threshold, so both must go.
    for (imp, content) in [(0.3, "medium importance"), (0.05, "very low importance")] {
        let id = uuid::Uuid::new_v4().to_string();
        let hash = Storage::content_hash(content);
        let memory = MemoryNode {
            id,
            content: content.to_string(),
            memory_type: MemoryType::Context,
            importance: imp,
            confidence: 1.0,
            access_count: 0,
            content_hash: hash,
            tags: vec![],
            metadata: HashMap::new(),
            namespace: None,
            created_at: now,
            updated_at: now,
            last_accessed_at: now,
        };
        server.storage.insert_memory(&memory).unwrap();
    }
    assert_eq!(server.storage.memory_count().unwrap(), 2);
    let params =
        json!({"name": "consolidate_forget", "arguments": {"importance_threshold": 0.5}});
    // FIX: `&params` had been corrupted to "¶ms" (HTML-entity mangling).
    let resp = server.handle_request("tools/call", Some(&params), json!(1));
    let result = resp.result.unwrap();
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    assert_eq!(parsed["deleted"], 2);
    assert_eq!(parsed["threshold"], 0.5);
    assert_eq!(server.storage.memory_count().unwrap(), 0);
}
#[test]
fn recall_with_impact_returns_impact_data() {
    let server = test_server();
    let stored = store_ns(
        &server,
        "impact test memory about error handling patterns",
        "test-ns",
        "insight",
        &["error", "handling"],
    );
    let _id = stored["id"].as_str().unwrap();
    let result = call_tool(
        &server,
        "recall_with_impact",
        json!({"query": "error handling"}),
    );
    let text = result["content"][0]["text"].as_str().unwrap();
    // Recall may legitimately come back empty in this environment; skip then.
    if text.contains("No matching memories") {
        return;
    }
    let parsed: Value = serde_json::from_str(text).unwrap();
    let first = &parsed[0];
    assert!(
        first.get("impact").is_some(),
        "result should contain impact data"
    );
    // Every impact field must be present on the first hit.
    let impact = &first["impact"];
    for key in [
        "pagerank",
        "centrality",
        "connected_decisions",
        "dependent_files",
        "modification_count",
    ] {
        assert!(impact.get(key).is_some(), "impact should contain {key}");
    }
}
#[test]
fn get_decision_chain_requires_parameter() {
    let server = test_server();
    let params = json!({"name": "get_decision_chain", "arguments": {}});
    // FIX: `&params` had been corrupted to "¶ms" (HTML-entity mangling).
    let resp = server.handle_request("tools/call", Some(&params), json!("req"));
    let result = resp.result.unwrap();
    let text = result["content"][0]["text"].as_str().unwrap();
    assert!(
        text.contains("file_path") || text.contains("topic"),
        "error should mention required parameters"
    );
}
#[test]
fn get_decision_chain_by_topic() {
    let server = test_server();
    // Two decisions tagged with the same topic.
    for content in [
        "Decision: use async runtime for concurrency",
        "Decision: switched from threads to async for concurrency",
    ] {
        call_tool_parse(
            &server,
            "store_memory",
            json!({
                "content": content,
                "memory_type": "decision",
                "tags": ["concurrency"],
            }),
        );
    }
    let result = call_tool(
        &server,
        "get_decision_chain",
        json!({"topic": "concurrency"}),
    );
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    assert!(parsed["chain_length"].as_u64().unwrap() >= 2);
    assert_eq!(parsed["filter"]["topic"], "concurrency");
}
#[test]
fn decision_chain_follows_temporal_order() {
    let server = test_server();
    // Store three decisions in order and remember their ids.
    let contents = [
        "Decision: initial architecture for auth module",
        "Decision: refactored auth to use JWT tokens",
        "Decision: added OAuth2 to auth module",
    ];
    let ids: Vec<String> = contents
        .iter()
        .map(|content| {
            let stored = call_tool_parse(
                &server,
                "store_memory",
                json!({
                    "content": content,
                    "memory_type": "decision",
                    "tags": ["auth"],
                }),
            );
            stored["id"].as_str().unwrap().to_string()
        })
        .collect();
    // Chain them: d1 -> d2 -> d3 via EVOLVED_INTO.
    for pair in ids.windows(2) {
        call_tool(
            &server,
            "associate_memories",
            json!({
                "source_id": pair[0],
                "target_id": pair[1],
                "relationship": "EVOLVED_INTO",
            }),
        );
    }
    let result = call_tool(&server, "get_decision_chain", json!({"topic": "auth"}));
    let text = result["content"][0]["text"].as_str().unwrap();
    let parsed: Value = serde_json::from_str(text).unwrap();
    assert_eq!(parsed["chain_length"].as_u64().unwrap(), 3);
    let decisions = parsed["decisions"].as_array().unwrap();
    // Adjacent entries must be in non-decreasing timestamp order.
    for window in decisions.windows(2) {
        let earlier = window[0]["created_at"].as_str().unwrap();
        let later = window[1]["created_at"].as_str().unwrap();
        assert!(earlier <= later, "decisions should be in chronological order");
    }
    assert!(
        decisions
            .iter()
            .any(|d| !d["connections"].as_array().unwrap().is_empty()),
        "at least one decision should have connections"
    );
}
}