use chrono::{DateTime, Utc};
use petgraph::stable_graph::{NodeIndex, StableGraph};
use petgraph::visit::EdgeRef;
use petgraph::Direction;
use std::collections::{HashMap, HashSet, VecDeque};
/// 1-based memory layer; the graph clamps it into four internal buckets.
pub type Layer = u8;
/// Ownership partition of a memory node.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Partition {
/// Belongs to the current project (the implicit default).
Project,
/// Imported from another repository at a specific commit.
Foreign { source_repo: String, commit: String },
}
impl Partition {
    /// True when this node lives in the project's own partition.
    pub fn is_project(&self) -> bool {
        match self {
            Partition::Project => true,
            Partition::Foreign { .. } => false,
        }
    }

    /// True when this node was imported from another repository.
    pub fn is_foreign(&self) -> bool {
        !self.is_project()
    }
}
/// Programming language detected for code content; `Unknown` means
/// "looks like code, but no specific language stood out".
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum CodeLanguage {
Rust,
Python,
JavaScript,
TypeScript,
Go,
Java,
Cpp,
Shell,
Sql,
Unknown,
}
/// Broad classification of a node's text payload.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub enum ContentType {
/// Ordinary prose (the default).
#[default]
NaturalLanguage,
/// Source code in the given language.
Code(CodeLanguage),
/// JSON / YAML / key-value style data.
StructuredData,
}
impl ContentType {
    /// Whether this is source code of any language.
    pub fn is_code(&self) -> bool {
        self.language().is_some()
    }

    /// The detected language, when this is code.
    pub fn language(&self) -> Option<CodeLanguage> {
        if let ContentType::Code(lang) = self {
            Some(*lang)
        } else {
            None
        }
    }

    /// Parse a storage label; anything unrecognized is natural language.
    pub fn from_label(s: &str) -> Self {
        if s == "code" {
            ContentType::Code(CodeLanguage::Unknown)
        } else if s == "structured_data" {
            ContentType::StructuredData
        } else {
            ContentType::NaturalLanguage
        }
    }

    /// Storage label (inverse of `from_label`, modulo the concrete language).
    pub fn as_label(&self) -> &'static str {
        match self {
            ContentType::NaturalLanguage => "natural_language",
            ContentType::Code(_) => "code",
            ContentType::StructuredData => "structured_data",
        }
    }
}
/// Classify raw text as natural language, structured data, or code.
///
/// Checks run in order: JSON-ish bracket delimiters, a YAML front-matter
/// marker, a key/value line-density heuristic, then code-marker detection.
pub fn detect_content_type(text: &str) -> ContentType {
    let trimmed = text.trim();
    let opens_like_json = trimmed.starts_with('{') || trimmed.starts_with('[');
    let closes_like_json = trimmed.ends_with('}') || trimmed.ends_with(']');
    if opens_like_json && closes_like_json {
        return ContentType::StructuredData;
    }
    if trimmed.starts_with("---") {
        return ContentType::StructuredData;
    }
    let lines: Vec<&str> = trimmed.lines().collect();
    if lines.len() >= 2 {
        // Count lines that look like `key = value` / `key: value` with a
        // short, space-free key. Comment and blank lines never count, but
        // they do stay in the denominator.
        let mut kv_count = 0usize;
        for raw in &lines {
            let line = raw.trim();
            if line.is_empty() || line.starts_with('#') || line.starts_with("//") {
                continue;
            }
            // " = " takes priority over ": "; once a separator is found its
            // verdict is final (no fallback to the other separator).
            let sep_pos = line.find(" = ").or_else(|| line.find(": "));
            if let Some(pos) = sep_pos {
                if pos < 30 && !line[..pos].contains(' ') {
                    kv_count += 1;
                }
            }
        }
        if kv_count as f32 / lines.len() as f32 > 0.6 {
            return ContentType::StructuredData;
        }
    }
    match detect_code_language(text) {
        Some(lang) => ContentType::Code(lang),
        None => ContentType::NaturalLanguage,
    }
}
/// Heuristic source-language detection from marker-substring counts.
///
/// Returns `None` when the text does not look like code at all, and
/// `Some(CodeLanguage::Unknown)` when it looks like code but no single
/// language's markers dominate.
fn detect_code_language(text: &str) -> Option<CodeLanguage> {
    // How many distinct markers from a table occur in `text`.
    let count = |markers: &[&str]| markers.iter().filter(|m| text.contains(**m)).count();
    let scores = [
        (
            count(&["fn ", "pub fn", "impl ", "trait ", "mod ", "#[", "let mut ", "::"]),
            CodeLanguage::Rust,
        ),
        (
            count(&[
                "def ", "async def", "elif ", "self.", "import ", "__init__", "None", "True",
                "False",
            ]),
            CodeLanguage::Python,
        ),
        (
            count(&[
                "function ", "const ", "let ", "var ", "=> ", "===", "!==", "require(",
                "console.",
            ]),
            CodeLanguage::JavaScript,
        ),
        (
            count(&["interface ", ": string", ": number", ": boolean", "readonly ", "as const"]),
            CodeLanguage::TypeScript,
        ),
        (
            count(&["func ", "package ", "go func", ":= ", "fmt."]),
            CodeLanguage::Go,
        ),
        (
            count(&["public class", "private ", "protected ", "System.out", "@Override"]),
            CodeLanguage::Java,
        ),
        (
            count(&["#include", "std::", "cout", "nullptr", "template<"]),
            CodeLanguage::Cpp,
        ),
        (
            count(&["#!/bin", "echo ", "fi\n", "done\n", "esac", "$(", "export "]),
            CodeLanguage::Shell,
        ),
        (
            count(&[
                "SELECT ", "INSERT ", "UPDATE ", "DELETE ", "CREATE TABLE", "ALTER TABLE",
                "JOIN ",
            ]),
            CodeLanguage::Sql,
        ),
    ];
    // `max_by_key` keeps the LAST maximal entry on ties; the table order
    // above matches the original scoring order so tie-breaks are identical.
    let (best_score, best_lang) = scores
        .iter()
        .copied()
        .max_by_key(|&(score, _)| score)
        .unwrap();
    if best_score >= 2 {
        return Some(best_lang);
    }
    // Weaker, language-agnostic signals: structural punctuation plus
    // generic keywords.
    let has_braces = text.contains('{') && text.contains('}');
    let has_parens_semi = text.contains('(') && text.contains(';');
    let structural = usize::from(has_braces) + usize::from(has_parens_semi);
    let strong = count(&["fn ", "def ", "impl ", "struct ", "enum ", "=>", "->"]);
    let weak = count(&[
        "function ", "class ", "import ", "from ", "const ", "let ", "var ", "export ", "pub ",
        "use ",
    ]);
    let total = strong + weak;
    let looks_like_code = strong >= 2
        || (strong >= 1 && (structural > 0 || weak >= 1))
        || (weak >= 2 && structural > 0)
        || (total >= 3 && structural > 0);
    if !looks_like_code {
        return None;
    }
    if best_score >= 1 {
        Some(best_lang)
    } else {
        Some(CodeLanguage::Unknown)
    }
}
/// Kind of memory stored in a node. The `*Superseded` / `*Deprecated` /
/// `*Invalidated` variants are tombstones (see `MemNode::is_valid`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MemKind {
Identity,
Fact,
/// A fact replaced by a newer version (kept for history).
FactSuperseded,
Conclusion,
/// A conclusion whose cited premise has been superseded.
ConclusionInvalidated,
Skill,
SkillDeprecated,
Conversation,
ConversationSummary,
Environment,
Model,
CodeSymbol,
}
/// Typed relationship between two memory nodes.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum EdgeKind {
    /// Newer node replaces an older one.
    Supersedes,
    DependsOn,
    RelatedTo,
    Triggers,
    /// Chronological link between consecutive conversation nodes.
    TemporalNext,
    Calls,
    DefinedIn,
    Imports,
    /// A conclusion cites a fact as a premise.
    CitesPremise,
}
/// Code-symbol payload, JSON-encoded into a `CodeSymbol` node's `value`.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct CodeSymbolMeta {
    /// Symbol name as written in source.
    pub name: String,
    /// Symbol category (free-form string).
    pub kind: String,
    /// Full signature text.
    pub signature: String,
    /// Path of the defining file.
    pub file_path: String,
    /// Line span of the symbol in `file_path`.
    pub start_line: u32,
    pub end_line: u32,
    /// Attached documentation comment, when present.
    pub doc_comment: Option<String>,
    /// Enclosing symbol name, when nested.
    pub parent: Option<String>,
}
impl CodeSymbolMeta {
    /// Decode the metadata from a `CodeSymbol` node's JSON `value`.
    /// Returns `None` for other node kinds or on malformed JSON.
    pub fn from_node(node: &MemNode) -> Option<Self> {
        match node.kind {
            MemKind::CodeSymbol => serde_json::from_str(&node.value).ok(),
            _ => None,
        }
    }

    /// Serialize back to the JSON string stored in a node's `value`.
    pub fn encode(&self) -> String {
        serde_json::to_string(self).expect("CodeSymbolMeta serializable")
    }
}
/// Where a skill applies: everywhere, or within a named domain.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum SkillScope {
Global,
Domain(String),
}
impl Default for SkillScope {
fn default() -> Self {
SkillScope::Global
}
}
/// Conditions describing when a skill is a candidate to fire.
/// The matching logic lives outside this module.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Default, PartialEq)]
pub struct SkillTrigger {
/// Persona label associated with the skill.
#[serde(default)]
pub persona: String,
/// URL pattern the skill is tied to.
#[serde(default)]
pub url_pattern: String,
/// Keywords expected in the task description.
#[serde(default)]
pub task_keywords: Vec<String>,
/// Optional machine-readable trigger payload.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub structured: Option<StructuredTrigger>,
}
/// Machine-readable trigger: a kind tag plus an opaque JSON signature.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)]
pub struct StructuredTrigger {
pub kind: String,
pub signature: serde_json::Value,
}
/// Rolling outcome counters for a skill.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Default, PartialEq)]
pub struct SkillStats {
pub success_count: u64,
pub fail_count: u64,
/// Degraded flag; callers decide when to set it (see `should_degrade`).
pub degraded: bool,
/// Flagged broken and awaiting repair.
pub broken_for_repair: bool,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub completion_rate: Option<f64>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub complaint_count: Option<u64>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_used_at: Option<DateTime<Utc>>,
}
impl SkillStats {
    /// Whether failures exceed successes by more than `threshold`.
    pub fn should_degrade(&self, threshold: u64) -> bool {
        self.fail_count > self.success_count + threshold
    }

    /// Fraction of successful uses; a neutral 0.5 prior when never used.
    pub fn success_ratio(&self) -> f64 {
        match self.success_count + self.fail_count {
            0 => 0.5,
            total => self.success_count as f64 / total as f64,
        }
    }
}
/// Result of one skill application, as reported by the caller.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum SkillOutcome {
Success,
Fail,
}
/// Full skill record, JSON-encoded into a `Skill` node's `value`.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)]
pub struct SkillMeta {
    /// Human-readable skill name.
    pub name: String,
    /// The skill's executable payload.
    pub code: String,
    /// Platform the skill targets.
    pub platform: String,
    /// What the skill does.
    pub description: String,
    /// When the skill should fire.
    pub trigger: SkillTrigger,
    #[serde(default)]
    pub scope: SkillScope,
    /// Free-form guidance on applicability.
    #[serde(default)]
    pub when_to_apply: String,
    #[serde(default)]
    pub stats: SkillStats,
    /// Version number; missing values default to 1.
    #[serde(default = "default_skill_version")]
    pub version: u64,
    /// Owning tenant, when multi-tenancy applies.
    #[serde(default)]
    pub tenant_id: Option<String>,
}
/// Serde default for `SkillMeta::version`: skills start at version 1.
fn default_skill_version() -> u64 {
    1
}
impl SkillMeta {
    /// Decode skill metadata from a `Skill` / `SkillDeprecated` node's JSON
    /// `value`. Returns `None` for other kinds or on malformed JSON.
    pub fn from_node(node: &MemNode) -> Option<Self> {
        match node.kind {
            MemKind::Skill | MemKind::SkillDeprecated => {
                serde_json::from_str(&node.value).ok()
            }
            _ => None,
        }
    }

    /// Serialize back to the JSON string stored in a node's `value`.
    pub fn encode(&self) -> String {
        serde_json::to_string(self).expect("SkillMeta serializable")
    }
}
/// Where a fact came from: a source identifier plus a free-form reference.
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
pub struct Provenance {
    pub source: String,
    pub reference: String,
    /// When the source was recorded, if known.
    #[serde(default)]
    pub date: Option<DateTime<Utc>>,
}
/// Auxiliary metadata attached to every `MemNode`; all fields default to
/// empty/zero for kinds that do not use them.
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
pub struct FactMetadata {
/// Free-form confidence label.
#[serde(default)]
pub confidence: String,
/// Sources this fact was derived from.
#[serde(default)]
pub provenance: Vec<Provenance>,
/// Files this fact concerns.
#[serde(default)]
pub affected_files: Vec<String>,
#[serde(default)]
pub tags: Vec<String>,
/// Free-form category label.
#[serde(default)]
pub category: String,
/// Usage counter; the denominator for the ratio helpers below.
#[serde(default)]
pub usage_count: u64,
/// How often the fact was reported helpful.
#[serde(default)]
pub helpful_count: u64,
/// How often the fact was reported outdated.
#[serde(default)]
pub outdated_reports: u64,
/// Owning tenant, when multi-tenancy applies.
#[serde(default)]
pub tenant_id: Option<String>,
}
impl FactMetadata {
    /// True when no descriptive fields are set (counters are ignored).
    pub fn is_empty(&self) -> bool {
        self.confidence.is_empty()
            && self.category.is_empty()
            && self.provenance.is_empty()
            && self.affected_files.is_empty()
            && self.tags.is_empty()
    }

    /// Share of uses reported outdated; 0.0 when never used.
    pub fn staleness_ratio(&self) -> f64 {
        if self.usage_count == 0 {
            0.0
        } else {
            self.outdated_reports as f64 / self.usage_count as f64
        }
    }

    /// Share of uses reported helpful; a neutral 0.5 when never used.
    pub fn helpfulness_ratio(&self) -> f64 {
        if self.usage_count == 0 {
            0.5
        } else {
            self.helpful_count as f64 / self.usage_count as f64
        }
    }
}
/// A single memory entry stored in the graph.
#[derive(Debug, Clone)]
pub struct MemNode {
pub kind: MemKind,
/// 1-based layer; the graph clamps it into four buckets.
pub layer: Layer,
/// Lookup key; several nodes may share one key.
pub key: String,
/// Payload text; JSON for skill / code-symbol kinds (see `SkillMeta`,
/// `CodeSymbolMeta`).
pub value: String,
/// Stable identifier used for supersession, when assigned.
pub fact_id: Option<String>,
/// Free-form scope label.
pub scope: String,
/// Free-form authority label.
pub authority: String,
/// Constraint facts are surfaced by `MemoryGraph::constraints`.
pub is_constraint: bool,
pub created_at: DateTime<Utc>,
/// When set, the node is eligible for `prune_expired`.
pub expires_at: Option<DateTime<Utc>>,
/// Drives tokenizer choice during retrieval.
pub content_type: ContentType,
pub metadata: FactMetadata,
}
impl MemNode {
    /// Rough token count (~4 chars per token), never less than 1.
    pub fn token_estimate(&self) -> usize {
        (self.value.len() / 4).max(1)
    }

    /// False for tombstoned (superseded/deprecated/invalidated) nodes.
    pub fn is_valid(&self) -> bool {
        match self.kind {
            MemKind::FactSuperseded
            | MemKind::SkillDeprecated
            | MemKind::ConclusionInvalidated => false,
            _ => true,
        }
    }
}
/// A typed, weighted, timestamped edge between memory nodes.
#[derive(Debug, Clone)]
pub struct MemEdge {
pub kind: EdgeKind,
/// Multiplied into activation / score propagation during retrieval.
pub weight: f32,
pub created_at: DateTime<Utc>,
}
/// One ranked retrieval result.
#[derive(Debug, Clone)]
pub struct RetrievalHit {
pub node_ix: NodeIndex,
/// Final activation / PageRank mass; higher ranks first.
pub activation: f32,
/// BFS distance from the nearest seed (always 0 for PPR results).
pub hops: usize,
}
/// In-memory knowledge graph plus secondary lookup indexes.
pub struct MemoryGraph {
/// Underlying petgraph storage; `StableGraph` keeps indices stable
/// across removals (freed slots may later be reused).
pub inner: StableGraph<MemNode, MemEdge>,
/// fact_id -> node, for O(1) supersede / lookup.
by_fact_id: HashMap<String, NodeIndex>,
/// key -> all nodes sharing that key.
by_key: HashMap<String, Vec<NodeIndex>>,
/// Nodes bucketed by layer; index = layer - 1, clamped to 0..=3.
by_layer: [Vec<NodeIndex>; 4],
/// Tail of the conversation chain, for `TemporalNext` linking.
last_conversation: Option<NodeIndex>,
/// Explicit partition per node; absence means `Partition::Project`.
partitions: HashMap<NodeIndex, Partition>,
/// Dedup index for foreign nodes, keyed by fact_id.
foreign_by_fact_id: HashMap<String, NodeIndex>,
}
impl MemoryGraph {
/// Create an empty graph with all indexes empty.
pub fn new() -> Self {
    Self {
        inner: StableGraph::new(),
        by_fact_id: HashMap::new(),
        by_key: HashMap::new(),
        by_layer: std::array::from_fn(|_| Vec::new()),
        last_conversation: None,
        partitions: HashMap::new(),
        foreign_by_fact_id: HashMap::new(),
    }
}
/// Partition of `nix`; nodes without an explicit entry are `Project`.
pub fn partition_of(&self, nix: NodeIndex) -> Partition {
    match self.partitions.get(&nix) {
        Some(partition) => partition.clone(),
        None => Partition::Project,
    }
}
/// Insert `node` under an explicit partition.
///
/// Foreign nodes are deduplicated by `fact_id`: if the same foreign fact
/// exists already, its index is returned and the new node is dropped.
/// Project inserts fall through to `insert` and get no `partitions`
/// entry (absence means `Project`; see `partition_of`).
pub fn insert_partitioned(&mut self, node: MemNode, partition: Partition) -> NodeIndex {
if let Partition::Foreign { .. } = &partition {
if let Some(fid) = node.fact_id.as_deref() {
if let Some(&existing) = self.foreign_by_fact_id.get(fid) {
return existing;
}
}
let fid = node.fact_id.clone();
let nix = self.insert(node);
if let Some(fid) = fid {
self.foreign_by_fact_id.insert(fid, nix);
}
self.partitions.insert(nix, partition);
return nix;
}
self.insert(node)
}
/// Convenience wrapper: insert `node` into a `Foreign` partition.
pub fn insert_foreign(
    &mut self,
    source_repo: impl Into<String>,
    commit: impl Into<String>,
    node: MemNode,
) -> NodeIndex {
    let partition = Partition::Foreign {
        source_repo: source_repo.into(),
        commit: commit.into(),
    };
    self.insert_partitioned(node, partition)
}
/// Insert a node into the graph and every secondary index.
///
/// Conversation-ish nodes are additionally chained to the previous
/// conversation node with a `TemporalNext` edge.
pub fn insert(&mut self, node: MemNode) -> NodeIndex {
// Layer is 1-based externally; clamp into the four internal buckets.
let layer_idx = (node.layer as usize).saturating_sub(1).min(3);
let fact_id = node.fact_id.clone();
let key = node.key.clone();
let is_conv = matches!(
node.kind,
MemKind::Conversation | MemKind::ConversationSummary
);
let nix = self.inner.add_node(node);
if let Some(fid) = fact_id {
self.by_fact_id.insert(fid, nix);
}
self.by_key.entry(key).or_default().push(nix);
self.by_layer[layer_idx].push(nix);
if is_conv {
if let Some(prev) = self.last_conversation {
self.inner.add_edge(
prev,
nix,
MemEdge {
kind: EdgeKind::TemporalNext,
weight: 1.0,
created_at: Utc::now(),
},
);
}
self.last_conversation = Some(nix);
}
nix
}
/// Add a typed, weighted edge from `from` to `to`, stamped with now.
pub fn link(&mut self, from: NodeIndex, to: NodeIndex, kind: EdgeKind, weight: f32) {
    let edge = MemEdge {
        kind,
        weight,
        created_at: Utc::now(),
    };
    self.inner.add_edge(from, to, edge);
}
/// Replace the fact identified by `old_fact_id` with node `new_nix`.
///
/// Marks the old node `FactSuperseded`, links `new -> old` with a
/// `Supersedes` edge, and returns the set of affected nodes: the old
/// node, its `DependsOn` dependents (returned for the caller to handle —
/// their kind is NOT changed here), and any `Conclusion` citing the old
/// fact as a premise (those ARE flipped to `ConclusionInvalidated`).
/// Returns an empty set when `old_fact_id` is unknown.
pub fn supersede(&mut self, new_nix: NodeIndex, old_fact_id: &str) -> HashSet<NodeIndex> {
let mut invalidated = HashSet::new();
if let Some(&old_nix) = self.by_fact_id.get(old_fact_id) {
if let Some(old_node) = self.inner.node_weight_mut(old_nix) {
old_node.kind = MemKind::FactSuperseded;
}
self.link(new_nix, old_nix, EdgeKind::Supersedes, 1.0);
invalidated.insert(old_nix);
// Nodes with a DependsOn edge onto the old fact.
let dependents: Vec<NodeIndex> = self
.inner
.neighbors_directed(old_nix, Direction::Incoming)
.filter(|&n| {
self.inner
.edges_connecting(n, old_nix)
.any(|e| e.weight().kind == EdgeKind::DependsOn)
})
.collect();
for dep in dependents {
invalidated.insert(dep);
}
// Conclusions citing the old fact as a premise get invalidated.
let citing_conclusions: Vec<NodeIndex> = self
.inner
.neighbors_directed(old_nix, Direction::Incoming)
.filter(|&n| {
self.inner
.edges_connecting(n, old_nix)
.any(|e| e.weight().kind == EdgeKind::CitesPremise)
})
.collect();
for conc_nix in citing_conclusions {
if let Some(conc_node) = self.inner.node_weight_mut(conc_nix) {
if conc_node.kind == MemKind::Conclusion {
conc_node.kind = MemKind::ConclusionInvalidated;
invalidated.insert(conc_nix);
}
}
}
}
invalidated
}
/// Look up a node (index + weight) by its fact id.
pub fn get_by_fact_id(&self, fact_id: &str) -> Option<(NodeIndex, &MemNode)> {
    let &nix = self.by_fact_id.get(fact_id)?;
    let node = self.inner.node_weight(nix)?;
    Some((nix, node))
}
/// All live `Fact` nodes on layer 2 (bucket index 1).
pub fn valid_facts(&self) -> Vec<(NodeIndex, &MemNode)> {
    let mut facts = Vec::new();
    for &nix in &self.by_layer[1] {
        if let Some(node) = self.inner.node_weight(nix) {
            if node.kind == MemKind::Fact {
                facts.push((nix, node));
            }
        }
    }
    facts
}
/// Valid facts flagged as constraints.
pub fn constraints(&self) -> Vec<(NodeIndex, &MemNode)> {
    let mut facts = self.valid_facts();
    facts.retain(|(_, node)| node.is_constraint);
    facts
}
/// All nodes on `layer` (1-based; clamped into the four buckets).
pub fn nodes_by_layer(&self, layer: Layer) -> Vec<(NodeIndex, &MemNode)> {
    let idx = (layer as usize).saturating_sub(1).min(3);
    self.by_layer[idx]
        .iter()
        .filter_map(|&nix| Some((nix, self.inner.node_weight(nix)?)))
        .collect()
}
/// Personalized-PageRank retrieval restricted to the seeds' partition(s).
pub fn retrieve_ppr(
&self,
seeds: &[NodeIndex],
seed_weights: Option<&[f32]>,
damping: f32,
max_results: usize,
) -> Vec<RetrievalHit> {
self.retrieve_ppr_inner(seeds, seed_weights, damping, max_results, true)
}
/// Personalized-PageRank retrieval without the partition filter.
pub fn retrieve_ppr_cross_partition(
&self,
seeds: &[NodeIndex],
seed_weights: Option<&[f32]>,
damping: f32,
max_results: usize,
) -> Vec<RetrievalHit> {
self.retrieve_ppr_inner(seeds, seed_weights, damping, max_results, false)
}
/// Personalized PageRank (PPR) over the entire graph.
///
/// Builds a teleport (reset) vector from the seeds, normalized and capped
/// so no single seed holds more than 40% of the mass, then runs a dense
/// power iteration spreading mass along outgoing edges and backwards
/// along selected incoming edge kinds, each scaled by an edge-kind
/// factor. Tombstoned nodes are filtered from the ranked result; `hops`
/// is always 0 for PPR hits.
fn retrieve_ppr_inner(
&self,
seeds: &[NodeIndex],
seed_weights: Option<&[f32]>,
damping: f32,
max_results: usize,
apply_partition_filter: bool,
) -> Vec<RetrievalHit> {
if seeds.is_empty() {
return Vec::new();
}
let all_nodes: Vec<NodeIndex> = self.inner.node_indices().collect();
let n = all_nodes.len();
if n == 0 {
return Vec::new();
}
// Dense position of each node in the score vectors.
let pos: HashMap<NodeIndex, usize> = all_nodes
.iter()
.enumerate()
.map(|(i, &nix)| (nix, i))
.collect();
// Teleport vector: seed weights (default 1.0), normalized to sum 1.
let mut reset = vec![0.0f32; n];
let mut total_weight = 0.0f32;
for (i, &seed) in seeds.iter().enumerate() {
if let Some(&p) = pos.get(&seed) {
let w = seed_weights
.and_then(|sw| sw.get(i).copied())
.unwrap_or(1.0);
reset[p] = w;
total_weight += w;
}
}
if total_weight > 0.0 {
for v in &mut reset {
*v /= total_weight;
}
}
// Cap any single seed at 40% of the teleport mass, then renormalize.
let cap = 0.4f32;
let mut needs_renorm = false;
for v in &mut reset {
if *v > cap {
*v = cap;
needs_renorm = true;
}
}
if needs_renorm {
let sum: f32 = reset.iter().sum();
if sum > 0.0 {
for v in &mut reset {
*v /= sum;
}
}
}
let mut scores = reset.clone();
// Edge-kind factor applied on top of each edge's own weight.
let edge_mult = |kind: EdgeKind| -> f32 {
match kind {
EdgeKind::Supersedes => 0.3,
EdgeKind::DependsOn => 0.8,
EdgeKind::RelatedTo => 0.6,
EdgeKind::Triggers => 0.9,
EdgeKind::TemporalNext => 0.5,
EdgeKind::Calls => 0.85,
EdgeKind::DefinedIn => 0.9,
EdgeKind::Imports => 0.4,
EdgeKind::CitesPremise => 0.85,
}
};
// Per-node normalizer: total outgoing weight plus the weight of the
// incoming edge kinds that are also traversed in reverse below.
let mut out_weights: Vec<f32> = vec![0.0; n];
for (i, &nix) in all_nodes.iter().enumerate() {
let mut total = 0.0f32;
for e in self.inner.edges(nix) {
total += e.weight().weight * edge_mult(e.weight().kind);
}
for e in self.inner.edges_directed(nix, Direction::Incoming) {
let k = e.weight().kind;
if matches!(
k,
EdgeKind::RelatedTo
| EdgeKind::DependsOn
| EdgeKind::Triggers
| EdgeKind::Calls
| EdgeKind::DefinedIn
| EdgeKind::Imports
) {
total += e.weight().weight * edge_mult(k);
}
}
out_weights[i] = total;
}
// Power iteration: new = (1 - damping) * reset + damping * spread.
let max_iters = 30;
let epsilon = 1e-6;
let mut new_scores = vec![0.0f32; n];
for _iter in 0..max_iters {
new_scores.fill(0.0);
for i in 0..n {
new_scores[i] += (1.0 - damping) * reset[i];
}
for (i, &nix) in all_nodes.iter().enumerate() {
if scores[i] < epsilon {
continue;
}
let out_w = out_weights[i];
if out_w == 0.0 {
continue;
}
// Spread forward along outgoing edges...
for e in self.inner.edges(nix) {
if let Some(&j) = pos.get(&e.target()) {
let w = e.weight().weight * edge_mult(e.weight().kind);
new_scores[j] += damping * scores[i] * w / out_w;
}
}
// ...and backwards along the selected incoming kinds.
for e in self.inner.edges_directed(nix, Direction::Incoming) {
let k = e.weight().kind;
if !matches!(
k,
EdgeKind::RelatedTo
| EdgeKind::DependsOn
| EdgeKind::Triggers
| EdgeKind::Calls
| EdgeKind::DefinedIn
| EdgeKind::Imports
) {
continue;
}
if let Some(&j) = pos.get(&e.source()) {
let w = e.weight().weight * edge_mult(k);
new_scores[j] += damping * scores[i] * w / out_w;
}
}
}
// L1 distance between iterations; stop once converged.
let delta: f32 = scores
.iter()
.zip(new_scores.iter())
.map(|(a, b)| (a - b).abs())
.sum();
std::mem::swap(&mut scores, &mut new_scores);
if delta < epsilon {
break;
}
}
// Keep live nodes with non-negligible mass, ranked by score.
let mut hits: Vec<RetrievalHit> = all_nodes
.iter()
.enumerate()
.filter_map(|(i, &nix)| {
let node = self.inner.node_weight(nix)?;
if !node.is_valid() {
return None;
}
if scores[i] < epsilon {
return None;
}
let hops = 0;
Some(RetrievalHit {
node_ix: nix,
activation: scores[i],
hops,
})
})
.collect();
hits.sort_by(|a, b| {
b.activation
.partial_cmp(&a.activation)
.unwrap_or(std::cmp::Ordering::Equal)
});
if apply_partition_filter {
self.filter_same_partition(&mut hits, seeds);
}
hits.truncate(max_results);
hits
}
/// Drop hits whose partition differs from every seed's partition.
/// No-op when there are no seeds or no partition assignments exist.
fn filter_same_partition(&self, hits: &mut Vec<RetrievalHit>, seeds: &[NodeIndex]) {
    if seeds.is_empty() || self.partitions.is_empty() {
        return;
    }
    let allowed: HashSet<Partition> =
        seeds.iter().map(|&seed| self.partition_of(seed)).collect();
    hits.retain(|hit| allowed.contains(&self.partition_of(hit.node_ix)));
}
/// Spreading-activation retrieval restricted to the seeds' partition(s).
pub fn retrieve(
&self,
seeds: &[NodeIndex],
max_hops: usize,
max_results: usize,
decay: f32,
min_activation: f32,
) -> Vec<RetrievalHit> {
self.retrieve_inner(seeds, max_hops, max_results, decay, min_activation, true)
}
/// Spreading-activation retrieval without the partition filter.
pub fn retrieve_cross_partition(
&self,
seeds: &[NodeIndex],
max_hops: usize,
max_results: usize,
decay: f32,
min_activation: f32,
) -> Vec<RetrievalHit> {
self.retrieve_inner(seeds, max_hops, max_results, decay, min_activation, false)
}
/// Spreading-activation retrieval (breadth-first with decay).
///
/// Seeds start at activation 1.0. Each hop multiplies activation by
/// `decay`, the edge's own weight, and an edge-kind factor; reverse
/// (incoming) traversal uses a separate, more conservative factor table
/// and skips Supersedes/TemporalNext/CitesPremise entirely. A node keeps
/// its best activation seen; expansion stops at `max_hops` or below
/// `min_activation`, and tombstoned nodes are dropped from the results.
fn retrieve_inner(
&self,
seeds: &[NodeIndex],
max_hops: usize,
max_results: usize,
decay: f32,
min_activation: f32,
apply_partition_filter: bool,
) -> Vec<RetrievalHit> {
let mut activations: HashMap<NodeIndex, f32> = HashMap::new();
let mut hops_map: HashMap<NodeIndex, usize> = HashMap::new();
let mut queue: VecDeque<(NodeIndex, f32, usize)> = VecDeque::new();
for &seed in seeds {
activations.insert(seed, 1.0);
hops_map.insert(seed, 0);
queue.push_back((seed, 1.0, 0));
}
while let Some((node, activation, hops)) = queue.pop_front() {
if hops >= max_hops {
continue;
}
// Forward traversal along outgoing edges.
for edge_ref in self.inner.edges(node) {
let neighbor = edge_ref.target();
let edge = edge_ref.weight();
let edge_mult = match edge.kind {
EdgeKind::Supersedes => 0.3,
EdgeKind::DependsOn => 0.8,
EdgeKind::RelatedTo => 0.6,
EdgeKind::Triggers => 0.9,
EdgeKind::TemporalNext => 0.5,
EdgeKind::Calls => 0.85,
EdgeKind::DefinedIn => 0.9,
EdgeKind::Imports => 0.4,
EdgeKind::CitesPremise => 0.85,
};
let new_activation = activation * decay * edge.weight * edge_mult;
if new_activation < min_activation {
continue;
}
// Only re-enqueue when this path beats the best known activation.
let existing = activations.get(&neighbor).copied().unwrap_or(0.0);
if new_activation > existing {
activations.insert(neighbor, new_activation);
hops_map.insert(neighbor, hops + 1);
queue.push_back((neighbor, new_activation, hops + 1));
}
}
// Reverse traversal along selected incoming kinds (weaker factors).
for edge_ref in self.inner.edges_directed(node, Direction::Incoming) {
let neighbor = edge_ref.source();
let edge = edge_ref.weight();
let edge_mult = match edge.kind {
EdgeKind::RelatedTo => 0.6,
EdgeKind::DependsOn => 0.5,
EdgeKind::Triggers => 0.85, EdgeKind::Calls => 0.7, EdgeKind::DefinedIn => 0.8, EdgeKind::Imports => 0.3,
_ => continue,
};
let new_activation = activation * decay * edge.weight * edge_mult;
if new_activation < min_activation {
continue;
}
let existing = activations.get(&neighbor).copied().unwrap_or(0.0);
if new_activation > existing {
activations.insert(neighbor, new_activation);
hops_map.insert(neighbor, hops + 1);
queue.push_back((neighbor, new_activation, hops + 1));
}
}
}
// Rank surviving, still-valid nodes by activation.
let mut hits: Vec<RetrievalHit> = activations
.into_iter()
.filter_map(|(nix, act)| {
let node = self.inner.node_weight(nix)?;
if !node.is_valid() {
return None;
} Some(RetrievalHit {
node_ix: nix,
activation: act,
hops: *hops_map.get(&nix).unwrap_or(&0),
})
})
.collect();
hits.sort_by(|a, b| {
b.activation
.partial_cmp(&a.activation)
.unwrap_or(std::cmp::Ordering::Equal)
});
if apply_partition_filter {
self.filter_same_partition(&mut hits, seeds);
}
hits.truncate(max_results);
hits
}
/// Score every live node against `query`; return up to `max_seeds` node
/// indices with their scores (in [0, 1], higher is better).
///
/// Per query token a node earns IDF-scaled credit: 3x for an exact token
/// match, 2x for a shared stem, 1.5x for a synonym hit, 1x for a raw
/// substring hit. A node's score is earned credit divided by the maximum
/// possible credit (all-exact matches).
pub fn find_seeds_weighted(&self, query: &str, max_seeds: usize) -> (Vec<NodeIndex>, Vec<f32>) {
let q_tokens = tokenize(query);
if q_tokens.is_empty() {
return (Vec::new(), Vec::new());
}
let corpus_idf = self.compute_corpus_idf();
let mut scored: Vec<(f32, NodeIndex)> = Vec::new();
for nix in self.inner.node_indices() {
let node = match self.inner.node_weight(nix) {
Some(n) => n,
None => continue,
};
if !node.is_valid() {
continue;
}
let text = format!("{} {}", node.key, node.value).to_lowercase();
let n_tokens = tokenize_for_content(&text, node.content_type);
let n_stems: HashSet<String> = n_tokens.iter().map(|t| stem(t)).collect();
let mut weighted_hits = 0.0f32;
let mut weighted_total = 0.0f32;
for qt in &q_tokens {
// Terms unseen in the corpus default to IDF 1.0.
let term_idf = corpus_idf.get(qt).copied().unwrap_or(1.0);
weighted_total += term_idf * 3.0;
if n_tokens.contains(qt) {
weighted_hits += term_idf * 3.0;
continue;
}
let qt_stem = stem(qt);
if n_stems.contains(&qt_stem) {
weighted_hits += term_idf * 2.0;
continue;
}
let qt_syns = synonym_expand(qt);
if !qt_syns.is_empty()
&& qt_syns
.iter()
.any(|s| n_tokens.contains(s) || n_stems.contains(&stem(s)))
{
weighted_hits += term_idf * 1.5;
continue;
}
if text.contains(qt.as_str()) {
weighted_hits += term_idf;
}
}
if weighted_total > 0.0 {
let match_score = weighted_hits / weighted_total;
if match_score > 0.0 {
scored.push((match_score, nix));
}
}
}
scored.sort_by(|a, b| b.0.partial_cmp(&a.0).unwrap_or(std::cmp::Ordering::Equal));
scored.truncate(max_seeds);
let seeds: Vec<NodeIndex> = scored.iter().map(|(_, nix)| *nix).collect();
let weights: Vec<f32> = scored.iter().map(|(w, _)| *w).collect();
(seeds, weights)
}
fn compute_corpus_idf(&self) -> HashMap<String, f32> {
let total = self.inner.node_count().max(1) as f32;
let mut doc_freq: HashMap<String, usize> = HashMap::new();
for nix in self.inner.node_indices() {
let node = match self.inner.node_weight(nix) {
Some(n) if n.is_valid() => n,
_ => continue,
};
let text = format!("{} {}", node.key, node.value).to_lowercase();
let tokens = tokenize_for_content(&text, node.content_type);
let mut seen = HashSet::new();
for token in tokens {
if seen.insert(token.clone()) {
*doc_freq.entry(token).or_insert(0) += 1;
}
}
}
doc_freq
.into_iter()
.map(|(term, df)| (term, (total / (1.0 + df as f32)).ln().max(0.1)))
.collect()
}
/// Seed lookup without the per-seed weights.
pub fn find_seeds(&self, query: &str, max_seeds: usize) -> Vec<NodeIndex> {
    let (seeds, _weights) = self.find_seeds_weighted(query, max_seeds);
    seeds
}
/// Estimated token footprint of all live nodes.
pub fn total_tokens(&self) -> usize {
    let mut total = 0;
    for nix in self.inner.node_indices() {
        if let Some(node) = self.inner.node_weight(nix) {
            if node.is_valid() {
                total += node.token_estimate();
            }
        }
    }
    total
}
/// Number of live `Fact` nodes.
pub fn valid_fact_count(&self) -> usize {
self.valid_facts().len()
}
/// Total node count, tombstoned nodes included.
pub fn node_count(&self) -> usize {
self.inner.node_count()
}
/// Total edge count.
pub fn edge_count(&self) -> usize {
self.inner.edge_count()
}
/// Garbage-collect tombstoned nodes not anchored to a live successor.
///
/// For each superseded / deprecated / invalidated node, walk incoming
/// `Supersedes` edges (towards newer versions). The node survives only if
/// a valid node is reached within `max_depth` steps; otherwise it is
/// removed from the graph and every index. Returns the removal count.
pub fn gc_superseded(&mut self, max_depth: usize) -> usize {
let stale: Vec<NodeIndex> = self
.inner
.node_indices()
.filter(|&nix| {
self.inner
.node_weight(nix)
.map(|n| {
matches!(
n.kind,
MemKind::FactSuperseded
| MemKind::SkillDeprecated
| MemKind::ConclusionInvalidated
)
})
.unwrap_or(false)
})
.collect();
let mut to_remove = Vec::new();
for nix in &stale {
let mut depth = 0;
let mut current = *nix;
let mut reachable = false;
// Walk the supersession chain upward; an incoming Supersedes edge
// comes from the node that replaced `current`.
loop {
let parent: Option<NodeIndex> = self
.inner
.neighbors_directed(current, Direction::Incoming)
.find(|&neighbor| {
self.inner
.edges_connecting(neighbor, current)
.any(|e| e.weight().kind == EdgeKind::Supersedes)
});
match parent {
Some(p) => {
depth += 1;
if let Some(node) = self.inner.node_weight(p) {
if node.is_valid() {
reachable = true;
break;
}
}
if depth > max_depth {
break;
}
current = p;
}
None => break, }
}
if !reachable || depth > max_depth {
to_remove.push(*nix);
}
}
let count = to_remove.len();
for nix in to_remove {
let (key, layer, fact_id) = match self.inner.node_weight(nix) {
Some(node) => (node.key.clone(), node.layer, node.fact_id.clone()),
None => continue,
};
self.remove_from_indexes(nix, &key, layer, fact_id.as_deref());
self.inner.remove_node(nix);
}
count
}
/// Remove every node whose `expires_at` is at or before `now`.
pub fn prune_expired(&mut self, now: DateTime<Utc>) {
    // Collect first: removal invalidates the node-index iterator.
    let mut expired: Vec<(NodeIndex, String, Layer, Option<String>)> = Vec::new();
    for nix in self.inner.node_indices() {
        if let Some(node) = self.inner.node_weight(nix) {
            if node.expires_at.map_or(false, |deadline| deadline <= now) {
                expired.push((nix, node.key.clone(), node.layer, node.fact_id.clone()));
            }
        }
    }
    for (nix, key, layer, fact_id) in expired {
        self.remove_from_indexes(nix, &key, layer, fact_id.as_deref());
        self.inner.remove_node(nix);
    }
}
/// Remove the given conversation nodes and repair the temporal chain.
///
/// After deletion, every `TemporalNext` edge in the graph is dropped and
/// the surviving conversation/summary nodes are re-linked in
/// `created_at` order; `last_conversation` becomes the newest survivor
/// (or `None`).
pub fn remove_conversation_nodes(&mut self, to_remove: &[NodeIndex]) {
for &nix in to_remove {
let (key, layer, fact_id) = match self.inner.node_weight(nix) {
Some(node) => (node.key.clone(), node.layer, node.fact_id.clone()),
None => continue,
};
self.remove_from_indexes(nix, &key, layer, fact_id.as_deref());
self.inner.remove_node(nix);
}
// Surviving conversation nodes, oldest first.
let mut survivors: Vec<(NodeIndex, chrono::DateTime<Utc>)> = self
.inner
.node_indices()
.filter_map(|nix| {
let n = self.inner.node_weight(nix)?;
if matches!(n.kind, MemKind::Conversation | MemKind::ConversationSummary) {
Some((nix, n.created_at))
} else {
None
}
})
.collect();
survivors.sort_by_key(|&(_, ts)| ts);
// Rebuild the chain from scratch: drop every TemporalNext edge...
let temporal_edges: Vec<petgraph::graph::EdgeIndex> = self
.inner
.edge_indices()
.filter(|&eix| {
self.inner
.edge_weight(eix)
.map(|e| e.kind == EdgeKind::TemporalNext)
.unwrap_or(false)
})
.collect();
for eix in temporal_edges {
self.inner.remove_edge(eix);
}
// ...then re-link consecutive survivors in chronological order.
for window in survivors.windows(2) {
let (from_nix, _) = window[0];
let (to_nix, _) = window[1];
self.inner.add_edge(
from_nix,
to_nix,
MemEdge {
kind: EdgeKind::TemporalNext,
weight: 1.0,
created_at: Utc::now(),
},
);
}
self.last_conversation = survivors.last().map(|&(nix, _)| nix);
}
/// Remove `nix` from every secondary index; must accompany every node
/// removal, otherwise the maps keep dangling `NodeIndex` values that
/// `StableGraph` may later reuse for unrelated nodes.
fn remove_from_indexes(
    &mut self,
    nix: NodeIndex,
    key: &str,
    layer: Layer,
    fact_id: Option<&str>,
) {
    if let Some(fid) = fact_id {
        self.by_fact_id.remove(fid);
        // Bug fix: the foreign-dedup entry was previously never removed,
        // so `insert_partitioned` could dedup against a deleted (and
        // possibly reused) node index. Only drop it when it actually
        // points at this node.
        if self.foreign_by_fact_id.get(fid) == Some(&nix) {
            self.foreign_by_fact_id.remove(fid);
        }
    }
    if let Some(entries) = self.by_key.get_mut(key) {
        entries.retain(|&n| n != nix);
        if entries.is_empty() {
            self.by_key.remove(key);
        }
    }
    let layer_idx = (layer as usize).saturating_sub(1).min(3);
    self.by_layer[layer_idx].retain(|&n| n != nix);
    // Bug fix: partition assignments were previously leaked on removal,
    // which could mislabel a future node occupying the reused index.
    self.partitions.remove(&nix);
}
/// Reset the graph to the empty state, clearing every index.
pub fn clear(&mut self) {
    self.inner.clear();
    self.by_fact_id.clear();
    self.by_key.clear();
    for bucket in &mut self.by_layer {
        bucket.clear();
    }
    self.last_conversation = None;
    // Bug fix: these two maps were previously left populated, so a
    // cleared graph could still "deduplicate" foreign inserts against
    // node indices that no longer exist (and mislabel partitions of
    // nodes occupying reused indices).
    self.partitions.clear();
    self.foreign_by_fact_id.clear();
}
}
/// `Default` delegates to `MemoryGraph::new()`.
impl Default for MemoryGraph {
fn default() -> Self {
Self::new()
}
}
/// Split natural-language text on whitespace and common separators,
/// lowercasing each piece and dropping empties.
fn tokenize(text: &str) -> HashSet<String> {
    const SEPARATORS: [char; 6] = ['/', ':', '-', '_', '.', ','];
    text.split(|c: char| c.is_whitespace() || SEPARATORS.contains(&c))
        .filter(|piece| !piece.is_empty())
        .map(|piece| piece.to_lowercase())
        .collect()
}
/// Keywords common to most languages, filtered out of code tokens.
const CODE_NOISE_COMMON: &[&str] = &[
"return", "if", "else", "for", "while", "true", "false", "new", "this", "static", "void",
"null", "break", "continue", "try", "catch", "throw",
];
/// Language-specific keyword noise filtered out of code tokens
/// (all lowercase, since `split_camel_case` lowercases every part).
fn noise_for_language(lang: CodeLanguage) -> &'static [&'static str] {
match lang {
CodeLanguage::Rust => &[
"fn", "let", "const", "pub", "mut", "use", "mod", "crate", "super", "struct", "enum",
"impl", "trait", "async", "await", "where", "type", "match", "ref", "move", "dyn",
"unsafe",
],
CodeLanguage::Python => &[
"def", "class", "import", "from", "pass", "none", "self", "elif", "except", "finally",
"yield", "lambda", "nonlocal", "global", "assert", "with", "as", "in", "is", "not",
"and", "or",
],
// JS and TS share one keyword set.
CodeLanguage::JavaScript | CodeLanguage::TypeScript => &[
"function",
"const",
"let",
"var",
"export",
"default",
"async",
"await",
"typeof",
"instanceof",
"undefined",
"require",
],
CodeLanguage::Go => &[
"func",
"package",
"import",
"defer",
"go",
"chan",
"select",
"range",
"switch",
"case",
"fallthrough",
],
CodeLanguage::Java => &[
"public",
"private",
"protected",
"class",
"interface",
"extends",
"implements",
"final",
"abstract",
"synchronized",
"volatile",
"import",
"package",
"throws",
],
CodeLanguage::Cpp => &[
"include",
"namespace",
"using",
"template",
"typename",
"virtual",
"override",
"const",
"auto",
"inline",
"extern",
"typedef",
"class",
"struct",
"enum",
],
CodeLanguage::Shell => &[
"echo", "fi", "do", "done", "then", "esac", "elif", "local", "export", "readonly",
"set",
],
CodeLanguage::Sql => &[
"select", "from", "where", "and", "or", "not", "insert", "into", "update", "set",
"delete", "create", "alter", "drop", "table", "join", "on", "as", "order", "by",
"group", "having", "limit",
],
// Unknown code: a cross-language blend of the most common keywords.
CodeLanguage::Unknown => &[
"fn", "let", "const", "var", "pub", "mut", "return", "def", "class", "import", "from",
"function", "export", "default",
],
}
}
/// Break an identifier into lowercase words, splitting on non-alphanumeric
/// characters and on camelCase / acronym boundaries
/// (e.g. "HTTPServer" -> ["http", "server"]).
fn split_camel_case(s: &str) -> Vec<String> {
    let chars: Vec<char> = s.chars().collect();
    let mut words = Vec::new();
    let mut word = String::new();
    for (i, &c) in chars.iter().enumerate() {
        if !c.is_alphanumeric() {
            // Separator: close out the current word, if any.
            if !word.is_empty() {
                words.push(std::mem::take(&mut word).to_lowercase());
            }
            continue;
        }
        if c.is_uppercase() && !word.is_empty() {
            // Break on lower->Upper ("fooBar") and at the end of an
            // all-caps run followed by lowercase ("HTTPServer").
            let after_lower = i > 0 && chars[i - 1].is_lowercase();
            let acronym_boundary = i > 0
                && chars[i - 1].is_uppercase()
                && chars.get(i + 1).map_or(false, |next| next.is_lowercase());
            if after_lower || acronym_boundary {
                words.push(std::mem::take(&mut word).to_lowercase());
            }
        }
        word.push(c);
    }
    if !word.is_empty() {
        words.push(word.to_lowercase());
    }
    words
}
/// Tokenize source code: split on whitespace plus punctuation, expand
/// camelCase identifiers, and drop one-character pieces and noise
/// keywords (common plus language-specific).
fn tokenize_code(text: &str, lang: CodeLanguage) -> HashSet<String> {
    const PUNCTUATION: &[char] = &[
        '/', ':', '-', '_', '.', ',', '(', ')', '{', '}', '[', ']', ';', '"', '\'', '<', '>',
        '=', '&', '|', '!', '#', '*', '+',
    ];
    let mut noise: HashSet<&str> = CODE_NOISE_COMMON.iter().copied().collect();
    noise.extend(noise_for_language(lang));
    let mut tokens = HashSet::new();
    for raw in text.split(|c: char| c.is_whitespace() || PUNCTUATION.contains(&c)) {
        if raw.is_empty() {
            continue;
        }
        for part in split_camel_case(raw) {
            if part.len() >= 2 && !noise.contains(part.as_str()) {
                tokens.insert(part);
            }
        }
    }
    tokens
}
/// Dispatch tokenizer by content type: code-aware for code, plain otherwise.
pub fn tokenize_for_content(text: &str, content_type: ContentType) -> HashSet<String> {
    if let ContentType::Code(lang) = content_type {
        tokenize_code(text, lang)
    } else {
        tokenize(text)
    }
}
/// Crude suffix-stripping stemmer (lowercases its input first).
///
/// Words shorter than 4 chars pass through unchanged; otherwise the first
/// matching suffix from a fixed list is stripped when at least 3 chars
/// would remain, with a final plural-'s' fallback (never after "ss").
pub fn stem(word: &str) -> String {
    const SUFFIXES: [&str; 21] = [
        "ation", "tion", "ment", "ness", "able", "ible", "ence", "ance", "ing", "ful", "ous",
        "ive", "ize", "ise", "ify", "ate", "ed", "er", "ly", "al", "es",
    ];
    let lower = word.to_lowercase();
    if lower.len() < 4 {
        return lower;
    }
    for suffix in SUFFIXES {
        match lower.strip_suffix(suffix) {
            Some(base) if base.len() >= 3 => return base.to_string(),
            _ => {}
        }
    }
    if lower.ends_with('s') && !lower.ends_with("ss") && lower.len() > 3 {
        lower[..lower.len() - 1].to_string()
    } else {
        lower
    }
}
/// Hand-curated clusters of near-synonymous tokens, used by
/// `synonym_expand` for query expansion: a token matching any member of a
/// group (exactly or by stem) pulls in the whole group.
const SYNONYM_GROUPS: &[&[&str]] = &[
// authentication / credentials
&[
"auth",
"authenticate",
"authorization",
"credential",
"login",
"jwt",
"token",
"verify",
],
// databases
&[
"db", "database", "sql", "query", "postgres", "sqlite", "mysql",
],
// failures
&["err", "error", "exception", "panic", "fail", "failure"],
// configuration
&["config", "configuration", "setting", "preference", "option"],
// messaging
&["msg", "message", "notification", "alert", "event"],
// HTTP requests / routing
&["req", "request", "http", "api", "endpoint", "route"],
// responses / results
&["resp", "response", "reply", "result", "output"],
// memory / storage
&["mem", "memory", "cache", "buffer", "storage"],
// invocation
&["exec", "execute", "run", "invoke", "call", "dispatch"],
// deserialization
&["parse", "deserialize", "decode", "unmarshal", "extract"],
// serialization
&["serial", "serialize", "encode", "marshal", "format"],
// navigation
&["nav", "navigate", "redirect", "goto"],
];
/// Expands a token into its synonym set.
///
/// Returns the union of every `SYNONYM_GROUPS` cluster containing the
/// token, matched either case-insensitively or by shared stem. Returns
/// an empty set when no cluster matches.
pub fn synonym_expand(token: &str) -> HashSet<String> {
    let lower = token.to_lowercase();
    let stemmed = stem(&lower);
    SYNONYM_GROUPS
        .iter()
        .filter(|group| group.iter().any(|&t| t == lower || stem(t) == stemmed))
        .flat_map(|group| group.iter().map(|&t| t.to_string()))
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a minimal layer-2 fact node; fields not under test get
    /// neutral defaults so each test only varies key/value/fact_id.
    fn make_fact(key: &str, value: &str, fact_id: &str) -> MemNode {
        MemNode {
            kind: MemKind::Fact,
            layer: 2,
            key: key.to_string(),
            value: value.to_string(),
            fact_id: Some(fact_id.to_string()),
            scope: "global".to_string(),
            authority: "peer".to_string(),
            is_constraint: false,
            created_at: Utc::now(),
            expires_at: None,
            content_type: ContentType::default(),
            metadata: FactMetadata::default(),
        }
    }

    /// Builds a layer-3 conversation node (no fact id, natural language).
    /// Replaces four identical inline struct literals in the tests below.
    fn make_conv(key: &str, value: &str) -> MemNode {
        MemNode {
            kind: MemKind::Conversation,
            layer: 3,
            key: key.to_string(),
            value: value.to_string(),
            fact_id: None,
            scope: "global".to_string(),
            authority: "peer".to_string(),
            is_constraint: false,
            created_at: Utc::now(),
            expires_at: None,
            content_type: ContentType::NaturalLanguage,
            metadata: FactMetadata::default(),
        }
    }

    #[test]
    fn insert_and_lookup() {
        let mut g = MemoryGraph::new();
        let nix = g.insert(make_fact("language", "Rust", "f1"));
        // Lookup by fact id must return the same node index and payload.
        let (found_nix, node) = g.get_by_fact_id("f1").unwrap();
        assert_eq!(found_nix, nix);
        assert_eq!(node.value, "Rust");
    }

    #[test]
    fn supersession() {
        let mut g = MemoryGraph::new();
        g.insert(make_fact("db", "PostgreSQL", "f1"));
        let new_nix = g.insert(make_fact("db", "SQLite", "f2"));
        // Superseding marks the old fact invalid but keeps it for history.
        let invalidated = g.supersede(new_nix, "f1");
        assert!(!invalidated.is_empty());
        let (_, old) = g.get_by_fact_id("f1").unwrap();
        assert_eq!(old.kind, MemKind::FactSuperseded);
        assert_eq!(g.valid_fact_count(), 1);
    }

    #[test]
    fn dependency_cascade() {
        let mut g = MemoryGraph::new();
        let base = g.insert(make_fact("base", "100", "f1"));
        let derived = g.insert(make_fact("derived", "200", "f2"));
        g.link(derived, base, EdgeKind::DependsOn, 1.0);
        // Superseding a base fact must also invalidate its dependents.
        let new_base = g.insert(make_fact("base", "150", "f3"));
        let invalidated = g.supersede(new_base, "f1");
        assert!(invalidated.contains(&derived));
    }

    #[test]
    fn seed_finding() {
        let mut g = MemoryGraph::new();
        g.insert(make_fact("navigate /ops/dashboard", "Page loads", "f1"));
        g.insert(make_fact("click button", "Button clicked", "f2"));
        let seeds = g.find_seeds("dashboard", 5);
        assert!(!seeds.is_empty());
    }

    #[test]
    fn spreading_activation() {
        let mut g = MemoryGraph::new();
        let a = g.insert(make_fact("project", "CAR runtime", "f1"));
        let b = g.insert(make_fact("language", "Rust", "f2"));
        let c = g.insert(make_fact("testing", "uses proptest", "f3"));
        g.link(a, b, EdgeKind::RelatedTo, 0.8);
        g.link(b, c, EdgeKind::RelatedTo, 0.7);
        // Activation spreads a -> b -> c and decays with distance, so the
        // hits come back in strictly decreasing activation order.
        let hits = g.retrieve(&[a], 3, 10, 0.7, 0.05);
        assert!(hits.len() >= 2);
        assert!(hits[0].activation > hits[1].activation);
    }

    #[test]
    fn conversation_temporal_links() {
        let mut g = MemoryGraph::new();
        let c1 = g.insert(make_conv("user", "How's the project?"));
        let c2 = g.insert(make_conv("assistant", "Going well"));
        // Consecutive conversation nodes get auto-linked in time order.
        assert_eq!(g.edge_count(), 1);
        let edge = g.inner.edges(c1).next().unwrap();
        assert_eq!(edge.weight().kind, EdgeKind::TemporalNext);
        assert_eq!(edge.target(), c2);
    }

    #[test]
    fn constraints_tracked() {
        let mut g = MemoryGraph::new();
        let mut node = make_fact("budget", "Max $500K", "c1");
        node.is_constraint = true;
        g.insert(node);
        g.insert(make_fact("language", "Rust", "f1"));
        // Constraints are indexed separately but still count as valid facts.
        assert_eq!(g.constraints().len(), 1);
        assert_eq!(g.valid_fact_count(), 2);
    }

    #[test]
    fn gc_superseded_removes_deep_chains() {
        let mut g = MemoryGraph::new();
        // Build a supersession chain f1 <- f2 <- f3 <- f4.
        let _n1 = g.insert(make_fact("db", "v1", "f1"));
        let n2 = g.insert(make_fact("db", "v2", "f2"));
        g.supersede(n2, "f1");
        let n3 = g.insert(make_fact("db", "v3", "f3"));
        g.supersede(n3, "f2");
        let n4 = g.insert(make_fact("db", "v4", "f4"));
        g.supersede(n4, "f3");
        assert_eq!(g.node_count(), 4);
        // Keep-depth 1 drops every predecessor beyond the most recent one.
        let removed = g.gc_superseded(1);
        assert_eq!(removed, 2);
        assert_eq!(g.node_count(), 2);
        assert!(g.get_by_fact_id("f4").is_some());
        assert!(g.get_by_fact_id("f1").is_none());
        assert!(g.get_by_fact_id("f2").is_none());
    }

    #[test]
    fn gc_superseded_retains_shallow_chain() {
        let mut g = MemoryGraph::new();
        let _n1 = g.insert(make_fact("db", "old", "f1"));
        let n2 = g.insert(make_fact("db", "new", "f2"));
        g.supersede(n2, "f1");
        // A single superseded predecessor is within the retention depth.
        let removed = g.gc_superseded(1);
        assert_eq!(removed, 0);
        assert_eq!(g.node_count(), 2);
    }

    #[test]
    fn gc_superseded_removes_orphaned() {
        let mut g = MemoryGraph::new();
        // A superseded node with no successor is unreachable history and
        // should always be collected.
        let mut orphan = make_fact("db", "orphan", "f1");
        orphan.kind = MemKind::FactSuperseded;
        g.insert(orphan);
        let removed = g.gc_superseded(1);
        assert_eq!(removed, 1);
        assert_eq!(g.node_count(), 0);
    }

    #[test]
    fn gc_superseded_noop_on_clean_graph() {
        let mut g = MemoryGraph::new();
        g.insert(make_fact("a", "1", "f1"));
        g.insert(make_fact("b", "2", "f2"));
        let removed = g.gc_superseded(1);
        assert_eq!(removed, 0);
        assert_eq!(g.node_count(), 2);
    }

    #[test]
    fn prune_expired_removes_ttl_nodes() {
        let mut g = MemoryGraph::new();
        let now = Utc::now();
        let mut expired = make_fact("temp", "gone", "e1");
        expired.kind = MemKind::Environment;
        expired.layer = 4;
        expired.expires_at = Some(now - chrono::Duration::hours(1));
        g.insert(expired);
        let mut fresh = make_fact("temp", "here", "e2");
        fresh.kind = MemKind::Environment;
        fresh.layer = 4;
        fresh.expires_at = Some(now + chrono::Duration::hours(1));
        g.insert(fresh);
        // Only the node whose TTL has already elapsed is removed.
        g.prune_expired(now);
        assert_eq!(g.node_count(), 1);
    }

    #[test]
    fn prune_expired_cleans_by_key_and_by_layer() {
        let mut g = MemoryGraph::new();
        let now = Utc::now();
        let mut expired = make_fact("env_temp", "gone", "e1");
        expired.kind = MemKind::Environment;
        expired.layer = 4;
        expired.expires_at = Some(now - chrono::Duration::hours(1));
        g.insert(expired);
        assert!(g.by_key.contains_key("env_temp"));
        let layer_idx = (4usize).saturating_sub(1).min(3);
        assert_eq!(g.by_layer[layer_idx].len(), 1);
        // Pruning must purge the secondary indexes, not just the graph.
        g.prune_expired(now);
        assert!(!g.by_key.contains_key("env_temp"));
        assert_eq!(g.by_layer[layer_idx].len(), 0);
        assert!(g.get_by_fact_id("e1").is_none());
    }

    #[test]
    fn gc_superseded_cleans_by_key_and_by_layer() {
        let mut g = MemoryGraph::new();
        let _n1 = g.insert(make_fact("db", "v1", "f1"));
        let n2 = g.insert(make_fact("db", "v2", "f2"));
        g.supersede(n2, "f1");
        let n3 = g.insert(make_fact("db", "v3", "f3"));
        g.supersede(n3, "f2");
        assert_eq!(g.by_key.get("db").map(|v| v.len()).unwrap_or(0), 3);
        let layer_idx = (2usize).saturating_sub(1).min(3);
        let initial_layer_count = g.by_layer[layer_idx].len();
        assert_eq!(initial_layer_count, 3);
        // GC must purge the secondary indexes, not just the graph.
        let removed = g.gc_superseded(0);
        assert_eq!(removed, 2);
        assert_eq!(g.by_key.get("db").map(|v| v.len()).unwrap_or(0), 1);
        assert_eq!(g.by_layer[layer_idx].len(), 1);
    }

    #[test]
    fn remove_conversation_nodes_cleans_by_key_and_by_layer() {
        let mut g = MemoryGraph::new();
        let c1 = g.insert(make_conv("user", "Hello"));
        let c2 = g.insert(make_conv("assistant", "Hi there"));
        assert!(g.by_key.contains_key("user"));
        assert!(g.by_key.contains_key("assistant"));
        let layer_idx = (3usize).saturating_sub(1).min(3);
        assert_eq!(g.by_layer[layer_idx].len(), 2);
        // Bulk removal must purge the secondary indexes as well.
        g.remove_conversation_nodes(&[c1, c2]);
        assert!(!g.by_key.contains_key("user"));
        assert!(!g.by_key.contains_key("assistant"));
        assert_eq!(g.by_layer[layer_idx].len(), 0);
        assert_eq!(g.node_count(), 0);
    }
}