1use std::cmp::Ordering;
4use std::collections::{BinaryHeap, HashMap};
5use std::time::Instant;
6
7use crate::error::M1ndResult;
8use crate::graph::Graph;
9use crate::types::*;
10
/// Fixed-size Bloom filter over node ids, used as a probabilistic visited-set
/// during heap-based propagation: no false negatives, false positives at
/// roughly the rate requested in `with_capacity`.
pub struct BloomFilter {
    // Bit array packed into 64-bit words.
    bits: Vec<u64>,
    // Logical number of bits addressed in `bits` (always >= 64).
    num_bits: usize,
    // Number of hash probes per item, clamped to [1, 16].
    num_hashes: u32,
}
23
24impl BloomFilter {
25 pub fn with_capacity(expected_items: usize, fpr: f64) -> Self {
27 let expected = expected_items.max(1);
28 let m = (-(expected as f64) * fpr.ln() / (2.0f64.ln().powi(2))) as usize;
30 let num_bits = m.max(64);
31 let num_words = (num_bits + 63) / 64;
32 let k = ((num_bits as f64 / expected as f64) * 2.0f64.ln()).ceil() as u32;
34 let num_hashes = k.max(1).min(16);
35 Self {
36 bits: vec![0u64; num_words],
37 num_bits,
38 num_hashes,
39 }
40 }
41
42 #[inline]
43 fn compute_hashes(&self, item: u32, out: &mut [usize; 16]) -> u32 {
44 let h1 = item.wrapping_mul(2654435761) as usize;
45 let h2 = item.wrapping_mul(2246822519).wrapping_add(1) as usize;
46 let m = self.num_bits;
47 let k = self.num_hashes.min(16);
48 for i in 0..k as usize {
49 out[i] = h1.wrapping_add(i.wrapping_mul(h2)) % m;
50 }
51 k
52 }
53
54 pub fn insert(&mut self, item: NodeId) {
55 let mut hashes = [0usize; 16];
56 let k = self.compute_hashes(item.0, &mut hashes);
57 for i in 0..k as usize {
58 let h = hashes[i];
59 self.bits[h >> 6] |= 1u64 << (h & 63);
60 }
61 }
62
63 pub fn probably_contains(&self, item: NodeId) -> bool {
64 let mut hashes = [0usize; 16];
65 let k = self.compute_hashes(item.0, &mut hashes);
66 for i in 0..k as usize {
67 let h = hashes[i];
68 if self.bits[h >> 6] & (1u64 << (h & 63)) == 0 {
69 return false;
70 }
71 }
72 true
73 }
74
75 pub fn clear(&mut self) {
76 self.bits.fill(0);
77 }
78}
79
/// A node's final activation after merging all four dimensions.
#[derive(Clone, Debug)]
pub struct ActivatedNode {
    pub node: NodeId,
    // Combined weighted activation, including any resonance bonus.
    pub activation: FiniteF32,
    // Per-dimension raw scores, indexed in the same order as the
    // `DimensionResult` array given to `merge_dimensions`.
    pub dimensions: [FiniteF32; 4],
    // How many dimensions scored above the contribution threshold.
    pub active_dimension_count: u8,
}
96
/// Output of a full multi-dimension activation pass.
#[derive(Clone, Debug)]
pub struct ActivationResult {
    // Activated nodes, strongest first, truncated to the requested top-k.
    pub activated: Vec<ActivatedNode>,
    // Seeds that initiated the pass. NOTE(review): `merge_dimensions` leaves
    // this empty — presumably populated by a caller; confirm.
    pub seeds: Vec<(NodeId, FiniteF32)>,
    // Wall-clock duration of the pass, in nanoseconds.
    pub elapsed_ns: u64,
    // NOTE(review): always false in this file — semantics of the "XLR
    // fallback" are defined elsewhere; confirm before relying on it.
    pub xlr_fallback_used: bool,
}
109
/// Scores produced by a single activation dimension.
#[derive(Clone, Debug)]
pub struct DimensionResult {
    // (node, score) pairs, sorted descending by score.
    pub scores: Vec<(NodeId, FiniteF32)>,
    // Which dimension produced these scores.
    pub dimension: Dimension,
    // Wall-clock time spent computing them, in nanoseconds.
    pub elapsed_ns: u64,
}
121
/// Strategy interface for spreading activation through the graph.
///
/// Implementations must be `Send + Sync` so a single engine instance can be
/// shared across threads.
pub trait ActivationEngine: Send + Sync {
    /// Propagate activation from `seeds` through `graph` under `config`,
    /// returning per-node scores for one dimension.
    fn propagate(
        &self,
        graph: &Graph,
        seeds: &[(NodeId, FiniteF32)],
        config: &PropagationConfig,
    ) -> M1ndResult<DimensionResult>;
}
139
/// Frontier-based (breadth-first) spreading-activation engine. Stateless: a
/// unit value carries the whole strategy.
pub struct WavefrontEngine;

impl WavefrontEngine {
    /// Create a new engine instance (no configuration or state).
    pub fn new() -> Self {
        Self
    }
}

/// `Default` mirrors `new()` (clippy `new_without_default`); the engine has
/// no state, so any instance is equivalent.
impl Default for WavefrontEngine {
    fn default() -> Self {
        Self::new()
    }
}
155
impl ActivationEngine for WavefrontEngine {
    /// Breadth-first ("wavefront") spreading activation over the structural
    /// dimension.
    ///
    /// Each depth level expands a frontier of newly-reached nodes; a node's
    /// activation is the maximum excitatory signal it has received.
    /// Excitatory edges propagate `src_act * weight * decay`; inhibitory
    /// edges subtract a scaled signal (clamped at zero) and never extend the
    /// frontier. Depth is capped at `min(config.max_depth, 20)`.
    fn propagate(
        &self,
        graph: &Graph,
        seeds: &[(NodeId, FiniteF32)],
        config: &PropagationConfig,
    ) -> M1ndResult<DimensionResult> {
        let start = Instant::now();
        let n = graph.num_nodes() as usize;
        // Trivial cases: empty graph or no seeds -> empty, timed result.
        if n == 0 || seeds.is_empty() {
            return Ok(DimensionResult {
                scores: Vec::new(),
                dimension: Dimension::Structural,
                elapsed_ns: start.elapsed().as_nanos() as u64,
            });
        }

        let threshold = config.threshold.get();
        let decay = config.decay.get();
        // Depth is hard-capped at 20 regardless of config.
        let max_depth = config.max_depth.min(20) as usize;
        let mut activation = vec![0.0f32; n];
        let mut visited = vec![false; n];

        // Seed pass: keep the max strength per node (capped at saturation);
        // each in-range node joins the initial frontier exactly once.
        let mut frontier: Vec<NodeId> = Vec::new();
        for &(node, score) in seeds {
            let idx = node.as_usize();
            if idx < n {
                let s = score.get().min(config.saturation_cap.get());
                if s > activation[idx] {
                    activation[idx] = s;
                }
                if !visited[idx] {
                    frontier.push(node);
                    visited[idx] = true;
                }
            }
        }

        for _depth in 0..max_depth {
            if frontier.is_empty() {
                break;
            }
            let mut next_frontier: Vec<NodeId> = Vec::new();

            for &src in &frontier {
                let src_act = activation[src.as_usize()];
                // Sub-threshold sources do not propagate further.
                if src_act < threshold {
                    continue;
                }

                let range = graph.csr.out_range(src);
                for j in range {
                    let tgt = graph.csr.targets[j];
                    let w = graph.csr.read_weight(EdgeIdx::new(j as u32)).get();
                    let is_inhib = graph.csr.inhibitory[j];

                    let mut signal = src_act * w * decay;
                    if is_inhib {
                        // Inhibitory edges flip the sign and apply their own factor.
                        signal = -signal * config.inhibitory_factor.get();
                    }

                    let tgt_idx = tgt.as_usize();
                    if tgt_idx >= n {
                        continue;
                    }

                    if !is_inhib && signal > threshold {
                        // Excitatory: keep the max signal; first touch joins
                        // the next frontier.
                        if signal > activation[tgt_idx] {
                            activation[tgt_idx] = signal;
                        }
                        if !visited[tgt_idx] {
                            visited[tgt_idx] = true;
                            next_frontier.push(tgt);
                        }
                    } else if is_inhib {
                        // Inhibitory: `signal` is negative here, so this
                        // subtracts, clamped at zero; target is not enqueued.
                        activation[tgt_idx] = (activation[tgt_idx] + signal).max(0.0);
                    }
                }
            }

            frontier = next_frontier;
        }

        // Collect all positively-activated nodes, strongest first.
        let mut scores: Vec<(NodeId, FiniteF32)> = activation
            .iter()
            .enumerate()
            .filter(|(_, &v)| v > 0.0)
            .map(|(i, &v)| (NodeId::new(i as u32), FiniteF32::new(v)))
            .collect();
        scores.sort_by(|a, b| b.1.cmp(&a.1));

        Ok(DimensionResult {
            scores,
            dimension: Dimension::Structural,
            elapsed_ns: start.elapsed().as_nanos() as u64,
        })
    }
}
262
263#[derive(Clone, Copy)]
270struct HeapEntry {
271 node: NodeId,
272 activation: f32,
273}
274
275impl PartialEq for HeapEntry {
276 fn eq(&self, other: &Self) -> bool {
277 self.activation == other.activation
278 }
279}
280impl Eq for HeapEntry {}
281impl PartialOrd for HeapEntry {
282 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
283 Some(self.cmp(other))
284 }
285}
286impl Ord for HeapEntry {
287 fn cmp(&self, other: &Self) -> Ordering {
288 self.activation.total_cmp(&other.activation)
289 }
290}
291
/// Best-first (priority-queue) spreading-activation engine. Stateless: a
/// unit value carries the whole strategy.
pub struct HeapEngine;

impl HeapEngine {
    /// Create a new engine instance (no configuration or state).
    pub fn new() -> Self {
        Self
    }
}

/// `Default` mirrors `new()` (clippy `new_without_default`); the engine has
/// no state, so any instance is equivalent.
impl Default for HeapEngine {
    fn default() -> Self {
        Self::new()
    }
}
302
303impl ActivationEngine for HeapEngine {
304 fn propagate(
305 &self,
306 graph: &Graph,
307 seeds: &[(NodeId, FiniteF32)],
308 config: &PropagationConfig,
309 ) -> M1ndResult<DimensionResult> {
310 let start = Instant::now();
311 let n = graph.num_nodes() as usize;
312 if n == 0 || seeds.is_empty() {
313 return Ok(DimensionResult {
314 scores: Vec::new(),
315 dimension: Dimension::Structural,
316 elapsed_ns: start.elapsed().as_nanos() as u64,
317 });
318 }
319
320 let threshold = config.threshold.get();
321 let decay = config.decay.get();
322
323 let mut activation = vec![0.0f32; n];
324 let mut bloom = BloomFilter::with_capacity(n, 0.01);
325 let mut heap = BinaryHeap::new();
326
327 for &(node, score) in seeds {
329 let idx = node.as_usize();
330 if idx < n {
331 let s = score.get().min(config.saturation_cap.get());
332 activation[idx] = s;
333 heap.push(HeapEntry {
334 node,
335 activation: s,
336 });
337 bloom.insert(node);
338 }
339 }
340
341 let mut depth_counter = 0u32;
342 let max_ops = (n as u32)
343 .saturating_mul(config.max_depth as u32)
344 .max(10000);
345
346 while let Some(entry) = heap.pop() {
347 if entry.activation < threshold {
348 break; }
350 depth_counter += 1;
351 if depth_counter > max_ops {
352 break;
353 }
354
355 let src = entry.node;
356 let src_act = activation[src.as_usize()];
357
358 let range = graph.csr.out_range(src);
359 for j in range {
360 let tgt = graph.csr.targets[j];
361 let w = graph.csr.read_weight(EdgeIdx::new(j as u32)).get();
362 let is_inhib = graph.csr.inhibitory[j];
363
364 let mut signal = src_act * w * decay;
365 if is_inhib {
366 signal = -signal * config.inhibitory_factor.get();
367 }
368
369 let tgt_idx = tgt.as_usize();
370 if tgt_idx >= n {
371 continue;
372 }
373
374 if !is_inhib && signal > threshold && signal > activation[tgt_idx] {
375 activation[tgt_idx] = signal;
376 if !bloom.probably_contains(tgt) {
377 bloom.insert(tgt);
378 heap.push(HeapEntry {
379 node: tgt,
380 activation: signal,
381 });
382 }
383 } else if is_inhib {
384 activation[tgt_idx] = (activation[tgt_idx] + signal).max(0.0);
385 }
386 }
387 }
388
389 let mut scores: Vec<(NodeId, FiniteF32)> = activation
391 .iter()
392 .enumerate()
393 .filter(|(_, &v)| v > 0.0)
394 .map(|(i, &v)| (NodeId::new(i as u32), FiniteF32::new(v)))
395 .collect();
396 scores.sort_by(|a, b| b.1.cmp(&a.1));
397
398 Ok(DimensionResult {
399 scores,
400 dimension: Dimension::Structural,
401 elapsed_ns: start.elapsed().as_nanos() as u64,
402 })
403 }
404}
405
406pub struct HybridEngine {
414 wavefront: WavefrontEngine,
415 heap: HeapEngine,
416}
417
418impl HybridEngine {
419 pub fn new() -> Self {
420 Self {
421 wavefront: WavefrontEngine::new(),
422 heap: HeapEngine::new(),
423 }
424 }
425
426 fn prefer_heap(graph: &Graph, seed_count: usize) -> bool {
429 let seed_ratio = seed_count as f64 / graph.num_nodes().max(1) as f64;
430 seed_ratio < 0.001 && graph.avg_degree() < 8.0
431 }
432}
433
434impl ActivationEngine for HybridEngine {
435 fn propagate(
436 &self,
437 graph: &Graph,
438 seeds: &[(NodeId, FiniteF32)],
439 config: &PropagationConfig,
440 ) -> M1ndResult<DimensionResult> {
441 if Self::prefer_heap(graph, seeds.len()) {
442 self.heap.propagate(graph, seeds, config)
443 } else {
444 self.wavefront.propagate(graph, seeds, config)
445 }
446 }
447}
448
449pub fn activate_semantic(
457 _graph: &Graph,
458 semantic: &crate::semantic::SemanticEngine,
459 query: &str,
460 top_k: usize,
461) -> M1ndResult<DimensionResult> {
462 let start = Instant::now();
463 let scores = semantic.query_fast(_graph, query, top_k)?;
464 Ok(DimensionResult {
465 scores,
466 dimension: Dimension::Semantic,
467 elapsed_ns: start.elapsed().as_nanos() as u64,
468 })
469}
470
471pub fn activate_temporal(
479 graph: &Graph,
480 seeds: &[(NodeId, FiniteF32)],
481 weights: &TemporalWeights,
482) -> M1ndResult<DimensionResult> {
483 let start = Instant::now();
484 let n = graph.num_nodes() as usize;
485 let now = std::time::SystemTime::now()
486 .duration_since(std::time::UNIX_EPOCH)
487 .map(|d| d.as_secs_f64())
488 .unwrap_or(0.0);
489
490 let half_life_secs = 168.0 * 3600.0; let k = std::f64::consts::LN_2 / half_life_secs;
492
493 let mut scores = Vec::new();
494 for &(node, seed_strength) in seeds {
495 let idx = node.as_usize();
496 if idx >= n {
497 continue;
498 }
499 let last_mod = graph.nodes.last_modified[idx];
500 let age_secs = (now - last_mod).max(0.0);
501 let recency = (-k * age_secs).exp() as f32;
502 let frequency = graph.nodes.change_frequency[idx].get();
503
504 let score = recency * weights.recency.get() + frequency * weights.frequency.get();
505 let combined = score * seed_strength.get();
506 if combined > 0.0 {
507 scores.push((node, FiniteF32::new(combined)));
508 }
509 }
510 scores.sort_by(|a, b| b.1.cmp(&a.1));
511
512 Ok(DimensionResult {
513 scores,
514 dimension: Dimension::Temporal,
515 elapsed_ns: start.elapsed().as_nanos() as u64,
516 })
517}
518
519pub fn activate_causal(
527 graph: &Graph,
528 seeds: &[(NodeId, FiniteF32)],
529 config: &PropagationConfig,
530) -> M1ndResult<DimensionResult> {
531 let start = Instant::now();
532 let n = graph.num_nodes() as usize;
533 if n == 0 || seeds.is_empty() {
534 return Ok(DimensionResult {
535 scores: Vec::new(),
536 dimension: Dimension::Causal,
537 elapsed_ns: start.elapsed().as_nanos() as u64,
538 });
539 }
540
541 let threshold = config.threshold.get();
542 let decay = config.decay.get();
543 let max_depth = config.max_depth.min(20) as usize;
544
545 let mut activation = vec![0.0f32; n];
547 let mut frontier: Vec<NodeId> = Vec::new();
548 let mut visited = vec![false; n];
549
550 for &(node, score) in seeds {
551 let idx = node.as_usize();
552 if idx < n {
553 activation[idx] = score.get();
554 if !visited[idx] {
555 frontier.push(node);
556 visited[idx] = true;
557 }
558 }
559 }
560
561 for _depth in 0..max_depth {
562 if frontier.is_empty() {
563 break;
564 }
565 let mut next_frontier = Vec::new();
566 for &src in &frontier {
567 let src_act = activation[src.as_usize()];
568 if src_act < threshold {
569 continue;
570 }
571 let range = graph.csr.out_range(src);
572 for j in range {
573 let causal = graph.csr.causal_strengths[j].get();
574 if causal <= 0.0 {
575 continue; }
577 let tgt = graph.csr.targets[j];
578 let w = graph.csr.read_weight(EdgeIdx::new(j as u32)).get();
579 let signal = src_act * w * causal * decay;
580 let tgt_idx = tgt.as_usize();
581 if tgt_idx < n && signal > threshold && signal > activation[tgt_idx] {
582 activation[tgt_idx] = signal;
583 if !visited[tgt_idx] {
584 visited[tgt_idx] = true;
585 next_frontier.push(tgt);
586 }
587 }
588 }
589 }
590 frontier = next_frontier;
591 }
592
593 let mut back_frontier: Vec<NodeId> = Vec::new();
595 let mut back_visited = vec![false; n];
596 for &(node, _) in seeds {
597 let idx = node.as_usize();
598 if idx < n && !back_visited[idx] {
599 back_frontier.push(node);
600 back_visited[idx] = true;
601 }
602 }
603
604 for _depth in 0..max_depth {
605 if back_frontier.is_empty() {
606 break;
607 }
608 let mut next = Vec::new();
609 for &src in &back_frontier {
610 let src_act = activation[src.as_usize()]
611 .max(seeds.iter().find(|s| s.0 == src).map_or(0.0, |s| s.1.get()));
612 if src_act < threshold {
613 continue;
614 }
615 let range = graph.csr.in_range(src);
616 for j in range {
617 let fwd_idx = graph.csr.rev_edge_idx[j];
618 let causal = graph.csr.causal_strengths[fwd_idx.as_usize()].get();
619 if causal <= 0.0 {
620 continue;
621 }
622 let rev_src = graph.csr.rev_sources[j];
623 let w = graph.csr.read_weight(fwd_idx).get();
624 let signal = src_act * w * causal * decay * 0.7; let idx = rev_src.as_usize();
626 if idx < n && signal > threshold && signal > activation[idx] {
627 activation[idx] = signal;
628 if !back_visited[idx] {
629 back_visited[idx] = true;
630 next.push(rev_src);
631 }
632 }
633 }
634 }
635 back_frontier = next;
636 }
637
638 let mut scores: Vec<(NodeId, FiniteF32)> = activation
639 .iter()
640 .enumerate()
641 .filter(|(_, &v)| v > 0.0)
642 .map(|(i, &v)| (NodeId::new(i as u32), FiniteF32::new(v)))
643 .collect();
644 scores.sort_by(|a, b| b.1.cmp(&a.1));
645
646 Ok(DimensionResult {
647 scores,
648 dimension: Dimension::Causal,
649 elapsed_ns: start.elapsed().as_nanos() as u64,
650 })
651}
652
// Boost factor for PageRank-derived scores. Not referenced in this file —
// presumably consumed elsewhere in the module; TODO confirm.
const PAGERANK_BOOST: f32 = 0.1;
// Minimum per-dimension score for that dimension to count toward
// `active_dimension_count` in `merge_dimensions`.
const DIM_CONTRIBUTION_THRESHOLD: f32 = 0.01;
663
664pub fn merge_dimensions(
667 results: &[DimensionResult; 4],
668 top_k: usize,
669) -> M1ndResult<ActivationResult> {
670 let start = Instant::now();
671
672 let mut weights = DIMENSION_WEIGHTS;
674 let mut total_active_weight = 0.0f32;
675 let mut active_mask = [false; 4];
676 for (i, r) in results.iter().enumerate() {
677 if !r.scores.is_empty() {
678 active_mask[i] = true;
679 total_active_weight += weights[i];
680 }
681 }
682
683 if total_active_weight > 0.0 && total_active_weight < 1.0 {
685 for i in 0..4 {
686 if active_mask[i] {
687 weights[i] /= total_active_weight;
688 } else {
689 weights[i] = 0.0;
690 }
691 }
692 }
693
694 let mut node_scores: HashMap<u32, [f32; 4]> = HashMap::new();
696 for (dim_idx, result) in results.iter().enumerate() {
697 for &(node, score) in &result.scores {
698 let entry = node_scores.entry(node.0).or_insert([0.0; 4]);
699 entry[dim_idx] = score.get();
700 }
701 }
702
703 let mut activated: Vec<ActivatedNode> = node_scores
705 .iter()
706 .map(|(&node_id, dims)| {
707 let mut combined = 0.0f32;
709 let mut dim_count = 0u8;
710 for i in 0..4 {
711 combined += dims[i] * weights[i];
712 if dims[i] > DIM_CONTRIBUTION_THRESHOLD {
713 dim_count += 1;
714 }
715 }
716
717 if dim_count >= 4 {
719 combined *= RESONANCE_BONUS_4DIM;
720 } else if dim_count >= 3 {
721 combined *= RESONANCE_BONUS_3DIM;
722 }
723
724 ActivatedNode {
725 node: NodeId::new(node_id),
726 activation: FiniteF32::new(combined),
727 dimensions: [
728 FiniteF32::new(dims[0]),
729 FiniteF32::new(dims[1]),
730 FiniteF32::new(dims[2]),
731 FiniteF32::new(dims[3]),
732 ],
733 active_dimension_count: dim_count,
734 }
735 })
736 .collect();
737
738 activated.sort_by(|a, b| b.activation.cmp(&a.activation));
740 activated.truncate(top_k);
741
742 Ok(ActivationResult {
743 activated,
744 seeds: Vec::new(), elapsed_ns: start.elapsed().as_nanos() as u64,
746 xlr_fallback_used: false,
747 })
748}