1use smallvec::SmallVec;
4use std::collections::HashMap;
5use std::sync::atomic::{AtomicU32, Ordering};
6
7use crate::error::{M1ndError, M1ndResult};
8use crate::types::*;
9
/// Append-only string interner: maps strings to stable `InternedStr` handles
/// and resolves handles back to `&str`. Handles are dense indices into
/// `strings`; `index` provides the reverse (string → handle) lookup.
pub struct StringInterner {
    // Interned strings, indexed by the handle's inner `u32`.
    strings: Vec<String>,
    // Reverse map from string contents to its handle.
    index: HashMap<String, InternedStr>,
}
21
22impl StringInterner {
23 pub fn new() -> Self {
24 Self {
25 strings: Vec::new(),
26 index: HashMap::new(),
27 }
28 }
29
30 pub fn with_capacity(cap: usize) -> Self {
31 Self {
32 strings: Vec::with_capacity(cap),
33 index: HashMap::with_capacity(cap),
34 }
35 }
36
37 pub fn get_or_intern(&mut self, s: &str) -> InternedStr {
39 if let Some(&idx) = self.index.get(s) {
40 return idx;
41 }
42 let idx = InternedStr(self.strings.len() as u32);
43 self.strings.push(s.to_owned());
44 self.index.insert(s.to_owned(), idx);
45 idx
46 }
47
48 pub fn resolve(&self, idx: InternedStr) -> &str {
50 &self.strings[idx.0 as usize]
51 }
52
53 pub fn try_resolve(&self, idx: InternedStr) -> Option<&str> {
55 self.strings.get(idx.0 as usize).map(|s| s.as_str())
56 }
57
58 pub fn lookup(&self, s: &str) -> Option<InternedStr> {
60 self.index.get(s).copied()
61 }
62
63 pub fn len(&self) -> usize {
64 self.strings.len()
65 }
66
67 pub fn is_empty(&self) -> bool {
68 self.strings.is_empty()
69 }
70}
71
/// An edge recorded by `Graph::add_edge` but not yet laid out in CSR form;
/// buffered in `CsrGraph::pending_edges` and consumed by `Graph::finalize`.
#[derive(Clone)]
pub struct PendingEdge {
    pub source: NodeId,
    pub target: NodeId,
    pub weight: FiniteF32,
    pub inhibitory: bool,
    /// Interned relation label.
    pub relation: InternedStr,
    /// Bidirectional edges are materialized as two CSR slots by `finalize`.
    pub direction: EdgeDirection,
    pub causal_strength: FiniteF32,
}
88
/// Compressed-sparse-row adjacency storage, built by `Graph::finalize`.
/// All per-edge vectors below are parallel arrays indexed by CSR slot.
pub struct CsrGraph {
    /// Forward row offsets: `offsets[i]..offsets[i + 1]` spans node `i`'s
    /// outgoing edge slots. Length is node count + 1 once finalized; empty
    /// before the first finalize.
    pub offsets: Vec<u64>,
    /// Target node of each forward edge slot.
    pub targets: Vec<NodeId>,
    /// Edge weights stored as `f32` bit patterns so they can be updated
    /// atomically in place while readers traverse the graph.
    pub weights: Vec<AtomicU32>,
    /// Whether each edge is inhibitory.
    pub inhibitory: Vec<bool>,
    /// Interned relation label per edge.
    pub relations: Vec<InternedStr>,
    /// Direction flag per edge (a bidirectional edge occupies two slots).
    pub directions: Vec<EdgeDirection>,
    /// Causal strength per edge.
    pub causal_strengths: Vec<FiniteF32>,

    /// Reverse (incoming) row offsets, same layout convention as `offsets`.
    pub rev_offsets: Vec<u64>,
    /// Source node of each incoming edge slot.
    pub rev_sources: Vec<NodeId>,
    /// Forward CSR slot that each reverse entry mirrors.
    pub rev_edge_idx: Vec<EdgeIdx>,

    /// Edges added since the last finalize; folded into the CSR (and then
    /// cleared) by `Graph::finalize`.
    pub pending_edges: Vec<PendingEdge>,
}
120
121impl CsrGraph {
122 pub fn empty() -> Self {
124 Self {
125 offsets: Vec::new(),
126 targets: Vec::new(),
127 weights: Vec::new(),
128 inhibitory: Vec::new(),
129 relations: Vec::new(),
130 directions: Vec::new(),
131 causal_strengths: Vec::new(),
132 rev_offsets: Vec::new(),
133 rev_sources: Vec::new(),
134 rev_edge_idx: Vec::new(),
135 pending_edges: Vec::new(),
136 }
137 }
138
139 pub fn num_edges(&self) -> usize {
141 if self.offsets.is_empty() {
142 0
143 } else {
144 *self.offsets.last().unwrap() as usize
145 }
146 }
147
148 #[inline]
150 pub fn out_range(&self, node: NodeId) -> std::ops::Range<usize> {
151 let lo = self.offsets[node.as_usize()] as usize;
152 let hi = self.offsets[node.as_usize() + 1] as usize;
153 lo..hi
154 }
155
156 #[inline]
158 pub fn in_range(&self, node: NodeId) -> std::ops::Range<usize> {
159 let lo = self.rev_offsets[node.as_usize()] as usize;
160 let hi = self.rev_offsets[node.as_usize() + 1] as usize;
161 lo..hi
162 }
163
164 #[inline]
166 pub fn read_weight(&self, edge: EdgeIdx) -> FiniteF32 {
167 let bits = self.weights[edge.as_usize()].load(Ordering::Relaxed);
168 FiniteF32::new(f32::from_bits(bits))
169 }
170
171 pub fn atomic_max_weight(
174 &self,
175 edge: EdgeIdx,
176 new_val: FiniteF32,
177 max_retries: u32,
178 ) -> M1ndResult<()> {
179 let slot = &self.weights[edge.as_usize()];
180 let new_bits = new_val.get().to_bits();
181 for _ in 0..max_retries {
182 let old_bits = slot.load(Ordering::Relaxed);
183 let old_val = f32::from_bits(old_bits);
184 if old_val >= new_val.get() {
185 return Ok(());
186 }
187 if slot
188 .compare_exchange_weak(old_bits, new_bits, Ordering::Release, Ordering::Relaxed)
189 .is_ok()
190 {
191 return Ok(());
192 }
193 }
194 Err(M1ndError::CasRetryExhausted {
195 edge,
196 limit: max_retries,
197 })
198 }
199
200 pub fn atomic_write_weight(
202 &self,
203 edge: EdgeIdx,
204 new_val: FiniteF32,
205 max_retries: u32,
206 ) -> M1ndResult<()> {
207 let slot = &self.weights[edge.as_usize()];
208 let new_bits = new_val.get().to_bits();
209 for _ in 0..max_retries {
210 let old_bits = slot.load(Ordering::Relaxed);
211 match slot.compare_exchange_weak(
212 old_bits,
213 new_bits,
214 Ordering::Release,
215 Ordering::Relaxed,
216 ) {
217 Ok(_) => return Ok(()),
218 Err(_) => continue,
219 }
220 }
221 Err(M1ndError::CasRetryExhausted {
222 edge,
223 limit: max_retries,
224 })
225 }
226}
227
/// Per-node plasticity accumulators, default-initialized by `Graph::add_node`.
/// NOTE(review): neither field is read or written in this file beyond
/// `default()` — semantics below are inferred from the names; confirm against
/// the plasticity update code elsewhere in the crate.
#[derive(Clone, Copy, Debug, Default)]
pub struct PlasticityNode {
    /// Presumably the running sum of incoming edge weights — TODO confirm.
    pub incoming_weight_sum: FiniteF32,
    /// Presumably an upper bound used by plasticity updates — TODO confirm.
    pub ceiling: FiniteF32,
}
240
/// Compact, interner-backed provenance stored per node. Written by
/// `Graph::set_node_provenance`; `None` and 0 both encode "unset"
/// (`Graph::resolve_node_provenance` maps 0 line numbers back to `None`).
#[derive(Clone, Copy, Debug, Default)]
pub struct NodeProvenance {
    /// Interned source file path, if known.
    pub source_path: Option<InternedStr>,
    /// Start line; 0 means unset.
    pub line_start: u32,
    /// End line; 0 means unset. Defaults to `line_start` when not provided.
    pub line_end: u32,
    /// Interned source excerpt, if any.
    pub excerpt: Option<InternedStr>,
    /// Interned namespace, if any.
    pub namespace: Option<InternedStr>,
    /// Whether this node is the canonical definition site.
    pub canonical: bool,
}
254
/// Borrowed provenance input accepted by `Graph::set_node_provenance` and
/// `Graph::merge_node_provenance`. Empty strings are treated as unset by the
/// setters (they filter out `""` before interning).
#[derive(Clone, Debug, Default)]
pub struct NodeProvenanceInput<'a> {
    pub source_path: Option<&'a str>,
    pub line_start: Option<u32>,
    /// Falls back to `line_start` when absent.
    pub line_end: Option<u32>,
    pub excerpt: Option<&'a str>,
    pub namespace: Option<&'a str>,
    pub canonical: bool,
}
264
/// Fully resolved (owned-string) provenance, as returned by
/// `Graph::resolve_node_provenance`. Unset fields are `None` / `false`.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct ResolvedNodeProvenance {
    pub source_path: Option<String>,
    pub line_start: Option<u32>,
    pub line_end: Option<u32>,
    pub excerpt: Option<String>,
    pub namespace: Option<String>,
    pub canonical: bool,
}
274
275impl ResolvedNodeProvenance {
276 pub fn is_empty(&self) -> bool {
277 self.source_path.is_none()
278 && self.line_start.is_none()
279 && self.line_end.is_none()
280 && self.excerpt.is_none()
281 && self.namespace.is_none()
282 && !self.canonical
283 }
284}
285
/// Struct-of-arrays node storage. Every column below is indexed by `NodeId`
/// and kept at exactly `count` entries — `Graph::add_node` pushes one value
/// to each column per node.
pub struct NodeStorage {
    /// Number of nodes; also the length of every column below.
    pub count: u32,

    /// Per-node activation channels (4 per node), zero-initialized.
    pub activation: Vec<[FiniteF32; 4]>,
    /// PageRank score, max-normalized to 1.0 by `Graph::compute_pagerank`.
    pub pagerank: Vec<FiniteF32>,

    /// Per-node plasticity state, default-initialized.
    pub plasticity: Vec<PlasticityNode>,

    /// Interned display label.
    pub label: Vec<InternedStr>,
    pub node_type: Vec<NodeType>,
    /// Interned tags; up to 6 stored inline before spilling to the heap.
    pub tags: Vec<SmallVec<[InternedStr; 6]>>,
    /// Last-modified timestamp supplied by the caller of `add_node`.
    pub last_modified: Vec<f64>,
    pub change_frequency: Vec<FiniteF32>,
    /// Source provenance; zero/`None` means unset.
    pub provenance: Vec<NodeProvenance>,
}
319
320impl NodeStorage {
321 pub fn new() -> Self {
322 Self {
323 count: 0,
324 activation: Vec::new(),
325 pagerank: Vec::new(),
326 plasticity: Vec::new(),
327 label: Vec::new(),
328 node_type: Vec::new(),
329 tags: Vec::new(),
330 last_modified: Vec::new(),
331 change_frequency: Vec::new(),
332 provenance: Vec::new(),
333 }
334 }
335
336 pub fn with_capacity(cap: usize) -> Self {
337 Self {
338 count: 0,
339 activation: Vec::with_capacity(cap),
340 pagerank: Vec::with_capacity(cap),
341 plasticity: Vec::with_capacity(cap),
342 label: Vec::with_capacity(cap),
343 node_type: Vec::with_capacity(cap),
344 tags: Vec::with_capacity(cap),
345 last_modified: Vec::with_capacity(cap),
346 change_frequency: Vec::with_capacity(cap),
347 provenance: Vec::with_capacity(cap),
348 }
349 }
350}
351
/// Per-edge plasticity state as parallel arrays, indexed by edge slot.
/// Rows are pushed in insertion order by `Graph::add_edge` and permuted into
/// CSR slot order by `Graph::finalize`.
pub struct EdgePlasticity {
    /// Weight at edge creation time.
    pub original_weight: Vec<FiniteF32>,
    /// Current weight (initially equal to `original_weight`).
    pub current_weight: Vec<FiniteF32>,
    /// Times the edge has been strengthened.
    pub strengthen_count: Vec<u16>,
    /// Times the edge has been weakened.
    pub weaken_count: Vec<u16>,
    // NOTE(review): "ltp"/"ltd" read as long-term potentiation/depression
    // flags, but they are only initialized (never set) in this file — confirm.
    pub ltp_applied: Vec<bool>,
    pub ltd_applied: Vec<bool>,
    /// Query counter at last use; initialized to 0.
    pub last_used_query: Vec<u32>,
}
374
375impl EdgePlasticity {
376 pub fn new() -> Self {
377 Self {
378 original_weight: Vec::new(),
379 current_weight: Vec::new(),
380 strengthen_count: Vec::new(),
381 weaken_count: Vec::new(),
382 ltp_applied: Vec::new(),
383 ltd_applied: Vec::new(),
384 last_used_query: Vec::new(),
385 }
386 }
387
388 pub fn with_capacity(cap: usize) -> Self {
389 Self {
390 original_weight: Vec::with_capacity(cap),
391 current_weight: Vec::with_capacity(cap),
392 strengthen_count: Vec::with_capacity(cap),
393 weaken_count: Vec::with_capacity(cap),
394 ltp_applied: Vec::with_capacity(cap),
395 ltd_applied: Vec::with_capacity(cap),
396 last_used_query: Vec::with_capacity(cap),
397 }
398 }
399}
400
/// Columnar knowledge graph: node columns, CSR edge storage, per-edge
/// plasticity, and a string interner, plus an external-id → `NodeId` lookup.
/// Mutations buffer edges; `finalize` builds the traversable CSR.
pub struct Graph {
    pub nodes: NodeStorage,
    pub csr: CsrGraph,
    pub edge_plasticity: EdgePlasticity,
    pub strings: StringInterner,
    /// Interned external id → dense node id.
    pub id_to_node: HashMap<InternedStr, NodeId>,
    /// Bumped on every mutation (`add_node` / `add_edge`).
    pub generation: Generation,
    /// Set once `compute_pagerank` has populated `nodes.pagerank`.
    pub pagerank_computed: bool,
    /// True when the CSR reflects all added edges; cleared by mutations.
    pub finalized: bool,
}
420
impl Graph {
    /// Creates an empty, unfinalized graph at generation 0.
    pub fn new() -> Self {
        Self {
            nodes: NodeStorage::new(),
            csr: CsrGraph::empty(),
            edge_plasticity: EdgePlasticity::new(),
            strings: StringInterner::new(),
            id_to_node: HashMap::new(),
            generation: Generation(0),
            pagerank_computed: false,
            finalized: false,
        }
    }

    /// Creates an empty graph with node/edge columns preallocated. The CSR
    /// itself is always rebuilt from scratch in `finalize`, so it is not
    /// preallocated here.
    pub fn with_capacity(node_cap: usize, edge_cap: usize) -> Self {
        Self {
            nodes: NodeStorage::with_capacity(node_cap),
            csr: CsrGraph::empty(),
            edge_plasticity: EdgePlasticity::with_capacity(edge_cap),
            strings: StringInterner::with_capacity(node_cap),
            id_to_node: HashMap::with_capacity(node_cap),
            generation: Generation(0),
            pagerank_computed: false,
            finalized: false,
        }
    }

    /// Adds a node keyed by `external_id` and returns its dense `NodeId`.
    ///
    /// Interns the id, label, and tags; pushes one entry onto every node
    /// column (activation/pagerank/plasticity/provenance start at their
    /// zero/default values); bumps the generation and clears `finalized`.
    ///
    /// # Errors
    /// `DuplicateNode` if `external_id` is already registered.
    pub fn add_node(
        &mut self,
        external_id: &str,
        label: &str,
        node_type: NodeType,
        tags: &[&str],
        last_modified: f64,
        change_frequency: f32,
    ) -> M1ndResult<NodeId> {
        // Interning first means a duplicate id still leaves the string
        // interned; harmless since the interner is append-only.
        let ext_interned = self.strings.get_or_intern(external_id);
        if let Some(&existing) = self.id_to_node.get(&ext_interned) {
            return Err(M1ndError::DuplicateNode(existing));
        }

        let id = NodeId::new(self.nodes.count);
        self.nodes.count += 1;

        let label_interned = self.strings.get_or_intern(label);
        let tag_interned: SmallVec<[InternedStr; 6]> =
            tags.iter().map(|t| self.strings.get_or_intern(t)).collect();

        // One push per column keeps all columns at `count` entries.
        self.nodes.label.push(label_interned);
        self.nodes.node_type.push(node_type);
        self.nodes.tags.push(tag_interned);
        self.nodes.last_modified.push(last_modified);
        self.nodes
            .change_frequency
            .push(FiniteF32::new(change_frequency));
        self.nodes.activation.push([FiniteF32::ZERO; 4]);
        self.nodes.pagerank.push(FiniteF32::ZERO);
        self.nodes.plasticity.push(PlasticityNode::default());
        self.nodes.provenance.push(NodeProvenance::default());

        self.id_to_node.insert(ext_interned, id);
        self.generation = self.generation.next();
        self.finalized = false;
        Ok(id)
    }

    /// Records an edge between two existing nodes. The edge is buffered in
    /// `pending_edges` and only becomes traversable after `finalize`.
    ///
    /// NOTE(review): the returned `EdgeIdx` is the edge's insertion index
    /// into the plasticity arrays; `finalize` permutes edges into CSR slot
    /// order, so this index does not address the same edge afterwards —
    /// confirm callers do not retain it across finalization.
    ///
    /// # Errors
    /// `DanglingEdge` if either endpoint is not a known node.
    pub fn add_edge(
        &mut self,
        source: NodeId,
        target: NodeId,
        relation: &str,
        weight: FiniteF32,
        direction: EdgeDirection,
        inhibitory: bool,
        causal_strength: FiniteF32,
    ) -> M1ndResult<EdgeIdx> {
        if source.as_usize() >= self.nodes.count as usize {
            return Err(M1ndError::DanglingEdge {
                edge: EdgeIdx::new(self.edge_plasticity.original_weight.len() as u32),
                node: source,
            });
        }
        if target.as_usize() >= self.nodes.count as usize {
            return Err(M1ndError::DanglingEdge {
                edge: EdgeIdx::new(self.edge_plasticity.original_weight.len() as u32),
                node: target,
            });
        }

        let edge_idx = EdgeIdx::new(self.edge_plasticity.original_weight.len() as u32);
        let rel_interned = self.strings.get_or_intern(relation);

        // Plasticity rows are appended in insertion order; `finalize` remaps
        // them into CSR slot order.
        self.edge_plasticity.original_weight.push(weight);
        self.edge_plasticity.current_weight.push(weight);
        self.edge_plasticity.strengthen_count.push(0);
        self.edge_plasticity.weaken_count.push(0);
        self.edge_plasticity.ltp_applied.push(false);
        self.edge_plasticity.ltd_applied.push(false);
        self.edge_plasticity.last_used_query.push(0);

        self.csr.pending_edges.push(PendingEdge {
            source,
            target,
            weight,
            inhibitory,
            relation: rel_interned,
            direction,
            causal_strength,
        });

        self.generation = self.generation.next();
        self.finalized = false;
        Ok(edge_idx)
    }

    /// Builds the forward and reverse CSR from `pending_edges`, remaps the
    /// plasticity arrays into CSR slot order, and recomputes PageRank.
    /// No-op if already finalized.
    ///
    /// Bidirectional edges are materialized as two CSR slots (one in each
    /// endpoint's bucket) that copy from the same plasticity source row.
    ///
    /// NOTE(review): the CSR is rebuilt from `pending_edges` alone, and the
    /// plasticity remap indexes the old arrays by position *within that
    /// batch*. Calling `add_edge` after a finalize and then finalizing again
    /// would discard the previously finalized edges and misalign the
    /// plasticity copy — this looks intended for one finalize per full edge
    /// batch; confirm intended usage.
    pub fn finalize(&mut self) -> M1ndResult<()> {
        if self.finalized {
            return Ok(());
        }
        let n = self.nodes.count as usize;

        // Drain the pending buffer; the stable sort keeps insertion order
        // within each source's bucket while grouping edges by source.
        let edges = std::mem::take(&mut self.csr.pending_edges);
        let mut indexed_edges: Vec<(usize, PendingEdge)> = edges.into_iter().enumerate().collect();
        indexed_edges.sort_by_key(|(_, e)| e.source.0);

        let total_edges = indexed_edges.len();

        let mut offsets = vec![0u64; n + 1];
        let mut targets = Vec::with_capacity(total_edges);
        let mut weights = Vec::with_capacity(total_edges);
        let mut inhibitory = Vec::with_capacity(total_edges);
        let mut relations = Vec::with_capacity(total_edges);
        let mut directions = Vec::with_capacity(total_edges);
        let mut causal_strengths = Vec::with_capacity(total_edges);

        // Counting pass: out-degree per node (bidirectional edges count at
        // both endpoints), then prefix-sum into CSR row offsets.
        for (_, e) in &indexed_edges {
            offsets[e.source.as_usize() + 1] += 1;
            if e.direction == EdgeDirection::Bidirectional {
                offsets[e.target.as_usize() + 1] += 1;
            }
        }
        for i in 1..=n {
            offsets[i] += offsets[i - 1];
        }

        // May exceed `total_edges` because of the mirrored bidirectional slots.
        let total_csr_edges = *offsets.last().unwrap_or(&0) as usize;
        targets.resize(total_csr_edges, NodeId::default());
        weights.extend((0..total_csr_edges).map(|_| AtomicU32::new(0)));
        inhibitory.resize(total_csr_edges, false);
        relations.resize(total_csr_edges, InternedStr::default());
        directions.resize(total_csr_edges, EdgeDirection::Forward);
        causal_strengths.resize(total_csr_edges, FiniteF32::ZERO);

        // (pending-batch index, CSR slot) pairs driving the plasticity remap.
        let mut plasticity_mapping: Vec<(usize, usize)> = Vec::with_capacity(total_csr_edges);

        // Per-node write cursor, starting at each node's row offset.
        let mut cursors = vec![0u64; n];
        for i in 0..n {
            cursors[i] = offsets[i];
        }

        // Scatter pass: place every edge (and its mirror, if bidirectional)
        // at its node's cursor, advancing the cursor per write.
        for &(orig_idx, ref e) in &indexed_edges {
            let src = e.source.as_usize();
            let pos = cursors[src] as usize;
            targets[pos] = e.target;
            weights[pos] = AtomicU32::new(e.weight.get().to_bits());
            inhibitory[pos] = e.inhibitory;
            relations[pos] = e.relation;
            directions[pos] = e.direction;
            causal_strengths[pos] = e.causal_strength;
            cursors[src] += 1;
            plasticity_mapping.push((orig_idx, pos));

            if e.direction == EdgeDirection::Bidirectional {
                let tgt = e.target.as_usize();
                let pos2 = cursors[tgt] as usize;
                targets[pos2] = e.source;
                weights[pos2] = AtomicU32::new(e.weight.get().to_bits());
                inhibitory[pos2] = e.inhibitory;
                relations[pos2] = e.relation;
                directions[pos2] = e.direction;
                causal_strengths[pos2] = e.causal_strength;
                cursors[tgt] += 1;
                plasticity_mapping.push((orig_idx, pos2));
            }
        }

        // Rebuild plasticity in CSR slot order. Both slots of a bidirectional
        // edge copy from the same old row (they share `orig_idx`).
        let old_plasticity = &self.edge_plasticity;
        let mut new_plasticity = EdgePlasticity::with_capacity(total_csr_edges);
        new_plasticity
            .original_weight
            .resize(total_csr_edges, FiniteF32::ZERO);
        new_plasticity
            .current_weight
            .resize(total_csr_edges, FiniteF32::ZERO);
        new_plasticity.strengthen_count.resize(total_csr_edges, 0);
        new_plasticity.weaken_count.resize(total_csr_edges, 0);
        new_plasticity.ltp_applied.resize(total_csr_edges, false);
        new_plasticity.ltd_applied.resize(total_csr_edges, false);
        new_plasticity.last_used_query.resize(total_csr_edges, 0);

        for &(orig_idx, csr_pos) in &plasticity_mapping {
            new_plasticity.original_weight[csr_pos] = old_plasticity.original_weight[orig_idx];
            new_plasticity.current_weight[csr_pos] = old_plasticity.current_weight[orig_idx];
            new_plasticity.strengthen_count[csr_pos] = old_plasticity.strengthen_count[orig_idx];
            new_plasticity.weaken_count[csr_pos] = old_plasticity.weaken_count[orig_idx];
            new_plasticity.ltp_applied[csr_pos] = old_plasticity.ltp_applied[orig_idx];
            new_plasticity.ltd_applied[csr_pos] = old_plasticity.ltd_applied[orig_idx];
            new_plasticity.last_used_query[csr_pos] = old_plasticity.last_used_query[orig_idx];
        }

        self.edge_plasticity = new_plasticity;

        // Reverse (incoming) index: same count / prefix-sum / scatter scheme,
        // driven by the freshly built forward CSR.
        let mut rev_offsets = vec![0u64; n + 1];
        for i in 0..n {
            let lo = offsets[i] as usize;
            let hi = offsets[i + 1] as usize;
            for j in lo..hi {
                let tgt = targets[j].as_usize();
                rev_offsets[tgt + 1] += 1;
            }
        }
        for i in 1..=n {
            rev_offsets[i] += rev_offsets[i - 1];
        }

        let total_rev = *rev_offsets.last().unwrap_or(&0) as usize;
        let mut rev_sources = vec![NodeId::default(); total_rev];
        let mut rev_edge_idx = vec![EdgeIdx::default(); total_rev];

        let mut rev_cursors = vec![0u64; n];
        for i in 0..n {
            rev_cursors[i] = rev_offsets[i];
        }
        for src in 0..n {
            let lo = offsets[src] as usize;
            let hi = offsets[src + 1] as usize;
            for j in lo..hi {
                let tgt = targets[j].as_usize();
                let pos = rev_cursors[tgt] as usize;
                rev_sources[pos] = NodeId::new(src as u32);
                // Each reverse entry records the forward slot it mirrors.
                rev_edge_idx[pos] = EdgeIdx::new(j as u32);
                rev_cursors[tgt] += 1;
            }
        }

        self.csr = CsrGraph {
            offsets,
            targets,
            weights,
            inhibitory,
            relations,
            directions,
            causal_strengths,
            rev_offsets,
            rev_sources,
            rev_edge_idx,
            pending_edges: Vec::new(),
        };

        // Hard-coded PageRank parameters: damping 0.85 (the conventional
        // value), at most 50 iterations, L1 convergence tolerance 1e-6.
        self.compute_pagerank(0.85, 50, 1e-6);

        self.finalized = true;
        Ok(())
    }

    /// Number of nodes in the graph.
    pub fn num_nodes(&self) -> u32 {
        self.nodes.count
    }

    /// Number of forward CSR edge slots (0 before finalization; pending
    /// edges are not counted — see `CsrGraph::num_edges`).
    pub fn num_edges(&self) -> usize {
        self.csr.num_edges()
    }

    /// Maps an external id string to its dense `NodeId`, if registered.
    pub fn resolve_id(&self, external_id: &str) -> Option<NodeId> {
        // If the string was never interned, it cannot name a node.
        let interned = self.strings.lookup(external_id)?;
        self.id_to_node.get(&interned).copied()
    }

    /// Overwrites `node`'s provenance. Empty strings are treated as unset;
    /// `line_end` falls back to `line_start`; absent line numbers are stored
    /// as 0 (the "unset" sentinel). Silently ignores unknown nodes.
    pub fn set_node_provenance(&mut self, node: NodeId, provenance: NodeProvenanceInput<'_>) {
        let idx = node.as_usize();
        if idx >= self.nodes.count as usize {
            return;
        }

        self.nodes.provenance[idx] = NodeProvenance {
            source_path: provenance
                .source_path
                .filter(|value| !value.is_empty())
                .map(|value| self.strings.get_or_intern(value)),
            line_start: provenance.line_start.unwrap_or(0),
            line_end: provenance.line_end.or(provenance.line_start).unwrap_or(0),
            excerpt: provenance
                .excerpt
                .filter(|value| !value.is_empty())
                .map(|value| self.strings.get_or_intern(value)),
            namespace: provenance
                .namespace
                .filter(|value| !value.is_empty())
                .map(|value| self.strings.get_or_intern(value)),
            canonical: provenance.canonical,
        };
    }

    /// Merges `incoming` into `node`'s existing provenance: existing string
    /// fields and `line_start` win over incoming ones, `line_end` takes the
    /// max of both sides, and `canonical` is OR-ed. Silently ignores unknown
    /// nodes.
    pub fn merge_node_provenance(&mut self, node: NodeId, incoming: NodeProvenanceInput<'_>) {
        let idx = node.as_usize();
        if idx >= self.nodes.count as usize {
            return;
        }

        let current = self.resolve_node_provenance(node);
        let line_start = current.line_start.or(incoming.line_start);
        let line_end = match (current.line_end, incoming.line_end.or(incoming.line_start)) {
            (Some(existing), Some(extra)) => Some(existing.max(extra)),
            (Some(existing), None) => Some(existing),
            (None, Some(extra)) => Some(extra),
            // Neither side has an end line: fall back to the merged start.
            (None, None) => line_start,
        };

        self.set_node_provenance(
            node,
            NodeProvenanceInput {
                source_path: current.source_path.as_deref().or(incoming.source_path),
                line_start,
                line_end,
                excerpt: current.excerpt.as_deref().or(incoming.excerpt),
                namespace: current.namespace.as_deref().or(incoming.namespace),
                canonical: current.canonical || incoming.canonical,
            },
        );
    }

    /// Resolves `node`'s stored provenance into owned strings, mapping the
    /// 0 line-number sentinel back to `None`. Returns the default (all unset)
    /// for unknown nodes.
    pub fn resolve_node_provenance(&self, node: NodeId) -> ResolvedNodeProvenance {
        let idx = node.as_usize();
        if idx >= self.nodes.count as usize {
            return ResolvedNodeProvenance::default();
        }

        let provenance = self.nodes.provenance[idx];
        ResolvedNodeProvenance {
            source_path: provenance
                .source_path
                .and_then(|value| self.strings.try_resolve(value).map(str::to_owned)),
            line_start: (provenance.line_start > 0).then_some(provenance.line_start),
            line_end: (provenance.line_end > 0).then_some(provenance.line_end),
            excerpt: provenance
                .excerpt
                .and_then(|value| self.strings.try_resolve(value).map(str::to_owned)),
            namespace: provenance
                .namespace
                .and_then(|value| self.strings.try_resolve(value).map(str::to_owned)),
            canonical: provenance.canonical,
        }
    }

    /// Average out-degree over the finalized CSR (bidirectional edges count
    /// twice, since they occupy two slots). 0.0 for an empty graph.
    pub fn avg_degree(&self) -> f32 {
        if self.nodes.count == 0 {
            0.0
        } else {
            self.csr.num_edges() as f32 / self.nodes.count as f32
        }
    }

    /// Iterative PageRank over the finalized CSR, with the result
    /// max-normalized so the top-ranked node scores 1.0. Requires the
    /// forward and reverse indices to be built (called from `finalize`).
    ///
    /// Dangling nodes (out-degree 0) contribute no rank mass — their share
    /// is dropped rather than redistributed, so the pre-normalization values
    /// are not a probability distribution.
    fn compute_pagerank(&mut self, damping: f32, max_iterations: u32, convergence: f32) {
        let n = self.nodes.count as usize;
        if n == 0 {
            self.pagerank_computed = true;
            return;
        }

        let nf = n as f32;
        // Teleportation term, identical for every node.
        let base = (1.0 - damping) / nf;
        let mut pr = vec![1.0f32 / nf; n];
        let mut new_pr = vec![0.0f32; n];

        // Out-degrees from forward CSR row widths, used to split each node's
        // rank among its successors.
        let mut out_degree = vec![0u32; n];
        for i in 0..n {
            let lo = self.csr.offsets[i] as usize;
            let hi = self.csr.offsets[i + 1] as usize;
            out_degree[i] = (hi - lo) as u32;
        }

        for _iter in 0..max_iterations {
            new_pr.fill(base);

            // Pull-style update: each node sums rank from its in-neighbors
            // via the reverse index.
            for i in 0..n {
                let lo = self.csr.rev_offsets[i] as usize;
                let hi = self.csr.rev_offsets[i + 1] as usize;
                let mut rank_sum = 0.0f32;
                for j in lo..hi {
                    let src = self.csr.rev_sources[j].as_usize();
                    let deg = out_degree[src];
                    if deg > 0 {
                        rank_sum += pr[src] / deg as f32;
                    }
                }
                new_pr[i] += damping * rank_sum;
            }

            // L1 distance between successive iterates as the stop criterion.
            let mut delta = 0.0f32;
            for i in 0..n {
                delta += (new_pr[i] - pr[i]).abs();
            }
            std::mem::swap(&mut pr, &mut new_pr);
            if delta < convergence {
                break;
            }
        }

        // Normalize so the maximum rank is 1.0; skip if all ranks are 0
        // (avoids division by zero).
        let max_pr = pr.iter().cloned().fold(0.0f32, f32::max);
        if max_pr > 0.0 {
            for i in 0..n {
                self.nodes.pagerank[i] = FiniteF32::new(pr[i] / max_pr);
            }
        }
        self.pagerank_computed = true;
    }
}
873
874pub type SharedGraph = std::sync::Arc<parking_lot::RwLock<Graph>>;