use std::collections::HashMap;

use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use scribe_core::{Result, error::ScribeError};

use crate::graph::{DependencyGraph, NodeId, InternalNodeId, GraphStatistics};

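/// Results of a PageRank computation over a dependency graph, including the
/// per-node scores plus convergence and performance diagnostics.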
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct PageRankResults {
    /// Final PageRank score per node (only nodes at or above `min_score_threshold`).
    pub scores: HashMap<NodeId, f64>,

    /// Number of iterations executed before convergence or hitting the iteration cap.
    pub iterations_converged: usize,

    /// Convergence difference (sum of absolute score changes) from the final iteration.
    pub convergence_epsilon: f64,

    /// Structural statistics of the analyzed graph.
    pub graph_stats: GraphStatistics,

    /// Configuration used for this run.
    pub parameters: PageRankConfig,

    /// Timing and resource metrics collected during the run.
    pub performance_metrics: PerformanceMetrics,
}

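/// Tunable parameters for the PageRank computation.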
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct PageRankConfig {
    /// Probability of following an edge rather than teleporting (typically 0.85).
    pub damping_factor: f64,

    /// Upper bound on the number of iterations.
    pub max_iterations: usize,

    /// Convergence threshold: stop once the summed score change drops below this value.
    pub epsilon: f64,

    /// Whether to compute each iteration in parallel with rayon.
    pub use_parallel: bool,

    /// Scores below this value are dropped from the final results.
    pub min_score_threshold: f64,
}

impl Default for PageRankConfig {
    fn default() -> Self {
        Self {
            damping_factor: 0.85,
            max_iterations: 50,
            epsilon: 1e-6,
            use_parallel: true,
            min_score_threshold: 1e-8,
        }
    }
}

impl PageRankConfig {
    /// Preset tuned for everyday code analysis: moderate precision, fast convergence.
    pub fn for_code_analysis() -> Self {
        Self {
            damping_factor: 0.85,
            max_iterations: 30,
            epsilon: 1e-5,
            use_parallel: true,
            min_score_threshold: 1e-6,
        }
    }

    /// Preset for very large codebases: fewer iterations and a looser epsilon.
    pub fn for_large_codebases() -> Self {
        Self {
            damping_factor: 0.85,
            max_iterations: 20,
            epsilon: 1e-4,
            use_parallel: true,
            min_score_threshold: 1e-5,
        }
    }

    /// High-precision preset: many iterations, tight epsilon, no score cutoff.
    pub fn for_research() -> Self {
        Self {
            damping_factor: 0.85,
            max_iterations: 100,
            epsilon: 1e-8,
            use_parallel: true,
            min_score_threshold: 0.0,
        }
    }

    /// Checks that the configuration values are internally consistent.
    pub fn validate(&self) -> Result<()> {
        if self.damping_factor < 0.0 || self.damping_factor >= 1.0 {
            return Err(ScribeError::invalid_operation(
                "Damping factor must be in range [0, 1)".to_string(),
                "pagerank_config_validation".to_string()
            ));
        }

        if self.max_iterations == 0 {
            return Err(ScribeError::invalid_operation(
                "Max iterations must be greater than 0".to_string(),
                "pagerank_config_validation".to_string()
            ));
        }

        if self.epsilon <= 0.0 {
            return Err(ScribeError::invalid_operation(
                "Epsilon must be positive".to_string(),
                "pagerank_config_validation".to_string()
            ));
        }

        Ok(())
    }
}

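/// Timing and resource metrics gathered while running PageRank.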
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct PerformanceMetrics {
    /// Total wall-clock time of the computation, in milliseconds.
    pub total_time_ms: u64,

    /// Average time per iteration, in milliseconds.
    pub avg_iteration_time_ms: f64,

    /// Rough estimate of peak memory used by the score tables, in megabytes.
    pub peak_memory_mb: f64,

    /// Number of nodes included in the computation.
    pub nodes_processed: usize,

    /// Relative reduction of the convergence difference over the run.
    pub convergence_rate: f64,

    /// Whether the parallel iteration path was used.
    pub used_parallel: bool,
}

impl Default for PerformanceMetrics {
    fn default() -> Self {
        Self {
            total_time_ms: 0,
            avg_iteration_time_ms: 0.0,
            peak_memory_mb: 0.0,
            nodes_processed: 0,
            convergence_rate: 0.0,
            used_parallel: false,
        }
    }
}

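/// Iterative PageRank solver over a `DependencyGraph`, driven by a `PageRankConfig`.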
#[derive(Debug)]
pub struct PageRankComputer {
    config: PageRankConfig,
}

impl PageRankComputer {
    /// Creates a computer with the default configuration.
    pub fn new() -> Self {
        Self {
            config: PageRankConfig::default(),
        }
    }

    /// Creates a computer with a custom configuration, validating it first.
    pub fn with_config(config: PageRankConfig) -> Result<Self> {
        config.validate()?;
        Ok(Self { config })
    }

    /// Creates a computer with the code-analysis preset.
    pub fn for_code_analysis() -> Self {
        Self {
            config: PageRankConfig::for_code_analysis(),
        }
    }

    /// Creates a computer with the large-codebase preset.
    pub fn for_large_codebases() -> Self {
        Self {
            config: PageRankConfig::for_large_codebases(),
        }
    }

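    /// Runs PageRank over `graph`, returning per-node scores together with
    /// convergence and performance diagnostics.
    ///
    /// Each iteration applies the standard update
    /// `score(v) = (1 - d)/N + d * (dangling_mass/N + sum over incoming u of score(u)/out_degree(u))`,
    /// where `d` is the damping factor and `N` is the number of nodes.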
    pub fn compute(&self, graph: &DependencyGraph) -> Result<PageRankResults> {
        let start_time = std::time::Instant::now();

        if graph.node_count() == 0 {
            return Ok(PageRankResults {
                scores: HashMap::new(),
                iterations_converged: 0,
                convergence_epsilon: 0.0,
                graph_stats: GraphStatistics::empty(),
                parameters: self.config.clone(),
                performance_metrics: PerformanceMetrics::default(),
            });
        }

        let num_nodes = graph.internal_node_count();
        let internal_nodes: Vec<(InternalNodeId, &NodeId)> = graph.internal_nodes().collect();

        // Start from a uniform distribution and iterate with two reusable score buffers.
        let initial_score = 1.0 / num_nodes as f64;
        let mut current_scores = vec![initial_score; num_nodes];
        let mut previous_scores = current_scores.clone();
        let mut convergence_history = Vec::new();

        let mut iterations = 0;
        let mut total_convergence_diff = 0.0;

        for iteration in 0..self.config.max_iterations {
            iterations = iteration + 1;

            if self.config.use_parallel {
                self.compute_iteration_parallel_optimized(graph, &internal_nodes, &previous_scores, &mut current_scores)?;
            } else {
                self.compute_iteration_sequential_optimized(graph, &internal_nodes, &previous_scores, &mut current_scores)?;
            }

            total_convergence_diff = self.compute_convergence_diff_vec(&current_scores, &previous_scores);
            convergence_history.push(total_convergence_diff);

            if total_convergence_diff < self.config.epsilon {
                break;
            }

            // The scores just computed become the input of the next iteration.
            std::mem::swap(&mut current_scores, &mut previous_scores);
        }

        // If the loop exhausted max_iterations, the final swap above left the newest
        // scores in `previous_scores`; swap back so `current_scores` holds the result.
        if iterations > 0 && total_convergence_diff >= self.config.epsilon {
            std::mem::swap(&mut current_scores, &mut previous_scores);
        }

        // Map internal indices back to node paths, dropping scores below the threshold.
        let mut final_scores = HashMap::new();
        for (internal_id, &score) in current_scores.iter().enumerate() {
            if score >= self.config.min_score_threshold {
                if let Some(node_path) = graph.get_path(internal_id) {
                    final_scores.insert(node_path.clone(), score);
                }
            }
        }

        let total_time = start_time.elapsed();
        let convergence_rate = if convergence_history.len() > 1 {
            let first = convergence_history[0];
            let last = convergence_history.last().unwrap();
            if first > 0.0 { (first - last) / first } else { 0.0 }
        } else {
            0.0
        };

        let performance_metrics = PerformanceMetrics {
            total_time_ms: total_time.as_millis() as u64,
            avg_iteration_time_ms: total_time.as_millis() as f64 / iterations as f64,
            peak_memory_mb: self.estimate_memory_usage(num_nodes),
            nodes_processed: num_nodes,
            convergence_rate,
            used_parallel: self.config.use_parallel,
        };

        Ok(PageRankResults {
            scores: final_scores,
            iterations_converged: iterations,
            convergence_epsilon: total_convergence_diff,
            graph_stats: self.compute_graph_stats(graph),
            parameters: self.config.clone(),
            performance_metrics,
        })
    }

    /// Reference (HashMap-based) sequential iteration; `compute` uses the
    /// index-based `compute_iteration_sequential_optimized` variant instead.
    fn compute_iteration_sequential(
        &self,
        graph: &DependencyGraph,
        nodes: &[NodeId],
        previous_scores: &HashMap<NodeId, f64>,
        current_scores: &mut HashMap<NodeId, f64>,
    ) -> Result<()> {
        let num_nodes = nodes.len() as f64;
        let teleport_prob = (1.0 - self.config.damping_factor) / num_nodes;

        for node in nodes {
            let mut new_score = teleport_prob;

            // Each incoming neighbor contributes its score divided by its out-degree.
            if let Some(incoming_neighbors) = graph.incoming_neighbors(node) {
                for linking_node in incoming_neighbors {
                    if let Some(&linking_score) = previous_scores.get(linking_node) {
                        let linking_out_degree = graph.out_degree(linking_node).max(1) as f64;
                        new_score += self.config.damping_factor * (linking_score / linking_out_degree);
                    }
                }
            }

            current_scores.insert(node.clone(), new_score);
        }

        Ok(())
    }

    /// Sequential iteration over the index-based representation. Dangling nodes
    /// (zero out-degree) redistribute their score uniformly across all nodes.
    fn compute_iteration_sequential_optimized(
        &self,
        graph: &DependencyGraph,
        internal_nodes: &[(InternalNodeId, &NodeId)],
        previous_scores: &[f64],
        current_scores: &mut [f64],
    ) -> Result<()> {
        let num_nodes = internal_nodes.len() as f64;
        let teleport_prob = (1.0 - self.config.damping_factor) / num_nodes;

        // Collect the total score held by dangling nodes so it can be spread evenly.
        let mut dangling_sum = 0.0;
        for &(internal_id, _) in internal_nodes {
            if graph.out_degree_by_id(internal_id) == 0 {
                dangling_sum += previous_scores[internal_id];
            }
        }
        let dangling_bonus = self.config.damping_factor * dangling_sum / num_nodes;

        for &(internal_id, _) in internal_nodes {
            let mut new_score = teleport_prob + dangling_bonus;

            if let Some(incoming_neighbors) = graph.incoming_neighbors_by_id(internal_id) {
                for &linking_id in incoming_neighbors {
                    let linking_score = previous_scores[linking_id];
                    let linking_out_degree = graph.out_degree_by_id(linking_id) as f64;
                    if linking_out_degree > 0.0 {
                        new_score += self.config.damping_factor * (linking_score / linking_out_degree);
                    }
                }
            }

            current_scores[internal_id] = new_score;
        }

        Ok(())
    }

    /// Reference (HashMap-based) parallel iteration; `compute` uses the
    /// index-based `compute_iteration_parallel_optimized` variant instead.
    fn compute_iteration_parallel(
        &self,
        graph: &DependencyGraph,
        nodes: &[NodeId],
        previous_scores: &HashMap<NodeId, f64>,
        current_scores: &mut HashMap<NodeId, f64>,
    ) -> Result<()> {
        let num_nodes = nodes.len() as f64;
        let teleport_prob = (1.0 - self.config.damping_factor) / num_nodes;

        // Compute new scores in parallel, then merge them back sequentially.
        let new_scores: Vec<(NodeId, f64)> = nodes.par_iter()
            .map(|node| {
                let mut new_score = teleport_prob;

                if let Some(incoming_neighbors) = graph.incoming_neighbors(node) {
                    for linking_node in incoming_neighbors {
                        if let Some(&linking_score) = previous_scores.get(linking_node) {
                            let linking_out_degree = graph.out_degree(linking_node).max(1) as f64;
                            new_score += self.config.damping_factor * (linking_score / linking_out_degree);
                        }
                    }
                }

                (node.clone(), new_score)
            })
            .collect();

        for (node, score) in new_scores {
            current_scores.insert(node, score);
        }

        Ok(())
    }

    /// Parallel iteration over the index-based representation, using rayon to
    /// compute per-node scores concurrently before writing them back.
    fn compute_iteration_parallel_optimized(
        &self,
        graph: &DependencyGraph,
        internal_nodes: &[(InternalNodeId, &NodeId)],
        previous_scores: &[f64],
        current_scores: &mut [f64],
    ) -> Result<()> {
        let num_nodes = internal_nodes.len() as f64;
        let teleport_prob = (1.0 - self.config.damping_factor) / num_nodes;

        // Dangling-node mass is summed up front so every node receives the same bonus.
        let mut dangling_sum = 0.0;
        for &(internal_id, _) in internal_nodes {
            if graph.out_degree_by_id(internal_id) == 0 {
                dangling_sum += previous_scores[internal_id];
            }
        }
        let dangling_bonus = self.config.damping_factor * dangling_sum / num_nodes;

        let new_scores: Vec<(InternalNodeId, f64)> = internal_nodes.par_iter()
            .map(|&(internal_id, _)| {
                let mut new_score = teleport_prob + dangling_bonus;

                if let Some(incoming_neighbors) = graph.incoming_neighbors_by_id(internal_id) {
                    for &linking_id in incoming_neighbors {
                        let linking_score = previous_scores[linking_id];
                        let linking_out_degree = graph.out_degree_by_id(linking_id) as f64;
                        if linking_out_degree > 0.0 {
                            new_score += self.config.damping_factor * (linking_score / linking_out_degree);
                        }
                    }
                }

                (internal_id, new_score)
            })
            .collect();

        for (internal_id, score) in new_scores {
            current_scores[internal_id] = score;
        }

        Ok(())
    }

    /// L1 distance between two HashMap-based score tables (reference variant).
    fn compute_convergence_diff(
        &self,
        current: &HashMap<NodeId, f64>,
        previous: &HashMap<NodeId, f64>,
    ) -> f64 {
        current.iter()
            .map(|(node, &current_score)| {
                let previous_score = previous.get(node).copied().unwrap_or(0.0);
                (current_score - previous_score).abs()
            })
            .sum()
    }

    /// L1 distance between two score vectors: the sum of absolute per-node changes.
    fn compute_convergence_diff_vec(
        &self,
        current: &[f64],
        previous: &[f64],
    ) -> f64 {
        current.iter()
            .zip(previous.iter())
            .map(|(&curr, &prev)| (curr - prev).abs())
            .sum()
    }

    /// Rough upper bound on the memory used by the score tables, in megabytes.
    fn estimate_memory_usage(&self, num_nodes: usize) -> f64 {
        let score_map_size = num_nodes * (std::mem::size_of::<String>() + std::mem::size_of::<f64>());
        // Two score buffers are kept alive at once (current and previous).
        let total_bytes = score_map_size * 2;
        total_bytes as f64 / (1024.0 * 1024.0)
    }

    /// Computes degree, density, and connectivity statistics for the graph.
    fn compute_graph_stats(&self, graph: &DependencyGraph) -> GraphStatistics {
        let total_nodes = graph.node_count();
        let total_edges = graph.edge_count();

        if total_nodes == 0 {
            return GraphStatistics::empty();
        }

        let mut in_degrees = Vec::with_capacity(total_nodes);
        let mut out_degrees = Vec::with_capacity(total_nodes);

        for (_, node_path) in graph.internal_nodes() {
            in_degrees.push(graph.in_degree(node_path));
            out_degrees.push(graph.out_degree(node_path));
        }

        let in_degree_avg = in_degrees.iter().sum::<usize>() as f64 / total_nodes as f64;
        let in_degree_max = *in_degrees.iter().max().unwrap_or(&0);
        let out_degree_avg = out_degrees.iter().sum::<usize>() as f64 / total_nodes as f64;
        let out_degree_max = *out_degrees.iter().max().unwrap_or(&0);

        // Isolated nodes have no edges at all; dangling nodes have no outgoing edges.
        let isolated_nodes = in_degrees.iter().zip(out_degrees.iter())
            .filter(|(&in_deg, &out_deg)| in_deg == 0 && out_deg == 0)
            .count();
        let dangling_nodes = out_degrees.iter().filter(|&&out_deg| out_deg == 0).count();

        // Density is measured against a complete directed graph without self-loops.
        let max_possible_edges = total_nodes * (total_nodes - 1);
        let graph_density = if max_possible_edges > 0 {
            total_edges as f64 / max_possible_edges as f64
        } else {
            0.0
        };

        GraphStatistics {
            total_nodes,
            total_edges,
            in_degree_avg,
            in_degree_max,
            out_degree_avg,
            out_degree_max,
            strongly_connected_components: graph.estimate_scc_count(),
            graph_density,
            isolated_nodes,
            dangling_nodes,
        }
    }
}

impl Default for PageRankComputer {
    fn default() -> Self {
        Self::new()
    }
}

impl PageRankResults {
    /// Returns the `k` highest-scoring nodes in descending score order.
    pub fn top_nodes(&self, k: usize) -> Vec<(NodeId, f64)> {
        let mut sorted_scores: Vec<_> = self.scores.iter()
            .map(|(node, &score)| (node.clone(), score))
            .collect();

        sorted_scores.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
        sorted_scores.into_iter().take(k).collect()
    }

    /// Returns all nodes whose score is at least `threshold`.
    pub fn nodes_above_threshold(&self, threshold: f64) -> Vec<(NodeId, f64)> {
        self.scores.iter()
            .filter_map(|(node, &score)| {
                if score >= threshold {
                    Some((node.clone(), score))
                } else {
                    None
                }
            })
            .collect()
    }

    /// Returns the score for a single node, if it is present in the results.
    pub fn node_score(&self, node_id: &NodeId) -> Option<f64> {
        self.scores.get(node_id).copied()
    }

    /// Computes descriptive statistics (mean, median, spread) over the scores.
    pub fn score_statistics(&self) -> ScoreStatistics {
        if self.scores.is_empty() {
            return ScoreStatistics::default();
        }

        let scores: Vec<f64> = self.scores.values().copied().collect();
        let sum: f64 = scores.iter().sum();
        let mean = sum / scores.len() as f64;

        let min_score = scores.iter().fold(f64::INFINITY, |a, &b| a.min(b));
        let max_score = scores.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));

        let variance = scores.iter()
            .map(|&score| (score - mean).powi(2))
            .sum::<f64>() / scores.len() as f64;
        let std_dev = variance.sqrt();

        let mut sorted_scores = scores;
        sorted_scores.sort_by(|a, b| a.partial_cmp(b).unwrap());
        let median = if sorted_scores.len() % 2 == 0 {
            let mid = sorted_scores.len() / 2;
            (sorted_scores[mid - 1] + sorted_scores[mid]) / 2.0
        } else {
            sorted_scores[sorted_scores.len() / 2]
        };

        ScoreStatistics {
            mean,
            median,
            std_dev,
            min_score,
            max_score,
            total_nodes: self.scores.len(),
        }
    }

    /// Returns true if the final convergence difference dropped below the configured epsilon.
    pub fn converged(&self) -> bool {
        self.convergence_epsilon < self.parameters.epsilon
    }

    /// Renders a short human-readable summary of the results.
    pub fn summary(&self) -> String {
        let stats = self.score_statistics();
        format!(
            "PageRank Results Summary:\n\
             - Nodes: {} (converged in {} iterations)\n\
             - Score range: [{:.6}, {:.6}] (mean: {:.6})\n\
             - Graph: {} nodes, {} edges (density: {:.4})\n\
             - Performance: {:.1}ms total, {:.2}ms/iter, {:.1}MB peak memory",
            self.scores.len(),
            self.iterations_converged,
            stats.min_score,
            stats.max_score,
            stats.mean,
            self.graph_stats.total_nodes,
            self.graph_stats.total_edges,
            self.graph_stats.graph_density,
            self.performance_metrics.total_time_ms,
            self.performance_metrics.avg_iteration_time_ms,
            self.performance_metrics.peak_memory_mb,
        )
    }
}

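/// Descriptive statistics over a set of PageRank scores.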
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ScoreStatistics {
    pub mean: f64,
    pub median: f64,
    pub std_dev: f64,
    pub min_score: f64,
    pub max_score: f64,
    pub total_nodes: usize,
}

impl Default for ScoreStatistics {
    fn default() -> Self {
        Self {
            mean: 0.0,
            median: 0.0,
            std_dev: 0.0,
            min_score: 0.0,
            max_score: 0.0,
            total_nodes: 0,
        }
    }
}

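/// PageRank variants built on top of `PageRankComputer`.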
pub struct SpecializedPageRank;

impl SpecializedPageRank {
    /// Personalized PageRank. The personalization vector is accepted but not yet
    /// applied; the call currently falls back to the standard computation.
    pub fn personalized_pagerank(
        graph: &DependencyGraph,
        _personalization: &HashMap<NodeId, f64>,
        config: PageRankConfig,
    ) -> Result<PageRankResults> {
        let computer = PageRankComputer::with_config(config)?;

        computer.compute(graph)
    }

    /// PageRank biased toward the graph's entrypoint nodes. If there are no
    /// entrypoints, this is equivalent to a standard PageRank run.
    pub fn entrypoint_focused_pagerank(
        graph: &DependencyGraph,
        config: PageRankConfig,
    ) -> Result<PageRankResults> {
        let entrypoints = graph.entrypoint_nodes();
        if entrypoints.is_empty() {
            return PageRankComputer::with_config(config)?.compute(graph);
        }

        // Weight entrypoints uniformly and all other nodes at zero.
        let mut personalization = HashMap::new();
        let entrypoint_weight = 1.0 / entrypoints.len() as f64;

        for node in graph.nodes() {
            if entrypoints.contains(&node) {
                personalization.insert(node.clone(), entrypoint_weight);
            } else {
                personalization.insert(node.clone(), 0.0);
            }
        }

        Self::personalized_pagerank(graph, &personalization, config)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::graph::DependencyGraph;

    /// Builds a simple 3-node cycle: A -> B -> C -> A.
    fn create_test_graph() -> DependencyGraph {
        let mut graph = DependencyGraph::new();

        graph.add_edge("A".to_string(), "B".to_string()).unwrap();
        graph.add_edge("B".to_string(), "C".to_string()).unwrap();
        graph.add_edge("C".to_string(), "A".to_string()).unwrap();

        graph
    }

    #[test]
    fn test_pagerank_config() {
        let config = PageRankConfig::default();
        assert_eq!(config.damping_factor, 0.85);
        assert_eq!(config.max_iterations, 50);
        assert!(config.use_parallel);

        assert!(config.validate().is_ok());

        let invalid_config = PageRankConfig {
            damping_factor: 1.5,
            ..config
        };
        assert!(invalid_config.validate().is_err());
    }

    #[test]
    fn test_pagerank_computation() {
        let graph = create_test_graph();
        let computer = PageRankComputer::new();

        let results = computer.compute(&graph).unwrap();

        assert_eq!(results.scores.len(), 3);
        assert!(results.converged());
        assert!(results.iterations_converged > 0);
        assert!(results.iterations_converged <= 50);

        // Scores should approximately form a probability distribution.
        let total_score: f64 = results.scores.values().sum();
        println!("Total score: {}, Number of nodes: {}", total_score, results.scores.len());
        assert!((total_score - 1.0).abs() < 1e-3);

        for (node, score) in &results.scores {
            assert!(*score > 0.0);
            assert!(*score < 2.0);
            println!("Node {}: score = {:.6}", node, score);
        }
    }

    #[test]
    fn test_pagerank_empty_graph() {
        let graph = DependencyGraph::new();
        let computer = PageRankComputer::new();

        let results = computer.compute(&graph).unwrap();

        assert!(results.scores.is_empty());
        assert_eq!(results.iterations_converged, 0);
    }

    #[test]
    fn test_pagerank_single_node() {
        let mut graph = DependencyGraph::new();
        graph.add_node("A".to_string()).unwrap();

        let computer = PageRankComputer::new();
        let results = computer.compute(&graph).unwrap();

        assert_eq!(results.scores.len(), 1);
        let actual_score = results.scores["A"];
        println!("Single node score: {}, Expected: 1.0", actual_score);
        assert!(actual_score > 0.0);
        assert!(actual_score <= 1.0);
    }

    #[test]
    fn test_pagerank_linear_chain() {
        let mut graph = DependencyGraph::new();

        graph.add_edge("A".to_string(), "B".to_string()).unwrap();
        graph.add_edge("B".to_string(), "C".to_string()).unwrap();
        graph.add_edge("C".to_string(), "D".to_string()).unwrap();

        let computer = PageRankComputer::new();
        let results = computer.compute(&graph).unwrap();

        assert_eq!(results.scores.len(), 4);

        // The end of the chain accumulates score; the source only receives teleport mass.
        let score_a = results.scores["A"];
        let score_d = results.scores["D"];

        assert!(score_d > score_a);

        println!("Linear chain scores:");
        for node in ["A", "B", "C", "D"] {
            println!("  {}: {:.6}", node, results.scores[node]);
        }
    }

    #[test]
    fn test_pagerank_hub_and_authority() {
        let mut graph = DependencyGraph::new();

        // A is a hub that links out to B, C, and D.
        graph.add_edge("A".to_string(), "B".to_string()).unwrap();
        graph.add_edge("A".to_string(), "C".to_string()).unwrap();
        graph.add_edge("A".to_string(), "D".to_string()).unwrap();

        // H is an authority that E, F, and G all link to.
        graph.add_edge("E".to_string(), "H".to_string()).unwrap();
        graph.add_edge("F".to_string(), "H".to_string()).unwrap();
        graph.add_edge("G".to_string(), "H".to_string()).unwrap();

        let computer = PageRankComputer::new();
        let results = computer.compute(&graph).unwrap();

        let score_a = results.scores["A"];
        let score_h = results.scores["H"];

        assert!(score_h > score_a);

        println!("Hub and Authority scores:");
        for node in ["A", "B", "C", "D", "E", "F", "G", "H"] {
            println!("  {}: {:.6}", node, results.scores[node]);
        }
    }

    #[test]
    fn test_pagerank_parallel_vs_sequential() {
        let graph = create_test_graph();

        let sequential_config = PageRankConfig {
            use_parallel: false,
            epsilon: 1e-8,
            ..PageRankConfig::default()
        };
        let sequential_computer = PageRankComputer::with_config(sequential_config).unwrap();
        let sequential_results = sequential_computer.compute(&graph).unwrap();

        let parallel_config = PageRankConfig {
            use_parallel: true,
            epsilon: 1e-8,
            ..PageRankConfig::default()
        };
        let parallel_computer = PageRankComputer::with_config(parallel_config).unwrap();
        let parallel_results = parallel_computer.compute(&graph).unwrap();

        // Both execution paths should agree to within floating-point tolerance.
        for node in graph.nodes() {
            let seq_score = sequential_results.scores[node];
            let par_score = parallel_results.scores[node];
            let diff = (seq_score - par_score).abs();

            assert!(diff < 1e-6, "Scores differ too much for node {}: seq={:.8}, par={:.8}",
                    node, seq_score, par_score);
        }

        assert!(!sequential_results.performance_metrics.used_parallel);
        assert!(parallel_results.performance_metrics.used_parallel);
    }

    #[test]
    fn test_score_statistics() {
        let graph = create_test_graph();
        let computer = PageRankComputer::new();
        let results = computer.compute(&graph).unwrap();

        let stats = results.score_statistics();

        assert_eq!(stats.total_nodes, 3);
        assert!(stats.mean > 0.0);
        assert!(stats.std_dev >= 0.0);
        assert!(stats.min_score <= stats.max_score);
        assert!(stats.median > 0.0);

        println!("Score statistics: {:#?}", stats);
    }

    #[test]
    fn test_top_nodes() {
        let graph = create_test_graph();
        let computer = PageRankComputer::new();
        let results = computer.compute(&graph).unwrap();

        let top_2 = results.top_nodes(2);
        assert_eq!(top_2.len(), 2);

        // Results should be ordered by descending score.
        assert!(top_2[0].1 >= top_2[1].1);

        println!("Top 2 nodes: {:#?}", top_2);
    }

    #[test]
    fn test_configuration_variants() {
        let graph = create_test_graph();

        let configs = vec![
            PageRankConfig::for_code_analysis(),
            PageRankConfig::for_large_codebases(),
            PageRankConfig::for_research(),
        ];

        for config in configs {
            let computer = PageRankComputer::with_config(config.clone()).unwrap();
            let results = computer.compute(&graph).unwrap();

            assert!(!results.scores.is_empty());
            assert!(results.iterations_converged > 0);

            println!("Config {:?}: converged in {} iterations",
                     config.damping_factor, results.iterations_converged);
        }
    }

    #[test]
    fn test_pagerank_summary() {
        let graph = create_test_graph();
        let computer = PageRankComputer::new();
        let results = computer.compute(&graph).unwrap();

        let summary = results.summary();

        assert!(summary.contains("PageRank Results Summary"));
        assert!(summary.contains("Nodes:"));
        assert!(summary.contains("converged"));
        assert!(summary.contains("Performance:"));

        println!("Summary:\n{}", summary);
    }
}