use crate::KnowledgeGraph;
use std::collections::HashMap;
/// Tuning parameters for the eigenvector-centrality power iteration.
#[derive(Debug, Clone, Copy)]
pub struct EigenvectorConfig {
    /// Upper bound on power-iteration rounds before giving up on convergence.
    pub max_iterations: usize,
    /// Convergence threshold: iteration stops once the L2 distance between
    /// successive score vectors drops below this value.
    pub tolerance: f64,
    /// When `true`, edges contribute in both directions; when `false`, only
    /// incoming edges feed a node's score.
    pub undirected: bool,
}
impl Default for EigenvectorConfig {
fn default() -> Self {
Self {
max_iterations: 100,
tolerance: 1e-6,
undirected: false,
}
}
}
#[must_use]
#[allow(clippy::cast_precision_loss)]
/// Computes eigenvector centrality for every node via power iteration.
///
/// Each node's score is proportional to the sum of its neighbors' scores,
/// i.e. the result approximates the principal eigenvector of the adjacency
/// structure. Scores are L2-normalized, so the returned vector has unit
/// length.
///
/// Iteration stops after `config.max_iterations` rounds, or earlier once the
/// L2 distance between successive score vectors falls below
/// `config.tolerance`. Returns an empty map for an empty graph.
#[must_use]
#[allow(clippy::cast_precision_loss)]
pub fn eigenvector_centrality(
    kg: &KnowledgeGraph,
    config: EigenvectorConfig,
) -> HashMap<String, f64> {
    let graph = kg.as_petgraph();
    let n = graph.node_count();
    if n == 0 {
        return HashMap::new();
    }
    // Start from the uniform unit vector so the initial guess has a nonzero
    // component along the principal eigenvector.
    let init_val = 1.0 / (n as f64).sqrt();
    let mut scores = vec![init_val; n];
    let mut new_scores = vec![0.0; n];
    for _iter in 0..config.max_iterations {
        new_scores.fill(0.0);
        // y = A^T x: each node accumulates the scores of the nodes that point
        // at it (or of all neighbors in undirected mode).
        for idx in graph.node_indices() {
            // Both branches return the same `Neighbors` iterator type, so we
            // iterate lazily here instead of collecting into a fresh Vec per
            // node per round (a needless allocation in the hot loop).
            let predecessors = if config.undirected {
                graph.neighbors_undirected(idx)
            } else {
                graph.neighbors_directed(idx, petgraph::Direction::Incoming)
            };
            for pred in predecessors {
                new_scores[idx.index()] += scores[pred.index()];
            }
        }
        // L2-normalize; if the vector collapsed to zero (e.g. no node has any
        // incoming edge), restart from the uniform vector rather than divide
        // by zero.
        let norm: f64 = new_scores.iter().map(|x| x * x).sum::<f64>().sqrt();
        if norm > 0.0 {
            for s in &mut new_scores {
                *s /= norm;
            }
        } else {
            let uniform = 1.0 / (n as f64).sqrt();
            new_scores.fill(uniform);
        }
        // Convergence test: L2 distance between consecutive iterates.
        let diff: f64 = scores
            .iter()
            .zip(new_scores.iter())
            .map(|(old, new)| (old - new).powi(2))
            .sum::<f64>()
            .sqrt();
        std::mem::swap(&mut scores, &mut new_scores);
        if diff < config.tolerance {
            break;
        }
    }
    // Map positional scores back onto node ids for the caller.
    graph
        .node_indices()
        .map(|idx| (graph[idx].id.as_str().to_owned(), scores[idx.index()]))
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::Triple;

    /// Builds a `KnowledgeGraph` from (subject, object) pairs, all linked by
    /// the "rel" predicate.
    fn build(edges: &[(&str, &str)]) -> KnowledgeGraph {
        let mut kg = KnowledgeGraph::new();
        for &(s, o) in edges {
            kg.add_triple(Triple::new(s, "rel", o));
        }
        kg
    }

    #[test]
    fn test_eigenvector_symmetric() {
        // Fully connected triangle (edges in both directions): every node is
        // structurally identical, so all scores must agree.
        let kg = build(&[
            ("A", "B"),
            ("B", "A"),
            ("B", "C"),
            ("C", "B"),
            ("C", "A"),
            ("A", "C"),
        ]);
        let scores = eigenvector_centrality(&kg, EigenvectorConfig::default());
        let a = scores["A"];
        let b = scores["B"];
        let c = scores["C"];
        assert!((a - b).abs() < 0.01, "A={a}, B={b} should be equal");
        assert!((b - c).abs() < 0.01, "B={b}, C={c} should be equal");
    }

    #[test]
    fn test_eigenvector_star() {
        // Star topology: the hub connects to every leaf in both directions,
        // so it should dominate the centrality ranking.
        let kg = build(&[
            ("Hub", "A"),
            ("A", "Hub"),
            ("Hub", "B"),
            ("B", "Hub"),
            ("Hub", "C"),
            ("C", "Hub"),
        ]);
        let scores = eigenvector_centrality(&kg, EigenvectorConfig::default());
        let hub = scores["Hub"];
        let a = scores["A"];
        assert!(hub > a, "Hub={hub} should be more central than A={a}");
    }

    #[test]
    fn test_eigenvector_normalized() {
        // Two-node cycle: whatever the individual values, the score vector
        // must come back with unit L2 norm.
        let kg = build(&[("A", "B"), ("B", "A")]);
        let scores = eigenvector_centrality(&kg, EigenvectorConfig::default());
        let norm: f64 = scores.values().map(|x| x * x).sum::<f64>().sqrt();
        assert!(
            (norm - 1.0).abs() < 1e-6,
            "Scores should be L2 normalized: {norm}"
        );
    }
}