pub mod coloring;
pub mod community;
pub mod connectivity;
pub mod decomposition;
pub mod flow;
pub mod hypergraph;
pub mod isomorphism;
pub mod matching;
pub mod motifs;
pub mod paths;
pub mod properties;
pub mod random_walk;
pub mod shortest_path;
pub mod similarity;
pub mod transformations;
pub mod traversal;
pub use coloring::*;
pub use community::{
fluid_communities_result,
girvan_newman_communities_result,
girvan_newman_result,
greedy_modularity_optimization_result,
hierarchical_communities_result,
infomap_communities,
label_propagation_result,
louvain_communities_result,
modularity,
modularity_optimization_result,
CommunityResult,
CommunityStructure,
DendrogramLevel,
GirvanNewmanConfig,
GirvanNewmanResult,
InfomapResult,
};
#[cfg(feature = "parallel")]
pub use community::{
parallel_label_propagation_result, parallel_louvain_communities_result, parallel_modularity,
};
#[deprecated(note = "Use `louvain_communities_result` instead")]
#[allow(deprecated)]
pub use community::louvain_communities;
#[deprecated(note = "Use `label_propagation_result` instead")]
#[allow(deprecated)]
pub use community::label_propagation;
#[deprecated(note = "Use `fluid_communities_result` instead")]
#[allow(deprecated)]
pub use community::fluid_communities;
#[deprecated(note = "Use `hierarchical_communities_result` instead")]
#[allow(deprecated)]
pub use community::hierarchical_communities;
#[deprecated(note = "Use `modularity_optimization_result` instead")]
#[allow(deprecated)]
pub use community::modularity_optimization;
#[deprecated(note = "Use `greedy_modularity_optimization_result` instead")]
#[allow(deprecated)]
pub use community::greedy_modularity_optimization;
#[deprecated(note = "Use `parallel_louvain_communities_result` instead")]
#[allow(deprecated)]
pub use community::parallel_louvain_communities;
pub use connectivity::*;
pub use decomposition::*;
pub use flow::{
capacity_scaling_max_flow, dinic_max_flow, dinic_max_flow_full, edmonds_karp_max_flow,
ford_fulkerson_max_flow, hopcroft_karp, isap_max_flow, min_cost_max_flow,
min_cost_max_flow_graph, minimum_cut, minimum_st_cut, multi_commodity_flow,
multi_source_multi_sink_max_flow, parallel_max_flow, push_relabel_max_flow,
push_relabel_max_flow_full, CostEdge, HopcroftKarpResult, MaxFlowResult, MinCostFlowResult,
MultiCommodityFlowResult,
};
pub use hypergraph::*;
pub use isomorphism::*;
pub use matching::*;
pub use motifs::*;
pub use paths::*;
pub use properties::*;
pub use random_walk::*;
pub use shortest_path::*;
pub use similarity::*;
pub use transformations::*;
pub use traversal::*;
use crate::base::{DiGraph, EdgeWeight, Graph, IndexType, Node};
use crate::error::{GraphError, Result};
use petgraph::algo::toposort as petgraph_toposort;
use petgraph::visit::EdgeRef;
use petgraph::Direction;
use scirs2_core::ndarray::{Array1, Array2};
use std::cmp::Ordering;
use std::collections::{HashMap, VecDeque};
/// Computes a minimum spanning tree of `graph` using Kruskal's algorithm.
///
/// Edges are sorted by weight with a stable sort (ties keep input order;
/// incomparable weights such as NaN compare as equal) and accepted greedily
/// whenever they join two previously disconnected components, tracked with a
/// union-find structure using path compression and union by rank.
///
/// Returns the accepted edges; for a disconnected graph this is a minimum
/// spanning forest.
#[allow(dead_code)]
pub fn minimum_spanning_tree<N, E, Ix>(
    graph: &Graph<N, E, Ix>,
) -> Result<Vec<crate::base::Edge<N, E>>>
where
    N: Node + std::fmt::Debug,
    E: EdgeWeight + Into<f64> + std::cmp::PartialOrd,
    Ix: petgraph::graph::IndexType,
{
    // Materialize every edge as (source node, target node, weight).
    let mut candidates: Vec<_> = graph
        .inner()
        .edge_references()
        .map(|edge| {
            (
                graph.inner()[edge.source()].clone(),
                graph.inner()[edge.target()].clone(),
                edge.weight().clone(),
            )
        })
        .collect();
    // Stable sort keeps the original order among equal-weight edges.
    candidates.sort_by(|lhs, rhs| lhs.2.partial_cmp(&rhs.2).unwrap_or(Ordering::Equal));

    let all_nodes: Vec<N> = graph.nodes().into_iter().cloned().collect();
    // Union-find state: each node starts as its own root with rank 0.
    let mut parent: HashMap<N, N> = all_nodes
        .iter()
        .map(|n| (n.clone(), n.clone()))
        .collect();
    let mut rank: HashMap<N, usize> = all_nodes.iter().map(|n| (n.clone(), 0)).collect();

    // Finds the representative of `node`, then compresses the walked path so
    // every node on it points directly at the root.
    fn find<N: Node>(parent: &mut HashMap<N, N>, node: &N) -> N {
        let mut root = node.clone();
        while parent[&root] != root {
            root = parent[&root].clone();
        }
        let mut cur = node.clone();
        while cur != root {
            let next = parent[&cur].clone();
            parent.insert(cur, root.clone());
            cur = next;
        }
        root
    }

    // Merges the components containing `a` and `b`; returns false when they
    // were already connected (i.e. the edge would close a cycle).
    fn union<N: Node>(
        parent: &mut HashMap<N, N>,
        rank: &mut HashMap<N, usize>,
        a: &N,
        b: &N,
    ) -> bool {
        let root_a = find(parent, a);
        let root_b = find(parent, b);
        if root_a == root_b {
            return false;
        }
        // Attach the shallower tree under the deeper one (union by rank).
        match rank[&root_a].cmp(&rank[&root_b]) {
            Ordering::Less => {
                parent.insert(root_a, root_b);
            }
            Ordering::Greater => {
                parent.insert(root_b, root_a);
            }
            Ordering::Equal => {
                parent.insert(root_b, root_a.clone());
                *rank.get_mut(&root_a).expect("rank entry exists for every node") += 1;
            }
        }
        true
    }

    // Greedy scan in ascending weight order: keep each edge that connects
    // two distinct components.
    let mut tree_edges = Vec::new();
    for (source, target, weight) in candidates {
        if union(&mut parent, &mut rank, &source, &target) {
            tree_edges.push(crate::base::Edge {
                source,
                target,
                weight,
            });
        }
    }
    Ok(tree_edges)
}
/// Returns the nodes of a directed graph in topological order.
///
/// Delegates cycle detection and ordering to petgraph's `toposort`; on
/// success the node weights are cloned out in sorted order.
///
/// # Errors
/// Returns `GraphError::CycleDetected` when the graph contains a cycle.
/// petgraph does not report which node starts the cycle or its length, so
/// placeholder values are used.
#[allow(dead_code)]
pub fn topological_sort<N, E, Ix>(graph: &DiGraph<N, E, Ix>) -> Result<Vec<N>>
where
    N: Node + std::fmt::Debug,
    E: EdgeWeight,
    Ix: IndexType,
{
    petgraph_toposort(graph.inner(), None)
        .map(|order| {
            order
                .into_iter()
                .map(|idx| graph.inner()[idx].clone())
                .collect()
        })
        .map_err(|_| GraphError::CycleDetected {
            start_node: "unknown".to_string(),
            cycle_length: 0,
        })
}
/// Computes PageRank scores for every node of a directed graph via power
/// iteration.
///
/// Each round gives every node a teleportation share of `(1 - d) / n`, then
/// redistributes each node's damped rank evenly along its outgoing edges;
/// nodes without outgoing edges (dangling nodes) spread their damped rank
/// uniformly over all nodes. Iteration stops after `max_iterations` rounds
/// or once the L1 change between successive rank vectors drops below
/// `tolerance`.
///
/// Returns a map from node to its PageRank score (empty for an empty graph).
#[allow(dead_code)]
pub fn pagerank<N, E, Ix>(
    graph: &DiGraph<N, E, Ix>,
    damping_factor: f64,
    tolerance: f64,
    max_iterations: usize,
) -> HashMap<N, f64>
where
    N: Node + std::fmt::Debug,
    E: EdgeWeight,
    Ix: IndexType,
{
    let indices: Vec<_> = graph.inner().node_indices().collect();
    let count = indices.len();
    if count == 0 {
        return HashMap::new();
    }
    // Position of each petgraph index within `indices`, for O(1) lookups
    // when following edges.
    let position: HashMap<_, _> = indices.iter().enumerate().map(|(i, &ix)| (ix, i)).collect();

    // Start from the uniform distribution.
    let mut ranks = vec![1.0 / count as f64; count];
    let mut next_ranks = vec![0.0; count];

    for _ in 0..max_iterations {
        // Teleportation term: every node receives (1 - d) / n each round.
        for slot in next_ranks.iter_mut().take(count) {
            *slot = (1.0 - damping_factor) / count as f64;
        }
        for (i, &source) in indices.iter().enumerate() {
            let fanout = graph
                .inner()
                .edges_directed(source, Direction::Outgoing)
                .count();
            if fanout > 0 {
                // Split this node's damped rank evenly across its out-edges.
                let share = damping_factor * ranks[i] / fanout as f64;
                for edge in graph.inner().edges_directed(source, Direction::Outgoing) {
                    if let Some(&j) = position.get(&edge.target()) {
                        next_ranks[j] += share;
                    }
                }
            } else {
                // Dangling node: distribute its rank uniformly to all nodes.
                let share = damping_factor * ranks[i] / count as f64;
                for slot in next_ranks.iter_mut().take(count) {
                    *slot += share;
                }
            }
        }
        // L1 distance between successive iterates decides convergence.
        let delta: f64 = ranks
            .iter()
            .zip(&next_ranks)
            .map(|(old, new)| (old - new).abs())
            .sum();
        std::mem::swap(&mut ranks, &mut next_ranks);
        if delta < tolerance {
            break;
        }
    }

    indices
        .iter()
        .enumerate()
        .map(|(i, &ix)| (graph.inner()[ix].clone(), ranks[i]))
        .collect()
}
/// Computes betweenness centrality for every node of an unweighted graph
/// using Brandes' algorithm (one BFS per source plus a reverse-order
/// dependency accumulation).
///
/// Per source `s`: `sigma[v]` counts shortest s-v paths, `dist[v]` holds the
/// BFS distance (None = unvisited), and `paths[v]` lists the BFS
/// predecessors of `v`. Popping `stack` yields nodes in non-increasing
/// distance, which the back-propagation phase relies on.
///
/// When `normalized` is true and n > 2, values are scaled by
/// 1 / ((n - 1)(n - 2)).
/// NOTE(review): on an undirected graph each unordered pair is visited from
/// both endpoints, so raw values are twice the per-pair definition; some
/// conventions divide by an extra factor of 2 — confirm which one callers
/// expect.
#[allow(dead_code)]
pub fn betweenness_centrality<N, E, Ix>(
    graph: &Graph<N, E, Ix>,
    normalized: bool,
) -> HashMap<N, f64>
where
    N: Node + std::fmt::Debug,
    E: EdgeWeight,
    Ix: IndexType,
{
    let node_indices: Vec<_> = graph.inner().node_indices().collect();
    let nodes: Vec<N> = node_indices
        .iter()
        .map(|&idx| graph.inner()[idx].clone())
        .collect();
    let n = nodes.len();
    let mut centrality: HashMap<N, f64> = nodes.iter().map(|n| (n.clone(), 0.0)).collect();
    for s in &nodes {
        // Forward phase: BFS from `s`, recording shortest-path counts,
        // distances, and predecessor lists.
        let mut stack = Vec::new();
        let mut paths: HashMap<N, Vec<N>> = HashMap::new();
        let mut sigma: HashMap<N, f64> = nodes.iter().map(|n| (n.clone(), 0.0)).collect();
        let mut dist: HashMap<N, Option<f64>> = nodes.iter().map(|n| (n.clone(), None)).collect();
        sigma.insert(s.clone(), 1.0);
        dist.insert(s.clone(), Some(0.0));
        let mut queue = VecDeque::new();
        queue.push_back(s.clone());
        while let Some(v) = queue.pop_front() {
            stack.push(v.clone());
            if let Ok(neighbors) = graph.neighbors(&v) {
                for w in neighbors {
                    // First time `w` is seen: fix its BFS distance and enqueue.
                    if dist[&w].is_none() {
                        dist.insert(w.clone(), Some(dist[&v].expect("Operation failed") + 1.0));
                        queue.push_back(w.clone());
                    }
                    // `v` lies on a shortest path to `w`: accumulate the path
                    // count and remember `v` as a predecessor.
                    if dist[&w] == Some(dist[&v].expect("Operation failed") + 1.0) {
                        *sigma.get_mut(&w).expect("Operation failed") += sigma[&v];
                        paths.entry(w.clone()).or_default().push(v.clone());
                    }
                }
            }
        }
        // Backward phase: pop nodes in non-increasing distance order and push
        // each node's dependency back onto its predecessors.
        let mut delta: HashMap<N, f64> = nodes.iter().map(|n| (n.clone(), 0.0)).collect();
        while let Some(w) = stack.pop() {
            if let Some(predecessors) = paths.get(&w) {
                for v in predecessors {
                    *delta.get_mut(v).expect("Operation failed") +=
                        (sigma[v] / sigma[&w]) * (1.0 + delta[&w]);
                }
            }
            // The source itself accumulates no centrality from its own BFS.
            if w != *s {
                *centrality.get_mut(&w).expect("Operation failed") += delta[&w];
            }
        }
    }
    if normalized && n > 2 {
        let scale = 1.0 / ((n - 1) * (n - 2)) as f64;
        for value in centrality.values_mut() {
            *value *= scale;
        }
    }
    centrality
}
/// Computes closeness centrality for every node.
///
/// For each node, the shortest-path distance to every other reachable node
/// is obtained via `dijkstra_path`; closeness is the number of reachable
/// nodes divided by the sum of those distances. With `normalized` set (and
/// more than one node), the value is additionally scaled by
/// `reachable / (n - 1)` so nodes reaching only part of the graph are
/// penalized. Nodes that reach nothing score 0.0.
///
/// NOTE(review): this runs a point-to-point Dijkstra for every ordered pair
/// (O(n^2) searches); a single-source shortest-path call per node would be
/// cheaper if such a helper is available.
#[allow(dead_code)]
pub fn closeness_centrality<N, E, Ix>(graph: &Graph<N, E, Ix>, normalized: bool) -> HashMap<N, f64>
where
    N: Node + std::fmt::Debug,
    E: EdgeWeight
        + Into<f64>
        + scirs2_core::numeric::Zero
        + scirs2_core::numeric::One
        + std::ops::Add<Output = E>
        + PartialOrd
        + std::marker::Copy
        + std::fmt::Debug
        + std::default::Default,
    Ix: IndexType,
{
    let idx_list: Vec<_> = graph.inner().node_indices().collect();
    let all_nodes: Vec<N> = idx_list
        .iter()
        .map(|&ix| graph.inner()[ix].clone())
        .collect();
    let total = all_nodes.len();
    let mut scores = HashMap::new();
    for source in &all_nodes {
        let mut distance_sum = 0.0;
        let mut reachable = 0usize;
        // Sum shortest-path distances to every node we can actually reach.
        for sink in all_nodes.iter().filter(|other| *other != source) {
            if let Ok(Some(path)) = dijkstra_path(graph, source, sink) {
                let d: f64 = path.total_weight.into();
                distance_sum += d;
                reachable += 1;
            }
        }
        let score = if reachable == 0 {
            0.0
        } else {
            let raw = reachable as f64 / distance_sum;
            if normalized && total > 1 {
                raw * (reachable as f64 / (total - 1) as f64)
            } else {
                raw
            }
        };
        scores.insert(source.clone(), score);
    }
    scores
}
/// Computes eigenvector centrality by power iteration on a dense adjacency
/// matrix.
///
/// The adjacency matrix is filled from pairwise `edge_weight` lookups, the
/// iterate is repeatedly multiplied and L2-normalized, and convergence is
/// declared once the L1 change between successive iterates drops below
/// `tolerance`.
///
/// # Errors
/// Returns `GraphError::ComputationError` when the iterate collapses to the
/// zero vector or when `max_iter` rounds pass without convergence.
#[allow(dead_code)]
pub fn eigenvector_centrality<N, E, Ix>(
    graph: &Graph<N, E, Ix>,
    max_iter: usize,
    tolerance: f64,
) -> Result<HashMap<N, f64>>
where
    N: Node + std::fmt::Debug,
    E: EdgeWeight + Into<f64>,
    Ix: IndexType,
{
    let idx_list: Vec<_> = graph.inner().node_indices().collect();
    let labels: Vec<N> = idx_list
        .iter()
        .map(|&ix| graph.inner()[ix].clone())
        .collect();
    let size = labels.len();
    if size == 0 {
        return Ok(HashMap::new());
    }

    // Dense adjacency: entry (i, j) holds the weight of an i -> j edge, if
    // `edge_weight` reports one; all other entries stay zero.
    let mut adjacency = Array2::<f64>::zeros((size, size));
    for (i, from) in labels.iter().enumerate() {
        for (j, to) in labels.iter().enumerate() {
            if let Ok(w) = graph.edge_weight(from, to) {
                adjacency[[i, j]] = w.into();
            }
        }
    }

    // Start from the uniform unit vector.
    let mut vector = Array1::<f64>::from_elem(size, 1.0 / (size as f64).sqrt());
    let mut done = false;
    for _ in 0..max_iter {
        let product = adjacency.dot(&vector);
        let norm = product.dot(&product).sqrt();
        if norm == 0.0 {
            // A zero iterate means the matrix annihilated the vector; no
            // meaningful eigenvector can be extracted.
            return Err(GraphError::ComputationError(
                "Eigenvector computation failed".to_string(),
            ));
        }
        let unit = product / norm;
        // L1 change against the previous iterate decides convergence.
        let change = (&unit - &vector).mapv(f64::abs).sum();
        vector = unit;
        if change < tolerance {
            done = true;
            break;
        }
    }
    if !done {
        return Err(GraphError::ComputationError(
            "Eigenvector centrality did not converge".to_string(),
        ));
    }
    Ok(labels
        .into_iter()
        .enumerate()
        .map(|(i, label)| (label, vector[i]))
        .collect())
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::generators::create_graph;

    /// Kruskal on a 4-node graph: the heaviest edge (A-C, 3.0) must be
    /// skipped, leaving a tree of total weight 4.
    #[test]
    fn test_minimum_spanning_tree() {
        let mut graph = create_graph::<&str, f64>();
        for (u, v, w) in [
            ("A", "B", 1.0),
            ("B", "C", 2.0),
            ("A", "C", 3.0),
            ("C", "D", 1.0),
        ] {
            graph.add_edge(u, v, w).expect("Operation failed");
        }
        let mst = minimum_spanning_tree(&graph).expect("Operation failed");
        // A spanning tree over 4 nodes has exactly 3 edges.
        assert_eq!(mst.len(), 3);
        // Chosen edges: A-B (1), C-D (1), B-C (2) => total 4 (exact in f64).
        let total: f64 = mst.iter().map(|e| e.weight).sum();
        assert_eq!(total, 4.0);
    }

    /// Diamond DAG A -> {B, C} -> D: A precedes B and C, both precede D.
    #[test]
    fn test_topological_sort() {
        let mut graph = crate::generators::create_digraph::<&str, ()>();
        for (u, v) in [("A", "B"), ("A", "C"), ("B", "D"), ("C", "D")] {
            graph.add_edge(u, v, ()).expect("Operation failed");
        }
        let sorted = topological_sort(&graph).expect("Operation failed");
        let index_of = |label: &str| {
            sorted
                .iter()
                .position(|n| *n == label)
                .expect("Operation failed")
        };
        assert!(index_of("A") < index_of("B"));
        assert!(index_of("A") < index_of("C"));
        assert!(index_of("B") < index_of("D"));
        assert!(index_of("C") < index_of("D"));
    }

    /// PageRank on a small strongly-connected digraph: every score is
    /// positive and the scores sum to ~1.
    #[test]
    fn test_pagerank() {
        let mut graph = crate::generators::create_digraph::<&str, ()>();
        for (u, v) in [("A", "B"), ("A", "C"), ("B", "C"), ("C", "A")] {
            graph.add_edge(u, v, ()).expect("Operation failed");
        }
        let scores = pagerank(&graph, 0.85, 1e-6, 100);
        assert!(scores.values().all(|&v| v > 0.0));
        let total: f64 = scores.values().sum();
        assert!((total - 1.0).abs() < 0.01);
    }
}