1use std::ops::Range;
12
13use rayon::prelude::*;
14
15use crate::cch::{Cch, CchGraph};
16
/// Upward adjacency of a contracted (CCH) graph in CSR form, together with
/// the elimination tree produced during contraction.
#[derive(Clone)]
pub struct Topology {
    /// CSR offsets: node `u`'s up-edges occupy
    /// `head[first_out[u] as usize..first_out[u + 1] as usize]`.
    pub first_out: Vec<u32>,
    /// Head (higher rank) of every up-edge, sorted ascending within each node.
    pub head: Vec<u32>,
    /// Elimination-tree parent of each node — its lowest-ranked up-neighbor —
    /// or `None` for roots (nodes with no up-edges).
    pub etree: Vec<Option<u32>>,
}
23
24impl Topology {
25 fn from_nodes(nodes: Vec<ContractionNode>, etree: Vec<Option<u32>>) -> Self {
26 let mut first_out = Vec::with_capacity(nodes.len() + 1);
27 let mut head = Vec::with_capacity(nodes.iter().map(|n| n.edges.len()).sum());
28 first_out.push(0);
29 for node in nodes {
30 head.extend_from_slice(&node.edges);
31 first_out.push(head.len() as u32);
32 }
33 Self { first_out, head, etree }
34 }
35 pub fn build_scheduler(&self) -> Vec<Vec<u32>> {
36 let n = self.etree.len();
37 let mut depths = vec![0; n];
38 let mut max_depth = 0;
39 for u in (0..n).rev() {
40 if let Some(p) = self.etree[u] {
41 depths[u] = depths[p as usize] + 1;
42 max_depth = max_depth.max(depths[u]);
43 }
44 }
45 let mut levels = vec![Vec::new(); max_depth + 1];
46 for u in 0..n {
47 levels[max_depth - depths[u]].push(u as u32);
48 }
49 levels
50 }
51 #[inline(always)]
57 pub unsafe fn find_edge_id(&self, from: u32, to: u32) -> Option<u32> {
58 let r = unsafe { self.get_range(from as usize) };
59 self.head[r.clone()].binary_search(&to).ok().map(|idx| (r.start + idx) as u32)
60 }
61 #[inline(always)]
66 pub unsafe fn get_range(&self, u: usize) -> Range<usize> { unsafe { *self.first_out.get_unchecked(u) as usize..*self.first_out.get_unchecked(u + 1) as usize } }
67 #[inline(always)]
72 pub unsafe fn get_head(&self, edge_id: usize) -> usize { unsafe { *self.head.get_unchecked(edge_id) as usize } }
73}
74
/// Per-node scratch state during contraction: the node's up-edge list (ranks
/// of higher-ranked neighbors), grown as lower-ranked nodes hand their edge
/// suffixes up the elimination tree.
#[derive(Debug, Default)]
struct ContractionNode {
    edges: Vec<u32>,
}
79
80impl Cch {
81 fn internal_contract(mut nodes: Vec<ContractionNode>, mut etree: Vec<Option<u32>>, start_rank: usize) -> Topology {
82 for i in start_rank..nodes.len() {
83 nodes[i].edges.sort_unstable();
84 nodes[i].edges.dedup();
85
86 if let Some(&lowest) = nodes[i].edges.first() {
87 etree[i] = Some(lowest);
88 let suffix = nodes[i].edges[1..].to_vec();
89 nodes[lowest as usize].edges.extend_from_slice(&suffix);
90 }
91 }
92 Topology::from_nodes(nodes, etree)
93 }
94
95 pub fn contract<G: CchGraph + Sync>(ranks: &[u32], graph: &G) -> Topology {
96 let n = graph.num_nodes();
97
98 let mut up_edges = vec![vec![]; n];
99 for u in 0..n {
100 let u_r = ranks[u];
101 for v in graph.neighbors(u as u32) {
102 let v_r = ranks[v as usize];
103 if u_r < v_r {
104 up_edges[u_r as usize].push(v_r);
105 } else if v_r < u_r {
106 up_edges[v_r as usize].push(u_r);
107 }
108 }
109 }
110
111 let nodes: Vec<ContractionNode> = up_edges.into_par_iter().map(|edges| ContractionNode { edges }).collect();
112 let etree = vec![None; n];
113 Self::internal_contract(nodes, etree, 0)
114 }
115
116 pub fn recontract<G: CchGraph + Sync>(&self, graph: &G, start_rank: u32, old: &Topology) -> Topology {
117 let n = graph.num_nodes();
118 let start_r = start_rank as usize;
119
120 let mut new_up_edges = vec![vec![]; n - start_r];
121
122 for u in 0..n {
123 let u_r = self.ranks[u] as usize;
124 for v in graph.neighbors(u as u32) {
125 let v_r = self.ranks[v as usize] as usize;
126 if u_r < v_r && u_r >= start_r {
127 new_up_edges[u_r - start_r].push(v_r as u32);
128 } else if v_r < u_r && v_r >= start_r {
129 new_up_edges[v_r - start_r].push(u_r as u32);
130 }
131 }
132 }
133 let mut nodes: Vec<ContractionNode> = (0..start_r)
134 .into_par_iter()
135 .map(|r| ContractionNode {
136 edges: old.head[unsafe { old.get_range(r) }].to_vec(),
137 })
138 .chain(new_up_edges.into_par_iter().map(|edges| ContractionNode { edges }))
139 .collect();
140 let etree = old.etree.clone();
141 let (low, high) = nodes.split_at_mut(start_r);
142
143 for (r, node) in low.iter().enumerate() {
144 if let Some(parent) = etree[r].filter(|&p| p >= start_rank) {
145 let high_idx = (parent - start_rank) as usize;
146 if let Some((_, suffix)) = node.edges.split_first() {
147 high[high_idx].edges.extend_from_slice(suffix);
148 }
149 }
150 }
151 Self::internal_contract(nodes, etree, start_r)
152 }
153}