use super::*;

/// Maps logical qubits to physical qubits via spectral embeddings of the
/// connectivity graphs.
pub struct SpectralEmbeddingMapper {
    /// Number of embedding dimensions.
    pub embedding_dims: usize,
    /// Laplacian normalization scheme used for the embedding.
    pub normalization: SpectralNormalization,
    /// Convergence tolerance for the eigendecomposition.
    pub tolerance: f64,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum SpectralNormalization {
    Unnormalized,
    Symmetric,
    RandomWalk,
}

impl SpectralEmbeddingMapper {
    pub fn new(embedding_dims: usize) -> Self {
        Self {
            embedding_dims,
            normalization: SpectralNormalization::Symmetric,
            tolerance: 1e-10,
        }
    }

    /// Computes spectral embeddings for the logical and physical graphs.
    ///
    /// Placeholder: returns zero-filled `(node_count, embedding_dims)`
    /// matrices for both graphs; a full implementation would use the
    /// leading eigenvectors of each graph Laplacian.
    pub fn embed_graphs(
        &self,
        logical_graph: &Graph<usize, f64>,
        physical_graph: &Graph<usize, f64>,
    ) -> DeviceResult<(Array2<f64>, Array2<f64>)> {
        let logical_embedding = Array2::zeros((logical_graph.node_count(), self.embedding_dims));
        let physical_embedding = Array2::zeros((physical_graph.node_count(), self.embedding_dims));

        Ok((logical_embedding, physical_embedding))
    }
}
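
// A minimal usage sketch for the spectral mapper. Because `embed_graphs` is
// currently a zero-filled placeholder, the test only checks output shapes.
// Graph construction assumes the `Graph::new`/`add_node` API used later in
// this module.
#[cfg(test)]
mod spectral_embedding_tests {
    use super::*;

    #[test]
    fn embeddings_have_expected_shape() {
        let mut logical = Graph::new();
        let mut physical = Graph::new();
        for node in 0..3 {
            logical.add_node(node);
        }
        for node in 0..5 {
            physical.add_node(node);
        }

        let mapper = SpectralEmbeddingMapper::new(2);
        let (log_emb, phys_emb) = mapper
            .embed_graphs(&logical, &physical)
            .expect("placeholder embedding should not fail");
        assert_eq!(log_emb.dim(), (3, 2));
        assert_eq!(phys_emb.dim(), (5, 2));
    }
}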

/// Maps qubits by detecting community structure in the connectivity graphs.
pub struct CommunityBasedMapper {
    /// Community detection algorithm to use.
    pub method: CommunityMethod,
    /// Resolution parameter controlling community granularity.
    pub resolution: f64,
    /// Optional seed for randomized detection methods.
    pub random_seed: Option<u64>,
}

impl CommunityBasedMapper {
    pub fn new(method: CommunityMethod) -> Self {
        Self {
            method,
            resolution: 1.0,
            random_seed: None,
        }
    }

    /// Detects communities in `graph`, returning a node -> community-id map.
    pub fn detect_communities(
        &self,
        graph: &Graph<usize, f64>,
    ) -> DeviceResult<HashMap<usize, usize>> {
        match self.method {
            CommunityMethod::Louvain => self.louvain_communities_result(graph),
            CommunityMethod::Leiden => self.leiden_communities(graph),
            CommunityMethod::LabelPropagation => self.label_propagation(graph),
            CommunityMethod::SpectralClustering => self.spectral_clustering(graph),
            CommunityMethod::Walktrap => self.walktrap_communities(graph),
        }
    }

    fn louvain_communities_result(
        &self,
        graph: &Graph<usize, f64>,
    ) -> DeviceResult<HashMap<usize, usize>> {
        // Delegate to the underlying Louvain implementation (free function of
        // the same name) and flatten its result into a node -> community map.
        let community_result = louvain_communities_result(graph);
        let mut result = HashMap::new();
        for (node, community_id) in community_result.node_communities {
            result.insert(node, community_id);
        }
        Ok(result)
    }

    // The remaining detection methods are placeholders that return an empty
    // mapping until real implementations are wired in.
    fn leiden_communities(
        &self,
        _graph: &Graph<usize, f64>,
    ) -> DeviceResult<HashMap<usize, usize>> {
        Ok(HashMap::new())
    }

    fn label_propagation(&self, _graph: &Graph<usize, f64>) -> DeviceResult<HashMap<usize, usize>> {
        Ok(HashMap::new())
    }

    fn spectral_clustering(
        &self,
        _graph: &Graph<usize, f64>,
    ) -> DeviceResult<HashMap<usize, usize>> {
        Ok(HashMap::new())
    }

    fn walktrap_communities(
        &self,
        _graph: &Graph<usize, f64>,
    ) -> DeviceResult<HashMap<usize, usize>> {
        Ok(HashMap::new())
    }
}
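
// A minimal usage sketch for community detection: two triangles joined by a
// bridge edge. The exact assignment is produced by the underlying Louvain
// routine, so the test only checks that a non-empty mapping comes back.
#[cfg(test)]
mod community_mapper_tests {
    use super::*;

    #[test]
    fn louvain_returns_a_community_assignment() {
        let mut graph = Graph::new();
        for node in 0..6 {
            graph.add_node(node);
        }
        for &(a, b) in &[(0, 1), (1, 2), (0, 2), (3, 4), (4, 5), (3, 5), (2, 3)] {
            let _ = graph.add_edge(a, b, 1.0);
        }

        let mapper = CommunityBasedMapper::new(CommunityMethod::Louvain);
        let communities = mapper.detect_communities(&graph).unwrap();
        assert!(!communities.is_empty());
    }
}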

/// Weights qubit assignment by a combination of node-centrality measures.
pub struct CentralityWeightedMapper {
    /// Centrality measures to combine.
    pub centrality_measures: Vec<CentralityMeasure>,
    /// Relative weight for each measure (parallel to `centrality_measures`).
    pub centrality_weights: Vec<f64>,
    /// Normalization applied to each measure before weighting.
    pub normalization: CentralityNormalization,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum CentralityMeasure {
    Betweenness,
    Closeness,
    Eigenvector,
    PageRank,
    Degree,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum CentralityNormalization {
    None,
    MinMax,
    ZScore,
    Softmax,
}

impl CentralityWeightedMapper {
    pub fn new() -> Self {
        Self {
            centrality_measures: vec![
                CentralityMeasure::Betweenness,
                CentralityMeasure::Closeness,
                CentralityMeasure::PageRank,
            ],
            centrality_weights: vec![0.4, 0.3, 0.3],
            normalization: CentralityNormalization::MinMax,
        }
    }

    /// Computes a weighted combination of the configured centrality
    /// measures, normalizing each measure before applying its weight.
    pub fn calculate_centralities(
        &self,
        graph: &Graph<usize, f64>,
    ) -> DeviceResult<HashMap<usize, f64>> {
        use scirs2_graph::pagerank_centrality;

        let mut combined_centrality = HashMap::new();

        for (measure, weight) in self
            .centrality_measures
            .iter()
            .zip(&self.centrality_weights)
        {
            let centrality = match measure {
                CentralityMeasure::Betweenness => betweenness_centrality(graph, true),
                CentralityMeasure::Closeness => closeness_centrality(graph, true),
                CentralityMeasure::Eigenvector => {
                    eigenvector_centrality(graph, 100, 1e-6).map_err(|e| {
                        DeviceError::GraphAnalysisError(format!("Eigenvector failed: {:?}", e))
                    })?
                }
                CentralityMeasure::PageRank => {
                    pagerank_centrality(graph, 0.85, 1e-6).map_err(|e| {
                        DeviceError::GraphAnalysisError(format!("PageRank failed: {:?}", e))
                    })?
                }
                CentralityMeasure::Degree => {
                    // Degree centrality is computed locally: the raw degree of
                    // each node, normalized later alongside the other measures.
                    let mut degree_centrality = HashMap::new();
                    for node in graph.nodes() {
                        let degree = graph.degree(node) as f64;
                        degree_centrality.insert(*node, degree);
                    }
                    degree_centrality
                }
            };

            let normalized = self.normalize_centrality(&centrality);

            for (node, value) in normalized {
                *combined_centrality.entry(node).or_insert(0.0) += weight * value;
            }
        }

        Ok(combined_centrality)
    }

    /// Rescales a raw centrality map according to `self.normalization`.
    fn normalize_centrality(&self, centrality: &HashMap<usize, f64>) -> HashMap<usize, f64> {
        match self.normalization {
            CentralityNormalization::None => centrality.clone(),
            CentralityNormalization::MinMax => {
                let values: Vec<f64> = centrality.values().copied().collect();
                let min_val = values.iter().fold(f64::INFINITY, |a, &b| a.min(b));
                let max_val = values.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
                let range = max_val - min_val;

                if range > 1e-10 {
                    centrality
                        .iter()
                        .map(|(&k, &v)| (k, (v - min_val) / range))
                        .collect()
                } else {
                    // Degenerate case: all values equal, map everything to 0.5.
                    centrality.iter().map(|(&k, _)| (k, 0.5)).collect()
                }
            }
            CentralityNormalization::ZScore => {
                let values: Vec<f64> = centrality.values().copied().collect();
                let mean = values.iter().sum::<f64>() / values.len() as f64;
                let var =
                    values.iter().map(|v| (v - mean).powi(2)).sum::<f64>() / values.len() as f64;
                let std_dev = var.sqrt();

                if std_dev > 1e-10 {
                    centrality
                        .iter()
                        .map(|(&k, &v)| (k, (v - mean) / std_dev))
                        .collect()
                } else {
                    centrality.iter().map(|(&k, _)| (k, 0.0)).collect()
                }
            }
            CentralityNormalization::Softmax => {
                // Subtract the max before exponentiating for numerical stability.
                let values: Vec<f64> = centrality.values().copied().collect();
                let max_val = values.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
                let exp_sum: f64 = values.iter().map(|v| (v - max_val).exp()).sum();

                centrality
                    .iter()
                    .map(|(&k, &v)| (k, (v - max_val).exp() / exp_sum))
                    .collect()
            }
        }
    }
}
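
// A small sketch of the weighted-centrality pipeline using only the `Degree`
// measure (computed locally above), so it avoids the external centrality
// routines. On a path graph 0-1-2 the degrees are (1, 2, 1), which min-max
// normalization maps to (0.0, 1.0, 0.0); this assumes `degree` counts
// incident edges.
#[cfg(test)]
mod centrality_mapper_tests {
    use super::*;

    #[test]
    fn degree_centrality_ranks_path_center_highest() {
        let mut graph = Graph::new();
        for node in 0..3 {
            graph.add_node(node);
        }
        let _ = graph.add_edge(0, 1, 1.0);
        let _ = graph.add_edge(1, 2, 1.0);

        let mapper = CentralityWeightedMapper {
            centrality_measures: vec![CentralityMeasure::Degree],
            centrality_weights: vec![1.0],
            normalization: CentralityNormalization::MinMax,
        };
        let scores = mapper.calculate_centralities(&graph).unwrap();
        assert_eq!(scores[&1], 1.0);
        assert_eq!(scores[&0], 0.0);
    }
}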

/// Maps qubits by solving a bipartite matching between logical and physical
/// qubits, with assignment weights derived from distance and/or fidelity.
pub struct BipartiteMatchingMapper {
    /// How assignment weights are computed.
    pub weight_method: WeightMethod,
    /// Maximum allowed assignment weight.
    pub max_weight: f64,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum WeightMethod {
    Distance,
    Fidelity,
    Hybrid {
        distance_weight: f64,
        fidelity_weight: f64,
    },
}

impl BipartiteMatchingMapper {
    pub fn new() -> Self {
        Self {
            weight_method: WeightMethod::Hybrid {
                distance_weight: 0.6,
                fidelity_weight: 0.4,
            },
            max_weight: 100.0,
        }
    }

    /// Finds a logical-to-physical mapping via maximum bipartite matching,
    /// then greedily assigns any logical qubits the matching left unmapped.
    pub fn find_optimal_mapping(
        &self,
        logical_graph: &Graph<usize, f64>,
        physical_graph: &Graph<usize, f64>,
        calibration: Option<&DeviceCalibration>,
    ) -> DeviceResult<HashMap<usize, usize>> {
        let (bipartite_graph, coloring, logical_ids, physical_offset) =
            self.build_bipartite_graph(logical_graph, physical_graph, calibration)?;

        let matching_result = maximum_bipartite_matching(&bipartite_graph, &coloring);

        // Translate matched pairs back to original ids by undoing the offset
        // applied to physical nodes.
        let mut mapping = HashMap::new();
        for (logical_node, physical_node) in matching_result.matching {
            let logical_id = logical_node;
            let physical_id = physical_node.saturating_sub(physical_offset);
            mapping.insert(logical_id, physical_id);
        }

        // Fallback: assign each unmatched logical qubit to the first unused
        // physical qubit so that every logical qubit ends up mapped.
        for logical_id in &logical_ids {
            if !mapping.contains_key(logical_id) {
                let used_physical: std::collections::HashSet<_> = mapping.values().collect();
                for phys in 0..physical_graph.node_count() {
                    if !used_physical.contains(&phys) {
                        mapping.insert(*logical_id, phys);
                        break;
                    }
                }
            }
        }

        Ok(mapping)
    }

    /// Builds the bipartite assignment graph: logical nodes keep their ids
    /// (color 0) and physical nodes are shifted by `physical_offset`
    /// (color 1) so the two sides stay disjoint.
    fn build_bipartite_graph(
        &self,
        logical_graph: &Graph<usize, f64>,
        physical_graph: &Graph<usize, f64>,
        calibration: Option<&DeviceCalibration>,
    ) -> DeviceResult<(Graph<usize, f64>, HashMap<usize, u8>, Vec<usize>, usize)> {
        let mut bipartite = Graph::new();
        let mut coloring = HashMap::new();

        // Offset for physical node ids; assumes logical ids stay below 1000.
        let physical_offset = 1000;

        let mut logical_ids = Vec::new();
        for &node_data in logical_graph.nodes() {
            bipartite.add_node(node_data);
            coloring.insert(node_data, 0u8);
            logical_ids.push(node_data);
        }

        let mut physical_ids = Vec::new();
        for &node_data in physical_graph.nodes() {
            let offset_node = node_data + physical_offset;
            bipartite.add_node(offset_node);
            coloring.insert(offset_node, 1u8);
            physical_ids.push(node_data);
        }

        // Connect every logical node to every physical node, weighted by the
        // quality of that assignment.
        for &logical_id in &logical_ids {
            for &physical_id in &physical_ids {
                let weight = self.calculate_assignment_weight(logical_id, physical_id, calibration);
                let _ = bipartite.add_edge(logical_id, physical_id + physical_offset, weight);
            }
        }

        Ok((bipartite, coloring, logical_ids, physical_offset))
    }

    /// Scores a single logical-to-physical assignment according to the
    /// configured weight method.
    fn calculate_assignment_weight(
        &self,
        _logical_id: usize,
        physical_id: usize,
        calibration: Option<&DeviceCalibration>,
    ) -> f64 {
        match self.weight_method {
            WeightMethod::Distance => {
                // Placeholder: distance scoring is not implemented yet.
                1.0
            }
            WeightMethod::Fidelity => {
                if let Some(cal) = calibration {
                    cal.single_qubit_fidelity(physical_id).unwrap_or(0.99)
                } else {
                    0.99
                }
            }
            WeightMethod::Hybrid {
                distance_weight,
                fidelity_weight,
            } => {
                let distance_score = 1.0;
                let fidelity_score = if let Some(cal) = calibration {
                    cal.single_qubit_fidelity(physical_id).unwrap_or(0.99)
                } else {
                    0.99
                };

                distance_weight * distance_score + fidelity_weight * fidelity_score
            }
        }
    }
}
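
// A minimal end-to-end sketch. With no calibration supplied, every assignment
// gets the same hybrid weight; whatever `maximum_bipartite_matching` returns,
// the greedy fallback in `find_optimal_mapping` guarantees that each logical
// qubit receives some physical qubit.
#[cfg(test)]
mod bipartite_mapper_tests {
    use super::*;

    #[test]
    fn every_logical_qubit_gets_mapped() {
        let mut logical = Graph::new();
        for node in 0..2 {
            logical.add_node(node);
        }
        let _ = logical.add_edge(0, 1, 1.0);

        let mut physical = Graph::new();
        for node in 0..4 {
            physical.add_node(node);
        }
        for &(a, b) in &[(0, 1), (1, 2), (2, 3)] {
            let _ = physical.add_edge(a, b, 1.0);
        }

        let mapper = BipartiteMatchingMapper::new();
        let mapping = mapper
            .find_optimal_mapping(&logical, &physical, None)
            .unwrap();
        assert!(mapping.contains_key(&0));
        assert!(mapping.contains_key(&1));
    }
}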

/// Partitions graphs with a multilevel coarsen/partition/refine scheme.
pub struct MultilevelPartitioner {
    /// Number of coarsening levels.
    pub num_levels: usize,
    /// Target ratio of node counts between successive levels.
    pub coarsening_ratio: f64,
    /// Partitioner applied to the coarsest graph.
    pub base_partitioner: BasePartitioner,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum BasePartitioner {
    SpectralBisection,
    KernighanLin,
    FiducciaMattheyses,
    RandomBisection,
}

impl MultilevelPartitioner {
    pub fn new() -> Self {
        Self {
            num_levels: 5,
            coarsening_ratio: 0.5,
            base_partitioner: BasePartitioner::SpectralBisection,
        }
    }

    /// Partitions `graph` into `num_partitions` parts.
    ///
    /// Placeholder: assigns nodes round-robin instead of running the full
    /// multilevel coarsening/refinement pipeline.
    pub fn partition_graph(
        &self,
        graph: &Graph<usize, f64>,
        num_partitions: usize,
    ) -> DeviceResult<HashMap<usize, usize>> {
        let mut partition = HashMap::new();
        let nodes = graph.nodes();

        for (i, node_data) in nodes.iter().enumerate() {
            partition.insert(**node_data, i % num_partitions);
        }

        Ok(partition)
    }
}
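
// A minimal sketch of the current placeholder behavior: round-robin
// assignment covers every node and never produces a partition index outside
// `0..num_partitions`.
#[cfg(test)]
mod multilevel_partitioner_tests {
    use super::*;

    #[test]
    fn round_robin_partition_covers_all_nodes() {
        let mut graph = Graph::new();
        for node in 0..4 {
            graph.add_node(node);
        }
        let _ = graph.add_edge(0, 1, 1.0);
        let _ = graph.add_edge(2, 3, 1.0);

        let partitioner = MultilevelPartitioner::new();
        let partition = partitioner.partition_graph(&graph, 2).unwrap();
        assert_eq!(partition.len(), 4);
        assert!(partition.values().all(|&p| p < 2));
    }
}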