1use super::types::*;
4use scirs2_core::ndarray::Array2;
5use std::collections::HashMap;
6
/// Combines per-component solutions of a decomposed problem into one
/// integrated global solution, according to a configurable strategy.
pub struct SolutionIntegrator {
    // Top-level integration strategy (weighted voting, consensus, ...).
    strategy: IntegrationStrategy,
    // How per-component weights are derived for weighted schemes.
    weight_scheme: WeightScheme,
    // How disagreements between components on a variable are resolved.
    conflict_resolution: ConflictResolution,
}
13
/// Scheme used to derive a weight for each component solution.
#[derive(Debug, Clone)]
pub enum WeightScheme {
    /// Every component solution gets weight 1.0.
    Uniform,
    /// Weight proportional to the number of variables a solution assigns.
    SizeBased,
    /// Weight derived from solution energy (lower energy => larger weight).
    QualityBased,
    /// Weight taken from each solution's own `weight` field, normalized.
    ConfidenceBased,
    /// Caller-supplied weights; length must match the number of solutions.
    Custom(Vec<f64>),
}
28
/// Policy for settling a variable on which component solutions disagree.
#[derive(Debug, Clone)]
pub enum ConflictResolution {
    /// Take the value proposed by the strict majority of components.
    MajorityVote,
    /// Prefer the value from the highest-quality solution.
    HighestQuality,
    /// Prefer the value from the most confident solution.
    MostConfident,
    /// Pick one of the proposed values uniformly at random.
    Random,
    /// Decide by the fraction of `true` votes (> 0.5 wins).
    WeightedAverage,
}
43
44impl SolutionIntegrator {
45 pub const fn new(strategy: IntegrationStrategy) -> Self {
47 Self {
48 strategy,
49 weight_scheme: WeightScheme::Uniform,
50 conflict_resolution: ConflictResolution::MajorityVote,
51 }
52 }
53
54 pub fn with_weight_scheme(mut self, scheme: WeightScheme) -> Self {
56 self.weight_scheme = scheme;
57 self
58 }
59
60 pub const fn with_conflict_resolution(mut self, resolution: ConflictResolution) -> Self {
62 self.conflict_resolution = resolution;
63 self
64 }
65
66 pub fn integrate_solutions(
68 &self,
69 component_solutions: &[ComponentSolution],
70 global_var_map: &HashMap<String, usize>,
71 ) -> Result<IntegratedSolution, String> {
72 match self.strategy {
73 IntegrationStrategy::WeightedVoting => {
74 self.weighted_voting_integration(component_solutions, global_var_map)
75 }
76 IntegrationStrategy::Consensus => {
77 self.consensus_integration(component_solutions, global_var_map)
78 }
79 IntegrationStrategy::BestSelection => {
80 self.best_selection_integration(component_solutions, global_var_map)
81 }
82 IntegrationStrategy::MajorityVoting => {
83 self.majority_voting_integration(component_solutions, global_var_map)
84 }
85 }
86 }
87
88 fn weighted_voting_integration(
90 &self,
91 component_solutions: &[ComponentSolution],
92 _global_var_map: &HashMap<String, usize>,
93 ) -> Result<IntegratedSolution, String> {
94 let weights = self.compute_weights(component_solutions)?;
95 let mut integrated_assignment = HashMap::new();
96 let mut variable_votes: HashMap<String, f64> = HashMap::new();
97
98 for (i, solution) in component_solutions.iter().enumerate() {
100 let weight = weights.get(i).unwrap_or(&1.0);
101
102 for (var_name, &value) in &solution.assignment {
103 let vote = if value { *weight } else { -*weight };
104 *variable_votes.entry(var_name.clone()).or_insert(0.0) += vote;
105 }
106 }
107
108 for (var_name, vote_sum) in variable_votes {
110 integrated_assignment.insert(var_name, vote_sum > 0.0);
111 }
112
113 let energy = self.calculate_integrated_energy(component_solutions, &weights);
115 let confidence = self.calculate_integration_confidence(component_solutions, &weights);
116
117 Ok(IntegratedSolution {
118 assignment: integrated_assignment,
119 energy,
120 confidence,
121 component_solutions: component_solutions.to_vec(),
122 })
123 }
124
125 fn consensus_integration(
127 &self,
128 component_solutions: &[ComponentSolution],
129 _global_var_map: &HashMap<String, usize>,
130 ) -> Result<IntegratedSolution, String> {
131 let mut consensus_assignment = HashMap::new();
132 let mut conflicts = Vec::new();
133
134 let mut variable_values: HashMap<String, Vec<bool>> = HashMap::new();
136
137 for solution in component_solutions {
138 for (var_name, &value) in &solution.assignment {
139 variable_values
140 .entry(var_name.clone())
141 .or_default()
142 .push(value);
143 }
144 }
145
146 for (var_name, values) in variable_values {
148 if values.is_empty() {
149 continue;
150 }
151
152 let first_value = values[0];
154 if values.iter().all(|&v| v == first_value) {
155 consensus_assignment.insert(var_name, first_value);
157 } else {
158 let resolved_value = self.resolve_conflict(&var_name, &values)?;
160 consensus_assignment.insert(var_name.clone(), resolved_value);
161 conflicts.push(var_name);
162 }
163 }
164
165 let energy = component_solutions
166 .iter()
167 .map(|s| s.energy)
168 .fold(0.0, |acc, e| acc + e);
169 let confidence = if conflicts.is_empty() { 1.0 } else { 0.5 };
170
171 Ok(IntegratedSolution {
172 assignment: consensus_assignment,
173 energy,
174 confidence,
175 component_solutions: component_solutions.to_vec(),
176 })
177 }
178
179 fn best_selection_integration(
181 &self,
182 component_solutions: &[ComponentSolution],
183 _global_var_map: &HashMap<String, usize>,
184 ) -> Result<IntegratedSolution, String> {
185 let best_solution = component_solutions
187 .iter()
188 .min_by(|a, b| {
189 a.energy
190 .partial_cmp(&b.energy)
191 .unwrap_or(std::cmp::Ordering::Equal)
192 })
193 .ok_or("No solutions provided")?;
194
195 Ok(IntegratedSolution {
196 assignment: best_solution.assignment.clone(),
197 energy: best_solution.energy,
198 confidence: best_solution.weight,
199 component_solutions: component_solutions.to_vec(),
200 })
201 }
202
203 fn majority_voting_integration(
205 &self,
206 component_solutions: &[ComponentSolution],
207 _global_var_map: &HashMap<String, usize>,
208 ) -> Result<IntegratedSolution, String> {
209 let mut integrated_assignment = HashMap::new();
210 let mut variable_votes: HashMap<String, (usize, usize)> = HashMap::new(); for solution in component_solutions {
214 for (var_name, &value) in &solution.assignment {
215 let (true_votes, false_votes) =
216 variable_votes.entry(var_name.clone()).or_insert((0, 0));
217 if value {
218 *true_votes += 1;
219 } else {
220 *false_votes += 1;
221 }
222 }
223 }
224
225 for (var_name, (true_votes, false_votes)) in variable_votes {
227 integrated_assignment.insert(var_name, true_votes > false_votes);
228 }
229
230 let energy = component_solutions
231 .iter()
232 .map(|s| s.energy)
233 .fold(0.0, |acc, e| acc + e);
234 let confidence = 0.8; Ok(IntegratedSolution {
237 assignment: integrated_assignment,
238 energy,
239 confidence,
240 component_solutions: component_solutions.to_vec(),
241 })
242 }
243
244 fn compute_weights(&self, solutions: &[ComponentSolution]) -> Result<Vec<f64>, String> {
246 match &self.weight_scheme {
247 WeightScheme::Uniform => Ok(vec![1.0; solutions.len()]),
248 WeightScheme::SizeBased => {
249 let sizes: Vec<f64> = solutions
250 .iter()
251 .map(|s| s.assignment.len() as f64)
252 .collect();
253 let total_size: f64 = sizes.iter().sum();
254 Ok(sizes.into_iter().map(|s| s / total_size).collect())
255 }
256 WeightScheme::QualityBased => {
257 let max_energy = solutions
259 .iter()
260 .map(|s| s.energy)
261 .fold(f64::NEG_INFINITY, f64::max);
262
263 let weights: Vec<f64> = solutions
264 .iter()
265 .map(|s| max_energy - s.energy + 1.0)
266 .collect();
267 let total_weight: f64 = weights.iter().sum();
268 Ok(weights.into_iter().map(|w| w / total_weight).collect())
269 }
270 WeightScheme::ConfidenceBased => {
271 let weights: Vec<f64> = solutions.iter().map(|s| s.weight).collect();
272 let total_weight: f64 = weights.iter().sum();
273 if total_weight > 0.0 {
274 Ok(weights.into_iter().map(|w| w / total_weight).collect())
275 } else {
276 Ok(vec![1.0 / solutions.len() as f64; solutions.len()])
277 }
278 }
279 WeightScheme::Custom(weights) => {
280 if weights.len() == solutions.len() {
281 Ok(weights.clone())
282 } else {
283 Err("Custom weights length mismatch".to_string())
284 }
285 }
286 }
287 }
288
289 fn resolve_conflict(&self, _var_name: &str, values: &[bool]) -> Result<bool, String> {
291 match self.conflict_resolution {
292 ConflictResolution::MajorityVote => {
293 let true_count = values.iter().filter(|&&v| v).count();
294 Ok(true_count > values.len() / 2)
295 }
296 ConflictResolution::Random => {
297 use scirs2_core::random::prelude::*;
298 let mut rng = thread_rng();
299 let index = rng.gen_range(0..values.len());
300 Ok(values[index])
301 }
302 ConflictResolution::HighestQuality => {
303 Ok(values[0])
305 }
306 ConflictResolution::MostConfident => {
307 Ok(values[0])
309 }
310 ConflictResolution::WeightedAverage => {
311 let true_count = values.iter().filter(|&&v| v).count();
312 Ok(true_count as f64 / values.len() as f64 > 0.5)
313 }
314 }
315 }
316
317 fn calculate_integrated_energy(&self, solutions: &[ComponentSolution], weights: &[f64]) -> f64 {
319 solutions
320 .iter()
321 .zip(weights.iter())
322 .map(|(solution, weight)| solution.energy * weight)
323 .sum()
324 }
325
326 fn calculate_integration_confidence(
328 &self,
329 solutions: &[ComponentSolution],
330 weights: &[f64],
331 ) -> f64 {
332 if solutions.is_empty() {
333 return 0.0;
334 }
335
336 let weighted_confidence: f64 = solutions
338 .iter()
339 .zip(weights.iter())
340 .map(|(solution, weight)| solution.weight * weight)
341 .sum();
342
343 weighted_confidence.min(1.0).max(0.0)
344 }
345}
346
/// Computes and records structural quality metrics for QUBO decompositions.
pub struct DecompositionAnalyzer {
    // Metrics for every decomposition analyzed so far, in call order.
    metrics_history: Vec<DecompositionMetrics>,
}
351
impl Default for DecompositionAnalyzer {
    /// Equivalent to [`DecompositionAnalyzer::new`]: empty metrics history.
    fn default() -> Self {
        Self::new()
    }
}
357
358impl DecompositionAnalyzer {
359 pub const fn new() -> Self {
361 Self {
362 metrics_history: Vec::new(),
363 }
364 }
365
366 pub fn analyze_decomposition(
368 &mut self,
369 decomposition: &Partitioning,
370 _original_qubo: &Array2<f64>,
371 ) -> DecompositionMetrics {
372 let start_time = std::time::Instant::now();
373
374 let width = self.calculate_decomposition_width(decomposition);
375 let num_clusters = decomposition.subproblems.len();
376 let balance_factor = self.calculate_balance_factor(decomposition);
377 let separator_size = self.calculate_average_separator_size(decomposition);
378
379 let metrics = DecompositionMetrics {
380 width,
381 num_clusters,
382 balance_factor,
383 separator_size,
384 decomposition_time: start_time.elapsed(),
385 };
386
387 self.metrics_history.push(metrics.clone());
388 metrics
389 }
390
391 fn calculate_decomposition_width(&self, decomposition: &Partitioning) -> usize {
393 decomposition
394 .subproblems
395 .iter()
396 .map(|sub| sub.variables.len())
397 .max()
398 .unwrap_or(0)
399 }
400
401 fn calculate_balance_factor(&self, decomposition: &Partitioning) -> f64 {
403 if decomposition.subproblems.is_empty() {
404 return 1.0;
405 }
406
407 let sizes: Vec<usize> = decomposition
408 .subproblems
409 .iter()
410 .map(|sub| sub.variables.len())
411 .collect();
412
413 let mean_size = sizes.iter().sum::<usize>() as f64 / sizes.len() as f64;
414 let variance = sizes
415 .iter()
416 .map(|&size| (size as f64 - mean_size).powi(2))
417 .sum::<f64>()
418 / sizes.len() as f64;
419
420 if mean_size > 0.0 {
422 mean_size / (variance.sqrt() + 1e-10)
423 } else {
424 1.0
425 }
426 }
427
428 fn calculate_average_separator_size(&self, decomposition: &Partitioning) -> f64 {
430 if decomposition.coupling_terms.is_empty() {
431 return 0.0;
432 }
433
434 let mut coupling_vars = std::collections::HashSet::new();
436 for coupling in &decomposition.coupling_terms {
437 coupling_vars.insert(&coupling.var1);
438 coupling_vars.insert(&coupling.var2);
439 }
440
441 coupling_vars.len() as f64 / decomposition.subproblems.len() as f64
442 }
443
444 pub fn get_performance_trends(&self) -> Vec<(usize, f64, f64)> {
446 self.metrics_history
447 .iter()
448 .enumerate()
449 .map(|(i, metrics)| {
450 (
451 i,
452 metrics.balance_factor,
453 metrics.decomposition_time.as_secs_f64(),
454 )
455 })
456 .collect()
457 }
458
459 pub fn generate_report(&self) -> String {
461 if self.metrics_history.is_empty() {
462 return "No decomposition metrics available".to_string();
463 }
464
465 let latest = &self.metrics_history[self.metrics_history.len() - 1];
466
467 format!(
468 "Decomposition Analysis Report\n\
469 ============================\n\
470 Width (max subproblem size): {}\n\
471 Number of clusters: {}\n\
472 Balance factor: {:.3}\n\
473 Average separator size: {:.3}\n\
474 Decomposition time: {:.3}s\n\
475 Total decompositions analyzed: {}",
476 latest.width,
477 latest.num_clusters,
478 latest.balance_factor,
479 latest.separator_size,
480 latest.decomposition_time.as_secs_f64(),
481 self.metrics_history.len()
482 )
483 }
484}
485
/// Stateless validation checks that a partitioning faithfully represents
/// the original QUBO problem.
pub struct DecompositionValidator;
488
489impl DecompositionValidator {
490 pub fn validate_decomposition(
492 partitioning: &Partitioning,
493 original_qubo: &Array2<f64>,
494 original_var_map: &HashMap<String, usize>,
495 ) -> Result<ValidationReport, String> {
496 let mut issues = Vec::new();
497
498 let coverage_issue = Self::check_variable_coverage(partitioning, original_var_map);
500 if let Some(issue) = coverage_issue {
501 issues.push(issue);
502 }
503
504 let equivalence_issue = Self::check_problem_equivalence(partitioning, original_qubo);
506 if let Some(issue) = equivalence_issue {
507 issues.push(issue);
508 }
509
510 let coupling_issue = Self::check_coupling_consistency(partitioning, original_qubo);
512 if let Some(issue) = coupling_issue {
513 issues.push(issue);
514 }
515
516 Ok(ValidationReport {
517 is_valid: issues.is_empty(),
518 issues,
519 coverage_percentage: Self::calculate_coverage_percentage(
520 partitioning,
521 original_var_map,
522 ),
523 })
524 }
525
526 fn check_variable_coverage(
528 partitioning: &Partitioning,
529 original_var_map: &HashMap<String, usize>,
530 ) -> Option<ValidationIssue> {
531 let mut covered_vars = std::collections::HashSet::new();
532
533 for subproblem in &partitioning.subproblems {
534 for var in &subproblem.variables {
535 covered_vars.insert(var.clone());
536 }
537 }
538
539 let missing_vars: Vec<_> = original_var_map
540 .keys()
541 .filter(|var| !covered_vars.contains(*var))
542 .cloned()
543 .collect();
544
545 if missing_vars.is_empty() {
546 None
547 } else {
548 Some(ValidationIssue {
549 issue_type: "Missing Variables".to_string(),
550 description: format!("Variables not covered by any subproblem: {missing_vars:?}"),
551 severity: ValidationSeverity::Critical,
552 })
553 }
554 }
555
556 fn check_problem_equivalence(
558 partitioning: &Partitioning,
559 original_qubo: &Array2<f64>,
560 ) -> Option<ValidationIssue> {
561 let total_internal_terms: f64 = partitioning
565 .subproblems
566 .iter()
567 .map(|sub| sub.qubo.sum())
568 .sum();
569
570 let total_coupling_terms: f64 = partitioning
571 .coupling_terms
572 .iter()
573 .map(|coupling| coupling.weight.abs())
574 .sum();
575
576 let original_total = original_qubo.sum();
577 let reconstructed_total = total_internal_terms + total_coupling_terms;
578
579 let relative_error =
580 (original_total - reconstructed_total).abs() / original_total.abs().max(1e-10);
581
582 if relative_error > 0.01 {
583 Some(ValidationIssue {
584 issue_type: "Energy Mismatch".to_string(),
585 description: format!(
586 "Relative error in total energy: {:.3}%",
587 relative_error * 100.0
588 ),
589 severity: ValidationSeverity::Warning,
590 })
591 } else {
592 None
593 }
594 }
595
596 fn check_coupling_consistency(
598 partitioning: &Partitioning,
599 _original_qubo: &Array2<f64>,
600 ) -> Option<ValidationIssue> {
601 let mut inconsistencies = 0;
603
604 for coupling in &partitioning.coupling_terms {
605 if coupling.weight.abs() < 1e-10 {
608 inconsistencies += 1;
609 }
610 }
611
612 if inconsistencies > partitioning.coupling_terms.len() / 10 {
613 Some(ValidationIssue {
614 issue_type: "Coupling Inconsistency".to_string(),
615 description: format!("{inconsistencies} coupling terms have near-zero weights"),
616 severity: ValidationSeverity::Warning,
617 })
618 } else {
619 None
620 }
621 }
622
623 fn calculate_coverage_percentage(
625 partitioning: &Partitioning,
626 original_var_map: &HashMap<String, usize>,
627 ) -> f64 {
628 let mut covered_vars = std::collections::HashSet::new();
629
630 for subproblem in &partitioning.subproblems {
631 for var in &subproblem.variables {
632 covered_vars.insert(var.clone());
633 }
634 }
635
636 covered_vars.len() as f64 / original_var_map.len() as f64 * 100.0
637 }
638}
639
/// Aggregated outcome of [`DecompositionValidator::validate_decomposition`].
#[derive(Debug, Clone)]
pub struct ValidationReport {
    // True iff no issues were found.
    pub is_valid: bool,
    // All issues detected, in check order.
    pub issues: Vec<ValidationIssue>,
    // Percentage of original variables covered by the subproblems.
    pub coverage_percentage: f64,
}
647
/// A single finding produced by one of the validation checks.
#[derive(Debug, Clone)]
pub struct ValidationIssue {
    // Short category label, e.g. "Missing Variables".
    pub issue_type: String,
    // Human-readable description of the specific finding.
    pub description: String,
    // How serious the finding is.
    pub severity: ValidationSeverity,
}
655
/// Severity level of a [`ValidationIssue`], from informational to critical.
#[derive(Debug, Clone)]
pub enum ValidationSeverity {
    Info,
    Warning,
    Error,
    Critical,
}
664
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_solution_integrator() {
        let integrator = SolutionIntegrator::new(IntegrationStrategy::WeightedVoting);

        // Two components that agree on x0 but disagree on x1.
        let solutions = vec![
            ComponentSolution {
                subproblem_id: 0,
                assignment: [("x0".to_string(), true), ("x1".to_string(), false)]
                    .iter()
                    .cloned()
                    .collect(),
                energy: 1.0,
                weight: 0.8,
            },
            ComponentSolution {
                subproblem_id: 1,
                assignment: [("x0".to_string(), true), ("x1".to_string(), true)]
                    .iter()
                    .cloned()
                    .collect(),
                energy: 2.0,
                weight: 0.6,
            },
        ];

        let global_var_map = [("x0".to_string(), 0), ("x1".to_string(), 1)]
            .iter()
            .cloned()
            .collect();

        let result = integrator.integrate_solutions(&solutions, &global_var_map);
        assert!(result.is_ok());

        let integrated = result.expect("Solution integration should succeed");
        assert_eq!(integrated.assignment.len(), 2);
        assert!(integrated.assignment.contains_key("x0"));
        assert!(integrated.assignment.contains_key("x1"));
        // Both components vote x0 = true, so the integrated value must agree.
        assert!(integrated.assignment["x0"]);
    }

    #[test]
    fn test_decomposition_analyzer() {
        let mut analyzer = DecompositionAnalyzer::new();

        let partitioning = Partitioning {
            partition_assignment: vec![0, 0, 1, 1],
            subproblems: vec![
                Subproblem {
                    id: 0,
                    variables: vec!["x0".to_string(), "x1".to_string()],
                    qubo: Array2::zeros((2, 2)),
                    var_map: HashMap::new(),
                },
                Subproblem {
                    id: 1,
                    variables: vec!["x2".to_string(), "x3".to_string()],
                    qubo: Array2::zeros((2, 2)),
                    var_map: HashMap::new(),
                },
            ],
            coupling_terms: vec![],
            metrics: PartitionMetrics {
                edge_cut: 1.0,
                balance: 1.0,
                modularity: 0.5,
                conductance: 0.1,
            },
        };

        let original_qubo = Array2::zeros((4, 4));
        let metrics = analyzer.analyze_decomposition(&partitioning, &original_qubo);

        assert_eq!(metrics.width, 2);
        assert_eq!(metrics.num_clusters, 2);
        assert!(metrics.balance_factor > 0.0);
    }

    #[test]
    fn test_decomposition_validator() {
        let partitioning = Partitioning {
            partition_assignment: vec![0, 0, 1, 1],
            subproblems: vec![
                Subproblem {
                    id: 0,
                    variables: vec!["x0".to_string(), "x1".to_string()],
                    qubo: Array2::zeros((2, 2)),
                    var_map: HashMap::new(),
                },
                Subproblem {
                    id: 1,
                    variables: vec!["x2".to_string(), "x3".to_string()],
                    qubo: Array2::zeros((2, 2)),
                    var_map: HashMap::new(),
                },
            ],
            coupling_terms: vec![],
            metrics: PartitionMetrics {
                edge_cut: 1.0,
                balance: 1.0,
                modularity: 0.5,
                conductance: 0.1,
            },
        };

        let original_qubo = Array2::zeros((4, 4));
        let original_var_map = [
            ("x0".to_string(), 0),
            ("x1".to_string(), 1),
            ("x2".to_string(), 2),
            ("x3".to_string(), 3),
        ]
        .iter()
        .cloned()
        .collect();

        let report = DecompositionValidator::validate_decomposition(
            &partitioning,
            &original_qubo,
            &original_var_map,
        );
        assert!(report.is_ok());

        let validation_report = report.expect("Decomposition validation should succeed");
        // Every original variable appears in a subproblem.
        assert_eq!(validation_report.coverage_percentage, 100.0);
    }
}