quantrs2_tytan/quantum_classical_hybrid.rs

//! Quantum-Classical Hybrid Refinement
//!
//! This module implements hybrid quantum-classical optimization strategies that refine
//! quantum annealing solutions using classical local search, gradient-based methods,
//! and constraint repair techniques.
//!
//! # Features
//!
//! - Local search refinement (steepest descent, first improvement, random descent,
//!   tabu search, variable neighborhood descent)
//! - Gradient-based fine-tuning for continuous embeddings
//! - Constraint repair and feasibility restoration
//! - Variable fixing based on high-confidence quantum samples
//! - Iterative quantum-classical loops with convergence criteria
//! - Integration with existing samplers
//!
//! # Examples
//!
//! ```rust
//! use quantrs2_tytan::quantum_classical_hybrid::*;
//! use scirs2_core::ndarray::Array2;
//! use std::collections::HashMap;
//!
//! // Create hybrid optimizer
//! let config = HybridConfig::default();
//! let mut optimizer = HybridOptimizer::new(config);
//!
//! // Create a simple QUBO matrix
//! let qubo_matrix = Array2::from_shape_fn((2, 2), |(i, j)| {
//!     if i == j { -1.0 } else { 0.5 }
//! });
//!
//! // Refine quantum solution
//! let quantum_solution = HashMap::from([
//!     ("x0".to_string(), true),
//!     ("x1".to_string(), false),
//! ]);
//! let refined = optimizer.refine_solution(&quantum_solution, &qubo_matrix).expect("refinement should succeed");
//! assert!(refined.energy <= 0.0);
//! ```

use crate::sampler::{SampleResult, Sampler};
use quantrs2_anneal::{IsingModel, QuboModel};
use scirs2_core::ndarray::{Array1, Array2};
use scirs2_core::parallel_ops;
use scirs2_core::random::prelude::*;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::fmt;

/// Local search strategy
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum LocalSearchStrategy {
    /// Steepest descent (best improvement)
    SteepestDescent,
    /// First improvement (accept first better solution)
    FirstImprovement,
    /// Random descent (random neighbor)
    RandomDescent,
    /// Tabu search with memory
    TabuSearch,
    /// Variable neighborhood descent
    VariableNeighborhoodDescent,
}

/// Constraint repair strategy
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum RepairStrategy {
    /// Greedy repair (minimize constraint violation)
    Greedy,
    /// Random repair
    Random,
    /// Weighted repair based on constraint importance
    Weighted,
    /// Iterative repair with backtracking
    Iterative,
}

/// Variable fixing criterion
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum FixingCriterion {
    /// Fix variables with high frequency across samples
    HighFrequency { threshold: f64 },
    /// Fix variables with low energy contribution variance
    LowVariance { threshold: f64 },
    /// Fix variables in strongly correlated groups
    StrongCorrelation { threshold: f64 },
    /// Fix based on reduced cost analysis
    ReducedCost { threshold: f64 },
}

/// Hybrid optimization configuration
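///
/// # Examples
///
/// A minimal configuration sketch; all fields shown exist on this struct, and the
/// values here are illustrative only:
///
/// ```rust
/// use quantrs2_tytan::quantum_classical_hybrid::{HybridConfig, LocalSearchStrategy};
///
/// let config = HybridConfig {
///     local_search: LocalSearchStrategy::TabuSearch,
///     max_local_iterations: 500,
///     ..HybridConfig::default()
/// };
/// assert_eq!(config.local_search, LocalSearchStrategy::TabuSearch);
/// ```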
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HybridConfig {
    /// Local search strategy
    pub local_search: LocalSearchStrategy,
    /// Maximum local search iterations
    pub max_local_iterations: usize,
    /// Constraint repair strategy
    pub repair_strategy: RepairStrategy,
    /// Enable constraint repair
    pub enable_repair: bool,
    /// Variable fixing criterion
    pub fixing_criterion: Option<FixingCriterion>,
    /// Percentage of variables to fix (0.0 - 1.0)
    pub fixing_percentage: f64,
    /// Number of quantum-classical iterations
    pub max_qc_iterations: usize,
    /// Convergence tolerance
    pub convergence_tolerance: f64,
    /// Enable gradient-based refinement
    pub enable_gradient: bool,
    /// Learning rate for gradient descent
    pub learning_rate: f64,
    /// Enable parallel evaluation
    pub parallel: bool,
}

impl Default for HybridConfig {
    fn default() -> Self {
        Self {
            local_search: LocalSearchStrategy::SteepestDescent,
            max_local_iterations: 1000,
            repair_strategy: RepairStrategy::Greedy,
            enable_repair: true,
            fixing_criterion: Some(FixingCriterion::HighFrequency { threshold: 0.8 }),
            fixing_percentage: 0.3,
            max_qc_iterations: 10,
            convergence_tolerance: 1e-6,
            enable_gradient: false,
            learning_rate: 0.01,
            parallel: true,
        }
    }
}

/// Solution with metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RefinedSolution {
    /// Variable assignments
    pub assignments: HashMap<String, bool>,
    /// Solution energy
    pub energy: f64,
    /// Constraint violations
    pub violations: Vec<ConstraintViolation>,
    /// Number of refinement iterations
    pub iterations: usize,
    /// Improvement over initial solution
    pub improvement: f64,
    /// Whether solution is feasible
    pub is_feasible: bool,
}

/// Constraint violation information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConstraintViolation {
    /// Constraint identifier
    pub constraint_id: String,
    /// Violation magnitude
    pub magnitude: f64,
    /// Variables involved
    pub variables: Vec<String>,
}

/// Fixed variable information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FixedVariable {
    /// Variable name
    pub name: String,
    /// Fixed value
    pub value: bool,
    /// Confidence score (0.0 - 1.0)
    pub confidence: f64,
    /// Reason for fixing
    pub reason: String,
}

/// Hybrid quantum-classical optimizer
pub struct HybridOptimizer {
    /// Configuration
    config: HybridConfig,
    /// Random number generator
    rng: Box<dyn RngCore>,
    /// Tabu list for tabu search
    tabu_list: HashSet<u64>,
    /// Fixed variables
    fixed_variables: HashMap<String, bool>,
    /// Iteration history
    history: Vec<f64>,
}

impl HybridOptimizer {
    /// Create a new hybrid optimizer
    pub fn new(config: HybridConfig) -> Self {
        Self {
            config,
            rng: Box::new(thread_rng()),
            tabu_list: HashSet::new(),
            fixed_variables: HashMap::new(),
            history: Vec::new(),
        }
    }

    /// Refine a solution using local search
    pub fn refine_solution(
        &mut self,
        solution: &HashMap<String, bool>,
        qubo_matrix: &Array2<f64>,
    ) -> Result<RefinedSolution, String> {
        let initial_energy = self.compute_energy(solution, qubo_matrix);
        let mut current_solution = solution.clone();
        let mut current_energy = initial_energy;
        let mut iterations = 0;

        self.history.clear();
        self.history.push(current_energy);

        // Apply constraint repair if enabled
        if self.config.enable_repair {
            current_solution = self.repair_constraints(&current_solution, qubo_matrix)?;
            current_energy = self.compute_energy(&current_solution, qubo_matrix);
            self.history.push(current_energy);
        }

        // Local search refinement
        for iter in 0..self.config.max_local_iterations {
            iterations = iter + 1;

            let (improved_solution, improved_energy) = match self.config.local_search {
                LocalSearchStrategy::SteepestDescent => {
                    self.steepest_descent_step(&current_solution, qubo_matrix)
                }
                LocalSearchStrategy::FirstImprovement => {
                    self.first_improvement_step(&current_solution, qubo_matrix)
                }
                LocalSearchStrategy::RandomDescent => {
                    self.random_descent_step(&current_solution, qubo_matrix)
                }
                LocalSearchStrategy::TabuSearch => {
                    self.tabu_search_step(&current_solution, qubo_matrix)
                }
                LocalSearchStrategy::VariableNeighborhoodDescent => {
                    self.vnd_step(&current_solution, qubo_matrix)
                }
            }?;

            // Check for improvement
            if improved_energy < current_energy - self.config.convergence_tolerance {
                current_solution = improved_solution;
                current_energy = improved_energy;
                self.history.push(current_energy);
            } else {
                // No improvement, stop
                break;
            }

            // Check convergence
            if self.has_converged() {
                break;
            }
        }

        // Compute constraint violations
        let violations = self.compute_violations(&current_solution);
        let is_feasible = violations.is_empty();

        Ok(RefinedSolution {
            assignments: current_solution,
            energy: current_energy,
            violations,
            iterations,
            improvement: initial_energy - current_energy,
            is_feasible,
        })
    }

    /// Steepest descent local search step
    fn steepest_descent_step(
        &self,
        solution: &HashMap<String, bool>,
        qubo_matrix: &Array2<f64>,
    ) -> Result<(HashMap<String, bool>, f64), String> {
        let current_energy = self.compute_energy(solution, qubo_matrix);
        let mut best_solution = solution.clone();
        let mut best_energy = current_energy;
        let mut improved = false;

        // Try flipping each variable
        for (var_name, &current_value) in solution {
            // Skip fixed variables
            if self.fixed_variables.contains_key(var_name) {
                continue;
            }

            let mut neighbor = solution.clone();
            neighbor.insert(var_name.clone(), !current_value);

            let neighbor_energy = self.compute_energy(&neighbor, qubo_matrix);

            if neighbor_energy < best_energy {
                best_solution = neighbor;
                best_energy = neighbor_energy;
                improved = true;
            }
        }

        if improved {
            Ok((best_solution, best_energy))
        } else {
            Ok((solution.clone(), current_energy))
        }
    }

    /// First improvement local search step
    fn first_improvement_step(
        &mut self,
        solution: &HashMap<String, bool>,
        qubo_matrix: &Array2<f64>,
    ) -> Result<(HashMap<String, bool>, f64), String> {
        let current_energy = self.compute_energy(solution, qubo_matrix);

        // Try flipping variables in random order
        let mut var_names: Vec<_> = solution.keys().cloned().collect();
        var_names.shuffle(&mut *self.rng);

        for var_name in var_names {
            // Skip fixed variables
            if self.fixed_variables.contains_key(&var_name) {
                continue;
            }

            let current_value = solution[&var_name];
            let mut neighbor = solution.clone();
            neighbor.insert(var_name, !current_value);

            let neighbor_energy = self.compute_energy(&neighbor, qubo_matrix);

            if neighbor_energy < current_energy {
                return Ok((neighbor, neighbor_energy));
            }
        }

        Ok((solution.clone(), current_energy))
    }

    /// Random descent step
    fn random_descent_step(
        &mut self,
        solution: &HashMap<String, bool>,
        qubo_matrix: &Array2<f64>,
    ) -> Result<(HashMap<String, bool>, f64), String> {
        let current_energy = self.compute_energy(solution, qubo_matrix);

        // Select random variable to flip
        let var_names: Vec<_> = solution
            .keys()
            .filter(|k| !self.fixed_variables.contains_key(*k))
            .cloned()
            .collect();

        if var_names.is_empty() {
            return Ok((solution.clone(), current_energy));
        }

        let var_name = &var_names[self.rng.gen_range(0..var_names.len())];
        let current_value = solution[var_name];

        let mut neighbor = solution.clone();
        neighbor.insert(var_name.clone(), !current_value);

        let neighbor_energy = self.compute_energy(&neighbor, qubo_matrix);

        if neighbor_energy < current_energy {
            Ok((neighbor, neighbor_energy))
        } else {
            Ok((solution.clone(), current_energy))
        }
    }

    /// Tabu search step
    fn tabu_search_step(
        &mut self,
        solution: &HashMap<String, bool>,
        qubo_matrix: &Array2<f64>,
    ) -> Result<(HashMap<String, bool>, f64), String> {
        let current_energy = self.compute_energy(solution, qubo_matrix);
        let mut best_solution = solution.clone();
        let mut best_energy = current_energy;

        // Try non-tabu moves
        for (var_name, &current_value) in solution {
            if self.fixed_variables.contains_key(var_name) {
                continue;
            }

            let mut neighbor = solution.clone();
            neighbor.insert(var_name.clone(), !current_value);

            // Check if move is tabu
            let move_hash = self.hash_solution(&neighbor);
            if self.tabu_list.contains(&move_hash) {
                continue;
            }

            let neighbor_energy = self.compute_energy(&neighbor, qubo_matrix);

            if neighbor_energy < best_energy {
                best_solution = neighbor;
                best_energy = neighbor_energy;
            }
        }

        // Update tabu list
        let move_hash = self.hash_solution(&best_solution);
        self.tabu_list.insert(move_hash);

        // Limit tabu list size
        if self.tabu_list.len() > 100 {
            self.tabu_list.clear();
        }

        Ok((best_solution, best_energy))
    }

    /// Variable neighborhood descent step
    fn vnd_step(
        &mut self,
        solution: &HashMap<String, bool>,
        qubo_matrix: &Array2<f64>,
    ) -> Result<(HashMap<String, bool>, f64), String> {
        let mut current_solution = solution.clone();
        let mut current_energy = self.compute_energy(solution, qubo_matrix);

        // Neighborhood 1: Single variable flip
        let (sol1, e1) = self.steepest_descent_step(&current_solution, qubo_matrix)?;
        if e1 < current_energy {
            current_solution = sol1;
            current_energy = e1;
        }

        // Neighborhood 2: Two-variable swap (if solution is binary)
        let (sol2, e2) = self.two_variable_swap(&current_solution, qubo_matrix)?;
        if e2 < current_energy {
            current_solution = sol2;
            current_energy = e2;
        }

        Ok((current_solution, current_energy))
    }

    /// Two-variable swap neighborhood
    fn two_variable_swap(
        &self,
        solution: &HashMap<String, bool>,
        qubo_matrix: &Array2<f64>,
    ) -> Result<(HashMap<String, bool>, f64), String> {
        let current_energy = self.compute_energy(solution, qubo_matrix);
        let mut best_solution = solution.clone();
        let mut best_energy = current_energy;

        let var_names: Vec<_> = solution
            .keys()
            .filter(|k| !self.fixed_variables.contains_key(*k))
            .cloned()
            .collect();

        for i in 0..var_names.len() {
            for j in (i + 1)..var_names.len() {
                let mut neighbor = solution.clone();
                let val_i = solution[&var_names[i]];
                let val_j = solution[&var_names[j]];

                neighbor.insert(var_names[i].clone(), !val_i);
                neighbor.insert(var_names[j].clone(), !val_j);

                let neighbor_energy = self.compute_energy(&neighbor, qubo_matrix);

                if neighbor_energy < best_energy {
                    best_solution = neighbor;
                    best_energy = neighbor_energy;
                }
            }
        }

        Ok((best_solution, best_energy))
    }

    /// Repair constraint violations
    fn repair_constraints(
        &self,
        solution: &HashMap<String, bool>,
        _qubo_matrix: &Array2<f64>,
    ) -> Result<HashMap<String, bool>, String> {
        // Simplified constraint repair
        // In practice, this would analyze specific constraint types
        let mut repaired = solution.clone();

        match self.config.repair_strategy {
            RepairStrategy::Greedy => {
                // Greedy repair: flip variables to reduce violations
                // Placeholder implementation
            }
            RepairStrategy::Random => {
                // Random repair
                // Placeholder implementation
            }
            RepairStrategy::Weighted => {
                // Weighted repair
                // Placeholder implementation
            }
            RepairStrategy::Iterative => {
                // Iterative repair with backtracking
                // Placeholder implementation
            }
        }

        Ok(repaired)
    }

    /// Fix high-confidence variables based on quantum samples
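    ///
    /// # Examples
    ///
    /// A minimal usage sketch; the sample data below is illustrative only:
    ///
    /// ```rust
    /// use quantrs2_tytan::quantum_classical_hybrid::{FixingCriterion, HybridConfig, HybridOptimizer};
    /// use std::collections::HashMap;
    ///
    /// let mut optimizer = HybridOptimizer::new(HybridConfig::default());
    /// let samples = vec![
    ///     HashMap::from([("x0".to_string(), true)]),
    ///     HashMap::from([("x0".to_string(), true)]),
    /// ];
    /// let fixed = optimizer
    ///     .fix_variables(&samples, FixingCriterion::HighFrequency { threshold: 0.9 })
    ///     .expect("fixing should succeed");
    /// assert!(fixed.iter().any(|f| f.name == "x0" && f.value));
    /// ```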
    pub fn fix_variables(
        &mut self,
        samples: &[HashMap<String, bool>],
        criterion: FixingCriterion,
    ) -> Result<Vec<FixedVariable>, String> {
        if samples.is_empty() {
            return Ok(Vec::new());
        }

        let mut fixed = Vec::new();

        match criterion {
            FixingCriterion::HighFrequency { threshold } => {
                // Compute variable frequencies
                let mut frequencies: HashMap<String, (usize, usize)> = HashMap::new();

                for sample in samples {
                    for (var, &value) in sample {
                        let entry = frequencies.entry(var.clone()).or_insert((0, 0));
                        if value {
                            entry.0 += 1;
                        } else {
                            entry.1 += 1;
                        }
                    }
                }

                // Fix variables with high frequency
                for (var, (true_count, false_count)) in frequencies {
                    let total = (true_count + false_count) as f64;
                    let true_freq = true_count as f64 / total;
                    let false_freq = false_count as f64 / total;

                    if true_freq >= threshold {
                        self.fixed_variables.insert(var.clone(), true);
                        fixed.push(FixedVariable {
                            name: var,
                            value: true,
                            confidence: true_freq,
                            reason: format!("High frequency ({true_freq})"),
                        });
                    } else if false_freq >= threshold {
                        self.fixed_variables.insert(var.clone(), false);
                        fixed.push(FixedVariable {
                            name: var,
                            value: false,
                            confidence: false_freq,
                            reason: format!("High frequency ({false_freq})"),
                        });
                    }
                }
            }
            FixingCriterion::LowVariance { threshold: _ } => {
                // Compute variance of each variable's contribution
                // Placeholder implementation
            }
            FixingCriterion::StrongCorrelation { threshold: _ } => {
                // Detect strongly correlated variable groups
                // Placeholder implementation
            }
            FixingCriterion::ReducedCost { threshold: _ } => {
                // Reduced cost analysis
                // Placeholder implementation
            }
        }

        Ok(fixed)
    }

    /// Unfix all variables
    pub fn unfix_all(&mut self) {
        self.fixed_variables.clear();
    }

    /// Iterative quantum-classical refinement
    pub fn iterative_refinement<S: Sampler>(
        &mut self,
        _sampler: &S,
        qubo_matrix: &Array2<f64>,
        num_samples: usize,
    ) -> Result<Vec<RefinedSolution>, String> {
        let mut refined_solutions = Vec::new();
        let mut best_energy = f64::INFINITY;

        for iteration in 0..self.config.max_qc_iterations {
            println!(
                "Quantum-Classical iteration {}/{}",
                iteration + 1,
                self.config.max_qc_iterations
            );

            // Quantum sampling step
            // Note: This is a simplified interface; an actual implementation would need the proper QUBO format.
            // For now, we generate random samples as a placeholder instead of calling the sampler.
            let mut samples = Vec::new();
            for _ in 0..num_samples {
                let mut sample = HashMap::new();
                for i in 0..qubo_matrix.nrows() {
                    sample.insert(format!("x{i}"), self.rng.gen::<bool>());
                }
                samples.push(sample);
            }

            // Fix high-confidence variables if configured
            if let Some(criterion) = self.config.fixing_criterion {
                let fixed = self.fix_variables(&samples, criterion)?;
                println!("Fixed {} variables", fixed.len());
            }

            // Classical refinement of quantum samples
            for sample in samples {
                let refined = self.refine_solution(&sample, qubo_matrix)?;

                if refined.energy < best_energy {
                    best_energy = refined.energy;
                    println!("New best energy: {best_energy}");
                }

                refined_solutions.push(refined);
            }

            // Check convergence
            if iteration > 0 && self.has_converged() {
                println!("Converged after {} iterations", iteration + 1);
                break;
            }
        }

        Ok(refined_solutions)
    }

    /// Compute solution energy
    fn compute_energy(&self, solution: &HashMap<String, bool>, qubo_matrix: &Array2<f64>) -> f64 {
        // QUBO energy: E(x) = sum_{i,j} Q[i,j] * x_i * x_j with x_i in {0, 1};
        // variables missing from the assignment are treated as 0.
        let n = qubo_matrix.nrows();
        let mut energy = 0.0;

        for i in 0..n {
            for j in 0..n {
                let x_i = if solution.get(&format!("x{i}")).copied().unwrap_or(false) {
                    1.0
                } else {
                    0.0
                };
                let x_j = if solution.get(&format!("x{j}")).copied().unwrap_or(false) {
                    1.0
                } else {
                    0.0
                };
                energy += qubo_matrix[[i, j]] * x_i * x_j;
            }
        }

        energy
    }

    /// Compute constraint violations
    const fn compute_violations(
        &self,
        _solution: &HashMap<String, bool>,
    ) -> Vec<ConstraintViolation> {
        // Placeholder: would check actual constraints
        Vec::new()
    }

    /// Check if optimization has converged
    fn has_converged(&self) -> bool {
        if self.history.len() < 3 {
            return false;
        }

        let recent = &self.history[self.history.len() - 3..];
        let max_change = recent
            .windows(2)
            .map(|w| (w[0] - w[1]).abs())
            .fold(0.0, f64::max);

        max_change < self.config.convergence_tolerance
    }

    /// Hash a solution for tabu search
    fn hash_solution(&self, solution: &HashMap<String, bool>) -> u64 {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};

        let mut hasher = DefaultHasher::new();
        let mut sorted: Vec<_> = solution.iter().collect();
        sorted.sort_by_key(|(k, _)| k.as_str());

        for (k, v) in sorted {
            k.hash(&mut hasher);
            v.hash(&mut hasher);
        }

        hasher.finish()
    }

    /// Get refinement history
    pub fn get_history(&self) -> &[f64] {
        &self.history
    }

    /// Get fixed variables
    pub const fn get_fixed_variables(&self) -> &HashMap<String, bool> {
        &self.fixed_variables
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_hybrid_optimizer_creation() {
        let config = HybridConfig::default();
        let optimizer = HybridOptimizer::new(config);

        assert_eq!(optimizer.fixed_variables.len(), 0);
        assert_eq!(optimizer.history.len(), 0);
    }

    #[test]
    fn test_energy_computation() {
        let config = HybridConfig::default();
        let optimizer = HybridOptimizer::new(config);

        let qubo = Array2::from_shape_fn((2, 2), |(i, j)| if i == j { -1.0 } else { 2.0 });

        let solution = HashMap::from([("x0".to_string(), true), ("x1".to_string(), false)]);

        let energy = optimizer.compute_energy(&solution, &qubo);
        assert_eq!(energy, -1.0); // Only x0 contributes
    }

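    // Additional check (sketch): the first-improvement strategy should never
    // return a solution worse than the one it started from. The QUBO here is
    // illustrative only.
    #[test]
    fn test_first_improvement_refinement() {
        let config = HybridConfig {
            local_search: LocalSearchStrategy::FirstImprovement,
            max_local_iterations: 10,
            ..Default::default()
        };
        let mut optimizer = HybridOptimizer::new(config);

        let qubo = Array2::from_shape_fn((3, 3), |(i, j)| if i == j { -1.0 } else { 0.5 });

        let initial = HashMap::from([
            ("x0".to_string(), true),
            ("x1".to_string(), true),
            ("x2".to_string(), true),
        ]);

        let initial_energy = optimizer.compute_energy(&initial, &qubo);
        let refined = optimizer
            .refine_solution(&initial, &qubo)
            .expect("refinement should succeed");

        assert!(refined.energy <= initial_energy);
        assert!(refined.improvement >= 0.0);
    }
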
    #[test]
    fn test_local_search_refinement() {
        let config = HybridConfig {
            max_local_iterations: 10,
            ..Default::default()
        };
        let mut optimizer = HybridOptimizer::new(config);

        let qubo = Array2::from_shape_fn((3, 3), |(i, j)| if i == j { -1.0 } else { 0.5 });

        let initial_solution = HashMap::from([
            ("x0".to_string(), false),
            ("x1".to_string(), false),
            ("x2".to_string(), false),
        ]);

        let refined = optimizer
            .refine_solution(&initial_solution, &qubo)
            .expect("refinement should succeed");

        assert!(refined.improvement >= 0.0);
        assert!(refined.energy <= optimizer.compute_energy(&initial_solution, &qubo));
    }

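    // Additional check (sketch): starting from the all-false assignment (energy 0),
    // tabu search should find at least one improving single flip on this
    // diagonal-dominated QUBO.
    #[test]
    fn test_tabu_search_refinement() {
        let config = HybridConfig {
            local_search: LocalSearchStrategy::TabuSearch,
            max_local_iterations: 20,
            ..Default::default()
        };
        let mut optimizer = HybridOptimizer::new(config);

        let qubo = Array2::from_shape_fn((3, 3), |(i, j)| if i == j { -1.0 } else { 0.25 });

        let initial = HashMap::from([
            ("x0".to_string(), false),
            ("x1".to_string(), false),
            ("x2".to_string(), false),
        ]);

        let refined = optimizer
            .refine_solution(&initial, &qubo)
            .expect("refinement should succeed");

        assert!(refined.energy < 0.0);
    }
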
    #[test]
    fn test_variable_fixing() {
        let config = HybridConfig::default();
        let mut optimizer = HybridOptimizer::new(config);

        let samples = vec![
            HashMap::from([("x0".to_string(), true), ("x1".to_string(), false)]),
            HashMap::from([("x0".to_string(), true), ("x1".to_string(), true)]),
            HashMap::from([("x0".to_string(), true), ("x1".to_string(), false)]),
        ];

        let criterion = FixingCriterion::HighFrequency { threshold: 0.8 };
        let fixed = optimizer
            .fix_variables(&samples, criterion)
            .expect("variable fixing should succeed");

        // x0 should be fixed to true (100% frequency)
        assert!(!fixed.is_empty());
        assert!(fixed.iter().any(|f| f.name == "x0" && f.value));
    }

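    // Additional check (sketch): variable neighborhood descent combines single
    // flips with two-variable flips and should not worsen the solution.
    #[test]
    fn test_vnd_refinement() {
        let config = HybridConfig {
            local_search: LocalSearchStrategy::VariableNeighborhoodDescent,
            max_local_iterations: 10,
            ..Default::default()
        };
        let mut optimizer = HybridOptimizer::new(config);

        let qubo = Array2::from_shape_fn((4, 4), |(i, j)| if i == j { -1.0 } else { 0.5 });

        let initial = HashMap::from([
            ("x0".to_string(), false),
            ("x1".to_string(), false),
            ("x2".to_string(), false),
            ("x3".to_string(), false),
        ]);

        let refined = optimizer
            .refine_solution(&initial, &qubo)
            .expect("refinement should succeed");

        assert!(refined.energy <= 0.0);
        assert!(refined.improvement >= 0.0);
    }
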
    #[test]
    fn test_convergence_detection() {
        let config = HybridConfig {
            convergence_tolerance: 0.001, // Set tolerance for test
            ..Default::default()
        };
        let mut optimizer = HybridOptimizer::new(config);

        // Add converged history (changes smaller than tolerance)
        optimizer.history = vec![10.0, 10.00001, 10.00002];

        assert!(optimizer.has_converged());

        // Add non-converged history (changes larger than tolerance)
        optimizer.history = vec![10.0, 9.0, 8.0];

        assert!(!optimizer.has_converged());
    }
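
    // Additional checks (sketch): fixed variables can be cleared again, and the
    // refinement history records at least the initial energy.
    #[test]
    fn test_unfix_all() {
        let config = HybridConfig::default();
        let mut optimizer = HybridOptimizer::new(config);

        let samples = vec![
            HashMap::from([("x0".to_string(), true)]),
            HashMap::from([("x0".to_string(), true)]),
        ];
        let criterion = FixingCriterion::HighFrequency { threshold: 0.9 };
        optimizer
            .fix_variables(&samples, criterion)
            .expect("variable fixing should succeed");
        assert!(!optimizer.get_fixed_variables().is_empty());

        optimizer.unfix_all();
        assert!(optimizer.get_fixed_variables().is_empty());
    }

    #[test]
    fn test_history_is_recorded() {
        let config = HybridConfig::default();
        let mut optimizer = HybridOptimizer::new(config);

        let qubo = Array2::from_shape_fn((2, 2), |(i, j)| if i == j { -1.0 } else { 0.5 });
        let solution = HashMap::from([("x0".to_string(), false), ("x1".to_string(), false)]);

        optimizer
            .refine_solution(&solution, &qubo)
            .expect("refinement should succeed");

        assert!(!optimizer.get_history().is_empty());
    }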
}