quantrs2_anneal/bayesian_hyperopt/mod.rs

//! Bayesian Optimization for Hyperparameter Tuning
//!
//! This module implements advanced Bayesian optimization techniques for automatically
//! tuning hyperparameters in quantum annealing systems. It uses Gaussian processes
//! as surrogate models and sophisticated acquisition functions to efficiently explore
//! the hyperparameter space.
//!
//! Key features:
//! - Multi-objective Bayesian optimization
//! - Mixed parameter types (continuous, discrete, categorical)
//! - Advanced acquisition functions (EI, UCB, PI, Entropy Search)
//! - Gaussian process surrogate models with different kernels
//! - Constraint handling and feasibility modeling
//! - Transfer learning across related optimization problems
//! - Parallel and batch optimization
//! - Uncertainty quantification and confidence intervals
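//!
//! # Example
//!
//! A minimal usage sketch (marked `ignore`; it assumes the crate is reachable
//! as `quantrs2_anneal` and mirrors the `optimize_annealing_parameters`
//! helper defined below):
//!
//! ```ignore
//! use quantrs2_anneal::bayesian_hyperopt::optimize_annealing_parameters;
//!
//! // A simple quadratic objective with its minimum at 1.0 in every coordinate.
//! let objective = |x: &[f64]| x.iter().map(|&xi| (xi - 1.0).powi(2)).sum::<f64>();
//!
//! // Run a short optimization (10 iterations) over the default parameter space.
//! let best_params = optimize_annealing_parameters(objective, Some(10))
//!     .expect("optimization should succeed");
//! assert_eq!(best_params.len(), 3); // temperature, num_sweeps, schedule_type
//! ```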
use scirs2_core::random::seq::SliceRandom;
use scirs2_core::random::ChaCha8Rng;
use scirs2_core::random::{Rng, SeedableRng};
use std::collections::HashMap;
use std::time::{Duration, Instant};

use crate::embedding::{Embedding, HardwareTopology};
use crate::hardware_compilation::{CompilerConfig, HardwareCompiler};
use crate::ising::IsingModel;
use crate::simulator::{AnnealingParams, AnnealingResult, ClassicalAnnealingSimulator};

// Module declarations
pub mod acquisition;
pub mod config;
pub mod constraints;
pub mod convergence;
pub mod gaussian_process;
pub mod multi_objective;
pub mod parallel;
pub mod transfer;

// Re-export main types for backward compatibility
pub use acquisition::*;
pub use config::*;
pub use constraints::*;
pub use convergence::*;
pub use gaussian_process::*;
pub use multi_objective::*;
pub use parallel::*;
pub use transfer::*;

/// Create parameter space for annealing hyperparameters
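///
/// The space covers `temperature` (continuous, 0.1..=10.0), `num_sweeps`
/// (discrete, 100..=10_000), and `schedule_type` (categorical: linear,
/// exponential, or polynomial).
///
/// # Example
///
/// A quick sanity check mirroring this module's unit tests (marked `ignore`):
///
/// ```ignore
/// let space = create_annealing_parameter_space();
/// assert_eq!(space.parameters.len(), 3);
/// assert_eq!(space.parameters[0].param_type, ParameterType::Continuous);
/// ```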
#[must_use]
pub fn create_annealing_parameter_space() -> ParameterSpace {
    // Common annealing parameters
    let parameters = vec![
        Parameter {
            name: "temperature".to_string(),
            param_type: ParameterType::Continuous,
            bounds: ParameterBounds::Continuous {
                min: 0.1,
                max: 10.0,
            },
        },
        Parameter {
            name: "num_sweeps".to_string(),
            param_type: ParameterType::Discrete,
            bounds: ParameterBounds::Discrete {
                min: 100,
                max: 10_000,
            },
        },
        Parameter {
            name: "schedule_type".to_string(),
            param_type: ParameterType::Categorical,
            bounds: ParameterBounds::Categorical {
                values: vec![
                    "linear".to_string(),
                    "exponential".to_string(),
                    "polynomial".to_string(),
                ],
            },
        },
    ];

    ParameterSpace { parameters }
}

/// Create Bayesian optimizer with default configuration
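///
/// Defaults to 50 iterations, 5 initial samples, expected-improvement
/// acquisition, and an RBF kernel over the parameter space from
/// [`create_annealing_parameter_space`].
///
/// # Example
///
/// A minimal sketch (marked `ignore`; `optimize` accepts any
/// `Fn(&[f64]) -> f64`, as used by `optimize_annealing_parameters` below):
///
/// ```ignore
/// let mut optimizer = create_bayesian_optimizer();
/// let best = optimizer
///     .optimize(|x: &[f64]| x.iter().map(|&xi| xi * xi).sum::<f64>())
///     .expect("optimization should succeed");
/// assert_eq!(best.len(), 3);
/// ```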
#[must_use]
pub fn create_bayesian_optimizer() -> BayesianHyperoptimizer {
    let config = BayesianOptConfig {
        max_iterations: 50,
        initial_samples: 5,
        acquisition_config: AcquisitionConfig {
            function_type: AcquisitionFunctionType::ExpectedImprovement,
            exploration_factor: 0.1,
            num_restarts: 10,
            batch_strategy: BatchAcquisitionStrategy::LocalPenalization,
            optimization_method: AcquisitionOptimizationMethod::RandomSearch,
        },
        gp_config: GaussianProcessSurrogate {
            kernel: KernelFunction::RBF,
            noise_variance: 1e-6,
            mean_function: MeanFunction::Zero,
        },
        multi_objective_config: MultiObjectiveConfig::default(),
        constraint_config: ConstraintConfig::default(),
        convergence_config: ConvergenceConfig::default(),
        parallel_config: ParallelConfig::default(),
        transfer_config: TransferConfig::default(),
        seed: Some(42),
    };

    let parameter_space = create_annealing_parameter_space();
    BayesianHyperoptimizer::new(config, parameter_space)
}

/// Create custom Bayesian optimizer with specified parameters
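///
/// The number of initial samples is derived as `(max_iterations / 10).max(3)`.
///
/// # Example
///
/// Mirrors this module's unit tests (marked `ignore`):
///
/// ```ignore
/// let optimizer = create_custom_bayesian_optimizer(
///     100,
///     AcquisitionFunctionType::UpperConfidenceBound,
///     KernelFunction::Matern,
/// );
/// assert_eq!(optimizer.config.max_iterations, 100);
/// assert_eq!(optimizer.config.initial_samples, 10); // 100 / 10
/// ```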
#[must_use]
pub fn create_custom_bayesian_optimizer(
    max_iterations: usize,
    acquisition_function: AcquisitionFunctionType,
    kernel: KernelFunction,
) -> BayesianHyperoptimizer {
    let config = BayesianOptConfig {
        max_iterations,
        initial_samples: (max_iterations / 10).max(3),
        acquisition_config: AcquisitionConfig {
            function_type: acquisition_function,
            exploration_factor: 0.1,
            num_restarts: 10,
            batch_strategy: BatchAcquisitionStrategy::LocalPenalization,
            optimization_method: AcquisitionOptimizationMethod::RandomSearch,
        },
        gp_config: GaussianProcessSurrogate {
            kernel,
            noise_variance: 1e-6,
            mean_function: MeanFunction::Zero,
        },
        multi_objective_config: MultiObjectiveConfig::default(),
        constraint_config: ConstraintConfig::default(),
        convergence_config: ConvergenceConfig::default(),
        parallel_config: ParallelConfig::default(),
        transfer_config: TransferConfig::default(),
        seed: Some(42),
    };

    let parameter_space = create_annealing_parameter_space();
    BayesianHyperoptimizer::new(config, parameter_space)
}

/// Optimize annealing parameters for a given problem
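///
/// When `max_iterations` is `None`, the default optimizer (50 iterations) is
/// used; otherwise a custom optimizer with expected-improvement acquisition
/// and an RBF kernel is built.
///
/// # Errors
///
/// Returns an error if the underlying Bayesian optimization loop fails.
///
/// # Example
///
/// Mirrors `test_simple_optimization` below (marked `ignore`):
///
/// ```ignore
/// // Quadratic bowl centered at 1.0 in each coordinate.
/// let objective = |x: &[f64]| x.iter().map(|&xi| (xi - 1.0).powi(2)).sum::<f64>();
/// let best_params = optimize_annealing_parameters(objective, Some(10))
///     .expect("optimization should succeed");
/// assert_eq!(best_params.len(), 3);
/// ```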
pub fn optimize_annealing_parameters<F>(
    objective_function: F,
    max_iterations: Option<usize>,
) -> BayesianOptResult<Vec<f64>>
where
    F: Fn(&[f64]) -> f64,
{
    let mut optimizer = if let Some(max_iter) = max_iterations {
        create_custom_bayesian_optimizer(
            max_iter,
            AcquisitionFunctionType::ExpectedImprovement,
            KernelFunction::RBF,
        )
    } else {
        create_bayesian_optimizer()
    };

    optimizer.optimize(objective_function)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parameter_space_creation() {
        let param_space = create_annealing_parameter_space();
        assert_eq!(param_space.parameters.len(), 3);

        // Check parameter types
        assert_eq!(
            param_space.parameters[0].param_type,
            ParameterType::Continuous
        );
        assert_eq!(
            param_space.parameters[1].param_type,
            ParameterType::Discrete
        );
        assert_eq!(
            param_space.parameters[2].param_type,
            ParameterType::Categorical
        );
    }

    #[test]
    fn test_optimizer_creation() {
        let optimizer = create_bayesian_optimizer();
        assert_eq!(optimizer.config.max_iterations, 50);
        assert_eq!(optimizer.config.initial_samples, 5);
        assert_eq!(optimizer.parameter_space.parameters.len(), 3);
    }

    #[test]
    fn test_custom_optimizer_creation() {
        let optimizer = create_custom_bayesian_optimizer(
            100,
            AcquisitionFunctionType::UpperConfidenceBound,
            KernelFunction::Matern,
        );

        assert_eq!(optimizer.config.max_iterations, 100);
        assert_eq!(optimizer.config.initial_samples, 10);
        assert_eq!(
            optimizer.config.acquisition_config.function_type,
            AcquisitionFunctionType::UpperConfidenceBound
        );
        assert_eq!(optimizer.config.gp_config.kernel, KernelFunction::Matern);
    }

    #[test]
    fn test_simple_optimization() {
        // Simple quadratic function to minimize
        let objective = |x: &[f64]| x.iter().map(|&xi| (xi - 1.0).powi(2)).sum::<f64>();

        let result = optimize_annealing_parameters(objective, Some(10));
        assert!(result.is_ok());

        let best_params = result.expect("Optimization should return best params");
        assert_eq!(best_params.len(), 3);

        // Check that parameters are within bounds
        assert!(best_params[0] >= 0.1 && best_params[0] <= 10.0); // temperature
        assert!(best_params[1] >= 100.0 && best_params[1] <= 10_000.0); // num_sweeps
        assert!(best_params[2] >= 0.0 && best_params[2] <= 2.0); // schedule_type (categorical index)
    }
}