// quantrs2_tytan/optimization/tuning.rs

//! Parameter tuning for quantum annealing
//!
//! This module provides automated parameter tuning using Bayesian optimization
//! and other advanced techniques with SciRS2 integration.
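//!
//! # Example
//!
//! A minimal usage sketch; `MySampler` is a stand-in for any type implementing
//! [`Sampler`], and `compiled_model` is a [`CompiledModel`] built elsewhere:
//!
//! ```ignore
//! let mut tuner = ParameterTuner::new(TuningConfig::default());
//! tuner.add_parameter(ParameterBounds {
//!     name: "beta".to_string(),
//!     min: 0.01,
//!     max: 10.0,
//!     scale: ParameterScale::Logarithmic,
//!     integer: false,
//! });
//!
//! let result = tuner.tune_sampler(
//!     |params| MySampler::with_parameters(params),
//!     &compiled_model,
//!     |samples| samples.iter().map(|s| s.energy).sum::<f64>() / samples.len() as f64,
//! )?;
//! println!("best parameters: {:?}", result.best_parameters);
//! ```
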
use crate::{
    optimization::penalty::CompiledModel,
    sampler::{SampleResult, Sampler},
};
use scirs2_core::ndarray::Array1;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[cfg(feature = "scirs")]
use crate::scirs_stub::scirs2_optimization::bayesian::{
    AcquisitionFunction, BayesianOptimizer, GaussianProcess,
};

/// Parameter bounds definition
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ParameterBounds {
    pub name: String,
    pub min: f64,
    pub max: f64,
    pub scale: ParameterScale,
    pub integer: bool,
}

/// Parameter scaling type
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ParameterScale {
    /// Sample and interpolate uniformly between `min` and `max`
    Linear,
    /// Sample and interpolate uniformly in log space (requires `min > 0`)
    Logarithmic,
    /// Concentrate values toward the middle of `[min, max]` via a logistic transform
    Sigmoid,
}

/// Tuning configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TuningConfig {
    /// Maximum number of evaluations
    pub max_evaluations: usize,
    /// Number of initial random samples
    pub initial_samples: usize,
    /// Acquisition function for Bayesian optimization
    pub acquisition: AcquisitionType,
    /// Exploration vs exploitation trade-off
    pub exploration_factor: f64,
    /// Number of parallel evaluations
    pub parallel_evaluations: usize,
    /// Early stopping tolerance
    pub early_stopping_tolerance: f64,
    /// Random seed
    pub seed: Option<u64>,
}

/// Acquisition function types
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum AcquisitionType {
    ExpectedImprovement,
    UpperConfidenceBound,
    ProbabilityOfImprovement,
    ThompsonSampling,
}

impl Default for TuningConfig {
    fn default() -> Self {
        Self {
            max_evaluations: 100,
            initial_samples: 20,
            acquisition: AcquisitionType::ExpectedImprovement,
            exploration_factor: 0.1,
            parallel_evaluations: 1,
            early_stopping_tolerance: 1e-6,
            seed: None,
        }
    }
}

/// Parameter tuner
pub struct ParameterTuner {
    config: TuningConfig,
    parameter_bounds: Vec<ParameterBounds>,
    evaluation_history: Vec<TuningEvaluation>,
    #[cfg(feature = "scirs")]
    optimizer: Option<BayesianOptimizer>,
}

/// Single tuning evaluation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TuningEvaluation {
    pub parameters: HashMap<String, f64>,
    pub objective_value: f64,
    pub constraint_violations: HashMap<String, f64>,
    pub evaluation_time: std::time::Duration,
    pub iteration: usize,
}

/// Tuning result
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TuningResult {
    pub best_parameters: HashMap<String, f64>,
    pub best_objective: f64,
    pub convergence_history: Vec<f64>,
    pub total_evaluations: usize,
    pub converged: bool,
    /// Fractional improvement of the best objective over the default-parameter baseline
    pub improvement_over_default: f64,
    /// Normalized importance score per parameter (scores sum to 1)
    pub parameter_importance: HashMap<String, f64>,
}

impl ParameterTuner {
    /// Create new parameter tuner
    pub const fn new(config: TuningConfig) -> Self {
        Self {
            config,
            parameter_bounds: Vec::new(),
            evaluation_history: Vec::new(),
            #[cfg(feature = "scirs")]
            optimizer: None,
        }
    }

    /// Add parameter to tune
    pub fn add_parameter(&mut self, bounds: ParameterBounds) {
        self.parameter_bounds.push(bounds);
    }

    /// Add multiple parameters
    pub fn add_parameters(&mut self, bounds: Vec<ParameterBounds>) {
        self.parameter_bounds.extend(bounds);
    }

    /// Tune parameters for a sampler
    pub fn tune_sampler<S: Sampler>(
        &mut self,
        sampler_factory: impl Fn(HashMap<String, f64>) -> S,
        model: &CompiledModel,
        objective: impl Fn(&[SampleResult]) -> f64,
    ) -> Result<TuningResult, Box<dyn std::error::Error>> {
        // Initialize optimizer
        self.initialize_optimizer()?;

        // Get default parameters for baseline
        let default_params = self.get_default_parameters();
        let default_objective =
            self.evaluate_configuration(&default_params, &sampler_factory, model, &objective)?;

        // Initial random sampling
        for i in 0..self.config.initial_samples {
            let params = self.sample_random_parameters(i as u64);
            let obj_value =
                self.evaluate_configuration(&params, &sampler_factory, model, &objective)?;

            self.record_evaluation(params, obj_value, HashMap::new(), i);
        }

        // Bayesian optimization loop
        let mut best_objective = self
            .evaluation_history
            .iter()
            .map(|e| e.objective_value)
            .fold(f64::INFINITY, f64::min);

        let mut no_improvement_count = 0;

        for i in self.config.initial_samples..self.config.max_evaluations {
            // Get next parameters to evaluate
            let next_params = self.get_next_parameters()?;

            // Evaluate
            let obj_value =
                self.evaluate_configuration(&next_params, &sampler_factory, model, &objective)?;

            self.record_evaluation(next_params, obj_value, HashMap::new(), i);

            // Check for improvement
            if obj_value < best_objective - self.config.early_stopping_tolerance {
                best_objective = obj_value;
                no_improvement_count = 0;
            } else {
                no_improvement_count += 1;
            }

            // Early stopping
            if no_improvement_count > 10 {
                break;
            }
        }

        // Analyze results
        let best_eval = self
            .evaluation_history
            .iter()
            .min_by(|a, b| {
                a.objective_value
                    .partial_cmp(&b.objective_value)
                    .unwrap_or(std::cmp::Ordering::Equal)
            })
            .ok_or("No evaluations recorded in history")?;

        let convergence_history: Vec<f64> = self
            .evaluation_history
            .iter()
            .scan(f64::INFINITY, |best, eval| {
                *best = best.min(eval.objective_value);
                Some(*best)
            })
            .collect();

        let parameter_importance = self.calculate_parameter_importance()?;

        Ok(TuningResult {
            best_parameters: best_eval.parameters.clone(),
            best_objective: best_eval.objective_value,
            convergence_history,
            total_evaluations: self.evaluation_history.len(),
            converged: no_improvement_count > 10,
            improvement_over_default: (default_objective - best_eval.objective_value)
                / default_objective.abs(),
            parameter_importance,
        })
    }

    /// Initialize optimizer
    fn initialize_optimizer(&mut self) -> Result<(), Box<dyn std::error::Error>> {
        #[cfg(feature = "scirs")]
        {
            use crate::scirs_stub::scirs2_optimization::bayesian::KernelType;

            let dim = self.parameter_bounds.len();
            let kernel = KernelType::Matern52;

            self.optimizer = Some(BayesianOptimizer::new(
                dim,
                kernel,
                self.config.acquisition.into(),
                self.config.exploration_factor,
            )?);
        }

        Ok(())
    }

    /// Get default parameters (middle of bounds)
    fn get_default_parameters(&self) -> HashMap<String, f64> {
        self.parameter_bounds
            .iter()
            .map(|b| {
                let value = match b.scale {
                    ParameterScale::Linear => f64::midpoint(b.min, b.max),
                    ParameterScale::Logarithmic => f64::midpoint(b.min.ln(), b.max.ln()).exp(),
                    ParameterScale::Sigmoid => f64::midpoint(b.min, b.max),
                };
                (
                    b.name.clone(),
                    if b.integer { value.round() } else { value },
                )
            })
            .collect()
    }

    /// Sample random parameters
    fn sample_random_parameters(&self, seed: u64) -> HashMap<String, f64> {
        use scirs2_core::random::prelude::*;

        let mut rng = StdRng::seed_from_u64(seed + self.config.seed.unwrap_or(42));

        self.parameter_bounds
            .iter()
            .map(|b| {
                let value = match b.scale {
                    ParameterScale::Linear => rng.gen_range(b.min..b.max),
                    ParameterScale::Logarithmic => {
                        let log_min = b.min.ln();
                        let log_max = b.max.ln();
                        rng.gen_range(log_min..log_max).exp()
                    }
                    ParameterScale::Sigmoid => {
                        let u: f64 = rng.gen();
                        b.min + (b.max - b.min) / (1.0 + (-4.0 * (u - 0.5)).exp())
                    }
                };
                (
                    b.name.clone(),
                    if b.integer { value.round() } else { value },
                )
            })
            .collect()
    }

    /// Get next parameters using Bayesian optimization
    fn get_next_parameters(&mut self) -> Result<HashMap<String, f64>, Box<dyn std::error::Error>> {
        #[cfg(feature = "scirs")]
        {
            // Prepare data before borrowing optimizer mutably
            let x_data: Vec<Array1<f64>> = self
                .evaluation_history
                .iter()
                .map(|e| {
                    Array1::from_vec(
                        self.parameter_bounds
                            .iter()
                            .map(|b| self.transform_to_unit(e.parameters[&b.name], b))
                            .collect(),
                    )
                })
                .collect();

            let y_data: Array1<f64> = self
                .evaluation_history
                .iter()
                .map(|e| e.objective_value)
                .collect();

            if let Some(ref mut optimizer) = self.optimizer {
                // Update GP model
                optimizer.update(&x_data, &y_data)?;

                // Get next point
                let next_point = optimizer.suggest_next()?;

                // Transform back to parameter space
                return Ok(self.transform_from_unit(&next_point));
            }
        }

        // Fallback: random sampling
        Ok(self.sample_random_parameters(self.evaluation_history.len() as u64))
    }

    /// Transform parameter to unit interval
    fn transform_to_unit(&self, value: f64, bounds: &ParameterBounds) -> f64 {
        match bounds.scale {
            ParameterScale::Linear => (value - bounds.min) / (bounds.max - bounds.min),
            ParameterScale::Logarithmic => {
                (value.ln() - bounds.min.ln()) / (bounds.max.ln() - bounds.min.ln())
            }
            ParameterScale::Sigmoid => {
                let normalized = (value - bounds.min) / (bounds.max - bounds.min);
                0.25f64.mul_add((4.0 * (normalized - 0.5)).tanh(), 0.5)
            }
        }
    }

    /// Transform from unit interval to parameter space
    fn transform_from_unit(&self, unit_values: &Array1<f64>) -> HashMap<String, f64> {
        self.parameter_bounds
            .iter()
            .enumerate()
            .map(|(i, b)| {
                let unit_val = unit_values[i].clamp(0.0, 1.0);
                let value = match b.scale {
                    ParameterScale::Linear => b.min + unit_val * (b.max - b.min),
                    ParameterScale::Logarithmic => {
                        let log_val = b.min.ln() + unit_val * (b.max.ln() - b.min.ln());
                        log_val.exp()
                    }
                    ParameterScale::Sigmoid => {
                        let t = (unit_val - 0.5) * 4.0;
                        let sigmoid = 0.5f64.mul_add(t.tanh(), 0.5);
                        b.min + sigmoid * (b.max - b.min)
                    }
                };
                (
                    b.name.clone(),
                    if b.integer { value.round() } else { value },
                )
            })
            .collect()
    }

    /// Evaluate a parameter configuration
    fn evaluate_configuration<S: Sampler>(
        &self,
        parameters: &HashMap<String, f64>,
        sampler_factory: &impl Fn(HashMap<String, f64>) -> S,
        model: &CompiledModel,
        objective: &impl Fn(&[SampleResult]) -> f64,
    ) -> Result<f64, Box<dyn std::error::Error>> {
        let _start_time = std::time::Instant::now();

        // Create sampler with parameters
        let sampler = sampler_factory(parameters.clone());

        // Run sampling
        let num_reads = 100; // Could be a tunable parameter
        let samples = sampler.run_qubo(&model.to_qubo(), num_reads)?;

        // Evaluate objective
        let obj_value = objective(&samples);

        Ok(obj_value)
    }

    /// Record evaluation result
    fn record_evaluation(
        &mut self,
        parameters: HashMap<String, f64>,
        objective_value: f64,
        constraint_violations: HashMap<String, f64>,
        iteration: usize,
    ) {
        self.evaluation_history.push(TuningEvaluation {
            parameters,
            objective_value,
            constraint_violations,
            evaluation_time: std::time::Duration::from_secs(1), // Placeholder
            iteration,
        });
    }

    /// Calculate parameter importance using sensitivity analysis
    fn calculate_parameter_importance(
        &self,
    ) -> Result<HashMap<String, f64>, Box<dyn std::error::Error>> {
        #[cfg(feature = "scirs")]
        {
            if self.optimizer.is_some() {
                // Use GP model to calculate parameter sensitivities
                return self.calculate_importance_gp();
            }
        }

        // Fallback: correlation-based importance
        self.calculate_importance_correlation()
    }

    /// Calculate importance using Gaussian Process
    #[cfg(feature = "scirs")]
    fn calculate_importance_gp(&self) -> Result<HashMap<String, f64>, Box<dyn std::error::Error>> {
        // TODO: implement Sobol indices or a similar sensitivity analysis.
        // For now, return uniform importance.
        Ok(self
            .parameter_bounds
            .iter()
            .map(|b| (b.name.clone(), 1.0 / self.parameter_bounds.len() as f64))
            .collect())
    }

    /// Calculate importance using correlation
    fn calculate_importance_correlation(
        &self,
    ) -> Result<HashMap<String, f64>, Box<dyn std::error::Error>> {
        let mut importance = HashMap::new();

        for bounds in &self.parameter_bounds {
            // Calculate correlation between parameter and objective
            let param_values: Vec<f64> = self
                .evaluation_history
                .iter()
                .map(|e| e.parameters[&bounds.name])
                .collect();

            let obj_values: Vec<f64> = self
                .evaluation_history
                .iter()
                .map(|e| e.objective_value)
                .collect();

            let correlation = calculate_correlation(&param_values, &obj_values);
            importance.insert(bounds.name.clone(), correlation.abs());
        }

        // Normalize
        let total: f64 = importance.values().sum();
        if total > 0.0 {
            for value in importance.values_mut() {
                *value /= total;
            }
        }

        Ok(importance)
    }

    /// Export tuning results
    pub fn export_results(&self, path: &str) -> Result<(), Box<dyn std::error::Error>> {
        let export = TuningExport {
            config: self.config.clone(),
            parameter_bounds: self.parameter_bounds.clone(),
            evaluation_history: self.evaluation_history.clone(),
            timestamp: std::time::SystemTime::now(),
        };

        let json = serde_json::to_string_pretty(&export)?;
        std::fs::write(path, json)?;

        Ok(())
    }
}

/// Calculate correlation coefficient
fn calculate_correlation(x: &[f64], y: &[f64]) -> f64 {
    if x.len() != y.len() || x.is_empty() {
        return 0.0;
    }

    let n = x.len() as f64;
    let mean_x = x.iter().sum::<f64>() / n;
    let mean_y = y.iter().sum::<f64>() / n;

    let cov: f64 = x
        .iter()
        .zip(y.iter())
        .map(|(xi, yi)| (xi - mean_x) * (yi - mean_y))
        .sum::<f64>()
        / n;

    let std_x = (x.iter().map(|xi| (xi - mean_x).powi(2)).sum::<f64>() / n).sqrt();
    let std_y = (y.iter().map(|yi| (yi - mean_y).powi(2)).sum::<f64>() / n).sqrt();

    if std_x > 0.0 && std_y > 0.0 {
        cov / (std_x * std_y)
    } else {
        0.0
    }
}

/// Tuning export format
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TuningExport {
    pub config: TuningConfig,
    pub parameter_bounds: Vec<ParameterBounds>,
    pub evaluation_history: Vec<TuningEvaluation>,
    pub timestamp: std::time::SystemTime,
}

#[cfg(feature = "scirs")]
impl From<AcquisitionType> for AcquisitionFunction {
    fn from(acq: AcquisitionType) -> Self {
        match acq {
            AcquisitionType::ExpectedImprovement => Self::ExpectedImprovement,
            AcquisitionType::UpperConfidenceBound => Self::UCB,
            AcquisitionType::ProbabilityOfImprovement => Self::PI,
            AcquisitionType::ThompsonSampling => Self::Thompson,
        }
    }
}

/// Quick parameter tuning with a reduced evaluation budget and a default
/// objective that minimizes the average sample energy.
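///
/// A minimal usage sketch; `MySampler` stands in for any [`Sampler`]
/// implementation that can be constructed from a parameter map:
///
/// ```ignore
/// let bounds = vec![ParameterBounds {
///     name: "num_sweeps".to_string(),
///     min: 10.0,
///     max: 10_000.0,
///     scale: ParameterScale::Logarithmic,
///     integer: true,
/// }];
/// let best = quick_tune(|params| MySampler::with_parameters(params), &compiled_model, bounds)?;
/// ```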
pub fn quick_tune<S: Sampler>(
    sampler_factory: impl Fn(HashMap<String, f64>) -> S,
    model: &CompiledModel,
    parameter_bounds: Vec<ParameterBounds>,
) -> Result<HashMap<String, f64>, Box<dyn std::error::Error>> {
    let config = TuningConfig {
        max_evaluations: 50,
        initial_samples: 10,
        ..Default::default()
    };

    let mut tuner = ParameterTuner::new(config);
    tuner.add_parameters(parameter_bounds);

    let result = tuner.tune_sampler(sampler_factory, model, |samples| {
        // Default objective: minimize average energy
        samples.iter().map(|s| s.energy).sum::<f64>() / samples.len() as f64
    })?;

    Ok(result.best_parameters)
}
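
// Minimal sanity-check sketches for the correlation helper and the linear
// unit-interval transform; these cover only the simplest, hand-checkable cases.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn correlation_of_perfectly_linear_data_is_one() {
        let x = [1.0, 2.0, 3.0, 4.0];
        let y = [2.0, 4.0, 6.0, 8.0];
        assert!((calculate_correlation(&x, &y) - 1.0).abs() < 1e-10);
    }

    #[test]
    fn linear_transform_maps_bounds_onto_unit_interval() {
        let tuner = ParameterTuner::new(TuningConfig::default());
        let bounds = ParameterBounds {
            name: "x".to_string(),
            min: 2.0,
            max: 10.0,
            scale: ParameterScale::Linear,
            integer: false,
        };
        assert!(tuner.transform_to_unit(2.0, &bounds).abs() < 1e-12);
        assert!((tuner.transform_to_unit(10.0, &bounds) - 1.0).abs() < 1e-12);
        assert!((tuner.transform_to_unit(6.0, &bounds) - 0.5).abs() < 1e-12);
    }
}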