quantrs2_tytan/
sampler_framework.rs

//! Sampler framework extensions for advanced optimization strategies.
//!
//! This module provides plugin architecture, hyperparameter optimization,
//! ensemble methods, and adaptive sampling strategies.

#![allow(dead_code)]

#[cfg(feature = "dwave")]
use crate::compile::CompiledModel;
use crate::sampler::{SampleResult, Sampler, SamplerError, SamplerResult};
use scirs2_core::ndarray::{Array, Array2, IxDyn};
use scirs2_core::random::prelude::*;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};

#[cfg(feature = "scirs")]
use crate::scirs_stub::{
    scirs2_ml::{CrossValidation, RandomForest},
    scirs2_optimization::bayesian::{AcquisitionFunction, BayesianOptimizer, KernelType},
};

/// Plugin trait for custom samplers
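///
/// # Example
///
/// A minimal sketch of a plugin wrapping the built-in `SASampler`
/// (illustrative only; `MySaPlugin` is hypothetical, and the
/// `SASampler::new(Option<u64>)` constructor is assumed from the tests at
/// the bottom of this module):
///
/// ```ignore
/// use quantrs2_tytan::sampler::SASampler;
/// use std::collections::HashMap;
///
/// struct MySaPlugin {
///     seed: Option<u64>,
/// }
///
/// impl SamplerPlugin for MySaPlugin {
///     fn name(&self) -> &str {
///         "my_sa"
///     }
///
///     fn version(&self) -> &str {
///         "0.1.0"
///     }
///
///     fn initialize(&mut self, config: &HashMap<String, String>) -> Result<(), String> {
///         // Read an optional seed from the configuration.
///         self.seed = config
///             .get("seed")
///             .map(|s| s.parse().map_err(|e| format!("bad seed: {e}")))
///             .transpose()?;
///         Ok(())
///     }
///
///     fn create_sampler(&self) -> Box<dyn Sampler> {
///         Box::new(SASampler::new(self.seed))
///     }
///
///     fn default_config(&self) -> HashMap<String, String> {
///         HashMap::new()
///     }
///
///     fn validate_config(&self, config: &HashMap<String, String>) -> Result<(), String> {
///         // Only a "seed" key is recognized in this sketch.
///         if config.keys().all(|k| k == "seed") {
///             Ok(())
///         } else {
///             Err("unknown configuration key".to_string())
///         }
///     }
/// }
/// ```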
pub trait SamplerPlugin: Send + Sync {
    /// Plugin name
    fn name(&self) -> &str;

    /// Plugin version
    fn version(&self) -> &str;

    /// Initialize plugin
    fn initialize(&mut self, config: &HashMap<String, String>) -> Result<(), String>;

    /// Create sampler instance
    fn create_sampler(&self) -> Box<dyn Sampler>;

    /// Get default configuration
    fn default_config(&self) -> HashMap<String, String>;

    /// Validate configuration
    fn validate_config(&self, config: &HashMap<String, String>) -> Result<(), String>;
}

/// Plugin manager for dynamic sampler loading
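///
/// # Example
///
/// Registering and using a plugin (a sketch; `MySaPlugin` refers to the
/// hypothetical plugin from the `SamplerPlugin` example above):
///
/// ```ignore
/// let mut manager = PluginManager::new();
/// manager.register_plugin(Box::new(MySaPlugin { seed: None }))?;
/// manager.configure_plugin("my_sa", HashMap::from([("seed".into(), "42".into())]))?;
/// let sampler = manager.create_sampler("my_sa")?;
/// ```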
pub struct PluginManager {
    /// Registered plugins
    plugins: HashMap<String, Box<dyn SamplerPlugin>>,
    /// Plugin configurations
    configs: HashMap<String, HashMap<String, String>>,
}

impl Default for PluginManager {
    fn default() -> Self {
        Self::new()
    }
}

impl PluginManager {
    /// Create new plugin manager
    pub fn new() -> Self {
        Self {
            plugins: HashMap::new(),
            configs: HashMap::new(),
        }
    }

    /// Register a plugin
    pub fn register_plugin(&mut self, plugin: Box<dyn SamplerPlugin>) -> Result<(), String> {
        let name = plugin.name().to_string();

        if self.plugins.contains_key(&name) {
            return Err(format!("Plugin {name} already registered"));
        }

        let default_config = plugin.default_config();
        self.configs.insert(name.clone(), default_config);
        self.plugins.insert(name, plugin);

        Ok(())
    }

    /// Configure plugin
    pub fn configure_plugin(
        &mut self,
        name: &str,
        config: HashMap<String, String>,
    ) -> Result<(), String> {
        let plugin = self
            .plugins
            .get(name)
            .ok_or_else(|| format!("Plugin {name} not found"))?;

        plugin.validate_config(&config)?;
        self.configs.insert(name.to_string(), config);

        Ok(())
    }

    /// Create sampler from plugin
    pub fn create_sampler(&mut self, name: &str) -> Result<Box<dyn Sampler>, String> {
        let plugin = self
            .plugins
            .get_mut(name)
            .ok_or_else(|| format!("Plugin {name} not found"))?;

        let config = self.configs.get(name).cloned().unwrap_or_default();
        plugin.initialize(&config)?;

        Ok(plugin.create_sampler())
    }

    /// List available plugins
    pub fn list_plugins(&self) -> Vec<PluginInfo> {
        self.plugins
            .values()
            .map(|p| PluginInfo {
                name: p.name().to_string(),
                version: p.version().to_string(),
            })
            .collect()
    }
}

#[derive(Debug, Clone)]
pub struct PluginInfo {
    pub name: String,
    pub version: String,
}

/// Hyperparameter optimization for samplers
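///
/// # Example
///
/// A random-search sketch over a log-scaled temperature (requires the
/// `dwave` feature; the closure builds a sampler from the sampled
/// parameters, and `validation_problems` is a slice of `CompiledModel`s
/// you already have):
///
/// ```ignore
/// let mut opt = HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, 50);
/// opt.add_parameter(
///     "temperature",
///     ParameterSpace::Continuous { min: 0.1, max: 10.0, log_scale: true },
/// );
/// let result = opt.optimize(
///     // A real objective would apply `_params` to the sampler it builds.
///     |_params| Box::new(SASampler::new(None)) as Box<dyn Sampler>,
///     &validation_problems,
/// )?;
/// println!("best: {:?} (score {})", result.best_parameters, result.best_score);
/// ```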
pub struct HyperparameterOptimizer {
    /// Parameter search space
    search_space: HashMap<String, ParameterSpace>,
    /// Optimization method
    method: OptimizationMethod,
    /// Number of trials
    num_trials: usize,
    /// Cross-validation folds
    cv_folds: usize,
}

#[derive(Debug, Clone)]
pub enum ParameterSpace {
    /// Continuous parameter
    Continuous { min: f64, max: f64, log_scale: bool },
    /// Discrete parameter
    Discrete { values: Vec<f64> },
    /// Categorical parameter
    Categorical { options: Vec<String> },
}

#[derive(Debug, Clone)]
pub enum OptimizationMethod {
    /// Random search
    RandomSearch,
    /// Grid search
    GridSearch { resolution: usize },
    /// Bayesian optimization
    #[cfg(feature = "scirs")]
    Bayesian {
        kernel: KernelType,
        acquisition: AcquisitionFunction,
        exploration: f64,
    },
    /// Evolutionary optimization
    Evolutionary {
        population_size: usize,
        mutation_rate: f64,
    },
}

impl HyperparameterOptimizer {
    /// Create new optimizer
    pub fn new(method: OptimizationMethod, num_trials: usize) -> Self {
        Self {
            search_space: HashMap::new(),
            method,
            num_trials,
            cv_folds: 5,
        }
    }

    /// Add parameter to search space
    pub fn add_parameter(&mut self, name: &str, space: ParameterSpace) {
        self.search_space.insert(name.to_string(), space);
    }

    /// Optimize hyperparameters
    #[cfg(feature = "dwave")]
    pub fn optimize<F>(
        &self,
        objective: F,
        validation_problems: &[CompiledModel],
    ) -> Result<OptimizationResult, String>
    where
        F: Fn(&HashMap<String, f64>) -> Box<dyn Sampler>,
    {
        match &self.method {
            OptimizationMethod::RandomSearch => self.random_search(objective, validation_problems),
            OptimizationMethod::GridSearch { resolution } => {
                self.grid_search(objective, validation_problems, *resolution)
            }
            #[cfg(feature = "scirs")]
            OptimizationMethod::Bayesian {
                kernel,
                acquisition,
                exploration,
            } => self.bayesian_optimization(
                objective,
                validation_problems,
                *kernel,
                *acquisition,
                *exploration,
            ),
            OptimizationMethod::Evolutionary {
                population_size,
                mutation_rate,
            } => self.evolutionary_optimization(
                objective,
                validation_problems,
                *population_size,
                *mutation_rate,
            ),
            #[cfg(not(feature = "scirs"))]
            _ => Err("Optimization method not available (requires 'scirs' feature)".to_string()),
        }
    }

    /// Random search implementation
    #[cfg(feature = "dwave")]
    fn random_search<F>(
        &self,
        objective: F,
        validation_problems: &[CompiledModel],
    ) -> Result<OptimizationResult, String>
    where
        F: Fn(&HashMap<String, f64>) -> Box<dyn Sampler>,
    {
        let mut rng = thread_rng();
        let mut best_params = HashMap::new();
        let mut best_score = f64::INFINITY;
        let mut history = Vec::new();

        for trial in 0..self.num_trials {
            // Sample random parameters
            let params = self.sample_parameters(&mut rng)?;

            // Evaluate
            let sampler = objective(&params);
            let score = self.evaluate_sampler(sampler, validation_problems)?;

            history.push(TrialResult {
                parameters: params.clone(),
                score,
                iteration: trial,
            });

            if score < best_score {
                best_score = score;
                best_params = params;
            }
        }

        let convergence_curve = self.compute_convergence_curve(&history);
        Ok(OptimizationResult {
            best_parameters: best_params,
            best_score,
            history,
            convergence_curve,
        })
    }

    /// Grid search implementation
    #[cfg(feature = "dwave")]
    fn grid_search<F>(
        &self,
        objective: F,
        validation_problems: &[CompiledModel],
        resolution: usize,
    ) -> Result<OptimizationResult, String>
    where
        F: Fn(&HashMap<String, f64>) -> Box<dyn Sampler>,
    {
        // Generate grid points
        let grid_points = self.generate_grid(resolution)?;

        let mut best_params = HashMap::new();
        let mut best_score = f64::INFINITY;
        let mut history = Vec::new();

        for (i, params) in grid_points.iter().enumerate() {
            let sampler = objective(params);
            let score = self.evaluate_sampler(sampler, validation_problems)?;

            history.push(TrialResult {
                parameters: params.clone(),
                score,
                iteration: i,
            });

            if score < best_score {
                best_score = score;
                best_params = params.clone();
            }
        }

        let convergence_curve = self.compute_convergence_curve(&history);
        Ok(OptimizationResult {
            best_parameters: best_params,
            best_score,
            history,
            convergence_curve,
        })
    }

    /// Bayesian optimization implementation
    #[cfg(all(feature = "scirs", feature = "dwave"))]
    fn bayesian_optimization<F>(
        &self,
        objective: F,
        validation_problems: &[CompiledModel],
        kernel: KernelType,
        acquisition: AcquisitionFunction,
        exploration: f64,
    ) -> Result<OptimizationResult, String>
    where
        F: Fn(&HashMap<String, f64>) -> Box<dyn Sampler>,
    {
        use scirs2_core::ndarray::Array1;

        let dim = self.search_space.len();
        let mut optimizer = BayesianOptimizer::new(dim, kernel, acquisition, exploration)
            .map_err(|e| e.to_string())?;

        let mut history = Vec::new();
        let mut x_data = Vec::new();
        let mut y_data = Vec::new();

        // Initial random samples
        let mut rng = thread_rng();
        for _ in 0..std::cmp::min(10, self.num_trials / 4) {
            let params = self.sample_parameters(&mut rng)?;
            let sampler = objective(&params);
            let score = self.evaluate_sampler(sampler, validation_problems)?;

            let x = self.params_to_array(&params)?;
            x_data.push(x);
            y_data.push(score);

            history.push(TrialResult {
                parameters: params,
                score,
                iteration: history.len(),
            });
        }

        // Bayesian optimization loop
        let y_array = Array1::from_vec(y_data.clone());
        optimizer
            .update(&x_data, &y_array)
            .map_err(|e| e.to_string())?;

        for _ in history.len()..self.num_trials {
            // Suggest next point
            let x_next = optimizer.suggest_next().map_err(|e| e.to_string())?;
            let params = self.array_to_params(&x_next)?;

            // Evaluate
            let sampler = objective(&params);
            let score = self.evaluate_sampler(sampler, validation_problems)?;

            // Update model
            x_data.push(x_next);
            y_data.push(score);
            let y_array = Array1::from_vec(y_data.clone());
            optimizer
                .update(&x_data, &y_array)
                .map_err(|e| e.to_string())?;

            history.push(TrialResult {
                parameters: params,
                score,
                iteration: history.len(),
            });
        }

        // Find best
        let (best_idx, &best_score) = y_data
            .iter()
            .enumerate()
            .min_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
            .ok_or_else(|| "No optimization trials completed".to_string())?;

        let best_params = self.array_to_params(&x_data[best_idx])?;

        let convergence_curve = self.compute_convergence_curve(&history);
        Ok(OptimizationResult {
            best_parameters: best_params,
            best_score,
            history,
            convergence_curve,
        })
    }

    /// Evolutionary optimization (placeholder)
    #[cfg(feature = "dwave")]
    fn evolutionary_optimization<F>(
        &self,
        _objective: F,
        _validation_problems: &[CompiledModel],
        _population_size: usize,
        _mutation_rate: f64,
    ) -> Result<OptimizationResult, String>
    where
        F: Fn(&HashMap<String, f64>) -> Box<dyn Sampler>,
    {
        Err("Evolutionary optimization not yet implemented".to_string())
    }

    /// Sample parameters from search space
    fn sample_parameters(&self, rng: &mut impl Rng) -> Result<HashMap<String, f64>, String> {
        let mut params = HashMap::new();

        for (name, space) in &self.search_space {
            let value = match space {
                ParameterSpace::Continuous {
                    min,
                    max,
                    log_scale,
                } => {
                    if *log_scale {
                        let log_min = min.ln();
                        let log_max = max.ln();
                        let log_val = rng.gen_range(log_min..log_max);
                        log_val.exp()
                    } else {
                        rng.gen_range(*min..*max)
                    }
                }
                ParameterSpace::Discrete { values } => values[rng.gen_range(0..values.len())],
                ParameterSpace::Categorical { options } => {
                    // Return index for categorical
                    rng.gen_range(0..options.len()) as f64
                }
            };

            params.insert(name.clone(), value);
        }

        Ok(params)
    }

    /// Generate grid points
    fn generate_grid(&self, resolution: usize) -> Result<Vec<HashMap<String, f64>>, String> {
        // Simplified: generate regular grid
        let mut grid_points = Vec::new();

        // This would need proper multi-dimensional grid generation; for now,
        // just sample uniformly (see the Cartesian sketch after this function)
        let total_points = resolution.pow(self.search_space.len() as u32);
        let mut rng = thread_rng();

        for _ in 0..total_points.min(self.num_trials) {
            grid_points.push(self.sample_parameters(&mut rng)?);
        }

        Ok(grid_points)
    }
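
    /// A sketch of true multi-dimensional grid generation (added for
    /// illustration; `generate_grid` above does not call this). Continuous
    /// axes are discretized into `resolution` points (respecting log scale),
    /// discrete axes use their listed values, and categorical axes use
    /// option indices.
    fn cartesian_grid_sketch(&self, resolution: usize) -> Vec<HashMap<String, f64>> {
        // Sort names so the axis order is deterministic.
        let mut names: Vec<String> = self.search_space.keys().cloned().collect();
        names.sort();

        // Build the candidate values along each axis.
        let axes: Vec<Vec<f64>> = names
            .iter()
            .map(|name| match &self.search_space[name] {
                ParameterSpace::Continuous {
                    min,
                    max,
                    log_scale,
                } => {
                    let n = resolution.max(2);
                    (0..n)
                        .map(|i| {
                            let t = i as f64 / (n - 1) as f64;
                            if *log_scale {
                                (min.ln() + t * (max.ln() - min.ln())).exp()
                            } else {
                                min + t * (max - min)
                            }
                        })
                        .collect()
                }
                ParameterSpace::Discrete { values } => values.clone(),
                ParameterSpace::Categorical { options } => {
                    (0..options.len()).map(|i| i as f64).collect()
                }
            })
            .collect();

        // Cartesian product over all axes.
        let mut points: Vec<Vec<f64>> = vec![Vec::new()];
        for axis in &axes {
            points = points
                .iter()
                .flat_map(|point| {
                    axis.iter().map(move |&v| {
                        let mut extended = point.clone();
                        extended.push(v);
                        extended
                    })
                })
                .collect();
        }

        points
            .into_iter()
            .map(|values| names.iter().cloned().zip(values).collect())
            .collect()
    }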

    /// Convert parameters to array
    #[cfg(feature = "scirs")]
    fn params_to_array(
        &self,
        params: &HashMap<String, f64>,
    ) -> Result<scirs2_core::ndarray::Array1<f64>, String> {
        let mut values = Vec::new();

        // Ensure consistent ordering
        let mut names: Vec<_> = self.search_space.keys().collect();
        names.sort();

        for name in names {
            values.push(params.get(name).copied().unwrap_or(0.0));
        }

        Ok(scirs2_core::ndarray::Array1::from_vec(values))
    }

    /// Convert array to parameters
    #[cfg(feature = "scirs")]
    fn array_to_params(
        &self,
        array: &scirs2_core::ndarray::Array1<f64>,
    ) -> Result<HashMap<String, f64>, String> {
        let mut params = HashMap::new();

        let mut names: Vec<_> = self.search_space.keys().collect();
        names.sort();

        for (i, name) in names.iter().enumerate() {
            params.insert((*name).clone(), array[i]);
        }

        Ok(params)
    }

    /// Evaluate sampler performance
    #[cfg(feature = "dwave")]
    fn evaluate_sampler(
        &self,
        sampler: Box<dyn Sampler>,
        problems: &[CompiledModel],
    ) -> Result<f64, String> {
        let mut scores = Vec::new();

        for problem in problems {
            let mut qubo = problem.to_qubo();
            let start = Instant::now();

            let qubo_tuple = (qubo.to_dense_matrix(), qubo.variable_map());
            let results = sampler
                .run_qubo(&qubo_tuple, 100)
                .map_err(|e| format!("Sampler error: {e:?}"))?;

            let elapsed = start.elapsed();

            // Score combines solution quality with a small time penalty:
            // best_energy + 0.1 * elapsed_seconds
            let best_energy = results.first().map_or(f64::INFINITY, |r| r.energy);

            let time_penalty = elapsed.as_secs_f64();
            let score = 0.1f64.mul_add(time_penalty, best_energy);

            scores.push(score);
        }

        // Return average score
        Ok(scores.iter().sum::<f64>() / scores.len() as f64)
    }

    /// Compute convergence curve
    fn compute_convergence_curve(&self, history: &[TrialResult]) -> Vec<f64> {
        let mut curve = Vec::new();
        let mut best_so_far = f64::INFINITY;

        for trial in history {
            best_so_far = best_so_far.min(trial.score);
            curve.push(best_so_far);
        }

        curve
    }
}

#[derive(Debug, Clone)]
pub struct OptimizationResult {
    pub best_parameters: HashMap<String, f64>,
    pub best_score: f64,
    pub history: Vec<TrialResult>,
    pub convergence_curve: Vec<f64>,
}

#[derive(Debug, Clone)]
pub struct TrialResult {
    pub parameters: HashMap<String, f64>,
    pub score: f64,
    pub iteration: usize,
}

/// Ensemble sampler that combines multiple sampling strategies
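///
/// # Example
///
/// A sketch of weighted voting over two annealing runs (mirrors the test at
/// the bottom of this module; `qubo` is an `(Array2<f64>, HashMap<String, usize>)`
/// pair you already have):
///
/// ```ignore
/// let samplers: Vec<Box<dyn Sampler>> = vec![
///     Box::new(SASampler::new(Some(1))),
///     Box::new(SASampler::new(Some(2))),
/// ];
/// let ensemble = EnsembleSampler::new(samplers, EnsembleMethod::WeightedVoting)
///     .with_weights(vec![0.7, 0.3]);
/// let results = ensemble.run_qubo(&qubo, 1000)?;
/// ```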
pub struct EnsembleSampler {
    /// Base samplers
    samplers: Vec<Box<dyn Sampler>>,
    /// Combination method
    method: EnsembleMethod,
    /// Weights for weighted combination
    weights: Option<Vec<f64>>,
}

#[derive(Debug, Clone)]
pub enum EnsembleMethod {
    /// Simple voting
    Voting,
    /// Weighted voting
    WeightedVoting,
    /// Best of all
    BestOf,
    /// Sequential refinement
    Sequential,
    /// Parallel with aggregation
    Parallel,
}

impl EnsembleSampler {
    /// Create new ensemble sampler
    pub fn new(samplers: Vec<Box<dyn Sampler>>, method: EnsembleMethod) -> Self {
        Self {
            samplers,
            method,
            weights: None,
        }
    }

    /// Set weights for weighted voting
    pub fn with_weights(mut self, weights: Vec<f64>) -> Self {
        self.weights = Some(weights);
        self
    }
}

impl Sampler for EnsembleSampler {
    fn run_qubo(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        match &self.method {
            EnsembleMethod::Voting => self.voting_ensemble(qubo, shots),
            EnsembleMethod::WeightedVoting => self.weighted_voting_ensemble(qubo, shots),
            EnsembleMethod::BestOf => self.best_of_ensemble(qubo, shots),
            EnsembleMethod::Sequential => self.sequential_ensemble(qubo, shots),
            EnsembleMethod::Parallel => self.parallel_ensemble(qubo, shots),
        }
    }

    fn run_hobo(
        &self,
        hobo: &(Array<f64, IxDyn>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        // Similar implementation for HOBO
        match &self.method {
            EnsembleMethod::Voting => self.voting_ensemble_hobo(hobo, shots),
            _ => Err(SamplerError::InvalidParameter(
                "HOBO ensemble not fully implemented".to_string(),
            )),
        }
    }
}

impl EnsembleSampler {
    /// Simple voting ensemble
    fn voting_ensemble(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        let shots_per_sampler = shots / self.samplers.len();
        let mut all_results = Vec::new();

        // Run each sampler
        for sampler in &self.samplers {
            let results = sampler.run_qubo(qubo, shots_per_sampler)?;
            all_results.extend(results);
        }

        // Aggregate by voting
        let mut vote_counts: HashMap<Vec<bool>, (f64, usize)> = HashMap::new();

        for result in all_results {
            let state: Vec<bool> = qubo.1.keys().map(|var| result.assignments[var]).collect();

            let entry = vote_counts.entry(state).or_insert((result.energy, 0));
            entry.1 += result.occurrences;
        }

        // Convert back to results
        let mut final_results: Vec<SampleResult> = vote_counts
            .into_iter()
            .map(|(state, (energy, count))| {
                let assignments: HashMap<String, bool> = qubo
                    .1
                    .iter()
                    .zip(state.iter())
                    .map(|((var, _), &val)| (var.clone(), val))
                    .collect();

                SampleResult {
                    assignments,
                    energy,
                    occurrences: count,
                }
            })
            .collect();

        final_results.sort_by(|a, b| {
            a.energy
                .partial_cmp(&b.energy)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        Ok(final_results)
    }

    /// Weighted voting ensemble
    fn weighted_voting_ensemble(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        let weights = self.weights.as_ref().ok_or_else(|| {
            SamplerError::InvalidParameter("Weights not set for weighted voting".to_string())
        })?;

        if weights.len() != self.samplers.len() {
            return Err(SamplerError::InvalidParameter(
                "Number of weights must match number of samplers".to_string(),
            ));
        }

        // Normalize weights
        let total_weight: f64 = weights.iter().sum();
        let normalized: Vec<f64> = weights.iter().map(|&w| w / total_weight).collect();

        let mut all_results = Vec::new();

        // Run each sampler with weighted shots
        for (sampler, &weight) in self.samplers.iter().zip(normalized.iter()) {
            let sampler_shots = (shots as f64 * weight).round() as usize;
            if sampler_shots > 0 {
                let results = sampler.run_qubo(qubo, sampler_shots)?;
                all_results.extend(results);
            }
        }

        // Aggregate results
        self.aggregate_results(all_results, &qubo.1)
    }

    /// Best-of ensemble
    fn best_of_ensemble(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        let shots_per_sampler = shots / self.samplers.len();
        let mut best_results = Vec::new();
        let mut best_energy = f64::INFINITY;

        // Run each sampler and keep best
        for sampler in &self.samplers {
            let results = sampler.run_qubo(qubo, shots_per_sampler)?;

            if let Some(best) = results.first() {
                if best.energy < best_energy {
                    best_energy = best.energy;
                    best_results = results;
                }
            }
        }

        Ok(best_results)
    }

    /// Sequential refinement ensemble
    fn sequential_ensemble(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        if self.samplers.is_empty() {
            return Ok(Vec::new());
        }

        // Start with first sampler
        let mut current_best = self.samplers[0].run_qubo(qubo, shots)?;

        // Refine with subsequent samplers
        for sampler in self.samplers.iter().skip(1) {
            // Use best solutions as warm start (if sampler supports it)
            // For now, just run independently
            let refined = sampler.run_qubo(qubo, shots / self.samplers.len())?;

            // Merge results
            current_best.extend(refined);
            current_best.sort_by(|a, b| {
                a.energy
                    .partial_cmp(&b.energy)
                    .unwrap_or(std::cmp::Ordering::Equal)
            });
            current_best.truncate(shots);
        }

        Ok(current_best)
    }

    /// Parallel ensemble with aggregation
    fn parallel_ensemble(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        let shots_per_sampler = shots / self.samplers.len();

        // Samplers would need to be thread-safe for real parallel execution;
        // for now, run them sequentially (see the sketch after this function)
        let mut all_results = Vec::new();

        for sampler in &self.samplers {
            let results = sampler.run_qubo(qubo, shots_per_sampler)?;
            all_results.extend(results);
        }

        self.aggregate_results(all_results, &qubo.1)
    }
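
    // A sketch of what true fan-out could look like, assuming the boxed
    // samplers are `Send + Sync` (the `Sampler` trait alone does not
    // guarantee this, so the sketch is illustrative and not wired into
    // `parallel_ensemble` above):
    //
    //     let mut all_results = Vec::new();
    //     std::thread::scope(|s| {
    //         let handles: Vec<_> = self
    //             .samplers
    //             .iter()
    //             .map(|sampler| s.spawn(|| sampler.run_qubo(qubo, shots_per_sampler)))
    //             .collect();
    //         for handle in handles {
    //             if let Ok(results) = handle.join().expect("sampler thread panicked") {
    //                 all_results.extend(results);
    //             }
    //         }
    //     });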

    /// Aggregate results from multiple samplers
    fn aggregate_results(
        &self,
        results: Vec<SampleResult>,
        var_map: &HashMap<String, usize>,
    ) -> SamplerResult<Vec<SampleResult>> {
        let mut aggregated: HashMap<Vec<bool>, (f64, usize)> = HashMap::new();

        for result in results {
            let state: Vec<bool> = var_map.keys().map(|var| result.assignments[var]).collect();

            let entry = aggregated.entry(state).or_insert((result.energy, 0));

            // Keep minimum energy for duplicates
            entry.0 = entry.0.min(result.energy);
            entry.1 += result.occurrences;
        }

        let mut final_results: Vec<SampleResult> = aggregated
            .into_iter()
            .map(|(state, (energy, count))| {
                let assignments: HashMap<String, bool> = var_map
                    .iter()
                    .zip(state.iter())
                    .map(|((var, _), &val)| (var.clone(), val))
                    .collect();

                SampleResult {
                    assignments,
                    energy,
                    occurrences: count,
                }
            })
            .collect();

        final_results.sort_by(|a, b| {
            a.energy
                .partial_cmp(&b.energy)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        Ok(final_results)
    }

    /// Voting ensemble for HOBO
    fn voting_ensemble_hobo(
        &self,
        hobo: &(Array<f64, IxDyn>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        // Similar to QUBO voting but for HOBO
        let shots_per_sampler = shots / self.samplers.len();
        let mut all_results = Vec::new();

        for sampler in &self.samplers {
            let results = sampler.run_hobo(hobo, shots_per_sampler)?;
            all_results.extend(results);
        }

        self.aggregate_results(all_results, &hobo.1)
    }
}

/// Adaptive sampling strategy
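///
/// # Example
///
/// Wrapping a base sampler with temperature adaptation (a sketch; note that
/// the adapted parameters are currently recorded in the history but not yet
/// applied to the base sampler, as noted in `run_qubo` below):
///
/// ```ignore
/// let adaptive = AdaptiveSampler::new(
///     SASampler::new(None),
///     AdaptationStrategy::TemperatureAdaptive {
///         initial_range: (0.1, 10.0),
///         adaptation_rate: 0.3,
///     },
/// );
/// let results = adaptive.run_qubo(&qubo, 1000)?;
/// ```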
pub struct AdaptiveSampler<S: Sampler> {
    /// Base sampler
    base_sampler: S,
    /// Adaptation strategy
    strategy: AdaptationStrategy,
    /// Performance history
    history: Arc<Mutex<PerformanceHistory>>,
}

#[derive(Debug, Clone)]
pub enum AdaptationStrategy {
    /// Temperature adaptation
    TemperatureAdaptive {
        initial_range: (f64, f64),
        adaptation_rate: f64,
    },
    /// Population size adaptation
    PopulationAdaptive {
        min_size: usize,
        max_size: usize,
        growth_rate: f64,
    },
    /// Multi-armed bandit for strategy selection
    BanditAdaptive {
        strategies: Vec<String>,
        exploration_rate: f64,
    },
    /// Reinforcement learning based
    RLAdaptive {
        state_features: Vec<String>,
        action_space: Vec<String>,
    },
}

#[derive(Default)]
struct PerformanceHistory {
    energies: Vec<f64>,
    times: Vec<Duration>,
    improvements: Vec<f64>,
    parameters: Vec<HashMap<String, f64>>,
}

impl<S: Sampler> AdaptiveSampler<S> {
    /// Create new adaptive sampler
    pub fn new(base_sampler: S, strategy: AdaptationStrategy) -> Self {
        Self {
            base_sampler,
            strategy,
            history: Arc::new(Mutex::new(PerformanceHistory::default())),
        }
    }

    /// Adapt parameters based on performance
    fn adapt_parameters(&self) -> HashMap<String, f64> {
        let history = self
            .history
            .lock()
            .unwrap_or_else(|poisoned| poisoned.into_inner());

        match &self.strategy {
            AdaptationStrategy::TemperatureAdaptive {
                initial_range,
                adaptation_rate,
            } => {
                let mut params = HashMap::new();

                // Adapt temperature based on the recent improvement rate
                let (min_temp, max_temp) = initial_range;
                let temp = if history.improvements.len() > 10 {
                    let recent_improvements: f64 =
                        history.improvements.iter().rev().take(10).sum::<f64>() / 10.0;

                    if recent_improvements < 0.1 {
                        // Low improvement: increase temperature
                        min_temp + (max_temp - min_temp) * (1.0 - adaptation_rate)
                    } else {
                        // Good improvement: decrease temperature
                        min_temp + (max_temp - min_temp) * adaptation_rate
                    }
                } else {
                    (min_temp + max_temp) / 2.0
                };

                params.insert("temperature".to_string(), temp);
                params
            }
            _ => HashMap::new(),
        }
    }
}

impl<S: Sampler> Sampler for AdaptiveSampler<S> {
    fn run_qubo(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        // Adapt parameters
        let params = self.adapt_parameters();

        // Run base sampler (would need to apply params)
        let start = Instant::now();
        let results = self.base_sampler.run_qubo(qubo, shots)?;
        let elapsed = start.elapsed();

        // Update history
        if let Some(best) = results.first() {
            let mut history = self
                .history
                .lock()
                .unwrap_or_else(|poisoned| poisoned.into_inner());

            let improvement = if let Some(&last) = history.energies.last() {
                (last - best.energy) / last.abs().max(1.0)
            } else {
                1.0
            };

            history.energies.push(best.energy);
            history.times.push(elapsed);
            history.improvements.push(improvement);
            history.parameters.push(params);
        }

        Ok(results)
    }

    fn run_hobo(
        &self,
        hobo: &(Array<f64, IxDyn>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        // Similar adaptation for HOBO
        self.base_sampler.run_hobo(hobo, shots)
    }
}

/// Cross-validation for sampler evaluation
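///
/// # Example
///
/// Five-fold evaluation by best energy (a sketch; `problems` is a slice of
/// `CompiledModel`s and the `dwave` feature must be enabled):
///
/// ```ignore
/// let cv = SamplerCrossValidation::new(5, EvaluationMetric::BestEnergy);
/// let report = cv.evaluate(&SASampler::new(None), &problems, 100)?;
/// println!("score: {} +/- {}", report.mean_score, report.std_error);
/// ```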
pub struct SamplerCrossValidation {
    /// Number of folds
    n_folds: usize,
    /// Evaluation metric
    metric: EvaluationMetric,
}

#[derive(Debug, Clone)]
pub enum EvaluationMetric {
    /// Best energy found
    BestEnergy,
    /// Average of top-k energies
    TopKAverage(usize),
    /// Time to solution
    TimeToSolution(f64),
    /// Success probability
    SuccessProbability(f64),
}

impl SamplerCrossValidation {
    /// Create new cross-validation
    pub const fn new(n_folds: usize, metric: EvaluationMetric) -> Self {
        Self { n_folds, metric }
    }

    /// Evaluate sampler with cross-validation
    #[cfg(feature = "dwave")]
    pub fn evaluate<S: Sampler>(
        &self,
        sampler: &S,
        problems: &[CompiledModel],
        shots_per_problem: usize,
    ) -> Result<CrossValidationResult, String> {
        let n_problems = problems.len();
        let fold_size = n_problems / self.n_folds;

        let mut fold_scores = Vec::new();

        for fold in 0..self.n_folds {
            let test_start = fold * fold_size;
            let test_end = if fold == self.n_folds - 1 {
                n_problems
            } else {
                (fold + 1) * fold_size
            };

            let test_problems = &problems[test_start..test_end];

            // Evaluate on test fold
            let mut scores = Vec::new();
            for problem in test_problems {
                let score = self.evaluate_single(sampler, problem, shots_per_problem)?;
                scores.push(score);
            }

            let fold_score = scores.iter().sum::<f64>() / scores.len() as f64;
            fold_scores.push(fold_score);
        }

        let mean_score = fold_scores.iter().sum::<f64>() / fold_scores.len() as f64;
        let variance = fold_scores
            .iter()
            .map(|&s| (s - mean_score).powi(2))
            .sum::<f64>()
            / fold_scores.len() as f64;

        // Standard error of the mean across folds
        Ok(CrossValidationResult {
            mean_score,
            std_error: (variance / fold_scores.len() as f64).sqrt(),
            fold_scores,
        })
    }

    /// Evaluate single problem
    #[cfg(feature = "dwave")]
    fn evaluate_single<S: Sampler>(
        &self,
        sampler: &S,
        problem: &CompiledModel,
        shots: usize,
    ) -> Result<f64, String> {
        let mut qubo = problem.to_qubo();
        let qubo_tuple = (qubo.to_dense_matrix(), qubo.variable_map());
        let start = Instant::now();
        let results = sampler
            .run_qubo(&qubo_tuple, shots)
            .map_err(|e| format!("Sampler error: {e:?}"))?;
        let elapsed = start.elapsed();

        match &self.metric {
            EvaluationMetric::BestEnergy => Ok(results.first().map_or(f64::INFINITY, |r| r.energy)),
            EvaluationMetric::TopKAverage(k) => {
                let sum: f64 = results.iter().take(*k).map(|r| r.energy).sum();
                Ok(sum / (*k).min(results.len()) as f64)
            }
            EvaluationMetric::TimeToSolution(threshold) => {
                let found = results.iter().any(|r| r.energy <= *threshold);
                Ok(if found {
                    elapsed.as_secs_f64()
                } else {
                    f64::INFINITY
                })
            }
            EvaluationMetric::SuccessProbability(threshold) => {
                let successes = results
                    .iter()
                    .filter(|r| r.energy <= *threshold)
                    .map(|r| r.occurrences)
                    .sum::<usize>();
                Ok(successes as f64 / shots as f64)
            }
        }
    }
}

#[derive(Debug, Clone)]
pub struct CrossValidationResult {
    pub mean_score: f64,
    pub std_error: f64,
    pub fold_scores: Vec<f64>,
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::sampler::SASampler;

    #[test]
    fn test_plugin_manager() {
        let manager = PluginManager::new();

        // Would need an actual plugin implementation to test registration
        assert_eq!(manager.list_plugins().len(), 0);
    }

    #[test]
    fn test_hyperparameter_space() {
        let mut optimizer = HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, 10);

        optimizer.add_parameter(
            "temperature",
            ParameterSpace::Continuous {
                min: 0.1,
                max: 10.0,
                log_scale: true,
            },
        );

        optimizer.add_parameter(
            "sweeps",
            ParameterSpace::Discrete {
                values: vec![100.0, 500.0, 1000.0],
            },
        );

        // Would need an actual optimization run to test further
    }

    #[test]
    fn test_ensemble_sampler() {
        let samplers: Vec<Box<dyn Sampler>> = vec![
            Box::new(SASampler::new(Some(42))),
            Box::new(SASampler::new(Some(43))),
        ];

        let _ensemble = EnsembleSampler::new(samplers, EnsembleMethod::Voting);

        // Would need a QUBO problem to test sampling
    }
}