quantrs2_tytan/sampler_framework.rs

//! Sampler framework extensions for advanced optimization strategies.
//!
//! This module provides plugin architecture, hyperparameter optimization,
//! ensemble methods, and adaptive sampling strategies.

#![allow(dead_code)]

#[cfg(feature = "dwave")]
use crate::compile::CompiledModel;
use crate::sampler::{SampleResult, Sampler, SamplerError, SamplerResult};
use scirs2_core::ndarray::{Array, Array2, IxDyn};
use scirs2_core::random::prelude::*;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};

#[cfg(feature = "scirs")]
use crate::scirs_stub::{
    scirs2_ml::{CrossValidation, RandomForest},
    scirs2_optimization::bayesian::{AcquisitionFunction, BayesianOptimizer, KernelType},
};

/// Plugin trait for custom samplers
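///
/// # Example
///
/// A minimal sketch of a plugin; `MySampler` and its `new` constructor are
/// assumptions for illustration, standing in for any type implementing
/// [`Sampler`]:
///
/// ```ignore
/// struct MyPlugin;
///
/// impl SamplerPlugin for MyPlugin {
///     fn name(&self) -> &str { "my_sampler" }
///     fn version(&self) -> &str { "0.1.0" }
///     fn initialize(&mut self, _config: &HashMap<String, String>) -> Result<(), String> {
///         Ok(())
///     }
///     fn create_sampler(&self) -> Box<dyn Sampler> {
///         Box::new(MySampler::new())
///     }
///     fn default_config(&self) -> HashMap<String, String> {
///         HashMap::new()
///     }
///     fn validate_config(&self, _config: &HashMap<String, String>) -> Result<(), String> {
///         Ok(())
///     }
/// }
/// ```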
pub trait SamplerPlugin: Send + Sync {
    /// Plugin name
    fn name(&self) -> &str;

    /// Plugin version
    fn version(&self) -> &str;

    /// Initialize plugin
    fn initialize(&mut self, config: &HashMap<String, String>) -> Result<(), String>;

    /// Create sampler instance
    fn create_sampler(&self) -> Box<dyn Sampler>;

    /// Get default configuration
    fn default_config(&self) -> HashMap<String, String>;

    /// Validate configuration
    fn validate_config(&self, config: &HashMap<String, String>) -> Result<(), String>;
}

/// Plugin manager for dynamic sampler loading
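///
/// # Example
///
/// A usage sketch; `MyPlugin` is the hypothetical [`SamplerPlugin`]
/// implementation from the trait example above:
///
/// ```ignore
/// let mut manager = PluginManager::new();
/// manager.register_plugin(Box::new(MyPlugin))?;
///
/// // Override the defaults reported by the plugin.
/// let mut config = HashMap::new();
/// config.insert("seed".to_string(), "42".to_string());
/// manager.configure_plugin("my_sampler", config)?;
///
/// let sampler = manager.create_sampler("my_sampler")?;
/// ```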
pub struct PluginManager {
    /// Registered plugins
    plugins: HashMap<String, Box<dyn SamplerPlugin>>,
    /// Plugin configurations
    configs: HashMap<String, HashMap<String, String>>,
}

impl Default for PluginManager {
    fn default() -> Self {
        Self::new()
    }
}

impl PluginManager {
    /// Create new plugin manager
    pub fn new() -> Self {
        Self {
            plugins: HashMap::new(),
            configs: HashMap::new(),
        }
    }

    /// Register a plugin
    pub fn register_plugin(&mut self, plugin: Box<dyn SamplerPlugin>) -> Result<(), String> {
        let name = plugin.name().to_string();

        if self.plugins.contains_key(&name) {
            return Err(format!("Plugin {name} already registered"));
        }

        let default_config = plugin.default_config();
        self.configs.insert(name.clone(), default_config);
        self.plugins.insert(name, plugin);

        Ok(())
    }

    /// Configure plugin
    pub fn configure_plugin(
        &mut self,
        name: &str,
        config: HashMap<String, String>,
    ) -> Result<(), String> {
        let plugin = self
            .plugins
            .get(name)
            .ok_or_else(|| format!("Plugin {name} not found"))?;

        plugin.validate_config(&config)?;
        self.configs.insert(name.to_string(), config);

        Ok(())
    }

    /// Create sampler from plugin
    pub fn create_sampler(&mut self, name: &str) -> Result<Box<dyn Sampler>, String> {
        let plugin = self
            .plugins
            .get_mut(name)
            .ok_or_else(|| format!("Plugin {name} not found"))?;

        let config = self.configs.get(name).cloned().unwrap_or_default();
        plugin.initialize(&config)?;

        Ok(plugin.create_sampler())
    }

    /// List available plugins
    pub fn list_plugins(&self) -> Vec<PluginInfo> {
        self.plugins
            .values()
            .map(|p| PluginInfo {
                name: p.name().to_string(),
                version: p.version().to_string(),
            })
            .collect()
    }
}

#[derive(Debug, Clone)]
pub struct PluginInfo {
    pub name: String,
    pub version: String,
}

/// Hyperparameter optimization for samplers
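///
/// # Example
///
/// A sketch of a random-search run (requires the `dwave` feature);
/// `make_sampler` stands in for any closure that builds a sampler from a
/// parameter assignment, and `problems` for a slice of compiled validation
/// models:
///
/// ```ignore
/// let mut optimizer = HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, 50);
/// optimizer.add_parameter(
///     "temperature",
///     ParameterSpace::Continuous { min: 0.1, max: 10.0, log_scale: true },
/// );
/// optimizer.add_parameter(
///     "sweeps",
///     ParameterSpace::Discrete { values: vec![100.0, 500.0, 1000.0] },
/// );
///
/// let result = optimizer.optimize(make_sampler, &problems)?;
/// println!("best score {} at {:?}", result.best_score, result.best_parameters);
/// ```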
pub struct HyperparameterOptimizer {
    /// Parameter search space
    search_space: HashMap<String, ParameterSpace>,
    /// Optimization method
    method: OptimizationMethod,
    /// Number of trials
    num_trials: usize,
    /// Cross-validation folds
    cv_folds: usize,
}

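/// A single dimension of the hyperparameter search space.
///
/// All sampled values are reported as `f64`: continuous parameters directly
/// (drawn on a log scale when `log_scale` is set), discrete parameters as one
/// of the listed values, and categorical parameters as the index of the
/// chosen option.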
#[derive(Debug, Clone)]
pub enum ParameterSpace {
    /// Continuous parameter
    Continuous { min: f64, max: f64, log_scale: bool },
    /// Discrete parameter
    Discrete { values: Vec<f64> },
    /// Categorical parameter
    Categorical { options: Vec<String> },
}

#[derive(Debug, Clone)]
pub enum OptimizationMethod {
    /// Random search
    RandomSearch,
    /// Grid search
    GridSearch { resolution: usize },
    /// Bayesian optimization
    #[cfg(feature = "scirs")]
    Bayesian {
        kernel: KernelType,
        acquisition: AcquisitionFunction,
        exploration: f64,
    },
    /// Evolutionary optimization
    Evolutionary {
        population_size: usize,
        mutation_rate: f64,
    },
}

impl HyperparameterOptimizer {
    /// Create new optimizer
    pub fn new(method: OptimizationMethod, num_trials: usize) -> Self {
        Self {
            search_space: HashMap::new(),
            method,
            num_trials,
            cv_folds: 5,
        }
    }

    /// Add parameter to search space
    pub fn add_parameter(&mut self, name: &str, space: ParameterSpace) {
        self.search_space.insert(name.to_string(), space);
    }

    /// Optimize hyperparameters
    #[cfg(feature = "dwave")]
    pub fn optimize<F>(
        &self,
        objective: F,
        validation_problems: &[CompiledModel],
    ) -> Result<OptimizationResult, String>
    where
        F: Fn(&HashMap<String, f64>) -> Box<dyn Sampler>,
    {
        match &self.method {
            OptimizationMethod::RandomSearch => self.random_search(objective, validation_problems),
            OptimizationMethod::GridSearch { resolution } => {
                self.grid_search(objective, validation_problems, *resolution)
            }
            #[cfg(feature = "scirs")]
            OptimizationMethod::Bayesian {
                kernel,
                acquisition,
                exploration,
            } => self.bayesian_optimization(
                objective,
                validation_problems,
                *kernel,
                *acquisition,
                *exploration,
            ),
            OptimizationMethod::Evolutionary {
                population_size,
                mutation_rate,
            } => self.evolutionary_optimization(
                objective,
                validation_problems,
                *population_size,
                *mutation_rate,
            ),
        }
    }

    /// Random search implementation
    #[cfg(feature = "dwave")]
    fn random_search<F>(
        &self,
        objective: F,
        validation_problems: &[CompiledModel],
    ) -> Result<OptimizationResult, String>
    where
        F: Fn(&HashMap<String, f64>) -> Box<dyn Sampler>,
    {
        let mut rng = thread_rng();
        let mut best_params = HashMap::new();
        let mut best_score = f64::INFINITY;
        let mut history = Vec::new();

        for trial in 0..self.num_trials {
            // Sample random parameters
            let params = self.sample_parameters(&mut rng)?;

            // Evaluate
            let sampler = objective(&params);
            let score = self.evaluate_sampler(sampler, validation_problems)?;

            history.push(TrialResult {
                parameters: params.clone(),
                score,
                iteration: trial,
            });

            if score < best_score {
                best_score = score;
                best_params = params;
            }
        }

        let convergence_curve = self.compute_convergence_curve(&history);
        Ok(OptimizationResult {
            best_parameters: best_params,
            best_score,
            history,
            convergence_curve,
        })
    }

    /// Grid search implementation
    #[cfg(feature = "dwave")]
    fn grid_search<F>(
        &self,
        objective: F,
        validation_problems: &[CompiledModel],
        resolution: usize,
    ) -> Result<OptimizationResult, String>
    where
        F: Fn(&HashMap<String, f64>) -> Box<dyn Sampler>,
    {
        // Generate grid points
        let grid_points = self.generate_grid(resolution)?;

        let mut best_params = HashMap::new();
        let mut best_score = f64::INFINITY;
        let mut history = Vec::new();

        for (i, params) in grid_points.iter().enumerate() {
            let sampler = objective(params);
            let score = self.evaluate_sampler(sampler, validation_problems)?;

            history.push(TrialResult {
                parameters: params.clone(),
                score,
                iteration: i,
            });

            if score < best_score {
                best_score = score;
                best_params = params.clone();
            }
        }

        let convergence_curve = self.compute_convergence_curve(&history);
        Ok(OptimizationResult {
            best_parameters: best_params,
            best_score,
            history,
            convergence_curve,
        })
    }

    /// Bayesian optimization implementation
    #[cfg(all(feature = "scirs", feature = "dwave"))]
    fn bayesian_optimization<F>(
        &self,
        objective: F,
        validation_problems: &[CompiledModel],
        kernel: KernelType,
        acquisition: AcquisitionFunction,
        exploration: f64,
    ) -> Result<OptimizationResult, String>
    where
        F: Fn(&HashMap<String, f64>) -> Box<dyn Sampler>,
    {
        use scirs2_core::ndarray::Array1;

        let dim = self.search_space.len();
        let mut optimizer = BayesianOptimizer::new(dim, kernel, acquisition, exploration)
            .map_err(|e| e.to_string())?;

        let mut history = Vec::new();
        let mut x_data = Vec::new();
        let mut y_data = Vec::new();

        // Initial random samples
        let mut rng = thread_rng();
        for _ in 0..std::cmp::min(10, self.num_trials / 4) {
            let params = self.sample_parameters(&mut rng)?;
            let sampler = objective(&params);
            let score = self.evaluate_sampler(sampler, validation_problems)?;

            let x = self.params_to_array(&params)?;
            x_data.push(x);
            y_data.push(score);

            history.push(TrialResult {
                parameters: params,
                score,
                iteration: history.len(),
            });
        }

        // Bayesian optimization loop
        let y_array = Array1::from_vec(y_data.clone());
        optimizer
            .update(&x_data, &y_array)
            .map_err(|e| e.to_string())?;

        for _ in history.len()..self.num_trials {
            // Suggest next point
            let x_next = optimizer.suggest_next().map_err(|e| e.to_string())?;
            let params = self.array_to_params(&x_next)?;

            // Evaluate
            let sampler = objective(&params);
            let score = self.evaluate_sampler(sampler, validation_problems)?;

            // Update model
            x_data.push(x_next);
            y_data.push(score);
            let y_array = Array1::from_vec(y_data.clone());
            optimizer
                .update(&x_data, &y_array)
                .map_err(|e| e.to_string())?;

            history.push(TrialResult {
                parameters: params,
                score,
                iteration: history.len(),
            });
        }

        // Find best
        let (best_idx, &best_score) = y_data
            .iter()
            .enumerate()
            .min_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
            .ok_or_else(|| "No optimization trials completed".to_string())?;

        let best_params = self.array_to_params(&x_data[best_idx])?;

        let convergence_curve = self.compute_convergence_curve(&history);
        Ok(OptimizationResult {
            best_parameters: best_params,
            best_score,
            history,
            convergence_curve,
        })
    }

    /// Evolutionary optimization (placeholder)
    #[cfg(feature = "dwave")]
    fn evolutionary_optimization<F>(
        &self,
        _objective: F,
        _validation_problems: &[CompiledModel],
        _population_size: usize,
        _mutation_rate: f64,
    ) -> Result<OptimizationResult, String>
    where
        F: Fn(&HashMap<String, f64>) -> Box<dyn Sampler>,
    {
        Err("Evolutionary optimization not yet implemented".to_string())
    }

    /// Sample parameters from search space
    fn sample_parameters(&self, rng: &mut impl Rng) -> Result<HashMap<String, f64>, String> {
        let mut params = HashMap::new();

        for (name, space) in &self.search_space {
            let value = match space {
                ParameterSpace::Continuous {
                    min,
                    max,
                    log_scale,
                } => {
                    if *log_scale {
                        let log_min = min.ln();
                        let log_max = max.ln();
                        let log_val = rng.gen_range(log_min..log_max);
                        log_val.exp()
                    } else {
                        rng.gen_range(*min..*max)
                    }
                }
                ParameterSpace::Discrete { values } => values[rng.gen_range(0..values.len())],
                ParameterSpace::Categorical { options } => {
                    // Return index for categorical
                    rng.gen_range(0..options.len()) as f64
                }
            };

            params.insert(name.clone(), value);
        }

        Ok(params)
    }

    /// Generate grid points
    fn generate_grid(&self, resolution: usize) -> Result<Vec<HashMap<String, f64>>, String> {
        // Simplified: proper multi-dimensional grid generation is not yet
        // implemented, so sample the space uniformly at random instead,
        // capped at the trial budget.
        let mut grid_points = Vec::new();

        let total_points = resolution.pow(self.search_space.len() as u32);
        let mut rng = thread_rng();

        for _ in 0..total_points.min(self.num_trials) {
            grid_points.push(self.sample_parameters(&mut rng)?);
        }

        Ok(grid_points)
    }

    /// Convert parameters to array
    #[cfg(feature = "scirs")]
    fn params_to_array(
        &self,
        params: &HashMap<String, f64>,
    ) -> Result<scirs2_core::ndarray::Array1<f64>, String> {
        let mut values = Vec::new();

        // Ensure consistent ordering
        let mut names: Vec<_> = self.search_space.keys().collect();
        names.sort();

        for name in names {
            values.push(params.get(name).copied().unwrap_or(0.0));
        }

        Ok(scirs2_core::ndarray::Array1::from_vec(values))
    }

    /// Convert array to parameters
    #[cfg(feature = "scirs")]
    fn array_to_params(
        &self,
        array: &scirs2_core::ndarray::Array1<f64>,
    ) -> Result<HashMap<String, f64>, String> {
        let mut params = HashMap::new();

        let mut names: Vec<_> = self.search_space.keys().collect();
        names.sort();

        for (i, name) in names.iter().enumerate() {
            params.insert((*name).clone(), array[i]);
        }

        Ok(params)
    }

    /// Evaluate sampler performance
    #[cfg(feature = "dwave")]
    fn evaluate_sampler(
        &self,
        sampler: Box<dyn Sampler>,
        problems: &[CompiledModel],
    ) -> Result<f64, String> {
        let mut scores = Vec::new();

        for problem in problems {
            let mut qubo = problem.to_qubo();
            let start = Instant::now();

            let qubo_tuple = (qubo.to_dense_matrix(), qubo.variable_map());
            let results = sampler
                .run_qubo(&qubo_tuple, 100)
                .map_err(|e| format!("Sampler error: {e:?}"))?;

            let elapsed = start.elapsed();

            // Score based on solution quality and time
            let best_energy = results.first().map_or(f64::INFINITY, |r| r.energy);

            let time_penalty = elapsed.as_secs_f64();
            let score = 0.1f64.mul_add(time_penalty, best_energy);

            scores.push(score);
        }

        // Return average score
        Ok(scores.iter().sum::<f64>() / scores.len() as f64)
    }

    /// Compute convergence curve
    fn compute_convergence_curve(&self, history: &[TrialResult]) -> Vec<f64> {
        let mut curve = Vec::new();
        let mut best_so_far = f64::INFINITY;

        for trial in history {
            best_so_far = best_so_far.min(trial.score);
            curve.push(best_so_far);
        }

        curve
    }
}

#[derive(Debug, Clone)]
pub struct OptimizationResult {
    pub best_parameters: HashMap<String, f64>,
    pub best_score: f64,
    pub history: Vec<TrialResult>,
    pub convergence_curve: Vec<f64>,
}

#[derive(Debug, Clone)]
pub struct TrialResult {
    pub parameters: HashMap<String, f64>,
    pub score: f64,
    pub iteration: usize,
}

/// Ensemble sampler that combines multiple sampling strategies
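///
/// # Example
///
/// A sketch combining two annealing samplers by simple voting; `SASampler`
/// follows the constructor used in this crate's tests, and a QUBO tuple
/// `qubo` is assumed to exist:
///
/// ```ignore
/// let samplers: Vec<Box<dyn Sampler>> = vec![
///     Box::new(SASampler::new(Some(42))),
///     Box::new(SASampler::new(Some(43))),
/// ];
/// let ensemble = EnsembleSampler::new(samplers, EnsembleMethod::Voting);
/// let results = ensemble.run_qubo(&qubo, 1000)?;
///
/// // Weighted voting splits the shot budget by (normalized) weight:
/// // EnsembleSampler::new(samplers, EnsembleMethod::WeightedVoting)
/// //     .with_weights(vec![0.7, 0.3]);
/// ```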
pub struct EnsembleSampler {
    /// Base samplers
    samplers: Vec<Box<dyn Sampler>>,
    /// Combination method
    method: EnsembleMethod,
    /// Weights for weighted combination
    weights: Option<Vec<f64>>,
}

#[derive(Debug, Clone)]
pub enum EnsembleMethod {
    /// Simple voting
    Voting,
    /// Weighted voting
    WeightedVoting,
    /// Best of all
    BestOf,
    /// Sequential refinement
    Sequential,
    /// Parallel with aggregation
    Parallel,
}

impl EnsembleSampler {
    /// Create new ensemble sampler
    pub fn new(samplers: Vec<Box<dyn Sampler>>, method: EnsembleMethod) -> Self {
        Self {
            samplers,
            method,
            weights: None,
        }
    }

    /// Set weights for weighted voting
    pub fn with_weights(mut self, weights: Vec<f64>) -> Self {
        self.weights = Some(weights);
        self
    }
}

impl Sampler for EnsembleSampler {
    fn run_qubo(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        match &self.method {
            EnsembleMethod::Voting => self.voting_ensemble(qubo, shots),
            EnsembleMethod::WeightedVoting => self.weighted_voting_ensemble(qubo, shots),
            EnsembleMethod::BestOf => self.best_of_ensemble(qubo, shots),
            EnsembleMethod::Sequential => self.sequential_ensemble(qubo, shots),
            EnsembleMethod::Parallel => self.parallel_ensemble(qubo, shots),
        }
    }

    fn run_hobo(
        &self,
        hobo: &(Array<f64, IxDyn>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        // Similar implementation for HOBO
        match &self.method {
            EnsembleMethod::Voting => self.voting_ensemble_hobo(hobo, shots),
            _ => Err(SamplerError::InvalidParameter(
                "HOBO ensemble not fully implemented".to_string(),
            )),
        }
    }
}

impl EnsembleSampler {
    /// Simple voting ensemble
    fn voting_ensemble(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        if self.samplers.is_empty() {
            return Ok(Vec::new());
        }

        let shots_per_sampler = shots / self.samplers.len();
        let mut all_results = Vec::new();

        // Run each sampler
        for sampler in &self.samplers {
            let results = sampler.run_qubo(qubo, shots_per_sampler)?;
            all_results.extend(results);
        }

        // Aggregate by voting
        let mut vote_counts: HashMap<Vec<bool>, (f64, usize)> = HashMap::new();

        for result in all_results {
            let state: Vec<bool> = qubo.1.keys().map(|var| result.assignments[var]).collect();

            let entry = vote_counts.entry(state).or_insert((result.energy, 0));
            // Keep minimum energy for duplicate states
            entry.0 = entry.0.min(result.energy);
            entry.1 += result.occurrences;
        }

        // Convert back to results
        let mut final_results: Vec<SampleResult> = vote_counts
            .into_iter()
            .map(|(state, (energy, count))| {
                let assignments: HashMap<String, bool> = qubo
                    .1
                    .iter()
                    .zip(state.iter())
                    .map(|((var, _), &val)| (var.clone(), val))
                    .collect();

                SampleResult {
                    assignments,
                    energy,
                    occurrences: count,
                }
            })
            .collect();

        final_results.sort_by(|a, b| {
            a.energy
                .partial_cmp(&b.energy)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        Ok(final_results)
    }

    /// Weighted voting ensemble
    fn weighted_voting_ensemble(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        let weights = self.weights.as_ref().ok_or_else(|| {
            SamplerError::InvalidParameter("Weights not set for weighted voting".to_string())
        })?;

        if weights.len() != self.samplers.len() {
            return Err(SamplerError::InvalidParameter(
                "Number of weights must match number of samplers".to_string(),
            ));
        }

        // Normalize weights
        let total_weight: f64 = weights.iter().sum();
        let normalized: Vec<f64> = weights.iter().map(|&w| w / total_weight).collect();

        let mut all_results = Vec::new();

        // Run each sampler with weighted shots
        for (sampler, &weight) in self.samplers.iter().zip(normalized.iter()) {
            let sampler_shots = (shots as f64 * weight).round() as usize;
            if sampler_shots > 0 {
                let results = sampler.run_qubo(qubo, sampler_shots)?;
                all_results.extend(results);
            }
        }

        // Aggregate results
        self.aggregate_results(all_results, &qubo.1)
    }

    /// Best-of ensemble
    fn best_of_ensemble(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        if self.samplers.is_empty() {
            return Ok(Vec::new());
        }

        let shots_per_sampler = shots / self.samplers.len();
        let mut best_results = Vec::new();
        let mut best_energy = f64::INFINITY;

        // Run each sampler and keep best
        for sampler in &self.samplers {
            let results = sampler.run_qubo(qubo, shots_per_sampler)?;

            if let Some(best) = results.first() {
                if best.energy < best_energy {
                    best_energy = best.energy;
                    best_results = results;
                }
            }
        }

        Ok(best_results)
    }

    /// Sequential refinement ensemble
    fn sequential_ensemble(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        if self.samplers.is_empty() {
            return Ok(Vec::new());
        }

        // Start with first sampler
        let mut current_best = self.samplers[0].run_qubo(qubo, shots)?;

        // Refine with subsequent samplers
        for sampler in self.samplers.iter().skip(1) {
            // Use best solutions as warm start (if sampler supports it)
            // For now, just run independently
            let refined = sampler.run_qubo(qubo, shots / self.samplers.len())?;

            // Merge results
            current_best.extend(refined);
            current_best.sort_by(|a, b| {
                a.energy
                    .partial_cmp(&b.energy)
                    .unwrap_or(std::cmp::Ordering::Equal)
            });
            current_best.truncate(shots);
        }

        Ok(current_best)
    }

    /// Parallel ensemble with aggregation
    fn parallel_ensemble(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        if self.samplers.is_empty() {
            return Ok(Vec::new());
        }

        let shots_per_sampler = shots / self.samplers.len();

        // Would need to make samplers thread-safe for real parallel execution
        // For now, sequential execution
        let mut all_results = Vec::new();

        for sampler in &self.samplers {
            let results = sampler.run_qubo(qubo, shots_per_sampler)?;
            all_results.extend(results);
        }

        self.aggregate_results(all_results, &qubo.1)
    }

    /// Aggregate results from multiple samplers
    fn aggregate_results(
        &self,
        results: Vec<SampleResult>,
        var_map: &HashMap<String, usize>,
    ) -> SamplerResult<Vec<SampleResult>> {
        let mut aggregated: HashMap<Vec<bool>, (f64, usize)> = HashMap::new();

        for result in results {
            let state: Vec<bool> = var_map.keys().map(|var| result.assignments[var]).collect();

            let entry = aggregated.entry(state).or_insert((result.energy, 0));

            // Keep minimum energy for duplicates
            entry.0 = entry.0.min(result.energy);
            entry.1 += result.occurrences;
        }

        let mut final_results: Vec<SampleResult> = aggregated
            .into_iter()
            .map(|(state, (energy, count))| {
                let assignments: HashMap<String, bool> = var_map
                    .iter()
                    .zip(state.iter())
                    .map(|((var, _), &val)| (var.clone(), val))
                    .collect();

                SampleResult {
                    assignments,
                    energy,
                    occurrences: count,
                }
            })
            .collect();

        final_results.sort_by(|a, b| {
            a.energy
                .partial_cmp(&b.energy)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        Ok(final_results)
    }

    /// Voting ensemble for HOBO
    fn voting_ensemble_hobo(
        &self,
        hobo: &(Array<f64, IxDyn>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        // Similar to QUBO voting but for HOBO
        if self.samplers.is_empty() {
            return Ok(Vec::new());
        }

        let shots_per_sampler = shots / self.samplers.len();
        let mut all_results = Vec::new();

        for sampler in &self.samplers {
            let results = sampler.run_hobo(hobo, shots_per_sampler)?;
            all_results.extend(results);
        }

        self.aggregate_results(all_results, &hobo.1)
    }
}

/// Adaptive sampling strategy
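///
/// # Example
///
/// A sketch wrapping a base sampler with temperature adaptation; `SASampler`
/// follows the constructor used in this crate's tests, and a QUBO tuple
/// `qubo` is assumed to exist:
///
/// ```ignore
/// let adaptive = AdaptiveSampler::new(
///     SASampler::new(Some(42)),
///     AdaptationStrategy::TemperatureAdaptive {
///         initial_range: (0.1, 10.0),
///         adaptation_rate: 0.1,
///     },
/// );
/// // Each call records the best energy found and adapts parameters for the
/// // next run based on the recent improvement rate.
/// let results = adaptive.run_qubo(&qubo, 1000)?;
/// ```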
pub struct AdaptiveSampler<S: Sampler> {
    /// Base sampler
    base_sampler: S,
    /// Adaptation strategy
    strategy: AdaptationStrategy,
    /// Performance history
    history: Arc<Mutex<PerformanceHistory>>,
}

#[derive(Debug, Clone)]
pub enum AdaptationStrategy {
    /// Temperature adaptation
    TemperatureAdaptive {
        initial_range: (f64, f64),
        adaptation_rate: f64,
    },
    /// Population size adaptation
    PopulationAdaptive {
        min_size: usize,
        max_size: usize,
        growth_rate: f64,
    },
    /// Multi-armed bandit for strategy selection
    BanditAdaptive {
        strategies: Vec<String>,
        exploration_rate: f64,
    },
    /// Reinforcement learning based
    RLAdaptive {
        state_features: Vec<String>,
        action_space: Vec<String>,
    },
}

#[derive(Default)]
struct PerformanceHistory {
    energies: Vec<f64>,
    times: Vec<Duration>,
    improvements: Vec<f64>,
    parameters: Vec<HashMap<String, f64>>,
}

impl<S: Sampler> AdaptiveSampler<S> {
    /// Create new adaptive sampler
    pub fn new(base_sampler: S, strategy: AdaptationStrategy) -> Self {
        Self {
            base_sampler,
            strategy,
            history: Arc::new(Mutex::new(PerformanceHistory::default())),
        }
    }

    /// Adapt parameters based on performance
    fn adapt_parameters(&self) -> HashMap<String, f64> {
        let history = self
            .history
            .lock()
            .unwrap_or_else(|poisoned| poisoned.into_inner());

        match &self.strategy {
            AdaptationStrategy::TemperatureAdaptive {
                initial_range,
                adaptation_rate,
            } => {
                let mut params = HashMap::new();

                // Adapt temperature based on the recent improvement rate
                let (min_temp, max_temp) = initial_range;
                let temp = if history.improvements.len() > 10 {
                    let recent_improvements: f64 =
                        history.improvements.iter().rev().take(10).sum::<f64>() / 10.0;

                    if recent_improvements < 0.1 {
                        // Low improvement: increase temperature
                        min_temp + (max_temp - min_temp) * (1.0 - adaptation_rate)
                    } else {
                        // Good improvement: decrease temperature
                        min_temp + (max_temp - min_temp) * adaptation_rate
                    }
                } else {
                    (min_temp + max_temp) / 2.0
                };

                params.insert("temperature".to_string(), temp);
                params
            }
            _ => HashMap::new(),
        }
    }
}

impl<S: Sampler> Sampler for AdaptiveSampler<S> {
    fn run_qubo(
        &self,
        qubo: &(Array2<f64>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        // Adapt parameters
        let params = self.adapt_parameters();

        // Run base sampler (would need to apply params)
        let start = Instant::now();
        let results = self.base_sampler.run_qubo(qubo, shots)?;
        let elapsed = start.elapsed();

        // Update history
        if let Some(best) = results.first() {
            let mut history = self
                .history
                .lock()
                .unwrap_or_else(|poisoned| poisoned.into_inner());

            let improvement = if let Some(&last) = history.energies.last() {
                (last - best.energy) / last.abs().max(1.0)
            } else {
                1.0
            };

            history.energies.push(best.energy);
            history.times.push(elapsed);
            history.improvements.push(improvement);
            history.parameters.push(params);
        }

        Ok(results)
    }

    fn run_hobo(
        &self,
        hobo: &(Array<f64, IxDyn>, HashMap<String, usize>),
        shots: usize,
    ) -> SamplerResult<Vec<SampleResult>> {
        // Similar adaptation for HOBO
        self.base_sampler.run_hobo(hobo, shots)
    }
}

/// Cross-validation for sampler evaluation
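///
/// # Example
///
/// A sketch of 5-fold evaluation by best energy (requires the `dwave`
/// feature); `sampler` and the `problems` slice are assumed to exist:
///
/// ```ignore
/// let cv = SamplerCrossValidation::new(5, EvaluationMetric::BestEnergy);
/// let result = cv.evaluate(&sampler, &problems, 1000)?;
/// println!("{:.3} +/- {:.3}", result.mean_score, result.std_error);
/// ```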
pub struct SamplerCrossValidation {
    /// Number of folds
    n_folds: usize,
    /// Evaluation metric
    metric: EvaluationMetric,
}

#[derive(Debug, Clone)]
pub enum EvaluationMetric {
    /// Best energy found
    BestEnergy,
    /// Average of top-k energies
    TopKAverage(usize),
    /// Time to solution
    TimeToSolution(f64),
    /// Success probability
    SuccessProbability(f64),
}

impl SamplerCrossValidation {
    /// Create new cross-validation
    pub const fn new(n_folds: usize, metric: EvaluationMetric) -> Self {
        Self { n_folds, metric }
    }

    /// Evaluate sampler with cross-validation
    #[cfg(feature = "dwave")]
    pub fn evaluate<S: Sampler>(
        &self,
        sampler: &S,
        problems: &[CompiledModel],
        shots_per_problem: usize,
    ) -> Result<CrossValidationResult, String> {
        let n_problems = problems.len();
        let fold_size = n_problems / self.n_folds;

        let mut fold_scores = Vec::new();

        for fold in 0..self.n_folds {
            let test_start = fold * fold_size;
            let test_end = if fold == self.n_folds - 1 {
                n_problems
            } else {
                (fold + 1) * fold_size
            };

            let test_problems = &problems[test_start..test_end];

            // Evaluate on test fold
            let mut scores = Vec::new();
            for problem in test_problems {
                let score = self.evaluate_single(sampler, problem, shots_per_problem)?;
                scores.push(score);
            }

            let fold_score = scores.iter().sum::<f64>() / scores.len() as f64;
            fold_scores.push(fold_score);
        }

        let mean_score = fold_scores.iter().sum::<f64>() / fold_scores.len() as f64;
        let variance = fold_scores
            .iter()
            .map(|&s| (s - mean_score).powi(2))
            .sum::<f64>()
            / fold_scores.len() as f64;

        // Standard error of the mean across folds
        let std_error = (variance / fold_scores.len() as f64).sqrt();

        Ok(CrossValidationResult {
            mean_score,
            std_error,
            fold_scores,
        })
    }

    /// Evaluate single problem
    #[cfg(feature = "dwave")]
    fn evaluate_single<S: Sampler>(
        &self,
        sampler: &S,
        problem: &CompiledModel,
        shots: usize,
    ) -> Result<f64, String> {
        let mut qubo = problem.to_qubo();
        let qubo_tuple = (qubo.to_dense_matrix(), qubo.variable_map());
        let start = Instant::now();
        let results = sampler
            .run_qubo(&qubo_tuple, shots)
            .map_err(|e| format!("Sampler error: {e:?}"))?;
        let elapsed = start.elapsed();

        match &self.metric {
            EvaluationMetric::BestEnergy => Ok(results.first().map_or(f64::INFINITY, |r| r.energy)),
            EvaluationMetric::TopKAverage(k) => {
                let sum: f64 = results.iter().take(*k).map(|r| r.energy).sum();
                Ok(sum / (*k).min(results.len()) as f64)
            }
            EvaluationMetric::TimeToSolution(threshold) => {
                let found = results.iter().any(|r| r.energy <= *threshold);
                Ok(if found {
                    elapsed.as_secs_f64()
                } else {
                    f64::INFINITY
                })
            }
            EvaluationMetric::SuccessProbability(threshold) => {
                let successes = results
                    .iter()
                    .filter(|r| r.energy <= *threshold)
                    .map(|r| r.occurrences)
                    .sum::<usize>();
                Ok(successes as f64 / shots as f64)
            }
        }
    }
}

#[derive(Debug, Clone)]
pub struct CrossValidationResult {
    pub mean_score: f64,
    pub std_error: f64,
    pub fold_scores: Vec<f64>,
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::sampler::SASampler;

    #[test]
    fn test_plugin_manager() {
        let manager = PluginManager::new();

        // Would need actual plugin implementation to test
        assert_eq!(manager.list_plugins().len(), 0);
    }

    #[test]
    fn test_hyperparameter_space() {
        let mut optimizer = HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, 10);

        optimizer.add_parameter(
            "temperature",
            ParameterSpace::Continuous {
                min: 0.1,
                max: 10.0,
                log_scale: true,
            },
        );

        optimizer.add_parameter(
            "sweeps",
            ParameterSpace::Discrete {
                values: vec![100.0, 500.0, 1000.0],
            },
        );

        // Would need actual optimization to test further
    }

    #[test]
    fn test_ensemble_sampler() {
        let samplers: Vec<Box<dyn Sampler>> = vec![
            Box::new(SASampler::new(Some(42))),
            Box::new(SASampler::new(Some(43))),
        ];

        let _ensemble = EnsembleSampler::new(samplers, EnsembleMethod::Voting);

        // Would need a QUBO problem to test
    }
}
1177}