quantrs2_tytan/benchmark/runner.rs

//! Benchmark runner implementation
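//!
//! A minimal end-to-end sketch of the intended workflow. Illustrative only: it
//! assumes this module is reachable as `quantrs2_tytan::benchmark::runner` and
//! that the default CPU backend is available.
//!
//! ```no_run
//! use quantrs2_tytan::benchmark::runner::{BenchmarkConfig, BenchmarkRunner};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Start from the defaults and restrict the sweep to small problems.
//! let config = BenchmarkConfig {
//!     problem_sizes: vec![10, 50],
//!     problem_densities: vec![0.5],
//!     ..Default::default()
//! };
//!
//! // Run every configured sampler/backend combination and collect a report.
//! let runner = BenchmarkRunner::new(config);
//! let _report = runner.run_complete_suite()?;
//! # Ok(())
//! # }
//! ```
//!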
use crate::{
    benchmark::{
        analysis::PerformanceReport,
        hardware::{CpuBackend, HardwareBackend},
        metrics::{BenchmarkMetrics, QualityMetrics, TimingMetrics, UtilizationMetrics},
    },
    sampler::SASampler,
};
use scirs2_core::ndarray::Array2;
use scirs2_core::random::prelude::*;
use serde::{Deserialize, Serialize};
use std::{
    collections::HashMap,
    time::{Duration, Instant},
};

/// Benchmark configuration
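///
/// All fields have working defaults (see the `Default` impl below), so a
/// common pattern is to override only a few of them with struct-update
/// syntax. Illustrative sketch; the module path and output directory are
/// assumptions, not fixed API:
///
/// ```no_run
/// use quantrs2_tytan::benchmark::runner::BenchmarkConfig;
///
/// let config = BenchmarkConfig {
///     problem_sizes: vec![50, 100],
///     num_repetitions: 5,
///     output_dir: Some("benchmark_output".to_string()),
///     ..Default::default()
/// };
/// ```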
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BenchmarkConfig {
    /// Problem sizes to test
    pub problem_sizes: Vec<usize>,
    /// Problem densities to test (fraction of non-zero elements)
    pub problem_densities: Vec<f64>,
    /// Number of samples per problem
    pub num_reads: usize,
    /// Number of repetitions for timing
    pub num_repetitions: usize,
    /// Backends to benchmark
    pub backends: Vec<String>,
    /// Sampler configurations
    pub sampler_configs: Vec<SamplerConfig>,
    /// Whether to save intermediate results
    pub save_intermediate: bool,
    /// Output directory for results
    pub output_dir: Option<String>,
    /// Maximum time per benchmark (seconds)
    pub timeout_seconds: u64,
}

/// Sampler configuration for benchmarking
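///
/// Sampler parameters are passed as a flat name-to-value map. The names below
/// mirror the simulated-annealing parameters used in `BenchmarkConfig::default`
/// and are only an illustration, not an exhaustive list (module path assumed):
///
/// ```no_run
/// use std::collections::HashMap;
/// use quantrs2_tytan::benchmark::runner::SamplerConfig;
///
/// let sa = SamplerConfig {
///     name: "SA".to_string(),
///     params: HashMap::from([
///         ("T_0".to_string(), 5.0),
///         ("T_f".to_string(), 0.01),
///         ("steps".to_string(), 2000.0),
///     ]),
/// };
/// ```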
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SamplerConfig {
    pub name: String,
    pub params: HashMap<String, f64>,
}

impl Default for BenchmarkConfig {
    fn default() -> Self {
        Self {
            problem_sizes: vec![10, 50, 100, 500, 1000],
            problem_densities: vec![0.1, 0.5, 1.0],
            num_reads: 100,
            num_repetitions: 3,
            backends: vec!["cpu".to_string()],
            sampler_configs: vec![
                SamplerConfig {
                    name: "SA".to_string(),
                    params: HashMap::from([
                        ("T_0".to_string(), 10.0),
                        ("T_f".to_string(), 0.01),
                        ("steps".to_string(), 1000.0),
                    ]),
                },
                SamplerConfig {
                    name: "GA".to_string(),
                    params: HashMap::from([
                        ("population_size".to_string(), 50.0),
                        ("max_generations".to_string(), 100.0),
                        ("mutation_rate".to_string(), 0.1),
                    ]),
                },
            ],
            save_intermediate: false,
            output_dir: None,
            timeout_seconds: 300,
        }
    }
}

/// Benchmark runner
pub struct BenchmarkRunner {
    config: BenchmarkConfig,
    backends: Vec<Box<dyn HardwareBackend>>,
    results: Vec<BenchmarkResult>,
}

/// Individual benchmark result
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BenchmarkResult {
    pub backend_name: String,
    pub sampler_name: String,
    pub problem_size: usize,
    pub problem_density: f64,
    pub metrics: BenchmarkMetrics,
    pub timestamp: std::time::SystemTime,
}

impl BenchmarkRunner {
    /// Create new benchmark runner
    pub fn new(config: BenchmarkConfig) -> Self {
        let backends = Self::create_backends(&config);

        Self {
            config,
            backends,
            results: Vec::new(),
        }
    }

    /// Create hardware backends based on configuration
    fn create_backends(config: &BenchmarkConfig) -> Vec<Box<dyn HardwareBackend>> {
        let mut backends: Vec<Box<dyn HardwareBackend>> = Vec::new();

        for backend_name in &config.backends {
            match backend_name.as_str() {
                "cpu" => {
                    // Create CPU backend with SA sampler as default
                    let sampler = Box::new(SASampler::new(None))
                        as Box<dyn crate::sampler::Sampler + Send + Sync>;
                    backends.push(Box::new(CpuBackend::new(sampler)));
                }
                #[cfg(feature = "gpu")]
                "gpu" => {
                    use crate::benchmark::hardware::GpuBackend;
                    backends.push(Box::new(GpuBackend::new(0)));
                }
                "quantum" => {
                    use crate::benchmark::hardware::QuantumBackend;
                    backends.push(Box::new(QuantumBackend::new("simulator".to_string())));
                }
                _ => {
                    eprintln!("Unknown backend: {backend_name}");
                }
            }
        }

        backends
    }

    /// Run complete benchmark suite
    pub fn run_complete_suite(mut self) -> Result<PerformanceReport, Box<dyn std::error::Error>> {
        println!("Starting benchmark suite...");
        println!("Configuration: {:?}", self.config);

        // Initialize backends
        for backend in &mut self.backends {
            if !backend.is_available() {
                eprintln!("Backend {} is not available, skipping", backend.name());
                continue;
            }

            backend.initialize()?;
            println!("Initialized backend: {}", backend.name());
        }

        // Run benchmarks for each configuration
        let total_benchmarks = self.config.problem_sizes.len()
            * self.config.problem_densities.len()
            * self.config.sampler_configs.len()
            * self.backends.len();

        let mut completed = 0;

        for &problem_size in &self.config.problem_sizes {
            for &density in &self.config.problem_densities {
                // Generate test problem
                let matrix = self.generate_qubo_problem(problem_size, density);

                for sampler_config in &self.config.sampler_configs {
                    for backend_idx in 0..self.backends.len() {
                        if !self.backends[backend_idx].is_available() {
                            continue;
                        }

                        let backend_name = self.backends[backend_idx].name().to_string();
                        println!(
                            "Running benchmark {}/{}: {} - {} - size={}, density={}",
                            completed + 1,
                            total_benchmarks,
                            backend_name,
                            sampler_config.name,
                            problem_size,
                            density
                        );

                        let result = {
                            let backend = &mut self.backends[backend_idx];
                            Self::run_single_benchmark(
                                backend.as_mut(),
                                sampler_config,
                                &matrix,
                                problem_size,
                                density,
                                self.config.num_reads,
                                self.config.num_repetitions,
                            )
                        };

                        match result {
                            Ok(result) => {
                                self.results.push(result);
                                completed += 1;
                            }
                            Err(e) => {
                                eprintln!("Benchmark failed: {e}");
                            }
                        }

                        // Save intermediate results if requested
                        if self.config.save_intermediate {
                            self.save_intermediate_results()?;
                        }
                    }
                }
            }
        }

        // Generate performance report
        let report = PerformanceReport::from_results(&self.results)?;

        // Save final results
        if let Some(ref output_dir) = self.config.output_dir {
            self.save_results(output_dir)?;
            report.save_to_file(&format!("{output_dir}/performance_report.json"))?;
        }

        Ok(report)
    }

    /// Run single benchmark
    fn run_single_benchmark(
        backend: &mut dyn HardwareBackend,
        sampler_config: &SamplerConfig,
        matrix: &Array2<f64>,
        problem_size: usize,
        density: f64,
        num_reads: usize,
        num_repetitions: usize,
    ) -> Result<BenchmarkResult, Box<dyn std::error::Error>> {
        let mut metrics = BenchmarkMetrics::new(problem_size, density);

        // Warm-up run
        let _ = backend.run_qubo(matrix, 1, sampler_config.params.clone())?;

        // Timing runs
        let mut timings = Vec::new();
        let mut all_results = Vec::new();

        for _ in 0..num_repetitions {
            // Measure memory before
            let mem_before = Self::get_memory_usage_static();

            let start = Instant::now();
            let _setup_start = start;

            // Run benchmark
            let results = backend.run_qubo(matrix, num_reads, sampler_config.params.clone())?;

            let total_time = start.elapsed();

            // Measure memory after
            let mem_after = Self::get_memory_usage_static();

            timings.push(total_time);
            all_results.extend(results);

            // Update memory metrics
            metrics.memory.peak_memory = metrics.memory.peak_memory.max(mem_after);
            metrics.memory.allocated = mem_after.saturating_sub(mem_before);
        }

        // Calculate timing statistics
        let avg_time = timings.iter().sum::<Duration>() / timings.len() as u32;
        metrics.timings = TimingMetrics {
            total_time: avg_time,
            setup_time: Duration::from_millis(10), // Estimate
            compute_time: avg_time
                .checked_sub(Duration::from_millis(10))
                .unwrap_or(Duration::ZERO),
            postprocess_time: Duration::ZERO,
            time_per_sample: avg_time / num_reads as u32,
            time_to_solution: Some(timings[0]),
        };

        // Calculate quality metrics
        if !all_results.is_empty() {
            let energies: Vec<f64> = all_results.iter().map(|r| r.energy).collect();
            let best_energy = energies.iter().copied().fold(f64::INFINITY, f64::min);
            let avg_energy = energies.iter().sum::<f64>() / energies.len() as f64;
            // Sample variance (n - 1 divisor); guard against a single sample
            // so the division cannot be by zero.
            let variance = energies
                .iter()
                .map(|e| (e - avg_energy).powi(2))
                .sum::<f64>()
                / (energies.len().saturating_sub(1)).max(1) as f64;

            metrics.quality = QualityMetrics {
                best_energy,
                avg_energy,
                energy_std: variance.sqrt(),
                success_probability: 0.0, // Would need known optimal
                time_to_target: None,
                unique_solutions: Self::count_unique_solutions(&all_results),
            };
        }

        // Get hardware metrics
        let hw_metrics = backend.get_metrics();
        metrics.utilization = UtilizationMetrics {
            cpu_usage: hw_metrics.get("cpu_threads").copied().unwrap_or(0.0),
            gpu_usage: hw_metrics.get("gpu_usage").copied(),
            memory_bandwidth: 0.0, // Placeholder
            cache_hit_rate: None,
            power_consumption: None,
        };

        Ok(BenchmarkResult {
            backend_name: backend.name().to_string(),
            sampler_name: sampler_config.name.clone(),
            problem_size,
            problem_density: density,
            metrics,
            timestamp: std::time::SystemTime::now(),
        })
    }

    /// Generate random QUBO problem
    fn generate_qubo_problem(&self, size: usize, density: f64) -> Array2<f64> {
        let mut rng = thread_rng();
        let mut matrix = Array2::zeros((size, size));

        // Generate symmetric matrix with given density
        for i in 0..size {
            for j in i..size {
                if rng.gen::<f64>() < density {
                    let value = rng.gen_range(-10.0..10.0);
                    matrix[[i, j]] = value;
                    if i != j {
                        matrix[[j, i]] = value;
                    }
                }
            }
        }

        matrix
    }

    /// Get current memory usage (static version)
    fn get_memory_usage_static() -> usize {
        // Simple implementation - in practice would use system-specific APIs
        #[cfg(feature = "scirs")]
        {
            if let Ok(usage) = crate::scirs_stub::scirs2_core::memory::get_current_usage() {
                return usage;
            }
        }

        // Fallback: estimate based on process info
        0
    }

    /// Count unique solutions
    fn count_unique_solutions(results: &[crate::sampler::SampleResult]) -> usize {
        use std::collections::HashSet;

        let unique: HashSet<Vec<bool>> = results
            .iter()
            .map(|r| {
                // Convert assignments to ordered vector
                let mut vars: Vec<_> = r.assignments.iter().collect();
                vars.sort_by_key(|(name, _)| name.as_str());
                vars.into_iter().map(|(_, &value)| value).collect()
            })
            .collect();

        unique.len()
    }

    /// Save intermediate results
    fn save_intermediate_results(&self) -> Result<(), Box<dyn std::error::Error>> {
        if let Some(ref dir) = self.config.output_dir {
            let path = format!("{dir}/intermediate_results.json");
            let json = serde_json::to_string_pretty(&self.results)?;
            std::fs::write(path, json)?;
        }
        Ok(())
    }

    /// Save final results
    fn save_results(&self, output_dir: &str) -> Result<(), Box<dyn std::error::Error>> {
        std::fs::create_dir_all(output_dir)?;

        // Save raw results
        let results_path = format!("{output_dir}/benchmark_results.json");
        let json = serde_json::to_string_pretty(&self.results)?;
        std::fs::write(results_path, json)?;

        // Save configuration
        let config_path = format!("{output_dir}/benchmark_config.json");
        let config_json = serde_json::to_string_pretty(&self.config)?;
        std::fs::write(config_path, config_json)?;

        Ok(())
    }
}

/// Quick benchmark function for simple testing
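///
/// Runs a single small sweep (the given size, density 0.5, 10 reads, one
/// repetition) using the default backends and samplers, and returns the
/// aggregated metrics. A minimal usage sketch, assuming the crate is
/// importable as `quantrs2_tytan`:
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let metrics = quantrs2_tytan::benchmark::runner::quick_benchmark(50)?;
/// println!("{metrics:?}");
/// # Ok(())
/// # }
/// ```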
pub fn quick_benchmark(
    problem_size: usize,
) -> Result<BenchmarkMetrics, Box<dyn std::error::Error>> {
    let config = BenchmarkConfig {
        problem_sizes: vec![problem_size],
        problem_densities: vec![0.5],
        num_reads: 10,
        num_repetitions: 1,
        ..Default::default()
    };

    let runner = BenchmarkRunner::new(config);
    let report = runner.run_complete_suite()?;

    Ok(report.summary.overall_metrics)
}
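
// Illustrative unit tests for the local problem generator. These are a sketch:
// they assume `BenchmarkRunner::new` with the default configuration can build
// its CPU backend without touching external hardware.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn generated_qubo_is_symmetric() {
        let runner = BenchmarkRunner::new(BenchmarkConfig::default());
        let size = 16;
        let matrix = runner.generate_qubo_problem(size, 0.5);

        assert_eq!(matrix.dim(), (size, size));
        for i in 0..size {
            for j in 0..size {
                // Off-diagonal entries are mirrored when the problem is generated.
                assert_eq!(matrix[[i, j]], matrix[[j, i]]);
            }
        }
    }

    #[test]
    fn zero_density_yields_all_zero_matrix() {
        let runner = BenchmarkRunner::new(BenchmarkConfig::default());
        let matrix = runner.generate_qubo_problem(8, 0.0);

        // With density 0.0 the acceptance check `rng.gen::<f64>() < 0.0` never
        // succeeds, so no entry is ever written.
        assert!(matrix.iter().all(|&v| v == 0.0));
    }
}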