quantrs2_tytan/benchmark/runner.rs

use crate::{
    benchmark::{
        analysis::PerformanceReport,
        hardware::{CpuBackend, HardwareBackend},
        metrics::{BenchmarkMetrics, QualityMetrics, TimingMetrics, UtilizationMetrics},
    },
    sampler::SASampler,
};
use scirs2_core::ndarray::Array2;
use scirs2_core::random::prelude::*;
use serde::{Deserialize, Serialize};
use std::{
    collections::HashMap,
    time::{Duration, Instant},
};

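/// Configuration for a full benchmark sweep: which problem sizes and
/// densities to generate, how many reads and repetitions to run, and
/// which backends and samplers to exercise.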
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BenchmarkConfig {
    pub problem_sizes: Vec<usize>,
    pub problem_densities: Vec<f64>,
    pub num_reads: usize,
    pub num_repetitions: usize,
    pub backends: Vec<String>,
    pub sampler_configs: Vec<SamplerConfig>,
    pub save_intermediate: bool,
    pub output_dir: Option<String>,
    pub timeout_seconds: u64,
}

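/// A sampler selected by name together with its numeric parameters
/// (all parameter values are passed as `f64`).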
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SamplerConfig {
    pub name: String,
    pub params: HashMap<String, f64>,
}

impl Default for BenchmarkConfig {
    fn default() -> Self {
        Self {
            problem_sizes: vec![10, 50, 100, 500, 1000],
            problem_densities: vec![0.1, 0.5, 1.0],
            num_reads: 100,
            num_repetitions: 3,
            backends: vec!["cpu".to_string()],
            sampler_configs: vec![
                SamplerConfig {
                    name: "SA".to_string(),
                    params: HashMap::from([
                        ("T_0".to_string(), 10.0),
                        ("T_f".to_string(), 0.01),
                        ("steps".to_string(), 1000.0),
                    ]),
                },
                SamplerConfig {
                    name: "GA".to_string(),
                    params: HashMap::from([
                        ("population_size".to_string(), 50.0),
                        ("max_generations".to_string(), 100.0),
                        ("mutation_rate".to_string(), 0.1),
                    ]),
                },
            ],
            save_intermediate: false,
            output_dir: None,
            timeout_seconds: 300,
        }
    }
}

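/// Drives a benchmark sweep over every combination of backend, sampler,
/// problem size, and density in the configuration.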
pub struct BenchmarkRunner {
    config: BenchmarkConfig,
    backends: Vec<Box<dyn HardwareBackend>>,
    results: Vec<BenchmarkResult>,
}

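/// The outcome of one (backend, sampler, size, density) combination.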
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BenchmarkResult {
    pub backend_name: String,
    pub sampler_name: String,
    pub problem_size: usize,
    pub problem_density: f64,
    pub metrics: BenchmarkMetrics,
    pub timestamp: std::time::SystemTime,
}

impl BenchmarkRunner {
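    /// Create a runner from a configuration. A minimal usage sketch
    /// (the import path here is assumed; adjust to the crate's actual
    /// module layout):
    ///
    /// ```ignore
    /// use quantrs2_tytan::benchmark::runner::{BenchmarkConfig, BenchmarkRunner};
    ///
    /// let runner = BenchmarkRunner::new(BenchmarkConfig::default());
    /// let report = runner.run_complete_suite()?;
    /// ```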
    pub fn new(config: BenchmarkConfig) -> Self {
        let backends = Self::create_backends(&config);

        Self {
            config,
            backends,
            results: Vec::new(),
        }
    }

    fn create_backends(config: &BenchmarkConfig) -> Vec<Box<dyn HardwareBackend>> {
        let mut backends: Vec<Box<dyn HardwareBackend>> = Vec::new();

        for backend_name in &config.backends {
            match backend_name.as_str() {
                "cpu" => {
                    let sampler = Box::new(SASampler::new(None))
                        as Box<dyn crate::sampler::Sampler + Send + Sync>;
                    backends.push(Box::new(CpuBackend::new(sampler)));
                }
                #[cfg(feature = "gpu")]
                "gpu" => {
                    use crate::benchmark::hardware::GpuBackend;
                    backends.push(Box::new(GpuBackend::new(0)));
                }
                "quantum" => {
                    use crate::benchmark::hardware::QuantumBackend;
                    backends.push(Box::new(QuantumBackend::new("simulator".to_string())));
                }
                _ => {
                    eprintln!("Unknown backend: {backend_name}");
                }
            }
        }

        backends
    }

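    /// Run every configured combination, collect per-run metrics, and
    /// aggregate them into a `PerformanceReport`. Results and the report
    /// are written to `output_dir` if one is configured.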
    pub fn run_complete_suite(mut self) -> Result<PerformanceReport, Box<dyn std::error::Error>> {
        println!("Starting benchmark suite...");
        println!("Configuration: {:?}", self.config);

        // Initialize every requested backend, skipping those that are unavailable.
        for backend in &mut self.backends {
            if !backend.is_available() {
                eprintln!("Backend {} is not available, skipping", backend.name());
                continue;
            }

            backend.initialize()?;
            println!("Initialized backend: {}", backend.name());
        }

        let total_benchmarks = self.config.problem_sizes.len()
            * self.config.problem_densities.len()
            * self.config.sampler_configs.len()
            * self.backends.len();

        let mut attempted = 0;

        for &problem_size in &self.config.problem_sizes {
            for &density in &self.config.problem_densities {
                let matrix = self.generate_qubo_problem(problem_size, density);

                for sampler_config in &self.config.sampler_configs {
                    for backend_idx in 0..self.backends.len() {
                        if !self.backends[backend_idx].is_available() {
                            continue;
                        }

                        // Count attempts rather than successes so the progress
                        // display does not fall behind after a failed run.
                        attempted += 1;
                        let backend_name = self.backends[backend_idx].name().to_string();
                        println!(
                            "Running benchmark {}/{}: {} - {} - size={}, density={}",
                            attempted,
                            total_benchmarks,
                            backend_name,
                            sampler_config.name,
                            problem_size,
                            density
                        );

                        let result = {
                            let backend = &mut self.backends[backend_idx];
                            Self::run_single_benchmark(
                                backend.as_mut(),
                                sampler_config,
                                &matrix,
                                problem_size,
                                density,
                                self.config.num_reads,
                                self.config.num_repetitions,
                            )
                        };

                        match result {
                            Ok(result) => {
                                self.results.push(result);
                            }
                            Err(e) => {
                                eprintln!("Benchmark failed: {e}");
                            }
                        }

                        if self.config.save_intermediate {
                            self.save_intermediate_results()?;
                        }
                    }
                }
            }
        }

        let report = PerformanceReport::from_results(&self.results)?;

        if let Some(ref output_dir) = self.config.output_dir {
            self.save_results(output_dir)?;
            report.save_to_file(&format!("{output_dir}/performance_report.json"))?;
        }

        Ok(report)
    }

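    /// Benchmark one sampler on one backend: a warm-up call, then
    /// `num_repetitions` timed runs of `num_reads` reads each, with timing,
    /// solution-quality, and utilization metrics aggregated over all runs.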
    fn run_single_benchmark(
        backend: &mut dyn HardwareBackend,
        sampler_config: &SamplerConfig,
        matrix: &Array2<f64>,
        problem_size: usize,
        density: f64,
        num_reads: usize,
        num_repetitions: usize,
    ) -> Result<BenchmarkResult, Box<dyn std::error::Error>> {
        let mut metrics = BenchmarkMetrics::new(problem_size, density);

        // Warm-up run, excluded from the timed measurements.
        let _ = backend.run_qubo(matrix, 1, sampler_config.params.clone())?;

        let mut timings = Vec::new();
        let mut all_results = Vec::new();

        for _ in 0..num_repetitions {
            let mem_before = Self::get_memory_usage_static();

            let start = Instant::now();
            let results = backend.run_qubo(matrix, num_reads, sampler_config.params.clone())?;
            let total_time = start.elapsed();

            let mem_after = Self::get_memory_usage_static();

            timings.push(total_time);
            all_results.extend(results);

            metrics.memory.peak_memory = metrics.memory.peak_memory.max(mem_after);
            metrics.memory.allocated = mem_after.saturating_sub(mem_before);
        }

        let avg_time = timings.iter().sum::<Duration>() / timings.len().max(1) as u32;
        metrics.timings = TimingMetrics {
            total_time: avg_time,
            // Fixed estimate; the backend does not report setup time separately.
            setup_time: Duration::from_millis(10),
            compute_time: avg_time
                .checked_sub(Duration::from_millis(10))
                .unwrap_or(Duration::ZERO),
            postprocess_time: Duration::ZERO,
            time_per_sample: avg_time / num_reads.max(1) as u32,
            time_to_solution: timings.first().copied(),
        };

        if !all_results.is_empty() {
            let energies: Vec<f64> = all_results.iter().map(|r| r.energy).collect();
            let best_energy = energies.iter().copied().fold(f64::INFINITY, f64::min);
            let avg_energy = energies.iter().sum::<f64>() / energies.len() as f64;
            // Sample variance; guard against division by zero for a single sample.
            let variance = if energies.len() > 1 {
                energies
                    .iter()
                    .map(|e| (e - avg_energy).powi(2))
                    .sum::<f64>()
                    / (energies.len() - 1) as f64
            } else {
                0.0
            };

            metrics.quality = QualityMetrics {
                best_energy,
                avg_energy,
                energy_std: variance.sqrt(),
                // No known target energy, so success probability is not measured.
                success_probability: 0.0,
                time_to_target: None,
                unique_solutions: Self::count_unique_solutions(&all_results),
            };
        }

        let hw_metrics = backend.get_metrics();
        metrics.utilization = UtilizationMetrics {
            cpu_usage: hw_metrics.get("cpu_threads").copied().unwrap_or(0.0),
            gpu_usage: hw_metrics.get("gpu_usage").copied(),
            // Not measured by the current backends.
            memory_bandwidth: 0.0,
            cache_hit_rate: None,
            power_consumption: None,
        };

        Ok(BenchmarkResult {
            backend_name: backend.name().to_string(),
            sampler_name: sampler_config.name.clone(),
            problem_size,
            problem_density: density,
            metrics,
            timestamp: std::time::SystemTime::now(),
        })
    }

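    /// Generate a random symmetric QUBO matrix: each upper-triangular entry
    /// is populated with probability `density` and drawn uniformly from
    /// [-10, 10); the lower triangle mirrors it.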
    fn generate_qubo_problem(&self, size: usize, density: f64) -> Array2<f64> {
        let mut rng = thread_rng();
        let mut matrix = Array2::zeros((size, size));

        for i in 0..size {
            for j in i..size {
                if rng.gen::<f64>() < density {
                    let value = rng.gen_range(-10.0..10.0);
                    matrix[[i, j]] = value;
                    if i != j {
                        matrix[[j, i]] = value;
                    }
                }
            }
        }

        matrix
    }

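    /// Best-effort current memory usage; returns 0 when the `scirs`
    /// feature is disabled or the query fails.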
    fn get_memory_usage_static() -> usize {
        #[cfg(feature = "scirs")]
        {
            if let Ok(usage) = crate::scirs_stub::scirs2_core::memory::get_current_usage() {
                return usage;
            }
        }

        0
    }

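    /// Count distinct assignments by canonicalizing each sample: variables
    /// are sorted by name so that equal assignments hash identically
    /// regardless of map iteration order.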
    fn count_unique_solutions(results: &[crate::sampler::SampleResult]) -> usize {
        use std::collections::HashSet;

        let unique: HashSet<Vec<bool>> = results
            .iter()
            .map(|r| {
                let mut vars: Vec<_> = r.assignments.iter().collect();
                vars.sort_by_key(|(name, _)| name.as_str());
                vars.into_iter().map(|(_, &value)| value).collect()
            })
            .collect();

        unique.len()
    }

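    /// Persist the results gathered so far, so a long sweep can be
    /// inspected (or recovered) before it finishes.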
    fn save_intermediate_results(&self) -> Result<(), Box<dyn std::error::Error>> {
        if let Some(ref dir) = self.config.output_dir {
            // Ensure the directory exists: intermediate saves can happen
            // before save_results() creates it at the end of the suite.
            std::fs::create_dir_all(dir)?;
            let path = format!("{dir}/intermediate_results.json");
            let json = serde_json::to_string_pretty(&self.results)?;
            std::fs::write(path, json)?;
        }
        Ok(())
    }

    fn save_results(&self, output_dir: &str) -> Result<(), Box<dyn std::error::Error>> {
        std::fs::create_dir_all(output_dir)?;

        let results_path = format!("{output_dir}/benchmark_results.json");
        let json = serde_json::to_string_pretty(&self.results)?;
        std::fs::write(results_path, json)?;

        let config_path = format!("{output_dir}/benchmark_config.json");
        let config_json = serde_json::to_string_pretty(&self.config)?;
        std::fs::write(config_path, config_json)?;

        Ok(())
    }
}

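/// Convenience wrapper: benchmark a single problem size at density 0.5
/// with 10 reads and one repetition, returning the overall metrics.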
pub fn quick_benchmark(
    problem_size: usize,
) -> Result<BenchmarkMetrics, Box<dyn std::error::Error>> {
    let config = BenchmarkConfig {
        problem_sizes: vec![problem_size],
        problem_densities: vec![0.5],
        num_reads: 10,
        num_repetitions: 1,
        ..Default::default()
    };

    let runner = BenchmarkRunner::new(config);
    let report = runner.run_complete_suite()?;

    Ok(report.summary.overall_metrics)
}