// optirs_core/benchmarking/cross_platform_tester.rs

use std::collections::HashMap;
use std::time::{Duration, Instant};

#[cfg(feature = "cross-platform-testing")]
use scirs2_datasets;
use scirs2_metrics::evaluation;
use scirs2_stats::distributions;

use crate::error::{OptimError, Result};

/// Hardware and compute backends that benchmarks can target.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum PlatformTarget {
    /// Host CPU execution.
    CPU,
    /// NVIDIA CUDA devices.
    CUDA,
    /// Apple Metal devices.
    Metal,
    /// OpenCL-capable devices.
    OpenCL,
    /// WebGPU backends.
    WebGPU,
    /// Tensor processing units.
    TPU,
    /// User-defined target identified by name.
    Custom(String),
}

/// Measured performance characteristics for a single platform target.
#[derive(Debug, Clone)]
pub struct PerformanceBaseline {
    /// Platform this baseline was measured on.
    pub target: PlatformTarget,
    /// Sustained throughput in operations per second.
    pub throughput_ops_per_sec: f64,
    /// Mean per-operation latency in milliseconds.
    pub latency_ms: f64,
    /// Peak memory usage in megabytes.
    pub memory_usage_mb: f64,
    /// Energy consumed over the run in joules, if the platform reports it.
    pub energy_consumption_joules: Option<f64>,
    /// Named accuracy metrics collected during the run.
    pub accuracy_metrics: HashMap<String, f64>,
}

impl PerformanceBaseline {
    /// Creates an empty baseline for `target` with all metrics zeroed.
    pub fn new(target: PlatformTarget) -> Self {
        Self {
            target,
            throughput_ops_per_sec: 0.0,
            latency_ms: 0.0,
            memory_usage_mb: 0.0,
            energy_consumption_joules: None,
            accuracy_metrics: HashMap::new(),
        }
    }

    /// Sets the throughput metric (builder style).
    pub fn with_throughput(mut self, ops_per_sec: f64) -> Self {
        self.throughput_ops_per_sec = ops_per_sec;
        self
    }

    /// Sets the latency metric (builder style).
    pub fn with_latency(mut self, latency_ms: f64) -> Self {
        self.latency_ms = latency_ms;
        self
    }

    /// Sets the memory-usage metric (builder style).
    pub fn with_memory_usage(mut self, memory_mb: f64) -> Self {
        self.memory_usage_mb = memory_mb;
        self
    }
}
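
// Example usage (illustrative): exercises the builder methods above with
// placeholder metric values, which are arbitrary and chosen only for the test.
#[cfg(test)]
mod baseline_builder_example {
    use super::*;

    #[test]
    fn builds_a_cpu_baseline() {
        let baseline = PerformanceBaseline::new(PlatformTarget::CPU)
            .with_throughput(1_000.0)
            .with_latency(0.5)
            .with_memory_usage(64.0);
        assert_eq!(baseline.target, PlatformTarget::CPU);
        assert_eq!(baseline.throughput_ops_per_sec, 1_000.0);
        assert_eq!(baseline.latency_ms, 0.5);
    }
}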

/// Runs benchmarks across platform targets and compares the results
/// against recorded baselines.
#[derive(Debug)]
pub struct CrossPlatformTester {
    baselines: HashMap<PlatformTarget, PerformanceBaseline>,
    test_configurations: HashMap<String, TestConfiguration>,
}

/// Parameters controlling a single named benchmark run.
#[derive(Debug, Clone)]
pub struct TestConfiguration {
    /// Unique name used to look up the configuration.
    pub name: String,
    /// Number of measured iterations.
    pub iterations: usize,
    /// Iterations executed before measurement begins.
    pub warmup_iterations: usize,
    /// Size of the input data set, in elements.
    pub data_size: usize,
    /// Maximum wall-clock time allowed for the run.
    pub timeout: Duration,
}

impl CrossPlatformTester {
    /// Creates a tester with no baselines or test configurations.
    pub fn new() -> Self {
        Self {
            baselines: HashMap::new(),
            test_configurations: HashMap::new(),
        }
    }

    /// Registers (or replaces) the baseline for the baseline's own target.
    pub fn add_baseline(&mut self, baseline: PerformanceBaseline) {
        self.baselines.insert(baseline.target.clone(), baseline);
    }

    /// Registers (or replaces) a test configuration under its name.
    pub fn add_test_config(&mut self, config: TestConfiguration) {
        self.test_configurations.insert(config.name.clone(), config);
    }

    /// Runs the named test configuration against `target` and returns the
    /// measured baseline.
    pub fn run_benchmark(
        &self,
        target: &PlatformTarget,
        test_name: &str,
    ) -> Result<PerformanceBaseline> {
        let config = self.test_configurations.get(test_name).ok_or_else(|| {
            OptimError::InvalidConfig(format!("Test configuration '{}' not found", test_name))
        })?;

        let start = Instant::now();

        // Placeholder workload: a real implementation would dispatch the
        // configured benchmark to the requested platform target.
        std::thread::sleep(Duration::from_millis(1));

        let duration = start.elapsed();
        let throughput = config.iterations as f64 / duration.as_secs_f64();

        // Compute latency from fractional seconds; `as_millis()` would
        // truncate sub-millisecond durations to zero.
        Ok(PerformanceBaseline::new(target.clone())
            .with_throughput(throughput)
            .with_latency(duration.as_secs_f64() * 1_000.0 / config.iterations as f64))
    }
    /// Returns the throughput of `target1` relative to `target2`; values
    /// above 1.0 mean `target1` is faster.
    pub fn compare_performance(
        &self,
        target1: &PlatformTarget,
        target2: &PlatformTarget,
    ) -> Result<f64> {
        let baseline1 = self.baselines.get(target1).ok_or_else(|| {
            OptimError::InvalidConfig("Baseline for target1 not found".to_string())
        })?;
        let baseline2 = self.baselines.get(target2).ok_or_else(|| {
            OptimError::InvalidConfig("Baseline for target2 not found".to_string())
        })?;

        // Guard against a zero-throughput divisor, which would otherwise
        // produce a meaningless infinite or NaN ratio.
        if baseline2.throughput_ops_per_sec == 0.0 {
            return Err(OptimError::InvalidConfig(
                "Baseline for target2 has zero throughput".to_string(),
            ));
        }

        Ok(baseline1.throughput_ops_per_sec / baseline2.throughput_ops_per_sec)
    }
}

impl Default for CrossPlatformTester {
    fn default() -> Self {
        Self::new()
    }
}
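
// Example usage (illustrative): wires the tester together end to end. The
// configuration values below are arbitrary placeholders, and the CUDA
// baseline is synthetic so the comparison has something to divide by.
#[cfg(test)]
mod tester_usage_example {
    use super::*;

    #[test]
    fn runs_a_benchmark_and_compares_baselines() {
        let mut tester = CrossPlatformTester::new();
        tester.add_test_config(TestConfiguration {
            name: "smoke".to_string(),
            iterations: 100,
            warmup_iterations: 10,
            data_size: 1024,
            timeout: Duration::from_secs(5),
        });

        // Run the placeholder benchmark on the CPU target and record it.
        let cpu = tester
            .run_benchmark(&PlatformTarget::CPU, "smoke")
            .expect("benchmark should run");
        tester.add_baseline(cpu);

        // Register a second, synthetic baseline for the comparison.
        tester.add_baseline(
            PerformanceBaseline::new(PlatformTarget::CUDA).with_throughput(2_000.0),
        );

        let ratio = tester
            .compare_performance(&PlatformTarget::CPU, &PlatformTarget::CUDA)
            .expect("both baselines are registered");
        assert!(ratio > 0.0);
    }
}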