//! Cross-platform benchmarking support (optirs_core/benchmarking/cross_platform_tester.rs).

use std::collections::HashMap;
7use std::fmt::Debug;
8use std::time::{Duration, Instant};
9
10#[cfg(feature = "cross-platform-testing")]
12use scirs2_datasets;
13use scirs2_stats::distributions;
14
15use crate::error::{OptimError, Result};
16
/// Hardware/compute backend that a benchmark can be targeted at.
///
/// Derives `Eq`/`Hash` so it can be used as a `HashMap` key (see
/// `CrossPlatformTester::baselines`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum PlatformTarget {
    /// Host CPU execution.
    CPU,
    /// NVIDIA CUDA devices.
    CUDA,
    /// Apple Metal devices.
    Metal,
    /// OpenCL-capable devices.
    OpenCL,
    /// WebGPU backends.
    WebGPU,
    /// Tensor Processing Units.
    TPU,
    /// A user-defined target identified by an arbitrary name.
    Custom(String),
}
35
/// Measured (or expected) performance figures for a single platform target.
#[derive(Debug, Clone)]
pub struct PerformanceBaseline {
    /// Platform these figures were measured on.
    pub target: PlatformTarget,
    /// Sustained throughput, in operations per second.
    pub throughput_ops_per_sec: f64,
    /// Average per-operation latency, in milliseconds.
    pub latency_ms: f64,
    /// Peak memory footprint during the run, in megabytes.
    pub memory_usage_mb: f64,
    /// Total energy used by the run, in joules; `None` when the platform
    /// cannot report energy.
    pub energy_consumption_joules: Option<f64>,
    /// Named accuracy metrics (metric name -> value) captured alongside the
    /// performance figures.
    pub accuracy_metrics: HashMap<String, f64>,
}
46
47impl PerformanceBaseline {
48 pub fn new(target: PlatformTarget) -> Self {
49 Self {
50 target,
51 throughput_ops_per_sec: 0.0,
52 latency_ms: 0.0,
53 memory_usage_mb: 0.0,
54 energy_consumption_joules: None,
55 accuracy_metrics: HashMap::new(),
56 }
57 }
58
59 pub fn with_throughput(mut self, ops_per_sec: f64) -> Self {
60 self.throughput_ops_per_sec = ops_per_sec;
61 self
62 }
63
64 pub fn with_latency(mut self, latency_ms: f64) -> Self {
65 self.latency_ms = latency_ms;
66 self
67 }
68
69 pub fn with_memory_usage(mut self, memory_mb: f64) -> Self {
70 self.memory_usage_mb = memory_mb;
71 self
72 }
73}
74
/// Runs named benchmark configurations against platform targets and compares
/// the resulting performance baselines.
#[derive(Debug)]
pub struct CrossPlatformTester {
    /// Most recently registered baseline per platform target.
    baselines: HashMap<PlatformTarget, PerformanceBaseline>,
    /// Registered test configurations, keyed by `TestConfiguration::name`.
    test_configurations: HashMap<String, TestConfiguration>,
}
81
/// Parameters describing how a single benchmark should be executed.
#[derive(Debug, Clone)]
pub struct TestConfiguration {
    /// Unique name used to look the configuration up in the tester.
    pub name: String,
    /// Number of measured iterations.
    pub iterations: usize,
    /// Iterations run before measurement begins (not timed).
    // NOTE(review): warmup_iterations is not yet consumed by run_benchmark's
    // placeholder workload — confirm intended semantics when the real
    // workload is wired in.
    pub warmup_iterations: usize,
    /// Size of the input data set, in elements.
    pub data_size: usize,
    /// Maximum wall-clock time allowed for the run.
    pub timeout: Duration,
}
91
92impl CrossPlatformTester {
93 pub fn new() -> Self {
94 Self {
95 baselines: HashMap::new(),
96 test_configurations: HashMap::new(),
97 }
98 }
99
100 pub fn add_baseline(&mut self, baseline: PerformanceBaseline) {
101 self.baselines.insert(baseline.target.clone(), baseline);
102 }
103
104 pub fn add_test_config(&mut self, config: TestConfiguration) {
105 self.test_configurations.insert(config.name.clone(), config);
106 }
107
108 pub fn run_benchmark(
109 &self,
110 target: &PlatformTarget,
111 test_name: &str,
112 ) -> Result<PerformanceBaseline> {
113 let config = self.test_configurations.get(test_name).ok_or_else(|| {
114 OptimError::InvalidConfig(format!("Test configuration '{}' not found", test_name))
115 })?;
116
117 let start = Instant::now();
119
120 std::thread::sleep(Duration::from_millis(1));
122
123 let duration = start.elapsed();
124 let throughput = config.iterations as f64 / duration.as_secs_f64();
125
126 Ok(PerformanceBaseline::new(target.clone())
127 .with_throughput(throughput)
128 .with_latency(duration.as_millis() as f64 / config.iterations as f64))
129 }
130
131 pub fn compare_performance(
132 &self,
133 target1: &PlatformTarget,
134 target2: &PlatformTarget,
135 ) -> Result<f64> {
136 let baseline1 = self.baselines.get(target1).ok_or_else(|| {
137 OptimError::InvalidConfig("Baseline for target1 not found".to_string())
138 })?;
139 let baseline2 = self.baselines.get(target2).ok_or_else(|| {
140 OptimError::InvalidConfig("Baseline for target2 not found".to_string())
141 })?;
142
143 Ok(baseline1.throughput_ops_per_sec / baseline2.throughput_ops_per_sec)
144 }
145}
146
147impl Default for CrossPlatformTester {
148 fn default() -> Self {
149 Self::new()
150 }
151}