// oximedia_optimize/benchmark.rs
1//! Benchmark and profiling utilities for optimization.
2
3use crate::utils::Timer;
4use std::collections::HashMap;
5use std::time::Duration;
6
/// Benchmark configuration.
#[derive(Debug, Clone)]
pub struct BenchmarkConfig {
    /// Number of warmup runs executed before any timing starts.
    pub warmup_runs: usize,
    /// Number of timed benchmark runs.
    pub benchmark_runs: usize,
    /// Enable detailed profiling.
    pub enable_profiling: bool,
    /// Target FPS.
    pub target_fps: f64,
}

impl Default for BenchmarkConfig {
    /// Default tuning: 3 warmup runs, 10 measured runs, profiling enabled,
    /// and a 30 FPS target.
    fn default() -> Self {
        BenchmarkConfig {
            warmup_runs: 3,
            benchmark_runs: 10,
            enable_profiling: true,
            target_fps: 30.0,
        }
    }
}
30
/// Benchmark result for a single operation.
#[derive(Debug, Clone)]
pub struct BenchmarkResult {
    /// Operation name.
    pub name: String,
    /// Minimum time.
    pub min_time: Duration,
    /// Maximum time.
    pub max_time: Duration,
    /// Average time.
    pub avg_time: Duration,
    /// Median time (upper of the two middle values for even sample counts).
    pub median_time: Duration,
    /// Population standard deviation of the timings.
    pub std_dev: Duration,
    /// Number of iterations.
    pub iterations: usize,
}

impl BenchmarkResult {
    /// Creates a new benchmark result from a slice of raw timings.
    ///
    /// An empty slice yields a zeroed result with `iterations == 0`.
    /// For an even number of samples the median is the upper of the two
    /// middle values (no interpolation is performed).
    #[must_use]
    pub fn from_timings(name: String, timings: &[Duration]) -> Self {
        if timings.is_empty() {
            return Self {
                name,
                min_time: Duration::ZERO,
                max_time: Duration::ZERO,
                avg_time: Duration::ZERO,
                median_time: Duration::ZERO,
                std_dev: Duration::ZERO,
                iterations: 0,
            };
        }

        let mut sorted_timings = timings.to_vec();
        // Stability is irrelevant for plain Durations; sort_unstable avoids
        // the auxiliary allocation a stable sort performs.
        sorted_timings.sort_unstable();

        // sorted_timings is non-empty (checked above), so these indices are in bounds.
        let min_time = sorted_timings[0];
        let max_time = sorted_timings[sorted_timings.len() - 1];
        let median_time = sorted_timings[sorted_timings.len() / 2];

        // Sum in u128 nanoseconds to avoid overflow; the average of real-world
        // timings comfortably fits back into u64 nanoseconds (~584 years).
        let total_nanos: u128 = timings.iter().map(Duration::as_nanos).sum();
        let avg_nanos = total_nanos / timings.len() as u128;
        let avg_time = Duration::from_nanos(avg_nanos as u64);

        // Population standard deviation around the mean, computed in seconds.
        let variance: f64 = timings
            .iter()
            .map(|d| {
                let diff = d.as_secs_f64() - avg_time.as_secs_f64();
                diff * diff
            })
            .sum::<f64>()
            / timings.len() as f64;
        let std_dev = Duration::from_secs_f64(variance.sqrt());

        Self {
            name,
            min_time,
            max_time,
            avg_time,
            median_time,
            std_dev,
            iterations: timings.len(),
        }
    }

    /// Prints the result to stdout in a human-readable form.
    pub fn print(&self) {
        println!("Benchmark: {}", self.name);
        println!("  Iterations: {}", self.iterations);
        println!("  Min:    {:?}", self.min_time);
        println!("  Max:    {:?}", self.max_time);
        println!("  Avg:    {:?}", self.avg_time);
        println!("  Median: {:?}", self.median_time);
        println!("  StdDev: {:?}", self.std_dev);
    }
}
111
/// Performance profiler.
///
/// Accumulates per-label timing samples via paired [`Profiler::start`] /
/// [`Profiler::stop`] calls.
#[derive(Debug, Default)]
pub struct Profiler {
    // Completed measurements, keyed by section label.
    timings: HashMap<String, Vec<Duration>>,
    // Timers for sections that have been started but not yet stopped.
    active_timers: HashMap<String, Timer>,
}
118
119impl Profiler {
120    /// Creates a new profiler.
121    #[must_use]
122    pub fn new() -> Self {
123        Self::default()
124    }
125
126    /// Starts profiling a section.
127    pub fn start(&mut self, label: impl Into<String>) {
128        let label = label.into();
129        self.active_timers.insert(label.clone(), Timer::new(label));
130    }
131
132    /// Stops profiling a section.
133    pub fn stop(&mut self, label: &str) {
134        if let Some(timer) = self.active_timers.remove(label) {
135            let duration = timer.elapsed();
136            self.timings
137                .entry(label.to_string())
138                .or_default()
139                .push(duration);
140        }
141    }
142
143    /// Gets all benchmark results.
144    #[must_use]
145    pub fn results(&self) -> Vec<BenchmarkResult> {
146        self.timings
147            .iter()
148            .map(|(name, timings)| BenchmarkResult::from_timings(name.clone(), timings))
149            .collect()
150    }
151
152    /// Prints all results.
153    pub fn print_results(&self) {
154        println!("\n=== Profiling Results ===");
155        for result in self.results() {
156            result.print();
157            println!();
158        }
159    }
160
161    /// Clears all data.
162    pub fn clear(&mut self) {
163        self.timings.clear();
164        self.active_timers.clear();
165    }
166}
167
168/// Benchmark runner.
169pub struct BenchmarkRunner {
170    config: BenchmarkConfig,
171    profiler: Profiler,
172}
173
174impl BenchmarkRunner {
175    /// Creates a new benchmark runner.
176    #[must_use]
177    pub fn new(config: BenchmarkConfig) -> Self {
178        Self {
179            config,
180            profiler: Profiler::new(),
181        }
182    }
183
184    /// Runs a benchmark.
185    pub fn run<F>(&mut self, name: impl Into<String>, mut f: F) -> BenchmarkResult
186    where
187        F: FnMut(),
188    {
189        let name = name.into();
190
191        // Warmup
192        for _ in 0..self.config.warmup_runs {
193            f();
194        }
195
196        // Benchmark
197        let mut timings = Vec::new();
198        for _ in 0..self.config.benchmark_runs {
199            let timer = Timer::new(&name);
200            f();
201            timings.push(timer.elapsed());
202        }
203
204        BenchmarkResult::from_timings(name, &timings)
205    }
206
207    /// Runs a benchmark with profiling.
208    pub fn run_with_profiling<F>(&mut self, name: impl Into<String>, mut f: F) -> BenchmarkResult
209    where
210        F: FnMut(&mut Profiler),
211    {
212        let name = name.into();
213
214        // Warmup
215        for _ in 0..self.config.warmup_runs {
216            self.profiler.clear();
217            f(&mut self.profiler);
218        }
219
220        // Benchmark
221        self.profiler.clear();
222        let mut timings = Vec::new();
223        for _ in 0..self.config.benchmark_runs {
224            let timer = Timer::new(&name);
225            f(&mut self.profiler);
226            timings.push(timer.elapsed());
227        }
228
229        BenchmarkResult::from_timings(name, &timings)
230    }
231
232    /// Gets profiler.
233    #[must_use]
234    pub fn profiler(&self) -> &Profiler {
235        &self.profiler
236    }
237}
238
239/// Comparison benchmark for different optimization levels.
240pub struct ComparativeBenchmark {
241    results: HashMap<String, BenchmarkResult>,
242}
243
244impl ComparativeBenchmark {
245    /// Creates a new comparative benchmark.
246    #[must_use]
247    pub fn new() -> Self {
248        Self {
249            results: HashMap::new(),
250        }
251    }
252
253    /// Adds a result.
254    pub fn add_result(&mut self, name: impl Into<String>, result: BenchmarkResult) {
255        self.results.insert(name.into(), result);
256    }
257
258    /// Compares results and prints.
259    pub fn print_comparison(&self) {
260        println!("\n=== Comparative Benchmark Results ===");
261
262        // Find baseline (usually "fast" or first entry)
263        let baseline_name = self
264            .results
265            .keys()
266            .find(|k| k.contains("fast") || k.contains("baseline"))
267            .or_else(|| self.results.keys().next())
268            .map(String::as_str);
269
270        if let Some(baseline_name) = baseline_name {
271            if let Some(baseline) = self.results.get(baseline_name) {
272                println!("Baseline: {baseline_name}");
273                println!("  Time: {:?}", baseline.avg_time);
274                println!();
275
276                for (name, result) in &self.results {
277                    if name != baseline_name {
278                        let speedup =
279                            baseline.avg_time.as_secs_f64() / result.avg_time.as_secs_f64();
280                        println!("{name}");
281                        println!("  Time: {:?}", result.avg_time);
282                        println!("  Speedup: {speedup:.2}x");
283                        println!();
284                    }
285                }
286            }
287        }
288    }
289}
290
291impl Default for ComparativeBenchmark {
292    fn default() -> Self {
293        Self::new()
294    }
295}
296
/// Quality-speed tradeoff analyzer.
pub struct TradeoffAnalyzer {
    points: Vec<TradeoffPoint>,
}

/// One measured configuration: its label, how long it took, how good the
/// output was, and how many bits it produced.
#[derive(Debug, Clone)]
struct TradeoffPoint {
    label: String,
    encoding_time: Duration,
    quality: f64,
    bits: u64,
}

impl TradeoffAnalyzer {
    /// Creates an analyzer with no measurement points.
    #[must_use]
    pub fn new() -> Self {
        Self { points: Vec::new() }
    }

    /// Records one measurement point.
    pub fn add_point(
        &mut self,
        label: impl Into<String>,
        encoding_time: Duration,
        quality: f64,
        bits: u64,
    ) {
        let point = TradeoffPoint {
            label: label.into(),
            encoding_time,
            quality,
            bits,
        };
        self.points.push(point);
    }

    /// Fastest configuration whose quality reaches `target_quality`, if any.
    #[must_use]
    pub fn find_optimal_for_quality(&self, target_quality: f64) -> Option<&str> {
        self.points
            .iter()
            .filter(|p| p.quality >= target_quality)
            .min_by_key(|p| p.encoding_time)
            .map(|p| p.label.as_str())
    }

    /// Highest-quality configuration that finishes within `max_time`, if any.
    #[must_use]
    pub fn find_optimal_for_speed(&self, max_time: Duration) -> Option<&str> {
        self.points
            .iter()
            .filter(|p| p.encoding_time <= max_time)
            .max_by(|a, b| {
                a.quality
                    .partial_cmp(&b.quality)
                    .unwrap_or(std::cmp::Ordering::Equal)
            })
            .map(|p| p.label.as_str())
    }

    /// Labels of all points not strictly dominated by another point
    /// (dominated = another point is no slower AND no worse in quality,
    /// and strictly better in at least one of the two).
    #[must_use]
    pub fn pareto_frontier(&self) -> Vec<&str> {
        self.points
            .iter()
            .filter(|candidate| {
                !self.points.iter().any(|other| {
                    let no_worse = other.encoding_time <= candidate.encoding_time
                        && other.quality >= candidate.quality;
                    let strictly_better = other.encoding_time < candidate.encoding_time
                        || other.quality > candidate.quality;
                    no_worse && strictly_better
                })
            })
            .map(|p| p.label.as_str())
            .collect()
    }

    /// Prints the Pareto-optimal configurations to stdout.
    pub fn print_analysis(&self) {
        println!("\n=== Quality-Speed Tradeoff Analysis ===");

        let frontier = self.pareto_frontier();
        println!("Pareto-optimal configurations:");
        for label in frontier {
            if let Some(point) = self.points.iter().find(|p| p.label == label) {
                println!(
                    "  {}: {:.2} dB, {:?}, {} bits",
                    label, point.quality, point.encoding_time, point.bits
                );
            }
        }
    }
}

impl Default for TradeoffAnalyzer {
    fn default() -> Self {
        Self::new()
    }
}
405
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_benchmark_config_default() {
        let cfg = BenchmarkConfig::default();
        assert_eq!(cfg.warmup_runs, 3);
        assert_eq!(cfg.benchmark_runs, 10);
    }

    #[test]
    fn test_benchmark_result_from_timings() {
        let samples = [
            Duration::from_millis(10),
            Duration::from_millis(20),
            Duration::from_millis(15),
        ];
        let result = BenchmarkResult::from_timings("test".to_string(), &samples);
        assert_eq!(result.iterations, 3);
        assert_eq!(result.min_time, Duration::from_millis(10));
        assert_eq!(result.max_time, Duration::from_millis(20));
    }

    #[test]
    fn test_profiler() {
        let mut profiler = Profiler::new();
        profiler.start("test1");
        std::thread::sleep(Duration::from_millis(10));
        profiler.stop("test1");

        let collected = profiler.results();
        assert_eq!(collected.len(), 1);
        assert_eq!(collected[0].name, "test1");
    }

    #[test]
    fn test_benchmark_runner() {
        let mut runner = BenchmarkRunner::new(BenchmarkConfig {
            warmup_runs: 1,
            benchmark_runs: 2,
            enable_profiling: false,
            target_fps: 30.0,
        });

        let result = runner.run("test", || std::thread::sleep(Duration::from_millis(1)));

        assert_eq!(result.iterations, 2);
        assert!(result.avg_time >= Duration::from_millis(1));
    }

    #[test]
    fn test_comparative_benchmark() {
        let mut comp = ComparativeBenchmark::new();
        comp.add_result(
            "fast",
            BenchmarkResult::from_timings("fast".to_string(), &[Duration::from_millis(10)]),
        );
        comp.add_result(
            "slow",
            BenchmarkResult::from_timings("slow".to_string(), &[Duration::from_millis(20)]),
        );
        assert_eq!(comp.results.len(), 2);
    }

    #[test]
    fn test_tradeoff_analyzer() {
        let mut analyzer = TradeoffAnalyzer::new();
        analyzer.add_point("fast", Duration::from_millis(10), 40.0, 1000);
        analyzer.add_point("medium", Duration::from_millis(20), 42.0, 900);
        analyzer.add_point("slow", Duration::from_millis(40), 44.0, 800);

        assert!(analyzer.find_optimal_for_quality(42.0).is_some());
        assert!(analyzer
            .find_optimal_for_speed(Duration::from_millis(25))
            .is_some());
    }

    #[test]
    fn test_pareto_frontier() {
        let mut analyzer = TradeoffAnalyzer::new();
        analyzer.add_point("a", Duration::from_millis(10), 40.0, 1000);
        analyzer.add_point("b", Duration::from_millis(20), 42.0, 900);
        analyzer.add_point("c", Duration::from_millis(15), 41.0, 950);
        analyzer.add_point("d", Duration::from_millis(30), 44.0, 850);

        let frontier = analyzer.pareto_frontier();
        for label in ["a", "b", "d"] {
            assert!(frontier.contains(&label));
        }
        // NOTE(review): "c" is NOT dominated under the strict definition —
        // no point is both no-slower and no-worse in quality — so it also
        // appears on the frontier.
    }
}