// voirs_spatial/performance.rs
1//! Performance Testing and Validation for Spatial Audio
2//!
3//! This module provides comprehensive performance testing capabilities including
4//! latency measurement, CPU usage monitoring, memory analysis, and throughput testing.
5
6use crate::{
7    AmbisonicsProcessor, BinauralRenderer, Error, Position3D, Result, SpeakerConfiguration,
8};
9use scirs2_core::ndarray::{Array1, Array2};
10use serde::{Deserialize, Serialize};
11use std::collections::HashMap;
12use std::sync::{Arc, Mutex};
13use std::thread;
14use std::time::{Duration, Instant};
15
/// Performance test configuration
///
/// Controls iteration counts, audio parameters, and the latency thresholds
/// used to judge pass/fail for VR, gaming, and general-purpose targets.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceConfig {
    /// Number of test iterations per benchmark
    pub iterations: usize,
    /// Test duration for long-running tests
    pub test_duration: Duration,
    /// Number of audio sources to test with
    pub source_count: usize,
    /// Audio sample rate in Hz
    pub sample_rate: u32,
    /// Buffer size for testing (samples per processing call)
    pub buffer_size: usize,
    /// Enable memory usage tracking
    pub track_memory: bool,
    /// Enable CPU usage tracking
    pub track_cpu: bool,
    /// Target latency thresholds, in order: (VR, Gaming, General)
    pub latency_thresholds: (Duration, Duration, Duration),
}
36
37impl Default for PerformanceConfig {
38    fn default() -> Self {
39        Self {
40            iterations: 1000,
41            test_duration: Duration::from_secs(60),
42            source_count: 8,
43            sample_rate: 44100,
44            buffer_size: 512,
45            track_memory: true,
46            track_cpu: true,
47            latency_thresholds: (
48                Duration::from_millis(20), // VR
49                Duration::from_millis(30), // Gaming
50                Duration::from_millis(50), // General
51            ),
52        }
53    }
54}
55
/// Performance metrics collected during testing
///
/// One instance is produced per benchmark; the percentile fields are
/// computed from the sorted per-iteration latencies.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceMetrics {
    /// Test name
    pub test_name: String,
    /// Average processing latency
    pub avg_latency: Duration,
    /// Minimum processing latency
    pub min_latency: Duration,
    /// Maximum processing latency
    pub max_latency: Duration,
    /// 95th percentile latency
    pub p95_latency: Duration,
    /// 99th percentile latency
    pub p99_latency: Duration,
    /// Average CPU usage as a percentage (e.g. 25.0 == 25%)
    pub avg_cpu_usage: f32,
    /// Peak CPU usage as a percentage
    pub peak_cpu_usage: f32,
    /// Average memory usage in bytes
    pub avg_memory_usage: usize,
    /// Peak memory usage in bytes
    pub peak_memory_usage: usize,
    /// Audio processing throughput (samples/second)
    pub throughput: f64,
    /// Number of test iterations
    pub iterations: usize,
    /// Test success rate (0.0 to 1.0)
    pub success_rate: f32,
    /// Additional custom metrics, keyed by metric name
    pub custom_metrics: HashMap<String, f64>,
}
88
89impl PerformanceMetrics {
90    /// Create new empty metrics
91    pub fn new(test_name: String) -> Self {
92        Self {
93            test_name,
94            avg_latency: Duration::ZERO,
95            min_latency: Duration::MAX,
96            max_latency: Duration::ZERO,
97            p95_latency: Duration::ZERO,
98            p99_latency: Duration::ZERO,
99            avg_cpu_usage: 0.0,
100            peak_cpu_usage: 0.0,
101            avg_memory_usage: 0,
102            peak_memory_usage: 0,
103            throughput: 0.0,
104            iterations: 0,
105            success_rate: 0.0,
106            custom_metrics: HashMap::new(),
107        }
108    }
109
110    /// Check if metrics meet performance targets
111    pub fn meets_targets(&self, config: &PerformanceConfig) -> PerformanceTargetResult {
112        let mut result = PerformanceTargetResult {
113            vr_latency_met: self.p95_latency <= config.latency_thresholds.0,
114            gaming_latency_met: self.p95_latency <= config.latency_thresholds.1,
115            general_latency_met: self.p95_latency <= config.latency_thresholds.2,
116            cpu_usage_acceptable: self.avg_cpu_usage < 25.0, // <25% CPU
117            success_rate_acceptable: self.success_rate >= 0.95, // 95%+ success
118            issues: Vec::new(),
119        };
120
121        if !result.vr_latency_met {
122            result.issues.push(format!(
123                "VR latency target not met: {}ms > {}ms",
124                self.p95_latency.as_millis(),
125                config.latency_thresholds.0.as_millis()
126            ));
127        }
128
129        if !result.cpu_usage_acceptable {
130            result.issues.push(format!(
131                "CPU usage too high: {:.1}% > 25.0%",
132                self.avg_cpu_usage
133            ));
134        }
135
136        if !result.success_rate_acceptable {
137            result.issues.push(format!(
138                "Success rate too low: {:.1}% < 95.0%",
139                self.success_rate * 100.0
140            ));
141        }
142
143        result
144    }
145}
146
/// Performance target validation results
///
/// Produced by [`PerformanceMetrics::meets_targets`]; each flag records
/// one threshold check, with failures described in `issues`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceTargetResult {
    /// VR latency target met (p95 within the VR threshold, default 20ms)
    pub vr_latency_met: bool,
    /// Gaming latency target met (p95 within the gaming threshold, default 30ms)
    pub gaming_latency_met: bool,
    /// General latency target met (p95 within the general threshold, default 50ms)
    pub general_latency_met: bool,
    /// CPU usage acceptable (<25%)
    pub cpu_usage_acceptable: bool,
    /// Success rate acceptable (>=95%)
    pub success_rate_acceptable: bool,
    /// List of issues found (human-readable failure descriptions)
    pub issues: Vec<String>,
}
163
/// System resource monitor
///
/// [`ResourceMonitor::start`] spawns a background thread that samples CPU
/// and memory usage at ~10 Hz until [`ResourceMonitor::stop`] is called.
pub struct ResourceMonitor {
    // When monitoring began; used to compute the statistics duration.
    start_time: Instant,
    // Samples collected by the background thread, shared across threads.
    samples: Arc<Mutex<Vec<ResourceSample>>>,
    // Set to true by `stop()` to ask the background thread to exit.
    stop_flag: Arc<Mutex<bool>>,
}
170
/// Resource sample point
///
/// A single CPU/memory reading taken by the monitor thread.
#[derive(Debug, Clone)]
struct ResourceSample {
    // When the sample was taken; the last sample bounds the total duration.
    timestamp: Instant,
    // CPU usage as a percentage (the mock sampler yields 0-10%).
    cpu_usage: f32,
    // Memory usage in bytes (the mock sampler yields ~100-150 MB).
    memory_usage: usize,
}
178
179impl ResourceMonitor {
180    /// Start monitoring system resources
181    pub fn start() -> Self {
182        let samples = Arc::new(Mutex::new(Vec::new()));
183        let stop_flag = Arc::new(Mutex::new(false));
184
185        let samples_clone = samples.clone();
186        let stop_clone = stop_flag.clone();
187
188        // Start monitoring thread
189        thread::spawn(move || {
190            while !*stop_clone
191                .lock()
192                .expect("Failed to acquire lock on stop flag in monitor thread")
193            {
194                let sample = ResourceSample {
195                    timestamp: Instant::now(),
196                    cpu_usage: Self::get_cpu_usage(),
197                    memory_usage: Self::get_memory_usage(),
198                };
199
200                samples_clone
201                    .lock()
202                    .expect("Failed to acquire lock on samples in monitor thread")
203                    .push(sample);
204                thread::sleep(Duration::from_millis(100)); // 10Hz sampling
205            }
206        });
207
208        Self {
209            start_time: Instant::now(),
210            samples,
211            stop_flag,
212        }
213    }
214
215    /// Stop monitoring and return statistics
216    pub fn stop(self) -> ResourceStatistics {
217        *self
218            .stop_flag
219            .lock()
220            .expect("Failed to acquire lock on stop flag") = true;
221        thread::sleep(Duration::from_millis(200)); // Allow thread to finish
222
223        let samples = self
224            .samples
225            .lock()
226            .expect("Failed to acquire lock on samples")
227            .clone();
228        ResourceStatistics::from_samples(samples, self.start_time)
229    }
230
231    /// Get current CPU usage (simplified implementation)
232    fn get_cpu_usage() -> f32 {
233        // In a real implementation, this would read from /proc/stat on Linux
234        // or use system APIs. For testing, return a mock value.
235        fastrand::f32() * 10.0 // Random 0-10% CPU usage
236    }
237
238    /// Get current memory usage (simplified implementation)
239    fn get_memory_usage() -> usize {
240        // In a real implementation, this would read system memory usage
241        // For testing, return a mock value.
242        100_000_000 + (fastrand::usize(..50_000_000)) // 100-150 MB
243    }
244}
245
/// Resource usage statistics
///
/// Aggregated from the ~10 Hz samples collected by [`ResourceMonitor`].
#[derive(Debug, Clone)]
pub struct ResourceStatistics {
    /// Average CPU usage as a percentage (the mock sampler yields 0-10%)
    pub avg_cpu_usage: f32,
    /// Peak CPU usage as a percentage
    pub peak_cpu_usage: f32,
    /// Average memory usage in bytes
    pub avg_memory_usage: usize,
    /// Peak memory usage in bytes
    pub peak_memory_usage: usize,
    /// Duration of the measurement period (first start to last sample)
    pub duration: Duration,
    /// Number of samples taken
    pub sample_count: usize,
}
262
263impl ResourceStatistics {
264    fn from_samples(samples: Vec<ResourceSample>, start_time: Instant) -> Self {
265        if samples.is_empty() {
266            return Self {
267                avg_cpu_usage: 0.0,
268                peak_cpu_usage: 0.0,
269                avg_memory_usage: 0,
270                peak_memory_usage: 0,
271                duration: Duration::ZERO,
272                sample_count: 0,
273            };
274        }
275
276        let avg_cpu = samples.iter().map(|s| s.cpu_usage).sum::<f32>() / samples.len() as f32;
277        let peak_cpu = samples.iter().map(|s| s.cpu_usage).fold(0.0, f32::max);
278        let avg_memory = samples.iter().map(|s| s.memory_usage).sum::<usize>() / samples.len();
279        let peak_memory = samples.iter().map(|s| s.memory_usage).max().unwrap_or(0);
280
281        let duration = samples
282            .last()
283            .expect("Samples should not be empty at this point")
284            .timestamp
285            - start_time;
286
287        Self {
288            avg_cpu_usage: avg_cpu,
289            peak_cpu_usage: peak_cpu,
290            avg_memory_usage: avg_memory,
291            peak_memory_usage: peak_memory,
292            duration,
293            sample_count: samples.len(),
294        }
295    }
296}
297
/// Comprehensive performance test suite
///
/// Owns the configuration and accumulates one [`PerformanceMetrics`]
/// entry per benchmark run.
pub struct PerformanceTestSuite {
    // Parameters shared by all benchmarks.
    config: PerformanceConfig,
    // Results appended by each `test_*` method.
    results: Vec<PerformanceMetrics>,
}
303
304impl PerformanceTestSuite {
305    /// Create new test suite
306    pub fn new(config: PerformanceConfig) -> Self {
307        Self {
308            config,
309            results: Vec::new(),
310        }
311    }
312
313    /// Run all performance tests
314    pub fn run_all_tests(&mut self) -> Result<Vec<PerformanceMetrics>> {
315        tracing::info!("Starting comprehensive performance test suite");
316
317        // Test binaural rendering performance
318        self.test_binaural_rendering()?;
319
320        // Test ambisonics processing performance
321        self.test_ambisonics_processing()?;
322
323        // Test multi-source processing
324        self.test_multi_source_processing()?;
325
326        // Test real-time latency
327        self.test_real_time_latency()?;
328
329        // Test memory efficiency
330        self.test_memory_efficiency()?;
331
332        // Test throughput scaling
333        self.test_throughput_scaling()?;
334
335        tracing::info!(
336            "Performance test suite completed: {} tests",
337            self.results.len()
338        );
339        Ok(self.results.clone())
340    }
341
342    /// Test binaural rendering performance
343    fn test_binaural_rendering(&mut self) -> Result<()> {
344        let mut metrics = PerformanceMetrics::new("Binaural Rendering".to_string());
345        let mut latencies = Vec::new();
346        let mut successes = 0;
347
348        let monitor = ResourceMonitor::start();
349
350        // Create test audio and renderer
351        let audio_samples = Array1::from_vec(vec![0.1; self.config.buffer_size]);
352        let position = Position3D::new(1.0, 0.5, 0.0);
353
354        // Mock binaural renderer for testing
355        for i in 0..self.config.iterations {
356            let start = Instant::now();
357
358            // Simulate binaural processing
359            let _ = self.simulate_binaural_processing(&audio_samples, &position);
360
361            let latency = start.elapsed();
362            latencies.push(latency);
363
364            if latency <= self.config.latency_thresholds.2 {
365                successes += 1;
366            }
367
368            if i % 100 == 0 {
369                tracing::debug!("Binaural test progress: {}/{}", i, self.config.iterations);
370            }
371        }
372
373        let resource_stats = monitor.stop();
374
375        // Calculate metrics
376        latencies.sort();
377        metrics.avg_latency = Duration::from_nanos(
378            (latencies.iter().map(|d| d.as_nanos()).sum::<u128>() / latencies.len() as u128) as u64,
379        );
380        metrics.min_latency = latencies[0];
381        metrics.max_latency = latencies[latencies.len() - 1];
382        metrics.p95_latency = latencies[(latencies.len() as f32 * 0.95) as usize];
383        metrics.p99_latency = latencies[(latencies.len() as f32 * 0.99) as usize];
384        metrics.avg_cpu_usage = resource_stats.avg_cpu_usage;
385        metrics.peak_cpu_usage = resource_stats.peak_cpu_usage;
386        metrics.avg_memory_usage = resource_stats.avg_memory_usage;
387        metrics.peak_memory_usage = resource_stats.peak_memory_usage;
388        metrics.iterations = self.config.iterations;
389        metrics.success_rate = successes as f32 / self.config.iterations as f32;
390        metrics.throughput = (self.config.iterations * self.config.buffer_size) as f64
391            / resource_stats.duration.as_secs_f64();
392
393        self.results.push(metrics);
394        Ok(())
395    }
396
397    /// Test ambisonics processing performance
398    fn test_ambisonics_processing(&mut self) -> Result<()> {
399        let mut metrics = PerformanceMetrics::new("Ambisonics Processing".to_string());
400        let mut latencies = Vec::new();
401        let mut successes = 0;
402
403        let monitor = ResourceMonitor::start();
404
405        // Create test data
406        let audio_samples = Array1::from_vec(vec![0.1; self.config.buffer_size]);
407        let position = Position3D::new(1.0, 0.5, 0.0);
408
409        for i in 0..self.config.iterations {
410            let start = Instant::now();
411
412            // Simulate ambisonics processing
413            let _ = self.simulate_ambisonics_processing(&audio_samples, &position);
414
415            let latency = start.elapsed();
416            latencies.push(latency);
417
418            if latency <= self.config.latency_thresholds.2 {
419                successes += 1;
420            }
421
422            if i % 100 == 0 {
423                tracing::debug!("Ambisonics test progress: {}/{}", i, self.config.iterations);
424            }
425        }
426
427        let resource_stats = monitor.stop();
428
429        // Calculate metrics
430        latencies.sort();
431        metrics.avg_latency = Duration::from_nanos(
432            (latencies.iter().map(|d| d.as_nanos()).sum::<u128>() / latencies.len() as u128) as u64,
433        );
434        metrics.min_latency = latencies[0];
435        metrics.max_latency = latencies[latencies.len() - 1];
436        metrics.p95_latency = latencies[(latencies.len() as f32 * 0.95) as usize];
437        metrics.p99_latency = latencies[(latencies.len() as f32 * 0.99) as usize];
438        metrics.avg_cpu_usage = resource_stats.avg_cpu_usage;
439        metrics.peak_cpu_usage = resource_stats.peak_cpu_usage;
440        metrics.avg_memory_usage = resource_stats.avg_memory_usage;
441        metrics.peak_memory_usage = resource_stats.peak_memory_usage;
442        metrics.iterations = self.config.iterations;
443        metrics.success_rate = successes as f32 / self.config.iterations as f32;
444        metrics.throughput = (self.config.iterations * self.config.buffer_size) as f64
445            / resource_stats.duration.as_secs_f64();
446
447        self.results.push(metrics);
448        Ok(())
449    }
450
451    /// Test multi-source processing performance
452    fn test_multi_source_processing(&mut self) -> Result<()> {
453        let mut metrics = PerformanceMetrics::new("Multi-Source Processing".to_string());
454        let mut latencies = Vec::new();
455        let mut successes = 0;
456
457        let monitor = ResourceMonitor::start();
458
459        // Create multi-source test data
460        let audio_data = Array2::from_shape_vec(
461            (self.config.source_count, self.config.buffer_size),
462            vec![0.1; self.config.source_count * self.config.buffer_size],
463        )
464        .map_err(|e| Error::LegacyProcessing(format!("Failed to create test audio data: {e}")))?;
465
466        let positions: Vec<Position3D> = (0..self.config.source_count)
467            .map(|i| {
468                let angle =
469                    (i as f32 / self.config.source_count as f32) * 2.0 * std::f32::consts::PI;
470                Position3D::new(angle.cos(), angle.sin(), 0.0)
471            })
472            .collect();
473
474        for i in 0..self.config.iterations {
475            let start = Instant::now();
476
477            // Simulate multi-source processing
478            let _ = self.simulate_multi_source_processing(&audio_data, &positions);
479
480            let latency = start.elapsed();
481            latencies.push(latency);
482
483            // Higher latency threshold for multi-source
484            if latency <= Duration::from_millis(100) {
485                successes += 1;
486            }
487
488            if i % 100 == 0 {
489                tracing::debug!(
490                    "Multi-source test progress: {}/{}",
491                    i,
492                    self.config.iterations
493                );
494            }
495        }
496
497        let resource_stats = monitor.stop();
498
499        // Calculate metrics
500        latencies.sort();
501        metrics.avg_latency = Duration::from_nanos(
502            (latencies.iter().map(|d| d.as_nanos()).sum::<u128>() / latencies.len() as u128) as u64,
503        );
504        metrics.min_latency = latencies[0];
505        metrics.max_latency = latencies[latencies.len() - 1];
506        metrics.p95_latency = latencies[(latencies.len() as f32 * 0.95) as usize];
507        metrics.p99_latency = latencies[(latencies.len() as f32 * 0.99) as usize];
508        metrics.avg_cpu_usage = resource_stats.avg_cpu_usage;
509        metrics.peak_cpu_usage = resource_stats.peak_cpu_usage;
510        metrics.avg_memory_usage = resource_stats.avg_memory_usage;
511        metrics.peak_memory_usage = resource_stats.peak_memory_usage;
512        metrics.iterations = self.config.iterations;
513        metrics.success_rate = successes as f32 / self.config.iterations as f32;
514        metrics.throughput =
515            (self.config.iterations * self.config.source_count * self.config.buffer_size) as f64
516                / resource_stats.duration.as_secs_f64();
517
518        // Add custom metric for sources per second
519        metrics.custom_metrics.insert(
520            "sources_per_second".to_string(),
521            (self.config.iterations * self.config.source_count) as f64
522                / resource_stats.duration.as_secs_f64(),
523        );
524
525        self.results.push(metrics);
526        Ok(())
527    }
528
529    /// Test real-time latency requirements
530    fn test_real_time_latency(&mut self) -> Result<()> {
531        let mut metrics = PerformanceMetrics::new("Real-Time Latency".to_string());
532        let mut latencies = Vec::new();
533        let mut vr_successes = 0;
534        let mut gaming_successes = 0;
535        let mut general_successes = 0;
536
537        // Simulate real-time processing constraints
538        let audio_samples = Array1::from_vec(vec![0.1; self.config.buffer_size]);
539        let position = Position3D::new(1.0, 0.0, 0.0);
540
541        for i in 0..self.config.iterations {
542            let start = Instant::now();
543
544            // Simulate full spatial audio pipeline
545            let _ = self.simulate_full_pipeline(&audio_samples, &position);
546
547            let latency = start.elapsed();
548            latencies.push(latency);
549
550            if latency <= self.config.latency_thresholds.0 {
551                vr_successes += 1;
552            }
553            if latency <= self.config.latency_thresholds.1 {
554                gaming_successes += 1;
555            }
556            if latency <= self.config.latency_thresholds.2 {
557                general_successes += 1;
558            }
559
560            if i % 100 == 0 {
561                tracing::debug!("Latency test progress: {}/{}", i, self.config.iterations);
562            }
563        }
564
565        // Calculate metrics
566        latencies.sort();
567        metrics.avg_latency = Duration::from_nanos(
568            (latencies.iter().map(|d| d.as_nanos()).sum::<u128>() / latencies.len() as u128) as u64,
569        );
570        metrics.min_latency = latencies[0];
571        metrics.max_latency = latencies[latencies.len() - 1];
572        metrics.p95_latency = latencies[(latencies.len() as f32 * 0.95) as usize];
573        metrics.p99_latency = latencies[(latencies.len() as f32 * 0.99) as usize];
574        metrics.iterations = self.config.iterations;
575        metrics.success_rate = general_successes as f32 / self.config.iterations as f32;
576
577        // Add custom metrics for different latency targets
578        metrics.custom_metrics.insert(
579            "vr_success_rate".to_string(),
580            vr_successes as f64 / self.config.iterations as f64,
581        );
582        metrics.custom_metrics.insert(
583            "gaming_success_rate".to_string(),
584            gaming_successes as f64 / self.config.iterations as f64,
585        );
586        metrics.custom_metrics.insert(
587            "general_success_rate".to_string(),
588            general_successes as f64 / self.config.iterations as f64,
589        );
590
591        self.results.push(metrics);
592        Ok(())
593    }
594
595    /// Test memory efficiency
596    fn test_memory_efficiency(&mut self) -> Result<()> {
597        let mut metrics = PerformanceMetrics::new("Memory Efficiency".to_string());
598
599        let monitor = ResourceMonitor::start();
600
601        // Test memory allocation patterns
602        let mut audio_buffers = Vec::new();
603        let mut processors: Vec<i32> = Vec::new(); // Placeholder type for testing
604
605        // Allocate resources incrementally
606        for i in 0..(self.config.source_count * 10) {
607            let buffer = Array1::from_vec(vec![0.1; self.config.buffer_size]);
608            audio_buffers.push(buffer);
609
610            if i % 10 == 0 {
611                thread::sleep(Duration::from_millis(10));
612            }
613        }
614
615        // Hold resources for a while to test steady-state memory usage
616        thread::sleep(Duration::from_millis(1000));
617
618        // Clean up
619        drop(audio_buffers);
620        drop(processors);
621
622        let resource_stats = monitor.stop();
623
624        metrics.avg_memory_usage = resource_stats.avg_memory_usage;
625        metrics.peak_memory_usage = resource_stats.peak_memory_usage;
626        metrics.avg_cpu_usage = resource_stats.avg_cpu_usage;
627        metrics.peak_cpu_usage = resource_stats.peak_cpu_usage;
628        metrics.iterations = 1;
629        metrics.success_rate = 1.0;
630
631        // Custom memory metrics
632        metrics.custom_metrics.insert(
633            "memory_per_source_mb".to_string(),
634            (resource_stats.peak_memory_usage as f64 / self.config.source_count as f64)
635                / 1_000_000.0,
636        );
637
638        self.results.push(metrics);
639        Ok(())
640    }
641
642    /// Test throughput scaling with source count
643    fn test_throughput_scaling(&mut self) -> Result<()> {
644        let mut metrics = PerformanceMetrics::new("Throughput Scaling".to_string());
645        let mut throughputs = Vec::new();
646
647        let audio_samples = Array1::from_vec(vec![0.1; self.config.buffer_size]);
648
649        // Test with varying source counts
650        for source_count in [1, 2, 4, 8, 16, 32] {
651            if source_count > self.config.source_count * 4 {
652                break;
653            }
654
655            let positions: Vec<Position3D> = (0..source_count)
656                .map(|i| {
657                    let angle = (i as f32 / source_count as f32) * 2.0 * std::f32::consts::PI;
658                    Position3D::new(angle.cos(), angle.sin(), 0.0)
659                })
660                .collect();
661
662            let start = Instant::now();
663
664            // Process batch with this source count
665            for _ in 0..100 {
666                for pos in &positions {
667                    let _ = self.simulate_binaural_processing(&audio_samples, pos);
668                }
669            }
670
671            let duration = start.elapsed();
672            let throughput =
673                (100 * source_count * self.config.buffer_size) as f64 / duration.as_secs_f64();
674            throughputs.push(throughput);
675
676            metrics
677                .custom_metrics
678                .insert(format!("throughput_{source_count}_sources"), throughput);
679        }
680
681        metrics.throughput = throughputs.iter().copied().fold(0.0, f64::max);
682        metrics.iterations = 100;
683        metrics.success_rate = 1.0;
684
685        self.results.push(metrics);
686        Ok(())
687    }
688
689    // Simulation functions (would use real processors in practice)
690
691    fn simulate_binaural_processing(
692        &self,
693        _audio: &Array1<f32>,
694        _position: &Position3D,
695    ) -> Array2<f32> {
696        // Simulate processing time
697        thread::sleep(Duration::from_micros(50));
698        Array2::zeros((2, self.config.buffer_size))
699    }
700
701    fn simulate_ambisonics_processing(
702        &self,
703        _audio: &Array1<f32>,
704        _position: &Position3D,
705    ) -> Array2<f32> {
706        // Simulate processing time
707        thread::sleep(Duration::from_micros(75));
708        Array2::zeros((4, self.config.buffer_size))
709    }
710
711    fn simulate_multi_source_processing(
712        &self,
713        _audio: &Array2<f32>,
714        _positions: &[Position3D],
715    ) -> Array2<f32> {
716        // Simulate processing time proportional to source count
717        thread::sleep(Duration::from_micros(25 * self.config.source_count as u64));
718        Array2::zeros((2, self.config.buffer_size))
719    }
720
721    fn simulate_full_pipeline(&self, _audio: &Array1<f32>, _position: &Position3D) -> Array2<f32> {
722        // Simulate full spatial audio pipeline
723        thread::sleep(Duration::from_micros(100));
724        Array2::zeros((2, self.config.buffer_size))
725    }
726
727    /// Generate performance report
728    pub fn generate_report(&self) -> PerformanceReport {
729        PerformanceReport::new(&self.results, &self.config)
730    }
731}
732
/// Performance test report
///
/// Bundles the configuration, raw results, derived summary, per-test
/// target validation, and tuning recommendations.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceReport {
    /// Test configuration used
    pub config: PerformanceConfig,
    /// All test results
    pub results: Vec<PerformanceMetrics>,
    /// Overall summary
    pub summary: PerformanceSummary,
    /// Target validation results (one per entry in `results`)
    pub target_results: Vec<PerformanceTargetResult>,
    /// Recommendations for improving performance
    pub recommendations: Vec<String>,
}
747
/// Performance summary
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceSummary {
    /// Total tests run
    pub total_tests: usize,
    /// Tests that met VR latency targets
    pub vr_compatible_tests: usize,
    /// Tests that met gaming latency targets
    pub gaming_compatible_tests: usize,
    /// Overall system rating (0-10), derived from the mean average latency
    pub overall_rating: f32,
    /// Primary performance bottleneck (heuristic label)
    pub bottleneck: String,
}
762
763impl PerformanceReport {
764    fn new(results: &[PerformanceMetrics], config: &PerformanceConfig) -> Self {
765        let target_results: Vec<PerformanceTargetResult> =
766            results.iter().map(|r| r.meets_targets(config)).collect();
767
768        let vr_compatible = target_results.iter().filter(|r| r.vr_latency_met).count();
769        let gaming_compatible = target_results
770            .iter()
771            .filter(|r| r.gaming_latency_met)
772            .count();
773
774        let mut recommendations = Vec::new();
775        if vr_compatible < results.len() {
776            recommendations
777                .push("Consider optimizing for lower latency to meet VR requirements".to_string());
778        }
779        if target_results.iter().any(|r| !r.cpu_usage_acceptable) {
780            recommendations.push(
781                "CPU usage is high - consider GPU acceleration or algorithmic optimization"
782                    .to_string(),
783            );
784        }
785
786        let avg_latency_ms: f32 = results
787            .iter()
788            .map(|r| r.avg_latency.as_millis() as f32)
789            .sum::<f32>()
790            / results.len() as f32;
791
792        let overall_rating = if avg_latency_ms < 20.0 {
793            10.0
794        } else if avg_latency_ms < 30.0 {
795            8.0
796        } else if avg_latency_ms < 50.0 {
797            6.0
798        } else {
799            4.0
800        };
801
802        let bottleneck = if results.iter().any(|r| r.avg_cpu_usage > 50.0) {
803            "CPU Processing".to_string()
804        } else if results.iter().any(|r| r.peak_memory_usage > 500_000_000) {
805            "Memory Usage".to_string()
806        } else {
807            "Algorithm Efficiency".to_string()
808        };
809
810        Self {
811            config: config.clone(),
812            results: results.to_vec(),
813            summary: PerformanceSummary {
814                total_tests: results.len(),
815                vr_compatible_tests: vr_compatible,
816                gaming_compatible_tests: gaming_compatible,
817                overall_rating,
818                bottleneck,
819            },
820            target_results,
821            recommendations,
822        }
823    }
824
825    /// Print report to console
826    pub fn print_summary(&self) {
827        println!("\n=== Spatial Audio Performance Report ===");
828        println!("Total tests: {}", self.summary.total_tests);
829        println!(
830            "VR-compatible: {}/{}",
831            self.summary.vr_compatible_tests, self.summary.total_tests
832        );
833        println!(
834            "Gaming-compatible: {}/{}",
835            self.summary.gaming_compatible_tests, self.summary.total_tests
836        );
837        println!("Overall rating: {:.1}/10", self.summary.overall_rating);
838        println!("Primary bottleneck: {}", self.summary.bottleneck);
839
840        println!("\n--- Test Results ---");
841        for result in &self.results {
842            println!(
843                "{}: avg={:.1}ms, p95={:.1}ms, cpu={:.1}%",
844                result.test_name,
845                result.avg_latency.as_millis(),
846                result.p95_latency.as_millis(),
847                result.avg_cpu_usage
848            );
849        }
850
851        if !self.recommendations.is_empty() {
852            println!("\n--- Recommendations ---");
853            for rec in &self.recommendations {
854                println!("• {rec}");
855            }
856        }
857    }
858}
859
#[cfg(test)]
mod tests {
    use super::*;

    // Default config should match the documented defaults.
    #[test]
    fn test_performance_config_default() {
        let config = PerformanceConfig::default();
        assert_eq!(config.iterations, 1000);
        assert_eq!(config.source_count, 8);
        assert_eq!(config.sample_rate, 44100);
        assert_eq!(config.buffer_size, 512);
    }

    // Fresh metrics start zeroed (aside from `min_latency`'s MAX sentinel).
    #[test]
    fn test_performance_metrics_creation() {
        let metrics = PerformanceMetrics::new("Test".to_string());
        assert_eq!(metrics.test_name, "Test");
        assert_eq!(metrics.iterations, 0);
        assert_eq!(metrics.success_rate, 0.0);
    }

    // The ~10 Hz monitor thread should collect at least one sample over
    // a 200 ms window.
    #[test]
    fn test_resource_monitor() {
        let monitor = ResourceMonitor::start();
        thread::sleep(Duration::from_millis(200));
        let stats = monitor.stop();

        assert!(stats.sample_count > 0);
        assert!(stats.duration > Duration::from_millis(100));
    }

    // 15 ms p95 / 20% CPU / 98% success should satisfy every target.
    #[test]
    fn test_performance_targets() {
        let config = PerformanceConfig::default();
        let mut metrics = PerformanceMetrics::new("Test".to_string());

        metrics.p95_latency = Duration::from_millis(15);
        metrics.avg_cpu_usage = 20.0;
        metrics.success_rate = 0.98;

        let result = metrics.meets_targets(&config);
        assert!(result.vr_latency_met);
        assert!(result.cpu_usage_acceptable);
        assert!(result.success_rate_acceptable);
    }

    // Suite construction stores the config and starts with no results.
    #[test]
    fn test_performance_test_suite_creation() {
        let config = PerformanceConfig {
            iterations: 10,
            ..Default::default()
        };
        let suite = PerformanceTestSuite::new(config);
        assert_eq!(suite.config.iterations, 10);
        assert_eq!(suite.results.len(), 0);
    }

    // The binaural simulation returns a stereo (2 x buffer_size) frame.
    #[test]
    fn test_simulation_functions() {
        let config = PerformanceConfig::default();
        let suite = PerformanceTestSuite::new(config);

        let audio = Array1::zeros(512);
        let position = Position3D::new(1.0, 0.0, 0.0);

        let result = suite.simulate_binaural_processing(&audio, &position);
        assert_eq!(result.shape(), [2, 512]);
    }

    // A 35 ms p95 misses both the 20 ms VR and 30 ms gaming thresholds.
    #[test]
    fn test_performance_report_generation() {
        let config = PerformanceConfig::default();
        let mut metrics = PerformanceMetrics::new("Test".to_string());
        metrics.avg_latency = Duration::from_millis(25);
        metrics.p95_latency = Duration::from_millis(35);
        metrics.avg_cpu_usage = 15.0;
        metrics.success_rate = 0.96;

        let report = PerformanceReport::new(&[metrics], &config);
        assert_eq!(report.summary.total_tests, 1);
        assert_eq!(report.summary.vr_compatible_tests, 0); // 35ms > 20ms VR threshold
        assert_eq!(report.summary.gaming_compatible_tests, 0); // 35ms > 30ms gaming threshold
    }

    // Throughput is samples processed divided by elapsed seconds.
    #[test]
    fn test_throughput_calculation() {
        let mut metrics = PerformanceMetrics::new("Throughput Test".to_string());
        metrics.iterations = 1000;

        // Simulate processing 1000 iterations of 512 samples in 1 second
        metrics.throughput = (1000.0 * 512.0) / 1.0; // samples per second

        assert_eq!(metrics.throughput, 512_000.0);
    }
}
954}