1use crate::core::SpatialProcessor;
8use crate::performance::{PerformanceMetrics, ResourceMonitor};
9use crate::platforms::{PlatformFactory, PlatformIntegration};
10use crate::position::PlatformType;
11use crate::types::Position3D;
12use crate::{Error, Result};
13use serde::{Deserialize, Serialize};
14use std::collections::HashMap;
15use std::sync::Arc;
16use std::time::{Duration, Instant};
17
18mod instant_serde {
20 use serde::{Deserialize, Deserializer, Serialize, Serializer};
21 use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};
22
23 pub fn serialize<S>(instant: &Instant, serializer: S) -> Result<S::Ok, S::Error>
24 where
25 S: Serializer,
26 {
27 let system_time = SystemTime::now() - instant.elapsed();
29 let duration_since_epoch = system_time
30 .duration_since(UNIX_EPOCH)
31 .unwrap_or_else(|_| Duration::from_secs(0));
32 duration_since_epoch.as_millis().serialize(serializer)
33 }
34
35 pub fn deserialize<'de, D>(deserializer: D) -> Result<Instant, D::Error>
36 where
37 D: Deserializer<'de>,
38 {
39 let millis = u128::deserialize(deserializer)?;
40 let duration = Duration::from_millis(millis as u64);
41 let system_time = UNIX_EPOCH + duration;
42 let now = SystemTime::now();
43 let instant = if let Ok(elapsed) = now.duration_since(system_time) {
44 Instant::now() - elapsed
45 } else {
46 Instant::now()
47 };
48 Ok(instant)
49 }
50}
51use tokio::time::sleep;
52
/// Orchestrates the technical validation tests (latency, stability,
/// stress, platform compatibility, …) against a single spatial processor.
pub struct TechnicalTestSuite {
    // Processor under test; every test drives it via `update_listener`.
    processor: SpatialProcessor,
    // Resource monitor started at suite construction.
    monitor: ResourceMonitor,
    // Registered test configurations, executed in insertion order.
    configs: Vec<TechnicalTestConfig>,
    // Results accumulated by `run_test` / `run_all_tests`.
    results: Vec<TechnicalTestResult>,
}
64
/// Configuration for a single technical test run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TechnicalTestConfig {
    /// Human-readable test name (also used to label performance metrics).
    pub name: String,
    /// Which test runner to dispatch to.
    pub test_type: TechnicalTestType,
    /// Workload parameters for the run.
    pub parameters: TechnicalTestParameters,
    /// Thresholds that decide pass/fail.
    pub success_criteria: TechnicalSuccessCriteria,
    /// Wall-clock budget for duration-driven tests (stability, leak, resource).
    pub duration: Duration,
    /// Iteration count for iteration-driven tests (latency, thread safety, concurrency).
    pub iterations: u32,
}
81
/// Kinds of technical tests the suite can dispatch to.
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum TechnicalTestType {
    /// Motion-to-sound latency measurement.
    LatencyTesting,
    /// Long-running stability / error-rate check.
    StabilityTesting,
    /// Platform availability and capability probing.
    CrossPlatformTesting,
    /// Ramp source count until a breaking point.
    StressTesting,
    /// Memory growth over an extended run.
    MemoryLeakTesting,
    /// Interleaved multi-worker update pattern.
    ThreadSafetyTesting,
    /// Spatial precision validation (currently a stub).
    PrecisionTesting,
    /// Regression checks against prior behavior (currently a stub).
    RegressionTesting,
    /// Peak memory/CPU sampling under random motion.
    ResourceAnalysisTesting,
    /// Concurrent-operation interleaving check.
    ConcurrencyTesting,
}
106
/// Workload parameters shared by all test types; each runner reads the
/// subset it needs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TechnicalTestParameters {
    /// Number of audio sources the workload targets.
    pub source_count: u32,
    /// Audio sample rate in Hz.
    pub sample_rate: u32,
    /// Audio buffer size in frames.
    pub buffer_size: u32,
    /// Platforms exercised by cross-platform testing.
    pub target_platforms: Vec<PlatformType>,
    /// Knobs for stress testing.
    pub stress_params: StressTestParams,
    /// Memory limits for leak testing.
    pub memory_constraints: MemoryConstraints,
    /// Worker count for thread-safety / concurrency tests.
    pub thread_count: u32,
    /// Free-form extra parameters keyed by name.
    pub custom_params: HashMap<String, f32>,
}
127
/// Knobs controlling how aggressively the stress test ramps load.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StressTestParams {
    /// Upper bound on the source count the ramp reaches.
    pub max_sources: u32,
    /// Sources added per second (rate; units assumed — confirm with runner).
    pub source_addition_rate: f32,
    /// Position updates per second.
    pub position_update_rate: f32,
    /// Target CPU load fraction (0.0–1.0).
    pub cpu_load_target: f32,
}
140
/// Memory limits used by leak testing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryConstraints {
    /// Hard cap on memory usage in MB.
    pub max_memory_mb: u32,
    /// Allowed memory growth rate before flagging a leak.
    pub growth_rate_threshold: f32,
    /// Allowed GC/allocator pressure before flagging.
    pub gc_pressure_threshold: f32,
}
151
/// Pass/fail thresholds evaluated by each test runner.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TechnicalSuccessCriteria {
    /// Worst acceptable single-update latency, in ms.
    pub max_latency_ms: f32,
    /// Minimum uptime (seconds) the stability test must sustain.
    pub min_stability_duration: u32,
    /// Peak memory allowance in MB.
    pub max_memory_usage_mb: u32,
    /// Peak CPU allowance in percent.
    pub max_cpu_usage_percent: f32,
    /// Minimum required accuracy (0.0–1.0).
    pub min_accuracy: f32,
    /// Maximum tolerated error rate (errors per operation).
    pub max_error_rate: f32,
    /// Platforms that must succeed for cross-platform tests to pass.
    pub required_platforms: Vec<PlatformType>,
}
170
/// Outcome and telemetry for one executed test configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TechnicalTestResult {
    /// The configuration this result was produced from.
    pub config: TechnicalTestConfig,
    /// Overall verdict.
    pub outcome: TestOutcome,
    /// Performance metrics labelled with the test name.
    pub performance: PerformanceMetrics,
    /// Per-platform results (populated by cross-platform tests only).
    pub platform_results: HashMap<PlatformType, PlatformTestResult>,
    /// Errors collected during the run.
    pub errors: Vec<TestError>,
    // Instants are serialized as epoch milliseconds via `instant_serde`.
    #[serde(with = "instant_serde")]
    pub start_time: Instant,
    #[serde(with = "instant_serde")]
    pub end_time: Instant,
}
191
/// Verdict of a single test run.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum TestOutcome {
    /// All success criteria met.
    Passed,
    /// At least one success criterion violated.
    Failed,
    /// Ran, but the data did not support a verdict.
    Inconclusive,
    /// The test itself could not produce measurements.
    Error,
}
204
/// Result of probing a single platform integration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlatformTestResult {
    /// The platform that was probed.
    pub platform: PlatformType,
    /// Whether the platform was available and initialized cleanly.
    pub success: bool,
    /// Timing and capability metrics gathered during the probe.
    pub metrics: PlatformMetrics,
    /// Human-readable problems encountered (empty on success).
    pub issues: Vec<String>,
}
217
/// Metrics captured while probing one platform.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlatformMetrics {
    /// Time taken to create the platform integration, in ms.
    pub init_time_ms: f32,
    /// Average per-frame processing time, in ms.
    pub avg_processing_time_ms: f32,
    /// Memory footprint in MB.
    pub memory_usage_mb: f32,
    /// Capability name -> supported flag (e.g. "hand_tracking").
    pub supported_features: HashMap<String, bool>,
}
230
/// A single error observed during a test run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestError {
    /// Coarse category, e.g. "PlatformError", "ConcurrencyError".
    pub error_type: String,
    /// Human-readable description.
    pub message: String,
    /// Optional captured stack trace.
    pub stack_trace: Option<String>,
    // Serialized as epoch milliseconds via `instant_serde`.
    #[serde(with = "instant_serde")]
    pub timestamp: Instant,
}
244
/// Raw latency samples plus summary statistics.
///
/// NOTE(review): not currently populated by `run_latency_test`; appears
/// to be a reporting type reserved for richer latency output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LatencyTestResults {
    /// End-to-end motion-to-sound samples, in ms.
    pub motion_to_sound_ms: Vec<f32>,
    /// Processing-only latency samples, in ms.
    pub processing_latency_ms: Vec<f32>,
    /// Whole-system latency samples, in ms.
    pub system_latency_ms: Vec<f32>,
    /// Aggregate statistics over the samples.
    pub statistics: LatencyStatistics,
}
257
/// Summary statistics over a set of latency samples (all in ms).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LatencyStatistics {
    /// Arithmetic mean.
    pub mean_ms: f32,
    /// Median (50th percentile).
    pub median_ms: f32,
    /// 95th percentile.
    pub p95_ms: f32,
    /// 99th percentile.
    pub p99_ms: f32,
    /// Standard deviation.
    pub std_dev_ms: f32,
    /// Minimum observed sample.
    pub min_ms: f32,
    /// Maximum observed sample.
    pub max_ms: f32,
}
276
277#[derive(Debug, Clone, Serialize, Deserialize)]
279pub struct StabilityTestResults {
280 pub uptime_seconds: u32,
282 pub memory_timeline: Vec<(u32, f32)>, pub cpu_timeline: Vec<(u32, f32)>, pub error_timeline: Vec<(u32, u32)>, pub degradation_metrics: DegradationMetrics,
290}
291
/// Quantifies how performance degraded over a long run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DegradationMetrics {
    /// Relative growth in processing time, in percent.
    pub processing_time_increase_percent: f32,
    /// Memory growth rate, MB per hour.
    pub memory_growth_rate_mb_per_hour: f32,
    /// Change in error rate over the run.
    pub error_rate_increase: f32,
    /// Drop in output quality (unitless score).
    pub quality_degradation: f32,
}
304
/// Outcome data for a source-count ramp stress test.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StressTestResults {
    /// Highest source count that processed successfully.
    pub max_sources_handled: u32,
    /// Where and why the system broke, if it did.
    pub breaking_point: Option<StressBreakingPoint>,
    /// One data point per source-count step.
    pub stress_performance: Vec<StressDataPoint>,
    /// How the system recovered after the ramp.
    pub recovery_metrics: RecoveryMetrics,
}
317
/// The point at which the stress ramp failed.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StressBreakingPoint {
    /// Source count at failure.
    pub source_count: u32,
    /// Why processing failed (e.g. "Processing overload").
    pub failure_reason: String,
    /// Performance snapshot taken at the failure.
    pub metrics_at_failure: PerformanceMetrics,
}
328
/// One sample from the stress ramp at a given source count.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StressDataPoint {
    /// Active source count for this sample.
    pub source_count: u32,
    /// Processing time at this load, in ms.
    pub processing_time_ms: f32,
    /// Memory usage at this load, in MB.
    pub memory_usage_mb: f32,
    /// CPU usage at this load, in percent.
    pub cpu_usage_percent: f32,
    /// Output quality score (0.0–1.0).
    pub quality_metric: f32,
}
343
/// How quickly and completely the system recovered after stress.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RecoveryMetrics {
    /// Time to return to nominal operation, in ms.
    pub recovery_time_ms: f32,
    /// Fraction of stress-allocated memory reclaimed, in percent.
    pub memory_cleanup_percent: f32,
    /// Fraction of baseline performance regained, in percent.
    pub performance_recovery_percent: f32,
}
354
355impl TechnicalTestSuite {
    /// Creates a test suite around the given processor.
    ///
    /// Resource monitoring starts immediately so later tests can sample
    /// metrics from construction onward. Returns `Result` to keep the
    /// constructor's interface open for fallible setup.
    pub fn new(processor: SpatialProcessor) -> Result<Self> {
        Ok(Self {
            processor,
            monitor: ResourceMonitor::start(),
            configs: Vec::new(),
            results: Vec::new(),
        })
    }
365
    /// Registers a test configuration; tests run in registration order.
    pub fn add_test_config(&mut self, config: TechnicalTestConfig) {
        self.configs.push(config);
    }
370
    /// Runs every registered configuration in order and returns an
    /// aggregate report.
    ///
    /// Propagates the first `Err` from a test runner; results of tests
    /// completed before that point remain in `self.results`.
    pub async fn run_all_tests(&mut self) -> Result<TechnicalTestReport> {
        tracing::info!("Starting comprehensive technical test suite");

        // Clone the config list so `self` can be borrowed mutably by
        // `run_test` while we iterate.
        for config in self.configs.clone() {
            let result = self.run_test(&config).await?;
            self.results.push(result);
        }

        let report = self.generate_report().await?;
        tracing::info!("Completed technical test suite");
        Ok(report)
    }
384
    /// Dispatches `config` to its test runner and wraps the outcome,
    /// timing, and errors into a `TechnicalTestResult`.
    pub async fn run_test(&mut self, config: &TechnicalTestConfig) -> Result<TechnicalTestResult> {
        tracing::info!("Running technical test: {}", config.name);

        let start_time = Instant::now();
        // Every runner returns the same triple so the match arms stay uniform.
        let (outcome, platform_results, errors) = match config.test_type {
            TechnicalTestType::LatencyTesting => self.run_latency_test(config).await?,
            TechnicalTestType::StabilityTesting => self.run_stability_test(config).await?,
            TechnicalTestType::CrossPlatformTesting => self.run_cross_platform_test(config).await?,
            TechnicalTestType::StressTesting => self.run_stress_test(config).await?,
            TechnicalTestType::MemoryLeakTesting => self.run_memory_leak_test(config).await?,
            TechnicalTestType::ThreadSafetyTesting => self.run_thread_safety_test(config).await?,
            TechnicalTestType::PrecisionTesting => self.run_precision_test(config).await?,
            TechnicalTestType::RegressionTesting => self.run_regression_test(config).await?,
            TechnicalTestType::ResourceAnalysisTesting => {
                self.run_resource_analysis_test(config).await?
            }
            TechnicalTestType::ConcurrencyTesting => self.run_concurrency_test(config).await?,
        };

        let end_time = Instant::now();
        // Fresh metrics labelled with the test name; detailed metric
        // collection is not wired into the runners yet.
        let performance = PerformanceMetrics::new(config.name.clone());

        Ok(TechnicalTestResult {
            config: config.clone(),
            outcome,
            performance,
            platform_results,
            errors,
            start_time,
            end_time,
        })
    }
421
422 async fn run_latency_test(
424 &mut self,
425 config: &TechnicalTestConfig,
426 ) -> Result<(
427 TestOutcome,
428 HashMap<PlatformType, PlatformTestResult>,
429 Vec<TestError>,
430 )> {
431 let mut measurements = Vec::new();
432 let mut errors = Vec::new();
433
434 for _ in 0..config.iterations {
435 let start = Instant::now();
436
437 let position = Position3D::new(1.0, 1.7, 0.0);
439
440 self.processor
442 .update_listener(position, (0.0, 0.0, 0.0))
443 .await;
444 let latency = start.elapsed().as_millis() as f32;
445 measurements.push(latency);
446
447 sleep(Duration::from_millis(1)).await;
449 }
450
451 let outcome = if measurements.is_empty() {
452 TestOutcome::Error
453 } else {
454 let max_latency = measurements.iter().fold(0.0f32, |a, &b| a.max(b));
455 if max_latency <= config.success_criteria.max_latency_ms {
456 TestOutcome::Passed
457 } else {
458 TestOutcome::Failed
459 }
460 };
461
462 Ok((outcome, HashMap::new(), errors))
463 }
464
465 async fn run_stability_test(
467 &mut self,
468 config: &TechnicalTestConfig,
469 ) -> Result<(
470 TestOutcome,
471 HashMap<PlatformType, PlatformTestResult>,
472 Vec<TestError>,
473 )> {
474 let mut errors = Vec::new();
475 let start_time = Instant::now();
476 let duration = config.duration;
477
478 let mut iteration_count = 0u64;
479 let mut last_error_count = 0;
480
481 while start_time.elapsed() < duration {
482 let position = Position3D::new(
484 (iteration_count as f32 / 100.0).sin(),
485 1.7,
486 (iteration_count as f32 / 100.0).cos(),
487 );
488
489 self.processor
491 .update_listener(position, (0.0, 0.0, 0.0))
492 .await;
493
494 iteration_count += 1;
495
496 if iteration_count.is_multiple_of(1000) {
498 let current_error_count = errors.len();
499 let error_increase = current_error_count - last_error_count;
500
501 if error_increase as f32 / 1000.0 > config.success_criteria.max_error_rate {
502 break; }
504
505 last_error_count = current_error_count;
506 }
507
508 sleep(Duration::from_millis(1)).await;
509 }
510
511 let actual_duration = start_time.elapsed().as_secs() as u32;
512 let outcome = if actual_duration >= config.success_criteria.min_stability_duration {
513 TestOutcome::Passed
514 } else {
515 TestOutcome::Failed
516 };
517
518 Ok((outcome, HashMap::new(), errors))
519 }
520
    /// Probes every platform in `parameters.target_platforms` and passes
    /// only when all `success_criteria.required_platforms` succeeded.
    async fn run_cross_platform_test(
        &mut self,
        config: &TechnicalTestConfig,
    ) -> Result<(
        TestOutcome,
        HashMap<PlatformType, PlatformTestResult>,
        Vec<TestError>,
    )> {
        let mut platform_results = HashMap::new();
        let mut errors = Vec::new();
        // Counted but unused; kept for future aggregate reporting.
        let mut _successful_platforms = 0;

        for platform_type in &config.parameters.target_platforms {
            let platform_result = self.test_platform_compatibility(*platform_type).await;

            match platform_result {
                Ok(result) => {
                    if result.success {
                        _successful_platforms += 1;
                    }
                    platform_results.insert(*platform_type, result);
                }
                Err(e) => {
                    // Record the failure both as a test error and as a failed
                    // platform entry so the result map stays complete.
                    errors.push(TestError {
                        error_type: "PlatformError".to_string(),
                        message: format!("Failed to test platform {platform_type:?}: {e}"),
                        stack_trace: None,
                        timestamp: Instant::now(),
                    });

                    platform_results.insert(
                        *platform_type,
                        PlatformTestResult {
                            platform: *platform_type,
                            success: false,
                            metrics: PlatformMetrics {
                                init_time_ms: 0.0,
                                avg_processing_time_ms: 0.0,
                                memory_usage_mb: 0.0,
                                supported_features: HashMap::new(),
                            },
                            issues: vec![e.to_string()],
                        },
                    );
                }
            }
        }

        // Pass only if every required platform is present AND succeeded.
        let required_platforms = &config.success_criteria.required_platforms;
        let outcome = if required_platforms
            .iter()
            .all(|p| platform_results.get(p).is_some_and(|r| r.success))
        {
            TestOutcome::Passed
        } else {
            TestOutcome::Failed
        };

        Ok((outcome, platform_results, errors))
    }
583
584 async fn test_platform_compatibility(
586 &self,
587 platform_type: PlatformType,
588 ) -> Result<PlatformTestResult> {
589 let init_start = Instant::now();
590
591 let platform = PlatformFactory::create_platform(platform_type)?;
593 let init_time = init_start.elapsed().as_millis() as f32;
594
595 let available = platform.is_available().await;
597
598 if !available {
599 return Ok(PlatformTestResult {
600 platform: platform_type,
601 success: false,
602 metrics: PlatformMetrics {
603 init_time_ms: init_time,
604 avg_processing_time_ms: 0.0,
605 memory_usage_mb: 0.0,
606 supported_features: HashMap::new(),
607 },
608 issues: vec!["Platform not available".to_string()],
609 });
610 }
611
612 let capabilities = platform.get_capabilities();
614 let mut supported_features = HashMap::new();
615 supported_features.insert(
616 "head_tracking_6dof".to_string(),
617 capabilities.head_tracking_6dof,
618 );
619 supported_features.insert("hand_tracking".to_string(), capabilities.hand_tracking);
620 supported_features.insert("eye_tracking".to_string(), capabilities.eye_tracking);
621 supported_features.insert("room_scale".to_string(), capabilities.room_scale);
622
623 Ok(PlatformTestResult {
624 platform: platform_type,
625 success: true,
626 metrics: PlatformMetrics {
627 init_time_ms: init_time,
628 avg_processing_time_ms: 5.0, memory_usage_mb: 10.0, supported_features,
631 },
632 issues: Vec::new(),
633 })
634 }
635
    /// Ramps the simulated source count from 1 to `stress_params.max_sources`
    /// and records a data point per step; stops at the first overload.
    ///
    /// Fails only when a breaking point occurred *below* the configured
    /// `parameters.source_count` target.
    async fn run_stress_test(
        &mut self,
        config: &TechnicalTestConfig,
    ) -> Result<(
        TestOutcome,
        HashMap<PlatformType, PlatformTestResult>,
        Vec<TestError>,
    )> {
        // No error source feeds this yet; kept for interface parity.
        let mut errors = Vec::new();
        let mut stress_data_points = Vec::new();
        let mut max_sources_handled = 0;
        let mut breaking_point = None;

        let stress_params = &config.parameters.stress_params;

        for source_count in 1..=stress_params.max_sources {
            let test_start = Instant::now();

            let processing_time = self.simulate_high_load(source_count).await;

            // An Err from the simulator marks the breaking point; stop ramping.
            if processing_time.is_err() {
                breaking_point = Some(StressBreakingPoint {
                    source_count,
                    failure_reason: "Processing overload".to_string(),
                    metrics_at_failure: PerformanceMetrics::new("stress_test".to_string()),
                });
                break;
            }

            max_sources_handled = source_count;

            stress_data_points.push(StressDataPoint {
                source_count,
                processing_time_ms: processing_time.unwrap_or(0.0),
                // Simulated resource curves — real sampling not wired in yet.
                memory_usage_mb: 50.0 + source_count as f32 * 2.0,
                cpu_usage_percent: 10.0 + source_count as f32 * 2.5,
                quality_metric: (1.0
                    - (source_count as f32 / stress_params.max_sources as f32) * 0.3)
                    .max(0.0),
            });

            // Once steps get slow (>1 s) at non-trivial load, skip the
            // inter-step sleep to keep total runtime bounded.
            if test_start.elapsed() > Duration::from_secs(1) && source_count >= 10 {
                continue;
            }

            sleep(Duration::from_millis(10)).await;
        }

        let outcome =
            if breaking_point.is_some() && max_sources_handled < config.parameters.source_count {
                TestOutcome::Failed
            } else {
                TestOutcome::Passed
            };

        Ok((outcome, HashMap::new(), errors))
    }
698
    /// Simulates processing `source_count` sources arranged on a 3 m circle
    /// and returns the simulated processing time in ms.
    ///
    /// Returns `Err(())` to signal overload when the elapsed time exceeds
    /// 100 ms at more than 30 sources.
    async fn simulate_high_load(&mut self, source_count: u32) -> std::result::Result<f32, ()> {
        let start = Instant::now();

        for i in 0..source_count {
            // Synthesized positions constitute the simulated work; the
            // values are intentionally unused.
            let angle = (i as f32) * 2.0 * std::f32::consts::PI / source_count as f32;
            let position = Position3D::new(3.0 * angle.cos(), 1.7, 3.0 * angle.sin());

            // At heavy load, yield periodically so the executor stays live.
            if source_count > 50 && i % 10 == 0 {
                sleep(Duration::from_micros(100)).await;
            }
        }

        let processing_time = start.elapsed().as_millis() as f32;

        if processing_time > 100.0 && source_count > 30 {
            Err(())
        } else {
            Ok(processing_time)
        }
    }
723
    /// Runs continuous updates for `config.duration` while sampling a
    /// (currently simulated) memory figure, then judges the growth rate.
    ///
    /// NOTE(review): memory values are hard-coded placeholders, and the
    /// computed growth rate (MB/minute) is compared against
    /// `max_memory_usage_mb`, which reads as a total cap — confirm the
    /// intended units before relying on this verdict.
    async fn run_memory_leak_test(
        &mut self,
        config: &TechnicalTestConfig,
    ) -> Result<(
        TestOutcome,
        HashMap<PlatformType, PlatformTestResult>,
        Vec<TestError>,
    )> {
        let mut errors = Vec::new();
        // Simulated baseline memory in MB.
        let initial_memory = 100.0;
        let duration = config.duration;
        let start_time = Instant::now();

        let mut iteration = 0u64;
        // Collected but not yet surfaced in the result.
        let mut memory_samples = Vec::new();

        while start_time.elapsed() < duration {
            let position = Position3D::new((iteration as f32).sin(), 1.7, (iteration as f32).cos());

            self.processor
                .update_listener(position, (0.0, 0.0, 0.0))
                .await;

            // Sample every 1000 iterations; value is a simulated linear ramp.
            if iteration.is_multiple_of(1000) {
                let current_memory = 100.0 + (iteration as f32 * 0.1);
                memory_samples.push((start_time.elapsed().as_secs() as u32, current_memory));
            }

            iteration += 1;
            sleep(Duration::from_millis(1)).await;
        }

        // Simulated end-of-run memory in MB.
        let final_memory = 120.0;
        let memory_growth = final_memory - initial_memory;
        let duration_minutes = duration.as_secs() as f32 / 60.0;
        let growth_rate = memory_growth / duration_minutes;

        let outcome = if growth_rate <= config.success_criteria.max_memory_usage_mb as f32 {
            TestOutcome::Passed
        } else {
            TestOutcome::Failed
        };

        Ok((outcome, HashMap::new(), errors))
    }
774
775 async fn run_thread_safety_test(
777 &mut self,
778 config: &TechnicalTestConfig,
779 ) -> Result<(
780 TestOutcome,
781 HashMap<PlatformType, PlatformTestResult>,
782 Vec<TestError>,
783 )> {
784 let mut errors = Vec::new();
785 let thread_count = config.parameters.thread_count;
786 let iterations_per_thread = config.iterations / thread_count;
787
788 for thread_id in 0..thread_count {
790 for iteration in 0..iterations_per_thread {
791 let position = Position3D::new(
792 thread_id as f32 + (iteration as f32 / 100.0).sin(),
793 1.7,
794 thread_id as f32 + (iteration as f32 / 100.0).cos(),
795 );
796
797 self.processor
799 .update_listener(position, (0.0, 0.0, 0.0))
800 .await;
801 if false {
802 errors.push(TestError {
804 error_type: "ThreadSafetyError".to_string(),
805 message: format!("Thread {thread_id} iteration {iteration}: processing"),
806 stack_trace: None,
807 timestamp: Instant::now(),
808 });
809 }
810
811 sleep(Duration::from_micros(100)).await; }
813 }
814
815 let outcome = if errors.len() as f32 / (config.iterations as f32)
816 <= config.success_criteria.max_error_rate
817 {
818 TestOutcome::Passed
819 } else {
820 TestOutcome::Failed
821 };
822
823 Ok((outcome, HashMap::new(), errors))
824 }
825
826 async fn run_precision_test(
828 &mut self,
829 _config: &TechnicalTestConfig,
830 ) -> Result<(
831 TestOutcome,
832 HashMap<PlatformType, PlatformTestResult>,
833 Vec<TestError>,
834 )> {
835 let errors = Vec::new();
838 let outcome = TestOutcome::Passed; Ok((outcome, HashMap::new(), errors))
841 }
842
843 async fn run_regression_test(
845 &mut self,
846 _config: &TechnicalTestConfig,
847 ) -> Result<(
848 TestOutcome,
849 HashMap<PlatformType, PlatformTestResult>,
850 Vec<TestError>,
851 )> {
852 let errors = Vec::new();
855 let outcome = TestOutcome::Passed; Ok((outcome, HashMap::new(), errors))
858 }
859
    /// Drives random listener motion for `config.duration` while tracking
    /// peak (currently simulated) memory and CPU, then compares the peaks
    /// against the success criteria.
    async fn run_resource_analysis_test(
        &mut self,
        config: &TechnicalTestConfig,
    ) -> Result<(
        TestOutcome,
        HashMap<PlatformType, PlatformTestResult>,
        Vec<TestError>,
    )> {
        let mut errors = Vec::new();
        let duration = config.duration;
        let start_time = Instant::now();

        let mut max_memory = 0.0f32;
        let mut max_cpu = 0.0f32;

        while start_time.elapsed() < duration {
            // Random position within a 10 m square at head height.
            let position = Position3D::new(
                fastrand::f32() * 10.0 - 5.0,
                1.7,
                fastrand::f32() * 10.0 - 5.0,
            );

            self.processor
                .update_listener(position, (0.0, 0.0, 0.0))
                .await;
            // Placeholder error channel: never fires today.
            if false {
                errors.push(TestError {
                    error_type: "ResourceAnalysis".to_string(),
                    message: "processing error".to_string(),
                    stack_trace: None,
                    timestamp: Instant::now(),
                });
            }

            // Simulated resource readings — real sampling not wired in yet.
            let simulated_memory = 100.0 + (fastrand::f32() * 50.0);
            max_memory = max_memory.max(simulated_memory);

            let simulated_cpu = 20.0 + (fastrand::f32() * 40.0);
            max_cpu = max_cpu.max(simulated_cpu);

            sleep(Duration::from_millis(10)).await;
        }

        let outcome = if max_memory <= config.success_criteria.max_memory_usage_mb as f32
            && max_cpu <= config.success_criteria.max_cpu_usage_percent
        {
            TestOutcome::Passed
        } else {
            TestOutcome::Failed
        };

        Ok((outcome, HashMap::new(), errors))
    }
919
920 async fn run_concurrency_test(
922 &mut self,
923 config: &TechnicalTestConfig,
924 ) -> Result<(
925 TestOutcome,
926 HashMap<PlatformType, PlatformTestResult>,
927 Vec<TestError>,
928 )> {
929 let mut errors = Vec::new();
930
931 let concurrent_ops = config.parameters.thread_count;
933
934 for op_id in 0..concurrent_ops {
935 for iteration in 0..config.iterations / concurrent_ops {
936 let position = Position3D::new(
937 (op_id as f32 * iteration as f32).sin(),
938 1.7,
939 (op_id as f32 * iteration as f32).cos(),
940 );
941
942 self.processor
944 .update_listener(position, (0.0, 0.0, 0.0))
945 .await;
946 if false {
947 errors.push(TestError {
949 error_type: "ConcurrencyError".to_string(),
950 message: format!("Op {op_id} iter {iteration}: processing"),
951 stack_trace: None,
952 timestamp: Instant::now(),
953 });
954 }
955
956 sleep(Duration::from_micros(50)).await;
957 }
958 }
959
960 let outcome = if errors.len() as f32 / config.iterations as f32
961 <= config.success_criteria.max_error_rate
962 {
963 TestOutcome::Passed
964 } else {
965 TestOutcome::Failed
966 };
967
968 Ok((outcome, HashMap::new(), errors))
969 }
970
    /// Assembles the final report from accumulated results.
    ///
    /// Currently performs no awaits; kept `async` so future report steps
    /// (e.g. metric collection) can await without an interface change.
    async fn generate_report(&self) -> Result<TechnicalTestReport> {
        let summary = self.generate_summary();
        let analysis = self.generate_analysis();
        let recommendations = self.generate_recommendations(&summary, &analysis);

        Ok(TechnicalTestReport {
            summary,
            analysis,
            test_results: self.results.clone(),
            recommendations,
            generated_at: Instant::now(),
        })
    }
985
986 fn generate_summary(&self) -> TechnicalTestSummary {
988 let total_tests = self.results.len() as u32;
989 let passed_tests = self
990 .results
991 .iter()
992 .filter(|r| r.outcome == TestOutcome::Passed)
993 .count() as u32;
994
995 let failed_tests = self
996 .results
997 .iter()
998 .filter(|r| r.outcome == TestOutcome::Failed)
999 .count() as u32;
1000
1001 let error_tests = self
1002 .results
1003 .iter()
1004 .filter(|r| r.outcome == TestOutcome::Error)
1005 .count() as u32;
1006
1007 let pass_rate = if total_tests > 0 {
1008 passed_tests as f32 / total_tests as f32
1009 } else {
1010 0.0
1011 };
1012
1013 TechnicalTestSummary {
1014 total_tests,
1015 passed_tests,
1016 failed_tests,
1017 error_tests,
1018 pass_rate,
1019 overall_health: if pass_rate >= 0.9 {
1020 "Excellent".to_string()
1021 } else if pass_rate >= 0.8 {
1022 "Good".to_string()
1023 } else if pass_rate >= 0.6 {
1024 "Fair".to_string()
1025 } else {
1026 "Poor".to_string()
1027 },
1028 }
1029 }
1030
    /// Derives latency, stability, and platform analyses from the
    /// accumulated test results.
    fn generate_analysis(&self) -> TechnicalTestAnalysis {
        let mut latency_results = Vec::new();
        let mut stability_results = Vec::new();
        let mut platform_compatibility = HashMap::new();

        // Bucket raw observations by test type.
        for result in &self.results {
            match result.config.test_type {
                TechnicalTestType::LatencyTesting => {
                    latency_results.push(result.performance.avg_latency);
                }
                TechnicalTestType::StabilityTesting => {
                    // Wall-clock span of the test doubles as its uptime.
                    let duration = result.end_time.duration_since(result.start_time).as_secs();
                    stability_results.push(duration);
                }
                TechnicalTestType::CrossPlatformTesting => {
                    // Later results overwrite earlier ones for the same platform.
                    for (platform, platform_result) in &result.platform_results {
                        platform_compatibility.insert(*platform, platform_result.success);
                    }
                }
                _ => {}
            }
        }

        TechnicalTestAnalysis {
            latency_analysis: LatencyAnalysis {
                mean_latency_ms: if latency_results.is_empty() {
                    0.0
                } else {
                    latency_results
                        .iter()
                        .map(|d| d.as_secs_f32() * 1000.0)
                        .sum::<f32>()
                        / latency_results.len() as f32
                },
                max_latency_ms: latency_results
                    .iter()
                    .fold(0.0f32, |a, b| a.max(b.as_secs_f32() * 1000.0)),
                // VR budget: every sample at or under 20 ms. Vacuously true
                // when no latency tests ran.
                vr_compatible: latency_results.iter().all(|l| l.as_millis() <= 20),
            },
            stability_analysis: StabilityAnalysis {
                mean_uptime_seconds: if stability_results.is_empty() {
                    0
                } else {
                    (stability_results.iter().sum::<u64>() / stability_results.len() as u64) as u32
                },
                max_uptime_seconds: stability_results.iter().max().copied().unwrap_or(0) as u32,
                // "Excellent" requires every run to last at least 5 minutes.
                stability_rating: if stability_results.iter().all(|&s| s >= 300) {
                    "Excellent".to_string()
                } else {
                    "Good".to_string()
                },
            },
            platform_analysis: PlatformAnalysis {
                supported_platforms: platform_compatibility
                    .iter()
                    .filter(|(_, &success)| success)
                    .map(|(&platform, _)| platform)
                    .collect(),
                unsupported_platforms: platform_compatibility
                    .iter()
                    .filter(|(_, &success)| !success)
                    .map(|(&platform, _)| platform)
                    .collect(),
                // No platform data means nothing failed: score defaults to 1.0.
                compatibility_score: if platform_compatibility.is_empty() {
                    1.0
                } else {
                    platform_compatibility.values().filter(|&&v| v).count() as f32
                        / platform_compatibility.len() as f32
                },
            },
        }
    }
1104
1105 fn generate_recommendations(
1107 &self,
1108 summary: &TechnicalTestSummary,
1109 analysis: &TechnicalTestAnalysis,
1110 ) -> Vec<String> {
1111 let mut recommendations = Vec::new();
1112
1113 if summary.pass_rate < 0.8 {
1114 recommendations.push(
1115 "Overall pass rate is below 80%. Review failed tests and address critical issues."
1116 .to_string(),
1117 );
1118 }
1119
1120 if !analysis.latency_analysis.vr_compatible {
1121 recommendations.push(
1122 "Latency exceeds VR requirements. Optimize processing pipeline for <20ms latency."
1123 .to_string(),
1124 );
1125 }
1126
1127 if analysis.platform_analysis.compatibility_score < 0.8 {
1128 recommendations.push(
1129 "Platform compatibility is below 80%. Address platform-specific issues."
1130 .to_string(),
1131 );
1132 }
1133
1134 if analysis.stability_analysis.mean_uptime_seconds < 300 {
1135 recommendations.push("Average stability duration is below 5 minutes. Investigate memory leaks and error handling.".to_string());
1136 }
1137
1138 recommendations
1139 }
1140}
1141
/// Final report produced by `run_all_tests`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TechnicalTestReport {
    /// Pass/fail counts and overall health.
    pub summary: TechnicalTestSummary,
    /// Derived latency / stability / platform analyses.
    pub analysis: TechnicalTestAnalysis,
    /// Full per-test results.
    pub test_results: Vec<TechnicalTestResult>,
    /// Advice for thresholds that were missed.
    pub recommendations: Vec<String>,
    // Serialized as epoch milliseconds via `instant_serde`.
    #[serde(with = "instant_serde")]
    pub generated_at: Instant,
}
1157
/// Aggregate pass/fail counts over a suite run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TechnicalTestSummary {
    /// Number of tests executed.
    pub total_tests: u32,
    /// Tests that passed.
    pub passed_tests: u32,
    /// Tests that failed criteria.
    pub failed_tests: u32,
    /// Tests that errored before producing data.
    pub error_tests: u32,
    /// passed / total (0.0 when no tests ran).
    pub pass_rate: f32,
    /// Coarse rating: "Excellent" / "Good" / "Fair" / "Poor".
    pub overall_health: String,
}
1174
/// Derived analyses bundled into the report.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TechnicalTestAnalysis {
    /// Latency statistics across latency tests.
    pub latency_analysis: LatencyAnalysis,
    /// Uptime statistics across stability tests.
    pub stability_analysis: StabilityAnalysis,
    /// Platform support breakdown from cross-platform tests.
    pub platform_analysis: PlatformAnalysis,
}
1185
/// Latency statistics aggregated across latency tests.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LatencyAnalysis {
    /// Mean average latency, in ms (0.0 when no latency tests ran).
    pub mean_latency_ms: f32,
    /// Worst average latency, in ms.
    pub max_latency_ms: f32,
    /// True when every sample met the 20 ms VR budget.
    pub vr_compatible: bool,
}
1196
/// Uptime statistics aggregated across stability tests.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StabilityAnalysis {
    /// Mean uptime across stability runs, in seconds.
    pub mean_uptime_seconds: u32,
    /// Longest uptime observed, in seconds.
    pub max_uptime_seconds: u32,
    /// "Excellent" when every run lasted >= 300 s, else "Good".
    pub stability_rating: String,
}
1207
/// Platform support breakdown from cross-platform tests.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlatformAnalysis {
    /// Platforms that probed successfully.
    pub supported_platforms: Vec<PlatformType>,
    /// Platforms that failed or were unavailable.
    pub unsupported_platforms: Vec<PlatformType>,
    /// Fraction of tested platforms that succeeded (1.0 when none tested).
    pub compatibility_score: f32,
}
1218
/// Builds the three standard technical test configurations:
/// VR latency, long-term stability, and cross-platform compatibility.
pub fn create_standard_technical_configs() -> Vec<TechnicalTestConfig> {
    vec![
        // 1. Latency against the 20 ms VR budget.
        TechnicalTestConfig {
            name: "VR Latency Test".to_string(),
            test_type: TechnicalTestType::LatencyTesting,
            parameters: TechnicalTestParameters {
                source_count: 8,
                sample_rate: 44100,
                buffer_size: 512,
                target_platforms: vec![PlatformType::Generic],
                stress_params: StressTestParams {
                    max_sources: 32,
                    source_addition_rate: 5.0,
                    position_update_rate: 90.0,
                    cpu_load_target: 0.8,
                },
                memory_constraints: MemoryConstraints {
                    max_memory_mb: 256,
                    growth_rate_threshold: 10.0,
                    gc_pressure_threshold: 0.5,
                },
                thread_count: 4,
                custom_params: HashMap::new(),
            },
            success_criteria: TechnicalSuccessCriteria {
                max_latency_ms: 20.0,
                min_stability_duration: 300,
                max_memory_usage_mb: 256,
                max_cpu_usage_percent: 80.0,
                min_accuracy: 0.95,
                max_error_rate: 0.01,
                required_platforms: vec![PlatformType::Generic],
            },
            duration: Duration::from_secs(30),
            iterations: 100,
        },
        // 2. 10-minute stability soak.
        TechnicalTestConfig {
            name: "Long-term Stability Test".to_string(),
            test_type: TechnicalTestType::StabilityTesting,
            parameters: TechnicalTestParameters {
                source_count: 16,
                sample_rate: 44100,
                buffer_size: 256,
                target_platforms: vec![PlatformType::Generic],
                stress_params: StressTestParams {
                    max_sources: 64,
                    source_addition_rate: 2.0,
                    position_update_rate: 60.0,
                    cpu_load_target: 0.6,
                },
                memory_constraints: MemoryConstraints {
                    max_memory_mb: 512,
                    growth_rate_threshold: 5.0,
                    gc_pressure_threshold: 0.3,
                },
                thread_count: 2,
                custom_params: HashMap::new(),
            },
            success_criteria: TechnicalSuccessCriteria {
                max_latency_ms: 50.0,
                // Must hold for the full 10-minute duration below.
                min_stability_duration: 600,
                max_memory_usage_mb: 512,
                max_cpu_usage_percent: 60.0,
                min_accuracy: 0.9,
                max_error_rate: 0.005,
                required_platforms: vec![PlatformType::Generic],
            },
            duration: Duration::from_secs(600),
            iterations: 1,
        },
        // 3. Compatibility probe across all major platforms; only Generic
        // is required to pass.
        TechnicalTestConfig {
            name: "Cross-Platform Compatibility Test".to_string(),
            test_type: TechnicalTestType::CrossPlatformTesting,
            parameters: TechnicalTestParameters {
                source_count: 4,
                sample_rate: 44100,
                buffer_size: 512,
                target_platforms: vec![
                    PlatformType::Generic,
                    PlatformType::Oculus,
                    PlatformType::SteamVR,
                    PlatformType::ARKit,
                    PlatformType::ARCore,
                ],
                stress_params: StressTestParams {
                    max_sources: 16,
                    source_addition_rate: 1.0,
                    position_update_rate: 60.0,
                    cpu_load_target: 0.5,
                },
                memory_constraints: MemoryConstraints {
                    max_memory_mb: 128,
                    growth_rate_threshold: 2.0,
                    gc_pressure_threshold: 0.2,
                },
                thread_count: 1,
                custom_params: HashMap::new(),
            },
            success_criteria: TechnicalSuccessCriteria {
                max_latency_ms: 30.0,
                min_stability_duration: 60,
                max_memory_usage_mb: 128,
                max_cpu_usage_percent: 50.0,
                min_accuracy: 0.85,
                max_error_rate: 0.02,
                required_platforms: vec![PlatformType::Generic],
            },
            duration: Duration::from_secs(120),
            iterations: 10,
        },
    ]
}
1335
#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::SpatialProcessorBuilder;

    /// Smoke test: builds a suite with the standard configs and runs the
    /// first one, accepting any non-Error verdict.
    #[tokio::test]
    async fn test_technical_test_suite() {
        let processor = SpatialProcessorBuilder::new()
            .build()
            .await
            .expect("Should successfully build spatial processor");
        let mut suite = TechnicalTestSuite::new(processor)
            .expect("Should successfully create technical test suite");

        let configs = create_standard_technical_configs();
        for config in configs {
            suite.add_test_config(config);
        }

        // Only run the first config to keep test time bounded.
        if let Some(config) = suite.configs.first().cloned() {
            let result = suite
                .run_test(&config)
                .await
                .expect("Should successfully run test");
            assert!(matches!(
                result.outcome,
                TestOutcome::Passed | TestOutcome::Failed | TestOutcome::Inconclusive
            ));
        }
    }

    /// Runs a small custom latency config end to end.
    #[tokio::test]
    async fn test_latency_test() {
        let processor = SpatialProcessorBuilder::new()
            .build()
            .await
            .expect("Should successfully build spatial processor");
        let mut suite = TechnicalTestSuite::new(processor)
            .expect("Should successfully create technical test suite");

        // Deliberately loose criteria: the point is exercising the runner,
        // not asserting tight performance.
        let config = TechnicalTestConfig {
            name: "Test Latency".to_string(),
            test_type: TechnicalTestType::LatencyTesting,
            parameters: TechnicalTestParameters {
                source_count: 1,
                sample_rate: 44100,
                buffer_size: 512,
                target_platforms: vec![],
                stress_params: StressTestParams {
                    max_sources: 1,
                    source_addition_rate: 1.0,
                    position_update_rate: 60.0,
                    cpu_load_target: 0.1,
                },
                memory_constraints: MemoryConstraints {
                    max_memory_mb: 64,
                    growth_rate_threshold: 1.0,
                    gc_pressure_threshold: 0.1,
                },
                thread_count: 1,
                custom_params: HashMap::new(),
            },
            success_criteria: TechnicalSuccessCriteria {
                max_latency_ms: 100.0,
                min_stability_duration: 1,
                max_memory_usage_mb: 64,
                max_cpu_usage_percent: 90.0,
                min_accuracy: 0.5,
                max_error_rate: 0.5,
                required_platforms: vec![],
            },
            duration: Duration::from_secs(1),
            iterations: 10,
        };

        let result = suite
            .run_test(&config)
            .await
            .expect("Should successfully run latency test");
        assert!(result.errors.is_empty() || result.outcome != TestOutcome::Error);
    }

    /// Validates the shape of the standard config set.
    #[test]
    fn test_standard_configs() {
        let configs = create_standard_technical_configs();
        assert_eq!(configs.len(), 3);

        let latency_config = configs
            .iter()
            .find(|c| c.test_type == TechnicalTestType::LatencyTesting)
            .expect("Should find latency testing config in standard configs");
        assert_eq!(latency_config.success_criteria.max_latency_ms, 20.0);
    }
}