1use crate::{G2pError, LanguageCode, Result};
8use serde::{Deserialize, Serialize};
9use std::collections::{HashMap, VecDeque};
10use std::sync::{Arc, Mutex};
11use std::time::{Duration, SystemTime};
12
/// Real-time performance optimizer for a single language's processing pipeline.
///
/// Collects runtime metrics into a shared [`MetricsCollector`], consults a
/// prioritized list of pluggable [`OptimizationStrategy`] implementations, and
/// records every applied optimization in a bounded history for later
/// effectiveness analysis.
pub struct RealTimeOptimizer {
    /// Language this optimizer instance is responsible for.
    pub language: LanguageCode,
    /// Shared, thread-safe store of recent performance measurements.
    pub metrics_collector: Arc<Mutex<MetricsCollector>>,
    /// Registered strategies, kept sorted by descending priority.
    pub optimization_strategies: Vec<Box<dyn OptimizationStrategy>>,
    /// Current parameter values and bookkeeping for active optimizations.
    pub optimization_state: OptimizationState,
    /// Chronological log of applied optimizations (bounded; oldest evicted).
    pub optimization_history: VecDeque<OptimizationEntry>,
    /// Tunable behaviour of the optimizer itself.
    pub config: OptimizerConfig,
}
28
/// Sliding-window store of recent runtime measurements.
///
/// Queues fed by `add_measurement` are bounded so memory stays constant under
/// continuous operation.
/// NOTE(review): only latency/accuracy/throughput are written by
/// `add_measurement`; confirm the remaining queues have writers elsewhere.
#[derive(Debug, Clone, Default)]
pub struct MetricsCollector {
    /// Per-request processing latencies.
    pub latency_measurements: VecDeque<Duration>,
    /// Accuracy samples (unitless scores).
    pub accuracy_measurements: VecDeque<f32>,
    /// Throughput samples.
    pub throughput_measurements: VecDeque<f32>,
    /// Observed error rates.
    pub error_rates: VecDeque<f32>,
    /// Memory usage samples — presumably bytes (see MiB conversion at use site); TODO confirm.
    pub memory_usage: VecDeque<u64>,
    /// CPU usage samples.
    pub cpu_usage: VecDeque<f32>,
    /// Output quality scores.
    pub quality_scores: VecDeque<f32>,
    /// User satisfaction ratings.
    pub user_satisfaction: VecDeque<f32>,
}
49
/// A pluggable optimization policy consulted by [`RealTimeOptimizer`].
///
/// `Send + Sync` so strategies can be shared across threads alongside the
/// optimizer's `Arc<Mutex<_>>`-guarded metrics.
pub trait OptimizationStrategy: Send + Sync {
    /// Stable identifier; used to look the strategy up by name when applying
    /// a recommendation.
    fn name(&self) -> &str;

    /// Inspects collected metrics and produces a recommendation.
    fn analyze_metrics(&self, metrics: &MetricsCollector) -> OptimizationRecommendation;

    /// Applies a previously produced recommendation.
    fn apply_optimization(&self, recommendation: &OptimizationRecommendation) -> Result<()>;

    /// Whether enough relevant data exists for this strategy to run.
    fn is_applicable(&self, metrics: &MetricsCollector) -> bool;

    /// Relative priority; higher values are consulted first.
    fn priority(&self) -> u32;
}
67
/// A concrete proposal produced by a strategy's `analyze_metrics`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OptimizationRecommendation {
    /// Name of the strategy that produced this recommendation
    /// (must match `OptimizationStrategy::name`).
    pub strategy_name: String,
    /// Which performance dimension this recommendation targets.
    pub optimization_type: OptimizationType,
    /// Proposed parameter changes, keyed by parameter name.
    pub target_parameters: HashMap<String, ParameterAdjustment>,
    /// Predicted effect of applying the recommendation.
    pub expected_improvement: ExpectedImprovement,
    /// Strategy's confidence in the recommendation (0.0–1.0 by convention).
    pub confidence: f32,
    /// Tie-breaking priority; higher is applied first.
    pub priority: u32,
}
84
/// The performance dimension an optimization primarily targets.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum OptimizationType {
    /// Reduce per-request latency.
    LatencyOptimization,
    /// Improve output accuracy.
    AccuracyOptimization,
    /// Increase requests processed per unit time.
    ThroughputOptimization,
    /// Reduce memory footprint.
    MemoryOptimization,
    /// Reduce CPU utilization.
    CpuOptimization,
    /// Improve overall output quality.
    QualityOptimization,
    /// Trade off across several dimensions at once.
    BalancedOptimization,
}
103
/// A single proposed change to one tunable parameter.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ParameterAdjustment {
    /// Name of the parameter being adjusted.
    pub parameter_name: String,
    /// Value the parameter currently holds.
    pub current_value: f32,
    /// Value the strategy recommends instead.
    pub recommended_value: f32,
    /// Direction/kind of the change.
    pub adjustment_type: AdjustmentType,
    /// Confidence in this specific adjustment (0.0–1.0 by convention).
    pub confidence: f32,
}
118
/// How a parameter value should be moved toward its recommended value.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum AdjustmentType {
    /// Raise the value.
    Increase,
    /// Lower the value.
    Decrease,
    /// Set the value directly to the recommended value.
    SetValue,
    /// Make a small incremental adjustment.
    FineTune,
}
131
/// Predicted effect of an optimization, per dimension.
///
/// Positive values denote an expected improvement; negative values denote an
/// expected regression (strategies use negatives to model trade-offs, e.g.
/// latency gains at the cost of accuracy).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExpectedImprovement {
    /// Expected latency change.
    pub latency_improvement: f32,
    /// Expected accuracy change.
    pub accuracy_improvement: f32,
    /// Expected throughput change.
    pub throughput_improvement: f32,
    /// Expected memory-usage reduction (negative = more memory).
    pub memory_reduction: f32,
    /// Expected CPU-usage reduction (negative = more CPU).
    pub cpu_reduction: f32,
    /// Expected quality change.
    pub quality_improvement: f32,
}
148
/// Mutable bookkeeping for the optimizer's current configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OptimizationState {
    /// Current values of all tunable parameters, keyed by name.
    pub current_parameters: HashMap<String, f32>,
    /// When the last optimization was applied, if any.
    pub last_optimization: Option<SystemTime>,
    /// Names of optimizations currently in effect.
    pub active_optimizations: Vec<String>,
    /// Per-strategy effectiveness scores, keyed by strategy name.
    pub effectiveness_scores: HashMap<String, f32>,
    /// Overall stability score (initialized to 1.0 in `Default`).
    pub stability_score: f32,
}
163
/// One record in the optimizer's history: what was applied and with what effect.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OptimizationEntry {
    /// When the optimization was applied.
    pub timestamp: SystemTime,
    /// The recommendation that was applied.
    pub optimization: OptimizationRecommendation,
    /// Performance snapshot captured immediately before applying.
    pub performance_before: PerformanceSnapshot,
    /// Snapshot captured afterwards; `None` until follow-up data exists.
    pub performance_after: Option<PerformanceSnapshot>,
    /// Whether the strategy's `apply_optimization` returned `Ok`.
    pub success: bool,
    /// Free-form notes (e.g. failure details).
    pub notes: String,
}
180
/// Point-in-time aggregate of the collected metrics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceSnapshot {
    /// Average latency, in milliseconds.
    pub avg_latency_ms: f32,
    /// Average accuracy score.
    pub avg_accuracy: f32,
    /// Average throughput.
    pub avg_throughput: f32,
    /// Average memory usage, in mebibytes.
    pub avg_memory_mb: f32,
    /// Average CPU utilization, in percent.
    pub avg_cpu_percent: f32,
    /// Average quality score.
    pub quality_score: f32,
    /// When the snapshot was taken.
    pub timestamp: SystemTime,
}
199
/// Tunable behaviour of [`RealTimeOptimizer`] itself.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OptimizerConfig {
    /// When true, `optimize()` applies top recommendations automatically.
    pub enable_auto_optimization: bool,
    /// Intended spacing between optimization passes.
    pub optimization_interval: Duration,
    /// Minimum number of latency samples required before optimizing.
    pub min_data_points: usize,
    /// Cap on recommendations auto-applied per `optimize()` call.
    pub max_optimizations_per_interval: usize,
    /// Improvement (percent) considered meaningful.
    pub improvement_threshold: f32,
    /// Regression (percent, negative) that should trigger a rollback.
    pub rollback_threshold: f32,
    /// When true, prefer safer, less aggressive adjustments.
    pub conservative_mode: bool,
}
218
/// Strategy that trades model complexity and cache size for lower latency.
pub struct LatencyOptimizationStrategy {
    /// Latency budget in milliseconds; exceeding it triggers recommendations.
    pub target_latency_ms: f32,
    /// When true, allow more aggressive adjustments.
    pub aggressive_mode: bool,
}
226
/// Strategy that trades latency and resources for higher accuracy.
pub struct AccuracyOptimizationStrategy {
    /// Accuracy floor; falling below it triggers recommendations.
    pub target_accuracy: f32,
    /// Relative weight given to quality over speed.
    pub quality_priority: f32,
}
234
/// Strategy aimed at keeping memory usage under a target budget.
pub struct MemoryOptimizationStrategy {
    /// Memory budget, in megabytes.
    pub target_memory_mb: f32,
    /// When true, favour aggressive memory reclamation.
    pub aggressive_gc: bool,
}
242
/// Strategy that redistributes work across backends based on observed load.
pub struct AdaptiveLoadBalancingStrategy {
    /// Per-backend load thresholds, keyed by backend name.
    pub load_thresholds: HashMap<String, f32>,
    /// Latest performance snapshot per backend, keyed by backend name.
    pub backend_performance: HashMap<String, PerformanceSnapshot>,
}
250
/// Strategy that tunes caching behaviour toward a target hit rate.
pub struct DynamicCachingStrategy {
    /// Desired cache hit rate.
    pub target_hit_rate: f32,
    /// Memory budget for the cache, in megabytes.
    pub memory_budget_mb: f32,
    /// Name of the eviction policy in use.
    pub eviction_policy: String,
}
260
/// Tunes batch sizes based on observed (batch size, throughput) history.
pub struct BatchProcessingOptimizer {
    /// Best-known batch size per workload, keyed by workload name.
    pub optimal_batch_sizes: HashMap<String, usize>,
    /// Observed (batch size, throughput) pairs.
    pub throughput_history: VecDeque<(usize, f32)>,
    /// Which objective batch-size tuning should optimize for.
    pub optimization_target: BatchOptimizationTarget,
}
270
/// Objective used when tuning batch sizes.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum BatchOptimizationTarget {
    /// Maximize items processed per unit time.
    MaxThroughput,
    /// Minimize per-item latency.
    MinLatency,
    /// Balance throughput against latency.
    Balanced,
    /// Minimize memory footprint.
    MinMemory,
}
283
/// Optimizer that tracks quality metrics and proposes quality improvements.
pub struct QualityAwareOptimizer {
    /// Collected quality measurements.
    pub quality_metrics: QualityMetrics,
    /// Minimum acceptable value per quality aspect, keyed by aspect name.
    pub quality_thresholds: HashMap<String, f32>,
    /// Candidate strategies for improving quality.
    pub improvement_strategies: Vec<QualityImprovementStrategy>,
}
293
/// Collected quality-related measurement streams.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QualityMetrics {
    /// Per-phoneme accuracy samples.
    pub phoneme_accuracy: VecDeque<f32>,
    /// Per-word accuracy samples.
    pub word_accuracy: VecDeque<f32>,
    /// Naturalness rating samples.
    pub naturalness_scores: VecDeque<f32>,
    /// User satisfaction rating samples.
    pub user_satisfaction: VecDeque<f32>,
    /// Occurrence count per observed error pattern, keyed by pattern.
    pub error_patterns: HashMap<String, usize>,
}
308
/// A named plan for improving one aspect of output quality.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QualityImprovementStrategy {
    /// Human-readable strategy name.
    pub name: String,
    /// Which quality aspect the strategy targets.
    pub target_aspect: String,
    /// Concrete actions the strategy consists of.
    pub actions: Vec<String>,
    /// Predicted quality improvement if applied.
    pub expected_improvement: f32,
    /// Relative implementation complexity (higher = more complex).
    pub complexity: u32,
}
323
324impl RealTimeOptimizer {
326 pub fn new(language: LanguageCode, config: OptimizerConfig) -> Self {
328 Self {
329 language,
330 metrics_collector: Arc::new(Mutex::new(MetricsCollector::new())),
331 optimization_strategies: Vec::new(),
332 optimization_state: OptimizationState::default(),
333 optimization_history: VecDeque::new(),
334 config,
335 }
336 }
337
338 pub fn add_strategy(&mut self, strategy: Box<dyn OptimizationStrategy>) {
340 self.optimization_strategies.push(strategy);
341
342 self.optimization_strategies
344 .sort_by_key(|b| std::cmp::Reverse(b.priority()));
345 }
346
347 pub fn record_performance(&self, latency: Duration, accuracy: f32, throughput: f32) {
349 let mut collector = self
350 .metrics_collector
351 .lock()
352 .expect("lock should not be poisoned");
353 collector.add_measurement(latency, accuracy, throughput);
354 }
355
356 pub fn optimize(&mut self) -> Result<Vec<OptimizationRecommendation>> {
358 let metrics = self
359 .metrics_collector
360 .lock()
361 .expect("lock should not be poisoned")
362 .clone();
363
364 if metrics.latency_measurements.len() < self.config.min_data_points {
366 return Ok(Vec::new());
367 }
368
369 let mut recommendations = Vec::new();
370
371 for strategy in &self.optimization_strategies {
373 if strategy.is_applicable(&metrics) {
374 let recommendation = strategy.analyze_metrics(&metrics);
375 recommendations.push(recommendation);
376 }
377 }
378
379 recommendations.sort_by(|a, b| {
381 let priority_cmp = b.priority.cmp(&a.priority);
382 if priority_cmp == std::cmp::Ordering::Equal {
383 b.confidence
384 .partial_cmp(&a.confidence)
385 .unwrap_or(std::cmp::Ordering::Equal)
386 } else {
387 priority_cmp
388 }
389 });
390
391 if self.config.enable_auto_optimization {
393 let max_optimizations = self.config.max_optimizations_per_interval;
394 for recommendation in recommendations.iter().take(max_optimizations) {
395 if let Err(e) = self.apply_optimization(recommendation) {
396 eprintln!("Failed to apply optimization: {e}");
397 }
398 }
399 }
400
401 Ok(recommendations)
402 }
403
404 fn apply_optimization(&mut self, recommendation: &OptimizationRecommendation) -> Result<()> {
406 let performance_before = self.capture_performance_snapshot();
407
408 let strategy_name = &recommendation.strategy_name;
410 let strategy = self
411 .optimization_strategies
412 .iter()
413 .find(|s| s.name() == strategy_name)
414 .ok_or_else(|| G2pError::ConfigError(format!("Strategy not found: {strategy_name}")))?;
415
416 let result = strategy.apply_optimization(recommendation);
417 let success = result.is_ok();
418
419 let entry = OptimizationEntry {
421 timestamp: SystemTime::now(),
422 optimization: recommendation.clone(),
423 performance_before,
424 performance_after: None, success,
426 notes: if success {
427 "Applied successfully".to_string()
428 } else {
429 format!("Failed: {result:?}")
430 },
431 };
432
433 self.optimization_history.push_back(entry);
434
435 if self.optimization_history.len() > 1000 {
437 self.optimization_history.pop_front();
438 }
439
440 result
441 }
442
443 fn capture_performance_snapshot(&self) -> PerformanceSnapshot {
445 let metrics = self
446 .metrics_collector
447 .lock()
448 .expect("lock should not be poisoned");
449
450 PerformanceSnapshot {
451 avg_latency_ms: metrics.average_latency().as_millis() as f32,
452 avg_accuracy: metrics.average_accuracy(),
453 avg_throughput: metrics.average_throughput(),
454 avg_memory_mb: metrics.average_memory() as f32 / (1024.0 * 1024.0),
455 avg_cpu_percent: metrics.average_cpu(),
456 quality_score: metrics.average_quality(),
457 timestamp: SystemTime::now(),
458 }
459 }
460
461 pub fn get_effectiveness_report(&self) -> OptimizationEffectivenessReport {
463 let mut strategy_effectiveness = HashMap::new();
464 let mut total_improvements = 0;
465 let mut successful_optimizations = 0;
466
467 for entry in &self.optimization_history {
468 let strategy_name = &entry.optimization.strategy_name;
469
470 if entry.success {
471 successful_optimizations += 1;
472
473 if let Some(after) = &entry.performance_after {
474 let improvement = self.calculate_improvement(&entry.performance_before, after);
475
476 let effectiveness_entry = strategy_effectiveness
477 .entry(strategy_name.clone())
478 .or_insert(StrategyEffectiveness::default());
479
480 effectiveness_entry.total_applications += 1;
481 effectiveness_entry.successful_applications += 1;
482 effectiveness_entry.average_improvement += improvement;
483
484 if improvement > 0.0 {
485 total_improvements += 1;
486 }
487 }
488 } else {
489 let effectiveness_entry = strategy_effectiveness
490 .entry(strategy_name.clone())
491 .or_insert(StrategyEffectiveness::default());
492
493 effectiveness_entry.total_applications += 1;
494 }
495 }
496
497 for effectiveness in strategy_effectiveness.values_mut() {
499 if effectiveness.successful_applications > 0 {
500 effectiveness.average_improvement /= effectiveness.successful_applications as f32;
501 }
502 }
503
504 OptimizationEffectivenessReport {
505 total_optimizations: self.optimization_history.len(),
506 successful_optimizations,
507 total_improvements,
508 strategy_effectiveness,
509 overall_success_rate: if self.optimization_history.is_empty() {
510 0.0
511 } else {
512 successful_optimizations as f32 / self.optimization_history.len() as f32
513 },
514 }
515 }
516
517 fn calculate_improvement(
519 &self,
520 before: &PerformanceSnapshot,
521 after: &PerformanceSnapshot,
522 ) -> f32 {
523 let latency_improvement =
524 (before.avg_latency_ms - after.avg_latency_ms) / before.avg_latency_ms;
525 let accuracy_improvement = (after.avg_accuracy - before.avg_accuracy) / before.avg_accuracy;
526 let throughput_improvement =
527 (after.avg_throughput - before.avg_throughput) / before.avg_throughput;
528
529 (latency_improvement * 0.3 + accuracy_improvement * 0.4 + throughput_improvement * 0.3)
531 * 100.0
532 }
533}
534
/// Aggregate report over the optimizer's recorded history.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OptimizationEffectivenessReport {
    /// Number of history entries considered.
    pub total_optimizations: usize,
    /// Entries whose application succeeded.
    pub successful_optimizations: usize,
    /// Entries whose measured improvement was positive.
    pub total_improvements: usize,
    /// Per-strategy breakdown, keyed by strategy name.
    pub strategy_effectiveness: HashMap<String, StrategyEffectiveness>,
    /// successful / total; 0.0 when the history is empty.
    pub overall_success_rate: f32,
}
549
/// Per-strategy effectiveness tally within a report.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct StrategyEffectiveness {
    /// How many times the strategy was applied.
    pub total_applications: usize,
    /// How many applications succeeded with measurable results.
    pub successful_applications: usize,
    /// Mean measured improvement (percent) over successful applications.
    pub average_improvement: f32,
}
560
561impl MetricsCollector {
563 pub fn new() -> Self {
565 Self {
566 latency_measurements: VecDeque::new(),
567 accuracy_measurements: VecDeque::new(),
568 throughput_measurements: VecDeque::new(),
569 error_rates: VecDeque::new(),
570 memory_usage: VecDeque::new(),
571 cpu_usage: VecDeque::new(),
572 quality_scores: VecDeque::new(),
573 user_satisfaction: VecDeque::new(),
574 }
575 }
576
577 pub fn add_measurement(&mut self, latency: Duration, accuracy: f32, throughput: f32) {
579 self.latency_measurements.push_back(latency);
580 self.accuracy_measurements.push_back(accuracy);
581 self.throughput_measurements.push_back(throughput);
582
583 const MAX_MEASUREMENTS: usize = 1000;
585
586 if self.latency_measurements.len() > MAX_MEASUREMENTS {
587 self.latency_measurements.pop_front();
588 }
589 if self.accuracy_measurements.len() > MAX_MEASUREMENTS {
590 self.accuracy_measurements.pop_front();
591 }
592 if self.throughput_measurements.len() > MAX_MEASUREMENTS {
593 self.throughput_measurements.pop_front();
594 }
595 }
596
597 pub fn average_latency(&self) -> Duration {
599 if self.latency_measurements.is_empty() {
600 return Duration::from_millis(0);
601 }
602
603 let total: Duration = self.latency_measurements.iter().sum();
604 total / self.latency_measurements.len() as u32
605 }
606
607 pub fn average_accuracy(&self) -> f32 {
609 if self.accuracy_measurements.is_empty() {
610 return 0.0;
611 }
612
613 self.accuracy_measurements.iter().sum::<f32>() / self.accuracy_measurements.len() as f32
614 }
615
616 pub fn average_throughput(&self) -> f32 {
618 if self.throughput_measurements.is_empty() {
619 return 0.0;
620 }
621
622 self.throughput_measurements.iter().sum::<f32>() / self.throughput_measurements.len() as f32
623 }
624
625 pub fn average_memory(&self) -> u64 {
627 if self.memory_usage.is_empty() {
628 return 0;
629 }
630
631 self.memory_usage.iter().sum::<u64>() / self.memory_usage.len() as u64
632 }
633
634 pub fn average_cpu(&self) -> f32 {
636 if self.cpu_usage.is_empty() {
637 return 0.0;
638 }
639
640 self.cpu_usage.iter().sum::<f32>() / self.cpu_usage.len() as f32
641 }
642
643 pub fn average_quality(&self) -> f32 {
645 if self.quality_scores.is_empty() {
646 return 0.0;
647 }
648
649 self.quality_scores.iter().sum::<f32>() / self.quality_scores.len() as f32
650 }
651}
652
653impl OptimizationStrategy for LatencyOptimizationStrategy {
655 fn name(&self) -> &str {
656 "LatencyOptimization"
657 }
658
659 fn analyze_metrics(&self, metrics: &MetricsCollector) -> OptimizationRecommendation {
660 let avg_latency_ms = metrics.average_latency().as_millis() as f32;
661 let target_latency_ms = self.target_latency_ms;
662
663 let mut parameter_adjustments = HashMap::new();
664
665 if avg_latency_ms > target_latency_ms {
666 parameter_adjustments.insert(
668 "model_complexity".to_string(),
669 ParameterAdjustment {
670 parameter_name: "model_complexity".to_string(),
671 current_value: 1.0,
672 recommended_value: 0.8,
673 adjustment_type: AdjustmentType::Decrease,
674 confidence: 0.7,
675 },
676 );
677
678 parameter_adjustments.insert(
680 "cache_size".to_string(),
681 ParameterAdjustment {
682 parameter_name: "cache_size".to_string(),
683 current_value: 1000.0,
684 recommended_value: 2000.0,
685 adjustment_type: AdjustmentType::Increase,
686 confidence: 0.8,
687 },
688 );
689 }
690
691 let expected_improvement = ExpectedImprovement {
692 latency_improvement: if avg_latency_ms > target_latency_ms {
693 20.0
694 } else {
695 0.0
696 },
697 accuracy_improvement: -5.0, throughput_improvement: 15.0,
699 memory_reduction: -10.0, cpu_reduction: 10.0,
701 quality_improvement: -5.0, };
703
704 OptimizationRecommendation {
705 strategy_name: self.name().to_string(),
706 optimization_type: OptimizationType::LatencyOptimization,
707 target_parameters: parameter_adjustments,
708 expected_improvement,
709 confidence: 0.75,
710 priority: 3,
711 }
712 }
713
714 fn apply_optimization(&self, _recommendation: &OptimizationRecommendation) -> Result<()> {
715 Ok(())
717 }
718
719 fn is_applicable(&self, metrics: &MetricsCollector) -> bool {
720 !metrics.latency_measurements.is_empty()
721 }
722
723 fn priority(&self) -> u32 {
724 3
725 }
726}
727
728impl OptimizationStrategy for AccuracyOptimizationStrategy {
730 fn name(&self) -> &str {
731 "AccuracyOptimization"
732 }
733
734 fn analyze_metrics(&self, metrics: &MetricsCollector) -> OptimizationRecommendation {
735 let avg_accuracy = metrics.average_accuracy();
736 let target_accuracy = self.target_accuracy;
737
738 let mut parameter_adjustments = HashMap::new();
739
740 if avg_accuracy < target_accuracy {
741 parameter_adjustments.insert(
743 "model_complexity".to_string(),
744 ParameterAdjustment {
745 parameter_name: "model_complexity".to_string(),
746 current_value: 0.8,
747 recommended_value: 1.0,
748 adjustment_type: AdjustmentType::Increase,
749 confidence: 0.8,
750 },
751 );
752
753 parameter_adjustments.insert(
755 "ensemble_enabled".to_string(),
756 ParameterAdjustment {
757 parameter_name: "ensemble_enabled".to_string(),
758 current_value: 0.0,
759 recommended_value: 1.0,
760 adjustment_type: AdjustmentType::SetValue,
761 confidence: 0.9,
762 },
763 );
764 }
765
766 let expected_improvement = ExpectedImprovement {
767 latency_improvement: -15.0, accuracy_improvement: if avg_accuracy < target_accuracy {
769 10.0
770 } else {
771 0.0
772 },
773 throughput_improvement: -10.0, memory_reduction: -20.0, cpu_reduction: -15.0, quality_improvement: 15.0,
777 };
778
779 OptimizationRecommendation {
780 strategy_name: self.name().to_string(),
781 optimization_type: OptimizationType::AccuracyOptimization,
782 target_parameters: parameter_adjustments,
783 expected_improvement,
784 confidence: 0.8,
785 priority: 4,
786 }
787 }
788
789 fn apply_optimization(&self, _recommendation: &OptimizationRecommendation) -> Result<()> {
790 Ok(())
792 }
793
794 fn is_applicable(&self, metrics: &MetricsCollector) -> bool {
795 !metrics.accuracy_measurements.is_empty()
796 }
797
798 fn priority(&self) -> u32 {
799 4
800 }
801}
802
803impl Default for OptimizationState {
805 fn default() -> Self {
806 Self {
807 current_parameters: HashMap::new(),
808 last_optimization: None,
809 active_optimizations: Vec::new(),
810 effectiveness_scores: HashMap::new(),
811 stability_score: 1.0,
812 }
813 }
814}
815
impl Default for OptimizerConfig {
    /// Conservative defaults: auto-optimization off, 5-minute interval,
    /// and 100 samples required before any pass runs.
    fn default() -> Self {
        Self {
            enable_auto_optimization: false,
            // 5 minutes between optimization passes.
            optimization_interval: Duration::from_secs(300),
            min_data_points: 100,
            max_optimizations_per_interval: 3,
            // Percent improvement considered meaningful.
            improvement_threshold: 5.0,
            // Percent regression that should trigger rollback.
            rollback_threshold: -10.0,
            conservative_mode: true,
        }
    }
}
829
impl Default for ExpectedImprovement {
    /// All-zero improvement: the neutral "no expected change" value.
    fn default() -> Self {
        Self {
            latency_improvement: 0.0,
            accuracy_improvement: 0.0,
            throughput_improvement: 0.0,
            memory_reduction: 0.0,
            cpu_reduction: 0.0,
            quality_improvement: 0.0,
        }
    }
}
842
#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Duration;

    /// Averages over a few samples match hand-computed means.
    #[test]
    fn test_metrics_collector() {
        let mut collector = MetricsCollector::new();

        collector.add_measurement(Duration::from_millis(100), 0.8, 50.0);
        collector.add_measurement(Duration::from_millis(120), 0.85, 45.0);
        collector.add_measurement(Duration::from_millis(110), 0.82, 48.0);

        assert_eq!(collector.latency_measurements.len(), 3);
        // (0.8 + 0.85 + 0.82) / 3 ≈ 0.823
        assert!((collector.average_accuracy() - 0.823).abs() < 0.01);
        // (50 + 45 + 48) / 3 ≈ 47.67
        assert!((collector.average_throughput() - 47.67).abs() < 0.1);
    }

    /// Strategies can be registered and measurements recorded without error.
    #[test]
    fn test_real_time_optimizer() {
        let config = OptimizerConfig::default();
        let mut optimizer = RealTimeOptimizer::new(LanguageCode::EnUs, config);

        let latency_strategy = Box::new(LatencyOptimizationStrategy {
            target_latency_ms: 50.0,
            aggressive_mode: false,
        });
        optimizer.add_strategy(latency_strategy);

        optimizer.record_performance(Duration::from_millis(100), 0.8, 50.0);
        optimizer.record_performance(Duration::from_millis(120), 0.85, 45.0);

        assert_eq!(optimizer.optimization_strategies.len(), 1);
    }

    /// Over-budget latency yields a latency recommendation with positive
    /// expected latency improvement.
    #[test]
    fn test_latency_optimization_strategy() {
        let strategy = LatencyOptimizationStrategy {
            target_latency_ms: 50.0,
            aggressive_mode: false,
        };

        let mut metrics = MetricsCollector::new();
        // 100 ms sample is above the 50 ms target.
        metrics.add_measurement(Duration::from_millis(100), 0.8, 50.0);

        assert!(strategy.is_applicable(&metrics));

        let recommendation = strategy.analyze_metrics(&metrics);
        assert_eq!(
            recommendation.optimization_type,
            OptimizationType::LatencyOptimization
        );
        assert!(recommendation.expected_improvement.latency_improvement > 0.0);
    }

    /// Below-target accuracy yields an accuracy recommendation with positive
    /// expected accuracy improvement.
    #[test]
    fn test_accuracy_optimization_strategy() {
        let strategy = AccuracyOptimizationStrategy {
            target_accuracy: 0.9,
            quality_priority: 0.8,
        };

        let mut metrics = MetricsCollector::new();
        // 0.7 accuracy sample is below the 0.9 target.
        metrics.add_measurement(Duration::from_millis(100), 0.7, 50.0);
        assert!(strategy.is_applicable(&metrics));

        let recommendation = strategy.analyze_metrics(&metrics);
        assert_eq!(
            recommendation.optimization_type,
            OptimizationType::AccuracyOptimization
        );
        assert!(recommendation.expected_improvement.accuracy_improvement > 0.0);
    }

    /// Recommendation fields round-trip through construction.
    #[test]
    fn test_optimization_recommendation() {
        let recommendation = OptimizationRecommendation {
            strategy_name: "TestStrategy".to_string(),
            optimization_type: OptimizationType::LatencyOptimization,
            target_parameters: HashMap::new(),
            expected_improvement: ExpectedImprovement::default(),
            confidence: 0.8,
            priority: 3,
        };

        assert_eq!(recommendation.strategy_name, "TestStrategy");
        assert_eq!(
            recommendation.optimization_type,
            OptimizationType::LatencyOptimization
        );
        assert_eq!(recommendation.confidence, 0.8);
    }

    /// Snapshot fields round-trip through construction.
    #[test]
    fn test_performance_snapshot() {
        let snapshot = PerformanceSnapshot {
            avg_latency_ms: 100.0,
            avg_accuracy: 0.85,
            avg_throughput: 50.0,
            avg_memory_mb: 256.0,
            avg_cpu_percent: 45.0,
            quality_score: 0.8,
            timestamp: SystemTime::now(),
        };

        assert!(snapshot.avg_latency_ms > 0.0);
        assert!(snapshot.avg_accuracy > 0.0);
        assert!(snapshot.quality_score > 0.0);
    }
}
955}