1use super::config::*;
8use super::optimizer::{Adaptation, AdaptationPriority, AdaptationType, StreamingDataPoint};
9
10use scirs2_core::numeric::Float;
11use serde::{Deserialize, Serialize};
12use std::collections::{HashMap, VecDeque};
13use std::time::{Duration, Instant};
14
/// Online anomaly detector that fuses statistical and ML detectors through an ensemble,
/// tracks detection history/false positives, and triggers automated responses.
pub struct AnomalyDetector<A: Float + Send + Sync> {
    /// Anomaly-detection configuration (threshold, detection method, response strategy).
    config: AnomalyConfig,
    /// Statistical detectors (e.g. "zscore", "iqr") keyed by name.
    statistical_detectors: HashMap<String, Box<dyn StatisticalAnomalyDetector<A>>>,
    /// Machine-learning detectors keyed by name.
    ml_detectors: HashMap<String, Box<dyn MLAnomalyDetector<A>>>,
    /// Combines per-detector results into a single verdict.
    ensemble_detector: EnsembleAnomalyDetector<A>,
    /// Adapts per-detector thresholds over time.
    threshold_manager: AdaptiveThresholdManager<A>,
    /// Bounded history of confirmed anomaly events (capped at 10,000 entries).
    anomaly_history: VecDeque<AnomalyEvent<A>>,
    /// Records false positives and the patterns they form.
    false_positive_tracker: FalsePositiveTracker<A>,
    /// Executes response actions when anomalies are confirmed.
    response_system: AnomalyResponseSystem<A>,
}
34
/// A single confirmed anomaly, with the evidence and context captured at detection time.
#[derive(Debug, Clone)]
pub struct AnomalyEvent<A: Float + Send + Sync> {
    /// Event identifier (derived from history length; see `generate_event_id`).
    pub id: u64,
    /// When the anomaly was detected.
    pub timestamp: Instant,
    /// Category of the anomaly.
    pub anomaly_type: AnomalyType,
    /// Assessed severity.
    pub severity: AnomalySeverity,
    /// Detector confidence score.
    pub confidence: A,
    /// The data point that triggered the detection.
    pub data_point: StreamingDataPoint<A>,
    /// Name of the detector (or "ensemble") that raised the event.
    pub detector_name: String,
    /// Raw anomaly score reported by the detector.
    pub anomaly_score: A,
    /// Snapshot of surrounding system state at detection time.
    pub context: AnomalyContext<A>,
    /// Names of response actions taken for this event.
    pub response_actions: Vec<String>,
}
59
/// Categories of anomalies the system can report.
///
/// Derives `Eq + Hash` so it can key response-strategy maps.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AnomalyType {
    /// Value far outside the learned statistical distribution.
    StatisticalOutlier,
    /// Change in an established pattern.
    PatternChange,
    /// Anomaly in the time dimension (ordering, timing).
    TemporalAnomaly,
    /// Anomaly in the spatial/feature dimension.
    SpatialAnomaly,
    /// Normal in isolation but anomalous in its context.
    ContextualAnomaly,
    /// A group of points that is anomalous together.
    CollectiveAnomaly,
    /// A single anomalous point.
    PointAnomaly,
    /// Data-quality issue (corruption, missing values, etc.).
    DataQualityAnomaly,
    /// Anomalous system/model performance.
    PerformanceAnomaly,
    /// User-defined category.
    Custom(String),
}
84
/// Severity levels, ordered from least to most severe (`Low < Medium < High < Critical`
/// via the derived `Ord`).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum AnomalySeverity {
    /// Informational; no action usually required.
    Low,
    /// Worth monitoring.
    Medium,
    /// Requires attention.
    High,
    /// Requires immediate action.
    Critical,
}
97
/// System state snapshot attached to each anomaly event for later diagnosis.
#[derive(Debug, Clone)]
pub struct AnomalyContext<A: Float + Send + Sync> {
    /// Per-feature statistics over the recent window.
    pub recent_statistics: DataStatistics<A>,
    /// Recent model/system performance readings.
    pub performance_metrics: Vec<A>,
    /// Recent resource-usage readings.
    pub resource_usage: Vec<A>,
    /// Concept-drift indicator values.
    pub drift_indicators: Vec<A>,
    /// Elapsed time since the previous recorded anomaly.
    pub time_since_last_anomaly: Duration,
}
112
/// Per-feature descriptive statistics (one entry per feature dimension).
#[derive(Debug, Clone)]
pub struct DataStatistics<A: Float + Send + Sync> {
    /// Per-feature means.
    pub means: Vec<A>,
    /// Per-feature standard deviations.
    pub std_devs: Vec<A>,
    /// Per-feature minima.
    pub min_values: Vec<A>,
    /// Per-feature maxima.
    pub max_values: Vec<A>,
    /// Per-feature medians.
    pub medians: Vec<A>,
    /// Per-feature skewness.
    pub skewness: Vec<A>,
    /// Per-feature kurtosis.
    pub kurtosis: Vec<A>,
}
131
/// Contract for lightweight statistical detectors that score streaming points online.
pub trait StatisticalAnomalyDetector<A: Float + Send + Sync>: Send + Sync {
    /// Scores `data_point` against the detector's current state and classifies it.
    fn detect_anomaly(
        &mut self,
        data_point: &StreamingDataPoint<A>,
    ) -> Result<AnomalyDetectionResult<A>, String>;

    /// Folds `data_point` into the detector's running statistics.
    fn update(&mut self, data_point: &StreamingDataPoint<A>) -> Result<(), String>;

    /// Clears all learned state.
    fn reset(&mut self);

    /// Stable identifier used as the registry key.
    fn name(&self) -> String;

    /// Current decision threshold.
    fn get_threshold(&self) -> A;

    /// Overrides the decision threshold.
    fn set_threshold(&mut self, threshold: A);
}
155
/// Contract for model-based detectors that support batch training plus incremental updates.
pub trait MLAnomalyDetector<A: Float + Send + Sync>: Send + Sync {
    /// Scores `data_point` with the current model and classifies it.
    fn detect_anomaly(
        &mut self,
        data_point: &StreamingDataPoint<A>,
    ) -> Result<AnomalyDetectionResult<A>, String>;

    /// Trains (or retrains) the model from a batch of points.
    fn train(&mut self, training_data: &[StreamingDataPoint<A>]) -> Result<(), String>;

    /// Incorporates a single new point without a full retrain.
    fn update_incremental(&mut self, data_point: &StreamingDataPoint<A>) -> Result<(), String>;

    /// Reports the model's quality/latency metrics.
    fn get_performance_metrics(&self) -> MLModelMetrics<A>;

    /// Stable identifier used as the registry key.
    fn name(&self) -> String;
}
176
/// Outcome of a single detector (or ensemble) evaluation of one data point.
#[derive(Debug, Clone)]
pub struct AnomalyDetectionResult<A: Float + Send + Sync> {
    /// Whether the point is classified as anomalous.
    pub is_anomaly: bool,
    /// Raw anomaly score (higher means more anomalous).
    pub anomaly_score: A,
    /// Confidence in the classification.
    pub confidence: A,
    /// Anomaly category, when classified as anomalous.
    pub anomaly_type: Option<AnomalyType>,
    /// Assessed severity.
    pub severity: AnomalySeverity,
    /// Detector-specific auxiliary values.
    pub metadata: HashMap<String, A>,
}
193
/// Quality and latency metrics for an ML anomaly-detection model.
#[derive(Debug, Clone)]
pub struct MLModelMetrics<A: Float + Send + Sync> {
    /// Classification accuracy.
    pub accuracy: A,
    /// Precision (fraction of flagged points that were true anomalies).
    pub precision: A,
    /// Recall (fraction of true anomalies that were flagged).
    pub recall: A,
    /// Harmonic mean of precision and recall.
    pub f1_score: A,
    /// Area under the ROC curve.
    pub auc_roc: A,
    /// Fraction of normal points incorrectly flagged.
    pub false_positive_rate: A,
    /// Wall-clock time spent training.
    pub training_time: Duration,
    /// Wall-clock time per inference.
    pub inference_time: Duration,
}
214
/// Aggregates individual detector verdicts into one ensemble decision.
pub struct EnsembleAnomalyDetector<A: Float + Send + Sync> {
    /// Most recent per-detector results, keyed by detector name.
    detector_results: HashMap<String, AnomalyDetectionResult<A>>,
    /// How individual verdicts are combined.
    voting_strategy: EnsembleVotingStrategy,
    /// Per-detector weights (used by weighted strategies).
    detector_weights: HashMap<String, A>,
    /// Per-detector accuracy/latency tracking.
    detector_performance: HashMap<String, DetectorPerformance<A>>,
    /// Ensemble tuning parameters.
    ensemble_config: EnsembleConfig<A>,
}
228
/// Strategies for combining individual detector votes into an ensemble verdict.
#[derive(Debug, Clone)]
pub enum EnsembleVotingStrategy {
    /// Anomaly when more than half of the detectors agree.
    Majority,
    /// Weighted vote using per-detector weights.
    Weighted,
    /// Anomaly when the highest individual score exceeds the ensemble threshold.
    MaxScore,
    /// Decision based on the mean score.
    AverageScore,
    /// Decision based on the median score.
    MedianScore,
    /// Strategy chosen dynamically from detector performance.
    Adaptive,
    /// Meta-model over the detector outputs.
    Stacking,
}
247
/// Rolling performance profile for a single detector.
#[derive(Debug, Clone)]
pub struct DetectorPerformance<A: Float + Send + Sync> {
    /// Accuracy over the recent evaluation window.
    pub recent_accuracy: A,
    /// Accuracy over all observed history.
    pub historical_accuracy: A,
    /// Fraction of normal points incorrectly flagged.
    pub false_positive_rate: A,
    /// Fraction of true anomalies missed.
    pub false_negative_rate: A,
    /// Typical latency from event to detection.
    pub detection_latency: Duration,
    /// Aggregate reliability used for ensemble weighting.
    pub reliability_score: A,
}
264
/// Tuning parameters for the ensemble combiner.
#[derive(Debug, Clone)]
pub struct EnsembleConfig<A: Float + Send + Sync> {
    /// Minimum number of agreeing detectors for consensus-based strategies.
    pub min_consensus: usize,
    /// Score threshold for score-based strategies.
    pub ensemble_threshold: A,
    /// Whether detector weights are updated from observed performance.
    pub dynamic_weighting: bool,
    /// Number of recent results used when evaluating detectors.
    pub evaluation_window: usize,
    /// Whether detectors are selected per-context.
    pub context_based_selection: bool,
}
279
/// Maintains and adapts per-detector decision thresholds from performance feedback.
pub struct AdaptiveThresholdManager<A: Float + Send + Sync> {
    /// Current threshold per detector name.
    thresholds: HashMap<String, A>,
    /// How thresholds are adapted over time.
    adaptation_strategy: ThresholdAdaptationStrategy,
    /// Recent confusion-matrix feedback used to drive adaptation.
    performance_feedback: VecDeque<ThresholdPerformanceFeedback<A>>,
    /// (min, max) allowed threshold per detector name.
    threshold_bounds: HashMap<String, (A, A)>,
    /// Learning-rate/momentum parameters for the adaptation step.
    adaptation_params: ThresholdAdaptationParams<A>,
}
293
/// Policies for adapting detection thresholds.
#[derive(Debug, Clone)]
pub enum ThresholdAdaptationStrategy {
    /// Never change the threshold.
    Fixed,
    /// Adjust from observed precision/recall feedback.
    PerformanceBased,
    /// Track a quantile of the score distribution.
    QuantileBased { quantile: f64 },
    /// Optimize a point on the ROC curve.
    ROCOptimized,
    /// Optimize a point on the precision-recall curve.
    PROptimized,
    /// Keep the false-positive rate at a target value.
    FPRControlled { target_fpr: f64 },
    /// Follow shifts in the underlying score distribution.
    DistributionAdaptive,
}
312
/// Confusion-matrix feedback for one detector at one threshold setting.
#[derive(Debug, Clone)]
pub struct ThresholdPerformanceFeedback<A: Float + Send + Sync> {
    /// Detector this feedback applies to.
    pub detector_name: String,
    /// Threshold in effect when the counts were gathered.
    pub threshold: A,
    /// Correctly flagged anomalies.
    pub true_positives: usize,
    /// Normal points incorrectly flagged.
    pub false_positives: usize,
    /// Normal points correctly passed.
    pub true_negatives: usize,
    /// Anomalies missed.
    pub false_negatives: usize,
    /// When the counts were recorded.
    pub timestamp: Instant,
}
331
/// Step-size parameters controlling threshold adaptation.
#[derive(Debug, Clone)]
pub struct ThresholdAdaptationParams<A: Float + Send + Sync> {
    /// Gradient-style step size.
    pub learning_rate: A,
    /// Momentum applied to successive adjustments.
    pub momentum: A,
    /// Smallest adjustment worth applying.
    pub min_change: A,
    /// Largest adjustment allowed in one step.
    pub max_change: A,
    /// Adapt once per this many observations.
    pub adaptation_frequency: usize,
}
346
/// Tracks false positives, their rate, and recurring patterns, and selects mitigations.
pub struct FalsePositiveTracker<A: Float + Send + Sync> {
    /// Recorded false-positive events (bounded buffer).
    false_positives: VecDeque<FalsePositiveEvent<A>>,
    /// Rolling false-positive-rate computation.
    fp_rate_calculator: FPRateCalculator<A>,
    /// Patterns mined from recorded false positives.
    fp_patterns: FalsePositivePatterns<A>,
    /// Mitigations available when the FP rate is too high.
    mitigation_strategies: Vec<FPMitigationStrategy>,
}
358
/// A detection later judged to be a false positive, with its original evidence.
#[derive(Debug, Clone)]
pub struct FalsePositiveEvent<A: Float + Send + Sync> {
    /// When the (incorrect) detection occurred.
    pub timestamp: Instant,
    /// The data point that was incorrectly flagged.
    pub data_point: StreamingDataPoint<A>,
    /// Detector that raised the false alarm.
    pub detector_name: String,
    /// Score the detector assigned.
    pub anomaly_score: A,
    /// System context at detection time.
    pub context: AnomalyContext<A>,
}
373
/// Computes the false-positive rate over a sliding window of detection results.
pub struct FPRateCalculator<A: Float + Send + Sync> {
    /// Recent labeled detection results (bounded by `window_size`).
    recent_results: VecDeque<DetectionResult>,
    /// Maximum number of results kept in the window.
    window_size: usize,
    /// Most recently computed FP rate.
    current_fp_rate: A,
    /// FP rate the system aims to stay below.
    target_fp_rate: A,
}
385
/// A single detection outcome, optionally labeled with ground truth.
#[derive(Debug, Clone)]
pub struct DetectionResult {
    /// When the detection was made.
    pub timestamp: Instant,
    /// Whether an anomaly was reported.
    pub anomaly_detected: bool,
    /// True label when available (`None` if unlabeled).
    pub ground_truth: Option<bool>,
    /// Detector that produced the result.
    pub detector_name: String,
}
398
/// Patterns mined from recorded false positives, grouped by dimension.
#[derive(Debug, Clone)]
pub struct FalsePositivePatterns<A: Float + Send + Sync> {
    /// Time-based regularities (periodicity, bursts, trends).
    pub temporal_patterns: Vec<TemporalPattern>,
    /// Feature-level indicators associated with false positives.
    pub feature_patterns: HashMap<String, A>,
    /// Recurring context signatures.
    pub context_patterns: Vec<ContextPattern<A>>,
    /// Per-detector score patterns.
    pub detector_patterns: HashMap<String, Vec<A>>,
}
411
/// A time-based regularity detected in false positives.
#[derive(Debug, Clone)]
pub struct TemporalPattern {
    /// Kind of temporal regularity.
    pub pattern_type: TemporalPatternType,
    /// How strongly the pattern is expressed.
    pub strength: f64,
    /// Cycle length for periodic patterns, if applicable.
    pub period: Option<Duration>,
    /// Confidence that the pattern is real.
    pub confidence: f64,
}
424
/// Kinds of temporal regularities.
#[derive(Debug, Clone)]
pub enum TemporalPatternType {
    /// Repeats on a fixed cycle.
    Periodic,
    /// Tied to particular times (e.g. specific hours).
    TimeSpecific,
    /// Occurs in short clusters.
    Burst,
    /// Gradually increasing or decreasing over time.
    Trend,
}
437
/// A recurring context signature associated with false positives.
#[derive(Debug, Clone)]
pub struct ContextPattern<A: Float + Send + Sync> {
    /// Feature values characterizing the context.
    pub context_features: Vec<A>,
    /// How often this context has been observed.
    pub frequency: usize,
    /// How reliably this context predicts a false positive.
    pub reliability: A,
}
448
/// Available mitigations for an excessive false-positive rate.
#[derive(Debug, Clone)]
pub enum FPMitigationStrategy {
    /// Raise detection thresholds.
    ThresholdAdjustment,
    /// Change which features are considered.
    FeatureAdjustment,
    /// Re-weight detectors in the ensemble.
    EnsembleReweighting,
    /// Filter detections occurring in known FP contexts.
    ContextFiltering,
    /// Filter detections matching known FP time patterns.
    TemporalFiltering,
    /// Retrain the underlying models.
    ModelRetraining,
}
465
/// Maps anomaly types to response actions, executes them, and tracks their effectiveness.
pub struct AnomalyResponseSystem<A: Float + Send + Sync> {
    /// Response actions configured per anomaly type.
    response_strategies: HashMap<AnomalyType, Vec<ResponseAction>>,
    /// Queues and runs responses within resource limits.
    response_executor: ResponseExecutor<A>,
    /// Measures how well responses resolve the underlying issue.
    effectiveness_tracker: ResponseEffectivenessTracker<A>,
    /// Rules that escalate when responses are insufficient.
    escalation_rules: Vec<EscalationRule<A>>,
}
477
/// Actions the response system can take when an anomaly is confirmed.
///
/// Derives `PartialEq + Eq + Hash` because actions are used as `HashMap` keys
/// (e.g. in `ResponseEffectivenessTracker::effectiveness_metrics`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ResponseAction {
    /// Record the event only.
    Log,
    /// Notify operators.
    Alert,
    /// Isolate the offending data/source.
    Quarantine,
    /// Adjust model parameters.
    ModelAdjustment,
    /// Raise monitoring intensity.
    IncreaseMonitoring,
    /// Start a recovery procedure.
    TriggerRecovery,
    /// User-defined action.
    Custom(String),
}
496
/// Queues response actions and runs them within configured resource limits.
pub struct ResponseExecutor<A: Float + Send + Sync> {
    /// Responses waiting to run.
    pending_responses: VecDeque<PendingResponse<A>>,
    /// Completed executions, kept for auditing/effectiveness analysis.
    execution_history: VecDeque<ResponseExecution<A>>,
    /// Caps on concurrency, CPU, memory, and execution time.
    resource_limits: ResponseResourceLimits,
}
506
/// A response action scheduled but not yet executed.
#[derive(Debug, Clone)]
pub struct PendingResponse<A: Float + Send + Sync> {
    /// Unique identifier for this pending response.
    pub id: u64,
    /// The anomaly that triggered the response.
    pub anomaly_event: AnomalyEvent<A>,
    /// Action to perform.
    pub action: ResponseAction,
    /// Scheduling priority.
    pub priority: ResponsePriority,
    /// Earliest time the action should run.
    pub scheduled_time: Instant,
    /// Maximum time allowed before the action is abandoned.
    pub timeout: Duration,
}
523
/// Record of one executed response action and its outcome.
#[derive(Debug, Clone)]
pub struct ResponseExecution<A: Float + Send + Sync> {
    /// Unique identifier for this execution.
    pub id: u64,
    /// The response that was executed.
    pub response: PendingResponse<A>,
    /// When execution began.
    pub start_time: Instant,
    /// How long execution took.
    pub duration: Duration,
    /// Whether the action completed successfully.
    pub success: bool,
    /// Failure description, when `success` is false.
    pub error_message: Option<String>,
    /// Resources consumed, keyed by resource name.
    pub resources_consumed: HashMap<String, A>,
}
542
/// Scheduling priority for response actions (`Low < Normal < High < Critical`).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum ResponsePriority {
    /// Background; run when idle.
    Low = 0,
    /// Default priority.
    Normal = 1,
    /// Run ahead of normal work.
    High = 2,
    /// Run immediately.
    Critical = 3,
}
555
/// Resource caps applied to response execution.
#[derive(Debug, Clone)]
pub struct ResponseResourceLimits {
    /// Maximum number of responses running at once.
    pub max_concurrent_responses: usize,
    /// CPU usage ceiling (fraction).
    pub max_cpu_usage: f64,
    /// Memory ceiling in bytes.
    pub max_memory_usage: usize,
    /// Per-response execution time ceiling.
    pub max_execution_time: Duration,
}
568
/// Measures how effective each response action is over time.
pub struct ResponseEffectivenessTracker<A: Float + Send + Sync> {
    /// Aggregate effectiveness per action type.
    effectiveness_metrics: HashMap<ResponseAction, EffectivenessMetrics<A>>,
    /// Individual response outcomes, kept for analysis.
    outcome_tracking: VecDeque<ResponseOutcome<A>>,
    /// Effectiveness trend per action type.
    effectiveness_trends: HashMap<ResponseAction, TrendAnalysis<A>>,
}
578
/// Aggregate effectiveness statistics for one response action type.
#[derive(Debug, Clone)]
pub struct EffectivenessMetrics<A: Float + Send + Sync> {
    /// Fraction of executions that completed successfully.
    pub success_rate: A,
    /// Mean time from trigger to completion.
    pub avg_response_time: Duration,
    /// Fraction of executions that resolved the underlying issue.
    pub resolution_rate: A,
    /// Reduction in false alarms attributable to this action.
    pub false_alarm_reduction: A,
    /// Benefit relative to resource cost.
    pub cost_benefit_ratio: A,
}
593
/// The measured outcome of a single response execution.
#[derive(Debug, Clone)]
pub struct ResponseOutcome<A: Float + Send + Sync> {
    /// The execution being evaluated.
    pub execution: ResponseExecution<A>,
    /// Measurements of what the response achieved.
    pub outcome: OutcomeMeasurement<A>,
    /// Whether additional action is still needed.
    pub follow_up_required: bool,
    /// Free-form notes for future tuning.
    pub lessons_learned: Vec<String>,
}
606
/// Quantified results of a response action.
#[derive(Debug, Clone)]
pub struct OutcomeMeasurement<A: Float + Send + Sync> {
    /// Whether the triggering issue was resolved.
    pub issue_resolved: bool,
    /// Time from response start to resolution.
    pub time_to_resolution: Duration,
    /// Impact of the response on system performance.
    pub performance_impact: A,
    /// Unintended consequences observed.
    pub side_effects: Vec<String>,
    /// Overall effectiveness rating.
    pub effectiveness_score: A,
}
621
/// Direction and strength of a metric's trend over time.
#[derive(Debug, Clone)]
pub struct TrendAnalysis<A: Float + Send + Sync> {
    /// Whether the metric is improving, declining, etc.
    pub trend_direction: TrendDirection,
    /// Size of the change per unit time.
    pub trend_magnitude: A,
    /// Confidence that the trend is real.
    pub trend_confidence: A,
    /// How consistently the trend holds.
    pub trend_stability: A,
}
634
/// Qualitative direction of a trend.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TrendDirection {
    /// Metric is getting better.
    Improving,
    /// Metric is getting worse.
    Declining,
    /// No meaningful change.
    Stable,
    /// Alternating up and down.
    Oscillating,
}
647
/// A named rule that escalates handling when its conditions are met.
#[derive(Debug, Clone)]
pub struct EscalationRule<A: Float + Send + Sync> {
    /// Human-readable rule name.
    pub name: String,
    /// Conditions that must hold to trigger escalation.
    pub conditions: Vec<EscalationCondition<A>>,
    /// Actions taken when the rule fires.
    pub actions: Vec<EscalationAction>,
    /// Urgency of this rule relative to others.
    pub priority: EscalationPriority,
}
660
/// A single threshold-over-window condition for an escalation rule.
#[derive(Debug, Clone)]
pub struct EscalationCondition<A: Float + Send + Sync> {
    /// What is being measured.
    pub condition_type: EscalationConditionType,
    /// Value the measurement must exceed.
    pub threshold: A,
    /// Window over which the measurement is evaluated.
    pub time_window: Duration,
}
671
/// Measurable situations that can trigger escalation.
#[derive(Debug, Clone)]
pub enum EscalationConditionType {
    /// Too many anomalies within the window.
    MultipleAnomalies,
    /// Anomalies of high severity observed.
    HighSeverity,
    /// Automated responses are failing.
    ResponseFailure,
    /// System performance is degrading.
    PerformanceDegradation,
    /// System resources are running out.
    ResourceExhaustion,
}
686
/// Actions taken when an escalation rule fires.
#[derive(Debug, Clone)]
pub enum EscalationAction {
    /// Page/notify an administrator.
    NotifyAdmin,
    /// Switch to the emergency handling protocol.
    EmergencyProtocol,
    /// Shut the system down.
    SystemShutdown,
    /// Fail over to a backup system.
    ActivateBackup,
    /// Provision additional resources.
    IncreaseResources,
}
701
/// Urgency levels for escalation rules (`Normal < Urgent < Emergency`).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum EscalationPriority {
    /// Handle in normal course of operations.
    Normal = 0,
    /// Handle promptly.
    Urgent = 1,
    /// Handle immediately.
    Emergency = 2,
}
712
713impl<A: Float + Default + Clone + std::iter::Sum + Send + Sync + 'static> AnomalyDetector<A> {
714 pub fn new(config: &StreamingConfig) -> Result<Self, String> {
716 let anomaly_config = config.anomaly_config.clone();
717
718 let mut statistical_detectors: HashMap<String, Box<dyn StatisticalAnomalyDetector<A>>> =
719 HashMap::new();
720 let mut ml_detectors: HashMap<String, Box<dyn MLAnomalyDetector<A>>> = HashMap::new();
721
722 statistical_detectors.insert(
724 "zscore".to_string(),
725 Box::new(ZScoreDetector::new(anomaly_config.threshold)?),
726 );
727 statistical_detectors.insert(
728 "iqr".to_string(),
729 Box::new(IQRDetector::new(anomaly_config.threshold)?),
730 );
731
732 match anomaly_config.detection_method {
734 AnomalyDetectionMethod::IsolationForest => {
735 ml_detectors.insert(
736 "isolation_forest".to_string(),
737 Box::new(IsolationForestDetector::new()?),
738 );
739 }
740 AnomalyDetectionMethod::OneClassSVM => {
741 ml_detectors.insert(
742 "one_class_svm".to_string(),
743 Box::new(OneClassSVMDetector::new()?),
744 );
745 }
746 AnomalyDetectionMethod::LocalOutlierFactor => {
747 ml_detectors.insert("lof".to_string(), Box::new(LOFDetector::new()?));
748 }
749 _ => {
750 }
752 }
753
754 let ensemble_detector = EnsembleAnomalyDetector::new(EnsembleVotingStrategy::Weighted)?;
755 let threshold_manager =
756 AdaptiveThresholdManager::new(ThresholdAdaptationStrategy::PerformanceBased)?;
757 let false_positive_tracker = FalsePositiveTracker::new();
758 let response_system = AnomalyResponseSystem::new(&anomaly_config.response_strategy)?;
759
760 Ok(Self {
761 config: anomaly_config,
762 statistical_detectors,
763 ml_detectors,
764 ensemble_detector,
765 threshold_manager,
766 anomaly_history: VecDeque::with_capacity(10000),
767 false_positive_tracker,
768 response_system,
769 })
770 }
771
772 pub fn detect_anomaly(&mut self, data_point: &StreamingDataPoint<A>) -> Result<bool, String> {
774 let mut detection_results = HashMap::new();
775
776 for (name, detector) in &mut self.statistical_detectors {
778 let result = detector.detect_anomaly(data_point)?;
779 detection_results.insert(name.clone(), result);
780 }
781
782 for (name, detector) in &mut self.ml_detectors {
784 let result = detector.detect_anomaly(data_point)?;
785 detection_results.insert(name.clone(), result);
786 }
787
788 let ensemble_result = self.ensemble_detector.combine_results(detection_results)?;
790
791 if ensemble_result.is_anomaly {
793 let anomaly_event = AnomalyEvent {
795 id: self.generate_event_id(),
796 timestamp: Instant::now(),
797 anomaly_type: ensemble_result
798 .anomaly_type
799 .as_ref()
800 .cloned()
801 .unwrap_or(AnomalyType::StatisticalOutlier),
802 severity: ensemble_result.severity.clone(),
803 confidence: ensemble_result.confidence,
804 data_point: data_point.clone(),
805 detector_name: "ensemble".to_string(),
806 anomaly_score: ensemble_result.anomaly_score,
807 context: self.create_anomaly_context(data_point)?,
808 response_actions: Vec::new(),
809 };
810
811 self.record_anomaly(anomaly_event)?;
813
814 self.response_system
816 .trigger_response(&ensemble_result, data_point)?;
817
818 return Ok(true);
819 }
820
821 for detector in self.statistical_detectors.values_mut() {
823 detector.update(data_point)?;
824 }
825
826 for detector in self.ml_detectors.values_mut() {
827 detector.update_incremental(data_point)?;
828 }
829
830 Ok(false)
831 }
832
833 fn generate_event_id(&self) -> u64 {
835 self.anomaly_history.len() as u64 + 1
836 }
837
838 fn create_anomaly_context(
840 &self,
841 data_point: &StreamingDataPoint<A>,
842 ) -> Result<AnomalyContext<A>, String> {
843 let recent_statistics = self.calculate_recent_statistics()?;
845
846 let performance_metrics = vec![
848 A::from(0.8).expect("unwrap failed"),
849 A::from(0.7).expect("unwrap failed"),
850 ];
851
852 let resource_usage = vec![
854 A::from(0.6).expect("unwrap failed"),
855 A::from(0.5).expect("unwrap failed"),
856 ];
857
858 let drift_indicators = vec![A::from(0.1).expect("unwrap failed")];
860
861 let time_since_last_anomaly = if let Some(last_anomaly) = self.anomaly_history.back() {
863 last_anomaly.timestamp.elapsed()
864 } else {
865 Duration::from_secs(3600) };
867
868 Ok(AnomalyContext {
869 recent_statistics,
870 performance_metrics,
871 resource_usage,
872 drift_indicators,
873 time_since_last_anomaly,
874 })
875 }
876
877 fn calculate_recent_statistics(&self) -> Result<DataStatistics<A>, String> {
879 Ok(DataStatistics {
881 means: vec![
882 A::from(0.5).expect("unwrap failed"),
883 A::from(0.3).expect("unwrap failed"),
884 ],
885 std_devs: vec![
886 A::from(0.1).expect("unwrap failed"),
887 A::from(0.15).expect("unwrap failed"),
888 ],
889 min_values: vec![
890 A::from(0.0).expect("unwrap failed"),
891 A::from(0.0).expect("unwrap failed"),
892 ],
893 max_values: vec![
894 A::from(1.0).expect("unwrap failed"),
895 A::from(1.0).expect("unwrap failed"),
896 ],
897 medians: vec![
898 A::from(0.5).expect("unwrap failed"),
899 A::from(0.3).expect("unwrap failed"),
900 ],
901 skewness: vec![
902 A::from(0.0).expect("unwrap failed"),
903 A::from(0.1).expect("unwrap failed"),
904 ],
905 kurtosis: vec![
906 A::from(0.0).expect("unwrap failed"),
907 A::from(0.0).expect("unwrap failed"),
908 ],
909 })
910 }
911
912 fn record_anomaly(&mut self, anomaly_event: AnomalyEvent<A>) -> Result<(), String> {
914 if self.anomaly_history.len() >= 10000 {
915 self.anomaly_history.pop_front();
916 }
917 self.anomaly_history.push_back(anomaly_event);
918 Ok(())
919 }
920
921 pub fn apply_adaptation(&mut self, adaptation: &Adaptation<A>) -> Result<(), String> {
923 if adaptation.adaptation_type == AdaptationType::AnomalyDetection {
924 let threshold_adjustment = adaptation.magnitude;
926
927 for detector in self.statistical_detectors.values_mut() {
928 let current_threshold = detector.get_threshold();
929 let new_threshold = current_threshold + threshold_adjustment;
930 detector.set_threshold(new_threshold);
931 }
932
933 self.ensemble_detector
935 .adjust_sensitivity(threshold_adjustment)?;
936 }
937
938 Ok(())
939 }
940
941 pub fn get_recent_anomalies(&self, count: usize) -> Vec<&AnomalyEvent<A>> {
943 self.anomaly_history.iter().rev().take(count).collect()
944 }
945
946 pub fn get_diagnostics(&self) -> AnomalyDiagnostics {
948 AnomalyDiagnostics {
949 total_anomalies: self.anomaly_history.len(),
950 recent_anomaly_rate: self.calculate_recent_anomaly_rate(),
951 false_positive_rate: self.false_positive_tracker.get_current_fp_rate(),
952 detector_count: self.statistical_detectors.len() + self.ml_detectors.len(),
953 response_success_rate: self.response_system.get_success_rate(),
954 }
955 }
956
957 fn calculate_recent_anomaly_rate(&self) -> f64 {
959 let recent_window = Duration::from_secs(3600); let cutoff_time = Instant::now() - recent_window;
961
962 let recent_count = self
963 .anomaly_history
964 .iter()
965 .filter(|event| event.timestamp > cutoff_time)
966 .count();
967
968 recent_count as f64 / 3600.0 }
970}
971
/// Z-score detector over the sum of each point's features, using Welford's
/// online mean/variance algorithm.
pub struct ZScoreDetector<A: Float + Send + Sync> {
    /// |z| above which a point is flagged.
    threshold: A,
    /// Running mean of observed feature sums.
    running_mean: A,
    /// Welford M2 accumulator (sum of squared deviations), not the variance itself.
    running_variance: A,
    /// Number of points folded in via `update`.
    sample_count: usize,
}
981
982impl<A: Float + Default + Clone + Send + Sync + Send + Sync> ZScoreDetector<A> {
983 fn new(threshold: f64) -> Result<Self, String> {
984 Ok(Self {
985 threshold: A::from(threshold).expect("unwrap failed"),
986 running_mean: A::zero(),
987 running_variance: A::zero(),
988 sample_count: 0,
989 })
990 }
991}
992
993impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> StatisticalAnomalyDetector<A>
994 for ZScoreDetector<A>
995{
996 fn detect_anomaly(
997 &mut self,
998 data_point: &StreamingDataPoint<A>,
999 ) -> Result<AnomalyDetectionResult<A>, String> {
1000 if self.sample_count < 10 {
1001 return Ok(AnomalyDetectionResult {
1003 is_anomaly: false,
1004 anomaly_score: A::zero(),
1005 confidence: A::zero(),
1006 anomaly_type: None,
1007 severity: AnomalySeverity::Low,
1008 metadata: HashMap::new(),
1009 });
1010 }
1011
1012 let feature_sum = data_point.features.iter().cloned().sum::<A>();
1014 let z_score = if self.running_variance > A::zero() {
1015 (feature_sum - self.running_mean) / self.running_variance.sqrt()
1016 } else {
1017 A::zero()
1018 };
1019
1020 let is_anomaly = z_score.abs() > self.threshold;
1021 let anomaly_score = z_score.abs();
1022
1023 Ok(AnomalyDetectionResult {
1024 is_anomaly,
1025 anomaly_score,
1026 confidence: if is_anomaly {
1027 A::from(0.8).expect("unwrap failed")
1028 } else {
1029 A::from(0.2).expect("unwrap failed")
1030 },
1031 anomaly_type: if is_anomaly {
1032 Some(AnomalyType::StatisticalOutlier)
1033 } else {
1034 None
1035 },
1036 severity: if anomaly_score > A::from(3.0).expect("unwrap failed") {
1037 AnomalySeverity::High
1038 } else if anomaly_score > A::from(2.0).expect("unwrap failed") {
1039 AnomalySeverity::Medium
1040 } else {
1041 AnomalySeverity::Low
1042 },
1043 metadata: HashMap::new(),
1044 })
1045 }
1046
1047 fn update(&mut self, data_point: &StreamingDataPoint<A>) -> Result<(), String> {
1048 let feature_sum = data_point.features.iter().cloned().sum::<A>();
1049
1050 self.sample_count += 1;
1052 let delta = feature_sum - self.running_mean;
1053 self.running_mean =
1054 self.running_mean + delta / A::from(self.sample_count).expect("unwrap failed");
1055 let delta2 = feature_sum - self.running_mean;
1056 self.running_variance = self.running_variance + delta * delta2;
1057
1058 Ok(())
1059 }
1060
1061 fn reset(&mut self) {
1062 self.running_mean = A::zero();
1063 self.running_variance = A::zero();
1064 self.sample_count = 0;
1065 }
1066
1067 fn name(&self) -> String {
1068 "zscore".to_string()
1069 }
1070
1071 fn get_threshold(&self) -> A {
1072 self.threshold
1073 }
1074
1075 fn set_threshold(&mut self, threshold: A) {
1076 self.threshold = threshold;
1077 }
1078}
1079
/// Interquartile-range detector over the sum of each point's features,
/// maintained over a sliding window of recent values.
pub struct IQRDetector<A: Float + Send + Sync> {
    /// IQR multiplier for the outlier fences (1.5 is the classic Tukey value).
    threshold: A,
    /// Sliding window of recent feature sums.
    recent_values: VecDeque<A>,
    /// Maximum number of values kept in the window.
    window_size: usize,
}
1086
1087impl<A: Float + Default + Clone + Send + Sync + Send + Sync> IQRDetector<A> {
1088 fn new(threshold: f64) -> Result<Self, String> {
1089 Ok(Self {
1090 threshold: A::from(threshold).expect("unwrap failed"),
1091 recent_values: VecDeque::with_capacity(100),
1092 window_size: 100,
1093 })
1094 }
1095}
1096
1097impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> StatisticalAnomalyDetector<A>
1098 for IQRDetector<A>
1099{
1100 fn detect_anomaly(
1101 &mut self,
1102 data_point: &StreamingDataPoint<A>,
1103 ) -> Result<AnomalyDetectionResult<A>, String> {
1104 if self.recent_values.len() < 20 {
1105 return Ok(AnomalyDetectionResult {
1106 is_anomaly: false,
1107 anomaly_score: A::zero(),
1108 confidence: A::zero(),
1109 anomaly_type: None,
1110 severity: AnomalySeverity::Low,
1111 metadata: HashMap::new(),
1112 });
1113 }
1114
1115 let mut sorted_values: Vec<A> = self.recent_values.iter().cloned().collect();
1117 sorted_values.sort_by(|a, b| a.partial_cmp(b).expect("unwrap failed"));
1118
1119 let q1_idx = sorted_values.len() / 4;
1120 let q3_idx = 3 * sorted_values.len() / 4;
1121 let q1 = sorted_values[q1_idx];
1122 let q3 = sorted_values[q3_idx];
1123 let iqr = q3 - q1;
1124
1125 let lower_bound = q1 - self.threshold * iqr;
1126 let upper_bound = q3 + self.threshold * iqr;
1127
1128 let feature_sum = data_point.features.iter().cloned().sum::<A>();
1129 let is_anomaly = feature_sum < lower_bound || feature_sum > upper_bound;
1130
1131 let distance_from_bounds = if feature_sum < lower_bound {
1132 lower_bound - feature_sum
1133 } else if feature_sum > upper_bound {
1134 feature_sum - upper_bound
1135 } else {
1136 A::zero()
1137 };
1138
1139 Ok(AnomalyDetectionResult {
1140 is_anomaly,
1141 anomaly_score: distance_from_bounds / iqr.max(A::from(1e-8).expect("unwrap failed")),
1142 confidence: if is_anomaly {
1143 A::from(0.7).expect("unwrap failed")
1144 } else {
1145 A::from(0.3).expect("unwrap failed")
1146 },
1147 anomaly_type: if is_anomaly {
1148 Some(AnomalyType::StatisticalOutlier)
1149 } else {
1150 None
1151 },
1152 severity: if distance_from_bounds > iqr * A::from(2.0).expect("unwrap failed") {
1153 AnomalySeverity::High
1154 } else {
1155 AnomalySeverity::Medium
1156 },
1157 metadata: HashMap::new(),
1158 })
1159 }
1160
1161 fn update(&mut self, data_point: &StreamingDataPoint<A>) -> Result<(), String> {
1162 let feature_sum = data_point.features.iter().cloned().sum::<A>();
1163
1164 if self.recent_values.len() >= self.window_size {
1165 self.recent_values.pop_front();
1166 }
1167 self.recent_values.push_back(feature_sum);
1168
1169 Ok(())
1170 }
1171
1172 fn reset(&mut self) {
1173 self.recent_values.clear();
1174 }
1175
1176 fn name(&self) -> String {
1177 "iqr".to_string()
1178 }
1179
1180 fn get_threshold(&self) -> A {
1181 self.threshold
1182 }
1183
1184 fn set_threshold(&mut self, threshold: A) {
1185 self.threshold = threshold;
1186 }
1187}
1188
/// Isolation-forest detector (currently a stub: returns a fixed score).
pub struct IsolationForestDetector<A: Float + Send + Sync> {
    /// Whether `train` has been called.
    model_trained: bool,
    /// Score above which a point is flagged.
    threshold: A,
}
1194
1195impl<A: Float + Default + Send + Sync + Send + Sync> IsolationForestDetector<A> {
1196 fn new() -> Result<Self, String> {
1197 Ok(Self {
1198 model_trained: false,
1199 threshold: A::from(0.5).expect("unwrap failed"),
1200 })
1201 }
1202}
1203
1204impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> MLAnomalyDetector<A>
1205 for IsolationForestDetector<A>
1206{
1207 fn detect_anomaly(
1208 &mut self,
1209 data_point: &StreamingDataPoint<A>,
1210 ) -> Result<AnomalyDetectionResult<A>, String> {
1211 let anomaly_score = A::from(0.3).expect("unwrap failed"); Ok(AnomalyDetectionResult {
1214 is_anomaly: anomaly_score > self.threshold,
1215 anomaly_score,
1216 confidence: A::from(0.6).expect("unwrap failed"),
1217 anomaly_type: Some(AnomalyType::StatisticalOutlier),
1218 severity: AnomalySeverity::Medium,
1219 metadata: HashMap::new(),
1220 })
1221 }
1222
1223 fn train(&mut self, _training_data: &[StreamingDataPoint<A>]) -> Result<(), String> {
1224 self.model_trained = true;
1225 Ok(())
1226 }
1227
1228 fn update_incremental(&mut self, _data_point: &StreamingDataPoint<A>) -> Result<(), String> {
1229 Ok(())
1230 }
1231
1232 fn get_performance_metrics(&self) -> MLModelMetrics<A> {
1233 MLModelMetrics {
1234 accuracy: A::from(0.85).expect("unwrap failed"),
1235 precision: A::from(0.8).expect("unwrap failed"),
1236 recall: A::from(0.75).expect("unwrap failed"),
1237 f1_score: A::from(0.77).expect("unwrap failed"),
1238 auc_roc: A::from(0.88).expect("unwrap failed"),
1239 false_positive_rate: A::from(0.05).expect("unwrap failed"),
1240 training_time: Duration::from_secs(60),
1241 inference_time: Duration::from_millis(10),
1242 }
1243 }
1244
1245 fn name(&self) -> String {
1246 "isolation_forest".to_string()
1247 }
1248}
1249
/// One-class SVM detector (currently a stub: always reports benign).
pub struct OneClassSVMDetector<A: Float + Send + Sync> {
    /// Whether `train` has been called.
    model_trained: bool,
    /// Decision-function threshold.
    threshold: A,
}
1254
1255impl<A: Float + Default + Send + Sync + Send + Sync> OneClassSVMDetector<A> {
1256 fn new() -> Result<Self, String> {
1257 Ok(Self {
1258 model_trained: false,
1259 threshold: A::from(0.0).expect("unwrap failed"),
1260 })
1261 }
1262}
1263
1264impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> MLAnomalyDetector<A>
1265 for OneClassSVMDetector<A>
1266{
1267 fn detect_anomaly(
1268 &mut self,
1269 _data_point: &StreamingDataPoint<A>,
1270 ) -> Result<AnomalyDetectionResult<A>, String> {
1271 Ok(AnomalyDetectionResult {
1273 is_anomaly: false,
1274 anomaly_score: A::from(0.2).expect("unwrap failed"),
1275 confidence: A::from(0.5).expect("unwrap failed"),
1276 anomaly_type: None,
1277 severity: AnomalySeverity::Low,
1278 metadata: HashMap::new(),
1279 })
1280 }
1281
1282 fn train(&mut self, _training_data: &[StreamingDataPoint<A>]) -> Result<(), String> {
1283 self.model_trained = true;
1284 Ok(())
1285 }
1286
1287 fn update_incremental(&mut self, _data_point: &StreamingDataPoint<A>) -> Result<(), String> {
1288 Ok(())
1289 }
1290
1291 fn get_performance_metrics(&self) -> MLModelMetrics<A> {
1292 MLModelMetrics {
1293 accuracy: A::from(0.82).expect("unwrap failed"),
1294 precision: A::from(0.78).expect("unwrap failed"),
1295 recall: A::from(0.73).expect("unwrap failed"),
1296 f1_score: A::from(0.75).expect("unwrap failed"),
1297 auc_roc: A::from(0.85).expect("unwrap failed"),
1298 false_positive_rate: A::from(0.08).expect("unwrap failed"),
1299 training_time: Duration::from_secs(120),
1300 inference_time: Duration::from_millis(5),
1301 }
1302 }
1303
1304 fn name(&self) -> String {
1305 "one_class_svm".to_string()
1306 }
1307}
1308
/// Local-outlier-factor detector (currently a stub: always reports benign).
pub struct LOFDetector<A: Float + Send + Sync> {
    /// Whether `train` has been called.
    model_trained: bool,
    /// LOF score above which a point would be flagged.
    threshold: A,
}
1313
1314impl<A: Float + Default + Send + Sync + Send + Sync> LOFDetector<A> {
1315 fn new() -> Result<Self, String> {
1316 Ok(Self {
1317 model_trained: false,
1318 threshold: A::from(1.5).expect("unwrap failed"),
1319 })
1320 }
1321}
1322
1323impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> MLAnomalyDetector<A>
1324 for LOFDetector<A>
1325{
1326 fn detect_anomaly(
1327 &mut self,
1328 _data_point: &StreamingDataPoint<A>,
1329 ) -> Result<AnomalyDetectionResult<A>, String> {
1330 Ok(AnomalyDetectionResult {
1332 is_anomaly: false,
1333 anomaly_score: A::from(0.1).expect("unwrap failed"),
1334 confidence: A::from(0.4).expect("unwrap failed"),
1335 anomaly_type: None,
1336 severity: AnomalySeverity::Low,
1337 metadata: HashMap::new(),
1338 })
1339 }
1340
1341 fn train(&mut self, _training_data: &[StreamingDataPoint<A>]) -> Result<(), String> {
1342 self.model_trained = true;
1343 Ok(())
1344 }
1345
1346 fn update_incremental(&mut self, _data_point: &StreamingDataPoint<A>) -> Result<(), String> {
1347 Ok(())
1348 }
1349
1350 fn get_performance_metrics(&self) -> MLModelMetrics<A> {
1351 MLModelMetrics {
1352 accuracy: A::from(0.79).expect("unwrap failed"),
1353 precision: A::from(0.76).expect("unwrap failed"),
1354 recall: A::from(0.71).expect("unwrap failed"),
1355 f1_score: A::from(0.73).expect("unwrap failed"),
1356 auc_roc: A::from(0.83).expect("unwrap failed"),
1357 false_positive_rate: A::from(0.12).expect("unwrap failed"),
1358 training_time: Duration::from_secs(90),
1359 inference_time: Duration::from_millis(15),
1360 }
1361 }
1362
1363 fn name(&self) -> String {
1364 "lof".to_string()
1365 }
1366}
1367
1368impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> EnsembleAnomalyDetector<A> {
1371 fn new(voting_strategy: EnsembleVotingStrategy) -> Result<Self, String> {
1372 Ok(Self {
1373 detector_results: HashMap::new(),
1374 voting_strategy,
1375 detector_weights: HashMap::new(),
1376 detector_performance: HashMap::new(),
1377 ensemble_config: EnsembleConfig {
1378 min_consensus: 2,
1379 ensemble_threshold: A::from(0.5).expect("unwrap failed"),
1380 dynamic_weighting: true,
1381 evaluation_window: 100,
1382 context_based_selection: false,
1383 },
1384 })
1385 }
1386
1387 fn combine_results(
1388 &mut self,
1389 results: HashMap<String, AnomalyDetectionResult<A>>,
1390 ) -> Result<AnomalyDetectionResult<A>, String> {
1391 if results.is_empty() {
1392 return Ok(AnomalyDetectionResult {
1393 is_anomaly: false,
1394 anomaly_score: A::zero(),
1395 confidence: A::zero(),
1396 anomaly_type: None,
1397 severity: AnomalySeverity::Low,
1398 metadata: HashMap::new(),
1399 });
1400 }
1401
1402 let anomaly_count = results.values().filter(|r| r.is_anomaly).count();
1403 let total_count = results.len();
1404
1405 let avg_score = results.values().map(|r| r.anomaly_score).sum::<A>()
1406 / A::from(total_count).expect("unwrap failed");
1407 let avg_confidence = results.values().map(|r| r.confidence).sum::<A>()
1408 / A::from(total_count).expect("unwrap failed");
1409
1410 let is_anomaly = match self.voting_strategy {
1411 EnsembleVotingStrategy::Majority => anomaly_count > total_count / 2,
1412 EnsembleVotingStrategy::MaxScore => avg_score > self.ensemble_config.ensemble_threshold,
1413 _ => anomaly_count >= self.ensemble_config.min_consensus,
1414 };
1415
1416 Ok(AnomalyDetectionResult {
1417 is_anomaly,
1418 anomaly_score: avg_score,
1419 confidence: avg_confidence,
1420 anomaly_type: if is_anomaly {
1421 Some(AnomalyType::StatisticalOutlier)
1422 } else {
1423 None
1424 },
1425 severity: if avg_score > A::from(0.8).expect("unwrap failed") {
1426 AnomalySeverity::High
1427 } else if avg_score > A::from(0.5).expect("unwrap failed") {
1428 AnomalySeverity::Medium
1429 } else {
1430 AnomalySeverity::Low
1431 },
1432 metadata: HashMap::new(),
1433 })
1434 }
1435
1436 fn adjust_sensitivity(&mut self, adjustment: A) -> Result<(), String> {
1437 self.ensemble_config.ensemble_threshold = (self.ensemble_config.ensemble_threshold
1438 + adjustment)
1439 .max(A::from(0.1).expect("unwrap failed"))
1440 .min(A::from(0.9).expect("unwrap failed"));
1441 Ok(())
1442 }
1443}
1444
1445impl<A: Float + Default + Clone + Send + Sync + Send + Sync> AdaptiveThresholdManager<A> {
1446 fn new(strategy: ThresholdAdaptationStrategy) -> Result<Self, String> {
1447 Ok(Self {
1448 thresholds: HashMap::new(),
1449 adaptation_strategy: strategy,
1450 performance_feedback: VecDeque::with_capacity(1000),
1451 threshold_bounds: HashMap::new(),
1452 adaptation_params: ThresholdAdaptationParams {
1453 learning_rate: A::from(0.01).expect("unwrap failed"),
1454 momentum: A::from(0.9).expect("unwrap failed"),
1455 min_change: A::from(0.001).expect("unwrap failed"),
1456 max_change: A::from(0.1).expect("unwrap failed"),
1457 adaptation_frequency: 100,
1458 },
1459 })
1460 }
1461}
1462
1463impl<A: Float + Default + Clone + Send + Sync + Send + Sync> FalsePositiveTracker<A> {
1464 fn new() -> Self {
1465 Self {
1466 false_positives: VecDeque::with_capacity(1000),
1467 fp_rate_calculator: FPRateCalculator {
1468 recent_results: VecDeque::with_capacity(1000),
1469 window_size: 1000,
1470 current_fp_rate: A::from(0.05).expect("unwrap failed"),
1471 target_fp_rate: A::from(0.05).expect("unwrap failed"),
1472 },
1473 fp_patterns: FalsePositivePatterns {
1474 temporal_patterns: Vec::new(),
1475 feature_patterns: HashMap::new(),
1476 context_patterns: Vec::new(),
1477 detector_patterns: HashMap::new(),
1478 },
1479 mitigation_strategies: vec![
1480 FPMitigationStrategy::ThresholdAdjustment,
1481 FPMitigationStrategy::ContextFiltering,
1482 ],
1483 }
1484 }
1485
1486 fn get_current_fp_rate(&self) -> f64 {
1487 self.fp_rate_calculator
1488 .current_fp_rate
1489 .to_f64()
1490 .unwrap_or(0.05)
1491 }
1492}
1493
1494impl<A: Float + Default + Clone + Send + Sync + Send + Sync> AnomalyResponseSystem<A> {
1495 fn new(response_strategy: &AnomalyResponseStrategy) -> Result<Self, String> {
1496 let mut response_strategies = HashMap::new();
1497
1498 match response_strategy {
1500 AnomalyResponseStrategy::Ignore => {
1501 response_strategies
1502 .insert(AnomalyType::StatisticalOutlier, vec![ResponseAction::Log]);
1503 }
1504 AnomalyResponseStrategy::Filter => {
1505 response_strategies.insert(
1506 AnomalyType::StatisticalOutlier,
1507 vec![ResponseAction::Quarantine],
1508 );
1509 }
1510 AnomalyResponseStrategy::Adaptive => {
1511 response_strategies.insert(
1512 AnomalyType::StatisticalOutlier,
1513 vec![ResponseAction::Log, ResponseAction::ModelAdjustment],
1514 );
1515 }
1516 _ => {
1517 response_strategies
1518 .insert(AnomalyType::StatisticalOutlier, vec![ResponseAction::Alert]);
1519 }
1520 }
1521
1522 Ok(Self {
1523 response_strategies,
1524 response_executor: ResponseExecutor {
1525 pending_responses: VecDeque::new(),
1526 execution_history: VecDeque::with_capacity(1000),
1527 resource_limits: ResponseResourceLimits {
1528 max_concurrent_responses: 10,
1529 max_cpu_usage: 0.2,
1530 max_memory_usage: 100 * 1024 * 1024, max_execution_time: Duration::from_secs(60),
1532 },
1533 },
1534 effectiveness_tracker: ResponseEffectivenessTracker {
1535 effectiveness_metrics: HashMap::new(),
1536 outcome_tracking: VecDeque::with_capacity(1000),
1537 effectiveness_trends: HashMap::new(),
1538 },
1539 escalation_rules: Vec::new(),
1540 })
1541 }
1542
1543 fn trigger_response(
1544 &mut self,
1545 _result: &AnomalyDetectionResult<A>,
1546 _data_point: &StreamingDataPoint<A>,
1547 ) -> Result<(), String> {
1548 Ok(())
1550 }
1551
1552 fn get_success_rate(&self) -> f64 {
1553 0.85
1555 }
1556}
1557
/// Snapshot of the anomaly-detection subsystem's health, suitable for
/// external monitoring/reporting (plain `f64`/`usize` fields, no generics).
#[derive(Debug, Clone)]
pub struct AnomalyDiagnostics {
    /// Total number of anomaly events recorded.
    pub total_anomalies: usize,
    /// Rate of anomalies over the recent window — presumably a fraction in
    /// [0, 1]; TODO confirm against the producer of this value.
    pub recent_anomaly_rate: f64,
    /// Current estimated false-positive rate (see `FalsePositiveTracker`,
    /// which defaults this to 0.05).
    pub false_positive_rate: f64,
    /// Number of registered detectors.
    pub detector_count: usize,
    /// Fraction of triggered responses considered successful.
    pub response_success_rate: f64,
}