1use super::config::*;
8use super::optimizer::{Adaptation, AdaptationPriority, AdaptationType, StreamingDataPoint};
9
10use scirs2_core::numeric::Float;
11use serde::{Deserialize, Serialize};
12use std::collections::{HashMap, VecDeque};
13use std::time::{Duration, Instant};
14
/// Streaming anomaly-detection engine that fans a data point out to a set of
/// statistical and ML detectors and combines their verdicts via an ensemble.
pub struct AnomalyDetector<A: Float + Send + Sync> {
    /// Anomaly-detection settings cloned from the streaming configuration.
    config: AnomalyConfig,
    /// Statistical detectors (z-score, IQR, …), keyed by detector name.
    statistical_detectors: HashMap<String, Box<dyn StatisticalAnomalyDetector<A>>>,
    /// Machine-learning detectors, keyed by detector name.
    ml_detectors: HashMap<String, Box<dyn MLAnomalyDetector<A>>>,
    /// Combines individual detector results into one ensemble verdict.
    ensemble_detector: EnsembleAnomalyDetector<A>,
    /// Adapts per-detector thresholds over time.
    threshold_manager: AdaptiveThresholdManager<A>,
    /// Bounded history of confirmed anomaly events (capped at 10_000).
    anomaly_history: VecDeque<AnomalyEvent<A>>,
    /// Tracks false positives and the patterns they form.
    false_positive_tracker: FalsePositiveTracker<A>,
    /// Schedules and executes responses to confirmed anomalies.
    response_system: AnomalyResponseSystem<A>,
}
34
/// A single confirmed anomaly, as recorded in the detector's history.
#[derive(Debug, Clone)]
pub struct AnomalyEvent<A: Float + Send + Sync> {
    /// Monotonically increasing event identifier.
    pub id: u64,
    /// When the anomaly was detected.
    pub timestamp: Instant,
    /// Category of anomaly reported by the ensemble.
    pub anomaly_type: AnomalyType,
    /// Severity assigned by the ensemble.
    pub severity: AnomalySeverity,
    /// Ensemble confidence in the verdict.
    pub confidence: A,
    /// The data point that triggered the detection.
    pub data_point: StreamingDataPoint<A>,
    /// Name of the detector that produced this event ("ensemble" for combined results).
    pub detector_name: String,
    /// Numeric anomaly score from the detector.
    pub anomaly_score: A,
    /// Snapshot of surrounding state at detection time.
    pub context: AnomalyContext<A>,
    /// Names of response actions taken (filled in by the response system).
    pub response_actions: Vec<String>,
}
59
/// Categories of anomalies this subsystem can report.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AnomalyType {
    /// Value lies outside the expected statistical distribution.
    StatisticalOutlier,
    /// The underlying data pattern has changed.
    PatternChange,
    /// Anomalous with respect to time (ordering, timing).
    TemporalAnomaly,
    /// Anomalous with respect to spatial structure.
    SpatialAnomaly,
    /// Normal in isolation but anomalous in its context.
    ContextualAnomaly,
    /// A group of points that is anomalous collectively.
    CollectiveAnomaly,
    /// A single isolated anomalous point.
    PointAnomaly,
    /// Data-quality problem (corruption, missing values, …).
    DataQualityAnomaly,
    /// Anomaly in system/model performance rather than data.
    PerformanceAnomaly,
    /// User-defined anomaly category.
    Custom(String),
}
84
/// Severity levels, ordered from least (`Low`) to most (`Critical`) severe.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum AnomalySeverity {
    Low,
    Medium,
    High,
    Critical,
}
97
/// Snapshot of surrounding state captured when an anomaly is recorded.
#[derive(Debug, Clone)]
pub struct AnomalyContext<A: Float + Send + Sync> {
    /// Per-feature statistics over recent data.
    pub recent_statistics: DataStatistics<A>,
    /// Recent performance indicator values.
    pub performance_metrics: Vec<A>,
    /// Recent resource-usage indicator values.
    pub resource_usage: Vec<A>,
    /// Recent drift indicator values.
    pub drift_indicators: Vec<A>,
    /// Elapsed time since the previous recorded anomaly.
    pub time_since_last_anomaly: Duration,
}
112
/// Per-feature summary statistics; each `Vec` holds one value per feature.
#[derive(Debug, Clone)]
pub struct DataStatistics<A: Float + Send + Sync> {
    pub means: Vec<A>,
    pub std_devs: Vec<A>,
    pub min_values: Vec<A>,
    pub max_values: Vec<A>,
    pub medians: Vec<A>,
    pub skewness: Vec<A>,
    pub kurtosis: Vec<A>,
}
131
/// Online statistical anomaly detector over streaming data points.
pub trait StatisticalAnomalyDetector<A: Float + Send + Sync>: Send + Sync {
    /// Scores `data_point` and reports whether it is anomalous.
    fn detect_anomaly(
        &mut self,
        data_point: &StreamingDataPoint<A>,
    ) -> Result<AnomalyDetectionResult<A>, String>;

    /// Folds `data_point` into the detector's running statistics.
    fn update(&mut self, data_point: &StreamingDataPoint<A>) -> Result<(), String>;

    /// Clears all accumulated state.
    fn reset(&mut self);

    /// Stable identifier for this detector.
    fn name(&self) -> String;

    /// Current decision threshold.
    fn get_threshold(&self) -> A;

    /// Replaces the decision threshold.
    fn set_threshold(&mut self, threshold: A);
}
155
/// Machine-learning based anomaly detector with batch training and
/// incremental update support.
pub trait MLAnomalyDetector<A: Float + Send + Sync>: Send + Sync {
    /// Scores `data_point` and reports whether it is anomalous.
    fn detect_anomaly(
        &mut self,
        data_point: &StreamingDataPoint<A>,
    ) -> Result<AnomalyDetectionResult<A>, String>;

    /// Trains the model from a batch of historical points.
    fn train(&mut self, training_data: &[StreamingDataPoint<A>]) -> Result<(), String>;

    /// Incorporates a single new point into the model.
    fn update_incremental(&mut self, data_point: &StreamingDataPoint<A>) -> Result<(), String>;

    /// Reports the model's current quality/latency metrics.
    fn get_performance_metrics(&self) -> MLModelMetrics<A>;

    /// Stable identifier for this detector.
    fn name(&self) -> String;
}
176
/// Verdict produced by a single detector (or by the ensemble) for one point.
#[derive(Debug, Clone)]
pub struct AnomalyDetectionResult<A: Float + Send + Sync> {
    /// Whether the point is judged anomalous.
    pub is_anomaly: bool,
    /// Numeric anomaly score (detector-specific scale).
    pub anomaly_score: A,
    /// Confidence in the verdict.
    pub confidence: A,
    /// Category of the anomaly, when one was flagged.
    pub anomaly_type: Option<AnomalyType>,
    /// Severity assigned to the verdict.
    pub severity: AnomalySeverity,
    /// Detector-specific auxiliary values.
    pub metadata: HashMap<String, A>,
}
193
/// Quality and latency metrics for an ML anomaly-detection model.
#[derive(Debug, Clone)]
pub struct MLModelMetrics<A: Float + Send + Sync> {
    pub accuracy: A,
    pub precision: A,
    pub recall: A,
    pub f1_score: A,
    pub auc_roc: A,
    pub false_positive_rate: A,
    /// Wall-clock time spent training the model.
    pub training_time: Duration,
    /// Typical per-point inference latency.
    pub inference_time: Duration,
}
214
/// Combines the verdicts of several detectors into a single result.
pub struct EnsembleAnomalyDetector<A: Float + Send + Sync> {
    /// Most recent result per detector, keyed by detector name.
    detector_results: HashMap<String, AnomalyDetectionResult<A>>,
    /// How individual verdicts are merged into the ensemble verdict.
    voting_strategy: EnsembleVotingStrategy,
    /// Per-detector weights used by weighted strategies.
    detector_weights: HashMap<String, A>,
    /// Per-detector historical performance, for dynamic weighting.
    detector_performance: HashMap<String, DetectorPerformance<A>>,
    /// Ensemble-level tuning parameters.
    ensemble_config: EnsembleConfig<A>,
}
228
/// Strategy for merging individual detector verdicts into one.
#[derive(Debug, Clone)]
pub enum EnsembleVotingStrategy {
    /// Anomaly when more than half of the detectors agree.
    Majority,
    /// Weighted vote using per-detector weights.
    Weighted,
    /// Anomaly when the highest individual score crosses the threshold.
    MaxScore,
    /// Anomaly based on the mean of individual scores.
    AverageScore,
    /// Anomaly based on the median of individual scores.
    MedianScore,
    /// Strategy chosen dynamically from detector performance.
    Adaptive,
    /// A meta-model trained on detector outputs.
    Stacking,
}
247
/// Rolling performance profile of a single detector.
#[derive(Debug, Clone)]
pub struct DetectorPerformance<A: Float + Send + Sync> {
    /// Accuracy over the recent evaluation window.
    pub recent_accuracy: A,
    /// Accuracy over all history.
    pub historical_accuracy: A,
    pub false_positive_rate: A,
    pub false_negative_rate: A,
    /// Typical latency from data point to verdict.
    pub detection_latency: Duration,
    /// Aggregate reliability used for ensemble weighting.
    pub reliability_score: A,
}
264
/// Tuning parameters for the ensemble combiner.
#[derive(Debug, Clone)]
pub struct EnsembleConfig<A: Float + Send + Sync> {
    /// Minimum number of agreeing detectors for consensus-based strategies.
    pub min_consensus: usize,
    /// Score threshold used by score-based strategies.
    pub ensemble_threshold: A,
    /// Whether detector weights are updated from observed performance.
    pub dynamic_weighting: bool,
    /// Number of recent results used when evaluating detectors.
    pub evaluation_window: usize,
    /// Whether detectors are selected based on the data context.
    pub context_based_selection: bool,
}
279
/// Maintains and adapts per-detector decision thresholds.
pub struct AdaptiveThresholdManager<A: Float + Send + Sync> {
    /// Current threshold per detector name.
    thresholds: HashMap<String, A>,
    /// Policy that drives threshold updates.
    adaptation_strategy: ThresholdAdaptationStrategy,
    /// Recent feedback samples used by performance-based strategies.
    performance_feedback: VecDeque<ThresholdPerformanceFeedback<A>>,
    /// Allowed (min, max) threshold range per detector name.
    threshold_bounds: HashMap<String, (A, A)>,
    /// Learning-rate/momentum style parameters for the update rule.
    adaptation_params: ThresholdAdaptationParams<A>,
}
293
/// Policy controlling how detection thresholds evolve over time.
#[derive(Debug, Clone)]
pub enum ThresholdAdaptationStrategy {
    /// Thresholds never change.
    Fixed,
    /// Adjust thresholds from observed detection performance.
    PerformanceBased,
    /// Track a fixed quantile of the score distribution.
    QuantileBased { quantile: f64 },
    /// Optimize the ROC operating point.
    ROCOptimized,
    /// Optimize the precision-recall operating point.
    PROptimized,
    /// Keep the false-positive rate near a target value.
    FPRControlled { target_fpr: f64 },
    /// Adapt to changes in the underlying score distribution.
    DistributionAdaptive,
}
312
/// One confusion-matrix sample observed for a detector at a given threshold.
#[derive(Debug, Clone)]
pub struct ThresholdPerformanceFeedback<A: Float + Send + Sync> {
    pub detector_name: String,
    /// Threshold that was in effect for this sample.
    pub threshold: A,
    pub true_positives: usize,
    pub false_positives: usize,
    pub true_negatives: usize,
    pub false_negatives: usize,
    pub timestamp: Instant,
}
331
/// Gradient-style parameters for the threshold update rule.
#[derive(Debug, Clone)]
pub struct ThresholdAdaptationParams<A: Float + Send + Sync> {
    pub learning_rate: A,
    pub momentum: A,
    /// Smallest per-step threshold change applied.
    pub min_change: A,
    /// Largest per-step threshold change allowed.
    pub max_change: A,
    /// Number of samples between adaptation steps.
    pub adaptation_frequency: usize,
}
346
/// Records false-positive detections and mines them for patterns.
pub struct FalsePositiveTracker<A: Float + Send + Sync> {
    /// Bounded log of confirmed false positives.
    false_positives: VecDeque<FalsePositiveEvent<A>>,
    /// Windowed false-positive rate estimator.
    fp_rate_calculator: FPRateCalculator<A>,
    /// Patterns mined from past false positives.
    fp_patterns: FalsePositivePatterns<A>,
    /// Strategies available to reduce future false positives.
    mitigation_strategies: Vec<FPMitigationStrategy>,
}
358
/// A detection later judged to be a false positive.
#[derive(Debug, Clone)]
pub struct FalsePositiveEvent<A: Float + Send + Sync> {
    pub timestamp: Instant,
    /// The data point that was wrongly flagged.
    pub data_point: StreamingDataPoint<A>,
    /// Detector that produced the false alarm.
    pub detector_name: String,
    pub anomaly_score: A,
    /// Context snapshot at the time of the false alarm.
    pub context: AnomalyContext<A>,
}
373
/// Sliding-window estimator of the false-positive rate.
pub struct FPRateCalculator<A: Float + Send + Sync> {
    /// Recent labeled detection outcomes.
    recent_results: VecDeque<DetectionResult>,
    /// Maximum number of outcomes retained in the window.
    window_size: usize,
    /// Latest estimated false-positive rate.
    current_fp_rate: A,
    /// Desired false-positive rate used for control.
    target_fp_rate: A,
}
385
/// A single detection outcome, optionally labeled with ground truth.
#[derive(Debug, Clone)]
pub struct DetectionResult {
    pub timestamp: Instant,
    /// Whether the detector flagged an anomaly.
    pub anomaly_detected: bool,
    /// True label when known (`None` if unlabeled).
    pub ground_truth: Option<bool>,
    pub detector_name: String,
}
398
/// Patterns mined from accumulated false positives.
#[derive(Debug, Clone)]
pub struct FalsePositivePatterns<A: Float + Send + Sync> {
    /// Time-based regularities (periodicity, bursts, …).
    pub temporal_patterns: Vec<TemporalPattern>,
    /// Per-feature scores associated with false alarms.
    pub feature_patterns: HashMap<String, A>,
    /// Context signatures under which false alarms recur.
    pub context_patterns: Vec<ContextPattern<A>>,
    /// Per-detector false-alarm score profiles.
    pub detector_patterns: HashMap<String, Vec<A>>,
}
411
/// A time-based regularity in false-positive occurrences.
#[derive(Debug, Clone)]
pub struct TemporalPattern {
    pub pattern_type: TemporalPatternType,
    /// How pronounced the pattern is.
    pub strength: f64,
    /// Period of the pattern, when periodic.
    pub period: Option<Duration>,
    /// Confidence that the pattern is real.
    pub confidence: f64,
}
424
/// Kinds of temporal regularities recognized in false positives.
#[derive(Debug, Clone)]
pub enum TemporalPatternType {
    /// Repeats at a fixed period.
    Periodic,
    /// Clustered at particular times of day/week.
    TimeSpecific,
    /// Occurs in short, dense bursts.
    Burst,
    /// Frequency trending up or down over time.
    Trend,
}
437
/// A recurring context signature associated with false alarms.
#[derive(Debug, Clone)]
pub struct ContextPattern<A: Float + Send + Sync> {
    /// Feature values characterizing the context.
    pub context_features: Vec<A>,
    /// How many times this context has been observed.
    pub frequency: usize,
    /// How reliably the context predicts a false alarm.
    pub reliability: A,
}
448
/// Available tactics for reducing future false positives.
#[derive(Debug, Clone)]
pub enum FPMitigationStrategy {
    /// Raise/lower the decision threshold.
    ThresholdAdjustment,
    /// Adjust which features drive detection.
    FeatureAdjustment,
    /// Rebalance ensemble detector weights.
    EnsembleReweighting,
    /// Suppress alarms in known false-alarm contexts.
    ContextFiltering,
    /// Suppress alarms matching temporal false-alarm patterns.
    TemporalFiltering,
    /// Retrain the underlying ML model.
    ModelRetraining,
}
465
/// Maps anomaly types to response actions and executes them.
pub struct AnomalyResponseSystem<A: Float + Send + Sync> {
    /// Actions to take per anomaly type.
    response_strategies: HashMap<AnomalyType, Vec<ResponseAction>>,
    /// Queues and runs the selected actions.
    response_executor: ResponseExecutor<A>,
    /// Measures how well responses resolve anomalies.
    effectiveness_tracker: ResponseEffectivenessTracker<A>,
    /// Rules for escalating when normal responses are insufficient.
    escalation_rules: Vec<EscalationRule<A>>,
}
477
/// Actions the response system can take for a confirmed anomaly.
#[derive(Debug, Clone)]
pub enum ResponseAction {
    /// Record the event only.
    Log,
    /// Notify operators.
    Alert,
    /// Isolate the offending data point.
    Quarantine,
    /// Adjust model/detector parameters.
    ModelAdjustment,
    /// Temporarily raise monitoring intensity.
    IncreaseMonitoring,
    /// Kick off a recovery procedure.
    TriggerRecovery,
    /// User-defined action.
    Custom(String),
}
496
/// Queues pending response actions and records their executions.
pub struct ResponseExecutor<A: Float + Send + Sync> {
    /// Actions waiting to run, in scheduling order.
    pending_responses: VecDeque<PendingResponse<A>>,
    /// Bounded log of completed executions.
    execution_history: VecDeque<ResponseExecution<A>>,
    /// Caps on concurrent work and resource consumption.
    resource_limits: ResponseResourceLimits,
}
506
/// A response action scheduled but not yet executed.
#[derive(Debug, Clone)]
pub struct PendingResponse<A: Float + Send + Sync> {
    pub id: u64,
    /// The anomaly this response addresses.
    pub anomaly_event: AnomalyEvent<A>,
    pub action: ResponseAction,
    pub priority: ResponsePriority,
    /// Earliest time the action should run.
    pub scheduled_time: Instant,
    /// Maximum time the action is allowed to take.
    pub timeout: Duration,
}
523
/// Record of one executed response action.
#[derive(Debug, Clone)]
pub struct ResponseExecution<A: Float + Send + Sync> {
    pub id: u64,
    /// The scheduled response that was run.
    pub response: PendingResponse<A>,
    pub start_time: Instant,
    pub duration: Duration,
    /// Whether the action completed successfully.
    pub success: bool,
    /// Failure description when `success` is false.
    pub error_message: Option<String>,
    /// Resources used during execution, keyed by resource name.
    pub resources_consumed: HashMap<String, A>,
}
542
/// Execution priority for responses, ordered low-to-high.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum ResponsePriority {
    Low = 0,
    Normal = 1,
    High = 2,
    Critical = 3,
}
555
/// Caps on the resources the response executor may consume.
#[derive(Debug, Clone)]
pub struct ResponseResourceLimits {
    pub max_concurrent_responses: usize,
    /// Maximum CPU fraction (0.0–1.0) usable by responses.
    pub max_cpu_usage: f64,
    /// Maximum memory in bytes usable by responses.
    pub max_memory_usage: usize,
    /// Hard cap on a single response's runtime.
    pub max_execution_time: Duration,
}
568
/// Tracks how effective each response action has been over time.
pub struct ResponseEffectivenessTracker<A: Float + Send + Sync> {
    /// Aggregate metrics per action.
    effectiveness_metrics: HashMap<ResponseAction, EffectivenessMetrics<A>>,
    /// Bounded log of measured outcomes.
    outcome_tracking: VecDeque<ResponseOutcome<A>>,
    /// Trend analysis per action.
    effectiveness_trends: HashMap<ResponseAction, TrendAnalysis<A>>,
}
578
/// Aggregate effectiveness statistics for one response action.
#[derive(Debug, Clone)]
pub struct EffectivenessMetrics<A: Float + Send + Sync> {
    pub success_rate: A,
    pub avg_response_time: Duration,
    /// Fraction of anomalies fully resolved by the action.
    pub resolution_rate: A,
    /// How much the action reduces subsequent false alarms.
    pub false_alarm_reduction: A,
    pub cost_benefit_ratio: A,
}
593
/// An executed response paired with its measured outcome.
#[derive(Debug, Clone)]
pub struct ResponseOutcome<A: Float + Send + Sync> {
    pub execution: ResponseExecution<A>,
    pub outcome: OutcomeMeasurement<A>,
    /// Whether further action is still needed.
    pub follow_up_required: bool,
    /// Free-form notes for post-mortem analysis.
    pub lessons_learned: Vec<String>,
}
606
/// Measured effect of one response execution.
#[derive(Debug, Clone)]
pub struct OutcomeMeasurement<A: Float + Send + Sync> {
    pub issue_resolved: bool,
    pub time_to_resolution: Duration,
    /// Impact of the response on system performance.
    pub performance_impact: A,
    /// Unintended effects observed after the response.
    pub side_effects: Vec<String>,
    /// Overall effectiveness grade for this execution.
    pub effectiveness_score: A,
}
621
/// Direction and quality of a metric's trend over time.
#[derive(Debug, Clone)]
pub struct TrendAnalysis<A: Float + Send + Sync> {
    pub trend_direction: TrendDirection,
    /// Size of the change per unit time.
    pub trend_magnitude: A,
    /// Confidence that the trend is real.
    pub trend_confidence: A,
    /// How consistent the trend has been.
    pub trend_stability: A,
}
634
/// Qualitative direction of a tracked trend.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TrendDirection {
    Improving,
    Declining,
    Stable,
    Oscillating,
}
647
/// A named rule: when all conditions hold, run the escalation actions.
#[derive(Debug, Clone)]
pub struct EscalationRule<A: Float + Send + Sync> {
    pub name: String,
    /// Conditions that must all be satisfied to trigger the rule.
    pub conditions: Vec<EscalationCondition<A>>,
    /// Actions to run when the rule fires.
    pub actions: Vec<EscalationAction>,
    pub priority: EscalationPriority,
}
660
/// A single thresholded condition evaluated over a time window.
#[derive(Debug, Clone)]
pub struct EscalationCondition<A: Float + Send + Sync> {
    pub condition_type: EscalationConditionType,
    /// Value the measured quantity must exceed.
    pub threshold: A,
    /// Window over which the condition is evaluated.
    pub time_window: Duration,
}
671
/// Quantities an escalation condition can test.
#[derive(Debug, Clone)]
pub enum EscalationConditionType {
    /// Count of anomalies within the window.
    MultipleAnomalies,
    /// Presence of high-severity anomalies.
    HighSeverity,
    /// Repeated failures of response actions.
    ResponseFailure,
    /// Sustained drop in system performance.
    PerformanceDegradation,
    /// Resource usage approaching its limits.
    ResourceExhaustion,
}
686
/// Actions available when an escalation rule fires.
#[derive(Debug, Clone)]
pub enum EscalationAction {
    NotifyAdmin,
    EmergencyProtocol,
    SystemShutdown,
    ActivateBackup,
    IncreaseResources,
}
701
/// Urgency of an escalation, ordered low-to-high.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum EscalationPriority {
    Normal = 0,
    Urgent = 1,
    Emergency = 2,
}
712
713impl<A: Float + Default + Clone + std::iter::Sum + Send + Sync + 'static> AnomalyDetector<A> {
714 pub fn new(config: &StreamingConfig) -> Result<Self, String> {
716 let anomaly_config = config.anomaly_config.clone();
717
718 let mut statistical_detectors: HashMap<String, Box<dyn StatisticalAnomalyDetector<A>>> =
719 HashMap::new();
720 let mut ml_detectors: HashMap<String, Box<dyn MLAnomalyDetector<A>>> = HashMap::new();
721
722 statistical_detectors.insert(
724 "zscore".to_string(),
725 Box::new(ZScoreDetector::new(anomaly_config.threshold)?),
726 );
727 statistical_detectors.insert(
728 "iqr".to_string(),
729 Box::new(IQRDetector::new(anomaly_config.threshold)?),
730 );
731
732 match anomaly_config.detection_method {
734 AnomalyDetectionMethod::IsolationForest => {
735 ml_detectors.insert(
736 "isolation_forest".to_string(),
737 Box::new(IsolationForestDetector::new()?),
738 );
739 }
740 AnomalyDetectionMethod::OneClassSVM => {
741 ml_detectors.insert(
742 "one_class_svm".to_string(),
743 Box::new(OneClassSVMDetector::new()?),
744 );
745 }
746 AnomalyDetectionMethod::LocalOutlierFactor => {
747 ml_detectors.insert("lof".to_string(), Box::new(LOFDetector::new()?));
748 }
749 _ => {
750 }
752 }
753
754 let ensemble_detector = EnsembleAnomalyDetector::new(EnsembleVotingStrategy::Weighted)?;
755 let threshold_manager =
756 AdaptiveThresholdManager::new(ThresholdAdaptationStrategy::PerformanceBased)?;
757 let false_positive_tracker = FalsePositiveTracker::new();
758 let response_system = AnomalyResponseSystem::new(&anomaly_config.response_strategy)?;
759
760 Ok(Self {
761 config: anomaly_config,
762 statistical_detectors,
763 ml_detectors,
764 ensemble_detector,
765 threshold_manager,
766 anomaly_history: VecDeque::with_capacity(10000),
767 false_positive_tracker,
768 response_system,
769 })
770 }
771
772 pub fn detect_anomaly(&mut self, data_point: &StreamingDataPoint<A>) -> Result<bool, String> {
774 let mut detection_results = HashMap::new();
775
776 for (name, detector) in &mut self.statistical_detectors {
778 let result = detector.detect_anomaly(data_point)?;
779 detection_results.insert(name.clone(), result);
780 }
781
782 for (name, detector) in &mut self.ml_detectors {
784 let result = detector.detect_anomaly(data_point)?;
785 detection_results.insert(name.clone(), result);
786 }
787
788 let ensemble_result = self.ensemble_detector.combine_results(detection_results)?;
790
791 if ensemble_result.is_anomaly {
793 let anomaly_event = AnomalyEvent {
795 id: self.generate_event_id(),
796 timestamp: Instant::now(),
797 anomaly_type: ensemble_result
798 .anomaly_type
799 .as_ref()
800 .cloned()
801 .unwrap_or(AnomalyType::StatisticalOutlier),
802 severity: ensemble_result.severity.clone(),
803 confidence: ensemble_result.confidence,
804 data_point: data_point.clone(),
805 detector_name: "ensemble".to_string(),
806 anomaly_score: ensemble_result.anomaly_score,
807 context: self.create_anomaly_context(data_point)?,
808 response_actions: Vec::new(),
809 };
810
811 self.record_anomaly(anomaly_event)?;
813
814 self.response_system
816 .trigger_response(&ensemble_result, data_point)?;
817
818 return Ok(true);
819 }
820
821 for detector in self.statistical_detectors.values_mut() {
823 detector.update(data_point)?;
824 }
825
826 for detector in self.ml_detectors.values_mut() {
827 detector.update_incremental(data_point)?;
828 }
829
830 Ok(false)
831 }
832
833 fn generate_event_id(&self) -> u64 {
835 self.anomaly_history.len() as u64 + 1
836 }
837
838 fn create_anomaly_context(
840 &self,
841 data_point: &StreamingDataPoint<A>,
842 ) -> Result<AnomalyContext<A>, String> {
843 let recent_statistics = self.calculate_recent_statistics()?;
845
846 let performance_metrics = vec![A::from(0.8).unwrap(), A::from(0.7).unwrap()];
848
849 let resource_usage = vec![A::from(0.6).unwrap(), A::from(0.5).unwrap()];
851
852 let drift_indicators = vec![A::from(0.1).unwrap()];
854
855 let time_since_last_anomaly = if let Some(last_anomaly) = self.anomaly_history.back() {
857 last_anomaly.timestamp.elapsed()
858 } else {
859 Duration::from_secs(3600) };
861
862 Ok(AnomalyContext {
863 recent_statistics,
864 performance_metrics,
865 resource_usage,
866 drift_indicators,
867 time_since_last_anomaly,
868 })
869 }
870
871 fn calculate_recent_statistics(&self) -> Result<DataStatistics<A>, String> {
873 Ok(DataStatistics {
875 means: vec![A::from(0.5).unwrap(), A::from(0.3).unwrap()],
876 std_devs: vec![A::from(0.1).unwrap(), A::from(0.15).unwrap()],
877 min_values: vec![A::from(0.0).unwrap(), A::from(0.0).unwrap()],
878 max_values: vec![A::from(1.0).unwrap(), A::from(1.0).unwrap()],
879 medians: vec![A::from(0.5).unwrap(), A::from(0.3).unwrap()],
880 skewness: vec![A::from(0.0).unwrap(), A::from(0.1).unwrap()],
881 kurtosis: vec![A::from(0.0).unwrap(), A::from(0.0).unwrap()],
882 })
883 }
884
885 fn record_anomaly(&mut self, anomaly_event: AnomalyEvent<A>) -> Result<(), String> {
887 if self.anomaly_history.len() >= 10000 {
888 self.anomaly_history.pop_front();
889 }
890 self.anomaly_history.push_back(anomaly_event);
891 Ok(())
892 }
893
894 pub fn apply_adaptation(&mut self, adaptation: &Adaptation<A>) -> Result<(), String> {
896 if adaptation.adaptation_type == AdaptationType::AnomalyDetection {
897 let threshold_adjustment = adaptation.magnitude;
899
900 for detector in self.statistical_detectors.values_mut() {
901 let current_threshold = detector.get_threshold();
902 let new_threshold = current_threshold + threshold_adjustment;
903 detector.set_threshold(new_threshold);
904 }
905
906 self.ensemble_detector
908 .adjust_sensitivity(threshold_adjustment)?;
909 }
910
911 Ok(())
912 }
913
914 pub fn get_recent_anomalies(&self, count: usize) -> Vec<&AnomalyEvent<A>> {
916 self.anomaly_history.iter().rev().take(count).collect()
917 }
918
919 pub fn get_diagnostics(&self) -> AnomalyDiagnostics {
921 AnomalyDiagnostics {
922 total_anomalies: self.anomaly_history.len(),
923 recent_anomaly_rate: self.calculate_recent_anomaly_rate(),
924 false_positive_rate: self.false_positive_tracker.get_current_fp_rate(),
925 detector_count: self.statistical_detectors.len() + self.ml_detectors.len(),
926 response_success_rate: self.response_system.get_success_rate(),
927 }
928 }
929
930 fn calculate_recent_anomaly_rate(&self) -> f64 {
932 let recent_window = Duration::from_secs(3600); let cutoff_time = Instant::now() - recent_window;
934
935 let recent_count = self
936 .anomaly_history
937 .iter()
938 .filter(|event| event.timestamp > cutoff_time)
939 .count();
940
941 recent_count as f64 / 3600.0 }
943}
944
/// Z-score detector over the sum of a point's features, using Welford's
/// online algorithm for the running mean/variance.
pub struct ZScoreDetector<A: Float + Send + Sync> {
    /// |z| value above which a point is flagged.
    threshold: A,
    /// Running mean of observed feature sums.
    running_mean: A,
    /// Welford's M2: running sum of squared deviations (NOT the variance).
    running_variance: A,
    /// Number of samples folded in so far.
    sample_count: usize,
}
954
955impl<A: Float + Default + Clone + Send + Sync + Send + Sync> ZScoreDetector<A> {
956 fn new(threshold: f64) -> Result<Self, String> {
957 Ok(Self {
958 threshold: A::from(threshold).unwrap(),
959 running_mean: A::zero(),
960 running_variance: A::zero(),
961 sample_count: 0,
962 })
963 }
964}
965
966impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> StatisticalAnomalyDetector<A>
967 for ZScoreDetector<A>
968{
969 fn detect_anomaly(
970 &mut self,
971 data_point: &StreamingDataPoint<A>,
972 ) -> Result<AnomalyDetectionResult<A>, String> {
973 if self.sample_count < 10 {
974 return Ok(AnomalyDetectionResult {
976 is_anomaly: false,
977 anomaly_score: A::zero(),
978 confidence: A::zero(),
979 anomaly_type: None,
980 severity: AnomalySeverity::Low,
981 metadata: HashMap::new(),
982 });
983 }
984
985 let feature_sum = data_point.features.iter().cloned().sum::<A>();
987 let z_score = if self.running_variance > A::zero() {
988 (feature_sum - self.running_mean) / self.running_variance.sqrt()
989 } else {
990 A::zero()
991 };
992
993 let is_anomaly = z_score.abs() > self.threshold;
994 let anomaly_score = z_score.abs();
995
996 Ok(AnomalyDetectionResult {
997 is_anomaly,
998 anomaly_score,
999 confidence: if is_anomaly {
1000 A::from(0.8).unwrap()
1001 } else {
1002 A::from(0.2).unwrap()
1003 },
1004 anomaly_type: if is_anomaly {
1005 Some(AnomalyType::StatisticalOutlier)
1006 } else {
1007 None
1008 },
1009 severity: if anomaly_score > A::from(3.0).unwrap() {
1010 AnomalySeverity::High
1011 } else if anomaly_score > A::from(2.0).unwrap() {
1012 AnomalySeverity::Medium
1013 } else {
1014 AnomalySeverity::Low
1015 },
1016 metadata: HashMap::new(),
1017 })
1018 }
1019
1020 fn update(&mut self, data_point: &StreamingDataPoint<A>) -> Result<(), String> {
1021 let feature_sum = data_point.features.iter().cloned().sum::<A>();
1022
1023 self.sample_count += 1;
1025 let delta = feature_sum - self.running_mean;
1026 self.running_mean = self.running_mean + delta / A::from(self.sample_count).unwrap();
1027 let delta2 = feature_sum - self.running_mean;
1028 self.running_variance = self.running_variance + delta * delta2;
1029
1030 Ok(())
1031 }
1032
1033 fn reset(&mut self) {
1034 self.running_mean = A::zero();
1035 self.running_variance = A::zero();
1036 self.sample_count = 0;
1037 }
1038
1039 fn name(&self) -> String {
1040 "zscore".to_string()
1041 }
1042
1043 fn get_threshold(&self) -> A {
1044 self.threshold
1045 }
1046
1047 fn set_threshold(&mut self, threshold: A) {
1048 self.threshold = threshold;
1049 }
1050}
1051
/// Interquartile-range detector over a sliding window of feature sums.
pub struct IQRDetector<A: Float + Send + Sync> {
    /// IQR multiplier for the outlier fences (1.5 is the classic Tukey value).
    threshold: A,
    /// Sliding window of recent feature sums.
    recent_values: VecDeque<A>,
    /// Maximum window length.
    window_size: usize,
}
1058
1059impl<A: Float + Default + Clone + Send + Sync + Send + Sync> IQRDetector<A> {
1060 fn new(threshold: f64) -> Result<Self, String> {
1061 Ok(Self {
1062 threshold: A::from(threshold).unwrap(),
1063 recent_values: VecDeque::with_capacity(100),
1064 window_size: 100,
1065 })
1066 }
1067}
1068
1069impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> StatisticalAnomalyDetector<A>
1070 for IQRDetector<A>
1071{
1072 fn detect_anomaly(
1073 &mut self,
1074 data_point: &StreamingDataPoint<A>,
1075 ) -> Result<AnomalyDetectionResult<A>, String> {
1076 if self.recent_values.len() < 20 {
1077 return Ok(AnomalyDetectionResult {
1078 is_anomaly: false,
1079 anomaly_score: A::zero(),
1080 confidence: A::zero(),
1081 anomaly_type: None,
1082 severity: AnomalySeverity::Low,
1083 metadata: HashMap::new(),
1084 });
1085 }
1086
1087 let mut sorted_values: Vec<A> = self.recent_values.iter().cloned().collect();
1089 sorted_values.sort_by(|a, b| a.partial_cmp(b).unwrap());
1090
1091 let q1_idx = sorted_values.len() / 4;
1092 let q3_idx = 3 * sorted_values.len() / 4;
1093 let q1 = sorted_values[q1_idx];
1094 let q3 = sorted_values[q3_idx];
1095 let iqr = q3 - q1;
1096
1097 let lower_bound = q1 - self.threshold * iqr;
1098 let upper_bound = q3 + self.threshold * iqr;
1099
1100 let feature_sum = data_point.features.iter().cloned().sum::<A>();
1101 let is_anomaly = feature_sum < lower_bound || feature_sum > upper_bound;
1102
1103 let distance_from_bounds = if feature_sum < lower_bound {
1104 lower_bound - feature_sum
1105 } else if feature_sum > upper_bound {
1106 feature_sum - upper_bound
1107 } else {
1108 A::zero()
1109 };
1110
1111 Ok(AnomalyDetectionResult {
1112 is_anomaly,
1113 anomaly_score: distance_from_bounds / iqr.max(A::from(1e-8).unwrap()),
1114 confidence: if is_anomaly {
1115 A::from(0.7).unwrap()
1116 } else {
1117 A::from(0.3).unwrap()
1118 },
1119 anomaly_type: if is_anomaly {
1120 Some(AnomalyType::StatisticalOutlier)
1121 } else {
1122 None
1123 },
1124 severity: if distance_from_bounds > iqr * A::from(2.0).unwrap() {
1125 AnomalySeverity::High
1126 } else {
1127 AnomalySeverity::Medium
1128 },
1129 metadata: HashMap::new(),
1130 })
1131 }
1132
1133 fn update(&mut self, data_point: &StreamingDataPoint<A>) -> Result<(), String> {
1134 let feature_sum = data_point.features.iter().cloned().sum::<A>();
1135
1136 if self.recent_values.len() >= self.window_size {
1137 self.recent_values.pop_front();
1138 }
1139 self.recent_values.push_back(feature_sum);
1140
1141 Ok(())
1142 }
1143
1144 fn reset(&mut self) {
1145 self.recent_values.clear();
1146 }
1147
1148 fn name(&self) -> String {
1149 "iqr".to_string()
1150 }
1151
1152 fn get_threshold(&self) -> A {
1153 self.threshold
1154 }
1155
1156 fn set_threshold(&mut self, threshold: A) {
1157 self.threshold = threshold;
1158 }
1159}
1160
/// Placeholder isolation-forest detector (no real model yet).
pub struct IsolationForestDetector<A: Float + Send + Sync> {
    /// Set once `train` has been called.
    model_trained: bool,
    /// Score above which a point is flagged.
    threshold: A,
}
1166
1167impl<A: Float + Default + Send + Sync + Send + Sync> IsolationForestDetector<A> {
1168 fn new() -> Result<Self, String> {
1169 Ok(Self {
1170 model_trained: false,
1171 threshold: A::from(0.5).unwrap(),
1172 })
1173 }
1174}
1175
1176impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> MLAnomalyDetector<A>
1177 for IsolationForestDetector<A>
1178{
1179 fn detect_anomaly(
1180 &mut self,
1181 data_point: &StreamingDataPoint<A>,
1182 ) -> Result<AnomalyDetectionResult<A>, String> {
1183 let anomaly_score = A::from(0.3).unwrap(); Ok(AnomalyDetectionResult {
1186 is_anomaly: anomaly_score > self.threshold,
1187 anomaly_score,
1188 confidence: A::from(0.6).unwrap(),
1189 anomaly_type: Some(AnomalyType::StatisticalOutlier),
1190 severity: AnomalySeverity::Medium,
1191 metadata: HashMap::new(),
1192 })
1193 }
1194
1195 fn train(&mut self, _training_data: &[StreamingDataPoint<A>]) -> Result<(), String> {
1196 self.model_trained = true;
1197 Ok(())
1198 }
1199
1200 fn update_incremental(&mut self, _data_point: &StreamingDataPoint<A>) -> Result<(), String> {
1201 Ok(())
1202 }
1203
1204 fn get_performance_metrics(&self) -> MLModelMetrics<A> {
1205 MLModelMetrics {
1206 accuracy: A::from(0.85).unwrap(),
1207 precision: A::from(0.8).unwrap(),
1208 recall: A::from(0.75).unwrap(),
1209 f1_score: A::from(0.77).unwrap(),
1210 auc_roc: A::from(0.88).unwrap(),
1211 false_positive_rate: A::from(0.05).unwrap(),
1212 training_time: Duration::from_secs(60),
1213 inference_time: Duration::from_millis(10),
1214 }
1215 }
1216
1217 fn name(&self) -> String {
1218 "isolation_forest".to_string()
1219 }
1220}
1221
/// Placeholder one-class SVM detector (no real model yet).
pub struct OneClassSVMDetector<A: Float + Send + Sync> {
    /// Set once `train` has been called.
    model_trained: bool,
    /// Decision-function threshold.
    threshold: A,
}
1226
1227impl<A: Float + Default + Send + Sync + Send + Sync> OneClassSVMDetector<A> {
1228 fn new() -> Result<Self, String> {
1229 Ok(Self {
1230 model_trained: false,
1231 threshold: A::from(0.0).unwrap(),
1232 })
1233 }
1234}
1235
1236impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> MLAnomalyDetector<A>
1237 for OneClassSVMDetector<A>
1238{
1239 fn detect_anomaly(
1240 &mut self,
1241 _data_point: &StreamingDataPoint<A>,
1242 ) -> Result<AnomalyDetectionResult<A>, String> {
1243 Ok(AnomalyDetectionResult {
1245 is_anomaly: false,
1246 anomaly_score: A::from(0.2).unwrap(),
1247 confidence: A::from(0.5).unwrap(),
1248 anomaly_type: None,
1249 severity: AnomalySeverity::Low,
1250 metadata: HashMap::new(),
1251 })
1252 }
1253
1254 fn train(&mut self, _training_data: &[StreamingDataPoint<A>]) -> Result<(), String> {
1255 self.model_trained = true;
1256 Ok(())
1257 }
1258
1259 fn update_incremental(&mut self, _data_point: &StreamingDataPoint<A>) -> Result<(), String> {
1260 Ok(())
1261 }
1262
1263 fn get_performance_metrics(&self) -> MLModelMetrics<A> {
1264 MLModelMetrics {
1265 accuracy: A::from(0.82).unwrap(),
1266 precision: A::from(0.78).unwrap(),
1267 recall: A::from(0.73).unwrap(),
1268 f1_score: A::from(0.75).unwrap(),
1269 auc_roc: A::from(0.85).unwrap(),
1270 false_positive_rate: A::from(0.08).unwrap(),
1271 training_time: Duration::from_secs(120),
1272 inference_time: Duration::from_millis(5),
1273 }
1274 }
1275
1276 fn name(&self) -> String {
1277 "one_class_svm".to_string()
1278 }
1279}
1280
/// Placeholder local-outlier-factor detector (no real model yet).
pub struct LOFDetector<A: Float + Send + Sync> {
    /// Set once `train` has been called.
    model_trained: bool,
    /// LOF value above which a point would be flagged.
    threshold: A,
}
1285
1286impl<A: Float + Default + Send + Sync + Send + Sync> LOFDetector<A> {
1287 fn new() -> Result<Self, String> {
1288 Ok(Self {
1289 model_trained: false,
1290 threshold: A::from(1.5).unwrap(),
1291 })
1292 }
1293}
1294
1295impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> MLAnomalyDetector<A>
1296 for LOFDetector<A>
1297{
1298 fn detect_anomaly(
1299 &mut self,
1300 _data_point: &StreamingDataPoint<A>,
1301 ) -> Result<AnomalyDetectionResult<A>, String> {
1302 Ok(AnomalyDetectionResult {
1304 is_anomaly: false,
1305 anomaly_score: A::from(0.1).unwrap(),
1306 confidence: A::from(0.4).unwrap(),
1307 anomaly_type: None,
1308 severity: AnomalySeverity::Low,
1309 metadata: HashMap::new(),
1310 })
1311 }
1312
1313 fn train(&mut self, _training_data: &[StreamingDataPoint<A>]) -> Result<(), String> {
1314 self.model_trained = true;
1315 Ok(())
1316 }
1317
1318 fn update_incremental(&mut self, _data_point: &StreamingDataPoint<A>) -> Result<(), String> {
1319 Ok(())
1320 }
1321
1322 fn get_performance_metrics(&self) -> MLModelMetrics<A> {
1323 MLModelMetrics {
1324 accuracy: A::from(0.79).unwrap(),
1325 precision: A::from(0.76).unwrap(),
1326 recall: A::from(0.71).unwrap(),
1327 f1_score: A::from(0.73).unwrap(),
1328 auc_roc: A::from(0.83).unwrap(),
1329 false_positive_rate: A::from(0.12).unwrap(),
1330 training_time: Duration::from_secs(90),
1331 inference_time: Duration::from_millis(15),
1332 }
1333 }
1334
1335 fn name(&self) -> String {
1336 "lof".to_string()
1337 }
1338}
1339
1340impl<A: Float + Default + Clone + Send + Sync + std::iter::Sum> EnsembleAnomalyDetector<A> {
1343 fn new(voting_strategy: EnsembleVotingStrategy) -> Result<Self, String> {
1344 Ok(Self {
1345 detector_results: HashMap::new(),
1346 voting_strategy,
1347 detector_weights: HashMap::new(),
1348 detector_performance: HashMap::new(),
1349 ensemble_config: EnsembleConfig {
1350 min_consensus: 2,
1351 ensemble_threshold: A::from(0.5).unwrap(),
1352 dynamic_weighting: true,
1353 evaluation_window: 100,
1354 context_based_selection: false,
1355 },
1356 })
1357 }
1358
1359 fn combine_results(
1360 &mut self,
1361 results: HashMap<String, AnomalyDetectionResult<A>>,
1362 ) -> Result<AnomalyDetectionResult<A>, String> {
1363 if results.is_empty() {
1364 return Ok(AnomalyDetectionResult {
1365 is_anomaly: false,
1366 anomaly_score: A::zero(),
1367 confidence: A::zero(),
1368 anomaly_type: None,
1369 severity: AnomalySeverity::Low,
1370 metadata: HashMap::new(),
1371 });
1372 }
1373
1374 let anomaly_count = results.values().filter(|r| r.is_anomaly).count();
1375 let total_count = results.len();
1376
1377 let avg_score =
1378 results.values().map(|r| r.anomaly_score).sum::<A>() / A::from(total_count).unwrap();
1379 let avg_confidence =
1380 results.values().map(|r| r.confidence).sum::<A>() / A::from(total_count).unwrap();
1381
1382 let is_anomaly = match self.voting_strategy {
1383 EnsembleVotingStrategy::Majority => anomaly_count > total_count / 2,
1384 EnsembleVotingStrategy::MaxScore => avg_score > self.ensemble_config.ensemble_threshold,
1385 _ => anomaly_count >= self.ensemble_config.min_consensus,
1386 };
1387
1388 Ok(AnomalyDetectionResult {
1389 is_anomaly,
1390 anomaly_score: avg_score,
1391 confidence: avg_confidence,
1392 anomaly_type: if is_anomaly {
1393 Some(AnomalyType::StatisticalOutlier)
1394 } else {
1395 None
1396 },
1397 severity: if avg_score > A::from(0.8).unwrap() {
1398 AnomalySeverity::High
1399 } else if avg_score > A::from(0.5).unwrap() {
1400 AnomalySeverity::Medium
1401 } else {
1402 AnomalySeverity::Low
1403 },
1404 metadata: HashMap::new(),
1405 })
1406 }
1407
1408 fn adjust_sensitivity(&mut self, adjustment: A) -> Result<(), String> {
1409 self.ensemble_config.ensemble_threshold = (self.ensemble_config.ensemble_threshold
1410 + adjustment)
1411 .max(A::from(0.1).unwrap())
1412 .min(A::from(0.9).unwrap());
1413 Ok(())
1414 }
1415}
1416
1417impl<A: Float + Default + Clone + Send + Sync + Send + Sync> AdaptiveThresholdManager<A> {
1418 fn new(strategy: ThresholdAdaptationStrategy) -> Result<Self, String> {
1419 Ok(Self {
1420 thresholds: HashMap::new(),
1421 adaptation_strategy: strategy,
1422 performance_feedback: VecDeque::with_capacity(1000),
1423 threshold_bounds: HashMap::new(),
1424 adaptation_params: ThresholdAdaptationParams {
1425 learning_rate: A::from(0.01).unwrap(),
1426 momentum: A::from(0.9).unwrap(),
1427 min_change: A::from(0.001).unwrap(),
1428 max_change: A::from(0.1).unwrap(),
1429 adaptation_frequency: 100,
1430 },
1431 })
1432 }
1433}
1434
1435impl<A: Float + Default + Clone + Send + Sync + Send + Sync> FalsePositiveTracker<A> {
1436 fn new() -> Self {
1437 Self {
1438 false_positives: VecDeque::with_capacity(1000),
1439 fp_rate_calculator: FPRateCalculator {
1440 recent_results: VecDeque::with_capacity(1000),
1441 window_size: 1000,
1442 current_fp_rate: A::from(0.05).unwrap(),
1443 target_fp_rate: A::from(0.05).unwrap(),
1444 },
1445 fp_patterns: FalsePositivePatterns {
1446 temporal_patterns: Vec::new(),
1447 feature_patterns: HashMap::new(),
1448 context_patterns: Vec::new(),
1449 detector_patterns: HashMap::new(),
1450 },
1451 mitigation_strategies: vec![
1452 FPMitigationStrategy::ThresholdAdjustment,
1453 FPMitigationStrategy::ContextFiltering,
1454 ],
1455 }
1456 }
1457
1458 fn get_current_fp_rate(&self) -> f64 {
1459 self.fp_rate_calculator
1460 .current_fp_rate
1461 .to_f64()
1462 .unwrap_or(0.05)
1463 }
1464}
1465
1466impl<A: Float + Default + Clone + Send + Sync + Send + Sync> AnomalyResponseSystem<A> {
1467 fn new(response_strategy: &AnomalyResponseStrategy) -> Result<Self, String> {
1468 let mut response_strategies = HashMap::new();
1469
1470 match response_strategy {
1472 AnomalyResponseStrategy::Ignore => {
1473 response_strategies
1474 .insert(AnomalyType::StatisticalOutlier, vec![ResponseAction::Log]);
1475 }
1476 AnomalyResponseStrategy::Filter => {
1477 response_strategies.insert(
1478 AnomalyType::StatisticalOutlier,
1479 vec![ResponseAction::Quarantine],
1480 );
1481 }
1482 AnomalyResponseStrategy::Adaptive => {
1483 response_strategies.insert(
1484 AnomalyType::StatisticalOutlier,
1485 vec![ResponseAction::Log, ResponseAction::ModelAdjustment],
1486 );
1487 }
1488 _ => {
1489 response_strategies
1490 .insert(AnomalyType::StatisticalOutlier, vec![ResponseAction::Alert]);
1491 }
1492 }
1493
1494 Ok(Self {
1495 response_strategies,
1496 response_executor: ResponseExecutor {
1497 pending_responses: VecDeque::new(),
1498 execution_history: VecDeque::with_capacity(1000),
1499 resource_limits: ResponseResourceLimits {
1500 max_concurrent_responses: 10,
1501 max_cpu_usage: 0.2,
1502 max_memory_usage: 100 * 1024 * 1024, max_execution_time: Duration::from_secs(60),
1504 },
1505 },
1506 effectiveness_tracker: ResponseEffectivenessTracker {
1507 effectiveness_metrics: HashMap::new(),
1508 outcome_tracking: VecDeque::with_capacity(1000),
1509 effectiveness_trends: HashMap::new(),
1510 },
1511 escalation_rules: Vec::new(),
1512 })
1513 }
1514
1515 fn trigger_response(
1516 &mut self,
1517 _result: &AnomalyDetectionResult<A>,
1518 _data_point: &StreamingDataPoint<A>,
1519 ) -> Result<(), String> {
1520 Ok(())
1522 }
1523
1524 fn get_success_rate(&self) -> f64 {
1525 0.85
1527 }
1528}
1529
/// Monitoring snapshot of the anomaly-detection subsystem.
#[derive(Debug, Clone)]
pub struct AnomalyDiagnostics {
    /// Number of anomaly events currently retained in history.
    pub total_anomalies: usize,
    /// Anomalies per second over the last hour.
    pub recent_anomaly_rate: f64,
    /// Current estimated false-positive rate.
    pub false_positive_rate: f64,
    /// Total registered detectors (statistical + ML).
    pub detector_count: usize,
    /// Success fraction of executed responses.
    pub response_success_rate: f64,
}