optirs_core/streaming/concept_drift.rs

// Concept drift detection and adaptation for streaming optimization
//
// This module provides various algorithms for detecting when the underlying
// data distribution changes (concept drift) and adapting the optimizer accordingly.

use scirs2_core::numeric::Float;
use std::collections::VecDeque;
use std::iter::Sum;
use std::time::{Duration, Instant};

#[allow(unused_imports)]
use crate::error::Result;

/// Types of concept drift detection algorithms
#[derive(Debug, Clone, Copy, PartialEq)]
#[allow(dead_code)]
pub enum DriftDetectionMethod {
    /// Page-Hinkley test for change detection
    PageHinkley,
    /// ADWIN (Adaptive Windowing) algorithm
    Adwin,
    /// Drift Detection Method (DDM)
    DriftDetectionMethod,
    /// Early Drift Detection Method (EDDM)
    EarlyDriftDetection,
    /// Statistical test-based detection
    StatisticalTest,
    /// Ensemble-based detection
    Ensemble,
}

/// Concept drift detector configuration
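///
/// A minimal construction sketch (the overrides below are illustrative, and the
/// import path assumes this module is reachable as shown):
///
/// ```ignore
/// use optirs_core::streaming::concept_drift::{DriftDetectionMethod, DriftDetectorConfig};
///
/// let config = DriftDetectorConfig {
///     method: DriftDetectionMethod::Adwin,
///     window_size: 200,
///     ..Default::default()
/// };
/// ```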
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub struct DriftDetectorConfig {
    /// Detection method to use
    pub method: DriftDetectionMethod,
    /// Minimum samples before detection
    pub min_samples: usize,
    /// Detection threshold
    pub threshold: f64,
    /// Window size for statistical methods
    pub window_size: usize,
    /// Alpha value for statistical tests
    pub alpha: f64,
    /// Warning threshold (before drift)
    pub warningthreshold: f64,
    /// Enable ensemble detection
    pub enable_ensemble: bool,
}

impl Default for DriftDetectorConfig {
    fn default() -> Self {
        Self {
            method: DriftDetectionMethod::PageHinkley,
            min_samples: 30,
            threshold: 3.0,
            window_size: 100,
            alpha: 0.005,
            warningthreshold: 2.0,
            enable_ensemble: false,
        }
    }
}

/// Concept drift detection result
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum DriftStatus {
    /// No drift detected
    Stable,
    /// Warning level - potential drift
    Warning,
    /// Drift detected
    Drift,
}

/// Drift detection event
#[derive(Debug, Clone)]
pub struct DriftEvent<A: Float + Send + Sync> {
    /// Timestamp of detection
    pub timestamp: Instant,
    /// Detection confidence (0.0 to 1.0)
    pub confidence: A,
    /// Type of drift detected
    pub drift_type: DriftType,
    /// Recommendation for adaptation
    pub adaptation_recommendation: AdaptationRecommendation,
}

/// Types of concept drift
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum DriftType {
    /// Sudden/abrupt drift
    Sudden,
    /// Gradual drift
    Gradual,
    /// Incremental drift
    Incremental,
    /// Recurring drift
    Recurring,
    /// Blip (temporary change)
    Blip,
}

/// Recommendations for adapting to drift
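///
/// A sketch of how a caller might act on a recommendation; `optimizer`, `lr`, and
/// `window` stand in for caller-side state and are not part of this module:
///
/// ```ignore
/// match recommendation {
///     AdaptationRecommendation::Reset => optimizer.reset(),
///     AdaptationRecommendation::IncreaseLearningRate { factor } => lr *= factor,
///     AdaptationRecommendation::DecreaseLearningRate { factor } => lr *= factor,
///     AdaptationRecommendation::SwitchOptimizer { new_optimizer } => {
///         println!("switching to {}", new_optimizer);
///     }
///     AdaptationRecommendation::AdjustWindow { new_size } => window = new_size,
///     AdaptationRecommendation::NoAction => {}
/// }
/// ```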
#[derive(Debug, Clone)]
pub enum AdaptationRecommendation {
    /// Reset optimizer state
    Reset,
    /// Increase learning rate
    IncreaseLearningRate { factor: f64 },
    /// Decrease learning rate
    DecreaseLearningRate { factor: f64 },
    /// Use different optimizer
    SwitchOptimizer { new_optimizer: String },
    /// Adjust window size
    AdjustWindow { new_size: usize },
    /// No adaptation needed
    NoAction,
}

/// Page-Hinkley drift detector
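///
/// A minimal usage sketch with `f64` losses and illustrative thresholds (the
/// import path is assumed):
///
/// ```ignore
/// use optirs_core::streaming::concept_drift::{DriftStatus, PageHinkleyDetector};
///
/// let mut detector = PageHinkleyDetector::new(3.0f64, 2.0f64);
/// for &loss in &[0.1, 0.1, 0.9, 0.9, 0.9] {
///     if detector.update(loss) == DriftStatus::Drift {
///         println!("drift detected");
///     }
/// }
/// ```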
#[derive(Debug, Clone)]
pub struct PageHinkleyDetector<A: Float + Send + Sync> {
    /// Cumulative sum
    sum: A,
    /// Minimum cumulative sum seen
    min_sum: A,
    /// Detection threshold
    threshold: A,
    /// Warning threshold
    warningthreshold: A,
    /// Sample count
    sample_count: usize,
    /// Last drift time
    last_drift: Option<Instant>,
}

impl<A: Float + Send + Sync> PageHinkleyDetector<A> {
    /// Create a new Page-Hinkley detector
    pub fn new(threshold: A, warningthreshold: A) -> Self {
        Self {
            sum: A::zero(),
            min_sum: A::zero(),
            threshold,
            warningthreshold,
            sample_count: 0,
            last_drift: None,
        }
    }

    /// Update detector with new loss value
    pub fn update(&mut self, loss: A) -> DriftStatus {
        self.sample_count += 1;

        // Update cumulative sum (assuming we want to detect increases in loss)
        let mean_loss = A::from(0.1).unwrap(); // Estimated mean under H0
        self.sum = self.sum + loss - mean_loss;

        // Update minimum
        if self.sum < self.min_sum {
            self.min_sum = self.sum;
        }

        // Compute test statistic
        let test_stat = self.sum - self.min_sum;

        if test_stat > self.threshold {
            self.last_drift = Some(Instant::now());
            self.reset();
            DriftStatus::Drift
        } else if test_stat > self.warningthreshold {
            DriftStatus::Warning
        } else {
            DriftStatus::Stable
        }
    }

    /// Reset detector state
    pub fn reset(&mut self) {
        self.sum = A::zero();
        self.min_sum = A::zero();
        self.sample_count = 0;
    }
}

/// ADWIN (Adaptive Windowing) drift detector
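///
/// A minimal usage sketch (the delta and window size below are illustrative, and
/// the import path is assumed):
///
/// ```ignore
/// use optirs_core::streaming::concept_drift::{AdwinDetector, DriftStatus};
///
/// let mut detector = AdwinDetector::new(0.005f64, 100);
/// let status = detector.update(0.12);
/// assert_ne!(status, DriftStatus::Drift); // a single sample cannot trigger drift
/// ```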
#[derive(Debug, Clone)]
pub struct AdwinDetector<A: Float + Send + Sync> {
    /// Window of recent values
    window: VecDeque<A>,
    /// Maximum window size
    max_windowsize: usize,
    /// Detection confidence level
    delta: A,
    /// Minimum window size for detection
    min_window_size: usize,
}

impl<A: Float + Sum + Send + Sync> AdwinDetector<A> {
    /// Create a new ADWIN detector
    pub fn new(delta: A, max_windowsize: usize) -> Self {
        Self {
            window: VecDeque::new(),
            max_windowsize,
            delta,
            min_window_size: 10,
        }
    }

    /// Update detector with new value
    pub fn update(&mut self, value: A) -> DriftStatus {
        self.window.push_back(value);

        // Maintain window size
        if self.window.len() > self.max_windowsize {
            self.window.pop_front();
        }

        // Check for drift
        if self.window.len() >= self.min_window_size {
            if self.detect_change() {
                self.shrink_window();
                DriftStatus::Drift
            } else {
                DriftStatus::Stable
            }
        } else {
            DriftStatus::Stable
        }
    }

    /// Detect change using ADWIN algorithm
    fn detect_change(&self) -> bool {
        let n = self.window.len();
        if n < 2 {
            return false;
        }

        // Simplified ADWIN: check for significant difference between halves
        let mid = n / 2;

        let first_half: Vec<_> = self.window.iter().take(mid).cloned().collect();
        let second_half: Vec<_> = self.window.iter().skip(mid).cloned().collect();

        let mean1 = first_half.iter().cloned().sum::<A>() / A::from(first_half.len()).unwrap();
        let mean2 = second_half.iter().cloned().sum::<A>() / A::from(second_half.len()).unwrap();

        // Compute variance
        let var1 = first_half
            .iter()
            .map(|&x| {
                let diff = x - mean1;
                diff * diff
            })
            .sum::<A>()
            / A::from(first_half.len()).unwrap();

        let var2 = second_half
            .iter()
            .map(|&x| {
                let diff = x - mean2;
                diff * diff
            })
            .sum::<A>()
            / A::from(second_half.len()).unwrap();

        // Simplified change detection
        let diff = (mean1 - mean2).abs();
        let threshold = (var1 + var2 + A::from(0.01).unwrap()).sqrt();

        diff > threshold
    }

    /// Shrink window after drift detection
    fn shrink_window(&mut self) {
        let new_size = self.window.len() / 2;
        while self.window.len() > new_size {
            self.window.pop_front();
        }
    }
}

/// DDM (Drift Detection Method) detector
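///
/// A minimal usage sketch; DDM consumes per-sample prediction correctness rather
/// than loss values (the import path is assumed):
///
/// ```ignore
/// use optirs_core::streaming::concept_drift::DdmDetector;
///
/// let mut detector = DdmDetector::<f64>::new();
/// let _status = detector.update(false); // `false` = prediction was correct
/// ```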
#[derive(Debug, Clone)]
pub struct DdmDetector<A: Float + Send + Sync> {
    /// Error rate
    error_rate: A,
    /// Standard deviation of error rate
    error_std: A,
    /// Minimum error rate + 2*std
    min_error_plus_2_std: A,
    /// Minimum error rate + 3*std
    min_error_plus_3_std: A,
    /// Sample count
    sample_count: usize,
    /// Error count
    error_count: usize,
}

impl<A: Float + Send + Sync> DdmDetector<A> {
    /// Create a new DDM detector
    pub fn new() -> Self {
        Self {
            error_rate: A::zero(),
            error_std: A::one(),
            min_error_plus_2_std: A::from(f64::MAX).unwrap(),
            min_error_plus_3_std: A::from(f64::MAX).unwrap(),
            sample_count: 0,
            error_count: 0,
        }
    }

    /// Update with prediction result
    pub fn update(&mut self, iserror: bool) -> DriftStatus {
        self.sample_count += 1;
        if iserror {
            self.error_count += 1;
        }

        if self.sample_count < 30 {
            return DriftStatus::Stable;
        }

        // Update error rate and standard deviation
        self.error_rate = A::from(self.error_count as f64 / self.sample_count as f64).unwrap();
        let p = self.error_rate;
        let n = A::from(self.sample_count as f64).unwrap();
        self.error_std = (p * (A::one() - p) / n).sqrt();

        let current_level = self.error_rate + A::from(2.0).unwrap() * self.error_std;

        // Update minimums
        if current_level < self.min_error_plus_2_std {
            self.min_error_plus_2_std = current_level;
            self.min_error_plus_3_std = self.error_rate + A::from(3.0).unwrap() * self.error_std;
        }

        // Check for drift
        if current_level > self.min_error_plus_3_std {
            self.reset();
            DriftStatus::Drift
        } else if current_level > self.min_error_plus_2_std {
            DriftStatus::Warning
        } else {
            DriftStatus::Stable
        }
    }

    /// Reset detector state
    pub fn reset(&mut self) {
        self.sample_count = 0;
        self.error_count = 0;
        self.error_rate = A::zero();
        self.error_std = A::one();
        self.min_error_plus_2_std = A::from(f64::MAX).unwrap();
        self.min_error_plus_3_std = A::from(f64::MAX).unwrap();
    }
}

impl<A: Float + Send + Sync> Default for DdmDetector<A> {
    fn default() -> Self {
        Self::new()
    }
}

/// Comprehensive concept drift detector
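///
/// A minimal end-to-end sketch (losses and error flags are illustrative, and the
/// import path is assumed):
///
/// ```ignore
/// use optirs_core::streaming::concept_drift::{
///     ConceptDriftDetector, DriftDetectorConfig, DriftStatus,
/// };
///
/// let mut detector = ConceptDriftDetector::<f64>::new(DriftDetectorConfig::default());
/// let status = detector.update(0.42, true).unwrap();
/// if status == DriftStatus::Drift {
///     println!("drifts so far: {}", detector.get_statistics().total_drifts);
/// }
/// ```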
pub struct ConceptDriftDetector<A: Float + Send + Sync> {
    /// Configuration
    config: DriftDetectorConfig,

    /// Page-Hinkley detector
    ph_detector: PageHinkleyDetector<A>,

    /// ADWIN detector
    adwin_detector: AdwinDetector<A>,

    /// DDM detector
    ddm_detector: DdmDetector<A>,

    /// Ensemble voting history
    ensemble_history: VecDeque<DriftStatus>,

    /// Drift events history
    drift_events: Vec<DriftEvent<A>>,

    /// Performance before/after drift
    performance_tracker: PerformanceDriftTracker<A>,
}

impl<A: Float + std::fmt::Debug + Sum + Send + Sync> ConceptDriftDetector<A> {
    /// Create a new concept drift detector
    pub fn new(config: DriftDetectorConfig) -> Self {
        let threshold = A::from(config.threshold).unwrap();
        let warningthreshold = A::from(config.warningthreshold).unwrap();
        let delta = A::from(config.alpha).unwrap();

        Self {
            ph_detector: PageHinkleyDetector::new(threshold, warningthreshold),
            adwin_detector: AdwinDetector::new(delta, config.window_size),
            ddm_detector: DdmDetector::new(),
            ensemble_history: VecDeque::with_capacity(10),
            drift_events: Vec::new(),
            performance_tracker: PerformanceDriftTracker::new(),
            config,
        }
    }

    /// Update detector with new loss and prediction error
    pub fn update(&mut self, loss: A, is_predictionerror: bool) -> Result<DriftStatus> {
        let ph_status = self.ph_detector.update(loss);
        let adwin_status = self.adwin_detector.update(loss);
        let ddm_status = self.ddm_detector.update(is_predictionerror);

        let final_status = if self.config.enable_ensemble {
            self.ensemble_vote(ph_status, adwin_status, ddm_status)
        } else {
            match self.config.method {
                DriftDetectionMethod::PageHinkley => ph_status,
                DriftDetectionMethod::Adwin => adwin_status,
                DriftDetectionMethod::DriftDetectionMethod => ddm_status,
                _ => ddm_status, // Fallback for EarlyDriftDetection, StatisticalTest, Ensemble
            }
        };

        // Record drift event if detected
        if final_status == DriftStatus::Drift {
            let event = DriftEvent {
                timestamp: Instant::now(),
                confidence: A::from(0.8).unwrap(), // Simplified confidence
                drift_type: self.classify_drift_type(),
                adaptation_recommendation: self.generate_adaptation_recommendation(),
            };
            self.drift_events.push(event);
        }

        // Update performance tracking
        self.performance_tracker.update(loss, final_status);

        Ok(final_status)
    }

    /// Ensemble voting among detectors
    fn ensemble_vote(
        &mut self,
        ph: DriftStatus,
        adwin: DriftStatus,
        ddm: DriftStatus,
    ) -> DriftStatus {
        let votes = [ph, adwin, ddm];

        // Count votes
        let drift_votes = votes.iter().filter(|&&s| s == DriftStatus::Drift).count();
        let warning_votes = votes.iter().filter(|&&s| s == DriftStatus::Warning).count();

        if drift_votes >= 2 {
            DriftStatus::Drift
        } else if warning_votes >= 2 || drift_votes >= 1 {
            DriftStatus::Warning
        } else {
            DriftStatus::Stable
        }
    }

    /// Classify the type of drift based on recent history
    fn classify_drift_type(&self) -> DriftType {
        // Simplified classification based on recent drift events
        if self.drift_events.len() < 2 {
            return DriftType::Sudden;
        }

        let recent_events = self.drift_events.iter().rev().take(5);
        let time_intervals: Vec<_> = recent_events
            .map(|event| event.timestamp)
            .collect::<Vec<_>>()
            .windows(2)
            .map(|window| window[0].duration_since(window[1]))
            .collect();

        if time_intervals.iter().all(|&d| d < Duration::from_secs(60)) {
            DriftType::Sudden
        } else if time_intervals.len() > 2 {
            DriftType::Gradual
        } else {
            DriftType::Incremental
        }
    }

    /// Generate adaptation recommendation based on drift characteristics
    fn generate_adaptation_recommendation(&self) -> AdaptationRecommendation {
        let recent_performance = self.performance_tracker.get_recent_performance_change();

        if recent_performance > A::from(0.5).unwrap() {
            // Significant performance degradation
            AdaptationRecommendation::Reset
        } else if recent_performance > A::from(0.2).unwrap() {
            // Moderate degradation
            AdaptationRecommendation::IncreaseLearningRate { factor: 1.5 }
        } else if recent_performance < A::from(-0.1).unwrap() {
            // Performance improved (suspicious)
            AdaptationRecommendation::DecreaseLearningRate { factor: 0.8 }
        } else {
            AdaptationRecommendation::NoAction
        }
    }

    /// Get drift detection statistics
    pub fn get_statistics(&self) -> DriftStatistics<A> {
        DriftStatistics {
            total_drifts: self.drift_events.len(),
            recent_drift_rate: self.calculate_recent_drift_rate(),
            average_drift_confidence: self.calculate_average_confidence(),
            drift_types_distribution: self.calculate_drift_type_distribution(),
            time_since_last_drift: self.time_since_last_drift(),
        }
    }

    fn calculate_recent_drift_rate(&self) -> f64 {
        // Calculate drift rate in the last hour
        let recent_drifts = self
            .drift_events
            .iter()
            .filter(|event| event.timestamp.elapsed() < Duration::from_secs(3600))
            .count();
        recent_drifts as f64 / 3600.0 // Drifts per second
    }

    fn calculate_average_confidence(&self) -> Option<A> {
        if self.drift_events.is_empty() {
            None
        } else {
            let sum = self
                .drift_events
                .iter()
                .map(|event| event.confidence)
                .sum::<A>();
            Some(sum / A::from(self.drift_events.len()).unwrap())
        }
    }

    fn calculate_drift_type_distribution(&self) -> std::collections::HashMap<DriftType, usize> {
        let mut distribution = std::collections::HashMap::new();
        for event in &self.drift_events {
            *distribution.entry(event.drift_type).or_insert(0) += 1;
        }
        distribution
    }

    fn time_since_last_drift(&self) -> Option<Duration> {
        self.drift_events
            .last()
            .map(|event| event.timestamp.elapsed())
    }
}

/// Performance tracker for drift impact analysis
#[derive(Debug, Clone)]
struct PerformanceDriftTracker<A: Float + Send + Sync> {
    /// Performance history with drift annotations
    performance_history: VecDeque<(A, DriftStatus, Instant)>,
    /// Window size for analysis
    window_size: usize,
}

impl<A: Float + std::iter::Sum + Send + Sync> PerformanceDriftTracker<A> {
    fn new() -> Self {
        Self {
            performance_history: VecDeque::new(),
            window_size: 100,
        }
    }

    fn update(&mut self, performance: A, driftstatus: DriftStatus) {
        self.performance_history
            .push_back((performance, driftstatus, Instant::now()));

        // Maintain window size
        if self.performance_history.len() > self.window_size {
            self.performance_history.pop_front();
        }
    }

    /// Get recent performance change (positive = degradation, negative = improvement)
    fn get_recent_performance_change(&self) -> A {
        if self.performance_history.len() < 10 {
            return A::zero();
        }

        let recent: Vec<_> = self.performance_history.iter().rev().take(10).collect();
        let older: Vec<_> = self
            .performance_history
            .iter()
            .rev()
            .skip(10)
            .take(10)
            .collect();

        if older.is_empty() {
            return A::zero();
        }

        let recent_avg =
            recent.iter().map(|(p, _, _)| *p).sum::<A>() / A::from(recent.len()).unwrap();
        let older_avg = older.iter().map(|(p, _, _)| *p).sum::<A>() / A::from(older.len()).unwrap();

        recent_avg - older_avg
    }
}

/// Drift detection statistics
#[derive(Debug, Clone)]
pub struct DriftStatistics<A: Float + Send + Sync> {
    /// Total number of drifts detected
    pub total_drifts: usize,
    /// Recent drift rate (drifts per second)
    pub recent_drift_rate: f64,
    /// Average confidence of drift detections
    pub average_drift_confidence: Option<A>,
    /// Distribution of drift types
    pub drift_types_distribution: std::collections::HashMap<DriftType, usize>,
    /// Time since last drift
    pub time_since_last_drift: Option<Duration>,
}

/// Advanced concept drift analysis and adaptation
pub mod advanced_drift_analysis {
    use super::*;
    use std::collections::HashMap;

    /// Advanced drift detector with machine learning-based detection
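    ///
    /// A minimal invocation sketch; the context feature below is illustrative and
    /// imports are omitted (types come from this module and its parent):
    ///
    /// ```ignore
    /// let mut detector = AdvancedDriftDetector::<f64>::new(DriftDetectorConfig::default());
    /// let context = [ContextFeature {
    ///     name: "throughput".to_string(),
    ///     value: 0.7,
    ///     importance_weight: 1.0,
    ///     temporal_stability: 0.9,
    /// }];
    /// let result = detector.detect_drift_advanced(0.35, &context).unwrap();
    /// println!("status: {:?}", result.status);
    /// ```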
    #[derive(Debug)]
    pub struct AdvancedDriftDetector<A: Float + Send + Sync> {
        /// Base detector ensemble
        base_detectors: Vec<Box<dyn DriftDetectorTrait<A>>>,

        /// Drift pattern analyzer
        pattern_analyzer: DriftPatternAnalyzer<A>,

        /// Adaptive threshold manager
        threshold_manager: AdaptiveThresholdManager<A>,

        /// Context-aware drift detection
        context_detector: ContextAwareDriftDetector<A>,

        /// Performance impact analyzer
        impact_analyzer: DriftImpactAnalyzer<A>,

        /// Adaptation strategy selector
        adaptation_selector: AdaptationStrategySelector<A>,

        /// Historical drift database
        drift_database: DriftDatabase<A>,
    }

    /// Trait for all drift detectors
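    ///
    /// A sketch of a custom detector that could be plugged into the ensemble; the
    /// fixed-threshold logic and constant confidence are illustrative only:
    ///
    /// ```ignore
    /// #[derive(Debug)]
    /// struct ThresholdDetector {
    ///     limit: f64,
    /// }
    ///
    /// impl DriftDetectorTrait<f64> for ThresholdDetector {
    ///     fn update(&mut self, value: f64) -> DriftStatus {
    ///         if value > self.limit {
    ///             DriftStatus::Drift
    ///         } else {
    ///             DriftStatus::Stable
    ///         }
    ///     }
    ///     fn reset(&mut self) {}
    ///     fn get_confidence(&self) -> f64 {
    ///         0.5
    ///     }
    /// }
    /// ```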
    pub trait DriftDetectorTrait<A: Float + Send + Sync>: std::fmt::Debug {
        fn update(&mut self, value: A) -> DriftStatus;
        fn reset(&mut self);
        fn get_confidence(&self) -> A;
    }

    /// Drift pattern analyzer for characterizing drift behavior
    #[derive(Debug)]
    pub struct DriftPatternAnalyzer<A: Float + Send + Sync> {
        /// Pattern history buffer
        pattern_buffer: VecDeque<PatternFeatures<A>>,

        /// Learned drift patterns
        known_patterns: HashMap<String, DriftPattern<A>>,

        /// Pattern matching threshold
        matching_threshold: A,

        /// Feature extractors
        feature_extractors: Vec<Box<dyn FeatureExtractor<A>>>,
    }

    /// Pattern features for drift characterization
    #[derive(Debug, Clone)]
    pub struct PatternFeatures<A: Float + Send + Sync> {
        /// Statistical moments
        pub mean: A,
        pub variance: A,
        pub skewness: A,
        pub kurtosis: A,

        /// Trend indicators
        pub trend_slope: A,
        pub trend_strength: A,

        /// Frequency domain features
        pub dominant_frequency: A,
        pub spectral_entropy: A,

        /// Temporal features
        pub temporal_locality: A,
        pub persistence: A,

        /// Complexity measures
        pub entropy: A,
        pub fractal_dimension: A,
    }

    /// Learned drift pattern
    #[derive(Debug, Clone)]
    pub struct DriftPattern<A: Float + Send + Sync> {
        /// Pattern identifier
        pub id: String,

        /// Characteristic features
        pub features: PatternFeatures<A>,

        /// Pattern type
        pub pattern_type: DriftType,

        /// Typical duration
        pub typical_duration: Duration,

        /// Optimal adaptation strategy
        pub optimal_adaptation: AdaptationRecommendation,

        /// Success rate of this pattern's adaptations
        pub adaptation_success_rate: A,

        /// Occurrence frequency
        pub occurrence_count: usize,
    }

    /// Feature extractor trait
    pub trait FeatureExtractor<A: Float + Send + Sync>: std::fmt::Debug {
        fn extract(&self, data: &[A]) -> A;
        fn name(&self) -> &str;
    }

    /// Adaptive threshold management
    #[derive(Debug)]
    pub struct AdaptiveThresholdManager<A: Float + Send + Sync> {
        /// Current thresholds for different detectors
        thresholds: HashMap<String, A>,

        /// Threshold adaptation history
        threshold_history: VecDeque<ThresholdUpdate<A>>,

        /// Performance feedback for threshold adjustment
        performance_feedback: VecDeque<PerformanceFeedback<A>>,

        /// Learning rate for threshold adaptation
        learning_rate: A,
    }

    /// Threshold update record
    #[derive(Debug, Clone)]
    pub struct ThresholdUpdate<A: Float + Send + Sync> {
        pub detector_name: String,
        pub old_threshold: A,
        pub new_threshold: A,
        pub timestamp: Instant,
        pub reason: String,
    }

    /// Performance feedback for threshold adjustment
    #[derive(Debug, Clone)]
    pub struct PerformanceFeedback<A: Float + Send + Sync> {
        pub true_positive_rate: A,
        pub false_positive_rate: A,
        pub detection_delay: Duration,
        pub adaptation_effectiveness: A,
        pub timestamp: Instant,
    }

    /// Context-aware drift detection
    #[derive(Debug)]
    pub struct ContextAwareDriftDetector<A: Float + Send + Sync> {
        /// Contextual features
        context_features: Vec<ContextFeature<A>>,

        /// Context-specific drift models
        context_models: HashMap<String, Box<dyn DriftDetectorTrait<A>>>,

        /// Current context state
        current_context: Option<String>,

        /// Context transition matrix
        transition_matrix: HashMap<(String, String), A>,
    }

    /// Contextual feature for drift detection
    #[derive(Debug, Clone)]
    pub struct ContextFeature<A: Float + Send + Sync> {
        pub name: String,
        pub value: A,
        pub importance_weight: A,
        pub temporal_stability: A,
    }

    /// Drift impact analyzer
    #[derive(Debug)]
    pub struct DriftImpactAnalyzer<A: Float + Send + Sync> {
        /// Impact metrics history
        impact_history: VecDeque<DriftImpact<A>>,

        /// Severity classifier
        severity_classifier: SeverityClassifier<A>,

        /// Recovery time predictor
        recovery_predictor: RecoveryTimePredictor<A>,

        /// Business impact estimator
        business_impact_estimator: BusinessImpactEstimator<A>,
    }

    /// Drift impact assessment
    #[derive(Debug, Clone)]
    pub struct DriftImpact<A: Float + Send + Sync> {
        /// Performance degradation magnitude
        pub performance_degradation: A,

        /// Affected metrics
        pub affected_metrics: Vec<String>,

        /// Estimated recovery time
        pub estimated_recovery_time: Duration,

        /// Confidence in impact assessment
        pub confidence: A,

        /// Business impact score
        pub business_impact_score: A,

        /// Urgency level
        pub urgency_level: UrgencyLevel,
    }

    /// Urgency levels for drift response
    #[derive(Debug, Clone, Copy, PartialEq)]
    pub enum UrgencyLevel {
        Low,
        Medium,
        High,
        Critical,
    }

    /// Adaptation strategy selector
    #[derive(Debug)]
    pub struct AdaptationStrategySelector<A: Float + Send + Sync> {
        /// Available adaptation strategies
        strategies: Vec<AdaptationStrategy<A>>,

        /// Strategy performance history
        strategy_performance: HashMap<String, StrategyPerformance<A>>,

        /// Multi-armed bandit for strategy selection
        bandit: EpsilonGreedyBandit<A>,

        /// Context-strategy mapping
        context_strategy_map: HashMap<String, Vec<String>>,
    }

    /// Adaptation strategy
    #[derive(Debug, Clone)]
    pub struct AdaptationStrategy<A: Float + Send + Sync> {
        /// Strategy identifier
        pub id: String,

        /// Strategy type
        pub strategy_type: AdaptationStrategyType,

        /// Parameters
        pub parameters: HashMap<String, A>,

        /// Applicability conditions
        pub applicability_conditions: Vec<ApplicabilityCondition<A>>,

        /// Expected effectiveness
        pub expected_effectiveness: A,

        /// Computational cost
        pub computational_cost: A,
    }

    /// Types of adaptation strategies
    #[derive(Debug, Clone, Copy)]
    pub enum AdaptationStrategyType {
        ParameterTuning,
        ModelReplacement,
        EnsembleReweighting,
        ArchitectureChange,
        DataAugmentation,
        FeatureSelection,
        Hybrid,
    }

    /// Conditions for strategy applicability
    #[derive(Debug, Clone)]
    pub struct ApplicabilityCondition<A: Float + Send + Sync> {
        pub feature_name: String,
        pub operator: ComparisonOperator,
        pub threshold: A,
        pub weight: A,
    }

    #[derive(Debug, Clone, Copy)]
    pub enum ComparisonOperator {
        GreaterThan,
        LessThan,
        Equal,
        NotEqual,
        GreaterEqual,
        LessEqual,
    }

    /// Strategy performance tracking
    #[derive(Debug, Clone)]
    pub struct StrategyPerformance<A: Float + Send + Sync> {
        pub success_rate: A,
        pub average_improvement: A,
        pub average_adaptation_time: Duration,
        pub stability_after_adaptation: A,
        pub usage_count: usize,
    }

    /// Epsilon-greedy bandit for strategy selection
    #[derive(Debug)]
    pub struct EpsilonGreedyBandit<A: Float + Send + Sync> {
        epsilon: A,
        action_values: HashMap<String, A>,
        action_counts: HashMap<String, usize>,
        total_trials: usize,
    }

    /// Historical drift database
    #[derive(Debug)]
    pub struct DriftDatabase<A: Float + Send + Sync> {
        /// Stored drift events
        drift_events: Vec<StoredDriftEvent<A>>,

        /// Pattern-outcome associations
        pattern_outcomes: HashMap<String, Vec<AdaptationOutcome<A>>>,

        /// Seasonal drift patterns
        seasonal_patterns: HashMap<String, SeasonalPattern<A>>,

        /// Similarity search index
        similarity_index: SimilarityIndex<A>,
    }

    /// Stored drift event for learning
    #[derive(Debug, Clone)]
    pub struct StoredDriftEvent<A: Float + Send + Sync> {
        pub features: PatternFeatures<A>,
        pub context: Vec<ContextFeature<A>>,
        pub applied_strategy: String,
        pub outcome: AdaptationOutcome<A>,
        pub timestamp: Instant,
    }

    /// Adaptation outcome for learning
    #[derive(Debug, Clone)]
    pub struct AdaptationOutcome<A: Float + Send + Sync> {
        pub success: bool,
        pub performance_improvement: A,
        pub adaptation_time: Duration,
        pub stability_period: Duration,
        pub side_effects: Vec<String>,
    }

    /// Seasonal drift pattern
    #[derive(Debug, Clone)]
    pub struct SeasonalPattern<A: Float + Send + Sync> {
        pub period: Duration,
        pub amplitude: A,
        pub phase_offset: Duration,
        pub pattern_strength: A,
        pub last_occurrence: Instant,
    }

    /// Similarity search for historical patterns
    #[derive(Debug)]
    pub struct SimilarityIndex<A: Float + Send + Sync> {
        /// Feature vectors for similarity search
        feature_vectors: Vec<(String, Vec<A>)>,

        /// Similarity threshold
        similarity_threshold: A,

        /// Distance metric
        distance_metric: DistanceMetric,
    }

    #[derive(Debug, Clone, Copy)]
    pub enum DistanceMetric {
        Euclidean,
        Manhattan,
        Cosine,
        Mahalanobis,
    }

    impl<A: Float + Default + Clone + std::fmt::Debug + std::iter::Sum + Send + Sync>
        AdvancedDriftDetector<A>
    {
        /// Create new advanced drift detector
        pub fn new(_config: DriftDetectorConfig) -> Self {
            let base_detectors: Vec<Box<dyn DriftDetectorTrait<A>>> = vec![
                // Add base detectors here
            ];

            Self {
                base_detectors,
                pattern_analyzer: DriftPatternAnalyzer::new(),
                threshold_manager: AdaptiveThresholdManager::new(),
                context_detector: ContextAwareDriftDetector::new(),
                impact_analyzer: DriftImpactAnalyzer::new(),
                adaptation_selector: AdaptationStrategySelector::new(),
                drift_database: DriftDatabase::new(),
            }
        }

        /// Advanced drift detection with pattern analysis
        pub fn detect_drift_advanced(
            &mut self,
            value: A,
            context_features: &[ContextFeature<A>],
        ) -> Result<AdvancedDriftResult<A>> {
            // Update context
            self.context_detector.update_context(context_features);

            // Run base detectors
            let base_results: Vec<_> = self
                .base_detectors
                .iter_mut()
                .map(|detector| detector.update(value))
                .collect();

            // Analyze patterns
            let pattern_features = self.pattern_analyzer.extract_features(&[value])?;
            let matched_pattern = self.pattern_analyzer.match_pattern(&pattern_features);

            // Adaptive threshold adjustment
            self.threshold_manager
                .update_thresholds(&base_results, &pattern_features);

            // Combine results with confidence weighting
            let combined_result = self.combine_detection_results(&base_results, &matched_pattern);

            // Analyze impact if drift detected
            let impact = if combined_result.status == DriftStatus::Drift {
                Some(
                    self.impact_analyzer
                        .analyze_impact(&pattern_features, &matched_pattern)?,
                )
            } else {
                None
            };

            // Select adaptation strategy
            let adaptation_strategy = if let Some(ref impact) = impact {
                self.adaptation_selector.select_strategy(
                    &pattern_features,
                    impact,
                    &matched_pattern,
                )?
            } else {
                None
            };

            // Store in database for learning
            if combined_result.status == DriftStatus::Drift {
                self.drift_database.store_event(
                    &pattern_features,
                    context_features,
                    &adaptation_strategy,
                );
            }

            Ok(AdvancedDriftResult {
                status: combined_result.status,
                confidence: combined_result.confidence,
                matched_pattern,
                impact,
                recommended_strategy: adaptation_strategy,
                feature_importance: self.calculate_feature_importance(&pattern_features),
                prediction_horizon: self.estimate_drift_duration(&pattern_features),
            })
        }

        fn combine_detection_results(
            &self,
            base_results: &[DriftStatus],
            matched_pattern: &Option<DriftPattern<A>>,
        ) -> CombinedDetectionResult<A> {
            // Weighted voting based on detector confidence and pattern matching
            let drift_votes = base_results
                .iter()
                .filter(|&&s| s == DriftStatus::Drift)
                .count();
            let warning_votes = base_results
                .iter()
                .filter(|&&s| s == DriftStatus::Warning)
                .count();

            // Pattern-based confidence adjustment
            let pattern_confidence = matched_pattern
                .as_ref()
                .map(|p| p.adaptation_success_rate)
                .unwrap_or(A::from(0.5).unwrap());

            let status = if drift_votes >= 2 {
                DriftStatus::Drift
            } else if warning_votes >= 2
                || (drift_votes >= 1 && pattern_confidence > A::from(0.7).unwrap())
            {
                DriftStatus::Warning
            } else {
                DriftStatus::Stable
            };

            let confidence = A::from(drift_votes as f64 / base_results.len() as f64).unwrap()
                * pattern_confidence;

            CombinedDetectionResult { status, confidence }
        }

        fn calculate_feature_importance(
            &self,
            features: &PatternFeatures<A>,
        ) -> HashMap<String, A> {
            // Simplified feature importance calculation
            let mut importance = HashMap::new();
            importance.insert("variance".to_string(), features.variance);
            importance.insert("trend_slope".to_string(), features.trend_slope.abs());
            importance.insert("entropy".to_string(), features.entropy);
            importance
        }

        fn estimate_drift_duration(&self, features: &PatternFeatures<A>) -> Duration {
            // Estimate how long the drift will last based on patterns
            let base_duration = Duration::from_secs(300); // 5 minutes base

            // Adjust based on trend strength and persistence
            let duration_multiplier = features.trend_strength * features.persistence;
            let adjustment = duration_multiplier.to_f64().unwrap_or(1.0);

            Duration::from_secs((base_duration.as_secs() as f64 * adjustment) as u64)
        }
    }

    /// Advanced drift detection result
    #[derive(Debug, Clone)]
    pub struct AdvancedDriftResult<A: Float + Send + Sync> {
        pub status: DriftStatus,
        pub confidence: A,
        pub matched_pattern: Option<DriftPattern<A>>,
        pub impact: Option<DriftImpact<A>>,
        pub recommended_strategy: Option<AdaptationStrategy<A>>,
        pub feature_importance: HashMap<String, A>,
        pub prediction_horizon: Duration,
    }

    #[derive(Debug, Clone)]
    struct CombinedDetectionResult<A: Float + Send + Sync> {
        status: DriftStatus,
        confidence: A,
    }

    // Implementation stubs for complex components

    impl<A: Float + std::iter::Sum + Send + Sync> DriftPatternAnalyzer<A> {
        fn new() -> Self {
            Self {
                pattern_buffer: VecDeque::new(),
                known_patterns: HashMap::new(),
                matching_threshold: A::from(0.8).unwrap(),
                feature_extractors: Vec::new(),
            }
        }

        fn extract_features(&mut self, data: &[A]) -> Result<PatternFeatures<A>> {
            // Simplified feature extraction
            let mean = data.iter().cloned().sum::<A>() / A::from(data.len()).unwrap();
            let variance = data.iter().map(|&x| (x - mean) * (x - mean)).sum::<A>()
                / A::from(data.len()).unwrap();

            Ok(PatternFeatures {
                mean,
                variance,
                skewness: A::zero(), // Simplified
                kurtosis: A::zero(),
                trend_slope: A::zero(),
                trend_strength: A::zero(),
                dominant_frequency: A::zero(),
                spectral_entropy: A::zero(),
                temporal_locality: A::zero(),
                persistence: A::zero(),
                entropy: variance.ln().abs(), // Simplified entropy
                fractal_dimension: A::from(1.5).unwrap(), // Default
            })
        }

        fn match_pattern(&self, features: &PatternFeatures<A>) -> Option<DriftPattern<A>> {
            // Simplified pattern matching
            self.known_patterns
                .values()
                .find(|pattern| {
                    self.calculate_similarity(&pattern.features, features) > self.matching_threshold
                })
                .cloned()
        }

        fn calculate_similarity(&self, p1: &PatternFeatures<A>, p2: &PatternFeatures<A>) -> A {
            // Simplified similarity calculation
            let mean_diff = (p1.mean - p2.mean).abs();
            let var_diff = (p1.variance - p2.variance).abs();
            A::one() - (mean_diff + var_diff) / A::from(2.0).unwrap()
        }
    }

    impl<A: Float + Send + Sync> AdaptiveThresholdManager<A> {
        fn new() -> Self {
            Self {
                thresholds: HashMap::new(),
                threshold_history: VecDeque::new(),
                performance_feedback: VecDeque::new(),
                learning_rate: A::from(0.01).unwrap(),
            }
        }

        fn update_thresholds(&mut self, results: &[DriftStatus], _features: &PatternFeatures<A>) {
            // Simplified threshold adaptation
            for (i, result) in results.iter().enumerate() {
                let detector_name = format!("detector_{}", i);
                let current_threshold = self
                    .thresholds
                    .get(&detector_name)
                    .cloned()
                    .unwrap_or(A::from(1.0).unwrap());

                // Adjust threshold based on recent performance
                let adjustment = if *result == DriftStatus::Drift {
                    -self.learning_rate // Lower threshold if drift detected
                } else {
                    self.learning_rate * A::from(0.1).unwrap() // Slightly raise threshold
                };

                let new_threshold = current_threshold + adjustment;
                self.thresholds.insert(detector_name.clone(), new_threshold);

                self.threshold_history.push_back(ThresholdUpdate {
                    detector_name,
                    old_threshold: current_threshold,
                    new_threshold,
                    timestamp: Instant::now(),
                    reason: "Performance-based adjustment".to_string(),
                });
            }
        }
    }

    impl<A: Float + Send + Sync> ContextAwareDriftDetector<A> {
        fn new() -> Self {
            Self {
                context_features: Vec::new(),
                context_models: HashMap::new(),
                current_context: None,
                transition_matrix: HashMap::new(),
            }
        }

        fn update_context(&mut self, features: &[ContextFeature<A>]) {
            self.context_features = features.to_vec();

            // Simplified context classification
            let context_id = if !features.is_empty() && features[0].value > A::from(0.5).unwrap() {
                "high_activity".to_string()
            } else {
                "low_activity".to_string()
            };

            self.current_context = Some(context_id);
        }
    }

    impl<A: Float + Send + Sync> DriftImpactAnalyzer<A> {
        fn new() -> Self {
            Self {
                impact_history: VecDeque::new(),
                severity_classifier: SeverityClassifier::new(),
                recovery_predictor: RecoveryTimePredictor::new(),
                business_impact_estimator: BusinessImpactEstimator::new(),
            }
        }

        fn analyze_impact(
            &mut self,
            features: &PatternFeatures<A>,
            _pattern: &Option<DriftPattern<A>>,
        ) -> Result<DriftImpact<A>> {
            let performance_degradation = features.variance; // Simplified
            let urgency_level = if performance_degradation > A::from(1.0).unwrap() {
                UrgencyLevel::High
            } else {
                UrgencyLevel::Medium
            };

            Ok(DriftImpact {
                performance_degradation,
                affected_metrics: vec!["accuracy".to_string(), "loss".to_string()],
                estimated_recovery_time: Duration::from_secs(300),
                confidence: A::from(0.8).unwrap(),
                business_impact_score: performance_degradation,
                urgency_level,
            })
        }
    }

    impl<A: Float + Send + Sync> AdaptationStrategySelector<A> {
        fn new() -> Self {
            Self {
                strategies: Vec::new(),
                strategy_performance: HashMap::new(),
                bandit: EpsilonGreedyBandit::new(A::from(0.1).unwrap()),
                context_strategy_map: HashMap::new(),
            }
        }

        fn select_strategy(
            &mut self,
            _features: &PatternFeatures<A>,
            _impact: &DriftImpact<A>,
            _pattern: &Option<DriftPattern<A>>,
        ) -> Result<Option<AdaptationStrategy<A>>> {
            // Simplified strategy selection
            let strategy = AdaptationStrategy {
                id: "increase_lr".to_string(),
                strategy_type: AdaptationStrategyType::ParameterTuning,
                parameters: {
                    let mut params = HashMap::new();
                    params.insert("learning_rate_factor".to_string(), A::from(1.5).unwrap());
                    params
                },
                applicability_conditions: Vec::new(),
                expected_effectiveness: A::from(0.7).unwrap(),
                computational_cost: A::from(0.1).unwrap(),
            };

            Ok(Some(strategy))
        }
    }

    impl<A: Float + Send + Sync> DriftDatabase<A> {
        fn new() -> Self {
            Self {
                drift_events: Vec::new(),
                pattern_outcomes: HashMap::new(),
                seasonal_patterns: HashMap::new(),
                similarity_index: SimilarityIndex::new(),
            }
        }

        fn store_event(
            &mut self,
            features: &PatternFeatures<A>,
            context: &[ContextFeature<A>],
            strategy: &Option<AdaptationStrategy<A>>,
        ) {
            if let Some(strat) = strategy {
                let event = StoredDriftEvent {
                    features: features.clone(),
                    context: context.to_vec(),
                    applied_strategy: strat.id.clone(),
                    outcome: AdaptationOutcome {
                        success: true, // Simplified
                        performance_improvement: A::from(0.1).unwrap(),
                        adaptation_time: Duration::from_secs(60),
                        stability_period: Duration::from_secs(300),
                        side_effects: Vec::new(),
                    },
                    timestamp: Instant::now(),
                };

                self.drift_events.push(event);
            }
        }
    }

    impl<A: Float + Send + Sync> SimilarityIndex<A> {
        fn new() -> Self {
            Self {
                feature_vectors: Vec::new(),
                similarity_threshold: A::from(0.8).unwrap(),
                distance_metric: DistanceMetric::Euclidean,
            }
        }
    }

    impl<A: Float + Send + Sync> EpsilonGreedyBandit<A> {
        fn new(epsilon: A) -> Self {
            Self {
                epsilon,
                action_values: HashMap::new(),
                action_counts: HashMap::new(),
                total_trials: 0,
            }
        }
    }

    // Placeholder implementations for complex analyzers

    #[derive(Debug)]
    struct SeverityClassifier<A: Float + Send + Sync> {
        _phantom: std::marker::PhantomData<A>,
    }

    impl<A: Float + Send + Sync> SeverityClassifier<A> {
        fn new() -> Self {
            Self {
                _phantom: std::marker::PhantomData,
            }
        }
    }

    #[derive(Debug)]
    struct RecoveryTimePredictor<A: Float + Send + Sync> {
        _phantom: std::marker::PhantomData<A>,
    }

    impl<A: Float + Send + Sync> RecoveryTimePredictor<A> {
        fn new() -> Self {
            Self {
                _phantom: std::marker::PhantomData,
            }
        }
    }

    #[derive(Debug)]
    struct BusinessImpactEstimator<A: Float + Send + Sync> {
        _phantom: std::marker::PhantomData<A>,
    }

    impl<A: Float + Send + Sync> BusinessImpactEstimator<A> {
        fn new() -> Self {
            Self {
                _phantom: std::marker::PhantomData,
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_page_hinkley_detector() {
        let mut detector = PageHinkleyDetector::new(3.0f64, 2.0f64);

        // Stable period
        for _ in 0..10 {
            let status = detector.update(0.1);
            assert_eq!(status, DriftStatus::Stable);
        }

        // Drift period
        for _ in 0..5 {
            let status = detector.update(0.5); // Higher loss
            if status == DriftStatus::Drift {
                break;
            }
        }
    }

    #[test]
    fn test_adwin_detector() {
        let mut detector = AdwinDetector::new(0.005f64, 100);

        // Add stable values
        for i in 0..20 {
            let value = 0.1 + (i as f64) * 0.001; // Slight trend
            detector.update(value);
        }

        // Add drift values
        for i in 0..10 {
            let value = 0.5 + (i as f64) * 0.01; // Clear change
            let status = detector.update(value);
            if status == DriftStatus::Drift {
                break;
            }
        }
    }

    #[test]
    fn test_ddm_detector() {
        let mut detector = DdmDetector::<f64>::new();

        // Stable period with low error rate
        for i in 0..50 {
            let iserror = i % 10 == 0; // 10% error rate
            detector.update(iserror);
        }

        // Period with high error rate
        for i in 0..20 {
            let iserror = i % 2 == 0; // 50% error rate
            let status = detector.update(iserror);
            if status == DriftStatus::Drift {
                break;
            }
        }
    }

    #[test]
    fn test_concept_drift_detector() {
        let config = DriftDetectorConfig::default();
        let mut detector = ConceptDriftDetector::new(config);

        // Simulate stable period
        for i in 0..30 {
            let loss = 0.1 + (i as f64) * 0.001;
            let iserror = i % 10 == 0;
            let status = detector.update(loss, iserror).unwrap();
            assert_ne!(status, DriftStatus::Drift); // Should be stable
        }

        // Simulate drift
        for i in 0..20 {
            let loss = 0.5 + (i as f64) * 0.01; // Much higher loss
            let iserror = i % 2 == 0; // Higher error rate
            let _status = detector.update(loss, iserror).unwrap();
        }

        let stats = detector.get_statistics();
        assert!(stats.total_drifts > 0 || stats.recent_drift_rate > 0.0);
    }

    #[test]
    fn test_drift_event() {
        let event = DriftEvent {
            timestamp: Instant::now(),
            confidence: 0.85f64,
            drift_type: DriftType::Sudden,
            adaptation_recommendation: AdaptationRecommendation::Reset,
        };

        assert_eq!(event.drift_type, DriftType::Sudden);
        assert!(event.confidence > 0.8);
    }
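
    // An additional sketch (not from the original suite): exercising the ensemble
    // path of `ConceptDriftDetector` with a customized configuration. The exact
    // number of detected drifts depends on the simplified detectors above, so the
    // assertion only checks that the reported statistics are internally consistent.
    #[test]
    fn test_ensemble_configuration() {
        let config = DriftDetectorConfig {
            enable_ensemble: true,
            ..Default::default()
        };
        let mut detector = ConceptDriftDetector::<f64>::new(config);

        for i in 0..50 {
            let loss = if i < 30 { 0.1 } else { 0.6 };
            let iserror = i % 5 == 0;
            detector.update(loss, iserror).unwrap();
        }

        let stats = detector.get_statistics();
        assert_eq!(
            stats.total_drifts,
            stats
                .drift_types_distribution
                .values()
                .copied()
                .sum::<usize>()
        );
    }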
}