1use crate::error::{OptimError, Result};
7use scirs2_core::numeric::Float;
8use serde::{Deserialize, Serialize};
9use std::collections::{HashMap, VecDeque};
10use std::f64::consts::PI;
11
/// Detects complex memory-usage patterns by combining ML classification,
/// signal processing, and statistical analysis over a stream of samples.
#[derive(Debug)]
pub struct AdvancedPatternDetector {
    /// Tuning knobs controlling which analysis stages run.
    config: AdvancedPatternConfig,
    /// Single-layer logistic classifier for ML-based pattern detection.
    pattern_classifier: PatternClassifier,
    /// FFT / wavelet / Kalman pipeline for signal-domain analysis.
    signal_processor: SignalProcessor,
    /// Hypothesis tests and time-series decomposition.
    statistical_analyzer: AdvancedStatisticalAnalyzer,
    /// Store of previously observed patterns plus frequency statistics.
    pattern_database: PatternDatabase,
    /// Converts raw samples into numeric feature vectors.
    feature_extractor: FeatureExtractor,
}
28
/// Configuration for [`AdvancedPatternDetector`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AdvancedPatternConfig {
    /// Run the ML classification stage.
    pub enable_ml_classification: bool,
    /// Run the FFT/wavelet/Kalman signal-processing stage.
    pub enable_signal_processing: bool,
    /// Run the statistical hypothesis-testing stage.
    pub enable_statistical_matching: bool,
    /// Minimum number of samples required before any detection is attempted.
    pub min_pattern_length: usize,
    /// Similarity threshold above which two detections are fused into one.
    pub pattern_matching_threshold: f64,
    /// Window size used by feature extraction.
    pub feature_window_size: usize,
    /// Database capacity; exceeding it triggers pruning of low-value patterns.
    pub max_patterns_stored: usize,
    /// Learning rate handed to the pattern classifier.
    pub learning_rate: f64,
    /// Compute anomaly scores for refined patterns.
    pub enable_anomaly_scoring: bool,
    /// Fill in trend direction/strength forecasts for refined patterns.
    pub enable_trend_forecasting: bool,
}
53
54impl Default for AdvancedPatternConfig {
55 fn default() -> Self {
56 Self {
57 enable_ml_classification: true,
58 enable_signal_processing: true,
59 enable_statistical_matching: true,
60 min_pattern_length: 10,
61 pattern_matching_threshold: 0.85,
62 feature_window_size: 50,
63 max_patterns_stored: 1000,
64 learning_rate: 0.01,
65 enable_anomaly_scoring: true,
66 enable_trend_forecasting: true,
67 }
68 }
69}
70
/// A single detected memory-usage pattern together with the evidence
/// (signature, statistics, indicators) supporting it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AdvancedMemoryPattern {
    /// Stable identifier; also the key in the pattern database.
    pub id: String,
    /// Classified shape of the pattern (linear growth, periodic, ...).
    pub pattern_type: AdvancedPatternType,
    /// Detection confidence in [0, 1].
    pub confidence: f64,
    /// Numeric fingerprint used for similarity comparison between patterns.
    pub signature: Vec<f64>,
    /// Human-readable summary of the detection.
    pub description: String,
    /// Frequency-domain measurements backing the detection.
    pub frequency_characteristics: FrequencyCharacteristics,
    /// Time-domain statistical measurements backing the detection.
    pub statistical_properties: StatisticalProperties,
    /// How anomalous this pattern is relative to historical norms, in [0, 1].
    pub anomaly_score: f64,
    /// Overall pattern strength in [0, 1].
    pub strength: f64,
    /// Periodicity details, when the pattern repeats.
    pub periodicity: Option<PeriodicityInfo>,
    /// Trend direction/strength information.
    pub trend: TrendInfo,
    /// Signals suggesting a memory leak.
    pub leak_indicators: Vec<LeakIndicator>,
    /// How the pattern has changed across observations.
    pub evolution: PatternEvolution,
}
101
/// Taxonomy of memory-usage shapes the detector can report.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AdvancedPatternType {
    /// Usage grows linearly: `y = slope * t + intercept`.
    LinearGrowth {
        slope: f64,
        intercept: f64,
        /// Goodness of fit of the linear model.
        r_squared: f64,
    },
    /// Usage grows exponentially from `base_value` at `growth_rate`.
    ExponentialGrowth {
        growth_rate: f64,
        base_value: f64,
        /// Time for usage to double at the estimated rate.
        doubling_time: f64,
    },
    /// Usage oscillates at a fundamental frequency with harmonics.
    Periodic {
        fundamental_frequency: f64,
        harmonics: Vec<f64>,
        phase_shift: f64,
        amplitude: f64,
    },
    /// Repeated ramp-up followed by sudden drop (e.g. periodic reclaim).
    SawTooth {
        peak_height: f64,
        cycle_duration: f64,
        duty_cycle: f64,
        baseline: f64,
    },
    /// Usage changes in discrete steps between plateaus.
    StepFunction {
        step_size: f64,
        step_frequency: f64,
        plateaus: Vec<f64>,
    },
    /// Irregular dynamics characterized by nonlinear-dynamics measures.
    Chaotic {
        lyapunov_exponent: f64,
        correlation_dimension: f64,
        hurst_exponent: f64,
    },
    /// Short bursts of activity over a quiet baseline.
    Burst {
        burst_intensity: f64,
        burst_duration: f64,
        inter_burst_interval: f64,
        baseline_level: f64,
    },
    /// Signature consistent with a memory leak.
    LeakSignature {
        leak_rate: f64,
        leak_acceleration: f64,
        leak_confidence: f64,
    },
    /// Weighted combination of several component patterns.
    Composite {
        components: Vec<Box<AdvancedPatternType>>,
        /// One weight per entry in `components`.
        weights: Vec<f64>,
    },
}
162
/// Frequency-domain summary of a signal.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FrequencyCharacteristics {
    /// Strongest frequencies found in the spectrum.
    pub dominant_frequencies: Vec<f64>,
    /// Magnitude spectrum.
    pub power_spectrum: Vec<f64>,
    /// Magnitude-weighted mean frequency.
    pub spectral_centroid: f64,
    /// Spread of the spectrum around the centroid.
    pub spectral_bandwidth: f64,
    /// Frequency below which most (e.g. 85%) of the energy lies.
    pub spectral_rolloff: f64,
    /// Frame-to-frame spectral change.
    pub spectral_flux: f64,
    /// Fraction of consecutive samples that change sign.
    pub zero_crossing_rate: f64,
}
181
/// Time-domain statistical summary of a signal.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StatisticalProperties {
    /// Sample mean.
    pub mean: f64,
    /// Sample standard deviation.
    pub std_dev: f64,
    /// Third standardized moment (asymmetry).
    pub skewness: f64,
    /// Fourth standardized moment (tailedness), typically excess kurtosis.
    pub kurtosis: f64,
    /// Information-theoretic entropy of the sample distribution.
    pub entropy: f64,
    /// Autocorrelation at successive lags.
    pub autocorrelation: Vec<f64>,
    /// Partial autocorrelation at successive lags.
    pub partial_autocorrelation: Vec<f64>,
    /// Hjorth activity/mobility/complexity descriptors.
    pub hjorth_parameters: HjorthParameters,
}
202
/// Hjorth descriptors of a signal (activity, mobility, complexity).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HjorthParameters {
    /// Signal variance.
    pub activity: f64,
    /// Mean-frequency proxy derived from the first derivative.
    pub mobility: f64,
    /// Frequency-spread proxy derived from the second derivative.
    pub complexity: f64,
}
213
/// Details of a detected repeating cycle.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PeriodicityInfo {
    /// Cycle length (same time unit as the input samples).
    pub period: f64,
    /// How pronounced the periodic component is.
    pub strength: f64,
    /// Consistency of phase across cycles.
    pub phase_coherence: f64,
    /// How stable the period is over time.
    pub stability: f64,
}
226
/// Trend summary of a pattern.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrendInfo {
    /// Sign of the trend: positive = growing, negative = shrinking, 0 = flat.
    pub direction: f64,
    /// Trend magnitude, clamped to [0, 1] by the detector.
    pub strength: f64,
    /// Rate of change of the trend (second-order term).
    pub acceleration: f64,
    /// How steady the trend is over time.
    pub stability: f64,
    /// Sample indices where the trend changed.
    pub change_points: Vec<usize>,
}
241
/// A single piece of evidence suggesting a memory leak.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LeakIndicator {
    /// Category of leak evidence.
    pub indicator_type: LeakIndicatorType,
    /// How strong the evidence is.
    pub strength: f64,
    /// Estimated time until the leak becomes critical, if predictable.
    pub time_to_critical: Option<f64>,
    /// Confidence in this indicator, in [0, 1].
    pub confidence: f64,
}
254
/// Categories of memory-leak evidence.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum LeakIndicatorType {
    /// Usage only ever increases.
    MonotonicIncrease,
    /// Growth rate itself is increasing.
    AcceleratingGrowth,
    /// Sporadic spikes without a matching release.
    IrregularSpikes,
    /// The resting usage level creeps upward.
    BaselineDrift,
    /// Allocation pattern typical of heap fragmentation.
    FragmentationSignature,
    /// Repeated cache eviction/refill churn.
    CacheThrashing,
    /// Usage approaching the available limit.
    ResourceExhaustion,
}
273
/// How a pattern has changed across successive observations.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PatternEvolution {
    /// How unchanged the pattern has remained.
    pub stability: f64,
    /// Rate at which the pattern's parameters drift.
    pub evolution_rate: f64,
    /// How well the pattern tracks changes in the underlying data.
    pub adaptation_score: f64,
    /// Snapshots of earlier parameterizations.
    pub historical_states: Vec<PatternState>,
}
286
/// Snapshot of a pattern's parameters at one point in time.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PatternState {
    /// Unix timestamp (seconds) of the snapshot.
    pub timestamp: u64,
    /// Named parameter values at that time.
    pub parameters: HashMap<String, f64>,
    /// Confidence recorded at that time.
    pub confidence: f64,
}
297
/// Single-layer logistic classifier mapping feature vectors to pattern classes.
#[derive(Debug)]
pub struct PatternClassifier {
    /// One weight row per output class (row length = feature capacity).
    weights: Vec<Vec<f64>>,
    /// One bias per output class.
    biases: Vec<f64>,
    /// Step size for (future) weight updates.
    learning_rate: f64,
    /// Labeled examples collected for training.
    training_data: Vec<TrainingExample>,
}
310
/// One labeled example for classifier training.
#[derive(Debug, Clone)]
pub struct TrainingExample {
    /// Input feature vector.
    pub features: Vec<f64>,
    /// Ground-truth pattern label.
    pub pattern_type: AdvancedPatternType,
    /// Sample weight applied during training.
    pub weight: f64,
}
321
/// Signal-processing pipeline: spectral analysis, wavelets, and smoothing.
#[derive(Debug)]
pub struct SignalProcessor {
    /// Spectral (DFT-based) analysis.
    fft_processor: FFTProcessor,
    /// Wavelet-based analysis (currently a stub).
    wavelet_processor: WaveletProcessor,
    /// State-space smoother applied to the raw signal.
    kalman_filter: KalmanFilter,
}
332
/// Configuration for windowed spectral analysis.
#[derive(Debug)]
pub struct FFTProcessor {
    /// Window applied before the transform.
    window_type: WindowType,
    /// Transform length.
    fft_size: usize,
    /// Fractional overlap between consecutive analysis windows.
    overlap_factor: f64,
}
343
/// Analysis window shapes for spectral processing.
#[derive(Debug, Clone)]
pub enum WindowType {
    /// No tapering.
    Rectangular,
    Hamming,
    Hanning,
    Blackman,
    /// Kaiser window parameterized by its shape factor `beta`.
    Kaiser { beta: f64 },
}
353
/// Configuration for multi-level wavelet decomposition.
#[derive(Debug)]
pub struct WaveletProcessor {
    /// Mother wavelet family.
    wavelet_type: WaveletType,
    /// Number of decomposition levels.
    levels: usize,
}
362
/// Supported mother-wavelet families.
#[derive(Debug, Clone)]
pub enum WaveletType {
    Daubechies { order: usize },
    /// Biorthogonal wavelet with (decomposition, reconstruction) orders.
    Biorthogonal { order: (usize, usize) },
    Coiflets { order: usize },
    Haar,
    /// Complex Morlet wavelet with bandwidth parameter `sigma`.
    Morlet { sigma: f64 },
}
372
/// Simple constant-velocity Kalman filter used to smooth the input signal.
#[derive(Debug)]
pub struct KalmanFilter {
    /// State vector: [level, velocity].
    state: Vec<f64>,
    /// State covariance matrix (2x2).
    covariance: Vec<Vec<f64>>,
    /// Process-noise variance added at each predict step.
    process_noise: f64,
    /// Measurement-noise variance used in the update step.
    measurement_noise: f64,
}
385
/// Statistical analysis stage: hypothesis tests plus time-series models.
#[derive(Debug)]
pub struct AdvancedStatisticalAnalyzer {
    /// Significance/bootstrap settings.
    config: StatisticalConfig,
    /// Runs the configured hypothesis tests.
    hypothesis_tester: HypothesisTestEngine,
    /// ARIMA fitting, seasonal decomposition, change-point detection.
    time_series_analyzer: TimeSeriesAnalyzer,
}
396
/// Settings shared by the statistical tests.
#[derive(Debug, Clone)]
pub struct StatisticalConfig {
    /// Alpha level for hypothesis tests (e.g. 0.05).
    pub significance_level: f64,
    /// Number of bootstrap resamples.
    pub bootstrap_iterations: usize,
    /// Width of reported confidence intervals (e.g. 0.95).
    pub confidence_interval: f64,
}
407
/// Dispatches the set of configured hypothesis tests.
#[derive(Debug)]
pub struct HypothesisTestEngine {
    /// Tests to run against incoming data.
    test_types: Vec<HypothesisTestType>,
}
414
/// Supported statistical hypothesis tests.
#[derive(Debug, Clone)]
pub enum HypothesisTestType {
    /// Distribution-equality test.
    KolmogorovSmirnov,
    /// Goodness-of-fit test with tail sensitivity.
    AndersonDarling,
    /// Non-parametric monotonic-trend test.
    MannKendall,
    /// Autocorrelation (whiteness) test.
    LjungBox,
    /// Unit-root (non-stationarity) test.
    AugmentedDickeyFuller,
    /// Stationarity test complementary to ADF.
    KPSS,
}
431
/// Time-series modeling stage of the statistical analyzer.
#[derive(Debug)]
pub struct TimeSeriesAnalyzer {
    /// Fits ARIMA models to the series.
    arima_fitter: ARIMAFitter,
    /// Splits the series into trend/seasonal/residual components.
    seasonal_decomposer: SeasonalDecomposer,
    /// Locates structural breaks in the series.
    change_point_detector: ChangePointDetector,
}
442
/// Fits ARIMA models with a fixed parameterization.
#[derive(Debug)]
pub struct ARIMAFitter {
    /// Model orders (p, d, q) and optional seasonal component.
    parameters: ARIMAParameters,
}
449
/// ARIMA(p, d, q) model orders.
#[derive(Debug, Clone)]
pub struct ARIMAParameters {
    /// Autoregressive order.
    pub p: usize,
    /// Differencing order.
    pub d: usize,
    /// Moving-average order.
    pub q: usize,
    /// Optional seasonal extension (SARIMA).
    pub seasonal: Option<SeasonalParameters>,
}
462
/// Seasonal ARIMA orders (P, D, Q) with the seasonal period.
#[derive(Debug, Clone)]
pub struct SeasonalParameters {
    /// Seasonal autoregressive order.
    pub p: usize,
    /// Seasonal differencing order.
    pub d: usize,
    /// Seasonal moving-average order.
    pub q: usize,
    /// Season length in samples.
    pub period: usize,
}
475
/// Decomposes a series into trend, seasonal, and residual components.
#[derive(Debug)]
pub struct SeasonalDecomposer {
    /// Decomposition algorithm to apply.
    method: DecompositionMethod,
}
482
/// Supported seasonal-decomposition algorithms.
#[derive(Debug, Clone)]
pub enum DecompositionMethod {
    /// y = trend + seasonal + residual.
    Additive,
    /// y = trend * seasonal * residual.
    Multiplicative,
    /// Seasonal-Trend decomposition using Loess.
    STL,
    /// X-13ARIMA-SEATS style decomposition.
    X13ARIMA,
}
495
/// Locates structural breaks using one or more algorithms.
#[derive(Debug)]
pub struct ChangePointDetector {
    /// Algorithms applied to the series.
    algorithms: Vec<ChangePointAlgorithm>,
}
502
/// Supported change-point detection algorithms with their tuning parameters.
#[derive(Debug, Clone)]
pub enum ChangePointAlgorithm {
    /// Cumulative-sum detector; fires when the statistic exceeds `threshold`.
    CUSUM { threshold: f64 },
    /// Pruned Exact Linear Time segmentation with the given penalty.
    PELT { penalty: f64 },
    /// Recursive splitting with a minimum segment size.
    BinarySegmentation { min_size: usize },
    /// Bayesian online detection with the given prior scale.
    Bayesian { prior_scale: f64 },
}
515
/// In-memory store of detected patterns keyed by pattern id.
#[derive(Debug)]
pub struct PatternDatabase {
    /// Patterns by id.
    patterns: HashMap<String, AdvancedMemoryPattern>,
    /// Cached pairwise similarity scores keyed by (id, id).
    similarity_matrix: HashMap<(String, String), f64>,
    /// Observation statistics per pattern id.
    frequency_stats: HashMap<String, PatternFrequencyStats>,
}
526
/// Bookkeeping for how often a pattern has been observed.
#[derive(Debug, Clone)]
pub struct PatternFrequencyStats {
    /// Number of times the pattern has been seen.
    pub count: usize,
    /// Running average of detection confidence.
    pub avg_confidence: f64,
    /// Unix timestamp (seconds) of the most recent observation.
    pub last_seen: u64,
    /// Labels describing where the pattern was observed.
    pub contexts: Vec<String>,
}
539
/// Turns raw samples into a flat feature vector for classification.
#[derive(Debug)]
pub struct FeatureExtractor {
    /// Which feature families to extract, in order.
    feature_types: Vec<FeatureType>,
    /// Per-feature (offset, scale) normalization parameters, keyed by name.
    scaling_params: HashMap<String, (f64, f64)>, }
548
/// Families of features the extractor can compute.
#[derive(Debug, Clone)]
pub enum FeatureType {
    /// Mean, std-dev, skewness, kurtosis.
    StatisticalMoments,
    /// Zero-crossing rate, spectral centroid, rolloff.
    FrequencyDomain,
    /// Energy, RMS, peak-to-peak.
    TimeDomain,
    WaveletFeatures,
    FractalFeatures,
    InformationTheoretic,
    ShapeFeatures,
}
567
568impl AdvancedPatternDetector {
569 pub fn new(config: AdvancedPatternConfig) -> Result<Self> {
571 Ok(Self {
572 config: config.clone(),
573 pattern_classifier: PatternClassifier::new(config.learning_rate)?,
574 signal_processor: SignalProcessor::new()?,
575 statistical_analyzer: AdvancedStatisticalAnalyzer::new()?,
576 pattern_database: PatternDatabase::new(),
577 feature_extractor: FeatureExtractor::new(),
578 })
579 }
580
581 pub fn detect_patterns(&mut self, memorydata: &[f64]) -> Result<Vec<AdvancedMemoryPattern>> {
583 if memorydata.len() < self.config.min_pattern_length {
584 return Ok(Vec::new());
585 }
586
587 let mut detected_patterns = Vec::new();
588
589 let features = self.feature_extractor.extract_features(memorydata)?;
591
592 if self.config.enable_signal_processing {
594 let signal_patterns = self.signal_processor.analyze_signal(memorydata)?;
595 detected_patterns.extend(signal_patterns);
596 }
597
598 if self.config.enable_statistical_matching {
600 let statistical_patterns = self.statistical_analyzer.analyze_patterns(memorydata)?;
601 detected_patterns.extend(statistical_patterns);
602 }
603
604 if self.config.enable_ml_classification {
606 let ml_patterns = self.pattern_classifier.classify_patterns(&features)?;
607 detected_patterns.extend(ml_patterns);
608 }
609
610 let refined_patterns = self.fuse_and_refine_patterns(detected_patterns)?;
612
613 self.update_pattern_database(&refined_patterns)?;
615
616 if self.config.enable_anomaly_scoring {
618 self.compute_anomaly_scores(&mut refined_patterns.clone(), memorydata)?;
619 }
620
621 if self.config.enable_trend_forecasting {
623 self.add_trend_forecasts(&mut refined_patterns.clone(), memorydata)?;
624 }
625
626 Ok(refined_patterns)
627 }
628
629 fn fuse_and_refine_patterns(
631 &self,
632 patterns: Vec<AdvancedMemoryPattern>,
633 ) -> Result<Vec<AdvancedMemoryPattern>> {
634 let mut refined_patterns = Vec::new();
635 let mut used_patterns = vec![false; patterns.len()];
636
637 for i in 0..patterns.len() {
638 if used_patterns[i] {
639 continue;
640 }
641
642 let mut pattern_group = vec![&patterns[i]];
643 used_patterns[i] = true;
644
645 for j in (i + 1)..patterns.len() {
647 if used_patterns[j] {
648 continue;
649 }
650
651 let similarity = self.calculate_pattern_similarity(&patterns[i], &patterns[j])?;
652 if similarity > self.config.pattern_matching_threshold {
653 pattern_group.push(&patterns[j]);
654 used_patterns[j] = true;
655 }
656 }
657
658 let fused_pattern = self.fuse_pattern_group(pattern_group)?;
660 refined_patterns.push(fused_pattern);
661 }
662
663 Ok(refined_patterns)
664 }
665
666 fn calculate_pattern_similarity(
668 &self,
669 pattern1: &AdvancedMemoryPattern,
670 pattern2: &AdvancedMemoryPattern,
671 ) -> Result<f64> {
672 let dot_product: f64 = pattern1
674 .signature
675 .iter()
676 .zip(pattern2.signature.iter())
677 .map(|(a, b)| a * b)
678 .sum();
679
680 let norm1: f64 = pattern1.signature.iter().map(|x| x * x).sum::<f64>().sqrt();
681 let norm2: f64 = pattern2.signature.iter().map(|x| x * x).sum::<f64>().sqrt();
682
683 if norm1 == 0.0 || norm2 == 0.0 {
684 return Ok(0.0);
685 }
686
687 let cosine_similarity = dot_product / (norm1 * norm2);
688
689 let type_similarity =
691 self.calculate_type_similarity(&pattern1.pattern_type, &pattern2.pattern_type);
692
693 Ok((cosine_similarity + type_similarity) / 2.0)
695 }
696
697 fn calculate_type_similarity(
699 &self,
700 type1: &AdvancedPatternType,
701 type2: &AdvancedPatternType,
702 ) -> f64 {
703 match (type1, type2) {
704 (
705 AdvancedPatternType::LinearGrowth { .. },
706 AdvancedPatternType::LinearGrowth { .. },
707 ) => 1.0,
708 (
709 AdvancedPatternType::ExponentialGrowth { .. },
710 AdvancedPatternType::ExponentialGrowth { .. },
711 ) => 1.0,
712 (AdvancedPatternType::Periodic { .. }, AdvancedPatternType::Periodic { .. }) => 1.0,
713 (AdvancedPatternType::SawTooth { .. }, AdvancedPatternType::SawTooth { .. }) => 1.0,
714 (
715 AdvancedPatternType::StepFunction { .. },
716 AdvancedPatternType::StepFunction { .. },
717 ) => 1.0,
718 (AdvancedPatternType::Chaotic { .. }, AdvancedPatternType::Chaotic { .. }) => 1.0,
719 (AdvancedPatternType::Burst { .. }, AdvancedPatternType::Burst { .. }) => 1.0,
720 (
721 AdvancedPatternType::LeakSignature { .. },
722 AdvancedPatternType::LeakSignature { .. },
723 ) => 1.0,
724 (
726 AdvancedPatternType::LinearGrowth { .. },
727 AdvancedPatternType::ExponentialGrowth { .. },
728 ) => 0.7,
729 (
730 AdvancedPatternType::ExponentialGrowth { .. },
731 AdvancedPatternType::LinearGrowth { .. },
732 ) => 0.7,
733 (AdvancedPatternType::SawTooth { .. }, AdvancedPatternType::Periodic { .. }) => 0.6,
734 (AdvancedPatternType::Periodic { .. }, AdvancedPatternType::SawTooth { .. }) => 0.6,
735 _ => 0.0,
736 }
737 }
738
739 fn fuse_pattern_group(
741 &self,
742 pattern_group: Vec<&AdvancedMemoryPattern>,
743 ) -> Result<AdvancedMemoryPattern> {
744 if pattern_group.is_empty() {
745 return Err(std::io::Error::new(
746 std::io::ErrorKind::InvalidInput,
747 "Empty pattern group",
748 )
749 .into());
750 }
751
752 if pattern_group.len() == 1 {
753 return Ok(pattern_group[0].clone());
754 }
755
756 let base_pattern = pattern_group
758 .iter()
759 .max_by(|a, b| {
760 a.confidence
761 .partial_cmp(&b.confidence)
762 .unwrap_or(std::cmp::Ordering::Equal)
763 })
764 .expect("unwrap failed");
765
766 let mut fused_pattern = (*base_pattern).clone();
767
768 fused_pattern.confidence =
770 pattern_group.iter().map(|p| p.confidence).sum::<f64>() / pattern_group.len() as f64;
771
772 let signature_len = fused_pattern.signature.len();
774 let mut averaged_signature = vec![0.0; signature_len];
775
776 for pattern in &pattern_group {
777 for (i, &val) in pattern.signature.iter().enumerate() {
778 if i < signature_len {
779 averaged_signature[i] += val;
780 }
781 }
782 }
783
784 for val in &mut averaged_signature {
785 *val /= pattern_group.len() as f64;
786 }
787
788 fused_pattern.signature = averaged_signature;
789
790 fused_pattern.description = format!(
792 "Fused pattern from {} similar patterns: {}",
793 pattern_group.len(),
794 pattern_group
795 .iter()
796 .map(|p| p.description.as_str())
797 .collect::<Vec<_>>()
798 .join(", ")
799 );
800
801 Ok(fused_pattern)
802 }
803
804 fn update_pattern_database(&mut self, patterns: &[AdvancedMemoryPattern]) -> Result<()> {
806 for pattern in patterns {
807 if let Some(existing_pattern) = self.pattern_database.patterns.get_mut(&pattern.id) {
809 existing_pattern.confidence =
811 (existing_pattern.confidence + pattern.confidence) / 2.0;
812 if let Some(freq_stats) = self.pattern_database.frequency_stats.get_mut(&pattern.id)
814 {
815 freq_stats.count += 1;
816 freq_stats.avg_confidence =
817 (freq_stats.avg_confidence + pattern.confidence) / 2.0;
818 freq_stats.last_seen = std::time::SystemTime::now()
819 .duration_since(std::time::UNIX_EPOCH)
820 .expect("unwrap failed")
821 .as_secs();
822 }
823 } else {
824 self.pattern_database
826 .patterns
827 .insert(pattern.id.clone(), pattern.clone());
828 self.pattern_database.frequency_stats.insert(
829 pattern.id.clone(),
830 PatternFrequencyStats {
831 count: 1,
832 avg_confidence: pattern.confidence,
833 last_seen: std::time::SystemTime::now()
834 .duration_since(std::time::UNIX_EPOCH)
835 .expect("unwrap failed")
836 .as_secs(),
837 contexts: Vec::new(),
838 },
839 );
840 }
841 }
842
843 if self.pattern_database.patterns.len() > self.config.max_patterns_stored {
845 self.prune_pattern_database()?;
846 }
847
848 Ok(())
849 }
850
851 fn prune_pattern_database(&mut self) -> Result<()> {
853 let mut patterns_to_remove = Vec::new();
855
856 for (id, freq_stats) in &self.pattern_database.frequency_stats {
857 if let Some(pattern) = self.pattern_database.patterns.get(id) {
858 let score = freq_stats.count as f64 * pattern.confidence;
859 patterns_to_remove.push((id.clone(), score));
860 }
861 }
862
863 patterns_to_remove
865 .sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal));
866
867 let patterns_to_remove_count =
868 self.pattern_database.patterns.len() - self.config.max_patterns_stored;
869 for (id_, _) in patterns_to_remove.iter().take(patterns_to_remove_count) {
870 self.pattern_database.patterns.remove(id_);
871 self.pattern_database.frequency_stats.remove(id_);
872 }
873
874 Ok(())
875 }
876
877 fn compute_anomaly_scores(
879 &self,
880 patterns: &mut Vec<AdvancedMemoryPattern>,
881 memorydata: &[f64],
882 ) -> Result<()> {
883 for pattern in patterns {
884 let historical_mean = self.get_historical_pattern_mean(&pattern.pattern_type)?;
886 let deviation = (pattern.strength - historical_mean).abs();
887 pattern.anomaly_score = (deviation / historical_mean.max(1.0)).min(1.0);
888 }
889 Ok(())
890 }
891
892 fn add_trend_forecasts(
894 &self,
895 patterns: &mut Vec<AdvancedMemoryPattern>,
896 memorydata: &[f64],
897 ) -> Result<()> {
898 for pattern in patterns {
899 if memorydata.len() >= 2 {
901 let n = memorydata.len();
902 let last_values = &memorydata[n.saturating_sub(10)..];
903
904 if let Some((slope_, _)) = self.calculate_linear_trend(last_values) {
905 pattern.trend.direction = slope_.signum();
906 pattern.trend.strength = slope_.abs().min(1.0);
907 pattern.trend.acceleration = 0.0; }
909 }
910 }
911 Ok(())
912 }
913
914 fn calculate_linear_trend(&self, data: &[f64]) -> Option<(f64, f64)> {
916 if data.len() < 2 {
917 return None;
918 }
919
920 let n = data.len() as f64;
921 let sum_x = (0..data.len()).map(|i| i as f64).sum::<f64>();
922 let sum_y = data.iter().sum::<f64>();
923 let sum_xy = data
924 .iter()
925 .enumerate()
926 .map(|(i, &y)| i as f64 * y)
927 .sum::<f64>();
928 let sum_x2 = (0..data.len()).map(|i| (i * i) as f64).sum::<f64>();
929
930 let denominator = n * sum_x2 - sum_x * sum_x;
931 if denominator.abs() < f64::EPSILON {
932 return None;
933 }
934
935 let slope = (n * sum_xy - sum_x * sum_y) / denominator;
936 let intercept = (sum_y - slope * sum_x) / n;
937
938 Some((slope, intercept))
939 }
940
941 fn get_historical_pattern_mean(&self, patterntype: &AdvancedPatternType) -> Result<f64> {
943 match patterntype {
945 AdvancedPatternType::LinearGrowth { .. } => Ok(0.5),
946 AdvancedPatternType::ExponentialGrowth { .. } => Ok(0.3),
947 AdvancedPatternType::Periodic { .. } => Ok(0.7),
948 AdvancedPatternType::LeakSignature { .. } => Ok(0.2),
949 _ => Ok(0.5),
950 }
951 }
952}
953
954impl PatternClassifier {
957 fn new(_learningrate: f64) -> Result<Self> {
958 Ok(Self {
959 weights: vec![vec![0.0; 50]; 10], biases: vec![0.0; 10],
961 learning_rate: _learningrate,
962 training_data: Vec::new(),
963 })
964 }
965
966 fn classify_patterns(&self, features: &[f64]) -> Result<Vec<AdvancedMemoryPattern>> {
967 let mut patterns = Vec::new();
969
970 let outputs = self.forward_pass(features);
972
973 for (i, &output) in outputs.iter().enumerate() {
975 if output > 0.5 {
976 patterns.push(self.create_pattern_from_class(i, output, features)?);
977 }
978 }
979
980 Ok(patterns)
981 }
982
983 fn forward_pass(&self, features: &[f64]) -> Vec<f64> {
984 let mut outputs = vec![0.0; self.weights.len()];
985
986 for (i, weights_row) in self.weights.iter().enumerate() {
987 let mut sum = self.biases[i];
988 for (j, &weight) in weights_row.iter().enumerate() {
989 if j < features.len() {
990 sum += weight * features[j];
991 }
992 }
993 outputs[i] = 1.0 / (1.0 + (-sum).exp()); }
995
996 outputs
997 }
998
999 fn create_pattern_from_class(
1000 &self,
1001 class_id: usize,
1002 confidence: f64,
1003 features: &[f64],
1004 ) -> Result<AdvancedMemoryPattern> {
1005 let pattern_type = match class_id {
1006 0 => AdvancedPatternType::LinearGrowth {
1007 slope: features.first().copied().unwrap_or(0.0),
1008 intercept: features.get(1).copied().unwrap_or(0.0),
1009 r_squared: 0.8,
1010 },
1011 1 => AdvancedPatternType::ExponentialGrowth {
1012 growth_rate: features.get(2).copied().unwrap_or(0.1),
1013 base_value: features.get(3).copied().unwrap_or(1.0),
1014 doubling_time: 10.0,
1015 },
1016 2 => AdvancedPatternType::Periodic {
1017 fundamental_frequency: features.get(4).copied().unwrap_or(0.1),
1018 harmonics: vec![1.0, 0.5, 0.25],
1019 phase_shift: 0.0,
1020 amplitude: features.get(5).copied().unwrap_or(1.0),
1021 },
1022 _ => AdvancedPatternType::LeakSignature {
1023 leak_rate: features.get(6).copied().unwrap_or(0.01),
1024 leak_acceleration: features.get(7).copied().unwrap_or(0.001),
1025 leak_confidence: confidence,
1026 },
1027 };
1028
1029 Ok(AdvancedMemoryPattern {
1030 id: format!("ml_pattern_{}", class_id),
1031 pattern_type,
1032 confidence,
1033 signature: features.to_vec(),
1034 description: format!("ML-detected pattern class {}", class_id),
1035 frequency_characteristics: FrequencyCharacteristics::default(),
1036 statistical_properties: StatisticalProperties::default(),
1037 anomaly_score: 0.0,
1038 strength: confidence,
1039 periodicity: None,
1040 trend: TrendInfo::default(),
1041 leak_indicators: Vec::new(),
1042 evolution: PatternEvolution::default(),
1043 })
1044 }
1045}
1046
1047impl SignalProcessor {
1048 fn new() -> Result<Self> {
1049 Ok(Self {
1050 fft_processor: FFTProcessor::new(),
1051 wavelet_processor: WaveletProcessor::new(),
1052 kalman_filter: KalmanFilter::new(),
1053 })
1054 }
1055
1056 fn analyze_signal(&mut self, signal: &[f64]) -> Result<Vec<AdvancedMemoryPattern>> {
1057 let mut patterns = Vec::new();
1058
1059 let frequency_patterns = self.fft_processor.analyze_frequencies(signal)?;
1061 patterns.extend(frequency_patterns);
1062
1063 let wavelet_patterns = self.wavelet_processor.analyze_wavelets(signal)?;
1065 patterns.extend(wavelet_patterns);
1066
1067 let filtered_signal = self.kalman_filter.filter(signal)?;
1069
1070 if !filtered_signal.is_empty() {
1072 }
1074
1075 Ok(patterns)
1076 }
1077}
1078
1079impl FFTProcessor {
1080 fn new() -> Self {
1081 Self {
1082 window_type: WindowType::Hanning,
1083 fft_size: 1024,
1084 overlap_factor: 0.5,
1085 }
1086 }
1087
1088 fn analyze_frequencies(&self, signal: &[f64]) -> Result<Vec<AdvancedMemoryPattern>> {
1089 let mut patterns = Vec::new();
1091
1092 if signal.len() < 8 {
1093 return Ok(patterns);
1094 }
1095
1096 let windowed_signal = self.apply_window(signal);
1098
1099 let spectrum = self.compute_fft(&windowed_signal)?;
1101
1102 let dominant_freqs = self.find_dominant_frequencies(&spectrum);
1104
1105 if !dominant_freqs.is_empty() {
1107 patterns.push(AdvancedMemoryPattern {
1108 id: "fft_pattern".to_string(),
1109 pattern_type: AdvancedPatternType::Periodic {
1110 fundamental_frequency: dominant_freqs[0],
1111 harmonics: dominant_freqs[1..].to_vec(),
1112 phase_shift: 0.0,
1113 amplitude: spectrum.iter().sum::<f64>() / spectrum.len() as f64,
1114 },
1115 confidence: 0.8,
1116 signature: spectrum.clone(),
1117 description: "FFT-detected periodic pattern".to_string(),
1118 frequency_characteristics: FrequencyCharacteristics {
1119 dominant_frequencies: dominant_freqs,
1120 power_spectrum: spectrum,
1121 spectral_centroid: 0.0,
1122 spectral_bandwidth: 0.0,
1123 spectral_rolloff: 0.0,
1124 spectral_flux: 0.0,
1125 zero_crossing_rate: 0.0,
1126 },
1127 statistical_properties: StatisticalProperties::default(),
1128 anomaly_score: 0.0,
1129 strength: 0.8,
1130 periodicity: None,
1131 trend: TrendInfo::default(),
1132 leak_indicators: Vec::new(),
1133 evolution: PatternEvolution::default(),
1134 });
1135 }
1136
1137 Ok(patterns)
1138 }
1139
1140 fn apply_window(&self, signal: &[f64]) -> Vec<f64> {
1141 let n = signal.len();
1142 match &self.window_type {
1143 WindowType::Hanning => signal
1144 .iter()
1145 .enumerate()
1146 .map(|(i, &x)| x * 0.5 * (1.0 - (2.0 * PI * i as f64 / (n - 1) as f64).cos()))
1147 .collect(),
1148 WindowType::Hamming => signal
1149 .iter()
1150 .enumerate()
1151 .map(|(i, &x)| x * (0.54 - 0.46 * (2.0 * PI * i as f64 / (n - 1) as f64).cos()))
1152 .collect(),
1153 _ => signal.to_vec(),
1154 }
1155 }
1156
1157 fn compute_fft(&self, signal: &[f64]) -> Result<Vec<f64>> {
1158 let n = signal.len();
1160 let mut spectrum = vec![0.0; n / 2];
1161
1162 for (k, spectrum_k) in spectrum.iter_mut().enumerate() {
1163 let mut real = 0.0;
1164 let mut imag = 0.0;
1165
1166 for (j, &signal_j) in signal.iter().enumerate() {
1167 let angle = -2.0 * PI * (k * j) as f64 / n as f64;
1168 real += signal_j * angle.cos();
1169 imag += signal_j * angle.sin();
1170 }
1171
1172 *spectrum_k = (real * real + imag * imag).sqrt();
1173 }
1174
1175 Ok(spectrum)
1176 }
1177
1178 fn find_dominant_frequencies(&self, spectrum: &[f64]) -> Vec<f64> {
1179 let mut freq_mag_pairs: Vec<(usize, f64)> = spectrum
1180 .iter()
1181 .enumerate()
1182 .map(|(i, &mag)| (i, mag))
1183 .collect();
1184
1185 freq_mag_pairs.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
1186
1187 freq_mag_pairs
1188 .iter()
1189 .take(5)
1190 .map(|(i_, _)| *i_ as f64 / spectrum.len() as f64)
1191 .collect()
1192 }
1193}
1194
1195impl WaveletProcessor {
1196 fn new() -> Self {
1197 Self {
1198 wavelet_type: WaveletType::Daubechies { order: 4 },
1199 levels: 5,
1200 }
1201 }
1202
1203 fn analyze_wavelets(&self, signal: &[f64]) -> Result<Vec<AdvancedMemoryPattern>> {
1204 let patterns = Vec::new();
1206 Ok(patterns)
1208 }
1209}
1210
impl KalmanFilter {
    /// Creates a filter with zero initial state, identity covariance, and
    /// default noise variances.
    fn new() -> Self {
        Self {
            // state = [level, velocity]; identity initial covariance.
            state: vec![0.0, 0.0],
            covariance: vec![vec![1.0, 0.0], vec![0.0, 1.0]],
            process_noise: 0.01,
            measurement_noise: 0.1,
        }
    }

    /// Runs predict/update over every sample and returns the smoothed level
    /// estimates. Mutates the filter's state, so repeated calls continue
    /// from where the previous call left off.
    fn filter(&mut self, signal: &[f64]) -> Result<Vec<f64>> {
        let mut filtered = Vec::new();

        for &measurement in signal {
            self.predict();
            self.update(measurement);
            filtered.push(self.state[0]);
        }

        Ok(filtered)
    }

    /// Prediction step: advance level by velocity and inflate the diagonal
    /// covariance by the process noise.
    ///
    /// NOTE(review): `update` never changes state[1], so velocity stays at its
    /// initial 0.0 and this step currently only inflates covariance — confirm
    /// whether a velocity update was intended.
    fn predict(&mut self) {
        self.state[0] += self.state[1];
        self.covariance[0][0] += self.process_noise;
        self.covariance[1][1] += self.process_noise;
    }

    /// Measurement update (simplified scalar form): blend the level estimate
    /// toward the measurement by the Kalman gain and shrink its variance.
    fn update(&mut self, measurement: f64) {
        let gain = self.covariance[0][0] / (self.covariance[0][0] + self.measurement_noise);
        let innovation = measurement - self.state[0];
        self.state[0] += gain * innovation;
        self.covariance[0][0] *= 1.0 - gain;
    }
}
1258
1259impl AdvancedStatisticalAnalyzer {
1260 fn new() -> Result<Self> {
1261 Ok(Self {
1262 config: StatisticalConfig {
1263 significance_level: 0.05,
1264 bootstrap_iterations: 1000,
1265 confidence_interval: 0.95,
1266 },
1267 hypothesis_tester: HypothesisTestEngine::new(),
1268 time_series_analyzer: TimeSeriesAnalyzer::new(),
1269 })
1270 }
1271
1272 fn analyze_patterns(&self, data: &[f64]) -> Result<Vec<AdvancedMemoryPattern>> {
1273 let mut patterns = Vec::new();
1274
1275 let test_results = self.hypothesis_tester.run_tests(data)?;
1277
1278 let ts_patterns = self.time_series_analyzer.analyze(data)?;
1280 patterns.extend(ts_patterns);
1281
1282 Ok(patterns)
1283 }
1284}
1285
1286impl FeatureExtractor {
1287 fn new() -> Self {
1288 Self {
1289 feature_types: vec![
1290 FeatureType::StatisticalMoments,
1291 FeatureType::FrequencyDomain,
1292 FeatureType::TimeDomain,
1293 ],
1294 scaling_params: HashMap::new(),
1295 }
1296 }
1297
1298 fn extract_features(&self, data: &[f64]) -> Result<Vec<f64>> {
1299 let mut features = Vec::new();
1300
1301 for feature_type in &self.feature_types {
1302 match feature_type {
1303 FeatureType::StatisticalMoments => {
1304 features.extend(self.extract_statistical_moments(data)?);
1305 }
1306 FeatureType::FrequencyDomain => {
1307 features.extend(self.extract_frequency_features(data)?);
1308 }
1309 FeatureType::TimeDomain => {
1310 features.extend(self.extract_time_domain_features(data)?);
1311 }
1312 _ => {} }
1314 }
1315
1316 Ok(features)
1317 }
1318
1319 fn extract_statistical_moments(&self, data: &[f64]) -> Result<Vec<f64>> {
1320 if data.is_empty() {
1321 return Ok(vec![0.0; 4]);
1322 }
1323
1324 let n = data.len() as f64;
1325 let mean = data.iter().sum::<f64>() / n;
1326
1327 let variance = data.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / n;
1328 let std_dev = variance.sqrt();
1329
1330 let skewness = if std_dev > 0.0 {
1331 data.iter()
1332 .map(|x| ((x - mean) / std_dev).powi(3))
1333 .sum::<f64>()
1334 / n
1335 } else {
1336 0.0
1337 };
1338
1339 let kurtosis = if std_dev > 0.0 {
1340 data.iter()
1341 .map(|x| ((x - mean) / std_dev).powi(4))
1342 .sum::<f64>()
1343 / n
1344 - 3.0
1345 } else {
1346 0.0
1347 };
1348
1349 Ok(vec![mean, std_dev, skewness, kurtosis])
1350 }
1351
1352 fn extract_frequency_features(&self, data: &[f64]) -> Result<Vec<f64>> {
1353 if data.len() < 4 {
1355 return Ok(vec![0.0; 3]);
1356 }
1357
1358 let mut zero_crossings = 0;
1360 for i in 1..data.len() {
1361 if (data[i] >= 0.0) != (data[i - 1] >= 0.0) {
1362 zero_crossings += 1;
1363 }
1364 }
1365 let zero_crossing_rate = zero_crossings as f64 / data.len() as f64;
1366
1367 let spectral_centroid = data
1369 .iter()
1370 .enumerate()
1371 .map(|(i, &x)| i as f64 * x.abs())
1372 .sum::<f64>()
1373 / data.iter().map(|&x| x.abs()).sum::<f64>().max(1.0);
1374
1375 let total_energy = data.iter().map(|&x| x * x).sum::<f64>();
1377 let mut cumulative_energy = 0.0;
1378 let mut rolloff_index = 0;
1379 for (i, &x) in data.iter().enumerate() {
1380 cumulative_energy += x * x;
1381 if cumulative_energy >= 0.85 * total_energy {
1382 rolloff_index = i;
1383 break;
1384 }
1385 }
1386 let spectral_rolloff = rolloff_index as f64 / data.len() as f64;
1387
1388 Ok(vec![
1389 zero_crossing_rate,
1390 spectral_centroid,
1391 spectral_rolloff,
1392 ])
1393 }
1394
1395 fn extract_time_domain_features(&self, data: &[f64]) -> Result<Vec<f64>> {
1396 if data.is_empty() {
1397 return Ok(vec![0.0; 3]);
1398 }
1399
1400 let energy = data.iter().map(|&x| x * x).sum::<f64>();
1402
1403 let rms = (energy / data.len() as f64).sqrt();
1405
1406 let min_val = data.iter().fold(f64::INFINITY, |a, &b| a.min(b));
1408 let max_val = data.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
1409 let peak_to_peak = max_val - min_val;
1410
1411 Ok(vec![energy, rms, peak_to_peak])
1412 }
1413}
1414
1415impl Default for FrequencyCharacteristics {
1418 fn default() -> Self {
1419 Self {
1420 dominant_frequencies: Vec::new(),
1421 power_spectrum: Vec::new(),
1422 spectral_centroid: 0.0,
1423 spectral_bandwidth: 0.0,
1424 spectral_rolloff: 0.0,
1425 spectral_flux: 0.0,
1426 zero_crossing_rate: 0.0,
1427 }
1428 }
1429}
1430
1431impl Default for StatisticalProperties {
1432 fn default() -> Self {
1433 Self {
1434 mean: 0.0,
1435 std_dev: 0.0,
1436 skewness: 0.0,
1437 kurtosis: 0.0,
1438 entropy: 0.0,
1439 autocorrelation: Vec::new(),
1440 partial_autocorrelation: Vec::new(),
1441 hjorth_parameters: HjorthParameters {
1442 activity: 0.0,
1443 mobility: 0.0,
1444 complexity: 0.0,
1445 },
1446 }
1447 }
1448}
1449
1450impl Default for TrendInfo {
1451 fn default() -> Self {
1452 Self {
1453 direction: 0.0,
1454 strength: 0.0,
1455 acceleration: 0.0,
1456 stability: 0.0,
1457 change_points: Vec::new(),
1458 }
1459 }
1460}
1461
1462impl Default for PatternEvolution {
1463 fn default() -> Self {
1464 Self {
1465 stability: 0.0,
1466 evolution_rate: 0.0,
1467 adaptation_score: 0.0,
1468 historical_states: Vec::new(),
1469 }
1470 }
1471}
1472
1473impl PatternDatabase {
1474 fn new() -> Self {
1475 Self {
1476 patterns: HashMap::new(),
1477 similarity_matrix: HashMap::new(),
1478 frequency_stats: HashMap::new(),
1479 }
1480 }
1481}
1482
1483impl HypothesisTestEngine {
1484 fn new() -> Self {
1485 Self {
1486 test_types: vec![
1487 HypothesisTestType::MannKendall,
1488 HypothesisTestType::LjungBox,
1489 ],
1490 }
1491 }
1492
1493 fn run_tests(&self, data: &[f64]) -> Result<HashMap<String, f64>> {
1494 let mut results = HashMap::new();
1495
1496 let mk_statistic = self.mann_kendall_test(data)?;
1498 results.insert("mann_kendall".to_string(), mk_statistic);
1499
1500 Ok(results)
1501 }
1502
1503 fn mann_kendall_test(&self, data: &[f64]) -> Result<f64> {
1504 if data.len() < 3 {
1505 return Ok(0.0);
1506 }
1507
1508 let mut s = 0;
1509 let n = data.len();
1510
1511 for i in 0..n - 1 {
1512 for j in i + 1..n {
1513 if data[j] > data[i] {
1514 s += 1;
1515 } else if data[j] < data[i] {
1516 s -= 1;
1517 }
1518 }
1519 }
1520
1521 let max_s = (n * (n - 1) / 2) as i32;
1523 Ok(s as f64 / max_s as f64)
1524 }
1525}
1526
1527impl TimeSeriesAnalyzer {
1528 fn new() -> Self {
1529 Self {
1530 arima_fitter: ARIMAFitter::new(),
1531 seasonal_decomposer: SeasonalDecomposer::new(),
1532 change_point_detector: ChangePointDetector::new(),
1533 }
1534 }
1535
1536 fn analyze(&self, data: &[f64]) -> Result<Vec<AdvancedMemoryPattern>> {
1537 let mut patterns = Vec::new();
1538
1539 if let Ok(arima_pattern) = self.arima_fitter.fit_and_analyze(data) {
1541 patterns.push(arima_pattern);
1542 }
1543
1544 let change_points = self.change_point_detector.detect_change_points(data)?;
1546 if !change_points.is_empty() {
1547 patterns.push(AdvancedMemoryPattern {
1548 id: "change_points".to_string(),
1549 pattern_type: AdvancedPatternType::StepFunction {
1550 step_size: 0.0,
1551 step_frequency: change_points.len() as f64 / data.len() as f64,
1552 plateaus: Vec::new(),
1553 },
1554 confidence: 0.7,
1555 signature: change_points.iter().map(|&x| x as f64).collect(),
1556 description: "Change point pattern detected".to_string(),
1557 frequency_characteristics: FrequencyCharacteristics::default(),
1558 statistical_properties: StatisticalProperties::default(),
1559 anomaly_score: 0.0,
1560 strength: 0.7,
1561 periodicity: None,
1562 trend: TrendInfo {
1563 direction: 0.0,
1564 strength: 0.0,
1565 acceleration: 0.0,
1566 stability: 0.0,
1567 change_points,
1568 },
1569 leak_indicators: Vec::new(),
1570 evolution: PatternEvolution::default(),
1571 });
1572 }
1573
1574 Ok(patterns)
1575 }
1576}
1577
1578impl ARIMAFitter {
1579 fn new() -> Self {
1580 Self {
1581 parameters: ARIMAParameters {
1582 p: 1,
1583 d: 1,
1584 q: 1,
1585 seasonal: None,
1586 },
1587 }
1588 }
1589
1590 fn fit_and_analyze(&self, data: &[f64]) -> Result<AdvancedMemoryPattern> {
1591 Ok(AdvancedMemoryPattern {
1593 id: "arima_pattern".to_string(),
1594 pattern_type: AdvancedPatternType::LinearGrowth {
1595 slope: 1.0,
1596 intercept: 0.0,
1597 r_squared: 0.8,
1598 },
1599 confidence: 0.6,
1600 signature: data.to_vec(),
1601 description: "ARIMA-fitted pattern".to_string(),
1602 frequency_characteristics: FrequencyCharacteristics::default(),
1603 statistical_properties: StatisticalProperties::default(),
1604 anomaly_score: 0.0,
1605 strength: 0.6,
1606 periodicity: None,
1607 trend: TrendInfo::default(),
1608 leak_indicators: Vec::new(),
1609 evolution: PatternEvolution::default(),
1610 })
1611 }
1612}
1613
1614impl SeasonalDecomposer {
1615 fn new() -> Self {
1616 Self {
1617 method: DecompositionMethod::Additive,
1618 }
1619 }
1620}
1621
1622impl ChangePointDetector {
1623 fn new() -> Self {
1624 Self {
1625 algorithms: vec![
1626 ChangePointAlgorithm::CUSUM { threshold: 2.0 },
1627 ChangePointAlgorithm::BinarySegmentation { min_size: 5 },
1628 ],
1629 }
1630 }
1631
1632 fn detect_change_points(&self, data: &[f64]) -> Result<Vec<usize>> {
1633 let mut change_points = Vec::new();
1635
1636 if data.len() < 10 {
1637 return Ok(change_points);
1638 }
1639
1640 let mean = data.iter().sum::<f64>() / data.len() as f64;
1641 let mut cumsum = 0.0;
1642 let threshold =
1643 2.0 * data.iter().map(|x| (x - mean).abs()).sum::<f64>() / data.len() as f64;
1644
1645 for (i, &value) in data.iter().enumerate() {
1646 cumsum += value - mean;
1647
1648 if cumsum.abs() > threshold {
1649 change_points.push(i);
1650 cumsum = 0.0; }
1652 }
1653
1654 Ok(change_points)
1655 }
1656}