1#![allow(dead_code)]
8#![allow(clippy::too_many_arguments)]
9
10use crate::error::{IoError, Result};
11use scirs2_core::ndarray::ArrayStatCompat;
12use scirs2_core::ndarray::{Array1, Array2};
13use scirs2_core::random::Rng;
14use statrs::statistics::Statistics;
15use std::collections::{HashMap, VecDeque};
16use std::time::Instant;
17
/// Neural-inspired recognizer that scores byte streams against a fixed
/// set of pattern families and accumulates per-family statistics for
/// novelty detection.
#[derive(Debug)]
pub struct AdvancedPatternRecognizer {
    // One small scoring network per pattern family (repetition,
    // sequential, fractal, entropy, compression).
    pattern_networks: Vec<PatternNetwork>,
    // Running statistics per pattern type, consulted by novelty checks.
    pattern_database: HashMap<String, PatternMetadata>,
    // Bounded buffer of observed pattern instances (not populated by
    // the code paths visible in this file).
    analysis_buffer: VecDeque<PatternInstance>,
    // Learning-rate hyperparameter (not read by the code paths visible
    // in this file).
    learning_rate: f32,
}
30
31impl Default for AdvancedPatternRecognizer {
32 fn default() -> Self {
33 Self::new()
34 }
35}
36
37impl AdvancedPatternRecognizer {
38 pub fn new() -> Self {
40 let pattern_networks = vec![
41 PatternNetwork::new("repetition", 16, 8, 4),
42 PatternNetwork::new("sequential", 16, 8, 4),
43 PatternNetwork::new("fractal", 32, 16, 8),
44 PatternNetwork::new("entropy", 16, 8, 4),
45 PatternNetwork::new("compression", 24, 12, 6),
46 ];
47
48 Self {
49 pattern_networks,
50 pattern_database: HashMap::new(),
51 analysis_buffer: VecDeque::with_capacity(1000),
52 learning_rate: 0.001,
53 }
54 }
55
56 pub fn analyze_patterns(&mut self, data: &[u8]) -> Result<AdvancedPatternAnalysis> {
58 let mut pattern_scores = HashMap::new();
59 let mut emergent_patterns = Vec::new();
60
61 let features = self.extract_multiscale_features(data)?;
63
64 let data_characteristics = self.characterize_data(data);
66
67 let mut network_results = Vec::new();
69 for network in &mut self.pattern_networks {
70 let score = network.analyze(&features)?;
71 let pattern_type = network.pattern_type.clone();
72 network_results.push((pattern_type, score));
73 }
74
75 for (pattern_type, score) in network_results {
77 let is_novel = self.is_novel_pattern(&pattern_type, score);
79
80 pattern_scores.insert(pattern_type.clone(), score);
81
82 if score > 0.8 && is_novel {
84 emergent_patterns.push(EmergentPattern {
85 pattern_type,
86 confidence: score,
87 discovered_at: Instant::now(),
88 data_characteristics: data_characteristics.clone(),
89 });
90 }
91 }
92
93 self.update_pattern_database(data, &pattern_scores)?;
95
96 let meta_patterns = self.detect_meta_patterns(&pattern_scores)?;
98 let optimization_recommendations =
99 self.generate_optimization_recommendations(&pattern_scores);
100
101 Ok(AdvancedPatternAnalysis {
102 pattern_scores,
103 emergent_patterns,
104 meta_patterns,
105 complexity_index: self.calculate_complexity_index(&features),
106 predictability_score: self.calculate_predictability(data),
107 optimization_recommendations,
108 })
109 }
110
111 fn extract_multiscale_features(&self, data: &[u8]) -> Result<Array2<f32>> {
113 let byte_features = self.extract_byte_level_features(data);
115 let local_features_4 = self.extract_local_structure_features(data, 4);
116 let local_features_16 = self.extract_local_structure_features(data, 16);
117 let global_features = self.extract_global_structure_features(data);
118
119 let max_features = [
121 byte_features.len(),
122 local_features_4.len(),
123 local_features_16.len(),
124 global_features.len(),
125 ]
126 .into_iter()
127 .max()
128 .unwrap_or(0);
129
130 let mut padded_features = Vec::with_capacity(4 * max_features);
132
133 let pad_features = |mut features: Vec<f32>, target_len: usize| {
135 features.resize(target_len, 0.0);
136 features
137 };
138
139 padded_features.extend(pad_features(byte_features, max_features));
141 padded_features.extend(pad_features(local_features_4, max_features));
142 padded_features.extend(pad_features(local_features_16, max_features));
143 padded_features.extend(pad_features(global_features, max_features));
144
145 let feature_array = Array2::from_shape_vec((4, max_features), padded_features)
147 .map_err(|e| IoError::Other(format!("Feature extraction error: {e}")))?;
148
149 Ok(feature_array)
150 }
151
152 fn extract_byte_level_features(&self, data: &[u8]) -> Vec<f32> {
154 let mut frequency = [0u32; 256];
155 for &byte in data {
156 frequency[byte as usize] += 1;
157 }
158
159 let len = data.len() as f32;
160 let mut features = Vec::new();
161
162 let mean = data.iter().map(|&x| x as f32).sum::<f32>() / len;
164 let variance = data.iter().map(|&x| (x as f32 - mean).powi(2)).sum::<f32>() / len;
165 let skewness = data.iter().map(|&x| (x as f32 - mean).powi(3)).sum::<f32>()
166 / (len * variance.powf(1.5));
167 let kurtosis =
168 data.iter().map(|&x| (x as f32 - mean).powi(4)).sum::<f32>() / (len * variance.powi(2));
169
170 features.extend(&[mean / 255.0, variance / (255.0 * 255.0), skewness, kurtosis]);
171
172 let mut shannon_entropy = 0.0;
174 let mut gini_index = 0.0;
175
176 for &freq in &frequency {
177 if freq > 0 {
178 let p = freq as f32 / len;
179 shannon_entropy -= p * p.log2();
180 gini_index += p * p;
181 }
182 }
183
184 features.push(shannon_entropy / 8.0);
185 features.push(1.0 - gini_index);
186
187 features
188 }
189
190 fn extract_local_structure_features(&self, data: &[u8], window_size: usize) -> Vec<f32> {
192 let mut features = Vec::new();
193
194 if data.len() < window_size {
195 return vec![0.0; 4]; }
197
198 let mut autocorrelations = Vec::new();
199 let mut transitions = 0;
200 let mut periodicity_score: f32 = 0.0;
201
202 for lag in 1..window_size.min(8) {
204 let mut correlation = 0.0;
205 let mut count = 0;
206
207 for i in 0..(data.len() - lag) {
208 if i + lag < data.len() {
209 correlation += (data[i] as f32) * (data[i + lag] as f32);
210 count += 1;
211 }
212 }
213
214 if count > 0 {
215 autocorrelations.push(correlation / count as f32);
216 }
217 }
218
219 for window in data.windows(window_size) {
221 for i in 1..window.len() {
222 if window[i] != window[i - 1] {
223 transitions += 1;
224 }
225 }
226 }
227
228 for period in 2..window_size.min(16) {
230 let mut matches = 0;
231 let mut total = 0;
232
233 for i in 0..(data.len() - period) {
234 if data[i] == data[i + period] {
235 matches += 1;
236 }
237 total += 1;
238 }
239
240 if total > 0 {
241 periodicity_score = periodicity_score.max(matches as f32 / total as f32);
242 }
243 }
244
245 features.push(
246 autocorrelations.iter().sum::<f32>()
247 / autocorrelations.len().max(1) as f32
248 / (255.0 * 255.0),
249 );
250 features.push(transitions as f32 / data.len() as f32);
251 features.push(periodicity_score);
252 features.push(autocorrelations.len() as f32 / 8.0);
253
254 features
255 }
256
257 fn extract_global_structure_features(&self, data: &[u8]) -> Vec<f32> {
259 let mut features = Vec::new();
260
261 let lz_complexity = self.calculate_lempel_ziv_complexity(data);
263 features.push(lz_complexity);
264
265 let reversed_data: Vec<u8> = data.iter().rev().cloned().collect();
267 let lcs_ratio = self.calculate_lcs_ratio(data, &reversed_data);
268 features.push(lcs_ratio);
269
270 let fractal_dimension = self.estimate_fractal_dimension(data);
272 features.push(fractal_dimension);
273
274 let rle_ratio = self.calculate_rle_ratio(data);
276 features.push(rle_ratio);
277
278 features
279 }
280
281 fn calculate_lempel_ziv_complexity(&self, data: &[u8]) -> f32 {
283 let mut dictionary = std::collections::HashSet::new();
284 let mut i = 0;
285 let mut complexity = 0;
286
287 while i < data.len() {
288 let mut j = i + 1;
289 while j <= data.len() && dictionary.contains(&data[i..j]) {
290 j += 1;
291 }
292
293 if j <= data.len() {
294 dictionary.insert(data[i..j].to_vec());
295 }
296
297 complexity += 1;
298 i = j.min(data.len());
299 }
300
301 complexity as f32 / data.len() as f32
302 }
303
304 fn calculate_lcs_ratio(&self, data1: &[u8], data2: &[u8]) -> f32 {
306 let len1 = data1.len();
307 let len2 = data2.len();
308
309 if len1 == 0 || len2 == 0 {
310 return 0.0;
311 }
312
313 let sample_size = 100.min(len1).min(len2);
315 let mut dp = vec![vec![0; sample_size + 1]; sample_size + 1];
316
317 for i in 1..=sample_size {
318 for j in 1..=sample_size {
319 if data1[i - 1] == data2[j - 1] {
320 dp[i][j] = dp[i - 1][j - 1] + 1;
321 } else {
322 dp[i][j] = dp[i - 1][j].max(dp[i][j - 1]);
323 }
324 }
325 }
326
327 dp[sample_size][sample_size] as f32 / sample_size as f32
328 }
329
330 fn estimate_fractal_dimension(&self, data: &[u8]) -> f32 {
332 if data.len() < 4 {
333 return 1.0;
334 }
335
336 let mut dimensions = Vec::new();
337
338 for scale in [2, 4, 8, 16].iter() {
339 if data.len() >= *scale {
340 let mut boxes = std::collections::HashSet::new();
341
342 for chunk in data.chunks(*scale) {
343 let min_val = *chunk.iter().min().unwrap_or(&0);
344 let max_val = *chunk.iter().max().unwrap_or(&255);
345 boxes.insert((min_val / 16, max_val / 16)); }
347
348 if !boxes.is_empty() {
349 dimensions.push(((*scale as f32).ln(), (boxes.len() as f32).ln()));
350 }
351 }
352 }
353
354 if dimensions.len() < 2 {
355 return 1.0;
356 }
357
358 let n = dimensions.len() as f32;
360 let sum_x: f32 = dimensions.iter().map(|(x, _)| *x).sum();
361 let sum_y: f32 = dimensions.iter().map(|(_, y)| y).sum();
362 let sum_xy: f32 = dimensions.iter().map(|(x, y)| x * y).sum();
363 let sum_x2: f32 = dimensions.iter().map(|(x, _)| x * x).sum();
364
365 let slope = (n * sum_xy - sum_x * sum_y) / (n * sum_x2 - sum_x * sum_x);
366 slope.abs().min(2.0) }
368
369 fn calculate_rle_ratio(&self, data: &[u8]) -> f32 {
371 if data.is_empty() {
372 return 1.0;
373 }
374
375 let mut compressed_size = 0;
376 let mut i = 0;
377
378 while i < data.len() {
379 let current_byte = data[i];
380 let mut run_length = 1;
381
382 while i + run_length < data.len() && data[i + run_length] == current_byte {
383 run_length += 1;
384 }
385
386 compressed_size += if run_length > 3 { 2 } else { run_length }; i += run_length;
388 }
389
390 compressed_size as f32 / data.len() as f32
391 }
392
393 fn is_novel_pattern(&self, pattern_type: &str, score: f32) -> bool {
395 if let Some(metadata) = self.pattern_database.get(pattern_type) {
396 score > metadata.max_score * 1.1 } else {
398 true }
400 }
401
402 fn characterize_data(&self, data: &[u8]) -> DataCharacteristics {
404 DataCharacteristics {
405 size: data.len(),
406 entropy: self.calculate_shannon_entropy(data),
407 mean: data.iter().map(|&x| x as f32).sum::<f32>() / data.len() as f32,
408 variance: {
409 let mean = data.iter().map(|&x| x as f32).sum::<f32>() / data.len() as f32;
410 data.iter().map(|&x| (x as f32 - mean).powi(2)).sum::<f32>() / data.len() as f32
411 },
412 }
413 }
414
415 fn calculate_shannon_entropy(&self, data: &[u8]) -> f32 {
417 let mut frequency = [0u32; 256];
418 for &byte in data {
419 frequency[byte as usize] += 1;
420 }
421
422 let len = data.len() as f32;
423 let mut entropy = 0.0;
424
425 for &freq in &frequency {
426 if freq > 0 {
427 let p = freq as f32 / len;
428 entropy -= p * p.log2();
429 }
430 }
431
432 entropy / 8.0
433 }
434
435 fn update_pattern_database(
437 &mut self,
438 data: &[u8],
439 pattern_scores: &HashMap<String, f32>,
440 ) -> Result<()> {
441 let data_characteristics = self.characterize_data(data);
442
443 for (pattern_type, &score) in pattern_scores {
444 let metadata = self
445 .pattern_database
446 .entry(pattern_type.clone())
447 .or_insert_with(|| PatternMetadata {
448 pattern_type: pattern_type.clone(),
449 observation_count: 0,
450 max_score: 0.0,
451 avg_score: 0.0,
452 last_seen: Instant::now(),
453 associated_data_characteristics: Vec::new(),
454 });
455
456 metadata.observation_count += 1;
457 metadata.max_score = metadata.max_score.max(score);
458 metadata.avg_score = (metadata.avg_score * (metadata.observation_count - 1) as f32
459 + score)
460 / metadata.observation_count as f32;
461 metadata.last_seen = Instant::now();
462 metadata
463 .associated_data_characteristics
464 .push(data_characteristics.clone());
465
466 if metadata.associated_data_characteristics.len() > 100 {
468 metadata.associated_data_characteristics.remove(0);
469 }
470 }
471
472 Ok(())
473 }
474
475 fn detect_meta_patterns(
477 &self,
478 pattern_scores: &HashMap<String, f32>,
479 ) -> Result<Vec<MetaPattern>> {
480 let mut meta_patterns = Vec::new();
481
482 let score_pairs: Vec<_> = pattern_scores.iter().collect();
484
485 for i in 0..score_pairs.len() {
486 for j in (i + 1)..score_pairs.len() {
487 let (type1, &score1) = score_pairs[i];
488 let (type2, &score2) = score_pairs[j];
489
490 if score1 > 0.7 && score2 > 0.7 {
492 meta_patterns.push(MetaPattern {
493 pattern_combination: vec![type1.clone(), type2.clone()],
494 correlation_strength: (score1 * score2).sqrt(),
495 synergy_type: self.determine_synergy_type(type1, type2),
496 });
497 }
498 }
499 }
500
501 Ok(meta_patterns)
502 }
503
504 fn determine_synergy_type(&self, type1: &str, type2: &str) -> SynergyType {
506 match (type1, type2) {
507 ("repetition", "compression") => SynergyType::ReinforcingCompression,
508 ("sequential", "entropy") => SynergyType::ContrastedRandomness,
509 ("fractal", "periodicity") => SynergyType::HierarchicalStructure,
510 _ => SynergyType::Unknown,
511 }
512 }
513
514 fn calculate_complexity_index(&self, features: &Array2<f32>) -> f32 {
516 let weights = Array1::from(vec![0.4, 0.3, 0.2, 0.1]); let scale_complexities = features
519 .mean_axis(scirs2_core::ndarray::Axis(1))
520 .expect("Operation failed");
521 weights.dot(&scale_complexities)
522 }
523
524 fn calculate_predictability(&self, data: &[u8]) -> f32 {
526 if data.len() < 10 {
527 return 0.5;
528 }
529
530 let mut correct_predictions = 0;
531 let prediction_window = 5.min(data.len() - 1);
532
533 for i in prediction_window..data.len() {
534 let recent_bytes = &data[i - prediction_window..i];
536 let predicted = self.predict_next_byte(recent_bytes);
537
538 if predicted == data[i] {
539 correct_predictions += 1;
540 }
541 }
542
543 correct_predictions as f32 / (data.len() - prediction_window) as f32
544 }
545
546 fn predict_next_byte(&self, history: &[u8]) -> u8 {
548 if history.is_empty() {
549 return 0;
550 }
551
552 let mut frequency = [0u32; 256];
554 for &byte in history {
555 frequency[byte as usize] += 1;
556 }
557
558 frequency
559 .iter()
560 .enumerate()
561 .max_by_key(|(_, &count)| count)
562 .map(|(byte, _)| byte as u8)
563 .unwrap_or(0)
564 }
565
566 fn generate_optimization_recommendations(
568 &self,
569 pattern_scores: &HashMap<String, f32>,
570 ) -> Vec<OptimizationRecommendation> {
571 let mut recommendations = Vec::new();
572
573 for (pattern_type, &score) in pattern_scores {
574 match pattern_type.as_str() {
575 "repetition" if score > 0.8 => {
576 recommendations.push(OptimizationRecommendation {
577 optimization_type: "compression".to_string(),
578 reason: "High repetition detected - compression will be highly effective"
579 .to_string(),
580 expected_improvement: score * 0.7,
581 confidence: score,
582 });
583 }
584 "sequential" if score > 0.7 => {
585 recommendations.push(OptimizationRecommendation {
586 optimization_type: "streaming".to_string(),
587 reason: "Sequential access pattern - streaming optimization recommended"
588 .to_string(),
589 expected_improvement: score * 0.5,
590 confidence: score,
591 });
592 }
593 "fractal" if score > 0.8 => {
594 recommendations.push(OptimizationRecommendation {
595 optimization_type: "hierarchical_processing".to_string(),
596 reason:
597 "Fractal structure detected - hierarchical processing will be efficient"
598 .to_string(),
599 expected_improvement: score * 0.6,
600 confidence: score,
601 });
602 }
603 "entropy" if score < 0.3 => {
604 recommendations.push(OptimizationRecommendation {
605 optimization_type: "aggressive_compression".to_string(),
606 reason: "Low entropy - aggressive compression algorithms recommended"
607 .to_string(),
608 expected_improvement: (1.0 - score) * 0.8,
609 confidence: 1.0 - score,
610 });
611 }
612 _ => {}
613 }
614 }
615
616 recommendations
617 }
618}
619
/// One small feed-forward scorer specialized for a single pattern
/// family; scoring heads in `analyze` are analytic, not learned.
#[derive(Debug)]
struct PatternNetwork {
    // Family name, e.g. "repetition" or "entropy".
    pattern_type: String,
    // Hidden-layer weight matrix, shape (hidden_size, input_size).
    weights: Array2<f32>,
    // Hidden-layer bias vector (initialized to zeros).
    bias: Array1<f32>,
    // Most recent scores produced by `analyze`, bounded to 100 entries.
    activation_history: VecDeque<f32>,
}
628
629impl PatternNetwork {
630 fn new(pattern_type: &str, input_size: usize, hidden_size: usize, _output_size: usize) -> Self {
631 let scale = (2.0 / (input_size + hidden_size) as f32).sqrt();
633 let mut rng = scirs2_core::random::rng();
634 let weights = Array2::from_shape_fn((hidden_size, input_size), |_| {
635 (rng.random::<f32>() - 0.5) * 2.0 * scale
636 });
637
638 Self {
639 pattern_type: pattern_type.to_string(),
640 weights,
641 bias: Array1::zeros(hidden_size),
642 activation_history: VecDeque::with_capacity(100),
643 }
644 }
645
646 fn analyze(&mut self, features: &Array2<f32>) -> Result<f32> {
647 let flattened = features.as_slice().expect("Operation failed");
649 let input = Array1::from(flattened.to_vec());
650
651 let network_input = if input.len() > self.weights.ncols() {
653 input
654 .slice(scirs2_core::ndarray::s![..self.weights.ncols()])
655 .to_owned()
656 } else {
657 let mut padded = Array1::zeros(self.weights.ncols());
658 padded
659 .slice_mut(scirs2_core::ndarray::s![..input.len()])
660 .assign(&input);
661 padded
662 };
663
664 let hidden = self.weights.dot(&network_input) + &self.bias;
666 let activated = hidden.mapv(Self::relu);
667
668 let score = match self.pattern_type.as_str() {
670 "repetition" => self.score_repetition_pattern(&activated),
671 "sequential" => self.score_sequential_pattern(&activated),
672 "fractal" => self.score_fractal_pattern(&activated),
673 "entropy" => self.score_entropy_pattern(&activated),
674 "compression" => self.score_compression_pattern(&activated),
675 _ => activated.mean_or(0.0),
676 };
677
678 self.activation_history.push_back(score);
679 if self.activation_history.len() > 100 {
680 self.activation_history.pop_front();
681 }
682
683 Ok(score.clamp(0.0, 1.0))
684 }
685
686 fn relu(x: f32) -> f32 {
687 x.max(0.0)
688 }
689
690 fn score_repetition_pattern(&self, activations: &Array1<f32>) -> f32 {
691 let mut max_repetition: f32 = 0.0;
693
694 for window_size in 2..=activations.len() / 2 {
695 let mut repetition_score = 0.0;
696 let mut count = 0;
697
698 for i in 0..=(activations.len() - 2 * window_size) {
699 let window1 = activations.slice(scirs2_core::ndarray::s![i..i + window_size]);
700 let window2 = activations.slice(scirs2_core::ndarray::s![
701 i + window_size..i + 2 * window_size
702 ]);
703
704 let similarity = window1
705 .iter()
706 .zip(window2.iter())
707 .map(|(a, b)| 1.0 - (a - b).abs())
708 .sum::<f32>()
709 / window_size as f32;
710
711 repetition_score += similarity;
712 count += 1;
713 }
714
715 if count > 0 {
716 max_repetition = max_repetition.max(repetition_score / count as f32);
717 }
718 }
719
720 max_repetition
721 }
722
723 fn score_sequential_pattern(&self, activations: &Array1<f32>) -> f32 {
724 if activations.len() < 2 {
725 return 0.0;
726 }
727
728 let mut increasing = 0;
730 let mut decreasing = 0;
731
732 for i in 1..activations.len() {
733 if activations[i] > activations[i - 1] {
734 increasing += 1;
735 } else if activations[i] < activations[i - 1] {
736 decreasing += 1;
737 }
738 }
739
740 let total_transitions = activations.len() - 1;
741 let max_direction = increasing.max(decreasing);
742
743 max_direction as f32 / total_transitions as f32
744 }
745
746 fn score_fractal_pattern(&self, activations: &Array1<f32>) -> f32 {
747 let mut fractal_score = 0.0;
749 let mut scale_count = 0;
750
751 for scale in [2, 4, 8].iter() {
752 if activations.len() >= scale * 2 {
753 let downsampled1 = self.downsample(activations, *scale, 0);
754 let downsampled2 = self.downsample(activations, *scale, *scale);
755
756 if !downsampled1.is_empty() && !downsampled2.is_empty() {
757 let similarity = self.calculate_similarity(&downsampled1, &downsampled2);
758 fractal_score += similarity;
759 scale_count += 1;
760 }
761 }
762 }
763
764 if scale_count > 0 {
765 fractal_score / scale_count as f32
766 } else {
767 0.0
768 }
769 }
770
771 fn score_entropy_pattern(&self, activations: &Array1<f32>) -> f32 {
772 let quantized: Vec<u8> = activations.iter().map(|&x| (x * 255.0) as u8).collect();
774
775 let mut frequency = [0u32; 256];
776 for &val in &quantized {
777 frequency[val as usize] += 1;
778 }
779
780 let len = quantized.len() as f32;
781 let mut entropy = 0.0;
782
783 for &freq in &frequency {
784 if freq > 0 {
785 let p = freq as f32 / len;
786 entropy -= p * p.log2();
787 }
788 }
789
790 entropy / 8.0 }
792
793 fn score_compression_pattern(&self, activations: &Array1<f32>) -> f32 {
794 let quantized: Vec<u8> = activations.iter().map(|&x| (x * 255.0) as u8).collect();
796
797 let mut compressed_size = 0;
798 let mut i = 0;
799
800 while i < quantized.len() {
801 let current = quantized[i];
802 let mut run_length = 1;
803
804 while i + run_length < quantized.len() && quantized[i + run_length] == current {
805 run_length += 1;
806 }
807
808 compressed_size += if run_length > 2 { 2 } else { run_length };
809 i += run_length;
810 }
811
812 1.0 - (compressed_size as f32 / quantized.len() as f32)
813 }
814
815 fn downsample(&self, data: &Array1<f32>, scale: usize, offset: usize) -> Vec<f32> {
816 data.iter().skip(offset).step_by(scale).cloned().collect()
817 }
818
819 fn calculate_similarity(&self, data1: &[f32], data2: &[f32]) -> f32 {
820 if data1.is_empty() || data2.is_empty() {
821 return 0.0;
822 }
823
824 let min_len = data1.len().min(data2.len());
825 let mut similarity = 0.0;
826
827 for i in 0..min_len {
828 similarity += 1.0 - (data1[i] - data2[i]).abs();
829 }
830
831 similarity / min_len as f32
832 }
833}
834
/// Aggregated result of a single `analyze_patterns` call.
#[derive(Debug, Clone)]
pub struct AdvancedPatternAnalysis {
    /// Score in [0, 1] per pattern family ("repetition", "entropy", ...).
    pub pattern_scores: HashMap<String, f32>,
    /// High-confidence patterns that also beat their historical best.
    pub emergent_patterns: Vec<EmergentPattern>,
    /// Pairs of pattern families that scored strongly together.
    pub meta_patterns: Vec<MetaPattern>,
    /// Weighted mean of the multiscale feature rows.
    pub complexity_index: f32,
    /// Fraction of bytes correctly predicted by a frequency model.
    pub predictability_score: f32,
    /// Suggested processing strategies derived from the scores.
    pub optimization_recommendations: Vec<OptimizationRecommendation>,
}
853
/// A pattern whose score exceeded 0.8 and beat its previous best by
/// more than 10% (see `is_novel_pattern`).
#[derive(Debug, Clone)]
pub struct EmergentPattern {
    /// Pattern family name (matches a `PatternNetwork`).
    pub pattern_type: String,
    /// Network score in [0, 1] at discovery time.
    pub confidence: f32,
    /// When the pattern was observed.
    pub discovered_at: Instant,
    /// Statistics of the data that triggered the discovery.
    pub data_characteristics: DataCharacteristics,
}
866
/// Co-occurrence of two strongly-scoring pattern families.
#[derive(Debug, Clone)]
pub struct MetaPattern {
    /// The two pattern family names involved.
    pub pattern_combination: Vec<String>,
    /// Geometric mean of the two scores.
    pub correlation_strength: f32,
    /// Qualitative interpretation of the combination.
    pub synergy_type: SynergyType,
}
877
/// Qualitative interpretation of a pattern-family pair.
#[derive(Debug, Clone)]
pub enum SynergyType {
    /// Repetition and compression patterns reinforce each other.
    ReinforcingCompression,
    /// Sequential structure contrasted with high entropy.
    ContrastedRandomness,
    /// Fractal self-similarity combined with periodicity.
    HierarchicalStructure,
    /// No known interpretation for the combination.
    Unknown,
}
890
/// A processing strategy suggested from the observed pattern scores.
#[derive(Debug, Clone)]
pub struct OptimizationRecommendation {
    /// Strategy identifier, e.g. "compression" or "streaming".
    pub optimization_type: String,
    /// Human-readable justification for the recommendation.
    pub reason: String,
    /// Rough estimated relative improvement in [0, 1].
    pub expected_improvement: f32,
    /// Confidence in [0, 1] that the recommendation applies.
    pub confidence: f32,
}
903
/// Rolling per-family statistics used for novelty detection.
#[derive(Debug, Clone)]
struct PatternMetadata {
    // Pattern family name this entry tracks.
    pattern_type: String,
    // Number of observations folded into the running averages.
    observation_count: usize,
    // Best score ever observed for this family.
    max_score: f32,
    // Running mean of all observed scores.
    avg_score: f32,
    // Timestamp of the most recent observation.
    last_seen: Instant,
    // Statistics of recent associated inputs, capped at 100 entries.
    associated_data_characteristics: Vec<DataCharacteristics>,
}
913
/// Lightweight statistical summary of a byte buffer.
#[derive(Debug, Clone)]
pub struct DataCharacteristics {
    /// Buffer length in bytes.
    pub size: usize,
    /// Shannon entropy normalized to [0, 1] (bits / 8).
    pub entropy: f32,
    /// Mean byte value.
    pub mean: f32,
    /// Variance of byte values.
    pub variance: f32,
}
926
/// A single recorded pattern observation (buffered for later analysis;
/// not populated by the code paths visible in this file).
#[derive(Debug, Clone)]
struct PatternInstance {
    // Pattern family name.
    pattern_type: String,
    // Score in [0, 1] at observation time.
    score: f32,
    // When the observation happened.
    timestamp: Instant,
    // Hash of the analyzed data.
    data_hash: u64,
}
934
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_advanced_pattern_recognizer_creation() {
        // The constructor wires up one network per pattern family.
        let recognizer = AdvancedPatternRecognizer::new();
        assert_eq!(recognizer.pattern_networks.len(), 5);
    }

    #[test]
    fn test_pattern_analysis() {
        let mut recognizer = AdvancedPatternRecognizer::new();
        // Three repetitions of the same ascending run.
        let sample: Vec<u8> = [1, 2, 3, 4, 5].repeat(3);

        let report = recognizer
            .analyze_patterns(&sample)
            .expect("Operation failed");

        assert!(!report.pattern_scores.is_empty());
        assert!((0.0..=1.0).contains(&report.complexity_index));
        assert!((0.0..=1.0).contains(&report.predictability_score));
    }

    #[test]
    fn test_multiscale_feature_extraction() {
        let recognizer = AdvancedPatternRecognizer::new();
        let sample: Vec<u8> = (1..=10).collect();

        let features = recognizer
            .extract_multiscale_features(&sample)
            .expect("Operation failed");

        // One feature row per scale, with at least one column.
        assert_eq!(features.nrows(), 4);
        assert!(features.ncols() > 0);
    }

    #[test]
    fn test_lempel_ziv_complexity() {
        let recognizer = AdvancedPatternRecognizer::new();

        // A constant buffer must parse into fewer phrases than a
        // buffer of all-distinct bytes.
        let constant = vec![1u8; 8];
        let distinct: Vec<u8> = (1..=8).collect();

        let low = recognizer.calculate_lempel_ziv_complexity(&constant);
        let high = recognizer.calculate_lempel_ziv_complexity(&distinct);

        assert!(high > low);
    }

    #[test]
    fn test_pattern_network() {
        let mut network = PatternNetwork::new("test", 10, 5, 3);
        let mut rng = scirs2_core::random::rng();
        let features = Array2::from_shape_fn((2, 5), |_| rng.random::<f32>());

        let score = network.analyze(&features).expect("Operation failed");
        assert!((0.0..=1.0).contains(&score));
    }
}
994}