use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};

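/// Configuration for model behavior analysis. Each `enable_*` flag switches one
/// analysis pass on or off; the remaining fields are numeric thresholds used by
/// those passes (see the `Default` impl below for the default values).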
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BehaviorAnalysisConfig {
    pub enable_input_sensitivity: bool,
    pub enable_feature_importance: bool,
    pub enable_activation_patterns: bool,
    pub enable_dead_neuron_detection: bool,
    pub enable_correlation_analysis: bool,
    pub dead_neuron_threshold: f32,
    pub sensitivity_samples: usize,
    pub perturbation_magnitude: f32,
    pub correlation_threshold: f32,
}

impl Default for BehaviorAnalysisConfig {
    fn default() -> Self {
        Self {
            enable_input_sensitivity: true,
            enable_feature_importance: true,
            enable_activation_patterns: true,
            enable_dead_neuron_detection: true,
            enable_correlation_analysis: true,
            dead_neuron_threshold: 1e-6,
            sensitivity_samples: 100,
            perturbation_magnitude: 0.01,
            correlation_threshold: 0.5,
        }
    }
}

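/// Sensitivity of the model output to a single input dimension, ranked across
/// all recorded dimensions (rank 1 = most sensitive).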
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InputSensitivity {
    pub input_dimension: usize,
    pub sensitivity_score: f32,
    pub gradient_magnitude: f32,
    pub perturbation_impact: f32,
    pub rank: usize,
}

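/// Importance attributed to a single input feature, together with the
/// attribution method that produced the score (rank 1 = most important).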
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FeatureImportance {
    pub feature_id: String,
    pub importance_score: f32,
    pub attribution_method: AttributionMethod,
    pub confidence: f32,
    pub rank: usize,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AttributionMethod {
    GradientBased,
    PermutationImportance,
    ShapleySampling,
    IntegratedGradients,
    LimeApproximation,
}

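/// Activation behavior of a single neuron in a single layer, summarized over
/// all recorded activation batches.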
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NeuronActivationPattern {
    pub layer_id: String,
    pub neuron_id: usize,
    pub activation_statistics: ActivationStatistics,
    pub pattern_type: ActivationPatternType,
    pub stability_score: f32,
    pub selectivity_score: f32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ActivationStatistics {
    pub mean: f32,
    pub std: f32,
    pub min: f32,
    pub max: f32,
    pub percentile_25: f32,
    pub percentile_75: f32,
    pub skewness: f32,
    pub kurtosis: f32,
    pub sparsity: f32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ActivationPatternType {
    Normal,
    Saturated,
    Dead,
    Oscillating,
    Sparse,
    Dense,
    Bipolar,
}

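/// A neuron flagged as likely dead, together with a suggested repair action.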
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeadNeuronInfo {
    pub layer_id: String,
    pub neuron_id: usize,
    pub activation_level: f32,
    pub dead_probability: f32,
    pub suggested_action: NeuronRepairAction,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum NeuronRepairAction {
    Reinitialize,
    AdjustLearningRate,
    ChangeActivationFunction,
    AddNoise,
    Skip,
}

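/// Pairwise correlation structure of the tracked inputs: the full correlation
/// matrix, the pairs above the configured threshold, groups of highly
/// correlated (redundant) features, and features with no strong correlation
/// to any other.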
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CorrelationAnalysis {
    pub correlation_matrix: Vec<Vec<f32>>,
    pub significant_correlations: Vec<CorrelationPair>,
    pub redundant_features: Vec<FeatureGroup>,
    pub independent_features: Vec<usize>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CorrelationPair {
    pub feature_a: usize,
    pub feature_b: usize,
    pub correlation: f32,
    pub p_value: f32,
    pub relationship_type: CorrelationType,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CorrelationType {
    Strong,
    Moderate,
    Weak,
    None,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FeatureGroup {
    pub features: Vec<usize>,
    pub average_correlation: f32,
    pub group_importance: f32,
}

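/// Full output of a behavior analysis run: per-pass results plus an aggregate
/// summary and actionable recommendations.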
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BehaviorAnalysisReport {
    pub input_sensitivities: Vec<InputSensitivity>,
    pub feature_importances: Vec<FeatureImportance>,
    pub activation_patterns: Vec<NeuronActivationPattern>,
    pub dead_neurons: Vec<DeadNeuronInfo>,
    pub correlation_analysis: Option<CorrelationAnalysis>,
    pub behavior_summary: BehaviorSummary,
    pub recommendations: Vec<BehaviorRecommendation>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BehaviorSummary {
    pub total_neurons_analyzed: usize,
    pub dead_neuron_percentage: f32,
    pub average_activation_sparsity: f32,
    pub feature_distribution_entropy: f32,
    pub model_stability_score: f32,
    pub interpretability_score: f32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BehaviorRecommendation {
    pub category: RecommendationCategory,
    pub priority: Priority,
    pub description: String,
    pub implementation: String,
    pub expected_impact: f32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RecommendationCategory {
    Architecture,
    Training,
    Initialization,
    Regularization,
    DataPreprocessing,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Priority {
    Critical,
    High,
    Medium,
    Low,
}

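/// Collects activations and input gradients during inference or training and
/// turns them into a [`BehaviorAnalysisReport`].
///
/// A minimal usage sketch (illustrative only; assumes the caller already has
/// per-layer activations and per-input gradients, and an async runtime to
/// drive `analyze`):
///
/// ```ignore
/// let mut analyzer = BehaviorAnalyzer::new(BehaviorAnalysisConfig::default());
/// analyzer.record_activations("layer_0".to_string(), vec![0.1, 0.0, 0.7]);
/// analyzer.record_input_gradients("input_0".to_string(), vec![0.3, -0.2, 0.05]);
/// let report = analyzer.analyze().await?;
/// println!("dead neurons: {:.1}%", report.behavior_summary.dead_neuron_percentage);
/// ```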
#[derive(Debug)]
pub struct BehaviorAnalyzer {
    config: BehaviorAnalysisConfig,
    activation_history: HashMap<String, Vec<Vec<f32>>>,
    input_gradients: HashMap<String, Vec<f32>>,
    feature_attributions: HashMap<String, FeatureImportance>,
    analysis_cache: HashMap<String, BehaviorAnalysisReport>,
}

impl BehaviorAnalyzer {
    pub fn new(config: BehaviorAnalysisConfig) -> Self {
        Self {
            config,
            activation_history: HashMap::new(),
            input_gradients: HashMap::new(),
            feature_attributions: HashMap::new(),
            analysis_cache: HashMap::new(),
        }
    }

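    /// Appends one batch of activations for the given layer; batches are kept
    /// in insertion order and later analyzed per neuron.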
    pub fn record_activations(&mut self, layer_id: String, activations: Vec<f32>) {
        self.activation_history.entry(layer_id).or_default().push(activations);
    }

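    /// Stores the gradient of the output with respect to one tracked input;
    /// replaces any previously recorded gradients for the same `input_id`.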
    pub fn record_input_gradients(&mut self, input_id: String, gradients: Vec<f32>) {
        self.input_gradients.insert(input_id, gradients);
    }

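    /// Runs every analysis pass enabled in the config and assembles the final
    /// report, including the behavior summary and recommendations.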
    pub async fn analyze(&mut self) -> Result<BehaviorAnalysisReport> {
        let mut report = BehaviorAnalysisReport {
            input_sensitivities: Vec::new(),
            feature_importances: Vec::new(),
            activation_patterns: Vec::new(),
            dead_neurons: Vec::new(),
            correlation_analysis: None,
            behavior_summary: BehaviorSummary {
                total_neurons_analyzed: 0,
                dead_neuron_percentage: 0.0,
                average_activation_sparsity: 0.0,
                feature_distribution_entropy: 0.0,
                model_stability_score: 0.0,
                interpretability_score: 0.0,
            },
            recommendations: Vec::new(),
        };

        if self.config.enable_input_sensitivity {
            report.input_sensitivities = self.analyze_input_sensitivity().await?;
        }

        if self.config.enable_feature_importance {
            report.feature_importances = self.calculate_feature_importance().await?;
        }

        if self.config.enable_activation_patterns {
            report.activation_patterns = self.analyze_activation_patterns().await?;
        }

        if self.config.enable_dead_neuron_detection {
            report.dead_neurons = self.detect_dead_neurons().await?;
        }

        if self.config.enable_correlation_analysis {
            report.correlation_analysis = Some(self.perform_correlation_analysis().await?);
        }

        self.generate_behavior_summary(&mut report);
        self.generate_recommendations(&mut report);

        Ok(report)
    }

    async fn analyze_input_sensitivity(&self) -> Result<Vec<InputSensitivity>> {
        let mut sensitivities = Vec::new();

        for gradients in self.input_gradients.values() {
            for (dim, &gradient) in gradients.iter().enumerate() {
                let sensitivity_score = gradient.abs();
                let gradient_magnitude = gradient.abs();

                let perturbation_impact = self.estimate_perturbation_impact(gradient, dim);

                sensitivities.push(InputSensitivity {
                    input_dimension: dim,
                    sensitivity_score,
                    gradient_magnitude,
                    perturbation_impact,
                    rank: 0, // assigned after sorting below
                });
            }
        }

        sensitivities.sort_by(|a, b| {
            b.sensitivity_score
                .partial_cmp(&a.sensitivity_score)
                .unwrap_or(std::cmp::Ordering::Equal)
        });
        for (rank, sensitivity) in sensitivities.iter_mut().enumerate() {
            sensitivity.rank = rank + 1;
        }

        Ok(sensitivities)
    }

    fn estimate_perturbation_impact(&self, gradient: f32, _dimension: usize) -> f32 {
        gradient.abs() * self.config.perturbation_magnitude
    }

    async fn calculate_feature_importance(&self) -> Result<Vec<FeatureImportance>> {
        let mut importances = Vec::new();

        for (input_id, gradients) in &self.input_gradients {
            let total_gradient = gradients.iter().map(|g| g.abs()).sum::<f32>();
            let importance_score = total_gradient / gradients.len() as f32;

            importances.push(FeatureImportance {
                feature_id: input_id.clone(),
                importance_score,
                attribution_method: AttributionMethod::GradientBased,
                confidence: self.calculate_attribution_confidence(importance_score),
                rank: 0, // assigned after sorting below
            });
        }

        importances.sort_by(|a, b| {
            b.importance_score
                .partial_cmp(&a.importance_score)
                .unwrap_or(std::cmp::Ordering::Equal)
        });
        for (rank, importance) in importances.iter_mut().enumerate() {
            importance.rank = rank + 1;
        }

        Ok(importances)
    }

    fn calculate_attribution_confidence(&self, score: f32) -> f32 {
        (score.tanh() * 0.5 + 0.5).min(1.0)
    }

    async fn analyze_activation_patterns(&self) -> Result<Vec<NeuronActivationPattern>> {
        let mut patterns = Vec::new();

        for (layer_id, activation_history) in &self.activation_history {
            if activation_history.is_empty() {
                continue;
            }

            let neuron_count = activation_history[0].len();

            for neuron_id in 0..neuron_count {
                let neuron_activations: Vec<f32> = activation_history
                    .iter()
                    .map(|batch| batch.get(neuron_id).copied().unwrap_or(0.0))
                    .collect();

                let statistics = self.compute_activation_statistics(&neuron_activations);
                let pattern_type = self.classify_activation_pattern(&statistics);
                let stability_score = self.compute_stability_score(&neuron_activations);
                let selectivity_score = self.compute_selectivity_score(&neuron_activations);

                patterns.push(NeuronActivationPattern {
                    layer_id: layer_id.clone(),
                    neuron_id,
                    activation_statistics: statistics,
                    pattern_type,
                    stability_score,
                    selectivity_score,
                });
            }
        }

        Ok(patterns)
    }

    fn compute_activation_statistics(&self, activations: &[f32]) -> ActivationStatistics {
        if activations.is_empty() {
            return ActivationStatistics {
                mean: 0.0,
                std: 0.0,
                min: 0.0,
                max: 0.0,
                percentile_25: 0.0,
                percentile_75: 0.0,
                skewness: 0.0,
                kurtosis: 0.0,
                sparsity: 1.0,
            };
        }

        let mean = activations.iter().sum::<f32>() / activations.len() as f32;
        let variance =
            activations.iter().map(|&x| (x - mean).powi(2)).sum::<f32>() / activations.len() as f32;
        let std = variance.sqrt();

        let mut sorted_activations = activations.to_vec();
        sorted_activations.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));

        let min = sorted_activations[0];
        let max = sorted_activations[sorted_activations.len() - 1];
        let percentile_25 = sorted_activations[sorted_activations.len() / 4];
        let percentile_75 = sorted_activations[3 * sorted_activations.len() / 4];

        let skewness = if std > 0.0 {
            activations.iter().map(|&x| ((x - mean) / std).powi(3)).sum::<f32>()
                / activations.len() as f32
        } else {
            0.0
        };

        let kurtosis = if std > 0.0 {
            activations.iter().map(|&x| ((x - mean) / std).powi(4)).sum::<f32>()
                / activations.len() as f32
                - 3.0
        } else {
            0.0
        };

        let near_zero_count = activations
            .iter()
            .filter(|&&x| x.abs() < self.config.dead_neuron_threshold)
            .count();
        let sparsity = near_zero_count as f32 / activations.len() as f32;

        ActivationStatistics {
            mean,
            std,
            min,
            max,
            percentile_25,
            percentile_75,
            skewness,
            kurtosis,
            sparsity,
        }
    }

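    /// Maps summary statistics to a coarse pattern label; the sparsity,
    /// saturation, and variability cut-offs below are heuristic.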
    fn classify_activation_pattern(&self, stats: &ActivationStatistics) -> ActivationPatternType {
        if stats.sparsity > 0.9 {
            ActivationPatternType::Dead
        } else if stats.sparsity > 0.7 {
            ActivationPatternType::Sparse
        } else if stats.max > 0.95 && stats.mean > 0.8 {
            ActivationPatternType::Saturated
        } else if stats.std / stats.mean.abs().max(1e-8) > 2.0 {
            ActivationPatternType::Oscillating
        } else if stats.mean.abs() > 0.1 && stats.mean * stats.min < 0.0 {
            ActivationPatternType::Bipolar
        } else if stats.sparsity < 0.3 {
            ActivationPatternType::Dense
        } else {
            ActivationPatternType::Normal
        }
    }

    fn compute_stability_score(&self, activations: &[f32]) -> f32 {
        if activations.len() < 2 {
            return 0.0;
        }

        let mean = activations.iter().sum::<f32>() / activations.len() as f32;
        let variance =
            activations.iter().map(|&x| (x - mean).powi(2)).sum::<f32>() / activations.len() as f32;

        if mean.abs() > 1e-8 {
            1.0 / (1.0 + variance.sqrt() / mean.abs())
        } else {
            0.0
        }
    }

    fn compute_selectivity_score(&self, activations: &[f32]) -> f32 {
        if activations.is_empty() {
            return 0.0;
        }

        let max_activation = activations.iter().fold(0.0f32, |a, &b| a.max(b.abs()));
        let mean_activation =
            activations.iter().map(|x| x.abs()).sum::<f32>() / activations.len() as f32;

        if max_activation > 1e-8 {
            1.0 - (mean_activation / max_activation)
        } else {
            0.0
        }
    }

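    /// Flags neurons whose mean absolute activation is well below
    /// `dead_neuron_threshold` (dead probability above 0.5) and suggests a
    /// repair action for each.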
    async fn detect_dead_neurons(&self) -> Result<Vec<DeadNeuronInfo>> {
        let mut dead_neurons = Vec::new();

        for (layer_id, activation_history) in &self.activation_history {
            if activation_history.is_empty() {
                continue;
            }

            let neuron_count = activation_history[0].len();

            for neuron_id in 0..neuron_count {
                let neuron_activations: Vec<f32> = activation_history
                    .iter()
                    .map(|batch| batch.get(neuron_id).copied().unwrap_or(0.0))
                    .collect();

                let activation_level = neuron_activations.iter().map(|x| x.abs()).sum::<f32>()
                    / neuron_activations.len() as f32;

                let dead_probability = if activation_level < self.config.dead_neuron_threshold {
                    1.0 - (activation_level / self.config.dead_neuron_threshold)
                } else {
                    0.0
                };

                if dead_probability > 0.5 {
                    let suggested_action =
                        self.suggest_neuron_repair_action(activation_level, &neuron_activations);

                    dead_neurons.push(DeadNeuronInfo {
                        layer_id: layer_id.clone(),
                        neuron_id,
                        activation_level,
                        dead_probability,
                        suggested_action,
                    });
                }
            }
        }

        Ok(dead_neurons)
    }

    fn suggest_neuron_repair_action(
        &self,
        activation_level: f32,
        activations: &[f32],
    ) -> NeuronRepairAction {
        if activation_level < self.config.dead_neuron_threshold * 0.1 {
            NeuronRepairAction::Reinitialize
        } else if activation_level < self.config.dead_neuron_threshold * 0.5 {
            let variance =
                activations.iter().map(|&x| x.powi(2)).sum::<f32>() / activations.len() as f32;
            if variance < 1e-10 {
                NeuronRepairAction::AddNoise
            } else {
                NeuronRepairAction::AdjustLearningRate
            }
        } else {
            NeuronRepairAction::ChangeActivationFunction
        }
    }

    async fn perform_correlation_analysis(&self) -> Result<CorrelationAnalysis> {
        let gradient_vectors: Vec<&Vec<f32>> = self.input_gradients.values().collect();

        if gradient_vectors.len() < 2 {
            return Ok(CorrelationAnalysis {
                correlation_matrix: Vec::new(),
                significant_correlations: Vec::new(),
                redundant_features: Vec::new(),
                independent_features: Vec::new(),
            });
        }

        let n = gradient_vectors.len();
        let mut correlation_matrix = vec![vec![0.0; n]; n];
        let mut significant_correlations = Vec::new();

        for i in 0..n {
            for j in i..n {
                let correlation =
                    self.compute_correlation(gradient_vectors[i], gradient_vectors[j]);
                correlation_matrix[i][j] = correlation;
                correlation_matrix[j][i] = correlation;

                if i != j && correlation.abs() > self.config.correlation_threshold {
                    let correlation_type = if correlation.abs() > 0.8 {
                        CorrelationType::Strong
                    } else if correlation.abs() > 0.5 {
                        CorrelationType::Moderate
                    } else {
                        CorrelationType::Weak
                    };

                    significant_correlations.push(CorrelationPair {
                        feature_a: i,
                        feature_b: j,
                        correlation,
                        p_value: 0.01, // fixed placeholder; no significance test is computed
                        relationship_type: correlation_type,
                    });
                }
            }
        }

        let redundant_features = self.find_redundant_feature_groups(&correlation_matrix);

        let independent_features = self.find_independent_features(&correlation_matrix);

        Ok(CorrelationAnalysis {
            correlation_matrix,
            significant_correlations,
            redundant_features,
            independent_features,
        })
    }

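    /// Pearson correlation coefficient between two equally sized slices;
    /// returns 0.0 for mismatched lengths, empty input, or (near-)zero variance.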
    fn compute_correlation(&self, x: &[f32], y: &[f32]) -> f32 {
        if x.len() != y.len() || x.is_empty() {
            return 0.0;
        }

        let n = x.len() as f32;
        let mean_x = x.iter().sum::<f32>() / n;
        let mean_y = y.iter().sum::<f32>() / n;

        let numerator: f32 =
            x.iter().zip(y.iter()).map(|(&xi, &yi)| (xi - mean_x) * (yi - mean_y)).sum();

        let sum_sq_x: f32 = x.iter().map(|&xi| (xi - mean_x).powi(2)).sum();
        let sum_sq_y: f32 = y.iter().map(|&yi| (yi - mean_y).powi(2)).sum();

        let denominator = (sum_sq_x * sum_sq_y).sqrt();

        if denominator > 1e-8 {
            numerator / denominator
        } else {
            0.0
        }
    }

    fn find_redundant_feature_groups(&self, correlation_matrix: &[Vec<f32>]) -> Vec<FeatureGroup> {
        let mut groups = Vec::new();
        let mut visited = HashSet::new();

        for i in 0..correlation_matrix.len() {
            if visited.contains(&i) {
                continue;
            }

            let mut group = vec![i];
            let mut group_correlations = Vec::new();

            for j in (i + 1)..correlation_matrix.len() {
                if correlation_matrix[i][j].abs() > 0.7 {
                    group.push(j);
                    group_correlations.push(correlation_matrix[i][j].abs());
                    visited.insert(j);
                }
            }

            if group.len() > 1 {
                let average_correlation =
                    group_correlations.iter().sum::<f32>() / group_correlations.len() as f32;
                groups.push(FeatureGroup {
                    features: group,
                    average_correlation,
                    group_importance: average_correlation,
                });
            }

            visited.insert(i);
        }

        groups
    }

    fn find_independent_features(&self, correlation_matrix: &[Vec<f32>]) -> Vec<usize> {
        let mut independent = Vec::new();

        for i in 0..correlation_matrix.len() {
            let max_correlation = correlation_matrix[i]
                .iter()
                .enumerate()
                .filter(|(j, _)| *j != i)
                .map(|(_, &corr)| corr.abs())
                .fold(0.0f32, |a, b| a.max(b));

            if max_correlation < self.config.correlation_threshold {
                independent.push(i);
            }
        }

        independent
    }

    fn generate_behavior_summary(&self, report: &mut BehaviorAnalysisReport) {
        let total_neurons = report.activation_patterns.len();
        let dead_neurons = report.dead_neurons.len();

        report.behavior_summary.total_neurons_analyzed = total_neurons;
        report.behavior_summary.dead_neuron_percentage = if total_neurons > 0 {
            (dead_neurons as f32 / total_neurons as f32) * 100.0
        } else {
            0.0
        };

        if !report.activation_patterns.is_empty() {
            report.behavior_summary.average_activation_sparsity = report
                .activation_patterns
                .iter()
                .map(|p| p.activation_statistics.sparsity)
                .sum::<f32>()
                / report.activation_patterns.len() as f32;

            report.behavior_summary.model_stability_score =
                report.activation_patterns.iter().map(|p| p.stability_score).sum::<f32>()
                    / report.activation_patterns.len() as f32;
        }

        if !report.feature_importances.is_empty() {
            let total_importance: f32 =
                report.feature_importances.iter().map(|f| f.importance_score).sum();

            if total_importance > 0.0 {
                let entropy: f32 = report
                    .feature_importances
                    .iter()
                    .map(|f| {
                        let p = f.importance_score / total_importance;
                        if p > 0.0 {
                            -p * p.log2()
                        } else {
                            0.0
                        }
                    })
                    .sum();
                report.behavior_summary.feature_distribution_entropy = entropy;
            }
        }

        report.behavior_summary.interpretability_score =
            (report.behavior_summary.model_stability_score * 0.4
                + (1.0 - report.behavior_summary.dead_neuron_percentage / 100.0) * 0.3
                + (1.0 - report.behavior_summary.average_activation_sparsity) * 0.3)
                .max(0.0)
                .min(1.0);
    }

    fn generate_recommendations(&self, report: &mut BehaviorAnalysisReport) {
        if report.behavior_summary.dead_neuron_percentage > 20.0 {
            report.recommendations.push(BehaviorRecommendation {
                category: RecommendationCategory::Training,
                priority: Priority::Critical,
                description: format!("High percentage of dead neurons detected ({:.1}%)",
                    report.behavior_summary.dead_neuron_percentage),
                implementation: "Consider reducing learning rate, changing initialization, or adding batch normalization".to_string(),
                expected_impact: 0.8,
            });
        }

        if report.behavior_summary.average_activation_sparsity > 0.8 {
            report.recommendations.push(BehaviorRecommendation {
                category: RecommendationCategory::Architecture,
                priority: Priority::High,
                description: "Very sparse activations detected; the model may be under-utilized".to_string(),
                implementation: "Consider reducing model capacity or adjusting activation functions".to_string(),
                expected_impact: 0.6,
            });
        }

        if report.behavior_summary.model_stability_score < 0.5 {
            report.recommendations.push(BehaviorRecommendation {
                category: RecommendationCategory::Training,
                priority: Priority::High,
                description: "Low model stability detected".to_string(),
                implementation: "Consider adding regularization, reducing learning rate, or using gradient clipping".to_string(),
                expected_impact: 0.7,
            });
        }

        if report.feature_importances.len() > 10 {
            let top_features = &report.feature_importances[..5];
            let bottom_features =
                &report.feature_importances[report.feature_importances.len() - 5..];

            let top_importance: f32 = top_features.iter().map(|f| f.importance_score).sum();
            let bottom_importance: f32 = bottom_features.iter().map(|f| f.importance_score).sum();

            if top_importance > bottom_importance * 10.0 {
                report.recommendations.push(BehaviorRecommendation {
                    category: RecommendationCategory::DataPreprocessing,
                    priority: Priority::Medium,
                    description: "Highly imbalanced feature importance detected".to_string(),
                    implementation: "Consider feature selection or dimensionality reduction"
                        .to_string(),
                    expected_impact: 0.5,
                });
            }
        }
    }

    pub async fn generate_report(&self) -> Result<BehaviorAnalysisReport> {
        let mut temp_analyzer = BehaviorAnalyzer {
            config: self.config.clone(),
            activation_history: self.activation_history.clone(),
            input_gradients: self.input_gradients.clone(),
            feature_attributions: self.feature_attributions.clone(),
            analysis_cache: HashMap::new(),
        };

        temp_analyzer.analyze().await
    }

    pub fn clear(&mut self) {
        self.activation_history.clear();
        self.input_gradients.clear();
        self.feature_attributions.clear();
        self.analysis_cache.clear();
    }

    pub fn get_analysis_summary(&self) -> AnalysisSummary {
        AnalysisSummary {
            total_layers_tracked: self.activation_history.len(),
            total_activation_samples: self
                .activation_history
                .values()
                .map(|history| history.len())
                .sum(),
            total_inputs_tracked: self.input_gradients.len(),
            analysis_coverage: if self.activation_history.is_empty() {
                0.0
            } else {
                1.0
            },
        }
    }
}

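/// Lightweight snapshot of how much data the analyzer is currently tracking.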
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalysisSummary {
    pub total_layers_tracked: usize,
    pub total_activation_samples: usize,
    pub total_inputs_tracked: usize,
    pub analysis_coverage: f32,
}