1use crate::traits::{ProgressSnapshot, TimeRange, UserProgress};
8use crate::FeedbackError;
9use chrono::{DateTime, Utc};
10use serde::{Deserialize, Serialize};
11use std::collections::{HashMap, VecDeque};
12use std::time::Duration;
13
/// Direction of a performance trend computed from snapshot history.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum TrendDirection {
    /// Scores are rising over the observed window.
    Improving,
    /// Scores show no meaningful change.
    Stable,
    /// Scores are falling over the observed window.
    Declining,
}
24
/// Central analytics engine: collects raw metrics, maintains per-name
/// running aggregates, and enforces memory bounds via periodic cleanup.
#[derive(Debug, Clone)]
pub struct ComprehensiveAnalyticsFramework {
    /// Tuning knobs (capacities, retention window, cleanup cadence).
    config: AnalyticsConfig,
    /// Bounded FIFO store of raw metrics.
    metrics: MemoryBoundedMetrics,
    /// Running aggregates (count/sum/min/max/...) keyed by metric name.
    aggregated_metrics: HashMap<String, AggregatedMetric>,
    /// Timestamp of the most recent cleanup pass.
    last_cleanup: DateTime<Utc>,
}
37
38impl Default for ComprehensiveAnalyticsFramework {
39 fn default() -> Self {
40 Self::new()
41 }
42}
43
44impl ComprehensiveAnalyticsFramework {
45 #[must_use]
47 pub fn new() -> Self {
48 let config = AnalyticsConfig::default();
49 Self {
50 metrics: MemoryBoundedMetrics::new(config.max_metrics_capacity),
51 aggregated_metrics: HashMap::new(),
52 last_cleanup: Utc::now(),
53 config,
54 }
55 }
56
57 #[must_use]
59 pub fn with_config(config: AnalyticsConfig) -> Self {
60 Self {
61 metrics: MemoryBoundedMetrics::new(config.max_metrics_capacity),
62 aggregated_metrics: HashMap::new(),
63 last_cleanup: Utc::now(),
64 config,
65 }
66 }
67
68 pub async fn generate_analytics_report(
70 &self,
71 progress: &UserProgress,
72 time_range: Option<TimeRange>,
73 ) -> Result<ComprehensiveAnalyticsReport, FeedbackError> {
74 let now = Utc::now();
75 let range = time_range.unwrap_or(TimeRange {
76 start: now - chrono::Duration::days(30),
77 end: now,
78 });
79
80 let mut metrics = vec![
82 AnalyticsMetric {
83 name: "overall_skill_level".to_string(),
84 value: f64::from(progress.overall_skill_level),
85 timestamp: now,
86 metric_type: MetricType::Gauge,
87 },
88 AnalyticsMetric {
89 name: "total_sessions".to_string(),
90 value: progress.training_stats.total_sessions as f64,
91 timestamp: now,
92 metric_type: MetricType::Counter,
93 },
94 AnalyticsMetric {
95 name: "success_rate".to_string(),
96 value: f64::from(progress.training_stats.success_rate),
97 timestamp: now,
98 metric_type: MetricType::Gauge,
99 },
100 AnalyticsMetric {
101 name: "average_improvement".to_string(),
102 value: f64::from(progress.training_stats.average_improvement),
103 timestamp: now,
104 metric_type: MetricType::Gauge,
105 },
106 AnalyticsMetric {
107 name: "current_streak".to_string(),
108 value: progress.training_stats.current_streak as f64,
109 timestamp: now,
110 metric_type: MetricType::Counter,
111 },
112 AnalyticsMetric {
113 name: "longest_streak".to_string(),
114 value: progress.training_stats.longest_streak as f64,
115 timestamp: now,
116 metric_type: MetricType::Counter,
117 },
118 ];
119
120 for (focus_area, &skill_level) in &progress.skill_breakdown {
122 metrics.push(AnalyticsMetric {
123 name: format!("skill_{focus_area:?}").to_lowercase(),
124 value: f64::from(skill_level),
125 timestamp: now,
126 metric_type: MetricType::Gauge,
127 });
128 }
129
130 let relevant_history: Vec<_> = progress
132 .progress_history
133 .iter()
134 .filter(|snapshot| snapshot.timestamp >= range.start && snapshot.timestamp <= range.end)
135 .collect();
136
137 let trend_analytics = self.calculate_trend_analytics(&relevant_history);
139
140 metrics.push(AnalyticsMetric {
142 name: "improvement_velocity".to_string(),
143 value: f64::from(trend_analytics.improvement_velocity),
144 timestamp: now,
145 metric_type: MetricType::Gauge,
146 });
147
148 metrics.push(AnalyticsMetric {
149 name: "performance_stability".to_string(),
150 value: f64::from(trend_analytics.performance_stability),
151 timestamp: now,
152 metric_type: MetricType::Gauge,
153 });
154
155 let summary = AnalyticsSummary {
156 total_metrics: metrics.len(),
157 average_value: if metrics.is_empty() {
158 0.0
159 } else {
160 metrics.iter().map(|m| m.value).sum::<f64>() / metrics.len() as f64
161 },
162 time_range: range,
163 };
164
165 Ok(ComprehensiveAnalyticsReport {
166 timestamp: now,
167 metrics,
168 summary,
169 })
170 }
171
    /// Tests whether a metric change over a time period is statistically
    /// significant.
    ///
    /// NOTE(review): this is currently a stub — all inputs are ignored and a
    /// fixed result (p = 0.05, significant, 95% confidence, effect 0.3) is
    /// returned regardless of the data.
    ///
    /// # Errors
    ///
    /// Currently always returns `Ok`.
    pub async fn test_statistical_significance(
        &self,
        _progress: &UserProgress,
        _metric: AnalyticsMetric,
        _time_period: Duration,
    ) -> Result<StatisticalSignificanceResult, FeedbackError> {
        // Placeholder values; no actual significance test is performed.
        Ok(StatisticalSignificanceResult {
            p_value: 0.05,
            is_significant: true,
            confidence_level: 0.95,
            effect_size: 0.3,
        })
    }
187
188 pub async fn generate_comparative_analysis(
190 &self,
191 user_progress_data: &[UserProgress],
192 _metric: AnalyticsMetric,
193 _time_range: Option<TimeRange>,
194 ) -> Result<ComparativeAnalyticsResult, FeedbackError> {
195 if user_progress_data.len() < 2 {
196 return Err(FeedbackError::ProgressTrackingError {
197 message: "Need at least 2 users for comparative analysis".to_string(),
198 source: None,
199 });
200 }
201
202 let baseline_value = f64::from(user_progress_data[0].overall_skill_level);
203 let comparison_value = f64::from(user_progress_data[1].overall_skill_level);
204 let percentage_change = ((comparison_value - baseline_value) / baseline_value) * 100.0;
205
206 Ok(ComparativeAnalyticsResult {
207 baseline_value,
208 comparison_value,
209 percentage_change,
210 statistical_significance: StatisticalSignificanceResult {
211 p_value: 0.05,
212 is_significant: percentage_change.abs() > 10.0,
213 confidence_level: 0.95,
214 effect_size: 0.3,
215 },
216 })
217 }
218
219 pub async fn collect_longitudinal_data(
221 &self,
222 _study_id: &str,
223 participant_data: &[UserProgress],
224 _tracking_metrics: &[AnalyticsMetric],
225 ) -> Result<LongitudinalStudyData, FeedbackError> {
226 let now = Utc::now();
227 let start = now - chrono::Duration::days(90);
228
229 let data_points: Vec<LongitudinalDataPoint> = participant_data
230 .iter()
231 .enumerate()
232 .map(|(i, progress)| LongitudinalDataPoint {
233 timestamp: start + chrono::Duration::days(i as i64),
234 value: f64::from(progress.overall_skill_level),
235 metadata: HashMap::new(),
236 })
237 .collect();
238
239 Ok(LongitudinalStudyData {
240 study_period: TimeRange { start, end: now },
241 data_points,
242 trend_analysis: TrendAnalysis {
243 trend_direction: TrendDirection::Improving,
244 slope: 0.01,
245 r_squared: 0.8,
246 },
247 })
248 }
249
    /// Derives trend statistics from a slice of progress snapshots.
    ///
    /// Computes:
    /// - `improvement_velocity`: mean of successive score deltas;
    /// - `performance_stability`: 1 / (1 + coefficient of variation),
    ///   so 1.0 means perfectly steady scores;
    /// - `slope` / `r_squared`: ordinary least-squares fit of score against
    ///   snapshot index (0, 1, 2, ...);
    /// - `trend_direction`: slope thresholded at ±0.02.
    ///
    /// Fewer than two snapshots yields a neutral all-zero result.
    /// NOTE(review): assumes `history` is in chronological order — TODO
    /// confirm with the caller's snapshot ordering.
    fn calculate_trend_analytics(&self, history: &[&ProgressSnapshot]) -> TrendAnalytics {
        // Deltas and a regression both need at least two points.
        if history.len() < 2 {
            return TrendAnalytics {
                improvement_velocity: 0.0,
                performance_stability: 0.0,
                trend_direction: TrendDirection::Stable,
                slope: 0.0,
                r_squared: 0.0,
            };
        }

        let scores: Vec<f32> = history.iter().map(|s| s.overall_score).collect();

        // Successive deltas between consecutive snapshots.
        let improvements: Vec<f32> = scores
            .windows(2)
            .map(|window| window[1] - window[0])
            .collect();

        // Average per-snapshot improvement.
        let improvement_velocity = if improvements.is_empty() {
            0.0
        } else {
            improvements.iter().sum::<f32>() / improvements.len() as f32
        };

        let mean_score = if scores.is_empty() {
            0.0
        } else {
            scores.iter().sum::<f32>() / scores.len() as f32
        };

        // Population variance (divides by n, not n - 1).
        let variance = if scores.len() > 1 {
            scores.iter().map(|s| (s - mean_score).powi(2)).sum::<f32>() / scores.len() as f32
        } else {
            0.0
        };

        // Stability = 1 / (1 + coefficient of variation); defaults to 1.0
        // when the mean or variance is non-positive.
        let performance_stability = if mean_score > 0.0 && variance > 0.0 {
            1.0 / (1.0 + (variance.sqrt() / mean_score))
        } else {
            1.0
        };

        // Ordinary least-squares fit of score against index 0..n-1.
        let n = scores.len() as f32;
        let x_mean = (n - 1.0) / 2.0; // mean of indices 0..n-1
        let y_mean = mean_score;

        let mut numerator = 0.0;
        let mut denominator = 0.0;

        for (i, &score) in scores.iter().enumerate() {
            let x_diff = i as f32 - x_mean;
            numerator += x_diff * (score - y_mean);
            denominator += x_diff * x_diff;
        }

        let slope = if denominator == 0.0 {
            0.0
        } else {
            numerator / denominator
        };

        // Coefficient of determination (R²) for the fitted line.
        let y_pred: Vec<f32> = (0..scores.len())
            .map(|i| y_mean + slope * (i as f32 - x_mean))
            .collect();

        let ss_res: f32 = scores
            .iter()
            .zip(y_pred.iter())
            .map(|(actual, predicted)| (actual - predicted).powi(2))
            .sum();

        let ss_tot: f32 = scores.iter().map(|score| (score - y_mean).powi(2)).sum();

        let r_squared = if ss_tot == 0.0 {
            0.0
        } else {
            1.0 - (ss_res / ss_tot)
        };

        // Classify direction with a ±0.02 dead band around zero slope.
        let trend_direction = if slope > 0.02 {
            TrendDirection::Improving
        } else if slope < -0.02 {
            TrendDirection::Declining
        } else {
            TrendDirection::Stable
        };

        TrendAnalytics {
            improvement_velocity,
            performance_stability,
            trend_direction,
            slope,
            r_squared,
        }
    }
350
351 pub fn cleanup_old_metrics(&mut self) {
353 let now = Utc::now();
354 let retention_duration = chrono::Duration::days(i64::from(self.config.data_retention_days));
355 let cutoff_time = now - retention_duration;
356
357 self.metrics.cleanup_before(cutoff_time);
359
360 self.aggregated_metrics
362 .retain(|_, metric| metric.last_updated > cutoff_time);
363
364 if self.aggregated_metrics.len() > self.config.max_aggregated_metrics {
366 let mut metrics_by_age: Vec<_> = self
367 .aggregated_metrics
368 .iter()
369 .map(|(k, v)| (k.clone(), v.last_updated))
370 .collect();
371
372 metrics_by_age.sort_by_key(|(_, timestamp)| *timestamp);
374
375 let to_remove = self.aggregated_metrics.len() - self.config.max_aggregated_metrics;
377 for (key, _) in metrics_by_age.into_iter().take(to_remove) {
378 self.aggregated_metrics.remove(&key);
379 }
380 }
381
382 self.last_cleanup = now;
383 }
384
385 #[must_use]
387 pub fn needs_cleanup(&self) -> bool {
388 let now = Utc::now();
389 let cleanup_interval =
390 chrono::Duration::minutes(i64::from(self.config.cleanup_interval_minutes));
391 let time_based_cleanup = now.signed_duration_since(self.last_cleanup) > cleanup_interval;
392
393 let memory_stats = self.get_memory_stats();
395 let memory_based_cleanup =
396 memory_stats.memory_utilization > self.config.memory_cleanup_threshold;
397
398 time_based_cleanup || memory_based_cleanup
399 }
400
401 pub fn add_metric(&mut self, name: String, metric: AnalyticsMetric) {
403 if self.needs_cleanup() {
405 self.cleanup_old_metrics();
406 }
407
408 let memory_stats = self.get_memory_stats();
410 if memory_stats.memory_utilization >= 1.0 {
411 self.cleanup_old_metrics();
413 }
414
415 self.metrics.insert(name.clone(), metric.clone());
417
418 self.update_aggregated_metric(name, &metric);
420 }
421
422 fn update_aggregated_metric(&mut self, name: String, metric: &AnalyticsMetric) {
424 if !self.config.enable_auto_aggregation {
426 return;
427 }
428
429 let aggregated = self
430 .aggregated_metrics
431 .entry(name)
432 .or_insert_with(|| AggregatedMetric {
433 name: metric.name.clone(),
434 count: 0,
435 sum: 0.0,
436 sum_of_squares: 0.0,
437 min: f64::INFINITY,
438 max: f64::NEG_INFINITY,
439 last_updated: metric.timestamp,
440 metric_type: metric.metric_type.clone(),
441 });
442
443 aggregated.count += 1;
445 aggregated.sum += metric.value;
446 aggregated.sum_of_squares += metric.value * metric.value;
447 aggregated.min = aggregated.min.min(metric.value);
448 aggregated.max = aggregated.max.max(metric.value);
449 aggregated.last_updated = metric.timestamp;
450 }
451
452 #[must_use]
454 pub fn get_memory_stats(&self) -> MemoryStats {
455 let metrics_count = self.metrics.len();
456 let aggregated_count = self.aggregated_metrics.len();
457
458 let estimated_metrics_bytes = metrics_count * std::mem::size_of::<AnalyticsMetric>();
460 let estimated_aggregated_bytes = aggregated_count * std::mem::size_of::<AggregatedMetric>();
461 let total_estimated_bytes = estimated_metrics_bytes + estimated_aggregated_bytes;
462
463 MemoryStats {
464 total_metrics: metrics_count,
465 aggregated_metrics: aggregated_count,
466 estimated_memory_bytes: total_estimated_bytes,
467 memory_limit_bytes: self.config.memory_limit_bytes,
468 memory_utilization: if self.config.memory_limit_bytes > 0 {
469 total_estimated_bytes as f64 / self.config.memory_limit_bytes as f64
470 } else {
471 0.0
472 },
473 }
474 }
475}
476
/// Tuning parameters for [`ComprehensiveAnalyticsFramework`].
#[derive(Debug, Clone)]
pub struct AnalyticsConfig {
    /// Whether detailed analytics collection is enabled.
    pub enable_detailed_analytics: bool,
    /// How long raw metrics are retained, in days.
    pub data_retention_days: u32,
    /// Maximum entries in the bounded raw-metric store.
    pub max_metrics_capacity: usize,
    /// Memory budget (bytes) used to compute utilization.
    pub memory_limit_bytes: usize,
    /// Minimum minutes between time-based cleanup passes.
    pub cleanup_interval_minutes: u32,
    /// Utilization fraction above which cleanup is triggered.
    pub memory_cleanup_threshold: f64,
    /// Whether metrics are folded into running aggregates on insert.
    pub enable_auto_aggregation: bool,
    /// Maximum number of per-name aggregates kept.
    pub max_aggregated_metrics: usize,
}
497
498impl Default for AnalyticsConfig {
499 fn default() -> Self {
500 Self {
501 enable_detailed_analytics: true,
502 data_retention_days: 90,
503 max_metrics_capacity: 10_000,
504 memory_limit_bytes: 50 * 1024 * 1024, cleanup_interval_minutes: 60, memory_cleanup_threshold: 0.8, enable_auto_aggregation: true,
508 max_aggregated_metrics: 1_000,
509 }
510 }
511}
512
/// A single named measurement captured at a point in time.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalyticsMetric {
    /// Metric identifier, e.g. `"success_rate"`.
    pub name: String,
    /// Observed value.
    pub value: f64,
    /// When the observation was made.
    pub timestamp: DateTime<Utc>,
    /// How the value should be interpreted (counter, gauge, ...).
    pub metric_type: MetricType,
}
525
/// Interpretation of a metric's value.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum MetricType {
    /// Accumulating count.
    Counter,
    /// Point-in-time level that can go up or down.
    Gauge,
    /// Distribution of observed values.
    Histogram,
    /// Duration measurement.
    Timer,
}
538
/// Output of `generate_analytics_report`: the metrics plus a summary.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComprehensiveAnalyticsReport {
    /// When the report was generated.
    pub timestamp: DateTime<Utc>,
    /// All metrics computed for the report.
    pub metrics: Vec<AnalyticsMetric>,
    /// Roll-up statistics over `metrics`.
    pub summary: AnalyticsSummary,
}
549
/// Roll-up statistics for a batch of metrics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalyticsSummary {
    /// Number of metrics in the report.
    pub total_metrics: usize,
    /// Mean of all metric values (0.0 when there are none).
    pub average_value: f64,
    /// Window the report covers.
    pub time_range: TimeRange,
}
560
/// Outcome of a statistical significance test.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StatisticalSignificanceResult {
    /// Probability of observing the data under the null hypothesis.
    pub p_value: f64,
    /// Whether the result is considered significant.
    pub is_significant: bool,
    /// Confidence level used for the test (e.g. 0.95).
    pub confidence_level: f64,
    /// Magnitude of the measured effect.
    pub effect_size: f64,
}
573
/// Result of comparing one user's value against a baseline user.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComparativeAnalyticsResult {
    /// Value for the baseline (first) user.
    pub baseline_value: f64,
    /// Value for the comparison (second) user.
    pub comparison_value: f64,
    /// Relative change from baseline to comparison, in percent.
    pub percentage_change: f64,
    /// Significance assessment of the difference.
    pub statistical_significance: StatisticalSignificanceResult,
}
586
/// Data collected for a longitudinal study over a time window.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LongitudinalStudyData {
    /// Window the study spans.
    pub study_period: TimeRange,
    /// Ordered observations collected during the study.
    pub data_points: Vec<LongitudinalDataPoint>,
    /// Trend fitted over the data points.
    pub trend_analysis: TrendAnalysis,
}
597
/// One observation in a longitudinal study.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LongitudinalDataPoint {
    /// When the observation was taken.
    pub timestamp: DateTime<Utc>,
    /// Observed value.
    pub value: f64,
    /// Free-form key/value annotations for this point.
    pub metadata: HashMap<String, String>,
}
608
/// Linear-trend summary of a data series.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrendAnalysis {
    /// Qualitative direction of the trend.
    pub trend_direction: TrendDirection,
    /// Slope of the fitted line.
    pub slope: f64,
    /// Goodness of fit (coefficient of determination).
    pub r_squared: f64,
}
619
/// Internal trend statistics computed from progress snapshots.
#[derive(Debug, Clone)]
pub struct TrendAnalytics {
    /// Mean score change between consecutive snapshots.
    pub improvement_velocity: f32,
    /// 1 / (1 + coefficient of variation); 1.0 = perfectly steady.
    pub performance_stability: f32,
    /// Qualitative direction derived from `slope`.
    pub trend_direction: TrendDirection,
    /// Least-squares slope of score vs. snapshot index.
    pub slope: f32,
    /// Goodness of fit of the regression line.
    pub r_squared: f32,
}
634
/// FIFO metric store with a fixed capacity and a name → position index.
///
/// When full, inserting evicts the oldest entry (queue front). `index`
/// maps each key to its position in `storage` for O(1) lookup.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryBoundedMetrics {
    /// Insertion-ordered (key, metric) pairs; front is oldest.
    storage: VecDeque<(String, AnalyticsMetric)>,
    /// Maximum number of entries retained.
    capacity: usize,
    /// Key → position in `storage`.
    index: HashMap<String, usize>,
}
645
646impl MemoryBoundedMetrics {
647 #[must_use]
649 pub fn new(capacity: usize) -> Self {
650 Self {
651 storage: VecDeque::with_capacity(capacity),
652 capacity,
653 index: HashMap::new(),
654 }
655 }
656
657 pub fn insert(&mut self, key: String, metric: AnalyticsMetric) {
659 if self.storage.len() >= self.capacity {
661 if let Some((old_key, _)) = self.storage.pop_front() {
662 self.index.remove(&old_key);
663 }
664 }
665
666 let new_index = self.storage.len();
668 self.storage.push_back((key.clone(), metric));
669 self.index.insert(key, new_index);
670
671 if self.storage.len() != self.index.len() {
673 self.rebuild_index();
674 }
675 }
676
677 #[must_use]
679 pub fn get(&self, key: &str) -> Option<&AnalyticsMetric> {
680 if let Some(&index) = self.index.get(key) {
681 if index < self.storage.len() {
682 return Some(&self.storage[index].1);
683 }
684 }
685 None
686 }
687
688 pub fn cleanup_before(&mut self, cutoff_time: DateTime<Utc>) {
690 let mut removed_count = 0;
691
692 while let Some((key, metric)) = self.storage.front() {
694 if metric.timestamp < cutoff_time {
695 let (removed_key, _) = self.storage.pop_front().unwrap();
696 self.index.remove(&removed_key);
697 removed_count += 1;
698 } else {
699 break;
700 }
701 }
702
703 if removed_count > 0 {
705 self.rebuild_index();
706 }
707 }
708
709 fn rebuild_index(&mut self) {
711 self.index.clear();
712 for (i, (key, _)) in self.storage.iter().enumerate() {
713 self.index.insert(key.clone(), i);
714 }
715 }
716
    /// Number of metrics currently stored.
    #[must_use]
    pub fn len(&self) -> usize {
        self.storage.len()
    }

    /// Returns `true` when no metrics are stored.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.storage.is_empty()
    }

    /// Maximum number of metrics this store will retain.
    #[must_use]
    pub fn capacity(&self) -> usize {
        self.capacity
    }
734}
735
/// Running aggregate over all observations of one metric name.
///
/// Stores sums rather than samples, so updates are O(1) and memory stays
/// constant regardless of observation count.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AggregatedMetric {
    /// Metric name this aggregate tracks.
    pub name: String,
    /// Number of observations folded in.
    pub count: u64,
    /// Sum of observed values.
    pub sum: f64,
    /// Sum of squared values (for variance).
    pub sum_of_squares: f64,
    /// Smallest observed value (+inf before the first observation).
    pub min: f64,
    /// Largest observed value (-inf before the first observation).
    pub max: f64,
    /// Timestamp of the most recent observation.
    pub last_updated: DateTime<Utc>,
    /// Interpretation of the underlying metric.
    pub metric_type: MetricType,
}
756
757impl AggregatedMetric {
758 #[must_use]
760 pub fn mean(&self) -> f64 {
761 if self.count > 0 {
762 self.sum / self.count as f64
763 } else {
764 0.0
765 }
766 }
767
768 #[must_use]
770 pub fn variance(&self) -> f64 {
771 if self.count > 1 {
772 let mean = self.mean();
773 (self.sum_of_squares - self.count as f64 * mean * mean) / (self.count - 1) as f64
774 } else {
775 0.0
776 }
777 }
778
779 #[must_use]
781 pub fn std_dev(&self) -> f64 {
782 self.variance().sqrt()
783 }
784}
785
/// Snapshot of estimated memory use for the analytics stores.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryStats {
    /// Raw metrics currently held.
    pub total_metrics: usize,
    /// Aggregated metrics currently held.
    pub aggregated_metrics: usize,
    /// Estimated bytes used (struct sizes only; heap payloads excluded).
    pub estimated_memory_bytes: usize,
    /// Configured memory budget in bytes.
    pub memory_limit_bytes: usize,
    /// `estimated / limit`; 0.0 when no limit is configured.
    pub memory_utilization: f64,
}