
voirs_feedback/progress/types.rs

//! Core types and data structures for progress tracking
//!
//! This module contains the fundamental types, enums, and data structures
//! used throughout the progress tracking system.

use crate::adaptive::RecommendationType;
use crate::traits::{
    Achievement, AchievementTier, FocusArea, Goal, GoalMetric, ProgressSnapshot, TimeRange,
};
use crate::FeedbackError;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, VecDeque};
use std::time::Duration;

// ============================================================================
// Analytics Types
// ============================================================================

/// Configuration for analytics system
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalyticsConfig {
    /// Whether to enable detailed analytics
    pub enable_detailed_analytics: bool,
    /// Retention period for analytics data
    pub data_retention_days: u32,
    /// Maximum number of metrics to store in memory
    pub max_metrics_capacity: usize,
    /// Memory limit in bytes for analytics storage
    pub memory_limit_bytes: usize,
    /// Cleanup interval in minutes
    pub cleanup_interval_minutes: u32,
    /// Memory usage threshold for triggering cleanup (0.0 to 1.0)
    pub memory_cleanup_threshold: f64,
    /// Enable automatic aggregation of metrics
    pub enable_auto_aggregation: bool,
    /// Maximum number of aggregated metrics to keep
    pub max_aggregated_metrics: usize,
}

impl Default for AnalyticsConfig {
    fn default() -> Self {
        Self {
            enable_detailed_analytics: true,
            data_retention_days: 90,
            max_metrics_capacity: 10_000,
            memory_limit_bytes: 50 * 1024 * 1024, // 50MB
            cleanup_interval_minutes: 60,         // 1 hour
            memory_cleanup_threshold: 0.8,        // 80% memory usage
            enable_auto_aggregation: true,
            max_aggregated_metrics: 1_000,
        }
    }
}
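
// Illustrative sketch (not part of the original module): overriding a couple of
// `AnalyticsConfig` fields while keeping the remaining defaults via struct
// update syntax. The chosen values are arbitrary example numbers.
#[allow(dead_code)]
fn example_analytics_config() -> AnalyticsConfig {
    AnalyticsConfig {
        data_retention_days: 30,
        memory_limit_bytes: 10 * 1024 * 1024, // 10MB instead of the default 50MB
        ..AnalyticsConfig::default()
    }
}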

/// Analytics metric definition
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalyticsMetric {
    /// Metric name
    pub name: String,
    /// Metric value
    pub value: f64,
    /// Metric timestamp
    pub timestamp: DateTime<Utc>,
    /// Metric type
    pub metric_type: MetricType,
}

/// Metric type enumeration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum MetricType {
    /// Counter metric
    Counter,
    /// Gauge metric
    Gauge,
    /// Histogram metric
    Histogram,
    /// Timer metric
    Timer,
}
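
// Illustrative sketch (not part of the original module): constructing a single
// `AnalyticsMetric` sample. The metric name and value are assumptions chosen
// purely for demonstration.
#[allow(dead_code)]
fn example_analytics_metric() -> AnalyticsMetric {
    AnalyticsMetric {
        name: String::from("session_quality"),
        value: 0.87,
        timestamp: Utc::now(),
        metric_type: MetricType::Gauge,
    }
}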

/// Comprehensive analytics report
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComprehensiveAnalyticsReport {
    /// Report timestamp
    pub timestamp: DateTime<Utc>,
    /// Metrics included in the report
    pub metrics: Vec<AnalyticsMetric>,
    /// Summary statistics
    pub summary: AnalyticsSummary,
}

/// Analytics summary
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalyticsSummary {
    /// Total metrics count
    pub total_metrics: usize,
    /// Average metric value
    pub average_value: f64,
    /// Time range covered
    pub time_range: TimeRange,
}

/// Statistical significance result
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StatisticalSignificanceResult {
    /// P-value of the test
    pub p_value: f64,
    /// Whether result is statistically significant
    pub is_significant: bool,
    /// Confidence level used
    pub confidence_level: f64,
    /// Effect size
    pub effect_size: f64,
}

/// Comparative analytics result
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComparativeAnalyticsResult {
    /// Baseline metric value
    pub baseline_value: f64,
    /// Comparison metric value
    pub comparison_value: f64,
    /// Percentage change
    pub percentage_change: f64,
    /// Statistical significance
    pub statistical_significance: StatisticalSignificanceResult,
}
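
// Illustrative sketch (not part of the original module): deriving the
// `percentage_change` field from a baseline and comparison value. The
// significance result below is a placeholder, not a real statistical test.
#[allow(dead_code)]
fn example_comparative_result(baseline: f64, comparison: f64) -> ComparativeAnalyticsResult {
    let percentage_change = if baseline != 0.0 {
        (comparison - baseline) / baseline * 100.0
    } else {
        0.0
    };
    ComparativeAnalyticsResult {
        baseline_value: baseline,
        comparison_value: comparison,
        percentage_change,
        statistical_significance: StatisticalSignificanceResult {
            p_value: 1.0,
            is_significant: false,
            confidence_level: 0.95,
            effect_size: 0.0,
        },
    }
}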

/// Longitudinal study data
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LongitudinalStudyData {
    /// Study period
    pub study_period: TimeRange,
    /// Data points collected
    pub data_points: Vec<LongitudinalDataPoint>,
    /// Trend analysis
    pub trend_analysis: TrendAnalysis,
}

/// Longitudinal data point
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LongitudinalDataPoint {
    /// Data point timestamp
    pub timestamp: DateTime<Utc>,
    /// Metric value
    pub value: f64,
    /// Associated metadata
    pub metadata: HashMap<String, String>,
}

/// Trend analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrendAnalysis {
    /// Overall trend direction
    pub trend_direction: crate::progress::analytics::TrendDirection,
    /// Trend slope
    pub slope: f64,
    /// Coefficient of determination (R-squared)
    pub r_squared: f64,
}

/// Advanced trend analytics
#[derive(Debug, Clone)]
pub struct TrendAnalytics {
    /// Rate of improvement over time
    pub improvement_velocity: f32,
    /// Stability of performance (inverse of variation)
    pub performance_stability: f32,
    /// Overall trend direction
    pub trend_direction: crate::progress::analytics::TrendDirection,
    /// Linear regression slope
    pub slope: f32,
    /// Coefficient of determination (R-squared)
    pub r_squared: f32,
}
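
// Illustrative sketch (not part of the original module): one way the `slope`
// and `r_squared` fields could be computed with ordinary least squares over
// (time index, score) pairs. This is the generic textbook formula, not the
// crate's actual analytics implementation.
#[allow(dead_code)]
fn example_linear_trend(values: &[f32]) -> (f32, f32) {
    if values.len() < 2 {
        return (0.0, 0.0);
    }
    let n = values.len() as f32;
    let mean_x = (n - 1.0) / 2.0;
    let mean_y = values.iter().sum::<f32>() / n;
    let (mut ss_xy, mut ss_xx, mut ss_yy) = (0.0f32, 0.0f32, 0.0f32);
    for (i, &y) in values.iter().enumerate() {
        let dx = i as f32 - mean_x;
        let dy = y - mean_y;
        ss_xy += dx * dy;
        ss_xx += dx * dx;
        ss_yy += dy * dy;
    }
    let slope = if ss_xx > 0.0 { ss_xy / ss_xx } else { 0.0 };
    let r_squared = if ss_xx > 0.0 && ss_yy > 0.0 {
        (ss_xy * ss_xy) / (ss_xx * ss_yy)
    } else {
        0.0
    };
    (slope, r_squared)
}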

// ============================================================================
// Achievement and Progress Analysis Types
// ============================================================================

/// Achievement definition with unlock conditions
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AchievementDefinition {
    /// Unique achievement ID
    pub id: String,
    /// Achievement name
    pub name: String,
    /// Description
    pub description: String,
    /// Condition to unlock
    pub condition: AchievementCondition,
    /// Achievement tier
    pub tier: AchievementTier,
    /// Points awarded
    pub points: u32,
}

/// Achievement unlock conditions
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AchievementCondition {
    /// Complete a number of sessions
    SessionCount(usize),
    /// Reach a skill level
    SkillLevel(f32),
    /// Maintain a streak
    Streak(usize),
    /// Master a specific area
    AreaMastery(FocusArea, f32),
    /// Total training time
    TrainingTime(Duration),
}
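
// Illustrative sketch (not part of the original module): estimating fractional
// progress toward an unlock condition from simple aggregate numbers. Only the
// `SessionCount` and `Streak` variants are handled; the inputs are assumptions.
#[allow(dead_code)]
fn example_condition_progress(
    condition: &AchievementCondition,
    sessions_completed: usize,
    current_streak: usize,
) -> f32 {
    match condition {
        AchievementCondition::SessionCount(target) => {
            (sessions_completed as f32 / *target as f32).min(1.0)
        }
        AchievementCondition::Streak(target) => {
            (current_streak as f32 / *target as f32).min(1.0)
        }
        // The remaining variants need skill and training-time statistics that
        // this sketch does not model.
        _ => 0.0,
    }
}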

/// Detailed progress report
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DetailedProgressReport {
    /// User ID
    pub user_id: String,
    /// Report period
    pub period: TimeRange,
    /// Overall improvement
    pub overall_improvement: f32,
    /// Area-specific improvements
    pub area_improvements: HashMap<FocusArea, f32>,
    /// Skill trends
    pub skill_trends: HashMap<FocusArea, crate::progress::analytics::TrendDirection>,
    /// Session analytics
    pub session_analytics: crate::progress::metrics::SessionAnalytics,
    /// Consistency metrics
    pub consistency_metrics: crate::progress::metrics::ConsistencyMetrics,
    /// Achievement progress
    pub achievement_progress: Vec<AchievementAnalysis>,
    /// Goal analysis
    pub goal_analysis: Vec<GoalAnalysis>,
    /// Recommendations
    pub recommendations: Vec<ProgressRecommendation>,
    /// Comparative analysis
    pub comparative_analysis: ComparativeAnalysis,
}

/// Achievement analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AchievementAnalysis {
    /// Achievement ID
    pub achievement_id: String,
    /// Achievement name
    pub name: String,
    /// Current progress [0.0, 1.0]
    pub current_progress: f32,
    /// Whether unlocked
    pub is_unlocked: bool,
    /// Estimated time to unlock
    pub estimated_time_to_unlock: Option<Duration>,
}

/// Goal analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GoalAnalysis {
    /// Goal being analyzed
    pub goal: Goal,
    /// Current value
    pub current_value: f32,
    /// Progress percentage
    pub progress_percentage: f32,
    /// Whether on track
    pub on_track: bool,
    /// Estimated completion
    pub estimated_completion: Option<DateTime<Utc>>,
}

/// Progress recommendation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProgressRecommendation {
    /// Type of recommendation
    pub recommendation_type: RecommendationType,
    /// Recommendation title
    pub title: String,
    /// Detailed description
    pub description: String,
    /// Priority level [0.0, 1.0]
    pub priority: f32,
    /// Estimated impact [0.0, 1.0]
    pub estimated_impact: f32,
    /// Suggested actions
    pub suggested_actions: Vec<String>,
}

/// Comparative analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComparativeAnalysis {
    /// User's percentile ranking
    pub user_percentile: f32,
    /// Average user score
    pub average_user_score: f32,
    /// User's current score
    pub user_score: f32,
    /// Improvement rate vs average
    pub improvement_rate_vs_average: f32,
    /// User's strengths vs peers
    pub strengths_vs_peers: Vec<String>,
    /// Areas for improvement
    pub areas_for_improvement: Vec<String>,
}

/// Learning pattern analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LearningPatternAnalysis {
    /// Learning velocity
    pub learning_velocity: f32,
    /// Optimal session length
    pub optimal_session_length: Duration,
    /// Peak performance times
    pub peak_performance_times: Vec<PeakPerformanceTime>,
    /// Difficulty preference
    pub difficulty_preference: DifficultyPreference,
    /// Focus area patterns
    pub focus_area_patterns: HashMap<FocusArea, FocusPattern>,
    /// Consistency patterns
    pub consistency_patterns: ConsistencyPattern,
    /// Learning plateaus
    pub improvement_plateaus: Vec<LearningPlateau>,
    /// Learning style indicators
    pub learning_style_indicators: LearningStyleProfile,
}

/// Peak performance time
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PeakPerformanceTime {
    /// Time range description
    pub time_range: String,
    /// Performance boost factor
    pub performance_boost: f32,
    /// Confidence in this finding
    pub confidence: f32,
}

/// Difficulty preference profile
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DifficultyPreference {
    /// Preferred difficulty level [0.0, 1.0]
    pub preferred_level: f32,
    /// Adaptability to difficulty changes
    pub adaptability: f32,
    /// Challenge seeking tendency
    pub challenge_seeking: f32,
}

/// Focus pattern for specific areas
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FocusPattern {
    /// How often this area receives attention
    pub attention_frequency: f32,
    /// Rate of improvement in this area
    pub improvement_rate: f32,
    /// Tendency to plateau in this area
    pub plateau_tendency: f32,
}

/// Consistency pattern analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConsistencyPattern {
    /// Overall consistency score
    pub overall_consistency: f32,
    /// Performance variance
    pub performance_variance: f32,
    /// Tendency to maintain streaks
    pub streak_tendency: f32,
}

/// Learning plateau identification
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LearningPlateau {
    /// When plateau started
    pub start_date: DateTime<Utc>,
    /// When plateau ended (if applicable)
    pub end_date: DateTime<Utc>,
    /// Skill area affected
    pub skill_area: FocusArea,
    /// Plateau performance level
    pub plateau_level: f32,
    /// Suggestions to break through
    pub breakthrough_suggestions: Vec<String>,
}

/// Learning style profile
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LearningStyleProfile {
    /// Visual learning preference
    pub visual_preference: f32,
    /// Auditory learning preference
    pub auditory_preference: f32,
    /// Kinesthetic learning preference
    pub kinesthetic_preference: f32,
    /// Preference for structured approach
    pub structured_preference: f32,
    /// Preference for experimental approach
    pub experimental_preference: f32,
}

/// Achievement progress tracking
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AchievementProgress {
    /// Achievement definition
    pub achievement_definition: AchievementDefinition,
    /// Current progress [0.0, 1.0]
    pub current_progress: f32,
    /// Whether unlocked
    pub is_unlocked: bool,
    /// Unlock date if unlocked
    pub unlock_date: Option<DateTime<Utc>>,
}

// ============================================================================
// Milestone and Streak Types
// ============================================================================

/// Adaptive milestone system
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AdaptiveMilestone {
    /// Unique milestone identifier
    pub milestone_id: String,
    /// Milestone title
    pub title: String,
    /// Detailed description
    pub description: String,
    /// Type of milestone
    pub milestone_type: MilestoneType,
    /// Achievement criteria
    pub criteria: MilestoneCriteria,
    /// Estimated time to completion
    pub estimated_duration: Duration,
    /// Difficulty level [0.0, 1.0]
    pub difficulty: f32,
    /// Motivational impact assessment [0.0, 1.0]
    pub motivational_impact: f32,
    /// Personalized message for user
    pub personalized_message: String,
    /// Prerequisites needed
    pub prerequisites: Vec<String>,
    /// Rewards for completion
    pub rewards: Vec<MilestoneReward>,
    /// Creation timestamp
    pub created_at: DateTime<Utc>,
    /// Achievement timestamp
    pub achieved_at: Option<DateTime<Utc>>,
}

/// Types of milestones
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum MilestoneType {
    /// Skill improvement milestone
    SkillImprovement,
    /// Consistency goal
    ConsistencyGoal,
    /// Achievement unlock
    AchievementGoal,
    /// Overall progress milestone
    ProgressGoal,
    /// Custom milestone
    Custom,
}

/// Milestone achievement criteria
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum MilestoneCriteria {
    /// Reach specific skill level in focus area
    SkillLevel {
        /// The focus area to improve
        focus_area: FocusArea,
        /// Target skill level to achieve
        target_level: f32,
    },
    /// Complete number of sessions
    SessionCount {
        /// Target number of sessions
        target_sessions: usize,
    },
    /// Maintain streak
    Streak {
        /// Target streak length
        target_streak: usize,
    },
    /// Reach overall skill level
    OverallSkill {
        /// Target overall skill level
        target_level: f32,
    },
    /// Complete training time
    TrainingTime {
        /// Target training duration
        target_duration: Duration,
    },
}
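
// Illustrative sketch (not part of the original module): checking whether a
// milestone criterion is already satisfied by a user's aggregate numbers. The
// parameter names are assumptions; skill-level variants are not modelled here.
#[allow(dead_code)]
fn example_criteria_met(
    criteria: &MilestoneCriteria,
    total_sessions: usize,
    current_streak: usize,
    total_training: Duration,
) -> bool {
    match criteria {
        MilestoneCriteria::SessionCount { target_sessions } => total_sessions >= *target_sessions,
        MilestoneCriteria::Streak { target_streak } => current_streak >= *target_streak,
        MilestoneCriteria::TrainingTime { target_duration } => total_training >= *target_duration,
        // Skill-based criteria would need the user's per-area skill levels.
        _ => false,
    }
}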

/// Milestone rewards
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum MilestoneReward {
    /// Points awarded
    Points(u32),
    /// Badge unlocked
    Badge(String),
    /// Feature unlocked
    UnlockFeature(String),
    /// Certificate earned
    Certificate(String),
    /// Custom reward
    Custom(String),
}

/// Comprehensive streak analysis system
#[derive(Debug, Clone, Default)]
pub struct ComprehensiveStreakAnalysis {
    /// Current active streaks
    pub current_streaks: HashMap<StreakType, CurrentStreak>,
    /// Historical streak data
    pub historical_streaks: Vec<HistoricalStreak>,
    /// Streak patterns and insights
    pub streak_patterns: StreakPatterns,
    /// Motivation maintenance strategies
    pub motivation_maintenance: MotivationMaintenance,
    /// Recovery mechanism recommendations
    pub recovery_mechanisms: Vec<RecoveryMechanism>,
    /// Achievement potential assessment
    pub achievement_potential: f32,
}

/// Types of streaks tracked
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum StreakType {
    /// Daily practice streak
    Practice,
    /// Quality performance streak
    Quality,
    /// Improvement streak
    Improvement,
    /// Consistency streak
    Consistency,
}

/// Current active streak
#[derive(Debug, Clone)]
pub struct CurrentStreak {
    /// Type of streak
    pub streak_type: StreakType,
    /// Current streak count
    pub current_count: usize,
    /// When streak started
    pub start_date: DateTime<Utc>,
    /// Last activity timestamp
    pub last_activity: DateTime<Utc>,
    /// Streak strength assessment [0.0, 1.0]
    pub strength: f32,
}

/// Historical streak record
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HistoricalStreak {
    /// Type of streak
    pub streak_type: StreakType,
    /// Length of the streak
    pub length: usize,
    /// Start date
    pub start_date: DateTime<Utc>,
    /// End date
    pub end_date: DateTime<Utc>,
    /// Peak performance during streak
    pub peak_performance: f32,
    /// Reason for streak break
    pub break_reason: StreakBreakReason,
}

/// Streak break reasons
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum StreakBreakReason {
    /// Time constraints
    TimeConstraints,
    /// Lack of motivation
    LackOfMotivation,
    /// Technical issues
    Technical,
    /// External circumstances
    External,
    /// Planned break
    Planned,
    /// Unknown reason
    Unknown,
    /// Streak is ongoing
    Ongoing,
}

/// Streak patterns analysis
#[derive(Debug, Clone, Default)]
pub struct StreakPatterns {
    /// Average streak length
    pub average_streak_length: f32,
    /// Longest streak ever achieved
    pub longest_streak_ever: usize,
    /// Common reasons for streak breaks
    pub common_break_reasons: Vec<StreakBreakReason>,
    /// Optimal session times for streaks
    pub optimal_session_times: Vec<String>,
    /// Seasonal variations in streak performance
    pub seasonal_variations: HashMap<String, f32>,
}

/// Motivation maintenance strategies
#[derive(Debug, Clone, Default)]
pub struct MotivationMaintenance {
    /// Current motivation level [0.0, 1.0]
    pub current_motivation_level: f32,
    /// Motivation trend (positive = improving)
    pub motivation_trend: f32,
    /// Burnout risk assessment [0.0, 1.0]
    pub burnout_risk: f32,
    /// Suggested engagement strategies
    pub engagement_strategies: Vec<String>,
}

/// Recovery mechanism for broken streaks
#[derive(Debug, Clone)]
pub struct RecoveryMechanism {
    /// Type of recovery strategy
    pub strategy_type: RecoveryStrategyType,
    /// Description of the mechanism
    pub description: String,
    /// Estimated effectiveness [0.0, 1.0]
    pub estimated_effectiveness: f32,
    /// Time commitment required
    pub time_commitment: Duration,
}

/// Recovery strategy types
#[derive(Debug, Clone)]
pub enum RecoveryStrategyType {
    /// Gradual return to practice
    GradualReturn,
    /// Motivation boost activities
    MotivationBoost,
    /// Social support engagement
    SocialSupport,
    /// Routine adjustment
    RoutineAdjustment,
    /// Goal modification
    GoalModification,
}

/// Streak recovery plan
#[derive(Debug, Clone)]
pub struct StreakRecoveryPlan {
    /// Primary recovery strategy
    pub recovery_strategy: RecoveryStrategyType,
    /// Specific actions to take
    pub suggested_actions: Vec<String>,
    /// Motivation boosting messages
    pub motivation_boosters: Vec<String>,
    /// Recommended milestone adjustments
    pub milestone_adjustments: Vec<String>,
    /// Estimated time to recover streak
    pub estimated_recovery_time: Duration,
    /// Probability of successful recovery [0.0, 1.0]
    pub success_probability: f32,
}

// ============================================================================
// Memory Optimization Types
// ============================================================================

/// Memory-bounded metrics storage with LRU eviction
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryBoundedMetrics {
    /// Internal storage with bounded capacity
    storage: VecDeque<(String, AnalyticsMetric)>,
    /// Maximum capacity
    capacity: usize,
    /// Quick lookup index
    index: HashMap<String, usize>,
}

impl MemoryBoundedMetrics {
    /// Create new memory-bounded metrics storage
    #[must_use]
    pub fn new(capacity: usize) -> Self {
        Self {
            storage: VecDeque::with_capacity(capacity),
            capacity,
            index: HashMap::new(),
        }
    }

    /// Insert a metric, evicting the oldest entry when at capacity
    pub fn insert(&mut self, key: String, metric: AnalyticsMetric) {
        // If at capacity, remove the oldest entry (front of the queue)
        if self.storage.len() >= self.capacity {
            if let Some((old_key, _)) = self.storage.pop_front() {
                self.index.remove(&old_key);
            }
            // Eviction shifts every remaining entry one position to the left,
            // so the cached positions must be rebuilt before they are reused.
            self.rebuild_index();
        }

        // Add the new entry at the back and record its position
        let new_index = self.storage.len();
        self.storage.push_back((key.clone(), metric));
        self.index.insert(key, new_index);
    }

    /// Get metric by key
    #[must_use]
    pub fn get(&self, key: &str) -> Option<&AnalyticsMetric> {
        if let Some(&index) = self.index.get(key) {
            if index < self.storage.len() {
                return Some(&self.storage[index].1);
            }
        }
        None
    }

    /// Remove metrics recorded before the given timestamp
    pub fn cleanup_before(&mut self, cutoff_time: DateTime<Utc>) {
        let mut removed_count = 0;

        // Remove old entries from the front; entries are kept in insertion order
        while let Some((_, metric)) = self.storage.front() {
            if metric.timestamp < cutoff_time {
                let (removed_key, _) = self.storage.pop_front().unwrap();
                self.index.remove(&removed_key);
                removed_count += 1;
            } else {
                break;
            }
        }

        // Rebuild index if we removed items
        if removed_count > 0 {
            self.rebuild_index();
        }
    }

    /// Rebuild index after modifications
    fn rebuild_index(&mut self) {
        self.index.clear();
        for (i, (key, _)) in self.storage.iter().enumerate() {
            self.index.insert(key.clone(), i);
        }
    }

    /// Get number of stored metrics
    #[must_use]
    pub fn len(&self) -> usize {
        self.storage.len()
    }

    /// Check if empty
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.storage.is_empty()
    }

    /// Get capacity
    #[must_use]
    pub fn capacity(&self) -> usize {
        self.capacity
    }
}
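
// Illustrative usage sketch (not part of the original module): inserting past
// capacity evicts the oldest entry, and `cleanup_before` drops stale samples.
// The keys, values, and two-entry capacity are arbitrary assumptions.
#[allow(dead_code)]
fn example_memory_bounded_metrics() {
    let mut metrics = MemoryBoundedMetrics::new(2);
    for (key, value) in [("cpu", 0.42), ("latency", 12.5), ("errors", 1.0)] {
        metrics.insert(
            key.to_string(),
            AnalyticsMetric {
                name: key.to_string(),
                value,
                timestamp: Utc::now(),
                metric_type: MetricType::Gauge,
            },
        );
    }
    // "cpu" was the oldest entry and has been evicted to honour the capacity.
    assert!(metrics.get("cpu").is_none());
    assert_eq!(metrics.len(), 2);
    // Dropping everything older than a future cutoff leaves the storage empty.
    metrics.cleanup_before(Utc::now() + chrono::Duration::seconds(1));
    assert!(metrics.is_empty());
}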

/// Aggregated metric for long-term storage
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AggregatedMetric {
    /// Metric name
    pub name: String,
    /// Count of data points
    pub count: u64,
    /// Sum of all values
    pub sum: f64,
    /// Sum of squares for variance calculation
    pub sum_of_squares: f64,
    /// Minimum value
    pub min: f64,
    /// Maximum value
    pub max: f64,
    /// Last update timestamp
    pub last_updated: DateTime<Utc>,
    /// Metric type
    pub metric_type: MetricType,
}

impl AggregatedMetric {
    /// Calculate mean
    #[must_use]
    pub fn mean(&self) -> f64 {
        if self.count > 0 {
            self.sum / self.count as f64
        } else {
            0.0
        }
    }

    /// Calculate variance
    #[must_use]
    pub fn variance(&self) -> f64 {
        if self.count > 1 {
            let mean = self.mean();
            // Clamp at zero: floating-point rounding can make this expression
            // slightly negative, which would turn `std_dev` into NaN.
            ((self.sum_of_squares - self.count as f64 * mean * mean) / (self.count - 1) as f64)
                .max(0.0)
        } else {
            0.0
        }
    }

    /// Calculate standard deviation
    #[must_use]
    pub fn std_dev(&self) -> f64 {
        self.variance().sqrt()
    }
}
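
// Illustrative sketch (not part of the original module): folding a new sample
// into an `AggregatedMetric` so that `mean`, `variance`, and `std_dev` can be
// recovered later without keeping every raw value. The helper name is an
// assumption, not an existing API.
#[allow(dead_code)]
fn example_fold_sample(aggregate: &mut AggregatedMetric, value: f64) {
    aggregate.count += 1;
    aggregate.sum += value;
    aggregate.sum_of_squares += value * value;
    aggregate.min = aggregate.min.min(value);
    aggregate.max = aggregate.max.max(value);
    aggregate.last_updated = Utc::now();
}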

/// Memory usage statistics
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryStats {
    /// Total number of metrics
    pub total_metrics: usize,
    /// Number of aggregated metrics
    pub aggregated_metrics: usize,
    /// Estimated memory usage in bytes
    pub estimated_memory_bytes: usize,
    /// Memory limit in bytes
    pub memory_limit_bytes: usize,
    /// Memory utilization as a fraction (0.0 to 1.0)
    pub memory_utilization: f64,
}

/// Memory-optimized circular buffer for historical data
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CircularProgressHistory {
    /// Fixed-size buffer for progress snapshots
    buffer: Vec<ProgressSnapshot>,
    /// Current write position
    write_pos: usize,
    /// Number of items written (for determining if buffer is full)
    items_written: usize,
    /// Maximum capacity
    capacity: usize,
}

impl CircularProgressHistory {
    /// Create new circular buffer with specified capacity
    #[must_use]
    pub fn new(capacity: usize) -> Self {
        let mut buffer = Vec::with_capacity(capacity);
        buffer.resize_with(capacity, || ProgressSnapshot {
            timestamp: Utc::now(),
            overall_score: 0.0,
            area_scores: HashMap::new(),
            session_count: 0,
            events: Vec::new(),
        });

        Self {
            buffer,
            write_pos: 0,
            items_written: 0,
            capacity,
        }
    }

    /// Add new progress snapshot, overwriting oldest if at capacity
    pub fn push(&mut self, snapshot: ProgressSnapshot) {
        self.buffer[self.write_pos] = snapshot;
        self.write_pos = (self.write_pos + 1) % self.capacity;
        self.items_written += 1;
    }

    /// Get most recent snapshots up to specified count
    #[must_use]
    pub fn get_recent(&self, count: usize) -> Vec<ProgressSnapshot> {
        let actual_count = count.min(self.len());
        let mut result = Vec::with_capacity(actual_count);

        for i in 0..actual_count {
            let pos = if self.write_pos > i {
                self.write_pos - i - 1
            } else {
                self.capacity - (i + 1 - self.write_pos)
            };
            result.push(self.buffer[pos].clone());
        }

        result
    }

    /// Get number of items stored (up to capacity)
    #[must_use]
    pub fn len(&self) -> usize {
        self.items_written.min(self.capacity)
    }

    /// Check if buffer is empty
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.items_written == 0
    }

    /// Get memory usage in bytes (approximate)
    #[must_use]
    pub fn memory_usage(&self) -> usize {
        self.capacity * std::mem::size_of::<ProgressSnapshot>()
    }
}
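
// Illustrative usage sketch (not part of the original module): the ring buffer
// keeps only the newest `capacity` snapshots and `get_recent` returns them
// newest-first. Snapshot field values are placeholder assumptions.
#[allow(dead_code)]
fn example_circular_history() {
    let mut history = CircularProgressHistory::new(3);
    for _ in 0..5 {
        history.push(ProgressSnapshot {
            timestamp: Utc::now(),
            overall_score: 0.0,
            area_scores: HashMap::new(),
            session_count: 0,
            events: Vec::new(),
        });
    }
    // Five pushes into a capacity-3 buffer leave only the last three snapshots.
    assert_eq!(history.len(), 3);
    let recent = history.get_recent(2);
    assert_eq!(recent.len(), 2);
}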

/// Memory-optimized progress tracking
#[derive(Debug, Clone)]
pub struct MemoryOptimizedProgress {
    /// User ID
    pub user_id: String,
    /// Current overall skill level
    pub overall_skill_level: f32,
    /// Compressed skill statistics instead of full breakdown
    pub skill_stats: HashMap<FocusArea, crate::progress::skills::CompressedSkillStats>,
    /// Circular buffer for recent progress history
    pub progress_history: CircularProgressHistory,
    /// Compressed achievement data
    pub achievement_summary: AchievementSummary,
    /// Essential training stats only
    pub training_stats: EssentialTrainingStats,
    /// Last updated timestamp
    pub last_updated: DateTime<Utc>,
}

/// Essential training statistics for memory efficiency
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EssentialTrainingStats {
    /// Total sessions
    pub total_sessions: u32,
    /// Total practice time in seconds
    pub total_practice_time_secs: u64,
    /// Current streak
    pub current_streak: u16,
    /// Best streak ever
    pub best_streak: u16,
    /// Average session duration in seconds
    pub avg_session_duration_secs: u32,
}

/// Compressed achievement summary
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AchievementSummary {
    /// Total achievements earned
    pub total_earned: u16,
    /// Achievements by tier
    pub by_tier: HashMap<AchievementTier, u16>,
    /// Recent achievements (last 5)
    pub recent: Vec<String>,
}

/// Memory management utilities for analytics
pub struct AnalyticsMemoryManager {
    /// Memory limit in bytes
    memory_limit: usize,
    /// Current memory usage estimate
    current_usage: usize,
    /// Cleanup threshold
    cleanup_threshold: f64, // Fraction (0.0 to 1.0)
}

impl AnalyticsMemoryManager {
    /// Create new memory manager with specified limit
    #[must_use]
    pub fn new(memory_limit_mb: usize) -> Self {
        Self {
            memory_limit: memory_limit_mb * 1024 * 1024,
            current_usage: 0,
            cleanup_threshold: 0.8, // Cleanup when 80% full
        }
    }

    /// Update current memory usage estimate
    pub fn update_usage(&mut self, new_usage: usize) {
        self.current_usage = new_usage;
    }

    /// Check if cleanup is needed
    #[must_use]
    pub fn needs_cleanup(&self) -> bool {
        self.current_usage as f64 / self.memory_limit as f64 > self.cleanup_threshold
    }

    /// Get memory utilization as a fraction (0.0 to 1.0)
    #[must_use]
    pub fn utilization(&self) -> f64 {
        self.current_usage as f64 / self.memory_limit as f64
    }

    /// Suggest cleanup actions
    #[must_use]
    pub fn suggest_cleanup_actions(&self) -> Vec<CleanupAction> {
        let mut actions = Vec::new();

        if self.utilization() > 0.9 {
            actions.push(CleanupAction::CompressOldData);
            actions.push(CleanupAction::RemoveOldestMetrics);
        } else if self.utilization() > 0.8 {
            actions.push(CleanupAction::CompressOldData);
        } else if self.utilization() > 0.7 {
            actions.push(CleanupAction::AggregateOldMetrics);
        }

        actions
    }
}
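
// Illustrative usage sketch (not part of the original module): reporting an
// updated usage estimate and asking the manager whether, and how, to clean up.
// The 50MB limit and 46MB usage figure are arbitrary assumptions.
#[allow(dead_code)]
fn example_memory_manager() {
    let mut manager = AnalyticsMemoryManager::new(50);
    manager.update_usage(46 * 1024 * 1024);
    if manager.needs_cleanup() {
        // Above 90% utilization the manager suggests compressing old data and
        // dropping the oldest metrics.
        let actions = manager.suggest_cleanup_actions();
        assert!(actions.contains(&CleanupAction::RemoveOldestMetrics));
    }
}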

/// Cleanup actions for memory management
#[derive(Debug, Clone, PartialEq)]
pub enum CleanupAction {
    /// Compress old historical data
    CompressOldData,
    /// Remove oldest metrics
    RemoveOldestMetrics,
    /// Aggregate old metrics into summaries
    AggregateOldMetrics,
    /// Switch to memory-optimized representations
    OptimizeDataStructures,
}

/// Extension trait for memory optimization
pub trait MemoryOptimized {
    /// Estimate memory usage in bytes
    fn memory_usage(&self) -> usize;
    /// Compress data to reduce memory usage
    fn compress(&self) -> Self
    where
        Self: Sized;
    /// Check if compression is recommended
    fn should_compress(&self) -> bool;
}
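
// Illustrative sketch (not part of the original module): a minimal
// `MemoryOptimized` implementation for the compact `AchievementSummary` type,
// assuming heap usage is dominated by the recent-achievement strings. The
// crate may implement this trait differently elsewhere.
impl MemoryOptimized for AchievementSummary {
    fn memory_usage(&self) -> usize {
        std::mem::size_of::<Self>()
            + self.by_tier.len() * std::mem::size_of::<(AchievementTier, u16)>()
            + self.recent.iter().map(|s| s.len()).sum::<usize>()
    }

    fn compress(&self) -> Self {
        // Keep only the five most recent achievement names, mirroring the
        // documented intent of the `recent` field.
        let mut compressed = self.clone();
        compressed.recent.truncate(5);
        compressed
    }

    fn should_compress(&self) -> bool {
        self.recent.len() > 5
    }
}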