1use crate::CacheStats;
11use crate::multi_tier::CacheKey;
12use std::collections::VecDeque;
13use std::sync::Arc;
14use tokio::sync::RwLock;
15
/// Shared, async-guarded ring buffer of (key, access-time) pairs used for
/// access-pattern analysis.
type AccessHistory = Arc<RwLock<VecDeque<(CacheKey, chrono::DateTime<chrono::Utc>)>>>;

/// Shared, async-guarded ring buffer of timestamped cache-stat snapshots.
type StatsHistory = Arc<RwLock<VecDeque<(chrono::DateTime<chrono::Utc>, CacheStats)>>>;
21
/// A single timestamped sample of one cache metric.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct TimeSeriesPoint {
    /// UTC time at which the sample was recorded.
    pub timestamp: chrono::DateTime<chrono::Utc>,
    /// Sampled value; units depend on the metric (e.g. percent or bytes).
    pub value: f64,
}
30
/// Rolling time series of cache health metrics, one series per [`MetricType`].
#[derive(Debug, Clone, Default)]
pub struct CacheMetrics {
    /// Hit-rate samples (percent, derived from stats snapshots).
    pub hit_rate: Vec<TimeSeriesPoint>,
    /// Miss-rate samples (percent; complement of the hit rate).
    pub miss_rate: Vec<TimeSeriesPoint>,
    /// Eviction-rate samples.
    pub eviction_rate: Vec<TimeSeriesPoint>,
    /// Cache size samples (bytes stored).
    pub cache_size: Vec<TimeSeriesPoint>,
    /// Average latency samples.
    pub avg_latency: Vec<TimeSeriesPoint>,
}
45
46impl CacheMetrics {
47 pub fn new() -> Self {
49 Self::default()
50 }
51
52 pub fn add_point(&mut self, metric_type: MetricType, value: f64) {
54 let point = TimeSeriesPoint {
55 timestamp: chrono::Utc::now(),
56 value,
57 };
58
59 match metric_type {
60 MetricType::HitRate => self.hit_rate.push(point),
61 MetricType::MissRate => self.miss_rate.push(point),
62 MetricType::EvictionRate => self.eviction_rate.push(point),
63 MetricType::CacheSize => self.cache_size.push(point),
64 MetricType::AvgLatency => self.avg_latency.push(point),
65 }
66 }
67
68 pub fn trim(&mut self, keep_last: usize) {
70 if self.hit_rate.len() > keep_last {
71 self.hit_rate.drain(0..self.hit_rate.len() - keep_last);
72 }
73 if self.miss_rate.len() > keep_last {
74 self.miss_rate.drain(0..self.miss_rate.len() - keep_last);
75 }
76 if self.eviction_rate.len() > keep_last {
77 self.eviction_rate
78 .drain(0..self.eviction_rate.len() - keep_last);
79 }
80 if self.cache_size.len() > keep_last {
81 self.cache_size.drain(0..self.cache_size.len() - keep_last);
82 }
83 if self.avg_latency.len() > keep_last {
84 self.avg_latency
85 .drain(0..self.avg_latency.len() - keep_last);
86 }
87 }
88}
89
/// Selects which series in [`CacheMetrics`] a sample belongs to.
#[derive(Debug, Clone, Copy)]
pub enum MetricType {
    /// Cache hit rate.
    HitRate,
    /// Cache miss rate.
    MissRate,
    /// Rate of evictions.
    EvictionRate,
    /// Total bytes stored.
    CacheSize,
    /// Average operation latency.
    AvgLatency,
}
104
/// A detected key-access pattern together with the analyzer's confidence.
#[derive(Debug, Clone)]
pub struct AccessPattern {
    /// Which kind of pattern was detected.
    pub pattern_type: PatternType,
    /// Detection confidence in [0.0, 1.0].
    pub confidence: f64,
    /// Human-readable explanation of the pattern.
    pub description: String,
}
115
/// Kinds of key-access patterns the analyzer can report.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PatternType {
    /// Keys are accessed in increasing order.
    Sequential,
    /// No discernible ordering.
    Random,
    /// Recently accessed keys are accessed again soon after.
    TemporalLocality,
    /// Nearby keys tend to be accessed together.
    SpatialLocality,
    /// Accesses repeat on a regular cycle.
    Periodic,
}
130
131impl std::fmt::Display for PatternType {
132 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
133 match self {
134 PatternType::Sequential => write!(f, "Sequential"),
135 PatternType::Random => write!(f, "Random"),
136 PatternType::TemporalLocality => write!(f, "Temporal Locality"),
137 PatternType::SpatialLocality => write!(f, "Spatial Locality"),
138 PatternType::Periodic => write!(f, "Periodic"),
139 }
140 }
141}
142
/// A tuning suggestion derived from recent cache statistics.
#[derive(Debug, Clone)]
pub struct CacheRecommendation {
    /// What change is being suggested.
    pub recommendation_type: RecommendationType,
    /// Heuristic estimate of the improvement if the suggestion is applied.
    pub expected_improvement: f64,
    /// Why this recommendation was produced.
    pub rationale: String,
}
153
/// Categories of cache-tuning action the analyzer can recommend.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RecommendationType {
    /// Grow the cache's capacity.
    IncreaseSize,
    /// Shrink the cache's capacity.
    DecreaseSize,
    /// Switch to a different eviction policy.
    ChangeEvictionPolicy,
    /// Turn on predictive prefetching.
    EnablePrefetching,
    /// Tune the compression configuration.
    AdjustCompression,
    /// Enable distributed caching.
    EnableDistributed,
}
170
171impl std::fmt::Display for RecommendationType {
172 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
173 match self {
174 RecommendationType::IncreaseSize => write!(f, "Increase Cache Size"),
175 RecommendationType::DecreaseSize => write!(f, "Decrease Cache Size"),
176 RecommendationType::ChangeEvictionPolicy => write!(f, "Change Eviction Policy"),
177 RecommendationType::EnablePrefetching => write!(f, "Enable Prefetching"),
178 RecommendationType::AdjustCompression => write!(f, "Adjust Compression"),
179 RecommendationType::EnableDistributed => write!(f, "Enable Distributed Caching"),
180 }
181 }
182}
183
/// An unusual condition detected in recent cache statistics.
#[derive(Debug, Clone)]
pub struct Anomaly {
    /// Category of the anomaly.
    pub anomaly_type: AnomalyType,
    /// Severity in [0.0, 1.0]; relative magnitude of the deviation.
    pub severity: f64,
    /// When the anomalous snapshot was recorded.
    pub timestamp: chrono::DateTime<chrono::Utc>,
    /// Human-readable description of what was detected.
    pub description: String,
}
196
/// Categories of cache anomaly the detector can flag.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AnomalyType {
    /// Hit rate fell sharply below the recent baseline.
    HitRateDrop,
    /// Eviction count spiked above an absolute threshold.
    EvictionSpike,
    /// Latency rose abnormally.
    LatencySpike,
    /// Repeated evict-and-refetch churn.
    Thrashing,
}
209
210impl std::fmt::Display for AnomalyType {
211 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
212 match self {
213 AnomalyType::HitRateDrop => write!(f, "Hit Rate Drop"),
214 AnomalyType::EvictionSpike => write!(f, "Eviction Spike"),
215 AnomalyType::LatencySpike => write!(f, "Latency Spike"),
216 AnomalyType::Thrashing => write!(f, "Cache Thrashing"),
217 }
218 }
219}
220
/// Collects access and stats history for a cache and derives patterns,
/// recommendations, and anomalies from it. All state lives behind async
/// `RwLock`s, so the collector can be shared across tasks via `Arc`.
pub struct CacheAnalytics {
    /// Derived time-series metrics.
    metrics: Arc<RwLock<CacheMetrics>>,
    /// Ring of recent (key, timestamp) accesses.
    access_history: AccessHistory,
    /// Capacity bound for `access_history`.
    max_history: usize,
    /// Ring of recent timestamped stats snapshots.
    stats_history: StatsHistory,
    /// Capacity bound for `stats_history`.
    max_stats_history: usize,
}
234
235impl CacheAnalytics {
236 pub fn new() -> Self {
238 Self {
239 metrics: Arc::new(RwLock::new(CacheMetrics::new())),
240 access_history: Arc::new(RwLock::new(VecDeque::new())),
241 max_history: 10000,
242 stats_history: Arc::new(RwLock::new(VecDeque::new())),
243 max_stats_history: 1000,
244 }
245 }
246
247 pub async fn record_access(&self, key: CacheKey) {
249 let mut history = self.access_history.write().await;
250
251 if history.len() >= self.max_history {
252 history.pop_front();
253 }
254
255 history.push_back((key, chrono::Utc::now()));
256 }
257
258 pub async fn record_stats(&self, stats: CacheStats) {
260 let mut history = self.stats_history.write().await;
261
262 if history.len() >= self.max_stats_history {
263 history.pop_front();
264 }
265
266 history.push_back((chrono::Utc::now(), stats.clone()));
267
268 let mut metrics = self.metrics.write().await;
270 metrics.add_point(MetricType::HitRate, stats.hit_rate());
271 metrics.add_point(MetricType::MissRate, 100.0 - stats.hit_rate());
272 metrics.add_point(MetricType::CacheSize, stats.bytes_stored as f64);
273
274 metrics.trim(1000);
276 }
277
278 pub async fn analyze_patterns(&self) -> Vec<AccessPattern> {
280 let history = self.access_history.read().await;
281 let mut patterns = Vec::new();
282
283 if history.len() < 10 {
284 return patterns;
285 }
286
287 let sequential_confidence = self.detect_sequential(&history);
289 if sequential_confidence > 0.5 {
290 patterns.push(AccessPattern {
291 pattern_type: PatternType::Sequential,
292 confidence: sequential_confidence,
293 description: "Keys are accessed in sequential order".to_string(),
294 });
295 }
296
297 let temporal_confidence = self.detect_temporal_locality(&history);
299 if temporal_confidence > 0.5 {
300 patterns.push(AccessPattern {
301 pattern_type: PatternType::TemporalLocality,
302 confidence: temporal_confidence,
303 description: "Recently accessed keys are frequently reaccessed".to_string(),
304 });
305 }
306
307 patterns
308 }
309
310 fn detect_sequential(
312 &self,
313 history: &VecDeque<(CacheKey, chrono::DateTime<chrono::Utc>)>,
314 ) -> f64 {
315 let mut sequential_count = 0;
317 let mut total_comparisons = 0;
318
319 for window in history.iter().collect::<Vec<_>>().windows(2) {
320 if let [a, b] = window {
321 total_comparisons += 1;
322 if a.0 < b.0 {
323 sequential_count += 1;
324 }
325 }
326 }
327
328 if total_comparisons > 0 {
329 sequential_count as f64 / total_comparisons as f64
330 } else {
331 0.0
332 }
333 }
334
335 fn detect_temporal_locality(
337 &self,
338 history: &VecDeque<(CacheKey, chrono::DateTime<chrono::Utc>)>,
339 ) -> f64 {
340 let window_size = 10;
342 let time_threshold = chrono::Duration::seconds(60);
343
344 let mut reaccess_count = 0;
345 let mut total_count = 0;
346
347 for i in window_size..history.len() {
348 total_count += 1;
349 let (key, ts) = &history[i];
350
351 for (prev_key, prev_ts) in history.range(i.saturating_sub(window_size)..i) {
353 if key == prev_key && (*ts - *prev_ts) < time_threshold {
354 reaccess_count += 1;
355 break;
356 }
357 }
358 }
359
360 if total_count > 0 {
361 reaccess_count as f64 / total_count as f64
362 } else {
363 0.0
364 }
365 }
366
367 pub async fn generate_recommendations(&self) -> Vec<CacheRecommendation> {
369 let stats_history = self.stats_history.read().await;
370 let mut recommendations = Vec::new();
371
372 if stats_history.len() < 10 {
373 return recommendations;
374 }
375
376 let recent_stats: Vec<_> = stats_history
378 .iter()
379 .rev()
380 .take(10)
381 .map(|(_, s)| s)
382 .collect();
383
384 let avg_hit_rate: f64 =
385 recent_stats.iter().map(|s| s.hit_rate()).sum::<f64>() / recent_stats.len() as f64;
386
387 if avg_hit_rate < 50.0 {
389 recommendations.push(CacheRecommendation {
390 recommendation_type: RecommendationType::IncreaseSize,
391 expected_improvement: 20.0,
392 rationale: format!(
393 "Hit rate is low ({:.1}%). Increasing cache size may improve performance.",
394 avg_hit_rate
395 ),
396 });
397 }
398
399 let avg_evictions: f64 = recent_stats.iter().map(|s| s.evictions as f64).sum::<f64>()
401 / recent_stats.len() as f64;
402
403 if avg_evictions > 10.0 {
404 recommendations.push(CacheRecommendation {
405 recommendation_type: RecommendationType::ChangeEvictionPolicy,
406 expected_improvement: 15.0,
407 rationale: format!(
408 "High eviction rate ({:.1} per snapshot). Consider ARC or LFU policy.",
409 avg_evictions
410 ),
411 });
412 }
413
414 recommendations
415 }
416
417 pub async fn detect_anomalies(&self) -> Vec<Anomaly> {
419 let stats_history = self.stats_history.read().await;
420 let mut anomalies = Vec::new();
421
422 if stats_history.len() < 20 {
423 return anomalies;
424 }
425
426 let baseline_stats: Vec<_> = stats_history
428 .iter()
429 .rev()
430 .skip(5)
431 .take(10)
432 .map(|(_, s)| s)
433 .collect();
434
435 let baseline_hit_rate: f64 =
436 baseline_stats.iter().map(|s| s.hit_rate()).sum::<f64>() / baseline_stats.len() as f64;
437
438 let recent_stats: Vec<_> = stats_history.iter().rev().take(5).collect();
440
441 for (ts, stats) in recent_stats {
442 let hit_rate = stats.hit_rate();
443
444 if hit_rate < baseline_hit_rate * 0.7 {
446 anomalies.push(Anomaly {
447 anomaly_type: AnomalyType::HitRateDrop,
448 severity: (baseline_hit_rate - hit_rate) / baseline_hit_rate,
449 timestamp: *ts,
450 description: format!(
451 "Hit rate dropped from {:.1}% to {:.1}%",
452 baseline_hit_rate, hit_rate
453 ),
454 });
455 }
456
457 if stats.evictions > 100 {
459 anomalies.push(Anomaly {
460 anomaly_type: AnomalyType::EvictionSpike,
461 severity: 0.8,
462 timestamp: *ts,
463 description: format!("Eviction spike: {} evictions", stats.evictions),
464 });
465 }
466 }
467
468 anomalies
469 }
470
471 pub async fn metrics(&self) -> CacheMetrics {
473 self.metrics.read().await.clone()
474 }
475
476 pub async fn clear(&self) {
478 self.access_history.write().await.clear();
479 self.stats_history.write().await.clear();
480 *self.metrics.write().await = CacheMetrics::new();
481 }
482}
483
484impl Default for CacheAnalytics {
485 fn default() -> Self {
486 Self::new()
487 }
488}
489
#[cfg(test)]
mod tests {
    use super::*;

    /// Recording accesses plus one stats snapshot should populate the
    /// derived hit-rate series.
    #[tokio::test]
    async fn test_cache_analytics() {
        let analytics = CacheAnalytics::new();

        for i in 0..20 {
            analytics.record_access(format!("key{}", i)).await;
        }

        let stats = CacheStats {
            hits: 80,
            misses: 20,
            evictions: 5,
            bytes_stored: 1024 * 1024,
            item_count: 100,
        };

        analytics.record_stats(stats).await;

        let metrics = analytics.metrics().await;
        assert!(!metrics.hit_rate.is_empty());
    }

    /// Monotonically increasing keys should be reported as a pattern.
    #[tokio::test]
    async fn test_pattern_analysis() {
        let analytics = CacheAnalytics::new();

        // Zero-padded keys so lexicographic order matches numeric order.
        for i in 0..50 {
            analytics.record_access(format!("key{:03}", i)).await;
        }

        let patterns = analytics.analyze_patterns().await;
        assert!(!patterns.is_empty());
    }

    /// A sustained low hit rate and high eviction count should yield at
    /// least one recommendation.
    #[tokio::test]
    async fn test_recommendations() {
        let analytics = CacheAnalytics::new();

        // 15 snapshots clears the 10-snapshot minimum for analysis.
        for _ in 0..15 {
            let stats = CacheStats {
                hits: 30,
                misses: 70,
                evictions: 15,
                bytes_stored: 1024 * 1024,
                item_count: 100,
            };
            analytics.record_stats(stats).await;
        }

        let recommendations = analytics.generate_recommendations().await;
        assert!(!recommendations.is_empty());
    }

    /// A healthy baseline followed by a sharp hit-rate drop and
    /// eviction spike should be flagged as anomalous.
    #[tokio::test]
    async fn test_anomaly_detection() {
        let analytics = CacheAnalytics::new();

        // Healthy baseline: 15 snapshots at an 80% hit rate.
        for _ in 0..15 {
            let stats = CacheStats {
                hits: 80,
                misses: 20,
                evictions: 2,
                bytes_stored: 1024 * 1024,
                item_count: 100,
            };
            analytics.record_stats(stats).await;
        }

        // Degraded tail: hit rate collapses and evictions spike past 100.
        for _ in 0..5 {
            let stats = CacheStats {
                hits: 30,
                misses: 70,
                evictions: 150,
                bytes_stored: 1024 * 1024,
                item_count: 100,
            };
            analytics.record_stats(stats).await;
        }

        let anomalies = analytics.detect_anomalies().await;
        assert!(!anomalies.is_empty());
    }
}