multi_tier_cache/backends/moka_cache.rs

//! Moka Cache - In-Memory Cache Backend
//!
//! High-performance in-memory cache using Moka for hot data storage.

use std::sync::Arc;
use std::time::{Duration, Instant};
use anyhow::Result;
use serde_json;
use moka::future::Cache;
use std::sync::atomic::{AtomicU64, Ordering};
use tracing::{info, debug};

/// Cache entry with TTL information
#[derive(Debug, Clone)]
struct CacheEntry {
    value: serde_json::Value,
    expires_at: Instant,
}

impl CacheEntry {
    fn new(value: serde_json::Value, ttl: Duration) -> Self {
        Self {
            value,
            expires_at: Instant::now() + ttl,
        }
    }

    fn is_expired(&self) -> bool {
        Instant::now() > self.expires_at
    }
}

/// Moka in-memory cache with per-key TTL support
///
/// This is the default L1 (hot tier) cache backend, providing:
/// - Fast in-memory access (< 1ms latency)
/// - Automatic eviction via LRU
/// - Per-key TTL support
/// - Statistics tracking
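///
/// A minimal usage sketch (marked `ignore`: the crate path for `CacheBackend`, a Tokio
/// runtime, and an `anyhow`-style `?` context are assumed here, not defined in this file):
///
/// ```ignore
/// use std::time::Duration;
/// use crate::traits::CacheBackend;
///
/// let cache = MokaCache::new().await?;
/// cache
///     .set_with_ttl("user:42", serde_json::json!({"name": "Ada"}), Duration::from_secs(30))
///     .await?;
/// assert_eq!(
///     cache.get("user:42").await,
///     Some(serde_json::json!({"name": "Ada"}))
/// );
/// ```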
pub struct MokaCache {
    /// Moka cache instance
    cache: Cache<String, CacheEntry>,
    /// Hit counter
    hits: Arc<AtomicU64>,
    /// Miss counter
    misses: Arc<AtomicU64>,
    /// Set counter
    sets: Arc<AtomicU64>,
    /// Coalesced requests counter (requests that waited for an ongoing computation)
    #[allow(dead_code)]
    coalesced_requests: Arc<AtomicU64>,
}

impl MokaCache {
    /// Create a new Moka cache (2000-entry capacity, 1 h TTL / 2 min TTI safety nets)
    pub async fn new() -> Result<Self> {
        info!("Initializing Moka Cache");

        let cache = Cache::builder()
            .max_capacity(2000) // 2000 entries max
            .time_to_live(Duration::from_secs(3600)) // 1 hour max TTL as safety net
            .time_to_idle(Duration::from_secs(120)) // 2 minutes idle time
            .build();

        info!(capacity = 2000, "Moka Cache initialized with per-key TTL support");

        Ok(Self {
            cache,
            hits: Arc::new(AtomicU64::new(0)),
            misses: Arc::new(AtomicU64::new(0)),
            sets: Arc::new(AtomicU64::new(0)),
            coalesced_requests: Arc::new(AtomicU64::new(0)),
        })
    }
}

// ===== Trait Implementations =====

use crate::traits::CacheBackend;
use async_trait::async_trait;

/// Implement CacheBackend trait for MokaCache
///
/// This allows MokaCache to be used as a pluggable backend in the multi-tier cache system.
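///
/// A hedged sketch of that pluggability (the trait-object wiring below is an assumption
/// about calling code elsewhere in the crate, not something this module defines):
///
/// ```ignore
/// use std::sync::Arc;
///
/// // Any CacheBackend implementation can serve as the hot tier.
/// let l1: Arc<dyn CacheBackend + Send + Sync> = Arc::new(MokaCache::new().await?);
/// println!("L1 backend: {}", l1.name());
/// ```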
#[async_trait]
impl CacheBackend for MokaCache {
    async fn get(&self, key: &str) -> Option<serde_json::Value> {
        match self.cache.get(key).await {
            Some(entry) => {
                if entry.is_expired() {
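                    // Per-key TTLs can be shorter than the builder-level TTL/TTI safety nets,
                    // so expiry is enforced lazily here on read instead of relying on Moka.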
                    // Remove expired entry
                    let _ = self.cache.remove(key).await;
                    self.misses.fetch_add(1, Ordering::Relaxed);
                    None
                } else {
                    self.hits.fetch_add(1, Ordering::Relaxed);
                    Some(entry.value)
                }
            }
            None => {
                self.misses.fetch_add(1, Ordering::Relaxed);
                None
            }
        }
    }

    async fn set_with_ttl(
        &self,
        key: &str,
        value: serde_json::Value,
        ttl: Duration,
    ) -> Result<()> {
        let entry = CacheEntry::new(value, ttl);
        self.cache.insert(key.to_string(), entry).await;
        self.sets.fetch_add(1, Ordering::Relaxed);
        debug!(key = %key, ttl_secs = %ttl.as_secs(), "[Moka] Cached key with TTL");
        Ok(())
    }

    async fn remove(&self, key: &str) -> Result<()> {
        let _ = self.cache.remove(key).await;
        Ok(())
    }

    async fn health_check(&self) -> bool {
        // Test basic functionality with custom TTL
        let test_key = "health_check_moka";
        let test_value = serde_json::json!({"test": true});

        match self.set_with_ttl(test_key, test_value.clone(), Duration::from_secs(60)).await {
            Ok(_) => {
                match self.get(test_key).await {
                    Some(retrieved) => {
                        let _ = self.remove(test_key).await;
                        retrieved == test_value
                    }
                    None => false
                }
            }
            Err(_) => false
        }
    }

    fn name(&self) -> &str {
        "Moka"
    }
}

/// Cache statistics
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct CacheStats {
    pub hits: u64,
    pub misses: u64,
    pub sets: u64,
    pub coalesced_requests: u64,
    pub size: u64,
}
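
// Hedged sketch, not part of the original module: nothing above actually produces a
// `CacheStats` value from the counters. One possible accessor, assuming Moka's
// `entry_count()` (an approximate entry count) is acceptable for the `size` field.
impl MokaCache {
    /// Snapshot the hit/miss/set counters into a `CacheStats` (illustrative only).
    #[allow(dead_code)]
    pub fn stats(&self) -> CacheStats {
        CacheStats {
            hits: self.hits.load(Ordering::Relaxed),
            misses: self.misses.load(Ordering::Relaxed),
            sets: self.sets.load(Ordering::Relaxed),
            coalesced_requests: self.coalesced_requests.load(Ordering::Relaxed),
            size: self.cache.entry_count(),
        }
    }
}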