multi_tier_cache/backends/moka_cache.rs

//! Moka Cache - In-Memory Cache Backend
//!
//! High-performance in-memory cache using Moka for hot data storage.

use anyhow::Result;
use moka::future::Cache;
use serde_json;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use std::time::{Duration, Instant};
use tracing::{debug, info};

/// Cache entry with TTL information
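///
/// Moka's builder-level TTL/TTI applies uniformly to all entries, so per-key
/// TTL is tracked here via `expires_at` and enforced on read in `get`.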
#[derive(Debug, Clone)]
struct CacheEntry {
    value: serde_json::Value,
    expires_at: Instant,
}

impl CacheEntry {
    fn new(value: serde_json::Value, ttl: Duration) -> Self {
        Self {
            value,
            expires_at: Instant::now() + ttl,
        }
    }

    fn is_expired(&self) -> bool {
        Instant::now() > self.expires_at
    }
}

/// Moka in-memory cache with per-key TTL support
///
/// This is the default L1 (hot tier) cache backend, providing:
/// - Fast in-memory access (< 1ms latency)
/// - Automatic capacity-based eviction (Moka's TinyLFU policy)
/// - Per-key TTL support
/// - Statistics tracking
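///
/// # Example
///
/// A minimal usage sketch via the `CacheBackend` trait below (the
/// `multi_tier_cache` crate name and module paths are assumptions; adjust to
/// the actual crate layout):
///
/// ```rust,ignore
/// use std::time::Duration;
/// use multi_tier_cache::backends::moka_cache::MokaCache;
/// use multi_tier_cache::traits::CacheBackend;
///
/// # async fn demo() -> anyhow::Result<()> {
/// let cache = MokaCache::new()?;
/// cache
///     .set_with_ttl("user:42", serde_json::json!({"name": "Ada"}), Duration::from_secs(30))
///     .await?;
/// assert_eq!(
///     cache.get("user:42").await,
///     Some(serde_json::json!({"name": "Ada"}))
/// );
/// # Ok(())
/// # }
/// ```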
pub struct MokaCache {
    /// Moka cache instance
    cache: Cache<String, CacheEntry>,
    /// Hit counter
    hits: Arc<AtomicU64>,
    /// Miss counter
    misses: Arc<AtomicU64>,
    /// Set counter
    sets: Arc<AtomicU64>,
    /// Coalesced requests counter (requests that waited for an ongoing computation)
    #[allow(dead_code)]
    coalesced_requests: Arc<AtomicU64>,
}

impl MokaCache {
    /// Create new Moka cache
    ///
    /// # Errors
    ///
    /// Returns an error if the cache cannot be initialized.
    pub fn new() -> Result<Self> {
        info!("Initializing Moka Cache");

        let cache = Cache::builder()
            .max_capacity(2000) // 2000 entries max
            .time_to_live(Duration::from_secs(3600)) // 1 hour max TTL as safety net
            .time_to_idle(Duration::from_secs(120)) // 2 minutes idle time
            .build();

        info!(
            capacity = 2000,
            "Moka Cache initialized with per-key TTL support"
        );

        Ok(Self {
            cache,
            hits: Arc::new(AtomicU64::new(0)),
            misses: Arc::new(AtomicU64::new(0)),
            sets: Arc::new(AtomicU64::new(0)),
            coalesced_requests: Arc::new(AtomicU64::new(0)),
        })
    }
}
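
// A minimal sketch of a configurable constructor for deployments where the
// hard-coded capacity/TTL defaults in `new()` need tuning. The `with_capacity`
// name and signature are hypothetical, not part of the original API.
#[allow(dead_code)]
impl MokaCache {
    /// Create a Moka cache with a custom capacity and safety-net TTL.
    pub fn with_capacity(max_entries: u64, max_ttl: Duration) -> Result<Self> {
        let cache = Cache::builder()
            .max_capacity(max_entries)
            .time_to_live(max_ttl) // safety net; per-key TTL is still enforced via `CacheEntry`
            .time_to_idle(Duration::from_secs(120))
            .build();

        Ok(Self {
            cache,
            hits: Arc::new(AtomicU64::new(0)),
            misses: Arc::new(AtomicU64::new(0)),
            sets: Arc::new(AtomicU64::new(0)),
            coalesced_requests: Arc::new(AtomicU64::new(0)),
        })
    }
}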

// ===== Trait Implementations =====

use crate::traits::CacheBackend;
use async_trait::async_trait;

/// Implement `CacheBackend` trait for `MokaCache`
///
/// This allows `MokaCache` to be used as a pluggable backend in the multi-tier cache system.
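///
/// A minimal sketch of plugging the backend in behind a trait object (this
/// assumes `CacheBackend` is object-safe as declared and that `MokaCache` is
/// `Send + Sync`, which holds for the fields declared above):
///
/// ```rust,ignore
/// use std::sync::Arc;
///
/// let backend: Arc<dyn CacheBackend + Send + Sync> = Arc::new(MokaCache::new()?);
/// assert_eq!(backend.name(), "Moka");
/// ```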
#[async_trait]
impl CacheBackend for MokaCache {
    async fn get(&self, key: &str) -> Option<serde_json::Value> {
        if let Some(entry) = self.cache.get(key).await {
            if entry.is_expired() {
                // Remove expired entry
                let _ = self.cache.remove(key).await;
                self.misses.fetch_add(1, Ordering::Relaxed);
                None
            } else {
                self.hits.fetch_add(1, Ordering::Relaxed);
                Some(entry.value)
            }
        } else {
            self.misses.fetch_add(1, Ordering::Relaxed);
            None
        }
    }

    async fn set_with_ttl(&self, key: &str, value: serde_json::Value, ttl: Duration) -> Result<()> {
        let entry = CacheEntry::new(value, ttl);
        self.cache.insert(key.to_string(), entry).await;
        self.sets.fetch_add(1, Ordering::Relaxed);
        debug!(key = %key, ttl_secs = %ttl.as_secs(), "[Moka] Cached key with TTL");
        Ok(())
    }

    async fn remove(&self, key: &str) -> Result<()> {
        let _ = self.cache.remove(key).await;
        Ok(())
    }

    async fn health_check(&self) -> bool {
        // Test basic functionality with custom TTL
        let test_key = "health_check_moka";
        let test_value = serde_json::json!({"test": true});

        match self
            .set_with_ttl(test_key, test_value.clone(), Duration::from_secs(60))
            .await
        {
            Ok(()) => match self.get(test_key).await {
                Some(retrieved) => {
                    let _ = self.remove(test_key).await;
                    retrieved == test_value
                }
                None => false,
            },
            Err(_) => false,
        }
    }

    fn name(&self) -> &'static str {
        "Moka"
    }
}

/// Cache statistics
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct CacheStats {
    pub hits: u64,
    pub misses: u64,
    pub sets: u64,
    pub coalesced_requests: u64,
    pub size: u64,
}
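
// A minimal sketch of surfacing the counters above through `CacheStats`. The
// `stats()` accessor is an assumption (not part of the original API), and
// `entry_count()` is Moka's approximate entry count, which may lag behind
// recent inserts and evictions.
#[allow(dead_code)]
impl MokaCache {
    /// Snapshot the current hit/miss/set counters and approximate size.
    pub fn stats(&self) -> CacheStats {
        CacheStats {
            hits: self.hits.load(Ordering::Relaxed),
            misses: self.misses.load(Ordering::Relaxed),
            sets: self.sets.load(Ordering::Relaxed),
            coalesced_requests: self.coalesced_requests.load(Ordering::Relaxed),
            size: self.cache.entry_count(),
        }
    }
}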