use bytes::Bytes;
use multi_tier_cache::error::CacheError;
use multi_tier_cache::{CacheStrategy, CacheSystem};
use std::time::Duration;
/// Simulates an expensive database lookup for a product record.
///
/// Sleeps briefly to mimic query latency, then returns the product
/// serialized as a JSON byte buffer.
async fn fetch_from_database(id: u32) -> anyhow::Result<Bytes> {
    // Simulated round-trip latency of the "database".
    const DB_LATENCY: Duration = Duration::from_millis(200);

    println!(" 📦 Fetching from database (expensive operation)...");
    tokio::time::sleep(DB_LATENCY).await;

    // Build the record with the json! macro so field serialization
    // matches serde_json's default map ordering.
    let record = serde_json::json!({
        "product_id": id,
        "name": format!("Product {id}"),
        "price": 100 + id
    });
    let body = record.to_string();
    Ok(Bytes::from(body))
}
#[tokio::main]
async fn main() -> anyhow::Result<()> {
println!("=== Multi-Tier Cache: Advanced Usage ===\n");
let cache = CacheSystem::new().await?;
println!();
println!("=== Scenario 1: L2-to-L1 Cache Promotion ===\n");
let data = Bytes::from("{\"message\": \"This data starts in L2\"}");
cache
.cache_manager()
.set_with_strategy("promotion_test", data, CacheStrategy::MediumTerm)
.await?;
println!("✅ Data stored in both L1 and L2\n");
println!("First access:");
let result1 = cache.cache_manager().get("promotion_test").await?;
println!(" Retrieved: {:?}\n", result1.is_some());
println!("Waiting 6 seconds for L1 TTL to expire...");
tokio::time::sleep(Duration::from_secs(6)).await;
println!("Second access (after L1 expiration):");
let result2 = cache.cache_manager().get("promotion_test").await?;
println!(" Retrieved: {:?}", result2.is_some());
println!(" (Data promoted from L2 back to L1)\n");
println!("Third access (now in L1):");
let result3 = cache.cache_manager().get("promotion_test").await?;
println!(" Retrieved: {:?}\n", result3.is_some());
println!("=== Scenario 2: Compute-on-Miss Pattern ===\n");
println!("First call - cache miss:");
let product1 = cache
.cache_manager()
.get_or_compute_with("product:42", CacheStrategy::MediumTerm, || async move {
fetch_from_database(42)
.await
.map_err(|e| CacheError::InternalError(e.to_string()))
})
.await?;
println!(" Result: {product1:?}\n");
println!("Second call - cache hit:");
let product2 = cache
.cache_manager()
.get_or_compute_with("product:42", CacheStrategy::MediumTerm, || async move {
fetch_from_database(42)
.await
.map_err(|e| CacheError::InternalError(e.to_string()))
})
.await?;
println!(" Result: {product2:?} (from cache, no DB call)\n");
println!("=== Scenario 3: Concurrent Operations ===\n");
let mut handles = vec![];
for i in 1..=5 {
let cache_clone = cache.clone();
let handle = tokio::spawn(async move {
let data = Bytes::from(format!(
"{{\"worker_id\": {i}, \"data\": \"Concurrent data from worker {i}\"}}"
));
cache_clone
.cache_manager()
.set_with_strategy(&format!("concurrent:{i}"), data, CacheStrategy::ShortTerm)
.await
});
handles.push(handle);
}
for handle in handles {
handle.await??;
}
println!("✅ 5 concurrent cache operations completed\n");
for i in 1..=5 {
if let Some(value) = cache
.cache_manager()
.get(&format!("concurrent:{i}"))
.await?
{
println!(" concurrent:{i} = {value:?}");
}
}
println!("\n=== Final Cache Statistics ===");
let stats = cache.cache_manager().get_stats();
println!("Total requests: {}", stats.total_requests);
println!("L1 hits: {} ({:.2}%)", stats.l1_hits, stats.l1_hit_rate);
println!("L2 hits: {}", stats.l2_hits);
println!("Misses: {}", stats.misses);
println!("Overall hit rate: {:.2}%", stats.hit_rate);
println!("L2→L1 promotions: {}", stats.promotions);
println!("In-flight requests: {}", stats.in_flight_requests);
Ok(())
}