use multi_tier_cache::{CacheStrategy, CacheSystem};
use serde::{Deserialize, Serialize};
use std::time::Duration;
/// Minimal user record used to demonstrate typed caching of database rows.
///
/// Serialized/deserialized by the cache layer via serde; field order is kept
/// as-is because some serde formats are order-sensitive.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct User {
    // Primary key of the simulated user row.
    id: i64,
    name: String,
    email: String,
    // Creation time — presumably a Unix timestamp in seconds (see the
    // 1_704_326_400 constant in `fetch_user_from_db`); confirm if reused.
    created_at: i64,
}
/// Minimal product record used to demonstrate caching a second value type
/// under a different cache strategy.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct Product {
    // Primary key of the simulated product row.
    id: i64,
    title: String,
    // Demo price derived from the id in `fetch_product_from_db`.
    price: f64,
    stock: i32,
}
/// Simulates fetching a user row from a database.
///
/// Logs the query, sleeps 100 ms to mimic query latency, then returns a
/// deterministic `User` derived from `user_id`. Always succeeds; the
/// `Result` return mirrors a real DB call's signature.
async fn fetch_user_from_db(user_id: i64) -> anyhow::Result<User> {
    println!(" 🗄️ Simulating database query for user {user_id}");
    tokio::time::sleep(Duration::from_millis(100)).await;

    // Build the synthetic row in a named local rather than inline in `Ok`.
    let row = User {
        id: user_id,
        name: format!("User {user_id}"),
        email: format!("user{user_id}@example.com"),
        // Fixed Unix timestamp so repeated fetches are byte-identical.
        created_at: 1_704_326_400,
    };
    Ok(row)
}
/// Simulates fetching a product row from a database.
///
/// Logs the query, sleeps 150 ms to mimic query latency, then returns a
/// deterministic `Product` derived from `product_id`.
async fn fetch_product_from_db(product_id: i64) -> anyhow::Result<Product> {
    println!(" 🗄️ Simulating database query for product {product_id}");
    tokio::time::sleep(Duration::from_millis(150)).await;

    // Demo values derived from the id; the cast is intentional and lossless
    // for the small ids this example uses.
    #[allow(clippy::cast_precision_loss)]
    let price = 99.99 + (product_id as f64);
    let stock = i32::try_from(product_id * 10).unwrap_or(0);

    Ok(Product {
        id: product_id,
        title: format!("Product #{product_id}"),
        price,
        stock,
    })
}
/// Entry point: initializes the cache system, then runs the demo scenarios
/// in sequence (basic typed reads, concurrent stampede test) and prints
/// aggregate statistics at the end.
///
/// Any initialization or scenario error is propagated via `anyhow::Result`.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    println!("🎯 Type-Safe Database Caching Example\n");
    println!("This demonstrates automatic caching with type safety for database queries.\n");
    println!("📦 Initializing cache system...");
    // Builds both cache tiers; `?` aborts the example if setup fails.
    let cache = CacheSystem::new().await?;
    println!("✅ Cache system ready!\n");
    run_basic_examples(&cache).await?;
    run_concurrent_example(&cache).await?;
    print_final_stats(&cache);
    Ok(())
}
async fn run_basic_examples(cache: &CacheSystem) -> anyhow::Result<()> {
println!("📊 Example 1: First user request (will hit database)");
println!("─────────────────────────────────────────────────────");
let start = std::time::Instant::now();
let user: User = cache
.cache_manager()
.get_or_compute_typed("user:123", CacheStrategy::MediumTerm, || async {
fetch_user_from_db(123)
.await
.map_err(|e| multi_tier_cache::error::CacheError::InternalError(e.to_string()))
})
.await?;
let elapsed = start.elapsed();
println!("✅ Retrieved user: {user:?}");
println!("⏱️ Time taken: {elapsed:?} (includes DB query + caching)\n");
println!("📊 Example 2: Second user request (will hit L1 cache)");
println!("─────────────────────────────────────────────────────");
let start = std::time::Instant::now();
let user: User = cache
.cache_manager()
.get_or_compute_typed("user:123", CacheStrategy::MediumTerm, || async {
fetch_user_from_db(123)
.await
.map_err(|e| multi_tier_cache::error::CacheError::InternalError(e.to_string()))
})
.await?;
let elapsed = start.elapsed();
println!("✅ Retrieved user: {user:?}");
println!("⏱️ Time taken: {elapsed:?} (sub-millisecond from L1!)\n");
println!("📊 Example 3: Product query with different type");
println!("────────────────────────────────────────────────");
let product: Product = cache
.cache_manager()
.get_or_compute_typed("product:456", CacheStrategy::LongTerm, || async {
fetch_product_from_db(456)
.await
.map_err(|e| multi_tier_cache::error::CacheError::InternalError(e.to_string()))
})
.await?;
println!("✅ Retrieved product: {product:?}\n");
Ok(())
}
async fn run_concurrent_example(cache: &CacheSystem) -> anyhow::Result<()> {
println!("📊 Example 4: Concurrent requests (stampede protection)");
println!("────────────────────────────────────────────────────────");
println!("Spawning 5 concurrent requests for user:999...");
let cache_clone = cache.clone();
let handles: Vec<_> = (0..5)
.map(|i| {
let cache = cache_clone.clone();
tokio::spawn(async move {
let start = std::time::Instant::now();
let user: User = cache
.cache_manager()
.get_or_compute_typed("user:999", CacheStrategy::ShortTerm, || async move {
fetch_user_from_db(999).await.map_err(|e| {
multi_tier_cache::error::CacheError::InternalError(e.to_string())
})
})
.await
.unwrap_or_else(|_| panic!("Failed to get user"));
let elapsed = start.elapsed();
println!(" 🎯 Request {} completed in {:?}", i + 1, elapsed);
user
})
})
.collect();
for handle in handles {
handle.await?;
}
println!("\n💡 Notice: Only ONE database query was executed!");
println!(" Cache stampede protection coalesced all 5 requests.\n");
Ok(())
}
/// Prints aggregate cache statistics (requests, per-tier hits, misses, hit
/// rate, promotions) followed by the example's key takeaways.
fn print_final_stats(cache: &CacheSystem) {
    println!("📈 Final Cache Statistics");
    println!("─────────────────────────");
    let stats = cache.cache_manager().get_stats();
    println!("Total requests: {}", stats.total_requests);
    println!("L1 hits: {}", stats.l1_hits);
    println!("L2 hits: {}", stats.l2_hits);
    println!("Cache misses: {}", stats.misses);
    // hit_rate is printed as a percentage with two decimal places.
    println!("Hit rate: {:.2}%", stats.hit_rate);
    println!("Promotions: {}", stats.promotions);
    println!("\n✅ Example completed successfully!");
    // The original literal carried stray "186:"-style line-number prefixes
    // from a copy/paste; they are removed so the takeaways print cleanly.
    println!(
        "
💡 Key Takeaways:
─────────────────
1. Type-safe: Compiler enforces correct types
2. Zero boilerplate: No manual serialize/deserialize
3. Automatic caching: L1+L2 storage handled for you
4. Stampede protection: Concurrent requests coalesced
5. Generic: Works with any Serialize + Deserialize type
"
    );
}