use criterion::{BenchmarkId, Criterion, black_box, criterion_group, criterion_main};
use multi_tier_cache::error::CacheError;
use multi_tier_cache::{Bytes, CacheBackend, CacheStrategy, CacheSystem};
use serde_json::json;
use std::time::Duration;
use tokio::runtime::Runtime;
/// Builds the cache system plus the Tokio runtime used to drive it.
///
/// The L2 tier is pointed at a local Redis via `REDIS_URL`, so these
/// benchmarks require Redis listening on 127.0.0.1:6379.
fn setup_cache() -> (CacheSystem, Runtime) {
    // SAFETY: `set_var` is `unsafe` (edition 2024) because mutating the
    // environment is not thread-safe; doing it before `Runtime::new` spawns
    // any worker threads keeps it single-threaded here.
    unsafe { std::env::set_var("REDIS_URL", "redis://127.0.0.1:6379") };
    // `expect` (rather than `unwrap_or_else(|_| panic!(..))`) keeps the
    // underlying error in the panic message, so setup failures are diagnosable.
    let rt = Runtime::new().expect("Failed to create runtime");
    let cache = rt.block_on(async {
        CacheSystem::new()
            .await
            .expect("Failed to create cache system")
    });
    (cache, rt)
}
/// Builds a JSON payload whose `"data"` field is `size_bytes` bytes of `'x'`,
/// serialized to `Bytes` for insertion into the cache.
fn test_data(size_bytes: usize) -> Bytes {
    let payload = json!({
        "data": "x".repeat(size_bytes),
        "size": size_bytes,
        "timestamp": "2025-01-01T00:00:00Z"
    });
    let encoded = serde_json::to_vec(&payload)
        .unwrap_or_else(|e| panic!("Failed to serialize test data: {e}"));
    Bytes::from(encoded)
}
/// Benchmarks raw `set_with_strategy` latency for the short- and long-term
/// strategies across payload sizes from 100 B to 100 KiB.
fn bench_cache_set(c: &mut Criterion) {
    let (cache, rt) = setup_cache();
    let mut group = c.benchmark_group("cache_set");
    group.measurement_time(Duration::from_secs(10));
    for size in &[100, 1024, 10240, 102_400] {
        // One payload per size, built outside the timed closure; each
        // iteration clones it (the clone is part of the measured cost).
        let data = test_data(*size);
        group.bench_with_input(BenchmarkId::new("short_term", size), size, |b, _| {
            b.iter(|| {
                rt.block_on(async {
                    // Random key so every iteration is a fresh insert rather
                    // than an overwrite of a single hot entry.
                    let key = format!("bench:set:{}", rand::random::<u32>());
                    cache
                        .cache_manager()
                        .set_with_strategy(&key, black_box(data.clone()), CacheStrategy::ShortTerm)
                        .await
                        // `expect` keeps the error in the panic message.
                        .expect("Failed to set cache");
                });
            });
        });
        group.bench_with_input(BenchmarkId::new("long_term", size), size, |b, _| {
            b.iter(|| {
                rt.block_on(async {
                    let key = format!("bench:set:{}", rand::random::<u32>());
                    cache
                        .cache_manager()
                        .set_with_strategy(&key, black_box(data.clone()), CacheStrategy::LongTerm)
                        .await
                        .expect("Failed to set cache");
                });
            });
        });
    }
    group.finish();
}
/// Benchmarks `get` latency when the key is resident in the L1 tier.
///
/// Warm-up writes 100 keys and immediately reads each one back, so the
/// timed loop should only ever observe L1 hits.
fn bench_l1_hit(c: &mut Criterion) {
    let (cache, rt) = setup_cache();
    rt.block_on(async {
        for i in 0..100 {
            let key = format!("bench:l1:{i}");
            cache
                .cache_manager()
                .set_with_strategy(&key, test_data(1024), CacheStrategy::ShortTerm)
                .await
                // `expect` keeps the error in the panic message.
                .expect("Failed to set cache");
            // Read-through once so the entry is definitely resident in L1
            // before measurement starts.
            let _ = cache
                .cache_manager()
                .get(&key)
                .await
                .expect("Failed to get cache");
        }
    });
    c.bench_function("l1_cache_hit", |b| {
        b.iter(|| {
            rt.block_on(async {
                // Uniform-ish pick over the 100 warmed keys.
                let key = format!("bench:l1:{}", rand::random::<u8>() % 100);
                black_box(
                    cache
                        .cache_manager()
                        .get(&key)
                        .await
                        .expect("Failed to get cache"),
                );
            });
        });
    });
}
/// Benchmarks `get` latency when the key must be served from the L2 tier.
///
/// Warm-up writes 100 keys directly into L2 only; each timed iteration
/// evicts the key from L1 first so the read cannot be satisfied locally.
/// NOTE(review): the L1 `remove` happens inside the timed region, so the
/// reported figure includes that eviction cost, not just the L2 fetch.
fn bench_l2_hit(c: &mut Criterion) {
    let (cache, rt) = setup_cache();
    rt.block_on(async {
        for i in 0..100 {
            let key = format!("bench:l2:{i}");
            if let Some(l2) = &cache.l2_cache {
                l2.set_with_ttl(&key, test_data(1024), Duration::from_secs(300))
                    .await
                    // `expect` keeps the error in the panic message.
                    .expect("Failed to set cache");
            }
        }
    });
    c.bench_function("l2_cache_hit", |b| {
        b.iter(|| {
            rt.block_on(async {
                let key = format!("bench:l2:{}", rand::random::<u8>() % 100);
                // Drop any promoted copy so this iteration is a true L2 hit.
                if let Some(l1) = &cache.l1_cache {
                    l1.remove(&key)
                        .await
                        .expect("Failed to remove from L1");
                }
                black_box(
                    cache
                        .cache_manager()
                        .get(&key)
                        .await
                        .expect("Failed to get cache"),
                );
            });
        });
    });
}
/// Benchmarks `get` latency for keys that exist in no tier: every lookup
/// uses a fresh random key, so each iteration walks the full miss path.
fn bench_cache_miss(c: &mut Criterion) {
    let (cache, rt) = setup_cache();
    c.bench_function("cache_miss", |b| {
        b.iter(|| {
            rt.block_on(async {
                let key = format!("bench:miss:{}", rand::random::<u32>());
                black_box(
                    cache
                        .cache_manager()
                        .get(&key)
                        .await
                        // `expect` keeps the error in the panic message.
                        .expect("Failed to get cache"),
                );
            });
        });
    });
}
/// Benchmarks `get_or_compute_with` on guaranteed misses, where the compute
/// closure simulates a backend call of 1, 10, or 50 ms.
fn bench_compute_on_miss(c: &mut Criterion) {
    let (cache, rt) = setup_cache();
    let mut group = c.benchmark_group("compute_on_miss");
    for delay_ms in &[1, 10, 50] {
        let delay = Duration::from_millis(*delay_ms);
        group.bench_with_input(BenchmarkId::from_parameter(delay_ms), delay_ms, |b, _| {
            b.iter(|| {
                rt.block_on(async {
                    // Fresh random key per iteration forces the compute path.
                    let key = format!("bench:compute:{}", rand::random::<u32>());
                    let data = test_data(1024);
                    cache
                        .cache_manager()
                        .get_or_compute_with(&key, CacheStrategy::ShortTerm, || {
                            let d = data.clone();
                            async move {
                                // Simulated backend latency.
                                tokio::time::sleep(delay).await;
                                Ok(d)
                            }
                        })
                        .await
                        // `expect` keeps the error in the panic message.
                        .expect("Failed to get/compute");
                });
            });
        });
    }
    group.finish();
}
/// Benchmarks the typed API: one miss-and-store followed by one hit on the
/// same key, per iteration.
fn bench_typed_cache(c: &mut Criterion) {
    use serde::{Deserialize, Serialize};
    /// Representative typed value round-tripped through serialization.
    #[derive(Debug, Clone, Serialize, Deserialize)]
    struct User {
        id: u64,
        name: String,
        email: String,
        profile: String,
    }
    let (cache, rt) = setup_cache();
    // Build the ~1 KiB template value ONCE, outside the timed loop, so the
    // measurement covers cache work rather than repeated string allocation.
    // (Previously the whole struct was constructed inside `b.iter`.)
    let user = User {
        id: 123,
        name: "Test User".to_string(),
        email: "test@example.com".to_string(),
        profile: "x".repeat(1024),
    };
    c.bench_function("typed_cache_set_get", |b| {
        b.iter(|| {
            rt.block_on(async {
                let key = format!("bench:typed:{}", rand::random::<u32>());
                // First call misses and stores the value...
                cache
                    .cache_manager()
                    .get_or_compute_typed(&key, CacheStrategy::ShortTerm, || {
                        let u = user.clone();
                        async move { Ok::<User, CacheError>(u) }
                    })
                    .await
                    // `expect` keeps the error in the panic message.
                    .expect("Failed to get/compute typed");
                // ...the second call must hit; a panicking compute closure
                // turns any accidental miss into a loud failure.
                black_box(
                    cache
                        .cache_manager()
                        .get_or_compute_typed::<User, _, _>(
                            &key,
                            CacheStrategy::ShortTerm,
                            || async {
                                panic!("Should not compute");
                            },
                        )
                        .await
                        .expect("Failed to get/compute typed"),
                );
            });
        });
    });
}
/// Benchmarks `set_with_strategy` for a fixed 1 KiB payload across every
/// strategy variant, to expose per-strategy overhead differences.
fn bench_cache_strategies(c: &mut Criterion) {
    let (cache, rt) = setup_cache();
    let mut group = c.benchmark_group("cache_strategies");
    let data = test_data(1024);
    let strategies = vec![
        ("realtime", CacheStrategy::RealTime),
        ("short_term", CacheStrategy::ShortTerm),
        ("medium_term", CacheStrategy::MediumTerm),
        ("long_term", CacheStrategy::LongTerm),
        ("custom", CacheStrategy::Custom(Duration::from_secs(60))),
    ];
    for (name, strategy) in &strategies {
        group.bench_function(*name, |b| {
            b.iter(|| {
                rt.block_on(async {
                    // Fresh key per iteration: measure inserts, not overwrites.
                    let key = format!("bench:strategy:{}", rand::random::<u32>());
                    cache
                        .cache_manager()
                        .set_with_strategy(&key, black_box(data.clone()), strategy.clone())
                        .await
                        // `expect` keeps the error in the panic message.
                        .expect("Failed to set cache");
                });
            });
        });
    }
    group.finish();
}
// Register every benchmark function in one Criterion group and generate the
// `main` entry point that `cargo bench` invokes.
criterion_group!(
    benches,
    bench_cache_set,
    bench_l1_hit,
    bench_l2_hit,
    bench_cache_miss,
    bench_compute_on_miss,
    bench_typed_cache,
    bench_cache_strategies
);
criterion_main!(benches);