use std::sync::Arc;
use std::time::{Duration, Instant};
/// Runs `operation` exactly once and returns its result paired with the
/// wall-clock time the call took.
fn measure_time<F, R>(operation: F) -> (R, Duration)
where
    F: FnOnce() -> R,
{
    let started = Instant::now();
    let output = operation();
    let elapsed = started.elapsed();
    (output, elapsed)
}
/// O(1) complexity guards for the LRU policy (`LruCore`).
///
/// Keys are `i32`, values are `Arc<i32>`; each test delegates to the shared
/// `verify_*_complexity` helpers in this file with "LRU" as the report label.
mod complexity_lru {
use super::*;
use cachekit::policy::lru::LruCore;
use std::sync::Arc;
// Average `get` latency must not grow with cache capacity.
#[test]
fn test_get_is_o1() {
verify_get_complexity::<i32, Arc<i32>, _, _>(LruCore::new, "LRU");
}
// Average `insert` latency must not grow with cache capacity.
#[test]
fn test_insert_is_o1() {
verify_insert_complexity::<i32, Arc<i32>, _, _>(LruCore::new, "LRU");
}
// Average insert-with-eviction latency must not grow with cache capacity.
#[test]
fn test_eviction_is_o1() {
verify_eviction_complexity::<i32, Arc<i32>, _, _>(LruCore::new, "LRU");
}
}
/// O(1) complexity guards for the LRU-K policy (`LrukCache`).
///
/// Keys and values are plain `i32`; each test delegates to the shared
/// `verify_*_complexity` helpers in this file with "LRU-K" as the label.
mod complexity_lru_k {
use super::*;
use cachekit::policy::lru_k::LrukCache;
// Average `get` latency must not grow with cache capacity.
#[test]
fn test_get_is_o1() {
verify_get_complexity::<i32, i32, _, _>(LrukCache::new, "LRU-K");
}
// Average `insert` latency must not grow with cache capacity.
#[test]
fn test_insert_is_o1() {
verify_insert_complexity::<i32, i32, _, _>(LrukCache::new, "LRU-K");
}
// Average insert-with-eviction latency must not grow with cache capacity.
#[test]
fn test_eviction_is_o1() {
verify_eviction_complexity::<i32, i32, _, _>(LrukCache::new, "LRU-K");
}
}
/// O(1) complexity guards for the LFU policy (`LfuCache`).
///
/// Keys are `i32`, values are `Arc<i32>`; each test delegates to the shared
/// `verify_*_complexity` helpers in this file with "LFU" as the label.
mod complexity_lfu {
use super::*;
use cachekit::policy::lfu::LfuCache;
use std::sync::Arc;
// Average `get` latency must not grow with cache capacity.
#[test]
fn test_get_is_o1() {
verify_get_complexity::<i32, Arc<i32>, _, _>(LfuCache::new, "LFU");
}
// Average `insert` latency must not grow with cache capacity.
#[test]
fn test_insert_is_o1() {
verify_insert_complexity::<i32, Arc<i32>, _, _>(LfuCache::new, "LFU");
}
// Average insert-with-eviction latency must not grow with cache capacity.
#[test]
fn test_eviction_is_o1() {
verify_eviction_complexity::<i32, Arc<i32>, _, _>(LfuCache::new, "LFU");
}
}
/// O(1) complexity guards for the Clock policy (`ClockCache`).
///
/// Keys and values are plain `i32`; each test delegates to the shared
/// `verify_*_complexity` helpers in this file with "Clock" as the label.
mod complexity_clock {
use super::*;
use cachekit::policy::clock::ClockCache;
// Average `get` latency must not grow with cache capacity.
#[test]
fn test_get_is_o1() {
verify_get_complexity::<i32, i32, _, _>(ClockCache::new, "Clock");
}
// Average `insert` latency must not grow with cache capacity.
#[test]
fn test_insert_is_o1() {
verify_insert_complexity::<i32, i32, _, _>(ClockCache::new, "Clock");
}
// Average insert-with-eviction latency must not grow with cache capacity.
#[test]
fn test_eviction_is_o1() {
verify_eviction_complexity::<i32, i32, _, _>(ClockCache::new, "Clock");
}
}
/// O(1) complexity guards for the S3-FIFO policy (`S3FifoCache`).
///
/// Keys and values are plain `i32`; each test delegates to the shared
/// `verify_*_complexity` helpers in this file with "S3-FIFO" as the label.
mod complexity_s3_fifo {
use super::*;
use cachekit::policy::s3_fifo::S3FifoCache;
// Average `get` latency must not grow with cache capacity.
#[test]
fn test_get_is_o1() {
verify_get_complexity::<i32, i32, _, _>(S3FifoCache::new, "S3-FIFO");
}
// Average `insert` latency must not grow with cache capacity.
#[test]
fn test_insert_is_o1() {
verify_insert_complexity::<i32, i32, _, _>(S3FifoCache::new, "S3-FIFO");
}
// Average insert-with-eviction latency must not grow with cache capacity.
#[test]
fn test_eviction_is_o1() {
verify_eviction_complexity::<i32, i32, _, _>(S3FifoCache::new, "S3-FIFO");
}
}
/// Checks that `get` latency for the cache built by `create_cache` does not
/// scale with cache size (i.e. behaves as O(1)).
///
/// Fills caches of doubling sizes, times a fixed number of `get` hits per
/// size, prints the per-size averages, and asserts two properties:
/// per-step growth stays bounded, and cumulative growth over the whole
/// size range stays clearly sub-linear.
///
/// Panics (failing the test) when either check is violated.
fn verify_get_complexity<K, V, C, F>(mut create_cache: F, policy_name: &str)
where
    K: std::hash::Hash + Eq + Clone + From<i32>,
    V: Clone + From<i32>,
    C: cachekit::traits::Cache<K, V>,
    F: FnMut(usize) -> C,
{
    let sizes = vec![1000, 2000, 4000, 8000];
    let mut times = Vec::with_capacity(sizes.len());
    for &size in &sizes {
        let mut cache = create_cache(size);
        // Pre-populate so every timed lookup below is a hit.
        for i in 0..size as i32 {
            cache.insert(K::from(i), V::from(i));
        }
        let iterations = 10000;
        let (_, duration) = measure_time(|| {
            for i in 0..iterations {
                let key = K::from(i % size as i32);
                // black_box keeps the optimizer from eliding the lookup.
                std::hint::black_box(cache.get(&key));
            }
        });
        let avg_time = duration.as_nanos() as f64 / iterations as f64;
        times.push(avg_time);
        println!(
            "[{}] Size: {}, Avg get time: {:.2} ns",
            policy_name, size, avg_time
        );
    }
    // Per-step guard: catches catastrophic (super-linear) blowups between
    // consecutive size doublings. Deliberately loose (15x) to absorb
    // timing noise.
    for i in 1..times.len() {
        let size_ratio = sizes[i] as f64 / sizes[i - 1] as f64;
        let time_ratio = times[i] / times[i - 1];
        println!(
            "[{}] Size {}→{} ({:.2}x): time {:.1}ns→{:.1}ns ({:.2}x)",
            policy_name,
            sizes[i - 1],
            sizes[i],
            size_ratio,
            times[i - 1],
            times[i],
            time_ratio
        );
        assert!(
            time_ratio < 15.0,
            "[{}] Get operation appears to be O(n), not O(1):\n\
             Size increased by {:.2}x but time increased by {:.2}x\n\
             Expected: time_ratio << size_ratio for O(1) operations",
            policy_name,
            size_ratio,
            time_ratio
        );
    }
    // The per-step check alone cannot detect O(n): doubling the size only
    // doubles an O(n) cost (ratio ~2.0, far under 15.0). Over the full 8x
    // size range an O(1) get stays roughly flat while O(n) grows ~8x, so
    // require cumulative growth to remain clearly sub-linear (6x leaves
    // generous headroom for noise).
    let overall_size_ratio = *sizes.last().unwrap() as f64 / sizes[0] as f64;
    let overall_time_ratio = times[times.len() - 1] / times[0];
    assert!(
        overall_time_ratio < overall_size_ratio * 0.75,
        "[{}] Get operation appears to scale with cache size (O(n) suspected):\n\
         Size increased by {:.2}x overall but time increased by {:.2}x",
        policy_name,
        overall_size_ratio,
        overall_time_ratio
    );
}
/// Checks that `insert` latency for the cache built by `create_cache` does
/// not scale with cache size (i.e. behaves as O(1)).
///
/// Fills caches of doubling sizes from empty while timing the whole fill,
/// prints the per-size averages, and asserts both bounded per-step growth
/// and clearly sub-linear cumulative growth.
///
/// Panics (failing the test) when either check is violated.
fn verify_insert_complexity<K, V, C, F>(mut create_cache: F, policy_name: &str)
where
    K: std::hash::Hash + Eq + Clone + From<i32>,
    V: Clone + From<i32>,
    C: cachekit::traits::Cache<K, V>,
    F: FnMut(usize) -> C,
{
    let sizes = vec![1000, 2000, 4000, 8000];
    let mut times = Vec::with_capacity(sizes.len());
    for &size in &sizes {
        let mut cache = create_cache(size);
        // Time `size` fresh inserts into an initially empty cache.
        let (_, duration) = measure_time(|| {
            for i in 0..size as i32 {
                cache.insert(K::from(i), V::from(i));
            }
        });
        let avg_time = duration.as_nanos() as f64 / size as f64;
        times.push(avg_time);
        println!(
            "[{}] Size: {}, Avg insert time: {:.2} ns",
            policy_name, size, avg_time
        );
    }
    // Per-step guard: catches catastrophic (super-linear) blowups between
    // consecutive size doublings. Deliberately loose (15x) to absorb
    // timing noise.
    for i in 1..times.len() {
        let size_ratio = sizes[i] as f64 / sizes[i - 1] as f64;
        let time_ratio = times[i] / times[i - 1];
        println!(
            "[{}] Size {}→{} ({:.2}x): time {:.1}ns→{:.1}ns ({:.2}x)",
            policy_name,
            sizes[i - 1],
            sizes[i],
            size_ratio,
            times[i - 1],
            times[i],
            time_ratio
        );
        assert!(
            time_ratio < 15.0,
            "[{}] Insert operation appears to be O(n), not O(1):\n\
             Size increased by {:.2}x but time increased by {:.2}x",
            policy_name,
            size_ratio,
            time_ratio
        );
    }
    // The per-step check alone cannot detect O(n): doubling the size only
    // doubles an O(n) cost (ratio ~2.0, far under 15.0). Over the full 8x
    // size range an O(1) insert stays roughly flat while O(n) grows ~8x,
    // so require cumulative growth to remain clearly sub-linear (6x leaves
    // generous headroom for noise).
    let overall_size_ratio = *sizes.last().unwrap() as f64 / sizes[0] as f64;
    let overall_time_ratio = times[times.len() - 1] / times[0];
    assert!(
        overall_time_ratio < overall_size_ratio * 0.75,
        "[{}] Insert operation appears to scale with cache size (O(n) suspected):\n\
         Size increased by {:.2}x overall but time increased by {:.2}x",
        policy_name,
        overall_size_ratio,
        overall_time_ratio
    );
}
/// Checks that insert-with-eviction latency for the cache built by
/// `create_cache` does not scale with cache size (i.e. behaves as O(1)).
///
/// Fills caches of doubling sizes to capacity, then times a fixed number
/// of overflowing inserts (each of which must evict), prints the per-size
/// averages, and asserts both bounded per-step growth and clearly
/// sub-linear cumulative growth.
///
/// Panics (failing the test) when either check is violated.
fn verify_eviction_complexity<K, V, C, F>(mut create_cache: F, policy_name: &str)
where
    K: std::hash::Hash + Eq + Clone + From<i32>,
    V: Clone + From<i32>,
    C: cachekit::traits::Cache<K, V>,
    F: FnMut(usize) -> C,
{
    let sizes = vec![1000, 2000, 4000, 8000];
    let mut times = Vec::with_capacity(sizes.len());
    for &size in &sizes {
        let mut cache = create_cache(size);
        // Fill to capacity so each timed insert below triggers an eviction.
        for i in 0..size as i32 {
            cache.insert(K::from(i), V::from(i));
        }
        let iterations = 1000;
        let (_, duration) = measure_time(|| {
            for i in size as i32..(size as i32 + iterations) {
                cache.insert(K::from(i), V::from(i));
            }
        });
        let avg_time = duration.as_nanos() as f64 / iterations as f64;
        times.push(avg_time);
        println!(
            "[{}] Size: {}, Avg eviction time: {:.2} ns",
            policy_name, size, avg_time
        );
    }
    // Per-step guard: catches catastrophic (super-linear) blowups between
    // consecutive size doublings. Deliberately loose (15x) to absorb
    // timing noise.
    for i in 1..times.len() {
        let size_ratio = sizes[i] as f64 / sizes[i - 1] as f64;
        let time_ratio = times[i] / times[i - 1];
        println!(
            "[{}] Size {}→{} ({:.2}x): time {:.1}ns→{:.1}ns ({:.2}x)",
            policy_name,
            sizes[i - 1],
            sizes[i],
            size_ratio,
            times[i - 1],
            times[i],
            time_ratio
        );
        assert!(
            time_ratio < 15.0,
            "[{}] Eviction operation appears to be O(n), not O(1):\n\
             Size increased by {:.2}x but time increased by {:.2}x",
            policy_name,
            size_ratio,
            time_ratio
        );
    }
    // The per-step check alone cannot detect O(n): doubling the size only
    // doubles an O(n) cost (ratio ~2.0, far under 15.0). Over the full 8x
    // size range an O(1) eviction stays roughly flat while O(n) grows ~8x,
    // so require cumulative growth to remain clearly sub-linear (6x leaves
    // generous headroom for noise).
    let overall_size_ratio = *sizes.last().unwrap() as f64 / sizes[0] as f64;
    let overall_time_ratio = times[times.len() - 1] / times[0];
    assert!(
        overall_time_ratio < overall_size_ratio * 0.75,
        "[{}] Eviction operation appears to scale with cache size (O(n) suspected):\n\
         Size increased by {:.2}x overall but time increased by {:.2}x",
        policy_name,
        overall_size_ratio,
        overall_time_ratio
    );
}
/// Coarse wall-clock and capacity regression guards for `LruCore`.
mod regression_guards {
    use super::*;
    use cachekit::policy::lru::LruCore;
    use cachekit::traits::Cache;

    /// Wall-clock guard: 50K gets and 50K evicting inserts against a
    /// 10K-entry LRU must each finish far inside a 10-second budget.
    #[test]
    fn test_operations_are_reasonably_fast() {
        let mut lru = LruCore::new(10000);
        (0..10000).for_each(|key| {
            lru.insert(key, Arc::new(key));
        });
        let iterations = 50_000;
        // Time a burst of hits against the warm cache.
        let (_, get_duration) = measure_time(|| {
            (0..iterations).for_each(|key| {
                lru.get(&(key % 10000));
            })
        });
        // Time a burst of inserts that each evict the LRU entry.
        let (_, insert_duration) = measure_time(|| {
            (10000..10000 + iterations).for_each(|key| {
                lru.insert(key, Arc::new(key));
            })
        });
        let avg_get_ns = get_duration.as_nanos() as f64 / iterations as f64;
        let avg_insert_ns = insert_duration.as_nanos() as f64 / iterations as f64;
        println!(
            "50K get operations: {:.2}ms (avg: {:.1}ns/op)",
            get_duration.as_secs_f64() * 1000.0,
            avg_get_ns
        );
        println!(
            "50K insert+evict operations: {:.2}ms (avg: {:.1}ns/op)",
            insert_duration.as_secs_f64() * 1000.0,
            avg_insert_ns
        );
        assert!(
            get_duration < Duration::from_secs(10),
            "Get operations too slow: took {:?} for 50K ops (>10s indicates O(n) or worse)",
            get_duration
        );
        assert!(
            insert_duration < Duration::from_secs(10),
            "Insert+evict operations too slow: took {:?} for 50K ops (>10s indicates O(n) or worse)",
            insert_duration
        );
    }

    /// Capacity guard: once the cache is full, every further insert must
    /// evict an entry so `len()` stays pinned at capacity.
    #[test]
    fn test_memory_stability() {
        let capacity = 1000;
        let mut store = LruCore::new(capacity);
        // Fill exactly to capacity with 1 KiB payloads.
        (0..capacity).for_each(|key| {
            store.insert(key, Arc::new(vec![0u8; 1024]));
        });
        assert_eq!(store.len(), capacity);
        // Push 10K extra entries through the already-full cache.
        (capacity..capacity + 10000).for_each(|key| {
            store.insert(key, Arc::new(vec![0u8; 1024]));
        });
        assert_eq!(
            store.len(),
            capacity,
            "Cache should maintain capacity, not grow unbounded"
        );
    }
}