#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod tests {
    //! Unit tests for the adaptive multi-tier cache: eviction policies,
    //! cache tiers, access-pattern tracking, configuration defaults,
    //! statistics containers, and the predictive cache warmer.

    use super::super::advanced_strategies_cache::*;
    use super::super::advanced_strategies_predictor::*;
    use super::super::advanced_strategies_types::*;
    use anyhow::Result;
    use chrono::Utc;
    use rustc_hash::FxHashMap;
    use std::sync::atomic::Ordering;
    use std::sync::Arc;
    use std::time::Duration;

    // --- EvictionPolicy -------------------------------------------------

    #[test]
    fn test_eviction_policy_equality() {
        assert_eq!(EvictionPolicy::LRU, EvictionPolicy::LRU);
        assert_ne!(EvictionPolicy::LRU, EvictionPolicy::LFU);
    }

    #[test]
    fn test_eviction_policy_clone() {
        let policy = EvictionPolicy::Adaptive;
        // EvictionPolicy is Copy, so a plain binding duplicates the value
        // and the original remains usable afterwards.
        let cloned = policy;
        assert_eq!(policy, cloned);
    }

    #[test]
    fn test_eviction_policy_debug() {
        let debug = format!("{:?}", EvictionPolicy::TTL);
        assert!(debug.contains("TTL"));
    }

    #[test]
    fn test_eviction_policy_all_variants() {
        // Compile-time inventory of every variant; update if a new policy
        // is added to the enum.
        let variants = [
            EvictionPolicy::LRU,
            EvictionPolicy::LFU,
            EvictionPolicy::TTL,
            EvictionPolicy::FIFO,
            EvictionPolicy::Random,
            EvictionPolicy::Adaptive,
        ];
        assert_eq!(variants.len(), 6);
    }

    #[test]
    fn test_eviction_policy_serialization() {
        // Round-trip through serde_json must preserve the variant.
        let policy = EvictionPolicy::LRU;
        let json = serde_json::to_string(&policy).unwrap();
        let parsed: EvictionPolicy = serde_json::from_str(&json).unwrap();
        assert_eq!(policy, parsed);
    }

    // --- CacheTier -------------------------------------------------------

    #[test]
    fn test_cache_tier_equality() {
        assert_eq!(CacheTier::L1, CacheTier::L1);
        assert_ne!(CacheTier::L1, CacheTier::L2);
    }

    #[test]
    fn test_cache_tier_hash() {
        use std::collections::HashSet;
        // Distinct tiers must hash to distinct set entries.
        let mut set = HashSet::new();
        set.insert(CacheTier::L1);
        set.insert(CacheTier::L2);
        set.insert(CacheTier::L3);
        assert_eq!(set.len(), 3);
    }

    #[test]
    fn test_cache_tier_all_variants() {
        let variants = [CacheTier::L1, CacheTier::L2, CacheTier::L3];
        assert_eq!(variants.len(), 3);
    }

    #[test]
    fn test_cache_tier_serialization() {
        let tier = CacheTier::L2;
        let json = serde_json::to_string(&tier).unwrap();
        let parsed: CacheTier = serde_json::from_str(&json).unwrap();
        assert_eq!(tier, parsed);
    }

    // --- AccessPattern ---------------------------------------------------

    #[test]
    fn test_access_pattern_creation() {
        let pattern = AccessPattern {
            frequency: 0.5,
            temporal_locality: 0.7,
            spatial_locality: 0.3,
            entropy: 0.2,
            last_access: Utc::now(),
            access_count: 10,
        };
        // Compare the float with a tolerance rather than exact equality.
        assert!((pattern.frequency - 0.5).abs() < 0.001);
        assert_eq!(pattern.access_count, 10);
    }

    #[test]
    fn test_access_pattern_clone() {
        let pattern = AccessPattern {
            frequency: 0.5,
            temporal_locality: 0.7,
            spatial_locality: 0.3,
            entropy: 0.2,
            last_access: Utc::now(),
            access_count: 10,
        };
        let cloned = pattern.clone();
        assert_eq!(pattern.access_count, cloned.access_count);
    }

    #[test]
    fn test_access_pattern_serialization() {
        let pattern = AccessPattern {
            frequency: 0.5,
            temporal_locality: 0.7,
            spatial_locality: 0.3,
            entropy: 0.2,
            last_access: Utc::now(),
            access_count: 10,
        };
        // Field names must survive serialization so external consumers can
        // rely on them.
        let json = serde_json::to_string(&pattern).unwrap();
        assert!(json.contains("frequency"));
        assert!(json.contains("temporal_locality"));
    }

    // --- Configuration defaults ------------------------------------------

    #[test]
    fn test_cache_warming_config_default() {
        let config = AdvancedCacheConfig::default();
        assert!(config.warming_config.auto_warm);
        assert!(config.warming_config.dependency_warming);
        assert!(!config.warming_config.warm_patterns.is_empty());
    }

    #[test]
    fn test_cache_warming_config_max_warm_time() {
        let config = AdvancedCacheConfig::default();
        assert_eq!(config.warming_config.max_warm_time, Duration::from_secs(30));
    }

    #[test]
    fn test_cache_warming_config_patterns() {
        let config = AdvancedCacheConfig::default();
        // Rust sources are warmed by default.
        assert!(config
            .warming_config
            .warm_patterns
            .contains(&"**/*.rs".to_string()));
    }

    #[test]
    fn test_performance_config_default() {
        let config = AdvancedCacheConfig::default();
        assert!(config.performance_config.compression_enabled);
        assert_eq!(config.performance_config.compression_level, 6);
        assert!(config.performance_config.background_cleanup);
        assert!(config.performance_config.stats_enabled);
    }

    #[test]
    fn test_performance_config_cleanup_interval() {
        let config = AdvancedCacheConfig::default();
        assert_eq!(
            config.performance_config.cleanup_interval,
            Duration::from_secs(60)
        );
    }

    // --- Statistics containers -------------------------------------------

    #[test]
    fn test_tier_stats_default() {
        let stats = TierStats::default();
        assert_eq!(stats.entry_count, 0);
        assert_eq!(stats.memory_usage, 0);
    }

    #[test]
    fn test_tier_stats_atomic_operations() {
        let stats = TierStats::default();
        // Hit counters are atomics so they can be bumped without &mut.
        stats.hits.fetch_add(5, Ordering::Relaxed);
        assert_eq!(stats.hits.load(Ordering::Relaxed), 5);
    }

    #[test]
    fn test_pattern_stats_default() {
        let stats = PatternStats::default();
        assert_eq!(stats.avg_frequency, 0.0);
        assert_eq!(stats.avg_temporal_locality, 0.0);
    }

    #[test]
    fn test_performance_stats_default() {
        let stats = PerformanceStats::default();
        assert_eq!(stats.avg_lookup_time, Duration::default());
        assert_eq!(stats.compression_efficiency, 0.0);
    }

    #[test]
    fn test_warming_stats_default() {
        let stats = WarmingStats::default();
        assert_eq!(stats.warming_success_rate, 0.0);
        assert_eq!(stats.total_warming_time, Duration::default());
    }

    #[test]
    fn test_adaptive_cache_stats_default() {
        let stats = AdaptiveCacheStats::default();
        assert!(stats.tier_stats.is_empty());
    }

    // --- AdaptiveCache basic operations -----------------------------------

    #[tokio::test]
    async fn test_adaptive_cache_basic_operations() -> Result<()> {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        cache.put("key1".to_string(), "value1".to_string()).await?;
        let result = cache.get(&"key1".to_string()).await;
        assert!(result.is_some());
        assert_eq!(result.expect("internal error").as_ref(), "value1");
        Ok(())
    }

    #[tokio::test]
    async fn test_adaptive_cache_remove() -> Result<()> {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        cache.put("key1".to_string(), "value1".to_string()).await?;
        let removed = cache.remove(&"key1".to_string()).await;
        assert!(removed.is_some());
        // A removed key must no longer be retrievable.
        let result = cache.get(&"key1".to_string()).await;
        assert!(result.is_none());
        Ok(())
    }

    #[tokio::test]
    async fn test_adaptive_cache_clear() -> Result<()> {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        cache.put("key1".to_string(), "value1".to_string()).await?;
        cache.put("key2".to_string(), "value2".to_string()).await?;
        cache.clear().await?;
        assert!(cache.get(&"key1".to_string()).await.is_none());
        assert!(cache.get(&"key2".to_string()).await.is_none());
        Ok(())
    }

    #[tokio::test]
    async fn test_adaptive_cache_get_stats() -> Result<()> {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        cache.put("key1".to_string(), "value1".to_string()).await?;
        let _ = cache.get(&"key1".to_string()).await;
        let stats = cache.get_stats();
        // There are exactly three tiers, so per-tier stats can never report
        // more than three entries. (Replaces a former tautological assert.)
        assert!(stats.tier_stats.len() <= 3);
        Ok(())
    }

    #[tokio::test]
    async fn test_adaptive_cache_miss() -> Result<()> {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        let result = cache.get(&"nonexistent".to_string()).await;
        assert!(result.is_none());
        Ok(())
    }

    #[tokio::test]
    async fn test_adaptive_cache_warm_cache() -> Result<()> {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        let keys = vec!["key1".to_string(), "key2".to_string()];
        let requested = keys.len();
        let warmed = cache.warm_cache(keys).await?;
        // warm_cache can never warm more keys than were requested.
        // (Replaces a former `warmed >= 0` assert, which is always true
        // for an unsigned count.)
        assert!(warmed <= requested);
        Ok(())
    }

    #[tokio::test]
    async fn test_adaptive_cache_background_maintenance() -> Result<()> {
        // Smoke test: maintenance must complete without error.
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        cache.put("key1".to_string(), "value1".to_string()).await?;
        cache.background_maintenance().await?;
        Ok(())
    }

    // --- Tiering -----------------------------------------------------------

    #[tokio::test]
    async fn test_cache_tiering() -> Result<()> {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, Vec<u8>> = AdaptiveCache::new(config);
        // 1 KiB should land in a small tier, 2 MiB in a large tier; both
        // must remain retrievable regardless of placement.
        let small_value = vec![0u8; 1024];
        cache.put("small".to_string(), small_value).await?;
        let large_value = vec![0u8; 2 * 1024 * 1024];
        cache.put("large".to_string(), large_value).await?;
        assert!(cache.get(&"small".to_string()).await.is_some());
        assert!(cache.get(&"large".to_string()).await.is_some());
        Ok(())
    }

    #[tokio::test]
    async fn test_cache_medium_size_to_l2() -> Result<()> {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, Vec<u8>> = AdaptiveCache::new(config);
        let medium_value = vec![0u8; 128 * 1024];
        cache.put("medium".to_string(), medium_value).await?;
        assert!(cache.get(&"medium".to_string()).await.is_some());
        Ok(())
    }

    // --- Eviction policies --------------------------------------------------

    #[test]
    fn test_eviction_policies() {
        let mut cache = FxHashMap::default();
        let adaptive_cache: AdaptiveCache<String, String> =
            AdaptiveCache::new(AdvancedCacheConfig::default());
        for i in 0..3 {
            let entry = AdaptiveCacheEntry {
                value: Arc::new(format!("value{}", i)),
                pattern: AccessPattern {
                    frequency: i as f64 * 0.3,
                    temporal_locality: 0.5,
                    spatial_locality: 0.5,
                    entropy: 0.0,
                    last_access: Utc::now(),
                    access_count: i * 10,
                },
                size: 1024,
                tier: CacheTier::L1,
                created_at: Utc::now(),
                expires_at: None,
            };
            cache.insert(format!("key{}", i), entry);
        }
        // LRU eviction removes exactly one entry.
        adaptive_cache.evict_lru(&mut cache);
        assert_eq!(cache.len(), 2);
    }

    #[test]
    fn test_evict_lfu() {
        let mut cache = FxHashMap::default();
        let adaptive_cache: AdaptiveCache<String, String> =
            AdaptiveCache::new(AdvancedCacheConfig::default());
        for i in 0..3 {
            let entry = AdaptiveCacheEntry {
                value: Arc::new(format!("value{}", i)),
                pattern: AccessPattern {
                    frequency: 0.5,
                    temporal_locality: 0.5,
                    spatial_locality: 0.5,
                    entropy: 0.0,
                    last_access: Utc::now(),
                    // key0 gets the lowest access count, so LFU must pick it.
                    access_count: (i + 1) * 10,
                },
                size: 1024,
                tier: CacheTier::L1,
                created_at: Utc::now(),
                expires_at: None,
            };
            cache.insert(format!("key{}", i), entry);
        }
        adaptive_cache.evict_lfu(&mut cache);
        assert_eq!(cache.len(), 2);
        assert!(cache.get("key0").is_none());
    }

    #[test]
    fn test_evict_fifo() {
        let mut cache = FxHashMap::default();
        let adaptive_cache: AdaptiveCache<String, String> =
            AdaptiveCache::new(AdvancedCacheConfig::default());
        for i in 0..3 {
            let entry = AdaptiveCacheEntry {
                value: Arc::new(format!("value{}", i)),
                pattern: AccessPattern {
                    frequency: 0.5,
                    temporal_locality: 0.5,
                    spatial_locality: 0.5,
                    entropy: 0.0,
                    last_access: Utc::now(),
                    access_count: 10,
                },
                size: 1024,
                tier: CacheTier::L1,
                // Stagger creation times so insertion order is well-defined.
                created_at: Utc::now() + chrono::Duration::seconds(i as i64),
                expires_at: None,
            };
            cache.insert(format!("key{}", i), entry);
        }
        adaptive_cache.evict_fifo(&mut cache);
        assert_eq!(cache.len(), 2);
    }

    #[test]
    fn test_evict_random() {
        let mut cache = FxHashMap::default();
        let adaptive_cache: AdaptiveCache<String, String> =
            AdaptiveCache::new(AdvancedCacheConfig::default());
        for i in 0..3 {
            let entry = AdaptiveCacheEntry {
                value: Arc::new(format!("value{}", i)),
                pattern: AccessPattern {
                    frequency: 0.5,
                    temporal_locality: 0.5,
                    spatial_locality: 0.5,
                    entropy: 0.0,
                    last_access: Utc::now(),
                    access_count: 10,
                },
                size: 1024,
                tier: CacheTier::L1,
                created_at: Utc::now(),
                expires_at: None,
            };
            cache.insert(format!("key{}", i), entry);
        }
        // Random eviction: which key is removed is nondeterministic, but
        // exactly one entry must go.
        adaptive_cache.evict_random(&mut cache);
        assert_eq!(cache.len(), 2);
    }

    #[test]
    fn test_evict_adaptive() {
        let mut cache = FxHashMap::default();
        let adaptive_cache: AdaptiveCache<String, String> =
            AdaptiveCache::new(AdvancedCacheConfig::default());
        for i in 0..3 {
            let entry = AdaptiveCacheEntry {
                value: Arc::new(format!("value{}", i)),
                pattern: AccessPattern {
                    frequency: i as f64 * 0.3,
                    temporal_locality: i as f64 * 0.2,
                    spatial_locality: 0.5,
                    entropy: 0.0,
                    last_access: Utc::now(),
                    access_count: i * 10,
                },
                size: 1024,
                tier: CacheTier::L1,
                created_at: Utc::now(),
                expires_at: None,
            };
            cache.insert(format!("key{}", i), entry);
        }
        adaptive_cache.evict_adaptive(&mut cache);
        assert_eq!(cache.len(), 2);
    }

    #[test]
    fn test_evict_ttl() {
        let mut cache = FxHashMap::default();
        let adaptive_cache: AdaptiveCache<String, String> =
            AdaptiveCache::new(AdvancedCacheConfig::default());
        // One entry already expired an hour ago...
        let expired_entry = AdaptiveCacheEntry {
            value: Arc::new("expired".to_string()),
            pattern: AccessPattern {
                frequency: 0.5,
                temporal_locality: 0.5,
                spatial_locality: 0.5,
                entropy: 0.0,
                last_access: Utc::now(),
                access_count: 10,
            },
            size: 1024,
            tier: CacheTier::L1,
            created_at: Utc::now(),
            expires_at: Some(Utc::now() - chrono::Duration::hours(1)),
        };
        cache.insert("expired".to_string(), expired_entry);
        // ...and one still valid for another hour.
        let valid_entry = AdaptiveCacheEntry {
            value: Arc::new("valid".to_string()),
            pattern: AccessPattern {
                frequency: 0.5,
                temporal_locality: 0.5,
                spatial_locality: 0.5,
                entropy: 0.0,
                last_access: Utc::now(),
                access_count: 10,
            },
            size: 1024,
            tier: CacheTier::L1,
            created_at: Utc::now(),
            expires_at: Some(Utc::now() + chrono::Duration::hours(1)),
        };
        cache.insert("valid".to_string(), valid_entry);
        adaptive_cache.evict_ttl(&mut cache);
        assert!(cache.get("expired").is_none());
    }

    // --- CachePredictor -----------------------------------------------------

    #[test]
    fn test_cache_predictor() {
        let predictor: CachePredictor<String> = CachePredictor::new(0.5);
        predictor.record_access("file1.rs".to_string());
        predictor.record_access("file2.rs".to_string());
        predictor.record_access("file3.rs".to_string());
        predictor.record_access("file1.rs".to_string());
        predictor.record_access("file2.rs".to_string());
        // Smoke test: prediction must not panic; the result set depends on
        // the confidence threshold and is not asserted here.
        let predictions = predictor.predict_next(&["file1.rs".to_string()]);
        let _ = predictions;
    }

    #[test]
    fn test_cache_predictor_high_confidence() {
        // With no recorded history, nothing can clear a 0.9 threshold.
        let predictor: CachePredictor<String> = CachePredictor::new(0.9);
        assert!(predictor.predict_value(&"any".to_string()).is_none());
    }

    #[test]
    fn test_cache_predictor_low_confidence() {
        let predictor: CachePredictor<String> = CachePredictor::new(0.1);
        for _ in 0..20 {
            predictor.record_access("a".to_string());
            predictor.record_access("b".to_string());
            predictor.record_access("c".to_string());
        }
        // Smoke test: a strongly repeating pattern with a low threshold must
        // be predictable without panicking.
        let predictions = predictor.predict_next(&["a".to_string()]);
        let _ = predictions;
    }

    #[test]
    fn test_cache_predictor_history_limit() {
        let predictor: CachePredictor<String> = CachePredictor::new(0.5);
        // Overflow the history to confirm it is capped at 1000 entries.
        for i in 0..1100 {
            predictor.record_access(format!("key{}", i));
        }
        let history_len = predictor.access_history_len();
        assert!(history_len <= 1000);
    }

    // --- AdvancedCacheConfig -------------------------------------------------

    #[test]
    fn test_cache_config() {
        let config = AdvancedCacheConfig::default();
        assert_eq!(config.eviction_policy, EvictionPolicy::Adaptive);
        assert!(config.enable_multi_tier);
        assert!(config.enable_predictive);
        assert!(!config.enable_collaborative);
    }

    #[test]
    fn test_cache_config_tier_limits() {
        let config = AdvancedCacheConfig::default();
        // Default tier budgets: L1 = 64 MiB, L2 = 256 MiB, L3 = 1 GiB.
        assert_eq!(
            *config.tier_memory_limits.get(&CacheTier::L1).unwrap(),
            64 * 1024 * 1024
        );
        assert_eq!(
            *config.tier_memory_limits.get(&CacheTier::L2).unwrap(),
            256 * 1024 * 1024
        );
        assert_eq!(
            *config.tier_memory_limits.get(&CacheTier::L3).unwrap(),
            1024 * 1024 * 1024
        );
    }

    #[test]
    fn test_cache_config_serialization() {
        let config = AdvancedCacheConfig::default();
        let json = serde_json::to_string(&config).unwrap();
        assert!(json.contains("eviction_policy"));
        assert!(json.contains("enable_multi_tier"));
    }

    // --- Promotion & scoring ---------------------------------------------------

    #[test]
    fn test_should_promote_high_frequency() {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        // High frequency alone is enough to promote.
        let pattern = AccessPattern {
            frequency: 0.8,
            temporal_locality: 0.3,
            spatial_locality: 0.3,
            entropy: 0.0,
            last_access: Utc::now(),
            access_count: 10,
        };
        assert!(cache.should_promote(&pattern));
    }

    #[test]
    fn test_should_promote_high_temporal_locality() {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        // High temporal locality alone is enough to promote.
        let pattern = AccessPattern {
            frequency: 0.3,
            temporal_locality: 0.8,
            spatial_locality: 0.3,
            entropy: 0.0,
            last_access: Utc::now(),
            access_count: 10,
        };
        assert!(cache.should_promote(&pattern));
    }

    #[test]
    fn test_should_not_promote_low_scores() {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        let pattern = AccessPattern {
            frequency: 0.3,
            temporal_locality: 0.3,
            spatial_locality: 0.3,
            entropy: 0.0,
            last_access: Utc::now(),
            access_count: 10,
        };
        assert!(!cache.should_promote(&pattern));
    }

    #[test]
    fn test_calculate_eviction_score() {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        let pattern = AccessPattern {
            frequency: 0.5,
            temporal_locality: 0.5,
            spatial_locality: 0.5,
            entropy: 0.0,
            last_access: Utc::now(),
            access_count: 10,
        };
        // Eviction scores are normalised into (0.0, 1.0].
        let score = cache.calculate_eviction_score(&pattern);
        assert!(score > 0.0);
        assert!(score <= 1.0);
    }

    // --- Tier selection & expiration ----------------------------------------------

    #[test]
    fn test_determine_initial_tier_small() {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        let tier = cache.determine_initial_tier(&"key".to_string(), 1024);
        assert_eq!(tier, CacheTier::L1);
    }

    #[test]
    fn test_determine_initial_tier_medium() {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        let tier = cache.determine_initial_tier(&"key".to_string(), 128 * 1024);
        assert_eq!(tier, CacheTier::L2);
    }

    #[test]
    fn test_determine_initial_tier_large() {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        let tier = cache.determine_initial_tier(&"key".to_string(), 2 * 1024 * 1024);
        assert_eq!(tier, CacheTier::L3);
    }

    #[test]
    fn test_get_or_create_pattern_new() {
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        // An unseen key starts with a zeroed pattern.
        let pattern = cache.get_or_create_pattern(&"new_key".to_string());
        assert_eq!(pattern.access_count, 0);
        assert_eq!(pattern.frequency, 0.0);
    }

    #[test]
    fn test_calculate_expiration_ttl_policy() {
        let mut config = AdvancedCacheConfig::default();
        config.eviction_policy = EvictionPolicy::TTL;
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        let expiration_l1 = cache.calculate_expiration(CacheTier::L1);
        assert!(expiration_l1.is_some());
        let expiration_l3 = cache.calculate_expiration(CacheTier::L3);
        assert!(expiration_l3.is_some());
        // Colder tiers get longer TTLs than hotter ones.
        assert!(expiration_l3.unwrap() > expiration_l1.unwrap());
    }

    #[test]
    fn test_calculate_expiration_non_ttl_policy() {
        // Default (non-TTL) policy assigns no expiration at all.
        let config = AdvancedCacheConfig::default();
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        let expiration = cache.calculate_expiration(CacheTier::L1);
        assert!(expiration.is_none());
    }

    #[tokio::test]
    async fn test_background_maintenance_disabled() -> Result<()> {
        // With background cleanup disabled, maintenance must still return Ok.
        let mut config = AdvancedCacheConfig::default();
        config.performance_config.background_cleanup = false;
        let cache: AdaptiveCache<String, String> = AdaptiveCache::new(config);
        cache.background_maintenance().await?;
        Ok(())
    }

    #[test]
    fn test_evict_from_empty_cache() {
        // Evicting from an empty tier is a no-op, not an error.
        let mut cache: FxHashMap<String, AdaptiveCacheEntry<String>> = FxHashMap::default();
        let adaptive_cache: AdaptiveCache<String, String> =
            AdaptiveCache::new(AdvancedCacheConfig::default());
        let result = adaptive_cache.evict_from_tier(&mut cache, CacheTier::L1);
        assert!(result.is_ok());
    }
}
#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod property_tests {
    //! Property-based smoke tests that keep the proptest harness wired up
    //! for this module.

    use proptest::prelude::*;

    proptest! {
        // Accepts arbitrary string input without panicking.
        #[test]
        fn basic_property_stability(_input in ".*") {
            prop_assert!(true);
        }

        // Every value drawn from 0..1000 stays strictly below the exclusive
        // upper bound. (Binding renamed from `_x`: an underscore prefix marks
        // a value as unused, but this one is used in the assertion; the bound
        // is tightened from 1001 to match the generator range.)
        #[test]
        fn module_consistency_check(x in 0u32..1000) {
            prop_assert!(x < 1000);
        }
    }
}