use crate::search::results::FinalSearchResults;
use lru::LruCache;
use std::hash::Hash;
use std::num::NonZeroUsize;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::{Arc, RwLock};
use std::time::{SystemTime, UNIX_EPOCH};
use tracing::{debug, info};
/// Default maximum number of entries held by the LRU cache.
const DEFAULT_CACHE_SIZE: usize = 1000;
/// Default time-to-live for cached entries, in seconds (1 hour).
/// A TTL of 0 disables expiration (see `CacheEntry::is_expired`).
const DEFAULT_TTL_SECONDS: u64 = 3600;
/// A cached search result together with its creation timestamp,
/// used to decide TTL expiration.
#[derive(Debug, Clone)]
struct CacheEntry {
    // The cached search results returned to callers (cloned on hit).
    results: FinalSearchResults,
    // Unix timestamp (whole seconds) at insertion time.
    created_at: u64,
}
impl CacheEntry {
    /// Wraps `results` and stamps the entry with the current Unix time.
    fn new(results: FinalSearchResults) -> Self {
        Self {
            results,
            created_at: Self::now_secs(),
        }
    }

    /// Returns `true` when this entry is at least `ttl_seconds` old.
    ///
    /// A TTL of 0 means entries never expire.
    fn is_expired(&self, ttl_seconds: u64) -> bool {
        if ttl_seconds == 0 {
            return false;
        }
        // saturating_sub: if the system clock moves backwards past
        // `created_at`, treat the age as 0 instead of panicking on u64
        // underflow (debug builds) or wrapping to a huge age that would
        // spuriously expire the entry (release builds).
        Self::now_secs().saturating_sub(self.created_at) >= ttl_seconds
    }

    /// Current Unix time in whole seconds.
    fn now_secs() -> u64 {
        SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("system clock is before the Unix epoch")
            .as_secs()
    }
}
/// Thread-safe LRU cache for search results with optional TTL expiration.
///
/// Cheap to clone: the manual `Clone` impl shares the underlying cache
/// and statistics counters across all handles via `Arc`.
pub struct SearchCache {
    // LRU store guarded by a read-write lock; shared across clones.
    cache: Arc<RwLock<LruCache<CacheKey, CacheEntry>>>,
    // Entry time-to-live in seconds; 0 disables expiration.
    ttl_seconds: u64,
    // Lookups served from a live (non-expired) entry.
    hits: Arc<AtomicU64>,
    // Lookups that found nothing, or found an expired entry.
    misses: Arc<AtomicU64>,
    // Entries displaced by LRU capacity pressure on `put`.
    evictions: Arc<AtomicU64>,
    // Entries removed because their TTL elapsed.
    expirations: Arc<AtomicU64>,
}
impl Default for SearchCache {
fn default() -> Self {
Self::new(DEFAULT_CACHE_SIZE)
}
}
impl SearchCache {
pub fn with_ttl(capacity: usize, ttl_seconds: u64) -> Self {
info!(
"Creating search result cache (capacity: {}, ttl: {}s)",
capacity, ttl_seconds
);
Self {
cache: Arc::new(RwLock::new(LruCache::new(
NonZeroUsize::new(capacity).expect("Cache capacity must be > 0"),
))),
ttl_seconds,
hits: Arc::new(AtomicU64::new(0)),
misses: Arc::new(AtomicU64::new(0)),
evictions: Arc::new(AtomicU64::new(0)),
expirations: Arc::new(AtomicU64::new(0)),
}
}
pub fn new(capacity: usize) -> Self {
Self::with_ttl(capacity, DEFAULT_TTL_SECONDS)
}
pub fn get(&self, key: &CacheKey) -> Option<FinalSearchResults> {
let mut cache = self.cache.write().unwrap();
match cache.get(key) {
Some(entry) => {
if entry.is_expired(self.ttl_seconds) {
cache.pop(key);
self.expirations.fetch_add(1, Ordering::Relaxed);
self.misses.fetch_add(1, Ordering::Relaxed);
debug!("Cache EXPIRED: {:?}", key);
None
} else {
self.hits.fetch_add(1, Ordering::Relaxed);
debug!("Cache HIT: {:?}", key);
Some(entry.results.clone())
}
}
None => {
self.misses.fetch_add(1, Ordering::Relaxed);
debug!("Cache MISS: {:?}", key);
None
}
}
}
pub fn put(&self, key: CacheKey, results: FinalSearchResults) {
let mut cache = self.cache.write().unwrap();
if cache.len() >= cache.cap().get() && !cache.contains(&key) {
self.evictions.fetch_add(1, Ordering::Relaxed);
debug!("Cache EVICTION (capacity: {})", cache.cap());
}
let entry = CacheEntry::new(results);
cache.put(key, entry);
debug!("Cache PUT: entry added");
}
pub fn stats(&self) -> CacheStats {
let cache = self.cache.read().unwrap();
CacheStats {
capacity: cache.cap().get(),
size: cache.len(),
hits: self.hits.load(Ordering::Relaxed),
misses: self.misses.load(Ordering::Relaxed),
evictions: self.evictions.load(Ordering::Relaxed),
expirations: self.expirations.load(Ordering::Relaxed),
ttl_seconds: self.ttl_seconds,
}
}
pub fn clear(&self) {
let mut cache = self.cache.write().unwrap();
cache.clear();
info!("Cache cleared");
}
pub fn reset_stats(&self) {
self.hits.store(0, Ordering::Relaxed);
self.misses.store(0, Ordering::Relaxed);
self.evictions.store(0, Ordering::Relaxed);
self.expirations.store(0, Ordering::Relaxed);
info!("Cache statistics reset");
}
pub fn cleanup_expired(&self) -> usize {
let mut cache = self.cache.write().unwrap();
let mut expired_keys = Vec::new();
for (key, entry) in cache.iter() {
if entry.is_expired(self.ttl_seconds) {
expired_keys.push(key.clone());
}
}
let count = expired_keys.len();
for key in expired_keys {
cache.pop(&key);
}
if count > 0 {
self.expirations.fetch_add(count as u64, Ordering::Relaxed);
debug!("Cache cleanup: removed {} expired entries", count);
}
count
}
pub fn invalidate_by_repo(&self, repo_id: i64) -> usize {
let mut cache = self.cache.write().unwrap();
let mut keys_to_remove = Vec::new();
for (key, _) in cache.iter() {
if key.repo_id == repo_id {
keys_to_remove.push(key.clone());
}
}
let count = keys_to_remove.len();
for key in keys_to_remove {
cache.pop(&key);
}
if count > 0 {
info!(
"Cache invalidation: removed {} entries for repo {}",
count, repo_id
);
}
count
}
pub fn invalidate_by_worktree(&self, worktree_id: i64) -> usize {
let mut cache = self.cache.write().unwrap();
let mut keys_to_remove = Vec::new();
for (key, _) in cache.iter() {
if key.worktree_id == Some(worktree_id) {
keys_to_remove.push(key.clone());
}
}
let count = keys_to_remove.len();
for key in keys_to_remove {
cache.pop(&key);
}
if count > 0 {
info!(
"Cache invalidation: removed {} entries for worktree {}",
count, worktree_id
);
}
count
}
}
impl Clone for SearchCache {
fn clone(&self) -> Self {
Self {
cache: Arc::clone(&self.cache),
ttl_seconds: self.ttl_seconds,
hits: Arc::clone(&self.hits),
misses: Arc::clone(&self.misses),
evictions: Arc::clone(&self.evictions),
expirations: Arc::clone(&self.expirations),
}
}
}
/// Identity of a cached search: the normalized query plus every scoping
/// parameter that affects the result set.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CacheKey {
    // Query text, trimmed and lowercased by `CacheKey::new`.
    pub query: String,
    // Repository the search is scoped to.
    pub repo_id: i64,
    // Optional worktree scope; `None` means repo-wide.
    pub worktree_id: Option<i64>,
    // Maximum number of results requested.
    pub limit: usize,
}
impl CacheKey {
    /// Builds a key, normalizing the query (trimmed and lowercased) so that
    /// queries differing only in case or surrounding whitespace share one
    /// cache slot.
    pub fn new(query: &str, repo_id: i64, worktree_id: Option<i64>, limit: usize) -> Self {
        let query = query.trim().to_lowercase();
        Self {
            query,
            repo_id,
            worktree_id,
            limit,
        }
    }

    /// Convenience constructor that pulls the scoping fields out of a
    /// `SearchOptions`.
    pub fn from_options(query: &str, options: &crate::search::SearchOptions) -> Self {
        Self::new(query, options.repo_id, options.worktree_id, options.limit)
    }
}
/// Point-in-time snapshot of cache metrics, produced by `SearchCache::stats`.
#[derive(Debug, Clone)]
pub struct CacheStats {
    /// Maximum number of entries the cache can hold.
    pub capacity: usize,
    /// Number of entries currently stored.
    pub size: usize,
    /// Lookups served from a live entry.
    pub hits: u64,
    /// Lookups that found nothing (including expired entries).
    pub misses: u64,
    /// Entries displaced by LRU capacity pressure.
    pub evictions: u64,
    /// Entries removed because their TTL elapsed.
    pub expirations: u64,
    /// Configured time-to-live in seconds; 0 disables expiration.
    pub ttl_seconds: u64,
}

impl CacheStats {
    /// Fraction of lookups served from cache, in `[0.0, 1.0]`.
    /// Yields 0.0 when no lookups have been recorded yet.
    pub fn hit_rate(&self) -> f64 {
        match self.hits + self.misses {
            0 => 0.0,
            total => self.hits as f64 / total as f64,
        }
    }

    /// How full the cache is, as a percentage of its capacity.
    /// Yields 0.0 for a zero-capacity snapshot.
    pub fn utilization_percent(&self) -> f64 {
        if self.capacity == 0 {
            0.0
        } else {
            (self.size as f64 / self.capacity as f64) * 100.0
        }
    }

    /// Total lookups observed: hits plus misses.
    pub fn total_queries(&self) -> u64 {
        self.hits + self.misses
    }

    /// Heuristic: the cache is considered effective when strictly more
    /// than half of all lookups were hits.
    pub fn is_effective(&self) -> bool {
        self.hit_rate() > 0.5
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::search::results::{QueryProcessingDetails, SearchMetadata, SearchTiming};
    use crate::search::types::SearchMode;
    use std::collections::HashMap;

    // Builds a minimal SearchMetadata so FinalSearchResults can be
    // constructed without a real search backend.
    fn create_test_metadata() -> SearchMetadata {
        let query_processing = QueryProcessingDetails::new(
            "test".to_string(),
            SearchMode::Auto,
            1,
            0,
            "test".to_string(),
            false,
        );
        let result_counts = HashMap::new();
        let timing = SearchTiming::new(1.0, 1.0, 1.0, 1.0);
        SearchMetadata::new(query_processing, result_counts, timing, 0, 0)
    }

    // Queries differing only in case or surrounding whitespace must
    // normalize to equal keys.
    #[test]
    fn test_cache_key_normalization() {
        let key1 = CacheKey::new("authenticate", 1, None, 10);
        let key2 = CacheKey::new(" AUTHENTICATE ", 1, None, 10);
        let key3 = CacheKey::new("Authenticate", 1, None, 10);
        assert_eq!(key1, key2);
        assert_eq!(key1, key3);
        assert_eq!(key2, key3);
    }

    // Changing any scoping parameter (repo, worktree, limit) must
    // produce a distinct key.
    #[test]
    fn test_cache_key_different_params() {
        let key1 = CacheKey::new("auth", 1, None, 10);
        let key2 = CacheKey::new("auth", 2, None, 10);
        let key3 = CacheKey::new("auth", 1, Some(1), 10);
        let key4 = CacheKey::new("auth", 1, None, 20);
        assert_ne!(key1, key2);
        assert_ne!(key1, key3);
        assert_ne!(key1, key4);
    }

    // miss -> put -> hit round trip, with counters checked.
    #[test]
    fn test_cache_basic_operations() {
        let cache = SearchCache::new(2);
        let key1 = CacheKey::new("test", 1, None, 10);
        let results = FinalSearchResults::new("test".to_string(), vec![], create_test_metadata());
        assert!(cache.get(&key1).is_none());
        cache.put(key1.clone(), results.clone());
        assert!(cache.get(&key1).is_some());
        let stats = cache.stats();
        assert_eq!(stats.hits, 1);
        assert_eq!(stats.misses, 1);
    }

    // With capacity 2, inserting a third key evicts the least
    // recently used entry (key1, since key2 was touched after it).
    #[test]
    fn test_cache_lru_eviction() {
        let cache = SearchCache::new(2);
        let key1 = CacheKey::new("query1", 1, None, 10);
        let key2 = CacheKey::new("query2", 1, None, 10);
        let key3 = CacheKey::new("query3", 1, None, 10);
        let results = FinalSearchResults::new("test".to_string(), vec![], create_test_metadata());
        cache.put(key1.clone(), results.clone());
        cache.put(key2.clone(), results.clone());
        assert!(cache.get(&key1).is_some());
        assert!(cache.get(&key2).is_some());
        cache.put(key3.clone(), results.clone());
        assert!(cache.get(&key1).is_none());
        assert!(cache.get(&key2).is_some());
        assert!(cache.get(&key3).is_some());
        let stats = cache.stats();
        assert_eq!(stats.evictions, 1);
    }

    // Pure arithmetic checks on a hand-built stats snapshot.
    #[test]
    fn test_cache_stats_calculations() {
        let stats = CacheStats {
            capacity: 100,
            size: 80,
            hits: 70,
            misses: 30,
            evictions: 10,
            expirations: 5,
            ttl_seconds: 3600,
        };
        assert_eq!(stats.hit_rate(), 0.7);
        assert_eq!(stats.utilization_percent(), 80.0);
        assert_eq!(stats.total_queries(), 100);
        assert!(stats.is_effective());
    }

    // TTL of 0 means "never expire": the entry stays live and no
    // expiration is counted.
    #[test]
    fn test_cache_ttl_expiration() {
        let cache = SearchCache::with_ttl(100, 0);
        let key = CacheKey::new("test", 1, None, 10);
        let results = FinalSearchResults::new("test".to_string(), vec![], create_test_metadata());
        cache.put(key.clone(), results);
        assert!(cache.get(&key).is_some());
        let stats = cache.stats();
        assert_eq!(stats.expirations, 0);
    }

    // Only entries whose key matches the repo id are dropped.
    #[test]
    fn test_cache_invalidation_by_repo() {
        let cache = SearchCache::new(100);
        let key1 = CacheKey::new("test1", 1, None, 10);
        let key2 = CacheKey::new("test2", 2, None, 10);
        let key3 = CacheKey::new("test3", 1, None, 10);
        let results = FinalSearchResults::new("test".to_string(), vec![], create_test_metadata());
        cache.put(key1.clone(), results.clone());
        cache.put(key2.clone(), results.clone());
        cache.put(key3.clone(), results.clone());
        let count = cache.invalidate_by_repo(1);
        assert_eq!(count, 2);
        assert!(cache.get(&key1).is_none());
        assert!(cache.get(&key2).is_some());
        assert!(cache.get(&key3).is_none());
    }

    // Only Some(worktree_id) matches; None-scoped entries survive.
    #[test]
    fn test_cache_invalidation_by_worktree() {
        let cache = SearchCache::new(100);
        let key1 = CacheKey::new("test1", 1, Some(1), 10);
        let key2 = CacheKey::new("test2", 1, Some(2), 10);
        let key3 = CacheKey::new("test3", 1, None, 10);
        let results = FinalSearchResults::new("test".to_string(), vec![], create_test_metadata());
        cache.put(key1.clone(), results.clone());
        cache.put(key2.clone(), results.clone());
        cache.put(key3.clone(), results.clone());
        let count = cache.invalidate_by_worktree(1);
        assert_eq!(count, 1);
        assert!(cache.get(&key1).is_none());
        assert!(cache.get(&key2).is_some());
        assert!(cache.get(&key3).is_some());
    }

    // Clones share state: a put through one handle is visible through
    // the other, and the stats counters agree.
    #[test]
    fn test_cache_clone() {
        let cache1 = SearchCache::new(100);
        let cache2 = cache1.clone();
        let key = CacheKey::new("test", 1, None, 10);
        let results = FinalSearchResults::new("test".to_string(), vec![], create_test_metadata());
        cache1.put(key.clone(), results);
        assert!(cache2.get(&key).is_some());
        let stats1 = cache1.stats();
        let stats2 = cache2.stats();
        assert_eq!(stats1.hits, stats2.hits);
    }
}