medical_cache/study_cache.rs

//! Study Cache Manager
//!
//! Manages multi-tier caching for DICOM studies with LRU eviction
//! and automatic tier promotion/demotion based on access patterns.
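//!
//! A minimal usage sketch, assuming the `StudyCacheConfig::for_profile`
//! constructor and the `ImageQuality`/`CacheTier` variants exercised by the
//! tests at the bottom of this file (module paths are illustrative):
//!
//! ```ignore
//! use crate::config::{StudyCacheConfig, PerformanceProfile, CacheTier, ImageQuality};
//!
//! let cache = StudyCache::new(StudyCacheConfig::for_profile(PerformanceProfile::Medium));
//! cache.register_study(
//!     "study1".to_string(),
//!     "John Doe".to_string(),
//!     "CT Scan".to_string(),
//!     vec!["series1".to_string()],
//!     100,
//! ).unwrap();
//!
//! // Cache a thumbnail in the warm tier, then read it back.
//! let key = cache.store(
//!     "study1".to_string(),
//!     "series1".to_string(),
//!     0,
//!     ImageQuality::Thumbnail,
//!     CacheTier::Warm,
//!     vec![0u8; 1024],
//! ).unwrap();
//! assert!(cache.get(&key).is_some());
//! ```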

use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::time::{Duration, Instant};
use serde::{Deserialize, Serialize};
use tracing::{info, debug};

use crate::config::{
    StudyCacheConfig, CacheTier, ImageQuality,
};

/// State of a cache entry
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum CacheEntryState {
    /// Entry is being loaded
    Loading,
    /// Entry is ready for use
    Ready,
    /// Entry failed to load
    Failed,
    /// Entry is being evicted
    Evicting,
}

/// A single cache entry (image at a specific quality level)
#[derive(Debug, Clone)]
pub struct CacheEntry {
    /// Study ID this entry belongs to
    pub study_id: String,
    /// Series ID within the study
    pub series_id: String,
    /// Slice index (for multi-slice series)
    pub slice_index: usize,
    /// Quality level of this entry
    pub quality: ImageQuality,
    /// Current cache tier
    pub tier: CacheTier,
    /// Entry state
    pub state: CacheEntryState,
    /// Memory size in bytes
    pub size_bytes: usize,
    /// When entry was created
    pub created_at: Instant,
    /// When entry was last accessed
    pub last_accessed: Instant,
    /// Access count for frequency tracking
    pub access_count: u64,
    /// Pixel data (if loaded)
    pub data: Option<Arc<Vec<u8>>>,
}

impl CacheEntry {
    /// Create a new cache entry
    pub fn new(
        study_id: String,
        series_id: String,
        slice_index: usize,
        quality: ImageQuality,
        tier: CacheTier,
    ) -> Self {
        let now = Instant::now();
        Self {
            study_id,
            series_id,
            slice_index,
            quality,
            tier,
            state: CacheEntryState::Loading,
            size_bytes: 0,
            created_at: now,
            last_accessed: now,
            access_count: 0,
            data: None,
        }
    }

    /// Mark entry as ready with data
    pub fn set_ready(&mut self, data: Vec<u8>) {
        self.size_bytes = data.len();
        self.data = Some(Arc::new(data));
        self.state = CacheEntryState::Ready;
    }

    /// Mark entry as failed
    pub fn set_failed(&mut self) {
        self.state = CacheEntryState::Failed;
        self.data = None;
    }

    /// Record an access
    pub fn touch(&mut self) {
        self.last_accessed = Instant::now();
        self.access_count += 1;
    }

    /// Get age since last access
    pub fn age(&self) -> Duration {
        self.last_accessed.elapsed()
    }

    /// Generate cache key
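    ///
    /// Keys have the form `"{study_id}:{series_id}:{slice_index}:{quality:?}"`,
    /// e.g. `study1:series1:0:Thumbnail`; the final component uses
    /// `ImageQuality`'s `Debug` formatting, so keys are only stable as long as
    /// that formatting is.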
    pub fn key(&self) -> String {
        format!(
            "{}:{}:{}:{:?}",
            self.study_id, self.series_id, self.slice_index, self.quality
        )
    }
}

/// Cached study metadata and entries
#[derive(Debug)]
pub struct CachedStudy {
    /// Study ID
    pub id: String,
    /// Patient name (for display)
    pub patient_name: String,
    /// Study description
    pub description: String,
    /// Total number of series
    pub series_count: usize,
    /// Total number of slices across all series
    pub total_slices: usize,
    /// Current cache tier (the hottest tier held by any of this study's entries)
    pub tier: CacheTier,
    /// When study was first cached
    pub cached_at: Instant,
    /// When study was last accessed
    pub last_accessed: Instant,
    /// Total memory used by this study
    pub memory_bytes: usize,
    /// Series IDs in this study
    pub series_ids: Vec<String>,
}

impl CachedStudy {
    /// Create a new cached study
    pub fn new(
        id: String,
        patient_name: String,
        description: String,
        series_ids: Vec<String>,
        total_slices: usize,
    ) -> Self {
        let now = Instant::now();
        Self {
            id,
            patient_name,
            description,
            series_count: series_ids.len(),
            total_slices,
            tier: CacheTier::Cold,
            cached_at: now,
            last_accessed: now,
            memory_bytes: 0,
            series_ids,
        }
    }

    /// Record an access
    pub fn touch(&mut self) {
        self.last_accessed = Instant::now();
    }

    /// Get age since last access
    pub fn age(&self) -> Duration {
        self.last_accessed.elapsed()
    }
}

/// Cache statistics for monitoring
#[derive(Debug, Clone, Serialize)]
pub struct StudyCacheStats {
    /// Number of studies in cache
    pub study_count: usize,
    /// Number of entries in hot tier
    pub hot_entries: usize,
    /// Number of entries in warm tier
    pub warm_entries: usize,
    /// Number of entries in cold tier (metadata only)
    pub cold_entries: usize,
    /// Hot tier memory usage in bytes
    pub hot_memory_bytes: usize,
    /// Warm tier memory usage in bytes
    pub warm_memory_bytes: usize,
    /// Total memory usage in bytes
    pub total_memory_bytes: usize,
    /// Memory budget in bytes
    pub budget_bytes: usize,
    /// Utilization percentage (0-100)
    pub utilization_percent: f32,
    /// Cache hit rate (0-100)
    pub hit_rate_percent: f32,
    /// Total cache hits
    pub hits: u64,
    /// Total cache misses
    pub misses: u64,
}

/// Main study cache manager
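///
/// All mutable state lives behind `RwLock`s, so every cache operation except
/// `update_config` takes `&self`; a single instance can therefore be shared
/// across threads (e.g. behind an `Arc`).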
pub struct StudyCache {
    /// Cache configuration
    config: StudyCacheConfig,
    /// Cached studies (study_id -> CachedStudy)
    studies: RwLock<HashMap<String, CachedStudy>>,
    /// Cache entries (entry_key -> CacheEntry)
    entries: RwLock<HashMap<String, CacheEntry>>,
    /// Hot tier memory usage
    hot_memory: RwLock<usize>,
    /// Warm tier memory usage
    warm_memory: RwLock<usize>,
    /// Cache hit counter
    hits: RwLock<u64>,
    /// Cache miss counter
    misses: RwLock<u64>,
}

impl StudyCache {
    /// Create a new study cache with configuration
    pub fn new(config: StudyCacheConfig) -> Self {
        info!(
            "Initializing StudyCache: {} MB budget, {} max studies",
            config.total_budget_mb, config.max_studies
        );

        Self {
            config,
            studies: RwLock::new(HashMap::new()),
            entries: RwLock::new(HashMap::new()),
            hot_memory: RwLock::new(0),
            warm_memory: RwLock::new(0),
            hits: RwLock::new(0),
            misses: RwLock::new(0),
        }
    }

    /// Register a new study in the cache
    pub fn register_study(
        &self,
        id: String,
        patient_name: String,
        description: String,
        series_ids: Vec<String>,
        total_slices: usize,
    ) -> Result<(), CacheError> {
        // Check if we need to evict studies first
        self.ensure_study_capacity()?;

        let study = CachedStudy::new(
            id.clone(),
            patient_name,
            description,
            series_ids,
            total_slices,
        );

        let mut studies = self.studies.write().unwrap();
        studies.insert(id.clone(), study);

        info!("Registered study {} in cache", id);
        Ok(())
    }

    /// Get a cache entry's data; returns `None` if the entry is missing or not yet `Ready`
    pub fn get(&self, key: &str) -> Option<Arc<Vec<u8>>> {
        let mut entries = self.entries.write().unwrap();

        if let Some(entry) = entries.get_mut(key) {
            if entry.state == CacheEntryState::Ready {
                entry.touch();

                // Update study last accessed
                if let Some(study) = self.studies.write().unwrap().get_mut(&entry.study_id) {
                    study.touch();
                }

                // Record hit
                *self.hits.write().unwrap() += 1;

                return entry.data.clone();
            }
        }

        // Record miss
        *self.misses.write().unwrap() += 1;
        None
    }

    /// Store data in the cache
    pub fn store(
        &self,
        study_id: String,
        series_id: String,
        slice_index: usize,
        quality: ImageQuality,
        tier: CacheTier,
        data: Vec<u8>,
    ) -> Result<String, CacheError> {
        let data_size = data.len();

        // Check if we have room
        self.ensure_memory_capacity(tier, data_size)?;

        // Create entry
        let mut entry = CacheEntry::new(
            study_id.clone(),
            series_id,
            slice_index,
            quality,
            tier,
        );
        let key = entry.key();
        entry.set_ready(data);

        // Update memory counters
        match tier {
            CacheTier::Hot => {
                *self.hot_memory.write().unwrap() += data_size;
            }
            CacheTier::Warm => {
                *self.warm_memory.write().unwrap() += data_size;
            }
            CacheTier::Cold => {
                // Cold tier doesn't store pixel data
            }
        }

        // Update study memory
        if let Some(study) = self.studies.write().unwrap().get_mut(&study_id) {
            study.memory_bytes += data_size;
            // Keep the study's tier at the hottest of its entries (Hot < Warm < Cold)
            if tier < study.tier {
                study.tier = tier;
            }
        }

        // Store entry
        self.entries.write().unwrap().insert(key.clone(), entry);

        debug!("Stored cache entry: {} ({} bytes, {:?})", key, data_size, tier);
        Ok(key)
    }

    /// Promote an entry to a higher tier (Cold -> Warm -> Hot)
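    ///
    /// "Higher" follows the ordering assumed throughout this module
    /// (`CacheTier::Hot < CacheTier::Warm < CacheTier::Cold`, see
    /// `test_tier_ordering`), so a hotter tier compares as *less than* a
    /// colder one.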
    pub fn promote(&self, key: &str, target_tier: CacheTier) -> Result<(), CacheError> {
        let mut entries = self.entries.write().unwrap();

        if let Some(entry) = entries.get_mut(key) {
            if entry.tier <= target_tier {
                return Ok(()); // Already at or above target tier
            }

            let old_tier = entry.tier;
            let size = entry.size_bytes;

            // Check capacity in target tier
            drop(entries); // Release lock for capacity check
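            // (ensure_memory_capacity may run LRU eviction, which itself locks
            // `entries`; holding the write guard here would deadlock.)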
            self.ensure_memory_capacity(target_tier, size)?;
            entries = self.entries.write().unwrap();

            if let Some(entry) = entries.get_mut(key) {
                // Update memory counters
                match old_tier {
                    CacheTier::Warm => *self.warm_memory.write().unwrap() -= size,
                    CacheTier::Cold => {}
                    CacheTier::Hot => {}
                }
                match target_tier {
                    CacheTier::Hot => *self.hot_memory.write().unwrap() += size,
                    CacheTier::Warm => *self.warm_memory.write().unwrap() += size,
                    CacheTier::Cold => {}
                }

                entry.tier = target_tier;
                debug!("Promoted {} from {:?} to {:?}", key, old_tier, target_tier);
            }
        }

        Ok(())
    }

    /// Demote an entry to a lower tier (Hot -> Warm -> Cold)
    pub fn demote(&self, key: &str, target_tier: CacheTier) -> Result<(), CacheError> {
        let mut entries = self.entries.write().unwrap();

        if let Some(entry) = entries.get_mut(key) {
            if entry.tier >= target_tier {
                return Ok(()); // Already at or below target tier
            }

            let old_tier = entry.tier;
            let size = entry.size_bytes;

            // Update memory counters
            match old_tier {
                CacheTier::Hot => *self.hot_memory.write().unwrap() -= size,
                CacheTier::Warm => *self.warm_memory.write().unwrap() -= size,
                CacheTier::Cold => {}
            }
            match target_tier {
                CacheTier::Hot => *self.hot_memory.write().unwrap() += size,
                CacheTier::Warm => *self.warm_memory.write().unwrap() += size,
                CacheTier::Cold => {
                    // Evict the actual pixel data when demoting to cold and keep
                    // the per-study accounting in sync (mirrors `evict`)
                    entry.data = None;
                    entry.size_bytes = 0;
                    if let Some(study) = self.studies.write().unwrap().get_mut(&entry.study_id) {
                        study.memory_bytes = study.memory_bytes.saturating_sub(size);
                    }
                }
            }

            entry.tier = target_tier;
            debug!("Demoted {} from {:?} to {:?}", key, old_tier, target_tier);
        }

        Ok(())
    }

    /// Evict a specific entry
    pub fn evict(&self, key: &str) -> bool {
        let mut entries = self.entries.write().unwrap();

        if let Some(entry) = entries.remove(key) {
            let size = entry.size_bytes;

            // Update memory counters
            match entry.tier {
                CacheTier::Hot => *self.hot_memory.write().unwrap() -= size,
                CacheTier::Warm => *self.warm_memory.write().unwrap() -= size,
                CacheTier::Cold => {}
            }

            // Update study memory
            if let Some(study) = self.studies.write().unwrap().get_mut(&entry.study_id) {
                study.memory_bytes = study.memory_bytes.saturating_sub(size);
            }

            debug!("Evicted cache entry: {}", key);
            return true;
        }

        false
    }

    /// Evict an entire study
    pub fn evict_study(&self, study_id: &str) -> bool {
        // First, collect all entry keys for this study
        let keys_to_evict: Vec<String> = {
            let entries = self.entries.read().unwrap();
            entries
                .iter()
                .filter(|(_, e)| e.study_id == study_id)
                .map(|(k, _)| k.clone())
                .collect()
        };

        // Evict all entries
        for key in keys_to_evict {
            self.evict(&key);
        }

        // Remove study
        let mut studies = self.studies.write().unwrap();
        if studies.remove(study_id).is_some() {
            info!("Evicted study {} from cache", study_id);
            return true;
        }

        false
    }

    /// Run LRU eviction to free memory
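    ///
    /// Entries are considered oldest-first. Hot entries are demoted to the
    /// warm tier rather than evicted outright, and the bytes they move count
    /// toward the returned total, so the value reported here is "bytes no
    /// longer occupying their original tier", not necessarily bytes released
    /// from the cache as a whole.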
    pub fn run_lru_eviction(&self, target_free_bytes: usize) -> usize {
        let mut freed = 0usize;

        // Get entries sorted by last access time (oldest first)
        let mut entries_by_age: Vec<(String, Instant, CacheTier)> = {
            let entries = self.entries.read().unwrap();
            entries
                .iter()
                .filter(|(_, e)| e.state == CacheEntryState::Ready && e.data.is_some())
                .map(|(k, e)| (k.clone(), e.last_accessed, e.tier))
                .collect()
        };

        entries_by_age.sort_by_key(|(_, t, _)| *t);

        // Evict oldest entries first, preferring lower tiers
        for (key, _, tier) in entries_by_age {
            if freed >= target_free_bytes {
                break;
            }

            // Get entry size before evicting
            let size = {
                let entries = self.entries.read().unwrap();
                entries.get(&key).map(|e| e.size_bytes).unwrap_or(0)
            };

            // Demote hot to warm first, then evict warm/cold
            if tier == CacheTier::Hot {
                if self.demote(&key, CacheTier::Warm).is_ok() {
                    freed += size;
                }
            } else if self.evict(&key) {
                freed += size;
            }
        }

        if freed > 0 {
            info!("LRU eviction freed {} bytes", freed);
        }

        freed
    }

    /// Check for stale entries and evict them
    pub fn evict_stale_entries(&self) -> usize {
        let timeout = Duration::from_secs(self.config.eviction_timeout_secs);
        let mut evicted = 0;

        // Get stale entry keys
        let stale_keys: Vec<String> = {
            let entries = self.entries.read().unwrap();
            entries
                .iter()
                .filter(|(_, e)| e.age() > timeout)
                .map(|(k, _)| k.clone())
                .collect()
        };

        for key in stale_keys {
            if self.evict(&key) {
                evicted += 1;
            }
        }

        if evicted > 0 {
            info!("Evicted {} stale cache entries", evicted);
        }

        evicted
    }

    /// Get cache statistics
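    ///
    /// Utilization is computed as `total_memory / budget * 100` and the hit
    /// rate as `hits / (hits + misses) * 100` (0 when there have been no
    /// requests).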
    pub fn stats(&self) -> StudyCacheStats {
        let studies = self.studies.read().unwrap();
        let entries = self.entries.read().unwrap();
        let hot_memory = *self.hot_memory.read().unwrap();
        let warm_memory = *self.warm_memory.read().unwrap();
        let hits = *self.hits.read().unwrap();
        let misses = *self.misses.read().unwrap();

        let total_memory = hot_memory + warm_memory;
        let budget = self.config.total_budget_mb * 1024 * 1024;

        let (hot_entries, warm_entries, cold_entries) = entries.values().fold(
            (0, 0, 0),
            |(h, w, c), e| match e.tier {
                CacheTier::Hot => (h + 1, w, c),
                CacheTier::Warm => (h, w + 1, c),
                CacheTier::Cold => (h, w, c + 1),
            },
        );

        let total_requests = hits + misses;
        let hit_rate = if total_requests > 0 {
            (hits as f32 / total_requests as f32) * 100.0
        } else {
            0.0
        };

        StudyCacheStats {
            study_count: studies.len(),
            hot_entries,
            warm_entries,
            cold_entries,
            hot_memory_bytes: hot_memory,
            warm_memory_bytes: warm_memory,
            total_memory_bytes: total_memory,
            budget_bytes: budget,
            utilization_percent: (total_memory as f32 / budget as f32) * 100.0,
            hit_rate_percent: hit_rate,
            hits,
            misses,
        }
    }

    /// Get list of cached studies
    pub fn list_studies(&self) -> Vec<StudyInfo> {
        let studies = self.studies.read().unwrap();
        studies
            .values()
            .map(|s| StudyInfo {
                id: s.id.clone(),
                patient_name: s.patient_name.clone(),
                description: s.description.clone(),
                tier: s.tier,
                memory_bytes: s.memory_bytes,
                age_secs: s.age().as_secs(),
            })
            .collect()
    }

    /// Clear the entire cache
    pub fn clear(&self) {
        self.entries.write().unwrap().clear();
        self.studies.write().unwrap().clear();
        *self.hot_memory.write().unwrap() = 0;
        *self.warm_memory.write().unwrap() = 0;
        info!("Cache cleared");
    }

    /// Update configuration (e.g., when profile changes)
    pub fn update_config(&mut self, config: StudyCacheConfig) {
        info!(
            "Updating cache config: {} MB -> {} MB",
            self.config.total_budget_mb, config.total_budget_mb
        );
        self.config = config;

        // Run eviction if new budget is smaller
        let current_memory = *self.hot_memory.read().unwrap() + *self.warm_memory.read().unwrap();
        let new_budget = self.config.total_budget_mb * 1024 * 1024;

        if current_memory > new_budget {
            let to_free = current_memory - new_budget;
            self.run_lru_eviction(to_free);
        }
    }

    // Private helper methods

    /// Ensure we have capacity for another study
    fn ensure_study_capacity(&self) -> Result<(), CacheError> {
        let studies = self.studies.read().unwrap();
        if studies.len() >= self.config.max_studies {
            drop(studies);

            // Find LRU study to evict
            let lru_study_id = {
                let studies = self.studies.read().unwrap();
                studies
                    .values()
                    .min_by_key(|s| s.last_accessed)
                    .map(|s| s.id.clone())
            };

            if let Some(id) = lru_study_id {
                self.evict_study(&id);
            } else {
                return Err(CacheError::CapacityExceeded {
                    message: "Cannot evict studies to make room".to_string(),
                });
            }
        }
        Ok(())
    }

    /// Ensure we have memory capacity in the target tier
    fn ensure_memory_capacity(&self, tier: CacheTier, required: usize) -> Result<(), CacheError> {
        let (current, limit) = match tier {
            CacheTier::Hot => (
                *self.hot_memory.read().unwrap(),
                self.config.hot_tier_mb * 1024 * 1024,
            ),
            CacheTier::Warm => (
                *self.warm_memory.read().unwrap(),
                self.config.warm_tier_mb * 1024 * 1024,
            ),
            CacheTier::Cold => return Ok(()), // No limit for cold tier
        };

        if current + required > limit {
            let to_free = (current + required) - limit;
            let freed = self.run_lru_eviction(to_free);

            if freed < to_free {
                return Err(CacheError::CapacityExceeded {
                    message: format!(
                        "Insufficient {:?} tier capacity: need {} bytes, could only free {}",
                        tier, to_free, freed
                    ),
                });
            }
        }

        Ok(())
    }
}

/// Study info for listing
#[derive(Debug, Clone, Serialize)]
pub struct StudyInfo {
    pub id: String,
    pub patient_name: String,
    pub description: String,
    pub tier: CacheTier,
    pub memory_bytes: usize,
    pub age_secs: u64,
}

/// Cache errors
#[derive(Debug, Clone)]
pub enum CacheError {
    /// Cache capacity exceeded
    CapacityExceeded { message: String },
    /// Entry not found
    NotFound { key: String },
    /// Invalid operation
    InvalidOperation { message: String },
}

impl std::fmt::Display for CacheError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            CacheError::CapacityExceeded { message } => {
                write!(f, "Cache capacity exceeded: {}", message)
            }
            CacheError::NotFound { key } => {
                write!(f, "Cache entry not found: {}", key)
            }
            CacheError::InvalidOperation { message } => {
                write!(f, "Invalid cache operation: {}", message)
            }
        }
    }
}

impl std::error::Error for CacheError {}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::PerformanceProfile;

    fn test_config() -> StudyCacheConfig {
        StudyCacheConfig::for_profile(PerformanceProfile::Medium)
    }

    #[test]
    fn test_cache_creation() {
        let cache = StudyCache::new(test_config());
        let stats = cache.stats();
        assert_eq!(stats.study_count, 0);
        assert_eq!(stats.total_memory_bytes, 0);
    }

    #[test]
    fn test_register_study() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "John Doe".to_string(),
                "CT Scan".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        let stats = cache.stats();
        assert_eq!(stats.study_count, 1);
    }

    #[test]
    fn test_store_and_get() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "John Doe".to_string(),
                "CT Scan".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        let data = vec![0u8; 1024]; // 1KB test data
        let key = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                data.clone(),
            )
            .unwrap();

        let retrieved = cache.get(&key);
        assert!(retrieved.is_some());
        assert_eq!(retrieved.unwrap().len(), 1024);

        let stats = cache.stats();
        assert_eq!(stats.warm_entries, 1);
        assert_eq!(stats.hits, 1);
    }

    #[test]
    fn test_eviction() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "John Doe".to_string(),
                "CT Scan".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        let key = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                vec![0u8; 1024],
            )
            .unwrap();

        assert!(cache.evict(&key));

        let stats = cache.stats();
        assert_eq!(stats.warm_entries, 0);
    }

    #[test]
    fn test_tier_ordering() {
        assert!(CacheTier::Hot < CacheTier::Warm);
        assert!(CacheTier::Warm < CacheTier::Cold);
        assert!(CacheTier::Hot < CacheTier::Cold);
    }

    #[test]
    fn test_hit_rate() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "John Doe".to_string(),
                "CT Scan".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        let key = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                vec![0u8; 1024],
            )
            .unwrap();

        // 2 hits
        cache.get(&key);
        cache.get(&key);

        // 1 miss
        cache.get("nonexistent");

        let stats = cache.stats();
        assert_eq!(stats.hits, 2);
        assert_eq!(stats.misses, 1);
        assert!((stats.hit_rate_percent - 66.66).abs() < 1.0);
    }

    #[test]
    fn test_tier_promotion() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Test Patient".to_string(),
                "CT Scan".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        // Store in warm tier
        let key = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                vec![0u8; 1024],
            )
            .unwrap();

        let stats = cache.stats();
        assert_eq!(stats.warm_entries, 1);
        assert_eq!(stats.hot_entries, 0);

        // Promote to hot tier
        cache.promote(&key, CacheTier::Hot).unwrap();

        let stats = cache.stats();
        assert_eq!(stats.warm_entries, 0);
        assert_eq!(stats.hot_entries, 1);
    }

    #[test]
    fn test_tier_demotion() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Test Patient".to_string(),
                "CT Scan".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        // Store in hot tier
        let key = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Full,
                CacheTier::Hot,
                vec![0u8; 1024],
            )
            .unwrap();

        let stats = cache.stats();
        assert_eq!(stats.hot_entries, 1);

        // Demote to warm tier
        cache.demote(&key, CacheTier::Warm).unwrap();

        let stats = cache.stats();
        assert_eq!(stats.hot_entries, 0);
        assert_eq!(stats.warm_entries, 1);

        // Demote to cold tier (data should be evicted)
        cache.demote(&key, CacheTier::Cold).unwrap();

        let stats = cache.stats();
        assert_eq!(stats.warm_entries, 0);
        assert_eq!(stats.cold_entries, 1);

        // Data should be None in cold tier
        let data = cache.get(&key);
        assert!(data.is_none());
    }

    #[test]
    fn test_lru_eviction_order() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Test".to_string(),
                "CT".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        // Store multiple entries
        let key1 = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                vec![0u8; 1024],
            )
            .unwrap();

        std::thread::sleep(std::time::Duration::from_millis(10));

        let _key2 = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                1,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                vec![0u8; 1024],
            )
            .unwrap();

        // Access key1 to make it more recently used
        cache.get(&key1);

        // Run LRU eviction for 1KB (should evict key2, not key1)
        let freed = cache.run_lru_eviction(1024);
        assert!(freed >= 1024);

        // key1 should still exist, key2 should be evicted
        assert!(cache.get(&key1).is_some());
        // key2 eviction is best-effort, check stats instead
    }

    #[test]
    fn test_study_eviction() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Patient A".to_string(),
                "CT".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        cache
            .register_study(
                "study2".to_string(),
                "Patient B".to_string(),
                "MRI".to_string(),
                vec!["series2".to_string()],
                100,
            )
            .unwrap();

        // Store entries for both studies
        cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                vec![0u8; 1024],
            )
            .unwrap();

        cache
            .store(
                "study2".to_string(),
                "series2".to_string(),
                0,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                vec![0u8; 1024],
            )
            .unwrap();

        let stats = cache.stats();
        assert_eq!(stats.study_count, 2);
        assert_eq!(stats.warm_entries, 2);

        // Evict study1
        assert!(cache.evict_study("study1"));

        let stats = cache.stats();
        assert_eq!(stats.study_count, 1);
        assert_eq!(stats.warm_entries, 1);
    }

    #[test]
    fn test_multiple_quality_levels() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Test".to_string(),
                "CT".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        // Store same slice at different quality levels
        let thumb_key = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                vec![0u8; 256],
            )
            .unwrap();

        let mid_key = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::MidRes,
                CacheTier::Warm,
                vec![0u8; 512],
            )
            .unwrap();

        let full_key = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Full,
                CacheTier::Hot,
                vec![0u8; 1024],
            )
            .unwrap();

        // All should have different keys
        assert_ne!(thumb_key, mid_key);
        assert_ne!(mid_key, full_key);

        // All should be retrievable
        assert!(cache.get(&thumb_key).is_some());
        assert!(cache.get(&mid_key).is_some());
        assert!(cache.get(&full_key).is_some());

        let stats = cache.stats();
        let total_entries = stats.hot_entries + stats.warm_entries + stats.cold_entries;
        assert_eq!(total_entries, 3);
    }

    #[test]
    fn test_cache_clear() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Test".to_string(),
                "CT".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                vec![0u8; 1024],
            )
            .unwrap();

        let stats = cache.stats();
        assert!(stats.study_count > 0);
        assert!(stats.warm_entries > 0);

        cache.clear();

        let stats = cache.stats();
        assert_eq!(stats.study_count, 0);
        assert_eq!(stats.hot_entries + stats.warm_entries + stats.cold_entries, 0);
        assert_eq!(stats.total_memory_bytes, 0);
    }

    #[test]
    fn test_memory_tracking() {
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Test".to_string(),
                "CT".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        let size = 4096;
        cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Thumbnail,
                CacheTier::Warm,
                vec![0u8; size],
            )
            .unwrap();

        let stats = cache.stats();
        assert_eq!(stats.warm_memory_bytes, size);
        assert_eq!(stats.total_memory_bytes, size);
        assert_eq!(stats.hot_memory_bytes, 0);
    }

    // ========== Memory Leak Tests ==========

    #[test]
    fn test_memory_leak_alloc_free_cycles() {
        // Test repeated allocation/deallocation cycles don't accumulate memory
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Test".to_string(),
                "CT".to_string(),
                vec!["series1".to_string()],
                1000,
            )
            .unwrap();

        // Perform 10 cycles of alloc and clear
        for cycle in 0..10 {
            // Allocate 100 entries of 10KB each = 1MB per cycle
            for i in 0..100 {
                cache
                    .store(
                        "study1".to_string(),
                        "series1".to_string(),
                        i,
                        ImageQuality::Thumbnail,
                        CacheTier::Warm,
                        vec![0u8; 10240],
                    )
                    .unwrap();
            }

            let stats_before_clear = cache.stats();
            assert!(
                stats_before_clear.total_memory_bytes >= 1024000,
                "Cycle {}: Expected ~1MB allocated, got {}",
                cycle,
                stats_before_clear.total_memory_bytes
            );

            // Clear and verify memory is released
            cache.clear();

            // Re-register for next cycle
            cache
                .register_study(
                    "study1".to_string(),
                    "Test".to_string(),
                    "CT".to_string(),
                    vec!["series1".to_string()],
                    1000,
                )
                .unwrap();
        }

        // Final state should have zero memory
        let final_stats = cache.stats();
        assert_eq!(final_stats.total_memory_bytes, 0);
        assert_eq!(final_stats.hot_entries, 0);
        assert_eq!(final_stats.warm_entries, 0);
        assert_eq!(final_stats.cold_entries, 0);
    }

    #[test]
    fn test_memory_leak_tier_transitions() {
        // Test that tier transitions properly account for memory
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Test".to_string(),
                "CT".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        let size = 8192;
        let key = cache
            .store(
                "study1".to_string(),
                "series1".to_string(),
                0,
                ImageQuality::Full,
                CacheTier::Warm,
                vec![0u8; size],
            )
            .unwrap();

        // Initial: warm tier has the memory
        let stats = cache.stats();
        assert_eq!(stats.warm_memory_bytes, size);
        assert_eq!(stats.hot_memory_bytes, 0);
        assert_eq!(stats.total_memory_bytes, size);

        // Promote to hot
        cache.promote(&key, CacheTier::Hot).unwrap();
        let stats = cache.stats();
        assert_eq!(stats.warm_memory_bytes, 0);
        assert_eq!(stats.hot_memory_bytes, size);
        assert_eq!(stats.total_memory_bytes, size);

        // Demote back to warm
        cache.demote(&key, CacheTier::Warm).unwrap();
        let stats = cache.stats();
        assert_eq!(stats.warm_memory_bytes, size);
        assert_eq!(stats.hot_memory_bytes, 0);
        assert_eq!(stats.total_memory_bytes, size);

        // Demote to cold (data evicted)
        cache.demote(&key, CacheTier::Cold).unwrap();
        let stats = cache.stats();
        assert_eq!(stats.warm_memory_bytes, 0);
        assert_eq!(stats.hot_memory_bytes, 0);
        assert_eq!(stats.total_memory_bytes, 0);
        assert_eq!(stats.cold_entries, 1);
    }

    #[test]
    fn test_memory_leak_lru_eviction() {
        // Test that LRU eviction properly frees memory
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Test".to_string(),
                "CT".to_string(),
                vec!["series1".to_string()],
                1000,
            )
            .unwrap();

        let entry_size = 10240; // 10KB per entry
        let num_entries = 50;
        let expected_total = entry_size * num_entries;

        // Store 50 entries
        for i in 0..num_entries {
            cache
                .store(
                    "study1".to_string(),
                    "series1".to_string(),
                    i,
                    ImageQuality::Thumbnail,
                    CacheTier::Warm,
                    vec![0u8; entry_size],
                )
                .unwrap();
        }

        let stats = cache.stats();
        assert_eq!(stats.warm_entries, num_entries);
        assert_eq!(stats.total_memory_bytes, expected_total);

        // Evict half the memory
        let target_free = expected_total / 2;
        let freed = cache.run_lru_eviction(target_free);
        assert!(freed >= target_free);

        // Memory should be reduced
        let stats_after = cache.stats();
        assert!(stats_after.total_memory_bytes <= expected_total - target_free + entry_size);
    }

    #[test]
    fn test_memory_leak_study_eviction() {
        // Test that study eviction frees all associated memory
        let cache = StudyCache::new(test_config());

        // Register two studies
        cache
            .register_study(
                "study1".to_string(),
                "Patient A".to_string(),
                "CT".to_string(),
                vec!["series1".to_string()],
                100,
            )
            .unwrap();

        cache
            .register_study(
                "study2".to_string(),
                "Patient B".to_string(),
                "MRI".to_string(),
                vec!["series2".to_string()],
                100,
            )
            .unwrap();

        let entry_size = 4096;

        // Store 10 entries for study1
        for i in 0..10 {
            cache
                .store(
                    "study1".to_string(),
                    "series1".to_string(),
                    i,
                    ImageQuality::Thumbnail,
                    CacheTier::Warm,
                    vec![0u8; entry_size],
                )
                .unwrap();
        }

        // Store 5 entries for study2
        for i in 0..5 {
            cache
                .store(
                    "study2".to_string(),
                    "series2".to_string(),
                    i,
                    ImageQuality::Thumbnail,
                    CacheTier::Warm,
                    vec![0u8; entry_size],
                )
                .unwrap();
        }

        let stats_before = cache.stats();
        let study1_memory = entry_size * 10;
        let study2_memory = entry_size * 5;
        assert_eq!(stats_before.total_memory_bytes, study1_memory + study2_memory);

        // Evict study1
        cache.evict_study("study1");

        let stats_after = cache.stats();
        assert_eq!(stats_after.total_memory_bytes, study2_memory);
        assert_eq!(stats_after.study_count, 1);
    }

    #[test]
    fn test_memory_leak_large_allocations() {
        // Test large allocations (simulating full-res DICOM slices)
        let cache = StudyCache::new(test_config());

        cache
            .register_study(
                "study1".to_string(),
                "Test".to_string(),
                "CT".to_string(),
                vec!["series1".to_string()],
                500,
            )
            .unwrap();

        // Simulate 512x512x16bit = 512KB per slice
        let slice_size = 512 * 512 * 2;
        let num_slices = 20;

        for i in 0..num_slices {
            cache
                .store(
                    "study1".to_string(),
                    "series1".to_string(),
                    i,
                    ImageQuality::Full,
                    CacheTier::Hot,
                    vec![0u8; slice_size],
                )
                .unwrap();
        }

        let stats = cache.stats();
        assert_eq!(stats.hot_entries, num_slices);
        assert_eq!(stats.hot_memory_bytes, slice_size * num_slices);

        // Clear and verify all memory released
        cache.clear();

        let final_stats = cache.stats();
        assert_eq!(final_stats.total_memory_bytes, 0);
        assert_eq!(final_stats.hot_memory_bytes, 0);
        assert_eq!(final_stats.hot_entries, 0);
    }
}