1use std::collections::HashMap;
30use std::sync::Arc;
31
32use async_trait::async_trait;
33use bytes::Bytes;
34use lru::LruCache;
35use tokio::sync::{Mutex, Notify, RwLock};
36
37use crate::error::{FormatError, IoError, TiffError};
38use crate::format::{detect_format, GenericTiffReader, SlideFormat, SvsReader};
39use crate::io::{BlockCache, RangeReader, DEFAULT_BLOCK_SIZE};
40
41use super::reader::{LevelInfo, SlideReader};
42
/// Default maximum number of fully-opened slides kept in the registry's LRU cache.
const DEFAULT_SLIDE_CACHE_CAPACITY: usize = 100;

/// Default capacity (in blocks) of each slide's `BlockCache`.
const DEFAULT_BLOCK_CACHE_CAPACITY: usize = 100;
52
/// One page of results from [`SlideSource::list_slides`].
#[derive(Debug, Clone)]
pub struct SlideListResult {
    /// Slide identifiers contained in this page.
    pub slides: Vec<String>,
    /// Opaque cursor for fetching the next page; `None` when there are no more results.
    pub next_cursor: Option<String>,
}
65
/// A backend that can produce byte-range readers for slides, keyed by slide id
/// (e.g. a local directory or an object store — implementation-defined).
#[async_trait]
pub trait SlideSource: Send + Sync {
    /// Reader type used to fetch byte ranges of a slide's backing data.
    type Reader: RangeReader + 'static;

    /// Opens a new range reader for the slide identified by `slide_id`.
    async fn create_reader(&self, slide_id: &str) -> Result<Self::Reader, IoError>;

    /// Lists available slides, optionally filtered by `prefix`, paged via
    /// `limit`/`cursor`.
    ///
    /// The default implementation returns an empty page with no cursor, for
    /// sources that do not support enumeration.
    async fn list_slides(
        &self,
        _limit: u32,
        _cursor: Option<&str>,
        _prefix: Option<&str>,
    ) -> Result<SlideListResult, IoError> {
        Ok(SlideListResult {
            slides: vec![],
            next_cursor: None,
        })
    }
}
108
/// An opened slide together with its block-caching reader, ready to serve
/// metadata queries and tile reads.
pub struct CachedSlide<R: RangeReader + 'static> {
    // Format detected when the slide was opened.
    format: SlideFormat,

    // Block-caching wrapper around the source's range reader; `Arc` so the
    // cache of fetched blocks is shared across tile reads.
    reader: Arc<BlockCache<R>>,

    // Format-specific reader holding the parsed slide structure.
    inner: SlideReaderInner,
}
127
/// Format-specific reader implementation, dispatched over at runtime by
/// the `CachedSlide` accessors.
enum SlideReaderInner {
    Svs(SvsReader),
    GenericTiff(GenericTiffReader),
}
136
impl<R: RangeReader + 'static> CachedSlide<R> {
    /// Returns the slide format detected when the slide was opened.
    pub fn format(&self) -> SlideFormat {
        self.format
    }

    /// Number of resolution levels reported by the underlying reader.
    pub fn level_count(&self) -> usize {
        match &self.inner {
            SlideReaderInner::Svs(r) => r.level_count(),
            SlideReaderInner::GenericTiff(r) => r.level_count(),
        }
    }

    /// Full-resolution (level 0) dimensions, if known to the reader.
    pub fn dimensions(&self) -> Option<(u32, u32)> {
        match &self.inner {
            SlideReaderInner::Svs(r) => r.dimensions(),
            SlideReaderInner::GenericTiff(r) => r.dimensions(),
        }
    }

    /// Dimensions of the given pyramid `level`, or `None` if out of range.
    pub fn level_dimensions(&self, level: usize) -> Option<(u32, u32)> {
        match &self.inner {
            SlideReaderInner::Svs(r) => r.level_dimensions(level),
            SlideReaderInner::GenericTiff(r) => r.level_dimensions(level),
        }
    }

    /// Downsample factor of the given `level`, or `None` if out of range.
    pub fn level_downsample(&self, level: usize) -> Option<f64> {
        match &self.inner {
            SlideReaderInner::Svs(r) => r.level_downsample(level),
            SlideReaderInner::GenericTiff(r) => r.level_downsample(level),
        }
    }

    /// Tile (width, height) at the given `level`, or `None` if out of range.
    pub fn tile_size(&self, level: usize) -> Option<(u32, u32)> {
        match &self.inner {
            SlideReaderInner::Svs(r) => r.tile_size(level),
            SlideReaderInner::GenericTiff(r) => r.tile_size(level),
        }
    }

    /// Tile grid (columns, rows) at the given `level`, or `None` if out of range.
    pub fn tile_count(&self, level: usize) -> Option<(u32, u32)> {
        match &self.inner {
            SlideReaderInner::Svs(r) => r.tile_count(level),
            SlideReaderInner::GenericTiff(r) => r.tile_count(level),
        }
    }

    /// Combined metadata for the given `level`, or `None` if out of range.
    pub fn level_info(&self, level: usize) -> Option<LevelInfo> {
        match &self.inner {
            SlideReaderInner::Svs(r) => r.level_info(level),
            SlideReaderInner::GenericTiff(r) => r.level_info(level),
        }
    }

    /// Picks the pyramid level best suited to `downsample`, delegating the
    /// selection policy to the `SlideReader` trait implementation.
    pub fn best_level_for_downsample(&self, downsample: f64) -> Option<usize> {
        match &self.inner {
            SlideReaderInner::Svs(r) => SlideReader::best_level_for_downsample(r, downsample),
            SlideReaderInner::GenericTiff(r) => {
                SlideReader::best_level_for_downsample(r, downsample)
            }
        }
    }

    /// Reads the encoded bytes of tile (`tile_x`, `tile_y`) at `level`,
    /// going through the shared block cache.
    pub async fn read_tile(
        &self,
        level: usize,
        tile_x: u32,
        tile_y: u32,
    ) -> Result<Bytes, TiffError> {
        match &self.inner {
            SlideReaderInner::Svs(r) => {
                r.read_tile(self.reader.as_ref(), level, tile_x, tile_y)
                    .await
            }
            SlideReaderInner::GenericTiff(r) => {
                r.read_tile(self.reader.as_ref(), level, tile_x, tile_y)
                    .await
            }
        }
    }
}
236
/// Caches opened slides from a [`SlideSource`], collapsing concurrent opens
/// of the same slide into a single open ("singleflight").
pub struct SlideRegistry<S: SlideSource> {
    // Backend that produces range readers by slide id.
    source: S,

    // LRU cache of fully-opened slides, keyed by slide id.
    cache: RwLock<LruCache<String, Arc<CachedSlide<S::Reader>>>>,

    // Opens currently in progress, keyed by slide id (singleflight map).
    in_flight: Mutex<HashMap<String, Arc<InFlightState<S::Reader>>>>,

    // Block size used for each slide's `BlockCache`.
    block_size: usize,

    // Capacity (in blocks) of each slide's `BlockCache`.
    block_cache_capacity: usize,
}
264
/// Shared state for one in-flight slide open (singleflight rendezvous).
struct InFlightState<R: RangeReader + 'static> {
    // Signalled by the leader once `result` has been populated.
    notify: Notify,
    // Outcome of the open; `None` until the leader finishes.
    result: Mutex<Option<Result<Arc<CachedSlide<R>>, FormatError>>>,
}
272
273impl<S: SlideSource> SlideRegistry<S> {
274 pub fn new(source: S) -> Self {
280 Self::with_capacity(
281 source,
282 DEFAULT_SLIDE_CACHE_CAPACITY,
283 DEFAULT_BLOCK_SIZE,
284 DEFAULT_BLOCK_CACHE_CAPACITY,
285 )
286 }
287
288 pub fn with_capacity(
296 source: S,
297 slide_cache_capacity: usize,
298 block_size: usize,
299 block_cache_capacity: usize,
300 ) -> Self {
301 Self {
302 source,
303 cache: RwLock::new(LruCache::new(
304 std::num::NonZeroUsize::new(slide_cache_capacity).unwrap(),
305 )),
306 in_flight: Mutex::new(HashMap::new()),
307 block_size,
308 block_cache_capacity,
309 }
310 }
311
312 pub async fn get_slide(
325 &self,
326 slide_id: &str,
327 ) -> Result<Arc<CachedSlide<S::Reader>>, FormatError> {
328 {
330 let mut cache = self.cache.write().await;
331 if let Some(slide) = cache.get(slide_id) {
332 return Ok(slide.clone());
333 }
334 }
335
336 loop {
338 let state = {
339 let mut in_flight = self.in_flight.lock().await;
340
341 if let Some(state) = in_flight.get(slide_id) {
342 state.clone()
344 } else {
345 let state = Arc::new(InFlightState {
347 notify: Notify::new(),
348 result: Mutex::new(None),
349 });
350 in_flight.insert(slide_id.to_string(), state.clone());
351 drop(in_flight);
352
353 let result = self.open_slide_internal(slide_id).await;
355
356 {
358 let mut result_guard = state.result.lock().await;
359 *result_guard = Some(result.clone());
360 }
361
362 if let Ok(ref slide) = result {
363 let mut cache = self.cache.write().await;
364 cache.put(slide_id.to_string(), slide.clone());
365 }
366
367 {
369 let mut in_flight = self.in_flight.lock().await;
370 in_flight.remove(slide_id);
371 }
372 state.notify.notify_waiters();
373
374 return result;
375 }
376 };
377
378 state.notify.notified().await;
380
381 let result_guard = state.result.lock().await;
383 if let Some(ref result) = *result_guard {
384 return result.clone();
385 }
386
387 }
389 }
390
391 async fn open_slide_internal(
393 &self,
394 slide_id: &str,
395 ) -> Result<Arc<CachedSlide<S::Reader>>, FormatError> {
396 let reader = self.source.create_reader(slide_id).await?;
398
399 let cached_reader = Arc::new(BlockCache::with_capacity(
401 reader,
402 self.block_size,
403 self.block_cache_capacity,
404 ));
405
406 let format = detect_format(cached_reader.as_ref()).await?;
408
409 let inner = match format {
411 SlideFormat::AperioSvs => {
412 let svs = SvsReader::open(cached_reader.as_ref()).await?;
413 SlideReaderInner::Svs(svs)
414 }
415 SlideFormat::GenericTiff => {
416 let tiff = GenericTiffReader::open(cached_reader.as_ref()).await?;
417 SlideReaderInner::GenericTiff(tiff)
418 }
419 };
420
421 Ok(Arc::new(CachedSlide {
422 format,
423 reader: cached_reader,
424 inner,
425 }))
426 }
427
428 pub async fn invalidate(&self, slide_id: &str) {
432 let mut cache = self.cache.write().await;
433 cache.pop(slide_id);
434 }
435
436 pub async fn clear(&self) {
438 let mut cache = self.cache.write().await;
439 cache.clear();
440 }
441
442 pub async fn cached_count(&self) -> usize {
444 let cache = self.cache.read().await;
445 cache.len()
446 }
447
448 pub fn source(&self) -> &S {
452 &self.source
453 }
454}
455
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::atomic::{AtomicUsize, Ordering};

    /// Slide source serving the same in-memory bytes for every slide id,
    /// counting how many readers it creates (to verify caching behavior).
    struct MockSlideSource {
        // Number of `create_reader` calls so far.
        create_count: AtomicUsize,
        // Bytes served by every reader this source creates.
        data: Bytes,
    }

    impl MockSlideSource {
        fn new(data: Vec<u8>) -> Self {
            Self {
                create_count: AtomicUsize::new(0),
                data: Bytes::from(data),
            }
        }

        fn create_count(&self) -> usize {
            self.create_count.load(Ordering::SeqCst)
        }
    }

    /// In-memory `RangeReader` backed by a `Bytes` buffer.
    struct MockReader {
        data: Bytes,
        identifier: String,
    }

    #[async_trait]
    impl RangeReader for MockReader {
        async fn read_exact_at(&self, offset: u64, len: usize) -> Result<Bytes, IoError> {
            let start = offset as usize;
            let end = start + len;
            if end > self.data.len() {
                return Err(IoError::RangeOutOfBounds {
                    offset,
                    requested: len as u64,
                    size: self.data.len() as u64,
                });
            }
            Ok(self.data.slice(start..end))
        }

        fn size(&self) -> u64 {
            self.data.len() as u64
        }

        fn identifier(&self) -> &str {
            &self.identifier
        }
    }

    #[async_trait]
    impl SlideSource for MockSlideSource {
        type Reader = MockReader;

        async fn create_reader(&self, slide_id: &str) -> Result<Self::Reader, IoError> {
            self.create_count.fetch_add(1, Ordering::SeqCst);
            Ok(MockReader {
                data: self.data.clone(),
                identifier: format!("mock://{}", slide_id),
            })
        }
    }

    /// Builds a minimal little-endian tiled TIFF: one 2048x1536 IFD with
    /// 256x256 JPEG-compressed tiles (48 tiles), plus placeholder tile bytes.
    fn create_minimal_tiff() -> Vec<u8> {
        let mut data = vec![0u8; 8192];

        // Header: "II" (little-endian) magic, version 42.
        data[0] = 0x49; data[1] = 0x49; data[2] = 0x2A; data[3] = 0x00;
        // Offset of the first IFD = 8.
        data[4] = 0x08; data[5] = 0x00;
        data[6] = 0x00;
        data[7] = 0x00;

        // IFD entry count = 8.
        data[8] = 0x08;
        data[9] = 0x00;

        let mut offset = 10;

        // Writes one 12-byte IFD entry: tag, type, count, inline value.
        let write_entry =
            |data: &mut [u8], offset: &mut usize, tag: u16, typ: u16, count: u32, value: u32| {
                data[*offset..*offset + 2].copy_from_slice(&tag.to_le_bytes());
                data[*offset + 2..*offset + 4].copy_from_slice(&typ.to_le_bytes());
                data[*offset + 4..*offset + 8].copy_from_slice(&count.to_le_bytes());
                data[*offset + 8..*offset + 12].copy_from_slice(&value.to_le_bytes());
                *offset += 12;
            };

        // 256 ImageWidth (LONG) = 2048
        write_entry(&mut data, &mut offset, 256, 4, 1, 2048);
        // 257 ImageLength (LONG) = 1536
        write_entry(&mut data, &mut offset, 257, 4, 1, 1536);
        // 259 Compression (SHORT) = 7 (JPEG)
        write_entry(&mut data, &mut offset, 259, 3, 1, 7);
        // 322 TileWidth (SHORT) = 256
        write_entry(&mut data, &mut offset, 322, 3, 1, 256);
        // 323 TileLength (SHORT) = 256
        write_entry(&mut data, &mut offset, 323, 3, 1, 256);
        // 324 TileOffsets (LONG x48), array stored at byte 200
        write_entry(&mut data, &mut offset, 324, 4, 48, 200);
        // 325 TileByteCounts (LONG x48), array stored at byte 400
        write_entry(&mut data, &mut offset, 325, 4, 48, 400);
        // 258 BitsPerSample (SHORT) = 8
        write_entry(&mut data, &mut offset, 258, 3, 1, 8);

        // Next-IFD offset = 0: no further IFDs.
        data[offset..offset + 4].copy_from_slice(&0u32.to_le_bytes());

        // Tile-offset array: tiles laid out every 100 bytes starting at 1000.
        for i in 0..48u32 {
            let tile_offset = 1000 + i * 100;
            let arr_offset = 200 + (i as usize) * 4;
            data[arr_offset..arr_offset + 4].copy_from_slice(&tile_offset.to_le_bytes());
        }

        // Tile byte-count array: every tile claims 90 bytes.
        for i in 0..48u32 {
            let arr_offset = 400 + (i as usize) * 4;
            data[arr_offset..arr_offset + 4].copy_from_slice(&90u32.to_le_bytes());
        }

        // Placeholder tile payloads with JPEG-looking start/end marker bytes
        // (FFD8 ... FFD9).
        for i in 0..48 {
            let tile_start = 1000 + i * 100;
            data[tile_start] = 0xFF;
            data[tile_start + 1] = 0xD8;
            data[tile_start + 2] = 0xFF;
            data[tile_start + 3] = 0xDB;
            data[tile_start + 88] = 0xFF;
            data[tile_start + 89] = 0xD9;
        }

        data
    }

    #[tokio::test]
    async fn test_registry_caches_slides() {
        let tiff_data = create_minimal_tiff();
        let source = MockSlideSource::new(tiff_data);
        let registry = SlideRegistry::with_capacity(source, 10, 256, 10);

        // First request opens a reader.
        let result = registry.get_slide("test.tif").await;
        assert!(result.is_ok());
        assert_eq!(registry.source.create_count(), 1);

        // Second request for the same id is served from cache.
        let result2 = registry.get_slide("test.tif").await;
        assert!(result2.is_ok());
        assert_eq!(registry.source.create_count(), 1);

        // A different id misses the cache and opens a new reader.
        let result3 = registry.get_slide("test2.tif").await;
        assert!(result3.is_ok());
        assert_eq!(registry.source.create_count(), 2);
    }

    #[tokio::test]
    async fn test_registry_cache_eviction() {
        let tiff_data = create_minimal_tiff();
        let source = MockSlideSource::new(tiff_data);
        // Capacity 2: opening a third slide evicts the least recently used.
        let registry = SlideRegistry::with_capacity(source, 2, 256, 10);

        registry.get_slide("slide1.tif").await.unwrap();
        registry.get_slide("slide2.tif").await.unwrap();
        registry.get_slide("slide3.tif").await.unwrap();

        assert_eq!(registry.source.create_count(), 3);
        assert_eq!(registry.cached_count().await, 2);

        // slide1 was evicted, so requesting it again re-opens it.
        registry.get_slide("slide1.tif").await.unwrap();
        assert_eq!(registry.source.create_count(), 4);
    }

    #[tokio::test]
    async fn test_registry_invalidate() {
        let tiff_data = create_minimal_tiff();
        let source = MockSlideSource::new(tiff_data);
        let registry = SlideRegistry::new(source);

        registry.get_slide("test.tif").await.unwrap();
        assert_eq!(registry.source.create_count(), 1);

        // Invalidation removes the cached entry...
        registry.invalidate("test.tif").await;
        assert_eq!(registry.cached_count().await, 0);

        // ...so the next request must re-open.
        registry.get_slide("test.tif").await.unwrap();
        assert_eq!(registry.source.create_count(), 2);
    }

    #[tokio::test]
    async fn test_registry_clear() {
        let tiff_data = create_minimal_tiff();
        let source = MockSlideSource::new(tiff_data);
        let registry = SlideRegistry::new(source);

        registry.get_slide("slide1.tif").await.unwrap();
        registry.get_slide("slide2.tif").await.unwrap();
        assert_eq!(registry.cached_count().await, 2);

        // clear() empties the whole cache at once.
        registry.clear().await;
        assert_eq!(registry.cached_count().await, 0);
    }

    #[tokio::test]
    async fn test_cached_slide_metadata() {
        let tiff_data = create_minimal_tiff();
        let source = MockSlideSource::new(tiff_data);
        let registry = SlideRegistry::new(source);

        let slide = registry.get_slide("test.tif").await.unwrap();

        // Values mirror the IFD written by create_minimal_tiff():
        // 2048x1536 image, 256x256 tiles => 8x6 tile grid.
        assert_eq!(slide.format(), SlideFormat::GenericTiff);
        assert_eq!(slide.level_count(), 1);
        assert_eq!(slide.dimensions(), Some((2048, 1536)));
        assert_eq!(slide.tile_size(0), Some((256, 256)));
        assert_eq!(slide.tile_count(0), Some((8, 6)));
    }

    #[tokio::test]
    async fn test_concurrent_opens_singleflight() {
        use std::sync::atomic::AtomicBool;
        use tokio::time::{sleep, Duration};

        /// Source whose create_reader is slow and asserts it is never
        /// entered concurrently — proving singleflight collapses opens.
        struct SlowMockSource {
            data: Bytes,
            create_count: AtomicUsize,
            // True while a create_reader call is in progress.
            is_creating: AtomicBool,
        }

        impl SlowMockSource {
            fn new(data: Vec<u8>) -> Self {
                Self {
                    data: Bytes::from(data),
                    create_count: AtomicUsize::new(0),
                    is_creating: AtomicBool::new(false),
                }
            }
        }

        #[async_trait]
        impl SlideSource for SlowMockSource {
            type Reader = MockReader;

            async fn create_reader(&self, slide_id: &str) -> Result<Self::Reader, IoError> {
                // Fail loudly if two creates overlap in time.
                let was_creating = self.is_creating.swap(true, Ordering::SeqCst);
                assert!(
                    !was_creating,
                    "Concurrent creates detected - singleflight failed!"
                );

                self.create_count.fetch_add(1, Ordering::SeqCst);
                // Widen the race window so overlapping opens would collide.
                sleep(Duration::from_millis(50)).await;

                self.is_creating.store(false, Ordering::SeqCst);

                Ok(MockReader {
                    data: self.data.clone(),
                    identifier: format!("mock://{}", slide_id),
                })
            }
        }

        let tiff_data = create_minimal_tiff();
        let source = SlowMockSource::new(tiff_data);
        let registry = Arc::new(SlideRegistry::new(source));

        // Fire five concurrent requests for the same slide.
        let mut handles = Vec::new();
        for _ in 0..5 {
            let registry = registry.clone();
            handles.push(tokio::spawn(
                async move { registry.get_slide("test.tif").await },
            ));
        }

        // All requests succeed...
        for handle in handles {
            let result = handle.await.unwrap();
            assert!(result.is_ok());
        }

        // ...but only one reader was ever created.
        assert_eq!(registry.source.create_count.load(Ordering::SeqCst), 1);
    }
}