// geocog/tile_cache.rs

1use lru::LruCache;
2use std::cmp::max;
3use std::hash::{Hash, Hasher};
4use std::path::Path;
5use std::sync::{Arc, Mutex};
6
7const CACHE_CAPACITY_BYTES: usize = 512 * 1024 * 1024; // 512 MB upper bound
8
/// Discriminant for the tile representation a cache entry was decoded
/// from; part of the cache key so the two variants never collide.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TileKind {
    /// Chunked tile layout.
    Chunked,
    /// LZW-compressed tile layout.
    Lzw,
}
14
/// Key identifying one decoded tile in the global cache.
///
/// The path is held as `Arc<str>` so cloning a key is a refcount bump,
/// not a string copy. `Hash` is implemented manually below and must stay
/// in agreement with the derived `PartialEq`/`Eq`.
#[derive(Clone, Eq, PartialEq)]
struct TileKey {
    // Source file path, lossily converted to UTF-8 in `TileKey::new`.
    path: Arc<str>,
    // Tile representation component of the key.
    kind: TileKind,
    // Tile index within the file, narrowed from `usize` in `TileKey::new`.
    index: u32,
}
21
22impl Hash for TileKey {
23    fn hash<H: Hasher>(&self, state: &mut H) {
24        self.path.hash(state);
25        self.kind.hash(state);
26        self.index.hash(state);
27    }
28}
29
30impl TileKey {
31    fn new(path: &Path, kind: TileKind, index: usize) -> Self {
32        let path_str: Box<str> = path.to_string_lossy().into_owned().into_boxed_str();
33        TileKey {
34            path: Arc::from(path_str),
35            kind,
36            index: index as u32,
37        }
38    }
39}
40
/// Value stored per cache slot: the decoded tile plus the byte size it
/// was charged at insertion time.
struct CacheEntry {
    // Shared handle to the decoded samples; lookups hand out Arc clones,
    // so eviction never invalidates data held by outstanding readers.
    data: Arc<Vec<f32>>,
    // Bytes charged against the cache budget for this entry; uncharged
    // with the same value on eviction/replacement.
    size_bytes: usize,
}
45
/// Byte-budgeted LRU cache of decoded tiles.
///
/// The inner `LruCache` is unbounded in entry count; the capacity is
/// enforced in bytes by `insert`, which evicts least-recently-used
/// entries until a new entry fits.
pub struct TileCache {
    // Running sum of `size_bytes` over all live entries.
    current_bytes: usize,
    // Byte budget that `insert` enforces.
    capacity_bytes: usize,
    // Recency-ordered entries; byte accounting is done manually via the
    // two counters above.
    entries: LruCache<TileKey, CacheEntry>,
}
51
52impl TileCache {
53    fn new(capacity_bytes: usize) -> Self {
54        TileCache {
55            current_bytes: 0,
56            capacity_bytes,
57            entries: LruCache::unbounded(),
58        }
59    }
60
61    fn get(&mut self, key: &TileKey) -> Option<Arc<Vec<f32>>> {
62        self.entries.get(key).map(|entry| Arc::clone(&entry.data))
63    }
64
65    fn contains(&mut self, key: &TileKey) -> bool {
66        self.entries.contains(key)
67    }
68
69    fn insert(&mut self, key: TileKey, data: Arc<Vec<f32>>, size_bytes: usize) {
70        if size_bytes > self.capacity_bytes {
71            return;
72        }
73
74        if let Some(old) = self.entries.pop(&key) {
75            self.current_bytes = self.current_bytes.saturating_sub(old.size_bytes);
76        }
77
78        while self.current_bytes + size_bytes > self.capacity_bytes {
79            if let Some((_key, entry)) = self.entries.pop_lru() {
80                self.current_bytes = self.current_bytes.saturating_sub(entry.size_bytes);
81            } else {
82                break;
83            }
84        }
85
86        self.current_bytes = self.current_bytes.saturating_add(size_bytes);
87        self.entries.put(key, CacheEntry { data, size_bytes });
88    }
89}
90
91static TILE_CACHE: std::sync::LazyLock<Mutex<TileCache>> = std::sync::LazyLock::new(|| {
92    let cap = max(CACHE_CAPACITY_BYTES, 64 * 1024 * 1024); // never below 64MB
93    Mutex::new(TileCache::new(cap))
94});
95
/// Normalize a `(path, kind, index)` triple into the internal `TileKey`.
fn make_key(path: &Path, kind: TileKind, index: usize) -> TileKey {
    TileKey::new(path, kind, index)
}
99
100pub fn get(path: &Path, kind: TileKind, index: usize) -> Option<Arc<Vec<f32>>> {
101    let key = make_key(path, kind, index);
102    TILE_CACHE.lock().unwrap().get(&key)
103}
104
105pub fn contains(path: &Path, kind: TileKind, index: usize) -> bool {
106    let key = make_key(path, kind, index);
107    TILE_CACHE.lock().unwrap().contains(&key)
108}
109
110pub fn insert(path: &Path, kind: TileKind, index: usize, data: Arc<Vec<f32>>) {
111    let size_bytes = data.len() * std::mem::size_of::<f32>();
112    let key = make_key(path, kind, index);
113    TILE_CACHE.lock().unwrap().insert(key, data, size_bytes);
114}