clache/global_cache.rs
1//! Globally-accessible storage for performant, path-indexed caching.
2//!
3//! The global cache supports storage of **any type** implementing [`Any`], [`Send`], and [`Sync`],
4//! dynamically.
5//!
6//! It is recommended to use namespacing in the paths in order to avoid conflicts, but this is not
7//! required. To ensure consistency, use `namespace://path` for namespacing, and if you don't use
8//! namespacing, avoid paths that begin that way. To obtain a list of all currently cached paths in
9//! a namespace, use [`namespaced_paths`].
10//!
11//! ## Type Safety
12//!
//! Cached items must implement [`Any`], [`Send`], and [`Sync`] in order to be stored in the global
//! cache. Unless you use a `dyn` method such as [`get_dyn`], retrieval downcasts the stored value
//! to the requested type; if the downcast fails, [`None`] is returned. This runtime check makes
//! the cache slightly less efficient than [`LocalCache`], which uses static typing.
17//!
18//! ## Examples
19//!
//! ```rust
//! use std::sync::Arc;
//! use clache::prelude::*;
//!
//! fn load_texture() -> String {
//!     "texture data".to_string()
//! }
//!
//! // Slow first lookup
//! let texture = GlobalCache::get_or_else("assets/texture.png", || {
//!     Arc::new(load_texture())
//! });
//!
//! // ...
//!
//! // Much faster lookup, since "assets/texture.png" is already in the cache
//! let texture = GlobalCache::get_or_else("assets/texture.png", || {
//!     Arc::new(load_texture())
//! });
//! ```
41//!
42//! If only a single type is needed, and the path is not uniquely identifying, use [`LocalCache`].
43//!
44//! [`LocalCache`]: crate::LocalCache
45//! [`get_dyn`]: GlobalCache::get_dyn
46//! [`namespaced_paths`]: GlobalCache::namespaced_paths
47
48use std::{
49 any::Any,
50 collections::{HashMap, HashSet},
51 sync::{Arc, LazyLock, RwLock},
52};
53
54use crate::fused_rw::FusedRw;
55
/// A single cache slot: `None` while a loader has reserved the path but not yet written a value,
/// `Some` once initialized. The outer [`Arc`] lets a slot be handed to callers without holding
/// the map-wide lock.
type Item = Arc<FusedRw<Option<Arc<dyn Any + Send + Sync>>>>;
57
/// The process-wide path → slot map, lazily created on first access.
/// The `RwLock` guards only the map itself; per-value locking lives inside each [`Item`].
static CACHE: LazyLock<RwLock<HashMap<String, Item>>> =
    LazyLock::new(|| RwLock::new(HashMap::new()));
60
/// The shared, global cache, supporting dynamic typing. See the [module-level docs] for more info.
///
/// [module-level docs]: crate::global_cache
pub struct GlobalCache {
    // Zero-sized private marker field: prevents construction from outside this module,
    // since all functionality is exposed through associated functions on the type.
    _p: std::marker::PhantomData<[u8]>,
}
67
68impl GlobalCache {
69 /// Checks if a value exists in the **global** cache, regardless of type.
70 pub fn is_cached(path: impl AsRef<str>) -> bool {
71 let Some(entry) = CACHE
72 .read()
73 .expect("Cache cannot poison")
74 .get(path.as_ref())
75 .cloned()
76 else {
77 return false;
78 };
79
80 entry.read().is_some()
81 }
82
83 /// Checks if a value exists in the **global** cache, regardless of type, asynchronously.
84 pub async fn is_cached_async(path: impl AsRef<str>) -> bool {
85 let Some(entry) = CACHE
86 .read()
87 .expect("Cache cannot poison")
88 .get(path.as_ref())
89 .cloned()
90 else {
91 return false;
92 };
93
94 entry.read_async().await.is_some()
95 }
96
97 /// Checks if a value exists in the **global** cache, is initialized, and is of the correct
98 /// type.
99 pub fn is_cached_as<T: Any + Send + Sync>(path: impl AsRef<str>) -> bool {
100 let Some(entry) = CACHE
101 .read()
102 .expect("Cache cannot poison")
103 .get(path.as_ref())
104 .cloned()
105 else {
106 return false;
107 };
108
109 entry.read().as_ref().is_some_and(|e| e.is::<T>())
110 }
111
112 /// Checks if a value exists in the **global** cache, is initialized, and is of the correct
113 /// type. Waits asynchronously.
114 pub async fn is_cached_as_async<T: Any + Send + Sync>(path: impl AsRef<str>) -> bool {
115 let Some(entry) = CACHE
116 .read()
117 .expect("Cache cannot poison")
118 .get(path.as_ref())
119 .cloned()
120 else {
121 return false;
122 };
123
124 entry
125 .read_async()
126 .await
127 .as_ref()
128 .is_some_and(|e| e.is::<T>())
129 }
130
131 /// Retrieves a value from the **global** cache, if it exists, without trying to downcast it.
132 pub fn get_dyn(path: impl AsRef<str>) -> Option<Arc<dyn Any + Send + Sync>> {
133 let entry = {
134 let guard = CACHE.read().expect("Cache cannot poison");
135 let entry = guard.get(path.as_ref())?.clone();
136 drop(guard);
137 entry
138 };
139
140 entry.read().clone()
141 }
142
143 /// Retrieves a value from the **global** cache, if it exists, without trying to downcast it,
144 /// asynchronously.
145 pub async fn get_dyn_async(path: impl AsRef<str>) -> Option<Arc<dyn Any + Send + Sync>> {
146 let entry = {
147 let guard = CACHE.read().expect("Cache cannot poison");
148 let entry = guard.get(path.as_ref())?.clone();
149 drop(guard);
150 entry
151 };
152
153 entry.read_async().await.clone()
154 }
155
156 /// Retrieves a value from the **global** cache, if it exists. Also returns [`None`] if the
157 /// cached value is of the wrong type.
158 ///
159 /// If you wish to insert a default value, use [`get_or`] instead.
160 ///
161 /// This is not suitable for async.
162 ///
163 /// [`get_or`]: Self::get_or
164 pub fn get<T: Any + Send + Sync>(path: impl AsRef<str>) -> Option<Arc<T>> {
165 Self::get_dyn(path).and_then(|v| v.downcast().ok())
166 }
167
168 /// Retrieves a value from the **global** cache, if it exists, asynchronously. Also returns
169 /// [`None`] if the cached value is of the wrong type.
170 ///
171 /// If you wish to insert a default value, use [`get_or_async`] instead.
172 ///
173 /// This is not suitable for async.
174 ///
175 /// [`get_or_async`]: Self::get_or_async
176 pub async fn get_async<T: Any + Send + Sync>(path: impl AsRef<str>) -> Option<Arc<T>> {
177 Self::get_dyn_async(path)
178 .await
179 .and_then(|v| v.downcast().ok())
180 }
181
182 /// Retrieves a value from the **global** cache or adds `default` if it does not exist.
183 /// Also returns [`None`] if the cached value is of the wrong type.
184 ///
185 /// This eagerly evaluates `default` even if the value already exists in the cache. It is
186 /// generally better to use [`get_or_else`] or one of its variants instead.
187 ///
188 /// [`get_or_else`]: Self::get_or_else
189 pub fn get_or<T: Any + Send + Sync>(path: impl AsRef<str>, default: Arc<T>) -> Option<Arc<T>> {
190 if let Some(good) = Self::get_dyn(path.as_ref()) {
191 return good.downcast().ok();
192 }
193
194 let entry = {
195 let mut guard = CACHE.write().expect("Cache cannot poison");
196 guard
197 .entry(path.as_ref().to_string())
198 .or_insert_with(|| Arc::new(FusedRw::new(None)))
199 .clone()
200 };
201
202 if let Some(good) = entry.read().as_ref() {
203 good.clone().downcast().ok()
204 } else {
205 assert!(
206 entry.write().replace(default.clone()).is_none(),
207 "Cached value should be None"
208 );
209 Some(default)
210 }
211 }
212
213 /// Retrieves a value from the **global** cache **asynchronously** or adds `default` if it does
214 /// not exist. Also returns [`None`] if the cached value is of the wrong type.
215 ///
216 /// This eagerly evaluates `default` even if the value already exists in the cache. It is
217 /// generally better to use [`get_or_else_async`] or one of its variants instead.
218 ///
219 /// [`get_or_else_async`]: Self::get_or_else_async
220 pub async fn get_or_async<T: Any + Send + Sync>(
221 path: impl AsRef<str>,
222 default: Arc<T>,
223 ) -> Option<Arc<T>> {
224 if let Some(good) = Self::get_dyn_async(path.as_ref()).await {
225 return good.downcast().ok();
226 }
227
228 let entry = {
229 let mut guard = CACHE.write().expect("Cache cannot poison");
230 guard
231 .entry(path.as_ref().to_string())
232 .or_insert_with(|| Arc::new(FusedRw::new(None)))
233 .clone()
234 };
235
236 if let Some(good) = entry.read_async().await.as_ref() {
237 good.clone().downcast().ok()
238 } else {
239 assert!(
240 entry.write_async().await.replace(default.clone()).is_none(),
241 "Cached value should be None"
242 );
243 Some(default)
244 }
245 }
246
247 /// Retrieves a value from the **global** cache or loads it from a closure if it does not exist.
248 /// Also returns [`None`] if the cached value is of the wrong type.
249 pub fn get_or_else<T: Any + Send + Sync, F: FnOnce() -> Arc<T>>(
250 path: impl AsRef<str>,
251 f: F,
252 ) -> Option<Arc<T>> {
253 if let Some(good) = Self::get_dyn(path.as_ref()) {
254 return good.downcast().ok();
255 }
256
257 let entry = {
258 let mut guard = CACHE.write().expect("Cache cannot poison");
259 guard
260 .entry(path.as_ref().to_string())
261 .or_insert_with(|| Arc::new(FusedRw::new(None)))
262 .clone()
263 };
264
265 if let Some(good) = entry.read().as_ref() {
266 good.clone().downcast().ok()
267 } else {
268 let loaded = f();
269 assert!(
270 entry.write().replace(loaded.clone()).is_none(),
271 "Cached value should be None"
272 );
273 Some(loaded)
274 }
275 }
276
277 /// Retrieves a value from the **global** cache or loads it from an *async* closure if it does not
278 /// exist. Also returns [`None`] if the cached value is of the wrong type.
279 ///
280 /// Unlike the non-async variants, this **not** an atomic operation. Your closure may be run more
281 /// than once, but only the first run will write to the cache.
282 pub async fn get_or_else_async<T: Any + Send + Sync, F: AsyncFnOnce() -> Arc<T>>(
283 path: impl AsRef<str>,
284 f: F,
285 ) -> Option<Arc<T>> {
286 if let Some(good) = Self::get_dyn_async(path.as_ref()).await {
287 return good.downcast().ok();
288 }
289
290 let entry = {
291 let mut guard = CACHE.write().expect("Cache cannot poison");
292 guard
293 .entry(path.as_ref().to_string())
294 .or_insert_with(|| Arc::new(FusedRw::new(None)))
295 .clone()
296 };
297
298 if let Some(good) = entry.read_async().await.as_ref() {
299 good.clone().downcast().ok()
300 } else {
301 let loaded = f().await;
302 assert!(
303 entry.write_async().await.replace(loaded.clone()).is_none(),
304 "Cached value should be None"
305 );
306 Some(loaded)
307 }
308 }
309
310 /// Removes a path from the **global** cache, if it exists.
311 ///
312 /// Any values that have been acquired from the cache will remain valid until they are dropped.
313 pub fn uncache(path: impl AsRef<str>) {
314 // Don't care about entries; they'll drop eventually
315 let mut guard = CACHE.write().expect("Cache cannot poison");
316 guard.remove(path.as_ref());
317 }
318
319 /// Removes all paths from the **global** cache.
320 ///
321 /// This is dangerous, as it can cause other crates to lose their dependent values. Any values
322 /// that have been acquired from the cache will remain valid until they are dropped.
323 ///
324 /// If you want a safer alternative, iterate and use [`uncache`].
325 ///
326 /// [`uncache`]: Self::uncache
327 pub fn clear() {
328 let mut guard = CACHE.write().expect("Cache cannot poison");
329 guard.clear();
330 }
331
332 /// Returns all paths starting with `namespace`, stripping the namespace prefix.
333 ///
334 /// This is only a snapshot, and future insertions and removals will not be reflected. Some
335 /// entries may be in the process of being written, but most functions will wait until they
336 /// are ready.
337 pub fn namespaced_paths(namespace: impl AsRef<str>) -> HashSet<String> {
338 let ns = namespace.as_ref();
339 let guard = CACHE.read().expect("Cache cannot poison");
340 guard
341 .iter()
342 .filter_map(|(k, v)| k.strip_prefix(ns).map(|s| (s.to_string(), v)))
343 .filter(|(_, v)| v.read().is_some())
344 .map(|(k, _)| k)
345 .collect()
346 }
347
348 /// Returns all paths starting with `namespace` **asynchronously**, stripping the namespace
349 /// prefix.
350 ///
351 /// This is only a snapshot, and future insertions and removals will not be reflected. Some
352 /// entries may be in the process of being written, but most functions will wait until they
353 /// are ready.
354 pub async fn namespaced_paths_async(namespace: impl AsRef<str>) -> HashSet<String> {
355 let ns = namespace.as_ref();
356 let namespaced: Vec<_> = {
357 CACHE
358 .read()
359 .expect("Cache cannot poison")
360 .iter()
361 .filter_map(|(k, _)| k.strip_prefix(ns))
362 .map(|s| s.to_string())
363 .collect()
364 };
365
366 let mut hs = HashSet::new();
367 for k in namespaced {
368 let Some(entry) = CACHE.read().expect("Cache cannot poison").get(&k).cloned() else {
369 continue;
370 };
371
372 if entry.read_async().await.is_some() {
373 hs.insert(k);
374 }
375 }
376
377 hs
378 }
379}
380
381#[cfg(test)]
382mod tests {
383 use super::*;
384 use std::sync::atomic::AtomicUsize;
385
386 #[test]
387 fn simple() {
388 let string = GlobalCache::get_or_else("abc", || Arc::new("hello".to_string())).unwrap();
389 assert_eq!(*string, "hello");
390 }
391
392 #[test]
393 fn shared() {
394 let one = GlobalCache::get_or_else("123", || {
395 println!("Running one");
396 Arc::new("hello".to_string())
397 })
398 .unwrap();
399 let two = GlobalCache::get_or_else("123", || {
400 println!("Running two");
401 Arc::new("hello".to_string())
402 })
403 .unwrap();
404
405 assert!(Arc::ptr_eq(&one, &two));
406 }
407
408 #[test]
409 fn simultaneous30() {
410 let mut handles = Vec::new();
411 const N: usize = 30;
412 let barrier = Arc::new(std::sync::Barrier::new(N));
413 let runs = Arc::new(AtomicUsize::new(0));
414 for _ in 0..N {
415 let bar_clone = barrier.clone();
416 let runs_clone = runs.clone();
417 let handle = std::thread::spawn(move || {
418 bar_clone.wait();
419 let string = GlobalCache::get_or_else("xyz", || {
420 runs_clone.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
421 Arc::new("hello".to_string())
422 })
423 .unwrap();
424 assert_eq!(*string, "hello");
425 });
426 handles.push(handle);
427 }
428 for handle in handles {
429 handle.join().unwrap();
430 }
431
432 assert_eq!(runs.load(std::sync::atomic::Ordering::SeqCst), 1);
433 }
434}