trailcache_core/cache/fetch.rs
//! Generic fetch-with-cache pattern.
//!
//! Encapsulates the repeated logic: check cache → fetch API → save → fallback to stale.

use anyhow::Result;
use std::future::Future;

use super::manager::CachedData;
9
10/// Fetch data with cache fallback.
11///
12/// - `offline` — if true, return cached data only (regardless of staleness)
13/// - `load_cache` — loads cached data (returns `Ok(None)` if absent)
14/// - `save_cache` — saves fetched data to cache
15/// - `fetch_api` — async API fetch
16///
17/// Returns:
18/// - Offline + cached → `Ok(Some(data))`
19/// - Offline + no cache → `Ok(None)`
20/// - Cache fresh → `Ok(Some(data))`
21/// - API success → saves + `Ok(Some(data))`
22/// - API error + stale cache → `Ok(Some(stale_data))`
23/// - API error + no cache → `Err`
24pub async fn fetch_with_cache<T>(
25 offline: bool,
26 load_cache: impl FnOnce() -> Result<Option<CachedData<T>>>,
27 save_cache: impl FnOnce(&T) -> Result<()>,
28 fetch_api: impl Future<Output = Result<T>>,
29) -> Result<Option<T>> {
30 // Offline mode: return whatever we have
31 if offline {
32 return Ok(load_cache()?.map(|c| c.data));
33 }
34
35 // Check cache freshness
36 let stale_data = match load_cache()? {
37 Some(cached) if !cached.is_stale() => return Ok(Some(cached.data)),
38 Some(cached) => Some(cached.data),
39 None => None,
40 };
41
42 // Fetch from API
43 match fetch_api.await {
44 Ok(data) => {
45 let _ = save_cache(&data);
46 Ok(Some(data))
47 }
48 Err(e) => match stale_data {
49 Some(data) => Ok(Some(data)),
50 None => Err(e),
51 },
52 }
53}