use std::{
collections::HashMap,
fs,
path::PathBuf,
time::{SystemTime, UNIX_EPOCH},
};
use serde::{Deserialize, Serialize};
use crate::api::GithubStats;
// An entry younger than this (6 hours) is considered fresh by `is_fresh`.
const TTL_SECS: u64 = 6 * 3600;
// Entries older than this (7 * TTL = 42 hours) are dropped during `insert`'s prune pass.
const PRUNE_SECS: u64 = TTL_SECS * 7;
/// One cached snapshot of a repository's GitHub stats, as persisted in
/// the JSON cache file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CachedEntry {
    /// Star count copied from the fetched stats.
    pub stars: u32,
    /// Fork count copied from the fetched stats.
    pub forks: u32,
    /// Open-issue count copied from the fetched stats.
    pub issues: u32,
    /// Unix timestamp (seconds) recorded when the entry was created.
    pub cached_at: u64,
}
impl CachedEntry {
    /// Returns `true` while this entry is younger than [`TTL_SECS`].
    ///
    /// `saturating_sub` guards against a `cached_at` stamped in the
    /// future (clock skew): the age clamps to 0 and the entry reads as
    /// fresh instead of underflowing.
    pub fn is_fresh(&self) -> bool {
        unix_now().saturating_sub(self.cached_at) < TTL_SECS
    }

    /// Human-readable age of this entry: "just now", "{n}m ago",
    /// "{n}h ago", or "{n}d ago".
    pub fn age_label(&self) -> String {
        let secs = unix_now().saturating_sub(self.cached_at);
        if secs < 60 {
            "just now".into()
        } else if secs < 3600 {
            format!("{}m ago", secs / 60)
        } else if secs < 86_400 {
            format!("{}h ago", secs / 3600)
        } else {
            // Entries may outlive a day (PRUNE_SECS is 42h), so report
            // days rather than an awkward "30h ago".
            format!("{}d ago", secs / 86_400)
        }
    }
}
impl From<&GithubStats> for CachedEntry {
fn from(s: &GithubStats) -> Self {
Self {
stars: s.stars,
forks: s.forks,
issues: s.issues,
cached_at: unix_now(),
}
}
}
/// Maps a repository URL (as used by the callers of `get_*`/`insert`) to its cached stats.
pub type CacheMap = HashMap<String, CachedEntry>;
/// Current Unix time in whole seconds; falls back to 0 if the system
/// clock reports a time before the epoch.
fn unix_now() -> u64 {
    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_secs(),
        Err(_) => 0, // clock set before 1970 — treat as the epoch
    }
}
/// Location of the on-disk cache file under the platform cache dir,
/// e.g. `~/.cache/hexplorer/gh_stats.json` on Linux. `None` when the
/// platform reports no cache directory.
fn cache_path() -> Option<PathBuf> {
    let base = dirs::cache_dir()?;
    Some(base.join("hexplorer").join("gh_stats.json"))
}
/// Reads the cache file from disk.
///
/// Every failure mode — no cache directory, missing/unreadable file,
/// corrupt JSON — degrades to an empty map: the cache is best-effort.
pub fn load() -> CacheMap {
    cache_path()
        .and_then(|path| fs::read(path).ok())
        .and_then(|bytes| serde_json::from_slice(&bytes).ok())
        .unwrap_or_default()
}
/// Persists the cache to disk as pretty-printed JSON.
///
/// Best-effort: directory creation, serialization, and the write may
/// each fail silently — a broken cache must never take down the app.
pub fn save(map: &CacheMap) {
    let Some(path) = cache_path() else { return };
    // Make sure the parent directory exists before attempting the write.
    if let Some(dir) = path.parent() {
        let _ = fs::create_dir_all(dir);
    }
    let Ok(payload) = serde_json::to_vec_pretty(map) else { return };
    let _ = fs::write(&path, payload);
}
/// Looks up `repo_url`, returning the entry only if it is still within
/// its TTL; a stale or missing entry yields `None`.
pub fn get_fresh<'a>(map: &'a CacheMap, repo_url: &str) -> Option<&'a CachedEntry> {
    match map.get(repo_url) {
        Some(entry) if entry.is_fresh() => Some(entry),
        _ => None,
    }
}
/// Looks up `repo_url` regardless of age — used to show stale stats
/// (with an age label) while a refresh is pending.
pub fn get_any<'a>(map: &'a CacheMap, repo_url: &str) -> Option<&'a CachedEntry> {
map.get(repo_url)
}
/// Records fresh stats for `repo_url`, prunes entries older than
/// [`PRUNE_SECS`], and persists the map to disk.
pub fn insert(map: &mut CacheMap, repo_url: String, stats: &GithubStats) {
    map.insert(repo_url, CachedEntry::from(stats));
    // Compute the cutoff timestamp once; calling unix_now() inside the
    // closure would issue a SystemTime syscall per retained entry.
    let now = unix_now();
    map.retain(|_, entry| now.saturating_sub(entry.cached_at) < PRUNE_SECS);
    save(map);
}