use crate::error::CacheError;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
/// Kind of upstream a cache entry was fetched from.
///
/// Serialized into the metadata JSON as a lowercase string
/// (`"url"` / `"git"`) via the `rename_all` attribute below.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum SourceType {
/// Plain URL download (a single cached file, optionally extracted).
Url,
/// Git checkout (a cached directory).
Git,
}
/// Metadata for a single cached artifact, persisted as one JSON file per
/// cache key under the index's `.metadata` directory.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheEntry {
/// Whether this entry is a URL download or a git checkout.
pub source_type: SourceType,
/// Source URL the artifact was fetched from.
pub url: String,
/// Expected checksum of the download, if one was supplied.
pub checksum: Option<String>,
/// Algorithm of `checksum` — presumably e.g. "sha256"; confirm against writers.
pub checksum_type: Option<String>,
/// Filename the download resolved to, when it differs from the URL's.
/// NOTE(review): assumed from the name — confirm against the writer.
pub actual_filename: Option<String>,
/// Resolved git commit hash — presumably what `git_rev` resolved to; verify.
pub git_commit: Option<String>,
/// Requested git revision (branch/tag/commit) — assumed; verify against caller.
pub git_rev: Option<String>,
/// Path of the cached artifact, relative to the cache root
/// (joined onto `cache_dir` by `CacheIndex::get_cache_path`).
pub cache_path: PathBuf,
/// Path of the extracted directory, relative to the cache root, if the
/// artifact was unpacked (joined by `CacheIndex::get_extracted_path`).
pub extracted_path: Option<PathBuf>,
/// Updated by `CacheIndex::touch`; drives age-based eviction in
/// `CacheIndex::cleanup_old_entries`.
pub last_accessed: chrono::DateTime<chrono::Utc>,
/// Creation timestamp of the entry.
pub created: chrono::DateTime<chrono::Utc>,
/// Associated lock file, if any — semantics not visible here; see callers.
pub lock_file: Option<PathBuf>,
/// Set by `CacheIndex::set_attestation_verified`. `serde(default)` keeps
/// metadata written before this field existed deserializable (as `false`).
#[serde(default)]
pub attestation_verified: bool,
}
/// In-memory + on-disk index of cache entries.
///
/// The in-memory map is authoritative during a run; mutations are mirrored
/// to one JSON file per key under `metadata_dir`.
pub struct CacheIndex {
/// Root directory holding the cached artifacts themselves.
cache_dir: PathBuf,
/// `<cache_dir>/.metadata`, containing one `<key>.json` per entry.
metadata_dir: PathBuf,
/// Entries keyed by cache key (see `generate_cache_key` /
/// `generate_git_cache_key`), guarded for concurrent async access.
entries: tokio::sync::RwLock<HashMap<String, CacheEntry>>,
}
impl CacheIndex {
    /// Open (or create) a cache index rooted at `cache_dir`.
    ///
    /// Per-entry metadata lives as one JSON file per cache key under
    /// `<cache_dir>/.metadata`; all existing metadata files are loaded
    /// eagerly into memory.
    ///
    /// # Errors
    /// Fails if the metadata directory cannot be created or read.
    pub async fn new(cache_dir: PathBuf) -> Result<Self, CacheError> {
        let metadata_dir = cache_dir.join(".metadata");
        if !metadata_dir.exists() {
            fs_err::tokio::create_dir_all(&metadata_dir).await?;
        }
        let index = Self {
            cache_dir,
            metadata_dir,
            entries: tokio::sync::RwLock::new(HashMap::new()),
        };
        index.load_all().await?;
        Ok(index)
    }

    /// Populate the in-memory map from every `*.json` file in the metadata
    /// directory. Files that cannot be read or parsed are skipped with a
    /// warning so one corrupt entry cannot poison the whole cache.
    async fn load_all(&self) -> Result<(), CacheError> {
        let mut loaded = HashMap::new();
        let mut dir = fs_err::tokio::read_dir(&self.metadata_dir).await?;
        while let Some(dirent) = dir.next_entry().await? {
            if let Some(filename) = dirent.file_name().to_str()
                && filename.ends_with(".json")
            {
                // The cache key is the metadata filename minus ".json".
                let key = filename.trim_end_matches(".json");
                let path = self.metadata_dir.join(filename);
                match fs_err::tokio::read_to_string(&path).await {
                    Ok(content) => match serde_json::from_str::<CacheEntry>(&content) {
                        Ok(entry) => {
                            loaded.insert(key.to_string(), entry);
                        }
                        Err(e) => {
                            tracing::warn!("Failed to parse cache metadata {}: {}", key, e);
                        }
                    },
                    Err(e) => {
                        tracing::warn!("Failed to read cache metadata {}: {}", key, e);
                    }
                }
            }
        }
        *self.entries.write().await = loaded;
        Ok(())
    }

    /// Location of the JSON metadata file for `key`.
    fn metadata_path(&self, key: &str) -> PathBuf {
        self.metadata_dir.join(format!("{}.json", key))
    }

    /// Serialize `entry` to its metadata file on disk.
    async fn persist(&self, key: &str, entry: &CacheEntry) -> Result<(), CacheError> {
        let content = serde_json::to_string_pretty(entry)?;
        fs_err::tokio::write(self.metadata_path(key), content).await?;
        Ok(())
    }

    /// Derive the cache key for a URL download from the URL and its
    /// expected checksums.
    ///
    /// NOTE: `DefaultHasher` output is not guaranteed stable across Rust
    /// releases, so keys may change after a toolchain upgrade; the worst
    /// case is a redundant re-download, never a wrong cache hit.
    pub fn generate_cache_key(url: &url::Url, checksums: &[crate::source::Checksum]) -> String {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};
        let mut hasher = DefaultHasher::new();
        url.as_str().hash(&mut hasher);
        for cs in checksums {
            cs.to_hex().hash(&mut hasher);
        }
        format!("{:x}", hasher.finish())
    }

    /// Derive the cache key for a git checkout from its URL and revision.
    /// Same stability caveat as [`Self::generate_cache_key`].
    pub fn generate_git_cache_key(url: &str, rev: &str) -> String {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};
        let mut hasher = DefaultHasher::new();
        url.hash(&mut hasher);
        rev.hash(&mut hasher);
        format!("git_{:x}", hasher.finish())
    }

    /// Look up an entry by cache key, returning a clone if present.
    pub async fn get(&self, key: &str) -> Option<CacheEntry> {
        self.entries.read().await.get(key).cloned()
    }

    /// Insert (or replace) an entry and persist it to disk.
    ///
    /// The write lock is held across the file write (tokio's `RwLock` guard
    /// may be held across `.await`) so concurrent updates to the same key
    /// cannot interleave their disk writes. Persisting *before* mutating
    /// the map means an I/O failure leaves memory and disk consistent.
    pub async fn insert(&self, key: String, entry: CacheEntry) -> Result<(), CacheError> {
        let mut entries = self.entries.write().await;
        self.persist(&key, &entry).await?;
        entries.insert(key, entry);
        Ok(())
    }

    /// Refresh `last_accessed` for `key` (if present) and persist the
    /// updated entry. A missing key is not an error.
    pub async fn touch(&self, key: &str) -> Result<(), CacheError> {
        let mut entries = self.entries.write().await;
        if let Some(entry) = entries.get_mut(key) {
            entry.last_accessed = chrono::Utc::now();
            // Persist under the lock so concurrent touches of the same key
            // cannot interleave their file writes (see `insert`).
            self.persist(key, entry).await?;
        }
        Ok(())
    }

    /// Mark the entry for `key` as attestation-verified (if present) and
    /// persist the updated entry. A missing key is not an error.
    pub async fn set_attestation_verified(&self, key: &str) -> Result<(), CacheError> {
        let mut entries = self.entries.write().await;
        if let Some(entry) = entries.get_mut(key) {
            entry.attestation_verified = true;
            self.persist(key, entry).await?;
        }
        Ok(())
    }

    /// Snapshot of all `(key, entry)` pairs currently in the index.
    pub async fn list_entries(&self) -> Vec<(String, CacheEntry)> {
        self.entries
            .read()
            .await
            .iter()
            .map(|(k, v)| (k.clone(), v.clone()))
            .collect()
    }

    /// Absolute path of the cached artifact for `entry`
    /// (`entry.cache_path` is stored relative to the cache root).
    pub fn get_cache_path(&self, entry: &CacheEntry) -> PathBuf {
        self.cache_dir.join(&entry.cache_path)
    }

    /// Absolute path of the extracted directory for `entry`, if any.
    pub fn get_extracted_path(&self, entry: &CacheEntry) -> Option<PathBuf> {
        entry
            .extracted_path
            .as_ref()
            .map(|p| self.cache_dir.join(p))
    }

    /// Evict every entry whose `last_accessed` is older than `max_age`,
    /// deleting its cached files and metadata. File removals are
    /// best-effort: a failed delete is ignored rather than aborting the
    /// sweep.
    pub async fn cleanup_old_entries(&self, max_age: chrono::Duration) -> Result<(), CacheError> {
        let cutoff = chrono::Utc::now() - max_age;
        for (key, entry) in self.list_entries().await {
            if entry.last_accessed >= cutoff {
                continue;
            }
            // Re-check under the write lock: the entry may have been touched
            // or replaced since the snapshot above was taken, in which case
            // it must survive this sweep.
            let evicted = {
                let mut entries = self.entries.write().await;
                match entries.get(&key) {
                    Some(current) if current.last_accessed < cutoff => {
                        entries.remove(&key);
                        true
                    }
                    _ => false,
                }
            };
            if !evicted {
                continue;
            }
            let cache_path = self.get_cache_path(&entry);
            match entry.source_type {
                SourceType::Url => {
                    // URL entries store the download as a single file,
                    // optionally alongside an extracted directory.
                    if cache_path.exists() && cache_path.is_file() {
                        let _ = fs_err::tokio::remove_file(&cache_path).await;
                    }
                    if let Some(extracted_path) = self.get_extracted_path(&entry)
                        && extracted_path.exists()
                    {
                        let _ = fs_err::tokio::remove_dir_all(&extracted_path).await;
                    }
                }
                SourceType::Git => {
                    // Git entries store a checkout directory.
                    if cache_path.exists() && cache_path.is_dir() {
                        let _ = fs_err::tokio::remove_dir_all(&cache_path).await;
                    }
                }
            }
            let _ = fs_err::tokio::remove_file(self.metadata_path(&key)).await;
        }
        Ok(())
    }

    /// Root directory of the cache.
    pub fn cache_dir(&self) -> &PathBuf {
        &self.cache_dir
    }
}