1use anyhow::{Context, Result};
2use git2::{FetchOptions, RemoteCallbacks, Repository};
3use sha2::{Digest, Sha256};
4use std::fs;
5use std::path::{Path, PathBuf};
6use tempfile::TempDir;
7
8use crate::registry::ResolvedPack;
9
/// Manages the on-disk cache of downloaded rpacks.
///
/// Cached packs live under `<cache_dir>/<pack id>/<version>/`, one directory
/// per cached pack version. By default the root is the platform cache
/// directory joined with `rgen/rpacks` (see [`CacheManager::new`]).
#[derive(Debug, Clone)]
pub struct CacheManager {
    // Root directory under which all packs are stored.
    cache_dir: PathBuf,
}
15
/// A pack version that has been materialized in the local cache.
#[derive(Debug, Clone)]
pub struct CachedPack {
    /// Registry identifier of the pack (first path component in the cache).
    pub id: String,
    /// Version string (second path component in the cache).
    pub version: String,
    /// Directory holding the cached pack's files.
    pub path: PathBuf,
    /// SHA-256 digest of the concatenated file contents under `path`.
    pub sha256: String,
    /// Parsed `templates/rgen.toml`, when the pack ships one.
    pub manifest: Option<crate::rpack::RpackManifest>,
}
25
26impl CacheManager {
27 pub fn new() -> Result<Self> {
29 let cache_dir = dirs::cache_dir()
30 .context("Failed to find cache directory")?
31 .join("rgen")
32 .join("rpacks");
33
34 fs::create_dir_all(&cache_dir).context("Failed to create cache directory")?;
35
36 Ok(Self { cache_dir })
37 }
38
39 pub fn with_dir(cache_dir: PathBuf) -> Result<Self> {
41 fs::create_dir_all(&cache_dir).context("Failed to create cache directory")?;
42
43 Ok(Self { cache_dir })
44 }
45
46 pub fn cache_dir(&self) -> &Path {
48 &self.cache_dir
49 }
50
51 pub async fn ensure(&self, resolved_pack: &ResolvedPack) -> Result<CachedPack> {
53 let pack_dir = self
54 .cache_dir
55 .join(&resolved_pack.id)
56 .join(&resolved_pack.version);
57
58 if pack_dir.exists() {
60 if let Ok(cached) = self.load_cached(&resolved_pack.id, &resolved_pack.version) {
61 if !resolved_pack.sha256.is_empty() {
63 let actual_sha256 = self.calculate_sha256(&pack_dir)?;
64 if actual_sha256 == resolved_pack.sha256 {
65 return Ok(cached);
66 } else {
67 fs::remove_dir_all(&pack_dir)
69 .context("Failed to remove corrupted cache")?;
70 }
71 } else {
72 return Ok(cached);
73 }
74 }
75 }
76
77 self.download_pack(resolved_pack, &pack_dir).await?;
79
80 self.load_cached(&resolved_pack.id, &resolved_pack.version)
82 }
83
84 async fn download_pack(&self, resolved_pack: &ResolvedPack, pack_dir: &Path) -> Result<()> {
86 fs::create_dir_all(pack_dir.parent().unwrap())
88 .context("Failed to create pack directory")?;
89
90 let mut fetch_options = FetchOptions::new();
92 let mut callbacks = RemoteCallbacks::new();
93
94 callbacks.transfer_progress(|stats| {
96 if stats.received_objects() % 100 == 0 {
97 log::info!("Downloaded {} objects", stats.received_objects());
98 }
99 true
100 });
101
102 fetch_options.remote_callbacks(callbacks);
103
104 let temp_dir = TempDir::new().context("Failed to create temporary directory")?;
106
107 let repo = Repository::clone(&resolved_pack.git_url, temp_dir.path())
108 .context("Failed to clone repository")?;
109
110 let object = repo
112 .revparse_single(&resolved_pack.git_rev)
113 .context("Failed to find revision")?;
114
115 repo.checkout_tree(&object, None)
116 .context("Failed to checkout revision")?;
117
118 fs::rename(temp_dir.path(), pack_dir).context("Failed to move downloaded pack")?;
120
121 Ok(())
122 }
123
124 pub fn load_cached(&self, pack_id: &str, version: &str) -> Result<CachedPack> {
126 let pack_dir = self.cache_dir.join(pack_id).join(version);
127
128 if !pack_dir.exists() {
129 anyhow::bail!("Pack not found in cache: {}@{}", pack_id, version);
130 }
131
132 let sha256 = self.calculate_sha256(&pack_dir)?;
133
134 let manifest_path = pack_dir.join("templates").join("rgen.toml");
136 let manifest = if manifest_path.exists() {
137 let content = fs::read_to_string(&manifest_path).context("Failed to read manifest")?;
138 Some(toml::from_str(&content).context("Failed to parse manifest")?)
139 } else {
140 None
141 };
142
143 Ok(CachedPack {
144 id: pack_id.to_string(),
145 version: version.to_string(),
146 path: pack_dir,
147 sha256,
148 manifest,
149 })
150 }
151
152 fn calculate_sha256(&self, dir: &Path) -> Result<String> {
154 let mut hasher = Sha256::new();
155
156 for entry in walkdir::WalkDir::new(dir) {
158 let entry = entry.context("Failed to read directory entry")?;
159 let path = entry.path();
160
161 if path.is_file() {
162 let content = fs::read(path).context("Failed to read file for hashing")?;
163 hasher.update(&content);
164 }
165 }
166
167 Ok(format!("{:x}", hasher.finalize()))
168 }
169
170 pub fn list_cached(&self) -> Result<Vec<CachedPack>> {
172 let mut packs = Vec::new();
173
174 if !self.cache_dir.exists() {
175 return Ok(packs);
176 }
177
178 for pack_entry in fs::read_dir(&self.cache_dir).context("Failed to read cache directory")? {
179 let pack_entry = pack_entry.context("Failed to read pack entry")?;
180 let pack_path = pack_entry.path();
181
182 if pack_path.is_dir() {
183 let pack_id = pack_entry.file_name().to_string_lossy().to_string();
184
185 for version_entry in
187 fs::read_dir(&pack_path).context("Failed to read pack directory")?
188 {
189 let version_entry = version_entry.context("Failed to read version entry")?;
190 let version_path = version_entry.path();
191
192 if version_path.is_dir() {
193 let version = version_entry.file_name().to_string_lossy().to_string();
194
195 if let Ok(cached) = self.load_cached(&pack_id, &version) {
196 packs.push(cached);
197 }
198 }
199 }
200 }
201 }
202
203 Ok(packs)
204 }
205
206 pub fn remove(&self, pack_id: &str, version: &str) -> Result<()> {
208 let pack_dir = self.cache_dir.join(pack_id).join(version);
209
210 if pack_dir.exists() {
211 fs::remove_dir_all(&pack_dir).context("Failed to remove cached pack")?;
212 }
213
214 let pack_parent = pack_dir.parent().unwrap();
216 if pack_parent.exists() && fs::read_dir(pack_parent)?.next().is_none() {
217 fs::remove_dir(pack_parent).context("Failed to remove empty pack directory")?;
218 }
219
220 Ok(())
221 }
222
223 pub fn cleanup_old_versions(&self) -> Result<()> {
225 if !self.cache_dir.exists() {
226 return Ok(());
227 }
228
229 for pack_entry in fs::read_dir(&self.cache_dir).context("Failed to read cache directory")? {
230 let pack_entry = pack_entry.context("Failed to read pack entry")?;
231 let pack_path = pack_entry.path();
232
233 if pack_path.is_dir() {
234 let mut versions = Vec::new();
235
236 for version_entry in
238 fs::read_dir(&pack_path).context("Failed to read pack directory")?
239 {
240 let version_entry = version_entry.context("Failed to read version entry")?;
241 let version_path = version_entry.path();
242
243 if version_path.is_dir() {
244 let version_str = version_entry.file_name().to_string_lossy().to_string();
245
246 if let Ok(version) = semver::Version::parse(&version_str) {
247 versions.push((version, version_path));
248 }
249 }
250 }
251
252 versions.sort_by(|a, b| a.0.cmp(&b.0));
254
255 for (_, version_path) in versions.into_iter().rev().skip(1) {
256 fs::remove_dir_all(&version_path).context("Failed to remove old version")?;
257 }
258 }
259 }
260
261 Ok(())
262 }
263}
264
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    // `with_dir` should record exactly the directory it was given.
    #[test]
    fn test_cache_manager_creation() {
        let temp = TempDir::new().unwrap();
        let root = temp.path().to_path_buf();

        let manager = CacheManager::with_dir(root.clone()).unwrap();
        assert_eq!(manager.cache_dir(), root);
    }

    // The digest of a directory tree is a 64-character lowercase hex string.
    #[test]
    fn test_sha256_calculation() {
        let temp = TempDir::new().unwrap();
        let data_dir = temp.path().join("test");
        fs::create_dir_all(&data_dir).unwrap();

        for (name, body) in [("file1.txt", "content1"), ("file2.txt", "content2")] {
            fs::write(data_dir.join(name), body).unwrap();
        }

        let manager = CacheManager::with_dir(temp.path().to_path_buf()).unwrap();
        let digest = manager.calculate_sha256(&data_dir).unwrap();

        assert_eq!(64, digest.len());
        assert!(digest.chars().all(|c| c.is_ascii_hexdigit()));
    }

    // A cache with no packs lists as empty.
    #[test]
    fn test_list_cached_empty() {
        let temp = TempDir::new().unwrap();
        let manager = CacheManager::with_dir(temp.path().to_path_buf()).unwrap();

        assert!(manager.list_cached().unwrap().is_empty());
    }
}