1use anyhow::Result;
2use sha2::{Digest, Sha256};
3use std::fs::{self, File};
4use std::io::{BufReader, Read};
5use std::path::Path;
6use std::time::SystemTime;
7
8use super::types::{CacheEntry, CacheKey, CacheMetadata};
9
/// On-disk cache that stores bincode-serialized, LZ4-compressed values,
/// keyed by a file path plus the SHA-256 hash of that file's contents.
#[derive(Debug)]
pub struct FileCache {
    // Root directory; entries are sharded into subdirectories by hash prefix
    // (see `cache_path`).
    cache_dir: std::path::PathBuf,
}
15
16impl FileCache {
17 pub fn new(cache_dir: std::path::PathBuf) -> Result<Self> {
19 fs::create_dir_all(&cache_dir)?;
21 Ok(Self { cache_dir })
22 }
23
24 pub fn hash_file(path: &Path) -> Result<String> {
27 let file = File::open(path)?;
28 let mut reader = BufReader::with_capacity(65536, file); let mut hasher = Sha256::new();
30 let mut buffer = [0u8; 65536];
31
32 loop {
33 let bytes_read = reader.read(&mut buffer)?;
34 if bytes_read == 0 {
35 break;
36 }
37 hasher.update(&buffer[..bytes_read]);
38 }
39
40 let result = hasher.finalize();
41 Ok(format!("{:x}", result))
42 }
43
44 pub fn generate_key(path: &Path) -> Result<CacheKey> {
46 let file_hash = Self::hash_file(path)?;
47 Ok(CacheKey {
48 file_path: path.to_path_buf(),
49 file_hash,
50 })
51 }
52
53 pub fn save<T>(&self, key: &CacheKey, data: &T) -> Result<()>
55 where
56 T: serde::Serialize,
57 {
58 let serialized = bincode::serialize(data)?;
60 let original_size = serialized.len();
61
62 let compressed = lz4::block::compress(&serialized, None, true)?;
64 let compressed_size = compressed.len();
65
66 let metadata = CacheMetadata {
68 created_at: SystemTime::now(),
69 last_accessed: SystemTime::now(),
70 file_size: original_size as u64,
71 compressed_size,
72 compression_ratio: original_size as f32 / compressed_size as f32,
73 };
74
75 let entry = CacheEntry {
77 key: key.clone(),
78 data: compressed,
79 metadata,
80 };
81
82 let cache_path = self.cache_path(key);
84
85 if let Some(parent) = cache_path.parent() {
87 fs::create_dir_all(parent)?;
88 }
89
90 let entry_data = bincode::serialize(&entry)?;
92 fs::write(cache_path, entry_data)?;
93
94 Ok(())
95 }
96
97 pub fn load<T>(&self, key: &CacheKey) -> Result<Option<T>>
99 where
100 T: serde::de::DeserializeOwned,
101 {
102 let cache_path = self.cache_path(key);
103
104 if !cache_path.exists() {
106 return Ok(None);
107 }
108
109 let entry_data = fs::read(&cache_path)?;
111 let mut entry: CacheEntry<Vec<u8>> = bincode::deserialize(&entry_data)?;
112
113 entry.metadata.last_accessed = SystemTime::now();
115
116 let decompressed =
118 lz4::block::decompress(&entry.data, Some(entry.metadata.file_size as i32))?;
119
120 let data: T = bincode::deserialize(&decompressed)?;
122
123 Ok(Some(data))
124 }
125
126 pub fn is_valid(&self, key: &CacheKey) -> Result<bool> {
128 if !key.file_path.exists() {
130 return Ok(false);
131 }
132
133 let current_hash = Self::hash_file(&key.file_path)?;
135 Ok(current_hash == key.file_hash)
136 }
137
138 pub fn remove(&self, key: &CacheKey) -> Result<()> {
140 let cache_path = self.cache_path(key);
141 if cache_path.exists() {
142 fs::remove_file(cache_path)?;
143 }
144 Ok(())
145 }
146
147 fn cache_path(&self, key: &CacheKey) -> std::path::PathBuf {
149 let hash_prefix = &key.file_hash[..2];
151 let cache_name = format!(
152 "{}_{}.cache",
153 key.file_path
154 .file_name()
155 .and_then(|n| n.to_str())
156 .unwrap_or("unknown"),
157 &key.file_hash[..8]
158 );
159
160 self.cache_dir.join(hash_prefix).join(cache_name)
161 }
162
163 pub fn get_stats(&self) -> Result<CacheStats> {
165 let mut total_entries = 0;
166 let mut total_size = 0;
167 let mut total_compressed_size = 0;
168
169 for entry in fs::read_dir(&self.cache_dir)? {
171 let entry = entry?;
172 if entry.path().is_dir() {
173 for cache_file in fs::read_dir(entry.path())? {
174 let cache_file = cache_file?;
175 let file_metadata = cache_file.metadata()?;
176 total_entries += 1;
177 total_compressed_size += file_metadata.len() as usize;
178
179 if let Ok(entry_data) = fs::read(cache_file.path()) {
181 if let Ok(entry) = bincode::deserialize::<CacheEntry<Vec<u8>>>(&entry_data) {
182 total_size += entry.metadata.file_size as usize;
183 continue;
184 }
185 }
186 total_size += file_metadata.len() as usize;
188 }
189 }
190 }
191
192 Ok(CacheStats {
193 total_entries,
194 total_size,
195 total_compressed_size,
196 compression_ratio: if total_compressed_size > 0 {
197 total_size as f32 / total_compressed_size as f32
198 } else {
199 1.0
200 },
201 cache_dir: self.cache_dir.clone(),
202 })
203 }
204}
205
/// Aggregate statistics over every entry found under a cache directory,
/// as produced by `FileCache::get_stats`.
#[derive(Debug, Clone)]
pub struct CacheStats {
    /// Number of cache files found in the shard subdirectories.
    pub total_entries: usize,
    /// Sum of per-entry uncompressed sizes in bytes (falls back to the
    /// on-disk file size for entries that could not be deserialized).
    pub total_size: usize,
    /// Sum of on-disk cache file sizes in bytes.
    pub total_compressed_size: usize,
    /// `total_size / total_compressed_size`; defaults to 1.0 when the
    /// compressed total is zero (empty cache).
    pub compression_ratio: f32,
    /// Directory the statistics were gathered from.
    pub cache_dir: std::path::PathBuf,
}
215
#[cfg(test)]
mod tests {
    use super::*;

    /// A key pairs the source path with its content hash.
    #[test]
    fn test_cache_key_structure() {
        let source = Path::new("src/main.rs");
        let digest = String::from("abc123def456");

        let key = CacheKey {
            file_path: source.to_path_buf(),
            file_hash: digest.clone(),
        };

        assert_eq!(key.file_path.file_name().unwrap(), "main.rs");
        assert_eq!(key.file_hash, "abc123def456");
    }

    /// Metadata fields round-trip the values they were built with.
    #[test]
    fn test_cache_metadata_structure() {
        let timestamp = SystemTime::now();

        let metadata = CacheMetadata {
            created_at: timestamp,
            last_accessed: timestamp,
            file_size: 1024,
            compressed_size: 512,
            compression_ratio: 2.0,
        };

        assert_eq!(metadata.compression_ratio, 2.0);
        assert_eq!(metadata.compressed_size, 512);
        assert_eq!(metadata.file_size, 1024);
    }

    /// An entry bundles its key, payload bytes, and metadata.
    #[test]
    fn test_cache_entry_structure() {
        let key = CacheKey {
            file_path: Path::new("test.rs").to_path_buf(),
            file_hash: String::from("test_hash"),
        };
        let now = SystemTime::now();
        let payload = vec![1, 2, 3, 4, 5];

        let entry = CacheEntry {
            key: key.clone(),
            data: payload,
            metadata: CacheMetadata {
                created_at: now,
                last_accessed: now,
                file_size: 100,
                compressed_size: 50,
                compression_ratio: 2.0,
            },
        };

        assert_eq!(entry.key.file_hash, "test_hash");
        assert_eq!(entry.data.len(), 5);
    }

    /// Stats fields round-trip the values they were built with.
    #[test]
    fn test_cache_stats_structure() {
        let stats = CacheStats {
            total_entries: 100,
            total_size: 1_000_000,
            total_compressed_size: 500_000,
            compression_ratio: 2.0,
            cache_dir: Path::new("/cache").to_path_buf(),
        };

        assert_eq!(stats.compression_ratio, 2.0);
        assert_eq!(stats.total_size, 1_000_000);
        assert_eq!(stats.total_entries, 100);
    }

    /// Ratio = original / compressed, within float tolerance.
    #[test]
    fn test_compression_ratio_calculation() {
        let cases = [
            (1000, 500, 2.0),
            (2000, 1000, 2.0),
            (3000, 1000, 3.0),
            (1000, 250, 4.0),
        ];

        for &(raw, packed, want) in &cases {
            let observed = raw as f32 / packed as f32;
            assert!((observed - want).abs() < 0.01);
        }
    }

    /// Shard directories are named after the first two hash characters.
    #[test]
    fn test_cache_path_construction() {
        let digest = "abc123def456";
        let shard = &digest[..2];

        assert_eq!(shard, "ab", "Prefix should be first 2 chars of hash");
    }

    /// Cache files are named `<file>_<short hash>.cache`.
    #[test]
    fn test_cache_file_naming() {
        let cache_name = format!("{}_{}.cache", "main.rs", "abc12345");

        assert!(cache_name.ends_with(".cache"), "Should end with .cache");
        assert!(cache_name.contains("abc12345"), "Should include hash short");
        assert!(
            cache_name.contains("main.rs"),
            "Should include original filename"
        );
    }

    /// A zero compressed size must not divide; the ratio defaults to 1.0.
    #[test]
    fn test_cache_stats_compression_ratio_zero_handling() {
        let total_size = 1000;
        let compressed_size = 0;

        let ratio = match compressed_size {
            0 => 1.0,
            nonzero => total_size as f32 / nonzero as f32,
        };

        assert_eq!(
            ratio, 1.0,
            "Should default to 1.0 when compressed size is 0"
        );
    }
}