use crate::CacheManager;
use std::path::Path;
use tracing::{debug, info};
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use serde::Serialize;

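/// Thread-safe cache hit/miss counters, shared across clones via `Arc<AtomicU64>`.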
#[derive(Debug, Clone)]
pub struct CacheStats {
    hits: Arc<AtomicU64>,
    misses: Arc<AtomicU64>,
}

impl CacheStats {
    pub fn new() -> Self {
        Self {
            hits: Arc::new(AtomicU64::new(0)),
            misses: Arc::new(AtomicU64::new(0)),
        }
    }

    pub fn record_hit(&self) {
        self.hits.fetch_add(1, Ordering::Relaxed);
    }

    pub fn record_miss(&self) {
        self.misses.fetch_add(1, Ordering::Relaxed);
    }

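    /// Returns the hit rate as a fraction between 0.0 and 1.0,
    /// or 0.0 if no lookups have been recorded yet.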
    pub fn hit_rate(&self) -> f64 {
        let hits = self.hits.load(Ordering::Relaxed);
        let misses = self.misses.load(Ordering::Relaxed);
        let total = hits + misses;

        if total == 0 {
            0.0
        } else {
            hits as f64 / total as f64
        }
    }

    pub fn stats(&self) -> (u64, u64, f64) {
        let hits = self.hits.load(Ordering::Relaxed);
        let misses = self.misses.load(Ordering::Relaxed);
        let rate = self.hit_rate();
        (hits, misses, rate)
    }

    pub fn log_stats(&self, name: &str) {
        let (hits, misses, rate) = self.stats();
        info!(
            "{} cache statistics: {} hits, {} misses, {:.2}% hit rate",
            name,
            hits,
            misses,
            rate * 100.0
        );
    }

    pub fn reset(&self) {
        self.hits.store(0, Ordering::Relaxed);
        self.misses.store(0, Ordering::Relaxed);
    }
}

impl Default for CacheStats {
    fn default() -> Self {
        Self::new()
    }
}

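/// Cache for YAML configuration files, backed by [`CacheManager`] and stored as JSON.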
pub struct ConfigCache {
    cache: CacheManager,
    stats: CacheStats,
}

impl ConfigCache {
    pub fn new(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
        Ok(Self {
            cache: CacheManager::new(cache_dir)?,
            stats: CacheStats::new(),
        })
    }

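    /// Loads a configuration file, returning the cached copy when present;
    /// on a miss the YAML file is read, deserialized, and cached as JSON
    /// under a TTL-based invalidation strategy.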
    pub fn get_config<T: serde::de::DeserializeOwned + Serialize>(
        &self,
        path: &Path,
    ) -> Result<T, Box<dyn std::error::Error>> {
        let cache_key = format!("config_{}", path.display());

        if let Some(cached) = self.cache.get(&cache_key)? {
            debug!("Configuration cache hit: {}", path.display());
            self.stats.record_hit();
            return Ok(serde_json::from_str(&cached)?);
        }

        debug!("Configuration cache miss: {}", path.display());
        self.stats.record_miss();

        let content = std::fs::read_to_string(path)?;
        let config: T = serde_yaml::from_str(&content)?;

        let json = serde_json::to_string(&config)?;
        self.cache.set(
            &cache_key,
            json,
            crate::CacheInvalidationStrategy::Ttl(3600),
        )?;

        Ok(config)
    }

    pub fn invalidate_config(&self, path: &Path) -> Result<(), Box<dyn std::error::Error>> {
        let cache_key = format!("config_{}", path.display());
        self.cache.invalidate(&cache_key)?;
        debug!("Configuration cache invalidated: {}", path.display());
        Ok(())
    }

    pub fn stats(&self) -> &CacheStats {
        &self.stats
    }
}

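/// Cache for YAML specification files, backed by [`CacheManager`] and stored as JSON.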
pub struct SpecCache {
    cache: CacheManager,
    stats: CacheStats,
}

impl SpecCache {
    pub fn new(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
        Ok(Self {
            cache: CacheManager::new(cache_dir)?,
            stats: CacheStats::new(),
        })
    }

    pub fn get_spec<T: serde::de::DeserializeOwned + Serialize>(
        &self,
        path: &Path,
    ) -> Result<T, Box<dyn std::error::Error>> {
        let cache_key = format!("spec_{}", path.display());

        if let Some(cached) = self.cache.get(&cache_key)? {
            debug!("Specification cache hit: {}", path.display());
            self.stats.record_hit();
            return Ok(serde_json::from_str(&cached)?);
        }

        debug!("Specification cache miss: {}", path.display());
        self.stats.record_miss();

        let content = std::fs::read_to_string(path)?;
        let spec: T = serde_yaml::from_str(&content)?;

        let json = serde_json::to_string(&spec)?;
        self.cache.set(
            &cache_key,
            json,
            crate::CacheInvalidationStrategy::Ttl(3600),
        )?;

        Ok(spec)
    }

    pub fn invalidate_spec(&self, path: &Path) -> Result<(), Box<dyn std::error::Error>> {
        let cache_key = format!("spec_{}", path.display());
        self.cache.invalidate(&cache_key)?;
        debug!("Specification cache invalidated: {}", path.display());
        Ok(())
    }

    pub fn stats(&self) -> &CacheStats {
        &self.stats
    }
}

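/// Cache for provider responses, keyed by provider, model, and a hash of the prompt.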
pub struct ProviderCache {
    cache: CacheManager,
    stats: CacheStats,
}

impl ProviderCache {
    pub fn new(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
        Ok(Self {
            cache: CacheManager::new(cache_dir)?,
            stats: CacheStats::new(),
        })
    }

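    /// Looks up a previously cached response for the given provider, model, and prompt,
    /// returning `None` (and recording a miss) when nothing is cached.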
    pub fn get_response(
        &self,
        provider: &str,
        model: &str,
        prompt: &str,
    ) -> Result<Option<String>, Box<dyn std::error::Error>> {
        let cache_key = self.make_cache_key(provider, model, prompt);

        if let Some(cached) = self.cache.get(&cache_key)? {
            debug!("Provider response cache hit: {}/{}", provider, model);
            self.stats.record_hit();
            return Ok(Some(cached));
        }

        debug!("Provider response cache miss: {}/{}", provider, model);
        self.stats.record_miss();
        Ok(None)
    }

    pub fn cache_response(
        &self,
        provider: &str,
        model: &str,
        prompt: &str,
        response: &str,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let cache_key = self.make_cache_key(provider, model, prompt);

        self.cache.set(
            &cache_key,
            response.to_string(),
            crate::CacheInvalidationStrategy::Ttl(86400),
        )?;

        debug!("Provider response cached: {}/{}", provider, model);
        Ok(())
    }

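    /// Builds a cache key from the provider, model, and a simple non-cryptographic
    /// polynomial hash of the prompt; distinct prompts may collide.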
    fn make_cache_key(&self, provider: &str, model: &str, prompt: &str) -> String {
        let hash = prompt
            .bytes()
            .fold(0u64, |acc, b| acc.wrapping_mul(31).wrapping_add(b as u64));

        format!("provider_{}_{}_{}", provider, model, hash)
    }

    pub fn stats(&self) -> &CacheStats {
        &self.stats
    }
}

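/// Cache for project analysis results, backed by [`CacheManager`] and stored as JSON.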
pub struct ProjectAnalysisCache {
    cache: CacheManager,
    stats: CacheStats,
}

impl ProjectAnalysisCache {
    pub fn new(cache_dir: &Path) -> Result<Self, Box<dyn std::error::Error>> {
        Ok(Self {
            cache: CacheManager::new(cache_dir)?,
            stats: CacheStats::new(),
        })
    }

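    /// Returns a cached analysis for the given project path, or `None` on a miss.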
    pub fn get_analysis<T: serde::de::DeserializeOwned + Serialize>(
        &self,
        project_path: &Path,
    ) -> Result<Option<T>, Box<dyn std::error::Error>> {
        let cache_key = format!("analysis_{}", project_path.display());

        if let Some(cached) = self.cache.get(&cache_key)? {
            debug!("Project analysis cache hit: {}", project_path.display());
            self.stats.record_hit();
            return Ok(Some(serde_json::from_str(&cached)?));
        }

        debug!("Project analysis cache miss: {}", project_path.display());
        self.stats.record_miss();
        Ok(None)
    }

    pub fn cache_analysis<T: serde::Serialize>(
        &self,
        project_path: &Path,
        analysis: &T,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let cache_key = format!("analysis_{}", project_path.display());

        let json = serde_json::to_string(analysis)?;
        self.cache.set(
            &cache_key,
            json,
            crate::CacheInvalidationStrategy::Ttl(3600),
        )?;

        debug!("Project analysis cached: {}", project_path.display());
        Ok(())
    }

    pub fn invalidate_analysis(&self, project_path: &Path) -> Result<(), Box<dyn std::error::Error>> {
        let cache_key = format!("analysis_{}", project_path.display());
        self.cache.invalidate(&cache_key)?;
        debug!("Project analysis cache invalidated: {}", project_path.display());
        Ok(())
    }

    pub fn stats(&self) -> &CacheStats {
        &self.stats
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_cache_stats() {
        let stats = CacheStats::new();

        stats.record_hit();
        stats.record_hit();
        stats.record_miss();

        let (hits, misses, rate) = stats.stats();
        assert_eq!(hits, 2);
        assert_eq!(misses, 1);
        assert!((rate - 2.0 / 3.0).abs() < 0.01);
    }

    #[test]
    fn test_cache_stats_reset() {
        let stats = CacheStats::new();

        stats.record_hit();
        stats.record_miss();
        stats.reset();

        let (hits, misses, _) = stats.stats();
        assert_eq!(hits, 0);
        assert_eq!(misses, 0);
    }

    #[test]
    fn test_config_cache() -> Result<(), Box<dyn std::error::Error>> {
        let temp_dir = TempDir::new()?;
        let cache_dir = temp_dir.path().join("cache");
        std::fs::create_dir(&cache_dir)?;

        let config_path = temp_dir.path().join("config.yaml");
        std::fs::write(&config_path, "key: value")?;

        let cache = ConfigCache::new(&cache_dir)?;

        // First load reads from disk and records a miss.
        let _: serde_json::Value = cache.get_config(&config_path)?;
        assert_eq!(cache.stats().stats().1, 1);

        // Second load is served from the cache and records a hit.
        let _: serde_json::Value = cache.get_config(&config_path)?;
        assert_eq!(cache.stats().stats().0, 1);

        Ok(())
    }

    #[test]
    fn test_spec_cache() -> Result<(), Box<dyn std::error::Error>> {
        let temp_dir = TempDir::new()?;
        let cache_dir = temp_dir.path().join("cache");
        std::fs::create_dir(&cache_dir)?;

        let spec_path = temp_dir.path().join("spec.yaml");
        std::fs::write(&spec_path, "name: test")?;

        let cache = SpecCache::new(&cache_dir)?;

        // First load reads from disk and records a miss.
        let _: serde_json::Value = cache.get_spec(&spec_path)?;
        assert_eq!(cache.stats().stats().1, 1);

        // Second load is served from the cache and records a hit.
        let _: serde_json::Value = cache.get_spec(&spec_path)?;
        assert_eq!(cache.stats().stats().0, 1);

        Ok(())
    }

    #[test]
    fn test_provider_cache() -> Result<(), Box<dyn std::error::Error>> {
        let temp_dir = TempDir::new()?;
        let cache_dir = temp_dir.path().join("cache");
        std::fs::create_dir(&cache_dir)?;

        let cache = ProviderCache::new(&cache_dir)?;

        // Lookup before anything is cached returns None and records a miss.
        let result = cache.get_response("openai", "gpt-4", "hello")?;
        assert!(result.is_none());
        assert_eq!(cache.stats().stats().1, 1);

        cache.cache_response("openai", "gpt-4", "hello", "world")?;

        // The cached response is returned and a hit is recorded.
        let result = cache.get_response("openai", "gpt-4", "hello")?;
        assert_eq!(result, Some("world".to_string()));
        assert_eq!(cache.stats().stats().0, 1);

        Ok(())
    }
}