1use anyhow::{Context, Result};
7use serde::{Deserialize, Serialize};
8use std::collections::HashMap;
9use std::fs;
10use std::path::{Path, PathBuf};
11use tensorlogic_compiler::CompilerContext;
12use tensorlogic_ir::{EinsumGraph, TLExpr};
13
/// A single cached compilation result, persisted to disk as JSON
/// (optionally gzip-compressed) and mirrored in the in-memory index.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheEntry {
    /// The compiled einsum graph being cached.
    pub graph: EinsumGraph,
    /// Debug-formatted compilation config used to produce `graph`;
    /// compared again on lookup as a guard against hash collisions.
    pub strategy: String,
    /// Unix timestamp (seconds) when the entry was created.
    pub timestamp: i64,
    /// Unix timestamp (seconds) of the most recent hit; drives LRU eviction.
    pub last_accessed: i64,
    /// Cache key: hash of expression + config + domains (see `compute_hash`).
    pub expr_hash: u64,
    /// Number of hits served by this entry; eviction tie-breaker.
    pub access_count: u64,
}
30
/// Disk-backed cache of compiled einsum graphs with an in-memory index,
/// LRU-style size enforcement, and hit/miss accounting.
pub struct CompilationCache {
    /// Directory holding one `.json` or `.bin` file per entry.
    cache_dir: PathBuf,
    /// On-disk size cap in (decimal) megabytes; enforced after each `put`.
    max_size_mb: usize,
    /// In-memory index keyed by expression hash, rebuilt from disk on startup.
    index: HashMap<u64, CacheEntry>,
    /// When false, `get`/`put` become no-ops.
    enabled: bool,
    /// Number of successful lookups.
    hits: u64,
    /// Number of failed lookups.
    misses: u64,
    /// Number of entries removed by size enforcement.
    evictions: u64,
    /// Whether entries are gzip-compressed (`.bin`) or plain JSON (`.json`).
    use_compression: bool,
}
50
51impl CompilationCache {
    /// Creates a cache at `cache_dir` (or the platform default when `None`)
    /// with compression enabled; see [`Self::with_compression`].
    pub fn new(cache_dir: Option<PathBuf>, max_size_mb: usize) -> Result<Self> {
        Self::with_compression(cache_dir, max_size_mb, true)
    }
56
57 pub fn with_compression(
59 cache_dir: Option<PathBuf>,
60 max_size_mb: usize,
61 use_compression: bool,
62 ) -> Result<Self> {
63 let cache_dir = match cache_dir {
64 Some(dir) => dir,
65 None => Self::default_cache_dir()?,
66 };
67
68 if !cache_dir.exists() {
70 fs::create_dir_all(&cache_dir).context("Failed to create cache directory")?;
71 }
72
73 let mut cache = Self {
74 cache_dir,
75 max_size_mb,
76 index: HashMap::new(),
77 enabled: true,
78 hits: 0,
79 misses: 0,
80 evictions: 0,
81 use_compression,
82 };
83
84 cache.load_index()?;
86
87 Ok(cache)
88 }
89
90 pub fn default_cache_dir() -> Result<PathBuf> {
92 let cache_dir = dirs::cache_dir()
93 .context("Failed to determine cache directory")?
94 .join("tensorlogic")
95 .join("compilation");
96 Ok(cache_dir)
97 }
98
    /// Computes a cache key for `expr` under the given compiler context.
    ///
    /// Hashes the `Debug` rendering of the expression and of the config,
    /// followed by each domain's name and cardinality. Domains are sorted
    /// by name first so `HashMap` iteration order cannot change the key.
    ///
    /// NOTE(review): `DefaultHasher` output is not guaranteed stable across
    /// Rust releases, and `Debug` formatting can change between crate
    /// versions — so on-disk entries may be orphaned after an upgrade. That
    /// only costs cache misses, not correctness.
    pub fn compute_hash(expr: &TLExpr, context: &CompilerContext) -> u64 {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};

        let mut hasher = DefaultHasher::new();

        // Hash the expression via its Debug form.
        let expr_str = format!("{:?}", expr);
        expr_str.hash(&mut hasher);

        // Include the config so different strategies get distinct keys.
        let config_str = format!("{:?}", context.config);
        config_str.hash(&mut hasher);

        // Deterministic ordering over the domain map.
        let mut domains: Vec<_> = context.domains.iter().collect();
        domains.sort_by_key(|(name, _)| *name);
        for (name, info) in domains {
            name.hash(&mut hasher);
            info.cardinality.hash(&mut hasher);
        }

        hasher.finish()
    }
125
126 pub fn get(&mut self, expr: &TLExpr, context: &CompilerContext) -> Option<EinsumGraph> {
128 if !self.enabled {
129 return None;
130 }
131
132 let hash = Self::compute_hash(expr, context);
133
134 if let Some(entry) = self.index.get_mut(&hash) {
135 let current_strategy = format!("{:?}", context.config);
137 if entry.strategy == current_strategy {
138 entry.last_accessed = chrono::Utc::now().timestamp();
140 entry.access_count += 1;
141 self.hits += 1;
142
143 let entry_clone = entry.clone();
145 let graph = entry.graph.clone();
146
147 let _ = self.update_entry_metadata(&entry_clone);
149
150 return Some(graph);
151 }
152 }
153
154 self.misses += 1;
155 None
156 }
157
158 fn update_entry_metadata(&self, entry: &CacheEntry) -> Result<()> {
160 if self.use_compression {
161 let compressed = Self::compress_entry(entry)?;
162 let cache_file = self.cache_dir.join(format!("{:016x}.bin", entry.expr_hash));
163 fs::write(&cache_file, compressed)?;
164 } else {
165 let cache_file = self
166 .cache_dir
167 .join(format!("{:016x}.json", entry.expr_hash));
168 let json = serde_json::to_string(entry)?;
169 fs::write(&cache_file, json)?;
170 }
171
172 Ok(())
173 }
174
175 fn compress_entry(entry: &CacheEntry) -> Result<Vec<u8>> {
177 let json = serde_json::to_vec(entry).context("Failed to serialize entry")?;
179
180 let compressed = oxiarc_deflate::gzip_compress(&json, 9)
182 .context("Failed to gzip-compress cache entry")?;
183
184 Ok(compressed)
185 }
186
187 fn decompress_entry(compressed: &[u8]) -> Result<CacheEntry> {
189 let decompressed = oxiarc_deflate::gzip_decompress(compressed)
191 .context("Failed to gzip-decompress cache entry")?;
192
193 let entry: CacheEntry =
195 serde_json::from_slice(&decompressed).context("Failed to deserialize entry")?;
196
197 Ok(entry)
198 }
199
200 pub fn put(
202 &mut self,
203 expr: &TLExpr,
204 context: &CompilerContext,
205 graph: &EinsumGraph,
206 ) -> Result<()> {
207 if !self.enabled {
208 return Ok(());
209 }
210
211 let hash = Self::compute_hash(expr, context);
212 let now = chrono::Utc::now().timestamp();
213
214 let entry = CacheEntry {
215 graph: graph.clone(),
216 strategy: format!("{:?}", context.config),
217 timestamp: now,
218 last_accessed: now,
219 expr_hash: hash,
220 access_count: 0,
221 };
222
223 if self.use_compression {
225 let compressed = Self::compress_entry(&entry)?;
226 let cache_file = self.cache_dir.join(format!("{:016x}.bin", hash));
227 fs::write(&cache_file, compressed)?;
228 } else {
229 let cache_file = self.cache_dir.join(format!("{:016x}.json", hash));
230 let json = serde_json::to_string_pretty(&entry)?;
231 fs::write(&cache_file, json)?;
232 }
233
234 self.index.insert(hash, entry);
236
237 self.enforce_size_limit()?;
239
240 Ok(())
241 }
242
243 fn load_index(&mut self) -> Result<()> {
245 if !self.cache_dir.exists() {
246 return Ok(());
247 }
248
249 for entry in fs::read_dir(&self.cache_dir)? {
250 let entry = entry?;
251 let path = entry.path();
252
253 let ext = path.extension().and_then(|s| s.to_str());
254
255 match ext {
256 Some("json") => {
257 if let Ok(content) = fs::read_to_string(&path) {
259 if let Ok(cache_entry) = serde_json::from_str::<CacheEntry>(&content) {
260 self.index.insert(cache_entry.expr_hash, cache_entry);
261 }
262 }
263 }
264 Some("bin") => {
265 if let Ok(content) = fs::read(&path) {
267 if let Ok(cache_entry) = Self::decompress_entry(&content) {
268 self.index.insert(cache_entry.expr_hash, cache_entry);
269 }
270 }
271 }
272 _ => {}
273 }
274 }
275
276 Ok(())
277 }
278
279 fn enforce_size_limit(&mut self) -> Result<()> {
281 let current_size = self.get_cache_size_mb()?;
282
283 if current_size > self.max_size_mb {
284 let mut entries: Vec<_> = self
286 .index
287 .iter()
288 .map(|(hash, entry)| (*hash, entry.last_accessed, entry.access_count))
289 .collect();
290
291 entries.sort_by(|a, b| a.1.cmp(&b.1).then_with(|| a.2.cmp(&b.2)));
293
294 let target_size = (self.max_size_mb as f64 * 0.8) as usize; for (hash, _, _) in entries {
298 if self.get_cache_size_mb()? <= target_size {
299 break;
300 }
301
302 self.remove_entry(hash)?;
303 self.evictions += 1;
304 }
305 }
306
307 Ok(())
308 }
309
310 fn get_cache_size_mb(&self) -> Result<usize> {
312 let mut total_bytes = 0u64;
313
314 for entry in fs::read_dir(&self.cache_dir)? {
315 let entry = entry?;
316 total_bytes += entry.metadata()?.len();
317 }
318
319 Ok((total_bytes / 1_000_000) as usize)
320 }
321
322 fn remove_entry(&mut self, hash: u64) -> Result<()> {
324 let json_file = self.cache_dir.join(format!("{:016x}.json", hash));
326 let bin_file = self.cache_dir.join(format!("{:016x}.bin", hash));
327
328 if json_file.exists() {
329 fs::remove_file(json_file)?;
330 }
331 if bin_file.exists() {
332 fs::remove_file(bin_file)?;
333 }
334
335 self.index.remove(&hash);
336 Ok(())
337 }
338
339 pub fn clear(&mut self) -> Result<()> {
341 for entry in fs::read_dir(&self.cache_dir)? {
342 let entry = entry?;
343 fs::remove_file(entry.path())?;
344 }
345
346 self.index.clear();
347 Ok(())
348 }
349
350 pub fn stats(&self) -> CacheStats {
352 let hit_rate = if self.hits + self.misses > 0 {
353 (self.hits as f64 / (self.hits + self.misses) as f64) * 100.0
354 } else {
355 0.0
356 };
357
358 CacheStats {
359 entries: self.index.len(),
360 size_mb: self.get_cache_size_mb().unwrap_or(0),
361 max_size_mb: self.max_size_mb,
362 enabled: self.enabled,
363 cache_dir: self.cache_dir.clone(),
364 hits: self.hits,
365 misses: self.misses,
366 evictions: self.evictions,
367 hit_rate,
368 compression_enabled: self.use_compression,
369 }
370 }
371
372 #[allow(dead_code)]
377 pub fn warm_up(&mut self, expressions: &[(String, CompilerContext)]) -> Result<usize> {
378 use crate::parser::parse_expression;
379 use tensorlogic_compiler::compile_to_einsum_with_context;
380
381 let mut warmed = 0;
382
383 for (expr_str, context) in expressions {
384 if let Ok(expr) = parse_expression(expr_str) {
386 let mut ctx_clone = context.clone();
387 if let Ok(graph) = compile_to_einsum_with_context(&expr, &mut ctx_clone) {
388 if self.put(&expr, context, &graph).is_ok() {
390 warmed += 1;
391 }
392 }
393 }
394 }
395
396 Ok(warmed)
397 }
398
    /// Pre-populates the cache from a warmup definition file.
    ///
    /// File format, one entry per line:
    /// `EXPR | CONFIG_NAME | DOMAIN:SIZE,DOMAIN:SIZE,...`
    /// where the config and domain sections are optional. Blank lines and
    /// lines starting with `#` are skipped. Unknown config names fall back
    /// to `soft_differentiable`; malformed domain specs are ignored.
    ///
    /// # Errors
    /// Fails only if the file itself cannot be read; a failure inside
    /// `warm_up` is recorded in [`CacheWarmupResult::errors`] instead of
    /// being propagated.
    #[allow(dead_code)]
    pub fn warm_up_from_file(&mut self, file_path: &std::path::Path) -> Result<CacheWarmupResult> {
        use std::fs;
        use tensorlogic_compiler::CompilationConfig;

        let content = fs::read_to_string(file_path)
            .with_context(|| format!("Failed to read warmup file: {}", file_path.display()))?;

        let mut expressions = Vec::new();
        let mut errors = Vec::new();

        for line in content.lines() {
            let line = line.trim();

            // Skip blanks and comment lines.
            if line.is_empty() || line.starts_with('#') {
                continue;
            }

            // Fields are pipe-separated: expression | config | domains.
            let parts: Vec<&str> = line.split('|').map(|s| s.trim()).collect();

            if parts.is_empty() {
                continue;
            }

            let expr_str = parts[0].to_string();

            // Optional second field selects a compilation strategy;
            // anything unrecognized degrades to the soft default.
            let config = if parts.len() > 1 {
                match parts[1] {
                    "hard_boolean" => CompilationConfig::hard_boolean(),
                    "fuzzy_godel" => CompilationConfig::fuzzy_godel(),
                    "fuzzy_product" => CompilationConfig::fuzzy_product(),
                    "fuzzy_lukasiewicz" => CompilationConfig::fuzzy_lukasiewicz(),
                    "probabilistic" => CompilationConfig::probabilistic(),
                    _ => CompilationConfig::soft_differentiable(),
                }
            } else {
                CompilationConfig::soft_differentiable()
            };

            let mut context = CompilerContext::with_config(config);

            // Optional third field: comma-separated NAME:SIZE domain specs.
            // Specs that do not split into exactly two parts or whose size
            // fails to parse are dropped silently.
            if parts.len() > 2 {
                for domain_spec in parts[2].split(',') {
                    let domain_parts: Vec<&str> = domain_spec.split(':').collect();
                    if domain_parts.len() == 2 {
                        if let Ok(size) = domain_parts[1].parse::<usize>() {
                            context.add_domain(domain_parts[0], size);
                        }
                    }
                }
            }

            expressions.push((expr_str, context));
        }

        match self.warm_up(&expressions) {
            Ok(warmed) => Ok(CacheWarmupResult {
                total: expressions.len(),
                warmed,
                errors,
            }),
            Err(e) => {
                // A hard warm_up error is surfaced in the result rather than
                // propagated, so partial progress information is preserved.
                errors.push(format!("Warmup error: {}", e));
                Ok(CacheWarmupResult {
                    total: expressions.len(),
                    warmed: 0,
                    errors,
                })
            }
        }
    }
480}
481
/// Outcome of a cache warmup run (see `CompilationCache::warm_up_from_file`).
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub struct CacheWarmupResult {
    /// Number of expressions parsed from the warmup source.
    pub total: usize,
    /// Number of expressions successfully compiled and cached.
    pub warmed: usize,
    /// Human-readable error messages collected during warmup.
    pub errors: Vec<String>,
}
493
494impl CacheWarmupResult {
495 #[allow(dead_code)]
497 pub fn print(&self) {
498 use crate::output::{print_error, print_header, print_info, print_success};
499
500 print_header("Cache Warmup Results");
501 print_info(&format!(" Total expressions: {}", self.total));
502 print_success(&format!(" Successfully warmed: {}", self.warmed));
503
504 if !self.errors.is_empty() {
505 print_error(&format!(" Errors: {}", self.errors.len()));
506 for error in &self.errors {
507 print_info(&format!(" - {}", error));
508 }
509 }
510 }
511}
512
/// Snapshot of cache state and hit/miss counters, serializable for export.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheStats {
    /// Number of entries in the in-memory index.
    pub entries: usize,
    /// Current on-disk size in (decimal) megabytes.
    pub size_mb: usize,
    /// Configured size cap in megabytes.
    pub max_size_mb: usize,
    /// Whether the cache is currently enabled.
    pub enabled: bool,
    /// Cache directory, serialized as a plain string path.
    #[serde(
        serialize_with = "serialize_path",
        deserialize_with = "deserialize_path"
    )]
    pub cache_dir: PathBuf,
    /// Number of successful lookups.
    pub hits: u64,
    /// Number of failed lookups.
    pub misses: u64,
    /// Number of entries evicted to stay under the size cap.
    pub evictions: u64,
    /// `hits / (hits + misses)` as a percentage; 0.0 before any request.
    pub hit_rate: f64,
    /// Whether entries are gzip-compressed on disk.
    pub compression_enabled: bool,
}
541
542fn serialize_path<S>(path: &Path, serializer: S) -> Result<S::Ok, S::Error>
544where
545 S: serde::Serializer,
546{
547 serializer.serialize_str(&path.to_string_lossy())
548}
549
550fn deserialize_path<'de, D>(deserializer: D) -> Result<PathBuf, D::Error>
551where
552 D: serde::Deserializer<'de>,
553{
554 let s = String::deserialize(deserializer)?;
555 Ok(PathBuf::from(s))
556}
557
impl CacheStats {
    /// Pretty-prints cache statistics and performance metrics to the console.
    pub fn print(&self) {
        use crate::output::{print_header, print_info, print_success};

        print_header("Cache Statistics");
        print_info(&format!(" Entries: {}", self.entries));
        // NOTE(review): max_size_mb == 0 would print inf/NaN here — assumed
        // nonzero by construction; confirm against CLI argument validation.
        print_info(&format!(
            " Size: {} MB / {} MB ({:.1}% full)",
            self.size_mb,
            self.max_size_mb,
            (self.size_mb as f64 / self.max_size_mb as f64) * 100.0
        ));
        print_info(&format!(
            " Enabled: {}",
            if self.enabled { "yes" } else { "no" }
        ));
        print_info(&format!(
            " Compression: {}",
            if self.compression_enabled {
                "enabled"
            } else {
                "disabled"
            }
        ));

        print_header("Performance Metrics");
        print_info(&format!(" Cache Hits: {}", self.hits));
        print_info(&format!(" Cache Misses: {}", self.misses));
        print_info(&format!(" Evictions: {}", self.evictions));

        // Qualitative banding: >= 80% excellent, >= 50% good, else poor.
        if self.hits + self.misses > 0 {
            if self.hit_rate >= 80.0 {
                print_success(&format!(" Hit Rate: {:.2}% (excellent)", self.hit_rate));
            } else if self.hit_rate >= 50.0 {
                print_info(&format!(" Hit Rate: {:.2}% (good)", self.hit_rate));
            } else {
                print_info(&format!(" Hit Rate: {:.2}% (poor)", self.hit_rate));
            }
        } else {
            print_info(" Hit Rate: N/A (no requests yet)");
        }

        print_info(&format!(" Location: {}", self.cache_dir.display()));
    }

    /// Serializes the statistics to pretty-printed JSON.
    #[allow(dead_code)]
    pub fn to_json(&self) -> Result<String> {
        serde_json::to_string_pretty(self).context("Failed to serialize cache statistics to JSON")
    }

    /// Writes the JSON form of these statistics to `path`.
    #[allow(dead_code)]
    pub fn export_to_file(&self, path: &Path) -> Result<()> {
        let json = self.to_json()?;
        fs::write(path, json).context("Failed to write cache statistics to file")?;
        Ok(())
    }

    /// Derives aggregate analytics (utilization, eviction rate, efficiency
    /// score, and a textual recommendation) from the raw counters.
    #[allow(dead_code)]
    pub fn get_analytics(&self) -> CacheAnalytics {
        let total_requests = self.hits + self.misses;
        // Guard the division: a zero cap makes utilization undefined.
        let utilization_pct = if self.max_size_mb > 0 {
            (self.size_mb as f64 / self.max_size_mb as f64) * 100.0
        } else {
            0.0
        };

        // size_mb is integral MB, so this is approximate for small caches.
        let avg_entry_size_kb = if self.entries > 0 {
            (self.size_mb as f64 * 1024.0) / self.entries as f64
        } else {
            0.0
        };

        // Evictions per request, as a percentage.
        let eviction_rate = if total_requests > 0 {
            (self.evictions as f64 / total_requests as f64) * 100.0
        } else {
            0.0
        };

        let efficiency_score =
            calculate_efficiency_score(self.hit_rate, utilization_pct, eviction_rate);

        CacheAnalytics {
            total_requests,
            utilization_pct,
            avg_entry_size_kb,
            eviction_rate,
            efficiency_score,
            recommendation: generate_recommendation(
                self.hit_rate,
                utilization_pct,
                eviction_rate,
                self.entries,
            ),
        }
    }
}
659
/// Derived cache health metrics computed from a `CacheStats` snapshot.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheAnalytics {
    /// Total lookups observed (hits + misses).
    pub total_requests: u64,
    /// On-disk size as a percentage of the configured cap.
    pub utilization_pct: f64,
    /// Approximate average entry size in kilobytes.
    pub avg_entry_size_kb: f64,
    /// Evictions per request, as a percentage.
    pub eviction_rate: f64,
    /// Composite 0–100 score (see `calculate_efficiency_score`).
    pub efficiency_score: f64,
    /// Human-readable tuning advice (see `generate_recommendation`).
    pub recommendation: String,
}
676
impl CacheAnalytics {
    /// Pretty-prints the analytics, color-coding the efficiency score.
    #[allow(dead_code)]
    pub fn print(&self) {
        use crate::output::{print_header, print_info, print_success, print_warning};

        print_header("Cache Analytics");
        print_info(&format!(" Total Requests: {}", self.total_requests));
        print_info(&format!(" Utilization: {:.1}%", self.utilization_pct));
        print_info(&format!(
            " Avg Entry Size: {:.2} KB",
            self.avg_entry_size_kb
        ));
        print_info(&format!(" Eviction Rate: {:.2}%", self.eviction_rate));

        // Score bands: >= 80 excellent, >= 60 good, otherwise flagged.
        if self.efficiency_score >= 80.0 {
            print_success(&format!(
                " Efficiency Score: {:.1}/100 (excellent)",
                self.efficiency_score
            ));
        } else if self.efficiency_score >= 60.0 {
            print_info(&format!(
                " Efficiency Score: {:.1}/100 (good)",
                self.efficiency_score
            ));
        } else {
            print_warning(&format!(
                " Efficiency Score: {:.1}/100 (needs improvement)",
                self.efficiency_score
            ));
        }

        if !self.recommendation.is_empty() {
            print_header("Recommendation");
            print_info(&format!(" {}", self.recommendation));
        }
    }

    /// Serializes the analytics to pretty-printed JSON.
    #[allow(dead_code)]
    pub fn to_json(&self) -> Result<String> {
        serde_json::to_string_pretty(self).context("Failed to serialize cache analytics to JSON")
    }
}
721
/// Combines hit rate (60%), utilization (20%), and eviction rate (20%)
/// into a 0–100 efficiency score.
///
/// Utilization scores highest in the 60–80% "sweet spot"; below that it
/// scales linearly, above it the score decays toward zero at 100%.
/// Eviction rates under 1% score full marks, decaying to zero at 5%.
fn calculate_efficiency_score(hit_rate: f64, utilization: f64, eviction_rate: f64) -> f64 {
    // Hit-rate component: worth up to 60 points.
    let hit_score = hit_rate * 0.6;

    // Utilization component: worth up to 20 points. The over-full branch is
    // clamped at zero — utilization can exceed 100% (size may pass the cap
    // before eviction runs), which previously produced a negative term and
    // could drive the whole score below zero.
    let utilization_score = if (60.0..=80.0).contains(&utilization) {
        100.0 * 0.2
    } else if utilization < 60.0 {
        (utilization / 60.0) * 100.0 * 0.2
    } else {
        (((100.0 - utilization) / 20.0) * 100.0 * 0.2).max(0.0)
    };

    // Eviction component: worth up to 20 points.
    let eviction_score = if eviction_rate < 1.0 {
        100.0 * 0.2
    } else if eviction_rate < 5.0 {
        ((5.0 - eviction_rate) / 4.0) * 100.0 * 0.2
    } else {
        0.0
    };

    // Keep the total inside [0, 100].
    (hit_score + utilization_score + eviction_score).clamp(0.0, 100.0)
}
749
/// Produces a single piece of tuning advice from cache health metrics.
///
/// Checks are ordered by priority; the first matching condition wins.
fn generate_recommendation(
    hit_rate: f64,
    utilization: f64,
    eviction_rate: f64,
    entries: usize,
) -> String {
    let advice = if entries == 0 {
        "Cache is empty. Start compiling expressions to populate the cache."
    } else if hit_rate < 50.0 {
        "Low hit rate detected. Consider using cache warmup with frequently used expressions."
    } else if eviction_rate > 10.0 {
        "High eviction rate detected. Consider increasing max cache size to reduce thrashing."
    } else if utilization > 90.0 {
        "Cache is nearly full. Consider increasing max cache size or clearing old entries."
    } else if utilization < 30.0 && entries > 10 {
        "Low cache utilization. Cache size may be larger than needed."
    } else {
        "Cache is performing well. No immediate optimization needed."
    };

    advice.to_string()
}
780
#[cfg(test)]
mod tests {
    use super::*;
    use tensorlogic_compiler::CompilationConfig;
    use tensorlogic_ir::Term;

    // Creating a cache with an explicit directory should succeed and create
    // the directory on disk.
    #[test]
    fn test_cache_creation() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache");
        let cache = CompilationCache::new(Some(temp_dir.clone()), 100);
        assert!(cache.is_ok());

        let _ = fs::remove_dir_all(temp_dir);
    }

    // The same expression under different compilation configs must produce
    // different cache keys.
    #[test]
    fn test_hash_computation() {
        let expr = TLExpr::Pred {
            name: "test".to_string(),
            args: vec![Term::Var("x".to_string())],
        };

        let ctx1 = CompilerContext::with_config(CompilationConfig::soft_differentiable());
        let ctx2 = CompilerContext::with_config(CompilationConfig::hard_boolean());

        let hash1 = CompilationCache::compute_hash(&expr, &ctx1);
        let hash2 = CompilationCache::compute_hash(&expr, &ctx2);

        assert_ne!(hash1, hash2);
    }

    // Round trip: a graph stored with `put` must be retrievable with `get`
    // under the same expression and context.
    #[test]
    fn test_cache_put_get() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-putget");
        let mut cache = CompilationCache::new(Some(temp_dir.clone()), 100)
            .expect("cache creation should succeed");

        let expr = TLExpr::Pred {
            name: "test".to_string(),
            args: vec![Term::Var("x".to_string())],
        };

        let mut ctx = CompilerContext::with_config(CompilationConfig::soft_differentiable());
        ctx.add_domain("D", 10);

        let graph = EinsumGraph::new();

        cache
            .put(&expr, &ctx, &graph)
            .expect("cache put should succeed");

        let retrieved = cache.get(&expr, &ctx);
        assert!(retrieved.is_some());

        let _ = fs::remove_dir_all(temp_dir);
    }

    // `clear` must empty both the on-disk files and the in-memory index.
    #[test]
    fn test_cache_clear() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-clear");
        let mut cache = CompilationCache::new(Some(temp_dir.clone()), 100)
            .expect("cache creation should succeed");

        let expr = TLExpr::Pred {
            name: "test".to_string(),
            args: vec![Term::Var("x".to_string())],
        };

        let ctx = CompilerContext::with_config(CompilationConfig::soft_differentiable());
        let graph = EinsumGraph::new();

        cache
            .put(&expr, &ctx, &graph)
            .expect("cache put should succeed");
        assert_eq!(cache.stats().entries, 1);

        cache.clear().expect("cache clear should succeed");
        assert_eq!(cache.stats().entries, 0);

        let _ = fs::remove_dir_all(temp_dir);
    }

    // Warmup should parse, compile, and cache each expression.
    #[test]
    fn test_cache_warmup() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-warmup");
        let mut cache = CompilationCache::new(Some(temp_dir.clone()), 100)
            .expect("cache creation should succeed");

        let ctx = CompilerContext::with_config(CompilationConfig::soft_differentiable());

        let expressions = vec![
            ("pred(x, y)".to_string(), ctx.clone()),
            ("AND(a, b)".to_string(), ctx.clone()),
        ];

        let warmed = cache
            .warm_up(&expressions)
            .expect("cache warmup should succeed");

        assert_eq!(warmed, 2);
        assert_eq!(cache.stats().entries, 2);

        let _ = fs::remove_dir_all(temp_dir);
    }

    // File-based warmup: comments are skipped and both the full
    // (expr | config | domains) and minimal (expr only) forms are accepted.
    #[test]
    fn test_cache_warmup_from_file() {
        use std::fs::File;
        use std::io::Write;

        // Unique directory per run so parallel test invocations don't clash.
        let unique_id = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .subsec_nanos();
        let temp_dir =
            std::env::temp_dir().join(format!("tensorlogic-test-cache-warmup-file-{}", unique_id));
        let mut cache = CompilationCache::new(Some(temp_dir.clone()), 100)
            .expect("cache creation should succeed");

        // NOTE: the warmup file lives inside the cache dir; its .txt
        // extension keeps load_index from treating it as an entry.
        let warmup_file = temp_dir.join("warmup.txt");
        let mut file = File::create(&warmup_file).expect("warmup file creation should succeed");
        writeln!(file, "# This is a comment").expect("write should succeed");
        writeln!(file, "pred(x, y) | soft_differentiable | Person:100")
            .expect("write should succeed");
        writeln!(file, "AND(a, b)").expect("write should succeed");

        let result = cache
            .warm_up_from_file(&warmup_file)
            .expect("warmup from file should succeed");

        assert_eq!(result.total, 2);
        assert_eq!(result.warmed, 2);

        let _ = fs::remove_dir_all(temp_dir);
    }

    // Analytics derived from a healthy stats snapshot should score well.
    #[test]
    fn test_cache_analytics() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-analytics");
        let stats = CacheStats {
            entries: 50,
            size_mb: 100,
            max_size_mb: 200,
            enabled: true,
            cache_dir: temp_dir.clone(),
            hits: 800,
            misses: 200,
            evictions: 10,
            hit_rate: 80.0,
            compression_enabled: true,
        };

        let analytics = stats.get_analytics();

        assert_eq!(analytics.total_requests, 1000);
        assert_eq!(analytics.utilization_pct, 50.0);
        assert!(analytics.efficiency_score >= 70.0);
        assert!(!analytics.recommendation.is_empty());

        let _ = fs::remove_dir_all(temp_dir);
    }

    // Exported JSON must include the key counters.
    #[test]
    fn test_cache_stats_json_export() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-json");
        let stats = CacheStats {
            entries: 10,
            size_mb: 50,
            max_size_mb: 500,
            enabled: true,
            cache_dir: temp_dir.clone(),
            hits: 100,
            misses: 20,
            evictions: 2,
            hit_rate: 83.33,
            compression_enabled: true,
        };

        let json = stats.to_json();
        assert!(json.is_ok());

        let json_str = json.expect("JSON serialization should succeed");
        assert!(json_str.contains("\"entries\""));
        assert!(json_str.contains("\"hits\""));
        assert!(json_str.contains("\"hit_rate\""));

        let _ = fs::remove_dir_all(temp_dir);
    }

    #[test]
    fn test_cache_analytics_json_export() {
        let analytics = CacheAnalytics {
            total_requests: 500,
            utilization_pct: 65.0,
            avg_entry_size_kb: 512.0,
            eviction_rate: 2.5,
            efficiency_score: 85.0,
            recommendation: "Cache is performing well.".to_string(),
        };

        let json = analytics.to_json();
        assert!(json.is_ok());

        let json_str = json.expect("JSON serialization should succeed");
        assert!(json_str.contains("\"total_requests\""));
        assert!(json_str.contains("\"efficiency_score\""));
        assert!(json_str.contains("\"recommendation\""));
    }

    // Spot-check the three scoring components.
    #[test]
    fn test_efficiency_score_calculation() {
        let score1 = calculate_efficiency_score(90.0, 70.0, 0.5);
        assert!(score1 >= 80.0);

        let score2 = calculate_efficiency_score(30.0, 70.0, 0.5);
        assert!(score2 < 60.0);

        let score3 = calculate_efficiency_score(80.0, 70.0, 15.0);
        assert!(score3 < 80.0);
    }

    // Each recommendation branch should be reachable by its trigger metric.
    #[test]
    fn test_recommendation_generation() {
        let rec1 = generate_recommendation(0.0, 0.0, 0.0, 0);
        assert!(rec1.contains("empty"));

        let rec2 = generate_recommendation(30.0, 50.0, 1.0, 100);
        assert!(rec2.contains("hit rate"));

        let rec3 = generate_recommendation(80.0, 70.0, 15.0, 100);
        assert!(rec3.contains("eviction"));

        let rec4 = generate_recommendation(80.0, 95.0, 1.0, 100);
        assert!(rec4.contains("nearly full") || rec4.contains("full"));

        let rec5 = generate_recommendation(85.0, 65.0, 1.0, 100);
        assert!(rec5.contains("performing well"));
    }
}