1use anyhow::{Context, Result};
7use serde::{Deserialize, Serialize};
8use std::collections::HashMap;
9use std::fs;
10use std::path::{Path, PathBuf};
11use tensorlogic_compiler::CompilerContext;
12use tensorlogic_ir::{EinsumGraph, TLExpr};
13
/// One cached compilation result plus the bookkeeping used for LRU
/// eviction and statistics reporting.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheEntry {
    /// The compiled einsum graph being cached.
    pub graph: EinsumGraph,
    /// Debug-formatted compilation config this graph was produced under.
    pub strategy: String,
    /// Unix timestamp (seconds) when the entry was created.
    pub timestamp: i64,
    /// Unix timestamp (seconds) of the most recent cache hit.
    pub last_accessed: i64,
    /// Cache key: hash of expression + config + domain declarations.
    pub expr_hash: u64,
    /// Number of cache hits served from this entry.
    pub access_count: u64,
}
30
/// Disk-backed LRU cache mapping (expression, compilation context) hashes
/// to compiled einsum graphs.
pub struct CompilationCache {
    // Directory holding one file per entry (`<hash>.bin` or `<hash>.json`).
    cache_dir: PathBuf,
    // Soft upper bound on total on-disk size, in megabytes.
    max_size_mb: usize,
    // In-memory mirror of the on-disk entries, keyed by expression hash.
    index: HashMap<u64, CacheEntry>,
    // When false, `get`/`put` become no-ops.
    enabled: bool,
    // Lifetime counters for stats reporting.
    hits: u64,
    misses: u64,
    evictions: u64,
    // Gzip-compress entries on disk when true.
    use_compression: bool,
}
50
51impl CompilationCache {
52 pub fn new(cache_dir: Option<PathBuf>, max_size_mb: usize) -> Result<Self> {
54 Self::with_compression(cache_dir, max_size_mb, true)
55 }
56
57 pub fn with_compression(
59 cache_dir: Option<PathBuf>,
60 max_size_mb: usize,
61 use_compression: bool,
62 ) -> Result<Self> {
63 let cache_dir = match cache_dir {
64 Some(dir) => dir,
65 None => Self::default_cache_dir()?,
66 };
67
68 if !cache_dir.exists() {
70 fs::create_dir_all(&cache_dir).context("Failed to create cache directory")?;
71 }
72
73 let mut cache = Self {
74 cache_dir,
75 max_size_mb,
76 index: HashMap::new(),
77 enabled: true,
78 hits: 0,
79 misses: 0,
80 evictions: 0,
81 use_compression,
82 };
83
84 cache.load_index()?;
86
87 Ok(cache)
88 }
89
90 pub fn default_cache_dir() -> Result<PathBuf> {
92 let cache_dir = dirs::cache_dir()
93 .context("Failed to determine cache directory")?
94 .join("tensorlogic")
95 .join("compilation");
96 Ok(cache_dir)
97 }
98
99 pub fn compute_hash(expr: &TLExpr, context: &CompilerContext) -> u64 {
101 use std::collections::hash_map::DefaultHasher;
102 use std::hash::{Hash, Hasher};
103
104 let mut hasher = DefaultHasher::new();
105
106 let expr_str = format!("{:?}", expr);
108 expr_str.hash(&mut hasher);
109
110 let config_str = format!("{:?}", context.config);
112 config_str.hash(&mut hasher);
113
114 let mut domains: Vec<_> = context.domains.iter().collect();
116 domains.sort_by_key(|(name, _)| *name);
117 for (name, info) in domains {
118 name.hash(&mut hasher);
119 info.cardinality.hash(&mut hasher);
121 }
122
123 hasher.finish()
124 }
125
126 pub fn get(&mut self, expr: &TLExpr, context: &CompilerContext) -> Option<EinsumGraph> {
128 if !self.enabled {
129 return None;
130 }
131
132 let hash = Self::compute_hash(expr, context);
133
134 if let Some(entry) = self.index.get_mut(&hash) {
135 let current_strategy = format!("{:?}", context.config);
137 if entry.strategy == current_strategy {
138 entry.last_accessed = chrono::Utc::now().timestamp();
140 entry.access_count += 1;
141 self.hits += 1;
142
143 let entry_clone = entry.clone();
145 let graph = entry.graph.clone();
146
147 let _ = self.update_entry_metadata(&entry_clone);
149
150 return Some(graph);
151 }
152 }
153
154 self.misses += 1;
155 None
156 }
157
158 fn update_entry_metadata(&self, entry: &CacheEntry) -> Result<()> {
160 if self.use_compression {
161 let compressed = Self::compress_entry(entry)?;
162 let cache_file = self.cache_dir.join(format!("{:016x}.bin", entry.expr_hash));
163 fs::write(&cache_file, compressed)?;
164 } else {
165 let cache_file = self
166 .cache_dir
167 .join(format!("{:016x}.json", entry.expr_hash));
168 let json = serde_json::to_string(entry)?;
169 fs::write(&cache_file, json)?;
170 }
171
172 Ok(())
173 }
174
175 fn compress_entry(entry: &CacheEntry) -> Result<Vec<u8>> {
177 use std::io::Write;
178
179 let json = serde_json::to_vec(entry).context("Failed to serialize entry")?;
181
182 let mut encoder = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::best());
184 encoder.write_all(&json).context("Failed to compress")?;
185 let compressed = encoder.finish().context("Failed to finish compression")?;
186
187 Ok(compressed)
188 }
189
190 fn decompress_entry(compressed: &[u8]) -> Result<CacheEntry> {
192 use flate2::read::GzDecoder;
193 use std::io::Read;
194
195 let mut decoder = GzDecoder::new(compressed);
197 let mut decompressed = Vec::new();
198 decoder
199 .read_to_end(&mut decompressed)
200 .context("Failed to decompress")?;
201
202 let entry: CacheEntry =
204 serde_json::from_slice(&decompressed).context("Failed to deserialize entry")?;
205
206 Ok(entry)
207 }
208
209 pub fn put(
211 &mut self,
212 expr: &TLExpr,
213 context: &CompilerContext,
214 graph: &EinsumGraph,
215 ) -> Result<()> {
216 if !self.enabled {
217 return Ok(());
218 }
219
220 let hash = Self::compute_hash(expr, context);
221 let now = chrono::Utc::now().timestamp();
222
223 let entry = CacheEntry {
224 graph: graph.clone(),
225 strategy: format!("{:?}", context.config),
226 timestamp: now,
227 last_accessed: now,
228 expr_hash: hash,
229 access_count: 0,
230 };
231
232 if self.use_compression {
234 let compressed = Self::compress_entry(&entry)?;
235 let cache_file = self.cache_dir.join(format!("{:016x}.bin", hash));
236 fs::write(&cache_file, compressed)?;
237 } else {
238 let cache_file = self.cache_dir.join(format!("{:016x}.json", hash));
239 let json = serde_json::to_string_pretty(&entry)?;
240 fs::write(&cache_file, json)?;
241 }
242
243 self.index.insert(hash, entry);
245
246 self.enforce_size_limit()?;
248
249 Ok(())
250 }
251
252 fn load_index(&mut self) -> Result<()> {
254 if !self.cache_dir.exists() {
255 return Ok(());
256 }
257
258 for entry in fs::read_dir(&self.cache_dir)? {
259 let entry = entry?;
260 let path = entry.path();
261
262 let ext = path.extension().and_then(|s| s.to_str());
263
264 match ext {
265 Some("json") => {
266 if let Ok(content) = fs::read_to_string(&path) {
268 if let Ok(cache_entry) = serde_json::from_str::<CacheEntry>(&content) {
269 self.index.insert(cache_entry.expr_hash, cache_entry);
270 }
271 }
272 }
273 Some("bin") => {
274 if let Ok(content) = fs::read(&path) {
276 if let Ok(cache_entry) = Self::decompress_entry(&content) {
277 self.index.insert(cache_entry.expr_hash, cache_entry);
278 }
279 }
280 }
281 _ => {}
282 }
283 }
284
285 Ok(())
286 }
287
288 fn enforce_size_limit(&mut self) -> Result<()> {
290 let current_size = self.get_cache_size_mb()?;
291
292 if current_size > self.max_size_mb {
293 let mut entries: Vec<_> = self
295 .index
296 .iter()
297 .map(|(hash, entry)| (*hash, entry.last_accessed, entry.access_count))
298 .collect();
299
300 entries.sort_by(|a, b| a.1.cmp(&b.1).then_with(|| a.2.cmp(&b.2)));
302
303 let target_size = (self.max_size_mb as f64 * 0.8) as usize; for (hash, _, _) in entries {
307 if self.get_cache_size_mb()? <= target_size {
308 break;
309 }
310
311 self.remove_entry(hash)?;
312 self.evictions += 1;
313 }
314 }
315
316 Ok(())
317 }
318
319 fn get_cache_size_mb(&self) -> Result<usize> {
321 let mut total_bytes = 0u64;
322
323 for entry in fs::read_dir(&self.cache_dir)? {
324 let entry = entry?;
325 total_bytes += entry.metadata()?.len();
326 }
327
328 Ok((total_bytes / 1_000_000) as usize)
329 }
330
331 fn remove_entry(&mut self, hash: u64) -> Result<()> {
333 let json_file = self.cache_dir.join(format!("{:016x}.json", hash));
335 let bin_file = self.cache_dir.join(format!("{:016x}.bin", hash));
336
337 if json_file.exists() {
338 fs::remove_file(json_file)?;
339 }
340 if bin_file.exists() {
341 fs::remove_file(bin_file)?;
342 }
343
344 self.index.remove(&hash);
345 Ok(())
346 }
347
348 pub fn clear(&mut self) -> Result<()> {
350 for entry in fs::read_dir(&self.cache_dir)? {
351 let entry = entry?;
352 fs::remove_file(entry.path())?;
353 }
354
355 self.index.clear();
356 Ok(())
357 }
358
359 pub fn stats(&self) -> CacheStats {
361 let hit_rate = if self.hits + self.misses > 0 {
362 (self.hits as f64 / (self.hits + self.misses) as f64) * 100.0
363 } else {
364 0.0
365 };
366
367 CacheStats {
368 entries: self.index.len(),
369 size_mb: self.get_cache_size_mb().unwrap_or(0),
370 max_size_mb: self.max_size_mb,
371 enabled: self.enabled,
372 cache_dir: self.cache_dir.clone(),
373 hits: self.hits,
374 misses: self.misses,
375 evictions: self.evictions,
376 hit_rate,
377 compression_enabled: self.use_compression,
378 }
379 }
380
381 #[allow(dead_code)]
386 pub fn warm_up(&mut self, expressions: &[(String, CompilerContext)]) -> Result<usize> {
387 use crate::parser::parse_expression;
388 use tensorlogic_compiler::compile_to_einsum_with_context;
389
390 let mut warmed = 0;
391
392 for (expr_str, context) in expressions {
393 if let Ok(expr) = parse_expression(expr_str) {
395 let mut ctx_clone = context.clone();
396 if let Ok(graph) = compile_to_einsum_with_context(&expr, &mut ctx_clone) {
397 if self.put(&expr, context, &graph).is_ok() {
399 warmed += 1;
400 }
401 }
402 }
403 }
404
405 Ok(warmed)
406 }
407
408 #[allow(dead_code)]
414 pub fn warm_up_from_file(&mut self, file_path: &std::path::Path) -> Result<CacheWarmupResult> {
415 use std::fs;
416 use tensorlogic_compiler::CompilationConfig;
417
418 let content = fs::read_to_string(file_path)
419 .with_context(|| format!("Failed to read warmup file: {}", file_path.display()))?;
420
421 let mut expressions = Vec::new();
422 let mut errors = Vec::new();
423
424 for line in content.lines() {
425 let line = line.trim();
426
427 if line.is_empty() || line.starts_with('#') {
429 continue;
430 }
431
432 let parts: Vec<&str> = line.split('|').map(|s| s.trim()).collect();
434
435 if parts.is_empty() {
436 continue;
437 }
438
439 let expr_str = parts[0].to_string();
440
441 let config = if parts.len() > 1 {
443 match parts[1] {
444 "hard_boolean" => CompilationConfig::hard_boolean(),
445 "fuzzy_godel" => CompilationConfig::fuzzy_godel(),
446 "fuzzy_product" => CompilationConfig::fuzzy_product(),
447 "fuzzy_lukasiewicz" => CompilationConfig::fuzzy_lukasiewicz(),
448 "probabilistic" => CompilationConfig::probabilistic(),
449 _ => CompilationConfig::soft_differentiable(),
450 }
451 } else {
452 CompilationConfig::soft_differentiable()
453 };
454
455 let mut context = CompilerContext::with_config(config);
456
457 if parts.len() > 2 {
459 for domain_spec in parts[2].split(',') {
460 let domain_parts: Vec<&str> = domain_spec.split(':').collect();
461 if domain_parts.len() == 2 {
462 if let Ok(size) = domain_parts[1].parse::<usize>() {
463 context.add_domain(domain_parts[0], size);
464 }
465 }
466 }
467 }
468
469 expressions.push((expr_str, context));
470 }
471
472 match self.warm_up(&expressions) {
474 Ok(warmed) => Ok(CacheWarmupResult {
475 total: expressions.len(),
476 warmed,
477 errors,
478 }),
479 Err(e) => {
480 errors.push(format!("Warmup error: {}", e));
481 Ok(CacheWarmupResult {
482 total: expressions.len(),
483 warmed: 0,
484 errors,
485 })
486 }
487 }
488 }
489}
490
/// Outcome of a cache warmup run.
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub struct CacheWarmupResult {
    /// Number of expression specs read from the input.
    pub total: usize,
    /// Number successfully compiled and cached.
    pub warmed: usize,
    /// Human-readable error messages collected during warmup.
    pub errors: Vec<String>,
}
502
503impl CacheWarmupResult {
504 #[allow(dead_code)]
506 pub fn print(&self) {
507 use crate::output::{print_error, print_header, print_info, print_success};
508
509 print_header("Cache Warmup Results");
510 print_info(&format!(" Total expressions: {}", self.total));
511 print_success(&format!(" Successfully warmed: {}", self.warmed));
512
513 if !self.errors.is_empty() {
514 print_error(&format!(" Errors: {}", self.errors.len()));
515 for error in &self.errors {
516 print_info(&format!(" - {}", error));
517 }
518 }
519 }
520}
521
/// Serializable snapshot of cache state and performance counters, as
/// returned by `CompilationCache::stats`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheStats {
    /// Number of entries in the in-memory index.
    pub entries: usize,
    /// Current on-disk size in MB.
    pub size_mb: usize,
    /// Configured size limit in MB.
    pub max_size_mb: usize,
    /// Whether the cache is currently active.
    pub enabled: bool,
    // Serialized as a plain string for JSON portability.
    #[serde(
        serialize_with = "serialize_path",
        deserialize_with = "deserialize_path"
    )]
    pub cache_dir: PathBuf,
    /// Lifetime hit count.
    pub hits: u64,
    /// Lifetime miss count.
    pub misses: u64,
    /// Lifetime eviction count.
    pub evictions: u64,
    /// hits / (hits + misses) as a percentage (0 when no requests yet).
    pub hit_rate: f64,
    /// Whether entries are gzip-compressed on disk.
    pub compression_enabled: bool,
}
550
551fn serialize_path<S>(path: &Path, serializer: S) -> Result<S::Ok, S::Error>
553where
554 S: serde::Serializer,
555{
556 serializer.serialize_str(&path.to_string_lossy())
557}
558
559fn deserialize_path<'de, D>(deserializer: D) -> Result<PathBuf, D::Error>
560where
561 D: serde::Deserializer<'de>,
562{
563 let s = String::deserialize(deserializer)?;
564 Ok(PathBuf::from(s))
565}
566
567impl CacheStats {
568 pub fn print(&self) {
570 use crate::output::{print_header, print_info, print_success};
571
572 print_header("Cache Statistics");
573 print_info(&format!(" Entries: {}", self.entries));
574 print_info(&format!(
575 " Size: {} MB / {} MB ({:.1}% full)",
576 self.size_mb,
577 self.max_size_mb,
578 (self.size_mb as f64 / self.max_size_mb as f64) * 100.0
579 ));
580 print_info(&format!(
581 " Enabled: {}",
582 if self.enabled { "yes" } else { "no" }
583 ));
584 print_info(&format!(
585 " Compression: {}",
586 if self.compression_enabled {
587 "enabled"
588 } else {
589 "disabled"
590 }
591 ));
592
593 print_header("Performance Metrics");
595 print_info(&format!(" Cache Hits: {}", self.hits));
596 print_info(&format!(" Cache Misses: {}", self.misses));
597 print_info(&format!(" Evictions: {}", self.evictions));
598
599 if self.hits + self.misses > 0 {
600 if self.hit_rate >= 80.0 {
601 print_success(&format!(" Hit Rate: {:.2}% (excellent)", self.hit_rate));
602 } else if self.hit_rate >= 50.0 {
603 print_info(&format!(" Hit Rate: {:.2}% (good)", self.hit_rate));
604 } else {
605 print_info(&format!(" Hit Rate: {:.2}% (poor)", self.hit_rate));
606 }
607 } else {
608 print_info(" Hit Rate: N/A (no requests yet)");
609 }
610
611 print_info(&format!(" Location: {}", self.cache_dir.display()));
612 }
613
614 #[allow(dead_code)]
616 pub fn to_json(&self) -> Result<String> {
617 serde_json::to_string_pretty(self).context("Failed to serialize cache statistics to JSON")
618 }
619
620 #[allow(dead_code)]
622 pub fn export_to_file(&self, path: &Path) -> Result<()> {
623 let json = self.to_json()?;
624 fs::write(path, json).context("Failed to write cache statistics to file")?;
625 Ok(())
626 }
627
628 #[allow(dead_code)]
630 pub fn get_analytics(&self) -> CacheAnalytics {
631 let total_requests = self.hits + self.misses;
632 let utilization_pct = if self.max_size_mb > 0 {
633 (self.size_mb as f64 / self.max_size_mb as f64) * 100.0
634 } else {
635 0.0
636 };
637
638 let avg_entry_size_kb = if self.entries > 0 {
639 (self.size_mb as f64 * 1024.0) / self.entries as f64
640 } else {
641 0.0
642 };
643
644 let eviction_rate = if total_requests > 0 {
645 (self.evictions as f64 / total_requests as f64) * 100.0
646 } else {
647 0.0
648 };
649
650 let efficiency_score =
651 calculate_efficiency_score(self.hit_rate, utilization_pct, eviction_rate);
652
653 CacheAnalytics {
654 total_requests,
655 utilization_pct,
656 avg_entry_size_kb,
657 eviction_rate,
658 efficiency_score,
659 recommendation: generate_recommendation(
660 self.hit_rate,
661 utilization_pct,
662 eviction_rate,
663 self.entries,
664 ),
665 }
666 }
667}
668
/// Higher-level metrics derived from `CacheStats`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheAnalytics {
    /// hits + misses.
    pub total_requests: u64,
    /// size_mb / max_size_mb as a percentage.
    pub utilization_pct: f64,
    /// Average size per entry, in KB.
    pub avg_entry_size_kb: f64,
    /// evictions / total_requests as a percentage.
    pub eviction_rate: f64,
    /// Composite 0-100 score (see `calculate_efficiency_score`).
    pub efficiency_score: f64,
    /// One-line tuning suggestion (see `generate_recommendation`).
    pub recommendation: String,
}
685
686impl CacheAnalytics {
687 #[allow(dead_code)]
689 pub fn print(&self) {
690 use crate::output::{print_header, print_info, print_success, print_warning};
691
692 print_header("Cache Analytics");
693 print_info(&format!(" Total Requests: {}", self.total_requests));
694 print_info(&format!(" Utilization: {:.1}%", self.utilization_pct));
695 print_info(&format!(
696 " Avg Entry Size: {:.2} KB",
697 self.avg_entry_size_kb
698 ));
699 print_info(&format!(" Eviction Rate: {:.2}%", self.eviction_rate));
700
701 if self.efficiency_score >= 80.0 {
702 print_success(&format!(
703 " Efficiency Score: {:.1}/100 (excellent)",
704 self.efficiency_score
705 ));
706 } else if self.efficiency_score >= 60.0 {
707 print_info(&format!(
708 " Efficiency Score: {:.1}/100 (good)",
709 self.efficiency_score
710 ));
711 } else {
712 print_warning(&format!(
713 " Efficiency Score: {:.1}/100 (needs improvement)",
714 self.efficiency_score
715 ));
716 }
717
718 if !self.recommendation.is_empty() {
719 print_header("Recommendation");
720 print_info(&format!(" {}", self.recommendation));
721 }
722 }
723
724 #[allow(dead_code)]
726 pub fn to_json(&self) -> Result<String> {
727 serde_json::to_string_pretty(self).context("Failed to serialize cache analytics to JSON")
728 }
729}
730
/// Combines hit rate (60%), utilization (20%) and eviction rate (20%)
/// into a 0-100 efficiency score.
///
/// Utilization is rewarded most in the 60-80% "sweet spot"; eviction rates
/// under 1% score full marks and over 5% score zero.
fn calculate_efficiency_score(hit_rate: f64, utilization: f64, eviction_rate: f64) -> f64 {
    // Hit rate dominates: up to 60 points.
    let hit_score = hit_rate * 0.6;

    // Up to 20 points. Clamp the over-full branch at 0 so a cache above
    // 100% utilization cannot contribute a *negative* component.
    let utilization_score = if (60.0..=80.0).contains(&utilization) {
        100.0 * 0.2
    } else if utilization < 60.0 {
        (utilization / 60.0) * 100.0 * 0.2
    } else {
        (((100.0 - utilization) / 20.0) * 100.0 * 0.2).max(0.0)
    };

    // Up to 20 points; linear falloff between 1% and 5% eviction rate.
    let eviction_score = if eviction_rate < 1.0 {
        100.0 * 0.2
    } else if eviction_rate < 5.0 {
        ((5.0 - eviction_rate) / 4.0) * 100.0 * 0.2
    } else {
        0.0
    };

    (hit_score + utilization_score + eviction_score).clamp(0.0, 100.0)
}
758
/// Produces a one-line tuning suggestion from the cache metrics.
///
/// Checks are ordered by severity: empty cache, poor hit rate, eviction
/// thrashing, near-full, then over-provisioned.
fn generate_recommendation(
    hit_rate: f64,
    utilization: f64,
    eviction_rate: f64,
    entries: usize,
) -> String {
    let advice = if entries == 0 {
        "Cache is empty. Start compiling expressions to populate the cache."
    } else if hit_rate < 50.0 {
        "Low hit rate detected. Consider using cache warmup with frequently used expressions."
    } else if eviction_rate > 10.0 {
        "High eviction rate detected. Consider increasing max cache size to reduce thrashing."
    } else if utilization > 90.0 {
        "Cache is nearly full. Consider increasing max cache size or clearing old entries."
    } else if utilization < 30.0 && entries > 10 {
        "Low cache utilization. Cache size may be larger than needed."
    } else {
        "Cache is performing well. No immediate optimization needed."
    };

    advice.to_string()
}
789
#[cfg(test)]
mod tests {
    use super::*;
    use tensorlogic_compiler::CompilationConfig;
    use tensorlogic_ir::Term;

    // Constructing a cache with an explicit directory succeeds and creates
    // the directory on disk.
    #[test]
    fn test_cache_creation() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache");
        let cache = CompilationCache::new(Some(temp_dir.clone()), 100);
        assert!(cache.is_ok());

        let _ = fs::remove_dir_all(temp_dir);
    }

    // The cache key must depend on the compilation config, not just the
    // expression: same expression, different configs => different hashes.
    #[test]
    fn test_hash_computation() {
        let expr = TLExpr::Pred {
            name: "test".to_string(),
            args: vec![Term::Var("x".to_string())],
        };

        let ctx1 = CompilerContext::with_config(CompilationConfig::soft_differentiable());
        let ctx2 = CompilerContext::with_config(CompilationConfig::hard_boolean());

        let hash1 = CompilationCache::compute_hash(&expr, &ctx1);
        let hash2 = CompilationCache::compute_hash(&expr, &ctx2);

        assert_ne!(hash1, hash2);
    }

    // Round-trip: a graph stored with `put` is retrievable with `get`
    // under the same expression and context.
    #[test]
    fn test_cache_put_get() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-putget");
        let mut cache = CompilationCache::new(Some(temp_dir.clone()), 100).unwrap();

        let expr = TLExpr::Pred {
            name: "test".to_string(),
            args: vec![Term::Var("x".to_string())],
        };

        let mut ctx = CompilerContext::with_config(CompilationConfig::soft_differentiable());
        ctx.add_domain("D", 10);

        let graph = EinsumGraph::new();

        cache.put(&expr, &ctx, &graph).unwrap();

        let retrieved = cache.get(&expr, &ctx);
        assert!(retrieved.is_some());

        let _ = fs::remove_dir_all(temp_dir);
    }

    // `clear` empties both the on-disk files and the in-memory index.
    #[test]
    fn test_cache_clear() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-clear");
        let mut cache = CompilationCache::new(Some(temp_dir.clone()), 100).unwrap();

        let expr = TLExpr::Pred {
            name: "test".to_string(),
            args: vec![Term::Var("x".to_string())],
        };

        let ctx = CompilerContext::with_config(CompilationConfig::soft_differentiable());
        let graph = EinsumGraph::new();

        cache.put(&expr, &ctx, &graph).unwrap();
        assert_eq!(cache.stats().entries, 1);

        cache.clear().unwrap();
        assert_eq!(cache.stats().entries, 0);

        let _ = fs::remove_dir_all(temp_dir);
    }

    // Warmup parses, compiles and caches each listed expression.
    #[test]
    fn test_cache_warmup() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-warmup");
        let mut cache = CompilationCache::new(Some(temp_dir.clone()), 100).unwrap();

        let ctx = CompilerContext::with_config(CompilationConfig::soft_differentiable());

        let expressions = vec![
            ("pred(x, y)".to_string(), ctx.clone()),
            ("AND(a, b)".to_string(), ctx.clone()),
        ];

        let warmed = cache.warm_up(&expressions).unwrap();

        assert_eq!(warmed, 2);
        assert_eq!(cache.stats().entries, 2);

        let _ = fs::remove_dir_all(temp_dir);
    }

    // File-based warmup: comments are skipped and both the full
    // `expr | strategy | domains` form and the bare-expression form parse.
    #[test]
    fn test_cache_warmup_from_file() {
        use std::fs::File;
        use std::io::Write;

        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-warmup-file");
        let mut cache = CompilationCache::new(Some(temp_dir.clone()), 100).unwrap();

        let warmup_file = temp_dir.join("warmup.txt");
        let mut file = File::create(&warmup_file).unwrap();
        writeln!(file, "# This is a comment").unwrap();
        writeln!(file, "pred(x, y) | soft_differentiable | Person:100").unwrap();
        writeln!(file, "AND(a, b)").unwrap();

        let result = cache.warm_up_from_file(&warmup_file).unwrap();

        assert_eq!(result.total, 2);
        assert_eq!(result.warmed, 2);

        let _ = fs::remove_dir_all(temp_dir);
    }

    // Analytics derived from a half-full cache with an 80% hit rate.
    #[test]
    fn test_cache_analytics() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-analytics");
        let stats = CacheStats {
            entries: 50,
            size_mb: 100,
            max_size_mb: 200,
            enabled: true,
            cache_dir: temp_dir.clone(),
            hits: 800,
            misses: 200,
            evictions: 10,
            hit_rate: 80.0,
            compression_enabled: true,
        };

        let analytics = stats.get_analytics();

        assert_eq!(analytics.total_requests, 1000);
        assert_eq!(analytics.utilization_pct, 50.0);
        assert!(analytics.efficiency_score >= 70.0); assert!(!analytics.recommendation.is_empty());

        let _ = fs::remove_dir_all(temp_dir);
    }

    // Stats serialize to JSON containing the expected field names.
    #[test]
    fn test_cache_stats_json_export() {
        let temp_dir = std::env::temp_dir().join("tensorlogic-test-cache-json");
        let stats = CacheStats {
            entries: 10,
            size_mb: 50,
            max_size_mb: 500,
            enabled: true,
            cache_dir: temp_dir.clone(),
            hits: 100,
            misses: 20,
            evictions: 2,
            hit_rate: 83.33,
            compression_enabled: true,
        };

        let json = stats.to_json();
        assert!(json.is_ok());

        let json_str = json.unwrap();
        assert!(json_str.contains("\"entries\""));
        assert!(json_str.contains("\"hits\""));
        assert!(json_str.contains("\"hit_rate\""));

        let _ = fs::remove_dir_all(temp_dir);
    }

    // Analytics serialize to JSON containing the expected field names.
    #[test]
    fn test_cache_analytics_json_export() {
        let analytics = CacheAnalytics {
            total_requests: 500,
            utilization_pct: 65.0,
            avg_entry_size_kb: 512.0,
            eviction_rate: 2.5,
            efficiency_score: 85.0,
            recommendation: "Cache is performing well.".to_string(),
        };

        let json = analytics.to_json();
        assert!(json.is_ok());

        let json_str = json.unwrap();
        assert!(json_str.contains("\"total_requests\""));
        assert!(json_str.contains("\"efficiency_score\""));
        assert!(json_str.contains("\"recommendation\""));
    }

    // Score should rise with hit rate and fall with heavy eviction.
    #[test]
    fn test_efficiency_score_calculation() {
        let score1 = calculate_efficiency_score(90.0, 70.0, 0.5);
        assert!(score1 >= 80.0);

        let score2 = calculate_efficiency_score(30.0, 70.0, 0.5);
        assert!(score2 < 60.0);

        let score3 = calculate_efficiency_score(80.0, 70.0, 15.0);
        assert!(score3 < 80.0);
    }

    // Each branch of the recommendation logic produces its keyword.
    #[test]
    fn test_recommendation_generation() {
        let rec1 = generate_recommendation(0.0, 0.0, 0.0, 0);
        assert!(rec1.contains("empty"));

        let rec2 = generate_recommendation(30.0, 50.0, 1.0, 100);
        assert!(rec2.contains("hit rate"));

        let rec3 = generate_recommendation(80.0, 70.0, 15.0, 100);
        assert!(rec3.contains("eviction"));

        let rec4 = generate_recommendation(80.0, 95.0, 1.0, 100);
        assert!(rec4.contains("nearly full") || rec4.contains("full"));

        let rec5 = generate_recommendation(85.0, 65.0, 1.0, 100);
        assert!(rec5.contains("performing well"));
    }
}