1use crate::args::{CacheAction, Cli};
4use anyhow::{Context, Result};
5use sqry_core::cache::{CacheConfig, CacheManager, PruneOptions, PruneOutputMode, PruneReport};
6use std::collections::HashMap;
7use std::path::{Path, PathBuf};
8use std::time::Duration;
9
10pub fn run_cache(cli: &Cli, action: &CacheAction) -> Result<()> {
15 match action {
16 CacheAction::Stats { path } => {
17 let search_path = path.as_deref().unwrap_or(".");
18 show_cache_stats(cli, search_path)
19 }
20 CacheAction::Clear { path, confirm } => {
21 let search_path = path.as_deref().unwrap_or(".");
22 clear_cache(cli, search_path, *confirm);
23 Ok(())
24 }
25 CacheAction::Prune {
26 days,
27 size,
28 dry_run,
29 path,
30 } => prune_cache(cli, *days, size.as_deref(), *dry_run, path.as_deref()),
31 CacheAction::Expand {
32 refresh,
33 crate_name,
34 dry_run,
35 output,
36 } => run_expand_cache(
37 cli,
38 *refresh,
39 crate_name.as_deref(),
40 *dry_run,
41 output.as_deref(),
42 ),
43 }
44}
45
/// Prints AST cache statistics, as pretty JSON (`--json`) or as a
/// human-readable report that also includes on-disk usage.
///
/// NOTE(review): the `_path` argument is accepted but currently unused —
/// stats always come from the env-configured cache manager. Confirm
/// whether per-path stats are planned.
fn show_cache_stats(cli: &Cli, _path: &str) -> Result<()> {
    let config = CacheConfig::from_env();
    let cache = CacheManager::new(config);
    let stats = cache.stats();

    if cli.json {
        // Machine-readable output mirrors the fields of the human report.
        let json_stats = serde_json::json!({
            "ast_cache": {
                "hits": stats.hits,
                "misses": stats.misses,
                "evictions": stats.evictions,
                "entry_count": stats.entry_count,
                "total_bytes": stats.total_bytes,
                "total_mb": bytes_to_mb_lossy(stats.total_bytes),
                "hit_rate": stats.hit_rate(),
            },
        });
        println!("{}", serde_json::to_string_pretty(&json_stats)?);
    } else {
        println!("AST Cache Statistics");
        println!("====================");
        println!();
        println!("Performance:");
        println!(" Hit rate: {:.1}%", stats.hit_rate() * 100.0);
        println!(" Hits: {}", stats.hits);
        println!(" Misses: {}", stats.misses);
        println!(" Evictions: {}", stats.evictions);
        println!();
        println!("Storage:");
        println!(" Entries: {}", stats.entry_count);
        println!(
            " Memory: {:.2} MB",
            bytes_to_mb_lossy(stats.total_bytes)
        );
        println!();

        print_cache_effectiveness(stats.hits, stats.misses);

        // In-memory stats above come from the manager; disk usage below is
        // measured directly from the cache directory on disk.
        let cache_root =
            std::env::var("SQRY_CACHE_ROOT").unwrap_or_else(|_| ".sqry-cache".to_string());
        println!("Cache location: {cache_root}");

        let disk_usage = get_disk_usage(&cache_root);
        println!();
        println!("Disk Usage:");
        println!(" Files: {}", disk_usage.file_count);
        println!(
            " Total size: {:.2} MB",
            bytes_to_mb_lossy(disk_usage.bytes)
        );
    }

    Ok(())
}
107
/// Prints a rough estimate of time saved by cache hits.
///
/// Silent when the cache has never been accessed.
fn print_cache_effectiveness(hits: usize, misses: usize) {
    let total_accesses = hits + misses;
    if total_accesses == 0 {
        return;
    }

    // Rough model: each hit skips a re-parse assumed to cost ~50 ms.
    const AVG_SAVINGS_MS: usize = 50;
    let time_saved_ms = hits * AVG_SAVINGS_MS;
    let time_saved_sec = time_saved_ms / 1000;

    println!("Estimated Impact:");
    println!(" Total accesses: {total_accesses}");
    println!(" Time saved: ~{time_saved_sec} seconds ({time_saved_ms} ms)");
    println!();
}
122
/// On-disk footprint of the cache directory.
struct DiskUsage {
    /// Number of regular files found under the cache root.
    file_count: usize,
    /// Total size of those files in bytes.
    bytes: u64,
}
127
128fn get_disk_usage(cache_root: &str) -> DiskUsage {
129 use walkdir::WalkDir;
130
131 let mut file_count = 0;
132 let mut total_bytes = 0u64;
133
134 for entry in WalkDir::new(cache_root)
135 .into_iter()
136 .filter_map(std::result::Result::ok)
137 .filter(|e| e.file_type().is_file())
138 {
139 if let Ok(metadata) = entry.metadata() {
140 total_bytes += metadata.len();
141 file_count += 1;
142 }
143 }
144
145 DiskUsage {
146 file_count,
147 bytes: total_bytes,
148 }
149}
150
/// Converts a byte/size count to `f64` for display purposes.
///
/// Values above 2^53 lose precision (the f64 mantissa width), which is fine
/// for human-readable output. The previous implementation narrowed through
/// `u32` first, saturating every value above `u32::MAX` (~4 GiB) and thus
/// under-reporting large caches.
fn u64_to_f64_lossy(value: u64) -> f64 {
    // `as` is the intended lossy conversion here; it only loses precision
    // above 2^53 rather than clamping at 4 GiB.
    #[allow(clippy::cast_precision_loss)]
    let converted = value as f64;
    converted
}
155
156fn bytes_to_mb_lossy(bytes: u64) -> f64 {
157 u64_to_f64_lossy(bytes) / 1_048_576.0
158}
159
/// Clears the entire AST cache, gated behind an explicit `--confirm` flag.
///
/// Without `--confirm` it prints usage guidance and terminates the whole
/// process with exit code 1. NOTE(review): `std::process::exit` skips
/// destructors — confirm this path is only ever reached from the CLI.
fn clear_cache(_cli: &Cli, _path: &str, confirm: bool) {
    if !confirm {
        eprintln!("Error: Cache clear requires --confirm flag for safety");
        eprintln!();
        eprintln!("This will delete all cached AST data. Next queries will re-parse files.");
        eprintln!();
        eprintln!("To proceed, run:");
        eprintln!(" sqry cache clear --confirm");
        std::process::exit(1);
    }

    let config = CacheConfig::from_env();
    let cache = CacheManager::new(config);

    // Snapshot stats before clearing so we can report what was removed.
    let stats_before = cache.stats();

    cache.clear();

    // Re-read stats to show the post-clear state.
    let stats_after = cache.stats();

    println!("Cache cleared successfully");
    println!();
    println!("Removed:");
    println!(" Entries: {}", stats_before.entry_count);
    println!(
        " Memory: {:.2} MB",
        bytes_to_mb_lossy(stats_before.total_bytes)
    );
    println!();
    println!("Current stats:");
    println!(" Entries: {}", stats_after.entry_count);
    println!(
        " Memory: {:.2} MB",
        bytes_to_mb_lossy(stats_after.total_bytes)
    );
}
200
201fn prune_cache(
203 cli: &Cli,
204 days: Option<u64>,
205 size_str: Option<&str>,
206 dry_run: bool,
207 path: Option<&str>,
208) -> Result<()> {
209 let options = build_prune_options(cli, days, size_str, dry_run, path)?;
210 let report = execute_cache_prune(&options)?;
211 write_prune_report(cli, dry_run, &report)?;
212
213 Ok(())
214}
215
216fn parse_byte_size(s: &str) -> Result<u64> {
218 let s = s.trim().to_uppercase();
219
220 let (num_str, unit) = if s.ends_with("GB") {
222 (&s[..s.len() - 2], 1024 * 1024 * 1024)
223 } else if s.ends_with("MB") {
224 (&s[..s.len() - 2], 1024 * 1024)
225 } else if s.ends_with("KB") {
226 (&s[..s.len() - 2], 1024)
227 } else if s.ends_with('B') {
228 (&s[..s.len() - 1], 1)
229 } else {
230 (&s[..], 1)
232 };
233
234 let num: u64 = num_str.trim().parse().map_err(|_| {
235 anyhow::anyhow!("Invalid size format {s}. Expected formats: 1GB, 500MB, 100KB")
236 })?;
237
238 Ok(num * unit)
239}
240
241fn build_prune_options(
242 cli: &Cli,
243 days: Option<u64>,
244 size_str: Option<&str>,
245 dry_run: bool,
246 path: Option<&str>,
247) -> Result<PruneOptions> {
248 let max_size = size_str.map(parse_byte_size).transpose()?;
250
251 let max_age = days.map(|d| Duration::from_secs(d * 24 * 3600));
253
254 let mut options = PruneOptions::new();
256
257 if let Some(age) = max_age {
258 options = options.with_max_age(age);
259 }
260
261 if let Some(size) = max_size {
262 options = options.with_max_size(size);
263 }
264
265 options = options.with_dry_run(dry_run);
266
267 let output_mode = if cli.json {
268 PruneOutputMode::Json
269 } else {
270 PruneOutputMode::Human
271 };
272 options = options.with_output_mode(output_mode);
273
274 if let Some(p) = path {
275 options = options.with_target_dir(PathBuf::from(p));
276 }
277
278 Ok(options)
279}
280
281fn execute_cache_prune(options: &PruneOptions) -> Result<PruneReport> {
282 let config = CacheConfig::from_env();
283 let cache = CacheManager::new(config);
284 cache.prune(options)
285}
286
/// Renders a `PruneReport` to stdout.
///
/// With `--json` the report is serialized verbatim and nothing else is
/// printed. Otherwise a human summary is written; `dry_run` only changes
/// the wording, since the report itself already reflects the (possibly
/// simulated) prune.
fn write_prune_report(cli: &Cli, dry_run: bool, report: &PruneReport) -> Result<()> {
    if cli.json {
        println!("{}", serde_json::to_string_pretty(report)?);
        return Ok(());
    }

    let header = if dry_run {
        "Cache Prune Preview (Dry Run)"
    } else {
        "Cache Prune Report"
    };
    println!("{header}");
    println!("====================");
    println!();

    // Nothing matched the prune criteria — report that and stop early.
    if report.entries_removed == 0 {
        println!("No entries removed");
        println!("Cache is within configured limits");
        return Ok(());
    }

    println!("Entries:");
    println!(" Considered: {}", report.entries_considered);
    println!(" Removed: {}", report.entries_removed);
    println!(" Remaining: {}", report.remaining_entries);
    println!();
    println!("Space:");
    println!(
        " Reclaimed: {:.2} MB",
        bytes_to_mb_lossy(report.bytes_removed)
    );
    println!(
        " Remaining: {:.2} MB",
        bytes_to_mb_lossy(report.remaining_bytes)
    );

    if dry_run {
        println!();
        println!("Run without --dry-run to actually delete files");
    }

    Ok(())
}
330
/// Default location (relative to the workspace root) for macro-expansion cache files.
const DEFAULT_EXPAND_CACHE_DIR: &str = ".sqry/expand-cache";

/// Upper bound on accepted `cargo expand` output (10 MiB); larger crates are skipped.
const MAX_EXPANSION_SIZE_BYTES: usize = 10 * 1024 * 1024;
340
/// Returns true when `name` looks like a plausible Rust symbol/path
/// (alphanumerics plus `_ : < > space & '` for paths, generics, and
/// lifetimes), rejecting shell metacharacters and control characters.
///
/// The empty string is rejected explicitly: `all()` on an empty iterator
/// is vacuously true, so the previous version accepted "".
fn is_valid_symbol_name(name: &str) -> bool {
    !name.is_empty()
        && name
            .chars()
            .all(|c| c.is_alphanumeric() || matches!(c, '_' | ':' | '<' | '>' | ' ' | '&' | '\''))
}
346
/// Outcome of attempting to expand one crate.
#[derive(Debug)]
struct CrateExpandResult {
    /// Crate this result refers to.
    crate_name: String,
    /// Symbols found in the expanded output (0 when skipped).
    symbols_found: usize,
    /// Symbols present only after expansion (heuristic count).
    generated_symbols: usize,
    /// True when a fresh cache entry made expansion unnecessary.
    cached: bool,
    /// Why expansion was skipped, if it was (e.g. "cache is fresh").
    skipped_reason: Option<String>,
}
356
/// On-disk cache record for one crate's macro expansion (one JSON file per crate).
#[derive(Debug, serde::Serialize, serde::Deserialize)]
struct ExpandCacheEntry {
    /// Crate this entry describes.
    crate_name: String,
    /// `rustc --version` string captured at generation time.
    rust_version: String,
    /// Generation timestamp (epoch seconds with a literal 'Z' suffix).
    generated_at: String,
    /// SHA-256 over the crate's `.rs` sources; compared for freshness checks.
    source_hash: String,
    /// Per-file symbol breakdown, keyed by workspace-relative path.
    files: HashMap<String, ExpandCacheFileEntry>,
}
366
/// Symbol sets recorded for a single source file in the expand cache.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
struct ExpandCacheFileEntry {
    /// Symbols declared in the unexpanded source.
    original_symbols: Vec<String>,
    /// Symbols visible after macro expansion.
    expanded_symbols: Vec<String>,
    /// Expanded-only symbols (expanded minus original).
    generated_symbols: Vec<String>,
    /// Extraction quality marker; currently always "heuristic".
    confidence: String,
}
375
/// Generates (or refreshes) the macro-expansion cache for workspace crates.
///
/// Requires the external `cargo-expand` tool. With `crate_name` only that
/// crate is processed; with `dry_run` a plan is printed and nothing is
/// written; `output` overrides the default cache directory under the
/// workspace root.
fn run_expand_cache(
    cli: &Cli,
    refresh: bool,
    crate_name: Option<&str>,
    dry_run: bool,
    output: Option<&Path>,
) -> Result<()> {
    use sqry_lang_rust::macro_expander::MacroExpander;

    // Fail fast with install instructions when the external tool is absent.
    if !MacroExpander::is_cargo_expand_available() {
        anyhow::bail!(
            "cargo-expand is not installed.\n\
             Install with: cargo install cargo-expand\n\
             \n\
             cargo-expand is required to generate macro expansion output.\n\
             It runs rustc to expand all macros in a crate."
        );
    }

    let workspace_root = discover_workspace_root()?;
    let cache_dir = output
        .map(Path::to_path_buf)
        .unwrap_or_else(|| workspace_root.join(DEFAULT_EXPAND_CACHE_DIR));

    let crates = discover_workspace_crates(&workspace_root)?;

    // Restrict to a single crate when requested; unknown names list the
    // valid alternatives in the error message.
    let target_crates: Vec<_> = if let Some(name) = crate_name {
        let found: Vec<_> = crates.iter().filter(|(n, _)| n == name).cloned().collect();
        if found.is_empty() {
            let available: Vec<_> = crates.iter().map(|(n, _)| n.as_str()).collect();
            anyhow::bail!(
                "Crate '{}' not found in workspace.\nAvailable crates: {}",
                name,
                available.join(", ")
            );
        }
        found
    } else {
        crates
    };

    if dry_run {
        print_dry_run_plan(cli, &target_crates, &cache_dir, refresh)?;
        return Ok(());
    }

    std::fs::create_dir_all(&cache_dir).with_context(|| {
        format!(
            "Failed to create expand cache directory: {}",
            cache_dir.display()
        )
    })?;

    // Expand sequentially; each result records whether the cache was
    // reused or why the crate was skipped.
    let mut results = Vec::new();
    for (name, path) in &target_crates {
        let result = expand_single_crate(name, path, &workspace_root, &cache_dir, refresh)?;
        results.push(result);
    }

    print_expand_results(cli, &results, &cache_dir)?;

    Ok(())
}
456
457fn discover_workspace_root() -> Result<PathBuf> {
459 let output = std::process::Command::new("cargo")
460 .args(["metadata", "--format-version=1", "--no-deps"])
461 .output()
462 .context("Failed to run cargo metadata")?;
463
464 if !output.status.success() {
465 let stderr = String::from_utf8_lossy(&output.stderr);
466 anyhow::bail!("cargo metadata failed: {stderr}");
467 }
468
469 let metadata: serde_json::Value =
470 serde_json::from_slice(&output.stdout).context("Failed to parse cargo metadata output")?;
471
472 let root = metadata["workspace_root"]
473 .as_str()
474 .context("workspace_root not found in cargo metadata")?;
475
476 Ok(PathBuf::from(root))
477}
478
479fn discover_workspace_crates(workspace_root: &Path) -> Result<Vec<(String, PathBuf)>> {
481 let output = std::process::Command::new("cargo")
482 .args(["metadata", "--format-version=1", "--no-deps"])
483 .current_dir(workspace_root)
484 .output()
485 .context("Failed to run cargo metadata")?;
486
487 if !output.status.success() {
488 let stderr = String::from_utf8_lossy(&output.stderr);
489 anyhow::bail!("cargo metadata failed: {stderr}");
490 }
491
492 let metadata: serde_json::Value =
493 serde_json::from_slice(&output.stdout).context("Failed to parse cargo metadata")?;
494
495 let packages = metadata["packages"]
496 .as_array()
497 .context("No packages in workspace")?;
498
499 let mut crates = Vec::new();
500 for pkg in packages {
501 let name = pkg["name"].as_str().unwrap_or("<unknown>").to_string();
502 let manifest_path = pkg["manifest_path"]
503 .as_str()
504 .map(PathBuf::from)
505 .unwrap_or_default();
506 let crate_dir = manifest_path
508 .parent()
509 .unwrap_or(workspace_root)
510 .to_path_buf();
511 crates.push((name, crate_dir));
512 }
513
514 crates.sort_by(|a, b| a.0.cmp(&b.0));
515 Ok(crates)
516}
517
518fn compute_source_hash(crate_dir: &Path) -> Result<String> {
520 use sha2::{Digest, Sha256};
521 use walkdir::WalkDir;
522
523 let mut hasher = Sha256::new();
524 let mut file_count = 0u64;
525
526 let mut paths: Vec<PathBuf> = WalkDir::new(crate_dir)
527 .into_iter()
528 .filter_map(std::result::Result::ok)
529 .filter(|e| e.file_type().is_file() && e.path().extension().is_some_and(|ext| ext == "rs"))
530 .map(|e| e.into_path())
531 .collect();
532
533 paths.sort();
535
536 for path in &paths {
537 let content =
538 std::fs::read(path).with_context(|| format!("Failed to read {}", path.display()))?;
539 hasher.update(&content);
540 file_count += 1;
541 }
542
543 hasher.update(file_count.to_le_bytes());
545
546 Ok(format!("{:x}", hasher.finalize()))
547}
548
549fn is_cache_fresh(cache_path: &Path, current_hash: &str) -> bool {
551 let Ok(content) = std::fs::read_to_string(cache_path) else {
552 return false;
553 };
554 let Ok(entry) = serde_json::from_str::<ExpandCacheEntry>(&content) else {
555 return false;
556 };
557 entry.source_hash == current_hash
558}
559
/// Expands macros for one crate and writes the result to the expand cache.
///
/// Skips work when the cache entry is fresh (unless `refresh`) or when the
/// expansion output exceeds `MAX_EXPANSION_SIZE_BYTES`. "Generated"
/// symbols are those present after expansion but absent from the original
/// sources — a heuristic, recorded as such in the entry's `confidence`.
fn expand_single_crate(
    crate_name: &str,
    crate_dir: &Path,
    workspace_root: &Path,
    cache_dir: &Path,
    refresh: bool,
) -> Result<CrateExpandResult> {
    // Hash the crate's sources to decide whether the cache is stale.
    let source_hash = compute_source_hash(crate_dir)
        .with_context(|| format!("Failed to compute source hash for {crate_name}"))?;

    // One JSON cache file per crate.
    let cache_file = cache_dir.join(format!("{crate_name}.json"));
    if !refresh && is_cache_fresh(&cache_file, &source_hash) {
        return Ok(CrateExpandResult {
            crate_name: crate_name.to_string(),
            symbols_found: 0,
            generated_symbols: 0,
            cached: true,
            skipped_reason: Some("cache is fresh".to_string()),
        });
    }

    let expand_output = run_cargo_expand(crate_name, crate_dir)?;

    // Guard against pathological expansion sizes before processing further.
    if expand_output.len() > MAX_EXPANSION_SIZE_BYTES {
        return Ok(CrateExpandResult {
            crate_name: crate_name.to_string(),
            symbols_found: 0,
            generated_symbols: 0,
            cached: false,
            skipped_reason: Some(format!(
                "expansion output too large ({} bytes, limit {})",
                expand_output.len(),
                MAX_EXPANSION_SIZE_BYTES
            )),
        });
    }

    let expanded_symbols = extract_rust_symbols_from_source(&expand_output);

    let original_symbols = collect_original_symbols(crate_dir)?;

    // Generated = expanded minus original, restricted to plausible names.
    let generated: Vec<String> = expanded_symbols
        .iter()
        .filter(|s| !original_symbols.contains(s))
        .filter(|s| is_valid_symbol_name(s))
        .cloned()
        .collect();

    let generated_count = generated.len();
    let total_expanded = expanded_symbols.len();

    // NOTE(review): all symbols are attributed to `src/lib.rs` regardless
    // of their real defining file — confirm whether per-file attribution
    // is planned.
    let relative_src = crate_dir
        .strip_prefix(workspace_root)
        .unwrap_or(crate_dir)
        .join("src/lib.rs");

    let entry = ExpandCacheEntry {
        crate_name: crate_name.to_string(),
        rust_version: get_rust_version(),
        generated_at: chrono_now_utc(),
        source_hash,
        files: {
            let mut map = HashMap::new();
            map.insert(
                relative_src.to_string_lossy().to_string(),
                ExpandCacheFileEntry {
                    original_symbols: original_symbols.into_iter().collect(),
                    expanded_symbols: expanded_symbols.into_iter().collect(),
                    generated_symbols: generated,
                    confidence: "heuristic".to_string(),
                },
            );
            map
        },
    };

    let json =
        serde_json::to_string_pretty(&entry).context("Failed to serialize expand cache entry")?;
    std::fs::write(&cache_file, json)
        .with_context(|| format!("Failed to write cache file: {}", cache_file.display()))?;

    Ok(CrateExpandResult {
        crate_name: crate_name.to_string(),
        symbols_found: total_expanded,
        generated_symbols: generated_count,
        cached: false,
        skipped_reason: None,
    })
}
660
/// Runs `cargo expand --lib` in `crate_dir`, falling back to plain
/// `cargo expand` (e.g. for crates without a library target) when `--lib`
/// fails.
///
/// # Errors
/// Returns an error when both invocations fail, reporting the first
/// stderr line of each attempt.
fn run_cargo_expand(crate_name: &str, crate_dir: &Path) -> Result<String> {
    let output = std::process::Command::new("cargo")
        .args(["expand", "--lib"])
        .current_dir(crate_dir)
        .output()
        .with_context(|| format!("Failed to execute cargo expand for {crate_name}"))?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        // Retry without --lib before giving up.
        let output2 = std::process::Command::new("cargo")
            .arg("expand")
            .current_dir(crate_dir)
            .output()
            .with_context(|| format!("Failed to execute cargo expand for {crate_name}"))?;

        if !output2.status.success() {
            let stderr2 = String::from_utf8_lossy(&output2.stderr);
            anyhow::bail!(
                "cargo expand failed for '{crate_name}':\n --lib: {}\n default: {}",
                stderr.lines().next().unwrap_or("unknown error"),
                stderr2.lines().next().unwrap_or("unknown error")
            );
        }
        return Ok(String::from_utf8_lossy(&output2.stdout).to_string());
    }

    Ok(String::from_utf8_lossy(&output.stdout).to_string())
}
691
/// Declaration keywords recognized by the line-based symbol scanner,
/// checked in order (first match wins per line).
const DECL_KEYWORDS: [&str; 8] = [
    "fn ", "struct ", "enum ", "trait ", "type ", "const ", "static ", "mod ",
];

/// Extracts top-level declaration names from Rust source with a simple
/// line-based scan (no real parsing). Returns a sorted, deduplicated list.
///
/// Blank lines and lines opening with `//` or `/*` are ignored.
fn extract_rust_symbols_from_source(source: &str) -> Vec<String> {
    let mut symbols: Vec<String> = source
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty() && !line.starts_with("//") && !line.starts_with("/*"))
        .filter_map(|line| {
            DECL_KEYWORDS
                .iter()
                .find_map(|&keyword| extract_decl_name(line, keyword))
        })
        .collect();

    symbols.sort();
    symbols.dedup();
    symbols
}

/// Returns the identifier declared on `line` by `keyword` (e.g. "fn "),
/// after stripping at most one visibility qualifier, at most one
/// `async `/`unsafe ` qualifier, and a leading `const ` (for `const fn`),
/// or `None` when the line does not declare with that keyword.
fn extract_decl_name(line: &str, keyword: &str) -> Option<String> {
    // Strip at most one visibility qualifier, checked in this order.
    let mut rest = line;
    for vis in ["pub(crate) ", "pub(super) ", "pub(in ", "pub "] {
        if let Some(tail) = rest.strip_prefix(vis) {
            rest = tail;
            break;
        }
    }

    // Strip at most one `async ` or `unsafe ` qualifier.
    for qualifier in ["async ", "unsafe "] {
        if let Some(tail) = rest.strip_prefix(qualifier) {
            rest = tail;
            break;
        }
    }

    // Drop `const ` (as in `const fn`) unless `const` itself is the
    // keyword being searched for.
    if keyword != "const " {
        if let Some(tail) = rest.strip_prefix("const ") {
            rest = tail;
        }
    }

    let body = rest.strip_prefix(keyword)?;
    let name: String = body
        .chars()
        .take_while(|c| c.is_alphanumeric() || *c == '_')
        .collect();

    if name.is_empty() { None } else { Some(name) }
}
768
769fn collect_original_symbols(crate_dir: &Path) -> Result<Vec<String>> {
771 use walkdir::WalkDir;
772
773 let mut all_symbols = Vec::new();
774
775 for entry in WalkDir::new(crate_dir)
776 .into_iter()
777 .filter_map(std::result::Result::ok)
778 .filter(|e| e.file_type().is_file() && e.path().extension().is_some_and(|ext| ext == "rs"))
779 {
780 let content = std::fs::read_to_string(entry.path())
781 .with_context(|| format!("Failed to read {}", entry.path().display()))?;
782 let symbols = extract_rust_symbols_from_source(&content);
783 all_symbols.extend(symbols);
784 }
785
786 all_symbols.sort();
787 all_symbols.dedup();
788 Ok(all_symbols)
789}
790
/// Returns the trimmed `rustc --version` string, or "unknown" when the
/// compiler cannot be spawned, exits unsuccessfully, or emits non-UTF-8.
fn get_rust_version() -> String {
    match std::process::Command::new("rustc").arg("--version").output() {
        Ok(out) if out.status.success() => String::from_utf8(out.stdout)
            .map(|v| v.trim().to_string())
            .unwrap_or_else(|_| "unknown".to_string()),
        _ => "unknown".to_string(),
    }
}
807
/// Returns the current time as seconds-since-epoch with a 'Z' suffix.
///
/// NOTE(review): despite the name this is NOT an RFC 3339/ISO timestamp —
/// it is e.g. "1234567890Z". Existing cache files already store this
/// format, so it is preserved as-is.
fn chrono_now_utc() -> String {
    use std::time::{SystemTime, UNIX_EPOCH};

    // A pre-epoch clock degrades to 0 rather than panicking.
    let secs = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap_or_default()
        .as_secs();
    format!("{secs}Z")
}
817
/// Prints what `cache expand` would do, without expanding anything.
///
/// Recomputes each crate's source hash to report whether its cache entry
/// is fresh and whether it would be (re-)expanded under `refresh`.
fn print_dry_run_plan(
    cli: &Cli,
    crates: &[(String, PathBuf)],
    cache_dir: &Path,
    refresh: bool,
) -> Result<()> {
    if cli.json {
        let plan = serde_json::json!({
            "action": "expand",
            "dry_run": true,
            "refresh": refresh,
            "cache_dir": cache_dir.display().to_string(),
            "crates": crates.iter().map(|(name, path)| {
                // A hash failure degrades to "" so the entry reads as stale.
                let hash = compute_source_hash(path).unwrap_or_default();
                let cache_file = cache_dir.join(format!("{name}.json"));
                let fresh = is_cache_fresh(&cache_file, &hash);
                serde_json::json!({
                    "name": name,
                    "path": path.display().to_string(),
                    "cache_fresh": fresh,
                    "would_expand": refresh || !fresh,
                })
            }).collect::<Vec<_>>(),
        });
        println!("{}", serde_json::to_string_pretty(&plan)?);
    } else {
        println!("Macro Expansion Plan (Dry Run)");
        println!("==============================");
        println!();
        println!("Cache directory: {}", cache_dir.display());
        println!(
            "Refresh mode: {}",
            if refresh { "force" } else { "incremental" }
        );
        println!();
        println!("Crates ({}):", crates.len());

        for (name, path) in crates {
            let hash = compute_source_hash(path).unwrap_or_default();
            let cache_file = cache_dir.join(format!("{name}.json"));
            let fresh = is_cache_fresh(&cache_file, &hash);

            // `refresh` forces expansion even when the cache is fresh.
            let status = if fresh && !refresh {
                "skip (cache fresh)"
            } else if fresh && refresh {
                "expand (--refresh)"
            } else {
                "expand (no cache)"
            };

            println!(" {name:30} {status}");
        }

        println!();
        println!("Run without --dry-run to execute expansion.");
    }

    Ok(())
}
878
/// Renders per-crate expansion results, as JSON (`--json`) or as a
/// human-readable list with an expanded/skipped summary.
fn print_expand_results(cli: &Cli, results: &[CrateExpandResult], cache_dir: &Path) -> Result<()> {
    if cli.json {
        let json = serde_json::json!({
            "cache_dir": cache_dir.display().to_string(),
            "results": results.iter().map(|r| {
                serde_json::json!({
                    "crate": r.crate_name,
                    "symbols_found": r.symbols_found,
                    "generated_symbols": r.generated_symbols,
                    "cached": r.cached,
                    "skipped_reason": r.skipped_reason,
                })
            }).collect::<Vec<_>>(),
        });
        println!("{}", serde_json::to_string_pretty(&json)?);
    } else {
        println!("Macro Expansion Results");
        println!("=======================");
        println!();
        println!("Cache directory: {}", cache_dir.display());
        println!();

        // Tally expanded vs skipped crates for the summary footer.
        let mut expanded = 0;
        let mut skipped = 0;
        let mut total_generated = 0;

        for r in results {
            if let Some(reason) = &r.skipped_reason {
                println!(" {}: skipped ({reason})", r.crate_name);
                skipped += 1;
            } else {
                println!(
                    " {}: {} symbols ({} generated)",
                    r.crate_name, r.symbols_found, r.generated_symbols
                );
                expanded += 1;
                total_generated += r.generated_symbols;
            }
        }

        println!();
        println!("Summary:");
        println!(" Expanded: {expanded}");
        println!(" Skipped: {skipped}");
        println!(" Total generated symbols: {total_generated}");
    }

    Ok(())
}
929
930#[cfg(test)]
931mod tests {
932 use super::*;
933
    #[test]
    fn test_is_valid_symbol_name() {
        // Path-qualified, generic, and plain names pass; shell
        // metacharacters and control characters are rejected.
        assert!(is_valid_symbol_name("MyStruct"));
        assert!(is_valid_symbol_name("my_crate::MyStruct"));
        assert!(is_valid_symbol_name("my_crate::<MyStruct as Debug>::fmt"));
        assert!(is_valid_symbol_name("some_fn"));
        assert!(is_valid_symbol_name("CONSTANT_NAME"));
        assert!(!is_valid_symbol_name("bad\x00name"));
        assert!(!is_valid_symbol_name("bad\nname"));
        assert!(!is_valid_symbol_name("$(evil)"));
        assert!(!is_valid_symbol_name("`backtick`"));
        assert!(!is_valid_symbol_name("semi;colon"));
        assert!(!is_valid_symbol_name("pipe|char"));
    }
950
    #[test]
    fn test_extract_decl_name_fn() {
        // Visibility and `async` qualifiers are stripped before `fn ` matches.
        assert_eq!(
            extract_decl_name("fn main() {", "fn "),
            Some("main".to_string())
        );
        assert_eq!(
            extract_decl_name("pub fn foo() {", "fn "),
            Some("foo".to_string())
        );
        assert_eq!(
            extract_decl_name("pub(crate) fn bar() {", "fn "),
            Some("bar".to_string())
        );
        assert_eq!(
            extract_decl_name("async fn baz() {", "fn "),
            Some("baz".to_string())
        );
        assert_eq!(
            extract_decl_name("pub async fn qux() {", "fn "),
            Some("qux".to_string())
        );
    }
974
    #[test]
    fn test_extract_decl_name_struct() {
        // Struct declarations with and without visibility qualifiers.
        assert_eq!(
            extract_decl_name("struct Foo {", "struct "),
            Some("Foo".to_string())
        );
        assert_eq!(
            extract_decl_name("pub struct Bar;", "struct "),
            Some("Bar".to_string())
        );
    }
986
    #[test]
    fn test_extract_decl_name_no_match() {
        // Non-declaration lines and commented-out declarations yield None.
        assert_eq!(extract_decl_name("let x = 5;", "fn "), None);
        assert_eq!(extract_decl_name("// fn foo", "fn "), None);
    }
992
    #[test]
    fn test_extract_rust_symbols_from_source() {
        // One declaration of each supported kind should be picked up.
        let source = r#"
pub fn hello() {}
struct MyStruct {
    field: i32,
}
enum Color { Red, Green, Blue }
const MAX: usize = 100;
mod inner {}
"#;
        let symbols = extract_rust_symbols_from_source(source);
        assert!(symbols.contains(&"hello".to_string()));
        assert!(symbols.contains(&"MyStruct".to_string()));
        assert!(symbols.contains(&"Color".to_string()));
        assert!(symbols.contains(&"MAX".to_string()));
        assert!(symbols.contains(&"inner".to_string()));
    }
1011
    #[test]
    fn test_expand_cache_entry_roundtrip() {
        // Serialize-then-deserialize must preserve every field.
        let entry = ExpandCacheEntry {
            crate_name: "test_crate".to_string(),
            rust_version: "rustc 1.94.0".to_string(),
            generated_at: "1234567890Z".to_string(),
            source_hash: "abc123".to_string(),
            files: {
                let mut map = HashMap::new();
                map.insert(
                    "src/lib.rs".to_string(),
                    ExpandCacheFileEntry {
                        original_symbols: vec!["Foo".to_string()],
                        expanded_symbols: vec!["Foo".to_string(), "Foo_fmt".to_string()],
                        generated_symbols: vec!["Foo_fmt".to_string()],
                        confidence: "heuristic".to_string(),
                    },
                );
                map
            },
        };

        let json = serde_json::to_string_pretty(&entry).unwrap();
        let parsed: ExpandCacheEntry = serde_json::from_str(&json).unwrap();
        assert_eq!(parsed.crate_name, "test_crate");
        assert_eq!(parsed.source_hash, "abc123");
        assert_eq!(parsed.files.len(), 1);

        let file_entry = parsed.files.get("src/lib.rs").unwrap();
        assert_eq!(file_entry.generated_symbols, vec!["Foo_fmt"]);
    }
1043
    #[test]
    fn test_is_cache_fresh_nonexistent() {
        // A missing cache file is never fresh.
        assert!(!is_cache_fresh(
            Path::new("/nonexistent/cache.json"),
            "abc123"
        ));
    }
1051
    #[test]
    fn test_is_cache_fresh_matching_hash() {
        // Freshness requires the stored source_hash to match exactly.
        let dir = tempfile::tempdir().unwrap();
        let cache_path = dir.path().join("test.json");
        let entry = ExpandCacheEntry {
            crate_name: "test".to_string(),
            rust_version: "1.94.0".to_string(),
            generated_at: "0Z".to_string(),
            source_hash: "hash123".to_string(),
            files: HashMap::new(),
        };
        let json = serde_json::to_string(&entry).unwrap();
        std::fs::write(&cache_path, json).unwrap();

        assert!(is_cache_fresh(&cache_path, "hash123"));
        assert!(!is_cache_fresh(&cache_path, "different_hash"));
    }
1069
    #[test]
    fn test_compute_source_hash_deterministic() {
        // Two hashes over unchanged sources must be identical and non-empty.
        let dir = tempfile::tempdir().unwrap();
        let src_dir = dir.path().join("src");
        std::fs::create_dir_all(&src_dir).unwrap();
        std::fs::write(src_dir.join("lib.rs"), "fn main() {}").unwrap();
        std::fs::write(src_dir.join("helper.rs"), "fn helper() {}").unwrap();

        let hash1 = compute_source_hash(dir.path()).unwrap();
        let hash2 = compute_source_hash(dir.path()).unwrap();
        assert_eq!(hash1, hash2, "Hashes should be deterministic");
        assert!(!hash1.is_empty());
    }
1083
1084 #[test]
1085 fn test_compute_source_hash_changes_on_modification() {
1086 let dir = tempfile::tempdir().unwrap();
1087 std::fs::write(dir.path().join("lib.rs"), "fn main() {}").unwrap();
1088
1089 let hash1 = compute_source_hash(dir.path()).unwrap();
1090
1091 std::fs::write(dir.path().join("lib.rs"), "fn main() { println!() }").unwrap();
1092 let hash2 = compute_source_hash(dir.path()).unwrap();
1093
1094 assert_ne!(hash1, hash2, "Hash should change when source changes");
1095 }
1096}