use anyhow::{Context, Result, anyhow};
use ignore::WalkBuilder;
use macroforge_ts::host::MacroExpander;
use sha2::{Digest, Sha256};
use std::{
collections::HashMap,
fs,
path::{Path, PathBuf},
};
use crate::wrappers::{TYPE_REGISTRY_CACHE_PATH, ensure_type_registry_cache};
/// On-disk manifest (`manifest.json`) describing the expansion-cache state
/// for one project root. Serialized with camelCase keys.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct CacheManifest {
    // macroforge version that wrote the cache; a mismatch invalidates it
    // (see init_cache).
    pub(crate) version: String,
    // Hash of the macroforge config file, or "none" when absent.
    pub(crate) config_hash: String,
    // Hash of external macro package binaries; defaults to "" when loading
    // manifests written before this field existed.
    #[serde(default)]
    pub(crate) external_macro_hash: String,
    // Per-file cache state, keyed by path relative to the project root.
    pub(crate) entries: HashMap<String, CacheEntry>,
}
/// Cached expansion state for a single source file.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct CacheEntry {
    // SHA-256 hex digest of the file's exact bytes.
    pub(crate) source_hash: String,
    // True when the file contained macro annotations and was expanded.
    pub(crate) has_macros: bool,
    // Whitespace-insensitive digest (see normalized_content_hash); "" for
    // entries written by older versions that lacked this field.
    #[serde(default)]
    pub(crate) normalized_hash: String,
}
pub(crate) fn content_hash(content: &[u8]) -> String {
let mut hasher = Sha256::new();
hasher.update(content);
let result = hasher.finalize();
result.iter().fold(String::with_capacity(64), |mut s, b| {
use std::fmt::Write;
let _ = write!(s, "{b:02x}");
s
})
}
/// Hashes `content` after normalizing whitespace: trailing whitespace is
/// stripped from each line, runs of blank lines collapse to one, and leading
/// and trailing blank lines are dropped entirely.
pub(crate) fn normalized_content_hash(content: &str) -> String {
    let mut out = String::with_capacity(content.len());
    let mut last_was_blank = false;
    for raw in content.lines() {
        let line = raw.trim_end();
        let blank = line.is_empty();
        // Skip leading blanks and any blank that follows another blank.
        if blank && (last_was_blank || out.is_empty()) {
            continue;
        }
        if !out.is_empty() {
            out.push('\n');
        }
        // A surviving blank line contributes only its separator newline.
        out.push_str(line);
        last_was_blank = blank;
    }
    content_hash(out.trim_end_matches('\n').as_bytes())
}
/// Config file names probed under the project root, in priority order
/// (see compute_config_hash — the first readable one wins).
pub(crate) const CONFIG_FILE_NAMES: &[&str] = &[
    "macroforge.config.ts",
    "macroforge.config.mts",
    "macroforge.config.js",
    "macroforge.config.mjs",
    "macroforge.config.cjs",
];
/// Hashes the first readable macroforge config file under `root`, or returns
/// `"none"` when no config file exists.
pub(crate) fn compute_config_hash(root: &Path) -> String {
    CONFIG_FILE_NAMES
        .iter()
        .find_map(|name| fs::read(root.join(name)).ok())
        .map(|bytes| content_hash(&bytes))
        .unwrap_or_else(|| "none".to_string())
}
pub(crate) fn compute_external_macro_hash(root: &Path) -> String {
let node_modules = root.join("node_modules");
if !node_modules.exists() {
return "none".to_string();
}
let mut parts: Vec<String> = Vec::new();
let mut check_package = |pkg_dir: &Path| {
let index_js = pkg_dir.join("index.js");
let is_macro_pkg = fs::read_to_string(&index_js)
.map(|content| content.contains("__macroforgeRun"))
.unwrap_or(false);
if !is_macro_pkg {
return;
}
let extensions = ["node", "wasm"];
if let Ok(entries) = fs::read_dir(pkg_dir) {
for entry in entries.flatten() {
let path = entry.path();
if !path.is_file() {
continue;
}
let is_binary = path
.extension()
.and_then(|e| e.to_str())
.is_some_and(|ext| extensions.contains(&ext));
let is_entry = path.file_name().is_some_and(|n| n == "index.js");
if !is_binary && !is_entry {
continue;
}
if let Ok(meta) = fs::metadata(&path) {
use std::fmt::Write;
let mut buf = String::new();
let _ = write!(
buf,
"{}:{}:{}",
path.display(),
meta.len(),
meta.modified()
.map(|t| t
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs())
.unwrap_or(0)
);
parts.push(buf);
}
}
}
};
if let Ok(entries) = fs::read_dir(&node_modules) {
for entry in entries.flatten() {
let path = entry.path();
if path.is_dir() {
let name = entry.file_name();
let name_str = name.to_string_lossy();
if name_str.starts_with('@') {
if let Ok(scoped) = fs::read_dir(&path) {
for sub in scoped.flatten() {
if sub.path().is_dir() {
check_package(&sub.path());
}
}
}
} else if !name_str.starts_with('.') {
check_package(&path);
}
}
}
}
if parts.is_empty() {
return "none".to_string();
}
parts.sort();
let mut hasher = Sha256::new();
for part in &parts {
hasher.update(part.as_bytes());
}
let result = hasher.finalize();
result.iter().fold(String::with_capacity(64), |mut s, b| {
use std::fmt::Write;
let _ = write!(s, "{b:02x}");
s
})
}
impl CacheManifest {
    /// Builds an empty manifest stamped with the given invalidation keys.
    pub(crate) fn new(version: String, config_hash: String, external_macro_hash: String) -> Self {
        Self {
            version,
            config_hash,
            external_macro_hash,
            entries: HashMap::new(),
        }
    }
    /// Reads `manifest.json` from `cache_dir`; any I/O or parse failure is
    /// treated as "no manifest" and yields `None`.
    pub(crate) fn load(cache_dir: &Path) -> Option<Self> {
        let raw = fs::read_to_string(cache_dir.join("manifest.json")).ok()?;
        serde_json::from_str(&raw).ok()
    }
    /// Persists the manifest, writing to a temp file first and renaming over
    /// `manifest.json` so readers never observe a half-written file.
    pub(crate) fn save(&self, cache_dir: &Path) -> Result<()> {
        fs::create_dir_all(cache_dir)?;
        let tmp_path = cache_dir.join(".manifest.json.tmp");
        fs::write(&tmp_path, serde_json::to_string_pretty(self)?)?;
        fs::rename(&tmp_path, cache_dir.join("manifest.json"))?;
        Ok(())
    }
}
/// Writes `expanded_code` to `<cache_dir>/<rel_path>.cache`, creating any
/// intermediate directories so the cache mirrors the source tree.
pub(crate) fn write_cache_file(
    cache_dir: &Path,
    rel_path: &str,
    expanded_code: &str,
) -> Result<()> {
    let target = cache_dir.join(format!("{rel_path}.cache"));
    if let Some(dir) = target.parent() {
        fs::create_dir_all(dir)?;
    }
    fs::write(&target, expanded_code)?;
    Ok(())
}
/// Decides whether `path` is a TypeScript source file the watcher should
/// track: `.ts`/`.tsx`, not a declaration file, not macro-expanded output,
/// and not inside `node_modules` or the `.macroforge` cache.
pub(crate) fn is_watchable_ts_file(path: &Path, root: &Path) -> bool {
    match path.extension().and_then(|e| e.to_str()) {
        Some("ts") | Some("tsx") => {}
        _ => return false,
    }
    let file_name = path.file_name().unwrap_or_default().to_string_lossy();
    // Skip declaration files and previously expanded outputs.
    if file_name.ends_with(".d.ts") || file_name.contains(".expanded.") {
        return false;
    }
    // Substring match on the root-relative path (matches prior behavior —
    // any component containing these names is excluded).
    let rel = path.strip_prefix(root).unwrap_or(path).to_string_lossy();
    !(rel.contains("node_modules") || rel.contains(".macroforge"))
}
/// Walks `root` (honoring `.gitignore`, including hidden dirs, ignoring
/// global/exclude git files) and returns every watchable TypeScript file.
pub(crate) fn collect_watch_files(root: &Path) -> Vec<PathBuf> {
    WalkBuilder::new(root)
        .hidden(false)
        .git_ignore(true)
        .git_global(false)
        .git_exclude(false)
        .build()
        .flatten()
        .filter(|entry| {
            let p = entry.path();
            p.is_file() && is_watchable_ts_file(p, root)
        })
        .map(|entry| entry.into_path())
        .collect()
}
/// Returns true when `source` contains a `@derive(...)` macro annotation,
/// ignoring occurrences inside fenced (```) code blocks in comments.
pub(crate) fn has_macro_annotations(source: &str) -> bool {
    // Fast path: bail unless the marker appears anywhere at all.
    if !source.contains("@derive") {
        return false;
    }
    let mut inside_fence = false;
    source.lines().any(|line| {
        // Peel comment punctuation so "// @derive(" and "* @derive(" match.
        let stripped = line
            .trim()
            .trim_start_matches('/')
            .trim_start_matches('*')
            .trim_end_matches('/')
            .trim_end_matches('*')
            .trim();
        if stripped.starts_with("```") {
            inside_fence = !inside_fence;
            return false;
        }
        !inside_fence && stripped.starts_with("@derive(")
    })
}
/// Expands macros in `source` (identified by `path` for diagnostics/config
/// lookup) for caching.
///
/// Returns `Ok(None)` when the file has no `@derive(...)` annotations or the
/// expansion produced no changes, and `Ok(Some(code))` with the expanded
/// source otherwise.
///
/// # Errors
/// Fails when the macro expander cannot be initialized or expansion itself
/// fails.
pub(crate) fn expand_for_cache(path: &Path, source: &str) -> Result<Option<String>> {
    if !has_macro_annotations(source) {
        return Ok(None);
    }
    use macroforge_ts::host::MacroforgeConfigLoader;
    if let Ok(Some(config)) = MacroforgeConfigLoader::find_from_path(path) {
        macroforge_ts::host::set_foreign_types(config.foreign_types.clone());
    }
    // Run all fallible work in a closure so the process-global registry and
    // foreign-type state are ALWAYS cleared afterwards. Previously, an error
    // from `?` returned early and leaked that state into later files.
    let outcome = (|| -> Result<_> {
        let mut expander = MacroExpander::new().context("failed to initialize macro expander")?;
        let registry_path = TYPE_REGISTRY_CACHE_PATH.lock().unwrap().clone();
        if let Some(ref rp) = registry_path
            && let Ok(json) = fs::read_to_string(rp)
            && let Ok(registry) = serde_json::from_str::<
                macroforge_ts::ts_syn::abi::ir::type_registry::TypeRegistry,
            >(&json)
        {
            expander.set_type_registry(Some(registry));
        }
        expander
            .expand_source(source, &path.display().to_string())
            .map_err(|err| anyhow!("{err:?}"))
    })();
    macroforge_ts::host::clear_registry();
    macroforge_ts::host::clear_foreign_types();
    let expansion = outcome?;
    if expansion.changed {
        Ok(Some(expansion.code))
    } else {
        Ok(None)
    }
}
/// Scans `root` for watchable TypeScript files, expands macros in any file
/// whose content hash changed since the last run, writes expanded output into
/// `cache_dir`, and saves the updated manifest.
///
/// `label` only tags log output. Files whose source hash matches the manifest
/// are skipped; the rest are expanded in parallel on a rayon pool.
pub(crate) fn warm_cache(
    label: &str,
    root: &Path,
    cache_dir: &Path,
    manifest: &mut CacheManifest,
) -> Result<()> {
    use rayon::prelude::*;
    eprintln!("[macroforge {label}] Warming cache for {}", root.display());
    ensure_type_registry_cache();
    let start = std::time::Instant::now();
    let files = collect_watch_files(root);
    let mut expanded_count = 0u32;
    // (path, rel_path, source, source_hash, normalized_hash) for each file
    // that needs (re-)expansion.
    let mut files_to_expand: Vec<(PathBuf, String, String, String, String)> = Vec::new();
    for file_path in &files {
        let rel_path = file_path
            .strip_prefix(root)
            .unwrap_or(file_path)
            .to_string_lossy()
            .to_string();
        // Unreadable files are silently skipped; the walker already saw them.
        let source = match fs::read_to_string(file_path) {
            Ok(s) => s,
            Err(_) => continue,
        };
        let source_hash = content_hash(source.as_bytes());
        if let Some(entry) = manifest.entries.get(&rel_path)
            && entry.source_hash == source_hash
        {
            // Unchanged file: backfill normalized_hash for manifests written
            // before that field existed (serde default is "").
            if entry.normalized_hash.is_empty() {
                let norm_hash = normalized_content_hash(&source);
                manifest.entries.insert(
                    rel_path,
                    CacheEntry {
                        source_hash,
                        has_macros: entry.has_macros,
                        normalized_hash: norm_hash,
                    },
                );
            }
            continue;
        }
        let norm_hash = normalized_content_hash(&source);
        files_to_expand.push((file_path.clone(), rel_path, source, source_hash, norm_hash));
    }
    // Expansion is CPU-bound; fan out over a dedicated rayon pool and collect
    // per-file results (hash metadata plus the expansion outcome).
    let pool = rayon::ThreadPoolBuilder::new().build()?;
    let results: Vec<_> = pool.install(|| {
        files_to_expand
            .par_iter()
            .map(|(file_path, rel_path, source, source_hash, norm_hash)| {
                let result = expand_for_cache(file_path, source);
                (
                    rel_path.clone(),
                    source_hash.clone(),
                    norm_hash.clone(),
                    result,
                )
            })
            .collect()
    });
    for (rel_path, source_hash, norm_hash, result) in results {
        match result {
            Ok(Some(expanded)) => {
                // A failed write leaves the old manifest entry intact so the
                // file is retried on the next warm.
                if let Err(e) = write_cache_file(cache_dir, &rel_path, &expanded) {
                    eprintln!(" [!] {} — write failed: {}", rel_path, e);
                    continue;
                }
                manifest.entries.insert(
                    rel_path.clone(),
                    CacheEntry {
                        source_hash,
                        has_macros: true,
                        normalized_hash: norm_hash,
                    },
                );
                expanded_count += 1;
                eprintln!(" [+] {}", rel_path);
            }
            Ok(None) => {
                // No macros (or expansion changed nothing): record the hashes
                // so the file is skipped next run.
                manifest.entries.insert(
                    rel_path,
                    CacheEntry {
                        source_hash,
                        has_macros: false,
                        normalized_hash: norm_hash,
                    },
                );
            }
            Err(e) => {
                // Expansion error: log and leave any previous entry in place.
                eprintln!(" [!] {} — {}", rel_path, e);
            }
        }
    }
    manifest.save(cache_dir)?;
    let elapsed = start.elapsed();
    eprintln!(
        "[macroforge {label}] Cache warm: {} files expanded in {:.1}s ({} total files)",
        expanded_count,
        elapsed.as_secs_f64(),
        files.len()
    );
    Ok(())
}
/// Resolves the project root, computes the invalidation hashes, and loads the
/// existing manifest when it is still valid — otherwise starts a fresh one.
/// Returns `(root, cache_dir, manifest)`.
pub(crate) fn init_cache(
    root: Option<PathBuf>,
    label: &str,
) -> Result<(PathBuf, PathBuf, CacheManifest)> {
    let root = root
        .unwrap_or_else(|| PathBuf::from("."))
        .canonicalize()
        .unwrap_or_else(|_| PathBuf::from("."));
    let cache_dir = root.join(".macroforge").join("cache");
    let version = env!("CARGO_PKG_VERSION").to_string();
    let config_hash = compute_config_hash(&root);
    let external_macro_hash = compute_external_macro_hash(&root);
    // A loaded manifest is only reusable when version, config, and external
    // macro binaries are all unchanged; report the first mismatch found.
    let still_valid = |m: &CacheManifest| {
        if m.version != version {
            eprintln!("[macroforge {label}] Cache invalidated: macroforge version changed");
            false
        } else if m.config_hash != config_hash {
            eprintln!("[macroforge {label}] Cache invalidated: config changed");
            false
        } else if m.external_macro_hash != external_macro_hash {
            eprintln!("[macroforge {label}] Cache invalidated: external macro binary changed");
            false
        } else {
            true
        }
    };
    let manifest = match CacheManifest::load(&cache_dir) {
        Some(existing) if still_valid(&existing) => existing,
        _ => {
            eprintln!("[macroforge {label}] Creating fresh cache");
            CacheManifest::new(
                version.clone(),
                config_hash.clone(),
                external_macro_hash.clone(),
            )
        }
    };
    Ok((root, cache_dir, manifest))
}
/// CLI entry point: initializes (or reuses) the cache and warms it.
pub fn run_cache(root: Option<PathBuf>) -> Result<()> {
    let (resolved_root, cache_dir, mut manifest) = init_cache(root, "cache")?;
    warm_cache("cache", &resolved_root, &cache_dir, &mut manifest)
}
/// CLI entry point: deletes any existing cache directory, then rebuilds it
/// from scratch.
pub fn run_refresh(root: Option<PathBuf>) -> Result<()> {
    // Resolve the root the same way init_cache will, so we delete exactly the
    // cache directory that init_cache is about to recreate.
    let resolved = match root.clone() {
        Some(p) => p,
        None => PathBuf::from("."),
    };
    let resolved = resolved.canonicalize().unwrap_or_else(|_| PathBuf::from("."));
    let stale_cache = resolved.join(".macroforge").join("cache");
    if stale_cache.exists() {
        eprintln!("[macroforge refresh] Deleting {}", stale_cache.display());
        fs::remove_dir_all(&stale_cache).context("failed to delete .macroforge/cache")?;
    } else {
        eprintln!("[macroforge refresh] No existing cache found, building fresh");
    }
    let (root, cache_dir, mut manifest) = init_cache(root, "refresh")?;
    warm_cache("refresh", &root, &cache_dir, &mut manifest)
}