use std::{
fs,
path::{Path, PathBuf},
time::UNIX_EPOCH,
};
use log::{debug, info};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
/// Version stamp embedded in the serialized cache; `load` rejects caches
/// written with a different version, so bump this on any layout change.
pub const BUILD_CACHE_VERSION: u32 = 10;
/// File name of the serialized cache inside the cache directory.
pub const BUILD_CACHE_FILENAME: &str = "build_cache.bin";
/// Content hash plus filesystem metadata for one asset file, used by
/// `diff_asset_files` to detect changes between builds.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct AssetFileFingerprint {
/// Hash of the file's full contents (see `hash_file_content`).
pub hash: String,
/// Modification time in nanoseconds since the Unix epoch.
pub mtime_ns: u64,
/// File size in bytes.
pub size: u64,
}
impl AssetFileFingerprint {
pub fn from_path(path: &Path) -> Option<Self> {
let hash = hash_file_content(path)?;
let (mtime_ns, size) = file_fingerprint(path)?;
Some(Self {
hash,
mtime_ns,
size,
})
}
}
/// A previously computed asset hash, stored per source path in
/// `BuildCache::persisted_asset_hashes`. Not consumed in this module;
/// presumably the mtime/size pair validates the entry against the file on
/// disk before reusing `asset_hash` — confirm at the call site.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct PersistedAssetHash {
/// Hash of the processing options this result was computed under.
pub options_hash: u64,
/// The resulting asset hash for that options set.
pub asset_hash: String,
/// Source file modification time (ns since Unix epoch) when hashed.
pub mtime_ns: u64,
/// Source file size in bytes when hashed.
pub size: u64,
}
/// Everything persisted between builds to support incremental rebuilds.
/// Serialized with bincode to `BUILD_CACHE_FILENAME` (see `save`/`load`).
#[derive(Serialize, Deserialize, Default)]
pub struct BuildCache {
/// Cache layout version; must equal `BUILD_CACHE_VERSION` to be loaded.
pub version: u32,
/// Identity of the generator binary (see `compute_binary_hash`); a
/// mismatch forces a full build.
pub binary_hash: String,
/// Snapshot of every content source at the end of the last build.
pub content_sources: FxHashMap<String, ContentSourceState>,
/// Per-page dependency records from the last build.
pub pages: FxHashMap<PageKey, PageCacheEntry>,
/// Fingerprints of asset files referenced by the last build.
pub asset_file_hashes: FxHashMap<PathBuf, AssetFileFingerprint>,
/// Scripts that went into the last bundling pass.
pub bundled_scripts: Vec<SerializedAssetRef>,
/// Styles that went into the last bundling pass.
pub bundled_styles: Vec<SerializedAssetRef>,
// The #[serde(default)] fields below tolerate absence when deserializing;
// note a version bump invalidates old caches anyway.
/// Hash of the build options; a mismatch forces a full build.
#[serde(default)]
pub options_hash: String,
/// Static files copied to the output by the last build.
#[serde(default)]
pub static_files: FxHashSet<String>,
/// Output files produced by bundling in the last build.
#[serde(default)]
pub bundled_output_files: FxHashSet<String>,
/// Files referenced via CSS `url()` during the last bundling pass;
/// fingerprint changes trigger a rebundle (see `needs_rebundle`).
#[serde(default)]
pub css_url_dependencies: FxHashMap<PathBuf, AssetFileFingerprint>,
/// Previously computed asset hashes, keyed by source path.
#[serde(default)]
pub persisted_asset_hashes: FxHashMap<PathBuf, Vec<PersistedAssetHash>>,
}
/// Snapshot of one content source (see `compute_content_source_state`).
#[derive(Serialize, Deserialize, Default, Clone)]
pub struct ContentSourceState {
/// Content hash for each file backing the source.
pub files: FxHashMap<PathBuf, String>,
/// Sorted list of entry ids; a change here is a structural change.
pub entry_ids: Vec<String>,
/// Reverse map from file path to owning entry id. Not persisted
/// (`serde(skip)`): rebuilt each run by `compute_content_source_state`.
#[serde(skip)]
pub file_to_entry: FxHashMap<PathBuf, String>,
}
/// Uniquely identifies a rendered page: route pattern, its concrete
/// parameters (kept sorted by name for deterministic equality/hashing),
/// and an optional variant (e.g. a locale).
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash)]
pub struct PageKey {
pub route: String,
/// Sorted `(name, value)` pairs; built via `PageKey::new`.
pub params: Vec<(String, Option<String>)>,
pub variant: Option<String>,
}
impl PageKey {
pub fn new(
route: &str,
params: &FxHashMap<String, Option<String>>,
variant: Option<&str>,
) -> Self {
let mut sorted_params: Vec<(String, Option<String>)> =
params.iter().map(|(k, v)| (k.clone(), v.clone())).collect();
sorted_params.sort_by(|a, b| a.0.cmp(&b.0));
Self {
route: route.to_string(),
params: sorted_params,
variant: variant.map(|s| s.to_string()),
}
}
pub fn new_static(route: &str, variant: Option<&str>) -> Self {
Self {
route: route.to_string(),
params: Vec::new(),
variant: variant.map(|s| s.to_string()),
}
}
}
/// Dependency record for one rendered page, used by `determine_dirty_pages`
/// to decide whether the page must be re-rendered.
#[derive(Serialize, Deserialize, Clone)]
pub struct PageCacheEntry {
/// `(source, entry_id)` pairs the page read individually.
pub content_entries_read: Vec<(String, String)>,
/// Sources the page iterated; any change in such a source dirties it.
pub content_sources_iterated: Vec<String>,
/// Images referenced by the page.
pub images: Vec<CachedImage>,
/// Scripts referenced by the page.
pub scripts: Vec<CachedScript>,
/// Styles referenced by the page.
pub styles: Vec<CachedStyle>,
/// Where the rendered page was written.
pub output_file: PathBuf,
}
/// A script dependency recorded for a page.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash)]
pub struct CachedScript {
pub path: PathBuf,
/// Content hash of the script source.
pub hash: String,
/// Whether the script was included in the page output — exact semantics
/// (inline vs. referenced) are defined by the renderer; confirm there.
pub included: bool,
}
/// A stylesheet dependency recorded for a page.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash)]
pub struct CachedStyle {
pub path: PathBuf,
/// Content hash of the stylesheet source.
pub hash: String,
/// Whether the style was included in the page output — exact semantics
/// are defined by the renderer; confirm there.
pub included: bool,
/// Whether this stylesheet goes through Tailwind processing.
pub tailwind: bool,
}
/// An image dependency recorded for a page.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash)]
pub struct CachedImage {
/// Source path of the image.
pub path: PathBuf,
/// Content hash of the image.
pub hash: String,
/// Output file name (appears to embed the hash, e.g. `logo.<hash>.png`).
pub filename: PathBuf,
}
/// A `(path, content hash)` pair identifying one bundling input; compared
/// set-wise in `needs_rebundle`.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash)]
pub struct SerializedAssetRef {
pub path: PathBuf,
pub hash: String,
}
impl BuildCache {
    /// Loads and validates the cache from `cache_dir`. Returns `None` when
    /// the file is missing, fails to deserialize, or was written by a
    /// different cache version.
    pub fn load(cache_dir: &Path) -> Option<Self> {
        let bytes = fs::read(cache_dir.join(BUILD_CACHE_FILENAME)).ok()?;
        let cache = match bincode::deserialize::<Self>(&bytes) {
            Ok(cache) => cache,
            Err(e) => {
                debug!("Failed to deserialize build cache: {}", e);
                return None;
            }
        };
        if cache.version == BUILD_CACHE_VERSION {
            Some(cache)
        } else {
            debug!(
                "Build cache version mismatch: found {}, expected {}",
                cache.version, BUILD_CACHE_VERSION
            );
            None
        }
    }

    /// Persists the cache: serialize into a `.tmp` sibling first, then
    /// rename over the final file so a crash mid-write never leaves a
    /// truncated cache behind.
    pub fn save(&self, cache_dir: &Path) -> std::io::Result<()> {
        fs::create_dir_all(cache_dir)?;
        let final_path = cache_dir.join(BUILD_CACHE_FILENAME);
        let tmp_path = cache_dir.join(format!("{}.tmp", BUILD_CACHE_FILENAME));
        // Serialization of an in-memory struct has no failure mode we can
        // recover from; a failure here is a bug.
        let encoded = bincode::serialize(self).expect("BuildCache serialization should not fail");
        fs::write(&tmp_path, encoded)?;
        fs::rename(&tmp_path, &final_path)
    }

    /// Cheap identity for the running executable derived from its mtime and
    /// size. Returns an empty string when the executable path or metadata
    /// is unavailable (which then fails the cache's binary-hash check).
    pub fn compute_binary_hash() -> String {
        let metadata = std::env::current_exe()
            .ok()
            .and_then(|exe| fs::metadata(exe).ok());
        let Some(metadata) = metadata else {
            return String::new();
        };
        let modified = metadata
            .modified()
            .ok()
            .and_then(|t| t.duration_since(UNIX_EPOCH).ok())
            .map_or(0, |d| d.as_secs());
        format!("{:x}-{:x}", modified, metadata.len())
    }
}
/// Outcome of `load_incremental_state`: whether this build is full or
/// incremental, the cache carried over (if any), and which pages to rebuild.
pub struct IncrementalState {
pub mode: IncrementalMode,
/// The previous cache, kept only in incremental mode.
pub previous_cache: Option<BuildCache>,
/// Pages that must be re-rendered (ignored in full-build mode, where
/// every page is considered dirty).
pub dirty_pages: FxHashSet<PageKey>,
}
/// Build mode: rebuild everything, or only what the cache diff flagged.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum IncrementalMode {
Full,
Incremental,
}
impl IncrementalState {
pub fn full_build() -> Self {
Self {
mode: IncrementalMode::Full,
previous_cache: None,
dirty_pages: FxHashSet::default(),
}
}
pub fn is_full_build(&self) -> bool {
self.mode == IncrementalMode::Full
}
pub fn is_page_dirty(&self, key: &PageKey) -> bool {
match self.mode {
IncrementalMode::Full => true,
IncrementalMode::Incremental => self.dirty_pages.contains(key),
}
}
}
/// Hashes a byte slice with rapidhash and renders the result as a
/// fixed-width 16-digit lowercase hex string.
pub fn hash_bytes(content: &[u8]) -> String {
    use rapidhash::fast::RapidHasher;
    use std::hash::Hasher;
    let mut state = RapidHasher::default();
    Hasher::write(&mut state, content);
    format!("{:016x}", Hasher::finish(&state))
}
/// Reads the file at `path` and hashes its full contents with `hash_bytes`.
/// Returns `None` if the file cannot be read.
pub fn hash_file_content(path: &Path) -> Option<String> {
    fs::read(path).ok().map(|bytes| hash_bytes(&bytes))
}
/// Snapshots a content source: per-file content hashes, the sorted list of
/// entry ids, and a reverse map from file path to owning entry id.
///
/// For single-file entries whose raw content is already in memory, the
/// first file's hash is computed from that content instead of re-reading
/// the file from disk; all other files are hashed from disk.
pub fn compute_content_source_state(
    entry_file_info: &[(String, Vec<PathBuf>)],
    raw_content_by_entry: &FxHashMap<String, &str>,
) -> ContentSourceState {
    let mut files = FxHashMap::default();
    let mut file_to_entry = FxHashMap::default();
    let mut entry_ids: Vec<String> = Vec::with_capacity(entry_file_info.len());

    for (id, file_paths) in entry_file_info {
        entry_ids.push(id.clone());
        // Only a single-file entry can be hashed from in-memory content.
        let in_memory_hash = match file_paths.as_slice() {
            [_] => raw_content_by_entry
                .get(id.as_str())
                .map(|content| hash_bytes(content.as_bytes())),
            _ => None,
        };
        for (index, file_path) in file_paths.iter().enumerate() {
            let precomputed = if index == 0 { in_memory_hash.clone() } else { None };
            // Fall back to hashing from disk; unreadable files are skipped.
            if let Some(hash) = precomputed.or_else(|| hash_file_content(file_path)) {
                files.insert(file_path.clone(), hash);
                file_to_entry.insert(file_path.clone(), id.clone());
            }
        }
    }

    entry_ids.sort();
    ContentSourceState {
        files,
        entry_ids,
        file_to_entry,
    }
}
/// Compares cached vs. current content-source snapshots.
///
/// Returns:
/// - the names of sources with structural changes (sources added or
///   removed, or a different entry-id list), and
/// - the `(source, entry_id)` pairs whose backing file content changed.
pub fn diff_content_sources(
    cached: &FxHashMap<String, ContentSourceState>,
    current: &FxHashMap<String, ContentSourceState>,
) -> (FxHashSet<String>, FxHashSet<(String, String)>) {
    let mut structurally_changed = FxHashSet::default();
    let mut changed_entries = FxHashSet::default();

    for (name, current_state) in current {
        let Some(cached_state) = cached.get(name) else {
            // Brand-new source: structural change, nothing to diff per file.
            structurally_changed.insert(name.clone());
            continue;
        };
        if cached_state.entry_ids != current_state.entry_ids {
            structurally_changed.insert(name.clone());
        }
        for (file_path, current_hash) in &current_state.files {
            let unchanged = cached_state
                .files
                .get(file_path)
                .map_or(false, |cached_hash| cached_hash == current_hash);
            if unchanged {
                continue;
            }
            // New or modified file: attribute the change to its entry, if
            // the reverse map knows it.
            if let Some(entry_id) = current_state.file_to_entry.get(file_path) {
                changed_entries.insert((name.clone(), entry_id.clone()));
            }
        }
    }

    // Sources that existed before but are gone now are structural changes.
    for name in cached.keys() {
        if !current.contains_key(name) {
            structurally_changed.insert(name.clone());
        }
    }

    (structurally_changed, changed_entries)
}
/// Returns `(mtime_ns, size)` for `path`: modification time in nanoseconds
/// since the Unix epoch (truncated to `u64`) and the file length in bytes.
/// `None` when the metadata or modification time is unavailable.
pub fn file_fingerprint(path: &Path) -> Option<(u64, u64)> {
    let meta = fs::metadata(path).ok()?;
    let modified = meta.modified().ok()?;
    let mtime_ns = modified.duration_since(UNIX_EPOCH).ok()?.as_nanos() as u64;
    Some((mtime_ns, meta.len()))
}
/// Determines which cached asset files have changed on disk.
///
/// Fast path: identical mtime+size means unchanged. When the metadata
/// differs, the content is rehashed and only a real hash mismatch counts
/// as a change. Files whose metadata cannot be read are treated as changed.
pub fn diff_asset_files(cached: &FxHashMap<PathBuf, AssetFileFingerprint>) -> FxHashSet<PathBuf> {
    let mut changed = FxHashSet::default();
    for (path, cached_fp) in cached {
        let is_changed = match file_fingerprint(path) {
            // Same mtime and size: assume the content is unchanged.
            Some((mtime, size)) if mtime == cached_fp.mtime_ns && size == cached_fp.size => false,
            // Metadata moved: rehash to see whether the content truly changed.
            Some(_) => hash_file_content(path).map_or(true, |hash| hash != cached_fp.hash),
            // Missing or unreadable: changed.
            None => true,
        };
        if is_changed {
            changed.insert(path.clone());
        }
    }
    changed
}
pub fn determine_dirty_pages(
cache: &BuildCache,
structurally_changed_sources: &FxHashSet<String>,
changed_entries: &FxHashSet<(String, String)>,
changed_asset_files: &FxHashSet<PathBuf>,
) -> FxHashSet<PageKey> {
let mut dirty = FxHashSet::default();
'pages: for (page_key, page_entry) in &cache.pages {
for source_name in &page_entry.content_sources_iterated {
if structurally_changed_sources.contains(source_name) {
dirty.insert(page_key.clone());
continue 'pages;
}
}
for source_name in &page_entry.content_sources_iterated {
if changed_entries.iter().any(|(s, _)| s == source_name) {
dirty.insert(page_key.clone());
continue 'pages;
}
}
for (source_name, entry_id) in &page_entry.content_entries_read {
if changed_entries.contains(&(source_name.clone(), entry_id.clone()))
|| structurally_changed_sources.contains(source_name)
{
dirty.insert(page_key.clone());
continue 'pages;
}
}
let page_asset_paths = page_entry
.images
.iter()
.map(|a| &a.path)
.chain(page_entry.scripts.iter().map(|a| &a.path))
.chain(page_entry.styles.iter().map(|a| &a.path));
for path in page_asset_paths {
if changed_asset_files.contains(path) {
dirty.insert(page_key.clone());
continue 'pages;
}
}
}
dirty
}
pub fn find_stale_pages(
cached_pages: &FxHashMap<PageKey, PageCacheEntry>,
current_pages: &FxHashSet<PageKey>,
) -> FxHashSet<PageKey> {
cached_pages
.keys()
.filter(|k| !current_pages.contains(k))
.cloned()
.collect()
}
/// Static files the previous build produced that the current build no
/// longer includes; candidates for deletion from the output directory.
pub fn find_stale_static_files(
    prev_static_files: &FxHashSet<String>,
    current_static_files: &FxHashSet<String>,
) -> Vec<String> {
    let mut stale = Vec::new();
    for file in prev_static_files {
        if !current_static_files.contains(file.as_str()) {
            stale.push(file.clone());
        }
    }
    stale
}
/// Whether the script/style bundles must be regenerated: either the set of
/// bundled assets differs from the cached one, or a cached CSS `url()`
/// dependency's mtime/size fingerprint no longer matches (or the file is
/// gone).
pub fn needs_rebundle(
    cached_scripts: &[SerializedAssetRef],
    cached_styles: &[SerializedAssetRef],
    current_scripts: &FxHashSet<SerializedAssetRef>,
    current_styles: &FxHashSet<SerializedAssetRef>,
    cached_css_deps: &FxHashMap<PathBuf, AssetFileFingerprint>,
) -> bool {
    // Set equality: same size plus full containment of the cached side.
    let same_set = |cached: &[SerializedAssetRef], current: &FxHashSet<SerializedAssetRef>| {
        cached.len() == current.len() && cached.iter().all(|item| current.contains(item))
    };
    if !same_set(cached_scripts, current_scripts) || !same_set(cached_styles, current_styles) {
        return true;
    }
    cached_css_deps.iter().any(|(path, prev_fp)| {
        !matches!(
            file_fingerprint(path),
            Some((mtime, size)) if mtime == prev_fp.mtime_ns && size == prev_fp.size
        )
    })
}
/// Decides between a full and an incremental build.
///
/// Falls back to a full build when there is no usable cache, the generator
/// binary changed, or the build options changed. Otherwise diffs content
/// sources and asset files against the cache and collects the dirty pages.
pub fn load_incremental_state(
    previous_cache: Option<BuildCache>,
    current_content_states: &FxHashMap<String, ContentSourceState>,
    current_binary_hash: &str,
    current_options_hash: &str,
) -> IncrementalState {
    let Some(cache) = previous_cache else {
        info!(target: "cache", "No valid build cache found, performing full build");
        return IncrementalState::full_build();
    };
    if cache.binary_hash != current_binary_hash {
        info!(target: "cache", "Binary changed, performing full build");
        return IncrementalState::full_build();
    }
    if cache.options_hash != current_options_hash {
        info!(target: "cache", "Build options changed, performing full build");
        return IncrementalState::full_build();
    }

    let (structurally_changed_sources, changed_entries) =
        diff_content_sources(&cache.content_sources, current_content_states);
    if !structurally_changed_sources.is_empty() {
        info!(
            target: "cache",
            "Content sources with structural changes: {:?}",
            structurally_changed_sources
        );
    }
    if !changed_entries.is_empty() {
        info!(
            target: "cache",
            "Changed content entries: {:?}",
            changed_entries
        );
    }

    let changed_asset_files = diff_asset_files(&cache.asset_file_hashes);
    if !changed_asset_files.is_empty() {
        info!(
            target: "cache",
            "Changed asset files: {:?}",
            changed_asset_files
        );
    }

    let dirty_pages = determine_dirty_pages(
        &cache,
        &structurally_changed_sources,
        &changed_entries,
        &changed_asset_files,
    );
    IncrementalState {
        mode: IncrementalMode::Incremental,
        previous_cache: Some(cache),
        dirty_pages,
    }
}
#[cfg(test)]
mod tests {
use super::*;
// Test fixture: a PageCacheEntry with the given content dependencies,
// no asset dependencies, and the given output path.
fn page_entry(
entries_read: Vec<(String, String)>,
sources_iterated: Vec<String>,
output_file: &str,
) -> PageCacheEntry {
PageCacheEntry {
content_entries_read: entries_read,
content_sources_iterated: sources_iterated,
images: vec![],
scripts: vec![],
styles: vec![],
output_file: PathBuf::from(output_file),
}
}
// Keys with identical route/params/variant compare equal; a differing
// variant breaks equality.
#[test]
fn test_page_key_equality() {
let key1 = PageKey::new_static("/", None);
let key2 = PageKey::new_static("/", None);
assert_eq!(key1, key2);
let key3 = PageKey::new_static("/", Some("en"));
assert_ne!(key1, key3);
}
// bincode serialize/deserialize preserves a minimal cache.
#[test]
fn test_cache_roundtrip() {
let cache = BuildCache {
version: BUILD_CACHE_VERSION,
binary_hash: "abc123".to_string(),
..Default::default()
};
let bytes = bincode::serialize(&cache).unwrap();
let loaded: BuildCache = bincode::deserialize(&bytes).unwrap();
assert_eq!(loaded.version, BUILD_CACHE_VERSION);
assert_eq!(loaded.binary_hash, "abc123");
}
// Identical snapshots produce no structural changes and no changed entries.
#[test]
fn test_diff_content_sources_no_changes() {
let mut cached = FxHashMap::default();
cached.insert(
"articles".to_string(),
ContentSourceState {
files: {
let mut m = FxHashMap::default();
m.insert(PathBuf::from("content/a.md"), "hash1".to_string());
m
},
entry_ids: vec!["a".to_string()],
..Default::default()
},
);
let current = cached.clone();
let (structural, changed) = diff_content_sources(&cached, &current);
assert!(structural.is_empty());
assert!(changed.is_empty());
}
// A file hash change with the same entry list is reported as a changed
// entry, not a structural change.
#[test]
fn test_diff_content_sources_entry_changed() {
let mut cached = FxHashMap::default();
cached.insert(
"articles".to_string(),
ContentSourceState {
files: {
let mut m = FxHashMap::default();
m.insert(PathBuf::from("content/a.md"), "hash1".to_string());
m
},
entry_ids: vec!["a".to_string()],
..Default::default()
},
);
let mut current = FxHashMap::default();
current.insert(
"articles".to_string(),
ContentSourceState {
files: {
let mut m = FxHashMap::default();
m.insert(PathBuf::from("content/a.md"), "hash2".to_string());
m
},
entry_ids: vec!["a".to_string()],
file_to_entry: {
let mut m = FxHashMap::default();
m.insert(PathBuf::from("content/a.md"), "a".to_string());
m
},
},
);
let (structural, changed) = diff_content_sources(&cached, &current);
assert!(structural.is_empty());
assert!(changed.contains(&("articles".to_string(), "a".to_string())));
}
// A different entry-id list is a structural change.
#[test]
fn test_diff_content_sources_structural_change() {
let mut cached = FxHashMap::default();
cached.insert(
"articles".to_string(),
ContentSourceState {
files: FxHashMap::default(),
entry_ids: vec!["a".to_string()],
..Default::default()
},
);
let mut current = FxHashMap::default();
current.insert(
"articles".to_string(),
ContentSourceState {
files: FxHashMap::default(),
entry_ids: vec!["a".to_string(), "b".to_string()],
..Default::default()
},
);
let (structural, _changed) = diff_content_sources(&cached, &current);
assert!(structural.contains("articles"));
}
// A changed entry dirties both the page that iterated its source and the
// page that read the entry itself, but not a page reading another entry.
#[test]
fn test_determine_dirty_pages() {
let mut pages = FxHashMap::default();
let key_index = PageKey::new_static("/", None);
pages.insert(
key_index.clone(),
page_entry(vec![], vec!["articles".to_string()], "dist/index.html"),
);
let key_article = PageKey::new(
"/articles/[slug]",
&{
let mut m = FxHashMap::default();
m.insert("slug".to_string(), Some("foo".to_string()));
m
},
None,
);
pages.insert(
key_article.clone(),
page_entry(
vec![("articles".to_string(), "foo".to_string())],
vec![],
"dist/articles/foo/index.html",
),
);
let key_other = PageKey::new(
"/articles/[slug]",
&{
let mut m = FxHashMap::default();
m.insert("slug".to_string(), Some("bar".to_string()));
m
},
None,
);
pages.insert(
key_other.clone(),
page_entry(
vec![("articles".to_string(), "bar".to_string())],
vec![],
"dist/articles/bar/index.html",
),
);
let cache = BuildCache {
pages,
..Default::default()
};
let mut changed_entries = FxHashSet::default();
changed_entries.insert(("articles".to_string(), "foo".to_string()));
let dirty = determine_dirty_pages(
&cache,
&FxHashSet::default(),
&changed_entries,
&FxHashSet::default(),
);
assert!(dirty.contains(&key_index));
assert!(dirty.contains(&key_article));
assert!(!dirty.contains(&key_other));
}
// Equal asset sets need no rebundle; adding a script does.
#[test]
fn test_needs_rebundle() {
let scripts = vec![SerializedAssetRef {
path: PathBuf::from("script.js"),
hash: "abc".to_string(),
}];
let styles = vec![];
let current_scripts: FxHashSet<SerializedAssetRef> = scripts.iter().cloned().collect();
let current_styles: FxHashSet<SerializedAssetRef> = FxHashSet::default();
let no_css_deps = FxHashMap::default();
assert!(!needs_rebundle(
&scripts,
&styles,
&current_scripts,
&current_styles,
&no_css_deps,
));
let mut new_scripts = current_scripts;
new_scripts.insert(SerializedAssetRef {
path: PathBuf::from("new.js"),
hash: "def".to_string(),
});
assert!(needs_rebundle(
&scripts,
&styles,
&new_scripts,
&current_styles,
&no_css_deps,
));
}
// A page with no content or asset dependencies is never dirtied by
// unrelated entry changes.
#[test]
fn test_page_no_deps_stays_clean() {
let mut pages = FxHashMap::default();
let key = PageKey::new_static("/about", None);
pages.insert(
key.clone(),
page_entry(vec![], vec![], "dist/about/index.html"),
);
let cache = BuildCache {
pages,
..Default::default()
};
let mut changed_entries = FxHashSet::default();
changed_entries.insert(("articles".to_string(), "foo".to_string()));
let dirty = determine_dirty_pages(
&cache,
&FxHashSet::default(),
&changed_entries,
&FxHashSet::default(),
);
assert!(!dirty.contains(&key));
}
// A changed image referenced by a page dirties that page.
#[test]
fn test_determine_dirty_pages_asset_change() {
let mut pages = FxHashMap::default();
let key = PageKey::new_static("/", None);
pages.insert(
key.clone(),
PageCacheEntry {
images: vec![CachedImage {
path: PathBuf::from("images/logo.png"),
hash: "img_hash".to_string(),
filename: PathBuf::from("logo.img_hash.png"),
}],
..page_entry(vec![], vec![], "dist/index.html")
},
);
let cache = BuildCache {
pages,
..Default::default()
};
let mut changed_assets = FxHashSet::default();
changed_assets.insert(PathBuf::from("images/logo.png"));
let dirty = determine_dirty_pages(
&cache,
&FxHashSet::default(),
&FxHashSet::default(),
&changed_assets,
);
assert!(dirty.contains(&key));
}
// Only cached pages absent from the current page set are stale.
#[test]
fn test_find_stale_pages() {
let mut cached_pages = FxHashMap::default();
let key_kept = PageKey::new_static("/", None);
let key_removed = PageKey::new_static("/old-page", None);
cached_pages.insert(
key_kept.clone(),
page_entry(vec![], vec![], "dist/index.html"),
);
cached_pages.insert(
key_removed.clone(),
page_entry(vec![], vec![], "dist/old-page/index.html"),
);
let mut current_pages = FxHashSet::default();
current_pages.insert(key_kept.clone());
let stale = find_stale_pages(&cached_pages, &current_pages);
assert!(!stale.contains(&key_kept));
assert!(stale.contains(&key_removed));
}
// Full serialization roundtrip of a populated page entry, including
// images, scripts, styles, and a parameterized, variant-bearing key.
#[test]
fn test_cache_roundtrip_with_pages() {
let mut cache = BuildCache {
version: BUILD_CACHE_VERSION,
binary_hash: "test_hash".to_string(),
..Default::default()
};
let key = PageKey::new(
"/articles/[slug]",
&{
let mut m = FxHashMap::default();
m.insert("slug".to_string(), Some("hello".to_string()));
m
},
Some("en"),
);
cache.pages.insert(
key.clone(),
PageCacheEntry {
images: vec![CachedImage {
path: PathBuf::from("images/hero.jpg"),
hash: "img123".to_string(),
filename: PathBuf::from("hero.img123.jpg"),
}],
scripts: vec![CachedScript {
path: PathBuf::from("script.js"),
hash: "js456".to_string(),
included: true,
}],
styles: vec![CachedStyle {
path: PathBuf::from("style.css"),
hash: "css789".to_string(),
included: true,
tailwind: false,
}],
..page_entry(
vec![("articles".to_string(), "hello".to_string())],
vec![],
"dist/en/articles/hello/index.html",
)
},
);
let bytes = bincode::serialize(&cache).unwrap();
let loaded: BuildCache = bincode::deserialize(&bytes).unwrap();
assert_eq!(loaded.version, BUILD_CACHE_VERSION);
assert_eq!(loaded.binary_hash, "test_hash");
assert!(loaded.pages.contains_key(&key));
let entry = &loaded.pages[&key];
assert_eq!(entry.content_entries_read.len(), 1);
assert_eq!(entry.images.len(), 1);
assert_eq!(entry.scripts.len(), 1);
assert_eq!(entry.styles.len(), 1);
assert_eq!(
entry.output_file,
PathBuf::from("dist/en/articles/hello/index.html")
);
}
// PageKey::new sorts params, so insertion order never affects equality.
#[test]
fn test_page_key_sorted_params() {
let mut m1 = FxHashMap::default();
m1.insert("a".to_string(), Some("1".to_string()));
m1.insert("b".to_string(), Some("2".to_string()));
let mut m2 = FxHashMap::default();
m2.insert("b".to_string(), Some("2".to_string()));
m2.insert("a".to_string(), Some("1".to_string()));
let key1 = PageKey::new("/test/[a]/[b]", &m1, None);
let key2 = PageKey::new("/test/[a]/[b]", &m2, None);
assert_eq!(key1, key2);
}
// A structural change dirties only pages that iterated that source.
#[test]
fn test_structural_change_dirties_iterating_pages() {
let mut pages = FxHashMap::default();
let key_index = PageKey::new_static("/", None);
pages.insert(
key_index.clone(),
page_entry(vec![], vec!["articles".to_string()], "dist/index.html"),
);
let key_about = PageKey::new_static("/about", None);
pages.insert(
key_about.clone(),
page_entry(vec![], vec!["pages".to_string()], "dist/about/index.html"),
);
let cache = BuildCache {
pages,
..Default::default()
};
let mut structural = FxHashSet::default();
structural.insert("articles".to_string());
let dirty = determine_dirty_pages(
&cache,
&structural,
&FxHashSet::default(),
&FxHashSet::default(),
);
assert!(dirty.contains(&key_index));
assert!(!dirty.contains(&key_about));
}
// Covers: empty input, an entry with no files, and the in-memory-content
// fast path (the backing file intentionally does not exist on disk).
#[test]
fn test_compute_content_source_state() {
let empty_raw: FxHashMap<String, &str> = FxHashMap::default();
let entries: Vec<(String, Vec<PathBuf>)> = vec![];
let state = compute_content_source_state(&entries, &empty_raw);
assert!(state.files.is_empty());
assert!(state.entry_ids.is_empty());
let entries = vec![("entry1".to_string(), vec![])];
let state = compute_content_source_state(&entries, &empty_raw);
assert!(state.files.is_empty());
assert_eq!(state.entry_ids, vec!["entry1".to_string()]);
let mut raw = FxHashMap::default();
raw.insert("entry1".to_string(), "hello world");
let entries = vec![(
"entry1".to_string(),
vec![PathBuf::from("nonexistent/file.md")],
)];
let state = compute_content_source_state(&entries, &raw);
let expected_hash = hash_bytes(b"hello world");
assert_eq!(
state.files.get(&PathBuf::from("nonexistent/file.md")),
Some(&expected_hash)
);
}
// A source absent from the cache is a structural change, not entry churn.
#[test]
fn test_diff_new_source_is_structural_change() {
let cached = FxHashMap::default();
let mut current = FxHashMap::default();
current.insert(
"new_source".to_string(),
ContentSourceState {
files: FxHashMap::default(),
entry_ids: vec!["a".to_string()],
..Default::default()
},
);
let (structural, changed) = diff_content_sources(&cached, &current);
assert!(structural.contains("new_source"));
assert!(changed.is_empty());
}
// A source that disappeared is also a structural change.
#[test]
fn test_diff_removed_source_is_structural_change() {
let mut cached = FxHashMap::default();
cached.insert(
"old_source".to_string(),
ContentSourceState {
files: FxHashMap::default(),
entry_ids: vec!["a".to_string()],
..Default::default()
},
);
let current = FxHashMap::default();
let (structural, _) = diff_content_sources(&cached, &current);
assert!(structural.contains("old_source"));
}
// Any changed entry in an iterated source dirties the iterating page,
// even if the page never read that specific entry.
#[test]
fn test_iterated_source_content_change_dirties_page() {
let mut pages = FxHashMap::default();
let key = PageKey::new_static("/", None);
pages.insert(
key.clone(),
page_entry(vec![], vec!["articles".to_string()], "dist/index.html"),
);
let cache = BuildCache {
pages,
..Default::default()
};
let mut changed = FxHashSet::default();
changed.insert(("articles".to_string(), "some-entry".to_string()));
let dirty = determine_dirty_pages(
&cache,
&FxHashSet::default(),
&changed,
&FxHashSet::default(),
);
assert!(dirty.contains(&key));
}
// With empty diff sets, nothing is dirty even for a page with deps.
#[test]
fn test_no_changes_means_no_dirty_pages() {
let mut pages = FxHashMap::default();
let key = PageKey::new_static("/", None);
pages.insert(
key.clone(),
PageCacheEntry {
images: vec![CachedImage {
path: PathBuf::from("logo.png"),
hash: "abc".to_string(),
filename: PathBuf::from("logo.abc.png"),
}],
..page_entry(
vec![("articles".to_string(), "foo".to_string())],
vec![],
"dist/index.html",
)
},
);
let cache = BuildCache {
pages,
..Default::default()
};
let dirty = determine_dirty_pages(
&cache,
&FxHashSet::default(),
&FxHashSet::default(),
&FxHashSet::default(),
);
assert!(dirty.is_empty());
}
// save() then load() through the real filesystem round-trips the cache.
#[test]
fn test_cache_save_and_load_roundtrip() {
let dir = tempfile::tempdir().unwrap();
let cache_dir = dir.path();
let mut cache = BuildCache {
version: BUILD_CACHE_VERSION,
binary_hash: "test123".to_string(),
..Default::default()
};
let key = PageKey::new_static("/about", None);
cache.pages.insert(
key.clone(),
page_entry(
vec![("pages".to_string(), "about".to_string())],
vec![],
"dist/about/index.html",
),
);
cache.save(cache_dir).unwrap();
let loaded = BuildCache::load(cache_dir).unwrap();
assert_eq!(loaded.version, BUILD_CACHE_VERSION);
assert_eq!(loaded.binary_hash, "test123");
assert!(loaded.pages.contains_key(&key));
assert_eq!(
loaded.pages[&key].content_entries_read,
vec![("pages".to_string(), "about".to_string())]
);
}
// A non-bincode file is rejected with None instead of panicking.
#[test]
fn test_cache_load_corrupt_file() {
let dir = tempfile::tempdir().unwrap();
let cache_dir = dir.path();
let cache_file = cache_dir.join(BUILD_CACHE_FILENAME);
fs::create_dir_all(cache_dir).unwrap();
fs::write(&cache_file, b"this is not valid bincode").unwrap();
assert!(BuildCache::load(cache_dir).is_none());
}
// A cache written with a different version number is rejected.
#[test]
fn test_cache_load_version_mismatch() {
let dir = tempfile::tempdir().unwrap();
let cache_dir = dir.path();
let cache = BuildCache {
version: 999, ..Default::default()
};
let bytes = bincode::serialize(&cache).unwrap();
fs::create_dir_all(cache_dir).unwrap();
fs::write(cache_dir.join(BUILD_CACHE_FILENAME), bytes).unwrap();
assert!(BuildCache::load(cache_dir).is_none());
}
// A missing cache file yields None.
#[test]
fn test_cache_load_missing_file() {
let dir = tempfile::tempdir().unwrap();
assert!(BuildCache::load(dir.path()).is_none());
}
// Rewriting one file's content flags only that file as changed.
#[test]
fn test_diff_asset_files_detects_changes() {
let dir = tempfile::tempdir().unwrap();
let file_a = dir.path().join("style.css");
let file_b = dir.path().join("script.js");
fs::write(&file_a, "body { color: red; }").unwrap();
fs::write(&file_b, "console.log('hi');").unwrap();
let mut cached = FxHashMap::default();
cached.insert(
file_a.clone(),
AssetFileFingerprint::from_path(&file_a).unwrap(),
);
cached.insert(
file_b.clone(),
AssetFileFingerprint::from_path(&file_b).unwrap(),
);
assert!(diff_asset_files(&cached).is_empty());
fs::write(&file_a, "body { color: blue; }").unwrap();
let changed = diff_asset_files(&cached);
assert!(changed.contains(&file_a));
assert!(!changed.contains(&file_b));
}
// A deleted file is reported as changed.
#[test]
fn test_diff_asset_files_deleted_file() {
let dir = tempfile::tempdir().unwrap();
let file = dir.path().join("gone.css");
fs::write(&file, "body {}").unwrap();
let mut cached = FxHashMap::default();
cached.insert(
file.clone(),
AssetFileFingerprint::from_path(&file).unwrap(),
);
fs::remove_file(&file).unwrap();
let changed = diff_asset_files(&cached);
assert!(changed.contains(&file));
}
}