#![allow(clippy::unwrap_used)]
use async_trait::async_trait;
use std::collections::HashSet;
use std::io::{Error as IoError, ErrorKind};
use std::path::{Path, PathBuf};
use std::sync::{Arc, RwLock};
use std::time::SystemTime;
use super::limits::{FsLimits, FsUsage};
use super::memory::InMemoryFs;
use super::traits::{DirEntry, FileSystem, FileSystemExt, FileType, Metadata};
use crate::error::Result;
/// A copy-on-write overlay filesystem: a read-only `lower` layer combined
/// with a writable in-memory `upper` layer. Writes and deletes never touch
/// the lower layer; deletions are recorded as whiteouts.
pub struct OverlayFs {
    // Read-only base layer; never mutated by this type.
    lower: Arc<dyn FileSystem>,
    // Writable layer; receives all writes and copied-up files.
    upper: InMemoryFs,
    // Normalized paths deleted relative to the lower layer. A whiteout on a
    // directory hides everything beneath it (see `is_whiteout`).
    whiteouts: RwLock<HashSet<PathBuf>>,
    // Limits enforced against the *combined* usage of both layers.
    limits: FsLimits,
    // Running total of lower-layer usage that is shadowed (whited out or
    // overridden), subtracted in `compute_usage` to avoid double counting.
    lower_hidden: RwLock<FsUsage>,
}
impl OverlayFs {
    /// Creates an overlay over `lower` with the default limits.
    pub fn new(lower: Arc<dyn FileSystem>) -> Self {
        Self::with_limits(lower, FsLimits::default())
    }

    /// Creates an overlay over `lower`, enforcing `limits` against the
    /// combined usage of both layers.
    pub fn with_limits(lower: Arc<dyn FileSystem>, limits: FsLimits) -> Self {
        Self {
            lower,
            upper: InMemoryFs::with_limits(limits.clone()),
            whiteouts: RwLock::new(HashSet::new()),
            limits,
            lower_hidden: RwLock::new(FsUsage::default()),
        }
    }

    /// Read-only access to the writable upper layer.
    pub fn upper(&self) -> &InMemoryFs {
        &self.upper
    }

    /// Combined usage of both layers: upper + lower, minus the lower-layer
    /// entries hidden by whiteouts or copy-on-write overrides.
    fn compute_usage(&self) -> FsUsage {
        let upper_usage = self.upper.usage();
        let lower_usage = self.lower.usage();
        let hidden = self.lower_hidden.read().unwrap();
        let total_bytes = upper_usage
            .total_bytes
            .saturating_add(lower_usage.total_bytes)
            .saturating_sub(hidden.total_bytes);
        let file_count = upper_usage
            .file_count
            .saturating_add(lower_usage.file_count)
            .saturating_sub(hidden.file_count);
        let dir_count = upper_usage
            .dir_count
            .saturating_add(lower_usage.dir_count)
            .saturating_sub(hidden.dir_count);
        FsUsage::new(total_bytes, file_count, dir_count)
    }

    /// Records that a lower-layer file of `size` bytes is now shadowed
    /// (deleted or overridden) so usage accounting stops counting it.
    fn hide_lower_file(&self, size: u64) {
        let mut h = self.lower_hidden.write().unwrap();
        h.total_bytes = h.total_bytes.saturating_add(size);
        h.file_count = h.file_count.saturating_add(1);
    }

    /// Records that a lower-layer directory is now shadowed.
    fn hide_lower_dir(&self) {
        let mut h = self.lower_hidden.write().unwrap();
        h.dir_count = h.dir_count.saturating_add(1);
    }

    /// Marks every still-visible lower entry under `dir` as hidden.
    /// Used by recursive delete so child usage is deducted as well.
    async fn hide_lower_children_recursive(&self, dir: &Path) {
        if let Ok(entries) = self.lower.read_dir(dir).await {
            for entry in entries {
                let child = dir.join(&entry.name);
                if self.is_whiteout(&child) {
                    // Already deducted when the whiteout was added.
                    continue;
                }
                if let Ok(meta) = self.lower.stat(&child).await {
                    match meta.file_type {
                        // Files overridden by the upper layer were hidden at
                        // copy-up time; only hide ones still shining through.
                        FileType::File if !self.upper.exists(&child).await.unwrap_or(false) => {
                            self.hide_lower_file(meta.size);
                        }
                        FileType::Directory => {
                            self.hide_lower_dir();
                            // Box::pin is required for async recursion.
                            Box::pin(self.hide_lower_children_recursive(&child)).await;
                        }
                        _ => {}
                    }
                }
            }
        }
    }

    /// Rejects a write of `content_size` new bytes that would exceed the
    /// per-file size, combined total-byte, or file-count limits.
    ///
    /// NOTE(review): this charges the full `content_size` even when the
    /// target path already exists, so an in-place overwrite near the byte
    /// or file-count limit may be rejected conservatively.
    fn check_write_limits(&self, content_size: usize) -> Result<()> {
        if content_size as u64 > self.limits.max_file_size {
            return Err(IoError::other(format!(
                "file too large: {} bytes exceeds {} byte limit",
                content_size, self.limits.max_file_size
            ))
            .into());
        }
        let usage = self.compute_usage();
        // Saturate rather than overflow on pathological sizes, matching the
        // arithmetic style used everywhere else in the accounting code.
        let new_total = usage.total_bytes.saturating_add(content_size as u64);
        if new_total > self.limits.max_total_bytes {
            return Err(IoError::other(format!(
                "filesystem full: {} bytes would exceed {} byte limit",
                new_total, self.limits.max_total_bytes
            ))
            .into());
        }
        if usage.file_count >= self.limits.max_file_count {
            return Err(IoError::other(format!(
                "too many files: {} files at {} file limit",
                usage.file_count, self.limits.max_file_count
            ))
            .into());
        }
        Ok(())
    }

    /// Rejects creating `dirs_to_create` additional directories if the
    /// combined directory count would exceed the limit.
    fn check_dir_limits(&self, dirs_to_create: u64) -> Result<()> {
        let usage = self.compute_usage();
        let new_total = usage.dir_count.saturating_add(dirs_to_create);
        if new_total > self.limits.max_dir_count {
            return Err(IoError::other(format!(
                "too many directories: {} + {} would exceed {} directory limit",
                usage.dir_count, dirs_to_create, self.limits.max_dir_count
            ))
            .into());
        }
        Ok(())
    }

    /// Number of directories a `mkdir` of `path` would newly create in the
    /// upper layer: just the target when non-recursive, otherwise every
    /// missing ancestor along the way.
    async fn count_missing_upper_dirs(&self, path: &Path, recursive: bool) -> Result<u64> {
        if !recursive {
            return Ok(u64::from(!self.upper.exists(path).await.unwrap_or(false)));
        }
        let mut current = PathBuf::from("/");
        let mut missing = 0u64;
        for component in path.components().skip(1) {
            current.push(component);
            if !self.upper.exists(&current).await.unwrap_or(false) {
                missing = missing.saturating_add(1);
            }
        }
        Ok(missing)
    }

    /// Normalizes `path` via the module-level normalizer so whiteout and
    /// layer lookups use a canonical key.
    fn normalize_path(path: &Path) -> PathBuf {
        super::normalize_path(path)
    }

    /// True if `path` or any ancestor has been whited out (deleted relative
    /// to the lower layer), making the path invisible.
    fn is_whiteout(&self, path: &Path) -> bool {
        let path = Self::normalize_path(path);
        let whiteouts = self.whiteouts.read().unwrap();
        let mut check = path.as_path();
        loop {
            if whiteouts.contains(check) {
                return true;
            }
            match check.parent() {
                // Walk up until the root, which is its own parent boundary.
                Some(p) if p != check => check = p,
                _ => break,
            }
        }
        false
    }

    /// Marks `path` as deleted relative to the lower layer.
    fn add_whiteout(&self, path: &Path) {
        let path = Self::normalize_path(path);
        let mut whiteouts = self.whiteouts.write().unwrap();
        whiteouts.insert(path);
    }

    /// Clears any whiteout on `path` (e.g. when the path is recreated).
    fn remove_whiteout(&self, path: &Path) {
        let path = Self::normalize_path(path);
        let mut whiteouts = self.whiteouts.write().unwrap();
        whiteouts.remove(&path);
    }
}
#[async_trait]
impl FileSystem for OverlayFs {
    /// Reads a file, preferring the upper layer. Whited-out paths report
    /// `NotFound` even if the lower layer still contains them.
    async fn read_file(&self, path: &Path) -> Result<Vec<u8>> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        if self.is_whiteout(&path) {
            return Err(IoError::new(ErrorKind::NotFound, "file not found").into());
        }
        if self.upper.exists(&path).await.unwrap_or(false) {
            return self.upper.read_file(&path).await;
        }
        self.lower.read_file(&path).await
    }

    /// Writes always land in the upper layer. When this shadows a lower
    /// entry for the first time, that entry's usage is marked hidden so
    /// combined accounting does not double-count it.
    async fn write_file(&self, path: &Path, content: &[u8]) -> Result<()> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        self.check_write_limits(content.len())?;
        // Capture shadowing state *before* clearing the whiteout, so the
        // lower entry is hidden only on its first override.
        let already_in_upper = self.upper.exists(&path).await.unwrap_or(false);
        let already_whited = self.is_whiteout(&path);
        let lower_exists = self.lower.exists(&path).await.unwrap_or(false);
        self.remove_whiteout(&path);
        // Auto-create the parent in the upper layer only when the lower
        // layer has it; otherwise writing into a missing directory fails.
        if let Some(parent) = path.parent()
            && !self.upper.exists(parent).await.unwrap_or(false)
        {
            if self.lower.exists(parent).await.unwrap_or(false) {
                self.upper.mkdir(parent, true).await?;
            } else {
                return Err(IoError::new(ErrorKind::NotFound, "parent directory not found").into());
            }
        }
        self.upper.write_file(&path, content).await?;
        if lower_exists
            && !already_in_upper
            && !already_whited
            && let Ok(meta) = self.lower.stat(&path).await
        {
            match meta.file_type {
                FileType::File => self.hide_lower_file(meta.size),
                FileType::Directory => self.hide_lower_dir(),
                _ => {}
            }
        }
        Ok(())
    }

    /// Appends to a file. A lower-only file is first copied up (its old
    /// lower size is hidden); a missing file is created in the upper layer.
    async fn append_file(&self, path: &Path, content: &[u8]) -> Result<()> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        if self.is_whiteout(&path) {
            return Err(IoError::new(ErrorKind::NotFound, "file not found").into());
        }
        if self.upper.exists(&path).await.unwrap_or(false) {
            self.check_write_limits(content.len())?;
            return self.upper.append_file(&path, content).await;
        }
        if self.lower.exists(&path).await.unwrap_or(false) {
            // Copy-on-write: charge limits for the full combined size since
            // the whole file materializes in the upper layer.
            let lower_meta = self.lower.stat(&path).await?;
            let existing = self.lower.read_file(&path).await?;
            self.check_write_limits(existing.len() + content.len())?;
            if let Some(parent) = path.parent()
                && !self.upper.exists(parent).await.unwrap_or(false)
            {
                self.upper.mkdir(parent, true).await?;
            }
            let mut combined = existing;
            combined.extend_from_slice(content);
            self.upper.write_file(&path, &combined).await?;
            // Hide the lower original so usage reflects only the upper copy.
            self.hide_lower_file(lower_meta.size);
            return Ok(());
        }
        self.check_write_limits(content.len())?;
        self.upper.write_file(&path, content).await
    }

    /// Creates a directory in the upper layer, clearing any whiteout and
    /// enforcing the combined directory-count limit first.
    async fn mkdir(&self, path: &Path, recursive: bool) -> Result<()> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        let dirs_to_create = self.count_missing_upper_dirs(&path, recursive).await?;
        self.check_dir_limits(dirs_to_create)?;
        self.remove_whiteout(&path);
        self.upper.mkdir(&path, recursive).await
    }

    /// Removes from the upper layer and/or whiteouts the lower entry.
    /// Hidden-usage bookkeeping is updated so deleted lower entries stop
    /// counting against the limits.
    async fn remove(&self, path: &Path, recursive: bool) -> Result<()> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        let in_upper = self.upper.exists(&path).await.unwrap_or(false);
        let in_lower = !self.is_whiteout(&path) && self.lower.exists(&path).await.unwrap_or(false);
        if !in_upper && !in_lower {
            return Err(IoError::new(ErrorKind::NotFound, "not found").into());
        }
        if in_upper {
            self.upper.remove(&path, recursive).await?;
        }
        if in_lower {
            // Only deduct lower usage when the entry was not overridden by
            // the upper layer (an override already hid it at copy-up time).
            if !in_upper && let Ok(meta) = self.lower.stat(&path).await {
                match meta.file_type {
                    FileType::File => self.hide_lower_file(meta.size),
                    FileType::Directory => {
                        self.hide_lower_dir();
                        if recursive {
                            self.hide_lower_children_recursive(&path).await;
                        }
                    }
                    _ => {}
                }
            }
            self.add_whiteout(&path);
        }
        Ok(())
    }

    /// Stats the path, preferring upper-layer metadata.
    async fn stat(&self, path: &Path) -> Result<Metadata> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        if self.is_whiteout(&path) {
            return Err(IoError::new(ErrorKind::NotFound, "not found").into());
        }
        if self.upper.exists(&path).await.unwrap_or(false) {
            return self.upper.stat(&path).await;
        }
        self.lower.stat(&path).await
    }

    /// Lists a directory as the merge of both layers: whited-out lower
    /// entries are filtered out, and upper entries override lower entries
    /// with the same name.
    async fn read_dir(&self, path: &Path) -> Result<Vec<DirEntry>> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        if self.is_whiteout(&path) {
            return Err(IoError::new(ErrorKind::NotFound, "not found").into());
        }
        // An upper non-directory always wins; a lower non-directory is only
        // an error when the upper layer does not provide a directory.
        let is_dir_upper = if let Ok(meta) = self.upper.stat(&path).await {
            if !meta.file_type.is_dir() {
                return Err(IoError::other("not a directory").into());
            }
            true
        } else {
            false
        };
        let is_dir_lower = if let Ok(meta) = self.lower.stat(&path).await {
            if !meta.file_type.is_dir() {
                if !is_dir_upper {
                    return Err(IoError::other("not a directory").into());
                }
                false
            } else {
                true
            }
        } else {
            false
        };
        if !is_dir_lower && !is_dir_upper {
            return Err(IoError::new(ErrorKind::NotFound, "not found").into());
        }
        // Keyed by entry name so upper inserts replace lower ones.
        let mut entries: std::collections::HashMap<String, DirEntry> =
            std::collections::HashMap::new();
        if is_dir_lower && let Ok(lower_entries) = self.lower.read_dir(&path).await {
            for entry in lower_entries {
                let entry_path = path.join(&entry.name);
                if !self.is_whiteout(&entry_path) {
                    entries.insert(entry.name.clone(), entry);
                }
            }
        }
        if is_dir_upper && let Ok(upper_entries) = self.upper.read_dir(&path).await {
            for entry in upper_entries {
                entries.insert(entry.name.clone(), entry);
            }
        }
        Ok(entries.into_values().collect())
    }

    /// True if the path is visible in either layer (and not whited out).
    async fn exists(&self, path: &Path) -> Result<bool> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        if self.is_whiteout(&path) {
            return Ok(false);
        }
        if self.upper.exists(&path).await.unwrap_or(false) {
            return Ok(true);
        }
        self.lower.exists(&path).await
    }

    /// Renames by copy + delete so the source can live in the read-only
    /// lower layer. Symlinks are recreated rather than having their target
    /// contents copied.
    async fn rename(&self, from: &Path, to: &Path) -> Result<()> {
        self.limits
            .validate_path(from)
            .map_err(|e| IoError::other(e.to_string()))?;
        self.limits
            .validate_path(to)
            .map_err(|e| IoError::other(e.to_string()))?;
        let from = Self::normalize_path(from);
        let to = Self::normalize_path(to);
        let meta = self.stat(&from).await?;
        if meta.file_type == FileType::Symlink {
            let target = self.read_link(&from).await?;
            self.check_write_limits(0)?;
            self.remove_whiteout(&to);
            self.upper.symlink(&target, &to).await?;
            self.remove(&from, false).await?;
            return Ok(());
        }
        let content = self.read_file(&from).await?;
        self.write_file(&to, &content).await?;
        self.remove(&from, false).await?;
        Ok(())
    }

    /// Copies a file (or recreates a symlink) into the upper layer at `to`.
    async fn copy(&self, from: &Path, to: &Path) -> Result<()> {
        self.limits
            .validate_path(from)
            .map_err(|e| IoError::other(e.to_string()))?;
        self.limits
            .validate_path(to)
            .map_err(|e| IoError::other(e.to_string()))?;
        let from = Self::normalize_path(from);
        let to = Self::normalize_path(to);
        let meta = self.stat(&from).await?;
        if meta.file_type == FileType::Symlink {
            let target = self.read_link(&from).await?;
            self.check_write_limits(0)?;
            self.remove_whiteout(&to);
            return self.upper.symlink(&target, &to).await;
        }
        let content = self.read_file(&from).await?;
        self.write_file(&to, &content).await
    }

    /// Creates a symlink in the upper layer, clearing any whiteout first.
    async fn symlink(&self, target: &Path, link: &Path) -> Result<()> {
        self.limits
            .validate_path(link)
            .map_err(|e| IoError::other(e.to_string()))?;
        let link = Self::normalize_path(link);
        self.check_write_limits(0)?;
        self.remove_whiteout(&link);
        self.upper.symlink(target, &link).await
    }

    /// Reads a symlink target, preferring the upper layer.
    async fn read_link(&self, path: &Path) -> Result<PathBuf> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        if self.is_whiteout(&path) {
            return Err(IoError::new(ErrorKind::NotFound, "not found").into());
        }
        if self.upper.exists(&path).await.unwrap_or(false) {
            return self.upper.read_link(&path).await;
        }
        self.lower.read_link(&path).await
    }

    /// Changes permissions. A lower-only entry is copied up first (files by
    /// content, directories by mkdir) since the lower layer is read-only.
    async fn chmod(&self, path: &Path, mode: u32) -> Result<()> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        if self.is_whiteout(&path) {
            return Err(IoError::new(ErrorKind::NotFound, "not found").into());
        }
        if self.upper.exists(&path).await.unwrap_or(false) {
            return self.upper.chmod(&path, mode).await;
        }
        if self.lower.exists(&path).await.unwrap_or(false) {
            let stat = self.lower.stat(&path).await?;
            if stat.file_type == FileType::File {
                let content = self.lower.read_file(&path).await?;
                // Enforce limits before materializing the copy in upper.
                self.check_write_limits(content.len())?;
                if let Some(parent) = path.parent()
                    && !self.upper.exists(parent).await.unwrap_or(false)
                {
                    self.upper.mkdir(parent, true).await?;
                }
                self.upper.write_file(&path, &content).await?;
                self.hide_lower_file(stat.size);
            } else if stat.file_type == FileType::Directory {
                let dirs_to_create = self.count_missing_upper_dirs(&path, true).await?;
                self.check_dir_limits(dirs_to_create)?;
                self.upper.mkdir(&path, true).await?;
                self.hide_lower_dir();
            }
            return self.upper.chmod(&path, mode).await;
        }
        Err(IoError::new(ErrorKind::NotFound, "not found").into())
    }

    /// Sets the modified time, copying a lower-only entry up first (files,
    /// fifos, directories, and symlinks each copied appropriately).
    async fn set_modified_time(&self, path: &Path, time: SystemTime) -> Result<()> {
        self.limits
            .validate_path(path)
            .map_err(|e| IoError::other(e.to_string()))?;
        let path = Self::normalize_path(path);
        if self.is_whiteout(&path) {
            return Err(IoError::new(ErrorKind::NotFound, "not found").into());
        }
        if self.upper.exists(&path).await.unwrap_or(false) {
            return self.upper.set_modified_time(&path, time).await;
        }
        if self.lower.exists(&path).await.unwrap_or(false) {
            let stat = self.lower.stat(&path).await?;
            match stat.file_type {
                FileType::File | FileType::Fifo => {
                    let content = self.lower.read_file(&path).await?;
                    self.check_write_limits(content.len())?;
                    if let Some(parent) = path.parent()
                        && !self.upper.exists(parent).await.unwrap_or(false)
                    {
                        self.upper.mkdir(parent, true).await?;
                    }
                    self.upper.write_file(&path, &content).await?;
                    self.hide_lower_file(stat.size);
                }
                FileType::Directory => {
                    let dirs_to_create = self.count_missing_upper_dirs(&path, true).await?;
                    self.check_dir_limits(dirs_to_create)?;
                    self.upper.mkdir(&path, true).await?;
                    self.hide_lower_dir();
                }
                FileType::Symlink => {
                    let target = self.lower.read_link(&path).await?;
                    self.check_write_limits(0)?;
                    if let Some(parent) = path.parent()
                        && !self.upper.exists(parent).await.unwrap_or(false)
                    {
                        self.upper.mkdir(parent, true).await?;
                    }
                    self.upper.symlink(&target, &path).await?;
                    // Symlinks carry no content bytes; hide only the count.
                    self.hide_lower_file(0);
                }
            }
            return self.upper.set_modified_time(&path, time).await;
        }
        Err(IoError::new(ErrorKind::NotFound, "not found").into())
    }
}
#[async_trait]
impl FileSystemExt for OverlayFs {
    /// Combined usage across both layers, net of hidden lower entries.
    fn usage(&self) -> FsUsage {
        self.compute_usage()
    }

    /// A copy of the limits this overlay enforces.
    fn limits(&self) -> FsLimits {
        self.limits.clone()
    }

    /// Snapshots only the writable upper layer; the read-only lower layer
    /// never changes and needs no snapshot.
    fn vfs_snapshot(&self) -> Option<super::VfsSnapshot> {
        let snapshot = self.upper.snapshot();
        Some(snapshot)
    }

    /// Restores the upper layer from `snapshot`; always reports success.
    fn vfs_restore(&self, snapshot: &super::VfsSnapshot) -> bool {
        self.upper.restore(snapshot);
        true
    }
}
#[cfg(test)]
mod tests {
    // Unit tests covering overlay semantics: layer precedence, copy-on-write,
    // whiteouts, merged directory listings, and combined limit accounting.
    use super::*;

    // A file present only in the lower layer is readable through the overlay.
    #[tokio::test]
    async fn test_read_from_lower() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/test.txt"), b"hello")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        let content = overlay.read_file(Path::new("/tmp/test.txt")).await.unwrap();
        assert_eq!(content, b"hello");
    }

    // New files land in the upper layer; the lower layer stays untouched.
    #[tokio::test]
    async fn test_write_to_upper() {
        let lower = Arc::new(InMemoryFs::new());
        let overlay = OverlayFs::new(lower.clone());
        overlay
            .write_file(Path::new("/tmp/new.txt"), b"new file")
            .await
            .unwrap();
        let content = overlay.read_file(Path::new("/tmp/new.txt")).await.unwrap();
        assert_eq!(content, b"new file");
        assert!(!lower.exists(Path::new("/tmp/new.txt")).await.unwrap());
    }

    // Overwriting a lower file shadows it without mutating the lower layer.
    #[tokio::test]
    async fn test_copy_on_write() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/test.txt"), b"original")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower.clone());
        overlay
            .write_file(Path::new("/tmp/test.txt"), b"modified")
            .await
            .unwrap();
        let content = overlay.read_file(Path::new("/tmp/test.txt")).await.unwrap();
        assert_eq!(content, b"modified");
        let lower_content = lower.read_file(Path::new("/tmp/test.txt")).await.unwrap();
        assert_eq!(lower_content, b"original");
    }

    // Deleting a lower file hides it via a whiteout; lower keeps the file.
    #[tokio::test]
    async fn test_delete_with_whiteout() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/test.txt"), b"hello")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower.clone());
        overlay
            .remove(Path::new("/tmp/test.txt"), false)
            .await
            .unwrap();
        assert!(!overlay.exists(Path::new("/tmp/test.txt")).await.unwrap());
        assert!(lower.exists(Path::new("/tmp/test.txt")).await.unwrap());
    }

    // Writing after a delete clears the whiteout and shows the new content.
    #[tokio::test]
    async fn test_recreate_after_delete() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/test.txt"), b"original")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        overlay
            .remove(Path::new("/tmp/test.txt"), false)
            .await
            .unwrap();
        assert!(!overlay.exists(Path::new("/tmp/test.txt")).await.unwrap());
        overlay
            .write_file(Path::new("/tmp/test.txt"), b"new content")
            .await
            .unwrap();
        assert!(overlay.exists(Path::new("/tmp/test.txt")).await.unwrap());
        let content = overlay.read_file(Path::new("/tmp/test.txt")).await.unwrap();
        assert_eq!(content, b"new content");
    }

    // chmod's copy-up must respect write limits and not partially copy.
    #[tokio::test]
    async fn test_chmod_cow_enforces_write_limits() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/big.txt"), &vec![b'x'; 5000])
            .await
            .unwrap();
        let limits = FsLimits::new().max_total_bytes(1000);
        let overlay = OverlayFs::with_limits(lower, limits);
        let result = overlay.chmod(Path::new("/tmp/big.txt"), 0o755).await;
        assert!(
            result.is_err(),
            "chmod CoW should fail when content exceeds write limits"
        );
        let err = result.unwrap_err().to_string();
        assert!(
            err.contains("filesystem full"),
            "expected 'filesystem full' error, got: {err}"
        );
        assert!(
            !overlay
                .upper
                .exists(Path::new("/tmp/big.txt"))
                .await
                .unwrap(),
            "file should not have been copied to upper layer"
        );
    }

    // Overriding a lower file counts once; bytes reflect the new size
    // ("lower data" = 10 bytes, "upper!" = 6 bytes, so -4).
    #[tokio::test]
    async fn test_usage_no_double_count_override() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/file.txt"), b"lower data")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        let usage_before = overlay.usage();
        overlay
            .write_file(Path::new("/tmp/file.txt"), b"upper!")
            .await
            .unwrap();
        let usage_after = overlay.usage();
        assert_eq!(
            usage_after.file_count, usage_before.file_count,
            "overridden file should not increase file_count"
        );
        assert_eq!(
            usage_after.total_bytes,
            usage_before.total_bytes - 4,
            "overridden file bytes should reflect upper size, not sum"
        );
    }

    // A whited-out lower file disappears from usage totals.
    #[tokio::test]
    async fn test_usage_no_double_count_whiteout() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/gone.txt"), b"12345")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower.clone());
        let usage_before = overlay.usage();
        overlay
            .remove(Path::new("/tmp/gone.txt"), false)
            .await
            .unwrap();
        let usage_after = overlay.usage();
        assert_eq!(
            usage_after.file_count,
            usage_before.file_count - 1,
            "whited-out file should not be counted"
        );
        assert_eq!(
            usage_after.total_bytes,
            usage_before.total_bytes - 5,
            "whited-out file bytes should be deducted"
        );
    }

    // Distinct files in each layer both count toward usage.
    #[tokio::test]
    async fn test_usage_unique_files_both_layers() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/lower.txt"), b"aaa")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        let usage_before = overlay.usage();
        overlay
            .write_file(Path::new("/tmp/upper.txt"), b"bbbbb")
            .await
            .unwrap();
        let usage_after = overlay.usage();
        assert_eq!(
            usage_after.file_count,
            usage_before.file_count + 1,
            "unique upper file adds one to count"
        );
        assert_eq!(
            usage_after.total_bytes,
            usage_before.total_bytes + 5,
            "unique upper file adds its bytes"
        );
    }

    // Delete then recreate: counted once, at the new (smaller) size
    // ("old data 10" = 11 bytes, "new" = 3 bytes, so -8).
    #[tokio::test]
    async fn test_usage_recreate_after_whiteout() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/file.txt"), b"old data 10")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        let usage_before = overlay.usage();
        overlay
            .remove(Path::new("/tmp/file.txt"), false)
            .await
            .unwrap();
        overlay
            .write_file(Path::new("/tmp/file.txt"), b"new")
            .await
            .unwrap();
        let usage_after = overlay.usage();
        assert_eq!(
            usage_after.file_count, usage_before.file_count,
            "recreated file counted once"
        );
        assert_eq!(
            usage_after.total_bytes,
            usage_before.total_bytes - 8,
            "recreated file uses new size"
        );
    }

    // read_dir merges entries from both layers.
    #[tokio::test]
    async fn test_read_dir_merged() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/lower.txt"), b"lower")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        overlay
            .write_file(Path::new("/tmp/upper.txt"), b"upper")
            .await
            .unwrap();
        let entries = overlay.read_dir(Path::new("/tmp")).await.unwrap();
        let names: Vec<_> = entries.iter().map(|e| &e.name).collect();
        assert!(names.contains(&&"lower.txt".to_string()));
        assert!(names.contains(&&"upper.txt".to_string()));
    }

    // read_dir on a regular file is an error.
    #[tokio::test]
    async fn test_read_dir_on_file_returns_error() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/file.txt"), b"data")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        let result = overlay.read_dir(Path::new("/tmp/file.txt")).await;
        assert!(result.is_err(), "read_dir on a file should return Err");
    }

    // An upper directory wins over a lower file of the same name.
    #[tokio::test]
    async fn test_read_dir_prefers_upper_directory_over_lower_file() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/work"), b"lower-file")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        overlay.mkdir(Path::new("/work"), false).await.unwrap();
        overlay
            .write_file(Path::new("/work/upper.txt"), b"upper")
            .await
            .unwrap();
        let entries = overlay.read_dir(Path::new("/work")).await.unwrap();
        let names: Vec<_> = entries.iter().map(|e| e.name.as_str()).collect();
        assert!(
            names.contains(&"upper.txt"),
            "upper directory should remain readable when lower has file"
        );
    }

    // Whiteouts deduct both bytes and file count from usage.
    #[tokio::test]
    async fn test_usage_deducts_whiteouts() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/deleted.txt"), &[b'X'; 50])
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        let before = overlay.usage();
        overlay
            .remove(Path::new("/tmp/deleted.txt"), false)
            .await
            .unwrap();
        let after = overlay.usage();
        assert_eq!(
            after.total_bytes,
            before.total_bytes - 50,
            "whited-out file bytes should be deducted"
        );
        assert_eq!(
            after.file_count,
            before.file_count - 1,
            "whited-out file should be deducted from count"
        );
    }

    // CoW append hides the lower original, so net growth is just the delta.
    #[tokio::test]
    async fn test_usage_no_double_count_append_cow() {
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/log.txt"), &[b'A'; 100])
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        let before = overlay.usage();
        overlay
            .append_file(Path::new("/tmp/log.txt"), &[b'B'; 10])
            .await
            .unwrap();
        let after = overlay.usage();
        assert_eq!(
            after.total_bytes,
            before.total_bytes + 10,
            "CoW append should add only new content bytes"
        );
        assert_eq!(after.file_count, before.file_count);
    }

    // Byte limits apply to the combined (lower + upper) usage.
    #[tokio::test]
    async fn test_write_limits_include_lower_layer() {
        use super::super::limits::FsLimits;
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/big.txt"), &[b'A'; 80])
            .await
            .unwrap();
        let limits = FsLimits::new().max_total_bytes(100);
        let overlay = OverlayFs::with_limits(lower, limits);
        let result = overlay
            .write_file(Path::new("/tmp/extra.txt"), &[b'B'; 30])
            .await;
        assert!(
            result.is_err(),
            "should reject write that exceeds combined limit"
        );
        let result = overlay
            .write_file(Path::new("/tmp/small.txt"), &[b'C'; 15])
            .await;
        assert!(result.is_ok(), "should allow write within combined limit");
    }

    // File-count limits also include lower-layer files.
    #[tokio::test]
    async fn test_file_count_limit_includes_lower() {
        use super::super::limits::FsLimits;
        let lower = Arc::new(InMemoryFs::new());
        lower
            .write_file(Path::new("/tmp/existing.txt"), b"data")
            .await
            .unwrap();
        // Probe overlay measures the baseline count contributed by `lower`.
        let temp_overlay = OverlayFs::new(lower.clone());
        let base_count = temp_overlay.usage().file_count;
        let limits = FsLimits::new().max_file_count(base_count + 1);
        let overlay = OverlayFs::with_limits(lower, limits);
        overlay
            .write_file(Path::new("/tmp/new1.txt"), b"ok")
            .await
            .unwrap();
        let result = overlay
            .write_file(Path::new("/tmp/new2.txt"), b"fail")
            .await;
        assert!(
            result.is_err(),
            "should reject when combined file count exceeds limit"
        );
    }

    // Recursive delete hides a directory and everything beneath it.
    #[tokio::test]
    async fn test_recursive_delete_whiteouts_children() {
        let lower = Arc::new(InMemoryFs::new());
        lower.mkdir(Path::new("/data"), true).await.unwrap();
        lower
            .write_file(Path::new("/data/a.txt"), b"aaa")
            .await
            .unwrap();
        lower
            .write_file(Path::new("/data/b.txt"), b"bbb")
            .await
            .unwrap();
        lower.mkdir(Path::new("/data/sub"), true).await.unwrap();
        lower
            .write_file(Path::new("/data/sub/c.txt"), b"ccc")
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        overlay.remove(Path::new("/data"), true).await.unwrap();
        assert!(
            !overlay.exists(Path::new("/data/a.txt")).await.unwrap(),
            "child file should be hidden after recursive delete"
        );
        assert!(
            !overlay.exists(Path::new("/data/sub/c.txt")).await.unwrap(),
            "nested child should be hidden after recursive delete"
        );
        assert!(
            !overlay.exists(Path::new("/data")).await.unwrap(),
            "directory itself should be hidden"
        );
        assert!(overlay.read_file(Path::new("/data/a.txt")).await.is_err());
    }

    // Recursive delete deducts every child's bytes and counts.
    #[tokio::test]
    async fn test_recursive_delete_deducts_all_children() {
        let lower = Arc::new(InMemoryFs::new());
        lower.mkdir(Path::new("/stuff"), true).await.unwrap();
        lower
            .write_file(Path::new("/stuff/x.txt"), &[b'X'; 100])
            .await
            .unwrap();
        lower
            .write_file(Path::new("/stuff/y.txt"), &[b'Y'; 200])
            .await
            .unwrap();
        let overlay = OverlayFs::new(lower);
        let before = overlay.usage();
        overlay.remove(Path::new("/stuff"), true).await.unwrap();
        let after = overlay.usage();
        assert_eq!(
            after.total_bytes,
            before.total_bytes - 300,
            "should deduct all child file bytes"
        );
        assert_eq!(
            after.file_count,
            before.file_count - 2,
            "should deduct all child file counts"
        );
    }

    // Children already whited out individually must not be deducted twice
    // by a later recursive delete of their parent.
    #[tokio::test]
    async fn test_recursive_delete_skips_already_hidden_children() {
        let lower = Arc::new(InMemoryFs::new());
        lower.mkdir(Path::new("/dir"), true).await.unwrap();
        lower
            .write_file(Path::new("/dir/a"), &[b'a'; 10])
            .await
            .unwrap();
        lower
            .write_file(Path::new("/dir/b"), &[b'b'; 20])
            .await
            .unwrap();
        lower
            .write_file(Path::new("/keep"), &[b'k'; 50])
            .await
            .unwrap();
        let probe = OverlayFs::new(lower.clone());
        let base = probe.usage();
        // Limit chosen so double-deduction would (wrongly) free enough room
        // for the final write to succeed.
        let limits = FsLimits::new()
            .max_total_bytes(base.total_bytes - 25)
            .max_file_count(base.file_count + 10)
            .max_dir_count(base.dir_count + 10);
        let overlay = OverlayFs::with_limits(lower, limits);
        overlay.remove(Path::new("/dir/a"), false).await.unwrap();
        overlay.remove(Path::new("/dir"), true).await.unwrap();
        let result = overlay.write_file(Path::new("/new.txt"), &[b'n'; 10]).await;
        assert!(
            result.is_err(),
            "write should fail: recursive delete must not undercount usage"
        );
    }

    // Overlay limits bind even when the lower layer itself is unlimited.
    #[tokio::test]
    async fn test_upper_is_inmemoryfs_limits_enforced() {
        let lower = Arc::new(InMemoryFs::with_limits(FsLimits::unlimited()));
        let probe = OverlayFs::new(lower.clone());
        let base = probe.usage();
        let limits = FsLimits::new()
            .max_total_bytes(base.total_bytes + 200)
            .max_file_count(base.file_count + 5)
            .max_dir_count(base.dir_count + 5);
        let overlay = OverlayFs::with_limits(lower, limits);
        overlay
            .write_file(Path::new("/tmp/a.txt"), &[b'a'; 100])
            .await
            .unwrap();
        let result = overlay
            .write_file(Path::new("/tmp/b.txt"), &[b'b'; 500])
            .await;
        assert!(result.is_err(), "should reject write exceeding total bytes");
        assert!(
            result.unwrap_err().to_string().contains("filesystem full"),
            "expected filesystem full error"
        );
    }

    // Directory-count limits are checked before mkdir.
    #[tokio::test]
    async fn test_dir_count_limit_enforced() {
        let lower = Arc::new(InMemoryFs::new());
        let base = OverlayFs::new(lower.clone());
        let base_dirs = base.usage().dir_count;
        let limits = FsLimits::new().max_dir_count(base_dirs + 1);
        let overlay = OverlayFs::with_limits(lower, limits);
        overlay.mkdir(Path::new("/newdir"), false).await.unwrap();
        let result = overlay.mkdir(Path::new("/another"), false).await;
        assert!(
            result.is_err(),
            "should reject mkdir exceeding dir count limit"
        );
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("too many directories"),
            "expected 'too many directories' error"
        );
    }

    // Usage stays consistent through write -> delete -> rewrite cycles.
    #[tokio::test]
    async fn test_usage_sync_write_delete_rewrite() {
        let lower = Arc::new(InMemoryFs::new());
        let limits = FsLimits::new().max_total_bytes(200).max_file_count(10);
        let overlay = OverlayFs::with_limits(lower, limits);
        let initial = overlay.usage();
        overlay
            .write_file(Path::new("/tmp/data.txt"), &[b'X'; 100])
            .await
            .unwrap();
        let after_write = overlay.usage();
        assert_eq!(after_write.total_bytes, initial.total_bytes + 100);
        assert_eq!(after_write.file_count, initial.file_count + 1);
        overlay
            .remove(Path::new("/tmp/data.txt"), false)
            .await
            .unwrap();
        let after_delete = overlay.usage();
        assert_eq!(
            after_delete.total_bytes, initial.total_bytes,
            "bytes should return to initial after delete"
        );
        assert_eq!(
            after_delete.file_count, initial.file_count,
            "file count should return to initial after delete"
        );
        overlay
            .write_file(Path::new("/tmp/data.txt"), &[b'Y'; 150])
            .await
            .unwrap();
        let after_rewrite = overlay.usage();
        assert_eq!(after_rewrite.total_bytes, initial.total_bytes + 150);
        assert_eq!(after_rewrite.file_count, initial.file_count + 1);
    }

    // Per-file and total-byte limits both trigger with an empty lower layer.
    #[tokio::test]
    async fn test_limits_enforced_with_zero_usage_lower() {
        let lower = Arc::new(InMemoryFs::with_limits(FsLimits::unlimited()));
        let probe = OverlayFs::new(lower.clone());
        let base = probe.usage();
        let limits = FsLimits::new()
            .max_total_bytes(base.total_bytes + 100)
            .max_file_count(base.file_count + 10)
            .max_file_size(50);
        let overlay = OverlayFs::with_limits(lower, limits);
        overlay
            .write_file(Path::new("/tmp/ok.txt"), &[b'a'; 40])
            .await
            .unwrap();
        let result = overlay
            .write_file(Path::new("/tmp/toobig.txt"), &[b'b'; 60])
            .await;
        assert!(
            result.is_err(),
            "should reject file exceeding max_file_size"
        );
        assert!(
            result.unwrap_err().to_string().contains("file too large"),
            "expected 'file too large' error"
        );
        overlay
            .write_file(Path::new("/tmp/second.txt"), &[b'c'; 49])
            .await
            .unwrap();
        let result = overlay
            .write_file(Path::new("/tmp/overflow.txt"), &[b'd'; 20])
            .await;
        assert!(
            result.is_err(),
            "should reject write that would exceed total bytes limit"
        );
        assert!(
            result.unwrap_err().to_string().contains("filesystem full"),
            "expected 'filesystem full' error"
        );
    }

    // Overwriting an upper file tracks the size delta, not the sum.
    #[tokio::test]
    async fn test_overwrite_upper_usage_stays_correct() {
        let lower = Arc::new(InMemoryFs::new());
        let overlay = OverlayFs::new(lower);
        overlay
            .write_file(Path::new("/tmp/file.txt"), &[b'A'; 50])
            .await
            .unwrap();
        let after_first = overlay.usage();
        overlay
            .write_file(Path::new("/tmp/file.txt"), &[b'B'; 80])
            .await
            .unwrap();
        let after_second = overlay.usage();
        assert_eq!(after_second.file_count, after_first.file_count);
        assert_eq!(
            after_second.total_bytes,
            after_first.total_bytes + 30,
            "overwrite should reflect size difference (80 - 50 = +30)"
        );
    }
}