use ::titor::*;
use tempfile::TempDir;
use proptest::prelude::*;
use std::fs;
use std::path::{Path, PathBuf};
use std::collections::BTreeMap;
use tracing::info;
/// A single filesystem mutation applied to the test workspace, as
/// interpreted by `apply_operation`.
#[derive(Debug, Clone)]
pub enum FileOperation {
    /// Write `content` to `path`, creating parent directories as needed.
    Create { path: PathBuf, content: Vec<u8> },
    /// Overwrite `path` with `content` only if the target already exists.
    Modify { path: PathBuf, content: Vec<u8> },
    /// Remove the file at `path` only if it exists.
    Delete { path: PathBuf },
}
fn file_operation_strategy() -> impl Strategy<Value = FileOperation> {
prop_oneof![
(path_strategy(), content_strategy()).prop_map(|(path, content)| {
FileOperation::Create { path, content }
}),
(path_strategy(), content_strategy()).prop_map(|(path, content)| {
FileOperation::Modify { path, content }
}),
path_strategy().prop_map(|path| FileOperation::Delete { path }),
]
}
/// Strategy producing a relative file path: zero to four nested
/// directory components followed by a filename.
///
/// `&str` regex literals already implement `Strategy<Value = String>`,
/// so the identity `.prop_map(|s| s)` wrappers the original carried on
/// every arm were redundant and have been removed.
fn path_strategy() -> impl Strategy<Value = PathBuf> {
    let dir_strategy = prop::collection::vec(
        prop_oneof![
            "[a-z]{1,10}",
            "dir[0-9]{1,3}",
        ],
        0..=4,
    );
    let filename_strategy = prop_oneof![
        "file[0-9]{1,3}\\.txt",
        "[a-z]{1,8}\\.(txt|rs|md)",
        "[a-z]{3,10}",
    ];
    (dir_strategy, filename_strategy).prop_map(|(dirs, filename)| {
        // PathBuf: FromIterator<impl AsRef<Path>> joins the components
        // with the platform separator.
        let mut path: PathBuf = dirs.into_iter().collect();
        path.push(filename);
        path
    })
}
fn content_strategy() -> impl Strategy<Value = Vec<u8>> {
prop_oneof![
"[a-zA-Z0-9 \n]{1,1000}".prop_map(|s| s.into_bytes()),
prop::collection::vec(any::<u8>(), 1..10000),
(any::<u8>(), 1..1000usize).prop_map(|(byte, count)| vec![byte; count]),
]
}
/// Apply a single [`FileOperation`] beneath `root`.
///
/// Semantics are deliberately lenient so randomly generated operation
/// sequences cannot fail spuriously:
/// - `Create` makes parent directories as needed and overwrites.
/// - `Modify` is a no-op unless the target already exists as a file.
/// - `Delete` is a no-op unless the target exists as a file.
///
/// The guards use `is_file()` rather than the original `exists()`:
/// `exists()` also matches directories, and calling `fs::write` or
/// `fs::remove_file` on a directory returns an error, which would
/// abort the whole property test via the callers' `unwrap()`.
fn apply_operation(root: &Path, op: &FileOperation) -> anyhow::Result<()> {
    match op {
        FileOperation::Create { path, content } => {
            let full_path = root.join(path);
            if let Some(parent) = full_path.parent() {
                fs::create_dir_all(parent)?;
            }
            fs::write(full_path, content)?;
        }
        FileOperation::Modify { path, content } => {
            let full_path = root.join(path);
            if full_path.is_file() {
                fs::write(full_path, content)?;
            }
        }
        FileOperation::Delete { path } => {
            let full_path = root.join(path);
            if full_path.is_file() {
                fs::remove_file(full_path)?;
            }
        }
    }
    Ok(())
}
/// Compute a deterministic SHA-256 digest of every regular file under
/// `root`, independent of filesystem traversal order.
///
/// The digest covers each file's root-relative path and raw contents,
/// NUL-separated. Entries are sorted explicitly by relative path before
/// hashing, so the per-directory `sort_by_file_name()` the original
/// walk used was redundant and has been dropped.
fn compute_directory_hash(root: &Path) -> anyhow::Result<String> {
    use sha2::{Sha256, Digest};
    use walkdir::WalkDir;

    // Collect (relative, absolute) pairs for every regular file.
    let mut entries = Vec::new();
    for entry in WalkDir::new(root) {
        let entry = entry?;
        if entry.file_type().is_file() {
            let path = entry.path();
            let relative = path.strip_prefix(root)?;
            entries.push((relative.to_path_buf(), path.to_path_buf()));
        }
    }
    // Authoritative ordering: lexicographic on the full relative path.
    entries.sort_by(|a, b| a.0.cmp(&b.0));

    let mut hasher = Sha256::new();
    for (relative, full) in entries {
        // NOTE(review): `to_string_lossy` renders the platform path
        // separator, so digests are only comparable on one OS — fine
        // here, where hashes are compared within a single test run.
        hasher.update(relative.to_string_lossy().as_bytes());
        hasher.update(b"\0");
        let content = fs::read(&full)?;
        hasher.update(&content);
        hasher.update(b"\0");
    }
    Ok(hex::encode(hasher.finalize()))
}
proptest! {
    #![proptest_config(ProptestConfig::with_cases(50))]

    // Restoring a checkpoint must reproduce exactly the directory state
    // that was captured, discarding files created after the checkpoint.
    #[test]
    fn checkpoint_restore_identity(
        operations in prop::collection::vec(file_operation_strategy(), 1..100)
    ) {
        let workspace = TempDir::new().unwrap();
        let storage = TempDir::new().unwrap();
        let mut engine = TitorBuilder::new()
            .compression_strategy(CompressionStrategy::Fast)
            .build(
                workspace.path().to_path_buf(),
                storage.path().to_path_buf(),
            )
            .unwrap();

        // Build up an arbitrary directory state.
        for op in &operations {
            apply_operation(workspace.path(), op).unwrap();
        }

        let snapshot = engine.checkpoint(Some("Test checkpoint".to_string())).unwrap();
        let hash_before = compute_directory_hash(workspace.path()).unwrap();

        // Dirty the tree with extra files that the restore must remove.
        for i in 0..10 {
            let extra = workspace.path().join(format!("extra_{}.txt", i));
            fs::write(&extra, format!("Extra content {}", i)).unwrap();
        }

        engine.restore(&snapshot.id).unwrap();
        let hash_after = compute_directory_hash(workspace.path()).unwrap();
        prop_assert_eq!(hash_before, hash_after);
    }
    // Every checkpoint must restore independently: the directory hash
    // recorded when checkpoint N was taken must be reproduced when N is
    // restored, regardless of checkpoints created later.
    #[test]
    fn checkpoint_independence(
        operation_sets in prop::collection::vec(
            prop::collection::vec(file_operation_strategy(), 1..20),
            2..10
        )
    ) {
        let temp_dir = TempDir::new().unwrap();
        let storage_dir = TempDir::new().unwrap();
        let mut titor = TitorBuilder::new()
            .compression_strategy(CompressionStrategy::Fast)
            .build(
                temp_dir.path().to_path_buf(),
                storage_dir.path().to_path_buf(),
            )
            .unwrap();
        // checkpoint id -> directory hash at creation time.
        let mut checkpoint_hashes = BTreeMap::new();
        for (idx, operations) in operation_sets.iter().enumerate() {
            for op in operations {
                apply_operation(temp_dir.path(), op).unwrap();
            }
            let checkpoint = titor.checkpoint(Some(format!("Checkpoint {}", idx))).unwrap();
            let hash = compute_directory_hash(temp_dir.path()).unwrap();
            checkpoint_hashes.insert(checkpoint.id.clone(), hash);
        }
        for (checkpoint_id, expected_hash) in &checkpoint_hashes {
            titor.restore(checkpoint_id).unwrap();
            let actual_hash = compute_directory_hash(temp_dir.path()).unwrap();
            // On mismatch, dump a directory listing to stderr before the
            // prop_assert below fails, to aid debugging/shrinking.
            if &actual_hash != expected_hash {
                eprintln!("Checkpoint restoration failed!");
                eprintln!("Checkpoint ID: {}", checkpoint_id);
                eprintln!("Expected hash: {}", expected_hash);
                eprintln!("Actual hash: {}", actual_hash);
                eprintln!("\nCurrent directory contents:");
                use walkdir::WalkDir;
                for entry in WalkDir::new(temp_dir.path()).sort_by_file_name() {
                    if let Ok(entry) = entry {
                        if entry.file_type().is_file() {
                            let path = entry.path();
                            if let Ok(relative) = path.strip_prefix(temp_dir.path()) {
                                if let Ok(content) = fs::read(path) {
                                    eprintln!("  {}: {} bytes", relative.display(), content.len());
                                }
                            }
                        }
                    }
                }
            }
            prop_assert_eq!(&actual_hash, expected_hash);
        }
    }
    // After an arbitrary interleaving of file edits, checkpoints and
    // restores, the stored timeline must still verify as internally
    // consistent.
    #[test]
    fn timeline_consistency(
        operations in prop::collection::vec(
            prop_oneof![
                file_operation_strategy().prop_map(|op| TimelineOp::FileOp(op)),
                Just(TimelineOp::Checkpoint),
                Just(TimelineOp::Restore),
            ],
            1..50
        )
    ) {
        let temp_dir = TempDir::new().unwrap();
        let storage_dir = TempDir::new().unwrap();
        let mut titor = TitorBuilder::new()
            .compression_strategy(CompressionStrategy::Fast)
            .build(
                temp_dir.path().to_path_buf(),
                storage_dir.path().to_path_buf(),
            )
            .unwrap();
        let mut checkpoints = Vec::new();
        for op in operations {
            match op {
                TimelineOp::FileOp(file_op) => {
                    // Best-effort: a failing file op (e.g. a path
                    // collision) must not abort the sequence.
                    apply_operation(temp_dir.path(), &file_op).ok();
                }
                TimelineOp::Checkpoint => {
                    if let Ok(checkpoint) = titor.checkpoint(None) {
                        checkpoints.push(checkpoint.id);
                    }
                }
                TimelineOp::Restore => {
                    if !checkpoints.is_empty() {
                        // Deterministically restore the middle checkpoint
                        // recorded so far; restore failures are ignored.
                        let idx = checkpoints.len() / 2;
                        titor.restore(&checkpoints[idx]).ok();
                    }
                }
            }
        }
        let timeline_verification = titor.verify_timeline().unwrap();
        prop_assert!(timeline_verification.is_valid());
    }
#[test]
fn compression_correctness(
content in prop::collection::vec(any::<u8>(), 0..100000),
should_compress in any::<bool>()
) {
let temp_dir = TempDir::new().unwrap();
let storage_dir = TempDir::new().unwrap();
let strategy = if should_compress {
CompressionStrategy::Fast
} else {
CompressionStrategy::None
};
let mut titor = TitorBuilder::new()
.compression_strategy(strategy)
.build(
temp_dir.path().to_path_buf(),
storage_dir.path().to_path_buf(),
)
.unwrap();
let file_path = temp_dir.path().join("test_file.bin");
fs::write(&file_path, &content).unwrap();
let checkpoint = titor.checkpoint(Some("Compression test".to_string())).unwrap();
fs::write(&file_path, b"modified").unwrap();
titor.restore(&checkpoint.id).unwrap();
let restored_content = fs::read(&file_path).unwrap();
prop_assert_eq!(restored_content, content);
}
#[test]
fn garbage_collection_safety(
checkpoint_count in 2..20usize,
files_per_checkpoint in 1..10usize
) {
let temp_dir = TempDir::new().unwrap();
let storage_dir = TempDir::new().unwrap();
let mut titor = TitorBuilder::new()
.build(
temp_dir.path().to_path_buf(),
storage_dir.path().to_path_buf(),
)
.unwrap();
let mut checkpoint_ids = Vec::new();
for i in 0..checkpoint_count {
for j in 0..files_per_checkpoint {
let path = temp_dir.path().join(format!("file_{}_{}.txt", i, j));
fs::write(&path, format!("Content {} {}", i, j)).unwrap();
}
let checkpoint = titor.checkpoint(Some(format!("Checkpoint {}", i))).unwrap();
checkpoint_ids.push(checkpoint.id);
}
let gc_stats = titor.gc().unwrap();
info!("GC collected {} objects", gc_stats.objects_deleted);
for checkpoint_id in &checkpoint_ids {
titor.restore(checkpoint_id).unwrap();
for j in 0..files_per_checkpoint {
let path = temp_dir.path().join(format!("file_{}_{}.txt",
checkpoint_ids.iter().position(|id| id == checkpoint_id).unwrap(), j));
prop_assert!(path.exists());
}
}
}
#[test]
fn merkle_tree_verification(
file_count in 1..100usize,
corrupt_file in any::<bool>()
) {
let temp_dir = TempDir::new().unwrap();
let storage_dir = TempDir::new().unwrap();
let mut titor = TitorBuilder::new()
.build(
temp_dir.path().to_path_buf(),
storage_dir.path().to_path_buf(),
)
.unwrap();
for i in 0..file_count {
let path = temp_dir.path().join(format!("file_{}.txt", i));
fs::write(&path, format!("Content {}", i)).unwrap();
}
let checkpoint = titor.checkpoint(Some("Merkle test".to_string())).unwrap();
let original_merkle = checkpoint.content_merkle_root.clone();
if corrupt_file && file_count > 0 {
let path = temp_dir.path().join("file_0.txt");
fs::write(&path, "corrupted content").unwrap();
let new_merkle = titor.compute_current_merkle_root().unwrap();
prop_assert_ne!(original_merkle, new_merkle);
} else {
let new_merkle = titor.compute_current_merkle_root().unwrap();
prop_assert_eq!(original_merkle, new_merkle);
}
}
}
/// One step in a randomly generated timeline scenario.
#[derive(Debug, Clone)]
enum TimelineOp {
    /// Apply a filesystem mutation (best-effort; failures ignored).
    FileOp(FileOperation),
    /// Take a checkpoint of the current workspace state.
    Checkpoint,
    /// Restore the middle checkpoint recorded so far, if any.
    Restore,
}
#[cfg(test)]
mod edge_case_tests {
    use super::*;

    proptest! {
        // Empty directories captured in a checkpoint must come back
        // empty after a restore removes files that were added later.
        #[test]
        fn empty_directory_handling(
            dir_count in 0..20usize
        ) {
            let workspace = TempDir::new().unwrap();
            let storage = TempDir::new().unwrap();
            let mut engine = TitorBuilder::new()
                .build(
                    workspace.path().to_path_buf(),
                    storage.path().to_path_buf(),
                )
                .unwrap();

            for i in 0..dir_count {
                fs::create_dir_all(workspace.path().join(format!("empty_dir_{}", i))).unwrap();
            }

            let checkpoint = engine.checkpoint(Some("Empty dirs".to_string())).unwrap();

            // Populate every directory so the restore has work to undo.
            for i in 0..dir_count {
                let file_path = workspace
                    .path()
                    .join(format!("empty_dir_{}", i))
                    .join("new_file.txt");
                fs::write(&file_path, "content").unwrap();
            }

            engine.restore(&checkpoint.id).unwrap();

            // Any directory that survived the restore must be empty.
            for i in 0..dir_count {
                let dir_path = workspace.path().join(format!("empty_dir_{}", i));
                if dir_path.exists() {
                    let is_empty = fs::read_dir(&dir_path).unwrap().next().is_none();
                    prop_assert!(is_empty);
                }
            }
        }
        // Filenames built from punctuation characters must survive a
        // checkpoint/restore round-trip. Characters this platform's
        // filesystem rejects are skipped by bailing out when the
        // initial write fails.
        #[test]
        fn special_character_filenames(
            special_chars in prop::collection::vec(
                prop_oneof![
                    Just(' '),
                    Just('!'),
                    Just('#'),
                    Just('$'),
                    Just('%'),
                    Just('&'),
                    Just('\''),
                    Just('('),
                    Just(')'),
                    Just('+'),
                    Just(','),
                    Just('-'),
                    Just('.'),
                    Just('='),
                    Just('@'),
                    Just('['),
                    Just(']'),
                    Just('^'),
                    Just('_'),
                    Just('`'),
                    Just('{'),
                    Just('}'),
                    Just('~'),
                ],
                1..10
            )
        ) {
            let temp_dir = TempDir::new().unwrap();
            let storage_dir = TempDir::new().unwrap();
            let mut titor = TitorBuilder::new()
                .build(
                    temp_dir.path().to_path_buf(),
                    storage_dir.path().to_path_buf(),
                )
                .unwrap();
            let filename: String = special_chars.into_iter()
                .collect::<String>() + "_file.txt";
            let file_path = temp_dir.path().join(&filename);
            // The filesystem rejected this name (platform-specific);
            // nothing to test, so pass trivially.
            if let Err(_) = fs::write(&file_path, "test content") {
                return Ok(());
            }
            let checkpoint = titor.checkpoint(Some("Special chars".to_string())).unwrap();
            fs::remove_file(&file_path).unwrap();
            titor.restore(&checkpoint.id).unwrap();
            let content = fs::read_to_string(&file_path).unwrap();
            prop_assert_eq!(content, "test content");
        }
#[test]
fn max_path_length(
depth in 10..50usize
) {
let temp_dir = TempDir::new().unwrap();
let storage_dir = TempDir::new().unwrap();
let mut titor = TitorBuilder::new()
.build(
temp_dir.path().to_path_buf(),
storage_dir.path().to_path_buf(),
)
.unwrap();
let mut current_path = temp_dir.path().to_path_buf();
for i in 0..depth {
current_path = current_path.join(format!("d{}", i));
if let Err(_) = fs::create_dir(¤t_path) {
break;
}
}
let file_path = current_path.join("deep.txt");
match fs::write(&file_path, "deep content") {
Ok(_) => {
let checkpoint = titor.checkpoint(Some("Deep path".to_string())).unwrap();
fs::write(&file_path, "modified").unwrap();
titor.restore(&checkpoint.id).unwrap();
let content = fs::read_to_string(&file_path).unwrap();
prop_assert_eq!(content, "deep content");
}
Err(_) => {
}
}
}
}
}