use clock_hash::{ClockHasher, clockhash256};
#[test]
fn test_empty_updates() {
    // A stream with zero-length updates interspersed must hash identically
    // to the same payload fed without them.
    let mut with_empties = ClockHasher::new();
    let mut plain = ClockHasher::new();

    for chunk in [&b"hello"[..], b"", b"world", b""] {
        with_empties.update(chunk);
    }
    for chunk in [&b"hello"[..], b"world"] {
        plain.update(chunk);
    }

    assert_eq!(
        with_empties.finalize(),
        plain.finalize(),
        "Empty updates should not affect the hash"
    );
}
#[test]
fn test_micro_chunking() {
    // Feeding the message one byte per update must match the one-shot digest.
    let data = b"This is a test message for micro-chunking";
    let expected = clockhash256(data);

    let mut hasher = ClockHasher::new();
    data.iter()
        .for_each(|byte| hasher.update(std::slice::from_ref(byte)));

    assert_eq!(
        expected,
        hasher.finalize(),
        "Micro-chunking should produce same result"
    );
}
#[test]
fn test_alternating_empty_data() {
    // Empty updates before, between, and after real chunks must be no-ops.
    let mut interleaved = ClockHasher::new();
    let mut contiguous = ClockHasher::new();

    interleaved.update(b"");
    for chunk in [&b"chunk1"[..], b"chunk2", b"chunk3"] {
        interleaved.update(chunk);
        interleaved.update(b"");
    }
    contiguous.update(b"chunk1chunk2chunk3");

    assert_eq!(
        interleaved.finalize(),
        contiguous.finalize(),
        "Alternating empty/data updates should not affect result"
    );
}
#[test]
fn test_large_streaming_data() {
    // Stream 1 MiB in 64 KiB chunks and compare with hashing it in one shot.
    // Because the chunk size is a multiple of 256, slicing the full buffer
    // reproduces the exact per-chunk byte pattern of a flat `i % 256` fill.
    const CHUNK: usize = 64 * 1024;
    const TOTAL: usize = 1024 * 1024;

    let full_data: Vec<u8> = (0..TOTAL).map(|i| (i % 256) as u8).collect();

    let mut hasher = ClockHasher::new();
    for chunk in full_data.chunks(CHUNK) {
        hasher.update(chunk);
    }

    assert_eq!(
        hasher.finalize(),
        clockhash256(&full_data),
        "Large streaming should match one-shot"
    );
}
#[test]
fn test_block_boundary_streaming() {
    // Chunk sizes picked around suspected internal block boundaries
    // (127/128/129, 255/256/257, 383/384/385) must all agree with one-shot.
    for &size in &[1usize, 127, 128, 129, 255, 256, 257, 383, 384, 385] {
        let data: Vec<u8> = (0..size * 3).map(|i| (i % 256) as u8).collect();

        let mut hasher = ClockHasher::new();
        data.chunks(size).for_each(|chunk| hasher.update(chunk));

        assert_eq!(
            hasher.finalize(),
            clockhash256(&data),
            "Chunking with size {} should match one-shot",
            size
        );
    }
}
#[test]
fn test_many_small_updates() {
    // One thousand two-byte updates must equal hashing the concatenation.
    let mut hasher = ClockHasher::new();
    let mut all_bytes = Vec::with_capacity(2000);

    for i in 0..1000 {
        let pair = [(i % 256) as u8, ((i + 1) % 256) as u8];
        hasher.update(&pair);
        all_bytes.extend_from_slice(&pair);
    }

    assert_eq!(
        hasher.finalize(),
        clockhash256(&all_bytes),
        "Many small updates should match reference"
    );
}
#[test]
fn test_exact_block_filling() {
    // Ten updates of exactly 128 bytes each: the stream always lands on a
    // block boundary, never leaving a partial buffer between updates.
    let reference: Vec<u8> = (0..10 * 128).map(|i| (i % 256) as u8).collect();

    let mut hasher = ClockHasher::new();
    for block in reference.chunks(128) {
        hasher.update(block);
    }

    assert_eq!(
        hasher.finalize(),
        clockhash256(&reference),
        "Exact block filling should work correctly"
    );
}
#[test]
fn test_partial_final_block() {
    // Five full 128-byte blocks followed by a 73-byte tail: the trailing
    // partial block must be folded in correctly at finalization.
    let full_block = [0xAAu8; 128];
    let tail = [0xBBu8; 73];

    let mut hasher = ClockHasher::new();
    let mut reference = Vec::with_capacity(5 * 128 + 73);
    for _ in 0..5 {
        hasher.update(&full_block);
        reference.extend_from_slice(&full_block);
    }
    hasher.update(&tail);
    reference.extend_from_slice(&tail);

    assert_eq!(
        hasher.finalize(),
        clockhash256(&reference),
        "Partial final block should be handled correctly"
    );
}
#[test]
fn test_hasher_consumed_after_finalize() {
    // NOTE(review): "consumed after finalize" is a move-semantics property
    // that can only be enforced at compile time (reusing the hasher here
    // would not build). At runtime this test merely exercises the
    // consuming `finalize` call on a hasher that has received data.
    let mut hasher = ClockHasher::new();
    hasher.update(b"test data");
    let _ = hasher.finalize();
}
#[test]
fn test_zero_length_streaming() {
    // Empty slices before, between, and after the payload are ignored.
    let chunks: [&[u8]; 4] = [&[], &[], b"data", &[]];

    let mut hasher = ClockHasher::new();
    for chunk in chunks {
        hasher.update(chunk);
    }

    assert_eq!(
        hasher.finalize(),
        clockhash256(b"data"),
        "Zero-length updates should be ignored"
    );
}
#[test]
fn test_long_streaming_session() {
    // One hundred updates cycling through assorted chunk sizes, with a
    // wrapping byte counter as the payload generator.
    let sizes = [1usize, 3, 7, 13, 31, 47, 64, 97, 128, 256];
    let mut hasher = ClockHasher::new();
    let mut everything = Vec::new();
    let mut next_byte = 0u8;

    for &size in sizes.iter().cycle().take(100) {
        let chunk: Vec<u8> = std::iter::repeat_with(|| {
            let value = next_byte;
            next_byte = next_byte.wrapping_add(1);
            value
        })
        .take(size)
        .collect();

        hasher.update(&chunk);
        everything.extend_from_slice(&chunk);
    }

    assert_eq!(
        hasher.finalize(),
        clockhash256(&everything),
        "Long streaming session should match reference"
    );
}
#[test]
fn test_buffer_edge_cases() {
    // Chunk patterns chosen to stress internal buffering: tiny-then-large,
    // large-then-tiny, exact fills, overfills, and many one-byte pieces.
    let cases: Vec<(&str, Vec<Vec<u8>>)> = vec![
        ("single_byte_then_large", vec![vec![0x42], vec![0xFF; 200]]),
        ("large_then_single_byte", vec![vec![0xAA; 200], vec![0x99]]),
        ("exact_buffer_fill", vec![vec![0x11; 128], vec![0x22; 128]]),
        ("overfill_buffer", vec![vec![0x33; 150], vec![0x44; 100]]),
        ("many_small_chunks", (0..50).map(|i| vec![i as u8]).collect()),
    ];

    for (label, chunks) in cases {
        let mut hasher = ClockHasher::new();
        let mut joined = Vec::new();
        for chunk in &chunks {
            hasher.update(chunk);
            joined.extend_from_slice(chunk);
        }

        assert_eq!(
            hasher.finalize(),
            clockhash256(&joined),
            "Buffer edge case '{}' failed",
            label
        );
    }
}
#[test]
fn test_hasher_default_implementation() {
    // `Default` must agree with `new()` for the empty message.
    let from_new = ClockHasher::new().finalize();
    let from_default = ClockHasher::default().finalize();

    assert_eq!(
        from_new,
        from_default,
        "Default and new() should produce identical empty hashes"
    );
}
#[test]
fn test_streaming_pause_resume() {
    // Splitting the message at every possible position and resuming must
    // not change the digest.
    //
    // Fix: the original rebuilt and re-finalized the whole-message hash on
    // every loop iteration (~73 redundant full hashes); the reference
    // digest is loop-invariant, so compute it once up front.
    let data = b"This is a long message that will be streamed and paused at various points";

    let mut whole = ClockHasher::new();
    whole.update(data);
    let expected = whole.finalize();

    for pause_point in 1..data.len() {
        let (head, tail) = data.split_at(pause_point);
        let mut resumed = ClockHasher::new();
        resumed.update(head);
        resumed.update(tail);

        assert_eq!(
            expected,
            resumed.finalize(),
            "Pause and resume at point {} should produce same result",
            pause_point
        );
    }
}