use mismall::{compress::CompressionBuilder, compress_stream};
use std::io::{Cursor, Write};
use std::time::Instant;
use tempfile::NamedTempFile;
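
// Compresses 1-50 MB files with chunk sizes from 64 KB to 16 MB, logging
// throughput and asserting the reported ratio and original size are sane.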
#[test]
fn test_large_files_various_chunks() {
println!("Testing large files with various chunk sizes...");
let file_sizes = vec![
    (1024 * 1024, "1MB"),
    (10 * 1024 * 1024, "10MB"),
    (50 * 1024 * 1024, "50MB"),
];
let chunk_sizes = vec![
(64 * 1024, "64KB"), (256 * 1024, "256KB"), (1024 * 1024, "1MB"), (16 * 1024 * 1024, "16MB"), ];
for (file_size, size_label) in file_sizes {
println!("\nTesting {} file:", size_label);
let mut test_file = NamedTempFile::new().unwrap();
let data: Vec<u8> = (0..file_size).map(|i| (i % 256) as u8).collect();
test_file.write_all(&data).unwrap();
test_file.flush().unwrap();
for (chunk_size, chunk_label) in &chunk_sizes {
let start_time = Instant::now();
let result = CompressionBuilder::new(test_file.path().to_str().unwrap())
.with_chunk_size(*chunk_size)
.compress();
match result {
Ok(compression_result) => {
let elapsed = start_time.elapsed();
let throughput_mb_per_sec =
(file_size as f64) / (1024.0 * 1024.0) / elapsed.as_secs_f64();
println!(
" {} chunks: {:.2}s ({:.1} MB/s) - {:.1}% ratio",
chunk_label,
elapsed.as_secs_f64(),
throughput_mb_per_sec,
compression_result.compression_ratio
);
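// The ratio is reported as a percentage: expect at least 10% for this
// repetitive pattern, and never more than 100%.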
assert!(
compression_result.compression_ratio >= 10.0,
"Compression ratio too low: {:.1}%",
compression_result.compression_ratio
);
assert!(
compression_result.compression_ratio <= 100.0,
"Compression ratio impossible: {:.1}%",
compression_result.compression_ratio
);
assert_eq!(
compression_result.original_size, file_size as u64,
"Original size mismatch: expected {}, got {}",
file_size, compression_result.original_size
);
}
Err(e) => {
println!(" {} chunks: FAILED - {:?}", chunk_label, e);
panic!(
"Compression failed for {} file with {} chunks: {:?}",
size_label, chunk_label, e
);
}
}
}
}
println!("\n✓ Large file tests completed successfully!");
}
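
// Streams 5 MB and 20 MB in-memory buffers through compress_stream, checking
// the reported original size, the size of the written output, and throughput.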
#[test]
fn test_streaming_large_data() {
println!("\nTesting streaming compression of large data...");
let data_sizes = vec![(5 * 1024 * 1024, "5MB"), (20 * 1024 * 1024, "20MB")];
for (data_size, size_label) in data_sizes {
println!("Testing {} streaming data:", size_label);
let data: Vec<u8> = (0..data_size)
.map(|i| {
if i % 4 == 0 {
(i % 26 + 65) as u8
}
else if i % 4 == 1 {
32
}
else if i % 4 == 2 {
((i * 13) % 128) as u8
}
else {
((i * 17) % 256) as u8
} })
.collect();
let start_time = Instant::now();
let mut reader = Cursor::new(&data);
let mut writer = Cursor::new(Vec::new());
let result = compress_stream(
&mut reader,
"stream_test.bin",
None,
&mut writer,
16 * 1024 * 1024,
);
match result {
Ok(compression_result) => {
let elapsed = start_time.elapsed();
let compressed_data = writer.into_inner();
let throughput_mb_per_sec =
(data_size as f64) / (1024.0 * 1024.0) / elapsed.as_secs_f64();
println!(
" {:.2}s ({:.1} MB/s) - {:.1}% ratio ({} -> {} bytes)",
elapsed.as_secs_f64(),
throughput_mb_per_sec,
compression_result.compression_ratio,
data_size,
compressed_data.len()
);
assert_eq!(compression_result.original_size, data_size as u64);
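// Sanity-check the raw output size: allow at most 20% expansion over the input.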
let compression_efficiency = (compressed_data.len() as f64) / (data_size as f64);
assert!(
compression_efficiency <= 1.2,
"Compression efficiency poor: {:.1}%",
compression_efficiency * 100.0
);
assert!(
throughput_mb_per_sec >= 1.0,
"Streaming compression too slow: {:.1} MB/s",
throughput_mb_per_sec
);
}
Err(e) => {
panic!("Streaming compression failed for {}: {:?}", size_label, e);
}
}
}
println!("✓ Streaming large data tests completed successfully!");
}
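
// Compresses a 25 MB file with the smallest chunk size so many chunks are
// processed, checking that runtime stays bounded and the ratio holds up.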
#[test]
fn test_memory_efficiency() {
println!("\nTesting memory efficiency with minimal chunks...");
let file_size = 25 * 1024 * 1024;
let mut test_file = NamedTempFile::new().unwrap();
let data: Vec<u8> = (0..file_size).map(|i| (i % 128) as u8).collect();
test_file.write_all(&data).unwrap();
test_file.flush().unwrap();
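// With 64 KB chunks, the 25 MB file is processed in roughly 400 chunks.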
let min_chunk_size = 64 * 1024;
let start_time = Instant::now();
let result = CompressionBuilder::new(test_file.path().to_str().unwrap())
.with_chunk_size(min_chunk_size)
.compress();
match result {
Ok(compression_result) => {
let elapsed = start_time.elapsed();
let chunks_processed = (file_size as f64) / (min_chunk_size as f64);
println!(" Memory efficiency test:");
println!(" File size: {} MB", file_size / (1024 * 1024));
println!(" Chunk size: {} KB", min_chunk_size / 1024);
println!(" Chunks processed: {:.0}", chunks_processed);
println!(" Processing time: {:.2}s", elapsed.as_secs_f64());
println!(
" Compression ratio: {:.1}%",
compression_result.compression_ratio
);
assert!(
elapsed.as_secs() <= 30,
"Processing took too long: {:.1}s",
elapsed.as_secs_f64()
);
assert!(
compression_result.compression_ratio >= 20.0,
"Poor compression with minimal chunks"
);
println!(" ✓ Memory efficiency verified - bounded memory usage with good performance");
}
Err(e) => {
panic!("Memory efficiency test failed: {:?}", e);
}
}
}
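
// Compresses a 10 MB constant-byte file at boundary chunk sizes, from the
// 64 KB minimum up to a 1 GB maximum.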
#[test]
fn test_chunk_size_boundaries() {
println!("\nTesting chunk size boundary conditions...");
let test_size = 10 * 1024 * 1024;
let mut test_file = NamedTempFile::new().unwrap();
let data = vec![42u8; test_size];
test_file.write_all(&data).unwrap();
test_file.flush().unwrap();
let boundary_sizes = vec![
(64 * 1024, "minimum"), (1024 * 1024, "common"), (16 * 1024 * 1024, "default"), (1024 * 1024 * 1024, "maximum"), ];
for (chunk_size, label) in boundary_sizes {
println!(" Testing {} chunk size ({} KB):", label, chunk_size / 1024);
let result = CompressionBuilder::new(test_file.path().to_str().unwrap())
.with_chunk_size(chunk_size)
.compress();
match result {
Ok(compression_result) => {
println!(
" ✓ Success - {:.1}% ratio",
compression_result.compression_ratio
);
assert_eq!(compression_result.original_size, test_size as u64);
}
Err(e) => {
panic!("Failed with {} chunk size: {:?}", label, e);
}
}
}
println!("✓ Chunk size boundary tests completed!");
}