extern crate std;
#[allow(unused_imports)]
use alloc::vec;
use alloc::vec::Vec;
use crate::decoding::StreamingDecoder;
use crate::encoding::{CompressionLevel, FrameCompressor, compress_to_vec};
use crate::io::Read;
/// Deterministic pseudo-random bytes from a 64-bit LCG (PCG-style multiplier
/// and increment constants).
///
/// The same `seed`/`len` pair always produces the same sequence, so fixtures
/// are reproducible across runs without pulling in an RNG dependency.
fn generate_data(seed: u64, len: usize) -> Vec<u8> {
    let mut state = seed;
    (0..len)
        .map(|_| {
            state = state
                .wrapping_mul(6364136223846793005)
                .wrapping_add(1442695040888963407);
            // Take bits from the upper half of the state; the low bits of an
            // LCG are the weakest.
            (state >> 33) as u8
        })
        .collect()
}
/// Build a `len`-byte buffer by endlessly cycling a short pseudo-random
/// pattern. The pattern length is derived from the seed (1..=16), so the
/// output is highly compressible but still varies between seeds.
fn generate_compressible(seed: u64, len: usize) -> Vec<u8> {
    let pattern_len = ((seed % 16) + 1) as usize;
    let pattern = generate_data(seed, pattern_len);
    pattern.iter().copied().cycle().take(len).collect()
}
/// Compress `data` with the one-shot API at the fastest level, decode the
/// frame back through `StreamingDecoder`, and return the recovered bytes.
fn roundtrip_simple(data: &[u8]) -> Vec<u8> {
    let frame = compress_to_vec(data, CompressionLevel::Fastest);
    let mut recovered = Vec::new();
    StreamingDecoder::new(frame.as_slice())
        .unwrap()
        .read_to_end(&mut recovered)
        .unwrap();
    recovered
}
/// Compress `data` through the streaming `FrameCompressor` API at the
/// fastest level and return the produced frame bytes.
fn compress_streaming(data: &[u8]) -> Vec<u8> {
    let mut frame = Vec::new();
    {
        let mut compressor = FrameCompressor::new(CompressionLevel::Fastest);
        compressor.set_source(data);
        compressor.set_drain(&mut frame);
        compressor.compress();
    }
    frame
}
/// Round-trip `data` through the streaming compressor and the streaming
/// decoder, returning the decoded bytes.
fn roundtrip_streaming(data: &[u8]) -> Vec<u8> {
    let frame = compress_streaming(data);
    let mut recovered = Vec::new();
    StreamingDecoder::new(frame.as_slice())
        .unwrap()
        .read_to_end(&mut recovered)
        .unwrap();
    recovered
}
/// Like `roundtrip_simple`, but compresses at `CompressionLevel::Default`
/// instead of `Fastest`.
fn roundtrip_default(data: &[u8]) -> Vec<u8> {
    let frame = compress_to_vec(data, CompressionLevel::Default);
    let mut recovered = Vec::new();
    StreamingDecoder::new(frame.as_slice())
        .unwrap()
        .read_to_end(&mut recovered)
        .unwrap();
    recovered
}
/// Deterministic bytes restricted to `0..alphabet_size`, yielding a small,
/// skewed symbol alphabet that exercises Huffman-coded literal sections.
///
/// # Panics
/// Panics if `alphabet_size` is zero (the modulo would divide by zero).
fn generate_huffman_friendly(seed: u64, len: usize, alphabet_size: u8) -> Vec<u8> {
    assert!(alphabet_size > 0, "alphabet_size must be non-zero");
    let mut state = seed;
    (0..len)
        .map(|_| {
            state = state
                .wrapping_mul(6364136223846793005)
                .wrapping_add(1442695040888963407);
            ((state >> 33) as u8) % alphabet_size
        })
        .collect()
}
/// Repeat `pattern` `chunks` times, separating repetitions with a 2-byte
/// little-endian counter so every chunk is unique while the pattern recurs
/// at a fixed stride — a good shape for exercising repeat-offset matching.
fn repeat_offset_fixture(pattern: &[u8], chunks: usize) -> Vec<u8> {
    let mut out = Vec::with_capacity(chunks * (pattern.len() + 2));
    for counter in 0..chunks {
        out.extend_from_slice(pattern);
        out.extend_from_slice(&(counter as u16).to_le_bytes());
    }
    out
}
/// 1000 pseudo-random buffers with lengths cycling through 0..64 KiB must
/// survive a one-shot compress / streaming-decode round trip unchanged.
#[test]
fn roundtrip_random_data_1000_iterations() {
    for (i, len) in (0..1000u64).map(|i| (i, (i * 67 % 65536) as usize)) {
        let data = generate_data(i, len);
        let result = roundtrip_simple(&data);
        assert_eq!(
            data, result,
            "Simple API roundtrip failed at iteration {i}, len={len}"
        );
    }
}
/// 1000 highly compressible buffers (short repeating patterns) must survive
/// a one-shot compress / streaming-decode round trip unchanged.
#[test]
fn roundtrip_compressible_data_1000_iterations() {
    for (i, len) in (0..1000u64).map(|i| (i, (i * 131 % 65536) as usize)) {
        let data = generate_compressible(i, len);
        let result = roundtrip_simple(&data);
        assert_eq!(
            data, result,
            "Compressible roundtrip failed at iteration {i}, len={len}"
        );
    }
}
/// 1000 pseudo-random buffers must survive a round trip through the
/// streaming compressor API (distinct seed offset from the simple-API test).
#[test]
fn roundtrip_streaming_api_1000_iterations() {
    for (i, len) in (0..1000u64).map(|i| (i, (i * 97 % 32768) as usize)) {
        let data = generate_data(i.wrapping_add(0xDEAD), len);
        let result = roundtrip_streaming(&data);
        assert_eq!(
            data, result,
            "Streaming API roundtrip failed at iteration {i}, len={len}"
        );
    }
}
/// Degenerate and extreme inputs: empty, single byte, constant runs at
/// 100 KB, a cycling 0..=255 ramp, and a 1 MB single-byte run.
#[test]
fn roundtrip_edge_cases() {
    // Smallest possible inputs.
    assert_eq!(roundtrip_simple(&[]), Vec::<u8>::new());
    assert_eq!(roundtrip_simple(&[0x42]), vec![0x42]);
    // All-zeros, then all-ones, at 100 KB each.
    for byte in [0u8, 0xFF] {
        let run = vec![byte; 100_000];
        assert_eq!(roundtrip_simple(&run), run);
    }
    // A repeating 0..=255 ramp.
    let ramp: Vec<u8> = (0..=255u8).cycle().take(100_000).collect();
    assert_eq!(roundtrip_simple(&ramp), ramp);
    // A 1 MB single-byte run (RLE-friendly).
    let big_run = vec![0xABu8; 1_000_000];
    assert_eq!(roundtrip_simple(&big_run), big_run);
}
/// Literal-heavy inputs at sizes straddling power-of-two boundaries
/// (1025, 16383/16384, 64 KiB, 128 KiB) — presumably chosen around the
/// literal-section size encodings; each must round-trip through the simple
/// API, and a subset through the streaming API as well.
#[test]
fn roundtrip_large_literals() {
    // Generate, check the simple-API round trip, and hand the data back so
    // callers can additionally check the streaming path.
    let check_simple = |seed, len, alphabet| {
        let data = generate_huffman_friendly(seed, len, alphabet);
        assert_eq!(roundtrip_simple(&data), data);
        data
    };
    let data = check_simple(42, 1025, 16);
    assert_eq!(roundtrip_streaming(&data), data);
    check_simple(43, 16383, 32);
    let data = check_simple(44, 16384, 32);
    assert_eq!(roundtrip_streaming(&data), data);
    check_simple(45, 65536, 64);
    let data = check_simple(46, 128 * 1024, 64);
    assert_eq!(roundtrip_streaming(&data), data);
}
/// A 512 KiB literal-heavy input — large enough that the frame should span
/// multiple blocks — must round-trip through both compression APIs.
#[test]
fn roundtrip_multi_block_large_literals() {
    let input = generate_huffman_friendly(100, 512 * 1024, 48);
    assert_eq!(roundtrip_simple(&input), input);
    assert_eq!(roundtrip_streaming(&input), input);
}
/// Input built to recur at a fixed stride (repeat-offset friendly) must
/// round-trip through both the simple and streaming APIs.
#[test]
fn roundtrip_repeat_offsets() {
    let data = repeat_offset_fixture(b"ABCDE12345", 10_000);
    assert_eq!(
        data,
        roundtrip_simple(&data),
        "Repeat offset roundtrip failed"
    );
    assert_eq!(
        data,
        roundtrip_streaming(&data),
        "Repeat offset streaming roundtrip failed"
    );
}
/// Sanity check that match finding does something useful: a repetitive
/// buffer must compress to fewer bytes than equally sized random data.
#[test]
fn repetitive_data_compresses_better_than_random() {
    let repetitive = repeat_offset_fixture(b"ABCDE12345", 5_000);
    let random = generate_data(999, repetitive.len());
    let repetitive_size = compress_to_vec(&repetitive[..], CompressionLevel::Fastest).len();
    let random_size = compress_to_vec(&random[..], CompressionLevel::Fastest).len();
    assert!(
        repetitive_size < random_size,
        "Repetitive input should compress better than random input. \
        repetitive={} bytes, random={} bytes",
        repetitive_size,
        random_size
    );
}
// A 512 KiB repetitive buffer must round-trip through both APIs, and
// compressing it as one frame must beat the sum of compressing independent
// 128 KiB chunks — i.e. state carried across blocks actually pays off.
#[test]
fn roundtrip_multi_block_repeat_offsets() {
// Each fixture chunk is 10 pattern bytes + 2 counter bytes = 12 bytes; this
// many chunks covers at least 512 KiB, then truncate to exactly 512 KiB.
let mut data = repeat_offset_fixture(b"HelloWorld", (512 * 1024) / 12 + 1);
data.truncate(512 * 1024);
let result = roundtrip_simple(&data);
assert_eq!(data, result, "Multi-block repeat offset roundtrip failed");
let result = roundtrip_streaming(&data);
assert_eq!(
data, result,
"Multi-block repeat offset streaming roundtrip failed"
);
// Size comparison: whole-frame compression vs. per-chunk compression with
// the per-frame framing cost normalized out.
let whole_frame = compress_streaming(&data);
// Size of a frame holding zero payload bytes — used as the framing overhead
// estimate for both sides of the comparison below.
let frame_overhead = compress_to_vec(&[][..], CompressionLevel::Fastest).len();
// Sum of chunk payload sizes (overhead subtracted per chunk), plus one
// overhead so the split total is framed comparably to the single frame.
// NOTE(review): 128 KiB chunking presumably mirrors the maximum block size —
// confirm against the encoder's block-splitting logic.
let independent_chunks: usize = data
.chunks(128 * 1024)
.map(|chunk| {
compress_to_vec(chunk, CompressionLevel::Fastest)
.len()
.saturating_sub(frame_overhead)
})
.sum::<usize>()
.saturating_add(frame_overhead);
assert!(
whole_frame.len() < independent_chunks,
"Cross-block reuse should beat per-block resets. whole={} bytes, split={} bytes",
whole_frame.len(),
independent_chunks
);
}
// Craft input where matches follow each other back-to-back with no literals
// in between, aiming to produce sequences with literal length zero (per the
// test name); the exact byte layout is the point, so it is left untouched.
#[test]
fn roundtrip_zero_literal_length_sequences() {
// Seed the window with 100 distinct ascending bytes.
let mut data = Vec::with_capacity(10_000);
for i in 0..100u8 {
data.push(i);
}
// Two overlapping 50-byte slices of the ascending run; alternating them
// should let each repetition be encoded as a match starting exactly where
// the previous match ended.
let prefix = data[..50].to_vec();
let shifted_prefix = data[1..51].to_vec();
data.extend_from_slice(&prefix);
for _ in 0..100 {
data.extend_from_slice(&shifted_prefix);
data.extend_from_slice(&prefix);
}
let result = roundtrip_simple(&data);
assert_eq!(data, result, "Zero ll sequence roundtrip failed");
}
/// Reusing a single `FrameCompressor` for a second source/drain pair must
/// still produce two independent, correctly decodable frames.
#[test]
fn roundtrip_reused_frame_compressor_across_frames() {
    let first = generate_huffman_friendly(700, 512 * 1024, 48);
    let second = generate_huffman_friendly(701, 512 * 1024, 48);
    let mut first_compressed = Vec::new();
    let mut second_compressed = Vec::new();
    {
        // One compressor instance, two consecutive compress() calls.
        let mut compressor = FrameCompressor::new(CompressionLevel::Fastest);
        compressor.set_source(first.as_slice());
        compressor.set_drain(&mut first_compressed);
        compressor.compress();
        compressor.set_source(second.as_slice());
        compressor.set_drain(&mut second_compressed);
        compressor.compress();
    }
    // Decode one frame fully and return the recovered bytes.
    let decode = |frame: &[u8]| {
        let mut out = Vec::new();
        StreamingDecoder::new(frame)
            .unwrap()
            .read_to_end(&mut out)
            .unwrap();
        out
    };
    assert_eq!(
        first,
        decode(first_compressed.as_slice()),
        "First reused-frame roundtrip failed"
    );
    assert_eq!(
        second,
        decode(second_compressed.as_slice()),
        "Second reused-frame roundtrip failed"
    );
}
/// Regression: the default compression level must round-trip a 64 KiB
/// compressible buffer.
#[test]
fn roundtrip_default_level_regression() {
    let input = generate_compressible(777, 64 * 1024);
    assert_eq!(roundtrip_default(&input), input);
}
/// Regression: the default compression level must round-trip a 512 KiB
/// compressible buffer spanning multiple blocks.
#[test]
fn roundtrip_default_level_multi_block_regression() {
    let input = generate_compressible(1337, 512 * 1024);
    assert_eq!(roundtrip_default(&input), input);
}