pub mod adaptive;
pub mod dict_zip;
#[cfg(feature = "async")]
pub mod realtime;
pub mod simd_lz77;
pub mod simd_pattern_match;
pub mod stream_vbyte;
pub mod suffix_array;
pub use adaptive::{AdaptiveCompressor, AdaptiveConfig, CompressionProfile};
pub use dict_zip::{
DictionaryBuilder as PaZipDictionaryBuilder, DictionaryBuilderConfig, SuffixArrayDictionary, SuffixArrayDictionaryConfig,
PatternMatcher, DfaCache, Match,
};
pub use simd_lz77::{
SimdLz77Compressor, SimdLz77Config, SimdLz77Match, SimdLz77Stats,
CompressionTier, CompressionParallelMode, DictionaryConfig,
SimdLz77CompressorX1, SimdLz77CompressorX2, SimdLz77CompressorX4, SimdLz77CompressorX8,
get_global_simd_lz77_compressor, compress_with_simd_lz77, decompress_with_simd_lz77,
};
pub use simd_pattern_match::{
SimdPatternMatcher, SimdPatternConfig, SimdMatchResult, SimdPatternTier, ParallelMode,
get_global_simd_pattern_matcher,
};
#[cfg(feature = "async")]
pub use realtime::{CompressionMode, RealtimeCompressor, RealtimeConfig};
pub use suffix_array::{
EnhancedSuffixArray, SuffixArrayCompressor, SuffixArrayConfig, SuffixArrayStats,
};
use crate::entropy::dictionary::{DictionaryBuilder, DictionaryCompressor};
use crate::entropy::huffman::{HuffmanDecoder, HuffmanEncoder, HuffmanTree};
use crate::entropy::rans::{Rans64Decoder, Rans64Encoder, ParallelX1};
use crate::error::{Result, ZiporaError};
use std::time::Duration;
/// Identifies a compression algorithm, together with its tuning
/// parameter where one exists (currently only the ZSTD level).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Algorithm {
    /// Pass-through: data is stored uncompressed.
    None,
    /// LZ4 block compression (fast, modest ratio).
    Lz4,
    /// ZSTD at the given compression level.
    Zstd(i32),
    /// Static Huffman entropy coding.
    Huffman,
    /// 64-bit rANS entropy coding.
    Rans,
    /// Dictionary-based substring compression.
    Dictionary,
    /// SIMD-accelerated LZ77.
    SimdLz77,
    /// Runs several codecs and keeps the smallest output.
    Hybrid,
}

impl Algorithm {
    /// Ballpark compression speed in bytes per second.
    ///
    /// These are static planning figures used for algorithm selection,
    /// not measured values. `None` reports infinite speed since it does
    /// no work.
    pub fn compression_speed(&self) -> f64 {
        match self {
            Algorithm::None => f64::INFINITY,
            Algorithm::Lz4 => 500_000_000.0,
            Algorithm::Zstd(level) => match level {
                1..=3 => 200_000_000.0,
                4..=9 => 50_000_000.0,
                // Levels outside 1..=9 (including <= 0) fall here.
                _ => 10_000_000.0,
            },
            Algorithm::Huffman => 100_000_000.0,
            Algorithm::Rans => 80_000_000.0,
            Algorithm::Dictionary => 150_000_000.0,
            Algorithm::SimdLz77 => 300_000_000.0,
            Algorithm::Hybrid => 50_000_000.0,
        }
    }

    /// Expected compressed/original size ratio (lower compresses more).
    ///
    /// Static planning figure; actual ratios depend on the input.
    pub fn compression_ratio(&self) -> f64 {
        match self {
            Algorithm::None => 1.0,
            Algorithm::Lz4 => 0.6,
            Algorithm::Zstd(level) => match level {
                1..=3 => 0.5,
                4..=9 => 0.4,
                _ => 0.3,
            },
            Algorithm::Huffman => 0.65,
            Algorithm::Rans => 0.55,
            Algorithm::Dictionary => 0.45,
            Algorithm::SimdLz77 => 0.35,
            Algorithm::Hybrid => 0.35,
        }
    }

    /// Working-memory estimate as a multiple of the input size.
    pub fn memory_usage(&self) -> f64 {
        match self {
            Algorithm::None => 0.0,
            Algorithm::Lz4 => 0.1,
            Algorithm::Zstd(level) => match level {
                1..=3 => 0.5,
                4..=9 => 2.0,
                _ => 8.0,
            },
            Algorithm::Huffman => 1.0,
            Algorithm::Rans => 1.5,
            Algorithm::Dictionary => 3.0,
            Algorithm::SimdLz77 => 2.5,
            Algorithm::Hybrid => 4.0,
        }
    }
}
/// Caller-specified constraints used when choosing a compression
/// algorithm.
#[derive(Debug, Clone)]
pub struct PerformanceRequirements {
    /// Upper bound on the time a single compression call may take.
    pub max_latency: Duration,
    /// Minimum acceptable throughput in bytes per second.
    pub min_throughput: u64,
    /// Maximum working-set memory in bytes.
    pub max_memory: usize,
    /// Desired compressed/original size ratio (lower is better).
    pub target_ratio: f64,
    /// Trade-off weight in [0, 1]: 0 favors speed, 1 favors ratio.
    pub speed_vs_quality: f64,
}

impl Default for PerformanceRequirements {
    /// Balanced defaults: 100 ms latency budget, 100 MB/s minimum
    /// throughput, 64 MiB memory cap, 2:1 target ratio, and an even
    /// speed/quality weighting.
    fn default() -> Self {
        Self {
            max_latency: Duration::from_millis(100),
            min_throughput: 100_000_000,
            max_memory: 64 * 1024 * 1024,
            target_ratio: 0.5,
            speed_vs_quality: 0.5,
        }
    }
}
/// Running statistics across compression operations.
#[derive(Debug, Clone, Default)]
pub struct CompressionStats {
    /// Number of operations recorded via `update`.
    pub operations: u64,
    /// Total uncompressed bytes seen.
    pub bytes_processed: u64,
    /// Total compressed bytes produced.
    pub bytes_compressed: u64,
    /// Total wall-clock time accumulated.
    pub total_time: Duration,
    /// Per-algorithm operation counts.
    pub algorithm_usage: std::collections::HashMap<Algorithm, u64>,
    /// Running average of per-operation compression ratios.
    pub avg_ratio: f64,
    /// Running average of per-operation throughput (bytes/sec).
    pub avg_throughput: f64,
}

impl CompressionStats {
    /// Overall compressed/original ratio across all recorded
    /// operations. Returns 0.0 before any data has been processed.
    pub fn compression_ratio(&self) -> f64 {
        if self.bytes_processed == 0 {
            0.0
        } else {
            self.bytes_compressed as f64 / self.bytes_processed as f64
        }
    }

    /// Overall throughput in bytes per second. Returns 0.0 when no
    /// time has been accumulated.
    pub fn throughput(&self) -> f64 {
        let secs = self.total_time.as_secs_f64();
        if secs == 0.0 {
            0.0
        } else {
            self.bytes_processed as f64 / secs
        }
    }

    /// Records one compression operation and folds it into the totals
    /// and running averages.
    ///
    /// Degenerate samples are guarded, matching the zero guards the
    /// getters above already have: an empty input counts as ratio 1.0
    /// (nothing shrank), and a zero-length duration contributes a
    /// throughput of 0.0. Previously such a sample divided by zero and
    /// poisoned `avg_ratio`/`avg_throughput` with NaN/inf permanently.
    pub fn update(
        &mut self,
        input_size: usize,
        output_size: usize,
        duration: Duration,
        algorithm: Algorithm,
    ) {
        self.operations += 1;
        self.bytes_processed += input_size as u64;
        self.bytes_compressed += output_size as u64;
        self.total_time += duration;
        *self.algorithm_usage.entry(algorithm).or_insert(0) += 1;
        let ratio = if input_size == 0 {
            1.0
        } else {
            output_size as f64 / input_size as f64
        };
        // Incremental running mean: weight the old average by the
        // previous operation count.
        self.avg_ratio =
            (self.avg_ratio * (self.operations - 1) as f64 + ratio) / self.operations as f64;
        let secs = duration.as_secs_f64();
        let throughput = if secs == 0.0 {
            0.0
        } else {
            input_size as f64 / secs
        };
        self.avg_throughput = (self.avg_throughput * (self.operations - 1) as f64 + throughput)
            / self.operations as f64;
    }
}
/// Common interface implemented by every compression backend in this
/// module. Implementors must be shareable across threads.
pub trait Compressor: Send + Sync {
    /// Compresses `data`, returning the encoded bytes.
    fn compress(&self, data: &[u8]) -> Result<Vec<u8>>;
    /// Reverses `compress`, returning the original bytes.
    fn decompress(&self, data: &[u8]) -> Result<Vec<u8>>;
    /// The algorithm this compressor implements.
    fn algorithm(&self) -> Algorithm;
    /// Estimates the compressed/original ratio for `data`.
    ///
    /// For inputs larger than 1 KiB this compresses the first 1 KiB and
    /// measures the actual ratio; otherwise (or if the trial compression
    /// fails) it falls back to the algorithm's static estimate.
    fn estimate_ratio(&self, data: &[u8]) -> f64 {
        if data.len() > 1024 {
            let sample = &data[..1024];
            if let Ok(compressed) = self.compress(sample) {
                return compressed.len() as f64 / sample.len() as f64;
            }
        }
        self.algorithm().compression_ratio()
    }
    /// Returns true when this compressor is expected to satisfy
    /// `requirements` for an input of `data_size` bytes, judged from
    /// the algorithm's static speed/memory/ratio figures (latency,
    /// memory, and target-ratio checks; `min_throughput` is not
    /// consulted here).
    fn is_suitable(&self, requirements: &PerformanceRequirements, data_size: usize) -> bool {
        let algo = self.algorithm();
        let expected_time = data_size as f64 / algo.compression_speed();
        let expected_memory = (data_size as f64 * algo.memory_usage()) as usize;
        Duration::from_secs_f64(expected_time) <= requirements.max_latency
            && expected_memory <= requirements.max_memory
            && algo.compression_ratio() <= requirements.target_ratio
    }
}
/// Identity "compressor": both directions simply copy the input.
pub struct NoCompressor;

impl Compressor for NoCompressor {
    /// Returns an owned copy of `data`, unchanged.
    fn compress(&self, data: &[u8]) -> Result<Vec<u8>> {
        Ok(data.to_owned())
    }

    /// Returns an owned copy of `data`, unchanged.
    fn decompress(&self, data: &[u8]) -> Result<Vec<u8>> {
        Ok(data.to_owned())
    }

    fn algorithm(&self) -> Algorithm {
        Algorithm::None
    }
}
/// LZ4 compressor backed by the `lz4_flex` crate; compiles to an
/// error-returning stub when the `lz4` feature is disabled.
pub struct Lz4Compressor;

impl Compressor for Lz4Compressor {
    /// Compresses `data` with LZ4, prepending the original size so the
    /// decompressor can allocate up front.
    fn compress(
        &self,
        // Parameter is unused when the feature is off; silence the lint
        // only in that configuration.
        #[cfg_attr(not(feature = "lz4"), allow(unused_variables))] data: &[u8],
    ) -> Result<Vec<u8>> {
        #[cfg(feature = "lz4")]
        {
            Ok(lz4_flex::compress_prepend_size(data))
        }
        #[cfg(not(feature = "lz4"))]
        {
            Err(ZiporaError::not_supported("LZ4 compression not enabled"))
        }
    }

    /// Decompresses a buffer produced by `compress` (size-prepended
    /// LZ4 format).
    fn decompress(
        &self,
        #[cfg_attr(not(feature = "lz4"), allow(unused_variables))] data: &[u8],
    ) -> Result<Vec<u8>> {
        #[cfg(feature = "lz4")]
        {
            lz4_flex::decompress_size_prepended(data)
                .map_err(|e| ZiporaError::compression(&format!("LZ4 decompression failed: {}", e)))
        }
        #[cfg(not(feature = "lz4"))]
        {
            Err(ZiporaError::not_supported("LZ4 decompression not enabled"))
        }
    }

    fn algorithm(&self) -> Algorithm {
        Algorithm::Lz4
    }
}
/// ZSTD compressor at a fixed compression level. Only available when
/// the `zstd` feature is enabled.
#[cfg(feature = "zstd")]
pub struct ZstdCompressor {
    /// ZSTD compression level passed to `zstd::bulk::compress`.
    level: i32,
}

#[cfg(feature = "zstd")]
impl ZstdCompressor {
    /// Upper bound (100 MiB) on the decompressed size accepted by
    /// `decompress`; guards against decompression bombs. Previously an
    /// inline magic number.
    const MAX_DECOMPRESSED_SIZE: usize = 100 * 1024 * 1024;

    /// Creates a compressor using the given ZSTD `level`.
    pub fn new(level: i32) -> Self {
        Self { level }
    }
}

#[cfg(feature = "zstd")]
impl Compressor for ZstdCompressor {
    fn compress(&self, data: &[u8]) -> Result<Vec<u8>> {
        zstd::bulk::compress(data, self.level)
            .map_err(|e| ZiporaError::compression(&format!("ZSTD compression failed: {}", e)))
    }

    /// Decompresses `data`, rejecting outputs larger than
    /// `Self::MAX_DECOMPRESSED_SIZE`.
    fn decompress(&self, data: &[u8]) -> Result<Vec<u8>> {
        zstd::bulk::decompress(data, Self::MAX_DECOMPRESSED_SIZE)
            .map_err(|e| ZiporaError::compression(&format!("ZSTD decompression failed: {}", e)))
    }

    fn algorithm(&self) -> Algorithm {
        Algorithm::Zstd(self.level)
    }
}
/// Static Huffman compressor whose code tree is built once from
/// training data and embedded in every compressed buffer.
pub struct HuffmanCompressor {
    encoder: HuffmanEncoder,
    // Serialized form of the encoder's tree, precomputed once so each
    // compress call can copy it into the output header.
    tree_data: Vec<u8>,
}

impl HuffmanCompressor {
    /// Builds an encoder from the byte distribution of `training_data`.
    pub fn new(training_data: &[u8]) -> Result<Self> {
        let encoder = HuffmanEncoder::new(training_data)?;
        let tree_data = encoder.tree().serialize();
        Ok(Self { encoder, tree_data })
    }

    /// The serialized Huffman tree that gets embedded in compressed
    /// output.
    pub fn tree_data(&self) -> &[u8] {
        &self.tree_data
    }
}

impl Compressor for HuffmanCompressor {
    /// Wire format: `[tree_len: u32 LE][tree bytes]`
    /// `[original_len: u32 LE][encoded payload]`.
    /// Empty input produces an empty buffer.
    fn compress(&self, data: &[u8]) -> Result<Vec<u8>> {
        if data.is_empty() {
            return Ok(Vec::new());
        }
        let compressed_data = self.encoder.encode(data)?;
        let mut result = Vec::new();
        let tree_size = self.tree_data.len() as u32;
        result.extend_from_slice(&tree_size.to_le_bytes());
        result.extend_from_slice(&self.tree_data);
        let original_size = data.len() as u32;
        result.extend_from_slice(&original_size.to_le_bytes());
        result.extend_from_slice(&compressed_data);
        Ok(result)
    }

    /// Parses the header written by `compress`, rebuilds the embedded
    /// tree (the instance's own encoder is not used here), and decodes
    /// the payload.
    ///
    /// # Errors
    /// Fails on buffers too short to hold the declared header.
    fn decompress(&self, data: &[u8]) -> Result<Vec<u8>> {
        if data.is_empty() {
            return Ok(Vec::new());
        }
        // Minimum: 4-byte tree length + 4-byte original length.
        if data.len() < 8 {
            return Err(ZiporaError::invalid_data(
                "Huffman compressed data too short",
            ));
        }
        let tree_size = u32::from_le_bytes([data[0], data[1], data[2], data[3]]) as usize;
        // The declared tree plus the trailing length field must fit.
        if data.len() < 8 + tree_size {
            return Err(ZiporaError::invalid_data(
                "Huffman compressed data truncated",
            ));
        }
        let tree_data = &data[4..4 + tree_size];
        let tree = HuffmanTree::deserialize(tree_data)?;
        let decoder = HuffmanDecoder::new(tree);
        let size_offset = 4 + tree_size;
        let original_size = u32::from_le_bytes([
            data[size_offset],
            data[size_offset + 1],
            data[size_offset + 2],
            data[size_offset + 3],
        ]) as usize;
        let compressed_data = &data[size_offset + 4..];
        decoder.decode(compressed_data, original_size)
    }

    fn algorithm(&self) -> Algorithm {
        Algorithm::Huffman
    }
}
/// rANS entropy compressor with a symbol frequency table learned from
/// training data.
pub struct RansCompressor {
    encoder: Rans64Encoder<ParallelX1>,
}

impl RansCompressor {
    /// Builds a compressor whose frequency table is the byte histogram
    /// of `training_data`.
    ///
    /// # Errors
    /// Returns an error if `training_data` is empty or the encoder
    /// rejects the frequency table.
    pub fn new(training_data: &[u8]) -> Result<Self> {
        if training_data.is_empty() {
            return Err(ZiporaError::invalid_data(
                "rANS compressor requires training data",
            ));
        }
        // Byte-value histogram. Any symbol present in the training data
        // necessarily ends up with a count >= 1, so the previous
        // "patch zero frequencies for observed symbols" pass
        // (`freq == 0 && symbol_exists`) could never fire and has been
        // removed as dead code.
        let mut frequencies = [0u32; 256];
        for &byte in training_data {
            frequencies[byte as usize] += 1;
        }
        let encoder = Rans64Encoder::<ParallelX1>::new(&frequencies)?;
        Ok(Self { encoder })
    }
}
impl Compressor for RansCompressor {
    /// Wire format: `[256 x u32 LE frequency table]`
    /// `[original_len: u32 LE][rANS stream]`.
    /// Empty input produces an empty buffer.
    fn compress(&self, data: &[u8]) -> Result<Vec<u8>> {
        if data.is_empty() {
            return Ok(Vec::new());
        }
        let mut result = Vec::new();
        // Emit the full 256-entry frequency table so the decoder can
        // rebuild an identical model without the training data.
        for i in 0..=255u8 {
            let freq = self.encoder.get_symbol(i).freq;
            result.extend_from_slice(&freq.to_le_bytes());
        }
        let original_size = data.len() as u32;
        result.extend_from_slice(&original_size.to_le_bytes());
        let compressed_data = self.encoder.encode(data)?;
        result.extend_from_slice(&compressed_data);
        Ok(result)
    }

    /// Rebuilds the frequency model embedded by `compress` and decodes
    /// the payload; this instance's own encoder is not used here.
    ///
    /// # Errors
    /// Fails when `data` is shorter than the fixed header
    /// (256 * 4 table bytes + 4 length bytes).
    fn decompress(&self, data: &[u8]) -> Result<Vec<u8>> {
        if data.is_empty() {
            return Ok(Vec::new());
        }
        if data.len() < 256 * 4 + 4 {
            return Err(ZiporaError::invalid_data(
                "Invalid rANS compressed data format",
            ));
        }
        let mut frequencies = [0u32; 256];
        for i in 0..256 {
            let start = i * 4;
            frequencies[i] = u32::from_le_bytes([
                data[start],
                data[start + 1],
                data[start + 2],
                data[start + 3],
            ]);
        }
        let size_offset = 256 * 4;
        let original_size = u32::from_le_bytes([
            data[size_offset],
            data[size_offset + 1],
            data[size_offset + 2],
            data[size_offset + 3],
        ]) as usize;
        let compressed_data = &data[size_offset + 4..];
        // The decoder is constructed from an encoder built over the
        // embedded table.
        let temp_encoder = Rans64Encoder::<ParallelX1>::new(&frequencies)?;
        let decoder = Rans64Decoder::<ParallelX1>::new(&temp_encoder);
        decoder.decode(compressed_data, original_size)
    }

    /// Fixed estimate; overrides the generic 1 KiB sampling heuristic.
    fn estimate_ratio(&self, _data: &[u8]) -> f64 {
        0.6
    }

    fn algorithm(&self) -> Algorithm {
        Algorithm::Rans
    }
}
/// Substring-dictionary compressor trained on a sample corpus.
pub struct DictCompressor {
    dictionary: DictionaryCompressor,
}

impl DictCompressor {
    /// Trains a dictionary on `training_data` and wraps it in a
    /// compressor.
    ///
    /// # Errors
    /// Fails when `training_data` is empty, since no dictionary can be
    /// built from nothing.
    pub fn new(training_data: &[u8]) -> Result<Self> {
        if training_data.is_empty() {
            return Err(ZiporaError::invalid_data(
                "Dictionary compressor requires training data",
            ));
        }
        let dict = DictionaryBuilder::new().build(training_data);
        Ok(Self {
            dictionary: DictionaryCompressor::new(dict),
        })
    }
}

impl Compressor for DictCompressor {
    /// Delegates to the trained dictionary; empty input maps directly
    /// to empty output.
    fn compress(&self, data: &[u8]) -> Result<Vec<u8>> {
        match data {
            [] => Ok(Vec::new()),
            bytes => self.dictionary.compress(bytes),
        }
    }

    /// Inverse of `compress`; empty input maps directly to empty
    /// output.
    fn decompress(&self, data: &[u8]) -> Result<Vec<u8>> {
        match data {
            [] => Ok(Vec::new()),
            bytes => self.dictionary.decompress(bytes),
        }
    }

    /// Fixed estimate; overrides the generic 1 KiB sampling heuristic.
    fn estimate_ratio(&self, _data: &[u8]) -> f64 {
        0.7
    }

    fn algorithm(&self) -> Algorithm {
        Algorithm::Dictionary
    }
}
/// Meta-compressor that runs several backends and keeps whichever
/// output is smallest (or the raw input, if nothing beat it).
///
/// Wire format: one tag byte followed by the payload. The tag is
/// either the index of the winning backend or `Self::RAW_MARKER` for a
/// stored (uncompressed) payload.
pub struct HybridCompressor {
    compressors: Vec<Box<dyn Compressor>>,
}

impl HybridCompressor {
    /// Tag byte meaning "payload is stored uncompressed"; chosen well
    /// above any realistic backend index.
    const RAW_MARKER: u8 = 0xFF;

    /// Builds the backend set (Huffman, rANS, dictionary), all trained
    /// on `training_data`.
    pub fn new(training_data: &[u8]) -> Result<Self> {
        let compressors: Vec<Box<dyn Compressor>> = vec![
            Box::new(HuffmanCompressor::new(training_data)?),
            Box::new(RansCompressor::new(training_data)?),
            Box::new(DictCompressor::new(training_data)?),
        ];
        Ok(Self { compressors })
    }
}

impl Compressor for HybridCompressor {
    /// Tries every backend and keeps the smallest result; empty input
    /// produces an empty buffer.
    fn compress(&self, data: &[u8]) -> Result<Vec<u8>> {
        if data.is_empty() {
            return Ok(Vec::new());
        }
        // Start from "stored" so incompressible input is tagged as raw.
        // The previous code tagged the untouched input with backend
        // index 0, which made `decompress` feed raw bytes to the
        // Huffman decoder and corrupt the round trip.
        let mut best_result = data.to_vec();
        let mut best_algorithm = Self::RAW_MARKER;
        for (i, compressor) in self.compressors.iter().enumerate() {
            if let Ok(compressed) = compressor.compress(data) {
                if compressed.len() < best_result.len() {
                    best_result = compressed;
                    best_algorithm = i as u8;
                }
            }
        }
        let mut result = vec![best_algorithm];
        result.extend_from_slice(&best_result);
        Ok(result)
    }

    /// Routes the payload to the backend named by the tag byte, or
    /// returns it verbatim for stored payloads.
    ///
    /// # Errors
    /// Fails when the tag does not name a known backend.
    fn decompress(&self, data: &[u8]) -> Result<Vec<u8>> {
        if data.is_empty() {
            return Ok(Vec::new());
        }
        let tag = data[0];
        let payload = &data[1..];
        if tag == Self::RAW_MARKER {
            // Stored payload: no backend beat the raw input.
            return Ok(payload.to_vec());
        }
        let algorithm_id = tag as usize;
        if algorithm_id >= self.compressors.len() {
            return Err(ZiporaError::invalid_data(
                "Invalid algorithm identifier in hybrid data",
            ));
        }
        self.compressors[algorithm_id].decompress(payload)
    }

    /// Optimistic estimate: the best ratio any backend claims for
    /// `data`.
    fn estimate_ratio(&self, data: &[u8]) -> f64 {
        self.compressors
            .iter()
            .map(|c| c.estimate_ratio(data))
            .fold(1.0, f64::min)
    }

    fn algorithm(&self) -> Algorithm {
        Algorithm::Hybrid
    }
}
/// Factory for constructing compressor instances and selecting an
/// algorithm against performance requirements.
pub struct CompressorFactory;

impl CompressorFactory {
    /// Instantiates a compressor for `algorithm`.
    ///
    /// Huffman, rANS, dictionary, and hybrid backends require
    /// `training_data`; the other variants ignore it.
    ///
    /// # Errors
    /// Fails when required training data is missing, or when the
    /// requested algorithm is compiled out of this build.
    pub fn create(
        algorithm: Algorithm,
        training_data: Option<&[u8]>,
    ) -> Result<Box<dyn Compressor>> {
        match algorithm {
            Algorithm::None => Ok(Box::new(NoCompressor)),
            Algorithm::Lz4 => Ok(Box::new(Lz4Compressor)),
            #[cfg(feature = "zstd")]
            Algorithm::Zstd(level) => Ok(Box::new(ZstdCompressor::new(level))),
            #[cfg(not(feature = "zstd"))]
            Algorithm::Zstd(_) => Err(ZiporaError::configuration(
                "ZSTD compression not available - enable 'zstd' feature",
            )),
            Algorithm::Huffman => {
                if let Some(data) = training_data {
                    Ok(Box::new(HuffmanCompressor::new(data)?))
                } else {
                    Err(ZiporaError::invalid_data(
                        "Huffman compressor requires training data",
                    ))
                }
            }
            Algorithm::Rans => {
                if let Some(data) = training_data {
                    Ok(Box::new(RansCompressor::new(data)?))
                } else {
                    Err(ZiporaError::invalid_data(
                        "rANS compressor requires training data",
                    ))
                }
            }
            Algorithm::Dictionary => {
                if let Some(data) = training_data {
                    Ok(Box::new(DictCompressor::new(data)?))
                } else {
                    Err(ZiporaError::invalid_data(
                        "Dictionary compressor requires training data",
                    ))
                }
            }
            Algorithm::SimdLz77 => {
                Ok(Box::new(
                    crate::compression::simd_lz77::SimdLz77Compressor::default()
                ))
            }
            Algorithm::Hybrid => {
                if let Some(data) = training_data {
                    Ok(Box::new(HybridCompressor::new(data)?))
                } else {
                    Err(ZiporaError::invalid_data(
                        "Hybrid compressor requires training data",
                    ))
                }
            }
        }
    }

    /// Lists the algorithms usable in this build; the set grows when
    /// the `zstd` feature is enabled.
    ///
    /// NOTE(review): `Algorithm::SimdLz77` is constructible via
    /// `create` without training data but is absent from both lists,
    /// so `select_best` never considers it — confirm whether that
    /// omission is intentional.
    pub fn available_algorithms() -> Vec<Algorithm> {
        #[cfg(feature = "zstd")]
        {
            vec![
                Algorithm::None,
                Algorithm::Lz4,
                Algorithm::Zstd(1),
                Algorithm::Zstd(3),
                Algorithm::Zstd(6),
                Algorithm::Zstd(9),
                Algorithm::Huffman,
                Algorithm::Rans,
                Algorithm::Dictionary,
                Algorithm::Hybrid,
            ]
        }
        #[cfg(not(feature = "zstd"))]
        {
            vec![
                Algorithm::None,
                Algorithm::Lz4,
                Algorithm::Huffman,
                Algorithm::Rans,
                Algorithm::Dictionary,
                Algorithm::Hybrid,
            ]
        }
    }

    /// Picks the highest-scoring available algorithm for `data` under
    /// `requirements`, filtering out candidates that would exceed the
    /// memory or latency budget. Returns `Algorithm::None` when
    /// nothing qualifies.
    ///
    /// NOTE(review): `Algorithm::None` reports infinite speed, so its
    /// speed score is infinite whenever `speed_vs_quality < 1.0`; it
    /// therefore wins any selection in which it survives the filters —
    /// confirm this degenerate dominance is intended.
    pub fn select_best(requirements: &PerformanceRequirements, data: &[u8]) -> Algorithm {
        let available = Self::available_algorithms();
        let mut best_algorithm = Algorithm::None;
        let mut best_score = f64::NEG_INFINITY;
        for algorithm in available {
            // Skip algorithms that cannot be constructed without a
            // training corpus.
            if matches!(
                algorithm,
                Algorithm::Huffman | Algorithm::Rans | Algorithm::Dictionary
            ) {
                continue;
            }
            let speed = algorithm.compression_speed();
            let ratio = algorithm.compression_ratio();
            let memory = algorithm.memory_usage() * data.len() as f64;
            if memory > requirements.max_memory as f64 {
                continue;
            }
            let expected_time = data.len() as f64 / speed;
            if Duration::from_secs_f64(expected_time) > requirements.max_latency {
                continue;
            }
            // Normalize each axis to roughly [0, 1] before weighting.
            let speed_score = speed / 1_000_000_000.0;
            let ratio_score = 1.0 - ratio;
            let memory_score = 1.0 - (memory / requirements.max_memory as f64);
            let weighted_score = requirements.speed_vs_quality * ratio_score
                + (1.0 - requirements.speed_vs_quality) * speed_score
                + 0.1 * memory_score;
            if weighted_score > best_score {
                best_score = weighted_score;
                best_algorithm = algorithm;
            }
        }
        best_algorithm
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Static metadata sanity checks on the Algorithm enum.
    #[test]
    fn test_algorithm_properties() {
        assert_eq!(Algorithm::None.compression_ratio(), 1.0);
        assert!(Algorithm::Lz4.compression_speed() > Algorithm::Zstd(9).compression_speed());
        assert!(Algorithm::Zstd(9).compression_ratio() < Algorithm::Lz4.compression_ratio());
    }

    #[test]
    fn test_performance_requirements() {
        let req = PerformanceRequirements::default();
        assert_eq!(req.speed_vs_quality, 0.5);
        assert!(req.max_latency > Duration::ZERO);
    }

    // Totals and running averages after two recorded operations.
    #[test]
    fn test_compression_stats() {
        let mut stats = CompressionStats::default();
        stats.update(1000, 500, Duration::from_millis(10), Algorithm::Lz4);
        assert_eq!(stats.operations, 1);
        assert_eq!(stats.compression_ratio(), 0.5);
        stats.update(2000, 800, Duration::from_millis(20), Algorithm::Zstd(3));
        assert_eq!(stats.operations, 2);
        assert!(stats.compression_ratio() < 0.7);
    }

    // The identity compressor must round-trip data unchanged.
    #[test]
    fn test_no_compressor() {
        let compressor = NoCompressor;
        let data = b"test data";
        let compressed = compressor.compress(data).unwrap();
        assert_eq!(compressed, data);
        let decompressed = compressor.decompress(&compressed).unwrap();
        assert_eq!(decompressed, data);
        assert_eq!(compressor.algorithm(), Algorithm::None);
    }

    #[cfg(feature = "lz4")]
    #[test]
    fn test_lz4_compressor() {
        let compressor = Lz4Compressor;
        let data = b"test data that should compress well with repeated patterns";
        let compressed = compressor.compress(data).unwrap();
        let decompressed = compressor.decompress(&compressed).unwrap();
        assert_eq!(decompressed, data);
        assert_eq!(compressor.algorithm(), Algorithm::Lz4);
    }

    #[test]
    #[cfg(feature = "zstd")]
    fn test_zstd_compressor() {
        let compressor = ZstdCompressor::new(3);
        let data = b"test data that should compress well with repeated patterns and more text";
        let compressed = compressor.compress(data).unwrap();
        let decompressed = compressor.decompress(&compressed).unwrap();
        assert_eq!(decompressed, data);
        assert_eq!(compressor.algorithm(), Algorithm::Zstd(3));
        assert!(compressed.len() < data.len());
    }

    #[test]
    fn test_compressor_factory() {
        let algorithms = CompressorFactory::available_algorithms();
        assert!(!algorithms.is_empty());
        assert!(algorithms.contains(&Algorithm::None));
        assert!(algorithms.contains(&Algorithm::Lz4));
    }

    // With a tight latency budget and speed-heavy weighting, selection
    // should land on one of the fast algorithms.
    #[test]
    fn test_algorithm_selection() {
        let req = PerformanceRequirements {
            max_latency: Duration::from_millis(1),
            speed_vs_quality: 0.9,
            ..Default::default()
        };
        let data = vec![0u8; 1000];
        let algorithm = CompressorFactory::select_best(&req, &data);
        assert!(matches!(algorithm, Algorithm::None | Algorithm::Lz4));
    }

    #[test]
    fn test_huffman_compressor() {
        let training_data = b"hello world! this is sample training data for huffman compression.";
        let compressor = HuffmanCompressor::new(training_data).unwrap();
        let test_data = b"hello world! this uses the same patterns as training.";
        let compressed = compressor.compress(test_data).unwrap();
        let decompressed = compressor.decompress(&compressed).unwrap();
        assert_eq!(decompressed, test_data);
        assert_eq!(compressor.algorithm(), Algorithm::Huffman);
        // Header alone (tree length + original length) is 8 bytes.
        assert!(compressed.len() >= 8);
    }

    // Empty input must compress to (and decompress from) an empty
    // buffer.
    #[test]
    fn test_huffman_compressor_empty_data() {
        let training_data = b"sample data";
        let compressor = HuffmanCompressor::new(training_data).unwrap();
        let empty_data = b"";
        let compressed = compressor.compress(empty_data).unwrap();
        let decompressed = compressor.decompress(&compressed).unwrap();
        assert_eq!(decompressed, empty_data);
        assert!(compressed.is_empty());
    }

    // Degenerate alphabet: a single distinct symbol must still
    // round-trip.
    #[test]
    fn test_huffman_compressor_single_symbol() {
        let training_data = b"aaaaaaaaaa";
        let compressor = HuffmanCompressor::new(training_data).unwrap();
        let test_data = b"aaaa";
        let compressed = compressor.compress(test_data).unwrap();
        let decompressed = compressor.decompress(&compressed).unwrap();
        assert_eq!(decompressed, test_data);
    }

    // Training distribution covering all 256 byte values.
    #[test]
    fn test_huffman_compressor_high_entropy() {
        let training_data: Vec<u8> = (0..=255).cycle().take(1000).collect();
        let compressor = HuffmanCompressor::new(&training_data).unwrap();
        let test_data: Vec<u8> = (0..100).map(|i| (i * 7) as u8).collect();
        let compressed = compressor.compress(&test_data).unwrap();
        let decompressed = compressor.decompress(&compressed).unwrap();
        assert_eq!(decompressed, test_data);
    }

    #[test]
    fn test_huffman_compressor_repeated_patterns() {
        let training_data = b"abcdefghijklmnopqrstuvwxyz";
        let compressor = HuffmanCompressor::new(training_data).unwrap();
        let test_data = b"aaaaaabbbbbbccccccdddddd";
        let compressed = compressor.compress(test_data).unwrap();
        let decompressed = compressor.decompress(&compressed).unwrap();
        assert_eq!(decompressed, test_data);
    }

    // Truncated and malformed headers must be rejected with an error,
    // not a panic.
    #[test]
    fn test_huffman_compressor_invalid_compressed_data() {
        let training_data = b"sample data";
        let compressor = HuffmanCompressor::new(training_data).unwrap();
        let invalid_data = b"abc";
        let result = compressor.decompress(invalid_data);
        assert!(result.is_err());
        let malformed_data = vec![1, 0, 0, 0, 255];
        let result = compressor.decompress(&malformed_data);
        assert!(result.is_err());
    }

    #[test]
    fn test_huffman_compressor_tree_data() {
        let training_data = b"hello world";
        let compressor = HuffmanCompressor::new(training_data).unwrap();
        let tree_data = compressor.tree_data();
        assert!(!tree_data.is_empty());
        assert!(tree_data.len() >= 2);
    }

    // is_suitable should pass a relaxed budget and fail an impossible
    // latency bound.
    #[test]
    #[cfg(feature = "lz4")]
    fn test_compressor_suitability() {
        let compressor = Lz4Compressor;
        let req = PerformanceRequirements {
            target_ratio: 0.7,
            ..Default::default()
        };
        assert!(compressor.is_suitable(&req, 1024));
        let strict_req = PerformanceRequirements {
            max_latency: Duration::from_nanos(1),
            ..Default::default()
        };
        assert!(!compressor.is_suitable(&strict_req, 1024 * 1024));
    }
}