// cachekit_core/byte_storage.rs

//! LZ4 compression and xxHash3 checksums for raw byte storage.
//!
//! Provides integrity-protected byte storage with security validation:
//! - 512MB size limits for decompression bomb protection
//! - 1000x max compression ratio enforcement
//! - xxHash3-64 checksums for corruption detection (19x faster than Blake3)

use crate::metrics::OperationMetrics;
#[cfg(feature = "compression")]
use lz4_flex;
use serde::{Deserialize, Serialize};
use std::sync::{Arc, Mutex};
#[cfg(not(target_arch = "wasm32"))]
use std::time::Instant;
use thiserror::Error;
#[cfg(feature = "checksum")]
use xxhash_rust::xxh3::xxh3_64;
19/// Error types for ByteStorage operations
20#[derive(Debug, Error, Clone, PartialEq)]
21pub enum ByteStorageError {
22    #[error("input exceeds maximum size")]
23    InputTooLarge,
24
25    #[error("decompression ratio exceeds safety limit")]
26    DecompressionBomb,
27
28    #[error("integrity check failed")]
29    ChecksumMismatch,
30
31    #[error("compression failed")]
32    CompressionFailed,
33
34    #[error("decompression failed")]
35    DecompressionFailed,
36
37    #[error("size validation failed")]
38    SizeValidationFailed,
39
40    #[error("serialization failed: {0}")]
41    SerializationFailed(String),
42
43    #[error("deserialization failed: {0}")]
44    DeserializationFailed(String),
45}
46
// Security constants — production-safe limits.

/// Hard cap on uncompressed payload size: 512 MiB.
const MAX_UNCOMPRESSED_SIZE: usize = 512 << 20;
/// Hard cap on compressed / serialized payload size: 512 MiB.
const MAX_COMPRESSED_SIZE: usize = 512 << 20;
/// Maximum allowed compression ratio (1000:1).
/// Kept as u64 so the bomb check can use integer-only arithmetic,
/// preventing floating-point precision bypass attacks.
const MAX_COMPRESSION_RATIO: u64 = 1000;
53
54/// Storage envelope for raw byte storage
55/// Contains compressed data with integrity checking
56#[derive(Serialize, Deserialize)]
57pub struct StorageEnvelope {
58    /// Compressed payload data
59    pub compressed_data: Vec<u8>,
60    /// xxHash3-64 checksum for integrity (8 bytes)
61    pub checksum: [u8; 8],
62    /// Original size for validation
63    pub original_size: u32,
64    /// Format identifier (e.g., "msgpack")
65    pub format: String,
66}
67
68impl StorageEnvelope {
69    /// Create new envelope with data compression and checksum
70    #[cfg(all(feature = "compression", feature = "checksum"))]
71    pub fn new(data: Vec<u8>, format: String) -> Result<Self, ByteStorageError> {
72        // Security: Check input size before compression
73        if data.len() > MAX_UNCOMPRESSED_SIZE {
74            return Err(ByteStorageError::InputTooLarge);
75        }
76
77        let original_size = data.len() as u32;
78
79        // Compress with LZ4
80        let compressed_data = lz4_flex::compress(&data);
81
82        // Security: Check compressed size
83        if compressed_data.len() > MAX_COMPRESSED_SIZE {
84            return Err(ByteStorageError::InputTooLarge);
85        }
86
87        // Generate xxHash3-64 checksum of original data (big-endian = xxhash canonical format)
88        let checksum = xxh3_64(&data).to_be_bytes();
89
90        Ok(StorageEnvelope {
91            compressed_data,
92            checksum,
93            original_size,
94            format,
95        })
96    }
97
98    /// Extract and validate data from envelope
99    #[cfg(all(feature = "compression", feature = "checksum"))]
100    pub fn extract(&self) -> Result<Vec<u8>, ByteStorageError> {
101        // Security: Validate envelope structure first
102        if self.compressed_data.len() > MAX_COMPRESSED_SIZE {
103            return Err(ByteStorageError::InputTooLarge);
104        }
105
106        if self.original_size as usize > MAX_UNCOMPRESSED_SIZE {
107            return Err(ByteStorageError::InputTooLarge);
108        }
109
110        // Security: Check compression ratio for decompression bomb protection
111        // Uses integer arithmetic to prevent floating-point precision bypass attacks
112        let compressed_size = self.compressed_data.len() as u64;
113
114        // Step 1: Zero check - empty compressed data with non-zero original is a bomb
115        if compressed_size == 0 {
116            return Err(ByteStorageError::DecompressionBomb);
117        }
118
119        // Step 2: Checked multiplication - overflow = bomb (fail-safe)
120        let max_allowed_original = MAX_COMPRESSION_RATIO
121            .checked_mul(compressed_size)
122            .ok_or(ByteStorageError::DecompressionBomb)?;
123
124        // Step 3: Compare original_size against computed maximum
125        if (self.original_size as u64) > max_allowed_original {
126            return Err(ByteStorageError::DecompressionBomb);
127        }
128
129        // Decompress (with validated sizes)
130        let decompressed = lz4_flex::decompress(&self.compressed_data, self.original_size as usize)
131            .map_err(|_| ByteStorageError::DecompressionFailed)?;
132
133        // Verify checksum (checksum validation happens AFTER decompression to prevent processing corrupted data)
134        // Note: xxHash3 is non-cryptographic, so we use simple equality (not constant-time)
135        // Security against tampering is provided by AES-GCM authentication tag, not the checksum
136        let computed_checksum = xxh3_64(&decompressed).to_be_bytes();
137        if computed_checksum != self.checksum {
138            return Err(ByteStorageError::ChecksumMismatch);
139        }
140
141        // Verify size (final safety check)
142        if decompressed.len() != self.original_size as usize {
143            return Err(ByteStorageError::SizeValidationFailed);
144        }
145
146        Ok(decompressed)
147    }
148}
149
150/// Raw byte storage engine (pure Rust core)
151/// Simple store/retrieve interface with no type awareness
152pub struct ByteStorage {
153    default_format: String,
154    /// Last operation metrics (interior mutability for observability)
155    last_metrics: Arc<Mutex<OperationMetrics>>,
156}
157
158impl ByteStorage {
159    /// Create new ByteStorage instance
160    pub fn new(default_format: Option<String>) -> Self {
161        ByteStorage {
162            default_format: default_format.unwrap_or_else(|| "msgpack".to_string()),
163            last_metrics: Arc::new(Mutex::new(OperationMetrics::new())),
164        }
165    }
166
167    /// Store arbitrary bytes with compression and checksums
168    ///
169    /// Returns serialized StorageEnvelope bytes
170    #[cfg(all(feature = "compression", feature = "checksum", feature = "messagepack"))]
171    pub fn store(&self, data: &[u8], format: Option<String>) -> Result<Vec<u8>, ByteStorageError> {
172        // Security: Check input size before processing
173        if data.len() > MAX_UNCOMPRESSED_SIZE {
174            return Err(ByteStorageError::InputTooLarge);
175        }
176
177        let format = format.unwrap_or_else(|| self.default_format.clone());
178
179        // Time compression operation (wasm32: Instant unavailable, use 0)
180        #[cfg(not(target_arch = "wasm32"))]
181        let compression_start = Instant::now();
182        let original_size = data.len();
183
184        let envelope = StorageEnvelope::new(data.to_vec(), format)?;
185
186        #[cfg(not(target_arch = "wasm32"))]
187        let compression_micros = compression_start.elapsed().as_micros() as u64;
188        #[cfg(target_arch = "wasm32")]
189        let compression_micros = 0u64;
190        let compressed_size = envelope.compressed_data.len();
191
192        // Serialize envelope with MessagePack
193        let envelope_bytes = rmp_serde::to_vec(&envelope)
194            .map_err(|e| ByteStorageError::SerializationFailed(e.to_string()))?;
195
196        // Security: Final check on serialized envelope size
197        if envelope_bytes.len() > MAX_COMPRESSED_SIZE {
198            return Err(ByteStorageError::InputTooLarge);
199        }
200
201        // Update metrics for observability
202        if let Ok(mut metrics) = self.last_metrics.lock() {
203            *metrics = OperationMetrics::new().with_compression(
204                compression_micros,
205                original_size,
206                compressed_size,
207            );
208        }
209
210        Ok(envelope_bytes)
211    }
212
213    /// Retrieve and validate stored bytes
214    ///
215    /// Returns (original_data, format_identifier)
216    #[cfg(all(feature = "compression", feature = "checksum", feature = "messagepack"))]
217    pub fn retrieve(&self, envelope_bytes: &[u8]) -> Result<(Vec<u8>, String), ByteStorageError> {
218        // Security: Check envelope size before deserializing
219        if envelope_bytes.len() > MAX_COMPRESSED_SIZE {
220            return Err(ByteStorageError::InputTooLarge);
221        }
222
223        // Deserialize envelope
224        let envelope: StorageEnvelope = rmp_serde::from_slice(envelope_bytes)
225            .map_err(|e| ByteStorageError::DeserializationFailed(e.to_string()))?;
226
227        // Time decompression and checksum operations (wasm32: Instant unavailable, use 0)
228        #[cfg(not(target_arch = "wasm32"))]
229        let decompress_start = Instant::now();
230
231        // Extract and validate data (all security checks happen inside extract())
232        let data = envelope.extract()?;
233
234        #[cfg(not(target_arch = "wasm32"))]
235        let decompress_micros = decompress_start.elapsed().as_micros() as u64;
236        #[cfg(target_arch = "wasm32")]
237        let decompress_micros = 0u64;
238
239        // Calculate compression ratio from stored metadata
240        let compressed_size = envelope.compressed_data.len();
241        let original_size = envelope.original_size as usize;
242
243        // Update metrics for observability
244        if let Ok(mut metrics) = self.last_metrics.lock() {
245            *metrics = OperationMetrics::new().with_compression(
246                decompress_micros,
247                original_size,
248                compressed_size,
249            );
250        }
251
252        Ok((data, envelope.format))
253    }
254
255    /// Get compression ratio for given data
256    #[cfg(feature = "compression")]
257    pub fn estimate_compression(&self, data: &[u8]) -> Result<f64, ByteStorageError> {
258        // Security: Check size before compression
259        if data.len() > MAX_UNCOMPRESSED_SIZE {
260            return Err(ByteStorageError::InputTooLarge);
261        }
262
263        let compressed = lz4_flex::compress(data);
264
265        Ok(data.len() as f64 / compressed.len() as f64)
266    }
267
268    /// Validate envelope without extracting data
269    #[cfg(all(feature = "compression", feature = "checksum", feature = "messagepack"))]
270    pub fn validate(&self, envelope_bytes: &[u8]) -> bool {
271        // Security: Check size before validating
272        if envelope_bytes.len() > MAX_COMPRESSED_SIZE {
273            return false; // Invalid due to size limit
274        }
275
276        match rmp_serde::from_slice::<StorageEnvelope>(envelope_bytes) {
277            Ok(envelope) => envelope.extract().is_ok(),
278            Err(_) => false,
279        }
280    }
281
282    /// Get metrics from last operation
283    ///
284    /// Returns a snapshot of metrics from the most recent store() or retrieve() call
285    pub fn get_last_metrics(&self) -> OperationMetrics {
286        self.last_metrics
287            .lock()
288            .map(|metrics| metrics.clone())
289            .unwrap_or_else(|_| OperationMetrics::new())
290    }
291
292    /// Get security limits
293    pub fn max_uncompressed_size(&self) -> usize {
294        MAX_UNCOMPRESSED_SIZE
295    }
296
297    pub fn max_compressed_size(&self) -> usize {
298        MAX_COMPRESSED_SIZE
299    }
300
301    pub fn max_compression_ratio(&self) -> u64 {
302        MAX_COMPRESSION_RATIO
303    }
304}
305
306impl Default for ByteStorage {
307    fn default() -> Self {
308        Self::new(None)
309    }
310}
311
#[cfg(all(
    test,
    feature = "compression",
    feature = "checksum",
    feature = "messagepack"
))]
mod tests {
    use super::*;

    #[test]
    fn test_storage_envelope_roundtrip() {
        // Compress then extract must return the exact original bytes.
        let original = b"Hello, World! This is test data for compression.".to_vec();
        let env = StorageEnvelope::new(original.clone(), "test".to_string()).unwrap();
        let recovered = env.extract().unwrap();
        assert_eq!(original, recovered);
    }

    #[test]
    fn test_compression_works() {
        // A run of identical bytes must shrink under LZ4.
        let repetitive = b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".to_vec();
        let env = StorageEnvelope::new(repetitive.clone(), "test".to_string()).unwrap();
        assert!(env.compressed_data.len() < repetitive.len());
    }

    #[test]
    fn test_checksum_validation() {
        // Flipping any checksum bits must make extraction fail.
        let mut env = StorageEnvelope::new(b"test".to_vec(), "test".to_string()).unwrap();
        env.checksum[0] = !env.checksum[0];
        assert!(env.extract().is_err());
    }

    #[test]
    fn test_raw_persistence_roundtrip() {
        // Full store → retrieve cycle through the engine.
        let engine = ByteStorage::new(None);
        let payload = b"test data for persistence";

        let serialized = engine.store(payload, None).unwrap();
        let (recovered, fmt) = engine.retrieve(&serialized).unwrap();
        assert_eq!(payload, recovered.as_slice());
        assert_eq!("msgpack", fmt);
    }

    #[test]
    fn test_size_limits_input() {
        let engine = ByteStorage::new(None);

        // One byte past MAX_UNCOMPRESSED_SIZE (512MB) must be rejected.
        let oversized = vec![0u8; MAX_UNCOMPRESSED_SIZE + 1];

        let outcome = engine.store(&oversized, None);
        assert!(matches!(outcome, Err(ByteStorageError::InputTooLarge)));
    }

    #[test]
    fn test_size_limits_envelope() {
        // Data of exactly MAX_UNCOMPRESSED_SIZE must still be accepted.
        let boundary_data = vec![0u8; MAX_UNCOMPRESSED_SIZE];
        let outcome = StorageEnvelope::new(boundary_data, "test".to_string());
        assert!(outcome.is_ok());
    }

    #[test]
    fn test_compression_ratio_bomb_protection() {
        // Hand-crafted envelope claiming a 200x expansion over 1000 bytes.
        let bomb = StorageEnvelope {
            compressed_data: vec![0u8; 1000], // Small compressed size
            checksum: [0u8; 8],               // Fake checksum
            original_size: 200 * 1024 * 1024, // Claims 200MB original
            format: "test".to_string(),
        };

        let outcome = bomb.extract();
        assert!(matches!(outcome, Err(ByteStorageError::DecompressionBomb)));
    }

    // ============================================================================
    // Decompression Bomb Edge Case Tests (Task 6.1)
    // ============================================================================

    #[test]
    fn test_decompression_bomb_zero_compressed_size() {
        // WHY: empty compressed data claiming a non-zero original is always a bomb.
        let bomb = StorageEnvelope {
            compressed_data: vec![], // Zero compressed size
            checksum: [0u8; 8],
            original_size: 1000, // Claims 1KB original
            format: "test".to_string(),
        };

        let outcome = bomb.extract();
        assert!(
            matches!(outcome, Err(ByteStorageError::DecompressionBomb)),
            "Zero compressed size should be rejected as decompression bomb"
        );
    }

    #[test]
    fn test_decompression_bomb_extreme_ratio() {
        // WHY: 1 compressed byte claiming 2000 original bytes is 2000:1,
        // which exceeds the 1000:1 limit. (u32::MAX would instead be caught
        // by InputTooLarge first, since it exceeds MAX_UNCOMPRESSED_SIZE.)
        let bomb = StorageEnvelope {
            compressed_data: vec![0u8; 1],
            checksum: [0u8; 8],
            original_size: 2000,
            format: "test".to_string(),
        };

        let outcome = bomb.extract();
        assert!(
            matches!(outcome, Err(ByteStorageError::DecompressionBomb)),
            "Extreme ratio should be rejected as bomb: {:?}",
            outcome
        );
    }

    #[test]
    fn test_decompression_u32_max_original_size() {
        // WHY: u32::MAX (~4GB) exceeds MAX_UNCOMPRESSED_SIZE, so the size
        // check must fire before the ratio check — validating check order.
        let bomb = StorageEnvelope {
            compressed_data: vec![0u8; 1000],
            checksum: [0u8; 8],
            original_size: u32::MAX,
            format: "test".to_string(),
        };

        let outcome = bomb.extract();
        assert!(
            matches!(outcome, Err(ByteStorageError::InputTooLarge)),
            "u32::MAX should be rejected as InputTooLarge (exceeds 512MB limit): {:?}",
            outcome
        );
    }

    #[test]
    fn test_decompression_exactly_at_threshold() {
        // WHY: exactly 1000:1 must pass the ratio check. The garbage
        // compressed bytes then fail later (decompression or checksum),
        // which is expected — the point is that the bomb check passes.
        let env = StorageEnvelope {
            compressed_data: vec![0u8; 100], // 100 bytes (not valid LZ4)
            checksum: [0u8; 8],
            original_size: 100_000, // 100KB = exactly 1000:1
            format: "test".to_string(),
        };

        let outcome = env.extract();
        // KEY: must NOT be DecompressionBomb; some later error is expected.
        assert!(
            !matches!(outcome, Err(ByteStorageError::DecompressionBomb)),
            "Exactly 1000:1 ratio should pass bomb check: {:?}",
            outcome
        );
        assert!(
            outcome.is_err(),
            "Invalid data should still fail after ratio check"
        );
    }

    #[test]
    fn test_decompression_just_over_threshold() {
        // WHY: one byte over the 1000:1 boundary must be rejected.
        let bomb = StorageEnvelope {
            compressed_data: vec![0u8; 100],
            checksum: [0u8; 8],
            original_size: 100_001, // just over 1000:1
            format: "test".to_string(),
        };

        let outcome = bomb.extract();
        assert!(
            matches!(outcome, Err(ByteStorageError::DecompressionBomb)),
            "Just over 1000:1 ratio should be rejected as bomb"
        );
    }

    #[test]
    fn test_decompression_bomb_integer_boundary() {
        // WHY: near the u64 overflow boundary, 1000 * compressed_size must
        // stay safe. MAX_COMPRESSED_SIZE (512MB) keeps real inputs far from
        // u64::MAX / 1000, so this exercises a realistic boundary instead:
        // 1MB compressed claiming 1GB original (exactly 1000:1), which is
        // rejected by the size limit before the ratio check runs.
        let env = StorageEnvelope {
            compressed_data: vec![0u8; 1_000_000],
            checksum: [0u8; 8],
            original_size: 1_000_000_000,
            format: "test".to_string(),
        };

        let outcome = env.extract();
        assert!(
            matches!(outcome, Err(ByteStorageError::InputTooLarge)),
            "Should fail size check before ratio check: {:?}",
            outcome
        );
    }

    #[test]
    fn test_envelope_size_validation() {
        let engine = ByteStorage::new(None);

        // Envelope bytes over MAX_COMPRESSED_SIZE must be rejected up front.
        let oversized_envelope = vec![0u8; MAX_COMPRESSED_SIZE + 1];

        let outcome = engine.retrieve(&oversized_envelope);
        assert!(matches!(outcome, Err(ByteStorageError::InputTooLarge)));
    }

    #[test]
    fn test_security_limits_getters() {
        let engine = ByteStorage::new(None);

        assert_eq!(engine.max_uncompressed_size(), MAX_UNCOMPRESSED_SIZE);
        assert_eq!(engine.max_compressed_size(), MAX_COMPRESSED_SIZE);
        assert_eq!(engine.max_compression_ratio(), 1000u64);
    }

    #[test]
    fn test_compression_estimate_security() {
        let engine = ByteStorage::new(None);

        // Oversized input must be rejected before estimation.
        let oversized = vec![0u8; MAX_UNCOMPRESSED_SIZE + 1];
        let outcome = engine.estimate_compression(&oversized);
        assert!(matches!(outcome, Err(ByteStorageError::InputTooLarge)));
    }

    #[test]
    fn test_validate_security() {
        let engine = ByteStorage::new(None);

        // Oversized envelope bytes must report invalid, not panic.
        let oversized = vec![0u8; MAX_COMPRESSED_SIZE + 1];
        assert!(!engine.validate(&oversized));
    }

    #[test]
    fn test_edge_case_exactly_at_limits() {
        // Data of exactly 512MB (filled with 1s so it compresses) must store.
        let engine = ByteStorage::new(None);
        let boundary_data = vec![1u8; MAX_UNCOMPRESSED_SIZE];

        let outcome = engine.store(&boundary_data, None);
        assert!(outcome.is_ok());
    }

    #[test]
    fn test_zero_size_edge_case() {
        // Empty input must round-trip cleanly.
        let engine = ByteStorage::new(None);
        let empty: Vec<u8> = vec![];

        let serialized = engine.store(&empty, None).unwrap();
        let (recovered, fmt) = engine.retrieve(&serialized).unwrap();
        assert_eq!(empty, recovered);
        assert_eq!("msgpack", fmt);
    }

    #[test]
    fn test_metrics_collection_on_store() {
        let engine = ByteStorage::new(None);
        let compressible = b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".to_vec();

        engine.store(&compressible, None).unwrap();
        let metrics = engine.get_last_metrics();

        // Store must have recorded a valid compression ratio.
        assert!(metrics.compression_ratio > 0.0);
    }

    #[test]
    fn test_metrics_collection_on_retrieve() {
        let engine = ByteStorage::new(None);
        let payload = b"test data for retrieval metrics";

        let serialized = engine.store(payload, None).unwrap();
        engine.retrieve(&serialized).unwrap();
        let metrics = engine.get_last_metrics();

        // Retrieve must also have recorded a valid ratio.
        assert!(metrics.compression_ratio > 0.0);
    }
}
607
// Kani Formal Verification Proofs
//
// Bounded model checking of the critical security properties. Unwind bounds
// are chosen so each proof completes within the 10-minute budget set by the
// specification.
#[cfg(kani)]
mod kani_proofs {
    use super::*;

    /// Verify checksum integrity (corruption detection).
    /// Property: any single-bit checksum corruption is always detected.
    #[kani::proof]
    #[kani::unwind(10)] // Need 8+ for memcmp of 8-byte xxHash3-64 checksum
    fn verify_checksum_detects_corruption() {
        // Symbolic 8-byte checksum (xxHash3-64 output width).
        let pristine: [u8; 8] = kani::any();
        let mut corrupted = pristine;

        // Flip exactly one bit at a symbolic position.
        let byte_pos: usize = kani::any();
        let bit_pos: usize = kani::any();
        kani::assume(byte_pos < 8);
        kani::assume(bit_pos < 8);

        corrupted[byte_pos] ^= 1 << bit_pos;

        // Property: the corrupted checksum can never equal the original.
        assert_ne!(pristine, corrupted);
    }

    /// Verify decompression bomb protection (compression ratio limits).
    /// Property: ratios exceeding 1000x are always rejected.
    /// Mirrors the integer-only arithmetic of the production check.
    #[kani::proof]
    #[kani::unwind(3)]
    fn verify_decompression_bomb_protection() {
        // Symbolic envelope parameters.
        let comp_len: u64 = kani::any();
        let orig_len: u64 = kani::any();

        // Constrain to tractable ranges (zero check is covered by assume).
        kani::assume(comp_len > 0 && comp_len <= 1000);
        kani::assume(orig_len > 0);

        // Mirror steps 2–3 of StorageEnvelope::extract().
        let allowed = MAX_COMPRESSION_RATIO.checked_mul(comp_len);

        // Property: rejection decision matches the plain ratio comparison
        // whenever no overflow occurred.
        if let Some(limit) = allowed {
            let rejected = orig_len > limit;
            let over_ratio = orig_len > MAX_COMPRESSION_RATIO * comp_len;
            assert_eq!(rejected, over_ratio);
        } else {
            // Overflow path: production code always rejects (fail-safe).
            assert!(true);
        }
    }

    /// Verify size limit enforcement on input.
    /// Property: inputs exceeding MAX_UNCOMPRESSED_SIZE are always rejected.
    #[kani::proof]
    #[kani::unwind(3)]
    fn verify_input_size_limits() {
        let candidate: usize = kani::any();

        // Explore the boundary region around the limit.
        kani::assume(candidate <= MAX_UNCOMPRESSED_SIZE + 100);

        // Property: the rejection predicate is exactly "over the limit".
        let over_limit = candidate > MAX_UNCOMPRESSED_SIZE;
        let rejected = candidate > MAX_UNCOMPRESSED_SIZE;

        assert_eq!(over_limit, rejected);
    }

    /// Verify size limit enforcement on compressed data.
    /// Property: compressed data exceeding MAX_COMPRESSED_SIZE is rejected.
    #[kani::proof]
    #[kani::unwind(3)]
    fn verify_compressed_size_limits() {
        let candidate: usize = kani::any();

        // Explore the boundary region.
        kani::assume(candidate <= MAX_COMPRESSED_SIZE + 100);

        // Property: the rejection predicate is exactly "over the limit".
        let over_limit = candidate > MAX_COMPRESSED_SIZE;
        let rejected = candidate > MAX_COMPRESSED_SIZE;

        assert_eq!(over_limit, rejected);
    }

    /// Verify compression ratio calculation is safe (integer arithmetic).
    /// Property: the ratio check never panics and correctly flags bombs.
    #[kani::proof]
    #[kani::unwind(3)]
    fn verify_compression_ratio_calculation_safety() {
        let orig_len: u64 = kani::any();
        let comp_len: u64 = kani::any();

        // Keep ranges manageable and avoid the separately-handled zero case.
        kani::assume(comp_len > 0);
        kani::assume(comp_len <= 10000);
        kani::assume(orig_len <= 100_000_000); // 100MB max for test

        // Property 1: checked_mul never panics (returns None on overflow).
        let product = MAX_COMPRESSION_RATIO.checked_mul(comp_len);

        // Property 2: when multiplication succeeds, the comparison is valid
        // and equivalent to orig_len > 1000 * comp_len.
        if let Some(limit) = product {
            let flagged = orig_len > limit;
            if orig_len <= limit {
                assert!(!flagged);
            } else {
                assert!(flagged);
            }
        }

        // Property 3: comp_len == 0 is handled by a separate production
        // check and intentionally excluded here.
    }
}
732}