1use core::{ffi::CStr, marker::PhantomData, mem::MaybeUninit, ops::ControlFlow, ptr::NonNull};
2
3use crate::{
4 adler32::adler32,
5 allocate::Allocator,
6 c_api::{gz_header, internal_state, z_checksum, z_stream},
7 crc32::{crc32, Crc32Fold},
8 trace,
9 weak_slice::{WeakArrayMut, WeakSliceMut},
10 DeflateFlush, ReturnCode, ADLER32_INITIAL_VALUE, CRC32_INITIAL_VALUE, MAX_WBITS, MIN_WBITS,
11};
12
13use self::{
14 algorithm::CONFIGURATION_TABLE,
15 hash_calc::{HashCalcVariant, RollHashCalc, StandardHashCalc},
16 pending::Pending,
17 sym_buf::SymBuf,
18 trees_tbl::STATIC_LTREE,
19 window::Window,
20};
21
22mod algorithm;
23mod compare256;
24mod hash_calc;
25mod longest_match;
26mod pending;
27mod slide_hash;
28mod sym_buf;
29mod trees_tbl;
30mod window;
31
/// Position inside the sliding window / hash chains. `u16` is sufficient because
/// the window is at most `1 << MAX_WBITS` (32 KiB) bytes.
pub(crate) type Pos = u16;
34
/// A typed view of a [`z_stream`] that is known to be configured for deflate.
///
/// `#[repr(C)]` with the exact field order of `z_stream`, so a `*mut z_stream`
/// can be reinterpreted as a `*mut DeflateStream`; size and alignment equality
/// are proven by the const assertions in the `impl` block. The only semantic
/// difference is that `state` is a typed mutable reference instead of a raw
/// `internal_state` pointer.
#[repr(C)]
pub struct DeflateStream<'a> {
    pub(crate) next_in: *mut crate::c_api::Bytef, // next input byte
    pub(crate) avail_in: crate::c_api::uInt,      // number of bytes available at next_in
    pub(crate) total_in: crate::c_api::z_size,    // total number of input bytes read so far
    pub(crate) next_out: *mut crate::c_api::Bytef, // next output byte
    pub(crate) avail_out: crate::c_api::uInt,     // remaining free space at next_out
    pub(crate) total_out: crate::c_api::z_size,   // total number of bytes output so far
    pub(crate) msg: *const core::ffi::c_char,     // last error message, null if none
    pub(crate) state: &'a mut State<'a>,          // internal deflate state (raw pointer in z_stream)
    pub(crate) alloc: Allocator<'a>,              // zalloc/zfree/opaque used for all internal buffers
    pub(crate) data_type: core::ffi::c_int,       // best guess about the data type: binary or text
    pub(crate) adler: crate::c_api::z_checksum,   // adler32 or crc32 of the uncompressed data
    pub(crate) reserved: crate::c_api::uLong,     // unused; kept for z_stream layout parity
}
52
// SAFETY-NOTE(review): mirrors the C API contract where a z_stream may be moved
// between threads between calls. Soundness relies on the user-supplied allocator
// being thread-safe and on the internal raw pointers never being accessed
// concurrently — confirm before sharing a stream across threads.
unsafe impl Sync for DeflateStream<'_> {}
unsafe impl Send for DeflateStream<'_> {}
55
impl<'a> DeflateStream<'a> {
    // Compile-time proof that `z_stream` and `DeflateStream` have identical
    // size and alignment, which makes the pointer casts below layout-sound.
    const _S: () = assert!(core::mem::size_of::<z_stream>() == core::mem::size_of::<Self>());
    const _A: () = assert!(core::mem::align_of::<z_stream>() == core::mem::align_of::<Self>());

    /// Reinterprets `strm` as a mutable [`DeflateStream`].
    ///
    /// Returns `None` when `strm` is null, when the allocator callbacks are not
    /// both configured, or when the stream has no state (never initialized).
    ///
    /// # Safety
    ///
    /// `strm` must be null or point to a valid, properly initialized `z_stream`
    /// whose `state` was produced by this module, and must not be aliased for
    /// the duration of `'a`.
    #[inline(always)]
    pub unsafe fn from_stream_mut(strm: *mut z_stream) -> Option<&'a mut Self> {
        {
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        // Layout compatibility is guaranteed by the const assertions above.
        unsafe { strm.cast::<DeflateStream>().as_mut() }
    }

    /// Shared-reference variant of [`Self::from_stream_mut`]; same checks.
    ///
    /// # Safety
    ///
    /// Same requirements as [`Self::from_stream_mut`], except only shared
    /// access to the pointee is required.
    #[inline(always)]
    pub unsafe fn from_stream_ref(strm: *const z_stream) -> Option<&'a Self> {
        {
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        unsafe { strm.cast::<DeflateStream>().as_ref() }
    }

    /// Views `self` as the raw C `z_stream`; sound due to identical layout.
    fn as_z_stream_mut(&mut self) -> &mut z_stream {
        unsafe { &mut *(self as *mut DeflateStream as *mut z_stream) }
    }

    /// Returns (bytes pending in the output buffer, bits pending in the bit buffer).
    pub fn pending(&self) -> (usize, u8) {
        (
            self.state.bit_writer.pending.pending,
            self.state.bit_writer.bits_used,
        )
    }

    /// Creates a fully initialized deflate stream from `config`.
    ///
    /// # Panics
    ///
    /// Panics if initialization fails (invalid config or allocation failure).
    pub fn new(config: DeflateConfig) -> Self {
        let mut inner = crate::c_api::z_stream::default();

        let ret = crate::deflate::init(&mut inner, config);
        assert_eq!(ret, ReturnCode::Ok);

        // Safe by the size/align const assertions: an initialized z_stream is a
        // valid DeflateStream.
        unsafe { core::mem::transmute(inner) }
    }
}
137
/// Number of hash-chain heads; fixed at 2^16 so a hash value fits exactly in a `u16`.
pub(crate) const HASH_SIZE: usize = 65536;
/// log2(HASH_SIZE): number of bits produced by the hash functions.
const HASH_BITS: usize = 16;

/// Upper bound for the `mem_level` configuration parameter.
const MAX_MEM_LEVEL: i32 = 9;
/// Default memory level (zlib's DEF_MEM_LEVEL: 8, clamped to MAX_MEM_LEVEL).
pub const DEF_MEM_LEVEL: i32 = if MAX_MEM_LEVEL > 8 { 8 } else { MAX_MEM_LEVEL };
146
/// Compression method field of the zlib header. Deflate (8) is the only method
/// defined by the zlib format, hence the single variant.
#[repr(i32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Method {
    #[default]
    Deflated = 8,
}
154
155impl TryFrom<i32> for Method {
156 type Error = ();
157
158 fn try_from(value: i32) -> Result<Self, Self::Error> {
159 match value {
160 8 => Ok(Self::Deflated),
161 _ => Err(()),
162 }
163 }
164}
165
/// Configuration for a deflate stream, mirroring the parameters of zlib's
/// `deflateInit2` (the `method` is always deflate in practice).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub struct DeflateConfig {
    pub level: i32,       // compression level 0..=9, or Z_DEFAULT_COMPRESSION
    pub method: Method,   // compression method; always Deflated
    pub window_bits: i32, // 9..=15 zlib, +16 for gzip wrapper, negative for raw deflate
    pub mem_level: i32,   // internal memory usage level, 1..=MAX_MEM_LEVEL
    pub strategy: Strategy, // match-search strategy
}
186
#[cfg(any(test, feature = "__internal-test"))]
impl quickcheck::Arbitrary for DeflateConfig {
    /// Generates a configuration drawn uniformly from the valid parameter
    /// space: levels 0..=9, mem levels 1..=9, and window bits in the zlib
    /// (9..=15), gzip (25..=31) and raw-deflate (-15..=-9) ranges.
    fn arbitrary(g: &mut quickcheck::Gen) -> Self {
        let levels: Vec<i32> = (0..=9).collect();
        let mem_levels: Vec<i32> = (1..=9).collect();
        let window_bits: Vec<i32> = (9..=15)
            .chain(9 + 16..=15 + 16)
            .chain(-15..=-9)
            .collect();

        let strategies = [
            Strategy::Default,
            Strategy::Filtered,
            Strategy::HuffmanOnly,
            Strategy::Rle,
            Strategy::Fixed,
        ];

        Self {
            level: *g.choose(&levels).unwrap(),
            method: Method::Deflated,
            window_bits: *g.choose(&window_bits).unwrap(),
            mem_level: *g.choose(&mem_levels).unwrap(),
            strategy: *g.choose(&strategies).unwrap(),
        }
    }
}
215
216impl DeflateConfig {
217 pub fn new(level: i32) -> Self {
218 Self {
219 level,
220 ..Self::default()
221 }
222 }
223
224 pub fn best_compression() -> Self {
226 Self::new(crate::c_api::Z_BEST_COMPRESSION)
227 }
228
229 pub fn best_speed() -> Self {
231 Self::new(crate::c_api::Z_BEST_SPEED)
232 }
233}
234
impl Default for DeflateConfig {
    fn default() -> Self {
        Self {
            // Z_DEFAULT_COMPRESSION (-1) is resolved to level 6 inside `init`.
            level: crate::c_api::Z_DEFAULT_COMPRESSION,
            method: Method::Deflated,
            // 15 window bits => 32 KiB window with a zlib wrapper.
            window_bits: MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        }
    }
}
246
/// Initializes `stream` for compression with the given `config`
/// (the equivalent of zlib's `deflateInit2`).
///
/// Validates the configuration, installs a default allocator if none is set
/// (and a feature provides one), performs a single backing allocation that
/// holds all internal buffers plus the `State` itself, and finally resets the
/// stream.
///
/// Returns `Ok` on success, `StreamError` for invalid parameters or a missing
/// allocator, and `MemError` when the allocation fails.
pub fn init(stream: &mut z_stream, config: DeflateConfig) -> ReturnCode {
    let DeflateConfig {
        mut level,
        method: _,
        mut window_bits,
        mem_level,
        strategy,
    } = config;

    stream.msg = core::ptr::null_mut();

    // If both allocator callbacks are unset, fall back to a default allocator
    // when one of the allocator features is enabled.
    #[cfg(feature = "rust-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_rust_allocator()
    }

    #[cfg(feature = "c-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_c_allocator()
    }

    if stream.zalloc.is_none() || stream.zfree.is_none() {
        return ReturnCode::StreamError;
    }

    if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        level = 6;
    }

    // Decode the wrapper from the window_bits encoding:
    //   negative => raw deflate (wrap = 0), window_bits made positive
    //   > MAX_WBITS => gzip wrapper (wrap = 2), 16 subtracted
    //   otherwise => zlib wrapper (wrap = 1)
    let wrap = if window_bits < 0 {
        if window_bits < -MAX_WBITS {
            return ReturnCode::StreamError;
        }
        window_bits = -window_bits;

        0
    } else if window_bits > MAX_WBITS {
        window_bits -= 16;
        2
    } else {
        1
    };

    if (!(1..=MAX_MEM_LEVEL).contains(&mem_level))
        || !(MIN_WBITS..=MAX_WBITS).contains(&window_bits)
        || !(0..=9).contains(&level)
        || (window_bits == 8 && wrap != 1)
    {
        return ReturnCode::StreamError;
    }

    // A 256-byte window is not supported; bump it to 512 bytes.
    let window_bits = if window_bits == 8 {
        9
    } else {
        window_bits as usize
    };

    let alloc = Allocator {
        zalloc: stream.zalloc.unwrap(),
        zfree: stream.zfree.unwrap(),
        opaque: stream.opaque,
        _marker: PhantomData,
    };

    let lit_bufsize = 1 << (mem_level + 6);
    // Compute the offsets of every sub-buffer within one big allocation.
    let allocs = DeflateAllocOffsets::new(window_bits, lit_bufsize);

    let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
        return ReturnCode::MemError;
    };

    let w_size = 1 << window_bits;
    // Round the base pointer up to a 64-byte (cache line) boundary; the
    // offsets in `allocs` are relative to this aligned base.
    let align_offset = (allocation_start.as_ptr() as usize).next_multiple_of(64)
        - (allocation_start.as_ptr() as usize);
    let buf = unsafe { allocation_start.as_ptr().add(align_offset) };

    // Carve the sub-buffers out of the allocation and zero-initialize the
    // ones whose initial contents matter (window, prev, head).
    let (window, prev, head, pending, sym_buf) = unsafe {
        let window_ptr: *mut u8 = buf.add(allocs.window_pos);
        window_ptr.write_bytes(0u8, 2 * w_size);
        let window = Window::from_raw_parts(window_ptr, window_bits);

        let prev_ptr = buf.add(allocs.prev_pos).cast::<Pos>();
        prev_ptr.write_bytes(0, w_size);
        let prev = WeakSliceMut::from_raw_parts_mut(prev_ptr, w_size);

        let head_ptr = buf.add(allocs.head_pos).cast::<[Pos; HASH_SIZE]>();
        head_ptr.write_bytes(0, 1);
        let head = WeakArrayMut::<Pos, HASH_SIZE>::from_ptr(head_ptr);

        let pending_ptr = buf.add(allocs.pending_pos).cast::<MaybeUninit<u8>>();
        let pending = Pending::from_raw_parts(pending_ptr, 4 * lit_bufsize);

        let sym_buf_ptr = buf.add(allocs.sym_buf_pos);
        let sym_buf = SymBuf::from_raw_parts(sym_buf_ptr, lit_bufsize);

        (window, prev, head, pending, sym_buf)
    };

    let state = State {
        status: Status::Init,

        w_size,

        window,
        prev,
        head,
        bit_writer: BitWriter::from_pending(pending),

        lit_bufsize,

        sym_buf,

        level: level as i8,
        strategy,

        last_flush: 0,
        wrap,
        strstart: 0,
        block_start: 0,
        block_open: 0,
        window_size: 0,
        insert: 0,
        matches: 0,
        opt_len: 0,
        static_len: 0,
        lookahead: 0,
        ins_h: 0,
        max_chain_length: 0,
        max_lazy_match: 0,
        good_match: 0,
        nice_match: 0,

        l_desc: TreeDesc::EMPTY,
        d_desc: TreeDesc::EMPTY,
        bl_desc: TreeDesc::EMPTY,

        crc_fold: Crc32Fold::new(),
        gzhead: None,
        gzindex: 0,

        match_start: 0,
        prev_match: 0,
        match_available: false,
        prev_length: 0,

        // Keep the unaligned pointer and full size so `end` can deallocate.
        allocation_start,
        total_allocation_size: allocs.total_size,

        hash_calc_variant: HashCalcVariant::Standard,
        _cache_line_0: (),
        _cache_line_1: (),
        _cache_line_2: (),
        _cache_line_3: (),
        _padding_0: [0; 16],
    };

    // The State itself lives inside the same allocation as the buffers.
    let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
    unsafe { state_allocation.write(state) };

    stream.state = state_allocation.cast::<internal_state>();

    let Some(stream) = (unsafe { DeflateStream::from_stream_mut(stream) }) else {
        if cfg!(debug_assertions) {
            unreachable!("we should have initialized the stream properly");
        }
        return ReturnCode::StreamError;
    };

    reset(stream)
}
435
/// Changes the compression `level` and `strategy` mid-stream
/// (the equivalent of zlib's `deflateParams`).
///
/// If the compression function would change and data has already been
/// processed, the currently buffered input is flushed first so that earlier
/// data is compressed with the old parameters. Returns `StreamError` for an
/// invalid level, and `BufError` when the flush could not fully complete
/// (there was no output space); otherwise `Ok`.
pub fn params(stream: &mut DeflateStream, level: i32, strategy: Strategy) -> ReturnCode {
    let level = if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        6
    } else {
        level
    };

    if !(0..=9).contains(&level) {
        return ReturnCode::StreamError;
    }

    let level = level as i8;

    let func = CONFIGURATION_TABLE[stream.state.level as usize].func;

    let state = &mut stream.state;

    // Flush only when the effective algorithm changes and some work has
    // already happened (last_flush == -2 means "just reset, nothing buffered").
    #[allow(unpredictable_function_pointer_comparisons)]
    if (strategy != state.strategy || func != CONFIGURATION_TABLE[level as usize].func)
        && state.last_flush != -2
    {
        let err = deflate(stream, DeflateFlush::Block);
        if err == ReturnCode::StreamError {
            return err;
        }

        let state = &mut stream.state;

        // Any remaining input or unflushed window data means the flush was
        // incomplete; the caller must provide more output space and retry.
        if stream.avail_in != 0
            || ((state.strstart as isize - state.block_start) + state.lookahead as isize) != 0
        {
            return ReturnCode::BufError;
        }
    }

    let state = &mut stream.state;

    if state.level != level {
        if state.level == 0 && state.matches != 0 {
            // Leaving level 0 (stored-only): bring the hash table up to date.
            // matches == 1 means the window slid once, so sliding the hash is
            // enough; otherwise discard it entirely.
            if state.matches == 1 {
                self::slide_hash::slide_hash(state);
            } else {
                state.head.as_mut_slice().fill(0);
            }
            state.matches = 0;
        }

        lm_set_level(state, level);
    }

    state.strategy = strategy;

    ReturnCode::Ok
}
493
/// Presets the compression dictionary (the equivalent of zlib's
/// `deflateSetDictionary`).
///
/// Only valid directly after init/reset (and never for gzip streams). The
/// dictionary is fed through the normal `fill_window` path so the hash chains
/// are populated exactly as if the data had been compressed. Returns
/// `StreamError` when called at the wrong time, otherwise `Ok`.
pub fn set_dictionary(stream: &mut DeflateStream, mut dictionary: &[u8]) -> ReturnCode {
    let state = &mut stream.state;

    let wrap = state.wrap;

    // gzip has no dictionary support; zlib streams only accept one before any
    // data, and a non-empty lookahead means compression already started.
    if wrap == 2 || (wrap == 1 && state.status != Status::Init) || state.lookahead != 0 {
        return ReturnCode::StreamError;
    }

    // For zlib streams the adler checksum of the dictionary is reported to the
    // decompressor side.
    if wrap == 1 {
        stream.adler = adler32(stream.adler as u32, dictionary) as z_checksum;
    }

    // Temporarily pretend to be a raw stream so fill_window does not checksum
    // the dictionary bytes as stream data; restored below.
    state.wrap = 0;

    if dictionary.len() >= state.window.capacity() {
        if wrap == 0 {
            // Raw deflate: the whole window will be replaced, so clear the
            // hash table and window bookkeeping first.
            state.head.as_mut_slice().fill(0);

            state.strstart = 0;
            state.block_start = 0;
            state.insert = 0;
        } else {
            // NOTE(review): for wrapped streams the current window contents
            // are deliberately kept here — confirm against zlib's
            // deflateSetDictionary behavior.
        }

        // Only the last w_size bytes of an oversized dictionary are useful.
        dictionary = &dictionary[dictionary.len() - state.w_size..];
    }

    // Route the dictionary through the regular input path, preserving the
    // caller's input pointers around it.
    let avail = stream.avail_in;
    let next = stream.next_in;
    stream.avail_in = dictionary.len() as _;
    stream.next_in = dictionary.as_ptr() as *mut u8;
    fill_window(stream);

    // Insert all complete STD_MIN_MATCH-length strings into the hash chains.
    while stream.state.lookahead >= STD_MIN_MATCH {
        let str = stream.state.strstart;
        let n = stream.state.lookahead - (STD_MIN_MATCH - 1);
        stream.state.insert_string(str, n);
        stream.state.strstart = str + n;
        stream.state.lookahead = STD_MIN_MATCH - 1;
        fill_window(stream);
    }

    let state = &mut stream.state;

    state.strstart += state.lookahead;
    state.block_start = state.strstart as _;
    state.insert = state.lookahead;
    state.lookahead = 0;
    state.prev_length = 0;
    state.match_available = false;

    // Restore the caller's input and the real wrap mode.
    stream.next_in = next;
    stream.avail_in = avail;
    state.wrap = wrap;

    ReturnCode::Ok
}
560
/// Inserts `bits` bits of `value` into the output stream ahead of the
/// compressed data (the equivalent of zlib's `deflatePrime`).
///
/// Bits are inserted least-significant first, possibly in several chunks when
/// the internal bit buffer is nearly full. Returns `BufError` for a negative
/// or too-large bit count, otherwise `Ok`.
pub fn prime(stream: &mut DeflateStream, mut bits: i32, value: i32) -> ReturnCode {
    // zlib's documented limit; larger counts still work mechanically below.
    debug_assert!(bits <= 16, "zlib only supports up to 16 bits here");

    let mut value64 = value as u64;

    let state = &mut stream.state;

    if bits < 0
        || bits > BitWriter::BIT_BUF_SIZE as i32
        || bits > (core::mem::size_of_val(&value) << 3) as i32
    {
        return ReturnCode::BufError;
    }

    let mut put;

    loop {
        // Insert at most the room left in the bit buffer, then flush.
        put = BitWriter::BIT_BUF_SIZE - state.bit_writer.bits_used;
        let put = Ord::min(put as i32, bits);

        if state.bit_writer.bits_used == 0 {
            state.bit_writer.bit_buffer = value64;
        } else {
            // Mask to `put` bits and append above the bits already present.
            state.bit_writer.bit_buffer |=
                (value64 & ((1 << put) - 1)) << state.bit_writer.bits_used;
        }

        state.bit_writer.bits_used += put as u8;
        state.bit_writer.flush_bits();
        value64 >>= put;
        bits -= put;

        if bits == 0 {
            break;
        }
    }

    ReturnCode::Ok
}
601
/// Duplicates `source` into `dest` (the equivalent of zlib's `deflateCopy`).
///
/// First bit-copies the stream header, then makes a fresh backing allocation
/// and deep-copies every internal buffer and the `State` into it, so the two
/// streams can afterwards be driven independently. Returns `MemError` when the
/// allocation fails, otherwise `Ok`.
pub fn copy<'a>(
    dest: &mut MaybeUninit<DeflateStream<'a>>,
    source: &mut DeflateStream<'a>,
) -> ReturnCode {
    let w_size = source.state.w_size;
    let window_bits = source.state.w_bits() as usize;
    let lit_bufsize = source.state.lit_bufsize;

    // Shallow copy of the stream struct; the state pointer is fixed up below.
    unsafe { core::ptr::copy_nonoverlapping(source, dest.as_mut_ptr(), 1) };

    let source_state = &source.state;
    let alloc = &source.alloc;

    // Same single-allocation layout as `init`.
    let allocs = DeflateAllocOffsets::new(window_bits, lit_bufsize);

    let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
        return ReturnCode::MemError;
    };

    // Align the working base to a 64-byte boundary, as in `init`.
    let align_offset = (allocation_start.as_ptr() as usize).next_multiple_of(64)
        - (allocation_start.as_ptr() as usize);
    let buf = unsafe { allocation_start.as_ptr().add(align_offset) };

    // Deep-copy each sub-buffer from the source state.
    let (window, prev, head, pending, sym_buf) = unsafe {
        let window_ptr: *mut u8 = buf.add(allocs.window_pos);
        window_ptr
            .copy_from_nonoverlapping(source_state.window.as_ptr(), source_state.window.capacity());
        let window = Window::from_raw_parts(window_ptr, window_bits);

        let prev_ptr = buf.add(allocs.prev_pos).cast::<Pos>();
        prev_ptr.copy_from_nonoverlapping(source_state.prev.as_ptr(), source_state.prev.len());
        let prev = WeakSliceMut::from_raw_parts_mut(prev_ptr, w_size);

        let head_ptr = buf.add(allocs.head_pos).cast::<[Pos; HASH_SIZE]>();
        head_ptr.copy_from_nonoverlapping(source_state.head.as_ptr(), 1);
        let head = WeakArrayMut::<Pos, HASH_SIZE>::from_ptr(head_ptr);

        let pending_ptr = buf.add(allocs.pending_pos);
        let pending = source_state.bit_writer.pending.clone_to(pending_ptr);

        let sym_buf_ptr = buf.add(allocs.sym_buf_pos);
        let sym_buf = source_state.sym_buf.clone_to(sym_buf_ptr);

        (window, prev, head, pending, sym_buf)
    };

    // The bit writer is recreated around the copied pending buffer; carry over
    // the in-flight bits.
    let mut bit_writer = BitWriter::from_pending(pending);
    bit_writer.bits_used = source_state.bit_writer.bits_used;
    bit_writer.bit_buffer = source_state.bit_writer.bit_buffer;

    let dest_state = State {
        status: source_state.status,
        bit_writer,
        last_flush: source_state.last_flush,
        wrap: source_state.wrap,
        strategy: source_state.strategy,
        level: source_state.level,
        good_match: source_state.good_match,
        nice_match: source_state.nice_match,
        l_desc: source_state.l_desc.clone(),
        d_desc: source_state.d_desc.clone(),
        bl_desc: source_state.bl_desc.clone(),
        prev_match: source_state.prev_match,
        match_available: source_state.match_available,
        strstart: source_state.strstart,
        match_start: source_state.match_start,
        prev_length: source_state.prev_length,
        max_chain_length: source_state.max_chain_length,
        max_lazy_match: source_state.max_lazy_match,
        block_start: source_state.block_start,
        block_open: source_state.block_open,
        window,
        sym_buf,
        lit_bufsize: source_state.lit_bufsize,
        window_size: source_state.window_size,
        matches: source_state.matches,
        opt_len: source_state.opt_len,
        static_len: source_state.static_len,
        insert: source_state.insert,
        w_size: source_state.w_size,
        lookahead: source_state.lookahead,
        prev,
        head,
        ins_h: source_state.ins_h,
        hash_calc_variant: source_state.hash_calc_variant,
        crc_fold: source_state.crc_fold,
        // gzhead cannot be cloned through the reference; it is raw-copied below.
        gzhead: None,
        gzindex: source_state.gzindex,
        allocation_start,
        total_allocation_size: allocs.total_size,
        _cache_line_0: (),
        _cache_line_1: (),
        _cache_line_2: (),
        _cache_line_3: (),
        _padding_0: source_state._padding_0,
    };

    // Write the state into its slot inside the new allocation and point the
    // copied stream header at it.
    let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
    unsafe { state_allocation.write(dest_state) };
    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
    unsafe { core::ptr::write(field_ptr as *mut *mut State, state_allocation) };

    // NOTE(review): this bit-copies the `Option<&mut gz_header>` out of the
    // source, so both states end up referencing the same header — confirm this
    // aliasing is intended (it matches C zlib's pointer copy).
    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state.gzhead) };
    unsafe { core::ptr::copy(&source_state.gzhead, field_ptr, 1) };

    ReturnCode::Ok
}
717
/// Tears down the stream and frees its single backing allocation
/// (the equivalent of zlib's `deflateEnd`).
///
/// The state pointer is nulled before deallocation so the returned `z_stream`
/// can no longer be mistaken for a live deflate stream. Returns `Err` when the
/// stream was still mid-compression (pending output is discarded), `Ok`
/// otherwise; either way the resources are released.
pub fn end<'a>(stream: &'a mut DeflateStream) -> Result<&'a mut z_stream, &'a mut z_stream> {
    let status = stream.state.status;
    let allocation_start = stream.state.allocation_start;
    let total_allocation_size = stream.state.total_allocation_size;
    let alloc = stream.alloc;

    let stream = stream.as_z_stream_mut();
    let _ = core::mem::replace(&mut stream.state, core::ptr::null_mut());

    // One deallocation frees the State and all buffers (see `init`).
    unsafe { alloc.deallocate(allocation_start.as_ptr(), total_allocation_size) };

    match status {
        Status::Busy => Err(stream),
        _ => Ok(stream),
    }
}
738
739pub fn reset(stream: &mut DeflateStream) -> ReturnCode {
740 let ret = reset_keep(stream);
741
742 if ret == ReturnCode::Ok {
743 lm_init(stream.state);
744 }
745
746 ret
747}
748
/// Resets the stream's bookkeeping but keeps the match-search state
/// (zlib's `deflateResetKeep`).
///
/// Restores counters, pending output, wrap mode, checksum seed and the
/// Huffman trees to their post-init values; always returns `Ok`.
pub fn reset_keep(stream: &mut DeflateStream) -> ReturnCode {
    stream.total_in = 0;
    stream.total_out = 0;
    stream.msg = core::ptr::null_mut();
    stream.data_type = crate::c_api::Z_UNKNOWN;

    let state = &mut stream.state;

    state.bit_writer.pending.reset_keep();

    // wrap may have been negated to mark "header already written"; undo that.
    state.wrap = state.wrap.abs();

    state.status = match state.wrap {
        2 => Status::GZip,
        _ => Status::Init,
    };

    // gzip uses crc32, zlib (and raw) use adler32 as the running checksum.
    stream.adler = match state.wrap {
        2 => {
            state.crc_fold = Crc32Fold::new();
            CRC32_INITIAL_VALUE as _
        }
        _ => ADLER32_INITIAL_VALUE as _,
    };

    // -2 marks "freshly reset, no work done yet" (checked in `params`).
    state.last_flush = -2;

    state.zng_tr_init();

    ReturnCode::Ok
}
781
/// Initializes the longest-match (LZ77) state for a new compression run:
/// clears the hash table, applies the level's tuning parameters, and zeroes
/// all window/match bookkeeping.
fn lm_init(state: &mut State) {
    state.window_size = 2 * state.w_size;

    state.head.as_mut_slice().fill(0);

    // Pull max_lazy_match / good_match / nice_match / max_chain_length from
    // the configuration table for the current level.
    lm_set_level(state, state.level);

    state.strstart = 0;
    state.block_start = 0;
    state.lookahead = 0;
    state.insert = 0;
    state.prev_length = 0;
    state.match_available = false;
    state.match_start = 0;
    state.ins_h = 0;
}
800
801fn lm_set_level(state: &mut State, level: i8) {
802 state.max_lazy_match = CONFIGURATION_TABLE[level as usize].max_lazy;
803 state.good_match = CONFIGURATION_TABLE[level as usize].good_length;
804 state.nice_match = CONFIGURATION_TABLE[level as usize].nice_length;
805 state.max_chain_length = CONFIGURATION_TABLE[level as usize].max_chain;
806
807 state.hash_calc_variant = HashCalcVariant::for_max_chain_length(state.max_chain_length);
808 state.level = level;
809}
810
/// Overrides the internal match-search tuning parameters
/// (the equivalent of zlib's `deflateTune`); always returns `Ok`.
///
/// NOTE(review): the `usize -> u16` casts truncate silently for values above
/// 65535 — callers are expected to pass small tuning values; confirm that
/// matches the C API contract.
pub fn tune(
    stream: &mut DeflateStream,
    good_length: usize,
    max_lazy: usize,
    nice_length: usize,
    max_chain: usize,
) -> ReturnCode {
    stream.state.good_match = good_length as u16;
    stream.state.max_lazy_match = max_lazy as u16;
    stream.state.nice_match = nice_length as u16;
    stream.state.max_chain_length = max_chain as u16;

    ReturnCode::Ok
}
825
/// A Huffman tree node, mirroring zlib's `ct_data`: field `a` holds either the
/// symbol frequency (while building) or the code bits (when done); field `b`
/// holds either the parent index ("dad") or the code length.
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct Value {
    a: u16,
    b: u16,
}
832
impl Value {
    pub(crate) const fn new(a: u16, b: u16) -> Self {
        Self { a, b }
    }

    // `freq`/`code` deliberately share field `a`, and `dad`/`len` share field
    // `b` — this mirrors the unions in zlib's `ct_data`; which interpretation
    // is valid depends on the tree-construction phase.

    /// Frequency count (tree-building phase); shares storage with `code`.
    pub(crate) fn freq_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    /// Code bits (finished tree); shares storage with `freq`.
    pub(crate) fn code_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    /// Parent node index (tree-building phase); shares storage with `len`.
    pub(crate) fn dad_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    /// Code length in bits (finished tree); shares storage with `dad`.
    pub(crate) fn len_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    #[inline(always)]
    pub(crate) const fn freq(self) -> u16 {
        self.a
    }

    #[inline(always)]
    pub(crate) const fn code(self) -> u16 {
        self.a
    }

    #[inline(always)]
    pub(crate) const fn dad(self) -> u16 {
        self.b
    }

    #[inline(always)]
    pub(crate) const fn len(self) -> u16 {
        self.b
    }
}
874
/// Number of length codes, not counting the special END_BLOCK code.
pub(crate) const LENGTH_CODES: usize = 29;

/// Number of literal byte values 0..=255.
const LITERALS: usize = 256;

/// Number of literal-or-length codes, including the END_BLOCK code.
pub(crate) const L_CODES: usize = LITERALS + 1 + LENGTH_CODES;

/// Number of distance codes.
pub(crate) const D_CODES: usize = 30;

/// Number of codes used to transfer the bit lengths.
const BL_CODES: usize = 19;

/// Maximum heap size used when building the Huffman trees.
const HEAP_SIZE: usize = 2 * L_CODES + 1;

/// No code may be longer than MAX_BITS bits.
const MAX_BITS: usize = 15;

/// No bit-length code may be longer than MAX_BL_BITS bits.
const MAX_BL_BITS: usize = 7;

/// Size of the distance-code lookup table.
pub(crate) const DIST_CODE_LEN: usize = 512;
900
/// Accumulates output bits LSB-first in a 64-bit buffer and flushes complete
/// bytes into the `pending` output buffer.
struct BitWriter<'a> {
    pub(crate) pending: Pending<'a>, // output bytes waiting to be handed to the caller
    pub(crate) bit_buffer: u64,      // bits not yet flushed, least significant first
    pub(crate) bits_used: u8,        // number of valid bits in `bit_buffer`

    #[cfg(feature = "ZLIB_DEBUG")]
    // total bit length of compressed data (debug statistics only)
    compressed_len: usize,
    #[cfg(feature = "ZLIB_DEBUG")]
    // total number of bits sent (debug statistics only)
    bits_sent: usize,
}
913
/// Encodes a match length symbol `lc` (the match length offset by
/// STD_MIN_MATCH in zlib's scheme) against the literal/length tree, returning
/// the bit pattern and its length, with any extra length bits appended.
#[inline]
const fn encode_len(ltree: &[Value], lc: u8) -> (u64, usize) {
    let mut lc = lc as usize;

    // Map the length to its length code, then to the tree index
    // (length codes follow the literals and END_BLOCK in the tree).
    let code = self::trees_tbl::LENGTH_CODE[lc] as usize;
    let c = code + LITERALS + 1;
    assert!(c < L_CODES, "bad l_code");
    let lnode = ltree[c];
    let mut match_bits: u64 = lnode.code() as u64;
    let mut match_bits_len = lnode.len() as usize;
    let extra = StaticTreeDesc::EXTRA_LBITS[code] as usize;
    if extra != 0 {
        // Append the extra bits: the length's offset from the code's base.
        lc -= self::trees_tbl::BASE_LENGTH[code] as usize;
        match_bits |= (lc as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}
936
/// Encodes a match distance (1-based) against the distance tree, returning the
/// bit pattern and its length, with any extra distance bits appended.
#[inline]
const fn encode_dist(dtree: &[Value], mut dist: u16) -> (u64, usize) {
    // dist is 1-based on entry; the distance-code lookup expects 0-based.
    dist -= 1;
    let code = State::d_code(dist as usize) as usize;
    assert!(code < D_CODES, "bad d_code");
    let dnode = dtree[code];
    let mut match_bits = dnode.code() as u64;
    let mut match_bits_len = dnode.len() as usize;
    let extra = StaticTreeDesc::EXTRA_DBITS[code] as usize;
    if extra != 0 {
        // Append the extra bits: the distance's offset from the code's base.
        dist -= self::trees_tbl::BASE_DIST[code];
        match_bits |= (dist as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}
957
impl<'a> BitWriter<'a> {
    /// Capacity of `bit_buffer` in bits.
    pub(crate) const BIT_BUF_SIZE: u8 = 64;

    /// Creates an empty bit writer that flushes into `pending`.
    fn from_pending(pending: Pending<'a>) -> Self {
        Self {
            pending,
            bit_buffer: 0,
            bits_used: 0,

            #[cfg(feature = "ZLIB_DEBUG")]
            compressed_len: 0,
            #[cfg(feature = "ZLIB_DEBUG")]
            bits_sent: 0,
        }
    }

    /// Moves all *complete* bytes from the bit buffer into `pending`,
    /// keeping up to 7 leftover bits in the buffer.
    fn flush_bits(&mut self) {
        debug_assert!(self.bits_used <= 64);
        // `removed` is keep_bytes * 8: the number of bits written out.
        let removed = self.bits_used.saturating_sub(7).next_multiple_of(8);
        let keep_bytes = self.bits_used / 8;
        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used -= removed;
        // checked_shr: shifting a u64 by 64 would be UB-adjacent (panic/wrap);
        // a full flush simply leaves an empty buffer.
        self.bit_buffer = self.bit_buffer.checked_shr(removed as u32).unwrap_or(0);
    }

    /// Flushes everything including a final partial byte (zero-padded),
    /// leaving the writer byte-aligned and empty.
    fn emit_align(&mut self) {
        debug_assert!(self.bits_used <= 64);
        let keep_bytes = self.bits_used.div_ceil(8);
        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used = 0;
        self.bit_buffer = 0;

        self.sent_bits_align();
    }

    /// Debug trace of an emitted bit sequence (no-op unless tracing is on).
    fn send_bits_trace(&self, _value: u64, _len: u8) {
        trace!(" l {:>2} v {:>4x} ", _len, _value);
    }

    /// Adds to the debug compressed-length statistic.
    fn cmpr_bits_add(&mut self, _len: usize) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.compressed_len += _len;
        }
    }

    /// Rounds the debug compressed-length statistic up to a byte boundary.
    fn cmpr_bits_align(&mut self) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.compressed_len = self.compressed_len.next_multiple_of(8);
        }
    }

    /// Adds to the debug bits-sent statistic.
    fn sent_bits_add(&mut self, _len: usize) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bits_sent += _len;
        }
    }

    /// Rounds the debug bits-sent statistic up to a byte boundary.
    fn sent_bits_align(&mut self) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bits_sent = self.bits_sent.next_multiple_of(8);
        }
    }

    /// Appends the low `len` bits of `val` to the output, LSB-first.
    #[inline(always)]
    fn send_bits(&mut self, val: u64, len: u8) {
        debug_assert!(len <= 64);
        debug_assert!(self.bits_used <= 64);

        let total_bits = len + self.bits_used;

        self.send_bits_trace(val, len);
        self.sent_bits_add(len as usize);

        if total_bits < Self::BIT_BUF_SIZE {
            // Fast path: everything fits in the bit buffer.
            self.bit_buffer |= val << self.bits_used;
            self.bits_used = total_bits;
        } else {
            self.send_bits_overflow(val, total_bits);
        }
    }

    /// Slow path of `send_bits`: the value straddles the 64-bit buffer, so
    /// flush the full buffer and keep the spill-over bits.
    fn send_bits_overflow(&mut self, val: u64, total_bits: u8) {
        if self.bits_used == Self::BIT_BUF_SIZE {
            self.pending.extend(&self.bit_buffer.to_le_bytes());
            self.bit_buffer = val;
            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
        } else {
            self.bit_buffer |= val << self.bits_used;
            self.pending.extend(&self.bit_buffer.to_le_bytes());
            // The bits of `val` that did not fit become the new buffer.
            self.bit_buffer = val >> (Self::BIT_BUF_SIZE - self.bits_used);
            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
        }
    }

    /// Emits the Huffman code of `code` from `tree`.
    fn send_code(&mut self, code: usize, tree: &[Value]) {
        let node = tree[code];
        self.send_bits(node.code() as u64, node.len() as u8)
    }

    /// Emits an empty static-tree block to advance to a byte boundary
    /// (used for Z_PARTIAL_FLUSH-style alignment).
    pub fn align(&mut self) {
        self.emit_tree(BlockType::StaticTrees, false);
        self.emit_end_block(&STATIC_LTREE, false);
        self.flush_bits();
    }

    /// Emits the 3-bit deflate block header: 1 bit "last block", 2 bits type.
    pub(crate) fn emit_tree(&mut self, block_type: BlockType, is_last_block: bool) {
        let header_bits = ((block_type as u64) << 1) | (is_last_block as u64);
        self.send_bits(header_bits, 3);
        trace!("\n--- Emit Tree: Last: {}\n", is_last_block as u8);
    }

    /// Emits END_BLOCK and, for the final block, pads out to a byte boundary.
    pub(crate) fn emit_end_block_and_align(&mut self, ltree: &[Value], is_last_block: bool) {
        self.emit_end_block(ltree, is_last_block);

        if is_last_block {
            self.emit_align();
        }
    }

    /// Emits the END_BLOCK symbol (code 256) from `ltree`.
    fn emit_end_block(&mut self, ltree: &[Value], _is_last_block: bool) {
        const END_BLOCK: usize = 256;
        self.send_code(END_BLOCK, ltree);

        trace!(
            "\n+++ Emit End Block: Last: {} Pending: {} Total Out: {}\n",
            _is_last_block as u8,
            self.pending.pending().len(),
            "<unknown>"
        );
    }

    /// Emits a literal byte; returns the number of bits used.
    pub(crate) fn emit_lit(&mut self, ltree: &[Value], c: u8) -> u16 {
        self.send_code(c as usize, ltree);

        #[cfg(feature = "ZLIB_DEBUG")]
        if let Some(c) = char::from_u32(c as u32) {
            if isgraph(c as u8) {
                trace!(" '{}' ", c);
            }
        }

        ltree[c as usize].len()
    }

    /// Emits a (length, distance) match pair as one combined bit sequence;
    /// returns the total number of bits used.
    pub(crate) fn emit_dist(
        &mut self,
        ltree: &[Value],
        dtree: &[Value],
        lc: u8,
        dist: u16,
    ) -> usize {
        let (mut match_bits, mut match_bits_len) = encode_len(ltree, lc);

        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);

        match_bits |= dist_match_bits << match_bits_len;
        match_bits_len += dist_match_bits_len;

        self.send_bits(match_bits, match_bits_len as u8);

        match_bits_len
    }

    /// Like `emit_dist`, but uses the precomputed static-tree length encoding.
    pub(crate) fn emit_dist_static(&mut self, lc: u8, dist: u16) -> usize {
        let precomputed_len = trees_tbl::STATIC_LTREE_ENCODINGS[lc as usize];
        let mut match_bits = precomputed_len.code() as u64;
        let mut match_bits_len = precomputed_len.len() as usize;

        let dtree = self::trees_tbl::STATIC_DTREE.as_slice();
        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);

        match_bits |= dist_match_bits << match_bits_len;
        match_bits_len += dist_match_bits_len;

        self.send_bits(match_bits, match_bits_len as u8);

        match_bits_len
    }

    /// Emits all buffered symbols (literals and matches) followed by END_BLOCK.
    fn compress_block_help(&mut self, sym_buf: &SymBuf, ltree: &[Value], dtree: &[Value]) {
        for (dist, lc) in sym_buf.iter() {
            match dist {
                // dist == 0 encodes a literal, anything else a match.
                0 => self.emit_lit(ltree, lc) as usize,
                _ => self.emit_dist(ltree, dtree, lc, dist),
            };
        }

        self.emit_end_block(ltree, false)
    }

    /// Emits the code lengths of `tree` (up to `max_code`), run-length encoded
    /// with the bit-length tree: repeats use REP_3_6 (16), zero runs use
    /// REPZ_3_10 (17) / REPZ_11_138 (18), per RFC 1951.
    fn send_tree(&mut self, tree: &[Value], bl_tree: &[Value], max_code: usize) {
        let mut prevlen: isize = -1; // last emitted length
        let mut curlen; // length of the current code
        let mut nextlen = tree[0].len(); // length of the next code
        let mut count = 0; // repeat count of the current length
        let mut max_count = 7; // max repeat count
        let mut min_count = 4; // min repeat count worth a repeat symbol
        if nextlen == 0 {
            max_count = 138;
            min_count = 3;
        }

        for n in 0..=max_code {
            curlen = nextlen;
            nextlen = tree[n + 1].len();
            count += 1;
            if count < max_count && curlen == nextlen {
                // Still inside a run; keep counting.
                continue;
            } else if count < min_count {
                // Run too short for a repeat symbol: emit the codes directly.
                loop {
                    self.send_code(curlen as usize, bl_tree);

                    count -= 1;
                    if count == 0 {
                        break;
                    }
                }
            } else if curlen != 0 {
                // Repeat of a nonzero length: emit it once, then REP_3_6.
                if curlen as isize != prevlen {
                    self.send_code(curlen as usize, bl_tree);
                    count -= 1;
                }
                assert!((3..=6).contains(&count), " 3_6?");
                self.send_code(REP_3_6, bl_tree);
                self.send_bits(count - 3, 2);
            } else if count <= 10 {
                // Short zero run.
                self.send_code(REPZ_3_10, bl_tree);
                self.send_bits(count - 3, 3);
            } else {
                // Long zero run.
                self.send_code(REPZ_11_138, bl_tree);
                self.send_bits(count - 11, 7);
            }

            count = 0;
            prevlen = curlen as isize;

            // Choose the run-length limits for the next run.
            if nextlen == 0 {
                max_count = 138;
                min_count = 3;
            } else if curlen == nextlen {
                max_count = 6;
                min_count = 3;
            } else {
                max_count = 7;
                min_count = 4;
            }
        }
    }
}
1223
/// Internal deflate state (zlib's `internal_state`), cache-line aligned; the
/// `_cache_line_*` markers and `_padding_0` document/force the field grouping.
#[repr(C, align(64))]
pub(crate) struct State<'a> {
    status: Status, // current phase of the stream (Init / GZip / Busy / ...)

    last_flush: i8, // value of the flush parameter of the previous call; -2 = just reset
    pub(crate) wrap: i8, // 0 = raw deflate, 1 = zlib wrapper, 2 = gzip wrapper
    pub(crate) strategy: Strategy,
    pub(crate) level: i8, // compression level 0..=9

    pub(crate) block_open: u8, // nonzero while a stored/dynamic block is open

    pub(crate) hash_calc_variant: HashCalcVariant, // which hash function is in use

    pub(crate) match_available: bool, // a match from the previous step is still pending
    pub(crate) good_match: u16, // reduce lazy search above this match length

    pub(crate) nice_match: u16, // stop searching when a match of this length is found

    pub(crate) match_start: Pos, // start of the current matching string
    pub(crate) prev_match: Pos, // previous match position
    pub(crate) strstart: usize, // start of the string to insert / current position
    pub(crate) window: Window<'a>, // sliding input window
    pub(crate) w_size: usize, // window size: 1 << window_bits
    pub(crate) lookahead: usize, // number of valid bytes ahead of strstart
    _cache_line_0: (),

    pub(crate) prev: WeakSliceMut<'a, u16>, // hash chains: previous position with the same hash
    pub(crate) head: WeakArrayMut<'a, u16, HASH_SIZE>, // hash table: head of each hash chain

    pub(crate) prev_length: u16, // length of the best match from the previous step

    pub(crate) max_chain_length: u16, // limit on hash-chain traversal per search

    pub(crate) max_lazy_match: u16, // attempt lazy matching only below this length

    pub(crate) matches: u8, // number of string matches in the current block

    pub(crate) block_start: isize, // window offset of the current block's start

    pub(crate) sym_buf: SymBuf<'a>, // buffered (distance, literal/length) symbols

    _cache_line_1: (),

    lit_bufsize: usize, // size of sym_buf; pending buffer is 4x this

    pub(crate) window_size: usize, // 2 * w_size (window plus lookahead room)

    bit_writer: BitWriter<'a>, // bit-level output into the pending buffer

    _cache_line_2: (),

    opt_len: usize, // bit length of the current block with optimal trees
    static_len: usize, // bit length of the current block with static trees

    pub(crate) insert: usize, // bytes at the window start still to be hashed

    pub(crate) ins_h: u32, // rolling hash of the string to be inserted

    gzhead: Option<&'a mut gz_header>, // user-provided gzip header, if any
    gzindex: usize, // progress index while writing the gzip header

    _padding_0: [u8; 16],

    _cache_line_3: (),

    crc_fold: crate::crc32::Crc32Fold, // folding crc32 state for gzip streams

    allocation_start: NonNull<u8>, // unaligned base of the single backing allocation
    total_allocation_size: usize, // size passed to the allocator (for deallocation)

    // Dynamic Huffman tree descriptors for literals/lengths, distances, and
    // bit lengths respectively.
    l_desc: TreeDesc<HEAP_SIZE>,
    d_desc: TreeDesc<{ 2 * D_CODES + 1 }>,
    bl_desc: TreeDesc<{ 2 * BL_CODES + 1 }>,
}
1359
/// Match-search strategy, mirroring zlib's Z_* strategy constants (0..=4).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Strategy {
    #[default]
    Default = 0,     // normal matching
    Filtered = 1,    // favor Huffman coding over string matching
    HuffmanOnly = 2, // Huffman coding only, no string matching
    Rle = 3,         // limit distances to 1 (run-length encoding)
    Fixed = 4,       // static Huffman trees only
}
1370
1371impl TryFrom<i32> for Strategy {
1372 type Error = ();
1373
1374 fn try_from(value: i32) -> Result<Self, Self::Error> {
1375 match value {
1376 0 => Ok(Strategy::Default),
1377 1 => Ok(Strategy::Filtered),
1378 2 => Ok(Strategy::HuffmanOnly),
1379 3 => Ok(Strategy::Rle),
1380 4 => Ok(Strategy::Fixed),
1381 _ => Err(()),
1382 }
1383 }
1384}
1385
/// Heuristic classification of the input data; discriminants match zlib's
/// `Z_BINARY` / `Z_TEXT` / `Z_UNKNOWN`, reported via `stream.data_type`.
#[derive(Debug, PartialEq, Eq)]
enum DataType {
    Binary = 0,
    Text = 1,
    Unknown = 2,
}
1392
impl<'a> State<'a> {
    pub const BIT_BUF_SIZE: u8 = BitWriter::BIT_BUF_SIZE;

    /// log2 of the window size; valid because `w_size` is a power of two.
    pub(crate) fn w_bits(&self) -> u32 {
        self.w_size.trailing_zeros()
    }

    /// Mask for reducing a window position modulo `w_size`.
    pub(crate) fn w_mask(&self) -> usize {
        self.w_size - 1
    }

    /// Furthest back a match may start, keeping `MIN_LOOKAHEAD` bytes of
    /// slack at the end of the window so matches never run past it.
    pub(crate) fn max_dist(&self) -> usize {
        self.w_size - MIN_LOOKAHEAD
    }

    /// Longest string insertion attempted before switching strategies;
    /// aliases `max_lazy_match`, mirroring zlib's `max_insert_length`.
    pub(crate) fn max_insert_length(&self) -> usize {
        self.max_lazy_match as usize
    }

    /// Size of the pending output buffer: four times the literal buffer.
    pub(crate) fn pending_buf_size(&self) -> usize {
        self.lit_bufsize * 4
    }

    /// Feeds `val` into the rolling hash `h`, dispatching on the configured
    /// hash implementation.
    #[inline(always)]
    pub(crate) fn update_hash(&self, h: u32, val: u32) -> u32 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::update_hash(h, val),
            HashCalcVariant::Roll => RollHashCalc::update_hash(h, val),
        }
    }

    /// Inserts the string at window position `string` into the hash table
    /// and returns the previous head of its hash chain.
    #[inline(always)]
    pub(crate) fn quick_insert_string(&mut self, string: usize) -> u16 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::quick_insert_string(self, string),
            HashCalcVariant::Roll => RollHashCalc::quick_insert_string(self, string),
        }
    }

    /// Inserts `count` consecutive strings starting at `string` into the
    /// hash table.
    #[inline(always)]
    pub(crate) fn insert_string(&mut self, string: usize, count: usize) {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::insert_string(self, string, count),
            HashCalcVariant::Roll => RollHashCalc::insert_string(self, string, count),
        }
    }

    /// Records an unmatched literal byte; returns true when the symbol
    /// buffer is full enough that the block should be flushed.
    #[inline(always)]
    pub(crate) fn tally_lit(&mut self, unmatched: u8) -> bool {
        Self::tally_lit_help(&mut self.sym_buf, &mut self.l_desc, unmatched)
    }

    /// Split-borrow helper for [`Self::tally_lit`]: pushes the literal into
    /// `sym_buf` and bumps its frequency in the literal/length tree.
    pub(crate) fn tally_lit_help(
        sym_buf: &mut SymBuf,
        l_desc: &mut TreeDesc<HEAP_SIZE>,
        unmatched: u8,
    ) -> bool {
        // Compile-time check that every u8 literal fits the length range the
        // format can encode.
        const _VERIFY: () = {
            assert!(
                u8::MAX as usize <= STD_MAX_MATCH - STD_MIN_MATCH,
                "tally_lit: bad literal"
            );
        };

        sym_buf.push_lit(unmatched);

        *l_desc.dyn_tree[unmatched as usize].freq_mut() += 1;

        sym_buf.should_flush_block()
    }

    /// Maps a match distance (0-based) to its distance code. Distances of
    /// 256 and above index the upper half of the table via `dist >> 7`.
    const fn d_code(dist: usize) -> u8 {
        // Compile-time check that the lookup table only yields valid codes.
        const _VERIFY: () = {
            let mut i = 0;
            while i < trees_tbl::DIST_CODE.len() {
                assert!(trees_tbl::DIST_CODE[i] < D_CODES as u8);
                i += 1;
            }
        };

        let index = if dist < 256 { dist } else { 256 + (dist >> 7) };
        self::trees_tbl::DIST_CODE[index]
    }

    /// Records a (distance, length) match; returns true when the symbol
    /// buffer is full enough that the block should be flushed.
    #[inline(always)]
    pub(crate) fn tally_dist(&mut self, mut dist: usize, len: usize) -> bool {
        self.sym_buf.push_dist(dist as u16, len as u8);

        // Saturating: `matches` is only used as a compressibility signal.
        self.matches = self.matches.saturating_add(1);
        dist -= 1;

        assert!(dist < self.max_dist(), "tally_dist: bad match");

        // Length codes live after the literals in the literal/length tree.
        let index = self::trees_tbl::LENGTH_CODE[len] as usize + LITERALS + 1;
        *self.l_desc.dyn_tree[index].freq_mut() += 1;

        *self.d_desc.dyn_tree[Self::d_code(dist) as usize].freq_mut() += 1;

        self.sym_buf.should_flush_block()
    }

    /// Classifies the block as text or binary from the literal frequencies,
    /// using zlib's heuristic.
    fn detect_data_type(dyn_tree: &[Value]) -> DataType {
        // Bitmask over byte values 0..32: a set bit marks a "block-listed"
        // control character whose presence implies binary data.
        const NON_TEXT: u64 = 0xf3ffc07f;
        let mut mask = NON_TEXT;

        // Any block-listed control character => binary.
        for value in &dyn_tree[0..32] {
            if (mask & 1) != 0 && value.freq() != 0 {
                return DataType::Binary;
            }

            mask >>= 1;
        }

        // Allow-listed whitespace (TAB, LF, CR) => text.
        if dyn_tree[9].freq() != 0 || dyn_tree[10].freq() != 0 || dyn_tree[13].freq() != 0 {
            return DataType::Text;
        }

        // Any printable character => text.
        if dyn_tree[32..LITERALS].iter().any(|v| v.freq() != 0) {
            return DataType::Text;
        }

        // Only non-block-listed control characters (or nothing) seen.
        DataType::Binary
    }

    /// Emits the buffered symbols using the static (fixed) Huffman trees,
    /// followed by the end-of-block code.
    fn compress_block_static_trees(&mut self) {
        let ltree = self::trees_tbl::STATIC_LTREE.as_slice();
        for (dist, lc) in self.sym_buf.iter() {
            match dist {
                // dist == 0 encodes a literal; the `as usize` only unifies
                // the arm types — the value is discarded.
                0 => self.bit_writer.emit_lit(ltree, lc) as usize,
                _ => self.bit_writer.emit_dist_static(lc, dist),
            };
        }

        self.bit_writer.emit_end_block(ltree, false)
    }

    /// Emits the buffered symbols using the freshly built dynamic trees.
    fn compress_block_dynamic_trees(&mut self) {
        self.bit_writer.compress_block_help(
            &self.sym_buf,
            &self.l_desc.dyn_tree,
            &self.d_desc.dyn_tree,
        );
    }

    /// Builds the 2-byte zlib stream header (CMF/FLG, RFC 1950).
    fn header(&self) -> u16 {
        // FLG bit 5: a preset dictionary was loaded.
        const PRESET_DICT: u16 = 0x20;

        // CMF compression method: deflate.
        const Z_DEFLATED: u16 = 8;

        // A non-zero strstart at header time means a dictionary was set.
        let dict = match self.strstart {
            0 => 0,
            _ => PRESET_DICT,
        };

        let h = ((Z_DEFLATED + ((self.w_bits() as u16 - 8) << 4)) << 8)
            | (self.level_flags() << 6)
            | dict;

        // Round up so the 16-bit header is a multiple of 31 (FCHECK).
        h + 31 - (h % 31)
    }

    /// Maps the compression level/strategy to the 2-bit FLEVEL field of the
    /// zlib header.
    fn level_flags(&self) -> u16 {
        if self.strategy >= Strategy::HuffmanOnly || self.level < 2 {
            0
        } else if self.level < 6 {
            1
        } else if self.level == 6 {
            2
        } else {
            3
        }
    }

    /// Initializes the tree descriptors and the bit writer for a new
    /// stream, then resets the first block's statistics.
    fn zng_tr_init(&mut self) {
        self.l_desc.stat_desc = &StaticTreeDesc::L;

        self.d_desc.stat_desc = &StaticTreeDesc::D;

        self.bl_desc.stat_desc = &StaticTreeDesc::BL;

        self.bit_writer.bit_buffer = 0;
        self.bit_writer.bits_used = 0;

        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bit_writer.compressed_len = 0;
            self.bit_writer.bits_sent = 0;
        }

        self.init_block();
    }

    /// Resets all per-block state: tree frequencies, size estimates, the
    /// symbol buffer, and the match counter.
    fn init_block(&mut self) {
        for value in &mut self.l_desc.dyn_tree[..L_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.d_desc.dyn_tree[..D_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.bl_desc.dyn_tree[..BL_CODES] {
            *value.freq_mut() = 0;
        }

        // Every block ends with exactly one end-of-block symbol.
        const END_BLOCK: usize = 256;

        *self.l_desc.dyn_tree[END_BLOCK].freq_mut() = 1;
        self.opt_len = 0;
        self.static_len = 0;
        self.sym_buf.clear();
        self.matches = 0;
    }
}
1632
/// Progress of the deflate header/body state machine.
///
/// gzip streams walk `GZip -> Extra -> Name -> Comment -> Hcrc -> Busy` in
/// order while the header is emitted; zlib/raw streams go from `Init`
/// straight to `Busy`.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Status {
    // Stream header not yet written.
    Init = 1,

    // gzip header sub-states, visited in this order.
    GZip = 4,
    Extra = 5,
    Name = 6,
    Comment = 7,
    Hcrc = 8,

    // Header written; compressing data.
    Busy = 2,
    // A Finish flush has begun; no further input is accepted.
    Finish = 3,
}
1647
/// Ranks flush levels so their relative "strength" can be compared
/// (mirrors zlib's `RANK` macro).
///
/// Levels 0..=4 (NoFlush through Finish) map to the even values 0..=8,
/// while levels above 4 (Block and friends) map to small odd values, so a
/// Finish always outranks them.
const fn rank_flush(f: i8) -> i8 {
    let doubled = f * 2;
    if f > 4 {
        doubled - 9
    } else {
        doubled
    }
}
1652
/// Outcome of one run of the block-compression algorithm.
#[derive(Debug)]
pub(crate) enum BlockState {
    /// Block not completed; need more input or more output space.
    NeedMore = 0,
    /// A block flush was performed.
    BlockDone = 1,
    /// Finish started; only more output space is needed to complete it.
    FinishStarted = 2,
    /// Finish done; accept no more input or output.
    FinishDone = 3,
}
1664
// Maximum payload of a stored (uncompressed) block: LEN is a 16-bit field
// (RFC 1951 section 3.2.4).
pub(crate) const MAX_STORED: usize = 65535;
/// Copies up to `size` bytes of stream input into the window at `offset`,
/// updating the running checksum (crc32 for gzip wrap, adler32 for zlib
/// wrap) and the stream's input counters. Returns the number of bytes read.
pub(crate) fn read_buf_window(stream: &mut DeflateStream, offset: usize, size: usize) -> usize {
    let len = Ord::min(stream.avail_in as usize, size);

    if len == 0 {
        return 0;
    }

    stream.avail_in -= len as u32;

    if stream.state.wrap == 2 {
        // gzip: fold the new bytes into the CRC32 after copying them in.
        let window = &mut stream.state.window;
        // SAFETY(review): relies on `next_in` being valid for `len` reads —
        // the caller contract inherited from the C API.
        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };

        let data = &stream.state.window.filled()[offset..][..len];
        stream.state.crc_fold.fold(data, CRC32_INITIAL_VALUE);
    } else if stream.state.wrap == 1 {
        // zlib: maintain the adler32 checksum instead.
        let window = &mut stream.state.window;
        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };

        let data = &stream.state.window.filled()[offset..][..len];
        stream.adler = adler32(stream.adler as u32, data) as _;
    } else {
        // raw deflate: no checksum.
        let window = &mut stream.state.window;
        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
    }

    stream.next_in = stream.next_in.wrapping_add(len);
    stream.total_in = stream.total_in.wrapping_add(len as crate::c_api::z_size);

    len
}
1708
/// The 2-bit BTYPE field of a deflate block header (RFC 1951 section 3.2.3).
pub(crate) enum BlockType {
    StoredBlock = 0,
    StaticTrees = 1,
    DynamicTrees = 2,
}
1714
1715pub(crate) fn zng_tr_stored_block(
1716 state: &mut State,
1717 window_range: core::ops::Range<usize>,
1718 is_last: bool,
1719) {
1720 state.bit_writer.emit_tree(BlockType::StoredBlock, is_last);
1722
1723 state.bit_writer.emit_align();
1725
1726 state.bit_writer.cmpr_bits_align();
1727
1728 let input_block: &[u8] = &state.window.filled()[window_range];
1729 let stored_len = input_block.len() as u16;
1730
1731 state.bit_writer.pending.extend(&stored_len.to_le_bytes());
1732 state
1733 .bit_writer
1734 .pending
1735 .extend(&(!stored_len).to_le_bytes());
1736
1737 state.bit_writer.cmpr_bits_add(32);
1738 state.bit_writer.sent_bits_add(32);
1739 if stored_len > 0 {
1740 state.bit_writer.pending.extend(input_block);
1741 state.bit_writer.cmpr_bits_add((stored_len << 3) as usize);
1742 state.bit_writer.sent_bits_add((stored_len << 3) as usize);
1743 }
1744}
1745
// Smallest match length the deflate format can encode (RFC 1951).
pub(crate) const STD_MIN_MATCH: usize = 3;
// Largest match length the deflate format can encode.
pub(crate) const STD_MAX_MATCH: usize = 258;

// Minimum match length zlib-ng actually wants to emit; very short matches
// can cost more bits than the literals they replace. NOTE(review): the
// enforcement lives in the matching code, not visible here.
pub(crate) const WANT_MIN_MATCH: usize = 4;

// Lookahead that must remain at the window end: one worst-case match plus
// the start of a following match.
pub(crate) const MIN_LOOKAHEAD: usize = STD_MAX_MATCH + STD_MIN_MATCH + 1;
1755
/// Refills the window's lookahead from the stream input, sliding the window
/// back by `w_size` when `strstart` gets too close to the end, and keeps
/// the hash table consistent with the newly visible bytes.
#[inline]
pub(crate) fn fill_window(stream: &mut DeflateStream) {
    debug_assert!(stream.state.lookahead < MIN_LOOKAHEAD);

    let wsize = stream.state.w_size;

    loop {
        let state = &mut *stream.state;
        // Free space at the end of the window.
        let mut more = state.window_size - state.lookahead - state.strstart;

        // If the window is almost full, move the upper half down to make room.
        if state.strstart >= wsize + state.max_dist() {
            let (old, new) = state.window.filled_mut()[..2 * wsize].split_at_mut(wsize);
            old.copy_from_slice(new);

            // Rebase match_start onto the slid window; a position that falls
            // off the front invalidates the previous match entirely.
            if state.match_start >= wsize as u16 {
                state.match_start -= wsize as u16;
            } else {
                state.match_start = 0;
                state.prev_length = 0;
            }

            // block_start may legitimately go negative here.
            state.strstart -= wsize; state.block_start = state.block_start.wrapping_sub_unsigned(wsize);
            state.insert = Ord::min(state.insert, state.strstart);

            // Rebase every hash-table entry by wsize.
            self::slide_hash::slide_hash(state);

            more += wsize;
        }

        if stream.avail_in == 0 {
            break;
        }

        assert!(more >= 2, "more < 2");

        let n = read_buf_window(stream, stream.state.strstart + stream.state.lookahead, more);

        let state = &mut *stream.state;
        state.lookahead += n;

        // Insert any bytes that were waiting for enough context to be hashed.
        if state.lookahead + state.insert >= STD_MIN_MATCH {
            let string = state.strstart - state.insert;
            if state.max_chain_length > 1024 {
                // Seed the rolling hash from the first two bytes.
                let v0 = state.window.filled()[string] as u32;
                let v1 = state.window.filled()[string + 1] as u32;
                state.ins_h = state.update_hash(v0, v1);
            } else if string >= 1 {
                state.quick_insert_string(string + 2 - STD_MIN_MATCH);
            }
            let mut count = state.insert;
            // With a single byte of lookahead the last string is incomplete.
            if state.lookahead == 1 {
                count -= 1;
            }
            if count > 0 {
                state.insert_string(string, count);
                state.insert -= count;
            }
        }

        // Keep going until the lookahead is sufficient or input runs dry.
        if !(stream.state.lookahead < MIN_LOOKAHEAD && stream.avail_in != 0) {
            break;
        }
    }

    assert!(
        stream.state.strstart <= stream.state.window_size - MIN_LOOKAHEAD,
        "not enough room for search"
    );
}
1843
/// Description of a static (fixed) Huffman tree together with the extra-bit
/// table shared with its dynamic counterpart.
pub(crate) struct StaticTreeDesc {
    /// The fixed tree; empty when none exists (the bit-length tree).
    pub(crate) static_tree: &'static [Value],
    /// Extra bits carried by each code, indexed from `extra_base`.
    extra_bits: &'static [u8],
    /// First code that carries extra bits.
    extra_base: usize,
    /// Number of codes in the tree.
    elems: usize,
    /// Longest permitted code length in bits.
    max_length: u16,
}
1856
impl StaticTreeDesc {
    /// Placeholder descriptor used before `zng_tr_init` wires up the real ones.
    const EMPTY: Self = Self {
        static_tree: &[],
        extra_bits: &[],
        extra_base: 0,
        elems: 0,
        max_length: 0,
    };

    /// Extra bits for each length code (RFC 1951 section 3.2.5).
    const EXTRA_LBITS: [u8; LENGTH_CODES] = [
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0,
    ];

    /// Extra bits for each distance code.
    const EXTRA_DBITS: [u8; D_CODES] = [
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12,
        13, 13,
    ];

    /// Extra bits for each bit-length code (only the repeat codes have any).
    const EXTRA_BLBITS: [u8; BL_CODES] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7];

    /// Transmission order of the bit-length code lengths (RFC 1951
    /// section 3.2.7): most useful codes first so trailing zeros can be cut.
    const BL_ORDER: [u8; BL_CODES] = [
        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
    ];

    /// Literal/length tree descriptor.
    pub(crate) const L: Self = Self {
        static_tree: &self::trees_tbl::STATIC_LTREE,
        extra_bits: &Self::EXTRA_LBITS,
        extra_base: LITERALS + 1,
        elems: L_CODES,
        max_length: MAX_BITS as u16,
    };

    /// Distance tree descriptor.
    pub(crate) const D: Self = Self {
        static_tree: &self::trees_tbl::STATIC_DTREE,
        extra_bits: &Self::EXTRA_DBITS,
        extra_base: 0,
        elems: D_CODES,
        max_length: MAX_BITS as u16,
    };

    /// Bit-length tree descriptor (has no static tree).
    pub(crate) const BL: Self = Self {
        static_tree: &[],
        extra_bits: &Self::EXTRA_BLBITS,
        extra_base: 0,
        elems: BL_CODES,
        max_length: MAX_BL_BITS as u16,
    };
}
1910
/// A dynamic Huffman tree under construction plus a link to its static
/// counterpart's description.
#[derive(Clone)]
pub(crate) struct TreeDesc<const N: usize> {
    dyn_tree: [Value; N],
    /// Largest code with a non-zero frequency, set by `build_tree`.
    max_code: usize,
    stat_desc: &'static StaticTreeDesc,
}
1917
impl<const N: usize> TreeDesc<N> {
    /// Zeroed descriptor; used both for initialization and as the swap
    /// target when `build_tree` needs the descriptor out of `State`.
    const EMPTY: Self = Self {
        dyn_tree: [Value::new(0, 0); N],
        max_code: 0,
        stat_desc: &StaticTreeDesc::EMPTY,
    };
}
1925
/// Builds the optimal Huffman tree for `desc` from its symbol frequencies:
/// assigns code lengths and codes, and updates `state.opt_len` /
/// `state.static_len` accordingly.
fn build_tree<const N: usize>(state: &mut State, desc: &mut TreeDesc<N>) {
    let tree = &mut desc.dyn_tree;
    let stree = desc.stat_desc.static_tree;
    let elements = desc.stat_desc.elems;

    let mut heap = Heap::new();
    let mut max_code = heap.initialize(&mut tree[..elements]);

    // The format requires at least two codes even if fewer symbols occur;
    // fabricate frequency-1 nodes (preferring codes 0/1) until we have two.
    while heap.heap_len < 2 {
        heap.heap_len += 1;
        let node = if max_code < 2 {
            max_code += 1;
            max_code
        } else {
            0
        };

        debug_assert!(node >= 0);
        let node = node as usize;

        heap.heap[heap.heap_len] = node as u32;
        *tree[node].freq_mut() = 1;
        heap.depth[node] = 0;
        // The fabricated node is free to encode, so back out its cost.
        state.opt_len -= 1;
        if !stree.is_empty() {
            state.static_len -= stree[node].len() as usize;
        }
    }

    debug_assert!(max_code >= 0);
    let max_code = max_code as usize;
    desc.max_code = max_code;

    // Establish the heap property before building the tree.
    let mut n = heap.heap_len / 2;
    while n >= 1 {
        heap.pqdownheap(tree, n);
        n -= 1;
    }

    heap.construct_huffman_tree(tree, elements);

    // Assign bit lengths (possibly rebalancing over-long codes), then the
    // canonical codes themselves.
    let bl_count = gen_bitlen(state, &mut heap, desc);

    gen_codes(&mut desc.dyn_tree, max_code, &bl_count);
}
1981
/// Computes the bit length for every code in the tree, clamping lengths to
/// `max_length` and redistributing the resulting overflow so the code set
/// stays a valid prefix code. Returns the count of codes per bit length.
fn gen_bitlen<const N: usize>(
    state: &mut State,
    heap: &mut Heap,
    desc: &mut TreeDesc<N>,
) -> [u16; MAX_BITS + 1] {
    let tree = &mut desc.dyn_tree;
    let max_code = desc.max_code;
    let stree = desc.stat_desc.static_tree;
    let extra = desc.stat_desc.extra_bits;
    let base = desc.stat_desc.extra_base;
    let max_length = desc.stat_desc.max_length;

    let mut bl_count = [0u16; MAX_BITS + 1];

    // The root of the tree has length 0.
    *tree[heap.heap[heap.heap_max] as usize].len_mut() = 0; let mut overflow: i32 = 0;

    // Walk the internal nodes top-down: each node is one bit longer than
    // its parent; lengths beyond max_length are clamped and counted.
    for h in heap.heap_max + 1..HEAP_SIZE {
        let n = heap.heap[h] as usize;
        let mut bits = tree[tree[n].dad() as usize].len() + 1;

        if bits > max_length {
            bits = max_length;
            overflow += 1;
        }

        *tree[n].len_mut() = bits;

        // Internal (non-leaf) nodes don't contribute to the statistics.
        if n > max_code {
            continue;
        }

        bl_count[bits as usize] += 1;
        let mut xbits = 0;
        if n >= base {
            xbits = extra[n - base] as usize;
        }

        // Accumulate the exact bit cost of this block under both trees.
        let f = tree[n].freq() as usize;
        state.opt_len += f * (bits as usize + xbits);

        if !stree.is_empty() {
            state.static_len += f * (stree[n].len() as usize + xbits);
        }
    }

    if overflow == 0 {
        return bl_count;
    }

    // Overflow repair: move one leaf down from the deepest non-full level,
    // making room for two leaves at max_length (fixes two overflows each).
    loop {
        let mut bits = max_length as usize - 1;
        while bl_count[bits] == 0 {
            bits -= 1;
        }
        bl_count[bits] -= 1; bl_count[bits + 1] += 2; bl_count[max_length as usize] -= 1;
        overflow -= 2;

        if overflow <= 0 {
            break;
        }
    }

    // Reassign lengths to codes, longest first; the heap tail holds the
    // codes sorted by increasing frequency.
    let mut h = HEAP_SIZE;
    for bits in (1..=max_length).rev() {
        let mut n = bl_count[bits as usize];
        while n != 0 {
            h -= 1;
            let m = heap.heap[h] as usize;
            // Internal nodes are skipped WITHOUT consuming `n` — only `h`
            // advances; this is intentional (mirrors zlib).
            if m > max_code {
                continue;
            }

            if tree[m].len() != bits {
                // Keep opt_len in sync with the adjusted length.
                state.opt_len += (bits * tree[m].freq()) as usize;
                state.opt_len -= (tree[m].len() * tree[m].freq()) as usize;
                *tree[m].len_mut() = bits;
            }

            n -= 1;
        }
    }
    bl_count
}
2083
/// Returns true for printable, non-space ASCII: `'!'` (0x21) through
/// `'~'` (0x7E) inclusive — the C `isgraph` predicate for the ASCII range.
#[allow(unused)]
fn isgraph(c: u8) -> bool {
    matches!(c, 0x21..=0x7E)
}
2089
/// Assigns canonical Huffman codes to all codes up to `max_code`, given the
/// number of codes per bit length in `bl_count` (RFC 1951 section 3.2.2).
fn gen_codes(tree: &mut [Value], max_code: usize, bl_count: &[u16]) {
    // `next_code[len]` is the next code value to hand out at length `len`;
    // each level starts after the (shifted) codes of the previous level.
    let mut next_code = [0; MAX_BITS + 1]; let mut code = 0; for bits in 1..=MAX_BITS {
        code = (code + bl_count[bits - 1]) << 1;
        next_code[bits] = code;
    }

    // A complete code must use up all MAX_BITS-length code values.
    assert!(
        code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
        "inconsistent bit counts"
    );

    trace!("\ngen_codes: max_code {max_code} ");

    for n in 0..=max_code {
        let len = tree[n].len();
        if len == 0 {
            continue;
        }

        assert!((1..=15).contains(&len), "code length must be 1-15");
        // Deflate transmits codes bit-reversed.
        *tree[n].code_mut() = next_code[len as usize].reverse_bits() >> (16 - len);
        next_code[len as usize] += 1;

        // Tracing only; the static literal tree would be too noisy.
        if tree != self::trees_tbl::STATIC_LTREE.as_slice() {
            trace!(
                "\nn {:>3} {} l {:>2} c {:>4x} ({:x}) ",
                n,
                if isgraph(n as u8) {
                    char::from_u32(n as u32).unwrap()
                } else {
                    ' '
                },
                len,
                tree[n].code(),
                next_code[len as usize] - 1
            );
        }
    }
}
2142
// Bit-length alphabet repeat codes (RFC 1951 section 3.2.7):
// repeat the previous code length 3..=6 times (2 extra bits).
const REP_3_6: usize = 16;

// repeat a zero code length 3..=10 times (3 extra bits).
const REPZ_3_10: usize = 17;

// repeat a zero code length 11..=138 times (7 extra bits).
const REPZ_11_138: usize = 18;
2151
/// Scans `tree`'s code lengths and accumulates, in the bit-length tree's
/// frequencies, how often each length and each repeat code (REP_3_6 /
/// REPZ_3_10 / REPZ_11_138) would be used to run-length encode them.
fn scan_tree(bl_desc: &mut TreeDesc<{ 2 * BL_CODES + 1 }>, tree: &mut [Value], max_code: usize) {
    // prevlen/curlen/nextlen track the run of equal lengths; max_count and
    // min_count are the run limits for the currently applicable repeat code
    // (zero lengths allow much longer runs).
    let mut prevlen = -1isize; let mut curlen: isize; let mut nextlen = tree[0].len(); let mut count = 0; let mut max_count = 7; let mut min_count = 4; if nextlen == 0 {
        max_count = 138;
        min_count = 3;
    }

    // Sentinel: guarantees the final run is flushed inside the loop.
    *tree[max_code + 1].len_mut() = 0xffff; let bl_tree = &mut bl_desc.dyn_tree;

    for n in 0..=max_code {
        curlen = nextlen as isize;
        nextlen = tree[n + 1].len();
        count += 1;
        if count < max_count && curlen == nextlen as isize {
            // Run continues.
            continue;
        } else if count < min_count {
            // Run too short for a repeat code: emit the length itself.
            *bl_tree[curlen as usize].freq_mut() += count;
        } else if curlen != 0 {
            // Non-zero run: the first length plus one REP_3_6.
            if curlen != prevlen {
                *bl_tree[curlen as usize].freq_mut() += 1;
            }
            *bl_tree[REP_3_6].freq_mut() += 1;
        } else if count <= 10 {
            *bl_tree[REPZ_3_10].freq_mut() += 1;
        } else {
            *bl_tree[REPZ_11_138].freq_mut() += 1;
        }

        count = 0;
        prevlen = curlen;

        // Pick the run limits for the next run.
        if nextlen == 0 {
            max_count = 138;
            min_count = 3;
        } else if curlen == nextlen as isize {
            max_count = 6;
            min_count = 3;
        } else {
            max_count = 7;
            min_count = 4;
        }
    }
}
2205
/// Emits the dynamic-block tree preamble: the HLIT/HDIST/HCLEN counts, the
/// bit-length code lengths in BL_ORDER, then the run-length-encoded
/// literal/length and distance trees (RFC 1951 section 3.2.7).
fn send_all_trees(state: &mut State, lcodes: usize, dcodes: usize, blcodes: usize) {
    assert!(
        lcodes >= 257 && dcodes >= 1 && blcodes >= 4,
        "not enough codes"
    );
    assert!(
        lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
        "too many codes"
    );

    trace!("\nbl counts: ");
    // Counts are sent biased by their format minimums: HLIT-257 (5 bits),
    // HDIST-1 (5 bits), HCLEN-4 (4 bits).
    state.bit_writer.send_bits(lcodes as u64 - 257, 5); state.bit_writer.send_bits(dcodes as u64 - 1, 5);
    state.bit_writer.send_bits(blcodes as u64 - 4, 4); for rank in 0..blcodes {
        trace!("\nbl code {:>2} ", StaticTreeDesc::BL_ORDER[rank]);
        // 3 bits per bit-length code length, in BL_ORDER.
        state.bit_writer.send_bits(
            state.bl_desc.dyn_tree[StaticTreeDesc::BL_ORDER[rank] as usize].len() as u64,
            3,
        );
    }
    trace!("\nbl tree: sent {}", state.bit_writer.bits_sent);

    state
        .bit_writer
        .send_tree(&state.l_desc.dyn_tree, &state.bl_desc.dyn_tree, lcodes - 1);
    trace!("\nlit tree: sent {}", state.bit_writer.bits_sent);

    state
        .bit_writer
        .send_tree(&state.d_desc.dyn_tree, &state.bl_desc.dyn_tree, dcodes - 1);
    trace!("\ndist tree: sent {}", state.bit_writer.bits_sent);
}
2242
/// Builds the bit-length tree from the code lengths of the literal/length
/// and distance trees, adds the tree-transmission cost to `opt_len`, and
/// returns the index of the last bit-length code that must be sent.
fn build_bl_tree(state: &mut State) -> usize {
    // Count how the two trees' code lengths would be RLE-encoded.
    scan_tree(
        &mut state.bl_desc,
        &mut state.l_desc.dyn_tree,
        state.l_desc.max_code,
    );

    scan_tree(
        &mut state.bl_desc,
        &mut state.d_desc.dyn_tree,
        state.d_desc.max_code,
    );

    // build_tree needs `&mut State` too, so swap the descriptor out of
    // `state` around the call.
    {
        let mut tmp = TreeDesc::EMPTY;
        core::mem::swap(&mut tmp, &mut state.bl_desc);
        build_tree(state, &mut tmp);
        core::mem::swap(&mut tmp, &mut state.bl_desc);
    }

    // Trailing zero-length codes (in BL_ORDER) need not be transmitted, but
    // at least 4 must always be sent.
    let mut max_blindex = BL_CODES - 1;
    while max_blindex >= 3 {
        let index = StaticTreeDesc::BL_ORDER[max_blindex] as usize;
        if state.bl_desc.dyn_tree[index].len() != 0 {
            break;
        }

        max_blindex -= 1;
    }

    // Account for the preamble: 3 bits per bl code length plus the
    // 5+5+4-bit HLIT/HDIST/HCLEN header.
    state.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
    trace!(
        "\ndyn trees: dyn {}, stat {}",
        state.opt_len,
        state.static_len
    );

    max_blindex
}
2296
/// Chooses the cheapest encoding for the current block (stored, static
/// trees, or dynamic trees), emits it, and resets the per-block state.
///
/// `window_offset` is the block's start offset in the window, or `None`
/// when the raw bytes are no longer available (which rules out a stored
/// block); `stored_len` is the block's byte length; `last` marks the final
/// block of the stream.
fn zng_tr_flush_block(
    stream: &mut DeflateStream,
    window_offset: Option<usize>,
    stored_len: u32,
    last: bool,
) {
    // Estimated body sizes in bytes under the dynamic and static encodings.
    let mut opt_lenb;
    let static_lenb;
    let mut max_blindex = 0;

    let state = &mut stream.state;

    if state.sym_buf.is_empty() {
        // Empty block: only the 7-bit static end-of-block code.
        opt_lenb = 0;
        static_lenb = 0;
        state.static_len = 7;
    } else if state.level > 0 {
        // Classify the data once, on the first block.
        if stream.data_type == DataType::Unknown as i32 {
            stream.data_type = State::detect_data_type(&state.l_desc.dyn_tree) as i32;
        }

        // build_tree needs `&mut State` as well as the descriptor, so each
        // descriptor is swapped out of `state` around its call.
        {
            let mut tmp = TreeDesc::EMPTY;
            core::mem::swap(&mut tmp, &mut state.l_desc);

            build_tree(state, &mut tmp);
            core::mem::swap(&mut tmp, &mut state.l_desc);

            trace!(
                "\nlit data: dyn {}, stat {}",
                state.opt_len,
                state.static_len
            );
        }

        {
            let mut tmp = TreeDesc::EMPTY;
            core::mem::swap(&mut tmp, &mut state.d_desc);
            build_tree(state, &mut tmp);
            core::mem::swap(&mut tmp, &mut state.d_desc);

            trace!(
                "\ndist data: dyn {}, stat {}",
                state.opt_len,
                state.static_len
            );
        }

        max_blindex = build_bl_tree(state);

        // Convert bit estimates to bytes (+3 header bits, rounded up).
        opt_lenb = (state.opt_len + 3 + 7) >> 3;
        static_lenb = (state.static_len + 3 + 7) >> 3;

        trace!(
            "\nopt {}({}) stat {}({}) stored {} lit {} ",
            opt_lenb,
            state.opt_len,
            static_lenb,
            state.static_len,
            stored_len,
            state.sym_buf.iter().count()
        );

        // Prefer the static trees on a tie (cheaper to describe), or when
        // the Fixed strategy forbids dynamic trees.
        if static_lenb <= opt_lenb || state.strategy == Strategy::Fixed {
            opt_lenb = static_lenb;
        }
    } else {
        // Level 0: never compress; the stored encoding must be available.
        assert!(window_offset.is_some(), "lost buf");
        opt_lenb = stored_len as usize + 5;
        static_lenb = stored_len as usize + 5;
    }

    #[allow(clippy::unnecessary_unwrap)]
    if stored_len as usize + 4 <= opt_lenb && window_offset.is_some() {
        // Storing wins (4 = LEN/NLEN overhead) and the bytes are available.
        let window_offset = window_offset.unwrap();
        let range = window_offset..window_offset + stored_len as usize;
        zng_tr_stored_block(state, range, last);
    } else if static_lenb == opt_lenb {
        state.bit_writer.emit_tree(BlockType::StaticTrees, last);
        state.compress_block_static_trees();
    } else {
        state.bit_writer.emit_tree(BlockType::DynamicTrees, last);
        send_all_trees(
            state,
            state.l_desc.max_code + 1,
            state.d_desc.max_code + 1,
            max_blindex + 1,
        );

        state.compress_block_dynamic_trees();
    }

    // Start the next block with fresh statistics; the final block is padded
    // to a byte boundary.
    state.init_block();
    if last {
        state.bit_writer.emit_align();
    }

}
2416
2417pub(crate) fn flush_block_only(stream: &mut DeflateStream, is_last: bool) {
2418 zng_tr_flush_block(
2419 stream,
2420 (stream.state.block_start >= 0).then_some(stream.state.block_start as usize),
2421 (stream.state.strstart as isize - stream.state.block_start) as u32,
2422 is_last,
2423 );
2424
2425 stream.state.block_start = stream.state.strstart as isize;
2426 flush_pending(stream)
2427}
2428
/// Writes a gzip header field (`bytes`) through the pending buffer,
/// folding the written bytes into the header CRC as it goes.
///
/// When the output stalls (pending buffer cannot drain), returns
/// `Break(Ok)` and records progress in `state.gzindex` so the next deflate
/// call resumes where this one stopped; returns `Continue(())` when all of
/// `bytes` has been buffered.
fn flush_bytes(stream: &mut DeflateStream, mut bytes: &[u8]) -> ControlFlow<ReturnCode> {
    let mut state = &mut stream.state;

    // `beg` marks where this call's bytes start in the pending buffer, so
    // only the newly added bytes are folded into the CRC.
    let mut beg = state.bit_writer.pending.pending().len(); while state.bit_writer.pending.remaining() < bytes.len() {
        let copy = state.bit_writer.pending.remaining();

        state.bit_writer.pending.extend(&bytes[..copy]);

        stream.adler = crc32(
            stream.adler as u32,
            &state.bit_writer.pending.pending()[beg..],
        ) as z_checksum;

        state.gzindex += copy;
        flush_pending(stream);
        state = &mut stream.state;

        // If flushing could not empty the buffer, output space ran out:
        // stop here and let the caller return Ok to the user.
        if !state.bit_writer.pending.pending().is_empty() {
            state.last_flush = -1;
            return ControlFlow::Break(ReturnCode::Ok);
        }

        beg = 0;
        bytes = &bytes[copy..];
    }

    // Remaining bytes fit entirely in the pending buffer.
    state.bit_writer.pending.extend(bytes);

    stream.adler = crc32(
        stream.adler as u32,
        &state.bit_writer.pending.pending()[beg..],
    ) as z_checksum;
    // Field fully written: reset the resume cursor.
    state.gzindex = 0;

    ControlFlow::Continue(())
}
2469
/// Compresses as much input as possible and writes output to `next_out`,
/// mirroring zlib's `deflate()` semantics for the given `flush` mode.
///
/// Returns `Ok` while progress can still be made (call again with more
/// input or output space), `StreamEnd` once a `Finish` flush has written
/// the complete trailer, and `StreamError`/`BufError` on misuse or missing
/// buffer space.
pub fn deflate(stream: &mut DeflateStream, flush: DeflateFlush) -> ReturnCode {
    // Validate caller-provided buffers and state.
    if stream.next_out.is_null()
        || (stream.avail_in != 0 && stream.next_in.is_null())
        || (stream.state.status == Status::Finish && flush != DeflateFlush::Finish)
    {
        let err = ReturnCode::StreamError;
        stream.msg = err.error_message();
        return err;
    }

    if stream.avail_out == 0 {
        let err = ReturnCode::BufError;
        stream.msg = err.error_message();
        return err;
    }

    let old_flush = stream.state.last_flush;
    stream.state.last_flush = flush as i8;

    // Drain output left over from a previous call before anything else.
    if !stream.state.bit_writer.pending.pending().is_empty() {
        flush_pending(stream);
        if stream.avail_out == 0 {
            // Output filled up again; -1 makes the next call with the same
            // flush value acceptable (not a no-progress BufError).
            stream.state.last_flush = -1;
            return ReturnCode::Ok;
        }

    } else if stream.avail_in == 0
        && rank_flush(flush as i8) <= rank_flush(old_flush)
        && flush != DeflateFlush::Finish
    {
        // No input and no stronger flush than last time: no work to do.
        let err = ReturnCode::BufError;
        stream.msg = err.error_message();
        return err;
    }

    // Input after the stream was finished is an error.
    if stream.state.status == Status::Finish && stream.avail_in != 0 {
        let err = ReturnCode::BufError;
        stream.msg = err.error_message();
        return err;
    }

    // Raw deflate has no header: go straight to compressing.
    if stream.state.status == Status::Init && stream.state.wrap == 0 {
        stream.state.status = Status::Busy;
    }

    // Emit the 2-byte zlib header (plus the dictionary adler32, if set).
    if stream.state.status == Status::Init {
        let header = stream.state.header();
        stream
            .state
            .bit_writer
            .pending
            .extend(&header.to_be_bytes());

        // strstart != 0 here means a preset dictionary was loaded.
        if stream.state.strstart != 0 {
            let adler = stream.adler as u32;
            stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
        }

        stream.adler = ADLER32_INITIAL_VALUE as _;
        stream.state.status = Status::Busy;

        flush_pending(stream);

        if !stream.state.bit_writer.pending.pending().is_empty() {
            stream.state.last_flush = -1;

            return ReturnCode::Ok;
        }
    }

    // Emit the fixed part of the gzip header.
    if stream.state.status == Status::GZip {
        stream.state.crc_fold = Crc32Fold::new();

        // gzip magic (0x1f, 0x8b) + compression method (8 = deflate).
        stream.state.bit_writer.pending.extend(&[31, 139, 8]);

        // XFL: 2 = maximum compression, 4 = fastest.
        let extra_flags = if stream.state.level == 9 {
            2
        } else if stream.state.strategy >= Strategy::HuffmanOnly || stream.state.level < 2 {
            4
        } else {
            0
        };

        match &stream.state.gzhead {
            None => {
                // No user header: zero flags/mtime, then straight to Busy.
                let bytes = [0, 0, 0, 0, 0, extra_flags, gz_header::OS_CODE];
                stream.state.bit_writer.pending.extend(&bytes);
                stream.state.status = Status::Busy;

                flush_pending(stream);
                if !stream.state.bit_writer.pending.pending().is_empty() {
                    stream.state.last_flush = -1;
                    return ReturnCode::Ok;
                }
            }
            Some(gzhead) => {
                // User-supplied header: FLG, MTIME, XFL, OS, then optional
                // extra/name/comment fields handled by the states below.
                stream.state.bit_writer.pending.extend(&[gzhead.flags()]);
                let bytes = (gzhead.time as u32).to_le_bytes();
                stream.state.bit_writer.pending.extend(&bytes);
                stream
                    .state
                    .bit_writer
                    .pending
                    .extend(&[extra_flags, gzhead.os as u8]);

                if !gzhead.extra.is_null() {
                    let bytes = (gzhead.extra_len as u16).to_le_bytes();
                    stream.state.bit_writer.pending.extend(&bytes);
                }

                if gzhead.hcrc != 0 {
                    stream.adler = crc32(
                        stream.adler as u32,
                        stream.state.bit_writer.pending.pending(),
                    ) as z_checksum
                }

                stream.state.gzindex = 0;
                stream.state.status = Status::Extra;
            }
        }
    }

    // gzip extra field; resumable across calls via `gzindex`.
    if stream.state.status == Status::Extra {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if !gzhead.extra.is_null() {
                let gzhead_extra = gzhead.extra;

                // SAFETY(review): relies on the user-provided `extra`
                // pointer being valid for `extra_len` bytes (C API contract).
                let extra = unsafe {
                    core::slice::from_raw_parts(
                        gzhead_extra.add(stream.state.gzindex),
                        (gzhead.extra_len & 0xffff) as usize - stream.state.gzindex,
                    )
                };

                if let ControlFlow::Break(err) = flush_bytes(stream, extra) {
                    return err;
                }
            }
        }
        stream.state.status = Status::Name;
    }

    // gzip file name (NUL-terminated).
    if stream.state.status == Status::Name {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if !gzhead.name.is_null() {
                // SAFETY(review): user-provided pointer must be a valid
                // NUL-terminated C string (C API contract).
                let gzhead_name = unsafe { CStr::from_ptr(gzhead.name.cast()) };
                let bytes = gzhead_name.to_bytes_with_nul();
                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
                    return err;
                }
            }
            stream.state.status = Status::Comment;
        }
    }

    // gzip comment (NUL-terminated).
    if stream.state.status == Status::Comment {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if !gzhead.comment.is_null() {
                let gzhead_comment = unsafe { CStr::from_ptr(gzhead.comment.cast()) };
                let bytes = gzhead_comment.to_bytes_with_nul();
                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
                    return err;
                }
            }
            stream.state.status = Status::Hcrc;
        }
    }

    // Optional 16-bit header CRC, then the header is complete.
    if stream.state.status == Status::Hcrc {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if gzhead.hcrc != 0 {
                let bytes = (stream.adler as u16).to_le_bytes();
                if let ControlFlow::Break(err) = flush_bytes(stream, &bytes) {
                    return err;
                }
            }
        }

        stream.state.status = Status::Busy;

        flush_pending(stream);
        if !stream.state.bit_writer.pending.pending().is_empty() {
            stream.state.last_flush = -1;
            return ReturnCode::Ok;
        }
    }

    // Run the actual compression algorithm when there is anything to do.
    let state = &mut stream.state;
    if stream.avail_in != 0
        || state.lookahead != 0
        || (flush != DeflateFlush::NoFlush && state.status != Status::Finish)
    {
        let bstate = self::algorithm::run(stream, flush);

        let state = &mut stream.state;

        if matches!(bstate, BlockState::FinishStarted | BlockState::FinishDone) {
            state.status = Status::Finish;
        }

        match bstate {
            BlockState::NeedMore | BlockState::FinishStarted => {
                if stream.avail_out == 0 {
                    state.last_flush = -1; }
                return ReturnCode::Ok;
            }
            BlockState::BlockDone => {
                // A block boundary was reached: honor the flush mode.
                match flush {
                    DeflateFlush::NoFlush => unreachable!("condition of inner surrounding if"),
                    DeflateFlush::PartialFlush => {
                        state.bit_writer.align();
                    }
                    DeflateFlush::SyncFlush => {
                        // Empty stored block = the classic zlib sync marker.
                        zng_tr_stored_block(state, 0..0, false);
                    }
                    DeflateFlush::FullFlush => {
                        zng_tr_stored_block(state, 0..0, false);

                        // Forget match history so decompression can restart
                        // from this point.
                        state.head.as_mut_slice().fill(0); if state.lookahead == 0 {
                            state.strstart = 0;
                            state.block_start = 0;
                            state.insert = 0;
                        }
                    }
                    DeflateFlush::Block => { }
                    DeflateFlush::Finish => unreachable!("condition of outer surrounding if"),
                }

                flush_pending(stream);

                if stream.avail_out == 0 {
                    stream.state.last_flush = -1; return ReturnCode::Ok;
                }
            }
            BlockState::FinishDone => { }
        }
    }

    if flush != DeflateFlush::Finish {
        return ReturnCode::Ok;
    }

    // Write the trailer: gzip gets crc32 + input length (little-endian),
    // zlib gets adler32 (big-endian), raw gets nothing.
    if stream.state.wrap == 2 {
        let crc_fold = core::mem::take(&mut stream.state.crc_fold);
        stream.adler = crc_fold.finish() as z_checksum;

        let adler = stream.adler as u32;
        stream.state.bit_writer.pending.extend(&adler.to_le_bytes());

        let total_in = stream.total_in as u32;
        stream
            .state
            .bit_writer
            .pending
            .extend(&total_in.to_le_bytes());
    } else if stream.state.wrap == 1 {
        let adler = stream.adler as u32;
        stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
    }

    flush_pending(stream);

    // Negative wrap marks "trailer written": only pending output remains.
    if stream.state.wrap > 0 {
        stream.state.wrap = -stream.state.wrap; }

    if stream.state.bit_writer.pending.pending().is_empty() {
        assert_eq!(stream.state.bit_writer.bits_used, 0, "bi_buf not flushed");
        return ReturnCode::StreamEnd;
    }
    ReturnCode::Ok
}
2785
/// Moves as many pending output bytes as possible into the stream's output buffer.
///
/// Any whole bytes buffered in the bit writer are first flushed into the pending
/// byte buffer; then up to `avail_out` pending bytes are copied to `next_out`.
/// Bytes that do not fit remain pending for a later call.
pub(crate) fn flush_pending(stream: &mut DeflateStream) {
    let state = &mut stream.state;

    // Move complete bytes from the bit buffer into the pending byte buffer.
    state.bit_writer.flush_bits();

    let pending = state.bit_writer.pending.pending();
    // Never copy more than the caller-provided output space.
    let len = Ord::min(pending.len(), stream.avail_out as usize);

    if len == 0 {
        return;
    }

    trace!("\n[FLUSH {len} bytes]");
    // SAFETY: relies on the z_stream contract that `next_out` is valid for
    // `avail_out >= len` writes; the pending buffer is internally owned, so the
    // two regions do not overlap.
    unsafe { core::ptr::copy_nonoverlapping(pending.as_ptr(), stream.next_out, len) };

    // Advance the output cursor and the stream's byte accounting.
    stream.next_out = stream.next_out.wrapping_add(len);
    stream.total_out += len as crate::c_api::z_size;
    stream.avail_out -= len as crate::c_api::uInt;

    // Mark the copied bytes as consumed in the pending buffer.
    state.bit_writer.pending.advance(len);
}
2808
2809pub fn compress_slice<'a>(
2827 output: &'a mut [u8],
2828 input: &[u8],
2829 config: DeflateConfig,
2830) -> (&'a mut [u8], ReturnCode) {
2831 let output_uninit = unsafe {
2833 core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2834 };
2835
2836 compress(output_uninit, input, config)
2837}
2838
/// Compresses `input` into the (possibly uninitialized) `output` buffer,
/// finishing the stream in a single pass.
///
/// Returns the initialized, written prefix of `output` and the final
/// [`ReturnCode`].
pub fn compress<'a>(
    output: &'a mut [MaybeUninit<u8>],
    input: &[u8],
    config: DeflateConfig,
) -> (&'a mut [u8], ReturnCode) {
    // `Finish` drives the stream to completion in one call sequence.
    compress_with_flush(output, input, config, DeflateFlush::Finish)
}
2846
2847pub fn compress_slice_with_flush<'a>(
2848 output: &'a mut [u8],
2849 input: &[u8],
2850 config: DeflateConfig,
2851 flush: DeflateFlush,
2852) -> (&'a mut [u8], ReturnCode) {
2853 let output_uninit = unsafe {
2855 core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2856 };
2857
2858 compress_with_flush(output_uninit, input, config, flush)
2859}
2860
/// Compresses `input` into `output` in `c_uint`-sized chunks, applying
/// `final_flush` once all input has been handed to the stream.
///
/// Returns the written prefix of `output` and the final [`ReturnCode`]
/// (`Ok` when the stream ended cleanly).
pub fn compress_with_flush<'a>(
    output: &'a mut [MaybeUninit<u8>],
    input: &[u8],
    config: DeflateConfig,
    final_flush: DeflateFlush,
) -> (&'a mut [u8], ReturnCode) {
    // Start with zero avail_in/avail_out; the loop below feeds input and
    // output space in chunks that fit the 32-bit `uInt` fields.
    let mut stream = z_stream {
        next_in: input.as_ptr() as *mut u8,
        avail_in: 0, total_in: 0,
        next_out: output.as_mut_ptr() as *mut u8,
        avail_out: 0, total_out: 0,
        msg: core::ptr::null_mut(),
        state: core::ptr::null_mut(),
        zalloc: None,
        zfree: None,
        opaque: core::ptr::null_mut(),
        data_type: 0,
        adler: 0,
        reserved: 0,
    };

    let err = init(&mut stream, config);
    if err != ReturnCode::Ok {
        return (&mut [], err);
    }

    // Largest chunk that fits in the stream's 32-bit avail_* counters.
    let max = core::ffi::c_uint::MAX as usize;

    let mut left = output.len();
    let mut source_len = input.len();

    let return_code = loop {
        // Top up output space when the previous chunk has been consumed.
        if stream.avail_out == 0 {
            stream.avail_out = Ord::min(left, max) as _;
            left -= stream.avail_out as usize;
        }

        // Top up input when the previous chunk has been consumed.
        if stream.avail_in == 0 {
            stream.avail_in = Ord::min(source_len, max) as _;
            source_len -= stream.avail_in as usize;
        }

        // Only request the final flush once all input has been handed over.
        let flush = if source_len > 0 {
            DeflateFlush::NoFlush
        } else {
            final_flush
        };

        let err = if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
            deflate(stream, flush)
        } else {
            ReturnCode::StreamError
        };

        match err {
            ReturnCode::Ok => continue,
            // StreamEnd is the success case for a finished stream.
            ReturnCode::StreamEnd => break ReturnCode::Ok,
            _ => break err,
        }
    };

    // SAFETY: `total_out` bytes at the start of `output` were written
    // (initialized) by deflate.
    let output_slice = unsafe {
        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut u8, stream.total_out as usize)
    };

    // Release the stream's allocations regardless of the outcome.
    if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
        let _ = end(stream);
    }

    (output_slice, return_code)
}
2936
/// Returns an upper bound on the compressed size for `source_len` input bytes,
/// assuming the default zlib wrapper.
pub const fn compress_bound(source_len: usize) -> usize {
    compress_bound_help(source_len, ZLIB_WRAPLEN)
}
2959
2960const fn compress_bound_help(source_len: usize, wrap_len: usize) -> usize {
2961 source_len .wrapping_add(if source_len == 0 { 1 } else { 0 })
2964 .wrapping_add(if source_len < 9 { 1 } else { 0 })
2966 .wrapping_add(deflate_quick_overhead(source_len))
2968 .wrapping_add(DEFLATE_BLOCK_OVERHEAD)
2970 .wrapping_add(wrap_len)
2972}
2973
/// Binary min-heap of tree nodes used while constructing Huffman trees,
/// ordered by symbol frequency with tree depth as the tie-breaker.
#[derive(Clone)]
struct Heap {
    // Node indices; slot 0 is unused — the smallest element lives at
    // index `Heap::SMALLEST` (1).
    heap: [u32; 2 * L_CODES + 1],

    /// Number of elements currently in the heap.
    heap_len: usize,

    /// Lower boundary of the "removed nodes" region: nodes popped during tree
    /// construction are stacked downward from the end of `heap`.
    heap_max: usize,

    /// Depth of the subtree rooted at each node; breaks frequency ties.
    depth: [u8; 2 * L_CODES + 1],
}
2990
impl Heap {
    /// Creates an empty, zeroed heap.
    fn new() -> Self {
        Self {
            heap: [0; 2 * L_CODES + 1],
            heap_len: 0,
            heap_max: 0,
            depth: [0; 2 * L_CODES + 1],
        }
    }

    /// Fills the heap with every symbol of `tree` that has a nonzero
    /// frequency (in no particular order yet), zeroing the code length of
    /// unused symbols. Returns the largest used code, or -1 if none.
    fn initialize(&mut self, tree: &mut [Value]) -> isize {
        let mut max_code = -1;

        self.heap_len = 0;
        self.heap_max = HEAP_SIZE;

        for (n, node) in tree.iter_mut().enumerate() {
            if node.freq() > 0 {
                // Heap slots are 1-based: pre-increment, then store.
                self.heap_len += 1;
                self.heap[self.heap_len] = n as u32;
                max_code = n as isize;
                self.depth[n] = 0;
            } else {
                *node.len_mut() = 0;
            }
        }

        max_code
    }

    // Index of the smallest element; slot 0 of `heap` is unused.
    const SMALLEST: usize = 1;

    /// Restores the min-heap property for the subtree rooted at index `k`,
    /// sifting `heap[k]` down past any smaller children.
    fn pqdownheap(&mut self, tree: &[Value], mut k: usize) {
        // Packs (frequency, depth) into one u32 so a single comparison
        // implements "lower frequency, ties broken by lower depth".
        macro_rules! freq_and_depth {
            ($i:expr) => {
                (tree[$i as usize].freq() as u32) << 8 | self.depth[$i as usize] as u32
            };
        }

        let v = self.heap[k];
        let v_val = freq_and_depth!(v);
        // Children of node j are at 2j and 2j+1 (1-based heap).
        let mut j = k << 1; while j <= self.heap_len {
            let mut j_val = freq_and_depth!(self.heap[j]);
            // Pick the smaller of the two children.
            if j < self.heap_len {
                let j1_val = freq_and_depth!(self.heap[j + 1]);
                if j1_val <= j_val {
                    j += 1;
                    j_val = j1_val;
                }
            }

            // Stop once v is no larger than both children.
            if v_val <= j_val {
                break;
            }

            // Move the smaller child up and continue down that subtree.
            self.heap[k] = self.heap[j];
            k = j;

            j <<= 1;
        }

        self.heap[k] = v;
    }

    /// Removes and returns the node with the smallest (frequency, depth),
    /// re-establishing the heap property afterwards.
    fn pqremove(&mut self, tree: &[Value]) -> u32 {
        let top = self.heap[Self::SMALLEST];
        self.heap[Self::SMALLEST] = self.heap[self.heap_len];
        self.heap_len -= 1;

        self.pqdownheap(tree, Self::SMALLEST);

        top
    }

    /// Builds the Huffman tree: repeatedly joins the two least-frequent nodes
    /// under a fresh internal node (indices starting at `node`), until a
    /// single root remains. Removed nodes are stacked at the top of `heap`
    /// (from `heap_max` downward) in increasing frequency order.
    fn construct_huffman_tree(&mut self, tree: &mut [Value], mut node: usize) {
        loop {
            // n, m: the two nodes of least (frequency, depth).
            let n = self.pqremove(tree) as usize; let m = self.heap[Heap::SMALLEST] as usize; self.heap_max -= 1;
            self.heap[self.heap_max] = n as u32; self.heap_max -= 1;
            self.heap[self.heap_max] = m as u32;

            // The new internal node combines both frequencies; its depth is
            // one more than the deeper child.
            *tree[node].freq_mut() = tree[n].freq() + tree[m].freq();
            self.depth[node] = Ord::max(self.depth[n], self.depth[m]) + 1;

            *tree[n].dad_mut() = node as u16;
            *tree[m].dad_mut() = node as u16;

            // Replace m by the new node and sift it into place.
            self.heap[Heap::SMALLEST] = node as u32;
            node += 1;

            self.pqdownheap(tree, Heap::SMALLEST);

            if self.heap_len < 2 {
                break;
            }
        }

        // Stack the root as well.
        self.heap_max -= 1;
        self.heap[self.heap_max] = self.heap[Heap::SMALLEST];
    }
}
3117
3118pub unsafe fn set_header<'a>(
3127 stream: &mut DeflateStream<'a>,
3128 head: Option<&'a mut gz_header>,
3129) -> ReturnCode {
3130 if stream.state.wrap != 2 {
3131 ReturnCode::StreamError
3132 } else {
3133 stream.state.gzhead = head;
3134 ReturnCode::Ok
3135 }
3136}
3137
/// Bytes added by the zlib wrapper: 2-byte header + 4-byte Adler-32 trailer.
const ZLIB_WRAPLEN: usize = 6;
/// Bytes added by the gzip wrapper: 10-byte header + 4-byte CRC-32 + 4-byte length.
const GZIP_WRAPLEN: usize = 18;

// Per-block overhead, in bits: block header, end-of-block code, and padding
// to the next byte boundary.
const DEFLATE_HEADER_BITS: usize = 3;
const DEFLATE_EOBS_BITS: usize = 15;
const DEFLATE_PAD_BITS: usize = 6;
/// Per-block overhead in whole bytes ((3 + 15 + 6) >> 3 == 3).
const DEFLATE_BLOCK_OVERHEAD: usize =
    (DEFLATE_HEADER_BITS + DEFLATE_EOBS_BITS + DEFLATE_PAD_BITS) >> 3;

/// Maximum bits per literal in the "quick" deflate strategy.
const DEFLATE_QUICK_LIT_MAX_BITS: usize = 9;
/// Worst-case expansion of `x` input bytes under the quick strategy: one
/// extra bit per byte (9 instead of 8), rounded up to whole bytes.
const fn deflate_quick_overhead(x: usize) -> usize {
    let sum = x
        .wrapping_mul(DEFLATE_QUICK_LIT_MAX_BITS - 8)
        .wrapping_add(7);

    // The cast mirrors the C implementation's `unsigned long` arithmetic
    // (which may be 32-bit on some targets).
    (sum as core::ffi::c_ulong >> 3) as usize
}
3158
/// Returns an upper bound on the compressed size that `deflate` may produce
/// for `source_len` input bytes on this stream (or on a default stream when
/// `stream` is `None`).
pub fn bound(stream: Option<&mut DeflateStream>, source_len: usize) -> usize {
    // Mask emulating C `unsigned long` modular arithmetic.
    let mask = core::ffi::c_ulong::MAX as usize;

    // Generic conservative bound: the input plus fixed-block overhead terms.
    let comp_len = source_len
        .wrapping_add((source_len.wrapping_add(7) & mask) >> 3)
        .wrapping_add((source_len.wrapping_add(63) & mask) >> 6)
        .wrapping_add(5);

    let Some(stream) = stream else {
        // No stream to inspect: assume a 6-byte zlib wrapper.
        return comp_len.wrapping_add(6);
    };

    // Compute the wrapper (header + trailer) size for the configured format.
    let wrap_len = match stream.state.wrap {
        0 => {
            // Raw deflate: no wrapper bytes at all.
            0
        }
        1 => {
            // zlib wrapper; a nonzero strstart adds 4 bytes
            // (presumably the DICTID field for a preset dictionary — verify).
            if stream.state.strstart != 0 {
                ZLIB_WRAPLEN + 4
            } else {
                ZLIB_WRAPLEN
            }
        }
        2 => {
            // gzip wrapper, plus any optional header fields the user set.
            let mut gz_wrap_len = GZIP_WRAPLEN;

            if let Some(header) = &stream.state.gzhead {
                if !header.extra.is_null() {
                    // 2-byte extra-length field plus the extra payload.
                    gz_wrap_len += 2 + header.extra_len as usize;
                }

                // NAME field: counts the string bytes plus the terminating nul.
                let mut c_string = header.name;
                if !c_string.is_null() {
                    loop {
                        gz_wrap_len += 1;
                        // SAFETY: relies on the gz_header contract that `name`
                        // is a valid nul-terminated string.
                        unsafe {
                            if *c_string == 0 {
                                break;
                            }
                            c_string = c_string.add(1);
                        }
                    }
                }

                // COMMENT field: counts string bytes plus the terminating nul.
                let mut c_string = header.comment;
                if !c_string.is_null() {
                    loop {
                        gz_wrap_len += 1;
                        // SAFETY: relies on the gz_header contract that
                        // `comment` is a valid nul-terminated string.
                        unsafe {
                            if *c_string == 0 {
                                break;
                            }
                            c_string = c_string.add(1);
                        }
                    }
                }

                if header.hcrc != 0 {
                    // Optional 2-byte header CRC.
                    gz_wrap_len += 2;
                }
            }

            gz_wrap_len
        }
        _ => {
            // Unknown wrap value: fall back to the zlib wrapper size.
            ZLIB_WRAPLEN
        }
    };

    // With a non-default window or small hash table the generic bound may not
    // hold; use the looser estimates instead.
    if stream.state.w_bits() != MAX_WBITS as u32 || HASH_BITS < 15 {
        if stream.state.level == 0 {
            // Level 0 emits stored blocks only.
            source_len
                .wrapping_add(source_len >> 5)
                .wrapping_add(source_len >> 7)
                .wrapping_add(source_len >> 11)
                .wrapping_add(7)
                .wrapping_add(wrap_len)
        } else {
            comp_len.wrapping_add(wrap_len)
        }
    } else {
        compress_bound_help(source_len, wrap_len)
    }
}
3269
/// Copies the current dictionary (the most recent window contents) into
/// `dictionary` and returns its length in bytes.
///
/// # Safety
///
/// `dictionary` must be null or valid for at least the returned number of
/// byte writes.
pub unsafe fn get_dictionary(stream: &DeflateStream<'_>, dictionary: *mut u8) -> usize {
    let s = &stream.state;
    // At most one full window of the bytes preceding `strstart + lookahead`.
    let len = Ord::min(s.strstart + s.lookahead, s.w_size);

    if !dictionary.is_null() && len > 0 {
        // Copy the last `len` bytes of the valid window region.
        unsafe {
            core::ptr::copy_nonoverlapping(
                s.window.as_ptr().add(s.strstart + s.lookahead - len),
                dictionary,
                len,
            );
        }
    }

    len
}
3289
/// Byte offsets of each sub-buffer inside the single deflate allocation.
struct DeflateAllocOffsets {
    /// Total bytes to allocate, including alignment slack.
    total_size: usize,
    /// Offset of the `State` struct.
    state_pos: usize,
    /// Offset of the sliding window buffer.
    window_pos: usize,
    /// Offset of the pending (output) byte buffer.
    pending_pos: usize,
    /// Offset of the symbol buffer.
    sym_buf_pos: usize,
    /// Offset of the `prev` hash-chain table.
    prev_pos: usize,
    /// Offset of the `head` hash table.
    head_pos: usize,
}
3299
3300impl DeflateAllocOffsets {
3301 fn new(window_bits: usize, lit_bufsize: usize) -> Self {
3302 use core::mem::size_of;
3303
3304 const ALIGN_SIZE: usize = 64;
3307 const LIT_BUFS: usize = 4;
3308
3309 let mut curr_size = 0usize;
3310
3311 let state_size = size_of::<State>();
3313 let window_size = (1 << window_bits) * 2;
3315 let prev_size = (1 << window_bits) * size_of::<Pos>();
3316 let head_size = HASH_SIZE * size_of::<Pos>();
3317 let pending_size = lit_bufsize * LIT_BUFS;
3318 let sym_buf_size = lit_bufsize * (LIT_BUFS - 1);
3319 let state_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3323 curr_size = state_pos + state_size;
3324
3325 let window_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3326 curr_size = window_pos + window_size;
3327
3328 let prev_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3329 curr_size = prev_pos + prev_size;
3330
3331 let head_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3332 curr_size = head_pos + head_size;
3333
3334 let pending_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3335 curr_size = pending_pos + pending_size;
3336
3337 let sym_buf_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3338 curr_size = sym_buf_pos + sym_buf_size;
3339
3340 let total_size = (curr_size + (ALIGN_SIZE - 1)).next_multiple_of(ALIGN_SIZE);
3343
3344 Self {
3345 total_size,
3346 state_pos,
3347 window_pos,
3348 pending_pos,
3349 sym_buf_pos,
3350 prev_pos,
3351 head_pos,
3352 }
3353 }
3354}
3355
3356#[cfg(test)]
3357mod test {
3358 use crate::{
3359 inflate::{decompress_slice, InflateConfig, InflateStream},
3360 InflateFlush,
3361 };
3362
3363 use super::*;
3364
3365 use core::{ffi::CStr, sync::atomic::AtomicUsize};
3366
    #[test]
    // Frequency tables containing only text-ish byte values are classified as
    // Text; control bytes (other than allowed ones like '\r') mean Binary.
    fn detect_data_type_basic() {
        let empty = || [Value::new(0, 0); LITERALS];

        assert_eq!(State::detect_data_type(&empty()), DataType::Binary);

        let mut binary = empty();
        binary[0] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&binary), DataType::Binary);

        let mut text = empty();
        text[b'\r' as usize] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&text), DataType::Text);

        let mut text = empty();
        text[b'a' as usize] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&text), DataType::Text);

        let mut non_text = empty();
        non_text[7] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&non_text), DataType::Binary);
    }
3389
    #[test]
    // `from_stream_mut` must reject null pointers and uninitialized streams,
    // and accept a stream only after a successful `init`.
    fn from_stream_mut() {
        unsafe {
            assert!(DeflateStream::from_stream_mut(core::ptr::null_mut()).is_none());

            let mut stream = z_stream::default();
            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());

            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());

            init(&mut stream, DeflateConfig::default());
            let stream = DeflateStream::from_stream_mut(&mut stream);
            assert!(stream.is_some());

            assert!(end(stream.unwrap()).is_ok());
        }
    }
3408
    #[cfg(feature = "c-allocator")]
    // Test allocator that fails exactly the N-th allocation (0-based) and
    // otherwise delegates to the C allocator. `opaque` is an `AtomicUsize`
    // counting calls.
    unsafe extern "C" fn fail_nth_allocation<const N: usize>(
        opaque: crate::c_api::voidpf,
        items: crate::c_api::uInt,
        size: crate::c_api::uInt,
    ) -> crate::c_api::voidpf {
        let count = unsafe { &*(opaque as *const AtomicUsize) };

        if count.fetch_add(1, core::sync::atomic::Ordering::Relaxed) != N {
            unsafe { (crate::allocate::C.zalloc)(opaque, items, size) }
        } else {
            core::ptr::null_mut()
        }
    }
3426
    #[test]
    #[cfg(feature = "c-allocator")]
    // `init` must report MemError when the very first allocation fails.
    fn init_invalid_allocator() {
        {
            let atomic = AtomicUsize::new(0);
            let mut stream = z_stream {
                zalloc: Some(fail_nth_allocation::<0>),
                zfree: Some(crate::allocate::C.zfree),
                opaque: &atomic as *const _ as *const core::ffi::c_void as *mut _,
                ..z_stream::default()
            };
            assert_eq!(
                init(&mut stream, DeflateConfig::default()),
                ReturnCode::MemError
            );
        }
    }
3444
    #[test]
    #[cfg(feature = "c-allocator")]
    // `copy` must report MemError when its allocation (the second overall,
    // after init's) fails; the original stream must still end cleanly.
    fn copy_invalid_allocator() {
        let mut stream = z_stream::default();

        let atomic = AtomicUsize::new(0);
        stream.opaque = &atomic as *const _ as *const core::ffi::c_void as *mut _;
        stream.zalloc = Some(fail_nth_allocation::<1>);
        stream.zfree = Some(crate::allocate::C.zfree);

        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);

        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
            unreachable!()
        };

        let mut stream_copy = MaybeUninit::<DeflateStream>::zeroed();

        assert_eq!(copy(&mut stream_copy, stream), ReturnCode::MemError);

        assert!(end(stream).is_ok());
    }
3468
    // Validation of `DeflateConfig` values passed to `init`.
    mod invalid_deflate_config {
        use super::*;

        #[test]
        // The default config is valid and yields compression level 6.
        fn sanity_check() {
            let mut stream = z_stream::default();
            assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);

            assert!(stream.zalloc.is_some());
            assert!(stream.zfree.is_some());

            let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
            assert_eq!(stream.state.level, 6);

            assert!(end(stream).is_ok());
        }

        #[test]
        // window_bits == 8 is silently corrected up to 9.
        fn window_bits_correction() {
            let mut stream = z_stream::default();
            let config = DeflateConfig {
                window_bits: 8,
                ..Default::default()
            };
            assert_eq!(init(&mut stream, config), ReturnCode::Ok);
            let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
            assert_eq!(stream.state.w_bits(), 9);

            assert!(end(stream).is_ok());
        }

        #[test]
        // Out-of-range (too negative) window_bits is rejected.
        fn window_bits_too_low() {
            let mut stream = z_stream::default();
            let config = DeflateConfig {
                window_bits: -16,
                ..Default::default()
            };
            assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
        }

        #[test]
        // Out-of-range (too large) window_bits is rejected.
        fn window_bits_too_high() {
            let mut stream = z_stream::default();
            let config = DeflateConfig {
                window_bits: 42,
                ..Default::default()
            };
            assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
        }
    }
3523
    #[test]
    // Ending a stream that still has buffered output (output buffer was too
    // small to flush everything) must report an error.
    fn end_data_error() {
        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut u8;
        stream.avail_in = input.len() as _;
        let output = &mut [0, 0, 0];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;

        assert_eq!(deflate(stream, DeflateFlush::NoFlush), ReturnCode::Ok);

        assert!(end(stream).is_err());
    }
3544
    #[test]
    // After `reset_keep`, the same stream can compress the (unchanged)
    // input again to completion.
    fn test_reset_keep() {
        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut u8;
        stream.avail_in = input.len() as _;

        let output = &mut [0; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        assert_eq!(reset_keep(stream), ReturnCode::Ok);

        let output = &mut [0; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        assert!(end(stream).is_ok());
    }
3570
    #[test]
    // Pins the exact output bytes for the HuffmanOnly strategy at level 6.
    fn hello_world_huffman_only() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
        ];

        let input = "Hello World!\n";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::HuffmanOnly,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3598
    #[test]
    // Pins the exact output bytes for level 1 (the "quick" configuration).
    fn hello_world_quick() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
        ];

        let input = "Hello World!\n";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 1,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3626
    #[test]
    // Level-1 output bytes for an input with repeated non-ASCII-letter bytes.
    fn hello_world_quick_random() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0x53, 0xe1, 0x50, 0x51, 0xe1, 0x52, 0x51, 0x51, 0x01, 0x00, 0x03, 0xec,
            0x00, 0xeb,
        ];

        let input = "$\u{8}$$\n$$$";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 1,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3654
    // Helper: compresses `input` with `config` and asserts the exact
    // compressed bytes equal `expected`.
    fn fuzz_based_test(input: &[u8], config: DeflateConfig, expected: &[u8]) {
        let mut output_rs = [0; 1 << 17];
        let (output_rs, err) = compress_slice(&mut output_rs, input, config);
        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output_rs, expected);
    }
3662
    #[test]
    // Pins the output of the RLE strategy on a short run of zero bytes.
    fn simple_rle() {
        fuzz_based_test(
            "\0\0\0\0\u{6}".as_bytes(),
            DeflateConfig {
                level: -1,
                method: Method::Deflated,
                window_bits: 11,
                mem_level: 4,
                strategy: Strategy::Rle,
            },
            &[56, 17, 99, 0, 2, 54, 0, 0, 11, 0, 7],
        )
    }
3677
    #[test]
    // Fuzzer-derived regression test: this input once triggered an
    // out-of-bounds access in `fill_window` with a small (9-bit) window and
    // minimal memory level. Pins the exact compressed output.
    fn fill_window_out_of_bounds() {
        const INPUT: &[u8] = &[
            0x71, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x1d, 0x1d, 0x63,
            0x63, 0x63, 0x63, 0x63, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d,
            0x1d, 0x27, 0x0, 0x0, 0x0, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
            0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x48, 0x50,
            0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x0, 0x0, 0x0, 0x0,
            0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71,
            0x6a, 0x0, 0x0, 0x0, 0x0, 0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x31, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0,
            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
            0x50, 0x50, 0x50, 0x50, 0x48, 0x50, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x6, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x20, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x71, 0x75, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x30, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x6,
        ];

        fuzz_based_test(
            INPUT,
            DeflateConfig {
                level: -1,
                method: Method::Deflated,
                window_bits: 9,
                mem_level: 1,
                strategy: Strategy::HuffmanOnly,
            },
            &[
                0x18, 0x19, 0x4, 0xc1, 0x21, 0x1, 0xc4, 0x0, 0x10, 0x3, 0xb0, 0x18, 0x29, 0x1e,
                0x7e, 0x17, 0x83, 0xf5, 0x70, 0x6c, 0xac, 0xfe, 0xc9, 0x27, 0xdb, 0xb6, 0x6f, 0xdb,
                0xb6, 0x6d, 0xdb, 0x80, 0x24, 0xb9, 0xbb, 0xbb, 0x24, 0x49, 0x92, 0x24, 0xf, 0x2,
                0xd8, 0x36, 0x0, 0xf0, 0x3, 0x0, 0x0, 0x24, 0xd0, 0xb6, 0x6d, 0xdb, 0xb6, 0x6d,
                0xdb, 0xbe, 0x6d, 0xf9, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x80, 0x30, 0x0, 0xc3, 0x22,
                0x68, 0xf, 0x36, 0x90, 0xc2, 0xb5, 0xfa, 0x7f, 0x48, 0x80, 0x81, 0xb, 0x40, 0x55,
                0x55, 0x55, 0xd5, 0x16, 0x80, 0xaa, 0x7, 0x9, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
                0xe, 0x7c, 0x82, 0xe0, 0x98, 0x0, 0x0, 0x0, 0x4, 0x60, 0x10, 0xf9, 0x8c, 0xe2,
                0xe5, 0xfa, 0x3f, 0x2, 0x54, 0x55, 0x55, 0x65, 0x0, 0xa8, 0xaa, 0xaa, 0xaa, 0xba,
                0x2, 0x50, 0xb5, 0x90, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x78, 0x82, 0xe0, 0xd0,
                0x8a, 0x41, 0x0, 0x0, 0xa2, 0x58, 0x54, 0xb7, 0x60, 0x83, 0x9a, 0x6a, 0x4, 0x96,
                0x87, 0xba, 0x51, 0xf8, 0xfb, 0x9b, 0x26, 0xfc, 0x0, 0x1c, 0x7, 0x6c, 0xdb, 0xb6,
                0x6d, 0xdb, 0xb6, 0x6d, 0xf7, 0xa8, 0x3a, 0xaf, 0xaa, 0x6a, 0x3, 0xf8, 0xc2, 0x3,
                0x40, 0x55, 0x55, 0x55, 0xd5, 0x5b, 0xf8, 0x80, 0xaa, 0x7a, 0xb, 0x0, 0x7f, 0x82,
                0xe0, 0x98, 0x0, 0x40, 0x18, 0x0, 0x82, 0xd8, 0x49, 0x40, 0x2, 0x22, 0x7e, 0xeb,
                0x80, 0xa6, 0xc, 0xa0, 0x9f, 0xa4, 0x2a, 0x38, 0xf, 0x0, 0x0, 0xe7, 0x1, 0xdc,
                0x55, 0x95, 0x17, 0x0, 0x0, 0xae, 0x0, 0x38, 0xc0, 0x67, 0xdb, 0x36, 0x80, 0x2b,
                0x0, 0xe, 0xf0, 0xd9, 0xf6, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x0, 0x30, 0xc, 0x83, 0x22,
                0x69, 0x7, 0xc6, 0xea, 0xff, 0x19, 0x0, 0x0, 0x80, 0xaa, 0x0, 0x0, 0x0, 0x0, 0x0,
                0x0, 0x8e, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x6a,
                0xf5, 0x63, 0x60, 0x60, 0x3, 0x0, 0xee, 0x8a, 0x88, 0x67,
            ],
        )
    }
3767
    #[test]
    // window_bits 31 selects gzip wrapping; with no gz_header set, the default
    // 10-byte gzip header (with OS code) is emitted.
    fn gzip_no_header() {
        let config = DeflateConfig {
            level: 9,
            method: Method::Deflated,
            window_bits: 31, ..Default::default()
        };

        let input = b"Hello World!";
        let os = gz_header::OS_CODE;

        fuzz_based_test(
            input,
            config,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 2, os, 243, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 4, 0, 163, 28, 41, 28, 12, 0, 0, 0,
            ],
        )
    }
3789
    #[test]
    #[rustfmt::skip]
    // Level 0 (stored blocks) inside a gzip wrapper: pins header, stored
    // payload, and trailing CRC-32 + length.
    fn gzip_stored_block_checksum() {
        fuzz_based_test(
            &[
                27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0,
            ],
            DeflateConfig {
                level: 0,
                method: Method::Deflated,
                window_bits: 26,
                mem_level: 6,
                strategy: Strategy::Default,
            },
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 4, gz_header::OS_CODE, 1, 18, 0, 237, 255, 27, 27, 27, 27, 27, 27, 27,
                27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0, 60, 101, 156, 55, 18, 0, 0, 0,
            ],
        )
    }
3810
    #[test]
    // With a tiny pending buffer (mem_level 1) pre-filled almost to capacity,
    // writing the gzip header must flush correctly across multiple deflate
    // calls instead of overflowing the pending buffer.
    fn gzip_header_pending_flush() {
        let extra = "aaaaaaaaaaaaaaaaaaaa\0";
        let name = "bbbbbbbbbbbbbbbbbbbb\0";
        let comment = "cccccccccccccccccccc\0";

        let mut header = gz_header {
            text: 0,
            time: 0,
            xflags: 0,
            os: 0,
            extra: extra.as_ptr() as *mut _,
            extra_len: extra.len() as _,
            extra_max: 0,
            name: name.as_ptr() as *mut _,
            name_max: 0,
            comment: comment.as_ptr() as *mut _,
            comm_max: 0,
            hcrc: 1,
            done: 0,
        };

        let config = DeflateConfig {
            window_bits: 31,
            mem_level: 1,
            ..Default::default()
        };

        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, config), ReturnCode::Ok);

        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
            unreachable!()
        };

        unsafe { set_header(stream, Some(&mut header)) };

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut _;
        stream.avail_in = input.len() as _;

        let mut output = [0u8; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = 100;

        // mem_level 1 gives the smallest pending buffer.
        assert_eq!(stream.state.bit_writer.pending.capacity(), 512);

        // Nearly fill the pending buffer so the header write must be split.
        stream.state.bit_writer.pending.extend(&[0; 500]);

        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::Ok);

        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        let n = stream.total_out as usize;

        assert!(end(stream).is_ok());

        let output_rs = &mut output[..n];

        assert_eq!(output_rs.len(), 500 + 99);
    }
3876
    #[test]
    // Round-trip: compress with a full gzip header (extra, name, comment,
    // header CRC), then inflate and verify every header field survives.
    fn gzip_with_header() {
        let extra = "some extra stuff\0";
        let name = "nomen est omen\0";
        let comment = "such comment\0";

        let mut header = gz_header {
            text: 0,
            time: 0,
            xflags: 0,
            os: 0,
            extra: extra.as_ptr() as *mut _,
            extra_len: extra.len() as _,
            extra_max: 0,
            name: name.as_ptr() as *mut _,
            name_max: 0,
            comment: comment.as_ptr() as *mut _,
            comm_max: 0,
            hcrc: 1,
            done: 0,
        };

        let config = DeflateConfig {
            window_bits: 31,
            ..Default::default()
        };

        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, config), ReturnCode::Ok);

        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
            unreachable!()
        };

        unsafe { set_header(stream, Some(&mut header)) };

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut _;
        stream.avail_in = input.len() as _;

        let mut output = [0u8; 256];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;

        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        let n = stream.total_out as usize;

        assert!(end(stream).is_ok());

        let output_rs = &mut output[..n];

        assert_eq!(output_rs.len(), 81);

        // Decompress and verify the header fields round-trip.
        {
            let mut stream = z_stream::default();

            let config = InflateConfig {
                window_bits: config.window_bits,
            };

            assert_eq!(crate::inflate::init(&mut stream, config), ReturnCode::Ok);

            let Some(stream) = (unsafe { InflateStream::from_stream_mut(&mut stream) }) else {
                unreachable!();
            };

            stream.next_in = output_rs.as_mut_ptr() as _;
            stream.avail_in = output_rs.len() as _;

            let mut output = [0u8; 12];
            stream.next_out = output.as_mut_ptr();
            stream.avail_out = output.len() as _;

            let mut extra_buf = [0u8; 64];
            let mut name_buf = [0u8; 64];
            let mut comment_buf = [0u8; 64];

            let mut header = gz_header {
                text: 0,
                time: 0,
                xflags: 0,
                os: 0,
                extra: extra_buf.as_mut_ptr(),
                extra_len: 0,
                extra_max: extra_buf.len() as _,
                name: name_buf.as_mut_ptr(),
                name_max: name_buf.len() as _,
                comment: comment_buf.as_mut_ptr(),
                comm_max: comment_buf.len() as _,
                hcrc: 0,
                done: 0,
            };

            assert_eq!(
                unsafe { crate::inflate::get_header(stream, Some(&mut header)) },
                ReturnCode::Ok
            );

            assert_eq!(
                unsafe { crate::inflate::inflate(stream, InflateFlush::Finish) },
                ReturnCode::StreamEnd
            );

            crate::inflate::end(stream);

            assert!(!header.comment.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.comment.cast()) }
                    .to_str()
                    .unwrap(),
                comment.trim_end_matches('\0')
            );

            assert!(!header.name.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.name.cast()) }
                    .to_str()
                    .unwrap(),
                name.trim_end_matches('\0')
            );

            assert!(!header.extra.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.extra.cast()) }
                    .to_str()
                    .unwrap(),
                extra.trim_end_matches('\0')
            );
        }
    }
4011
4012 #[test]
4013 fn insufficient_compress_space() {
4014 const DATA: &[u8] = include_bytes!("deflate/test-data/inflate_buf_error.dat");
4015
4016 fn helper(deflate_buf: &mut [u8], deflate_err: ReturnCode) -> ReturnCode {
4017 let config = DeflateConfig {
4018 level: 0,
4019 method: Method::Deflated,
4020 window_bits: 10,
4021 mem_level: 6,
4022 strategy: Strategy::Default,
4023 };
4024
4025 let (output, err) = compress_slice(deflate_buf, DATA, config);
4026 assert_eq!(err, deflate_err);
4027
4028 let config = InflateConfig {
4029 window_bits: config.window_bits,
4030 };
4031
4032 let mut uncompr = [0; 1 << 17];
4033 let (uncompr, err) = decompress_slice(&mut uncompr, output, config);
4034
4035 if err == ReturnCode::Ok {
4036 assert_eq!(DATA, uncompr);
4037 }
4038
4039 err
4040 }
4041
4042 let mut output = [0; 1 << 17];
4043
4044 assert_eq!(
4046 helper(&mut output[..1 << 16], ReturnCode::BufError),
4047 ReturnCode::DataError
4048 );
4049
4050 assert_eq!(helper(&mut output, ReturnCode::Ok), ReturnCode::Ok);
4052 }
4053
4054 fn test_flush(flush: DeflateFlush, expected: &[u8]) {
4055 let input = b"Hello World!\n";
4056
4057 let config = DeflateConfig {
4058 level: 6, method: Method::Deflated,
4060 window_bits: 16 + crate::MAX_WBITS,
4061 mem_level: DEF_MEM_LEVEL,
4062 strategy: Strategy::Default,
4063 };
4064
4065 let mut output_rs = vec![0; 128];
4066
4067 let expected_err = ReturnCode::BufError;
4071
4072 let (rs, err) = compress_slice_with_flush(&mut output_rs, input, config, flush);
4073 assert_eq!(expected_err, err);
4074
4075 assert_eq!(rs, expected);
4076 }
4077
    #[test]
    #[rustfmt::skip]
    fn sync_flush() {
        // SyncFlush byte-aligns the output by appending an empty stored block,
        // visible here as the trailing 0, 0, 0, 255, 255 marker after the
        // gzip header (31, 139, ...) and the compressed data.
        test_flush(
            DeflateFlush::SyncFlush,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2, 0, 0, 0, 255, 255,
            ],
        )
    }
4089
    #[test]
    #[rustfmt::skip]
    fn partial_flush() {
        // PartialFlush emits fewer trailing bytes than SyncFlush: it does not
        // append the full empty stored-block marker (0, 0, 0, 255, 255), so
        // the expected output is shorter.
        test_flush(
            DeflateFlush::PartialFlush,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2, 8,
            ],
        );
    }
4101
    #[test]
    #[rustfmt::skip]
    fn full_flush() {
        // For this single-shot input, FullFlush produces the same bytes as
        // SyncFlush (including the 0, 0, 0, 255, 255 marker); the additional
        // dictionary reset it performs is not observable in the output here.
        test_flush(
            DeflateFlush::FullFlush,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2, 0, 0, 0, 255, 255,
            ],
        );
    }
4113
    #[test]
    #[rustfmt::skip]
    fn block_flush() {
        // Block flush finishes the current deflate block but adds no
        // alignment marker at all, so the output is the shortest of the
        // flush-mode tests.
        test_flush(
            DeflateFlush::Block,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2,
            ],
        );
    }
4125
    #[test]
    fn split_deflate() {
        // Compress an input in two independent gzip streams, then splice the
        // outputs into one valid gzip stream by hand: keep the first stream's
        // header + data (ended with SyncFlush so it is byte-aligned), strip
        // the second stream's header and trailer, and write a combined
        // trailer using crc32_combine.
        let input = "Hello World!\n";

        let (input1, input2) = input.split_at(6);

        let mut output1 = vec![0; 128];
        let mut output2 = vec![0; 128];

        let config = DeflateConfig {
            level: 6, method: Method::Deflated,
            window_bits: 16 + crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        // First half: SyncFlush never emits StreamEnd, so the helper reports
        // BufError; the bytes produced (header + aligned data) are still valid.
        let (prefix, err) = compress_slice_with_flush(
            &mut output1,
            input1.as_bytes(),
            config,
            DeflateFlush::SyncFlush,
        );
        assert_eq!(err, ReturnCode::BufError);

        // Second half: a complete, finished gzip stream of its own.
        let (output2, err) = compress_slice_with_flush(
            &mut output2,
            input2.as_bytes(),
            config,
            DeflateFlush::Finish,
        );
        assert_eq!(err, ReturnCode::Ok);

        let inflate_config = crate::inflate::InflateConfig {
            window_bits: 16 + 15,
        };

        // The gzip trailer is 8 bytes: 4-byte little-endian crc32 of the
        // uncompressed data, then 4-byte little-endian length.
        let (suffix, end) = output2.split_at(output2.len() - 8);
        let (crc2, len2) = end.split_at(4);
        let crc2 = u32::from_le_bytes(crc2.try_into().unwrap());

        // Skip the 10-byte gzip header of the second stream.
        let suffix = &suffix[10..];

        let mut result: Vec<u8> = Vec::new();
        result.extend(prefix.iter());
        result.extend(suffix);

        // The second stream's recorded length must match its input.
        let len1 = input1.len() as u32;
        let len2 = u32::from_le_bytes(len2.try_into().unwrap());
        assert_eq!(len2 as usize, input2.len());

        // Combine the two checksums without re-reading the first half's data.
        let crc1 = crate::crc32::crc32(0, input1.as_bytes());
        let crc = crate::crc32::crc32_combine(crc1, crc2, len2 as u64);

        // Sanity check: combining must equal the crc over the whole input.
        let crc_cheating = crate::crc32::crc32(0, input.as_bytes());
        assert_eq!(crc, crc_cheating);

        // Append the combined trailer (crc, then total length).
        result.extend(crc.to_le_bytes());
        result.extend((len1 + len2).to_le_bytes());

        let mut output = vec![0; 128];
        let (output, err) = crate::inflate::decompress_slice(&mut output, &result, inflate_config);
        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output, input.as_bytes());
    }
4203
    #[test]
    fn inflate_window_copy_slice() {
        // Regression test: decompressing this stream must reproduce the
        // expected bytes exactly. NOTE(review): per the test name this input
        // appears chosen to exercise inflate's window-copy path when writing
        // through a caller-provided slice — confirm against the original
        // issue/fuzz report.
        let uncompressed = [
            9, 126, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12,
            10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 2, 0, 0, 0, 63, 1, 0, 12, 2,
            36, 0, 28, 0, 0, 0, 1, 0, 0, 63, 63, 13, 0, 0, 0, 0, 0, 0, 0, 63, 63, 63, 63, 0, 0, 0,
            0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 0, 0, 0, 9, 0, 0, 0, 9, 0, 0, 12, 33, 2, 0, 0, 8,
            0, 4, 0, 0, 0, 12, 10, 41, 12, 10, 47,
        ];

        let compressed = &[
            31, 139, 8, 0, 0, 0, 0, 0, 4, 3, 181, 193, 49, 14, 194, 32, 24, 128, 209, 175, 192, 0,
            228, 151, 232, 206, 66, 226, 226, 96, 60, 2, 113, 96, 235, 13, 188, 139, 103, 23, 106,
            104, 108, 100, 49, 169, 239, 185, 39, 11, 199, 7, 51, 39, 171, 248, 118, 226, 63, 52,
            157, 120, 86, 102, 78, 86, 209, 104, 58, 241, 84, 129, 166, 12, 4, 154, 178, 229, 202,
            30, 36, 130, 166, 19, 79, 21, 104, 202, 64, 160, 41, 91, 174, 236, 65, 34, 10, 200, 19,
            162, 206, 68, 96, 130, 156, 15, 188, 229, 138, 197, 157, 161, 35, 3, 87, 126, 245, 0,
            28, 224, 64, 146, 2, 139, 1, 196, 95, 196, 223, 94, 10, 96, 92, 33, 86, 2, 0, 0,
        ];

        // 25 = 16 + 9: gzip wrapper with a 2^9-byte history window.
        let config = InflateConfig { window_bits: 25 };

        let mut dest_vec_rs = vec![0u8; uncompressed.len()];
        let (output_rs, error) =
            crate::inflate::decompress_slice(&mut dest_vec_rs, compressed, config);

        assert_eq!(ReturnCode::Ok, error);
        assert_eq!(output_rs, uncompressed);
    }
4252
    #[test]
    fn hash_calc_difference() {
        // Pins the exact compressed output for an input that is sensitive to
        // the hash implementation used for match finding. NOTE(review): per
        // the test name, this input presumably produces different hash-chain
        // behavior between the HashCalc variants (Standard vs Roll) — the
        // expected bytes lock in the current behavior.
        let input = [
            0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0,
            0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 112, 102, 102, 102, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 0,
            0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0,
            50, 0,
        ];

        // window_bits 9 selects the zlib wrapper with a small window.
        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: 9,
            mem_level: 8,
            strategy: Strategy::Default,
        };

        let expected = [
            24, 149, 99, 96, 96, 96, 96, 208, 6, 17, 112, 138, 129, 193, 128, 1, 29, 24, 50, 208,
            1, 200, 146, 169, 79, 24, 74, 59, 96, 147, 52, 71, 22, 70, 246, 88, 26, 94, 80, 128,
            83, 6, 162, 219, 144, 76, 183, 210, 5, 8, 67, 105, 36, 159, 35, 128, 57, 118, 97, 100,
            160, 197, 192, 192, 96, 196, 0, 0, 3, 228, 25, 128,
        ];

        fuzz_based_test(&input, config, &expected);
    }
4296
4297 #[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
4298 mod _cache_lines {
4299 use super::State;
4300 use memoffset::offset_of;
4303
4304 const _: () = assert!(offset_of!(State, status) == 0);
4305 const _: () = assert!(offset_of!(State, _cache_line_0) == 64);
4306 const _: () = assert!(offset_of!(State, _cache_line_1) == 128);
4307 #[cfg(not(feature = "ZLIB_DEBUG"))] const _: () = assert!(offset_of!(State, _cache_line_2) == 192);
4309 #[cfg(not(feature = "ZLIB_DEBUG"))] const _: () = assert!(offset_of!(State, _cache_line_3) == 256);
4311 }
4312}