1use core::{ffi::CStr, marker::PhantomData, mem::MaybeUninit, ops::ControlFlow, ptr::NonNull};
2
3use crate::{
4 adler32::adler32,
5 allocate::Allocator,
6 c_api::{gz_header, internal_state, z_checksum, z_stream},
7 crc32::{crc32, Crc32Fold},
8 trace,
9 weak_slice::{WeakArrayMut, WeakSliceMut},
10 DeflateFlush, ReturnCode, ADLER32_INITIAL_VALUE, CRC32_INITIAL_VALUE, MAX_WBITS, MIN_WBITS,
11};
12
13use self::{
14 algorithm::CONFIGURATION_TABLE,
15 hash_calc::{HashCalcVariant, RollHashCalc, StandardHashCalc},
16 pending::Pending,
17 sym_buf::SymBuf,
18 trees_tbl::STATIC_LTREE,
19 window::Window,
20};
21
22mod algorithm;
23mod compare256;
24mod hash_calc;
25mod longest_match;
26mod pending;
27mod slide_hash;
28mod sym_buf;
29mod trees_tbl;
30mod window;
31
/// 16-bit index into the sliding window / hash chains (zlib's `Pos`).
pub(crate) type Pos = u16;
34
/// A deflate (compression) stream.
///
/// Layout-compatible with [`z_stream`] (`#[repr(C)]`; size and alignment are
/// asserted equal in the `impl` below), but with a guaranteed-valid `state`
/// reference and a typed [`Allocator`] instead of raw pointers.
#[repr(C)]
pub struct DeflateStream<'a> {
    /// Next input byte to read.
    pub(crate) next_in: *mut crate::c_api::Bytef,
    /// Number of bytes available at `next_in`.
    pub(crate) avail_in: crate::c_api::uInt,
    /// Total number of input bytes consumed so far.
    pub(crate) total_in: crate::c_api::z_size,
    /// Next output position to write to.
    pub(crate) next_out: *mut crate::c_api::Bytef,
    /// Remaining free space at `next_out`.
    pub(crate) avail_out: crate::c_api::uInt,
    /// Total number of bytes produced so far.
    pub(crate) total_out: crate::c_api::z_size,
    /// Last error message, or null when there is none.
    pub(crate) msg: *const core::ffi::c_char,
    /// Internal compression state; always valid (unlike `z_stream::state`).
    pub(crate) state: &'a mut State<'a>,
    /// Allocation callbacks used for the window, hash tables, and state.
    pub(crate) alloc: Allocator<'a>,
    /// Best guess about the data type: binary or text (see `DataType`).
    pub(crate) data_type: core::ffi::c_int,
    /// Running checksum of the uncompressed data (adler-32 or crc-32,
    /// depending on the wrapper — see `reset_keep`).
    pub(crate) adler: crate::c_api::z_checksum,
    /// Unused; kept for `z_stream` layout compatibility.
    pub(crate) reserved: crate::c_api::uLong,
}
52
// SAFETY(review): mirrors the C API expectation that a stream may be moved and
// shared across threads. The struct contains raw pointers (`next_in`, `next_out`,
// `msg`) whose validity is the caller's responsibility — TODO confirm this matches
// the upstream soundness argument before relying on it.
unsafe impl Sync for DeflateStream<'_> {}
unsafe impl Send for DeflateStream<'_> {}
55
impl<'a> DeflateStream<'a> {
    // Compile-time proof that `DeflateStream` and `z_stream` may be freely
    // reinterpreted as one another (used by the casts below and by `new`).
    const _S: () = assert!(core::mem::size_of::<z_stream>() == core::mem::size_of::<Self>());
    const _A: () = assert!(core::mem::align_of::<z_stream>() == core::mem::align_of::<Self>());

    /// Reinterprets a `*mut z_stream` as a mutable `DeflateStream`.
    ///
    /// Returns `None` when the pointer is null, an allocator callback is
    /// missing, or the internal state pointer is null (uninitialized stream).
    ///
    /// # Safety
    ///
    /// `strm` must be null or point to a valid, properly initialized `z_stream`
    /// whose `state` was set up by this module — TODO confirm exact caller contract.
    #[inline(always)]
    pub unsafe fn from_stream_mut(strm: *mut z_stream) -> Option<&'a mut Self> {
        {
            let stream = unsafe { strm.as_ref() }?;

            // Both allocation callbacks must be present; `Allocator` relies on them.
            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            // A null state means the stream was never initialized (or already ended).
            if stream.state.is_null() {
                return None;
            }
        }

        // Layout compatibility is guaranteed by the `_S`/`_A` assertions above.
        unsafe { strm.cast::<DeflateStream>().as_mut() }
    }

    /// Reinterprets a `*const z_stream` as a shared `DeflateStream`.
    ///
    /// Same checks as [`Self::from_stream_mut`], but for shared references.
    ///
    /// # Safety
    ///
    /// `strm` must be null or point to a valid, properly initialized `z_stream`
    /// whose `state` was set up by this module — TODO confirm exact caller contract.
    #[inline(always)]
    pub unsafe fn from_stream_ref(strm: *const z_stream) -> Option<&'a Self> {
        {
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        unsafe { strm.cast::<DeflateStream>().as_ref() }
    }

    /// Views this stream as the C-API `z_stream` (same memory, different type).
    fn as_z_stream_mut(&mut self) -> &mut z_stream {
        // SAFETY: layout compatibility asserted by `_S`/`_A`; lifetime tied to `self`.
        unsafe { &mut *(self as *mut DeflateStream as *mut z_stream) }
    }

    /// Returns (bytes pending in the output buffer, bits pending in the bit buffer).
    pub fn pending(&self) -> (usize, u8) {
        (
            self.state.bit_writer.pending.pending,
            self.state.bit_writer.bits_used,
        )
    }

    /// Creates a fully initialized stream from `config`.
    ///
    /// # Panics
    ///
    /// Panics when initialization fails (e.g. invalid config or allocation failure).
    pub fn new(config: DeflateConfig) -> Self {
        let mut inner = crate::c_api::z_stream::default();

        let ret = crate::deflate::init(&mut inner, config);
        assert_eq!(ret, ReturnCode::Ok);

        // SAFETY: `init` succeeded, so `inner` satisfies the `DeflateStream`
        // invariants; the types have identical layout (`_S`/`_A`).
        unsafe { core::mem::transmute(inner) }
    }
}
137
/// Number of entries in the hash table used to find match candidates.
pub(crate) const HASH_SIZE: usize = 65536;
/// log2(HASH_SIZE): hash values are 16 bits wide.
const HASH_BITS: usize = 16;

/// Upper bound for the `mem_level` configuration parameter.
const MAX_MEM_LEVEL: i32 = 9;
/// Default `mem_level`: 8, clamped to `MAX_MEM_LEVEL`.
pub const DEF_MEM_LEVEL: i32 = if MAX_MEM_LEVEL > 8 { 8 } else { MAX_MEM_LEVEL };
146
/// Compression method. The deflate format defines only one method (value 8),
/// matching zlib's `Z_DEFLATED`.
#[repr(i32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Method {
    #[default]
    Deflated = 8,
}

impl TryFrom<i32> for Method {
    type Error = ();

    /// Accepts exactly the value `8` (deflate); everything else is rejected.
    fn try_from(value: i32) -> Result<Self, Self::Error> {
        if value == Self::Deflated as i32 {
            Ok(Self::Deflated)
        } else {
            Err(())
        }
    }
}
165
/// Parameters for initializing a deflate stream (mirrors zlib's `deflateInit2` arguments).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub struct DeflateConfig {
    /// Compression level: 0..=9, or `Z_DEFAULT_COMPRESSION`.
    pub level: i32,
    /// Compression method; only `Deflated` exists.
    pub method: Method,
    /// Base-two log of the window size. The sign/offset also selects the wrapper:
    /// negative = raw deflate, value > MAX_WBITS = gzip — see `init`.
    pub window_bits: i32,
    /// Memory usage level (1..=9); determines the symbol/pending buffer sizes.
    pub mem_level: i32,
    /// Compression strategy.
    pub strategy: Strategy,
}
186
#[cfg(any(test, feature = "__internal-test"))]
impl quickcheck::Arbitrary for DeflateConfig {
    /// Generates a random but always-valid configuration; the ranges below
    /// mirror what `init` accepts.
    fn arbitrary(g: &mut quickcheck::Gen) -> Self {
        let mem_levels: Vec<_> = (1..=9).collect();
        let levels: Vec<_> = (0..=9).collect();

        // window_bits candidates: zlib (9..=15), gzip (+16), and raw (negative).
        let mut window_bits = Vec::new();
        window_bits.extend(9..=15);
        window_bits.extend(9 + 16..=15 + 16);
        window_bits.extend(-15..=-9);

        Self {
            level: *g.choose(&levels).unwrap(),
            method: Method::Deflated,
            window_bits: *g.choose(&window_bits).unwrap(),
            mem_level: *g.choose(&mem_levels).unwrap(),
            strategy: *g
                .choose(&[
                    Strategy::Default,
                    Strategy::Filtered,
                    Strategy::HuffmanOnly,
                    Strategy::Rle,
                    Strategy::Fixed,
                ])
                .unwrap(),
        }
    }
}
215
216impl DeflateConfig {
217 pub fn new(level: i32) -> Self {
218 Self {
219 level,
220 ..Self::default()
221 }
222 }
223
224 pub fn best_compression() -> Self {
226 Self::new(crate::c_api::Z_BEST_COMPRESSION)
227 }
228
229 pub fn best_speed() -> Self {
231 Self::new(crate::c_api::Z_BEST_SPEED)
232 }
233}
234
impl Default for DeflateConfig {
    /// Default configuration: zlib wrapper, maximum window, default level,
    /// default memory level and strategy.
    fn default() -> Self {
        Self {
            level: crate::c_api::Z_DEFAULT_COMPRESSION,
            method: Method::Deflated,
            window_bits: MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        }
    }
}
246
/// Initializes `stream` for compression according to `config` (zlib's `deflateInit2`).
///
/// Validates the configuration, installs a default allocator when the caller did
/// not supply one (and a feature provides one), performs a single backing
/// allocation for the window, hash tables, pending buffer, symbol buffer and
/// state, and finally resets the stream.
pub fn init(stream: &mut z_stream, config: DeflateConfig) -> ReturnCode {
    let DeflateConfig {
        mut level,
        method: _,
        mut window_bits,
        mem_level,
        strategy,
    } = config;

    stream.msg = core::ptr::null_mut();

    // Fall back to a default allocator when the caller provided none.
    #[cfg(feature = "rust-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_rust_allocator()
    }

    #[cfg(feature = "c-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_c_allocator()
    }

    // With no allocator at all we cannot proceed.
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        return ReturnCode::StreamError;
    }

    if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        level = 6;
    }

    // Decode the wrapper from window_bits:
    // negative => raw deflate (wrap = 0), > MAX_WBITS => gzip (wrap = 2), else zlib (wrap = 1).
    let wrap = if window_bits < 0 {
        if window_bits < -MAX_WBITS {
            return ReturnCode::StreamError;
        }
        window_bits = -window_bits;

        0
    } else if window_bits > MAX_WBITS {
        window_bits -= 16;
        2
    } else {
        1
    };

    // Validate all numeric parameters; a 256-byte window (8) only works with a
    // zlib wrapper.
    if (!(1..=MAX_MEM_LEVEL).contains(&mem_level))
        || !(MIN_WBITS..=MAX_WBITS).contains(&window_bits)
        || !(0..=9).contains(&level)
        || (window_bits == 8 && wrap != 1)
    {
        return ReturnCode::StreamError;
    }

    // A 256-byte window is not actually supported; silently widen it to 512
    // (same behavior as zlib).
    let window_bits = if window_bits == 8 {
        9
    } else {
        window_bits as usize
    };

    let alloc = Allocator {
        zalloc: stream.zalloc.unwrap(),
        zfree: stream.zfree.unwrap(),
        opaque: stream.opaque,
        _marker: PhantomData,
    };

    // Size of the literal/length symbol buffer; also sizes the pending buffer (4x).
    let lit_bufsize = 1 << (mem_level + 6);

    // One allocation backs all buffers plus the State itself.
    let allocs = DeflateAllocOffsets::new(window_bits, lit_bufsize);

    let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
        return ReturnCode::MemError;
    };

    let w_size = 1 << window_bits;
    // Round the base pointer up to a 64-byte boundary; the offsets in `allocs`
    // assume this alignment.
    let align_offset = (allocation_start.as_ptr() as usize).next_multiple_of(64)
        - (allocation_start.as_ptr() as usize);
    let buf = unsafe { allocation_start.as_ptr().add(align_offset) };

    // Carve the allocation into the individual buffers, zero-initializing each.
    let (window, prev, head, pending, sym_buf) = unsafe {
        let window_ptr: *mut u8 = buf.add(allocs.window_pos);
        window_ptr.write_bytes(0u8, 2 * w_size);
        let window = Window::from_raw_parts(window_ptr, window_bits);

        let prev_ptr = buf.add(allocs.prev_pos).cast::<Pos>();
        prev_ptr.write_bytes(0, w_size);
        let prev = WeakSliceMut::from_raw_parts_mut(prev_ptr, w_size);

        let head_ptr = buf.add(allocs.head_pos).cast::<[Pos; HASH_SIZE]>();
        head_ptr.write_bytes(0, 1);
        let head = WeakArrayMut::<Pos, HASH_SIZE>::from_ptr(head_ptr);

        let pending_ptr = buf.add(allocs.pending_pos).cast::<MaybeUninit<u8>>();
        let pending = Pending::from_raw_parts(pending_ptr, 4 * lit_bufsize);

        let sym_buf_ptr = buf.add(allocs.sym_buf_pos);
        let sym_buf = SymBuf::from_raw_parts(sym_buf_ptr, lit_bufsize);

        (window, prev, head, pending, sym_buf)
    };

    let state = State {
        status: Status::Init,

        w_size,

        window,
        prev,
        head,
        bit_writer: BitWriter::from_pending(pending),

        lit_bufsize,

        sym_buf,

        // `level` was clamped to 0..=9 above, so the narrowing cast is lossless.
        level: level as i8,
        strategy,

        last_flush: 0,
        wrap,
        strstart: 0,
        block_start: 0,
        block_open: 0,
        window_size: 0,
        insert: 0,
        matches: 0,
        opt_len: 0,
        static_len: 0,
        lookahead: 0,
        ins_h: 0,
        max_chain_length: 0,
        max_lazy_match: 0,
        good_match: 0,
        nice_match: 0,

        l_desc: TreeDesc::EMPTY,
        d_desc: TreeDesc::EMPTY,
        bl_desc: TreeDesc::EMPTY,

        crc_fold: Crc32Fold::new(),
        gzhead: None,
        gzindex: 0,

        match_start: 0,
        prev_match: 0,
        match_available: false,
        prev_length: 0,

        allocation_start,
        total_allocation_size: allocs.total_size,

        hash_calc_variant: HashCalcVariant::Standard,
        _cache_line_0: (),
        _cache_line_1: (),
        _cache_line_2: (),
        _cache_line_3: (),
        _padding_0: [0; 16],
    };

    // The State itself lives inside the same backing allocation.
    let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
    unsafe { state_allocation.write(state) };

    stream.state = state_allocation.cast::<internal_state>();

    let Some(stream) = (unsafe { DeflateStream::from_stream_mut(stream) }) else {
        if cfg!(debug_assertions) {
            unreachable!("we should have initialized the stream properly");
        }
        return ReturnCode::StreamError;
    };

    // Finish by performing a full reset (checksums, trees, match state).
    reset(stream)
}
435
/// Changes the compression `level` and `strategy` of a live stream (zlib's
/// `deflateParams`), flushing pending data first when the change would switch
/// the compression function mid-block.
pub fn params(stream: &mut DeflateStream, level: i32, strategy: Strategy) -> ReturnCode {
    let level = if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        6
    } else {
        level
    };

    if !(0..=9).contains(&level) {
        return ReturnCode::StreamError;
    }

    let level = level as i8;

    // Compression function currently in effect for the old level.
    let func = CONFIGURATION_TABLE[stream.state.level as usize].func;

    let state = &mut stream.state;

    // If the new settings select a different algorithm (or strategy) and some
    // data has already been processed (last_flush != -2, see `reset_keep`),
    // emit the buffered data as a block under the old parameters first.
    #[allow(unpredictable_function_pointer_comparisons)]
    if (strategy != state.strategy || func != CONFIGURATION_TABLE[level as usize].func)
        && state.last_flush != -2
    {
        let err = deflate(stream, DeflateFlush::Block);
        if err == ReturnCode::StreamError {
            return err;
        }

        let state = &mut stream.state;

        // Input (or window data) remains unprocessed: caller must drain output
        // and retry before the parameters can change.
        if stream.avail_in != 0
            || ((state.strstart as isize - state.block_start) + state.lookahead as isize) != 0
        {
            return ReturnCode::BufError;
        }
    }

    let state = &mut stream.state;

    if state.level != level {
        // Leaving level 0 ("stored only"): the hash table is stale.
        // NOTE(review): `matches == 1` appears to mean "window slid once, slide
        // the hash too"; otherwise clear it entirely — confirm against zlib-ng.
        if state.level == 0 && state.matches != 0 {
            if state.matches == 1 {
                self::slide_hash::slide_hash(state);
            } else {
                state.head.as_mut_slice().fill(0);
            }
            state.matches = 0;
        }

        lm_set_level(state, level);
    }

    state.strategy = strategy;

    ReturnCode::Ok
}
493
/// Pre-loads `dictionary` into the sliding window so the compressor can emit
/// back-references into it (zlib's `deflateSetDictionary`).
pub fn set_dictionary(stream: &mut DeflateStream, mut dictionary: &[u8]) -> ReturnCode {
    let state = &mut stream.state;

    let wrap = state.wrap;

    // Not allowed for gzip streams, after compression has started on a zlib
    // stream, or when lookahead data is already buffered.
    if wrap == 2 || (wrap == 1 && state.status != Status::Init) || state.lookahead != 0 {
        return ReturnCode::StreamError;
    }

    // For zlib streams the dictionary's adler-32 is exchanged in the header.
    if wrap == 1 {
        stream.adler = adler32(stream.adler as u32, dictionary) as z_checksum;
    }

    // Temporarily treat the stream as raw so `fill_window` does not checksum
    // the dictionary bytes as if they were stream data.
    state.wrap = 0;

    // Only the tail of an oversized dictionary fits in the window.
    if dictionary.len() >= state.window.capacity() {
        if wrap == 0 {
            // Raw deflate: the existing window is entirely replaced, so clear
            // the hash table and window bookkeeping.
            state.head.as_mut_slice().fill(0);

            state.strstart = 0;
            state.block_start = 0;
            state.insert = 0;
        } else {
            // zlib stream still in Init state: nothing to clear
            // (comment-only branch in upstream zlib).
        }

        dictionary = &dictionary[dictionary.len() - state.w_size..];
    }

    // Feed the dictionary through the regular input path, remembering the
    // caller's actual input so it can be restored afterwards.
    let avail = stream.avail_in;
    let next = stream.next_in;
    stream.avail_in = dictionary.len() as _;
    stream.next_in = dictionary.as_ptr() as *mut u8;
    fill_window(stream);

    // Insert every window position that has at least STD_MIN_MATCH bytes of
    // lookahead into the hash table.
    while stream.state.lookahead >= STD_MIN_MATCH {
        let str = stream.state.strstart;
        let n = stream.state.lookahead - (STD_MIN_MATCH - 1);
        stream.state.insert_string(str, n);
        stream.state.strstart = str + n;
        stream.state.lookahead = STD_MIN_MATCH - 1;
        fill_window(stream);
    }

    let state = &mut stream.state;

    // Account for the trailing bytes that could not be hashed yet.
    state.strstart += state.lookahead;
    state.block_start = state.strstart as _;
    state.insert = state.lookahead;
    state.lookahead = 0;
    state.prev_length = 0;
    state.match_available = false;

    // Restore the caller's input and the real wrap mode.
    stream.next_in = next;
    stream.avail_in = avail;
    state.wrap = wrap;

    ReturnCode::Ok
}
560
/// Inserts `bits` low-order bits of `value` into the output stream ahead of any
/// compressed data (zlib's `deflatePrime`).
pub fn prime(stream: &mut DeflateStream, mut bits: i32, value: i32) -> ReturnCode {
    // NOTE(review): the debug assertion documents zlib's contract (at most 16
    // bits per call); the release-mode check below only rejects counts the bit
    // buffer cannot physically hold.
    debug_assert!(bits <= 16, "zlib only supports up to 16 bits here");

    let mut value64 = value as u64;

    let state = &mut stream.state;

    if bits < 0
        || bits > BitWriter::BIT_BUF_SIZE as i32
        || bits > (core::mem::size_of_val(&value) << 3) as i32
    {
        return ReturnCode::BufError;
    }

    let mut put;

    // Insert in chunks no larger than the free space in the bit buffer,
    // flushing completed bytes between chunks.
    loop {
        // Free bit capacity remaining in the accumulator.
        put = BitWriter::BIT_BUF_SIZE - state.bit_writer.bits_used;
        let put = Ord::min(put as i32, bits);

        if state.bit_writer.bits_used == 0 {
            state.bit_writer.bit_buffer = value64;
        } else {
            // Mask to `put` bits so bits already in the buffer are not clobbered.
            state.bit_writer.bit_buffer |=
                (value64 & ((1 << put) - 1)) << state.bit_writer.bits_used;
        }

        state.bit_writer.bits_used += put as u8;
        state.bit_writer.flush_bits();
        value64 >>= put;
        bits -= put;

        if bits == 0 {
            break;
        }
    }

    ReturnCode::Ok
}
601
/// Copies `source` into `dest` as an independent deflate stream with its own
/// backing allocation (zlib's `deflateCopy`).
pub fn copy<'a>(
    dest: &mut MaybeUninit<DeflateStream<'a>>,
    source: &mut DeflateStream<'a>,
) -> ReturnCode {
    let w_size = source.state.w_size;
    let window_bits = source.state.w_bits() as usize;
    let lit_bufsize = source.state.lit_bufsize;

    // Shallow-copy the stream header first; the `state` reference is patched
    // to point at the fresh allocation below.
    unsafe { core::ptr::copy_nonoverlapping(source, dest.as_mut_ptr(), 1) };

    let source_state = &source.state;
    let alloc = &source.alloc;

    // Same single-allocation layout as `init`.
    let allocs = DeflateAllocOffsets::new(window_bits, lit_bufsize);

    let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
        return ReturnCode::MemError;
    };

    // Round the base pointer up to a 64-byte boundary (the offsets assume it).
    let align_offset = (allocation_start.as_ptr() as usize).next_multiple_of(64)
        - (allocation_start.as_ptr() as usize);
    let buf = unsafe { allocation_start.as_ptr().add(align_offset) };

    // Deep-copy all buffers that live inside the backing allocation.
    let (window, prev, head, pending, sym_buf) = unsafe {
        let window_ptr: *mut u8 = buf.add(allocs.window_pos);
        window_ptr
            .copy_from_nonoverlapping(source_state.window.as_ptr(), source_state.window.capacity());
        let window = Window::from_raw_parts(window_ptr, window_bits);

        let prev_ptr = buf.add(allocs.prev_pos).cast::<Pos>();
        prev_ptr.copy_from_nonoverlapping(source_state.prev.as_ptr(), source_state.prev.len());
        let prev = WeakSliceMut::from_raw_parts_mut(prev_ptr, w_size);

        let head_ptr = buf.add(allocs.head_pos).cast::<[Pos; HASH_SIZE]>();
        head_ptr.copy_from_nonoverlapping(source_state.head.as_ptr(), 1);
        let head = WeakArrayMut::<Pos, HASH_SIZE>::from_ptr(head_ptr);

        let pending_ptr = buf.add(allocs.pending_pos);
        let pending = source_state.bit_writer.pending.clone_to(pending_ptr);

        let sym_buf_ptr = buf.add(allocs.sym_buf_pos);
        let sym_buf = source_state.sym_buf.clone_to(sym_buf_ptr);

        (window, prev, head, pending, sym_buf)
    };

    // Recreate the bit writer on top of the cloned pending buffer, carrying
    // over the partially-filled bit accumulator.
    let mut bit_writer = BitWriter::from_pending(pending);
    bit_writer.bits_used = source_state.bit_writer.bits_used;
    bit_writer.bit_buffer = source_state.bit_writer.bit_buffer;

    // Field-by-field copy of the state; buffer fields use the fresh allocation.
    // `gzhead` is set to None here and copied bitwise further down.
    let dest_state = State {
        status: source_state.status,
        bit_writer,
        last_flush: source_state.last_flush,
        wrap: source_state.wrap,
        strategy: source_state.strategy,
        level: source_state.level,
        good_match: source_state.good_match,
        nice_match: source_state.nice_match,
        l_desc: source_state.l_desc.clone(),
        d_desc: source_state.d_desc.clone(),
        bl_desc: source_state.bl_desc.clone(),
        prev_match: source_state.prev_match,
        match_available: source_state.match_available,
        strstart: source_state.strstart,
        match_start: source_state.match_start,
        prev_length: source_state.prev_length,
        max_chain_length: source_state.max_chain_length,
        max_lazy_match: source_state.max_lazy_match,
        block_start: source_state.block_start,
        block_open: source_state.block_open,
        window,
        sym_buf,
        lit_bufsize: source_state.lit_bufsize,
        window_size: source_state.window_size,
        matches: source_state.matches,
        opt_len: source_state.opt_len,
        static_len: source_state.static_len,
        insert: source_state.insert,
        w_size: source_state.w_size,
        lookahead: source_state.lookahead,
        prev,
        head,
        ins_h: source_state.ins_h,
        hash_calc_variant: source_state.hash_calc_variant,
        crc_fold: source_state.crc_fold,
        gzhead: None,
        gzindex: source_state.gzindex,
        allocation_start,
        total_allocation_size: allocs.total_size,
        _cache_line_0: (),
        _cache_line_1: (),
        _cache_line_2: (),
        _cache_line_3: (),
        _padding_0: source_state._padding_0,
    };

    // The copied State lives inside the new backing allocation.
    let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
    unsafe { state_allocation.write(dest_state) };

    // Patch the copied stream header to reference the new state.
    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
    unsafe { core::ptr::write(field_ptr as *mut *mut State, state_allocation) };

    // Bitwise-copy the gzip header reference (it aliases caller-owned data;
    // NOTE(review): this intentionally bypasses the borrow of `&mut gz_header`).
    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state.gzhead) };
    unsafe { core::ptr::copy(&source_state.gzhead, field_ptr, 1) };

    ReturnCode::Ok
}
717
/// Frees all memory owned by the stream (zlib's `deflateEnd`).
///
/// Returns `Err` when the stream was still mid-compression (`Status::Busy`),
/// meaning output was discarded; in both cases the memory is released and the
/// returned `z_stream` has a null `state`.
pub fn end<'a>(stream: &'a mut DeflateStream) -> Result<&'a mut z_stream, &'a mut z_stream> {
    let status = stream.state.status;
    // Everything was carved out of one allocation in `init`, so a single
    // deallocation releases the window, tables, buffers, and the State itself.
    let allocation_start = stream.state.allocation_start;
    let total_allocation_size = stream.state.total_allocation_size;
    let alloc = stream.alloc;

    // Null out the state pointer first so the stream can no longer be used as
    // an initialized `DeflateStream`.
    let stream = stream.as_z_stream_mut();
    let _ = core::mem::replace(&mut stream.state, core::ptr::null_mut());

    // SAFETY: pointer and size come from the allocation made in `init`/`copy`.
    unsafe { alloc.deallocate(allocation_start.as_ptr(), total_allocation_size) };

    match status {
        Status::Busy => Err(stream),
        _ => Ok(stream),
    }
}
738
739pub fn reset(stream: &mut DeflateStream) -> ReturnCode {
740 let ret = reset_keep(stream);
741
742 if ret == ReturnCode::Ok {
743 lm_init(stream.state);
744 }
745
746 ret
747}
748
/// Resets the stream counters, checksum, and trees while keeping the current
/// compression parameters and allocated buffers (zlib's `deflateResetKeep`).
/// The window/hash state is reset separately by `lm_init` (see `reset`).
pub fn reset_keep(stream: &mut DeflateStream) -> ReturnCode {
    stream.total_in = 0;
    stream.total_out = 0;
    stream.msg = core::ptr::null_mut();
    stream.data_type = crate::c_api::Z_UNKNOWN;

    let state = &mut stream.state;

    state.bit_writer.pending.reset_keep();

    // NOTE(review): a negative `wrap` presumably marks "header already
    // written"; taking the absolute value starts the wrapper over — confirm
    // where `wrap` is negated elsewhere in this module.
    state.wrap = state.wrap.abs();

    state.status = match state.wrap {
        2 => Status::GZip,
        _ => Status::Init,
    };

    // gzip streams checksum with crc-32, raw/zlib streams with adler-32.
    stream.adler = match state.wrap {
        2 => {
            state.crc_fold = Crc32Fold::new();
            CRC32_INITIAL_VALUE as _
        }
        _ => ADLER32_INITIAL_VALUE as _,
    };

    // -2 means "no deflate call has happened yet" (checked in `params`).
    state.last_flush = -2;

    state.zng_tr_init();

    ReturnCode::Ok
}
781
/// (Re)initializes the longest-match search state: clears the hash table and
/// all match bookkeeping, then applies the tuning for the current level.
fn lm_init(state: &mut State) {
    state.window_size = 2 * state.w_size;

    state.head.as_mut_slice().fill(0);

    // Set the per-level tuning parameters (max_lazy, good/nice match, chain length).
    lm_set_level(state, state.level);

    state.strstart = 0;
    state.block_start = 0;
    state.lookahead = 0;
    state.insert = 0;
    state.prev_length = 0;
    state.match_available = false;
    state.match_start = 0;
    state.ins_h = 0;
}
800
801fn lm_set_level(state: &mut State, level: i8) {
802 state.max_lazy_match = CONFIGURATION_TABLE[level as usize].max_lazy;
803 state.good_match = CONFIGURATION_TABLE[level as usize].good_length;
804 state.nice_match = CONFIGURATION_TABLE[level as usize].nice_length;
805 state.max_chain_length = CONFIGURATION_TABLE[level as usize].max_chain;
806
807 state.hash_calc_variant = HashCalcVariant::for_max_chain_length(state.max_chain_length);
808 state.level = level;
809}
810
811pub fn tune(
812 stream: &mut DeflateStream,
813 good_length: usize,
814 max_lazy: usize,
815 nice_length: usize,
816 max_chain: usize,
817) -> ReturnCode {
818 stream.state.good_match = good_length as u16;
819 stream.state.max_lazy_match = max_lazy as u16;
820 stream.state.nice_match = nice_length as u16;
821 stream.state.max_chain_length = max_chain as u16;
822
823 ReturnCode::Ok
824}
825
/// A node of a Huffman tree, mirroring zlib's `ct_data`: two 16-bit fields
/// that are reused for different purposes depending on the tree-building phase.
/// Field `a` holds the symbol frequency or, later, the assigned code; field `b`
/// holds the parent ("dad") index or, later, the code length in bits.
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct Value {
    a: u16,
    b: u16,
}

impl Value {
    pub(crate) const fn new(a: u16, b: u16) -> Self {
        Self { a, b }
    }

    /// Frequency of the symbol (shares storage with `code`).
    #[inline(always)]
    pub(crate) const fn freq(self) -> u16 {
        self.a
    }

    /// Assigned Huffman code (shares storage with `freq`).
    #[inline(always)]
    pub(crate) const fn code(self) -> u16 {
        self.a
    }

    /// Parent node index in the heap (shares storage with `len`).
    #[inline(always)]
    pub(crate) const fn dad(self) -> u16 {
        self.b
    }

    /// Code length in bits (shares storage with `dad`).
    #[inline(always)]
    pub(crate) const fn len(self) -> u16 {
        self.b
    }

    pub(crate) fn freq_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    pub(crate) fn code_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    pub(crate) fn dad_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    pub(crate) fn len_mut(&mut self) -> &mut u16 {
        &mut self.b
    }
}
874
/// Number of length codes, not counting the special END_BLOCK code.
pub(crate) const LENGTH_CODES: usize = 29;

/// Number of literal byte values (0..=255).
const LITERALS: usize = 256;

/// Number of literal/length codes, including the END_BLOCK code.
pub(crate) const L_CODES: usize = LITERALS + 1 + LENGTH_CODES;

/// Number of distance codes.
pub(crate) const D_CODES: usize = 30;

/// Number of codes used to transfer the bit lengths of the other trees.
const BL_CODES: usize = 19;

/// Maximum heap size used while constructing a Huffman tree.
const HEAP_SIZE: usize = 2 * L_CODES + 1;

/// No code may be longer than MAX_BITS bits.
const MAX_BITS: usize = 15;

/// No bit-length code may be longer than MAX_BL_BITS bits.
const MAX_BL_BITS: usize = 7;

/// Size of the distance-code lookup table.
pub(crate) const DIST_CODE_LEN: usize = 512;
900
901struct BitWriter<'a> {
902 pub(crate) pending: Pending<'a>, pub(crate) bit_buffer: u64,
904 pub(crate) bits_used: u8,
905
906 #[cfg(feature = "ZLIB_DEBUG")]
908 compressed_len: usize,
909 #[cfg(feature = "ZLIB_DEBUG")]
911 bits_sent: usize,
912}
913
/// Computes the bit pattern and bit count for a match length: the Huffman code
/// for the length symbol followed by the extra bits that pin down the exact
/// length. `lc` is the biased length (match length minus the minimum match
/// length — TODO confirm against callers).
#[inline]
const fn encode_len(ltree: &[Value], lc: u8) -> (u64, usize) {
    let mut lc = lc as usize;

    // Map the biased length to its length code; length symbols follow the
    // literals and the END_BLOCK symbol in the literal/length alphabet.
    let code = self::trees_tbl::LENGTH_CODE[lc] as usize;
    let c = code + LITERALS + 1;
    assert!(c < L_CODES, "bad l_code");
    let lnode = ltree[c];

    // Start with the Huffman code for the symbol ...
    let mut match_bits: u64 = lnode.code() as u64;
    let mut match_bits_len = lnode.len() as usize;
    // ... then append the extra bits distinguishing lengths within this code.
    let extra = StaticTreeDesc::EXTRA_LBITS[code] as usize;
    if extra != 0 {
        lc -= self::trees_tbl::BASE_LENGTH[code] as usize;
        match_bits |= (lc as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}
936
/// Computes the bit pattern and bit count for a match distance: the Huffman
/// code for the distance symbol followed by the extra bits for the exact
/// distance. `dist` is the 1-based match distance.
#[inline]
const fn encode_dist(dtree: &[Value], mut dist: u16) -> (u64, usize) {
    // Convert to zlib's internal 0-based distance.
    dist -= 1;

    let code = State::d_code(dist as usize) as usize;
    assert!(code < D_CODES, "bad d_code");
    let dnode = dtree[code];

    // Huffman code for the distance symbol ...
    let mut match_bits = dnode.code() as u64;
    let mut match_bits_len = dnode.len() as usize;
    // ... plus the extra bits selecting the exact distance within the code's range.
    let extra = StaticTreeDesc::EXTRA_DBITS[code] as usize;
    if extra != 0 {
        dist -= self::trees_tbl::BASE_DIST[code];
        match_bits |= (dist as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}
957
impl<'a> BitWriter<'a> {
    /// Width of the bit accumulator, in bits.
    pub(crate) const BIT_BUF_SIZE: u8 = 64;

    /// Creates a writer with an empty bit accumulator on top of `pending`.
    fn from_pending(pending: Pending<'a>) -> Self {
        Self {
            pending,
            bit_buffer: 0,
            bits_used: 0,

            #[cfg(feature = "ZLIB_DEBUG")]
            compressed_len: 0,
            #[cfg(feature = "ZLIB_DEBUG")]
            bits_sent: 0,
        }
    }

    /// Moves all *complete* bytes from the bit accumulator into `pending`,
    /// keeping any trailing partial byte (up to 7 bits) in the accumulator.
    fn flush_bits(&mut self) {
        debug_assert!(self.bits_used <= 64);
        // Bits to drop: `bits_used` rounded down to whole bytes. The
        // `saturating_sub(7)` keeps a final partial byte in the accumulator.
        let removed = self.bits_used.saturating_sub(7).next_multiple_of(8);
        let keep_bytes = self.bits_used / 8;

        // Bytes leave the accumulator in little-endian (oldest-bits-first) order.
        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used -= removed;
        // `checked_shr` because a shift by the full 64 bits would overflow.
        self.bit_buffer = self.bit_buffer.checked_shr(removed as u32).unwrap_or(0);
    }

    /// Flushes everything including a trailing partial byte (zero-padded),
    /// leaving the accumulator empty and byte-aligned.
    fn emit_align(&mut self) {
        debug_assert!(self.bits_used <= 64);
        // Round *up* so a partial byte is emitted too.
        let keep_bytes = self.bits_used.div_ceil(8);
        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used = 0;
        self.bit_buffer = 0;

        self.sent_bits_align();
    }

    /// Trace hook for each `send_bits` call (no-op unless tracing is enabled).
    fn send_bits_trace(&self, _value: u64, _len: u8) {
        trace!(" l {:>2} v {:>4x} ", _len, _value);
    }

    /// Adds to the compressed-length counter (debug builds only).
    fn cmpr_bits_add(&mut self, _len: usize) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.compressed_len += _len;
        }
    }

    /// Rounds the compressed-length counter up to a byte boundary (debug only).
    fn cmpr_bits_align(&mut self) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.compressed_len = self.compressed_len.next_multiple_of(8);
        }
    }

    /// Adds to the bits-sent counter (debug builds only).
    fn sent_bits_add(&mut self, _len: usize) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bits_sent += _len;
        }
    }

    /// Rounds the bits-sent counter up to a byte boundary (debug builds only).
    fn sent_bits_align(&mut self) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bits_sent = self.bits_sent.next_multiple_of(8);
        }
    }

    /// Appends the low `len` bits of `val` to the output bit stream.
    #[inline(always)]
    fn send_bits(&mut self, val: u64, len: u8) {
        debug_assert!(len <= 64);
        debug_assert!(self.bits_used <= 64);

        let total_bits = len + self.bits_used;

        self.send_bits_trace(val, len);
        self.sent_bits_add(len as usize);

        if total_bits < Self::BIT_BUF_SIZE {
            // Fast path: everything fits in the accumulator.
            self.bit_buffer |= val << self.bits_used;
            self.bits_used = total_bits;
        } else {
            self.send_bits_overflow(val, total_bits);
        }
    }

    /// Slow path of `send_bits`: the accumulator overflows, so emit a full 8
    /// bytes and keep the remainder of `val` in the (now empty) accumulator.
    fn send_bits_overflow(&mut self, val: u64, total_bits: u8) {
        if self.bits_used == Self::BIT_BUF_SIZE {
            // Accumulator exactly full: flush it and start fresh with `val`.
            self.pending.extend(&self.bit_buffer.to_le_bytes());
            self.bit_buffer = val;
            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
        } else {
            // Fill the accumulator with the low bits of `val`, flush it, then
            // keep the high bits that did not fit.
            self.bit_buffer |= val << self.bits_used;
            self.pending.extend(&self.bit_buffer.to_le_bytes());
            self.bit_buffer = val >> (Self::BIT_BUF_SIZE - self.bits_used);
            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
        }
    }

    /// Emits the Huffman code for symbol `code` from `tree`.
    fn send_code(&mut self, code: usize, tree: &[Value]) {
        let node = tree[code];
        self.send_bits(node.code() as u64, node.len() as u8)
    }

    /// Emits an empty static block and flushes, forcing byte alignment of the
    /// output (used by `deflate` for certain flush modes).
    pub fn align(&mut self) {
        self.emit_tree(BlockType::StaticTrees, false);
        self.emit_end_block(&STATIC_LTREE, false);
        self.flush_bits();
    }

    /// Emits the 3-bit block header: 2-bit block type plus the "last block" flag.
    pub(crate) fn emit_tree(&mut self, block_type: BlockType, is_last_block: bool) {
        let header_bits = ((block_type as u64) << 1) | (is_last_block as u64);
        self.send_bits(header_bits, 3);
        trace!("\n--- Emit Tree: Last: {}\n", is_last_block as u8);
    }

    /// Emits the END_BLOCK symbol, and byte-aligns the output if this was the
    /// final block of the stream.
    pub(crate) fn emit_end_block_and_align(&mut self, ltree: &[Value], is_last_block: bool) {
        self.emit_end_block(ltree, is_last_block);

        if is_last_block {
            self.emit_align();
        }
    }

    /// Emits the END_BLOCK symbol (code 256 in the literal/length alphabet).
    fn emit_end_block(&mut self, ltree: &[Value], _is_last_block: bool) {
        const END_BLOCK: usize = 256;
        self.send_code(END_BLOCK, ltree);

        trace!(
            "\n+++ Emit End Block: Last: {} Pending: {} Total Out: {}\n",
            _is_last_block as u8,
            self.pending.pending().len(),
            "<unknown>"
        );
    }

    /// Emits a literal byte; returns the number of bits written.
    pub(crate) fn emit_lit(&mut self, ltree: &[Value], c: u8) -> u16 {
        self.send_code(c as usize, ltree);

        #[cfg(feature = "ZLIB_DEBUG")]
        if let Some(c) = char::from_u32(c as u32) {
            if isgraph(c as u8) {
                trace!(" '{}' ", c);
            }
        }

        ltree[c as usize].len()
    }

    /// Emits a (length, distance) match pair using the given dynamic trees;
    /// returns the total number of bits written.
    pub(crate) fn emit_dist(
        &mut self,
        ltree: &[Value],
        dtree: &[Value],
        lc: u8,
        dist: u16,
    ) -> usize {
        let (mut match_bits, mut match_bits_len) = encode_len(ltree, lc);

        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);

        // Concatenate length and distance encodings into one `send_bits` call.
        match_bits |= dist_match_bits << match_bits_len;
        match_bits_len += dist_match_bits_len;

        self.send_bits(match_bits, match_bits_len as u8);

        match_bits_len
    }

    /// Emits a (length, distance) match pair using the *static* trees; the
    /// length encoding is read from a precomputed table.
    pub(crate) fn emit_dist_static(&mut self, lc: u8, dist: u16) -> usize {
        let precomputed_len = trees_tbl::STATIC_LTREE_ENCODINGS[lc as usize];
        let mut match_bits = precomputed_len.code() as u64;
        let mut match_bits_len = precomputed_len.len() as usize;

        let dtree = self::trees_tbl::STATIC_DTREE.as_slice();
        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);

        match_bits |= dist_match_bits << match_bits_len;
        match_bits_len += dist_match_bits_len;

        self.send_bits(match_bits, match_bits_len as u8);

        match_bits_len
    }

    /// Emits every buffered symbol (literals and matches) followed by END_BLOCK.
    fn compress_block_help(&mut self, sym_buf: &SymBuf, ltree: &[Value], dtree: &[Value]) {
        for (dist, lc) in sym_buf.iter() {
            // A distance of 0 marks a literal; anything else is a match.
            match dist {
                0 => self.emit_lit(ltree, lc) as usize,
                _ => self.emit_dist(ltree, dtree, lc, dist),
            };
        }

        self.emit_end_block(ltree, false)
    }

    /// Sends the code lengths of `tree` (symbols 0..=max_code), run-length
    /// encoded with the bit-length tree `bl_tree` (RFC 1951 §3.2.7: repeats use
    /// the REP_3_6 / REPZ_3_10 / REPZ_11_138 codes).
    fn send_tree(&mut self, tree: &[Value], bl_tree: &[Value], max_code: usize) {
        // Last code length emitted (-1 = none yet).
        let mut prevlen: isize = -1;
        // Length of the current code.
        let mut curlen;
        // Length of the next code.
        let mut nextlen = tree[0].len();
        // Repeat count of the current length.
        let mut count = 0;
        // Max/min repeat counts for the applicable repeat code.
        let mut max_count = 7;
        let mut min_count = 4;

        if nextlen == 0 {
            max_count = 138;
            min_count = 3;
        }

        for n in 0..=max_code {
            curlen = nextlen;
            nextlen = tree[n + 1].len();
            count += 1;
            if count < max_count && curlen == nextlen {
                // Still inside a run; keep counting.
                continue;
            } else if count < min_count {
                // Run too short for a repeat code: emit the length literally.
                loop {
                    self.send_code(curlen as usize, bl_tree);

                    count -= 1;
                    if count == 0 {
                        break;
                    }
                }
            } else if curlen != 0 {
                // Repeat of a non-zero length: emit it once, then REP_3_6.
                if curlen as isize != prevlen {
                    self.send_code(curlen as usize, bl_tree);
                    count -= 1;
                }
                assert!((3..=6).contains(&count), " 3_6?");
                self.send_code(REP_3_6, bl_tree);
                self.send_bits(count - 3, 2);
            } else if count <= 10 {
                // Short run of zeros.
                self.send_code(REPZ_3_10, bl_tree);
                self.send_bits(count - 3, 3);
            } else {
                // Long run of zeros.
                self.send_code(REPZ_11_138, bl_tree);
                self.send_bits(count - 11, 7);
            }

            count = 0;
            prevlen = curlen as isize;

            // Choose the repeat-count limits for the next run.
            if nextlen == 0 {
                max_count = 138;
                min_count = 3;
            } else if curlen == nextlen {
                max_count = 6;
                min_count = 3;
            } else {
                max_count = 7;
                min_count = 4;
            }
        }
    }
}
1223
/// Internal state of the deflate algorithm.
///
/// `#[repr(C, align(64))]`: field order is deliberate — the `_cache_line_*`
/// markers and `_padding_0` group hot fields onto shared cache lines. Do not
/// reorder fields casually.
#[repr(C, align(64))]
pub(crate) struct State<'a> {
    // Current phase of the stream (header, busy, finished, ...).
    status: Status,

    // `flush` value of the previous deflate call; -2 = no call yet (see `reset_keep`).
    last_flush: i8,
    // Wrapper: 0 = raw, 1 = zlib (adler-32), 2 = gzip (crc-32). NOTE(review):
    // may be negated to mark the header as written — confirm.
    pub(crate) wrap: i8,
    pub(crate) strategy: Strategy,
    // Compression level (0..=9).
    pub(crate) level: i8,

    // Whether a block is currently open for appending (see block emission code).
    pub(crate) block_open: u8,

    // Which hash function variant is in use (depends on `max_chain_length`).
    pub(crate) hash_calc_variant: HashCalcVariant,

    // Lazy matching: a previous match was deferred and may still be emitted.
    pub(crate) match_available: bool,
    // Matches of at least this length reduce the search effort.
    pub(crate) good_match: u16,

    // Stop searching once a match of at least this length is found.
    pub(crate) nice_match: u16,

    // Start of the current best match in the window.
    pub(crate) match_start: Pos,
    // Start of the previous (deferred) match.
    pub(crate) prev_match: Pos,
    // Start of the string to insert / current position in the window.
    pub(crate) strstart: usize,
    // The sliding input window.
    pub(crate) window: Window<'a>,
    // Window size in bytes (a power of two; see `w_bits`).
    pub(crate) w_size: usize,
    // Number of valid bytes ahead of `strstart` in the window.
    pub(crate) lookahead: usize,
    _cache_line_0: (),

    // Hash chains: `prev[p & w_mask]` links position `p` to the previous
    // position with the same hash.
    pub(crate) prev: WeakSliceMut<'a, u16>,
    // Hash table: head of each hash chain.
    pub(crate) head: WeakArrayMut<'a, u16, HASH_SIZE>,

    // Length of the previous (deferred) match.
    pub(crate) prev_length: u16,

    // Limit on how many hash-chain links to follow per search.
    pub(crate) max_chain_length: u16,

    // Only try lazy matching for matches shorter than this.
    pub(crate) max_lazy_match: u16,

    // NOTE(review): used as a flag around level-0 handling in `params`
    // (0 / 1 / more) — confirm exact semantics against zlib-ng.
    pub(crate) matches: u8,

    // Window offset where the current block started (may be negative after slides).
    pub(crate) block_start: isize,

    // Buffered (distance, literal/length) symbol pairs for the current block.
    pub(crate) sym_buf: SymBuf<'a>,

    _cache_line_1: (),

    // Size of the symbol buffer; the pending buffer is 4x this (see `pending_buf_size`).
    lit_bufsize: usize,

    // 2 * w_size (set by `lm_init`).
    pub(crate) window_size: usize,

    // Bit-level output writer on top of the pending byte buffer.
    bit_writer: BitWriter<'a>,

    _cache_line_2: (),

    // Bit length of the current block with optimal (dynamic) trees.
    opt_len: usize,
    // Bit length of the current block with static trees.
    static_len: usize,

    // Bytes at the end of the window still to be inserted into the hash table.
    pub(crate) insert: usize,

    // Rolling hash of the string at `strstart`.
    pub(crate) ins_h: u32,

    // User-supplied gzip header to emit (gzip streams only).
    gzhead: Option<&'a mut gz_header>,
    // Progress index while writing the gzip header fields.
    gzindex: usize,

    _padding_0: [u8; 16],

    _cache_line_3: (),

    // Folded crc-32 state (gzip streams).
    crc_fold: crate::crc32::Crc32Fold,

    // Base pointer and size of the single backing allocation (see `init`/`end`).
    allocation_start: NonNull<u8>,
    total_allocation_size: usize,

    // Huffman tree descriptors: literal/length, distance, and bit-length trees.
    l_desc: TreeDesc<HEAP_SIZE>,
    d_desc: TreeDesc<{ 2 * D_CODES + 1 }>,
    bl_desc: TreeDesc<{ 2 * BL_CODES + 1 }>,
}
1359
/// Compression strategy, mirroring zlib's `Z_DEFAULT_STRATEGY` .. `Z_FIXED`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Strategy {
    #[default]
    Default = 0,
    Filtered = 1,
    HuffmanOnly = 2,
    Rle = 3,
    Fixed = 4,
}

impl TryFrom<i32> for Strategy {
    type Error = ();

    /// Maps the C-API integer (0..=4) to a `Strategy`; anything else is an error.
    fn try_from(value: i32) -> Result<Self, Self::Error> {
        // Indexed by discriminant, so `VARIANTS[i] as i32 == i`.
        const VARIANTS: [Strategy; 5] = [
            Strategy::Default,
            Strategy::Filtered,
            Strategy::HuffmanOnly,
            Strategy::Rle,
            Strategy::Fixed,
        ];

        usize::try_from(value)
            .ok()
            .and_then(|index| VARIANTS.get(index).copied())
            .ok_or(())
    }
}
1385
/// Heuristic classification of the input, stored into `stream.data_type`
/// (see `State::detect_data_type`). Discriminants match the C API's
/// `Z_BINARY` / `Z_TEXT` / `Z_UNKNOWN`.
#[derive(Debug, PartialEq, Eq)]
enum DataType {
    Binary = 0,
    Text = 1,
    Unknown = 2,
}
1392
impl<'a> State<'a> {
    pub const BIT_BUF_SIZE: u8 = BitWriter::BIT_BUF_SIZE;

    /// log2 of the window size (exact because `w_size` is a power of two,
    /// as `w_mask` relies on).
    pub(crate) fn w_bits(&self) -> u32 {
        self.w_size.trailing_zeros()
    }

    /// Bit mask for reducing a position modulo the window size.
    pub(crate) fn w_mask(&self) -> usize {
        self.w_size - 1
    }

    /// Largest back-reference distance that still leaves room for lookahead.
    pub(crate) fn max_dist(&self) -> usize {
        self.w_size - MIN_LOOKAHEAD
    }

    /// Insert new strings in the hash table only if the match length is not
    /// greater than this length.
    pub(crate) fn max_insert_length(&self) -> usize {
        self.max_lazy_match as usize
    }

    /// Size of the pending output buffer (4x the literal buffer size).
    pub(crate) fn pending_buf_size(&self) -> usize {
        self.lit_bufsize * 4
    }

    /// Advance the rolling hash `h` with the next input value, dispatching on
    /// the configured hash implementation.
    #[inline(always)]
    pub(crate) fn update_hash(&self, h: u32, val: u32) -> u32 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::update_hash(h, val),
            HashCalcVariant::Roll => RollHashCalc::update_hash(h, val),
        }
    }

    /// Hash the string at window position `string` and link it into the hash
    /// chains; returns the previous head of its chain.
    #[inline(always)]
    pub(crate) fn quick_insert_string(&mut self, string: usize) -> u16 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::quick_insert_string(self, string),
            HashCalcVariant::Roll => RollHashCalc::quick_insert_string(self, string),
        }
    }

    /// Insert `count` consecutive strings starting at `string` into the hash
    /// chains.
    #[inline(always)]
    pub(crate) fn insert_string(&mut self, string: usize, count: usize) {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::insert_string(self, string, count),
            HashCalcVariant::Roll => RollHashCalc::insert_string(self, string, count),
        }
    }

    /// Record a literal byte; returns true when the symbol buffer is full
    /// enough that the current block should be flushed.
    #[inline(always)]
    pub(crate) fn tally_lit(&mut self, unmatched: u8) -> bool {
        Self::tally_lit_help(&mut self.sym_buf, &mut self.l_desc, unmatched)
    }

    /// Free-function form of [`Self::tally_lit`] so callers can split borrows
    /// of the symbol buffer and literal tree.
    pub(crate) fn tally_lit_help(
        sym_buf: &mut SymBuf,
        l_desc: &mut TreeDesc<HEAP_SIZE>,
        unmatched: u8,
    ) -> bool {
        // Compile-time check that every u8 literal is a valid tree index.
        const _VERIFY: () = {
            assert!(
                u8::MAX as usize <= STD_MAX_MATCH - STD_MIN_MATCH,
                "tally_lit: bad literal"
            );
        };

        sym_buf.push_lit(unmatched);

        // Count the literal's frequency for the dynamic Huffman tree.
        *l_desc.dyn_tree[unmatched as usize].freq_mut() += 1;

        sym_buf.should_flush_block()
    }

    /// Map a (0-based) match distance to its distance code.
    const fn d_code(dist: usize) -> u8 {
        // Compile-time check that the lookup table only yields valid codes.
        const _VERIFY: () = {
            let mut i = 0;
            while i < trees_tbl::DIST_CODE.len() {
                assert!(trees_tbl::DIST_CODE[i] < D_CODES as u8);
                i += 1;
            }
        };

        // Distances >= 256 are looked up via the table's second half.
        let index = if dist < 256 { dist } else { 256 + (dist >> 7) };
        self::trees_tbl::DIST_CODE[index]
    }

    /// Record a (distance, length) match; returns true when the current block
    /// should be flushed. Panics (via assert) on an out-of-range distance.
    #[inline(always)]
    pub(crate) fn tally_dist(&mut self, mut dist: usize, len: usize) -> bool {
        self.sym_buf.push_dist(dist as u16, len as u8);

        self.matches = self.matches.saturating_add(1);
        // Distance and length codes are indexed from 0.
        dist -= 1;

        assert!(dist < self.max_dist(), "tally_dist: bad match");

        let index = self::trees_tbl::LENGTH_CODE[len] as usize + LITERALS + 1;
        *self.l_desc.dyn_tree[index].freq_mut() += 1;

        *self.d_desc.dyn_tree[Self::d_code(dist) as usize].freq_mut() += 1;

        self.sym_buf.should_flush_block()
    }

    /// Classify the data as Text or Binary from the literal frequencies.
    fn detect_data_type(dyn_tree: &[Value]) -> DataType {
        // Bit set of byte values 0..32 considered "block-listed" (non-text)
        // control characters.
        const NON_TEXT: u64 = 0xf3ffc07f;
        let mut mask = NON_TEXT;

        // Any occurrence of a block-listed control character means binary.
        for value in &dyn_tree[0..32] {
            if (mask & 1) != 0 && value.freq() != 0 {
                return DataType::Binary;
            }

            mask >>= 1;
        }

        // TAB (9), LF (10) or CR (13) strongly suggest text.
        if dyn_tree[9].freq() != 0 || dyn_tree[10].freq() != 0 || dyn_tree[13].freq() != 0 {
            return DataType::Text;
        }

        // Any printable character also suggests text.
        if dyn_tree[32..LITERALS].iter().any(|v| v.freq() != 0) {
            return DataType::Text;
        }

        // No text-ish byte seen at all: call it binary.
        DataType::Binary
    }

    /// Emit the buffered symbols using the fixed (static) Huffman trees.
    fn compress_block_static_trees(&mut self) {
        let ltree = self::trees_tbl::STATIC_LTREE.as_slice();
        for (dist, lc) in self.sym_buf.iter() {
            // dist == 0 marks a literal; otherwise (dist, lc) is a match.
            match dist {
                0 => self.bit_writer.emit_lit(ltree, lc) as usize,
                _ => self.bit_writer.emit_dist_static(lc, dist),
            };
        }

        self.bit_writer.emit_end_block(ltree, false)
    }

    /// Emit the buffered symbols using the dynamic Huffman trees.
    fn compress_block_dynamic_trees(&mut self) {
        self.bit_writer.compress_block_help(
            &self.sym_buf,
            &self.l_desc.dyn_tree,
            &self.d_desc.dyn_tree,
        );
    }

    /// Build the 2-byte zlib (RFC 1950) stream header: method/window bits,
    /// level flags, preset-dictionary flag, and the mod-31 check bits.
    fn header(&self) -> u16 {
        const PRESET_DICT: u16 = 0x20;

        const Z_DEFLATED: u16 = 8;

        // A non-zero strstart at header time means a dictionary was preset.
        let dict = match self.strstart {
            0 => 0,
            _ => PRESET_DICT,
        };

        let h = ((Z_DEFLATED + ((self.w_bits() as u16 - 8) << 4)) << 8)
            | (self.level_flags() << 6)
            | dict;

        // Make the 16-bit header a multiple of 31 (FCHECK).
        h + 31 - (h % 31)
    }

    /// 2-bit FLEVEL field of the zlib header, derived from level/strategy.
    fn level_flags(&self) -> u16 {
        if self.strategy >= Strategy::HuffmanOnly || self.level < 2 {
            0
        } else if self.level < 6 {
            1
        } else if self.level == 6 {
            2
        } else {
            3
        }
    }

    /// Initialize the tree machinery for a fresh stream.
    fn zng_tr_init(&mut self) {
        self.l_desc.stat_desc = &StaticTreeDesc::L;

        self.d_desc.stat_desc = &StaticTreeDesc::D;

        self.bl_desc.stat_desc = &StaticTreeDesc::BL;

        self.bit_writer.bit_buffer = 0;
        self.bit_writer.bits_used = 0;

        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bit_writer.compressed_len = 0;
            self.bit_writer.bits_sent = 0;
        }

        // Initialize the first block of the first file.
        self.init_block();
    }

    /// Reset all per-block state: frequencies, lengths, and the symbol buffer.
    fn init_block(&mut self) {
        for value in &mut self.l_desc.dyn_tree[..L_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.d_desc.dyn_tree[..D_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.bl_desc.dyn_tree[..BL_CODES] {
            *value.freq_mut() = 0;
        }

        const END_BLOCK: usize = 256;

        // The end-of-block symbol always occurs exactly once per block.
        *self.l_desc.dyn_tree[END_BLOCK].freq_mut() = 1;
        self.opt_len = 0;
        self.static_len = 0;
        self.sym_buf.clear();
        self.matches = 0;
    }
}
1632
/// Deflate stream status, driving the header/body/trailer state machine in
/// `deflate`. The gzip header phases (Extra..Hcrc) are traversed in order
/// before entering `Busy`.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Status {
    /// Stream initialized; zlib header (if any) not yet written.
    Init = 1,

    /// Gzip header phases: magic/flags, then extra field, file name,
    /// comment, and header CRC.
    GZip = 4,
    Extra = 5,
    Name = 6,
    Comment = 7,
    Hcrc = 8,

    /// Headers done; compressing data.
    Busy = 2,
    /// Finish requested; draining remaining output.
    Finish = 3,
}
1647
/// Rank a flush value so flushes can be compared by "strength"
/// (`Z_BLOCK`-style flushes, i.e. values above 4, rank between the others).
const fn rank_flush(f: i8) -> i8 {
    let offset = if f > 4 { 9 } else { 0 };
    f * 2 - offset
}
1652
/// Result of one run of a block compression routine (see `algorithm::run`),
/// telling `deflate` whether to continue, flush, or finish.
#[derive(Debug)]
pub(crate) enum BlockState {
    /// Block not completed, need more input or more output.
    NeedMore = 0,
    /// Block flushed, but deflate is not done yet.
    BlockDone = 1,
    /// Finish started, need only more output at next deflate.
    FinishStarted = 2,
    /// Finish done, accept no more input or output.
    FinishDone = 3,
}
1664
/// Maximum length of a stored (uncompressed) deflate block (RFC 1951 LEN is 16 bits).
pub(crate) const MAX_STORED: usize = 65535;

/// Copy up to `size` bytes from the stream's input into the window at
/// `offset`, updating the running checksum (crc32 for gzip wrap, adler32 for
/// zlib wrap) and the stream's input bookkeeping. Returns the number of
/// bytes actually read (0 if no input is available).
pub(crate) fn read_buf_window(stream: &mut DeflateStream, offset: usize, size: usize) -> usize {
    let len = Ord::min(stream.avail_in as usize, size);

    if len == 0 {
        return 0;
    }

    stream.avail_in -= len as u32;

    if stream.state.wrap == 2 {
        // gzip wrapper: fold the new bytes into the CRC-32 state.
        let window = &mut stream.state.window;
        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };

        let data = &stream.state.window.filled()[offset..][..len];
        stream.state.crc_fold.fold(data, CRC32_INITIAL_VALUE);
    } else if stream.state.wrap == 1 {
        // zlib wrapper: update the running adler32 checksum.
        let window = &mut stream.state.window;
        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };

        let data = &stream.state.window.filled()[offset..][..len];
        stream.adler = adler32(stream.adler as u32, data) as _;
    } else {
        // raw deflate: no checksum to maintain.
        let window = &mut stream.state.window;
        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
    }

    stream.next_in = stream.next_in.wrapping_add(len);
    stream.total_in += len as crate::c_api::z_size;

    len
}
1706
/// The 2-bit BTYPE field of a deflate block header (RFC 1951).
pub(crate) enum BlockType {
    StoredBlock = 0,
    StaticTrees = 1,
    DynamicTrees = 2,
}
1712
1713pub(crate) fn zng_tr_stored_block(
1714 state: &mut State,
1715 window_range: core::ops::Range<usize>,
1716 is_last: bool,
1717) {
1718 state.bit_writer.emit_tree(BlockType::StoredBlock, is_last);
1720
1721 state.bit_writer.emit_align();
1723
1724 state.bit_writer.cmpr_bits_align();
1725
1726 let input_block: &[u8] = &state.window.filled()[window_range];
1727 let stored_len = input_block.len() as u16;
1728
1729 state.bit_writer.pending.extend(&stored_len.to_le_bytes());
1730 state
1731 .bit_writer
1732 .pending
1733 .extend(&(!stored_len).to_le_bytes());
1734
1735 state.bit_writer.cmpr_bits_add(32);
1736 state.bit_writer.sent_bits_add(32);
1737 if stored_len > 0 {
1738 state.bit_writer.pending.extend(input_block);
1739 state.bit_writer.cmpr_bits_add((stored_len << 3) as usize);
1740 state.bit_writer.sent_bits_add((stored_len << 3) as usize);
1741 }
1742}
1743
/// Smallest match length encodable by deflate (RFC 1951).
pub(crate) const STD_MIN_MATCH: usize = 3;
/// Largest match length encodable by deflate (RFC 1951).
pub(crate) const STD_MAX_MATCH: usize = 258;

/// Minimum match length this implementation actually searches for.
pub(crate) const WANT_MIN_MATCH: usize = 4;

/// Minimum lookahead needed so a maximal match plus the next hash can always
/// be read without further window checks.
pub(crate) const MIN_LOOKAHEAD: usize = STD_MAX_MATCH + STD_MIN_MATCH + 1;
1753
/// Fill the window when the lookahead becomes insufficient.
///
/// Reads new input into the upper half of the window, and slides the window
/// (moving the upper half down and rebasing all positions) when `strstart`
/// has advanced past the point where the lower half can still be referenced.
/// Updates `strstart`, `lookahead`, `insert`, and the hash chains.
#[inline]
pub(crate) fn fill_window(stream: &mut DeflateStream) {
    debug_assert!(stream.state.lookahead < MIN_LOOKAHEAD);

    let wsize = stream.state.w_size;

    loop {
        let state = &mut *stream.state;
        // Free space at the end of the window.
        let mut more = state.window_size - state.lookahead - state.strstart;

        // If the window is almost full, slide it: move the upper half to the
        // lower half so matches can keep referencing up to w_size back.
        if state.strstart >= wsize + state.max_dist() {
            let (old, new) = state.window.filled_mut()[..2 * wsize].split_at_mut(wsize);
            old.copy_from_slice(new);

            // Rebase match_start; if it pointed into the discarded half,
            // forget the pending match.
            if state.match_start >= wsize as u16 {
                state.match_start -= wsize as u16;
            } else {
                state.match_start = 0;
                state.prev_length = 0;
            }

            state.strstart -= wsize; state.block_start -= wsize as isize;
            state.insert = Ord::min(state.insert, state.strstart);

            // Rebase all hash chain entries by wsize.
            self::slide_hash::slide_hash(state);

            more += wsize;
        }

        if stream.avail_in == 0 {
            break;
        }

        assert!(more >= 2, "more < 2");

        let n = read_buf_window(stream, stream.state.strstart + stream.state.lookahead, more);

        let state = &mut *stream.state;
        state.lookahead += n;

        // Initialize/catch up the hash with the bytes now available.
        if state.lookahead + state.insert >= STD_MIN_MATCH {
            let string = state.strstart - state.insert;
            if state.max_chain_length > 1024 {
                // High-effort levels: re-seed ins_h from the first two bytes.
                let v0 = state.window.filled()[string] as u32;
                let v1 = state.window.filled()[string + 1] as u32;
                state.ins_h = state.update_hash(v0, v1);
            } else if string >= 1 {
                state.quick_insert_string(string + 2 - STD_MIN_MATCH);
            }
            let mut count = state.insert;
            if state.lookahead == 1 {
                count -= 1;
            }
            if count > 0 {
                state.insert_string(string, count);
                state.insert -= count;
            }
        }

        // Keep going until we have enough lookahead or run out of input.
        if !(stream.state.lookahead < MIN_LOOKAHEAD && stream.avail_in != 0) {
            break;
        }
    }

    assert!(
        stream.state.strstart <= stream.state.window_size - MIN_LOOKAHEAD,
        "not enough room for search"
    );
}
1841
/// Static (compile-time) description of one of the three deflate Huffman
/// tree families: the fixed codes plus the metadata needed to build the
/// dynamic counterpart.
pub(crate) struct StaticTreeDesc {
    /// The fixed-code tree itself (empty for the bit-length tree).
    pub(crate) static_tree: &'static [Value],
    /// Extra bits carried by each code.
    extra_bits: &'static [u8],
    /// First code index that has extra bits.
    extra_base: usize,
    /// Number of codes in the tree.
    elems: usize,
    /// Maximum permitted code length in bits.
    max_length: u16,
}
1854
impl StaticTreeDesc {
    const EMPTY: Self = Self {
        static_tree: &[],
        extra_bits: &[],
        extra_base: 0,
        elems: 0,
        max_length: 0,
    };

    /// Extra bits for each length code (RFC 1951 length alphabet).
    const EXTRA_LBITS: [u8; LENGTH_CODES] = [
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0,
    ];

    /// Extra bits for each distance code.
    const EXTRA_DBITS: [u8; D_CODES] = [
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12,
        13, 13,
    ];

    /// Extra bits for each bit-length code.
    const EXTRA_BLBITS: [u8; BL_CODES] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7];

    /// Order in which the bit-length code lengths are transmitted, chosen so
    /// the likely-zero trailing entries can be omitted.
    const BL_ORDER: [u8; BL_CODES] = [
        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
    ];

    /// Descriptor for the literal/length tree.
    pub(crate) const L: Self = Self {
        static_tree: &self::trees_tbl::STATIC_LTREE,
        extra_bits: &Self::EXTRA_LBITS,
        extra_base: LITERALS + 1,
        elems: L_CODES,
        max_length: MAX_BITS as u16,
    };

    /// Descriptor for the distance tree.
    pub(crate) const D: Self = Self {
        static_tree: &self::trees_tbl::STATIC_DTREE,
        extra_bits: &Self::EXTRA_DBITS,
        extra_base: 0,
        elems: D_CODES,
        max_length: MAX_BITS as u16,
    };

    /// Descriptor for the bit-length tree (has no fixed counterpart).
    pub(crate) const BL: Self = Self {
        static_tree: &[],
        extra_bits: &Self::EXTRA_BLBITS,
        extra_base: 0,
        elems: BL_CODES,
        max_length: MAX_BL_BITS as u16,
    };
}
1908
/// A dynamic Huffman tree under construction, paired with the static
/// description of its family.
#[derive(Clone)]
pub(crate) struct TreeDesc<const N: usize> {
    /// Frequencies, then codes/lengths once the tree has been built.
    dyn_tree: [Value; N],
    /// Largest code with non-zero frequency.
    max_code: usize,
    stat_desc: &'static StaticTreeDesc,
}
1915
impl<const N: usize> TreeDesc<N> {
    /// All-zero descriptor; used as a placeholder when temporarily swapping a
    /// tree out of `State` (see `build_bl_tree` / `zng_tr_flush_block`).
    const EMPTY: Self = Self {
        dyn_tree: [Value::new(0, 0); N],
        max_code: 0,
        stat_desc: &StaticTreeDesc::EMPTY,
    };
}
1923
/// Build an optimal Huffman tree for `desc` from its frequencies: computes
/// `max_code`, code lengths (bounded by the family's `max_length`), and the
/// codes themselves. Also updates `state.opt_len`/`state.static_len`.
fn build_tree<const N: usize>(state: &mut State, desc: &mut TreeDesc<N>) {
    let tree = &mut desc.dyn_tree;
    let stree = desc.stat_desc.static_tree;
    let elements = desc.stat_desc.elems;

    let mut heap = Heap::new();
    let mut max_code = heap.initialize(&mut tree[..elements]);

    // The deflate format requires at least two codes per tree; force in
    // dummy codes (frequency 1) if fewer symbols occurred.
    while heap.heap_len < 2 {
        heap.heap_len += 1;
        let node = if max_code < 2 {
            max_code += 1;
            max_code
        } else {
            0
        };

        debug_assert!(node >= 0);
        let node = node as usize;

        heap.heap[heap.heap_len] = node as u32;
        *tree[node].freq_mut() = 1;
        heap.depth[node] = 0;
        // The dummy codes cost nothing; compensate the length accounting.
        state.opt_len -= 1;
        if !stree.is_empty() {
            state.static_len -= stree[node].len() as usize;
        }
    }

    debug_assert!(max_code >= 0);
    let max_code = max_code as usize;
    desc.max_code = max_code;

    // Establish the heap property on the lower half (sift down all parents).
    let mut n = heap.heap_len / 2;
    while n >= 1 {
        heap.pqdownheap(tree, n);
        n -= 1;
    }

    heap.construct_huffman_tree(tree, elements);

    // Assign bounded code lengths, then the canonical codes.
    let bl_count = gen_bitlen(state, &mut heap, desc);

    gen_codes(&mut desc.dyn_tree, max_code, &bl_count);
}
1979
/// Compute code lengths for every symbol of the tree, capping them at the
/// family's `max_length` and redistributing any overflowed codes so the
/// Kraft inequality still holds. Returns the count of codes per bit length.
/// Also accumulates `state.opt_len`/`state.static_len`.
fn gen_bitlen<const N: usize>(
    state: &mut State,
    heap: &mut Heap,
    desc: &mut TreeDesc<N>,
) -> [u16; MAX_BITS + 1] {
    let tree = &mut desc.dyn_tree;
    let max_code = desc.max_code;
    let stree = desc.stat_desc.static_tree;
    let extra = desc.stat_desc.extra_bits;
    let base = desc.stat_desc.extra_base;
    let max_length = desc.stat_desc.max_length;

    let mut bl_count = [0u16; MAX_BITS + 1];

    // The root has length 0; walk the heap in decreasing frequency order,
    // deriving each node's length from its parent's.
    *tree[heap.heap[heap.heap_max] as usize].len_mut() = 0; let mut overflow: i32 = 0;

    for h in heap.heap_max + 1..HEAP_SIZE {
        let n = heap.heap[h] as usize;
        let mut bits = tree[tree[n].dad() as usize].len() + 1;

        // Cap at max_length and remember how many codes overflowed.
        if bits > max_length {
            bits = max_length;
            overflow += 1;
        }

        *tree[n].len_mut() = bits;

        // Internal (non-leaf) nodes don't contribute to the counts.
        if n > max_code {
            continue;
        }

        bl_count[bits as usize] += 1;
        let mut xbits = 0;
        if n >= base {
            xbits = extra[n - base] as usize;
        }

        let f = tree[n].freq() as usize;
        state.opt_len += f * (bits as usize + xbits);

        if !stree.is_empty() {
            state.static_len += f * (stree[n].len() as usize + xbits);
        }
    }

    if overflow == 0 {
        return bl_count;
    }

    // Redistribute: for each overflow pair, move one code from the deepest
    // non-empty level down one level, freeing a slot at max_length.
    loop {
        let mut bits = max_length as usize - 1;
        while bl_count[bits] == 0 {
            bits -= 1;
        }
        bl_count[bits] -= 1; bl_count[bits + 1] += 2; bl_count[max_length as usize] -= 1;
        overflow -= 2;

        if overflow <= 0 {
            break;
        }
    }

    // Reassign lengths to the symbols (in increasing frequency order) so
    // they match the adjusted bl_count distribution.
    let mut h = HEAP_SIZE;
    for bits in (1..=max_length).rev() {
        let mut n = bl_count[bits as usize];
        while n != 0 {
            h -= 1;
            let m = heap.heap[h] as usize;
            if m > max_code {
                continue;
            }

            if tree[m].len() != bits {
                // Keep opt_len in sync with the changed length.
                state.opt_len += (bits * tree[m].freq()) as usize;
                state.opt_len -= (tree[m].len() * tree[m].freq()) as usize;
                *tree[m].len_mut() = bits;
            }

            n -= 1;
        }
    }
    bl_count
}
2081
/// True for printable, non-space ASCII — the same set C's `isgraph()`
/// accepts in the "C" locale (0x21 through 0x7E inclusive).
#[allow(unused)]
fn isgraph(c: u8) -> bool {
    (0x21..=0x7E).contains(&c)
}
2087
/// Assign canonical Huffman codes from the per-length counts in `bl_count`:
/// codes of a given length are consecutive, and shorter codes numerically
/// precede longer ones. Code bits are stored reversed, ready for LSB-first
/// emission.
fn gen_codes(tree: &mut [Value], max_code: usize, bl_count: &[u16]) {
    // First code value for each bit length.
    let mut next_code = [0; MAX_BITS + 1]; let mut code = 0; for bits in 1..=MAX_BITS {
        code = (code + bl_count[bits - 1]) << 1;
        next_code[bits] = code;
    }

    // A complete code must exactly fill the MAX_BITS code space.
    assert!(
        code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
        "inconsistent bit counts"
    );

    trace!("\ngen_codes: max_code {max_code} ");

    for n in 0..=max_code {
        let len = tree[n].len();
        if len == 0 {
            continue;
        }

        assert!((1..=15).contains(&len), "code length must be 1-15");
        // Reverse the bits: deflate emits codes most-significant-bit first
        // within an LSB-first bit stream.
        *tree[n].code_mut() = next_code[len as usize].reverse_bits() >> (16 - len);
        next_code[len as usize] += 1;

        if tree != self::trees_tbl::STATIC_LTREE.as_slice() {
            trace!(
                "\nn {:>3} {} l {:>2} c {:>4x} ({:x}) ",
                n,
                if isgraph(n as u8) {
                    char::from_u32(n as u32).unwrap()
                } else {
                    ' '
                },
                len,
                tree[n].code(),
                next_code[len as usize] - 1
            );
        }
    }
}
2140
/// Bit-length alphabet: repeat the previous code length 3-6 times (2 extra bits).
const REP_3_6: usize = 16;

/// Bit-length alphabet: repeat a zero length 3-10 times (3 extra bits).
const REPZ_3_10: usize = 17;

/// Bit-length alphabet: repeat a zero length 11-138 times (7 extra bits).
const REPZ_11_138: usize = 18;
2149
/// Scan a tree's code lengths and accumulate, into the bit-length tree's
/// frequencies, how each run of lengths would be encoded (literal lengths vs
/// the REP/REPZ repeat codes). Mirrors the run-length rules used later when
/// the lengths are actually sent.
fn scan_tree(bl_desc: &mut TreeDesc<{ 2 * BL_CODES + 1 }>, tree: &mut [Value], max_code: usize) {
    // prevlen: last emitted length; curlen/nextlen: current and upcoming
    // lengths; count: length of the current run; max_count/min_count: run
    // limits for the applicable repeat code.
    let mut prevlen = -1isize; let mut curlen: isize; let mut nextlen = tree[0].len(); let mut count = 0; let mut max_count = 7; let mut min_count = 4; if nextlen == 0 {
        max_count = 138;
        min_count = 3;
    }

    // Sentinel so the final run terminates.
    *tree[max_code + 1].len_mut() = 0xffff; let bl_tree = &mut bl_desc.dyn_tree;

    for n in 0..=max_code {
        curlen = nextlen as isize;
        nextlen = tree[n + 1].len();
        count += 1;
        if count < max_count && curlen == nextlen as isize {
            // Run continues.
            continue;
        } else if count < min_count {
            // Too short for a repeat code: count each length individually.
            *bl_tree[curlen as usize].freq_mut() += count;
        } else if curlen != 0 {
            // Non-zero run: one literal length (if it changed) plus REP_3_6.
            if curlen != prevlen {
                *bl_tree[curlen as usize].freq_mut() += 1;
            }
            *bl_tree[REP_3_6].freq_mut() += 1;
        } else if count <= 10 {
            *bl_tree[REPZ_3_10].freq_mut() += 1;
        } else {
            *bl_tree[REPZ_11_138].freq_mut() += 1;
        }

        count = 0;
        prevlen = curlen;

        // Pick the repeat-code limits for the next run.
        if nextlen == 0 {
            max_count = 138;
            min_count = 3;
        } else if curlen == nextlen as isize {
            max_count = 6;
            min_count = 3;
        } else {
            max_count = 7;
            min_count = 4;
        }
    }
}
2203
/// Send the dynamic block's tree preamble: HLIT/HDIST/HCLEN counts, the
/// bit-length code lengths (in BL_ORDER), then the literal/length and
/// distance trees encoded with the bit-length tree.
fn send_all_trees(state: &mut State, lcodes: usize, dcodes: usize, blcodes: usize) {
    assert!(
        lcodes >= 257 && dcodes >= 1 && blcodes >= 4,
        "not enough codes"
    );
    assert!(
        lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
        "too many codes"
    );

    trace!("\nbl counts: ");
    // HLIT (5 bits), HDIST (5 bits), HCLEN (4 bits) — each biased per RFC 1951.
    state.bit_writer.send_bits(lcodes as u64 - 257, 5); state.bit_writer.send_bits(dcodes as u64 - 1, 5);
    state.bit_writer.send_bits(blcodes as u64 - 4, 4); for rank in 0..blcodes {
        trace!("\nbl code {:>2} ", StaticTreeDesc::BL_ORDER[rank]);
        // 3-bit code length for each bit-length code, in transmission order.
        state.bit_writer.send_bits(
            state.bl_desc.dyn_tree[StaticTreeDesc::BL_ORDER[rank] as usize].len() as u64,
            3,
        );
    }
    trace!("\nbl tree: sent {}", state.bit_writer.bits_sent);

    state
        .bit_writer
        .send_tree(&state.l_desc.dyn_tree, &state.bl_desc.dyn_tree, lcodes - 1);
    trace!("\nlit tree: sent {}", state.bit_writer.bits_sent);

    state
        .bit_writer
        .send_tree(&state.d_desc.dyn_tree, &state.bl_desc.dyn_tree, dcodes - 1);
    trace!("\ndist tree: sent {}", state.bit_writer.bits_sent);
}
2240
/// Build the bit-length tree from the (already built) literal and distance
/// trees, and account for its transmission cost in `state.opt_len`.
/// Returns the index of the last bit-length code that must be sent.
fn build_bl_tree(state: &mut State) -> usize {
    // Count how the l/d code lengths would be run-length encoded.
    scan_tree(
        &mut state.bl_desc,
        &mut state.l_desc.dyn_tree,
        state.l_desc.max_code,
    );

    scan_tree(
        &mut state.bl_desc,
        &mut state.d_desc.dyn_tree,
        state.d_desc.max_code,
    );

    {
        // Swap the tree out so `build_tree` can borrow state mutably alongside it.
        let mut tmp = TreeDesc::EMPTY;
        core::mem::swap(&mut tmp, &mut state.bl_desc);
        build_tree(state, &mut tmp);
        core::mem::swap(&mut tmp, &mut state.bl_desc);
    }

    // Trailing bit-length codes with zero length (in BL_ORDER) need not be
    // transmitted; at least 4 are always sent.
    let mut max_blindex = BL_CODES - 1;
    while max_blindex >= 3 {
        let index = StaticTreeDesc::BL_ORDER[max_blindex] as usize;
        if state.bl_desc.dyn_tree[index].len() != 0 {
            break;
        }

        max_blindex -= 1;
    }

    // 3 bits per transmitted bl code length, plus 5+5+4 bits of counts.
    state.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
    trace!(
        "\ndyn trees: dyn {}, stat {}",
        state.opt_len,
        state.static_len
    );

    max_blindex
}
2294
/// Determine the best encoding for the current block (stored, static trees,
/// or dynamic trees), emit it, and reset per-block state. `window_offset`
/// locates the raw block bytes in the window (required to emit a stored
/// block); `stored_len` is the block's uncompressed length; `last` marks the
/// final block of the stream.
fn zng_tr_flush_block(
    stream: &mut DeflateStream,
    window_offset: Option<usize>,
    stored_len: u32,
    last: bool,
) {
    // opt_lenb/static_lenb: projected block sizes in bytes.
    let mut opt_lenb;
    let static_lenb;
    let mut max_blindex = 0;

    let state = &mut stream.state;

    if state.sym_buf.is_empty() {
        // No symbols buffered: nothing to build trees from.
        opt_lenb = 0;
        static_lenb = 0;
        state.static_len = 7;
    } else if state.level > 0 {
        // Classify the data once, on the first block.
        if stream.data_type == DataType::Unknown as i32 {
            stream.data_type = State::detect_data_type(&state.l_desc.dyn_tree) as i32;
        }

        {
            // Swap-out dance: build_tree needs &mut State plus the tree.
            let mut tmp = TreeDesc::EMPTY;
            core::mem::swap(&mut tmp, &mut state.l_desc);

            build_tree(state, &mut tmp);
            core::mem::swap(&mut tmp, &mut state.l_desc);

            trace!(
                "\nlit data: dyn {}, stat {}",
                state.opt_len,
                state.static_len
            );
        }

        {
            let mut tmp = TreeDesc::EMPTY;
            core::mem::swap(&mut tmp, &mut state.d_desc);
            build_tree(state, &mut tmp);
            core::mem::swap(&mut tmp, &mut state.d_desc);

            trace!(
                "\ndist data: dyn {}, stat {}",
                state.opt_len,
                state.static_len
            );
        }

        max_blindex = build_bl_tree(state);

        // Convert bit counts to byte counts (+3 header bits, rounded up).
        opt_lenb = (state.opt_len + 3 + 7) >> 3;
        static_lenb = (state.static_len + 3 + 7) >> 3;

        trace!(
            "\nopt {}({}) stat {}({}) stored {} lit {} ",
            opt_lenb,
            state.opt_len,
            static_lenb,
            state.static_len,
            stored_len,
            state.sym_buf.len() / 3
        );

        // Prefer static trees on a tie (or when forced by strategy).
        if static_lenb <= opt_lenb || state.strategy == Strategy::Fixed {
            opt_lenb = static_lenb;
        }
    } else {
        // level 0: only stored blocks are possible.
        assert!(window_offset.is_some(), "lost buf");
        opt_lenb = stored_len as usize + 5;
        static_lenb = stored_len as usize + 5;
    }

    #[allow(clippy::unnecessary_unwrap)]
    if stored_len as usize + 4 <= opt_lenb && window_offset.is_some() {
        // Storing the raw bytes is cheapest (4 = stored-block overhead).
        let window_offset = window_offset.unwrap();
        let range = window_offset..window_offset + stored_len as usize;
        zng_tr_stored_block(state, range, last);
    } else if static_lenb == opt_lenb {
        state.bit_writer.emit_tree(BlockType::StaticTrees, last);
        state.compress_block_static_trees();
    } else {
        state.bit_writer.emit_tree(BlockType::DynamicTrees, last);
        send_all_trees(
            state,
            state.l_desc.max_code + 1,
            state.d_desc.max_code + 1,
            max_blindex + 1,
        );

        state.compress_block_dynamic_trees();
    }

    state.init_block();
    if last {
        // Final block: flush the bit buffer to a byte boundary.
        state.bit_writer.emit_align();
    }

    }
2414
/// Flush the current block (everything from `block_start` to `strstart`) and
/// push as much pending output as the caller's buffer allows.
pub(crate) fn flush_block_only(stream: &mut DeflateStream, is_last: bool) {
    zng_tr_flush_block(
        stream,
        // A negative block_start means the block bytes are no longer in the
        // window, so a stored block is not an option.
        (stream.state.block_start >= 0).then_some(stream.state.block_start as usize),
        (stream.state.strstart as isize - stream.state.block_start) as u32,
        is_last,
    );

    // The next block starts where this one ended.
    stream.state.block_start = stream.state.strstart as isize;
    flush_pending(stream)
}
2426
/// Append `bytes` (gzip header material) to the pending buffer, flushing to
/// the output as needed and keeping the header CRC (`stream.adler`) up to
/// date. Breaks with `Ok` when the output buffer fills before all bytes fit,
/// leaving `gzindex` so the caller can resume on the next `deflate` call.
fn flush_bytes(stream: &mut DeflateStream, mut bytes: &[u8]) -> ControlFlow<ReturnCode> {
    let mut state = &mut stream.state;

    // `beg`: start of the bytes not yet folded into the header CRC.
    let mut beg = state.bit_writer.pending.pending().len(); while state.bit_writer.pending.remaining() < bytes.len() {
        // Fill the pending buffer to capacity, checksum the new part, flush.
        let copy = state.bit_writer.pending.remaining();

        state.bit_writer.pending.extend(&bytes[..copy]);

        stream.adler = crc32(
            stream.adler as u32,
            &state.bit_writer.pending.pending()[beg..],
        ) as z_checksum;

        state.gzindex += copy;
        flush_pending(stream);
        state = &mut stream.state;

        // Output buffer full: stop here and resume at gzindex next call.
        if !state.bit_writer.pending.pending().is_empty() {
            state.last_flush = -1;
            return ControlFlow::Break(ReturnCode::Ok);
        }

        beg = 0;
        bytes = &bytes[copy..];
    }

    // Remainder fits entirely in the pending buffer.
    state.bit_writer.pending.extend(bytes);

    stream.adler = crc32(
        stream.adler as u32,
        &state.bit_writer.pending.pending()[beg..],
    ) as z_checksum;
    state.gzindex = 0;

    ControlFlow::Continue(())
}
2467
/// Main deflate entry point: consumes input from `stream.next_in` and writes
/// compressed output to `stream.next_out`, honoring the requested `flush`
/// mode. Drives the header/body/trailer state machine in `stream.state.status`.
///
/// Returns `Ok` while more work remains, `StreamEnd` once a `Finish` flush
/// has written everything, or `StreamError`/`BufError` on misuse.
pub fn deflate(stream: &mut DeflateStream, flush: DeflateFlush) -> ReturnCode {
    // Parameter sanity: output must exist, input pointer must match avail_in,
    // and only Finish is allowed once the stream is finishing.
    if stream.next_out.is_null()
        || (stream.avail_in != 0 && stream.next_in.is_null())
        || (stream.state.status == Status::Finish && flush != DeflateFlush::Finish)
    {
        let err = ReturnCode::StreamError;
        stream.msg = err.error_message();
        return err;
    }

    if stream.avail_out == 0 {
        let err = ReturnCode::BufError;
        stream.msg = err.error_message();
        return err;
    }

    let old_flush = stream.state.last_flush;
    stream.state.last_flush = flush as i8;

    // First drain any output left over from a previous call.
    if !stream.state.bit_writer.pending.pending().is_empty() {
        flush_pending(stream);
        if stream.avail_out == 0 {
            // Output filled up again; avoid a spurious BufError next call.
            stream.state.last_flush = -1;
            return ReturnCode::Ok;
        }

    } else if stream.avail_in == 0
        && rank_flush(flush as i8) <= rank_flush(old_flush)
        && flush != DeflateFlush::Finish
    {
        // No input and no stronger flush than last time: caller is spinning.
        let err = ReturnCode::BufError;
        stream.msg = err.error_message();
        return err;
    }

    // User must not provide more input after the first FINISH.
    if stream.state.status == Status::Finish && stream.avail_in != 0 {
        let err = ReturnCode::BufError;
        stream.msg = err.error_message();
        return err;
    }

    // Raw deflate: no header to write, go straight to Busy.
    if stream.state.status == Status::Init && stream.state.wrap == 0 {
        stream.state.status = Status::Busy;
    }

    // zlib (RFC 1950) header, plus the dictionary adler32 if one was preset.
    if stream.state.status == Status::Init {
        let header = stream.state.header();
        stream
            .state
            .bit_writer
            .pending
            .extend(&header.to_be_bytes());

        if stream.state.strstart != 0 {
            let adler = stream.adler as u32;
            stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
        }

        // Checksum restarts over the actual data.
        stream.adler = ADLER32_INITIAL_VALUE as _;
        stream.state.status = Status::Busy;

        flush_pending(stream);

        if !stream.state.bit_writer.pending.pending().is_empty() {
            stream.state.last_flush = -1;

            return ReturnCode::Ok;
        }
    }

    // gzip (RFC 1952) header: magic, method, flags, mtime, XFL, OS.
    if stream.state.status == Status::GZip {
        stream.state.crc_fold = Crc32Fold::new();

        stream.state.bit_writer.pending.extend(&[31, 139, 8]);

        // XFL: 2 = max compression, 4 = fastest.
        let extra_flags = if stream.state.level == 9 {
            2
        } else if stream.state.strategy >= Strategy::HuffmanOnly || stream.state.level < 2 {
            4
        } else {
            0
        };

        match &stream.state.gzhead {
            None => {
                // Minimal header: zero flags and mtime.
                let bytes = [0, 0, 0, 0, 0, extra_flags, gz_header::OS_CODE];
                stream.state.bit_writer.pending.extend(&bytes);
                stream.state.status = Status::Busy;

                flush_pending(stream);
                if !stream.state.bit_writer.pending.pending().is_empty() {
                    stream.state.last_flush = -1;
                    return ReturnCode::Ok;
                }
            }
            Some(gzhead) => {
                // User-supplied header: emit flags/mtime/XFL/OS, then walk
                // through the Extra/Name/Comment/Hcrc phases below.
                stream.state.bit_writer.pending.extend(&[gzhead.flags()]);
                let bytes = (gzhead.time as u32).to_le_bytes();
                stream.state.bit_writer.pending.extend(&bytes);
                stream
                    .state
                    .bit_writer
                    .pending
                    .extend(&[extra_flags, gzhead.os as u8]);

                if !gzhead.extra.is_null() {
                    let bytes = (gzhead.extra_len as u16).to_le_bytes();
                    stream.state.bit_writer.pending.extend(&bytes);
                }

                if gzhead.hcrc != 0 {
                    stream.adler = crc32(
                        stream.adler as u32,
                        stream.state.bit_writer.pending.pending(),
                    ) as z_checksum
                }

                stream.state.gzindex = 0;
                stream.state.status = Status::Extra;
            }
        }
    }

    // gzip extra field (resumable via gzindex).
    if stream.state.status == Status::Extra {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if !gzhead.extra.is_null() {
                let gzhead_extra = gzhead.extra;

                let extra = unsafe {
                    core::slice::from_raw_parts(
                        gzhead_extra.add(stream.state.gzindex),
                        (gzhead.extra_len & 0xffff) as usize - stream.state.gzindex,
                    )
                };

                if let ControlFlow::Break(err) = flush_bytes(stream, extra) {
                    return err;
                }
            }
        }
        stream.state.status = Status::Name;
    }

    // gzip file name (NUL-terminated).
    if stream.state.status == Status::Name {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if !gzhead.name.is_null() {
                let gzhead_name = unsafe { CStr::from_ptr(gzhead.name.cast()) };
                let bytes = gzhead_name.to_bytes_with_nul();
                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
                    return err;
                }
            }
            stream.state.status = Status::Comment;
        }
    }

    // gzip comment (NUL-terminated).
    if stream.state.status == Status::Comment {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if !gzhead.comment.is_null() {
                let gzhead_comment = unsafe { CStr::from_ptr(gzhead.comment.cast()) };
                let bytes = gzhead_comment.to_bytes_with_nul();
                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
                    return err;
                }
            }
            stream.state.status = Status::Hcrc;
        }
    }

    // gzip header CRC (low 16 bits of the CRC so far), if requested.
    if stream.state.status == Status::Hcrc {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if gzhead.hcrc != 0 {
                let bytes = (stream.adler as u16).to_le_bytes();
                if let ControlFlow::Break(err) = flush_bytes(stream, &bytes) {
                    return err;
                }
            }
        }

        stream.state.status = Status::Busy;

        flush_pending(stream);
        if !stream.state.bit_writer.pending.pending().is_empty() {
            stream.state.last_flush = -1;
            return ReturnCode::Ok;
        }
    }

    // Compression proper: run the level-appropriate block algorithm.
    let state = &mut stream.state;
    if stream.avail_in != 0
        || state.lookahead != 0
        || (flush != DeflateFlush::NoFlush && state.status != Status::Finish)
    {
        let bstate = self::algorithm::run(stream, flush);

        let state = &mut stream.state;

        if matches!(bstate, BlockState::FinishStarted | BlockState::FinishDone) {
            state.status = Status::Finish;
        }

        match bstate {
            BlockState::NeedMore | BlockState::FinishStarted => {
                if stream.avail_out == 0 {
                    // Avoid a BufError if the caller retries with no change.
                    state.last_flush = -1; }
                return ReturnCode::Ok;
            }
            BlockState::BlockDone => {
                // A block completed: honor the specific flush semantics.
                match flush {
                    DeflateFlush::NoFlush => unreachable!("condition of inner surrounding if"),
                    DeflateFlush::PartialFlush => {
                        state.bit_writer.align();
                    }
                    DeflateFlush::SyncFlush => {
                        // Empty stored block acts as the sync marker.
                        zng_tr_stored_block(state, 0..0, false);
                    }
                    DeflateFlush::FullFlush => {
                        zng_tr_stored_block(state, 0..0, false);

                        // Forget history so decompression can restart here.
                        state.head.as_mut_slice().fill(0); if state.lookahead == 0 {
                            state.strstart = 0;
                            state.block_start = 0;
                            state.insert = 0;
                        }
                    }
                    DeflateFlush::Block => { }
                    DeflateFlush::Finish => unreachable!("condition of outer surrounding if"),
                }

                flush_pending(stream);

                if stream.avail_out == 0 {
                    stream.state.last_flush = -1; return ReturnCode::Ok;
                }
            }
            BlockState::FinishDone => { }
        }
    }

    if flush != DeflateFlush::Finish {
        return ReturnCode::Ok;
    }

    // Trailer: gzip (crc32 + total length, little-endian) or zlib (adler32,
    // big-endian); raw deflate has none.
    if stream.state.wrap == 2 {
        let crc_fold = core::mem::take(&mut stream.state.crc_fold);
        stream.adler = crc_fold.finish() as z_checksum;

        let adler = stream.adler as u32;
        stream.state.bit_writer.pending.extend(&adler.to_le_bytes());

        let total_in = stream.total_in as u32;
        stream
            .state
            .bit_writer
            .pending
            .extend(&total_in.to_le_bytes());
    } else if stream.state.wrap == 1 {
        let adler = stream.adler as u32;
        stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
    }

    flush_pending(stream);

    // Negative wrap records that the trailer was written, so it is emitted
    // at most once even if deflate(Finish) is called again.
    if stream.state.wrap > 0 {
        stream.state.wrap = -stream.state.wrap; }

    if stream.state.bit_writer.pending.pending().is_empty() {
        assert_eq!(stream.state.bit_writer.bits_used, 0, "bi_buf not flushed");
        return ReturnCode::StreamEnd;
    }
    ReturnCode::Ok
}
2783
2784pub(crate) fn flush_pending(stream: &mut DeflateStream) {
2785 let state = &mut stream.state;
2786
2787 state.bit_writer.flush_bits();
2788
2789 let pending = state.bit_writer.pending.pending();
2790 let len = Ord::min(pending.len(), stream.avail_out as usize);
2791
2792 if len == 0 {
2793 return;
2794 }
2795
2796 trace!("\n[FLUSH {len} bytes]");
2797 unsafe { core::ptr::copy_nonoverlapping(pending.as_ptr(), stream.next_out, len) };
2799
2800 stream.next_out = stream.next_out.wrapping_add(len);
2801 stream.total_out += len as crate::c_api::z_size;
2802 stream.avail_out -= len as crate::c_api::uInt;
2803
2804 state.bit_writer.pending.advance(len);
2805}
2806
2807pub fn compress_slice<'a>(
2825 output: &'a mut [u8],
2826 input: &[u8],
2827 config: DeflateConfig,
2828) -> (&'a mut [u8], ReturnCode) {
2829 let output_uninit = unsafe {
2831 core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2832 };
2833
2834 compress(output_uninit, input, config)
2835}
2836
2837pub fn compress<'a>(
2838 output: &'a mut [MaybeUninit<u8>],
2839 input: &[u8],
2840 config: DeflateConfig,
2841) -> (&'a mut [u8], ReturnCode) {
2842 compress_with_flush(output, input, config, DeflateFlush::Finish)
2843}
2844
2845pub fn compress_slice_with_flush<'a>(
2846 output: &'a mut [u8],
2847 input: &[u8],
2848 config: DeflateConfig,
2849 flush: DeflateFlush,
2850) -> (&'a mut [u8], ReturnCode) {
2851 let output_uninit = unsafe {
2853 core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2854 };
2855
2856 compress_with_flush(output_uninit, input, config, flush)
2857}
2858
/// One-shot compression driver: sets up a `z_stream` over `input`/`output`,
/// feeds both through `deflate` in `c_uint::MAX`-sized chunks (so inputs larger
/// than 4 GiB work on 64-bit targets), and returns the written prefix of
/// `output` together with the final return code.
pub fn compress_with_flush<'a>(
    output: &'a mut [MaybeUninit<u8>],
    input: &[u8],
    config: DeflateConfig,
    final_flush: DeflateFlush,
) -> (&'a mut [u8], ReturnCode) {
    // avail_in/avail_out start at 0; the loop below tops them up chunk by chunk.
    let mut stream = z_stream {
        next_in: input.as_ptr() as *mut u8,
        avail_in: 0, total_in: 0,
        next_out: output.as_mut_ptr() as *mut u8,
        avail_out: 0, total_out: 0,
        msg: core::ptr::null_mut(),
        state: core::ptr::null_mut(),
        zalloc: None,
        zfree: None,
        opaque: core::ptr::null_mut(),
        data_type: 0,
        adler: 0,
        reserved: 0,
    };

    let err = init(&mut stream, config);
    if err != ReturnCode::Ok {
        return (&mut [], err);
    }

    // Chunk size limit: the C API fields are 32-bit on most targets.
    let max = core::ffi::c_uint::MAX as usize;

    // Bytes of output/input not yet handed to the stream.
    let mut left = output.len();
    let mut source_len = input.len();

    let return_code = loop {
        // Refill the output window when deflate has consumed it.
        if stream.avail_out == 0 {
            stream.avail_out = Ord::min(left, max) as _;
            left -= stream.avail_out as usize;
        }

        // Refill the input window when deflate has consumed it.
        if stream.avail_in == 0 {
            stream.avail_in = Ord::min(source_len, max) as _;
            source_len -= stream.avail_in as usize;
        }

        // Only apply the caller's flush mode on the last chunk of input.
        let flush = if source_len > 0 {
            DeflateFlush::NoFlush
        } else {
            final_flush
        };

        let err = if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
            deflate(stream, flush)
        } else {
            ReturnCode::StreamError
        };

        // StreamEnd is the success case for a finished stream; map it to Ok.
        match err {
            ReturnCode::Ok => continue,
            ReturnCode::StreamEnd => break ReturnCode::Ok,
            _ => break err,
        }
    };

    // SAFETY: deflate initialized exactly `total_out` bytes of `output`.
    let output_slice = unsafe {
        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut u8, stream.total_out as usize)
    };

    // Release the stream's internal allocation; the result is already copied out.
    if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
        let _ = end(stream);
    }

    (output_slice, return_code)
}
2934
2935pub const fn compress_bound(source_len: usize) -> usize {
2955 compress_bound_help(source_len, ZLIB_WRAPLEN)
2956}
2957
2958const fn compress_bound_help(source_len: usize, wrap_len: usize) -> usize {
2959 source_len .wrapping_add(if source_len == 0 { 1 } else { 0 })
2962 .wrapping_add(if source_len < 9 { 1 } else { 0 })
2964 .wrapping_add(deflate_quick_overhead(source_len))
2966 .wrapping_add(DEFLATE_BLOCK_OVERHEAD)
2968 .wrapping_add(wrap_len)
2970}
2971
/// Min-heap over Huffman tree nodes, used while constructing the code trees.
/// Nodes are ordered by frequency, with node depth as the tie-breaker
/// (see `pqdownheap`).
#[derive(Clone)]
struct Heap {
    /// Heap storage; element 0 is unused, the root lives at index 1.
    heap: [u32; 2 * L_CODES + 1],

    /// Number of live elements in the heap portion of `heap`.
    heap_len: usize,

    /// Elements at indices `>= heap_max` store removed nodes (the tree order).
    heap_max: usize,

    /// Depth of each tree node, used as the comparison tie-breaker.
    depth: [u8; 2 * L_CODES + 1],
}
2988
impl Heap {
    /// An empty heap with zeroed storage.
    fn new() -> Self {
        Self {
            heap: [0; 2 * L_CODES + 1],
            heap_len: 0,
            heap_max: 0,
            depth: [0; 2 * L_CODES + 1],
        }
    }

    /// Seed the heap with every symbol of `tree` that has a nonzero frequency,
    /// zeroing the code length of unused symbols. Returns the largest code
    /// with a nonzero frequency, or -1 if there is none.
    fn initialize(&mut self, tree: &mut [Value]) -> isize {
        let mut max_code = -1;

        self.heap_len = 0;
        self.heap_max = HEAP_SIZE;

        for (n, node) in tree.iter_mut().enumerate() {
            if node.freq() > 0 {
                self.heap_len += 1;
                // NOTE: the heap is 1-based; slot 0 stays unused.
                self.heap[self.heap_len] = n as u32;
                max_code = n as isize;
                self.depth[n] = 0;
            } else {
                *node.len_mut() = 0;
            }
        }

        max_code
    }

    /// Index of the heap root (the node with the smallest frequency).
    const SMALLEST: usize = 1;

    /// Restore the heap property by sifting the element at index `k` down.
    fn pqdownheap(&mut self, tree: &[Value], mut k: usize) {
        // Comparison key: frequency in the high bits, depth as tie-breaker.
        macro_rules! freq_and_depth {
            ($i:expr) => {
                (tree[$i as usize].freq() as u32) << 8 | self.depth[$i as usize] as u32
            };
        }

        let v = self.heap[k];
        let v_val = freq_and_depth!(v);
        // Children of node k are at 2k and 2k + 1.
        let mut j = k << 1; while j <= self.heap_len {
            let mut j_val = freq_and_depth!(self.heap[j]);
            // Pick the smaller of the two children.
            if j < self.heap_len {
                let j1_val = freq_and_depth!(self.heap[j + 1]);
                if j1_val <= j_val {
                    j += 1;
                    j_val = j1_val;
                }
            }

            // v is no larger than both children: heap property restored.
            if v_val <= j_val {
                break;
            }

            // Move the child up and continue sifting down from its slot.
            self.heap[k] = self.heap[j];
            k = j;

            j <<= 1;
        }

        self.heap[k] = v;
    }

    /// Remove and return the node with the smallest frequency.
    fn pqremove(&mut self, tree: &[Value]) -> u32 {
        let top = self.heap[Self::SMALLEST];
        self.heap[Self::SMALLEST] = self.heap[self.heap_len];
        self.heap_len -= 1;

        self.pqdownheap(tree, Self::SMALLEST);

        top
    }

    /// Repeatedly merge the two least-frequent nodes into a new internal node
    /// (stored at `tree[node]`, `node` increasing) until only one node — the
    /// tree root — remains. Removed nodes are stacked at the top of `heap`
    /// (indices `>= heap_max`) for later use by the length-assignment pass.
    fn construct_huffman_tree(&mut self, tree: &mut [Value], mut node: usize) {
        loop {
            // n = least frequent node, m = next least frequent (new root).
            let n = self.pqremove(tree) as usize; let m = self.heap[Heap::SMALLEST] as usize; self.heap_max -= 1;
            self.heap[self.heap_max] = n as u32; self.heap_max -= 1;
            self.heap[self.heap_max] = m as u32;

            // The merged node's frequency is the sum of its children's.
            *tree[node].freq_mut() = tree[n].freq() + tree[m].freq();
            self.depth[node] = Ord::max(self.depth[n], self.depth[m]) + 1;

            *tree[n].dad_mut() = node as u16;
            *tree[m].dad_mut() = node as u16;

            // Replace the heap root with the merged node and re-heapify.
            self.heap[Heap::SMALLEST] = node as u32;
            node += 1;

            self.pqdownheap(tree, Heap::SMALLEST);

            if self.heap_len < 2 {
                break;
            }
        }

        // Stash the final root alongside the other removed nodes.
        self.heap_max -= 1;
        self.heap[self.heap_max] = self.heap[Heap::SMALLEST];
    }
}
3115
3116pub unsafe fn set_header<'a>(
3125 stream: &mut DeflateStream<'a>,
3126 head: Option<&'a mut gz_header>,
3127) -> ReturnCode {
3128 if stream.state.wrap != 2 {
3129 ReturnCode::StreamError
3130 } else {
3131 stream.state.gzhead = head;
3132 ReturnCode::Ok
3133 }
3134}
3135
/// zlib (RFC 1950) wrapper size: 2-byte header + 4-byte Adler-32 trailer.
const ZLIB_WRAPLEN: usize = 6;
/// gzip (RFC 1952) wrapper size: 10-byte header + 8-byte trailer (CRC-32 + ISIZE).
const GZIP_WRAPLEN: usize = 18;

const DEFLATE_HEADER_BITS: usize = 3;
const DEFLATE_EOBS_BITS: usize = 15;
const DEFLATE_PAD_BITS: usize = 6;
/// Per-block overhead in bytes: block header + end-of-block symbol + padding bits.
const DEFLATE_BLOCK_OVERHEAD: usize =
    (DEFLATE_HEADER_BITS + DEFLATE_EOBS_BITS + DEFLATE_PAD_BITS) >> 3;
3146
/// Maximum bits per literal emitted by the "quick" deflate strategy.
const DEFLATE_QUICK_LIT_MAX_BITS: usize = 9;

/// Worst-case expansion, in bytes, of the quick strategy over `x` input bytes:
/// each literal may take one bit more than a byte, rounded up to whole bytes.
const fn deflate_quick_overhead(x: usize) -> usize {
    let bits = x
        .wrapping_mul(DEFLATE_QUICK_LIT_MAX_BITS - 8)
        .wrapping_add(7);

    // The intermediate `c_ulong` cast truncates on targets where `c_ulong` is
    // 32 bits — presumably mirroring the C implementation; kept as-is.
    (bits as core::ffi::c_ulong >> 3) as usize
}
3156
/// An upper bound on the deflated size of `source_len` input bytes for this
/// stream's configuration (equivalent of zlib's `deflateBound`). With no
/// stream, a conservative fixed/stored-block bound plus a zlib wrapper is
/// returned.
pub fn bound(stream: Option<&mut DeflateStream>, source_len: usize) -> usize {
    // Mask mirrors the C `uLong` arithmetic on 32-bit targets.
    let mask = core::ffi::c_ulong::MAX as usize;

    // Conservative bound: stored/fixed block expansion of ~0.3% plus 5 bytes.
    let comp_len = source_len
        .wrapping_add((source_len.wrapping_add(7) & mask) >> 3)
        .wrapping_add((source_len.wrapping_add(63) & mask) >> 6)
        .wrapping_add(5);

    let Some(stream) = stream else {
        // No stream: assume a zlib wrapper (6 bytes).
        return comp_len.wrapping_add(6);
    };

    // Compute the wrapper size from the configured wrap mode.
    let wrap_len = match stream.state.wrap {
        0 => {
            // Raw deflate: no wrapper.
            0
        }
        1 => {
            // zlib wrapper; a preset dictionary adds a 4-byte DICTID field.
            if stream.state.strstart != 0 {
                ZLIB_WRAPLEN + 4
            } else {
                ZLIB_WRAPLEN
            }
        }
        2 => {
            // gzip wrapper, plus any optional header fields.
            let mut gz_wrap_len = GZIP_WRAPLEN;

            if let Some(header) = &stream.state.gzhead {
                if !header.extra.is_null() {
                    // 2-byte XLEN field plus the extra data itself.
                    gz_wrap_len += 2 + header.extra_len as usize;
                }

                // Count the NUL-terminated name, including its terminator.
                // NOTE(review): assumes `header.name`, when non-null, points to
                // a valid NUL-terminated string — this is the gz_header contract.
                let mut c_string = header.name;
                if !c_string.is_null() {
                    loop {
                        gz_wrap_len += 1;
                        unsafe {
                            if *c_string == 0 {
                                break;
                            }
                            c_string = c_string.add(1);
                        }
                    }
                }

                // Same for the NUL-terminated comment.
                let mut c_string = header.comment;
                if !c_string.is_null() {
                    loop {
                        gz_wrap_len += 1;
                        unsafe {
                            if *c_string == 0 {
                                break;
                            }
                            c_string = c_string.add(1);
                        }
                    }
                }

                // Optional 2-byte header CRC.
                if header.hcrc != 0 {
                    gz_wrap_len += 2;
                }
            }

            gz_wrap_len
        }
        _ => {
            // Unknown/negative wrap: fall back to the zlib wrapper size.
            ZLIB_WRAPLEN
        }
    };

    // With a reduced window or hash the tighter compress_bound estimate does
    // not hold; use looser per-mode bounds instead.
    if stream.state.w_bits() != MAX_WBITS as u32 || HASH_BITS < 15 {
        if stream.state.level == 0 {
            // Level 0 emits stored blocks: ~4% overhead plus a constant.
            source_len
                .wrapping_add(source_len >> 5)
                .wrapping_add(source_len >> 7)
                .wrapping_add(source_len >> 11)
                .wrapping_add(7)
                .wrapping_add(wrap_len)
        } else {
            comp_len.wrapping_add(wrap_len)
        }
    } else {
        compress_bound_help(source_len, wrap_len)
    }
}
3267
/// Copy the current sliding-window contents (the effective dictionary) into
/// `dictionary` and return the number of bytes that were (or would be)
/// copied. A null `dictionary` only queries the length.
///
/// # Safety
///
/// If non-null, `dictionary` must point to at least
/// `min(strstart + lookahead, w_size)` writable bytes.
pub unsafe fn get_dictionary(stream: &DeflateStream<'_>, dictionary: *mut u8) -> usize {
    let s = &stream.state;
    // At most one full window of history is available.
    let len = Ord::min(s.strstart + s.lookahead, s.w_size);

    if !dictionary.is_null() && len > 0 {
        unsafe {
            // Copy the most recent `len` bytes of the window.
            core::ptr::copy_nonoverlapping(
                s.window.as_ptr().add(s.strstart + s.lookahead - len),
                dictionary,
                len,
            );
        }
    }

    len
}
3287
/// Byte offsets of each deflate sub-buffer inside the single backing
/// allocation, as computed by [`DeflateAllocOffsets::new`].
struct DeflateAllocOffsets {
    /// Total size of the backing allocation, in bytes.
    total_size: usize,
    /// Offset of the `State` struct.
    state_pos: usize,
    /// Offset of the sliding window buffer.
    window_pos: usize,
    /// Offset of the pending (output) buffer.
    pending_pos: usize,
    /// Offset of the symbol buffer.
    sym_buf_pos: usize,
    /// Offset of the `prev` hash-chain table.
    prev_pos: usize,
    /// Offset of the hash `head` table.
    head_pos: usize,
}
3297
3298impl DeflateAllocOffsets {
3299 fn new(window_bits: usize, lit_bufsize: usize) -> Self {
3300 use core::mem::size_of;
3301
3302 const ALIGN_SIZE: usize = 64;
3305 const LIT_BUFS: usize = 4;
3306
3307 let mut curr_size = 0usize;
3308
3309 let state_size = size_of::<State>();
3311 let window_size = (1 << window_bits) * 2;
3313 let prev_size = (1 << window_bits) * size_of::<Pos>();
3314 let head_size = HASH_SIZE * size_of::<Pos>();
3315 let pending_size = lit_bufsize * LIT_BUFS;
3316 let sym_buf_size = lit_bufsize * (LIT_BUFS - 1);
3317 let state_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3321 curr_size = state_pos + state_size;
3322
3323 let window_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3324 curr_size = window_pos + window_size;
3325
3326 let prev_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3327 curr_size = prev_pos + prev_size;
3328
3329 let head_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3330 curr_size = head_pos + head_size;
3331
3332 let pending_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3333 curr_size = pending_pos + pending_size;
3334
3335 let sym_buf_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3336 curr_size = sym_buf_pos + sym_buf_size;
3337
3338 let total_size = (curr_size + (ALIGN_SIZE - 1)).next_multiple_of(ALIGN_SIZE);
3341
3342 Self {
3343 total_size,
3344 state_pos,
3345 window_pos,
3346 pending_pos,
3347 sym_buf_pos,
3348 prev_pos,
3349 head_pos,
3350 }
3351 }
3352}
3353
3354#[cfg(test)]
3355mod test {
3356 use crate::{
3357 inflate::{decompress_slice, InflateConfig, InflateStream},
3358 InflateFlush,
3359 };
3360
3361 use super::*;
3362
3363 use core::{ffi::CStr, sync::atomic::AtomicUsize};
3364
    /// `detect_data_type` classifies literal-frequency tables as text only
    /// when allowed byte values (e.g. '\r', printable ASCII) occur.
    #[test]
    fn detect_data_type_basic() {
        let empty = || [Value::new(0, 0); LITERALS];

        // No literals at all counts as binary.
        assert_eq!(State::detect_data_type(&empty()), DataType::Binary);

        let mut binary = empty();
        binary[0] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&binary), DataType::Binary);

        let mut text = empty();
        text[b'\r' as usize] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&text), DataType::Text);

        let mut text = empty();
        text[b'a' as usize] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&text), DataType::Text);

        // Control characters outside the text allow-list force binary.
        let mut non_text = empty();
        non_text[7] = Value::new(1, 0);
        assert_eq!(State::detect_data_type(&non_text), DataType::Binary);
    }
3387
    /// `DeflateStream::from_stream_mut` rejects null, uninitialized, and
    /// allocator-less streams, and accepts a properly initialized one.
    #[test]
    fn from_stream_mut() {
        unsafe {
            assert!(DeflateStream::from_stream_mut(core::ptr::null_mut()).is_none());

            // Default stream: no allocator, no state.
            let mut stream = z_stream::default();
            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());

            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());

            init(&mut stream, DeflateConfig::default());
            let stream = DeflateStream::from_stream_mut(&mut stream);
            assert!(stream.is_some());

            assert!(end(stream.unwrap()).is_ok());
        }
    }
3406
    /// Allocator shim that fails exactly the `N`-th allocation (0-based) and
    /// delegates every other call to the C allocator. The call count lives in
    /// an `AtomicUsize` passed through `opaque`.
    #[cfg(feature = "c-allocator")]
    unsafe extern "C" fn fail_nth_allocation<const N: usize>(
        opaque: crate::c_api::voidpf,
        items: crate::c_api::uInt,
        size: crate::c_api::uInt,
    ) -> crate::c_api::voidpf {
        let count = unsafe { &*(opaque as *const AtomicUsize) };

        if count.fetch_add(1, core::sync::atomic::Ordering::Relaxed) != N {
            unsafe { (crate::allocate::C.zalloc)(opaque, items, size) }
        } else {
            core::ptr::null_mut()
        }
    }
3424
    /// `init` reports `MemError` when the very first allocation fails.
    #[test]
    #[cfg(feature = "c-allocator")]
    fn init_invalid_allocator() {
        {
            let atomic = AtomicUsize::new(0);
            let mut stream = z_stream {
                zalloc: Some(fail_nth_allocation::<0>),
                zfree: Some(crate::allocate::C.zfree),
                opaque: &atomic as *const _ as *const core::ffi::c_void as *mut _,
                ..z_stream::default()
            };
            assert_eq!(
                init(&mut stream, DeflateConfig::default()),
                ReturnCode::MemError
            );
        }
    }
3442
    /// `copy` reports `MemError` when the copy's allocation fails
    /// (allocation #0 succeeds for `init`, #1 fails for `copy`).
    #[test]
    #[cfg(feature = "c-allocator")]
    fn copy_invalid_allocator() {
        let mut stream = z_stream::default();

        let atomic = AtomicUsize::new(0);
        stream.opaque = &atomic as *const _ as *const core::ffi::c_void as *mut _;
        stream.zalloc = Some(fail_nth_allocation::<1>);
        stream.zfree = Some(crate::allocate::C.zfree);

        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);

        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
            unreachable!()
        };

        let mut stream_copy = MaybeUninit::<DeflateStream>::zeroed();

        assert_eq!(copy(&mut stream_copy, stream), ReturnCode::MemError);

        // The original stream is still valid and must be torn down.
        assert!(end(stream).is_ok());
    }
3466
    /// Validation of `DeflateConfig` fields during `init`.
    mod invalid_deflate_config {
        use super::*;

        /// The default config initializes successfully at level 6.
        #[test]
        fn sanity_check() {
            let mut stream = z_stream::default();
            assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);

            assert!(stream.zalloc.is_some());
            assert!(stream.zfree.is_some());

            let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
            assert_eq!(stream.state.level, 6);

            assert!(end(stream).is_ok());
        }

        /// A window_bits of 8 is bumped up to 9.
        #[test]
        fn window_bits_correction() {
            let mut stream = z_stream::default();
            let config = DeflateConfig {
                window_bits: 8,
                ..Default::default()
            };
            assert_eq!(init(&mut stream, config), ReturnCode::Ok);
            let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
            assert_eq!(stream.state.w_bits(), 9);

            assert!(end(stream).is_ok());
        }

        /// A raw-deflate window below the minimum is rejected.
        #[test]
        fn window_bits_too_low() {
            let mut stream = z_stream::default();
            let config = DeflateConfig {
                window_bits: -16,
                ..Default::default()
            };
            assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
        }

        /// A window above the gzip/zlib maximum is rejected.
        #[test]
        fn window_bits_too_high() {
            let mut stream = z_stream::default();
            let config = DeflateConfig {
                window_bits: 42,
                ..Default::default()
            };
            assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
        }
    }
3521
    /// `end` reports an error when the stream is torn down with output still
    /// pending (the 3-byte buffer cannot hold the compressed data).
    #[test]
    fn end_data_error() {
        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut u8;
        stream.avail_in = input.len() as _;
        let output = &mut [0, 0, 0];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;

        assert_eq!(deflate(stream, DeflateFlush::NoFlush), ReturnCode::Ok);

        assert!(end(stream).is_err());
    }
3542
    /// After `reset_keep`, the same stream can run a second full compression.
    #[test]
    fn test_reset_keep() {
        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut u8;
        stream.avail_in = input.len() as _;

        let output = &mut [0; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        assert_eq!(reset_keep(stream), ReturnCode::Ok);

        // Second round with a fresh output buffer must also finish cleanly.
        let output = &mut [0; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        assert!(end(stream).is_ok());
    }
3568
    /// Byte-exact output for the Huffman-only strategy at level 6.
    #[test]
    fn hello_world_huffman_only() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
        ];

        let input = "Hello World!\n";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::HuffmanOnly,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3596
    /// Byte-exact output for level 1 (the "quick" configuration).
    #[test]
    fn hello_world_quick() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
        ];

        let input = "Hello World!\n";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 1,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3624
    /// Byte-exact level-1 output for an input with repeated punctuation.
    #[test]
    fn hello_world_quick_random() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0x53, 0xe1, 0x50, 0x51, 0xe1, 0x52, 0x51, 0x51, 0x01, 0x00, 0x03, 0xec,
            0x00, 0xeb,
        ];

        let input = "$\u{8}$$\n$$$";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 1,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3652
    /// Helper: compress `input` with `config` and assert the exact output
    /// bytes (used by tests minimized from fuzzer findings).
    fn fuzz_based_test(input: &[u8], config: DeflateConfig, expected: &[u8]) {
        let mut output_rs = [0; 1 << 17];
        let (output_rs, err) = compress_slice(&mut output_rs, input, config);
        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output_rs, expected);
    }
3660
    /// Byte-exact output for the RLE strategy on a short run of zeros.
    #[test]
    fn simple_rle() {
        fuzz_based_test(
            "\0\0\0\0\u{6}".as_bytes(),
            DeflateConfig {
                level: -1,
                method: Method::Deflated,
                window_bits: 11,
                mem_level: 4,
                strategy: Strategy::Rle,
            },
            &[56, 17, 99, 0, 2, 54, 0, 0, 11, 0, 7],
        )
    }
3675
    /// Regression test (fuzzer-minimized): this input used to make
    /// `fill_window` index out of bounds with a small window and mem_level.
    #[test]
    fn fill_window_out_of_bounds() {
        const INPUT: &[u8] = &[
            0x71, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x1d, 0x1d, 0x63,
            0x63, 0x63, 0x63, 0x63, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d,
            0x1d, 0x27, 0x0, 0x0, 0x0, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
            0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x48, 0x50,
            0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x0, 0x0, 0x0, 0x0,
            0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71,
            0x6a, 0x0, 0x0, 0x0, 0x0, 0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x31, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0,
            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
            0x50, 0x50, 0x50, 0x50, 0x48, 0x50, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x6, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x20, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x71, 0x75, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x30, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x6,
        ];

        fuzz_based_test(
            INPUT,
            DeflateConfig {
                level: -1,
                method: Method::Deflated,
                window_bits: 9,
                mem_level: 1,
                strategy: Strategy::HuffmanOnly,
            },
            &[
                0x18, 0x19, 0x4, 0xc1, 0x21, 0x1, 0xc4, 0x0, 0x10, 0x3, 0xb0, 0x18, 0x29, 0x1e,
                0x7e, 0x17, 0x83, 0xf5, 0x70, 0x6c, 0xac, 0xfe, 0xc9, 0x27, 0xdb, 0xb6, 0x6f, 0xdb,
                0xb6, 0x6d, 0xdb, 0x80, 0x24, 0xb9, 0xbb, 0xbb, 0x24, 0x49, 0x92, 0x24, 0xf, 0x2,
                0xd8, 0x36, 0x0, 0xf0, 0x3, 0x0, 0x0, 0x24, 0xd0, 0xb6, 0x6d, 0xdb, 0xb6, 0x6d,
                0xdb, 0xbe, 0x6d, 0xf9, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x80, 0x30, 0x0, 0xc3, 0x22,
                0x68, 0xf, 0x36, 0x90, 0xc2, 0xb5, 0xfa, 0x7f, 0x48, 0x80, 0x81, 0xb, 0x40, 0x55,
                0x55, 0x55, 0xd5, 0x16, 0x80, 0xaa, 0x7, 0x9, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
                0xe, 0x7c, 0x82, 0xe0, 0x98, 0x0, 0x0, 0x0, 0x4, 0x60, 0x10, 0xf9, 0x8c, 0xe2,
                0xe5, 0xfa, 0x3f, 0x2, 0x54, 0x55, 0x55, 0x65, 0x0, 0xa8, 0xaa, 0xaa, 0xaa, 0xba,
                0x2, 0x50, 0xb5, 0x90, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x78, 0x82, 0xe0, 0xd0,
                0x8a, 0x41, 0x0, 0x0, 0xa2, 0x58, 0x54, 0xb7, 0x60, 0x83, 0x9a, 0x6a, 0x4, 0x96,
                0x87, 0xba, 0x51, 0xf8, 0xfb, 0x9b, 0x26, 0xfc, 0x0, 0x1c, 0x7, 0x6c, 0xdb, 0xb6,
                0x6d, 0xdb, 0xb6, 0x6d, 0xf7, 0xa8, 0x3a, 0xaf, 0xaa, 0x6a, 0x3, 0xf8, 0xc2, 0x3,
                0x40, 0x55, 0x55, 0x55, 0xd5, 0x5b, 0xf8, 0x80, 0xaa, 0x7a, 0xb, 0x0, 0x7f, 0x82,
                0xe0, 0x98, 0x0, 0x40, 0x18, 0x0, 0x82, 0xd8, 0x49, 0x40, 0x2, 0x22, 0x7e, 0xeb,
                0x80, 0xa6, 0xc, 0xa0, 0x9f, 0xa4, 0x2a, 0x38, 0xf, 0x0, 0x0, 0xe7, 0x1, 0xdc,
                0x55, 0x95, 0x17, 0x0, 0x0, 0xae, 0x0, 0x38, 0xc0, 0x67, 0xdb, 0x36, 0x80, 0x2b,
                0x0, 0xe, 0xf0, 0xd9, 0xf6, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x0, 0x30, 0xc, 0x83, 0x22,
                0x69, 0x7, 0xc6, 0xea, 0xff, 0x19, 0x0, 0x0, 0x80, 0xaa, 0x0, 0x0, 0x0, 0x0, 0x0,
                0x0, 0x8e, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x6a,
                0xf5, 0x63, 0x60, 0x60, 0x3, 0x0, 0xee, 0x8a, 0x88, 0x67,
            ],
        )
    }
3765
    /// window_bits = 31 selects gzip output; without a `gz_header` set, the
    /// default 10-byte gzip header (with OS code) is emitted.
    #[test]
    fn gzip_no_header() {
        let config = DeflateConfig {
            level: 9,
            method: Method::Deflated,
            window_bits: 31,
            ..Default::default()
        };

        let input = b"Hello World!";
        let os = gz_header::OS_CODE;

        fuzz_based_test(
            input,
            config,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 2, os, 243, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 4, 0, 163, 28, 41, 28, 12, 0, 0, 0,
            ],
        )
    }
3787
    /// Level 0 (stored blocks) through the gzip wrapper: the trailer checksum
    /// must match.
    #[test]
    #[rustfmt::skip]
    fn gzip_stored_block_checksum() {
        fuzz_based_test(
            &[
                27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0,
            ],
            DeflateConfig {
                level: 0,
                method: Method::Deflated,
                window_bits: 26,
                mem_level: 6,
                strategy: Strategy::Default,
            },
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 4, gz_header::OS_CODE, 1, 18, 0, 237, 255, 27, 27, 27, 27, 27, 27, 27,
                27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0, 60, 101, 156, 55, 18, 0, 0, 0,
            ],
        )
    }
3808
    /// Writing a gzip header whose fields exceed the (small) pending buffer
    /// forces intermediate flushes; a second deflate call must then complete
    /// the stream.
    #[test]
    fn gzip_header_pending_flush() {
        let extra = "aaaaaaaaaaaaaaaaaaaa\0";
        let name = "bbbbbbbbbbbbbbbbbbbb\0";
        let comment = "cccccccccccccccccccc\0";

        let mut header = gz_header {
            text: 0,
            time: 0,
            xflags: 0,
            os: 0,
            extra: extra.as_ptr() as *mut _,
            extra_len: extra.len() as _,
            extra_max: 0,
            name: name.as_ptr() as *mut _,
            name_max: 0,
            comment: comment.as_ptr() as *mut _,
            comm_max: 0,
            hcrc: 1,
            done: 0,
        };

        // mem_level 1 gives the smallest possible pending buffer.
        let config = DeflateConfig {
            window_bits: 31,
            mem_level: 1,
            ..Default::default()
        };

        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, config), ReturnCode::Ok);

        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
            unreachable!()
        };

        unsafe { set_header(stream, Some(&mut header)) };

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut _;
        stream.avail_in = input.len() as _;

        let mut output = [0u8; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = 100;

        assert_eq!(stream.state.bit_writer.pending.capacity(), 512);

        // Nearly fill the pending buffer so the header writes must flush.
        stream.state.bit_writer.pending.extend(&[0; 500]);

        // First call runs out of output space before the stream ends.
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::Ok);

        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        let n = stream.total_out as usize;

        assert!(end(stream).is_ok());

        let output_rs = &mut output[..n];

        assert_eq!(output_rs.len(), 500 + 99);
    }
3874
    /// Round trip: compress with a full gzip header (extra, name, comment,
    /// header CRC), then inflate and verify each header field comes back.
    #[test]
    fn gzip_with_header() {
        let extra = "some extra stuff\0";
        let name = "nomen est omen\0";
        let comment = "such comment\0";

        let mut header = gz_header {
            text: 0,
            time: 0,
            xflags: 0,
            os: 0,
            extra: extra.as_ptr() as *mut _,
            extra_len: extra.len() as _,
            extra_max: 0,
            name: name.as_ptr() as *mut _,
            name_max: 0,
            comment: comment.as_ptr() as *mut _,
            comm_max: 0,
            hcrc: 1,
            done: 0,
        };

        let config = DeflateConfig {
            window_bits: 31,
            ..Default::default()
        };

        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, config), ReturnCode::Ok);

        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
            unreachable!()
        };

        unsafe { set_header(stream, Some(&mut header)) };

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut _;
        stream.avail_in = input.len() as _;

        let mut output = [0u8; 256];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;

        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        let n = stream.total_out as usize;

        assert!(end(stream).is_ok());

        let output_rs = &mut output[..n];

        assert_eq!(output_rs.len(), 81);

        // Decompress and check the header fields round-trip.
        {
            let mut stream = z_stream::default();

            let config = InflateConfig {
                window_bits: config.window_bits,
            };

            assert_eq!(crate::inflate::init(&mut stream, config), ReturnCode::Ok);

            let Some(stream) = (unsafe { InflateStream::from_stream_mut(&mut stream) }) else {
                unreachable!();
            };

            stream.next_in = output_rs.as_mut_ptr() as _;
            stream.avail_in = output_rs.len() as _;

            let mut output = [0u8; 12];
            stream.next_out = output.as_mut_ptr();
            stream.avail_out = output.len() as _;

            let mut extra_buf = [0u8; 64];
            let mut name_buf = [0u8; 64];
            let mut comment_buf = [0u8; 64];

            let mut header = gz_header {
                text: 0,
                time: 0,
                xflags: 0,
                os: 0,
                extra: extra_buf.as_mut_ptr(),
                extra_len: 0,
                extra_max: extra_buf.len() as _,
                name: name_buf.as_mut_ptr(),
                name_max: name_buf.len() as _,
                comment: comment_buf.as_mut_ptr(),
                comm_max: comment_buf.len() as _,
                hcrc: 0,
                done: 0,
            };

            assert_eq!(
                unsafe { crate::inflate::get_header(stream, Some(&mut header)) },
                ReturnCode::Ok
            );

            assert_eq!(
                unsafe { crate::inflate::inflate(stream, InflateFlush::Finish) },
                ReturnCode::StreamEnd
            );

            crate::inflate::end(stream);

            assert!(!header.comment.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.comment.cast()) }
                    .to_str()
                    .unwrap(),
                comment.trim_end_matches('\0')
            );

            assert!(!header.name.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.name.cast()) }
                    .to_str()
                    .unwrap(),
                name.trim_end_matches('\0')
            );

            assert!(!header.extra.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.extra.cast()) }
                    .to_str()
                    .unwrap(),
                extra.trim_end_matches('\0')
            );
        }
    }
4009
4010 #[test]
4011 fn insufficient_compress_space() {
4012 const DATA: &[u8] = include_bytes!("deflate/test-data/inflate_buf_error.dat");
4013
4014 fn helper(deflate_buf: &mut [u8], deflate_err: ReturnCode) -> ReturnCode {
4015 let config = DeflateConfig {
4016 level: 0,
4017 method: Method::Deflated,
4018 window_bits: 10,
4019 mem_level: 6,
4020 strategy: Strategy::Default,
4021 };
4022
4023 let (output, err) = compress_slice(deflate_buf, DATA, config);
4024 assert_eq!(err, deflate_err);
4025
4026 let config = InflateConfig {
4027 window_bits: config.window_bits,
4028 };
4029
4030 let mut uncompr = [0; 1 << 17];
4031 let (uncompr, err) = decompress_slice(&mut uncompr, output, config);
4032
4033 if err == ReturnCode::Ok {
4034 assert_eq!(DATA, uncompr);
4035 }
4036
4037 err
4038 }
4039
4040 let mut output = [0; 1 << 17];
4041
4042 assert_eq!(
4044 helper(&mut output[..1 << 16], ReturnCode::BufError),
4045 ReturnCode::DataError
4046 );
4047
4048 assert_eq!(helper(&mut output, ReturnCode::Ok), ReturnCode::Ok);
4050 }
4051
4052 fn test_flush(flush: DeflateFlush, expected: &[u8]) {
4053 let input = b"Hello World!\n";
4054
4055 let config = DeflateConfig {
4056 level: 6, method: Method::Deflated,
4058 window_bits: 16 + crate::MAX_WBITS,
4059 mem_level: DEF_MEM_LEVEL,
4060 strategy: Strategy::Default,
4061 };
4062
4063 let mut output_rs = vec![0; 128];
4064
4065 let expected_err = ReturnCode::BufError;
4069
4070 let (rs, err) = compress_slice_with_flush(&mut output_rs, input, config, flush);
4071 assert_eq!(expected_err, err);
4072
4073 assert_eq!(rs, expected);
4074 }
4075
    #[test]
    #[rustfmt::skip]
    fn sync_flush() {
        // SyncFlush: output starts with the gzip header (31, 139 = 0x1f 0x8b,
        // method 8, OS byte) and ends with the zero-length stored block marker
        // `0, 0, 255, 255` that byte-aligns the flushed stream.
        test_flush(
            DeflateFlush::SyncFlush,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2, 0, 0, 0, 255, 255,
            ],
        )
    }
4087
    #[test]
    #[rustfmt::skip]
    fn partial_flush() {
        // PartialFlush: unlike SyncFlush, no `0, 0, 255, 255` empty stored
        // block is appended; the stream ends after only a few flush bits
        // (final byte 8 instead of the 5-byte marker).
        test_flush(
            DeflateFlush::PartialFlush,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2, 8,
            ],
        );
    }
4099
    #[test]
    #[rustfmt::skip]
    fn full_flush() {
        // FullFlush: emits the same `0, 0, 255, 255` empty stored block marker
        // as SyncFlush (the additional dictionary reset that distinguishes the
        // two modes is not visible in the output bytes).
        test_flush(
            DeflateFlush::FullFlush,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2, 0, 0, 0, 255, 255,
            ],
        );
    }
4111
    #[test]
    #[rustfmt::skip]
    fn block_flush() {
        // Block: the current deflate block is completed but no flush marker
        // bytes are appended at all — the output is the shortest of the four
        // flush-mode variants.
        test_flush(
            DeflateFlush::Block,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 228, 2,
            ],
        );
    }
4123
    #[test]
    fn split_deflate() {
        // Compress an input as two independent gzip streams, then splice them
        // into a single valid gzip member by hand: keep the first (SyncFlush'd,
        // trailer-less) stream, strip the second stream's 10-byte gzip header
        // and 8-byte trailer, combine the two checksums with crc32_combine, and
        // append a reconstructed trailer. The result must inflate back to the
        // original input.
        let input = "Hello World!\n";

        let (input1, input2) = input.split_at(6);

        let mut output1 = vec![0; 128];
        let mut output2 = vec![0; 128];

        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: 16 + crate::MAX_WBITS, // gzip wrapper
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        // First half: SyncFlush leaves the stream unterminated (hence the
        // helper reports BufError) and emits no trailer.
        let (prefix, err) = compress_slice_with_flush(
            &mut output1,
            input1.as_bytes(),
            config,
            DeflateFlush::SyncFlush,
        );
        assert_eq!(err, ReturnCode::BufError);

        // Second half: a complete gzip stream, including header and trailer.
        let (output2, err) = compress_slice_with_flush(
            &mut output2,
            input2.as_bytes(),
            config,
            DeflateFlush::Finish,
        );
        assert_eq!(err, ReturnCode::Ok);

        let inflate_config = crate::inflate::InflateConfig {
            window_bits: 16 + 15,
        };

        // The gzip trailer is 8 bytes: CRC32 (4 bytes LE) then ISIZE (4 bytes LE).
        let (suffix, end) = output2.split_at(output2.len() - 8);
        let (crc2, len2) = end.split_at(4);
        let crc2 = u32::from_le_bytes(crc2.try_into().unwrap());

        // Drop the second stream's 10-byte gzip header.
        let suffix = &suffix[10..];

        let mut result: Vec<u8> = Vec::new();
        result.extend(prefix.iter());
        result.extend(suffix);

        let len1 = input1.len() as u32;
        let len2 = u32::from_le_bytes(len2.try_into().unwrap());
        assert_eq!(len2 as usize, input2.len());

        // Combine the per-half checksums as if the whole input had been crc'd
        // in one pass.
        let crc1 = crate::crc32::crc32(0, input1.as_bytes());
        let crc = crate::crc32::crc32_combine(crc1, crc2, len2 as u64);

        // Sanity check: the combined crc must equal a crc over the full input.
        let crc_cheating = crate::crc32::crc32(0, input.as_bytes());
        assert_eq!(crc, crc_cheating);

        // Append the reconstructed 8-byte trailer (CRC32, then total ISIZE).
        result.extend(crc.to_le_bytes());
        result.extend((len1 + len2).to_le_bytes());

        let mut output = vec![0; 128];
        let (output, err) = crate::inflate::decompress_slice(&mut output, &result, inflate_config);
        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output, input.as_bytes());
    }
4201
    #[test]
    fn inflate_window_copy_slice() {
        // Regression test (the data appears fuzzer-generated): inflating this
        // gzip stream with window_bits 25 (= 16 for the gzip wrapper + a 9-bit
        // window) must reproduce `uncompressed` exactly; the test name suggests
        // it exercises inflate's window-copy path — see crate::inflate.
        let uncompressed = [
            9, 126, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12,
            10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 2, 0, 0, 0, 63, 1, 0, 12, 2,
            36, 0, 28, 0, 0, 0, 1, 0, 0, 63, 63, 13, 0, 0, 0, 0, 0, 0, 0, 63, 63, 63, 63, 0, 0, 0,
            0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 0, 0, 0, 9, 0, 0, 0, 9, 0, 0, 12, 33, 2, 0, 0, 8,
            0, 4, 0, 0, 0, 12, 10, 41, 12, 10, 47,
        ];

        // A valid gzip stream (note the 31, 139, 8 magic/method prefix).
        let compressed = &[
            31, 139, 8, 0, 0, 0, 0, 0, 4, 3, 181, 193, 49, 14, 194, 32, 24, 128, 209, 175, 192, 0,
            228, 151, 232, 206, 66, 226, 226, 96, 60, 2, 113, 96, 235, 13, 188, 139, 103, 23, 106,
            104, 108, 100, 49, 169, 239, 185, 39, 11, 199, 7, 51, 39, 171, 248, 118, 226, 63, 52,
            157, 120, 86, 102, 78, 86, 209, 104, 58, 241, 84, 129, 166, 12, 4, 154, 178, 229, 202,
            30, 36, 130, 166, 19, 79, 21, 104, 202, 64, 160, 41, 91, 174, 236, 65, 34, 10, 200, 19,
            162, 206, 68, 96, 130, 156, 15, 188, 229, 138, 197, 157, 161, 35, 3, 87, 126, 245, 0,
            28, 224, 64, 146, 2, 139, 1, 196, 95, 196, 223, 94, 10, 96, 92, 33, 86, 2, 0, 0,
        ];

        let config = InflateConfig { window_bits: 25 };

        let mut dest_vec_rs = vec![0u8; uncompressed.len()];
        let (output_rs, error) =
            crate::inflate::decompress_slice(&mut dest_vec_rs, compressed, config);

        assert_eq!(ReturnCode::Ok, error);
        assert_eq!(output_rs, uncompressed);
    }
4250
    #[test]
    fn hash_calc_difference() {
        // Pins the exact deflate output for an input where the hash-calculation
        // variants (see the `hash_calc` module: StandardHashCalc vs.
        // RollHashCalc) could plausibly select different match chains.
        // `fuzz_based_test` compresses `input` with `config` and asserts the
        // output equals `expected` byte-for-byte.
        let input = [
            0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0,
            0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 112, 102, 102, 102, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 0,
            0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0,
            50, 0,
        ];

        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: 9,
            mem_level: 8,
            strategy: Strategy::Default,
        };

        let expected = [
            24, 149, 99, 96, 96, 96, 96, 208, 6, 17, 112, 138, 129, 193, 128, 1, 29, 24, 50, 208,
            1, 200, 146, 169, 79, 24, 74, 59, 96, 147, 52, 71, 22, 70, 246, 88, 26, 94, 80, 128,
            83, 6, 162, 219, 144, 76, 183, 210, 5, 8, 67, 105, 36, 159, 35, 128, 57, 118, 97, 100,
            160, 197, 192, 192, 96, 196, 0, 0, 3, 228, 25, 128,
        ];

        fuzz_based_test(&input, config, &expected);
    }
4294
    #[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
    mod _cache_lines {
        // Compile-time layout checks for the deflate `State` struct: `status`
        // must sit at offset 0 and the `_cache_line_*` marker fields must land
        // exactly on 64-byte boundaries, pinning the manual field ordering to
        // 64-byte cache lines on these 64-bit targets. A change to State's
        // fields that shifts these offsets fails the build here rather than
        // silently degrading layout.
        use super::State;
        use memoffset::offset_of;

        const _: () = assert!(offset_of!(State, status) == 0);
        const _: () = assert!(offset_of!(State, _cache_line_0) == 64);
        const _: () = assert!(offset_of!(State, _cache_line_1) == 128);
        const _: () = assert!(offset_of!(State, _cache_line_2) == 192);
        const _: () = assert!(offset_of!(State, _cache_line_3) == 256);
    }
4308}