1#![warn(unsafe_op_in_unsafe_fn)]
2use core::{ffi::CStr, marker::PhantomData, mem::MaybeUninit, ops::ControlFlow};
3
4use crate::{
5 adler32::adler32,
6 allocate::Allocator,
7 c_api::{gz_header, internal_state, z_checksum, z_stream},
8 crc32::{crc32, Crc32Fold},
9 read_buf::ReadBuf,
10 trace,
11 weak_slice::{WeakArrayMut, WeakSliceMut},
12 DeflateFlush, ReturnCode, ADLER32_INITIAL_VALUE, CRC32_INITIAL_VALUE, MAX_WBITS, MIN_WBITS,
13};
14
15use self::{
16 algorithm::CONFIGURATION_TABLE,
17 hash_calc::{HashCalcVariant, RollHashCalc, StandardHashCalc},
18 pending::Pending,
19 trees_tbl::STATIC_LTREE,
20 window::Window,
21};
22
23mod algorithm;
24mod compare256;
25mod hash_calc;
26mod longest_match;
27mod pending;
28mod slide_hash;
29mod trees_tbl;
30mod window;
31
/// A position inside the sliding window. `u16` is wide enough because the
/// window size is capped by `MAX_WBITS` (checked in `init`).
pub(crate) type Pos = u16;
34
/// A `z_stream` whose `state` pointer is known to be a valid, initialized
/// deflate [`State`].
///
/// This type must stay layout-compatible with [`z_stream`] (the `_S`/`_A`
/// const assertions in the `impl` verify size and alignment), because
/// `from_stream_mut`/`from_stream_ref` freely cast between the two.
#[repr(C)]
pub struct DeflateStream<'a> {
    /// Next input byte to read.
    pub(crate) next_in: *mut crate::c_api::Bytef,
    /// Number of bytes available at `next_in`.
    pub(crate) avail_in: crate::c_api::uInt,
    /// Total number of input bytes read so far.
    pub(crate) total_in: crate::c_api::z_size,
    /// Next output byte to write.
    pub(crate) next_out: *mut crate::c_api::Bytef,
    /// Remaining free space at `next_out`.
    pub(crate) avail_out: crate::c_api::uInt,
    /// Total number of bytes output so far.
    pub(crate) total_out: crate::c_api::z_size,
    /// Last error message, or NULL.
    pub(crate) msg: *const core::ffi::c_char,
    /// The deflate state; in `z_stream` this is the opaque `internal_state` pointer.
    pub(crate) state: &'a mut State<'a>,
    /// Allocator built from the user-provided `zalloc`/`zfree`/`opaque`.
    pub(crate) alloc: Allocator<'a>,
    /// Best guess about the data type (binary or text).
    pub(crate) data_type: core::ffi::c_int,
    /// Adler-32 (zlib wrapper) or CRC-32 (gzip wrapper) of the uncompressed data.
    pub(crate) adler: crate::c_api::z_checksum,
    /// Reserved for future use; mirrors the `z_stream` field.
    pub(crate) reserved: crate::c_api::uLong,
}
52
impl<'a> DeflateStream<'a> {
    // Compile-time proof that `DeflateStream` and `z_stream` can be cast
    // into each other: identical size and alignment (both are #[repr(C)]).
    const _S: () = assert!(core::mem::size_of::<z_stream>() == core::mem::size_of::<Self>());
    const _A: () = assert!(core::mem::align_of::<z_stream>() == core::mem::align_of::<Self>());

    /// Reinterprets a `*mut z_stream` as a mutable [`DeflateStream`].
    ///
    /// Returns `None` when the pointer is NULL, when no allocator is
    /// configured, or when `state` is NULL (stream never initialized).
    ///
    /// # Safety
    ///
    /// `strm` must be either NULL or valid for reads/writes as a `z_stream`;
    /// a non-null `state` field must point to a valid deflate `State` as set
    /// up by [`init`].
    #[inline(always)]
    pub unsafe fn from_stream_mut(strm: *mut z_stream) -> Option<&'a mut Self> {
        {
            // SAFETY: the caller guarantees `strm` is NULL or dereferenceable.
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        // SAFETY: layout-compatible (see `_S`/`_A`) and validated above.
        unsafe { strm.cast::<DeflateStream>().as_mut() }
    }

    /// Shared-reference version of [`Self::from_stream_mut`]; same checks.
    ///
    /// # Safety
    ///
    /// Same contract as [`Self::from_stream_mut`], for shared access.
    #[inline(always)]
    pub unsafe fn from_stream_ref(strm: *const z_stream) -> Option<&'a Self> {
        {
            // SAFETY: the caller guarantees `strm` is NULL or dereferenceable.
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        // SAFETY: layout-compatible (see `_S`/`_A`) and validated above.
        unsafe { strm.cast::<DeflateStream>().as_ref() }
    }

    /// Views this stream as the C-facing `z_stream` (layout-compatible cast).
    fn as_z_stream_mut(&mut self) -> &mut z_stream {
        // SAFETY: `DeflateStream` and `z_stream` have identical layout.
        unsafe { &mut *(self as *mut DeflateStream as *mut z_stream) }
    }

    /// Returns `(pending output bytes, bits buffered in the bit writer)`.
    pub fn pending(&self) -> (usize, u8) {
        (
            self.state.bit_writer.pending.pending,
            self.state.bit_writer.bits_used,
        )
    }
}
125
/// Number of entries in the hash-head table (`2^HASH_BITS`).
pub(crate) const HASH_SIZE: usize = 65536;
/// log2 of [`HASH_SIZE`].
const HASH_BITS: usize = 16;

/// Maximum value accepted for `DeflateConfig::mem_level` (validated in `init`).
const MAX_MEM_LEVEL: i32 = 9;
/// Default memory level: 8, clamped to `MAX_MEM_LEVEL`.
const DEF_MEM_LEVEL: i32 = if MAX_MEM_LEVEL > 8 { 8 } else { MAX_MEM_LEVEL };
134
/// Compression method. The deflate format defines only one.
#[repr(i32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Method {
    /// The deflate method; 8 is the value the zlib header format uses for it.
    #[default]
    Deflated = 8,
}
142
143impl TryFrom<i32> for Method {
144 type Error = ();
145
146 fn try_from(value: i32) -> Result<Self, Self::Error> {
147 match value {
148 8 => Ok(Self::Deflated),
149 _ => Err(()),
150 }
151 }
152}
153
/// Configuration for initializing a deflate stream (see [`init`]).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub struct DeflateConfig {
    /// Compression level `0..=9`, or the `Z_DEFAULT_COMPRESSION` sentinel
    /// which `init` maps to 6.
    pub level: i32,
    /// Compression method; `Deflated` is the only valid value.
    pub method: Method,
    /// log2 of the window size, with wrapper selection encoded: negative for
    /// raw deflate, `> MAX_WBITS` (i.e. +16) for the gzip wrapper. Decoded in `init`.
    pub window_bits: i32,
    /// Memory level `1..=MAX_MEM_LEVEL`; sizes the internal buffers
    /// (`lit_bufsize = 1 << (mem_level + 6)`).
    pub mem_level: i32,
    /// Match-search strategy.
    pub strategy: Strategy,
}
163
#[cfg(any(test, feature = "__internal-test"))]
impl quickcheck::Arbitrary for DeflateConfig {
    /// Generates a random, always-valid [`DeflateConfig`].
    fn arbitrary(g: &mut quickcheck::Gen) -> Self {
        // Full valid ranges for the compression and memory levels.
        let levels: Vec<i32> = (0..=9).collect();
        let mem_levels: Vec<i32> = (1..=9).collect();

        // Valid window_bits values: 9..=15 (zlib wrapper), +16 variants
        // (gzip wrapper), and the negated range (raw deflate).
        let mut window_bits = Vec::new();
        window_bits.extend(9..=15);
        window_bits.extend(9 + 16..=15 + 16);
        window_bits.extend(-15..=-9);

        let strategies = [
            Strategy::Default,
            Strategy::Filtered,
            Strategy::HuffmanOnly,
            Strategy::Rle,
            Strategy::Fixed,
        ];

        // NOTE: the `g.choose` calls run in struct-literal order
        // (level, window_bits, mem_level, strategy).
        Self {
            level: *g.choose(&levels).unwrap(),
            method: Method::Deflated,
            window_bits: *g.choose(&window_bits).unwrap(),
            mem_level: *g.choose(&mem_levels).unwrap(),
            strategy: *g.choose(&strategies).unwrap(),
        }
    }
}
192
193impl DeflateConfig {
194 pub fn new(level: i32) -> Self {
195 Self {
196 level,
197 ..Self::default()
198 }
199 }
200}
201
impl Default for DeflateConfig {
    fn default() -> Self {
        Self {
            // The "use the default" sentinel; `init` maps it to level 6.
            level: crate::c_api::Z_DEFAULT_COMPRESSION,
            method: Method::Deflated,
            // Largest window, zlib wrapper.
            window_bits: MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        }
    }
}
213
/// Initializes `stream` for compression according to `config`.
///
/// Allocates the deflate [`State`] together with its window, hash tables,
/// pending buffer and symbol buffer using the stream's allocator (falling
/// back to a default allocator when a feature provides one). Returns
/// [`ReturnCode::StreamError`] for invalid configurations and
/// [`ReturnCode::MemError`] when any allocation fails; on failure every
/// partial allocation is released again.
pub fn init(stream: &mut z_stream, config: DeflateConfig) -> ReturnCode {
    let DeflateConfig {
        mut level,
        method: _,
        mut window_bits,
        mem_level,
        strategy,
    } = config;

    stream.msg = core::ptr::null_mut();

    // If the caller configured no allocator, install a default one. When
    // both features are enabled, the rust allocator takes precedence
    // because it runs first.
    #[cfg(feature = "rust-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_rust_allocator()
    }

    #[cfg(feature = "c-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_c_allocator()
    }

    if stream.zalloc.is_none() || stream.zfree.is_none() {
        return ReturnCode::StreamError;
    }

    // Map the "default level" sentinel to the real default.
    if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        level = 6;
    }

    // Decode the wrapper from window_bits: negative selects raw deflate
    // (wrap = 0), values above MAX_WBITS select the gzip wrapper (wrap = 2),
    // anything else the zlib wrapper (wrap = 1).
    let wrap = if window_bits < 0 {
        if window_bits < -MAX_WBITS {
            return ReturnCode::StreamError;
        }
        window_bits = -window_bits;

        0
    } else if window_bits > MAX_WBITS {
        window_bits -= 16;
        2
    } else {
        1
    };

    if (!(1..=MAX_MEM_LEVEL).contains(&mem_level))
        || !(MIN_WBITS..=MAX_WBITS).contains(&window_bits)
        || !(0..=9).contains(&level)
        || (window_bits == 8 && wrap != 1)
    {
        return ReturnCode::StreamError;
    }

    // A 256-byte window is not supported; widen it to 512 bytes.
    let window_bits = if window_bits == 8 {
        9
    } else {
        window_bits as usize
    };

    let alloc = Allocator {
        zalloc: stream.zalloc.unwrap(),
        zfree: stream.zfree.unwrap(),
        opaque: stream.opaque,
        _marker: PhantomData,
    };

    let Some(state_allocation) = alloc.allocate_raw::<State>() else {
        return ReturnCode::MemError;
    };

    let w_size = 1 << window_bits;
    let window = Window::new_in(&alloc, window_bits);

    // Hash chain links (`prev`) and hash-head table.
    let prev = alloc.allocate_slice_raw::<u16>(w_size);
    let head = alloc.allocate_raw::<[u16; HASH_SIZE]>();

    // Buffer sizing: the pending (output) buffer is 4x lit_bufsize, the
    // symbol buffer 3x (3 bytes per buffered symbol).
    let lit_bufsize = 1 << (mem_level + 6);
    let pending = Pending::new_in(&alloc, 4 * lit_bufsize);

    let sym_buf = ReadBuf::new_in(&alloc, 3 * lit_bufsize);

    // Either all allocations succeeded, or we free whatever did get
    // allocated and report out-of-memory.
    let (window, prev, head, pending, sym_buf) = match (window, prev, head, pending, sym_buf) {
        (Some(window), Some(prev), Some(head), Some(pending), Some(sym_buf)) => {
            (window, prev, head, pending, sym_buf)
        }
        (window, prev, head, pending, sym_buf) => {
            // SAFETY: each pointer was just produced by `alloc` with exactly
            // the count passed here, and is not aliased anywhere yet.
            unsafe {
                if let Some(mut sym_buf) = sym_buf {
                    alloc.deallocate(sym_buf.as_mut_ptr(), sym_buf.capacity())
                }
                if let Some(mut pending) = pending {
                    pending.drop_in(&alloc);
                }
                if let Some(head) = head {
                    // one allocation of a single [u16; HASH_SIZE] array
                    alloc.deallocate(head.as_ptr(), 1)
                }
                if let Some(prev) = prev {
                    alloc.deallocate(prev.as_ptr(), w_size)
                }
                if let Some(mut window) = window {
                    window.drop_in(&alloc);
                }

                alloc.deallocate(state_allocation.as_ptr(), 1);
            }

            return ReturnCode::MemError;
        }
    };

    // Zero the hash-chain links before wrapping them in a safe slice view.
    let prev = prev.as_ptr();
    // SAFETY: `prev` is a valid allocation of `w_size` u16 elements.
    unsafe { prev.write_bytes(0, w_size) };
    let prev = unsafe { WeakSliceMut::from_raw_parts_mut(prev, w_size) };

    // Zero the hash-head table (count 1 = one whole [u16; HASH_SIZE]).
    let head = head.as_ptr();
    // SAFETY: `head` is a valid allocation of one [u16; HASH_SIZE].
    unsafe { head.write_bytes(0, 1) };
    let head = unsafe { WeakArrayMut::<u16, HASH_SIZE>::from_ptr(head) };

    let state = State {
        status: Status::Init,

        w_size,

        window,
        prev,
        head,
        bit_writer: BitWriter::from_pending(pending),

        lit_bufsize,

        sym_buf,

        level: level as i8,
        strategy,

        last_flush: 0,
        wrap,
        strstart: 0,
        block_start: 0,
        block_open: 0,
        window_size: 0,
        insert: 0,
        matches: 0,
        opt_len: 0,
        static_len: 0,
        lookahead: 0,
        ins_h: 0,
        max_chain_length: 0,
        max_lazy_match: 0,
        good_match: 0,
        nice_match: 0,

        l_desc: TreeDesc::EMPTY,
        d_desc: TreeDesc::EMPTY,
        bl_desc: TreeDesc::EMPTY,

        crc_fold: Crc32Fold::new(),
        gzhead: None,
        gzindex: 0,

        match_start: 0,
        prev_match: 0,
        match_available: false,
        prev_length: 0,

        hash_calc_variant: HashCalcVariant::Standard,
        _cache_line_0: (),
        _cache_line_1: (),
        _cache_line_2: (),
        _cache_line_3: (),
        _padding_0: [0; 16],
    };

    // SAFETY: `state_allocation` is valid for a write of `State`.
    unsafe { state_allocation.as_ptr().write(state) };
    stream.state = state_allocation.as_ptr() as *mut internal_state;

    let Some(stream) = (unsafe { DeflateStream::from_stream_mut(stream) }) else {
        if cfg!(debug_assertions) {
            unreachable!("we should have initialized the stream properly");
        }
        return ReturnCode::StreamError;
    };

    // Finish initialization (checksums, match-finder state, trees).
    reset(stream)
}
419
/// Changes the compression `level` and `strategy` of a live stream.
///
/// If the new settings select a different compression function and the
/// stream has already processed data (`last_flush != -2`), the pending
/// input is compressed first with a block flush. Returns `StreamError` for
/// an invalid level, `BufError` when the flush could not consume all
/// buffered data, `Ok` otherwise.
pub fn params(stream: &mut DeflateStream, level: i32, strategy: Strategy) -> ReturnCode {
    let level = if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        6
    } else {
        level
    };

    if !(0..=9).contains(&level) {
        return ReturnCode::StreamError;
    }

    let level = level as i8;

    let func = CONFIGURATION_TABLE[stream.state.level as usize].func;

    let state = &mut stream.state;

    // Function pointer comparison is only used as a heuristic here: a false
    // negative just triggers an unnecessary (but harmless) flush.
    #[allow(unpredictable_function_pointer_comparisons)]
    if (strategy != state.strategy || func != CONFIGURATION_TABLE[level as usize].func)
        && state.last_flush != -2
    {
        // Flush the last buffer under the old parameters.
        let err = deflate(stream, DeflateFlush::Block);
        if err == ReturnCode::StreamError {
            return err;
        }

        let state = &mut stream.state;

        // Any leftover input or window data means the flush was incomplete.
        if stream.avail_in != 0
            || ((state.strstart as isize - state.block_start) + state.lookahead as isize) != 0
        {
            return ReturnCode::BufError;
        }
    }

    let state = &mut stream.state;

    if state.level != level {
        // Leaving level 0: repair the hash state that stored-mode skipped.
        if state.level == 0 && state.matches != 0 {
            if state.matches == 1 {
                self::slide_hash::slide_hash(state);
            } else {
                state.head.as_mut_slice().fill(0);
            }
            state.matches = 0;
        }

        lm_set_level(state, level);
    }

    state.strategy = strategy;

    ReturnCode::Ok
}
477
/// Preloads a compression dictionary into the stream's window and hash
/// tables, so future matches can reference it.
///
/// Only valid before any data is compressed; returns `StreamError` for the
/// gzip wrapper, for a zlib stream past initialization, or when lookahead
/// data is already buffered.
pub fn set_dictionary(stream: &mut DeflateStream, mut dictionary: &[u8]) -> ReturnCode {
    let state = &mut stream.state;

    let wrap = state.wrap;

    if wrap == 2 || (wrap == 1 && state.status != Status::Init) || state.lookahead != 0 {
        return ReturnCode::StreamError;
    }

    // For the zlib wrapper, the dictionary contributes to the checksum.
    if wrap == 1 {
        stream.adler = adler32(stream.adler as u32, dictionary) as z_checksum;
    }

    // Temporarily pretend this is a raw stream while loading the dictionary,
    // so the fill/insert machinery below does no wrapper work.
    state.wrap = 0;

    if dictionary.len() >= state.window.capacity() {
        if wrap == 0 {
            // Raw stream: wipe any existing hash/window bookkeeping.
            state.head.as_mut_slice().fill(0);

            state.strstart = 0;
            state.block_start = 0;
            state.insert = 0;
        } else {
            // Intentionally empty: for a wrapped stream these fields are
            // still in their initial (zero) state.
        }

        // Only the trailing window-sized part of the dictionary is usable.
        dictionary = &dictionary[dictionary.len() - state.w_size..];
    }

    // Present the dictionary as the stream input and run the regular
    // window-fill / hash-insert loop over it.
    let avail = stream.avail_in;
    let next = stream.next_in;
    stream.avail_in = dictionary.len() as _;
    stream.next_in = dictionary.as_ptr() as *mut u8;
    fill_window(stream);

    while stream.state.lookahead >= STD_MIN_MATCH {
        let str = stream.state.strstart;
        let n = stream.state.lookahead - (STD_MIN_MATCH - 1);
        stream.state.insert_string(str, n);
        stream.state.strstart = str + n;
        stream.state.lookahead = STD_MIN_MATCH - 1;
        fill_window(stream);
    }

    let state = &mut stream.state;

    // Account for the remaining (un-inserted) tail of the dictionary.
    state.strstart += state.lookahead;
    state.block_start = state.strstart as _;
    state.insert = state.lookahead;
    state.lookahead = 0;
    state.prev_length = 0;
    state.match_available = false;

    // Restore the caller's input pointers and the real wrapper mode.
    stream.next_in = next;
    stream.avail_in = avail;
    state.wrap = wrap;

    ReturnCode::Ok
}
544
/// Inserts `bits` bits (the low bits of `value`) into the compressed output
/// ahead of any pending compressed data.
///
/// Returns `BufError` when `bits` is negative or exceeds either the bit
/// buffer width or the width of `value`; `Ok` otherwise.
pub fn prime(stream: &mut DeflateStream, mut bits: i32, value: i32) -> ReturnCode {
    // zlib's deflatePrime only supports up to 16 bits per call.
    debug_assert!(bits <= 16, "zlib only supports up to 16 bits here");

    let mut value64 = value as u64;

    let state = &mut stream.state;

    if bits < 0
        || bits > BitWriter::BIT_BUF_SIZE as i32
        || bits > (core::mem::size_of_val(&value) << 3) as i32
    {
        return ReturnCode::BufError;
    }

    let mut put;

    // Feed the bits in chunks no larger than the free space in the bit
    // buffer, flushing between chunks.
    loop {
        put = BitWriter::BIT_BUF_SIZE - state.bit_writer.bits_used;
        let put = Ord::min(put as i32, bits);

        if state.bit_writer.bits_used == 0 {
            state.bit_writer.bit_buffer = value64;
        } else {
            // Mask to `put` bits so we never clobber bits already buffered.
            state.bit_writer.bit_buffer |=
                (value64 & ((1 << put) - 1)) << state.bit_writer.bits_used;
        }

        state.bit_writer.bits_used += put as u8;
        state.bit_writer.flush_bits();
        value64 >>= put;
        bits -= put;

        if bits == 0 {
            break;
        }
    }

    ReturnCode::Ok
}
585
586pub fn copy<'a>(
587 dest: &mut MaybeUninit<DeflateStream<'a>>,
588 source: &mut DeflateStream<'a>,
589) -> ReturnCode {
590 unsafe {
593 core::ptr::copy_nonoverlapping(source, dest.as_mut_ptr(), 1);
594 }
595
596 let alloc = &source.alloc;
597
598 let Some(state_allocation) = alloc.allocate_raw::<State>() else {
600 return ReturnCode::MemError;
601 };
602
603 let source_state = &source.state;
604
605 let window = source_state.window.clone_in(alloc);
606
607 let prev = alloc.allocate_slice_raw::<u16>(source_state.w_size);
608 let head = alloc.allocate_raw::<[u16; HASH_SIZE]>();
609
610 let pending = source_state.bit_writer.pending.clone_in(alloc);
611 let sym_buf = source_state.sym_buf.clone_in(alloc);
612
613 let (window, prev, head, pending, sym_buf) = match (window, prev, head, pending, sym_buf) {
615 (Some(window), Some(prev), Some(head), Some(pending), Some(sym_buf)) => {
616 (window, prev, head, pending, sym_buf)
617 }
618 (window, prev, head, pending, sym_buf) => {
619 let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
621 unsafe { core::ptr::write(field_ptr as *mut *mut State, core::ptr::null_mut()) };
622
623 unsafe {
625 if let Some(mut sym_buf) = sym_buf {
626 alloc.deallocate(sym_buf.as_mut_ptr(), sym_buf.capacity())
627 }
628 if let Some(mut pending) = pending {
629 pending.drop_in(alloc);
630 }
631 if let Some(head) = head {
632 alloc.deallocate(head.as_ptr(), HASH_SIZE)
633 }
634 if let Some(prev) = prev {
635 alloc.deallocate(prev.as_ptr(), source_state.w_size)
636 }
637 if let Some(mut window) = window {
638 window.drop_in(alloc);
639 }
640
641 alloc.deallocate(state_allocation.as_ptr(), 1);
642 }
643
644 return ReturnCode::MemError;
645 }
646 };
647
648 let prev = unsafe {
649 let prev = prev.as_ptr();
650 prev.copy_from_nonoverlapping(source_state.prev.as_ptr(), source_state.prev.len());
651 WeakSliceMut::from_raw_parts_mut(prev, source_state.prev.len())
652 };
653
654 let head = unsafe {
656 let head = head.as_ptr();
657 head.write_bytes(0, 1);
658 head.cast::<u16>().write(source_state.head.as_slice()[0]);
659 WeakArrayMut::from_ptr(head)
660 };
661
662 let mut bit_writer = BitWriter::from_pending(pending);
663 bit_writer.bits_used = source_state.bit_writer.bits_used;
664 bit_writer.bit_buffer = source_state.bit_writer.bit_buffer;
665
666 let dest_state = State {
667 status: source_state.status,
668 bit_writer,
669 last_flush: source_state.last_flush,
670 wrap: source_state.wrap,
671 strategy: source_state.strategy,
672 level: source_state.level,
673 good_match: source_state.good_match,
674 nice_match: source_state.nice_match,
675 l_desc: source_state.l_desc.clone(),
676 d_desc: source_state.d_desc.clone(),
677 bl_desc: source_state.bl_desc.clone(),
678 prev_match: source_state.prev_match,
679 match_available: source_state.match_available,
680 strstart: source_state.strstart,
681 match_start: source_state.match_start,
682 prev_length: source_state.prev_length,
683 max_chain_length: source_state.max_chain_length,
684 max_lazy_match: source_state.max_lazy_match,
685 block_start: source_state.block_start,
686 block_open: source_state.block_open,
687 window,
688 sym_buf,
689 lit_bufsize: source_state.lit_bufsize,
690 window_size: source_state.window_size,
691 matches: source_state.matches,
692 opt_len: source_state.opt_len,
693 static_len: source_state.static_len,
694 insert: source_state.insert,
695 w_size: source_state.w_size,
696 lookahead: source_state.lookahead,
697 prev,
698 head,
699 ins_h: source_state.ins_h,
700 hash_calc_variant: source_state.hash_calc_variant,
701 crc_fold: source_state.crc_fold,
702 gzhead: None,
703 gzindex: source_state.gzindex,
704 _cache_line_0: (),
705 _cache_line_1: (),
706 _cache_line_2: (),
707 _cache_line_3: (),
708 _padding_0: source_state._padding_0,
709 };
710
711 unsafe { state_allocation.as_ptr().write(dest_state) }; let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
716 unsafe { core::ptr::write(field_ptr as *mut *mut State, state_allocation.as_ptr()) };
717
718 let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state.gzhead) };
720 unsafe { core::ptr::copy(&source_state.gzhead, field_ptr, 1) };
721
722 ReturnCode::Ok
723}
724
/// Releases all memory owned by the stream's deflate state and unlinks the
/// state from the underlying `z_stream`.
///
/// Deallocation always happens; the return value reports whether the stream
/// was still mid-compression (`Status::Busy`) when it was ended.
pub fn end<'a>(stream: &'a mut DeflateStream) -> Result<&'a mut z_stream, &'a mut z_stream> {
    let status = stream.state.status;

    let alloc = stream.alloc;

    // SAFETY: all of these buffers were allocated by `alloc` in `init` with
    // the counts used here, and none of them is touched again afterwards.
    unsafe {
        stream.state.sym_buf.drop_in(&alloc);
        stream.state.bit_writer.pending.drop_in(&alloc);
        // count 1: the head table is a single [u16; HASH_SIZE] allocation
        alloc.deallocate(stream.state.head.as_mut_ptr(), 1);
        if !stream.state.prev.is_empty() {
            alloc.deallocate(stream.state.prev.as_mut_ptr(), stream.state.prev.len());
        }
        stream.state.window.drop_in(&alloc);
    }

    // Unlink the state pointer first, then free the state allocation itself.
    let stream = stream.as_z_stream_mut();
    let state = core::mem::replace(&mut stream.state, core::ptr::null_mut());

    // SAFETY: `state` was allocated by this allocator in `init`.
    unsafe {
        alloc.deallocate(state as *mut State, 1);
    }

    match status {
        Status::Busy => Err(stream),
        _ => Ok(stream),
    }
}
759
760pub fn reset(stream: &mut DeflateStream) -> ReturnCode {
761 let ret = reset_keep(stream);
762
763 if ret == ReturnCode::Ok {
764 lm_init(stream.state);
765 }
766
767 ret
768}
769
770fn reset_keep(stream: &mut DeflateStream) -> ReturnCode {
771 stream.total_in = 0;
772 stream.total_out = 0;
773 stream.msg = core::ptr::null_mut();
774 stream.data_type = crate::c_api::Z_UNKNOWN;
775
776 let state = &mut stream.state;
777
778 state.bit_writer.pending.reset_keep();
779
780 state.wrap = state.wrap.abs();
782
783 state.status = match state.wrap {
784 2 => Status::GZip,
785 _ => Status::Init,
786 };
787
788 stream.adler = match state.wrap {
789 2 => {
790 state.crc_fold = Crc32Fold::new();
791 CRC32_INITIAL_VALUE as _
792 }
793 _ => ADLER32_INITIAL_VALUE as _,
794 };
795
796 state.last_flush = -2;
797
798 state.zng_tr_init();
799
800 ReturnCode::Ok
801}
802
/// (Re-)initializes the longest-match state for the current level.
fn lm_init(state: &mut State) {
    // The window buffer spans two copies of the search window.
    state.window_size = 2 * state.w_size;

    // Clear the hash heads so no stale matches survive a reset.
    state.head.as_mut_slice().fill(0);

    // Re-derive the per-level tuning parameters.
    lm_set_level(state, state.level);

    // Reset all match-finder bookkeeping.
    state.strstart = 0;
    state.block_start = 0;
    state.lookahead = 0;
    state.insert = 0;
    state.prev_length = 0;
    state.match_available = false;
    state.match_start = 0;
    state.ins_h = 0;
}
821
822fn lm_set_level(state: &mut State, level: i8) {
823 state.max_lazy_match = CONFIGURATION_TABLE[level as usize].max_lazy;
824 state.good_match = CONFIGURATION_TABLE[level as usize].good_length;
825 state.nice_match = CONFIGURATION_TABLE[level as usize].nice_length;
826 state.max_chain_length = CONFIGURATION_TABLE[level as usize].max_chain;
827
828 state.hash_calc_variant = HashCalcVariant::for_max_chain_length(state.max_chain_length);
829 state.level = level;
830}
831
832pub fn tune(
833 stream: &mut DeflateStream,
834 good_length: usize,
835 max_lazy: usize,
836 nice_length: usize,
837 max_chain: usize,
838) -> ReturnCode {
839 stream.state.good_match = good_length as u16;
840 stream.state.max_lazy_match = max_lazy as u16;
841 stream.state.nice_match = nice_length as u16;
842 stream.state.max_chain_length = max_chain as u16;
843
844 ReturnCode::Ok
845}
846
/// A Huffman tree node: two 16-bit fields that are each used for two
/// purposes (like the unions in zlib's `ct_data`): `a` is the frequency
/// while building a tree and the bit pattern afterwards; `b` is the parent
/// ("dad") index while building and the code length afterwards.
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct Value {
    // frequency, or code bits
    a: u16,
    // father node index, or bit length
    b: u16,
}
853
impl Value {
    pub(crate) const fn new(a: u16, b: u16) -> Self {
        Self { a, b }
    }

    /// Frequency accessor; shares field `a` with `code` (union-style).
    pub(crate) fn freq_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    /// Code-bits accessor; intentionally the same field as `freq`.
    pub(crate) fn code_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    /// Parent-node accessor; shares field `b` with `len` (union-style).
    pub(crate) fn dad_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    /// Bit-length accessor; intentionally the same field as `dad`.
    pub(crate) fn len_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    #[inline(always)]
    pub(crate) const fn freq(self) -> u16 {
        self.a
    }

    #[inline(always)]
    pub(crate) const fn code(self) -> u16 {
        self.a
    }

    #[inline(always)]
    pub(crate) const fn dad(self) -> u16 {
        self.b
    }

    #[inline(always)]
    pub(crate) const fn len(self) -> u16 {
        self.b
    }
}
895
/// Number of length codes, not counting the special END_BLOCK code.
pub(crate) const LENGTH_CODES: usize = 29;

/// Number of literal byte values.
const LITERALS: usize = 256;

/// Number of literal/length codes, including the END_BLOCK code.
pub(crate) const L_CODES: usize = LITERALS + 1 + LENGTH_CODES;

/// Number of distance codes.
pub(crate) const D_CODES: usize = 30;

/// Number of codes used to transfer the bit lengths.
const BL_CODES: usize = 19;

/// Maximum heap size used when constructing a Huffman tree.
const HEAP_SIZE: usize = 2 * L_CODES + 1;

/// All codes must not exceed this many bits.
const MAX_BITS: usize = 15;

/// Bit-length codes must not exceed this many bits.
const MAX_BL_BITS: usize = 7;

/// Size of the distance-to-distance-code lookup table.
pub(crate) const DIST_CODE_LEN: usize = 512;
921
/// Accumulates output bits in a 64-bit buffer (low bits first) and flushes
/// whole little-endian bytes into the `pending` output buffer.
struct BitWriter<'a> {
    pub(crate) pending: Pending<'a>,
    /// Bits not yet flushed; only the low `bits_used` bits are meaningful.
    pub(crate) bit_buffer: u64,
    pub(crate) bits_used: u8,

    /// Debug-only statistic: total compressed length in bits.
    #[cfg(feature = "ZLIB_DEBUG")]
    compressed_len: usize,
    /// Debug-only statistic: total number of bits sent.
    #[cfg(feature = "ZLIB_DEBUG")]
    bits_sent: usize,
}
934
/// Looks up the Huffman encoding of a match-length symbol.
///
/// `lc` is the length value as stored in the symbol buffer. Returns
/// `(bit pattern, number of valid bits)` with any extra length bits already
/// appended after the Huffman code.
#[inline]
const fn encode_len(ltree: &[Value], lc: u8) -> (u64, usize) {
    let mut lc = lc as usize;

    // Map the length to its length code; length codes come after the
    // literals and the END_BLOCK symbol in `ltree`.
    let code = self::trees_tbl::LENGTH_CODE[lc] as usize;
    let c = code + LITERALS + 1;
    assert!(c < L_CODES, "bad l_code");
    let lnode = ltree[c];

    let mut match_bits: u64 = lnode.code() as u64;
    let mut match_bits_len = lnode.len() as usize;
    let extra = StaticTreeDesc::EXTRA_LBITS[code] as usize;
    if extra != 0 {
        // Append the extra bits: the length minus the code's base length.
        lc -= self::trees_tbl::BASE_LENGTH[code] as usize;
        match_bits |= (lc as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}
957
/// Looks up the Huffman encoding of a match distance.
///
/// Returns `(bit pattern, number of valid bits)` with any extra distance
/// bits already appended after the Huffman code.
#[inline]
const fn encode_dist(dtree: &[Value], mut dist: u16) -> (u64, usize) {
    // Distances are stored 1-based in the symbol buffer; the code tables
    // are 0-based.
    dist -= 1;
    let code = State::d_code(dist as usize) as usize;
    assert!(code < D_CODES, "bad d_code");
    let dnode = dtree[code];

    let mut match_bits = dnode.code() as u64;
    let mut match_bits_len = dnode.len() as usize;
    let extra = StaticTreeDesc::EXTRA_DBITS[code] as usize;
    if extra != 0 {
        // Append the extra bits: the distance minus the code's base distance.
        dist -= self::trees_tbl::BASE_DIST[code];
        match_bits |= (dist as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}
978
impl<'a> BitWriter<'a> {
    /// Capacity of `bit_buffer` in bits.
    pub(crate) const BIT_BUF_SIZE: u8 = 64;

    fn from_pending(pending: Pending<'a>) -> Self {
        Self {
            pending,
            bit_buffer: 0,
            bits_used: 0,

            #[cfg(feature = "ZLIB_DEBUG")]
            compressed_len: 0,
            #[cfg(feature = "ZLIB_DEBUG")]
            bits_sent: 0,
        }
    }

    /// Moves all *complete* bytes from the bit buffer into `pending`,
    /// keeping at most 7 trailing bits buffered.
    fn flush_bits(&mut self) {
        debug_assert!(self.bits_used <= 64);
        // `removed` is the number of bits dropped from the buffer and always
        // equals `keep_bytes * 8`: whole bytes only.
        let removed = self.bits_used.saturating_sub(7).next_multiple_of(8);
        let keep_bytes = self.bits_used / 8;
        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used -= removed;
        // `checked_shr` covers `removed == 64`, where a plain shift on u64
        // would overflow.
        self.bit_buffer = self.bit_buffer.checked_shr(removed as u32).unwrap_or(0);
    }

    /// Flushes the bit buffer *including* a partial final byte (zero-padded),
    /// leaving the writer byte-aligned and empty.
    fn emit_align(&mut self) {
        debug_assert!(self.bits_used <= 64);
        let keep_bytes = self.bits_used.div_ceil(8);
        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used = 0;
        self.bit_buffer = 0;

        self.sent_bits_align();
    }

    fn send_bits_trace(&self, _value: u64, _len: u8) {
        trace!(" l {:>2} v {:>4x} ", _len, _value);
    }

    /// Debug statistics: account `_len` compressed bits.
    fn cmpr_bits_add(&mut self, _len: usize) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.compressed_len += _len;
        }
    }

    /// Debug statistics: round the compressed length up to a byte boundary.
    fn cmpr_bits_align(&mut self) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.compressed_len = self.compressed_len.next_multiple_of(8);
        }
    }

    /// Debug statistics: account `_len` sent bits.
    fn sent_bits_add(&mut self, _len: usize) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bits_sent += _len;
        }
    }

    /// Debug statistics: round the sent-bit count up to a byte boundary.
    fn sent_bits_align(&mut self) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bits_sent = self.bits_sent.next_multiple_of(8);
        }
    }

    /// Appends the low `len` bits of `val` to the output bit stream.
    #[inline(always)]
    fn send_bits(&mut self, val: u64, len: u8) {
        debug_assert!(len <= 64);
        debug_assert!(self.bits_used <= 64);

        let total_bits = len + self.bits_used;

        self.send_bits_trace(val, len);
        self.sent_bits_add(len as usize);

        if total_bits < Self::BIT_BUF_SIZE {
            // Fast path: everything fits in the bit buffer.
            self.bit_buffer |= val << self.bits_used;
            self.bits_used = total_bits;
        } else {
            self.send_bits_overflow(val, total_bits);
        }
    }

    /// Slow path of `send_bits`: the buffer fills up, so 64 bits go to
    /// `pending` and the remaining bits of `val` stay buffered.
    fn send_bits_overflow(&mut self, val: u64, total_bits: u8) {
        if self.bits_used == Self::BIT_BUF_SIZE {
            self.pending.extend(&self.bit_buffer.to_le_bytes());
            self.bit_buffer = val;
            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
        } else {
            self.bit_buffer |= val << self.bits_used;
            self.pending.extend(&self.bit_buffer.to_le_bytes());
            // Carry the bits of `val` that did not fit.
            self.bit_buffer = val >> (Self::BIT_BUF_SIZE - self.bits_used);
            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
        }
    }

    /// Emits the Huffman code for symbol `code` from `tree`.
    fn send_code(&mut self, code: usize, tree: &[Value]) {
        let node = tree[code];
        self.send_bits(node.code() as u64, node.len() as u8)
    }

    /// Outputs an empty static-trees block and flushes complete bytes; used
    /// to move the output toward a byte boundary.
    pub fn align(&mut self) {
        self.emit_tree(BlockType::StaticTrees, false);
        self.emit_end_block(&STATIC_LTREE, false);
        self.flush_bits();
    }

    /// Emits the 3-bit deflate block header: the "last block" flag in the
    /// low bit, then two block-type bits.
    pub(crate) fn emit_tree(&mut self, block_type: BlockType, is_last_block: bool) {
        let header_bits = ((block_type as u64) << 1) | (is_last_block as u64);
        self.send_bits(header_bits, 3);
        trace!("\n--- Emit Tree: Last: {}\n", is_last_block as u8);
    }

    pub(crate) fn emit_end_block_and_align(&mut self, ltree: &[Value], is_last_block: bool) {
        self.emit_end_block(ltree, is_last_block);

        if is_last_block {
            // The final block must end on a byte boundary.
            self.emit_align();
        }
    }

    /// Emits the END_BLOCK symbol that terminates the current block.
    fn emit_end_block(&mut self, ltree: &[Value], _is_last_block: bool) {
        const END_BLOCK: usize = 256;
        self.send_code(END_BLOCK, ltree);

        trace!(
            "\n+++ Emit End Block: Last: {} Pending: {} Total Out: {}\n",
            _is_last_block as u8,
            self.pending.pending().len(),
            "<unknown>"
        );
    }

    /// Emits a literal byte; returns the number of bits written.
    pub(crate) fn emit_lit(&mut self, ltree: &[Value], c: u8) -> u16 {
        self.send_code(c as usize, ltree);

        #[cfg(feature = "ZLIB_DEBUG")]
        if let Some(c) = char::from_u32(c as u32) {
            if isgraph(c as u8) {
                trace!(" '{}' ", c);
            }
        }

        ltree[c as usize].len()
    }

    /// Emits a (length, distance) pair; returns the number of bits written.
    pub(crate) fn emit_dist(
        &mut self,
        ltree: &[Value],
        dtree: &[Value],
        lc: u8,
        dist: u16,
    ) -> usize {
        let (mut match_bits, mut match_bits_len) = encode_len(ltree, lc);

        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);

        // Concatenate the distance code after the length code and emit both
        // with a single send_bits call.
        match_bits |= dist_match_bits << match_bits_len;
        match_bits_len += dist_match_bits_len;

        self.send_bits(match_bits, match_bits_len as u8);

        match_bits_len
    }

    /// Like `emit_dist`, but uses the precomputed static literal/length
    /// encodings and the static distance tree.
    pub(crate) fn emit_dist_static(&mut self, lc: u8, dist: u16) -> usize {
        let precomputed_len = trees_tbl::STATIC_LTREE_ENCODINGS[lc as usize];
        let mut match_bits = precomputed_len.code() as u64;
        let mut match_bits_len = precomputed_len.len() as usize;

        let dtree = self::trees_tbl::STATIC_DTREE.as_slice();
        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);

        match_bits |= dist_match_bits << match_bits_len;
        match_bits_len += dist_match_bits_len;

        self.send_bits(match_bits, match_bits_len as u8);

        match_bits_len
    }

    /// Emits all buffered symbols (3 bytes each: a little-endian 16-bit
    /// distance, then the literal/length byte; distance 0 marks a literal)
    /// followed by the END_BLOCK symbol.
    fn compress_block_help(&mut self, sym_buf: &[u8], ltree: &[Value], dtree: &[Value]) {
        for chunk in sym_buf.chunks_exact(3) {
            let [dist_low, dist_high, lc] = *chunk else {
                unreachable!("out of bound access on the symbol buffer");
            };

            match u16::from_le_bytes([dist_low, dist_high]) {
                0 => self.emit_lit(ltree, lc) as usize,
                dist => self.emit_dist(ltree, dtree, lc, dist),
            };
        }

        self.emit_end_block(ltree, false)
    }

    /// Sends the code-length sequence of `tree` (codes `0..=max_code`),
    /// run-length encoded using the REP_3_6 / REPZ_3_10 / REPZ_11_138 codes
    /// of the bit-length tree.
    fn send_tree(&mut self, tree: &[Value], bl_tree: &[Value], max_code: usize) {
        // Last emitted code length; -1 guarantees the first length is sent.
        let mut prevlen: isize = -1;
        // Length of the current and the next code.
        let mut curlen;
        let mut nextlen = tree[0].len();
        // Repeat count for the current length, bounded by what the chosen
        // repeat code can express.
        let mut count = 0;
        let mut max_count = 7;
        let mut min_count = 4;

        if nextlen == 0 {
            max_count = 138;
            min_count = 3;
        }

        for n in 0..=max_code {
            curlen = nextlen;
            nextlen = tree[n + 1].len();
            count += 1;
            if count < max_count && curlen == nextlen {
                // Keep extending the current run.
                continue;
            } else if count < min_count {
                // Run too short for a repeat code; send lengths directly.
                loop {
                    self.send_code(curlen as usize, bl_tree);

                    count -= 1;
                    if count == 0 {
                        break;
                    }
                }
            } else if curlen != 0 {
                // Repeated non-zero length: emit it once if it changed,
                // then a REP_3_6 with the remaining count.
                if curlen as isize != prevlen {
                    self.send_code(curlen as usize, bl_tree);
                    count -= 1;
                }
                assert!((3..=6).contains(&count), " 3_6?");
                self.send_code(REP_3_6, bl_tree);
                self.send_bits(count - 3, 2);
            } else if count <= 10 {
                // Short run of zero lengths.
                self.send_code(REPZ_3_10, bl_tree);
                self.send_bits(count - 3, 3);
            } else {
                // Long run of zero lengths.
                self.send_code(REPZ_11_138, bl_tree);
                self.send_bits(count - 11, 7);
            }

            count = 0;
            prevlen = curlen as isize;

            // Pick the bounds for the next run based on what follows.
            if nextlen == 0 {
                max_count = 138;
                min_count = 3;
            } else if curlen == nextlen {
                max_count = 6;
                min_count = 3;
            } else {
                max_count = 7;
                min_count = 4;
            }
        }
    }
}
1248
/// Internal state of a deflate stream.
///
/// `#[repr(C, align(64))]` together with the zero-sized `_cache_line_*`
/// markers groups fields that are used together onto shared cache lines.
#[repr(C, align(64))]
pub(crate) struct State<'a> {
    status: Status,

    /// Flush bookkeeping for the previous deflate call; -2 means "no call
    /// since reset" (see `reset_keep` and `params`).
    last_flush: i8,
    /// Wrapper around the raw deflate data: 0 = none, 1 = zlib, 2 = gzip
    /// (assigned in `init`).
    pub(crate) wrap: i8,
    pub(crate) strategy: Strategy,
    /// Compression level, 0..=9.
    pub(crate) level: i8,

    // NOTE(review): appears to track whether a block is currently open;
    // set/read outside this chunk — confirm.
    pub(crate) block_open: u8,

    pub(crate) hash_calc_variant: HashCalcVariant,

    /// Whether a deferred match from the previous step may still be emitted
    /// (cleared in `lm_init` and `set_dictionary`).
    pub(crate) match_available: bool,
    /// Per-level tuning parameter (see `lm_set_level`).
    pub(crate) good_match: u16,

    /// Per-level tuning parameter: stop searching once a match is at least
    /// this long (see `lm_set_level`).
    pub(crate) nice_match: u16,

    /// Window position where the current match starts.
    pub(crate) match_start: Pos,
    pub(crate) prev_match: Pos,
    /// Current position in the window / start of the string to insert.
    pub(crate) strstart: usize,
    /// The sliding input window.
    pub(crate) window: Window<'a>,
    /// Window size in bytes: `1 << window_bits` (set in `init`).
    pub(crate) w_size: usize,
    /// Number of valid bytes ahead of `strstart` in the window.
    pub(crate) lookahead: usize,
    _cache_line_0: (),

    /// Hash chain links: previous window position with the same hash
    /// (w_size entries, zero-initialized in `init`).
    pub(crate) prev: WeakSliceMut<'a, u16>,
    /// Heads of the hash chains, one per hash value.
    pub(crate) head: WeakArrayMut<'a, u16, HASH_SIZE>,

    /// Length of the best match found in the previous step.
    pub(crate) prev_length: u16,

    /// Per-level limit on the hash-chain walk; also selects the hash
    /// variant (see `lm_set_level`).
    pub(crate) max_chain_length: u16,

    /// Per-level lazy-matching threshold (see `max_insert_length`).
    pub(crate) max_lazy_match: u16,

    // NOTE(review): doubles as a small counter/flag; `params` treats the
    // values 1 (slide hash) and >1 (clear hash) specially.
    pub(crate) matches: u8,

    /// Window offset where the current block starts; `isize` so it can
    /// represent positions relative to a slid window.
    pub(crate) block_start: isize,

    /// Buffered (distance, literal/length) symbols for the current block;
    /// 3 bytes per symbol (see `compress_block_help`).
    pub(crate) sym_buf: ReadBuf<'a>,

    _cache_line_1: (),

    /// Literal-buffer sizing unit: `sym_buf` is 3x and the pending buffer
    /// 4x this value (see `init` and `pending_buf_size`).
    lit_bufsize: usize,

    /// Total window buffer size: `2 * w_size` (set in `lm_init`).
    pub(crate) window_size: usize,

    /// Bit-level output writer; owns the pending byte buffer.
    bit_writer: BitWriter<'a>,

    _cache_line_2: (),

    /// Bit-length bookkeeping for the current block (dynamic trees).
    opt_len: usize,
    /// Bit-length bookkeeping for the current block (static trees).
    static_len: usize,

    /// Number of window bytes at `strstart - insert` still to be hashed.
    pub(crate) insert: usize,

    /// Rolling hash of the current insertion string.
    pub(crate) ins_h: u32,

    /// Optional user-supplied gzip header (gzip wrapper only).
    gzhead: Option<&'a mut gz_header>,
    /// Progress cursor while emitting `gzhead`.
    gzindex: usize,

    _padding_0: [u8; 16],

    _cache_line_3: (),

    /// Incremental CRC-32 state for the gzip wrapper.
    crc_fold: crate::crc32::Crc32Fold,

    /// Dynamic Huffman tree descriptors: literal/length tree,
    /// distance tree, and bit-length tree.
    l_desc: TreeDesc<HEAP_SIZE>,
    d_desc: TreeDesc<{ 2 * D_CODES + 1 }>,
    bl_desc: TreeDesc<{ 2 * BL_CODES + 1 }>,
}
1379
/// Compression strategy; the discriminants match zlib's
/// `Z_DEFAULT_STRATEGY` .. `Z_FIXED` constants (0..=4).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Strategy {
    #[default]
    Default = 0,
    Filtered = 1,
    HuffmanOnly = 2,
    Rle = 3,
    Fixed = 4,
}
1390
1391impl TryFrom<i32> for Strategy {
1392 type Error = ();
1393
1394 fn try_from(value: i32) -> Result<Self, Self::Error> {
1395 match value {
1396 0 => Ok(Strategy::Default),
1397 1 => Ok(Strategy::Filtered),
1398 2 => Ok(Strategy::HuffmanOnly),
1399 3 => Ok(Strategy::Rle),
1400 4 => Ok(Strategy::Fixed),
1401 _ => Err(()),
1402 }
1403 }
1404}
1405
/// Best-effort classification of the input data, reported through the
/// stream's `data_type` field (see `detect_data_type`).
#[derive(Debug, PartialEq, Eq)]
enum DataType {
    Binary = 0,
    Text = 1,
    Unknown = 2,
}
1412
impl<'a> State<'a> {
    pub const BIT_BUF_SIZE: u8 = BitWriter::BIT_BUF_SIZE;

    /// log2 of the window size (w_size is always a power of two).
    pub(crate) fn w_bits(&self) -> u32 {
        self.w_size.trailing_zeros()
    }

    /// Bitmask for reducing a position modulo the window size.
    pub(crate) fn w_mask(&self) -> usize {
        self.w_size - 1
    }

    /// Maximum back-reference distance; leaves MIN_LOOKAHEAD bytes of slack so
    /// matches never read past the valid part of the window.
    pub(crate) fn max_dist(&self) -> usize {
        self.w_size - MIN_LOOKAHEAD
    }

    /// Longest match length that still gets inserted into the hash table.
    pub(crate) fn max_insert_length(&self) -> usize {
        self.max_lazy_match as usize
    }

    /// Size of the pending output buffer (4x the literal buffer size).
    pub(crate) fn pending_buf_size(&self) -> usize {
        self.lit_bufsize * 4
    }

    /// Advances the rolling hash `h` with the next input value, dispatching on
    /// the configured hash implementation.
    #[inline(always)]
    pub(crate) fn update_hash(&self, h: u32, val: u32) -> u32 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::update_hash(h, val),
            HashCalcVariant::Roll => RollHashCalc::update_hash(h, val),
        }
    }

    /// Inserts the string at window position `string` into the hash table and
    /// returns the previous head of its hash chain.
    #[inline(always)]
    pub(crate) fn quick_insert_string(&mut self, string: usize) -> u16 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::quick_insert_string(self, string),
            HashCalcVariant::Roll => RollHashCalc::quick_insert_string(self, string),
        }
    }

    /// Inserts `count` consecutive strings starting at `string` into the hash
    /// table.
    #[inline(always)]
    pub(crate) fn insert_string(&mut self, string: usize, count: usize) {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::insert_string(self, string, count),
            HashCalcVariant::Roll => RollHashCalc::insert_string(self, string, count),
        }
    }

    /// Records a literal byte in the symbol buffer; returns true when the
    /// buffer is full and the block should be flushed.
    #[inline(always)]
    pub(crate) fn tally_lit(&mut self, unmatched: u8) -> bool {
        Self::tally_lit_help(&mut self.sym_buf, &mut self.l_desc, unmatched)
    }

    /// Free-function form of [`Self::tally_lit`] so callers can split borrows
    /// of the symbol buffer and the literal tree.
    #[inline(always)]
    pub(crate) fn tally_lit_help(
        sym_buf: &mut ReadBuf<'a>,
        l_desc: &mut TreeDesc<HEAP_SIZE>,
        unmatched: u8,
    ) -> bool {
        sym_buf.push_lit(unmatched);

        // Bump the literal's frequency for dynamic Huffman tree construction.
        *l_desc.dyn_tree[unmatched as usize].freq_mut() += 1;

        assert!(
            unmatched as usize <= STD_MAX_MATCH - STD_MIN_MATCH,
            "zng_tr_tally: bad literal"
        );

        // Full when only one 3-byte symbol slot remains.
        sym_buf.len() == sym_buf.capacity() - 3
    }

    /// Maps a match distance to its distance code via the precomputed table.
    const fn d_code(dist: usize) -> u8 {
        let index = if dist < 256 { dist } else { 256 + (dist >> 7) };
        self::trees_tbl::DIST_CODE[index]
    }

    /// Records a (distance, length) match in the symbol buffer and updates the
    /// literal/length and distance tree frequencies; returns true when the
    /// buffer is full.
    #[inline(always)]
    pub(crate) fn tally_dist(&mut self, mut dist: usize, len: usize) -> bool {
        self.sym_buf.push_dist(dist as u16, len as u8);

        self.matches = self.matches.saturating_add(1);
        dist -= 1;

        assert!(
            dist < self.max_dist() && Self::d_code(dist) < D_CODES as u8,
            "tally_dist: bad match"
        );

        // len is offset relative to STD_MIN_MATCH; LENGTH_CODE maps it to the
        // length-code slot in the literal/length tree.
        let index = self::trees_tbl::LENGTH_CODE[len] as usize + LITERALS + 1;
        *self.l_desc.dyn_tree[index].freq_mut() += 1;

        *self.d_desc.dyn_tree[Self::d_code(dist) as usize].freq_mut() += 1;

        self.sym_buf.len() == self.sym_buf.capacity() - 3
    }

    /// Heuristically classifies the block as text or binary based on the
    /// literal frequency table.
    fn detect_data_type(dyn_tree: &[Value]) -> DataType {
        // Bitmask over byte values 0..32: a set bit marks a control code whose
        // presence makes the data binary.
        const NON_TEXT: u64 = 0xf3ffc07f;
        let mut mask = NON_TEXT;

        // Any occurrence of a disallowed control code => binary.
        for value in &dyn_tree[0..32] {
            if (mask & 1) != 0 && value.freq() != 0 {
                return DataType::Binary;
            }

            mask >>= 1;
        }

        // TAB (9), LF (10) or CR (13) => text.
        if dyn_tree[9].freq() != 0 || dyn_tree[10].freq() != 0 || dyn_tree[13].freq() != 0 {
            return DataType::Text;
        }

        // Any printable/extended byte => text.
        if dyn_tree[32..LITERALS].iter().any(|v| v.freq() != 0) {
            return DataType::Text;
        }

        // Only allowed control codes seen (or no literals at all).
        DataType::Binary
    }

    /// Emits the buffered symbols using the fixed (static) Huffman trees.
    fn compress_block_static_trees(&mut self) {
        let ltree = self::trees_tbl::STATIC_LTREE.as_slice();
        // Each symbol is 3 bytes: distance (LE u16) then length/literal.
        for chunk in self.sym_buf.filled().chunks_exact(3) {
            let [dist_low, dist_high, lc] = *chunk else {
                unreachable!("out of bound access on the symbol buffer");
            };

            match u16::from_le_bytes([dist_low, dist_high]) {
                // Distance 0 marks a plain literal.
                0 => self.bit_writer.emit_lit(ltree, lc) as usize,
                dist => self.bit_writer.emit_dist_static(lc, dist),
            };
        }

        self.bit_writer.emit_end_block(ltree, false)
    }

    /// Emits the buffered symbols using the dynamic Huffman trees built for
    /// this block.
    fn compress_block_dynamic_trees(&mut self) {
        self.bit_writer.compress_block_help(
            self.sym_buf.filled(),
            &self.l_desc.dyn_tree,
            &self.d_desc.dyn_tree,
        );
    }

    /// Builds the 2-byte zlib stream header (CMF/FLG), including the preset
    /// dictionary flag and the FCHECK value that makes the header a multiple
    /// of 31.
    fn header(&self) -> u16 {
        const PRESET_DICT: u16 = 0x20;

        const Z_DEFLATED: u16 = 8;

        // A non-zero strstart here means a dictionary was preloaded.
        let dict = match self.strstart {
            0 => 0,
            _ => PRESET_DICT,
        };

        let h = ((Z_DEFLATED + ((self.w_bits() as u16 - 8) << 4)) << 8)
            | (self.level_flags() << 6)
            | dict;

        // FCHECK: round up so the 16-bit header is divisible by 31.
        h + 31 - (h % 31)
    }

    /// 2-bit FLEVEL field for the zlib header: 0 fastest .. 3 maximum.
    fn level_flags(&self) -> u16 {
        if self.strategy >= Strategy::HuffmanOnly || self.level < 2 {
            0
        } else if self.level < 6 {
            1
        } else if self.level == 6 {
            2
        } else {
            3
        }
    }

    /// (Re)initializes the Huffman tree descriptors and the bit writer for a
    /// fresh stream.
    fn zng_tr_init(&mut self) {
        self.l_desc.stat_desc = &StaticTreeDesc::L;

        self.d_desc.stat_desc = &StaticTreeDesc::D;

        self.bl_desc.stat_desc = &StaticTreeDesc::BL;

        self.bit_writer.bit_buffer = 0;
        self.bit_writer.bits_used = 0;

        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bit_writer.compressed_len = 0;
            self.bit_writer.bits_sent = 0;
        }

        self.init_block();
    }

    /// Resets all per-block statistics: tree frequencies, bit-length costs,
    /// the symbol buffer and the match counter.
    fn init_block(&mut self) {
        for value in &mut self.l_desc.dyn_tree[..L_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.d_desc.dyn_tree[..D_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.bl_desc.dyn_tree[..BL_CODES] {
            *value.freq_mut() = 0;
        }

        // The end-of-block symbol always occurs exactly once per block.
        const END_BLOCK: usize = 256;

        *self.l_desc.dyn_tree[END_BLOCK].freq_mut() = 1;
        self.opt_len = 0;
        self.static_len = 0;
        self.sym_buf.clear();
        self.matches = 0;
    }
}
1646
/// Phase of the deflate state machine. The gzip header phases (GZip..Hcrc)
/// are traversed in order before settling into Busy; Finish is entered once
/// a Finish flush has started.
#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Status {
    Init = 1,

    // gzip header emission sub-phases, in emission order.
    GZip = 4,
    Extra = 5,
    Name = 6,
    Comment = 7,
    Hcrc = 8,

    Busy = 2,
    Finish = 3,
}
1661
/// Ranks flush values so `deflate` can tell whether a new flush request is at
/// least as "strong" as the previous one. Values above 4 (Block, Finish) are
/// folded below the others by subtracting 9 from their doubled value.
const fn rank_flush(f: i8) -> i8 {
    let doubled = f * 2;
    if f > 4 {
        doubled - 9
    } else {
        doubled
    }
}
1666
/// Result of running one step of a block-compression algorithm
/// (see `algorithm::run`).
#[derive(Debug)]
pub(crate) enum BlockState {
    // Progress blocked: more input or more output space is required.
    NeedMore = 0,
    // A block was completed; more work may follow.
    BlockDone = 1,
    // Finishing began, but output space ran out.
    FinishStarted = 2,
    // Finishing completed.
    FinishDone = 3,
}
1678
1679pub(crate) const MAX_STORED: usize = 65535; pub(crate) fn read_buf_window(stream: &mut DeflateStream, offset: usize, size: usize) -> usize {
1683 let len = Ord::min(stream.avail_in as usize, size);
1684
1685 if len == 0 {
1686 return 0;
1687 }
1688
1689 stream.avail_in -= len as u32;
1690
1691 if stream.state.wrap == 2 {
1692 let window = &mut stream.state.window;
1695 unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1697
1698 let data = &stream.state.window.filled()[offset..][..len];
1699 stream.state.crc_fold.fold(data, CRC32_INITIAL_VALUE);
1700 } else if stream.state.wrap == 1 {
1701 let window = &mut stream.state.window;
1704 unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1706
1707 let data = &stream.state.window.filled()[offset..][..len];
1708 stream.adler = adler32(stream.adler as u32, data) as _;
1709 } else {
1710 let window = &mut stream.state.window;
1711 unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1713 }
1714
1715 stream.next_in = stream.next_in.wrapping_add(len);
1716 stream.total_in += len as crate::c_api::z_size;
1717
1718 len
1719}
1720
/// The 2-bit BTYPE field of a deflate block header (RFC 1951).
pub(crate) enum BlockType {
    StoredBlock = 0,
    StaticTrees = 1,
    DynamicTrees = 2,
}
1726
1727pub(crate) fn zng_tr_stored_block(
1728 state: &mut State,
1729 window_range: core::ops::Range<usize>,
1730 is_last: bool,
1731) {
1732 state.bit_writer.emit_tree(BlockType::StoredBlock, is_last);
1734
1735 state.bit_writer.emit_align();
1737
1738 state.bit_writer.cmpr_bits_align();
1739
1740 let input_block: &[u8] = &state.window.filled()[window_range];
1741 let stored_len = input_block.len() as u16;
1742
1743 state.bit_writer.pending.extend(&stored_len.to_le_bytes());
1744 state
1745 .bit_writer
1746 .pending
1747 .extend(&(!stored_len).to_le_bytes());
1748
1749 state.bit_writer.cmpr_bits_add(32);
1750 state.bit_writer.sent_bits_add(32);
1751 if stored_len > 0 {
1752 state.bit_writer.pending.extend(input_block);
1753 state.bit_writer.cmpr_bits_add((stored_len << 3) as usize);
1754 state.bit_writer.sent_bits_add((stored_len << 3) as usize);
1755 }
1756}
1757
/// Minimum and maximum match lengths defined by the deflate format.
pub(crate) const STD_MIN_MATCH: usize = 3;
pub(crate) const STD_MAX_MATCH: usize = 258;

/// Minimum match length this implementation actually emits.
pub(crate) const WANT_MIN_MATCH: usize = 4;

/// Slack kept at the end of the window so a maximal match plus one following
/// string insertion never reads out of bounds.
pub(crate) const MIN_LOOKAHEAD: usize = STD_MAX_MATCH + STD_MIN_MATCH + 1;
1767
/// Refills the lookahead from the stream input, sliding the window when the
/// write position gets too close to its end, and keeps the hash state in sync
/// with the newly available bytes.
#[inline]
pub(crate) fn fill_window(stream: &mut DeflateStream) {
    debug_assert!(stream.state.lookahead < MIN_LOOKAHEAD);

    let wsize = stream.state.w_size;

    loop {
        let state = &mut *stream.state;
        // Free space at the end of the window.
        let mut more = state.window_size - state.lookahead - state.strstart;

        // Slide the window: drop the oldest wsize bytes by copying the upper
        // half over the lower half, then rebase all positions by wsize.
        if state.strstart >= wsize + state.max_dist() {
            let (old, new) = state.window.filled_mut()[..2 * wsize].split_at_mut(wsize);
            old.copy_from_slice(new);

            state.match_start = state.match_start.saturating_sub(wsize as u16);
            if state.match_start == 0 {
                // The previous match slid out of the window entirely.
                state.prev_length = 0;
            }

            state.strstart -= wsize; state.block_start -= wsize as isize;
            state.insert = Ord::min(state.insert, state.strstart);

            // Rebase the hash head/prev tables by wsize as well.
            self::slide_hash::slide_hash(state);

            more += wsize;
        }

        if stream.avail_in == 0 {
            break;
        }

        assert!(more >= 2, "more < 2");

        let n = read_buf_window(stream, stream.state.strstart + stream.state.lookahead, more);

        let state = &mut *stream.state;
        state.lookahead += n;

        // Insert any strings that became hashable now that enough lookahead
        // bytes exist (a string needs STD_MIN_MATCH bytes).
        if state.lookahead + state.insert >= STD_MIN_MATCH {
            let string = state.strstart - state.insert;
            if state.max_chain_length > 1024 {
                // Seed the rolling hash from the first two bytes.
                let v0 = state.window.filled()[string] as u32;
                let v1 = state.window.filled()[string + 1] as u32;
                state.ins_h = state.update_hash(v0, v1);
            } else if string >= 1 {
                state.quick_insert_string(string + 2 - STD_MIN_MATCH);
            }
            let mut count = state.insert;
            if state.lookahead == 1 {
                // Not enough lookahead to insert the final string yet.
                count -= 1;
            }
            if count > 0 {
                state.insert_string(string, count);
                state.insert -= count;
            }
        }

        // Keep reading while the lookahead is short and input remains.
        if !(stream.state.lookahead < MIN_LOOKAHEAD && stream.avail_in != 0) {
            break;
        }
    }

    assert!(
        stream.state.strstart <= stream.state.window_size - MIN_LOOKAHEAD,
        "not enough room for search"
    );
}
1853
/// Static (compile-time) description of one Huffman tree family: the fixed
/// codes plus the parameters needed to build the dynamic variant.
pub(crate) struct StaticTreeDesc {
    // The fixed code table (empty for the bit-length tree).
    pub(crate) static_tree: &'static [Value],
    // Extra bits required per code.
    extra_bits: &'static [u8],
    // First code index that carries extra bits.
    extra_base: usize,
    // Number of codes in this alphabet.
    elems: usize,
    // Maximum permitted code length in bits.
    max_length: u16,
}
1866
impl StaticTreeDesc {
    /// Placeholder descriptor used before `zng_tr_init` wires up the real ones.
    const EMPTY: Self = Self {
        static_tree: &[],
        extra_bits: &[],
        extra_base: 0,
        elems: 0,
        max_length: 0,
    };

    /// Extra bits for each length code (RFC 1951, section 3.2.5).
    const EXTRA_LBITS: [u8; LENGTH_CODES] = [
        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0,
    ];

    /// Extra bits for each distance code.
    const EXTRA_DBITS: [u8; D_CODES] = [
        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12,
        13, 13,
    ];

    /// Extra bits for each bit-length code (only the three repeat codes).
    const EXTRA_BLBITS: [u8; BL_CODES] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7];

    /// Transmission order of the bit-length code lengths (RFC 1951,
    /// section 3.2.7).
    const BL_ORDER: [u8; BL_CODES] = [
        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
    ];

    /// Literal/length tree descriptor.
    pub(crate) const L: Self = Self {
        static_tree: &self::trees_tbl::STATIC_LTREE,
        extra_bits: &Self::EXTRA_LBITS,
        extra_base: LITERALS + 1,
        elems: L_CODES,
        max_length: MAX_BITS as u16,
    };

    /// Distance tree descriptor.
    pub(crate) const D: Self = Self {
        static_tree: &self::trees_tbl::STATIC_DTREE,
        extra_bits: &Self::EXTRA_DBITS,
        extra_base: 0,
        elems: D_CODES,
        max_length: MAX_BITS as u16,
    };

    /// Bit-length tree descriptor (has no fixed code table).
    pub(crate) const BL: Self = Self {
        static_tree: &[],
        extra_bits: &Self::EXTRA_BLBITS,
        extra_base: 0,
        elems: BL_CODES,
        max_length: MAX_BL_BITS as u16,
    };
}
1920
/// A dynamic Huffman tree (frequencies, then codes) together with its static
/// counterpart's parameters.
#[derive(Clone)]
pub(crate) struct TreeDesc<const N: usize> {
    // Working tree: freq/code and dad/len pairs per node.
    dyn_tree: [Value; N],
    // Largest code with a non-zero frequency.
    max_code: usize,
    // Parameters shared with the static tree of the same alphabet.
    stat_desc: &'static StaticTreeDesc,
}
1927
impl<const N: usize> TreeDesc<N> {
    /// Zeroed descriptor; used as a placeholder when temporarily moving a
    /// tree out of `State` (see the swap pattern in `build_bl_tree`).
    const EMPTY: Self = Self {
        dyn_tree: [Value::new(0, 0); N],
        max_code: 0,
        stat_desc: &StaticTreeDesc::EMPTY,
    };
}
1935
/// Builds an optimal Huffman tree for `desc` from its symbol frequencies:
/// constructs the heap, derives code lengths (capped at the alphabet's
/// maximum) and finally assigns the canonical codes.
fn build_tree<const N: usize>(state: &mut State, desc: &mut TreeDesc<N>) {
    let tree = &mut desc.dyn_tree;
    let stree = desc.stat_desc.static_tree;
    let elements = desc.stat_desc.elems;

    let mut heap = Heap::new();
    let mut max_code = heap.initialize(&mut tree[..elements]);

    // The format requires at least two codes; synthesize dummy nodes with
    // frequency 1 if fewer symbols are actually used.
    while heap.heap_len < 2 {
        heap.heap_len += 1;
        let node = if max_code < 2 {
            max_code += 1;
            max_code
        } else {
            0
        };

        debug_assert!(node >= 0);
        let node = node as usize;

        heap.heap[heap.heap_len] = node as u32;
        *tree[node].freq_mut() = 1;
        heap.depth[node] = 0;
        // Dummy nodes cost nothing; compensate the running bit counts.
        state.opt_len -= 1;
        if !stree.is_empty() {
            state.static_len -= stree[node].len() as usize;
        }
    }

    debug_assert!(max_code >= 0);
    let max_code = max_code as usize;
    desc.max_code = max_code;

    // Establish the min-heap property over the interior of the heap.
    let mut n = heap.heap_len / 2;
    while n >= 1 {
        heap.pqdownheap(tree, n);
        n -= 1;
    }

    heap.construct_huffman_tree(tree, elements);

    // Compute code lengths (handling the max-length overflow case), then
    // assign canonical codes from the per-length counts.
    let bl_count = gen_bitlen(state, &mut heap, desc);

    gen_codes(&mut desc.dyn_tree, max_code, &bl_count);
}
1991
/// Computes the code length for every symbol of the tree, clamping lengths to
/// the alphabet's maximum and redistributing any overflow, while updating the
/// running `opt_len`/`static_len` bit counts. Returns the number of codes at
/// each length.
fn gen_bitlen<const N: usize>(
    state: &mut State,
    heap: &mut Heap,
    desc: &mut TreeDesc<N>,
) -> [u16; MAX_BITS + 1] {
    let tree = &mut desc.dyn_tree;
    let max_code = desc.max_code;
    let stree = desc.stat_desc.static_tree;
    let extra = desc.stat_desc.extra_bits;
    let base = desc.stat_desc.extra_base;
    let max_length = desc.stat_desc.max_length;

    let mut bl_count = [0u16; MAX_BITS + 1];

    // The root of the tree has length 0 by definition.
    *tree[heap.heap[heap.heap_max] as usize].len_mut() = 0; let mut overflow: i32 = 0;

    // Walk the nodes in decreasing frequency order; each node's length is one
    // more than its parent's.
    for h in heap.heap_max + 1..HEAP_SIZE {
        let n = heap.heap[h] as usize;
        let mut bits = tree[tree[n].dad() as usize].len() + 1;

        if bits > max_length {
            bits = max_length;
            overflow += 1;
        }

        *tree[n].len_mut() = bits;

        // Internal (non-leaf) nodes contribute no output bits themselves.
        if n > max_code {
            continue;
        }

        bl_count[bits as usize] += 1;
        let mut xbits = 0;
        if n >= base {
            xbits = extra[n - base] as usize;
        }

        let f = tree[n].freq() as usize;
        state.opt_len += f * (bits as usize + xbits);

        if !stree.is_empty() {
            state.static_len += f * (stree[n].len() as usize + xbits);
        }
    }

    if overflow == 0 {
        return bl_count;
    }

    // Repair over-long codes: repeatedly move one leaf from the deepest level
    // up a level, which frees room for two codes at max_length.
    loop {
        let mut bits = max_length as usize - 1;
        while bl_count[bits] == 0 {
            bits -= 1;
        }
        bl_count[bits] -= 1; bl_count[bits + 1] += 2; bl_count[max_length as usize] -= 1;
        overflow -= 2;

        if overflow <= 0 {
            break;
        }
    }

    // Reassign lengths to the affected codes, taking symbols from the tail of
    // the heap (highest frequencies first), and fix up opt_len accordingly.
    let mut h = HEAP_SIZE;
    for bits in (1..=max_length).rev() {
        let mut n = bl_count[bits as usize];
        while n != 0 {
            h -= 1;
            let m = heap.heap[h] as usize;
            if m > max_code {
                continue;
            }

            if tree[m].len() != bits {
                state.opt_len += (bits * tree[m].freq()) as usize;
                state.opt_len -= (tree[m].len() * tree[m].freq()) as usize;
                *tree[m].len_mut() = bits;
            }

            n -= 1;
        }
    }
    bl_count
}
2093
/// C `isgraph` equivalent: true for printable ASCII other than space,
/// i.e. '!' (0x21) through '~' (0x7E).
#[allow(unused)]
fn isgraph(c: u8) -> bool {
    (0x21..=0x7E).contains(&c)
}
2099
/// Assigns canonical Huffman codes to all symbols up to `max_code`, given the
/// number of codes at each bit length (`bl_count`). Codes are emitted
/// bit-reversed, as deflate transmits them LSB-first.
fn gen_codes(tree: &mut [Value], max_code: usize, bl_count: &[u16]) {
    // next_code[len]: the next canonical code value to hand out at that length.
    let mut next_code = [0; MAX_BITS + 1]; let mut code = 0; for bits in 1..=MAX_BITS {
        code = (code + bl_count[bits - 1]) << 1;
        next_code[bits] = code;
    }

    // A complete code must use up every code value exactly once.
    assert!(
        code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
        "inconsistent bit counts"
    );

    trace!("\ngen_codes: max_code {max_code} ");

    for n in 0..=max_code {
        let len = tree[n].len();
        if len == 0 {
            // Unused symbol: no code assigned.
            continue;
        }

        assert!((1..=15).contains(&len), "code length must be 1-15");
        // Reverse the low `len` bits for LSB-first transmission.
        *tree[n].code_mut() = next_code[len as usize].reverse_bits() >> (16 - len);
        next_code[len as usize] += 1;

        if tree != self::trees_tbl::STATIC_LTREE.as_slice() {
            trace!(
                "\nn {:>3} {} l {:>2} c {:>4x} ({:x}) ",
                n,
                if isgraph(n as u8) {
                    char::from_u32(n as u32).unwrap()
                } else {
                    ' '
                },
                len,
                tree[n].code(),
                next_code[len as usize] - 1
            );
        }
    }
}
2152
/// Bit-length alphabet repeat codes (RFC 1951, section 3.2.7):
/// repeat the previous code length 3-6 times.
const REP_3_6: usize = 16;

/// Repeat a zero length 3-10 times.
const REPZ_3_10: usize = 17;

/// Repeat a zero length 11-138 times.
const REPZ_11_138: usize = 18;
2161
/// Scans a tree's code lengths and accumulates, into the bit-length tree's
/// frequency table, how they would be run-length encoded (literal lengths plus
/// the REP/REPZ repeat codes).
fn scan_tree(bl_desc: &mut TreeDesc<{ 2 * BL_CODES + 1 }>, tree: &mut [Value], max_code: usize) {
    // prevlen/curlen/nextlen: previous, current, and upcoming code lengths.
    // count: length of the current run; max_count/min_count: run limits for
    // the applicable repeat code (zeros allow longer runs).
    let mut prevlen = -1isize; let mut curlen: isize; let mut nextlen = tree[0].len(); let mut count = 0; let mut max_count = 7; let mut min_count = 4; if nextlen == 0 {
        max_count = 138;
        min_count = 3;
    }

    // Sentinel after the last real code, guaranteed to break any run.
    *tree[max_code + 1].len_mut() = 0xffff; let bl_tree = &mut bl_desc.dyn_tree;

    for n in 0..=max_code {
        curlen = nextlen as isize;
        nextlen = tree[n + 1].len();
        count += 1;
        if count < max_count && curlen == nextlen as isize {
            // Run continues.
            continue;
        } else if count < min_count {
            // Run too short for a repeat code: emit the lengths literally.
            *bl_tree[curlen as usize].freq_mut() += count;
        } else if curlen != 0 {
            // Non-zero run: a literal length (if it differs from the previous
            // one) followed by REP_3_6.
            if curlen != prevlen {
                *bl_tree[curlen as usize].freq_mut() += 1;
            }
            *bl_tree[REP_3_6].freq_mut() += 1;
        } else if count <= 10 {
            *bl_tree[REPZ_3_10].freq_mut() += 1;
        } else {
            *bl_tree[REPZ_11_138].freq_mut() += 1;
        }

        count = 0;
        prevlen = curlen;

        // Pick the run limits for the next run based on the upcoming length.
        if nextlen == 0 {
            max_count = 138;
            min_count = 3;
        } else if curlen == nextlen as isize {
            max_count = 6;
            min_count = 3;
        } else {
            max_count = 7;
            min_count = 4;
        }
    }
}
2215
2216fn send_all_trees(state: &mut State, lcodes: usize, dcodes: usize, blcodes: usize) {
2217 assert!(
2218 lcodes >= 257 && dcodes >= 1 && blcodes >= 4,
2219 "not enough codes"
2220 );
2221 assert!(
2222 lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
2223 "too many codes"
2224 );
2225
2226 trace!("\nbl counts: ");
2227 state.bit_writer.send_bits(lcodes as u64 - 257, 5); state.bit_writer.send_bits(dcodes as u64 - 1, 5);
2229 state.bit_writer.send_bits(blcodes as u64 - 4, 4); for rank in 0..blcodes {
2232 trace!("\nbl code {:>2} ", StaticTreeDesc::BL_ORDER[rank]);
2233 state.bit_writer.send_bits(
2234 state.bl_desc.dyn_tree[StaticTreeDesc::BL_ORDER[rank] as usize].len() as u64,
2235 3,
2236 );
2237 }
2238 trace!("\nbl tree: sent {}", state.bit_writer.bits_sent);
2239
2240 state
2242 .bit_writer
2243 .send_tree(&state.l_desc.dyn_tree, &state.bl_desc.dyn_tree, lcodes - 1);
2244 trace!("\nlit tree: sent {}", state.bit_writer.bits_sent);
2245
2246 state
2248 .bit_writer
2249 .send_tree(&state.d_desc.dyn_tree, &state.bl_desc.dyn_tree, dcodes - 1);
2250 trace!("\ndist tree: sent {}", state.bit_writer.bits_sent);
2251}
2252
/// Builds the bit-length tree from the RLE statistics of the literal/length
/// and distance trees, adds its transmission cost to `opt_len`, and returns
/// the index of the last bit-length code that must be sent.
fn build_bl_tree(state: &mut State) -> usize {
    // Collect bit-length code frequencies from both trees.
    scan_tree(
        &mut state.bl_desc,
        &mut state.l_desc.dyn_tree,
        state.l_desc.max_code,
    );

    scan_tree(
        &mut state.bl_desc,
        &mut state.d_desc.dyn_tree,
        state.d_desc.max_code,
    );

    // build_tree needs &mut State and &mut TreeDesc simultaneously, so move
    // the descriptor out of state for the duration of the call.
    {
        let mut tmp = TreeDesc::EMPTY;
        core::mem::swap(&mut tmp, &mut state.bl_desc);
        build_tree(state, &mut tmp);
        core::mem::swap(&mut tmp, &mut state.bl_desc);
    }

    // Trim trailing zero-length codes in BL_ORDER; at least 4 must be sent.
    let mut max_blindex = BL_CODES - 1;
    while max_blindex >= 3 {
        let index = StaticTreeDesc::BL_ORDER[max_blindex] as usize;
        if state.bl_desc.dyn_tree[index].len() != 0 {
            break;
        }

        max_blindex -= 1;
    }

    // 3 bits per transmitted bit-length code, plus the 5+5+4 header counts.
    state.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
    trace!(
        "\ndyn trees: dyn {}, stat {}",
        state.opt_len,
        state.static_len
    );

    max_blindex
}
2306
/// Finishes the current block: builds the dynamic trees, compares the cost of
/// stored / static / dynamic encodings, emits the cheapest one, and resets the
/// per-block state. `window_offset` is the block's start in the window (None
/// if the data is no longer available for a stored block).
fn zng_tr_flush_block(
    stream: &mut DeflateStream,
    window_offset: Option<usize>,
    stored_len: u32,
    last: bool,
) {
    // Estimated output sizes in bytes for the dynamic and static encodings.
    let mut opt_lenb;
    let static_lenb;
    let mut max_blindex = 0;

    let state = &mut stream.state;

    if state.sym_buf.is_empty() {
        // No symbols: costs are trivially zero / header-only.
        opt_lenb = 0;
        static_lenb = 0;
        state.static_len = 7;
    } else if state.level > 0 {
        if stream.data_type == DataType::Unknown as i32 {
            stream.data_type = State::detect_data_type(&state.l_desc.dyn_tree) as i32;
        }

        // Build the literal/length tree (swap dance: build_tree borrows both
        // State and the descriptor mutably).
        {
            let mut tmp = TreeDesc::EMPTY;
            core::mem::swap(&mut tmp, &mut state.l_desc);

            build_tree(state, &mut tmp);
            core::mem::swap(&mut tmp, &mut state.l_desc);

            trace!(
                "\nlit data: dyn {}, stat {}",
                state.opt_len,
                state.static_len
            );
        }

        // Build the distance tree the same way.
        {
            let mut tmp = TreeDesc::EMPTY;
            core::mem::swap(&mut tmp, &mut state.d_desc);
            build_tree(state, &mut tmp);
            core::mem::swap(&mut tmp, &mut state.d_desc);

            trace!(
                "\ndist data: dyn {}, stat {}",
                state.opt_len,
                state.static_len
            );
        }

        max_blindex = build_bl_tree(state);

        // Convert bit counts to bytes (+3 header bits, rounded up).
        opt_lenb = (state.opt_len + 3 + 7) >> 3;
        static_lenb = (state.static_len + 3 + 7) >> 3;

        trace!(
            "\nopt {}({}) stat {}({}) stored {} lit {} ",
            opt_lenb,
            state.opt_len,
            static_lenb,
            state.static_len,
            stored_len,
            state.sym_buf.len() / 3
        );

        if static_lenb <= opt_lenb || state.strategy == Strategy::Fixed {
            opt_lenb = static_lenb;
        }
    } else {
        // level 0: no trees were built; force the stored-block comparison.
        assert!(window_offset.is_some(), "lost buf");
        opt_lenb = stored_len as usize + 5;
        static_lenb = stored_len as usize + 5;
    }

    if stored_len as usize + 4 <= opt_lenb && window_offset.is_some() {
        // Storing the raw bytes is at least as cheap: emit a stored block.
        let window_offset = window_offset.unwrap();
        let range = window_offset..window_offset + stored_len as usize;
        zng_tr_stored_block(state, range, last);
    } else if static_lenb == opt_lenb {
        state.bit_writer.emit_tree(BlockType::StaticTrees, last);
        state.compress_block_static_trees();
    } else {
        state.bit_writer.emit_tree(BlockType::DynamicTrees, last);
        send_all_trees(
            state,
            state.l_desc.max_code + 1,
            state.d_desc.max_code + 1,
            max_blindex + 1,
        );

        state.compress_block_dynamic_trees();
    }

    // Reset the per-block statistics for the next block.
    state.init_block();
    if last {
        // Final block: flush the bit buffer to a byte boundary.
        state.bit_writer.emit_align();
    }

    }
2425
2426pub(crate) fn flush_block_only(stream: &mut DeflateStream, is_last: bool) {
2427 zng_tr_flush_block(
2428 stream,
2429 (stream.state.block_start >= 0).then_some(stream.state.block_start as usize),
2430 (stream.state.strstart as isize - stream.state.block_start) as u32,
2431 is_last,
2432 );
2433
2434 stream.state.block_start = stream.state.strstart as isize;
2435 flush_pending(stream)
2436}
2437
/// Appends `bytes` to the pending buffer (used for gzip header fields),
/// draining the buffer to the output whenever it fills and keeping the header
/// CRC up to date. Breaks with `Ok` when output space runs out mid-way, in
/// which case `gzindex` records how far we got.
fn flush_bytes(stream: &mut DeflateStream, mut bytes: &[u8]) -> ControlFlow<ReturnCode> {
    let mut state = &mut stream.state;

    // beg: start of the not-yet-checksummed portion of the pending buffer.
    let mut beg = state.bit_writer.pending.pending().len(); while state.bit_writer.pending.remaining() < bytes.len() {
        let copy = state.bit_writer.pending.remaining();

        state.bit_writer.pending.extend(&bytes[..copy]);

        // Fold the newly appended header bytes into the header CRC.
        stream.adler = crc32(
            stream.adler as u32,
            &state.bit_writer.pending.pending()[beg..],
        ) as z_checksum;

        state.gzindex += copy;
        flush_pending(stream);
        // Re-borrow after flush_pending took the stream mutably.
        state = &mut stream.state;

        // Output is full: record progress and hand control back to the caller.
        if !state.bit_writer.pending.pending().is_empty() {
            state.last_flush = -1;
            return ControlFlow::Break(ReturnCode::Ok);
        }

        beg = 0;
        bytes = &bytes[copy..];
    }

    // The remainder fits entirely in the pending buffer.
    state.bit_writer.pending.extend(bytes);

    stream.adler = crc32(
        stream.adler as u32,
        &state.bit_writer.pending.pending()[beg..],
    ) as z_checksum;
    state.gzindex = 0;

    ControlFlow::Continue(())
}
2478
/// Compresses as much input as possible, mirroring zlib's `deflate()`: emits
/// the zlib or gzip header on first use, runs the configured block algorithm,
/// honors the requested flush mode, and writes the checksum trailer on
/// `Finish`. Returns `StreamEnd` once the stream is fully terminated.
pub fn deflate(stream: &mut DeflateStream, flush: DeflateFlush) -> ReturnCode {
    // Parameter sanity: output must exist, input must exist if promised, and
    // a finished stream only accepts further Finish calls.
    if stream.next_out.is_null()
        || (stream.avail_in != 0 && stream.next_in.is_null())
        || (stream.state.status == Status::Finish && flush != DeflateFlush::Finish)
    {
        let err = ReturnCode::StreamError;
        stream.msg = err.error_message();
        return err;
    }

    if stream.avail_out == 0 {
        let err = ReturnCode::BufError;
        stream.msg = err.error_message();
        return err;
    }

    let old_flush = stream.state.last_flush;
    stream.state.last_flush = flush as i8;

    // Drain any output left over from a previous call before doing new work.
    if !stream.state.bit_writer.pending.pending().is_empty() {
        flush_pending(stream);
        if stream.avail_out == 0 {
            // Output filled up again; the caller must provide more space.
            stream.state.last_flush = -1;
            return ReturnCode::Ok;
        }

    } else if stream.avail_in == 0
        && rank_flush(flush as i8) <= rank_flush(old_flush)
        && flush != DeflateFlush::Finish
    {
        // Nothing to do and no stronger flush requested than last time.
        let err = ReturnCode::BufError;
        stream.msg = err.error_message();
        return err;
    }

    // A finished stream does not accept new input.
    if stream.state.status == Status::Finish && stream.avail_in != 0 {
        let err = ReturnCode::BufError;
        stream.msg = err.error_message();
        return err;
    }

    // Raw deflate: no header to write.
    if stream.state.status == Status::Init && stream.state.wrap == 0 {
        stream.state.status = Status::Busy;
    }

    // zlib wrapper: write the 2-byte header (and dictionary adler if preset).
    if stream.state.status == Status::Init {
        let header = stream.state.header();
        stream
            .state
            .bit_writer
            .pending
            .extend(&header.to_be_bytes());

        // A non-zero strstart means a dictionary was preloaded; its checksum
        // follows the header.
        if stream.state.strstart != 0 {
            let adler = stream.adler as u32;
            stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
        }

        stream.adler = ADLER32_INITIAL_VALUE as _;
        stream.state.status = Status::Busy;

        flush_pending(stream);

        if !stream.state.bit_writer.pending.pending().is_empty() {
            stream.state.last_flush = -1;

            return ReturnCode::Ok;
        }
    }

    // gzip wrapper: write the fixed header, then the optional header fields.
    if stream.state.status == Status::GZip {
        stream.state.crc_fold = Crc32Fold::new();

        // gzip magic bytes + deflate compression method.
        stream.state.bit_writer.pending.extend(&[31, 139, 8]);

        // XFL: 2 = max compression, 4 = fastest.
        let extra_flags = if stream.state.level == 9 {
            2
        } else if stream.state.strategy >= Strategy::HuffmanOnly || stream.state.level < 2 {
            4
        } else {
            0
        };

        match &stream.state.gzhead {
            None => {
                // Minimal header: no flags, zero mtime, default OS code.
                let bytes = [0, 0, 0, 0, 0, extra_flags, gz_header::OS_CODE];
                stream.state.bit_writer.pending.extend(&bytes);
                stream.state.status = Status::Busy;

                flush_pending(stream);
                if !stream.state.bit_writer.pending.pending().is_empty() {
                    stream.state.last_flush = -1;
                    return ReturnCode::Ok;
                }
            }
            Some(gzhead) => {
                stream.state.bit_writer.pending.extend(&[gzhead.flags()]);
                let bytes = (gzhead.time as u32).to_le_bytes();
                stream.state.bit_writer.pending.extend(&bytes);
                stream
                    .state
                    .bit_writer
                    .pending
                    .extend(&[extra_flags, gzhead.os as u8]);

                if !gzhead.extra.is_null() {
                    // XLEN: length prefix of the extra field.
                    let bytes = (gzhead.extra_len as u16).to_le_bytes();
                    stream.state.bit_writer.pending.extend(&bytes);
                }

                if gzhead.hcrc != 0 {
                    // Header CRC requested: start checksumming the header bytes.
                    stream.adler = crc32(
                        stream.adler as u32,
                        stream.state.bit_writer.pending.pending(),
                    ) as z_checksum
                }

                stream.state.gzindex = 0;
                stream.state.status = Status::Extra;
            }
        }
    }

    // Emit the gzip "extra" field, resuming from gzindex if a previous call
    // ran out of output space.
    if stream.state.status == Status::Extra {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if !gzhead.extra.is_null() {
                let gzhead_extra = gzhead.extra;

                // SAFETY: assumes gzhead.extra points to at least
                // (extra_len & 0xffff) readable bytes — the gz_header contract.
                let extra = unsafe {
                    core::slice::from_raw_parts(
                        gzhead_extra.add(stream.state.gzindex),
                        (gzhead.extra_len & 0xffff) as usize - stream.state.gzindex,
                    )
                };

                if let ControlFlow::Break(err) = flush_bytes(stream, extra) {
                    return err;
                }
            }
        }
        stream.state.status = Status::Name;
    }

    // Emit the NUL-terminated file name, if any.
    if stream.state.status == Status::Name {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if !gzhead.name.is_null() {
                let gzhead_name = unsafe { CStr::from_ptr(gzhead.name.cast()) };
                let bytes = gzhead_name.to_bytes_with_nul();
                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
                    return err;
                }
            }
            stream.state.status = Status::Comment;
        }
    }

    // Emit the NUL-terminated comment, if any.
    if stream.state.status == Status::Comment {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if !gzhead.comment.is_null() {
                let gzhead_comment = unsafe { CStr::from_ptr(gzhead.comment.cast()) };
                let bytes = gzhead_comment.to_bytes_with_nul();
                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
                    return err;
                }
            }
            stream.state.status = Status::Hcrc;
        }
    }

    // Emit the 16-bit header CRC, if requested, then the header is complete.
    if stream.state.status == Status::Hcrc {
        if let Some(gzhead) = stream.state.gzhead.as_ref() {
            if gzhead.hcrc != 0 {
                let bytes = (stream.adler as u16).to_le_bytes();
                if let ControlFlow::Break(err) = flush_bytes(stream, &bytes) {
                    return err;
                }
            }
        }

        stream.state.status = Status::Busy;

        flush_pending(stream);
        if !stream.state.bit_writer.pending.pending().is_empty() {
            stream.state.last_flush = -1;
            return ReturnCode::Ok;
        }
    }

    // Main compression: run while there is input, lookahead, or a non-trivial
    // flush to honor.
    let state = &mut stream.state;
    if stream.avail_in != 0
        || state.lookahead != 0
        || (flush != DeflateFlush::NoFlush && state.status != Status::Finish)
    {
        let bstate = self::algorithm::run(stream, flush);

        let state = &mut stream.state;

        if matches!(bstate, BlockState::FinishStarted | BlockState::FinishDone) {
            state.status = Status::Finish;
        }

        match bstate {
            BlockState::NeedMore | BlockState::FinishStarted => {
                if stream.avail_out == 0 {
                    state.last_flush = -1; }
                return ReturnCode::Ok;
            }
            BlockState::BlockDone => {
                // A block completed: apply the flush-mode-specific epilogue.
                match flush {
                    DeflateFlush::NoFlush => unreachable!("condition of inner surrounding if"),
                    DeflateFlush::PartialFlush => {
                        state.bit_writer.align();
                    }
                    DeflateFlush::SyncFlush => {
                        // Empty stored block acts as the sync marker.
                        zng_tr_stored_block(state, 0..0, false);
                    }
                    DeflateFlush::FullFlush => {
                        zng_tr_stored_block(state, 0..0, false);

                        // Forget match history so following data is independent.
                        state.head.as_mut_slice().fill(0); if state.lookahead == 0 {
                            state.strstart = 0;
                            state.block_start = 0;
                            state.insert = 0;
                        }
                    }
                    DeflateFlush::Block => { }
                    DeflateFlush::Finish => unreachable!("condition of outer surrounding if"),
                }

                flush_pending(stream);

                if stream.avail_out == 0 {
                    stream.state.last_flush = -1; return ReturnCode::Ok;
                }
            }
            BlockState::FinishDone => { }
        }
    }

    if flush != DeflateFlush::Finish {
        return ReturnCode::Ok;
    }

    // Finish: write the wrapper trailer.
    if stream.state.wrap == 2 {
        // gzip trailer: CRC-32 then ISIZE (total input mod 2^32), little-endian.
        let crc_fold = core::mem::take(&mut stream.state.crc_fold);
        stream.adler = crc_fold.finish() as z_checksum;

        let adler = stream.adler as u32;
        stream.state.bit_writer.pending.extend(&adler.to_le_bytes());

        let total_in = stream.total_in as u32;
        stream
            .state
            .bit_writer
            .pending
            .extend(&total_in.to_le_bytes());
    } else if stream.state.wrap == 1 {
        // zlib trailer: Adler-32, big-endian.
        let adler = stream.adler as u32;
        stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
    }

    flush_pending(stream);

    // Negate wrap so the trailer is only ever written once.
    if stream.state.wrap > 0 {
        stream.state.wrap = -stream.state.wrap; }

    if stream.state.bit_writer.pending.pending().is_empty() {
        assert_eq!(stream.state.bit_writer.bits_used, 0, "bi_buf not flushed");
        return ReturnCode::StreamEnd;
    }
    ReturnCode::Ok
}
2794
/// Copies as much of the state's pending (already-compressed) output as fits
/// into the stream's output buffer, advancing `next_out`, `total_out` and
/// `avail_out` accordingly. Does nothing if either side is empty/full.
pub(crate) fn flush_pending(stream: &mut DeflateStream) {
    let state = &mut stream.state;

    // Push any bits still buffered in the bit writer into the pending byte buffer
    // so they are included in this flush.
    state.bit_writer.flush_bits();

    let pending = state.bit_writer.pending.pending();
    // Never write more than the caller-provided output space.
    let len = Ord::min(pending.len(), stream.avail_out as usize);

    if len == 0 {
        return;
    }

    trace!("\n[FLUSH {len} bytes]");
    // SAFETY: per the z_stream contract, `next_out` is valid for `avail_out`
    // (>= len) bytes; the pending buffer is owned by `state`, so the two
    // regions cannot overlap.
    unsafe { core::ptr::copy_nonoverlapping(pending.as_ptr(), stream.next_out, len) };

    stream.next_out = stream.next_out.wrapping_add(len);
    stream.total_out += len as crate::c_api::z_size;
    stream.avail_out -= len as crate::c_api::uInt;

    // Mark the copied bytes as consumed.
    state.bit_writer.pending.advance(len);
}
2817
2818pub fn compress_slice<'a>(
2819 output: &'a mut [u8],
2820 input: &[u8],
2821 config: DeflateConfig,
2822) -> (&'a mut [u8], ReturnCode) {
2823 let output_uninit = unsafe {
2825 core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2826 };
2827
2828 compress(output_uninit, input, config)
2829}
2830
/// Compresses `input` into the (possibly uninitialized) `output` buffer in a
/// single call, finishing the stream with `DeflateFlush::Finish`. Returns the
/// initialized prefix of `output` and the final return code.
pub fn compress<'a>(
    output: &'a mut [MaybeUninit<u8>],
    input: &[u8],
    config: DeflateConfig,
) -> (&'a mut [u8], ReturnCode) {
    compress_with_flush(output, input, config, DeflateFlush::Finish)
}
2838
2839pub fn compress_slice_with_flush<'a>(
2840 output: &'a mut [u8],
2841 input: &[u8],
2842 config: DeflateConfig,
2843 flush: DeflateFlush,
2844) -> (&'a mut [u8], ReturnCode) {
2845 let output_uninit = unsafe {
2847 core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2848 };
2849
2850 compress_with_flush(output_uninit, input, config, flush)
2851}
2852
/// Compresses all of `input` into `output` with the given `config`, applying
/// `final_flush` once the remaining input fits in a single chunk. Returns the
/// initialized prefix of `output` and the final return code.
pub fn compress_with_flush<'a>(
    output: &'a mut [MaybeUninit<u8>],
    input: &[u8],
    config: DeflateConfig,
    final_flush: DeflateFlush,
) -> (&'a mut [u8], ReturnCode) {
    // avail_in/avail_out start at 0; they are topped up chunk by chunk below.
    let mut stream = z_stream {
        next_in: input.as_ptr() as *mut u8,
        avail_in: 0,
        total_in: 0,
        next_out: output.as_mut_ptr() as *mut u8,
        avail_out: 0,
        total_out: 0,
        msg: core::ptr::null_mut(),
        state: core::ptr::null_mut(),
        zalloc: None,
        zfree: None,
        opaque: core::ptr::null_mut(),
        data_type: 0,
        adler: 0,
        reserved: 0,
    };

    let err = init(&mut stream, config);
    if err != ReturnCode::Ok {
        return (&mut [], err);
    }

    // `avail_in`/`avail_out` are 32-bit fields; slices larger than c_uint::MAX
    // are fed to the stream in chunks of at most this size.
    let max = core::ffi::c_uint::MAX as usize;

    let mut left = output.len();
    let mut source_len = input.len();

    loop {
        // Top up the output window from the remaining output budget.
        if stream.avail_out == 0 {
            stream.avail_out = Ord::min(left, max) as _;
            left -= stream.avail_out as usize;
        }

        // Top up the input window from the remaining input.
        if stream.avail_in == 0 {
            stream.avail_in = Ord::min(source_len, max) as _;
            source_len -= stream.avail_in as usize;
        }

        // Only request the final flush once all input has been made available
        // to the stream.
        let flush = if source_len > 0 {
            DeflateFlush::NoFlush
        } else {
            final_flush
        };

        let err = if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
            deflate(stream, flush)
        } else {
            ReturnCode::StreamError
        };

        // Loop until deflate returns anything other than Ok (e.g. StreamEnd,
        // BufError, or a hard error).
        if err != ReturnCode::Ok {
            break;
        }
    }

    // SAFETY: deflate initialized the first `total_out` bytes of `output`.
    let output_slice = unsafe {
        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut u8, stream.total_out as usize)
    };

    // Tear down the stream; an error from `end` is reported as DataError.
    let return_code = if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
        match end(stream) {
            Ok(_) => ReturnCode::Ok,
            Err(_) => ReturnCode::DataError,
        }
    } else {
        ReturnCode::Ok
    };

    (output_slice, return_code)
}
2931
/// Upper bound on the compressed size of `source_len` input bytes when the
/// default zlib wrapper is used (the analogue of zlib's `compressBound`).
pub const fn compress_bound(source_len: usize) -> usize {
    compress_bound_help(source_len, ZLIB_WRAPLEN)
}
2935
2936const fn compress_bound_help(source_len: usize, wrap_len: usize) -> usize {
2937 source_len .wrapping_add(if source_len == 0 { 1 } else { 0 })
2940 .wrapping_add(if source_len < 9 { 1 } else { 0 })
2942 .wrapping_add(deflate_quick_overhead(source_len))
2944 .wrapping_add(DEFLATE_BLOCK_OVERHEAD)
2946 .wrapping_add(wrap_len)
2948}
2949
/// Priority queue keyed on (frequency, depth) used while constructing Huffman
/// trees, mirroring zlib's tree-building heap.
#[derive(Clone)]
struct Heap {
    // Heap storage; heap[1..=heap_len] is the active min-heap area, while the
    // slots from heap_max upward hold nodes removed during tree construction.
    heap: [u32; 2 * L_CODES + 1],

    // Number of elements currently in the heap area.
    heap_len: usize,

    // Start index of the "removed nodes" area at the top of `heap`.
    heap_max: usize,

    // depth[n] is the height of node n's subtree (0 for leaves); used as a
    // tie-breaker between nodes of equal frequency.
    depth: [u8; 2 * L_CODES + 1],
}
2966
impl Heap {
    /// Creates an empty heap with all node slots and depths zeroed.
    fn new() -> Self {
        Self {
            heap: [0; 2 * L_CODES + 1],
            heap_len: 0,
            heap_max: 0,
            depth: [0; 2 * L_CODES + 1],
        }
    }

    /// Fills the heap with every symbol of `tree` that has a nonzero frequency
    /// and returns the largest such symbol index (-1 if there is none).
    /// Symbols with frequency zero get their code length reset to 0.
    fn initialize(&mut self, tree: &mut [Value]) -> isize {
        let mut max_code = -1;

        self.heap_len = 0;
        self.heap_max = HEAP_SIZE;

        for (n, node) in tree.iter_mut().enumerate() {
            if node.freq() > 0 {
                self.heap_len += 1;
                self.heap[self.heap_len] = n as u32;
                max_code = n as isize;
                self.depth[n] = 0;
            } else {
                *node.len_mut() = 0;
            }
        }

        max_code
    }

    // Index of the root (smallest) element; slot 0 of `heap` is unused.
    const SMALLEST: usize = 1;

    /// Restores the heap property by sifting the element at index `k` down.
    /// Nodes are ordered by frequency, tie-broken by `depth`, which keeps the
    /// generated Huffman trees deterministic.
    fn pqdownheap(&mut self, tree: &[Value], mut k: usize) {
        // Packs (freq, depth) into a single u32 so one comparison orders by
        // frequency first and depth second.
        macro_rules! freq_and_depth {
            ($i:expr) => {
                (tree[$i as usize].freq() as u32) << 8 | self.depth[$i as usize] as u32
            };
        }

        let v = self.heap[k];
        let v_val = freq_and_depth!(v);
        // j starts at the left child of k.
        let mut j = k << 1;
        while j <= self.heap_len {
            let mut j_val = freq_and_depth!(self.heap[j]);
            // Pick the smaller of the two children (right child wins ties).
            if j < self.heap_len {
                let j1_val = freq_and_depth!(self.heap[j + 1]);
                if j1_val <= j_val {
                    j += 1;
                    j_val = j1_val;
                }
            }

            // Stop when v sorts at or before the smaller child.
            if v_val <= j_val {
                break;
            }

            // Move the child up one level and continue sifting down.
            self.heap[k] = self.heap[j];
            k = j;

            j <<= 1;
        }

        self.heap[k] = v;
    }

    /// Removes and returns the node with the smallest (freq, depth) key, then
    /// re-establishes the heap property.
    fn pqremove(&mut self, tree: &[Value]) -> u32 {
        let top = self.heap[Self::SMALLEST];
        self.heap[Self::SMALLEST] = self.heap[self.heap_len];
        self.heap_len -= 1;

        self.pqdownheap(tree, Self::SMALLEST);

        top
    }

    /// Repeatedly merges the two least-frequent nodes into a fresh internal
    /// node at `tree[node]` until only one heap element remains. Removed nodes
    /// are parked at `heap[heap_max..]` for later code-length assignment.
    fn construct_huffman_tree(&mut self, tree: &mut [Value], mut node: usize) {
        loop {
            // n = least frequent node, m = next least frequent; both are
            // stashed in the upper region of `heap`.
            let n = self.pqremove(tree) as usize;
            let m = self.heap[Heap::SMALLEST] as usize;
            self.heap_max -= 1;
            self.heap[self.heap_max] = n as u32;
            self.heap_max -= 1;
            self.heap[self.heap_max] = m as u32;

            // The new internal node is the parent of n and m.
            *tree[node].freq_mut() = tree[n].freq() + tree[m].freq();
            self.depth[node] = Ord::max(self.depth[n], self.depth[m]) + 1;

            *tree[n].dad_mut() = node as u16;
            *tree[m].dad_mut() = node as u16;

            // The merged node replaces the current smallest and is sifted into
            // its correct heap position.
            self.heap[Heap::SMALLEST] = node as u32;
            node += 1;

            self.pqdownheap(tree, Heap::SMALLEST);

            if self.heap_len < 2 {
                break;
            }
        }

        // Park the final remaining node alongside the other removed nodes.
        self.heap_max -= 1;
        self.heap[self.heap_max] = self.heap[Heap::SMALLEST];
    }
}
3093
3094pub unsafe fn set_header<'a>(
3103 stream: &mut DeflateStream<'a>,
3104 head: Option<&'a mut gz_header>,
3105) -> ReturnCode {
3106 if stream.state.wrap != 2 {
3107 ReturnCode::StreamError as _
3108 } else {
3109 stream.state.gzhead = head;
3110 ReturnCode::Ok as _
3111 }
3112}
3113
// Bytes of overhead added by the zlib wrapper (2-byte header + 4-byte Adler-32).
const ZLIB_WRAPLEN: usize = 6;
// Bytes of overhead added by the gzip wrapper (10-byte header + 8-byte trailer).
const GZIP_WRAPLEN: usize = 18;

const DEFLATE_HEADER_BITS: usize = 3;
const DEFLATE_EOBS_BITS: usize = 15;
const DEFLATE_PAD_BITS: usize = 6;
// Per-block overhead in bytes: block header + end-of-block symbol + padding
// to a byte boundary, rounded down after summing the bit counts.
const DEFLATE_BLOCK_OVERHEAD: usize =
    (DEFLATE_HEADER_BITS + DEFLATE_EOBS_BITS + DEFLATE_PAD_BITS) >> 3;

// Maximum bits a literal can occupy under the "quick" deflate strategy.
const DEFLATE_QUICK_LIT_MAX_BITS: usize = 9;
3126const fn deflate_quick_overhead(x: usize) -> usize {
3127 let sum = x
3128 .wrapping_mul(DEFLATE_QUICK_LIT_MAX_BITS - 8)
3129 .wrapping_add(7);
3130
3131 (sum as core::ffi::c_ulong >> 3) as usize
3133}
3134
3135pub fn bound(stream: Option<&mut DeflateStream>, source_len: usize) -> usize {
3151 let mask = core::ffi::c_ulong::MAX as usize;
3153
3154 let comp_len = source_len
3156 .wrapping_add((source_len.wrapping_add(7) & mask) >> 3)
3157 .wrapping_add((source_len.wrapping_add(63) & mask) >> 6)
3158 .wrapping_add(5);
3159
3160 let Some(stream) = stream else {
3161 return comp_len.wrapping_add(6);
3163 };
3164
3165 let wrap_len = match stream.state.wrap {
3167 0 => {
3168 0
3170 }
3171 1 => {
3172 if stream.state.strstart != 0 {
3174 ZLIB_WRAPLEN + 4
3175 } else {
3176 ZLIB_WRAPLEN
3177 }
3178 }
3179 2 => {
3180 let mut gz_wrap_len = GZIP_WRAPLEN;
3182
3183 if let Some(header) = &stream.state.gzhead {
3184 if !header.extra.is_null() {
3185 gz_wrap_len += 2 + header.extra_len as usize;
3186 }
3187
3188 let mut c_string = header.name;
3189 if !c_string.is_null() {
3190 loop {
3191 gz_wrap_len += 1;
3192 unsafe {
3194 if *c_string == 0 {
3195 break;
3196 }
3197 c_string = c_string.add(1);
3198 }
3199 }
3200 }
3201
3202 let mut c_string = header.comment;
3203 if !c_string.is_null() {
3204 loop {
3205 gz_wrap_len += 1;
3206 unsafe {
3208 if *c_string == 0 {
3209 break;
3210 }
3211 c_string = c_string.add(1);
3212 }
3213 }
3214 }
3215
3216 if header.hcrc != 0 {
3217 gz_wrap_len += 2;
3218 }
3219 }
3220
3221 gz_wrap_len
3222 }
3223 _ => {
3224 ZLIB_WRAPLEN
3226 }
3227 };
3228
3229 if stream.state.w_bits() != MAX_WBITS as u32 || HASH_BITS < 15 {
3230 if stream.state.level == 0 {
3231 source_len
3233 .wrapping_add(source_len >> 5)
3234 .wrapping_add(source_len >> 7)
3235 .wrapping_add(source_len >> 11)
3236 .wrapping_add(7)
3237 .wrapping_add(wrap_len)
3238 } else {
3239 comp_len.wrapping_add(wrap_len)
3240 }
3241 } else {
3242 compress_bound_help(source_len, wrap_len)
3243 }
3244}
3245
/// Copies the current dictionary — the most recent input retained in the
/// sliding window, at most one window size — into `dictionary`, and returns
/// the dictionary length in bytes (also when `dictionary` is null).
///
/// # Safety
///
/// `dictionary` must be null or valid for writes of at least
/// `min(strstart + lookahead, w_size)` bytes.
pub unsafe fn get_dictionary(stream: &DeflateStream<'_>, dictionary: *mut u8) -> usize {
    let s = &stream.state;
    // The dictionary is the trailing `len` bytes of data currently in the window.
    let len = Ord::min(s.strstart + s.lookahead, s.w_size);

    if !dictionary.is_null() && len > 0 {
        // SAFETY: the read stays within `strstart + lookahead` bytes of the
        // window, and the caller guarantees `dictionary` has room for `len`
        // bytes; the regions are distinct allocations, so they don't overlap.
        unsafe {
            core::ptr::copy_nonoverlapping(
                s.window.as_ptr().add(s.strstart + s.lookahead - len),
                dictionary,
                len,
            );
        }
    }

    len
}
3265
3266#[cfg(test)]
3267mod test {
3268 use crate::{
3269 inflate::{uncompress_slice, InflateConfig, InflateStream},
3270 InflateFlush,
3271 };
3272
3273 use super::*;
3274
3275 use core::{ffi::CStr, sync::atomic::AtomicUsize};
3276
3277 #[test]
3278 fn detect_data_type_basic() {
3279 let empty = || [Value::new(0, 0); LITERALS];
3280
3281 assert_eq!(State::detect_data_type(&empty()), DataType::Binary);
3282
3283 let mut binary = empty();
3284 binary[0] = Value::new(1, 0);
3285 assert_eq!(State::detect_data_type(&binary), DataType::Binary);
3286
3287 let mut text = empty();
3288 text[b'\r' as usize] = Value::new(1, 0);
3289 assert_eq!(State::detect_data_type(&text), DataType::Text);
3290
3291 let mut text = empty();
3292 text[b'a' as usize] = Value::new(1, 0);
3293 assert_eq!(State::detect_data_type(&text), DataType::Text);
3294
3295 let mut non_text = empty();
3296 non_text[7] = Value::new(1, 0);
3297 assert_eq!(State::detect_data_type(&non_text), DataType::Binary);
3298 }
3299
    /// `DeflateStream::from_stream_mut` must reject null, uninitialized, and
    /// allocator-less streams, and accept a properly initialized one.
    #[test]
    fn from_stream_mut() {
        unsafe {
            // A null stream pointer is rejected.
            assert!(DeflateStream::from_stream_mut(core::ptr::null_mut()).is_none());

            // A default stream has no allocator and no state, so it is rejected.
            let mut stream = z_stream::default();
            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());

            // NOTE(review): this assertion repeats the previous one verbatim;
            // presumably a mutation of `stream` between the two was lost —
            // confirm against upstream.
            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());

            // After a successful init the conversion succeeds.
            init(&mut stream, DeflateConfig::default());
            let stream = DeflateStream::from_stream_mut(&mut stream);
            assert!(stream.is_some());

            // Release the allocations made by init.
            assert!(end(stream.unwrap()).is_ok());
        }
    }
3318
    /// Allocator shim that returns NULL on the `N`-th allocation and defers to
    /// the C allocator otherwise. `opaque` must point to a live `AtomicUsize`
    /// used as the call counter.
    unsafe extern "C" fn fail_nth_allocation<const N: usize>(
        opaque: crate::c_api::voidpf,
        items: crate::c_api::uInt,
        size: crate::c_api::uInt,
    ) -> crate::c_api::voidpf {
        // SAFETY: the tests pass a pointer to an AtomicUsize as `opaque`.
        let count = unsafe { &*(opaque as *const AtomicUsize) };

        if count.fetch_add(1, core::sync::atomic::Ordering::Relaxed) != N {
            // SAFETY: forwards to the C allocator with unchanged arguments.
            unsafe { (crate::allocate::Allocator::C.zalloc)(opaque, items, size) }
        } else {
            core::ptr::null_mut()
        }
    }
3335
3336 #[test]
3337 fn init_invalid_allocator() {
3338 {
3339 let atomic = AtomicUsize::new(0);
3340 let mut stream = z_stream {
3341 zalloc: Some(fail_nth_allocation::<0>),
3342 zfree: Some(crate::allocate::Allocator::C.zfree),
3343 opaque: &atomic as *const _ as *const core::ffi::c_void as *mut _,
3344 ..z_stream::default()
3345 };
3346 assert_eq!(
3347 init(&mut stream, DeflateConfig::default()),
3348 ReturnCode::MemError
3349 );
3350 }
3351
3352 {
3353 let atomic = AtomicUsize::new(0);
3354 let mut stream = z_stream {
3355 zalloc: Some(fail_nth_allocation::<3>),
3356 zfree: Some(crate::allocate::Allocator::C.zfree),
3357 opaque: &atomic as *const _ as *const core::ffi::c_void as *mut _,
3358 ..z_stream::default()
3359 };
3360 assert_eq!(
3361 init(&mut stream, DeflateConfig::default()),
3362 ReturnCode::MemError
3363 );
3364 }
3365
3366 {
3367 let atomic = AtomicUsize::new(0);
3368 let mut stream = z_stream {
3369 zalloc: Some(fail_nth_allocation::<5>),
3370 zfree: Some(crate::allocate::Allocator::C.zfree),
3371 opaque: &atomic as *const _ as *const core::ffi::c_void as *mut _,
3372 ..z_stream::default()
3373 };
3374 assert_eq!(
3375 init(&mut stream, DeflateConfig::default()),
3376 ReturnCode::MemError
3377 );
3378 }
3379 }
3380
3381 mod copy_invalid_allocator {
3382 use super::*;
3383
3384 #[test]
3385 fn fail_0() {
3386 let mut stream = z_stream::default();
3387
3388 let atomic = AtomicUsize::new(0);
3389 stream.opaque = &atomic as *const _ as *const core::ffi::c_void as *mut _;
3390 stream.zalloc = Some(fail_nth_allocation::<6>);
3391 stream.zfree = Some(crate::allocate::Allocator::C.zfree);
3392
3393 assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3395
3396 let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
3397 unreachable!()
3398 };
3399
3400 let mut stream_copy = MaybeUninit::<DeflateStream>::zeroed();
3401
3402 assert_eq!(copy(&mut stream_copy, stream), ReturnCode::MemError);
3403
3404 assert!(end(stream).is_ok());
3405 }
3406
3407 #[test]
3408 fn fail_3() {
3409 let mut stream = z_stream::default();
3410
3411 let atomic = AtomicUsize::new(0);
3412 stream.zalloc = Some(fail_nth_allocation::<{ 6 + 3 }>);
3413 stream.zfree = Some(crate::allocate::Allocator::C.zfree);
3414 stream.opaque = &atomic as *const _ as *const core::ffi::c_void as *mut _;
3415
3416 assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3418
3419 let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
3420 unreachable!()
3421 };
3422
3423 let mut stream_copy = MaybeUninit::<DeflateStream>::zeroed();
3424
3425 assert_eq!(copy(&mut stream_copy, stream), ReturnCode::MemError);
3426
3427 assert!(end(stream).is_ok());
3428 }
3429
3430 #[test]
3431 fn fail_5() {
3432 let mut stream = z_stream::default();
3433
3434 let atomic = AtomicUsize::new(0);
3435 stream.zalloc = Some(fail_nth_allocation::<{ 6 + 5 }>);
3436 stream.zfree = Some(crate::allocate::Allocator::C.zfree);
3437 stream.opaque = &atomic as *const _ as *const core::ffi::c_void as *mut _;
3438
3439 assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3441
3442 let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
3443 unreachable!()
3444 };
3445
3446 let mut stream_copy = MaybeUninit::<DeflateStream>::zeroed();
3447
3448 assert_eq!(copy(&mut stream_copy, stream), ReturnCode::MemError);
3449
3450 assert!(end(stream).is_ok());
3451 }
3452 }
3453
3454 mod invalid_deflate_config {
3455 use super::*;
3456
3457 #[test]
3458 fn sanity_check() {
3459 let mut stream = z_stream::default();
3460 assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3461
3462 assert!(stream.zalloc.is_some());
3463 assert!(stream.zfree.is_some());
3464
3465 let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3467 assert_eq!(stream.state.level, 6);
3468
3469 assert!(end(stream).is_ok());
3470 }
3471
3472 #[test]
3473 fn window_bits_correction() {
3474 let mut stream = z_stream::default();
3476 let config = DeflateConfig {
3477 window_bits: 8,
3478 ..Default::default()
3479 };
3480 assert_eq!(init(&mut stream, config), ReturnCode::Ok);
3481 let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3482 assert_eq!(stream.state.w_bits(), 9);
3483
3484 assert!(end(stream).is_ok());
3485 }
3486
3487 #[test]
3488 fn window_bits_too_low() {
3489 let mut stream = z_stream::default();
3490 let config = DeflateConfig {
3491 window_bits: -16,
3492 ..Default::default()
3493 };
3494 assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
3495 }
3496
3497 #[test]
3498 fn window_bits_too_high() {
3499 let mut stream = z_stream::default();
3501 let config = DeflateConfig {
3502 window_bits: 42,
3503 ..Default::default()
3504 };
3505 assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
3506 }
3507 }
3508
3509 #[test]
3510 fn end_data_error() {
3511 let mut stream = z_stream::default();
3512 assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3513 let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3514
3515 let input = b"Hello World\n";
3517 stream.next_in = input.as_ptr() as *mut u8;
3518 stream.avail_in = input.len() as _;
3519 let output = &mut [0, 0, 0];
3520 stream.next_out = output.as_mut_ptr();
3521 stream.avail_out = output.len() as _;
3522
3523 assert_eq!(deflate(stream, DeflateFlush::NoFlush), ReturnCode::Ok);
3525
3526 assert!(end(stream).is_err());
3528 }
3529
    /// After `reset_keep`, the same stream can complete a second compression
    /// run to `StreamEnd`.
    #[test]
    fn test_reset_keep() {
        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut u8;
        stream.avail_in = input.len() as _;

        // First full compression run.
        let output = &mut [0; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        assert_eq!(reset_keep(stream), ReturnCode::Ok);

        // Second run after the reset must also reach StreamEnd.
        let output = &mut [0; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        assert!(end(stream).is_ok());
    }
3555
    /// Level-6 Huffman-only compression of a short string must reproduce the
    /// reference byte stream exactly.
    #[test]
    fn hello_world_huffman_only() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
        ];

        let input = "Hello World!\n";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::HuffmanOnly,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3583
    /// Level-1 ("quick") compression of a short string must reproduce the
    /// reference byte stream exactly.
    #[test]
    fn hello_world_quick() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
        ];

        let input = "Hello World!\n";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 1,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3611
    /// Level-1 compression of an input with repeated/mixed symbols must
    /// reproduce the reference byte stream exactly.
    #[test]
    fn hello_world_quick_random() {
        const EXPECTED: &[u8] = &[
            0x78, 0x01, 0x53, 0xe1, 0x50, 0x51, 0xe1, 0x52, 0x51, 0x51, 0x01, 0x00, 0x03, 0xec,
            0x00, 0xeb,
        ];

        let input = "$\u{8}$$\n$$$";

        let mut output = vec![0; 128];

        let config = DeflateConfig {
            level: 1,
            method: Method::Deflated,
            window_bits: crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);

        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output.len(), EXPECTED.len());

        assert_eq!(EXPECTED, output);
    }
3639
3640 fn fuzz_based_test(input: &[u8], config: DeflateConfig, expected: &[u8]) {
3641 let mut output_rs = [0; 1 << 17];
3642 let (output_rs, err) = compress_slice(&mut output_rs, input, config);
3643 assert_eq!(err, ReturnCode::Ok);
3644
3645 assert_eq!(output_rs, expected);
3646 }
3647
    /// RLE strategy at the default level on a tiny input; the expected bytes
    /// come from the reference implementation.
    #[test]
    fn simple_rle() {
        fuzz_based_test(
            "\0\0\0\0\u{6}".as_bytes(),
            DeflateConfig {
                level: -1,
                method: Method::Deflated,
                window_bits: 11,
                mem_level: 4,
                strategy: Strategy::Rle,
            },
            &[56, 17, 99, 0, 2, 54, 0, 0, 11, 0, 7],
        )
    }
3662
    /// Fuzzer-derived regression test (per its name, for an out-of-bounds
    /// access in `fill_window` — confirm against upstream history). The input
    /// and expected bytes are a fixed fixture; only exact reproduction matters.
    #[test]
    fn fill_window_out_of_bounds() {
        const INPUT: &[u8] = &[
            0x71, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x1d, 0x1d, 0x63,
            0x63, 0x63, 0x63, 0x63, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d,
            0x1d, 0x27, 0x0, 0x0, 0x0, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
            0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x48, 0x50,
            0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x0, 0x0, 0x0, 0x0,
            0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71,
            0x6a, 0x0, 0x0, 0x0, 0x0, 0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x31, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0,
            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
            0x50, 0x50, 0x50, 0x50, 0x48, 0x50, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x6, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x20, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x71, 0x75, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
            0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
            0x34, 0x34, 0x30, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x6,
        ];

        fuzz_based_test(
            INPUT,
            DeflateConfig {
                level: -1,
                method: Method::Deflated,
                window_bits: 9,
                mem_level: 1,
                strategy: Strategy::HuffmanOnly,
            },
            &[
                0x18, 0x19, 0x4, 0xc1, 0x21, 0x1, 0xc4, 0x0, 0x10, 0x3, 0xb0, 0x18, 0x29, 0x1e,
                0x7e, 0x17, 0x83, 0xf5, 0x70, 0x6c, 0xac, 0xfe, 0xc9, 0x27, 0xdb, 0xb6, 0x6f, 0xdb,
                0xb6, 0x6d, 0xdb, 0x80, 0x24, 0xb9, 0xbb, 0xbb, 0x24, 0x49, 0x92, 0x24, 0xf, 0x2,
                0xd8, 0x36, 0x0, 0xf0, 0x3, 0x0, 0x0, 0x24, 0xd0, 0xb6, 0x6d, 0xdb, 0xb6, 0x6d,
                0xdb, 0xbe, 0x6d, 0xf9, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x80, 0x30, 0x0, 0xc3, 0x22,
                0x68, 0xf, 0x36, 0x90, 0xc2, 0xb5, 0xfa, 0x7f, 0x48, 0x80, 0x81, 0xb, 0x40, 0x55,
                0x55, 0x55, 0xd5, 0x16, 0x80, 0xaa, 0x7, 0x9, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
                0xe, 0x7c, 0x82, 0xe0, 0x98, 0x0, 0x0, 0x0, 0x4, 0x60, 0x10, 0xf9, 0x8c, 0xe2,
                0xe5, 0xfa, 0x3f, 0x2, 0x54, 0x55, 0x55, 0x65, 0x0, 0xa8, 0xaa, 0xaa, 0xaa, 0xba,
                0x2, 0x50, 0xb5, 0x90, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x78, 0x82, 0xe0, 0xd0,
                0x8a, 0x41, 0x0, 0x0, 0xa2, 0x58, 0x54, 0xb7, 0x60, 0x83, 0x9a, 0x6a, 0x4, 0x96,
                0x87, 0xba, 0x51, 0xf8, 0xfb, 0x9b, 0x26, 0xfc, 0x0, 0x1c, 0x7, 0x6c, 0xdb, 0xb6,
                0x6d, 0xdb, 0xb6, 0x6d, 0xf7, 0xa8, 0x3a, 0xaf, 0xaa, 0x6a, 0x3, 0xf8, 0xc2, 0x3,
                0x40, 0x55, 0x55, 0x55, 0xd5, 0x5b, 0xf8, 0x80, 0xaa, 0x7a, 0xb, 0x0, 0x7f, 0x82,
                0xe0, 0x98, 0x0, 0x40, 0x18, 0x0, 0x82, 0xd8, 0x49, 0x40, 0x2, 0x22, 0x7e, 0xeb,
                0x80, 0xa6, 0xc, 0xa0, 0x9f, 0xa4, 0x2a, 0x38, 0xf, 0x0, 0x0, 0xe7, 0x1, 0xdc,
                0x55, 0x95, 0x17, 0x0, 0x0, 0xae, 0x0, 0x38, 0xc0, 0x67, 0xdb, 0x36, 0x80, 0x2b,
                0x0, 0xe, 0xf0, 0xd9, 0xf6, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x0, 0x30, 0xc, 0x83, 0x22,
                0x69, 0x7, 0xc6, 0xea, 0xff, 0x19, 0x0, 0x0, 0x80, 0xaa, 0x0, 0x0, 0x0, 0x0, 0x0,
                0x0, 0x8e, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x6a,
                0xf5, 0x63, 0x60, 0x60, 0x3, 0x0, 0xee, 0x8a, 0x88, 0x67,
            ],
        )
    }
3752
    /// window_bits = 31 selects gzip wrapping; with no custom header set, the
    /// default gzip header and trailer must be produced.
    #[test]
    fn gzip_no_header() {
        let config = DeflateConfig {
            level: 9,
            method: Method::Deflated,
            window_bits: 31,
            ..Default::default()
        };

        let input = b"Hello World!";
        // The OS byte of the header differs per platform.
        let os = gz_header::OS_CODE;

        fuzz_based_test(
            input,
            config,
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 2, os, 243, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
                81, 4, 0, 163, 28, 41, 28, 12, 0, 0, 0,
            ],
        )
    }
3774
    /// Level-0 (stored blocks) gzip output must carry the correct trailer
    /// checksum; expected bytes are a fixed reference fixture.
    #[test]
    #[rustfmt::skip]
    fn gzip_stored_block_checksum() {
        fuzz_based_test(
            &[
                27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0,
            ],
            DeflateConfig {
                level: 0,
                method: Method::Deflated,
                window_bits: 26,
                mem_level: 6,
                strategy: Strategy::Default,
            },
            &[
                31, 139, 8, 0, 0, 0, 0, 0, 4, gz_header::OS_CODE, 1, 18, 0, 237, 255, 27, 27, 27, 27, 27, 27, 27,
                27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0, 60, 101, 156, 55, 18, 0, 0, 0,
            ],
        )
    }
3795
    /// Pre-fills the pending buffer so that writing the gzip header cannot
    /// complete within the available output, forcing an intermediate flush.
    #[test]
    fn gzip_header_pending_flush() {
        let extra = "aaaaaaaaaaaaaaaaaaaa\0";
        let name = "bbbbbbbbbbbbbbbbbbbb\0";
        let comment = "cccccccccccccccccccc\0";

        let mut header = gz_header {
            text: 0,
            time: 0,
            xflags: 0,
            os: 0,
            extra: extra.as_ptr() as *mut _,
            extra_len: extra.len() as _,
            extra_max: 0,
            name: name.as_ptr() as *mut _,
            name_max: 0,
            comment: comment.as_ptr() as *mut _,
            comm_max: 0,
            hcrc: 1,
            done: 0,
        };

        // mem_level 1 gives the smallest pending buffer (512 bytes, asserted below).
        let config = DeflateConfig {
            window_bits: 31,
            mem_level: 1,
            ..Default::default()
        };

        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, config), ReturnCode::Ok);

        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
            unreachable!()
        };

        unsafe { set_header(stream, Some(&mut header)) };

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut _;
        stream.avail_in = input.len() as _;

        // Only 100 bytes of output are made available for the first call.
        let mut output = [0u8; 1024];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = 100;

        assert_eq!(stream.state.bit_writer.pending.capacity(), 512);

        // Nearly fill the pending buffer so the header does not fit in one go.
        stream.state.bit_writer.pending.extend(&[0; 500]);

        // The first Finish cannot complete within the 100 available bytes.
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::Ok);

        // With the full output buffer available, the stream completes.
        stream.avail_out = output.len() as _;
        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        let n = stream.total_out as usize;

        assert!(end(stream).is_ok());

        let output_rs = &mut output[..n];

        assert_eq!(output_rs.len(), 500 + 99);
    }
3861
    /// Round-trips a gzip stream carrying extra/name/comment header fields and
    /// a header CRC, then checks inflate recovers each field verbatim.
    #[test]
    fn gzip_with_header() {
        // All header strings are explicitly NUL-terminated C strings.
        let extra = "some extra stuff\0";
        let name = "nomen est omen\0";
        let comment = "such comment\0";

        let mut header = gz_header {
            text: 0,
            time: 0,
            xflags: 0,
            os: 0,
            extra: extra.as_ptr() as *mut _,
            extra_len: extra.len() as _,
            extra_max: 0,
            name: name.as_ptr() as *mut _,
            name_max: 0,
            comment: comment.as_ptr() as *mut _,
            comm_max: 0,
            hcrc: 1,
            done: 0,
        };

        // window_bits 31 = gzip wrapping with the maximum window.
        let config = DeflateConfig {
            window_bits: 31,
            ..Default::default()
        };

        let mut stream = z_stream::default();
        assert_eq!(init(&mut stream, config), ReturnCode::Ok);

        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
            unreachable!()
        };

        unsafe { set_header(stream, Some(&mut header)) };

        let input = b"Hello World\n";
        stream.next_in = input.as_ptr() as *mut _;
        stream.avail_in = input.len() as _;

        let mut output = [0u8; 256];
        stream.next_out = output.as_mut_ptr();
        stream.avail_out = output.len() as _;

        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);

        let n = stream.total_out as usize;

        assert!(end(stream).is_ok());

        let output_rs = &mut output[..n];

        // Fixed expected size for this input + header combination.
        assert_eq!(output_rs.len(), 81);

        // Decompress and verify the header fields survive the round trip.
        {
            let mut stream = z_stream::default();

            let config = InflateConfig {
                window_bits: config.window_bits,
            };

            assert_eq!(crate::inflate::init(&mut stream, config), ReturnCode::Ok);

            let Some(stream) = (unsafe { InflateStream::from_stream_mut(&mut stream) }) else {
                unreachable!();
            };

            stream.next_in = output_rs.as_mut_ptr() as _;
            stream.avail_in = output_rs.len() as _;

            let mut output = [0u8; 12];
            stream.next_out = output.as_mut_ptr();
            stream.avail_out = output.len() as _;

            // Receiving buffers for the decoded header fields.
            let mut extra_buf = [0u8; 64];
            let mut name_buf = [0u8; 64];
            let mut comment_buf = [0u8; 64];

            let mut header = gz_header {
                text: 0,
                time: 0,
                xflags: 0,
                os: 0,
                extra: extra_buf.as_mut_ptr(),
                extra_len: 0,
                extra_max: extra_buf.len() as _,
                name: name_buf.as_mut_ptr(),
                name_max: name_buf.len() as _,
                comment: comment_buf.as_mut_ptr(),
                comm_max: comment_buf.len() as _,
                hcrc: 0,
                done: 0,
            };

            assert_eq!(
                unsafe { crate::inflate::get_header(stream, Some(&mut header)) },
                ReturnCode::Ok
            );

            assert_eq!(
                unsafe { crate::inflate::inflate(stream, InflateFlush::Finish) },
                ReturnCode::StreamEnd
            );

            crate::inflate::end(stream);

            // Each decoded field must equal the original, minus the trailing NUL.
            assert!(!header.comment.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.comment.cast()) }
                    .to_str()
                    .unwrap(),
                comment.trim_end_matches('\0')
            );

            assert!(!header.name.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.name.cast()) }
                    .to_str()
                    .unwrap(),
                name.trim_end_matches('\0')
            );

            assert!(!header.extra.is_null());
            assert_eq!(
                unsafe { CStr::from_ptr(header.extra.cast()) }
                    .to_str()
                    .unwrap(),
                extra.trim_end_matches('\0')
            );
        }
    }
3996
3997 #[test]
3998 fn insufficient_compress_space() {
3999 const DATA: &[u8] = include_bytes!("deflate/test-data/inflate_buf_error.dat");
4000
4001 fn helper(deflate_buf: &mut [u8]) -> ReturnCode {
4002 let config = DeflateConfig {
4003 level: 0,
4004 method: Method::Deflated,
4005 window_bits: 10,
4006 mem_level: 6,
4007 strategy: Strategy::Default,
4008 };
4009
4010 let (output, err) = compress_slice(deflate_buf, DATA, config);
4011 assert_eq!(err, ReturnCode::Ok);
4012
4013 let config = InflateConfig {
4014 window_bits: config.window_bits,
4015 };
4016
4017 let mut uncompr = [0; 1 << 17];
4018 let (uncompr, err) = uncompress_slice(&mut uncompr, output, config);
4019
4020 if err == ReturnCode::Ok {
4021 assert_eq!(DATA, uncompr);
4022 }
4023
4024 err
4025 }
4026
4027 let mut output = [0; 1 << 17];
4028
4029 assert_eq!(helper(&mut output[..1 << 16]), ReturnCode::DataError);
4031
4032 assert_eq!(helper(&mut output), ReturnCode::Ok);
4034 }
4035
4036 fn test_flush(flush: DeflateFlush, expected: &[u8]) {
4037 let input = b"Hello World!\n";
4038
4039 let config = DeflateConfig {
4040 level: 6, method: Method::Deflated,
4042 window_bits: 16 + crate::MAX_WBITS,
4043 mem_level: DEF_MEM_LEVEL,
4044 strategy: Strategy::Default,
4045 };
4046
4047 let mut output_rs = vec![0; 128];
4048
4049 let expected_err = ReturnCode::DataError;
4052
4053 let (rs, err) = compress_slice_with_flush(&mut output_rs, input, config, flush);
4054 assert_eq!(expected_err, err);
4055
4056 assert_eq!(rs, expected);
4057 }
4058
4059 #[test]
4060 #[rustfmt::skip]
4061 fn sync_flush() {
4062 test_flush(
4063 DeflateFlush::SyncFlush,
4064 &[
4065 31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4066 81, 228, 2, 0, 0, 0, 255, 255,
4067 ],
4068 )
4069 }
4070
4071 #[test]
4072 #[rustfmt::skip]
4073 fn partial_flush() {
4074 test_flush(
4075 DeflateFlush::PartialFlush,
4076 &[
4077 31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4078 81, 228, 2, 8,
4079 ],
4080 );
4081 }
4082
4083 #[test]
4084 #[rustfmt::skip]
4085 fn full_flush() {
4086 test_flush(
4087 DeflateFlush::FullFlush,
4088 &[
4089 31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4090 81, 228, 2, 0, 0, 0, 255, 255,
4091 ],
4092 );
4093 }
4094
4095 #[test]
4096 #[rustfmt::skip]
4097 fn block_flush() {
4098 test_flush(
4099 DeflateFlush::Block,
4100 &[
4101 31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4102 81, 228, 2,
4103 ],
4104 );
4105 }
4106
    #[test]
    fn split_deflate() {
        // Deflate the input as two independent gzip streams (the first ended
        // with a sync flush, the second with `Finish`), then splice them into
        // a single valid gzip stream by stripping the second stream's header,
        // combining the crc values and lengths by hand, and verify that the
        // spliced stream inflates back to the full original input.
        let input = "Hello World!\n";

        let (input1, input2) = input.split_at(6);

        let mut output1 = vec![0; 128];
        let mut output2 = vec![0; 128];

        let config = DeflateConfig {
            level: 6, method: Method::Deflated,
            window_bits: 16 + crate::MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        };

        let (prefix, err) = compress_slice_with_flush(
            &mut output1,
            input1.as_bytes(),
            config,
            DeflateFlush::SyncFlush,
        );
        // a sync flush does not terminate the stream, so the compress helper
        // reports DataError rather than Ok
        assert_eq!(err, ReturnCode::DataError);

        let (output2, err) = compress_slice_with_flush(
            &mut output2,
            input2.as_bytes(),
            config,
            DeflateFlush::Finish,
        );
        assert_eq!(err, ReturnCode::Ok);

        let inflate_config = crate::inflate::InflateConfig {
            window_bits: 16 + 15,
        };

        // split off the 8-byte gzip trailer: crc32 (LE) then input length (LE)
        let (suffix, end) = output2.split_at(output2.len() - 8);
        let (crc2, len2) = end.split_at(4);
        let crc2 = u32::from_le_bytes(crc2.try_into().unwrap());

        // skip the 10-byte gzip header of the second stream
        let suffix = &suffix[10..];

        let mut result: Vec<u8> = Vec::new();
        result.extend(prefix.iter());
        result.extend(suffix);

        let len1 = input1.len() as u32;
        let len2 = u32::from_le_bytes(len2.try_into().unwrap());
        assert_eq!(len2 as usize, input2.len());

        // combine the two partial checksums into the checksum of the whole input
        let crc1 = crate::crc32(0, input1.as_bytes());
        let crc = crate::crc32_combine(crc1, crc2, len2 as u64);

        // sanity check against the crc computed over the input in one go
        let crc_cheating = crate::crc32(0, input.as_bytes());
        assert_eq!(crc, crc_cheating);

        // append the hand-built trailer for the combined stream
        result.extend(crc.to_le_bytes());
        result.extend((len1 + len2).to_le_bytes());

        let mut output = vec![0; 128];
        let (output, err) = crate::inflate::uncompress_slice(&mut output, &result, inflate_config);
        assert_eq!(err, ReturnCode::Ok);

        assert_eq!(output, input.as_bytes());
    }
4184
    #[test]
    fn inflate_window_copy_slice() {
        // Regression test for the inflate window-copy path (cf. the test
        // name): decompressing `compressed` with window_bits 25 must yield
        // exactly `uncompressed`. The data is presumably fuzz-derived —
        // it has no other discernible structure.
        let uncompressed = [
            9, 126, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12,
            10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 2, 0, 0, 0, 63, 1, 0, 12, 2,
            36, 0, 28, 0, 0, 0, 1, 0, 0, 63, 63, 13, 0, 0, 0, 0, 0, 0, 0, 63, 63, 63, 63, 0, 0, 0,
            0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 0, 0, 0, 9, 0, 0, 0, 9, 0, 0, 12, 33, 2, 0, 0, 8,
            0, 4, 0, 0, 0, 12, 10, 41, 12, 10, 47,
        ];

        // a gzip stream (note the 31, 139 magic bytes) for the data above
        let compressed = &[
            31, 139, 8, 0, 0, 0, 0, 0, 4, 3, 181, 193, 49, 14, 194, 32, 24, 128, 209, 175, 192, 0,
            228, 151, 232, 206, 66, 226, 226, 96, 60, 2, 113, 96, 235, 13, 188, 139, 103, 23, 106,
            104, 108, 100, 49, 169, 239, 185, 39, 11, 199, 7, 51, 39, 171, 248, 118, 226, 63, 52,
            157, 120, 86, 102, 78, 86, 209, 104, 58, 241, 84, 129, 166, 12, 4, 154, 178, 229, 202,
            30, 36, 130, 166, 19, 79, 21, 104, 202, 64, 160, 41, 91, 174, 236, 65, 34, 10, 200, 19,
            162, 206, 68, 96, 130, 156, 15, 188, 229, 138, 197, 157, 161, 35, 3, 87, 126, 245, 0,
            28, 224, 64, 146, 2, 139, 1, 196, 95, 196, 223, 94, 10, 96, 92, 33, 86, 2, 0, 0,
        ];

        let config = InflateConfig { window_bits: 25 };

        let mut dest_vec_rs = vec![0u8; uncompressed.len()];
        let (output_rs, error) =
            crate::inflate::uncompress_slice(&mut dest_vec_rs, compressed, config);

        assert_eq!(ReturnCode::Ok, error);
        assert_eq!(output_rs, uncompressed);
    }
4233
    #[test]
    fn hash_calc_difference() {
        // Fuzz-based regression test: `fuzz_based_test` compresses `input`
        // with the given config and compares the result against `expected`.
        // NOTE(review): judging by the name, this input once exposed a
        // difference between hash-calculation variants (see `hash_calc`) —
        // confirm against the commit that introduced it.
        let input = [
            0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0,
            0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 112, 102, 102, 102, 102, 102, 102, 102, 102,
            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 0,
            0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0,
            50, 0,
        ];

        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: 9,
            mem_level: 8,
            strategy: Strategy::Default,
        };

        // pinned compressed output for the input and config above
        let expected = [
            24, 149, 99, 96, 96, 96, 96, 208, 6, 17, 112, 138, 129, 193, 128, 1, 29, 24, 50, 208,
            1, 200, 146, 169, 79, 24, 74, 59, 96, 147, 52, 71, 22, 70, 246, 88, 26, 94, 80, 128,
            83, 6, 162, 219, 144, 76, 183, 210, 5, 8, 67, 105, 36, 159, 35, 128, 57, 118, 97, 100,
            160, 197, 192, 192, 96, 196, 0, 0, 3, 228, 25, 128,
        ];

        fuzz_based_test(&input, config, &expected);
    }
4277
    // Compile-time layout checks: pin the offsets of `State`'s cache-line
    // marker fields to 64-byte boundaries. Only evaluated on x86_64 and
    // aarch64 (per the cfg below).
    #[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
    mod _cache_lines {
        use super::State;
        use memoffset::offset_of;

        // `status` must start the struct; each `_cache_line_N` marker must
        // land exactly at the start of cache line N + 1.
        const _: () = assert!(offset_of!(State, status) == 0);
        const _: () = assert!(offset_of!(State, _cache_line_0) == 64);
        const _: () = assert!(offset_of!(State, _cache_line_1) == 128);
        const _: () = assert!(offset_of!(State, _cache_line_2) == 192);
        const _: () = assert!(offset_of!(State, _cache_line_3) == 256);
    }
4291}