// zlib_rs/deflate.rs

use core::{ffi::CStr, marker::PhantomData, mem::MaybeUninit, ops::ControlFlow, ptr::NonNull};

use crate::{
    adler32::adler32,
    allocate::Allocator,
    c_api::{gz_header, internal_state, z_checksum, z_stream},
    crc32::{crc32, Crc32Fold},
    trace,
    weak_slice::{WeakArrayMut, WeakSliceMut},
    DeflateFlush, ReturnCode, ADLER32_INITIAL_VALUE, CRC32_INITIAL_VALUE, MAX_WBITS, MIN_WBITS,
};

use self::{
    algorithm::CONFIGURATION_TABLE,
    hash_calc::{HashCalcVariant, RollHashCalc, StandardHashCalc},
    pending::Pending,
    sym_buf::SymBuf,
    trees_tbl::STATIC_LTREE,
    window::Window,
};

mod algorithm;
mod compare256;
mod hash_calc;
mod longest_match;
mod pending;
mod slide_hash;
mod sym_buf;
mod trees_tbl;
mod window;

// Position relative to the current window
pub(crate) type Pos = u16;

// SAFETY: This struct must have the same layout as [`z_stream`], so that casts and transmutations
// between the two can work without UB.
#[repr(C)]
pub struct DeflateStream<'a> {
    pub(crate) next_in: *mut crate::c_api::Bytef,
    pub(crate) avail_in: crate::c_api::uInt,
    pub(crate) total_in: crate::c_api::z_size,
    pub(crate) next_out: *mut crate::c_api::Bytef,
    pub(crate) avail_out: crate::c_api::uInt,
    pub(crate) total_out: crate::c_api::z_size,
    pub(crate) msg: *const core::ffi::c_char,
    pub(crate) state: &'a mut State<'a>,
    pub(crate) alloc: Allocator<'a>,
    pub(crate) data_type: core::ffi::c_int,
    pub(crate) adler: crate::c_api::z_checksum,
    pub(crate) reserved: crate::c_api::uLong,
}

unsafe impl Sync for DeflateStream<'_> {}
unsafe impl Send for DeflateStream<'_> {}

impl<'a> DeflateStream<'a> {
    // z_stream and DeflateStream must have the same layout. Do our best to check if this is true.
    // (imperfect check, but should catch most mistakes.)
    const _S: () = assert!(core::mem::size_of::<z_stream>() == core::mem::size_of::<Self>());
    const _A: () = assert!(core::mem::align_of::<z_stream>() == core::mem::align_of::<Self>());

    /// # Safety
    ///
    /// Behavior is undefined if any of the following conditions are violated:
    ///
    /// - `strm` satisfies the conditions of [`pointer::as_mut`]
    /// - if not `NULL`, `strm` was initialized using [`init`] or similar
    ///
    /// [`pointer::as_mut`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.as_mut
    #[inline(always)]
    pub unsafe fn from_stream_mut(strm: *mut z_stream) -> Option<&'a mut Self> {
        {
            // Safety: ptr points to a valid value of type z_stream (if non-null)
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        // SAFETY: DeflateStream has an equivalent layout as z_stream
        unsafe { strm.cast::<DeflateStream>().as_mut() }
    }

    /// # Safety
    ///
    /// Behavior is undefined if any of the following conditions are violated:
    ///
    /// - `strm` satisfies the conditions of [`pointer::as_ref`]
    /// - if not `NULL`, `strm` was initialized using [`init`] or similar
    ///
    /// [`pointer::as_ref`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.as_ref
    #[inline(always)]
    pub unsafe fn from_stream_ref(strm: *const z_stream) -> Option<&'a Self> {
        {
            // Safety: ptr points to a valid value of type z_stream (if non-null)
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        // SAFETY: DeflateStream has an equivalent layout as z_stream
        unsafe { strm.cast::<DeflateStream>().as_ref() }
    }

    fn as_z_stream_mut(&mut self) -> &mut z_stream {
        // SAFETY: a valid &mut DeflateStream is also a valid &mut z_stream
        unsafe { &mut *(self as *mut DeflateStream as *mut z_stream) }
    }

    pub fn pending(&self) -> (usize, u8) {
        (
            self.state.bit_writer.pending.pending,
            self.state.bit_writer.bits_used,
        )
    }

    pub fn new(config: DeflateConfig) -> Self {
        let mut inner = crate::c_api::z_stream::default();

        let ret = crate::deflate::init(&mut inner, config);
        assert_eq!(ret, ReturnCode::Ok);

        unsafe { core::mem::transmute(inner) }
    }
}

/// number of elements in hash table
pub(crate) const HASH_SIZE: usize = 65536;
/// log2(HASH_SIZE)
const HASH_BITS: usize = 16;

/// Maximum value for memLevel in deflateInit2
const MAX_MEM_LEVEL: i32 = 9;
pub const DEF_MEM_LEVEL: i32 = if MAX_MEM_LEVEL > 8 { 8 } else { MAX_MEM_LEVEL };

#[repr(i32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Method {
    #[default]
    Deflated = 8,
}

impl TryFrom<i32> for Method {
    type Error = ();

    fn try_from(value: i32) -> Result<Self, Self::Error> {
        match value {
            8 => Ok(Self::Deflated),
            _ => Err(()),
        }
    }
}

/// Configuration for compression.
///
/// Used with [`compress_slice`].
///
/// In most cases only the compression level is relevant. We provide three profiles:
///
/// - [`DeflateConfig::best_speed`] provides the fastest compression (at the cost of compression
///   quality)
/// - [`DeflateConfig::default`] tries to find a happy middle
/// - [`DeflateConfig::best_compression`] provides the best compression (at the cost of longer
///   runtime)
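///
/// # Example
///
/// A small usage sketch (assuming this crate is used as `zlib_rs`; the zlib level
/// constants are `Z_BEST_SPEED` = 1 and `Z_BEST_COMPRESSION` = 9):
///
/// ```
/// use zlib_rs::deflate::DeflateConfig;
///
/// let fast = DeflateConfig::best_speed();
/// let best = DeflateConfig::best_compression();
/// assert_eq!(fast.level, 1);
/// assert_eq!(best.level, 9);
/// ```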
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub struct DeflateConfig {
    pub level: i32,
    pub method: Method,
    pub window_bits: i32,
    pub mem_level: i32,
    pub strategy: Strategy,
}

#[cfg(any(test, feature = "__internal-test"))]
impl quickcheck::Arbitrary for DeflateConfig {
    fn arbitrary(g: &mut quickcheck::Gen) -> Self {
        let mem_levels: Vec<_> = (1..=9).collect();
        let levels: Vec<_> = (0..=9).collect();

        let mut window_bits = Vec::new();
        window_bits.extend(9..=15); // zlib
        window_bits.extend(9 + 16..=15 + 16); // gzip
        window_bits.extend(-15..=-9); // raw

        Self {
            level: *g.choose(&levels).unwrap(),
            method: Method::Deflated,
            window_bits: *g.choose(&window_bits).unwrap(),
            mem_level: *g.choose(&mem_levels).unwrap(),
            strategy: *g
                .choose(&[
                    Strategy::Default,
                    Strategy::Filtered,
                    Strategy::HuffmanOnly,
                    Strategy::Rle,
                    Strategy::Fixed,
                ])
                .unwrap(),
        }
    }
}

impl DeflateConfig {
    pub fn new(level: i32) -> Self {
        Self {
            level,
            ..Self::default()
        }
    }

    /// Configure for the best compression (takes longer).
    pub fn best_compression() -> Self {
        Self::new(crate::c_api::Z_BEST_COMPRESSION)
    }

    /// Configure for the fastest compression (compresses less well).
    pub fn best_speed() -> Self {
        Self::new(crate::c_api::Z_BEST_SPEED)
    }
}

impl Default for DeflateConfig {
    fn default() -> Self {
        Self {
            level: crate::c_api::Z_DEFAULT_COMPRESSION,
            method: Method::Deflated,
            window_bits: MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        }
    }
}

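/// Initializes `stream` for compression with the settings in `config`.
///
/// A sketch of how the sign and offset of `window_bits` select the wrapper, as
/// implemented by the checks below:
///
/// ```text
/// -15..=-9  raw deflate (no wrapper); the absolute value is used
///   9..=15  zlib wrapper
///  25..=31  gzip wrapper; 16 is subtracted to get the actual window bits
/// ```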
pub fn init(stream: &mut z_stream, config: DeflateConfig) -> ReturnCode {
    let DeflateConfig {
        mut level,
        method: _,
        mut window_bits,
        mem_level,
        strategy,
    } = config;

    /* Todo: ignore strm->next_in if we use it as window */
    stream.msg = core::ptr::null_mut();

    // For safety we must really make sure that alloc and free are consistent.
    // This is a (slight) deviation from stock zlib: in this crate we pick the Rust
    // allocator as the default, but `libz-rs-sys` always explicitly sets an allocator,
    // and can configure the C allocator.
    #[cfg(feature = "rust-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_rust_allocator()
    }

    #[cfg(feature = "c-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_c_allocator()
    }

    if stream.zalloc.is_none() || stream.zfree.is_none() {
        return ReturnCode::StreamError;
    }

    if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        level = 6;
    }

    let wrap = if window_bits < 0 {
        if window_bits < -MAX_WBITS {
            return ReturnCode::StreamError;
        }
        window_bits = -window_bits;

        0
    } else if window_bits > MAX_WBITS {
        window_bits -= 16;
        2
    } else {
        1
    };

    if (!(1..=MAX_MEM_LEVEL).contains(&mem_level))
        || !(MIN_WBITS..=MAX_WBITS).contains(&window_bits)
        || !(0..=9).contains(&level)
        || (window_bits == 8 && wrap != 1)
    {
        return ReturnCode::StreamError;
    }

    let window_bits = if window_bits == 8 {
        9 /* until 256-byte window bug fixed */
    } else {
        window_bits as usize
    };

    let alloc = Allocator {
        zalloc: stream.zalloc.unwrap(),
        zfree: stream.zfree.unwrap(),
        opaque: stream.opaque,
        _marker: PhantomData,
    };

    let lit_bufsize = 1 << (mem_level + 6); // 16K elements by default
    let allocs = DeflateAllocOffsets::new(window_bits, lit_bufsize);

    // FIXME: pointer methods on NonNull are stable since 1.80.0
    let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
        return ReturnCode::MemError;
    };

    let w_size = 1 << window_bits;
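    // Round the allocation up to the next 64-byte boundary so that the state and its
    // buffers start cache-line aligned. For example (addresses assumed), a base
    // address of 0x1008 gives align_offset = 0x38 and buf = 0x1040.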
    let align_offset = (allocation_start.as_ptr() as usize).next_multiple_of(64)
        - (allocation_start.as_ptr() as usize);
    let buf = unsafe { allocation_start.as_ptr().add(align_offset) };

    let (window, prev, head, pending, sym_buf) = unsafe {
        let window_ptr: *mut u8 = buf.add(allocs.window_pos);
        window_ptr.write_bytes(0u8, 2 * w_size);
        let window = Window::from_raw_parts(window_ptr, window_bits);

        // FIXME: write_bytes is stable for NonNull since 1.80.0
        let prev_ptr = buf.add(allocs.prev_pos).cast::<Pos>();
        prev_ptr.write_bytes(0, w_size);
        let prev = WeakSliceMut::from_raw_parts_mut(prev_ptr, w_size);

        let head_ptr = buf.add(allocs.head_pos).cast::<[Pos; HASH_SIZE]>();
        // Zero out the full array. `write_bytes` will write `1 * size_of::<[Pos; HASH_SIZE]>()` bytes.
        head_ptr.write_bytes(0, 1);
        let head = WeakArrayMut::<Pos, HASH_SIZE>::from_ptr(head_ptr);

        let pending_ptr = buf.add(allocs.pending_pos).cast::<MaybeUninit<u8>>();
        let pending = Pending::from_raw_parts(pending_ptr, 4 * lit_bufsize);

        let sym_buf_ptr = buf.add(allocs.sym_buf_pos);
        let sym_buf = SymBuf::from_raw_parts(sym_buf_ptr, lit_bufsize);

        (window, prev, head, pending, sym_buf)
    };

    let state = State {
        status: Status::Init,

        // window
        w_size,

        // allocated values
        window,
        prev,
        head,
        bit_writer: BitWriter::from_pending(pending),

        //
        lit_bufsize,

        //
        sym_buf,

        //
        level: level as i8, // set to zero again for testing?
        strategy,

        // these fields are not set explicitly at this point
        last_flush: 0,
        wrap,
        strstart: 0,
        block_start: 0,
        block_open: 0,
        window_size: 0,
        insert: 0,
        matches: 0,
        opt_len: 0,
        static_len: 0,
        lookahead: 0,
        ins_h: 0,
        max_chain_length: 0,
        max_lazy_match: 0,
        good_match: 0,
        nice_match: 0,

        //
        l_desc: TreeDesc::EMPTY,
        d_desc: TreeDesc::EMPTY,
        bl_desc: TreeDesc::EMPTY,

        //
        crc_fold: Crc32Fold::new(),
        gzhead: None,
        gzindex: 0,

        //
        match_start: 0,
        prev_match: 0,
        match_available: false,
        prev_length: 0,

        allocation_start,
        total_allocation_size: allocs.total_size,

        // just provide a valid default; gets set properly later
        hash_calc_variant: HashCalcVariant::Standard,
        _cache_line_0: (),
        _cache_line_1: (),
        _cache_line_2: (),
        _cache_line_3: (),
        _padding_0: [0; 16],
    };

    let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
    unsafe { state_allocation.write(state) };

    stream.state = state_allocation.cast::<internal_state>();

    let Some(stream) = (unsafe { DeflateStream::from_stream_mut(stream) }) else {
        if cfg!(debug_assertions) {
            unreachable!("we should have initialized the stream properly");
        }
        return ReturnCode::StreamError;
    };

    reset(stream)
}

pub fn params(stream: &mut DeflateStream, level: i32, strategy: Strategy) -> ReturnCode {
    let level = if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        6
    } else {
        level
    };

    if !(0..=9).contains(&level) {
        return ReturnCode::StreamError;
    }

    let level = level as i8;

    let func = CONFIGURATION_TABLE[stream.state.level as usize].func;

    let state = &mut stream.state;

    // FIXME: use fn_addr_eq when it's available in our MSRV. The comparison returning false here
    // is not functionally incorrect, but would be inconsistent with zlib-ng.
    #[allow(unpredictable_function_pointer_comparisons)]
    if (strategy != state.strategy || func != CONFIGURATION_TABLE[level as usize].func)
        && state.last_flush != -2
    {
        // Flush the last buffer.
        let err = deflate(stream, DeflateFlush::Block);
        if err == ReturnCode::StreamError {
            return err;
        }

        let state = &mut stream.state;

        if stream.avail_in != 0
            || ((state.strstart as isize - state.block_start) + state.lookahead as isize) != 0
        {
            return ReturnCode::BufError;
        }
    }

    let state = &mut stream.state;

    if state.level != level {
        if state.level == 0 && state.matches != 0 {
            if state.matches == 1 {
                self::slide_hash::slide_hash(state);
            } else {
                state.head.as_mut_slice().fill(0);
            }
            state.matches = 0;
        }

        lm_set_level(state, level);
    }

    state.strategy = strategy;

    ReturnCode::Ok
}

pub fn set_dictionary(stream: &mut DeflateStream, mut dictionary: &[u8]) -> ReturnCode {
    let state = &mut stream.state;

    let wrap = state.wrap;

    if wrap == 2 || (wrap == 1 && state.status != Status::Init) || state.lookahead != 0 {
        return ReturnCode::StreamError;
    }

    // when using zlib wrappers, compute Adler-32 for provided dictionary
    if wrap == 1 {
        stream.adler = adler32(stream.adler as u32, dictionary) as z_checksum;
    }

    // avoid computing Adler-32 in read_buf
    state.wrap = 0;

    // if dictionary would fill window, just replace the history
    if dictionary.len() >= state.window.capacity() {
        if wrap == 0 {
            // clear the hash table
            state.head.as_mut_slice().fill(0);

            state.strstart = 0;
            state.block_start = 0;
            state.insert = 0;
        } else {
            /* already empty otherwise */
        }

        // use the tail
        dictionary = &dictionary[dictionary.len() - state.w_size..];
    }

    // insert dictionary into window and hash
    let avail = stream.avail_in;
    let next = stream.next_in;
    stream.avail_in = dictionary.len() as _;
    stream.next_in = dictionary.as_ptr() as *mut u8;
    fill_window(stream);

    while stream.state.lookahead >= STD_MIN_MATCH {
        let str = stream.state.strstart;
        let n = stream.state.lookahead - (STD_MIN_MATCH - 1);
        stream.state.insert_string(str, n);
        stream.state.strstart = str + n;
        stream.state.lookahead = STD_MIN_MATCH - 1;
        fill_window(stream);
    }

    let state = &mut stream.state;

    state.strstart += state.lookahead;
    state.block_start = state.strstart as _;
    state.insert = state.lookahead;
    state.lookahead = 0;
    state.prev_length = 0;
    state.match_available = false;

    // restore the state
    stream.next_in = next;
    stream.avail_in = avail;
    state.wrap = wrap;

    ReturnCode::Ok
}

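/// Inserts bits into the output stream ahead of subsequent compressed output,
/// mirroring zlib's `deflatePrime`. For example, `prime(stream, 3, 0b101)` queues
/// those three bits so they are emitted before the next block of output.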
pub fn prime(stream: &mut DeflateStream, mut bits: i32, value: i32) -> ReturnCode {
    // our logic actually supports up to 32 bits.
    debug_assert!(bits <= 16, "zlib only supports up to 16 bits here");

    let mut value64 = value as u64;

    let state = &mut stream.state;

    if bits < 0
        || bits > BitWriter::BIT_BUF_SIZE as i32
        || bits > (core::mem::size_of_val(&value) << 3) as i32
    {
        return ReturnCode::BufError;
    }

    let mut put;

    loop {
        put = BitWriter::BIT_BUF_SIZE - state.bit_writer.bits_used;
        let put = Ord::min(put as i32, bits);

        if state.bit_writer.bits_used == 0 {
            state.bit_writer.bit_buffer = value64;
        } else {
            state.bit_writer.bit_buffer |=
                (value64 & ((1 << put) - 1)) << state.bit_writer.bits_used;
        }

        state.bit_writer.bits_used += put as u8;
        state.bit_writer.flush_bits();
        value64 >>= put;
        bits -= put;

        if bits == 0 {
            break;
        }
    }

    ReturnCode::Ok
}

pub fn copy<'a>(
    dest: &mut MaybeUninit<DeflateStream<'a>>,
    source: &mut DeflateStream<'a>,
) -> ReturnCode {
    let w_size = source.state.w_size;
    let window_bits = source.state.w_bits() as usize;
    let lit_bufsize = source.state.lit_bufsize;

    // SAFETY: source and dest are both mutable references, so guaranteed not to overlap.
    // dest being a reference to maybe uninitialized memory makes a copy of 1 DeflateStream valid.
    unsafe { core::ptr::copy_nonoverlapping(source, dest.as_mut_ptr(), 1) };

    let source_state = &source.state;
    let alloc = &source.alloc;

    let allocs = DeflateAllocOffsets::new(window_bits, lit_bufsize);

    let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
        return ReturnCode::MemError;
    };

    let align_offset = (allocation_start.as_ptr() as usize).next_multiple_of(64)
        - (allocation_start.as_ptr() as usize);
    let buf = unsafe { allocation_start.as_ptr().add(align_offset) };

    let (window, prev, head, pending, sym_buf) = unsafe {
        let window_ptr: *mut u8 = buf.add(allocs.window_pos);
        window_ptr
            .copy_from_nonoverlapping(source_state.window.as_ptr(), source_state.window.capacity());
        let window = Window::from_raw_parts(window_ptr, window_bits);

        // FIXME: write_bytes is stable for NonNull since 1.80.0
        let prev_ptr = buf.add(allocs.prev_pos).cast::<Pos>();
        prev_ptr.copy_from_nonoverlapping(source_state.prev.as_ptr(), source_state.prev.len());
        let prev = WeakSliceMut::from_raw_parts_mut(prev_ptr, w_size);

        // zero out head's first element
        let head_ptr = buf.add(allocs.head_pos).cast::<[Pos; HASH_SIZE]>();
        head_ptr.copy_from_nonoverlapping(source_state.head.as_ptr(), 1);
        let head = WeakArrayMut::<Pos, HASH_SIZE>::from_ptr(head_ptr);

        let pending_ptr = buf.add(allocs.pending_pos);
        let pending = source_state.bit_writer.pending.clone_to(pending_ptr);

        let sym_buf_ptr = buf.add(allocs.sym_buf_pos);
        let sym_buf = source_state.sym_buf.clone_to(sym_buf_ptr);

        (window, prev, head, pending, sym_buf)
    };

    let mut bit_writer = BitWriter::from_pending(pending);
    bit_writer.bits_used = source_state.bit_writer.bits_used;
    bit_writer.bit_buffer = source_state.bit_writer.bit_buffer;

    let dest_state = State {
        status: source_state.status,
        bit_writer,
        last_flush: source_state.last_flush,
        wrap: source_state.wrap,
        strategy: source_state.strategy,
        level: source_state.level,
        good_match: source_state.good_match,
        nice_match: source_state.nice_match,
        l_desc: source_state.l_desc.clone(),
        d_desc: source_state.d_desc.clone(),
        bl_desc: source_state.bl_desc.clone(),
        prev_match: source_state.prev_match,
        match_available: source_state.match_available,
        strstart: source_state.strstart,
        match_start: source_state.match_start,
        prev_length: source_state.prev_length,
        max_chain_length: source_state.max_chain_length,
        max_lazy_match: source_state.max_lazy_match,
        block_start: source_state.block_start,
        block_open: source_state.block_open,
        window,
        sym_buf,
        lit_bufsize: source_state.lit_bufsize,
        window_size: source_state.window_size,
        matches: source_state.matches,
        opt_len: source_state.opt_len,
        static_len: source_state.static_len,
        insert: source_state.insert,
        w_size: source_state.w_size,
        lookahead: source_state.lookahead,
        prev,
        head,
        ins_h: source_state.ins_h,
        hash_calc_variant: source_state.hash_calc_variant,
        crc_fold: source_state.crc_fold,
        gzhead: None,
        gzindex: source_state.gzindex,
        allocation_start,
        total_allocation_size: allocs.total_size,
        _cache_line_0: (),
        _cache_line_1: (),
        _cache_line_2: (),
        _cache_line_3: (),
        _padding_0: source_state._padding_0,
    };

    // write the cloned state into state_ptr
    let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
    unsafe { state_allocation.write(dest_state) }; // FIXME: write is stable for NonNull since 1.80.0

    // insert the state_ptr into `dest`
    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
    unsafe { core::ptr::write(field_ptr as *mut *mut State, state_allocation) };

    // update the gzhead field (it contains a mutable reference, so we need to be careful)
    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state.gzhead) };
    unsafe { core::ptr::copy(&source_state.gzhead, field_ptr, 1) };

    ReturnCode::Ok
}

/// # Returns
///
/// - Err when deflate is not done. A common cause is insufficient output space
/// - Ok otherwise
pub fn end<'a>(stream: &'a mut DeflateStream) -> Result<&'a mut z_stream, &'a mut z_stream> {
    let status = stream.state.status;
    let allocation_start = stream.state.allocation_start;
    let total_allocation_size = stream.state.total_allocation_size;
    let alloc = stream.alloc;

    let stream = stream.as_z_stream_mut();
    let _ = core::mem::replace(&mut stream.state, core::ptr::null_mut());

    unsafe { alloc.deallocate(allocation_start.as_ptr(), total_allocation_size) };

    match status {
        Status::Busy => Err(stream),
        _ => Ok(stream),
    }
}

pub fn reset(stream: &mut DeflateStream) -> ReturnCode {
    let ret = reset_keep(stream);

    if ret == ReturnCode::Ok {
        lm_init(stream.state);
    }

    ret
}

pub fn reset_keep(stream: &mut DeflateStream) -> ReturnCode {
    stream.total_in = 0;
    stream.total_out = 0;
    stream.msg = core::ptr::null_mut();
    stream.data_type = crate::c_api::Z_UNKNOWN;

    let state = &mut stream.state;

    state.bit_writer.pending.reset_keep();

    // can be made negative by deflate(..., Z_FINISH);
    state.wrap = state.wrap.abs();

    state.status = match state.wrap {
        2 => Status::GZip,
        _ => Status::Init,
    };

    stream.adler = match state.wrap {
        2 => {
            state.crc_fold = Crc32Fold::new();
            CRC32_INITIAL_VALUE as _
        }
        _ => ADLER32_INITIAL_VALUE as _,
    };

    state.last_flush = -2;

    state.zng_tr_init();

    ReturnCode::Ok
}

fn lm_init(state: &mut State) {
    state.window_size = 2 * state.w_size;

    // zlib uses CLEAR_HASH here
    state.head.as_mut_slice().fill(0);

    // Set the default configuration parameters:
    lm_set_level(state, state.level);

    state.strstart = 0;
    state.block_start = 0;
    state.lookahead = 0;
    state.insert = 0;
    state.prev_length = 0;
    state.match_available = false;
    state.match_start = 0;
    state.ins_h = 0;
}

fn lm_set_level(state: &mut State, level: i8) {
    state.max_lazy_match = CONFIGURATION_TABLE[level as usize].max_lazy;
    state.good_match = CONFIGURATION_TABLE[level as usize].good_length;
    state.nice_match = CONFIGURATION_TABLE[level as usize].nice_length;
    state.max_chain_length = CONFIGURATION_TABLE[level as usize].max_chain;

    state.hash_calc_variant = HashCalcVariant::for_max_chain_length(state.max_chain_length);
    state.level = level;
}

pub fn tune(
    stream: &mut DeflateStream,
    good_length: usize,
    max_lazy: usize,
    nice_length: usize,
    max_chain: usize,
) -> ReturnCode {
    stream.state.good_match = good_length as u16;
    stream.state.max_lazy_match = max_lazy as u16;
    stream.state.nice_match = nice_length as u16;
    stream.state.max_chain_length = max_chain as u16;

    ReturnCode::Ok
}

#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct Value {
    a: u16,
    b: u16,
}
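// `Value` plays the role of zlib's `ct_data`: `a` is a frequency while a tree is
// being built and a code afterwards, while `b` is the parent ("dad") during
// construction and the bit length afterwards, as the accessors below reflect.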

impl Value {
    pub(crate) const fn new(a: u16, b: u16) -> Self {
        Self { a, b }
    }

    pub(crate) fn freq_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    pub(crate) fn code_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    pub(crate) fn dad_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    pub(crate) fn len_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    #[inline(always)]
    pub(crate) const fn freq(self) -> u16 {
        self.a
    }

    #[inline(always)]
    pub(crate) const fn code(self) -> u16 {
        self.a
    }

    #[inline(always)]
    pub(crate) const fn dad(self) -> u16 {
        self.b
    }

    #[inline(always)]
    pub(crate) const fn len(self) -> u16 {
        self.b
    }
}

/// number of length codes, not counting the special END_BLOCK code
pub(crate) const LENGTH_CODES: usize = 29;

/// number of literal bytes 0..255
const LITERALS: usize = 256;

/// number of Literal or Length codes, including the END_BLOCK code
pub(crate) const L_CODES: usize = LITERALS + 1 + LENGTH_CODES;

/// number of distance codes
pub(crate) const D_CODES: usize = 30;

/// number of codes used to transfer the bit lengths
const BL_CODES: usize = 19;

/// maximum heap size
const HEAP_SIZE: usize = 2 * L_CODES + 1;

/// all codes must not exceed MAX_BITS bits
const MAX_BITS: usize = 15;

/// Bit length codes must not exceed MAX_BL_BITS bits
const MAX_BL_BITS: usize = 7;

pub(crate) const DIST_CODE_LEN: usize = 512;

struct BitWriter<'a> {
    pub(crate) pending: Pending<'a>, // output still pending
    pub(crate) bit_buffer: u64,
    pub(crate) bits_used: u8,

    /// total bit length of compressed file (NOTE: zlib-ng uses a 32-bit integer here)
    #[cfg(feature = "ZLIB_DEBUG")]
    compressed_len: usize,
    /// bit length of compressed data sent (NOTE: zlib-ng uses a 32-bit integer here)
    #[cfg(feature = "ZLIB_DEBUG")]
    bits_sent: usize,
}

#[inline]
const fn encode_len(ltree: &[Value], lc: u8) -> (u64, usize) {
    let mut lc = lc as usize;

    /* Send the length code, len is the match length - STD_MIN_MATCH */
    let code = self::trees_tbl::LENGTH_CODE[lc] as usize;
    let c = code + LITERALS + 1;
    assert!(c < L_CODES, "bad l_code");
    // send_code_trace(s, c);

    let lnode = ltree[c];
    let mut match_bits: u64 = lnode.code() as u64;
    let mut match_bits_len = lnode.len() as usize;
    let extra = StaticTreeDesc::EXTRA_LBITS[code] as usize;
    if extra != 0 {
        lc -= self::trees_tbl::BASE_LENGTH[code] as usize;
        match_bits |= (lc as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}

#[inline]
const fn encode_dist(dtree: &[Value], mut dist: u16) -> (u64, usize) {
    dist -= 1; /* dist is now the match distance - 1 */
    let code = State::d_code(dist as usize) as usize;
    assert!(code < D_CODES, "bad d_code");
    // send_code_trace(s, code);

    /* Send the distance code */
    let dnode = dtree[code];
    let mut match_bits = dnode.code() as u64;
    let mut match_bits_len = dnode.len() as usize;
    let extra = StaticTreeDesc::EXTRA_DBITS[code] as usize;
    if extra != 0 {
        dist -= self::trees_tbl::BASE_DIST[code];
        match_bits |= (dist as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}

impl<'a> BitWriter<'a> {
    pub(crate) const BIT_BUF_SIZE: u8 = 64;

    fn from_pending(pending: Pending<'a>) -> Self {
        Self {
            pending,
            bit_buffer: 0,
            bits_used: 0,

            #[cfg(feature = "ZLIB_DEBUG")]
            compressed_len: 0,
            #[cfg(feature = "ZLIB_DEBUG")]
            bits_sent: 0,
        }
    }

    fn flush_bits(&mut self) {
        debug_assert!(self.bits_used <= 64);
        let removed = self.bits_used.saturating_sub(7).next_multiple_of(8);
        let keep_bytes = self.bits_used / 8; // can never divide by zero
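        // Worked example (values assumed): with bits_used = 20, removed is
        // (20 - 7).next_multiple_of(8) = 16 and keep_bytes = 2: the two low bytes go
        // to `pending`, the buffer shifts right by 16, and 4 bits stay buffered.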

        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used -= removed;
        self.bit_buffer = self.bit_buffer.checked_shr(removed as u32).unwrap_or(0);
    }

    fn emit_align(&mut self) {
        debug_assert!(self.bits_used <= 64);
        let keep_bytes = self.bits_used.div_ceil(8);
        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used = 0;
        self.bit_buffer = 0;

        self.sent_bits_align();
    }

    fn send_bits_trace(&self, _value: u64, _len: u8) {
        trace!(" l {:>2} v {:>4x} ", _len, _value);
    }

    fn cmpr_bits_add(&mut self, _len: usize) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.compressed_len += _len;
        }
    }

    fn cmpr_bits_align(&mut self) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.compressed_len = self.compressed_len.next_multiple_of(8);
        }
    }

    fn sent_bits_add(&mut self, _len: usize) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bits_sent += _len;
        }
    }

    fn sent_bits_align(&mut self) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bits_sent = self.bits_sent.next_multiple_of(8);
        }
    }

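    // A worked overflow example (values assumed): with bits_used = 60, a
    // send_bits(val, 10) call takes the overflow path below: the low 4 bits of
    // `val` top off the buffer, all 64 bits are flushed to `pending`, and the
    // remaining 6 bits start the next buffer.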
    #[inline(always)]
    fn send_bits(&mut self, val: u64, len: u8) {
        debug_assert!(len <= 64);
        debug_assert!(self.bits_used <= 64);

        let total_bits = len + self.bits_used;

        self.send_bits_trace(val, len);
        self.sent_bits_add(len as usize);

        if total_bits < Self::BIT_BUF_SIZE {
            self.bit_buffer |= val << self.bits_used;
            self.bits_used = total_bits;
        } else {
            self.send_bits_overflow(val, total_bits);
        }
    }

    fn send_bits_overflow(&mut self, val: u64, total_bits: u8) {
        if self.bits_used == Self::BIT_BUF_SIZE {
            self.pending.extend(&self.bit_buffer.to_le_bytes());
            self.bit_buffer = val;
            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
        } else {
            self.bit_buffer |= val << self.bits_used;
            self.pending.extend(&self.bit_buffer.to_le_bytes());
            self.bit_buffer = val >> (Self::BIT_BUF_SIZE - self.bits_used);
            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
        }
    }

    fn send_code(&mut self, code: usize, tree: &[Value]) {
        let node = tree[code];
        self.send_bits(node.code() as u64, node.len() as u8)
    }

    /// Send one empty static block to give enough lookahead for inflate.
    /// This takes 10 bits, of which 7 may remain in the bit buffer.
    pub fn align(&mut self) {
        self.emit_tree(BlockType::StaticTrees, false);
        self.emit_end_block(&STATIC_LTREE, false);
        self.flush_bits();
    }

    pub(crate) fn emit_tree(&mut self, block_type: BlockType, is_last_block: bool) {
        let header_bits = ((block_type as u64) << 1) | (is_last_block as u64);
        self.send_bits(header_bits, 3);
        trace!("\n--- Emit Tree: Last: {}\n", is_last_block as u8);
    }

    pub(crate) fn emit_end_block_and_align(&mut self, ltree: &[Value], is_last_block: bool) {
        self.emit_end_block(ltree, is_last_block);

        if is_last_block {
            self.emit_align();
        }
    }

    fn emit_end_block(&mut self, ltree: &[Value], _is_last_block: bool) {
        const END_BLOCK: usize = 256;
        self.send_code(END_BLOCK, ltree);

        trace!(
            "\n+++ Emit End Block: Last: {} Pending: {} Total Out: {}\n",
            _is_last_block as u8,
            self.pending.pending().len(),
            "<unknown>"
        );
    }

    pub(crate) fn emit_lit(&mut self, ltree: &[Value], c: u8) -> u16 {
        self.send_code(c as usize, ltree);

        #[cfg(feature = "ZLIB_DEBUG")]
        if let Some(c) = char::from_u32(c as u32) {
            if isgraph(c as u8) {
                trace!(" '{}' ", c);
            }
        }

        ltree[c as usize].len()
    }

    pub(crate) fn emit_dist(
        &mut self,
        ltree: &[Value],
        dtree: &[Value],
        lc: u8,
        dist: u16,
    ) -> usize {
        let (mut match_bits, mut match_bits_len) = encode_len(ltree, lc);

        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);

        match_bits |= dist_match_bits << match_bits_len;
        match_bits_len += dist_match_bits_len;

        self.send_bits(match_bits, match_bits_len as u8);

        match_bits_len
    }

    pub(crate) fn emit_dist_static(&mut self, lc: u8, dist: u16) -> usize {
        let precomputed_len = trees_tbl::STATIC_LTREE_ENCODINGS[lc as usize];
        let mut match_bits = precomputed_len.code() as u64;
        let mut match_bits_len = precomputed_len.len() as usize;

        let dtree = self::trees_tbl::STATIC_DTREE.as_slice();
        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);

        match_bits |= dist_match_bits << match_bits_len;
        match_bits_len += dist_match_bits_len;

        self.send_bits(match_bits, match_bits_len as u8);

        match_bits_len
    }

    fn compress_block_help(&mut self, sym_buf: &SymBuf, ltree: &[Value], dtree: &[Value]) {
        for (dist, lc) in sym_buf.iter() {
            match dist {
                0 => self.emit_lit(ltree, lc) as usize,
                _ => self.emit_dist(ltree, dtree, lc, dist),
            };
        }

        self.emit_end_block(ltree, false)
    }

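    /// Emits a tree's code lengths using zlib's run-length scheme. For example, a
    /// run of four equal non-zero lengths (different from the previous length) is
    /// sent as the length itself followed by REP_3_6 with two extra bits of 0,
    /// i.e. "repeat the previous length 3 more times".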
    fn send_tree(&mut self, tree: &[Value], bl_tree: &[Value], max_code: usize) {
        /* tree: the tree to be scanned */
        /* max_code: the largest code of non-zero frequency */
        let mut prevlen: isize = -1; /* last emitted length */
        let mut curlen; /* length of current code */
        let mut nextlen = tree[0].len(); /* length of next code */
        let mut count = 0; /* repeat count of the current code */
        let mut max_count = 7; /* max repeat count */
        let mut min_count = 4; /* min repeat count */

        /* tree[max_code+1].Len = -1; */
        /* guard already set */
        if nextlen == 0 {
            max_count = 138;
            min_count = 3;
        }

        for n in 0..=max_code {
            curlen = nextlen;
            nextlen = tree[n + 1].len();
            count += 1;
            if count < max_count && curlen == nextlen {
                continue;
            } else if count < min_count {
                loop {
                    self.send_code(curlen as usize, bl_tree);

                    count -= 1;
                    if count == 0 {
                        break;
                    }
                }
            } else if curlen != 0 {
                if curlen as isize != prevlen {
                    self.send_code(curlen as usize, bl_tree);
                    count -= 1;
                }
                assert!((3..=6).contains(&count), " 3_6?");
                self.send_code(REP_3_6, bl_tree);
                self.send_bits(count - 3, 2);
            } else if count <= 10 {
                self.send_code(REPZ_3_10, bl_tree);
                self.send_bits(count - 3, 3);
            } else {
                self.send_code(REPZ_11_138, bl_tree);
                self.send_bits(count - 11, 7);
            }

            count = 0;
            prevlen = curlen as isize;

            if nextlen == 0 {
                max_count = 138;
                min_count = 3;
            } else if curlen == nextlen {
                max_count = 6;
                min_count = 3;
            } else {
                max_count = 7;
                min_count = 4;
            }
        }
    }
}

#[repr(C, align(64))]
pub(crate) struct State<'a> {
    status: Status,

    last_flush: i8, /* value of flush param for previous deflate call */

    pub(crate) wrap: i8, /* bit 0 true for zlib, bit 1 true for gzip */

    pub(crate) strategy: Strategy,
    pub(crate) level: i8,

    /// Whether or not a block is currently open for the QUICK deflation scheme.
    /// 0 if the block is closed, 1 if there is an active block, or 2 if there
    /// is an active block and it is the last block.
    pub(crate) block_open: u8,

    pub(crate) hash_calc_variant: HashCalcVariant,

    pub(crate) match_available: bool, /* set if previous match exists */

    /// Use a faster search when the previous match is longer than this
    pub(crate) good_match: u16,

    /// Stop searching when current match exceeds this
    pub(crate) nice_match: u16,

    pub(crate) match_start: Pos, /* start of matching string */
    pub(crate) prev_match: Pos,  /* previous match */
    pub(crate) strstart: usize,  /* start of string to insert */

    pub(crate) window: Window<'a>,
    pub(crate) w_size: usize, /* LZ77 window size (32K by default) */

    pub(crate) lookahead: usize, /* number of valid bytes ahead in window */

    _cache_line_0: (),

    /// prev[N], where N is an offset in the current window, contains the offset in the window
    /// of the previous 4-byte sequence that hashes to the same value as the 4-byte sequence
    /// starting at N. Together with head, prev forms a chained hash table that can be used
    /// to find earlier strings in the window that are potential matches for new input being
    /// deflated.
    pub(crate) prev: WeakSliceMut<'a, u16>,
    /// head[H] contains the offset of the last 4-character sequence seen so far in
    /// the current window that hashes to H (as calculated using the hash_calc_variant).
    pub(crate) head: WeakArrayMut<'a, u16, HASH_SIZE>,

    /// Length of the best match at previous step. Matches not greater than this
    /// are discarded. This is used in the lazy match evaluation.
    pub(crate) prev_length: u16,

    /// To speed up deflation, hash chains are never searched beyond this length.
    /// A higher limit improves compression ratio but degrades the speed.
    pub(crate) max_chain_length: u16,

    // TODO untangle this mess! zlib uses the same field differently based on compression level
    // we should just have 2 fields for clarity!
    //
    // Insert new strings in the hash table only if the match length is not
    // greater than this length. This saves time but degrades compression.
    // max_insert_length is used only for compression levels <= 6.
    // define max_insert_length  max_lazy_match
    /// Attempt to find a better match only when the current match is strictly smaller
    /// than this value. This mechanism is used only for compression levels >= 4.
    pub(crate) max_lazy_match: u16,

    /// number of string matches in current block
    /// NOTE: this is a saturating 8-bit counter, to help keep the struct compact. The code that
    /// makes decisions based on this field only cares whether the count is greater than 2, so
    /// an 8-bit counter is sufficient.
    pub(crate) matches: u8,

    /// Window position at the beginning of the current output block. Gets
    /// negative when the window is moved backwards.
    pub(crate) block_start: isize,

    pub(crate) sym_buf: SymBuf<'a>,

    _cache_line_1: (),

    /// Size of match buffer for literals/lengths.  There are 4 reasons for
    /// limiting lit_bufsize to 64K:
    ///   - frequencies can be kept in 16 bit counters
    ///   - if compression is not successful for the first block, all input
    ///     data is still in the window so we can still emit a stored block even
    ///     when input comes from standard input.  (This can also be done for
    ///     all blocks if lit_bufsize is not greater than 32K.)
    ///   - if compression is not successful for a file smaller than 64K, we can
    ///     even emit a stored file instead of a stored block (saving 5 bytes).
    ///     This is applicable only for zip (not gzip or zlib).
    ///   - creating new Huffman trees less frequently may not provide fast
    ///     adaptation to changes in the input data statistics. (Take for
    ///     example a binary file with poorly compressible code followed by
    ///     a highly compressible string table.) Smaller buffer sizes give
    ///     fast adaptation but have of course the overhead of transmitting
    ///     trees more frequently.
    ///   - I can't count above 4
    lit_bufsize: usize,

    /// Actual size of window: 2*w_size, except when the user input buffer is directly used as sliding window.
    pub(crate) window_size: usize,

    bit_writer: BitWriter<'a>,

    _cache_line_2: (),

    /// bit length of current block with optimal trees
    opt_len: usize,
    /// bit length of current block with static trees
    static_len: usize,

    /// bytes at end of window left to insert
    pub(crate) insert: usize,

    ///  hash index of string to be inserted
    pub(crate) ins_h: u32,

    gzhead: Option<&'a mut gz_header>,
    gzindex: usize,

    _padding_0: [u8; 16],

    _cache_line_3: (),

    crc_fold: crate::crc32::Crc32Fold,

    /// The (unaligned) address of the state allocation that can be passed to zfree.
    allocation_start: NonNull<u8>,
    /// Total size of the allocation in bytes.
    total_allocation_size: usize,

    l_desc: TreeDesc<HEAP_SIZE>,             /* literal and length tree */
    d_desc: TreeDesc<{ 2 * D_CODES + 1 }>,   /* distance tree */
    bl_desc: TreeDesc<{ 2 * BL_CODES + 1 }>, /* Huffman tree for bit lengths */
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Strategy {
    #[default]
    Default = 0,
    Filtered = 1,
    HuffmanOnly = 2,
    Rle = 3,
    Fixed = 4,
}

impl TryFrom<i32> for Strategy {
    type Error = ();

    fn try_from(value: i32) -> Result<Self, Self::Error> {
        match value {
            0 => Ok(Strategy::Default),
            1 => Ok(Strategy::Filtered),
            2 => Ok(Strategy::HuffmanOnly),
            3 => Ok(Strategy::Rle),
            4 => Ok(Strategy::Fixed),
            _ => Err(()),
        }
    }
}

#[derive(Debug, PartialEq, Eq)]
enum DataType {
    Binary = 0,
    Text = 1,
    Unknown = 2,
}

impl<'a> State<'a> {
    pub const BIT_BUF_SIZE: u8 = BitWriter::BIT_BUF_SIZE;

    // log2(w_size)  (in the range MIN_WBITS..=MAX_WBITS)
    pub(crate) fn w_bits(&self) -> u32 {
        self.w_size.trailing_zeros()
    }

    pub(crate) fn w_mask(&self) -> usize {
        self.w_size - 1
    }

    pub(crate) fn max_dist(&self) -> usize {
        self.w_size - MIN_LOOKAHEAD
    }

    // TODO untangle this mess! zlib uses the same field differently based on compression level
    // we should just have 2 fields for clarity!
    pub(crate) fn max_insert_length(&self) -> usize {
        self.max_lazy_match as usize
    }

    /// Total size of the pending buf. But because `pending` shares memory with `sym_buf`, this is
    /// not the number of bytes that are actually in `pending`!
    pub(crate) fn pending_buf_size(&self) -> usize {
        self.lit_bufsize * 4
    }

    #[inline(always)]
    pub(crate) fn update_hash(&self, h: u32, val: u32) -> u32 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::update_hash(h, val),
            HashCalcVariant::Roll => RollHashCalc::update_hash(h, val),
        }
    }

    #[inline(always)]
    pub(crate) fn quick_insert_string(&mut self, string: usize) -> u16 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::quick_insert_string(self, string),
            HashCalcVariant::Roll => RollHashCalc::quick_insert_string(self, string),
        }
    }

    #[inline(always)]
    pub(crate) fn insert_string(&mut self, string: usize, count: usize) {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::insert_string(self, string, count),
            HashCalcVariant::Roll => RollHashCalc::insert_string(self, string, count),
        }
    }

    #[inline(always)]
    pub(crate) fn tally_lit(&mut self, unmatched: u8) -> bool {
        Self::tally_lit_help(&mut self.sym_buf, &mut self.l_desc, unmatched)
    }

    // This helper is to work around an ownership issue in algorithm/medium.
    pub(crate) fn tally_lit_help(
        sym_buf: &mut SymBuf,
        l_desc: &mut TreeDesc<HEAP_SIZE>,
        unmatched: u8,
    ) -> bool {
        const _VERIFY: () = {
            // Verify during compilation that even the largest possible value
            // of unmatched will fit within the expected range.
            assert!(
                u8::MAX as usize <= STD_MAX_MATCH - STD_MIN_MATCH,
                "tally_lit: bad literal"
            );
        };

        sym_buf.push_lit(unmatched);

        *l_desc.dyn_tree[unmatched as usize].freq_mut() += 1;

        // signal that the current block should be flushed
        sym_buf.should_flush_block()
    }

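    /// Maps a match distance (minus one) to its distance code, like zlib's `d_code`
    /// macro: distances below 256 index `DIST_CODE` directly, while larger distances
    /// select from the upper half of the 512-entry table via `256 + (dist >> 7)`.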
    const fn d_code(dist: usize) -> u8 {
        const _VERIFY: () = {
            // Verify during compilation that every DIST_CODE value is < D_CODES.
            let mut i = 0;
            while i < trees_tbl::DIST_CODE.len() {
                assert!(trees_tbl::DIST_CODE[i] < D_CODES as u8);
                i += 1;
            }
        };

        let index = if dist < 256 { dist } else { 256 + (dist >> 7) };
        self::trees_tbl::DIST_CODE[index]
    }

    #[inline(always)]
    pub(crate) fn tally_dist(&mut self, mut dist: usize, len: usize) -> bool {
        self.sym_buf.push_dist(dist as u16, len as u8);

        self.matches = self.matches.saturating_add(1);
        dist -= 1;

        assert!(dist < self.max_dist(), "tally_dist: bad match");

        let index = self::trees_tbl::LENGTH_CODE[len] as usize + LITERALS + 1;
        *self.l_desc.dyn_tree[index].freq_mut() += 1;

        *self.d_desc.dyn_tree[Self::d_code(dist) as usize].freq_mut() += 1;

        // signal that the current block should be flushed
        self.sym_buf.should_flush_block()
    }

    fn detect_data_type(dyn_tree: &[Value]) -> DataType {
        // set bits 0..6, 14..25, and 28..31
        // 0xf3ffc07f = binary 11110011111111111100000001111111
        const NON_TEXT: u64 = 0xf3ffc07f;
        let mut mask = NON_TEXT;

        /* Check for non-textual bytes. */
        for value in &dyn_tree[0..32] {
            if (mask & 1) != 0 && value.freq() != 0 {
                return DataType::Binary;
            }

            mask >>= 1;
        }

        /* Check for textual bytes. */
        if dyn_tree[9].freq() != 0 || dyn_tree[10].freq() != 0 || dyn_tree[13].freq() != 0 {
            return DataType::Text;
        }

        if dyn_tree[32..LITERALS].iter().any(|v| v.freq() != 0) {
            return DataType::Text;
        }

        // there are no explicit text or non-text bytes. The stream is either empty or has only
        // tolerated bytes
        DataType::Binary
    }

    fn compress_block_static_trees(&mut self) {
        let ltree = self::trees_tbl::STATIC_LTREE.as_slice();
        for (dist, lc) in self.sym_buf.iter() {
            match dist {
                0 => self.bit_writer.emit_lit(ltree, lc) as usize,
                _ => self.bit_writer.emit_dist_static(lc, dist),
            };
        }

        self.bit_writer.emit_end_block(ltree, false)
    }

    fn compress_block_dynamic_trees(&mut self) {
        self.bit_writer.compress_block_help(
            &self.sym_buf,
            &self.l_desc.dyn_tree,
            &self.d_desc.dyn_tree,
        );
    }

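    // A worked example (values assumed, not from the source): with the defaults
    // level = 6 and window_bits = 15 and no preset dictionary,
    // h = ((8 + (7 << 4)) << 8) | (2 << 6) = 0x7880, and padding it up to a
    // multiple of 31 yields 0x789C, the familiar `78 9C` zlib header bytes.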
    fn header(&self) -> u16 {
        // preset dictionary flag in zlib header
        const PRESET_DICT: u16 = 0x20;

        // The deflate compression method (the only one supported in this version)
        const Z_DEFLATED: u16 = 8;

        let dict = match self.strstart {
            0 => 0,
            _ => PRESET_DICT,
        };

        let h = ((Z_DEFLATED + ((self.w_bits() as u16 - 8) << 4)) << 8)
            | (self.level_flags() << 6)
            | dict;

        h + 31 - (h % 31)
    }

    fn level_flags(&self) -> u16 {
        if self.strategy >= Strategy::HuffmanOnly || self.level < 2 {
            0
        } else if self.level < 6 {
            1
        } else if self.level == 6 {
            2
        } else {
            3
        }
    }

    fn zng_tr_init(&mut self) {
        self.l_desc.stat_desc = &StaticTreeDesc::L;

        self.d_desc.stat_desc = &StaticTreeDesc::D;

        self.bl_desc.stat_desc = &StaticTreeDesc::BL;

        self.bit_writer.bit_buffer = 0;
        self.bit_writer.bits_used = 0;

        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bit_writer.compressed_len = 0;
            self.bit_writer.bits_sent = 0;
        }

        // Initialize the first block of the first file:
        self.init_block();
    }

    /// initializes a new block
    fn init_block(&mut self) {
        // Initialize the trees.
        // TODO would a memset also work here?

        for value in &mut self.l_desc.dyn_tree[..L_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.d_desc.dyn_tree[..D_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.bl_desc.dyn_tree[..BL_CODES] {
            *value.freq_mut() = 0;
        }

        // end of block literal code
        const END_BLOCK: usize = 256;

        *self.l_desc.dyn_tree[END_BLOCK].freq_mut() = 1;
        self.opt_len = 0;
        self.static_len = 0;
        self.sym_buf.clear();
        self.matches = 0;
    }
}

#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Status {
    Init = 1,

    GZip = 4,
    Extra = 5,
    Name = 6,
    Comment = 7,
    Hcrc = 8,

    Busy = 2,
    Finish = 3,
}

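// With zlib's flush constants (Z_NO_FLUSH = 0, Z_PARTIAL_FLUSH = 1, Z_SYNC_FLUSH = 2,
// Z_FULL_FLUSH = 3, Z_FINISH = 4, Z_BLOCK = 5), this maps 0..=5 to 0, 2, 4, 6, 8, 1,
// slotting Z_BLOCK between Z_NO_FLUSH and Z_PARTIAL_FLUSH.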
1648const fn rank_flush(f: i8) -> i8 {
1649    // rank Z_BLOCK between Z_NO_FLUSH and Z_PARTIAL_FLUSH
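        // With the standard zlib flush values (NO_FLUSH = 0, PARTIAL_FLUSH = 1,
        // SYNC_FLUSH = 2, FULL_FLUSH = 3, FINISH = 4, BLOCK = 5), this maps the
        // inputs to 0, 2, 4, 6, 8 and 1 respectively.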
1650    ((f) * 2) - (if (f) > 4 { 9 } else { 0 })
1651}
1652
1653#[derive(Debug)]
1654pub(crate) enum BlockState {
1655    /// block not completed, need more input or more output
1656    NeedMore = 0,
1657    /// block flush performed
1658    BlockDone = 1,
1659    /// finish started, need only more output at next deflate
1660    FinishStarted = 2,
1661    /// finish done, accept no more input or output
1662    FinishDone = 3,
1663}
1664
1665// Maximum stored block length in deflate format (not including header).
1666pub(crate) const MAX_STORED: usize = 65535; // i.e., u16::MAX
1667
1668pub(crate) fn read_buf_window(stream: &mut DeflateStream, offset: usize, size: usize) -> usize {
1669    let len = Ord::min(stream.avail_in as usize, size);
1670
1671    if len == 0 {
1672        return 0;
1673    }
1674
1675    stream.avail_in -= len as u32;
1676
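        // wrap == 2 means a gzip wrapper (crc32 checksum), wrap == 1 a zlib wrapper
        // (adler32 checksum); any other value is raw deflate, which has no checksum.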
1677    if stream.state.wrap == 2 {
1678        // We likely cannot fuse the crc32 and the copy here, because the input can be
1679        // modified by a concurrent thread; therefore it cannot be converted into a slice.
1680        let window = &mut stream.state.window;
1681        // SAFETY: len is bounded by avail_in, so this copy is in bounds.
1682        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1683
1684        let data = &stream.state.window.filled()[offset..][..len];
1685        stream.state.crc_fold.fold(data, CRC32_INITIAL_VALUE);
1686    } else if stream.state.wrap == 1 {
1687        // We likely cannot fuse the adler32 and the copy here, because the input can be
1688        // modified by a concurrent thread; therefore it cannot be converted into a slice.
1689        let window = &mut stream.state.window;
1690        // SAFETY: len is bounded by avail_in, so this copy is in bounds.
1691        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1692
1693        let data = &stream.state.window.filled()[offset..][..len];
1694        stream.adler = adler32(stream.adler as u32, data) as _;
1695    } else {
1696        let window = &mut stream.state.window;
1697        // SAFETY: len is bounded by avail_in, so this copy is in bounds.
1698        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1699    }
1700
1701    stream.next_in = stream.next_in.wrapping_add(len);
1702    stream.total_in += len as crate::c_api::z_size;
1703
1704    len
1705}
1706
1707pub(crate) enum BlockType {
1708    StoredBlock = 0,
1709    StaticTrees = 1,
1710    DynamicTrees = 2,
1711}
1712
1713pub(crate) fn zng_tr_stored_block(
1714    state: &mut State,
1715    window_range: core::ops::Range<usize>,
1716    is_last: bool,
1717) {
1718    // send block type
1719    state.bit_writer.emit_tree(BlockType::StoredBlock, is_last);
1720
1721    // align on byte boundary
1722    state.bit_writer.emit_align();
1723
1724    state.bit_writer.cmpr_bits_align();
1725
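        // The remainder of a stored block is LEN and NLEN (the one's complement of
        // LEN) as little-endian u16 values, followed by the literal bytes; see
        // section 3.2.4 of RFC 1951.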
1726    let input_block: &[u8] = &state.window.filled()[window_range];
1727    let stored_len = input_block.len() as u16;
1728
1729    state.bit_writer.pending.extend(&stored_len.to_le_bytes());
1730    state
1731        .bit_writer
1732        .pending
1733        .extend(&(!stored_len).to_le_bytes());
1734
1735    state.bit_writer.cmpr_bits_add(32);
1736    state.bit_writer.sent_bits_add(32);
1737    if stored_len > 0 {
1738        state.bit_writer.pending.extend(input_block);
1739        state.bit_writer.cmpr_bits_add((stored_len << 3) as usize);
1740        state.bit_writer.sent_bits_add((stored_len << 3) as usize);
1741    }
1742}
1743
1744/// The minimum match length mandated by the deflate standard
1745pub(crate) const STD_MIN_MATCH: usize = 3;
1746/// The maximum match length mandated by the deflate standard
1747pub(crate) const STD_MAX_MATCH: usize = 258;
1748
1749/// The minimum wanted match length; it affects deflate_quick, deflate_fast, deflate_medium and deflate_slow
1750pub(crate) const WANT_MIN_MATCH: usize = 4;
1751
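    /// Minimum amount of lookahead, except at the end of the input file.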
1752pub(crate) const MIN_LOOKAHEAD: usize = STD_MAX_MATCH + STD_MIN_MATCH + 1;
1753
1754#[inline]
1755pub(crate) fn fill_window(stream: &mut DeflateStream) {
1756    debug_assert!(stream.state.lookahead < MIN_LOOKAHEAD);
1757
1758    let wsize = stream.state.w_size;
1759
1760    loop {
1761        let state = &mut *stream.state;
1762        let mut more = state.window_size - state.lookahead - state.strstart;
1763
1764        // If the window is almost full and there is insufficient lookahead,
1765        // move the upper half to the lower one to make room in the upper half.
1766        if state.strstart >= wsize + state.max_dist() {
1767            // shift the window to the left
1768            let (old, new) = state.window.filled_mut()[..2 * wsize].split_at_mut(wsize);
1769            old.copy_from_slice(new);
1770
1771            if state.match_start >= wsize as u16 {
1772                state.match_start -= wsize as u16;
1773            } else {
1774                state.match_start = 0;
1775                state.prev_length = 0;
1776            }
1777
1778            state.strstart -= wsize; /* we now have strstart >= MAX_DIST */
1779            state.block_start -= wsize as isize;
1780            state.insert = Ord::min(state.insert, state.strstart);
1781
1782            self::slide_hash::slide_hash(state);
1783
1784            more += wsize;
1785        }
1786
1787        if stream.avail_in == 0 {
1788            break;
1789        }
1790
1791        // If there was no sliding:
1792        //    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
1793        //    more == window_size - lookahead - strstart
1794        // => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
1795        // => more >= window_size - 2*WSIZE + 2
1796        // In the BIG_MEM or MMAP case (not yet supported),
1797        //   window_size == input_size + MIN_LOOKAHEAD  &&
1798        //   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
1799        // Otherwise, window_size == 2*WSIZE so more >= 2.
1800        // If there was sliding, more >= WSIZE. So in all cases, more >= 2.
1801        assert!(more >= 2, "more < 2");
1802
1803        let n = read_buf_window(stream, stream.state.strstart + stream.state.lookahead, more);
1804
1805        let state = &mut *stream.state;
1806        state.lookahead += n;
1807
1808        // Initialize the hash value now that we have some input:
1809        if state.lookahead + state.insert >= STD_MIN_MATCH {
1810            let string = state.strstart - state.insert;
1811            if state.max_chain_length > 1024 {
1812                let v0 = state.window.filled()[string] as u32;
1813                let v1 = state.window.filled()[string + 1] as u32;
1814                state.ins_h = state.update_hash(v0, v1);
1815            } else if string >= 1 {
1816                state.quick_insert_string(string + 2 - STD_MIN_MATCH);
1817            }
1818            let mut count = state.insert;
1819            if state.lookahead == 1 {
1820                count -= 1;
1821            }
1822            if count > 0 {
1823                state.insert_string(string, count);
1824                state.insert -= count;
1825            }
1826        }
1827
1828        // If the whole input has less than STD_MIN_MATCH bytes, ins_h is garbage,
1829        // but this is not important since only literal bytes will be emitted.
1830
1831        if !(stream.state.lookahead < MIN_LOOKAHEAD && stream.avail_in != 0) {
1832            break;
1833        }
1834    }
1835
1836    assert!(
1837        stream.state.strstart <= stream.state.window_size - MIN_LOOKAHEAD,
1838        "not enough room for search"
1839    );
1840}
1841
1842pub(crate) struct StaticTreeDesc {
1843    /// static tree or NULL
1844    pub(crate) static_tree: &'static [Value],
1845    /// extra bits for each code or NULL
1846    extra_bits: &'static [u8],
1847    /// base index for extra_bits
1848    extra_base: usize,
1849    /// max number of elements in the tree
1850    elems: usize,
1851    /// max bit length for the codes
1852    max_length: u16,
1853}
1854
1855impl StaticTreeDesc {
1856    const EMPTY: Self = Self {
1857        static_tree: &[],
1858        extra_bits: &[],
1859        extra_base: 0,
1860        elems: 0,
1861        max_length: 0,
1862    };
1863
1864    /// extra bits for each length code
1865    const EXTRA_LBITS: [u8; LENGTH_CODES] = [
1866        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0,
1867    ];
1868
1869    /// extra bits for each distance code
1870    const EXTRA_DBITS: [u8; D_CODES] = [
1871        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12,
1872        13, 13,
1873    ];
1874
1875    /// extra bits for each bit length code
1876    const EXTRA_BLBITS: [u8; BL_CODES] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7];
1877
1878    /// The lengths of the bit length codes are sent in order of decreasing
1879    /// probability, to avoid transmitting the lengths for unused bit length codes.
1880    const BL_ORDER: [u8; BL_CODES] = [
1881        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
1882    ];
1883
1884    pub(crate) const L: Self = Self {
1885        static_tree: &self::trees_tbl::STATIC_LTREE,
1886        extra_bits: &Self::EXTRA_LBITS,
1887        extra_base: LITERALS + 1,
1888        elems: L_CODES,
1889        max_length: MAX_BITS as u16,
1890    };
1891
1892    pub(crate) const D: Self = Self {
1893        static_tree: &self::trees_tbl::STATIC_DTREE,
1894        extra_bits: &Self::EXTRA_DBITS,
1895        extra_base: 0,
1896        elems: D_CODES,
1897        max_length: MAX_BITS as u16,
1898    };
1899
1900    pub(crate) const BL: Self = Self {
1901        static_tree: &[],
1902        extra_bits: &Self::EXTRA_BLBITS,
1903        extra_base: 0,
1904        elems: BL_CODES,
1905        max_length: MAX_BL_BITS as u16,
1906    };
1907}
1908
1909#[derive(Clone)]
1910pub(crate) struct TreeDesc<const N: usize> {
1911    dyn_tree: [Value; N],
1912    max_code: usize,
1913    stat_desc: &'static StaticTreeDesc,
1914}
1915
1916impl<const N: usize> TreeDesc<N> {
1917    const EMPTY: Self = Self {
1918        dyn_tree: [Value::new(0, 0); N],
1919        max_code: 0,
1920        stat_desc: &StaticTreeDesc::EMPTY,
1921    };
1922}
1923
1924fn build_tree<const N: usize>(state: &mut State, desc: &mut TreeDesc<N>) {
1925    let tree = &mut desc.dyn_tree;
1926    let stree = desc.stat_desc.static_tree;
1927    let elements = desc.stat_desc.elems;
1928
1929    let mut heap = Heap::new();
1930    let mut max_code = heap.initialize(&mut tree[..elements]);
1931
1932    // The pkzip format requires that at least one distance code exists,
1933    // and that at least one bit should be sent even if there is only one
1934    // possible code. So to avoid special checks later on we force at least
1935    // two codes of non zero frequency.
1936    while heap.heap_len < 2 {
1937        heap.heap_len += 1;
1938        let node = if max_code < 2 {
1939            max_code += 1;
1940            max_code
1941        } else {
1942            0
1943        };
1944
1945        debug_assert!(node >= 0);
1946        let node = node as usize;
1947
1948        heap.heap[heap.heap_len] = node as u32;
1949        *tree[node].freq_mut() = 1;
1950        heap.depth[node] = 0;
1951        state.opt_len -= 1;
1952        if !stree.is_empty() {
1953            state.static_len -= stree[node].len() as usize;
1954        }
1955        /* node is 0 or 1 so it does not have extra bits */
1956    }
1957
1958    debug_assert!(max_code >= 0);
1959    let max_code = max_code as usize;
1960    desc.max_code = max_code;
1961
1962    // The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
1963    // establish sub-heaps of increasing lengths:
1964    let mut n = heap.heap_len / 2;
1965    while n >= 1 {
1966        heap.pqdownheap(tree, n);
1967        n -= 1;
1968    }
1969
1970    heap.construct_huffman_tree(tree, elements);
1971
1972    // At this point, the fields freq and dad are set. We can now
1973    // generate the bit lengths.
1974    let bl_count = gen_bitlen(state, &mut heap, desc);
1975
1976    // The field len is now set, we can generate the bit codes
1977    gen_codes(&mut desc.dyn_tree, max_code, &bl_count);
1978}
1979
1980fn gen_bitlen<const N: usize>(
1981    state: &mut State,
1982    heap: &mut Heap,
1983    desc: &mut TreeDesc<N>,
1984) -> [u16; MAX_BITS + 1] {
1985    let tree = &mut desc.dyn_tree;
1986    let max_code = desc.max_code;
1987    let stree = desc.stat_desc.static_tree;
1988    let extra = desc.stat_desc.extra_bits;
1989    let base = desc.stat_desc.extra_base;
1990    let max_length = desc.stat_desc.max_length;
1991
1992    let mut bl_count = [0u16; MAX_BITS + 1];
1993
1994    // In a first pass, compute the optimal bit lengths (which may
1995    // overflow in the case of the bit length tree).
1996    *tree[heap.heap[heap.heap_max] as usize].len_mut() = 0; /* root of the heap */
1997
1998    // number of elements with bit length too large
1999    let mut overflow: i32 = 0;
2000
2001    for h in heap.heap_max + 1..HEAP_SIZE {
2002        let n = heap.heap[h] as usize;
2003        let mut bits = tree[tree[n].dad() as usize].len() + 1;
2004
2005        if bits > max_length {
2006            bits = max_length;
2007            overflow += 1;
2008        }
2009
2010        // Writing the length overwrites the node's dad field, which is no longer needed.
2011        *tree[n].len_mut() = bits;
2012
2013        // not a leaf node
2014        if n > max_code {
2015            continue;
2016        }
2017
2018        bl_count[bits as usize] += 1;
2019        let mut xbits = 0;
2020        if n >= base {
2021            xbits = extra[n - base] as usize;
2022        }
2023
2024        let f = tree[n].freq() as usize;
2025        state.opt_len += f * (bits as usize + xbits);
2026
2027        if !stree.is_empty() {
2028            state.static_len += f * (stree[n].len() as usize + xbits);
2029        }
2030    }
2031
2032    if overflow == 0 {
2033        return bl_count;
2034    }
2035
2036    /* Find the first bit length which could increase: */
2037    loop {
2038        let mut bits = max_length as usize - 1;
2039        while bl_count[bits] == 0 {
2040            bits -= 1;
2041        }
2042        bl_count[bits] -= 1; /* move one leaf down the tree */
2043        bl_count[bits + 1] += 2; /* move one overflow item as its brother */
2044        bl_count[max_length as usize] -= 1;
2045        /* The brother of the overflow item also moves one step up,
2046         * but this does not affect bl_count[max_length]
2047         */
2048        overflow -= 2;
2049
2050        if overflow <= 0 {
2051            break;
2052        }
2053    }
2054
2055    // Now recompute all bit lengths, scanning in increasing frequency; `h` is
2056    // reset to HEAP_SIZE for this second scan over the heap. (It is simpler to
2057    // reconstruct all lengths instead of fixing only the wrong ones. This idea
2058    // is taken from 'ar' written by Haruhiko Okumura.)
2059    let mut h = HEAP_SIZE;
2060    for bits in (1..=max_length).rev() {
2061        let mut n = bl_count[bits as usize];
2062        while n != 0 {
2063            h -= 1;
2064            let m = heap.heap[h] as usize;
2065            if m > max_code {
2066                continue;
2067            }
2068
2069            if tree[m].len() != bits {
2070                // Tracev((stderr, "code %d bits %d->%u\n", m, tree[m].Len, bits));
2071                state.opt_len += (bits * tree[m].freq()) as usize;
2072                state.opt_len -= (tree[m].len() * tree[m].freq()) as usize;
2073                *tree[m].len_mut() = bits;
2074            }
2075
2076            n -= 1;
2077        }
2078    }
2079    bl_count
2080}
2081
2082/// Checks that symbol is a printing character (excluding space)
2083#[allow(unused)]
2084fn isgraph(c: u8) -> bool {
2085    (c > 0x20) && (c <= 0x7E)
2086}
2087
2088fn gen_codes(tree: &mut [Value], max_code: usize, bl_count: &[u16]) {
2089    /* tree: the tree to decorate */
2090    /* max_code: largest code with non zero frequency */
2091    /* bl_count: number of codes at each bit length */
2092    let mut next_code = [0; MAX_BITS + 1]; /* next code value for each bit length */
2093    let mut code = 0; /* running code value */
2094
2095    /* The distribution counts are first used to generate the code values
2096     * without bit reversal.
2097     */
2098    for bits in 1..=MAX_BITS {
2099        code = (code + bl_count[bits - 1]) << 1;
2100        next_code[bits] = code;
2101    }
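
        /* As a worked example (cf. section 3.2.2 of RFC 1951): for the code lengths
         * {2, 1, 3, 3}, bl_count[1..=3] is [1, 1, 2], so next_code[1..=3] becomes
         * [0b0, 0b10, 0b110] and the assigned codes are 0, 10, 110 and 111
         * (before the bit reversal below).
         */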
2102
2103    /* Check that the bit counts in bl_count are consistent. The last code
2104     * must be all ones.
2105     */
2106    assert!(
2107        code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
2108        "inconsistent bit counts"
2109    );
2110
2111    trace!("\ngen_codes: max_code {max_code} ");
2112
2113    for n in 0..=max_code {
2114        let len = tree[n].len();
2115        if len == 0 {
2116            continue;
2117        }
2118
2119        /* Now reverse the bits */
2120        assert!((1..=15).contains(&len), "code length must be 1-15");
2121        *tree[n].code_mut() = next_code[len as usize].reverse_bits() >> (16 - len);
2122        next_code[len as usize] += 1;
2123
2124        if tree != self::trees_tbl::STATIC_LTREE.as_slice() {
2125            trace!(
2126                "\nn {:>3} {} l {:>2} c {:>4x} ({:x}) ",
2127                n,
2128                if isgraph(n as u8) {
2129                    char::from_u32(n as u32).unwrap()
2130                } else {
2131                    ' '
2132                },
2133                len,
2134                tree[n].code(),
2135                next_code[len as usize] - 1
2136            );
2137        }
2138    }
2139}
2140
2141/// repeat previous bit length 3-6 times (2 bits of repeat count)
2142const REP_3_6: usize = 16;
2143
2144/// repeat a zero length 3-10 times  (3 bits of repeat count)
2145const REPZ_3_10: usize = 17;
2146
2147/// repeat a zero length 11-138 times  (7 bits of repeat count)
2148const REPZ_11_138: usize = 18;
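
    // For example, a run of 138 zero lengths is counted as a single REPZ_11_138
    // symbol, while a fresh run of five equal non-zero lengths is counted as one
    // literal length plus one REP_3_6 symbol.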
2149
2150fn scan_tree(bl_desc: &mut TreeDesc<{ 2 * BL_CODES + 1 }>, tree: &mut [Value], max_code: usize) {
2151    /* tree: the tree to be scanned */
2152    /* max_code: and its largest code of non zero frequency */
2153    let mut prevlen = -1isize; /* last emitted length */
2154    let mut curlen: isize; /* length of current code */
2155    let mut nextlen = tree[0].len(); /* length of next code */
2156    let mut count = 0; /* repeat count of the current code */
2157    let mut max_count = 7; /* max repeat count */
2158    let mut min_count = 4; /* min repeat count */
2159
2160    if nextlen == 0 {
2161        max_count = 138;
2162        min_count = 3;
2163    }
2164
2165    *tree[max_code + 1].len_mut() = 0xffff; /* guard */
2166
2167    let bl_tree = &mut bl_desc.dyn_tree;
2168
2169    for n in 0..=max_code {
2170        curlen = nextlen as isize;
2171        nextlen = tree[n + 1].len();
2172        count += 1;
2173        if count < max_count && curlen == nextlen as isize {
2174            continue;
2175        } else if count < min_count {
2176            *bl_tree[curlen as usize].freq_mut() += count;
2177        } else if curlen != 0 {
2178            if curlen != prevlen {
2179                *bl_tree[curlen as usize].freq_mut() += 1;
2180            }
2181            *bl_tree[REP_3_6].freq_mut() += 1;
2182        } else if count <= 10 {
2183            *bl_tree[REPZ_3_10].freq_mut() += 1;
2184        } else {
2185            *bl_tree[REPZ_11_138].freq_mut() += 1;
2186        }
2187
2188        count = 0;
2189        prevlen = curlen;
2190
2191        if nextlen == 0 {
2192            max_count = 138;
2193            min_count = 3;
2194        } else if curlen == nextlen as isize {
2195            max_count = 6;
2196            min_count = 3;
2197        } else {
2198            max_count = 7;
2199            min_count = 4;
2200        }
2201    }
2202}
2203
2204fn send_all_trees(state: &mut State, lcodes: usize, dcodes: usize, blcodes: usize) {
2205    assert!(
2206        lcodes >= 257 && dcodes >= 1 && blcodes >= 4,
2207        "not enough codes"
2208    );
2209    assert!(
2210        lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
2211        "too many codes"
2212    );
2213
2214    trace!("\nbl counts: ");
2215    state.bit_writer.send_bits(lcodes as u64 - 257, 5); /* not +255 as stated in appnote.txt */
2216    state.bit_writer.send_bits(dcodes as u64 - 1, 5);
2217    state.bit_writer.send_bits(blcodes as u64 - 4, 4); /* not -3 as stated in appnote.txt */
2218
2219    for rank in 0..blcodes {
2220        trace!("\nbl code {:>2} ", StaticTreeDesc::BL_ORDER[rank]);
2221        state.bit_writer.send_bits(
2222            state.bl_desc.dyn_tree[StaticTreeDesc::BL_ORDER[rank] as usize].len() as u64,
2223            3,
2224        );
2225    }
2226    trace!("\nbl tree: sent {}", state.bit_writer.bits_sent);
2227
2228    // literal tree
2229    state
2230        .bit_writer
2231        .send_tree(&state.l_desc.dyn_tree, &state.bl_desc.dyn_tree, lcodes - 1);
2232    trace!("\nlit tree: sent {}", state.bit_writer.bits_sent);
2233
2234    // distance tree
2235    state
2236        .bit_writer
2237        .send_tree(&state.d_desc.dyn_tree, &state.bl_desc.dyn_tree, dcodes - 1);
2238    trace!("\ndist tree: sent {}", state.bit_writer.bits_sent);
2239}
2240
2241/// Construct the Huffman tree for the bit lengths and return the index in
2242/// bl_order of the last bit length code to send.
2243fn build_bl_tree(state: &mut State) -> usize {
2244    /* Determine the bit length frequencies for literal and distance trees */
2245
2246    scan_tree(
2247        &mut state.bl_desc,
2248        &mut state.l_desc.dyn_tree,
2249        state.l_desc.max_code,
2250    );
2251
2252    scan_tree(
2253        &mut state.bl_desc,
2254        &mut state.d_desc.dyn_tree,
2255        state.d_desc.max_code,
2256    );
2257
2258    /* Build the bit length tree: */
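        // (temporarily move `bl_desc` out of `state`, so that `state` and the
        // descriptor can be mutably borrowed at the same time)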
2259    {
2260        let mut tmp = TreeDesc::EMPTY;
2261        core::mem::swap(&mut tmp, &mut state.bl_desc);
2262        build_tree(state, &mut tmp);
2263        core::mem::swap(&mut tmp, &mut state.bl_desc);
2264    }
2265
2266    /* opt_len now includes the length of the tree representations, except
2267     * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
2268     */
2269
2270    /* Determine the number of bit length codes to send. The pkzip format
2271     * requires that at least 4 bit length codes be sent. (appnote.txt says
2272     * 3 but the actual value used is 4.)
2273     */
2274    let mut max_blindex = BL_CODES - 1;
2275    while max_blindex >= 3 {
2276        let index = StaticTreeDesc::BL_ORDER[max_blindex] as usize;
2277        if state.bl_desc.dyn_tree[index].len() != 0 {
2278            break;
2279        }
2280
2281        max_blindex -= 1;
2282    }
2283
2284    /* Update opt_len to include the bit length tree and counts */
2285    state.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
2286    trace!(
2287        "\ndyn trees: dyn {}, stat {}",
2288        state.opt_len,
2289        state.static_len
2290    );
2291
2292    max_blindex
2293}
2294
2295fn zng_tr_flush_block(
2296    stream: &mut DeflateStream,
2297    window_offset: Option<usize>,
2298    stored_len: u32,
2299    last: bool,
2300) {
2301    /* window_offset: offset of the input block into the window */
2302    /* stored_len: length of input block */
2303    /* last: true if this is the last block for a file */
2304
2305    let mut opt_lenb;
2306    let static_lenb;
2307    let mut max_blindex = 0;
2308
2309    let state = &mut stream.state;
2310
2311    if state.sym_buf.is_empty() {
2312        opt_lenb = 0;
2313        static_lenb = 0;
2314        state.static_len = 7;
2315    } else if state.level > 0 {
2316        if stream.data_type == DataType::Unknown as i32 {
2317            stream.data_type = State::detect_data_type(&state.l_desc.dyn_tree) as i32;
2318        }
2319
2320        {
2321            let mut tmp = TreeDesc::EMPTY;
2322            core::mem::swap(&mut tmp, &mut state.l_desc);
2323
2324            build_tree(state, &mut tmp);
2325            core::mem::swap(&mut tmp, &mut state.l_desc);
2326
2327            trace!(
2328                "\nlit data: dyn {}, stat {}",
2329                state.opt_len,
2330                state.static_len
2331            );
2332        }
2333
2334        {
2335            let mut tmp = TreeDesc::EMPTY;
2336            core::mem::swap(&mut tmp, &mut state.d_desc);
2337            build_tree(state, &mut tmp);
2338            core::mem::swap(&mut tmp, &mut state.d_desc);
2339
2340            trace!(
2341                "\ndist data: dyn {}, stat {}",
2342                state.opt_len,
2343                state.static_len
2344            );
2345        }
2346
2347        // Build the bit length tree for the above two trees, and get the index
2348        // in bl_order of the last bit length code to send.
2349        max_blindex = build_bl_tree(state);
2350
2351        // Determine the best encoding. Compute the block lengths in bytes.
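            // (+3 for the block header bits; +7 rounds up to a whole byte)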
2352        opt_lenb = (state.opt_len + 3 + 7) >> 3;
2353        static_lenb = (state.static_len + 3 + 7) >> 3;
2354
2355        trace!(
2356            "\nopt {}({}) stat {}({}) stored {} lit {} ",
2357            opt_lenb,
2358            state.opt_len,
2359            static_lenb,
2360            state.static_len,
2361            stored_len,
2362            state.sym_buf.len() / 3
2363        );
2364
2365        if static_lenb <= opt_lenb || state.strategy == Strategy::Fixed {
2366            opt_lenb = static_lenb;
2367        }
2368    } else {
2369        assert!(window_offset.is_some(), "lost buf");
2370        /* force a stored block */
2371        opt_lenb = stored_len as usize + 5;
2372        static_lenb = stored_len as usize + 5;
2373    }
2374
2375    #[allow(clippy::unnecessary_unwrap)]
2376    if stored_len as usize + 4 <= opt_lenb && window_offset.is_some() {
2377        /* 4: two words for the lengths
2378         * The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
2379         * Otherwise we can't have processed more than WSIZE input bytes since
2380         * the last block flush, because compression would have been
2381         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
2382         * transform a block into a stored block.
2383         */
2384        let window_offset = window_offset.unwrap();
2385        let range = window_offset..window_offset + stored_len as usize;
2386        zng_tr_stored_block(state, range, last);
2387    } else if static_lenb == opt_lenb {
2388        state.bit_writer.emit_tree(BlockType::StaticTrees, last);
2389        state.compress_block_static_trees();
2390    // cmpr_bits_add(s, s.static_len);
2391    } else {
2392        state.bit_writer.emit_tree(BlockType::DynamicTrees, last);
2393        send_all_trees(
2394            state,
2395            state.l_desc.max_code + 1,
2396            state.d_desc.max_code + 1,
2397            max_blindex + 1,
2398        );
2399
2400        state.compress_block_dynamic_trees();
2401    }
2402
2403    // TODO
2404    // This check is made mod 2^32, for files larger than 512 MB and unsigned long implemented on 32 bits.
2405    // assert_eq!(state.compressed_len, state.bits_sent, "bad compressed size");
2406
2407    state.init_block();
2408    if last {
2409        state.bit_writer.emit_align();
2410    }
2411
2412    // Tracev((stderr, "\ncomprlen %lu(%lu) ", s->compressed_len>>3, s->compressed_len-7*last));
2413}
2414
2415pub(crate) fn flush_block_only(stream: &mut DeflateStream, is_last: bool) {
2416    zng_tr_flush_block(
2417        stream,
2418        (stream.state.block_start >= 0).then_some(stream.state.block_start as usize),
2419        (stream.state.strstart as isize - stream.state.block_start) as u32,
2420        is_last,
2421    );
2422
2423    stream.state.block_start = stream.state.strstart as isize;
2424    flush_pending(stream)
2425}
2426
2427fn flush_bytes(stream: &mut DeflateStream, mut bytes: &[u8]) -> ControlFlow<ReturnCode> {
2428    let mut state = &mut stream.state;
2429
2430    // we'll be using the pending buffer as temporary storage
2431    let mut beg = state.bit_writer.pending.pending().len(); /* start of bytes to update crc */
2432
2433    while state.bit_writer.pending.remaining() < bytes.len() {
2434        let copy = state.bit_writer.pending.remaining();
2435
2436        state.bit_writer.pending.extend(&bytes[..copy]);
2437
2438        stream.adler = crc32(
2439            stream.adler as u32,
2440            &state.bit_writer.pending.pending()[beg..],
2441        ) as z_checksum;
2442
2443        state.gzindex += copy;
2444        flush_pending(stream);
2445        state = &mut stream.state;
2446
2447        // could not flush all the pending output
2448        if !state.bit_writer.pending.pending().is_empty() {
2449            state.last_flush = -1;
2450            return ControlFlow::Break(ReturnCode::Ok);
2451        }
2452
2453        beg = 0;
2454        bytes = &bytes[copy..];
2455    }
2456
2457    state.bit_writer.pending.extend(bytes);
2458
2459    stream.adler = crc32(
2460        stream.adler as u32,
2461        &state.bit_writer.pending.pending()[beg..],
2462    ) as z_checksum;
2463    state.gzindex = 0;
2464
2465    ControlFlow::Continue(())
2466}
2467
2468pub fn deflate(stream: &mut DeflateStream, flush: DeflateFlush) -> ReturnCode {
2469    if stream.next_out.is_null()
2470        || (stream.avail_in != 0 && stream.next_in.is_null())
2471        || (stream.state.status == Status::Finish && flush != DeflateFlush::Finish)
2472    {
2473        let err = ReturnCode::StreamError;
2474        stream.msg = err.error_message();
2475        return err;
2476    }
2477
2478    if stream.avail_out == 0 {
2479        let err = ReturnCode::BufError;
2480        stream.msg = err.error_message();
2481        return err;
2482    }
2483
2484    let old_flush = stream.state.last_flush;
2485    stream.state.last_flush = flush as i8;
2486
2487    /* Flush as much pending output as possible */
2488    if !stream.state.bit_writer.pending.pending().is_empty() {
2489        flush_pending(stream);
2490        if stream.avail_out == 0 {
2491            /* Since avail_out is 0, deflate will be called again with
2492             * more output space, but possibly with both pending and
2493             * avail_in equal to zero. There won't be anything to do,
2494             * but this is not an error situation so make sure we
2495             * return OK instead of BUF_ERROR at next call of deflate:
2496             */
2497            stream.state.last_flush = -1;
2498            return ReturnCode::Ok;
2499        }
2500
2501        /* Make sure there is something to do and avoid duplicate consecutive
2502         * flushes. For repeated and useless calls with Z_FINISH, we keep
2503         * returning Z_STREAM_END instead of Z_BUF_ERROR.
2504         */
2505    } else if stream.avail_in == 0
2506        && rank_flush(flush as i8) <= rank_flush(old_flush)
2507        && flush != DeflateFlush::Finish
2508    {
2509        let err = ReturnCode::BufError;
2510        stream.msg = err.error_message();
2511        return err;
2512    }
2513
2514    /* User must not provide more input after the first FINISH: */
2515    if stream.state.status == Status::Finish && stream.avail_in != 0 {
2516        let err = ReturnCode::BufError;
2517        stream.msg = err.error_message();
2518        return err;
2519    }
2520
2521    /* Write the header */
2522    if stream.state.status == Status::Init && stream.state.wrap == 0 {
2523        stream.state.status = Status::Busy;
2524    }
2525
2526    if stream.state.status == Status::Init {
2527        let header = stream.state.header();
2528        stream
2529            .state
2530            .bit_writer
2531            .pending
2532            .extend(&header.to_be_bytes());
2533
2534        /* Save the adler32 of the preset dictionary: */
2535        if stream.state.strstart != 0 {
2536            let adler = stream.adler as u32;
2537            stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
2538        }
2539
2540        stream.adler = ADLER32_INITIAL_VALUE as _;
2541        stream.state.status = Status::Busy;
2542
2543        // compression must start with an empty pending buffer
2544        flush_pending(stream);
2545
2546        if !stream.state.bit_writer.pending.pending().is_empty() {
2547            stream.state.last_flush = -1;
2548
2549            return ReturnCode::Ok;
2550        }
2551    }
2552
2553    if stream.state.status == Status::GZip {
2554        /* gzip header */
2555        stream.state.crc_fold = Crc32Fold::new();
2556
2557        stream.state.bit_writer.pending.extend(&[31, 139, 8]);
2558
2559        let extra_flags = if stream.state.level == 9 {
2560            2
2561        } else if stream.state.strategy >= Strategy::HuffmanOnly || stream.state.level < 2 {
2562            4
2563        } else {
2564            0
2565        };
2566
2567        match &stream.state.gzhead {
2568            None => {
2569                let bytes = [0, 0, 0, 0, 0, extra_flags, gz_header::OS_CODE];
2570                stream.state.bit_writer.pending.extend(&bytes);
2571                stream.state.status = Status::Busy;
2572
2573                /* Compression must start with an empty pending buffer */
2574                flush_pending(stream);
2575                if !stream.state.bit_writer.pending.pending().is_empty() {
2576                    stream.state.last_flush = -1;
2577                    return ReturnCode::Ok;
2578                }
2579            }
2580            Some(gzhead) => {
2581                stream.state.bit_writer.pending.extend(&[gzhead.flags()]);
2582                let bytes = (gzhead.time as u32).to_le_bytes();
2583                stream.state.bit_writer.pending.extend(&bytes);
2584                stream
2585                    .state
2586                    .bit_writer
2587                    .pending
2588                    .extend(&[extra_flags, gzhead.os as u8]);
2589
2590                if !gzhead.extra.is_null() {
2591                    let bytes = (gzhead.extra_len as u16).to_le_bytes();
2592                    stream.state.bit_writer.pending.extend(&bytes);
2593                }
2594
2595                if gzhead.hcrc != 0 {
2596                    stream.adler = crc32(
2597                        stream.adler as u32,
2598                        stream.state.bit_writer.pending.pending(),
2599                    ) as z_checksum
2600                }
2601
2602                stream.state.gzindex = 0;
2603                stream.state.status = Status::Extra;
2604            }
2605        }
2606    }
2607
2608    if stream.state.status == Status::Extra {
2609        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2610            if !gzhead.extra.is_null() {
2611                let gzhead_extra = gzhead.extra;
2612
2613                let extra = unsafe {
2614                    core::slice::from_raw_parts(
2615                        // SAFETY: gzindex is always less than extra_len, and the user
2616                        // guarantees the pointer is valid for extra_len.
2617                        gzhead_extra.add(stream.state.gzindex),
2618                        (gzhead.extra_len & 0xffff) as usize - stream.state.gzindex,
2619                    )
2620                };
2621
2622                if let ControlFlow::Break(err) = flush_bytes(stream, extra) {
2623                    return err;
2624                }
2625            }
2626        }
2627        stream.state.status = Status::Name;
2628    }
2629
2630    if stream.state.status == Status::Name {
2631        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2632            if !gzhead.name.is_null() {
2633                // SAFETY: user satisfies precondition that gzhead.name is a C string.
2634                let gzhead_name = unsafe { CStr::from_ptr(gzhead.name.cast()) };
2635                let bytes = gzhead_name.to_bytes_with_nul();
2636                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
2637                    return err;
2638                }
2639            }
2640            stream.state.status = Status::Comment;
2641        }
2642    }
2643
2644    if stream.state.status == Status::Comment {
2645        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2646            if !gzhead.comment.is_null() {
2647                // SAFETY: user satisfies precondition that gzhead.comment is a C string.
2648                let gzhead_comment = unsafe { CStr::from_ptr(gzhead.comment.cast()) };
2649                let bytes = gzhead_comment.to_bytes_with_nul();
2650                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
2651                    return err;
2652                }
2653            }
2654            stream.state.status = Status::Hcrc;
2655        }
2656    }
2657
2658    if stream.state.status == Status::Hcrc {
2659        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2660            if gzhead.hcrc != 0 {
2661                let bytes = (stream.adler as u16).to_le_bytes();
2662                if let ControlFlow::Break(err) = flush_bytes(stream, &bytes) {
2663                    return err;
2664                }
2665            }
2666        }
2667
2668        stream.state.status = Status::Busy;
2669
2670        // compression must start with an empty pending buffer
2671        flush_pending(stream);
2672        if !stream.state.bit_writer.pending.pending().is_empty() {
2673            stream.state.last_flush = -1;
2674            return ReturnCode::Ok;
2675        }
2676    }
2677
2678    // Start a new block or continue the current one.
2679    let state = &mut stream.state;
2680    if stream.avail_in != 0
2681        || state.lookahead != 0
2682        || (flush != DeflateFlush::NoFlush && state.status != Status::Finish)
2683    {
2684        let bstate = self::algorithm::run(stream, flush);
2685
2686        let state = &mut stream.state;
2687
2688        if matches!(bstate, BlockState::FinishStarted | BlockState::FinishDone) {
2689            state.status = Status::Finish;
2690        }
2691
2692        match bstate {
2693            BlockState::NeedMore | BlockState::FinishStarted => {
2694                if stream.avail_out == 0 {
2695                    state.last_flush = -1; /* avoid BUF_ERROR next call, see above */
2696                }
2697                return ReturnCode::Ok;
2698                /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
2699                 * of deflate should use the same flush parameter to make sure
2700                 * that the flush is complete. So we don't have to output an
2701                 * empty block here, this will be done at next call. This also
2702                 * ensures that for a very small output buffer, we emit at most
2703                 * one empty block.
2704                 */
2705            }
2706            BlockState::BlockDone => {
2707                match flush {
2708                    DeflateFlush::NoFlush => unreachable!("condition of inner surrounding if"),
2709                    DeflateFlush::PartialFlush => {
2710                        state.bit_writer.align();
2711                    }
2712                    DeflateFlush::SyncFlush => {
2713                        // add an empty stored block that is marked as not final. This is useful for
2714                        // parallel deflate where we want to make sure the intermediate blocks are not
2715                        // marked as "last block".
2716                        zng_tr_stored_block(state, 0..0, false);
2717                    }
2718                    DeflateFlush::FullFlush => {
2719                        // add an empty stored block that is marked as not final. This is useful for
2720                        // parallel deflate where we want to make sure the intermediate blocks are not
2721                        // marked as "last block".
2722                        zng_tr_stored_block(state, 0..0, false);
2723
2724                        state.head.as_mut_slice().fill(0); // forget history
2725
2726                        if state.lookahead == 0 {
2727                            state.strstart = 0;
2728                            state.block_start = 0;
2729                            state.insert = 0;
2730                        }
2731                    }
2732                    DeflateFlush::Block => { /* fall through */ }
2733                    DeflateFlush::Finish => unreachable!("condition of outer surrounding if"),
2734                }
2735
2736                flush_pending(stream);
2737
2738                if stream.avail_out == 0 {
2739                    stream.state.last_flush = -1; /* avoid BUF_ERROR at next call, see above */
2740                    return ReturnCode::Ok;
2741                }
2742            }
2743            BlockState::FinishDone => { /* do nothing */ }
2744        }
2745    }
2746
2747    if flush != DeflateFlush::Finish {
2748        return ReturnCode::Ok;
2749    }
2750
2751    // write the trailer
2752    if stream.state.wrap == 2 {
2753        let crc_fold = core::mem::take(&mut stream.state.crc_fold);
2754        stream.adler = crc_fold.finish() as z_checksum;
2755
2756        let adler = stream.adler as u32;
2757        stream.state.bit_writer.pending.extend(&adler.to_le_bytes());
2758
2759        let total_in = stream.total_in as u32;
2760        stream
2761            .state
2762            .bit_writer
2763            .pending
2764            .extend(&total_in.to_le_bytes());
2765    } else if stream.state.wrap == 1 {
2766        let adler = stream.adler as u32;
2767        stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
2768    }
2769
2770    flush_pending(stream);
2771
2772    // If avail_out is zero, the application will call deflate again to flush the rest.
2773    if stream.state.wrap > 0 {
2774        stream.state.wrap = -stream.state.wrap; /* write the trailer only once! */
2775    }
2776
2777    if stream.state.bit_writer.pending.pending().is_empty() {
2778        assert_eq!(stream.state.bit_writer.bits_used, 0, "bi_buf not flushed");
2779        return ReturnCode::StreamEnd;
2780    }
2781    ReturnCode::Ok
2782}
2783
2784pub(crate) fn flush_pending(stream: &mut DeflateStream) {
2785    let state = &mut stream.state;
2786
2787    state.bit_writer.flush_bits();
2788
2789    let pending = state.bit_writer.pending.pending();
2790    let len = Ord::min(pending.len(), stream.avail_out as usize);
2791
2792    if len == 0 {
2793        return;
2794    }
2795
2796    trace!("\n[FLUSH {len} bytes]");
2797    // SAFETY: len is min(pending, stream.avail_out), so we won't overrun next_out.
2798    unsafe { core::ptr::copy_nonoverlapping(pending.as_ptr(), stream.next_out, len) };
2799
2800    stream.next_out = stream.next_out.wrapping_add(len);
2801    stream.total_out += len as crate::c_api::z_size;
2802    stream.avail_out -= len as crate::c_api::uInt;
2803
2804    state.bit_writer.pending.advance(len);
2805}
2806
2807/// Compresses `input` into the provided `output` buffer.
2808///
2809/// Returns a subslice of `output` containing the compressed bytes and a
2810/// [`ReturnCode`] indicating the result of the operation. Returns [`ReturnCode::BufError`] if
2811/// there is insufficient output space.
2812///
2813/// Use [`compress_bound`] for an upper bound on how large the output buffer needs to be.
2814///
2815/// # Example
2816///
2817/// ```
2818/// # use zlib_rs::*;
2819/// # fn foo(input: &[u8]) {
2820/// let mut buf = vec![0u8; compress_bound(input.len())];
2821/// let (compressed, rc) = compress_slice(&mut buf, input, DeflateConfig::default());
2822/// # }
2823/// ```
2824pub fn compress_slice<'a>(
2825    output: &'a mut [u8],
2826    input: &[u8],
2827    config: DeflateConfig,
2828) -> (&'a mut [u8], ReturnCode) {
2829    // SAFETY: a [u8] is a valid [MaybeUninit<u8>].
2830    let output_uninit = unsafe {
2831        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2832    };
2833
2834    compress(output_uninit, input, config)
2835}
2836
2837pub fn compress<'a>(
2838    output: &'a mut [MaybeUninit<u8>],
2839    input: &[u8],
2840    config: DeflateConfig,
2841) -> (&'a mut [u8], ReturnCode) {
2842    compress_with_flush(output, input, config, DeflateFlush::Finish)
2843}
2844
2845pub fn compress_slice_with_flush<'a>(
2846    output: &'a mut [u8],
2847    input: &[u8],
2848    config: DeflateConfig,
2849    flush: DeflateFlush,
2850) -> (&'a mut [u8], ReturnCode) {
2851    // SAFETY: a [u8] is a valid [MaybeUninit<u8>], and `compress_with_flush` never uninitializes previously initialized memory.
2852    let output_uninit = unsafe {
2853        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2854    };
2855
2856    compress_with_flush(output_uninit, input, config, flush)
2857}
2858
2859pub fn compress_with_flush<'a>(
2860    output: &'a mut [MaybeUninit<u8>],
2861    input: &[u8],
2862    config: DeflateConfig,
2863    final_flush: DeflateFlush,
2864) -> (&'a mut [u8], ReturnCode) {
2865    let mut stream = z_stream {
2866        next_in: input.as_ptr() as *mut u8,
2867        avail_in: 0, // for special logic on the first iteration
2868        total_in: 0,
2869        next_out: output.as_mut_ptr() as *mut u8,
2870        avail_out: 0, // for special logic on the first iteration
2871        total_out: 0,
2872        msg: core::ptr::null_mut(),
2873        state: core::ptr::null_mut(),
2874        zalloc: None,
2875        zfree: None,
2876        opaque: core::ptr::null_mut(),
2877        data_type: 0,
2878        adler: 0,
2879        reserved: 0,
2880    };
2881
2882    let err = init(&mut stream, config);
2883    if err != ReturnCode::Ok {
2884        return (&mut [], err);
2885    }
2886
2887    let max = core::ffi::c_uint::MAX as usize;
2888
2889    let mut left = output.len();
2890    let mut source_len = input.len();
2891
2892    let return_code = loop {
2893        if stream.avail_out == 0 {
2894            stream.avail_out = Ord::min(left, max) as _;
2895            left -= stream.avail_out as usize;
2896        }
2897
2898        if stream.avail_in == 0 {
2899            stream.avail_in = Ord::min(source_len, max) as _;
2900            source_len -= stream.avail_in as usize;
2901        }
2902
2903        let flush = if source_len > 0 {
2904            DeflateFlush::NoFlush
2905        } else {
2906            final_flush
2907        };
2908
2909        let err = if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
2910            deflate(stream, flush)
2911        } else {
2912            ReturnCode::StreamError
2913        };
2914
2915        match err {
2916            ReturnCode::Ok => continue,
2917            ReturnCode::StreamEnd => break ReturnCode::Ok,
2918            _ => break err,
2919        }
2920    };
2921
2922    // SAFETY: we have now initialized these bytes
2923    let output_slice = unsafe {
2924        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut u8, stream.total_out as usize)
2925    };
2926
2927    // `end` may return DataError if there was insufficient output space
2928    if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
2929        let _ = end(stream);
2930    }
2931
2932    (output_slice, return_code)
2933}
2934
2935/// Returns the upper bound on the compressed size for an input of `source_len` bytes.
2936///
2937/// When compression has this much space available, it will never fail because of insufficient
2938/// output space.
2939///
2940/// # Example
2941///
2942/// ```
2943/// # use zlib_rs::*;
2944///
2945/// assert_eq!(compress_bound(1024), 1161);
2946/// assert_eq!(compress_bound(4096), 4617);
2947/// assert_eq!(compress_bound(65536), 73737);
2948///
2949/// # fn foo(input: &[u8]) {
2950/// let mut buf = vec![0u8; compress_bound(input.len())];
2951/// let (compressed, rc) = compress_slice(&mut buf, input, DeflateConfig::default());
2952/// # }
2953/// ```
2954pub const fn compress_bound(source_len: usize) -> usize {
2955    compress_bound_help(source_len, ZLIB_WRAPLEN)
2956}
2957
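    // As a hand-checked example with the zlib wrapper: for `source_len == 1024` the
    // quick-encoding overhead is (1024 * (9 - 8) + 7) >> 3 == 128 bytes, the block
    // overhead is (3 + 15 + 6) >> 3 == 3 bytes, and the wrapper adds 6 bytes, for a
    // total of 1024 + 128 + 3 + 6 == 1161, matching the `compress_bound` docs above.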
2958const fn compress_bound_help(source_len: usize, wrap_len: usize) -> usize {
2959    source_len // the source size itself
2960        // Always at least one byte for any input
2961        .wrapping_add(if source_len == 0 { 1 } else { 0 })
2962        // One extra byte for lengths less than 9
2963        .wrapping_add(if source_len < 9 { 1 } else { 0 })
2964        // Source encoding overhead, padded to next full byte
2965        .wrapping_add(deflate_quick_overhead(source_len))
2966        // Deflate block overhead bytes
2967        .wrapping_add(DEFLATE_BLOCK_OVERHEAD)
2968        // none, zlib or gzip wrapper
2969        .wrapping_add(wrap_len)
2970}
2971
2972/// A heap used to build the Huffman trees.
2973///
2974/// The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
2975/// The same heap array is used to build all trees.
2976#[derive(Clone)]
2977struct Heap {
2978    heap: [u32; 2 * L_CODES + 1],
2979
2980    /// number of elements in the heap
2981    heap_len: usize,
2982
2983    /// element of the largest frequency
2984    heap_max: usize,
2985
2986    depth: [u8; 2 * L_CODES + 1],
2987}
2988
2989impl Heap {
2990    // an empty heap
2991    fn new() -> Self {
2992        Self {
2993            heap: [0; 2 * L_CODES + 1],
2994            heap_len: 0,
2995            heap_max: 0,
2996            depth: [0; 2 * L_CODES + 1],
2997        }
2998    }
2999
3000    /// Construct the initial heap, with least frequent element in
3001    /// heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
3002    fn initialize(&mut self, tree: &mut [Value]) -> isize {
3003        let mut max_code = -1;
3004
3005        self.heap_len = 0;
3006        self.heap_max = HEAP_SIZE;
3007
3008        for (n, node) in tree.iter_mut().enumerate() {
3009            if node.freq() > 0 {
3010                self.heap_len += 1;
3011                self.heap[self.heap_len] = n as u32;
3012                max_code = n as isize;
3013                self.depth[n] = 0;
3014            } else {
3015                *node.len_mut() = 0;
3016            }
3017        }
3018
3019        max_code
3020    }
3021
3022    /// Index within the heap array of least frequent node in the Huffman tree
3023    const SMALLEST: usize = 1;
3024
3025    fn pqdownheap(&mut self, tree: &[Value], mut k: usize) {
3026        /* tree: the tree to restore */
3027        /* k: node to move down */
3028
3029        // Given the index $i of a node in the tree, pack the node's frequency and depth
3030        // into a single integer. The heap ordering logic uses a primary sort on frequency
3031        // and a secondary sort on depth, so packing both into one integer makes it
3032        // possible to sort with fewer comparison operations.
3033        macro_rules! freq_and_depth {
3034            ($i:expr) => {
3035                (tree[$i as usize].freq() as u32) << 8 | self.depth[$i as usize] as u32
3036            };
3037        }
3038
3039        let v = self.heap[k];
3040        let v_val = freq_and_depth!(v);
3041        let mut j = k << 1; /* left son of k */
3042
3043        while j <= self.heap_len {
3044            /* Set j to the smallest of the two sons: */
3045            let mut j_val = freq_and_depth!(self.heap[j]);
3046            if j < self.heap_len {
3047                let j1_val = freq_and_depth!(self.heap[j + 1]);
3048                if j1_val <= j_val {
3049                    j += 1;
3050                    j_val = j1_val;
3051                }
3052            }
3053
3054            /* Exit if v is smaller than both sons */
3055            if v_val <= j_val {
3056                break;
3057            }
3058
3059            /* Exchange v with the smallest son */
3060            self.heap[k] = self.heap[j];
3061            k = j;
3062
3063            /* And continue down the tree, setting j to the left son of k */
3064            j <<= 1;
3065        }
3066
3067        self.heap[k] = v;
3068    }
3069
3070    /// Remove the smallest element from the heap and recreate the heap with
3071    /// one less element. Updates heap and heap_len.
3072    fn pqremove(&mut self, tree: &[Value]) -> u32 {
3073        let top = self.heap[Self::SMALLEST];
3074        self.heap[Self::SMALLEST] = self.heap[self.heap_len];
3075        self.heap_len -= 1;
3076
3077        self.pqdownheap(tree, Self::SMALLEST);
3078
3079        top
3080    }
3081
3082    /// Construct the Huffman tree by repeatedly combining the least two frequent nodes.
3083    fn construct_huffman_tree(&mut self, tree: &mut [Value], mut node: usize) {
3084        loop {
3085            let n = self.pqremove(tree) as usize; /* n = node of least frequency */
3086            let m = self.heap[Heap::SMALLEST] as usize; /* m = node of next least frequency */
3087
3088            self.heap_max -= 1;
3089            self.heap[self.heap_max] = n as u32; /* keep the nodes sorted by frequency */
3090            self.heap_max -= 1;
3091            self.heap[self.heap_max] = m as u32;
3092
3093            /* Create a new node father of n and m */
3094            *tree[node].freq_mut() = tree[n].freq() + tree[m].freq();
3095            self.depth[node] = Ord::max(self.depth[n], self.depth[m]) + 1;
3096
3097            *tree[n].dad_mut() = node as u16;
3098            *tree[m].dad_mut() = node as u16;
3099
3100            /* and insert the new node in the heap */
3101            self.heap[Heap::SMALLEST] = node as u32;
3102            node += 1;
3103
3104            self.pqdownheap(tree, Heap::SMALLEST);
3105
3106            if self.heap_len < 2 {
3107                break;
3108            }
3109        }
3110
3111        self.heap_max -= 1;
3112        self.heap[self.heap_max] = self.heap[Heap::SMALLEST];
3113    }
3114}
3115
3116/// # Safety
3117///
3118/// The caller must guarantee:
3119///
3120/// * If `head` is `Some`
3121///     - `head.extra` is `NULL` or is readable for at least `head.extra_len` bytes
3122///     - `head.name` is `NULL` or satisfies the requirements of [`core::ffi::CStr::from_ptr`]
3123///     - `head.comment` is `NULL` or satisfies the requirements of [`core::ffi::CStr::from_ptr`]
3124pub unsafe fn set_header<'a>(
3125    stream: &mut DeflateStream<'a>,
3126    head: Option<&'a mut gz_header>,
3127) -> ReturnCode {
3128    if stream.state.wrap != 2 {
3129        ReturnCode::StreamError
3130    } else {
3131        stream.state.gzhead = head;
3132        ReturnCode::Ok
3133    }
3134}
3135
3136// zlib format overhead
3137const ZLIB_WRAPLEN: usize = 6;
3138// gzip format overhead
3139const GZIP_WRAPLEN: usize = 18;
3140
3141const DEFLATE_HEADER_BITS: usize = 3;
3142const DEFLATE_EOBS_BITS: usize = 15;
3143const DEFLATE_PAD_BITS: usize = 6;
3144const DEFLATE_BLOCK_OVERHEAD: usize =
3145    (DEFLATE_HEADER_BITS + DEFLATE_EOBS_BITS + DEFLATE_PAD_BITS) >> 3;
3146
3147const DEFLATE_QUICK_LIT_MAX_BITS: usize = 9;
3148const fn deflate_quick_overhead(x: usize) -> usize {
3149    let sum = x
3150        .wrapping_mul(DEFLATE_QUICK_LIT_MAX_BITS - 8)
3151        .wrapping_add(7);
3152
3153    // imitate zlib-ng rounding behavior (on windows, c_ulong is 32 bits)
3154    (sum as core::ffi::c_ulong >> 3) as usize
3155}
3156
3157/// For the default windowBits of 15 and memLevel of 8, this function returns
3158/// a close-to-exact, as well as small, upper bound on the compressed size.
3159/// The values are coded as constants here for a reason: if the underlying
3160/// constants are changed, then this function needs to be changed as well. The
3161/// return value for 15 and 8 only works for those exact settings.
3162///
3163/// For any setting other than those defaults for windowBits and memLevel,
3164/// the value returned is a conservative worst case for the maximum expansion
3165/// resulting from using fixed blocks instead of stored blocks, which deflate
3166/// can emit on compressed data for some combinations of the parameters.
3167///
3168/// This function could be more sophisticated to provide closer upper bounds for
3169/// every combination of windowBits and memLevel.  But even the conservative
3170/// upper bound of about 14% expansion does not seem onerous for output buffer
3171/// allocation.
3172pub fn bound(stream: Option<&mut DeflateStream>, source_len: usize) -> usize {
3173    // on windows, c_ulong is only a 32-bit integer
3174    let mask = core::ffi::c_ulong::MAX as usize;
3175
3176    // conservative upper bound for compressed data
3177    let comp_len = source_len
3178        .wrapping_add((source_len.wrapping_add(7) & mask) >> 3)
3179        .wrapping_add((source_len.wrapping_add(63) & mask) >> 6)
3180        .wrapping_add(5);
3181
3182    let Some(stream) = stream else {
        // return the conservative bound plus the zlib wrapper overhead
        return comp_len.wrapping_add(ZLIB_WRAPLEN);
3185    };
3186
3187    /* compute wrapper length */
3188    let wrap_len = match stream.state.wrap {
3189        0 => {
3190            // raw deflate
3191            0
3192        }
        1 => {
            // zlib wrapper; the 4 extra bytes allow for the DICTID field,
            // which is present when a preset dictionary is in use
            if stream.state.strstart != 0 {
                ZLIB_WRAPLEN + 4
            } else {
                ZLIB_WRAPLEN
            }
        }
3201        2 => {
3202            // gzip wrapper
3203            let mut gz_wrap_len = GZIP_WRAPLEN;
3204
3205            if let Some(header) = &stream.state.gzhead {
3206                if !header.extra.is_null() {
3207                    gz_wrap_len += 2 + header.extra_len as usize;
3208                }
3209
3210                let mut c_string = header.name;
3211                if !c_string.is_null() {
3212                    loop {
3213                        gz_wrap_len += 1;
3214                        // SAFETY: user guarantees header.name is a valid C string.
3215                        unsafe {
3216                            if *c_string == 0 {
3217                                break;
3218                            }
3219                            c_string = c_string.add(1);
3220                        }
3221                    }
3222                }
3223
3224                let mut c_string = header.comment;
3225                if !c_string.is_null() {
3226                    loop {
3227                        gz_wrap_len += 1;
3228                        // SAFETY: user guarantees header.comment is a valid C string.
3229                        unsafe {
3230                            if *c_string == 0 {
3231                                break;
3232                            }
3233                            c_string = c_string.add(1);
3234                        }
3235                    }
3236                }
3237
3238                if header.hcrc != 0 {
3239                    gz_wrap_len += 2;
3240                }
3241            }
3242
3243            gz_wrap_len
3244        }
3245        _ => {
3246            // default
3247            ZLIB_WRAPLEN
3248        }
3249    };
3250
3251    if stream.state.w_bits() != MAX_WBITS as u32 || HASH_BITS < 15 {
3252        if stream.state.level == 0 {
            /* upper bound for stored blocks with length 127 (memLevel == 1): ~4% overhead plus a small constant */
3254            source_len
3255                .wrapping_add(source_len >> 5)
3256                .wrapping_add(source_len >> 7)
3257                .wrapping_add(source_len >> 11)
3258                .wrapping_add(7)
3259                .wrapping_add(wrap_len)
3260        } else {
3261            comp_len.wrapping_add(wrap_len)
3262        }
3263    } else {
3264        compress_bound_help(source_len, wrap_len)
3265    }
3266}
3267
/// # Safety
///
/// If not `NULL`, `dictionary` must be writable for at least `w_size` bytes
/// (the window size, which bounds the length of the stored dictionary).
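///
/// # Example
///
/// A minimal sketch (assumes `stream` is an initialized [`DeflateStream`];
/// with the default windowBits the dictionary is at most 32768 bytes):
///
/// ```ignore
/// let mut dict = vec![0u8; 32768];
/// let len = unsafe { get_dictionary(&stream, dict.as_mut_ptr()) };
/// dict.truncate(len);
/// ```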
3271pub unsafe fn get_dictionary(stream: &DeflateStream<'_>, dictionary: *mut u8) -> usize {
3272    let s = &stream.state;
3273    let len = Ord::min(s.strstart + s.lookahead, s.w_size);
3274
3275    if !dictionary.is_null() && len > 0 {
3276        unsafe {
3277            core::ptr::copy_nonoverlapping(
3278                s.window.as_ptr().add(s.strstart + s.lookahead - len),
3279                dictionary,
3280                len,
3281            );
3282        }
3283    }
3284
3285    len
3286}
3287
/// Byte offsets of the sub-allocations within the single backing allocation
/// used for the deflate state, window, hash tables, and pending buffers.
struct DeflateAllocOffsets {
3289    total_size: usize,
3290    state_pos: usize,
3291    window_pos: usize,
3292    pending_pos: usize,
3293    sym_buf_pos: usize,
3294    prev_pos: usize,
3295    head_pos: usize,
3296}
3297
3298impl DeflateAllocOffsets {
3299    fn new(window_bits: usize, lit_bufsize: usize) -> Self {
3300        use core::mem::size_of;
3301
3302        // 64B alignment of individual items in the alloc.
3303        // Note that changing this also requires changes in 'init' and 'copy'.
3304        const ALIGN_SIZE: usize = 64;
3305        const LIT_BUFS: usize = 4;
3306
3307        let mut curr_size = 0usize;
3308
3309        /* Define sizes */
3310        let state_size = size_of::<State>();
3311        // Allocate a second window worth of space to avoid the need to shift the data constantly.
3312        let window_size = (1 << window_bits) * 2;
3313        let prev_size = (1 << window_bits) * size_of::<Pos>();
3314        let head_size = HASH_SIZE * size_of::<Pos>();
3315        let pending_size = lit_bufsize * LIT_BUFS;
3316        let sym_buf_size = lit_bufsize * (LIT_BUFS - 1);
3318
3319        /* Calculate relative buffer positions and paddings */
3320        let state_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3321        curr_size = state_pos + state_size;
3322
3323        let window_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3324        curr_size = window_pos + window_size;
3325
3326        let prev_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3327        curr_size = prev_pos + prev_size;
3328
3329        let head_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3330        curr_size = head_pos + head_size;
3331
3332        let pending_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3333        curr_size = pending_pos + pending_size;
3334
3335        let sym_buf_pos = curr_size.next_multiple_of(ALIGN_SIZE);
3336        curr_size = sym_buf_pos + sym_buf_size;
3337
3338        /* Add ALIGN_SIZE-1 to allow alignment (done in the 'init' and 'copy' functions), and round
3339         * size of buffer up to next multiple of ALIGN_SIZE */
3340        let total_size = (curr_size + (ALIGN_SIZE - 1)).next_multiple_of(ALIGN_SIZE);
3341
3342        Self {
3343            total_size,
3344            state_pos,
3345            window_pos,
3346            pending_pos,
3347            sym_buf_pos,
3348            prev_pos,
3349            head_pos,
3350        }
3351    }
3352}
3353
3354#[cfg(test)]
3355mod test {
3356    use crate::{
3357        inflate::{decompress_slice, InflateConfig, InflateStream},
3358        InflateFlush,
3359    };
3360
3361    use super::*;
3362
3363    use core::{ffi::CStr, sync::atomic::AtomicUsize};
3364
3365    #[test]
3366    fn detect_data_type_basic() {
3367        let empty = || [Value::new(0, 0); LITERALS];
3368
3369        assert_eq!(State::detect_data_type(&empty()), DataType::Binary);
3370
3371        let mut binary = empty();
3372        binary[0] = Value::new(1, 0);
3373        assert_eq!(State::detect_data_type(&binary), DataType::Binary);
3374
3375        let mut text = empty();
3376        text[b'\r' as usize] = Value::new(1, 0);
3377        assert_eq!(State::detect_data_type(&text), DataType::Text);
3378
3379        let mut text = empty();
3380        text[b'a' as usize] = Value::new(1, 0);
3381        assert_eq!(State::detect_data_type(&text), DataType::Text);
3382
3383        let mut non_text = empty();
3384        non_text[7] = Value::new(1, 0);
3385        assert_eq!(State::detect_data_type(&non_text), DataType::Binary);
3386    }
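
    // A minimal sanity sketch of the layout invariants of the combined deflate
    // allocation: every sub-buffer offset is 64-byte aligned, and the total size
    // covers the last sub-buffer (`sym_buf`). The arguments are illustrative,
    // matching windowBits 15 and a lit_bufsize of 1 << 14.
    #[test]
    fn alloc_offsets_are_aligned() {
        let offsets = DeflateAllocOffsets::new(15, 1 << 14);

        for pos in [
            offsets.state_pos,
            offsets.window_pos,
            offsets.prev_pos,
            offsets.head_pos,
            offsets.pending_pos,
            offsets.sym_buf_pos,
        ] {
            assert_eq!(pos % 64, 0);
        }

        assert_eq!(offsets.total_size % 64, 0);
        assert!(offsets.total_size > offsets.sym_buf_pos);
    }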
3387
3388    #[test]
3389    fn from_stream_mut() {
3390        unsafe {
3391            assert!(DeflateStream::from_stream_mut(core::ptr::null_mut()).is_none());
3392
            let mut stream = z_stream::default();
            // zalloc and zfree are not set
            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());

            // state is still NULL
            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());
3398
3399            init(&mut stream, DeflateConfig::default());
3400            let stream = DeflateStream::from_stream_mut(&mut stream);
3401            assert!(stream.is_some());
3402
3403            assert!(end(stream.unwrap()).is_ok());
3404        }
3405    }
3406
3407    #[cfg(feature = "c-allocator")]
3408    unsafe extern "C" fn fail_nth_allocation<const N: usize>(
3409        opaque: crate::c_api::voidpf,
3410        items: crate::c_api::uInt,
3411        size: crate::c_api::uInt,
3412    ) -> crate::c_api::voidpf {
3413        let count = unsafe { &*(opaque as *const AtomicUsize) };
3414
        if count.fetch_add(1, core::sync::atomic::Ordering::Relaxed) != N {
            // We must use the C allocator internally: (de)allocations are matched up based on
            // function pointer values, and because we don't use the rust allocator directly,
            // the allocation logic stores the pointer to the start of the allocation at the
            // start of the allocation.
            unsafe { (crate::allocate::C.zalloc)(opaque, items, size) }
3420        } else {
3421            core::ptr::null_mut()
3422        }
3423    }
3424
3425    #[test]
3426    #[cfg(feature = "c-allocator")]
3427    fn init_invalid_allocator() {
        let atomic = AtomicUsize::new(0);
        let mut stream = z_stream {
            zalloc: Some(fail_nth_allocation::<0>),
            zfree: Some(crate::allocate::C.zfree),
            opaque: &atomic as *const _ as *const core::ffi::c_void as *mut _,
            ..z_stream::default()
        };
        assert_eq!(
            init(&mut stream, DeflateConfig::default()),
            ReturnCode::MemError
        );
3441    }
3442
3443    #[test]
3444    #[cfg(feature = "c-allocator")]
3445    fn copy_invalid_allocator() {
3446        let mut stream = z_stream::default();
3447
3448        let atomic = AtomicUsize::new(0);
3449        stream.opaque = &atomic as *const _ as *const core::ffi::c_void as *mut _;
3450        stream.zalloc = Some(fail_nth_allocation::<1>);
3451        stream.zfree = Some(crate::allocate::C.zfree);
3452
        // init makes a single combined allocation; fail_nth_allocation::<1> lets it
        // succeed and instead fails the next allocation, which is made by `copy`
3454        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3455
3456        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
3457            unreachable!()
3458        };
3459
3460        let mut stream_copy = MaybeUninit::<DeflateStream>::zeroed();
3461
3462        assert_eq!(copy(&mut stream_copy, stream), ReturnCode::MemError);
3463
3464        assert!(end(stream).is_ok());
3465    }
3466
3467    mod invalid_deflate_config {
3468        use super::*;
3469
3470        #[test]
3471        fn sanity_check() {
3472            let mut stream = z_stream::default();
3473            assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3474
3475            assert!(stream.zalloc.is_some());
3476            assert!(stream.zfree.is_some());
3477
3478            // this should be the default level
3479            let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3480            assert_eq!(stream.state.level, 6);
3481
3482            assert!(end(stream).is_ok());
3483        }
3484
3485        #[test]
3486        fn window_bits_correction() {
3487            // window_bits of 8 gets turned into 9 internally
3488            let mut stream = z_stream::default();
3489            let config = DeflateConfig {
3490                window_bits: 8,
3491                ..Default::default()
3492            };
3493            assert_eq!(init(&mut stream, config), ReturnCode::Ok);
3494            let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3495            assert_eq!(stream.state.w_bits(), 9);
3496
3497            assert!(end(stream).is_ok());
3498        }
3499
3500        #[test]
3501        fn window_bits_too_low() {
3502            let mut stream = z_stream::default();
3503            let config = DeflateConfig {
3504                window_bits: -16,
3505                ..Default::default()
3506            };
3507            assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
3508        }
3509
3510        #[test]
3511        fn window_bits_too_high() {
3512            // window bits too high
3513            let mut stream = z_stream::default();
3514            let config = DeflateConfig {
3515                window_bits: 42,
3516                ..Default::default()
3517            };
3518            assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
3519        }
3520    }
3521
3522    #[test]
3523    fn end_data_error() {
3524        let mut stream = z_stream::default();
3525        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3526        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3527
        // deflate into too little output space
3529        let input = b"Hello World\n";
3530        stream.next_in = input.as_ptr() as *mut u8;
3531        stream.avail_in = input.len() as _;
3532        let output = &mut [0, 0, 0];
3533        stream.next_out = output.as_mut_ptr();
3534        stream.avail_out = output.len() as _;
3535
3536        // the deflate is fine
3537        assert_eq!(deflate(stream, DeflateFlush::NoFlush), ReturnCode::Ok);
3538
3539        // but end is not
3540        assert!(end(stream).is_err());
3541    }
3542
3543    #[test]
3544    fn test_reset_keep() {
3545        let mut stream = z_stream::default();
3546        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3547        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3548
        // deflate some input with plenty of output space
3550        let input = b"Hello World\n";
3551        stream.next_in = input.as_ptr() as *mut u8;
3552        stream.avail_in = input.len() as _;
3553
3554        let output = &mut [0; 1024];
3555        stream.next_out = output.as_mut_ptr();
3556        stream.avail_out = output.len() as _;
3557        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);
3558
3559        assert_eq!(reset_keep(stream), ReturnCode::Ok);
3560
3561        let output = &mut [0; 1024];
3562        stream.next_out = output.as_mut_ptr();
3563        stream.avail_out = output.len() as _;
3564        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);
3565
3566        assert!(end(stream).is_ok());
3567    }
3568
3569    #[test]
3570    fn hello_world_huffman_only() {
3571        const EXPECTED: &[u8] = &[
3572            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
3573            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
3574        ];
3575
3576        let input = "Hello World!\n";
3577
3578        let mut output = vec![0; 128];
3579
3580        let config = DeflateConfig {
3581            level: 6,
3582            method: Method::Deflated,
3583            window_bits: crate::MAX_WBITS,
3584            mem_level: DEF_MEM_LEVEL,
3585            strategy: Strategy::HuffmanOnly,
3586        };
3587
3588        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);
3589
3590        assert_eq!(err, ReturnCode::Ok);
3591
3592        assert_eq!(output.len(), EXPECTED.len());
3593
3594        assert_eq!(EXPECTED, output);
3595    }
3596
3597    #[test]
3598    fn hello_world_quick() {
3599        const EXPECTED: &[u8] = &[
3600            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
3601            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
3602        ];
3603
3604        let input = "Hello World!\n";
3605
3606        let mut output = vec![0; 128];
3607
3608        let config = DeflateConfig {
3609            level: 1,
3610            method: Method::Deflated,
3611            window_bits: crate::MAX_WBITS,
3612            mem_level: DEF_MEM_LEVEL,
3613            strategy: Strategy::Default,
3614        };
3615
3616        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);
3617
3618        assert_eq!(err, ReturnCode::Ok);
3619
3620        assert_eq!(output.len(), EXPECTED.len());
3621
3622        assert_eq!(EXPECTED, output);
3623    }
3624
3625    #[test]
3626    fn hello_world_quick_random() {
3627        const EXPECTED: &[u8] = &[
3628            0x78, 0x01, 0x53, 0xe1, 0x50, 0x51, 0xe1, 0x52, 0x51, 0x51, 0x01, 0x00, 0x03, 0xec,
3629            0x00, 0xeb,
3630        ];
3631
3632        let input = "$\u{8}$$\n$$$";
3633
3634        let mut output = vec![0; 128];
3635
3636        let config = DeflateConfig {
3637            level: 1,
3638            method: Method::Deflated,
3639            window_bits: crate::MAX_WBITS,
3640            mem_level: DEF_MEM_LEVEL,
3641            strategy: Strategy::Default,
3642        };
3643
3644        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);
3645
3646        assert_eq!(err, ReturnCode::Ok);
3647
3648        assert_eq!(output.len(), EXPECTED.len());
3649
3650        assert_eq!(EXPECTED, output);
3651    }
3652
3653    fn fuzz_based_test(input: &[u8], config: DeflateConfig, expected: &[u8]) {
3654        let mut output_rs = [0; 1 << 17];
3655        let (output_rs, err) = compress_slice(&mut output_rs, input, config);
3656        assert_eq!(err, ReturnCode::Ok);
3657
3658        assert_eq!(output_rs, expected);
3659    }
3660
3661    #[test]
3662    fn simple_rle() {
3663        fuzz_based_test(
3664            "\0\0\0\0\u{6}".as_bytes(),
3665            DeflateConfig {
3666                level: -1,
3667                method: Method::Deflated,
3668                window_bits: 11,
3669                mem_level: 4,
3670                strategy: Strategy::Rle,
3671            },
3672            &[56, 17, 99, 0, 2, 54, 0, 0, 11, 0, 7],
3673        )
3674    }
3675
3676    #[test]
3677    fn fill_window_out_of_bounds() {
3678        const INPUT: &[u8] = &[
3679            0x71, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3680            0x71, 0x71, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x1d, 0x1d, 0x63,
3681            0x63, 0x63, 0x63, 0x63, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d,
3682            0x1d, 0x27, 0x0, 0x0, 0x0, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
3683            0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0,
3684            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3685            0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
3686            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x48, 0x50,
3687            0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a,
3688            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3689            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0,
3690            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x0, 0x0, 0x0, 0x0,
3691            0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3692            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3693            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3694            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0, 0x0, 0x0,
3695            0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3696            0x71, 0x71, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71,
3697            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3698            0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71,
3699            0x6a, 0x0, 0x0, 0x0, 0x0, 0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3700            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3701            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3702            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3703            0x31, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0,
3704            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
3705            0x50, 0x50, 0x50, 0x50, 0x48, 0x50, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71,
3706            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3707            0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71,
3708            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3709            0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x6, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
3710            0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3711            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71,
3712            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3713            0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x20, 0x0, 0x0, 0x0, 0x0,
3714            0x0, 0x0, 0x0, 0x71, 0x75, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3715            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71,
3716            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71,
3717            0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x71, 0x71, 0x71, 0x71, 0x71,
3718            0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3719            0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34,
3720            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
3721            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x34, 0x34, 0x34, 0x34,
3722            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
3723            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
3724            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
3725            0x34, 0x34, 0x30, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3726            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3727            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3728            0x0, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x6,
3729        ];
3730
3731        fuzz_based_test(
3732            INPUT,
3733            DeflateConfig {
3734                level: -1,
3735                method: Method::Deflated,
3736                window_bits: 9,
3737                mem_level: 1,
3738                strategy: Strategy::HuffmanOnly,
3739            },
3740            &[
3741                0x18, 0x19, 0x4, 0xc1, 0x21, 0x1, 0xc4, 0x0, 0x10, 0x3, 0xb0, 0x18, 0x29, 0x1e,
3742                0x7e, 0x17, 0x83, 0xf5, 0x70, 0x6c, 0xac, 0xfe, 0xc9, 0x27, 0xdb, 0xb6, 0x6f, 0xdb,
3743                0xb6, 0x6d, 0xdb, 0x80, 0x24, 0xb9, 0xbb, 0xbb, 0x24, 0x49, 0x92, 0x24, 0xf, 0x2,
3744                0xd8, 0x36, 0x0, 0xf0, 0x3, 0x0, 0x0, 0x24, 0xd0, 0xb6, 0x6d, 0xdb, 0xb6, 0x6d,
3745                0xdb, 0xbe, 0x6d, 0xf9, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x80, 0x30, 0x0, 0xc3, 0x22,
3746                0x68, 0xf, 0x36, 0x90, 0xc2, 0xb5, 0xfa, 0x7f, 0x48, 0x80, 0x81, 0xb, 0x40, 0x55,
3747                0x55, 0x55, 0xd5, 0x16, 0x80, 0xaa, 0x7, 0x9, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3748                0xe, 0x7c, 0x82, 0xe0, 0x98, 0x0, 0x0, 0x0, 0x4, 0x60, 0x10, 0xf9, 0x8c, 0xe2,
3749                0xe5, 0xfa, 0x3f, 0x2, 0x54, 0x55, 0x55, 0x65, 0x0, 0xa8, 0xaa, 0xaa, 0xaa, 0xba,
3750                0x2, 0x50, 0xb5, 0x90, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x78, 0x82, 0xe0, 0xd0,
3751                0x8a, 0x41, 0x0, 0x0, 0xa2, 0x58, 0x54, 0xb7, 0x60, 0x83, 0x9a, 0x6a, 0x4, 0x96,
3752                0x87, 0xba, 0x51, 0xf8, 0xfb, 0x9b, 0x26, 0xfc, 0x0, 0x1c, 0x7, 0x6c, 0xdb, 0xb6,
3753                0x6d, 0xdb, 0xb6, 0x6d, 0xf7, 0xa8, 0x3a, 0xaf, 0xaa, 0x6a, 0x3, 0xf8, 0xc2, 0x3,
3754                0x40, 0x55, 0x55, 0x55, 0xd5, 0x5b, 0xf8, 0x80, 0xaa, 0x7a, 0xb, 0x0, 0x7f, 0x82,
3755                0xe0, 0x98, 0x0, 0x40, 0x18, 0x0, 0x82, 0xd8, 0x49, 0x40, 0x2, 0x22, 0x7e, 0xeb,
3756                0x80, 0xa6, 0xc, 0xa0, 0x9f, 0xa4, 0x2a, 0x38, 0xf, 0x0, 0x0, 0xe7, 0x1, 0xdc,
3757                0x55, 0x95, 0x17, 0x0, 0x0, 0xae, 0x0, 0x38, 0xc0, 0x67, 0xdb, 0x36, 0x80, 0x2b,
3758                0x0, 0xe, 0xf0, 0xd9, 0xf6, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x0, 0x30, 0xc, 0x83, 0x22,
3759                0x69, 0x7, 0xc6, 0xea, 0xff, 0x19, 0x0, 0x0, 0x80, 0xaa, 0x0, 0x0, 0x0, 0x0, 0x0,
3760                0x0, 0x8e, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x6a,
3761                0xf5, 0x63, 0x60, 0x60, 0x3, 0x0, 0xee, 0x8a, 0x88, 0x67,
3762            ],
3763        )
3764    }
3765
3766    #[test]
3767    fn gzip_no_header() {
3768        let config = DeflateConfig {
3769            level: 9,
3770            method: Method::Deflated,
3771            window_bits: 31, // gzip
3772            ..Default::default()
3773        };
3774
3775        let input = b"Hello World!";
3776        let os = gz_header::OS_CODE;
3777
3778        fuzz_based_test(
3779            input,
3780            config,
3781            &[
3782                31, 139, 8, 0, 0, 0, 0, 0, 2, os, 243, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
3783                81, 4, 0, 163, 28, 41, 28, 12, 0, 0, 0,
3784            ],
3785        )
3786    }
3787
3788    #[test]
3789    #[rustfmt::skip]
3790    fn gzip_stored_block_checksum() {
3791        fuzz_based_test(
3792            &[
3793                27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0,
3794            ],
3795            DeflateConfig {
3796                level: 0,
3797                method: Method::Deflated,
3798                window_bits: 26,
3799                mem_level: 6,
3800                strategy: Strategy::Default,
3801            },
3802            &[
3803                31, 139, 8, 0, 0, 0, 0, 0, 4, gz_header::OS_CODE, 1, 18, 0, 237, 255, 27, 27, 27, 27, 27, 27, 27,
3804                27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0, 60, 101, 156, 55, 18, 0, 0, 0,
3805            ],
3806        )
3807    }
3808
3809    #[test]
3810    fn gzip_header_pending_flush() {
3811        let extra = "aaaaaaaaaaaaaaaaaaaa\0";
3812        let name = "bbbbbbbbbbbbbbbbbbbb\0";
3813        let comment = "cccccccccccccccccccc\0";
3814
3815        let mut header = gz_header {
3816            text: 0,
3817            time: 0,
3818            xflags: 0,
3819            os: 0,
3820            extra: extra.as_ptr() as *mut _,
3821            extra_len: extra.len() as _,
3822            extra_max: 0,
3823            name: name.as_ptr() as *mut _,
3824            name_max: 0,
3825            comment: comment.as_ptr() as *mut _,
3826            comm_max: 0,
3827            hcrc: 1,
3828            done: 0,
3829        };
3830
3831        let config = DeflateConfig {
3832            window_bits: 31,
3833            mem_level: 1,
3834            ..Default::default()
3835        };
3836
3837        let mut stream = z_stream::default();
3838        assert_eq!(init(&mut stream, config), ReturnCode::Ok);
3839
3840        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
3841            unreachable!()
3842        };
3843
        assert_eq!(
            unsafe { set_header(stream, Some(&mut header)) },
            ReturnCode::Ok
        );
3845
3846        let input = b"Hello World\n";
3847        stream.next_in = input.as_ptr() as *mut _;
3848        stream.avail_in = input.len() as _;
3849
3850        let mut output = [0u8; 1024];
3851        stream.next_out = output.as_mut_ptr();
3852        stream.avail_out = 100;
3853
3854        assert_eq!(stream.state.bit_writer.pending.capacity(), 512);
3855
        // fill the pending buffer so that only 12 bytes remain; to write the name the
        // pending buffer must then be flushed, but there is insufficient output space
        // to do so (only 100 bytes)
        stream.state.bit_writer.pending.extend(&[0; 500]);
3859
3860        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::Ok);
3861
3862        // now try that again but with sufficient output space
3863        stream.avail_out = output.len() as _;
3864        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);
3865
3866        let n = stream.total_out as usize;
3867
3868        assert!(end(stream).is_ok());
3869
3870        let output_rs = &mut output[..n];
3871
3872        assert_eq!(output_rs.len(), 500 + 99);
3873    }
3874
3875    #[test]
3876    fn gzip_with_header() {
3877        // this test is here mostly so we get some MIRI action on the gzip header. A test that
3878        // compares behavior with zlib-ng is in the libz-rs-sys test suite
3879
3880        let extra = "some extra stuff\0";
3881        let name = "nomen est omen\0";
3882        let comment = "such comment\0";
3883
3884        let mut header = gz_header {
3885            text: 0,
3886            time: 0,
3887            xflags: 0,
3888            os: 0,
3889            extra: extra.as_ptr() as *mut _,
3890            extra_len: extra.len() as _,
3891            extra_max: 0,
3892            name: name.as_ptr() as *mut _,
3893            name_max: 0,
3894            comment: comment.as_ptr() as *mut _,
3895            comm_max: 0,
3896            hcrc: 1,
3897            done: 0,
3898        };
3899
3900        let config = DeflateConfig {
3901            window_bits: 31,
3902            ..Default::default()
3903        };
3904
3905        let mut stream = z_stream::default();
3906        assert_eq!(init(&mut stream, config), ReturnCode::Ok);
3907
3908        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
3909            unreachable!()
3910        };
3911
        assert_eq!(
            unsafe { set_header(stream, Some(&mut header)) },
            ReturnCode::Ok
        );
3913
3914        let input = b"Hello World\n";
3915        stream.next_in = input.as_ptr() as *mut _;
3916        stream.avail_in = input.len() as _;
3917
3918        let mut output = [0u8; 256];
3919        stream.next_out = output.as_mut_ptr();
3920        stream.avail_out = output.len() as _;
3921
3922        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);
3923
3924        let n = stream.total_out as usize;
3925
3926        assert!(end(stream).is_ok());
3927
3928        let output_rs = &mut output[..n];
3929
3930        assert_eq!(output_rs.len(), 81);
3931
3932        {
3933            let mut stream = z_stream::default();
3934
3935            let config = InflateConfig {
3936                window_bits: config.window_bits,
3937            };
3938
3939            assert_eq!(crate::inflate::init(&mut stream, config), ReturnCode::Ok);
3940
3941            let Some(stream) = (unsafe { InflateStream::from_stream_mut(&mut stream) }) else {
3942                unreachable!();
3943            };
3944
3945            stream.next_in = output_rs.as_mut_ptr() as _;
3946            stream.avail_in = output_rs.len() as _;
3947
3948            let mut output = [0u8; 12];
3949            stream.next_out = output.as_mut_ptr();
3950            stream.avail_out = output.len() as _;
3951
3952            let mut extra_buf = [0u8; 64];
3953            let mut name_buf = [0u8; 64];
3954            let mut comment_buf = [0u8; 64];
3955
3956            let mut header = gz_header {
3957                text: 0,
3958                time: 0,
3959                xflags: 0,
3960                os: 0,
3961                extra: extra_buf.as_mut_ptr(),
3962                extra_len: 0,
3963                extra_max: extra_buf.len() as _,
3964                name: name_buf.as_mut_ptr(),
3965                name_max: name_buf.len() as _,
3966                comment: comment_buf.as_mut_ptr(),
3967                comm_max: comment_buf.len() as _,
3968                hcrc: 0,
3969                done: 0,
3970            };
3971
3972            assert_eq!(
3973                unsafe { crate::inflate::get_header(stream, Some(&mut header)) },
3974                ReturnCode::Ok
3975            );
3976
3977            assert_eq!(
3978                unsafe { crate::inflate::inflate(stream, InflateFlush::Finish) },
3979                ReturnCode::StreamEnd
3980            );
3981
3982            crate::inflate::end(stream);
3983
3984            assert!(!header.comment.is_null());
3985            assert_eq!(
3986                unsafe { CStr::from_ptr(header.comment.cast()) }
3987                    .to_str()
3988                    .unwrap(),
3989                comment.trim_end_matches('\0')
3990            );
3991
3992            assert!(!header.name.is_null());
3993            assert_eq!(
3994                unsafe { CStr::from_ptr(header.name.cast()) }
3995                    .to_str()
3996                    .unwrap(),
3997                name.trim_end_matches('\0')
3998            );
3999
4000            assert!(!header.extra.is_null());
4001            assert_eq!(
4002                unsafe { CStr::from_ptr(header.extra.cast()) }
4003                    .to_str()
4004                    .unwrap(),
4005                extra.trim_end_matches('\0')
4006            );
4007        }
4008    }
4009
4010    #[test]
4011    fn insufficient_compress_space() {
4012        const DATA: &[u8] = include_bytes!("deflate/test-data/inflate_buf_error.dat");
4013
4014        fn helper(deflate_buf: &mut [u8], deflate_err: ReturnCode) -> ReturnCode {
4015            let config = DeflateConfig {
4016                level: 0,
4017                method: Method::Deflated,
4018                window_bits: 10,
4019                mem_level: 6,
4020                strategy: Strategy::Default,
4021            };
4022
4023            let (output, err) = compress_slice(deflate_buf, DATA, config);
4024            assert_eq!(err, deflate_err);
4025
4026            let config = InflateConfig {
4027                window_bits: config.window_bits,
4028            };
4029
4030            let mut uncompr = [0; 1 << 17];
4031            let (uncompr, err) = decompress_slice(&mut uncompr, output, config);
4032
4033            if err == ReturnCode::Ok {
4034                assert_eq!(DATA, uncompr);
4035            }
4036
4037            err
4038        }
4039
4040        let mut output = [0; 1 << 17];
4041
4042        // this is too little space
4043        assert_eq!(
4044            helper(&mut output[..1 << 16], ReturnCode::BufError),
4045            ReturnCode::DataError
4046        );
4047
4048        // this is sufficient space
4049        assert_eq!(helper(&mut output, ReturnCode::Ok), ReturnCode::Ok);
4050    }
4051
4052    fn test_flush(flush: DeflateFlush, expected: &[u8]) {
4053        let input = b"Hello World!\n";
4054
4055        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: 16 + crate::MAX_WBITS, // use gzip framing
4059            mem_level: DEF_MEM_LEVEL,
4060            strategy: Strategy::Default,
4061        };
4062
4063        let mut output_rs = vec![0; 128];
4064
4065        // with the flush modes that we test here, the deflate process still has `Status::Busy`,
4066        // and the `deflate` function will return `BufError` because more input is needed before
4067        // the flush can occur.
4068        let expected_err = ReturnCode::BufError;
4069
4070        let (rs, err) = compress_slice_with_flush(&mut output_rs, input, config, flush);
4071        assert_eq!(expected_err, err);
4072
4073        assert_eq!(rs, expected);
4074    }
4075
4076    #[test]
4077    #[rustfmt::skip]
4078    fn sync_flush() {
4079        test_flush(
4080            DeflateFlush::SyncFlush,
4081            &[
4082                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4083                81, 228, 2, 0, 0, 0, 255, 255,
4084            ],
4085        )
4086    }
4087
4088    #[test]
4089    #[rustfmt::skip]
4090    fn partial_flush() {
4091        test_flush(
4092            DeflateFlush::PartialFlush,
4093            &[
4094                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4095                81, 228, 2, 8,
4096            ],
4097        );
4098    }
4099
4100    #[test]
4101    #[rustfmt::skip]
4102    fn full_flush() {
4103        test_flush(
4104            DeflateFlush::FullFlush,
4105            &[
4106                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4107                81, 228, 2, 0, 0, 0, 255, 255,
4108            ],
4109        );
4110    }
4111
4112    #[test]
4113    #[rustfmt::skip]
4114    fn block_flush() {
4115        test_flush(
4116            DeflateFlush::Block,
4117            &[
4118                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4119                81, 228, 2,
4120            ],
4121        );
4122    }
4123
4124    #[test]
    // splits the input into two, deflates them separately and then joins the deflated byte streams
4126    // into something that can be correctly inflated again. This is the basic idea behind pigz, and
4127    // allows for parallel compression.
4128    fn split_deflate() {
4129        let input = "Hello World!\n";
4130
4131        let (input1, input2) = input.split_at(6);
4132
4133        let mut output1 = vec![0; 128];
4134        let mut output2 = vec![0; 128];
4135
4136        let config = DeflateConfig {
            level: 6,
            method: Method::Deflated,
            window_bits: 16 + crate::MAX_WBITS, // use gzip framing
4140            mem_level: DEF_MEM_LEVEL,
4141            strategy: Strategy::Default,
4142        };
4143
4144        // see also the docs on `SyncFlush`. it makes sure everything is flushed, ends on a byte
4145        // boundary, and that the final block does not have the "last block" bit set.
4146        let (prefix, err) = compress_slice_with_flush(
4147            &mut output1,
4148            input1.as_bytes(),
4149            config,
4150            DeflateFlush::SyncFlush,
4151        );
4152        assert_eq!(err, ReturnCode::BufError);
4153
4154        let (output2, err) = compress_slice_with_flush(
4155            &mut output2,
4156            input2.as_bytes(),
4157            config,
4158            DeflateFlush::Finish,
4159        );
4160        assert_eq!(err, ReturnCode::Ok);
4161
4162        let inflate_config = crate::inflate::InflateConfig {
4163            window_bits: 16 + 15,
4164        };
4165
        // cut off the 8-byte gzip trailer (4-byte crc followed by 4-byte length)
4167        let (suffix, end) = output2.split_at(output2.len() - 8);
4168        let (crc2, len2) = end.split_at(4);
4169        let crc2 = u32::from_le_bytes(crc2.try_into().unwrap());
4170
4171        // cuts off the gzip header (10 bytes) from the front
4172        let suffix = &suffix[10..];
4173
4174        let mut result: Vec<u8> = Vec::new();
4175        result.extend(prefix.iter());
4176        result.extend(suffix);
4177
4178        // it would be more proper to use `stream.total_in` here, but the slice helpers hide the
4179        // stream so we're cheating a bit here
4180        let len1 = input1.len() as u32;
4181        let len2 = u32::from_le_bytes(len2.try_into().unwrap());
4182        assert_eq!(len2 as usize, input2.len());
4183
4184        let crc1 = crate::crc32::crc32(0, input1.as_bytes());
4185        let crc = crate::crc32::crc32_combine(crc1, crc2, len2 as u64);
4186
4187        // combined crc of the parts should be the crc of the whole
4188        let crc_cheating = crate::crc32::crc32(0, input.as_bytes());
4189        assert_eq!(crc, crc_cheating);
4190
4191        // write the trailer
4192        result.extend(crc.to_le_bytes());
4193        result.extend((len1 + len2).to_le_bytes());
4194
4195        let mut output = vec![0; 128];
4196        let (output, err) = crate::inflate::decompress_slice(&mut output, &result, inflate_config);
4197        assert_eq!(err, ReturnCode::Ok);
4198
4199        assert_eq!(output, input.as_bytes());
4200    }
4201
4202    #[test]
4203    fn inflate_window_copy_slice() {
4204        let uncompressed = [
4205            9, 126, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0,
4206            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4207            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4208            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4209            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12,
4210            10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0,
4211            0, 0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4212            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
4213            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4214            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4215            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4216            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0,
4217            0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4218            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
4219            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4220            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4221            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4222            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 2, 0, 0, 0, 63, 1, 0, 12, 2,
4223            36, 0, 28, 0, 0, 0, 1, 0, 0, 63, 63, 13, 0, 0, 0, 0, 0, 0, 0, 63, 63, 63, 63, 0, 0, 0,
4224            0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0,
4225            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4226            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4227            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 0, 0, 0, 9, 0, 0, 0, 9, 0, 0, 12, 33, 2, 0, 0, 8,
4228            0, 4, 0, 0, 0, 12, 10, 41, 12, 10, 47,
4229        ];
4230
4231        let compressed = &[
4232            31, 139, 8, 0, 0, 0, 0, 0, 4, 3, 181, 193, 49, 14, 194, 32, 24, 128, 209, 175, 192, 0,
4233            228, 151, 232, 206, 66, 226, 226, 96, 60, 2, 113, 96, 235, 13, 188, 139, 103, 23, 106,
4234            104, 108, 100, 49, 169, 239, 185, 39, 11, 199, 7, 51, 39, 171, 248, 118, 226, 63, 52,
4235            157, 120, 86, 102, 78, 86, 209, 104, 58, 241, 84, 129, 166, 12, 4, 154, 178, 229, 202,
4236            30, 36, 130, 166, 19, 79, 21, 104, 202, 64, 160, 41, 91, 174, 236, 65, 34, 10, 200, 19,
4237            162, 206, 68, 96, 130, 156, 15, 188, 229, 138, 197, 157, 161, 35, 3, 87, 126, 245, 0,
4238            28, 224, 64, 146, 2, 139, 1, 196, 95, 196, 223, 94, 10, 96, 92, 33, 86, 2, 0, 0,
4239        ];
4240
4241        let config = InflateConfig { window_bits: 25 };
4242
4243        let mut dest_vec_rs = vec![0u8; uncompressed.len()];
4244        let (output_rs, error) =
4245            crate::inflate::decompress_slice(&mut dest_vec_rs, compressed, config);
4246
4247        assert_eq!(ReturnCode::Ok, error);
4248        assert_eq!(output_rs, uncompressed);
4249    }
4250
4251    #[test]
4252    fn hash_calc_difference() {
4253        let input = [
4254            0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0,
4255            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4256            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4257            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4258            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0,
4259            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4260            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0,
4261            0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0,
4262            0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 102, 102, 102, 102, 102,
4263            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
4264            102, 102, 102, 102, 102, 102, 102, 102, 112, 102, 102, 102, 102, 102, 102, 102, 102,
4265            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 0,
4266            0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4267            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4268            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4269            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4270            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4271            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0,
4272            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0, 0, 0, 0, 0, 0,
4273            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0,
4274            50, 0,
4275        ];
4276
4277        let config = DeflateConfig {
4278            level: 6,
4279            method: Method::Deflated,
4280            window_bits: 9,
4281            mem_level: 8,
4282            strategy: Strategy::Default,
4283        };
4284
4285        let expected = [
4286            24, 149, 99, 96, 96, 96, 96, 208, 6, 17, 112, 138, 129, 193, 128, 1, 29, 24, 50, 208,
4287            1, 200, 146, 169, 79, 24, 74, 59, 96, 147, 52, 71, 22, 70, 246, 88, 26, 94, 80, 128,
4288            83, 6, 162, 219, 144, 76, 183, 210, 5, 8, 67, 105, 36, 159, 35, 128, 57, 118, 97, 100,
4289            160, 197, 192, 192, 96, 196, 0, 0, 3, 228, 25, 128,
4290        ];
4291
4292        fuzz_based_test(&input, config, &expected);
4293    }
4294
4295    #[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
4296    mod _cache_lines {
4297        use super::State;
        // FIXME: once the zlib-rs Minimum Supported Rust Version is >= 1.77, switch to
        // core::mem::offset_of and move this _cache_lines module up a level, from the tests to super::
4300        use memoffset::offset_of;
4301
4302        const _: () = assert!(offset_of!(State, status) == 0);
4303        const _: () = assert!(offset_of!(State, _cache_line_0) == 64);
4304        const _: () = assert!(offset_of!(State, _cache_line_1) == 128);
4305        const _: () = assert!(offset_of!(State, _cache_line_2) == 192);
4306        const _: () = assert!(offset_of!(State, _cache_line_3) == 256);
4307    }
4308}