zlib_rs/deflate.rs

use core::{ffi::CStr, marker::PhantomData, mem::MaybeUninit, ops::ControlFlow, ptr::NonNull};

use crate::{
    adler32::adler32,
    allocate::Allocator,
    c_api::{gz_header, internal_state, z_checksum, z_stream},
    crc32::{crc32, Crc32Fold},
    trace,
    weak_slice::{WeakArrayMut, WeakSliceMut},
    DeflateFlush, ReturnCode, ADLER32_INITIAL_VALUE, CRC32_INITIAL_VALUE, MAX_WBITS, MIN_WBITS,
};

use self::{
    algorithm::CONFIGURATION_TABLE,
    hash_calc::{HashCalcVariant, RollHashCalc, StandardHashCalc},
    pending::Pending,
    sym_buf::SymBuf,
    trees_tbl::STATIC_LTREE,
    window::Window,
};

mod algorithm;
mod compare256;
mod hash_calc;
mod longest_match;
mod pending;
mod slide_hash;
mod sym_buf;
mod trees_tbl;
mod window;

// Position relative to the current window
pub(crate) type Pos = u16;

// SAFETY: This struct must have the same layout as [`z_stream`], so that casts and transmutations
// between the two can work without UB.
#[repr(C)]
pub struct DeflateStream<'a> {
    pub(crate) next_in: *mut crate::c_api::Bytef,
    pub(crate) avail_in: crate::c_api::uInt,
    pub(crate) total_in: crate::c_api::z_size,
    pub(crate) next_out: *mut crate::c_api::Bytef,
    pub(crate) avail_out: crate::c_api::uInt,
    pub(crate) total_out: crate::c_api::z_size,
    pub(crate) msg: *const core::ffi::c_char,
    pub(crate) state: &'a mut State<'a>,
    pub(crate) alloc: Allocator<'a>,
    pub(crate) data_type: core::ffi::c_int,
    pub(crate) adler: crate::c_api::z_checksum,
    pub(crate) reserved: crate::c_api::uLong,
}

unsafe impl Sync for DeflateStream<'_> {}
unsafe impl Send for DeflateStream<'_> {}

impl<'a> DeflateStream<'a> {
    // z_stream and DeflateStream must have the same layout. Do our best to check if this is true.
    // (imperfect check, but should catch most mistakes.)
    const _S: () = assert!(core::mem::size_of::<z_stream>() == core::mem::size_of::<Self>());
    const _A: () = assert!(core::mem::align_of::<z_stream>() == core::mem::align_of::<Self>());

    /// # Safety
    ///
    /// Behavior is undefined if any of the following conditions are violated:
    ///
    /// - `strm` satisfies the conditions of [`pointer::as_mut`]
    /// - if not `NULL`, `strm` was initialized using [`init`] or similar
    ///
    /// [`pointer::as_mut`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.as_mut
    #[inline(always)]
    pub unsafe fn from_stream_mut(strm: *mut z_stream) -> Option<&'a mut Self> {
        {
            // Safety: ptr points to a valid value of type z_stream (if non-null)
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        // SAFETY: DeflateStream has an equivalent layout as z_stream
        unsafe { strm.cast::<DeflateStream>().as_mut() }
    }

    /// # Safety
    ///
    /// Behavior is undefined if any of the following conditions are violated:
    ///
    /// - `strm` satisfies the conditions of [`pointer::as_ref`]
    /// - if not `NULL`, `strm` was initialized using [`init`] or similar
    ///
    /// [`pointer::as_ref`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.as_ref
    #[inline(always)]
    pub unsafe fn from_stream_ref(strm: *const z_stream) -> Option<&'a Self> {
        {
            // Safety: ptr points to a valid value of type z_stream (if non-null)
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        // SAFETY: DeflateStream has an equivalent layout as z_stream
        unsafe { strm.cast::<DeflateStream>().as_ref() }
    }

    fn as_z_stream_mut(&mut self) -> &mut z_stream {
        // SAFETY: a valid &mut DeflateStream is also a valid &mut z_stream
        unsafe { &mut *(self as *mut DeflateStream as *mut z_stream) }
    }

    pub fn pending(&self) -> (usize, u8) {
        (
            self.state.bit_writer.pending.pending,
            self.state.bit_writer.bits_used,
        )
    }

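    /// Constructs a new `DeflateStream` by running [`init`] on a default [`z_stream`].
    ///
    /// Panics if [`init`] does not succeed. A usage sketch (illustrative, not from the
    /// original source; it assumes an allocator feature like `rust-allocator` is enabled
    /// so that [`init`] can fall back to a default allocator):
    ///
    /// ```no_run
    /// use zlib_rs::deflate::{end, DeflateConfig, DeflateStream};
    ///
    /// let mut stream = DeflateStream::new(DeflateConfig::new(6));
    /// // ... drive compression with `deflate(&mut stream, ...)` ...
    /// let _ = end(&mut stream);
    /// ```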
    pub fn new(config: DeflateConfig) -> Self {
        let mut inner = crate::c_api::z_stream::default();

        let ret = crate::deflate::init(&mut inner, config);
        assert_eq!(ret, ReturnCode::Ok);

        unsafe { core::mem::transmute(inner) }
    }
}

/// number of elements in hash table
pub(crate) const HASH_SIZE: usize = 65536;
/// log2(HASH_SIZE)
const HASH_BITS: usize = 16;

/// Maximum value for memLevel in deflateInit2
const MAX_MEM_LEVEL: i32 = 9;
pub const DEF_MEM_LEVEL: i32 = if MAX_MEM_LEVEL > 8 { 8 } else { MAX_MEM_LEVEL };

#[repr(i32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Method {
    #[default]
    Deflated = 8,
}

impl TryFrom<i32> for Method {
    type Error = ();

    fn try_from(value: i32) -> Result<Self, Self::Error> {
        match value {
            8 => Ok(Self::Deflated),
            _ => Err(()),
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub struct DeflateConfig {
    pub level: i32,
    pub method: Method,
    pub window_bits: i32,
    pub mem_level: i32,
    pub strategy: Strategy,
}

#[cfg(any(test, feature = "__internal-test"))]
impl quickcheck::Arbitrary for DeflateConfig {
    fn arbitrary(g: &mut quickcheck::Gen) -> Self {
        let mem_levels: Vec<_> = (1..=9).collect();
        let levels: Vec<_> = (0..=9).collect();

        let mut window_bits = Vec::new();
        window_bits.extend(9..=15); // zlib
        window_bits.extend(9 + 16..=15 + 16); // gzip
        window_bits.extend(-15..=-9); // raw

        Self {
            level: *g.choose(&levels).unwrap(),
            method: Method::Deflated,
            window_bits: *g.choose(&window_bits).unwrap(),
            mem_level: *g.choose(&mem_levels).unwrap(),
            strategy: *g
                .choose(&[
                    Strategy::Default,
                    Strategy::Filtered,
                    Strategy::HuffmanOnly,
                    Strategy::Rle,
                    Strategy::Fixed,
                ])
                .unwrap(),
        }
    }
}

impl DeflateConfig {
    pub fn new(level: i32) -> Self {
        Self {
            level,
            ..Self::default()
        }
    }
}

impl Default for DeflateConfig {
    fn default() -> Self {
        Self {
            level: crate::c_api::Z_DEFAULT_COMPRESSION,
            method: Method::Deflated,
            window_bits: MAX_WBITS,
            mem_level: DEF_MEM_LEVEL,
            strategy: Strategy::Default,
        }
    }
}

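/// Initializes `stream` for compression according to `config`.
///
/// The sign and magnitude of `config.window_bits` select the wrapper, mirroring the
/// `wrap` computation in the body below. A sketch (illustrative; the exact accepted
/// ranges are validated by this function):
///
/// ```no_run
/// use zlib_rs::deflate::DeflateConfig;
///
/// let zlib = DeflateConfig { window_bits: 15, ..Default::default() }; // zlib wrapper
/// let gzip = DeflateConfig { window_bits: 15 + 16, ..Default::default() }; // gzip wrapper
/// let raw = DeflateConfig { window_bits: -15, ..Default::default() }; // raw deflate
/// ```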
pub fn init(stream: &mut z_stream, config: DeflateConfig) -> ReturnCode {
    let DeflateConfig {
        mut level,
        method: _,
        mut window_bits,
        mem_level,
        strategy,
    } = config;

    /* Todo: ignore strm->next_in if we use it as window */
    stream.msg = core::ptr::null_mut();

    // for safety we must really make sure that alloc and free are consistent.
    // this is a (slight) deviation from stock zlib. In this crate we pick the rust
    // allocator as the default, but `libz-rs-sys` always explicitly sets an allocator,
    // and can configure the C allocator
    #[cfg(feature = "rust-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_rust_allocator()
    }

    #[cfg(feature = "c-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_c_allocator()
    }

    if stream.zalloc.is_none() || stream.zfree.is_none() {
        return ReturnCode::StreamError;
    }

    if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        level = 6;
    }

    let wrap = if window_bits < 0 {
        if window_bits < -MAX_WBITS {
            return ReturnCode::StreamError;
        }
        window_bits = -window_bits;

        0
    } else if window_bits > MAX_WBITS {
        window_bits -= 16;
        2
    } else {
        1
    };

    if (!(1..=MAX_MEM_LEVEL).contains(&mem_level))
        || !(MIN_WBITS..=MAX_WBITS).contains(&window_bits)
        || !(0..=9).contains(&level)
        || (window_bits == 8 && wrap != 1)
    {
        return ReturnCode::StreamError;
    }

    let window_bits = if window_bits == 8 {
        9 /* until 256-byte window bug fixed */
    } else {
        window_bits as usize
    };

    let alloc = Allocator {
        zalloc: stream.zalloc.unwrap(),
        zfree: stream.zfree.unwrap(),
        opaque: stream.opaque,
        _marker: PhantomData,
    };

    let lit_bufsize = 1 << (mem_level + 6); // 16K elements by default
    let allocs = DeflateAllocOffsets::new(window_bits, lit_bufsize);

    // FIXME: pointer methods on NonNull are stable since 1.80.0
    let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
        return ReturnCode::MemError;
    };

    let w_size = 1 << window_bits;
    let align_offset = (allocation_start.as_ptr() as usize).next_multiple_of(64)
        - (allocation_start.as_ptr() as usize);
    let buf = unsafe { allocation_start.as_ptr().add(align_offset) };

    let (window, prev, head, pending, sym_buf) = unsafe {
        let window_ptr: *mut u8 = buf.add(allocs.window_pos);
        window_ptr.write_bytes(0u8, 2 * w_size);
        let window = Window::from_raw_parts(window_ptr, window_bits);

        // FIXME: write_bytes is stable for NonNull since 1.80.0
        let prev_ptr = buf.add(allocs.prev_pos).cast::<Pos>();
        prev_ptr.write_bytes(0, w_size);
        let prev = WeakSliceMut::from_raw_parts_mut(prev_ptr, w_size);

        let head_ptr = buf.add(allocs.head_pos).cast::<[Pos; HASH_SIZE]>();
        // Zero out the full array. `write_bytes` will write `1 * size_of::<[Pos; HASH_SIZE]>()` bytes.
        head_ptr.write_bytes(0, 1);
        let head = WeakArrayMut::<Pos, HASH_SIZE>::from_ptr(head_ptr);

        let pending_ptr = buf.add(allocs.pending_pos).cast::<MaybeUninit<u8>>();
        let pending = Pending::from_raw_parts(pending_ptr, 4 * lit_bufsize);

        let sym_buf_ptr = buf.add(allocs.sym_buf_pos);
        let sym_buf = SymBuf::from_raw_parts(sym_buf_ptr, lit_bufsize);

        (window, prev, head, pending, sym_buf)
    };

    let state = State {
        status: Status::Init,

        // window
        w_size,

        // allocated values
        window,
        prev,
        head,
        bit_writer: BitWriter::from_pending(pending),

        //
        lit_bufsize,

        //
        sym_buf,

        //
        level: level as i8, // set to zero again for testing?
        strategy,

        // these fields are not set explicitly at this point
        last_flush: 0,
        wrap,
        strstart: 0,
        block_start: 0,
        block_open: 0,
        window_size: 0,
        insert: 0,
        matches: 0,
        opt_len: 0,
        static_len: 0,
        lookahead: 0,
        ins_h: 0,
        max_chain_length: 0,
        max_lazy_match: 0,
        good_match: 0,
        nice_match: 0,

        //
        l_desc: TreeDesc::EMPTY,
        d_desc: TreeDesc::EMPTY,
        bl_desc: TreeDesc::EMPTY,

        //
        crc_fold: Crc32Fold::new(),
        gzhead: None,
        gzindex: 0,

        //
        match_start: 0,
        prev_match: 0,
        match_available: false,
        prev_length: 0,

        allocation_start,
        total_allocation_size: allocs.total_size,

        // just provide a valid default; gets set properly later
        hash_calc_variant: HashCalcVariant::Standard,
        _cache_line_0: (),
        _cache_line_1: (),
        _cache_line_2: (),
        _cache_line_3: (),
        _padding_0: [0; 16],
    };

    let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
    unsafe { state_allocation.write(state) };

    stream.state = state_allocation.cast::<internal_state>();

    let Some(stream) = (unsafe { DeflateStream::from_stream_mut(stream) }) else {
        if cfg!(debug_assertions) {
            unreachable!("we should have initialized the stream properly");
        }
        return ReturnCode::StreamError;
    };

    reset(stream)
}

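/// Dynamically updates the compression `level` and `strategy` (the analogue of zlib's
/// `deflateParams`). A sketch (illustrative; it assumes a freshly created stream, so
/// there is no buffered input that would first need to be flushed):
///
/// ```no_run
/// use zlib_rs::deflate::{params, DeflateConfig, DeflateStream, Strategy};
/// use zlib_rs::ReturnCode;
///
/// let mut stream = DeflateStream::new(DeflateConfig::new(6));
/// assert_eq!(params(&mut stream, 4, Strategy::Rle), ReturnCode::Ok);
/// ```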
pub fn params(stream: &mut DeflateStream, level: i32, strategy: Strategy) -> ReturnCode {
    let level = if level == crate::c_api::Z_DEFAULT_COMPRESSION {
        6
    } else {
        level
    };

    if !(0..=9).contains(&level) {
        return ReturnCode::StreamError;
    }

    let level = level as i8;

    let func = CONFIGURATION_TABLE[stream.state.level as usize].func;

    let state = &mut stream.state;

    // FIXME: use fn_addr_eq when it's available in our MSRV. The comparison returning false here
    // is not functionally incorrect, but would be inconsistent with zlib-ng.
    #[allow(unpredictable_function_pointer_comparisons)]
    if (strategy != state.strategy || func != CONFIGURATION_TABLE[level as usize].func)
        && state.last_flush != -2
    {
        // Flush the last buffer.
        let err = deflate(stream, DeflateFlush::Block);
        if err == ReturnCode::StreamError {
            return err;
        }

        let state = &mut stream.state;

        if stream.avail_in != 0
            || ((state.strstart as isize - state.block_start) + state.lookahead as isize) != 0
        {
            return ReturnCode::BufError;
        }
    }

    let state = &mut stream.state;

    if state.level != level {
        if state.level == 0 && state.matches != 0 {
            if state.matches == 1 {
                self::slide_hash::slide_hash(state);
            } else {
                state.head.as_mut_slice().fill(0);
            }
            state.matches = 0;
        }

        lm_set_level(state, level);
    }

    state.strategy = strategy;

    ReturnCode::Ok
}

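/// Sets a preset dictionary (the analogue of zlib's `deflateSetDictionary`); with the
/// zlib wrapper this must happen before any input is consumed. A sketch (illustrative):
///
/// ```no_run
/// use zlib_rs::deflate::{set_dictionary, DeflateConfig, DeflateStream};
/// use zlib_rs::ReturnCode;
///
/// let mut stream = DeflateStream::new(DeflateConfig::new(6));
/// assert_eq!(set_dictionary(&mut stream, b"common prefix"), ReturnCode::Ok);
/// ```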
pub fn set_dictionary(stream: &mut DeflateStream, mut dictionary: &[u8]) -> ReturnCode {
    let state = &mut stream.state;

    let wrap = state.wrap;

    if wrap == 2 || (wrap == 1 && state.status != Status::Init) || state.lookahead != 0 {
        return ReturnCode::StreamError;
    }

    // when using zlib wrappers, compute Adler-32 for provided dictionary
    if wrap == 1 {
        stream.adler = adler32(stream.adler as u32, dictionary) as z_checksum;
    }

    // avoid computing Adler-32 in read_buf
    state.wrap = 0;

    // if dictionary would fill window, just replace the history
    if dictionary.len() >= state.window.capacity() {
        if wrap == 0 {
            // clear the hash table
            state.head.as_mut_slice().fill(0);

            state.strstart = 0;
            state.block_start = 0;
            state.insert = 0;
        } else {
            /* already empty otherwise */
        }

        // use the tail
        dictionary = &dictionary[dictionary.len() - state.w_size..];
    }

    // insert dictionary into window and hash
    let avail = stream.avail_in;
    let next = stream.next_in;
    stream.avail_in = dictionary.len() as _;
    stream.next_in = dictionary.as_ptr() as *mut u8;
    fill_window(stream);

    while stream.state.lookahead >= STD_MIN_MATCH {
        let str = stream.state.strstart;
        let n = stream.state.lookahead - (STD_MIN_MATCH - 1);
        stream.state.insert_string(str, n);
        stream.state.strstart = str + n;
        stream.state.lookahead = STD_MIN_MATCH - 1;
        fill_window(stream);
    }

    let state = &mut stream.state;

    state.strstart += state.lookahead;
    state.block_start = state.strstart as _;
    state.insert = state.lookahead;
    state.lookahead = 0;
    state.prev_length = 0;
    state.match_available = false;

    // restore the state
    stream.next_in = next;
    stream.avail_in = avail;
    state.wrap = wrap;

    ReturnCode::Ok
}

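/// Inserts `bits` bits from `value` into the output ahead of the next deflate output
/// (the analogue of zlib's `deflatePrime`). A sketch (illustrative):
///
/// ```no_run
/// use zlib_rs::deflate::{prime, DeflateConfig, DeflateStream};
/// use zlib_rs::ReturnCode;
///
/// let mut stream = DeflateStream::new(DeflateConfig::new(6));
/// // queue the three bits 0b101 in front of the upcoming deflate output
/// assert_eq!(prime(&mut stream, 3, 0b101), ReturnCode::Ok);
/// ```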
pub fn prime(stream: &mut DeflateStream, mut bits: i32, value: i32) -> ReturnCode {
    // our logic actually supports up to 32 bits.
    debug_assert!(bits <= 16, "zlib only supports up to 16 bits here");

    let mut value64 = value as u64;

    let state = &mut stream.state;

    if bits < 0
        || bits > BitWriter::BIT_BUF_SIZE as i32
        || bits > (core::mem::size_of_val(&value) << 3) as i32
    {
        return ReturnCode::BufError;
    }

    let mut put;

    loop {
        put = BitWriter::BIT_BUF_SIZE - state.bit_writer.bits_used;
        let put = Ord::min(put as i32, bits);

        if state.bit_writer.bits_used == 0 {
            state.bit_writer.bit_buffer = value64;
        } else {
            state.bit_writer.bit_buffer |=
                (value64 & ((1 << put) - 1)) << state.bit_writer.bits_used;
        }

        state.bit_writer.bits_used += put as u8;
        state.bit_writer.flush_bits();
        value64 >>= put;
        bits -= put;

        if bits == 0 {
            break;
        }
    }

    ReturnCode::Ok
}

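/// Duplicates `source` into `dest`, giving the copy its own allocation (the analogue of
/// zlib's `deflateCopy`). A sketch (illustrative):
///
/// ```no_run
/// use core::mem::MaybeUninit;
/// use zlib_rs::deflate::{copy, DeflateConfig, DeflateStream};
/// use zlib_rs::ReturnCode;
///
/// let mut source = DeflateStream::new(DeflateConfig::new(6));
/// let mut dest = MaybeUninit::<DeflateStream>::uninit();
/// if copy(&mut dest, &mut source) == ReturnCode::Ok {
///     // SAFETY: on success, `copy` fully initialized `dest`
///     let _dest = unsafe { dest.assume_init() };
///     // both streams now own independent allocations; free each with `end`
/// }
/// ```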
pub fn copy<'a>(
    dest: &mut MaybeUninit<DeflateStream<'a>>,
    source: &mut DeflateStream<'a>,
) -> ReturnCode {
    let w_size = source.state.w_size;
    let window_bits = source.state.w_bits() as usize;
    let lit_bufsize = source.state.lit_bufsize;

    // SAFETY: source and dest are both mutable references, so guaranteed not to overlap.
    // dest being a reference to maybe uninitialized memory makes a copy of 1 DeflateStream valid.
    unsafe { core::ptr::copy_nonoverlapping(source, dest.as_mut_ptr(), 1) };

    let source_state = &source.state;
    let alloc = &source.alloc;

    let allocs = DeflateAllocOffsets::new(window_bits, lit_bufsize);

    let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
        return ReturnCode::MemError;
    };

    let align_offset = (allocation_start.as_ptr() as usize).next_multiple_of(64)
        - (allocation_start.as_ptr() as usize);
    let buf = unsafe { allocation_start.as_ptr().add(align_offset) };

    let (window, prev, head, pending, sym_buf) = unsafe {
        let window_ptr: *mut u8 = buf.add(allocs.window_pos);
        window_ptr
            .copy_from_nonoverlapping(source_state.window.as_ptr(), source_state.window.capacity());
        let window = Window::from_raw_parts(window_ptr, window_bits);

        // FIXME: pointer methods on NonNull are stable since 1.80.0
        let prev_ptr = buf.add(allocs.prev_pos).cast::<Pos>();
        prev_ptr.copy_from_nonoverlapping(source_state.prev.as_ptr(), source_state.prev.len());
        let prev = WeakSliceMut::from_raw_parts_mut(prev_ptr, w_size);

        // copy the full head array (the count is 1 because the pointee type is the whole array)
        let head_ptr = buf.add(allocs.head_pos).cast::<[Pos; HASH_SIZE]>();
        head_ptr.copy_from_nonoverlapping(source_state.head.as_ptr(), 1);
        let head = WeakArrayMut::<Pos, HASH_SIZE>::from_ptr(head_ptr);

        let pending_ptr = buf.add(allocs.pending_pos);
        let pending = source_state.bit_writer.pending.clone_to(pending_ptr);

        let sym_buf_ptr = buf.add(allocs.sym_buf_pos);
        let sym_buf = source_state.sym_buf.clone_to(sym_buf_ptr);

        (window, prev, head, pending, sym_buf)
    };

    let mut bit_writer = BitWriter::from_pending(pending);
    bit_writer.bits_used = source_state.bit_writer.bits_used;
    bit_writer.bit_buffer = source_state.bit_writer.bit_buffer;

    let dest_state = State {
        status: source_state.status,
        bit_writer,
        last_flush: source_state.last_flush,
        wrap: source_state.wrap,
        strategy: source_state.strategy,
        level: source_state.level,
        good_match: source_state.good_match,
        nice_match: source_state.nice_match,
        l_desc: source_state.l_desc.clone(),
        d_desc: source_state.d_desc.clone(),
        bl_desc: source_state.bl_desc.clone(),
        prev_match: source_state.prev_match,
        match_available: source_state.match_available,
        strstart: source_state.strstart,
        match_start: source_state.match_start,
        prev_length: source_state.prev_length,
        max_chain_length: source_state.max_chain_length,
        max_lazy_match: source_state.max_lazy_match,
        block_start: source_state.block_start,
        block_open: source_state.block_open,
        window,
        sym_buf,
        lit_bufsize: source_state.lit_bufsize,
        window_size: source_state.window_size,
        matches: source_state.matches,
        opt_len: source_state.opt_len,
        static_len: source_state.static_len,
        insert: source_state.insert,
        w_size: source_state.w_size,
        lookahead: source_state.lookahead,
        prev,
        head,
        ins_h: source_state.ins_h,
        hash_calc_variant: source_state.hash_calc_variant,
        crc_fold: source_state.crc_fold,
        gzhead: None,
        gzindex: source_state.gzindex,
        allocation_start,
        total_allocation_size: allocs.total_size,
        _cache_line_0: (),
        _cache_line_1: (),
        _cache_line_2: (),
        _cache_line_3: (),
        _padding_0: source_state._padding_0,
    };

    // write the cloned state into state_ptr
    let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
    unsafe { state_allocation.write(dest_state) }; // FIXME: write is stable for NonNull since 1.80.0

    // insert the state_ptr into `dest`
    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
    unsafe { core::ptr::write(field_ptr as *mut *mut State, state_allocation) };

    // update the gzhead field (it contains a mutable reference, so we need to be careful)
    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state.gzhead) };
    unsafe { core::ptr::copy(&source_state.gzhead, field_ptr, 1) };

    ReturnCode::Ok
}

/// # Returns
///
/// - Err when deflate is not done. A common cause is insufficient output space
/// - Ok otherwise
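///
/// A teardown sketch (illustrative; assumes `stream` was created with
/// [`DeflateStream::new`]):
///
/// ```no_run
/// use zlib_rs::deflate::{end, DeflateConfig, DeflateStream};
///
/// let mut stream = DeflateStream::new(DeflateConfig::new(6));
/// // the allocation is freed either way; `Err` only means deflate wasn't finished
/// let _ = end(&mut stream);
/// ```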
pub fn end<'a>(stream: &'a mut DeflateStream) -> Result<&'a mut z_stream, &'a mut z_stream> {
    let status = stream.state.status;
    let allocation_start = stream.state.allocation_start;
    let total_allocation_size = stream.state.total_allocation_size;
    let alloc = stream.alloc;

    let stream = stream.as_z_stream_mut();
    let _ = core::mem::replace(&mut stream.state, core::ptr::null_mut());

    unsafe { alloc.deallocate(allocation_start.as_ptr(), total_allocation_size) };

    match status {
        Status::Busy => Err(stream),
        _ => Ok(stream),
    }
}

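/// Resets the stream for a new round of compression, like ending and re-initializing it
/// but without freeing and reallocating the internal state. A sketch (illustrative):
///
/// ```no_run
/// use zlib_rs::deflate::{reset, DeflateConfig, DeflateStream};
/// use zlib_rs::ReturnCode;
///
/// let mut stream = DeflateStream::new(DeflateConfig::new(6));
/// assert_eq!(reset(&mut stream), ReturnCode::Ok);
/// ```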
pub fn reset(stream: &mut DeflateStream) -> ReturnCode {
    let ret = reset_keep(stream);

    if ret == ReturnCode::Ok {
        lm_init(stream.state);
    }

    ret
}

pub fn reset_keep(stream: &mut DeflateStream) -> ReturnCode {
    stream.total_in = 0;
    stream.total_out = 0;
    stream.msg = core::ptr::null_mut();
    stream.data_type = crate::c_api::Z_UNKNOWN;

    let state = &mut stream.state;

    state.bit_writer.pending.reset_keep();

    // can be made negative by deflate(..., Z_FINISH);
    state.wrap = state.wrap.abs();

    state.status = match state.wrap {
        2 => Status::GZip,
        _ => Status::Init,
    };

    stream.adler = match state.wrap {
        2 => {
            state.crc_fold = Crc32Fold::new();
            CRC32_INITIAL_VALUE as _
        }
        _ => ADLER32_INITIAL_VALUE as _,
    };

    state.last_flush = -2;

    state.zng_tr_init();

    ReturnCode::Ok
}

fn lm_init(state: &mut State) {
    state.window_size = 2 * state.w_size;

    // zlib uses CLEAR_HASH here
    state.head.as_mut_slice().fill(0);

    // Set the default configuration parameters:
    lm_set_level(state, state.level);

    state.strstart = 0;
    state.block_start = 0;
    state.lookahead = 0;
    state.insert = 0;
    state.prev_length = 0;
    state.match_available = false;
    state.match_start = 0;
    state.ins_h = 0;
}

fn lm_set_level(state: &mut State, level: i8) {
    state.max_lazy_match = CONFIGURATION_TABLE[level as usize].max_lazy;
    state.good_match = CONFIGURATION_TABLE[level as usize].good_length;
    state.nice_match = CONFIGURATION_TABLE[level as usize].nice_length;
    state.max_chain_length = CONFIGURATION_TABLE[level as usize].max_chain;

    state.hash_calc_variant = HashCalcVariant::for_max_chain_length(state.max_chain_length);
    state.level = level;
}

pub fn tune(
    stream: &mut DeflateStream,
    good_length: usize,
    max_lazy: usize,
    nice_length: usize,
    max_chain: usize,
) -> ReturnCode {
    stream.state.good_match = good_length as u16;
    stream.state.max_lazy_match = max_lazy as u16;
    stream.state.nice_match = nice_length as u16;
    stream.state.max_chain_length = max_chain as u16;

    ReturnCode::Ok
}

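/// A pair of 16-bit fields that mirrors zlib's `ct_data`: depending on the phase of tree
/// construction, `a` holds either a frequency or a code, and `b` either a parent ("dad")
/// index or a bit length, as the accessors below spell out.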
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct Value {
    a: u16,
    b: u16,
}

impl Value {
    pub(crate) const fn new(a: u16, b: u16) -> Self {
        Self { a, b }
    }

    pub(crate) fn freq_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    pub(crate) fn code_mut(&mut self) -> &mut u16 {
        &mut self.a
    }

    pub(crate) fn dad_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    pub(crate) fn len_mut(&mut self) -> &mut u16 {
        &mut self.b
    }

    #[inline(always)]
    pub(crate) const fn freq(self) -> u16 {
        self.a
    }

    #[inline(always)]
    pub(crate) const fn code(self) -> u16 {
        self.a
    }

    #[inline(always)]
    pub(crate) const fn dad(self) -> u16 {
        self.b
    }

    #[inline(always)]
    pub(crate) const fn len(self) -> u16 {
        self.b
    }
}

/// number of length codes, not counting the special END_BLOCK code
pub(crate) const LENGTH_CODES: usize = 29;

/// number of literal bytes 0..255
const LITERALS: usize = 256;

/// number of Literal or Length codes, including the END_BLOCK code
pub(crate) const L_CODES: usize = LITERALS + 1 + LENGTH_CODES;

/// number of distance codes
pub(crate) const D_CODES: usize = 30;

/// number of codes used to transfer the bit lengths
const BL_CODES: usize = 19;

/// maximum heap size
const HEAP_SIZE: usize = 2 * L_CODES + 1;

/// all codes must not exceed MAX_BITS bits
const MAX_BITS: usize = 15;

/// Bit length codes must not exceed MAX_BL_BITS bits
const MAX_BL_BITS: usize = 7;

pub(crate) const DIST_CODE_LEN: usize = 512;

struct BitWriter<'a> {
    pub(crate) pending: Pending<'a>, // output still pending
    pub(crate) bit_buffer: u64,
    pub(crate) bits_used: u8,

    /// total bit length of compressed file (NOTE: zlib-ng uses a 32-bit integer here)
    #[cfg(feature = "ZLIB_DEBUG")]
    compressed_len: usize,
    /// bit length of compressed data sent (NOTE: zlib-ng uses a 32-bit integer here)
    #[cfg(feature = "ZLIB_DEBUG")]
    bits_sent: usize,
}

#[inline]
const fn encode_len(ltree: &[Value], lc: u8) -> (u64, usize) {
    let mut lc = lc as usize;

    /* Send the length code, len is the match length - STD_MIN_MATCH */
    let code = self::trees_tbl::LENGTH_CODE[lc] as usize;
    let c = code + LITERALS + 1;
    assert!(c < L_CODES, "bad l_code");
    // send_code_trace(s, c);

    let lnode = ltree[c];
    let mut match_bits: u64 = lnode.code() as u64;
    let mut match_bits_len = lnode.len() as usize;
    let extra = StaticTreeDesc::EXTRA_LBITS[code] as usize;
    if extra != 0 {
        lc -= self::trees_tbl::BASE_LENGTH[code] as usize;
        match_bits |= (lc as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}

#[inline]
const fn encode_dist(dtree: &[Value], mut dist: u16) -> (u64, usize) {
    dist -= 1; /* dist is now the match distance - 1 */
    let code = State::d_code(dist as usize) as usize;
    assert!(code < D_CODES, "bad d_code");
    // send_code_trace(s, code);

    /* Send the distance code */
    let dnode = dtree[code];
    let mut match_bits = dnode.code() as u64;
    let mut match_bits_len = dnode.len() as usize;
    let extra = StaticTreeDesc::EXTRA_DBITS[code] as usize;
    if extra != 0 {
        dist -= self::trees_tbl::BASE_DIST[code];
        match_bits |= (dist as u64) << match_bits_len;
        match_bits_len += extra;
    }

    (match_bits, match_bits_len)
}

impl<'a> BitWriter<'a> {
    pub(crate) const BIT_BUF_SIZE: u8 = 64;

    fn from_pending(pending: Pending<'a>) -> Self {
        Self {
            pending,
            bit_buffer: 0,
            bits_used: 0,

            #[cfg(feature = "ZLIB_DEBUG")]
            compressed_len: 0,
            #[cfg(feature = "ZLIB_DEBUG")]
            bits_sent: 0,
        }
    }

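    /// Moves the whole bytes of the bit buffer into `pending`, keeping any partial byte
    /// in the buffer. Worked example (illustrative): with 13 bits used, one byte (8 bits)
    /// is written out and 5 bits remain; with only 7 bits used, nothing is written.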
    fn flush_bits(&mut self) {
        debug_assert!(self.bits_used <= 64);
        let removed = self.bits_used.saturating_sub(7).next_multiple_of(8);
        let keep_bytes = self.bits_used / 8; // can never divide by zero

        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used -= removed;
        self.bit_buffer = self.bit_buffer.checked_shr(removed as u32).unwrap_or(0);
    }

    fn emit_align(&mut self) {
        debug_assert!(self.bits_used <= 64);
        let keep_bytes = self.bits_used.div_ceil(8);
        let src = &self.bit_buffer.to_le_bytes();
        self.pending.extend(&src[..keep_bytes as usize]);

        self.bits_used = 0;
        self.bit_buffer = 0;

        self.sent_bits_align();
    }

    fn send_bits_trace(&self, _value: u64, _len: u8) {
        trace!(" l {:>2} v {:>4x} ", _len, _value);
    }

    fn cmpr_bits_add(&mut self, _len: usize) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.compressed_len += _len;
        }
    }

    fn cmpr_bits_align(&mut self) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.compressed_len = self.compressed_len.next_multiple_of(8);
        }
    }

    fn sent_bits_add(&mut self, _len: usize) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bits_sent += _len;
        }
    }

    fn sent_bits_align(&mut self) {
        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bits_sent = self.bits_sent.next_multiple_of(8);
        }
    }

    #[inline(always)]
    fn send_bits(&mut self, val: u64, len: u8) {
        debug_assert!(len <= 64);
        debug_assert!(self.bits_used <= 64);

        let total_bits = len + self.bits_used;

        self.send_bits_trace(val, len);
        self.sent_bits_add(len as usize);

        if total_bits < Self::BIT_BUF_SIZE {
            self.bit_buffer |= val << self.bits_used;
            self.bits_used = total_bits;
        } else {
            self.send_bits_overflow(val, total_bits);
        }
    }

    fn send_bits_overflow(&mut self, val: u64, total_bits: u8) {
        if self.bits_used == Self::BIT_BUF_SIZE {
            self.pending.extend(&self.bit_buffer.to_le_bytes());
            self.bit_buffer = val;
            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
        } else {
            self.bit_buffer |= val << self.bits_used;
            self.pending.extend(&self.bit_buffer.to_le_bytes());
            self.bit_buffer = val >> (Self::BIT_BUF_SIZE - self.bits_used);
            self.bits_used = total_bits - Self::BIT_BUF_SIZE;
        }
    }

    fn send_code(&mut self, code: usize, tree: &[Value]) {
        let node = tree[code];
        self.send_bits(node.code() as u64, node.len() as u8)
    }

    /// Send one empty static block to give enough lookahead for inflate.
    /// This takes 10 bits, of which 7 may remain in the bit buffer.
    pub fn align(&mut self) {
        self.emit_tree(BlockType::StaticTrees, false);
        self.emit_end_block(&STATIC_LTREE, false);
        self.flush_bits();
    }

    pub(crate) fn emit_tree(&mut self, block_type: BlockType, is_last_block: bool) {
        let header_bits = ((block_type as u64) << 1) | (is_last_block as u64);
        self.send_bits(header_bits, 3);
        trace!("\n--- Emit Tree: Last: {}\n", is_last_block as u8);
    }

    pub(crate) fn emit_end_block_and_align(&mut self, ltree: &[Value], is_last_block: bool) {
        self.emit_end_block(ltree, is_last_block);

        if is_last_block {
            self.emit_align();
        }
    }

    fn emit_end_block(&mut self, ltree: &[Value], _is_last_block: bool) {
        const END_BLOCK: usize = 256;
        self.send_code(END_BLOCK, ltree);

        trace!(
            "\n+++ Emit End Block: Last: {} Pending: {} Total Out: {}\n",
            _is_last_block as u8,
            self.pending.pending().len(),
            "<unknown>"
        );
    }

    pub(crate) fn emit_lit(&mut self, ltree: &[Value], c: u8) -> u16 {
        self.send_code(c as usize, ltree);

        #[cfg(feature = "ZLIB_DEBUG")]
        if let Some(c) = char::from_u32(c as u32) {
            if isgraph(c as u8) {
                trace!(" '{}' ", c);
            }
        }

        ltree[c as usize].len()
    }

    pub(crate) fn emit_dist(
        &mut self,
        ltree: &[Value],
        dtree: &[Value],
        lc: u8,
        dist: u16,
    ) -> usize {
        let (mut match_bits, mut match_bits_len) = encode_len(ltree, lc);

        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);

        match_bits |= dist_match_bits << match_bits_len;
        match_bits_len += dist_match_bits_len;

        self.send_bits(match_bits, match_bits_len as u8);

        match_bits_len
    }

    pub(crate) fn emit_dist_static(&mut self, lc: u8, dist: u16) -> usize {
        let precomputed_len = trees_tbl::STATIC_LTREE_ENCODINGS[lc as usize];
        let mut match_bits = precomputed_len.code() as u64;
        let mut match_bits_len = precomputed_len.len() as usize;

        let dtree = self::trees_tbl::STATIC_DTREE.as_slice();
        let (dist_match_bits, dist_match_bits_len) = encode_dist(dtree, dist);

        match_bits |= dist_match_bits << match_bits_len;
        match_bits_len += dist_match_bits_len;

        self.send_bits(match_bits, match_bits_len as u8);

        match_bits_len
    }

    fn compress_block_help(&mut self, sym_buf: &SymBuf, ltree: &[Value], dtree: &[Value]) {
        for (dist, lc) in sym_buf.iter() {
            match dist {
                0 => self.emit_lit(ltree, lc) as usize,
                _ => self.emit_dist(ltree, dtree, lc, dist),
            };
        }

        self.emit_end_block(ltree, false)
    }

    fn send_tree(&mut self, tree: &[Value], bl_tree: &[Value], max_code: usize) {
        /* tree: the tree to be scanned */
        /* max_code: the largest code of non-zero frequency */
        let mut prevlen: isize = -1; /* last emitted length */
        let mut curlen; /* length of current code */
        let mut nextlen = tree[0].len(); /* length of next code */
        let mut count = 0; /* repeat count of the current code */
        let mut max_count = 7; /* max repeat count */
        let mut min_count = 4; /* min repeat count */

        /* tree[max_code+1].Len = -1; */
        /* guard already set */
        if nextlen == 0 {
            max_count = 138;
            min_count = 3;
        }

        for n in 0..=max_code {
            curlen = nextlen;
            nextlen = tree[n + 1].len();
            count += 1;
            if count < max_count && curlen == nextlen {
                continue;
            } else if count < min_count {
                loop {
                    self.send_code(curlen as usize, bl_tree);

                    count -= 1;
                    if count == 0 {
                        break;
                    }
                }
            } else if curlen != 0 {
                if curlen as isize != prevlen {
                    self.send_code(curlen as usize, bl_tree);
                    count -= 1;
                }
                assert!((3..=6).contains(&count), " 3_6?");
                self.send_code(REP_3_6, bl_tree);
                self.send_bits(count - 3, 2);
            } else if count <= 10 {
                self.send_code(REPZ_3_10, bl_tree);
                self.send_bits(count - 3, 3);
            } else {
                self.send_code(REPZ_11_138, bl_tree);
                self.send_bits(count - 11, 7);
            }

            count = 0;
            prevlen = curlen as isize;

            if nextlen == 0 {
                max_count = 138;
                min_count = 3;
            } else if curlen == nextlen {
                max_count = 6;
                min_count = 3;
            } else {
                max_count = 7;
                min_count = 4;
            }
        }
    }
}

#[repr(C, align(64))]
pub(crate) struct State<'a> {
    status: Status,

    last_flush: i8, /* value of flush param for previous deflate call */

    pub(crate) wrap: i8, /* bit 0 true for zlib, bit 1 true for gzip */

    pub(crate) strategy: Strategy,
    pub(crate) level: i8,

    /// Whether or not a block is currently open for the QUICK deflation scheme.
    /// 0 if the block is closed, 1 if there is an active block, or 2 if there
    /// is an active block and it is the last block.
    pub(crate) block_open: u8,

    pub(crate) hash_calc_variant: HashCalcVariant,

    pub(crate) match_available: bool, /* set if previous match exists */

    /// Use a faster search when the previous match is longer than this
    pub(crate) good_match: u16,

    /// Stop searching when current match exceeds this
    pub(crate) nice_match: u16,

    pub(crate) match_start: Pos, /* start of matching string */
    pub(crate) prev_match: Pos,  /* previous match */
    pub(crate) strstart: usize,  /* start of string to insert */

    pub(crate) window: Window<'a>,
    pub(crate) w_size: usize, /* LZ77 window size (32K by default) */

    pub(crate) lookahead: usize, /* number of valid bytes ahead in window */

    _cache_line_0: (),

    /// prev[N], where N is an offset in the current window, contains the offset in the window
    /// of the previous 4-byte sequence that hashes to the same value as the 4-byte sequence
    /// starting at N. Together with head, prev forms a chained hash table that can be used
    /// to find earlier strings in the window that are potential matches for new input being
    /// deflated.
    pub(crate) prev: WeakSliceMut<'a, u16>,
    /// head[H] contains the offset of the last 4-character sequence seen so far in
    /// the current window that hashes to H (as calculated using the hash_calc_variant).
    pub(crate) head: WeakArrayMut<'a, u16, HASH_SIZE>,

    /// Length of the best match at previous step. Matches not greater than this
    /// are discarded. This is used in the lazy match evaluation.
    pub(crate) prev_length: u16,

    /// To speed up deflation, hash chains are never searched beyond this length.
    /// A higher limit improves compression ratio but degrades the speed.
    pub(crate) max_chain_length: u16,

    // TODO untangle this mess! zlib uses the same field differently based on compression level
    // we should just have 2 fields for clarity!
    //
    // Insert new strings in the hash table only if the match length is not
    // greater than this length. This saves time but degrades compression.
    // max_insert_length is used only for compression levels <= 6.
    // define max_insert_length  max_lazy_match
    /// Attempt to find a better match only when the current match is strictly smaller
    /// than this value. This mechanism is used only for compression levels >= 4.
    pub(crate) max_lazy_match: u16,

    /// number of string matches in current block
    /// NOTE: this is a saturating 8-bit counter, to help keep the struct compact. The code that
    /// makes decisions based on this field only cares whether the count is greater than 2, so
    /// an 8-bit counter is sufficient.
    pub(crate) matches: u8,

    /// Window position at the beginning of the current output block. Gets
    /// negative when the window is moved backwards.
    pub(crate) block_start: isize,

    pub(crate) sym_buf: SymBuf<'a>,

    _cache_line_1: (),

    /// Size of match buffer for literals/lengths.  There are 4 reasons for
    /// limiting lit_bufsize to 64K:
    ///   - frequencies can be kept in 16 bit counters
    ///   - if compression is not successful for the first block, all input
    ///     data is still in the window so we can still emit a stored block even
    ///     when input comes from standard input.  (This can also be done for
    ///     all blocks if lit_bufsize is not greater than 32K.)
    ///   - if compression is not successful for a file smaller than 64K, we can
    ///     even emit a stored file instead of a stored block (saving 5 bytes).
    ///     This is applicable only for zip (not gzip or zlib).
    ///   - creating new Huffman trees less frequently may not provide fast
    ///     adaptation to changes in the input data statistics. (Take for
    ///     example a binary file with poorly compressible code followed by
    ///     a highly compressible string table.) Smaller buffer sizes give
    ///     fast adaptation but have of course the overhead of transmitting
    ///     trees more frequently.
    ///   - I can't count above 4
    lit_bufsize: usize,

    /// Actual size of window: 2*w_size, except when the user input buffer is directly used as sliding window.
    pub(crate) window_size: usize,

    bit_writer: BitWriter<'a>,

    _cache_line_2: (),

    /// bit length of current block with optimal trees
    opt_len: usize,
    /// bit length of current block with static trees
    static_len: usize,

    /// bytes at end of window left to insert
    pub(crate) insert: usize,

    /// hash index of string to be inserted
    pub(crate) ins_h: u32,

    gzhead: Option<&'a mut gz_header>,
    gzindex: usize,

    _padding_0: [u8; 16],

    _cache_line_3: (),

    crc_fold: crate::crc32::Crc32Fold,

    /// The (unaligned) address of the state allocation that can be passed to zfree.
    allocation_start: NonNull<u8>,
    /// Total size of the allocation in bytes.
    total_allocation_size: usize,

    l_desc: TreeDesc<HEAP_SIZE>,             /* literal and length tree */
    d_desc: TreeDesc<{ 2 * D_CODES + 1 }>,   /* distance tree */
    bl_desc: TreeDesc<{ 2 * BL_CODES + 1 }>, /* Huffman tree for bit lengths */
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]
#[cfg_attr(feature = "__internal-fuzz", derive(arbitrary::Arbitrary))]
pub enum Strategy {
    #[default]
    Default = 0,
    Filtered = 1,
    HuffmanOnly = 2,
    Rle = 3,
    Fixed = 4,
}

impl TryFrom<i32> for Strategy {
    type Error = ();

    fn try_from(value: i32) -> Result<Self, Self::Error> {
        match value {
            0 => Ok(Strategy::Default),
            1 => Ok(Strategy::Filtered),
            2 => Ok(Strategy::HuffmanOnly),
            3 => Ok(Strategy::Rle),
            4 => Ok(Strategy::Fixed),
            _ => Err(()),
        }
    }
}

#[derive(Debug, PartialEq, Eq)]
enum DataType {
    Binary = 0,
    Text = 1,
    Unknown = 2,
}

impl<'a> State<'a> {
    pub const BIT_BUF_SIZE: u8 = BitWriter::BIT_BUF_SIZE;

    // log2(w_size)  (in the range MIN_WBITS..=MAX_WBITS)
    pub(crate) fn w_bits(&self) -> u32 {
        self.w_size.trailing_zeros()
    }

    pub(crate) fn w_mask(&self) -> usize {
        self.w_size - 1
    }

    pub(crate) fn max_dist(&self) -> usize {
        self.w_size - MIN_LOOKAHEAD
    }

    // TODO untangle this mess! zlib uses the same field differently based on compression level
    // we should just have 2 fields for clarity!
    pub(crate) fn max_insert_length(&self) -> usize {
        self.max_lazy_match as usize
    }

    /// Total size of the pending buf. But because `pending` shares memory with `sym_buf`, this is
    /// not the number of bytes that are actually in `pending`!
    pub(crate) fn pending_buf_size(&self) -> usize {
        self.lit_bufsize * 4
    }

    #[inline(always)]
    pub(crate) fn update_hash(&self, h: u32, val: u32) -> u32 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::update_hash(h, val),
            HashCalcVariant::Roll => RollHashCalc::update_hash(h, val),
        }
    }

    #[inline(always)]
    pub(crate) fn quick_insert_string(&mut self, string: usize) -> u16 {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::quick_insert_string(self, string),
            HashCalcVariant::Roll => RollHashCalc::quick_insert_string(self, string),
        }
    }

    #[inline(always)]
    pub(crate) fn insert_string(&mut self, string: usize, count: usize) {
        match self.hash_calc_variant {
            HashCalcVariant::Standard => StandardHashCalc::insert_string(self, string, count),
            HashCalcVariant::Roll => RollHashCalc::insert_string(self, string, count),
        }
    }

    #[inline(always)]
    pub(crate) fn tally_lit(&mut self, unmatched: u8) -> bool {
        Self::tally_lit_help(&mut self.sym_buf, &mut self.l_desc, unmatched)
    }

    // This helper is to work around an ownership issue in algorithm/medium.
    pub(crate) fn tally_lit_help(
        sym_buf: &mut SymBuf,
        l_desc: &mut TreeDesc<HEAP_SIZE>,
        unmatched: u8,
    ) -> bool {
        sym_buf.push_lit(unmatched);

        *l_desc.dyn_tree[unmatched as usize].freq_mut() += 1;

        assert!(
            unmatched as usize <= STD_MAX_MATCH - STD_MIN_MATCH,
            "zng_tr_tally: bad literal"
        );

        // signal that the current block should be flushed
        sym_buf.should_flush_block()
    }

    const fn d_code(dist: usize) -> u8 {
        let index = if dist < 256 { dist } else { 256 + (dist >> 7) };
        self::trees_tbl::DIST_CODE[index]
    }

    #[inline(always)]
    pub(crate) fn tally_dist(&mut self, mut dist: usize, len: usize) -> bool {
        self.sym_buf.push_dist(dist as u16, len as u8);

        self.matches = self.matches.saturating_add(1);
        dist -= 1;

        assert!(
            dist < self.max_dist() && Self::d_code(dist) < D_CODES as u8,
            "tally_dist: bad match"
        );

        let index = self::trees_tbl::LENGTH_CODE[len] as usize + LITERALS + 1;
        *self.l_desc.dyn_tree[index].freq_mut() += 1;

        *self.d_desc.dyn_tree[Self::d_code(dist) as usize].freq_mut() += 1;

        // signal that the current block should be flushed
        self.sym_buf.should_flush_block()
    }

    fn detect_data_type(dyn_tree: &[Value]) -> DataType {
        // set bits 0..6, 14..25, and 28..31
        // 0xf3ffc07f = binary 11110011111111111100000001111111
        const NON_TEXT: u64 = 0xf3ffc07f;
        let mut mask = NON_TEXT;

        /* Check for non-textual bytes. */
        for value in &dyn_tree[0..32] {
            if (mask & 1) != 0 && value.freq() != 0 {
                return DataType::Binary;
            }

            mask >>= 1;
        }

        /* Check for textual bytes. */
        if dyn_tree[9].freq() != 0 || dyn_tree[10].freq() != 0 || dyn_tree[13].freq() != 0 {
            return DataType::Text;
        }

        if dyn_tree[32..LITERALS].iter().any(|v| v.freq() != 0) {
            return DataType::Text;
        }

        // there are no explicit text or non-text bytes. The stream is either empty or has only
        // tolerated bytes
        DataType::Binary
    }

    fn compress_block_static_trees(&mut self) {
        let ltree = self::trees_tbl::STATIC_LTREE.as_slice();
        for (dist, lc) in self.sym_buf.iter() {
            match dist {
                0 => self.bit_writer.emit_lit(ltree, lc) as usize,
                _ => self.bit_writer.emit_dist_static(lc, dist),
            };
        }

        self.bit_writer.emit_end_block(ltree, false)
    }

    fn compress_block_dynamic_trees(&mut self) {
        self.bit_writer.compress_block_help(
            &self.sym_buf,
            &self.l_desc.dyn_tree,
            &self.d_desc.dyn_tree,
        );
    }

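    /// Computes the two-byte zlib header. The final `h + 31 - (h % 31)` rounds `h` up so
    /// that the 16-bit header is divisible by 31 (the FCHECK rule from RFC 1950). Worked
    /// example (illustrative; level 6, window_bits 15, no preset dictionary):
    /// `h = 0x7880`, and `0x7880 % 31 == 3`, so the header is `0x7880 + 31 - 3 == 0x789C`.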
    fn header(&self) -> u16 {
        // preset dictionary flag in zlib header
        const PRESET_DICT: u16 = 0x20;

        // The deflate compression method (the only one supported in this version)
        const Z_DEFLATED: u16 = 8;

        let dict = match self.strstart {
            0 => 0,
            _ => PRESET_DICT,
        };

        let h = ((Z_DEFLATED + ((self.w_bits() as u16 - 8) << 4)) << 8)
            | (self.level_flags() << 6)
            | dict;

        h + 31 - (h % 31)
    }

    fn level_flags(&self) -> u16 {
        if self.strategy >= Strategy::HuffmanOnly || self.level < 2 {
            0
        } else if self.level < 6 {
            1
        } else if self.level == 6 {
            2
        } else {
            3
        }
    }

    fn zng_tr_init(&mut self) {
        self.l_desc.stat_desc = &StaticTreeDesc::L;

        self.d_desc.stat_desc = &StaticTreeDesc::D;

        self.bl_desc.stat_desc = &StaticTreeDesc::BL;

        self.bit_writer.bit_buffer = 0;
        self.bit_writer.bits_used = 0;

        #[cfg(feature = "ZLIB_DEBUG")]
        {
            self.bit_writer.compressed_len = 0;
            self.bit_writer.bits_sent = 0;
        }

        // Initialize the first block of the first file:
        self.init_block();
    }

    /// initializes a new block
    fn init_block(&mut self) {
        // Initialize the trees.
        // TODO would a memset also work here?

        for value in &mut self.l_desc.dyn_tree[..L_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.d_desc.dyn_tree[..D_CODES] {
            *value.freq_mut() = 0;
        }

        for value in &mut self.bl_desc.dyn_tree[..BL_CODES] {
            *value.freq_mut() = 0;
        }

        // end of block literal code
        const END_BLOCK: usize = 256;

        *self.l_desc.dyn_tree[END_BLOCK].freq_mut() = 1;
        self.opt_len = 0;
        self.static_len = 0;
        self.sym_buf.clear();
        self.matches = 0;
    }
}

#[repr(u8)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Status {
    Init = 1,

    GZip = 4,
    Extra = 5,
    Name = 6,
    Comment = 7,
    Hcrc = 8,

    Busy = 2,
    Finish = 3,
}

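/// Examples (illustrative): `rank_flush(0) == 0` (Z_NO_FLUSH), `rank_flush(5) == 1`
/// (Z_BLOCK), and `rank_flush(1) == 2` (Z_PARTIAL_FLUSH), so Z_BLOCK ranks between the
/// other two.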
const fn rank_flush(f: i8) -> i8 {
    // rank Z_BLOCK between Z_NO_FLUSH and Z_PARTIAL_FLUSH
    ((f) * 2) - (if (f) > 4 { 9 } else { 0 })
}

#[derive(Debug)]
pub(crate) enum BlockState {
    /// block not completed, need more input or more output
    NeedMore = 0,
    /// block flush performed
    BlockDone = 1,
    /// finish started, need only more output at next deflate
    FinishStarted = 2,
    /// finish done, accept no more input or output
    FinishDone = 3,
}

// Maximum stored block length in deflate format (not including header).
pub(crate) const MAX_STORED: usize = 65535; // i.e. u16::MAX
1636
1637pub(crate) fn read_buf_window(stream: &mut DeflateStream, offset: usize, size: usize) -> usize {
1638    let len = Ord::min(stream.avail_in as usize, size);
1639
1640    if len == 0 {
1641        return 0;
1642    }
1643
1644    stream.avail_in -= len as u32;
1645
1646    if stream.state.wrap == 2 {
1647        // we likely cannot fuse the crc32 and the copy here because the input can be changed by
1648        // a concurrent thread. Therefore it cannot be converted into a slice!
1649        let window = &mut stream.state.window;
1650        // SAFETY: len is bounded by avail_in, so this copy is in bounds.
1651        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1652
1653        let data = &stream.state.window.filled()[offset..][..len];
1654        stream.state.crc_fold.fold(data, CRC32_INITIAL_VALUE);
1655    } else if stream.state.wrap == 1 {
1656        // We likely cannot fuse the adler32 and the copy here: the input may be modified by a
1657        // concurrent thread, so `next_in` cannot soundly be turned into a slice.
1658        let window = &mut stream.state.window;
1659        // SAFETY: len is bounded by avail_in, so this copy is in bounds.
1660        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1661
1662        let data = &stream.state.window.filled()[offset..][..len];
1663        stream.adler = adler32(stream.adler as u32, data) as _;
1664    } else {
1665        let window = &mut stream.state.window;
1666        // SAFETY: len is bounded by avail_in, so this copy is in bounds.
1667        unsafe { window.copy_and_initialize(offset..offset + len, stream.next_in) };
1668    }
1669
1670    stream.next_in = stream.next_in.wrapping_add(len);
1671    stream.total_in += len as crate::c_api::z_size;
1672
1673    len
1674}
1675
1676pub(crate) enum BlockType {
1677    StoredBlock = 0,
1678    StaticTrees = 1,
1679    DynamicTrees = 2,
1680}
1681
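/// Emits a stored (uncompressed) block per RFC 1951 section 3.2.4: the 3-bit block
/// header, padding to a byte boundary, `LEN` and `NLEN` (its one's complement) as
/// little-endian 16-bit values, and finally the raw bytes themselves.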
1682pub(crate) fn zng_tr_stored_block(
1683    state: &mut State,
1684    window_range: core::ops::Range<usize>,
1685    is_last: bool,
1686) {
1687    // send block type
1688    state.bit_writer.emit_tree(BlockType::StoredBlock, is_last);
1689
1690    // align on byte boundary
1691    state.bit_writer.emit_align();
1692
1693    state.bit_writer.cmpr_bits_align();
1694
1695    let input_block: &[u8] = &state.window.filled()[window_range];
1696    let stored_len = input_block.len() as u16;
1697
1698    state.bit_writer.pending.extend(&stored_len.to_le_bytes());
1699    state
1700        .bit_writer
1701        .pending
1702        .extend(&(!stored_len).to_le_bytes());
1703
1704    state.bit_writer.cmpr_bits_add(32);
1705    state.bit_writer.sent_bits_add(32);
1706    if stored_len > 0 {
1707        state.bit_writer.pending.extend(input_block);
1708        state.bit_writer.cmpr_bits_add((stored_len << 3) as usize);
1709        state.bit_writer.sent_bits_add((stored_len << 3) as usize);
1710    }
1711}
1712
1713/// The minimum match length mandated by the deflate standard
1714pub(crate) const STD_MIN_MATCH: usize = 3;
1715/// The maximum match length mandated by the deflate standard
1716pub(crate) const STD_MAX_MATCH: usize = 258;
1717
1718/// The minimum wanted match length, affects deflate_quick, deflate_fast, deflate_medium and deflate_slow
1719pub(crate) const WANT_MIN_MATCH: usize = 4;
1720
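/// Minimum amount of lookahead, except at the end of the input.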
1721pub(crate) const MIN_LOOKAHEAD: usize = STD_MAX_MATCH + STD_MIN_MATCH + 1;
1722
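/// Fills the window with new input when the lookahead becomes insufficient, sliding the
/// upper half into the lower half once the window fills up. On return,
/// `strstart <= window_size - MIN_LOOKAHEAD` holds.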
1723#[inline]
1724pub(crate) fn fill_window(stream: &mut DeflateStream) {
1725    debug_assert!(stream.state.lookahead < MIN_LOOKAHEAD);
1726
1727    let wsize = stream.state.w_size;
1728
1729    loop {
1730        let state = &mut *stream.state;
1731        let mut more = state.window_size - state.lookahead - state.strstart;
1732
1733        // If the window is almost full and there is insufficient lookahead,
1734        // move the upper half to the lower one to make room in the upper half.
1735        if state.strstart >= wsize + state.max_dist() {
1736            // shift the window to the left
1737            let (old, new) = state.window.filled_mut()[..2 * wsize].split_at_mut(wsize);
1738            old.copy_from_slice(new);
1739
1740            if state.match_start >= wsize as u16 {
1741                state.match_start -= wsize as u16;
1742            } else {
1743                state.match_start = 0;
1744                state.prev_length = 0;
1745            }
1746
1747            state.strstart -= wsize; /* we now have strstart >= MAX_DIST */
1748            state.block_start -= wsize as isize;
1749            state.insert = Ord::min(state.insert, state.strstart);
1750
1751            self::slide_hash::slide_hash(state);
1752
1753            more += wsize;
1754        }
1755
1756        if stream.avail_in == 0 {
1757            break;
1758        }
1759
1760        // If there was no sliding:
1761        //    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
1762        //    more == window_size - lookahead - strstart
1763        // => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
1764        // => more >= window_size - 2*WSIZE + 2
1765        // In the BIG_MEM or MMAP case (not yet supported),
1766        //   window_size == input_size + MIN_LOOKAHEAD  &&
1767        //   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
1768        // Otherwise, window_size == 2*WSIZE so more >= 2.
1769        // If there was sliding, more >= WSIZE. So in all cases, more >= 2.
1770        assert!(more >= 2, "more < 2");
1771
1772        let n = read_buf_window(stream, stream.state.strstart + stream.state.lookahead, more);
1773
1774        let state = &mut *stream.state;
1775        state.lookahead += n;
1776
1777        // Initialize the hash value now that we have some input:
1778        if state.lookahead + state.insert >= STD_MIN_MATCH {
1779            let string = state.strstart - state.insert;
1780            if state.max_chain_length > 1024 {
1781                let v0 = state.window.filled()[string] as u32;
1782                let v1 = state.window.filled()[string + 1] as u32;
1783                state.ins_h = state.update_hash(v0, v1);
1784            } else if string >= 1 {
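                // with STD_MIN_MATCH == 3, this re-hashes starting at `string - 1`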
1785                state.quick_insert_string(string + 2 - STD_MIN_MATCH);
1786            }
1787            let mut count = state.insert;
1788            if state.lookahead == 1 {
1789                count -= 1;
1790            }
1791            if count > 0 {
1792                state.insert_string(string, count);
1793                state.insert -= count;
1794            }
1795        }
1796
1797        // If the whole input has less than STD_MIN_MATCH bytes, ins_h is garbage,
1798        // but this is not important since only literal bytes will be emitted.
1799
1800        if !(stream.state.lookahead < MIN_LOOKAHEAD && stream.avail_in != 0) {
1801            break;
1802        }
1803    }
1804
1805    assert!(
1806        stream.state.strstart <= stream.state.window_size - MIN_LOOKAHEAD,
1807        "not enough room for search"
1808    );
1809}
1810
1811pub(crate) struct StaticTreeDesc {
1812    /// static tree or NULL
1813    pub(crate) static_tree: &'static [Value],
1814    /// extra bits for each code or NULL
1815    extra_bits: &'static [u8],
1816    /// base index for extra_bits
1817    extra_base: usize,
1818    /// max number of elements in the tree
1819    elems: usize,
1820    /// max bit length for the codes
1821    max_length: u16,
1822}
1823
1824impl StaticTreeDesc {
1825    const EMPTY: Self = Self {
1826        static_tree: &[],
1827        extra_bits: &[],
1828        extra_base: 0,
1829        elems: 0,
1830        max_length: 0,
1831    };
1832
1833    /// extra bits for each length code
1834    const EXTRA_LBITS: [u8; LENGTH_CODES] = [
1835        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0,
1836    ];
1837
1838    /// extra bits for each distance code
1839    const EXTRA_DBITS: [u8; D_CODES] = [
1840        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12,
1841        13, 13,
1842    ];
1843
1844    /// extra bits for each bit length code
1845    const EXTRA_BLBITS: [u8; BL_CODES] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7];
1846
1847    /// The lengths of the bit length codes are sent in order of decreasing
1848    /// probability, to avoid transmitting the lengths for unused bit length codes.
1849    const BL_ORDER: [u8; BL_CODES] = [
1850        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
1851    ];
1852
1853    pub(crate) const L: Self = Self {
1854        static_tree: &self::trees_tbl::STATIC_LTREE,
1855        extra_bits: &Self::EXTRA_LBITS,
1856        extra_base: LITERALS + 1,
1857        elems: L_CODES,
1858        max_length: MAX_BITS as u16,
1859    };
1860
1861    pub(crate) const D: Self = Self {
1862        static_tree: &self::trees_tbl::STATIC_DTREE,
1863        extra_bits: &Self::EXTRA_DBITS,
1864        extra_base: 0,
1865        elems: D_CODES,
1866        max_length: MAX_BITS as u16,
1867    };
1868
1869    pub(crate) const BL: Self = Self {
1870        static_tree: &[],
1871        extra_bits: &Self::EXTRA_BLBITS,
1872        extra_base: 0,
1873        elems: BL_CODES,
1874        max_length: MAX_BL_BITS as u16,
1875    };
1876}
1877
1878#[derive(Clone)]
1879pub(crate) struct TreeDesc<const N: usize> {
1880    dyn_tree: [Value; N],
1881    max_code: usize,
1882    stat_desc: &'static StaticTreeDesc,
1883}
1884
1885impl<const N: usize> TreeDesc<N> {
1886    const EMPTY: Self = Self {
1887        dyn_tree: [Value::new(0, 0); N],
1888        max_code: 0,
1889        stat_desc: &StaticTreeDesc::EMPTY,
1890    };
1891}
1892
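/// Builds the Huffman tree for `desc` from the frequencies recorded in its `dyn_tree`:
/// heapify the symbols, repeatedly merge the two least frequent nodes, then derive the
/// bit lengths and finally the canonical codes.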
1893fn build_tree<const N: usize>(state: &mut State, desc: &mut TreeDesc<N>) {
1894    let tree = &mut desc.dyn_tree;
1895    let stree = desc.stat_desc.static_tree;
1896    let elements = desc.stat_desc.elems;
1897
1898    let mut heap = Heap::new();
1899    let mut max_code = heap.initialize(&mut tree[..elements]);
1900
1901    // The pkzip format requires that at least one distance code exists,
1902    // and that at least one bit should be sent even if there is only one
1903    // possible code. So to avoid special checks later on we force at least
1904    // two codes of non-zero frequency.
1905    while heap.heap_len < 2 {
1906        heap.heap_len += 1;
1907        let node = if max_code < 2 {
1908            max_code += 1;
1909            max_code
1910        } else {
1911            0
1912        };
1913
1914        debug_assert!(node >= 0);
1915        let node = node as usize;
1916
1917        heap.heap[heap.heap_len] = node as u32;
1918        *tree[node].freq_mut() = 1;
1919        heap.depth[node] = 0;
1920        state.opt_len -= 1;
1921        if !stree.is_empty() {
1922            state.static_len -= stree[node].len() as usize;
1923        }
1924        /* node is 0 or 1 so it does not have extra bits */
1925    }
1926
1927    debug_assert!(max_code >= 0);
1928    let max_code = max_code as usize;
1929    desc.max_code = max_code;
1930
1931    // The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
1932    // establish sub-heaps of increasing lengths:
1933    let mut n = heap.heap_len / 2;
1934    while n >= 1 {
1935        heap.pqdownheap(tree, n);
1936        n -= 1;
1937    }
1938
1939    heap.construct_huffman_tree(tree, elements);
1940
1941    // At this point, the fields freq and dad are set. We can now
1942    // generate the bit lengths.
1943    let bl_count = gen_bitlen(state, &mut heap, desc);
1944
1945    // The field len is now set, we can generate the bit codes
1946    gen_codes(&mut desc.dyn_tree, max_code, &bl_count);
1947}
1948
1949fn gen_bitlen<const N: usize>(
1950    state: &mut State,
1951    heap: &mut Heap,
1952    desc: &mut TreeDesc<N>,
1953) -> [u16; MAX_BITS + 1] {
1954    let tree = &mut desc.dyn_tree;
1955    let max_code = desc.max_code;
1956    let stree = desc.stat_desc.static_tree;
1957    let extra = desc.stat_desc.extra_bits;
1958    let base = desc.stat_desc.extra_base;
1959    let max_length = desc.stat_desc.max_length;
1960
1961    let mut bl_count = [0u16; MAX_BITS + 1];
1962
1963    // In a first pass, compute the optimal bit lengths (which may
1964    // overflow in the case of the bit length tree).
1965    *tree[heap.heap[heap.heap_max] as usize].len_mut() = 0; /* root of the heap */
1966
1967    // number of elements with bit length too large
1968    let mut overflow: i32 = 0;
1969
1970    for h in heap.heap_max + 1..HEAP_SIZE {
1971        let n = heap.heap[h] as usize;
1972        let mut bits = tree[tree[n].dad() as usize].len() + 1;
1973
1974        if bits > max_length {
1975            bits = max_length;
1976            overflow += 1;
1977        }
1978
1979        // We overwrite tree[n].Dad which is no longer needed
1980        *tree[n].len_mut() = bits;
1981
1982        // not a leaf node
1983        if n > max_code {
1984            continue;
1985        }
1986
1987        bl_count[bits as usize] += 1;
1988        let mut xbits = 0;
1989        if n >= base {
1990            xbits = extra[n - base] as usize;
1991        }
1992
1993        let f = tree[n].freq() as usize;
1994        state.opt_len += f * (bits as usize + xbits);
1995
1996        if !stree.is_empty() {
1997            state.static_len += f * (stree[n].len() as usize + xbits);
1998        }
1999    }
2000
2001    if overflow == 0 {
2002        return bl_count;
2003    }
2004
2005    /* Find the first bit length which could increase: */
2006    loop {
2007        let mut bits = max_length as usize - 1;
2008        while bl_count[bits] == 0 {
2009            bits -= 1;
2010        }
2011        bl_count[bits] -= 1; /* move one leaf down the tree */
2012        bl_count[bits + 1] += 2; /* move one overflow item as its brother */
2013        bl_count[max_length as usize] -= 1;
2014        /* The brother of the overflow item also moves one step up,
2015         * but this does not affect bl_count[max_length]
2016         */
2017        overflow -= 2;
2018
2019        if overflow <= 0 {
2020            break;
2021        }
2022    }
2023
2024    // Now recompute all bit lengths, scanning in increasing frequency.
2025    // h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
2026    // lengths instead of fixing only the wrong ones. This idea is taken
2027    // from 'ar' written by Haruhiko Okumura.)
2028    let mut h = HEAP_SIZE;
2029    for bits in (1..=max_length).rev() {
2030        let mut n = bl_count[bits as usize];
2031        while n != 0 {
2032            h -= 1;
2033            let m = heap.heap[h] as usize;
2034            if m > max_code {
2035                continue;
2036            }
2037
2038            if tree[m].len() != bits {
2039                // Tracev((stderr, "code %d bits %d->%u\n", m, tree[m].Len, bits));
2040                state.opt_len += (bits * tree[m].freq()) as usize;
2041                state.opt_len -= (tree[m].len() * tree[m].freq()) as usize;
2042                *tree[m].len_mut() = bits;
2043            }
2044
2045            n -= 1;
2046        }
2047    }
2048    bl_count
2049}
2050
2051/// Checks whether the symbol is a printable character (excluding space).
2052#[allow(unused)]
2053fn isgraph(c: u8) -> bool {
2054    (c > 0x20) && (c <= 0x7E)
2055}
2056
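// Worked example (cf. RFC 1951 section 3.2.2): for the length histogram
// bl_count = { 2: 1, 3: 5, 4: 2 }, the first loop below yields next_code[2] = 0,
// next_code[3] = 2, and next_code[4] = 14, i.e. the single 2-bit code is 00, the 3-bit
// codes start at 010, and the 4-bit codes at 1110 (all before bit reversal).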
2057fn gen_codes(tree: &mut [Value], max_code: usize, bl_count: &[u16]) {
2058    /* tree: the tree to decorate */
2059    /* max_code: largest code with non zero frequency */
2060    /* bl_count: number of codes at each bit length */
2061    let mut next_code = [0; MAX_BITS + 1]; /* next code value for each bit length */
2062    let mut code = 0; /* running code value */
2063
2064    /* The distribution counts are first used to generate the code values
2065     * without bit reversal.
2066     */
2067    for bits in 1..=MAX_BITS {
2068        code = (code + bl_count[bits - 1]) << 1;
2069        next_code[bits] = code;
2070    }
2071
2072    /* Check that the bit counts in bl_count are consistent. The last code
2073     * must be all ones.
2074     */
2075    assert!(
2076        code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
2077        "inconsistent bit counts"
2078    );
2079
2080    trace!("\ngen_codes: max_code {max_code} ");
2081
2082    for n in 0..=max_code {
2083        let len = tree[n].len();
2084        if len == 0 {
2085            continue;
2086        }
2087
2088        /* Now reverse the bits */
2089        assert!((1..=15).contains(&len), "code length must be 1-15");
2090        *tree[n].code_mut() = next_code[len as usize].reverse_bits() >> (16 - len);
2091        next_code[len as usize] += 1;
2092
2093        if tree != self::trees_tbl::STATIC_LTREE.as_slice() {
2094            trace!(
2095                "\nn {:>3} {} l {:>2} c {:>4x} ({:x}) ",
2096                n,
2097                if isgraph(n as u8) {
2098                    char::from_u32(n as u32).unwrap()
2099                } else {
2100                    ' '
2101                },
2102                len,
2103                tree[n].code(),
2104                next_code[len as usize] - 1
2105            );
2106        }
2107    }
2108}
2109
2110/// repeat previous bit length 3-6 times (2 bits of repeat count)
2111const REP_3_6: usize = 16;
2112
2113/// repeat a zero length 3-10 times  (3 bits of repeat count)
2114const REPZ_3_10: usize = 17;
2115
2116/// repeat a zero length 11-138 times  (7 bits of repeat count)
2117const REPZ_11_138: usize = 18;
2118
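// Frequency bookkeeping example: a run of four equal non-zero lengths counts the length
// itself once (when it differs from the previous length) plus one REP_3_6; a run of
// twelve zeros counts a single REPZ_11_138. The repeat counts themselves are emitted
// later by `send_tree`.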
2119fn scan_tree(bl_desc: &mut TreeDesc<{ 2 * BL_CODES + 1 }>, tree: &mut [Value], max_code: usize) {
2120    /* tree: the tree to be scanned */
2121    /* max_code: and its largest code of non zero frequency */
2122    let mut prevlen = -1isize; /* last emitted length */
2123    let mut curlen: isize; /* length of current code */
2124    let mut nextlen = tree[0].len(); /* length of next code */
2125    let mut count = 0; /* repeat count of the current code */
2126    let mut max_count = 7; /* max repeat count */
2127    let mut min_count = 4; /* min repeat count */
2128
2129    if nextlen == 0 {
2130        max_count = 138;
2131        min_count = 3;
2132    }
2133
2134    *tree[max_code + 1].len_mut() = 0xffff; /* guard */
2135
2136    let bl_tree = &mut bl_desc.dyn_tree;
2137
2138    for n in 0..=max_code {
2139        curlen = nextlen as isize;
2140        nextlen = tree[n + 1].len();
2141        count += 1;
2142        if count < max_count && curlen == nextlen as isize {
2143            continue;
2144        } else if count < min_count {
2145            *bl_tree[curlen as usize].freq_mut() += count;
2146        } else if curlen != 0 {
2147            if curlen != prevlen {
2148                *bl_tree[curlen as usize].freq_mut() += 1;
2149            }
2150            *bl_tree[REP_3_6].freq_mut() += 1;
2151        } else if count <= 10 {
2152            *bl_tree[REPZ_3_10].freq_mut() += 1;
2153        } else {
2154            *bl_tree[REPZ_11_138].freq_mut() += 1;
2155        }
2156
2157        count = 0;
2158        prevlen = curlen;
2159
2160        if nextlen == 0 {
2161            max_count = 138;
2162            min_count = 3;
2163        } else if curlen == nextlen as isize {
2164            max_count = 6;
2165            min_count = 3;
2166        } else {
2167            max_count = 7;
2168            min_count = 4;
2169        }
2170    }
2171}
2172
2173fn send_all_trees(state: &mut State, lcodes: usize, dcodes: usize, blcodes: usize) {
2174    assert!(
2175        lcodes >= 257 && dcodes >= 1 && blcodes >= 4,
2176        "not enough codes"
2177    );
2178    assert!(
2179        lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
2180        "too many codes"
2181    );
2182
2183    trace!("\nbl counts: ");
2184    state.bit_writer.send_bits(lcodes as u64 - 257, 5); /* not +255 as stated in appnote.txt */
2185    state.bit_writer.send_bits(dcodes as u64 - 1, 5);
2186    state.bit_writer.send_bits(blcodes as u64 - 4, 4); /* not -3 as stated in appnote.txt */
2187
2188    for rank in 0..blcodes {
2189        trace!("\nbl code {:>2} ", StaticTreeDesc::BL_ORDER[rank]);
2190        state.bit_writer.send_bits(
2191            state.bl_desc.dyn_tree[StaticTreeDesc::BL_ORDER[rank] as usize].len() as u64,
2192            3,
2193        );
2194    }
2195    trace!("\nbl tree: sent {}", state.bit_writer.bits_sent);
2196
2197    // literal tree
2198    state
2199        .bit_writer
2200        .send_tree(&state.l_desc.dyn_tree, &state.bl_desc.dyn_tree, lcodes - 1);
2201    trace!("\nlit tree: sent {}", state.bit_writer.bits_sent);
2202
2203    // distance tree
2204    state
2205        .bit_writer
2206        .send_tree(&state.d_desc.dyn_tree, &state.bl_desc.dyn_tree, dcodes - 1);
2207    trace!("\ndist tree: sent {}", state.bit_writer.bits_sent);
2208}
2209
2210/// Construct the Huffman tree for the bit lengths and return the index in
2211/// bl_order of the last bit length code to send.
2212fn build_bl_tree(state: &mut State) -> usize {
2213    /* Determine the bit length frequencies for literal and distance trees */
2214
2215    scan_tree(
2216        &mut state.bl_desc,
2217        &mut state.l_desc.dyn_tree,
2218        state.l_desc.max_code,
2219    );
2220
2221    scan_tree(
2222        &mut state.bl_desc,
2223        &mut state.d_desc.dyn_tree,
2224        state.d_desc.max_code,
2225    );
2226
2227    /* Build the bit length tree: */
2228    {
2229        let mut tmp = TreeDesc::EMPTY;
2230        core::mem::swap(&mut tmp, &mut state.bl_desc);
2231        build_tree(state, &mut tmp);
2232        core::mem::swap(&mut tmp, &mut state.bl_desc);
2233    }
2234
2235    /* opt_len now includes the length of the tree representations, except
2236     * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
2237     */
2238
2239    /* Determine the number of bit length codes to send. The pkzip format
2240     * requires that at least 4 bit length codes be sent. (appnote.txt says
2241     * 3 but the actual value used is 4.)
2242     */
2243    let mut max_blindex = BL_CODES - 1;
2244    while max_blindex >= 3 {
2245        let index = StaticTreeDesc::BL_ORDER[max_blindex] as usize;
2246        if state.bl_desc.dyn_tree[index].len() != 0 {
2247            break;
2248        }
2249
2250        max_blindex -= 1;
2251    }
2252
2253    /* Update opt_len to include the bit length tree and counts */
2254    state.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
2255    trace!(
2256        "\ndyn trees: dyn {}, stat {}",
2257        state.opt_len,
2258        state.static_len
2259    );
2260
2261    max_blindex
2262}
2263
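/// Determines the cheapest encoding for the current block (stored, static trees, or
/// dynamic trees), emits it, and then starts a new block.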
2264fn zng_tr_flush_block(
2265    stream: &mut DeflateStream,
2266    window_offset: Option<usize>,
2267    stored_len: u32,
2268    last: bool,
2269) {
2270    /* window_offset: offset of the input block into the window */
2271    /* stored_len: length of input block */
2272    /* last: one if this is the last block for a file */
2273
2274    let mut opt_lenb;
2275    let static_lenb;
2276    let mut max_blindex = 0;
2277
2278    let state = &mut stream.state;
2279
2280    if state.sym_buf.is_empty() {
2281        opt_lenb = 0;
2282        static_lenb = 0;
2283        state.static_len = 7;
2284    } else if state.level > 0 {
2285        if stream.data_type == DataType::Unknown as i32 {
2286            stream.data_type = State::detect_data_type(&state.l_desc.dyn_tree) as i32;
2287        }
2288
2289        {
2290            let mut tmp = TreeDesc::EMPTY;
2291            core::mem::swap(&mut tmp, &mut state.l_desc);
2292
2293            build_tree(state, &mut tmp);
2294            core::mem::swap(&mut tmp, &mut state.l_desc);
2295
2296            trace!(
2297                "\nlit data: dyn {}, stat {}",
2298                state.opt_len,
2299                state.static_len
2300            );
2301        }
2302
2303        {
2304            let mut tmp = TreeDesc::EMPTY;
2305            core::mem::swap(&mut tmp, &mut state.d_desc);
2306            build_tree(state, &mut tmp);
2307            core::mem::swap(&mut tmp, &mut state.d_desc);
2308
2309            trace!(
2310                "\ndist data: dyn {}, stat {}",
2311                state.opt_len,
2312                state.static_len
2313            );
2314        }
2315
2316        // Build the bit length tree for the above two trees, and get the index
2317        // in bl_order of the last bit length code to send.
2318        max_blindex = build_bl_tree(state);
2319
2320        // Determine the best encoding. Compute the block lengths in bytes.
2321        opt_lenb = (state.opt_len + 3 + 7) >> 3;
2322        static_lenb = (state.static_len + 3 + 7) >> 3;
2323
2324        trace!(
2325            "\nopt {}({}) stat {}({}) stored {} lit {} ",
2326            opt_lenb,
2327            state.opt_len,
2328            static_lenb,
2329            state.static_len,
2330            stored_len,
2331            state.sym_buf.len() / 3
2332        );
2333
2334        if static_lenb <= opt_lenb || state.strategy == Strategy::Fixed {
2335            opt_lenb = static_lenb;
2336        }
2337    } else {
2338        assert!(window_offset.is_some(), "lost buf");
2339        /* force a stored block */
2340        opt_lenb = stored_len as usize + 5;
2341        static_lenb = stored_len as usize + 5;
2342    }
2343
2344    #[allow(clippy::unnecessary_unwrap)]
2345    if stored_len as usize + 4 <= opt_lenb && window_offset.is_some() {
2346        /* 4: two words for the lengths
2347         * The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
2348         * Otherwise we can't have processed more than WSIZE input bytes since
2349         * the last block flush, because compression would have been
2350         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
2351         * transform a block into a stored block.
2352         */
2353        let window_offset = window_offset.unwrap();
2354        let range = window_offset..window_offset + stored_len as usize;
2355        zng_tr_stored_block(state, range, last);
2356    } else if static_lenb == opt_lenb {
2357        state.bit_writer.emit_tree(BlockType::StaticTrees, last);
2358        state.compress_block_static_trees();
2359    // cmpr_bits_add(s, s.static_len);
2360    } else {
2361        state.bit_writer.emit_tree(BlockType::DynamicTrees, last);
2362        send_all_trees(
2363            state,
2364            state.l_desc.max_code + 1,
2365            state.d_desc.max_code + 1,
2366            max_blindex + 1,
2367        );
2368
2369        state.compress_block_dynamic_trees();
2370    }
2371
2372    // TODO
2373    // This check is made mod 2^32, for files larger than 512 MB and unsigned long implemented on 32 bits.
2374    // assert_eq!(state.compressed_len, state.bits_sent, "bad compressed size");
2375
2376    state.init_block();
2377    if last {
2378        state.bit_writer.emit_align();
2379    }
2380
2381    // Tracev((stderr, "\ncomprlen {}(%lu) ", s->compressed_len>>3, s->compressed_len-7*last));
2382}
2383
2384pub(crate) fn flush_block_only(stream: &mut DeflateStream, is_last: bool) {
2385    zng_tr_flush_block(
2386        stream,
2387        (stream.state.block_start >= 0).then_some(stream.state.block_start as usize),
2388        (stream.state.strstart as isize - stream.state.block_start) as u32,
2389        is_last,
2390    );
2391
2392    stream.state.block_start = stream.state.strstart as isize;
2393    flush_pending(stream)
2394}
2395
2396fn flush_bytes(stream: &mut DeflateStream, mut bytes: &[u8]) -> ControlFlow<ReturnCode> {
2397    let mut state = &mut stream.state;
2398
2399    // we'll be using the pending buffer as temporary storage
2400    let mut beg = state.bit_writer.pending.pending().len(); /* start of bytes to update crc */
2401
2402    while state.bit_writer.pending.remaining() < bytes.len() {
2403        let copy = state.bit_writer.pending.remaining();
2404
2405        state.bit_writer.pending.extend(&bytes[..copy]);
2406
2407        stream.adler = crc32(
2408            stream.adler as u32,
2409            &state.bit_writer.pending.pending()[beg..],
2410        ) as z_checksum;
2411
2412        state.gzindex += copy;
2413        flush_pending(stream);
2414        state = &mut stream.state;
2415
2416        // could not flush all the pending output
2417        if !state.bit_writer.pending.pending().is_empty() {
2418            state.last_flush = -1;
2419            return ControlFlow::Break(ReturnCode::Ok);
2420        }
2421
2422        beg = 0;
2423        bytes = &bytes[copy..];
2424    }
2425
2426    state.bit_writer.pending.extend(bytes);
2427
2428    stream.adler = crc32(
2429        stream.adler as u32,
2430        &state.bit_writer.pending.pending()[beg..],
2431    ) as z_checksum;
2432    state.gzindex = 0;
2433
2434    ControlFlow::Continue(())
2435}
2436
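/// Compresses as much input as possible and writes as much output as there is room for.
/// Returns [`ReturnCode::Ok`] while more calls are needed, and [`ReturnCode::StreamEnd`]
/// once all input has been consumed and all output flushed with [`DeflateFlush::Finish`].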
2437pub fn deflate(stream: &mut DeflateStream, flush: DeflateFlush) -> ReturnCode {
2438    if stream.next_out.is_null()
2439        || (stream.avail_in != 0 && stream.next_in.is_null())
2440        || (stream.state.status == Status::Finish && flush != DeflateFlush::Finish)
2441    {
2442        let err = ReturnCode::StreamError;
2443        stream.msg = err.error_message();
2444        return err;
2445    }
2446
2447    if stream.avail_out == 0 {
2448        let err = ReturnCode::BufError;
2449        stream.msg = err.error_message();
2450        return err;
2451    }
2452
2453    let old_flush = stream.state.last_flush;
2454    stream.state.last_flush = flush as i8;
2455
2456    /* Flush as much pending output as possible */
2457    if !stream.state.bit_writer.pending.pending().is_empty() {
2458        flush_pending(stream);
2459        if stream.avail_out == 0 {
2460            /* Since avail_out is 0, deflate will be called again with
2461             * more output space, but possibly with both pending and
2462             * avail_in equal to zero. There won't be anything to do,
2463             * but this is not an error situation so make sure we
2464             * return OK instead of BUF_ERROR at next call of deflate:
2465             */
2466            stream.state.last_flush = -1;
2467            return ReturnCode::Ok;
2468        }
2469
2470        /* Make sure there is something to do and avoid duplicate consecutive
2471         * flushes. For repeated and useless calls with Z_FINISH, we keep
2472         * returning Z_STREAM_END instead of Z_BUF_ERROR.
2473         */
2474    } else if stream.avail_in == 0
2475        && rank_flush(flush as i8) <= rank_flush(old_flush)
2476        && flush != DeflateFlush::Finish
2477    {
2478        let err = ReturnCode::BufError;
2479        stream.msg = err.error_message();
2480        return err;
2481    }
2482
2483    /* User must not provide more input after the first FINISH: */
2484    if stream.state.status == Status::Finish && stream.avail_in != 0 {
2485        let err = ReturnCode::BufError;
2486        stream.msg = err.error_message();
2487        return err;
2488    }
2489
2490    /* Write the header */
2491    if stream.state.status == Status::Init && stream.state.wrap == 0 {
2492        stream.state.status = Status::Busy;
2493    }
2494
2495    if stream.state.status == Status::Init {
2496        let header = stream.state.header();
2497        stream
2498            .state
2499            .bit_writer
2500            .pending
2501            .extend(&header.to_be_bytes());
2502
2503        /* Save the adler32 of the preset dictionary: */
2504        if stream.state.strstart != 0 {
2505            let adler = stream.adler as u32;
2506            stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
2507        }
2508
2509        stream.adler = ADLER32_INITIAL_VALUE as _;
2510        stream.state.status = Status::Busy;
2511
2512        // compression must start with an empty pending buffer
2513        flush_pending(stream);
2514
2515        if !stream.state.bit_writer.pending.pending().is_empty() {
2516            stream.state.last_flush = -1;
2517
2518            return ReturnCode::Ok;
2519        }
2520    }
2521
2522    if stream.state.status == Status::GZip {
2523        /* gzip header */
2524        stream.state.crc_fold = Crc32Fold::new();
2525
2526        stream.state.bit_writer.pending.extend(&[31, 139, 8]);
2527
2528        let extra_flags = if stream.state.level == 9 {
2529            2
2530        } else if stream.state.strategy >= Strategy::HuffmanOnly || stream.state.level < 2 {
2531            4
2532        } else {
2533            0
2534        };
2535
2536        match &stream.state.gzhead {
2537            None => {
2538                let bytes = [0, 0, 0, 0, 0, extra_flags, gz_header::OS_CODE];
2539                stream.state.bit_writer.pending.extend(&bytes);
2540                stream.state.status = Status::Busy;
2541
2542                /* Compression must start with an empty pending buffer */
2543                flush_pending(stream);
2544                if !stream.state.bit_writer.pending.pending().is_empty() {
2545                    stream.state.last_flush = -1;
2546                    return ReturnCode::Ok;
2547                }
2548            }
2549            Some(gzhead) => {
2550                stream.state.bit_writer.pending.extend(&[gzhead.flags()]);
2551                let bytes = (gzhead.time as u32).to_le_bytes();
2552                stream.state.bit_writer.pending.extend(&bytes);
2553                stream
2554                    .state
2555                    .bit_writer
2556                    .pending
2557                    .extend(&[extra_flags, gzhead.os as u8]);
2558
2559                if !gzhead.extra.is_null() {
2560                    let bytes = (gzhead.extra_len as u16).to_le_bytes();
2561                    stream.state.bit_writer.pending.extend(&bytes);
2562                }
2563
2564                if gzhead.hcrc != 0 {
2565                    stream.adler = crc32(
2566                        stream.adler as u32,
2567                        stream.state.bit_writer.pending.pending(),
2568                    ) as z_checksum
2569                }
2570
2571                stream.state.gzindex = 0;
2572                stream.state.status = Status::Extra;
2573            }
2574        }
2575    }
2576
2577    if stream.state.status == Status::Extra {
2578        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2579            if !gzhead.extra.is_null() {
2580                let gzhead_extra = gzhead.extra;
2581
2582                let extra = unsafe {
2583                    core::slice::from_raw_parts(
2584                        // SAFETY: gzindex is always less than extra_len, and the user
2585                        // guarantees the pointer is valid for extra_len.
2586                        gzhead_extra.add(stream.state.gzindex),
2587                        (gzhead.extra_len & 0xffff) as usize - stream.state.gzindex,
2588                    )
2589                };
2590
2591                if let ControlFlow::Break(err) = flush_bytes(stream, extra) {
2592                    return err;
2593                }
2594            }
2595        }
2596        stream.state.status = Status::Name;
2597    }
2598
2599    if stream.state.status == Status::Name {
2600        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2601            if !gzhead.name.is_null() {
2602                // SAFETY: user satisfies precondition that gzhead.name is a C string.
2603                let gzhead_name = unsafe { CStr::from_ptr(gzhead.name.cast()) };
2604                let bytes = gzhead_name.to_bytes_with_nul();
2605                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
2606                    return err;
2607                }
2608            }
2609            stream.state.status = Status::Comment;
2610        }
2611    }
2612
2613    if stream.state.status == Status::Comment {
2614        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2615            if !gzhead.comment.is_null() {
2616                // SAFETY: user satisfies precondition that gzhead.comment is a C string.
2617                let gzhead_comment = unsafe { CStr::from_ptr(gzhead.comment.cast()) };
2618                let bytes = gzhead_comment.to_bytes_with_nul();
2619                if let ControlFlow::Break(err) = flush_bytes(stream, bytes) {
2620                    return err;
2621                }
2622            }
2623            stream.state.status = Status::Hcrc;
2624        }
2625    }
2626
2627    if stream.state.status == Status::Hcrc {
2628        if let Some(gzhead) = stream.state.gzhead.as_ref() {
2629            if gzhead.hcrc != 0 {
2630                let bytes = (stream.adler as u16).to_le_bytes();
2631                if let ControlFlow::Break(err) = flush_bytes(stream, &bytes) {
2632                    return err;
2633                }
2634            }
2635        }
2636
2637        stream.state.status = Status::Busy;
2638
2639        // compression must start with an empty pending buffer
2640        flush_pending(stream);
2641        if !stream.state.bit_writer.pending.pending().is_empty() {
2642            stream.state.last_flush = -1;
2643            return ReturnCode::Ok;
2644        }
2645    }
2646
2647    // Start a new block or continue the current one.
2648    let state = &mut stream.state;
2649    if stream.avail_in != 0
2650        || state.lookahead != 0
2651        || (flush != DeflateFlush::NoFlush && state.status != Status::Finish)
2652    {
2653        let bstate = self::algorithm::run(stream, flush);
2654
2655        let state = &mut stream.state;
2656
2657        if matches!(bstate, BlockState::FinishStarted | BlockState::FinishDone) {
2658            state.status = Status::Finish;
2659        }
2660
2661        match bstate {
2662            BlockState::NeedMore | BlockState::FinishStarted => {
2663                if stream.avail_out == 0 {
2664                    state.last_flush = -1; /* avoid BUF_ERROR next call, see above */
2665                }
2666                return ReturnCode::Ok;
2667                /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
2668                 * of deflate should use the same flush parameter to make sure
2669                 * that the flush is complete. So we don't have to output an
2670                 * empty block here, this will be done at next call. This also
2671                 * ensures that for a very small output buffer, we emit at most
2672                 * one empty block.
2673                 */
2674            }
2675            BlockState::BlockDone => {
2676                match flush {
2677                    DeflateFlush::NoFlush => unreachable!("condition of inner surrounding if"),
2678                    DeflateFlush::PartialFlush => {
2679                        state.bit_writer.align();
2680                    }
2681                    DeflateFlush::SyncFlush => {
2682                        // add an empty stored block that is marked as not final. This is useful for
2683                        // parallel deflate where we want to make sure the intermediate blocks are not
2684                        // marked as "last block".
2685                        zng_tr_stored_block(state, 0..0, false);
2686                    }
2687                    DeflateFlush::FullFlush => {
2688                        // add an empty stored block that is marked as not final. This is useful for
2689                        // parallel deflate where we want to make sure the intermediate blocks are not
2690                        // marked as "last block".
2691                        zng_tr_stored_block(state, 0..0, false);
2692
2693                        state.head.as_mut_slice().fill(0); // forget history
2694
2695                        if state.lookahead == 0 {
2696                            state.strstart = 0;
2697                            state.block_start = 0;
2698                            state.insert = 0;
2699                        }
2700                    }
2701                    DeflateFlush::Block => { /* fall through */ }
2702                    DeflateFlush::Finish => unreachable!("condition of outer surrounding if"),
2703                }
2704
2705                flush_pending(stream);
2706
2707                if stream.avail_out == 0 {
2708                    stream.state.last_flush = -1; /* avoid BUF_ERROR at next call, see above */
2709                    return ReturnCode::Ok;
2710                }
2711            }
2712            BlockState::FinishDone => { /* do nothing */ }
2713        }
2714    }
2715
2716    if flush != DeflateFlush::Finish {
2717        return ReturnCode::Ok;
2718    }
2719
2720    // write the trailer
2721    if stream.state.wrap == 2 {
2722        let crc_fold = core::mem::take(&mut stream.state.crc_fold);
2723        stream.adler = crc_fold.finish() as z_checksum;
2724
2725        let adler = stream.adler as u32;
2726        stream.state.bit_writer.pending.extend(&adler.to_le_bytes());
2727
2728        let total_in = stream.total_in as u32;
2729        stream
2730            .state
2731            .bit_writer
2732            .pending
2733            .extend(&total_in.to_le_bytes());
2734    } else if stream.state.wrap == 1 {
2735        let adler = stream.adler as u32;
2736        stream.state.bit_writer.pending.extend(&adler.to_be_bytes());
2737    }
2738
2739    flush_pending(stream);
2740
2741    // If avail_out is zero, the application will call deflate again to flush the rest.
2742    if stream.state.wrap > 0 {
2743        stream.state.wrap = -stream.state.wrap; /* write the trailer only once! */
2744    }
2745
2746    if stream.state.bit_writer.pending.pending().is_empty() {
2747        assert_eq!(stream.state.bit_writer.bits_used, 0, "bi_buf not flushed");
2748        return ReturnCode::StreamEnd;
2749    }
2750    ReturnCode::Ok
2751}
2752
2753pub(crate) fn flush_pending(stream: &mut DeflateStream) {
2754    let state = &mut stream.state;
2755
2756    state.bit_writer.flush_bits();
2757
2758    let pending = state.bit_writer.pending.pending();
2759    let len = Ord::min(pending.len(), stream.avail_out as usize);
2760
2761    if len == 0 {
2762        return;
2763    }
2764
2765    trace!("\n[FLUSH {len} bytes]");
2766    // SAFETY: len is min(pending, stream.avail_out), so we won't overrun next_out.
2767    unsafe { core::ptr::copy_nonoverlapping(pending.as_ptr(), stream.next_out, len) };
2768
2769    stream.next_out = stream.next_out.wrapping_add(len);
2770    stream.total_out += len as crate::c_api::z_size;
2771    stream.avail_out -= len as crate::c_api::uInt;
2772
2773    state.bit_writer.pending.advance(len);
2774}
2775
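/// Compresses `input` into `output` in a single call, returning the initialized prefix
/// of `output` together with a [`ReturnCode`].
///
/// A minimal usage sketch (assuming `DeflateConfig` implements `Default`):
///
/// ```ignore
/// let input = b"hello world";
/// let mut output = [0u8; 64];
/// let (compressed, err) = compress_slice(&mut output, input, DeflateConfig::default());
/// assert_eq!(err, ReturnCode::Ok);
/// assert!(!compressed.is_empty());
/// ```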
2776pub fn compress_slice<'a>(
2777    output: &'a mut [u8],
2778    input: &[u8],
2779    config: DeflateConfig,
2780) -> (&'a mut [u8], ReturnCode) {
2781    // SAFETY: a [u8] is a valid [MaybeUninit<u8>].
2782    let output_uninit = unsafe {
2783        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2784    };
2785
2786    compress(output_uninit, input, config)
2787}
2788
2789pub fn compress<'a>(
2790    output: &'a mut [MaybeUninit<u8>],
2791    input: &[u8],
2792    config: DeflateConfig,
2793) -> (&'a mut [u8], ReturnCode) {
2794    compress_with_flush(output, input, config, DeflateFlush::Finish)
2795}
2796
2797pub fn compress_slice_with_flush<'a>(
2798    output: &'a mut [u8],
2799    input: &[u8],
2800    config: DeflateConfig,
2801    flush: DeflateFlush,
2802) -> (&'a mut [u8], ReturnCode) {
2803    // SAFETY: a [u8] is a valid [MaybeUninit<u8>], and `compress_with_flush` never uninitializes previously initialized memory.
2804    let output_uninit = unsafe {
2805        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
2806    };
2807
2808    compress_with_flush(output_uninit, input, config, flush)
2809}
2810
2811pub fn compress_with_flush<'a>(
2812    output: &'a mut [MaybeUninit<u8>],
2813    input: &[u8],
2814    config: DeflateConfig,
2815    final_flush: DeflateFlush,
2816) -> (&'a mut [u8], ReturnCode) {
2817    let mut stream = z_stream {
2818        next_in: input.as_ptr() as *mut u8,
2819        avail_in: 0, // for special logic on the first iteration
2820        total_in: 0,
2821        next_out: output.as_mut_ptr() as *mut u8,
2822        avail_out: 0, // for special logic on the first iteration
2823        total_out: 0,
2824        msg: core::ptr::null_mut(),
2825        state: core::ptr::null_mut(),
2826        zalloc: None,
2827        zfree: None,
2828        opaque: core::ptr::null_mut(),
2829        data_type: 0,
2830        adler: 0,
2831        reserved: 0,
2832    };
2833
2834    let err = init(&mut stream, config);
2835    if err != ReturnCode::Ok {
2836        return (&mut [], err);
2837    }
2838
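    // `avail_in` and `avail_out` are 32-bit counters, so input and output larger than
    // `c_uint::MAX` bytes are fed to `deflate` in chunks.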
2839    let max = core::ffi::c_uint::MAX as usize;
2840
2841    let mut left = output.len();
2842    let mut source_len = input.len();
2843
2844    loop {
2845        if stream.avail_out == 0 {
2846            stream.avail_out = Ord::min(left, max) as _;
2847            left -= stream.avail_out as usize;
2848        }
2849
2850        if stream.avail_in == 0 {
2851            stream.avail_in = Ord::min(source_len, max) as _;
2852            source_len -= stream.avail_in as usize;
2853        }
2854
2855        let flush = if source_len > 0 {
2856            DeflateFlush::NoFlush
2857        } else {
2858            final_flush
2859        };
2860
2861        let err = if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
2862            deflate(stream, flush)
2863        } else {
2864            ReturnCode::StreamError
2865        };
2866
2867        if err != ReturnCode::Ok {
2868            break;
2869        }
2870    }
2871
2872    // SAFETY: we have now initialized these bytes
2873    let output_slice = unsafe {
2874        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut u8, stream.total_out as usize)
2875    };
2876
2877    // may return DataError if there was insufficient output space
2878    let return_code = if let Some(stream) = unsafe { DeflateStream::from_stream_mut(&mut stream) } {
2879        match end(stream) {
2880            Ok(_) => ReturnCode::Ok,
2881            Err(_) => ReturnCode::DataError,
2882        }
2883    } else {
2884        ReturnCode::Ok
2885    };
2886
2887    (output_slice, return_code)
2888}
2889
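/// Returns an upper bound on the compressed size after a single-shot [`compress`] of
/// `source_len` bytes with the default zlib wrapper.
///
/// For example, with the constants defined below, `compress_bound(1024)` is
/// `1024 + 128 + 3 + 6 = 1161` bytes.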
2890pub const fn compress_bound(source_len: usize) -> usize {
2891    compress_bound_help(source_len, ZLIB_WRAPLEN)
2892}
2893
2894const fn compress_bound_help(source_len: usize, wrap_len: usize) -> usize {
2895    source_len // The source size itself
2896        // Always at least one byte for any input
2897        .wrapping_add(if source_len == 0 { 1 } else { 0 })
2898        // One extra byte for lengths less than 9
2899        .wrapping_add(if source_len < 9 { 1 } else { 0 })
2900        // Source encoding overhead, padded to next full byte
2901        .wrapping_add(deflate_quick_overhead(source_len))
2902        // Deflate block overhead bytes
2903        .wrapping_add(DEFLATE_BLOCK_OVERHEAD)
2904        // none, zlib or gzip wrapper
2905        .wrapping_add(wrap_len)
2906}
2907
2908/// Heap used to build the Huffman trees
2909///
2910/// The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
2911/// The same heap array is used to build all trees.
2912#[derive(Clone)]
2913struct Heap {
2914    heap: [u32; 2 * L_CODES + 1],
2915
2916    /// number of elements in the heap
2917    heap_len: usize,
2918
2919    /// element of the largest frequency
2920    heap_max: usize,
2921
2922    depth: [u8; 2 * L_CODES + 1],
2923}
2924
2925impl Heap {
2926    // an empty heap
2927    fn new() -> Self {
2928        Self {
2929            heap: [0; 2 * L_CODES + 1],
2930            heap_len: 0,
2931            heap_max: 0,
2932            depth: [0; 2 * L_CODES + 1],
2933        }
2934    }
2935
2936    /// Construct the initial heap, with least frequent element in
2937    /// heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
2938    fn initialize(&mut self, tree: &mut [Value]) -> isize {
2939        let mut max_code = -1;
2940
2941        self.heap_len = 0;
2942        self.heap_max = HEAP_SIZE;
2943
2944        for (n, node) in tree.iter_mut().enumerate() {
2945            if node.freq() > 0 {
2946                self.heap_len += 1;
2947                self.heap[self.heap_len] = n as u32;
2948                max_code = n as isize;
2949                self.depth[n] = 0;
2950            } else {
2951                *node.len_mut() = 0;
2952            }
2953        }
2954
2955        max_code
2956    }
2957
2958    /// Index within the heap array of least frequent node in the Huffman tree
2959    const SMALLEST: usize = 1;
2960
2961    fn pqdownheap(&mut self, tree: &[Value], mut k: usize) {
2962        /* tree: the tree to restore */
2963        /* k: node to move down */
2964
2965        // Given the index $i of a node in the tree, pack the node's frequency and depth
2966        // into a single integer. The heap ordering logic uses a primary sort on frequency
2967        // and a secondary sort on depth, so packing both into one integer makes it
2968        // possible to sort with fewer comparison operations.
2969        macro_rules! freq_and_depth {
2970            ($i:expr) => {
2971                (tree[$i as usize].freq() as u32) << 8 | self.depth[$i as usize] as u32
2972            };
2973        }
2974
2975        let v = self.heap[k];
2976        let v_val = freq_and_depth!(v);
2977        let mut j = k << 1; /* left son of k */
2978
2979        while j <= self.heap_len {
2980            /* Set j to the smallest of the two sons: */
2981            let mut j_val = freq_and_depth!(self.heap[j]);
2982            if j < self.heap_len {
2983                let j1_val = freq_and_depth!(self.heap[j + 1]);
2984                if j1_val <= j_val {
2985                    j += 1;
2986                    j_val = j1_val;
2987                }
2988            }
2989
2990            /* Exit if v is smaller than both sons */
2991            if v_val <= j_val {
2992                break;
2993            }
2994
2995            /* Exchange v with the smallest son */
2996            self.heap[k] = self.heap[j];
2997            k = j;
2998
2999            /* And continue down the tree, setting j to the left son of k */
3000            j <<= 1;
3001        }
3002
3003        self.heap[k] = v;
3004    }
3005
3006    /// Remove the smallest element from the heap and recreate the heap with
3007    /// one less element. Updates heap and heap_len.
3008    fn pqremove(&mut self, tree: &[Value]) -> u32 {
3009        let top = self.heap[Self::SMALLEST];
3010        self.heap[Self::SMALLEST] = self.heap[self.heap_len];
3011        self.heap_len -= 1;
3012
3013        self.pqdownheap(tree, Self::SMALLEST);
3014
3015        top
3016    }
3017
3018    /// Construct the Huffman tree by repeatedly combining the least two frequent nodes.
3019    fn construct_huffman_tree(&mut self, tree: &mut [Value], mut node: usize) {
3020        loop {
3021            let n = self.pqremove(tree) as usize; /* n = node of least frequency */
3022            let m = self.heap[Heap::SMALLEST] as usize; /* m = node of next least frequency */
3023
3024            self.heap_max -= 1;
3025            self.heap[self.heap_max] = n as u32; /* keep the nodes sorted by frequency */
3026            self.heap_max -= 1;
3027            self.heap[self.heap_max] = m as u32;
3028
3029            /* Create a new node father of n and m */
3030            *tree[node].freq_mut() = tree[n].freq() + tree[m].freq();
3031            self.depth[node] = Ord::max(self.depth[n], self.depth[m]) + 1;
3032
3033            *tree[n].dad_mut() = node as u16;
3034            *tree[m].dad_mut() = node as u16;
3035
3036            /* and insert the new node in the heap */
3037            self.heap[Heap::SMALLEST] = node as u32;
3038            node += 1;
3039
3040            self.pqdownheap(tree, Heap::SMALLEST);
3041
3042            if self.heap_len < 2 {
3043                break;
3044            }
3045        }
3046
3047        self.heap_max -= 1;
3048        self.heap[self.heap_max] = self.heap[Heap::SMALLEST];
3049    }
3050}
3051
3052/// # Safety
3053///
3054/// The caller must guarantee:
3055///
3056/// * If `head` is `Some`
3057///     - `head.extra` is `NULL` or is readable for at least `head.extra_len` bytes
3058///     - `head.name` is `NULL` or satisfies the requirements of [`core::ffi::CStr::from_ptr`]
3059///     - `head.comment` is `NULL` or satisfies the requirements of [`core::ffi::CStr::from_ptr`]
3060pub unsafe fn set_header<'a>(
3061    stream: &mut DeflateStream<'a>,
3062    head: Option<&'a mut gz_header>,
3063) -> ReturnCode {
3064    if stream.state.wrap != 2 {
3065        ReturnCode::StreamError
3066    } else {
3067        stream.state.gzhead = head;
3068        ReturnCode::Ok
3069    }
3070}
3071
3072// zlib format overhead
3073const ZLIB_WRAPLEN: usize = 6;
3074// gzip format overhead
3075const GZIP_WRAPLEN: usize = 18;
3076
3077const DEFLATE_HEADER_BITS: usize = 3;
3078const DEFLATE_EOBS_BITS: usize = 15;
3079const DEFLATE_PAD_BITS: usize = 6;
3080const DEFLATE_BLOCK_OVERHEAD: usize =
3081    (DEFLATE_HEADER_BITS + DEFLATE_EOBS_BITS + DEFLATE_PAD_BITS) >> 3;
3082
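// deflate_quick encodes literals with at most 9 bits, i.e. up to one extra bit per
// input byte; the overhead below is that bit count rounded up to whole bytes.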
3083const DEFLATE_QUICK_LIT_MAX_BITS: usize = 9;
3084const fn deflate_quick_overhead(x: usize) -> usize {
3085    let sum = x
3086        .wrapping_mul(DEFLATE_QUICK_LIT_MAX_BITS - 8)
3087        .wrapping_add(7);
3088
3089    // imitate zlib-ng rounding behavior (on windows, c_ulong is 32 bits)
3090    (sum as core::ffi::c_ulong >> 3) as usize
3091}
3092
3093/// For the default windowBits of 15 and memLevel of 8, this function returns
3094/// a close to exact, as well as small, upper bound on the compressed size.
3095/// They are coded as constants here for a reason: if the underlying constants are
3096/// changed, then this function needs to be changed as well. The return
3097/// value for 15 and 8 only works for those exact settings.
3098///
3099/// For any setting other than those defaults for windowBits and memLevel,
3100/// the value returned is a conservative worst case for the maximum expansion
3101/// resulting from using fixed blocks instead of stored blocks, which deflate
3102/// can emit on compressed data for some combinations of the parameters.
3103///
3104/// This function could be more sophisticated to provide closer upper bounds for
3105/// every combination of windowBits and memLevel.  But even the conservative
3106/// upper bound of about 14% expansion does not seem onerous for output buffer
3107/// allocation.
3108pub fn bound(stream: Option<&mut DeflateStream>, source_len: usize) -> usize {
3109    // on windows, c_ulong is only a 32-bit integer
3110    let mask = core::ffi::c_ulong::MAX as usize;
3111
3112    // conservative upper bound for compressed data
3113    let comp_len = source_len
3114        .wrapping_add((source_len.wrapping_add(7) & mask) >> 3)
3115        .wrapping_add((source_len.wrapping_add(63) & mask) >> 6)
3116        .wrapping_add(5);
3117
3118    let Some(stream) = stream else {
3119        // return conservative bound plus zlib wrapper
3120        return comp_len.wrapping_add(6);
3121    };
3122
3123    /* compute wrapper length */
3124    let wrap_len = match stream.state.wrap {
3125        0 => {
3126            // raw deflate
3127            0
3128        }
3129        1 => {
3130            // zlib wrapper
3131            if stream.state.strstart != 0 {
3132                ZLIB_WRAPLEN + 4
3133            } else {
3134                ZLIB_WRAPLEN
3135            }
3136        }
3137        2 => {
3138            // gzip wrapper
3139            let mut gz_wrap_len = GZIP_WRAPLEN;
3140
3141            if let Some(header) = &stream.state.gzhead {
3142                if !header.extra.is_null() {
3143                    gz_wrap_len += 2 + header.extra_len as usize;
3144                }
3145
3146                let mut c_string = header.name;
3147                if !c_string.is_null() {
3148                    loop {
3149                        gz_wrap_len += 1;
3150                        // SAFETY: user guarantees header.name is a valid C string.
3151                        unsafe {
3152                            if *c_string == 0 {
3153                                break;
3154                            }
3155                            c_string = c_string.add(1);
3156                        }
3157                    }
3158                }
3159
3160                let mut c_string = header.comment;
3161                if !c_string.is_null() {
3162                    loop {
3163                        gz_wrap_len += 1;
3164                        // SAFETY: user guarantees header.comment is a valid C string.
3165                        unsafe {
3166                            if *c_string == 0 {
3167                                break;
3168                            }
3169                            c_string = c_string.add(1);
3170                        }
3171                    }
3172                }
3173
3174                if header.hcrc != 0 {
3175                    gz_wrap_len += 2;
3176                }
3177            }
3178
3179            gz_wrap_len
3180        }
3181        _ => {
3182            // default
3183            ZLIB_WRAPLEN
3184        }
3185    };
3186
3187    if stream.state.w_bits() != MAX_WBITS as u32 || HASH_BITS < 15 {
3188        if stream.state.level == 0 {
3189            /* upper bound for stored blocks with length 127 (memLevel == 1) ~4% overhead plus a small constant */
3190            source_len
3191                .wrapping_add(source_len >> 5)
3192                .wrapping_add(source_len >> 7)
3193                .wrapping_add(source_len >> 11)
3194                .wrapping_add(7)
3195                .wrapping_add(wrap_len)
3196        } else {
3197            comp_len.wrapping_add(wrap_len)
3198        }
3199    } else {
3200        compress_bound_help(source_len, wrap_len)
3201    }
3202}
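
// A hedged usage sketch (not part of the original source): with no stream,
// `bound` assumes the zlib wrapper, which matches the default `DeflateConfig`,
// so a buffer of `bound(None, n)` bytes is always large enough for
// `compress_slice` with the default configuration.
#[cfg(test)]
fn _bound_usage_sketch() {
    let input = b"Hello World!\n";

    // worst-case compressed size, including the zlib wrapper
    let worst_case = bound(None, input.len());

    let mut output = [0u8; 256];
    assert!(worst_case <= output.len());

    let (compressed, err) =
        compress_slice(&mut output[..worst_case], input, DeflateConfig::default());
    assert_eq!(err, ReturnCode::Ok);
    assert!(compressed.len() <= worst_case);
}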
3203
3204/// # Safety
3205///
3206/// If it is not NULL, `dictionary` must be valid for writes of at least `min(strstart + lookahead, w_size)` bytes, i.e. at most the window size.
3207pub unsafe fn get_dictionary(stream: &DeflateStream<'_>, dictionary: *mut u8) -> usize {
3208    let s = &stream.state;
3209    let len = Ord::min(s.strstart + s.lookahead, s.w_size);
3210
3211    if !dictionary.is_null() && len > 0 {
3212        unsafe {
3213            core::ptr::copy_nonoverlapping(
3214                s.window.as_ptr().add(s.strstart + s.lookahead - len),
3215                dictionary,
3216                len,
3217            );
3218        }
3219    }
3220
3221    len
3222}
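
// A hedged usage sketch (mirroring zlib's `deflateGetDictionary`, and based on
// the NULL check above): a NULL pointer only reports the current dictionary
// length, so a caller can size its buffer first and copy in a second call.
#[cfg(test)]
fn _get_dictionary_sketch(stream: &DeflateStream<'_>) {
    // query the length only; no bytes are written for a NULL pointer
    let len = unsafe { get_dictionary(stream, core::ptr::null_mut()) };

    // the dictionary is never longer than the window (at most 1 << MAX_WBITS bytes)
    let mut buffer = [0u8; 32 * 1024];
    assert!(len <= buffer.len());

    let copied = unsafe { get_dictionary(stream, buffer.as_mut_ptr()) };
    assert_eq!(copied, len);
}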
3223
3224struct DeflateAllocOffsets {
3225    total_size: usize,
3226    state_pos: usize,
3227    window_pos: usize,
3228    pending_pos: usize,
3229    sym_buf_pos: usize,
3230    prev_pos: usize,
3231    head_pos: usize,
3232}
3233
3234impl DeflateAllocOffsets {
3235    fn new(window_bits: usize, lit_bufsize: usize) -> Self {
3236        use core::mem::size_of;
3237
3238        const WINDOW_PAD_SIZE: usize = 64;
3239        const LIT_BUFS: usize = 4;
3240
3241        let mut curr_size = 0usize;
3242
3243        /* Define sizes */
3244        let window_size = (1 << window_bits) * 2;
3245        let prev_size = (1 << window_bits) * size_of::<Pos>();
3246        let head_size = HASH_SIZE * size_of::<Pos>();
3247        let pending_size = lit_bufsize * LIT_BUFS;
3248        let sym_buf_size = lit_bufsize * (LIT_BUFS - 1);
3249        let state_size = size_of::<State>();
3251
3252        /* Calculate relative buffer positions and paddings */
3253        let window_pos = curr_size.next_multiple_of(WINDOW_PAD_SIZE);
3254        curr_size = window_pos + window_size;
3255
3256        let prev_pos = curr_size.next_multiple_of(64);
3257        curr_size = prev_pos + prev_size;
3258
3259        let head_pos = curr_size.next_multiple_of(64);
3260        curr_size = head_pos + head_size;
3261
3262        let pending_pos = curr_size.next_multiple_of(64);
3263        curr_size = pending_pos + pending_size;
3264
3265        let sym_buf_pos = curr_size.next_multiple_of(64);
3266        curr_size = sym_buf_pos + sym_buf_size;
3267
3268        let state_pos = curr_size.next_multiple_of(64);
3269        curr_size = state_pos + state_size;
3270
3271        /* Add WINDOW_PAD_SIZE - 1 so the window can be aligned within the allocation, and round the size of the buffer up to a multiple of 64 */
3272        let total_size = (curr_size + (WINDOW_PAD_SIZE - 1)).next_multiple_of(64);
3273
3274        Self {
3275            total_size,
3276            state_pos,
3277            window_pos,
3278            pending_pos,
3279            sym_buf_pos,
3280            prev_pos,
3281            head_pos,
3282        }
3283    }
3284}
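
// A hedged sanity check (not from the original source) for the layout computed
// above, with window_bits = 15 and lit_bufsize = 1 << 14 as assumed example
// values: every sub-buffer must start at a multiple of 64 bytes, and the state,
// which is laid out last, must still fit within `total_size`.
#[cfg(test)]
mod alloc_offsets_sanity {
    use super::{DeflateAllocOffsets, State};

    #[test]
    fn offsets_are_aligned_and_fit() {
        let offsets = DeflateAllocOffsets::new(15, 1 << 14);

        for pos in [
            offsets.window_pos,
            offsets.prev_pos,
            offsets.head_pos,
            offsets.pending_pos,
            offsets.sym_buf_pos,
            offsets.state_pos,
        ] {
            assert_eq!(pos % 64, 0);
        }

        // the state is placed last; it must fit before the padded end of the buffer
        assert!(offsets.state_pos + core::mem::size_of::<State>() <= offsets.total_size);
        assert_eq!(offsets.total_size % 64, 0);
    }
}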
3285
3286#[cfg(test)]
3287mod test {
3288    use crate::{
3289        inflate::{uncompress_slice, InflateConfig, InflateStream},
3290        InflateFlush,
3291    };
3292
3293    use super::*;
3294
3295    use core::{ffi::CStr, sync::atomic::AtomicUsize};
3296
3297    #[test]
3298    fn detect_data_type_basic() {
3299        let empty = || [Value::new(0, 0); LITERALS];
3300
3301        assert_eq!(State::detect_data_type(&empty()), DataType::Binary);
3302
3303        let mut binary = empty();
3304        binary[0] = Value::new(1, 0);
3305        assert_eq!(State::detect_data_type(&binary), DataType::Binary);
3306
3307        let mut text = empty();
3308        text[b'\r' as usize] = Value::new(1, 0);
3309        assert_eq!(State::detect_data_type(&text), DataType::Text);
3310
3311        let mut text = empty();
3312        text[b'a' as usize] = Value::new(1, 0);
3313        assert_eq!(State::detect_data_type(&text), DataType::Text);
3314
3315        let mut non_text = empty();
3316        non_text[7] = Value::new(1, 0);
3317        assert_eq!(State::detect_data_type(&non_text), DataType::Binary);
3318    }
3319
3320    #[test]
3321    fn from_stream_mut() {
3322        unsafe {
3323            assert!(DeflateStream::from_stream_mut(core::ptr::null_mut()).is_none());
3324
3325            let mut stream = z_stream::default();
3326            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());
3327
3328            // zalloc/zfree are still unset and state is still NULL, so this still fails
3329            assert!(DeflateStream::from_stream_mut(&mut stream).is_none());
3330
3331            init(&mut stream, DeflateConfig::default());
3332            let stream = DeflateStream::from_stream_mut(&mut stream);
3333            assert!(stream.is_some());
3334
3335            assert!(end(stream.unwrap()).is_ok());
3336        }
3337    }
3338
3339    #[cfg(feature = "c-allocator")]
3340    unsafe extern "C" fn fail_nth_allocation<const N: usize>(
3341        opaque: crate::c_api::voidpf,
3342        items: crate::c_api::uInt,
3343        size: crate::c_api::uInt,
3344    ) -> crate::c_api::voidpf {
3345        let count = unsafe { &*(opaque as *const AtomicUsize) };
3346
3347        if count.fetch_add(1, core::sync::atomic::Ordering::Relaxed) != N {
3348            // must use the C allocator internally because (de)allocation is dispatched on the
3349            // function pointer values; and because this is not the rust allocator, the allocation
3350            // logic will store a pointer to the start of the allocation at its start.
3351            unsafe { (crate::allocate::C.zalloc)(opaque, items, size) }
3352        } else {
3353            core::ptr::null_mut()
3354        }
3355    }
3356
3357    #[test]
3358    #[cfg(feature = "c-allocator")]
3359    fn init_invalid_allocator() {
3360        {
3361            let atomic = AtomicUsize::new(0);
3362            let mut stream = z_stream {
3363                zalloc: Some(fail_nth_allocation::<0>),
3364                zfree: Some(crate::allocate::C.zfree),
3365                opaque: &atomic as *const _ as *const core::ffi::c_void as *mut _,
3366                ..z_stream::default()
3367            };
3368            assert_eq!(
3369                init(&mut stream, DeflateConfig::default()),
3370                ReturnCode::MemError
3371            );
3372        }
3373    }
3374
3375    #[test]
3376    #[cfg(feature = "c-allocator")]
3377    fn copy_invalid_allocator() {
3378        let mut stream = z_stream::default();
3379
3380        let atomic = AtomicUsize::new(0);
3381        stream.opaque = &atomic as *const _ as *const core::ffi::c_void as *mut _;
3382        stream.zalloc = Some(fail_nth_allocation::<1>);
3383        stream.zfree = Some(crate::allocate::C.zfree);
3384
3385        // init performs a single allocation, which must succeed; the allocation made by `copy` below (the second overall) will fail
3386        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3387
3388        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
3389            unreachable!()
3390        };
3391
3392        let mut stream_copy = MaybeUninit::<DeflateStream>::zeroed();
3393
3394        assert_eq!(copy(&mut stream_copy, stream), ReturnCode::MemError);
3395
3396        assert!(end(stream).is_ok());
3397    }
3398
3399    mod invalid_deflate_config {
3400        use super::*;
3401
3402        #[test]
3403        fn sanity_check() {
3404            let mut stream = z_stream::default();
3405            assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3406
3407            assert!(stream.zalloc.is_some());
3408            assert!(stream.zfree.is_some());
3409
3410            // this should be the default level
3411            let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3412            assert_eq!(stream.state.level, 6);
3413
3414            assert!(end(stream).is_ok());
3415        }
3416
3417        #[test]
3418        fn window_bits_correction() {
3419            // window_bits of 8 gets turned into 9 internally
3420            let mut stream = z_stream::default();
3421            let config = DeflateConfig {
3422                window_bits: 8,
3423                ..Default::default()
3424            };
3425            assert_eq!(init(&mut stream, config), ReturnCode::Ok);
3426            let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3427            assert_eq!(stream.state.w_bits(), 9);
3428
3429            assert!(end(stream).is_ok());
3430        }
3431
3432        #[test]
3433        fn window_bits_too_low() {
3434            let mut stream = z_stream::default();
3435            let config = DeflateConfig {
3436                window_bits: -16,
3437                ..Default::default()
3438            };
3439            assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
3440        }
3441
3442        #[test]
3443        fn window_bits_too_high() {
3444            // window bits too high
3445            let mut stream = z_stream::default();
3446            let config = DeflateConfig {
3447                window_bits: 42,
3448                ..Default::default()
3449            };
3450            assert_eq!(init(&mut stream, config), ReturnCode::StreamError);
3451        }
3452    }
3453
3454    #[test]
3455    fn end_data_error() {
3456        let mut stream = z_stream::default();
3457        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3458        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3459
3460        // now deflate into too little output space (only 3 bytes)
3461        let input = b"Hello World\n";
3462        stream.next_in = input.as_ptr() as *mut u8;
3463        stream.avail_in = input.len() as _;
3464        let output = &mut [0, 0, 0];
3465        stream.next_out = output.as_mut_ptr();
3466        stream.avail_out = output.len() as _;
3467
3468        // the deflate is fine
3469        assert_eq!(deflate(stream, DeflateFlush::NoFlush), ReturnCode::Ok);
3470
3471        // but end is not
3472        assert!(end(stream).is_err());
3473    }
3474
3475    #[test]
3476    fn test_reset_keep() {
3477        let mut stream = z_stream::default();
3478        assert_eq!(init(&mut stream, DeflateConfig::default()), ReturnCode::Ok);
3479        let stream = unsafe { DeflateStream::from_stream_mut(&mut stream) }.unwrap();
3480
3481        // deflate the whole input; there is plenty of output space
3482        let input = b"Hello World\n";
3483        stream.next_in = input.as_ptr() as *mut u8;
3484        stream.avail_in = input.len() as _;
3485
3486        let output = &mut [0; 1024];
3487        stream.next_out = output.as_mut_ptr();
3488        stream.avail_out = output.len() as _;
3489        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);
3490
3491        assert_eq!(reset_keep(stream), ReturnCode::Ok);
3492
3493        let output = &mut [0; 1024];
3494        stream.next_out = output.as_mut_ptr();
3495        stream.avail_out = output.len() as _;
3496        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);
3497
3498        assert!(end(stream).is_ok());
3499    }
3500
3501    #[test]
3502    fn hello_world_huffman_only() {
3503        const EXPECTED: &[u8] = &[
3504            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
3505            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
3506        ];
3507
3508        let input = "Hello World!\n";
3509
3510        let mut output = vec![0; 128];
3511
3512        let config = DeflateConfig {
3513            level: 6,
3514            method: Method::Deflated,
3515            window_bits: crate::MAX_WBITS,
3516            mem_level: DEF_MEM_LEVEL,
3517            strategy: Strategy::HuffmanOnly,
3518        };
3519
3520        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);
3521
3522        assert_eq!(err, ReturnCode::Ok);
3523
3524        assert_eq!(output.len(), EXPECTED.len());
3525
3526        assert_eq!(EXPECTED, output);
3527    }
3528
3529    #[test]
3530    fn hello_world_quick() {
3531        const EXPECTED: &[u8] = &[
3532            0x78, 0x01, 0xf3, 0x48, 0xcd, 0xc9, 0xc9, 0x57, 0x08, 0xcf, 0x2f, 0xca, 0x49, 0x51,
3533            0xe4, 0x02, 0x00, 0x20, 0x91, 0x04, 0x48,
3534        ];
3535
3536        let input = "Hello World!\n";
3537
3538        let mut output = vec![0; 128];
3539
3540        let config = DeflateConfig {
3541            level: 1,
3542            method: Method::Deflated,
3543            window_bits: crate::MAX_WBITS,
3544            mem_level: DEF_MEM_LEVEL,
3545            strategy: Strategy::Default,
3546        };
3547
3548        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);
3549
3550        assert_eq!(err, ReturnCode::Ok);
3551
3552        assert_eq!(output.len(), EXPECTED.len());
3553
3554        assert_eq!(EXPECTED, output);
3555    }
3556
3557    #[test]
3558    fn hello_world_quick_random() {
3559        const EXPECTED: &[u8] = &[
3560            0x78, 0x01, 0x53, 0xe1, 0x50, 0x51, 0xe1, 0x52, 0x51, 0x51, 0x01, 0x00, 0x03, 0xec,
3561            0x00, 0xeb,
3562        ];
3563
3564        let input = "$\u{8}$$\n$$$";
3565
3566        let mut output = vec![0; 128];
3567
3568        let config = DeflateConfig {
3569            level: 1,
3570            method: Method::Deflated,
3571            window_bits: crate::MAX_WBITS,
3572            mem_level: DEF_MEM_LEVEL,
3573            strategy: Strategy::Default,
3574        };
3575
3576        let (output, err) = compress_slice(&mut output, input.as_bytes(), config);
3577
3578        assert_eq!(err, ReturnCode::Ok);
3579
3580        assert_eq!(output.len(), EXPECTED.len());
3581
3582        assert_eq!(EXPECTED, output);
3583    }
3584
3585    fn fuzz_based_test(input: &[u8], config: DeflateConfig, expected: &[u8]) {
3586        let mut output_rs = [0; 1 << 17];
3587        let (output_rs, err) = compress_slice(&mut output_rs, input, config);
3588        assert_eq!(err, ReturnCode::Ok);
3589
3590        assert_eq!(output_rs, expected);
3591    }
3592
3593    #[test]
3594    fn simple_rle() {
3595        fuzz_based_test(
3596            "\0\0\0\0\u{6}".as_bytes(),
3597            DeflateConfig {
3598                level: -1,
3599                method: Method::Deflated,
3600                window_bits: 11,
3601                mem_level: 4,
3602                strategy: Strategy::Rle,
3603            },
3604            &[56, 17, 99, 0, 2, 54, 0, 0, 11, 0, 7],
3605        )
3606    }
3607
3608    #[test]
3609    fn fill_window_out_of_bounds() {
3610        const INPUT: &[u8] = &[
3611            0x71, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3612            0x71, 0x71, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x1d, 0x1d, 0x63,
3613            0x63, 0x63, 0x63, 0x63, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d, 0x1d,
3614            0x1d, 0x27, 0x0, 0x0, 0x0, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
3615            0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0,
3616            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3617            0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
3618            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x48, 0x50,
3619            0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a,
3620            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3621            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0,
3622            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x6a, 0x0, 0x0, 0x0, 0x0,
3623            0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3624            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3625            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3626            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x31, 0x0, 0x0, 0x0, 0x0,
3627            0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3628            0x71, 0x71, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71,
3629            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3630            0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71,
3631            0x6a, 0x0, 0x0, 0x0, 0x0, 0x71, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3632            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3633            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3634            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3635            0x31, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1d, 0x1d, 0x0, 0x0, 0x0, 0x0,
3636            0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50, 0x50,
3637            0x50, 0x50, 0x50, 0x50, 0x48, 0x50, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71,
3638            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x50, 0x50, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3639            0x2c, 0x0, 0x0, 0x0, 0x0, 0x4a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71,
3640            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3641            0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x6, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71,
3642            0x71, 0x71, 0x71, 0x70, 0x71, 0x71, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3643            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71,
3644            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3645            0x71, 0x3b, 0x3f, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x20, 0x0, 0x0, 0x0, 0x0,
3646            0x0, 0x0, 0x0, 0x71, 0x75, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3647            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71,
3648            0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71,
3649            0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x71, 0x71, 0x71, 0x71, 0x71,
3650            0x71, 0x71, 0x71, 0x10, 0x0, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71,
3651            0x71, 0x3b, 0x71, 0x71, 0x71, 0x71, 0x71, 0x71, 0x76, 0x71, 0x34, 0x34, 0x34, 0x34,
3652            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
3653            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x34, 0x34, 0x34, 0x34,
3654            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
3655            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
3656            0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34,
3657            0x34, 0x34, 0x30, 0x34, 0x34, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3658            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3659            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3660            0x0, 0x0, 0x71, 0x0, 0x0, 0x0, 0x0, 0x6,
3661        ];
3662
3663        fuzz_based_test(
3664            INPUT,
3665            DeflateConfig {
3666                level: -1,
3667                method: Method::Deflated,
3668                window_bits: 9,
3669                mem_level: 1,
3670                strategy: Strategy::HuffmanOnly,
3671            },
3672            &[
3673                0x18, 0x19, 0x4, 0xc1, 0x21, 0x1, 0xc4, 0x0, 0x10, 0x3, 0xb0, 0x18, 0x29, 0x1e,
3674                0x7e, 0x17, 0x83, 0xf5, 0x70, 0x6c, 0xac, 0xfe, 0xc9, 0x27, 0xdb, 0xb6, 0x6f, 0xdb,
3675                0xb6, 0x6d, 0xdb, 0x80, 0x24, 0xb9, 0xbb, 0xbb, 0x24, 0x49, 0x92, 0x24, 0xf, 0x2,
3676                0xd8, 0x36, 0x0, 0xf0, 0x3, 0x0, 0x0, 0x24, 0xd0, 0xb6, 0x6d, 0xdb, 0xb6, 0x6d,
3677                0xdb, 0xbe, 0x6d, 0xf9, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x80, 0x30, 0x0, 0xc3, 0x22,
3678                0x68, 0xf, 0x36, 0x90, 0xc2, 0xb5, 0xfa, 0x7f, 0x48, 0x80, 0x81, 0xb, 0x40, 0x55,
3679                0x55, 0x55, 0xd5, 0x16, 0x80, 0xaa, 0x7, 0x9, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
3680                0xe, 0x7c, 0x82, 0xe0, 0x98, 0x0, 0x0, 0x0, 0x4, 0x60, 0x10, 0xf9, 0x8c, 0xe2,
3681                0xe5, 0xfa, 0x3f, 0x2, 0x54, 0x55, 0x55, 0x65, 0x0, 0xa8, 0xaa, 0xaa, 0xaa, 0xba,
3682                0x2, 0x50, 0xb5, 0x90, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x78, 0x82, 0xe0, 0xd0,
3683                0x8a, 0x41, 0x0, 0x0, 0xa2, 0x58, 0x54, 0xb7, 0x60, 0x83, 0x9a, 0x6a, 0x4, 0x96,
3684                0x87, 0xba, 0x51, 0xf8, 0xfb, 0x9b, 0x26, 0xfc, 0x0, 0x1c, 0x7, 0x6c, 0xdb, 0xb6,
3685                0x6d, 0xdb, 0xb6, 0x6d, 0xf7, 0xa8, 0x3a, 0xaf, 0xaa, 0x6a, 0x3, 0xf8, 0xc2, 0x3,
3686                0x40, 0x55, 0x55, 0x55, 0xd5, 0x5b, 0xf8, 0x80, 0xaa, 0x7a, 0xb, 0x0, 0x7f, 0x82,
3687                0xe0, 0x98, 0x0, 0x40, 0x18, 0x0, 0x82, 0xd8, 0x49, 0x40, 0x2, 0x22, 0x7e, 0xeb,
3688                0x80, 0xa6, 0xc, 0xa0, 0x9f, 0xa4, 0x2a, 0x38, 0xf, 0x0, 0x0, 0xe7, 0x1, 0xdc,
3689                0x55, 0x95, 0x17, 0x0, 0x0, 0xae, 0x0, 0x38, 0xc0, 0x67, 0xdb, 0x36, 0x80, 0x2b,
3690                0x0, 0xe, 0xf0, 0xd9, 0xf6, 0x13, 0x4, 0xc7, 0x4, 0x0, 0x0, 0x30, 0xc, 0x83, 0x22,
3691                0x69, 0x7, 0xc6, 0xea, 0xff, 0x19, 0x0, 0x0, 0x80, 0xaa, 0x0, 0x0, 0x0, 0x0, 0x0,
3692                0x0, 0x8e, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x6a,
3693                0xf5, 0x63, 0x60, 0x60, 0x3, 0x0, 0xee, 0x8a, 0x88, 0x67,
3694            ],
3695        )
3696    }
3697
3698    #[test]
3699    fn gzip_no_header() {
3700        let config = DeflateConfig {
3701            level: 9,
3702            method: Method::Deflated,
3703            window_bits: 31, // gzip
3704            ..Default::default()
3705        };
3706
3707        let input = b"Hello World!";
3708        let os = gz_header::OS_CODE;
3709
3710        fuzz_based_test(
3711            input,
3712            config,
3713            &[
3714                31, 139, 8, 0, 0, 0, 0, 0, 2, os, 243, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
3715                81, 4, 0, 163, 28, 41, 28, 12, 0, 0, 0,
3716            ],
3717        )
3718    }
3719
3720    #[test]
3721    #[rustfmt::skip]
3722    fn gzip_stored_block_checksum() {
3723        fuzz_based_test(
3724            &[
3725                27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0,
3726            ],
3727            DeflateConfig {
3728                level: 0,
3729                method: Method::Deflated,
3730                window_bits: 26,
3731                mem_level: 6,
3732                strategy: Strategy::Default,
3733            },
3734            &[
3735                31, 139, 8, 0, 0, 0, 0, 0, 4, gz_header::OS_CODE, 1, 18, 0, 237, 255, 27, 27, 27, 27, 27, 27, 27,
3736                27, 27, 27, 27, 27, 27, 27, 27, 27, 9, 0, 60, 101, 156, 55, 18, 0, 0, 0,
3737            ],
3738        )
3739    }
3740
3741    #[test]
3742    fn gzip_header_pending_flush() {
3743        let extra = "aaaaaaaaaaaaaaaaaaaa\0";
3744        let name = "bbbbbbbbbbbbbbbbbbbb\0";
3745        let comment = "cccccccccccccccccccc\0";
3746
3747        let mut header = gz_header {
3748            text: 0,
3749            time: 0,
3750            xflags: 0,
3751            os: 0,
3752            extra: extra.as_ptr() as *mut _,
3753            extra_len: extra.len() as _,
3754            extra_max: 0,
3755            name: name.as_ptr() as *mut _,
3756            name_max: 0,
3757            comment: comment.as_ptr() as *mut _,
3758            comm_max: 0,
3759            hcrc: 1,
3760            done: 0,
3761        };
3762
3763        let config = DeflateConfig {
3764            window_bits: 31,
3765            mem_level: 1,
3766            ..Default::default()
3767        };
3768
3769        let mut stream = z_stream::default();
3770        assert_eq!(init(&mut stream, config), ReturnCode::Ok);
3771
3772        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
3773            unreachable!()
3774        };
3775
3776        unsafe { set_header(stream, Some(&mut header)) };
3777
3778        let input = b"Hello World\n";
3779        stream.next_in = input.as_ptr() as *mut _;
3780        stream.avail_in = input.len() as _;
3781
3782        let mut output = [0u8; 1024];
3783        stream.next_out = output.as_mut_ptr();
3784        stream.avail_out = 100;
3785
3786        assert_eq!(stream.state.bit_writer.pending.capacity(), 512);
3787
3788        // only 12 bytes remain, so the pending buffer must be flushed before the name can be
3789        // written. But there is insufficient output space to flush it (only 100 bytes).
3790        stream.state.bit_writer.pending.extend(&[0; 500]);
3791
3792        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::Ok);
3793
3794        // now try that again but with sufficient output space
3795        stream.avail_out = output.len() as _;
3796        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);
3797
3798        let n = stream.total_out as usize;
3799
3800        assert!(end(stream).is_ok());
3801
3802        let output_rs = &mut output[..n];
3803
3804        assert_eq!(output_rs.len(), 500 + 99);
3805    }
3806
3807    #[test]
3808    fn gzip_with_header() {
3809        // this test is here mostly so we get some MIRI action on the gzip header. A test that
3810        // compares behavior with zlib-ng is in the libz-rs-sys test suite
3811
3812        let extra = "some extra stuff\0";
3813        let name = "nomen est omen\0";
3814        let comment = "such comment\0";
3815
3816        let mut header = gz_header {
3817            text: 0,
3818            time: 0,
3819            xflags: 0,
3820            os: 0,
3821            extra: extra.as_ptr() as *mut _,
3822            extra_len: extra.len() as _,
3823            extra_max: 0,
3824            name: name.as_ptr() as *mut _,
3825            name_max: 0,
3826            comment: comment.as_ptr() as *mut _,
3827            comm_max: 0,
3828            hcrc: 1,
3829            done: 0,
3830        };
3831
3832        let config = DeflateConfig {
3833            window_bits: 31,
3834            ..Default::default()
3835        };
3836
3837        let mut stream = z_stream::default();
3838        assert_eq!(init(&mut stream, config), ReturnCode::Ok);
3839
3840        let Some(stream) = (unsafe { DeflateStream::from_stream_mut(&mut stream) }) else {
3841            unreachable!()
3842        };
3843
3844        unsafe { set_header(stream, Some(&mut header)) };
3845
3846        let input = b"Hello World\n";
3847        stream.next_in = input.as_ptr() as *mut _;
3848        stream.avail_in = input.len() as _;
3849
3850        let mut output = [0u8; 256];
3851        stream.next_out = output.as_mut_ptr();
3852        stream.avail_out = output.len() as _;
3853
3854        assert_eq!(deflate(stream, DeflateFlush::Finish), ReturnCode::StreamEnd);
3855
3856        let n = stream.total_out as usize;
3857
3858        assert!(end(stream).is_ok());
3859
3860        let output_rs = &mut output[..n];
3861
3862        assert_eq!(output_rs.len(), 81);
3863
3864        {
3865            let mut stream = z_stream::default();
3866
3867            let config = InflateConfig {
3868                window_bits: config.window_bits,
3869            };
3870
3871            assert_eq!(crate::inflate::init(&mut stream, config), ReturnCode::Ok);
3872
3873            let Some(stream) = (unsafe { InflateStream::from_stream_mut(&mut stream) }) else {
3874                unreachable!();
3875            };
3876
3877            stream.next_in = output_rs.as_mut_ptr() as _;
3878            stream.avail_in = output_rs.len() as _;
3879
3880            let mut output = [0u8; 12];
3881            stream.next_out = output.as_mut_ptr();
3882            stream.avail_out = output.len() as _;
3883
3884            let mut extra_buf = [0u8; 64];
3885            let mut name_buf = [0u8; 64];
3886            let mut comment_buf = [0u8; 64];
3887
3888            let mut header = gz_header {
3889                text: 0,
3890                time: 0,
3891                xflags: 0,
3892                os: 0,
3893                extra: extra_buf.as_mut_ptr(),
3894                extra_len: 0,
3895                extra_max: extra_buf.len() as _,
3896                name: name_buf.as_mut_ptr(),
3897                name_max: name_buf.len() as _,
3898                comment: comment_buf.as_mut_ptr(),
3899                comm_max: comment_buf.len() as _,
3900                hcrc: 0,
3901                done: 0,
3902            };
3903
3904            assert_eq!(
3905                unsafe { crate::inflate::get_header(stream, Some(&mut header)) },
3906                ReturnCode::Ok
3907            );
3908
3909            assert_eq!(
3910                unsafe { crate::inflate::inflate(stream, InflateFlush::Finish) },
3911                ReturnCode::StreamEnd
3912            );
3913
3914            crate::inflate::end(stream);
3915
3916            assert!(!header.comment.is_null());
3917            assert_eq!(
3918                unsafe { CStr::from_ptr(header.comment.cast()) }
3919                    .to_str()
3920                    .unwrap(),
3921                comment.trim_end_matches('\0')
3922            );
3923
3924            assert!(!header.name.is_null());
3925            assert_eq!(
3926                unsafe { CStr::from_ptr(header.name.cast()) }
3927                    .to_str()
3928                    .unwrap(),
3929                name.trim_end_matches('\0')
3930            );
3931
3932            assert!(!header.extra.is_null());
3933            assert_eq!(
3934                unsafe { CStr::from_ptr(header.extra.cast()) }
3935                    .to_str()
3936                    .unwrap(),
3937                extra.trim_end_matches('\0')
3938            );
3939        }
3940    }
3941
3942    #[test]
3943    fn insufficient_compress_space() {
3944        const DATA: &[u8] = include_bytes!("deflate/test-data/inflate_buf_error.dat");
3945
3946        fn helper(deflate_buf: &mut [u8]) -> ReturnCode {
3947            let config = DeflateConfig {
3948                level: 0,
3949                method: Method::Deflated,
3950                window_bits: 10,
3951                mem_level: 6,
3952                strategy: Strategy::Default,
3953            };
3954
3955            let (output, err) = compress_slice(deflate_buf, DATA, config);
3956            assert_eq!(err, ReturnCode::Ok);
3957
3958            let config = InflateConfig {
3959                window_bits: config.window_bits,
3960            };
3961
3962            let mut uncompr = [0; 1 << 17];
3963            let (uncompr, err) = uncompress_slice(&mut uncompr, output, config);
3964
3965            if err == ReturnCode::Ok {
3966                assert_eq!(DATA, uncompr);
3967            }
3968
3969            err
3970        }
3971
3972        let mut output = [0; 1 << 17];
3973
3974        // this is too little space
3975        assert_eq!(helper(&mut output[..1 << 16]), ReturnCode::DataError);
3976
3977        // this is sufficient space
3978        assert_eq!(helper(&mut output), ReturnCode::Ok);
3979    }
3980
3981    fn test_flush(flush: DeflateFlush, expected: &[u8]) {
3982        let input = b"Hello World!\n";
3983
3984        let config = DeflateConfig {
3985            level: 6,
3986            method: Method::Deflated,
3987            window_bits: 16 + crate::MAX_WBITS, // use gzip
3988            mem_level: DEF_MEM_LEVEL,
3989            strategy: Strategy::Default,
3990        };
3991
3992        let mut output_rs = vec![0; 128];
3993
3994        // with the flush modes that we test here, the deflate process still has `Status::Busy`,
3995        // and the `deflateEnd` function will return `DataError`.
3996        let expected_err = ReturnCode::DataError;
3997
3998        let (rs, err) = compress_slice_with_flush(&mut output_rs, input, config, flush);
3999        assert_eq!(expected_err, err);
4000
4001        assert_eq!(rs, expected);
4002    }
4003
4004    #[test]
4005    #[rustfmt::skip]
4006    fn sync_flush() {
4007        test_flush(
4008            DeflateFlush::SyncFlush,
4009            &[
4010                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4011                81, 228, 2, 0, 0, 0, 255, 255,
4012            ],
4013        )
4014    }
4015
4016    #[test]
4017    #[rustfmt::skip]
4018    fn partial_flush() {
4019        test_flush(
4020            DeflateFlush::PartialFlush,
4021            &[
4022                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4023                81, 228, 2, 8,
4024            ],
4025        );
4026    }
4027
4028    #[test]
4029    #[rustfmt::skip]
4030    fn full_flush() {
4031        test_flush(
4032            DeflateFlush::FullFlush,
4033            &[
4034                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4035                81, 228, 2, 0, 0, 0, 255, 255,
4036            ],
4037        );
4038    }
4039
4040    #[test]
4041    #[rustfmt::skip]
4042    fn block_flush() {
4043        test_flush(
4044            DeflateFlush::Block,
4045            &[
4046                31, 139, 8, 0, 0, 0, 0, 0, 0, gz_header::OS_CODE, 242, 72, 205, 201, 201, 87, 8, 207, 47, 202, 73,
4047                81, 228, 2,
4048            ],
4049        );
4050    }
4051
4052    #[test]
4053    // splits the input into two, deflates them separately, and then joins the deflated byte streams
4054    // into something that can be correctly inflated again. This is the basic idea behind pigz, and
4055    // allows for parallel compression.
4056    fn split_deflate() {
4057        let input = "Hello World!\n";
4058
4059        let (input1, input2) = input.split_at(6);
4060
4061        let mut output1 = vec![0; 128];
4062        let mut output2 = vec![0; 128];
4063
4064        let config = DeflateConfig {
4065            level: 6,
4066            method: Method::Deflated,
4067            window_bits: 16 + crate::MAX_WBITS, // use gzip
4068            mem_level: DEF_MEM_LEVEL,
4069            strategy: Strategy::Default,
4070        };
4071
4072        // see also the docs on `SyncFlush`: it makes sure everything is flushed, that the output
4073        // ends on a byte boundary, and that the final block does not have the "last block" bit set.
4074        let (prefix, err) = compress_slice_with_flush(
4075            &mut output1,
4076            input1.as_bytes(),
4077            config,
4078            DeflateFlush::SyncFlush,
4079        );
4080        assert_eq!(err, ReturnCode::DataError);
4081
4082        let (output2, err) = compress_slice_with_flush(
4083            &mut output2,
4084            input2.as_bytes(),
4085            config,
4086            DeflateFlush::Finish,
4087        );
4088        assert_eq!(err, ReturnCode::Ok);
4089
4090        let inflate_config = crate::inflate::InflateConfig {
4091            window_bits: 16 + 15,
4092        };
4093
4094        // cut off the 8-byte gzip trailer (crc32, then the input length)
4095        let (suffix, end) = output2.split_at(output2.len() - 8);
4096        let (crc2, len2) = end.split_at(4);
4097        let crc2 = u32::from_le_bytes(crc2.try_into().unwrap());
4098
4099        // cuts off the gzip header (10 bytes) from the front
4100        let suffix = &suffix[10..];
4101
4102        let mut result: Vec<u8> = Vec::new();
4103        result.extend(prefix.iter());
4104        result.extend(suffix);
4105
4106        // it would be more proper to use `stream.total_in` here, but the slice helpers hide the
4107        // stream, so we cheat a bit
4108        let len1 = input1.len() as u32;
4109        let len2 = u32::from_le_bytes(len2.try_into().unwrap());
4110        assert_eq!(len2 as usize, input2.len());
4111
4112        let crc1 = crate::crc32(0, input1.as_bytes());
4113        let crc = crate::crc32_combine(crc1, crc2, len2 as u64);
4114
4115        // combined crc of the parts should be the crc of the whole
4116        let crc_cheating = crate::crc32(0, input.as_bytes());
4117        assert_eq!(crc, crc_cheating);
4118
4119        // write the trailer
4120        result.extend(crc.to_le_bytes());
4121        result.extend((len1 + len2).to_le_bytes());
4122
4123        let mut output = vec![0; 128];
4124        let (output, err) = crate::inflate::uncompress_slice(&mut output, &result, inflate_config);
4125        assert_eq!(err, ReturnCode::Ok);
4126
4127        assert_eq!(output, input.as_bytes());
4128    }
4129
4130    #[test]
4131    fn inflate_window_copy_slice() {
4132        let uncompressed = [
4133            9, 126, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0,
4134            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4135            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4136            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4137            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12,
4138            10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 76, 33, 8, 2, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0,
4139            0, 0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4140            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
4141            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4142            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4143            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4144            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0,
4145            0, 0, 12, 10, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4146            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 14, 0, 0, 0, 0, 0, 69, 69, 69, 69, 69, 69, 69,
4147            69, 69, 69, 69, 69, 69, 69, 69, 9, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4148            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4149            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
4150            69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 12, 28, 0, 2, 0, 0, 0, 63, 1, 0, 12, 2,
4151            36, 0, 28, 0, 0, 0, 1, 0, 0, 63, 63, 13, 0, 0, 0, 0, 0, 0, 0, 63, 63, 63, 63, 0, 0, 0,
4152            0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0,
4153            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4154            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4155            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 0, 0, 0, 9, 0, 0, 0, 9, 0, 0, 12, 33, 2, 0, 0, 8,
4156            0, 4, 0, 0, 0, 12, 10, 41, 12, 10, 47,
4157        ];
4158
4159        let compressed = &[
4160            31, 139, 8, 0, 0, 0, 0, 0, 4, 3, 181, 193, 49, 14, 194, 32, 24, 128, 209, 175, 192, 0,
4161            228, 151, 232, 206, 66, 226, 226, 96, 60, 2, 113, 96, 235, 13, 188, 139, 103, 23, 106,
4162            104, 108, 100, 49, 169, 239, 185, 39, 11, 199, 7, 51, 39, 171, 248, 118, 226, 63, 52,
4163            157, 120, 86, 102, 78, 86, 209, 104, 58, 241, 84, 129, 166, 12, 4, 154, 178, 229, 202,
4164            30, 36, 130, 166, 19, 79, 21, 104, 202, 64, 160, 41, 91, 174, 236, 65, 34, 10, 200, 19,
4165            162, 206, 68, 96, 130, 156, 15, 188, 229, 138, 197, 157, 161, 35, 3, 87, 126, 245, 0,
4166            28, 224, 64, 146, 2, 139, 1, 196, 95, 196, 223, 94, 10, 96, 92, 33, 86, 2, 0, 0,
4167        ];
4168
4169        let config = InflateConfig { window_bits: 25 };
4170
4171        let mut dest_vec_rs = vec![0u8; uncompressed.len()];
4172        let (output_rs, error) =
4173            crate::inflate::uncompress_slice(&mut dest_vec_rs, compressed, config);
4174
4175        assert_eq!(ReturnCode::Ok, error);
4176        assert_eq!(output_rs, uncompressed);
4177    }
4178
4179    #[test]
4180    fn hash_calc_difference() {
4181        let input = [
4182            0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 43, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0,
4183            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4184            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4185            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4186            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0,
4187            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4188            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0,
4189            0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0,
4190            0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 102, 102, 102, 102, 102,
4191            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102,
4192            102, 102, 102, 102, 102, 102, 102, 102, 112, 102, 102, 102, 102, 102, 102, 102, 102,
4193            102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 0,
4194            0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4195            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4196            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4197            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4198            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4199            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0,
4200            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 64, 0, 0, 0, 0, 0, 0, 0,
4201            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0,
4202            50, 0,
4203        ];
4204
4205        let config = DeflateConfig {
4206            level: 6,
4207            method: Method::Deflated,
4208            window_bits: 9,
4209            mem_level: 8,
4210            strategy: Strategy::Default,
4211        };
4212
4213        let expected = [
4214            24, 149, 99, 96, 96, 96, 96, 208, 6, 17, 112, 138, 129, 193, 128, 1, 29, 24, 50, 208,
4215            1, 200, 146, 169, 79, 24, 74, 59, 96, 147, 52, 71, 22, 70, 246, 88, 26, 94, 80, 128,
4216            83, 6, 162, 219, 144, 76, 183, 210, 5, 8, 67, 105, 36, 159, 35, 128, 57, 118, 97, 100,
4217            160, 197, 192, 192, 96, 196, 0, 0, 3, 228, 25, 128,
4218        ];
4219
4220        fuzz_based_test(&input, config, &expected);
4221    }
4222
4223    #[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
4224    mod _cache_lines {
4225        use super::State;
4226        // FIXME: once the zlib-rs Minimum Supported Rust Version is >= 1.77, switch to
4227        // core::mem::offset_of and move this _cache_lines module up a level, from the tests to super::
4228        use memoffset::offset_of;
4229
4230        const _: () = assert!(offset_of!(State, status) == 0);
4231        const _: () = assert!(offset_of!(State, _cache_line_0) == 64);
4232        const _: () = assert!(offset_of!(State, _cache_line_1) == 128);
4233        const _: () = assert!(offset_of!(State, _cache_line_2) == 192);
4234        const _: () = assert!(offset_of!(State, _cache_line_3) == 256);
4235    }
4236}