1#![allow(non_snake_case)] #![allow(clippy::missing_safety_doc)] use core::ffi::{c_char, c_int, c_long, c_ulong};
5use core::marker::PhantomData;
6use core::mem::MaybeUninit;
7use core::ops::ControlFlow;
8
9mod bitreader;
10mod infback;
11mod inffixed_tbl;
12mod inftrees;
13mod window;
14mod writer;
15
16use crate::allocate::Allocator;
17use crate::c_api::internal_state;
18use crate::cpu_features::CpuFeatures;
19use crate::{
20 adler32::adler32,
21 c_api::{gz_header, z_checksum, z_size, z_stream, Z_DEFLATED},
22 inflate::writer::Writer,
23 Code, InflateFlush, ReturnCode, DEF_WBITS, MAX_WBITS, MIN_WBITS,
24};
25
26use crate::crc32::{crc32, Crc32Fold};
27
28pub use self::infback::{back, back_end, back_init};
29pub use self::window::Window;
30use self::{
31 bitreader::BitReader,
32 inftrees::{inflate_table, CodeType, InflateTable},
33};
34
35const INFLATE_STRICT: bool = false;
36
/// The inflate-side view of a [`z_stream`].
///
/// This struct is `repr(C)` and is asserted (see `_S`/`_A` in the impl block)
/// to have exactly the same size and alignment as `z_stream`, so a pointer to
/// one may be reinterpreted as a pointer to the other. The field order must
/// therefore match `z_stream` field-for-field; do not reorder fields.
#[repr(C)]
pub struct InflateStream<'a> {
    // next input byte (caller-provided buffer)
    pub(crate) next_in: *mut crate::c_api::Bytef,
    // number of bytes available at `next_in`
    pub(crate) avail_in: crate::c_api::uInt,
    // total number of input bytes read so far
    pub(crate) total_in: crate::c_api::z_size,
    // next output byte (caller-provided buffer)
    pub(crate) next_out: *mut crate::c_api::Bytef,
    // remaining free space at `next_out`
    pub(crate) avail_out: crate::c_api::uInt,
    // total number of bytes output so far
    pub(crate) total_out: crate::c_api::z_size,
    // last error message, or NULL (corresponds to `z_stream.msg`)
    pub(crate) msg: *mut c_char,
    // internal decompression state; occupies the `z_stream.state` slot
    pub(crate) state: &'a mut State<'a>,
    // allocation functions; occupies the `zalloc`/`zfree`/`opaque` slots
    pub(crate) alloc: Allocator<'a>,
    // best guess about the data type: binary or text
    pub(crate) data_type: c_int,
    // adler-32 or crc-32 value of the uncompressed data
    pub(crate) adler: crate::c_api::z_checksum,
    // reserved for future use (kept for `z_stream` layout compatibility)
    pub(crate) reserved: crate::c_api::uLong,
}
54
// SAFETY: `InflateStream` holds raw pointers (`next_in`, `next_out`, `msg`)
// whose buffers are owned and synchronized by the caller, matching zlib's
// contract that a stream is only operated on by one thread at a time.
// NOTE(review): soundness relies on callers upholding that contract — the
// visible code does not enforce it; confirm before sharing streams across
// threads.
unsafe impl Sync for InflateStream<'_> {}
unsafe impl Send for InflateStream<'_> {}
57
#[cfg(feature = "__internal-test")]
#[doc(hidden)]
/// Size in bytes of the internal inflate [`State`], exposed for tests only.
pub const INFLATE_STATE_SIZE: usize = core::mem::size_of::<crate::inflate::State>();
61
#[cfg(feature = "__internal-test")]
#[doc(hidden)]
/// Test-only hook: forces the inflate state machine into [`Mode::Dict`].
///
/// # Safety
///
/// `strm.state` must point to a valid, initialized inflate [`State`]
/// (i.e. the stream must have been set up by `init` and not yet ended).
pub unsafe fn set_mode_dict(strm: &mut z_stream) {
    unsafe {
        // Reinterpret the opaque `internal_state` pointer as our `State`.
        (*(strm.state as *mut State)).mode = Mode::Dict;
    }
}
69
#[cfg(feature = "__internal-test")]
#[doc(hidden)]
/// Test-only hook: forces the inflate state machine into [`Mode::Sync`].
///
/// # Safety
///
/// `strm` must be non-null and point to a valid `z_stream` whose `state`
/// points to a valid, initialized inflate [`State`].
pub unsafe fn set_mode_sync(strm: *mut z_stream) {
    unsafe {
        // Reinterpret the opaque `internal_state` pointer as our `State`.
        (*((*strm).state as *mut State)).mode = Mode::Sync;
    }
}
77
impl<'a> InflateStream<'a> {
    // Compile-time proof that `InflateStream` and `z_stream` have identical
    // size and alignment, which is what makes the pointer casts below sound.
    const _S: () = assert!(core::mem::size_of::<z_stream>() == core::mem::size_of::<Self>());
    const _A: () = assert!(core::mem::align_of::<z_stream>() == core::mem::align_of::<Self>());

    /// Reinterprets a `*const z_stream` as a shared [`InflateStream`] reference.
    ///
    /// Returns `None` when the pointer is NULL, when the allocation callbacks
    /// are not both set, or when the stream carries no internal state —
    /// i.e. when the stream was not (or not correctly) initialized for inflate.
    ///
    /// # Safety
    ///
    /// `strm` must either be NULL or point to a valid `z_stream` whose
    /// `state`, if non-null, was produced by this module's `init`.
    #[inline(always)]
    pub unsafe fn from_stream_ref(strm: *const z_stream) -> Option<&'a Self> {
        {
            // Validate through the plain `z_stream` view before reinterpreting.
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        // Sound because of the `_S`/`_A` layout assertions above.
        unsafe { strm.cast::<InflateStream>().as_ref() }
    }

    /// Reinterprets a `*mut z_stream` as a mutable [`InflateStream`] reference.
    ///
    /// Same validation and layout rationale as [`Self::from_stream_ref`].
    ///
    /// # Safety
    ///
    /// `strm` must either be NULL or point to a valid `z_stream` whose
    /// `state`, if non-null, was produced by this module's `init`, and no
    /// other reference to the stream may be live.
    #[inline(always)]
    pub unsafe fn from_stream_mut(strm: *mut z_stream) -> Option<&'a mut Self> {
        {
            let stream = unsafe { strm.as_ref() }?;

            if stream.zalloc.is_none() || stream.zfree.is_none() {
                return None;
            }

            if stream.state.is_null() {
                return None;
            }
        }

        unsafe { strm.cast::<InflateStream>().as_mut() }
    }

    /// Views `self` as the underlying `z_stream` (layout-compatible cast).
    fn as_z_stream_mut(&mut self) -> &mut z_stream {
        unsafe { &mut *(self as *mut _ as *mut z_stream) }
    }

    /// Creates a fresh inflate stream with the given configuration.
    ///
    /// # Panics
    ///
    /// Panics if `init` does not return `ReturnCode::Ok` (e.g. invalid config).
    pub fn new(config: InflateConfig) -> Self {
        let mut inner = crate::c_api::z_stream::default();

        let ret = crate::inflate::init(&mut inner, config);
        assert_eq!(ret, ReturnCode::Ok);

        // Sound because of the `_S`/`_A` layout assertions above, and because
        // `init` succeeded so the stream invariants hold.
        unsafe { core::mem::transmute(inner) }
    }
}
152
// Maximum number of bits in a Huffman code (RFC 1951).
const MAX_BITS: u8 = 15;
// Maximum number of extra bits for a distance code (RFC 1951).
const MAX_DIST_EXTRA_BITS: u8 = 13;

/// Decompresses `input` into `output`, returning the initialized prefix of
/// `output` together with the [`ReturnCode`].
///
/// Thin wrapper over [`uncompress`] for callers that already have an
/// initialized output buffer.
pub fn decompress_slice<'a>(
    output: &'a mut [u8],
    input: &[u8],
    config: InflateConfig,
) -> (&'a mut [u8], ReturnCode) {
    // SAFETY: `MaybeUninit<u8>` has the same layout as `u8`, and viewing
    // initialized bytes as possibly-uninitialized is always sound.
    let output_uninit = unsafe {
        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
    };

    uncompress(output_uninit, input, config)
}
184
185pub fn uncompress<'a>(
187 output: &'a mut [MaybeUninit<u8>],
188 input: &[u8],
189 config: InflateConfig,
190) -> (&'a mut [u8], ReturnCode) {
191 let (_consumed, output, ret) = uncompress2(output, input, config);
192 (output, ret)
193}
194
/// Decompresses `input` into `output`, mirroring zlib's `uncompress2`.
///
/// Returns `(input_bytes, initialized_output, return_code)`.
///
/// NOTE(review): `consumed` is computed as `len + avail_in`, i.e. the number
/// of input bytes that were *not* read (unfed remainder plus unread queued
/// bytes) — the name suggests the opposite. Confirm against callers whether
/// they subtract this from `input.len()`.
pub fn uncompress2<'a>(
    output: &'a mut [MaybeUninit<u8>],
    input: &[u8],
    config: InflateConfig,
) -> (u64, &'a mut [u8], ReturnCode) {
    // Length reported back to the caller; mirrors C's `*destLen` out-pointer.
    let mut dest_len_ptr = output.len() as z_checksum;

    // 1-byte scratch buffer used when `output` is empty, so that inflate can
    // still be driven far enough to distinguish error conditions.
    let mut buf = [0u8];

    // `left`: output bytes not yet handed to the stream;
    // `len`: input bytes not yet handed to the stream.
    let mut left;
    let mut len = input.len() as u64;

    let dest = if output.is_empty() {
        left = 1;

        buf.as_mut_ptr()
    } else {
        left = output.len() as u64;
        dest_len_ptr = 0;

        output.as_mut_ptr() as *mut u8
    };

    let mut stream = z_stream {
        next_in: input.as_ptr() as *mut u8,
        // Input is fed to the stream in <= u32::MAX chunks inside the loop.
        avail_in: 0,

        zalloc: None,
        zfree: None,
        opaque: core::ptr::null_mut(),

        ..z_stream::default()
    };

    let err = init(&mut stream, config);
    if err != ReturnCode::Ok {
        return (0, &mut [], err);
    }

    stream.next_out = dest;
    stream.avail_out = 0;

    let Some(stream) = (unsafe { InflateStream::from_stream_mut(&mut stream) }) else {
        return (0, &mut [], ReturnCode::StreamError);
    };

    // Drive inflate, refilling the (u32-sized) avail_in/avail_out windows from
    // the (u64-sized) remaining counts until inflate stops returning Ok.
    let err = loop {
        if stream.avail_out == 0 {
            stream.avail_out = Ord::min(left, u32::MAX as u64) as u32;
            left -= stream.avail_out as u64;
        }

        if stream.avail_in == 0 {
            stream.avail_in = Ord::min(len, u32::MAX as u64) as u32;
            len -= stream.avail_in as u64;
        }

        let err = unsafe { inflate(stream, InflateFlush::NoFlush) };

        if err != ReturnCode::Ok {
            break err;
        }
    };

    // See NOTE(review) in the doc comment: this is the *unread* byte count.
    let consumed = len + u64::from(stream.avail_in);
    if !output.is_empty() {
        dest_len_ptr = stream.total_out;
    } else if stream.total_out != 0 && err == ReturnCode::BufError {
        // Data was produced into the scratch buffer: treat as "output full".
        left = 1;
    }

    let avail_out = stream.avail_out;

    end(stream);

    let ret = match err {
        ReturnCode::StreamEnd => ReturnCode::Ok,
        ReturnCode::NeedDict => ReturnCode::DataError,
        // BufError with unconsumed output space means truncated/corrupt input.
        ReturnCode::BufError if (left + avail_out as u64) != 0 => ReturnCode::DataError,
        _ => err,
    };

    // SAFETY: the first `dest_len_ptr` bytes of `output` were written by
    // inflate (it equals `total_out`, clamped to the buffer we provided).
    let output_slice = unsafe {
        core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut u8, dest_len_ptr as usize)
    };

    (consumed, output_slice, ret)
}
285
/// The state of the inflate state machine; mirrors zlib's `inflate_mode`.
///
/// NOTE(review): several variants (`Sync`, `Mem`, `Length`, `LenLens`,
/// `CodeLens`, `DictId`, `Done`) are handled in code outside this chunk;
/// their comments below are inferred from naming and zlib convention —
/// confirm against the full `dispatch` implementation.
#[derive(Debug, Clone, Copy)]
#[repr(u8)]
pub enum Mode {
    // Initial state: detect zlib vs gzip header.
    Head,
    // gzip header: FLG byte processing.
    Flags,
    // gzip header: modification time.
    Time,
    // gzip header: XFL and OS bytes.
    Os,
    // gzip header: extra-field length.
    ExLen,
    // gzip header: extra field payload.
    Extra,
    // gzip header: original file name (NUL-terminated).
    Name,
    // gzip header: comment (NUL-terminated).
    Comment,
    // gzip header: header CRC16.
    HCrc,
    // looking for a sync point (inflateSync).
    Sync,
    // memory/allocation failure state.
    Mem,
    // gzip trailer: total length check.
    Length,
    // at the start of a deflate block: read the block type.
    Type,
    // same as `Type`, but unconditionally (ignores flush mode).
    TypeDo,
    // stored (uncompressed) block: read lengths.
    Stored,
    // stored block: copy the raw bytes through.
    CopyBlock,
    // trailer: verify the adler32/crc32 data checksum.
    Check,
    // entry into the compressed-data decode loop (from `Type`).
    Len_,
    // decode a literal/length code.
    Len,
    // emit a literal byte.
    Lit,
    // read extra bits for a length code.
    LenExt,
    // decode a distance code.
    Dist,
    // read extra bits for a distance code.
    DistExt,
    // copy a matched run from earlier output / the window.
    Match,
    // dynamic block: read the code-count header.
    Table,
    // dynamic block: read the code-length code lengths.
    LenLens,
    // dynamic block: read the literal/length and distance code lengths.
    CodeLens,
    // zlib header: read the preset dictionary id.
    DictId,
    // waiting for the caller to supply a preset dictionary.
    Dict,
    // stream finished successfully.
    Done,
    // the stream is corrupt; `error_message` holds the reason.
    Bad,
}
321
/// Selects which of the code tables stored in [`State`] is currently active
/// (see `State::len_table_ref` / `State::dist_table_ref` for the mapping).
#[derive(Default, Clone, Copy)]
#[allow(clippy::enum_variant_names)]
enum Codes {
    // the precomputed fixed Huffman tables in `inffixed_tbl`
    #[default]
    Fixed,
    // `State::codes_codes` (code-length codes of a dynamic block)
    Codes,
    // `State::len_codes` (literal/length codes of a dynamic block)
    Len,
    // `State::dist_codes` (distance codes of a dynamic block)
    Dist,
}
331
/// A handle to one Huffman decode table: which backing storage it lives in,
/// plus the number of bits used to index the table's first level.
#[derive(Default, Clone, Copy)]
struct Table {
    // which backing array in `State` holds the codes
    codes: Codes,
    // index width (in bits) of the root table lookup
    bits: usize,
}
337
/// Single-byte bit set tracking inflate bookkeeping state.
#[derive(Clone, Copy)]
struct Flags(u8);

impl Default for Flags {
    /// A fresh state starts with only [`Flags::SANE`] set.
    fn default() -> Self {
        Flags::SANE
    }
}

impl Flags {
    /// The current deflate block is the final block of the stream.
    const IS_LAST_BLOCK: Self = Self(0b0000_0001);

    /// A preset dictionary has been provided.
    const HAVE_DICT: Self = Self(0b0000_0010);

    /// Back-reference distances are validated against the window.
    const SANE: Self = Self(0b0000_0100);

    /// Returns whether the single-bit flag `other` is set in `self`.
    pub(crate) const fn contains(self, other: Self) -> bool {
        // Only ever query one flag at a time.
        debug_assert!(other.0.count_ones() == 1);

        self.0 & other.0 != 0
    }

    /// Sets (`value == true`) or clears (`value == false`) the bits of
    /// `other` within `self`.
    #[inline(always)]
    pub(crate) fn update(&mut self, other: Self, value: bool) {
        match value {
            true => self.0 |= other.0,
            false => self.0 &= !other.0,
        }
    }
}
372
/// The full internal state of the inflate state machine.
///
/// `align(64)` — presumably to align the state to a cache line; confirm
/// against the allocator that places it.
#[repr(C, align(64))]
pub(crate) struct State<'a> {
    // current state-machine position
    mode: Mode,

    // last-block / have-dict / sane bookkeeping bits
    flags: Flags,

    // log2 of the window size (0 until read from the header)
    wbits: u8,

    // wrapper handling: bit 0 = zlib header, bit 1 = gzip header detection,
    // bit 2 = validate check values (see the `Head`/`Check` dispatch arms)
    wrap: u8,

    // flush mode of the current inflate call
    flush: InflateFlush,

    // sliding window of past output, for resolving back-references
    window: Window<'a>,

    // dynamic-block header: number of code-length codes
    ncode: usize,
    // dynamic-block header: number of literal/length codes
    nlen: usize,
    // dynamic-block header: number of distance codes
    ndist: usize,
    // number of code lengths read so far
    have: usize,
    // next slot to fill while building code tables
    next: usize,

    // bit-level reader over the input buffer
    bit_reader: BitReader<'a>,

    // writer over the output buffer
    writer: Writer<'a>,
    // running total of bytes output (used for the trailer check)
    total: usize,

    // literal value or match length currently being processed
    length: usize,
    // match distance currently being processed
    offset: usize,

    // number of extra bits still needed for the current length/distance
    extra: usize,

    // bits consumed for the current code sequence (usize::MAX = end of block)
    back: usize,

    // initial length for a match that spans multiple output refills
    was: usize,

    // chunk size hint — TODO confirm usage outside this chunk
    chunksize: usize,

    // input/output byte counts available at entry to the current call
    in_available: usize,
    out_available: usize,

    // gzip FLG byte, or 0 for a zlib stream
    gzip_flags: i32,

    // running adler32 (zlib) or crc32 (gzip) of the uncompressed data
    checksum: u32,
    crc_fold: Crc32Fold,

    // human-readable error, set when `mode` becomes `Bad`
    error_message: Option<&'static str>,

    // caller-provided gzip header struct to fill in, if any
    head: Option<&'a mut gz_header>,
    // maximum distance allowed (only enforced when `INFLATE_STRICT`)
    dmax: usize,

    // active literal/length decode table
    len_table: Table,

    // active distance decode table
    dist_table: Table,

    // backing storage for dynamically built Huffman tables
    codes_codes: [Code; crate::ENOUGH_LENS],
    len_codes: [Code; crate::ENOUGH_LENS],
    dist_codes: [Code; crate::ENOUGH_DISTS],

    // scratch: code lengths read from a dynamic block header
    lens: [u16; 320],
    // scratch: work area for `inflate_table`
    work: [u16; 288],

    // bookkeeping for freeing the single backing allocation
    allocation_start: *mut u8,
    total_allocation_size: usize,
}
462
463impl<'a> State<'a> {
464 fn new(reader: &'a [u8], writer: Writer<'a>) -> Self {
465 let in_available = reader.len();
466 let out_available = writer.capacity();
467
468 Self {
469 flush: InflateFlush::NoFlush,
470
471 flags: Flags::default(),
472 wrap: 0,
473 mode: Mode::Head,
474 length: 0,
475
476 len_table: Table::default(),
477 dist_table: Table::default(),
478
479 wbits: 0,
480 offset: 0,
481 extra: 0,
482 back: 0,
483 was: 0,
484 chunksize: 0,
485 in_available,
486 out_available,
487
488 bit_reader: BitReader::new(reader),
489
490 writer,
491 total: 0,
492
493 window: Window::empty(),
494 head: None,
495
496 lens: [0u16; 320],
497 work: [0u16; 288],
498
499 ncode: 0,
500 nlen: 0,
501 ndist: 0,
502 have: 0,
503 next: 0,
504
505 error_message: None,
506
507 checksum: 0,
508 crc_fold: Crc32Fold::new(),
509
510 dmax: 0,
511 gzip_flags: 0,
512
513 codes_codes: [Code::default(); crate::ENOUGH_LENS],
514 len_codes: [Code::default(); crate::ENOUGH_LENS],
515 dist_codes: [Code::default(); crate::ENOUGH_DISTS],
516
517 allocation_start: core::ptr::null_mut(),
518 total_allocation_size: 0,
519 }
520 }
521
522 fn len_table_ref(&self) -> &[Code] {
523 match self.len_table.codes {
524 Codes::Fixed => &self::inffixed_tbl::LENFIX,
525 Codes::Codes => &self.codes_codes,
526 Codes::Len => &self.len_codes,
527 Codes::Dist => &self.dist_codes,
528 }
529 }
530
531 fn dist_table_ref(&self) -> &[Code] {
532 match self.dist_table.codes {
533 Codes::Fixed => &self::inffixed_tbl::DISTFIX,
534 Codes::Codes => &self.codes_codes,
535 Codes::Len => &self.len_codes,
536 Codes::Dist => &self.dist_codes,
537 }
538 }
539
540 fn len_table_get(&self, index: usize) -> Code {
541 self.len_table_ref()[index]
542 }
543
544 fn dist_table_get(&self, index: usize) -> Code {
545 self.dist_table_ref()[index]
546 }
547}
548
/// Reverses the byte order of `q`.
///
/// The original wrote this as `u32::from_be(q.to_le())`: on any target
/// exactly one of those two conversions is a byte swap and the other is the
/// identity, so the composition is always an unconditional byte swap.
/// `swap_bytes` states that intent directly.
const fn zswap32(q: u32) -> u32 {
    q.swap_bytes()
}
553
// Minimum input bytes that must be available before `inflate_fast_help` may
// be entered (see the guard in `len_and_friends`).
const INFLATE_FAST_MIN_HAVE: usize = 15;
// Minimum output space that must be available before `inflate_fast_help`
// may be entered.
const INFLATE_FAST_MIN_LEFT: usize = 260;
556
557impl State<'_> {
    /// Runs the hot literal/length → distance → match decode loop, covering
    /// the `Len`/`Lit`/`LenExt`/`Dist`/`DistExt`/`Match` states.
    ///
    /// Returns `ControlFlow::Continue(())` to hand control back to the main
    /// `dispatch` loop (with `self.mode` updated), or
    /// `ControlFlow::Break(code)` to leave inflate with `code`.
    fn len_and_friends(&mut self) -> ControlFlow<ReturnCode, ()> {
        let avail_in = self.bit_reader.bytes_remaining();
        let avail_out = self.writer.remaining();

        // With enough input and output slack, take the unrolled fast path.
        if avail_in >= INFLATE_FAST_MIN_HAVE && avail_out >= INFLATE_FAST_MIN_LEFT {
            unsafe { inflate_fast_help(self, 0) };
            match self.mode {
                // Still in `Len`: fall through to the bit-by-bit path below.
                Mode::Len => {}
                _ => return ControlFlow::Continue(()),
            }
        }

        // Working copies of the mutable state. The writer is *moved* out of
        // `self` (replaced by an empty one) so that `self` can still be
        // borrowed for the table lookups below; `restore!` must run before
        // every exit from this function.
        let mut mode;
        let mut writer;
        let mut bit_reader;

        macro_rules! load {
            () => {
                mode = self.mode;
                writer = core::mem::replace(&mut self.writer, Writer::new(&mut []));
                bit_reader = self.bit_reader;
            };
        }

        macro_rules! restore {
            () => {
                self.mode = mode;
                self.writer = writer;
                self.bit_reader = bit_reader;
            };
        }

        load!();

        // Resolve the active tables once, outside the loop (same mapping as
        // `len_table_ref`/`dist_table_ref`).
        let len_table = match self.len_table.codes {
            Codes::Fixed => &self::inffixed_tbl::LENFIX[..],
            Codes::Codes => &self.codes_codes,
            Codes::Len => &self.len_codes,
            Codes::Dist => &self.dist_codes,
        };

        let dist_table = match self.dist_table.codes {
            Codes::Fixed => &self::inffixed_tbl::DISTFIX[..],
            Codes::Codes => &self.codes_codes,
            Codes::Len => &self.len_codes,
            Codes::Dist => &self.dist_codes,
        };

        loop {
            // Each arm `break 'top`s with the next state to run.
            mode = 'top: {
                match mode {
                    Mode::Len => {
                        let avail_in = bit_reader.bytes_remaining();
                        let avail_out = writer.remaining();

                        // Re-check the fast-path thresholds each iteration;
                        // earlier states may have consumed/produced bytes.
                        if avail_in >= INFLATE_FAST_MIN_HAVE && avail_out >= INFLATE_FAST_MIN_LEFT {
                            restore!();
                            unsafe { inflate_fast_help(self, 0) };
                            return ControlFlow::Continue(());
                        }

                        self.back = 0;

                        // First-level lookup; pull bytes until the code's bit
                        // length fits in the buffered bits.
                        let mut here;
                        loop {
                            let bits = bit_reader.bits(self.len_table.bits);
                            here = len_table[bits as usize];

                            if here.bits <= bit_reader.bits_in_buffer() {
                                break;
                            }

                            if let Err(return_code) = bit_reader.pull_byte() {
                                restore!();
                                return ControlFlow::Break(return_code);
                            };
                        }

                        // op in 0x01..=0x0f with no flag bits: second-level
                        // table indirection.
                        if here.op != 0 && here.op & 0xf0 == 0 {
                            let last = here;
                            loop {
                                let bits = bit_reader.bits((last.bits + last.op) as usize) as u16;
                                here = len_table[(last.val + (bits >> last.bits)) as usize];
                                if last.bits + here.bits <= bit_reader.bits_in_buffer() {
                                    break;
                                }

                                if let Err(return_code) = bit_reader.pull_byte() {
                                    restore!();
                                    return ControlFlow::Break(return_code);
                                };
                            }

                            bit_reader.drop_bits(last.bits);
                            self.back += last.bits as usize;
                        }

                        bit_reader.drop_bits(here.bits);
                        self.back += here.bits as usize;
                        self.length = here.val as usize;

                        if here.op == 0 {
                            // Literal byte.
                            break 'top Mode::Lit;
                        } else if here.op & 32 != 0 {
                            // End-of-block code.
                            self.back = usize::MAX;
                            mode = Mode::Type;

                            restore!();
                            return ControlFlow::Continue(());
                        } else if here.op & 64 != 0 {
                            // Invalid code: record the error and bail.
                            mode = Mode::Bad;
                            {
                                restore!();
                                let this = &mut *self;
                                let msg: &'static str = "invalid literal/length code\0";
                                #[cfg(all(feature = "std", test))]
                                dbg!(msg);
                                this.error_message = Some(msg);
                                return ControlFlow::Break(ReturnCode::DataError);
                            }
                        } else {
                            // Length code: low 4 op bits = extra-bit count.
                            self.extra = (here.op & MAX_BITS) as usize;
                            break 'top Mode::LenExt;
                        }
                    }
                    Mode::Lit => {
                        if writer.is_full() {
                            restore!();
                            #[cfg(all(test, feature = "std"))]
                            eprintln!("Ok: writer is full ({} bytes)", self.writer.capacity());
                            return ControlFlow::Break(ReturnCode::Ok);
                        }

                        writer.push(self.length as u8);

                        break 'top Mode::Len;
                    }
                    Mode::LenExt => {
                        let extra = self.extra;

                        // Add the extra bits to the base match length.
                        if extra != 0 {
                            match bit_reader.need_bits(extra) {
                                Err(return_code) => {
                                    restore!();
                                    return ControlFlow::Break(return_code);
                                }
                                Ok(v) => v,
                            };
                            self.length += bit_reader.bits(extra) as usize;
                            bit_reader.drop_bits(extra as u8);
                            self.back += extra;
                        }

                        // Remember the full match length for error reporting.
                        self.was = self.length;

                        break 'top Mode::Dist;
                    }
                    Mode::Dist => {
                        // Same two-level decode as `Len`, on the dist table.
                        let mut here;
                        loop {
                            let bits = bit_reader.bits(self.dist_table.bits) as usize;
                            here = dist_table[bits];
                            if here.bits <= bit_reader.bits_in_buffer() {
                                break;
                            }

                            if let Err(return_code) = bit_reader.pull_byte() {
                                restore!();
                                return ControlFlow::Break(return_code);
                            };
                        }

                        if here.op & 0xf0 == 0 {
                            let last = here;

                            loop {
                                let bits = bit_reader.bits((last.bits + last.op) as usize);
                                here =
                                    dist_table[last.val as usize + ((bits as usize) >> last.bits)];

                                if last.bits + here.bits <= bit_reader.bits_in_buffer() {
                                    break;
                                }

                                if let Err(return_code) = bit_reader.pull_byte() {
                                    restore!();
                                    return ControlFlow::Break(return_code);
                                };
                            }

                            bit_reader.drop_bits(last.bits);
                            self.back += last.bits as usize;
                        }

                        bit_reader.drop_bits(here.bits);

                        if here.op & 64 != 0 {
                            restore!();
                            self.mode = Mode::Bad;
                            return ControlFlow::Break(self.bad("invalid distance code\0"));
                        }

                        self.offset = here.val as usize;

                        self.extra = (here.op & MAX_BITS) as usize;

                        break 'top Mode::DistExt;
                    }
                    Mode::DistExt => {
                        let extra = self.extra;

                        // Add the extra bits to the base distance.
                        if extra > 0 {
                            match bit_reader.need_bits(extra) {
                                Err(return_code) => {
                                    restore!();
                                    return ControlFlow::Break(return_code);
                                }
                                Ok(v) => v,
                            };
                            self.offset += bit_reader.bits(extra) as usize;
                            bit_reader.drop_bits(extra as u8);
                            self.back += extra;
                        }

                        if INFLATE_STRICT && self.offset > self.dmax {
                            restore!();
                            self.mode = Mode::Bad;
                            return ControlFlow::Break(
                                self.bad("invalid distance code too far back\0"),
                            );
                        }

                        break 'top Mode::Match;
                    }
                    Mode::Match => {
                        if writer.is_full() {
                            restore!();
                            // NOTE(review): message says "BufError" but the
                            // returned code is Ok (test-only output).
                            #[cfg(all(feature = "std", test))]
                            eprintln!(
                                "BufError: writer is full ({} bytes)",
                                self.writer.capacity()
                            );
                            return ControlFlow::Break(ReturnCode::Ok);
                        }

                        let left = writer.remaining();
                        let copy = writer.len();

                        let copy = if self.offset > copy {
                            // Match starts before the current output buffer:
                            // source it from the sliding window.
                            let mut copy = self.offset - copy;

                            if copy > self.window.have() {
                                if self.flags.contains(Flags::SANE) {
                                    restore!();
                                    self.mode = Mode::Bad;
                                    return ControlFlow::Break(
                                        self.bad("invalid distance too far back\0"),
                                    );
                                }

                                // The "allow invalid distance" build mode is
                                // not supported by this port.
                                panic!("INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR")
                            }

                            let wnext = self.window.next();
                            let wsize = self.window.size();

                            // Translate the back-distance into a window index,
                            // wrapping around the circular buffer.
                            let from = if copy > wnext {
                                copy -= wnext;
                                wsize - copy
                            } else {
                                wnext - copy
                            };

                            copy = Ord::min(copy, self.length);
                            copy = Ord::min(copy, left);

                            writer.extend_from_window(&self.window, from..from + copy);

                            copy
                        } else {
                            // Match is entirely within current output.
                            let copy = Ord::min(self.length, left);
                            writer.copy_match(self.offset, copy);

                            copy
                        };

                        self.length -= copy;

                        if self.length == 0 {
                            break 'top Mode::Len;
                        } else {
                            // Match only partially copied (out of space or
                            // window wrap): stay in `Match`.
                            break 'top Mode::Match;
                        }
                    }
                    // SAFETY: `len_and_friends` is only entered in one of the
                    // states handled above, and every transition above stays
                    // within that set.
                    _ => unsafe { core::hint::unreachable_unchecked() },
                }
            };
        }
    }
897
898 fn dispatch(&mut self) -> ReturnCode {
899 let mut mode = self.mode;
901
902 macro_rules! pull_byte {
903 ($self:expr) => {
904 match $self.bit_reader.pull_byte() {
905 Err(return_code) => {
906 self.mode = mode;
907 return $self.inflate_leave(return_code);
908 }
909 Ok(_) => (),
910 }
911 };
912 }
913
914 macro_rules! need_bits {
915 ($self:expr, $n:expr) => {
916 match $self.bit_reader.need_bits($n) {
917 Err(return_code) => {
918 self.mode = mode;
919 return $self.inflate_leave(return_code);
920 }
921 Ok(v) => v,
922 }
923 };
924 }
925
926 let ret = 'label: loop {
927 mode = 'blk: {
928 match mode {
929 Mode::Head => {
930 if self.wrap == 0 {
931 break 'blk Mode::TypeDo;
932 }
933
934 need_bits!(self, 16);
935
936 if (self.wrap & 2) != 0 && self.bit_reader.hold() == 0x8b1f {
938 if self.wbits == 0 {
939 self.wbits = 15;
940 }
941
942 let b0 = self.bit_reader.bits(8) as u8;
943 let b1 = (self.bit_reader.hold() >> 8) as u8;
944 self.checksum = crc32(crate::CRC32_INITIAL_VALUE, &[b0, b1]);
945 self.bit_reader.init_bits();
946
947 break 'blk Mode::Flags;
948 }
949
950 if let Some(header) = &mut self.head {
951 header.done = -1;
952 }
953
954 if (self.wrap & 1) == 0
956 || ((self.bit_reader.bits(8) << 8) + (self.bit_reader.hold() >> 8)) % 31
957 != 0
958 {
959 mode = Mode::Bad;
960 break 'label self.bad("incorrect header check\0");
961 }
962
963 if self.bit_reader.bits(4) != Z_DEFLATED as u64 {
964 mode = Mode::Bad;
965 break 'label self.bad("unknown compression method\0");
966 }
967
968 self.bit_reader.drop_bits(4);
969 let len = self.bit_reader.bits(4) as u8 + 8;
970
971 if self.wbits == 0 {
972 self.wbits = len;
973 }
974
975 if len as i32 > MAX_WBITS || len > self.wbits {
976 mode = Mode::Bad;
977 break 'label self.bad("invalid window size\0");
978 }
979
980 self.dmax = 1 << len;
981 self.gzip_flags = 0; self.checksum = crate::ADLER32_INITIAL_VALUE as _;
983
984 if self.bit_reader.hold() & 0x200 != 0 {
985 self.bit_reader.init_bits();
986
987 break 'blk Mode::DictId;
988 } else {
989 self.bit_reader.init_bits();
990
991 break 'blk Mode::Type;
992 }
993 }
994 Mode::Flags => {
995 need_bits!(self, 16);
996 self.gzip_flags = self.bit_reader.hold() as i32;
997
998 if self.gzip_flags & 0xff != Z_DEFLATED {
1000 mode = Mode::Bad;
1001 break 'label self.bad("unknown compression method\0");
1002 }
1003
1004 if self.gzip_flags & 0xe000 != 0 {
1005 mode = Mode::Bad;
1006 break 'label self.bad("unknown header flags set\0");
1007 }
1008
1009 if let Some(head) = self.head.as_mut() {
1010 head.text = ((self.bit_reader.hold() >> 8) & 1) as i32;
1011 }
1012
1013 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1014 let b0 = self.bit_reader.bits(8) as u8;
1015 let b1 = (self.bit_reader.hold() >> 8) as u8;
1016 self.checksum = crc32(self.checksum, &[b0, b1]);
1017 }
1018
1019 self.bit_reader.init_bits();
1020
1021 break 'blk Mode::Time;
1022 }
1023 Mode::Time => {
1024 need_bits!(self, 32);
1025 if let Some(head) = self.head.as_mut() {
1026 head.time = self.bit_reader.hold() as z_size;
1027 }
1028
1029 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1030 let bytes = (self.bit_reader.hold() as u32).to_le_bytes();
1031 self.checksum = crc32(self.checksum, &bytes);
1032 }
1033
1034 self.bit_reader.init_bits();
1035
1036 break 'blk Mode::Os;
1037 }
1038 Mode::Os => {
1039 need_bits!(self, 16);
1040 if let Some(head) = self.head.as_mut() {
1041 head.xflags = (self.bit_reader.hold() & 0xff) as i32;
1042 head.os = (self.bit_reader.hold() >> 8) as i32;
1043 }
1044
1045 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1046 let bytes = (self.bit_reader.hold() as u16).to_le_bytes();
1047 self.checksum = crc32(self.checksum, &bytes);
1048 }
1049
1050 self.bit_reader.init_bits();
1051
1052 break 'blk Mode::ExLen;
1053 }
1054 Mode::ExLen => {
1055 if (self.gzip_flags & 0x0400) != 0 {
1056 need_bits!(self, 16);
1057
1058 self.length = self.bit_reader.hold() as usize;
1060 if let Some(head) = self.head.as_mut() {
1061 head.extra_len = self.length as u32;
1062 }
1063
1064 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1065 let bytes = (self.bit_reader.hold() as u16).to_le_bytes();
1066 self.checksum = crc32(self.checksum, &bytes);
1067 }
1068 self.bit_reader.init_bits();
1069 } else if let Some(head) = self.head.as_mut() {
1070 head.extra = core::ptr::null_mut();
1071 }
1072
1073 break 'blk Mode::Extra;
1074 }
1075 Mode::Extra => {
1076 if (self.gzip_flags & 0x0400) != 0 {
1077 let extra_available =
1079 Ord::min(self.length, self.bit_reader.bytes_remaining());
1080
1081 if extra_available > 0 {
1082 if let Some(head) = self.head.as_mut() {
1083 if !head.extra.is_null() {
1084 let written_so_far = head.extra_len as usize - self.length;
1091
1092 let count = Ord::min(
1094 (head.extra_max as usize)
1095 .saturating_sub(written_so_far),
1096 extra_available,
1097 );
1098
1099 let next_write_offset =
1102 Ord::min(written_so_far, head.extra_max as usize);
1103
1104 unsafe {
1105 core::ptr::copy_nonoverlapping(
1109 self.bit_reader.as_mut_ptr(),
1110 head.extra.add(next_write_offset),
1111 count,
1112 );
1113 }
1114 }
1115 }
1116
1117 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1119 let extra_slice =
1120 &self.bit_reader.as_slice()[..extra_available];
1121 self.checksum = crc32(self.checksum, extra_slice)
1122 }
1123
1124 self.in_available -= extra_available;
1125 self.bit_reader.advance(extra_available);
1126 self.length -= extra_available;
1127 }
1128
1129 if self.length != 0 {
1131 break 'label self.inflate_leave(ReturnCode::Ok);
1132 }
1133 }
1134
1135 self.length = 0;
1136
1137 break 'blk Mode::Name;
1138 }
1139 Mode::Name => {
1140 if (self.gzip_flags & 0x0800) != 0 {
1141 if self.in_available == 0 {
1142 break 'label self.inflate_leave(ReturnCode::Ok);
1143 }
1144
1145 let slice = self.bit_reader.as_slice();
1148 let null_terminator_index = slice.iter().position(|c| *c == 0);
1149
1150 let name_slice = match null_terminator_index {
1152 Some(i) => &slice[..=i],
1153 None => slice,
1154 };
1155
1156 if let Some(head) = self.head.as_mut() {
1158 if !head.name.is_null() {
1159 let remaining_name_bytes = (head.name_max as usize)
1160 .checked_sub(self.length)
1161 .expect("name out of bounds");
1162 let copy = Ord::min(name_slice.len(), remaining_name_bytes);
1163
1164 unsafe {
1165 core::ptr::copy_nonoverlapping(
1168 name_slice.as_ptr(),
1169 head.name.add(self.length),
1170 copy,
1171 )
1172 };
1173
1174 self.length += copy;
1175 }
1176 }
1177
1178 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1179 self.checksum = crc32(self.checksum, name_slice);
1180 }
1181
1182 let reached_end = name_slice.last() == Some(&0);
1183 self.bit_reader.advance(name_slice.len());
1184
1185 if !reached_end && self.bit_reader.bytes_remaining() == 0 {
1186 break 'label self.inflate_leave(ReturnCode::Ok);
1187 }
1188 } else if let Some(head) = self.head.as_mut() {
1189 head.name = core::ptr::null_mut();
1190 }
1191
1192 self.length = 0;
1193
1194 break 'blk Mode::Comment;
1195 }
1196 Mode::Comment => {
1197 if (self.gzip_flags & 0x01000) != 0 {
1198 if self.in_available == 0 {
1199 break 'label self.inflate_leave(ReturnCode::Ok);
1200 }
1201
1202 let slice = self.bit_reader.as_slice();
1205 let null_terminator_index = slice.iter().position(|c| *c == 0);
1206
1207 let comment_slice = match null_terminator_index {
1209 Some(i) => &slice[..=i],
1210 None => slice,
1211 };
1212
1213 if let Some(head) = self.head.as_mut() {
1215 if !head.comment.is_null() {
1216 let remaining_comm_bytes = (head.comm_max as usize)
1217 .checked_sub(self.length)
1218 .expect("comm out of bounds");
1219 let copy = Ord::min(comment_slice.len(), remaining_comm_bytes);
1220
1221 unsafe {
1222 core::ptr::copy_nonoverlapping(
1225 comment_slice.as_ptr(),
1226 head.comment.add(self.length),
1227 copy,
1228 )
1229 };
1230
1231 self.length += copy;
1232 }
1233 }
1234
1235 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1236 self.checksum = crc32(self.checksum, comment_slice);
1237 }
1238
1239 let reached_end = comment_slice.last() == Some(&0);
1240 self.bit_reader.advance(comment_slice.len());
1241
1242 if !reached_end && self.bit_reader.bytes_remaining() == 0 {
1243 break 'label self.inflate_leave(ReturnCode::Ok);
1244 }
1245 } else if let Some(head) = self.head.as_mut() {
1246 head.comment = core::ptr::null_mut();
1247 }
1248
1249 break 'blk Mode::HCrc;
1250 }
1251 Mode::HCrc => {
1252 if (self.gzip_flags & 0x0200) != 0 {
1253 need_bits!(self, 16);
1254
1255 if (self.wrap & 4) != 0
1256 && self.bit_reader.hold() as u32 != (self.checksum & 0xffff)
1257 {
1258 mode = Mode::Bad;
1259 break 'label self.bad("header crc mismatch\0");
1260 }
1261
1262 self.bit_reader.init_bits();
1263 }
1264
1265 if let Some(head) = self.head.as_mut() {
1266 head.hcrc = (self.gzip_flags >> 9) & 1;
1267 head.done = 1;
1268 }
1269
1270 if (self.wrap & 4 != 0) && self.gzip_flags != 0 {
1272 self.crc_fold = Crc32Fold::new();
1273 self.checksum = crate::CRC32_INITIAL_VALUE;
1274 }
1275
1276 break 'blk Mode::Type;
1277 }
1278 Mode::Type => {
1279 use InflateFlush::*;
1280
1281 match self.flush {
1282 Block | Trees => break 'label ReturnCode::Ok,
1283 NoFlush | SyncFlush | Finish => {
1284 break 'blk Mode::TypeDo;
1286 }
1287 }
1288 }
1289 Mode::TypeDo => {
1290 if self.flags.contains(Flags::IS_LAST_BLOCK) {
1291 self.bit_reader.next_byte_boundary();
1292 break 'blk Mode::Check;
1293 }
1294
1295 need_bits!(self, 3);
1296 self.flags
1298 .update(Flags::IS_LAST_BLOCK, self.bit_reader.bits(1) != 0);
1299 self.bit_reader.drop_bits(1);
1300
1301 match self.bit_reader.bits(2) {
1302 0b00 => {
1303 self.bit_reader.drop_bits(2);
1306
1307 break 'blk Mode::Stored;
1308 }
1309 0b01 => {
1310 self.len_table = Table {
1313 codes: Codes::Fixed,
1314 bits: 9,
1315 };
1316
1317 self.dist_table = Table {
1318 codes: Codes::Fixed,
1319 bits: 5,
1320 };
1321
1322 mode = Mode::Len_;
1323
1324 self.bit_reader.drop_bits(2);
1325
1326 if let InflateFlush::Trees = self.flush {
1327 break 'label self.inflate_leave(ReturnCode::Ok);
1328 } else {
1329 break 'blk Mode::Len_;
1330 }
1331 }
1332 0b10 => {
1333 self.bit_reader.drop_bits(2);
1336
1337 break 'blk Mode::Table;
1338 }
1339 0b11 => {
1340 self.bit_reader.drop_bits(2);
1343
1344 mode = Mode::Bad;
1345 break 'label self.bad("invalid block type\0");
1346 }
1347 _ => {
1348 unreachable!("BitReader::bits(2) only yields a value of two bits, so this match is already exhaustive")
1350 }
1351 }
1352 }
1353 Mode::Stored => {
1354 self.bit_reader.next_byte_boundary();
1355
1356 need_bits!(self, 32);
1357
1358 let hold = self.bit_reader.bits(32) as u32;
1359
1360 if hold as u16 != !((hold >> 16) as u16) {
1363 mode = Mode::Bad;
1364 break 'label self.bad("invalid stored block lengths\0");
1365 }
1366
1367 self.length = hold as usize & 0xFFFF;
1368 self.bit_reader.init_bits();
1371
1372 if let InflateFlush::Trees = self.flush {
1373 break 'label self.inflate_leave(ReturnCode::Ok);
1374 } else {
1375 break 'blk Mode::CopyBlock;
1376 }
1377 }
1378 Mode::CopyBlock => {
1379 loop {
1380 let mut copy = self.length;
1381
1382 if copy == 0 {
1383 break;
1384 }
1385
1386 copy = Ord::min(copy, self.writer.remaining());
1387 copy = Ord::min(copy, self.bit_reader.bytes_remaining());
1388
1389 if copy == 0 {
1390 break 'label self.inflate_leave(ReturnCode::Ok);
1391 }
1392
1393 self.writer.extend(&self.bit_reader.as_slice()[..copy]);
1394 self.bit_reader.advance(copy);
1395
1396 self.length -= copy;
1397 }
1398
1399 break 'blk Mode::Type;
1400 }
1401 Mode::Check => {
1402 if !cfg!(feature = "__internal-fuzz-disable-checksum") && self.wrap != 0 {
1403 need_bits!(self, 32);
1404
1405 self.total += self.writer.len();
1406
1407 if self.wrap & 4 != 0 {
1408 if self.gzip_flags != 0 {
1409 self.crc_fold.fold(self.writer.filled(), self.checksum);
1410 self.checksum = self.crc_fold.finish();
1411 } else {
1412 self.checksum = adler32(self.checksum, self.writer.filled());
1413 }
1414 }
1415
1416 let given_checksum = if self.gzip_flags != 0 {
1417 self.bit_reader.hold() as u32
1418 } else {
1419 zswap32(self.bit_reader.hold() as u32)
1420 };
1421
1422 self.out_available = self.writer.capacity() - self.writer.len();
1423
1424 if self.wrap & 4 != 0 && given_checksum != self.checksum {
1425 mode = Mode::Bad;
1426 break 'label self.bad("incorrect data check\0");
1427 }
1428
1429 self.bit_reader.init_bits();
1430 }
1431
1432 break 'blk Mode::Length;
1433 }
1434 Mode::Len_ => {
1435 break 'blk Mode::Len;
1436 }
1437 Mode::Len => {
1438 self.mode = mode;
1439 let val = self.len_and_friends();
1440 mode = self.mode;
1441 match val {
1442 ControlFlow::Break(return_code) => break 'label return_code,
1443 ControlFlow::Continue(()) => continue 'label,
1444 }
1445 }
1446 Mode::LenExt => {
1447 let extra = self.extra;
1449
1450 if extra != 0 {
1452 need_bits!(self, extra);
1453 self.length += self.bit_reader.bits(extra) as usize;
1454 self.bit_reader.drop_bits(extra as u8);
1455 self.back += extra;
1456 }
1457
1458 self.was = self.length;
1461
1462 break 'blk Mode::Dist;
1463 }
1464 Mode::Lit => {
1465 if self.writer.is_full() {
1467 #[cfg(all(test, feature = "std"))]
1468 eprintln!("Ok: writer is full ({} bytes)", self.writer.capacity());
1469 break 'label self.inflate_leave(ReturnCode::Ok);
1470 }
1471
1472 self.writer.push(self.length as u8);
1473
1474 break 'blk Mode::Len;
1475 }
1476 Mode::Dist => {
1477 let mut here;
1481 loop {
1482 let bits = self.bit_reader.bits(self.dist_table.bits) as usize;
1483 here = self.dist_table_get(bits);
1484 if here.bits <= self.bit_reader.bits_in_buffer() {
1485 break;
1486 }
1487
1488 pull_byte!(self);
1489 }
1490
1491 if here.op & 0xf0 == 0 {
1492 let last = here;
1493
1494 loop {
1495 let bits = self.bit_reader.bits((last.bits + last.op) as usize);
1496 here = self.dist_table_get(
1497 last.val as usize + ((bits as usize) >> last.bits),
1498 );
1499
1500 if last.bits + here.bits <= self.bit_reader.bits_in_buffer() {
1501 break;
1502 }
1503
1504 pull_byte!(self);
1505 }
1506
1507 self.bit_reader.drop_bits(last.bits);
1508 self.back += last.bits as usize;
1509 }
1510
1511 self.bit_reader.drop_bits(here.bits);
1512
1513 if here.op & 64 != 0 {
1514 mode = Mode::Bad;
1515 break 'label self.bad("invalid distance code\0");
1516 }
1517
1518 self.offset = here.val as usize;
1519
1520 self.extra = (here.op & MAX_BITS) as usize;
1521
1522 break 'blk Mode::DistExt;
1523 }
1524 Mode::DistExt => {
1525 let extra = self.extra;
1527
1528 if extra > 0 {
1529 need_bits!(self, extra);
1530 self.offset += self.bit_reader.bits(extra) as usize;
1531 self.bit_reader.drop_bits(extra as u8);
1532 self.back += extra;
1533 }
1534
1535 if INFLATE_STRICT && self.offset > self.dmax {
1536 mode = Mode::Bad;
1537 break 'label self.bad("invalid distance code too far back\0");
1538 }
1539
1540 break 'blk Mode::Match;
1543 }
1544 Mode::Match => {
1545 'match_: loop {
1548 if self.writer.is_full() {
1549 #[cfg(all(feature = "std", test))]
1550 eprintln!(
1551 "BufError: writer is full ({} bytes)",
1552 self.writer.capacity()
1553 );
1554 break 'label self.inflate_leave(ReturnCode::Ok);
1555 }
1556
1557 let left = self.writer.remaining();
1558 let copy = self.writer.len();
1559
1560 let copy = if self.offset > copy {
1561 let mut copy = self.offset - copy;
1564
1565 if copy > self.window.have() {
1566 if self.flags.contains(Flags::SANE) {
1567 mode = Mode::Bad;
1568 break 'label self.bad("invalid distance too far back\0");
1569 }
1570
1571 panic!("INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR")
1573 }
1574
1575 let wnext = self.window.next();
1576 let wsize = self.window.size();
1577
1578 let from = if copy > wnext {
1579 copy -= wnext;
1580 wsize - copy
1581 } else {
1582 wnext - copy
1583 };
1584
1585 copy = Ord::min(copy, self.length);
1586 copy = Ord::min(copy, left);
1587
1588 self.writer
1589 .extend_from_window(&self.window, from..from + copy);
1590
1591 copy
1592 } else {
1593 let copy = Ord::min(self.length, left);
1594 self.writer.copy_match(self.offset, copy);
1595
1596 copy
1597 };
1598
1599 self.length -= copy;
1600
1601 if self.length == 0 {
1602 break 'blk Mode::Len;
1603 } else {
1604 continue 'match_;
1606 }
1607 }
1608 }
1609 Mode::Table => {
1610 need_bits!(self, 14);
1611 self.nlen = self.bit_reader.bits(5) as usize + 257;
1612 self.bit_reader.drop_bits(5);
1613 self.ndist = self.bit_reader.bits(5) as usize + 1;
1614 self.bit_reader.drop_bits(5);
1615 self.ncode = self.bit_reader.bits(4) as usize + 4;
1616 self.bit_reader.drop_bits(4);
1617
1618 if self.nlen > 286 || self.ndist > 30 {
1620 mode = Mode::Bad;
1621 break 'label self.bad("too many length or distance symbols\0");
1622 }
1623
1624 self.have = 0;
1625
1626 break 'blk Mode::LenLens;
1627 }
1628 Mode::LenLens => {
1629 const ORDER: [u8; 19] = [
1631 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
1632 ];
1633
1634 while self.have < self.ncode {
1635 need_bits!(self, 3);
1636 self.lens[usize::from(ORDER[self.have])] =
1637 self.bit_reader.bits(3) as u16;
1638 self.have += 1;
1639 self.bit_reader.drop_bits(3);
1640 }
1641
1642 while self.have < 19 {
1643 self.lens[usize::from(ORDER[self.have])] = 0;
1644 self.have += 1;
1645 }
1646
1647 let InflateTable::Success { root, used } = inflate_table(
1648 CodeType::Codes,
1649 &self.lens[..19],
1650 &mut self.codes_codes,
1651 7,
1652 &mut self.work,
1653 ) else {
1654 mode = Mode::Bad;
1655 break 'label self.bad("invalid code lengths set\0");
1656 };
1657
1658 self.next = used;
1659 self.len_table.codes = Codes::Codes;
1660 self.len_table.bits = root;
1661
1662 self.have = 0;
1663
1664 break 'blk Mode::CodeLens;
1665 }
1666 Mode::CodeLens => {
1667 while self.have < self.nlen + self.ndist {
1668 let here = loop {
1669 let bits = self.bit_reader.bits(self.len_table.bits);
1670 let here = self.len_table_get(bits as usize);
1671 if here.bits <= self.bit_reader.bits_in_buffer() {
1672 break here;
1673 }
1674
1675 pull_byte!(self);
1676 };
1677
1678 let here_bits = here.bits;
1679
1680 match here.val {
1681 0..=15 => {
1682 self.bit_reader.drop_bits(here_bits);
1683 self.lens[self.have] = here.val;
1684 self.have += 1;
1685 }
1686 16 => {
1687 need_bits!(self, usize::from(here_bits) + 2);
1688 self.bit_reader.drop_bits(here_bits);
1689 if self.have == 0 {
1690 mode = Mode::Bad;
1691 break 'label self.bad("invalid bit length repeat\0");
1692 }
1693
1694 let len = self.lens[self.have - 1];
1695 let copy = 3 + self.bit_reader.bits(2) as usize;
1696 self.bit_reader.drop_bits(2);
1697
1698 if self.have + copy > self.nlen + self.ndist {
1699 mode = Mode::Bad;
1700 break 'label self.bad("invalid bit length repeat\0");
1701 }
1702
1703 self.lens[self.have..][..copy].fill(len);
1704 self.have += copy;
1705 }
1706 17 => {
1707 need_bits!(self, usize::from(here_bits) + 3);
1708 self.bit_reader.drop_bits(here_bits);
1709 let copy = 3 + self.bit_reader.bits(3) as usize;
1710 self.bit_reader.drop_bits(3);
1711
1712 if self.have + copy > self.nlen + self.ndist {
1713 mode = Mode::Bad;
1714 break 'label self.bad("invalid bit length repeat\0");
1715 }
1716
1717 self.lens[self.have..][..copy].fill(0);
1718 self.have += copy;
1719 }
1720 18.. => {
1721 need_bits!(self, usize::from(here_bits) + 7);
1722 self.bit_reader.drop_bits(here_bits);
1723 let copy = 11 + self.bit_reader.bits(7) as usize;
1724 self.bit_reader.drop_bits(7);
1725
1726 if self.have + copy > self.nlen + self.ndist {
1727 mode = Mode::Bad;
1728 break 'label self.bad("invalid bit length repeat\0");
1729 }
1730
1731 self.lens[self.have..][..copy].fill(0);
1732 self.have += copy;
1733 }
1734 }
1735 }
1736
1737 if self.lens[256] == 0 {
1739 mode = Mode::Bad;
1740 break 'label self.bad("invalid code -- missing end-of-block\0");
1741 }
1742
1743 let InflateTable::Success { root, used } = inflate_table(
1746 CodeType::Lens,
1747 &self.lens[..self.nlen],
1748 &mut self.len_codes,
1749 10,
1750 &mut self.work,
1751 ) else {
1752 mode = Mode::Bad;
1753 break 'label self.bad("invalid literal/lengths set\0");
1754 };
1755
1756 self.len_table.codes = Codes::Len;
1757 self.len_table.bits = root;
1758 self.next = used;
1759
1760 let InflateTable::Success { root, used } = inflate_table(
1761 CodeType::Dists,
1762 &self.lens[self.nlen..][..self.ndist],
1763 &mut self.dist_codes,
1764 9,
1765 &mut self.work,
1766 ) else {
1767 mode = Mode::Bad;
1768 break 'label self.bad("invalid distances set\0");
1769 };
1770
1771 self.dist_table.bits = root;
1772 self.dist_table.codes = Codes::Dist;
1773 self.next += used;
1774
1775 mode = Mode::Len_;
1776
1777 if matches!(self.flush, InflateFlush::Trees) {
1778 break 'label self.inflate_leave(ReturnCode::Ok);
1779 }
1780
1781 break 'blk Mode::Len_;
1782 }
1783 Mode::Dict => {
1784 if !self.flags.contains(Flags::HAVE_DICT) {
1785 break 'label self.inflate_leave(ReturnCode::NeedDict);
1786 }
1787
1788 self.checksum = crate::ADLER32_INITIAL_VALUE as _;
1789
1790 break 'blk Mode::Type;
1791 }
1792 Mode::DictId => {
1793 need_bits!(self, 32);
1794
1795 self.checksum = zswap32(self.bit_reader.hold() as u32);
1796
1797 self.bit_reader.init_bits();
1798
1799 break 'blk Mode::Dict;
1800 }
1801 Mode::Done => {
1802 break 'label ReturnCode::StreamEnd;
1804 }
1805 Mode::Bad => {
1806 let msg = "repeated call with bad state\0";
1807 #[cfg(all(feature = "std", test))]
1808 dbg!(msg);
1809 self.error_message = Some(msg);
1810
1811 break 'label ReturnCode::DataError;
1812 }
1813 Mode::Mem => {
1814 break 'label ReturnCode::MemError;
1815 }
1816 Mode::Sync => {
1817 break 'label ReturnCode::StreamError;
1818 }
1819 Mode::Length => {
1820 if self.wrap != 0 && self.gzip_flags != 0 {
1822 need_bits!(self, 32);
1823 if (self.wrap & 0b100) != 0
1824 && self.bit_reader.hold() as u32 != self.total as u32
1825 {
1826 mode = Mode::Bad;
1827 break 'label self.bad("incorrect length check\0");
1828 }
1829
1830 self.bit_reader.init_bits();
1831 }
1832
1833 mode = Mode::Done;
1834 break 'label ReturnCode::StreamEnd;
1836 }
1837 };
1838 }
1839 };
1840
1841 self.mode = mode;
1842
1843 ret
1844 }
1845
    /// Records `msg` as the stream's error message and leaves inflate with
    /// `ReturnCode::DataError`.
    ///
    /// The message must be NUL-terminated: `inflate` later exposes it to C
    /// callers through `stream.msg` without copying (see the
    /// `assert!(msg.ends_with('\0'))` there).
    fn bad(&mut self, msg: &'static str) -> ReturnCode {
        #[cfg(all(feature = "std", test))]
        dbg!(msg);
        self.error_message = Some(msg);
        self.inflate_leave(ReturnCode::DataError)
    }
1852
    /// Common exit point for the decode loop; currently just forwards the
    /// return code unchanged.
    fn inflate_leave(&mut self, return_code: ReturnCode) -> ReturnCode {
        return_code
    }
1859
1860 fn decoding_state(&self) -> i32 {
1862 let bit_reader_bits = self.bit_reader.bits_in_buffer() as i32;
1863 debug_assert!(bit_reader_bits < 64);
1864
1865 let last = if self.flags.contains(Flags::IS_LAST_BLOCK) {
1866 64
1867 } else {
1868 0
1869 };
1870
1871 let mode = match self.mode {
1872 Mode::Type => 128,
1873 Mode::Len_ | Mode::CopyBlock => 256,
1874 _ => 0,
1875 };
1876
1877 bit_reader_bits | last | mode
1878 }
1879}
1880
/// Fast-path decode of literal/length/distance codes: dispatches to the
/// AVX2+BMI2 specialization when the running CPU supports it, otherwise to
/// the generic implementation.
unsafe fn inflate_fast_help(state: &mut State, start: usize) {
    #[cfg(any(target_arch = "x86_64", target_arch = "x86"))]
    if crate::cpu_features::is_enabled_avx2_and_bmi2() {
        // SAFETY: the runtime feature check above guarantees the target
        // features required by `inflate_fast_help_avx2`.
        return unsafe { inflate_fast_help_avx2(state, start) };
    }

    unsafe { inflate_fast_help_vanilla(state, start) };
}
1895
#[cfg(any(target_arch = "x86_64", target_arch = "x86"))]
#[target_feature(enable = "avx2")]
#[target_feature(enable = "bmi2")]
#[target_feature(enable = "bmi1")]
// AVX2/BMI specialization of the fast loop. Caller must ensure the CPU
// supports AVX2, BMI1 and BMI2 (checked in `inflate_fast_help`).
unsafe fn inflate_fast_help_avx2(state: &mut State, start: usize) {
    unsafe { inflate_fast_help_impl::<{ CpuFeatures::AVX2 }>(state, start) };
}
1908
// Portable (no special CPU features) instantiation of the fast loop.
unsafe fn inflate_fast_help_vanilla(state: &mut State, start: usize) {
    unsafe { inflate_fast_help_impl::<{ CpuFeatures::NONE }>(state, start) };
}
1917
#[inline(always)]
/// Core of the fast decoding loop, monomorphized over the CPU `FEATURES`
/// used by the writer's copy routines. Decodes length/literal and distance
/// codes with unchecked-speed reads while enough input and output slack
/// remains; falls back to the byte-at-a-time state machine otherwise.
///
/// NOTE(review): `_start` is currently unused — presumably kept for parity
/// with zlib's `inflate_fast(strm, start)` signature; confirm before removal.
unsafe fn inflate_fast_help_impl<const FEATURES: usize>(state: &mut State, _start: usize) {
    // Take ownership of the reader/writer for the duration of the loop;
    // both are moved back into `state` before returning.
    let mut bit_reader = BitReader::new(&[]);
    core::mem::swap(&mut bit_reader, &mut state.bit_reader);
    debug_assert!(bit_reader.bytes_remaining() >= 15);

    let mut writer = Writer::new(&mut []);
    core::mem::swap(&mut writer, &mut state.writer);

    // Root decode tables for literal/length and distance codes.
    let lcode = state.len_table_ref();
    let dcode = state.dist_table_ref();

    // Masks extracting a root-table index from the bit buffer.
    let lmask = (1u64 << state.len_table.bits) - 1;
    let dmask = (1u64 << state.dist_table.bits) - 1;

    // NOTE(review): the "extra safe" copy path is unimplemented (see the
    // `todo!()` below); this flag is always false here.
    let extra_safe = false;

    let window_size = state.window.size();

    // Deferred error message, reported after the reader/writer are restored.
    let mut bad = None;

    if bit_reader.bits_in_buffer() < 10 {
        debug_assert!(bit_reader.bytes_remaining() >= 15);
        // SAFETY: at least 8 readable input bytes remain (asserted above).
        unsafe { bit_reader.refill() };
    }
    debug_assert!(
        bit_reader.bytes_remaining() >= 8 && bit_reader.bytes_remaining_including_buffer() >= 15
    );

    'outer: loop {
        debug_assert!(
            bit_reader.bytes_remaining() >= 8
                && bit_reader.bytes_remaining_including_buffer() >= 15
        );

        // Look up the next literal/length symbol, interleaving the refill:
        // if the pre-refill buffer already held enough bits, use it directly.
        let mut here = {
            let bits = bit_reader.bits_in_buffer();
            let hold = bit_reader.hold();

            // SAFETY: loop invariant guarantees >= 8 readable bytes.
            unsafe { bit_reader.refill() };
            debug_assert!(bit_reader.bytes_remaining() >= 8);

            if bits as usize >= state.len_table.bits {
                lcode[(hold & lmask) as usize]
            } else {
                lcode[(bit_reader.hold() & lmask) as usize]
            }
        };

        // op == 0 is a literal; emit up to two literals eagerly before the
        // general handling below.
        if here.op == 0 {
            writer.push(here.val as u8);
            bit_reader.drop_bits(here.bits);
            here = lcode[(bit_reader.hold() & lmask) as usize];

            if here.op == 0 {
                writer.push(here.val as u8);
                bit_reader.drop_bits(here.bits);
                here = lcode[(bit_reader.hold() & lmask) as usize];
            }
        }

        'dolen: loop {
            bit_reader.drop_bits(here.bits);
            let op = here.op;

            if op == 0 {
                // Literal byte.
                writer.push(here.val as u8);
            } else if op & 16 != 0 {
                // Length code; low bits of `op` give the extra-bit count.
                let op = op & MAX_BITS;
                let mut len = here.val + bit_reader.bits(op as usize) as u16;
                bit_reader.drop_bits(op);

                here = dcode[(bit_reader.hold() & dmask) as usize];

                if bit_reader.bits_in_buffer() < MAX_BITS + MAX_DIST_EXTRA_BITS {
                    debug_assert!(bit_reader.bytes_remaining() >= 8);
                    // SAFETY: >= 8 readable bytes (asserted above).
                    unsafe { bit_reader.refill() };
                }

                'dodist: loop {
                    bit_reader.drop_bits(here.bits);
                    let op = here.op;

                    if op & 16 != 0 {
                        // Distance code; read the extra distance bits.
                        let op = op & MAX_BITS;
                        let dist = here.val + bit_reader.bits(op as usize) as u16;

                        if INFLATE_STRICT && dist as usize > state.dmax {
                            bad = Some("invalid distance too far back\0");
                            state.mode = Mode::Bad;
                            break 'outer;
                        }

                        bit_reader.drop_bits(op);

                        let written = writer.len();

                        if dist as usize > written {
                            // Match reaches back into the sliding window.
                            if (dist as usize - written) > state.window.have() {
                                if state.flags.contains(Flags::SANE) {
                                    bad = Some("invalid distance too far back\0");
                                    state.mode = Mode::Bad;
                                    break 'outer;
                                }

                                panic!("INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR")
                            }

                            let mut op = dist as usize - written;
                            let mut from;

                            let window_next = state.window.next();

                            if window_next == 0 {
                                // Window write head at 0: source is the tail.
                                from = window_size - op;
                            } else if window_next >= op {
                                // Source lies before the write head.
                                from = window_next - op;
                            } else {
                                // Source wraps around the window end.
                                op -= window_next;
                                from = window_size - op;

                                if op < len as usize {
                                    // Copy up to the window end, then restart
                                    // from the beginning of the window.
                                    len -= op as u16;
                                    writer.extend_from_window_with_features::<FEATURES>(
                                        &state.window,
                                        from..from + op,
                                    );
                                    from = 0;
                                    op = window_next;
                                }
                            }

                            let copy = Ord::min(op, len as usize);
                            writer.extend_from_window_with_features::<FEATURES>(
                                &state.window,
                                from..from + copy,
                            );

                            if op < len as usize {
                                // Remainder comes from output already written.
                                writer.copy_match_with_features::<FEATURES>(
                                    dist as usize,
                                    len as usize - op,
                                );
                            }
                        } else if extra_safe {
                            todo!()
                        } else {
                            // Match entirely within already-written output.
                            writer.copy_match_with_features::<FEATURES>(dist as usize, len as usize)
                        }
                    } else if (op & 64) == 0 {
                        // Second-level distance table lookup.
                        here = dcode[(here.val + bit_reader.bits(op as usize) as u16) as usize];
                        continue 'dodist;
                    } else {
                        bad = Some("invalid distance code\0");
                        state.mode = Mode::Bad;
                        break 'outer;
                    }

                    break 'dodist;
                }
            } else if (op & 64) == 0 {
                // Second-level length table lookup.
                here = lcode[(here.val + bit_reader.bits(op as usize) as u16) as usize];
                continue 'dolen;
            } else if op & 32 != 0 {
                // End-of-block code: return to block-header decoding.
                state.mode = Mode::Type;
                break 'outer;
            } else {
                bad = Some("invalid literal/length code\0");
                state.mode = Mode::Bad;
                break 'outer;
            }

            break 'dolen;
        }

        // Stay on the fast path only while enough input and output slack
        // remains for the unchecked reads/writes above.
        let remaining = bit_reader.bytes_remaining_including_buffer();
        if remaining >= INFLATE_FAST_MIN_HAVE && writer.remaining() >= INFLATE_FAST_MIN_LEFT {
            continue;
        }

        break 'outer;
    }

    // Give back whole bytes that were refilled but never consumed.
    bit_reader.return_unused_bytes();

    state.bit_reader = bit_reader;
    state.writer = writer;

    if let Some(error_message) = bad {
        debug_assert!(matches!(state.mode, Mode::Bad));
        state.bad(error_message);
    }
}
2164
2165pub fn prime(stream: &mut InflateStream, bits: i32, value: i32) -> ReturnCode {
2166 if bits == 0 {
2167 } else if bits < 0 {
2169 stream.state.bit_reader.init_bits();
2170 } else if bits > 16 || stream.state.bit_reader.bits_in_buffer() + bits as u8 > 32 {
2171 return ReturnCode::StreamError;
2172 } else {
2173 stream.state.bit_reader.prime(bits as u8, value as u64);
2174 }
2175
2176 ReturnCode::Ok
2177}
2178
/// Byte layout of the single backing allocation that holds both the `State`
/// and the sliding window (used by `init` and `copy`).
struct InflateAllocOffsets {
    // Size of the whole allocation, including alignment headroom.
    total_size: usize,
    // Offset of the `State`, relative to the 64-byte-aligned base pointer.
    state_pos: usize,
    // Offset of the window buffer, relative to the 64-byte-aligned base pointer.
    window_pos: usize,
}
2184
impl InflateAllocOffsets {
    /// Computes the layout of the combined state + window allocation.
    ///
    /// Both the state and the window start at 64-byte-aligned offsets
    /// *relative to the aligned base pointer*. The extra `ALIGN_SIZE - 1`
    /// added before the final rounding is deliberate headroom: `init`/`copy`
    /// re-align the raw allocation pointer at runtime, which can skip up to
    /// 63 leading bytes, and `total_size` must still cover everything.
    fn new() -> Self {
        use core::mem::size_of;

        // Padding past the window proper; matches the `+ 64` used when the
        // window is constructed in `init` and `copy`.
        const WINDOW_PAD_SIZE: usize = 64;

        const ALIGN_SIZE: usize = 64;
        let mut curr_size = 0usize;

        let state_size = size_of::<State>();
        let window_size = (1 << MAX_WBITS) + WINDOW_PAD_SIZE;

        let state_pos = curr_size.next_multiple_of(ALIGN_SIZE);
        curr_size = state_pos + state_size;

        let window_pos = curr_size.next_multiple_of(ALIGN_SIZE);
        curr_size = window_pos + window_size;

        // Round up, keeping room for the runtime base-pointer re-alignment.
        let total_size = (curr_size + (ALIGN_SIZE - 1)).next_multiple_of(ALIGN_SIZE);

        Self {
            total_size,
            state_pos,
            window_pos,
        }
    }
}
2220
/// Configuration for inflate; corresponds to the `windowBits` argument of
/// zlib's `inflateInit2`.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub struct InflateConfig {
    /// Base-2 log of the window size. Sign and added offsets select the
    /// wrapping mode; interpreted in `reset_with_config`.
    pub window_bits: i32,
}
2228
2229impl Default for InflateConfig {
2230 fn default() -> Self {
2231 Self {
2232 window_bits: DEF_WBITS,
2233 }
2234 }
2235}
2236
/// Allocates and initializes inflate state on `stream` (zlib's `inflateInit2`).
///
/// The `State` and the sliding window live in one 64-byte-aligned allocation
/// whose layout is computed by `InflateAllocOffsets::new`. Returns
/// `StreamError` when no allocator is available or the config is invalid,
/// and `MemError` when allocation fails.
pub fn init(stream: &mut z_stream, config: InflateConfig) -> ReturnCode {
    stream.msg = core::ptr::null_mut();

    // Install a default allocator when the caller did not provide one.
    #[cfg(feature = "rust-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_rust_allocator()
    }

    #[cfg(feature = "c-allocator")]
    if stream.zalloc.is_none() || stream.zfree.is_none() {
        stream.configure_default_c_allocator()
    }

    if stream.zalloc.is_none() || stream.zfree.is_none() {
        return ReturnCode::StreamError;
    }

    let mut state = State::new(&[], Writer::new(&mut []));

    state.chunksize = 32;

    let alloc = Allocator {
        zalloc: stream.zalloc.unwrap(),
        zfree: stream.zfree.unwrap(),
        opaque: stream.opaque,
        _marker: PhantomData,
    };
    let allocs = InflateAllocOffsets::new();

    let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
        return ReturnCode::MemError;
    };

    // Re-align the (possibly unaligned) allocation to a 64-byte boundary;
    // the layout's headroom guarantees `total_size` still covers everything.
    let address = allocation_start.as_ptr() as usize;
    let align_offset = address.next_multiple_of(64) - address;
    let buf = unsafe { allocation_start.as_ptr().add(align_offset) };

    // SAFETY: `window_pos`/`state_pos` are in-bounds offsets of the allocation.
    let window_allocation = unsafe { buf.add(allocs.window_pos) };
    let window = unsafe { Window::from_raw_parts(window_allocation, (1 << MAX_WBITS) + 64) };
    state.window = window;

    let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
    unsafe { state_allocation.write(state) };
    stream.state = state_allocation.cast::<internal_state>();

    if let Some(stream) = unsafe { InflateStream::from_stream_mut(stream) } {
        // Remember the raw allocation so `end` can free it later.
        stream.state.allocation_start = allocation_start.as_ptr();
        stream.state.total_allocation_size = allocs.total_size;
        let ret = reset_with_config(stream, config);

        if ret != ReturnCode::Ok {
            end(stream);
        }

        ret
    } else {
        ReturnCode::StreamError
    }
}
2302
2303pub fn reset_with_config(stream: &mut InflateStream, config: InflateConfig) -> ReturnCode {
2304 let mut window_bits = config.window_bits;
2305 let wrap;
2306
2307 if window_bits < 0 {
2308 wrap = 0;
2309
2310 if window_bits < -MAX_WBITS {
2311 return ReturnCode::StreamError;
2312 }
2313
2314 window_bits = -window_bits;
2315 } else {
2316 wrap = (window_bits >> 4) + 5; if window_bits < 48 {
2319 window_bits &= MAX_WBITS;
2320 }
2321 }
2322
2323 if window_bits != 0 && !(MIN_WBITS..=MAX_WBITS).contains(&window_bits) {
2324 #[cfg(feature = "std")]
2325 eprintln!("invalid windowBits");
2326 return ReturnCode::StreamError;
2327 }
2328
2329 stream.state.wrap = wrap as u8;
2330 stream.state.wbits = window_bits as _;
2331
2332 reset(stream)
2333}
2334
2335pub fn reset(stream: &mut InflateStream) -> ReturnCode {
2336 stream.state.window.clear();
2338
2339 stream.state.error_message = None;
2340
2341 reset_keep(stream)
2342}
2343
/// Resets the decoder to its initial state while keeping the configured
/// window bits and wrap mode (zlib's `inflateResetKeep`).
pub fn reset_keep(stream: &mut InflateStream) -> ReturnCode {
    stream.total_in = 0;
    stream.total_out = 0;
    stream.state.total = 0;

    stream.msg = core::ptr::null_mut();

    let state = &mut stream.state;

    if state.wrap != 0 {
        // zlib compatibility: seed `adler` from the wrap bit.
        stream.adler = (state.wrap & 1) as _;
    }

    state.mode = Mode::Head;
    state.checksum = crate::ADLER32_INITIAL_VALUE as u32;

    state.flags.update(Flags::IS_LAST_BLOCK, false);
    state.flags.update(Flags::HAVE_DICT, false);
    state.flags.update(Flags::SANE, true);
    // -1 means "gzip flags not yet known" (see `sync`).
    state.gzip_flags = -1;
    state.dmax = 32768;
    state.head = None;
    state.bit_reader = BitReader::new(&[]);

    state.next = 0;
    state.len_table = Table::default();
    state.dist_table = Table::default();

    // "unknown" look-back distance; consumed by `mark`.
    state.back = usize::MAX;

    ReturnCode::Ok
}
2377
/// Returns `state.next` — the number of code-table entries consumed by the
/// current Huffman tables (zlib-ng's `inflateCodesUsed`).
pub fn codes_used(stream: &InflateStream) -> usize {
    stream.state.next
}
2381
/// Decompresses as much data as possible (zlib's `inflate`).
///
/// # Safety
///
/// `stream.next_in`/`avail_in` and `stream.next_out`/`avail_out` must
/// describe valid, live buffers for the duration of the call.
pub unsafe fn inflate(stream: &mut InflateStream, flush: InflateFlush) -> ReturnCode {
    if stream.next_out.is_null() || (stream.next_in.is_null() && stream.avail_in != 0) {
        return ReturnCode::StreamError;
    }

    let state = &mut stream.state;

    if let Mode::Type = state.mode {
        state.mode = Mode::TypeDo;
    }

    state.flush = flush;

    // SAFETY: caller guarantees `next_in` points to `avail_in` readable bytes.
    unsafe {
        state
            .bit_reader
            .update_slice(stream.next_in, stream.avail_in as usize)
    };
    // SAFETY: caller guarantees `next_out` points to `avail_out` writable bytes.
    state.writer = unsafe { Writer::new_uninit(stream.next_out.cast(), stream.avail_out as usize) };

    state.in_available = stream.avail_in as _;
    state.out_available = stream.avail_out as _;

    let err = state.dispatch();

    // Propagate progress back into the public stream fields.
    let in_read = state.bit_reader.as_ptr() as usize - stream.next_in as usize;
    let out_written = state.out_available - (state.writer.capacity() - state.writer.len());

    stream.total_in += in_read as z_size;
    state.total = state.total.wrapping_add(out_written);
    stream.total_out = state.total as _;

    stream.avail_in = state.bit_reader.bytes_remaining() as u32;
    stream.next_in = state.bit_reader.as_ptr() as *mut u8;

    stream.avail_out = (state.writer.capacity() - state.writer.len()) as u32;
    stream.next_out = state.writer.next_out() as *mut u8;

    stream.adler = state.checksum as z_checksum;

    let valid_mode = |mode| !matches!(mode, Mode::Bad | Mode::Mem | Mode::Sync);
    let not_done = |mode| {
        !matches!(
            mode,
            Mode::Check | Mode::Length | Mode::Bad | Mode::Mem | Mode::Sync
        )
    };

    // Copy new output into the window so future matches can reach back to it.
    let must_update_window = state.window.size() != 0
        || (out_written != 0
            && valid_mode(state.mode)
            && (not_done(state.mode) || !matches!(state.flush, InflateFlush::Finish)));

    let update_checksum = state.wrap & 4 != 0;

    if must_update_window {
        state.window.extend(
            &state.writer.filled()[..out_written],
            state.gzip_flags,
            update_checksum,
            &mut state.checksum,
            &mut state.crc_fold,
        );
    }

    if let Some(msg) = state.error_message {
        // Messages are NUL-terminated so they can back the C-compatible
        // `msg` pointer without copying.
        assert!(msg.ends_with('\0'));
        stream.msg = msg.as_ptr() as *mut u8 as *mut core::ffi::c_char;
    }

    stream.data_type = state.decoding_state();

    // No progress at all (or an unfinished Finish) downgrades Ok to BufError.
    if ((in_read == 0 && out_written == 0) || flush == InflateFlush::Finish)
        && err == ReturnCode::Ok
    {
        ReturnCode::BufError
    } else {
        err
    }
}
2464
/// Scans `buf` for the `00 00 FF FF` full-flush marker, resuming a partial
/// match of `got` bytes from a previous call.
///
/// Returns `(got, next)`: the updated number of marker bytes matched (4
/// means the marker was found) and how many bytes of `buf` were consumed.
fn syncsearch(mut got: usize, buf: &[u8]) -> (usize, usize) {
    let mut next = 0;

    while got < 4 {
        let Some(&byte) = buf.get(next) else { break };
        // The first two marker bytes are 0x00, the last two 0xff.
        let expected = if got < 2 { 0x00 } else { 0xff };

        if byte == expected {
            got += 1;
        } else if byte != 0 {
            // Mismatch on a non-zero byte: restart the match from scratch.
            got = 0;
        } else {
            // A zero byte after `got >= 2` can still begin a fresh marker.
            got = 4 - got;
        }

        next += 1;
    }

    (got, next)
}
2482
/// Skips input until a possible full-flush point (zlib's `inflateSync`):
/// the `00 00 FF FF` pattern left by an empty stored block.
pub fn sync(stream: &mut InflateStream) -> ReturnCode {
    let state = &mut stream.state;

    if stream.avail_in == 0 && state.bit_reader.bits_in_buffer() < 8 {
        return ReturnCode::BufError;
    }
    if !matches!(state.mode, Mode::Sync) {
        state.mode = Mode::Sync;

        // First scan any whole bytes still sitting in the bit buffer.
        let (buf, len) = state.bit_reader.start_sync_search();

        (state.have, _) = syncsearch(0, &buf[..len]);
    }

    // Continue the search in the caller-provided input.
    let slice = unsafe { core::slice::from_raw_parts(stream.next_in, stream.avail_in as usize) };

    let len;
    (state.have, len) = syncsearch(state.have, slice);
    stream.next_in = unsafe { stream.next_in.add(len) };
    stream.avail_in -= len as u32;
    stream.total_in += len as z_size;

    // `have == 4` means the full marker was matched.
    if state.have != 4 {
        return ReturnCode::DataError;
    }

    if state.gzip_flags == -1 {
        // Header not seen yet: treat the stream as raw from here on.
        state.wrap = 0;
    } else {
        // Otherwise only disable checksum validation (bit 0b100).
        state.wrap &= !4;
    }

    // Reset, but preserve totals and the detected gzip flags.
    let flags = state.gzip_flags;
    let total_in = stream.total_in;
    let total_out = stream.total_out;

    reset(stream);

    stream.total_in = total_in;
    stream.total_out = total_out;

    stream.state.gzip_flags = flags;
    stream.state.mode = Mode::Type;

    ReturnCode::Ok
}
2534
/// Returns true when inflate is at a byte-aligned point inside a stored
/// block — the condition zlib's `inflateSyncPoint` reports.
pub fn sync_point(stream: &mut InflateStream) -> bool {
    matches!(stream.state.mode, Mode::Stored) && stream.state.bit_reader.bits_in_buffer() == 0
}
2546
/// Duplicates `source` into `dest`, deep-copying the state and window
/// (zlib's `inflateCopy`).
///
/// # Safety
///
/// `dest` must be valid for a write of an `InflateStream`.
pub unsafe fn copy<'a>(
    dest: &mut MaybeUninit<InflateStream<'a>>,
    source: &InflateStream<'a>,
) -> ReturnCode {
    if source.next_out.is_null() || (source.next_in.is_null() && source.avail_in != 0) {
        return ReturnCode::StreamError;
    }

    // Copy the stream fields; the `state` pointer is patched below.
    unsafe { core::ptr::copy_nonoverlapping(source, dest.as_mut_ptr(), 1) };

    let allocs = InflateAllocOffsets::new();
    debug_assert_eq!(allocs.total_size, source.state.total_allocation_size);

    let Some(allocation_start) = source.alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
        return ReturnCode::MemError;
    };

    // Re-align the fresh allocation exactly like `init` does.
    let address = allocation_start.as_ptr() as usize;
    let align_offset = address.next_multiple_of(64) - address;
    let buf = unsafe { allocation_start.as_ptr().add(align_offset) };

    let window_allocation = unsafe { buf.add(allocs.window_pos) };
    let window = unsafe {
        source
            .state
            .window
            .clone_to(window_allocation, (1 << MAX_WBITS) + 64)
    };

    let copy = unsafe { buf.add(allocs.state_pos).cast::<State>() };
    unsafe { core::ptr::copy_nonoverlapping(source.state, copy, 1) };

    // Fix up the fields of the copied state that must not alias the source.
    let field_ptr = unsafe { core::ptr::addr_of_mut!((*copy).window) };
    unsafe { core::ptr::write(field_ptr, window) };

    let field_ptr = unsafe { core::ptr::addr_of_mut!((*copy).allocation_start) };
    unsafe { core::ptr::write(field_ptr, allocation_start.as_ptr()) };

    // Point the copied stream at its own freshly allocated state.
    let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
    unsafe { core::ptr::write(field_ptr as *mut *mut State, copy) };

    ReturnCode::Ok
}
2593
2594pub fn undermine(stream: &mut InflateStream, subvert: i32) -> ReturnCode {
2595 stream.state.flags.update(Flags::SANE, (!subvert) != 0);
2596
2597 ReturnCode::Ok
2598}
2599
2600pub fn validate(stream: &mut InflateStream, check: bool) -> ReturnCode {
2602 if check && stream.state.wrap != 0 {
2603 stream.state.wrap |= 0b100;
2604 } else {
2605 stream.state.wrap &= !0b100;
2606 }
2607
2608 ReturnCode::Ok
2609}
2610
/// Implements zlib's `inflateMark`: the upper 16 bits report the look-back
/// distance (`state.back`, -1 pattern when unknown) and the lower bits the
/// progress within the current stored block or match.
pub fn mark(stream: &InflateStream) -> c_long {
    if stream.next_out.is_null() || (stream.next_in.is_null() && stream.avail_in != 0) {
        return c_long::MIN;
    }

    let state = &stream.state;

    let length = match state.mode {
        Mode::CopyBlock => state.length,
        Mode::Match => state.was - state.length,
        _ => 0,
    };

    // Shift as unsigned so the `usize::MAX` "unknown" sentinel becomes the
    // all-ones bit pattern instead of overflowing a signed shift.
    (((state.back as c_long) as c_ulong) << 16) as c_long + length as c_long
}
2626
/// Supplies a preset dictionary (zlib's `inflateSetDictionary`).
pub fn set_dictionary(stream: &mut InflateStream, dictionary: &[u8]) -> ReturnCode {
    // Wrapped streams may only set a dictionary while waiting in
    // `Mode::Dict`; raw streams (wrap == 0) may set it at any time.
    if stream.state.wrap != 0 && !matches!(stream.state.mode, Mode::Dict) {
        return ReturnCode::StreamError;
    }

    // For zlib streams, the dictionary's adler32 must match the id that was
    // read from the stream header.
    if matches!(stream.state.mode, Mode::Dict) {
        let dictid = adler32(1, dictionary);

        if dictid != stream.state.checksum {
            return ReturnCode::DataError;
        }
    }

    // Load the dictionary into the sliding window (checksum update disabled).
    stream.state.window.extend(
        dictionary,
        stream.state.gzip_flags,
        false,
        &mut stream.state.checksum,
        &mut stream.state.crc_fold,
    );

    stream.state.flags.update(Flags::HAVE_DICT, true);

    ReturnCode::Ok
}
2653
/// Frees all inflate state and returns the underlying `z_stream`
/// (zlib's `inflateEnd`).
pub fn end<'a>(stream: &'a mut InflateStream<'_>) -> &'a mut z_stream {
    let alloc = stream.alloc;
    let allocation_start = stream.state.allocation_start;
    let total_allocation_size = stream.state.total_allocation_size;

    // Detach the window so the state no longer references the allocation.
    let mut window = Window::empty();
    core::mem::swap(&mut window, &mut stream.state.window);

    // Null out the state pointer before freeing its backing memory.
    let stream = stream.as_z_stream_mut();
    let _ = core::mem::replace(&mut stream.state, core::ptr::null_mut());

    // SAFETY: `allocation_start`/`total_allocation_size` describe the single
    // allocation made in `init`/`copy`; no live reference into it remains.
    unsafe { alloc.deallocate(allocation_start, total_allocation_size) };

    stream
}
2669
2670pub unsafe fn get_header<'a>(
2679 stream: &mut InflateStream<'a>,
2680 head: Option<&'a mut gz_header>,
2681) -> ReturnCode {
2682 if (stream.state.wrap & 2) == 0 {
2683 return ReturnCode::StreamError;
2684 }
2685
2686 stream.state.head = head.map(|head| {
2687 head.done = 0;
2688 head
2689 });
2690 ReturnCode::Ok
2691}
2692
/// Copies the current sliding-window contents (the dictionary) into
/// `dictionary`, oldest bytes first, and returns the number of bytes
/// available (zlib's `inflateGetDictionary`).
///
/// # Safety
///
/// `dictionary` must be null or valid for writes of at least
/// `stream.state.window.have()` bytes.
pub unsafe fn get_dictionary(stream: &InflateStream<'_>, dictionary: *mut u8) -> usize {
    let whave = stream.state.window.have();
    let wnext = stream.state.window.next();

    if !dictionary.is_null() {
        unsafe {
            // The window is circular: copy the older segment (from the write
            // head `wnext` up to the end of the valid data) first...
            core::ptr::copy_nonoverlapping(
                stream.state.window.as_ptr().add(wnext),
                dictionary,
                whave - wnext,
            );

            // ...then the newest `wnext` bytes from the start of the window.
            core::ptr::copy_nonoverlapping(
                stream.state.window.as_ptr(),
                dictionary.add(whave).sub(wnext).cast(),
                wnext,
            );
        }
    }

    stream.state.window.have()
}
2718
#[cfg(test)]
mod tests {
    use super::*;

    // Regression test with a malformed input (presumably fuzzer-derived —
    // TODO confirm provenance): decompression must reject it with
    // `DataError` rather than writing past the output buffer.
    #[test]
    fn uncompress_buffer_overflow() {
        let mut output = [0; 1 << 13];
        let input = [
            72, 137, 58, 0, 3, 39, 255, 255, 255, 255, 255, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
            14, 14, 184, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 184, 14, 14,
            14, 14, 14, 14, 14, 63, 14, 14, 14, 14, 14, 14, 14, 14, 184, 14, 14, 255, 14, 103, 14,
            14, 14, 14, 14, 14, 61, 14, 255, 255, 63, 14, 14, 14, 14, 14, 14, 14, 14, 184, 14, 14,
            255, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 6, 14, 14, 14, 14, 14, 14, 14, 14, 71,
            4, 137, 106,
        ];

        let config = InflateConfig { window_bits: 15 };

        let (_decompressed, err) = decompress_slice(&mut output, &input, config);
        assert_eq!(err, ReturnCode::DataError);
    }
}