#![allow(non_snake_case)]
#![allow(clippy::missing_safety_doc)]

use core::ffi::{c_char, c_int, c_long, c_ulong};
5use core::marker::PhantomData;
6use core::mem::MaybeUninit;
7use core::ops::ControlFlow;
8
9mod bitreader;
10mod infback;
11mod inffixed_tbl;
12mod inftrees;
13mod window;
14mod writer;
15
16use crate::allocate::Allocator;
17use crate::c_api::internal_state;
18use crate::cpu_features::CpuFeatures;
19use crate::{
20 adler32::adler32,
21 c_api::{gz_header, z_checksum, z_size, z_stream, Z_DEFLATED},
22 inflate::writer::Writer,
23 Code, InflateFlush, ReturnCode, DEF_WBITS, MAX_WBITS, MIN_WBITS,
24};
25
26use crate::crc32::{crc32, Crc32Fold};
27
28pub use self::infback::{back, back_end, back_init};
29pub use self::window::Window;
30use self::{
31 bitreader::BitReader,
32 inftrees::{inflate_table, CodeType, InflateTable},
33};
34
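// When set, distances are additionally validated against `dmax` (zlib's INFLATE_STRICT
// build option); it is off by default.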
35const INFLATE_STRICT: bool = false;
36
37#[repr(C)]
40pub struct InflateStream<'a> {
41 pub(crate) next_in: *mut crate::c_api::Bytef,
42 pub(crate) avail_in: crate::c_api::uInt,
43 pub(crate) total_in: crate::c_api::z_size,
44 pub(crate) next_out: *mut crate::c_api::Bytef,
45 pub(crate) avail_out: crate::c_api::uInt,
46 pub(crate) total_out: crate::c_api::z_size,
47 pub(crate) msg: *mut c_char,
48 pub(crate) state: &'a mut State<'a>,
49 pub(crate) alloc: Allocator<'a>,
50 pub(crate) data_type: c_int,
51 pub(crate) adler: crate::c_api::z_checksum,
52 pub(crate) reserved: crate::c_api::uLong,
53}
54
55unsafe impl Sync for InflateStream<'_> {}
56unsafe impl Send for InflateStream<'_> {}
57
58#[cfg(feature = "__internal-test")]
59#[doc(hidden)]
60pub const INFLATE_STATE_SIZE: usize = core::mem::size_of::<crate::inflate::State>();
61
62#[cfg(feature = "__internal-test")]
63#[doc(hidden)]
64pub unsafe fn set_mode_dict(strm: &mut z_stream) {
65 unsafe {
66 (*(strm.state as *mut State)).mode = Mode::Dict;
67 }
68}
69
70#[cfg(feature = "__internal-test")]
71#[doc(hidden)]
72pub unsafe fn set_mode_sync(strm: *mut z_stream) {
73 unsafe {
74 (*((*strm).state as *mut State)).mode = Mode::Sync;
75 }
76}
77
78impl<'a> InflateStream<'a> {
79 const _S: () = assert!(core::mem::size_of::<z_stream>() == core::mem::size_of::<Self>());
82 const _A: () = assert!(core::mem::align_of::<z_stream>() == core::mem::align_of::<Self>());
83
84 #[inline(always)]
93 pub unsafe fn from_stream_ref(strm: *const z_stream) -> Option<&'a Self> {
94 {
95 let stream = unsafe { strm.as_ref() }?;
97
98 if stream.zalloc.is_none() || stream.zfree.is_none() {
99 return None;
100 }
101
102 if stream.state.is_null() {
103 return None;
104 }
105 }
106
107 unsafe { strm.cast::<InflateStream>().as_ref() }
109 }
110
111 #[inline(always)]
120 pub unsafe fn from_stream_mut(strm: *mut z_stream) -> Option<&'a mut Self> {
121 {
122 let stream = unsafe { strm.as_ref() }?;
124
125 if stream.zalloc.is_none() || stream.zfree.is_none() {
126 return None;
127 }
128
129 if stream.state.is_null() {
130 return None;
131 }
132 }
133
134 unsafe { strm.cast::<InflateStream>().as_mut() }
136 }
137
138 fn as_z_stream_mut(&mut self) -> &mut z_stream {
139 unsafe { &mut *(self as *mut _ as *mut z_stream) }
141 }
142
143 pub fn new(config: InflateConfig) -> Self {
144 let mut inner = crate::c_api::z_stream::default();
145
146 let ret = crate::inflate::init(&mut inner, config);
147 assert_eq!(ret, ReturnCode::Ok);
148
149 unsafe { core::mem::transmute(inner) }
150 }
151}
152
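// DEFLATE Huffman codes are at most 15 bits long; `MAX_BITS` also doubles as the mask
// that extracts the extra-bits count from a `Code::op` byte. Distance codes use at most
// 13 extra bits.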
const MAX_BITS: u8 = 15;
const MAX_DIST_EXTRA_BITS: u8 = 13;

pub fn decompress_slice<'a>(
173 output: &'a mut [u8],
174 input: &[u8],
175 config: InflateConfig,
176) -> (&'a mut [u8], ReturnCode) {
177 let output_uninit = unsafe {
179 core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
180 };
181
182 uncompress(output_uninit, input, config)
183}
184
185pub fn uncompress<'a>(
187 output: &'a mut [MaybeUninit<u8>],
188 input: &[u8],
189 config: InflateConfig,
190) -> (&'a mut [u8], ReturnCode) {
191 let (_consumed, output, ret) = uncompress2(output, input, config);
192 (output, ret)
193}
194
195pub fn uncompress2<'a>(
196 output: &'a mut [MaybeUninit<u8>],
197 input: &[u8],
198 config: InflateConfig,
199) -> (u64, &'a mut [u8], ReturnCode) {
200 let mut dest_len_ptr = output.len() as z_checksum;
201
202 let mut buf = [0u8];
204
205 let mut left;
206 let mut len = input.len() as u64;
207
208 let dest = if output.is_empty() {
209 left = 1;
210
211 buf.as_mut_ptr()
212 } else {
213 left = output.len() as u64;
214 dest_len_ptr = 0;
215
216 output.as_mut_ptr() as *mut u8
217 };
218
219 let mut stream = z_stream {
220 next_in: input.as_ptr() as *mut u8,
221 avail_in: 0,
222
223 zalloc: None,
224 zfree: None,
225 opaque: core::ptr::null_mut(),
226
227 ..z_stream::default()
228 };
229
230 let err = init(&mut stream, config);
231 if err != ReturnCode::Ok {
232 return (0, &mut [], err);
233 }
234
235 stream.next_out = dest;
236 stream.avail_out = 0;
237
238 let Some(stream) = (unsafe { InflateStream::from_stream_mut(&mut stream) }) else {
239 return (0, &mut [], ReturnCode::StreamError);
240 };
241
242 let err = loop {
243 if stream.avail_out == 0 {
244 stream.avail_out = Ord::min(left, u32::MAX as u64) as u32;
245 left -= stream.avail_out as u64;
246 }
247
248 if stream.avail_in == 0 {
249 stream.avail_in = Ord::min(len, u32::MAX as u64) as u32;
250 len -= stream.avail_in as u64;
251 }
252
253 let err = unsafe { inflate(stream, InflateFlush::NoFlush) };
254
255 if err != ReturnCode::Ok {
256 break err;
257 }
258 };
259
    // bytes of `input` actually consumed: the total minus whatever was never fed in or was left unread
    let consumed = input.len() as u64 - (len + u64::from(stream.avail_in));
261 if !output.is_empty() {
262 dest_len_ptr = stream.total_out;
263 } else if stream.total_out != 0 && err == ReturnCode::BufError {
264 left = 1;
265 }
266
267 let avail_out = stream.avail_out;
268
269 end(stream);
270
271 let ret = match err {
272 ReturnCode::StreamEnd => ReturnCode::Ok,
273 ReturnCode::NeedDict => ReturnCode::DataError,
274 ReturnCode::BufError if (left + avail_out as u64) != 0 => ReturnCode::DataError,
275 _ => err,
276 };
277
278 let output_slice = unsafe {
280 core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut u8, dest_len_ptr as usize)
281 };
282
283 (consumed, output_slice, ret)
284}
285
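// The decoder states, mirroring zlib's `inflate_mode` enum. `State::dispatch` below walks
// these variants; the `Bad` state keeps reporting a data error on subsequent calls.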
286#[derive(Debug, Clone, Copy)]
287#[repr(u8)]
288pub enum Mode {
289 Head,
290 Flags,
291 Time,
292 Os,
293 ExLen,
294 Extra,
295 Name,
296 Comment,
297 HCrc,
298 Sync,
299 Mem,
300 Length,
301 Type,
302 TypeDo,
303 Stored,
304 CopyBlock,
305 Check,
306 Len_,
307 Len,
308 Lit,
309 LenExt,
310 Dist,
311 DistExt,
312 Match,
313 Table,
314 LenLens,
315 CodeLens,
316 DictId,
317 Dict,
318 Done,
319 Bad,
320}
321
322#[derive(Default, Clone, Copy)]
323#[allow(clippy::enum_variant_names)]
324enum Codes {
325 #[default]
326 Fixed,
327 Codes,
328 Len,
329 Dist,
330}
331
332#[derive(Default, Clone, Copy)]
333struct Table {
334 codes: Codes,
335 bits: usize,
336}
337
338#[derive(Clone, Copy)]
339struct Flags(u8);
340
341impl Default for Flags {
342 fn default() -> Self {
343 Self::SANE
344 }
345}
346
347impl Flags {
348 const IS_LAST_BLOCK: Self = Self(0b0000_0001);
350
351 const HAVE_DICT: Self = Self(0b0000_0010);
353
354 const SANE: Self = Self(0b0000_0100);
356
357 pub(crate) const fn contains(self, other: Self) -> bool {
358 debug_assert!(other.0.count_ones() == 1);
359
360 self.0 & other.0 != 0
361 }
362
363 #[inline(always)]
364 pub(crate) fn update(&mut self, other: Self, value: bool) {
365 if value {
366 *self = Self(self.0 | other.0);
367 } else {
368 *self = Self(self.0 & !other.0);
369 }
370 }
371}
372
373#[repr(C, align(64))]
374pub(crate) struct State<'a> {
375 mode: Mode,
377
378 flags: Flags,
379
380 wbits: u8,
382
383 wrap: u8,
389
390 flush: InflateFlush,
391
392 window: Window<'a>,
394
395 ncode: usize,
398 nlen: usize,
400 ndist: usize,
402 have: usize,
    next: usize,

    bit_reader: BitReader<'a>,
409
410 writer: Writer<'a>,
411 total: usize,
412
413 length: usize,
415 offset: usize,
417
418 extra: usize,
420
421 back: usize,
423
424 was: usize,
426
427 chunksize: usize,
429
430 in_available: usize,
431 out_available: usize,
432
433 gzip_flags: i32,
434
435 checksum: u32,
436 crc_fold: Crc32Fold,
437
438 error_message: Option<&'static str>,
439
440 head: Option<&'a mut gz_header>,
442 dmax: usize,
443
444 len_table: Table,
446
447 dist_table: Table,
449
450 codes_codes: [Code; crate::ENOUGH_LENS],
451 len_codes: [Code; crate::ENOUGH_LENS],
452 dist_codes: [Code; crate::ENOUGH_DISTS],
453
454 lens: [u16; 320],
456 work: [u16; 288],
458
459 allocation_start: *mut u8,
460 total_allocation_size: usize,
461}
462
463impl<'a> State<'a> {
464 fn new(reader: &'a [u8], writer: Writer<'a>) -> Self {
465 let in_available = reader.len();
466 let out_available = writer.capacity();
467
468 Self {
469 flush: InflateFlush::NoFlush,
470
471 flags: Flags::default(),
472 wrap: 0,
473 mode: Mode::Head,
474 length: 0,
475
476 len_table: Table::default(),
477 dist_table: Table::default(),
478
479 wbits: 0,
480 offset: 0,
481 extra: 0,
482 back: 0,
483 was: 0,
484 chunksize: 0,
485 in_available,
486 out_available,
487
488 bit_reader: BitReader::new(reader),
489
490 writer,
491 total: 0,
492
493 window: Window::empty(),
494 head: None,
495
496 lens: [0u16; 320],
497 work: [0u16; 288],
498
499 ncode: 0,
500 nlen: 0,
501 ndist: 0,
502 have: 0,
503 next: 0,
504
505 error_message: None,
506
507 checksum: 0,
508 crc_fold: Crc32Fold::new(),
509
510 dmax: 0,
511 gzip_flags: 0,
512
513 codes_codes: [Code::default(); crate::ENOUGH_LENS],
514 len_codes: [Code::default(); crate::ENOUGH_LENS],
515 dist_codes: [Code::default(); crate::ENOUGH_DISTS],
516
517 allocation_start: core::ptr::null_mut(),
518 total_allocation_size: 0,
519 }
520 }
521
522 fn len_table_ref(&self) -> &[Code] {
523 match self.len_table.codes {
524 Codes::Fixed => &self::inffixed_tbl::LENFIX,
525 Codes::Codes => &self.codes_codes,
526 Codes::Len => &self.len_codes,
527 Codes::Dist => &self.dist_codes,
528 }
529 }
530
531 fn dist_table_ref(&self) -> &[Code] {
532 match self.dist_table.codes {
533 Codes::Fixed => &self::inffixed_tbl::DISTFIX,
534 Codes::Codes => &self.codes_codes,
535 Codes::Len => &self.len_codes,
536 Codes::Dist => &self.dist_codes,
537 }
538 }
539
540 fn len_table_get(&self, index: usize) -> Code {
541 self.len_table_ref()[index]
542 }
543
544 fn dist_table_get(&self, index: usize) -> Code {
545 self.dist_table_ref()[index]
546 }
547}
548
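// Unconditional 32-bit byte swap (zlib's ZSWAP32): `from_be(q.to_le())` swaps the bytes
// on both little- and big-endian targets.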
549const fn zswap32(q: u32) -> u32 {
551 u32::from_be(q.to_le())
552}
553
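// Minimum number of input bytes and output bytes that must be available before (and
// while) running the `inflate_fast` hot loop.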
554const INFLATE_FAST_MIN_HAVE: usize = 15;
555const INFLATE_FAST_MIN_LEFT: usize = 260;
556
557impl State<'_> {
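    // Decodes the Len/Lit/LenExt/Dist/DistExt/Match states. When enough input and output
    // space is available it defers to `inflate_fast_help`; otherwise it decodes one symbol
    // at a time with the bit reader, returning `Break(code)` when inflate has to stop.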
558 fn len_and_friends(&mut self) -> ControlFlow<ReturnCode, ()> {
568 let avail_in = self.bit_reader.bytes_remaining();
569 let avail_out = self.writer.remaining();
570
571 if avail_in >= INFLATE_FAST_MIN_HAVE && avail_out >= INFLATE_FAST_MIN_LEFT {
572 unsafe { inflate_fast_help(self, 0) };
574 match self.mode {
575 Mode::Len => {}
576 _ => return ControlFlow::Continue(()),
577 }
578 }
579
580 let mut mode;
581 let mut writer;
582 let mut bit_reader;
583
584 macro_rules! load {
585 () => {
586 mode = self.mode;
587 writer = core::mem::replace(&mut self.writer, Writer::new(&mut []));
588 bit_reader = self.bit_reader;
589 };
590 }
591
592 macro_rules! restore {
593 () => {
594 self.mode = mode;
595 self.writer = writer;
596 self.bit_reader = bit_reader;
597 };
598 }
599
600 load!();
601
602 let len_table = match self.len_table.codes {
603 Codes::Fixed => &self::inffixed_tbl::LENFIX[..],
604 Codes::Codes => &self.codes_codes,
605 Codes::Len => &self.len_codes,
606 Codes::Dist => &self.dist_codes,
607 };
608
609 let dist_table = match self.dist_table.codes {
610 Codes::Fixed => &self::inffixed_tbl::DISTFIX[..],
611 Codes::Codes => &self.codes_codes,
612 Codes::Len => &self.len_codes,
613 Codes::Dist => &self.dist_codes,
614 };
615
616 loop {
617 mode = 'top: {
618 match mode {
619 Mode::Len => {
620 let avail_in = bit_reader.bytes_remaining();
621 let avail_out = writer.remaining();
622
623 if avail_in >= INFLATE_FAST_MIN_HAVE && avail_out >= INFLATE_FAST_MIN_LEFT {
628 restore!();
629 unsafe { inflate_fast_help(self, 0) };
633 return ControlFlow::Continue(());
634 }
635
636 self.back = 0;
637
638 let mut here;
640 loop {
641 let bits = bit_reader.bits(self.len_table.bits);
642 here = len_table[bits as usize];
643
644 if here.bits <= bit_reader.bits_in_buffer() {
645 break;
646 }
647
648 if let Err(return_code) = bit_reader.pull_byte() {
649 restore!();
650 return ControlFlow::Break(return_code);
651 };
652 }
653
654 if here.op != 0 && here.op & 0xf0 == 0 {
655 let last = here;
656 loop {
657 let bits = bit_reader.bits((last.bits + last.op) as usize) as u16;
658 here = len_table[(last.val + (bits >> last.bits)) as usize];
659 if last.bits + here.bits <= bit_reader.bits_in_buffer() {
660 break;
661 }
662
663 if let Err(return_code) = bit_reader.pull_byte() {
664 restore!();
665 return ControlFlow::Break(return_code);
666 };
667 }
668
669 bit_reader.drop_bits(last.bits);
670 self.back += last.bits as usize;
671 }
672
673 bit_reader.drop_bits(here.bits);
674 self.back += here.bits as usize;
675 self.length = here.val as usize;
676
677 if here.op == 0 {
678 break 'top Mode::Lit;
679 } else if here.op & 32 != 0 {
680 self.back = usize::MAX;
685 mode = Mode::Type;
686
687 restore!();
688 return ControlFlow::Continue(());
689 } else if here.op & 64 != 0 {
690 mode = Mode::Bad;
691 {
692 restore!();
693 let this = &mut *self;
694 let msg: &'static str = "invalid literal/length code\0";
695 #[cfg(all(feature = "std", test))]
696 dbg!(msg);
697 this.error_message = Some(msg);
698 return ControlFlow::Break(ReturnCode::DataError);
699 }
700 } else {
701 self.extra = (here.op & MAX_BITS) as usize;
703 break 'top Mode::LenExt;
704 }
705 }
706 Mode::Lit => {
707 if writer.is_full() {
709 restore!();
710 #[cfg(all(test, feature = "std"))]
711 eprintln!("Ok: writer is full ({} bytes)", self.writer.capacity());
712 return ControlFlow::Break(ReturnCode::Ok);
713 }
714
715 writer.push(self.length as u8);
716
717 break 'top Mode::Len;
718 }
719 Mode::LenExt => {
720 let extra = self.extra;
722
723 if extra != 0 {
725 match bit_reader.need_bits(extra) {
726 Err(return_code) => {
727 restore!();
728 return ControlFlow::Break(return_code);
729 }
730 Ok(v) => v,
731 };
732 self.length += bit_reader.bits(extra) as usize;
733 bit_reader.drop_bits(extra as u8);
734 self.back += extra;
735 }
736
737 self.was = self.length;
740
741 break 'top Mode::Dist;
742 }
743 Mode::Dist => {
744 let mut here;
748 loop {
749 let bits = bit_reader.bits(self.dist_table.bits) as usize;
750 here = dist_table[bits];
751 if here.bits <= bit_reader.bits_in_buffer() {
752 break;
753 }
754
755 if let Err(return_code) = bit_reader.pull_byte() {
756 restore!();
757 return ControlFlow::Break(return_code);
758 };
759 }
760
761 if here.op & 0xf0 == 0 {
762 let last = here;
763
764 loop {
765 let bits = bit_reader.bits((last.bits + last.op) as usize);
766 here =
767 dist_table[last.val as usize + ((bits as usize) >> last.bits)];
768
769 if last.bits + here.bits <= bit_reader.bits_in_buffer() {
770 break;
771 }
772
773 if let Err(return_code) = bit_reader.pull_byte() {
774 restore!();
775 return ControlFlow::Break(return_code);
776 };
777 }
778
779 bit_reader.drop_bits(last.bits);
780 self.back += last.bits as usize;
781 }
782
783 bit_reader.drop_bits(here.bits);
784
785 if here.op & 64 != 0 {
786 restore!();
787 self.mode = Mode::Bad;
788 return ControlFlow::Break(self.bad("invalid distance code\0"));
789 }
790
791 self.offset = here.val as usize;
792
793 self.extra = (here.op & MAX_BITS) as usize;
794
795 break 'top Mode::DistExt;
796 }
797 Mode::DistExt => {
798 let extra = self.extra;
800
801 if extra > 0 {
802 match bit_reader.need_bits(extra) {
803 Err(return_code) => {
804 restore!();
805 return ControlFlow::Break(return_code);
806 }
807 Ok(v) => v,
808 };
809 self.offset += bit_reader.bits(extra) as usize;
810 bit_reader.drop_bits(extra as u8);
811 self.back += extra;
812 }
813
814 if INFLATE_STRICT && self.offset > self.dmax {
815 restore!();
816 self.mode = Mode::Bad;
817 return ControlFlow::Break(
818 self.bad("invalid distance code too far back\0"),
819 );
820 }
821
822 break 'top Mode::Match;
825 }
826 Mode::Match => {
827 if writer.is_full() {
829 restore!();
830 #[cfg(all(feature = "std", test))]
831 eprintln!(
832 "BufError: writer is full ({} bytes)",
833 self.writer.capacity()
834 );
835 return ControlFlow::Break(ReturnCode::Ok);
836 }
837
838 let left = writer.remaining();
839 let copy = writer.len();
840
841 let copy = if self.offset > copy {
842 let mut copy = self.offset - copy;
845
846 if copy > self.window.have() {
847 if self.flags.contains(Flags::SANE) {
848 restore!();
849 self.mode = Mode::Bad;
850 return ControlFlow::Break(
851 self.bad("invalid distance too far back\0"),
852 );
853 }
854
855 panic!("INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR")
857 }
858
859 let wnext = self.window.next();
860 let wsize = self.window.size();
861
862 let from = if copy > wnext {
863 copy -= wnext;
864 wsize - copy
865 } else {
866 wnext - copy
867 };
868
869 copy = Ord::min(copy, self.length);
870 copy = Ord::min(copy, left);
871
872 writer.extend_from_window(&self.window, from..from + copy);
873
874 copy
875 } else {
876 let copy = Ord::min(self.length, left);
877 writer.copy_match(self.offset, copy);
878
879 copy
880 };
881
882 self.length -= copy;
883
884 if self.length == 0 {
885 break 'top Mode::Len;
886 } else {
887 break 'top Mode::Match;
890 }
891 }
892 _ => unsafe { core::hint::unreachable_unchecked() },
893 }
894 }
895 }
896 }
897
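    // The main inflate state machine: one arm per `Mode`, mirroring the big switch in
    // zlib's inflate(). `break 'blk <mode>` advances to the next state within this call;
    // `break 'label <code>` leaves the state machine with that return code.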
898 fn dispatch(&mut self) -> ReturnCode {
899 let mut mode = self.mode;
901
902 macro_rules! pull_byte {
903 ($self:expr) => {
904 match $self.bit_reader.pull_byte() {
905 Err(return_code) => {
906 self.mode = mode;
907 return $self.inflate_leave(return_code);
908 }
909 Ok(_) => (),
910 }
911 };
912 }
913
914 macro_rules! need_bits {
915 ($self:expr, $n:expr) => {
916 match $self.bit_reader.need_bits($n) {
917 Err(return_code) => {
918 self.mode = mode;
919 return $self.inflate_leave(return_code);
920 }
921 Ok(v) => v,
922 }
923 };
924 }
925
926 let ret = 'label: loop {
927 mode = 'blk: {
928 match mode {
929 Mode::Head => {
930 if self.wrap == 0 {
931 break 'blk Mode::TypeDo;
932 }
933
934 need_bits!(self, 16);
935
936 if (self.wrap & 2) != 0 && self.bit_reader.hold() == 0x8b1f {
938 if self.wbits == 0 {
939 self.wbits = 15;
940 }
941
942 let b0 = self.bit_reader.bits(8) as u8;
943 let b1 = (self.bit_reader.hold() >> 8) as u8;
944 self.checksum = crc32(crate::CRC32_INITIAL_VALUE, &[b0, b1]);
945 self.bit_reader.init_bits();
946
947 break 'blk Mode::Flags;
948 }
949
950 if let Some(header) = &mut self.head {
951 header.done = -1;
952 }
953
954 if (self.wrap & 1) == 0
956 || ((self.bit_reader.bits(8) << 8) + (self.bit_reader.hold() >> 8)) % 31
957 != 0
958 {
959 mode = Mode::Bad;
960 break 'label self.bad("incorrect header check\0");
961 }
962
963 if self.bit_reader.bits(4) != Z_DEFLATED as u64 {
964 mode = Mode::Bad;
965 break 'label self.bad("unknown compression method\0");
966 }
967
968 self.bit_reader.drop_bits(4);
969 let len = self.bit_reader.bits(4) as u8 + 8;
970
971 if self.wbits == 0 {
972 self.wbits = len;
973 }
974
975 if len as i32 > MAX_WBITS || len > self.wbits {
976 mode = Mode::Bad;
977 break 'label self.bad("invalid window size\0");
978 }
979
                        self.dmax = 1 << len;
                        self.gzip_flags = 0;
                        self.checksum = crate::ADLER32_INITIAL_VALUE as _;
983
984 if self.bit_reader.hold() & 0x200 != 0 {
985 self.bit_reader.init_bits();
986
987 break 'blk Mode::DictId;
988 } else {
989 self.bit_reader.init_bits();
990
991 break 'blk Mode::Type;
992 }
993 }
994 Mode::Flags => {
995 need_bits!(self, 16);
996 self.gzip_flags = self.bit_reader.hold() as i32;
997
998 if self.gzip_flags & 0xff != Z_DEFLATED {
1000 mode = Mode::Bad;
1001 break 'label self.bad("unknown compression method\0");
1002 }
1003
1004 if self.gzip_flags & 0xe000 != 0 {
1005 mode = Mode::Bad;
1006 break 'label self.bad("unknown header flags set\0");
1007 }
1008
1009 if let Some(head) = self.head.as_mut() {
1010 head.text = ((self.bit_reader.hold() >> 8) & 1) as i32;
1011 }
1012
1013 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1014 let b0 = self.bit_reader.bits(8) as u8;
1015 let b1 = (self.bit_reader.hold() >> 8) as u8;
1016 self.checksum = crc32(self.checksum, &[b0, b1]);
1017 }
1018
1019 self.bit_reader.init_bits();
1020
1021 break 'blk Mode::Time;
1022 }
1023 Mode::Time => {
1024 need_bits!(self, 32);
1025 if let Some(head) = self.head.as_mut() {
1026 head.time = self.bit_reader.hold() as z_size;
1027 }
1028
1029 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1030 let bytes = (self.bit_reader.hold() as u32).to_le_bytes();
1031 self.checksum = crc32(self.checksum, &bytes);
1032 }
1033
1034 self.bit_reader.init_bits();
1035
1036 break 'blk Mode::Os;
1037 }
1038 Mode::Os => {
1039 need_bits!(self, 16);
1040 if let Some(head) = self.head.as_mut() {
1041 head.xflags = (self.bit_reader.hold() & 0xff) as i32;
1042 head.os = (self.bit_reader.hold() >> 8) as i32;
1043 }
1044
1045 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1046 let bytes = (self.bit_reader.hold() as u16).to_le_bytes();
1047 self.checksum = crc32(self.checksum, &bytes);
1048 }
1049
1050 self.bit_reader.init_bits();
1051
1052 break 'blk Mode::ExLen;
1053 }
1054 Mode::ExLen => {
1055 if (self.gzip_flags & 0x0400) != 0 {
1056 need_bits!(self, 16);
1057
1058 self.length = self.bit_reader.hold() as usize;
1060 if let Some(head) = self.head.as_mut() {
1061 head.extra_len = self.length as u32;
1062 }
1063
1064 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1065 let bytes = (self.bit_reader.hold() as u16).to_le_bytes();
1066 self.checksum = crc32(self.checksum, &bytes);
1067 }
1068 self.bit_reader.init_bits();
1069 } else if let Some(head) = self.head.as_mut() {
1070 head.extra = core::ptr::null_mut();
1071 }
1072
1073 break 'blk Mode::Extra;
1074 }
1075 Mode::Extra => {
1076 if (self.gzip_flags & 0x0400) != 0 {
1077 let extra_available =
1079 Ord::min(self.length, self.bit_reader.bytes_remaining());
1080
1081 if extra_available > 0 {
1082 if let Some(head) = self.head.as_mut() {
1083 if !head.extra.is_null() {
1084 let written_so_far = head.extra_len as usize - self.length;
1091
1092 let count = Ord::min(
1094 (head.extra_max as usize)
1095 .saturating_sub(written_so_far),
1096 extra_available,
1097 );
1098
1099 let next_write_offset =
1102 Ord::min(written_so_far, head.extra_max as usize);
1103
1104 unsafe {
1105 core::ptr::copy_nonoverlapping(
1109 self.bit_reader.as_mut_ptr(),
1110 head.extra.add(next_write_offset),
1111 count,
1112 );
1113 }
1114 }
1115 }
1116
1117 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1119 let extra_slice =
1120 &self.bit_reader.as_slice()[..extra_available];
1121 self.checksum = crc32(self.checksum, extra_slice)
1122 }
1123
1124 self.in_available -= extra_available;
1125 self.bit_reader.advance(extra_available);
1126 self.length -= extra_available;
1127 }
1128
1129 if self.length != 0 {
1131 break 'label self.inflate_leave(ReturnCode::Ok);
1132 }
1133 }
1134
1135 self.length = 0;
1136
1137 break 'blk Mode::Name;
1138 }
1139 Mode::Name => {
1140 if (self.gzip_flags & 0x0800) != 0 {
1141 if self.in_available == 0 {
1142 break 'label self.inflate_leave(ReturnCode::Ok);
1143 }
1144
1145 let slice = self.bit_reader.as_slice();
1148 let null_terminator_index = slice.iter().position(|c| *c == 0);
1149
1150 let name_slice = match null_terminator_index {
1152 Some(i) => &slice[..=i],
1153 None => slice,
1154 };
1155
1156 if let Some(head) = self.head.as_mut() {
1158 if !head.name.is_null() {
1159 let remaining_name_bytes = (head.name_max as usize)
1160 .checked_sub(self.length)
1161 .expect("name out of bounds");
1162 let copy = Ord::min(name_slice.len(), remaining_name_bytes);
1163
1164 unsafe {
1165 core::ptr::copy_nonoverlapping(
1168 name_slice.as_ptr(),
1169 head.name.add(self.length),
1170 copy,
1171 )
1172 };
1173
1174 self.length += copy;
1175 }
1176 }
1177
1178 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1179 self.checksum = crc32(self.checksum, name_slice);
1180 }
1181
1182 let reached_end = name_slice.last() == Some(&0);
1183 self.bit_reader.advance(name_slice.len());
1184
1185 if !reached_end && self.bit_reader.bytes_remaining() == 0 {
1186 break 'label self.inflate_leave(ReturnCode::Ok);
1187 }
1188 } else if let Some(head) = self.head.as_mut() {
1189 head.name = core::ptr::null_mut();
1190 }
1191
1192 self.length = 0;
1193
1194 break 'blk Mode::Comment;
1195 }
1196 Mode::Comment => {
1197 if (self.gzip_flags & 0x01000) != 0 {
1198 if self.in_available == 0 {
1199 break 'label self.inflate_leave(ReturnCode::Ok);
1200 }
1201
1202 let slice = self.bit_reader.as_slice();
1205 let null_terminator_index = slice.iter().position(|c| *c == 0);
1206
1207 let comment_slice = match null_terminator_index {
1209 Some(i) => &slice[..=i],
1210 None => slice,
1211 };
1212
1213 if let Some(head) = self.head.as_mut() {
1215 if !head.comment.is_null() {
1216 let remaining_comm_bytes = (head.comm_max as usize)
1217 .checked_sub(self.length)
1218 .expect("comm out of bounds");
1219 let copy = Ord::min(comment_slice.len(), remaining_comm_bytes);
1220
1221 unsafe {
1222 core::ptr::copy_nonoverlapping(
1225 comment_slice.as_ptr(),
1226 head.comment.add(self.length),
1227 copy,
1228 )
1229 };
1230
1231 self.length += copy;
1232 }
1233 }
1234
1235 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1236 self.checksum = crc32(self.checksum, comment_slice);
1237 }
1238
1239 let reached_end = comment_slice.last() == Some(&0);
1240 self.bit_reader.advance(comment_slice.len());
1241
1242 if !reached_end && self.bit_reader.bytes_remaining() == 0 {
1243 break 'label self.inflate_leave(ReturnCode::Ok);
1244 }
1245 } else if let Some(head) = self.head.as_mut() {
1246 head.comment = core::ptr::null_mut();
1247 }
1248
1249 break 'blk Mode::HCrc;
1250 }
1251 Mode::HCrc => {
1252 if (self.gzip_flags & 0x0200) != 0 {
1253 need_bits!(self, 16);
1254
1255 if (self.wrap & 4) != 0
1256 && self.bit_reader.hold() as u32 != (self.checksum & 0xffff)
1257 {
1258 mode = Mode::Bad;
1259 break 'label self.bad("header crc mismatch\0");
1260 }
1261
1262 self.bit_reader.init_bits();
1263 }
1264
1265 if let Some(head) = self.head.as_mut() {
1266 head.hcrc = (self.gzip_flags >> 9) & 1;
1267 head.done = 1;
1268 }
1269
1270 if (self.wrap & 4 != 0) && self.gzip_flags != 0 {
1272 self.crc_fold = Crc32Fold::new();
1273 self.checksum = crate::CRC32_INITIAL_VALUE;
1274 }
1275
1276 break 'blk Mode::Type;
1277 }
1278 Mode::Type => {
1279 use InflateFlush::*;
1280
1281 match self.flush {
1282 Block | Trees => break 'label ReturnCode::Ok,
1283 NoFlush | SyncFlush | Finish => {
1284 break 'blk Mode::TypeDo;
1286 }
1287 }
1288 }
1289 Mode::TypeDo => {
1290 if self.flags.contains(Flags::IS_LAST_BLOCK) {
1291 self.bit_reader.next_byte_boundary();
1292 break 'blk Mode::Check;
1293 }
1294
1295 need_bits!(self, 3);
1296 self.flags
1298 .update(Flags::IS_LAST_BLOCK, self.bit_reader.bits(1) != 0);
1299 self.bit_reader.drop_bits(1);
1300
1301 match self.bit_reader.bits(2) {
1302 0b00 => {
1303 self.bit_reader.drop_bits(2);
1306
1307 break 'blk Mode::Stored;
1308 }
1309 0b01 => {
1310 self.len_table = Table {
1313 codes: Codes::Fixed,
1314 bits: 9,
1315 };
1316
1317 self.dist_table = Table {
1318 codes: Codes::Fixed,
1319 bits: 5,
1320 };
1321
1322 mode = Mode::Len_;
1323
1324 self.bit_reader.drop_bits(2);
1325
1326 if let InflateFlush::Trees = self.flush {
1327 break 'label self.inflate_leave(ReturnCode::Ok);
1328 } else {
1329 break 'blk Mode::Len_;
1330 }
1331 }
1332 0b10 => {
1333 self.bit_reader.drop_bits(2);
1336
1337 break 'blk Mode::Table;
1338 }
1339 0b11 => {
1340 self.bit_reader.drop_bits(2);
1343
1344 mode = Mode::Bad;
1345 break 'label self.bad("invalid block type\0");
1346 }
1347 _ => {
1348 unreachable!("BitReader::bits(2) only yields a value of two bits, so this match is already exhaustive")
1350 }
1351 }
1352 }
1353 Mode::Stored => {
1354 self.bit_reader.next_byte_boundary();
1355
1356 need_bits!(self, 32);
1357
1358 let hold = self.bit_reader.bits(32) as u32;
1359
1360 if hold as u16 != !((hold >> 16) as u16) {
1363 mode = Mode::Bad;
1364 break 'label self.bad("invalid stored block lengths\0");
1365 }
1366
1367 self.length = hold as usize & 0xFFFF;
1368 self.bit_reader.init_bits();
1371
1372 if let InflateFlush::Trees = self.flush {
1373 break 'label self.inflate_leave(ReturnCode::Ok);
1374 } else {
1375 break 'blk Mode::CopyBlock;
1376 }
1377 }
1378 Mode::CopyBlock => {
1379 loop {
1380 let mut copy = self.length;
1381
1382 if copy == 0 {
1383 break;
1384 }
1385
1386 copy = Ord::min(copy, self.writer.remaining());
1387 copy = Ord::min(copy, self.bit_reader.bytes_remaining());
1388
1389 if copy == 0 {
1390 break 'label self.inflate_leave(ReturnCode::Ok);
1391 }
1392
1393 self.writer.extend(&self.bit_reader.as_slice()[..copy]);
1394 self.bit_reader.advance(copy);
1395
1396 self.length -= copy;
1397 }
1398
1399 break 'blk Mode::Type;
1400 }
1401 Mode::Check => {
1402 if !cfg!(feature = "__internal-fuzz-disable-checksum") && self.wrap != 0 {
1403 need_bits!(self, 32);
1404
1405 self.total += self.writer.len();
1406
1407 if self.wrap & 4 != 0 {
1408 if self.gzip_flags != 0 {
1409 self.crc_fold.fold(self.writer.filled(), self.checksum);
1410 self.checksum = self.crc_fold.finish();
1411 } else {
1412 self.checksum = adler32(self.checksum, self.writer.filled());
1413 }
1414 }
1415
1416 let given_checksum = if self.gzip_flags != 0 {
1417 self.bit_reader.hold() as u32
1418 } else {
1419 zswap32(self.bit_reader.hold() as u32)
1420 };
1421
1422 self.out_available = self.writer.capacity() - self.writer.len();
1423
1424 if self.wrap & 4 != 0 && given_checksum != self.checksum {
1425 mode = Mode::Bad;
1426 break 'label self.bad("incorrect data check\0");
1427 }
1428
1429 self.bit_reader.init_bits();
1430 }
1431
1432 break 'blk Mode::Length;
1433 }
1434 Mode::Len_ => {
1435 break 'blk Mode::Len;
1436 }
1437 Mode::Len => {
1438 self.mode = mode;
1439 let val = self.len_and_friends();
1440 mode = self.mode;
1441 match val {
1442 ControlFlow::Break(return_code) => break 'label return_code,
1443 ControlFlow::Continue(()) => continue 'label,
1444 }
1445 }
1446 Mode::LenExt => {
1447 let extra = self.extra;
1449
1450 if extra != 0 {
1452 need_bits!(self, extra);
1453 self.length += self.bit_reader.bits(extra) as usize;
1454 self.bit_reader.drop_bits(extra as u8);
1455 self.back += extra;
1456 }
1457
1458 self.was = self.length;
1461
1462 break 'blk Mode::Dist;
1463 }
1464 Mode::Lit => {
1465 if self.writer.is_full() {
1467 #[cfg(all(test, feature = "std"))]
1468 eprintln!("Ok: writer is full ({} bytes)", self.writer.capacity());
1469 break 'label self.inflate_leave(ReturnCode::Ok);
1470 }
1471
1472 self.writer.push(self.length as u8);
1473
1474 break 'blk Mode::Len;
1475 }
1476 Mode::Dist => {
1477 let mut here;
1481 loop {
1482 let bits = self.bit_reader.bits(self.dist_table.bits) as usize;
1483 here = self.dist_table_get(bits);
1484 if here.bits <= self.bit_reader.bits_in_buffer() {
1485 break;
1486 }
1487
1488 pull_byte!(self);
1489 }
1490
1491 if here.op & 0xf0 == 0 {
1492 let last = here;
1493
1494 loop {
1495 let bits = self.bit_reader.bits((last.bits + last.op) as usize);
1496 here = self.dist_table_get(
1497 last.val as usize + ((bits as usize) >> last.bits),
1498 );
1499
1500 if last.bits + here.bits <= self.bit_reader.bits_in_buffer() {
1501 break;
1502 }
1503
1504 pull_byte!(self);
1505 }
1506
1507 self.bit_reader.drop_bits(last.bits);
1508 self.back += last.bits as usize;
1509 }
1510
1511 self.bit_reader.drop_bits(here.bits);
1512
1513 if here.op & 64 != 0 {
1514 mode = Mode::Bad;
1515 break 'label self.bad("invalid distance code\0");
1516 }
1517
1518 self.offset = here.val as usize;
1519
1520 self.extra = (here.op & MAX_BITS) as usize;
1521
1522 break 'blk Mode::DistExt;
1523 }
1524 Mode::DistExt => {
1525 let extra = self.extra;
1527
1528 if extra > 0 {
1529 need_bits!(self, extra);
1530 self.offset += self.bit_reader.bits(extra) as usize;
1531 self.bit_reader.drop_bits(extra as u8);
1532 self.back += extra;
1533 }
1534
1535 if INFLATE_STRICT && self.offset > self.dmax {
1536 mode = Mode::Bad;
1537 break 'label self.bad("invalid distance code too far back\0");
1538 }
1539
1540 break 'blk Mode::Match;
1543 }
1544 Mode::Match => {
1545 'match_: loop {
1548 if self.writer.is_full() {
1549 #[cfg(all(feature = "std", test))]
1550 eprintln!(
1551 "BufError: writer is full ({} bytes)",
1552 self.writer.capacity()
1553 );
1554 break 'label self.inflate_leave(ReturnCode::Ok);
1555 }
1556
1557 let left = self.writer.remaining();
1558 let copy = self.writer.len();
1559
1560 let copy = if self.offset > copy {
1561 let mut copy = self.offset - copy;
1564
1565 if copy > self.window.have() {
1566 if self.flags.contains(Flags::SANE) {
1567 mode = Mode::Bad;
1568 break 'label self.bad("invalid distance too far back\0");
1569 }
1570
1571 panic!("INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR")
1573 }
1574
1575 let wnext = self.window.next();
1576 let wsize = self.window.size();
1577
1578 let from = if copy > wnext {
1579 copy -= wnext;
1580 wsize - copy
1581 } else {
1582 wnext - copy
1583 };
1584
1585 copy = Ord::min(copy, self.length);
1586 copy = Ord::min(copy, left);
1587
1588 self.writer
1589 .extend_from_window(&self.window, from..from + copy);
1590
1591 copy
1592 } else {
1593 let copy = Ord::min(self.length, left);
1594 self.writer.copy_match(self.offset, copy);
1595
1596 copy
1597 };
1598
1599 self.length -= copy;
1600
1601 if self.length == 0 {
1602 break 'blk Mode::Len;
1603 } else {
1604 continue 'match_;
1606 }
1607 }
1608 }
1609 Mode::Table => {
1610 need_bits!(self, 14);
1611 self.nlen = self.bit_reader.bits(5) as usize + 257;
1612 self.bit_reader.drop_bits(5);
1613 self.ndist = self.bit_reader.bits(5) as usize + 1;
1614 self.bit_reader.drop_bits(5);
1615 self.ncode = self.bit_reader.bits(4) as usize + 4;
1616 self.bit_reader.drop_bits(4);
1617
1618 if self.nlen > 286 || self.ndist > 30 {
1620 mode = Mode::Bad;
1621 break 'label self.bad("too many length or distance symbols\0");
1622 }
1623
1624 self.have = 0;
1625
1626 break 'blk Mode::LenLens;
1627 }
1628 Mode::LenLens => {
1629 const ORDER: [u8; 19] = [
1631 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
1632 ];
1633
1634 while self.have < self.ncode {
1635 need_bits!(self, 3);
1636 self.lens[usize::from(ORDER[self.have])] =
1637 self.bit_reader.bits(3) as u16;
1638 self.have += 1;
1639 self.bit_reader.drop_bits(3);
1640 }
1641
1642 while self.have < 19 {
1643 self.lens[usize::from(ORDER[self.have])] = 0;
1644 self.have += 1;
1645 }
1646
1647 let InflateTable::Success { root, used } = inflate_table(
1648 CodeType::Codes,
1649 &self.lens,
1650 19,
1651 &mut self.codes_codes,
1652 7,
1653 &mut self.work,
1654 ) else {
1655 mode = Mode::Bad;
1656 break 'label self.bad("invalid code lengths set\0");
1657 };
1658
1659 self.next = used;
1660 self.len_table.codes = Codes::Codes;
1661 self.len_table.bits = root;
1662
1663 self.have = 0;
1664
1665 break 'blk Mode::CodeLens;
1666 }
1667 Mode::CodeLens => {
1668 while self.have < self.nlen + self.ndist {
1669 let here = loop {
1670 let bits = self.bit_reader.bits(self.len_table.bits);
1671 let here = self.len_table_get(bits as usize);
1672 if here.bits <= self.bit_reader.bits_in_buffer() {
1673 break here;
1674 }
1675
1676 pull_byte!(self);
1677 };
1678
1679 let here_bits = here.bits;
1680
1681 match here.val {
1682 0..=15 => {
1683 self.bit_reader.drop_bits(here_bits);
1684 self.lens[self.have] = here.val;
1685 self.have += 1;
1686 }
1687 16 => {
1688 need_bits!(self, usize::from(here_bits) + 2);
1689 self.bit_reader.drop_bits(here_bits);
1690 if self.have == 0 {
1691 mode = Mode::Bad;
1692 break 'label self.bad("invalid bit length repeat\0");
1693 }
1694
1695 let len = self.lens[self.have - 1];
1696 let copy = 3 + self.bit_reader.bits(2) as usize;
1697 self.bit_reader.drop_bits(2);
1698
1699 if self.have + copy > self.nlen + self.ndist {
1700 mode = Mode::Bad;
1701 break 'label self.bad("invalid bit length repeat\0");
1702 }
1703
1704 self.lens[self.have..][..copy].fill(len);
1705 self.have += copy;
1706 }
1707 17 => {
1708 need_bits!(self, usize::from(here_bits) + 3);
1709 self.bit_reader.drop_bits(here_bits);
1710 let copy = 3 + self.bit_reader.bits(3) as usize;
1711 self.bit_reader.drop_bits(3);
1712
1713 if self.have + copy > self.nlen + self.ndist {
1714 mode = Mode::Bad;
1715 break 'label self.bad("invalid bit length repeat\0");
1716 }
1717
1718 self.lens[self.have..][..copy].fill(0);
1719 self.have += copy;
1720 }
1721 18.. => {
1722 need_bits!(self, usize::from(here_bits) + 7);
1723 self.bit_reader.drop_bits(here_bits);
1724 let copy = 11 + self.bit_reader.bits(7) as usize;
1725 self.bit_reader.drop_bits(7);
1726
1727 if self.have + copy > self.nlen + self.ndist {
1728 mode = Mode::Bad;
1729 break 'label self.bad("invalid bit length repeat\0");
1730 }
1731
1732 self.lens[self.have..][..copy].fill(0);
1733 self.have += copy;
1734 }
1735 }
1736 }
1737
1738 if self.lens[256] == 0 {
1740 mode = Mode::Bad;
1741 break 'label self.bad("invalid code -- missing end-of-block\0");
1742 }
1743
1744 let InflateTable::Success { root, used } = inflate_table(
1747 CodeType::Lens,
1748 &self.lens,
1749 self.nlen,
1750 &mut self.len_codes,
1751 10,
1752 &mut self.work,
1753 ) else {
1754 mode = Mode::Bad;
1755 break 'label self.bad("invalid literal/lengths set\0");
1756 };
1757
1758 self.len_table.codes = Codes::Len;
1759 self.len_table.bits = root;
1760 self.next = used;
1761
1762 let InflateTable::Success { root, used } = inflate_table(
1763 CodeType::Dists,
1764 &self.lens[self.nlen..],
1765 self.ndist,
1766 &mut self.dist_codes,
1767 9,
1768 &mut self.work,
1769 ) else {
1770 mode = Mode::Bad;
1771 break 'label self.bad("invalid distances set\0");
1772 };
1773
1774 self.dist_table.bits = root;
1775 self.dist_table.codes = Codes::Dist;
1776 self.next += used;
1777
1778 mode = Mode::Len_;
1779
1780 if matches!(self.flush, InflateFlush::Trees) {
1781 break 'label self.inflate_leave(ReturnCode::Ok);
1782 }
1783
1784 break 'blk Mode::Len_;
1785 }
1786 Mode::Dict => {
1787 if !self.flags.contains(Flags::HAVE_DICT) {
1788 break 'label self.inflate_leave(ReturnCode::NeedDict);
1789 }
1790
1791 self.checksum = crate::ADLER32_INITIAL_VALUE as _;
1792
1793 break 'blk Mode::Type;
1794 }
1795 Mode::DictId => {
1796 need_bits!(self, 32);
1797
1798 self.checksum = zswap32(self.bit_reader.hold() as u32);
1799
1800 self.bit_reader.init_bits();
1801
1802 break 'blk Mode::Dict;
1803 }
1804 Mode::Done => {
1805 break 'label ReturnCode::StreamEnd;
1807 }
1808 Mode::Bad => {
1809 let msg = "repeated call with bad state\0";
1810 #[cfg(all(feature = "std", test))]
1811 dbg!(msg);
1812 self.error_message = Some(msg);
1813
1814 break 'label ReturnCode::DataError;
1815 }
1816 Mode::Mem => {
1817 break 'label ReturnCode::MemError;
1818 }
1819 Mode::Sync => {
1820 break 'label ReturnCode::StreamError;
1821 }
1822 Mode::Length => {
1823 if self.wrap != 0 && self.gzip_flags != 0 {
1825 need_bits!(self, 32);
1826 if (self.wrap & 0b100) != 0
1827 && self.bit_reader.hold() as u32 != self.total as u32
1828 {
1829 mode = Mode::Bad;
1830 break 'label self.bad("incorrect length check\0");
1831 }
1832
1833 self.bit_reader.init_bits();
1834 }
1835
1836 mode = Mode::Done;
1837 break 'label ReturnCode::StreamEnd;
1839 }
1840 };
1841 }
1842 };
1843
1844 self.mode = mode;
1845
1846 ret
1847 }
1848
1849 fn bad(&mut self, msg: &'static str) -> ReturnCode {
1850 #[cfg(all(feature = "std", test))]
1851 dbg!(msg);
1852 self.error_message = Some(msg);
1853 self.inflate_leave(ReturnCode::DataError)
1854 }
1855
1856 fn inflate_leave(&mut self, return_code: ReturnCode) -> ReturnCode {
1859 return_code
1861 }
1862
1863 fn decoding_state(&self) -> i32 {
1865 let bit_reader_bits = self.bit_reader.bits_in_buffer() as i32;
1866 debug_assert!(bit_reader_bits < 64);
1867
1868 let last = if self.flags.contains(Flags::IS_LAST_BLOCK) {
1869 64
1870 } else {
1871 0
1872 };
1873
1874 let mode = match self.mode {
1875 Mode::Type => 128,
1876 Mode::Len_ | Mode::CopyBlock => 256,
1877 _ => 0,
1878 };
1879
1880 bit_reader_bits | last | mode
1881 }
1882}
1883
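// Runs the decode-heavy part of inflate. On x86/x86_64 this dispatches to an AVX2+BMI2
// specialization when the CPU supports it, and otherwise to the portable implementation.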
1884unsafe fn inflate_fast_help(state: &mut State, start: usize) {
1889 #[cfg(any(target_arch = "x86_64", target_arch = "x86"))]
1890 if crate::cpu_features::is_enabled_avx2_and_bmi2() {
1891 return unsafe { inflate_fast_help_avx2(state, start) };
1893 }
1894
1895 unsafe { inflate_fast_help_vanilla(state, start) };
1897}
1898
1899#[cfg(any(target_arch = "x86_64", target_arch = "x86"))]
1904#[target_feature(enable = "avx2")]
1905#[target_feature(enable = "bmi2")]
1906#[target_feature(enable = "bmi1")]
1907unsafe fn inflate_fast_help_avx2(state: &mut State, start: usize) {
1908 unsafe { inflate_fast_help_impl::<{ CpuFeatures::AVX2 }>(state, start) };
1910}
1911
1912unsafe fn inflate_fast_help_vanilla(state: &mut State, start: usize) {
1917 unsafe { inflate_fast_help_impl::<{ CpuFeatures::NONE }>(state, start) };
1919}
1920
1921#[inline(always)]
1926unsafe fn inflate_fast_help_impl<const FEATURES: usize>(state: &mut State, _start: usize) {
1927 let mut bit_reader = BitReader::new(&[]);
1928 core::mem::swap(&mut bit_reader, &mut state.bit_reader);
1929 debug_assert!(bit_reader.bytes_remaining() >= 15);
1930
1931 let mut writer = Writer::new(&mut []);
1932 core::mem::swap(&mut writer, &mut state.writer);
1933
1934 let lcode = state.len_table_ref();
1935 let dcode = state.dist_table_ref();
1936
1937 let lmask = (1u64 << state.len_table.bits) - 1;
1939 let dmask = (1u64 << state.dist_table.bits) - 1;
1940
1941 let extra_safe = false;
1943
1944 let window_size = state.window.size();
1945
1946 let mut bad = None;
1947
1948 if bit_reader.bits_in_buffer() < 10 {
1949 debug_assert!(bit_reader.bytes_remaining() >= 15);
1950 unsafe { bit_reader.refill() };
1952 }
1953 debug_assert!(
1956 bit_reader.bytes_remaining() >= 8 && bit_reader.bytes_remaining_including_buffer() >= 15
1957 );
1958
1959 'outer: loop {
1960 debug_assert!(
1966 bit_reader.bytes_remaining() >= 8
1967 && bit_reader.bytes_remaining_including_buffer() >= 15
1968 );
1969
1970 let mut here = {
1971 let bits = bit_reader.bits_in_buffer();
1972 let hold = bit_reader.hold();
1973
1974 unsafe { bit_reader.refill() };
1981 debug_assert!(bit_reader.bytes_remaining() >= 8);
1983
1984 if bits as usize >= state.len_table.bits {
1987 lcode[(hold & lmask) as usize]
1988 } else {
1989 lcode[(bit_reader.hold() & lmask) as usize]
1990 }
1991 };
1992
1993 if here.op == 0 {
1994 writer.push(here.val as u8);
1995 bit_reader.drop_bits(here.bits);
1996 here = lcode[(bit_reader.hold() & lmask) as usize];
1997
1998 if here.op == 0 {
1999 writer.push(here.val as u8);
2000 bit_reader.drop_bits(here.bits);
2001 here = lcode[(bit_reader.hold() & lmask) as usize];
2002 }
2003 }
2004
2005 'dolen: loop {
2006 bit_reader.drop_bits(here.bits);
2007 let op = here.op;
2008
2009 if op == 0 {
2010 writer.push(here.val as u8);
2011 } else if op & 16 != 0 {
2012 let op = op & MAX_BITS;
2013 let mut len = here.val + bit_reader.bits(op as usize) as u16;
2014 bit_reader.drop_bits(op);
2015
2016 here = dcode[(bit_reader.hold() & dmask) as usize];
2017
2018 if bit_reader.bits_in_buffer() < MAX_BITS + MAX_DIST_EXTRA_BITS {
2021 debug_assert!(bit_reader.bytes_remaining() >= 8);
2022 unsafe { bit_reader.refill() };
2029 }
2030
2031 'dodist: loop {
2032 bit_reader.drop_bits(here.bits);
2033 let op = here.op;
2034
2035 if op & 16 != 0 {
2036 let op = op & MAX_BITS;
2037 let dist = here.val + bit_reader.bits(op as usize) as u16;
2038
2039 if INFLATE_STRICT && dist as usize > state.dmax {
2040 bad = Some("invalid distance too far back\0");
2041 state.mode = Mode::Bad;
2042 break 'outer;
2043 }
2044
2045 bit_reader.drop_bits(op);
2046
2047 let written = writer.len();
2049
2050 if dist as usize > written {
2051 if (dist as usize - written) > state.window.have() {
2053 if state.flags.contains(Flags::SANE) {
2054 bad = Some("invalid distance too far back\0");
2055 state.mode = Mode::Bad;
2056 break 'outer;
2057 }
2058
2059 panic!("INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR")
2060 }
2061
2062 let mut op = dist as usize - written;
2063 let mut from;
2064
2065 let window_next = state.window.next();
2066
2067 if window_next == 0 {
2068 from = window_size - op;
2075 } else if window_next >= op {
2076 from = window_next - op;
2078 } else {
2079 op -= window_next;
2084 from = window_size - op;
2085
2086 if op < len as usize {
2087 len -= op as u16;
2091 writer.extend_from_window_with_features::<FEATURES>(
2092 &state.window,
2093 from..from + op,
2094 );
2095 from = 0;
2096 op = window_next;
2097 }
2098 }
2099
2100 let copy = Ord::min(op, len as usize);
2101 writer.extend_from_window_with_features::<FEATURES>(
2102 &state.window,
2103 from..from + copy,
2104 );
2105
2106 if op < len as usize {
2107 writer.copy_match_with_features::<FEATURES>(
2109 dist as usize,
2110 len as usize - op,
2111 );
2112 }
2113 } else if extra_safe {
2114 todo!()
2115 } else {
2116 writer.copy_match_with_features::<FEATURES>(dist as usize, len as usize)
2117 }
2118 } else if (op & 64) == 0 {
2119 here = dcode[(here.val + bit_reader.bits(op as usize) as u16) as usize];
2121 continue 'dodist;
2122 } else {
2123 bad = Some("invalid distance code\0");
2124 state.mode = Mode::Bad;
2125 break 'outer;
2126 }
2127
2128 break 'dodist;
2129 }
2130 } else if (op & 64) == 0 {
2131 here = lcode[(here.val + bit_reader.bits(op as usize) as u16) as usize];
2133 continue 'dolen;
2134 } else if op & 32 != 0 {
2135 state.mode = Mode::Type;
2137 break 'outer;
2138 } else {
2139 bad = Some("invalid literal/length code\0");
2140 state.mode = Mode::Bad;
2141 break 'outer;
2142 }
2143
2144 break 'dolen;
2145 }
2146
2147 let remaining = bit_reader.bytes_remaining_including_buffer();
2149 if remaining >= INFLATE_FAST_MIN_HAVE && writer.remaining() >= INFLATE_FAST_MIN_LEFT {
2150 continue;
2151 }
2152
2153 break 'outer;
2154 }
2155
2156 bit_reader.return_unused_bytes();
2158
2159 state.bit_reader = bit_reader;
2160 state.writer = writer;
2161
2162 if let Some(error_message) = bad {
2163 debug_assert!(matches!(state.mode, Mode::Bad));
2164 state.bad(error_message);
2165 }
2166}
2167
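// Mirrors zlib's inflatePrime(): a negative `bits` discards the bit buffer entirely,
// otherwise up to 16 bits of `value` are inserted ahead of the remaining input.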
2168pub fn prime(stream: &mut InflateStream, bits: i32, value: i32) -> ReturnCode {
2169 if bits == 0 {
2170 } else if bits < 0 {
2172 stream.state.bit_reader.init_bits();
2173 } else if bits > 16 || stream.state.bit_reader.bits_in_buffer() + bits as u8 > 32 {
2174 return ReturnCode::StreamError;
2175 } else {
2176 stream.state.bit_reader.prime(bits as u8, value as u64);
2177 }
2178
2179 ReturnCode::Ok
2180}
2181
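// The inflate state and the sliding window share a single allocation. This computes the
// 64-byte-aligned offset of each part and the total size, including enough slack to
// realign the start of the allocation to a 64-byte boundary.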
2182struct InflateAllocOffsets {
2183 total_size: usize,
2184 state_pos: usize,
2185 window_pos: usize,
2186}
2187
2188impl InflateAllocOffsets {
2189 fn new() -> Self {
2190 use core::mem::size_of;
2191
2192 const WINDOW_PAD_SIZE: usize = 64;
2195
2196 const ALIGN_SIZE: usize = 64;
2199 let mut curr_size = 0usize;
2200
2201 let state_size = size_of::<State>();
2203 let window_size = (1 << MAX_WBITS) + WINDOW_PAD_SIZE;
2204
2205 let state_pos = curr_size.next_multiple_of(ALIGN_SIZE);
2207 curr_size = state_pos + state_size;
2208
2209 let window_pos = curr_size.next_multiple_of(ALIGN_SIZE);
2210 curr_size = window_pos + window_size;
2211
2212 let total_size = (curr_size + (ALIGN_SIZE - 1)).next_multiple_of(ALIGN_SIZE);
2215
2216 Self {
2217 total_size,
2218 state_pos,
2219 window_pos,
2220 }
2221 }
2222}
2223
2224#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
2228pub struct InflateConfig {
2229 pub window_bits: i32,
2230}
2231
2232impl Default for InflateConfig {
2233 fn default() -> Self {
2234 Self {
2235 window_bits: DEF_WBITS,
2236 }
2237 }
2238}
2239
2240pub fn init(stream: &mut z_stream, config: InflateConfig) -> ReturnCode {
2242 stream.msg = core::ptr::null_mut();
2243
2244 #[cfg(feature = "rust-allocator")]
2248 if stream.zalloc.is_none() || stream.zfree.is_none() {
2249 stream.configure_default_rust_allocator()
2250 }
2251
2252 #[cfg(feature = "c-allocator")]
2253 if stream.zalloc.is_none() || stream.zfree.is_none() {
2254 stream.configure_default_c_allocator()
2255 }
2256
2257 if stream.zalloc.is_none() || stream.zfree.is_none() {
2258 return ReturnCode::StreamError;
2259 }
2260
2261 let mut state = State::new(&[], Writer::new(&mut []));
2262
2263 state.chunksize = 32;
2265
2266 let alloc = Allocator {
2267 zalloc: stream.zalloc.unwrap(),
2268 zfree: stream.zfree.unwrap(),
2269 opaque: stream.opaque,
2270 _marker: PhantomData,
2271 };
2272 let allocs = InflateAllocOffsets::new();
2273
2274 let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
2275 return ReturnCode::MemError;
2276 };
2277
2278 let address = allocation_start.as_ptr() as usize;
2279 let align_offset = address.next_multiple_of(64) - address;
2280 let buf = unsafe { allocation_start.as_ptr().add(align_offset) };
2281
2282 let window_allocation = unsafe { buf.add(allocs.window_pos) };
2283 let window = unsafe { Window::from_raw_parts(window_allocation, (1 << MAX_WBITS) + 64) };
2284 state.window = window;
2285
2286 let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
2287 unsafe { state_allocation.write(state) };
2288 stream.state = state_allocation.cast::<internal_state>();
2289
2290 if let Some(stream) = unsafe { InflateStream::from_stream_mut(stream) } {
2292 stream.state.allocation_start = allocation_start.as_ptr();
2293 stream.state.total_allocation_size = allocs.total_size;
2294 let ret = reset_with_config(stream, config);
2295
2296 if ret != ReturnCode::Ok {
2297 end(stream);
2298 }
2299
2300 ret
2301 } else {
2302 ReturnCode::StreamError
2303 }
2304}
2305
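// Interprets `window_bits` the way zlib does: a negative value selects raw deflate data
// (no wrapper), 8..=15 expects a zlib header, adding 16 expects a gzip header, and adding
// 32 auto-detects zlib or gzip. The derived `wrap` bits are 1 = zlib, 2 = gzip, and
// 4 = validate checksums.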
2306pub fn reset_with_config(stream: &mut InflateStream, config: InflateConfig) -> ReturnCode {
2307 let mut window_bits = config.window_bits;
2308 let wrap;
2309
2310 if window_bits < 0 {
2311 wrap = 0;
2312
2313 if window_bits < -MAX_WBITS {
2314 return ReturnCode::StreamError;
2315 }
2316
2317 window_bits = -window_bits;
2318 } else {
        wrap = (window_bits >> 4) + 5;

        if window_bits < 48 {
2322 window_bits &= MAX_WBITS;
2323 }
2324 }
2325
2326 if window_bits != 0 && !(MIN_WBITS..=MAX_WBITS).contains(&window_bits) {
2327 #[cfg(feature = "std")]
2328 eprintln!("invalid windowBits");
2329 return ReturnCode::StreamError;
2330 }
2331
2332 stream.state.wrap = wrap as u8;
2333 stream.state.wbits = window_bits as _;
2334
2335 reset(stream)
2336}
2337
2338pub fn reset(stream: &mut InflateStream) -> ReturnCode {
2339 stream.state.window.clear();
2341
2342 stream.state.error_message = None;
2343
2344 reset_keep(stream)
2345}
2346
2347pub fn reset_keep(stream: &mut InflateStream) -> ReturnCode {
2348 stream.total_in = 0;
2349 stream.total_out = 0;
2350 stream.state.total = 0;
2351
2352 stream.msg = core::ptr::null_mut();
2353
2354 let state = &mut stream.state;
2355
2356 if state.wrap != 0 {
2357 stream.adler = (state.wrap & 1) as _;
2359 }
2360
2361 state.mode = Mode::Head;
2362 state.checksum = crate::ADLER32_INITIAL_VALUE as u32;
2363
2364 state.flags.update(Flags::IS_LAST_BLOCK, false);
2365 state.flags.update(Flags::HAVE_DICT, false);
2366 state.flags.update(Flags::SANE, true);
2367 state.gzip_flags = -1;
2368 state.dmax = 32768;
2369 state.head = None;
2370 state.bit_reader = BitReader::new(&[]);
2371
2372 state.next = 0;
2373 state.len_table = Table::default();
2374 state.dist_table = Table::default();
2375
2376 state.back = usize::MAX;
2377
2378 ReturnCode::Ok
2379}
2380
2381pub fn codes_used(stream: &InflateStream) -> usize {
2382 stream.state.next
2383}
2384
2385pub unsafe fn inflate(stream: &mut InflateStream, flush: InflateFlush) -> ReturnCode {
2386 if stream.next_out.is_null() || (stream.next_in.is_null() && stream.avail_in != 0) {
2387 return ReturnCode::StreamError;
2388 }
2389
2390 let state = &mut stream.state;
2391
2392 if let Mode::Type = state.mode {
2394 state.mode = Mode::TypeDo;
2395 }
2396
2397 state.flush = flush;
2398
2399 unsafe {
2400 state
2401 .bit_reader
2402 .update_slice(stream.next_in, stream.avail_in as usize)
2403 };
2404 state.writer = unsafe { Writer::new_uninit(stream.next_out.cast(), stream.avail_out as usize) };
2406
2407 state.in_available = stream.avail_in as _;
2408 state.out_available = stream.avail_out as _;
2409
2410 let err = state.dispatch();
2411
2412 let in_read = state.bit_reader.as_ptr() as usize - stream.next_in as usize;
2413 let out_written = state.out_available - (state.writer.capacity() - state.writer.len());
2414
2415 stream.total_in += in_read as z_size;
2416 state.total += out_written;
2417 stream.total_out = state.total as _;
2418
2419 stream.avail_in = state.bit_reader.bytes_remaining() as u32;
2420 stream.next_in = state.bit_reader.as_ptr() as *mut u8;
2421
2422 stream.avail_out = (state.writer.capacity() - state.writer.len()) as u32;
2423 stream.next_out = state.writer.next_out() as *mut u8;
2424
2425 stream.adler = state.checksum as z_checksum;
2426
2427 let valid_mode = |mode| !matches!(mode, Mode::Bad | Mode::Mem | Mode::Sync);
2428 let not_done = |mode| {
2429 !matches!(
2430 mode,
2431 Mode::Check | Mode::Length | Mode::Bad | Mode::Mem | Mode::Sync
2432 )
2433 };
2434
2435 let must_update_window = state.window.size() != 0
2436 || (out_written != 0
2437 && valid_mode(state.mode)
2438 && (not_done(state.mode) || !matches!(state.flush, InflateFlush::Finish)));
2439
2440 let update_checksum = state.wrap & 4 != 0;
2441
2442 if must_update_window {
2443 state.window.extend(
2444 &state.writer.filled()[..out_written],
2445 state.gzip_flags,
2446 update_checksum,
2447 &mut state.checksum,
2448 &mut state.crc_fold,
2449 );
2450 }
2451
2452 if let Some(msg) = state.error_message {
2453 assert!(msg.ends_with('\0'));
2454 stream.msg = msg.as_ptr() as *mut u8 as *mut core::ffi::c_char;
2455 }
2456
2457 stream.data_type = state.decoding_state();
2458
2459 if ((in_read == 0 && out_written == 0) || flush == InflateFlush::Finish)
2460 && err == ReturnCode::Ok
2461 {
2462 ReturnCode::BufError
2463 } else {
2464 err
2465 }
2466}
2467
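// Scans `buf` for the 0x00 0x00 0xFF 0xFF marker that a sync flush leaves in the stream.
// `got` is the number of marker bytes already matched; the return value is the updated
// match count and how many input bytes were examined.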
2468fn syncsearch(mut got: usize, buf: &[u8]) -> (usize, usize) {
2469 let len = buf.len();
2470 let mut next = 0;
2471
2472 while next < len && got < 4 {
2473 if buf[next] == if got < 2 { 0 } else { 0xff } {
2474 got += 1;
2475 } else if buf[next] != 0 {
2476 got = 0;
2477 } else {
2478 got = 4 - got;
2479 }
2480 next += 1;
2481 }
2482
2483 (got, next)
2484}
2485
2486pub fn sync(stream: &mut InflateStream) -> ReturnCode {
2487 let state = &mut stream.state;
2488
2489 if stream.avail_in == 0 && state.bit_reader.bits_in_buffer() < 8 {
2490 return ReturnCode::BufError;
2491 }
2492 if !matches!(state.mode, Mode::Sync) {
2494 state.mode = Mode::Sync;
2495
2496 let (buf, len) = state.bit_reader.start_sync_search();
2497
2498 (state.have, _) = syncsearch(0, &buf[..len]);
2499 }
2500
2501 let slice = unsafe { core::slice::from_raw_parts(stream.next_in, stream.avail_in as usize) };
2504
2505 let len;
2506 (state.have, len) = syncsearch(state.have, slice);
2507 stream.next_in = unsafe { stream.next_in.add(len) };
2509 stream.avail_in -= len as u32;
2510 stream.total_in += len as z_size;
2511
2512 if state.have != 4 {
2514 return ReturnCode::DataError;
2515 }
2516
2517 if state.gzip_flags == -1 {
        state.wrap = 0;
    } else {
        state.wrap &= !4;
    }
2522
2523 let flags = state.gzip_flags;
2524 let total_in = stream.total_in;
2525 let total_out = stream.total_out;
2526
2527 reset(stream);
2528
2529 stream.total_in = total_in;
2530 stream.total_out = total_out;
2531
2532 stream.state.gzip_flags = flags;
2533 stream.state.mode = Mode::Type;
2534
2535 ReturnCode::Ok
2536}
2537
2538pub fn sync_point(stream: &mut InflateStream) -> bool {
2547 matches!(stream.state.mode, Mode::Stored) && stream.state.bit_reader.bits_in_buffer() == 0
2548}
2549
2550pub unsafe fn copy<'a>(
2551 dest: &mut MaybeUninit<InflateStream<'a>>,
2552 source: &InflateStream<'a>,
2553) -> ReturnCode {
2554 if source.next_out.is_null() || (source.next_in.is_null() && source.avail_in != 0) {
2555 return ReturnCode::StreamError;
2556 }
2557
2558 unsafe { core::ptr::copy_nonoverlapping(source, dest.as_mut_ptr(), 1) };
2561
2562 let allocs = InflateAllocOffsets::new();
2564 debug_assert_eq!(allocs.total_size, source.state.total_allocation_size);
2565
2566 let Some(allocation_start) = source.alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
2567 return ReturnCode::MemError;
2568 };
2569
2570 let address = allocation_start.as_ptr() as usize;
2571 let align_offset = address.next_multiple_of(64) - address;
2572 let buf = unsafe { allocation_start.as_ptr().add(align_offset) };
2573
2574 let window_allocation = unsafe { buf.add(allocs.window_pos) };
2575 let window = unsafe {
2576 source
2577 .state
2578 .window
2579 .clone_to(window_allocation, (1 << MAX_WBITS) + 64)
2580 };
2581
2582 let copy = unsafe { buf.add(allocs.state_pos).cast::<State>() };
2583 unsafe { core::ptr::copy_nonoverlapping(source.state, copy, 1) };
2584
2585 let field_ptr = unsafe { core::ptr::addr_of_mut!((*copy).window) };
2586 unsafe { core::ptr::write(field_ptr, window) };
2587
2588 let field_ptr = unsafe { core::ptr::addr_of_mut!((*copy).allocation_start) };
2589 unsafe { core::ptr::write(field_ptr, allocation_start.as_ptr()) };
2590
2591 let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
2592 unsafe { core::ptr::write(field_ptr as *mut *mut State, copy) };
2593
2594 ReturnCode::Ok
2595}
2596
2597pub fn undermine(stream: &mut InflateStream, subvert: i32) -> ReturnCode {
    // zlib semantics are `sane = !subvert`: SANE is set only when `subvert` is zero.
    stream.state.flags.update(Flags::SANE, subvert == 0);
2599
2600 ReturnCode::Ok
2601}
2602
2603pub fn validate(stream: &mut InflateStream, check: bool) -> ReturnCode {
2605 if check && stream.state.wrap != 0 {
2606 stream.state.wrap |= 0b100;
2607 } else {
2608 stream.state.wrap &= !0b100;
2609 }
2610
2611 ReturnCode::Ok
2612}
2613
2614pub fn mark(stream: &InflateStream) -> c_long {
2615 if stream.next_out.is_null() || (stream.next_in.is_null() && stream.avail_in != 0) {
2616 return c_long::MIN;
2617 }
2618
2619 let state = &stream.state;
2620
2621 let length = match state.mode {
2622 Mode::CopyBlock => state.length,
2623 Mode::Match => state.was - state.length,
2624 _ => 0,
2625 };
2626
2627 (((state.back as c_long) as c_ulong) << 16) as c_long + length as c_long
2628}
2629
2630pub fn set_dictionary(stream: &mut InflateStream, dictionary: &[u8]) -> ReturnCode {
2631 if stream.state.wrap != 0 && !matches!(stream.state.mode, Mode::Dict) {
2632 return ReturnCode::StreamError;
2633 }
2634
2635 if matches!(stream.state.mode, Mode::Dict) {
2637 let dictid = adler32(1, dictionary);
2638
2639 if dictid != stream.state.checksum {
2640 return ReturnCode::DataError;
2641 }
2642 }
2643
2644 stream.state.window.extend(
2645 dictionary,
2646 stream.state.gzip_flags,
2647 false,
2648 &mut stream.state.checksum,
2649 &mut stream.state.crc_fold,
2650 );
2651
2652 stream.state.flags.update(Flags::HAVE_DICT, true);
2653
2654 ReturnCode::Ok
2655}
2656
2657pub fn end<'a>(stream: &'a mut InflateStream<'_>) -> &'a mut z_stream {
2658 let alloc = stream.alloc;
2659 let allocation_start = stream.state.allocation_start;
2660 let total_allocation_size = stream.state.total_allocation_size;
2661
2662 let mut window = Window::empty();
2663 core::mem::swap(&mut window, &mut stream.state.window);
2664
2665 let stream = stream.as_z_stream_mut();
2666 let _ = core::mem::replace(&mut stream.state, core::ptr::null_mut());
2667
2668 unsafe { alloc.deallocate(allocation_start, total_allocation_size) };
2669
2670 stream
2671}
2672
2673pub unsafe fn get_header<'a>(
2682 stream: &mut InflateStream<'a>,
2683 head: Option<&'a mut gz_header>,
2684) -> ReturnCode {
2685 if (stream.state.wrap & 2) == 0 {
2686 return ReturnCode::StreamError;
2687 }
2688
2689 stream.state.head = head.map(|head| {
2690 head.done = 0;
2691 head
2692 });
2693 ReturnCode::Ok
2694}
2695
2696pub unsafe fn get_dictionary(stream: &InflateStream<'_>, dictionary: *mut u8) -> usize {
2700 let whave = stream.state.window.have();
2701 let wnext = stream.state.window.next();
2702
2703 if !dictionary.is_null() {
2704 unsafe {
2705 core::ptr::copy_nonoverlapping(
2706 stream.state.window.as_ptr().add(wnext),
2707 dictionary,
2708 whave - wnext,
2709 );
2710
2711 core::ptr::copy_nonoverlapping(
2712 stream.state.window.as_ptr(),
2713 dictionary.add(whave).sub(wnext).cast(),
2714 wnext,
2715 );
2716 }
2717 }
2718
2719 stream.state.window.have()
2720}
2721
2722#[cfg(test)]
2723mod tests {
2724 use super::*;
2725
2726 #[test]
2727 fn uncompress_buffer_overflow() {
2728 let mut output = [0; 1 << 13];
2729 let input = [
2730 72, 137, 58, 0, 3, 39, 255, 255, 255, 255, 255, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
2731 14, 14, 184, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 184, 14, 14,
2732 14, 14, 14, 14, 14, 63, 14, 14, 14, 14, 14, 14, 14, 14, 184, 14, 14, 255, 14, 103, 14,
2733 14, 14, 14, 14, 14, 61, 14, 255, 255, 63, 14, 14, 14, 14, 14, 14, 14, 14, 184, 14, 14,
2734 255, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 6, 14, 14, 14, 14, 14, 14, 14, 14, 71,
2735 4, 137, 106,
2736 ];
2737
2738 let config = InflateConfig { window_bits: 15 };
2739
2740 let (_decompressed, err) = decompress_slice(&mut output, &input, config);
2741 assert_eq!(err, ReturnCode::DataError);
2742 }
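
    // A minimal round-trip sketch added for illustration: the inputs are hand-assembled
    // DEFLATE data (a single stored block containing "hello"), once as raw deflate and
    // once wrapped in a zlib header and adler32 trailer. The byte values are constructed
    // by hand for this test rather than taken from any external tool.
    #[test]
    fn stored_block_hello() {
        // Stored block layout: BFINAL=1/BTYPE=00 in the first byte, then LEN (little
        // endian), NLEN (= !LEN), then the literal bytes.
        let raw = [0x01, 0x05, 0x00, 0xFA, 0xFF, b'h', b'e', b'l', b'l', b'o'];

        // Negative window_bits selects raw deflate (no zlib or gzip wrapper).
        let mut output = [0u8; 32];
        let config = InflateConfig { window_bits: -15 };
        let (decompressed, err) = decompress_slice(&mut output, &raw, config);
        assert_eq!(err, ReturnCode::Ok);
        assert_eq!(&decompressed[..], b"hello");

        // The same payload with a zlib wrapper: CMF/FLG header bytes 0x78 0x9C and the
        // big-endian adler32 of "hello" (0x062C0215) as the trailer.
        let zlib = [
            0x78, 0x9C, // zlib header, no preset dictionary
            0x01, 0x05, 0x00, 0xFA, 0xFF, b'h', b'e', b'l', b'l', b'o',
            0x06, 0x2C, 0x02, 0x15, // adler32("hello")
        ];

        let mut output = [0u8; 32];
        let config = InflateConfig { window_bits: 15 };
        let (decompressed, err) = decompress_slice(&mut output, &zlib, config);
        assert_eq!(err, ReturnCode::Ok);
        assert_eq!(&decompressed[..], b"hello");
    }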
2743}