#![allow(non_snake_case)]
#![allow(clippy::missing_safety_doc)]

use core::ffi::{c_char, c_int, c_long, c_ulong};
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ops::ControlFlow;

mod bitreader;
mod infback;
mod inffixed_tbl;
mod inftrees;
mod window;
mod writer;

use crate::allocate::Allocator;
use crate::c_api::internal_state;
use crate::cpu_features::CpuFeatures;
use crate::{
    adler32::adler32,
    c_api::{gz_header, z_checksum, z_size, z_stream, Z_DEFLATED},
    inflate::writer::Writer,
    Code, InflateFlush, ReturnCode, DEF_WBITS, MAX_WBITS, MIN_WBITS,
};

use crate::crc32::{crc32, Crc32Fold};

pub use self::infback::{back, back_end, back_init};
pub use self::window::Window;
use self::{
    bitreader::BitReader,
    inftrees::{inflate_table, CodeType, InflateTable},
};

const INFLATE_STRICT: bool = false;
36
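/// Inflate counterpart of [`z_stream`]. The two types have the same size and alignment
/// (checked by the `_S` and `_A` assertions below), so a `*mut z_stream` whose `state`
/// field was set up by [`init`] can be reinterpreted as an `InflateStream`.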
37#[repr(C)]
40pub struct InflateStream<'a> {
41 pub(crate) next_in: *mut crate::c_api::Bytef,
42 pub(crate) avail_in: crate::c_api::uInt,
43 pub(crate) total_in: crate::c_api::z_size,
44 pub(crate) next_out: *mut crate::c_api::Bytef,
45 pub(crate) avail_out: crate::c_api::uInt,
46 pub(crate) total_out: crate::c_api::z_size,
47 pub(crate) msg: *mut c_char,
48 pub(crate) state: &'a mut State<'a>,
49 pub(crate) alloc: Allocator<'a>,
50 pub(crate) data_type: c_int,
51 pub(crate) adler: crate::c_api::z_checksum,
52 pub(crate) reserved: crate::c_api::uLong,
53}
54
55unsafe impl Sync for InflateStream<'_> {}
56unsafe impl Send for InflateStream<'_> {}
57
58#[cfg(feature = "__internal-test")]
59#[doc(hidden)]
60pub const INFLATE_STATE_SIZE: usize = core::mem::size_of::<crate::inflate::State>();
61
62#[cfg(feature = "__internal-test")]
63#[doc(hidden)]
64pub unsafe fn set_mode_dict(strm: &mut z_stream) {
65 unsafe {
66 (*(strm.state as *mut State)).mode = Mode::Dict;
67 }
68}
69
70#[cfg(feature = "__internal-test")]
71#[doc(hidden)]
72pub unsafe fn set_mode_sync(strm: *mut z_stream) {
73 unsafe {
74 (*((*strm).state as *mut State)).mode = Mode::Sync;
75 }
76}
77
78impl<'a> InflateStream<'a> {
79 const _S: () = assert!(core::mem::size_of::<z_stream>() == core::mem::size_of::<Self>());
82 const _A: () = assert!(core::mem::align_of::<z_stream>() == core::mem::align_of::<Self>());
83
84 #[inline(always)]
93 pub unsafe fn from_stream_ref(strm: *const z_stream) -> Option<&'a Self> {
94 {
95 let stream = unsafe { strm.as_ref() }?;
97
98 if stream.zalloc.is_none() || stream.zfree.is_none() {
99 return None;
100 }
101
102 if stream.state.is_null() {
103 return None;
104 }
105 }
106
107 unsafe { strm.cast::<InflateStream>().as_ref() }
109 }
110
111 #[inline(always)]
120 pub unsafe fn from_stream_mut(strm: *mut z_stream) -> Option<&'a mut Self> {
121 {
122 let stream = unsafe { strm.as_ref() }?;
124
125 if stream.zalloc.is_none() || stream.zfree.is_none() {
126 return None;
127 }
128
129 if stream.state.is_null() {
130 return None;
131 }
132 }
133
134 unsafe { strm.cast::<InflateStream>().as_mut() }
136 }
137
138 fn as_z_stream_mut(&mut self) -> &mut z_stream {
139 unsafe { &mut *(self as *mut _ as *mut z_stream) }
141 }
142
143 pub fn new(config: InflateConfig) -> Self {
144 let mut inner = crate::c_api::z_stream::default();
145
146 let ret = crate::inflate::init(&mut inner, config);
147 assert_eq!(ret, ReturnCode::Ok);
148
149 unsafe { core::mem::transmute(inner) }
150 }
151}
152
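// Maximum length of a Huffman code in a deflate stream (RFC 1951), and the maximum
// number of extra bits that can follow a distance code.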
const MAX_BITS: u8 = 15;
const MAX_DIST_EXTRA_BITS: u8 = 13;

pub fn uncompress_slice<'a>(
157 output: &'a mut [u8],
158 input: &[u8],
159 config: InflateConfig,
160) -> (&'a mut [u8], ReturnCode) {
161 let output_uninit = unsafe {
163 core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut MaybeUninit<u8>, output.len())
164 };
165
166 uncompress(output_uninit, input, config)
167}
168
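/// Decompresses `input` into the (possibly uninitialized) `output` buffer in a single
/// call, mirroring zlib's `uncompress`. On success the returned slice is the
/// initialized, decompressed prefix of `output`.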
169pub fn uncompress<'a>(
171 output: &'a mut [MaybeUninit<u8>],
172 input: &[u8],
173 config: InflateConfig,
174) -> (&'a mut [u8], ReturnCode) {
175 let (_consumed, output, ret) = uncompress2(output, input, config);
176 (output, ret)
177}
178
179pub fn uncompress2<'a>(
180 output: &'a mut [MaybeUninit<u8>],
181 input: &[u8],
182 config: InflateConfig,
183) -> (u64, &'a mut [u8], ReturnCode) {
184 let mut dest_len_ptr = output.len() as z_checksum;
185
186 let mut buf = [0u8];
188
189 let mut left;
190 let mut len = input.len() as u64;
191
192 let dest = if output.is_empty() {
193 left = 1;
194
195 buf.as_mut_ptr()
196 } else {
197 left = output.len() as u64;
198 dest_len_ptr = 0;
199
200 output.as_mut_ptr() as *mut u8
201 };
202
203 let mut stream = z_stream {
204 next_in: input.as_ptr() as *mut u8,
205 avail_in: 0,
206
207 zalloc: None,
208 zfree: None,
209 opaque: core::ptr::null_mut(),
210
211 ..z_stream::default()
212 };
213
214 let err = init(&mut stream, config);
215 if err != ReturnCode::Ok {
216 return (0, &mut [], err);
217 }
218
219 stream.next_out = dest;
220 stream.avail_out = 0;
221
222 let Some(stream) = (unsafe { InflateStream::from_stream_mut(&mut stream) }) else {
223 return (0, &mut [], ReturnCode::StreamError);
224 };
225
226 let err = loop {
227 if stream.avail_out == 0 {
228 stream.avail_out = Ord::min(left, u32::MAX as u64) as u32;
229 left -= stream.avail_out as u64;
230 }
231
232 if stream.avail_in == 0 {
233 stream.avail_in = Ord::min(len, u32::MAX as u64) as u32;
234 len -= stream.avail_in as u64;
235 }
236
237 let err = unsafe { inflate(stream, InflateFlush::NoFlush) };
238
239 if err != ReturnCode::Ok {
240 break err;
241 }
242 };
243
244 let consumed = len + u64::from(stream.avail_in);
245 if !output.is_empty() {
246 dest_len_ptr = stream.total_out;
247 } else if stream.total_out != 0 && err == ReturnCode::BufError {
248 left = 1;
249 }
250
251 let avail_out = stream.avail_out;
252
253 end(stream);
254
255 let ret = match err {
256 ReturnCode::StreamEnd => ReturnCode::Ok,
257 ReturnCode::NeedDict => ReturnCode::DataError,
258 ReturnCode::BufError if (left + avail_out as u64) != 0 => ReturnCode::DataError,
259 _ => err,
260 };
261
262 let output_slice = unsafe {
264 core::slice::from_raw_parts_mut(output.as_mut_ptr() as *mut u8, dest_len_ptr as usize)
265 };
266
267 (consumed, output_slice, ret)
268}
269
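/// Current step of the inflate state machine; these correspond to the `inflate_mode`
/// values in zlib's `inflate.h`. `State::dispatch` moves between them as header, block,
/// and trailer bytes become available.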
270#[derive(Debug, Clone, Copy)]
271#[repr(u8)]
272pub enum Mode {
273 Head,
274 Flags,
275 Time,
276 Os,
277 ExLen,
278 Extra,
279 Name,
280 Comment,
281 HCrc,
282 Sync,
283 Mem,
284 Length,
285 Type,
286 TypeDo,
287 Stored,
288 CopyBlock,
289 Check,
290 Len_,
291 Len,
292 Lit,
293 LenExt,
294 Dist,
295 DistExt,
296 Match,
297 Table,
298 LenLens,
299 CodeLens,
300 DictId,
301 Dict,
302 Done,
303 Bad,
304}
305
306#[derive(Default, Clone, Copy)]
307#[allow(clippy::enum_variant_names)]
308enum Codes {
309 #[default]
310 Fixed,
311 Codes,
312 Len,
313 Dist,
314}
315
316#[derive(Default, Clone, Copy)]
317struct Table {
318 codes: Codes,
319 bits: usize,
320}
321
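/// Compact bit set replacing the separate `last`, `havedict`, and `sane` booleans of the
/// C implementation.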
322#[derive(Clone, Copy)]
323struct Flags(u8);
324
325impl Default for Flags {
326 fn default() -> Self {
327 Self::SANE
328 }
329}
330
331impl Flags {
332 const IS_LAST_BLOCK: Self = Self(0b0000_0001);
334
335 const HAVE_DICT: Self = Self(0b0000_0010);
337
338 const SANE: Self = Self(0b0000_0100);
340
341 pub(crate) const fn contains(self, other: Self) -> bool {
342 debug_assert!(other.0.count_ones() == 1);
343
344 self.0 & other.0 != 0
345 }
346
347 #[inline(always)]
348 pub(crate) fn update(&mut self, other: Self, value: bool) {
349 if value {
350 *self = Self(self.0 | other.0);
351 } else {
352 *self = Self(self.0 & !other.0);
353 }
354 }
355}
356
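/// Internal decompression state, the counterpart of zlib's `struct inflate_state`.
/// The 64-byte alignment matches the cache-line-aligned allocation performed in [`init`].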
357#[repr(C, align(64))]
358pub(crate) struct State<'a> {
359 mode: Mode,
361
362 flags: Flags,
363
364 wbits: u8,
366
367 wrap: u8,
373
374 flush: InflateFlush,
375
376 window: Window<'a>,
378
379 ncode: usize,
382 nlen: usize,
384 ndist: usize,
386 have: usize,
    next: usize,

    bit_reader: BitReader<'a>,
393
394 writer: Writer<'a>,
395 total: usize,
396
397 length: usize,
399 offset: usize,
401
402 extra: usize,
404
405 back: usize,
407
408 was: usize,
410
411 chunksize: usize,
413
414 in_available: usize,
415 out_available: usize,
416
417 gzip_flags: i32,
418
419 checksum: u32,
420 crc_fold: Crc32Fold,
421
422 error_message: Option<&'static str>,
423
424 head: Option<&'a mut gz_header>,
426 dmax: usize,
427
428 len_table: Table,
430
431 dist_table: Table,
433
434 codes_codes: [Code; crate::ENOUGH_LENS],
435 len_codes: [Code; crate::ENOUGH_LENS],
436 dist_codes: [Code; crate::ENOUGH_DISTS],
437
438 lens: [u16; 320],
440 work: [u16; 288],
442
443 allocation_start: *mut u8,
444 total_allocation_size: usize,
445}
446
447impl<'a> State<'a> {
448 fn new(reader: &'a [u8], writer: Writer<'a>) -> Self {
449 let in_available = reader.len();
450 let out_available = writer.capacity();
451
452 Self {
453 flush: InflateFlush::NoFlush,
454
455 flags: Flags::default(),
456 wrap: 0,
457 mode: Mode::Head,
458 length: 0,
459
460 len_table: Table::default(),
461 dist_table: Table::default(),
462
463 wbits: 0,
464 offset: 0,
465 extra: 0,
466 back: 0,
467 was: 0,
468 chunksize: 0,
469 in_available,
470 out_available,
471
472 bit_reader: BitReader::new(reader),
473
474 writer,
475 total: 0,
476
477 window: Window::empty(),
478 head: None,
479
480 lens: [0u16; 320],
481 work: [0u16; 288],
482
483 ncode: 0,
484 nlen: 0,
485 ndist: 0,
486 have: 0,
487 next: 0,
488
489 error_message: None,
490
491 checksum: 0,
492 crc_fold: Crc32Fold::new(),
493
494 dmax: 0,
495 gzip_flags: 0,
496
497 codes_codes: [Code::default(); crate::ENOUGH_LENS],
498 len_codes: [Code::default(); crate::ENOUGH_LENS],
499 dist_codes: [Code::default(); crate::ENOUGH_DISTS],
500
501 allocation_start: core::ptr::null_mut(),
502 total_allocation_size: 0,
503 }
504 }
505
506 fn len_table_ref(&self) -> &[Code] {
507 match self.len_table.codes {
508 Codes::Fixed => &self::inffixed_tbl::LENFIX,
509 Codes::Codes => &self.codes_codes,
510 Codes::Len => &self.len_codes,
511 Codes::Dist => &self.dist_codes,
512 }
513 }
514
515 fn dist_table_ref(&self) -> &[Code] {
516 match self.dist_table.codes {
517 Codes::Fixed => &self::inffixed_tbl::DISTFIX,
518 Codes::Codes => &self.codes_codes,
519 Codes::Len => &self.len_codes,
520 Codes::Dist => &self.dist_codes,
521 }
522 }
523
524 fn len_table_get(&self, index: usize) -> Code {
525 self.len_table_ref()[index]
526 }
527
528 fn dist_table_get(&self, index: usize) -> Code {
529 self.dist_table_ref()[index]
530 }
531}
532
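/// Unconditional 32-bit byte swap, used to read the big-endian zlib trailer checksum and
/// dictionary id from the bit buffer.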
533const fn zswap32(q: u32) -> u32 {
535 u32::from_be(q.to_le())
536}
537
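/// Minimum input bytes and output space that must be available before entering the
/// `inflate_fast_help` hot loop, which omits per-symbol bounds checks.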
538const INFLATE_FAST_MIN_HAVE: usize = 15;
539const INFLATE_FAST_MIN_LEFT: usize = 260;
540
541impl State<'_> {
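    /// Decodes literal/length and distance codes (the `Len`, `Lit`, `LenExt`, `Dist`,
    /// `DistExt`, and `Match` states) in one tight loop, handing off to
    /// `inflate_fast_help` whenever enough input and output space is available.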
542 fn len_and_friends(&mut self) -> ControlFlow<ReturnCode, ()> {
552 let avail_in = self.bit_reader.bytes_remaining();
553 let avail_out = self.writer.remaining();
554
555 if avail_in >= INFLATE_FAST_MIN_HAVE && avail_out >= INFLATE_FAST_MIN_LEFT {
556 unsafe { inflate_fast_help(self, 0) };
558 match self.mode {
559 Mode::Len => {}
560 _ => return ControlFlow::Continue(()),
561 }
562 }
563
564 let mut mode;
565 let mut writer;
566 let mut bit_reader;
567
568 macro_rules! load {
569 () => {
570 mode = self.mode;
571 writer = core::mem::replace(&mut self.writer, Writer::new(&mut []));
572 bit_reader = self.bit_reader;
573 };
574 }
575
576 macro_rules! restore {
577 () => {
578 self.mode = mode;
579 self.writer = writer;
580 self.bit_reader = bit_reader;
581 };
582 }
583
584 load!();
585
586 let len_table = match self.len_table.codes {
587 Codes::Fixed => &self::inffixed_tbl::LENFIX[..],
588 Codes::Codes => &self.codes_codes,
589 Codes::Len => &self.len_codes,
590 Codes::Dist => &self.dist_codes,
591 };
592
593 let dist_table = match self.dist_table.codes {
594 Codes::Fixed => &self::inffixed_tbl::DISTFIX[..],
595 Codes::Codes => &self.codes_codes,
596 Codes::Len => &self.len_codes,
597 Codes::Dist => &self.dist_codes,
598 };
599
600 loop {
601 mode = 'top: {
602 match mode {
603 Mode::Len => {
604 let avail_in = bit_reader.bytes_remaining();
605 let avail_out = writer.remaining();
606
607 if avail_in >= INFLATE_FAST_MIN_HAVE && avail_out >= INFLATE_FAST_MIN_LEFT {
612 restore!();
613 unsafe { inflate_fast_help(self, 0) };
617 return ControlFlow::Continue(());
618 }
619
620 self.back = 0;
621
622 let mut here;
624 loop {
625 let bits = bit_reader.bits(self.len_table.bits);
626 here = len_table[bits as usize];
627
628 if here.bits <= bit_reader.bits_in_buffer() {
629 break;
630 }
631
632 if let Err(return_code) = bit_reader.pull_byte() {
633 restore!();
634 return ControlFlow::Break(return_code);
635 };
636 }
637
638 if here.op != 0 && here.op & 0xf0 == 0 {
639 let last = here;
640 loop {
641 let bits = bit_reader.bits((last.bits + last.op) as usize) as u16;
642 here = len_table[(last.val + (bits >> last.bits)) as usize];
643 if last.bits + here.bits <= bit_reader.bits_in_buffer() {
644 break;
645 }
646
647 if let Err(return_code) = bit_reader.pull_byte() {
648 restore!();
649 return ControlFlow::Break(return_code);
650 };
651 }
652
653 bit_reader.drop_bits(last.bits);
654 self.back += last.bits as usize;
655 }
656
657 bit_reader.drop_bits(here.bits);
658 self.back += here.bits as usize;
659 self.length = here.val as usize;
660
661 if here.op == 0 {
662 break 'top Mode::Lit;
663 } else if here.op & 32 != 0 {
664 self.back = usize::MAX;
669 mode = Mode::Type;
670
671 restore!();
672 return ControlFlow::Continue(());
673 } else if here.op & 64 != 0 {
674 mode = Mode::Bad;
675 {
676 restore!();
677 let this = &mut *self;
678 let msg: &'static str = "invalid literal/length code\0";
679 #[cfg(all(feature = "std", test))]
680 dbg!(msg);
681 this.error_message = Some(msg);
682 return ControlFlow::Break(ReturnCode::DataError);
683 }
684 } else {
685 self.extra = (here.op & MAX_BITS) as usize;
687 break 'top Mode::LenExt;
688 }
689 }
690 Mode::Lit => {
691 if writer.is_full() {
693 restore!();
694 #[cfg(all(test, feature = "std"))]
695 eprintln!("Ok: writer is full ({} bytes)", self.writer.capacity());
696 return ControlFlow::Break(ReturnCode::Ok);
697 }
698
699 writer.push(self.length as u8);
700
701 break 'top Mode::Len;
702 }
703 Mode::LenExt => {
704 let extra = self.extra;
706
707 if extra != 0 {
709 match bit_reader.need_bits(extra) {
710 Err(return_code) => {
711 restore!();
712 return ControlFlow::Break(return_code);
713 }
714 Ok(v) => v,
715 };
716 self.length += bit_reader.bits(extra) as usize;
717 bit_reader.drop_bits(extra as u8);
718 self.back += extra;
719 }
720
721 self.was = self.length;
724
725 break 'top Mode::Dist;
726 }
727 Mode::Dist => {
728 let mut here;
732 loop {
733 let bits = bit_reader.bits(self.dist_table.bits) as usize;
734 here = dist_table[bits];
735 if here.bits <= bit_reader.bits_in_buffer() {
736 break;
737 }
738
739 if let Err(return_code) = bit_reader.pull_byte() {
740 restore!();
741 return ControlFlow::Break(return_code);
742 };
743 }
744
745 if here.op & 0xf0 == 0 {
746 let last = here;
747
748 loop {
749 let bits = bit_reader.bits((last.bits + last.op) as usize);
750 here =
751 dist_table[last.val as usize + ((bits as usize) >> last.bits)];
752
753 if last.bits + here.bits <= bit_reader.bits_in_buffer() {
754 break;
755 }
756
757 if let Err(return_code) = bit_reader.pull_byte() {
758 restore!();
759 return ControlFlow::Break(return_code);
760 };
761 }
762
763 bit_reader.drop_bits(last.bits);
764 self.back += last.bits as usize;
765 }
766
767 bit_reader.drop_bits(here.bits);
768
769 if here.op & 64 != 0 {
770 restore!();
771 self.mode = Mode::Bad;
772 return ControlFlow::Break(self.bad("invalid distance code\0"));
773 }
774
775 self.offset = here.val as usize;
776
777 self.extra = (here.op & MAX_BITS) as usize;
778
779 break 'top Mode::DistExt;
780 }
781 Mode::DistExt => {
782 let extra = self.extra;
784
785 if extra > 0 {
786 match bit_reader.need_bits(extra) {
787 Err(return_code) => {
788 restore!();
789 return ControlFlow::Break(return_code);
790 }
791 Ok(v) => v,
792 };
793 self.offset += bit_reader.bits(extra) as usize;
794 bit_reader.drop_bits(extra as u8);
795 self.back += extra;
796 }
797
798 if INFLATE_STRICT && self.offset > self.dmax {
799 restore!();
800 self.mode = Mode::Bad;
801 return ControlFlow::Break(
802 self.bad("invalid distance code too far back\0"),
803 );
804 }
805
806 break 'top Mode::Match;
809 }
810 Mode::Match => {
811 if writer.is_full() {
813 restore!();
814 #[cfg(all(feature = "std", test))]
815 eprintln!(
816 "BufError: writer is full ({} bytes)",
817 self.writer.capacity()
818 );
819 return ControlFlow::Break(ReturnCode::Ok);
820 }
821
822 let left = writer.remaining();
823 let copy = writer.len();
824
825 let copy = if self.offset > copy {
826 let mut copy = self.offset - copy;
829
830 if copy > self.window.have() {
831 if self.flags.contains(Flags::SANE) {
832 restore!();
833 self.mode = Mode::Bad;
834 return ControlFlow::Break(
835 self.bad("invalid distance too far back\0"),
836 );
837 }
838
839 panic!("INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR")
841 }
842
843 let wnext = self.window.next();
844 let wsize = self.window.size();
845
846 let from = if copy > wnext {
847 copy -= wnext;
848 wsize - copy
849 } else {
850 wnext - copy
851 };
852
853 copy = Ord::min(copy, self.length);
854 copy = Ord::min(copy, left);
855
856 writer.extend_from_window(&self.window, from..from + copy);
857
858 copy
859 } else {
860 let copy = Ord::min(self.length, left);
861 writer.copy_match(self.offset, copy);
862
863 copy
864 };
865
866 self.length -= copy;
867
868 if self.length == 0 {
869 break 'top Mode::Len;
870 } else {
871 break 'top Mode::Match;
874 }
875 }
876 _ => unsafe { core::hint::unreachable_unchecked() },
877 }
878 }
879 }
880 }
881
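    /// Runs the inflate state machine until more input is needed, the output buffer is
    /// full, the stream ends, or an error occurs. The labelled loop mirrors the large
    /// `switch` statement in zlib's `inflate()`.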
882 fn dispatch(&mut self) -> ReturnCode {
883 let mut mode = self.mode;
885
886 macro_rules! pull_byte {
887 ($self:expr) => {
888 match $self.bit_reader.pull_byte() {
889 Err(return_code) => {
890 self.mode = mode;
891 return $self.inflate_leave(return_code);
892 }
893 Ok(_) => (),
894 }
895 };
896 }
897
898 macro_rules! need_bits {
899 ($self:expr, $n:expr) => {
900 match $self.bit_reader.need_bits($n) {
901 Err(return_code) => {
902 self.mode = mode;
903 return $self.inflate_leave(return_code);
904 }
905 Ok(v) => v,
906 }
907 };
908 }
909
910 let ret = 'label: loop {
911 mode = 'blk: {
912 match mode {
913 Mode::Head => {
914 if self.wrap == 0 {
915 break 'blk Mode::TypeDo;
916 }
917
918 need_bits!(self, 16);
919
920 if (self.wrap & 2) != 0 && self.bit_reader.hold() == 0x8b1f {
922 if self.wbits == 0 {
923 self.wbits = 15;
924 }
925
926 let b0 = self.bit_reader.bits(8) as u8;
927 let b1 = (self.bit_reader.hold() >> 8) as u8;
928 self.checksum = crc32(crate::CRC32_INITIAL_VALUE, &[b0, b1]);
929 self.bit_reader.init_bits();
930
931 break 'blk Mode::Flags;
932 }
933
934 if let Some(header) = &mut self.head {
935 header.done = -1;
936 }
937
938 if (self.wrap & 1) == 0
940 || ((self.bit_reader.bits(8) << 8) + (self.bit_reader.hold() >> 8)) % 31
941 != 0
942 {
943 mode = Mode::Bad;
944 break 'label self.bad("incorrect header check\0");
945 }
946
947 if self.bit_reader.bits(4) != Z_DEFLATED as u64 {
948 mode = Mode::Bad;
949 break 'label self.bad("unknown compression method\0");
950 }
951
952 self.bit_reader.drop_bits(4);
953 let len = self.bit_reader.bits(4) as u8 + 8;
954
955 if self.wbits == 0 {
956 self.wbits = len;
957 }
958
959 if len as i32 > MAX_WBITS || len > self.wbits {
960 mode = Mode::Bad;
961 break 'label self.bad("invalid window size\0");
962 }
963
964 self.dmax = 1 << len;
                        self.gzip_flags = 0;
                        self.checksum = crate::ADLER32_INITIAL_VALUE as _;
967
968 if self.bit_reader.hold() & 0x200 != 0 {
969 self.bit_reader.init_bits();
970
971 break 'blk Mode::DictId;
972 } else {
973 self.bit_reader.init_bits();
974
975 break 'blk Mode::Type;
976 }
977 }
978 Mode::Flags => {
979 need_bits!(self, 16);
980 self.gzip_flags = self.bit_reader.hold() as i32;
981
982 if self.gzip_flags & 0xff != Z_DEFLATED {
984 mode = Mode::Bad;
985 break 'label self.bad("unknown compression method\0");
986 }
987
988 if self.gzip_flags & 0xe000 != 0 {
989 mode = Mode::Bad;
990 break 'label self.bad("unknown header flags set\0");
991 }
992
993 if let Some(head) = self.head.as_mut() {
994 head.text = ((self.bit_reader.hold() >> 8) & 1) as i32;
995 }
996
997 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
998 let b0 = self.bit_reader.bits(8) as u8;
999 let b1 = (self.bit_reader.hold() >> 8) as u8;
1000 self.checksum = crc32(self.checksum, &[b0, b1]);
1001 }
1002
1003 self.bit_reader.init_bits();
1004
1005 break 'blk Mode::Time;
1006 }
1007 Mode::Time => {
1008 need_bits!(self, 32);
1009 if let Some(head) = self.head.as_mut() {
1010 head.time = self.bit_reader.hold() as z_size;
1011 }
1012
1013 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1014 let bytes = (self.bit_reader.hold() as u32).to_le_bytes();
1015 self.checksum = crc32(self.checksum, &bytes);
1016 }
1017
1018 self.bit_reader.init_bits();
1019
1020 break 'blk Mode::Os;
1021 }
1022 Mode::Os => {
1023 need_bits!(self, 16);
1024 if let Some(head) = self.head.as_mut() {
1025 head.xflags = (self.bit_reader.hold() & 0xff) as i32;
1026 head.os = (self.bit_reader.hold() >> 8) as i32;
1027 }
1028
1029 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1030 let bytes = (self.bit_reader.hold() as u16).to_le_bytes();
1031 self.checksum = crc32(self.checksum, &bytes);
1032 }
1033
1034 self.bit_reader.init_bits();
1035
1036 break 'blk Mode::ExLen;
1037 }
1038 Mode::ExLen => {
1039 if (self.gzip_flags & 0x0400) != 0 {
1040 need_bits!(self, 16);
1041
1042 self.length = self.bit_reader.hold() as usize;
1044 if let Some(head) = self.head.as_mut() {
1045 head.extra_len = self.length as u32;
1046 }
1047
1048 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1049 let bytes = (self.bit_reader.hold() as u16).to_le_bytes();
1050 self.checksum = crc32(self.checksum, &bytes);
1051 }
1052 self.bit_reader.init_bits();
1053 } else if let Some(head) = self.head.as_mut() {
1054 head.extra = core::ptr::null_mut();
1055 }
1056
1057 break 'blk Mode::Extra;
1058 }
1059 Mode::Extra => {
1060 if (self.gzip_flags & 0x0400) != 0 {
1061 let extra_available =
1063 Ord::min(self.length, self.bit_reader.bytes_remaining());
1064
1065 if extra_available > 0 {
1066 if let Some(head) = self.head.as_mut() {
1067 if !head.extra.is_null() {
1068 let written_so_far = head.extra_len as usize - self.length;
1075
1076 let count = Ord::min(
1078 (head.extra_max as usize)
1079 .saturating_sub(written_so_far),
1080 extra_available,
1081 );
1082
1083 let next_write_offset =
1086 Ord::min(written_so_far, head.extra_max as usize);
1087
1088 unsafe {
1089 core::ptr::copy_nonoverlapping(
1093 self.bit_reader.as_mut_ptr(),
1094 head.extra.add(next_write_offset),
1095 count,
1096 );
1097 }
1098 }
1099 }
1100
1101 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1103 let extra_slice =
1104 &self.bit_reader.as_slice()[..extra_available];
1105 self.checksum = crc32(self.checksum, extra_slice)
1106 }
1107
1108 self.in_available -= extra_available;
1109 self.bit_reader.advance(extra_available);
1110 self.length -= extra_available;
1111 }
1112
1113 if self.length != 0 {
1115 break 'label self.inflate_leave(ReturnCode::Ok);
1116 }
1117 }
1118
1119 self.length = 0;
1120
1121 break 'blk Mode::Name;
1122 }
1123 Mode::Name => {
1124 if (self.gzip_flags & 0x0800) != 0 {
1125 if self.in_available == 0 {
1126 break 'label self.inflate_leave(ReturnCode::Ok);
1127 }
1128
1129 let slice = self.bit_reader.as_slice();
1132 let null_terminator_index = slice.iter().position(|c| *c == 0);
1133
1134 let name_slice = match null_terminator_index {
1136 Some(i) => &slice[..=i],
1137 None => slice,
1138 };
1139
1140 if let Some(head) = self.head.as_mut() {
1142 if !head.name.is_null() {
1143 let remaining_name_bytes = (head.name_max as usize)
1144 .checked_sub(self.length)
1145 .expect("name out of bounds");
1146 let copy = Ord::min(name_slice.len(), remaining_name_bytes);
1147
1148 unsafe {
1149 core::ptr::copy_nonoverlapping(
1152 name_slice.as_ptr(),
1153 head.name.add(self.length),
1154 copy,
1155 )
1156 };
1157
1158 self.length += copy;
1159 }
1160 }
1161
1162 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1163 self.checksum = crc32(self.checksum, name_slice);
1164 }
1165
1166 let reached_end = name_slice.last() == Some(&0);
1167 self.bit_reader.advance(name_slice.len());
1168
1169 if !reached_end && self.bit_reader.bytes_remaining() == 0 {
1170 break 'label self.inflate_leave(ReturnCode::Ok);
1171 }
1172 } else if let Some(head) = self.head.as_mut() {
1173 head.name = core::ptr::null_mut();
1174 }
1175
1176 self.length = 0;
1177
1178 break 'blk Mode::Comment;
1179 }
1180 Mode::Comment => {
1181 if (self.gzip_flags & 0x01000) != 0 {
1182 if self.in_available == 0 {
1183 break 'label self.inflate_leave(ReturnCode::Ok);
1184 }
1185
1186 let slice = self.bit_reader.as_slice();
1189 let null_terminator_index = slice.iter().position(|c| *c == 0);
1190
1191 let comment_slice = match null_terminator_index {
1193 Some(i) => &slice[..=i],
1194 None => slice,
1195 };
1196
1197 if let Some(head) = self.head.as_mut() {
1199 if !head.comment.is_null() {
1200 let remaining_comm_bytes = (head.comm_max as usize)
1201 .checked_sub(self.length)
1202 .expect("comm out of bounds");
1203 let copy = Ord::min(comment_slice.len(), remaining_comm_bytes);
1204
1205 unsafe {
1206 core::ptr::copy_nonoverlapping(
1209 comment_slice.as_ptr(),
1210 head.comment.add(self.length),
1211 copy,
1212 )
1213 };
1214
1215 self.length += copy;
1216 }
1217 }
1218
1219 if (self.gzip_flags & 0x0200) != 0 && (self.wrap & 4) != 0 {
1220 self.checksum = crc32(self.checksum, comment_slice);
1221 }
1222
1223 let reached_end = comment_slice.last() == Some(&0);
1224 self.bit_reader.advance(comment_slice.len());
1225
1226 if !reached_end && self.bit_reader.bytes_remaining() == 0 {
1227 break 'label self.inflate_leave(ReturnCode::Ok);
1228 }
1229 } else if let Some(head) = self.head.as_mut() {
1230 head.comment = core::ptr::null_mut();
1231 }
1232
1233 break 'blk Mode::HCrc;
1234 }
1235 Mode::HCrc => {
1236 if (self.gzip_flags & 0x0200) != 0 {
1237 need_bits!(self, 16);
1238
1239 if (self.wrap & 4) != 0
1240 && self.bit_reader.hold() as u32 != (self.checksum & 0xffff)
1241 {
1242 mode = Mode::Bad;
1243 break 'label self.bad("header crc mismatch\0");
1244 }
1245
1246 self.bit_reader.init_bits();
1247 }
1248
1249 if let Some(head) = self.head.as_mut() {
1250 head.hcrc = (self.gzip_flags >> 9) & 1;
1251 head.done = 1;
1252 }
1253
1254 if (self.wrap & 4 != 0) && self.gzip_flags != 0 {
1256 self.crc_fold = Crc32Fold::new();
1257 self.checksum = crate::CRC32_INITIAL_VALUE;
1258 }
1259
1260 break 'blk Mode::Type;
1261 }
1262 Mode::Type => {
1263 use InflateFlush::*;
1264
1265 match self.flush {
1266 Block | Trees => break 'label ReturnCode::Ok,
1267 NoFlush | SyncFlush | Finish => {
1268 break 'blk Mode::TypeDo;
1270 }
1271 }
1272 }
1273 Mode::TypeDo => {
1274 if self.flags.contains(Flags::IS_LAST_BLOCK) {
1275 self.bit_reader.next_byte_boundary();
1276 break 'blk Mode::Check;
1277 }
1278
1279 need_bits!(self, 3);
1280 self.flags
1282 .update(Flags::IS_LAST_BLOCK, self.bit_reader.bits(1) != 0);
1283 self.bit_reader.drop_bits(1);
1284
1285 match self.bit_reader.bits(2) {
1286 0b00 => {
1287 self.bit_reader.drop_bits(2);
1290
1291 break 'blk Mode::Stored;
1292 }
1293 0b01 => {
1294 self.len_table = Table {
1297 codes: Codes::Fixed,
1298 bits: 9,
1299 };
1300
1301 self.dist_table = Table {
1302 codes: Codes::Fixed,
1303 bits: 5,
1304 };
1305
1306 mode = Mode::Len_;
1307
1308 self.bit_reader.drop_bits(2);
1309
1310 if let InflateFlush::Trees = self.flush {
1311 break 'label self.inflate_leave(ReturnCode::Ok);
1312 } else {
1313 break 'blk Mode::Len_;
1314 }
1315 }
1316 0b10 => {
1317 self.bit_reader.drop_bits(2);
1320
1321 break 'blk Mode::Table;
1322 }
1323 0b11 => {
1324 self.bit_reader.drop_bits(2);
1327
1328 mode = Mode::Bad;
1329 break 'label self.bad("invalid block type\0");
1330 }
1331 _ => {
1332 unreachable!("BitReader::bits(2) only yields a value of two bits, so this match is already exhaustive")
1334 }
1335 }
1336 }
1337 Mode::Stored => {
1338 self.bit_reader.next_byte_boundary();
1339
1340 need_bits!(self, 32);
1341
1342 let hold = self.bit_reader.bits(32) as u32;
1343
1344 if hold as u16 != !((hold >> 16) as u16) {
1347 mode = Mode::Bad;
1348 break 'label self.bad("invalid stored block lengths\0");
1349 }
1350
1351 self.length = hold as usize & 0xFFFF;
1352 self.bit_reader.init_bits();
1355
1356 if let InflateFlush::Trees = self.flush {
1357 break 'label self.inflate_leave(ReturnCode::Ok);
1358 } else {
1359 break 'blk Mode::CopyBlock;
1360 }
1361 }
1362 Mode::CopyBlock => {
1363 loop {
1364 let mut copy = self.length;
1365
1366 if copy == 0 {
1367 break;
1368 }
1369
1370 copy = Ord::min(copy, self.writer.remaining());
1371 copy = Ord::min(copy, self.bit_reader.bytes_remaining());
1372
1373 if copy == 0 {
1374 break 'label self.inflate_leave(ReturnCode::Ok);
1375 }
1376
1377 self.writer.extend(&self.bit_reader.as_slice()[..copy]);
1378 self.bit_reader.advance(copy);
1379
1380 self.length -= copy;
1381 }
1382
1383 break 'blk Mode::Type;
1384 }
1385 Mode::Check => {
1386 if !cfg!(feature = "__internal-fuzz-disable-checksum") && self.wrap != 0 {
1387 need_bits!(self, 32);
1388
1389 self.total += self.writer.len();
1390
1391 if self.wrap & 4 != 0 {
1392 if self.gzip_flags != 0 {
1393 self.crc_fold.fold(self.writer.filled(), self.checksum);
1394 self.checksum = self.crc_fold.finish();
1395 } else {
1396 self.checksum = adler32(self.checksum, self.writer.filled());
1397 }
1398 }
1399
1400 let given_checksum = if self.gzip_flags != 0 {
1401 self.bit_reader.hold() as u32
1402 } else {
1403 zswap32(self.bit_reader.hold() as u32)
1404 };
1405
1406 self.out_available = self.writer.capacity() - self.writer.len();
1407
1408 if self.wrap & 4 != 0 && given_checksum != self.checksum {
1409 mode = Mode::Bad;
1410 break 'label self.bad("incorrect data check\0");
1411 }
1412
1413 self.bit_reader.init_bits();
1414 }
1415
1416 break 'blk Mode::Length;
1417 }
1418 Mode::Len_ => {
1419 break 'blk Mode::Len;
1420 }
1421 Mode::Len => {
1422 self.mode = mode;
1423 let val = self.len_and_friends();
1424 mode = self.mode;
1425 match val {
1426 ControlFlow::Break(return_code) => break 'label return_code,
1427 ControlFlow::Continue(()) => continue 'label,
1428 }
1429 }
1430 Mode::LenExt => {
1431 let extra = self.extra;
1433
1434 if extra != 0 {
1436 need_bits!(self, extra);
1437 self.length += self.bit_reader.bits(extra) as usize;
1438 self.bit_reader.drop_bits(extra as u8);
1439 self.back += extra;
1440 }
1441
1442 self.was = self.length;
1445
1446 break 'blk Mode::Dist;
1447 }
1448 Mode::Lit => {
1449 if self.writer.is_full() {
1451 #[cfg(all(test, feature = "std"))]
1452 eprintln!("Ok: writer is full ({} bytes)", self.writer.capacity());
1453 break 'label self.inflate_leave(ReturnCode::Ok);
1454 }
1455
1456 self.writer.push(self.length as u8);
1457
1458 break 'blk Mode::Len;
1459 }
1460 Mode::Dist => {
1461 let mut here;
1465 loop {
1466 let bits = self.bit_reader.bits(self.dist_table.bits) as usize;
1467 here = self.dist_table_get(bits);
1468 if here.bits <= self.bit_reader.bits_in_buffer() {
1469 break;
1470 }
1471
1472 pull_byte!(self);
1473 }
1474
1475 if here.op & 0xf0 == 0 {
1476 let last = here;
1477
1478 loop {
1479 let bits = self.bit_reader.bits((last.bits + last.op) as usize);
1480 here = self.dist_table_get(
1481 last.val as usize + ((bits as usize) >> last.bits),
1482 );
1483
1484 if last.bits + here.bits <= self.bit_reader.bits_in_buffer() {
1485 break;
1486 }
1487
1488 pull_byte!(self);
1489 }
1490
1491 self.bit_reader.drop_bits(last.bits);
1492 self.back += last.bits as usize;
1493 }
1494
1495 self.bit_reader.drop_bits(here.bits);
1496
1497 if here.op & 64 != 0 {
1498 mode = Mode::Bad;
1499 break 'label self.bad("invalid distance code\0");
1500 }
1501
1502 self.offset = here.val as usize;
1503
1504 self.extra = (here.op & MAX_BITS) as usize;
1505
1506 break 'blk Mode::DistExt;
1507 }
1508 Mode::DistExt => {
1509 let extra = self.extra;
1511
1512 if extra > 0 {
1513 need_bits!(self, extra);
1514 self.offset += self.bit_reader.bits(extra) as usize;
1515 self.bit_reader.drop_bits(extra as u8);
1516 self.back += extra;
1517 }
1518
1519 if INFLATE_STRICT && self.offset > self.dmax {
1520 mode = Mode::Bad;
1521 break 'label self.bad("invalid distance code too far back\0");
1522 }
1523
1524 break 'blk Mode::Match;
1527 }
1528 Mode::Match => {
1529 'match_: loop {
1532 if self.writer.is_full() {
1533 #[cfg(all(feature = "std", test))]
1534 eprintln!(
1535 "BufError: writer is full ({} bytes)",
1536 self.writer.capacity()
1537 );
1538 break 'label self.inflate_leave(ReturnCode::Ok);
1539 }
1540
1541 let left = self.writer.remaining();
1542 let copy = self.writer.len();
1543
1544 let copy = if self.offset > copy {
1545 let mut copy = self.offset - copy;
1548
1549 if copy > self.window.have() {
1550 if self.flags.contains(Flags::SANE) {
1551 mode = Mode::Bad;
1552 break 'label self.bad("invalid distance too far back\0");
1553 }
1554
1555 panic!("INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR")
1557 }
1558
1559 let wnext = self.window.next();
1560 let wsize = self.window.size();
1561
1562 let from = if copy > wnext {
1563 copy -= wnext;
1564 wsize - copy
1565 } else {
1566 wnext - copy
1567 };
1568
1569 copy = Ord::min(copy, self.length);
1570 copy = Ord::min(copy, left);
1571
1572 self.writer
1573 .extend_from_window(&self.window, from..from + copy);
1574
1575 copy
1576 } else {
1577 let copy = Ord::min(self.length, left);
1578 self.writer.copy_match(self.offset, copy);
1579
1580 copy
1581 };
1582
1583 self.length -= copy;
1584
1585 if self.length == 0 {
1586 break 'blk Mode::Len;
1587 } else {
1588 continue 'match_;
1590 }
1591 }
1592 }
1593 Mode::Table => {
1594 need_bits!(self, 14);
1595 self.nlen = self.bit_reader.bits(5) as usize + 257;
1596 self.bit_reader.drop_bits(5);
1597 self.ndist = self.bit_reader.bits(5) as usize + 1;
1598 self.bit_reader.drop_bits(5);
1599 self.ncode = self.bit_reader.bits(4) as usize + 4;
1600 self.bit_reader.drop_bits(4);
1601
1602 if self.nlen > 286 || self.ndist > 30 {
1604 mode = Mode::Bad;
1605 break 'label self.bad("too many length or distance symbols\0");
1606 }
1607
1608 self.have = 0;
1609
1610 break 'blk Mode::LenLens;
1611 }
1612 Mode::LenLens => {
1613 const ORDER: [u8; 19] = [
1615 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15,
1616 ];
1617
1618 while self.have < self.ncode {
1619 need_bits!(self, 3);
1620 self.lens[usize::from(ORDER[self.have])] =
1621 self.bit_reader.bits(3) as u16;
1622 self.have += 1;
1623 self.bit_reader.drop_bits(3);
1624 }
1625
1626 while self.have < 19 {
1627 self.lens[usize::from(ORDER[self.have])] = 0;
1628 self.have += 1;
1629 }
1630
1631 let InflateTable::Success { root, used } = inflate_table(
1632 CodeType::Codes,
1633 &self.lens,
1634 19,
1635 &mut self.codes_codes,
1636 7,
1637 &mut self.work,
1638 ) else {
1639 mode = Mode::Bad;
1640 break 'label self.bad("invalid code lengths set\0");
1641 };
1642
1643 self.next = used;
1644 self.len_table.codes = Codes::Codes;
1645 self.len_table.bits = root;
1646
1647 self.have = 0;
1648
1649 break 'blk Mode::CodeLens;
1650 }
1651 Mode::CodeLens => {
1652 while self.have < self.nlen + self.ndist {
1653 let here = loop {
1654 let bits = self.bit_reader.bits(self.len_table.bits);
1655 let here = self.len_table_get(bits as usize);
1656 if here.bits <= self.bit_reader.bits_in_buffer() {
1657 break here;
1658 }
1659
1660 pull_byte!(self);
1661 };
1662
1663 let here_bits = here.bits;
1664
1665 match here.val {
1666 0..=15 => {
1667 self.bit_reader.drop_bits(here_bits);
1668 self.lens[self.have] = here.val;
1669 self.have += 1;
1670 }
1671 16 => {
1672 need_bits!(self, usize::from(here_bits) + 2);
1673 self.bit_reader.drop_bits(here_bits);
1674 if self.have == 0 {
1675 mode = Mode::Bad;
1676 break 'label self.bad("invalid bit length repeat\0");
1677 }
1678
1679 let len = self.lens[self.have - 1];
1680 let copy = 3 + self.bit_reader.bits(2) as usize;
1681 self.bit_reader.drop_bits(2);
1682
1683 if self.have + copy > self.nlen + self.ndist {
1684 mode = Mode::Bad;
1685 break 'label self.bad("invalid bit length repeat\0");
1686 }
1687
1688 self.lens[self.have..][..copy].fill(len);
1689 self.have += copy;
1690 }
1691 17 => {
1692 need_bits!(self, usize::from(here_bits) + 3);
1693 self.bit_reader.drop_bits(here_bits);
1694 let copy = 3 + self.bit_reader.bits(3) as usize;
1695 self.bit_reader.drop_bits(3);
1696
1697 if self.have + copy > self.nlen + self.ndist {
1698 mode = Mode::Bad;
1699 break 'label self.bad("invalid bit length repeat\0");
1700 }
1701
1702 self.lens[self.have..][..copy].fill(0);
1703 self.have += copy;
1704 }
1705 18.. => {
1706 need_bits!(self, usize::from(here_bits) + 7);
1707 self.bit_reader.drop_bits(here_bits);
1708 let copy = 11 + self.bit_reader.bits(7) as usize;
1709 self.bit_reader.drop_bits(7);
1710
1711 if self.have + copy > self.nlen + self.ndist {
1712 mode = Mode::Bad;
1713 break 'label self.bad("invalid bit length repeat\0");
1714 }
1715
1716 self.lens[self.have..][..copy].fill(0);
1717 self.have += copy;
1718 }
1719 }
1720 }
1721
1722 if self.lens[256] == 0 {
1724 mode = Mode::Bad;
1725 break 'label self.bad("invalid code -- missing end-of-block\0");
1726 }
1727
1728 let InflateTable::Success { root, used } = inflate_table(
1731 CodeType::Lens,
1732 &self.lens,
1733 self.nlen,
1734 &mut self.len_codes,
1735 10,
1736 &mut self.work,
1737 ) else {
1738 mode = Mode::Bad;
1739 break 'label self.bad("invalid literal/lengths set\0");
1740 };
1741
1742 self.len_table.codes = Codes::Len;
1743 self.len_table.bits = root;
1744 self.next = used;
1745
1746 let InflateTable::Success { root, used } = inflate_table(
1747 CodeType::Dists,
1748 &self.lens[self.nlen..],
1749 self.ndist,
1750 &mut self.dist_codes,
1751 9,
1752 &mut self.work,
1753 ) else {
1754 mode = Mode::Bad;
1755 break 'label self.bad("invalid distances set\0");
1756 };
1757
1758 self.dist_table.bits = root;
1759 self.dist_table.codes = Codes::Dist;
1760 self.next += used;
1761
1762 mode = Mode::Len_;
1763
1764 if matches!(self.flush, InflateFlush::Trees) {
1765 break 'label self.inflate_leave(ReturnCode::Ok);
1766 }
1767
1768 break 'blk Mode::Len_;
1769 }
1770 Mode::Dict => {
1771 if !self.flags.contains(Flags::HAVE_DICT) {
1772 break 'label self.inflate_leave(ReturnCode::NeedDict);
1773 }
1774
1775 self.checksum = crate::ADLER32_INITIAL_VALUE as _;
1776
1777 break 'blk Mode::Type;
1778 }
1779 Mode::DictId => {
1780 need_bits!(self, 32);
1781
1782 self.checksum = zswap32(self.bit_reader.hold() as u32);
1783
1784 self.bit_reader.init_bits();
1785
1786 break 'blk Mode::Dict;
1787 }
1788 Mode::Done => {
1789 break 'label ReturnCode::StreamEnd;
1791 }
1792 Mode::Bad => {
1793 let msg = "repeated call with bad state\0";
1794 #[cfg(all(feature = "std", test))]
1795 dbg!(msg);
1796 self.error_message = Some(msg);
1797
1798 break 'label ReturnCode::DataError;
1799 }
1800 Mode::Mem => {
1801 break 'label ReturnCode::MemError;
1802 }
1803 Mode::Sync => {
1804 break 'label ReturnCode::StreamError;
1805 }
1806 Mode::Length => {
1807 if self.wrap != 0 && self.gzip_flags != 0 {
1809 need_bits!(self, 32);
1810 if (self.wrap & 0b100) != 0
1811 && self.bit_reader.hold() as u32 != self.total as u32
1812 {
1813 mode = Mode::Bad;
1814 break 'label self.bad("incorrect length check\0");
1815 }
1816
1817 self.bit_reader.init_bits();
1818 }
1819
1820 mode = Mode::Done;
1821 break 'label ReturnCode::StreamEnd;
1823 }
1824 };
1825 }
1826 };
1827
1828 self.mode = mode;
1829
1830 ret
1831 }
1832
1833 fn bad(&mut self, msg: &'static str) -> ReturnCode {
1834 #[cfg(all(feature = "std", test))]
1835 dbg!(msg);
1836 self.error_message = Some(msg);
1837 self.inflate_leave(ReturnCode::DataError)
1838 }
1839
1840 fn inflate_leave(&mut self, return_code: ReturnCode) -> ReturnCode {
1843 return_code
1845 }
1846
1847 fn decoding_state(&self) -> i32 {
1849 let bit_reader_bits = self.bit_reader.bits_in_buffer() as i32;
1850 debug_assert!(bit_reader_bits < 64);
1851
1852 let last = if self.flags.contains(Flags::IS_LAST_BLOCK) {
1853 64
1854 } else {
1855 0
1856 };
1857
1858 let mode = match self.mode {
1859 Mode::Type => 128,
1860 Mode::Len_ | Mode::CopyBlock => 256,
1861 _ => 0,
1862 };
1863
1864 bit_reader_bits | last | mode
1865 }
1866}
1867
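/// Fast decoding path, entered when at least `INFLATE_FAST_MIN_HAVE` input bytes and
/// `INFLATE_FAST_MIN_LEFT` output bytes are available. Dispatches to an AVX2/BMI2
/// specialization when the CPU supports it.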
1868unsafe fn inflate_fast_help(state: &mut State, start: usize) {
1873 #[cfg(any(target_arch = "x86_64", target_arch = "x86"))]
1874 if crate::cpu_features::is_enabled_avx2_and_bmi2() {
1875 return unsafe { inflate_fast_help_avx2(state, start) };
1877 }
1878
1879 unsafe { inflate_fast_help_vanilla(state, start) };
1881}
1882
1883#[cfg(any(target_arch = "x86_64", target_arch = "x86"))]
1888#[target_feature(enable = "avx2")]
1889#[target_feature(enable = "bmi2")]
1890#[target_feature(enable = "bmi1")]
1891unsafe fn inflate_fast_help_avx2(state: &mut State, start: usize) {
1892 unsafe { inflate_fast_help_impl::<{ CpuFeatures::AVX2 }>(state, start) };
1894}
1895
1896unsafe fn inflate_fast_help_vanilla(state: &mut State, start: usize) {
1901 unsafe { inflate_fast_help_impl::<{ CpuFeatures::NONE }>(state, start) };
1903}
1904
1905#[inline(always)]
1910unsafe fn inflate_fast_help_impl<const FEATURES: usize>(state: &mut State, _start: usize) {
1911 let mut bit_reader = BitReader::new(&[]);
1912 core::mem::swap(&mut bit_reader, &mut state.bit_reader);
1913 debug_assert!(bit_reader.bytes_remaining() >= 15);
1914
1915 let mut writer = Writer::new(&mut []);
1916 core::mem::swap(&mut writer, &mut state.writer);
1917
1918 let lcode = state.len_table_ref();
1919 let dcode = state.dist_table_ref();
1920
1921 let lmask = (1u64 << state.len_table.bits) - 1;
1923 let dmask = (1u64 << state.dist_table.bits) - 1;
1924
1925 let extra_safe = false;
1927
1928 let window_size = state.window.size();
1929
1930 let mut bad = None;
1931
1932 if bit_reader.bits_in_buffer() < 10 {
1933 debug_assert!(bit_reader.bytes_remaining() >= 15);
1934 unsafe { bit_reader.refill() };
1936 }
1937 debug_assert!(
1940 bit_reader.bytes_remaining() >= 8 && bit_reader.bytes_remaining_including_buffer() >= 15
1941 );
1942
1943 'outer: loop {
1944 debug_assert!(
1950 bit_reader.bytes_remaining() >= 8
1951 && bit_reader.bytes_remaining_including_buffer() >= 15
1952 );
1953
1954 let mut here = {
1955 let bits = bit_reader.bits_in_buffer();
1956 let hold = bit_reader.hold();
1957
1958 unsafe { bit_reader.refill() };
1965 debug_assert!(bit_reader.bytes_remaining() >= 8);
1967
1968 if bits as usize >= state.len_table.bits {
1971 lcode[(hold & lmask) as usize]
1972 } else {
1973 lcode[(bit_reader.hold() & lmask) as usize]
1974 }
1975 };
1976
1977 if here.op == 0 {
1978 writer.push(here.val as u8);
1979 bit_reader.drop_bits(here.bits);
1980 here = lcode[(bit_reader.hold() & lmask) as usize];
1981
1982 if here.op == 0 {
1983 writer.push(here.val as u8);
1984 bit_reader.drop_bits(here.bits);
1985 here = lcode[(bit_reader.hold() & lmask) as usize];
1986 }
1987 }
1988
1989 'dolen: loop {
1990 bit_reader.drop_bits(here.bits);
1991 let op = here.op;
1992
1993 if op == 0 {
1994 writer.push(here.val as u8);
1995 } else if op & 16 != 0 {
1996 let op = op & MAX_BITS;
1997 let mut len = here.val + bit_reader.bits(op as usize) as u16;
1998 bit_reader.drop_bits(op);
1999
2000 here = dcode[(bit_reader.hold() & dmask) as usize];
2001
2002 if bit_reader.bits_in_buffer() < MAX_BITS + MAX_DIST_EXTRA_BITS {
2005 debug_assert!(bit_reader.bytes_remaining() >= 8);
2006 unsafe { bit_reader.refill() };
2013 }
2014
2015 'dodist: loop {
2016 bit_reader.drop_bits(here.bits);
2017 let op = here.op;
2018
2019 if op & 16 != 0 {
2020 let op = op & MAX_BITS;
2021 let dist = here.val + bit_reader.bits(op as usize) as u16;
2022
2023 if INFLATE_STRICT && dist as usize > state.dmax {
2024 bad = Some("invalid distance too far back\0");
2025 state.mode = Mode::Bad;
2026 break 'outer;
2027 }
2028
2029 bit_reader.drop_bits(op);
2030
2031 let written = writer.len();
2033
2034 if dist as usize > written {
2035 if (dist as usize - written) > state.window.have() {
2037 if state.flags.contains(Flags::SANE) {
2038 bad = Some("invalid distance too far back\0");
2039 state.mode = Mode::Bad;
2040 break 'outer;
2041 }
2042
2043 panic!("INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR")
2044 }
2045
2046 let mut op = dist as usize - written;
2047 let mut from;
2048
2049 let window_next = state.window.next();
2050
2051 if window_next == 0 {
2052 from = window_size - op;
2059 } else if window_next >= op {
2060 from = window_next - op;
2062 } else {
2063 op -= window_next;
2068 from = window_size - op;
2069
2070 if op < len as usize {
2071 len -= op as u16;
2075 writer.extend_from_window_with_features::<FEATURES>(
2076 &state.window,
2077 from..from + op,
2078 );
2079 from = 0;
2080 op = window_next;
2081 }
2082 }
2083
2084 let copy = Ord::min(op, len as usize);
2085 writer.extend_from_window_with_features::<FEATURES>(
2086 &state.window,
2087 from..from + copy,
2088 );
2089
2090 if op < len as usize {
2091 writer.copy_match_with_features::<FEATURES>(
2093 dist as usize,
2094 len as usize - op,
2095 );
2096 }
2097 } else if extra_safe {
2098 todo!()
2099 } else {
2100 writer.copy_match_with_features::<FEATURES>(dist as usize, len as usize)
2101 }
2102 } else if (op & 64) == 0 {
2103 here = dcode[(here.val + bit_reader.bits(op as usize) as u16) as usize];
2105 continue 'dodist;
2106 } else {
2107 bad = Some("invalid distance code\0");
2108 state.mode = Mode::Bad;
2109 break 'outer;
2110 }
2111
2112 break 'dodist;
2113 }
2114 } else if (op & 64) == 0 {
2115 here = lcode[(here.val + bit_reader.bits(op as usize) as u16) as usize];
2117 continue 'dolen;
2118 } else if op & 32 != 0 {
2119 state.mode = Mode::Type;
2121 break 'outer;
2122 } else {
2123 bad = Some("invalid literal/length code\0");
2124 state.mode = Mode::Bad;
2125 break 'outer;
2126 }
2127
2128 break 'dolen;
2129 }
2130
2131 let remaining = bit_reader.bytes_remaining_including_buffer();
2133 if remaining >= INFLATE_FAST_MIN_HAVE && writer.remaining() >= INFLATE_FAST_MIN_LEFT {
2134 continue;
2135 }
2136
2137 break 'outer;
2138 }
2139
2140 bit_reader.return_unused_bytes();
2142
2143 state.bit_reader = bit_reader;
2144 state.writer = writer;
2145
2146 if let Some(error_message) = bad {
2147 debug_assert!(matches!(state.mode, Mode::Bad));
2148 state.bad(error_message);
2149 }
2150}
2151
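/// Inserts `bits` bits of `value` in front of the input stream, like zlib's
/// `inflatePrime`. A negative `bits` resets the bit buffer instead; at most 16 bits can
/// be inserted at a time and the buffer may not grow beyond 32 bits.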
2152pub fn prime(stream: &mut InflateStream, bits: i32, value: i32) -> ReturnCode {
2153 if bits == 0 {
2154 } else if bits < 0 {
2156 stream.state.bit_reader.init_bits();
2157 } else if bits > 16 || stream.state.bit_reader.bits_in_buffer() + bits as u8 > 32 {
2158 return ReturnCode::StreamError;
2159 } else {
2160 stream.state.bit_reader.prime(bits as u8, value as u64);
2161 }
2162
2163 ReturnCode::Ok
2164}
2165
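/// Byte offsets of the window and the [`State`] inside the single allocation that backs
/// an inflate stream; [`init`] and [`copy`] both rely on this layout.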
2166struct InflateAllocOffsets {
2167 total_size: usize,
2168 state_pos: usize,
2169 window_pos: usize,
2170}
2171
2172impl InflateAllocOffsets {
2173 fn new() -> Self {
2174 use core::mem::size_of;
2175
2176 const WINDOW_PAD_SIZE: usize = 64;
2178
2179 let mut curr_size = 0usize;
2180
2181 let window_size = (1 << MAX_WBITS) + WINDOW_PAD_SIZE;
2183 let state_size = size_of::<State>();
2184
2185 let window_pos = curr_size.next_multiple_of(WINDOW_PAD_SIZE);
2187 curr_size += window_pos + window_size;
2188
2189 let state_pos = curr_size.next_multiple_of(64);
2190 curr_size += state_pos + state_size;
2191
2192 let total_size = (curr_size + (WINDOW_PAD_SIZE - 1)).next_multiple_of(64);
2194
2195 Self {
2196 total_size,
2197 state_pos,
2198 window_pos,
2199 }
2200 }
2201}
2202
2203#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
2204pub struct InflateConfig {
2205 pub window_bits: i32,
2206}
2207
2208impl Default for InflateConfig {
2209 fn default() -> Self {
2210 Self {
2211 window_bits: DEF_WBITS,
2212 }
2213 }
2214}
2215
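/// Initializes `stream` for decompression: installs default allocators where configured
/// and available, performs one 64-byte-aligned allocation holding both the window and
/// the [`State`], and resets the stream with the given configuration.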
2216pub fn init(stream: &mut z_stream, config: InflateConfig) -> ReturnCode {
2218 stream.msg = core::ptr::null_mut();
2219
2220 #[cfg(feature = "rust-allocator")]
2224 if stream.zalloc.is_none() || stream.zfree.is_none() {
2225 stream.configure_default_rust_allocator()
2226 }
2227
2228 #[cfg(feature = "c-allocator")]
2229 if stream.zalloc.is_none() || stream.zfree.is_none() {
2230 stream.configure_default_c_allocator()
2231 }
2232
2233 if stream.zalloc.is_none() || stream.zfree.is_none() {
2234 return ReturnCode::StreamError;
2235 }
2236
2237 let mut state = State::new(&[], Writer::new(&mut []));
2238
2239 state.chunksize = 32;
2241
2242 let alloc = Allocator {
2243 zalloc: stream.zalloc.unwrap(),
2244 zfree: stream.zfree.unwrap(),
2245 opaque: stream.opaque,
2246 _marker: PhantomData,
2247 };
2248 let allocs = InflateAllocOffsets::new();
2249
2250 let Some(allocation_start) = alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
2251 return ReturnCode::MemError;
2252 };
2253
2254 let address = allocation_start.as_ptr() as usize;
2255 let align_offset = address.next_multiple_of(64) - address;
2256 let buf = unsafe { allocation_start.as_ptr().add(align_offset) };
2257
2258 let window_allocation = unsafe { buf.add(allocs.window_pos) };
2259 let window = unsafe { Window::from_raw_parts(window_allocation, (1 << MAX_WBITS) + 64) };
2260 state.window = window;
2261
2262 let state_allocation = unsafe { buf.add(allocs.state_pos).cast::<State>() };
2263 unsafe { state_allocation.write(state) };
2264 stream.state = state_allocation.cast::<internal_state>();
2265
2266 if let Some(stream) = unsafe { InflateStream::from_stream_mut(stream) } {
2268 stream.state.allocation_start = allocation_start.as_ptr();
2269 stream.state.total_allocation_size = allocs.total_size;
2270 let ret = reset_with_config(stream, config);
2271
2272 if ret != ReturnCode::Ok {
2273 end(stream);
2274 }
2275
2276 ret
2277 } else {
2278 ReturnCode::StreamError
2279 }
2280}
2281
2282pub fn reset_with_config(stream: &mut InflateStream, config: InflateConfig) -> ReturnCode {
2283 let mut window_bits = config.window_bits;
2284 let wrap;
2285
2286 if window_bits < 0 {
2287 wrap = 0;
2288
2289 if window_bits < -MAX_WBITS {
2290 return ReturnCode::StreamError;
2291 }
2292
2293 window_bits = -window_bits;
2294 } else {
        wrap = (window_bits >> 4) + 5;

        if window_bits < 48 {
            window_bits &= MAX_WBITS;
        }
2300 }
2301
2302 if window_bits != 0 && !(MIN_WBITS..=MAX_WBITS).contains(&window_bits) {
2303 #[cfg(feature = "std")]
2304 eprintln!("invalid windowBits");
2305 return ReturnCode::StreamError;
2306 }
2307
2308 stream.state.wrap = wrap as u8;
2309 stream.state.wbits = window_bits as _;
2310
2311 reset(stream)
2312}
2313
2314pub fn reset(stream: &mut InflateStream) -> ReturnCode {
2315 stream.state.window.clear();
2317
2318 stream.state.error_message = None;
2319
2320 reset_keep(stream)
2321}
2322
2323pub fn reset_keep(stream: &mut InflateStream) -> ReturnCode {
2324 stream.total_in = 0;
2325 stream.total_out = 0;
2326 stream.state.total = 0;
2327
2328 stream.msg = core::ptr::null_mut();
2329
2330 let state = &mut stream.state;
2331
2332 if state.wrap != 0 {
2333 stream.adler = (state.wrap & 1) as _;
2335 }
2336
2337 state.mode = Mode::Head;
2338 state.checksum = crate::ADLER32_INITIAL_VALUE as u32;
2339
2340 state.flags.update(Flags::IS_LAST_BLOCK, false);
2341 state.flags.update(Flags::HAVE_DICT, false);
2342 state.flags.update(Flags::SANE, true);
2343 state.gzip_flags = -1;
2344 state.dmax = 32768;
2345 state.head = None;
2346 state.bit_reader = BitReader::new(&[]);
2347
2348 state.next = 0;
2349 state.len_table = Table::default();
2350 state.dist_table = Table::default();
2351
2352 state.back = usize::MAX;
2353
2354 ReturnCode::Ok
2355}
2356
2357pub fn codes_used(stream: &InflateStream) -> usize {
2358 stream.state.next
2359}
2360
2361pub unsafe fn inflate(stream: &mut InflateStream, flush: InflateFlush) -> ReturnCode {
2362 if stream.next_out.is_null() || (stream.next_in.is_null() && stream.avail_in != 0) {
        return ReturnCode::StreamError;
2364 }
2365
2366 let state = &mut stream.state;
2367
2368 if let Mode::Type = state.mode {
2370 state.mode = Mode::TypeDo;
2371 }
2372
2373 state.flush = flush;
2374
2375 unsafe {
2376 state
2377 .bit_reader
2378 .update_slice(stream.next_in, stream.avail_in as usize)
2379 };
2380 state.writer = unsafe { Writer::new_uninit(stream.next_out.cast(), stream.avail_out as usize) };
2382
2383 state.in_available = stream.avail_in as _;
2384 state.out_available = stream.avail_out as _;
2385
2386 let err = state.dispatch();
2387
2388 let in_read = state.bit_reader.as_ptr() as usize - stream.next_in as usize;
2389 let out_written = state.out_available - (state.writer.capacity() - state.writer.len());
2390
2391 stream.total_in += in_read as z_size;
2392 state.total += out_written;
2393 stream.total_out = state.total as _;
2394
2395 stream.avail_in = state.bit_reader.bytes_remaining() as u32;
2396 stream.next_in = state.bit_reader.as_ptr() as *mut u8;
2397
2398 stream.avail_out = (state.writer.capacity() - state.writer.len()) as u32;
2399 stream.next_out = state.writer.next_out() as *mut u8;
2400
2401 stream.adler = state.checksum as z_checksum;
2402
2403 let valid_mode = |mode| !matches!(mode, Mode::Bad | Mode::Mem | Mode::Sync);
2404 let not_done = |mode| {
2405 !matches!(
2406 mode,
2407 Mode::Check | Mode::Length | Mode::Bad | Mode::Mem | Mode::Sync
2408 )
2409 };
2410
2411 let must_update_window = state.window.size() != 0
2412 || (out_written != 0
2413 && valid_mode(state.mode)
2414 && (not_done(state.mode) || !matches!(state.flush, InflateFlush::Finish)));
2415
2416 let update_checksum = state.wrap & 4 != 0;
2417
2418 if must_update_window {
2419 state.window.extend(
2420 &state.writer.filled()[..out_written],
2421 state.gzip_flags,
2422 update_checksum,
2423 &mut state.checksum,
2424 &mut state.crc_fold,
2425 );
2426 }
2427
2428 if let Some(msg) = state.error_message {
2429 assert!(msg.ends_with('\0'));
2430 stream.msg = msg.as_ptr() as *mut u8 as *mut core::ffi::c_char;
2431 }
2432
2433 stream.data_type = state.decoding_state();
2434
    if ((in_read == 0 && out_written == 0) || matches!(flush, InflateFlush::Finish))
        && err == ReturnCode::Ok
    {
        ReturnCode::BufError
    } else {
        err
    }
2442}
2443
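/// Scans `buf` for the `00 00 FF FF` marker that follows a sync flush. `got` is the
/// number of marker bytes already matched; returns the updated match count and the
/// number of bytes examined.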
2444fn syncsearch(mut got: usize, buf: &[u8]) -> (usize, usize) {
2445 let len = buf.len();
2446 let mut next = 0;
2447
2448 while next < len && got < 4 {
2449 if buf[next] == if got < 2 { 0 } else { 0xff } {
2450 got += 1;
2451 } else if buf[next] != 0 {
2452 got = 0;
2453 } else {
2454 got = 4 - got;
2455 }
2456 next += 1;
2457 }
2458
2459 (got, next)
2460}
2461
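/// Skips input until a possible full flush point (a `00 00 FF FF` marker) is found,
/// like zlib's `inflateSync`, then resets the stream so that decompression can resume
/// from that point.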
2462pub fn sync(stream: &mut InflateStream) -> ReturnCode {
2463 let state = &mut stream.state;
2464
2465 if stream.avail_in == 0 && state.bit_reader.bits_in_buffer() < 8 {
2466 return ReturnCode::BufError;
2467 }
2468 if !matches!(state.mode, Mode::Sync) {
2470 state.mode = Mode::Sync;
2471
2472 let (buf, len) = state.bit_reader.start_sync_search();
2473
2474 (state.have, _) = syncsearch(0, &buf[..len]);
2475 }
2476
2477 let slice = unsafe { core::slice::from_raw_parts(stream.next_in, stream.avail_in as usize) };
2480
2481 let len;
2482 (state.have, len) = syncsearch(state.have, slice);
2483 stream.next_in = unsafe { stream.next_in.add(len) };
2485 stream.avail_in -= len as u32;
2486 stream.total_in += len as z_size;
2487
2488 if state.have != 4 {
2490 return ReturnCode::DataError;
2491 }
2492
    if state.gzip_flags == -1 {
        state.wrap = 0;
    } else {
        state.wrap &= !4;
    }
2498
2499 let flags = state.gzip_flags;
2500 let total_in = stream.total_in;
2501 let total_out = stream.total_out;
2502
2503 reset(stream);
2504
2505 stream.total_in = total_in;
2506 stream.total_out = total_out;
2507
2508 stream.state.gzip_flags = flags;
2509 stream.state.mode = Mode::Type;
2510
2511 ReturnCode::Ok
2512}
2513
2514pub fn sync_point(stream: &mut InflateStream) -> bool {
2523 matches!(stream.state.mode, Mode::Stored) && stream.state.bit_reader.bits_in_buffer() == 0
2524}
2525
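/// Duplicates `source` into `dest`, allocating a fresh window/state block with the same
/// layout, like zlib's `inflateCopy`.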
2526pub unsafe fn copy<'a>(
2527 dest: &mut MaybeUninit<InflateStream<'a>>,
2528 source: &InflateStream<'a>,
2529) -> ReturnCode {
2530 if source.next_out.is_null() || (source.next_in.is_null() && source.avail_in != 0) {
2531 return ReturnCode::StreamError;
2532 }
2533
2534 unsafe { core::ptr::copy_nonoverlapping(source, dest.as_mut_ptr(), 1) };
2537
2538 let allocs = InflateAllocOffsets::new();
2540 debug_assert_eq!(allocs.total_size, source.state.total_allocation_size);
2541
2542 let Some(allocation_start) = source.alloc.allocate_slice_raw::<u8>(allocs.total_size) else {
2543 return ReturnCode::MemError;
2544 };
2545
2546 let address = allocation_start.as_ptr() as usize;
2547 let align_offset = address.next_multiple_of(64) - address;
2548 let buf = unsafe { allocation_start.as_ptr().add(align_offset) };
2549
2550 let window_allocation = unsafe { buf.add(allocs.window_pos) };
2551 let window = unsafe {
2552 source
2553 .state
2554 .window
2555 .clone_to(window_allocation, (1 << MAX_WBITS) + 64)
2556 };
2557
2558 let copy = unsafe { buf.add(allocs.state_pos).cast::<State>() };
2559 unsafe { core::ptr::copy_nonoverlapping(source.state, copy, 1) };
2560
2561 let field_ptr = unsafe { core::ptr::addr_of_mut!((*copy).window) };
2562 unsafe { core::ptr::write(field_ptr, window) };
2563
2564 let field_ptr = unsafe { core::ptr::addr_of_mut!((*copy).allocation_start) };
2565 unsafe { core::ptr::write(field_ptr, allocation_start.as_ptr()) };
2566
2567 let field_ptr = unsafe { core::ptr::addr_of_mut!((*dest.as_mut_ptr()).state) };
2568 unsafe { core::ptr::write(field_ptr as *mut *mut State, copy) };
2569
2570 ReturnCode::Ok
2571}
2572
pub fn undermine(stream: &mut InflateStream, subvert: i32) -> ReturnCode {
    // `!subvert` on an i32 is a bitwise NOT, which is non-zero for every value except -1,
    // so `(!subvert) != 0` would leave the stream "sane" even when subversion is requested.
    // The intent (C's `state->sane = !subvert`) is: sane if and only if `subvert` is zero.
    stream.state.flags.update(Flags::SANE, subvert == 0);
2575
2576 ReturnCode::Ok
2577}
2578
2579pub fn validate(stream: &mut InflateStream, check: bool) -> ReturnCode {
2581 if check && stream.state.wrap != 0 {
2582 stream.state.wrap |= 0b100;
2583 } else {
2584 stream.state.wrap &= !0b100;
2585 }
2586
2587 ReturnCode::Ok
2588}
2589
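/// Reports the current decoding position, like zlib's `inflateMark`: `state.back` is
/// packed into the upper 16 bits, and the lower bits hold the bytes still pending for
/// the current stored block (`CopyBlock`) or match (`Match`).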
2590pub fn mark(stream: &InflateStream) -> c_long {
2591 if stream.next_out.is_null() || (stream.next_in.is_null() && stream.avail_in != 0) {
2592 return c_long::MIN;
2593 }
2594
2595 let state = &stream.state;
2596
2597 let length = match state.mode {
2598 Mode::CopyBlock => state.length,
2599 Mode::Match => state.was - state.length,
2600 _ => 0,
2601 };
2602
2603 (((state.back as c_long) as c_ulong) << 16) as c_long + length as c_long
2604}
2605
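/// Supplies a preset dictionary, like zlib's `inflateSetDictionary`. For zlib-wrapped
/// streams the dictionary's Adler-32 must match the checksum announced in the header.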
2606pub fn set_dictionary(stream: &mut InflateStream, dictionary: &[u8]) -> ReturnCode {
2607 if stream.state.wrap != 0 && !matches!(stream.state.mode, Mode::Dict) {
2608 return ReturnCode::StreamError;
2609 }
2610
2611 if matches!(stream.state.mode, Mode::Dict) {
2613 let dictid = adler32(1, dictionary);
2614
2615 if dictid != stream.state.checksum {
2616 return ReturnCode::DataError;
2617 }
2618 }
2619
2620 stream.state.window.extend(
2621 dictionary,
2622 stream.state.gzip_flags,
2623 false,
2624 &mut stream.state.checksum,
2625 &mut stream.state.crc_fold,
2626 );
2627
2628 stream.state.flags.update(Flags::HAVE_DICT, true);
2629
2630 ReturnCode::Ok
2631}
2632
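/// Frees all memory held by the stream (window and state share one allocation) and
/// clears the `state` pointer, like zlib's `inflateEnd`.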
2633pub fn end<'a>(stream: &'a mut InflateStream<'_>) -> &'a mut z_stream {
2634 let alloc = stream.alloc;
2635 let allocation_start = stream.state.allocation_start;
2636 let total_allocation_size = stream.state.total_allocation_size;
2637
2638 let mut window = Window::empty();
2639 core::mem::swap(&mut window, &mut stream.state.window);
2640
2641 let stream = stream.as_z_stream_mut();
2642 let _ = core::mem::replace(&mut stream.state, core::ptr::null_mut());
2643
2644 unsafe { alloc.deallocate(allocation_start, total_allocation_size) };
2645
2646 stream
2647}
2648
2649pub unsafe fn get_header<'a>(
2658 stream: &mut InflateStream<'a>,
2659 head: Option<&'a mut gz_header>,
2660) -> ReturnCode {
2661 if (stream.state.wrap & 2) == 0 {
2662 return ReturnCode::StreamError;
2663 }
2664
2665 stream.state.head = head.map(|head| {
2666 head.done = 0;
2667 head
2668 });
2669 ReturnCode::Ok
2670}
2671
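/// Copies the current sliding-window contents (the would-be dictionary) into
/// `dictionary`, oldest bytes first, and returns the number of bytes available, like
/// zlib's `inflateGetDictionary`.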
2672pub unsafe fn get_dictionary(stream: &InflateStream<'_>, dictionary: *mut u8) -> usize {
2676 let whave = stream.state.window.have();
2677 let wnext = stream.state.window.next();
2678
2679 if !dictionary.is_null() {
2680 unsafe {
2681 core::ptr::copy_nonoverlapping(
2682 stream.state.window.as_ptr().add(wnext),
2683 dictionary,
2684 whave - wnext,
2685 );
2686
2687 core::ptr::copy_nonoverlapping(
2688 stream.state.window.as_ptr(),
2689 dictionary.add(whave).sub(wnext).cast(),
2690 wnext,
2691 );
2692 }
2693 }
2694
2695 stream.state.window.have()
2696}
2697
2698#[cfg(test)]
2699mod tests {
2700 use super::*;
2701
2702 #[test]
2703 fn uncompress_buffer_overflow() {
2704 let mut output = [0; 1 << 13];
2705 let input = [
2706 72, 137, 58, 0, 3, 39, 255, 255, 255, 255, 255, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
2707 14, 14, 184, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 184, 14, 14,
2708 14, 14, 14, 14, 14, 63, 14, 14, 14, 14, 14, 14, 14, 14, 184, 14, 14, 255, 14, 103, 14,
2709 14, 14, 14, 14, 14, 61, 14, 255, 255, 63, 14, 14, 14, 14, 14, 14, 14, 14, 184, 14, 14,
2710 255, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 6, 14, 14, 14, 14, 14, 14, 14, 14, 71,
2711 4, 137, 106,
2712 ];
2713
2714 let config = InflateConfig { window_bits: 15 };
2715
2716 let (_decompressed, err) = uncompress_slice(&mut output, &input, config);
2717 assert_eq!(err, ReturnCode::DataError);
2718 }
2719}