// streaming_libdeflate_rs/streams/deflate_chunked_buffer_output.rs

1use crate::{DeflateOutput, OutStreamResult};
2use crc32fast::Hasher;
3use nightly_quirks::utils::NightlyUtils;
4
/// Buffered sink for decompressed output: bytes are written through raw
/// pointers into `buffer`, then flushed to a caller-provided callback in
/// chunks, while a running CRC-32 and a byte count are maintained.
pub struct DeflateChunkedBufferOutput<'a> {
    /// Backing storage, sized `buf_size + MAX_LOOK_BACK + OVERWRITE_MAX`.
    /// The first `MAX_LOOK_BACK` bytes hold a copy of the tail of the last
    /// flushed chunk — presumably so deflate back-references can reach across
    /// flush boundaries (see the tail copy in `flush_buffer`).
    buffer: Box<[u8]>,
    /// Upper bound checked by `has_writable_length`; `OVERWRITE_MAX` bytes of
    /// slack remain in the allocation past this pointer.
    last_usable_ptr: *mut u8,
    /// Position of the next byte to write inside `buffer`.
    current_ptr: *mut u8,
    /// Running CRC-32 over every byte handed to `func`.
    crc32: Hasher,
    /// Total bytes handed to `func` since the last `final_flush`.
    written: usize,
    /// Consumer of flushed chunks; returning `Err(())` makes the flush fail.
    func: Box<dyn FnMut(&[u8]) -> Result<(), ()> + 'a>,
}
13
14impl<'a> DeflateChunkedBufferOutput<'a> {
15    pub fn new<F: FnMut(&[u8]) -> Result<(), ()> + 'a>(write_func: F, buf_size: usize) -> Self {
16        unsafe {
17            let mut buffer = NightlyUtils::box_new_uninit_slice_assume_init(
18                buf_size + Self::MAX_LOOK_BACK + Self::OVERWRITE_MAX,
19            );
20
21            let buffer_start = buffer.as_mut_ptr();
22
23            Self {
24                buffer,
25                last_usable_ptr: buffer_start.add(buf_size + Self::MAX_LOOK_BACK),
26                current_ptr: buffer_start.add(Self::MAX_LOOK_BACK),
27                crc32: Hasher::new(),
28                written: 0,
29                func: Box::new(write_func),
30            }
31        }
32    }
33
34    fn flush_buffer(&mut self) -> bool {
35        let last_index = unsafe { self.current_ptr.offset_from(self.buffer.as_ptr()) } as usize;
36
37        self.crc32
38            .update(&self.buffer[Self::MAX_LOOK_BACK..last_index]);
39        if (self.func)(&self.buffer[Self::MAX_LOOK_BACK..last_index]).is_err() {
40            return false;
41        }
42        self.written += last_index - Self::MAX_LOOK_BACK;
43
44        unsafe {
45            std::ptr::copy(
46                self.buffer.as_ptr().add(last_index - Self::MAX_LOOK_BACK),
47                self.buffer.as_mut_ptr(),
48                Self::MAX_LOOK_BACK,
49            );
50        }
51        self.current_ptr = unsafe { self.buffer.as_mut_ptr().add(Self::MAX_LOOK_BACK) };
52        true
53    }
54}
55
56impl<'a> DeflateOutput for DeflateChunkedBufferOutput<'a> {
57    #[inline(always)]
58    fn has_writable_length(&mut self, length: usize) -> bool {
59        unsafe { self.current_ptr.add(length) <= self.last_usable_ptr }
60    }
61
62    fn flush_ensure_length(&mut self, length: usize) -> bool {
63        if !self.has_writable_length(length) {
64            if !self.flush_buffer() {
65                return false;
66            }
67        }
68        true
69    }
70
71    #[inline(always)]
72    fn get_output_ptr(&mut self) -> *mut u8 {
73        self.current_ptr
74    }
75
76    #[inline(always)]
77    unsafe fn set_output_ptr(&mut self, ptr: *mut u8) {
78        self.current_ptr = ptr;
79    }
80
81    // #[inline(always)]
82    // fn copy_forward(&mut self, prev_offset: usize, length: usize) -> bool {
83    //     if self.buffer.len() - self.position <= length {
84    //         if !self.flush_buffer(length) {
85    //             return false;
86    //         }
87    //     }
88
89    //     if prev_offset > self.position {
90    //         return false;
91    //     }
92
93    //     unsafe {
94    //         let dest = self.buffer.as_mut_ptr().add(self.position);
95    //         copy_rolling(
96    //             dest,
97    //             dest.add(length),
98    //             prev_offset,
99    //             self.get_available_buffer().len() >= (length + 3 * size_of::<usize>()),
100    //         );
101    //     }
102    //     self.position += length;
103
104    //     true
105    // }
106
107    #[inline(always)]
108    fn final_flush(&mut self) -> Result<OutStreamResult, ()> {
109        self.flush_buffer();
110        self.current_ptr = unsafe { self.buffer.as_mut_ptr().add(Self::MAX_LOOK_BACK) };
111
112        let result = OutStreamResult {
113            written: self.written,
114            crc32: self.crc32.clone().finalize(),
115        };
116
117        self.crc32 = Hasher::new();
118        self.written = 0;
119        Ok(result)
120    }
121}