// compression_core/util.rs

1use core::mem::MaybeUninit;
2
/// Compile-time assertion helper: instantiating `_assert_send::<T>()` fails to
/// compile unless `T: Send`.
pub const fn _assert_send<T: Send>() {}
/// Compile-time assertion helper: instantiating `_assert_sync::<T>()` fails to
/// compile unless `T: Sync`.
pub const fn _assert_sync<T: Sync>() {}
5
/// A byte buffer that tracks how much of it has been "written" (consumed or
/// produced), exposing the written prefix and unwritten suffix separately.
#[derive(Debug, Default)]
pub struct PartialBuffer<B> {
    // Underlying storage; any `AsRef<[u8]>` (optionally also `AsMut<[u8]>`).
    buffer: B,
    // Number of bytes from the start of `buffer` considered written.
    // Invariant: index <= buffer.as_ref().len().
    index: usize,
}
11
12impl<B: AsRef<[u8]>> PartialBuffer<B> {
13    pub fn new(buffer: B) -> Self {
14        Self { buffer, index: 0 }
15    }
16
17    pub fn written(&self) -> &[u8] {
18        &self.buffer.as_ref()[..self.index]
19    }
20
21    /// Convenient method for `.writen().len()`
22    pub fn written_len(&self) -> usize {
23        self.index
24    }
25
26    pub fn unwritten(&self) -> &[u8] {
27        &self.buffer.as_ref()[self.index..]
28    }
29
30    pub fn advance(&mut self, amount: usize) {
31        self.index += amount;
32        debug_assert!(self.index <= self.buffer.as_ref().len());
33    }
34
35    pub fn get_mut(&mut self) -> &mut B {
36        &mut self.buffer
37    }
38
39    pub fn into_inner(self) -> B {
40        self.buffer
41    }
42
43    pub fn reset(&mut self) {
44        self.index = 0;
45    }
46}
47
48impl<B: AsRef<[u8]> + AsMut<[u8]>> PartialBuffer<B> {
49    pub fn unwritten_mut(&mut self) -> &mut [u8] {
50        &mut self.buffer.as_mut()[self.index..]
51    }
52
53    pub fn copy_unwritten_from<C: AsRef<[u8]>>(&mut self, other: &mut PartialBuffer<C>) -> usize {
54        let len = self.unwritten().len().min(other.unwritten().len());
55
56        self.unwritten_mut()[..len].copy_from_slice(&other.unwritten()[..len]);
57
58        self.advance(len);
59        other.advance(len);
60        len
61    }
62}
63
64impl<B: AsRef<[u8]> + Default> PartialBuffer<B> {
65    pub fn take(&mut self) -> Self {
66        std::mem::take(self)
67    }
68}
69
70impl<B: AsRef<[u8]> + AsMut<[u8]>> From<B> for PartialBuffer<B> {
71    fn from(buffer: B) -> Self {
72        Self::new(buffer)
73    }
74}
75
/// Write buffer for compression-codecs.
///
/// Currently it only supports initialized buffer, but will support uninitialized
/// buffer soon.
///
/// # Layout
///
/// ```text
/// |                                       buffer                                      |
/// | written and initialized | unwritten but initialized | unwritten and uninitialized |
/// ```
#[derive(Debug)]
pub struct WriteBuffer<'a> {
    // Backing storage; bytes at `initialized..` may be uninitialized.
    buffer: &'a mut [MaybeUninit<u8>],
    // End of the written region. Invariant: index <= initialized
    // (written bytes are always initialized).
    index: usize,
    // End of the initialized region. Invariant: initialized <= buffer.len();
    // bytes in `..initialized` are never de-initialized.
    initialized: usize,
}
93
impl<'a> WriteBuffer<'a> {
    /// Creates a `WriteBuffer` over a fully-initialized slice; the whole
    /// buffer is immediately usable without zero-filling.
    pub fn new_initialized(buffer: &'a mut [u8]) -> Self {
        Self {
            initialized: buffer.len(),
            // Safety: with initialized set to len of the buffer,
            // `WriteBuffer` would treat it as a `&mut [u8]`.
            buffer: unsafe { &mut *(buffer as *mut [u8] as *mut _) },
            index: 0,
        }
    }

    /// Creates a `WriteBuffer` over possibly-uninitialized storage; no byte is
    /// considered initialized yet.
    pub fn new_uninitialized(buffer: &'a mut [MaybeUninit<u8>]) -> Self {
        Self {
            buffer,
            index: 0,
            initialized: 0,
        }
    }

    /// Total size of the underlying buffer in bytes.
    pub fn capacity(&self) -> usize {
        self.buffer.len()
    }

    /// Raw pointer to the *start* of the buffer (not the unwritten part).
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.buffer.as_mut_ptr() as *mut _
    }

    /// Number of bytes from the start of the buffer known to be initialized.
    pub fn initialized_len(&self) -> usize {
        self.initialized
    }

    /// The written prefix of the buffer.
    pub fn written(&self) -> &[u8] {
        debug_assert!(self.index <= self.initialized);

        // SAFETY: written bytes are always initialized (index <= initialized),
        // so viewing `buffer[..index]` as `&[u8]` is sound.
        unsafe { &*(&self.buffer[..self.index] as *const _ as *const [u8]) }
    }

    /// Convenient method for `.written().len()`
    pub fn written_len(&self) -> usize {
        self.index
    }

    /// Buffer has no spare space to write any data
    pub fn has_no_spare_space(&self) -> bool {
        self.index == self.buffer.len()
    }

    /// Initialize all uninitialized, unwritten part to initialized, unwritten part
    /// Return all unwritten part
    pub fn initialize_unwritten(&mut self) -> &mut [u8] {
        // Zero-fill the uninitialized tail so the entire buffer becomes
        // initialized.
        self.buffer[self.initialized..]
            .iter_mut()
            .for_each(|maybe_uninit| {
                maybe_uninit.write(0);
            });
        self.initialized = self.buffer.len();

        // SAFETY: the whole buffer was just initialized above, so the
        // unwritten suffix may be handed out as `&mut [u8]`.
        unsafe { &mut *(&mut self.buffer[self.index..] as *mut _ as *mut [u8]) }
    }

    /// Advance written index within initialized part.
    ///
    /// Note that try to advance into uninitialized part would panic.
    pub fn advance(&mut self, amount: usize) {
        debug_assert!(self.index + amount <= self.buffer.len());
        debug_assert!(self.index + amount <= self.initialized);

        self.index += amount;
    }

    /// Resets the written index to 0; initialized bytes stay initialized.
    pub fn reset(&mut self) {
        self.index = 0;
    }

    /// Returns a mutable reference to the unwritten part of the buffer without
    /// ensuring that it has been fully initialized.
    ///
    /// # Safety
    ///
    /// The caller must not de-initialize portions of the buffer that have already
    /// been initialized.
    ///
    /// This includes any bytes in the region returned by this function.
    pub unsafe fn unwritten_mut(&mut self) -> &mut [MaybeUninit<u8>] {
        &mut self.buffer[self.index..]
    }

    /// Asserts that the first `n` unfilled bytes of the buffer are initialized.
    ///
    /// [`WriteBuffer`] assumes that bytes are never de-initialized, so this method
    /// does nothing when called with fewer bytes than are already known to be initialized.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `n` unfilled bytes of the buffer have already been initialized.
    pub unsafe fn assume_init(&mut self, n: usize) {
        debug_assert!(self.index <= (self.initialized + n));
        debug_assert!((self.initialized + n) <= self.buffer.len());

        self.initialized += n;
    }

    /// Convenient function combining [`WriteBuffer::assume_init`] and [`WriteBuffer::advance`].
    ///
    /// # Safety
    ///
    /// The caller must ensure that `n` unfilled bytes of the buffer have already been initialized.
    pub unsafe fn assume_init_and_advance(&mut self, n: usize) {
        debug_assert!(self.index + n <= self.buffer.len());

        self.index += n;
        // Preserve the invariant index <= initialized without shrinking the
        // already-initialized region.
        self.initialized = self.initialized.max(self.index);
    }

    /// Convenient function combining [`WriteBuffer::assume_init`] and [`WriteBuffer::advance`],
    /// works similar to [`Vec::set_len`].
    ///
    /// # Safety
    ///
    /// The caller must ensure that first `n` bytes of the buffer have already been initialized.
    pub unsafe fn set_written_and_initialized_len(&mut self, n: usize) {
        debug_assert!(n <= self.buffer.len());

        self.index = n;
        self.initialized = self.initialized.max(n);
    }

    /// Copies as many bytes as fit from `other`'s unwritten part into this
    /// buffer's unwritten part, advancing both; returns the amount copied.
    pub fn copy_unwritten_from<C: AsRef<[u8]>>(&mut self, other: &mut PartialBuffer<C>) -> usize {
        // Monomorphization-friendly inner fn: does the byte copy for any input
        // slice, independent of the generic parameter `C`.
        fn inner(this: &mut WriteBuffer<'_>, input: &[u8]) -> usize {
            // Safety: We will never ever write uninitialized bytes into it
            let out = unsafe { this.unwritten_mut() };

            let len = out.len().min(input.len());

            out[..len]
                .iter_mut()
                .zip(&input[..len])
                .for_each(|(maybe_uninit, byte)| {
                    maybe_uninit.write(*byte);
                });

            // Safety: We have written `len` bytes of initialized data into it
            unsafe { this.assume_init_and_advance(len) };
            len
        }

        let len = inner(self, other.unwritten());
        other.advance(len);

        len
    }
}
245}