compression_core/util.rs

// Helpers for compile-time assertions that a type is `Send` / `Sync`.
pub const fn _assert_send<T: Send>() {}
pub const fn _assert_sync<T: Sync>() {}

/// A byte buffer wrapper that tracks how much of the underlying buffer has
/// already been written, exposing the written prefix and the unwritten
/// remainder separately.
#[derive(Debug, Default)]
pub struct PartialBuffer<B> {
    buffer: B,
    index: usize,
}

impl<B: AsRef<[u8]>> PartialBuffer<B> {
    pub fn new(buffer: B) -> Self {
        Self { buffer, index: 0 }
    }

    /// The part of the buffer that has already been written.
    pub fn written(&self) -> &[u8] {
        &self.buffer.as_ref()[..self.index]
    }

    /// Convenience method for `.written().len()`
    pub fn written_len(&self) -> usize {
        self.index
    }

    /// The part of the buffer that has not been written yet.
    pub fn unwritten(&self) -> &[u8] {
        &self.buffer.as_ref()[self.index..]
    }

    /// Advance the written index by `amount` bytes.
    pub fn advance(&mut self, amount: usize) {
        self.index += amount;
        debug_assert!(self.index <= self.buffer.as_ref().len());
    }

    /// Get a mutable reference to the underlying buffer.
    pub fn get_mut(&mut self) -> &mut B {
        &mut self.buffer
    }

    /// Consume `self` and return the underlying buffer.
    pub fn into_inner(self) -> B {
        self.buffer
    }

    /// Reset the written index back to the start of the buffer.
    pub fn reset(&mut self) {
        self.index = 0;
    }
}

impl<B: AsRef<[u8]> + AsMut<[u8]>> PartialBuffer<B> {
    /// Mutable access to the part of the buffer that has not been written yet.
    pub fn unwritten_mut(&mut self) -> &mut [u8] {
        &mut self.buffer.as_mut()[self.index..]
    }

    /// Copy as many bytes as fit from `other`'s unwritten part into this
    /// buffer's unwritten part, advancing both buffers and returning the
    /// number of bytes copied.
    pub fn copy_unwritten_from<C: AsRef<[u8]>>(&mut self, other: &mut PartialBuffer<C>) -> usize {
        let len = self.unwritten().len().min(other.unwritten().len());

        self.unwritten_mut()[..len].copy_from_slice(&other.unwritten()[..len]);

        self.advance(len);
        other.advance(len);
        len
    }
}

impl<B: AsRef<[u8]> + Default> PartialBuffer<B> {
    /// Take the current `PartialBuffer`, leaving a default (empty) one in its
    /// place.
    pub fn take(&mut self) -> Self {
        std::mem::take(self)
    }
}

impl<B: AsRef<[u8]> + AsMut<[u8]>> From<B> for PartialBuffer<B> {
    fn from(buffer: B) -> Self {
        Self::new(buffer)
    }
}

/// Write buffer for compression-codecs.
///
/// Currently it only supports initialized buffers, but it will support
/// uninitialized buffers soon.
///
/// # Layout
///
/// ```text
/// |                                      buffer                                       |
/// | written and initialized | unwritten but initialized | unwritten and uninitialized |
/// ```
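///
/// A caller typically grabs spare space with `initialize_unwritten`, writes
/// output bytes into it (for example via `copy_unwritten_from`), and then
/// records progress with `advance`; see the test module at the bottom of this
/// file for a small usage sketch.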
#[derive(Debug)]
pub struct WriteBuffer<'a> {
    buffer: &'a mut [u8],
    index: usize,
}

impl<'a> WriteBuffer<'a> {
    pub fn new_initialized(buffer: &'a mut [u8]) -> Self {
        Self { buffer, index: 0 }
    }

    /// The part of the buffer that has already been written.
    pub fn written(&self) -> &[u8] {
        &self.buffer[..self.index]
    }

    /// Convenience method for `.written().len()`
    pub fn written_len(&self) -> usize {
        self.index
    }

    /// Returns `true` if the buffer has no spare space left to write any data.
    pub fn has_no_spare_space(&self) -> bool {
        self.index == self.buffer.len()
    }

    /// Initialize the uninitialized, unwritten part of the buffer and return
    /// the whole unwritten part.
    pub fn initialize_unwritten(&mut self) -> &mut [u8] {
        &mut self.buffer[self.index..]
    }

    /// Advance the written index within the initialized part.
    ///
    /// Note that trying to advance into the uninitialized part will panic.
    pub fn advance(&mut self, amount: usize) {
        self.index += amount;
        debug_assert!(self.index <= self.buffer.len());
    }

    /// Reset the written index back to the start of the buffer.
    pub fn reset(&mut self) {
        self.index = 0;
    }

    /// Copy as many bytes as fit from `other`'s unwritten part into this
    /// buffer's spare space, advancing both buffers and returning the number
    /// of bytes copied.
    pub fn copy_unwritten_from<C: AsRef<[u8]>>(&mut self, other: &mut PartialBuffer<C>) -> usize {
        let len = self
            .initialize_unwritten()
            .len()
            .min(other.unwritten().len());

        self.initialize_unwritten()[..len].copy_from_slice(&other.unwritten()[..len]);

        self.advance(len);
        other.advance(len);
        len
    }
}
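
// A minimal usage sketch of `PartialBuffer` and `WriteBuffer`, added here as
// illustration only: the test names, buffer sizes, and byte values below are
// made up and are not part of the original module.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn partial_buffer_copy_unwritten() {
        // Source with 4 unwritten bytes, destination with room for 2.
        let mut input = PartialBuffer::new([1u8, 2, 3, 4]);
        let mut output = PartialBuffer::new([0u8; 2]);

        // Copies min(unwritten of self, unwritten of other) = 2 bytes and
        // advances both sides.
        assert_eq!(output.copy_unwritten_from(&mut input), 2);
        assert_eq!(output.written(), &[1, 2]);
        assert_eq!(input.written_len(), 2);
        assert_eq!(input.unwritten(), &[3, 4]);
    }

    #[test]
    fn write_buffer_copy_unwritten() {
        let mut backing = [0u8; 3];
        let mut out = WriteBuffer::new_initialized(&mut backing);
        let mut input = PartialBuffer::new([9u8, 8, 7, 6]);

        // Only 3 bytes fit into the write buffer.
        assert_eq!(out.copy_unwritten_from(&mut input), 3);
        assert_eq!(out.written(), &[9, 8, 7]);
        assert!(out.has_no_spare_space());
        // The last input byte is left unconsumed.
        assert_eq!(input.unwritten(), &[6]);
    }
}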