musli_common/fixed.rs

//! Fixed capacity containers.

use core::fmt;
use core::mem::{self, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr;
use core::slice;

use musli::{Buf, Context};

use crate::writer::Writer;

/// A fixed-size bytes storage which keeps track of how much has been initialized.
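///
/// # Examples
///
/// A minimal usage sketch; the `musli_common::fixed` import path is an
/// assumption about how this module is exposed:
///
/// ```
/// use musli_common::fixed::FixedBytes;
///
/// let mut bytes = FixedBytes::<4>::new();
/// assert!(bytes.is_empty());
///
/// assert!(bytes.push(1));
/// assert!(bytes.extend_from_slice(&[2, 3]));
/// assert_eq!(bytes.as_slice(), &[1, 2, 3]);
/// assert_eq!(bytes.remaining(), 1);
/// ```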
pub struct FixedBytes<const N: usize> {
    /// Data storage.
    data: [MaybeUninit<u8>; N],
    /// How many bytes have been initialized.
    init: usize,
}

impl<const N: usize> FixedBytes<N> {
    /// Construct a new fixed bytes array storage.
    #[inline]
    pub const fn new() -> Self {
        Self {
            // SAFETY: An array of `MaybeUninit<u8>` does not require
            // initialization; `MaybeUninit::uninit_array` is not yet stable.
            data: unsafe { MaybeUninit::<[MaybeUninit<u8>; N]>::uninit().assume_init() },
            init: 0,
        }
    }

    /// Construct a fixed bytes container, asserting that the requested
    /// runtime `capacity` does not exceed `N`.
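    ///
    /// # Panics
    ///
    /// Panics if `capacity` is larger than `N`.
    ///
    /// # Examples
    ///
    /// A small sketch; the `musli_common::fixed` import path is an assumption:
    ///
    /// ```
    /// use musli_common::fixed::FixedBytes;
    ///
    /// let bytes = FixedBytes::<16>::with_capacity(8);
    /// assert!(bytes.is_empty());
    /// ```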
    pub fn with_capacity(capacity: usize) -> Self {
        assert!(
            capacity <= N,
            "Requested capacity {capacity} is larger than {N}"
        );
        Self::new()
    }

    /// Get the length of the collection.
    #[inline]
    pub const fn len(&self) -> usize {
        self.init
    }

    /// Test if the current container is empty.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.init == 0
    }

    /// Clear the [FixedBytes] container.
    #[inline]
    pub fn clear(&mut self) {
        self.init = 0;
    }

    /// Get the remaining capacity of the [FixedBytes].
    #[inline]
    pub const fn remaining(&self) -> usize {
        N.saturating_sub(self.init)
    }

    /// Coerce into the underlying bytes if all of them have been initialized.
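    ///
    /// # Examples
    ///
    /// A small sketch; the `musli_common::fixed` import path is an assumption:
    ///
    /// ```
    /// use musli_common::fixed::FixedBytes;
    ///
    /// let mut bytes = FixedBytes::<2>::new();
    /// assert!(bytes.push(1));
    /// assert!(bytes.into_bytes().is_none());
    ///
    /// let mut bytes = FixedBytes::<2>::new();
    /// assert!(bytes.extend_from_slice(&[1, 2]));
    /// assert_eq!(bytes.into_bytes(), Some([1, 2]));
    /// ```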
    #[inline]
    pub fn into_bytes(self) -> Option<[u8; N]> {
        if self.init == N {
            // SAFETY: All of the bytes in the sequence have been initialized
            // and can be safely transmuted.
            //
            // The method of transmuting comes from the implementation of
            // `MaybeUninit::array_assume_init`, which is not yet stable.
            unsafe { Some((&self.data as *const _ as *const [u8; N]).read()) }
        } else {
            None
        }
    }

    /// Coerce into the slice of initialized memory which is present.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        if self.init == 0 {
            return &[];
        }

        // SAFETY: `self.init` accounts for the number of bytes that have been
        // initialized.
        unsafe { core::slice::from_raw_parts(self.data.as_ptr().cast(), self.init) }
    }

    /// Coerce into the mutable slice of initialized memory which is present.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        if self.init == 0 {
            return &mut [];
        }

        // SAFETY: `self.init` accounts for the number of bytes that have been
        // initialized.
        unsafe { core::slice::from_raw_parts_mut(self.data.as_mut_ptr().cast(), self.init) }
    }

    /// Try and push a single byte.
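    ///
    /// Returns `false` if the container is already full.
    ///
    /// # Examples
    ///
    /// A small sketch; the `musli_common::fixed` import path is an assumption:
    ///
    /// ```
    /// use musli_common::fixed::FixedBytes;
    ///
    /// let mut bytes = FixedBytes::<1>::new();
    /// assert!(bytes.push(42));
    /// assert!(!bytes.push(43));
    /// assert_eq!(bytes.as_slice(), &[42]);
    /// ```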
    #[inline]
    pub fn push(&mut self, value: u8) -> bool {
        if N.saturating_sub(self.init) == 0 {
            return false;
        }

        // SAFETY: We just checked that there is room for at least one more
        // byte at offset `self.init`.
        unsafe {
            self.data
                .as_mut_ptr()
                .cast::<u8>()
                .add(self.init)
                .write(value)
        }

        self.init += 1;
        true
    }

    /// Try and extend from the given slice.
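    ///
    /// Returns `false` and writes nothing if `source` does not fit in the
    /// remaining capacity.
    ///
    /// # Examples
    ///
    /// A small sketch; the `musli_common::fixed` import path is an assumption:
    ///
    /// ```
    /// use musli_common::fixed::FixedBytes;
    ///
    /// let mut bytes = FixedBytes::<4>::new();
    /// assert!(bytes.extend_from_slice(b"abc"));
    /// assert!(!bytes.extend_from_slice(b"de"));
    /// assert_eq!(bytes.as_slice(), b"abc");
    /// ```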
    #[inline]
    pub fn extend_from_slice(&mut self, source: &[u8]) -> bool {
        if source.len() > N.saturating_sub(self.init) {
            return false;
        }

        // SAFETY: We just checked that `source` fits in the remaining
        // capacity, and the destination cannot overlap with `source` since we
        // hold `&mut self`.
        unsafe {
            let dst = (self.data.as_mut_ptr() as *mut u8).add(self.init);
            ptr::copy_nonoverlapping(source.as_ptr(), dst, source.len());
        }

        self.init = self.init.wrapping_add(source.len());
        true
    }

    /// Write the given bytes, reporting an error through the context if they
    /// do not fit in the remaining capacity.
    #[inline]
    pub fn write_bytes<C>(&mut self, cx: &C, source: &[u8]) -> Result<(), C::Error>
    where
        C: ?Sized + Context,
    {
        if !self.extend_from_slice(source) {
            return Err(cx.message(FixedBytesOverflow {
                at: self.init,
                additional: source.len(),
                capacity: N,
            }));
        }

        Ok(())
    }
}

impl<const N: usize> Deref for FixedBytes<N> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<const N: usize> DerefMut for FixedBytes<N> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}

impl<const N: usize> Default for FixedBytes<N> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<const N: usize> Writer for FixedBytes<N> {
    type Mut<'this> = &'this mut Self where Self: 'this;

    #[inline]
    fn borrow_mut(&mut self) -> Self::Mut<'_> {
        self
    }

    #[inline]
    fn write_buffer<C, B>(&mut self, cx: &C, buffer: B) -> Result<(), C::Error>
    where
        C: ?Sized + Context,
        B: Buf,
    {
        // SAFETY: the buffer never outlives this function call.
        self.write_bytes(cx, buffer.as_slice())
    }

    #[inline]
    fn write_bytes<C>(&mut self, cx: &C, bytes: &[u8]) -> Result<(), C::Error>
    where
        C: ?Sized + Context,
    {
        FixedBytes::write_bytes(self, cx, bytes)?;
        cx.advance(bytes.len());
        Ok(())
    }
}

/// Error raised when a write exceeds the remaining capacity of a
/// [FixedBytes].
#[derive(Debug)]
#[allow(missing_docs)]
#[non_exhaustive]
pub(crate) struct FixedBytesOverflow {
    at: usize,
    additional: usize,
    capacity: usize,
}

impl fmt::Display for FixedBytesOverflow {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let FixedBytesOverflow {
            at,
            additional,
            capacity,
        } = self;

        write!(
            f,
            "Tried to write {additional} bytes at {at} with capacity {capacity}"
        )
    }
}

/// An error raised when we are at capacity.
#[non_exhaustive]
pub(crate) struct CapacityError;

/// A fixed capacity vector allocated on the stack.
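///
/// # Examples
///
/// A minimal crate-internal usage sketch; it is not compiled as a doctest
/// since the type is `pub(crate)`:
///
/// ```ignore
/// let mut vec = FixedVec::<u32, 2>::new();
/// assert!(vec.try_push(1).is_ok());
/// assert!(vec.try_push(2).is_ok());
/// assert!(vec.try_push(3).is_err());
/// assert_eq!(vec.as_slice(), &[1, 2]);
/// assert_eq!(vec.pop(), Some(2));
/// ```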
pub(crate) struct FixedVec<T, const N: usize> {
    data: [MaybeUninit<T>; N],
    len: usize,
}

impl<T, const N: usize> FixedVec<T, N> {
    /// Construct a new empty fixed vector.
    pub(crate) const fn new() -> FixedVec<T, N> {
        // SAFETY: An uninitialized `[MaybeUninit<T>; N]` does not require
        // initialization, so `assume_init` is sound here.
        unsafe {
            FixedVec {
                data: MaybeUninit::uninit().assume_init(),
                len: 0,
            }
        }
    }

    #[inline]
    pub(crate) fn as_ptr(&self) -> *const T {
        self.data.as_ptr() as *const T
    }

    #[inline]
    pub(crate) fn as_mut_ptr(&mut self) -> *mut T {
        self.data.as_mut_ptr() as *mut T
    }

    #[inline]
    pub(crate) fn as_slice(&self) -> &[T] {
        // SAFETY: The first `self.len` elements are initialized.
        unsafe { slice::from_raw_parts(self.as_ptr(), self.len) }
    }

    #[inline]
    pub(crate) fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: The first `self.len` elements are initialized.
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
    }

    /// Try to push an element onto the fixed vector.
    pub(crate) fn try_push(&mut self, element: T) -> Result<(), CapacityError> {
        if self.len >= N {
            return Err(CapacityError);
        }

        // SAFETY: We just checked that there is room for one more element at
        // index `self.len`.
        unsafe {
            ptr::write(self.as_mut_ptr().wrapping_add(self.len), element);
            self.len += 1;
        }

        Ok(())
    }

    /// Pop the last element in the fixed vector.
    pub(crate) fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }

        // SAFETY: We just checked that the vector is non-empty, so the element
        // at `new_len` is initialized and will not be read again.
        unsafe {
            let new_len = self.len - 1;
            self.len = new_len;
            Some(ptr::read(self.as_ptr().wrapping_add(new_len)))
        }
    }

    /// Clear the fixed vector, dropping any initialized elements.
    pub(crate) fn clear(&mut self) {
        if self.len == 0 {
            return;
        }

        let len = mem::take(&mut self.len);

        if mem::needs_drop::<T>() {
            // SAFETY: The first `len` elements were initialized, and `self.len`
            // has already been reset to zero so they cannot be observed again.
            unsafe {
                let tail = slice::from_raw_parts_mut(self.as_mut_ptr(), len);
                ptr::drop_in_place(tail);
            }
        }
    }
}

impl<T, const N: usize> Deref for FixedVec<T, N> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T, const N: usize> DerefMut for FixedVec<T, N> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}

impl<T, const N: usize> Drop for FixedVec<T, N> {
    #[inline]
    fn drop(&mut self) {
        self.clear()
    }
}