musli_core/alloc/boxed.rs

use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::mem::needs_drop;
use core::ops::{Deref, DerefMut};

use super::{Alloc, AllocError, Allocator};

/// A Müsli-allocated pointer type that uniquely owns a heap allocation of type
/// `T`.
///
/// This is a [`Box`][std-box] type capable of using the allocator provided
/// through a [`Context`]. Therefore it can be safely used in no-std
/// environments.
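///
/// ## Examples
///
/// A minimal sketch of allocating a value and then mutating it in place
/// through `DerefMut`, mirroring the example on `Box::new_in` below:
///
/// ```
/// use musli::alloc::{AllocError, Box};
///
/// musli::alloc::default(|alloc| {
///     let mut a = Box::new_in(10u32, alloc)?;
///     *a += 1;
///     assert_eq!(*a, 11u32);
///     Ok::<_, AllocError>(())
/// });
/// # Ok::<_, musli::alloc::AllocError>(())
/// ```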
///
/// [std-box]: std::boxed::Box
/// [`Context`]: crate::Context
pub struct Box<T, A>
where
    A: Allocator,
{
    buf: A::Alloc<T>,
}

impl<T, A> Box<T, A>
where
    A: Allocator,
{
    /// Allocates memory on the heap and then places `value` into it.
    ///
    /// This doesn't actually allocate if `T` is zero-sized.
    ///
    /// ## Examples
    ///
    /// ```
    /// use musli::alloc::{AllocError, Box};
    ///
    /// musli::alloc::default(|alloc| {
    ///     let a = Box::new_in(10u32, alloc)?;
    ///     assert_eq!(a.as_ref(), &10u32);
    ///     Ok::<_, AllocError>(())
    /// });
    /// # Ok::<_, musli::alloc::AllocError>(())
    /// ```
    ///
    /// Zero-sized types:
    ///
    /// ```
    /// use musli::alloc::{AllocError, Box};
    ///
    /// musli::alloc::default(|alloc| {
    ///     let a = Box::new_in((), alloc)?;
    ///     assert_eq!(a.as_ref(), &());
    ///     Ok::<_, AllocError>(())
    /// });
    /// # Ok::<_, musli::alloc::AllocError>(())
    /// ```
    #[inline]
    pub fn new_in(value: T, alloc: A) -> Result<Self, AllocError> {
        Ok(Self {
            buf: alloc.alloc(value)?,
        })
    }
}

// SAFETY: `Box` uniquely owns its allocation, so sending it to another thread
// is sound whenever `T` itself is `Send`.
unsafe impl<T, A> Send for Box<T, A>
where
    T: Send,
    A: Allocator,
{
}

// SAFETY: `Box` only hands out references derived from its unique allocation,
// so sharing it between threads is sound whenever `T` is `Sync`.
unsafe impl<T, A> Sync for Box<T, A>
where
    T: Sync,
    A: Allocator,
{
}

impl<T, A> Deref for Box<T, A>
where
    A: Allocator,
{
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        // SAFETY: The allocation holds an initialized `T` per construction.
        unsafe { &*self.buf.as_ptr() }
    }
}

impl<T, A> DerefMut for Box<T, A>
where
    A: Allocator,
{
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: The allocation holds an initialized `T` per construction,
        // and we have exclusive access through `&mut self`.
        unsafe { &mut *self.buf.as_mut_ptr() }
    }
}

impl<T, A> AsRef<T> for Box<T, A>
where
    A: Allocator,
{
    #[inline]
    fn as_ref(&self) -> &T {
        self
    }
}

impl<T, A> Drop for Box<T, A>
where
    A: Allocator,
{
    #[inline]
    fn drop(&mut self) {
        // SAFETY: The allocation holds an initialized `T` per construction,
        // and it is dropped exactly once here. The backing memory itself is
        // released when `self.buf` is dropped.
        unsafe {
            if needs_drop::<T>() {
                self.buf.as_mut_ptr().drop_in_place();
            }
        }
    }
}

impl<T, A> fmt::Display for Box<T, A>
where
    T: fmt::Display,
    A: Allocator,
{
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl<T, A> fmt::Debug for Box<T, A>
where
    T: fmt::Debug,
    A: Allocator,
{
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T, A> PartialEq for Box<T, A>
where
    T: PartialEq,
    A: Allocator,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        PartialEq::eq(&**self, &**other)
    }

    #[inline]
    #[allow(clippy::partialeq_ne_impl)]
    fn ne(&self, other: &Self) -> bool {
        PartialEq::ne(&**self, &**other)
    }
}

impl<T, A> PartialOrd for Box<T, A>
where
    T: PartialOrd,
    A: Allocator,
{
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }

    #[inline]
    fn lt(&self, other: &Self) -> bool {
        PartialOrd::lt(&**self, &**other)
    }

    #[inline]
    fn le(&self, other: &Self) -> bool {
        PartialOrd::le(&**self, &**other)
    }

    #[inline]
    fn ge(&self, other: &Self) -> bool {
        PartialOrd::ge(&**self, &**other)
    }

    #[inline]
    fn gt(&self, other: &Self) -> bool {
        PartialOrd::gt(&**self, &**other)
    }
}

impl<T, A> Ord for Box<T, A>
where
    T: Ord,
    A: Allocator,
{
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        Ord::cmp(&**self, &**other)
    }
}

impl<T, A> Eq for Box<T, A>
where
    T: Eq,
    A: Allocator,
{
}

impl<T, A> Hash for Box<T, A>
where
    T: Hash,
    A: Allocator,
{
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state);
    }
}

impl<T, A> Hasher for Box<T, A>
where
    T: Hasher,
    A: Allocator,
{
    #[inline]
    fn finish(&self) -> u64 {
        (**self).finish()
    }

    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        (**self).write(bytes)
    }

    #[inline]
    fn write_u8(&mut self, i: u8) {
        (**self).write_u8(i)
    }

    #[inline]
    fn write_u16(&mut self, i: u16) {
        (**self).write_u16(i)
    }

    #[inline]
    fn write_u32(&mut self, i: u32) {
        (**self).write_u32(i)
    }

    #[inline]
    fn write_u64(&mut self, i: u64) {
        (**self).write_u64(i)
    }

    #[inline]
    fn write_u128(&mut self, i: u128) {
        (**self).write_u128(i)
    }

    #[inline]
    fn write_usize(&mut self, i: usize) {
        (**self).write_usize(i)
    }

    #[inline]
    fn write_i8(&mut self, i: i8) {
        (**self).write_i8(i)
    }

    #[inline]
    fn write_i16(&mut self, i: i16) {
        (**self).write_i16(i)
    }

    #[inline]
    fn write_i32(&mut self, i: i32) {
        (**self).write_i32(i)
    }

    #[inline]
    fn write_i64(&mut self, i: i64) {
        (**self).write_i64(i)
    }

    #[inline]
    fn write_i128(&mut self, i: i128) {
        (**self).write_i128(i)
    }

    #[inline]
    fn write_isize(&mut self, i: isize) {
        (**self).write_isize(i)
    }
}