// musli_core/alloc/global.rs

1use core::alloc::Layout;
2use core::cmp;
3use core::mem::{align_of, size_of};
4use core::ptr::NonNull;
5
6use rust_alloc::alloc;
7
8use super::{Alloc, AllocError, Allocator};
9
/// Global buffer that can be used in combination with an [`Allocator`].
///
/// This uses the global allocator.
///
/// # Examples
///
/// ```
/// use musli::alloc::{Global, Vec};
///
/// let alloc = Global::new();
///
/// let mut buf1 = Vec::new_in(alloc);
/// let mut buf2 = Vec::new_in(alloc);
///
/// buf1.extend_from_slice(b"Hello, ")?;
/// buf2.extend_from_slice(b"world!")?;
///
/// assert_eq!(buf1.as_slice(), b"Hello, ");
/// assert_eq!(buf2.as_slice(), b"world!");
///
/// buf1.extend(buf2);
/// assert_eq!(buf1.as_slice(), b"Hello, world!");
/// # Ok::<_, musli::alloc::AllocError>(())
/// ```
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub struct Global;
37
38impl Global {
39    /// Construct a new global allocator.
40    #[inline]
41    pub const fn new() -> Self {
42        Self
43    }
44
45    /// Construct an allocation directly from raw parts.
46    ///
47    /// # Safety
48    ///
49    /// Caller must ensure that the allocation comes from the same global
50    /// allocator and is correctly initialized per its parameters.
51    #[inline]
52    pub(crate) unsafe fn slice_from_raw_parts<T>(data: NonNull<T>, size: usize) -> GlobalAlloc<T> {
53        GlobalAlloc { data, size }
54    }
55}
56
57impl Default for Global {
58    #[inline]
59    fn default() -> Self {
60        Self::new()
61    }
62}
63
64unsafe impl Allocator for Global {
65    const IS_GLOBAL: bool = true;
66
67    type Alloc<T> = GlobalAlloc<T>;
68
69    #[inline]
70    fn alloc<T>(self, value: T) -> Result<Self::Alloc<T>, AllocError> {
71        let mut raw = GlobalAlloc::<T>::alloc()?;
72
73        if size_of::<T>() != 0 {
74            // SAFETY: The above ensures the data has been allocated.
75            unsafe {
76                raw.as_mut_ptr().write(value);
77            }
78        }
79
80        Ok(raw)
81    }
82
83    #[inline]
84    fn alloc_empty<T>(self) -> Self::Alloc<T> {
85        GlobalAlloc::DANGLING
86    }
87}
88
/// An allocation region backed by the global allocator.
pub struct GlobalAlloc<T> {
    /// Pointer to the allocated region. Dangling (but aligned) when `size`
    /// is 0 or `T` is zero-sized.
    data: NonNull<T>,
    /// The size in number of `T` elements in the region.
    size: usize,
}
96
97impl<T> GlobalAlloc<T> {
98    /// Reallocate the region to the given capacity.
99    ///
100    /// # Safety
101    ///
102    /// The caller must ensure that the new capacity is valid per [`Layout`].
103    #[must_use = "allocating is fallible and must be checked"]
104    fn alloc() -> Result<Self, AllocError> {
105        if size_of::<T>() == 0 {
106            return Ok(Self {
107                data: NonNull::dangling(),
108                size: 1,
109            });
110        }
111
112        unsafe {
113            let data = alloc::alloc(Layout::new::<T>());
114
115            if data.is_null() {
116                return Err(AllocError);
117            }
118
119            Ok(Self {
120                data: NonNull::new_unchecked(data).cast(),
121                size: 1,
122            })
123        }
124    }
125}
126
// SAFETY: `GlobalAlloc<T>` exclusively owns its backing region, so moving it
// to or sharing it with another thread is safe whenever `T` itself is.
unsafe impl<T> Send for GlobalAlloc<T> where T: Send {}
unsafe impl<T> Sync for GlobalAlloc<T> where T: Sync {}
129
130impl<T> Alloc<T> for GlobalAlloc<T> {
131    #[inline]
132    fn as_ptr(&self) -> *const T {
133        self.data.as_ptr().cast_const().cast()
134    }
135
136    #[inline]
137    fn as_mut_ptr(&mut self) -> *mut T {
138        self.data.as_ptr().cast()
139    }
140
141    #[inline]
142    fn capacity(&self) -> usize {
143        if size_of::<T>() == 0 {
144            usize::MAX
145        } else {
146            self.size
147        }
148    }
149
150    #[inline]
151    fn resize(&mut self, len: usize, additional: usize) -> Result<(), AllocError> {
152        if size_of::<T>() == 0 {
153            return Ok(());
154        }
155
156        if !self.reserve(len, additional) {
157            return Err(AllocError);
158        }
159
160        Ok(())
161    }
162
163    #[inline]
164    fn try_merge<B>(&mut self, _: usize, other: B, _: usize) -> Result<(), B>
165    where
166        B: Alloc<T>,
167    {
168        if size_of::<T>() == 0 {
169            return Ok(());
170        }
171
172        Err(other)
173    }
174}
175
impl<T> GlobalAlloc<T> {
    /// Minimum non-zero capacity to grow to, scaled by element size so that
    /// small elements get more slots up front and very large elements grow
    /// one at a time.
    const MIN_NON_ZERO_CAP: usize = if size_of::<T>() == 1 {
        8
    } else if size_of::<T>() <= 1024 {
        4
    } else {
        1
    };

    /// An empty allocation: a dangling, aligned pointer with zero capacity.
    /// `free` never deallocates while `size` is 0.
    const DANGLING: Self = Self {
        data: NonNull::dangling(),
        size: 0,
    };

    /// Reallocate the region to the given capacity.
    ///
    /// On failure the existing region is left untouched and remains valid.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the new capacity is valid per [`Layout`].
    #[must_use = "allocating is fallible and must be checked"]
    fn realloc(&mut self, new_layout: Layout) -> bool {
        unsafe {
            let data = {
                if self.size > 0 {
                    // Reconstruct the layout the existing region was
                    // allocated with: `self.size` elements of `T`.
                    let old_layout = Layout::from_size_align_unchecked(
                        self.size.wrapping_mul(size_of::<T>()),
                        align_of::<T>(),
                    );

                    alloc::realloc(self.data.as_ptr().cast(), old_layout, new_layout.size())
                } else {
                    // Nothing allocated yet (dangling pointer), so perform a
                    // fresh allocation instead of reallocating.
                    alloc::alloc(new_layout)
                }
            };

            if data.is_null() {
                // Allocation failed; `self.data` and `self.size` still
                // describe the old (valid) region.
                return false;
            }

            self.data = NonNull::new_unchecked(data).cast();
        }

        true
    }

    /// Grow the region so it holds at least `len + additional` elements,
    /// doubling the current capacity for amortized growth.
    ///
    /// Returns `false` on arithmetic overflow, invalid layout, or allocation
    /// failure, in which case the region is left unchanged.
    #[must_use = "allocating is fallible and must be checked"]
    fn reserve(&mut self, len: usize, additional: usize) -> bool {
        debug_assert_ne!(size_of::<T>(), 0, "ZSTs should not get here");

        let Some(required_cap) = len.checked_add(additional) else {
            return false;
        };

        if self.size >= required_cap {
            return true;
        }

        // Grow geometrically, but never below the required capacity or the
        // per-type minimum.
        let cap = cmp::max(self.size * 2, required_cap);
        let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap);

        let Ok(new_layout) = Layout::array::<T>(cap) else {
            return false;
        };

        if !self.realloc(new_layout) {
            return false;
        }

        // Only record the new capacity once the reallocation has succeeded.
        self.size = cap;
        true
    }

    /// Release the backing region, if any, and reset to the dangling state.
    ///
    /// Does not drop any `T` values; element lifetimes are managed by the
    /// container using this allocation.
    fn free(&mut self) {
        if size_of::<T>() == 0 || self.size == 0 {
            return;
        }

        // SAFETY: Layout assumptions are correctly encoded in the type as
        // it was being allocated or grown.
        unsafe {
            let layout =
                Layout::from_size_align_unchecked(self.size * size_of::<T>(), align_of::<T>());
            alloc::dealloc(self.data.as_ptr().cast(), layout);
            self.data = NonNull::dangling();
            self.size = 0;
        }
    }
}
264
impl<T> Drop for GlobalAlloc<T> {
    #[inline]
    fn drop(&mut self) {
        // Release the backing memory. Element values themselves are not
        // dropped here; `free` only deallocates.
        self.free();
    }
}