bump_scope/alloc/global.rs

1//! Memory allocation APIs
2#![expect(clippy::unused_self)]
3
4use alloc_crate::alloc::{alloc, alloc_zeroed, dealloc, realloc};
5use core::{alloc::Layout, hint, ptr::NonNull};
6
7use crate::polyfill;
8
9use super::{AllocError, Allocator};
10
11/// The global memory allocator.
12///
13/// This type implements the [`Allocator`] trait by forwarding calls
14/// to the allocator registered with the `#[global_allocator]` attribute
15/// if there is one, or the `std` crate’s default.
// NOTE: kept as a distinct unit type (rather than, say, a type alias) so the
// compiler can tell apart a Box that uses the global allocator from one that
// uses a custom allocator.
#[derive(Clone, Copy, Debug, Default)]
pub struct Global;
19
impl Global {
    /// Allocates a block of memory described by `layout`, optionally zero-initialized.
    ///
    /// A zero-sized `layout` never touches the global allocator: it returns a
    /// dangling-but-aligned pointer with length 0, per the `Allocator` contract.
    /// Returns `Err(AllocError)` if the underlying allocator returns null.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(polyfill::layout::dangling(layout), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    /// Shared implementation of `grow` and `grow_zeroed`; when `zeroed` is true
    /// the bytes beyond the old size are zero-initialized.
    ///
    // SAFETY: Same as `Allocator::grow`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // Old allocation was zero-sized, so there is nothing to copy or free:
            // growing is just a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero because `old_size` is non-zero in this arm
            // and `new_layout.size()` is greater than or equal to `old_layout.size()`
            // as required by safety conditions. Other conditions must be upheld by the caller
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // `realloc` only preserves the first `old_size` bytes; zero the tail.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed: `realloc` can't be used, so allocate, copy, free.
            //
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr.copy_to_nonoverlapping(new_ptr.cast(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
83
unsafe impl Allocator for Global {
    /// Forwards to [`Global::alloc_impl`] without zero-initialization.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    /// Forwards to [`Global::alloc_impl`] with zero-initialization.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    /// Deallocates `ptr`. Zero-sized layouts were never passed to the global
    /// allocator (see `alloc_impl`), so they are a no-op here.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY:
            // * We have checked that `layout` is non-zero in size.
            // * The caller is obligated to provide a layout that "fits", and in this case,
            //   "fit" always means a layout that is equal to the original, because our
            //   `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
            //   allocation than requested.
            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
            //   safety documentation.
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    /// Grows the allocation; newly added bytes are left uninitialized.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    /// Grows the allocation; newly added bytes are zero-initialized.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    /// Shrinks the allocation to `new_layout`, mirroring `grow_impl`'s three cases:
    /// shrink-to-zero (deallocate), same alignment (`realloc`), and changed
    /// alignment (allocate + copy + deallocate).
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(polyfill::layout::dangling(new_layout), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed: `realloc` can't be used, so allocate, copy, free.
            //
            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr.copy_to_nonoverlapping(new_ptr.cast(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}