stable_alloc_shim/alloc_alloc.rs

use core::alloc::Layout;
use core::ptr::{self, NonNull};

use std_alloc::alloc::{alloc, alloc_zeroed, dealloc, realloc};

use crate::core_alloc::{AllocError, Allocator};
use crate::{layout_dangling, nonnull_as_mut_ptr, nonnull_slice_from_raw_parts};
/// A handle to the global memory allocator.
///
/// All methods forward to the `alloc`, `alloc_zeroed`, `dealloc`, and `realloc`
/// functions of the registered global allocator; zero-sized requests are served
/// with a dangling, well-aligned pointer and never reach the allocator itself.
#[derive(Copy, Clone, Default, Debug)]
pub struct Global;

impl Global {
    #[inline]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized allocations never touch the global allocator; hand back a
            // well-aligned dangling pointer instead.
            0 => Ok(nonnull_slice_from_raw_parts(layout_dangling(&layout), 0)),
            // SAFETY: `layout` is non-zero in size.
            size => unsafe {
                let raw_ptr = if zeroed {
                    alloc_zeroed(layout)
                } else {
                    alloc(layout)
                };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(nonnull_slice_from_raw_parts(ptr, size))
            },
        }
    }

    #[inline]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // The old allocation was zero-sized, so there is nothing to copy or free;
            // this is just a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero because it is at least `old_size`, which is
            // non-zero in this arm. Other conditions must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // Hint to the optimizer that `realloc` is not shrinking the block here.
                crate::assume(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(nonnull_slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_layout.size()` is at least `old_size`, both the old and
            // new allocations are valid for reads and writes of `old_size` bytes, and they
            // cannot overlap since the old block has not yet been deallocated. The safety
            // contract for `deallocate` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), nonnull_as_mut_ptr(new_ptr), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}

unsafe impl Allocator for Global {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size; other conditions must be upheld
            // by the caller.
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller.
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller.
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero frees the old block and hands back a dangling pointer.
            // SAFETY: conditions must be upheld by the caller.
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(nonnull_slice_from_raw_parts(
                    layout_dangling(&new_layout),
                    0,
                ))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller.
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // Hint to the optimizer that `realloc` is not growing the block here.
                crate::assume(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(nonnull_slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` is at most `old_layout.size()`, both the old and
            // new allocations are valid for reads and writes of `new_size` bytes, and they
            // cannot overlap since the old block has not yet been deallocated. The safety
            // contract for `deallocate` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), nonnull_as_mut_ptr(new_ptr), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
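
// What follows is an illustrative sanity-check sketch, not part of the original file:
// it exercises `Global` directly through the crate's `Allocator` trait, assuming (as the
// calls above suggest) that `nonnull_as_mut_ptr` takes a `NonNull<[u8]>` by value and
// returns a `*mut u8`.
#[cfg(test)]
mod tests {
    use super::*;

    // Unwrap an allocation result without requiring `AllocError: Debug`.
    fn unwrap_alloc(res: Result<NonNull<[u8]>, AllocError>) -> NonNull<[u8]> {
        match res {
            Ok(ptr) => ptr,
            Err(_) => panic!("allocation failed"),
        }
    }

    #[test]
    fn allocate_grow_shrink_deallocate_roundtrip() {
        unsafe {
            let old_layout = Layout::array::<u8>(16).unwrap();
            let new_layout = Layout::array::<u8>(64).unwrap();

            // Allocate 16 bytes and fill them with a marker value.
            let ptr = unwrap_alloc(Global.allocate(old_layout));
            nonnull_as_mut_ptr(ptr).write_bytes(0xAB, old_layout.size());

            // Grow to 64 bytes; the old prefix is preserved and the new tail is zeroed.
            let grown = unwrap_alloc(Global.grow_zeroed(ptr.cast(), old_layout, new_layout));
            assert_eq!(*nonnull_as_mut_ptr(grown), 0xAB);
            assert_eq!(*nonnull_as_mut_ptr(grown).add(old_layout.size()), 0);

            // Shrink back down and release the block.
            let shrunk = unwrap_alloc(Global.shrink(grown.cast(), new_layout, old_layout));
            Global.deallocate(shrunk.cast(), old_layout);
        }
    }
}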