1use core::alloc::Layout;
2use core::cmp;
3use core::mem::{align_of, size_of};
4use core::ptr::NonNull;
5
6use rust_alloc::alloc;
7
8use super::{Alloc, AllocError, Allocator, GlobalAllocator};
9
/// The system allocator: a zero-sized handle that delegates all allocation
/// to the Rust global allocator (via `rust_alloc::alloc`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub struct Global;
37
38impl Global {
39 #[inline]
41 pub const fn new() -> Self {
42 Self
43 }
44}
45
46impl Default for Global {
47 #[inline]
48 fn default() -> Self {
49 Self::new()
50 }
51}
52
53unsafe impl GlobalAllocator for Global {
54 #[inline]
55 fn __do_not_implement() {}
56
57 #[inline]
58 fn new() -> Self {
59 Self
60 }
61
62 #[inline]
63 fn clone_alloc<T>(alloc: &Self::Alloc<T>) -> Self::Alloc<T> {
64 if alloc.size == 0 {
65 return GlobalAlloc::DANGLING;
66 }
67
68 unsafe {
69 let layout =
71 Layout::from_size_align_unchecked(alloc.size * size_of::<T>(), align_of::<T>());
72 let data = alloc::alloc(layout);
73
74 if data.is_null() {
75 alloc::handle_alloc_error(layout);
76 }
77
78 GlobalAlloc {
79 data: NonNull::new_unchecked(data).cast(),
80 size: alloc.size,
81 }
82 }
83 }
84
85 #[inline]
86 fn slice_from_raw_parts<T>(ptr: NonNull<T>, len: usize) -> Self::Alloc<T> {
87 GlobalAlloc {
88 data: ptr,
89 size: len,
90 }
91 }
92}
93
94unsafe impl Allocator for Global {
95 #[inline]
96 fn __do_not_implement() {}
97
98 const IS_GLOBAL: bool = true;
99
100 type Alloc<T> = GlobalAlloc<T>;
101
102 #[inline]
103 fn alloc<T>(self, value: T) -> Result<Self::Alloc<T>, AllocError> {
104 let mut raw = GlobalAlloc::<T>::alloc()?;
105
106 if size_of::<T>() != 0 {
107 unsafe {
109 raw.as_mut_ptr().write(value);
110 }
111 }
112
113 Ok(raw)
114 }
115
116 #[inline]
117 fn alloc_empty<T>(self) -> Self::Alloc<T> {
118 GlobalAlloc::DANGLING
119 }
120}
121
/// An allocation handle produced by [`Global`].
pub struct GlobalAlloc<T> {
    // Pointer to the backing storage. Dangling (aligned, non-null, never
    // dereferenced for deallocation) for ZSTs and for the empty state.
    data: NonNull<T>,
    // Capacity in elements of `T`. 0 marks the empty/dangling state; for
    // ZSTs this is a logical count only and `capacity()` ignores it.
    size: usize,
}
129
130impl<T> GlobalAlloc<T> {
131 #[must_use = "allocating is fallible and must be checked"]
137 fn alloc() -> Result<Self, AllocError> {
138 if size_of::<T>() == 0 {
139 return Ok(Self {
140 data: NonNull::dangling(),
141 size: 1,
142 });
143 }
144
145 unsafe {
146 let data = alloc::alloc(Layout::new::<T>());
147
148 if data.is_null() {
149 return Err(AllocError);
150 }
151
152 Ok(Self {
153 data: NonNull::new_unchecked(data).cast(),
154 size: 1,
155 })
156 }
157 }
158}
159
// SAFETY: `GlobalAlloc<T>` uniquely owns its allocation (allocated and freed
// only through this type), so it may move to / be shared with another thread
// whenever `T` itself permits it — the same reasoning as `Vec<T>`.
unsafe impl<T> Send for GlobalAlloc<T> where T: Send {}
unsafe impl<T> Sync for GlobalAlloc<T> where T: Sync {}
162
163impl<T> Alloc<T> for GlobalAlloc<T> {
164 #[inline]
165 fn as_ptr(&self) -> *const T {
166 self.data.as_ptr().cast_const().cast()
167 }
168
169 #[inline]
170 fn as_mut_ptr(&mut self) -> *mut T {
171 self.data.as_ptr().cast()
172 }
173
174 #[inline]
175 fn capacity(&self) -> usize {
176 if size_of::<T>() == 0 {
177 usize::MAX
178 } else {
179 self.size
180 }
181 }
182
183 #[inline]
184 fn resize(&mut self, len: usize, additional: usize) -> Result<(), AllocError> {
185 if size_of::<T>() == 0 {
186 return Ok(());
187 }
188
189 if !self.reserve(len, additional) {
190 return Err(AllocError);
191 }
192
193 Ok(())
194 }
195
196 #[inline]
197 fn try_merge<B>(&mut self, _: usize, other: B, _: usize) -> Result<(), B>
198 where
199 B: Alloc<T>,
200 {
201 if size_of::<T>() == 0 {
202 return Ok(());
203 }
204
205 Err(other)
206 }
207}
208
impl<T> GlobalAlloc<T> {
    /// Minimum capacity for the first real allocation, scaled by element
    /// size so that small elements amortize allocator round-trips.
    const MIN_NON_ZERO_CAP: usize = if size_of::<T>() == 1 {
        8
    } else if size_of::<T>() <= 1024 {
        4
    } else {
        1
    };

    /// The empty allocation: a dangling pointer with zero capacity.
    const DANGLING: Self = Self {
        data: NonNull::dangling(),
        size: 0,
    };

    /// Grow (or initially allocate) the backing storage to `new_layout`.
    ///
    /// Returns `false` if the global allocator fails. Updates `self.data`
    /// only; the caller (`reserve`) is responsible for updating `self.size`
    /// after success.
    #[must_use = "allocating is fallible and must be checked"]
    fn realloc(&mut self, new_layout: Layout) -> bool {
        unsafe {
            let data = {
                if self.size > 0 {
                    // SAFETY: a non-zero `size` means `data` was obtained
                    // from the global allocator with exactly this layout
                    // (`size` elements of `T`), so it is valid to hand back
                    // to `realloc`. NOTE(review): assumes non-ZST `T` —
                    // callers guard on `size_of::<T>() != 0`; confirm.
                    let old_layout = Layout::from_size_align_unchecked(
                        self.size.wrapping_mul(size_of::<T>()),
                        align_of::<T>(),
                    );

                    alloc::realloc(self.data.as_ptr().cast(), old_layout, new_layout.size())
                } else {
                    alloc::alloc(new_layout)
                }
            };

            if data.is_null() {
                // Old allocation (if any) is still valid; report failure.
                return false;
            }

            self.data = NonNull::new_unchecked(data).cast();
        }

        true
    }

    /// Ensure capacity for at least `len + additional` elements, growing
    /// geometrically (doubling, floored at `MIN_NON_ZERO_CAP`).
    ///
    /// Returns `false` on arithmetic overflow or allocation failure, in
    /// which case the existing allocation is left untouched.
    #[must_use = "allocating is fallible and must be checked"]
    fn reserve(&mut self, len: usize, additional: usize) -> bool {
        debug_assert_ne!(size_of::<T>(), 0, "ZSTs should not get here");

        let Some(required_cap) = len.checked_add(additional) else {
            return false;
        };

        if self.size >= required_cap {
            return true;
        }

        // Grow by at least doubling to keep repeated pushes amortized O(1).
        let cap = cmp::max(self.size * 2, required_cap);
        let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap);

        // `Layout::array` also rejects `cap * size_of::<T>()` overflow.
        let Ok(new_layout) = Layout::array::<T>(cap) else {
            return false;
        };

        if !self.realloc(new_layout) {
            return false;
        }

        self.size = cap;
        true
    }

    /// Release the backing storage and reset to the dangling state.
    /// No-op for ZSTs and for the empty allocation.
    fn free(&mut self) {
        if size_of::<T>() == 0 || self.size == 0 {
            return;
        }

        unsafe {
            // SAFETY: `size > 0` and `T` is non-zero-sized, so `data` was
            // allocated by the global allocator with exactly this layout.
            let layout =
                Layout::from_size_align_unchecked(self.size * size_of::<T>(), align_of::<T>());
            alloc::dealloc(self.data.as_ptr().cast(), layout);
            // Reset so a second `free` (e.g. via `Drop`) is a no-op.
            self.data = NonNull::dangling();
            self.size = 0;
        }
    }
}
297
298impl<T> Drop for GlobalAlloc<T> {
299 #[inline]
300 fn drop(&mut self) {
301 self.free();
302 }
303}