use std::alloc::Layout;
use std::fmt::Debug;
use std::ptr;
use std::ptr::NonNull;

use crate::error::Error;
use crate::error::ErrorKind::OutOfMemory;
use crate::{Alloc, AllocMut, Gc, GcMut};

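/// Default number of times the panicking `alloc*` helpers request a collection
/// and retry after an out-of-memory error before giving up.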
pub const DEFAULT_ALLOC_RETRY_LIMIT: Option<u32> = Some(3);

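/// A high-level allocation interface for GC-aware allocators.
///
/// The provided methods are built on [`Alloc::try_alloc_layout`]; when the
/// underlying allocator reports [`OutOfMemory`], they can request a collection
/// and retry the allocation.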
pub trait Allocator {
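    /// The underlying raw allocator that the provided methods allocate from.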
    type Alloc;

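    /// Returns a mutable reference to the underlying raw allocator.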
    fn as_raw_allocator(&mut self) -> &mut Self::Alloc;

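    /// Gives the collector an opportunity to run, e.g. after a collection has
    /// been requested via [`Allocator::request_gc`].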
    fn yield_point(&mut self);

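    /// Requests that the collector perform a collection of the given
    /// [`CollectionType`].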
    fn request_gc(&mut self, request: CollectionType);

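    /// Allocates `val` on the GC heap, panicking if the allocation still fails
    /// after [`DEFAULT_ALLOC_RETRY_LIMIT`] collection retries.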
    #[inline(always)]
    fn alloc<T>(&mut self, val: T) -> Gc<T, Self::Alloc>
    where
        Self::Alloc: Alloc<T>,
    {
        self.alloc_with(|| val)
    }

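    /// Allocates `val` converted into the allocator's mutable wrapper type
    /// (`MutTy`), panicking on failure like [`Allocator::alloc`].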
    #[inline(always)]
    fn alloc_mut<T>(&mut self, val: T) -> GcMut<T, Self::Alloc>
    where
        Self::Alloc: AllocMut<T>,
        <Self::Alloc as Alloc<T>>::MutTy: From<T>,
    {
        self.alloc_with::<_, <Self::Alloc as Alloc<T>>::MutTy>(|| val.into())
    }

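    /// Fallible version of [`Allocator::alloc`] that returns an error instead
    /// of panicking.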
    #[inline(always)]
    fn try_alloc<T>(&mut self, val: T) -> Result<Gc<T, Self::Alloc>, Error>
    where
        Self::Alloc: Alloc<T>,
    {
        self.try_alloc_with(|| val)
    }

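    /// Allocates the value produced by `f`, requesting a collection and retrying
    /// up to [`DEFAULT_ALLOC_RETRY_LIMIT`] times before panicking.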
    #[inline(always)]
    fn alloc_with<F, T>(&mut self, f: F) -> Gc<T, Self::Alloc>
    where
        F: FnOnce() -> T,
        Self::Alloc: Alloc<T>,
    {
        self.try_gc_alloc_with(DEFAULT_ALLOC_RETRY_LIMIT, f)
            .unwrap_or_else(|err| failed_allocation(err))
    }

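    /// Allocates the value produced by `f` with an explicit retry limit:
    /// `Some(n)` requests at most `n` collections before failing, while `None`
    /// retries indefinitely.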
    #[inline(always)]
    fn try_gc_alloc_with<F, T>(
        &mut self,
        retry_limit: Option<u32>,
        f: F,
    ) -> Result<Gc<T, Self::Alloc>, Error>
    where
        F: FnOnce() -> T,
        Self::Alloc: Alloc<T>,
    {
        let layout = Layout::new::<T>();

        unsafe {
            self.try_gc_alloc_init(retry_limit, layout, |ptr| {
                ptr::write(ptr.as_ptr() as *mut T, f())
            })
        }
    }

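    /// Allocates memory for `layout` and initialises it in place with `init`.
    ///
    /// # Safety
    ///
    /// `layout` must be valid for a value of type `T`, and `init` must fully
    /// initialise the memory behind the pointer it is given.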
    #[inline(always)]
    unsafe fn try_gc_alloc_init<F, T>(
        &mut self,
        mut retry_limit: Option<u32>,
        layout: Layout,
        init: F,
    ) -> Result<Gc<T, Self::Alloc>, Error>
    where
        T: ?Sized,
        F: FnOnce(NonNull<u8>),
        Self::Alloc: Alloc<T>,
    {
        let handle = loop {
            match unsafe { Alloc::<T>::try_alloc_layout(self.as_raw_allocator(), layout) } {
                Ok(handle) => break handle,
                Err(err) if err.kind() == OutOfMemory => {
                    // `None` retries indefinitely; `Some(0)` gives up immediately.
                    match &mut retry_limit {
                        None => {}
                        Some(0) => return Err(err),
                        Some(x) => *x -= 1,
                    }

                    self.request_gc(CollectionType::AllocAtLeast(layout));
                    // Give the requested collection a chance to run before retrying.
                    self.yield_point();
                }
                Err(err) => return Err(err),
            };
        };

        unsafe {
            let data_ptr = Alloc::<T>::handle_ptr(self.as_raw_allocator(), &handle);
            debug_assert!(
                data_ptr.as_ptr() as usize & (layout.align() - 1) == 0,
                "GC allocation did not meet required alignment"
            );

            init(data_ptr);
            Ok(Gc::from_raw(handle))
        }
    }

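    /// Allocates a `T::default()` value, then passes a mutable reference to
    /// `init` so the value can be set up in place before the [`Gc`] handle is
    /// returned.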
    #[inline(always)]
    fn try_gc_alloc_setup<F, T>(
        &mut self,
        retry_limit: Option<u32>,
        init: F,
    ) -> Result<Gc<T, Self::Alloc>, Error>
    where
        T: Default,
        F: FnOnce(&mut T),
        Self::Alloc: Alloc<T>,
    {
        let layout = Layout::new::<T>();

        unsafe {
            self.try_gc_alloc_init(retry_limit, layout, |ptr| {
                let ptr = ptr.cast::<T>().as_ptr();
                // Use `ptr::write` so the uninitialised memory is never dropped.
                ptr::write(ptr, T::default());
                init(&mut *ptr);
            })
        }
    }

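    /// Fallible version of [`Allocator::alloc_with`] that returns an error
    /// instead of panicking.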
    #[inline(always)]
    fn try_alloc_with<F, T>(&mut self, f: F) -> Result<Gc<T, Self::Alloc>, Error>
    where
        F: FnOnce() -> T,
        Self::Alloc: Alloc<T>,
    {
        self.try_gc_alloc_with(None, f)
    }

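    /// Allocates a bitwise copy of `src`, panicking on allocation failure.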
    #[inline(always)]
    fn alloc_slice_copy<T>(&mut self, src: &[T]) -> Gc<[T], Self::Alloc>
    where
        T: Copy,
        Self::Alloc: Alloc<[T]>,
    {
        // Layout of the whole slice, not of a single element.
        let layout = Layout::for_value(src);

        unsafe {
            self.try_gc_alloc_init(DEFAULT_ALLOC_RETRY_LIMIT, layout, |ptr| {
                ptr::copy_nonoverlapping(src.as_ptr(), ptr.as_ptr() as *mut T, src.len());
            })
            .unwrap_or_else(|err| failed_allocation(err))
        }
    }

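    /// Allocates a slice by cloning each element of `src`, panicking on
    /// allocation failure.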
    #[inline(always)]
    fn alloc_slice_clone<T>(&mut self, src: &[T]) -> Gc<[T], Self::Alloc>
    where
        T: Clone,
        Self::Alloc: Alloc<[T]>,
    {
        self.alloc_slice_fill_with(src.len(), |index| src[index].clone())
    }

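    /// Allocates a copy of the string `src`, panicking on allocation failure.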
    #[inline(always)]
    fn alloc_str(&mut self, src: &str) -> Gc<str, Self::Alloc>
    where
        Self::Alloc: Alloc<str>,
    {
        let layout = Layout::for_value(src.as_bytes());

        unsafe {
            self.try_gc_alloc_init(DEFAULT_ALLOC_RETRY_LIMIT, layout, |ptr| {
                ptr::copy_nonoverlapping(src.as_ptr(), ptr.as_ptr(), src.len());
            })
            .unwrap_or_else(|err| failed_allocation(err))
        }
    }

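    /// Allocates a slice of length `len`, initialising each element with
    /// `f(index)`; panics on allocation failure.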
    #[inline(always)]
    fn alloc_slice_fill_with<T, F>(&mut self, len: usize, f: F) -> Gc<[T], Self::Alloc>
    where
        F: FnMut(usize) -> T,
        Self::Alloc: Alloc<[T]>,
    {
        self.try_alloc_slice_fill_with(len, f)
            .unwrap_or_else(|err| failed_allocation(err))
    }

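    /// Fallible version of [`Allocator::alloc_slice_fill_with`] that fails
    /// immediately on out-of-memory instead of requesting a collection.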
    #[inline(always)]
    fn try_alloc_slice_fill_with<T, F>(
        &mut self,
        len: usize,
        f: F,
    ) -> Result<Gc<[T], Self::Alloc>, Error>
    where
        F: FnMut(usize) -> T,
        Self::Alloc: Alloc<[T]>,
    {
        self.try_gc_alloc_slice_fill_with(Some(0), len, f)
    }

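    /// Slice-filling allocation with an explicit retry limit, with the same
    /// semantics as [`Allocator::try_gc_alloc_with`].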
    #[inline(always)]
    fn try_gc_alloc_slice_fill_with<T, F>(
        &mut self,
        retry_limit: Option<u32>,
        len: usize,
        mut f: F,
    ) -> Result<Gc<[T], Self::Alloc>, Error>
    where
        F: FnMut(usize) -> T,
        Self::Alloc: Alloc<[T]>,
    {
        let layout = Layout::array::<T>(len).unwrap_or_else(|err| failed_allocation(err));

        unsafe {
            self.try_gc_alloc_init(retry_limit, layout, |ptr| {
                for index in 0..len {
                    ptr::write(ptr.cast::<T>().as_ptr().add(index), f(index));
                }
            })
        }
    }

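    /// Allocates a slice of length `len` filled with copies of `value`.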
    #[inline(always)]
    fn alloc_slice_fill_copy<T>(&mut self, len: usize, value: T) -> Gc<[T], Self::Alloc>
    where
        T: Copy,
        Self::Alloc: Alloc<[T]>,
    {
        self.alloc_slice_fill_with(len, |_| value)
    }

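    /// Allocates a slice of length `len` filled with clones of `value`.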
    #[inline(always)]
    fn alloc_slice_fill_clone<T>(&mut self, len: usize, value: &T) -> Gc<[T], Self::Alloc>
    where
        T: Clone,
        Self::Alloc: Alloc<[T]>,
    {
        self.alloc_slice_fill_with(len, |_| value.clone())
    }

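    /// Allocates a slice from an [`ExactSizeIterator`], panicking if the
    /// iterator yields fewer elements than its reported length.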
    #[inline(always)]
    fn alloc_slice_fill_iter<T, I>(&mut self, iter: I) -> Gc<[T], Self::Alloc>
    where
        I: IntoIterator<Item = T>,
        I::IntoIter: ExactSizeIterator,
        Self::Alloc: Alloc<[T]>,
    {
        let mut iter = iter.into_iter();

        self.alloc_slice_fill_with(iter.len(), |_| {
            iter.next().expect("Iterator supplied too few elements")
        })
    }

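    /// Allocates a slice of length `len` filled with `T::default()`.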
    #[inline(always)]
    fn alloc_slice_fill_default<T>(&mut self, len: usize) -> Gc<[T], Self::Alloc>
    where
        T: Default,
        Self::Alloc: Alloc<[T]>,
    {
        self.alloc_slice_fill_with(len, |_| T::default())
    }
}

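/// The kind of collection passed to [`Allocator::request_gc`].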
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum CollectionType {
    /// Collect the entire heap.
    Full,
    /// Collect only a subset of the heap; the exact scope is allocator-specific.
    Partial,
    /// Collect at least enough memory to satisfy an allocation with this layout.
    AllocAtLeast(Layout),
    /// A non-binding suggestion that a collection be performed.
    Suggest,
    /// An allocator-defined collection request identified by a custom code.
    Custom(u64),
}

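/// Cold panic path shared by the infallible `alloc*` helpers.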
#[cold]
#[inline(never)]
fn failed_allocation<T: Debug>(err: T) -> ! {
    panic!("Failed to perform GC allocation: {:?}", err)
}