// gc_api/alloc/api.rs

1use std::alloc::Layout;
2use std::fmt::Debug;
3use std::ptr;
4use std::ptr::NonNull;
5
6use crate::error::Error;
7use crate::error::ErrorKind::OutOfMemory;
8use crate::{Alloc, AllocMut, Gc, GcMut};
9
/// Default number of GC-and-retry attempts the panicking `alloc*` helpers make on
/// out-of-memory before giving up. A value of `None` means retry without limit.
pub const DEFAULT_ALLOC_RETRY_LIMIT: Option<u32> = Some(3);
11
12/// Notes:
13/// - Encode metadata in Gc pointer
14/// - Swap error out with trait system?
15pub trait Allocator {
16    /// Inner allocator type used by this GC
17    type Alloc;
18
19    fn as_raw_allocator(&mut self) -> &mut Self::Alloc;
20
21    /// Mark a location where this allocator can safely yield to garbage collection. Garbage
22    /// connection will only occur when the required allocators have yielded. Calling this function
23    /// does not guarantee garbage collection will occur.
24    ///
25    /// For garbage collectors that do not require yield points, this will be treated as a no-op.
26    fn yield_point(&mut self);
27
28    /// Request that garbage collection is performed at the next `yield_point`. This function should
29    /// only be called if recommended by the underlying implementation. Extraneous calls to this
30    /// function may have an adverse effect on performance.
31    ///
32    /// Calling this function gives a GC a strong suggestion that garbage collection should be
33    /// performed at the next opportunity. However, this does not guarantee that garbage collection
34    /// can or will be performed.
35    fn request_gc(&mut self, request: CollectionType);
36
37    #[inline(always)]
38    fn alloc<T>(&mut self, val: T) -> Gc<T, Self::Alloc>
39    where
40        Self::Alloc: Alloc<T>,
41    {
42        self.alloc_with(|| val)
43    }
44
45    #[inline(always)]
46    fn alloc_mut<T>(&mut self, val: T) -> GcMut<T, Self::Alloc>
47    where
48        Self::Alloc: AllocMut<T>,
49        <Self::Alloc as Alloc<T>>::MutTy: From<T>,
50    {
51        self.alloc_with::<_, <Self::Alloc as Alloc<T>>::MutTy>(|| val.into())
52    }
53
54    #[inline(always)]
55    fn try_alloc<T>(&mut self, val: T) -> Result<Gc<T, Self::Alloc>, Error>
56    where
57        Self::Alloc: Alloc<T>,
58    {
59        self.try_alloc_with(|| val)
60    }
61
62    #[inline(always)]
63    fn alloc_with<F, T>(&mut self, f: F) -> Gc<T, Self::Alloc>
64    where
65        F: FnOnce() -> T,
66        Self::Alloc: Alloc<T>,
67    {
68        self.try_gc_alloc_with(DEFAULT_ALLOC_RETRY_LIMIT, f)
69            .unwrap_or_else(|err| failed_allocation(err))
70    }
71
72    #[inline(always)]
73    fn try_gc_alloc_with<F, T>(
74        &mut self,
75        retry_limit: Option<u32>,
76        f: F,
77    ) -> Result<Gc<T, Self::Alloc>, Error>
78    where
79        F: FnOnce() -> T,
80        Self::Alloc: Alloc<T>,
81    {
82        let layout = Layout::new::<T>();
83
84        unsafe {
85            self.try_gc_alloc_init(retry_limit, layout, |ptr| {
86                ptr::write(ptr.as_ptr() as *mut T, f())
87            })
88        }
89    }
90
91    /// This function attempts to allocate a new object on the heap in accordance to the given
92    /// layout. The caller can then choose how they would like to initialize that memory.
93    ///
94    /// # Safety
95    /// The caller must fully initialize the object data via the init function. Failing to do so may
96    /// result in undefined behavior comparable to calling [`std::mem::MaybeUninit::assume_init`]
97    /// without fully initializing the type.
98    #[inline(always)]
99    unsafe fn try_gc_alloc_init<F, T>(
100        &mut self,
101        mut retry_limit: Option<u32>,
102        layout: Layout,
103        init: F,
104    ) -> Result<Gc<T, Self::Alloc>, Error>
105    where
106        T: ?Sized,
107        F: FnOnce(NonNull<u8>),
108        Self::Alloc: Alloc<T>,
109    {
110        let handle = loop {
111            match unsafe { Alloc::<T>::try_alloc_layout(self.as_raw_allocator(), layout) } {
112                Ok(handle) => break handle,
113                Err(err) if err.kind() == OutOfMemory => {
114                    // Decrement retry counter
115                    match &mut retry_limit {
116                        None => {}
117                        Some(0) => return Err(err),
118                        Some(x) => *x -= 1,
119                    }
120
121                    // Request that a GC be performed and attempt to yield
122                    self.request_gc(CollectionType::AllocAtLeast(layout));
123                    self.yield_point();
124                }
125                Err(err) => return Err(err),
126            };
127        };
128
129        unsafe {
130            let data_ptr = Alloc::<T>::handle_ptr(self.as_raw_allocator(), &handle);
131            debug_assert!(
132                data_ptr.as_ptr() as usize & (layout.align() - 1) == 0,
133                "GC allocation did not meet required alignment"
134            );
135
136            init(data_ptr);
137            Ok(Gc::from_raw(handle))
138        }
139    }
140
141    /// This function is intended to be a safe equivalent for [`try_gc_alloc_init`]. To avoid any
142    /// unsafe code the unitialized value is first initialized to its default value before being
143    /// handed to the user.
144    #[inline(always)]
145    fn try_gc_alloc_setup<F, T>(
146        &mut self,
147        retry_limit: Option<u32>,
148        init: F,
149    ) -> Result<Gc<T, Self::Alloc>, Error>
150    where
151        T: ?Sized + Default,
152        F: FnOnce(&mut T),
153        Self::Alloc: Alloc<T>,
154    {
155        let layout = Layout::new::<T>();
156
157        unsafe {
158            self.try_gc_alloc_init(retry_limit, layout, |ptr| {
159                let mut_ref = &mut *ptr.cast().as_ptr();
160                // Hopefully the compiler will understand that this call can be optimized away
161                *mut_ref = T::default();
162                init(mut_ref);
163            })
164        }
165    }
166
167    #[inline(always)]
168    fn try_alloc_with<F, T>(&mut self, f: F) -> Result<Gc<T, Self::Alloc>, Error>
169    where
170        F: FnOnce() -> T,
171        Self::Alloc: Alloc<T>,
172    {
173        self.try_gc_alloc_with(None, f)
174    }
175
176    #[inline(always)]
177    fn alloc_slice_copy<T>(&mut self, src: &[T]) -> Gc<[T], Self::Alloc>
178    where
179        T: Copy,
180        Self::Alloc: Alloc<[T]>,
181    {
182        let layout = Layout::new::<T>();
183
184        unsafe {
185            self.try_gc_alloc_init(DEFAULT_ALLOC_RETRY_LIMIT, layout, |ptr| {
186                ptr::copy_nonoverlapping(src.as_ptr(), ptr.as_ptr() as *mut T, src.len());
187            })
188            .unwrap_or_else(|err| failed_allocation(err))
189        }
190    }
191
192    #[inline(always)]
193    fn alloc_slice_clone<T>(&mut self, src: &[T]) -> Gc<[T], Self::Alloc>
194    where
195        T: Clone,
196        Self::Alloc: Alloc<[T]>,
197    {
198        self.alloc_slice_fill_with(src.len(), |index| src[index].clone())
199    }
200
201    #[inline(always)]
202    fn alloc_str(&mut self, src: &str) -> Gc<str, Self::Alloc>
203    where
204        Self::Alloc: Alloc<str>,
205    {
206        let layout = Layout::for_value(src.as_bytes());
207
208        unsafe {
209            self.try_gc_alloc_init(DEFAULT_ALLOC_RETRY_LIMIT, layout, |ptr| {
210                ptr::copy_nonoverlapping(src.as_ptr(), ptr.as_ptr(), src.len());
211            })
212            .unwrap_or_else(|err| failed_allocation(err))
213        }
214    }
215
216    #[inline(always)]
217    fn alloc_slice_fill_with<T, F>(&mut self, len: usize, f: F) -> Gc<[T], Self::Alloc>
218    where
219        F: FnMut(usize) -> T,
220        Self::Alloc: Alloc<[T]>,
221    {
222        self.try_alloc_slice_fill_with(len, f)
223            .unwrap_or_else(|err| failed_allocation(err))
224    }
225
226    #[inline(always)]
227    fn try_alloc_slice_fill_with<T, F>(
228        &mut self,
229        len: usize,
230        f: F,
231    ) -> Result<Gc<[T], Self::Alloc>, Error>
232    where
233        F: FnMut(usize) -> T,
234        Self::Alloc: Alloc<[T]>,
235    {
236        self.try_gc_alloc_slice_fill_with(Some(0), len, f)
237    }
238
239    #[inline(always)]
240    fn try_gc_alloc_slice_fill_with<T, F>(
241        &mut self,
242        retry_limit: Option<u32>,
243        len: usize,
244        mut f: F,
245    ) -> Result<Gc<[T], Self::Alloc>, Error>
246    where
247        F: FnMut(usize) -> T,
248        Self::Alloc: Alloc<[T]>,
249    {
250        let layout = Layout::array::<T>(len).unwrap_or_else(|err| failed_allocation(err));
251
252        unsafe {
253            self.try_gc_alloc_init(retry_limit, layout, |ptr| {
254                for index in 0..len {
255                    ptr::write(ptr.cast::<T>().as_ptr().add(index), f(index));
256                }
257            })
258        }
259    }
260
261    #[inline(always)]
262    fn alloc_slice_fill_copy<T>(&mut self, len: usize, value: T) -> Gc<[T], Self::Alloc>
263    where
264        T: Copy,
265        Self::Alloc: Alloc<[T]>,
266    {
267        self.alloc_slice_fill_with(len, |_| value)
268    }
269
270    #[inline(always)]
271    fn alloc_slice_fill_clone<T>(&mut self, len: usize, value: &T) -> Gc<[T], Self::Alloc>
272    where
273        T: Clone,
274        Self::Alloc: Alloc<[T]>,
275    {
276        self.alloc_slice_fill_with(len, |_| value.clone())
277    }
278
279    #[inline(always)]
280    fn alloc_slice_fill_iter<T, I>(&mut self, iter: I) -> Gc<[T], Self::Alloc>
281    where
282        I: IntoIterator<Item = T>,
283        I::IntoIter: ExactSizeIterator,
284        Self::Alloc: Alloc<[T]>,
285    {
286        let mut iter = iter.into_iter();
287
288        self.alloc_slice_fill_with(iter.len(), |_| {
289            iter.next().expect("Iterator supplied too few elements")
290        })
291    }
292
293    #[inline(always)]
294    fn alloc_slice_fill_default<T>(&mut self, len: usize) -> Gc<[T], Self::Alloc>
295    where
296        T: Default,
297        Self::Alloc: Alloc<[T]>,
298    {
299        self.alloc_slice_fill_with(len, |_| T::default())
300    }
301}
302
/// The kind of garbage collection being requested via [`Allocator::request_gc`].
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum CollectionType {
    /// Request that a full GC is performed across the entire heap as soon as possible
    Full,
    /// Request that a partial GC is performed. The definition of a partial GC will vary slightly
    /// between different implementations.
    Partial,
    /// Request a GC to be performed so the current allocator has enough space to allocate the given
    /// layout. What this means will change depending on the implementation.
    AllocAtLeast(Layout),
    /// Suggest to the GC that garbage collection should be performed soon. This is designed to
    /// give a user the option to hint that space will be required in the near future. The
    /// implementation may choose to ignore this request if it deems a GC is not required or would
    /// be detrimental to performance.
    Suggest,
    /// A custom collection request defined by a specific implementation.
    Custom(u64),
}
321
/// Abort with a panic when a GC allocation cannot be completed.
///
/// Kept `#[cold]` and `#[inline(never)]` so this failure path stays out of the hot
/// allocation code and does not bloat callers.
#[cold]
#[inline(never)]
fn failed_allocation<E: Debug>(error: E) -> ! {
    panic!("Failed to perform GC allocation: {:?}", error)
}