// stabby_abi/alloc/allocators/rust_alloc.rs

use crate::alloc::{IAlloc, Layout};

/// Rust's GlobalAlloc, annotating its yielded pointers in such a way that the allocated pointers can be safely freed from other binaries.
#[crate::stabby]
#[derive(Clone, Copy)]
pub struct RustAlloc {
    // Zero-sized marker field: the allocator itself carries no state.
    // All per-allocation bookkeeping lives in the `RustAllocPrefix` written
    // immediately before each pointer this allocator hands out.
    inner: [u8; 0],
}
#[crate::stabby]
/// The VTable for [`RustAlloc`]
///
/// A copy of this vtable is embedded in every allocation's [`RustAllocPrefix`],
/// so the pointer can later be freed/reallocated through the functions of the
/// binary that originally allocated it, even across a dynamic-linking boundary.
pub struct RustAllocVt {
    // Frees the user pointer; takes the full stored layout (prefix included).
    free: extern "C" fn(*mut (), crate::alloc::Layout),
    // Reallocates the user pointer to the given size; returns the new user
    // pointer (or null on failure).
    realloc: extern "C" fn(*mut (), crate::alloc::Layout, usize) -> *mut (),
}
#[crate::stabby]
/// The Prefix for [`RustAlloc`]
///
/// Written immediately *before* every pointer returned by this allocator,
/// recording the allocation's full layout and the allocating binary's vtable.
pub struct RustAllocPrefix {
    // The full layout of the underlying allocation (prefix included).
    layout: Layout,
    // The free/realloc entry points of the binary that made the allocation.
    vtable: RustAllocVt,
}
21
22extern "C" fn alloc(requested: crate::alloc::Layout) -> *mut () {
23    let requested = Layout::of::<RustAllocPrefix>().concat(requested);
24    let Ok(layout) = core::alloc::Layout::from_size_align(requested.size, requested.align) else {
25        return core::ptr::null_mut();
26    };
27    // SAFETY: The layout is always non-zero-sized
28    let alloc_start = unsafe { alloc_rs::alloc::alloc(layout) };
29    let ret = // SAFETY: the addition is indeed in-bound.
30        unsafe { alloc_start.add(layout.align().max(core::mem::size_of::<RustAllocPrefix>())) };
31    // SAFETY: `ret` is allocated and _at least_ one `RustAllocPrefix` greater than the start of the allocation, so writing there is safe.
32    unsafe {
33        ret.cast::<RustAllocPrefix>().sub(1).write(RustAllocPrefix {
34            layout: requested,
35            vtable: VTABLE,
36        })
37    };
38    ret.cast()
39}
40extern "C" fn realloc(ptr: *mut (), prev_layout: crate::alloc::Layout, new_size: usize) -> *mut () {
41    // SAFETY: The corresponding `alloc` returns the allocation offset by this much (see the line where `ret` is constructed in both the `alloc` and `realloc` functions)
42    let realloc_start = unsafe {
43        ptr.cast::<u8>().sub(
44            prev_layout
45                .align
46                .max(core::mem::size_of::<RustAllocPrefix>()),
47        )
48    };
49    let Ok(layout) = core::alloc::Layout::from_size_align(prev_layout.size, prev_layout.align)
50    else {
51        return core::ptr::null_mut();
52    };
53    let requested = Layout::of::<RustAllocPrefix>().concat(Layout {
54        size: new_size,
55        align: prev_layout.align,
56    });
57    // SAFETY: See each line
58    unsafe {
59        // If `ptr` was indeed allocated on by this allocator, then `realloc_start` was indeed allocated by _our_ GlobalAlloc.
60        let alloc_start = alloc_rs::alloc::realloc(realloc_start, layout, requested.size);
61        // We follow the same return-value shifting as in `alloc`
62        let ret = alloc_start.add(layout.align().max(core::mem::size_of::<RustAllocPrefix>()));
63        // And prepend the same prefix
64        ret.cast::<RustAllocPrefix>().sub(1).write(RustAllocPrefix {
65            layout: requested,
66            vtable: VTABLE,
67        });
68        ret.cast()
69    }
70}
71extern "C" fn free(ptr: *mut (), prev_layout: crate::alloc::Layout) {
72    // SAFETY: The corresponding `alloc` returns the allocation offset by this much (see the line where `ret` is constructed in both the `alloc` and `realloc` functions)
73    let dealloc_start = unsafe {
74        ptr.cast::<u8>().sub(
75            prev_layout
76                .align
77                .max(core::mem::size_of::<RustAllocPrefix>()),
78        )
79    };
80    // If `ptr` was indeed allocated on by this allocator, then `dealloc_start` was indeed allocated by _our_ GlobalAlloc.
81    unsafe {
82        alloc_rs::alloc::dealloc(
83            dealloc_start,
84            core::alloc::Layout::from_size_align_unchecked(prev_layout.size, prev_layout.align),
85        )
86    }
87}
88const VTABLE: RustAllocVt = RustAllocVt {
89    free: free as extern "C" fn(*mut (), crate::alloc::Layout),
90    realloc: realloc as extern "C" fn(*mut (), crate::alloc::Layout, usize) -> *mut (),
91};
92impl RustAlloc {
93    /// Constructs the allocator.
94    pub const fn new() -> Self {
95        Self { inner: [] }
96    }
97}
98impl Default for RustAlloc {
99    fn default() -> Self {
100        Self::new()
101    }
102}
103impl IAlloc for RustAlloc {
104    fn alloc(&mut self, layout: crate::alloc::Layout) -> *mut () {
105        alloc(layout)
106    }
107
108    unsafe fn free(&mut self, ptr: *mut ()) {
109        let RustAllocPrefix { layout, vtable } = // SAFETY: if called with a `ptr` allocated by an instance of `self`, this read is valid.
110            unsafe { ptr.cast::<RustAllocPrefix>().sub(1).read() };
111        (vtable.free)(ptr, layout)
112    }
113
114    unsafe fn realloc(
115        &mut self,
116        ptr: *mut (),
117        _prev_layout: crate::alloc::Layout,
118        new_size: usize,
119    ) -> *mut () {
120        let RustAllocPrefix { layout, vtable } = // SAFETY: if called with a `ptr` allocated by an instance of `self`, this read is valid.
121            unsafe { ptr.cast::<RustAllocPrefix>().sub(1).read() };
122        (vtable.realloc)(ptr, layout, new_size)
123    }
124}