use core::alloc::{GlobalAlloc, Layout};

use align_address::Align;
use hermit_sync::RawInterruptTicketMutex;
use talc::{ErrOnOom, Span, Talc, Talck};
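
/// A heap allocator whose [`Talc`] instance is protected by an
/// interrupt-safe ticket lock.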
pub struct LockedAllocator(Talck<RawInterruptTicketMutex, ErrOnOom>);

impl LockedAllocator {
    /// Creates a new allocator; a heap must later be provided via [`Self::init`].
    pub const fn new() -> Self {
        Self(Talc::new(ErrOnOom).lock())
    }

    /// Pads `layout` to the size and alignment of `crossbeam_utils::CachePadded<u8>`,
    /// so that no two allocations ever share a cache line (avoiding false sharing).
    #[inline]
    fn align_layout(layout: Layout) -> Layout {
        let size = layout
            .size()
            .align_up(core::mem::size_of::<crossbeam_utils::CachePadded<u8>>());
        let align = layout
            .align()
            .max(core::mem::align_of::<crossbeam_utils::CachePadded<u8>>());
        Layout::from_size_align(size, align).unwrap()
    }

    /// Initializes the allocator with the given memory region as its heap.
    ///
    /// # Safety
    ///
    /// `heap_bottom..heap_bottom + heap_size` must be valid, otherwise unused memory.
    pub unsafe fn init(&self, heap_bottom: *mut u8, heap_size: usize) {
        let arena = Span::from_base_size(heap_bottom, heap_size);
        unsafe {
            self.0.talc().init(arena);
        }
    }

    /// Extends the memory region managed by the allocator.
    ///
    /// # Safety
    ///
    /// `heap_bottom` and `heap_size` must describe a valid memory region
    /// that satisfies the requirements of [`Talc::extend`].
    pub unsafe fn extend(&self, heap_bottom: *mut u8, heap_size: usize) {
        let arena = Span::from_base_size(heap_bottom, heap_size);
        unsafe {
            self.0.talc().extend(arena);
        }
    }
}
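
// Both `alloc` and `dealloc` route the caller's layout through `align_layout`,
// so deallocation always sees the same padded layout as the matching allocation.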
unsafe impl GlobalAlloc for LockedAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let layout = Self::align_layout(layout);
        unsafe { self.0.alloc(layout) }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let layout = Self::align_layout(layout);
        unsafe { self.0.dealloc(ptr, layout) }
    }
}
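
// Usage sketch (illustrative, not part of this module): a kernel would
// typically register an instance as the global allocator and hand it the
// heap once the memory map is known, e.g.:
//
//     #[global_allocator]
//     static ALLOCATOR: LockedAllocator = LockedAllocator::new();
//
//     // during boot, with `heap_start` and `heap_size` as assumed names:
//     // unsafe { ALLOCATOR.init(heap_start, heap_size) };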

#[cfg(all(test, not(target_os = "none")))]
mod tests {
    use core::mem;

    use super::*;

    #[test]
    fn empty() {
        const ARENA_SIZE: usize = 0x1000;
        let mut arena: [u8; ARENA_SIZE] = [0; ARENA_SIZE];
        let allocator = LockedAllocator::new();
        unsafe {
            allocator.init(arena.as_mut_ptr(), ARENA_SIZE);
        }

        // A small allocation must succeed.
        let layout = Layout::from_size_align(1, 1).unwrap();
        assert!(unsafe { !allocator.alloc(layout).is_null() });

        // Requesting the size of the entire arena cannot succeed once
        // allocator metadata and the previous allocation occupy part of it,
        // so `alloc` must report failure by returning a null pointer.
        let layout = Layout::from_size_align(0x1000, mem::align_of::<usize>()).unwrap();
        let addr = unsafe { allocator.alloc(layout) };
        assert!(addr.is_null());
    }
}