use core::alloc::{GlobalAlloc, Layout};
use core::cell::RefCell;
use core::ptr::{self, NonNull};
use critical_section::Mutex;
use linked_list_allocator::Heap as LLHeap;
/// A locking heap allocator built on [`linked_list_allocator::Heap`].
///
/// All access to the inner heap is serialized through a
/// `critical_section::Mutex`, so this type can back a `#[global_allocator]`
/// on bare-metal targets where allocations may happen from preemptible
/// contexts (assumes the linked `critical-section` implementation provides
/// real mutual exclusion — confirm for the target platform).
pub struct Heap {
    // (inner heap, initialized flag): the bool records whether `init` has
    // already run so a second initialization can be caught by an assertion.
    heap: Mutex<RefCell<(LLHeap, bool)>>,
}
impl Heap {
    /// Creates a new, uninitialized heap.
    ///
    /// Every allocation fails (yields `None` / a null pointer) until
    /// [`Heap::init`] has been called.
    pub const fn empty() -> Heap {
        Heap {
            heap: Mutex::new(RefCell::new((LLHeap::empty(), false))),
        }
    }

    /// Initializes the heap over the region `[start_addr, start_addr + size)`.
    ///
    /// # Panics
    ///
    /// Panics if `size` is zero, or if the heap has already been initialized.
    ///
    /// # Safety
    ///
    /// - The memory region must be valid, writable, and not used for anything
    ///   else for as long as this heap is in use.
    /// - `start_addr` must be a non-null address suitable for the region.
    /// - This must be called before any allocation is attempted.
    pub unsafe fn init(&self, start_addr: usize, size: usize) {
        assert!(size > 0);
        critical_section::with(|cs| {
            let mut heap = self.heap.borrow_ref_mut(cs);
            // Re-initializing would corrupt the allocator's free list.
            assert!(!heap.1, "heap already initialized");
            heap.1 = true;
            heap.0.init(start_addr as *mut u8, size);
        });
    }

    /// Returns the number of bytes currently allocated from this heap.
    pub fn used(&self) -> usize {
        // A shared borrow suffices for a read-only accounting query.
        critical_section::with(|cs| self.heap.borrow_ref(cs).0.used())
    }

    /// Returns the number of bytes still available for allocation.
    pub fn free(&self) -> usize {
        // A shared borrow suffices for a read-only accounting query.
        critical_section::with(|cs| self.heap.borrow_ref(cs).0.free())
    }

    /// Attempts to allocate memory matching `layout`; `None` on exhaustion
    /// (or if the heap was never initialized).
    fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
        critical_section::with(|cs| {
            self.heap
                .borrow_ref_mut(cs)
                .0
                .allocate_first_fit(layout)
                .ok()
        })
    }

    /// Returns an allocation to the heap.
    ///
    /// Caller contract (enforced by the `GlobalAlloc`/`Allocator` wrappers):
    /// `ptr` must be non-null and must have been produced by `Self::alloc`
    /// with the same `layout`.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        critical_section::with(|cs| {
            self.heap
                .borrow_ref_mut(cs)
                .0
                .deallocate(NonNull::new_unchecked(ptr), layout)
        });
    }
}
// Bridge the `Option`-based internal API to the raw-pointer contract of
// `GlobalAlloc`, where a null pointer signals allocation failure.
unsafe impl GlobalAlloc for Heap {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        match self.alloc(layout) {
            Some(allocation) => allocation.as_ptr(),
            None => ptr::null_mut(),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.dealloc(ptr, layout)
    }
}
#[cfg(feature = "allocator_api")]
mod allocator_api {
    use super::*;
    use core::alloc::{AllocError, Allocator};

    unsafe impl Allocator for Heap {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            // Zero-sized requests must succeed without touching the heap;
            // a well-aligned dangling pointer is the canonical result.
            if layout.size() == 0 {
                return Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0));
            }
            let ptr = self.alloc(layout).ok_or(AllocError)?;
            Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            // Zero-sized allocations were never backed by heap memory,
            // so there is nothing to return.
            if layout.size() != 0 {
                self.dealloc(ptr.as_ptr(), layout);
            }
        }
    }
}