1use core::alloc::{GlobalAlloc, Layout};
2use core::cell::RefCell;
3use core::ptr::{self, NonNull};
4
5use const_default::ConstDefault;
6use critical_section::Mutex;
7use rlsf::Tlsf;
8
/// Two-Level Segregated Fit heap from the `rlsf` crate, with first- and
/// second-level bitmaps sized to the full width of `usize`.
type TlsfHeap = Tlsf<'static, usize, usize, { usize::BITS as usize }, { usize::BITS as usize }>;
10
/// A heap allocator backed by a TLSF pool, safe to use as a
/// `#[global_allocator]`: the pool is protected by a critical section
/// (interrupt-safe exclusion) and a `RefCell` for interior mutability.
pub struct Heap {
    // NOTE(review): `critical_section::Mutex` only grants access inside
    // `critical_section::with`, so the `RefCell` borrow cannot be contended.
    heap: Mutex<RefCell<TlsfHeap>>,
}
15
16impl Heap {
17 pub const fn empty() -> Heap {
22 Heap {
23 heap: Mutex::new(RefCell::new(ConstDefault::DEFAULT)),
24 }
25 }
26
27 pub unsafe fn init(&self, start_addr: usize, size: usize) {
52 critical_section::with(|cs| {
53 let block: &[u8] = core::slice::from_raw_parts(start_addr as *const u8, size);
54 self.heap
55 .borrow(cs)
56 .borrow_mut()
57 .insert_free_block_ptr(block.into());
58 });
59 }
60
61 fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
62 critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().allocate(layout))
63 }
64
65 unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
66 critical_section::with(|cs| {
67 self.heap
68 .borrow(cs)
69 .borrow_mut()
70 .deallocate(NonNull::new_unchecked(ptr), layout.align())
71 })
72 }
73}
74
75unsafe impl GlobalAlloc for Heap {
76 unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
77 self.alloc(layout)
78 .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
79 }
80
81 unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
82 self.dealloc(ptr, layout)
83 }
84}
85
#[cfg(feature = "allocator_api")]
mod allocator_api {
    use super::*;
    use core::alloc::{AllocError, Allocator};

    unsafe impl Allocator for Heap {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            let size = layout.size();
            // The `Allocator` contract requires zero-sized requests to
            // succeed without touching the pool; hand back a dangling,
            // well-aligned pointer.
            if size == 0 {
                return Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0));
            }
            match self.alloc(layout) {
                Some(block) => Ok(NonNull::slice_from_raw_parts(block, size)),
                None => Err(AllocError),
            }
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            // Zero-sized "allocations" were never drawn from the pool, so
            // there is nothing to return.
            if layout.size() != 0 {
                self.dealloc(ptr.as_ptr(), layout);
            }
        }
    }
}