use core::alloc::{GlobalAlloc, Layout};
use core::cell::RefCell;
use core::ptr::{self, NonNull};

use const_default::ConstDefault;
use critical_section::Mutex;
use rlsf::Tlsf;

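/// A TLSF heap using `usize` first- and second-level bitmaps, sized to the
/// maximum number of levels those bitmaps can index (`usize::BITS`).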
type TlsfHeap = Tlsf<'static, usize, usize, { usize::BITS as usize }, { usize::BITS as usize }>;

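/// Allocator state: the TLSF structure plus bookkeeping for the memory
/// region handed to `Heap::init`.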
struct Inner {
    tlsf: TlsfHeap,
    initialized: bool,
    raw_block: Option<NonNull<[u8]>>,
    raw_block_size: usize,
}

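// SAFETY: `Inner` holds raw pointers into the heap region, but it is only
// ever reached through `Heap`, which serializes all access behind a
// critical-section `Mutex`.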
unsafe impl Sync for Inner {}
unsafe impl Send for Inner {}

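/// A heap allocator backed by the TLSF (two-level segregated fit) algorithm
/// from the `rlsf` crate. Every operation runs inside a critical section,
/// so a single instance can serve threads and interrupt handlers alike.
///
/// A minimal usage sketch; the names `HEAP` and `HEAP_MEM` and the 1 KiB
/// size are illustrative, not part of this API:
///
/// ```ignore
/// #[global_allocator]
/// static HEAP: Heap = Heap::empty();
///
/// fn init_heap() {
///     const HEAP_SIZE: usize = 1024;
///     static mut HEAP_MEM: [u8; HEAP_SIZE] = [0; HEAP_SIZE];
///     // SAFETY: called once, and `HEAP_MEM` is used for nothing else.
///     unsafe { HEAP.init(core::ptr::addr_of_mut!(HEAP_MEM) as usize, HEAP_SIZE) }
/// }
/// ```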
pub struct Heap {
    heap: Mutex<RefCell<Inner>>,
}

impl Heap {
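    /// Creates a new, uninitialized heap. `init` must be called before the
    /// heap can satisfy allocations.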
    pub const fn empty() -> Heap {
        Heap {
            heap: Mutex::new(RefCell::new(Inner {
                tlsf: ConstDefault::DEFAULT,
                initialized: false,
                raw_block: None,
                raw_block_size: 0,
            })),
        }
    }

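    /// Initializes the heap with the region beginning at `start_addr` and
    /// extending for `size` bytes, registering it as one free block with the
    /// TLSF allocator.
    ///
    /// # Safety
    ///
    /// - Must be called exactly once, before any allocation.
    /// - `start_addr` must be non-zero and point to `size` bytes of writable
    ///   memory reserved exclusively for this heap.
    /// - `size` must be greater than zero.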
    pub unsafe fn init(&self, start_addr: usize, size: usize) {
        assert!(size > 0);
        critical_section::with(|cs| {
            let mut heap = self.heap.borrow_ref_mut(cs);
            assert!(!heap.initialized);
            heap.initialized = true;
            // SAFETY (caller contract): `start_addr` is non-zero and points
            // to `size` bytes of otherwise unused memory.
            let block: NonNull<[u8]> =
                NonNull::slice_from_raw_parts(NonNull::new_unchecked(start_addr as *mut u8), size);
            heap.tlsf.insert_free_block_ptr(block);
            heap.raw_block = Some(block);
            heap.raw_block_size = size;
        });
    }

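    /// Attempts to satisfy `layout` from the TLSF free lists, returning
    /// `None` when the heap is uninitialized or has no fitting free block.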
    fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
        critical_section::with(|cs| self.heap.borrow_ref_mut(cs).tlsf.allocate(layout))
    }

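    /// Returns an allocation to the heap.
    ///
    /// # Safety
    ///
    /// `ptr` must denote a live block previously returned by `alloc` with
    /// the same `layout`.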
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        critical_section::with(|cs| {
            self.heap
                .borrow_ref_mut(cs)
                .tlsf
                .deallocate(NonNull::new_unchecked(ptr), layout.align())
        })
    }

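    /// Returns an estimate of the heap space currently in use, in bytes:
    /// the size of the region given to `init` minus the free space reported
    /// by `free`. This includes allocator bookkeeping overhead.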
    pub fn used(&self) -> usize {
        critical_section::with(|cs| {
            // Read the region size in its own statement so the RefCell
            // borrow is released before `free_with_cs` borrows it again;
            // keeping it in one expression would panic at runtime with a
            // double mutable borrow.
            let raw_block_size = self.heap.borrow_ref_mut(cs).raw_block_size;
            raw_block_size - self.free_with_cs(cs)
        })
    }

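    /// Returns an estimate of the free heap space, in bytes: the sum of the
    /// maximum payload sizes of all unoccupied blocks, or 0 if the heap has
    /// not been initialized.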
    pub fn free(&self) -> usize {
        critical_section::with(|cs| self.free_with_cs(cs))
    }

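    /// Sums the free space while a critical section is already held, so that
    /// `used` and `free` can share the implementation without re-entering
    /// `critical_section::with`.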
    fn free_with_cs(&self, cs: critical_section::CriticalSection) -> usize {
        let inner_mut = self.heap.borrow_ref_mut(cs);
        if !inner_mut.initialized {
            return 0;
        }
        // SAFETY: the heap is initialized, so `raw_block` is the pool pointer
        // previously registered with `insert_free_block_ptr`, and the
        // critical section keeps any other heap operation from running while
        // the blocks are being walked.
        unsafe {
            inner_mut
                .tlsf
                .iter_blocks(inner_mut.raw_block.unwrap())
                .filter(|block_info| !block_info.is_occupied())
                .map(|block_info| block_info.max_payload_size())
                .sum::<usize>()
        }
    }
}

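// SAFETY: `alloc` reports failure with a null pointer, as `GlobalAlloc`
// requires, and `dealloc` forwards a pointer/layout pair that the trait
// contract guarantees came from a matching `alloc` call.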
unsafe impl GlobalAlloc for Heap {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.alloc(layout)
            .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.dealloc(ptr, layout)
    }
}

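// With the nightly-only `allocator_api` feature, the heap also implements
// the unstable `Allocator` trait. Zero-sized requests never reach the TLSF;
// they are answered with a dangling, well-aligned pointer, as the trait
// permits.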
#[cfg(feature = "allocator_api")]
mod allocator_api {
    use super::*;
    use core::alloc::{AllocError, Allocator};

    unsafe impl Allocator for Heap {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            match layout.size() {
                0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
                size => self.alloc(layout).map_or(Err(AllocError), |allocation| {
                    Ok(NonNull::slice_from_raw_parts(allocation, size))
                }),
            }
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            if layout.size() != 0 {
                self.dealloc(ptr.as_ptr(), layout);
            }
        }
    }
}