embedded_alloc/tlsf.rs

use core::alloc::{GlobalAlloc, Layout};
use core::cell::RefCell;
use core::ptr::{self, NonNull};

use const_default::ConstDefault;
use critical_section::Mutex;
use rlsf::Tlsf;

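// `Tlsf<'pool, FLBitmap, SLBitmap, FLLEN, SLLEN>`: `usize`-wide bitmaps for
// both index levels, with the first- and second-level list lengths set to
// `usize::BITS` so the pool can cover any block size a `usize` can express.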
type TlsfHeap = Tlsf<'static, usize, usize, { usize::BITS as usize }, { usize::BITS as usize }>;

/// A two-level segregated fit (TLSF) heap.
pub struct Heap {
    heap: Mutex<RefCell<TlsfHeap>>,
}

impl Heap {
    /// Create a new UNINITIALIZED heap allocator
    ///
    /// You must initialize this heap using the
    /// [`init`](Self::init) method before using the allocator.
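    ///
    /// A typical pattern (the `HEAP` name is illustrative, not part of this
    /// module) is to place the heap in a `#[global_allocator]` static:
    ///
    /// ```ignore
    /// #[global_allocator]
    /// static HEAP: Heap = Heap::empty();
    /// ```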
    pub const fn empty() -> Heap {
        Heap {
            heap: Mutex::new(RefCell::new(ConstDefault::DEFAULT)),
        }
    }

    /// Initializes the heap
    ///
    /// This function must be called BEFORE you run any code that makes use of the
    /// allocator.
    ///
    /// `start_addr` is the address where the heap will be located.
    ///
    /// `size` is the size of the heap in bytes.
    ///
    /// Note that:
    ///
    /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will
    ///   be the smallest address used.
    ///
    /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is
    ///   `0x1000` and `size` is `0x30000` then the allocator won't use memory at
    ///   addresses `0x31000` and larger.
    ///
    /// # Safety
    ///
    /// Obey these or Bad Stuff will happen.
    ///
    /// - This function must be called exactly ONCE.
    /// - `size > 0`
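    ///
    /// # Example
    ///
    /// A minimal initialization sketch; `HEAP`, `HEAP_SIZE`, and `HEAP_MEM` are
    /// illustrative names, not part of this module:
    ///
    /// ```ignore
    /// use core::mem::MaybeUninit;
    ///
    /// #[global_allocator]
    /// static HEAP: Heap = Heap::empty();
    ///
    /// const HEAP_SIZE: usize = 1024;
    /// static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
    ///
    /// // Call exactly once, before the first allocation:
    /// unsafe { HEAP.init(core::ptr::addr_of!(HEAP_MEM) as usize, HEAP_SIZE) }
    /// ```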
    pub unsafe fn init(&self, start_addr: usize, size: usize) {
        critical_section::with(|cs| {
            // Hand the whole region to the TLSF pool as one free block.
            let block: &[u8] = core::slice::from_raw_parts(start_addr as *const u8, size);
            self.heap
                .borrow(cs)
                .borrow_mut()
                .insert_free_block_ptr(block.into());
        });
    }

    fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
        critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().allocate(layout))
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        critical_section::with(|cs| {
            self.heap
                .borrow(cs)
                .borrow_mut()
                .deallocate(NonNull::new_unchecked(ptr), layout.align())
        })
    }
}

unsafe impl GlobalAlloc for Heap {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // A null pointer tells the caller that allocation failed.
        self.alloc(layout)
            .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.dealloc(ptr, layout)
    }
}

#[cfg(feature = "allocator_api")]
mod allocator_api {
    use super::*;
    use core::alloc::{AllocError, Allocator};

    unsafe impl Allocator for Heap {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            match layout.size() {
                // Zero-sized allocations need no backing memory; return a
                // well-aligned dangling pointer instead.
                0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
                size => self.alloc(layout).map_or(Err(AllocError), |allocation| {
                    Ok(NonNull::slice_from_raw_parts(allocation, size))
                }),
            }
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            // Zero-sized allocations were never backed by real memory.
            if layout.size() != 0 {
                self.dealloc(ptr.as_ptr(), layout);
            }
        }
    }
}
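
// Usage sketch for the nightly-only `allocator_api` feature; `HEAP` is the
// illustrative static from the `init` example above:
//
//     #![feature(allocator_api)]
//     use alloc::vec::Vec;
//
//     // Collections can borrow this heap as their allocator.
//     let mut xs: Vec<u8, &Heap> = Vec::new_in(&HEAP);
//     xs.push(42);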