//! `llff` — a linked-list first-fit heap allocator (part of `embedded_alloc`).

1use core::alloc::{GlobalAlloc, Layout};
2use core::cell::RefCell;
3use core::ptr::{self, NonNull};
4
5use critical_section::Mutex;
6use linked_list_allocator::Heap as LLHeap;
7
8/// A linked list first fit heap.
9pub struct Heap {
10    heap: Mutex<RefCell<LLHeap>>,
11}
12
13impl Heap {
14    /// Create a new UNINITIALIZED heap allocator
15    ///
16    /// You must initialize this heap using the
17    /// [`init`](Self::init) method before using the allocator.
18    pub const fn empty() -> Heap {
19        Heap {
20            heap: Mutex::new(RefCell::new(LLHeap::empty())),
21        }
22    }
23
24    /// Initializes the heap
25    ///
26    /// This function must be called BEFORE you run any code that makes use of the
27    /// allocator.
28    ///
29    /// `start_addr` is the address where the heap will be located.
30    ///
31    /// `size` is the size of the heap in bytes.
32    ///
33    /// Note that:
34    ///
35    /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will
36    ///   be the smallest address used.
37    ///
38    /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is
39    ///   `0x1000` and `size` is `0x30000` then the allocator won't use memory at
40    ///   addresses `0x31000` and larger.
41    ///
42    /// # Safety
43    ///
44    /// Obey these or Bad Stuff will happen.
45    ///
46    /// - This function must be called exactly ONCE.
47    /// - `size > 0`
48    pub unsafe fn init(&self, start_addr: usize, size: usize) {
49        critical_section::with(|cs| {
50            self.heap
51                .borrow(cs)
52                .borrow_mut()
53                .init(start_addr as *mut u8, size);
54        });
55    }
56
57    /// Returns an estimate of the amount of bytes in use.
58    pub fn used(&self) -> usize {
59        critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().used())
60    }
61
62    /// Returns an estimate of the amount of bytes available.
63    pub fn free(&self) -> usize {
64        critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().free())
65    }
66
67    fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
68        critical_section::with(|cs| {
69            self.heap
70                .borrow(cs)
71                .borrow_mut()
72                .allocate_first_fit(layout)
73                .ok()
74        })
75    }
76
77    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
78        critical_section::with(|cs| {
79            self.heap
80                .borrow(cs)
81                .borrow_mut()
82                .deallocate(NonNull::new_unchecked(ptr), layout)
83        });
84    }
85}
86
87unsafe impl GlobalAlloc for Heap {
88    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
89        self.alloc(layout)
90            .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
91    }
92
93    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
94        self.dealloc(ptr, layout);
95    }
96}
97
#[cfg(feature = "allocator_api")]
mod allocator_api {
    use super::*;
    use core::alloc::{AllocError, Allocator};

    unsafe impl Allocator for Heap {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            match layout.size() {
                // Zero-sized requests must not touch the heap; hand back a
                // well-aligned dangling pointer as the `Allocator` API allows.
                0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
                size => self.alloc(layout).map_or(Err(AllocError), |allocation| {
                    Ok(NonNull::slice_from_raw_parts(allocation, size))
                }),
            }
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            // Zero-sized allocations were never handed to the heap, so only
            // real blocks are returned to it.
            if layout.size() != 0 {
                self.dealloc(ptr.as_ptr(), layout);
            }
        }
    }
}