// embedded_alloc/llff.rs

use core::alloc::{GlobalAlloc, Layout};
use core::cell::RefCell;
use core::ptr::{self, NonNull};

use critical_section::Mutex;
use linked_list_allocator::Heap as LLHeap;
/// A linked-list first-fit heap.
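///
/// # Example
///
/// A minimal registration sketch (the static name `HEAP` is illustrative,
/// not part of this crate):
///
/// ```ignore
/// #[global_allocator]
/// static HEAP: Heap = Heap::empty();
/// ```
///
/// See [`init`](Self::init) for the required one-time initialization.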
pub struct Heap {
    // The flag records whether `init` has already been called.
    heap: Mutex<RefCell<(LLHeap, bool)>>,
}

impl Heap {
    /// Create a new UNINITIALIZED heap allocator.
    ///
    /// You must initialize this heap using the
    /// [`init`](Self::init) method before using the allocator.
    pub const fn empty() -> Heap {
        Heap {
            heap: Mutex::new(RefCell::new((LLHeap::empty(), false))),
        }
    }

    /// Initializes the heap.
    ///
    /// This function must be called BEFORE you run any code that makes use of the
    /// allocator.
    ///
    /// `start_addr` is the address where the heap will be located.
    ///
    /// `size` is the size of the heap in bytes.
    ///
    /// Note that:
    ///
    /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will
    ///   be the smallest address used.
    ///
    /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is
    ///   `0x1000` and `size` is `0x30000` then the allocator won't use memory at
    ///   addresses `0x31000` and larger.
    ///
    /// # Safety
    ///
    /// This function is safe to call if the following invariants hold:
    ///
    /// - `start_addr` points to valid, writable memory that is not used by
    ///   anything else.
    /// - `size` is the size of that memory region, in bytes.
    ///
    /// # Panics
    ///
    /// This function panics if either of the following is true:
    ///
    /// - This function is called more than ONCE.
    /// - `size == 0`.
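    ///
    /// # Example
    ///
    /// A sketch of one-time initialization from a statically reserved region
    /// (`HEAP`, `HEAP_MEM`, and the size are illustrative):
    ///
    /// ```ignore
    /// use core::mem::MaybeUninit;
    ///
    /// const HEAP_SIZE: usize = 1024;
    /// static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
    ///
    /// // Safety: `HEAP_MEM` is valid for `HEAP_SIZE` bytes and is used for
    /// // nothing else, and `init` is called exactly once.
    /// unsafe { HEAP.init(core::ptr::addr_of_mut!(HEAP_MEM) as usize, HEAP_SIZE) }
    /// ```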
    pub unsafe fn init(&self, start_addr: usize, size: usize) {
        assert!(size > 0);
        critical_section::with(|cs| {
            let mut heap = self.heap.borrow_ref_mut(cs);
            // Reject double initialization, then mark the heap as initialized.
            assert!(!heap.1);
            heap.1 = true;
            heap.0.init(start_addr as *mut u8, size);
        });
    }

    /// Returns an estimate of the number of bytes in use.
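    ///
    /// A hypothetical diagnostic sketch (`HEAP` is assumed to be an
    /// initialized static instance of this allocator):
    ///
    /// ```ignore
    /// let in_use = HEAP.used();
    /// let remaining = HEAP.free();
    /// // e.g. log heap pressure, or assert on a low-water mark:
    /// assert!(remaining > 128);
    /// ```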
    pub fn used(&self) -> usize {
        critical_section::with(|cs| self.heap.borrow_ref_mut(cs).0.used())
    }

    /// Returns an estimate of the number of bytes available.
    pub fn free(&self) -> usize {
        critical_section::with(|cs| self.heap.borrow_ref_mut(cs).0.free())
    }

    fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
        critical_section::with(|cs| {
            // First-fit search; `None` means the heap is exhausted or too
            // fragmented to satisfy `layout`.
            self.heap
                .borrow_ref_mut(cs)
                .0
                .allocate_first_fit(layout)
                .ok()
        })
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        critical_section::with(|cs| {
            // Safety: the caller guarantees `ptr` came from `alloc` with this
            // `layout`, so it is non-null.
            self.heap
                .borrow_ref_mut(cs)
                .0
                .deallocate(NonNull::new_unchecked(ptr), layout)
        });
    }
}

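// Registering a `Heap` with `#[global_allocator]` routes all `alloc`
// collections (`Box`, `Vec`, ...) through this implementation. Per the
// `GlobalAlloc` contract, a null pointer returned from `alloc` signals
// out-of-memory, and the runtime then calls `alloc::alloc::handle_alloc_error`.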
unsafe impl GlobalAlloc for Heap {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.alloc(layout)
            .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.dealloc(ptr, layout);
    }
}

#[cfg(feature = "allocator_api")]
mod allocator_api {
    use super::*;
    use core::alloc::{AllocError, Allocator};

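    // With the nightly `allocator_api` feature, a `Heap` can also back
    // individual collections instead of (or in addition to) serving as the
    // global allocator. A sketch, assuming an initialized static `HEAP` and
    // `#![feature(allocator_api)]` in the consuming crate:
    //
    //     let v: Vec<u8, &Heap> = Vec::new_in(&HEAP);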
    unsafe impl Allocator for Heap {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            match layout.size() {
                // Zero-sized allocations never touch the heap; a dangling,
                // well-aligned pointer is sufficient.
                0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
                size => self.alloc(layout).map_or(Err(AllocError), |allocation| {
                    Ok(NonNull::slice_from_raw_parts(allocation, size))
                }),
            }
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            // Zero-sized "allocations" were never in the heap, so there is
            // nothing to free.
            if layout.size() != 0 {
                self.dealloc(ptr.as_ptr(), layout);
            }
        }
    }
}