stm32f1_hal/common/simplest_heap.rs

use core::alloc::{GlobalAlloc, Layout};
use core::cell::{Cell, RefCell};
use core::ptr;
use critical_section::Mutex;

/// The simplest possible heap.
///
/// # Safety
///
/// Because it is the simplest possible implementation, it does **NOT** free memory.
/// Memory you drop cannot be reused (it is leaked), so avoid dropping allocations
/// wherever possible.
///
/// It is recommended that you use
/// [embedded-alloc](https://crates.io/crates/embedded-alloc) instead.
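///
/// # Example
///
/// A minimal usage sketch; the arena (`HEAP_MEM`, `HEAP_SIZE`) and the helper
/// function shown here are illustrative only, so adapt them to your target:
///
/// ```ignore
/// use core::mem::MaybeUninit;
///
/// #[global_allocator]
/// static HEAP: Heap = Heap::empty();
///
/// // Call this once, before anything allocates.
/// fn init_heap() {
///     const HEAP_SIZE: usize = 1024;
///     // A `u64` arena keeps the start address 8-byte aligned.
///     static mut HEAP_MEM: [MaybeUninit<u64>; HEAP_SIZE / 8] =
///         [MaybeUninit::uninit(); HEAP_SIZE / 8];
///     unsafe { HEAP.init(HEAP_MEM.as_ptr() as usize, HEAP_SIZE) };
/// }
/// ```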
pub struct Heap {
    heap: Mutex<RefCell<SimplestHeap>>,
    once_flag: Mutex<Cell<bool>>,
}

impl Heap {
    /// Create a new UNINITIALIZED heap allocator
    ///
    /// You must initialize this heap using the
    /// [`init`](Self::init) method before using the allocator.
    pub const fn empty() -> Heap {
        Heap {
            heap: Mutex::new(RefCell::new(SimplestHeap::empty())),
            once_flag: Mutex::new(Cell::new(false)),
        }
    }

    /// Initializes the heap
    ///
    /// This function must be called BEFORE you run any code that makes use of the
    /// allocator.
    ///
    /// `start_addr` is the address where the heap will be located.
    ///
    /// `size` is the size of the heap in bytes.
    ///
    /// # Safety
    ///
    /// Obey these or Bad Stuff will happen.
    ///
    /// - This function must be called exactly ONCE.
    /// - `size > 0`
    /// - `start_addr` must be at least as aligned as the most strictly aligned type
    ///   you will allocate, because allocations are handed out as plain offsets from it.
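    ///
    /// # Example
    ///
    /// A sketch of a typical call, assuming `HEAP` is the `#[global_allocator]`
    /// static from the type-level example and that the `cortex-m-rt` runtime is
    /// used (its `heap_start()` returns the first address after the static RAM
    /// sections); the 1 KiB size is illustrative only:
    ///
    /// ```ignore
    /// unsafe { HEAP.init(cortex_m_rt::heap_start() as usize, 1024) };
    /// ```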
    pub unsafe fn init(&self, start_addr: usize, size: usize) {
        assert!(size > 0);
        critical_section::with(|cs| {
            let once_flag = self.once_flag.borrow(cs);
            assert!(!once_flag.get());
            once_flag.set(true);

            self.heap
                .borrow_ref_mut(cs)
                .init(start_addr as *mut u8, size);
        });
    }

    /// Returns an estimate of the number of bytes in use.
    pub fn used(&self) -> usize {
        critical_section::with(|cs| self.heap.borrow_ref(cs).used())
    }

    /// Returns an estimate of the number of bytes available.
    pub fn free(&self) -> usize {
        critical_section::with(|cs| self.heap.borrow_ref(cs).free())
    }
}

unsafe impl GlobalAlloc for Heap {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        critical_section::with(|cs| self.heap.borrow_ref_mut(cs).alloc(layout))
    }

    // Deliberately a no-op: this allocator never frees memory (see the type-level docs).
    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
}

struct SimplestHeap {
    arena: *mut u8,
    remaining: usize,
    size: usize,
}

// `arena: *mut u8` is not `Send` by default. The heap is only ever accessed from
// within a critical section (via the `Mutex` above), so asserting `Send` is sound.
unsafe impl Send for SimplestHeap {}

impl SimplestHeap {
    const fn empty() -> Self {
        Self {
            arena: ptr::null_mut(),
            remaining: 0,
            size: 0,
        }
    }

    fn init(&mut self, start_addr: *mut u8, size: usize) {
        self.arena = start_addr;
        self.remaining = size;
        self.size = size;
    }

    fn free(&self) -> usize {
        self.remaining
    }

    fn used(&self) -> usize {
        self.size - self.remaining
    }

    fn alloc(&mut self, layout: Layout) -> *mut u8 {
        if layout.size() > self.remaining {
            return ptr::null_mut();
        }

        // `Layout` contract forbids making a `Layout` with align=0, or align not power of 2.
        // So we can safely use a mask to ensure alignment without worrying about UB.
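        // E.g. for `align == 8` the mask is `!0b111`: rounding `remaining` down to a
        // multiple of 8 keeps `arena + remaining` 8-byte aligned, assuming the
        // `start_addr` passed to `init` was itself at least 8-byte aligned.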
        let align_mask_to_round_down = !(layout.align() - 1);

        self.remaining -= layout.size();
        self.remaining &= align_mask_to_round_down;
        self.arena.wrapping_add(self.remaining)
    }
}