// stm32f1_hal/common/simplest_heap.rs

use crate::common::critical_section::Mutex;
use core::{
    alloc::{GlobalAlloc, Layout},
    cell::{Cell, RefCell},
    ptr,
};
7
/// The simplest possible heap.
///
/// # Safety
///
/// Because it's the simplest implementation, it does **NOT** free memory.
/// Any memory you drop cannot be reused (it's leaked), so avoid dropping anything whenever possible.
///
/// It is recommended that you use [embedded-alloc](https://crates.io/crates/embedded-alloc)
pub struct Heap {
    // Bump-allocator state, shared behind a critical-section `Mutex` —
    // NOTE(review): presumably so the allocator is usable from interrupt
    // context as well as thread context; confirm against the HAL's
    // critical_section implementation.
    heap: Mutex<RefCell<SimplestHeap>>,
    // Set to `true` by the first `init` call; guards against re-initialization.
    once_flag: Mutex<Cell<bool>>,
}
20
21impl Heap {
22    /// Create a new UNINITIALIZED heap allocator
23    ///
24    /// You must initialize this heap using the
25    /// [`init`](Self::init) method before using the allocator.
26    pub const fn empty() -> Heap {
27        Heap {
28            heap: Mutex::new(RefCell::new(SimplestHeap::empty())),
29            once_flag: Mutex::new(Cell::new(false)),
30        }
31    }
32
33    /// Initializes the heap
34    ///
35    /// This function must be called BEFORE you run any code that makes use of the
36    /// allocator.
37    ///
38    /// `start_addr` is the address where the heap will be located.
39    ///
40    /// `size` is the size of the heap in bytes.
41    ///
42    /// # Safety
43    ///
44    /// Obey these or Bad Stuff will happen.
45    ///
46    /// - This function must be called exactly ONCE.
47    /// - `size > 0`
48    pub unsafe fn init(&self, start_addr: usize, size: usize) {
49        assert!(size > 0);
50        critical_section::with(|cs| {
51            let once_flag = self.once_flag.borrow(cs);
52            assert!(!once_flag.get());
53            once_flag.set(true);
54
55            self.heap
56                .borrow_ref_mut(cs)
57                .init(start_addr as *mut u8, size);
58        });
59    }
60
61    /// Returns an estimate of the amount of bytes in use.
62    pub fn used(&self) -> usize {
63        critical_section::with(|cs| self.heap.borrow_ref(cs).used())
64    }
65
66    /// Returns an estimate of the amount of bytes available.
67    pub fn free(&self) -> usize {
68        critical_section::with(|cs| self.heap.borrow_ref(cs).free())
69    }
70}
71
72unsafe impl GlobalAlloc for Heap {
73    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
74        critical_section::with(|cs| self.heap.borrow_ref_mut(cs).alloc(layout))
75    }
76
77    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
78}
79
/// Backing state for [`Heap`]: a bump-down arena allocator.
///
/// `arena` points at the start of the backing region; allocations are carved
/// off the top (`arena + remaining`) downwards. Memory is never reclaimed —
/// `remaining` only ever shrinks.
struct SimplestHeap {
    // Start address of the backing region (null until `init`).
    arena: *mut u8,
    // Bytes still available, measured as an offset from `arena`.
    remaining: usize,
    // Total arena size in bytes, fixed at `init`.
    size: usize,
}

// SAFETY: the raw pointer makes this type `!Send` by default; all shared
// access in this file goes through the critical-section `Mutex` inside
// `Heap`, which serializes use across contexts.
unsafe impl Send for SimplestHeap {}

impl SimplestHeap {
    /// A heap with no backing memory; every allocation fails until `init`.
    const fn empty() -> Self {
        Self {
            arena: ptr::null_mut(),
            remaining: 0,
            size: 0,
        }
    }

    /// Attach the backing region `[start_addr, start_addr + size)`.
    fn init(&mut self, start_addr: *mut u8, size: usize) {
        self.arena = start_addr;
        self.remaining = size;
        self.size = size;
    }

    /// Bytes still available.
    fn free(&self) -> usize {
        self.remaining
    }

    /// Bytes consumed so far (including alignment padding).
    fn used(&self) -> usize {
        self.size - self.remaining
    }

    /// Bump-down allocation: take `layout.size()` bytes off the top of the
    /// free region, rounded down so the returned pointer satisfies
    /// `layout.align()`. Returns null when the request cannot be met.
    fn alloc(&mut self, layout: Layout) -> *mut u8 {
        let size = layout.size();
        if size > self.remaining {
            return ptr::null_mut();
        }

        // `Layout` guarantees align is a non-zero power of two, so this mask
        // rounds an address down to the alignment boundary without UB.
        let align_mask = !(layout.align() - 1);

        // Align the *absolute* address, not just the offset into the arena:
        // aligning the offset alone yields a misaligned pointer whenever the
        // arena itself does not start on a `layout.align()` boundary.
        let base = self.arena as usize;
        let addr = (base + self.remaining - size) & align_mask;
        if addr < base {
            // Alignment padding pushed the allocation below the arena start.
            return ptr::null_mut();
        }

        // State is only updated once the allocation is known to succeed.
        self.remaining = addr - base;
        addr as *mut u8
    }
}
124}