// embedded_alloc/tlsf.rs

use core::alloc::{GlobalAlloc, Layout};
use core::cell::RefCell;
use core::ptr::{self, NonNull};

use const_default::ConstDefault;
use critical_section::Mutex;
use rlsf::Tlsf;

// A TLSF heap using `usize` bitmaps with the largest first- and second-level
// list counts those bitmaps allow (`usize::BITS` each).
type TlsfHeap = Tlsf<'static, usize, usize, { usize::BITS as usize }, { usize::BITS as usize }>;

struct Inner {
    tlsf: TlsfHeap,
    initialized: bool,
    raw_block: Option<NonNull<[u8]>>,
    raw_block_size: usize,
}

// Safety: The whole inner type is wrapped by a [Mutex].
unsafe impl Sync for Inner {}
unsafe impl Send for Inner {}

/// A two-level segregated fit heap.
pub struct Heap {
    heap: Mutex<RefCell<Inner>>,
}

impl Heap {
    /// Create a new UNINITIALIZED heap allocator
    ///
    /// You must initialize this heap using the
    /// [`init`](Self::init) method before using the allocator.
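    ///
    /// # Example
    ///
    /// A minimal sketch; the static name `HEAP` is illustrative:
    ///
    /// ```ignore
    /// #[global_allocator]
    /// static HEAP: Heap = Heap::empty();
    /// ```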
    pub const fn empty() -> Heap {
        Heap {
            heap: Mutex::new(RefCell::new(Inner {
                tlsf: ConstDefault::DEFAULT,
                initialized: false,
                raw_block: None,
                raw_block_size: 0,
            })),
        }
    }

    /// Initializes the heap
    ///
    /// This function must be called BEFORE you run any code that makes use of the
    /// allocator.
    ///
    /// `start_addr` is the address where the heap will be located.
    ///
    /// `size` is the size of the heap in bytes.
    ///
    /// Note that:
    ///
    /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will
    ///   be the smallest address used.
    ///
    /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is
    ///   `0x1000` and `size` is `0x30000` then the allocator won't use memory at
    ///   addresses `0x31000` and larger.
    ///
    /// # Safety
    ///
    /// This function is safe if the following invariants hold:
    ///
    /// - `start_addr` points to the start of a memory region that is valid for
    ///   reads and writes and is not used for anything else.
    /// - The region is at least `size` bytes long.
    ///
    /// # Panics
    ///
    /// This function will panic if either of the following is true:
    ///
    /// - this function is called more than ONCE.
    /// - `size == 0`.
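    ///
    /// # Example
    ///
    /// A minimal sketch; `HEAP`, `HEAP_MEM`, and `HEAP_SIZE` are illustrative
    /// names, and `1024` is an arbitrary size:
    ///
    /// ```ignore
    /// use core::mem::MaybeUninit;
    ///
    /// #[global_allocator]
    /// static HEAP: Heap = Heap::empty();
    ///
    /// const HEAP_SIZE: usize = 1024;
    /// static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
    ///
    /// // Call exactly once, before the first allocation.
    /// unsafe { HEAP.init(core::ptr::addr_of_mut!(HEAP_MEM) as usize, HEAP_SIZE) }
    /// ```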
    pub unsafe fn init(&self, start_addr: usize, size: usize) {
        assert!(size > 0);
        critical_section::with(|cs| {
            let mut heap = self.heap.borrow_ref_mut(cs);
            assert!(!heap.initialized);
            heap.initialized = true;
            let block: NonNull<[u8]> =
                NonNull::slice_from_raw_parts(NonNull::new_unchecked(start_addr as *mut u8), size);
            heap.tlsf.insert_free_block_ptr(block);
            heap.raw_block = Some(block);
            heap.raw_block_size = size;
        });
    }

    fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
        critical_section::with(|cs| self.heap.borrow_ref_mut(cs).tlsf.allocate(layout))
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        critical_section::with(|cs| {
            self.heap
                .borrow_ref_mut(cs)
                .tlsf
                .deallocate(NonNull::new_unchecked(ptr), layout.align())
        })
    }

    /// Get the number of bytes used by the allocator.
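    ///
    /// A minimal sketch; `HEAP` stands in for an already-initialized allocator:
    ///
    /// ```ignore
    /// let in_use = HEAP.used();
    /// // `used()` is the initialized block size minus `free()`, so
    /// // `HEAP.used() + HEAP.free()` equals the `size` passed to `init`.
    /// ```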
    pub fn used(&self) -> usize {
        critical_section::with(|cs| {
            self.heap.borrow_ref_mut(cs).raw_block_size - self.free_with_cs(cs)
        })
    }

    /// Get the number of free bytes in the allocator.
    pub fn free(&self) -> usize {
        critical_section::with(|cs| self.free_with_cs(cs))
    }

    fn free_with_cs(&self, cs: critical_section::CriticalSection) -> usize {
        let inner_mut = self.heap.borrow_ref_mut(cs);
        if !inner_mut.initialized {
            return 0;
        }
        // Safety: We pass the memory block we previously initialized the heap with
        // to the `iter_blocks` method.
        unsafe {
            inner_mut
                .tlsf
                .iter_blocks(inner_mut.raw_block.unwrap())
                .filter(|block_info| !block_info.is_occupied())
                .map(|block_info| block_info.max_payload_size())
                .sum::<usize>()
        }
    }
}

unsafe impl GlobalAlloc for Heap {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.alloc(layout)
            .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.dealloc(ptr, layout)
    }
}

#[cfg(feature = "allocator_api")]
mod allocator_api {
    use super::*;
    use core::alloc::{AllocError, Allocator};

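    // A minimal usage sketch for the nightly `allocator_api` feature; the
    // static `HEAP` below is an illustrative, already-initialized allocator,
    // not something defined in this module:
    //
    //     #![feature(allocator_api)]
    //     extern crate alloc;
    //     use alloc::boxed::Box;
    //
    //     // Allocate from this heap instead of the global allocator.
    //     let boxed = Box::new_in(42u32, &HEAP);
    //     assert_eq!(*boxed, 42);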
    unsafe impl Allocator for Heap {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            match layout.size() {
                0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
                size => self.alloc(layout).map_or(Err(AllocError), |allocation| {
                    Ok(NonNull::slice_from_raw_parts(allocation, size))
                }),
            }
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            if layout.size() != 0 {
                self.dealloc(ptr.as_ptr(), layout);
            }
        }
    }
}