use core::{
alloc::Layout,
cell::UnsafeCell,
mem::MaybeUninit,
ops::{Deref, DerefMut},
ptr::NonNull,
sync::atomic::{AtomicU8, Ordering},
mem::{forget, size_of, align_of},
};
use heapless::mpmc::MpMcQueue;
use linked_list_allocator::Heap;
/// The global allocator instance. Must be initialized via [`AHeap::init`]
/// before any allocation is attempted.
pub static HEAP: AHeap = AHeap::new();
/// Queue of deferred frees: drops that occur while the heap lock is held
/// are parked here and reclaimed by the next lock holder.
static FREE_Q: FreeQueue = FreeQueue::new();
/// Backing storage for the heap, placed in a dedicated linker section so
/// the memory map can position it explicitly.
#[link_section=".aheap.STORAGE"]
static HEAP_BUF: HeapStorage = HeapStorage::new();
/// Capacity of the deferred-free queue; an overflow panics (see
/// `FreeBox::box_drop`).
const FREE_Q_LEN: usize = 128;
/// A `linked_list_allocator::Heap` guarded by a three-state atomic
/// lock (`UNINIT` / `INIT_IDLE` / `BUSY_LOCKED`). Exclusive access is
/// granted via [`AHeap::try_lock`], which yields a [`HeapGuard`].
pub struct AHeap {
// Lifecycle/lock state; one of AHeap::{UNINIT, INIT_IDLE, BUSY_LOCKED}.
state: AtomicU8,
// The inner allocator; uninitialized until `init` has run.
heap: UnsafeCell<MaybeUninit<Heap>>,
}
// SAFETY: all mutable access to `heap` is serialized by winning the
// `state` compare-exchange (in `init` / `try_lock`), so sharing the
// wrapper across threads/contexts is sound.
unsafe impl Sync for AHeap {}
impl AHeap {
    /// State: `init` has not been called yet.
    const UNINIT: u8 = 0;
    /// State: initialized and currently unlocked.
    const INIT_IDLE: u8 = 1;
    /// State: exclusively held (during `init`, or while a `HeapGuard` lives).
    const BUSY_LOCKED: u8 = 2;

    /// Create an uninitialized heap wrapper. [`Self::init`] must succeed
    /// before any allocation.
    const fn new() -> Self {
        Self {
            state: AtomicU8::new(Self::UNINIT),
            heap: UnsafeCell::new(MaybeUninit::uninit()),
        }
    }

    /// One-time initialization: sets up the backing storage and the
    /// deferred-free queue. Returns `Err(())` if already initialized
    /// (or if initialization is racing on another context).
    pub fn init(&self) -> Result<(), ()> {
        // Claim UNINIT -> BUSY_LOCKED; only one caller can ever win this.
        self.state
            .compare_exchange(
                Self::UNINIT,
                Self::BUSY_LOCKED,
                Ordering::SeqCst,
                Ordering::SeqCst,
            )
            .map_err(drop)?;
        // SAFETY: the CAS above grants exclusive access to `heap` and to
        // the one-shot initialization of FREE_Q.
        unsafe {
            let heap = HEAP_BUF.take_heap();
            FREE_Q.init();
            (*self.heap.get()).write(heap);
        }
        self.state.store(Self::INIT_IDLE, Ordering::SeqCst);
        Ok(())
    }

    /// Attempt to take the heap lock. Returns `None` if the heap is
    /// uninitialized or currently locked. The lock is released when the
    /// returned [`HeapGuard`] is dropped.
    pub fn try_lock(&'static self) -> Option<HeapGuard> {
        self.state
            .compare_exchange(
                Self::INIT_IDLE,
                Self::BUSY_LOCKED,
                Ordering::SeqCst,
                Ordering::SeqCst,
            )
            .ok()?;
        // SAFETY: the CAS gives this caller exclusive access until the
        // guard's Drop stores INIT_IDLE again.
        //
        // BUG FIX: the previous code did `&mut *self.heap.get().cast()`,
        // which reinterprets a pointer to the *`heap` field* as a
        // `*mut AHeap`. That is only correct if `heap` happens to land at
        // offset 0 (repr(Rust) makes no such guarantee), and HeapGuard
        // would then read `state`/`heap` at offsets from the wrong base.
        // Cast `self` itself so the guard points at the whole struct.
        unsafe {
            let heap = &mut *(self as *const Self as *mut Self);
            Some(HeapGuard { heap })
        }
    }
}
/// Lazily-initialized MPMC queue holding allocations freed while the
/// heap lock was unavailable (see `FreeBox::box_drop`).
struct FreeQueue {
// Uninitialized until `FreeQueue::init` runs (called from `AHeap::init`).
q: UnsafeCell<MaybeUninit<MpMcQueue<FreeBox, FREE_Q_LEN>>>,
}
// SAFETY: `init` is only called once, under the AHeap init lock, before
// any `get_unchecked`; afterwards the MpMcQueue itself is multi-producer/
// multi-consumer safe.
unsafe impl Sync for FreeQueue {}
impl FreeQueue {
    /// A queue whose storage has not been initialized yet.
    const fn new() -> Self {
        Self {
            q: UnsafeCell::new(MaybeUninit::uninit()),
        }
    }

    /// Initialize the queue storage in place.
    ///
    /// # Safety
    /// Must run exactly once, before any `get_unchecked`, with no
    /// concurrent access (guaranteed by the `AHeap::init` lock).
    unsafe fn init(&self) {
        // Write through the MaybeUninit rather than a raw pointer cast.
        let slot = &mut *self.q.get();
        slot.write(MpMcQueue::new());
    }

    /// Borrow the initialized queue.
    ///
    /// # Safety
    /// `init` must have completed first.
    unsafe fn get_unchecked(&self) -> &MpMcQueue<FreeBox, FREE_Q_LEN> {
        let slot = &*self.q.get();
        slot.assume_init_ref()
    }
}
/// Raw byte buffer that backs the allocator (see `HEAP_BUF`, pinned to
/// the `.aheap.STORAGE` linker section).
struct HeapStorage {
// UnsafeCell: the bytes are handed to `Heap::init` and mutated through
// raw pointers thereafter.
data: UnsafeCell<[u8; Self::SIZE_BYTES]>,
}
// SAFETY: the buffer is only ever accessed through the Heap built in
// `take_heap`, which is itself serialized by the AHeap lock.
unsafe impl Sync for HeapStorage {}
/// An owning smart pointer to a single heap-allocated `T`, analogous to
/// `Box<T>` but backed by [`AHeap`]. Freed (possibly deferred via the
/// free queue) on drop.
pub struct HeapBox<T> {
ptr: *mut T,
}
// SAFETY: HeapBox uniquely owns the pointed-to value, so moving the box
// to another thread moves the T — which is only sound if T itself is
// Send. BUG FIX: the previous unconditional `impl<T> Send` allowed
// sending boxes of !Send types across threads.
unsafe impl<T: Send> Send for HeapBox<T> { }
impl<T> Deref for HeapBox<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
unsafe { &*self.ptr }
}
}
impl<T> DerefMut for HeapBox<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { &mut *self.ptr }
}
}
impl<T> HeapBox<T> {
    /// Build the type-erased free record (pointer + layout) for this box.
    ///
    /// # Safety
    /// The caller must ensure the box's pointer is still live and that the
    /// record is used at most once to release it.
    unsafe fn free_box(&mut self) -> FreeBox {
        let erased = self.ptr.cast::<u8>();
        FreeBox {
            // SAFETY: the pointer came from a successful allocation and
            // is therefore non-null.
            ptr: NonNull::new_unchecked(erased),
            layout: Layout::new::<T>(),
        }
    }

    /// Leak the box: the allocation is never returned to the heap.
    pub fn leak(self) -> &'static mut T {
        let raw = self.ptr;
        // Skip Drop so the memory is never handed back to the allocator.
        forget(self);
        unsafe { &mut *raw }
    }
}
impl<T> Drop for HeapBox<T> {
    /// Drops the contained value, then releases (or defers release of)
    /// the allocation.
    fn drop(&mut self) {
        // Capture the pointer/layout record before touching the contents.
        let free_box = unsafe { self.free_box() };
        // BUG FIX: previously the memory was freed without ever running
        // T's destructor, leaking any resources the value owned. Run it
        // before the storage is recycled.
        unsafe { core::ptr::drop_in_place(self.ptr) };
        free_box.box_drop();
    }
}
/// An owning smart pointer to a heap-allocated `[T]` of `count` elements,
/// analogous to `Box<[T]>` but backed by [`AHeap`]. Freed (possibly
/// deferred via the free queue) on drop.
pub struct HeapArray<T> {
count: usize,
ptr: *mut T,
}
// SAFETY: HeapArray uniquely owns its elements, so moving the array to
// another thread moves the Ts — which is only sound if T itself is Send.
// BUG FIX: the previous unconditional `impl<T> Send` allowed sending
// arrays of !Send types across threads.
unsafe impl<T: Send> Send for HeapArray<T> { }
impl<T> Deref for HeapArray<T> {
    type Target = [T];

    /// Shared view of the whole array as a slice.
    fn deref(&self) -> &Self::Target {
        let (data, len) = (self.ptr as *const T, self.count);
        // SAFETY: `ptr` points at `count` initialized elements owned
        // exclusively by this array.
        unsafe { core::slice::from_raw_parts(data, len) }
    }
}
impl<T> DerefMut for HeapArray<T> {
    /// Exclusive view of the whole array as a mutable slice.
    fn deref_mut(&mut self) -> &mut Self::Target {
        let (data, len) = (self.ptr, self.count);
        // SAFETY: the exclusive borrow of `self` guarantees exclusive
        // access to all `count` initialized elements.
        unsafe { core::slice::from_raw_parts_mut(data, len) }
    }
}
impl<T> HeapArray<T> {
    /// Build the type-erased free record (pointer + layout) for this array.
    ///
    /// # Safety
    /// The caller must ensure the array's pointer is still live and that
    /// the record is used at most once to release it.
    unsafe fn free_box(&mut self) -> FreeBox {
        // Reconstruct the array layout from element size and alignment.
        // The size cannot overflow: allocation went through the checked
        // `Layout::array` in `alloc_box_array`.
        let bytes = size_of::<T>() * self.count;
        let layout = Layout::from_size_align_unchecked(bytes, align_of::<T>());
        FreeBox {
            // SAFETY: the pointer came from a successful allocation and
            // is therefore non-null.
            ptr: NonNull::new_unchecked(self.ptr as *mut u8),
            layout,
        }
    }

    /// Leak the array: the allocation is never returned to the heap.
    pub fn leak(self) -> &'static mut [T] {
        let (raw, len) = (self.ptr, self.count);
        // Skip Drop so the memory is never handed back to the allocator.
        forget(self);
        unsafe { core::slice::from_raw_parts_mut(raw, len) }
    }
}
impl<T> Drop for HeapArray<T> {
    /// Drops all elements, then releases (or defers release of) the
    /// allocation.
    fn drop(&mut self) {
        // Capture the pointer/layout record before touching the contents.
        let free_box = unsafe { self.free_box() };
        // BUG FIX: previously the memory was freed without running the
        // elements' destructors. (Arrays built via `alloc_box_array` hold
        // `T: Copy`, making this a no-op today, but `HeapArray` itself
        // carries no such bound.)
        unsafe {
            core::ptr::drop_in_place(core::slice::from_raw_parts_mut(self.ptr, self.count));
        }
        free_box.box_drop();
    }
}
/// A type-erased record of one allocation awaiting release: the raw
/// pointer plus the layout it was allocated with.
struct FreeBox {
// Non-null pointer to the start of the allocation.
ptr: NonNull<u8>,
// Layout originally passed to the allocator; required by `deallocate`.
layout: Layout,
}
impl FreeBox {
    /// Release the allocation immediately if the heap lock is available,
    /// otherwise park the record on the free queue so the next lock
    /// holder reclaims it. Panics if the free queue is full.
    fn box_drop(self) {
        match HEAP.try_lock() {
            Some(mut guard) => {
                // SAFETY: `ptr`/`layout` describe a live allocation that
                // originally came from this heap.
                unsafe { guard.deref_mut().deallocate(self.ptr, self.layout) }
            }
            None => {
                // Heap busy (or an allocation is in progress): defer.
                // SAFETY: FREE_Q is initialized during AHeap::init, which
                // must have run for any allocation to exist.
                let free_q = unsafe { FREE_Q.get_unchecked() };
                defmt::unwrap!(free_q.enqueue(self).map_err(drop), "Free list is full!");
            }
        }
    }
}
/// RAII lock guard granting exclusive access to the heap; obtained from
/// [`AHeap::try_lock`]. Dropping the guard releases the lock (its Drop
/// stores `INIT_IDLE` back into the state).
pub struct HeapGuard {
// Exclusive reference to the locked AHeap for the guard's lifetime.
heap: &'static mut AHeap,
}
impl HeapGuard {
    /// Bytes currently free in the heap.
    pub fn free_space(&self) -> usize {
        self.deref().free()
    }

    /// Bytes currently used in the heap.
    pub fn used_space(&self) -> usize {
        self.deref().used()
    }

    /// Drain the deferred-free queue, returning parked allocations to the
    /// heap. Called before every allocation so pending frees are reused.
    fn clean_allocs(&mut self) {
        // SAFETY: a HeapGuard can only exist after AHeap::init succeeded,
        // and init initializes FREE_Q.
        let free_q = unsafe { FREE_Q.get_unchecked() };
        while let Some(FreeBox { ptr, layout }) = free_q.dequeue() {
            // SAFETY: every queued FreeBox describes a live allocation
            // that originated from this heap, with its original layout.
            unsafe {
                self.deref_mut().deallocate(ptr, layout);
            }
        }
    }

    /// Allocate a single `T` on the heap, initialized to `data`.
    /// Returns `Err(())` if the heap cannot satisfy the request.
    pub fn alloc_box<T>(&mut self, data: T) -> Result<HeapBox<T>, ()> {
        self.clean_allocs();
        let nnu8 = self.deref_mut().allocate_first_fit(Layout::new::<T>())?;
        let ptr = nnu8.as_ptr().cast::<T>();
        // SAFETY: freshly allocated with Layout::new::<T>(), so properly
        // sized and aligned for a single T.
        unsafe {
            ptr.write(data);
        }
        Ok(HeapBox { ptr })
    }

    /// Allocate an array of `count` elements, each initialized to a copy
    /// of `data`. Returns `Err(())` on layout overflow or allocation
    /// failure.
    ///
    /// BUG FIX: the bound was `T: Copy + ?Sized`, but `data: T` is taken
    /// by value and `Layout::array::<T>` requires `T: Sized`, so the
    /// `?Sized` relaxation could never apply (and fails to compile).
    pub fn alloc_box_array<T: Copy>(&mut self, data: T, count: usize) -> Result<HeapArray<T>, ()> {
        self.clean_allocs();
        // Checked layout computation guards against size overflow.
        let layout = Layout::array::<T>(count).map_err(drop)?;
        let nnu8 = self.deref_mut().allocate_first_fit(layout)?;
        let ptr = nnu8.as_ptr().cast::<T>();
        // SAFETY: the allocation spans `count` elements of T; write each
        // slot exactly once.
        unsafe {
            for i in 0..count {
                ptr.add(i).write(data);
            }
        }
        Ok(HeapArray { ptr, count })
    }
}
impl HeapGuard {
// Inherent accessors into the inner Heap. Sound because a HeapGuard can
// only exist after `AHeap::init` wrote the Heap and while the lock is
// held exclusively by this guard.
fn deref(&self) -> &Heap {
// SAFETY: heap was initialized in AHeap::init; shared borrow of the
// guard implies no concurrent mutation.
unsafe { (*self.heap.heap.get()).assume_init_ref() }
}
fn deref_mut(&mut self) -> &mut Heap {
// SAFETY: heap was initialized in AHeap::init; exclusive borrow of the
// guard implies exclusive access.
unsafe { (*self.heap.heap.get()).assume_init_mut() }
}
}
impl Drop for HeapGuard {
// Releasing the guard unlocks the heap: BUSY_LOCKED -> INIT_IDLE. A
// plain store suffices because only the lock holder can be in
// BUSY_LOCKED, so there is no race on the transition.
fn drop(&mut self) {
self.heap.state.store(AHeap::INIT_IDLE, Ordering::SeqCst);
}
}
impl HeapStorage {
    /// Heap size in KiB.
    const SIZE_KB: usize = 64;
    /// Heap size in bytes (64 KiB).
    const SIZE_BYTES: usize = Self::SIZE_KB * 1024;

    /// Zero-initialized storage (const so it can back a static).
    const fn new() -> Self {
        Self {
            data: UnsafeCell::new([0u8; Self::SIZE_BYTES]),
        }
    }

    /// Base address and size of the backing buffer.
    fn addr_sz(&self) -> (usize, usize) {
        let addr = self.data.get() as usize;
        (addr, Self::SIZE_BYTES)
    }

    /// Build a `Heap` that manages this buffer.
    ///
    /// # Safety
    /// Must be called at most once (enforced by the one-shot CAS in
    /// `AHeap::init`): each returned Heap assumes exclusive ownership of
    /// the buffer.
    unsafe fn take_heap(&self) -> Heap {
        // BUG FIX: previously read the global `HEAP_BUF` instead of
        // `self`, silently ignoring the receiver this method was called
        // on. Use `self` so the method is correct for any instance.
        let (addr, size) = self.addr_sz();
        let mut heap = Heap::empty();
        heap.init(addr, size);
        heap
    }
}