#![doc = include_str!("../README.md")]
#![no_std]
use core::alloc::{GlobalAlloc, Layout};
use core::cell::{RefCell, UnsafeCell};
use core::mem;
use core::ptr;
pub mod boxed;
mod index;
pub mod rc;
use boxed::Box;
use index::MemoryIndex;
/// Errors reported by [`IndexAllocator`] and its internal region index.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum IndexError {
/// No indexed region contains the requested address.
NoSuchRegion,
/// The index is full: no free slot is left to record a new region.
NoIndexAvailable,
/// No free region is large enough for the requested layout.
NoFittingRegion,
/// The backing arena has no space left for the allocation.
OutOfMemory,
/// The candidate region is too small to be split as requested.
RegionTooThin,
/// An allocation produced a null pointer.
EmptyPtr,
/// The index `RefCell` was already borrowed (re-entrant allocator call).
IndexAlreadyBorrowed,
}
/// A fixed-capacity allocator backed by a `MEMORY_SIZE`-byte arena, with
/// region bookkeeping held in an `INDEX_SIZE`-entry [`MemoryIndex`].
pub struct IndexAllocator<const MEMORY_SIZE: usize, const INDEX_SIZE: usize> {
// Raw backing storage; handed out via raw pointers, hence `UnsafeCell`.
memory: UnsafeCell<[u8; MEMORY_SIZE]>,
// Free/reserved region bookkeeping; `RefCell` turns re-entrant use into
// an `IndexAlreadyBorrowed` error instead of aliasing mutable state.
index: RefCell<MemoryIndex<INDEX_SIZE>>,
}
// SAFETY(review): `RefCell` and `UnsafeCell` are `!Sync` by design, so this
// impl asserts the allocator is never accessed from more than one thread at
// a time (e.g. single-core / no-preemption targets). Concurrent use would be
// a data race on the index and arena — confirm this matches the crate's
// intended platforms before relying on it as a multi-threaded global.
unsafe impl<const MEMORY_SIZE: usize, const INDEX_SIZE: usize> Sync
for IndexAllocator<MEMORY_SIZE, INDEX_SIZE>
{
}
impl<const MEMORY_SIZE: usize, const INDEX_SIZE: usize> IndexAllocator<MEMORY_SIZE, INDEX_SIZE> {
    /// Wraps pre-built backing memory and a region index.
    #[must_use]
    const fn new(memory: [u8; MEMORY_SIZE], index: MemoryIndex<INDEX_SIZE>) -> Self {
        Self {
            memory: UnsafeCell::new(memory),
            index: RefCell::new(index),
        }
    }

    /// Creates an allocator whose whole `MEMORY_SIZE`-byte arena is zeroed
    /// and registered as one free region.
    #[must_use]
    pub const fn empty() -> Self {
        Self::new([0; MEMORY_SIZE], MemoryIndex::empty(MEMORY_SIZE))
    }

    /// Reserves room for `layout` and returns the byte offset (from the start
    /// of the arena) of the reserved bytes.
    ///
    /// # Errors
    /// Fails if the index is already borrowed (re-entrant call), no region
    /// fits the padded layout, or the index bookkeeping cannot be updated.
    fn try_reserve(&self, layout: Layout) -> Result<usize, IndexError> {
        let layout = layout.pad_to_align();
        // The arena's real base address lets the index compute alignment
        // against actual addresses rather than offsets.
        let memory_start = self.memory.get() as usize;
        let mut index = self
            .index
            .try_borrow_mut()
            .map_err(|_| IndexError::IndexAlreadyBorrowed)?;
        let allocation_baker = index.size_region_available(memory_start, layout)?;
        // Split off exactly the bytes needed (alignment padding included);
        // the remainder stays free for later allocations.
        let (region_index, _) = index.split_region(
            allocation_baker.region,
            allocation_baker.offset + layout.size(),
        )?;
        let region = index.get_region_mut(region_index)?;
        region.reserve();
        Ok(region.from + allocation_baker.offset)
    }

    /// Marks the region containing arena offset `addr` as free and merges
    /// adjacent free regions.
    ///
    /// # Errors
    /// Fails if the index is already borrowed or `addr` lies in no region.
    fn try_free_addr(&self, addr: usize) -> Result<(), IndexError> {
        let mut index = self
            .index
            .try_borrow_mut()
            .map_err(|_| IndexError::IndexAlreadyBorrowed)?;
        let region_index = index.find_region(addr)?;
        index.get_region_mut(region_index)?.free();
        // Coalesce neighbouring free regions so the index does not fragment.
        index.sort_merge();
        Ok(())
    }

    /// Allocates `layout` bytes and returns a pointer into the arena.
    ///
    /// # Safety
    /// The returned pointer is only valid while `self` is alive and until the
    /// matching [`Self::try_free`].
    unsafe fn try_alloc(&self, layout: Layout) -> Result<*mut u8, IndexError> {
        let offset = self.try_reserve(layout)?;
        Ok(self.memory.get().cast::<u8>().wrapping_add(offset))
    }

    /// Releases an allocation previously returned by [`Self::try_alloc`].
    ///
    /// # Safety
    /// `ptr` must point into this allocator's arena; the offset subtraction
    /// is meaningless (and may overflow) for foreign pointers.
    unsafe fn try_free(&self, ptr: *mut u8) -> Result<(), IndexError> {
        let offset = ptr as usize - self.memory.get() as usize;
        self.try_free_addr(offset)?;
        Ok(())
    }

    /// Moves `val` into freshly reserved arena memory and returns a mutable
    /// reference to it.
    ///
    /// # Safety
    /// The value must later be released exactly once via
    /// [`Self::try_free_value`], and the reference must not outlive `self`.
    unsafe fn try_alloc_value<T>(&self, val: T) -> Result<&mut T, IndexError> {
        let layout = Layout::for_value(&val);
        let inner_ptr = self.try_alloc(layout)?.cast::<T>();
        if inner_ptr.is_null() {
            return Err(IndexError::EmptyPtr);
        }
        // SAFETY: `inner_ptr` is non-null, aligned for `T` (the reserved
        // layout was padded to `T`'s alignment) and covers `layout.size()`
        // reserved bytes. `write` moves `val` in without reading or dropping
        // the uninitialized bytes already there — the previous
        // `mem::replace` + `mem::forget` formed a `&mut T` to uninitialized
        // memory and read it as a `T`, which is undefined behavior.
        unsafe {
            inner_ptr.write(val);
            Ok(&mut *inner_ptr)
        }
    }

    /// Releases the arena memory holding `val` (does not run its destructor).
    ///
    /// # Safety
    /// `val` must live in this allocator's arena and not be used afterwards.
    unsafe fn try_free_value<T: ?Sized>(&self, val: &T) -> Result<(), IndexError> {
        self.try_free(ptr::from_ref(val).cast_mut().cast::<u8>())
    }

    /// Allocates `val` in the arena and wraps it in a [`Box`] tied to this
    /// allocator.
    ///
    /// # Errors
    /// Propagates any allocation failure from the index.
    pub fn try_boxed<'a, T, U>(
        &'a self,
        val: U,
    ) -> Result<Box<T, MEMORY_SIZE, INDEX_SIZE>, IndexError>
    where
        U: 'a,
        T: ?Sized,
        &'a mut T: From<&'a mut U>,
    {
        Box::try_new(val, self)
    }
}
impl<const MEMORY_SIZE: usize, const INDEX_SIZE: usize> Default
for IndexAllocator<MEMORY_SIZE, INDEX_SIZE>
{
#[must_use]
fn default() -> Self {
Self::empty()
}
}
unsafe impl<const MEMORY_SIZE: usize, const INDEX_SIZE: usize> GlobalAlloc
for IndexAllocator<MEMORY_SIZE, INDEX_SIZE>
{
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
self.try_alloc(layout).unwrap()
}
unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) {
self.try_free(ptr).unwrap();
}
}