use core::{
alloc::Layout,
cell::Cell,
mem::{align_of, size_of},
ptr::NonNull,
sync::atomic::Ordering,
};
use allocator_api2::alloc::{AllocError, Allocator};
use crate::{addr, cold, with_addr_mut, ImUsize};
/// Header of one fixed-size (`N`-byte) allocation chunk.
///
/// The header lives at the very start of the chunk's memory; the bytes
/// after it (up to `N` total) form the bump region that `allocate` carves
/// pieces out of. `#[repr(C)]` pins the field order of the header.
#[repr(C)]
#[derive(Debug)]
pub(crate) struct Chunk<T, const N: usize> {
    /// Bump pointer: the next free byte inside this chunk's region.
    /// Plain `Cell`, so only the owning (allocating) side mutates it.
    pub cursor: Cell<*mut u8>,
    /// Freed-bytes accumulator (`T: ImUsize`). It is seeded with the bump
    /// region's base address and grows by the size of every deallocation
    /// plus any alignment padding; when it equals the cursor's address the
    /// chunk holds no live allocations. Accessed with Acquire/Release
    /// orderings elsewhere — presumably atomic when shared across threads;
    /// see `ImUsize` for the concrete guarantee.
    pub freed: T,
    /// Link to the next chunk in an intrusive singly-linked list, if any.
    pub next: Cell<Option<NonNull<Chunk<T, N>>>>,
}
impl<T, const N: usize> Chunk<T, N>
where
    T: ImUsize,
{
    /// Total size in bytes of one chunk allocation (header + bump region).
    const SIZE: usize = N;
    /// Alignment of the chunk allocation; dictated by the header type.
    const ALIGNMENT: usize = align_of::<Self>();
    /// Layout used to allocate and deallocate a whole chunk. Const-evaluated,
    /// so an invalid size/alignment pair is rejected at compile time.
    const LAYOUT: Layout = match Layout::from_size_align(Self::SIZE, Self::ALIGNMENT) {
        Ok(layout) => layout,
        Err(_) => panic!("Invalid chunk size"),
    };
    /// Compile-time sanity check: `N` must be big enough to hold the header.
    /// (The alignment comparison is trivially true today because `ALIGNMENT`
    /// is defined as `align_of::<Self>()`, but it guards future edits.)
    const LAYOUT_IS_VALID: bool = {
        if Self::SIZE < size_of::<Self>() {
            panic!("Chunk size is too small");
        }
        if Self::ALIGNMENT < align_of::<Self>() {
            panic!("Chunk alignment is too small");
        }
        true
    };
    /// Allocates a new chunk from `alloc` and initializes its header in place.
    ///
    /// The bump cursor starts right after the header, and the freed
    /// accumulator is seeded with that same address, so a fresh chunk
    /// immediately reads as `unused`.
    ///
    /// # Errors
    /// Propagates `AllocError` from the underlying allocator.
    //
    // NOTE(review): the original signature carried a lifetime parameter `'a`
    // whose only appearance was the bound `A: Allocator + 'a`; a free
    // lifetime used that way constrains nothing, so it has been removed.
    pub fn new<A>(alloc: A) -> Result<NonNull<Self>, AllocError>
    where
        A: Allocator,
    {
        // Referencing the const forces its evaluation when this function is
        // monomorphized, so the check fires even in release builds.
        debug_assert!(Self::LAYOUT_IS_VALID);
        let ptr = alloc.allocate(Self::LAYOUT)?.cast::<Self>();
        // First usable byte: immediately past the header. In bounds because
        // LAYOUT_IS_VALID guarantees `N >= size_of::<Self>()`.
        let memory = unsafe { ptr.as_ptr().add(1).cast::<u8>() };
        // SAFETY: `ptr` came from a successful allocation of `Self::LAYOUT`,
        // which is sized and aligned for (at least) one `Self` header.
        unsafe {
            ptr.as_ptr().write(Chunk {
                cursor: Cell::new(memory),
                freed: T::new(addr(memory)),
                next: Cell::new(None),
            });
        }
        // (Was `Ok(ptr.cast())` — the cast was a no-op on `NonNull<Self>`.)
        Ok(ptr)
    }
    /// Returns the chunk's memory to `alloc`.
    ///
    /// # Safety
    /// `ptr` must have been produced by [`Chunk::new`] using an allocator
    /// through which it may be deallocated, and must not be used afterwards.
    pub unsafe fn free<A>(ptr: NonNull<Self>, alloc: A)
    where
        A: Allocator,
    {
        // SAFETY: forwarded from the caller's contract; `Self::LAYOUT` is
        // exactly the layout `new` allocated with.
        unsafe {
            alloc.deallocate(ptr.cast(), Self::LAYOUT);
        }
    }
    /// Address of the chunk header itself (start of the allocation).
    fn chunk_addr(&self) -> usize {
        addr(self as *const Self)
    }
    /// Address of the first byte of the bump region (just past the header).
    fn base_addr(&self) -> usize {
        self.chunk_addr() + size_of::<Self>()
    }
    /// One-past-the-end address of the chunk's memory.
    fn end_addr(&self) -> usize {
        self.chunk_addr() + N
    }
    /// Link to the next chunk in the list, if any.
    pub fn next(&self) -> Option<NonNull<Self>> {
        self.next.get()
    }
    /// Accessor for the bump-cursor cell.
    fn cursor(&self) -> &Cell<*mut u8> {
        &self.cursor
    }
    /// Accessor for the freed-bytes accumulator.
    fn freed(&self) -> &T {
        &self.freed
    }
}
impl<T, const N: usize> Chunk<T, N>
where
    T: ImUsize,
{
    /// True when every byte handed out from this chunk has been returned.
    ///
    /// Invariant: `freed` is seeded with the bump-region base address (see
    /// `new`) and grows by each deallocation's size plus any alignment
    /// padding recorded in `_allocate`; `cursor` is base plus everything
    /// handed out plus the same padding. Hence equality means no live
    /// allocations remain. The Acquire load pairs with the Release
    /// `fetch_add` in `_deallocate`.
    #[inline(always)]
    pub fn unused(&self) -> bool {
        self.freed().load(Ordering::Acquire) == addr(self.cursor().get())
    }
    /// Rewinds the chunk to empty if it is currently unused.
    ///
    /// Returns `true` when the reset happened, `false` (cold path) when
    /// live allocations still exist.
    #[inline(always)]
    pub fn reset(&self) -> bool {
        let mut cursor = self.cursor().get();
        // Acquire pairs with the Release in `_deallocate`: once equality is
        // observed, all frees that contributed to `freed` have completed.
        if self.freed().load(Ordering::Acquire) == addr(cursor) {
            // Rewind the cursor to the region base, keeping the original
            // pointer's provenance; re-seed `freed` to match.
            cursor = unsafe { with_addr_mut(cursor, self.base_addr()) };
            // NOTE(review): Relaxed assumes no thread can deallocate
            // concurrently once the chunk is unused — confirm callers
            // guarantee exclusive access here.
            self.freed().store(addr(cursor), Ordering::Relaxed);
            self.cursor().set(cursor);
            true
        } else {
            cold();
            false
        }
    }
    /// Bump-allocates `layout` from this chunk, or `None` if it won't fit
    /// (or an address computation overflows).
    #[inline(always)]
    fn _allocate(&self, layout: Layout) -> Option<NonNull<u8>> {
        let cursor = self.cursor().get();
        // Align the cursor address up to `layout.align()`; valid because
        // `Layout` guarantees a power-of-two alignment.
        let aligned = addr(cursor).checked_add(layout.align() - 1)? & !(layout.align() - 1);
        let new_cursor = aligned.checked_add(layout.size())?;
        if new_cursor > self.end_addr() {
            return None;
        }
        // Rebuild pointers from the cursor so provenance is preserved.
        let ptr = unsafe { with_addr_mut(cursor, aligned) };
        let new_cursor = unsafe { with_addr_mut(cursor, new_cursor) };
        self.cursor().set(new_cursor);
        // Count the alignment padding as already "freed" so the
        // freed-vs-cursor accounting in `unused`/`reset` stays balanced.
        let overhead = aligned - addr(cursor);
        self.freed().fetch_add(overhead, Ordering::Relaxed);
        Some(unsafe { NonNull::new_unchecked(ptr) })
    }
    /// Allocates `layout` with a hidden `NonNull<Self>` back-pointer header
    /// in front, so `deallocate` can find the owning chunk from the raw
    /// user pointer alone. Returns the user (payload) pointer.
    #[inline(always)]
    pub fn allocate(&self, chunk_ptr: NonNull<Self>, layout: Layout) -> Option<NonNull<u8>> {
        // `extend` yields the combined layout and the payload's offset past
        // the back-pointer header, with correct alignment for both.
        let (meta_layout, offset) = Layout::new::<NonNull<Self>>().extend(layout).ok()?;
        let ptr = self._allocate(meta_layout)?;
        // Stash the back-pointer at the start of the combined allocation.
        unsafe {
            ptr.as_ptr().cast::<NonNull<Self>>().write(chunk_ptr);
        }
        let ptr = unsafe { ptr.as_ptr().add(offset) };
        Some(unsafe { NonNull::new_unchecked(ptr) })
    }
    /// Credits `size` bytes back to the freed accumulator. Release pairs
    /// with the Acquire loads in `unused`/`reset`.
    #[inline(always)]
    unsafe fn _deallocate(&self, size: usize) {
        self.freed().fetch_add(size, Ordering::Release);
    }
    /// Returns an allocation made by [`Chunk::allocate`].
    ///
    /// # Safety
    /// `ptr` must be a pointer previously returned by `allocate` with this
    /// exact `layout`, on a chunk that is still alive, and must not be
    /// deallocated twice.
    #[inline(always)]
    pub unsafe fn deallocate(ptr: *mut u8, layout: Layout) {
        // Recompute the same combined layout `allocate` used; `unwrap`
        // cannot fire for a layout that `allocate` accepted.
        let (meta_layout, offset) = Layout::new::<NonNull<Self>>().extend(layout).unwrap();
        // Step back over the payload offset to reach the back-pointer header.
        let meta_ptr = unsafe { ptr.sub(offset) }.cast::<NonNull<Self>>();
        let chunk_ptr = unsafe { *meta_ptr };
        let chunk = unsafe { chunk_ptr.as_ref() };
        // Credit the full header+payload size, matching what `_allocate`
        // advanced the cursor by (minus padding, which was credited then).
        unsafe {
            chunk._deallocate(meta_layout.size());
        }
    }
}