use core::{
alloc::Layout,
marker::PhantomData,
mem::{align_of, size_of, ManuallyDrop},
ptr::{slice_from_raw_parts_mut, NonNull},
};
use crate::{
alloc::alloc::{alloc, dealloc, handle_alloc_error},
ser::Allocator,
};
/// Intrusive header stored at the start of every arena allocation.
///
/// Blocks form a singly linked ring. The invariant (established by
/// `Block::alloc` and maintained by `Block::push_next`) is that each
/// header's `next_size` is the allocation size, in bytes and including the
/// header itself, of the block at `next_ptr`. A freshly allocated block is
/// "self-linked": it points at itself and records its own size.
struct Block {
    // Next block in the ring (points to this block itself when lone).
    next_ptr: NonNull<Block>,
    // Allocation size in bytes of the block at `next_ptr`.
    next_size: usize,
}
impl Block {
    /// Allocates a block of `size` bytes (header included) and writes a
    /// self-linked `Block` header at its start: `next_ptr` points at the
    /// block itself and `next_size` records its own allocation size.
    ///
    /// Diverges via `handle_alloc_error` if the global allocator fails.
    fn alloc(size: usize) -> NonNull<Self> {
        debug_assert!(size >= size_of::<Self>());
        let layout = Layout::from_size_align(size, align_of::<Self>()).unwrap();
        // SAFETY: the layout has non-zero size — callers pass at least the
        // header size (debug-asserted above).
        let ptr = unsafe { alloc(layout).cast::<Self>() };
        let Some(ptr) = NonNull::new(ptr) else {
            handle_alloc_error(layout)
        };
        // SAFETY: `ptr` is a freshly allocated, properly aligned region
        // large enough to hold a `Block` header.
        unsafe {
            ptr.as_ptr().write(Self {
                next_ptr: ptr,
                next_size: layout.size(),
            });
        }
        ptr
    }

    /// Frees a block previously returned by [`Block::alloc`].
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by `Block::alloc(size)` with this exact
    /// `size`, and the block must not be used afterwards.
    unsafe fn dealloc(ptr: NonNull<Self>, size: usize) {
        // SAFETY: the same (size, align) pair built a valid `Layout` when
        // the block was allocated, so it cannot fail here.
        let layout = unsafe {
            Layout::from_size_align(size, align_of::<Self>()).unwrap_unchecked()
        };
        // SAFETY: `ptr` came from `alloc` with this same layout (caller
        // contract above).
        unsafe {
            dealloc(ptr.as_ptr().cast(), layout);
        }
    }

    /// Splices the self-linked block `new` into the ring immediately after
    /// `tail`, preserving the invariant that each header's `next_size` is
    /// the allocation size of the block at its `next_ptr`.
    ///
    /// # Safety
    ///
    /// Both pointers must refer to live, initialized `Block`s, and the
    /// caller must have exclusive access to both for the duration of the
    /// call.
    unsafe fn push_next(
        mut tail_ptr: NonNull<Self>,
        mut new_ptr: NonNull<Self>,
    ) {
        let tail = unsafe { tail_ptr.as_mut() };
        let new = unsafe { new_ptr.as_mut() };
        // `new` must still be self-linked (fresh from `Block::alloc`), so
        // its `next_size` is its own allocation size.
        debug_assert!(new.next_ptr == new_ptr);
        // Remember `tail`'s current successor (the ring's head) and its
        // recorded size.
        let head = tail.next_ptr;
        let head_cap = tail.next_size;
        // Relink as tail -> new -> head.
        tail.next_ptr = new_ptr;
        tail.next_size = new.next_size;
        new.next_ptr = head;
        new.next_size = head_cap;
    }
}
/// An arena allocator backed by a ring of heap-allocated [`Block`]s.
///
/// The arena owns every block reachable through the headers starting at
/// `head_ptr`, and frees them all on drop.
pub struct Arena {
    // Current head block of the ring; never dangling while the arena lives.
    head_ptr: NonNull<Block>,
}

// SAFETY: NOTE(review): the arena uniquely owns its blocks — they are only
// reachable through `head_ptr`, and `ArenaHandle` borrows the arena mutably
// — so moving it across threads appears sound; confirm no other aliases of
// the block pointers exist elsewhere in the crate.
unsafe impl Send for Arena {}
impl Drop for Arena {
    fn drop(&mut self) {
        // Collapse the ring down to a single self-linked block, then free
        // that final block. After `shrink`, the head's `next_size` is its
        // own allocation size.
        self.shrink();
        let size = unsafe { self.head_ptr.as_ref() }.next_size;
        // SAFETY: `head_ptr` was produced by `Block::alloc` with exactly
        // `size` bytes and is not used again after this point.
        unsafe { Block::dealloc(self.head_ptr, size) };
    }
}
impl Arena {
    /// Default usable capacity, in bytes, used by [`Arena::new`].
    pub const DEFAULT_CAPACITY: usize = 1024;

    /// Creates an arena with [`Self::DEFAULT_CAPACITY`] bytes of capacity.
    pub fn new() -> Self {
        Self::with_capacity(Self::DEFAULT_CAPACITY)
    }

    /// Creates an arena with at least `cap` usable bytes.
    ///
    /// The head block's total allocation (`cap` plus the `Block` header) is
    /// rounded up to the next power of two.
    pub fn with_capacity(cap: usize) -> Self {
        let head_size = (cap + size_of::<Block>()).next_power_of_two();
        let head_ptr = Block::alloc(head_size);
        Self { head_ptr }
    }

    /// Frees every block except one, leaving a single self-linked head, and
    /// returns that block's usable capacity in bytes.
    ///
    /// The retained block is the one whose link points back at the current
    /// head (the most recently pushed block).
    pub fn shrink(&mut self) -> usize {
        // Begin the walk at the head's successor.
        let (mut current_ptr, mut current_size) = {
            let head = unsafe { self.head_ptr.as_ref() };
            (head.next_ptr, head.next_size)
        };
        loop {
            let current = unsafe { current_ptr.as_mut() };
            if current.next_ptr == current_ptr {
                // Lone self-linked block: nothing to free.
                break;
            }
            let next_ptr = current.next_ptr;
            let next_size = current.next_size;
            if next_ptr == self.head_ptr {
                // `current` links back to the old head: free the old head
                // and make `current` the new, self-linked head.
                unsafe {
                    Block::dealloc(next_ptr, next_size);
                }
                current.next_ptr = current_ptr;
                current.next_size = current_size;
                self.head_ptr = current_ptr;
                break;
            }
            // Intermediate block: free it and advance.
            unsafe {
                Block::dealloc(current_ptr, current_size);
            }
            current_ptr = next_ptr;
            current_size = next_size;
        }
        // `current_size` is the allocation size of the surviving block;
        // subtract the header to get usable bytes.
        current_size - size_of::<Block>()
    }

    /// Returns the usable capacity, in bytes, recorded for the head block.
    ///
    /// NOTE(review): this returns the `next_size` stored by the node that
    /// links back to the head — i.e. the head block's size. When multiple
    /// blocks exist, `shrink` instead retains the block *before* the head,
    /// so the two can disagree until a shrink occurs — confirm intended.
    pub fn capacity(&self) -> usize {
        let mut current_ptr = self.head_ptr;
        loop {
            let current = unsafe { current_ptr.as_ref() };
            if current.next_ptr == self.head_ptr {
                break current.next_size - size_of::<Block>();
            }
            current_ptr = current.next_ptr;
        }
    }

    /// Shrinks the arena to a single block and returns a bump-allocation
    /// handle that borrows the arena exclusively.
    pub fn acquire(&mut self) -> ArenaHandle<'_> {
        self.shrink();
        ArenaHandle {
            tail_ptr: self.head_ptr,
            tail_size: unsafe { self.head_ptr.as_ref().next_size },
            // The `Block` header at the start of the block is always in use.
            used: size_of::<Block>(),
            _phantom: PhantomData,
        }
    }

    /// Leaks the arena, returning an opaque pointer that can later be
    /// restored with [`Arena::from_raw`].
    pub fn into_raw(self) -> NonNull<()> {
        // Suppress `Drop` so the blocks stay allocated.
        let this = ManuallyDrop::new(self);
        this.head_ptr.cast()
    }

    /// Reconstructs an arena from a pointer returned by [`Arena::into_raw`].
    ///
    /// # Safety
    ///
    /// `raw` must have been returned by `into_raw`, and must not be used to
    /// reconstruct more than one `Arena`.
    pub unsafe fn from_raw(raw: NonNull<()>) -> Self {
        Self {
            head_ptr: raw.cast(),
        }
    }
}
impl Default for Arena {
fn default() -> Self {
Self::new()
}
}
/// An exclusive bump-allocation handle into an [`Arena`], implementing the
/// crate's `Allocator` trait.
pub struct ArenaHandle<'a> {
    // Block currently being bump-allocated from (the ring's tail).
    tail_ptr: NonNull<Block>,
    // Allocation size in bytes of the tail block, header included.
    tail_size: usize,
    // Bytes consumed from the tail block, including its `Block` header.
    used: usize,
    // Mutable borrow of the arena: at most one handle exists at a time.
    _phantom: PhantomData<&'a mut Arena>,
}

// SAFETY: NOTE(review): the handle holds an exclusive (`&mut`) borrow of the
// arena and the only raw pointer is to arena-owned memory, so sending it to
// another thread appears sound — confirm against `Allocator`'s contract.
unsafe impl Send for ArenaHandle<'_> {}
unsafe impl<E> Allocator<E> for ArenaHandle<'_> {
    /// Bump-allocates `layout` bytes from the tail block, pushing a new,
    /// larger block onto the arena when the current tail cannot fit the
    /// request.
    unsafe fn push_alloc(
        &mut self,
        layout: Layout,
    ) -> Result<NonNull<[u8]>, E> {
        // Absolute address of the current bump cursor.
        let pos = self.tail_ptr.as_ptr() as usize + self.used;
        // Padding to round `pos` up to the requested alignment:
        // `-pos mod align` (`Layout` guarantees `align` is a power of two).
        let pad = 0usize.wrapping_sub(pos) % layout.align();
        if pad + layout.size() <= self.tail_size - self.used {
            // Fits in the tail block: just consume the padding.
            self.used += pad;
        } else {
            // Grow: at least double the tail, and large enough for the
            // header plus the request with worst-case padding, rounded up
            // to a power of two.
            let size = usize::max(
                2 * self.tail_size,
                (size_of::<Block>() + layout.size() + layout.align())
                    .next_power_of_two(),
            );
            let next = Block::alloc(size);
            // SAFETY: both blocks are live and exclusively accessible
            // through this handle.
            unsafe {
                Block::push_next(self.tail_ptr, next);
            }
            self.tail_ptr = next;
            self.tail_size = size;
            // Restart the bump cursor just past the new block's header,
            // re-aligned for `layout`.
            let pos = self.tail_ptr.as_ptr() as usize + size_of::<Block>();
            let pad = 0usize.wrapping_sub(pos) % layout.align();
            self.used = size_of::<Block>() + pad;
        }
        // SAFETY: `used + layout.size()` is within the tail block, so the
        // offset stays in bounds of the allocation.
        let ptr = unsafe { self.tail_ptr.as_ptr().cast::<u8>().add(self.used) };
        let slice_ptr = slice_from_raw_parts_mut(ptr, layout.size());
        // SAFETY: derived from a `NonNull` by an in-bounds offset, so
        // non-null.
        let result = unsafe { NonNull::new_unchecked(slice_ptr) };
        self.used += layout.size();
        Ok(result)
    }

    /// Pops an allocation by rewinding the bump cursor.
    ///
    /// Only pointers inside the current tail block rewind the cursor; pops
    /// of allocations that live in earlier blocks are ignored (that memory
    /// stays in place until the arena is shrunk).
    unsafe fn pop_alloc(
        &mut self,
        ptr: NonNull<u8>,
        _: Layout,
    ) -> Result<(), E> {
        let start = self.tail_ptr.as_ptr() as usize;
        let end = start + self.tail_size;
        let pos = ptr.as_ptr() as usize;
        if (start..end).contains(&pos) {
            self.used = pos - start;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use core::alloc::Layout;
    use rancor::{Panic, ResultExt};
    use crate::{
        alloc::{string::ToString, vec},
        api::high::to_bytes_in_with_alloc,
        ser::{allocator::Arena, Allocator},
        util::AlignedVec,
    };

    /// Serializing repeatedly with the same (initially tiny) arena must keep
    /// working: each `acquire` shrinks the arena and reuses the retained
    /// block.
    #[test]
    fn reuse_arena() {
        let value = vec![
            "hello".to_string(),
            "world".to_string(),
            "foo".to_string(),
            "bar".to_string(),
            "baz".to_string(),
        ];
        let mut arena = Arena::with_capacity(2);
        let mut round = 0;
        while round < 10 {
            to_bytes_in_with_alloc::<_, _, Panic>(
                &value,
                AlignedVec::<16>::new(),
                arena.acquire(),
            )
            .unwrap();
            round += 1;
        }
    }

    /// Popping an allocation that no longer lives in the tail block must be
    /// a harmless no-op.
    #[test]
    fn pop_non_tail() {
        let layout =
            Layout::from_size_align(Arena::DEFAULT_CAPACITY, 1).unwrap();
        let mut arena = Arena::new();
        let mut handle = arena.acquire();
        unsafe {
            let first =
                Allocator::<Panic>::push_alloc(&mut handle, layout).always_ok();
            let second =
                Allocator::<Panic>::push_alloc(&mut handle, layout).always_ok();
            // Pop in LIFO order; after the second block is popped, `first`
            // points outside the tail block.
            Allocator::<Panic>::pop_alloc(&mut handle, second.cast(), layout)
                .always_ok();
            Allocator::<Panic>::pop_alloc(&mut handle, first.cast(), layout)
                .always_ok();
        }
    }
}