#![cfg_attr(not(feature = "std"), no_std)]
#![warn(unsafe_op_in_unsafe_fn)]
#![warn(clippy::pedantic)]
#![warn(clippy::nursery)]
#![warn(clippy::cargo)]
use core::alloc::Layout;
use core::cell::Cell;
use core::ptr::NonNull;
use core::{mem, ptr, slice};
extern crate alloc;
#[cfg(feature = "bumpalo")]
pub mod bumpalo;
pub mod fallback;
#[cfg(test)]
mod tests;
/// Minimal allocation interface an arena must provide for [`Rodeo`].
pub trait ArenaAlloc {
    /// Error returned when an allocation cannot be satisfied.
    type Error;
    /// Allocates a block of memory matching `layout`, returning a pointer
    /// to uninitialized storage on success.
    fn try_alloc_layout(&self, layout: Layout) -> Result<NonNull<u8>, Self::Error>;
}
/// Bookkeeping record stored in the arena immediately before each
/// allocation that needs dropping; forms an intrusive singly linked list
/// (newest first) threaded through [`Rodeo::last`].
struct Header {
    /// Previously registered header, if any.
    previous: Option<NonNull<Header>>,
    /// Called with this header's own address; recomputes the offsets to the
    /// finalizer data and the value(s) and drops them in place.
    finalizer: unsafe fn(NonNull<u8>),
    /// Layout of the finalizer's auxiliary data slot (debug-only sanity check).
    #[cfg(debug_assertions)]
    finalizer_data_layout: Layout,
    /// Layout of the user value(s) stored after the header (debug-only sanity check).
    #[cfg(debug_assertions)]
    data_layout: Layout,
}
impl Header {
    /// Runs the finalizer registered in `header`.
    ///
    /// The finalizer receives the header's own base address (as `NonNull<u8>`)
    /// and is responsible for locating and dropping the value(s) stored
    /// after the header in the same allocation.
    fn finalize(header: NonNull<Self>) {
        unsafe {
            let dropper = header.as_ref().finalizer;
            dropper(header.cast());
        }
    }
}
/// Finalizer for a single value of type `T` stored behind a [`Header`].
///
/// # Safety
///
/// `non_null` must point to a live `Header` that was allocated as
/// `Header`, then a `()` finalizer-data slot, then a fully initialized `T`,
/// using the same `Layout::extend` sequence reproduced below.
unsafe fn drop_finalizer<T>(non_null: NonNull<u8>) {
    let header_layout = Layout::new::<Header>();
    let unit_layout = Layout::new::<()>();
    let t_layout = Layout::new::<T>();
    // Debug-only check that this finalizer matches the layouts recorded at
    // allocation time.
    #[cfg(debug_assertions)]
    {
        let header = unsafe { non_null.cast::<Header>().as_ref() };
        debug_assert_eq!(unit_layout, header.finalizer_data_layout);
        debug_assert_eq!(t_layout, header.data_layout);
    }
    // Recompute the offset of `T` within the combined allocation; this must
    // mirror the math in `try_alloc_layout_with_finalizer`.
    let (layout, _) = header_layout.extend(unit_layout).unwrap();
    let (_, offset_t) = layout.extend(t_layout).unwrap();
    unsafe {
        let bytes = non_null.as_ptr();
        let ptr: *mut T = bytes.wrapping_add(offset_t).cast();
        ptr.drop_in_place();
    }
}
/// Finalizer for a slice of `T`s allocated with a leading length slot.
///
/// The allocation is laid out as `Header`, then a `usize` element count,
/// then the elements themselves; the offsets are recomputed here with the
/// same `Layout::extend` sequence used at allocation time.
///
/// # Safety
///
/// `non_null` must point to a live `Header` laid out as described above,
/// and the `usize` length slot must hold the number of *initialized*
/// elements that follow it (which may be fewer than were reserved).
unsafe fn slice_drop_finalizer<T>(non_null: NonNull<u8>) {
    let header_layout = Layout::new::<Header>();
    let len_layout = Layout::new::<usize>();
    let t_layout = Layout::new::<T>();
    let (layout, offset_len) = header_layout.extend(len_layout).unwrap();
    let (_, offset_t) = layout.extend(t_layout).unwrap();
    let ptr = non_null.as_ptr();
    // Number of initialized elements recorded in the length slot.
    let len: usize = unsafe { *ptr.wrapping_add(offset_len).cast() };
    #[cfg(debug_assertions)]
    unsafe {
        let header: &Header = &*ptr.cast();
        debug_assert_eq!(len_layout, header.finalizer_data_layout);
        // The recorded `len` may legitimately be smaller than the reserved
        // capacity (element initialization can be cut short), so check
        // layout *compatibility* instead of exact equality: same alignment
        // and no more initialized bytes than were reserved.
        debug_assert_eq!(t_layout.align(), header.data_layout.align());
        debug_assert!(t_layout.size() * len <= header.data_layout.size());
    }
    let ptr: *mut T = ptr.wrapping_add(offset_t).cast();
    unsafe {
        // Drop exactly the initialized prefix in place.
        let slice = slice::from_raw_parts_mut(ptr, len);
        ptr::drop_in_place(slice);
    }
}
// Default backing allocator, selected by the `bumpalo` feature.
#[cfg(feature = "bumpalo")]
type Alloc = ::bumpalo::Bump;
// Fallback allocator used when the `bumpalo` feature is disabled.
#[cfg(not(feature = "bumpalo"))]
type Alloc = fallback::LeakingAlloc;
/// A drop-aware arena: values are allocated from `A`, and destructors of
/// values that need dropping are run when the `Rodeo` itself is dropped.
#[derive(Default)]
pub struct Rodeo<A> {
    /// The backing arena allocator.
    allocator: A,
    /// Head of the intrusive list of finalizer headers (newest first).
    last: Cell<Option<NonNull<Header>>>,
}
impl Rodeo<Alloc> {
    /// Creates a `Rodeo` backed by the default allocator for the currently
    /// enabled feature set.
    #[must_use]
    pub fn new() -> Self {
        let allocator = Alloc::default();
        Self::with_allocator(allocator)
    }
}
impl<A> Rodeo<A> {
    /// Creates a `Rodeo` on top of an existing allocator, with an empty
    /// finalizer chain.
    #[must_use]
    pub const fn with_allocator(allocator: A) -> Self {
        Self {
            allocator,
            last: Cell::new(None),
        }
    }
    /// Returns a shared reference to the underlying allocator.
    pub const fn allocator(&self) -> &A {
        &self.allocator
    }
    /// Consumes the `Rodeo` and returns the underlying allocator.
    ///
    /// NOTE(review): `mem::forget(self)` suppresses `Rodeo`'s `Drop`, so any
    /// finalizers registered for arena-allocated values are never run — their
    /// destructors are skipped. Confirm this leak is intentional and document
    /// it in the public API if so.
    pub fn into_allocator(self) -> A {
        // `Rodeo` implements `Drop`, so the field cannot be moved out
        // directly; read it out by pointer and forget `self` instead.
        let alloc = unsafe { ptr::read(&self.allocator) };
        mem::forget(self);
        alloc
    }
}
impl<A> Rodeo<A>
where
    A: ArenaAlloc,
{
    /// Allocates `value` in the arena and returns a mutable reference to it.
    ///
    /// If `T` needs dropping, its destructor runs when the `Rodeo` is dropped.
    ///
    /// # Panics
    ///
    /// Panics with "out of memory" if the underlying allocator fails.
    pub fn alloc<T>(&self, value: T) -> &mut T {
        match self.try_alloc(value) {
            Ok(ref_mut) => ref_mut,
            Err(_) => oom(),
        }
    }
    /// Copies `value` into the arena and returns it as a mutable `str`.
    ///
    /// # Panics
    ///
    /// Panics with "out of memory" if the underlying allocator fails.
    pub fn alloc_str(&self, value: &str) -> &mut str {
        match self.try_alloc_str(value) {
            Ok(ref_mut) => ref_mut,
            Err(_) => oom(),
        }
    }
    /// Copies `value` into the arena; `T: Copy` means no destructor
    /// bookkeeping is required.
    ///
    /// # Panics
    ///
    /// Panics with "out of memory" if the underlying allocator fails.
    pub fn alloc_slice_copy<T: Copy>(&self, value: &[T]) -> &mut [T] {
        match self.try_alloc_slice_copy(value) {
            Ok(ref_mut) => ref_mut,
            Err(_) => oom(),
        }
    }
    /// Clones `value` into the arena; element destructors (if any) run when
    /// the `Rodeo` is dropped.
    ///
    /// # Panics
    ///
    /// Panics with "out of memory" if the underlying allocator fails.
    pub fn alloc_slice_clone<T: Clone>(&self, value: &[T]) -> &mut [T] {
        match self.try_alloc_slice_clone(value) {
            Ok(ref_mut) => ref_mut,
            Err(_) => oom(),
        }
    }
    /// Tries to allocate `value` in the arena.
    ///
    /// Types that need dropping get a `Header` and finalizer registered so
    /// their destructor runs on `Rodeo` drop; plain-data types are allocated
    /// directly with no bookkeeping.
    ///
    /// # Errors
    ///
    /// Returns the allocator's error when the allocation fails.
    pub fn try_alloc<T>(&self, value: T) -> Result<&mut T, A::Error> {
        let ptr: *mut T = if mem::needs_drop::<T>() {
            let raw =
                self.try_alloc_layout_with_finalizer(Layout::new::<T>(), drop_finalizer::<T>, ())?;
            raw.cast()
        } else {
            let layout = Layout::new::<T>();
            self.allocator.try_alloc_layout(layout)?.cast().as_ptr()
        };
        // SAFETY: `ptr` is freshly allocated, properly aligned storage for a
        // `T`. `write` cannot panic, so a registered finalizer can only ever
        // observe a fully initialized value.
        unsafe {
            ptr.write(value);
            Ok(&mut *ptr)
        }
    }
    /// Allocates room for a `Header`, finalizer data `D`, and a value with
    /// `data_layout`, writes the header and finalizer data, and links the
    /// header into the finalizer chain.
    ///
    /// Returns a pointer to the (still uninitialized) value region. The
    /// caller must fully initialize it without panicking, or otherwise
    /// ensure the registered `finalizer` copes with partial initialization.
    #[inline]
    fn try_alloc_layout_with_finalizer<D>(
        &self,
        data_layout: Layout,
        finalizer: unsafe fn(NonNull<u8>),
        finalizer_data: D,
    ) -> Result<*mut u8, A::Error> {
        let header_layout = Layout::new::<Header>();
        let finalizer_data_layout = Layout::new::<D>();
        let (hdr_fd_layout, fd_offset) = header_layout.extend(finalizer_data_layout).unwrap();
        let (full_layout, data_offset) = hdr_fd_layout.extend(data_layout).unwrap();
        // Allocate before `take()`ing the list head so a failed allocation
        // leaves the existing finalizer chain intact.
        let ptr = self.allocator.try_alloc_layout(full_layout)?.as_ptr();
        let header = Header {
            previous: self.last.take(),
            finalizer,
            #[cfg(debug_assertions)]
            finalizer_data_layout,
            #[cfg(debug_assertions)]
            data_layout,
        };
        let header_non_null;
        let value_ptr;
        // SAFETY: `full_layout` begins with `Header`'s own layout, so the
        // base pointer is properly aligned for `Header`, and `fd_offset` /
        // `data_offset` come from the matching `Layout::extend` calls.
        unsafe {
            #[allow(clippy::cast_ptr_alignment)]
            let header_ptr = ptr.cast::<Header>();
            header_ptr.write(header);
            header_non_null = NonNull::new_unchecked(header_ptr);
            let finalizer_data_ptr = ptr.wrapping_add(fd_offset).cast::<D>();
            finalizer_data_ptr.write(finalizer_data);
            value_ptr = ptr.wrapping_add(data_offset);
        }
        self.last.set(Some(header_non_null));
        Ok(value_ptr)
    }
    /// Tries to copy `string` into the arena.
    ///
    /// # Errors
    ///
    /// Returns the allocator's error when the allocation fails.
    #[inline]
    pub fn try_alloc_str(&self, string: &str) -> Result<&mut str, A::Error> {
        let bytes = self.try_alloc_slice_copy(string.as_bytes())?;
        // SAFETY: the bytes are an exact copy of a valid `str`.
        Ok(unsafe { core::str::from_utf8_unchecked_mut(bytes) })
    }
    /// Tries to copy `slice` into the arena. `T: Copy` implies no drop glue,
    /// so no header or finalizer is recorded.
    ///
    /// # Errors
    ///
    /// Returns the allocator's error when the allocation fails.
    pub fn try_alloc_slice_copy<T: Copy>(&self, slice: &[T]) -> Result<&mut [T], A::Error> {
        debug_assert!(!mem::needs_drop::<T>());
        let len = slice.len();
        let ptr = self.allocator.try_alloc_layout(Layout::for_value(slice))?;
        let ptr: *mut T = ptr.cast().as_ptr();
        // SAFETY: the destination was just allocated with the slice's own
        // layout and cannot overlap the borrowed source.
        unsafe {
            ptr::copy_nonoverlapping(slice.as_ptr(), ptr, len);
            Ok(slice::from_raw_parts_mut(ptr, len))
        }
    }
    /// Tries to clone `slice` into the arena. Element destructors (if any)
    /// run when the `Rodeo` is dropped.
    ///
    /// # Errors
    ///
    /// Returns the allocator's error when the allocation fails.
    pub fn try_alloc_slice_clone<T: Clone>(&self, slice: &[T]) -> Result<&mut [T], A::Error> {
        let len = slice.len();
        if mem::needs_drop::<T>() {
            // Mirror the layout arithmetic in `slice_drop_finalizer` so we
            // can locate the `usize` length slot that sits between the
            // header and the elements.
            let header_layout = Layout::new::<Header>();
            let len_layout = Layout::new::<usize>();
            let (hdr_len_layout, offset_len) = header_layout.extend(len_layout).unwrap();
            let (_, offset_t) = hdr_len_layout.extend(Layout::new::<T>()).unwrap();
            let finalizer = slice_drop_finalizer::<T>;
            let ptr =
                self.try_alloc_layout_with_finalizer(Layout::for_value(slice), finalizer, len)?;
            let len_slot: *mut usize = ptr.wrapping_sub(offset_t - offset_len).cast();
            let ptr: *mut T = ptr.cast();
            unsafe {
                let progress = Cell::new(0);
                // Panic guard: the slice's finalizer is already registered
                // with the full length, but `clone` may unwind mid-loop. On
                // unwind, drop the clones written so far AND zero the length
                // slot; otherwise the finalizer would later double-drop the
                // cleaned-up prefix and drop the uninitialized tail when the
                // `Rodeo` itself is dropped during unwinding.
                let guard = DropCallback(|| {
                    let to_cleanup = progress.get();
                    for i in 0..to_cleanup {
                        ptr.wrapping_add(i).drop_in_place();
                    }
                    len_slot.write(0);
                });
                for (i, item) in slice.iter().enumerate() {
                    ptr.wrapping_add(i).write(item.clone());
                    progress.set(progress.get() + 1);
                }
                mem::forget(guard);
                Ok(slice::from_raw_parts_mut(ptr, len))
            }
        } else {
            let ptr = self.allocator.try_alloc_layout(Layout::for_value(slice))?;
            let ptr: *mut T = ptr.cast().as_ptr();
            // SAFETY: freshly allocated storage with the slice's layout. `T`
            // has no drop glue, so a panic in `clone` leaks at most plain
            // arena memory — no finalizer was registered for this path.
            unsafe {
                for (i, item) in slice.iter().enumerate() {
                    ptr.wrapping_add(i).write(item.clone());
                }
                Ok(slice::from_raw_parts_mut(ptr, len))
            }
        }
    }
}
/// Panics with an out-of-memory message.
///
/// Kept out-of-line and `#[cold]` so the happy path in the `alloc*`
/// wrappers stays small and well-predicted.
#[inline(never)]
#[cold]
fn oom() -> ! {
    panic!("out of memory")
}
impl<A> Drop for Rodeo<A> {
    /// Walks the header chain from newest to oldest, running every
    /// registered finalizer.
    fn drop(&mut self) {
        let mut node = self.last.get();
        while let Some(header) = node {
            // Read the link first; `finalize` only drops the value stored
            // behind the header and never touches the header's own fields.
            node = unsafe { header.as_ref().previous };
            Header::finalize(header);
        }
    }
}
// Compile the README's code samples as doctests without exporting anything.
#[cfg(doctest)]
#[doc = include_str!("../README.md")]
extern "C" {}
/// Layout of the internal allocation header (hidden from rendered docs;
/// not intended as part of the stable public API).
#[doc(hidden)]
pub const HEADER_LAYOUT: Layout = Layout::new::<Header>();
/// Scope/panic guard: runs the stored callback exactly once, when dropped.
struct DropCallback<C: FnMut()>(C);

impl<C: FnMut()> Drop for DropCallback<C> {
    /// Invoke the callback as the guard goes out of scope.
    fn drop(&mut self) {
        let callback = &mut self.0;
        callback();
    }
}