use core::{
alloc::Layout,
marker::PhantomData,
mem::{self, ManuallyDrop},
ptr::{self, NonNull},
};
use thiserror::Error;
use crate::Allocator;
/// A low-level, growable buffer of uninitialized `T` slots allocated with `A`.
///
/// Owns the raw allocation only: it never reads, writes, or drops any `T`
/// (its `Drop` impl just returns the memory to the allocator). The element
/// type is carried via `PhantomData<T>` for layout/variance purposes.
#[derive(Debug)]
pub struct RawBuffer<T, A: Allocator> {
// Type-erased state (pointer, capacity, allocator); all element-size
// arithmetic is done by passing `T`'s layout down explicitly.
inner: RawBufferInner<A>,
_marker: PhantomData<T>,
}
/// Type-erased core of [`RawBuffer`]: raw pointer, capacity in elements, and
/// the allocator. Element size/alignment are supplied per call by the caller.
#[derive(Debug)]
struct RawBufferInner<A> {
/// Pointer to the allocation; dangling (well-aligned, non-null, no
/// provenance) when `cap == 0` or the element type is a ZST.
ptr: NonNull<u8>,
/// Capacity in *elements*, not bytes; 0 when nothing was allocated.
cap: usize,
/// The allocator that owns (and will be asked to free) the memory.
alloc: A,
}
/// Error kind returned by the fallible allocation paths
/// (`try_with_capacity_in` / `try_allocate_in`).
#[derive(Copy, Clone, PartialEq, Eq, Debug, Error)]
pub enum TryReserveError {
/// The requested capacity's byte size overflowed (`Layout::array` failed,
/// or the size exceeded `isize::MAX` on a sub-64-bit target).
#[error("capacity overflow")]
CapacityOverflow,
/// The allocator refused to provide memory for `layout`.
#[error("allocation error for layout {:?}", layout)]
AllocError {
/// The layout that could not be allocated.
layout: Layout,
},
}
impl<T, A: Allocator> RawBuffer<T, A> {
    /// Creates an empty buffer in `alloc` without allocating.
    #[inline]
    pub const fn new_in(alloc: A) -> Self {
        Self { inner: RawBufferInner::new_in(alloc, align_of::<T>()), _marker: PhantomData }
    }

    /// Allocates room for `capacity` elements in `alloc`.
    ///
    /// # Panics
    /// Panics (via `handle_error`) on capacity overflow or allocation failure.
    #[inline]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        Self { inner: RawBufferInner::with_capacity_in::<T>(capacity, alloc), _marker: PhantomData }
    }

    /// Fallible version of [`Self::with_capacity_in`].
    ///
    /// # Errors
    /// Returns [`TryReserveError`] on capacity overflow or allocation failure.
    #[inline]
    pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
        // `map` instead of a manual match: only the `Ok` payload needs rewrapping.
        RawBufferInner::try_with_capacity_in::<T>(capacity, alloc)
            .map(|inner| Self { inner, _marker: PhantomData })
    }

    /// Decomposes the buffer into `(pointer, capacity, allocator)` without
    /// freeing the allocation.
    #[must_use = "losing the pointer will leak memory"]
    pub fn into_raw_parts(self) -> (*mut T, usize, A) {
        // ManuallyDrop suppresses our `Drop` impl so the allocation survives.
        let me = ManuallyDrop::new(self);
        let capacity = me.capacity();
        let ptr = me.ptr();
        // SAFETY: `me` is never dropped, so the allocator value is moved out
        // exactly once and never observed again through `me`.
        let alloc = unsafe { ptr::read(me.allocator()) };
        (ptr, capacity, alloc)
    }

    /// Rebuilds a buffer from parts produced by [`Self::into_raw_parts`].
    ///
    /// # Safety
    /// `ptr`, `capacity`, and `alloc` must together describe either a live
    /// allocation previously made through this type, or an empty buffer
    /// (well-aligned dangling pointer with zero capacity).
    #[inline]
    pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
        unsafe {
            let ptr = ptr.cast();
            Self {
                inner: RawBufferInner::from_raw_parts_in(ptr, capacity, alloc),
                _marker: PhantomData,
            }
        }
    }

    /// Raw pointer to the first element slot; dangling when nothing is
    /// allocated.
    #[inline]
    pub fn ptr(&self) -> *mut T {
        self.inner.ptr()
    }

    /// Same pointer as [`Self::ptr`], as a `NonNull`.
    #[inline]
    pub fn non_null(&self) -> NonNull<T> {
        self.inner.non_null()
    }

    /// Number of elements the buffer can hold; `usize::MAX` for ZSTs.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.inner.capacity(size_of::<T>())
    }

    /// Shared reference to the underlying allocator.
    #[inline]
    pub const fn allocator(&self) -> &A {
        self.inner.allocator()
    }

    /// Mutable reference to the underlying allocator.
    ///
    /// # Safety
    /// The caller must keep the allocator able to free this buffer's current
    /// allocation — e.g. it must not be replaced with a different allocator
    /// while memory obtained from the original one is still held.
    #[inline]
    pub unsafe fn allocator_mut(&mut self) -> &mut A {
        &mut self.inner.alloc
    }
}
impl<A: Allocator> RawBufferInner<A> {
#[inline]
const fn new_in(alloc: A, align: usize) -> Self {
let ptr = unsafe { core::mem::transmute::<usize, NonNull<u8>>(align) };
Self { ptr, cap: 0, alloc }
}
#[inline]
fn with_capacity_in<T>(capacity: usize, alloc: A) -> Self {
match Self::try_allocate_in::<T>(capacity, alloc) {
Ok(this) => this,
Err(err) => handle_error(err),
}
}
fn try_allocate_in<T>(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
let layout = Layout::array::<T>(capacity).map_err(|_| TryReserveError::CapacityOverflow)?;
if layout.size() == 0 {
return Ok(Self::new_in(alloc, layout.align()));
}
alloc_guard(layout.size())?;
let result = unsafe { alloc.allocate(layout) };
let ptr = match result {
Ok(ptr) => ptr,
Err(_) => return Err(TryReserveError::AllocError { layout }),
};
Ok(Self { ptr: ptr.cast(), cap: capacity, alloc })
}
#[inline]
fn ptr<T>(&self) -> *mut T {
self.non_null::<T>().as_ptr()
}
#[inline]
fn non_null<T>(&self) -> NonNull<T> {
self.ptr.cast()
}
#[inline]
fn capacity(&self, elem_size: usize) -> usize {
if elem_size == 0 {
usize::MAX
} else {
self.cap
}
}
#[inline]
unsafe fn from_raw_parts_in(ptr: *mut u8, cap: usize, alloc: A) -> Self {
Self { ptr: unsafe { NonNull::new_unchecked(ptr) }, cap, alloc }
}
#[inline]
const fn allocator(&self) -> &A {
&self.alloc
}
#[inline]
fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
if elem_layout.size() == 0 || self.cap == 0 {
None
} else {
unsafe {
let alloc_size = elem_layout.size().unchecked_mul(self.cap);
let layout = Layout::from_size_align_unchecked(alloc_size, elem_layout.align());
Some((self.ptr, layout))
}
}
}
#[inline]
fn try_with_capacity_in<T>(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
Self::try_allocate_in::<T>(capacity, alloc)
}
unsafe fn deallocate(&mut self, elem_layout: Layout) {
if let Some((ptr, layout)) = self.current_memory(elem_layout) {
unsafe {
self.alloc.deallocate(ptr, layout);
}
}
}
}
impl<T, A: Allocator> Drop for RawBuffer<T, A> {
    /// Frees the backing memory. Does NOT drop any stored `T`s — this type
    /// never tracks which slots are initialized.
    fn drop(&mut self) {
        // `Layout::new::<T>()` is the safe equivalent of the former
        // `from_size_align_unchecked(size_of::<T>(), align_of::<T>())`:
        // same size and alignment, no unsafe precondition to uphold.
        // SAFETY: this is the element layout the buffer was allocated with,
        // and the buffer is not used after drop.
        unsafe { self.inner.deallocate(Layout::new::<T>()) }
    }
}
/// Dispatches a reserve failure to the matching diverging handler.
///
/// Kept `#[cold]` so callers' hot paths stay compact.
#[cold]
fn handle_error(e: TryReserveError) -> ! {
    match e {
        TryReserveError::AllocError { layout } => handle_alloc_error(layout),
        TryReserveError::CapacityOverflow => capacity_overflow(),
    }
}
/// Panics with the canonical capacity-overflow message.
///
/// Marked `#[cold]` in addition to `#[inline(never)]`, consistent with the
/// other diverging error paths (`handle_error`, `handle_alloc_error`), so
/// the optimizer lays this branch out off the hot path.
#[cold]
#[inline(never)]
fn capacity_overflow() -> ! {
    panic!("capacity overflow");
}
/// Diverging handler for a failed allocation of `layout`.
///
/// Being a `const fn`, it is usable from compile-time contexts; the panic
/// lives in an inner `const fn` so the same message is produced at both
/// compile time and run time.
#[cold]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    const fn alloc_failure(_layout: Layout) -> ! {
        panic!("allocation failed");
    }
    alloc_failure(layout)
}
/// Verifies that an allocation of `alloc_size` bytes does not exceed
/// `isize::MAX`.
///
/// The check only matters on targets narrower than 64 bits; on 64-bit
/// targets the whole condition folds to "always within limit" and the
/// function compiles to `Ok(())`.
#[inline]
fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
    let within_limit = usize::BITS >= 64 || alloc_size <= isize::MAX as usize;
    if within_limit {
        Ok(())
    } else {
        Err(TryReserveError::CapacityOverflow)
    }
}