use core::alloc::Layout;
use core::cell::UnsafeCell;
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ptr::NonNull;
use core::{fmt, ptr};
extern crate alloc;
/// The error type returned by [`Allocator`] methods when an allocation
/// request cannot be satisfied.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct AllocError;
// Implement the crate's error trait only when an error-trait backend is
// available (`std` or the `core-error` polyfill feature).
#[cfg(any(feature = "std", feature = "core-error"))]
impl crate::Error for AllocError {}
impl fmt::Display for AllocError {
    /// Writes the fixed diagnostic message for a failed allocation.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(formatter, "memory allocation failed")
    }
}
/// A memory allocation interface modeled on the (unstable) standard-library
/// `core::alloc::Allocator` trait.
///
/// # Safety
///
/// Implementors are expected to uphold the same contract as the standard
/// `Allocator` trait: blocks returned by the allocation methods must fit the
/// requested layout and remain valid until passed to `deallocate`.
/// NOTE(review): this mirrors the allocator_api contract — confirm against
/// the crate's intended semantics.
pub unsafe trait Allocator {
    /// Attempts to allocate a block of memory fitting `layout`.
    ///
    /// Returns a pointer-to-slice covering the allocated bytes, or
    /// [`AllocError`] if the request cannot be satisfied.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError>;
    /// Like [`Allocator::allocate`], but the returned block is zero-filled.
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let ptr = self.allocate(layout)?;
        // SAFETY: `ptr` was just allocated and covers `ptr.len()` writable bytes.
        unsafe { (ptr.as_ptr() as *mut u8).write_bytes(0, ptr.len()) }
        Ok(ptr)
    }
    /// Deallocates the block at `ptr`.
    ///
    /// # Safety
    ///
    /// `ptr` must denote a block currently allocated by this allocator, and
    /// `layout` must be the layout that block was allocated with.
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout);
    /// Grows the block at `ptr` from `old_layout` to `new_layout`.
    ///
    /// The default implementation allocates a fresh block, copies
    /// `old_layout.size()` bytes over, and deallocates the old block.
    ///
    /// # Safety
    ///
    /// `ptr` must be currently allocated by this allocator with `old_layout`,
    /// and `new_layout.size()` must be at least `old_layout.size()`.
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );
        let new_ptr = self.allocate(new_layout)?;
        // SAFETY: both blocks are distinct allocations at least
        // `old_layout.size()` bytes long; the old block is released after the
        // copy, per the caller's guarantee that it was allocated here.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, old_layout.size());
            self.deallocate(ptr, old_layout);
        }
        Ok(new_ptr)
    }
    /// Like [`Allocator::grow`], but any bytes beyond the old length are
    /// zero-initialized (the new block is allocated zeroed, then the old
    /// contents are copied over the front).
    ///
    /// # Safety
    ///
    /// Same requirements as [`Allocator::grow`].
    unsafe fn grow_zeroed(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );
        let new_ptr = self.allocate_zeroed(new_layout)?;
        // SAFETY: see `grow`; copying over a zeroed block leaves the tail zeroed.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, old_layout.size());
            self.deallocate(ptr, old_layout);
        }
        Ok(new_ptr)
    }
    /// Shrinks the block at `ptr` from `old_layout` to `new_layout`.
    ///
    /// The default implementation allocates a fresh block, copies the first
    /// `new_layout.size()` bytes, and deallocates the old block.
    ///
    /// # Safety
    ///
    /// `ptr` must be currently allocated by this allocator with `old_layout`,
    /// and `new_layout.size()` must be at most `old_layout.size()`.
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );
        let new_ptr = self.allocate(new_layout)?;
        // SAFETY: only the retained prefix (`new_layout.size()` bytes) is copied.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, new_layout.size());
            self.deallocate(ptr, old_layout);
        }
        Ok(new_ptr)
    }
    /// Returns `self` as a plain reference; convenient for passing an
    /// allocator by reference where an owned value is expected.
    #[inline(always)]
    fn by_ref(&self) -> &Self
    where
        Self: Sized,
    {
        self
    }
}
// SAFETY: a shared reference to an allocator is itself an allocator; every
// method forwards unchanged to the referenced `T`, so `T`'s guarantees carry
// over directly.
unsafe impl<T: ?Sized + Allocator> Allocator for &T {
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate(layout)
    }
    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        (**self).deallocate(ptr, layout)
    }
    #[inline(always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate_zeroed(layout)
    }
    #[inline(always)]
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow(ptr, old_layout, new_layout)
    }
    #[inline(always)]
    unsafe fn grow_zeroed(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow_zeroed(ptr, old_layout, new_layout)
    }
    #[inline(always)]
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).shrink(ptr, old_layout, new_layout)
    }
}
// SAFETY: a mutable reference to an allocator is itself an allocator; every
// method forwards unchanged to the referenced `T` (all trait methods only
// need `&self`, so the exclusive borrow is simply reborrowed shared).
unsafe impl<T: ?Sized + Allocator> Allocator for &mut T {
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate(layout)
    }
    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        (**self).deallocate(ptr, layout)
    }
    #[inline(always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate_zeroed(layout)
    }
    #[inline(always)]
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow(ptr, old_layout, new_layout)
    }
    #[inline(always)]
    unsafe fn grow_zeroed(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow_zeroed(ptr, old_layout, new_layout)
    }
    #[inline(always)]
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).shrink(ptr, old_layout, new_layout)
    }
}
// SAFETY: a boxed allocator is itself an allocator; every method forwards
// unchanged to the boxed `T`. Note that dropping the box drops the inner
// allocator, so outstanding allocations must not outlive the box.
#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
unsafe impl<T: ?Sized + Allocator> Allocator for alloc::boxed::Box<T> {
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate(layout)
    }
    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        (**self).deallocate(ptr, layout)
    }
    #[inline(always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate_zeroed(layout)
    }
    #[inline(always)]
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow(ptr, old_layout, new_layout)
    }
    #[inline(always)]
    unsafe fn grow_zeroed(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).grow_zeroed(ptr, old_layout, new_layout)
    }
    #[inline(always)]
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).shrink(ptr, old_layout, new_layout)
    }
}
#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
/// Zero-sized handle to the global allocator (`alloc::alloc`), analogous to
/// `std::alloc::Global`.
pub struct Global;
#[cfg(feature = "alloc")]
#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
// SAFETY: all non-zero-size requests are forwarded to the global allocator.
// Zero-sized requests are served with a dangling, suitably aligned sentinel
// pointer that is never passed to the global allocator.
unsafe impl Allocator for Global {
    /// Allocates memory from the global allocator; zero-size requests return
    /// a dangling pointer with the requested alignment.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let ptr = if layout.size() == 0 {
            // Dangling sentinel: address == alignment, never dereferenced.
            core::ptr::null_mut::<u8>().wrapping_add(layout.align())
        } else {
            // SAFETY: `layout` has non-zero size, as required by `alloc`.
            unsafe { alloc::alloc::alloc(layout) }
        };
        if ptr.is_null() {
            Err(AllocError)
        } else {
            // SAFETY: `ptr` is non-null; the sentinel or allocation covers
            // `layout.size()` bytes.
            Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(ptr, layout.size())) })
        }
    }
    /// Like [`Allocator::allocate`] but zero-initialized, via `alloc_zeroed`.
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let ptr = if layout.size() == 0 {
            core::ptr::null_mut::<u8>().wrapping_add(layout.align())
        } else {
            // SAFETY: `layout` has non-zero size, as required by `alloc_zeroed`.
            unsafe { alloc::alloc::alloc_zeroed(layout) }
        };
        if ptr.is_null() {
            Err(AllocError)
        } else {
            // SAFETY: `ptr` is non-null and covers `layout.size()` bytes.
            Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(ptr, layout.size())) })
        }
    }
    /// Returns a block to the global allocator.
    ///
    /// # Safety
    ///
    /// `ptr`/`layout` must match a prior allocation from this allocator.
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized "allocations" are dangling sentinels that were never
        // handed out by the global allocator, so there is nothing to free.
        if layout.size() != 0 {
            alloc::alloc::dealloc(ptr.as_ptr(), layout);
        }
    }
    /// Grows a block in place via `realloc` when possible.
    ///
    /// # Safety
    ///
    /// `ptr` must be currently allocated here with `old_layout`, and
    /// `new_layout.size() >= old_layout.size()`.
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        core::debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );
        if old_layout.size() == 0 {
            // BUGFIX: `ptr` is the dangling zero-size sentinel and never came
            // from the global allocator, so calling `realloc` on it would be
            // undefined behavior. There are no bytes to preserve: just allocate.
            return self.allocate(new_layout);
        }
        if old_layout.align() == new_layout.align() {
            // `old_layout.size() > 0` and `new >= old`, so the old pointer is
            // a real allocation and the new size is non-zero: `realloc` is valid.
            let raw = unsafe { alloc::alloc::realloc(ptr.as_ptr(), old_layout, new_layout.size()) };
            if raw.is_null() {
                Err(AllocError)
            } else {
                // SAFETY: `raw` is non-null and covers `new_layout.size()` bytes.
                Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(raw, new_layout.size())) })
            }
        } else {
            // Alignment changed: `realloc` cannot be used; move the bytes.
            let new_ptr = self.allocate(new_layout)?;
            // SAFETY: distinct blocks; the old one holds `old_layout.size()`
            // readable bytes and is released after the copy.
            unsafe {
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, old_layout.size());
                self.deallocate(ptr, old_layout);
            }
            Ok(new_ptr)
        }
    }
    /// Shrinks a block in place via `realloc` when possible.
    ///
    /// # Safety
    ///
    /// `ptr` must be currently allocated here with `old_layout`, and
    /// `new_layout.size() <= old_layout.size()`.
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        core::debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );
        if new_layout.size() == 0 {
            // BUGFIX: release the old block before handing out the zero-size
            // sentinel; the previous implementation leaked it whenever the
            // alignments happened to match. (`deallocate` is a no-op when
            // `old_layout.size() == 0`.)
            unsafe { self.deallocate(ptr, old_layout) };
            return self.allocate(new_layout);
        }
        if old_layout.align() == new_layout.align() {
            // `0 < new <= old`, so the old pointer is a real allocation and
            // the new size is non-zero: `realloc` is valid.
            let raw = unsafe { alloc::alloc::realloc(ptr.as_ptr(), old_layout, new_layout.size()) };
            if raw.is_null() {
                Err(AllocError)
            } else {
                // SAFETY: `raw` is non-null and covers `new_layout.size()` bytes.
                Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(raw, new_layout.size())) })
            }
        } else {
            // Alignment changed: move the retained prefix to a fresh block.
            let new_ptr = self.allocate(new_layout)?;
            // SAFETY: only the first `new_layout.size()` bytes are copied.
            unsafe {
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, new_layout.size());
                self.deallocate(ptr, old_layout);
            }
            Ok(new_ptr)
        }
    }
}
/// Hand-built vtable used by [`DynAlloc`] to dispatch to a type-erased
/// allocator. Every entry takes a type-erased pointer to the inline
/// allocator storage as its first argument.
#[derive(Copy, Clone, Debug)]
pub(crate) struct VTable {
    /// Type-erased [`Allocator::allocate`].
    pub allocate: unsafe fn(*const (), Layout) -> Result<NonNull<[u8]>, AllocError>,
    /// Type-erased [`Allocator::allocate_zeroed`].
    pub allocate_zeroed: unsafe fn(*const (), Layout) -> Result<NonNull<[u8]>, AllocError>,
    /// Type-erased [`Allocator::deallocate`].
    pub deallocate: unsafe fn(*const (), ptr: NonNull<u8>, Layout),
    /// Type-erased [`Allocator::grow`].
    pub grow: unsafe fn(*const (), NonNull<u8>, Layout, Layout) -> Result<NonNull<[u8]>, AllocError>,
    /// Type-erased [`Allocator::grow_zeroed`].
    pub grow_zeroed: unsafe fn(*const (), NonNull<u8>, Layout, Layout) -> Result<NonNull<[u8]>, AllocError>,
    /// Type-erased [`Allocator::shrink`].
    pub shrink: unsafe fn(*const (), NonNull<u8>, Layout, Layout) -> Result<NonNull<[u8]>, AllocError>,
    /// Clones the erased allocator from `src` (second arg) into the
    /// uninitialized storage at `dst` (first arg); `None` when the erased
    /// type is not `Clone`.
    pub clone: Option<unsafe fn(*mut (), *const ())>,
    /// Drops the erased allocator in place.
    pub drop: unsafe fn(*mut ()),
}
/// A type-erased allocator that stores the wrapped allocator inline in a
/// single pointer-sized, pointer-aligned slot (see the size/align checks in
/// `try_new_unclone` / `try_new_clone`).
pub struct DynAlloc<'a> {
    /// Inline storage for the erased allocator value itself (not a pointer
    /// to it); the vtable entries receive the address of this field.
    pub(crate) alloc: UnsafeCell<MaybeUninit<*const ()>>,
    /// Static dispatch table built for the concrete allocator type.
    pub(crate) vtable: &'static VTable,
    /// Ties the erased allocator to the lifetime of any borrows it captures.
    __marker: PhantomData<&'a ()>,
}
// SAFETY: the constructors (`try_new_unclone` / `try_new_clone`) require the
// erased allocator type to be `Send`, so moving the storage across threads
// is sound.
unsafe impl Send for DynAlloc<'_> {}
// SAFETY: each method forwards to a vtable entry that was produced from a
// concrete `Allocator` implementation; the first argument is the address of
// the inline `alloc` storage (not its contents), matching what the erased
// functions expect as their receiver.
unsafe impl Allocator for DynAlloc<'_> {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // Pass the address of the storage slot as the erased receiver.
        unsafe { (self.vtable.allocate)(core::ptr::addr_of!(self.alloc) as *const (), layout) }
    }
    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe { (self.vtable.deallocate)(core::ptr::addr_of!(self.alloc) as *const (), ptr, layout) }
    }
    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (self.vtable.allocate_zeroed)(core::ptr::addr_of!(self.alloc) as *const (), layout) }
    }
    #[inline]
    unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (self.vtable.grow)(core::ptr::addr_of!(self.alloc) as *const (), ptr, old_layout, new_layout) }
    }
    #[inline]
    unsafe fn grow_zeroed(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (self.vtable.grow_zeroed)(core::ptr::addr_of!(self.alloc) as *const (), ptr, old_layout, new_layout) }
    }
    #[inline]
    unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        unsafe { (self.vtable.shrink)(core::ptr::addr_of!(self.alloc) as *const (), ptr, old_layout, new_layout) }
    }
}
impl Drop for DynAlloc<'_> {
    #[inline]
    fn drop(&mut self) {
        // Run the erased allocator's destructor in place via the vtable; the
        // storage itself is inline and needs no separate deallocation.
        unsafe { (self.vtable.drop)(core::ptr::addr_of_mut!(self.alloc) as *mut ()) }
    }
}
impl Clone for DynAlloc<'_> {
    /// Clones the type-erased allocator via the vtable's `clone` entry.
    ///
    /// # Panics
    ///
    /// Panics if this `DynAlloc` was built from a non-`Clone` allocator
    /// (i.e. via [`DynAlloc::try_new_unclone`] / [`DynAlloc::from_mut`]);
    /// check [`DynAlloc::cloneable`] first when in doubt.
    #[inline]
    fn clone(&self) -> Self {
        // Improvement over a bare `unwrap()`: name the misuse in the panic
        // message instead of an anonymous `Option::unwrap` failure.
        let clone_fn = self
            .vtable
            .clone
            .expect("called `clone` on a `DynAlloc` built from a non-`Clone` allocator");
        let mut alloc = UnsafeCell::new(MaybeUninit::uninit());
        // SAFETY: `clone_fn` writes a valid erased allocator into the
        // uninitialized destination slot, reading from our own storage.
        unsafe {
            clone_fn(core::ptr::addr_of_mut!(alloc) as *mut (), core::ptr::addr_of!(self.alloc) as *const ());
        }
        Self {
            alloc,
            vtable: self.vtable,
            __marker: PhantomData,
        }
    }
}
impl<'a> DynAlloc<'a> {
    /// Type-erases `alloc` without requiring `Clone`.
    ///
    /// Succeeds only when `A` fits the inline pointer-sized slot (size and
    /// alignment no larger than `*const ()`); otherwise the allocator is
    /// handed back unchanged in `Err`. The resulting `DynAlloc` panics if
    /// cloned (its vtable has `clone: None`).
    #[inline]
    pub fn try_new_unclone<A: 'a + Allocator + Send>(alloc: A) -> Result<Self, A> {
        if core::mem::size_of::<A>() <= core::mem::size_of::<*const ()>() && core::mem::align_of::<A>() <= core::mem::align_of::<*const ()>() {
            // Helper trait whose sole purpose is to give each concrete `A`
            // its own `'static` vtable of type-erased function pointers.
            trait AllocUnclone: Allocator + Send {
                const VTABLE: &'static VTable = &unsafe {
                    VTable {
                        // NOTE(review): these transmutes reinterpret the
                        // `&Self` receiver as `*const ()`; this relies on
                        // `fn(&Self, ..)` and `unsafe fn(*const (), ..)`
                        // having identical ABIs — confirm with Miri.
                        allocate: core::mem::transmute(Self::allocate as fn(&Self, _) -> _),
                        allocate_zeroed: core::mem::transmute(Self::allocate_zeroed as fn(&Self, _) -> _),
                        deallocate: core::mem::transmute(Self::deallocate as unsafe fn(&Self, _, _) -> _),
                        grow: core::mem::transmute(Self::grow as unsafe fn(&Self, _, _, _) -> _),
                        grow_zeroed: core::mem::transmute(Self::grow_zeroed as unsafe fn(&Self, _, _, _) -> _),
                        shrink: core::mem::transmute(Self::shrink as unsafe fn(&Self, _, _, _) -> _),
                        // No clone entry: cloning this `DynAlloc` panics.
                        clone: None,
                        drop: core::mem::transmute(core::ptr::drop_in_place::<Self> as unsafe fn(_) -> _),
                    }
                };
            }
            impl<A: Allocator + Send> AllocUnclone for A {}
            Ok(Self {
                // Move `A`'s bytes into the pointer-sized slot; `ManuallyDrop`
                // prevents a double drop of the moved-from value.
                alloc: unsafe { core::mem::transmute_copy(&core::mem::ManuallyDrop::new(alloc)) },
                vtable: <A as AllocUnclone>::VTABLE,
                __marker: PhantomData,
            })
        } else {
            // `A` does not fit the inline slot; give it back to the caller.
            Err(alloc)
        }
    }
    /// Type-erases a `Clone` allocator, producing a cloneable `DynAlloc`.
    ///
    /// Same inline-storage size/alignment requirement as
    /// [`DynAlloc::try_new_unclone`]; fails with `Err(alloc)` otherwise.
    #[inline]
    pub fn try_new_clone<A: 'a + Clone + Allocator + Send>(alloc: A) -> Result<Self, A> {
        if core::mem::size_of::<A>() <= core::mem::size_of::<*const ()>() && core::mem::align_of::<A>() <= core::mem::align_of::<*const ()>() {
            // Like `AllocUnclone`, but also provides a `clone` vtable entry.
            trait AllocClone: Allocator + Send + Clone {
                const VTABLE: &'static VTable = &unsafe {
                    VTable {
                        // NOTE(review): same ABI-compatibility assumption as
                        // in `try_new_unclone` — confirm with Miri.
                        allocate: core::mem::transmute(Self::allocate as fn(_, _) -> _),
                        allocate_zeroed: core::mem::transmute(Self::allocate_zeroed as fn(_, _) -> _),
                        deallocate: core::mem::transmute(Self::deallocate as unsafe fn(_, _, _) -> _),
                        grow: core::mem::transmute(Self::grow as unsafe fn(_, _, _, _) -> _),
                        grow_zeroed: core::mem::transmute(Self::grow_zeroed as unsafe fn(_, _, _, _) -> _),
                        shrink: core::mem::transmute(Self::shrink as unsafe fn(_, _, _, _) -> _),
                        // Clone `Self` out of `src` into the uninitialized `dst` slot.
                        clone: Some(|dst: *mut (), src: *const ()| (dst as *mut Self).write((*(src as *const Self)).clone())),
                        drop: core::mem::transmute(core::ptr::drop_in_place::<Self> as unsafe fn(_) -> _),
                    }
                };
            }
            impl<A: Allocator + Send + Clone> AllocClone for A {}
            Ok(Self {
                alloc: unsafe { core::mem::transmute_copy(&core::mem::ManuallyDrop::new(alloc)) },
                vtable: <A as AllocClone>::VTABLE,
                __marker: PhantomData,
            })
        } else {
            Err(alloc)
        }
    }
    /// Erases a shared reference to an allocator.
    ///
    /// `&A` is always pointer-sized and pointer-aligned, so the size check
    /// in `try_new_clone` cannot fail (hence the `unreachable!`). `A: Sync`
    /// is required so that `&A` is `Send`.
    #[inline]
    pub fn from_ref<A: Allocator + Sync>(alloc: &'a A) -> Self {
        match Self::try_new_clone(alloc) {
            Ok(me) => me,
            Err(_) => unreachable!(),
        }
    }
    /// Erases a mutable reference to an allocator (non-cloneable, since
    /// `&mut A` is not `Clone`).
    ///
    /// As with [`DynAlloc::from_ref`], a reference always fits the inline
    /// slot, so the fallible constructor cannot fail here.
    #[inline]
    pub fn from_mut<A: Allocator + Send>(alloc: &'a mut A) -> Self {
        match Self::try_new_unclone(alloc) {
            Ok(me) => me,
            Err(_) => unreachable!(),
        }
    }
    /// Reborrows this `DynAlloc` as a new, shorter-lived `DynAlloc`.
    #[inline]
    pub fn by_mut(&mut self) -> DynAlloc<'_> {
        DynAlloc::from_mut(self)
    }
    /// Returns `true` if cloning this `DynAlloc` will not panic (i.e. the
    /// vtable carries a `clone` entry).
    #[inline]
    pub fn cloneable(&self) -> bool {
        self.vtable.clone.is_some()
    }
}