use crate::cap::Cap;
use std::{
alloc::{self, Layout},
ptr::{self, NonNull},
};
/// The error type returned when a memory allocation request cannot be
/// satisfied.
///
/// Carries no payload: callers generally treat every allocation failure the
/// same way. Marked `#[non_exhaustive]` so detail fields can be added later
/// without a breaking change.
#[derive(Debug, thiserror::Error)]
#[error("memory allocation failed")]
#[non_exhaustive]
pub struct AllocError;
/// A memory allocator, modeled after the (currently unstable)
/// `std::alloc::Allocator` API.
///
/// # Safety
///
/// Implementors must return pointers that fit the requested [`Layout`]
/// (at least `layout.size()` bytes, aligned to `layout.align()`), and must
/// keep a returned block live and unmoved until it is passed back to
/// [`deallocate`](Allocator::deallocate), [`grow`](Allocator::grow),
/// [`grow_zeroed`](Allocator::grow_zeroed), or [`shrink`](Allocator::shrink).
pub unsafe trait Allocator {
    /// Allocates a block of memory described by `layout`.
    ///
    /// # Errors
    ///
    /// Returns [`AllocError`] if the allocation fails.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError>;

    /// Like [`allocate`](Allocator::allocate), but the returned block is
    /// zero-filled.
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let ptr = self.allocate(layout)?;
        // SAFETY: `allocate` just returned a writable block of exactly
        // `ptr.len()` bytes, so zeroing that whole range stays in bounds.
        unsafe { (ptr.as_ptr() as *mut u8).write_bytes(0, ptr.len()) }
        Ok(ptr)
    }

    /// Deallocates the block at `ptr`.
    ///
    /// # Safety
    ///
    /// `ptr` must denote a block currently allocated by this allocator, and
    /// `layout` must be the same layout that block was allocated with.
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout);

    /// Grows the block at `ptr` from `old_layout` to `new_layout`.
    ///
    /// Default implementation: allocate a new block, copy the old contents,
    /// then free the old block. On failure the old block is left untouched
    /// and still owned by the caller.
    ///
    /// # Safety
    ///
    /// `ptr`/`old_layout` must describe a live block allocated by this
    /// allocator, and `new_layout.size()` must be >= `old_layout.size()`
    /// (checked only in debug builds).
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );
        let new_ptr = self.allocate(new_layout)?;
        // SAFETY: the source is valid for `old_layout.size()` bytes (caller
        // contract), the destination is a fresh block of at least that size,
        // and distinct allocations cannot overlap. After the copy the old
        // block is released with its original layout.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, old_layout.size());
            self.deallocate(ptr, old_layout);
        }
        Ok(new_ptr)
    }

    /// Like [`grow`](Allocator::grow), but bytes beyond the old size are
    /// zeroed in the returned block.
    ///
    /// # Safety
    ///
    /// Same contract as [`grow`](Allocator::grow).
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );
        // The fresh block is fully zeroed; copying the old contents over the
        // prefix leaves exactly the tail zero-filled.
        let new_ptr = self.allocate_zeroed(new_layout)?;
        // SAFETY: same reasoning as in `grow` — valid source, fresh
        // non-overlapping destination, old block freed with its own layout.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, old_layout.size());
            self.deallocate(ptr, old_layout);
        }
        Ok(new_ptr)
    }

    /// Shrinks the block at `ptr` from `old_layout` to `new_layout`.
    ///
    /// # Safety
    ///
    /// `ptr`/`old_layout` must describe a live block allocated by this
    /// allocator, and `new_layout.size()` must be <= `old_layout.size()`
    /// (checked only in debug builds).
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );
        let new_ptr = self.allocate(new_layout)?;
        // SAFETY: only `new_layout.size()` bytes are copied, which is within
        // both the old block (new <= old) and the fresh block; the two
        // allocations are distinct so they cannot overlap.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, new_layout.size());
            self.deallocate(ptr, old_layout);
        }
        Ok(new_ptr)
    }

    /// Returns a reference to this allocator (handy because `&A` itself
    /// implements [`Allocator`]).
    #[inline(always)]
    fn by_ref(&self) -> &Self
    where
        Self: Sized,
    {
        self
    }
}
// Allow `&A` to be used wherever an `Allocator` is expected by forwarding
// every method to the referenced allocator.
//
// SAFETY: every call delegates unchanged to `A`, which already upholds the
// `Allocator` contract; going through a shared reference changes nothing.
unsafe impl<A> Allocator for &A
where
    A: Allocator + ?Sized,
{
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate(layout)
    }
    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        (**self).allocate_zeroed(layout)
    }
    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // SAFETY: caller upholds `deallocate`'s contract; forwarded as-is.
        unsafe { (**self).deallocate(ptr, layout) }
    }
    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: caller upholds `grow`'s contract; forwarded as-is.
        unsafe { (**self).grow(ptr, old_layout, new_layout) }
    }
    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: caller upholds `grow_zeroed`'s contract; forwarded as-is.
        unsafe { (**self).grow_zeroed(ptr, old_layout, new_layout) }
    }
    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: caller upholds `shrink`'s contract; forwarded as-is.
        unsafe { (**self).shrink(ptr, old_layout, new_layout) }
    }
}
/// An [`Allocator`] that can also free the header-carrying allocations
/// described by [`RawBuffer`] for a compile-time alignment `ALIGNMENT`.
pub trait BufferAllocator<const ALIGNMENT: usize>: Allocator {
    /// Deallocates the backing allocation of `raw`.
    ///
    /// # Safety
    ///
    /// `raw` must describe a live buffer previously allocated through this
    /// allocator, and it must not be used again after this call.
    unsafe fn deallocate_buffer(&self, raw: RawBuffer<ALIGNMENT>) {
        let (ptr, layout) = raw.alloc_info();
        // SAFETY: `alloc_info` reconstructs the exact pointer/layout pair the
        // buffer was allocated with, and the caller guarantees the buffer is
        // still live. The explicit `unsafe` block (rather than relying on the
        // implicit unsafety of an `unsafe fn` body) matches the style used
        // throughout this file and keeps the code compiling under
        // `unsafe_op_in_unsafe_fn`, which is an error in edition 2024.
        unsafe { self.deallocate(ptr, layout) };
    }
}
/// The raw parts of an aligned buffer: a pointer to the caller-visible data
/// region plus its capacity. Lets ownership of an allocation be passed around
/// without the full buffer type.
#[derive(Debug, Clone, Copy)]
pub struct RawBuffer<const ALIGNMENT: usize> {
    // Pointer to the data region — NOT the start of the allocation; the
    // allocation begins `BUFFER_OFFSET` bytes earlier, where the header
    // lives (see `alloc_info`).
    pub(crate) buf: NonNull<u8>,
    // Capacity of the data region, in the units tracked by `Cap`
    // (presumably bytes, judging by the layout math in `alloc_info` —
    // confirm against `crate::cap`).
    pub(crate) cap: Cap,
}
impl<const ALIGNMENT: usize> RawBuffer<ALIGNMENT> {
    /// Builds a `RawBuffer` from its raw parts.
    ///
    /// # Safety
    ///
    /// `buf` must point to the data region of a live allocation laid out by
    /// `RawAlignedBuffer::<ALIGNMENT>` with capacity `capacity`.
    pub(crate) unsafe fn new(buf: NonNull<u8>, capacity: Cap) -> Self {
        Self { buf, cap: capacity }
    }
    /// Pointer to the caller-visible data region.
    #[inline]
    pub fn buf_ptr(&self) -> NonNull<u8> {
        self.buf
    }
    /// Capacity of the data region.
    #[inline]
    pub fn capacity(&self) -> Cap {
        self.cap
    }
    /// Reconstructs the (allocation pointer, layout) pair this buffer was
    /// allocated with, suitable for `Allocator::deallocate`.
    ///
    /// The allocation starts `BUFFER_OFFSET` bytes before the data pointer,
    /// where a `Header` is stored; the layout is recomputed from the stored
    /// capacity.
    #[inline]
    pub fn alloc_info(&self) -> (NonNull<u8>, Layout) {
        // Recompute the layout from the capacity. `layout` returning `Err`
        // here would mean the buffer was created with an invalid size, which
        // `new`'s caller is expected to have ruled out.
        let layout = crate::raw::RawAlignedBuffer::<ALIGNMENT>::layout(self.cap.0.value())
            .expect("Invalid layout");
        // Step back from the data region to the true allocation start
        // (the header precedes the data by BUFFER_OFFSET bytes).
        let header = unsafe {
            self
                .buf
                .as_ptr()
                .sub(crate::raw::RawAlignedBuffer::<ALIGNMENT>::BUFFER_OFFSET)
        };
        // Sanity check (debug only): the size recorded in the header must
        // match the capacity we recomputed the layout from.
        debug_assert_eq!(self.cap.0.value(), unsafe {
            (*(header as *const crate::raw::Header)).alloc_buffer_size
        });
        (unsafe { NonNull::new_unchecked(header) }, layout)
    }
}
/// The global memory allocator: an [`Allocator`] backed by `std::alloc`.
#[derive(Debug, Clone, Copy, Default)]
pub struct Global;
impl Global {
    /// Shared implementation of `allocate` / `allocate_zeroed`.
    ///
    /// Zero-sized requests never touch the system allocator, matching
    /// `deallocate`, which ignores zero-sized layouts.
    #[inline]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized: return an aligned dangling pointer. The allocator
            // contract requires the result to satisfy `layout.align()`, and
            // `NonNull::<u8>::dangling()` has address 1, which violates that
            // for any alignment > 1. Using the alignment itself as the
            // address yields a correctly aligned, non-null dangling pointer
            // (this is what std's `Global` does via `Layout::dangling`).
            0 => {
                // SAFETY: `layout.align()` is non-zero by `Layout`'s
                // invariants, so the pointer is non-null.
                let dangling = unsafe { NonNull::new_unchecked(layout.align() as *mut u8) };
                Ok(NonNull::slice_from_raw_parts(dangling, 0))
            }
            size => unsafe {
                // SAFETY: `layout` has non-zero size, as `alloc` /
                // `alloc_zeroed` require.
                let raw_ptr = if zeroed {
                    alloc::alloc_zeroed(layout)
                } else {
                    alloc::alloc(layout)
                };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }
    /// Shared implementation of `grow` / `grow_zeroed`.
    ///
    /// # Safety
    ///
    /// Same contract as [`Allocator::grow`]: `ptr`/`old_layout` must describe
    /// a live block allocated by this allocator and
    /// `new_layout.size() >= old_layout.size()`.
    #[inline]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );
        match old_layout.size() {
            // The old block was zero-sized and thus never really allocated,
            // so growing is just a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),
            // Same alignment: let `realloc` extend in place or move for us.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();
                // SAFETY: caller guarantees `ptr`/`old_layout` describe a
                // live allocation; `new_size` is non-zero (>= old_size > 0).
                let raw_ptr = alloc::realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // `realloc` preserves the first `old_size` bytes but
                    // leaves the tail uninitialized; zero it to uphold the
                    // `grow_zeroed` contract.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },
            // Alignment changed: `realloc` cannot help, so allocate fresh,
            // copy the old contents, and free the old block.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                // SAFETY: both regions span at least `old_size` bytes and are
                // distinct allocations, so they cannot overlap.
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
// SAFETY: allocations are served by `std::alloc` (or are aligned dangling
// pointers for zero-sized layouts, which are never passed to the system
// allocator), satisfying the `Allocator` contract.
unsafe impl Allocator for Global {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }
    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }
    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized blocks were never actually allocated (see `alloc_impl`),
        // so there is nothing to free.
        if layout.size() != 0 {
            // SAFETY: caller guarantees `ptr` was allocated by this allocator
            // with exactly `layout`.
            unsafe { alloc::dealloc(ptr.as_ptr(), layout) }
        }
    }
    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: caller upholds `grow`'s contract, which is exactly
        // `grow_impl`'s contract.
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }
    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: caller upholds `grow_zeroed`'s contract, which is exactly
        // `grow_impl`'s contract.
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }
    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );
        match new_layout.size() {
            // Shrinking to zero: free the block and return an aligned
            // dangling pointer. `NonNull::dangling()` (address 1) would
            // violate the alignment contract whenever
            // `new_layout.align() > 1`, so use the alignment itself as the
            // address, mirroring `alloc_impl`'s zero-size path.
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                // SAFETY: `new_layout.align()` is non-zero by `Layout`'s
                // invariants, so the pointer is non-null.
                let dangling = NonNull::new_unchecked(new_layout.align() as *mut u8);
                Ok(NonNull::slice_from_raw_parts(dangling, 0))
            },
            // Same alignment: `realloc` can shrink (possibly moving).
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // SAFETY: caller guarantees `ptr`/`old_layout` describe a
                // live allocation; `new_size` is non-zero in this arm.
                let raw_ptr = alloc::realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },
            // Alignment changed: allocate fresh, copy the surviving prefix,
            // free the old block.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                // SAFETY: only `new_size` bytes are copied, which fits in
                // both blocks (new <= old), and the two allocations are
                // distinct so they cannot overlap.
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr() as *mut u8, new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
impl<const ALIGNMENT: usize> BufferAllocator<ALIGNMENT> for Global {}