use {
crate::utils::TypeMeta,
::alloc::alloc::{Layout, alloc, dealloc, realloc},
::core::{mem, num::NonZero, ops::Range, panic::UnwindSafe, ptr},
};
/// A pointer-sized vector: the only field is a single non-null pointer.
///
/// For non-ZST `T` the pointer either refers to a heap-allocated
/// [`HeapData<T>`] header (length + capacity) followed inline by the element
/// buffer, or carries the address-1 `SENTINEL_UNALLOCATED` tag meaning "no
/// allocation yet". For ZST `T` no allocation is ever made and the pointer's
/// address itself encodes the length (see `zst_encode_length`).
pub(crate) struct RawSlimVec<T> {
    // Tagged pointer; interpret via `is_allocated` / `zst_decode_length`.
    ptr: ptr::NonNull<HeapData<T>>,
}
/// Heap header stored at the start of a `RawSlimVec` allocation.
///
/// `#[repr(C)]` fixes the field order so the element buffer sits at a known
/// trailing offset (`offset_of!(HeapData<T>, buffer)`); the elements live in
/// the same allocation immediately after this header (see
/// `HeapData::compute_layout`).
#[repr(C)]
pub(crate) struct HeapData<T> {
    // Number of initialized elements currently stored in the trailing buffer.
    length: usize,
    // Element capacity of the trailing buffer; never zero, because an
    // unallocated vector is represented by the sentinel pointer instead.
    capacity: NonZero<usize>,
    // Zero-sized marker locating the start of the inline element storage.
    buffer: Buffer<T>,
}
/// Zero-sized, `T`-aligned marker for the inline element buffer that follows
/// a `HeapData<T>` header: `#[repr(transparent)]` over a zero-length
/// `MaybeUninit<T>` array yields size 0 with the alignment of `T`.
#[repr(transparent)]
pub(crate) struct Buffer<T>([mem::MaybeUninit<T>; 0]);
/// Guard used by the ZST-only helpers: panics (at runtime, or at const-eval
/// when expanded in a const context) unless `$t` is zero-sized.
macro_rules! assert_zst {
    ($t:ty) => {
        assert!(
            ::core::mem::size_of::<$t>() == 0,
            stringify!($t must be zero-sized)
        )
    };
}
/// Guard used by the heap-allocation paths: panics (at runtime, or at
/// const-eval when expanded in a const context) if `$t` is zero-sized.
macro_rules! assert_not_zst {
    ($t:ty) => {
        assert!(
            ::core::mem::size_of::<$t>() != 0,
            stringify!($t must not be zero-sized)
        )
    };
}
/// Packs `length` into a pointer for the ZST representation: the returned
/// pointer's address is the bitwise NOT of the length, so length 0 maps to
/// the all-ones address (non-zero, hence a valid `NonNull`).
///
/// NOTE(review): unlike `zst_decode_length`, this deliberately has no
/// `assert_zst!(T)` — presumably because `EMPTY_ZST_BUFFER` forces const
/// evaluation of this function for every `T`, ZST or not; confirm before
/// adding one.
///
/// # Safety
/// `length` must not exceed `RawSlimVec::<T>::MAX_CAPACITY`, which guarantees
/// `!length` is non-zero.
#[inline]
const unsafe fn zst_encode_length<T>(
    length: usize,
) -> ptr::NonNull<HeapData<T>> {
    debug_assert!(
        length <= RawSlimVec::<T>::MAX_CAPACITY,
        "required safety criteria"
    );
    let encoded_length = NonZero::new(!length);
    debug_assert!(encoded_length.is_some(), "implied by safety criteria");
    // SAFETY: `length <= MAX_CAPACITY < usize::MAX`, so `!length != 0`.
    let address = unsafe { encoded_length.unwrap_unchecked() };
    ptr::NonNull::without_provenance(address)
}
/// Inverse of `zst_encode_length`: recovers the stored length of a ZST
/// vector by taking the bitwise NOT of the pointer's address.
#[inline]
fn zst_decode_length<T>(ptr: ptr::NonNull<HeapData<T>>) -> usize {
    assert_zst!(T);
    let encoded = ptr.addr().get();
    !encoded
}
impl<T> Buffer<T> {
    /// Computes the `Layout` of a buffer holding `buffer_capacity` elements
    /// of `T`. Panics (at const-eval when used in a const context, otherwise
    /// at runtime) if `T` is zero-sized or any layout invariant fails.
    #[inline]
    const fn buffer_layout(buffer_capacity: NonZero<usize>) -> Layout {
        assert_not_zst!(T);
        assert!(
            mem::align_of::<T>() == mem::align_of::<Buffer<T>>(),
            "The alignment of the buffer must match the alignment of its elements"
        );
        // NOTE(review): this tests `align_of::<HeapData<T>>() + offset` rather
        // than the offset alone; since the struct's alignment should itself be
        // a multiple of `align_of::<T>()`, the sum is congruent to the bare
        // offset — confirm the extra term is intentional.
        assert!(
            (mem::align_of::<HeapData<T>>() + mem::offset_of!(HeapData<T>, buffer))
                .is_multiple_of(mem::align_of::<T>()),
            "The buffer field must be correctly aligned",
        );
        let Ok(layout) = Layout::array::<T>(buffer_capacity.get()) else {
            panic!("Failed to create Layout for buffer")
        };
        layout
    }
}
impl<T> HeapData<T> {
    /// Layout of the header alone (length + capacity + zero-sized buffer
    /// marker); element storage is appended via `compute_layout`.
    const HEADER_LAYOUT: Layout = {
        let layout = Layout::new::<HeapData<T>>();
        assert!(layout.size() != 0, "HEADER_LAYOUT must have non-zero size");
        layout
    };
    /// Layout of a complete allocation: the header followed by storage for
    /// `buffer_capacity` elements. Panics if `T` is zero-sized or the layout
    /// cannot be constructed.
    #[inline]
    const fn compute_layout(buffer_capacity: NonZero<usize>) -> Layout {
        assert_not_zst!(T);
        let Ok((layout, _)) =
            Self::HEADER_LAYOUT.extend(Buffer::<T>::buffer_layout(buffer_capacity))
        else {
            panic!("Failed to create layout for HeapData");
        };
        layout
    }
    /// Allocates a fresh header + buffer for `buffer_capacity` elements and
    /// initializes the header to `capacity = buffer_capacity`, `length = 0`.
    /// Panics on allocation failure.
    #[inline]
    fn allocate(buffer_capacity: NonZero<usize>) -> ptr::NonNull<Self> {
        assert_not_zst!(T);
        let layout = HeapData::<T>::compute_layout(buffer_capacity);
        assert!(layout.size() != 0);
        // SAFETY: `layout` has non-zero size (asserted above).
        let allocation = unsafe { alloc(layout) };
        let ptr: ptr::NonNull<HeapData<T>> = ptr::NonNull::new(allocation)
            .expect("Allocation failed")
            .cast();
        // SAFETY: `ptr` is a freshly allocated block laid out for
        // `HeapData<T>`. Capacity is written before length because
        // `write_length` debug-checks against the stored capacity.
        unsafe {
            HeapData::write_capacity(ptr, buffer_capacity);
            HeapData::write_length(ptr, 0);
        }
        ptr
    }
    /// Resizes an existing allocation to hold `new_buffer_capacity` elements
    /// and records the new capacity in the header. Panics on reallocation
    /// failure.
    ///
    /// # Safety
    /// `ptr` must point to a live allocation created by `allocate` /
    /// `reallocate` whose stored capacity is accurate.
    #[inline]
    unsafe fn reallocate(
        ptr: ptr::NonNull<Self>,
        new_buffer_capacity: NonZero<usize>,
    ) -> ptr::NonNull<Self> {
        assert_not_zst!(T);
        // SAFETY: `ptr` is live per the caller contract.
        let old_capacity = unsafe { Self::read_capacity(ptr) };
        let old_layout = HeapData::<T>::compute_layout(old_capacity);
        let new_computed_layout =
            HeapData::<T>::compute_layout(new_buffer_capacity);
        // `realloc` hands back memory for (new size, old alignment); check
        // that this matches the layout computed from the new capacity.
        let new_layout =
            Layout::from_size_align(new_computed_layout.size(), old_layout.align())
                .expect("Failed to create layout for HeapData");
        assert!(
            new_computed_layout == new_layout,
            "The newly computed layout must match the definition for the layout that
will be returned from `alloc::realloc`"
        );
        assert!(new_computed_layout.size() != 0);
        // SAFETY: `old_layout` is the layout `ptr` was allocated with, and
        // the requested size is non-zero (asserted above).
        let new_allocation = unsafe {
            realloc(ptr.as_ptr().cast(), old_layout, new_computed_layout.size())
        };
        let new_ptr: ptr::NonNull<HeapData<T>> = ptr::NonNull::new(new_allocation)
            .expect("Reallocation Failed")
            .cast();
        // SAFETY: `new_ptr` is live; `realloc` preserved the old header
        // bytes, so only the capacity field needs updating.
        unsafe { HeapData::write_capacity(new_ptr, new_buffer_capacity) };
        new_ptr
    }
    /// Frees the allocation, using the capacity stored in the header to
    /// reconstruct its layout.
    ///
    /// # Safety
    /// `ptr` must point to a live allocation whose stored capacity is
    /// accurate; `ptr` must not be used afterwards.
    #[inline]
    unsafe fn deallocate(ptr: ptr::NonNull<Self>) {
        // SAFETY: `ptr` is live per the caller contract.
        let capacity = unsafe { Self::read_capacity(ptr) };
        let layout = Self::compute_layout(capacity);
        let allocation = ptr.as_ptr().cast();
        // SAFETY: `layout` matches the layout the block was allocated with.
        unsafe { dealloc(allocation, layout) }
    }
    /// Runs `Drop` for the elements at the indices in `range`, in reverse
    /// order.
    ///
    /// # Safety
    /// `ptr` must point to a live allocation and every index in `range` must
    /// refer to an initialized element that is not dropped again afterwards.
    #[inline]
    unsafe fn drop_in_place(ptr: ptr::NonNull<Self>, range: Range<usize>) {
        debug_assert!(
            range.end <= unsafe { HeapData::read_capacity(ptr).get() },
            "required safety criteria"
        );
        // Reuse the shared helper instead of duplicating the `offset_of!`
        // pointer arithmetic here.
        // SAFETY: `ptr` is live per the caller contract.
        let buffer_ptr: ptr::NonNull<T> = unsafe { Self::buffer_ptr(ptr) };
        for i in range.rev() {
            // SAFETY: `i < range.end <= capacity`, so the element pointer is
            // in bounds; the element is initialized per the caller contract.
            let element_ptr = unsafe { buffer_ptr.add(i) };
            unsafe { element_ptr.drop_in_place() };
        }
    }
    /// Pointer to the first element slot of the inline buffer.
    ///
    /// # Safety
    /// `ptr` must point to a live allocation.
    #[inline]
    unsafe fn buffer_ptr(ptr: ptr::NonNull<Self>) -> ptr::NonNull<T> {
        let offset = mem::offset_of!(HeapData<T>, buffer);
        // SAFETY: `buffer` lies at `offset` within the live allocation.
        unsafe { ptr.byte_add(offset).cast() }
    }
    /// Stores `length` into the header.
    ///
    /// # Safety
    /// `ptr` must point to a live allocation whose capacity field is
    /// initialized; `length` must not exceed that capacity.
    #[inline]
    unsafe fn write_length(ptr: ptr::NonNull<Self>, length: usize) {
        assert_not_zst!(T);
        debug_assert!(
            length <= unsafe { Self::read_capacity(ptr).get() },
            "length must not exceed capacity"
        );
        let offset = mem::offset_of!(HeapData<T>, length);
        // SAFETY: `length` lies at `offset` within the live allocation.
        unsafe {
            let field = ptr.byte_add(offset).cast();
            field.write(length);
        }
    }
    /// Stores `capacity` into the header.
    ///
    /// # Safety
    /// `ptr` must point to a live allocation whose buffer really holds at
    /// least `capacity` elements.
    #[inline]
    unsafe fn write_capacity(ptr: ptr::NonNull<Self>, capacity: NonZero<usize>) {
        assert_not_zst!(T);
        debug_assert!(
            capacity.get() <= RawSlimVec::<T>::MAX_CAPACITY,
            "capacity must not exceed MAX_CAPACITY"
        );
        let offset = mem::offset_of!(HeapData<T>, capacity);
        // SAFETY: `capacity` lies at `offset` within the live allocation.
        unsafe {
            let field = ptr.byte_add(offset).cast();
            field.write(capacity);
        }
    }
    /// Reads the capacity stored in the header.
    ///
    /// # Safety
    /// `ptr` must point to a live allocation whose capacity field is
    /// initialized.
    #[inline]
    unsafe fn read_capacity(ptr: ptr::NonNull<Self>) -> NonZero<usize> {
        assert_not_zst!(T);
        let offset = mem::offset_of!(HeapData<T>, capacity);
        // SAFETY: `capacity` lies at `offset` within the live allocation.
        unsafe { ptr.byte_add(offset).cast().read() }
    }
}
impl<T> RawSlimVec<T> {
    /// Address-1 tag meaning "no heap allocation yet" (non-ZST `T` only).
    /// Address 1 is mis-aligned for `HeapData<T>` (checked below), so it can
    /// never collide with a real header pointer.
    const SENTINEL_UNALLOCATED: ptr::NonNull<HeapData<T>> = {
        let sentinel = NonZero::<usize>::MIN;
        assert!(
            !sentinel
                .get()
                .is_multiple_of(mem::align_of::<HeapData<T>>())
                && sentinel.get() < mem::align_of::<HeapData<T>>(),
            "SENTINEL_UNALLOCATED must not alias any bit-patterns for any pointer
that could be valid for HeapData<T>"
        );
        ptr::NonNull::without_provenance(sentinel)
    };
    /// ZST representation of an empty vector: length 0 encoded in the
    /// pointer's address.
    const EMPTY_ZST_BUFFER: ptr::NonNull<HeapData<T>> = {
        unsafe { zst_encode_length::<T>(0) }
    };
    /// The canonical empty vector; never allocates. Also const-asserts the
    /// `NonNull` niche, so `Option<RawSlimVec<T>>` stays pointer-sized.
    pub(crate) const EMPTY: RawSlimVec<T> = {
        assert!(
            mem::size_of::<Self>() == mem::size_of::<Option<Self>>(),
            "SlimVec must contain a niche"
        );
        if T::IS_ZST {
            RawSlimVec {
                ptr: Self::EMPTY_ZST_BUFFER,
            }
        } else {
            RawSlimVec {
                ptr: Self::SENTINEL_UNALLOCATED,
            }
        }
    };
    /// Largest representable capacity: for non-ZSTs the buffer's byte size
    /// is kept within `isize::MAX`; for ZSTs the bound keeps `!length`
    /// non-zero in the pointer encoding.
    pub(crate) const MAX_CAPACITY: usize = {
        if T::IS_ZST {
            isize::MAX as usize
        } else {
            isize::MAX as usize / mem::size_of::<T>()
        }
    };
    /// Whether a heap allocation exists. Always `false` for ZSTs, which
    /// never allocate.
    #[inline]
    pub(crate) fn is_allocated(&self) -> bool {
        if T::IS_ZST {
            false
        } else {
            self.ptr != Self::SENTINEL_UNALLOCATED
        }
    }
    /// Whether `capacity() > 0`, computed without dereferencing the pointer:
    /// ZSTs always have capacity (`MAX_CAPACITY`), non-ZSTs have capacity
    /// exactly when allocated (stored capacity is `NonZero`).
    #[inline]
    pub(crate) fn is_capacity_gt_zero(&self) -> bool {
        let is_gt_zero = self.is_allocated() || T::IS_ZST;
        debug_assert_eq!(self.capacity() > 0, is_gt_zero, "required invariant");
        is_gt_zero
    }
    /// Shared reference to the heap header, or `None` when unallocated
    /// (including the ZST case).
    #[inline]
    pub(crate) fn get_heap_data(&self) -> Option<&HeapData<T>> {
        if self.is_allocated() {
            // SAFETY: `is_allocated` implies `ptr` refers to a live header.
            Some(unsafe { self.ptr.as_ref() })
        } else {
            None
        }
    }
    /// Current element capacity: `MAX_CAPACITY` for ZSTs, the stored header
    /// capacity when allocated, otherwise 0.
    #[inline]
    pub(crate) fn capacity(&self) -> usize {
        if T::IS_ZST {
            return Self::MAX_CAPACITY;
        }
        if let Some(heap_data) = self.get_heap_data() {
            heap_data.capacity.get()
        } else {
            0
        }
    }
    /// Current element count: decoded from the pointer for ZSTs, read from
    /// the header when allocated, otherwise 0.
    #[inline]
    pub(crate) fn length(&self) -> usize {
        if T::IS_ZST {
            return zst_decode_length(self.ptr);
        }
        if let Some(heap_data) = self.get_heap_data() {
            heap_data.length
        } else {
            0
        }
    }
    /// Overwrites the stored length.
    ///
    /// # Safety
    /// The vector must have non-zero capacity and `new_length` must not
    /// exceed it; elements in `0..new_length` must be initialized for any
    /// later reads/drops to be sound.
    #[inline]
    pub(crate) unsafe fn set_length(&mut self, new_length: usize) {
        debug_assert!(
            self.is_capacity_gt_zero() && (new_length <= self.capacity()),
            "required safety criteria"
        );
        if T::IS_ZST {
            // For ZSTs the length lives in the pointer itself.
            self.ptr = unsafe { zst_encode_length(new_length) };
            return;
        }
        // SAFETY: non-zero capacity implies `ptr` is a live allocation.
        unsafe { HeapData::write_length(self.ptr, new_length) };
    }
    /// Allocates a buffer of `buffer_capacity` elements (no-op for ZSTs
    /// beyond a capacity-bound check). The caller is expected to invoke this
    /// only on an unallocated vector; the previous pointer is overwritten.
    #[inline]
    pub(crate) fn allocate(&mut self, buffer_capacity: NonZero<usize>) {
        if T::IS_ZST {
            assert!(
                buffer_capacity.get() <= Self::MAX_CAPACITY,
                "required capacity exceeds max buffer capacity"
            );
            return;
        }
        self.ptr = HeapData::<T>::allocate(buffer_capacity);
    }
    /// Resizes the existing allocation to `buffer_capacity` elements
    /// (no-op for ZSTs beyond a capacity-bound check).
    ///
    /// # Safety
    /// The vector must currently have non-zero capacity (i.e. be allocated
    /// for non-ZST `T`).
    pub(crate) unsafe fn reallocate(&mut self, buffer_capacity: NonZero<usize>) {
        debug_assert!(self.is_capacity_gt_zero(), "required safety criteria");
        if T::IS_ZST {
            assert!(
                buffer_capacity.get() <= Self::MAX_CAPACITY,
                "required capacity exceeds max buffer capacity"
            );
            return;
        }
        // Park the sentinel before calling into the allocator — presumably so
        // that if reallocation panics, `self` is left in the unallocated
        // state rather than holding a dangling pointer; confirm.
        let ptr = self.ptr;
        self.ptr = Self::SENTINEL_UNALLOCATED;
        // SAFETY: `ptr` was a live allocation per the caller contract.
        self.ptr = unsafe { HeapData::<T>::reallocate(ptr, buffer_capacity) };
    }
    /// Frees the allocation and resets to the unallocated sentinel (no-op
    /// for ZSTs). Does NOT drop elements — callers drop them first.
    ///
    /// # Safety
    /// The vector must currently have non-zero capacity.
    #[inline]
    pub(crate) unsafe fn deallocate(&mut self) {
        debug_assert!(self.is_capacity_gt_zero(), "required safety criteria");
        if T::IS_ZST {
            return;
        }
        // Reset the sentinel before freeing so `self` never points at a
        // freed block.
        let ptr = self.ptr;
        self.ptr = Self::SENTINEL_UNALLOCATED;
        // SAFETY: `ptr` was a live allocation per the caller contract.
        unsafe { HeapData::deallocate(ptr) };
    }
    /// Drops the elements at the indices in `range` (reverse order for the
    /// heap path). Skipped entirely when `T` has no drop glue.
    ///
    /// # Safety
    /// Every index in `range` must refer to an initialized element that is
    /// not dropped again afterwards.
    #[inline]
    pub(crate) unsafe fn drop_in_place(&mut self, range: Range<usize>) {
        debug_assert!(range.end <= self.capacity(), "required safety criteria");
        if range.is_empty() {
            return;
        }
        if ::core::mem::needs_drop::<T>() {
            if T::IS_ZST {
                // ZST elements occupy no storage; drop each via a dangling
                // (but well-aligned) pointer.
                for _ in range.rev() {
                    unsafe { ptr::NonNull::<T>::dangling().drop_in_place() };
                }
                return;
            }
            // SAFETY: range is within capacity and the vector is allocated
            // (capacity > 0 and non-ZST); elements are initialized per the
            // caller contract.
            unsafe { HeapData::drop_in_place(self.ptr, range) };
        }
    }
    /// Pointer to the first element slot.
    ///
    /// # Safety
    /// The vector must have non-zero capacity.
    #[inline]
    pub(crate) unsafe fn buffer_ptr(&self) -> ptr::NonNull<T> {
        debug_assert!(self.is_capacity_gt_zero(), "required safety criteria");
        unsafe { self.element_ptr(0) }
    }
    /// Pointer to the slot at `index` (dangling for ZSTs, which have no
    /// storage).
    ///
    /// # Safety
    /// The vector must have non-zero capacity and `index <= capacity()`
    /// (one-past-the-end is allowed for pointer arithmetic).
    #[inline]
    pub(crate) unsafe fn element_ptr(&self, index: usize) -> ptr::NonNull<T> {
        debug_assert!(
            self.is_capacity_gt_zero() && (index <= self.capacity()),
            "required safety criteria"
        );
        if T::IS_ZST {
            return ptr::NonNull::dangling();
        }
        // SAFETY: allocated (capacity > 0, non-ZST) and `index` is within
        // the allocation per the caller contract.
        unsafe { HeapData::buffer_ptr(self.ptr).add(index) }
    }
    /// Writes `value` into slot `index` without reading or dropping whatever
    /// was there, and without updating the length.
    ///
    /// # Safety
    /// The vector must have non-zero capacity and `index < capacity()`.
    #[inline]
    pub(crate) unsafe fn write(&mut self, index: usize, value: T) {
        debug_assert!(
            self.is_capacity_gt_zero() && (index < self.capacity()),
            "required safety criteria"
        );
        unsafe { self.element_ptr(index).write(value) }
    }
    /// Moves the element out of slot `index` without updating the length.
    ///
    /// # Safety
    /// The vector must have non-zero capacity, `index < capacity()`, and the
    /// slot must hold an initialized element that is not used again.
    #[inline]
    pub(crate) unsafe fn read(&self, index: usize) -> T {
        debug_assert!(
            self.is_capacity_gt_zero() && (index < self.capacity()),
            "required safety criteria"
        );
        unsafe { self.element_ptr(index).read() }
    }
    /// Decomposes into `(buffer pointer, length, capacity)`, transferring
    /// ownership of the allocation to the caller (`mem::forget` suppresses
    /// this vector's destructor). Unallocated vectors yield a dangling
    /// pointer.
    #[inline]
    pub(crate) fn into_parts(self) -> (ptr::NonNull<T>, usize, usize) {
        let length = self.length();
        let capacity = self.capacity();
        let ptr = if self.is_allocated() {
            // SAFETY: allocated implies `self.ptr` is a live header.
            unsafe { HeapData::buffer_ptr(self.ptr) }
        } else {
            ptr::NonNull::dangling()
        };
        mem::forget(self);
        (ptr, length, capacity)
    }
    /// Rebuilds a vector from parts previously produced by `into_parts` (or
    /// an equivalent layout): recovers the header pointer by stepping back
    /// from the buffer pointer and rewrites the header fields.
    ///
    /// # Safety
    /// `length <= capacity <= MAX_CAPACITY`; for non-zero capacities, `ptr`
    /// must point at the element buffer of a live `HeapData<T>` allocation
    /// of exactly that capacity, with elements `0..length` initialized.
    #[inline]
    pub(crate) unsafe fn from_parts(
        ptr: ptr::NonNull<T>,
        length: usize,
        capacity: usize,
    ) -> Self {
        debug_assert!(
            (length <= capacity) && (capacity <= Self::MAX_CAPACITY),
            "required safety criteria"
        );
        if T::IS_ZST {
            // SAFETY: `length <= MAX_CAPACITY` per the caller contract.
            let ptr = unsafe { zst_encode_length(length) };
            return RawSlimVec { ptr };
        }
        if let Some(capacity) = NonZero::new(capacity) {
            let ptr: ptr::NonNull<HeapData<T>> = {
                let offset = mem::offset_of!(HeapData<T>, buffer);
                // SAFETY: `ptr` points at the buffer field, which sits
                // `offset` bytes past the header start.
                unsafe { ptr.byte_sub(offset).cast() }
            };
            // Capacity is written before length because `write_length`
            // debug-checks against the stored capacity.
            unsafe {
                HeapData::write_capacity(ptr, capacity);
                HeapData::write_length(ptr, length);
            }
            RawSlimVec { ptr }
        } else {
            Self::EMPTY
        }
    }
    /// Shrinks the capacity toward `new_capacity`, never below the current
    /// length; frees the allocation entirely when the result is 0.
    #[inline]
    pub(crate) fn shrink_to(&mut self, new_capacity: usize) {
        let new_capacity = self.length().max(new_capacity);
        if new_capacity >= self.capacity() {
            return;
        }
        if self.is_allocated() {
            if new_capacity == 0 {
                // SAFETY: allocated implies non-zero capacity; length is 0
                // here (new_capacity == max(length, _) == 0).
                unsafe { self.deallocate() }
            } else {
                // `new_capacity > 0` on this branch, so the conversion cannot
                // fail.
                let new_capacity =
                    NonZero::new(new_capacity).unwrap_or_else(|| unreachable!());
                // SAFETY: allocated implies non-zero capacity; the new
                // capacity is at least the current length.
                unsafe { self.reallocate(new_capacity) };
            }
        }
    }
    /// Grows the capacity to at least `new_capacity` (no-op if already large
    /// enough; ZSTs always report `MAX_CAPACITY` and take the early return
    /// unless the request exceeds it).
    #[inline]
    pub(crate) fn grow_to(&mut self, new_capacity: usize) {
        if new_capacity <= self.capacity() {
            return;
        }
        // `new_capacity > capacity() >= 0`, so it is non-zero here.
        let new_capacity =
            NonZero::new(new_capacity).unwrap_or_else(|| unreachable!());
        if self.is_allocated() {
            // SAFETY: allocated implies non-zero capacity.
            unsafe { self.reallocate(new_capacity) };
        } else {
            self.allocate(new_capacity);
        }
    }
    /// Appends `v` without checking for room.
    ///
    /// # Safety
    /// There must be spare capacity: `capacity() > length()`.
    #[inline]
    pub(crate) unsafe fn push_unchecked(&mut self, v: T) {
        debug_assert!(self.capacity() > self.length(), "required safety criteria");
        let count = self.length();
        // SAFETY: `count < capacity`, so the write is in bounds and the new
        // length is within capacity.
        unsafe {
            self.write(count, v);
            self.set_length(count + 1);
        }
    }
    /// Removes and returns the last element without checking emptiness.
    ///
    /// # Safety
    /// The vector must be non-empty: `length() > 0`.
    #[inline]
    pub(crate) unsafe fn pop_unchecked(&mut self) -> T {
        debug_assert!(self.length() > 0, "required safety criteria");
        let new_length = self.length() - 1;
        // SAFETY: the element at `new_length` is initialized; the length is
        // decremented before moving it out so it is no longer considered
        // live by the vector.
        unsafe {
            self.set_length(new_length);
            self.read(new_length)
        }
    }
}
impl<T> Drop for RawSlimVec<T> {
    /// Drops all initialized elements, then frees the allocation. Vectors
    /// with zero capacity (unallocated, non-ZST) own nothing and are a no-op.
    #[inline]
    fn drop(&mut self) {
        if self.is_capacity_gt_zero() {
            let count = self.length();
            // The length is zeroed before running element destructors —
            // presumably so a panicking destructor cannot leave the elements
            // looking live; confirm intent. Note the drop/deallocate order:
            // elements first, then the backing allocation.
            unsafe { self.set_length(0) };
            unsafe { self.drop_in_place(0..count) };
            unsafe { self.deallocate() };
        }
    }
}
impl<T> Clone for RawSlimVec<T>
where
    T: Clone,
{
    /// Deep-clones the vector. For ZSTs the length-encoding pointer is the
    /// entire state, so a bitwise pointer copy suffices.
    #[inline]
    fn clone(&self) -> Self {
        if T::IS_ZST {
            return RawSlimVec { ptr: self.ptr };
        }
        let Some(length) = NonZero::new(self.length()) else {
            return RawSlimVec::EMPTY;
        };
        let mut clone = RawSlimVec::EMPTY;
        // Allocate exactly `length` slots; the clone does not inherit the
        // source's excess capacity.
        clone.allocate(length);
        for index in 0..length.get() {
            // SAFETY: `index < self.length() <= self.capacity()` and the
            // element is initialized.
            let element: &T = unsafe { self.element_ptr(index).as_ref() };
            // Grow the clone's length element-by-element (instead of writing
            // raw and setting the length once at the end) so that if
            // `element.clone()` panics, the clone's destructor drops the
            // already-copied prefix instead of leaking it.
            // SAFETY: `clone` has capacity `length` and currently holds
            // `index < length` elements, so there is room for one more.
            unsafe { clone.push_unchecked(element.clone()) };
        }
        clone
    }
}
// SAFETY: `RawSlimVec<T>` exclusively owns its pointer/allocation (no shared
// interior state), so moving it to another thread is sound whenever the
// elements themselves are `Send`.
unsafe impl<T> Send for RawSlimVec<T> where T: Send {}
// SAFETY: the shared-reference API only permits reads of the elements, so
// sharing across threads is sound whenever the elements are `Sync`.
unsafe impl<T> Sync for RawSlimVec<T> where T: Sync {}
// The vector is not self-referential; forward `Unpin`/`UnwindSafe` from `T`
// as auto-impls would not apply through the raw `NonNull` field.
impl<T> Unpin for RawSlimVec<T> where T: Unpin {}
impl<T> UnwindSafe for RawSlimVec<T> where T: UnwindSafe {}