use core::alloc::{Layout, LayoutError};
use core::cmp;
use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::slice;
use crate::alloc::SizedTypeProperties;
use crate::alloc::{AllocError, Allocator, Global};
use crate::boxed::Box;
use crate::error::Error;
use crate::ptr::{self, NonNull, Unique};
/// How `try_allocate_in` initializes the memory it requests from the
/// allocator.
enum AllocInit {
    /// Request uninitialized memory (`Allocator::allocate`).
    Uninitialized,
    /// Request zeroed memory (`Allocator::allocate_zeroed`); only compiled
    /// under the `rune_nightly` cfg.
    #[cfg(rune_nightly)]
    Zeroed,
}
/// A low-level helper that owns a heap buffer of `cap` `T`s allocated from
/// `A`, handling allocation, growth, shrinking, and deallocation so that
/// vector-like containers built on top don't repeat the corner cases.
#[allow(missing_debug_implementations)]
pub(crate) struct RawVec<T, A: Allocator = Global> {
    // Start of the buffer; dangling (never read/written) while `cap == 0`
    // or when `T` is zero-sized.
    ptr: Unique<T>,
    // Allocated capacity in elements; stays 0 for zero-sized `T`.
    cap: usize,
    // The allocator every (de/re)allocation of this buffer goes through.
    alloc: A,
}
impl<T> RawVec<T, Global> {
    /// A constant empty `RawVec` in the global allocator; handy for const
    /// contexts such as array repeat initializers.
    pub const NEW: Self = Self::new();

    /// Creates a new, empty `RawVec` in the global allocator.
    /// Performs no allocation.
    #[must_use]
    pub const fn new() -> Self {
        Self::new_in(Global)
    }
}
impl<T, A: Allocator> RawVec<T, A> {
    /// Smallest capacity used when growing from empty, tiered by element
    /// size: 8 for byte-sized elements, 4 for elements up to 1024 bytes,
    /// and 1 for anything larger — so tiny vectors don't reallocate on
    /// every push while huge elements don't over-allocate.
    pub(crate) const MIN_NON_ZERO_CAP: usize = if mem::size_of::<T>() == 1 {
        8
    } else if mem::size_of::<T>() <= 1024 {
        4
    } else {
        1
    };

    /// Creates a `RawVec` with zero capacity in `alloc`.
    /// Performs no allocation; the pointer stays dangling until first growth.
    pub const fn new_in(alloc: A) -> Self {
        Self {
            ptr: Unique::dangling(),
            cap: 0,
            alloc,
        }
    }

    /// Tries to allocate room for `capacity` uninitialized elements in
    /// `alloc`.
    ///
    /// Returns `Error::CapacityOverflow` if the byte size overflows, or the
    /// allocator's error if allocation fails.
    #[inline]
    pub(crate) fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, Error> {
        Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc)
    }

    /// Like `try_with_capacity_in`, but requests zeroed memory from the
    /// allocator. Only compiled under the `rune_nightly` cfg.
    #[inline]
    #[cfg(rune_nightly)]
    pub(crate) fn try_with_capacity_zeroed_in(capacity: usize, alloc: A) -> Result<Self, Error> {
        Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc)
    }

    /// Converts the buffer into a boxed slice of `len` possibly-uninitialized
    /// elements, transferring ownership of the memory and the allocator.
    ///
    /// # Safety
    ///
    /// `len` must be less than or equal to `self.capacity()`.
    /// NOTE(review): the resulting box will free the memory with a layout
    /// for `len` elements, so callers presumably shrink to fit first when
    /// `len < capacity` — confirm against call sites.
    pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>], A> {
        debug_assert!(
            len <= self.capacity(),
            "`len` must be smaller than or equal to `self.capacity()`"
        );
        // Prevent our own Drop from freeing the buffer we are handing out.
        let me = ManuallyDrop::new(self);
        unsafe {
            let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
            // Move the allocator out with a raw read; `me` is never dropped,
            // so the allocator is not double-dropped.
            Box::from_raw_in(slice, ptr::read(&me.alloc))
        }
    }

    /// Central allocation routine: allocates `capacity` elements initialized
    /// per `init`, or returns an unallocated (dangling) vector for ZSTs and
    /// zero capacity.
    fn try_allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Result<Self, Error> {
        if T::IS_ZST || capacity == 0 {
            // ZSTs never allocate; `capacity()` reports usize::MAX for them.
            Ok(Self::new_in(alloc))
        } else {
            let layout = match Layout::array::<T>(capacity) {
                Ok(layout) => layout,
                Err(_) => return Err(Error::CapacityOverflow),
            };
            // Reject sizes above isize::MAX (only possible on sub-64-bit
            // targets) before touching the allocator.
            match alloc_guard(layout.size()) {
                Ok(_) => {}
                Err(_) => return Err(Error::CapacityOverflow),
            }
            let ptr = match init {
                AllocInit::Uninitialized => alloc.allocate(layout)?,
                #[cfg(rune_nightly)]
                AllocInit::Zeroed => alloc.allocate_zeroed(layout)?,
            };
            Ok(Self {
                // SAFETY: a successful allocation result is non-null.
                ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) },
                cap: capacity,
                alloc,
            })
        }
    }

    /// Reconstitutes a `RawVec` from raw parts.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null, and must either be dangling with
    /// `capacity == 0` (or a ZST `T`) or have been allocated by `alloc`
    /// with a layout of `capacity` `T`s.
    #[inline]
    pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
        Self {
            ptr: unsafe { Unique::new_unchecked(ptr) },
            cap: capacity,
            alloc,
        }
    }

    /// Pointer to the start of the buffer (dangling when unallocated).
    #[inline]
    pub(crate) fn ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }

    /// Usable capacity in elements; `usize::MAX` for zero-sized `T`, which
    /// never allocates.
    #[inline(always)]
    pub(crate) fn capacity(&self) -> usize {
        if T::IS_ZST {
            usize::MAX
        } else {
            self.cap
        }
    }

    /// Shared reference to the underlying allocator.
    pub(crate) fn allocator(&self) -> &A {
        &self.alloc
    }

    /// Returns the pointer/layout pair describing the live allocation, or
    /// `None` when nothing is allocated (ZST `T` or zero capacity).
    fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
        if T::IS_ZST || self.cap == 0 {
            None
        } else {
            // Size is a multiple of align for every Rust type; this assert
            // guards the unchecked layout construction below.
            assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0);
            unsafe {
                let align = mem::align_of::<T>();
                // Cannot actually wrap: the same product fit in a Layout when
                // this buffer was allocated.
                let size = mem::size_of::<T>().wrapping_mul(self.cap);
                let layout = Layout::from_size_align_unchecked(size, align);
                Some((self.ptr.cast().into(), layout))
            }
        }
    }

    /// Ensures room for at least `additional` more elements past `len`,
    /// growing with the amortized (at-least-doubling) policy when needed.
    pub(crate) fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), Error> {
        if self.needs_to_grow(len, additional) {
            self.grow_amortized(len, additional)?;
        }
        Ok(())
    }

    /// Grow for a single push. The caller has already determined the buffer
    /// is full, so this grows unconditionally.
    pub(crate) fn try_reserve_for_push(&mut self, len: usize) -> Result<(), Error> {
        self.grow_amortized(len, 1)
    }

    /// Ensures room for exactly `additional` more elements past `len`,
    /// without amortized over-allocation.
    pub(crate) fn try_reserve_exact(&mut self, len: usize, additional: usize) -> Result<(), Error> {
        if self.needs_to_grow(len, additional) {
            self.grow_exact(len, additional)
        } else {
            Ok(())
        }
    }

    /// Shrinks the buffer down to `cap` elements; panics if `cap` exceeds
    /// the current capacity (see `shrink`).
    pub(crate) fn try_shrink_to_fit(&mut self, cap: usize) -> Result<(), Error> {
        self.shrink(cap)
    }
}
impl<T, A: Allocator> RawVec<T, A> {
    /// Whether `len + additional` exceeds the current capacity. Callers
    /// maintain `len <= capacity`, so the wrapping subtraction does not
    /// actually wrap; it just avoids an overflow-check branch.
    fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
        additional > self.capacity().wrapping_sub(len)
    }

    /// Installs a freshly (re)allocated buffer and its capacity.
    fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
        // SAFETY: allocator results are non-null by contract.
        self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) };
        self.cap = cap;
    }

    /// Amortized growth: the new capacity is the larger of double the
    /// current capacity and `len + additional`, floored at
    /// `MIN_NON_ZERO_CAP`.
    fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), Error> {
        debug_assert!(additional > 0);
        if T::IS_ZST {
            // ZST capacity is already usize::MAX, so being asked to grow
            // means the capacity overflowed.
            return Err(Error::CapacityOverflow);
        }
        let required_cap = len.checked_add(additional).ok_or(Error::CapacityOverflow)?;
        // `self.cap * 2` cannot overflow: existing allocations are bounded
        // by isize::MAX bytes (Layout contract + alloc_guard).
        let cap = cmp::max(self.cap * 2, required_cap);
        let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap);
        // Overflow in the layout computation is diagnosed inside
        // `finish_grow`, hence the unvalidated Result is passed through.
        let new_layout = Layout::array::<T>(cap);
        let ptr = finish_grow(new_layout, self.current_memory(), &self.alloc)?;
        self.set_ptr_and_cap(ptr, cap);
        Ok(())
    }

    /// Exact growth to `len + additional` elements, no over-allocation.
    fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), Error> {
        if T::IS_ZST {
            // Same reasoning as in `grow_amortized`.
            return Err(Error::CapacityOverflow);
        }
        let cap = len.checked_add(additional).ok_or(Error::CapacityOverflow)?;
        let new_layout = Layout::array::<T>(cap);
        let ptr = finish_grow(new_layout, self.current_memory(), &self.alloc)?;
        self.set_ptr_and_cap(ptr, cap);
        Ok(())
    }

    /// Shrinks the allocation to `cap` elements; `cap == 0` deallocates
    /// entirely and resets to the dangling state.
    ///
    /// # Panics
    ///
    /// Panics if `cap` is greater than the current capacity.
    fn shrink(&mut self, cap: usize) -> Result<(), Error> {
        // Guards the unchecked layout construction below (always true for
        // Rust types, where size is a multiple of align).
        assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0);
        assert!(
            cap <= self.capacity(),
            "Tried to shrink to a larger capacity"
        );
        let (ptr, layout) = if let Some(mem) = self.current_memory() {
            mem
        } else {
            // Nothing allocated (ZST or capacity 0): nothing to shrink.
            return Ok(());
        };
        if cap == 0 {
            // SAFETY: `ptr`/`layout` describe exactly the live allocation.
            unsafe { self.alloc.deallocate(ptr, layout) };
            self.ptr = Unique::dangling();
            self.cap = 0;
        } else {
            let ptr = unsafe {
                // Cannot wrap: `cap` is at most the current capacity, whose
                // byte size already fit in a Layout.
                let new_size = mem::size_of::<T>().wrapping_mul(cap);
                let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
                self.alloc
                    .shrink(ptr, layout, new_layout)
                    .map_err(|_| AllocError { layout: new_layout })?
            };
            self.set_ptr_and_cap(ptr, cap);
        }
        Ok(())
    }
}
/// Shared tail of `grow_amortized`/`grow_exact`: validates the new layout,
/// then either grows the existing allocation or performs a fresh one.
///
/// Kept out-of-line (`#[inline(never)]`) and generic only over `A` so the
/// growth slow path is not duplicated into every monomorphized caller.
///
/// # Errors
///
/// `Error::CapacityOverflow` if the layout computation overflowed or the
/// size exceeds `isize::MAX` (sub-64-bit targets); an allocation error
/// carrying `new_layout` if the allocator fails.
#[inline(never)]
fn finish_grow<A>(
    new_layout: Result<Layout, LayoutError>,
    current_memory: Option<(NonNull<u8>, Layout)>,
    alloc: &A,
) -> Result<NonNull<[u8]>, Error>
where
    A: Allocator,
{
    let new_layout = new_layout.map_err(|_| Error::CapacityOverflow)?;
    alloc_guard(new_layout.size())?;
    let memory = if let Some((ptr, old_layout)) = current_memory {
        // Both layouts come from `Layout::array::<T>`, so their alignment is
        // `align_of::<T>()` in both cases. (The previous version asserted
        // this twice back to back; once is enough.)
        debug_assert_eq!(old_layout.align(), new_layout.align());
        // SAFETY: `ptr` denotes a live block allocated by `alloc` with
        // `old_layout` (it came from `current_memory`), the alignments
        // match, and callers only grow, so `new_layout.size() >=
        // old_layout.size()` as `Allocator::grow` requires.
        unsafe { alloc.grow(ptr, old_layout, new_layout) }
    } else {
        alloc.allocate(new_layout)
    };
    memory.map_err(|_| AllocError { layout: new_layout }.into())
}
#[cfg(not(rune_nightly))]
impl<T, A: Allocator> Drop for RawVec<T, A> {
    /// Frees the backing allocation, if one exists. Unallocated vectors
    /// (ZST element type or zero capacity) are a no-op.
    fn drop(&mut self) {
        match self.current_memory() {
            // SAFETY: `current_memory` returns exactly the pointer/layout
            // pair this buffer was allocated with, so deallocation is valid.
            Some((ptr, layout)) => unsafe { self.alloc.deallocate(ptr, layout) },
            None => {}
        }
    }
}
// Nightly variant: `#[may_dangle]` asserts to the drop checker that this
// drop impl never reads or drops any `T`, so borrowed contents are allowed
// to dangle by the time the RawVec is dropped.
#[cfg(rune_nightly)]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> {
    /// Frees the backing allocation, if one exists; only the raw bytes are
    /// deallocated, no element destructors run here.
    fn drop(&mut self) {
        if let Some((ptr, layout)) = self.current_memory() {
            // SAFETY: `current_memory` returns exactly the pointer/layout
            // pair this buffer was allocated with.
            unsafe { self.alloc.deallocate(ptr, layout) }
        }
    }
}
/// Rejects allocation sizes that exceed `isize::MAX` bytes.
///
/// Rust allocations must not be larger than `isize::MAX` bytes. On 64-bit
/// targets the comparison is compiled out (`usize::BITS < 64` is false), so
/// the guard only costs anything on 16/32-bit targets.
#[inline]
fn alloc_guard(alloc_size: usize) -> Result<(), Error> {
    match alloc_size {
        size if usize::BITS < 64 && size > isize::MAX as usize => {
            Err(Error::CapacityOverflow)
        }
        _ => Ok(()),
    }
}