use crate::{
alloc::{
AbortAlloc,
AllocRef,
BuildAllocRef,
CapacityOverflow,
DeallocRef,
Global,
NonZeroLayout,
ReallocRef,
},
boxed::Box,
collections::CollectionAllocErr,
};
use core::{
alloc::Layout,
cmp,
convert::{TryFrom, TryInto},
marker::PhantomData,
mem,
num::NonZeroUsize,
ptr,
ptr::NonNull,
slice,
};
/// A low-level utility for allocating, growing, shrinking, and deallocating a
/// heap buffer of `T`s without tracking initialization of its contents.
///
/// Defaults to the `AbortAlloc<Global>` allocator builder, i.e. infallible
/// allocation on the global heap.
pub struct RawVec<T, B: BuildAllocRef = AbortAlloc<Global>> {
    /// Pointer to the buffer; dangling (but aligned) when nothing has been
    /// allocated (zero capacity or zero-sized `T`).
    ptr: NonNull<T>,
    /// Number of `T`s the buffer can hold; `!0` (`usize::MAX`) for
    /// zero-sized `T` (see `new`/`capacity`).
    capacity: usize,
    /// Builder used to rebuild the allocator reference for realloc/dealloc.
    build_alloc: B,
    /// `PhantomData<T>` marks logical ownership of `T` values (variance and
    /// drop-check), even though no `T` is stored inline.
    _owned: PhantomData<T>,
}
impl<T> RawVec<T> {
    /// An empty `RawVec` usable as a `const` initializer.
    pub const NEW: Self = Self::new();

    /// Creates the biggest possible `RawVec` without allocating: the pointer
    /// is dangling and the capacity is `0`, or `!0` (`usize::MAX`) for
    /// zero-sized `T`, which never need backing memory.
    #[must_use]
    pub const fn new() -> Self {
        Self {
            ptr: NonNull::dangling(),
            // Array-index trick: selects `!0` for ZSTs, `0` otherwise.
            // Presumably written this way because `if` was not usable in
            // `const fn` on the targeted compiler — confirm before changing.
            capacity: [0, !0][(mem::size_of::<T>() == 0) as usize],
            build_alloc: AbortAlloc(Global),
            _owned: PhantomData,
        }
    }

    /// Allocates an uninitialized buffer for `capacity` elements in the
    /// default `AbortAlloc<Global>` allocator; panics on capacity overflow.
    #[inline]
    #[must_use]
    pub fn with_capacity(capacity: usize) -> Self {
        Self::with_capacity_in(capacity, AbortAlloc(Global))
    }

    /// Like `with_capacity`, but the allocated memory is zeroed.
    #[inline]
    #[must_use]
    pub fn with_capacity_zeroed(capacity: usize) -> Self {
        Self::with_capacity_zeroed_in(capacity, AbortAlloc(Global))
    }

    /// Reconstitutes a `RawVec` from a pointer and capacity.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null and, together with `capacity`, must describe a
    /// buffer compatible with the default allocator
    /// (see `from_raw_parts_in`).
    #[inline]
    pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
        Self::from_raw_parts_in(ptr, capacity, AbortAlloc(Global))
    }
}
impl<T, B: BuildAllocRef> RawVec<T, B> {
    /// Like `RawVec::new`, but parameterized over the allocator reference.
    ///
    /// Does not allocate: the pointer is dangling and the capacity is `0`,
    /// or `!0` (`usize::MAX`) for zero-sized `T`.
    pub fn new_in(mut a: B::Ref) -> Self {
        let capacity = if mem::size_of::<T>() == 0 { !0 } else { 0 };
        Self {
            ptr: NonNull::dangling(),
            capacity,
            build_alloc: a.get_build_alloc(),
            _owned: PhantomData,
        }
    }

    /// Allocates a buffer for `capacity` elements in `a`, panicking on
    /// capacity overflow. The `AllocError` arm is unreachable because the
    /// allocator's error type is the uninhabited `crate::Never`.
    #[inline]
    pub fn with_capacity_in(capacity: usize, a: B::Ref) -> Self
    where
        B::Ref: AllocRef<Error = crate::Never>,
    {
        match Self::try_with_capacity_in(capacity, a) {
            Ok(vec) => vec,
            Err(CollectionAllocErr::CapacityOverflow) => capacity_overflow(),
            Err(CollectionAllocErr::AllocError { .. }) => unreachable!("Infallible allocation"),
        }
    }

    /// Fallible version of `with_capacity_in`; the memory is left
    /// uninitialized.
    #[inline]
    pub fn try_with_capacity_in(capacity: usize, a: B::Ref) -> Result<Self, CollectionAllocErr<B>>
    where
        B::Ref: AllocRef,
    {
        Self::allocate_in(capacity, false, a)
    }

    /// Like `with_capacity_in`, but the allocated memory is zeroed.
    #[inline]
    pub fn with_capacity_zeroed_in(capacity: usize, a: B::Ref) -> Self
    where
        B::Ref: AllocRef<Error = crate::Never>,
    {
        match Self::try_with_capacity_zeroed_in(capacity, a) {
            Ok(vec) => vec,
            Err(CollectionAllocErr::CapacityOverflow) => capacity_overflow(),
            Err(CollectionAllocErr::AllocError { .. }) => unreachable!("Infallible allocation"),
        }
    }

    /// Fallible version of `with_capacity_zeroed_in`.
    #[inline]
    pub fn try_with_capacity_zeroed_in(
        capacity: usize,
        a: B::Ref,
    ) -> Result<Self, CollectionAllocErr<B>>
    where
        B::Ref: AllocRef,
    {
        Self::allocate_in(capacity, true, a)
    }

    /// Shared implementation of the `with_capacity*` constructors.
    ///
    /// Returns `CapacityOverflow` when `capacity * size_of::<T>()` overflows
    /// `usize` or fails `alloc_guard`. A zero-byte request (zero capacity or
    /// zero-sized `T`) performs no allocation and keeps a dangling pointer.
    fn allocate_in(
        capacity: usize,
        zeroed: bool,
        mut alloc: B::Ref,
    ) -> Result<Self, CollectionAllocErr<B>>
    where
        B::Ref: AllocRef,
    {
        let elem_size = mem::size_of::<T>();
        let alloc_size = capacity
            .checked_mul(elem_size)
            .ok_or(CollectionAllocErr::CapacityOverflow)?;
        let ptr = if alloc_size == 0 {
            NonNull::<T>::dangling()
        } else {
            let layout = alloc_guard(alloc_size, mem::align_of::<T>())?;
            let result = if zeroed {
                alloc.alloc_zeroed(layout)
            } else {
                alloc.alloc(layout)
            };
            result
                .map_err(|inner| CollectionAllocErr::AllocError { layout, inner })?
                .cast()
        };
        Ok(Self {
            ptr,
            capacity,
            build_alloc: alloc.get_build_alloc(),
            _owned: PhantomData,
        })
    }

    /// Reconstitutes a `RawVec` from raw parts.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null (only debug-asserted) and, together with
    /// `capacity`, must describe a buffer whose allocator `build_alloc` can
    /// rebuild a reference to (see `alloc_ref`).
    pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, build_alloc: B) -> Self {
        debug_assert!(!ptr.is_null());
        Self {
            ptr: NonNull::new_unchecked(ptr),
            capacity,
            build_alloc,
            _owned: PhantomData,
        }
    }

    /// Raw pointer to the start of the buffer; dangling (but aligned) when
    /// nothing has been allocated.
    pub fn ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }

    /// Number of elements the buffer can hold without growing;
    /// `!0` (`usize::MAX`) for zero-sized `T`.
    #[inline(always)]
    pub fn capacity(&self) -> usize {
        if mem::size_of::<T>() == 0 {
            !0
        } else {
            self.capacity
        }
    }

    /// Shared reference to the allocator builder.
    pub fn build_alloc(&self) -> &B {
        &self.build_alloc
    }

    /// Mutable reference to the allocator builder.
    pub fn build_alloc_mut(&mut self) -> &mut B {
        &mut self.build_alloc
    }

    /// Rebuilds the allocator reference together with the layout of the
    /// current allocation. The layout is `None` when the byte size is zero
    /// (zero capacity or zero-sized `T`) — NOTE(review): relies on the
    /// `Layout -> NonZeroLayout` `try_into` failing exactly for size 0;
    /// confirm against the `NonZeroLayout` conversion impl.
    pub fn alloc_ref(&mut self) -> (B::Ref, Option<NonZeroLayout>) {
        let size = mem::size_of::<T>() * self.capacity;
        unsafe {
            let layout = Layout::from_size_align_unchecked(size, mem::align_of::<T>())
                .try_into()
                .ok();
            let ptr = self.ptr.cast();
            let alloc = self.build_alloc_mut().build_alloc_ref(ptr, layout);
            (alloc, layout)
        }
    }

    /// Converts the whole buffer (including any uninitialized spare
    /// capacity) into a boxed `MaybeUninit<T>` slice, transferring the
    /// allocator builder. `mem::forget(self)` prevents `Drop` from freeing
    /// the buffer a second time.
    pub fn into_box(self) -> Box<[mem::MaybeUninit<T>], B> {
        let ptr = self.ptr() as *mut mem::MaybeUninit<T>;
        unsafe {
            let slice = slice::from_raw_parts_mut(ptr, self.capacity);
            let builder = ptr::read(&self.build_alloc);
            let output = Box::from_raw_in(slice, builder);
            mem::forget(self);
            output
        }
    }

    /// Amortized growth target: the larger of double the current capacity
    /// and `used_capacity + needed_extra_capacity`; errors if the required
    /// capacity overflows `usize`.
    fn amortized_new_size(
        &self,
        used_capacity: usize,
        needed_extra_capacity: usize,
    ) -> Result<usize, CapacityOverflow> {
        let required_cap = used_capacity
            .checked_add(needed_extra_capacity)
            .ok_or(CapacityOverflow)?;
        // NOTE(review): unchecked `* 2` — assumes the current capacity is
        // small enough (bounded by alloc_guard) that doubling cannot wrap;
        // confirm before reusing this helper elsewhere.
        let double_cap = self.capacity * 2;
        Ok(cmp::max(double_cap, required_cap))
    }

    /// Doubles the buffer, panicking on capacity overflow; allocation
    /// failure is statically impossible (`Error = crate::Never`).
    pub fn double(&mut self)
    where
        B::Ref: ReallocRef<Error = crate::Never>,
    {
        match self.try_double() {
            Ok(_) => (),
            Err(CollectionAllocErr::CapacityOverflow) => capacity_overflow(),
            Err(CollectionAllocErr::AllocError { .. }) => unreachable!("Infallible allocation"),
        }
    }

    /// Doubles the buffer size; the first allocation picks a small starting
    /// capacity instead. Marked cold/out-of-line: callers reach this only on
    /// the rare grow path.
    ///
    /// Zero-sized `T` always errors with `CapacityOverflow`, since a ZST
    /// `RawVec` is already at maximum capacity.
    #[inline(never)]
    #[cold]
    pub fn try_double(&mut self) -> Result<(), CollectionAllocErr<B>>
    where
        B::Ref: ReallocRef,
    {
        unsafe {
            let elem_size = mem::size_of::<T>();
            if elem_size == 0 {
                return Err(CollectionAllocErr::CapacityOverflow);
            }
            let (mut alloc, old_layout) = self.alloc_ref();
            let (new_cap, ptr) = if let Some(old_layout) = old_layout {
                // Existing allocation: reallocate to twice the capacity.
                let new_cap = 2 * self.capacity;
                let new_layout = alloc_guard(new_cap * elem_size, mem::align_of::<T>())?;
                let ptr = alloc
                    .realloc(self.ptr.cast(), old_layout, new_layout)
                    .map_err(|inner| CollectionAllocErr::AllocError {
                        inner,
                        layout: new_layout,
                    })?;
                (new_cap, ptr.cast())
            } else {
                // First allocation: start at 4 elements, or 1 when a single
                // element is larger than `usize::MAX / 8` bytes.
                let new_cap = NonZeroUsize::new_unchecked(if elem_size > (!0) / 8 { 1 } else { 4 });
                let new_layout = NonZeroLayout::array::<T>(new_cap)?;
                let ptr =
                    alloc
                        .alloc(new_layout)
                        .map_err(|inner| CollectionAllocErr::AllocError {
                            inner,
                            layout: new_layout,
                        })?;
                (new_cap.get(), ptr.cast())
            };
            self.ptr = ptr;
            self.capacity = new_cap;
            Ok(())
        }
    }

    /// Panicking wrapper around `try_double_in_place`; returns whether the
    /// allocator grew the buffer without moving it.
    pub fn double_in_place(&mut self) -> bool
    where
        B::Ref: AllocRef,
    {
        if let Ok(success) = self.try_double_in_place() {
            success
        } else {
            capacity_overflow()
        }
    }

    /// Attempts to double the buffer without moving it via `grow_in_place`.
    ///
    /// Returns `Ok(false)` when there is no existing allocation to grow;
    /// errors for zero-sized `T` or when the doubled size fails
    /// `alloc_guard`.
    #[inline(never)]
    #[cold]
    pub fn try_double_in_place(&mut self) -> Result<bool, CapacityOverflow>
    where
        B::Ref: AllocRef,
    {
        unsafe {
            let elem_size = mem::size_of::<T>();
            if elem_size == 0 {
                return Err(CapacityOverflow);
            }
            let (mut alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
                (alloc, layout)
            } else {
                // No current allocation — nothing to grow in place.
                return Ok(false);
            };
            let new_cap = 2 * self.capacity;
            let new_layout = alloc_guard(new_cap * elem_size, mem::align_of::<T>())?;
            debug_assert_eq!(old_layout.align(), new_layout.align());
            Ok(alloc.grow_in_place(self.ptr.cast(), old_layout, new_layout.size()))
        }
    }

    /// Ensures space for at least `needed_extra_capacity` more elements past
    /// `used_capacity`, growing with the amortized strategy. Panics on
    /// overflow; allocation cannot fail (`Error = crate::Never`).
    pub fn reserve(&mut self, used_capacity: usize, needed_extra_capacity: usize)
    where
        B::Ref: ReallocRef<Error = crate::Never>,
    {
        match self.try_reserve(used_capacity, needed_extra_capacity) {
            Ok(vec) => vec,
            Err(CollectionAllocErr::CapacityOverflow) => capacity_overflow(),
            Err(CollectionAllocErr::AllocError { .. }) => unreachable!("Infallible allocation"),
        }
    }

    /// Fallible `reserve` using the amortized growth strategy.
    pub fn try_reserve(
        &mut self,
        used_capacity: usize,
        needed_extra_capacity: usize,
    ) -> Result<(), CollectionAllocErr<B>>
    where
        B::Ref: ReallocRef,
    {
        self.reserve_internal(
            used_capacity,
            needed_extra_capacity,
            ReserveStrategy::Amortized,
        )
    }

    /// Like `reserve`, but grows to exactly the requested capacity.
    pub fn reserve_exact(&mut self, used_capacity: usize, needed_extra_capacity: usize)
    where
        B::Ref: ReallocRef<Error = crate::Never>,
    {
        match self.try_reserve_exact(used_capacity, needed_extra_capacity) {
            Ok(_) => (),
            Err(CollectionAllocErr::CapacityOverflow) => capacity_overflow(),
            Err(CollectionAllocErr::AllocError { .. }) => unreachable!(),
        }
    }

    /// Fallible `reserve_exact` using the exact growth strategy.
    pub fn try_reserve_exact(
        &mut self,
        used_capacity: usize,
        needed_extra_capacity: usize,
    ) -> Result<(), CollectionAllocErr<B>>
    where
        B::Ref: ReallocRef,
    {
        self.reserve_internal(used_capacity, needed_extra_capacity, ReserveStrategy::Exact)
    }

    /// Panicking wrapper around `try_reserve_in_place`; returns whether the
    /// buffer was grown without moving.
    pub fn reserve_in_place(&mut self, used_capacity: usize, needed_extra_capacity: usize) -> bool
    where
        B::Ref: AllocRef,
    {
        if let Ok(success) = self.try_reserve_in_place(used_capacity, needed_extra_capacity) {
            success
        } else {
            capacity_overflow()
        }
    }

    /// Attempts to grow the buffer in place to the amortized target size.
    ///
    /// Returns `Ok(false)` when no growth is needed, when there is no
    /// existing allocation, or when the allocator cannot grow in place.
    /// The `wrapping_sub` fast path assumes `used_capacity <= capacity()`;
    /// a larger value wraps and is treated as "already satisfied".
    pub fn try_reserve_in_place(
        &mut self,
        used_capacity: usize,
        needed_extra_capacity: usize,
    ) -> Result<bool, CapacityOverflow>
    where
        B::Ref: AllocRef,
    {
        if self.capacity().wrapping_sub(used_capacity) >= needed_extra_capacity {
            return Ok(false);
        }
        let (mut alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
            (alloc, layout)
        } else {
            // No current allocation — in-place growth is impossible.
            return Ok(false);
        };
        let new_cap = self.amortized_new_size(used_capacity, needed_extra_capacity)?;
        let new_layout = alloc_guard(new_cap * mem::size_of::<T>(), mem::align_of::<T>())?;
        debug_assert_eq!(new_layout.align(), old_layout.align());
        unsafe {
            if alloc.grow_in_place(self.ptr.cast(), old_layout, new_layout.size()) {
                self.capacity = new_cap;
                Ok(true)
            } else {
                Ok(false)
            }
        }
    }

    /// Shrinks the buffer to hold exactly `amount` elements.
    ///
    /// # Panics
    ///
    /// Panics if `amount` exceeds the current capacity; the `AllocError`
    /// arm is unreachable (`Error = crate::Never`).
    pub fn shrink_to_fit(&mut self, amount: usize)
    where
        B::Ref: ReallocRef<Error = crate::Never>,
    {
        match self.try_shrink_to_fit(amount) {
            Ok(_) => (),
            Err(CollectionAllocErr::CapacityOverflow) => {
                panic!("Tried to shrink to a larger capacity")
            }
            Err(CollectionAllocErr::AllocError { .. }) => unreachable!(),
        }
    }

    /// Fallible shrink: `CapacityOverflow` here means `amount` is larger
    /// than the current capacity.
    ///
    /// For zero-sized `T` only the bookkeeping capacity is updated.
    /// Shrinking to `0` frees the buffer and resets `self` to an empty
    /// `RawVec`; any other smaller `amount` reallocates down.
    pub fn try_shrink_to_fit(&mut self, amount: usize) -> Result<(), CollectionAllocErr<B>>
    where
        B::Ref: ReallocRef,
    {
        let elem_size = mem::size_of::<T>();
        if elem_size == 0 {
            self.capacity = amount;
            return Ok(());
        }
        if self.capacity < amount {
            return Err(CollectionAllocErr::CapacityOverflow);
        }
        if amount == 0 {
            unsafe {
                // Move the builder out, free the buffer, then rebuild `self`
                // in place so the old (freed) state is never dropped.
                let build_alloc = ptr::read(self.build_alloc());
                self.dealloc_buffer();
                ptr::write(
                    self,
                    Self::from_raw_parts_in(NonNull::dangling().as_ptr(), 0, build_alloc),
                );
            }
        } else if self.capacity != amount {
            unsafe {
                // Non-zero sizes are guaranteed: elem_size != 0, capacity >=
                // amount > 0.
                let old_size = NonZeroUsize::new_unchecked(elem_size * self.capacity);
                let new_size = NonZeroUsize::new_unchecked(elem_size * amount);
                let align = NonZeroUsize::new_unchecked(mem::align_of::<T>());
                let old_layout = NonZeroLayout::from_size_align_unchecked(old_size, align);
                let new_layout = alloc_guard(new_size.get(), align.get())?;
                let ptr = self.ptr.cast();
                self.ptr = self
                    .build_alloc
                    .build_alloc_ref(ptr, Some(old_layout))
                    .realloc(ptr, old_layout, new_layout)
                    .map_err(|inner| CollectionAllocErr::AllocError {
                        layout: NonZeroLayout::from_size_align_unchecked(new_size, align),
                        inner,
                    })?
                    .cast();
            }
            self.capacity = amount;
        }
        Ok(())
    }

    /// Common implementation of the `reserve*` family.
    ///
    /// Returns early when the request is already satisfied, computes the
    /// target capacity per `strategy`, validates the layout, then either
    /// reallocates the existing buffer or performs a fresh allocation.
    fn reserve_internal(
        &mut self,
        used_capacity: usize,
        needed_extra_capacity: usize,
        strategy: ReserveStrategy,
    ) -> Result<(), CollectionAllocErr<B>>
    where
        B::Ref: ReallocRef,
    {
        // Fast path; `wrapping_sub` assumes `used_capacity <= capacity()`.
        // ZSTs always return here because `capacity()` is `usize::MAX`.
        if self.capacity().wrapping_sub(used_capacity) >= needed_extra_capacity {
            return Ok(());
        }
        let new_cap = match strategy {
            ReserveStrategy::Exact => used_capacity
                .checked_add(needed_extra_capacity)
                .ok_or(CollectionAllocErr::CapacityOverflow)?,
            ReserveStrategy::Amortized => {
                self.amortized_new_size(used_capacity, needed_extra_capacity)?
            }
        };
        let new_cap = unsafe {
            // The early return above proves more space is needed, so the
            // target capacity cannot be zero.
            debug_assert_ne!(new_cap, 0);
            NonZeroUsize::new_unchecked(new_cap)
        };
        let new_layout = NonZeroLayout::array::<T>(new_cap)?;
        let _ = alloc_guard(new_layout.size().get(), new_layout.align().get())?;
        let (mut alloc, old_layout) = self.alloc_ref();
        let result = if let Some(layout) = old_layout {
            unsafe { alloc.realloc(self.ptr.cast(), layout, new_layout) }
        } else {
            alloc.alloc(new_layout)
        };
        self.ptr = result
            .map_err(|inner| CollectionAllocErr::AllocError {
                layout: new_layout,
                inner,
            })?
            .cast();
        self.capacity = new_cap.get();
        Ok(())
    }
}
impl<T, B: BuildAllocRef> From<Box<[T], B>> for RawVec<T, B> {
    /// Takes over the buffer of a boxed slice without copying it.
    ///
    /// The resulting capacity equals the slice length, and the box's
    /// allocator builder is carried over so the buffer is later freed the
    /// same way it was allocated.
    fn from(slice: Box<[T], B>) -> Self {
        let capacity = slice.len();
        let (ptr, build_alloc) = Box::into_raw_non_null_alloc(slice);
        Self {
            ptr: ptr.cast(),
            capacity,
            build_alloc,
            _owned: PhantomData,
        }
    }
}
/// Growth policy used by `RawVec::reserve_internal`.
#[derive(Copy, Clone)]
enum ReserveStrategy {
    /// Grow to exactly `used_capacity + needed_extra_capacity`.
    Exact,
    /// Grow to at least double the current capacity
    /// (see `amortized_new_size`).
    Amortized,
}
impl<T, B: BuildAllocRef> RawVec<T, B> {
    /// Frees the backing buffer, if any, without dropping the `T` contents.
    ///
    /// A `None` layout from `alloc_ref` means the byte size is zero (zero
    /// capacity or zero-sized `T`), so there is nothing to free.
    pub fn dealloc_buffer(&mut self) {
        let (mut alloc, maybe_layout) = self.alloc_ref();
        if let Some(layout) = maybe_layout {
            // SAFETY: `layout` describes the live allocation behind
            // `self.ptr`, rebuilt by `alloc_ref` from the stored builder.
            unsafe { alloc.dealloc(self.ptr.cast(), layout) }
        }
    }
}
// With the `dropck_eyepatch` feature, `#[may_dangle]` tells the drop checker
// that dropping a `RawVec` never touches any `T` value — only the buffer is
// freed — so borrows inside `T` may already have expired.
#[cfg(feature = "dropck_eyepatch")]
unsafe impl<#[may_dangle] T, B: BuildAllocRef> Drop for RawVec<T, B> {
    /// Frees the buffer without dropping its contents.
    fn drop(&mut self) {
        self.dealloc_buffer();
    }
}
// Fallback `Drop` for builds without the `dropck_eyepatch` feature.
#[cfg(not(feature = "dropck_eyepatch"))]
impl<T, B: BuildAllocRef> Drop for RawVec<T, B> {
    /// Frees the buffer without dropping its contents.
    fn drop(&mut self) {
        self.dealloc_buffer();
    }
}
/// Validates that an allocation of `alloc_size` bytes is representable on
/// this target and packages `(alloc_size, align)` as a `NonZeroLayout`.
///
/// On targets where `usize` is narrower than 64 bits, sizes that do not fit
/// in `isize` are rejected with `CapacityOverflow`; on 64-bit targets no
/// runtime check is performed.
///
/// The caller must supply a non-zero `alloc_size` and a valid non-zero
/// `align` — this is only debug-asserted before the unchecked construction.
#[inline]
fn alloc_guard(alloc_size: usize, align: usize) -> Result<NonZeroLayout, CapacityOverflow> {
    let fits_in_isize = isize::try_from(alloc_size).is_ok();
    if mem::size_of::<usize>() >= 8 || fits_in_isize {
        // Sanity-check the pair in debug builds before skipping the checks.
        debug_assert!(NonZeroLayout::from_size_align(alloc_size, align).is_ok());
        unsafe {
            // SAFETY: caller guarantees both values are non-zero and form a
            // valid layout (debug-asserted above).
            let size = NonZeroUsize::new_unchecked(alloc_size);
            let align = NonZeroUsize::new_unchecked(align);
            Ok(NonZeroLayout::from_size_align_unchecked(size, align))
        }
    } else {
        Err(CapacityOverflow)
    }
}
/// Panics with a capacity-overflow message.
///
/// Marked `#[cold]` and `#[inline(never)]` — matching the file's other cold
/// grow paths (`try_double`, `try_double_in_place`) — so this diverging
/// error path is kept out of line and does not pollute the branch layout of
/// the hot allocation paths that call it.
#[cold]
#[inline(never)]
fn capacity_overflow() -> ! {
    panic!("capacity overflow");
}