use core::alloc::LayoutError;
use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::Drop;
use core::ptr::{self, NonNull};
use core::slice;
use core::{cmp, fmt};
use super::{
alloc::{Allocator, Global, Layout},
assume,
boxed::Box,
};
#[cfg(not(no_global_oom_handling))]
use super::alloc::handle_alloc_error;
/// The error returned by the fallible reservation methods
/// (`try_reserve` / `try_reserve_exact`): the buffer could not be
/// grown to the requested capacity.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct TryReserveError {
    // Underlying cause; exposed read-only through `kind()`.
    kind: TryReserveErrorKind,
}
impl TryReserveError {
pub fn kind(&self) -> TryReserveErrorKind {
self.kind.clone()
}
}
/// Details of the failure behind a [`TryReserveError`].
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum TryReserveErrorKind {
    /// The computed capacity exceeded the collection's maximum.
    CapacityOverflow,
    /// The allocator returned an error for the given `layout`.
    AllocError {
        /// The layout of the allocation request that failed.
        layout: Layout,
        // Hidden unit field so this variant cannot be constructed or
        // exhaustively matched outside this module.
        #[doc(hidden)]
        non_exhaustive: (),
    },
}
use TryReserveErrorKind::*;
impl From<TryReserveErrorKind> for TryReserveError {
#[inline(always)]
fn from(kind: TryReserveErrorKind) -> Self {
Self { kind }
}
}
impl From<LayoutError> for TryReserveErrorKind {
#[inline(always)]
fn from(_: LayoutError) -> Self {
TryReserveErrorKind::CapacityOverflow
}
}
impl fmt::Display for TryReserveError {
    /// Formats the error as a fixed prefix plus a cause-specific suffix.
    fn fmt(
        &self,
        fmt: &mut core::fmt::Formatter<'_>,
    ) -> core::result::Result<(), core::fmt::Error> {
        // Pick the suffix first, then emit both pieces.
        let reason = match &self.kind {
            TryReserveErrorKind::CapacityOverflow => {
                " because the computed capacity exceeded the collection's maximum"
            }
            TryReserveErrorKind::AllocError { .. } => {
                " because the memory allocator returned an error"
            }
        };
        fmt.write_str("memory allocation failed")?;
        fmt.write_str(reason)
    }
}
// On a recent toolchain, implement the no_std-friendly `core::error::Error`.
#[cfg(feature = "fresh-rust")]
impl core::error::Error for TryReserveError {}
// Otherwise fall back to `std::error::Error` when std is available.
#[cfg(all(not(feature = "fresh-rust"), feature = "std"))]
impl std::error::Error for TryReserveError {}
/// Whether a fresh allocation should be left uninitialized or zero-filled.
#[cfg(not(no_global_oom_handling))]
enum AllocInit {
    /// Contents of the new allocation are unspecified.
    Uninitialized,
    /// The new allocation is zero-filled.
    Zeroed,
}
/// Low-level buffer manager for vector-like types: owns a pointer, a
/// capacity, and an allocator, and handles growth, shrinking, and
/// deallocation. It does not track how many elements are initialized.
#[allow(missing_debug_implementations)]
pub(crate) struct RawVec<T, A: Allocator = Global> {
    // Dangling (but aligned) when `cap == 0`; never null.
    ptr: NonNull<T>,
    // Number of `T`s allocated. Stays 0 for zero-sized `T`, for which
    // `capacity()` reports `usize::MAX` instead.
    cap: usize,
    // Allocator used for every allocation / reallocation / free of `ptr`.
    alloc: A,
}
// SAFETY: `RawVec` uniquely owns its buffer of `T`s and its allocator, so
// it can be sent to another thread whenever both `T` and `A` can be.
unsafe impl<T, A: Allocator> Send for RawVec<T, A>
where
    T: Send,
    A: Send,
{
}
// SAFETY: sharing a `&RawVec` only exposes shared access to the buffer
// and the allocator, which is sound whenever `T` and `A` are `Sync`.
unsafe impl<T, A: Allocator> Sync for RawVec<T, A>
where
    T: Sync,
    A: Sync,
{
}
impl<T> RawVec<T, Global> {
#[must_use]
pub const fn new() -> Self {
Self::new_in(Global)
}
#[cfg(not(no_global_oom_handling))]
#[must_use]
#[inline(always)]
pub fn with_capacity(capacity: usize) -> Self {
Self::with_capacity_in(capacity, Global)
}
#[cfg(not(no_global_oom_handling))]
#[must_use]
#[inline(always)]
pub fn with_capacity_zeroed(capacity: usize) -> Self {
Self::with_capacity_zeroed_in(capacity, Global)
}
}
impl<T, A: Allocator> RawVec<T, A> {
    /// Smallest capacity used when growing from empty, tiered by element
    /// size so small elements amortize allocation overhead: 8 for 1-byte
    /// elements, 4 for elements up to 1024 bytes, 1 for anything larger.
    pub(crate) const MIN_NON_ZERO_CAP: usize = if mem::size_of::<T>() == 1 {
        8
    } else if mem::size_of::<T>() <= 1024 {
        4
    } else {
        1
    };
    /// Creates an empty `RawVec` using `alloc`; never allocates.
    #[inline(always)]
    pub const fn new_in(alloc: A) -> Self {
        Self {
            ptr: NonNull::dangling(),
            cap: 0,
            alloc,
        }
    }
    /// Allocates room for `capacity` elements, leaving the memory
    /// uninitialized. Panics/aborts on allocation failure.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        Self::allocate_in(capacity, AllocInit::Uninitialized, alloc)
    }
    /// Like [`Self::with_capacity_in`], but the memory is zero-filled.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
        Self::allocate_in(capacity, AllocInit::Zeroed, alloc)
    }
    /// Converts the buffer into a `Box<[MaybeUninit<T>]>` of length `len`
    /// without reallocating or freeing.
    ///
    /// # Safety
    ///
    /// `len` must not exceed `self.capacity()` (only checked with
    /// `debug_assert!`). Any excess capacity is lost.
    #[inline(always)]
    pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>], A> {
        debug_assert!(
            len <= self.capacity(),
            "`len` must be smaller than or equal to `self.capacity()`"
        );
        // Keep our own Drop from freeing the buffer we are handing out.
        let me = ManuallyDrop::new(self);
        unsafe {
            let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
            Box::<[MaybeUninit<T>], A>::from_raw_in(slice, ptr::read(&me.alloc))
        }
    }
    /// Shared allocation path: returns an unallocated vec for zero-sized
    /// `T` or zero capacity, otherwise allocates per `init` and
    /// panics/aborts on failure.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    fn allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Self {
        if mem::size_of::<T>() == 0 || capacity == 0 {
            Self::new_in(alloc)
        } else {
            let layout = match Layout::array::<T>(capacity) {
                Ok(layout) => layout,
                Err(_) => capacity_overflow(),
            };
            // Reject sizes the allocation APIs cannot represent.
            match alloc_guard(layout.size()) {
                Ok(_) => {}
                Err(_) => capacity_overflow(),
            }
            let result = match init {
                AllocInit::Uninitialized => alloc.allocate(layout),
                AllocInit::Zeroed => alloc.allocate_zeroed(layout),
            };
            let ptr = match result {
                Ok(ptr) => ptr,
                Err(_) => handle_alloc_error(layout),
            };
            Self {
                // SAFETY: a successful allocation is never null.
                ptr: unsafe { NonNull::new_unchecked(ptr.cast().as_ptr()) },
                cap: capacity,
                alloc,
            }
        }
    }
    /// Reconstitutes a `RawVec` from previously handed-out parts.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null and, unless `capacity` is 0 or `T` is
    /// zero-sized, must denote a block of `capacity` elements obtained
    /// from `alloc`.
    #[inline(always)]
    pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
        Self {
            ptr: unsafe { NonNull::new_unchecked(ptr) },
            cap: capacity,
            alloc,
        }
    }
    /// Raw pointer to the buffer (dangling but aligned when unallocated).
    #[inline(always)]
    pub fn ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }
    /// Number of elements the buffer can hold; `usize::MAX` for
    /// zero-sized `T`.
    #[inline(always)]
    pub fn capacity(&self) -> usize {
        if mem::size_of::<T>() == 0 {
            usize::MAX
        } else {
            self.cap
        }
    }
    /// Borrows the underlying allocator.
    #[inline(always)]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }
    /// The live allocation as `(ptr, layout)`, or `None` when nothing is
    /// allocated (zero-sized `T` or zero capacity).
    #[inline(always)]
    fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
        if mem::size_of::<T>() == 0 || self.cap == 0 {
            None
        } else {
            unsafe {
                // SAFETY: this same layout computation succeeded when the
                // buffer was allocated, so it cannot fail here.
                let layout = Layout::array::<T>(self.cap).unwrap_unchecked();
                Some((self.ptr.cast(), layout))
            }
        }
    }
    /// Ensures space for at least `additional` elements past `len`,
    /// growing with amortized doubling. Panics/aborts on failure.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn reserve(&mut self, len: usize, additional: usize) {
        // Outlined slow path. `#[cold]` + `#[inline(never)]` keep the
        // grow-and-handle machinery out of every (hot) call site; the
        // previous `#[inline(always)]` contradicted `#[cold]` and forced
        // the slow path back inline, defeating the outlining.
        #[cold]
        #[inline(never)]
        fn do_reserve_and_handle<T, A: Allocator>(
            slf: &mut RawVec<T, A>,
            len: usize,
            additional: usize,
        ) {
            handle_reserve(slf.grow_amortized(len, additional));
        }
        if self.needs_to_grow(len, additional) {
            do_reserve_and_handle(self, len, additional);
        }
    }
    /// `reserve` specialized for pushing a single element.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn reserve_for_push(&mut self, len: usize) {
        handle_reserve(self.grow_amortized(len, 1));
    }
    /// Fallible `reserve`: returns the error instead of panicking.
    #[inline(always)]
    pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
        if self.needs_to_grow(len, additional) {
            self.grow_amortized(len, additional)
        } else {
            Ok(())
        }
    }
    /// Ensures space for exactly `additional` elements past `len`, with
    /// no amortized over-allocation. Panics/aborts on failure.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn reserve_exact(&mut self, len: usize, additional: usize) {
        handle_reserve(self.try_reserve_exact(len, additional));
    }
    /// Fallible `reserve_exact`.
    #[inline(always)]
    pub fn try_reserve_exact(
        &mut self,
        len: usize,
        additional: usize,
    ) -> Result<(), TryReserveError> {
        if self.needs_to_grow(len, additional) {
            self.grow_exact(len, additional)
        } else {
            Ok(())
        }
    }
    /// Shrinks the allocation to `cap` elements.
    ///
    /// # Panics
    ///
    /// Panics if `cap` exceeds the current capacity; aborts on allocator
    /// failure.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn shrink_to_fit(&mut self, cap: usize) {
        handle_reserve(self.shrink(cap));
    }
}
impl<T, A: Allocator> RawVec<T, A> {
    /// Whether holding `len` elements plus `additional` more would exceed
    /// the current capacity. `wrapping_sub` avoids a panicking
    /// subtraction when computing the remaining room.
    #[inline(always)]
    fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
        additional > self.capacity().wrapping_sub(len)
    }
    /// Installs a (re)allocation result returned by the allocator.
    #[inline(always)]
    fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
        // SAFETY: callers pass a pointer from a successful allocator
        // call, which is never null.
        self.ptr = unsafe { NonNull::new_unchecked(ptr.cast().as_ptr()) };
        self.cap = cap;
    }
    /// Amortized growth: the new capacity is the largest of twice the
    /// current capacity, `len + additional`, and `MIN_NON_ZERO_CAP`.
    #[inline(always)]
    fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
        debug_assert!(additional > 0);
        if mem::size_of::<T>() == 0 {
            // Zero-sized types report a capacity of usize::MAX, so a
            // request that reaches here has overflowed it.
            return Err(CapacityOverflow.into());
        }
        let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
        let cap = cmp::max(self.cap * 2, required_cap);
        let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap);
        let new_layout = Layout::array::<T>(cap);
        // `finish_grow` validates the layout and talks to the allocator.
        let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
        self.set_ptr_and_cap(ptr, cap);
        Ok(())
    }
    /// Exact growth to `len + additional` elements, with no amortization.
    #[inline(always)]
    fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
        if mem::size_of::<T>() == 0 {
            // Same reasoning as in `grow_amortized`.
            return Err(CapacityOverflow.into());
        }
        let cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
        let new_layout = Layout::array::<T>(cap);
        let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
        self.set_ptr_and_cap(ptr, cap);
        Ok(())
    }
    /// Shrinks the allocation to exactly `cap` elements.
    ///
    /// # Panics
    ///
    /// Panics if `cap` is larger than the current capacity.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    fn shrink(&mut self, cap: usize) -> Result<(), TryReserveError> {
        assert!(
            cap <= self.capacity(),
            "Tried to shrink to a larger capacity"
        );
        // Nothing to do when there is no live allocation (ZST or cap 0).
        let (ptr, layout) = if let Some(mem) = self.current_memory() {
            mem
        } else {
            return Ok(());
        };
        let ptr = unsafe {
            // SAFETY: `cap` is no larger than the current capacity, whose
            // array layout was valid, so this smaller layout is too.
            let new_layout = Layout::array::<T>(cap).unwrap_unchecked();
            self.alloc
                .shrink(ptr, layout, new_layout)
                .map_err(|_| AllocError {
                    layout: new_layout,
                    non_exhaustive: (),
                })?
        };
        self.set_ptr_and_cap(ptr, cap);
        Ok(())
    }
}
/// Shared tail of the grow paths: validates `new_layout`, then either
/// regrows the existing block or makes a first allocation. Generic over
/// `A` only (not `T`), so it is instantiated once per allocator type
/// rather than once per element type.
#[inline(always)]
fn finish_grow<A>(
    new_layout: Result<Layout, LayoutError>,
    current_memory: Option<(NonNull<u8>, Layout)>,
    alloc: &mut A,
) -> Result<NonNull<[u8]>, TryReserveError>
where
    A: Allocator,
{
    // A layout error means the capacity computation overflowed.
    let new_layout = new_layout.map_err(|_| CapacityOverflow)?;
    alloc_guard(new_layout.size())?;
    let memory = if let Some((ptr, old_layout)) = current_memory {
        debug_assert_eq!(old_layout.align(), new_layout.align());
        unsafe {
            // Promise the optimizer the alignments match so the check
            // can be elided in the allocator's grow path.
            assume(old_layout.align() == new_layout.align());
            alloc.grow(ptr, old_layout, new_layout)
        }
    } else {
        // No existing block: this is the first allocation.
        alloc.allocate(new_layout)
    };
    memory.map_err(|_| {
        AllocError {
            layout: new_layout,
            non_exhaustive: (),
        }
        .into()
    })
}
impl<T, A: Allocator> Drop for RawVec<T, A> {
    /// Frees the buffer, if one exists. The contained `T`s are NOT
    /// dropped here; that is the owning collection's job.
    #[inline(always)]
    fn drop(&mut self) {
        match self.current_memory() {
            // SAFETY: `current_memory` only reports a live allocation
            // together with the exact layout it was made with.
            Some((ptr, layout)) => unsafe { self.alloc.deallocate(ptr, layout) },
            None => {}
        }
    }
}
// Translates a fallible-reservation result into the infallible API's
// behavior: panic on capacity overflow, abort via `handle_alloc_error`
// on allocator failure, and do nothing on success.
#[cfg(not(no_global_oom_handling))]
#[inline(always)]
fn handle_reserve(result: Result<(), TryReserveError>) {
    if let Err(err) = result {
        match err.kind() {
            CapacityOverflow => capacity_overflow(),
            AllocError { layout, .. } => handle_alloc_error(layout),
        }
    }
}
// Guards against allocation sizes the allocation APIs cannot represent.
// On targets with a 64-bit `usize` the guard always passes (and compiles
// away); on smaller targets, sizes above `isize::MAX` bytes are rejected
// as a capacity overflow.
#[inline(always)]
fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
    if usize::BITS >= 64 || alloc_size <= isize::MAX as usize {
        Ok(())
    } else {
        Err(CapacityOverflow.into())
    }
}
/// Central diverging panic for capacity overflow, shared by all the
/// infallible reservation paths.
#[cfg(not(no_global_oom_handling))]
fn capacity_overflow() -> ! {
    panic!("capacity overflow");
}