use self::TryReserveErrorKind::{AllocError, CapacityOverflow};
use crate::{Allocation, Error, SizedTypeProperties, align_up, is_aligned, page_size};
use core::{
alloc::Layout,
borrow::{Borrow, BorrowMut},
cmp, fmt,
hash::{Hash, Hasher},
iter::FusedIterator,
marker::PhantomData,
mem::ManuallyDrop,
ops::{Deref, DerefMut, Index, IndexMut},
panic::UnwindSafe,
ptr,
slice::{self, SliceIndex},
};
/// Configuration builder for [`Vec`], obtained via [`Vec::builder`].
#[derive(Clone, Copy, Debug)]
pub struct VecBuilder {
    // Hard upper bound on the number of elements the vector can ever hold.
    max_capacity: usize,
    // Number of elements to commit memory for up front (defaults to 0).
    capacity: usize,
    // Policy used to pick a new capacity when the vector grows.
    growth_strategy: GrowthStrategy,
    // Layout of an optional header placed before the element storage
    // (defaults to the zero-sized `()` layout, i.e. no header).
    header_layout: Layout,
}
impl VecBuilder {
    /// Creates a builder for a vector bounded by `max_capacity` elements.
    #[inline]
    const fn new(max_capacity: usize) -> Self {
        VecBuilder {
            max_capacity,
            capacity: 0,
            growth_strategy: GrowthStrategy::new(),
            // Zero-sized layout: no header is reserved by default.
            header_layout: Layout::new::<()>(),
        }
    }

    /// Sets the number of elements to commit memory for immediately on build.
    #[inline]
    pub const fn capacity(&mut self, capacity: usize) -> &mut Self {
        self.capacity = capacity;
        self
    }

    /// Sets the growth strategy used when the vector runs out of capacity.
    ///
    /// # Panics
    ///
    /// Panics if the strategy is degenerate (see `GrowthStrategy::validate`).
    #[inline]
    #[track_caller]
    pub const fn growth_strategy(&mut self, growth_strategy: GrowthStrategy) -> &mut Self {
        growth_strategy.validate();
        self.growth_strategy = growth_strategy;
        self
    }

    /// Sets the layout of a header placed in front of the element storage.
    ///
    /// # Panics
    ///
    /// Panics if the layout's size is not a multiple of its alignment.
    #[inline]
    #[track_caller]
    pub const fn header(&mut self, header_layout: Layout) -> &mut Self {
        assert!(is_aligned(header_layout.size(), header_layout.align()));
        self.header_layout = header_layout;
        self
    }

    /// Builds the vector.
    ///
    /// # Panics
    ///
    /// Panics if the capacity computation overflows or allocation fails.
    #[must_use]
    #[track_caller]
    pub fn build<T>(&self) -> Vec<T> {
        match self.try_build() {
            Ok(vec) => vec,
            Err(err) => handle_error(err),
        }
    }

    /// Builds the vector, returning an error instead of panicking on capacity
    /// overflow or allocation failure.
    pub fn try_build<T>(&self) -> Result<Vec<T>, TryReserveError> {
        Ok(Vec {
            // SAFETY: the size and alignment passed to `VecInner::new` are
            // exactly those of `T`, the element type of the `Vec` built here.
            inner: unsafe {
                VecInner::new(
                    self.max_capacity,
                    self.capacity,
                    self.growth_strategy,
                    self.header_layout,
                    size_of::<T>(),
                    align_of::<T>(),
                )
            }?,
            marker: PhantomData,
        })
    }
}
/// A vector with a fixed maximum capacity, backed by a single reservation.
///
/// NOTE(review): growth appears to commit more of an up-front address-space
/// reservation rather than reallocating, so elements never move — confirm
/// against the `Allocation` implementation.
pub struct Vec<T> {
    inner: VecInner,
    // Makes `Vec<T>` act as though it owns values of type `T` for variance,
    // auto traits, and drop checking.
    marker: PhantomData<T>,
}
/// Type-erased state shared by every `Vec<T>`; the element size/alignment are
/// passed in explicitly by the typed wrapper on each call.
struct VecInner {
    // Pointer to the first element (after any header), inside `allocation`.
    elements: *mut (),
    // Number of elements the currently committed memory can hold.
    capacity: usize,
    // Number of initialized elements; invariant: `len <= capacity`.
    len: usize,
    // Hard upper bound on `capacity`, fixed at construction.
    max_capacity: usize,
    growth_strategy: GrowthStrategy,
    // Owns the underlying memory; freed when the vector is dropped.
    allocation: Allocation,
}
// SAFETY: `Vec<T>` owns its elements, so sending the vector across threads is
// sound exactly when sending `T` is.
unsafe impl<T: Send> Send for Vec<T> {}
// SAFETY: shared access to `Vec<T>` only hands out `&T`, so sharing it is
// sound exactly when `T` is `Sync`.
unsafe impl<T: Sync> Sync for Vec<T> {}
impl Vec<()> {
    /// Returns a builder for configuring a vector with the given maximum
    /// capacity (in elements). Defined on `Vec<()>` so the element type can
    /// be chosen later by `build`/`try_build`.
    #[inline]
    #[must_use]
    pub const fn builder(max_capacity: usize) -> VecBuilder {
        VecBuilder::new(max_capacity)
    }
}
impl<T> Vec<T> {
    /// Creates a vector with default settings and the given maximum capacity
    /// (in elements).
    ///
    /// # Panics
    ///
    /// Panics if the capacity computation overflows or allocation fails.
    #[must_use]
    #[track_caller]
    pub fn new(max_capacity: usize) -> Self {
        VecBuilder::new(max_capacity).build()
    }

    /// Creates an empty vector with no backing memory and a maximum capacity
    /// of zero; any attempt to grow it will fail.
    #[inline]
    #[must_use]
    pub const fn dangling() -> Self {
        Vec {
            inner: VecInner::dangling(align_of::<T>()),
            marker: PhantomData,
        }
    }

    /// Returns the initialized elements as a shared slice.
    #[inline]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: the first `len` elements are always initialized.
        unsafe { slice::from_raw_parts(self.as_ptr(), self.len()) }
    }

    /// Returns the initialized elements as a mutable slice.
    #[inline]
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: the first `len` elements are always initialized, and
        // `&mut self` guarantees exclusive access.
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
    }

    /// Returns a raw pointer to the element storage.
    #[inline]
    #[must_use]
    pub fn as_ptr(&self) -> *const T {
        self.inner.elements.cast()
    }

    /// Returns a mutable raw pointer to the element storage.
    #[inline]
    #[must_use]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.inner.elements.cast()
    }

    /// Returns the maximum number of elements this vector can ever hold.
    #[inline]
    #[must_use]
    pub fn max_capacity(&self) -> usize {
        self.inner.max_capacity
    }

    /// Returns the number of elements that fit in currently committed memory.
    #[inline]
    #[must_use]
    pub fn capacity(&self) -> usize {
        self.inner.capacity
    }

    /// Returns the number of initialized elements.
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.inner.len
    }

    /// Returns `true` if the vector contains no elements.
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Appends `value` to the back of the vector.
    ///
    /// # Panics
    ///
    /// Panics if the vector is already at its maximum capacity or growing
    /// the committed memory fails.
    #[inline]
    #[track_caller]
    pub fn push(&mut self, value: T) {
        let len = self.len();
        if len == self.capacity() {
            self.grow_one();
        }
        // SAFETY: after the (possible) grow above, `len < capacity`, so the
        // slot at index `len` lies within committed, writable memory.
        unsafe { self.as_mut_ptr().add(len).write(value) };
        // The new element is initialized, so the length may be bumped.
        // NOTE(review): a plain field assignment is safe Rust; this `unsafe`
        // block looks redundant — confirm whether it is intentional.
        unsafe { self.inner.len = len + 1 };
    }

    /// Out-of-line slow path of `push`: commits at least one more element's
    /// worth of memory according to the growth strategy.
    #[cold]
    #[inline(never)]
    #[track_caller]
    fn grow_one(&mut self) {
        // SAFETY: `size_of::<T>()` is the element size `inner` was built with.
        unsafe { self.inner.grow_one(size_of::<T>()) };
    }

    /// Ensures capacity for at least `additional` more elements, growing per
    /// the growth strategy; panics on overflow or allocation failure.
    #[track_caller]
    pub fn reserve(&mut self, additional: usize) {
        // SAFETY: `size_of::<T>()` is the element size `inner` was built with.
        unsafe { self.inner.reserve(additional, size_of::<T>()) };
    }

    /// Like `reserve`, but does not over-commit beyond what is required.
    #[track_caller]
    pub fn reserve_exact(&mut self, additional: usize) {
        // SAFETY: `size_of::<T>()` is the element size `inner` was built with.
        unsafe { self.inner.reserve_exact(additional, size_of::<T>()) };
    }

    /// Fallible version of [`Self::reserve`].
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        // SAFETY: `size_of::<T>()` is the element size `inner` was built with.
        unsafe { self.inner.try_reserve(additional, size_of::<T>()) }
    }

    /// Fallible version of [`Self::reserve_exact`].
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        // SAFETY: `size_of::<T>()` is the element size `inner` was built with.
        unsafe { self.inner.try_reserve_exact(additional, size_of::<T>()) }
    }
}
impl VecInner {
    /// Reserves address space for `max_capacity` elements (plus an optional
    /// header) and commits memory for the first `capacity` elements.
    ///
    /// NOTE(review): `Allocation::new`/`commit` appear to implement a
    /// reserve-then-commit virtual-memory scheme — confirm in `Allocation`.
    ///
    /// # Safety
    ///
    /// `elem_size` and `elem_align` must be the size and alignment of the
    /// element type of the `Vec` that will wrap this `VecInner`.
    unsafe fn new(
        max_capacity: usize,
        capacity: usize,
        growth_strategy: GrowthStrategy,
        header_layout: Layout,
        elem_size: usize,
        elem_align: usize,
    ) -> Result<Self, TryReserveError> {
        let size = max_capacity
            .checked_mul(elem_size)
            .ok_or(CapacityOverflow)?;
        // Total element bytes must fit in `isize`, and the requested initial
        // capacity cannot exceed the maximum.
        #[allow(clippy::cast_possible_wrap)]
        if size > isize::MAX as usize || capacity > max_capacity {
            return Err(CapacityOverflow.into());
        }
        // Nothing to allocate (ZST elements or zero maximum, and no header):
        // use a dangling allocation and report full capacity immediately.
        if size == 0 && header_layout.size() == 0 {
            let allocation = Allocation::dangling(elem_align);
            return Ok(VecInner {
                elements: allocation.ptr().cast(),
                capacity: max_capacity,
                len: 0,
                max_capacity,
                growth_strategy,
                allocation,
            });
        }
        // Elements start after the header, aligned for the element type.
        let elements_offset = align_up(header_layout.size(), elem_align);
        let size = elements_offset + size;
        let page_size = page_size();
        let size = align_up(size, page_size);
        // `align_up` wrapped around to zero: the reservation is too large.
        if size == 0 {
            return Err(CapacityOverflow.into());
        }
        // Over-reserve so a suitably aligned pointer can be carved out when
        // the required alignment exceeds the page size.
        let align = cmp::max(header_layout.align(), elem_align);
        let align_size = cmp::max(align, page_size) - page_size;
        let size = align_size.checked_add(size).ok_or(CapacityOverflow)?;
        let allocation = Allocation::new(size).map_err(AllocError)?;
        let aligned_ptr = allocation.ptr().map_addr(|addr| align_up(addr, align));
        // Commit whole pages covering the header plus the initial capacity.
        let initial_size = align_up(elements_offset + capacity * elem_size, page_size);
        if initial_size != 0 {
            allocation
                .commit(aligned_ptr, initial_size)
                .map_err(AllocError)?;
        }
        let elements = aligned_ptr.wrapping_add(elements_offset).cast();
        // The usable capacity is everything committed past the header
        // (page rounding may yield more than requested); ZSTs get the
        // maximum capacity from the start.
        let capacity = if elem_size == 0 {
            max_capacity
        } else {
            (initial_size - elements_offset) / elem_size
        };
        Ok(VecInner {
            elements,
            capacity,
            len: 0,
            max_capacity,
            growth_strategy,
            allocation,
        })
    }

    /// A `VecInner` with no real allocation and zero maximum capacity.
    #[inline]
    const fn dangling(elem_align: usize) -> Self {
        let allocation = Allocation::dangling(elem_align);
        VecInner {
            elements: allocation.ptr().cast(),
            capacity: 0,
            len: 0,
            max_capacity: 0,
            growth_strategy: GrowthStrategy::new(),
            allocation,
        }
    }

    /// Ensures capacity for `additional` more elements, panicking on failure.
    ///
    /// # Safety
    ///
    /// `elem_size` must match the element type used with this `VecInner`.
    #[inline]
    #[track_caller]
    unsafe fn reserve(&mut self, additional: usize, elem_size: usize) {
        if self.needs_to_grow(additional) {
            // SAFETY: contract forwarded from the caller.
            unsafe { self.reserve_slow(additional, elem_size) };
        }
    }

    /// Out-of-line slow path of `reserve`.
    ///
    /// # Safety
    ///
    /// `elem_size` must match the element type used with this `VecInner`.
    #[cold]
    #[track_caller]
    unsafe fn reserve_slow(&mut self, additional: usize, elem_size: usize) {
        // SAFETY: contract forwarded from the caller.
        if let Err(err) = unsafe { self.grow(additional, elem_size) } {
            handle_error(err);
        }
    }

    /// Like `reserve`, but without strategy-driven over-commit.
    ///
    /// # Safety
    ///
    /// `elem_size` must match the element type used with this `VecInner`.
    #[track_caller]
    unsafe fn reserve_exact(&mut self, additional: usize, elem_size: usize) {
        // SAFETY: contract forwarded from the caller.
        if let Err(err) = unsafe { self.try_reserve_exact(additional, elem_size) } {
            handle_error(err);
        }
    }

    /// Fallible `reserve`.
    ///
    /// # Safety
    ///
    /// `elem_size` must match the element type used with this `VecInner`.
    unsafe fn try_reserve(
        &mut self,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        if self.needs_to_grow(additional) {
            // SAFETY: contract forwarded from the caller.
            unsafe { self.grow(additional, elem_size) }
        } else {
            Ok(())
        }
    }

    /// Fallible `reserve_exact`.
    ///
    /// # Safety
    ///
    /// `elem_size` must match the element type used with this `VecInner`.
    unsafe fn try_reserve_exact(
        &mut self,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        if self.needs_to_grow(additional) {
            // SAFETY: contract forwarded from the caller.
            unsafe { self.grow_exact(additional, elem_size) }
        } else {
            Ok(())
        }
    }

    /// Returns `true` if `additional` elements do not fit in the spare
    /// (committed) capacity. `len <= capacity` keeps the subtraction safe.
    #[inline]
    fn needs_to_grow(&self, additional: usize) -> bool {
        additional > self.capacity - self.len
    }

    /// Grows to fit one more element, panicking on failure.
    ///
    /// # Safety
    ///
    /// `elem_size` must match the element type used with this `VecInner`.
    #[track_caller]
    unsafe fn grow_one(&mut self, elem_size: usize) {
        // SAFETY: contract forwarded from the caller.
        if let Err(err) = unsafe { self.grow(1, elem_size) } {
            handle_error(err);
        }
    }

    /// Grows with strategy-driven over-commit.
    ///
    /// # Safety
    ///
    /// `elem_size` must match the element type used with this `VecInner`.
    unsafe fn grow(&mut self, additional: usize, elem_size: usize) -> Result<(), TryReserveError> {
        // SAFETY: contract forwarded from the caller.
        unsafe { self.grow_inner(additional, false, elem_size) }
    }

    /// Grows exactly as much as required.
    ///
    /// # Safety
    ///
    /// `elem_size` must match the element type used with this `VecInner`.
    unsafe fn grow_exact(
        &mut self,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        // SAFETY: contract forwarded from the caller.
        unsafe { self.grow_inner(additional, true, elem_size) }
    }

    /// Commits additional memory so that at least `len + additional` elements
    /// fit; when `exact` is `false` the growth strategy may over-commit
    /// (clamped to `max_capacity`) to amortize future growth.
    ///
    /// # Safety
    ///
    /// `elem_size` must match the element type used with this `VecInner`.
    unsafe fn grow_inner(
        &mut self,
        additional: usize,
        exact: bool,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        debug_assert!(additional > 0);
        // ZSTs start out with `capacity == max_capacity`, so needing to grow
        // means the request exceeds the maximum.
        if elem_size == 0 {
            return Err(CapacityOverflow.into());
        }
        let required_capacity = self.len.checked_add(additional).ok_or(CapacityOverflow)?;
        if required_capacity > self.max_capacity {
            return Err(CapacityOverflow.into());
        }
        let old_capacity = self.capacity;
        let mut new_capacity = required_capacity;
        if !exact {
            // Grow at least as much as the strategy suggests, but never
            // beyond the maximum.
            new_capacity = cmp::max(new_capacity, self.growth_strategy.grow(old_capacity));
            new_capacity = cmp::min(new_capacity, self.max_capacity);
        }
        // Byte offset of the element storage from the allocation's base
        // (header plus any alignment padding).
        let elements_offset = self.elements.addr() - self.allocation.ptr().addr();
        let page_size = page_size();
        let old_size = align_up(elements_offset + old_capacity * elem_size, page_size);
        let new_size = align_up(elements_offset + new_capacity * elem_size, page_size);
        // Commit only the pages beyond what is already committed.
        let ptr = self.allocation.ptr().wrapping_add(old_size);
        let size = new_size - old_size;
        self.allocation.commit(ptr, size).map_err(AllocError)?;
        // Recompute capacity from the page-rounded size so slack in the last
        // committed page is usable, clamped to the maximum.
        let new_capacity = (new_size - elements_offset) / elem_size;
        let new_capacity = cmp::min(new_capacity, self.max_capacity);
        self.capacity = new_capacity;
        Ok(())
    }
}
// Identity and slice conversions, mirroring the std `Vec` impl set.
impl<T> AsRef<Vec<T>> for Vec<T> {
    #[inline]
    fn as_ref(&self) -> &Vec<T> {
        self
    }
}
impl<T> AsMut<Vec<T>> for Vec<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut Vec<T> {
        self
    }
}
impl<T> AsRef<[T]> for Vec<T> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self
    }
}
impl<T> AsMut<[T]> for Vec<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}
impl<T> Borrow<[T]> for Vec<T> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self
    }
}
impl<T> BorrowMut<[T]> for Vec<T> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [T] {
        self
    }
}
impl<T: fmt::Debug> fmt::Debug for Vec<T> {
    /// Formats the vector like a slice, e.g. `[1, 2, 3]`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Same output as delegating to the slice `Debug` impl, which is
        // itself a debug list of the elements.
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<T> Deref for Vec<T> {
    type Target = [T];
    /// Dereferences to the slice of initialized elements.
    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
impl<T> DerefMut for Vec<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}
impl<T> Drop for Vec<T> {
    fn drop(&mut self) {
        // Drop the initialized elements in place; the backing memory itself
        // is released when `inner.allocation` is dropped afterwards.
        let elements = ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len());
        // SAFETY: the first `len` elements are initialized and are never
        // accessed again after this point.
        unsafe { elements.drop_in_place() };
    }
}
// Equality comparisons delegate to the slice contents, mirroring the impl set
// that std provides for its `Vec` (vector vs vector, slices, arrays, `Cow`).
impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for Vec<T> {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == **other
    }
}
impl<T: PartialEq<U>, U> PartialEq<&[U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&[U]) -> bool {
        **self == **other
    }
}
impl<T: PartialEq<U>, U> PartialEq<&mut [U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&mut [U]) -> bool {
        **self == **other
    }
}
impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for &[T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == **other
    }
}
impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for &mut [T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == **other
    }
}
impl<T: PartialEq<U>, U> PartialEq<[U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        **self == *other
    }
}
impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for [T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        *self == **other
    }
}
// Only available with the `alloc` feature, since `Cow` lives in `alloc`.
#[cfg(feature = "alloc")]
impl<T: PartialEq<U> + Clone, U> PartialEq<Vec<U>> for alloc::borrow::Cow<'_, [T]> {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == **other
    }
}
impl<T: PartialEq<U>, U, const N: usize> PartialEq<[U; N]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &[U; N]) -> bool {
        **self == *other
    }
}
impl<T: PartialEq<U>, U, const N: usize> PartialEq<&[U; N]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&[U; N]) -> bool {
        **self == **other
    }
}
impl<T: Eq> Eq for Vec<T> {}
impl<T> Extend<T> for Vec<T> {
    /// Appends every element yielded by `iter`, growing as needed.
    ///
    /// # Panics
    ///
    /// Panics if a push exceeds the maximum capacity or growing fails.
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        iter.into_iter().for_each(|value| self.push(value));
    }
}
impl<'a, T: Copy> Extend<&'a T> for Vec<T> {
    /// Appends copies of every element yielded by `iter`.
    ///
    /// # Panics
    ///
    /// Panics if a push exceeds the maximum capacity or growing fails.
    #[inline]
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        iter.into_iter().copied().for_each(|value| self.push(value));
    }
}
impl<T: Hash> Hash for Vec<T> {
    /// Hashes the vector as a slice of its elements, consistent with the
    /// `Borrow<[T]>` impl as the `Borrow` contract requires.
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&**self, state);
    }
}
impl<T, I: SliceIndex<[T]>> Index<I> for Vec<T> {
    type Output = I::Output;
    /// Indexes into the element slice; panics on out-of-bounds access.
    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        Index::index(&**self, index)
    }
}
impl<T, I: SliceIndex<[T]>> IndexMut<I> for Vec<T> {
    /// Mutably indexes into the element slice; panics on out-of-bounds access.
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        IndexMut::index_mut(&mut **self, index)
    }
}
impl<'a, T> IntoIterator for &'a Vec<T> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;
    /// Iterates over shared references to the elements.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a, T> IntoIterator for &'a mut Vec<T> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;
    /// Iterates over mutable references to the elements.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
impl<T> IntoIterator for Vec<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;
    /// Consumes the vector and yields its elements by value.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}
impl<T: PartialOrd> PartialOrd for Vec<T> {
    /// Lexicographic comparison of the element slices.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}
impl<T: Ord> Ord for Vec<T> {
    /// Lexicographic comparison of the element slices.
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        Ord::cmp(&**self, &**other)
    }
}
/// An owning iterator over the elements of a [`Vec`], returned by
/// [`IntoIterator::into_iter`].
pub struct IntoIter<T> {
    // Next element to yield from the front; for ZSTs this never moves.
    start: *const T,
    // One past the last remaining element. For ZSTs the byte distance
    // `end - start` (via `u8` casts) encodes the remaining length instead.
    end: *const T,
    // Keeps the backing memory alive for the iterator's lifetime; only its
    // `Drop` impl is used.
    #[allow(dead_code)]
    allocation: Allocation,
    marker: PhantomData<T>,
}
// SAFETY: `IntoIter<T>` owns its remaining elements, so it is `Send`/`Sync`
// exactly when `T` is.
unsafe impl<T: Send> Send for IntoIter<T> {}
unsafe impl<T: Sync> Sync for IntoIter<T> {}
// Raw pointers are not `UnwindSafe` by default; opt back in when `T` is.
impl<T: UnwindSafe> UnwindSafe for IntoIter<T> {}
impl<T> IntoIter<T> {
    /// Takes ownership of `vec`'s allocation and element range.
    #[inline]
    fn new(vec: Vec<T>) -> Self {
        // Prevent `vec`'s destructor from running: its elements and its
        // allocation are now owned by the iterator.
        let mut vec = ManuallyDrop::new(vec);
        // SAFETY: `vec` is wrapped in `ManuallyDrop` and never used again
        // below except through raw accessors, so the allocation is moved,
        // not duplicated.
        let allocation = unsafe { ptr::read(&vec.inner.allocation) };
        let start = vec.as_mut_ptr();
        let end = if T::IS_ZST {
            // ZSTs occupy no storage; encode the length as a byte offset so
            // `start != end` while elements remain.
            start.cast::<u8>().wrapping_add(vec.len()).cast::<T>()
        } else {
            // SAFETY: the first `len` elements are initialized, so one past
            // the last of them is in bounds of the same allocation.
            unsafe { start.add(vec.len()) }
        };
        IntoIter {
            start,
            end,
            allocation,
            marker: PhantomData,
        }
    }
    /// Returns the remaining elements as a shared slice.
    #[inline]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: all remaining elements in `start..end` are initialized.
        unsafe { slice::from_raw_parts(self.start, self.len()) }
    }
    /// Returns the remaining elements as a mutable slice.
    #[inline]
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: all remaining elements are initialized and exclusively
        // borrowed through `&mut self`.
        unsafe { slice::from_raw_parts_mut(self.start.cast_mut(), self.len()) }
    }
}
impl<T> AsRef<[T]> for IntoIter<T> {
    /// The remaining (not yet yielded) elements.
    #[inline]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T> AsMut<[T]> for IntoIter<T> {
    /// The remaining (not yet yielded) elements.
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}
impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
    /// Formats as `IntoIter([remaining, elements])`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}
impl<T> Drop for IntoIter<T> {
    fn drop(&mut self) {
        // Drop any elements that were never yielded; the backing memory is
        // freed when `self.allocation` is dropped afterwards.
        let elements = ptr::slice_from_raw_parts_mut(self.start.cast_mut(), self.len());
        // SAFETY: the remaining `len` elements are initialized and owned
        // solely by this iterator.
        unsafe { elements.drop_in_place() };
    }
}
impl<T> Iterator for IntoIter<T> {
    type Item = T;
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }
        let ptr = if T::IS_ZST {
            // For ZSTs the pointers carry no storage; shrink the byte
            // distance `end - start`, which encodes the remaining count.
            self.end = self.end.cast::<u8>().wrapping_sub(1).cast::<T>();
            self.start
        } else {
            let old = self.start;
            // SAFETY: `start != end`, so at least one element remains and
            // `start + 1` stays in bounds (at most one past the end).
            self.start = unsafe { old.add(1) };
            old
        };
        // SAFETY: `ptr` addresses an initialized element that is now
        // logically moved out and will not be read or dropped again.
        Some(unsafe { ptr.read() })
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: the remaining length is always known.
        let len = self.len();
        (len, Some(len))
    }
    #[inline]
    fn count(self) -> usize {
        // Dropping `self` drops the remaining elements; the count is `len`.
        self.len()
    }
}
impl<T> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }
        let ptr = if T::IS_ZST {
            // Shrink the byte-encoded length; any pointer works for a ZST.
            self.end = self.end.cast::<u8>().wrapping_sub(1).cast::<T>();
            self.start
        } else {
            // SAFETY: `start != end`, so at least one element remains and
            // `end - 1` points at the last of them.
            self.end = unsafe { self.end.sub(1) };
            self.end
        };
        // SAFETY: `ptr` addresses an initialized element that is now
        // logically moved out and will not be read or dropped again.
        Some(unsafe { ptr.read() })
    }
}
impl<T> ExactSizeIterator for IntoIter<T> {
    /// Number of elements not yet yielded.
    #[inline]
    fn len(&self) -> usize {
        if T::IS_ZST {
            // The byte distance between the dummy pointers is the count.
            self.end.addr().wrapping_sub(self.start.addr())
        } else {
            // SAFETY: `start` and `end` bound a contiguous run of elements
            // within the same allocation, with `start <= end`.
            unsafe { self.end.offset_from_unsigned(self.start) }
        }
    }
}
// `next` keeps returning `None` once `start == end`, so fusion holds.
impl<T> FusedIterator for IntoIter<T> {}
/// Policy deciding how much capacity a vector commits when it grows.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum GrowthStrategy {
    /// Multiply the old capacity by `numerator / denominator`; the default
    /// is `2 / 1`, i.e. doubling. Must satisfy `numerator > denominator`
    /// and `denominator != 0`.
    Exponential {
        numerator: usize,
        denominator: usize,
    },
    /// Add a fixed, nonzero number of elements each time.
    Linear {
        elements: usize,
    },
}
impl Default for GrowthStrategy {
    /// Equivalent to [`GrowthStrategy::new`]: exponential doubling.
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
impl GrowthStrategy {
    /// The default strategy: double the capacity on each growth (2/1).
    #[inline]
    #[must_use]
    pub const fn new() -> Self {
        Self::Exponential {
            numerator: 2,
            denominator: 1,
        }
    }
    /// Asserts that the strategy actually grows: an exponential factor must
    /// exceed 1 and a linear step must be nonzero.
    ///
    /// # Panics
    ///
    /// Panics if the strategy is degenerate.
    #[track_caller]
    pub(crate) const fn validate(&self) {
        match *self {
            Self::Exponential {
                numerator,
                denominator,
            } => {
                assert!(numerator > denominator);
                assert!(denominator != 0);
            }
            Self::Linear { elements } => {
                assert!(elements != 0);
            }
        }
    }
    /// Suggested new capacity for `old_capacity`, saturating at `usize::MAX`;
    /// the caller clamps the result to the vector's maximum capacity.
    pub(crate) fn grow(&self, old_capacity: usize) -> usize {
        match *self {
            GrowthStrategy::Exponential {
                numerator,
                denominator,
            } => saturating_mul_div(old_capacity, numerator, denominator),
            GrowthStrategy::Linear { elements } => old_capacity.saturating_add(elements),
        }
    }
}
// A double-width unsigned integer: wide enough to hold the full product of
// two `usize` values without overflow.
#[cfg(target_pointer_width = "64")]
type DoubleUsize = u128;
#[cfg(target_pointer_width = "32")]
type DoubleUsize = u64;
#[cfg(target_pointer_width = "16")]
type DoubleUsize = u32;
/// Computes `val * numerator / denominator` (flooring) without intermediate
/// overflow, saturating to `usize::MAX` when the result does not fit.
///
/// # Panics
///
/// Panics if `denominator` is zero.
fn saturating_mul_div(val: usize, numerator: usize, denominator: usize) -> usize {
    // The product of two `usize`s always fits in `DoubleUsize`.
    let product = val as DoubleUsize * numerator as DoubleUsize;
    let quotient = product / denominator as DoubleUsize;
    match usize::try_from(quotient) {
        Ok(value) => value,
        Err(_) => usize::MAX,
    }
}
/// Diverges with the panic appropriate for the given reservation error.
#[cold]
#[track_caller]
pub(crate) fn handle_error(err: TryReserveError) -> ! {
    match err.kind {
        CapacityOverflow => capacity_overflow(),
        AllocError(err) => handle_alloc_error(err),
    }
}
/// Panics with the canonical capacity-overflow message used by std's
/// collections.
// `#[cold]` added for consistency with the sibling error paths
// (`handle_error`, `handle_alloc_error`), which both carry it; this panic
// path should never be treated as hot by the optimizer.
#[cold]
#[inline(never)]
#[track_caller]
pub(crate) fn capacity_overflow() -> ! {
    panic!("capacity overflow");
}
/// Panics reporting an allocation failure from the operating system.
#[allow(clippy::needless_pass_by_value)]
#[cold]
#[inline(never)]
#[track_caller]
pub(crate) fn handle_alloc_error(err: Error) -> ! {
    panic!("allocation failed: {err}");
}
/// Error returned by the `try_reserve` family of methods and
/// [`VecBuilder::try_build`] when a capacity computation overflows or memory
/// cannot be committed.
#[derive(Debug)]
pub struct TryReserveError {
    pub(crate) kind: TryReserveErrorKind,
}
impl From<TryReserveErrorKind> for TryReserveError {
    /// Wraps the internal kind in the public error type (enables `?` and
    /// `.into()` on the kind throughout this module).
    #[inline]
    fn from(kind: TryReserveErrorKind) -> Self {
        TryReserveError { kind }
    }
}
/// The reason a reservation failed.
#[derive(Debug)]
pub(crate) enum TryReserveErrorKind {
    /// The requested capacity overflowed or exceeded the collection maximum.
    CapacityOverflow,
    /// The operating system reported an allocation/commit failure.
    AllocError(Error),
}
impl fmt::Display for TryReserveError {
    /// Writes a human-readable description of the failure reason.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let message = match self.kind {
            CapacityOverflow => {
                "memory allocation failed because the computed capacity exceeded the collection's \
                 maximum"
            }
            AllocError(_) => {
                "memory allocation failed because the operating system returned an error"
            }
        };
        f.write_str(message)
    }
}
impl core::error::Error for TryReserveError {
    fn source(&self) -> Option<&(dyn core::error::Error + 'static)> {
        match &self.kind {
            // Overflow is computed locally; there is no underlying cause.
            TryReserveErrorKind::CapacityOverflow => None,
            // Expose the underlying OS error as the source.
            TryReserveErrorKind::AllocError(err) => Some(err),
        }
    }
}