use crate::{
Allocation, SizedTypeProperties, align_up, assert_unsafe_precondition, page_size,
vec::{
GrowthStrategy, TryReserveError,
TryReserveErrorKind::{AllocError, CapacityOverflow},
capacity_overflow, handle_error,
},
};
use core::{
alloc::Layout,
borrow::{Borrow, BorrowMut},
cell::UnsafeCell,
cmp, fmt,
hash::{Hash, Hasher},
iter::FusedIterator,
marker::PhantomData,
mem::{ManuallyDrop, MaybeUninit},
ops::{Deref, DerefMut, Index, IndexMut},
panic::RefUnwindSafe,
ptr,
slice::{self, SliceIndex},
sync::atomic::{
AtomicBool, AtomicUsize,
Ordering::{Acquire, Relaxed, Release},
},
};
pub mod raw;
/// Configuration builder for a [`Vec`], obtained via [`Vec::builder`].
#[derive(Clone, Copy, Debug)]
pub struct VecBuilder {
    // Hard upper bound on the element count; address space for this many
    // elements is reserved up front and never moves.
    max_capacity: usize,
    // Number of elements to commit physical memory for immediately.
    capacity: usize,
    // Policy deciding how far to extend the committed region on growth.
    growth_strategy: GrowthStrategy,
}
impl VecBuilder {
#[inline]
const fn new(max_capacity: usize) -> Self {
VecBuilder {
max_capacity,
capacity: 0,
growth_strategy: GrowthStrategy::new(),
}
}
#[inline]
pub const fn capacity(&mut self, capacity: usize) -> &mut Self {
self.capacity = capacity;
self
}
#[inline]
#[track_caller]
pub const fn growth_strategy(&mut self, growth_strategy: GrowthStrategy) -> &mut Self {
growth_strategy.validate();
self.growth_strategy = growth_strategy;
self
}
#[must_use]
#[track_caller]
pub fn build<T>(&self) -> Vec<T> {
match self.try_build() {
Ok(vec) => vec,
Err(err) => handle_error(err),
}
}
pub fn try_build<T>(&self) -> Result<Vec<T>, TryReserveError> {
Ok(Vec {
inner: RawVec {
inner: unsafe {
RawVecInner::new(
self.max_capacity,
self.capacity,
self.growth_strategy,
Layout::new::<()>(),
size_of::<T>(),
align_of::<T>(),
)
}?,
marker: PhantomData,
},
})
}
}
/// An append-only vector that supports pushing through a shared reference.
///
/// Elements are stored as [`Slot<T>`]s whose `occupied` flag publishes the
/// initialized payload to concurrent readers.
pub struct Vec<T> {
    inner: RawVec<Slot<T>>,
}

// SAFETY: moving the vector to another thread moves the owned `T`s with it,
// so `T: Send` suffices.
unsafe impl<T: Send> Send for Vec<T> {}

// SAFETY: a shared `&Vec<T>` permits both reading elements (`T: Sync`) and
// pushing new ones from any thread (`T: Send`), hence both bounds.
unsafe impl<T: Send + Sync> Sync for Vec<T> {}

impl Vec<()> {
    /// Returns a builder for configuring a new `Vec` with the given maximum
    /// capacity.
    #[inline]
    #[must_use]
    pub const fn builder(max_capacity: usize) -> VecBuilder {
        VecBuilder::new(max_capacity)
    }
}
impl<T> Vec<T> {
    /// Creates a vector able to hold at most `max_capacity` elements,
    /// diverging via `handle_error` on failure.
    #[must_use]
    #[track_caller]
    pub fn new(max_capacity: usize) -> Self {
        Vec {
            // SAFETY: `Slot<T>` is valid when zero-initialized (`occupied`
            // is `false`; the payload stays `MaybeUninit`). NOTE(review):
            // assumes that is the contract of `unsafe RawVec::new` — confirm
            // against `raw::VecBuilder`.
            inner: unsafe { RawVec::new(max_capacity) },
        }
    }

    /// Creates a placeholder vector with no backing allocation and zero
    /// capacity.
    #[inline]
    #[must_use]
    pub const fn dangling() -> Self {
        Vec {
            inner: RawVec::dangling(),
        }
    }

    /// Returns the maximum number of elements the vector can ever hold.
    #[inline]
    #[must_use]
    pub fn max_capacity(&self) -> usize {
        self.inner.max_capacity()
    }

    /// Returns the number of elements memory is currently committed for.
    #[inline]
    #[must_use]
    pub fn capacity(&self) -> usize {
        self.inner.capacity()
    }

    /// Returns the number of pushed elements.
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.inner.len()
    }

    /// Returns `true` if no elements have been pushed.
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }

    /// Appends `value` through a shared reference, returning its index.
    #[inline]
    #[track_caller]
    pub fn push(&self, value: T) -> usize {
        self.push_with(|_| value)
    }

    /// Appends the element produced by `f` (which receives the element's
    /// index) through a shared reference, returning that index.
    #[inline]
    #[track_caller]
    pub fn push_with(&self, f: impl FnOnce(usize) -> T) -> usize {
        let (index, slot) = self.inner.push();
        // Initialize the payload first, then publish it: the `Release` store
        // of `occupied` pairs with the `Acquire` loads in readers so they
        // observe the fully written value.
        unsafe { &mut *slot.value.get() }.write(f(index));
        unsafe { slot.occupied.store(true, Release) };
        index
    }

    /// Appends `value` through an exclusive reference, returning its index.
    #[inline]
    #[track_caller]
    pub fn push_mut(&mut self, value: T) -> usize {
        self.push_with_mut(|_| value)
    }

    /// Appends the element produced by `f` through an exclusive reference;
    /// no atomics are needed since access is unique.
    #[inline]
    #[track_caller]
    pub fn push_with_mut(&mut self, f: impl FnOnce(usize) -> T) -> usize {
        let (index, slot) = self.inner.push_mut();
        slot.value.get_mut().write(f(index));
        unsafe { *slot.occupied.get_mut() = true };
        index
    }

    /// Returns a reference to the element at `index`, or `None` if the slot
    /// is out of the committed range or not yet initialized.
    #[inline]
    #[must_use]
    pub fn get(&self, index: usize) -> Option<&T> {
        self.inner.as_capacity().get(index)?.value()
    }

    /// Mutable counterpart of [`Self::get`].
    #[inline]
    #[must_use]
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        self.inner.as_mut_capacity().get_mut(index)?.value_mut()
    }

    /// Returns a reference to the element at `index` without checking.
    ///
    /// # Safety
    ///
    /// `index` must refer to an initialized element within the committed
    /// capacity.
    #[inline]
    #[must_use]
    pub unsafe fn get_unchecked(&self, index: usize) -> &T {
        let slot = unsafe { self.inner.as_capacity().get_unchecked(index) };
        let occupied = slot.occupied.load(Acquire);
        assert_unsafe_precondition!(
            occupied,
            "`Vec::get_unchecked` requires that `index` refers to an initialized element",
        );
        unsafe { slot.value_unchecked() }
    }

    /// Mutable counterpart of [`Self::get_unchecked`].
    ///
    /// # Safety
    ///
    /// `index` must refer to an initialized element within the committed
    /// capacity.
    #[inline]
    #[must_use]
    pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T {
        let slot = unsafe { self.inner.as_mut_capacity().get_unchecked_mut(index) };
        assert_unsafe_precondition!(
            *slot.occupied.get_mut(),
            // Fixed: message previously named `Vec::get_unchecked`.
            "`Vec::get_unchecked_mut` requires that `index` refers to an initialized element",
        );
        unsafe { slot.value_unchecked_mut() }
    }

    /// Returns an iterator over the initialized elements.
    #[inline]
    pub fn iter(&self) -> Iter<'_, T> {
        Iter::new(self)
    }

    /// Returns a mutable iterator over the initialized elements.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
        IterMut {
            inner: self.inner.iter_mut(),
        }
    }

    /// Commits memory for at least `additional` more elements, with
    /// amortized over-allocation.
    #[track_caller]
    pub fn reserve(&self, additional: usize) {
        self.inner.reserve(additional);
    }

    /// Commits memory for exactly `additional` more elements.
    #[track_caller]
    pub fn reserve_exact(&self, additional: usize) {
        self.inner.reserve_exact(additional);
    }

    /// Fallible version of [`Self::reserve`].
    pub fn try_reserve(&self, additional: usize) -> Result<(), TryReserveError> {
        self.inner.try_reserve(additional)
    }

    /// Fallible version of [`Self::reserve_exact`].
    pub fn try_reserve_exact(&self, additional: usize) -> Result<(), TryReserveError> {
        self.inner.try_reserve_exact(additional)
    }
}
impl<T> AsRef<Vec<T>> for Vec<T> {
    #[inline]
    fn as_ref(&self) -> &Vec<T> {
        self
    }
}

impl<T> AsMut<Vec<T>> for Vec<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut Vec<T> {
        self
    }
}

// Delegates to the inner `RawVec<Slot<T>>`, which prints the first `len`
// slots; each slot formats as its `Option` value.
impl<T: fmt::Debug> fmt::Debug for Vec<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.inner, f)
    }
}

// Slot-wise equality: compares the inner slot slices, so vacant slots only
// equal vacant slots.
impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for Vec<T> {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        self.inner == other.inner
    }
}

impl<T: PartialEq<U>, U> PartialEq<&[U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&[U]) -> bool {
        *self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<&mut [U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&mut [U]) -> bool {
        *self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for &[T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == *other
    }
}

impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for &mut [T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == *other
    }
}
// A `Vec` equals a slice when the lengths match and every position holds an
// initialized element equal to the slice's element; any vacant slot makes
// the comparison fail.
impl<T: PartialEq<U>, U> PartialEq<[U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        if self.len() != other.len() {
            return false;
        }
        other
            .iter()
            .enumerate()
            .all(|(index, rhs)| self.get(index).map_or(false, |lhs| *lhs == *rhs))
    }
}
// Mirror of `PartialEq<[U]> for Vec<T>`: every slice element must have an
// initialized, equal counterpart in the vector.
impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for [T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        if other.len() != self.len() {
            return false;
        }
        self.iter()
            .enumerate()
            .all(|(index, lhs)| other.get(index).map_or(false, |rhs| *lhs == *rhs))
    }
}
#[cfg(feature = "alloc")]
impl<T: PartialEq<U> + Clone, U> PartialEq<Vec<U>> for alloc::borrow::Cow<'_, [T]> {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == *other
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<[U; N]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &[U; N]) -> bool {
        *self == *other.as_slice()
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<&[U; N]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&[U; N]) -> bool {
        *self == *other.as_slice()
    }
}

impl<T: Eq> Eq for Vec<T> {}

// Extension uses `push_mut`, so it requires exclusive access and no atomics.
impl<T> Extend<T> for Vec<T> {
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        for elem in iter {
            self.push_mut(elem);
        }
    }
}

impl<'a, T: Copy> Extend<&'a T> for Vec<T> {
    #[inline]
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        for &elem in iter {
            self.push_mut(elem);
        }
    }
}

// Hashes the inner slot slice (each slot hashes as its `Option` value).
impl<T: Hash> Hash for Vec<T> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&*self.inner, state);
    }
}
impl<T> Index<usize> for Vec<T> {
    type Output = T;

    /// Panics if `index` is out of bounds or the slot is uninitialized.
    #[inline]
    #[track_caller]
    fn index(&self, index: usize) -> &Self::Output {
        if let Some(value) = self.get(index) {
            value
        } else {
            invalid_index(index)
        }
    }
}

impl<T> IndexMut<usize> for Vec<T> {
    /// Panics if `index` is out of bounds or the slot is uninitialized.
    #[inline]
    #[track_caller]
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        if let Some(value) = self.get_mut(index) {
            value
        } else {
            invalid_index(index)
        }
    }
}

// Shared panic path, kept out of line so indexing stays small and hot.
#[cold]
#[inline(never)]
#[track_caller]
fn invalid_index(index: usize) -> ! {
    panic!("index {index} is out of bounds or not yet initialized");
}
impl<'a, T> IntoIterator for &'a Vec<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T> IntoIterator for &'a mut Vec<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<T> IntoIterator for Vec<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}

// Ordering compares the inner slot slices (slot-wise `Option` ordering).
impl<T: PartialOrd> PartialOrd for Vec<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&self.inner, &other.inner)
    }
}

impl<T: Ord> Ord for Vec<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        Ord::cmp(&self.inner, &other.inner)
    }
}
/// One element cell: an atomic `occupied` flag that publishes the
/// possibly-uninitialized payload to concurrent readers.
struct Slot<T> {
    // `true` only after `value` has been fully written (Release/Acquire
    // pairing in `Vec::push_with` / `Slot::value`).
    occupied: AtomicBool,
    value: UnsafeCell<MaybeUninit<T>>,
}

// `UnsafeCell` removes the auto impl; reinstated because the payload is only
// exposed once `occupied` publishes it. NOTE(review): soundness argument
// assumed from the flag discipline — confirm.
impl<T: RefUnwindSafe> RefUnwindSafe for Slot<T> {}

impl<T> Slot<T> {
    /// Returns the value if the slot has been published; the `Acquire` load
    /// synchronizes with the writer's `Release` store.
    #[inline(always)]
    fn value(&self) -> Option<&T> {
        if self.occupied.load(Acquire) {
            Some(unsafe { self.value_unchecked() })
        } else {
            None
        }
    }

    /// Non-atomic variant of [`Self::value`]; exclusive access makes the
    /// plain flag read sufficient.
    #[inline(always)]
    fn value_mut(&mut self) -> Option<&mut T> {
        if *self.occupied.get_mut() {
            Some(unsafe { self.value_unchecked_mut() })
        } else {
            None
        }
    }

    /// # Safety
    ///
    /// The slot must be occupied (value initialized and published).
    #[inline(always)]
    unsafe fn value_unchecked(&self) -> &T {
        let value = unsafe { &*self.value.get() };
        unsafe { value.assume_init_ref() }
    }

    /// # Safety
    ///
    /// The slot must be occupied.
    #[inline(always)]
    unsafe fn value_unchecked_mut(&mut self) -> &mut T {
        unsafe { self.value.get_mut().assume_init_mut() }
    }
}
impl<T: Clone> Clone for Slot<T> {
    /// Clones an occupied slot's value; a vacant slot clones to a fresh
    /// vacant slot.
    #[inline]
    fn clone(&self) -> Self {
        match self.value() {
            Some(value) => Slot {
                occupied: AtomicBool::new(true),
                value: UnsafeCell::new(MaybeUninit::new(value.clone())),
            },
            None => Slot {
                occupied: AtomicBool::new(false),
                value: UnsafeCell::new(MaybeUninit::uninit()),
            },
        }
    }
}
// Formats as the slot's `Option` value (`Some(..)` or `None`).
impl<T: fmt::Debug> fmt::Debug for Slot<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.value(), f)
    }
}

impl<T> Drop for Slot<T> {
    fn drop(&mut self) {
        // Only an occupied slot holds an initialized payload to drop.
        if *self.occupied.get_mut() {
            unsafe { self.value.get_mut().assume_init_drop() };
        }
    }
}
impl<T: PartialEq<U>, U> PartialEq<Slot<U>> for Slot<T> {
    /// Two slots are equal when both are vacant, or both are occupied with
    /// equal values.
    #[inline]
    fn eq(&self, other: &Slot<U>) -> bool {
        match (self.value(), other.value()) {
            (Some(lhs), Some(rhs)) => *lhs == *rhs,
            (None, None) => true,
            _ => false,
        }
    }
}
impl<T: Eq> Eq for Slot<T> {}

// Hash, PartialOrd and Ord all operate on the slot's `Option` value, so
// vacant slots hash/compare like `None`.
impl<T: Hash> Hash for Slot<T> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.value().hash(state);
    }
}

impl<T: PartialOrd> PartialOrd for Slot<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&self.value(), &other.value())
    }
}

impl<T: Ord> Ord for Slot<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        Ord::cmp(&self.value(), &other.value())
    }
}
/// Shared iterator over a [`Vec`]'s initialized elements.
///
/// Stored as type-erased half-open pointer range `[start, end)` over
/// `Slot<T>`; vacant slots are skipped during iteration.
pub struct Iter<'a, T> {
    start: *const (),
    end: *const (),
    marker: PhantomData<&'a T>,
}

// SAFETY: the iterator only yields `&T`, so sharing/sending it across
// threads is equivalent to sharing `&T`, which requires `T: Sync`.
unsafe impl<T: Sync> Send for Iter<'_, T> {}
unsafe impl<T: Sync> Sync for Iter<'_, T> {}

impl<T> Iter<'_, T> {
    #[inline]
    fn new(vec: &Vec<T>) -> Self {
        let start = vec.inner.as_ptr();
        // `len` can transiently exceed `capacity` during concurrent pushes;
        // clamp so the range never leaves committed memory.
        let len = cmp::min(vec.len(), vec.capacity());
        let end = unsafe { start.add(len) };
        Iter {
            start: start.cast(),
            end: end.cast(),
            marker: PhantomData,
        }
    }

    // Typed views of the erased cursor pointers.
    #[inline]
    fn start(&self) -> *const Slot<T> {
        self.start.cast()
    }

    #[inline]
    fn end(&self) -> *const Slot<T> {
        self.end.cast()
    }

    /// Number of slots (occupied or not) remaining in the range.
    #[inline]
    fn len(&self) -> usize {
        unsafe { self.end().offset_from_unsigned(self.start()) }
    }

    fn as_slice(&self) -> &[Slot<T>] {
        unsafe { slice::from_raw_parts(self.start(), self.len()) }
    }
}
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Adapter that lists only the occupied slots' values, mirroring what
        // the iterator will actually yield.
        struct Elements<'a, T>(&'a [Slot<T>]);

        impl<T: fmt::Debug> fmt::Debug for Elements<'_, T> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.debug_list()
                    .entries(self.0.iter().filter_map(Slot::value))
                    .finish()
            }
        }

        f.debug_tuple("Iter")
            .field(&Elements(self.as_slice()))
            .finish()
    }
}
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Advance the front cursor, skipping vacant slots until an occupied
        // one (or the end of the range) is found.
        loop {
            if self.start == self.end {
                break None;
            }
            let start = self.start();
            self.start = unsafe { start.add(1) }.cast();
            let slot = unsafe { &*start };
            if let Some(value) = slot.value() {
                break Some(value);
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound is 0 because every remaining slot could be vacant.
        (0, Some(self.len()))
    }
}

impl<T> DoubleEndedIterator for Iter<'_, T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        // Same as `next`, walking the back cursor towards the front.
        loop {
            if self.start == self.end {
                break None;
            }
            let end = unsafe { self.end().sub(1) };
            self.end = end.cast();
            let slot = unsafe { &*end };
            if let Some(value) = slot.value() {
                break Some(value);
            }
        }
    }
}

// Once the cursors meet, `next` keeps returning `None`.
impl<T> FusedIterator for Iter<'_, T> {}
/// Exclusive iterator over a [`Vec`]'s initialized elements; wraps a plain
/// slice iterator over the committed `Slot<T>`s and skips vacant slots.
pub struct IterMut<'a, T> {
    inner: slice::IterMut<'a, Slot<T>>,
}

impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let slot = self.inner.next()?;
            if let Some(value) = slot.value_mut() {
                break Some(value);
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound is 0 because every remaining slot could be vacant.
        (0, Some(self.inner.len()))
    }
}

impl<T> DoubleEndedIterator for IterMut<'_, T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        loop {
            let slot = self.inner.next_back()?;
            if let Some(value) = slot.value_mut() {
                break Some(value);
            }
        }
    }
}

impl<T> FusedIterator for IterMut<'_, T> {}
/// Owning iterator over a [`Vec`]'s initialized elements.
///
/// Holds the backing [`Allocation`] alive while yielding values out of the
/// slot range `[start, end)` by `ptr::read`.
pub struct IntoIter<T> {
    start: *mut (),
    end: *mut (),
    // Kept only so the memory outlives the iterator; freed on drop.
    #[allow(dead_code)]
    allocation: Allocation,
    marker: PhantomData<T>,
}

// SAFETY: the iterator owns its `T`s, so moving it moves them (`T: Send`);
// a shared reference only exposes `&T` (`T: Sync`).
unsafe impl<T: Send> Send for IntoIter<T> {}
unsafe impl<T: Sync> Sync for IntoIter<T> {}

impl<T> IntoIter<T> {
    #[inline]
    pub(super) fn new(vec: Vec<T>) -> Self {
        // Disarm the RawVec's destructor and take ownership of the
        // allocation; the iterator now drops elements and memory itself.
        let mut vec = ManuallyDrop::new(vec.inner);
        let allocation = unsafe { ptr::read(&vec.inner.allocation) };
        let start = vec.as_mut_ptr();
        // Clamp as in `Iter::new` so the range stays within committed pages.
        let len = cmp::min(vec.len_mut(), vec.capacity_mut());
        let end = unsafe { start.add(len) };
        IntoIter {
            start: start.cast(),
            end: end.cast(),
            allocation,
            marker: PhantomData,
        }
    }

    #[inline]
    fn start(&self) -> *mut Slot<T> {
        self.start.cast()
    }

    #[inline]
    fn end(&self) -> *mut Slot<T> {
        self.end.cast()
    }

    /// Number of slots (occupied or not) remaining in the range.
    #[inline]
    fn len(&self) -> usize {
        unsafe { self.end().offset_from_unsigned(self.start()) }
    }

    fn as_slice(&self) -> &[Slot<T>] {
        unsafe { slice::from_raw_parts(self.start(), self.len()) }
    }
}
impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Lists only the occupied slots' values, matching what remains to be
        // yielded.
        struct Elements<'a, T>(&'a [Slot<T>]);

        impl<T: fmt::Debug> fmt::Debug for Elements<'_, T> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.debug_list()
                    .entries(self.0.iter().filter_map(Slot::value))
                    .finish()
            }
        }

        f.debug_tuple("IntoIter")
            .field(&Elements(self.as_slice()))
            .finish()
    }
}

impl<T> Drop for IntoIter<T> {
    fn drop(&mut self) {
        // Drop only the slots not yet yielded ([start, end)); already
        // yielded values were moved out with `ptr::read`. The `allocation`
        // field then frees the memory.
        let elements = ptr::slice_from_raw_parts_mut(self.start(), self.len());
        unsafe { elements.drop_in_place() };
    }
}
impl<T> Iterator for IntoIter<T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            if self.start == self.end {
                break None;
            }
            let start = self.start();
            // Advance past the slot before reading; the moved-out slot is
            // now outside [start, end) and will not be dropped again.
            self.start = unsafe { start.add(1) }.cast();
            let slot = unsafe { &*start };
            if let Some(value) = slot.value() {
                break Some(unsafe { ptr::read(value) });
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound is 0 because every remaining slot could be vacant.
        (0, Some(self.len()))
    }
}

impl<T> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        loop {
            if self.start == self.end {
                break None;
            }
            let end = unsafe { self.end().sub(1) };
            self.end = end.cast();
            let slot = unsafe { &*end };
            if let Some(value) = slot.value() {
                break Some(unsafe { ptr::read(value) });
            }
        }
    }
}

impl<T> FusedIterator for IntoIter<T> {}
/// Growable buffer of `T` backed by a fixed virtual-memory reservation;
/// growth commits more pages instead of reallocating, so the element pointer
/// never moves.
pub struct RawVec<T> {
    inner: RawVecInner,
    marker: PhantomData<T>,
}

// Type-erased state shared by all `RawVec<T>` instantiations.
struct RawVecInner {
    // First element; fixed for the lifetime of the vector.
    elements: *mut (),
    // Number of elements currently backed by committed memory.
    capacity: AtomicUsize,
    // Number of pushed elements; may transiently exceed `capacity` during a
    // concurrent push (see `push`), so readers clamp with `min`.
    len: AtomicUsize,
    max_capacity: usize,
    growth_strategy: GrowthStrategy,
    allocation: Allocation,
}

impl RawVec<()> {
    /// Returns a builder for configuring a new `RawVec` with the given
    /// maximum capacity.
    #[inline]
    #[must_use]
    pub const fn builder(max_capacity: usize) -> raw::VecBuilder {
        raw::VecBuilder::new(max_capacity)
    }
}
impl<T> RawVec<T> {
    /// Creates a buffer able to hold at most `max_capacity` elements.
    ///
    /// # Safety
    ///
    /// NOTE(review): contract inherited from `raw::VecBuilder::build` —
    /// presumably `T` must tolerate the zero-initialized committed memory;
    /// confirm against the `raw` module.
    #[must_use]
    #[track_caller]
    pub unsafe fn new(max_capacity: usize) -> Self {
        unsafe { RawVec::builder(max_capacity).build() }
    }

    /// Creates a placeholder with no backing allocation and zero capacity.
    #[inline]
    #[must_use]
    pub const fn dangling() -> Self {
        RawVec {
            inner: RawVecInner::dangling(align_of::<T>()),
            marker: PhantomData,
        }
    }

    /// View of all committed slots, initialized or not.
    #[inline]
    #[must_use]
    pub fn as_capacity(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.as_ptr(), self.capacity()) }
    }

    /// Mutable view of all committed slots.
    #[inline]
    #[must_use]
    pub fn as_mut_capacity(&mut self) -> &mut [T] {
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.capacity_mut()) }
    }

    /// View of the pushed elements, clamped to the committed capacity
    /// because `len` can transiently exceed it during concurrent pushes.
    #[inline]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        let len = cmp::min(self.len(), self.capacity());
        unsafe { slice::from_raw_parts(self.as_ptr(), len) }
    }

    /// Mutable view of the pushed elements.
    #[inline]
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        let len = cmp::min(self.len_mut(), self.capacity_mut());
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), len) }
    }

    /// Pointer to the first element; stable for the vector's lifetime.
    #[inline]
    #[must_use]
    pub fn as_ptr(&self) -> *const T {
        self.inner.elements.cast()
    }

    #[inline]
    #[must_use]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.inner.elements.cast()
    }

    #[inline]
    #[must_use]
    pub fn max_capacity(&self) -> usize {
        self.inner.max_capacity
    }

    /// Committed capacity; `Acquire` pairs with the `Release` CAS in
    /// `grow_inner` so the newly committed pages are visible.
    #[inline]
    #[must_use]
    pub fn capacity(&self) -> usize {
        self.inner.capacity.load(Acquire)
    }

    // Non-atomic read; valid because `&mut self` excludes concurrent growth.
    #[inline]
    fn capacity_mut(&mut self) -> usize {
        *self.inner.capacity.get_mut()
    }

    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.inner.len.load(Relaxed)
    }

    #[inline]
    fn len_mut(&mut self) -> usize {
        *self.inner.len.get_mut()
    }

    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Claims the next slot through a shared reference, growing if needed,
    /// and returns its index and a reference to it.
    #[inline]
    #[track_caller]
    pub fn push(&self) -> (usize, &T) {
        if T::IS_ZST {
            // ZSTs have no memory to commit; just CAS the length forward and
            // check `max_capacity` explicitly.
            let mut len = self.inner.len.load(Relaxed);
            loop {
                if len == self.inner.max_capacity {
                    capacity_overflow();
                }
                match self
                    .inner
                    .len
                    .compare_exchange(len, len + 1, Relaxed, Relaxed)
                {
                    Ok(_) => {
                        let slot = unsafe { &*self.as_ptr() };
                        break (len, slot);
                    }
                    Err(new_len) => len = new_len,
                }
            }
        } else {
            // Claim an index first; if it lies beyond the committed
            // capacity, commit more pages before touching the slot. This is
            // why `len` may transiently exceed `capacity`.
            let len = self.inner.len.fetch_add(1, Relaxed);
            if len >= self.inner.capacity.load(Acquire) {
                unsafe { self.grow_one(len) };
            }
            let slot = unsafe { &*self.as_ptr().add(len) };
            (len, slot)
        }
    }

    // Cold out-of-line growth path for `push`.
    #[cold]
    #[inline(never)]
    #[track_caller]
    unsafe fn grow_one(&self, len: usize) {
        unsafe { self.inner.grow_one(len, size_of::<T>()) }
    }

    /// Exclusive-access variant of [`Self::push`]; no atomics required.
    #[inline]
    #[track_caller]
    pub fn push_mut(&mut self) -> (usize, &mut T) {
        let len = self.len_mut();
        if len >= self.capacity_mut() {
            unsafe { self.grow_one_mut() };
        }
        *self.inner.len.get_mut() = len + 1;
        let slot = unsafe { &mut *self.as_mut_ptr().add(len) };
        (len, slot)
    }

    #[cold]
    #[inline(never)]
    #[track_caller]
    unsafe fn grow_one_mut(&mut self) {
        unsafe { self.inner.grow_one_mut(size_of::<T>()) }
    }

    /// Commits memory for at least `additional` more elements (amortized).
    #[track_caller]
    pub fn reserve(&self, additional: usize) {
        unsafe { self.inner.reserve(additional, size_of::<T>()) };
    }

    /// Commits memory for exactly `additional` more elements.
    #[track_caller]
    pub fn reserve_exact(&self, additional: usize) {
        unsafe { self.inner.reserve_exact(additional, size_of::<T>()) };
    }

    /// Fallible version of [`Self::reserve`].
    pub fn try_reserve(&self, additional: usize) -> Result<(), TryReserveError> {
        unsafe { self.inner.try_reserve(additional, size_of::<T>()) }
    }

    /// Fallible version of [`Self::reserve_exact`].
    pub fn try_reserve_exact(&self, additional: usize) -> Result<(), TryReserveError> {
        unsafe { self.inner.try_reserve_exact(additional, size_of::<T>()) }
    }
}
impl RawVecInner {
    /// Reserves address space for `max_capacity` elements of `elem_size`
    /// bytes (preceded by an optional header) and commits pages for the
    /// first `capacity` elements.
    ///
    /// # Safety
    ///
    /// NOTE(review): callers pass the element layout detached from any type;
    /// presumably `elem_size`/`elem_align` must match the type later stored
    /// here — confirm against the `raw` module's builder.
    unsafe fn new(
        max_capacity: usize,
        capacity: usize,
        growth_strategy: GrowthStrategy,
        header_layout: Layout,
        elem_size: usize,
        elem_align: usize,
    ) -> Result<Self, TryReserveError> {
        let size = max_capacity
            .checked_mul(elem_size)
            .ok_or(CapacityOverflow)?;
        #[allow(clippy::cast_possible_wrap)]
        if size > isize::MAX as usize || capacity > max_capacity {
            return Err(CapacityOverflow.into());
        }
        // Nothing to allocate (ZST elements, no header): use a dangling
        // allocation and report full capacity immediately.
        if size == 0 && header_layout.size() == 0 {
            let allocation = Allocation::dangling(elem_align);
            return Ok(RawVecInner {
                elements: allocation.ptr().cast(),
                capacity: AtomicUsize::new(max_capacity),
                len: AtomicUsize::new(0),
                max_capacity,
                growth_strategy,
                allocation,
            });
        }
        // Elements start after the header, aligned for the element type.
        let elements_offset = align_up(header_layout.size(), elem_align);
        let size = elements_offset + size;
        let page_size = page_size();
        let size = align_up(size, page_size);
        if size == 0 {
            return Err(CapacityOverflow.into());
        }
        let align = cmp::max(header_layout.align(), elem_align);
        // Over-reserve so a suitably aligned pointer can be carved out of an
        // allocation that is only page-aligned.
        let align_size = cmp::max(align, page_size) - page_size;
        let size = align_size.checked_add(size).ok_or(CapacityOverflow)?;
        let allocation = Allocation::new(size).map_err(AllocError)?;
        let aligned_ptr = allocation.ptr().map_addr(|addr| align_up(addr, align));
        // Commit whole pages covering header plus the initial `capacity`.
        let initial_size = align_up(elements_offset + capacity * elem_size, page_size);
        if initial_size != 0 {
            allocation
                .commit(aligned_ptr, initial_size)
                .map_err(AllocError)?;
        }
        let elements = aligned_ptr.wrapping_add(elements_offset).cast();
        // Round the committed bytes back down to whole elements; page
        // rounding may have committed room for more than requested.
        let capacity = if elem_size == 0 {
            max_capacity
        } else {
            (initial_size - elements_offset) / elem_size
        };
        Ok(RawVecInner {
            elements,
            capacity: AtomicUsize::new(capacity),
            len: AtomicUsize::new(0),
            max_capacity,
            growth_strategy,
            allocation,
        })
    }

    /// Allocation-free placeholder with zero capacity.
    #[inline]
    const fn dangling(elem_align: usize) -> Self {
        let allocation = Allocation::dangling(elem_align);
        RawVecInner {
            elements: allocation.ptr().cast(),
            capacity: AtomicUsize::new(0),
            len: AtomicUsize::new(0),
            max_capacity: 0,
            growth_strategy: GrowthStrategy::new(),
            allocation,
        }
    }

    #[inline]
    #[track_caller]
    unsafe fn reserve(&self, additional: usize, elem_size: usize) {
        let len = self.len.load(Relaxed);
        if self.needs_to_grow(len, additional) {
            unsafe { self.reserve_slow(len, additional, elem_size) };
        }
    }

    // Out-of-line failure/growth path so `reserve` stays small.
    #[cold]
    #[track_caller]
    unsafe fn reserve_slow(&self, len: usize, additional: usize, elem_size: usize) {
        if let Err(err) = unsafe { self.grow(len, additional, elem_size) } {
            handle_error(err);
        }
    }

    #[track_caller]
    unsafe fn reserve_exact(&self, additional: usize, elem_size: usize) {
        if let Err(err) = unsafe { self.try_reserve_exact(additional, elem_size) } {
            handle_error(err);
        }
    }

    unsafe fn try_reserve(
        &self,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        let len = self.len.load(Relaxed);
        if self.needs_to_grow(len, additional) {
            unsafe { self.grow(len, additional, elem_size) }
        } else {
            Ok(())
        }
    }

    unsafe fn try_reserve_exact(
        &self,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        let len = self.len.load(Relaxed);
        if self.needs_to_grow(len, additional) {
            unsafe { self.grow_exact(len, additional, elem_size) }
        } else {
            Ok(())
        }
    }

    #[inline]
    fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
        let capacity = self.capacity.load(Relaxed);
        // Clamp: `len` can transiently exceed `capacity` mid-push, and the
        // subtraction below must not underflow.
        let len = cmp::min(len, capacity);
        additional > capacity - len
    }

    /// Growth path for a single concurrent push; `len` is the index the push
    /// already claimed via `fetch_add`.
    #[track_caller]
    unsafe fn grow_one(&self, len: usize, elem_size: usize) {
        if let Err(err) = unsafe { self.grow(len, 1, elem_size) } {
            // The claimed index can never become valid once `max_capacity`
            // is reached, so undo the increment before diverging; other
            // failures keep `len` as-is (readers clamp with `capacity`).
            if len >= self.max_capacity {
                self.len.fetch_sub(1, Relaxed);
            }
            handle_error(err);
        }
    }

    #[track_caller]
    unsafe fn grow_one_mut(&mut self, elem_size: usize) {
        let len = *self.len.get_mut();
        if let Err(err) = unsafe { self.grow(len, 1, elem_size) } {
            handle_error(err);
        }
    }

    unsafe fn grow(
        &self,
        len: usize,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        unsafe { self.grow_inner(len, additional, false, elem_size) }
    }

    unsafe fn grow_exact(
        &self,
        len: usize,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        unsafe { self.grow_inner(len, additional, true, elem_size) }
    }

    /// Commits additional pages so at least `len + additional` elements fit,
    /// then publishes the new capacity with a CAS.
    unsafe fn grow_inner(
        &self,
        len: usize,
        additional: usize,
        exact: bool,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        debug_assert!(additional > 0);
        // ZSTs start at full capacity, so reaching here means overflow.
        if elem_size == 0 {
            return Err(CapacityOverflow.into());
        }
        let required_capacity = len.checked_add(additional).ok_or(CapacityOverflow)?;
        if required_capacity > self.max_capacity {
            return Err(CapacityOverflow.into());
        }
        let old_capacity = self.capacity.load(Acquire);
        // Another thread may have grown past our requirement already.
        if old_capacity >= required_capacity {
            return Ok(());
        }
        let mut new_capacity = required_capacity;
        if !exact {
            // Amortize by growing at least as far as the strategy suggests,
            // but never past the fixed reservation.
            new_capacity = cmp::max(new_capacity, self.growth_strategy.grow(old_capacity));
            new_capacity = cmp::min(new_capacity, self.max_capacity);
        }
        let elements_offset = self.elements.addr() - self.allocation.ptr().addr();
        let page_size = page_size();
        // Commit the page range between the old and new high-water marks.
        // Committing an already committed page is assumed idempotent, which
        // makes concurrent growth safe. NOTE(review): confirm `commit`
        // tolerates overlapping ranges.
        let old_size = align_up(elements_offset + old_capacity * elem_size, page_size);
        let new_size = align_up(elements_offset + new_capacity * elem_size, page_size);
        let ptr = self.allocation.ptr().wrapping_add(old_size);
        let size = new_size - old_size;
        self.allocation.commit(ptr, size).map_err(AllocError)?;
        // Round the committed bytes down to whole elements, capped by the
        // reservation.
        let new_capacity = (new_size - elements_offset) / elem_size;
        let new_capacity = cmp::min(new_capacity, self.max_capacity);
        // Publish with `Release` so readers that `Acquire`-load the new
        // capacity also see the committed pages. Losing the race is fine:
        // the winner must have published at least as much.
        if let Err(capacity) =
            self.capacity
                .compare_exchange(old_capacity, new_capacity, Release, Acquire)
        {
            debug_assert!(capacity >= new_capacity);
        }
        Ok(())
    }
}
impl<T> AsRef<RawVec<T>> for RawVec<T> {
    #[inline]
    fn as_ref(&self) -> &RawVec<T> {
        self
    }
}

impl<T> AsMut<RawVec<T>> for RawVec<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut RawVec<T> {
        self
    }
}

// Slice conversions go through `Deref`, i.e. the clamped element slice.
impl<T> AsRef<[T]> for RawVec<T> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self
    }
}

impl<T> AsMut<[T]> for RawVec<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}

impl<T> Borrow<[T]> for RawVec<T> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self
    }
}

impl<T> BorrowMut<[T]> for RawVec<T> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [T] {
        self
    }
}

impl<T: fmt::Debug> fmt::Debug for RawVec<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T> Deref for RawVec<T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T> DerefMut for RawVec<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}

impl<T> Drop for RawVec<T> {
    fn drop(&mut self) {
        // Drop only elements within committed memory; the `Allocation`
        // field's own destructor releases the reservation afterwards.
        let len = cmp::min(self.len_mut(), self.capacity_mut());
        let elements = ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), len);
        unsafe { elements.drop_in_place() };
    }
}
// All comparisons delegate to the dereferenced element slice.
impl<T: PartialEq<U>, U> PartialEq<RawVec<U>> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &RawVec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<&[U]> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &&[U]) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<&mut [U]> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &&mut [U]) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<RawVec<U>> for &[T] {
    #[inline]
    fn eq(&self, other: &RawVec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<RawVec<U>> for &mut [T] {
    #[inline]
    fn eq(&self, other: &RawVec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<[U]> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        **self == *other
    }
}

impl<T: PartialEq<U>, U> PartialEq<RawVec<U>> for [T] {
    #[inline]
    fn eq(&self, other: &RawVec<U>) -> bool {
        *self == **other
    }
}

#[cfg(feature = "alloc")]
impl<T: PartialEq<U> + Clone, U> PartialEq<RawVec<U>> for alloc::borrow::Cow<'_, [T]> {
    #[inline]
    fn eq(&self, other: &RawVec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<[U; N]> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &[U; N]) -> bool {
        **self == *other
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<&[U; N]> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &&[U; N]) -> bool {
        **self == **other
    }
}

impl<T: Eq> Eq for RawVec<T> {}

// Extension writes through the reference returned by `push_mut`.
// NOTE(review): `*slot = elem` runs `Drop` on the slot's previous contents;
// this relies on `push_mut` handing out memory that is valid to drop
// (presumably zero-initialized pages under `RawVec::new`'s unsafe
// contract) — confirm.
impl<T> Extend<T> for RawVec<T> {
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        for elem in iter {
            let (_, slot) = self.push_mut();
            *slot = elem;
        }
    }
}

impl<'a, T: Copy> Extend<&'a T> for RawVec<T> {
    #[inline]
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        for &elem in iter {
            let (_, slot) = self.push_mut();
            *slot = elem;
        }
    }
}

impl<T: Hash> Hash for RawVec<T> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&**self, state);
    }
}

impl<T, I: SliceIndex<[T]>> Index<I> for RawVec<T> {
    type Output = I::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        Index::index(&**self, index)
    }
}

impl<T, I: SliceIndex<[T]>> IndexMut<I> for RawVec<T> {
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        IndexMut::index_mut(&mut **self, index)
    }
}

impl<'a, T> IntoIterator for &'a RawVec<T> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T> IntoIterator for &'a mut RawVec<T> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<T> IntoIterator for RawVec<T> {
    type Item = T;
    type IntoIter = raw::IntoIter<T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        raw::IntoIter::new(self)
    }
}

impl<T: PartialOrd> PartialOrd for RawVec<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}

impl<T: Ord> Ord for RawVec<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        Ord::cmp(&**self, &**other)
    }
}