use crate::{
alloc::{handle_alloc_error, AllocRef, Global},
boxed::Box,
capacity_overflow,
clone::CloneIn,
collections::TryReserveError::{self, AllocError, CapacityOverflow},
handle_reserve_error,
iter::{FromIteratorIn, TryExtend},
raw_vec::RawVec,
};
use core::{
cmp::{self, Ordering},
convert::TryFrom,
fmt,
hash::{self, Hash},
intrinsics::assume,
iter::{FromIterator, FusedIterator, TrustedLen},
mem,
ops::{
self,
Bound::{Excluded, Included, Unbounded},
Index,
IndexMut,
RangeBounds,
},
ptr::{self, NonNull},
slice::{self, SliceIndex},
};
/// A contiguous growable array type, generic over its allocator `A`.
///
/// Invariants maintained by every method below: `len <= buf.capacity()`,
/// and the first `len` slots of the buffer are initialized elements.
pub struct Vec<T, A: AllocRef = Global> {
    // Owns the allocation: pointer, capacity, and the allocator handle.
    buf: RawVec<T, A>,
    // Number of initialized elements at the front of the buffer.
    len: usize,
}
impl<T> Vec<T> {
    /// Creates a new, empty vector using the global allocator.
    /// Does not allocate (`RawVec::NEW` — presumably a dangling, zero-capacity
    /// buffer, matching std's `RawVec` convention; confirm in `raw_vec.rs`).
    #[inline]
    #[must_use]
    pub const fn new() -> Self {
        Self {
            buf: RawVec::NEW,
            len: 0,
        }
    }
    /// Creates an empty vector with space for at least `capacity` elements.
    #[inline]
    #[must_use]
    pub fn with_capacity(capacity: usize) -> Self {
        Self::with_capacity_in(capacity, Global)
    }
    /// Reassembles a vector from raw parts (see `into_raw_parts`).
    ///
    /// # Safety
    /// `ptr` must come from a compatible `Vec`/`RawVec` allocation of the
    /// given `capacity`, and the first `length` elements must be initialized.
    pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self {
        Self {
            buf: RawVec::from_raw_parts(ptr, capacity),
            len: length,
        }
    }
}
impl<T, A: AllocRef> Vec<T, A> {
    /// Creates a new, empty vector in the given allocator; does not allocate.
    #[inline]
    pub fn new_in(a: A) -> Self {
        Self {
            buf: RawVec::new_in(a),
            len: 0,
        }
    }
    /// Creates an empty vector with room for at least `capacity` elements
    /// in the given allocator. Aborts/panics on allocation failure
    /// (see `try_with_capacity_in` for the fallible variant).
    #[inline]
    pub fn with_capacity_in(capacity: usize, a: A) -> Self
    where
        A: AllocRef,
    {
        Self {
            buf: RawVec::with_capacity_in(capacity, a),
            len: 0,
        }
    }
    /// Fallible variant of `with_capacity_in`; reports allocation failure
    /// as a `TryReserveError` instead of aborting.
    #[inline]
    pub fn try_with_capacity_in(capacity: usize, a: A) -> Result<Self, TryReserveError>
    where
        A: AllocRef,
    {
        Ok(Self {
            buf: RawVec::try_with_capacity_in(capacity, a)?,
            len: 0,
        })
    }
    /// Reassembles a vector from raw parts plus its allocator.
    ///
    /// # Safety
    /// Same contract as `Vec::from_raw_parts`, and `alloc` must be the
    /// allocator the memory was obtained from.
    pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self {
        Self {
            buf: RawVec::from_raw_parts_in(ptr, capacity, alloc),
            len: length,
        }
    }
    /// Decomposes the vector into `(pointer, length, capacity)` without
    /// dropping or freeing anything (`ManuallyDrop` suppresses the destructor).
    /// The caller becomes responsible for the memory.
    pub fn into_raw_parts(self) -> (*mut T, usize, usize) {
        let mut me = mem::ManuallyDrop::new(self);
        (me.as_mut_ptr(), me.len(), me.capacity())
    }
    /// Number of elements the vector can hold without reallocating.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.buf.capacity()
    }
    /// Reserves capacity for at least `additional` more elements.
    pub fn reserve(&mut self, additional: usize) {
        self.buf.reserve(self.len, additional);
    }
    /// Reserves the minimum capacity for exactly `additional` more elements.
    pub fn reserve_exact(&mut self, additional: usize) {
        self.buf.reserve_exact(self.len, additional);
    }
    /// Fallible `reserve`.
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.buf.try_reserve(self.len, additional)
    }
    /// Fallible `reserve_exact`.
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.buf.try_reserve_exact(self.len, additional)
    }
    /// Shrinks the capacity down to `len` (no-op if already tight).
    pub fn shrink_to_fit(&mut self) {
        if self.capacity() != self.len {
            self.buf.shrink_to_fit(self.len);
        }
    }
    /// Fallible `shrink_to_fit`.
    pub fn try_shrink_to_fit(&mut self) -> Result<(), TryReserveError> {
        if self.capacity() != self.len {
            self.buf.try_shrink_to_fit(self.len)?;
        }
        Ok(())
    }
    /// Shrinks capacity toward `min_capacity`, never below `len`.
    pub fn shrink_to(&mut self, min_capacity: usize) {
        self.buf.shrink_to_fit(cmp::max(self.len, min_capacity));
    }
    /// Fallible `shrink_to`.
    pub fn try_shrink_to(&mut self, min_capacity: usize) -> Result<(), TryReserveError> {
        self.buf.try_shrink_to_fit(cmp::max(self.len, min_capacity))
    }
    /// Converts into `Box<[T]>`, dropping excess capacity.
    /// Panics/aborts on allocation failure during the shrink.
    pub fn into_boxed_slice(self) -> Box<[T], A> {
        match self.try_into_boxed_slice() {
            Err(CapacityOverflow) => capacity_overflow(),
            Err(AllocError { layout }) => handle_alloc_error(layout),
            Ok(boxed) => boxed,
        }
    }
    /// Fallible `into_boxed_slice`. Shrinks to `len`, then moves the buffer
    /// out by value; `mem::forget(self)` prevents a double-free of `buf`.
    pub fn try_into_boxed_slice(mut self) -> Result<Box<[T], A>, TryReserveError> {
        unsafe {
            self.try_shrink_to_fit()?;
            let len = self.len;
            let buf = ptr::read(&self.buf);
            mem::forget(self);
            Ok(buf.into_box(len).assume_init())
        }
    }
    /// Shortens the vector to `len` elements, dropping the tail.
    /// No-op if `len >= self.len`. Capacity is unchanged.
    pub fn truncate(&mut self, len: usize) {
        unsafe {
            if len > self.len {
                return;
            }
            #[allow(trivial_casts)]
            let s = self.get_unchecked_mut(len..) as *mut _;
            // `self.len` is updated *before* dropping, so a panicking `Drop`
            // cannot leave the vector observing already-dropped elements.
            self.len = len;
            ptr::drop_in_place(s);
        }
    }
    /// Borrows the initialized elements as a slice (via `Deref`).
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        self
    }
    /// Borrows the initialized elements as a mutable slice (via `DerefMut`).
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        self
    }
    /// Raw pointer to the buffer. The `assume` informs the optimizer the
    /// pointer is never null (RawVec uses a dangling, non-null pointer even
    /// when unallocated).
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        let ptr = self.buf.ptr();
        unsafe { assume(!ptr.is_null()) };
        ptr
    }
    /// Mutable raw pointer to the buffer; see `as_ptr`.
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        let ptr = self.buf.ptr();
        unsafe { assume(!ptr.is_null()) };
        ptr
    }
    /// Sets the length directly.
    ///
    /// # Safety
    /// `new_len <= capacity()` and the first `new_len` elements must be
    /// initialized.
    #[inline]
    pub unsafe fn set_len(&mut self, new_len: usize) {
        debug_assert!(new_len <= self.capacity());
        self.len = new_len;
    }
    /// Removes and returns the element at `index`, replacing it with the
    /// last element. O(1); does not preserve order. Panics (via the
    /// bounds-checked `self[index]`) if `index` is out of range.
    #[inline]
    pub fn swap_remove(&mut self, index: usize) -> T {
        unsafe {
            let hole: *mut T = &mut self[index];
            let last = ptr::read(self.get_unchecked(self.len - 1));
            self.len -= 1;
            // When `index == len - 1`, `hole` aliases the last slot;
            // `ptr::replace` still yields the original element correctly.
            ptr::replace(hole, last)
        }
    }
    /// Inserts `element` at `index`, shifting the tail right.
    /// Panics/aborts on allocation failure.
    pub fn insert(&mut self, index: usize, element: T) {
        match self.try_insert(index, element) {
            Err(CapacityOverflow) => capacity_overflow(),
            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
            Ok(()) => { }
        }
    }
    /// Fallible `insert`. Panics if `index > len`.
    pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), TryReserveError> {
        let len = self.len();
        assert!(index <= len);
        if len == self.buf.capacity() {
            self.try_reserve(1)?;
        }
        unsafe {
            {
                let p = self.as_mut_ptr().add(index);
                // Shift everything at `index..len` one slot right, then
                // write the new element into the hole.
                ptr::copy(p, p.offset(1), len - index);
                ptr::write(p, element);
            }
            self.set_len(len + 1);
        }
        Ok(())
    }
    /// Removes and returns the element at `index`, shifting the tail left.
    /// O(n); panics if `index >= len`.
    pub fn remove(&mut self, index: usize) -> T {
        let len = self.len();
        assert!(index < len);
        unsafe {
            let ret;
            {
                let ptr = self.as_mut_ptr().add(index);
                ret = ptr::read(ptr);
                ptr::copy(ptr.offset(1), ptr, len - index - 1);
            }
            self.set_len(len - 1);
            ret
        }
    }
    /// Keeps only elements for which `f` returns true; implemented on top
    /// of `drain_filter` with the predicate inverted.
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        self.drain_filter(|x| !f(x));
    }
    /// Removes consecutive elements mapping to equal keys.
    #[inline]
    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b))
    }
    /// Removes consecutive elements for which `same_bucket` returns true,
    /// keeping the first of each run. Partitions in place, then truncates.
    pub fn dedup_by<F>(&mut self, same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        let len = {
            let (dedup, _) = partition_dedup_by(self.as_mut_slice(), same_bucket);
            dedup.len()
        };
        self.truncate(len);
    }
    /// Appends `value`; panics/aborts on allocation failure.
    #[inline]
    pub fn push(&mut self, value: T) {
        match self.try_push(value) {
            Err(CapacityOverflow) => capacity_overflow(),
            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
            Ok(()) => { }
        }
    }
    /// Appends without a capacity check.
    ///
    /// # Safety (internal)
    /// Caller must ensure `capacity() > len()`.
    unsafe fn push_unchecked(&mut self, value: T)
    where
        A: AllocRef,
    {
        let len = self.len();
        debug_assert!(self.capacity() > len);
        ptr::write(self.get_unchecked_mut(len), value);
        self.set_len(len + 1);
    }
    /// Fallible `push`.
    #[inline]
    pub fn try_push(&mut self, value: T) -> Result<(), TryReserveError> {
        if self.len == self.buf.capacity() {
            self.try_reserve(1)?;
        }
        unsafe {
            self.push_unchecked(value);
        }
        Ok(())
    }
    /// Removes and returns the last element, or `None` if empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            None
        } else {
            unsafe {
                self.len -= 1;
                Some(ptr::read(self.get_unchecked(self.len())))
            }
        }
    }
    /// Moves all elements of `other` into `self`, leaving `other` empty.
    /// Panics/aborts on allocation failure.
    #[inline]
    pub fn append(&mut self, other: &mut Self) {
        match self.try_append(other) {
            Err(CapacityOverflow) => capacity_overflow(),
            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
            Ok(()) => { }
        }
    }
    /// Fallible `append`. On success `other` is emptied; its elements now
    /// live in `self` (ownership transferred via the bitwise copy below).
    #[inline]
    pub fn try_append(&mut self, other: &mut Self) -> Result<(), TryReserveError> {
        unsafe {
            self.try_append_elements(other.as_slice())?;
            other.set_len(0);
        }
        Ok(())
    }
    /// Bitwise-copies the elements of `other` onto the end of `self`.
    ///
    /// # Safety (internal)
    /// Caller takes logical ownership of the copied elements (they must not
    /// be dropped through their original location).
    #[inline]
    unsafe fn try_append_elements(&mut self, other: *const [T]) -> Result<(), TryReserveError> {
        let count = (*other).len();
        self.try_reserve(count)?;
        let len = self.len();
        ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count);
        self.len += count;
        Ok(())
    }
    /// Removes the given range and returns an iterator over it.
    /// Panics if the range is out of bounds or decreasing.
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, A>
    where
        R: RangeBounds<usize>,
    {
        let len = self.len();
        let start = match range.start_bound() {
            Included(&n) => n,
            Excluded(&n) => n + 1,
            Unbounded => 0,
        };
        let end = match range.end_bound() {
            Included(&n) => n + 1,
            Excluded(&n) => n,
            Unbounded => len,
        };
        assert!(start <= end);
        assert!(end <= len);
        unsafe {
            // Len is preemptively set to `start` for leak safety: if the
            // Drain is forgotten, the drained range and the tail are simply
            // leaked rather than double-dropped. Drain::drop restores the tail.
            self.set_len(start);
            let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start);
            Drain {
                tail_start: end,
                tail_len: len - end,
                iter: range_slice.iter(),
                vec: NonNull::from(self),
            }
        }
    }
    /// Removes all elements; capacity is retained.
    #[inline]
    pub fn clear(&mut self) {
        self.truncate(0)
    }
    /// Number of elements.
    #[inline]
    pub fn len(&self) -> usize {
        self.len
    }
    /// True if the vector holds no elements.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Splits off the tail starting at `at` into a new vector.
    /// Panics/aborts on allocation failure.
    #[inline]
    pub fn split_off(&mut self, at: usize) -> Self
    where
        A: Clone,
    {
        match self.try_split_off(at) {
            Err(CapacityOverflow) => capacity_overflow(),
            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
            Ok(vec) => vec,
        }
    }
    /// Fallible `split_off`. Panics if `at > len`.
    #[inline]
    pub fn try_split_off(&mut self, at: usize) -> Result<Self, TryReserveError>
    where
        A: Clone,
    {
        assert!(at <= self.len(), "`at` out of bounds");
        let other_len = self.len - at;
        let mut other = Self::try_with_capacity_in(other_len, self.alloc_ref().clone())?;
        unsafe {
            // Shrink `self` first, then bitwise-move the tail; ownership of
            // the copied elements transfers to `other`.
            self.set_len(at);
            other.set_len(other_len);
            ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
        }
        Ok(other)
    }
    /// Resizes to `new_len`, filling with values produced by `f`.
    /// Panics/aborts on allocation failure.
    pub fn resize_with<F>(&mut self, new_len: usize, f: F)
    where
        F: FnMut() -> T,
    {
        match self.try_resize_with(new_len, f) {
            Err(CapacityOverflow) => capacity_overflow(),
            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
            Ok(()) => { }
        }
    }
    /// Fallible `resize_with`: grows via `try_extend_with`, shrinks via
    /// `truncate`.
    pub fn try_resize_with<F>(&mut self, new_len: usize, f: F) -> Result<(), TryReserveError>
    where
        F: FnMut() -> T,
    {
        let len = self.len();
        if new_len > len {
            self.try_extend_with(new_len - len, ExtendFunc(f))?;
        } else {
            self.truncate(new_len);
        }
        Ok(())
    }
    /// Leaks the vector, returning a `'a` mutable slice of its elements.
    /// Excess capacity is released first (via `into_boxed_slice`).
    #[inline]
    pub fn leak<'a>(vec: Self) -> &'a mut [T]
    where
        T: 'a, {
        Box::leak(vec.into_boxed_slice())
    }
    /// Fallible `leak`.
    #[inline]
    pub fn try_leak<'a>(vec: Self) -> Result<&'a mut [T], TryReserveError>
    where
        T: 'a, {
        Ok(Box::leak(vec.try_into_boxed_slice()?))
    }
    /// Shared reference to the allocator.
    pub fn alloc_ref(&self) -> &A {
        self.buf.alloc()
    }
    /// Mutable reference to the allocator.
    pub fn alloc_ref_mut(&mut self) -> &mut A {
        self.buf.alloc_mut()
    }
}
impl<T: Clone, A: AllocRef> Vec<T, A> {
    /// Resizes to `new_len`, cloning `value` into new slots.
    /// Panics/aborts on allocation failure.
    pub fn resize(&mut self, new_len: usize, value: T) {
        handle_reserve_error(self.try_resize(new_len, value))
    }
    /// Fallible `resize`: grows with clones of `value`, shrinks via
    /// `truncate`.
    pub fn try_resize(&mut self, new_len: usize, value: T) -> Result<(), TryReserveError> {
        let len = self.len();
        if new_len > len {
            self.try_extend_with(new_len - len, ExtendElement(value))?
        } else {
            self.truncate(new_len);
        }
        Ok(())
    }
    /// Clones and appends all elements of `other` (specialized for `Copy`
    /// element types via `SpecExtend`).
    pub fn extend_from_slice(&mut self, other: &[T]) {
        self.spec_extend(other.iter())
    }
    /// Fallible `extend_from_slice`.
    pub fn try_extend_from_slice(&mut self, other: &[T]) -> Result<(), TryReserveError> {
        self.try_spec_extend(other.iter())
    }
}
/// Internal strategy for producing fill values in `try_extend_with`.
/// `next` yields all but the final value; `last` consumes the strategy to
/// produce the final one (letting `ExtendElement` move its value instead
/// of cloning it once more).
trait ExtendWith<T> {
    fn next(&mut self) -> T;
    fn last(self) -> T;
}
/// Fills with clones of a single element; the final slot takes it by move.
struct ExtendElement<T>(T);
impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
    fn next(&mut self) -> T {
        self.0.clone()
    }
    fn last(self) -> T {
        self.0
    }
}
/// Fills with `T::default()` values.
struct ExtendDefault;
impl<T: Default> ExtendWith<T> for ExtendDefault {
    fn next(&mut self) -> T {
        Default::default()
    }
    fn last(self) -> T {
        Default::default()
    }
}
/// Fills with values produced by a closure.
struct ExtendFunc<F>(F);
impl<T, F: FnMut() -> T> ExtendWith<T> for ExtendFunc<F> {
    fn next(&mut self) -> T {
        (self.0)()
    }
    fn last(mut self) -> T {
        (self.0)()
    }
}
impl<T, A: AllocRef> Vec<T, A> {
    /// Appends `n` values produced by the `ExtendWith` strategy.
    ///
    /// Panic safety: `SetLenOnDrop` commits the running length on unwind,
    /// so elements written before a panicking `value.next()` are still
    /// owned (and later dropped) by the vector.
    fn try_extend_with<E: ExtendWith<T>>(
        &mut self,
        n: usize,
        mut value: E,
    ) -> Result<(), TryReserveError> {
        self.try_reserve(n)?;
        unsafe {
            let mut ptr = self.as_mut_ptr().add(self.len());
            let mut local_len = SetLenOnDrop::new(&mut self.len);
            // Write n-1 values with `next`, then the final one with `last`
            // so the strategy can move rather than clone its payload.
            for _ in 1..n {
                ptr::write(ptr, value.next());
                ptr = ptr.offset(1);
                local_len.increment_len(1);
            }
            if n > 0 {
                ptr::write(ptr, value.last());
                local_len.increment_len(1);
            }
        }
        Ok(())
    }
}
/// Panic-safety helper: tracks a length locally and writes it back to the
/// vector's `len` field when dropped — including during unwinding — so
/// elements already written are never leaked or double-counted.
struct SetLenOnDrop<'a> {
    len: &'a mut usize,
    local_len: usize,
}
impl<'a> SetLenOnDrop<'a> {
    #[inline]
    fn new(len: &'a mut usize) -> Self {
        SetLenOnDrop {
            local_len: *len,
            len,
        }
    }
    #[inline]
    fn increment_len(&mut self, increment: usize) {
        self.local_len += increment;
    }
}
impl Drop for SetLenOnDrop<'_> {
    #[inline]
    fn drop(&mut self) {
        // Commit the locally tracked length back to the vector.
        *self.len = self.local_len;
    }
}
impl<T: PartialEq, A: AllocRef> Vec<T, A> {
    /// Removes consecutive repeated elements, keeping the first of each run.
    #[inline]
    pub fn dedup(&mut self) {
        self.dedup_by(|first, second| first == second)
    }
    /// Removes and returns the first element equal to `item`, if any.
    /// Later duplicates are left in place.
    pub fn remove_item(&mut self, item: &T) -> Option<T> {
        let found = self.iter().position(|candidate| candidate == item);
        match found {
            Some(pos) => Some(self.remove(pos)),
            None => None,
        }
    }
}
#[doc(hidden)]
/// Backing function for the `vec![elem; n]` style constructor (global
/// allocator).
pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
    from_elem_in(elem, n, Global)
}
#[doc(hidden)]
/// Allocator-aware `from_elem`; panics/aborts on allocation failure.
pub fn from_elem_in<T: Clone, A>(elem: T, n: usize, a: A) -> Vec<T, A>
where
    A: AllocRef,
{
    handle_reserve_error(try_from_elem_in(elem, n, a))
}
#[doc(hidden)]
/// Fallible `from_elem_in`, dispatching through `SpecFromElem` so zeroable
/// element types can use zeroed allocation.
pub fn try_from_elem_in<T: Clone, A: AllocRef>(
    elem: T,
    n: usize,
    a: A,
) -> Result<Vec<T, A>, TryReserveError> {
    <T as SpecFromElem<A>>::try_from_elem_in(elem, n, a)
}
/// Specialization hook for `from_elem`: per-type strategies for building a
/// vector of `n` copies of one element.
trait SpecFromElem<A: AllocRef>: Sized {
    fn try_from_elem_in(elem: Self, n: usize, a: A) -> Result<Vec<Self, A>, TryReserveError>;
}
impl<T: Clone, A: AllocRef> SpecFromElem<A> for T {
    // Generic fallback: allocate, then clone the element into each slot.
    default fn try_from_elem_in(
        elem: Self,
        n: usize,
        a: A,
    ) -> Result<Vec<Self, A>, TryReserveError> {
        let mut v = Vec::try_with_capacity_in(n, a)?;
        v.try_extend_with(n, ExtendElement(elem))?;
        Ok(v)
    }
}
#[allow(clippy::use_self)]
impl<A: AllocRef> SpecFromElem<A> for u8 {
    // Fast path for bytes: zeroed allocation for 0, `memset` otherwise.
    #[inline]
    fn try_from_elem_in(elem: Self, n: usize, a: A) -> Result<Vec<Self, A>, TryReserveError> {
        if elem == 0 {
            return Ok(Vec {
                buf: RawVec::try_with_capacity_zeroed_in(n, a)?,
                len: n,
            });
        }
        unsafe {
            let mut v = Vec::try_with_capacity_in(n, a)?;
            ptr::write_bytes(v.as_mut_ptr(), elem, n);
            v.set_len(n);
            Ok(v)
        }
    }
}
impl<T: Clone + IsZero, A: AllocRef> SpecFromElem<A> for T {
    // Fast path for types with a known all-zero representation: request a
    // zeroed allocation instead of writing each element.
    #[inline]
    fn try_from_elem_in(elem: Self, n: usize, a: A) -> Result<Vec<Self, A>, TryReserveError> {
        if elem.is_zero() {
            return Ok(Vec {
                buf: RawVec::try_with_capacity_zeroed_in(n, a)?,
                len: n,
            });
        }
        let mut v = Vec::try_with_capacity_in(n, a)?;
        v.try_extend_with(n, ExtendElement(elem))?;
        Ok(v)
    }
}
/// Marker for values whose in-memory representation is all zero bytes.
///
/// # Safety
/// `is_zero() == true` must imply the value is represented entirely by
/// zero bytes, since `from_elem` then uses zeroed allocation.
unsafe trait IsZero {
    fn is_zero(&self) -> bool;
}
/// Implements `IsZero` for a type given a zero-test closure.
macro_rules! impl_is_zero {
    ($t:ty, $is_zero:expr) => {
        unsafe impl IsZero for $t {
            #[inline]
            fn is_zero(&self) -> bool {
                $is_zero(*self)
            }
        }
    };
}
// `u8` is intentionally absent — presumably because it already has a
// dedicated `SpecFromElem` fast path above; confirm before adding it.
impl_is_zero!(i8, |x| x == 0);
impl_is_zero!(i16, |x| x == 0);
impl_is_zero!(i32, |x| x == 0);
impl_is_zero!(i64, |x| x == 0);
impl_is_zero!(i128, |x| x == 0);
impl_is_zero!(isize, |x| x == 0);
impl_is_zero!(u16, |x| x == 0);
impl_is_zero!(u32, |x| x == 0);
impl_is_zero!(u64, |x| x == 0);
impl_is_zero!(u128, |x| x == 0);
impl_is_zero!(usize, |x| x == 0);
impl_is_zero!(bool, |x: Self| !x);
impl_is_zero!(char, |x| x == '\0');
// Floats compare by bit pattern: only +0.0 is all-zero bytes (-0.0 is not).
impl_is_zero!(f32, |x: Self| x.to_bits() == 0);
impl_is_zero!(f64, |x: Self| x.to_bits() == 0);
// Null raw pointers are all-zero bytes.
unsafe impl<T> IsZero for *const T {
    #[inline]
    fn is_zero(&self) -> bool {
        (*self).is_null()
    }
}
unsafe impl<T> IsZero for *mut T {
    #[inline]
    fn is_zero(&self) -> bool {
        (*self).is_null()
    }
}
// `Option<&T>` / `Option<&mut T>` / `Option<Box<T>>` use the null niche:
// `None` is represented as all-zero bytes.
unsafe impl<T: ?Sized> IsZero for Option<&T> {
    #[inline]
    fn is_zero(&self) -> bool {
        self.is_none()
    }
}
unsafe impl<T: ?Sized> IsZero for Option<&mut T> {
    #[inline]
    fn is_zero(&self) -> bool {
        self.is_none()
    }
}
unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
    #[inline]
    fn is_zero(&self) -> bool {
        self.is_none()
    }
}
impl<T: Clone, A> Clone for Vec<T, A>
where
    A: AllocRef + Clone,
{
    /// Clones the vector into a clone of its own allocator.
    #[must_use]
    #[inline]
    fn clone(&self) -> Self {
        self.clone_in(self.alloc_ref().clone())
    }
}
#[allow(clippy::use_self)]
impl<T: Clone, A: AllocRef, B: AllocRef> CloneIn<B> for Vec<T, A> {
    type Cloned = Vec<T, B>;
    /// Clones the elements into a vector backed by allocator `a`.
    /// Capacity is reserved up front, so `push_unchecked` is in bounds.
    fn clone_in(&self, a: B) -> Self::Cloned {
        let mut v = Vec::with_capacity_in(self.len(), a);
        self.iter()
            .cloned()
            .for_each(|element| unsafe { v.push_unchecked(element) });
        v
    }
    /// Fallible `clone_in`.
    fn try_clone_in(&self, a: B) -> Result<Self::Cloned, TryReserveError> {
        let mut v = Vec::try_with_capacity_in(self.len(), a)?;
        self.iter()
            .cloned()
            .for_each(|element| unsafe { v.push_unchecked(element) });
        Ok(v)
    }
}
// Hashes as the underlying slice, so `Vec` and `[T]` hash identically.
impl<T: Hash, A: AllocRef> Hash for Vec<T, A> {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        Hash::hash(&**self, state)
    }
}
// Indexing delegates to the slice, supporting all `SliceIndex` forms
// (single index, ranges, ...).
impl<T, A: AllocRef, I: SliceIndex<[T]>> Index<I> for Vec<T, A> {
    type Output = I::Output;
    #[inline]
    #[must_use]
    fn index(&self, index: I) -> &Self::Output {
        Index::index(&**self, index)
    }
}
impl<T, A: AllocRef, I: SliceIndex<[T]>> IndexMut<I> for Vec<T, A> {
    #[inline]
    #[must_use]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        IndexMut::index_mut(&mut **self, index)
    }
}
// Deref to the initialized prefix of the buffer.
impl<T, A: AllocRef> ops::Deref for Vec<T, A> {
    type Target = [T];
    #[must_use]
    fn deref(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.as_ptr(), self.len) }
    }
}
impl<T, A: AllocRef> ops::DerefMut for Vec<T, A> {
    #[must_use]
    fn deref_mut(&mut self) -> &mut [T] {
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
    }
}
impl<T> FromIterator<T> for Vec<T> {
    /// Collects into a vector in the global allocator, dispatching through
    /// `SpecExtend` for specialized fast paths.
    #[inline]
    #[must_use]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        <Self as SpecExtend<T, I::IntoIter, Global>>::from_iter_in(iter.into_iter(), Global)
    }
}
impl<T, A: AllocRef> FromIteratorIn<T, A> for Vec<T, A> {
    /// Allocator-aware `from_iter`.
    #[inline]
    #[must_use]
    fn from_iter_in<I: IntoIterator<Item = T>>(iter: I, a: A) -> Self {
        <Self as SpecExtend<T, I::IntoIter, A>>::from_iter_in(iter.into_iter(), a)
    }
    /// Fallible allocator-aware `from_iter`.
    #[inline]
    fn try_from_iter_in<I: IntoIterator<Item = T>>(iter: I, a: A) -> Result<Self, TryReserveError> {
        <Self as SpecExtend<T, I::IntoIter, A>>::try_from_iter_in(iter.into_iter(), a)
    }
}
impl<T, A: AllocRef> IntoIterator for Vec<T, A> {
    type Item = T;
    type IntoIter = IntoIter<T, A>;
    /// Consumes the vector into an owning iterator. The buffer's ownership
    /// moves into the iterator's `RawVec`; `mem::forget(self)` prevents the
    /// vector's destructor from freeing it.
    #[inline]
    #[must_use]
    #[allow(clippy::cast_possible_wrap)]
    fn into_iter(mut self) -> IntoIter<T, A> {
        unsafe {
            let begin = self.as_mut_ptr();
            let end = if mem::size_of::<T>() == 0 {
                // For ZSTs the pointers cannot advance by element size;
                // the "end" pointer is used as a plain counter instead.
                arith_offset(begin as *const i8, self.len() as isize) as *const T
            } else {
                begin.add(self.len())
            };
            let capacity = self.buf.capacity();
            let alloc = ptr::read(self.buf.alloc_mut());
            mem::forget(self);
            IntoIter {
                buf: RawVec::from_raw_parts_in(begin, capacity, alloc),
                ptr: begin,
                end,
            }
        }
    }
}
impl<'a, T, A: AllocRef> IntoIterator for &'a Vec<T, A> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;
    #[must_use]
    fn into_iter(self) -> slice::Iter<'a, T> {
        self.iter()
    }
}
impl<'a, T, A: AllocRef> IntoIterator for &'a mut Vec<T, A> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;
    fn into_iter(self) -> slice::IterMut<'a, T> {
        self.iter_mut()
    }
}
impl<T, A> Extend<T> for Vec<T, A>
where
    A: AllocRef,
{
    /// Appends all items; dispatches through `SpecExtend` for fast paths.
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        <Self as SpecExtend<T, I::IntoIter, A>>::spec_extend(self, iter.into_iter())
    }
}
impl<T, A: AllocRef> TryExtend<T> for Vec<T, A> {
    type Err = TryReserveError;
    /// Fallible `extend`.
    #[inline]
    fn try_extend<I: IntoIterator<Item = T>>(&mut self, iter: I) -> Result<(), Self::Err> {
        <Self as SpecExtend<T, I::IntoIter, A>>::try_spec_extend(self, iter.into_iter())
    }
}
/// Specialization hook for collection/extension: only the fallible methods
/// are required; the infallible ones wrap them with abort-on-error.
trait SpecExtend<T, I, A: AllocRef>: Sized {
    #[inline]
    fn from_iter_in(iter: I, a: A) -> Self {
        handle_reserve_error(Self::try_from_iter_in(iter, a))
    }
    fn try_from_iter_in(iter: I, a: A) -> Result<Self, TryReserveError>;
    #[inline]
    fn spec_extend(&mut self, iter: I) {
        handle_reserve_error(self.try_spec_extend(iter))
    }
    fn try_spec_extend(&mut self, iter: I) -> Result<(), TryReserveError>;
}
// Base case: any iterator. Collected by seeding with the first element
// (sized by the lower size hint + 1) and then extending element by element.
impl<T, I, A> SpecExtend<T, I, A> for Vec<T, A>
where
    I: Iterator<Item = T>,
    A: AllocRef,
{
    default fn try_from_iter_in(mut iter: I, a: A) -> Result<Self, TryReserveError> {
        let mut vector = match iter.next() {
            None => return Ok(Self::new_in(a)),
            Some(element) => {
                let (lower, _) = iter.size_hint();
                let mut vector = Self::try_with_capacity_in(lower.saturating_add(1), a)?;
                unsafe {
                    ptr::write(vector.get_unchecked_mut(0), element);
                    vector.set_len(1);
                }
                vector
            }
        };
        vector.try_spec_extend(iter)?;
        Ok(vector)
    }
    default fn try_spec_extend(&mut self, iter: I) -> Result<(), TryReserveError> {
        self.try_extend_desugared(iter)
    }
}
// Specialization: `TrustedLen` iterators report an exact length, so we can
// reserve once and write elements without per-element capacity checks.
impl<T, I, A: AllocRef> SpecExtend<T, I, A> for Vec<T, A>
where
    I: TrustedLen<Item = T>,
{
    default fn try_from_iter_in(iter: I, a: A) -> Result<Self, TryReserveError> {
        let mut vector = Self::new_in(a);
        vector.try_spec_extend(iter)?;
        Ok(vector)
    }
    default fn try_spec_extend(&mut self, iter: I) -> Result<(), TryReserveError> {
        let (low, high) = iter.size_hint();
        if let Some(high_value) = high {
            debug_assert_eq!(
                low,
                high_value,
                "TrustedLen iterator's size hint is not exact: {:?}",
                (low, high)
            );
        }
        if let Some(additional) = high {
            self.try_reserve(additional)?;
            unsafe {
                let mut ptr = self.as_mut_ptr().add(self.len());
                // SetLenOnDrop keeps the length consistent if the iterator
                // panics mid-way.
                let mut local_len = SetLenOnDrop::new(&mut self.len);
                iter.for_each(move |element| {
                    ptr::write(ptr, element);
                    ptr = ptr.offset(1);
                    local_len.increment_len(1);
                });
            }
        } else {
            // Unbounded hint (more than usize::MAX claimed): fall back to
            // the per-element path.
            self.try_extend_desugared(iter)?
        }
        Ok(())
    }
}
// Specialization: collecting from a Vec's own IntoIter. If the iterator
// has not been advanced (buffer start == cursor), the original allocation
// is reused wholesale instead of copying.
impl<T, A: AllocRef> SpecExtend<T, IntoIter<T, A>, A> for Vec<T, A> {
    fn try_from_iter_in(mut iter: IntoIter<T, A>, alloc: A) -> Result<Self, TryReserveError> {
        let ptr: *const T = iter.buf.ptr();
        if ptr == iter.ptr {
            unsafe {
                let vec = Self::from_raw_parts_in(
                    iter.buf.ptr(),
                    iter.len(),
                    iter.buf.capacity(),
                    ptr::read(iter.buf.alloc_mut()),
                );
                // Prevent the iterator's destructor from freeing the
                // buffer we just took ownership of.
                mem::forget(iter);
                Ok(vec)
            }
        } else {
            // Iterator was partially consumed: copy the remainder.
            let mut vector = Self::new_in(alloc);
            vector.try_spec_extend(iter)?;
            Ok(vector)
        }
    }
    fn try_spec_extend(&mut self, mut iter: IntoIter<T, A>) -> Result<(), TryReserveError> {
        unsafe {
            self.try_append_elements(iter.as_slice())?;
        }
        // Mark the iterator empty so its Drop does not drop the moved
        // elements again.
        iter.ptr = iter.end;
        Ok(())
    }
}
// Reference iterators over Clone elements: clone each item.
impl<'a, T: 'a, I, A: AllocRef> SpecExtend<&'a T, I, A> for Vec<T, A>
where
    I: Iterator<Item = &'a T>,
    T: Clone,
{
    default fn try_from_iter_in(iterator: I, a: A) -> Result<Self, TryReserveError> {
        SpecExtend::try_from_iter_in(iterator.cloned(), a)
    }
    default fn try_spec_extend(&mut self, iterator: I) -> Result<(), TryReserveError> {
        self.try_spec_extend(iterator.cloned())
    }
}
// Fast path for slice iterators over Copy elements: one reserve plus a
// single bulk `copy_from_slice` (memcpy).
impl<'a, T: 'a, A: AllocRef> SpecExtend<&'a T, slice::Iter<'a, T>, A> for Vec<T, A>
where
    T: Copy,
{
    fn try_spec_extend(&mut self, iter: slice::Iter<'a, T>) -> Result<(), TryReserveError> {
        let slice = iter.as_slice();
        self.try_reserve(slice.len())?;
        unsafe {
            let len = self.len();
            self.set_len(len + slice.len());
            self.get_unchecked_mut(len..).copy_from_slice(slice);
        }
        Ok(())
    }
}
impl<T, A: AllocRef> Vec<T, A> {
    /// Per-element extend fallback: re-checks capacity each iteration and
    /// reserves using the iterator's current lower size hint (+1 for the
    /// element in hand).
    fn try_extend_desugared<I: Iterator<Item = T>>(
        &mut self,
        mut iterator: I,
    ) -> Result<(), TryReserveError>
    where
        A: AllocRef,
    {
        while let Some(element) = iterator.next() {
            let len = self.len();
            if len == self.capacity() {
                let (lower, _) = iterator.size_hint();
                self.try_reserve(lower.saturating_add(1))?;
            }
            unsafe {
                self.push_unchecked(element);
            }
        }
        Ok(())
    }
    /// Replaces the given range with `replace_with`, yielding the removed
    /// elements. The real work happens in `Splice`'s `Drop`.
    #[inline]
    pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A>
    where
        A: AllocRef,
        R: RangeBounds<usize>,
        I: IntoIterator<Item = T>,
    {
        Splice {
            drain: self.drain(range),
            replace_with: replace_with.into_iter(),
        }
    }
    /// Returns an iterator removing all elements for which `filter` returns
    /// true. Length is preemptively zeroed for leak safety: if the iterator
    /// is forgotten, unvisited elements leak instead of being double-dropped.
    pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F, A>
    where
        A: AllocRef,
        F: FnMut(&mut T) -> bool,
    {
        let old_len = self.len();
        unsafe {
            self.set_len(0);
        }
        DrainFilter {
            vec: self,
            idx: 0,
            del: 0,
            old_len,
            pred: filter,
            panic_flag: false,
        }
    }
}
// Extending from `&T` items (Copy): forwards to the owned-item impls via
// `cloned()`, which for `slice::Iter` hits the memcpy fast path above.
impl<'a, T: 'a + Copy, A> Extend<&'a T> for Vec<T, A>
where
    A: AllocRef,
{
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned())
    }
}
impl<'a, T: 'a + Copy, A> TryExtend<&'a T> for Vec<T, A>
where
    A: AllocRef,
{
    type Err = TryReserveError;
    fn try_extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) -> Result<(), Self::Err> {
        self.try_extend(iter.into_iter().cloned())
    }
}
/// Generates `PartialEq<$rhs> for $lhs` that compares as slices, allowing
/// comparison across different element types (`T: PartialEq<U>`) and
/// different allocators.
macro_rules! __impl_slice_eq1 {
    ([$($vars:tt)*] $lhs:ty, $rhs:ty, $($constraints:tt)*) => {
        impl<T, U, $($vars)*> PartialEq<$rhs> for $lhs
        where
            T: PartialEq<U>,
            $($constraints)*
        {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] }
            #[inline]
            #[allow(clippy::partialeq_ne_impl)]
            fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] }
        }
    }
}
// Vec vs Vec, slices, and fixed-size arrays (array sizes limited by the
// `LengthAtMost32` era of const generics).
__impl_slice_eq1! { [A, B] Vec<T, A>, Vec<U, B>, A: AllocRef, B: AllocRef }
__impl_slice_eq1! { [A] Vec<T, A>, &[U], A: AllocRef }
__impl_slice_eq1! { [A] Vec<T, A>, &mut [U], A: AllocRef }
__impl_slice_eq1! { [A, const N: usize] Vec<T, A>, [U; N], [U; N]: core::array::LengthAtMost32, A: AllocRef }
__impl_slice_eq1! { [A, const N: usize] Vec<T, A>, &[U; N], [U; N]: core::array::LengthAtMost32, A: AllocRef }
// Ordering delegates to the slice impls (lexicographic).
impl<T: PartialOrd, A: AllocRef, B: AllocRef> PartialOrd<Vec<T, B>> for Vec<T, A> {
    #[inline]
    #[must_use]
    fn partial_cmp(&self, other: &Vec<T, B>) -> Option<Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}
impl<T: Eq, A: AllocRef> Eq for Vec<T, A> {}
impl<T: Ord, A: AllocRef> Ord for Vec<T, A> {
    #[inline]
    #[must_use]
    fn cmp(&self, other: &Self) -> Ordering {
        Ord::cmp(&**self, &**other)
    }
}
// `#[may_dangle]` asserts the destructor only drops `T`s and never
// otherwise inspects them, enabling the dropck eyepatch.
unsafe impl<#[may_dangle] T, A: AllocRef> Drop for Vec<T, A> {
    fn drop(&mut self) {
        unsafe {
            // Drop the elements; the allocation itself is freed by
            // RawVec's own destructor.
            ptr::drop_in_place(&mut self[..]);
        }
    }
}
impl<T, A: AllocRef> Default for Vec<T, A>
where
    A: Default,
{
    /// Empty vector in the default allocator; does not allocate.
    #[must_use]
    fn default() -> Self {
        Self::new_in(A::default())
    }
}
impl<T: fmt::Debug, A: AllocRef> fmt::Debug for Vec<T, A> {
    /// Formats as the underlying slice.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
// Identity AsRef/AsMut, plus slice views.
impl<T, A: AllocRef> AsRef<Vec<T, A>> for Vec<T, A> {
    #[must_use]
    fn as_ref(&self) -> &Self {
        self
    }
}
impl<T, A: AllocRef> AsMut<Vec<T, A>> for Vec<T, A> {
    #[must_use]
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}
impl<T, A: AllocRef> AsRef<[T]> for Vec<T, A> {
    #[must_use]
    fn as_ref(&self) -> &[T] {
        self
    }
}
impl<T, A: AllocRef> AsMut<[T]> for Vec<T, A> {
    #[must_use]
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}
impl<T: Clone> From<&[T]> for Vec<T> {
    /// Clones a slice into a new vector (global allocator).
    #[must_use]
    fn from(s: &[T]) -> Self {
        let mut v = Self::new();
        v.extend(s.iter().cloned());
        v
    }
}
impl<T: Clone> From<&mut [T]> for Vec<T> {
    /// Delegates to the shared-slice conversion.
    #[must_use]
    fn from(s: &mut [T]) -> Self {
        From::<&[T]>::from(s)
    }
}
#[cfg(not(test))]
impl<T> From<Vec<T>> for Box<[T]> {
    /// Converts via `into_boxed_slice`, dropping excess capacity.
    #[must_use]
    fn from(v: Vec<T>) -> Self {
        v.into_boxed_slice()
    }
}
impl From<&str> for Vec<u8> {
    /// Copies the string's UTF-8 bytes into a byte vector.
    #[must_use]
    fn from(s: &str) -> Self {
        From::from(s.as_bytes())
    }
}
/// Owning iterator over a `Vec`'s elements.
///
/// `ptr..end` is the range of not-yet-yielded elements; for ZSTs the two
/// pointers act as counters (see `arith_offset` usage). `buf` keeps the
/// allocation alive and frees it on drop.
pub struct IntoIter<T, A: AllocRef = Global> {
    buf: RawVec<T, A>,
    ptr: *const T,
    end: *const T,
}
impl<T: fmt::Debug, A: AllocRef> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}
impl<T, A: AllocRef> IntoIter<T, A> {
    /// Views the remaining (not yet yielded) elements as a slice.
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }
    /// Mutable view of the remaining elements.
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { slice::from_raw_parts_mut(self.ptr as *mut T, self.len()) }
    }
}
// SAFETY: IntoIter owns its elements and allocator, so it is Send when
// both are Send.
unsafe impl<T: Send, A: Send + AllocRef> Send for IntoIter<T, A> {}
// SAFETY: a shared `&IntoIter` only exposes shared access to the elements
// and the allocator, so Sync requires both `T: Sync` and `A: Sync`.
// (Previously this was bounded by `A: Send`, which does not justify
// sharing the allocator by reference across threads; std's
// `vec::IntoIter` uses the `Sync` bound.)
unsafe impl<T: Sync, A: Sync + AllocRef> Sync for IntoIter<T, A> {}
impl<T, A: AllocRef> Iterator for IntoIter<T, A> {
    type Item = T;
    #[inline]
    fn next(&mut self) -> Option<T> {
        unsafe {
            if self.ptr == self.end {
                None
            } else if mem::size_of::<T>() == 0 {
                // ZST: advance the pointer-as-counter; any value of a ZST
                // is valid, so conjure one with `mem::zeroed`.
                self.ptr = arith_offset(self.ptr as *const i8, 1) as *mut T;
                Some(mem::zeroed())
            } else {
                // Move the element out and advance.
                let old = self.ptr;
                self.ptr = self.ptr.offset(1);
                Some(ptr::read(old))
            }
        }
    }
    #[inline]
    #[must_use]
    #[allow(clippy::cast_sign_loss)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact length: address difference for ZSTs (pointers are
        // counters), element distance otherwise.
        let exact = if mem::size_of::<T>() == 0 {
            (self.end as usize).wrapping_sub(self.ptr as usize)
        } else {
            unsafe { offset_from(self.end, self.ptr) as usize }
        };
        (exact, Some(exact))
    }
    #[inline]
    #[must_use]
    fn count(self) -> usize {
        // Exact length is known, so counting need not iterate. The
        // remaining elements are dropped by `self`'s destructor.
        self.len()
    }
}
impl<T, A: AllocRef> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        unsafe {
            if self.end == self.ptr {
                None
            } else if mem::size_of::<T>() == 0 {
                // ZST: decrement the counter-pointer.
                self.end = arith_offset(self.end as *const i8, -1) as *mut T;
                Some(mem::zeroed())
            } else {
                self.end = self.end.offset(-1);
                Some(ptr::read(self.end))
            }
        }
    }
}
impl<T, A: AllocRef> ExactSizeIterator for IntoIter<T, A> {}
impl<T, A: AllocRef> FusedIterator for IntoIter<T, A> {}
impl<T: Clone> Clone for IntoIter<T> {
    /// Clones the remaining elements into a fresh vector and returns its
    /// iterator (the clone starts at the current position).
    #[must_use]
    fn clone(&self) -> Self {
        let mut v = Vec::new();
        v.extend(self.as_slice().iter().cloned());
        v.into_iter()
    }
}
impl<T, A: AllocRef> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        // Drop any elements not yet yielded; `buf` (RawVec) then frees
        // the allocation via its own destructor.
        for _x in self.by_ref() {}
    }
}
/// Draining iterator for `Vec::drain`.
///
/// While alive, the source vector's length is truncated to the range
/// start; `Drop` moves the tail (`tail_start..tail_start + tail_len`)
/// back down and restores the length.
pub struct Drain<'a, T, A: AllocRef = Global> {
    tail_start: usize,
    tail_len: usize,
    // Iterates the drained range; elements are moved out with ptr::read.
    iter: slice::Iter<'a, T>,
    vec: NonNull<Vec<T, A>>,
}
impl<T: fmt::Debug, A: AllocRef> fmt::Debug for Drain<'_, T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
    }
}
impl<T, A: AllocRef> Drain<'_, T, A> {
    /// Views the remaining elements to be drained.
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        self.iter.as_slice()
    }
}
// NOTE(review): these impls do not bound `A`; std's allocator-aware Drain
// additionally requires `A: Sync` / `A: Send` — worth confirming whether
// that is needed for this crate's AllocRef.
unsafe impl<T: Sync, A: AllocRef> Sync for Drain<'_, T, A> {}
unsafe impl<T: Send, A: AllocRef> Send for Drain<'_, T, A> {}
impl<T, A: AllocRef> Iterator for Drain<'_, T, A> {
    type Item = T;
    #[inline]
    fn next(&mut self) -> Option<T> {
        // Move each drained element out by value.
        self.iter.next().map(|elt| unsafe { ptr::read(elt) })
    }
    #[must_use]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
impl<T, A: AllocRef> DoubleEndedIterator for Drain<'_, T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
    }
}
impl<T, A: AllocRef> Drop for Drain<'_, T, A> {
    fn drop(&mut self) {
        // Drop any elements not yet drained.
        self.for_each(drop);
        if self.tail_len > 0 {
            unsafe {
                let source_vec = self.vec.as_mut();
                // Move the preserved tail down over the drained gap and
                // restore the vector's length.
                let start = source_vec.len();
                let tail = self.tail_start;
                if tail != start {
                    let src = source_vec.as_ptr().add(tail);
                    let dst = source_vec.as_mut_ptr().add(start);
                    ptr::copy(src, dst, self.tail_len);
                }
                source_vec.set_len(start + self.tail_len);
            }
        }
    }
}
impl<T, A: AllocRef> ExactSizeIterator for Drain<'_, T, A> {}
impl<T, A: AllocRef> FusedIterator for Drain<'_, T, A> {}
/// Iterator for `Vec::splice`: yields the drained range while holding the
/// replacement iterator; the actual insertion happens in its `Drop`.
#[derive(Debug)]
pub struct Splice<'a, I: Iterator + 'a, A = Global>
where
    A: AllocRef,
{
    drain: Drain<'a, I::Item, A>,
    replace_with: I,
}
impl<I: Iterator, A> Iterator for Splice<'_, I, A>
where
    A: AllocRef,
{
    type Item = I::Item;
    fn next(&mut self) -> Option<Self::Item> {
        self.drain.next()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.drain.size_hint()
    }
}
impl<I: Iterator, A> DoubleEndedIterator for Splice<'_, I, A>
where
    A: AllocRef,
{
    fn next_back(&mut self) -> Option<Self::Item> {
        self.drain.next_back()
    }
}
impl<I: Iterator, A> ExactSizeIterator for Splice<'_, I, A> where A: AllocRef {}
impl<I: Iterator, A> Drop for Splice<'_, I, A>
where
    A: AllocRef,
{
    /// Performs the actual splice: finish draining, then write replacement
    /// elements into the vacated range, growing it as needed.
    fn drop(&mut self) {
        self.drain.by_ref().for_each(drop);
        unsafe {
            if self.drain.tail_len == 0 {
                // Range reaches the end of the vector: plain extend.
                self.drain.vec.as_mut().extend(self.replace_with.by_ref());
                return;
            }
            // First, try to fill the vacated range in place.
            if !self.drain.fill(&mut self.replace_with) {
                return;
            }
            // More replacement elements remain; shift the tail by the
            // iterator's lower bound and fill again.
            let (lower_bound, _upper_bound) = self.replace_with.size_hint();
            if lower_bound > 0 {
                self.drain.move_tail(lower_bound);
                if !self.drain.fill(&mut self.replace_with) {
                    return;
                }
            }
            // Size hint exhausted but items remain: collect the rest to
            // learn the exact count, then make room once and fill.
            let mut collected = Vec::new();
            collected.extend(self.replace_with.by_ref());
            let mut collected = collected.into_iter();
            if collected.len() > 0 {
                self.drain.move_tail(collected.len());
                let filled = self.drain.fill(&mut collected);
                debug_assert!(filled);
                debug_assert_eq!(collected.len(), 0);
            }
        }
    }
}
impl<T, A: AllocRef> Drain<'_, T, A>
where
    A: AllocRef,
{
    /// Fills the vacated range (`vec.len..tail_start`) with items from
    /// `replace_with`, bumping `vec.len` per element. Returns false if the
    /// iterator ran out before the range was full.
    ///
    /// # Safety (internal)
    /// The range `vec.len..tail_start` must be vacant (no live elements).
    unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
        let vec = self.vec.as_mut();
        let range_start = vec.len;
        let range_end = self.tail_start;
        let range_slice =
            slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start);
        for place in range_slice {
            if let Some(new_item) = replace_with.next() {
                ptr::write(place, new_item);
                vec.len += 1;
            } else {
                return false;
            }
        }
        true
    }
    /// Grows the vacated range by `extra_capacity` slots, moving the tail
    /// right (reallocating if necessary).
    ///
    /// # Safety (internal)
    /// Must only be called while the drain's tail bookkeeping is valid.
    unsafe fn move_tail(&mut self, extra_capacity: usize) {
        let vec = self.vec.as_mut();
        let used_capacity = self.tail_start + self.tail_len;
        vec.buf.reserve(used_capacity, extra_capacity);
        let new_tail_start = self.tail_start + extra_capacity;
        let src = vec.as_ptr().add(self.tail_start);
        let dst = vec.as_mut_ptr().add(new_tail_start);
        ptr::copy(src, dst, self.tail_len);
        self.tail_start = new_tail_start;
    }
}
/// Iterator for `Vec::drain_filter`: yields elements matching the
/// predicate and compacts the rest in place.
///
/// The source vector's length is zero while this is alive (leak safety);
/// `Drop` back-shifts the unvisited tail and restores the length.
pub struct DrainFilter<'a, T, F, A: AllocRef = Global>
where
    F: FnMut(&mut T) -> bool,
{
    vec: &'a mut Vec<T, A>,
    // Next index to examine (within the original length).
    idx: usize,
    // Number of elements removed so far; kept elements shift left by this.
    del: usize,
    // Length of the vector before draining started.
    old_len: usize,
    pred: F,
    // True only while the predicate is executing; lets Drop detect a
    // predicate panic and skip further filtering.
    panic_flag: bool,
}
impl<T, F, A: AllocRef> Iterator for DrainFilter<'_, T, F, A>
where
    F: FnMut(&mut T) -> bool,
{
    type Item = T;
    fn next(&mut self) -> Option<T> {
        unsafe {
            while self.idx < self.old_len {
                let i = self.idx;
                // View the full original buffer; vec.len is 0 right now.
                let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
                self.panic_flag = true;
                let drained = (self.pred)(&mut v[i]);
                self.panic_flag = false;
                self.idx += 1;
                if drained {
                    self.del += 1;
                    // Move the element out; its old slot is now logically
                    // vacant (covered by the next back-shift).
                    return Some(ptr::read(&v[i]));
                } else if self.del > 0 {
                    // Compact: shift this kept element left over the gap.
                    let del = self.del;
                    let src: *const T = &v[i];
                    let dst: *mut T = &mut v[i - del];
                    ptr::copy_nonoverlapping(src, dst, 1);
                }
            }
            None
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.old_len - self.idx))
    }
}
impl<T, F, A: AllocRef> Drop for DrainFilter<'_, T, F, A>
where
    F: FnMut(&mut T) -> bool,
{
    fn drop(&mut self) {
        /// Guard that — even if dropping remaining elements panics —
        /// back-shifts the unvisited tail over the gap and restores the
        /// vector's length.
        struct BackshiftOnDrop<'a, 'b, T, F, A: AllocRef = Global>
        where
            F: FnMut(&mut T) -> bool,
        {
            drain: &'b mut DrainFilter<'a, T, F, A>,
        }
        impl<T, F, A: AllocRef> Drop for BackshiftOnDrop<'_, '_, T, F, A>
        where
            F: FnMut(&mut T) -> bool,
        {
            fn drop(&mut self) {
                unsafe {
                    if self.drain.idx < self.drain.old_len && self.drain.del > 0 {
                        // Close the gap left by removed elements.
                        let ptr = self.drain.vec.as_mut_ptr();
                        let src = ptr.add(self.drain.idx);
                        let dst = src.sub(self.drain.del);
                        let tail_len = self.drain.old_len - self.drain.idx;
                        src.copy_to(dst, tail_len);
                    }
                    self.drain.vec.set_len(self.drain.old_len - self.drain.del);
                }
            }
        }
        let backshift = BackshiftOnDrop { drain: self };
        // If the predicate panicked mid-call, do not run it again; just
        // let the guard restore a consistent state.
        if !backshift.drain.panic_flag {
            backshift.drain.for_each(drop);
        }
    }
}
/// Stand-in for the `arith_offset` intrinsic: pointer arithmetic with
/// *wrapping* semantics and no in-bounds requirement.
///
/// This is used by the ZST paths of `IntoIterator`/`IntoIter`, which treat
/// a (possibly dangling) pointer as a plain counter. `p.offset(n)` is UB
/// whenever the result leaves the pointer's allocation, so it cannot be
/// used here; `wrapping_offset` has exactly the intrinsic's semantics.
unsafe fn arith_offset<T>(p: *const T, offset: isize) -> *const T {
    p.wrapping_offset(offset)
}
/// In-place dedup partition (mirrors `slice::partition_dedup_by`):
/// rearranges `s` so the returned first slice holds the deduplicated
/// elements (first of each `same_bucket` run, original order) and the
/// second slice holds the leftovers in unspecified order.
fn partition_dedup_by<T, F>(s: &mut [T], mut same_bucket: F) -> (&mut [T], &mut [T])
where
    F: FnMut(&mut T, &mut T) -> bool,
{
    let len = s.len();
    if len <= 1 {
        return (s, &mut []);
    }
    let ptr = s.as_mut_ptr();
    // Invariant: elements 0..next_write are the dedup result so far;
    // next_read scans ahead, comparing against the last written element.
    let mut next_read: usize = 1;
    let mut next_write: usize = 1;
    unsafe {
        while next_read < len {
            let ptr_read = ptr.add(next_read);
            let prev_ptr_write = ptr.add(next_write - 1);
            if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
                if next_read != next_write {
                    // Swap (not overwrite) so no element is dropped or
                    // duplicated — leftovers stay alive past next_write.
                    let ptr_write = prev_ptr_write.offset(1);
                    mem::swap(&mut *ptr_read, &mut *ptr_write);
                }
                next_write += 1;
            }
            next_read += 1;
        }
    }
    s.split_at_mut(next_write)
}
/// Computes the distance in elements from `origin` to `p` (stand-in for
/// `pointer::offset_from`).
///
/// # Safety
/// Both pointers must point into (or one past) the same allocated object,
/// and their byte distance must be a multiple of `size_of::<T>()`.
/// Panics for zero-sized `T` or if the element size exceeds `isize::MAX`.
#[allow(clippy::cast_possible_wrap)]
unsafe fn offset_from<T>(p: *const T, origin: *const T) -> isize
where
    T: Sized,
{
    let size = mem::size_of::<T>();
    assert!(size > 0 && isize::try_from(size).is_ok());
    let byte_delta = (p as isize).wrapping_sub(origin as isize);
    byte_delta / (size as isize)
}