use crate::alloc::{Allocator, Global};
use crate::collections::{TryReserveError, TryReserveErrorKind};
use std::{
alloc::Layout,
borrow::{Borrow, BorrowMut},
cmp,
fmt,
fmt::Debug,
hash::{Hash, Hasher},
iter::FusedIterator,
mem,
mem::ManuallyDrop,
mem::MaybeUninit,
ops::{Bound, Deref, DerefMut, Index, IndexMut, RangeBounds},
ptr,
ptr::NonNull,
slice,
slice::SliceIndex,
};
/// Convenience alias: a `VecA` backed by the crate's global allocator.
pub type Vec<T> = VecA<T, Global>;
/// Growable array (vector) parameterized over an allocator `A`.
///
/// Invariants: `len <= cap`; the first `len` slots behind `nn` are
/// initialized; `nn` is dangling when nothing is allocated (and always for
/// zero-sized `T`, which never allocates).
pub struct VecA<T, A: Allocator = Global> {
    // Number of initialized elements.
    len: usize,
    // Number of allocated slots (always >= `len`).
    cap: usize,
    // Pointer to the (possibly dangling) element buffer.
    nn: NonNull<T>,
    // Allocator owning the buffer.
    alloc: A,
}
impl<T, A: Allocator> VecA<T, A> {
    /// Creates a new, empty vector with a defaulted allocator.
    #[must_use]
    pub fn new() -> Self
    where
        A: Default,
    {
        Self::new_in(A::default())
    }
    /// Returns the number of elements in the vector.
    pub const fn len(&self) -> usize {
        self.len
    }
    /// Returns `true` if the vector contains no elements.
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Appends an element to the back, growing (0 -> 4, then doubling) when full.
    ///
    /// # Panics
    /// Panics if allocation fails.
    pub fn push(&mut self, value: T) {
        if self.cap == self.len {
            let nc = if self.cap == 0 { 4 } else { self.cap * 2 };
            self.set_capacity(nc).unwrap();
        }
        // SAFETY: the grow above guarantees `len < cap`, so slot `len` exists.
        unsafe {
            self.set(self.len, value);
        }
        self.len += 1;
    }
    /// Appends an element and returns a mutable reference to it.
    pub fn push_mut(&mut self, value: T) -> &mut T {
        if self.cap == self.len {
            let nc = if self.cap == 0 { 4 } else { self.cap * 2 };
            self.set_capacity(nc).unwrap();
        }
        // SAFETY: `len < cap` after the grow; the slot is written before
        // `len` is bumped, so an uninitialized element is never observable.
        unsafe {
            let end = self.as_mut_ptr().add(self.len);
            ptr::write(end, value);
            self.len += 1;
            &mut *end
        }
    }
    /// Removes and returns the last element, or `None` if empty.
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            None
        } else {
            self.len -= 1;
            // SAFETY: slot `len` was the initialized last element; `len` is
            // shrunk first so Drop can never observe it again.
            unsafe { Some(self.getx(self.len)) }
        }
    }
    /// Pops the last element only if `predicate` returns `true` for it.
    pub fn pop_if(&mut self, predicate: impl FnOnce(&mut T) -> bool) -> Option<T> {
        let last = self.last_mut()?;
        if predicate(last) { self.pop() } else { None }
    }
    /// Returns a guard for inspecting/mutating the last element, or `None` if empty.
    pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T, A>> {
        if self.is_empty() {
            None
        } else {
            Some(PeekMut { vec: self })
        }
    }
    /// Inserts `value` at `index`, shifting later elements right.
    ///
    /// # Panics
    /// Panics if `index > len`.
    pub fn insert(&mut self, index: usize, value: T) {
        assert!(index <= self.len);
        if self.cap == self.len {
            let na = if self.cap == 0 { 4 } else { self.cap * 2 };
            self.set_capacity(na).unwrap();
        }
        // SAFETY: capacity admits one more element; the overlapping copy
        // shifts `index..len` up by one before the slot is overwritten.
        unsafe {
            if index < self.len {
                ptr::copy(self.ixp(index), self.ixp(index + 1), self.len - index);
            }
            self.set(index, value);
        }
        self.len += 1;
    }
    /// Like [`VecA::insert`], but returns a mutable reference to the new element.
    ///
    /// # Panics
    /// Panics if `index > len`.
    pub fn insert_mut(&mut self, index: usize, value: T) -> &mut T {
        assert!(index <= self.len);
        if self.cap == self.len {
            let na = if self.cap == 0 { 4 } else { self.cap * 2 };
            self.set_capacity(na).unwrap();
        }
        unsafe {
            if index < self.len {
                ptr::copy(self.ixp(index), self.ixp(index + 1), self.len - index);
            }
            self.set(index, value);
        }
        self.len += 1;
        // SAFETY: `index < len` now holds, so the slot is initialized.
        unsafe { &mut *self.ixp(index) }
    }
    /// Removes and returns the element at `index`, shifting later elements left.
    ///
    /// # Panics
    /// Panics if `index >= len`.
    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        unsafe {
            // SAFETY: slot `index` is initialized; the overlapping copy then
            // reclaims it and `len` shrinks, so nothing is dropped twice.
            let result = self.getx(index);
            ptr::copy(self.ixp(index + 1), self.ixp(index), self.len() - index - 1);
            self.len -= 1;
            result
        }
    }
    /// O(1) removal that does not preserve order: the last element is moved
    /// into the vacated slot.
    ///
    /// # Panics
    /// Panics if `index >= len`.
    pub fn swap_remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        unsafe {
            let result = self.getx(index);
            self.len -= 1;
            if index != self.len {
                // Move the (former) last element into the vacated slot.
                let last = self.getx(self.len);
                self.set(index, last);
            }
            result
        }
    }
}
impl<T, A: Allocator> VecA<T, A> {
    /// Moves every element of `other` onto the back of `self`; `other` is
    /// left empty (its buffer/capacity is retained).
    pub fn append(&mut self, other: &mut Self) {
        self.reserve(other.len);
        // SAFETY: the two vectors own distinct buffers, so the copy cannot
        // overlap; `reserve` made room, and zeroing `other.len` transfers
        // ownership of the moved elements.
        unsafe {
            ptr::copy_nonoverlapping(other.ixp(0), self.ixp(self.len), other.len);
        }
        self.len += other.len;
        other.len = 0;
    }
    /// Appends clones of all elements in `other`.
    pub fn extend_from_slice(&mut self, other: &[T])
    where
        T: Clone,
    {
        for e in other {
            self.push(e.clone());
        }
    }
    /// Appends clones of the elements in range `src` of `self`.
    pub fn extend_from_within<R>(&mut self, src: R)
    where
        T: Clone,
        R: RangeBounds<usize>,
    {
        let (start, end) = self.get_range(src);
        for i in start..end {
            // Clone by value first: `push` may reallocate the buffer.
            let e = self[i].clone();
            self.push(e);
        }
    }
    /// Replaces `range` with the items of `replace_with`, yielding the
    /// removed elements; the actual replacement happens when the returned
    /// `Splice` is dropped.
    pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A>
    where
        R: RangeBounds<usize>,
        I: IntoIterator<Item = T>,
        A: Clone,
    {
        Splice {
            drain: self.drain(range),
            replace_with: replace_with.into_iter(),
        }
    }
    /// Drops all elements; capacity is unchanged.
    pub fn clear(&mut self) {
        while self.len > 0 {
            self.pop();
        }
    }
    /// Shortens to at most `len` elements, dropping the excess from the back.
    pub fn truncate(&mut self, len: usize) {
        while self.len > len {
            self.pop();
        }
    }
    /// Resizes to `new_len`, filling with clones of `value` when growing.
    pub fn resize(&mut self, new_len: usize, value: T)
    where
        T: Clone,
    {
        if new_len > self.len {
            while self.len < new_len {
                self.push(value.clone());
            }
        } else {
            self.truncate(new_len);
        }
    }
    /// Resizes to `new_len`, filling with values produced by `f` when growing.
    pub fn resize_with<F>(&mut self, new_len: usize, f: F)
    where
        F: FnMut() -> T,
    {
        let mut f = f;
        if new_len > self.len {
            while self.len < new_len {
                self.push(f());
            }
        } else {
            self.truncate(new_len);
        }
    }
    /// Splits the vector at `at`; the tail `[at, len)` is returned as a new
    /// vector using a clone of the allocator.
    ///
    /// # Panics
    /// Panics if `at > len`.
    pub fn split_off(&mut self, at: usize) -> Self
    where
        A: Clone,
    {
        assert!(at <= self.len);
        let other_len = self.len - at;
        let mut other = VecA::with_capacity_in(other_len, self.alloc.clone());
        // SAFETY: `self.len` is cut to `at` before copying, so the moved
        // tail is owned solely by `other`; the buffers are distinct.
        unsafe {
            self.len = at;
            other.len = other_len;
            ptr::copy_nonoverlapping(self.ixp(at), other.ixp(0), other_len);
        }
        other
    }
    /// Returns the initialized elements and the uninitialized spare capacity
    /// as two disjoint slices.
    pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit<T>]) {
        let ptr = self.as_mut_ptr();
        // SAFETY: `len <= cap`, so `ptr + len` stays inside the allocation.
        let spare_ptr = unsafe { ptr.add(self.len) };
        let spare_ptr = spare_ptr as *mut MaybeUninit<T>;
        let spare_len = self.cap - self.len;
        // SAFETY: the two slices cover disjoint halves of the buffer.
        unsafe {
            let initialized = slice::from_raw_parts_mut(ptr, self.len);
            let spare = slice::from_raw_parts_mut(spare_ptr, spare_len);
            (initialized, spare)
        }
    }
    /// Keeps only the elements for which `f` returns `true`; order preserved.
    pub fn retain<F>(&mut self, f: F)
    where
        F: FnMut(&T) -> bool,
    {
        Gap::new(self, 0).retain(f);
    }
    /// Like [`VecA::retain`], but the predicate gets a mutable reference.
    pub fn retain_mut<F>(&mut self, f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        Gap::new(self, 0).retain_mut(f);
    }
    /// Removes `range` and returns an iterator over the removed elements.
    /// Dropping the iterator removes the rest of the range.
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, A>
    where
        R: RangeBounds<usize>,
    {
        Drain::new(self, range)
    }
    /// Returns an iterator that removes (and yields) exactly the elements of
    /// `range` for which `filter` returns `true`.
    pub fn extract_if<F, R>(&mut self, range: R, filter: F) -> ExtractIf<'_, T, F, A>
    where
        F: FnMut(&mut T) -> bool,
        R: RangeBounds<usize>,
    {
        ExtractIf::new(self, filter, range)
    }
    /// Removes consecutive equal elements.
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }
    /// Removes consecutive elements for which `same_bucket` returns `true`
    /// (the first of each run is kept).
    pub fn dedup_by<F>(&mut self, same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        Gap::new(self, 0).dedup_by(same_bucket);
    }
    /// Dedups by comparing keys extracted with `key`.
    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b))
    }
    // Pointer to slot `i`.
    // SAFETY (caller): `i` must stay within the allocation.
    #[inline]
    unsafe fn ixp(&self, i: usize) -> *mut T {
        unsafe { self.nn.as_ptr().add(i) }
    }
    // Moves the value out of slot `i`.
    // SAFETY (caller): the slot must be initialized and must not be read again.
    #[inline]
    unsafe fn getx(&mut self, i: usize) -> T {
        unsafe { ptr::read(self.ixp(i)) }
    }
    // Writes `elem` into slot `i` without dropping any previous occupant.
    // SAFETY (caller): `i` must be within the allocation.
    #[inline]
    unsafe fn set(&mut self, i: usize, elem: T) {
        unsafe {
            ptr::write(self.ixp(i), elem);
        }
    }
    // Grows or shrinks the buffer to exactly `na` slots (never below `len`).
    fn set_capacity(&mut self, na: usize) -> Result<(), TryReserveError> {
        assert!(na >= self.len);
        if na == self.cap {
            return Ok(());
        }
        // SAFETY: `self.cap` accurately describes the current allocation.
        let result = unsafe { self.basic_set_capacity(self.cap, na) };
        if result.is_ok() {
            self.cap = na;
        }
        result
    }
    // Performs the actual (re)allocation from `oa` slots to `na` slots.
    // SAFETY (caller): `oa` must be the capacity of the current allocation.
    unsafe fn basic_set_capacity(&mut self, oa: usize, na: usize) -> Result<(), TryReserveError> {
        unsafe {
            // Zero-sized types never allocate; capacity is pure bookkeeping.
            if mem::size_of::<T>() == 0 {
                return Ok(());
            }
            if na == 0 {
                // Shrinking to zero: free the buffer and reset to dangling.
                self.alloc.deallocate(
                    NonNull::new(self.nn.as_ptr().cast::<u8>()).unwrap(),
                    Layout::array::<T>(oa).unwrap(),
                );
                self.nn = NonNull::dangling();
                return Ok(());
            }
            let new_layout = match Layout::array::<T>(na) {
                Ok(x) => x,
                Err(_e) => {
                    let kind = TryReserveErrorKind::CapacityOverflow;
                    return Err(TryReserveError { kind });
                }
            };
            let new_ptr = if oa == 0 {
                // First allocation.
                self.alloc.allocate(new_layout)
            } else {
                let old_layout = Layout::array::<T>(oa).unwrap();
                let old_ptr = self.nn.as_ptr().cast::<u8>();
                let old_ptr = NonNull::new(old_ptr).unwrap();
                if new_layout.size() > old_layout.size() {
                    self.alloc.grow(old_ptr, old_layout, new_layout)
                } else {
                    self.alloc.shrink(old_ptr, old_layout, new_layout)
                }
            };
            match new_ptr {
                Ok(p) => self.nn = NonNull::new(p.as_ptr().cast::<T>()).unwrap(),
                Err(_e) => {
                    let kind = TryReserveErrorKind::AllocError { layout: new_layout };
                    return Err(TryReserveError { kind });
                }
            }
        }
        Ok(())
    }
    // Resolves a `RangeBounds` into a concrete `(start, end)` pair.
    //
    // # Panics
    // Panics if the range is out of bounds, inverted, or a bound overflows.
    fn get_range<R>(&self, range: R) -> (usize, usize)
    where
        R: RangeBounds<usize>,
    {
        let start = match range.start_bound() {
            Bound::Included(x) => *x,
            Bound::Excluded(x) => {
                assert!(*x < usize::MAX);
                *x + 1
            }
            Bound::Unbounded => 0,
        };
        let end = match range.end_bound() {
            Bound::Included(x) => {
                assert!(*x < usize::MAX);
                *x + 1
            }
            Bound::Excluded(x) => *x,
            Bound::Unbounded => self.len,
        };
        assert!(end <= self.len);
        assert!(start <= end);
        (start, end)
    }
}
impl<T, A: Allocator> VecA<T, A> {
    /// Creates an empty vector that will allocate from `alloc`.
    #[must_use]
    pub const fn new_in(alloc: A) -> VecA<T, A> {
        Self {
            len: 0,
            cap: 0,
            alloc,
            nn: NonNull::dangling(),
        }
    }
    /// Returns a reference to the underlying allocator.
    pub fn allocator(&self) -> &A {
        &self.alloc
    }
    /// Number of elements the vector can hold without reallocating
    /// (`usize::MAX` for zero-sized element types, which never allocate).
    pub const fn capacity(&self) -> usize {
        if size_of::<T>() == 0 {
            usize::MAX
        } else {
            self.cap
        }
    }
    /// Creates a vector with capacity for `capacity` elements in `alloc`.
    ///
    /// # Panics
    /// Panics if allocation fails or the capacity overflows.
    pub fn with_capacity_in(capacity: usize, alloc: A) -> VecA<T, A> {
        let mut v = Self::new_in(alloc);
        v.set_capacity(capacity).unwrap();
        v
    }
    /// Creates a vector with capacity for `capacity` elements using the
    /// default allocator.
    ///
    /// # Panics
    /// Panics if allocation fails or the capacity overflows.
    #[must_use]
    pub fn with_capacity(capacity: usize) -> VecA<T, A>
    where
        A: Default,
    {
        let mut v = Self::new_in(A::default());
        v.set_capacity(capacity).unwrap();
        v
    }
    /// Ensures room for at least `additional` more elements.
    ///
    /// # Panics
    /// Panics on capacity overflow or allocation failure.
    pub fn reserve(&mut self, additional: usize) {
        // checked_add: `len + additional` could wrap in release builds,
        // which would silently skip the grow and corrupt later pushes.
        let capacity = self.len.checked_add(additional).expect("capacity overflow");
        if capacity > self.cap {
            self.set_capacity(capacity).unwrap();
        }
    }
    /// Like [`VecA::reserve`]; this implementation never over-allocates, so
    /// the exact and non-exact forms coincide.
    ///
    /// # Panics
    /// Panics on capacity overflow or allocation failure.
    pub fn reserve_exact(&mut self, additional: usize) {
        let capacity = self.len.checked_add(additional).expect("capacity overflow");
        if capacity > self.cap {
            self.set_capacity(capacity).unwrap();
        }
    }
    /// Shrinks the capacity to exactly `len`; allocation failure is ignored.
    pub fn shrink_to_fit(&mut self) {
        let _ = self.set_capacity(self.len);
    }
    /// Shrinks the capacity to at most `capacity` (never below `len`);
    /// allocation failure is ignored.
    pub fn shrink_to(&mut self, capacity: usize) {
        if self.cap > capacity {
            let _ = self.set_capacity(cmp::max(self.len, capacity));
        }
    }
}
impl<T, A: Allocator> VecA<T, A> {
    /// Converts the vector into a boxed slice, shedding excess capacity.
    ///
    /// NOTE(review): if `shrink_to_fit`'s reallocation fails (it is ignored),
    /// `cap` may still exceed `len`, and the box only records `len` — confirm
    /// `BoxA`'s deallocation layout tolerates that.
    pub fn into_boxed_slice(mut self) -> crate::BoxA<[T], A> {
        self.shrink_to_fit();
        let (p, len, _cap, a) = self.into_parts_with_alloc();
        let nn = NonNull::slice_from_raw_parts(p, len);
        crate::BoxA { nn, a }
    }
    /// The initialized elements as a shared slice.
    pub const fn as_slice(&self) -> &[T] {
        // SAFETY: the first `len` slots are initialized (type invariant).
        unsafe { std::slice::from_raw_parts(self.nn.as_ptr(), self.len) }
    }
    /// The initialized elements as a mutable slice.
    pub const fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: first `len` slots initialized; `&mut self` is exclusive.
        unsafe { std::slice::from_raw_parts_mut(self.nn.as_ptr(), self.len) }
    }
    /// The buffer pointer (dangling when nothing is allocated).
    pub const fn as_non_null(&mut self) -> NonNull<T> {
        self.nn
    }
    /// Raw const pointer to the buffer.
    pub const fn as_ptr(&self) -> *const T {
        self.nn.as_ptr()
    }
    /// Raw mutable pointer to the buffer.
    pub const fn as_mut_ptr(&mut self) -> *mut T {
        self.nn.as_ptr()
    }
    /// Decomposes into `(ptr, len, capacity)` without dropping; pair with
    /// [`VecA::from_parts`].
    pub fn into_parts(self) -> (NonNull<T>, usize, usize) {
        let (ptr, len, capacity) = self.into_raw_parts();
        // SAFETY: `ptr` originated from a `NonNull`, so it is never null.
        (unsafe { NonNull::new_unchecked(ptr) }, len, capacity)
    }
    /// Like [`VecA::into_parts`], additionally returning the allocator.
    pub fn into_parts_with_alloc(self) -> (NonNull<T>, usize, usize, A) {
        let (ptr, len, capacity, alloc) = self.into_raw_parts_with_alloc();
        (unsafe { NonNull::new_unchecked(ptr) }, len, capacity, alloc)
    }
    /// Decomposes into raw parts; the destructor is suppressed, so the
    /// allocator instance is leaked (use `into_raw_parts_with_alloc` to
    /// recover it).
    pub fn into_raw_parts(self) -> (*mut T, usize, usize) {
        let mut me = ManuallyDrop::new(self);
        (me.as_mut_ptr(), me.len, me.cap)
    }
    /// Decomposes into raw parts plus the allocator.
    pub fn into_raw_parts_with_alloc(self) -> (*mut T, usize, usize, A) {
        let mut me = ManuallyDrop::new(self);
        // SAFETY: `me` is never dropped, so the allocator is moved out once.
        let alloc = unsafe { ptr::read(&me.alloc) };
        (me.as_mut_ptr(), me.len, me.cap, alloc)
    }
    /// Rebuilds a vector from parts.
    ///
    /// # Safety
    /// `ptr`/`capacity` must describe an allocation owned by `alloc`, with
    /// the first `length` slots initialized.
    pub unsafe fn from_parts_in(ptr: NonNull<T>, length: usize, capacity: usize, alloc: A) -> Self {
        assert!(capacity >= length);
        Self {
            nn: ptr,
            cap: capacity,
            alloc,
            len: length,
        }
    }
    /// Raw-pointer variant of [`VecA::from_parts_in`].
    ///
    /// # Safety
    /// As for `from_parts_in`; additionally `ptr` must be non-null.
    pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self {
        assert!(capacity >= length);
        // SAFETY: caller guarantees `ptr` is non-null.
        let nn = unsafe { NonNull::new_unchecked(ptr) };
        Self {
            nn,
            cap: capacity,
            alloc,
            len: length,
        }
    }
    /// Leaks the vector, returning a mutable slice valid for `'a`.
    pub fn leak<'a>(self) -> &'a mut [T]
    where
        A: 'a,
    {
        let mut me = ManuallyDrop::new(self);
        // SAFETY: the destructor is suppressed, so the buffer is never freed
        // and the slice may outlive this call.
        unsafe { slice::from_raw_parts_mut(me.as_mut_ptr(), me.len) }
    }
    /// The uninitialized tail of the buffer (`cap - len` slots).
    pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
        // SAFETY: `len..cap` lies inside the allocation; exposing it as
        // `MaybeUninit<T>` makes no initialization claim.
        unsafe {
            slice::from_raw_parts_mut(
                self.as_mut_ptr().add(self.len) as *mut MaybeUninit<T>,
                self.cap - self.len,
            )
        }
    }
    /// Sets the length directly.
    ///
    /// # Safety
    /// The first `new_len` slots must be initialized (the bound against
    /// `cap` is checked by the assert).
    pub unsafe fn set_len(&mut self, new_len: usize) {
        assert!(new_len <= self.cap);
        self.len = new_len;
    }
    /// Reinterprets the vector as a vector of `N`-element arrays, first
    /// truncating the length (and shrinking the capacity) to multiples of `N`.
    pub fn into_chunks<const N: usize>(mut self) -> VecA<[T; N], A> {
        const {
            assert!(N != 0, "chunk size must be greater than zero");
        }
        let (len, cap) = (self.len(), self.capacity());
        let len_remainder = len % N;
        if len_remainder != 0 {
            self.truncate(len - len_remainder);
        }
        let cap_remainder = cap % N;
        // ZSTs report usize::MAX capacity and have no allocation to shrink.
        if mem::size_of::<T>() != 0 && cap_remainder != 0 {
            // NOTE(review): a failed shrink is ignored here, in which case
            // `cap / N` below understates the real allocation — confirm the
            // allocator tolerates the smaller layout on deallocation.
            self.shrink_to(cap - cap_remainder);
        }
        let (ptr, _, _, alloc) = self.into_raw_parts_with_alloc();
        // SAFETY: `len / N` and `cap / N` round down to the truncated sizes,
        // and `[T; N]` has the same layout as N consecutive `T`.
        unsafe { VecA::from_raw_parts_in(ptr.cast(), len / N, cap / N, alloc) }
    }
}
impl<T, A: Allocator, const N: usize> VecA<[T; N], A> {
    /// Reinterprets a vector of `N`-element arrays as a flat vector of `T`
    /// without moving any elements.
    pub fn into_flattened(self) -> VecA<T, A> {
        let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc();
        let (new_len, new_cap) = if mem::size_of::<T>() == 0 {
            // For ZSTs only the length matters; capacity is nominal.
            (len.checked_mul(N).expect("vec len overflow"), usize::MAX)
        } else {
            // SAFETY: the existing allocation already holds `len * N`
            // (resp. `cap * N`) elements, so the products cannot overflow
            // the allocation-size limit.
            unsafe { (len.unchecked_mul(N), cap.unchecked_mul(N)) }
        };
        // SAFETY: same allocation, reinterpreted as a flat run of `T`.
        unsafe { VecA::<T, A>::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) }
    }
}
impl<T> VecA<T> {
    /// Builds a vector of `length` elements where element `i` is `f(i)`.
    pub fn from_fn<F>(length: usize, f: F) -> VecA<T>
    where
        F: FnMut(usize) -> T,
    {
        let mut f = f;
        let mut m = VecA::with_capacity(length);
        for i in 0..length {
            m.push(f(i));
        }
        m
    }
    /// Rebuilds a vector from parts produced by [`VecA::into_parts`].
    ///
    /// # Safety
    /// `ptr`/`capacity` must describe an allocation made by the default
    /// allocator, with the first `length` slots initialized.
    pub unsafe fn from_parts(ptr: NonNull<T>, length: usize, capacity: usize) -> VecA<T> {
        // Same sanity check as `from_parts_in`; it was missing here.
        assert!(capacity >= length);
        let mut v = VecA::new();
        v.len = length;
        v.cap = capacity;
        v.nn = ptr;
        v
    }
    /// Raw-pointer variant of [`VecA::from_parts`].
    ///
    /// # Safety
    /// As for `from_parts`; additionally `ptr` must be non-null.
    pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> VecA<T> {
        // Same sanity check as `from_raw_parts_in`; it was missing here.
        assert!(capacity >= length);
        let mut v = VecA::new();
        v.len = length;
        v.cap = capacity;
        // SAFETY: caller guarantees `ptr` is non-null.
        v.nn = unsafe { NonNull::new_unchecked(ptr) };
        v
    }
}
impl<T, A: Allocator> VecA<T, A> {
pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
let mut v = Self::new_in(alloc);
v.set_capacity(capacity)?;
Ok(v)
}
pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
let capacity = self.len + additional;
if capacity > self.cap {
return self.set_capacity(capacity);
}
Ok(())
}
pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
let capacity = self.len + additional;
if capacity > self.cap {
return self.set_capacity(capacity);
}
Ok(())
}
pub fn push_within_capacity(&mut self, value: T) -> Result<&mut T, T> {
if self.cap == self.len {
return Err(value);
}
let index = self.len;
self.len += 1;
unsafe {
self.set(index, value);
}
Ok(unsafe { &mut *self.ixp(index) })
}
pub fn try_remove(&mut self, index: usize) -> Option<T> {
if index >= self.len {
return None;
}
unsafe {
let result = self.getx(index);
ptr::copy(self.ixp(index + 1), self.ixp(index), self.len() - index - 1);
self.len -= 1;
Some(result)
}
}
}
impl<T> VecA<T> {
    /// Fallible version of [`VecA::with_capacity`] for the default allocator.
    ///
    /// # Errors
    /// Returns the reservation error instead of panicking.
    pub fn try_with_capacity(capacity: usize) -> Result<VecA<T>, TryReserveError> {
        let mut vec = VecA::<T>::new();
        vec.set_capacity(capacity).map(|()| vec)
    }
}
// SAFETY: VecA uniquely owns its heap elements, so sending/sharing the
// vector is sound exactly when the element type and allocator allow it.
unsafe impl<T: Send, A: Allocator + Send> Send for VecA<T, A> {}
unsafe impl<T: Sync, A: Allocator + Sync> Sync for VecA<T, A> {}
// Total equality when the elements have it (delegates to PartialEq below).
impl<T: Eq, A: Allocator> Eq for VecA<T, A> {}
impl<T, A1, A2> PartialOrd<VecA<T, A2>> for VecA<T, A1>
where
    T: PartialOrd,
    A1: Allocator,
    A2: Allocator,
{
    /// Lexicographic comparison, delegated to the slice implementation.
    fn partial_cmp(&self, other: &VecA<T, A2>) -> Option<std::cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}
impl<T: Ord, A: Allocator> Ord for VecA<T, A> {
    /// Total lexicographic ordering, delegated to the slice implementation.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}
impl<T, U, A1: Allocator, A2: Allocator> PartialEq<VecA<U, A2>> for VecA<T, A1>
where
    T: PartialEq<U>,
{
    /// Element-wise equality between vectors over any two allocators.
    fn eq(&self, other: &VecA<U, A2>) -> bool {
        self.as_slice() == other.as_slice()
    }
}
impl<T, U, A: Allocator, const N: usize> PartialEq<[U; N]> for VecA<T, A>
where
    T: PartialEq<U>,
{
    /// Element-wise equality against a fixed-size array.
    fn eq(&self, other: &[U; N]) -> bool {
        self.as_slice() == other.as_slice()
    }
}
impl<T, U, A: Allocator, const N: usize> PartialEq<&[U; N]> for VecA<T, A>
where
    T: PartialEq<U>,
{
    /// Element-wise equality against a borrowed fixed-size array.
    fn eq(&self, other: &&[U; N]) -> bool {
        self.as_slice() == other.as_slice()
    }
}
impl<T: Hash, A: Allocator> Hash for VecA<T, A> {
    /// Hashes exactly like the corresponding slice.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state)
    }
}
impl<T, I: SliceIndex<[T]>, A: Allocator> Index<I> for VecA<T, A> {
    type Output = I::Output;
    /// Delegates indexing (single index or range) to the underlying slice.
    fn index(&self, index: I) -> &Self::Output {
        &self.as_slice()[index]
    }
}
impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for VecA<T, A> {
    /// Mutable indexing via the underlying slice.
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        &mut self.as_mut_slice()[index]
    }
}
impl<T, A: Allocator> Deref for VecA<T, A> {
    type Target = [T];
    /// A `VecA` dereferences to its initialized elements.
    fn deref(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T, A: Allocator> DerefMut for VecA<T, A> {
    /// Mutable slice view of the initialized elements.
    fn deref_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}
impl<'a, T: 'a, A: Allocator> IntoIterator for &'a VecA<T, A> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;
    /// Iterates the elements by shared reference.
    fn into_iter(self) -> Self::IntoIter {
        self.as_slice().iter()
    }
}
impl<'a, T: 'a, A: Allocator> IntoIterator for &'a mut VecA<T, A> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;
    /// Iterates the elements by mutable reference.
    fn into_iter(self) -> Self::IntoIter {
        self.as_mut_slice().iter_mut()
    }
}
impl<T, A: Allocator> IntoIterator for VecA<T, A> {
    type Item = T;
    type IntoIter = IntoIter<T, A>;
    /// Consumes the vector, yielding its elements by value.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}
impl<T, A: Allocator> Extend<T> for VecA<T, A> {
    /// Appends every item produced by `iter`, one push at a time.
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        iter.into_iter().for_each(|item| self.push(item));
    }
}
impl<'a, T: Copy + 'a, A: Allocator> Extend<&'a T> for VecA<T, A> {
    /// Appends copies of the referenced items.
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        iter.into_iter().for_each(|item| self.push(*item));
    }
}
impl<T: Clone, A: Allocator + Clone> Clone for VecA<T, A> {
    /// Deep-copies the elements into a new, exactly-sized vector that uses a
    /// clone of the allocator.
    fn clone(&self) -> Self {
        let mut out = VecA::with_capacity_in(self.len, self.alloc.clone());
        out.extend_from_slice(self.as_slice());
        out
    }
}
impl<T, A: Allocator> Drop for VecA<T, A> {
    /// Drops every element, then releases the backing buffer (any
    /// deallocation error is ignored — there is nowhere to report it).
    fn drop(&mut self) {
        self.clear();
        let _ = self.set_capacity(0);
    }
}
impl<T> Default for VecA<T> {
fn default() -> Self {
Self::new()
}
}
impl<T: Debug, A: Allocator> Debug for VecA<T, A> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
impl<T: Clone> From<&[T]> for VecA<T> {
    /// Clones the slice's elements into a new vector.
    fn from(s: &[T]) -> VecA<T> {
        s.iter().cloned().collect()
    }
}
impl<T: Clone> From<&mut [T]> for VecA<T> {
    /// Clones the slice's elements into a new vector.
    fn from(s: &mut [T]) -> VecA<T> {
        VecA::from(&*s)
    }
}
impl<T, const N: usize> From<[T; N]> for VecA<T> {
    /// Moves the array's elements into a new vector.
    fn from(a: [T; N]) -> VecA<T> {
        a.into_iter().collect()
    }
}
impl<T> FromIterator<T> for VecA<T> {
    /// Collects the iterator's items into a new vector, in order.
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> VecA<T> {
        let mut out = VecA::new();
        for item in iter {
            out.push(item);
        }
        out
    }
}
impl<T: Clone, const N: usize> From<&[T; N]> for VecA<T> {
    /// Clones the array's elements into a new vector.
    fn from(s: &[T; N]) -> VecA<T> {
        VecA::from(&s[..])
    }
}
impl<T: Clone, const N: usize> From<&mut [T; N]> for VecA<T> {
    /// Clones the array's elements into a new vector.
    fn from(s: &mut [T; N]) -> VecA<T> {
        VecA::from(&mut s[..])
    }
}
impl<T, A: Allocator> AsRef<VecA<T, A>> for VecA<T, A> {
    /// Identity `AsRef`, mirroring `std::vec::Vec`.
    fn as_ref(&self) -> &VecA<T, A> {
        self
    }
}
impl<T, A: Allocator> AsMut<VecA<T, A>> for VecA<T, A> {
    /// Identity `AsMut`, mirroring `std::vec::Vec`.
    fn as_mut(&mut self) -> &mut VecA<T, A> {
        self
    }
}
impl<T, A: Allocator> AsRef<[T]> for VecA<T, A> {
    /// View of the initialized elements.
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T, A: Allocator> AsMut<[T]> for VecA<T, A> {
    /// Mutable view of the initialized elements.
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}
impl<T, A: Allocator> Borrow<[T]> for VecA<T, A> {
    /// Borrows as a slice, consistent with `Hash`/`Eq`/`Ord` on the vector.
    fn borrow(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T, A: Allocator> BorrowMut<[T]> for VecA<T, A> {
    /// Mutably borrows as a slice.
    fn borrow_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}
impl<T, A: Allocator, const N: usize> TryFrom<VecA<T, A>> for [T; N] {
    type Error = VecA<T, A>;
    /// Converts the vector into an array, failing (and returning the vector
    /// untouched) unless its length is exactly `N`.
    fn try_from(mut vec: VecA<T, A>) -> Result<[T; N], VecA<T, A>> {
        if vec.len() != N {
            return Err(vec);
        }
        // SAFETY: zeroing the length first means the vector's Drop will not
        // double-drop the elements we are about to read out.
        unsafe { vec.set_len(0) };
        // SAFETY: the first N slots are initialized; ownership moves into
        // the array, and the (now empty) vector only frees its buffer.
        let array = unsafe { ptr::read(vec.as_ptr() as *const [T; N]) };
        Ok(array)
    }
}
#[derive(Debug)]
/// Owning iterator over a `VecA` (returned by `into_iter`).
pub struct IntoIter<T, A: Allocator = Global> {
    // Front cursor: index of the next element to yield.
    start: usize,
    // Back cursor: one past the last element to yield.
    end: usize,
    // Source vector; its `len` is held at 0 so its Drop only frees the
    // buffer, while this iterator owns the elements in `start..end`.
    v: VecA<T, A>,
}
impl<T, A: Allocator> IntoIter<T, A> {
    // Takes ownership of `v`'s elements by zeroing its length.
    fn new(mut v: VecA<T, A>) -> Self {
        let end = v.len;
        v.len = 0;
        Self { start: 0, end, v }
    }
    /// The remaining elements as a shared slice.
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: slots `start..end` are initialized and owned by `self`.
        unsafe { slice::from_raw_parts(self.v.ixp(self.start), self.len()) }
    }
    /// The remaining elements as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: as above, with exclusive access through `&mut self`.
        unsafe { slice::from_raw_parts_mut(self.v.ixp(self.start), self.len()) }
    }
    /// Returns a reference to the underlying allocator.
    pub fn allocator(&self) -> &A {
        &self.v.alloc
    }
}
impl<T, A> Default for IntoIter<T, A>
where
A: Allocator + Default,
{
fn default() -> Self {
VecA::new_in(Default::default()).into_iter()
}
}
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
fn as_ref(&self) -> &[T] {
self.as_slice()
}
}
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
fn clone(&self) -> Self {
let mut v = VecA::new_in(self.allocator().clone());
v.extend_from_slice(self.as_slice());
v.into_iter()
}
}
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;
    /// Moves the next front element out of the buffer.
    fn next(&mut self) -> Option<T> {
        if self.start == self.end {
            None
        } else {
            let ix = self.start;
            self.start += 1;
            // SAFETY: `ix` lies in `start..end`, so the slot is initialized
            // and is read out exactly once.
            Some(unsafe { self.v.getx(ix) })
        }
    }
    /// Exact remaining count. Previously this was the `(0, None)` default,
    /// which violates the `ExactSizeIterator` contract below and defeats
    /// `collect` preallocation.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let n = self.end - self.start;
        (n, Some(n))
    }
}
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn len(&self) -> usize {
        self.end - self.start
    }
}
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    /// Moves the next back element out of the buffer.
    fn next_back(&mut self) -> Option<T> {
        if self.start == self.end {
            None
        } else {
            self.end -= 1;
            // SAFETY: slot `end` is initialized and now excluded from the
            // remaining range, so it is read out exactly once.
            Some(unsafe { self.v.getx(self.end) })
        }
    }
}
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
impl<T, A: Allocator> Drop for IntoIter<T, A> {
    /// Drops any elements that were never yielded; the buffer itself is
    /// freed afterwards by the inner vector's own Drop (its `len` is 0).
    fn drop(&mut self) {
        while self.next().is_some() {}
    }
}
// Internal "gap" cursor shared by retain/dedup/drain/extract_if/splice.
// Slots `w..r` are vacant (their values were moved out or rejected);
// `0..w` and `r..len` are live. The owning vector's `len` is held at 0
// while a Gap is live so a panic or leak cannot expose vacant slots; the
// original length is stashed in `len` here.
struct Gap<'a, T, A: Allocator> {
    r: usize, w: usize, len: usize, v: &'a mut VecA<T, A>,
}
impl<'a, T, A: Allocator> Gap<'a, T, A> {
    // Opens a gap cursor at position `b`, zeroing the vector's length (see
    // the struct comment for why).
    fn new(v: &'a mut VecA<T, A>, b: usize) -> Self {
        let len = v.len;
        v.len = 0;
        Self { w: b, r: b, v, len }
    }
    // Shifts the live tail `r..len` down onto `w` and restores the vector's
    // length, shrunk by the gap width.
    fn close_gap(&mut self) {
        let n = self.len - self.r;
        let g = self.r - self.w;
        if n > 0 && g != 0 {
            // SAFETY: both ranges lie inside the original length;
            // `ptr::copy` handles the overlap.
            unsafe {
                let dst = self.v.ixp(self.w);
                let src = self.v.ixp(self.r);
                ptr::copy(src, dst, n);
            }
        }
        self.v.len = self.len - g;
    }
    // Moves elements forward unconditionally until `r` reaches `upto`
    // (used by `Drain::keep_rest`).
    fn keep(&mut self, upto: usize) {
        unsafe {
            while self.r < upto {
                let nxt = self.v.ixp(self.r);
                if self.r != self.w {
                    let to = self.v.ixp(self.w);
                    ptr::write(to, ptr::read(nxt));
                }
                self.r += 1;
                self.w += 1;
            }
        }
    }
    // Writes `e` into the write cursor's slot.
    // SAFETY (caller): `w < r`, i.e. the target slot must be vacant.
    unsafe fn fill(&mut self, e: T) {
        unsafe {
            let to = self.v.ixp(self.w);
            ptr::write(to, e);
            self.w += 1;
        }
    }
    // Refills the gap from `src` (used by `Splice` on drop). If `src` ends
    // early the remaining gap is compacted later by `close_gap`; if it has
    // more items than the gap, the tail is split off, the extras are
    // pushed, and the tail is re-appended.
    fn append<I: Iterator<Item = T>>(&mut self, src: &mut I)
    where
        A: Clone,
    {
        while self.w < self.r {
            if let Some(e) = src.next() {
                // SAFETY: loop condition guarantees `w < r` (vacant slot).
                unsafe {
                    self.fill(e);
                }
            } else {
                // Gap not fully refilled; `close_gap` on drop compacts it.
                return;
            }
        }
        // Here `w == r`: there is no gap, so restoring the stashed length
        // yields a fully coherent vector for `split_off`/`push`/`append`.
        self.v.len = self.len;
        let mut tail = self.v.split_off(self.w);
        for e in src {
            self.v.push(e);
        }
        self.v.append(&mut tail);
        // Re-stash the new total so `close_gap` (g == 0 now) is a no-op.
        self.len = self.v.len;
    }
    // Keeps elements matching `f`; rejected elements are dropped in place.
    fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        unsafe {
            while self.r < self.len {
                let nxt = self.v.ixp(self.r);
                if f(&mut *nxt) {
                    if self.r != self.w {
                        let to = self.v.ixp(self.w);
                        ptr::write(to, ptr::read(nxt));
                    }
                    self.r += 1;
                    self.w += 1;
                } else {
                    self.r += 1;
                    // Drop the rejected element by reading it out.
                    let _v = ptr::read(nxt);
                }
            }
        }
    }
    // Same as `retain`, but the predicate may mutate the element.
    fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        unsafe {
            while self.r < self.len {
                let nxt = self.v.ixp(self.r);
                if f(&mut *nxt) {
                    if self.r != self.w {
                        let to = self.v.ixp(self.w);
                        ptr::write(to, ptr::read(nxt));
                    }
                    self.r += 1;
                    self.w += 1;
                } else {
                    self.r += 1;
                    // Drop the rejected element by reading it out.
                    let _v = ptr::read(nxt);
                }
            }
        }
    }
    // Removes consecutive elements equal (per `same_bucket`) to the last
    // kept element, which `cur` tracks.
    fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        if self.len > 0 {
            unsafe {
                // The first element is always kept.
                let mut cur = 0;
                self.r = 1;
                self.w = 1;
                while self.r < self.len {
                    let cp = self.v.ixp(cur);
                    let nxt = self.v.ixp(self.r);
                    if same_bucket(&mut *nxt, &mut *cp) {
                        self.r += 1;
                        // Duplicate: drop it by reading it out.
                        let _v = ptr::read(nxt);
                    } else {
                        cur = self.w;
                        if self.r != self.w {
                            let to = self.v.ixp(self.w);
                            ptr::write(to, ptr::read(nxt));
                        }
                        self.r += 1;
                        self.w += 1;
                    }
                }
            }
        }
    }
    // Advances to the next element in `..end` matching `f` and moves it
    // out; non-matching elements are compacted toward the front.
    fn extract_if<F>(&mut self, f: &mut F, end: usize) -> Option<T>
    where
        F: FnMut(&mut T) -> bool,
    {
        unsafe {
            while self.r < end {
                let nxt = self.v.ixp(self.r);
                if f(&mut *nxt) {
                    self.r += 1;
                    return Some(ptr::read(nxt));
                }
                if self.r != self.w {
                    let to = self.v.ixp(self.w);
                    ptr::write(to, ptr::read(nxt));
                }
                self.r += 1;
                self.w += 1;
            }
            None
        }
    }
}
impl<'a, T, A: Allocator> Drop for Gap<'a, T, A> {
    // Compacts any remaining gap and restores the vector's length, keeping
    // the vector valid even if a caller's closure panicked mid-operation.
    fn drop(&mut self) {
        self.close_gap();
    }
}
/// Draining iterator for `VecA` (see [`VecA::drain`]).
pub struct Drain<'a, T, A: Allocator = Global> {
    // Gap cursor over the source vector; `gap.r` is the front of the
    // not-yet-yielded range.
    gap: Gap<'a, T, A>,
    // One past the last index of the original drain range.
    end: usize,
    // Back cursor: elements in `gap.r..br` remain to be yielded.
    br: usize,
}
impl<'a, T, A: Allocator> Drain<'a, T, A> {
fn new<R: RangeBounds<usize>>(vec: &'a mut VecA<T, A>, range: R) -> Self {
let (b, end) = vec.get_range(range);
let gap = Gap::new(vec, b);
Self { gap, end, br: end }
}
pub fn keep_rest(mut self) {
self.gap.keep(self.br);
}
#[must_use]
pub fn allocator(&self) -> &A {
&self.gap.v.alloc
}
#[must_use]
pub fn as_slice(&self) -> &[T] {
&self.gap.v[self.gap.r..self.br]
}
}
impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> {
    /// The elements remaining to be drained.
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> {
    /// Debug-prints the remaining elements, e.g. `Drain([1, 2])`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Drain").field(&self.as_slice()).finish()
    }
}
impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
    type Item = T;
    /// Moves the next front element of the drained range out of the vector.
    fn next(&mut self) -> Option<T> {
        if self.gap.r == self.br {
            return None;
        }
        let x = self.gap.r;
        self.gap.r += 1;
        // SAFETY: `x` lies within the drain range and has not been read yet.
        Some(unsafe { self.gap.v.getx(x) })
    }
    // Exact, as required by the `ExactSizeIterator` impl below.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let n = self.br - self.gap.r;
        (n, Some(n))
    }
}
impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
    /// Moves the next back element of the drained range out of the vector.
    fn next_back(&mut self) -> Option<T> {
        if self.gap.r == self.br {
            return None;
        }
        self.br -= 1;
        let x = self.br;
        // SAFETY: `x` lies within the drain range and has not been read yet.
        Some(unsafe { self.gap.v.getx(x) })
    }
}
impl<T, A: Allocator> Drop for Drain<'_, T, A> {
    fn drop(&mut self) {
        // Drop everything not yet yielded from the front...
        while self.next().is_some() {}
        // ...then jump `r` past the slots already consumed via `next_back`
        // so the Gap's drop compacts from the true end of the drain range.
        self.gap.r = self.end;
    }
}
impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {}
impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
/// Filtering removal iterator for `VecA` (see [`VecA::extract_if`]).
pub struct ExtractIf<'a, T, F, A: Allocator = Global> {
    // Gap cursor over the source vector; non-matching elements are
    // compacted toward the front as the iterator advances.
    gap: Gap<'a, T, A>,
    // One past the last index of the range being filtered.
    end: usize,
    // The caller-supplied predicate.
    pred: F,
}
impl<'a, T, F, A: Allocator> ExtractIf<'a, T, F, A> {
pub(super) fn new<R: RangeBounds<usize>>(vec: &'a mut VecA<T, A>, pred: F, range: R) -> Self {
let (b, end) = vec.get_range(range);
let gap = Gap::new(vec, b);
Self { gap, pred, end }
}
pub fn allocator(&self) -> &A {
&self.gap.v.alloc
}
#[must_use]
fn as_slice(&self) -> &[T] {
&self.gap.v[self.gap.r..self.end]
}
}
impl<T: fmt::Debug, F, A: Allocator> fmt::Debug for ExtractIf<'_, T, F, A> {
    /// Debug-prints the elements not yet visited by the filter.
    ///
    /// The previous generics (`ExtractIf<'_, T, A>`) bound the allocator
    /// type to the *filter* slot and fixed the allocator to `Global`, so
    /// the impl never applied to real predicate closures; the filter
    /// parameter `F` is now independent and unconstrained.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("ExtractIf").field(&self.as_slice()).finish()
    }
}
impl<T, F, A: Allocator> Iterator for ExtractIf<'_, T, F, A>
where
    F: FnMut(&mut T) -> bool,
{
    type Item = T;
    /// Removes and returns the next element matching the predicate;
    /// non-matching elements are compacted toward the front.
    fn next(&mut self) -> Option<T> {
        self.gap.extract_if(&mut self.pred, self.end)
    }
    // Lower bound 0: the predicate may reject every remaining element.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.end - self.gap.r))
    }
}
#[derive(Debug)]
/// Splicing iterator returned by [`VecA::splice`]: yields the drained range
/// and, on drop, inserts `replace_with`'s items in its place.
pub struct Splice<'a, I: Iterator + 'a, A: Allocator + Clone + 'a = Global> {
    // Drain over the range being replaced.
    drain: Drain<'a, I::Item, A>,
    // Items that will fill the vacated range.
    replace_with: I,
}
impl<I: Iterator, A: Allocator + Clone> Drop for Splice<'_, I, A> {
    fn drop(&mut self) {
        // Drop whatever the caller didn't consume, then refill the gap from
        // the replacement iterator. `drain`'s own Drop (and then the Gap's)
        // runs afterwards and finishes compaction/length bookkeeping.
        self.drain.by_ref().for_each(drop);
        self.drain.gap.append(&mut self.replace_with);
    }
}
impl<I: Iterator, A: Allocator + Clone> Iterator for Splice<'_, I, A> {
    type Item = I::Item;
    /// Forwards to the underlying drain.
    fn next(&mut self) -> Option<Self::Item> {
        self.drain.next()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.drain.size_hint()
    }
}
impl<I: Iterator, A: Allocator + Clone> DoubleEndedIterator for Splice<'_, I, A> {
    /// Forwards to the underlying drain.
    fn next_back(&mut self) -> Option<Self::Item> {
        self.drain.next_back()
    }
}
impl<I: Iterator, A: Allocator + Clone> ExactSizeIterator for Splice<'_, I, A> {}
/// Mutable view of the last element, produced by [`VecA::peek_mut`].
/// Only ever constructed for a non-empty vector (`len >= 1` invariant).
pub struct PeekMut<'a, T, A: Allocator = Global> {
    vec: &'a mut VecA<T, A>,
}
impl<T: fmt::Debug, A: Allocator> fmt::Debug for PeekMut<'_, T, A> {
    /// Debug-prints the peeked element, e.g. `PeekMut(3)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("PeekMut").field(self.deref()).finish()
    }
}
impl<'a, T, A: Allocator> PeekMut<'a, T, A> {
    /// Removes and returns the peeked (last) element.
    pub fn pop(this: Self) -> T {
        // SAFETY: `peek_mut` only constructs a PeekMut for a non-empty
        // vector, so `pop` always yields `Some`.
        unsafe { this.vec.pop().unwrap_unchecked() }
    }
}
impl<'a, T, A: Allocator> Deref for PeekMut<'a, T, A> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // `len >= 1` is an invariant of this type, so this cannot underflow.
        let idx = self.vec.len() - 1;
        // SAFETY: `idx` is in bounds by the invariant above.
        unsafe { self.vec.get_unchecked(idx) }
    }
}
impl<'a, T, A: Allocator> DerefMut for PeekMut<'a, T, A> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        let idx = self.vec.len() - 1;
        // SAFETY: `idx` is in bounds by the invariant above.
        unsafe { self.vec.get_unchecked_mut(idx) }
    }
}
/// Crate-local replacement for `std`'s `vec!` macro.
#[macro_export]
macro_rules! vec {
    () => (
        $crate::vec::Vec::new()
    );
    ($elem:expr; $n:expr) => (
        $crate::vec::from_elem($elem, $n)
    );
    ($($x:expr),+ $(,)?) => (
        // `$crate::` path fix: the bare `Vec` previously resolved at the
        // call site — typically to `std::vec::Vec` via the prelude — so this
        // arm produced a different type from the other two arms.
        $crate::vec::Vec::from_iter([$($x),+])
    );
}
// Backing function for the `vec![elem; n]` macro arm: builds a vector of
// `n` clones of `elem`.
#[doc(hidden)]
pub fn from_elem<T: Clone>(elem: T, n: usize) -> VecA<T> {
    let mut out = VecA::with_capacity(n);
    out.resize(n, elem);
    out
}
#[cfg(test)]
mod test;