use crate::CapacityError;
use crate::storage::{
buffer_too_large_for_index_type, mut_ptr_at_index, normalize_range, ptr_at_index, ArrayLayout, Capacity, Storage, InlineStorage,
};
use core::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd};
use core::hash::{Hash, Hasher};
use core::iter::{DoubleEndedIterator, FusedIterator, IntoIterator as IntoIter, Iterator};
use core::marker::PhantomData;
#[allow(unused_imports)] use core::mem::MaybeUninit;
use core::ops::{Range, RangeBounds};
use core::ptr;
/// A contiguous array type backed by a generic storage `S`, with its length
/// tracked by a compact index type `I` (defaults to `usize`).
pub struct Vec<T, S: Storage<ArrayLayout<T>>, I: Capacity = usize> {
    // Number of initialized elements; invariant: len <= buf.capacity().
    len: I,
    // Backing storage; the first `len` slots hold initialized `T`s.
    buf: S,
    // Marks logical ownership of `T` for drop-check/variance purposes.
    elem: PhantomData<T>,
}
/// Builds an empty vector on top of the supplied storage.
///
/// Calls `buffer_too_large_for_index_type` (which diverges) when the
/// storage's capacity cannot be represented by the index type `I`.
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> From<S> for Vec<T, S, I> {
    fn from(buf: S) -> Self {
        let capacity_fits = buf.capacity() <= I::MAX_REPRESENTABLE;
        if !capacity_fits {
            buffer_too_large_for_index_type::<I>();
        }
        Self {
            len: I::from_usize(0),
            buf,
            elem: PhantomData,
        }
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Vec<T, S, I> {
    /// Decomposes the vector into its backing storage and its length.
    ///
    /// The caller becomes responsible for the first `len` initialized
    /// elements; the vector's own `Drop` is suppressed with `mem::forget`.
    pub fn into_raw_parts(self) -> (S, I) {
        // `Vec` implements `Drop`, so `self.buf` cannot be moved out
        // directly; read it out by raw pointer instead, then forget `self`
        // so neither the storage nor the elements are dropped here.
        let ptr = core::ptr::addr_of!(self.buf);
        let result = (unsafe { ptr.read() }, self.len);
        core::mem::forget(self);
        result
    }
    /// Reassembles a vector from the parts returned by
    /// [`Vec::into_raw_parts`].
    ///
    /// # Safety
    /// The first `length` slots of `buf` must hold initialized `T`s, and
    /// `length` must not exceed the storage's capacity.
    pub unsafe fn from_raw_parts(buf: S, length: I) -> Self {
        Vec {
            buf,
            len: length,
            elem: PhantomData,
        }
    }
    /// Returns the total number of elements the storage can hold.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.buf.capacity()
    }
    /// Returns the number of elements currently in the vector.
    #[inline]
    pub fn len(&self) -> usize {
        self.len.as_usize()
    }
    /// Overwrites the stored length without reading or writing elements.
    ///
    /// # Safety
    /// The first `new_len` slots of the storage must be initialized.
    #[inline]
    pub(crate) unsafe fn set_len(&mut self, new_len: I) {
        debug_assert!(new_len.as_usize() <= self.capacity());
        self.len = new_len;
    }
    /// Returns `true` if the vector holds no elements.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len.as_usize() == 0
    }
    /// Returns `true` if no further elements can be pushed.
    #[inline]
    pub fn is_full(&self) -> bool {
        self.len.as_usize() == self.buf.capacity()
    }
    /// Removes and returns the last element, or `None` if empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.is_empty() {
            return None;
        }
        // Shrink first; the slot just past the new length still holds the
        // value, which is then moved out by value.
        self.len = I::from_usize(self.len() - 1);
        unsafe { Some(ptr_at_index(&self.buf, self.len()).read()) }
    }
    /// Returns a shared slice over the initialized elements.
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        self
    }
    /// Returns an exclusive slice over the initialized elements.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        self
    }
    /// Returns a reference to the element at `index`, or `None` if out of
    /// bounds.
    #[inline]
    pub fn get(&self, index: I) -> Option<&T> {
        let index = index.as_usize();
        if self.len() <= index {
            return None;
        }
        unsafe { Some(&*ptr_at_index(&self.buf, index)) }
    }
    /// Returns a mutable reference to the element at `index`, or `None` if
    /// out of bounds.
    #[inline]
    pub fn get_mut(&mut self, index: I) -> Option<&mut T> {
        let index = index.as_usize();
        if self.len() <= index {
            return None;
        }
        unsafe { Some(&mut *mut_ptr_at_index(&mut self.buf, index)) }
    }
    /// Appends `value`, or hands it back as `Err` when the vector is full.
    #[inline]
    pub fn try_push(&mut self, value: T) -> Result<(), T> {
        if self.is_full() {
            return Err(value);
        }
        let len = self.len();
        unsafe { mut_ptr_at_index(&mut self.buf, len).write(value); }
        self.len = I::from_usize(len + 1);
        Ok(())
    }
    /// Appends `value`.
    ///
    /// # Panics
    /// Panics if the vector is already at capacity.
    #[inline]
    pub fn push(&mut self, value: T) {
        // Cold/never-inline keeps the panic path out of the hot path.
        #[cold]
        #[inline(never)]
        fn assert_failed() -> ! {
            panic!("vector is already at capacity")
        }
        if self.try_push(value).is_err() {
            assert_failed();
        }
    }
    /// Shortens the vector to `len` elements, dropping the rest; does
    /// nothing when `len` is not smaller than the current length.
    pub fn truncate(&mut self, len: I) {
        let new_len = len.as_usize();
        let old_len = self.len.as_usize();
        if new_len >= old_len {
            return;
        }
        // FIX: update the length *before* dropping the tail. Previously the
        // length was only written after the loop, so a panicking destructor
        // would leave `len` at its old value and `Vec::drop` would then
        // drop the already-dropped elements again during unwinding. With
        // the length updated first, a panic merely leaks the not-yet-dropped
        // tail elements (matching `std::vec::Vec::truncate` semantics).
        self.len = len;
        for i in new_len..old_len {
            unsafe { mut_ptr_at_index(&mut self.buf, i).drop_in_place(); }
        }
    }
    /// Removes all elements.
    #[inline]
    pub fn clear(&mut self) {
        self.truncate(I::from_usize(0));
    }
    /// Swaps the elements at indices `fst` and `snd`.
    ///
    /// # Panics
    /// Panics if either index is out of bounds.
    #[inline]
    pub fn swap(&mut self, fst: I, snd: I) {
        let fst = fst.as_usize();
        let snd = snd.as_usize();
        self.as_mut_slice().swap(fst, snd);
    }
    /// Removes the element at `index` in O(1) by replacing it with the last
    /// element; does not preserve ordering.
    ///
    /// # Panics
    /// Panics if `index` is out of bounds.
    #[inline]
    pub fn swap_remove(&mut self, index: I) -> T {
        #[cold]
        #[inline(never)]
        fn assert_failed(idx: usize, len: usize) -> ! {
            panic!(
                "swap_remove index (is {}) should be < len (is {})",
                idx, len
            );
        }
        let idx = index.as_usize();
        let len = self.len.as_usize();
        if idx >= len {
            assert_failed(idx, len);
        }
        unsafe {
            // Read the last element, shrink, then overwrite the hole with
            // it. When `idx` is the last slot this writes the element back
            // onto itself, which is fine for a by-value read.
            let last = ptr_at_index(&self.buf, len - 1).read();
            let hole = mut_ptr_at_index(&mut self.buf, idx);
            self.len = I::from_usize(self.len() - 1);
            ptr::replace(hole, last)
        }
    }
    /// Inserts `element` at `index`, shifting later elements right.
    ///
    /// # Panics
    /// Panics if the vector is full, or if `index > len`.
    pub fn insert(&mut self, index: I, element: T) {
        #[cold]
        #[inline(never)]
        fn assert_failed() -> ! {
            panic!("vector is already at capacity")
        }
        let result = self.try_insert(index, element);
        if result.is_err() {
            assert_failed();
        }
    }
    /// Inserts `element` at `index`, shifting later elements right;
    /// returns the element as `Err` when the vector is full.
    ///
    /// # Panics
    /// Panics if `index > len`.
    pub fn try_insert(&mut self, index: I, element: T) -> Result<(), T> {
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!(
                "insertion index (is {}) should be <= len (is {})",
                index, len
            );
        }
        if self.is_full() {
            return Err(element);
        }
        let idx = index.as_usize();
        let len = self.len.as_usize();
        if idx > len {
            assert_failed(idx, len);
        }
        let p = mut_ptr_at_index(&mut self.buf, idx);
        unsafe {
            // Shift the tail one slot to the right, then write into the gap.
            ptr::copy(p, p.add(1), len - idx);
            ptr::write(p, element);
        }
        self.len = I::from_usize(len + 1);
        Ok(())
    }
    /// Replaces the element at `index`, returning the previous value.
    ///
    /// # Panics
    /// Panics if `index` is out of bounds.
    pub fn replace(&mut self, index: I, element: T) -> T {
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!(
                "replacement index (is {}) should be < len (is {})",
                index, len
            );
        }
        let idx = index.as_usize();
        let len = self.len.as_usize();
        if idx >= len {
            assert_failed(idx, len);
        }
        let p = mut_ptr_at_index(&mut self.buf, idx);
        unsafe { ptr::replace(p, element) }
    }
    /// Removes and returns the element at `index`, shifting later elements
    /// left.
    ///
    /// # Panics
    /// Panics if `index` is out of bounds.
    pub fn remove(&mut self, index: I) -> T {
        #[cold]
        #[inline(never)]
        fn assert_failed(idx: usize, len: usize) -> ! {
            panic!("removal index (is {}) should be < len (is {})", idx, len);
        }
        let idx = index.as_usize();
        let len = self.len.as_usize();
        if idx >= len {
            assert_failed(idx, len);
        }
        unsafe {
            let ret;
            {
                // Move the element out, then close the gap.
                let p = mut_ptr_at_index(&mut self.buf, idx);
                ret = ptr::read(p);
                ptr::copy(p.offset(1), p, len - idx - 1);
            }
            self.len = I::from_usize(len - 1);
            ret
        }
    }
    /// Keeps only the elements for which `f` returns `true`.
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        // Implemented on top of `drain_filter`: remove what `f` rejects.
        self.drain_filter(|_, item| !f(&*item));
    }
    /// Removes the given range and returns an iterator over it.
    ///
    /// The vector's length is clamped to the range start up front, so a
    /// leaked `Drain` can only leak elements, never expose moved-out slots.
    pub fn drain<R: RangeBounds<I>>(&mut self, range: R) -> Drain<'_, T, S, I> {
        let Range { start, end } = normalize_range(range, self.len());
        let original_len = self.len();
        self.len = I::from_usize(start);
        Drain {
            parent: self,
            original_len,
            target_start: start,
            front_index: start,
            back_index: end,
            target_end: end,
        }
    }
    /// Returns an iterator removing the elements for which `filter`
    /// returns `true`; equivalent to `drain_filter_range(.., filter)`.
    pub fn drain_filter<F: FnMut(I, &mut T) -> bool>(&mut self, filter: F) -> DrainFilter<'_, T, S, I, F> {
        self.drain_filter_range(.., filter)
    }
    /// Returns an iterator removing the elements inside `range` for which
    /// `filter` returns `true`.
    ///
    /// As with [`Vec::drain`], the length is clamped up front for leak
    /// safety; `DrainFilter`'s `Drop` restores the retained elements.
    pub fn drain_filter_range<R: RangeBounds<I>, F: FnMut(I, &mut T) -> bool>(&mut self, range: R, filter: F) -> DrainFilter<'_, T, S, I, F> {
        let Range { start, end } = normalize_range(range, self.len());
        let original_len = self.len();
        self.len = I::from_usize(start);
        DrainFilter {
            parent: self,
            filter_fn: filter,
            original_len,
            target_start: start,
            front_index: start,
            back_index: end,
            target_end: end,
        }
    }
}
impl<T: Copy, S: Storage<ArrayLayout<T>>, I: Capacity> Vec<T, S, I> {
    /// Appends a bitwise copy of every element in `other`; fails without
    /// modification when the result would exceed the capacity.
    pub fn try_extend_from_slice(&mut self, other: &[T]) -> crate::Result<()> {
        let new_len = self.len() + other.len();
        if new_len > self.capacity() { return CapacityError::new(); }
        unsafe {
            // `T: Copy`, so a plain memcpy of the source slice suffices.
            let dst_ptr = self.as_mut_ptr().add(self.len());
            let src_ptr = other.as_ptr();
            ptr::copy_nonoverlapping(src_ptr, dst_ptr, other.len());
            self.set_len(I::from_usize(new_len));
        }
        Ok(())
    }
    /// Panicking variant of [`Vec::try_extend_from_slice`].
    #[track_caller]
    #[inline]
    pub fn extend_from_slice(&mut self, other: &[T]) {
        self.try_extend_from_slice(other).expect("`vec.len() + other.len()` must be less than or equal to `vec.capacity()`");
    }
    /// Inserts a copy of `src` at `idx`, shifting later elements right;
    /// fails without modification when it would exceed the capacity.
    ///
    /// # Panics
    /// Panics if `idx > len`.
    pub fn try_insert_slice(&mut self, idx: I, src: &[T]) -> crate::Result<()> {
        #[cold]
        #[inline(never)]
        fn assert_failed(idx: usize, len: usize) -> ! {
            panic!(
                "idx (is {}) must be less than or equal to len (is {})",
                idx, len
            );
        }
        let count = src.len();
        let new_len = self.len() + count;
        if new_len > self.capacity() {
            return CapacityError::new();
        }
        let idx = idx.as_usize();
        let len = self.len.as_usize();
        if idx > len {
            assert_failed(idx, len);
        }
        unsafe {
            // Shift the tail right by `count`, then copy `src` into the gap.
            let src_ptr = self.buf.get_ptr().cast::<T>().add(idx);
            let dst_ptr = self.buf.get_mut_ptr().cast::<T>().add(idx + count);
            ptr::copy(src_ptr, dst_ptr, len - idx);
            let src_ptr = src.as_ptr();
            let dst_ptr = self.buf.get_mut_ptr().cast::<T>().add(idx);
            ptr::copy_nonoverlapping(src_ptr, dst_ptr, count);
            self.set_len(I::from_usize(new_len));
        }
        Ok(())
    }
    /// Panicking variant of [`Vec::try_insert_slice`].
    #[track_caller]
    #[inline]
    pub fn insert_slice(&mut self, idx: I, src: &[T]) {
        self.try_insert_slice(idx, src).expect("`vec.len() + src.len()` must be less than or equal to `vec.capacity()`");
    }
    /// Appends copies of the elements in the given in-bounds range of
    /// `self`; fails without modification on insufficient capacity.
    pub fn try_extend_from_within<R: RangeBounds<I>>(&mut self, src: R) -> crate::Result<()> {
        let Range { start, end } = normalize_range(src, self.len());
        let count = end - start;
        let new_len = self.len() + count;
        if new_len > self.capacity() {
            return CapacityError::new();
        }
        unsafe {
            // Source (within `len`) and destination (past `len`) cannot
            // overlap, so a nonoverlapping copy is sound.
            let src_ptr = self.as_ptr().add(start);
            let dst_ptr = self.buf.get_mut_ptr().cast::<T>().add(self.len());
            ptr::copy_nonoverlapping(src_ptr, dst_ptr, count);
            self.set_len(I::from_usize(new_len));
        }
        Ok(())
    }
    /// Panicking variant of [`Vec::try_extend_from_within`].
    #[track_caller]
    #[inline]
    pub fn extend_from_within<R: RangeBounds<I>>(&mut self, src: R) {
        self.try_extend_from_within(src).expect("`vec.len() + src.len` must be less than or equal to `vec.capacity()`");
    }
    /// Replaces the elements in `range` with copies of `replace_with`,
    /// shifting the tail as needed; fails without modification on
    /// insufficient capacity.
    pub fn try_replace_range<R: RangeBounds<I>>(&mut self, range: R, replace_with: &[T]) -> crate::Result<()> {
        let Range { start, end } = normalize_range(range, self.len());
        let dst_count = end - start;
        let src_count = replace_with.len();
        if src_count <= dst_count {
            // Shrinking (or same-size) replacement always fits: overwrite,
            // then shift the tail left and trim the length.
            unsafe {
                let src_ptr = replace_with.as_ptr();
                let dst_ptr = self.buf.get_mut_ptr().cast::<T>().add(start);
                ptr::copy_nonoverlapping(src_ptr, dst_ptr, src_count);
                let src_ptr = dst_ptr.add(dst_count) as *const T;
                let dst_ptr = dst_ptr.add(src_count);
                ptr::copy(src_ptr, dst_ptr, self.len() - end);
                let new_len = I::from_usize(self.len() - (dst_count - src_count));
                self.set_len(new_len);
            }
        } else {
            // Growing replacement: make room by shifting the tail right
            // first, then overwrite the (now larger) gap.
            let extra_space_needed = src_count - dst_count;
            if self.len() + extra_space_needed > self.capacity() {
                return CapacityError::new();
            }
            unsafe {
                let src_ptr = self.buf.get_ptr().cast::<T>().add(end);
                let dst_ptr = self.buf.get_mut_ptr().cast::<T>().add(end + extra_space_needed);
                ptr::copy(src_ptr, dst_ptr, self.len() - end);
                let src_ptr = replace_with.as_ptr();
                let dst_ptr = self.buf.get_mut_ptr().cast::<T>().add(start);
                ptr::copy_nonoverlapping(src_ptr, dst_ptr, src_count);
                let new_len = I::from_usize(self.len() + extra_space_needed);
                self.set_len(new_len);
            }
        }
        Ok(())
    }
    /// Panicking variant of [`Vec::try_replace_range`].
    pub fn replace_range<R: RangeBounds<I>>(&mut self, range: R, replace_with: &[T]) {
        self.try_replace_range(range, replace_with).expect("remaining space is insufficient");
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> core::ops::Deref for Vec<T, S, I> {
    type Target = [T];
    /// Views the initialized prefix of the storage as a slice.
    fn deref(&self) -> &[T] {
        unsafe {
            let ptr = self.buf.get_ptr().cast::<T>();
            core::slice::from_raw_parts(ptr, self.len.as_usize())
        }
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> core::ops::DerefMut for Vec<T, S, I> {
    /// Views the initialized prefix of the storage as a mutable slice.
    fn deref_mut(&mut self) -> &mut [T] {
        unsafe {
            let ptr = self.buf.get_mut_ptr().cast::<T>();
            core::slice::from_raw_parts_mut(ptr, self.len.as_usize())
        }
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> core::ops::Index<I> for Vec<T, S, I> {
    type Output = T;
    /// Indexes by the vector's `Capacity` type; panics when out of bounds.
    fn index(&self, index: I) -> &Self::Output {
        self.get(index).unwrap()
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> core::ops::IndexMut<I> for Vec<T, S, I> {
    /// Mutably indexes by the vector's `Capacity` type; panics when out of
    /// bounds.
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        self.get_mut(index).unwrap()
    }
}
// Generates `Index`/`IndexMut` impls for one range type: `$lo`/`$hi` map the
// `I`-typed bounds of `$idx` (with `$self` bound to the vector, for
// end-open ranges that need `len()`) to `usize`; bounds checking is then
// delegated to the slice indexing operation.
macro_rules! _impl_idx_range {
    ($self:ident, $idx:ident: $r:ty, $lo:expr, $hi:expr) => {
        impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> core::ops::Index<$r> for Vec<T, S, I> {
            type Output = [T];
            #[allow(unused_variables)]
            fn index(&self, $idx: $r) -> &Self::Output {
                let $self = self;
                let start = $lo;
                let end = $hi;
                &self.as_slice()[start..end]
            }
        }
        impl<T, S: Storage<ArrayLayout<T>>, I: Capacity + PartialOrd> core::ops::IndexMut<$r>
            for Vec<T, S, I>
        {
            #[allow(unused_variables)]
            fn index_mut(&mut self, $idx: $r) -> &mut Self::Output {
                // Compute both bounds in a scope that only borrows `self`
                // immutably, then reborrow mutably for the actual slicing.
                let (start, end) = {
                    let $self = &self;
                    ($lo, $hi)
                };
                &mut self.as_mut_slice()[start..end]
            }
        }
    };
}
// Instantiate range indexing for every range flavor; inclusive end bounds
// use `saturating_add(1)` to convert to an exclusive bound.
_impl_idx_range! { s, index: core::ops::Range<I>, index.start.as_usize(), index.end.as_usize() }
_impl_idx_range! { s, index: core::ops::RangeFrom<I>, index.start.as_usize(), s.len() }
_impl_idx_range! { s, index: core::ops::RangeFull, 0, s.len() }
_impl_idx_range! { s, index: core::ops::RangeInclusive<I>, index.start().as_usize(), index.end().as_usize().saturating_add(1) }
_impl_idx_range! { s, index: core::ops::RangeTo<I>, 0, index.end.as_usize() }
_impl_idx_range! { s, index: core::ops::RangeToInclusive<I>, 0, index.end.as_usize().saturating_add(1) }
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> core::convert::AsRef<[T]> for Vec<T, S, I> {
    /// Borrows the vector's contents as a slice.
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> core::convert::AsMut<[T]> for Vec<T, S, I> {
    /// Borrows the vector's contents as a mutable slice.
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> core::ops::Drop for Vec<T, S, I> {
    fn drop(&mut self) {
        // Drop the `len` initialized elements in place; the storage itself
        // is released afterwards by `S`'s own `Drop`, if it has one.
        unsafe {
            let ptr = self.buf.get_mut_ptr().cast::<T>();
            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(ptr, self.len()));
        }
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> core::fmt::Debug for Vec<T, S, I>
where
    T: core::fmt::Debug,
{
    /// Formats the vector exactly like the equivalent slice of elements.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        core::fmt::Debug::fmt(&**self, f)
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Hash for Vec<T, S, I>
where
    T: Hash,
{
    /// Hashes identically to the equivalent slice of elements.
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state);
    }
}
impl<AT, AS, AI, BT, BS, BI> PartialEq<Vec<BT, BS, BI>> for Vec<AT, AS, AI>
where
    AT: PartialEq<BT>,
    AS: Storage<ArrayLayout<AT>>,
    BS: Storage<ArrayLayout<BT>>,
    AI: Capacity,
    BI: Capacity,
{
    /// Two vectors compare equal when their element sequences do, even if
    /// their storage or index types differ.
    #[inline]
    fn eq(&self, other: &Vec<BT, BS, BI>) -> bool {
        self.as_slice() == other.as_slice()
    }
}
impl<T: Eq, S: Storage<ArrayLayout<T>>, I: Capacity> Eq for Vec<T, S, I> {}
// Element-wise comparisons between vectors and (mutable) slices, in both
// directions, all delegating to the slice `PartialEq` implementation.
impl<V, T: PartialEq<V>, S: Storage<ArrayLayout<T>>, I: Capacity> PartialEq<&[V]> for Vec<T, S, I> {
    #[inline]
    fn eq(&self, other: &&[V]) -> bool {
        self.as_slice() == &other[..]
    }
}
impl<V, T, S: Storage<ArrayLayout<T>>, I: Capacity> PartialEq<&mut [V]> for Vec<T, S, I>
where
    T: PartialEq<V>,
{
    #[inline]
    fn eq(&self, other: &&mut [V]) -> bool {
        self.as_slice() == &other[..]
    }
}
impl<V: PartialEq<T>, T, S: Storage<ArrayLayout<T>>, I: Capacity> PartialEq<Vec<T, S, I>> for &[V] {
    #[inline]
    fn eq(&self, other: &Vec<T, S, I>) -> bool {
        &self[..] == other.as_slice()
    }
}
impl<V, T, S: Storage<ArrayLayout<T>>, I: Capacity> PartialEq<Vec<T, S, I>> for &mut [V]
where
    V: PartialEq<T>,
{
    #[inline]
    fn eq(&self, other: &Vec<T, S, I>) -> bool {
        &self[..] == other.as_slice()
    }
}
impl<T: PartialOrd, S: Storage<ArrayLayout<T>>, I: Capacity> PartialOrd for Vec<T, S, I> {
    /// Lexicographic comparison, delegating to the slice implementation.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}
impl<T: Ord, S: Storage<ArrayLayout<T>>, I: Capacity> Ord for Vec<T, S, I> {
    /// Lexicographic total order, delegating to the slice implementation.
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}
impl<T, S: Storage<ArrayLayout<T>>, Idx: Capacity> core::iter::Extend<T> for Vec<T, S, Idx> {
    /// Pushes every item of `iter`; panics if the capacity is exceeded.
    fn extend<I: core::iter::IntoIterator<Item = T>>(&mut self, iter: I) {
        for element in iter {
            self.push(element);
        }
    }
}
impl<'a, T, S: Storage<ArrayLayout<T>>, Idx: Capacity> core::iter::Extend<&'a T> for Vec<T, S, Idx>
where
    T: 'a + Clone,
{
    /// Pushes a clone of every item of `iter`; panics if the capacity is
    /// exceeded.
    fn extend<I: core::iter::IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        for element in iter {
            self.push(element.clone());
        }
    }
}
/// A by-value iterator over the elements of a [`Vec`], created by its
/// `IntoIterator` implementation. The elements at `start..end` are still
/// owned by the iterator and pending.
pub struct IntoIterator<T, S: Storage<ArrayLayout<T>>, I: Capacity> {
    // Index of the next element to yield from the front.
    start: I,
    // One past the next element to yield from the back.
    end: I,
    // The storage taken out of the vector.
    buf: S,
    // Logical ownership of the remaining `T`s.
    elems: PhantomData<T>,
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Iterator for IntoIterator<T, S, I> {
    type Item = T;
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: everything in `start..end` is still pending.
        let size = self.end.as_usize() - self.start.as_usize();
        (size, Some(size))
    }
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let start = self.start.as_usize();
        let end = self.end.as_usize();
        if start >= end {
            return None;
        }
        // Move the front element out by value and advance; the vacated slot
        // is never touched again (the range `start..end` shrinks).
        let ptr = (self.buf.get_ptr().cast::<T>()).wrapping_add(start);
        let ret = unsafe { ptr.read() };
        self.start = I::from_usize(start + 1);
        Some(ret)
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> DoubleEndedIterator for IntoIterator<T, S, I> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        let start = self.start.as_usize();
        let end = self.end.as_usize();
        if start >= end {
            return None;
        }
        // `end` is one past the last pending element; decrement first so it
        // names the element to move out.
        let end = end - 1;
        let ptr = (self.buf.get_ptr().cast::<T>()).wrapping_add(end);
        let ret = unsafe { ptr.read() };
        self.end = I::from_usize(end);
        Some(ret)
    }
}
// `size_hint` is exact and `next` stays `None` once exhausted, so both
// marker traits hold.
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> ExactSizeIterator for IntoIterator<T, S, I> {}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> FusedIterator for IntoIterator<T, S, I> {}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> Drop for IntoIterator<T, S, I> {
    /// Drops every element that has not been yielded yet.
    fn drop(&mut self) {
        // Each yielded temporary is dropped at the end of the loop body.
        while self.next().is_some() {}
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity> IntoIter for Vec<T, S, I> {
    type Item = T;
    type IntoIter = IntoIterator<T, S, I>;
    fn into_iter(self) -> Self::IntoIter {
        // Steal the storage out of `self` (which implements `Drop`) with a
        // raw read, then forget `self` so nothing is dropped here;
        // ownership of all `len` elements passes to the iterator.
        let end = self.len;
        let buf = unsafe { core::ptr::addr_of!(self.buf).read() };
        core::mem::forget(self);
        IntoIterator {
            start: I::from_usize(0),
            end,
            buf,
            elems: PhantomData,
        }
    }
}
impl<'a, T, S: Storage<ArrayLayout<T>>, I: Capacity> IntoIter for &'a Vec<T, S, I> {
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;
    /// Iterates over the elements by shared reference, front to back.
    fn into_iter(self) -> Self::IntoIter {
        self.as_slice().into_iter()
    }
}
impl<'a, T, S: Storage<ArrayLayout<T>>, I: Capacity> IntoIter for &'a mut Vec<T, S, I> {
    type Item = &'a mut T;
    type IntoIter = core::slice::IterMut<'a, T>;
    /// Iterates over the elements by exclusive reference, front to back.
    fn into_iter(self) -> Self::IntoIter {
        self.as_mut_slice().into_iter()
    }
}
/// A draining iterator over a sub-range of a [`Vec`], created by
/// [`Vec::drain`]. When dropped, it shifts the tail down over the drained
/// range and restores the parent's length.
pub struct Drain<'p, T, S: Storage<ArrayLayout<T>>, I: Capacity> {
    // The vector being drained; its `len` is clamped to the range start
    // while the drain is alive (leak safety).
    parent: &'p mut Vec<T, S, I>,
    // Parent length before draining began.
    original_len: usize,
    // Start of the drained range (destination for the tail on drop).
    target_start: usize,
    // Next index to yield from the front.
    front_index: usize,
    // One past the next index to yield from the back.
    back_index: usize,
    // End (exclusive) of the drained range.
    target_end: usize,
}
impl<'p, T, S: Storage<ArrayLayout<T>>, I: Capacity> Iterator for Drain<'p, T, S, I> {
    type Item = T;
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: everything in `front_index..back_index` is still pending.
        let size = self.back_index - self.front_index;
        (size, Some(size))
    }
    fn next(&mut self) -> Option<Self::Item> {
        if self.front_index == self.back_index {
            return None;
        }
        // Move the element out by value; the vacated slot is overwritten
        // when the drain is dropped.
        let out = unsafe { self.parent.as_slice().as_ptr().add(self.front_index).read() };
        self.front_index += 1;
        Some(out)
    }
}
impl<'p, T, S: Storage<ArrayLayout<T>>, I: Capacity> DoubleEndedIterator for Drain<'p, T, S, I> {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.front_index == self.back_index {
            return None;
        }
        // Decrement first: `back_index` is one past the next back element.
        self.back_index -= 1;
        unsafe { Some(self.parent.as_slice().as_ptr().add(self.back_index).read()) }
    }
}
// `size_hint` is exact and `next` stays `None` once front and back meet.
impl<'p, T, S: Storage<ArrayLayout<T>>, I: Capacity> ExactSizeIterator for Drain<'p, T, S, I> {}
impl<'p, T, S: Storage<ArrayLayout<T>>, I: Capacity> FusedIterator for Drain<'p, T, S, I> {}
impl<'p, T, S: Storage<ArrayLayout<T>>, I: Capacity> Drop for Drain<'p, T, S, I> {
    fn drop(&mut self) {
        // Drop any not-yet-yielded elements of the drained range.
        self.for_each(drop);
        // Shift the tail (everything past the range) down into the gap,
        // then restore the parent's length to account for the removals.
        let count = self.original_len - self.target_end;
        let src = unsafe { self.parent.as_slice().as_ptr().add(self.target_end) };
        let dst = unsafe { self.parent.as_mut_slice().as_mut_ptr().add(self.target_start) };
        unsafe { ptr::copy(src, dst, count); }
        let removed = self.target_end - self.target_start;
        let new_len = I::from_usize(self.original_len - removed);
        unsafe { self.parent.set_len(new_len); }
    }
}
/// An iterator that removes the elements of a range for which a predicate
/// returns `true`, created by [`Vec::drain_filter`] and
/// [`Vec::drain_filter_range`]. Retained elements are compacted toward the
/// ends of the range; the tail is shifted down and the length fixed up on
/// drop.
pub struct DrainFilter<'p, T, S: Storage<ArrayLayout<T>>, I: Capacity, F: FnMut(I, &mut T) -> bool> {
    // The vector being filtered; its `len` is clamped to the range start
    // while the iterator is alive (leak safety).
    parent: &'p mut Vec<T, S, I>,
    // Predicate deciding removal: `true` means remove (yield) the element.
    filter_fn: F,
    // Parent length before filtering began.
    original_len: usize,
    // One past the last element kept from the front so far.
    target_start: usize,
    // Next index to examine from the front.
    front_index: usize,
    // One past the next index to examine from the back.
    back_index: usize,
    // Start of the region of elements kept from the back so far.
    target_end: usize,
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity, F: FnMut(I, &mut T) -> bool> Iterator for DrainFilter<'_, T, S, I, F> {
    type Item = T;
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Anywhere between zero and all remaining candidates may be removed.
        let max_len = self.back_index - self.front_index;
        (0, Some(max_len))
    }
    /// Scans forward for the next element the filter removes, compacting
    /// rejected (kept) elements against the front of the retained region.
    fn next(&mut self) -> Option<Self::Item> {
        while self.front_index != self.back_index {
            // FIX: remember the element's own index before advancing.
            // Previously `front_index` was incremented first and the
            // *incremented* value was passed to the filter, so the closure
            // observed index + 1 — inconsistent with `next_back`, which
            // passes the element's actual index.
            let idx = self.front_index;
            let src = unsafe { self.parent.as_mut_slice().as_mut_ptr().add(idx) };
            let item = unsafe { src.as_mut().unwrap() };
            self.front_index += 1;
            if (self.filter_fn)(I::from_usize(idx), item) {
                // Removed: move it out and yield it.
                return Some(unsafe { src.read() });
            }
            // Kept: pack it against the end of the kept-from-front region.
            let dst = unsafe { self.parent.as_mut_slice().as_mut_ptr().add(self.target_start) };
            unsafe { ptr::copy(src as *const T, dst, 1); }
            self.target_start += 1;
        }
        None
    }
}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity, F: FnMut(I, &mut T) -> bool> DoubleEndedIterator for DrainFilter<'_, T, S, I, F> {
    /// Scans backward for the next element the filter removes, compacting
    /// rejected (kept) elements against the back of the retained region.
    fn next_back(&mut self) -> Option<Self::Item> {
        while self.front_index != self.back_index {
            // Decrement first: `back_index` is one past the next candidate,
            // so after the decrement it is the element's own index.
            self.back_index -= 1;
            let src = unsafe { self.parent.as_mut_slice().as_mut_ptr().add(self.back_index) };
            let item = unsafe { src.as_mut().unwrap() };
            if (self.filter_fn)(I::from_usize(self.back_index), item) {
                // Removed: move it out and yield it.
                return Some(unsafe { src.read() });
            }
            // Kept: pack it against the start of the kept-from-back region.
            self.target_end -= 1;
            let dst = unsafe { self.parent.as_mut_slice().as_mut_ptr().add(self.target_end) };
            unsafe { ptr::copy(src as *const T, dst, 1); }
        }
        None
    }
}
// `next` returns `None` permanently once front and back indices meet.
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity, F: FnMut(I, &mut T) -> bool> FusedIterator for DrainFilter<'_, T, S, I, F> {}
impl<T, S: Storage<ArrayLayout<T>>, I: Capacity, F: FnMut(I, &mut T) -> bool> Drop for DrainFilter<'_, T, S, I, F> {
    fn drop(&mut self) {
        // Run the filter over any remaining candidates, dropping removals.
        self.for_each(drop);
        // Move the kept-from-back region plus the tail down so it adjoins
        // the kept-from-front region, then fix up the parent's length.
        let count = self.original_len - self.target_end;
        let src = unsafe { self.parent.as_slice().as_ptr().add(self.target_end) };
        let dst = unsafe { self.parent.as_mut_slice().as_mut_ptr().add(self.target_start) };
        unsafe { ptr::copy(src, dst, count); }
        let removed = self.target_end - self.target_start;
        let new_len = I::from_usize(self.original_len - removed);
        self.parent.len = new_len;
    }
}
#[cfg(feature = "alloc")]
#[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
impl<T: Copy, I: Capacity> crate::collections::AllocVec<T, I> {
    /// Creates an empty heap-backed vector with room for `capacity`
    /// elements.
    pub fn with_capacity(capacity: I) -> Self {
        let cap = capacity.as_usize();
        // Round-trip check: rejects capacities the index type cannot
        // faithfully represent.
        if capacity != I::from_usize(cap) {
            buffer_too_large_for_index_type::<I>();
        }
        Vec {
            len: I::from_usize(0),
            buf: crate::storage::AllocStorage::with_capacity(cap),
            elem: PhantomData,
        }
    }
}
#[cfg(feature = "alloc")]
#[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
impl<T: Copy, I: Capacity> Clone for crate::collections::AllocVec<T, I> {
    /// Clones into a fresh allocation with the same *capacity* (not just
    /// the same length) as `self`.
    fn clone(&self) -> Self {
        let mut result = Self::with_capacity(I::from_usize(self.capacity()));
        for item in self.iter() {
            result.push(*item);
        }
        result
    }
}
impl<T, I: Capacity, const C: usize> Vec<T, InlineStorage<T, C>, I> {
    /// Creates an empty vector backed by inline storage for `C` elements.
    ///
    /// Diverges via `buffer_too_large_for_index_type` when `C` cannot be
    /// represented by the index type `I`.
    #[inline]
    pub fn new() -> Self {
        if C > I::MAX_REPRESENTABLE {
            buffer_too_large_for_index_type::<I>();
        }
        Vec {
            len: I::from_usize(0),
            // SAFETY(review): sound only if `InlineStorage` is valid when
            // left uninitialized (i.e. it is a `MaybeUninit`-based array) —
            // presumably it is; confirm against the storage module.
            buf: unsafe { MaybeUninit::uninit().assume_init() },
            elem: PhantomData,
        }
    }
}
impl<T, I: Capacity, const C: usize> Default for Vec<T, InlineStorage<T, C>, I> {
    /// Creates an empty inline vector; equivalent to [`Vec::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<T: Clone, I: Capacity, const C: usize> core::clone::Clone for Vec<T, InlineStorage<T, C>, I> {
    /// Clones by building a fresh empty vector and copying into it.
    fn clone(&self) -> Self {
        let mut duplicate = Self::new();
        duplicate.clone_from(self);
        duplicate
    }
    /// Clears `self`, then pushes a clone of every element of `source`.
    fn clone_from(&mut self, source: &Self) {
        self.clear();
        self.extend(source.iter().cloned());
    }
}
impl<T: Clone, I: Capacity, const C: usize> From<&[T]> for Vec<T, InlineStorage<T, C>, I> {
    /// Clones the elements of `source` into a new inline vector.
    ///
    /// # Panics
    /// Panics if `source` holds more than `C` elements; diverges when `C`
    /// is not representable by `I`.
    fn from(source: &[T]) -> Self {
        if C > I::MAX_REPRESENTABLE {
            buffer_too_large_for_index_type::<I>();
        }
        assert!(
            source.len() <= C,
            "source should not have more than {} elements (has {})",
            C,
            source.len()
        );
        let mut ret = Self::new();
        for next in source {
            ret.push(next.clone());
        }
        ret
    }
}
impl<T: Clone, I: Capacity, const C: usize> From<&mut [T]> for Vec<T, InlineStorage<T, C>, I> {
    /// Same as the `From<&[T]>` implementation, accepting a mutable slice
    /// for caller convenience.
    fn from(source: &mut [T]) -> Self {
        if C > I::MAX_REPRESENTABLE {
            buffer_too_large_for_index_type::<I>();
        }
        assert!(
            source.len() <= C,
            "source should not have more than {} elements (has {})",
            C,
            source.len()
        );
        let mut ret = Self::new();
        for next in source {
            ret.push(next.clone());
        }
        ret
    }
}
// Element-wise comparisons between vectors and fixed-size arrays, in both
// directions, delegating to the slice `PartialEq` implementation.
impl<V, T, S, I, const N: usize> PartialEq<Vec<T, S, I>> for [V; N]
where
    V: PartialEq<T>,
    S: Storage<ArrayLayout<T>>,
    I: Capacity,
{
    #[inline]
    fn eq(&self, other: &Vec<T, S, I>) -> bool {
        &self[..] == other.as_slice()
    }
}
impl<V, T, S, I, const N: usize> PartialEq<[V; N]> for Vec<T, S, I>
where
    T: PartialEq<V>,
    S: Storage<ArrayLayout<T>>,
    I: Capacity,
{
    #[inline]
    fn eq(&self, other: &[V; N]) -> bool {
        self.as_slice() == &other[..]
    }
}
impl<T, I: Capacity, const C: usize> core::iter::FromIterator<T>
    for Vec<T, InlineStorage<T, C>, I>
{
    /// Collects the iterator into a new inline vector, panicking if it
    /// yields more than `C` items.
    fn from_iter<It: core::iter::IntoIterator<Item = T>>(iter: It) -> Self {
        let mut collected = Self::new();
        for element in iter {
            collected.push(element);
        }
        collected
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::collections::{ArenaVec, InlineVec, SliceVec};
    // A 300-element buffer cannot be indexed by `u8`, so construction must
    // panic (via `buffer_too_large_for_index_type`).
    #[test]
    #[should_panic]
    fn from_panics_for_too_large_inputs() {
        let mut backing_array = [core::mem::MaybeUninit::<char>::uninit(); 300];
        let _ret = SliceVec::<char, u8>::from(&mut backing_array[..]);
    }
    // Pins down the memory layout of the main instantiations.
    #[test]
    fn sizes_of_instantiated_types() {
        use core::mem::size_of;
        assert_eq!(size_of::<SliceVec<u64, usize>>(), 3 * size_of::<usize>());
        assert_eq!(size_of::<ArenaVec<u64, usize>>(), 3 * size_of::<usize>());
        #[cfg(feature = "alloc")]
        assert_eq!(size_of::<crate::collections::AllocVec<u64, usize>>(), 3 * size_of::<usize>());
        assert_eq!(size_of::<InlineVec<u8, 8>>(), size_of::<usize>() + 8);
        assert_eq!(size_of::<InlineVec<u8, 99, u8>>(), 100);
        assert_eq!(
            size_of::<Vec<u32, &mut [MaybeUninit<u32>; 1000], usize>>(),
            2 * size_of::<usize>()
        );
    }
    // Checks that `Drain` and the by-value iterator yield the right
    // elements in the right order and drop exactly the expected number of
    // elements at each step.
    #[test]
    fn iterators_take_and_drop_correctly() {
        use crate::test_utils::*;
        let drop_count = DropCounter::new();
        let mut backing_region = [
            core::mem::MaybeUninit::<Droppable<usize>>::uninit(),
            core::mem::MaybeUninit::<Droppable<usize>>::uninit(),
            core::mem::MaybeUninit::<Droppable<usize>>::uninit(),
            core::mem::MaybeUninit::<Droppable<usize>>::uninit(),
            core::mem::MaybeUninit::<Droppable<usize>>::uninit(),
            core::mem::MaybeUninit::<Droppable<usize>>::uninit(),
            core::mem::MaybeUninit::<Droppable<usize>>::uninit(),
            core::mem::MaybeUninit::<Droppable<usize>>::uninit(),
        ];
        let mut vec = SliceVec::<Droppable<usize>>::from(&mut backing_region[..]);
        for i in 1..=8 {
            vec.push(drop_count.new_droppable(i));
        }
        // Drain 3..=6 (values 3, 4, 5, 6); take one from the back, then let
        // the drain's Drop dispose of the remaining three.
        let mut drain_iter = vec.drain(2..=5);
        assert_eq!(drain_iter.next_back().unwrap().value, 6);
        assert_eq!(drop_count.dropped(), 1);
        drop(drain_iter);
        assert_eq!(drop_count.dropped(), 4);
        // The remaining elements are 1, 2, 7, 8; consuming three drops
        // them, and dropping the iterator disposes of the last one.
        let mut into_iter = vec.into_iter();
        assert_eq!(into_iter.next().unwrap().value, 1);
        assert_eq!(into_iter.next().unwrap().value, 2);
        assert_eq!(into_iter.next().unwrap().value, 7);
        assert_eq!(drop_count.dropped(), 7);
        drop(into_iter);
        assert_eq!(drop_count.dropped(), 8);
        // Dropping a full vector drops all of its elements.
        let mut vec = SliceVec::<Droppable<usize>>::from(&mut backing_region[..]);
        for i in 1..=8 {
            vec.push(drop_count.new_droppable(i));
        }
        drop(vec);
        assert_eq!(drop_count.dropped(), 16);
    }
    // Forgetting a `Drain` may leak elements, but must never leave a
    // moved-out element reachable: the final equality is expected to fail
    // (hence `should_panic`) because `pop` must not return the drained
    // reference.
    #[test]
    #[should_panic]
    fn leaking_drain() {
        let mut a = 1;
        let mut b = 2;
        let mut c = 3;
        let mut backing_region = [
            core::mem::MaybeUninit::<&mut i32>::uninit(),
            core::mem::MaybeUninit::<&mut i32>::uninit(),
            core::mem::MaybeUninit::<&mut i32>::uninit(),
            core::mem::MaybeUninit::<&mut i32>::uninit(),
        ];
        let mut vec = SliceVec::<&mut i32>::from(&mut backing_region[..]);
        vec.push(&mut a);
        vec.push(&mut b);
        vec.push(&mut c);
        let mut it = vec.drain(1..);
        if let Some(cloned_ref) = it.next_back() {
            core::mem::forget(it);
            if let Some(original_ref) = vec.pop() {
                let clone = cloned_ref as *mut i32 as usize;
                let original = original_ref as *mut i32 as usize;
                assert_eq!(clone, original);
            }
        }
    }
}
}