use alloc::alloc as malloc;
use alloc::boxed::Box;
use alloc::vec::Vec;
use core::alloc::Layout;
use core::cell::Cell;
use core::fmt::Debug;
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem::{ManuallyDrop, MaybeUninit};
use core::num::NonZeroUsize;
use core::panic::{RefUnwindSafe, UnwindSafe};
use core::ptr::NonNull;
use core::{ptr, slice};
use crate::utils::min_cap;
use super::utils::{IsZST, split_range_bound};
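// Capacity is kept in the low bits of `cap_and_flag`; the top bit (`MARKER`)
// is set while the elements still live in the inline cache and cleared once
// they have been spilled to the heap.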
const MAX_CAP: usize = usize::MAX >> 1;
const MARKER: usize = usize::MAX ^ MAX_CAP;
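/// Backing storage for [`FastVec`]: up to `N` elements are stored inline in
/// `cache`, larger contents are spilled to a heap allocation.
///
/// `ptr` points either into `cache` or into the heap block. Because it can
/// point into the struct itself, it goes stale whenever the value is moved;
/// [`FastVec`] re-runs `refresh` before handing out access to fix it up.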
pub struct FastVecData<T, const N: usize> {
ptr: Cell<*mut T>,
cap_and_flag: usize,
len: usize,
cache: [MaybeUninit<T>; N],
}
unsafe impl<T: Send, const N: usize> Send for FastVecData<T, N> {}
unsafe impl<T: Sync, const N: usize> Sync for FastVecData<T, N> {}
impl<T, const N: usize> UnwindSafe for FastVecData<T, N> where T: UnwindSafe {}
impl<T, const N: usize> RefUnwindSafe for FastVecData<T, N> where T: RefUnwindSafe {}
impl<T, const N: usize> Drop for FastVecData<T, N> {
fn drop(&mut self) {
self.clear();
self.dealloc();
}
}
impl<T, const N: usize> FastVecData<T, N> {
#[inline(always)]
const fn cache_ptr(&self) -> *const T {
&self.cache as *const [MaybeUninit<T>] as *const T
}
#[inline(always)]
const fn in_heap(&self) -> bool {
self.cap_and_flag & MARKER == 0
}
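// Re-points `ptr` at the inline cache when the elements are stored inline.
// Unsafe because it writes the `Cell` through a shared reference; `FastVec`
// makes this sound by being `!Sync`.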
#[inline(always)]
pub unsafe fn refresh(&self) {
if !T::IS_ZST && !self.in_heap() {
self.ptr.set(self.cache_ptr() as *mut T);
}
}
#[inline(always)]
pub const fn capacity(&self) -> usize {
self.cap_and_flag & MAX_CAP
}
#[inline(always)]
pub const fn len(&self) -> usize {
self.len
}
#[inline(always)]
pub const fn is_empty(&self) -> bool {
self.len == 0
}
#[inline(always)]
pub const fn as_ptr(&self) -> *const T {
self.ptr.get()
}
#[inline(always)]
pub const fn as_mut_ptr(&mut self) -> *mut T {
self.ptr.get()
}
#[inline]
pub const fn as_slice(&self) -> &[T] {
let data = self.as_ptr();
let len = self.len();
unsafe { slice::from_raw_parts(data, len) }
}
#[inline]
pub const fn as_mut_slice(&mut self) -> &mut [T] {
let data = self.as_mut_ptr();
let len = self.len();
unsafe { slice::from_raw_parts_mut(data, len) }
}
#[inline(always)]
pub const unsafe fn set_len(&mut self, new_len: usize) {
debug_assert!(new_len <= self.capacity());
self.len = new_len;
}
pub fn clear(&mut self) {
if core::mem::needs_drop::<T>() {
unsafe {
let slice: &mut [T] = self.as_mut_slice();
ptr::drop_in_place::<[T]>(slice);
}
}
self.len = 0;
}
pub fn truncate(&mut self, len: usize) {
if self.len > len {
if core::mem::needs_drop::<T>() {
unsafe {
let data = self.as_mut_ptr().add(len);
let len = self.len - len;
let to_drop = ptr::slice_from_raw_parts_mut(data, len);
ptr::drop_in_place::<[T]>(to_drop)
}
}
self.len = len;
}
}
}
impl<T, const N: usize> FastVecData<T, N> {
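// Reallocates to exactly `new_cap` elements. Three cases:
// 1. `new_cap <= N`: move the elements back into the inline cache and free
//    the heap block (only reachable while the data is on the heap).
// 2. Already on the heap: grow or shrink the existing allocation in place.
// 3. Still inline: allocate a fresh heap block and copy the cache into it.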
#[inline(never)]
fn realloc(&mut self, new_cap: usize) {
let len = self.len();
let old_cap = self.capacity();
debug_assert!(new_cap >= len);
assert!(
new_cap <= MAX_CAP,
"the capacity of FastVec cannot exceed isize::MAX"
);
if new_cap <= N {
debug_assert!(self.in_heap());
if !T::IS_ZST {
let ptr = self.as_mut_ptr();
let old_layout = Layout::array::<T>(old_cap).unwrap();
self.len = 0;
unsafe {
let dst = self.cache.as_mut_ptr() as *mut T;
ptr::copy_nonoverlapping::<T>(ptr, dst, len);
malloc::dealloc(ptr as *mut u8, old_layout);
self.ptr.set(dst);
}
}
self.cap_and_flag = N | MARKER;
self.len = len;
} else if self.in_heap() {
let mut ptr = self.as_mut_ptr();
if !T::IS_ZST {
let old_layout = Layout::array::<T>(old_cap).unwrap();
let new_layout = Layout::array::<T>(new_cap).unwrap();
let new_size = new_layout.size();
let raw_ptr = ptr as *mut u8;
unsafe {
ptr = NonNull::new(malloc::realloc(raw_ptr, old_layout, new_size) as *mut T)
.unwrap_or_else(|| malloc::handle_alloc_error(new_layout))
.as_ptr();
}
}
self.ptr.set(ptr);
self.cap_and_flag = new_cap;
} else {
let ptr: NonNull<T> = if !T::IS_ZST {
let layout = Layout::array::<T>(new_cap).unwrap();
NonNull::new(unsafe { malloc::alloc(layout) as *mut T })
.unwrap_or_else(|| malloc::handle_alloc_error(layout))
} else {
let align = ::core::mem::align_of::<T>();
debug_assert!(align != 0);
NonNull::<T>::without_provenance(unsafe { NonZeroUsize::new_unchecked(align) })
};
if !T::IS_ZST {
unsafe {
let src = self.cache.as_ptr() as *const T;
ptr::copy_nonoverlapping::<T>(src, ptr.as_ptr(), len);
}
}
self.ptr.set(ptr.as_ptr());
self.cap_and_flag = new_cap;
}
}
fn dealloc(&mut self) {
if !T::IS_ZST && self.in_heap() {
let ptr = self.as_mut_ptr();
let cap = self.capacity();
let layout = Layout::array::<T>(cap).unwrap();
unsafe {
malloc::dealloc(ptr as *mut u8, layout);
}
}
}
}
impl<T, const N: usize> FastVecData<T, N> {
#[must_use]
#[inline(always)]
const fn new() -> Self {
assert!(
N <= MAX_CAP,
"the capacity of FastVecData cannot exceed `isize::MAX`"
);
unsafe {
Self {
cache: MaybeUninit::<[MaybeUninit<T>; N]>::uninit().assume_init(),
ptr: Cell::new(ptr::without_provenance_mut(align_of::<T>())),
len: 0,
cap_and_flag: N | MARKER,
}
}
}
#[inline]
#[must_use]
fn with_capacity(capacity: usize) -> Self {
assert!(
N <= MAX_CAP,
"the capacity of FastVecData cannot exceed `isize::MAX`"
);
let mut this = Self::new();
if capacity > N {
this.realloc(capacity);
}
this
}
#[inline]
const unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self {
assert!(
N <= MAX_CAP,
"the capacity of FastVecData cannot exceed `isize::MAX`"
);
debug_assert!(length <= capacity && capacity <= MAX_CAP);
unsafe {
Self {
cache: MaybeUninit::<[MaybeUninit<T>; N]>::uninit().assume_init(),
ptr: Cell::new(ptr),
len: length,
cap_and_flag: capacity,
}
}
}
#[inline]
fn from_slice(slice: &[T]) -> Self
where
T: Copy,
{
let mut this = Self::with_capacity(slice.len());
unsafe {
if !T::IS_ZST {
let ptr = this.as_mut_ptr();
ptr::copy_nonoverlapping(slice.as_ptr(), ptr, slice.len());
}
this.set_len(slice.len());
}
this
}
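// Amortized growth: reserve at least `min_cap::<T>()` slots and round the
// requested capacity up to the next power of two. Clamping to `MARKER` keeps
// `next_power_of_two` from overflowing and lets the assert in `realloc`
// report a capacity overflow.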
pub fn reserve(&mut self, additional: usize) {
let cap = self.capacity();
let len = self.len();
let target = len.saturating_add(additional);
if target > cap {
self.realloc(min_cap::<T>().max(target).min(MARKER).next_power_of_two());
}
}
pub fn reserve_exact(&mut self, additional: usize) {
let cap = self.capacity();
let len = self.len();
let target = len.saturating_add(additional);
if target > cap {
self.realloc(target);
}
}
pub fn shrink_to_fit(&mut self) {
if self.in_heap() {
let len = self.len();
let cap = self.capacity();
if cap > len {
self.realloc(len);
}
}
}
pub fn shrink_to(&mut self, min_capacity: usize) {
if self.in_heap() {
let len = self.len();
let cap = self.capacity();
if min_capacity >= len && min_capacity < cap {
self.realloc(min_capacity);
}
}
}
#[inline]
fn into_vec(self) -> Vec<T> {
if self.in_heap() {
let ptr = self.ptr.get();
let cap = self.cap_and_flag;
let len = self.len;
::core::mem::forget(self);
unsafe { Vec::from_raw_parts(ptr, len, cap) }
} else {
let len = self.len;
let mut ret = Vec::<T>::with_capacity(len);
unsafe {
if !T::IS_ZST {
let src = self.cache.as_ptr() as *const T;
let dst = ret.as_mut_ptr();
ptr::copy_nonoverlapping(src, dst, len);
}
::core::mem::forget(self);
ret.set_len(len);
}
ret
}
}
#[inline(always)]
pub fn push(&mut self, value: T) {
if self.len == self.capacity() {
crate::utils::cold_path();
self.reserve(1);
}
unsafe {
let ptr = self.as_mut_ptr();
ptr::write(ptr.add(self.len), value);
self.len += 1;
}
}
#[inline(always)]
pub unsafe fn push_unchecked(&mut self, value: T) {
unsafe {
let ptr = self.as_mut_ptr();
ptr::write(ptr.add(self.len), value);
self.len += 1;
}
}
#[inline]
pub fn pop(&mut self) -> Option<T> {
if self.len != 0 {
unsafe {
self.len -= 1;
Some(ptr::read(self.as_ptr().add(self.len)))
}
} else {
crate::utils::cold_path();
None
}
}
#[inline]
pub fn pop_if(&mut self, predicate: impl FnOnce(&mut T) -> bool) -> Option<T> {
let last = self.last_mut()?;
if predicate(last) { self.pop() } else { None }
}
pub fn insert(&mut self, index: usize, element: T) {
#[cold]
#[inline(never)]
fn assert_failed(index: usize, len: usize) -> ! {
panic!("insertion index (is {index}) should be <= len (is {len})");
}
let len = self.len;
if index > len {
assert_failed(index, len);
}
if len == self.capacity() {
crate::utils::cold_path();
self.reserve(1);
}
unsafe {
let p = self.as_mut_ptr().add(index);
if index < len {
ptr::copy(p, p.add(1), len - index);
}
ptr::write(p, element);
self.len += 1;
}
}
pub fn remove(&mut self, index: usize) -> T {
#[cold]
#[inline(never)]
fn assert_failed(index: usize, len: usize) -> ! {
panic!("removal index (is {index}) should be < len (is {len})");
}
let len = self.len;
if index >= len {
assert_failed(index, len);
}
unsafe {
let ptr = self.as_mut_ptr().add(index);
let ret = ptr::read(ptr);
ptr::copy(ptr.add(1), ptr, len - index - 1);
self.len -= 1;
ret
}
}
pub fn swap_remove(&mut self, index: usize) -> T {
#[cold]
#[inline(never)]
fn assert_failed(index: usize, len: usize) -> ! {
panic!("swap_remove index (is {index}) should be < len (is {len})");
}
let len = self.len;
if index >= len {
assert_failed(index, len);
}
unsafe {
let ptr = self.as_mut_ptr();
let value = ptr::read(ptr.add(index));
ptr::copy(ptr.add(len - 1), ptr.add(index), 1);
self.len -= 1;
value
}
}
pub fn append<const P: usize>(&mut self, other: &mut FastVecData<T, P>) {
unsafe {
let slice = other.as_slice();
let count = slice.len();
self.reserve(count);
let len = self.len();
let dst = self.as_mut_ptr().add(len);
ptr::copy_nonoverlapping::<T>(slice.as_ptr(), dst, count);
self.len += count;
other.set_len(0);
}
}
pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
where
F: FnMut() -> T,
{
let len = self.len();
if new_len > len {
self.reserve(new_len - len);
let ptr = self.as_mut_ptr();
(len..new_len).for_each(|idx| unsafe {
ptr::write(ptr.add(idx), f());
});
unsafe {
self.set_len(new_len);
}
} else {
self.truncate(new_len);
}
}
pub fn retain_mut<F: FnMut(&mut T) -> bool>(&mut self, mut f: F) {
let base_ptr = self.as_mut_ptr();
let len = self.len;
// Zero the length up front so a panic in `f` can only leak elements,
// never double-drop the ones already processed.
self.len = 0;
let mut count = 0usize;
for index in 0..len {
unsafe {
let dst = base_ptr.add(index);
if f(&mut *dst) {
ptr::copy(dst, base_ptr.add(count), 1);
count += 1;
} else {
ptr::drop_in_place(dst);
}
}
}
self.len = count;
}
#[inline]
pub fn retain<F: FnMut(&T) -> bool>(&mut self, mut f: F) {
self.retain_mut(|v| f(v));
}
pub fn dedup_by<F: FnMut(&mut T, &mut T) -> bool>(&mut self, mut same_bucket: F) {
let len = self.len();
if len <= 1 {
return;
}
let ptr = self.as_mut_ptr();
let mut left = 0usize;
unsafe {
let mut p_l = ptr.add(left);
for right in 1..len {
let p_r = ptr.add(right);
if !same_bucket(&mut *p_r, &mut *p_l) {
left += 1;
p_l = ptr.add(left);
if right != left {
ptr::swap(p_r, p_l);
}
}
}
}
self.truncate(left + 1);
}
#[inline]
pub fn dedup_by_key<F, K>(&mut self, mut key: F)
where
F: FnMut(&mut T) -> K,
K: PartialEq,
{
self.dedup_by(|a, b| key(a) == key(b));
}
#[inline]
pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
let len = self.len();
let cap = self.capacity();
unsafe {
let data = self.as_mut_ptr().add(len);
slice::from_raw_parts_mut(data as *mut MaybeUninit<T>, cap - len)
}
}
}
super::utils::impl_common_traits!(FastVecData<T, N>);
impl<T, U, const N: usize> PartialEq<FastVecData<U, N>> for FastVecData<T, N>
where
T: PartialEq<U>,
{
#[inline]
fn eq(&self, other: &FastVecData<U, N>) -> bool {
PartialEq::eq(self.as_slice(), other.as_slice())
}
}
impl<T: PartialEq, const N: usize> FastVecData<T, N> {
#[inline]
pub fn dedup(&mut self) {
self.dedup_by(|x, y| PartialEq::eq(x, y));
}
}
impl<T: Clone, const N: usize> FastVecData<T, N> {
pub fn resize(&mut self, new_len: usize, value: T) {
let len = self.len();
if new_len > len {
self.reserve(new_len - len);
(len..new_len - 1).for_each(|_| unsafe {
self.push_unchecked(value.clone());
});
unsafe {
self.push_unchecked(value);
}
} else {
self.truncate(new_len);
}
}
pub fn extend_from_slice(&mut self, other: &[T]) {
self.reserve(other.len());
other.iter().for_each(|item| unsafe {
self.push_unchecked(item.clone());
});
}
}
impl<'a, T: Clone + 'a, const N: usize> Extend<&'a T> for FastVecData<T, N> {
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
let iter = iter.into_iter();
self.reserve(iter.size_hint().0);
iter.for_each(|item| {
self.push(item.clone());
});
}
}
impl<T, const N: usize> Extend<T> for FastVecData<T, N> {
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
let iter = iter.into_iter();
self.reserve(iter.size_hint().0);
iter.for_each(|item| {
self.push(item);
});
}
}
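/// Draining iterator returned by [`FastVecData::drain`]. The source length is
/// truncated to the start of the range up front, so leaking the iterator
/// leaks the drained elements instead of double-dropping them; `Drop` moves
/// the tail back into place and restores the length.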
pub struct Drain<'a, T: 'a, const N: usize> {
tail_start: usize,
tail_len: usize,
iter: slice::Iter<'a, T>,
vec: NonNull<FastVecData<T, N>>,
}
impl<T, const N: usize> FastVecData<T, N> {
pub fn drain<R: core::ops::RangeBounds<usize>>(&mut self, range: R) -> Drain<'_, T, N> {
let len = self.len();
let (start, end) = split_range_bound(&range, len);
unsafe {
self.set_len(start);
let data = self.as_ptr().add(start);
let range_slice = slice::from_raw_parts(data, end - start);
Drain {
tail_start: end,
tail_len: len - end,
iter: range_slice.iter(),
vec: NonNull::new_unchecked(self as *mut _),
}
}
}
}
impl<T: Debug, const N: usize> Debug for Drain<'_, T, N> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
}
}
impl<T, const N: usize> Iterator for Drain<'_, T, N> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
self.iter
.next()
.map(|reference| unsafe { ptr::read(reference) })
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<T, const N: usize> DoubleEndedIterator for Drain<'_, T, N> {
#[inline]
fn next_back(&mut self) -> Option<Self::Item> {
self.iter
.next_back()
.map(|reference| unsafe { ptr::read(reference) })
}
}
impl<T, const N: usize> ExactSizeIterator for Drain<'_, T, N> {
#[inline]
fn len(&self) -> usize {
self.iter.len()
}
}
impl<T, const N: usize> FusedIterator for Drain<'_, T, N> {}
impl<'a, T: 'a, const N: usize> Drop for Drain<'a, T, N> {
fn drop(&mut self) {
struct DropGuard<'r, 'a, T, const N: usize>(&'r mut Drain<'a, T, N>);
impl<'r, 'a, T, const N: usize> Drop for DropGuard<'r, 'a, T, N> {
fn drop(&mut self) {
if self.0.tail_len > 0 {
unsafe {
let source_vec = self.0.vec.as_mut();
let start = source_vec.len();
let tail = self.0.tail_start;
if tail != start {
let base = source_vec.as_mut_ptr();
let src = base.add(tail);
let dst = base.add(start);
ptr::copy(src, dst, self.0.tail_len);
}
source_vec.set_len(start + self.0.tail_len);
}
}
}
}
let iter = core::mem::take(&mut self.iter);
let drop_len = iter.len();
let mut vec = self.vec;
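// ZSTs occupy no memory: account for the dropped elements and the kept
// tail purely through the length.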
if T::IS_ZST {
unsafe {
let vec = vec.as_mut();
let old_len = vec.len();
vec.set_len(old_len + drop_len + self.tail_len);
vec.truncate(old_len + self.tail_len);
}
return;
}
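// The guard shifts the tail back into place even if dropping one of the
// remaining elements below panics.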
let _guard = DropGuard(self);
if drop_len == 0 {
return;
}
let drop_ptr = iter.as_slice().as_ptr();
unsafe {
let vec_ptr = vec.as_mut().as_mut_ptr();
let drop_offset = drop_ptr.offset_from_unsigned(vec_ptr);
let to_drop = ptr::slice_from_raw_parts_mut(vec_ptr.add(drop_offset), drop_len);
ptr::drop_in_place(to_drop);
}
}
}
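/// Iterator returned by [`FastVecData::extract_if`]. The length is set to 0
/// while iterating (leaking the iterator leaks the elements); `Drop` shifts
/// the kept elements back into a contiguous prefix and restores the length.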
pub struct ExtractIf<'a, T, F: FnMut(&mut T) -> bool, const N: usize> {
vec: &'a mut FastVecData<T, N>,
idx: usize,
end: usize,
del: usize,
old_len: usize,
pred: F,
}
impl<T, const N: usize> FastVecData<T, N> {
pub fn extract_if<F, R>(&mut self, range: R, filter: F) -> ExtractIf<'_, T, F, N>
where
F: FnMut(&mut T) -> bool,
R: core::ops::RangeBounds<usize>,
{
let old_len = self.len();
let (start, end) = split_range_bound(&range, old_len);
unsafe {
self.set_len(0);
}
ExtractIf {
vec: self,
idx: start,
del: 0,
end,
old_len,
pred: filter,
}
}
}
impl<T, F: FnMut(&mut T) -> bool, const N: usize> Iterator for ExtractIf<'_, T, F, N> {
type Item = T;
fn next(&mut self) -> Option<T> {
while self.idx < self.end {
let i = self.idx;
let cur = unsafe { &mut *self.vec.as_mut_ptr().add(i) };
let drained = (self.pred)(cur);
self.idx += 1;
if drained {
self.del += 1;
return Some(unsafe { ptr::read(cur) });
} else if self.del > 0 {
unsafe {
let hole_slot = self.vec.as_mut_ptr().add(i - self.del);
ptr::copy_nonoverlapping(cur, hole_slot, 1);
}
}
}
None
}
fn size_hint(&self) -> (usize, Option<usize>) {
(0, Some(self.end - self.idx))
}
}
impl<T, F: FnMut(&mut T) -> bool, const N: usize> Drop for ExtractIf<'_, T, F, N> {
fn drop(&mut self) {
if !T::IS_ZST && self.del > 0 {
unsafe {
let base = self.vec.as_mut_ptr();
ptr::copy(
base.add(self.idx),
base.add(self.idx - self.del),
self.old_len - self.idx,
);
}
}
unsafe {
self.vec.set_len(self.old_len - self.del);
}
}
}
impl<T: Debug, F: FnMut(&mut T) -> bool, const N: usize> Debug for ExtractIf<'_, T, F, N> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
// `len` is set to 0 while the iterator is live, so peek through the raw
// pointer at the next unvisited element rather than the (empty) slice view.
let peek = if self.idx < self.end {
Some(unsafe { &*self.vec.as_ptr().add(self.idx) })
} else {
None
};
f.debug_struct("ExtractIf")
.field("peek", &peek)
.finish_non_exhaustive()
}
}
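/// Wrapper around [`FastVecData`] that calls [`FastVec::refresh`] before every
/// access, so the inner pointer is fixed up after the value has been moved.
/// The `PhantomData<*const ()>` marker suppresses the auto `Send`/`Sync`
/// impls; `Send` is restored below, while `Sync` is deliberately left out
/// because `refresh` mutates a `Cell` through `&self`.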
#[repr(transparent)]
pub struct FastVec<T, const N: usize> {
inner: FastVecData<T, N>,
_marker: PhantomData<*const ()>,
}
unsafe impl<T, const N: usize> Send for FastVec<T, N> where T: Send {}
impl<T, const N: usize> RefUnwindSafe for FastVec<T, N> where T: RefUnwindSafe {}
impl<T, const N: usize> FastVec<T, N> {
#[inline]
pub const fn new() -> Self {
Self {
inner: FastVecData::new(),
_marker: PhantomData,
}
}
#[inline]
pub fn with_capacity(capacity: usize) -> Self {
Self {
inner: FastVecData::with_capacity(capacity),
_marker: PhantomData,
}
}
#[inline]
pub const unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self {
Self {
inner: unsafe { FastVecData::from_raw_parts(ptr, length, capacity) },
_marker: PhantomData,
}
}
#[inline]
pub fn from_slice(slice: &[T]) -> Self
where
T: Copy,
{
Self {
inner: FastVecData::from_slice(slice),
_marker: PhantomData,
}
}
#[inline(always)]
pub const fn is_empty(&self) -> bool {
self.inner.len == 0
}
#[inline(always)]
pub const fn len(&self) -> usize {
self.inner.len
}
#[inline(always)]
pub const fn capacity(&self) -> usize {
self.inner.capacity()
}
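// Safe wrapper over `FastVecData::refresh`: `FastVec` is `!Sync`, so no other
// thread can observe the `Cell` while it is rewritten here.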
#[inline(always)]
pub fn refresh(&self) {
unsafe {
self.inner.refresh();
}
}
#[inline]
pub fn data(&mut self) -> &mut FastVecData<T, N> {
self.refresh();
&mut self.inner
}
#[inline]
pub fn as_slice(&self) -> &[T] {
self.refresh();
self.inner.as_slice()
}
#[inline]
pub fn as_mut_slice(&mut self) -> &mut [T] {
self.refresh();
self.inner.as_mut_slice()
}
pub fn into_vec(self) -> Vec<T> {
self.refresh();
self.inner.into_vec()
}
pub fn into_boxed_slice(self) -> Box<[T]> {
self.refresh();
self.inner.into_vec().into_boxed_slice()
}
}
impl<T, const N: usize> Default for FastVec<T, N> {
#[inline(always)]
fn default() -> Self {
Self::new()
}
}
super::utils::impl_common_traits!(FastVec<T, N>);
impl<T, U, const N: usize> PartialEq<FastVec<U, N>> for FastVec<T, N>
where
T: PartialEq<U>,
{
#[inline]
fn eq(&self, other: &FastVec<U, N>) -> bool {
PartialEq::eq(self.as_slice(), other.as_slice())
}
}
impl<T: Clone, const N: usize> Clone for FastVec<T, N> {
fn clone(&self) -> Self {
let mut vec = Self::with_capacity(self.len());
let dst = vec.data();
for item in self.as_slice() {
unsafe {
dst.push_unchecked(item.clone());
}
}
vec
}
fn clone_from(&mut self, source: &Self) {
let dst = self.data();
dst.clear();
dst.reserve(source.len());
for item in source.as_slice() {
unsafe {
dst.push_unchecked(item.clone());
}
}
}
}
impl<T, const N: usize> FromIterator<T> for FastVec<T, N> {
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
let iter = iter.into_iter();
let mut vec = Self::with_capacity(iter.size_hint().0);
let data = vec.data();
iter.for_each(|item| {
data.push(item);
});
vec
}
}
impl<T, const N: usize> From<FastVec<T, N>> for Vec<T> {
fn from(v: FastVec<T, N>) -> Self {
v.into_vec()
}
}
impl<T, const N: usize> From<FastVec<T, N>> for Box<[T]> {
fn from(v: FastVec<T, N>) -> Self {
v.into_boxed_slice()
}
}
impl<T, const N: usize, const P: usize> TryFrom<FastVec<T, N>> for [T; P] {
type Error = FastVec<T, N>;
fn try_from(mut vec: FastVec<T, N>) -> Result<[T; P], FastVec<T, N>> {
if vec.len() != P {
return Err(vec);
}
let data = vec.data();
let src = data.as_ptr();
unsafe { data.set_len(0) };
let array = unsafe { ptr::read(src as *const [T; P]) };
Ok(array)
}
}
impl<T: Clone, const N: usize> From<&[T]> for FastVec<T, N> {
fn from(s: &[T]) -> FastVec<T, N> {
let mut vec = FastVec::<T, N>::with_capacity(s.len());
let data = vec.data();
s.iter().for_each(|item| unsafe {
data.push_unchecked(item.clone());
});
vec
}
}
impl<T: Clone, const N: usize> From<&mut [T]> for FastVec<T, N> {
fn from(s: &mut [T]) -> FastVec<T, N> {
let mut vec = FastVec::<T, N>::with_capacity(s.len());
let data = vec.data();
s.iter().for_each(|item| unsafe {
data.push_unchecked(item.clone());
});
vec
}
}
impl<T: Clone, const N: usize, const P: usize> From<&[T; N]> for FastVec<T, P> {
fn from(s: &[T; N]) -> FastVec<T, P> {
Self::from(s.as_slice())
}
}
impl<T: Clone, const N: usize, const P: usize> From<&mut [T; N]> for FastVec<T, P> {
fn from(s: &mut [T; N]) -> Self {
Self::from(s.as_mut_slice())
}
}
impl<T, const N: usize, const P: usize> From<[T; N]> for FastVec<T, P> {
fn from(s: [T; N]) -> Self {
if N <= P {
let mut this = Self::new();
let data = this.data();
let ptr = data.as_mut_ptr();
let s = ManuallyDrop::new(s);
let len = s.len();
unsafe {
ptr::copy_nonoverlapping(s.as_ptr(), ptr, len);
data.set_len(len);
}
this
} else {
let vec = Vec::<T>::from(s);
FastVec::from(vec)
}
}
}
impl<T, const N: usize> From<Vec<T>> for FastVec<T, N> {
fn from(s: Vec<T>) -> Self {
let (p, l, c) = s.into_raw_parts();
unsafe { FastVec::from_raw_parts(p, l, c) }
}
}
impl<T, const N: usize> From<Box<[T]>> for FastVec<T, N> {
fn from(s: Box<[T]>) -> Self {
Self::from(s.into_vec())
}
}
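/// Owning iterator for [`FastVec`]. `index` is the front cursor; the back end
/// is tracked by shrinking the inner length, so `Drop` only has to drop the
/// elements in `index..len` and free the heap block, if any.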
pub struct IntoIter<T, const N: usize> {
vec: ManuallyDrop<FastVec<T, N>>,
index: usize,
}
unsafe impl<T: Send, const N: usize> Send for IntoIter<T, N> {}
unsafe impl<T: Sync, const N: usize> Sync for IntoIter<T, N> {}
impl<T, const N: usize> IntoIterator for FastVec<T, N> {
type Item = T;
type IntoIter = IntoIter<T, N>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
IntoIter {
vec: ManuallyDrop::new(self),
index: 0,
}
}
}
impl<T, const N: usize> Iterator for IntoIter<T, N> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.index < self.vec.len() {
self.index += 1;
let ptr = self.vec.data().as_ptr();
unsafe { Some(ptr::read(ptr.add(self.index - 1))) }
} else {
None
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let v = self.vec.len() - self.index;
(v, Some(v))
}
}
impl<T, const N: usize> DoubleEndedIterator for IntoIter<T, N> {
#[inline]
fn next_back(&mut self) -> Option<Self::Item> {
let len = self.vec.len();
if self.index < len {
let data = self.vec.data();
unsafe {
data.set_len(len - 1);
}
unsafe { Some(ptr::read(data.as_ptr().add(len - 1))) }
} else {
None
}
}
}
impl<T, const N: usize> ExactSizeIterator for IntoIter<T, N> {
#[inline]
fn len(&self) -> usize {
self.vec.len() - self.index
}
}
impl<T, const N: usize> FusedIterator for IntoIter<T, N> {}
impl<T: Debug, const N: usize> Debug for IntoIter<T, N> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
// Show only the elements that have not been yielded from the front yet;
// back-yielded elements are already excluded via the shrunken length.
let remaining = &self.vec.as_slice()[self.index..];
f.debug_tuple("IntoIter").field(&remaining).finish()
}
}
impl<T, const N: usize> Drop for IntoIter<T, N> {
fn drop(&mut self) {
let len = self.vec.len();
let data = self.vec.data();
if self.index < len {
unsafe {
let ptr = data.as_mut_ptr().add(self.index);
let count = len - self.index;
let to_drop = ptr::slice_from_raw_parts_mut(ptr, count);
ptr::drop_in_place(to_drop);
}
}
data.dealloc();
}
}
impl<T, const N: usize> FastVec<T, N> {
pub fn drain<R: core::ops::RangeBounds<usize>>(&mut self, range: R) -> Drain<'_, T, N> {
self.data().drain(range)
}
pub fn extract_if<F, R>(&mut self, range: R, filter: F) -> ExtractIf<'_, T, F, N>
where
F: FnMut(&mut T) -> bool,
R: core::ops::RangeBounds<usize>,
{
self.data().extract_if(range, filter)
}
}
#[cfg(test)]
mod tests {
use super::FastVec;
use core::sync::atomic::{AtomicUsize, Ordering};
macro_rules! define_tracker {
() => {
static DROPS: AtomicUsize = AtomicUsize::new(0);
struct Tracker;
impl Drop for Tracker {
fn drop(&mut self) {
DROPS.fetch_add(1, Ordering::SeqCst);
}
}
};
}
#[test]
fn drop_zst() {
define_tracker!();
DROPS.store(0, Ordering::SeqCst);
let mut vec = FastVec::<Tracker, 0>::new();
let data = vec.data();
data.push(Tracker);
data.push(Tracker);
data.push(Tracker);
data.push(Tracker);
data.push(Tracker);
{
let mut drain = data.drain(1..4);
let one = drain.next_back().unwrap();
drop(one);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
}
assert_eq!(DROPS.load(Ordering::SeqCst), 3);
drop(vec);
assert_eq!(DROPS.load(Ordering::SeqCst), 5);
}
#[test]
fn drop_vec() {
define_tracker!();
DROPS.store(0, Ordering::SeqCst);
{
let mut vec = FastVec::<Tracker, 4>::new();
let data = vec.data();
data.push(Tracker);
data.push(Tracker);
data.push(Tracker);
assert_eq!(DROPS.load(Ordering::SeqCst), 0);
}
assert_eq!(DROPS.load(Ordering::SeqCst), 3);
}
#[test]
fn drop_pop_remove() {
define_tracker!();
DROPS.store(0, Ordering::SeqCst);
let mut vec = FastVec::<Tracker, 4>::new();
let data = vec.data();
data.push(Tracker);
data.push(Tracker);
data.push(Tracker);
let popped = data.pop().unwrap();
assert_eq!(DROPS.load(Ordering::SeqCst), 0);
drop(popped);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
let removed = data.remove(0);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
drop(removed);
assert_eq!(DROPS.load(Ordering::SeqCst), 2);
drop(vec);
assert_eq!(DROPS.load(Ordering::SeqCst), 3);
}
#[test]
fn drop_into_iter() {
define_tracker!();
DROPS.store(0, Ordering::SeqCst);
let mut vec = FastVec::<Tracker, 4>::new();
let data = vec.data();
data.push(Tracker);
data.push(Tracker);
data.push(Tracker);
let mut iter = vec.into_iter();
let first = iter.next().unwrap();
drop(first);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
drop(iter);
assert_eq!(DROPS.load(Ordering::SeqCst), 3);
}
#[test]
fn drop_drain() {
define_tracker!();
DROPS.store(0, Ordering::SeqCst);
let mut vec = FastVec::<Tracker, 8>::new();
let data = vec.data();
data.push(Tracker);
data.push(Tracker);
data.push(Tracker);
data.push(Tracker);
data.push(Tracker);
{
let mut drain = data.drain(1..4);
let first = drain.next().unwrap();
drop(first);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
}
assert_eq!(DROPS.load(Ordering::SeqCst), 3);
drop(vec);
assert_eq!(DROPS.load(Ordering::SeqCst), 5);
}
#[test]
fn drop_extract_if() {
static DROPS: AtomicUsize = AtomicUsize::new(0);
struct Tracker {
id: usize,
}
impl Drop for Tracker {
fn drop(&mut self) {
DROPS.fetch_add(1, Ordering::SeqCst);
}
}
DROPS.store(0, Ordering::SeqCst);
let mut vec = FastVec::<Tracker, 2>::new();
let data = vec.data();
for id in 0..6 {
data.push(Tracker { id });
}
let removed: FastVec<Tracker, 8> = data.extract_if(.., |t| t.id % 2 == 0).collect();
assert_eq!(DROPS.load(Ordering::SeqCst), 0);
drop(removed);
assert_eq!(DROPS.load(Ordering::SeqCst), 3);
drop(vec);
assert_eq!(DROPS.load(Ordering::SeqCst), 6);
}
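// Added usage sketch (not part of the original suite): exercises the
// inline-to-heap spill on `push` and the move back into the inline cache on
// `shrink_to_fit`. The exact grown capacity depends on `min_cap`, so only a
// lower bound is asserted.
#[test]
fn spill_and_shrink_back() {
let mut vec = FastVec::<u32, 4>::new();
assert_eq!(vec.capacity(), 4);
{
let data = vec.data();
for i in 0..8u32 {
data.push(i);
}
}
// Growing past N spills to the heap.
assert!(vec.capacity() >= 8);
assert_eq!(vec.as_slice(), &[0, 1, 2, 3, 4, 5, 6, 7]);
{
let data = vec.data();
data.truncate(2);
// Shrinking to at most N moves the elements back into the cache.
data.shrink_to_fit();
}
assert_eq!(vec.capacity(), 4);
assert_eq!(vec.as_slice(), &[0, 1]);
}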
}