use alloc::alloc as malloc;
use alloc::boxed::Box;
use alloc::vec::Vec;
use core::alloc::Layout;
use core::fmt::Debug;
use core::iter::FusedIterator;
use core::mem::{ManuallyDrop, MaybeUninit};
use core::num::NonZeroUsize;
use core::panic::{RefUnwindSafe, UnwindSafe};
use core::ptr::{self, NonNull};
use core::slice;
use super::utils::{IsZST, min_cap, split_range_bound};
/// Maximum representable length/capacity: the low `usize::BITS - 1` bits.
const MAX_LEN: usize = usize::MAX >> 1;
/// The top bit of `len_and_flag`: set while the elements live on the heap.
const MARKER: usize = usize::MAX ^ MAX_LEN;
/// Element storage: either a `(pointer, capacity)` pair for a heap buffer or
/// an inline array of up to `N` slots. Which field is active is tracked by
/// the `MARKER` bit of `SmallVec::len_and_flag`, never by the union itself.
union Data<T, const N: usize> {
    heap: (NonNull<T>, usize),
    cache: [ManuallyDrop<T>; N],
}
/// A vector storing up to `N` elements inline and spilling to the heap
/// beyond that. The top bit of `len_and_flag` records the representation;
/// the remaining bits hold the length.
pub struct SmallVec<T, const N: usize> {
    len_and_flag: usize,
    data: Data<T, N>,
}
// SAFETY: SmallVec uniquely owns its elements (inline or on the heap), so it
// is Sync/Send exactly when T is.
unsafe impl<T: Sync, const N: usize> Sync for SmallVec<T, N> {}
unsafe impl<T: Send, const N: usize> Send for SmallVec<T, N> {}
// Unwind safety mirrors the element type, like Vec<T>.
impl<T, const N: usize> UnwindSafe for SmallVec<T, N> where T: UnwindSafe {}
impl<T, const N: usize> RefUnwindSafe for SmallVec<T, N> where T: RefUnwindSafe {}
impl<T, const N: usize> Default for SmallVec<T, N> {
    /// Equivalent to [`SmallVec::new`]: an empty, inline vector.
    #[inline(always)]
    fn default() -> Self {
        Self::new()
    }
}
impl<T, const N: usize> Drop for SmallVec<T, N> {
    fn drop(&mut self) {
        // Drop the elements first, then release the heap buffer (if any).
        self.clear();
        self.dealloc();
    }
}
impl<T, const N: usize> SmallVec<T, N> {
/// Whether the elements currently live in a heap allocation.
#[inline(always)]
const fn in_heap(&self) -> bool {
    self.len_and_flag & MARKER != 0
}
/// Current capacity: the heap capacity when spilled, otherwise the inline `N`.
#[inline(always)]
pub const fn capacity(&self) -> usize {
    if self.in_heap() {
        crate::utils::cold_path();
        // SAFETY: the MARKER bit is set, so `heap` is the active field.
        unsafe { self.data.heap.1 }
    } else {
        N
    }
}
/// Number of initialized elements (the MARKER bit masked off).
#[inline(always)]
pub const fn len(&self) -> usize {
    self.len_and_flag & MAX_LEN
}
#[inline(always)]
pub const fn is_empty(&self) -> bool {
self.len_and_flag & MAX_LEN == 0
}
/// Raw pointer to the first element, valid for `len()` reads.
#[inline(always)]
pub const fn as_ptr(&self) -> *const T {
    if self.in_heap() {
        crate::utils::cold_path();
        // SAFETY: MARKER is set, so the heap field is active.
        unsafe { self.data.heap.0.as_ptr() }
    } else {
        // SAFETY: MARKER is clear, so the inline cache is active.
        unsafe { self.data.cache.as_ptr() as *const T }
    }
}
/// Mutable raw pointer to the first element, valid for `capacity()` writes.
#[inline(always)]
pub const fn as_mut_ptr(&mut self) -> *mut T {
    if self.in_heap() {
        crate::utils::cold_path();
        // SAFETY: MARKER is set, so the heap field is active.
        unsafe { self.data.heap.0.as_ptr() }
    } else {
        // SAFETY: MARKER is clear, so the inline cache is active.
        unsafe { self.data.cache.as_mut_ptr() as *mut T }
    }
}
/// Borrows the initialized elements as a slice.
#[inline]
pub const fn as_slice(&self) -> &[T] {
    let data = self.as_ptr();
    let len = self.len();
    // SAFETY: the first `len` slots are always initialized.
    unsafe { slice::from_raw_parts(data, len) }
}
/// Mutably borrows the initialized elements as a slice.
#[inline]
pub const fn as_mut_slice(&mut self) -> &mut [T] {
    let data = self.as_mut_ptr();
    let len = self.len();
    // SAFETY: the first `len` slots are always initialized.
    unsafe { slice::from_raw_parts_mut(data, len) }
}
/// Sets the length without touching any element.
///
/// # Safety
/// `new_len` must not exceed `capacity()`, and the first `new_len` slots
/// must be initialized.
#[inline(always)]
pub const unsafe fn set_len(&mut self, new_len: usize) {
    debug_assert!(new_len <= self.capacity());
    // Preserve the heap/inline MARKER bit while replacing the length bits.
    self.len_and_flag = (self.len_and_flag & MARKER) | new_len;
}
/// Drops all elements; keeps the current capacity and representation.
pub fn clear(&mut self) {
    if core::mem::needs_drop::<T>() {
        unsafe {
            let slice: &mut [T] = self.as_mut_slice();
            ptr::drop_in_place::<[T]>(slice);
        }
    }
    // Zero the length bits, keep the MARKER bit.
    self.len_and_flag &= MARKER;
}
/// Shortens the vector to `len` elements, dropping the tail; no-op when
/// `len >= self.len()`.
pub fn truncate(&mut self, len: usize) {
    let old = self.len_and_flag;
    let old_len = old & MAX_LEN;
    if old_len > len {
        if core::mem::needs_drop::<T>() {
            unsafe {
                let data = self.as_mut_ptr().add(len);
                // Shadowed: number of elements being dropped.
                let len = old_len - len;
                let to_drop = ptr::slice_from_raw_parts_mut(data, len);
                ptr::drop_in_place::<[T]>(to_drop)
            }
        }
        // Keep the MARKER bit, install the new (smaller) length.
        self.len_and_flag = (old & MARKER) | len;
    }
}
}
impl<T, const N: usize> SmallVec<T, N> {
/// Moves the elements into storage of exactly `cap` slots, switching between
/// the inline and heap representations as needed.
///
/// `cap` must be at least `len()` (debug-checked); panics when
/// `cap > MAX_LEN`.
#[inline(never)]
fn realloc(&mut self, cap: usize) {
    let len = self.len();
    debug_assert!(cap >= len);
    assert!(
        cap <= MAX_LEN,
        "the capacity of SmallVec cannot exceed isize::MAX"
    );
    if cap <= N {
        // Shrinking back into the inline cache; only the shrink_* paths get
        // here, and they only run on the heap representation.
        debug_assert!(self.in_heap());
        if !T::IS_ZST {
            let (ptr, old_cap) = unsafe { self.data.heap };
            let old_layout = Layout::array::<T>(old_cap).unwrap();
            // Clear the flag/length before overwriting the union's cache field.
            self.len_and_flag = 0;
            unsafe {
                let dst = self.data.cache.as_mut_ptr() as *mut T;
                ptr::copy_nonoverlapping::<T>(ptr.as_ptr(), dst, len);
                malloc::dealloc(ptr.as_ptr() as *mut u8, old_layout);
            }
        }
        // Inline representation: MARKER bit clear.
        self.len_and_flag = len & MAX_LEN;
    } else if self.in_heap() {
        // Already spilled: resize the existing heap allocation.
        let (mut ptr, old_cap) = unsafe { self.data.heap };
        if !T::IS_ZST {
            let old_layout = Layout::array::<T>(old_cap).unwrap();
            let new_layout = Layout::array::<T>(cap).unwrap();
            let new_size = new_layout.size();
            let raw_ptr = ptr.as_ptr() as *mut u8;
            unsafe {
                ptr = NonNull::new(malloc::realloc(raw_ptr, old_layout, new_size) as *mut T)
                    .unwrap_or_else(|| malloc::handle_alloc_error(new_layout));
            }
        }
        self.data.heap = (ptr, cap);
    } else {
        // First spill from the inline cache to the heap.
        let ptr: NonNull<T> = if !T::IS_ZST {
            let layout = Layout::array::<T>(cap).unwrap();
            NonNull::new(unsafe { malloc::alloc(layout) as *mut T })
                .unwrap_or_else(|| malloc::handle_alloc_error(layout))
        } else {
            // ZSTs never allocate: use a dangling, well-aligned pointer.
            let align = ::core::mem::align_of::<T>();
            debug_assert!(align != 0);
            NonNull::<T>::without_provenance(unsafe { NonZeroUsize::new_unchecked(align) })
        };
        if !T::IS_ZST {
            unsafe {
                let src = self.data.cache.as_ptr() as *const T;
                ptr::copy_nonoverlapping::<T>(src, ptr.as_ptr(), len);
            }
        }
        // Heap representation: set the MARKER bit alongside the length.
        self.len_and_flag = len | MARKER;
        self.data.heap = (ptr, cap);
    }
}
/// Frees the heap buffer if one exists; does not drop any elements.
fn dealloc(&mut self) {
    if !T::IS_ZST && self.in_heap() {
        let (ptr, cap) = unsafe { self.data.heap };
        let layout = Layout::array::<T>(cap).unwrap();
        unsafe {
            malloc::dealloc(ptr.as_ptr() as *mut u8, layout);
        }
    }
}
}
impl<T, const N: usize> SmallVec<T, N> {
/// Creates an empty vector using the inline representation.
#[must_use]
#[inline(always)]
pub const fn new() -> Self {
    assert!(
        N <= MAX_LEN,
        "The capacity of SmallVec can not exceed isize::MAX"
    );
    Self {
        data: Data {
            // The cache starts uninitialized; only the first `len` slots are
            // ever read, and ManuallyDrop suppresses automatic drops.
            #[expect(clippy::uninit_assumed_init)]
            cache: unsafe { MaybeUninit::<[ManuallyDrop<T>; N]>::uninit().assume_init() },
        },
        len_and_flag: 0,
    }
}
/// Creates an empty vector able to hold `capacity` elements; stays inline
/// when `capacity <= N`.
#[inline]
#[must_use]
pub fn with_capacity(capacity: usize) -> Self {
    let mut this = Self::new();
    if capacity > N {
        this.realloc(capacity);
    }
    this
}
/// Rebuilds a SmallVec from a raw heap buffer. The result always uses the
/// heap representation, even when `capacity <= N`.
///
/// # Safety
/// `ptr` must be non-null and come from the global allocator with room for
/// `capacity` elements (for non-ZST `T` this implies `capacity > 0`, since
/// the buffer is later passed to `dealloc`); the first `length` slots must
/// be initialized; and `capacity` must not exceed `MAX_LEN`.
#[inline]
pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self {
    debug_assert!(length <= capacity && capacity & MARKER == 0);
    let ptr = NonNull::new(ptr).unwrap();
    Self {
        data: Data {
            heap: (ptr, capacity),
        },
        len_and_flag: length | MARKER,
    }
}
/// Builds a vector by bitwise-copying a slice of `Copy` elements.
#[inline]
pub fn from_slice(slice: &[T]) -> Self
where
    T: Copy,
{
    let mut this = Self::with_capacity(slice.len());
    unsafe {
        if !T::IS_ZST {
            let ptr = this.as_mut_ptr();
            ptr::copy_nonoverlapping(slice.as_ptr(), ptr, slice.len());
        }
        this.set_len(slice.len());
    }
    this
}
pub fn reserve(&mut self, additional: usize) {
let cap = self.capacity();
let len = self.len();
let target = len.saturating_add(additional);
if target > cap {
self.realloc(target.min(MARKER).next_power_of_two());
}
}
/// Reserves capacity for exactly `additional` more elements, with no
/// amortized over-allocation.
pub fn reserve_exact(&mut self, additional: usize) {
    let cap = self.capacity();
    let len = self.len();
    let target = len.saturating_add(additional);
    if target > cap {
        self.realloc(target);
    }
}
/// Shrinks the heap buffer to exactly `len()`; moves back inline when the
/// elements fit the cache. No-op for the inline representation.
pub fn shrink_to_fit(&mut self) {
    if self.in_heap() {
        let len = self.len();
        let cap = self.capacity();
        if cap > len {
            self.realloc(len);
        }
    }
}
/// Shrinks the heap buffer to `min_capacity` when that is at least `len()`
/// and smaller than the current capacity. No-op when inline.
pub fn shrink_to(&mut self, min_capacity: usize) {
    if self.in_heap() {
        let len = self.len();
        let cap = self.capacity();
        if min_capacity >= len && min_capacity < cap {
            self.realloc(min_capacity);
        }
    }
}
/// Converts into a `Vec`, handing over the heap buffer when spilled, or
/// copying the inline elements into a fresh allocation otherwise.
#[inline]
pub fn into_vec(self) -> Vec<T> {
    if self.in_heap() {
        let (ptr, cap) = unsafe { self.data.heap };
        let len = self.len();
        // Buffer ownership moves to the Vec; skip our own Drop.
        ::core::mem::forget(self);
        unsafe { Vec::from_raw_parts(ptr.as_ptr(), len, cap) }
    } else {
        // Inline: MARKER is clear, so len_and_flag is exactly the length.
        let len = self.len_and_flag;
        let mut ret = Vec::<T>::with_capacity(len);
        unsafe {
            if !T::IS_ZST {
                let src = self.data.cache.as_ptr() as *const T;
                let dst = ret.as_mut_ptr();
                ptr::copy_nonoverlapping(src, dst, len);
            }
            // Elements were bitwise-moved out; skip our own Drop.
            ::core::mem::forget(self);
            ret.set_len(len);
        }
        ret
    }
}
/// Converts into a boxed slice via `Vec` (drops any excess capacity).
#[inline]
pub fn into_boxed_slice(self) -> Box<[T]> {
    self.into_vec().into_boxed_slice()
}
/// Appends an element, doubling the capacity (or spilling to the heap)
/// when full.
pub fn push(&mut self, value: T) {
    if self.in_heap() {
        crate::utils::cold_path();
        let len = self.len();
        // SAFETY: MARKER is set, so the heap field is active.
        let cap = unsafe { self.data.heap.1 };
        if len == cap {
            crate::utils::cold_path();
            // Double, but never below the minimum heap capacity.
            let new_cap = (cap << 1).max(min_cap::<T>());
            self.realloc(new_cap);
        }
        unsafe {
            let ptr = self.data.heap.0.as_ptr();
            ptr::write(ptr.add(len), value);
            // Length bits only; MARKER stays set.
            self.len_and_flag += 1;
        }
    } else {
        // Inline: MARKER is clear, so len_and_flag is exactly the length.
        let len = self.len_and_flag;
        let ptr: *mut T = if len == N {
            crate::utils::cold_path();
            // Cache is full: spill to the heap first.
            let new_cap = (N << 1).max(min_cap::<T>());
            self.realloc(new_cap);
            unsafe { self.data.heap.0.as_ptr() }
        } else {
            unsafe { self.data.cache.as_mut_ptr() as *mut T }
        };
        unsafe {
            ptr::write(ptr.add(len), value);
        }
        // realloc preserved the length bits (and may have set MARKER).
        self.len_and_flag += 1;
    }
}
/// Appends an element without checking capacity.
///
/// # Safety
/// The caller must ensure `len() < capacity()`.
#[inline(always)]
pub unsafe fn push_unchecked(&mut self, value: T) {
    let ptr = self.as_mut_ptr();
    let len = self.len();
    unsafe {
        ptr::write(ptr.add(len), value);
    }
    self.len_and_flag += 1;
}
/// Removes and returns the last element, or `None` when empty.
#[inline]
pub fn pop(&mut self) -> Option<T> {
    let len = self.len();
    if len != 0 {
        unsafe {
            // len > 0, so this decrement cannot borrow into the MARKER bit.
            self.len_and_flag -= 1;
            Some(ptr::read(self.as_ptr().add(len - 1)))
        }
    } else {
        crate::utils::cold_path();
        None
    }
}
/// Pops the last element only when `predicate` approves it; returns `None`
/// when empty or when the predicate declines.
#[inline]
pub fn pop_if(&mut self, predicate: impl FnOnce(&mut T) -> bool) -> Option<T> {
    let last = self.last_mut()?;
    if predicate(last) { self.pop() } else { None }
}
/// Inserts `element` at `index`, shifting everything after it one slot to
/// the right.
///
/// Panics when `index > len()`.
pub fn insert(&mut self, index: usize, element: T) {
    #[cold]
    #[inline(never)]
    fn assert_failed(index: usize, len: usize) -> ! {
        panic!("insertion index (is {index}) should be <= len (is {len})");
    }
    let len = self.len();
    if index > len {
        assert_failed(index, len);
    }
    if len == self.capacity() {
        crate::utils::cold_path();
        self.reserve(1);
    }
    unsafe {
        let p = self.as_mut_ptr().add(index);
        if index < len {
            // Shift the tail right by one slot to open a hole at `index`.
            ptr::copy(p, p.add(1), len - index);
        }
        ptr::write(p, element);
        self.len_and_flag += 1;
    }
}
/// Removes and returns the element at `index`, shifting the tail one slot
/// to the left.
///
/// Panics when `index >= len()`.
pub fn remove(&mut self, index: usize) -> T {
    #[cold]
    #[inline(never)]
    fn assert_failed(index: usize, len: usize) -> ! {
        panic!("removal index (is {index}) should be < len (is {len})");
    }
    let len = self.len();
    if index >= len {
        assert_failed(index, len);
    }
    unsafe {
        let ptr = self.as_mut_ptr().add(index);
        let ret = ptr::read(ptr);
        // Close the hole by sliding the tail down over it.
        ptr::copy(ptr.add(1), ptr, len - index - 1);
        self.len_and_flag -= 1;
        ret
    }
}
/// Removes and returns the element at `index` in O(1) by moving the last
/// element into its place; does not preserve order.
///
/// Panics when `index >= len()`.
pub fn swap_remove(&mut self, index: usize) -> T {
    #[cold]
    #[inline(never)]
    fn assert_failed(index: usize, len: usize) -> ! {
        panic!("swap_remove index (is {index}) should be < len (is {len})");
    }
    let len = self.len();
    if index >= len {
        assert_failed(index, len);
    }
    unsafe {
        let ptr = self.as_mut_ptr();
        let value = ptr::read(ptr.add(index));
        // Fill the hole with the last element (no-op copy when index is last).
        ptr::copy(ptr.add(len - 1), ptr.add(index), 1);
        self.len_and_flag -= 1;
        value
    }
}
/// Moves every element of `other` onto the end of `self`, leaving `other`
/// empty. Elements are bitwise-moved, not cloned.
pub fn append<const P: usize>(&mut self, other: &mut SmallVec<T, P>) {
    unsafe {
        let slice = other.as_slice();
        let count = slice.len();
        self.reserve(count);
        if !T::IS_ZST {
            let len = self.len();
            let dst = self.as_mut_ptr().add(len);
            ptr::copy_nonoverlapping::<T>(slice.as_ptr(), dst, count);
        }
        self.len_and_flag += count;
        // Ownership moved into self; `other` must forget its elements.
        other.set_len(0);
    }
}
/// Resizes to `new_len`, filling new slots with values produced by `f`, or
/// truncating when shrinking.
pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
where
    F: FnMut() -> T,
{
    let len = self.len();
    if new_len > len {
        self.reserve(new_len - len);
        let ptr = self.as_mut_ptr();
        (len..new_len).for_each(|idx| unsafe {
            ptr::write(ptr.add(idx), f());
        });
        unsafe {
            self.set_len(new_len);
        }
    } else {
        self.truncate(new_len);
    }
}
/// Keeps only the elements for which `f` returns `true`, passing `&mut T`
/// and compacting survivors in place.
pub fn retain_mut<F: FnMut(&mut T) -> bool>(&mut self, mut f: F) {
    let base_ptr = self.as_mut_ptr();
    let len = self.len_and_flag & MAX_LEN;
    let flag = self.len_and_flag & MARKER;
    // Zero the length (and flag) while iterating: if `f` panics, Drop sees
    // an empty vector — the remaining elements leak, but nothing is dropped
    // twice.
    self.len_and_flag = 0;
    let mut count = 0usize;
    for index in 0..len {
        unsafe {
            let dst = base_ptr.add(index);
            if f(&mut *dst) {
                // Compact the survivor down into the next free slot.
                ptr::copy(dst, base_ptr.add(count), 1);
                count += 1;
            } else {
                ptr::drop_in_place(dst);
            }
        }
    }
    // Restore the representation flag with the new length.
    self.len_and_flag = count | flag;
}
/// Keeps only the elements for which `f` returns `true` (shared-reference
/// variant of [`Self::retain_mut`]).
#[inline]
pub fn retain<F: FnMut(&T) -> bool>(&mut self, mut f: F) {
    self.retain_mut(|v| f(v));
}
/// Removes consecutive elements for which `same_bucket` returns `true`,
/// keeping the first of each run. Duplicates are swapped past `left` and
/// dropped by the final `truncate`.
pub fn dedup_by<F: FnMut(&mut T, &mut T) -> bool>(&mut self, mut same_bucket: F) {
    let len = self.len();
    if len <= 1 {
        return;
    }
    let ptr = self.as_mut_ptr();
    // `left` is the index of the last kept (unique) element.
    let mut left = 0usize;
    unsafe {
        let mut p_l = ptr.add(left);
        for right in 1..len {
            let p_r = ptr.add(right);
            if !same_bucket(&mut *p_r, &mut *p_l) {
                left += 1;
                p_l = ptr.add(left);
                if right != left {
                    // Swap (not overwrite) so duplicates stay initialized
                    // until truncate drops them.
                    ptr::swap(p_r, p_l);
                }
            }
        }
    }
    self.truncate(left + 1);
}
/// Removes consecutive elements that map to equal keys, keeping the first
/// of each run.
#[inline]
pub fn dedup_by_key<F, K>(&mut self, mut key: F)
where
    F: FnMut(&mut T) -> K,
    K: PartialEq,
{
    self.dedup_by(|a, b| key(a) == key(b));
}
/// Borrows the uninitialized tail (`capacity() - len()` slots) for manual
/// initialization, typically followed by [`Self::set_len`].
#[inline]
pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
    let len = self.len();
    let cap = self.capacity();
    unsafe {
        let data = self.as_mut_ptr().add(len);
        slice::from_raw_parts_mut(data as *mut MaybeUninit<T>, cap - len)
    }
}
}
// Presumably generates the container trait impls shared with the sibling vec
// types (see super::utils) — confirm against the macro definition.
super::utils::impl_common_traits!(SmallVec<T, N>);
impl<T, U, const N: usize> PartialEq<SmallVec<U, N>> for SmallVec<T, N>
where
    T: PartialEq<U>,
{
    /// Element-wise comparison through the slice representations.
    #[inline]
    fn eq(&self, other: &SmallVec<U, N>) -> bool {
        self.as_slice() == other.as_slice()
    }
}
impl<T: PartialEq, const N: usize> SmallVec<T, N> {
    /// Removes consecutive equal elements, keeping the first of each run.
    #[inline]
    pub fn dedup(&mut self) {
        self.dedup_by(|lhs, rhs| lhs == rhs);
    }
}
impl<T: Clone, const N: usize> Clone for SmallVec<T, N> {
    /// Deep-copies every element into a fresh vector sized exactly for them.
    fn clone(&self) -> Self {
        let source = self.as_slice();
        let mut out = Self::with_capacity(source.len());
        for item in source {
            // SAFETY: capacity for every element was reserved up front.
            unsafe { out.push_unchecked(item.clone()) };
        }
        out
    }
    /// Clears and refills `self`, reusing its existing buffer when possible.
    fn clone_from(&mut self, source: &Self) {
        let items = source.as_slice();
        self.clear();
        self.reserve(items.len());
        for item in items {
            // SAFETY: `reserve` guaranteed room for every element.
            unsafe { self.push_unchecked(item.clone()) };
        }
    }
}
impl<T: Clone, const N: usize> SmallVec<T, N> {
    /// Resizes to `new_len`, filling new slots with clones of `value` (the
    /// last slot receives `value` itself, saving one clone), or truncating
    /// when shrinking.
    pub fn resize(&mut self, new_len: usize, value: T) {
        let len = self.len();
        if new_len > len {
            self.reserve(new_len - len);
            // Clone into all but the last new slot…
            (len..new_len - 1).for_each(|_| unsafe {
                self.push_unchecked(value.clone());
            });
            // …then move `value` into the final one.
            unsafe {
                self.push_unchecked(value);
            }
        } else {
            self.truncate(new_len);
        }
    }
    /// Appends clones of every element of `other`.
    pub fn extend_from_slice(&mut self, other: &[T]) {
        self.reserve(other.len());
        other.iter().for_each(|item| unsafe {
            self.push_unchecked(item.clone());
        });
    }
}
impl<'a, T: Clone + 'a, const N: usize> Extend<&'a T> for SmallVec<T, N> {
    /// Appends a clone of every referenced item.
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        let items = iter.into_iter();
        // Reserve at least the lower size-hint bound up front.
        self.reserve(items.size_hint().0);
        for item in items {
            self.push(item.clone());
        }
    }
}
impl<T, const N: usize> Extend<T> for SmallVec<T, N> {
    /// Appends every item produced by the iterator.
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let items = iter.into_iter();
        // Reserve at least the lower size-hint bound up front.
        self.reserve(items.size_hint().0);
        for item in items {
            self.push(item);
        }
    }
}
impl<T, const N: usize> FromIterator<T> for SmallVec<T, N> {
    /// Collects an iterator, pre-sizing from the lower size-hint bound.
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let items = iter.into_iter();
        let mut out = Self::with_capacity(items.size_hint().0);
        for item in items {
            out.push(item);
        }
        out
    }
}
impl<T, const N: usize> From<SmallVec<T, N>> for Vec<T> {
    /// See [`SmallVec::into_vec`].
    fn from(v: SmallVec<T, N>) -> Self {
        v.into_vec()
    }
}
impl<T, const N: usize> From<SmallVec<T, N>> for Box<[T]> {
    /// See [`SmallVec::into_boxed_slice`].
    fn from(v: SmallVec<T, N>) -> Self {
        v.into_boxed_slice()
    }
}
impl<T, const N: usize, const P: usize> TryFrom<SmallVec<T, N>> for [T; P] {
    type Error = SmallVec<T, N>;
    /// Converts into a fixed-size array when the length matches `P` exactly;
    /// otherwise returns the vector unchanged.
    fn try_from(mut vec: SmallVec<T, N>) -> Result<[T; P], SmallVec<T, N>> {
        if vec.len() != P {
            return Err(vec);
        }
        let src = vec.as_ptr();
        // Zero the length first so vec's Drop only frees the buffer and
        // cannot double-drop the elements moved out below.
        unsafe { vec.set_len(0) };
        let array = unsafe { ptr::read(src as *const [T; P]) };
        Ok(array)
    }
}
impl<T: Clone, const N: usize> From<&[T]> for SmallVec<T, N> {
    /// Clones every slice element into a fresh vector.
    fn from(s: &[T]) -> SmallVec<T, N> {
        let mut out = SmallVec::<T, N>::with_capacity(s.len());
        for item in s {
            // SAFETY: capacity for the whole slice was reserved above.
            unsafe { out.push_unchecked(item.clone()) };
        }
        out
    }
}
impl<T: Clone, const N: usize> From<&mut [T]> for SmallVec<T, N> {
fn from(s: &mut [T]) -> SmallVec<T, N> {
let mut vec = SmallVec::<T, N>::with_capacity(s.len());
s.iter().for_each(|item| unsafe {
vec.push_unchecked(item.clone());
});
vec
}
}
impl<T: Clone, const N: usize, const P: usize> From<&[T; N]> for SmallVec<T, P> {
    /// Clones the array's elements via the slice conversion.
    fn from(s: &[T; N]) -> SmallVec<T, P> {
        Self::from(s.as_slice())
    }
}
impl<T: Clone, const N: usize, const P: usize> From<&mut [T; N]> for SmallVec<T, P> {
    /// Clones the array's elements via the mutable-slice conversion.
    fn from(s: &mut [T; N]) -> Self {
        Self::from(s.as_mut_slice())
    }
}
impl<T, const N: usize, const P: usize> From<[T; N]> for SmallVec<T, P> {
    /// Moves the array's elements in; stays inline when they fit the cache.
    fn from(s: [T; N]) -> Self {
        if N <= P {
            let mut this = Self::new();
            let ptr = unsafe { this.data.cache.as_mut_ptr() as *mut T };
            // ManuallyDrop: ownership transfers via the bitwise copy below,
            // so the array itself must not run destructors.
            let s = ManuallyDrop::new(s);
            let len = s.len();
            unsafe {
                ptr::copy_nonoverlapping(s.as_ptr(), ptr, len);
                this.set_len(len);
            }
            this
        } else {
            // Too large for the cache: go through Vec for a heap buffer.
            let vec = Vec::<T>::from(s);
            SmallVec::from(vec)
        }
    }
}
impl<T, const N: usize> From<Vec<T>> for SmallVec<T, N> {
fn from(s: Vec<T>) -> Self {
let (p, l, c) = s.into_raw_parts();
unsafe { SmallVec::from_raw_parts(p, l, c) }
}
}
impl<T, const N: usize> From<Box<[T]>> for SmallVec<T, N> {
    /// Takes over the boxed slice's allocation via `Vec`.
    fn from(s: Box<[T]>) -> Self {
        Self::from(s.into_vec())
    }
}
/// Owning iterator over a `SmallVec`. Elements in `[index, vec.len())` are
/// still owned and pending; earlier ones have been moved out by `next`.
pub struct IntoIter<T, const N: usize> {
    // ManuallyDrop: IntoIter's own Drop handles elements and the buffer.
    vec: ManuallyDrop<SmallVec<T, N>>,
    index: usize,
}
// SAFETY: IntoIter owns its elements just like the SmallVec it came from.
unsafe impl<T: Send, const N: usize> Send for IntoIter<T, N> {}
unsafe impl<T: Sync, const N: usize> Sync for IntoIter<T, N> {}
impl<T, const N: usize> IntoIterator for SmallVec<T, N> {
    type Item = T;
    type IntoIter = IntoIter<T, N>;
    /// Wraps the vector; elements are moved out lazily by the iterator.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            vec: ManuallyDrop::new(self),
            index: 0,
        }
    }
}
impl<T, const N: usize> Iterator for IntoIter<T, N> {
    type Item = T;
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.vec.len() {
            self.index += 1;
            // SAFETY: the slot at the old index is initialized and has not
            // been yielded; advancing `index` marks it as moved out.
            unsafe { Some(ptr::read(self.vec.as_ptr().add(self.index - 1))) }
        } else {
            None
        }
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: the remaining elements are [index, len).
        let v = self.vec.len() - self.index;
        (v, Some(v))
    }
}
impl<T, const N: usize> DoubleEndedIterator for IntoIter<T, N> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        let len = self.vec.len();
        if self.index < len {
            unsafe {
                // Shrink the length first so the slot is marked moved out.
                self.vec.set_len(len - 1);
            }
            // SAFETY: the last slot was initialized and unyielded.
            unsafe { Some(ptr::read(self.vec.as_ptr().add(len - 1))) }
        } else {
            None
        }
    }
}
impl<T, const N: usize> ExactSizeIterator for IntoIter<T, N> {
    /// Remaining element count: `[index, vec.len())`.
    #[inline]
    fn len(&self) -> usize {
        self.vec.len() - self.index
    }
}
// Fused: once index reaches len, `next` keeps returning None.
impl<T, const N: usize> FusedIterator for IntoIter<T, N> {}
impl<T: Debug, const N: usize> Debug for IntoIter<T, N> {
    /// Formats only the elements not yet yielded.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Elements before `index` were moved out by `next`; formatting the
        // full `as_slice()` would read moved-out values (use-after-move),
        // so restrict the view to the remaining `[index..]` tail.
        let remaining = &self.vec.as_slice()[self.index..];
        f.debug_tuple("IntoIter").field(&remaining).finish()
    }
}
impl<T, const N: usize> Drop for IntoIter<T, N> {
    fn drop(&mut self) {
        let len = self.vec.len();
        if self.index < len {
            // Drop the elements that were never yielded.
            unsafe {
                let ptr = self.vec.as_mut_ptr().add(self.index);
                let count = len - self.index;
                let to_drop = ptr::slice_from_raw_parts_mut(ptr, count);
                ptr::drop_in_place(to_drop);
            }
        }
        // The vec is ManuallyDrop, so free its buffer explicitly.
        self.vec.dealloc();
    }
}
/// Draining iterator: the source vec's length is truncated to the range
/// start while iterating; the tail `[tail_start, tail_start + tail_len)` is
/// moved back down when the iterator is dropped.
pub struct Drain<'a, T: 'a, const N: usize> {
    tail_start: usize,
    tail_len: usize,
    iter: slice::Iter<'a, T>,
    vec: NonNull<SmallVec<T, N>>,
}
impl<T, const N: usize> SmallVec<T, N> {
    /// Removes and yields the elements in `range`; the tail shifts down when
    /// the returned iterator is dropped.
    pub fn drain<R: core::ops::RangeBounds<usize>>(&mut self, range: R) -> Drain<'_, T, N> {
        let len = self.len();
        let (start, end) = split_range_bound(&range, len);
        unsafe {
            // Truncate to the range start for panic safety; Drain::drop
            // restores the tail afterwards.
            self.set_len(start);
            let data = self.as_ptr().add(start);
            let range_slice = slice::from_raw_parts(data, end - start);
            Drain {
                tail_start: end,
                tail_len: len - end,
                iter: range_slice.iter(),
                vec: NonNull::new_unchecked(self as *mut _),
            }
        }
    }
}
impl<T: Debug, const N: usize> Debug for Drain<'_, T, N> {
    /// Formats the elements not yet drained.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
    }
}
impl<T, const N: usize> Iterator for Drain<'_, T, N> {
    type Item = T;
    #[inline]
    fn next(&mut self) -> Option<T> {
        // SAFETY: each slice element is read exactly once; the slots are
        // considered moved out once yielded.
        self.iter
            .next()
            .map(|reference| unsafe { ptr::read(reference) })
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
impl<T, const N: usize> DoubleEndedIterator for Drain<'_, T, N> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        // SAFETY: same contract as `next` — each slot read at most once.
        self.iter
            .next_back()
            .map(|reference| unsafe { ptr::read(reference) })
    }
}
impl<T, const N: usize> ExactSizeIterator for Drain<'_, T, N> {
    /// Remaining element count, delegated to the inner slice iterator.
    #[inline]
    fn len(&self) -> usize {
        self.iter.len()
    }
}
// Fused: slice::Iter is itself fused.
impl<T, const N: usize> FusedIterator for Drain<'_, T, N> {}
impl<'a, T: 'a, const N: usize> Drop for Drain<'a, T, N> {
    fn drop(&mut self) {
        // Guard that moves the tail back down to the range start; as a Drop
        // impl it runs even if dropping a drained element panics below.
        struct DropGuard<'r, 'a, T, const N: usize>(&'r mut Drain<'a, T, N>);
        impl<'r, 'a, T, const N: usize> Drop for DropGuard<'r, 'a, T, N> {
            fn drop(&mut self) {
                if self.0.tail_len > 0 {
                    unsafe {
                        let source_vec = self.0.vec.as_mut();
                        // len was truncated to the range start by drain().
                        let start = source_vec.len();
                        let tail = self.0.tail_start;
                        if tail != start {
                            let base = source_vec.as_mut_ptr();
                            let src = base.add(tail);
                            let dst = base.add(start);
                            ptr::copy(src, dst, self.0.tail_len);
                        }
                        source_vec.set_len(start + self.0.tail_len);
                    }
                }
            }
        }
        let iter = core::mem::take(&mut self.iter);
        let drop_len = iter.len();
        let mut vec = self.vec;
        if T::IS_ZST {
            // ZSTs have no storage to move: fake the lengths and let
            // truncate run the destructors of the un-yielded elements.
            unsafe {
                let vec = vec.as_mut();
                let old_len = vec.len();
                vec.set_len(old_len + drop_len + self.tail_len);
                vec.truncate(old_len + self.tail_len);
            }
            return;
        }
        let _guard = DropGuard(self);
        if drop_len == 0 {
            return;
        }
        // Drop the remaining un-yielded elements in place.
        let drop_ptr = iter.as_slice().as_ptr();
        unsafe {
            let vec_ptr = vec.as_mut().as_mut_ptr();
            let drop_offset = drop_ptr.offset_from_unsigned(vec_ptr);
            let to_drop = ptr::slice_from_raw_parts_mut(vec_ptr.add(drop_offset), drop_len);
            ptr::drop_in_place(to_drop);
        }
    }
}
/// Iterator for `extract_if`: `idx` scans `[idx, end)`, `del` counts removed
/// elements (the size of the hole being compacted over), and `old_len` is
/// the vector's length before iteration (its live length is 0 meanwhile).
pub struct ExtractIf<'a, T, F: FnMut(&mut T) -> bool, const N: usize> {
    vec: &'a mut SmallVec<T, N>,
    idx: usize,
    end: usize,
    del: usize,
    old_len: usize,
    pred: F,
}
impl<T, const N: usize> SmallVec<T, N> {
    /// Returns an iterator removing the elements in `range` for which
    /// `filter` returns `true`. The vector's length is zeroed for panic
    /// safety and restored by `ExtractIf::drop`.
    pub fn extract_if<F, R>(&mut self, range: R, filter: F) -> ExtractIf<'_, T, F, N>
    where
        F: FnMut(&mut T) -> bool,
        R: core::ops::RangeBounds<usize>,
    {
        let old_len = self.len();
        let (start, end) = split_range_bound(&range, old_len);
        unsafe {
            self.set_len(0);
        }
        ExtractIf {
            vec: self,
            idx: start,
            del: 0,
            end,
            old_len,
            pred: filter,
        }
    }
}
impl<T, F: FnMut(&mut T) -> bool, const N: usize> Iterator for ExtractIf<'_, T, F, N> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        while self.idx < self.end {
            let i = self.idx;
            // SAFETY: slots in [idx, end) are initialized and unvisited.
            let cur = unsafe { &mut *self.vec.as_mut_ptr().add(i) };
            let drained = (self.pred)(cur);
            self.idx += 1;
            if drained {
                self.del += 1;
                // Move the element out; its slot joins the hole.
                return Some(unsafe { ptr::read(cur) });
            } else if self.del > 0 {
                // Slide the kept element left over the hole.
                unsafe {
                    let hole_slot = self.vec.as_mut_ptr().add(i - self.del);
                    ptr::copy_nonoverlapping(cur, hole_slot, 1);
                }
            }
        }
        None
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Anywhere from zero to all remaining candidates may match.
        (0, Some(self.end - self.idx))
    }
}
impl<T, F: FnMut(&mut T) -> bool, const N: usize> Drop for ExtractIf<'_, T, F, N> {
    fn drop(&mut self) {
        if !T::IS_ZST && self.del > 0 {
            // Close the hole: move the unvisited range plus the tail left by
            // `del` slots.
            unsafe {
                let base = self.vec.as_mut_ptr();
                ptr::copy(
                    base.add(self.idx),
                    base.add(self.idx - self.del),
                    self.old_len - self.idx,
                );
            }
        }
        // Restore the length that extract_if zeroed, minus the removals.
        unsafe {
            self.vec.set_len(self.old_len - self.del);
        }
    }
}
impl<T: Debug, F: FnMut(&mut T) -> bool, const N: usize> Debug for ExtractIf<'_, T, F, N> {
    /// Formats the next candidate element, if any.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // `extract_if` zeroed the vec's length for panic safety, so a
        // length-checked `get(self.idx)` always returned None; read the next
        // unvisited slot through the raw buffer instead.
        let peek = if self.idx < self.end {
            // SAFETY: slots in [idx, end) are initialized and unvisited.
            unsafe { Some(&*self.vec.as_ptr().add(self.idx)) }
        } else {
            None
        };
        f.debug_struct("ExtractIf")
            .field("peek", &peek)
            .finish_non_exhaustive()
    }
}
#[cfg(test)]
mod tests {
    use super::SmallVec;
    use core::sync::atomic::{AtomicUsize, Ordering};
    // Declares a ZST whose drops are counted in a test-local static, so each
    // test can verify exactly when destructors run.
    macro_rules! define_tracker {
        () => {
            static DROPS: AtomicUsize = AtomicUsize::new(0);
            struct Tracker;
            impl Drop for Tracker {
                fn drop(&mut self) {
                    DROPS.fetch_add(1, Ordering::SeqCst);
                }
            }
        };
    }
    // ZST with N = 0: exercises the dangling-pointer "heap" path and the
    // Drain ZST branch.
    #[test]
    fn drop_zst() {
        define_tracker!();
        DROPS.store(0, Ordering::SeqCst);
        let mut vec = SmallVec::<Tracker, 0>::new();
        vec.push(Tracker);
        vec.push(Tracker);
        vec.push(Tracker);
        vec.push(Tracker);
        vec.push(Tracker);
        {
            let mut drain = vec.drain(1..4);
            let one = drain.next_back().unwrap();
            drop(one);
            assert_eq!(DROPS.load(Ordering::SeqCst), 1);
        }
        // Dropping the drain destroys the two un-yielded drained elements.
        assert_eq!(DROPS.load(Ordering::SeqCst), 3);
        drop(vec);
        assert_eq!(DROPS.load(Ordering::SeqCst), 5);
    }
    // Elements must drop exactly once whether stored inline or spilled.
    #[test]
    fn drop_inline_and_heap() {
        define_tracker!();
        DROPS.store(0, Ordering::SeqCst);
        {
            let mut vec = SmallVec::<Tracker, 4>::new();
            vec.push(Tracker);
            vec.push(Tracker);
            vec.push(Tracker);
            assert_eq!(DROPS.load(Ordering::SeqCst), 0);
        }
        assert_eq!(DROPS.load(Ordering::SeqCst), 3);
        DROPS.store(0, Ordering::SeqCst);
        {
            // N = 2 forces a spill to the heap on the third push.
            let mut vec = SmallVec::<Tracker, 2>::new();
            vec.push(Tracker);
            vec.push(Tracker);
            vec.push(Tracker);
            assert_eq!(DROPS.load(Ordering::SeqCst), 0);
        }
        assert_eq!(DROPS.load(Ordering::SeqCst), 3);
    }
    // pop/remove hand ownership to the caller; drops happen there, not inside.
    #[test]
    fn drop_pop_remove() {
        define_tracker!();
        DROPS.store(0, Ordering::SeqCst);
        let mut vec = SmallVec::<Tracker, 2>::new();
        vec.push(Tracker);
        vec.push(Tracker);
        vec.push(Tracker);
        let popped = vec.pop().unwrap();
        assert_eq!(DROPS.load(Ordering::SeqCst), 0);
        drop(popped);
        assert_eq!(DROPS.load(Ordering::SeqCst), 1);
        let removed = vec.remove(0);
        assert_eq!(DROPS.load(Ordering::SeqCst), 1);
        drop(removed);
        assert_eq!(DROPS.load(Ordering::SeqCst), 2);
        drop(vec);
        assert_eq!(DROPS.load(Ordering::SeqCst), 3);
    }
    // IntoIter must drop only un-yielded elements when it is dropped.
    #[test]
    fn drop_into_iter() {
        define_tracker!();
        DROPS.store(0, Ordering::SeqCst);
        let mut vec = SmallVec::<Tracker, 2>::new();
        vec.push(Tracker);
        vec.push(Tracker);
        vec.push(Tracker);
        let mut iter = vec.into_iter();
        let first = iter.next().unwrap();
        drop(first);
        assert_eq!(DROPS.load(Ordering::SeqCst), 1);
        drop(iter);
        assert_eq!(DROPS.load(Ordering::SeqCst), 3);
    }
    // Drain drops un-yielded drained elements and preserves the tail.
    #[test]
    fn drop_drain() {
        define_tracker!();
        DROPS.store(0, Ordering::SeqCst);
        let mut vec = SmallVec::<Tracker, 2>::new();
        vec.push(Tracker);
        vec.push(Tracker);
        vec.push(Tracker);
        vec.push(Tracker);
        vec.push(Tracker);
        {
            let mut drain = vec.drain(1..4);
            let first = drain.next().unwrap();
            drop(first);
            assert_eq!(DROPS.load(Ordering::SeqCst), 1);
        }
        assert_eq!(DROPS.load(Ordering::SeqCst), 3);
        drop(vec);
        assert_eq!(DROPS.load(Ordering::SeqCst), 5);
    }
    // extract_if keeps non-matching elements alive and compacted in place.
    #[test]
    fn drop_extract_if() {
        static DROPS: AtomicUsize = AtomicUsize::new(0);
        // Non-ZST tracker: the id drives the filter predicate.
        struct Tracker {
            id: usize,
        }
        impl Drop for Tracker {
            fn drop(&mut self) {
                DROPS.fetch_add(1, Ordering::SeqCst);
            }
        }
        DROPS.store(0, Ordering::SeqCst);
        let mut vec = SmallVec::<Tracker, 2>::new();
        for id in 0..6 {
            vec.push(Tracker { id });
        }
        let removed: SmallVec<Tracker, 8> = vec.extract_if(.., |t| t.id % 2 == 0).collect();
        assert_eq!(DROPS.load(Ordering::SeqCst), 0);
        drop(removed);
        assert_eq!(DROPS.load(Ordering::SeqCst), 3);
        drop(vec);
        assert_eq!(DROPS.load(Ordering::SeqCst), 6);
    }
}