#![cfg_attr(not(feature = "alloc"), doc = "
# WARNING
The `alloc` feature is disabled. This means that a `TinyVec` won't be able to
grow over its stack capacity.
The following functions from [TinyVec] can cause the program to panic if the vector exceeds its
capacity.
- [with_capacity]
- [from_array](TinyVec::from_array)
- [from_tiny_vec](TinyVec::from_tiny_vec)
- [from_slice_copied](TinyVec::from_slice_copied)
- [repeat](TinyVec::repeat)
- [reserve]
- [reserve_exact]
- [push]
- [push_unchecked](TinyVec::push_unchecked)
- [insert](TinyVec::insert)
- [insert_unchecked](TinyVec::insert_unchecked)
- [insert_slice](TinyVec::insert_slice)
- [insert_slice_copied](TinyVec::insert_slice_copied)
- [insert_iter](TinyVec::insert_iter)
- [resize](TinyVec::resize)
- [reserve](TinyVec::reserve)
- [resize_with](TinyVec::resize_with)
- [resize_zeroed](TinyVec::resize_zeroed)
- [extend_from_slice](TinyVec::extend_from_slice)
- [copy_from_slice](TinyVec::copy_from_slice)
- [extend_from_within](TinyVec::extend_from_within)
- [extend_from_within_copied](TinyVec::extend_from_within_copied)
- [append](TinyVec::append)
## Alternatives
| May Panic | No Panic |
| --------- | -------- |
| [push] | [push_within_capacity](TinyVec::push_within_capacity) |
| [reserve] | [try_reserve](TinyVec::try_reserve) |
| [reserve_exact] | [try_reserve_exact](TinyVec::try_reserve_exact) |
| [with_capacity] | [try_with_capacity](TinyVec::try_with_capacity) |
[push]: TinyVec::push
[reserve]: TinyVec::reserve
[reserve_exact]: TinyVec::reserve_exact
[with_capacity]: TinyVec::with_capacity
")]
#![allow(incomplete_features)]
#![cfg_attr(feature = "use-nightly-features", feature(min_specialization, slice_swap_unchecked, generic_const_exprs))]
#![cfg_attr(feature = "use-nightly-features", feature(extend_one, extend_one_unchecked))]
#![cfg_attr(feature = "use-nightly-features", feature(iter_advance_by))]
#![cfg_attr(feature = "use-nightly-features", feature(can_vector, write_all_vectored))]
#![no_std]
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "alloc")]
use alloc::{
vec::Vec,
boxed::Box,
collections::VecDeque,
};
use drain::Drain;
use extract_if::ExtractIf;
#[cfg(feature = "serde")]
use serde::Deserialize;
use core::borrow::{Borrow, BorrowMut};
use core::hash::Hash;
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::{Bound, Deref, DerefMut, Range, RangeBounds};
use core::ptr::NonNull;
use core::{fmt, ptr};
use core::slice;
mod raw;
use raw::RawVec;
pub use raw::ResizeError;
mod cow;
pub use cow::Cow;
pub mod iter;
pub mod drain;
pub mod extract_if;
/// Backing storage for [`TinyVec`]: either an inline array on the stack or a
/// heap allocation. The union itself carries no discriminant — which variant
/// is live is tracked externally by the flag bit packed into [`Length`].
union TinyVecInner<T, const N: usize> {
    // Inline storage; slots past the logical length are uninitialized.
    stack: ManuallyDrop<[MaybeUninit<T>; N]>,
    // Heap storage: pointer + capacity (length lives outside, in `Length`).
    raw: RawVec<T>,
}
impl<T, const N: usize> TinyVecInner<T, N> {
    /// Pointer to the first slot of the inline array.
    ///
    /// Safety: the caller must know the `stack` variant is the live one.
    #[inline(always)]
    const unsafe fn as_ptr_stack(&self) -> *const T {
        unsafe { &raw const self.stack as *const T }
    }
    /// Mutable pointer to the first slot of the inline array.
    ///
    /// Safety: the caller must know the `stack` variant is the live one.
    #[inline(always)]
    const unsafe fn as_ptr_stack_mut(&mut self) -> *mut T {
        unsafe { &raw mut self.stack as *mut T }
    }
    /// Pointer to the first element of the heap buffer.
    ///
    /// Safety: the caller must know the `raw` variant is the live one.
    #[inline(always)]
    const unsafe fn as_ptr_heap(&self) -> *const T {
        unsafe { self.raw.ptr.as_ptr() }
    }
    /// Mutable pointer to the first element of the heap buffer.
    ///
    /// Safety: the caller must know the `raw` variant is the live one.
    #[inline(always)]
    const unsafe fn as_ptr_heap_mut(&mut self) -> *mut T {
        unsafe { self.raw.ptr.as_ptr() }
    }
}
/// Packed logical length + storage discriminant for [`TinyVec`].
///
/// Bit 0 is the storage flag (`0` = inline/stack, `1` = heap); the remaining
/// bits hold the length, i.e. `self.0 == (len << 1) | flag`.
#[repr(transparent)]
struct Length(usize);
impl Length {
    /// Length `len` with the stack flag set.
    #[inline(always)]
    const fn new_stack(len: usize) -> Self {
        Self(len << 1)
    }
    /// Length `len` with the heap flag set.
    #[inline(always)]
    #[cfg(feature = "alloc")]
    const fn new_heap(len: usize) -> Self {
        Self(len << 1 | 0b1)
    }
    /// `true` while the elements live in the inline buffer.
    #[inline(always)]
    const fn is_stack(&self) -> bool {
        (self.0 & 0b1) == 0
    }
    /// Mark the storage as heap-allocated; the length is preserved.
    #[inline(always)]
    const fn set_heap(&mut self) {
        self.0 |= 0b1;
    }
    /// Mark the storage as inline; the length is preserved.
    #[inline(always)]
    #[cfg(feature = "alloc")]
    const fn set_stack(&mut self) {
        // BUG FIX: this was `self.0 &= 0b0`, which zeroed the *entire* word
        // and so discarded the length along with the flag (e.g. after
        // `switch_to_stack` the vector would report length 0). Only the
        // low flag bit must be cleared.
        self.0 &= !0b1;
    }
    /// Overwrite the length, preserving the storage flag.
    #[inline(always)]
    const fn set(&mut self, n: usize) {
        self.0 &= 0b1;
        self.0 |= n << 1;
    }
    /// Current logical length.
    #[inline(always)]
    const fn get(&self) -> usize {
        self.0 >> 1
    }
    /// Increase the length by `n`; the flag bit is untouched because the
    /// addend is pre-shifted.
    #[inline(always)]
    const fn add(&mut self, n: usize) {
        self.0 += n << 1;
    }
    /// Decrease the length by `n`; the flag bit is untouched.
    #[inline(always)]
    const fn sub(&mut self, n: usize) {
        self.0 -= n << 1;
    }
}
/// Create a [`TinyVec`] with `vec!`-like syntax: `tinyvec![]`,
/// `tinyvec![elem; n]`, or `tinyvec![a, b, c]`.
#[macro_export]
macro_rules! tinyvec {
    // Internal helper: expands to `1` per element so the list form can count
    // its arguments at compile time.
    (@one $e:expr) => { 1 };
    () => {
        $crate::TinyVec::new()
    };
    ($elem:expr; $n:expr) => ({
        let mut tv = $crate::TinyVec::new();
        tv.resize($n, $elem);
        tv
    });
    ($($x:expr),+ $(,)?) => ({
        // Count elements in a const block so capacity is reserved once.
        let n = const {
            0 $( + $crate::tinyvec!(@one $x) )+
        };
        let mut vec = $crate::TinyVec::with_capacity(n);
        $(vec.push($x);)+
        vec
    });
}
/// Number of `T`s that fit in the same space as the heap header
/// (`RawVec<T>`), i.e. the largest inline capacity that does not grow the
/// `TinyVec` type beyond a plain heap vector.
pub const fn n_elements_for_stack<T>() -> usize {
    n_elements_for_bytes::<T>(mem::size_of::<RawVec<T>>())
}
/// How many values of type `T` fit into `n` bytes.
///
/// Zero-sized types report `isize::MAX as usize`, the maximum element count
/// any Rust collection may hold.
pub const fn n_elements_for_bytes<T>(n: usize) -> usize {
    match mem::size_of::<T>() {
        0 => isize::MAX as usize,
        size => n / size,
    }
}
/// Normalize any `RangeBounds` over a collection of length `len` into a
/// concrete half-open `Range`, mirroring `core::slice::range`.
///
/// # Panics
/// Panics when the start exceeds the end, when the end exceeds `len`, or
/// when an excluded start / included end bound overflows `usize`.
fn slice_range<R>(range: R, len: usize) -> Range<usize>
where
    R: RangeBounds<usize>
{
    let start = match range.start_bound() {
        Bound::Included(n) => *n,
        // BUG FIX: `*n + 1` wrapped silently in release builds for
        // `usize::MAX`; fail loudly instead (as `core::slice::range` does).
        Bound::Excluded(n) => n.checked_add(1).expect("range start overflows usize"),
        Bound::Unbounded => 0,
    };
    let end = match range.end_bound() {
        Bound::Included(n) => n.checked_add(1).expect("range end overflows usize"),
        Bound::Excluded(n) => *n,
        Bound::Unbounded => len,
    };
    assert!(start <= end, "range start ({start}) must be <= range end ({end})");
    assert!(end <= len, "range end ({end}) out of bounds for length {len}");
    Range { start, end }
}
/// A vector that stores up to `N` elements inline and spills to the heap
/// when it grows past that (growing requires the `alloc` feature).
///
/// With `use-nightly-features`, `N` defaults to however many elements fit in
/// the heap header, so `TinyVec<T>` is no bigger than the heap representation.
pub struct TinyVec<T,
    #[cfg(not(feature = "use-nightly-features"))]
    const N: usize,
    #[cfg(feature = "use-nightly-features")]
    const N: usize = { n_elements_for_stack::<T>() },
> {
    // Which union variant is live is decided by the flag bit inside `len`.
    inner: TinyVecInner<T, N>,
    len: Length,
}
impl<T, const N: usize> TinyVec<T, N> {
    /// Move the inline elements into a fresh heap allocation with room for at
    /// least `len + n` elements; `exact` selects the exact-growth policy.
    ///
    /// Safety: must only be called while the vector lives on the stack, and
    /// `T` must not be a ZST (ZSTs never need heap storage).
    // NOTE(review): not gated on `alloc`; presumably `RawVec` growth returns
    // `ResizeError` when `alloc` is disabled — confirm.
    unsafe fn switch_to_heap(&mut self, n: usize, exact: bool) -> Result<(), ResizeError> {
        debug_assert!(self.lives_on_stack());
        debug_assert_ne!(mem::size_of::<T>(), 0);
        let mut vec = RawVec::new();
        if exact {
            vec.try_expand_if_needed_exact(0, self.len.get() + n)?;
        } else {
            vec.try_expand_if_needed(0, self.len.get() + n)?;
        }
        unsafe {
            // Move the initialized prefix into the new buffer, then install
            // the heap variant. Stack and fresh heap buffer never overlap.
            let src = self.inner.as_ptr_stack();
            let dst = vec.ptr.as_ptr();
            ptr::copy_nonoverlapping(src, dst, self.len());
            self.inner.raw = vec;
        }
        self.len.set_heap();
        Ok(())
    }
    /// Copy the heap elements back into the inline buffer and free the heap
    /// allocation.
    ///
    /// Safety: must only be called while on the heap and with `len <= N`.
    #[cfg(feature = "alloc")]
    unsafe fn switch_to_stack(&mut self) {
        debug_assert!(!self.lives_on_stack());
        debug_assert_ne!(mem::size_of::<T>(), 0,
            "We shouldn't call switch_to_stack, since T is a ZST, and\
            shouldn't be moved to the heap in the first place");
        let mut rv = unsafe { self.inner.raw };
        let mut stack = [const { MaybeUninit::uninit() }; N];
        unsafe {
            let src = rv.ptr.as_ptr();
            let dst = stack.as_mut_ptr() as *mut T;
            ptr::copy_nonoverlapping(src,dst,self.len());
            // Elements were moved out; only the allocation itself is freed.
            rv.destroy();
        }
        self.inner.stack = ManuallyDrop::new(stack);
        self.len.set_stack();
    }
    /// Split the buffer into (initialized prefix, spare capacity) plus a
    /// mutable handle on the packed length so callers can commit writes.
    const fn split_at_spare_mut_with_len(&mut self) -> (&mut [T], &mut [MaybeUninit<T>], &mut Length) {
        unsafe {
            let len = self.len();
            let ptr = self.as_mut_ptr();
            let spare_ptr = ptr.add(len).cast::<MaybeUninit<T>>();
            let spare_len = self.capacity() - len;
            let slice = slice::from_raw_parts_mut(ptr, len);
            let spare_slice = slice::from_raw_parts_mut(spare_ptr, spare_len);
            (slice, spare_slice, &mut self.len)
        }
    }
    /// Shrink the heap capacity to `cap` (`cap >= len`), moving back to the
    /// inline buffer when everything fits there.
    #[cfg(feature = "alloc")]
    fn _shrink(&mut self, cap: usize) {
        assert!(!self.lives_on_stack());
        assert!(cap >= self.len.get());
        if cap <= N {
            unsafe { self.switch_to_stack(); }
        } else {
            unsafe { self.inner.raw.shrink_to_fit(cap); };
        }
    }
}
impl<T, const N: usize> TinyVec<T, N> {
/// New, empty vector living entirely in its inline buffer; never allocates.
#[must_use]
pub const fn new() -> Self {
    let stack = [ const { MaybeUninit::uninit() }; N ];
    Self {
        inner: TinyVecInner { stack: ManuallyDrop::new(stack) },
        len: Length::new_stack(0),
    }
}
/// Empty vector able to hold `cap` elements without reallocating.
///
/// # Panics
/// Panics (via `ResizeError::handle`) if the heap allocation fails or is
/// unavailable.
#[must_use]
pub fn with_capacity(cap: usize) -> Self {
    Self::try_with_capacity(cap).unwrap_or_else(|err| err.handle())
}
/// Fallible version of [`with_capacity`](Self::with_capacity): the heap is
/// only touched when `cap` exceeds the inline capacity `N`.
pub fn try_with_capacity(cap: usize) -> Result<Self,ResizeError> {
    let mut len = Length(0);
    let inner = if cap <= N {
        let s = [const { MaybeUninit::uninit() }; N];
        TinyVecInner {
            stack: ManuallyDrop::new(s)
        }
    } else {
        len.set_heap();
        TinyVecInner {
            raw: RawVec::try_with_capacity(cap)?
        }
    };
    Ok(Self { inner, len })
}
/// Build a vector from an owned array of any size `M`, moving the elements.
///
/// # Panics
/// Panics if `M > N` and the heap allocation fails or is unavailable.
#[must_use]
pub fn from_array<const M: usize>(arr: [T; M]) -> Self {
    // Ownership of the elements is transferred by the raw copy below, so the
    // array itself must not run destructors.
    let arr = ManuallyDrop::new(arr);
    let mut tv = Self::with_capacity(M);
    let src = arr.as_ptr();
    let dst = tv.as_mut_ptr();
    unsafe {
        // SAFETY: `arr` is a separate local and `tv`'s buffer (inline or
        // freshly allocated) holds at least `M` slots, so the regions never
        // overlap — `copy_nonoverlapping` replaces the needlessly
        // conservative `ptr::copy`.
        ptr::copy_nonoverlapping(src, dst, M);
        tv.set_len(M);
    }
    tv
}
/// Build a vector from an array of exactly the inline size `N`; never
/// allocates and is usable in `const` contexts.
#[must_use]
pub const fn from_array_eq_size(arr: [T; N]) -> Self {
    let mut tv = Self::new();
    let src = arr.as_ptr();
    let dst = tv.as_mut_ptr();
    unsafe {
        // SAFETY: `arr` and `tv`'s inline buffer are distinct locals of
        // exactly `N` slots each, so the regions never overlap.
        ptr::copy_nonoverlapping(src, dst, N);
        tv.set_len(N);
    }
    // The elements now live in `tv`; forget the source so they are not
    // dropped twice.
    mem::forget(arr);
    tv
}
/// Take ownership of a `Vec`'s buffer without copying; the result always
/// lives on the heap.
#[cfg(feature = "alloc")]
#[must_use]
pub fn from_vec(vec: Vec<T>) -> Self {
    // ManuallyDrop: the buffer is adopted, so the Vec must not free it.
    let mut vec = ManuallyDrop::new(vec);
    let ptr = vec.as_mut_ptr();
    // SAFETY: `Vec::as_mut_ptr` is never null (dangling-but-aligned for
    // capacity 0).
    let ptr = unsafe { NonNull::new_unchecked(ptr) };
    let len = Length::new_heap(vec.len());
    let cap = vec.capacity();
    let raw = RawVec { cap, ptr };
    let inner = TinyVecInner { raw };
    Self { inner, len }
}
/// Take ownership of a boxed slice's buffer without copying; capacity
/// equals length.
#[cfg(feature = "alloc")]
#[must_use]
pub fn from_boxed_slice(boxed: Box<[T]>) -> Self {
    let len = boxed.len();
    let ptr = Box::into_raw(boxed);
    // SAFETY: `Box::into_raw` never returns null.
    let ptr = unsafe { NonNull::new_unchecked(ptr as *mut T) };
    let raw = RawVec { ptr, cap: len };
    Self {
        inner: TinyVecInner { raw },
        len: Length::new_heap(len)
    }
}
/// Convert a `TinyVec` with a different inline capacity `M` into this one,
/// stealing the heap allocation when possible and copying otherwise.
#[must_use]
pub fn from_tiny_vec<const M: usize>(mut vec: TinyVec<T, M>) -> Self {
    let len = vec.len();
    #[cfg(feature = "alloc")]
    if len > N && len > M {
        // `len > M` implies the source spilled to the heap (a stack vec of
        // capacity `M` can never hold more than `M` elements), so reading
        // `vec.inner.raw` is sound, and `len > N` means we keep it there.
        let tv = Self {
            len: Length::new_heap(len),
            inner: TinyVecInner {
                raw: unsafe { vec.inner.raw }
            }
        };
        // The allocation now belongs to `tv`; don't let `vec` free it.
        mem::forget(vec);
        return tv
    }
    let mut tv = Self::with_capacity(len);
    let src = vec.as_ptr();
    let dst = tv.as_mut_ptr();
    unsafe {
        ptr::copy_nonoverlapping(src, dst, len);
        // Zero the source length so its Drop won't double-drop the moved
        // elements (its own buffer is still freed normally).
        vec.set_len(0);
        tv.set_len(len);
    }
    tv
}
/// Build a vector by cloning every element of `slice`.
pub fn from_slice(slice: &[T]) -> Self
where
    T: Clone
{
    let mut v = Self::with_capacity(slice.len());
    v.extend_from_slice_impl(slice);
    v
}
/// Build a vector by memcpy-ing `slice` (`T: Copy`).
#[must_use]
pub fn from_slice_copied(slice: &[T]) -> Self
where
    T: Copy
{
    let mut v = Self::with_capacity(slice.len());
    v.copy_from_slice(slice);
    v
}
/// Concatenate `n` copies of `slice` into a new vector.
///
/// # Panics
/// Panics if `slice.len() * n` overflows `usize`, or if the allocation
/// fails / the `alloc` feature is disabled.
#[must_use]
pub fn repeat(slice: &[T], n: usize) -> Self
where
    T: Copy
{
    // BUG FIX: the total was previously computed as an unchecked `len * n`,
    // which wraps silently in release builds — `with_capacity` would then
    // under-allocate and the loop below would write out of bounds.
    let total = slice.len().checked_mul(n).expect("capacity overflow");
    let mut s = Self::with_capacity(total);
    let mut dst = s.as_mut_ptr();
    let src = slice.as_ptr();
    let len = slice.len();
    for _ in 0..n {
        unsafe {
            // SAFETY: `dst` walks through the freshly reserved buffer of
            // `total` slots; the borrowed source slice cannot overlap it.
            ptr::copy_nonoverlapping(src, dst, len);
            dst = dst.add(len);
        }
    }
    // `set` preserves the stack/heap flag chosen by `with_capacity`.
    s.len.set(total);
    s
}
/// Number of initialized elements.
#[inline]
pub const fn len(&self) -> usize { self.len.get() }
/// `true` if the vector holds no elements.
#[inline]
pub const fn is_empty(&self) -> bool { self.len.get() == 0 }
/// Capacity of the live storage: `N` while inline, the heap capacity after
/// spilling, and `isize::MAX` for zero-sized `T`.
#[inline]
pub const fn capacity(&self) -> usize {
    if mem::size_of::<T>() == 0 {
        return isize::MAX as usize
    }
    if self.len.is_stack() {
        N
    } else {
        unsafe { self.inner.raw.cap }
    }
}
/// `true` while the elements are stored in the inline buffer.
#[inline]
pub const fn lives_on_stack(&self) -> bool { self.len.is_stack() }
/// Raw pointer to the first element of whichever storage is live.
#[inline]
pub const fn as_ptr(&self) -> *const T {
    unsafe {
        if self.len.is_stack() {
            self.inner.as_ptr_stack()
        } else {
            self.inner.as_ptr_heap()
        }
    }
}
/// Mutable raw pointer to the first element of whichever storage is live.
#[inline]
pub const fn as_mut_ptr(&mut self) -> *mut T {
    unsafe {
        if self.len.is_stack() {
            self.inner.as_ptr_stack_mut()
        } else {
            self.inner.as_ptr_heap_mut()
        }
    }
}
/// Like [`as_mut_ptr`](Self::as_mut_ptr) but wrapped in `NonNull`.
#[inline]
pub const fn as_non_null(&mut self) -> NonNull<T> {
    debug_assert!(!self.as_mut_ptr().is_null());
    unsafe {
        NonNull::new_unchecked(self.as_mut_ptr())
    }
}
/// View the initialized elements as a shared slice.
#[inline]
pub const fn as_slice(&self) -> &[T] {
    unsafe {
        slice::from_raw_parts(self.as_ptr(), self.len.get())
    }
}
/// View the initialized elements as a mutable slice.
#[inline]
pub const fn as_mut_slice(&mut self) -> &mut [T] {
    unsafe {
        slice::from_raw_parts_mut(self.as_mut_ptr(), self.len.get())
    }
}
/// Ensure room for at least `n` more elements, panicking on failure.
#[inline]
pub fn reserve(&mut self, n: usize) {
    self.try_reserve(n).unwrap_or_else(|err| err.handle());
}
/// Ensure room for at least `n` more elements (amortized growth).
// NOTE(review): `self.len.get() + n` can wrap in release builds for
// pathological `n`; presumably RawVec guards the heap path — confirm.
pub fn try_reserve(&mut self, n: usize) -> Result<(), ResizeError> {
    if self.len.is_stack() {
        if self.len.get() + n > self.capacity() {
            unsafe { self.switch_to_heap(n, false)?; }
        }
    } else {
        unsafe {
            self.inner.raw.try_expand_if_needed(self.len.get(), n)?;
        }
    }
    Ok(())
}
/// Ensure room for exactly `n` more elements, panicking on failure.
pub fn reserve_exact(&mut self, n: usize) {
    self.try_reserve_exact(n).unwrap_or_else(|err| err.handle())
}
/// Ensure room for exactly `n` more elements without over-allocating.
pub fn try_reserve_exact(&mut self, n: usize) -> Result<(), ResizeError> {
    if self.len.is_stack() {
        if self.len.get() + n > self.capacity() {
            unsafe { self.switch_to_heap(n, true)?; }
        }
    } else {
        let vec = unsafe { &mut self.inner.raw };
        let len = self.len.get();
        // Never shrink: keep the current capacity if it already suffices.
        let new_cap = vec.cap.max(len + n);
        vec.try_expand_if_needed_exact(len, new_cap)?;
    }
    Ok(())
}
/// Append `elem`, growing (possibly onto the heap) as needed.
#[inline]
pub fn push(&mut self, elem: T) {
    self.reserve(1);
    unsafe { self.push_unchecked(elem); }
}
/// Append `elem` without any capacity check.
///
/// Safety: the caller must guarantee `len < capacity`.
pub unsafe fn push_unchecked(&mut self, elem: T) {
    unsafe {
        let dst = self.as_mut_ptr().add(self.len.get());
        dst.write(elem);
    }
    self.len.add(1);
}
/// Append only when it fits in the current capacity; hands the value back
/// instead of allocating.
pub fn push_within_capacity(&mut self, val: T) -> Result<(),T> {
    if self.len.get() < self.capacity() {
        unsafe { self.push_unchecked(val); }
        Ok(())
    } else {
        Err(val)
    }
}
/// Remove and return the last element, or `None` when empty.
pub const fn pop(&mut self) -> Option<T> {
    if self.len.get() == 0 {
        None
    } else {
        // Shrink first, then read the slot that just fell out of bounds.
        self.len.sub(1);
        let val = unsafe {
            self.as_ptr()
                .add(self.len.get())
                .read()
        };
        Some(val)
    }
}
/// Remove and return the last element only when `predicate` approves it;
/// the predicate may mutate the element either way.
pub fn pop_if<F>(&mut self, predicate: F) -> Option<T>
where
    F: FnOnce(&mut T) -> bool
{
    let len = self.len();
    if len == 0 { return None }
    unsafe {
        let last = self.as_mut_ptr().add(len - 1);
        // The length is only shrunk after the predicate accepts, so a
        // rejection leaves the vector untouched.
        predicate(&mut *last).then(|| {
            self.len.sub(1);
            last.read()
        })
    }
}
/// Insert `elem` at `index`, shifting later elements right; hands the
/// element back when `index > len`.
pub fn insert(&mut self, index: usize, elem: T) -> Result<(),T> {
    if index > self.len.get() {
        return Err(elem)
    }
    unsafe { self.insert_unchecked(index, elem); }
    Ok(())
}
/// Insert without the bounds check (capacity is still reserved here).
///
/// Safety: `index <= len` must hold.
pub unsafe fn insert_unchecked(&mut self, index: usize, elem: T) {
    self.reserve(1);
    unsafe {
        let ptr = self.as_mut_ptr();
        // Shift the tail [index, len) one slot to the right; regions
        // overlap, hence `ptr::copy`.
        ptr::copy(
            ptr.add(index),
            ptr.add(index + 1),
            self.len.get() - index,
        );
        ptr.add(index).write(elem);
    }
    self.len.add(1);
}
/// Clone-insert a slice at `index` (dispatches to the `Copy` fast path on
/// nightly); hands the slice back when `index > len`.
#[inline]
pub fn insert_slice<'a>(&mut self, index: usize, elems: &'a [T]) -> Result<(), &'a [T]>
where
    T: Clone
{
    self.insert_slice_impl(index, elems)
}
/// Insert a slice of `Copy` elements at `index` by memcpy; hands the slice
/// back when `index > len`.
pub fn insert_slice_copied<'a>(&mut self, index: usize, elems: &'a [T]) -> Result<(), &'a [T]>
where
    T: Copy
{
    if index > self.len() {
        return Err(elems)
    }
    let len = elems.len();
    self.reserve(len);
    unsafe {
        let ptr = self.as_mut_ptr();
        // Open a gap of `len` slots by shifting the tail right (overlapping).
        ptr::copy(
            ptr.add(index),
            ptr.add(index + len),
            self.len.get() - index,
        );
        // The borrowed source slice cannot alias our unique buffer.
        ptr::copy_nonoverlapping(
            elems.as_ptr(),
            ptr.add(index),
            len
        );
    }
    self.len.add(len);
    Ok(())
}
/// Insert every element of an exact-size iterator at `index`; hands the
/// iterator back when `index > len`.
// NOTE(review): correctness relies on `ExactSizeIterator::len` being
// truthful — an iterator yielding more items than reported would overrun
// the reserved gap. Confirm this is an accepted contract.
pub fn insert_iter<I>(&mut self, index: usize, it: I) -> Result<(), I>
where
    I: IntoIterator<Item = T>,
    <I as IntoIterator>::IntoIter: ExactSizeIterator,
{
    if index > self.len() {
        return Err(it);
    }
    let it = it.into_iter();
    let len = it.len();
    self.reserve(len);
    unsafe {
        let ptr = self.as_mut_ptr();
        // Open a gap of `len` slots for the incoming elements.
        ptr::copy(
            ptr.add(index),
            ptr.add(index + len),
            self.len.get() - index,
        );
        let mut ptr = ptr.add(index);
        // Length is bumped per element as the gap fills up.
        for elem in it {
            ptr.write(elem);
            ptr = ptr.add(1);
            self.len.add(1);
        }
    }
    Ok(())
}
/// Grow to `new_len` elements by cloning `elem`, or truncate when smaller.
pub fn resize(&mut self, new_len: usize, elem: T)
where
    T: Clone
{
    if new_len < self.len() {
        self.truncate(new_len);
    } else {
        self.resize_impl(new_len, elem);
    }
}
/// Grow to `cap` elements produced by `f()`, or truncate when smaller.
pub fn resize_with<F>(&mut self, cap: usize, mut f: F)
where
    F: FnMut() -> T
{
    if cap < self.len() {
        self.truncate(cap);
    } else {
        let n = cap - self.len();
        self.reserve(n);
        unsafe {
            let mut ptr = self.as_mut_ptr().add(self.len());
            let len = &mut self.len;
            // The length is committed per element so a panic in `f` leaves
            // a consistent (just shorter) vector.
            for _ in 0..n {
                ptr::write(ptr, f());
                ptr = ptr.add(1);
                len.add(1);
            }
        }
    }
}
/// Grow to `cap` zero-filled elements, or truncate when smaller.
///
/// Safety: all-zero bytes must be a valid bit pattern for `T`.
pub unsafe fn resize_zeroed(&mut self, cap: usize) {
    if cap < self.len() {
        self.truncate(cap);
    } else {
        let n = cap - self.len();
        self.reserve(n);
        unsafe {
            let ptr = self.as_mut_ptr().add(self.len());
            ptr.write_bytes(0, n);
        }
        self.len.add(n);
    }
}
/// Remove and return the element at `index` without bounds checking,
/// shifting the tail left to close the gap.
///
/// Safety: `index < len` must hold.
pub const unsafe fn remove_unchecked(&mut self, index: usize) -> T {
    debug_assert!(index < self.len());
    unsafe {
        self.len.sub(1);
        let result = self.as_mut_ptr().add(index).read();
        // Overlapping shift of [index + 1, old_len) down by one slot.
        ptr::copy(
            self.as_ptr().add(index + 1),
            self.as_mut_ptr().add(index),
            self.len.get() - index,
        );
        result
    }
}
/// Remove and return the element at `index`, or `None` when out of bounds.
#[inline]
pub const fn remove(&mut self, index: usize) -> Option<T> {
    if index >= self.len.get() { return None }
    Some(unsafe { self.remove_unchecked(index) })
}
/// Remove the element at `index` only when `predicate` approves it.
pub fn remove_if<F>(&mut self, index: usize, predicate: F) -> Option<T>
where
    F: FnOnce(&mut T) -> bool
{
    let len = self.len.get();
    if index >= len { return None }
    unsafe {
        let ptr = self.as_mut_ptr().add(index);
        predicate(&mut *ptr).then(|| {
            let elem = ptr.read();
            // Close the gap left by the extracted element.
            ptr::copy(
                ptr.add(1),
                ptr,
                len - index - 1
            );
            self.len.sub(1);
            elem
        })
    }
}
/// Swap the elements at `a` and `b`, reporting the first out-of-bounds
/// index as the error.
pub const fn swap_checked(&mut self, a: usize, b: usize) -> Result<(),usize> {
    if a >= self.len.get() {
        return Err(a)
    };
    if b >= self.len.get() {
        return Err(b)
    };
    unsafe { self.swap_unchecked(a, b); }
    Ok(())
}
/// Swap the elements at `a` and `b` without bounds checks.
///
/// # Safety
/// Both `a < len` and `b < len` must hold.
#[cfg(not(feature = "use-nightly-features"))]
pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
    unsafe {
        let ptr = self.as_mut_ptr();
        let ap = ptr.add(a);
        let bp = ptr.add(b);
        ptr::swap(ap, bp);
    }
}
/// Swap the elements at `a` and `b` without bounds checks (delegates to
/// `slice::swap_unchecked` on nightly).
///
/// # Safety
/// Both `a < len` and `b < len` must hold.
// CONSISTENCY FIX: this variant was private while the stable fallback above
// is `pub`, so enabling `use-nightly-features` silently removed
// `swap_unchecked` from the public API.
#[cfg(feature = "use-nightly-features")]
#[inline(always)]
pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
    unsafe { self.as_mut_slice().swap_unchecked(a, b); }
}
/// Remove the element at `index` in O(1) by moving the last element into
/// its place; returns `None` when out of bounds. Does not preserve order.
pub const fn swap_remove(&mut self, index: usize) -> Option<T> {
    if index >= self.len.get() {
        None
    } else if index == self.len.get() - 1 {
        // Removing the last element needs no swap at all.
        self.pop()
    } else {
        unsafe { self.swap_unchecked(index, self.len.get() - 1) }
        self.pop()
    }
}
/// Remove consecutive repeated elements (`PartialEq`).
#[inline]
pub fn dedup(&mut self)
where
    T: PartialEq
{
    self.dedup_by(|a, b| a == b);
}
/// Remove consecutive elements that map to the same key.
#[inline]
pub fn dedup_by_key<F, K>(&mut self, mut key: F)
where
    F: FnMut(&mut T) -> K,
    K: PartialEq
{
    self.dedup_by(|a, b| key(a) == key(b));
}
/// Remove consecutive elements for which `are_equal(next, prev)` returns
/// `true`, keeping the first element of each run. Mirrors `Vec::dedup_by`,
/// including its panic-safety via a drop guard.
pub fn dedup_by<F>(&mut self, mut are_equal: F)
where
    F: FnMut(&mut T, &mut T) -> bool
{
    let (ptr, _, len) = self.split_at_spare_mut_with_len();
    let ptr = ptr.as_mut_ptr();
    if len.get() <= 1 {
        return
    }
    // Phase 1: scan for the first adjacent duplicate — until one is found
    // nothing has to move, and a duplicate-free vector returns early.
    let mut first_dup = 1;
    while first_dup != len.get() {
        let is_dup = unsafe {
            let a = ptr.add(first_dup - 1);
            let b = ptr.add(first_dup);
            are_equal(&mut *b, &mut *a)
        };
        if is_dup {
            break
        }
        first_dup += 1;
    }
    if first_dup == len.get() {
        return;
    }
    // On drop (including unwind from a panicking predicate) this guard
    // closes the gap between the retained prefix `[0, left)` and the
    // not-yet-examined tail `[right, len)` and shrinks the stored length,
    // so no element is leaked into the live range or dropped twice.
    struct DropGuard<'a, T> {
        len: &'a mut Length,
        ptr: *mut T,
        right: usize,
        left: usize,
    }
    impl<T> Drop for DropGuard<'_, T> {
        fn drop(&mut self) {
            unsafe {
                let len = self.len.get();
                let shift = len - self.right;
                let src = self.ptr.add(self.right);
                let dst = self.ptr.add(self.left);
                ptr::copy(src, dst, shift);
                let deleted = self.right - self.left;
                self.len.sub(deleted);
            }
        }
    }
    let mut guard = DropGuard { right: first_dup + 1, left: first_dup, ptr, len };
    // The first duplicate is known; drop it in place before the main loop.
    unsafe { ptr::drop_in_place(ptr.add(first_dup)); }
    // Phase 2: walk `right` through the rest, either dropping duplicates or
    // compacting keepers down to `left`.
    while guard.right < guard.len.get() {
        unsafe {
            let a = ptr.add(guard.left - 1);
            let b = ptr.add(guard.right);
            if are_equal(&mut *b, &mut *a) {
                guard.right += 1;
                ptr::drop_in_place(b);
            } else {
                let dst = ptr.add(guard.left);
                ptr::copy_nonoverlapping(b, dst, 1);
                guard.left += 1;
                guard.right += 1;
            }
        }
    }
    // Normal exit: everything examined; commit the new length and disarm
    // the guard (its gap-closing logic must not run again).
    guard.len.set(guard.left);
    mem::forget(guard);
}
/// Shrink the heap capacity to at least `min_capacity` (never below the
/// length); moves back to the inline buffer when everything fits there.
#[cfg(feature = "alloc")]
pub fn shrink_to(&mut self, min_capacity: usize) {
    if !self.lives_on_stack() && self.capacity() > min_capacity {
        let new_cap = usize::max(self.len.get(), min_capacity);
        self._shrink(new_cap);
    }
}
/// Shrink the capacity to exactly the length (or back to the stack).
#[cfg(feature = "alloc")]
pub fn shrink_to_fit(&mut self) {
    if self.len.is_stack() { return }
    self._shrink(self.len.get());
}
/// Force the storage onto the heap, panicking on allocation failure.
#[cfg(feature = "alloc")]
pub fn move_to_heap(&mut self) {
    self.try_move_to_heap().unwrap_or_else(|err| err.handle());
}
/// Force the storage onto the heap (amortized capacity); no-op if already
/// there.
#[cfg(feature = "alloc")]
pub fn try_move_to_heap(&mut self) -> Result<(), ResizeError> {
    if self.lives_on_stack() {
        unsafe { self.switch_to_heap(0, false)? };
    }
    Ok(())
}
/// Force the storage onto the heap with exact capacity, panicking on
/// allocation failure.
#[cfg(feature = "alloc")]
pub fn move_to_heap_exact(&mut self) {
    self.try_move_to_heap_exact().unwrap_or_else(|err| err.handle());
}
/// Force the storage onto the heap with capacity equal to the length.
#[cfg(feature = "alloc")]
pub fn try_move_to_heap_exact(&mut self) -> Result<(), ResizeError> {
    if self.lives_on_stack() {
        unsafe { self.switch_to_heap(0, true)? };
    }
    Ok(())
}
/// Shrink the heap allocation to the length without ever migrating back to
/// the inline buffer (unlike [`shrink_to_fit`](Self::shrink_to_fit)).
#[cfg(feature = "alloc")]
pub fn shrink_to_fit_heap_only(&mut self) {
    if !self.len.is_stack() {
        unsafe { self.inner.raw.shrink_to_fit(self.len.get()); };
    }
}
pub fn clear(&mut self) {
let ptr = self.as_mut_slice() as *mut [T];
unsafe {
self.len.set(0);
ptr::drop_in_place(ptr);
}
}
#[inline]
pub const unsafe fn set_len(&mut self, len: usize) {
self.len.set(len);
}
pub unsafe fn update_len<F>(&mut self, mut f: F)
where
F: FnMut(&mut usize)
{
let mut len = self.len.get();
f(&mut len);
self.len.set(len);
}
pub fn truncate(&mut self, len: usize) {
if len < self.len.get() {
for e in &mut self[len..] {
unsafe { ptr::drop_in_place(e) }
}
unsafe { self.set_len(len); }
}
}
/// Clone-append every element of `s` (dispatches to the `Copy` fast path
/// on nightly).
#[inline]
pub fn extend_from_slice(&mut self, s: &[T])
where
    T: Clone
{
    self.extend_from_slice_impl(s);
}
/// Append every element of `s` by memcpy.
#[inline]
pub fn copy_from_slice(&mut self, s: &[T])
where
    T: Copy
{
    let len = s.len();
    self.reserve(len);
    unsafe {
        // NOTE(review): `s` is a shared borrow while `self` is borrowed
        // mutably, so the regions cannot alias; `copy` is merely the
        // conservative choice here.
        ptr::copy(
            s.as_ptr(),
            self.as_mut_ptr().add(self.len.get()),
            len
        )
    }
    self.len.add(len);
}
/// Clone-append the elements of `self[range]` onto the end.
#[inline]
pub fn extend_from_within<R>(&mut self, range: R)
where
    T: Clone,
    R: RangeBounds<usize>,
{
    self.extend_from_within_impl(range);
}
/// Copy-append the elements of `self[range]` onto the end.
pub fn extend_from_within_copied<R>(&mut self, range: R)
where
    T: Copy,
    R: RangeBounds<usize>,
{
    let len = self.len();
    let Range { start, end } = slice_range(range, len);
    let new_len = end - start;
    self.reserve(new_len);
    let ptr = self.as_mut_ptr();
    unsafe {
        // `end <= len`, so the source [start, end) always lies before the
        // destination [len, len + new_len).
        let src = ptr.add(start);
        let dst = ptr.add(len);
        ptr::copy(src, dst, new_len);
    }
    self.len.add(new_len);
}
/// Keep only the elements for which `pred` returns `true`.
#[inline]
pub fn retain<F>(&mut self, mut pred: F)
where
    F: FnMut(&T) -> bool
{
    self.retain_mut(|e| pred(e));
}
/// Like [`retain`](Self::retain) but with mutable access to each element.
pub fn retain_mut<F>(&mut self, mut pred: F)
where
    F: FnMut(&mut T) -> bool
{
    // Fully consuming the ExtractIf iterator removes every rejected element.
    for e in self.extract_if(.., |e| !pred(e)) {
        drop(e)
    }
}
/// Split the buffer into its initialized prefix and the spare
/// (uninitialized) capacity after it.
pub const fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit<T>]) {
    let (init, uninit, _) = self.split_at_spare_mut_with_len();
    (init, uninit)
}
/// Split the vector at `at`, returning a new vector holding `[at, len)`
/// while `self` keeps `[0, at)`. Follows `Vec::split_off`, so `at == len`
/// is allowed and yields an empty vector.
///
/// # Panics
/// Panics when `at > len`, or when allocating the returned vector fails.
#[must_use = "use `.truncate()` if you don't need the other half"]
pub fn split_off(&mut self, at: usize) -> TinyVec<T, N> {
    // BUG FIX: this previously rejected `at == self.len()` (`>=`), whereas
    // `Vec::split_off` — which this mirrors — accepts it and returns an
    // empty vector.
    if at > self.len() {
        panic!("Index out of bounds");
    }
    let other_len = self.len() - at;
    let mut other = TinyVec::<T, N>::with_capacity(other_len);
    unsafe {
        // SAFETY: `other` is a fresh vector with room for `other_len`
        // elements; its buffer cannot overlap ours.
        let src = self.as_ptr().add(at);
        let dst = other.as_mut_ptr();
        ptr::copy_nonoverlapping(src, dst, other_len);
        other.set_len(other_len);
        self.len.sub(other_len);
    }
    other
}
/// Leak the contents, returning a mutable slice valid for `'a`.
///
/// The storage is first migrated to the heap with exact capacity so the
/// slice outlives the (forgotten) vector.
// NOTE(review): `switch_to_heap` debug-asserts `size_of::<T>() != 0`, so
// leaking a stack-resident vector of ZSTs would trip in debug builds —
// confirm intended.
#[cfg(feature = "alloc")]
pub fn leak<'a>(self) -> &'a mut [T]
where
    T: 'a
{
    let mut slf = ManuallyDrop::new(self);
    unsafe {
        let len = slf.len();
        slf.shrink_to_fit_heap_only();
        slf.move_to_heap_exact();
        let ptr = slf.as_mut_ptr();
        slice::from_raw_parts_mut(ptr, len)
    }
}
/// Move every element of `other` onto the end of `self`, leaving `other`
/// empty (its capacity is untouched).
pub fn append<const M: usize>(&mut self, other: &mut TinyVec<T, M>) {
    unsafe {
        let other_len = other.len();
        self.reserve(other_len);
        let src = other.as_slice().as_ptr();
        let dst = self.as_mut_ptr().add(self.len());
        ptr::copy(src, dst, other_len);
        // Zeroing the source length transfers ownership of the moved
        // elements; nothing is dropped twice.
        other.set_len(0);
        self.len.add(other_len);
    }
}
/// Remove and iterate over the elements in `range`; dropping the iterator
/// removes any unconsumed elements and closes the gap.
pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, N>
where
    R: RangeBounds<usize>
{
    let len = self.len();
    let Range { start, end } = slice_range(range, len);
    unsafe {
        // The length is pre-shrunk to the head so a leaked `Drain` cannot
        // expose the logically moved-out drained/tail elements.
        self.set_len(start);
        Drain {
            vec: NonNull::new_unchecked(self as *mut _),
            remaining_start: start,
            remaining_len: end - start,
            tail_start: end,
            tail_len: len - end,
            _marker: PhantomData,
        }
    }
}
/// Iterate over `range`, yielding (and removing) exactly the elements for
/// which `pred` returns `true`.
#[must_use = "extract_if won't do anything if it's dropped immediately. \
Use '.retain' with the predicate negated."]
pub fn extract_if<R, F>(&mut self, range: R, pred: F) -> ExtractIf<'_, T, N, F>
where
    R: RangeBounds<usize>,
    F: FnMut(&mut T) -> bool,
{
    let len = self.len();
    let Range { start, end } = slice_range(range, len);
    // Same pre-shrink trick as `drain`: the iterator restores the length.
    unsafe { self.set_len(start) }
    ExtractIf {
        original_len: len,
        deleted: 0,
        next: start,
        last: end,
        vec: self,
        pred,
    }
}
/// Convert into `Box<[T]>` with the allocation trimmed to exactly `len`.
#[cfg(feature = "alloc")]
pub fn into_boxed_slice(self) -> Box<[T]> {
    // ManuallyDrop: the allocation's ownership moves into the Box below.
    let mut slf = ManuallyDrop::new(self);
    if slf.lives_on_stack() {
        // Exact growth so capacity == len after the move.
        unsafe { slf.switch_to_heap(0, true).unwrap_or_else(|err| err.handle()) };
    }
    debug_assert!(!slf.lives_on_stack());
    let len = slf.len();
    slf.shrink_to_fit_heap_only();
    debug_assert_eq!(len, slf.capacity());
    let ptr = slf.as_mut_ptr();
    unsafe {
        let slice = slice::from_raw_parts_mut(ptr, len);
        Box::from_raw(slice)
    }
}
/// Convert into a `Vec`, reusing the heap allocation when already spilled.
#[cfg(feature = "alloc")]
pub fn into_vec(self) -> Vec<T> {
    let mut vec = ManuallyDrop::new(self);
    vec.move_to_heap();
    let ptr = vec.as_mut_ptr();
    let len = vec.len();
    let cap = vec.capacity();
    unsafe { Vec::from_raw_parts(ptr, len, cap) }
}
}
impl<T, const N: usize> Extend<T> for TinyVec<T, N> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let iter = iter.into_iter();
        // Reserve the lower size-hint bound once; `push` grows further as
        // needed.
        let (lower, _) = iter.size_hint();
        self.reserve(lower);
        for elem in iter {
            self.push(elem);
        }
    }
    #[cfg(feature = "use-nightly-features")]
    #[inline]
    fn extend_one(&mut self, item: T) {
        self.push(item);
    }
    #[cfg(feature = "use-nightly-features")]
    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
    // Safety (caller): capacity for the item must already be reserved.
    #[cfg(feature = "use-nightly-features")]
    #[inline]
    unsafe fn extend_one_unchecked(&mut self, item: T) {
        unsafe { self.push_unchecked(item); }
    }
}
impl<'a, T: Clone, const N: usize> Extend<&'a T> for TinyVec<T, N> {
    /// Clone each referenced element and append it.
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }
}
// With specialization available, the `Clone`-bound items of
// `CopyOptimization` are emitted as `default` so the `Copy` impl can
// override them; on stable the same bodies are emitted as plain items.
#[cfg(feature = "use-nightly-features")]
macro_rules! maybe_default {
    ($($t:tt)*) => {
        default $($t)*
    };
}
#[cfg(not(feature = "use-nightly-features"))]
macro_rules! maybe_default {
    ($($t:tt)*) => {
        $($t)*
    };
}
/// Internal dispatch trait: `Clone`-based fallbacks that nightly
/// specialization replaces with memcpy fast paths when `T: Copy`.
trait CopyOptimization<T> {
    fn extend_from_slice_impl(&mut self, s: &[T]);
    fn insert_slice_impl<'a>(&mut self, index: usize, elems: &'a [T]) -> Result<(), &'a [T]>;
    fn resize_impl(&mut self, new_len: usize, elem: T);
    fn extend_from_within_impl<R>(&mut self, range: R)
    where
        R: RangeBounds<usize>;
}
// Generic (`T: Clone`) fallback implementations; `maybe_default!` marks
// them `default` on nightly so the `Copy` specialization can override.
impl<T: Clone, const N: usize> CopyOptimization<T> for TinyVec<T, N> {
    maybe_default! {
        fn extend_from_slice_impl(&mut self, s: &[T]) {
            self.extend(s.iter().cloned());
        }
    }
    maybe_default! {
        fn insert_slice_impl<'a>(&mut self, index: usize, elems: &'a [T]) -> Result<(), &'a [T]> {
            self.insert_iter(index, elems.iter().cloned()).map_err(|_| elems)
        }
    }
    maybe_default! {
        fn extend_from_within_impl<R>(&mut self, range: R)
        where
            R: RangeBounds<usize>
        {
            let len = self.len();
            let Range { start, end } = slice_range(range, len);
            self.reserve(end - start);
            let (slice, spare, len) = self.split_at_spare_mut_with_len();
            let slice = &slice[start..end];
            // Length is committed per element so a panicking `clone`
            // leaves a consistent vector.
            for (src, dst) in slice.iter().zip(spare.iter_mut()) {
                dst.write(src.clone());
                len.add(1);
            }
        }
    }
    maybe_default! {
        fn resize_impl(&mut self, new_len: usize, elem: T) {
            let n = new_len - self.len();
            self.reserve(n);
            unsafe {
                let mut ptr = self.as_mut_ptr().add(self.len());
                let len = &mut self.len;
                // Clone n-1 times, then move `elem` itself into the last
                // slot to save one clone.
                for _ in 1..n {
                    ptr::write(ptr, elem.clone());
                    ptr = ptr.add(1);
                    len.add(1);
                }
                if n > 0 {
                    ptr::write(ptr, elem);
                    len.add(1);
                }
            }
        }
    }
}
// Nightly-only specialization: when `T: Copy`, every operation collapses
// to a plain memcpy with no per-element clone or panic path.
#[cfg(feature = "use-nightly-features")]
impl<T: Copy, const N: usize> CopyOptimization<T> for TinyVec<T, N> {
    #[inline]
    fn extend_from_slice_impl(&mut self, s: &[T]) {
        self.copy_from_slice(s);
    }
    #[inline]
    fn insert_slice_impl<'a>(&mut self, index: usize, elems: &'a [T]) -> Result<(), &'a [T]> {
        self.insert_slice_copied(index, elems)
    }
    #[inline]
    fn extend_from_within_impl<R>(&mut self, range: R)
    where
        R: RangeBounds<usize>
    {
        self.extend_from_within_copied(range);
    }
    #[inline]
    fn resize_impl(&mut self, new_len: usize, elem: T) {
        let n = new_len - self.len();
        self.reserve(n);
        unsafe {
            let mut ptr = self.as_mut_ptr().add(self.len());
            for _ in 0..n {
                ptr::write(ptr, elem);
                ptr = ptr.add(1);
            }
            // `Copy` writes can't panic, so one length bump at the end is
            // fine.
            self.len.add(n);
        }
    }
}
impl<T, const N: usize> Default for TinyVec<T, N> {
    /// Empty inline vector, same as [`TinyVec::new`].
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
impl<T: fmt::Debug, const N: usize> fmt::Debug for TinyVec<T, N> {
    /// Formats like a slice (`[a, b, c]`); storage location is not shown.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self.as_slice(), f)
    }
}
// Element-wise comparison against anything slice-like (slices, arrays,
// `Vec`, and — via its own `AsRef<[T]>` — other `TinyVec`s).
impl<T: PartialEq, const N: usize, S> PartialEq<S> for TinyVec<T, N>
where
    S: AsRef<[T]>,
{
    #[inline]
    fn eq(&self, other: &S) -> bool {
        self.as_slice() == other.as_ref()
    }
}
impl<T: PartialOrd, const N: usize> PartialOrd for TinyVec<T, N> {
    /// Lexicographic comparison, delegating to the slice implementation.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}
impl<T: Ord, const N: usize> Ord for TinyVec<T, N> {
    /// Total lexicographic order, delegating to the slice implementation.
    #[inline]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}
// BUG FIX: `Eq` promises a *total* equivalence relation, so it must require
// `T: Eq` (as `Vec` does) — bounding on `T: PartialEq` alone would make
// e.g. `TinyVec<f32, N>: Eq` even though `NaN != NaN`.
impl<T: Eq, const N: usize> Eq for TinyVec<T, N> {}
impl<T: Hash, const N: usize> Hash for TinyVec<T, N> {
    /// Hashes like a slice, so it agrees with slice equality.
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state);
    }
}
impl<T, const N: usize> Borrow<[T]> for TinyVec<T, N> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T, const N: usize> BorrowMut<[T]> for TinyVec<T, N> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}
// Deref to `[T]` gives access to the full slice API, mirroring `Vec`.
impl<T, const N: usize> Deref for TinyVec<T, N> {
    type Target = [T];
    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
impl<T, const N: usize> DerefMut for TinyVec<T, N> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}
impl<T, const N: usize> Drop for TinyVec<T, N> {
    fn drop(&mut self) {
        // Drop the initialized elements (skipped entirely for trivially
        // destructible T), then free the heap allocation if one exists.
        if mem::needs_drop::<T>() {
            for e in self.as_mut_slice() {
                unsafe { ptr::drop_in_place(e) };
            }
        }
        if !self.len.is_stack() {
            unsafe { self.inner.raw.destroy(); }
        }
    }
}
// Generates `From<$t> for TinyVec<T, N>` impls that forward to a named
// constructor; optional `{...}` adds extra generic params and
// `where {...}` adds bounds.
macro_rules! impl_from_call {
    ($( $({$($im:tt)*})? $(where { $($w:tt)* })? $t:ty => $c:ident ),* $(,)?) => {
        $(
            impl<T, const N: usize, $($($im)*)?> From<$t> for TinyVec<T, N>
            $(where $($w)* )?
            {
                fn from(value: $t) -> Self {
                    Self:: $c (value)
                }
            }
        )*
    };
}
// Zero/low-copy conversions from owned heap containers.
#[cfg(feature = "alloc")]
impl_from_call! {
    Vec<T> => from_vec,
    Box<[T]> => from_boxed_slice,
}
#[cfg(feature = "alloc")]
impl<T, const N: usize> From<VecDeque<T>> for TinyVec<T, N> {
    fn from(value: VecDeque<T>) -> Self {
        // Vec::from(VecDeque) linearizes the ring buffer, then the Vec's
        // allocation is adopted directly.
        Self::from_vec(Vec::from(value))
    }
}
#[cfg(feature = "alloc")]
impl<T, const N: usize> From<TinyVec<T, N>> for VecDeque<T> {
    fn from(value: TinyVec<T, N>) -> Self {
        VecDeque::from(value.into_vec())
    }
}
// Copying/cloning conversions from arrays and slices.
impl_from_call! {
    [T; N] => from_array_eq_size,
    where { T: Clone } &[T] => from_slice,
    where { T: Clone } &mut [T] => from_slice,
    { const M: usize } where { T: Clone } &[T; M] => from_slice,
    { const M: usize } where { T: Clone } &mut [T; M] => from_slice,
}
impl<T, const N: usize> FromIterator<T> for TinyVec<T, N> {
    /// Collects via `extend`, which pre-reserves the iterator's lower
    /// size-hint bound.
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let mut s = Self::new();
        s.extend(iter);
        s
    }
}
impl<'a, T: Clone, const N: usize> FromIterator<&'a T> for TinyVec<T, N> {
    /// Collects by cloning each referenced element.
    fn from_iter<I: IntoIterator<Item = &'a T>>(iter: I) -> Self {
        let mut s = Self::new();
        s.extend(iter);
        s
    }
}
#[cfg(feature = "alloc")]
impl<T, const N: usize> From<TinyVec<T, N>> for Vec<T> {
    /// Reuses the heap allocation when the vector has already spilled.
    #[inline]
    fn from(value: TinyVec<T, N>) -> Self {
        value.into_vec()
    }
}
impl<T, const N: usize> AsRef<[T]> for TinyVec<T, N> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T, const N: usize> AsMut<[T]> for TinyVec<T, N> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}
// Identity AsRef/AsMut, mirroring `Vec`'s own impls.
impl<T, const N: usize> AsRef<TinyVec<T, N>> for TinyVec<T, N> {
    #[inline]
    fn as_ref(&self) -> &TinyVec<T, N> {
        self
    }
}
impl<T, const N: usize> AsMut<TinyVec<T, N>> for TinyVec<T, N> {
    #[inline]
    fn as_mut(&mut self) -> &mut TinyVec<T, N> {
        self
    }
}
impl<T: Clone, const N: usize> Clone for TinyVec<T, N> {
    fn clone(&self) -> Self {
        Self::from_slice(self.as_slice())
    }
    /// Reuses `self`'s existing allocation where possible instead of
    /// building a fresh vector.
    fn clone_from(&mut self, source: &Self) {
        self.clear();
        self.reserve(source.len());
        let (_, buf, len) = self.split_at_spare_mut_with_len();
        // After clear + reserve the spare buffer holds at least
        // `source.len()` slots, so `zip` never cuts the copy short.
        for (src, dst) in source.as_slice().iter().zip(buf.iter_mut()) {
            dst.write(src.clone());
        }
        // Length is committed once at the end: a panicking `clone` leaks
        // the already-written elements instead of exposing them.
        len.set(source.len());
    }
}
#[cfg(feature = "std")]
extern crate std;
#[cfg(feature = "std")]
impl<const N: usize> std::io::Write for TinyVec<u8, N> {
    /// Append `buf`, reporting allocation failure as
    /// `ErrorKind::OutOfMemory`.
    // CONSISTENCY FIX: `write_vectored` already surfaced allocation failure
    // as an io::Error, while `write`/`write_all` panicked via `reserve`;
    // all three now use the same `try_reserve` + OutOfMemory mapping.
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        use std::io::{Error, ErrorKind};
        use std::string::ToString;
        self.try_reserve(buf.len()).map_err(|err| {
            Error::new(ErrorKind::OutOfMemory, err.to_string())
        })?;
        self.copy_from_slice(buf);
        Ok(buf.len())
    }
    /// Same as [`write`](Self::write) but discards the written count.
    fn write_all(&mut self, buf: &[u8]) -> std::io::Result<()> {
        use std::io::{Error, ErrorKind};
        use std::string::ToString;
        self.try_reserve(buf.len()).map_err(|err| {
            Error::new(ErrorKind::OutOfMemory, err.to_string())
        })?;
        self.copy_from_slice(buf);
        Ok(())
    }
    /// Append every buffer, reserving the total up front; allocation
    /// failure maps to `ErrorKind::OutOfMemory`.
    fn write_vectored(&mut self, bufs: &[std::io::IoSlice<'_>]) -> std::io::Result<usize> {
        use std::io::{Error, ErrorKind};
        use std::string::ToString;
        let len = bufs.iter().map(|b| b.len()).sum();
        self.try_reserve(len).map_err(|err| {
            Error::new(ErrorKind::OutOfMemory, err.to_string())
        })?;
        for buf in bufs {
            self.copy_from_slice(buf);
        }
        Ok(len)
    }
    #[cfg(feature = "use-nightly-features")]
    fn is_write_vectored(&self) -> bool { true }
    #[cfg(feature = "use-nightly-features")]
    fn write_all_vectored(&mut self, bufs: &mut [std::io::IoSlice<'_>]) -> std::io::Result<()> {
        self.write_vectored(bufs)?;
        Ok(())
    }
    /// In-memory writer: flushing is a no-op.
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}
impl<const N: usize> fmt::Write for TinyVec<u8, N> {
    /// Append the UTF-8 bytes of `s`.
    // NOTE(review): `copy_from_slice` panics on allocation failure rather
    // than returning `fmt::Error` — consistent with the crate's documented
    // no-`alloc` panic list, but worth confirming.
    fn write_str(&mut self, s: &str) -> fmt::Result {
        self.copy_from_slice(s.as_bytes());
        Ok(())
    }
}
#[cfg(feature = "serde")]
impl<T: serde::Serialize, const N: usize> serde::Serialize for TinyVec<T, N> {
    /// Serializes as a plain sequence, interchangeable with `Vec<T>`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer
    {
        use serde::ser::SerializeSeq;
        let mut seq = serializer.serialize_seq(Some(self.len()))?;
        for elem in self.as_slice() {
            seq.serialize_element(elem)?;
        }
        seq.end()
    }
}
#[cfg(feature = "serde")]
impl<'de, T: Deserialize<'de>, const N: usize> serde::Deserialize<'de> for TinyVec<T, N> {
    /// Deserializes from any sequence, interchangeable with `Vec<T>`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>
    {
        deserializer.deserialize_seq(TinyVecDeserializer(PhantomData))
    }
}
/// `serde` visitor that builds a [`TinyVec`] from a sequence.
#[cfg(feature = "serde")]
struct TinyVecDeserializer<T, const N: usize>(PhantomData<T>);
#[cfg(feature = "serde")]
impl<'de, T: Deserialize<'de>, const N: usize> serde::de::Visitor<'de> for TinyVecDeserializer<T, N> {
    type Value = TinyVec<T, N>;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "a sequence")
    }
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: serde::de::SeqAccess<'de>,
    {
        use serde::de::Error;
        // ROBUSTNESS: for self-describing formats `size_hint` is
        // attacker-controlled, so cap the eager preallocation (serde's own
        // guidance); `push` grows further as real elements arrive.
        let len = seq.size_hint().unwrap_or(0).min(4096);
        let mut values = TinyVec::try_with_capacity(len).map_err(A::Error::custom)?;
        while let Some(value) = seq.next_element()? {
            values.push(value);
        }
        Ok(values)
    }
}
#[cfg(test)]
mod test;