use std::{
alloc::{self, Layout},
mem, ptr, marker,
};
use std::fmt::{self};
use std::cmp;
use std::ops::Index;
use std::iter::FromIterator;
use std::ops::Deref;
use std::slice;
use std::slice::SliceIndex;
use std::ptr::NonNull;
use std::ops::DerefMut;
use std::ops::IndexMut;
use std::borrow::Cow;
use std::iter::FusedIterator;
use std::ops::RangeBounds;
use std::ops::Bound::*;
use std::borrow::Borrow;
use std::borrow::BorrowMut;
use std::num::NonZeroUsize;
/// A vector whose handle is a single machine word.
///
/// For non-zero-sized `T`, `u` is either the DANGLE sentinel (empty, nothing
/// allocated) or a pointer to a heap block laid out as
/// `[len: usize][capacity: usize][elements ...]`.
/// For zero-sized `T`, the length itself is stored inline in `u`, tagged with
/// ZST_MASK so the value stays non-zero.
pub struct ThinVec<T> {
u: NonZeroUsize,
_marker: marker::PhantomData<T>,
}
// High bit of the word: tags `u` as an inline ZST length rather than a pointer.
#[cfg(target_pointer_width = "64")]
const ZST_MASK: usize = 0x8000_0000_0000_0000;
#[cfg(target_pointer_width = "32")]
const ZST_MASK: usize = 0x8000_0000;
// Sentinel meaning "empty, no allocation" (all bits set; never a real pointer here).
const DANGLE: usize = <usize>::max_value();
impl<T> ThinVec<T> {
#[inline]
/// Creates an empty vector without allocating.
///
/// ZSTs: `u` holds an inline length of 0 tagged with ZST_MASK.
/// Otherwise: `u` holds the DANGLE sentinel (no heap block yet).
pub fn new() -> ThinVec<T> {
unsafe {
if mem::size_of::<T>() == 0 { return ThinVec { u: NonZeroUsize::new_unchecked(ZST_MASK), _marker: marker::PhantomData }; }
ThinVec { u: NonZeroUsize::new_unchecked(DANGLE), _marker: marker::PhantomData }
}
}
#[inline]
/// Creates an empty vector with room for `capacity` elements.
/// ZSTs never allocate; a capacity of 0 also avoids a header-only allocation.
pub fn with_capacity(capacity: usize) -> ThinVec<T> {
if mem::size_of::<T>() == 0 { return ThinVec::new(); }
if capacity == 0 {
return <ThinVec<T>>::new();
}
let array = <ThinVec<T>>::allocate_array(capacity);
unsafe {
// The heap pointer is non-null, so the NonZeroUsize invariant holds.
ThinVec { u: NonZeroUsize::new_unchecked(array as usize), _marker: marker::PhantomData }
}
}
#[inline]
/// Returns how many elements fit without reallocating.
/// ZSTs report usize::MAX since they never need storage.
pub fn capacity(&self) -> usize {
if mem::size_of::<T>() == 0 { return <usize>::max_value(); }
if self.u.get() == DANGLE { return 0; }
unsafe {
// Capacity is the second word of the heap header.
let len_ptr = self.u.get() as *mut usize;
return *len_ptr.add(1);
}
}
/// Reserves capacity for at least `additional` more elements, growing by at
/// least a doubling factor so repeated pushes stay amortized O(1).
///
/// No-op for zero-sized element types, which never allocate.
pub fn reserve(&mut self, additional: usize) {
    if mem::size_of::<T>() == 0 { return; }
    let len = self.len();
    // Enough spare room already: return without touching the heap. (The old
    // code reallocated whenever the doubled length exceeded the remaining
    // capacity, even when `additional` elements already fit.)
    if self.capacity() - len >= additional {
        return;
    }
    // Grow by max(len, additional) extra slots: at least doubles the buffer
    // without the overflow-prone `len * 2` / `len + additional` arithmetic.
    self.reserve_exact(cmp::max(len, additional));
}
/// Grows capacity so at least `additional` more elements fit, allocating
/// exactly the requested amount (no amortized doubling).
pub fn reserve_exact(&mut self, additional: usize) {
if mem::size_of::<T>() == 0 { return; }
if self.u.get() == DANGLE {
unsafe {
// Nothing allocated yet: make a fresh block of exactly `additional` slots.
self.u = NonZeroUsize::new_unchecked(<ThinVec<T>>::allocate_array(additional) as usize);
return;
}
}
let len = self.len();
let remain = self.capacity() - len;
if remain < additional {
let len_ptr = self.u.get() as *mut usize;
unsafe {
// New capacity = current length + requested headroom.
self.realloc_heap(len_ptr, *len_ptr + additional);
}
}
}
/// Reallocates so capacity equals the current length.
///
/// NOTE(review): on an empty heap-backed vector this leaves capacity 0 while
/// `u` still points at a heap block; confirm the growth path copes with a
/// stored capacity of 0 before relying on push after shrink_to_fit.
pub fn shrink_to_fit(&mut self) {
if mem::size_of::<T>() == 0 { return; }
if self.u.get() == DANGLE { return; }
let len_ptr = self.u.get() as *mut usize;
unsafe {
self.realloc_heap(len_ptr, *len_ptr);
}
}
/// Shortens the vector to at most `len` elements, dropping the excess.
///
/// Like `Vec::truncate`, this is a no-op when `len >= self.len()`.
pub fn truncate(&mut self, len: usize) {
    if mem::size_of::<T>() == 0 {
        // ZSTs store the length inline. Never grow: the previous version
        // wrote `len` unconditionally and would *extend* the vector when
        // `len` exceeded the current length.
        // NOTE(review): ZST elements with a Drop impl are not dropped here,
        // consistent with the rest of this file's ZST handling — confirm.
        if len < self.len() {
            unsafe { self.set_len(len); }
        }
        return;
    }
    // Empty (unallocated) vector: nothing to do.
    if self.u.get() == DANGLE { return; }
    unsafe {
        let len_ptr = self.u.get() as *mut usize;
        if *len_ptr > len {
            if mem::needs_drop::<T>() {
                // Drop the tail in place, decrementing the stored length one
                // element at a time so a panicking Drop still leaves a
                // consistent (shorter) vector behind.
                let mut cur = ((len_ptr as *mut u8).add(<ThinVec<T>>::header_bytes()) as *mut T).add(len);
                let end = cur.add(*len_ptr - len);
                while cur < end {
                    *len_ptr -= 1;
                    ptr::drop_in_place(cur);
                    cur = cur.add(1);
                }
            } else {
                // No destructors to run: just shrink the recorded length.
                *len_ptr = len;
            }
        }
    }
}
#[inline]
/// Borrows the contents as a slice (via Deref).
pub fn as_slice(&self) -> &[T] {
self
}
#[inline]
/// Mutably borrows the contents as a slice (via DerefMut).
pub fn as_mut_slice(&mut self) -> &mut [T] {
self
}
#[inline]
/// Removes the element at `index` in O(1) by moving the last element into
/// its slot. Does not preserve element order.
///
/// # Panics
/// Panics if `index >= self.len()` (non-ZST path).
pub fn swap_remove(&mut self, index: usize) -> T {
if mem::size_of::<T>() == 0 {
unsafe {
// NOTE(review): no bounds check on `index` here, and `len - 1`
// underflows on an empty ZST vector — confirm intended.
let len = self.len() - 1;
self.set_len(len);
return ptr::read(NonNull::dangling().as_ptr());
}
}
unsafe {
if index < self.len() {
let len_ptr = self.u.get() as *mut usize;
// Decrement length first; `replace` then moves the old last element
// (now at slot *len_ptr) into the vacated slot at `index`.
*len_ptr -= 1;
let ptr = len_ptr as *mut u8;
<ThinVec<T>>::replace(ptr.add(<ThinVec<T>>::header_bytes()) as *mut T, *len_ptr, index)
} else {
panic!("index out of bounds! len: {}, index {}", self.len(), index);
}
}
}
#[inline]
/// Inserts `val` at `index`, shifting later elements one slot to the right.
///
/// # Panics
/// Panics if `index > self.len()`.
pub fn insert(&mut self, index: usize, val: T) {
if mem::size_of::<T>() == 0 {
unsafe {
let len = self.len();
assert!(index <= len);
// ZSTs carry no data; inserting is just a length bump.
self.set_len(len + 1);
return;
}
}
assert!(index <= self.len());
// Ensures a heap block exists with room for one more element.
self.possibly_grow_heap();
unsafe {
let len_ptr = self.u.get() as usize as *mut usize;
let len = *len_ptr;
{
let p = self.as_mut_ptr().offset(index as isize);
// Shift the tail right by one, then write into the gap.
ptr::copy(p, p.offset(1), len - index);
ptr::write(p, val);
}
*len_ptr += 1;
}
}
#[inline]
/// Moves the element at slot `src` into slot `dst`, returning the value
/// previously held at `dst`. Slot `src` is left logically vacated (its bytes
/// remain but must no longer be treated as live).
fn replace(ptr: *mut T, src: usize, dst: usize) -> T {
unsafe {
let hole: *mut T = ptr.add(dst);
let last = ptr::read(ptr.add(src));
ptr::replace(hole, last)
}
}
/// Removes and returns the element at `index`, shifting everything after it
/// one slot to the left.
///
/// # Panics
/// Panics if `index >= self.len()`. The bounds check now happens *before*
/// any raw-pointer work: the previous version dereferenced the DANGLE
/// sentinel on an empty vector (undefined behavior) and decremented the
/// stored length before asserting, corrupting the vector on a failed assert.
pub fn remove(&mut self, index: usize) -> T {
    if mem::size_of::<T>() == 0 {
        unsafe {
            let len = self.len();
            assert!(index < len);
            self.set_len(len - 1);
            return ptr::read(NonNull::dangling().as_ptr());
        }
    }
    // len() handles the DANGLE sentinel safely (returns 0), so an
    // out-of-bounds index on an empty vector panics cleanly here.
    let len = self.len();
    assert!(index < len, "index out of bounds! len: {}, index {}", len, index);
    unsafe {
        // len > 0 at this point, so `u` is a real pointer to the header.
        let len_ptr = self.u.get() as *mut usize;
        *len_ptr -= 1;
        let array = (len_ptr as *mut u8).add(<ThinVec<T>>::header_bytes()) as *mut T;
        let p = array.add(index);
        // Move the element out, then close the gap.
        let ret = ptr::read(p);
        ptr::copy(p.add(1), p, len - index - 1);
        ret
    }
}
pub fn retain<F>(&mut self, mut f: F)
where F: FnMut(&T) -> bool
{
if mem::size_of::<T>() == 0 {
let mut count = 0;
unsafe {
let t: T = ptr::read(NonNull::dangling().as_ptr());
for _i in 0..self.len() {
if f(&t) { count += 1; }
}
self.set_len(count);
}
return;
}
let mut array: *mut T;
let len: usize;
{
let len_ptr = self.u.get() as *mut usize;
let ptr = len_ptr as *mut u8;
unsafe {
len = *len_ptr;
array = ptr.add(<ThinVec<T>>::header_bytes()) as *mut T;
}
}
unsafe {
let end = array.add(len);
let mut cur = array;
let mut removed: usize = len;
while array < end {
if f(&*array) {
if array != cur {
ptr::copy_nonoverlapping(array, cur, 1);
}
cur = cur.add(1);
removed -= 1;
} else {
ptr::drop_in_place(array);
}
array = array.add(1);
}
let len_ptr = self.u.get() as usize as *mut usize;
*len_ptr -= removed;
}
}
/// Removes consecutive elements for which `same_bucket` returns true,
/// keeping the first of each run.
pub fn dedup_by<F>(&mut self, mut same_bucket: F) where F: FnMut(&mut T, &mut T) -> bool {
if mem::size_of::<T>() == 0 {
unsafe {
// NOTE(review): for ZSTs this truncates to one element without ever
// consulting `same_bucket` — Vec::dedup_by calls the predicate even
// for ZSTs. Confirm this shortcut is intended.
if self.len() > 1 {
self.set_len(1);
}
}
return;
}
unsafe {
let ln = self.len();
if ln <= 1 {
return;
}
let p = self.as_mut_ptr();
// r = read cursor, w = one past the last kept element.
let mut r: usize = 1;
let mut w: usize = 1;
while r < ln {
let p_r = p.offset(r as isize);
let p_wm1 = p.offset((w - 1) as isize);
if !same_bucket(&mut *p_r, &mut *p_wm1) {
if r != w {
let p_w = p_wm1.offset(1);
// Swap (not overwrite) so duplicates accumulate past `w` and are
// dropped by the truncate below rather than leaked.
mem::swap(&mut *p_r, &mut *p_w);
}
w += 1;
}
r += 1;
}
// Drops the duplicates gathered at the tail.
self.truncate(w);
}
}
#[inline]
/// Dedups by comparing keys extracted from each element.
pub fn dedup_by_key<F, K>(&mut self, mut key: F) where F: FnMut(&mut T) -> K, K: PartialEq {
self.dedup_by(|a, b| key(a) == key(b))
}
#[inline(always)]
/// Number of elements.
/// ZSTs: length is stored inline in `u` (tag bit masked off).
/// Otherwise: first word of the heap header, or 0 for the DANGLE sentinel.
pub fn len(&self) -> usize {
if mem::size_of::<T>() == 0 { return (self.u.get() & (ZST_MASK - 1)) as usize; }
unsafe { return if self.u.get() == DANGLE { 0 } else { *(self.u.get() as *mut usize) }; };
}
/// Returns true when the vector holds no elements.
pub fn is_empty(&self) -> bool {
self.len() == 0
}
#[inline]
/// Appends `val` to the back, allocating or growing the buffer if needed.
pub fn push(&mut self, val: T) {
if mem::size_of::<T>() == 0 {
unsafe {
let len = self.len();
self.set_len(len + 1);
return;
}
}
self.possibly_grow_heap();
unsafe {
let len_ptr = self.u.get() as usize as *mut usize;
let arr = (len_ptr as *mut u8).add(<ThinVec<T>>::header_bytes()) as *mut T;
// Write past the current end, then publish the new length.
ptr::write(arr.add(*len_ptr), val);
*len_ptr += 1;
}
}
#[inline]
/// Removes and returns the last element, or None when empty.
pub fn pop(&mut self) -> Option<T> {
if mem::size_of::<T>() == 0 {
let len = self.len();
if len > 0 {
unsafe {
self.set_len(len - 1);
return Some(ptr::read(NonNull::dangling().as_ptr()));
}
}
return None;
}
// Handles the DANGLE sentinel: len() is 0 without touching the heap.
if self.len() == 0 { return None; }
let array: *mut T;
let len: usize;
{
let len_ptr = self.u.get() as *mut usize;
unsafe {
len = *len_ptr;
// Redundant with the check above; kept as a defensive guard.
if len == 0 { return None; }
let ptr = len_ptr as *mut u8;
*len_ptr -= 1;
array = ptr.add(<ThinVec<T>>::header_bytes()) as *mut T;
}
}
unsafe {
let ret;
{
// Move the former last element out; its slot is now logically free.
let ptr = array.offset((len - 1) as isize);
ret = ptr::read(ptr);
}
Some(ret)
}
}
#[inline]
/// Moves every element of `other` to the end of `self`, leaving `other` empty.
pub fn append(&mut self, other: &mut Self) {
if mem::size_of::<T>() == 0 {
unsafe {
let len = self.len() + other.len();
self.set_len(len);
// Reset `other` to the empty ZST encoding (length 0, tag bit set).
other.u = NonZeroUsize::new_unchecked(ZST_MASK);
return;
}
}
unsafe {
// Bitwise-copies other's elements over, then zeroes its length so the
// moved elements are not dropped twice.
self.append_elements(other.as_slice() as _);
other.set_len(0);
}
}
#[inline]
/// Appends a bitwise copy of `*other` to the back of `self`.
///
/// # Safety
/// The caller must ensure the source elements are not dropped elsewhere
/// afterwards (ownership is transferred by the copy).
unsafe fn append_elements(&mut self, other: *const [T]) {
    let count = (*other).len();
    if count > 0 {
        self.reserve(count);
        let len = self.len();
        // Write through a raw past-the-end pointer. The previous version
        // used `self.get_unchecked_mut(len)`, which indexes one past the end
        // of the deref'd slice — out of bounds even for unchecked access.
        ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count);
        self.set_len(len + count);
    }
}
/// Sets the stored length to `len` without dropping or initializing
/// elements.
///
/// # Safety
/// For heap-backed vectors the caller must guarantee that the first `len`
/// elements are initialized and that `len` does not exceed the capacity.
///
/// # Panics
/// Panics when called with a non-zero `len` on an unallocated vector.
unsafe fn set_len(&mut self, len: usize) {
    if mem::size_of::<T>() == 0 {
        // ZST vectors keep the length inline, tagged with ZST_MASK.
        self.u = NonZeroUsize::new_unchecked(len | ZST_MASK);
        return;
    }
    if self.u.get() == DANGLE {
        if len != 0 {
            // (Typo "empyty" in the old message fixed.)
            panic!("can't set length of empty ThinVec");
        } else {
            return;
        }
    }
    // Length is the first word of the heap header.
    let len_ptr = self.u.get() as *mut usize;
    *len_ptr = len;
}
#[inline]
/// Splits the vector at `at`, returning a new vector holding the tail
/// `[at, len)`; `self` keeps `[0, at)`.
///
/// # Panics
/// Panics if `at > self.len()`.
pub fn split_off(&mut self, at: usize) -> Self {
let len = self.len();
assert!(at <= len, "`at` out of bounds");
let other_len = len - at;
let mut other = ThinVec::with_capacity(other_len);
unsafe {
// Shrink self first; the tail bytes stay intact in the old buffer and
// are then bitwise-moved into `other` (no double ownership).
self.set_len(at);
other.set_len(other_len);
ptr::copy_nonoverlapping(self.as_ptr().offset(at as isize),
other.as_mut_ptr(),
other.len());
}
other
}
#[inline]
/// Removes (and drops) every element; any heap capacity is retained.
pub fn clear(&mut self) {
self.truncate(0)
}
/// Consumes the vector and returns its contents as a boxed slice.
pub fn into_boxed_slice(mut self) -> Box<[T]> {
    if mem::size_of::<T>() == 0 {
        unsafe {
            // ZST slices carry no storage: a dangling, well-aligned pointer
            // plus the length forms a valid Box<[T]>.
            let slice = slice::from_raw_parts_mut(NonNull::dangling().as_ptr(), self.len());
            let output: Box<[T]> = Box::from_raw(slice);
            return output;
        }
    }
    let len = self.len();
    if len == 0 {
        // alloc::alloc with a zero-size layout is undefined behavior (the
        // old code hit exactly that); return an empty boxed slice instead.
        // `self` is dropped normally here, freeing any retained capacity.
        return Vec::new().into_boxed_slice();
    }
    unsafe {
        let len_ptr = self.u.get() as *mut usize;
        let array = (len_ptr as *mut u8).add(<ThinVec<T>>::header_bytes()) as *mut T;
        // Zero the stored length so our Drop (which runs when `self` goes
        // out of scope) frees the old buffer without double-dropping the
        // elements we are about to move out.
        *len_ptr = 0;
        let layout = Layout::from_size_align(mem::size_of::<T>() * len, mem::align_of::<T>()).unwrap();
        let buffer = alloc::alloc(layout) as *mut T;
        if buffer.is_null() {
            // Abort on allocation failure instead of writing through null.
            alloc::handle_alloc_error(layout);
        }
        ptr::copy_nonoverlapping(array, buffer, len);
        let slice = slice::from_raw_parts_mut(buffer, len);
        Box::from_raw(slice)
    }
}
/// Removes the given range from the vector in bulk, returning an iterator
/// over the removed elements. The kept tail is stitched back when the Drain
/// is dropped; leaking the Drain leaks the range instead.
pub fn drain<R>(&mut self, range: R) -> Drain<T>
where R: RangeBounds<usize>
{
let len = self.len();
// Normalize the bounds to a half-open [start, end).
let start = match range.start_bound() {
Included(&n) => n,
Excluded(&n) => n + 1,
Unbounded => 0,
};
let end = match range.end_bound() {
Included(&n) => n + 1,
Excluded(&n) => n,
Unbounded => len,
};
assert!(start <= end);
assert!(end <= len);
unsafe {
// Truncate the visible length to the head; the Drain remembers the
// tail and moves it back down in its Drop.
self.set_len(start);
let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().offset(start as isize),
end - start);
Drain {
tail_start: end,
tail_len: len - end,
iter: range_slice.iter(),
vec: NonNull::from(self),
}
}
}
#[inline]
/// Replaces the given range with the contents of `replace_with`, returning
/// an iterator over the removed elements. The replacement itself happens
/// when the returned `Splice` is dropped.
pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<I::IntoIter>
    where R: RangeBounds<usize>, I: IntoIterator<Item=T>
{
    // Drain the range eagerly; the replacement iterator is consumed lazily
    // by Splice's Drop.
    let drained = self.drain(range);
    Splice { drain: drained, replace_with: replace_with.into_iter() }
}
/// Returns an iterator that removes the elements matching `filter` while
/// compacting the survivors in place (finalized in DrainFilter's Drop).
pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<T, F>
where F: FnMut(&mut T) -> bool,
{
let old_len = self.len();
// Hide all elements from safe code while the filter runs; the real length
// is restored (minus deletions) when the DrainFilter drops.
unsafe { self.set_len(0); }
DrainFilter {
vec: self,
idx: 0,
del: 0,
old_len,
pred: filter,
}
}
/// Push-one-at-a-time extend loop shared by the Extend/FromIterator impls.
fn extend_desugared<I: Iterator<Item=T>>(&mut self, mut iterator: I) {
if mem::size_of::<T>() == 0 {
// ZSTs: drain the iterator purely to count the elements.
let mut count = 0;
while let Some(_element) = iterator.next() { count += 1; }
unsafe {
let len = self.len();
self.set_len(len + count);
}
return;
}
while let Some(element) = iterator.next() {
let len = self.len();
if len == self.capacity() {
// Use the iterator's own estimate to batch the reallocation.
let (lower, _) = iterator.size_hint();
self.reserve(lower.saturating_add(1));
}
unsafe {
// NOTE(review): this indexes the deref'd slice at `len` (one past the
// end) even though unchecked; a raw `as_mut_ptr().add(len)` write
// would be sounder — confirm.
ptr::write(self.get_unchecked_mut(len), element);
self.set_len(len + 1);
}
}
}
#[inline(always)]
/// Allocates a heap block for `capacity` elements plus the
/// [len, capacity] header, initializing len = 0 and capacity = `capacity`.
///
/// Aborts via `handle_alloc_error` on allocation failure and panics on
/// arithmetic overflow of the block size (the old code checked neither,
/// writing the header through a potentially null pointer).
fn allocate_array(capacity: usize) -> *mut u8 {
    unsafe {
        let align = cmp::max(mem::align_of::<usize>(), mem::align_of::<T>());
        // Overflow-checked size: capacity * size_of::<T>() + header.
        let bytes = mem::size_of::<T>()
            .checked_mul(capacity)
            .and_then(|b| b.checked_add(<ThinVec<T>>::header_bytes()))
            .expect("ThinVec capacity overflow");
        let layout = Layout::from_size_align(bytes, align).unwrap();
        let buffer = alloc::alloc(layout);
        if buffer.is_null() {
            alloc::handle_alloc_error(layout);
        }
        // Header: [len = 0][capacity].
        ptr::write(buffer as *mut usize, 0);
        ptr::write((buffer as *mut usize).add(1), capacity);
        buffer
    }
}
#[inline]
fn possibly_grow_heap(&mut self) {
unsafe {
if self.u.get() == DANGLE {
self.u = NonZeroUsize::new_unchecked(<ThinVec<T>>::allocate_array(8) as usize);
return;
}
let len_ptr = self.u.get() as *mut usize;
let cap_ptr = len_ptr.add(1);
if *len_ptr == *cap_ptr {
self.realloc_heap(len_ptr, *len_ptr * 2);
}
}
}
#[inline(always)]
/// Size in bytes of the heap header ([len, capacity]), padded so the element
/// array that follows it is aligned for T.
fn header_bytes() -> usize {
cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>())
}
#[inline(always)]
/// Moves the whole block (header + elements) into a fresh allocation of
/// `new_capacity` elements, then frees the old block. `len_ptr` must point
/// at the current heap header. Handles both growing and shrinking.
fn realloc_heap(&mut self, len_ptr: *mut usize, new_capacity: usize) {
unsafe {
let head_ptr = <ThinVec<T>>::allocate_array(new_capacity);
let header_bytes = <ThinVec<T>>::header_bytes();
// Copy at most min(new, old) capacity's worth of elements.
// NOTE(review): this memcpys up to the capacity, which can read
// uninitialized bytes past `len` — confirm acceptable here.
let to_move = cmp::min(new_capacity, *(len_ptr.add(1)));
ptr::copy_nonoverlapping(len_ptr as *mut u8, head_ptr, to_move * mem::size_of::<T>() + header_bytes);
// The memcpy brought the old capacity along; patch in the new one.
// (The old length, also copied, is the correct new length.)
let new_cap_ptr = (head_ptr as *mut usize).add(1);
*new_cap_ptr = new_capacity;
self.u = NonZeroUsize::new_unchecked(head_ptr as usize);
// Free the old block using its original (old-capacity) layout; the old
// header is still readable until the dealloc below.
let old = len_ptr as *mut u8;
let align = cmp::max(mem::align_of::<usize>(), mem::align_of::<T>());
let layout = Layout::from_size_align(mem::size_of::<T>() * (*(len_ptr.add(1))) + header_bytes, align).unwrap();
alloc::dealloc(old, layout);
}
}
}
impl<T: Clone> ThinVec<T> {
    /// Builds a vector containing `count` clones of `elem`.
    pub fn from_elem(elem: T, count: usize) -> ThinVec<T> {
        let mut thinvec = ThinVec::with_capacity(count);
        // Iterate over `usize` directly: the old `0..count as isize` turned
        // counts above isize::MAX into a negative bound (empty loop).
        for _ in 0..count {
            thinvec.push(elem.clone());
        }
        thinvec
    }
    /// Clones and appends every element of `slice`.
    pub fn extend_from_slice(&mut self, slice: &[T]) {
        self.reserve(slice.len());
        unsafe {
            let mut len = self.len();
            // Raw past-the-end pointer. The old code used
            // `self.get_unchecked_mut(len)`, which indexes one past the end
            // of the deref'd slice — out of bounds even unchecked.
            let mut dst = self.as_mut_ptr().add(len);
            for t in slice.iter() {
                ptr::write(dst, t.clone());
                dst = dst.add(1);
                len += 1;
                // Publish after every write so a panicking clone() leaks the
                // not-yet-recorded element instead of exposing uninit memory.
                self.set_len(len);
            }
        }
    }
}
impl<T> Drop for ThinVec<T> {
fn drop(&mut self) {
unsafe {
if mem::size_of::<T>() == 0 {
// ZSTs own no heap block; just reset to the empty encoding.
// NOTE(review): ZSTs with a Drop impl are not dropped here,
// consistent with the rest of this file's ZST handling — confirm.
self.u = NonZeroUsize::new_unchecked(ZST_MASK);
return;
}
if self.u.get() == DANGLE { return; }
let len_ptr = self.u.get() as *mut usize;
let header_bytes = <ThinVec<T>>::header_bytes();
if mem::needs_drop::<T>() {
// Drop each live element in place.
let mut cur = (len_ptr as *mut u8).add(header_bytes) as *mut T;
let end = cur.add(*len_ptr);
while cur < end {
ptr::drop_in_place(cur);
cur = cur.add(1);
}
}
// Free the block using the layout it was allocated with (capacity is
// the second header word).
let align = cmp::max(mem::align_of::<usize>(), mem::align_of::<T>());
let layout = Layout::from_size_align(mem::size_of::<T>() * (*(len_ptr.add(1))) + header_bytes, align).unwrap();
alloc::dealloc(self.u.get() as *mut u8, layout);
self.u = NonZeroUsize::new_unchecked(DANGLE);
}
}
}
impl<T> ThinVec<T> {
/// Reinterprets the vector as holding elements of type `X` without copying.
///
/// # Safety
/// Size/alignment equality and the no-Drop requirement are asserted below,
/// but the caller must still guarantee that every stored `T` is a valid
/// bit pattern for `X`.
pub unsafe fn transmute<X>(self) -> ThinVec<X> {
assert!(mem::size_of::<X>() == mem::size_of::<T>());
assert!(mem::align_of::<X>() == mem::align_of::<T>());
// Neither side may need drop glue: element destructors won't be rerouted.
assert!(!mem::needs_drop::<X>());
assert!(!mem::needs_drop::<T>());
let v: ThinVec<X> = ThinVec { u: self.u, _marker: marker::PhantomData };
// Ownership of the buffer moved into `v`; don't run our own Drop.
mem::forget(self);
v
}
}
impl<T, I> Index<I> for ThinVec<T>
where
    I: SliceIndex<[T]>,
{
    type Output = I::Output;
    /// Immutable indexing, delegated to the underlying slice.
    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        &self.as_slice()[index]
    }
}
impl<T, I> IndexMut<I> for ThinVec<T>
where
    I: SliceIndex<[T]>,
{
    /// Mutable indexing, delegated to the underlying slice.
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        &mut self.as_mut_slice()[index]
    }
}
impl<T> Deref for ThinVec<T> {
type Target = [T];
/// Views the contents as a slice. Both empty encodings (ZST tag and the
/// DANGLE sentinel) map to a dangling-but-aligned pointer with length 0
/// (or the inline ZST length), which is valid for slices.
fn deref(&self) -> &[T] {
unsafe {
if mem::size_of::<T>() == 0 { return slice::from_raw_parts(NonNull::dangling().as_ptr(), self.len()); }
let len: usize;
let arr: *mut T;
if self.u.get() == DANGLE {
len = 0;
arr = NonNull::dangling().as_ptr();
} else {
let len_ptr = self.u.get() as *mut usize;
len = *len_ptr;
// Element array starts right after the header.
arr = (len_ptr as *mut u8).add(<ThinVec<T>>::header_bytes()) as *mut T;
}
slice::from_raw_parts(arr, len)
}
}
}
impl<T> DerefMut for ThinVec<T> {
/// Mutable counterpart of `deref`; same empty-encoding handling.
fn deref_mut(&mut self) -> &mut [T] {
unsafe {
if mem::size_of::<T>() == 0 { return slice::from_raw_parts_mut(NonNull::dangling().as_ptr(), self.len()); }
let len: usize;
let arr: *mut T;
if self.u.get() == DANGLE {
len = 0;
arr = NonNull::dangling().as_ptr();
} else {
let len_ptr = self.u.get() as *mut usize;
len = *len_ptr;
arr = (len_ptr as *mut u8).add(<ThinVec<T>>::header_bytes()) as *mut T;
}
slice::from_raw_parts_mut(arr, len)
}
}
}
// Generates PartialEq between ThinVec and slice-like right-hand sides by
// comparing the two as slices (mirrors the macro formerly used by std's Vec).
macro_rules! __impl_slice_eq1 {
($Lhs: ty, $Rhs: ty) => {
__impl_slice_eq1! { $Lhs, $Rhs, Sized }
};
($Lhs: ty, $Rhs: ty, $Bound: ident) => {
impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs where A: PartialEq<B> {
#[inline]
fn eq(&self, other: &$Rhs) -> bool { self[..] == other[..] }
#[inline]
fn ne(&self, other: &$Rhs) -> bool { self[..] != other[..] }
}
}
}
__impl_slice_eq1! { ThinVec<A>, ThinVec<B> }
__impl_slice_eq1! { ThinVec<A>, &'b [B] }
__impl_slice_eq1! { ThinVec<A>, &'b mut [B] }
// Fixed-size array comparisons, one impl per length (pre-const-generics style).
macro_rules! array_impls {
($($N: expr)+) => {
$(
__impl_slice_eq1! { ThinVec<A>, [B; $N] }
__impl_slice_eq1! { ThinVec<A>, &'b [B; $N] }
)+
}
}
array_impls! {
0 1 2 3 4 5 6 7 8 9
10 11 12 13 14 15 16 17 18 19
20 21 22 23 24 25 26 27 28 29
30 31 32
}
impl<T: fmt::Debug> fmt::Debug for ThinVec<T> {
/// Formats like a slice / `Vec` (e.g. `[1, 2, 3]`).
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
/// Owning iterator for ThinVec.
pub struct IntoIter<T> {
// Start of the heap block (header) for deallocation; null when nothing was
// allocated (empty vector or ZST elements).
buf: *mut u8,
_marker: marker::PhantomData<T>,
// Iteration cursors; for ZSTs these encode a remaining count, not addresses.
ptr: *const T,
end: *const T,
}
impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("IntoIter")
.field(&self.as_slice())
.finish()
}
}
impl<T> IntoIterator for ThinVec<T> {
type Item = T;
type IntoIter = IntoIter<T>;
#[inline]
/// Consumes the vector; the IntoIter takes over the heap block and frees
/// it (after draining the remaining elements) in its Drop.
fn into_iter(mut self) -> IntoIter<T> {
unsafe {
if mem::size_of::<T>() == 0 {
// ZSTs: encode the remaining count as the byte distance ptr..end.
let ptr = self.as_mut_ptr();
let end = (ptr as *const u8).add(self.len()) as *const T;
mem::forget(self);
return IntoIter {
buf: ptr::null_mut(),
_marker: marker::PhantomData,
ptr,
end,
};
}
if self.u.get() == DANGLE {
// Nothing allocated: an immediately-exhausted iterator.
return IntoIter {
buf: ptr::null_mut(),
_marker: marker::PhantomData,
ptr: ptr::null_mut(),
end: ptr::null_mut(),
};
}
let len_ptr = self.u.get() as usize as *mut u8 as *mut usize;
let begin = (len_ptr as *mut u8).add(<ThinVec<T>>::header_bytes()) as *mut T;
let end = begin.offset(*len_ptr as isize) as *const T;
let buf = len_ptr as *mut u8;
// The iterator now owns the buffer; skip ThinVec's Drop.
mem::forget(self);
IntoIter {
buf,
_marker: marker::PhantomData,
ptr: begin,
end,
}
}
}
}
impl<'a, T> IntoIterator for &'a ThinVec<T> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;
    /// Borrowing iteration over the elements.
    fn into_iter(self) -> slice::Iter<'a, T> {
        self.as_slice().iter()
    }
}
impl<'a, T> IntoIterator for &'a mut ThinVec<T> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;
    /// Mutably borrowing iteration over the elements.
    fn into_iter(self) -> slice::IterMut<'a, T> {
        self.as_mut_slice().iter_mut()
    }
}
impl<T> IntoIter<T> {
/// Borrows the not-yet-yielded elements as a slice.
pub fn as_slice(&self) -> &[T] {
unsafe {
slice::from_raw_parts(self.ptr, self.len())
}
}
/// Mutably borrows the not-yet-yielded elements.
pub fn as_mut_slice(&mut self) -> &mut [T] {
unsafe {
slice::from_raw_parts_mut(self.ptr as *mut T, self.len())
}
}
}
// SAFETY: IntoIter exclusively owns its buffer, so sending/sharing the
// iterator is as safe as sending/sharing the elements themselves.
unsafe impl<T: Send> Send for IntoIter<T> {}
unsafe impl<T: Sync> Sync for IntoIter<T> {}
impl<T> Iterator for IntoIter<T> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
unsafe {
if self.ptr as *const _ == self.end {
None
} else {
if mem::size_of::<T>() == 0 {
// ZSTs: the cursor is a counter; step one byte and conjure a value.
self.ptr = (self.ptr as *mut u8).add(1) as *mut T;
Some(ptr::read(NonNull::dangling().as_ptr()))
} else {
// Move the front element out and advance.
let old = self.ptr;
self.ptr = self.ptr.offset(1);
Some(ptr::read(old))
}
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
// Exact count: remaining byte distance, scaled by element size for non-ZSTs.
let exact = if mem::size_of::<T>() == 0 {
(self.end as usize).wrapping_sub(self.ptr as usize)
} else {
(self.end as usize).wrapping_sub(self.ptr as usize) / mem::size_of::<T>()
};
(exact, Some(exact))
}
#[inline]
fn count(self) -> usize {
self.len()
}
}
impl<T> DoubleEndedIterator for IntoIter<T> {
#[inline]
fn next_back(&mut self) -> Option<T> {
unsafe {
if self.end == self.ptr {
None
} else {
if mem::size_of::<T>() == 0 {
// ZSTs: shrink the counter from the back.
self.end = (self.end as *const i8).sub(1) as *mut T;
Some(ptr::read(NonNull::dangling().as_ptr()))
} else {
// Step back first, then move that element out.
self.end = self.end.offset(-1);
Some(ptr::read(self.end))
}
}
}
}
}
impl<T> ExactSizeIterator for IntoIter<T> {}
impl<T> FusedIterator for IntoIter<T> {}
impl<T: Clone> Clone for IntoIter<T> {
/// Clones the remaining elements into a fresh vector's iterator.
fn clone(&self) -> IntoIter<T> {
self.as_slice().to_thinvec().into_iter()
}
}
/// Clone a borrowed sequence into a ThinVec (analogous to `ToOwned`).
pub trait ToThinVec<T> {
fn to_thinvec(&self) -> ThinVec<T>;
}
/// Convert an owned container into a ThinVec, consuming it.
pub trait IntoThinVec<T> {
fn into_thinvec(self) -> ThinVec<T>;
}
impl<T: Clone> ToThinVec<T> for [T] {
/// Clones every element into a new vector sized up front.
fn to_thinvec(&self) -> ThinVec<T> {
let mut vector = ThinVec::with_capacity(self.len());
vector.extend_desugared(self.iter().cloned());
vector
}
}
impl<T> IntoThinVec<T> for Box<[T]> {
    /// Moves the boxed slice's elements into a new ThinVec.
    ///
    /// The previous implementation copied the elements out and then
    /// `mem::forget`-ed the box, leaking its heap allocation on every call.
    /// Routing through `into_vec` moves the elements and frees the buffer.
    fn into_thinvec(self) -> ThinVec<T> {
        let len = self.len();
        let mut vec = ThinVec::with_capacity(len);
        // into_vec reuses the box's allocation; its IntoIter then hands us
        // each element by value and deallocates when exhausted.
        vec.extend_desugared(self.into_vec().into_iter());
        vec
    }
}
impl<T> Drop for IntoIter<T> {
fn drop(&mut self) {
// ZSTs never allocated a buffer.
// NOTE(review): ZSTs with a Drop impl are skipped, like elsewhere in this
// file — confirm intended.
if mem::size_of::<T>() == 0 { return; }
// Drop every element that was not yielded.
for _x in self.by_ref() {}
unsafe {
if !self.buf.is_null() {
// Reconstruct the allocation layout from the stored capacity
// (second header word) and free the block.
let len_ptr = self.buf as *mut usize;
let align = cmp::max(mem::align_of::<usize>(), mem::align_of::<T>());
let layout = Layout::from_size_align(mem::size_of::<T>() * (*(len_ptr.add(1))) + <ThinVec<T>>::header_bytes(), align).unwrap();
alloc::dealloc(self.buf, layout);
self.buf = ptr::null_mut();
}
}
}
}
impl<T> Extend<T> for ThinVec<T> {
#[inline]
/// Appends every element produced by `iter`.
fn extend<I: IntoIterator<Item=T>>(&mut self, iter: I) {
self.extend_desugared(iter.into_iter())
}
}
impl<'a, T: 'a + Copy> Extend<&'a T> for ThinVec<T> {
#[inline]
/// Appends copies of borrowed elements (Copy types only).
fn extend<I: IntoIterator<Item=&'a T>>(&mut self, iter: I) {
self.extend_desugared(iter.into_iter().cloned())
}
}
impl<T> Default for ThinVec<T> {
/// An empty vector; equivalent to `ThinVec::new()`.
fn default() -> ThinVec<T> {
ThinVec::new()
}
}
// Conversions from common borrowed/owned sequence types, mirroring Vec's.
impl<'a, T: Clone> From<&'a [T]> for ThinVec<T> {
fn from(s: &'a [T]) -> ThinVec<T> {
s.to_thinvec()
}
}
impl<'a, T: Clone> From<&'a mut [T]> for ThinVec<T> {
fn from(s: &'a mut [T]) -> ThinVec<T> {
s.to_thinvec()
}
}
impl<T> From<Box<[T]>> for ThinVec<T> {
fn from(s: Box<[T]>) -> ThinVec<T> {
s.into_thinvec()
}
}
impl<'a, T> From<Cow<'a, [T]>> for ThinVec<T> where [T]: ToOwned<Owned=ThinVec<T>> {
fn from(s: Cow<'a, [T]>) -> ThinVec<T> {
s.into_owned()
}
}
impl<'a> From<&'a str> for ThinVec<u8> {
fn from(s: &'a str) -> ThinVec<u8> {
From::from(s.as_bytes())
}
}
impl<T> FromIterator<T> for ThinVec<T> {
#[inline]
/// Collects an iterator, pre-sizing from its lower size-hint bound.
fn from_iter<I: IntoIterator<Item=T>>(iter: I) -> ThinVec<T> {
let into_iter = iter.into_iter();
let (lower, _) = into_iter.size_hint();
let mut thinvec = ThinVec::with_capacity(lower);
thinvec.extend_desugared(into_iter);
thinvec
}
}
/// Draining iterator returned by `ThinVec::drain`.
pub struct Drain<'a, T: 'a> {
// First index of the tail kept beyond the drained range.
tail_start: usize,
// Number of elements in that kept tail.
tail_len: usize,
// Iterator over the drained range itself.
iter: slice::Iter<'a, T>,
vec: NonNull<ThinVec<T>>,
}
impl<'a, T: 'a + fmt::Debug> fmt::Debug for Drain<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Drain")
.field(&self.iter.as_slice())
.finish()
}
}
// SAFETY: Drain holds an exclusive borrow of the source vector, so its
// thread-safety follows from the element type's.
unsafe impl<'a, T: Sync> Sync for Drain<'a, T> {}
unsafe impl<'a, T: Send> Send for Drain<'a, T> {}
impl<'a, T> Iterator for Drain<'a, T> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
// Move each drained element out by value.
self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) })
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<T> {
self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) })
}
}
impl<'a, T> Drop for Drain<'a, T> {
/// Drops whatever wasn't yielded, then slides the kept tail down so the
/// source vector is contiguous again and republishes its full length.
fn drop(&mut self) {
self.for_each(drop);
if self.tail_len > 0 {
unsafe {
let source_vec = self.vec.as_mut();
// len was truncated to the range start in drain().
let start = source_vec.len();
let tail = self.tail_start;
if tail != start {
let src = source_vec.as_ptr().offset(tail as isize);
let dst = source_vec.as_mut_ptr().offset(start as isize);
ptr::copy(src, dst, self.tail_len);
}
source_vec.set_len(start + self.tail_len);
}
}
}
}
impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
impl<'a, T> FusedIterator for Drain<'a, T> {}
#[derive(Debug)]
/// Iterator returned by `ThinVec::splice`: yields the drained range and, on
/// drop, writes `replace_with`'s elements into its place.
pub struct Splice<'a, I: Iterator + 'a> {
drain: Drain<'a, I::Item>,
replace_with: I,
}
impl<'a, I: Iterator> Iterator for Splice<'a, I> {
type Item = I::Item;
fn next(&mut self) -> Option<Self::Item> {
self.drain.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.drain.size_hint()
}
}
impl<'a, I: Iterator> DoubleEndedIterator for Splice<'a, I> {
fn next_back(&mut self) -> Option<Self::Item> {
self.drain.next_back()
}
}
impl<'a, I: Iterator> ExactSizeIterator for Splice<'a, I> {}
impl<'a, I: Iterator> Drop for Splice<'a, I> {
/// Finishes the splice: drains the rest of the range, then writes the
/// replacement elements into the gap, shifting the tail as needed.
fn drop(&mut self) {
// Drop any drained elements the caller didn't consume.
self.drain.by_ref().for_each(drop);
unsafe {
if self.drain.tail_len == 0 {
// Range reached the end of the vector: just append the replacements.
self.drain.vec.as_mut().extend(self.replace_with.by_ref());
return;
}
// First fill the hole left by the drained range.
if !self.drain.fill(&mut self.replace_with) {
return;
}
// More replacements may remain; use the lower size-hint bound to move
// the tail once rather than per element.
let (lower_bound, _upper_bound) = self.replace_with.size_hint();
if lower_bound > 0 {
self.drain.move_tail(lower_bound);
if !self.drain.fill(&mut self.replace_with) {
return;
}
}
// Unknown remainder: collect it, then make room for it exactly once.
let mut collected = self.replace_with.by_ref().collect::<ThinVec<I::Item>>().into_iter();
if collected.len() > 0 {
self.drain.move_tail(collected.len());
let filled = self.drain.fill(&mut collected);
debug_assert!(filled);
debug_assert_eq!(collected.len(), 0);
}
}
}
}
impl<'a, T> Drain<'a, T> {
/// Writes elements from `replace_with` into the vacated range
/// `[vec.len(), tail_start)`, bumping the vector's length per element.
/// Returns false if the iterator ran out before the range was full.
unsafe fn fill<I: Iterator<Item=T>>(&mut self, replace_with: &mut I) -> bool {
let vec = self.vec.as_mut();
let range_start = vec.len();
let range_end = self.tail_start;
let range_slice = slice::from_raw_parts_mut(
vec.as_mut_ptr().offset(range_start as isize),
range_end - range_start);
let mut count = range_start;
for place in range_slice {
if let Some(new_item) = replace_with.next() {
ptr::write(place, new_item);
count += 1;
// Publish progress eagerly so a panic can't expose uninit slots.
vec.set_len(count);
} else {
return false;
}
}
true
}
/// Shifts the kept tail right by `extra_capacity` slots to make room for
/// more replacement elements, reserving first.
unsafe fn move_tail(&mut self, extra_capacity: usize) {
let vec = self.vec.as_mut();
vec.reserve(extra_capacity);
let new_tail_start = self.tail_start + extra_capacity;
let src = vec.as_ptr().offset(self.tail_start as isize);
let dst = vec.as_mut_ptr().offset(new_tail_start as isize);
ptr::copy(src, dst, self.tail_len);
self.tail_start = new_tail_start;
}
}
/// Clone into an existing vector, reusing its allocation (analogous to
/// `ToOwned::clone_into`).
pub trait CloneIntoThinVec<T> {
fn clone_into_thinvec(&self, target: &mut ThinVec<T>);
}
impl<T: Clone> CloneIntoThinVec<T> for [T] {
fn clone_into_thinvec(&self, target: &mut ThinVec<T>) {
// Drop any excess elements, clone over the overlap, then append the rest.
target.truncate(self.len());
let len = target.len();
target.clone_from_slice(&self[..len]);
target.extend_from_slice(&self[len..]);
}
}
impl<T: Clone> Clone for ThinVec<T> {
fn clone(&self) -> ThinVec<T> {
<[T]>::to_thinvec(&**self)
}
/// Reuses self's buffer where possible instead of reallocating.
fn clone_from(&mut self, other: &ThinVec<T>) {
other.as_slice().clone_into_thinvec(self);
}
}
#[derive(Debug)]
/// Iterator returned by `drain_filter`: yields elements matching the
/// predicate while compacting the survivors in place.
pub struct DrainFilter<'a, T: 'a, F>
where F: FnMut(&mut T) -> bool,
{
vec: &'a mut ThinVec<T>,
// Next index to test (in the original layout).
idx: usize,
// How many elements have been yielded (removed) so far.
del: usize,
// Length before draining; the vec's visible len was zeroed up front.
old_len: usize,
pred: F,
}
impl<'a, T, F> Iterator for DrainFilter<'a, T, F>
where F: FnMut(&mut T) -> bool,
{
type Item = T;
fn next(&mut self) -> Option<T> {
unsafe {
while self.idx != self.old_len {
let i = self.idx;
self.idx += 1;
let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
if (self.pred)(&mut v[i]) {
self.del += 1;
// Move the match out; its old slot is now logically vacant.
return Some(ptr::read(&v[i]));
} else if self.del > 0 {
// Shift the kept element left over the vacated slots.
let del = self.del;
let src: *const T = &v[i];
let dst: *mut T = &mut v[i - del];
ptr::copy_nonoverlapping(src, dst, 1);
}
}
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(0, Some(self.old_len - self.idx))
}
}
impl<'a, T, F> Drop for DrainFilter<'a, T, F>
where F: FnMut(&mut T) -> bool,
{
/// Finishes filtering any untested elements, then restores the final length.
fn drop(&mut self) {
self.for_each(drop);
unsafe {
self.vec.set_len(self.old_len - self.del);
}
}
}
impl<T: PartialEq> ThinVec<T> {
    /// Removes consecutive repeated elements, keeping the first of each run.
    #[inline]
    pub fn dedup(&mut self) {
        self.dedup_by(|x, y| x == y)
    }
    /// Removes the first element equal to `item`, returning it if found.
    pub fn remove_item(&mut self, item: &T) -> Option<T> {
        let idx = self.iter().position(|x| x == item)?;
        Some(self.remove(idx))
    }
}
impl<T> Borrow<[T]> for ThinVec<T> {
    /// Borrows the contents as a slice.
    fn borrow(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T> BorrowMut<[T]> for ThinVec<T> {
    /// Mutably borrows the contents as a slice.
    fn borrow_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}