use std::{
alloc::{self, Layout},
mem, ptr, marker,
};
use std::fmt::{self};
use std::cmp;
use std::ops::Index;
use std::iter::FromIterator;
use std::ops::Deref;
use std::slice;
use std::slice::SliceIndex;
use std::ptr::NonNull;
use std::ops::DerefMut;
use std::ops::IndexMut;
use std::borrow::Cow;
use std::iter::FusedIterator;
use std::ops::RangeBounds;
use std::ops::Bound::*;
use std::borrow::Borrow;
use std::borrow::BorrowMut;
use std::num::NonZeroU64;
/// A vector packed into a single non-zero 64-bit word (little-endian only).
///
/// Storage modes, decoded by `control()`:
/// - Heap: `u` is a 16-byte-aligned pointer to a buffer whose header holds
///   `[len, capacity]` followed by the elements, so the low 4 bits are 0.
/// - Stack: bit 3 of `u` is a tag, bits 0-2 hold the inline length, and the
///   upper 7 bytes of `u` store the elements themselves.
/// - ZST: for zero-sized `T`, the top bit (`ZST_MASK`) is set and the low
///   63 bits hold the length; no element storage is needed.
///
/// NOTE(review): only the struct itself is gated on little-endian; the rest
/// of the file is not — confirm the crate is never built big-endian.
#[cfg(target_endian = "little")]
pub struct V64<T> {
u: NonZeroU64,
_marker: marker::PhantomData<T>,
}
// Tag bit marking the "length-only" representation used for zero-sized types.
const ZST_MASK: u64 = 0x8000_0000_0000_0000u64;
// Decoded view of the current storage mode of a `V64`.
pub enum Control {
// Pointer to the heap buffer (header `[len, capacity]`, then elements).
Heap(*mut u8),
// Inline storage; the payload is the current inline length (0..=7).
Stack(usize),
}
impl<T> V64<T> {
// Creates an empty vector: ZST mode for zero-sized `T`, otherwise the
// empty stack encoding (tag bit 3 set, inline length 0 => u == 8).
#[inline]
pub fn new() -> V64<T> {
unsafe {
if mem::size_of::<T>() == 0 { return V64 { u: NonZeroU64::new_unchecked(ZST_MASK), _marker: marker::PhantomData }; }
V64 { u: NonZeroU64::new_unchecked(8), _marker: marker::PhantomData }
}
}
// Creates a vector able to hold `capacity` elements without reallocating.
// Requests whose payload fits in the 7 inline bytes stay on the stack;
// anything larger gets a heap buffer up front.
#[inline]
pub fn with_capacity(capacity: usize) -> V64<T> {
if mem::size_of::<T>() == 0 { return V64::new(); }
if capacity == 0 || mem::size_of::<T>() * capacity < 8 {
return <V64<T>>::new();
}
let array = <V64<T>>::allocate_array(capacity);
unsafe {
// The buffer is 16-byte aligned, so the low control bits are zero and
// the pointer value itself encodes "heap mode".
V64 { u: NonZeroU64::new_unchecked(array as u64), _marker: marker::PhantomData }
}
}
// Number of elements the vector can hold without reallocating.
// ZSTs never need storage, so their capacity is effectively unbounded.
#[inline]
pub fn capacity(&self) -> usize {
if mem::size_of::<T>() == 0 { return <usize>::max_value(); }
match self.control() {
Control::Heap(ptr) => {
unsafe {
// Header layout is [len, capacity]; capacity is the second word.
let len_ptr = ptr as *mut usize;
return *len_ptr.add(1);
}
}
Control::Stack(_) => {
// 7 bytes of inline storage sit past the control byte.
let s = mem::size_of::<T>();
return if s == 0 { 7 } else { 7 / s };
}
}
}
// Reserves room for at least `additional` more elements.
// NOTE(review): this forwards max(len * 2, len + additional) as the
// *additional* amount to `reserve_exact`, so it over-reserves compared to
// std's `Vec::reserve` — presumably a deliberate growth policy; confirm.
pub fn reserve(&mut self, additional: usize) {
if mem::size_of::<T>() == 0 { return; }
let len = self.len();
self.reserve_exact(cmp::max(len * 2, len + additional));
}
// Reserves room for exactly `additional` more elements, migrating inline
// storage to the heap when the inline capacity would be exceeded.
pub fn reserve_exact(&mut self, additional: usize) {
if mem::size_of::<T>() == 0 { return; }
let remain = self.capacity() - self.len();
if remain < additional {
match self.control() {
Control::Heap(ptr) => {
unsafe {
let len_ptr = ptr as *mut usize;
self.realloc_heap(len_ptr, *len_ptr + additional);
}
}
Control::Stack(len) => {
self.move_to_heap(len, len + additional);
}
}
}
}
// Shrinks a heap allocation down to the current length. Inline storage is
// left untouched (it cannot shrink). Note this can produce a capacity-0
// heap buffer when the vector is empty.
pub fn shrink_to_fit(&mut self) {
if mem::size_of::<T>() == 0 { return; }
match self.control() {
Control::Heap(ptr) => {
unsafe {
let len_ptr = ptr as *mut usize;
self.realloc_heap(len_ptr, *len_ptr);
}
}
Control::Stack(_) => {}
}
}
/// Shortens the vector to at most `len` elements, dropping the excess.
/// A request with `len >= self.len()` leaves the vector unchanged.
pub fn truncate(&mut self, len: usize) {
    if mem::size_of::<T>() == 0 {
        unsafe {
            // BUGFIX: only ever shrink. The old code called `set_len(len)`
            // unconditionally, which could *grow* a zero-sized-type vector
            // when `len` exceeded the current length. (ZST element drops are
            // skipped throughout this file.)
            if len < self.len() {
                self.set_len(len);
            }
            return;
        }
    }
    match self.control() {
        Control::Heap(ptr) => {
            unsafe {
                let len_ptr = ptr as *mut usize;
                if *len_ptr > len {
                    if mem::needs_drop::<T>() {
                        // Drop tail elements one by one, decrementing the
                        // stored length before each drop so a panicking
                        // destructor cannot cause a double drop.
                        let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
                        let mut cur = ((len_ptr as *mut u8).add(header_bytes) as *mut T).add(len);
                        let end = cur.add(*len_ptr - len);
                        while cur < end {
                            *len_ptr -= 1;
                            ptr::drop_in_place(cur);
                            cur = cur.add(1);
                        }
                    } else {
                        *len_ptr = len;
                    }
                }
            }
        }
        Control::Stack(stack_len) => {
            unsafe {
                if stack_len > len {
                    let mut cur = self.stack_ptr().add(len);
                    let end = cur.add(stack_len - len);
                    let mut cur_len = stack_len;
                    while cur < end {
                        // Shrink the recorded length before dropping (panic
                        // safety), then zero the vacated bytes so the inline
                        // word never carries stale data.
                        cur_len -= 1;
                        self.set_stack_len(cur_len);
                        ptr::drop_in_place(cur);
                        ptr::write(cur, mem::zeroed::<T>());
                        cur = cur.add(1);
                    }
                }
            }
        }
    }
}
// Immutable slice view of the elements (coerces through `Deref`).
#[inline]
pub fn as_slice(&self) -> &[T] {
self
}
// Number of bytes currently held on the heap (header + full capacity).
// Returns 0 for inline/ZST vectors.
pub fn bytes_on_heap(&self) -> usize {
if mem::size_of::<T>() == 0 { return 0; }
match self.control() {
Control::Heap(ptr) => {
unsafe {
let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
let cap_ptr: *mut usize = (ptr as *mut usize).add(1);
return header_bytes + *cap_ptr * mem::size_of::<T>();
}
}
Control::Stack(_len) => { return 0; }
}
}
// Mutable slice view of the elements (coerces through `DerefMut`).
#[inline]
pub fn as_mut_slice(&mut self) -> &mut [T] {
self
}
/// Removes the element at `index` in O(1) by moving the last element into
/// its place; does not preserve element order.
///
/// # Panics
/// Panics when `index` is out of bounds.
#[inline]
pub fn swap_remove(&mut self, index: usize) -> T {
    if mem::size_of::<T>() == 0 {
        unsafe {
            let len = self.len();
            // BUGFIX: bounds-check first. The old ZST path decremented the
            // length unconditionally, so an out-of-range index — or calling
            // this on an empty vector — corrupted the length (underflow)
            // instead of panicking like the sized path does.
            if index >= len {
                panic!("index out of bounds! len: {}, index {}", len, index);
            }
            self.set_len(len - 1);
            return ptr::read(NonNull::dangling().as_ptr());
        }
    }
    unsafe {
        match self.control() {
            Control::Heap(ptr) => {
                let len_ptr = ptr as *mut usize;
                if index < *len_ptr {
                    *len_ptr -= 1;
                    let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
                    // Move the (former) last element into the hole at `index`.
                    <V64<T>>::replace(ptr.add(header_bytes) as *mut T, *len_ptr, index)
                } else {
                    panic!("index out of bounds! len: {}, index {}", *len_ptr, index);
                }
            }
            Control::Stack(len) => {
                if index < len {
                    self.set_stack_len(len - 1);
                    <V64<T>>::replace(self.stack_ptr(), len - 1, index)
                } else {
                    panic!("index out of bounds! len: {}, index {}", len, index);
                }
            }
        }
    }
}
// Inserts `val` at `index`, shifting later elements right (O(n)).
// Panics when `index > len`.
pub fn insert(&mut self, index: usize, val: T) {
if mem::size_of::<T>() == 0 {
unsafe {
let len = self.len();
assert!(index <= len);
self.set_len(len + 1);
return;
}
}
match self.control() {
Control::Heap(ptr) => {
let len_ptr = ptr as *mut usize;
// The unsafe block only guards the raw length read for the bounds check.
unsafe { assert!(index <= *len_ptr); }
// May reallocate; `heap_insert` re-reads the pointer from `self.u`.
self.possibly_grow_heap(ptr);
}
Control::Stack(len) => {
assert!(index <= len);
if mem::size_of::<T>() * (len + 1) < 8 {
// Still fits inline.
self.stack_insert(index, val, len);
return;
} else {
// Inline storage is full: spill to the heap, then fall through.
self.move_to_heap(len, 1);
}
}
}
self.heap_insert(index, val);
}
// Heap-mode insert; capacity must already be sufficient.
fn heap_insert(&mut self, index: usize, val: T) {
unsafe {
let len_ptr = self.u.get() as usize as *mut usize;
let len = *len_ptr;
{
// Open a one-slot gap, then write the new element into it.
let p = self.as_mut_ptr().offset(index as isize);
ptr::copy(p, p.offset(1), len - index);
ptr::write(p, val);
}
*len_ptr += 1;
}
}
// Inline-mode insert; caller guarantees the result still fits inline.
fn stack_insert(&mut self, index: usize, val: T, len: usize) {
unsafe {
{
let p = self.as_mut_ptr().offset(index as isize);
ptr::copy(p, p.offset(1), len - index);
ptr::write(p, val);
}
self.set_stack_len(len + 1);
}
}
// Moves element `src` into slot `dst` and returns `dst`'s old value.
// Used by `swap_remove`; `src` is read bitwise (ownership transfers out).
#[inline]
fn replace(ptr: *mut T, src: usize, dst: usize) -> T {
unsafe {
let hole: *mut T = ptr.add(dst);
let last = ptr::read(ptr.add(src));
ptr::replace(hole, last)
}
}
/// Removes and returns the element at `index`, shifting everything after
/// it one slot left (O(n)).
///
/// # Panics
/// Panics when `index >= len`.
pub fn remove(&mut self, index: usize) -> T {
    if mem::size_of::<T>() == 0 {
        unsafe {
            let len = self.len();
            assert!(index < len);
            self.set_len(len - 1);
            return ptr::read(NonNull::dangling().as_ptr());
        }
    }
    let array: *mut T;
    let len: usize;
    match self.control() {
        Control::Heap(ptr) => {
            let len_ptr = ptr as *mut usize;
            unsafe {
                len = *len_ptr;
                // BUGFIX: validate `index` *before* mutating. The old code
                // decremented the stored length first and asserted later,
                // so a failed bounds check left the vector one element
                // shorter (silently leaking the last element).
                assert!(index < len);
                *len_ptr -= 1;
                let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
                array = ptr.add(header_bytes) as *mut T;
            }
        }
        Control::Stack(stack_len) => {
            assert!(index < stack_len);
            len = stack_len;
            array = self.stack_ptr();
            self.set_stack_len(stack_len - 1);
        }
    }
    unsafe {
        // Read the removed element out, then close the gap.
        let ptr = array.offset(index as isize);
        let ret = ptr::read(ptr);
        ptr::copy(ptr.offset(1), ptr, len - index - 1);
        ret
    }
}
// Keeps only the elements for which `f` returns true, in order, dropping
// the rest in place.
// NOTE(review): the stored length is only updated after the whole scan, so
// a panicking predicate/destructor can lead to double drops on unwind —
// confirm whether that is acceptable for this crate.
pub fn retain<F>(&mut self, mut f: F)
where F: FnMut(&T) -> bool
{
if mem::size_of::<T>() == 0 {
let mut count = 0;
unsafe {
// For ZSTs the predicate still runs once per element.
let t: T = ptr::read(NonNull::dangling().as_ptr());
for _i in 0..self.len() {
if f(&t) { count += 1; }
}
self.set_len(count);
}
return;
}
let mut array: *mut T;
let len: usize;
let heap: bool;
{
match self.control() {
Control::Heap(ptr) => {
let len_ptr = ptr as *mut usize;
unsafe {
len = *len_ptr;
let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
array = ptr.add(header_bytes) as *mut T;
heap = true;
}
}
Control::Stack(stack_len) => {
len = stack_len;
heap = false;
array = self.stack_ptr();
}
}
}
unsafe {
// Two-cursor compaction: `array` scans, `cur` is the write position.
let end = array.add(len);
let mut cur = array;
let mut removed: usize = len;
while array < end {
if f(&*array) {
if array != cur {
ptr::copy_nonoverlapping(array, cur, 1);
}
cur = cur.add(1);
removed -= 1;
} else {
ptr::drop_in_place(array);
}
array = array.add(1);
}
if heap {
let len_ptr = self.u.get() as usize as *mut usize;
*len_ptr -= removed;
} else {
self.set_stack_len(len - removed);
}
}
}
// Removes consecutive elements considered equal by `same_bucket`, keeping
// the first of each run.
// NOTE(review): for ZSTs the predicate is never consulted and the vector
// is simply truncated to one element — this differs from std's Vec; verify.
pub fn dedup_by<F>(&mut self, mut same_bucket: F) where F: FnMut(&mut T, &mut T) -> bool {
if mem::size_of::<T>() == 0 {
unsafe {
if self.len() > 1 {
self.set_len(1);
}
}
return;
}
unsafe {
let ln = self.len();
if ln <= 1 {
return;
}
// `r` scans forward, `w` is one past the last unique element.
let p = self.as_mut_ptr();
let mut r: usize = 1;
let mut w: usize = 1;
while r < ln {
let p_r = p.offset(r as isize);
let p_wm1 = p.offset((w - 1) as isize);
if !same_bucket(&mut *p_r, &mut *p_wm1) {
if r != w {
let p_w = p_wm1.offset(1);
// Swap (not overwrite) so duplicates survive until `truncate`
// drops them below.
mem::swap(&mut *p_r, &mut *p_w);
}
w += 1;
}
r += 1;
}
self.truncate(w);
}
}
// Dedup driven by a key extraction function.
#[inline]
pub fn dedup_by_key<F, K>(&mut self, mut key: F) where F: FnMut(&mut T) -> K, K: PartialEq {
self.dedup_by(|a, b| key(a) == key(b))
}
// Current number of elements, decoded from whichever representation is live.
#[inline]
pub fn len(&self) -> usize {
if mem::size_of::<T>() == 0 { return (self.u.get() & (ZST_MASK - 1)) as usize; }
match self.control() {
Control::Heap(ptr) => { unsafe { return *(ptr as *mut usize); } }
Control::Stack(len) => { return len; }
}
}
// True when the vector holds no elements.
pub fn is_empty(&self) -> bool {
self.len() == 0
}
// Decodes the representation from the low bits of `u`: heap pointers are
// 16-byte aligned (low 4 bits zero); otherwise bit 3 is the stack tag and
// bits 0-2 hold the inline length.
fn control(&self) -> Control {
let c = (self.u.get() & 15) as u8;
if c == 0 {
return Control::Heap(self.u.get() as usize as *mut u8);
}
return Control::Stack((c & 7) as usize);
}
// Appends `val`, spilling inline storage to the heap when it fills up.
#[inline]
pub fn push(&mut self, val: T) {
if mem::size_of::<T>() == 0 {
unsafe {
let len = self.len();
self.set_len(len + 1);
return;
}
}
match self.control() {
Control::Heap(ptr) => {
self.possibly_grow_heap(ptr);
}
Control::Stack(len) => {
if mem::size_of::<T>() * (len + 1) < 8 {
self.stack_push(val, len);
return;
} else {
self.move_to_heap(len, 1);
}
}
}
self.heap_push(val);
}
// Removes and returns the last element, or `None` when empty.
#[inline]
pub fn pop(&mut self) -> Option<T> {
if mem::size_of::<T>() == 0 {
let len = self.len();
if len > 0 {
unsafe {
self.set_len(len - 1);
return Some(ptr::read(NonNull::dangling().as_ptr()));
}
}
return None;
}
let array: *mut T;
let len: usize;
{
match self.control() {
Control::Heap(ptr) => {
let len_ptr = ptr as *mut usize;
unsafe {
len = *len_ptr;
if len == 0 { return None; }
*len_ptr -= 1;
let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
array = ptr.add(header_bytes) as *mut T;
}
}
Control::Stack(stack_len) => {
if stack_len == 0 { return None; }
len = stack_len;
array = self.stack_ptr();
self.set_stack_len(stack_len - 1);
}
}
}
unsafe {
// Ownership of the last element transfers to the caller via bitwise read.
let ret;
{
let ptr = array.offset((len - 1) as isize);
ret = ptr::read(ptr);
}
Some(ret)
}
}
// Moves all elements of `other` to the end of `self`, leaving `other` empty
// (its heap allocation, if any, is kept but zero-length).
#[inline]
pub fn append(&mut self, other: &mut Self) {
if mem::size_of::<T>() == 0 {
unsafe {
let len = self.len() + other.len();
self.set_len(len);
other.u = NonZeroU64::new_unchecked(ZST_MASK);
return;
}
}
unsafe {
self.append_elements(other.as_slice() as _);
match other.control() {
Control::Heap(ptr) => {
// Elements were moved out bitwise; zero the length so `other`'s
// Drop does not drop them again.
let len_ptr = ptr as *mut usize;
*len_ptr = 0;
}
Control::Stack(_stack_len) => {
other.u = NonZeroU64::new_unchecked(8);
}
}
}
}
// Bitwise-copies `other`'s elements onto the end of `self`.
// Caller must guarantee the source elements are not dropped afterwards.
#[inline]
unsafe fn append_elements(&mut self, other: *const [T]) {
let count = (*other).len();
if count > 0 {
self.reserve(count);
let len = self.len();
ptr::copy_nonoverlapping(other as *const T, self.get_unchecked_mut(len), count);
self.set_len(len + count);
}
}
// Sets the stored length without touching elements. Caller must ensure the
// first `len` slots are initialized (and `len <= capacity`; for the inline
// representation `set_stack_len` asserts `len < 8`).
unsafe fn set_len(&mut self, len: usize) {
if mem::size_of::<T>() == 0 {
self.u = NonZeroU64::new_unchecked(len as u64 | ZST_MASK);
return;
}
match self.control() {
Control::Heap(ptr) => {
let len_ptr = ptr as *mut usize;
*len_ptr = len;
}
Control::Stack(_stack_len) => {
self.set_stack_len(len);
}
}
}
// Splits the vector at `at`, returning a new vector with the tail
// `[at, len)`; `self` keeps `[0, at)`. Panics when `at > len`.
#[inline]
pub fn split_off(&mut self, at: usize) -> Self {
let len = self.len();
assert!(at <= len, "`at` out of bounds");
let other_len = len - at;
let mut other = V64::with_capacity(other_len);
unsafe {
// Shrink first, then bitwise-move the tail into `other`.
self.set_len(at);
other.set_len(other_len);
ptr::copy_nonoverlapping(self.as_ptr().offset(at as isize),
other.as_mut_ptr(),
other.len());
}
other
}
// Removes all elements (dropping them); keeps the current allocation.
#[inline]
pub fn clear(&mut self) {
self.truncate(0)
}
/// Consumes the vector and returns its elements as a boxed slice sized
/// exactly to the current length.
pub fn into_boxed_slice(mut self) -> Box<[T]> {
    if mem::size_of::<T>() == 0 {
        unsafe {
            // ZST slices carry only a length; the pointer is dangling.
            let slice = slice::from_raw_parts_mut(NonNull::dangling().as_ptr(), self.len());
            let output: Box<[T]> = Box::from_raw(slice);
            return output;
        }
    }
    unsafe {
        let array: *mut T;
        let len: usize;
        match self.control() {
            Control::Heap(ptr) => {
                let len_ptr = ptr as *mut usize;
                let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
                array = ptr.add(header_bytes) as *mut T;
                len = *len_ptr;
                // Elements move into the box; zero the length so our own
                // Drop (which still frees the buffer) won't drop them.
                *len_ptr = 0;
            }
            Control::Stack(stack_len) => {
                array = self.stack_ptr();
                len = stack_len;
                self.set_stack_len(0);
            }
        }
        // BUGFIX: `alloc::alloc` with a zero-size layout is undefined
        // behaviour — return an empty boxed slice directly instead.
        if len == 0 {
            return Box::from_raw(slice::from_raw_parts_mut(NonNull::dangling().as_ptr(), 0));
        }
        let layout = Layout::from_size_align(mem::size_of::<T>() * len, mem::align_of::<T>()).unwrap();
        let buffer = alloc::alloc(layout) as *mut T;
        // BUGFIX: abort on allocation failure instead of copying through null.
        if buffer.is_null() {
            alloc::handle_alloc_error(layout);
        }
        ptr::copy_nonoverlapping(array, buffer, len);
        let slice = slice::from_raw_parts_mut(buffer, len);
        let output: Box<[T]> = Box::from_raw(slice);
        output
    }
}
// Removes the given range and returns an iterator over it. The vector's
// length is pre-truncated to `start`; `Drain::drop` moves the tail back
// down and restores the final length (leak-amplification safe).
pub fn drain<R>(&mut self, range: R) -> Drain<T>
where R: RangeBounds<usize>
{
let len = self.len();
let start = match range.start_bound() {
Included(&n) => n,
Excluded(&n) => n + 1,
Unbounded => 0,
};
let end = match range.end_bound() {
Included(&n) => n + 1,
Excluded(&n) => n,
Unbounded => len,
};
assert!(start <= end);
assert!(end <= len);
unsafe {
self.set_len(start);
let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().offset(start as isize),
end - start);
Drain {
tail_start: end,
tail_len: len - end,
iter: range_slice.iter(),
vec: NonNull::from(self),
}
}
}
// Replaces the given range with the contents of `replace_with`, yielding
// the removed elements. All real work happens in `Splice::drop`.
#[inline]
pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<I::IntoIter>
where R: RangeBounds<usize>, I: IntoIterator<Item=T>
{
Splice {
drain: self.drain(range),
replace_with: replace_with.into_iter(),
}
}
// Lazily removes elements for which `filter` returns true. The length is
// zeroed up front; `DrainFilter::drop` restores it after compaction.
pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<T, F>
where F: FnMut(&mut T) -> bool,
{
let old_len = self.len();
unsafe { self.set_len(0); }
DrainFilter {
vec: self,
idx: 0,
del: 0,
old_len,
pred: filter,
}
}
// Push-loop extend used by `Extend`/`FromIterator`; grows via `reserve`
// using the iterator's size hint when capacity runs out.
fn extend_desugared<I: Iterator<Item=T>>(&mut self, mut iterator: I) {
if mem::size_of::<T>() == 0 {
let mut count = 0;
while let Some(_element) = iterator.next() { count += 1; }
unsafe {
let len = self.len();
self.set_len(len + count);
}
return;
}
while let Some(element) = iterator.next() {
let len = self.len();
if len == self.capacity() {
let (lower, _) = iterator.size_hint();
self.reserve(lower.saturating_add(1));
}
unsafe {
// Write first, then bump the length (panic safety).
ptr::write(self.get_unchecked_mut(len), element);
self.set_len(len + 1);
}
}
}
// Pointer to the first inline element: `align_of::<T>()` bytes past the
// start of `u`, which keeps the element storage suitably aligned inside
// the 8-byte word. Only valid for the stack representation.
#[inline(always)]
fn stack_ptr(&mut self) -> *mut T {
unsafe {
((&mut self.u) as *mut _ as *mut u8).add(mem::align_of::<T>()) as *mut T
}
}
// Bumps the inline length by one.
#[inline(always)]
fn inc_stack_len(&mut self, old: usize) {
self.set_stack_len(old + 1);
}
// Writes the inline length into bits 0-2 of `u`, preserving the tag bit 3
// and the element bytes above. Inline lengths are always < 8.
#[inline(always)]
fn set_stack_len(&mut self, len: usize) {
assert!(len < 8);
let new_len = len as u64;
unsafe {
self.u = NonZeroU64::new_unchecked((self.u.get() & 0xFFFF_FFFF_FFFF_FFF8) | new_len);
}
}
/// Allocates a heap buffer for `capacity` elements, preceded by a header
/// holding `[len = 0, capacity]`. The allocation is at least 16-byte
/// aligned so the pointer's low bits can serve as the control tag.
#[cold]
fn allocate_array(capacity: usize) -> *mut u8 {
    unsafe {
        let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
        let align = cmp::max(16, mem::align_of::<T>());
        let layout = Layout::from_size_align(mem::size_of::<T>() * capacity + header_bytes, align).unwrap();
        let buffer = alloc::alloc(layout);
        // BUGFIX: on OOM `alloc` returns null, which passed the alignment
        // assert below (0 & 15 == 0) and was then written through — UB.
        // Abort via the standard allocation-error hook instead.
        if buffer.is_null() {
            alloc::handle_alloc_error(layout);
        }
        assert!(buffer as usize & 15 == 0);
        ptr::write(buffer as *mut usize, 0);
        ptr::write((buffer as *mut usize).add(1), capacity);
        buffer
    }
}
#[inline]
fn possibly_grow_heap(&mut self, arr: *mut u8) {
unsafe {
let len_ptr = arr as *mut usize;
let cap_ptr = len_ptr.add(1);
if *len_ptr == *cap_ptr {
self.realloc_heap(len_ptr, *len_ptr * 2);
}
}
}
/// Moves the heap buffer to a new allocation of `new_capacity` elements
/// and frees the old one. Callers always pass `new_capacity >= *len_ptr`.
#[inline(always)]
fn realloc_heap(&mut self, len_ptr: *mut usize, new_capacity: usize) {
    unsafe {
        let head_ptr = <V64<T>>::allocate_array(new_capacity);
        let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
        // BUGFIX: copy only the live elements. The old code took
        // min(new_capacity, old *capacity*), which copied uninitialized
        // slots whenever the buffer was not full.
        let to_move = cmp::min(new_capacity, *len_ptr);
        // One memcpy covers the header (old len) plus the live elements;
        // the capacity word is fixed up just below.
        ptr::copy_nonoverlapping(len_ptr as *mut u8, head_ptr, to_move * mem::size_of::<T>() + header_bytes);
        let new_cap_ptr = (head_ptr as *mut usize).add(1);
        *new_cap_ptr = new_capacity;
        self.u = NonZeroU64::new_unchecked(head_ptr as u64);
        // The old header is still intact, so its capacity word reproduces
        // the exact layout used to allocate it.
        let old = len_ptr as *mut u8;
        let align = cmp::max(16, mem::align_of::<T>());
        let layout = Layout::from_size_align(mem::size_of::<T>() * (*(len_ptr.add(1))) + header_bytes, align).unwrap();
        alloc::dealloc(old, layout);
    }
}
// Spills the inline representation to a fresh heap buffer with capacity
// max(stack_capacity * 2, heap_capacity_min) and switches `u` to heap mode.
fn move_to_heap(&mut self, len: usize, heap_capacity_min: usize) {
unsafe {
let stack_capacity = 7 / mem::size_of::<T>();
let heap_capacity = cmp::max(stack_capacity * 2, heap_capacity_min);
let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
let len_ptr = <V64<T>>::allocate_array(heap_capacity) as *mut usize;
let arr = (len_ptr as *mut u8).add(header_bytes) as *mut T;
// NOTE(review): copies the full inline capacity (possibly including
// unused slots), not just `len`; the source stays within the 8-byte
// word, and the extra bytes are never read as elements — confirm.
ptr::copy_nonoverlapping(self.stack_ptr(), arr, stack_capacity);
// `allocate_array` wrote len = 0, so this stores the real length.
*len_ptr += len;
self.u = NonZeroU64::new_unchecked(len_ptr as u64);
}
}
// Appends to a heap buffer; caller guarantees spare capacity exists.
#[inline]
fn heap_push(&mut self, val: T) {
unsafe {
let len_ptr = self.u.get() as usize as *mut usize;
let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
let arr = (len_ptr as *mut u8).add(header_bytes) as *mut T;
ptr::write(arr.add(*len_ptr), val);
*len_ptr += 1;
}
}
// Appends to the inline storage; caller guarantees it still fits.
#[inline]
fn stack_push(&mut self, val: T, len: usize) {
unsafe {
let start = self.stack_ptr().add(len);
ptr::write_unaligned(start, val);
self.inc_stack_len(len);
}
}
}
impl<T: Clone> V64<T> {
// Builds a vector of `count` clones of `elem`.
pub fn from_elem(elem: T, count: usize) -> V64<T> {
let mut v64 = V64::with_capacity(count);
for _i in 0..count as isize {
v64.push(elem.clone());
}
v64
}
// Appends clones of every element in `slice`.
pub fn extend_from_slice(&mut self, slice: &[T]) {
self.reserve(slice.len());
unsafe {
let mut len = self.len();
let mut dst = self.get_unchecked_mut(len) as *mut T;
for t in slice.iter() {
ptr::write(dst, t.clone());
dst = dst.add(1);
len += 1;
// Length is bumped after every write so a panicking `clone` cannot
// leak or double-drop already-written elements.
self.set_len(len); }
}
}
}
impl<T> Drop for V64<T> {
// Drops all elements (when `T` needs it) and frees the heap buffer using
// the same layout `allocate_array` produced. Afterwards `u` is reset to
// the empty-stack encoding so the value is inert.
fn drop(&mut self) {
unsafe {
if mem::size_of::<T>() == 0 {
// ZST elements are never dropped anywhere in this file.
self.u = NonZeroU64::new_unchecked(ZST_MASK);
return;
}
match self.control() {
Control::Heap(ptr) => {
let len_ptr = ptr as *mut usize;
let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
if mem::needs_drop::<T>() {
let mut cur = (len_ptr as *mut u8).add(header_bytes) as *mut T;
let end = cur.add(*len_ptr);
while cur < end {
ptr::drop_in_place(cur);
cur = cur.add(1);
}
}
// Reconstruct the allocation layout from the stored capacity.
let align = cmp::max(16, mem::align_of::<T>());
let layout = Layout::from_size_align(mem::size_of::<T>() * (*(len_ptr.add(1))) + header_bytes, align).unwrap();
alloc::dealloc(ptr, layout);
}
Control::Stack(len) => {
if mem::needs_drop::<T>() {
let mut cur = self.stack_ptr();
let end = cur.add(len);
while cur < end {
ptr::drop_in_place(cur);
cur = cur.add(1);
}
}
}
}
self.u = NonZeroU64::new_unchecked(8);
}
}
}
impl<T> V64<T> {
// Reinterprets the element type without copying. Runtime asserts require
// identical size and alignment and that neither type has a destructor;
// the caller must still guarantee every `T` bit pattern is a valid `X`.
pub unsafe fn transmute<X>(self) -> V64<X> {
assert!(mem::size_of::<X>() == mem::size_of::<T>());
assert!(mem::align_of::<X>() == mem::align_of::<T>());
assert!(!mem::needs_drop::<X>());
assert!(!mem::needs_drop::<T>());
let v: V64<X> = V64 { u: self.u, _marker: marker::PhantomData };
// Forget `self` so the (shared) buffer is not freed twice.
mem::forget(self);
v
}
}
impl<T, I> Index<I> for V64<T>
where
    I: SliceIndex<[T]>,
{
    type Output = I::Output;
    /// Indexing delegates to the slice view obtained through `Deref`.
    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        &(**self)[index]
    }
}
impl<T, I> IndexMut<I> for V64<T>
where
    I: SliceIndex<[T]>,
{
    /// Mutable indexing delegates to the slice view obtained through `DerefMut`.
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        &mut (**self)[index]
    }
}
impl<T> Deref for V64<T> {
type Target = [T];
// Builds a slice over whichever representation is live.
fn deref(&self) -> &[T] {
unsafe {
if mem::size_of::<T>() == 0 { return slice::from_raw_parts(NonNull::dangling().as_ptr(), self.len()); }
match self.control() {
Control::Heap(ptr) => {
let len_ptr = ptr as *mut usize;
let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
let arr = (len_ptr as *mut u8).add(header_bytes) as *mut T;
slice::from_raw_parts(arr, *len_ptr)
}
Control::Stack(len) => {
// Const re-derivation of `stack_ptr` (which needs `&mut self`).
let arr = ((&self.u) as *const _ as *const u8).add(mem::align_of::<T>()) as *const T;
slice::from_raw_parts(arr, len)
}
}
}
}
}
impl<T> DerefMut for V64<T> {
// Mutable counterpart of `deref`.
fn deref_mut(&mut self) -> &mut [T] {
unsafe {
if mem::size_of::<T>() == 0 { return slice::from_raw_parts_mut(NonNull::dangling().as_ptr(), self.len()); }
match self.control() {
Control::Heap(ptr) => {
let len_ptr = ptr as *mut usize;
let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
let arr = (len_ptr as *mut u8).add(header_bytes) as *mut T;
slice::from_raw_parts_mut(arr, *len_ptr)
}
Control::Stack(len) => {
let arr = self.stack_ptr();
slice::from_raw_parts_mut(arr, len)
}
}
}
}
}
// Generates `PartialEq<$Rhs> for $Lhs` by comparing both sides as slices
// (mirrors the macro of the same name in std's Vec implementation).
macro_rules! __impl_slice_eq1 {
($Lhs: ty, $Rhs: ty) => {
__impl_slice_eq1! { $Lhs, $Rhs, Sized }
};
($Lhs: ty, $Rhs: ty, $Bound: ident) => {
impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs where A: PartialEq<B> {
#[inline]
fn eq(&self, other: &$Rhs) -> bool { self[..] == other[..] }
#[inline]
fn ne(&self, other: &$Rhs) -> bool { self[..] != other[..] }
}
}
}
// V64 can be compared with other V64s and with slices.
__impl_slice_eq1! { V64<A>, V64<B> }
__impl_slice_eq1! { V64<A>, &'b [B] }
__impl_slice_eq1! { V64<A>, &'b mut [B] }
// Comparisons against fixed-size arrays, for lengths 0..=32 (pre-const-
// generics idiom, also mirroring std).
macro_rules! array_impls {
($($N: expr)+) => {
$(
__impl_slice_eq1! { V64<A>, [B; $N] }
__impl_slice_eq1! { V64<A>, &'b [B; $N] }
)+
}
}
array_impls! {
0 1 2 3 4 5 6 7 8 9
10 11 12 13 14 15 16 17 18 19
20 21 22 23 24 25 26 27 28 29
30 31 32
}
impl<T: fmt::Debug> fmt::Debug for V64<T> {
    /// Formats exactly like a slice, e.g. `[1, 2, 3]`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let contents: &[T] = self;
        fmt::Debug::fmt(contents, f)
    }
}
// Owning iterator over a `V64`.
pub struct IntoIter<T> {
// Heap buffer to free on drop; null for ZST and empty-stack iterators.
buf: *mut u8,
_marker: marker::PhantomData<T>,
// Next element to yield; for ZSTs this advances one *byte* per element.
ptr: *const T,
// One past the last element (same byte convention for ZSTs).
end: *const T,
// Whether `buf` points at a heap allocation that must be deallocated.
is_heap: bool,
}
impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
// Debug-prints the elements not yet yielded.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("IntoIter")
.field(&self.as_slice())
.finish()
}
}
impl<T> IntoIterator for V64<T> {
type Item = T;
type IntoIter = IntoIter<T>;
#[inline]
fn into_iter(mut self) -> IntoIter<T> {
unsafe {
if mem::size_of::<T>() == 0 {
// ZST: encode the remaining count as a byte distance between
// two dangling pointers; nothing to allocate or free.
let ptr = self.as_mut_ptr();
let end = (ptr as *const u8).add(self.len()) as *const T;
mem::forget(self);
return IntoIter {
buf: ptr::null_mut(),
_marker: marker::PhantomData,
ptr,
end,
is_heap: false,
};
}
// Bit 3 set means the stack representation (heap pointers are
// 16-byte aligned, so their bit 3 is always clear).
if self.u.get() & 8 == 8 {
let len = (self.u.get() & 7) as usize;
if len == 0 {
// Empty: an aligned dangling begin==end pair; `self` is
// dropped normally here (nothing to free).
return IntoIter {
buf: ptr::null_mut(),
_marker: marker::PhantomData,
ptr: mem::align_of::<T>() as *mut T,
end: mem::align_of::<T>() as *mut T,
is_heap: false,
};
}
// Non-empty inline data: spill to the heap so the iterator can
// own a stable buffer independent of the `V64` word.
self.move_to_heap(len, 0);
}
let len_ptr = self.u.get() as usize as *mut u8 as *mut usize;
let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
let begin = (len_ptr as *mut u8).add(header_bytes) as *mut T;
let end = begin.offset(*len_ptr as isize) as *const T;
let buf = len_ptr as *mut u8;
// The iterator takes over the allocation; prevent V64's Drop.
mem::forget(self);
IntoIter {
buf,
_marker: marker::PhantomData,
ptr: begin,
end,
is_heap: true,
}
}
}
}
impl<'a, T> IntoIterator for &'a V64<T> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;
    /// Borrowing iteration; equivalent to `v.iter()`.
    fn into_iter(self) -> slice::Iter<'a, T> {
        self.as_slice().iter()
    }
}
impl<'a, T> IntoIterator for &'a mut V64<T> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;
    /// Mutably borrowing iteration; equivalent to `v.iter_mut()`.
    fn into_iter(self) -> slice::IterMut<'a, T> {
        self.as_mut_slice().iter_mut()
    }
}
impl<T> IntoIter<T> {
// Remaining (not yet yielded) elements as a slice.
// `self.len()` comes from `ExactSizeIterator`, i.e. from `size_hint`.
pub fn as_slice(&self) -> &[T] {
unsafe {
slice::from_raw_parts(self.ptr, self.len())
}
}
// Mutable view of the remaining elements.
pub fn as_mut_slice(&mut self) -> &mut [T] {
unsafe {
slice::from_raw_parts_mut(self.ptr as *mut T, self.len())
}
}
}
// SAFETY: IntoIter owns its buffer exclusively, so it is as Send/Sync as
// the elements it carries.
unsafe impl<T: Send> Send for IntoIter<T> {}
unsafe impl<T: Sync> Sync for IntoIter<T> {}
impl<T> Iterator for IntoIter<T> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
unsafe {
if self.ptr as *const _ == self.end {
None
} else {
if mem::size_of::<T>() == 0 {
// ZST: step one byte (the pointer distance encodes the count)
// and fabricate a value from a dangling pointer.
self.ptr = (self.ptr as *mut u8).add(1) as *mut T;
Some(ptr::read(NonNull::dangling().as_ptr()))
} else {
// Ownership of the element moves out via bitwise read.
let old = self.ptr;
self.ptr = self.ptr.offset(1);
Some(ptr::read(old))
}
}
}
}
// Exact remaining count: raw byte distance for ZSTs, element distance
// otherwise.
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let exact = if mem::size_of::<T>() == 0 {
(self.end as usize).wrapping_sub(self.ptr as usize)
} else {
(self.end as usize).wrapping_sub(self.ptr as usize) / mem::size_of::<T>()
};
(exact, Some(exact))
}
#[inline]
fn count(self) -> usize {
self.len()
}
}
impl<T> DoubleEndedIterator for IntoIter<T> {
// Yields from the back by retreating `end`.
#[inline]
fn next_back(&mut self) -> Option<T> {
unsafe {
if self.end == self.ptr {
None
} else {
if mem::size_of::<T>() == 0 {
// ZST: shrink the byte-distance counter by one.
self.end = (self.end as *const i8).sub(1) as *mut T;
Some(ptr::read(NonNull::dangling().as_ptr()))
} else {
self.end = self.end.offset(-1);
Some(ptr::read(self.end))
}
}
}
}
}
// `size_hint` is exact, so these marker impls are sound.
impl<T> ExactSizeIterator for IntoIter<T> {}
impl<T> FusedIterator for IntoIter<T> {}
impl<T: Clone> Clone for IntoIter<T> {
// Clones the remaining elements into a fresh V64 and iterates that.
fn clone(&self) -> IntoIter<T> {
self.as_slice().to_v64().into_iter()
}
}
// Borrowed-to-owned conversion into a `V64` (analogue of `ToVec`).
pub trait ToV64<T> {
fn to_v64(&self) -> V64<T>;
}
// Consuming conversion into a `V64` (analogue of `Into<Vec<_>>`).
pub trait IntoV64<T> {
fn into_v64(self) -> V64<T>;
}
impl<T: Clone> ToV64<T> for [T] {
    /// Clones every element of the slice into a freshly sized `V64`.
    fn to_v64(&self) -> V64<T> {
        let mut out = V64::with_capacity(self.len());
        out.extend_desugared(self.iter().cloned());
        out
    }
}
impl<T> IntoV64<T> for Box<[T]> {
fn into_v64(self) -> V64<T> {
unsafe {
let len = self.len();
let ptr = self.as_ptr();
let mut vec = V64::with_capacity(len);
ptr::copy(ptr, vec.as_mut_ptr(), len);
vec.set_len(len);
mem::forget(self); vec
}
}
}
impl<T> Drop for IntoIter<T> {
fn drop(&mut self) {
// ZST iterators own no buffer (and ZST drops are skipped file-wide).
if mem::size_of::<T>() == 0 { return; }
// Drop every element not yet yielded.
for _x in self.by_ref() {}
unsafe {
if self.is_heap {
// Rebuild the allocation layout from the capacity stored in the
// buffer header (same formula as `allocate_array`).
let len_ptr = self.buf as *mut usize;
let header_bytes = cmp::max(mem::size_of::<usize>() * 2, mem::align_of::<T>());
let align = cmp::max(16, mem::align_of::<T>());
let layout = Layout::from_size_align(mem::size_of::<T>() * (*(len_ptr.add(1))) + header_bytes, align).unwrap();
alloc::dealloc(self.buf, layout);
self.is_heap = false;
}
}
}
}
impl<T> Extend<T> for V64<T> {
// Appends every item produced by the iterator.
#[inline]
fn extend<I: IntoIterator<Item=T>>(&mut self, iter: I) {
self.extend_desugared(iter.into_iter())
}
}
impl<'a, T: 'a + Copy> Extend<&'a T> for V64<T> {
// Copying extend for iterators over references.
#[inline]
fn extend<I: IntoIterator<Item=&'a T>>(&mut self, iter: I) {
self.extend_desugared(iter.into_iter().cloned())
}
}
impl<T> Default for V64<T> {
// The default vector is empty.
fn default() -> V64<T> {
V64::new()
}
}
// Cloning conversion from a borrowed slice.
impl<'a, T: Clone> From<&'a [T]> for V64<T> {
fn from(s: &'a [T]) -> V64<T> {
s.to_v64()
}
}
// Cloning conversion from a mutable slice.
impl<'a, T: Clone> From<&'a mut [T]> for V64<T> {
fn from(s: &'a mut [T]) -> V64<T> {
s.to_v64()
}
}
// Consuming conversion from a boxed slice.
impl<T> From<Box<[T]>> for V64<T> {
fn from(s: Box<[T]>) -> V64<T> {
s.into_v64()
}
}
// Conversion from a Cow whose owned form is a V64.
impl<'a, T> From<Cow<'a, [T]>> for V64<T> where [T]: ToOwned<Owned=V64<T>> {
fn from(s: Cow<'a, [T]>) -> V64<T> {
s.into_owned()
}
}
// Byte vector from a string slice (copies the UTF-8 bytes).
impl<'a> From<&'a str> for V64<u8> {
fn from(s: &'a str) -> V64<u8> {
From::from(s.as_bytes())
}
}
impl<T> FromIterator<T> for V64<T> {
    /// Collects an iterator into a `V64`, pre-sizing from the iterator's
    /// lower size-hint bound to reduce regrowth.
    #[inline]
    fn from_iter<I: IntoIterator<Item=T>>(iter: I) -> V64<T> {
        let source = iter.into_iter();
        let (lower, _upper) = source.size_hint();
        let mut out = V64::with_capacity(lower);
        out.extend_desugared(source);
        out
    }
}
// Draining iterator returned by `V64::drain`. While it lives, the source
// vector's length is truncated to the range start; `Drop` moves the tail
// back into place.
pub struct Drain<'a, T: 'a> {
// Index (in the original vector) of the first element after the range.
tail_start: usize,
// Number of elements after the drained range.
tail_len: usize,
// Borrowed iterator over the drained range (elements read out bitwise).
iter: slice::Iter<'a, T>,
// Back-pointer to the source vector, used by `Drop`/`Splice`.
vec: NonNull<V64<T>>,
}
impl<'a, T: 'a + fmt::Debug> fmt::Debug for Drain<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Drain")
.field(&self.iter.as_slice())
.finish()
}
}
// SAFETY: Drain is just a view plus a raw back-pointer into the vector it
// mutably borrows, so it inherits the element type's thread-safety.
unsafe impl<'a, T: Sync> Sync for Drain<'a, T> {}
unsafe impl<'a, T: Send> Send for Drain<'a, T> {}
impl<'a, T> Iterator for Drain<'a, T> {
type Item = T;
// Each yielded element is moved out of the vector with a bitwise read;
// the slots are considered uninitialized afterwards.
#[inline]
fn next(&mut self) -> Option<T> {
self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) })
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<T> {
self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) })
}
}
impl<'a, T> Drop for Drain<'a, T> {
// Drops any remaining drained elements, then slides the tail down to
// close the gap and restores the vector's final length.
fn drop(&mut self) {
self.for_each(drop);
if self.tail_len > 0 {
unsafe {
let source_vec = self.vec.as_mut();
// `start` is the truncated length set by `V64::drain`.
let start = source_vec.len();
let tail = self.tail_start;
if tail != start {
let src = source_vec.as_ptr().offset(tail as isize);
let dst = source_vec.as_mut_ptr().offset(start as isize);
ptr::copy(src, dst, self.tail_len);
}
source_vec.set_len(start + self.tail_len);
}
}
}
}
impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
impl<'a, T> FusedIterator for Drain<'a, T> {}
// Iterator returned by `V64::splice`: yields the drained range while the
// replacement happens in `Drop` (mirrors std's `vec::Splice`).
#[derive(Debug)]
pub struct Splice<'a, I: Iterator + 'a> {
drain: Drain<'a, I::Item>,
replace_with: I,
}
impl<'a, I: Iterator> Iterator for Splice<'a, I> {
type Item = I::Item;
fn next(&mut self) -> Option<Self::Item> {
self.drain.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.drain.size_hint()
}
}
impl<'a, I: Iterator> DoubleEndedIterator for Splice<'a, I> {
fn next_back(&mut self) -> Option<Self::Item> {
self.drain.next_back()
}
}
impl<'a, I: Iterator> ExactSizeIterator for Splice<'a, I> {}
impl<'a, I: Iterator> Drop for Splice<'a, I> {
// Performs the actual replacement (same algorithm as std's Splice):
// drain leftovers, fill the vacated range, grow by the size hint, and
// finally collect any surplus replacement items and insert them.
fn drop(&mut self) {
self.drain.by_ref().for_each(drop);
unsafe {
if self.drain.tail_len == 0 {
// Range ran to the end: simply append the replacements.
self.drain.vec.as_mut().extend(self.replace_with.by_ref());
return;
}
// First pass: fill the hole left by the drained range.
if !self.drain.fill(&mut self.replace_with) {
return;
}
// There may be more replacement items; reserve using the hint.
let (lower_bound, _upper_bound) = self.replace_with.size_hint();
if lower_bound > 0 {
self.drain.move_tail(lower_bound);
if !self.drain.fill(&mut self.replace_with) {
return;
}
}
// Whatever is left cannot be sized in advance: collect, then
// shift the tail once more and copy the collected items in.
let mut collected = self.replace_with.by_ref().collect::<V64<I::Item>>().into_iter();
if collected.len() > 0 {
self.drain.move_tail(collected.len());
let filled = self.drain.fill(&mut collected);
debug_assert!(filled);
debug_assert_eq!(collected.len(), 0);
}
}
}
}
impl<'a, T> Drain<'a, T> {
// Writes items from `replace_with` into the vacated range
// [vec.len(), tail_start), bumping the vector's length as it goes.
// Returns false when the iterator ran out before the range was full.
unsafe fn fill<I: Iterator<Item=T>>(&mut self, replace_with: &mut I) -> bool {
let vec = self.vec.as_mut();
let range_start = vec.len();
let range_end = self.tail_start;
let range_slice = slice::from_raw_parts_mut(
vec.as_mut_ptr().offset(range_start as isize),
range_end - range_start);
let mut count = range_start;
for place in range_slice {
if let Some(new_item) = replace_with.next() {
ptr::write(place, new_item);
// Length is updated per element so a panic cannot expose
// uninitialized slots.
count += 1;
vec.set_len(count);
} else {
return false;
}
}
true
}
// Shifts the tail `extra_capacity` slots to the right (reserving space
// first) to make room for additional replacement elements.
unsafe fn move_tail(&mut self, extra_capacity: usize) {
let vec = self.vec.as_mut();
vec.reserve(extra_capacity);
let new_tail_start = self.tail_start + extra_capacity;
let src = vec.as_ptr().offset(self.tail_start as isize);
let dst = vec.as_mut_ptr().offset(new_tail_start as isize);
ptr::copy(src, dst, self.tail_len);
self.tail_start = new_tail_start;
}
}
// Clone into an existing `V64`, reusing its storage (analogue of
// `slice::clone_into` / `ToOwned::clone_into`).
pub trait CloneIntoV64<T> {
fn clone_into_v64(&self, target: &mut V64<T>);
}
impl<T: Clone> CloneIntoV64<T> for [T] {
fn clone_into_v64(&self, target: &mut V64<T>) {
// Drop surplus elements, clone over the overlapping prefix, then
// append clones of whatever remains of `self`.
target.truncate(self.len());
let len = target.len();
target.clone_from_slice(&self[..len]);
target.extend_from_slice(&self[len..]);
}
}
impl<T: Clone> Clone for V64<T> {
fn clone(&self) -> V64<T> {
<[T]>::to_v64(&**self)
}
fn clone_from(&mut self, other: &V64<T>) {
other.as_slice().clone_into_v64(self);
}
}
// Iterator returned by `V64::drain_filter`: yields elements matching the
// predicate and compacts the survivors in place; `Drop` finishes the scan
// and restores the vector's length.
#[derive(Debug)]
pub struct DrainFilter<'a, T: 'a, F>
where F: FnMut(&mut T) -> bool,
{
vec: &'a mut V64<T>,
// Next index to examine (within the original length).
idx: usize,
// Number of elements removed so far; survivors shift left by this much.
del: usize,
// Length of the vector before draining began.
old_len: usize,
pred: F,
}
impl<'a, T, F> Iterator for DrainFilter<'a, T, F>
where F: FnMut(&mut T) -> bool,
{
type Item = T;
fn next(&mut self) -> Option<T> {
unsafe {
while self.idx != self.old_len {
let i = self.idx;
self.idx += 1;
// The vector's length is 0 during the drain, so rebuild a view
// of the original elements.
let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
if (self.pred)(&mut v[i]) {
self.del += 1;
// Ownership moves to the caller via bitwise read.
return Some(ptr::read(&v[i]));
} else if self.del > 0 {
// Shift the kept element down over the holes.
let del = self.del;
let src: *const T = &v[i];
let dst: *mut T = &mut v[i - del];
ptr::copy_nonoverlapping(src, dst, 1);
}
}
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(0, Some(self.old_len - self.idx))
}
}
impl<'a, T, F> Drop for DrainFilter<'a, T, F>
where F: FnMut(&mut T) -> bool,
{
// Runs the drain to completion, then restores the compacted length.
fn drop(&mut self) {
self.for_each(drop);
unsafe {
self.vec.set_len(self.old_len - self.del);
}
}
}
impl<T: PartialEq> V64<T> {
    /// Removes consecutive repeated elements, keeping the first of each run.
    #[inline]
    pub fn dedup(&mut self) {
        self.dedup_by(|lhs, rhs| lhs == rhs)
    }
    /// Removes and returns the first element equal to `item`, if present.
    pub fn remove_item(&mut self, item: &T) -> Option<T> {
        let pos = self.iter().position(|candidate| candidate == item)?;
        Some(self.remove(pos))
    }
}
impl<T> Borrow<[T]> for V64<T> {
    /// A `V64<T>` can be borrowed as a plain slice.
    fn borrow(&self) -> &[T] {
        self.as_slice()
    }
}
impl<T> BorrowMut<[T]> for V64<T> {
    /// A `V64<T>` can be mutably borrowed as a plain slice.
    fn borrow_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}