use core::{
alloc::Layout,
cmp,
marker::{PhantomData, Unsize},
mem,
ptr::{self, NonNull},
slice::{Iter, IterMut},
};
use emplacable::{Emplacable, EmplacableFn, Emplacer};
use crate::unwrap_try_reserve_result;
use super::{AlignedVecImpl, AlignedVecProvider, TryReserveError, UnsizedVecProvider};
// Backing implementation of `UnsizedVecProvider` for *sized* element types:
// when `T: Sized`, an `UnsizedVec<T>` can simply delegate to `alloc::vec::Vec<T>`.
// Because size and alignment of `T` are compile-time constants here, the
// associated `Align`/`Size` metadata types are unit — the `_align`/`_size`
// parameters carry no runtime information and are ignored.
impl<T> UnsizedVecProvider<T> for ::alloc::vec::Vec<T> {
// No runtime alignment metadata needed for sized `T`.
type Align = ();
// No runtime size metadata needed for sized `T`.
type Size = ();
type Iter<'a> = Iter<'a, T> where T: 'a;
type IterMut<'a> = IterMut<'a, T> where T: 'a;
// Both "new" constants are just an empty `Vec`; the align-1 vs. align-ptr
// distinction only matters for genuinely unsized providers.
const NEW_ALIGN_1: Self = ::alloc::vec::Vec::new();
const NEW_ALIGN_PTR: Self = Self::NEW_ALIGN_1;
#[inline]
fn capacity(&self) -> usize {
self.capacity()
}
#[inline]
fn byte_capacity(&self) -> usize {
// SAFETY: `Vec` guarantees its allocation never exceeds `isize::MAX` bytes,
// so `capacity * size_of::<T>()` cannot overflow `usize`.
unsafe { self.capacity().unchecked_mul(mem::size_of::<T>()) }
}
#[inline]
fn align(&self) -> usize {
// Alignment is statically known for sized `T`.
mem::align_of::<T>()
}
#[inline]
fn try_reserve_exact_capacity_bytes_align(
&mut self,
additional: usize,
additional_bytes: usize,
_align: (),
) -> Result<(), TryReserveError> {
// Convert the byte request into an element count (rounding up) and
// reserve for whichever request is larger.
// NOTE(review): `div_ceil` divides by `size_of::<T>()`, which is 0 for a
// ZST — this would panic. Presumably ZSTs never reach this provider or
// always pass `additional_bytes == 0`; verify against callers.
let additional = cmp::max(additional, additional_bytes.div_ceil(mem::size_of::<T>()));
Ok(self.try_reserve_exact(additional)?)
}
#[inline]
fn shrink_capacity_bytes_align_to(
&mut self,
min_capacity: usize,
min_byte_capacity: usize,
_align: (),
) {
// Mirror of reserve above: keep enough capacity to satisfy both the
// element-count and byte-count lower bounds, then shrink to that.
let min_capacity = cmp::max(
min_capacity,
min_byte_capacity.div_ceil(mem::size_of::<T>()),
);
self.shrink_to(min_capacity);
}
#[inline]
unsafe fn insert_unchecked(&mut self, index: usize, element: T, _size: ()) {
// Caller contract: spare capacity for at least one element, and
// `index <= self.len()`.
debug_assert!(self.capacity() > self.len());
unsafe {
// SAFETY: `index <= len`, so the subtraction cannot underflow; the
// shifted range stays inside the (debug-asserted) spare capacity,
// and `len + 1 <= capacity <= isize::MAX` so the add cannot overflow.
let how_much_to_move = self.len().unchecked_sub(index);
let start_ptr = self.as_mut_ptr().add(index);
// Open a one-element gap at `index`, write the new element into it,
// then publish the new length.
ptr::copy(start_ptr, start_ptr.add(1), how_much_to_move);
start_ptr.write(element);
self.set_len(self.len().unchecked_add(1));
}
}
unsafe fn insert_with_unchecked(
&mut self,
index: usize,
value: Emplacable<T, impl EmplacableFn<T>>,
) {
// Panic guard: if the emplacer's inner closure panics after we have
// opened a one-element gap at `index`, this guard's `Drop` shifts the
// tail back down so the `Vec` (whose `len` has NOT yet been bumped)
// is left contiguous and valid. On success it is `mem::forget`-ten.
struct ElementShifterBacker<T> {
ptr_to_index: *mut T,
num_elems_to_shift: usize,
}
impl<T> Drop for ElementShifterBacker<T> {
#[inline]
fn drop(&mut self) {
// SAFETY (caller-upheld): closes the gap by copying the shifted
// tail back down one slot.
unsafe {
ptr::copy(
self.ptr_to_index.add(1),
self.ptr_to_index,
self.num_elems_to_shift,
);
}
}
}
debug_assert!(index <= self.len());
let emplacable_closure = value.into_fn();
// Emplacer protocol: we hand the emplacable a closure that receives a
// layout (ignored — statically known here), unit metadata, and an inner
// closure expecting the destination pointer to write the value into.
let emplacer_closure =
&mut move |_, (), inner_closure: &mut dyn FnMut(*mut PhantomData<T>)| {
// Make room for one more element; aborts (via unwrap helper) on failure.
let reserve_result = self.try_reserve_exact_capacity_bytes_align(1, 0, ());
unwrap_try_reserve_result(reserve_result);
let ptr_to_elem = unsafe { self.as_mut_ptr().add(index) };
let num_elems_to_shift = unsafe { self.len().unchecked_sub(index) };
// Arm the panic guard before opening the gap.
let shifter_backer: ElementShifterBacker<T> = ElementShifterBacker {
ptr_to_index: ptr_to_elem,
num_elems_to_shift,
};
// Open the gap, let the emplacable write the element in place,
// then disarm the guard and publish the new length.
unsafe { ptr::copy(ptr_to_elem, ptr_to_elem.add(1), num_elems_to_shift) }
inner_closure(ptr_to_elem.cast());
mem::forget(shifter_backer);
unsafe { self.set_len(self.len().unchecked_add(1)) };
};
// SAFETY: the closure above fulfills the `Emplacer` contract — it writes
// a valid `T` at the pointer it passes to `inner_closure`.
let emplacer = unsafe { Emplacer::from_fn(emplacer_closure) };
emplacable_closure(emplacer);
}
unsafe fn remove_into_unchecked(&mut self, index: usize, emplacer: &mut Emplacer<T>) {
// Caller contract: `index < self.len()`.
debug_assert!(index < self.len());
let new_len = unsafe { self.len().unchecked_sub(1) };
// Reduce the length up front, before running user code via the emplacer.
unsafe { self.set_len(new_len) };
let thin_ptr_to_elem = unsafe { self.as_ptr().add(index) };
let how_much_to_move = unsafe { new_len.unchecked_sub(index) };
let emplacer_closure = unsafe { emplacer.into_fn() };
emplacer_closure(Layout::new::<T>(), (), &mut |out_ptr| {
if !out_ptr.is_null() {
// Move the removed element out to the destination the emplacer chose.
unsafe {
ptr::copy_nonoverlapping(thin_ptr_to_elem, out_ptr.cast(), 1);
}
} else {
// A null destination means the value is unwanted: drop it in place.
let typed_ptr_to_elem: *mut T = thin_ptr_to_elem.cast_mut().cast();
unsafe { typed_ptr_to_elem.drop_in_place() }
}
});
// Close the hole by shifting the tail down one slot.
unsafe {
ptr::copy(
thin_ptr_to_elem.add(1),
thin_ptr_to_elem.cast_mut(),
how_much_to_move,
);
}
}
#[inline]
unsafe fn push_unchecked(&mut self, value: T, _size: ()) {
// Caller contract: spare capacity for at least one element.
debug_assert!(self.capacity() - self.len() > 0);
let old_len = self.len();
unsafe {
// Write past the end, then publish the new length.
self.as_mut_ptr().add(old_len).write(value);
self.set_len(old_len.unchecked_add(1));
}
}
fn push_with(&mut self, value: Emplacable<T, impl EmplacableFn<T>>) {
let emplacable_closure = value.into_fn();
// Same emplacer protocol as `insert_with_unchecked`, but appending at the
// end, so no gap/guard is needed: if the inner closure panics, `len` was
// never bumped and the vec remains valid.
let emplacer_closure =
&mut move |_, (), inner_closure: &mut dyn FnMut(*mut PhantomData<T>)| {
let reserve_result = self.try_reserve_exact_capacity_bytes_align(1, 0, ());
unwrap_try_reserve_result(reserve_result);
let ptr_to_elem = unsafe { self.as_mut_ptr().add(self.len()) };
inner_closure(ptr_to_elem.cast());
unsafe { self.set_len(self.len().unchecked_add(1)) };
};
// SAFETY: the closure writes a valid `T` at the pointer it hands out.
let emplacer = unsafe { Emplacer::from_fn(emplacer_closure) };
emplacable_closure(emplacer);
}
unsafe fn pop_into_unchecked(&mut self, emplacer: &mut Emplacer<T>) {
// Caller contract: the vec is non-empty.
debug_assert!(!self.is_empty());
let new_len = unsafe { self.len().unchecked_sub(1) };
// Shrink the length first so the popped slot is outside the vec before
// any user code runs.
unsafe { self.set_len(new_len) };
let ptr_to_elem = unsafe { self.as_ptr().add(new_len) };
let emplace_closure = unsafe { emplacer.into_fn() };
emplace_closure(Layout::new::<T>(), (), &mut |out_ptr| {
if !out_ptr.is_null() {
// Move the popped element to the emplacer's destination.
unsafe {
ptr::copy_nonoverlapping(ptr_to_elem, out_ptr.cast(), 1);
}
} else {
// Null destination: value unwanted, drop it in place.
let typed_ptr_to_elem: *mut T = ptr_to_elem.cast_mut().cast();
unsafe { typed_ptr_to_elem.drop_in_place() }
}
});
}
#[inline]
fn len(&self) -> usize {
self.len()
}
#[inline]
unsafe fn get_unchecked_raw(&self, index: usize) -> NonNull<T> {
// Caller contract: `index < self.len()`.
debug_assert!(index < self.len());
// SAFETY: in-bounds element pointers are never null.
unsafe { NonNull::new_unchecked(self.as_ptr().add(index).cast_mut()).cast() }
}
#[inline]
fn iter(&self) -> Self::Iter<'_> {
let slice: &[T] = self;
slice.iter()
}
#[inline]
fn iter_mut(&mut self) -> Self::IterMut<'_> {
let slice: &mut [T] = self;
slice.iter_mut()
}
fn from_sized<S>(_: ::alloc::vec::Vec<S>) -> Self
where
S: Unsize<T>,
{
// Unsizing only ever produces an unsized target; `T` is sized in this
// impl, so this conversion can never be requested.
unreachable!("can't unsize to a sized type, that would make 0 sense")
}
}
// Marker impl: a plain `Vec<T>` already stores correctly-aligned sized
// elements, so it satisfies `AlignedVecProvider` with no extra methods.
impl<T> AlignedVecProvider<T> for ::alloc::vec::Vec<T> {}
// Blanket impl: every sized `T` selects `alloc::vec::Vec<T>` as its aligned
// vec backing implementation.
impl<T> AlignedVecImpl for T {
type Impl = ::alloc::vec::Vec<T>;
}