use crate::memory::{Memory, MemoryLayoutError};
use core::{
cmp::Ordering,
hash::Hash,
marker::PhantomData,
mem::MaybeUninit,
ops::{Deref, DerefMut, Index, IndexMut},
ptr,
slice::{self, SliceIndex},
};
/// A `Vec`-like container of `Copy` elements backed by a caller-provided
/// `Memory` region `A` instead of the global allocator.
///
/// The logical element count is stored by the `Memory` implementation itself
/// (accessed via `len()`/`len_mut()`); the capacity is however many aligned
/// `T`s fit in the region.
pub struct MemVec<'a, T: Copy, A: 'a + Memory> {
    // Backing storage; also carries the logical length.
    mem: A,
    // Ties the element type `T` and lifetime `'a` to the container without
    // storing any `T` directly.
    _marker: PhantomData<&'a T>,
}
impl<'a, T: Copy, A: 'a + Memory> MemVec<'a, T, A> {
    /// Wraps a raw `Memory` region as a vector of `T`.
    ///
    /// On failure the memory is handed back to the caller together with the
    /// reason: `MisalignedMemory` when the region does not start on a `T`
    /// boundary, `CapacityExceeded` when the stored length is larger than
    /// the number of `T`s that fit in the region.
    ///
    /// # Safety
    /// The first `len` elements of the region are treated as live `T`
    /// values; callers must ensure the region actually holds initialized
    /// data of that shape (this cannot be verified here).
    pub unsafe fn try_from_memory(mem: A) -> Result<Self, (A, MemoryLayoutError)> {
        // `align_to` yields a non-empty prefix exactly when the region's
        // start is misaligned for `T`.
        if !mem.deref().align_to::<T>().0.is_empty() {
            return Err((mem, MemoryLayoutError::MisalignedMemory));
        }
        let vec = Self {
            mem,
            _marker: PhantomData,
        };
        if vec.len() <= vec.capacity() {
            Ok(vec)
        } else {
            Err((vec.into_mem(), MemoryLayoutError::CapacityExceeded))
        }
    }
    /// Consumes the vector and returns the underlying memory.
    pub fn into_mem(self) -> A {
        self.mem
    }
    /// Borrows the underlying memory.
    pub fn as_mem(&self) -> &A {
        &self.mem
    }
    /// Mutably borrows the underlying memory.
    pub fn as_mem_mut(&mut self) -> &mut A {
        &mut self.mem
    }
}
impl<'a, T: Copy, A: 'a + Memory> MemVec<'a, T, A> {
    /// The full backing buffer reinterpreted as `[T]` — capacity, not length.
    fn as_buf(&self) -> &[T] {
        unsafe {
            // Alignment was validated in `try_from_memory`, so the prefix is
            // empty; any partial trailing element lands in `_suffix` and is
            // excluded from the capacity.
            let (prefix, slice, _suffix) = self.mem.deref().align_to::<T>();
            debug_assert_eq!(prefix.len(), 0);
            slice
        }
    }
    /// Mutable counterpart of `as_buf`.
    fn as_buf_mut(&mut self) -> &mut [T] {
        unsafe {
            let (prefix, slice, _suffix) = self.mem.deref_mut().align_to_mut::<T>();
            debug_assert_eq!(prefix.len(), 0);
            slice
        }
    }
    /// Number of elements the region can hold without growing.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.as_buf().len()
    }
    /// Reserves space for at least `additional` more elements, growing
    /// amortized (doubling). Panics if the underlying `Memory` fails to grow.
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        self.try_reserve(additional).expect("reserve failed");
    }
    /// Fallible version of `reserve`; returns the `Memory` error instead of
    /// panicking.
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), A::Error> {
        let len = self.len();
        if self.needs_to_grow(len, additional) {
            self.grow_amortized(len, additional)
        } else {
            Ok(())
        }
    }
    /// Like `reserve` but requests exactly `len + additional` capacity, with
    /// no amortized over-allocation.
    pub fn reserve_exact(&mut self, additional: usize) {
        self.try_reserve_exact(additional).expect("reserve failed");
    }
    /// Fallible version of `reserve_exact`.
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), A::Error> {
        let len = self.len();
        if self.needs_to_grow(len, additional) {
            self.grow_exact(len, additional)
        } else {
            Ok(())
        }
    }
    /// Shrinks the backing region to exactly fit the current length.
    /// Panics if the underlying `Memory` refuses to shrink.
    pub fn shrink_to_fit(&mut self) {
        // `mem.len()` is the element count; converted to bytes for the
        // byte-oriented `Memory::shrink_to`.
        let len = self.mem.len();
        if self.capacity() > len {
            self.mem
                .shrink_to(len * core::mem::size_of::<T>())
                .expect("shrink_to failed");
        }
    }
    /// Shrinks capacity toward `min_capacity`, never below the current
    /// length. No-op when capacity is already <= `min_capacity`.
    pub fn shrink_to(&mut self, min_capacity: usize) {
        if self.capacity() > min_capacity {
            // Never shrink below the live elements.
            let new_cap = core::cmp::max(self.len(), min_capacity);
            self.mem
                .shrink_to(new_cap * core::mem::size_of::<T>())
                .expect("shrink_to failed");
        }
    }
    /// Shortens the vector to `len` elements; a no-op when `len >= self.len()`.
    /// Capacity is unchanged.
    pub fn truncate(&mut self, len: usize) {
        if len > self.len() {
            return;
        }
        unsafe {
            let remaining_len = self.mem.len() - len;
            let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
            // Length is updated before dropping the tail, mirroring
            // `Vec::truncate`'s panic-safety ordering. Since `T: Copy`
            // (hence no `Drop` impl), the `drop_in_place` is effectively a
            // no-op kept for structural parity with std.
            *self.mem.len_mut() = len;
            ptr::drop_in_place(s);
        }
    }
    /// The initialized elements as a shared slice.
    pub fn as_slice(&self) -> &[T] {
        let len = self.mem.len();
        // `len <= capacity` is an invariant checked at construction and by
        // `set_len`, so the unchecked slice is in bounds.
        unsafe { self.as_buf().get_unchecked(..len) }
    }
    /// The initialized elements as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        let len = self.mem.len();
        unsafe { self.as_buf_mut().get_unchecked_mut(..len) }
    }
    /// Raw pointer to the first element.
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.mem.as_ptr() as *const _
    }
    /// Raw mutable pointer to the first element.
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.mem.as_mut_ptr() as *mut _
    }
    /// Sets the stored length directly.
    ///
    /// Unlike `Vec::set_len`, this panics (rather than invoking UB) when
    /// `len` exceeds the capacity.
    ///
    /// # Safety
    /// The first `len` elements must be initialized `T` values.
    pub unsafe fn set_len(&mut self, len: usize) {
        // Cold, non-inlined panic path keeps the hot path small.
        #[cold]
        #[inline(never)]
        fn assert_failed(len: usize, cap: usize) -> ! {
            panic!("`set_len` len (is {len}) should be <= cap (is {cap})");
        }
        let cap = self.capacity();
        if len > cap {
            assert_failed(len, cap);
        }
        *self.mem.len_mut() = len;
    }
    /// Removes and returns the element at `index` in O(1) by moving the last
    /// element into its place. Does NOT preserve order. Panics when
    /// `index >= len`.
    #[inline]
    pub fn swap_remove(&mut self, index: usize) -> T {
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("swap_remove index (is {index}) should be < len (is {len})");
        }
        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }
        unsafe {
            // Read out the removed value, then overwrite the hole with the
            // last element and shrink by one.
            let value = ptr::read(self.as_ptr().add(index));
            let base_ptr = self.as_mut_ptr();
            ptr::copy(base_ptr.add(len - 1), base_ptr.add(index), 1);
            self.set_len(len - 1);
            value
        }
    }
    /// Inserts `element` at `index`, shifting everything after it one slot
    /// to the right. Panics when `index > len` or when growth fails.
    pub fn insert(&mut self, index: usize, element: T) {
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("insertion index (is {index}) should be <= len (is {len})");
        }
        let len = self.len();
        if index > len {
            assert_failed(index, len);
        }
        // Grow only when completely full.
        if len == self.capacity() {
            self.reserve(1);
        }
        unsafe {
            {
                let p = self.as_mut_ptr().add(index);
                // Overlapping copy shifts the tail right by one slot.
                ptr::copy(p, p.offset(1), len - index);
                ptr::write(p, element);
            }
            self.set_len(len + 1);
        }
    }
    /// Removes and returns the element at `index`, shifting the tail left to
    /// preserve order (O(n)). Panics when `index >= len`.
    #[track_caller]
    pub fn remove(&mut self, index: usize) -> T {
        #[cold]
        #[inline(never)]
        #[track_caller]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("removal index (is {index}) should be < len (is {len})");
        }
        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }
        unsafe {
            let ret;
            {
                let ptr = self.as_mut_ptr().add(index);
                // Read out the value, then close the gap with an overlapping
                // left shift.
                ret = ptr::read(ptr);
                ptr::copy(ptr.offset(1), ptr, len - index - 1);
            }
            self.set_len(len - 1);
            ret
        }
    }
    /// Keeps only the elements for which `f` returns `true`, preserving
    /// order. Delegates to `retain_mut`.
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        self.retain_mut(|elem| f(elem));
    }
    /// `retain` with mutable access to each element. Ported from
    /// `Vec::retain_mut`, including its panic-safety machinery.
    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let original_len = self.len();
        // Length is zeroed up front so that, if `f` panics, no element in
        // an inconsistent region is observable through the vector.
        unsafe { self.set_len(0) };
        // Drop guard: on unwind (or normal exit) it backshifts the
        // not-yet-processed tail over the holes and restores a correct
        // length.
        struct BackshiftOnDrop<'a, 'v, T: Copy, A: Memory> {
            v: &'a mut MemVec<'v, T, A>,
            processed_len: usize,
            deleted_cnt: usize,
            original_len: usize,
        }
        impl<T: Copy, A: Memory> Drop for BackshiftOnDrop<'_, '_, T, A> {
            fn drop(&mut self) {
                if self.deleted_cnt > 0 {
                    unsafe {
                        // Move the unprocessed tail left over the holes left
                        // by deleted elements.
                        ptr::copy(
                            self.v.as_ptr().add(self.processed_len),
                            self.v
                                .as_mut_ptr()
                                .add(self.processed_len - self.deleted_cnt),
                            self.original_len - self.processed_len,
                        );
                    }
                }
                unsafe {
                    self.v.set_len(self.original_len - self.deleted_cnt);
                }
            }
        }
        let mut g = BackshiftOnDrop {
            v: self,
            processed_len: 0,
            deleted_cnt: 0,
            original_len,
        };
        // Two-phase scan: with DELETED = false it runs until the first
        // deletion (no copying needed while the prefix is intact), then the
        // DELETED = true pass continues, backfilling each retained element
        // into the current hole.
        fn process_loop<F, T: Copy, A: Memory, const DELETED: bool>(
            original_len: usize,
            f: &mut F,
            g: &mut BackshiftOnDrop<'_, '_, T, A>,
        ) where
            F: FnMut(&mut T) -> bool,
        {
            while g.processed_len != original_len {
                let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) };
                if !f(cur) {
                    // Advance counters BEFORE dropping, so the guard never
                    // double-drops this element if `drop_in_place` panics.
                    g.processed_len += 1;
                    g.deleted_cnt += 1;
                    unsafe { ptr::drop_in_place(cur) };
                    if DELETED {
                        continue;
                    } else {
                        // First deletion: switch to the hole-filling phase.
                        break;
                    }
                }
                if DELETED {
                    unsafe {
                        let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt);
                        ptr::copy_nonoverlapping(cur, hole_slot, 1);
                    }
                }
                g.processed_len += 1;
            }
        }
        process_loop::<F, T, A, false>(original_len, &mut f, &mut g);
        process_loop::<F, T, A, true>(original_len, &mut f, &mut g);
        // Normal exit: the guard's Drop performs the final length fix-up.
        drop(g);
    }
    /// Removes consecutive elements that map to equal keys, keeping the
    /// first of each run.
    #[inline]
    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b))
    }
    /// Removes consecutive elements for which `same_bucket` returns `true`
    /// against the previously retained element. Ported from `Vec::dedup_by`.
    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        let len = self.len();
        if len <= 1 {
            return;
        }
        // Drop guard: if `same_bucket` panics mid-scan, close the gap
        // between `write` and `read` and restore a consistent length.
        struct FillGapOnDrop<'a, 'b, T: Copy, A: Memory> {
            read: usize,
            write: usize,
            vec: &'a mut MemVec<'b, T, A>,
        }
        impl<'a, 'b, T: Copy, A: Memory> Drop for FillGapOnDrop<'a, 'b, T, A> {
            fn drop(&mut self) {
                unsafe {
                    let ptr = self.vec.as_mut_ptr();
                    let len = self.vec.len();
                    // `wrapping_sub` mirrors std; `read >= write` holds
                    // throughout the scan, so these do not actually wrap.
                    let items_left = len.wrapping_sub(self.read);
                    let dropped_ptr = ptr.add(self.write);
                    let valid_ptr = ptr.add(self.read);
                    ptr::copy(valid_ptr, dropped_ptr, items_left);
                    let dropped = self.read.wrapping_sub(self.write);
                    self.vec.set_len(len - dropped);
                }
            }
        }
        // Element 0 is always retained; scanning starts at index 1.
        let mut gap = FillGapOnDrop {
            read: 1,
            write: 1,
            vec: self,
        };
        let ptr = gap.vec.as_mut_ptr();
        unsafe {
            while gap.read < len {
                let read_ptr = ptr.add(gap.read);
                let prev_ptr = ptr.add(gap.write.wrapping_sub(1));
                if same_bucket(&mut *read_ptr, &mut *prev_ptr) {
                    // Duplicate: advance past it (counter first, for panic
                    // safety of the guard) and drop it in place.
                    gap.read += 1;
                    ptr::drop_in_place(read_ptr);
                } else {
                    // Unique: compact it down to the write cursor.
                    let write_ptr = ptr.add(gap.write);
                    ptr::copy(read_ptr, write_ptr, 1);
                    gap.write += 1;
                    gap.read += 1;
                }
            }
            gap.vec.set_len(gap.write);
            // Success: disarm the guard so its Drop doesn't run again.
            core::mem::forget(gap);
        }
    }
    /// Appends `value`, growing if full. Panics if growth fails.
    #[inline]
    pub fn push(&mut self, value: T) {
        if self.len() == self.capacity() {
            self.reserve_for_push(self.len()).unwrap();
        }
        unsafe {
            let end = self.as_mut_ptr().add(self.len());
            ptr::write(end, value);
            // Bump the stored length only after the slot is initialized.
            *self.mem.len_mut() += 1;
        }
    }
    /// Removes and returns the last element, or `None` when empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.mem.len() == 0 {
            None
        } else {
            unsafe {
                // Shrink first, then read the element just past the new end.
                *self.mem.len_mut() -= 1;
                Some(ptr::read(self.as_mut_ptr().add(self.len())))
            }
        }
    }
    /// Removes all elements; capacity is unchanged.
    #[inline]
    pub fn clear(&mut self) {
        self.truncate(0)
    }
    /// Number of initialized elements.
    #[inline]
    pub fn len(&self) -> usize {
        self.mem.len()
    }
    /// `true` when the vector holds no elements.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Resizes to `new_len`, filling new slots with values produced by `f`,
    /// or truncating when shrinking.
    pub fn resize_with<F>(&mut self, new_len: usize, f: F)
    where
        F: FnMut() -> T,
    {
        let len = self.len();
        if new_len > len {
            self.extend_with(new_len - len, ExtendFunc(f));
        } else {
            self.truncate(new_len);
        }
    }
    /// The uninitialized tail of the buffer (between `len` and `capacity`)
    /// as a `MaybeUninit` slice.
    #[inline]
    pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
        unsafe {
            core::slice::from_raw_parts_mut(
                self.as_mut_ptr().add(self.len()) as *mut MaybeUninit<T>,
                self.capacity() - self.len(),
            )
        }
    }
}
/// Source of fill values for `extend_with`: `next` supplies all but the
/// final element, `last` supplies the final one and may consume the source.
trait ExtendWith<T> {
    fn next(&mut self) -> T;
    fn last(self) -> T;
}
/// `ExtendWith` backed by a closure; every element is a fresh call.
struct ExtendFunc<F>(F);
impl<T, F: FnMut() -> T> ExtendWith<T> for ExtendFunc<F> {
    fn next(&mut self) -> T {
        (self.0)()
    }
    fn last(mut self) -> T {
        // The final element is produced exactly like the others.
        self.next()
    }
}
/// Central panic for arithmetic overflow while computing a new capacity.
///
/// Declared to return `usize` (despite diverging) so it can be passed
/// directly as the `unwrap_or_else` fallback for checked arithmetic.
/// Marked cold/never-inline, consistent with the file's other panic helpers,
/// to keep the growth hot paths compact.
#[cold]
#[inline(never)]
fn capacity_overflow() -> usize {
    panic!("capacity overflow");
}
// NOTE: the bound previously named `std::cmp::PartialEq`; this file is
// otherwise `core`-only, so the prelude path keeps it `no_std`-compatible.
impl<'a, T: Copy + PartialEq, A: 'a + Memory> MemVec<'a, T, A> {
    /// Removes consecutive repeated elements, keeping the first of each run.
    /// Mirrors `Vec::dedup`.
    #[inline]
    pub fn dedup(&mut self) {
        self.dedup_by(|a, b| a == b)
    }
}
impl<'a, T: Copy, A: 'a + Memory> MemVec<'a, T, A> {
    /// Minimum element capacity for the first non-empty growth; small
    /// elements start with a larger batch to amortize reallocations
    /// (same heuristic as std's `RawVec`).
    pub(crate) const MIN_NON_ZERO_CAP: usize = if core::mem::size_of::<T>() == 1 {
        8
    } else if core::mem::size_of::<T>() <= 1024 {
        4
    } else {
        1
    };
    /// `true` when `additional` more elements do not fit in spare capacity.
    /// `wrapping_sub` matches std: if `len > capacity` the subtraction wraps
    /// to a huge value and the comparison still answers correctly.
    fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
        additional > self.capacity().wrapping_sub(len)
    }
    /// Cold path of `push`: grow for exactly one more element.
    fn reserve_for_push(&mut self, len: usize) -> Result<(), A::Error> {
        self.grow_amortized(len, 1)
    }
    /// Grows to at least `len + additional`, doubling the current capacity
    /// when that is larger, never below `MIN_NON_ZERO_CAP`.
    ///
    /// All arithmetic is overflow-checked: a wrapped byte count would
    /// silently request far too little memory from `Memory::reserve`.
    fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), A::Error> {
        debug_assert!(additional > 0);
        let required_cap = len
            .checked_add(additional)
            .unwrap_or_else(capacity_overflow);
        // Saturating doubling: an absurdly large result is caught by the
        // checked byte conversion below instead of wrapping.
        let cap = core::cmp::max(self.capacity().saturating_mul(2), required_cap);
        let cap = core::cmp::max(Self::MIN_NON_ZERO_CAP, cap);
        let bytes = cap
            .checked_mul(core::mem::size_of::<T>())
            .unwrap_or_else(capacity_overflow);
        self.mem.reserve(bytes)
    }
    /// Grows to exactly `len + additional` elements (no amortization).
    fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), A::Error> {
        let cap = len
            .checked_add(additional)
            .unwrap_or_else(capacity_overflow);
        let bytes = cap
            .checked_mul(core::mem::size_of::<T>())
            .unwrap_or_else(capacity_overflow);
        self.mem.reserve(bytes)
    }
    /// Appends `n` values drawn from `value`; the final one comes from
    /// `ExtendWith::last`, which may consume the generator.
    fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
        self.reserve(n);
        unsafe {
            let mut ptr = self.as_mut_ptr().add(self.len());
            // The stored length is bumped after each write so a panic in
            // `value.next()` leaves the vector in a consistent state.
            for _ in 1..n {
                ptr::write(ptr, value.next());
                ptr = ptr.offset(1);
                *self.mem.len_mut() += 1;
            }
            if n > 0 {
                core::ptr::write(ptr, value.last());
                *self.mem.len_mut() += 1;
            }
        }
    }
}
impl<'a, T: Copy, A: 'a + Memory> Deref for MemVec<'a, T, A> {
type Target = [T];
fn deref(&self) -> &Self::Target {
self.as_slice()
}
}
impl<'a, T: Copy, A: 'a + Memory> DerefMut for MemVec<'a, T, A> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.as_mut_slice()
}
}
// Hashes exactly as the equivalent slice of elements would.
impl<'a, T: Copy + Hash, A: Memory> Hash for MemVec<'a, T, A> {
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state)
    }
}
// Delegates indexing (single positions and ranges) to the element slice.
impl<'a, T: Copy, I: SliceIndex<[T]>, A: Memory> Index<I> for MemVec<'a, T, A> {
    type Output = I::Output;
    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        &self.as_slice()[index]
    }
}
// Mutable counterpart of the `Index` impl.
impl<'a, T: Copy, I: SliceIndex<[T]>, A: Memory> IndexMut<I> for MemVec<'a, T, A> {
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        &mut self.as_mut_slice()[index]
    }
}
// `for x in &vec` iterates shared references to the elements.
impl<'a, 'm, T: Copy, A: Memory> IntoIterator for &'a MemVec<'m, T, A> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;
    fn into_iter(self) -> slice::Iter<'a, T> {
        self.as_slice().iter()
    }
}
// `for x in &mut vec` iterates mutable references to the elements.
impl<'a, 'm, T: Copy, A: Memory> IntoIterator for &'a mut MemVec<'m, T, A> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;
    fn into_iter(self) -> slice::IterMut<'a, T> {
        self.as_mut_slice().iter_mut()
    }
}
// Element-wise equality, identical to comparing the two slices.
impl<'a, T: Copy + PartialEq, A: Memory> PartialEq for MemVec<'a, T, A> {
    fn eq(&self, other: &Self) -> bool {
        PartialEq::eq(&**self, &**other)
    }
}
// Lexicographic ordering, delegated to the element slices.
impl<'a, T: Copy + PartialOrd, A: Memory> PartialOrd for MemVec<'a, T, A> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}
// Equality is slice equality, which is a total equivalence when `T: Eq`.
impl<'a, T: Copy + Eq, A: Memory> Eq for MemVec<'a, T, A> {}
// Total lexicographic ordering, delegated to the element slices.
impl<'a, T: Ord + Copy, A: Memory> Ord for MemVec<'a, T, A> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}
// Renders like a plain slice/`Vec` of the elements.
impl<'a, T: core::fmt::Debug + Copy, A: Memory> core::fmt::Debug for MemVec<'a, T, A> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        core::fmt::Debug::fmt(self.as_slice(), f)
    }
}
// Cheap shared view of the elements for `AsRef<[T]>`-taking APIs.
impl<'a, T: Copy, A: Memory> AsRef<[T]> for MemVec<'a, T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}
// Cheap mutable view of the elements for `AsMut<[T]>`-taking APIs.
impl<'a, T: Copy, A: Memory> AsMut<[T]> for MemVec<'a, T, A> {
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}