use core::alloc::{Layout, LayoutError};
use core::fmt::{Debug, Formatter};
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ops::{Index, IndexMut};
use super::{buffer_too_large_for_handle_type, DebugEntry, DefaultHandle, Handle};
use crate::storage::{Capacity, LayoutSpec, Storage};
/// [`LayoutSpec`] marker type describing the memory layout of a
/// [`PackedPool`]'s backing buffer.
pub struct PackedPoolLayout<T, H>(PhantomData<(T, H)>);
impl<T, H: Handle> LayoutSpec for PackedPoolLayout<T, H> {
    /// Computes the layout for a buffer holding `items` elements: four
    /// back-to-back arrays (values, handles, generation counters, and
    /// slot indices), padded at the end so the whole block can be tiled.
    fn layout_with_capacity(items: usize) -> Result<Layout, LayoutError> {
        let values = Layout::array::<T>(items)?;
        let handles = Layout::array::<H>(items)?;
        let counters = Layout::array::<u32>(items)?;
        let indices = Layout::array::<H::Index>(items)?;
        let (layout, _) = values.extend(handles)?;
        let (layout, _) = layout.extend(counters)?;
        let (layout, _) = layout.extend(indices)?;
        Ok(layout.pad_to_align())
    }
}
/// A pool of values addressed by stable, generational handles, with the
/// values themselves stored contiguously ("packed") for fast iteration.
pub struct PackedPool<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle = DefaultHandle> {
    buf: S,
    // Number of live values == length of the packed value/handle arrays.
    len: H::Index,
    // Head of the intrusive free list threaded through vacant slots.
    next_free_slot: H::Index,
    items: PhantomData<T>,
}
impl<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> From<S> for PackedPool<T, S, H> {
    /// Builds an empty pool from `buf`, chaining every slot into the free
    /// list (0 -> 1 -> ... -> cap-1 -> sentinel) and zeroing all generation
    /// counters.
    ///
    /// Panics if the buffer's capacity cannot be represented by `H`.
    fn from(buf: S) -> Self {
        let cap = buf.capacity();
        if cap >= H::MAX_INDEX {
            buffer_too_large_for_handle_type::<H>();
        }
        // With no slots at all the free list starts out exhausted;
        // previously this case wrote the sentinel out of bounds and left
        // `next_free_slot` pointing at a nonexistent slot 0.
        let first_free = if cap == 0 { Self::FREE_LIST_SENTINEL } else { 0 };
        let mut result = PackedPool {
            buf,
            len: H::Index::from_usize(0),
            next_free_slot: H::Index::from_usize(first_free),
            items: PhantomData,
        };
        if cap > 0 {
            // Chain slot i to slot i + 1.
            let mut ptr = result.next_free_slot_or_packed_index_array_mut();
            for i in 1..cap {
                unsafe {
                    *ptr = H::Index::from_usize(i);
                    ptr = ptr.add(1);
                }
            }
            // SAFETY: `ptr` now points at the last slot (cap >= 1), which
            // is in bounds; it terminates the free list.
            let sentinel = H::Index::from_usize(Self::FREE_LIST_SENTINEL);
            unsafe { *ptr = sentinel; }
        }
        // All generation counters start at 0 (even == vacant); a
        // zero-length write is a no-op.
        unsafe { core::ptr::write_bytes(result.counters_mut(), 0x00, cap); }
        result
    }
}
impl<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> PackedPool<T, S, H> {
    // Marks the end of the slot free list; no real slot can have this
    // index because capacities must stay below `H::MAX_INDEX`.
    const FREE_LIST_SENTINEL: usize = H::Index::MAX_REPRESENTABLE;

    /// Pointer to the start of the packed values array.
    #[inline]
    fn values_ptr(&self) -> *const T {
        self.buf.get_ptr().cast()
    }

    /// Mutable pointer to the start of the packed values array.
    #[inline]
    fn values_mut_ptr(&mut self) -> *mut T {
        self.buf.get_mut_ptr().cast()
    }

    /// Pointer to the packed handles array; the offset is recomputed by
    /// replaying the same `Layout` arithmetic as
    /// `PackedPoolLayout::layout_with_capacity`.
    #[inline]
    fn handles_ptr(&self) -> *const H {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let (_, offset) = values_array.extend(handles_array).unwrap();
        let base_ptr = self.buf.get_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    /// Mutable counterpart of [`Self::handles_ptr`].
    #[inline]
    fn handles_mut_ptr(&mut self) -> *mut H {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let (_, offset) = values_array.extend(handles_array).unwrap();
        let base_ptr = self.buf.get_mut_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    /// Pointer to the per-slot generation counter array
    /// (even counter == vacant slot, odd == occupied).
    #[inline]
    fn counters(&self) -> *const u32 {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let counters_array = Layout::array::<u32>(cap).unwrap();
        let (extended, _) = values_array.extend(handles_array).unwrap();
        let (_, offset) = extended.extend(counters_array).unwrap();
        let base_ptr = self.buf.get_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    /// Mutable counterpart of [`Self::counters`].
    #[inline]
    fn counters_mut(&mut self) -> *mut u32 {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let counters_array = Layout::array::<u32>(cap).unwrap();
        let (extended, _) = values_array.extend(handles_array).unwrap();
        let (_, offset) = extended.extend(counters_array).unwrap();
        let base_ptr = self.buf.get_mut_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    /// Pointer to the per-slot index array: for a vacant slot the entry is
    /// the next free slot, for an occupied slot it is the position of the
    /// slot's value in the packed arrays.
    #[inline]
    fn next_free_slot_or_packed_index_array(&self) -> *const H::Index {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let counters_array = Layout::array::<u32>(cap).unwrap();
        let nfsopi_array = Layout::array::<H::Index>(cap).unwrap();
        let (extended, _) = values_array.extend(handles_array).unwrap();
        let (extended, _) = extended.extend(counters_array).unwrap();
        let (_, offset) = extended.extend(nfsopi_array).unwrap();
        let base_ptr = self.buf.get_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    /// Mutable counterpart of [`Self::next_free_slot_or_packed_index_array`].
    #[inline]
    fn next_free_slot_or_packed_index_array_mut(&mut self) -> *mut H::Index {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let counters_array = Layout::array::<u32>(cap).unwrap();
        let nfsopi_array = Layout::array::<H::Index>(cap).unwrap();
        let (extended, _) = values_array.extend(handles_array).unwrap();
        let (extended, _) = extended.extend(counters_array).unwrap();
        let (_, offset) = extended.extend(nfsopi_array).unwrap();
        let base_ptr = self.buf.get_mut_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    /// Returns the total number of elements the pool can hold.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.buf.capacity()
    }

    /// Returns the number of elements currently in the pool.
    #[inline]
    pub fn len(&self) -> usize {
        self.len.as_usize()
    }

    /// Returns `true` if the pool contains no elements.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns `true` if no more elements can be inserted.
    #[inline]
    pub fn is_full(&self) -> bool {
        self.len() == self.capacity()
    }

    /// All live values as a contiguous slice.
    #[inline]
    pub fn values(&self) -> &[T] {
        unsafe { core::slice::from_raw_parts(self.values_ptr(), self.len()) }
    }

    /// All live values as a contiguous mutable slice.
    #[inline]
    pub fn values_mut(&mut self) -> &mut [T] {
        unsafe { core::slice::from_raw_parts_mut(self.values_mut_ptr(), self.len()) }
    }

    /// The handles of all live values; `handles()[i]` refers to `values()[i]`.
    #[inline]
    pub fn handles(&self) -> &[H] {
        unsafe { core::slice::from_raw_parts(self.handles_ptr(), self.len()) }
    }

    /// Both parallel slices at once: handles (shared) and values (mutable).
    #[inline]
    pub fn handles_and_values_mut(&mut self) -> (&[H], &mut [T]) {
        let len = self.len();
        let handles_ptr = self.handles_ptr();
        let values_ptr = self.values_mut_ptr();
        // SAFETY: the two arrays never overlap, so a shared handle slice
        // and a mutable value slice can coexist.
        unsafe {
            let handles = core::slice::from_raw_parts(handles_ptr, len);
            let values = core::slice::from_raw_parts_mut(values_ptr, len);
            (handles, values)
        }
    }

    /// Returns `true` if `handle` currently refers to a live value.
    pub fn contains(&self, handle: H) -> bool {
        let (idx, input_gen_count) = handle.into_raw_parts();
        if idx >= self.buf.capacity() {
            return false;
        }
        let current_gen_count = unsafe { self.counters().add(idx).read() };
        // Generations must match AND the slot must be occupied (odd count).
        current_gen_count == input_gen_count && current_gen_count % 2 == 1
    }

    /// Returns a reference to the value behind `handle`, or `None` if the
    /// handle is stale or out of bounds.
    pub fn get(&self, handle: H) -> Option<&T> {
        let (index, input_gen_count) = handle.into_raw_parts();
        if index >= self.buf.capacity() {
            return None;
        }
        let current_gen_count = unsafe { self.counters().add(index).read() };
        if current_gen_count != input_gen_count || input_gen_count % 2 == 0 {
            return None;
        }
        // The slot is occupied, so its index entry is the value's packed
        // position.
        let packed_index = unsafe {
            self.next_free_slot_or_packed_index_array().add(index).read()
        };
        unsafe { self.values_ptr().add(packed_index.as_usize()).as_ref() }
    }

    /// Returns a mutable reference to the value behind `handle`, or `None`
    /// if the handle is stale or out of bounds.
    pub fn get_mut(&mut self, handle: H) -> Option<&mut T> {
        let (index, input_gen_count) = handle.into_raw_parts();
        if index >= self.buf.capacity() {
            return None;
        }
        let current_gen_count = unsafe { self.counters().add(index).read() };
        if current_gen_count != input_gen_count || input_gen_count % 2 == 0 {
            return None;
        }
        let packed_index = unsafe {
            self.next_free_slot_or_packed_index_array().add(index).read()
        };
        unsafe { self.values_mut_ptr().add(packed_index.as_usize()).as_mut() }
    }

    /// Returns mutable references to the values behind all `N` handles at
    /// once, or `None` if any handle is invalid or any two handles refer
    /// to the same slot.
    pub fn get_disjoint_mut<const N: usize>(&mut self, handles: [H; N]) -> Option<[&mut T; N]> {
        // An array of `MaybeUninit` may itself start uninitialized.
        let mut result: [MaybeUninit<*mut T>; N] = unsafe { MaybeUninit::uninit().assume_init() };
        let counters = self.counters_mut();
        let slots = self.next_free_slot_or_packed_index_array();
        let values = self.values_mut_ptr();
        let mut i = 0;
        while i < N {
            let (index, input_gen_count) = handles[i].into_raw_parts();
            if index >= self.capacity() || input_gen_count % 2 == 0 {
                break;
            }
            let current_gen_count = unsafe { counters.add(index).read() };
            if current_gen_count != input_gen_count {
                break;
            }
            unsafe {
                // Temporarily flip the counter's parity; a duplicate
                // handle then fails the generation check above.
                *counters.add(index) ^= 1;
                let packed_index = slots.add(index).read().as_usize();
                result[i] = MaybeUninit::new(values.add(packed_index));
            }
            i += 1;
        }
        // Undo the parity flips for every handle processed so far.
        for h in &handles[..i] {
            let (index, _) = h.into_raw_parts();
            unsafe { *counters.add(index) ^= 1 };
        }
        if i == N {
            // All N pointers are initialized and pairwise distinct;
            // reinterpret the pointer array as an array of references.
            Some(unsafe { core::mem::transmute_copy(&result) })
        } else {
            None
        }
    }

    /// Inserts `value`, returning a handle to it, or `Err(value)` if the
    /// pool is already full.
    pub fn try_insert(&mut self, value: T) -> Result<H, T> {
        let insert_position = self.next_free_slot.as_usize();
        if insert_position == Self::FREE_LIST_SENTINEL {
            return Err(value);
        }
        // The new value goes at the end of the packed arrays.
        let packed_insert_position = self.len;
        self.len = H::Index::from_usize(packed_insert_position.as_usize() + 1);
        unsafe {
            // Bump the generation to odd (occupied), wrapping within the
            // handle's generation bits.
            let gen_count_ptr = self.counters_mut().add(insert_position);
            let gen_count = gen_count_ptr.read().wrapping_add(1) & H::MAX_GENERATION;
            debug_assert_eq!(gen_count % 2, 1);
            gen_count_ptr.write(gen_count);
            // Pop the slot off the free list, then point it at the new
            // packed position.
            let slot_ptr = self.next_free_slot_or_packed_index_array_mut().add(insert_position);
            self.next_free_slot = slot_ptr.read();
            slot_ptr.write(packed_insert_position);
            let handle = H::new(insert_position, gen_count);
            self.handles_mut_ptr().add(packed_insert_position.as_usize()).write(handle);
            self.values_mut_ptr().add(packed_insert_position.as_usize()).write(value);
            Ok(handle)
        }
    }

    /// Inserts `value`, returning a handle to it.
    ///
    /// Panics if the pool is already at capacity.
    pub fn insert(&mut self, value: T) -> H {
        #[cold]
        #[inline(never)]
        fn assert_failed() -> ! {
            panic!("pool is already at capacity")
        }
        let result = self.try_insert(value);
        match result {
            Ok(handle) => handle,
            Err(_) => assert_failed(),
        }
    }

    /// Like [`Self::try_insert`], but constructs the value from its own
    /// handle, which is passed to `f`. Returns `None` if the pool is full.
    pub fn try_insert_with_handle<F: FnOnce(H) -> T>(&mut self, f: F) -> Option<H> {
        let insert_position = self.next_free_slot.as_usize();
        if insert_position == Self::FREE_LIST_SENTINEL {
            return None;
        }
        let packed_insert_position = self.len;
        self.len = H::Index::from_usize(packed_insert_position.as_usize() + 1);
        unsafe {
            let gen_count_ptr = self.counters_mut().add(insert_position);
            let gen_count = gen_count_ptr.read().wrapping_add(1) & H::MAX_GENERATION;
            debug_assert_eq!(gen_count % 2, 1);
            gen_count_ptr.write(gen_count);
            let slot_ptr = self.next_free_slot_or_packed_index_array_mut().add(insert_position);
            self.next_free_slot = slot_ptr.read();
            slot_ptr.write(packed_insert_position);
            let handle = H::new(insert_position, gen_count);
            self.handles_mut_ptr().add(packed_insert_position.as_usize()).write(handle);
            // The handle is fully formed before `f` runs, so the value may
            // store its own handle.
            self.values_mut_ptr().add(packed_insert_position.as_usize()).write(f(handle));
            Some(handle)
        }
    }

    /// Like [`Self::insert`], but constructs the value from its own handle.
    ///
    /// Panics if the pool is already at capacity.
    pub fn insert_with_handle<F: FnOnce(H) -> T>(&mut self, f: F) -> H {
        self.try_insert_with_handle(f)
            .expect("pool is already at capacity")
    }

    /// Removes and returns the value behind `handle`, or `None` if the
    /// handle is stale or out of bounds.
    pub fn remove(&mut self, handle: H) -> Option<T> {
        let (index, input_gen_count) = handle.into_raw_parts();
        if index >= self.buf.capacity() || input_gen_count % 2 == 0 {
            return None;
        }
        let gen_count_ptr = unsafe { self.counters_mut().add(index) };
        let current_gen_count = unsafe { *gen_count_ptr };
        if current_gen_count != input_gen_count {
            return None;
        }
        unsafe {
            // Mark the slot vacant (even generation) and push it onto the
            // free list.
            gen_count_ptr.write(current_gen_count.wrapping_add(1));
            let slot_ptr = self.next_free_slot_or_packed_index_array_mut().add(index);
            let packed_index = slot_ptr.read();
            slot_ptr.write(self.next_free_slot);
            self.next_free_slot = H::Index::from_usize(index);
            // Swap-remove from the packed arrays: read out the value, then
            // (unless it was the last entry) move the last entry into the
            // hole and fix up that entry's slot-to-packed-index link.
            let hole = self.values_mut_ptr().add(packed_index.as_usize());
            let result = hole.read();
            let new_len = self.len() - 1;
            if new_len != packed_index.as_usize() {
                let last = self.values_ptr().add(new_len);
                core::ptr::copy(last, hole, 1);
                let hole = self.handles_mut_ptr().add(packed_index.as_usize());
                let last = self.handles_ptr().add(new_len);
                core::ptr::copy(last, hole, 1);
                let (index, _) = last.read().into_raw_parts();
                self.next_free_slot_or_packed_index_array_mut().add(index).write(packed_index);
            }
            self.len = H::Index::from_usize(new_len);
            Some(result)
        }
    }

    /// Keeps only the elements for which `f` returns `true`; all others
    /// are removed and dropped.
    pub fn retain<F: FnMut(H, &mut T) -> bool>(&mut self, mut f: F) {
        self.drain_filter(|handle, item| !f(handle, item))
            .for_each(drop);
    }

    /// Drops all values, bumps each occupied slot's generation counter,
    /// and pushes every freed slot back onto the free list.
    pub fn clear(&mut self) {
        for packed_index in 0..self.len() {
            unsafe {
                self.values_mut_ptr().add(packed_index).drop_in_place();
                let (index, _) = self.handles_ptr().add(packed_index).read().into_raw_parts();
                *self.counters_mut().add(index) += 1;
                let slot_ptr = self.next_free_slot_or_packed_index_array_mut().add(index);
                *slot_ptr = self.next_free_slot;
                self.next_free_slot = H::Index::from_usize(index);
            }
        }
        self.len = H::Index::from_usize(0);
    }

    /// Iterates over `(handle, &value)` pairs in unspecified order.
    pub fn iter(&self) -> Iter<'_, H, T> {
        Iter { handles: self.handles_ptr(), values: self.values_ptr(), len: self.len(), pool: PhantomData }
    }

    /// Iterates over `(handle, &mut value)` pairs in unspecified order.
    pub fn iter_mut(&mut self) -> IterMut<'_, H, T> {
        IterMut { handles: self.handles_ptr(), values: self.values_mut_ptr(), len: self.len(), pool: PhantomData }
    }

    /// Removes and yields every element; the pool is left empty even if
    /// the iterator is dropped before being exhausted.
    pub fn drain(&mut self) -> Drain<'_, T, S, H> {
        Drain { pool: self }
    }

    /// Removes and yields exactly the elements for which `filter_fn`
    /// returns `true`; unyielded matches are removed on drop.
    pub fn drain_filter<F: FnMut(H, &mut T) -> bool>(&mut self, filter_fn: F) -> DrainFilter<'_, T, S, H, F> {
        // Scan starts one past the last packed element and walks backwards.
        let last_visited = self.len();
        DrainFilter {
            pool: self,
            last_visited,
            filter_fn
        }
    }
}
impl<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Index<H> for PackedPool<T, S, H> {
    type Output = T;
    /// Returns the value behind `handle`.
    ///
    /// Panics with "indexed with invalid pool handle" if the handle is
    /// stale or out of bounds.
    fn index(&self, handle: H) -> &Self::Output {
        match self.get(handle) {
            Some(value) => value,
            None => panic!("indexed with invalid pool handle"),
        }
    }
}
impl<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> IndexMut<H> for PackedPool<T, S, H> {
    /// Returns the value behind `handle` mutably.
    ///
    /// Panics with "indexed with invalid pool handle" if the handle is
    /// stale or out of bounds.
    fn index_mut(&mut self, handle: H) -> &mut Self::Output {
        match self.get_mut(handle) {
            Some(value) => value,
            None => panic!("indexed with invalid pool handle"),
        }
    }
}
impl<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Drop for PackedPool<T, S, H> {
    /// Drops all remaining values; the buffer itself is released by `S`'s
    /// own `Drop`, if any.
    fn drop(&mut self) {
        self.clear();
    }
}
// Helper that formats every slot (vacant and occupied) of a pool for the
// `Debug` implementation below.
struct DebugSlots<'a, T: Debug, S: Storage<PackedPoolLayout<T, H>>, H: Handle>(
    &'a PackedPool<T, S, H>,
);

impl<T: Debug, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Debug for DebugSlots<'_, T, S, H> {
    fn fmt(&self, fmt: &mut Formatter<'_>) -> core::fmt::Result {
        let gen_count_ptr = self.0.counters();
        let slot_ptr = self.0.next_free_slot_or_packed_index_array();
        let values_ptr = self.0.values_ptr();
        fmt.debug_list()
            .entries(
                (0..self.0.capacity()).map::<DebugEntry<T, H>, _>(|i| unsafe {
                    let generation = gen_count_ptr.add(i).read();
                    // Even generation == vacant: the slot entry is a
                    // free-list link. Odd == occupied: it is the value's
                    // packed position.
                    if generation % 2 == 0 {
                        let next_free_slot = slot_ptr.add(i).read();
                        DebugEntry::Vacant {
                            generation,
                            next_free_slot,
                        }
                    } else {
                        let packed_index = slot_ptr.add(i).read().as_usize();
                        let value = &*values_ptr.add(packed_index);
                        DebugEntry::Occupied { generation, value }
                    }
                }),
            )
            .finish()
    }
}
impl<T: Debug, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Debug for PackedPool<T, S, H> {
    /// Formats the pool's length, free-list head, and the state of every
    /// slot (via `DebugSlots`).
    fn fmt(&self, fmt: &mut Formatter<'_>) -> core::fmt::Result {
        fmt.debug_struct("PackedPool")
            .field("len", &self.len.as_usize())
            .field("next_free_slot", &self.next_free_slot.as_usize())
            .field("slots", &DebugSlots(self))
            .finish()
    }
}
/// Immutable iterator over a pool's `(handle, &value)` pairs.
///
/// Walks the packed arrays from the back towards the front.
pub struct Iter<'a, H, T> {
    handles: *const H,
    values: *const T,
    len: usize,
    pool: PhantomData<&'a ()>,
}

impl<'a, H, T: 'a> Iterator for Iter<'a, H, T> {
    type Item = (H, &'a T);
    fn next(&mut self) -> Option<Self::Item> {
        // The next item to yield sits at `len - 1`; stop once exhausted.
        let idx = self.len.checked_sub(1)?;
        self.len = idx;
        // SAFETY: `idx` is below the initial `len`, so both reads are in
        // bounds, and the borrowed value lives for `'a`.
        unsafe {
            let handle = self.handles.add(idx).read();
            let value = &*self.values.add(idx);
            Some((handle, value))
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.len;
        (remaining, Some(remaining))
    }
}

impl<'a, T: 'a, H> ExactSizeIterator for Iter<'a, H, T> {}
impl<'a, T: 'a, H> FusedIterator for Iter<'a, H, T> {}
impl<'a, T: 'a, S: Storage<PackedPoolLayout<T, H>>, H: Handle> IntoIterator for &'a PackedPool<T, S, H> {
    type Item = (H, &'a T);
    type IntoIter = Iter<'a, H, T>;
    /// Equivalent to [`PackedPool::iter`].
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
/// Mutable iterator over a pool's `(handle, &mut value)` pairs.
///
/// Walks the packed arrays from the back towards the front.
pub struct IterMut<'a, H, T> {
    handles: *const H,
    values: *mut T,
    len: usize,
    pool: PhantomData<&'a mut ()>,
}

impl<'a, H, T: 'a> Iterator for IterMut<'a, H, T> {
    type Item = (H, &'a mut T);
    fn next(&mut self) -> Option<Self::Item> {
        // The next item to yield sits at `len - 1`; stop once exhausted.
        let idx = self.len.checked_sub(1)?;
        self.len = idx;
        // SAFETY: `idx` is below the initial `len`, so both accesses are
        // in bounds; each index is visited once, so the mutable borrows
        // never alias.
        unsafe {
            let handle = self.handles.add(idx).read();
            let value = &mut *self.values.add(idx);
            Some((handle, value))
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.len;
        (remaining, Some(remaining))
    }
}

impl<'a, T: 'a, H> ExactSizeIterator for IterMut<'a, H, T> {}
impl<'a, T: 'a, H> FusedIterator for IterMut<'a, H, T> {}
impl<'a, T: 'a, S: Storage<PackedPoolLayout<T, H>>, H: Handle> IntoIterator for &'a mut PackedPool<T, S, H> {
    type Item = (H, &'a mut T);
    type IntoIter = IterMut<'a, H, T>;
    /// Equivalent to [`PackedPool::iter_mut`].
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
/// Draining iterator over a pool; yields every `(handle, value)` pair and
/// leaves the pool empty, even if dropped before being exhausted.
pub struct Drain<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> {
    pool: &'a mut PackedPool<T, S, H>,
}

impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Iterator for Drain<'a, T, S, H> {
    type Item = (H, T);
    fn next(&mut self) -> Option<Self::Item> {
        let len = self.pool.len();
        if len == 0 {
            return None;
        }
        // Pop the last packed entry, so no backfilling is required.
        let new_len = len - 1;
        let handle = unsafe { self.pool.handles_ptr().add(new_len).read() };
        let value = unsafe { self.pool.values_ptr().add(new_len).read() };
        let (index, gen_count) = handle.into_raw_parts();
        unsafe {
            // Mark the slot vacant (even generation) and push it onto the
            // free list.
            self.pool.counters_mut().add(index).write(gen_count.wrapping_add(1));
            self.pool.next_free_slot_or_packed_index_array_mut().add(index).write(self.pool.next_free_slot);
        }
        self.pool.next_free_slot = H::Index::from_usize(index);
        self.pool.len = H::Index::from_usize(new_len);
        Some((handle, value))
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        let size = self.pool.len();
        (size, Some(size))
    }
}

impl <'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Drop for Drain<'a, T, S, H> {
    /// Removes and drops any elements that were not yielded.
    fn drop(&mut self) {
        self.pool.clear();
    }
}

impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> ExactSizeIterator for Drain<'a, T, S, H> {}
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> FusedIterator for Drain<'a, T, S, H> {}
/// Draining, filtering iterator over a pool; removes and yields exactly
/// the elements for which the supplied closure returns `true`.
pub struct DrainFilter<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle, F: FnMut(H, &mut T) -> bool> {
    pool: &'a mut PackedPool<T, S, H>,
    // Scans backwards: packed positions below this are still unvisited.
    last_visited: usize,
    filter_fn: F,
}
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle, F: FnMut(H, &mut T) -> bool> Iterator for DrainFilter<'a, T, S, H, F> {
    type Item = (H, T);
    /// Scans the packed arrays back to front, removing and yielding each
    /// element for which `filter_fn` returns `true`.
    fn next(&mut self) -> Option<Self::Item> {
        while self.last_visited > 0 {
            self.last_visited -= 1;
            let handle = unsafe { self.pool.handles_ptr().add(self.last_visited).read() };
            let should_remove = unsafe {
                let value_ref = &mut *self.pool.values_mut_ptr().add(self.last_visited);
                (self.filter_fn)(handle, value_ref)
            };
            if !should_remove { continue; }
            let new_len = self.pool.len() - 1;
            let value = unsafe { self.pool.values_ptr().add(self.last_visited).read() };
            let (index, gen_count) = handle.into_raw_parts();
            unsafe {
                // Mark the slot vacant (even generation) and push it onto
                // the free list. Previously the free-list head was never
                // updated here, so removed slots leaked until `clear`.
                self.pool.counters_mut().add(index).write(gen_count.wrapping_add(1));
                self.pool.next_free_slot_or_packed_index_array_mut().add(index).write(self.pool.next_free_slot);
                self.pool.next_free_slot = H::Index::from_usize(index);
                // If the removed element was not the last packed entry,
                // backfill the hole with the last entry and point that
                // entry's slot at its new packed position. This must
                // compare packed positions; the previous `index != new_len`
                // compared a slot index against a packed position.
                if self.last_visited != new_len {
                    let value_src = self.pool.values_ptr().add(new_len);
                    let value_dst = self.pool.values_mut_ptr().add(self.last_visited);
                    core::ptr::copy(value_src, value_dst, 1);
                    let handle_src = self.pool.handles_ptr().add(new_len);
                    let handle_dst = self.pool.handles_mut_ptr().add(self.last_visited);
                    core::ptr::copy(handle_src, handle_dst, 1);
                    let (index, _) = handle_src.read().into_raw_parts();
                    self.pool.next_free_slot_or_packed_index_array_mut().add(index).write(H::Index::from_usize(self.last_visited));
                }
            }
            self.pool.len = H::Index::from_usize(new_len);
            return Some((handle, value));
        }
        None
    }
}
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle, F: FnMut(H, &mut T) -> bool> Drop for DrainFilter<'a, T, S, H, F> {
    /// Exhausts the iterator so every matching element is still removed
    /// and dropped even when the iterator is abandoned early.
    fn drop(&mut self) {
        self.for_each(drop);
    }
}
// NOTE(review): `DrainFilter` does not override `size_hint`, so the
// default `(0, None)` cannot satisfy `ExactSizeIterator`'s contract and
// calling `len()` on it would panic — confirm whether this impl should
// exist at all.
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle, F: FnMut(H, &mut T) -> bool> ExactSizeIterator for DrainFilter<'a, T, S, H, F> {}
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle, F: FnMut(H, &mut T) -> bool> FusedIterator for DrainFilter<'a, T, S, H, F> {}
#[cfg(feature = "alloc")]
#[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
impl<T, H: Handle> crate::collections::PackedAllocPool<T, H> {
    /// Creates a heap-backed pool with room for `capacity` elements.
    ///
    /// Panics if `capacity` cannot be represented by `H`.
    pub fn with_capacity(capacity: H::Index) -> Self {
        let cap = capacity.as_usize();
        if cap >= H::MAX_INDEX {
            buffer_too_large_for_handle_type::<H>();
        }
        let storage = crate::storage::AllocStorage::with_capacity(cap);
        Self::from(storage)
    }
}
#[cfg(feature = "alloc")]
#[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
impl<T: Clone, H: Handle> Clone for crate::collections::PackedAllocPool<T, H> {
    /// Clones the pool, preserving all handles, generation counters, and
    /// the free-list structure.
    fn clone(&self) -> Self {
        let storage = crate::storage::AllocStorage::with_capacity(self.capacity());
        // Built by struct literal (not `From`) because the free list and
        // counters are copied verbatim from `self` below.
        let mut result: Self = PackedPool {
            buf: storage,
            len: self.len,
            next_free_slot: self.next_free_slot,
            items: PhantomData,
        };
        // Clone the live values one by one (`T` may not be `Copy`).
        for i in 0..self.len() {
            unsafe {
                let src = &*self.values_ptr().add(i);
                let dst = result.values_mut_ptr().add(i);
                dst.write(src.clone());
            }
        }
        // Handles are only initialized for the first `len` entries...
        let src_handles = self.handles_ptr();
        let dst_handles = result.handles_mut_ptr();
        unsafe { core::ptr::copy(src_handles, dst_handles, self.len()) };
        // ...but counters and slot links must be copied for every slot so
        // generations and the free-list chaining carry over.
        let src_counters = self.counters();
        let dst_counters = result.counters_mut();
        unsafe { core::ptr::copy(src_counters, dst_counters, self.capacity()) };
        let src_slots = self.next_free_slot_or_packed_index_array();
        let dst_slots = result.next_free_slot_or_packed_index_array_mut();
        unsafe { core::ptr::copy(src_slots, dst_slots, self.capacity()) };
        result
    }
}
/// Backing storage for a [`PackedPool`] held inline (no heap allocation),
/// with room for `N` elements.
///
/// `#[repr(C)]` fixes the field order so the offsets match those computed
/// by [`PackedPoolLayout`]; the `inline_storage_layout` test checks this.
#[repr(C)]
pub struct InlineStorage<T, H: Handle, const N: usize> {
    values: [MaybeUninit<T>; N],
    handles: [MaybeUninit<H>; N],
    counters: [MaybeUninit<u32>; N],
    next_free_slot_or_packed_index: [MaybeUninit<H::Index>; N],
}
// SAFETY: `InlineStorage` is `#[repr(C)]` with its fields in the same
// order as the arrays described by `PackedPoolLayout`, so pointer
// arithmetic based on that layout stays in bounds.
unsafe impl<T, H: Handle, const N: usize> Storage<PackedPoolLayout<T, H>> for InlineStorage<T, H, N> {
    fn get_ptr(&self) -> *const u8 {
        (self as *const Self).cast()
    }
    fn get_mut_ptr(&mut self) -> *mut u8 {
        (self as *mut Self).cast()
    }
    fn capacity(&self) -> usize {
        N
    }
}
impl<T, H: Handle, const N: usize> PackedPool<T, InlineStorage<T, H, N>, H> {
    /// Creates a new, empty pool with inline storage for `N` elements.
    ///
    /// Panics if `N` cannot be represented by `H`.
    pub fn new() -> Self {
        if N >= H::MAX_INDEX {
            buffer_too_large_for_handle_type::<H>();
        }
        // All arrays may start uninitialized; the `From` impl initializes
        // the counters and the free list.
        Self::from(InlineStorage {
            values: unsafe { MaybeUninit::<[MaybeUninit<T>; N]>::uninit().assume_init() },
            handles: [MaybeUninit::uninit(); N],
            counters: [MaybeUninit::uninit(); N],
            next_free_slot_or_packed_index: [MaybeUninit::uninit(); N],
        })
    }
}
impl<T, H: Handle, const N: usize> Default for PackedPool<T, InlineStorage<T, H, N>, H> {
    /// Equivalent to [`PackedPool::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<T: Clone, H: Handle, const N: usize> Clone for PackedPool<T, InlineStorage<T, H, N>, H> {
    /// Clones the pool, preserving all handles, generation counters, and
    /// the free-list structure.
    fn clone(&self) -> Self {
        // Built by struct literal (not `new`) because the free list and
        // counters are copied verbatim from `self` below.
        let mut result: Self = PackedPool {
            buf: InlineStorage {
                values: unsafe { MaybeUninit::<[MaybeUninit<T>; N]>::uninit().assume_init() },
                handles: [MaybeUninit::uninit(); N],
                counters: [MaybeUninit::uninit(); N],
                next_free_slot_or_packed_index: [MaybeUninit::uninit(); N],
            },
            len: self.len,
            next_free_slot: self.next_free_slot,
            items: PhantomData,
        };
        // Clone the live values one by one (`T` may not be `Copy`).
        for i in 0..self.len() {
            unsafe {
                let src = self.values_ptr().add(i).as_ref().unwrap();
                let dst = result.values_mut_ptr().add(i);
                dst.write(src.clone());
            }
        }
        // Handles are only initialized for the first `len` entries...
        let src_handles = self.handles_ptr();
        let dst_handles = result.handles_mut_ptr();
        unsafe { core::ptr::copy(src_handles, dst_handles, self.len()) };
        // ...but counters and slot links must be copied for every slot so
        // generations and the free-list chaining carry over.
        let src_counters = self.counters();
        let dst_counters = result.counters_mut();
        unsafe { core::ptr::copy(src_counters, dst_counters, self.capacity()) };
        let src_slots = self.next_free_slot_or_packed_index_array();
        let dst_slots = result.next_free_slot_or_packed_index_array_mut();
        unsafe { core::ptr::copy(src_slots, dst_slots, self.capacity()) };
        result
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::collections::pool::{DefaultHandle, Handle};
    use crate::storage::LayoutSpec;
    use crate::{fmt, arena::Arena};

    // Checks the `Debug` output of an empty, a full, and a partially
    // drained pool, including the free-list chaining between slots.
    #[test]
    fn debug_impl() {
        let mut storage = [MaybeUninit::uninit(); 2048];
        let mut arena = Arena::from(&mut storage[..]);
        let mut pool: crate::collections::PackedArenaPool<&'static str, DefaultHandle> = arena.with_capacity(4);
        let empty = fmt!(arena, "{:?}", pool).unwrap();
        assert_eq!(
            &*empty,
            "PackedPool { len: 0, next_free_slot: 0, slots: [\
            Vacant { generation: 0, next_free_slot: 1 }, \
            Vacant { generation: 0, next_free_slot: 2 }, \
            Vacant { generation: 0, next_free_slot: 3 }, \
            Vacant { generation: 0, next_free_slot: 4294967295 }\
            ] }"
        );
        let h1 = pool.insert("First");
        pool.insert("Second");
        let h3 = pool.insert("Third");
        pool.insert("Fourth");
        let full = fmt!(&mut arena, "{:?}", pool).unwrap();
        assert_eq!(
            &*full,
            "PackedPool { len: 4, next_free_slot: 4294967295, slots: [\
            Occupied { generation: 1, value: \"First\" }, \
            Occupied { generation: 1, value: \"Second\" }, \
            Occupied { generation: 1, value: \"Third\" }, \
            Occupied { generation: 1, value: \"Fourth\" }\
            ] }"
        );
        // Removing pushes slots onto the free list in removal order:
        // head -> slot 2 -> slot 0 -> sentinel.
        pool.remove(h1);
        pool.remove(h3);
        let partial = fmt!(&mut arena, "{:?}", pool).unwrap();
        assert_eq!(
            &*partial,
            "PackedPool { len: 2, next_free_slot: 2, slots: [\
            Vacant { generation: 2, next_free_slot: 4294967295 }, \
            Occupied { generation: 1, value: \"Second\" }, \
            Vacant { generation: 2, next_free_slot: 0 }, \
            Occupied { generation: 1, value: \"Fourth\" }\
            ] }"
        );
    }

    // `InlineStorage` must have exactly the layout computed by
    // `PackedPoolLayout::layout_with_capacity`, or the pool's pointer
    // arithmetic would read/write out of bounds.
    #[test]
    fn inline_storage_layout() {
        fn test_layout<T, H: Handle, const N: usize>() {
            use core::alloc::Layout;
            let inline_layout = Layout::new::<InlineStorage<T, H, N>>();
            let dynamic_layout = PackedPoolLayout::<T, H>::layout_with_capacity(N).unwrap();
            assert_eq!(inline_layout, dynamic_layout);
        }
        test_layout::<[u8; 3], DefaultHandle, 10>();
        test_layout::<[u8; 25], DefaultHandle, 20>();
        test_layout::<u128, DefaultHandle, 40>();
        test_layout::<crate::collections::ArenaDeque<u8>, DefaultHandle, 80>();
    }
}