1use core::alloc::{Layout, LayoutError};
8use core::fmt::{Debug, Formatter};
9use core::iter::FusedIterator;
10use core::marker::PhantomData;
11use core::mem::MaybeUninit;
12use core::ops::{Index, IndexMut};
13
14use super::{buffer_too_large_for_handle_type, DebugEntry, DefaultHandle, Handle};
15use crate::storage::{Capacity, LayoutSpec, Storage};
16
/// [`LayoutSpec`] marker describing the backing buffer of a [`PackedPool`]:
/// four consecutive arrays (values, handles, generation counters, slot
/// indices), one entry per slot.
pub struct PackedPoolLayout<T, H>(PhantomData<(T, H)>);
19impl<T, H: Handle> LayoutSpec for PackedPoolLayout<T, H> {
20 fn layout_with_capacity(items: usize) -> Result<Layout, LayoutError> {
21 let values_array = Layout::array::<T>(items)?;
22 let handles_array = Layout::array::<H>(items)?;
23 let counters_array = Layout::array::<u32>(items)?;
24 let index_array = Layout::array::<H::Index>(items)?;
25
26 let (extended, _) = values_array.extend(handles_array)?;
27 let (extended, _) = extended.extend(counters_array)?;
28 let (extended, _) = extended.extend(index_array)?;
29
30 Ok(extended.pad_to_align())
31 }
32}
33
/// A pool that keeps its values (and their handles) densely packed at the
/// front of the buffer, addressed through stable generational [`Handle`]s.
pub struct PackedPool<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle = DefaultHandle> {
    // Raw backing buffer laid out according to `PackedPoolLayout`.
    buf: S,
    // Number of occupied slots == length of the packed value/handle arrays.
    len: H::Index,
    // Head of the intrusive free list, or `FREE_LIST_SENTINEL` when full.
    next_free_slot: H::Index,
    // The pool logically owns `T`s even though `S` only stores raw bytes.
    items: PhantomData<T>,
}
45
46impl<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> From<S> for PackedPool<T, S, H> {
47 fn from(buf: S) -> Self {
48 let cap = buf.capacity();
49 if cap >= H::MAX_INDEX {
50 buffer_too_large_for_handle_type::<H>();
51 }
52
53 let mut result = PackedPool {
54 buf,
55 len: H::Index::from_usize(0),
56 next_free_slot: H::Index::from_usize(0),
57 items: PhantomData,
58 };
59
60 let mut ptr = result.next_free_slot_or_packed_index_array_mut();
62 for i in 1..cap {
63 unsafe {
64 *ptr = H::Index::from_usize(i);
65 ptr = ptr.add(1);
66 }
67 }
68
69 let sentinel = H::Index::from_usize(Self::FREE_LIST_SENTINEL);
70 unsafe { *ptr = sentinel; }
71
72 unsafe { core::ptr::write_bytes(result.counters_mut(), 0x00, cap); }
74
75 result
76 }
77}
78
79impl<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> PackedPool<T, S, H> {
    // Index value terminating the free list. No valid slot can carry this
    // index because `From<S>` rejects capacities >= `H::MAX_INDEX`.
    const FREE_LIST_SENTINEL: usize = H::Index::MAX_REPRESENTABLE;

    // Pointer to the packed values array (the first array in the buffer).
    #[inline]
    fn values_ptr(&self) -> *const T {
        self.buf.get_ptr().cast()
    }

    #[inline]
    fn values_mut_ptr(&mut self) -> *mut T {
        self.buf.get_mut_ptr().cast()
    }

    // Pointer to the packed handles array. The offset is recomputed with the
    // same `Layout::extend` sequence used by `layout_with_capacity`, so the
    // two must be kept in sync.
    #[inline]
    fn handles_ptr(&self) -> *const H {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let (_, offset) = values_array.extend(handles_array).unwrap();

        let base_ptr = self.buf.get_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    #[inline]
    fn handles_mut_ptr(&mut self) -> *mut H {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let (_, offset) = values_array.extend(handles_array).unwrap();

        let base_ptr = self.buf.get_mut_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    // Pointer to the per-slot generation counters: an even counter marks a
    // vacant slot, an odd one an occupied slot.
    #[inline]
    fn counters(&self) -> *const u32 {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let counters_array = Layout::array::<u32>(cap).unwrap();

        let (extended, _) = values_array.extend(handles_array).unwrap();
        let (_, offset) = extended.extend(counters_array).unwrap();

        let base_ptr = self.buf.get_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    #[inline]
    fn counters_mut(&mut self) -> *mut u32 {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let counters_array = Layout::array::<u32>(cap).unwrap();

        let (extended, _) = values_array.extend(handles_array).unwrap();
        let (_, offset) = extended.extend(counters_array).unwrap();

        let base_ptr = self.buf.get_mut_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    // Pointer to the per-slot index array. For a vacant slot the entry is
    // the next free slot; for an occupied slot it is the position of the
    // slot's value in the packed arrays.
    #[inline]
    fn next_free_slot_or_packed_index_array(&self) -> *const H::Index {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let counters_array = Layout::array::<u32>(cap).unwrap();
        let nfsopi_array = Layout::array::<H::Index>(cap).unwrap();

        let (extended, _) = values_array.extend(handles_array).unwrap();
        let (extended, _) = extended.extend(counters_array).unwrap();
        let (_, offset) = extended.extend(nfsopi_array).unwrap();

        let base_ptr = self.buf.get_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }

    #[inline]
    fn next_free_slot_or_packed_index_array_mut(&mut self) -> *mut H::Index {
        let cap = self.buf.capacity();
        let values_array = Layout::array::<T>(cap).unwrap();
        let handles_array = Layout::array::<H>(cap).unwrap();
        let counters_array = Layout::array::<u32>(cap).unwrap();
        let nfsopi_array = Layout::array::<H::Index>(cap).unwrap();

        let (extended, _) = values_array.extend(handles_array).unwrap();
        let (extended, _) = extended.extend(counters_array).unwrap();
        let (_, offset) = extended.extend(nfsopi_array).unwrap();

        let base_ptr = self.buf.get_mut_ptr();
        unsafe { base_ptr.add(offset) }.cast()
    }
173
174 #[inline]
176 pub fn capacity(&self) -> usize {
177 self.buf.capacity()
178 }
179
180 #[inline]
182 pub fn len(&self) -> usize {
183 self.len.as_usize()
184 }
185
186 #[inline]
188 pub fn is_empty(&self) -> bool {
189 self.len() == 0
190 }
191
192 #[inline]
194 pub fn is_full(&self) -> bool {
195 self.len() == self.capacity()
196 }
197
198 #[inline]
215 pub fn values(&self) -> &[T] {
216 unsafe { core::slice::from_raw_parts(self.values_ptr(), self.len()) }
217 }
218
219 #[inline]
238 pub fn values_mut(&mut self) -> &mut [T] {
239 unsafe { core::slice::from_raw_parts_mut(self.values_mut_ptr(), self.len()) }
240 }
241
242 #[inline]
259 pub fn handles(&self) -> &[H] {
260 unsafe { core::slice::from_raw_parts(self.handles_ptr(), self.len()) }
261 }
262
263 #[inline]
290 pub fn handles_and_values_mut(&mut self) -> (&[H], &mut [T]) {
291 let len = self.len();
292 let handles_ptr = self.handles_ptr();
293 let values_ptr = self.values_mut_ptr();
294
295 unsafe {
296 let handles = core::slice::from_raw_parts(handles_ptr, len);
297 let values = core::slice::from_raw_parts_mut(values_ptr, len);
298 (handles, values)
299 }
300 }
301
302 pub fn contains(&self, handle: H) -> bool {
314 let (idx, input_gen_count) = handle.into_raw_parts();
315 if idx >= self.buf.capacity() {
316 return false;
317 }
318
319 let current_gen_count = unsafe { self.counters().add(idx).read() };
320 current_gen_count == input_gen_count && current_gen_count % 2 == 1
321 }
322
323 pub fn get(&self, handle: H) -> Option<&T> {
327 let (index, input_gen_count) = handle.into_raw_parts();
328 if index >= self.buf.capacity() {
329 return None;
330 }
331
332 let current_gen_count = unsafe { self.counters().add(index).read() };
333 if current_gen_count != input_gen_count || input_gen_count % 2 == 0 {
334 return None;
335 }
336
337 let packed_index = unsafe {
338 self.next_free_slot_or_packed_index_array().add(index).read()
339 };
340 unsafe { self.values_ptr().add(packed_index.as_usize()).as_ref() }
341 }
342
343 pub fn get_mut(&mut self, handle: H) -> Option<&mut T> {
347 let (index, input_gen_count) = handle.into_raw_parts();
348 if index >= self.buf.capacity() {
349 return None;
350 }
351
352 let current_gen_count = unsafe { self.counters().add(index).read() };
353 if current_gen_count != input_gen_count || input_gen_count % 2 == 0 {
354 return None;
355 }
356
357 let packed_index = unsafe {
358 self.next_free_slot_or_packed_index_array().add(index).read()
359 };
360 unsafe { self.values_mut_ptr().add(packed_index.as_usize()).as_mut() }
361 }
362
    /// Returns mutable references to the `N` values referred to by the given
    /// handles, or `None` if any handle is invalid or duplicated.
    pub fn get_disjoint_mut<const N: usize>(&mut self, handles: [H; N]) -> Option<[&mut T; N]> {
        // Collect raw pointers first; they become `&mut T` only on success.
        let mut result: [MaybeUninit<*mut T>; N] = unsafe { MaybeUninit::uninit().assume_init() };

        let counters = self.counters_mut();
        let slots = self.next_free_slot_or_packed_index_array();
        let values = self.values_mut_ptr();

        let mut i = 0;
        while i < N {
            let (index, input_gen_count) = handles[i].into_raw_parts();
            if index >= self.capacity() || input_gen_count % 2 == 0 {
                break;
            }

            let current_gen_count = unsafe { counters.add(index).read() };
            if current_gen_count != input_gen_count {
                break;
            }

            unsafe {
                // Temporarily flip the counter's parity bit so that a
                // duplicate of this handle fails the comparison above —
                // this is how aliasing is detected.
                *counters.add(index) ^= 1;
                let packed_index = slots.add(index).read().as_usize();
                result[i] = MaybeUninit::new(values.add(packed_index));
            }

            i += 1;
        }

        // Undo the parity marks on all handles processed so far, whether the
        // validation pass succeeded or not.
        for h in &handles[..i] {
            let (index, _) = h.into_raw_parts();
            unsafe { *counters.add(index) ^= 1 };
        }

        if i == N {
            // All handles valid and distinct: reinterpret the pointer array
            // as an array of mutable references.
            Some(unsafe { core::mem::transmute_copy(&result) })
        } else {
            None
        }
    }
426
    /// Inserts `value` into the pool, returning a handle to it, or
    /// `Err(value)` if the pool is already at capacity.
    pub fn try_insert(&mut self, value: T) -> Result<H, T> {
        // The free-list head doubles as the "pool is full" indicator.
        let insert_position = self.next_free_slot.as_usize();
        if insert_position == Self::FREE_LIST_SENTINEL {
            return Err(value);
        }

        // New elements always go at the end of the packed arrays.
        let packed_insert_position = self.len;
        self.len = H::Index::from_usize(packed_insert_position.as_usize() + 1);

        unsafe {
            // Bump the generation counter (even -> odd marks the slot
            // occupied); the mask handles counter wrap-around.
            let gen_count_ptr = self.counters_mut().add(insert_position);
            let gen_count = gen_count_ptr.read().wrapping_add(1) & H::MAX_GENERATION;
            debug_assert_eq!(gen_count % 2, 1);
            gen_count_ptr.write(gen_count);

            // Pop the slot off the free list and map it to the packed position.
            let slot_ptr = self.next_free_slot_or_packed_index_array_mut().add(insert_position);
            self.next_free_slot = slot_ptr.read();
            slot_ptr.write(packed_insert_position);

            let handle = H::new(insert_position, gen_count);
            self.handles_mut_ptr().add(packed_insert_position.as_usize()).write(handle);
            self.values_mut_ptr().add(packed_insert_position.as_usize()).write(value);

            Ok(handle)
        }
    }
464
465 pub fn insert(&mut self, value: T) -> H {
471 #[cold]
472 #[inline(never)]
473 fn assert_failed() -> ! {
474 panic!("pool is already at capacity")
475 }
476
477 let result = self.try_insert(value);
478 match result {
479 Ok(handle) => handle,
480 Err(_) => assert_failed(),
481 }
482 }
483
    /// Inserts the value produced by `f` — which receives the new handle —
    /// returning that handle, or `None` if the pool is already at capacity.
    pub fn try_insert_with_handle<F: FnOnce(H) -> T>(&mut self, f: F) -> Option<H> {
        // Mirrors `try_insert`; the only difference is that the value is
        // constructed after the handle is known.
        let insert_position = self.next_free_slot.as_usize();
        if insert_position == Self::FREE_LIST_SENTINEL {
            return None;
        }

        let packed_insert_position = self.len;
        self.len = H::Index::from_usize(packed_insert_position.as_usize() + 1);

        unsafe {
            let gen_count_ptr = self.counters_mut().add(insert_position);
            let gen_count = gen_count_ptr.read().wrapping_add(1) & H::MAX_GENERATION;
            debug_assert_eq!(gen_count % 2, 1);
            gen_count_ptr.write(gen_count);

            let slot_ptr = self.next_free_slot_or_packed_index_array_mut().add(insert_position);
            self.next_free_slot = slot_ptr.read();
            slot_ptr.write(packed_insert_position);

            let handle = H::new(insert_position, gen_count);
            self.handles_mut_ptr().add(packed_insert_position.as_usize()).write(handle);
            // NOTE(review): if `f` panics here, `len` has already been
            // incremented while this value slot is still uninitialized, so
            // dropping the pool would read uninitialized memory — confirm
            // whether `f` is required not to panic.
            self.values_mut_ptr().add(packed_insert_position.as_usize()).write(f(handle));

            Some(handle)
        }
    }
523
524 pub fn insert_with_handle<F: FnOnce(H) -> T>(&mut self, f: F) -> H {
533 self.try_insert_with_handle(f)
534 .expect("pool is already at capacity")
535 }
536
    /// Removes and returns the value referred to by `handle`, or `None` if
    /// the handle is out of range or stale.
    pub fn remove(&mut self, handle: H) -> Option<T> {
        // An even generation count never refers to an occupied slot.
        let (index, input_gen_count) = handle.into_raw_parts();
        if index >= self.buf.capacity() || input_gen_count % 2 == 0 {
            return None;
        }

        let gen_count_ptr = unsafe { self.counters_mut().add(index) };
        let current_gen_count = unsafe { *gen_count_ptr };
        if current_gen_count != input_gen_count {
            return None;
        }

        unsafe {
            // Invalidate the handle (odd -> even marks the slot vacant).
            gen_count_ptr.write(current_gen_count.wrapping_add(1));

            // Push the slot onto the free list, noting where its value lives
            // in the packed arrays.
            let slot_ptr = self.next_free_slot_or_packed_index_array_mut().add(index);
            let packed_index = slot_ptr.read();
            slot_ptr.write(self.next_free_slot);
            self.next_free_slot = H::Index::from_usize(index);

            // Move the value out, then fill the hole with the last packed
            // element (unless the removed value *was* the last element).
            let hole = self.values_mut_ptr().add(packed_index.as_usize());
            let result = hole.read();

            let new_len = self.len() - 1;
            if new_len != packed_index.as_usize() {
                let last = self.values_ptr().add(new_len);
                core::ptr::copy(last, hole, 1);

                let hole = self.handles_mut_ptr().add(packed_index.as_usize());
                let last = self.handles_ptr().add(new_len);
                core::ptr::copy(last, hole, 1);

                // Update the moved element's slot entry to its new packed
                // position.
                let (index, _) = last.read().into_raw_parts();
                self.next_free_slot_or_packed_index_array_mut().add(index).write(packed_index);
            }

            self.len = H::Index::from_usize(new_len);
            Some(result)
        }
    }
592
593 pub fn retain<F: FnMut(H, &mut T) -> bool>(&mut self, mut f: F) {
614 self.drain_filter(|handle, item| !f(handle, item))
615 .for_each(drop);
616 }
617
    /// Drops all stored values, invalidates all handles, and returns every
    /// slot to the free list.
    pub fn clear(&mut self) {
        for packed_index in 0..self.len() {
            unsafe {
                self.values_mut_ptr().add(packed_index).drop_in_place();

                // Bump the counter (odd -> even marks the slot vacant);
                // wrap-around masking is applied on the next insertion.
                let (index, _) = self.handles_ptr().add(packed_index).read().into_raw_parts();
                *self.counters_mut().add(index) += 1;

                // Push the slot onto the free list.
                let slot_ptr = self.next_free_slot_or_packed_index_array_mut().add(index);
                *slot_ptr = self.next_free_slot;
                self.next_free_slot = H::Index::from_usize(index);
            }
        }

        self.len = H::Index::from_usize(0);
    }
646
647 pub fn iter(&self) -> Iter<'_, H, T> {
668 Iter { handles: self.handles_ptr(), values: self.values_ptr(), len: self.len(), pool: PhantomData }
669 }
670
671 pub fn iter_mut(&mut self) -> IterMut<'_, H, T> {
691 IterMut { handles: self.handles_ptr(), values: self.values_mut_ptr(), len: self.len(), pool: PhantomData }
692 }
693
694 pub fn drain(&mut self) -> Drain<'_, T, S, H> {
715 Drain { pool: self }
716 }
717
718 pub fn drain_filter<F: FnMut(H, &mut T) -> bool>(&mut self, filter_fn: F) -> DrainFilter<'_, T, S, H, F> {
749 let last_visited = self.len();
750 DrainFilter {
751 pool: self,
752 last_visited,
753 filter_fn
754 }
755 }
756}
757
758impl<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Index<H> for PackedPool<T, S, H> {
759 type Output = T;
760 fn index(&self, handle: H) -> &Self::Output {
761 self.get(handle).expect("indexed with invalid pool handle")
762 }
763}
764
765impl<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> IndexMut<H> for PackedPool<T, S, H> {
766 fn index_mut(&mut self, handle: H) -> &mut Self::Output {
767 self.get_mut(handle).expect("indexed with invalid pool handle")
768 }
769}
770
impl<T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Drop for PackedPool<T, S, H> {
    fn drop(&mut self) {
        // `clear` drops all stored values; the buffer itself is released by
        // `S`'s own `Drop` implementation, if any.
        self.clear();
    }
}
776
// Helper that renders every slot of a pool (vacant and occupied alike) for
// the pool's `Debug` output.
struct DebugSlots<'a, T: Debug, S: Storage<PackedPoolLayout<T, H>>, H: Handle>(
    &'a PackedPool<T, S, H>,
);
impl<T: Debug, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Debug for DebugSlots<'_, T, S, H> {
    fn fmt(&self, fmt: &mut Formatter<'_>) -> core::fmt::Result {
        let gen_count_ptr = self.0.counters();
        let slot_ptr = self.0.next_free_slot_or_packed_index_array();
        let values_ptr = self.0.values_ptr();
        fmt.debug_list()
            .entries(
                (0..self.0.capacity()).map::<DebugEntry<T, H>, _>(|i| unsafe {
                    let generation = gen_count_ptr.add(i).read();
                    // Even generation count == vacant; the slot entry is then
                    // the next free slot, otherwise the packed value position.
                    if generation % 2 == 0 {
                        let next_free_slot = slot_ptr.add(i).read();
                        DebugEntry::Vacant {
                            generation,
                            next_free_slot,
                        }
                    } else {
                        let packed_index = slot_ptr.add(i).read().as_usize();
                        let value = &*values_ptr.add(packed_index);
                        DebugEntry::Occupied { generation, value }
                    }
                }),
            )
            .finish()
    }
}
805
806impl<T: Debug, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Debug for PackedPool<T, S, H> {
807 fn fmt(&self, fmt: &mut Formatter<'_>) -> core::fmt::Result {
808 let mut builder = fmt.debug_struct("PackedPool");
809 builder
810 .field("len", &self.len.as_usize())
811 .field("next_free_slot", &self.next_free_slot.as_usize())
812 .field("slots", &DebugSlots(self))
813 .finish()
814 }
815}
816
/// An iterator over `(handle, &value)` pairs of a [`PackedPool`], returned
/// by [`PackedPool::iter`].
pub struct Iter<'a, H, T> {
    // Raw cursors into the pool's packed handle/value arrays.
    handles: *const H,
    values: *const T,
    // Number of pairs not yet yielded; iteration proceeds back-to-front.
    len: usize,
    // Ties the iterator's lifetime to the borrowed pool.
    pool: PhantomData<&'a ()>,
}
826
827impl<'a, H, T: 'a> Iterator for Iter<'a, H, T> {
828 type Item = (H, &'a T);
829
830 fn next(&mut self) -> Option<Self::Item> {
831 if self.len == 0 {
832 return None;
833 }
834
835 self.len -= 1;
836 Some(unsafe {
837 let handle = self.handles.add(self.len).read();
838 let value = &*self.values.add(self.len);
839 (handle, value)
840 })
841 }
842
843 fn size_hint(&self) -> (usize, Option<usize>) {
844 (self.len, Some(self.len))
845 }
846}
847
// `size_hint` is exact and `next` keeps returning `None` once exhausted, so
// both marker traits hold.
impl<'a, T: 'a, H> ExactSizeIterator for Iter<'a, H, T> {}
impl<'a, T: 'a, H> FusedIterator for Iter<'a, H, T> {}

impl<'a, T: 'a, S: Storage<PackedPoolLayout<T, H>>, H: Handle> IntoIterator for &'a PackedPool<T, S, H> {
    type Item = (H, &'a T);
    type IntoIter = Iter<'a, H, T>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

/// An iterator over `(handle, &mut value)` pairs of a [`PackedPool`],
/// returned by [`PackedPool::iter_mut`].
pub struct IterMut<'a, H, T> {
    // Raw cursors into the pool's packed handle/value arrays.
    handles: *const H,
    values: *mut T,
    // Number of pairs not yet yielded; iteration proceeds back-to-front.
    len: usize,
    // Ties the iterator to the mutable borrow of the pool.
    pool: PhantomData<&'a mut ()>,
}
869
870impl<'a, H, T: 'a> Iterator for IterMut<'a, H, T> {
871 type Item = (H, &'a mut T);
872
873 fn next(&mut self) -> Option<Self::Item> {
874 if self.len == 0 {
875 return None;
876 }
877
878 self.len -= 1;
879 Some(unsafe {
880 let handle = self.handles.add(self.len).read();
881 let value = &mut *self.values.add(self.len);
882 (handle, value)
883 })
884 }
885
886 fn size_hint(&self) -> (usize, Option<usize>) {
887 (self.len, Some(self.len))
888 }
889}
890
// `size_hint` is exact and `next` keeps returning `None` once exhausted, so
// both marker traits hold.
impl<'a, T: 'a, H> ExactSizeIterator for IterMut<'a, H, T> {}
impl<'a, T: 'a, H> FusedIterator for IterMut<'a, H, T> {}

impl<'a, T: 'a, S: Storage<PackedPoolLayout<T, H>>, H: Handle> IntoIterator for &'a mut PackedPool<T, S, H> {
    type Item = (H, &'a mut T);
    type IntoIter = IterMut<'a, H, T>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

/// A draining iterator returned by [`PackedPool::drain`]; removes and yields
/// every element, and clears the pool when dropped.
pub struct Drain<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> {
    pool: &'a mut PackedPool<T, S, H>,
}
909
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Iterator for Drain<'a, T, S, H> {
    type Item = (H, T);

    /// Removes and yields the last packed element; removing from the back
    /// means no hole-filling is ever required.
    fn next(&mut self) -> Option<Self::Item> {
        let len = self.pool.len();
        if len == 0 {
            return None;
        }

        let new_len = len - 1;
        let handle = unsafe { self.pool.handles_ptr().add(new_len).read() };
        let value = unsafe { self.pool.values_ptr().add(new_len).read() };

        // Invalidate the handle (odd -> even) and push its slot onto the
        // free list.
        let (index, gen_count) = handle.into_raw_parts();
        unsafe {
            self.pool.counters_mut().add(index).write(gen_count.wrapping_add(1));
            self.pool.next_free_slot_or_packed_index_array_mut().add(index).write(self.pool.next_free_slot);
        }

        self.pool.next_free_slot = H::Index::from_usize(index);
        self.pool.len = H::Index::from_usize(new_len);
        Some((handle, value))
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let size = self.pool.len();
        (size, Some(size))
    }
}
939
impl <'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> Drop for Drain<'a, T, S, H> {
    fn drop(&mut self) {
        // Any elements not yet yielded are removed and dropped in bulk.
        self.pool.clear();
    }
}

// `size_hint` reports the exact remaining length, so both impls are sound.
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> ExactSizeIterator for Drain<'a, T, S, H> {}
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle> FusedIterator for Drain<'a, T, S, H> {}
948
/// A selective draining iterator returned by [`PackedPool::drain_filter`];
/// removes and yields the elements matching the filter, visiting the rest
/// in place.
pub struct DrainFilter<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle, F: FnMut(H, &mut T) -> bool> {
    pool: &'a mut PackedPool<T, S, H>,
    // Packed position of the next element to visit, walking backwards.
    last_visited: usize,
    filter_fn: F,
}
957
958impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle, F: FnMut(H, &mut T) -> bool> Iterator for DrainFilter<'a, T, S, H, F> {
959 type Item = (H, T);
960
961 fn next(&mut self) -> Option<Self::Item> {
962 while self.last_visited > 0 {
963 self.last_visited -= 1;
964
965 let handle = unsafe { self.pool.handles_ptr().add(self.last_visited).read() };
966 let should_remove = unsafe {
967 let value_ref = &mut *self.pool.values_mut_ptr().add(self.last_visited);
968 (self.filter_fn)(handle, value_ref)
969 };
970
971 if !should_remove { continue; }
972 let new_len = self.pool.len() - 1;
973 let value = unsafe { self.pool.values_ptr().add(self.last_visited).read() };
974
975 let (index, gen_count) = handle.into_raw_parts();
976 unsafe {
977 self.pool.counters_mut().add(index).write(gen_count.wrapping_add(1));
978 self.pool.next_free_slot_or_packed_index_array_mut().add(index).write(self.pool.next_free_slot);
979
980 if index != new_len {
981 let value_src = self.pool.values_ptr().add(new_len);
982 let value_dst = self.pool.values_mut_ptr().add(self.last_visited);
983 core::ptr::copy(value_src, value_dst, 1);
984
985 let handle_src = self.pool.handles_ptr().add(new_len);
986 let handle_dst = self.pool.handles_mut_ptr().add(self.last_visited);
987 core::ptr::copy(handle_src, handle_dst, 1);
988
989 let (index, _) = handle_src.read().into_raw_parts();
990 self.pool.next_free_slot_or_packed_index_array_mut().add(index).write(H::Index::from_usize(self.last_visited));
991 }
992 }
993
994 self.pool.len = H::Index::from_usize(new_len);
995 return Some((handle, value));
996 }
997
998 None
999 }
1000}
1001
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle, F: FnMut(H, &mut T) -> bool> Drop for DrainFilter<'a, T, S, H, F> {
    fn drop(&mut self) {
        // Exhaust the iterator so all matching elements are removed even if
        // the caller abandons it early (`&mut Self` also implements Iterator).
        self.for_each(drop);
    }
}

// NOTE(review): `ExactSizeIterator` looks unsound here — `DrainFilter` keeps
// the default `size_hint()` of `(0, None)`, so the provided `len()` would
// panic, and the number of matching elements cannot be known up front.
// Removing the impl is a breaking change; confirm intent before a release.
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle, F: FnMut(H, &mut T) -> bool> ExactSizeIterator for DrainFilter<'a, T, S, H, F> {}
impl<'a, T, S: Storage<PackedPoolLayout<T, H>>, H: Handle, F: FnMut(H, &mut T) -> bool> FusedIterator for DrainFilter<'a, T, S, H, F> {}
1010
1011#[cfg(feature = "alloc")]
1012#[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
1013impl<T, H: Handle> crate::collections::PackedAllocPool<T, H> {
1014 pub fn with_capacity(capacity: H::Index) -> Self {
1020 let cap = capacity.as_usize();
1021 if cap >= H::MAX_INDEX {
1022 buffer_too_large_for_handle_type::<H>();
1023 }
1024
1025 let storage = crate::storage::AllocStorage::with_capacity(cap);
1026 Self::from(storage)
1027 }
1028}
1029
1030#[cfg(feature = "alloc")]
1031#[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
1032impl<T: Clone, H: Handle> Clone for crate::collections::PackedAllocPool<T, H> {
1033 fn clone(&self) -> Self {
1034 let storage = crate::storage::AllocStorage::with_capacity(self.capacity());
1035 let mut result: Self = PackedPool {
1036 buf: storage,
1037 len: self.len,
1038 next_free_slot: self.next_free_slot,
1039 items: PhantomData,
1040 };
1041
1042 for i in 0..self.len() {
1043 unsafe {
1044 let src = &*self.values_ptr().add(i);
1045 let dst = result.values_mut_ptr().add(i);
1046 dst.write(src.clone());
1047 }
1048 }
1049
1050 let src_handles = self.handles_ptr();
1051 let dst_handles = result.handles_mut_ptr();
1052 unsafe { core::ptr::copy(src_handles, dst_handles, self.len()) };
1053
1054 let src_counters = self.counters();
1055 let dst_counters = result.counters_mut();
1056 unsafe { core::ptr::copy(src_counters, dst_counters, self.capacity()) };
1057
1058 let src_slots = self.next_free_slot_or_packed_index_array();
1059 let dst_slots = result.next_free_slot_or_packed_index_array_mut();
1060 unsafe { core::ptr::copy(src_slots, dst_slots, self.capacity()) };
1061
1062 result
1063 }
1064}
1065
/// Inline (stack-allocated) backing storage for a [`PackedPool`] with a
/// compile-time capacity of `N`.
///
/// `#[repr(C)]` pins the field order to the array sequence described by
/// [`PackedPoolLayout`]; the `inline_storage_layout` test asserts the two
/// layouts agree.
#[repr(C)]
pub struct InlineStorage<T, H: Handle, const N: usize> {
    values: [MaybeUninit<T>; N],
    handles: [MaybeUninit<H>; N],
    counters: [MaybeUninit<u32>; N],
    next_free_slot_or_packed_index: [MaybeUninit<H::Index>; N],
}
1076
// SAFETY: `InlineStorage` is `#[repr(C)]` with fields ordered exactly like
// the arrays in `PackedPoolLayout`, so the struct's own address can serve as
// the pool's buffer base pointer.
unsafe impl<T, H: Handle, const N: usize> Storage<PackedPoolLayout<T, H>> for InlineStorage<T, H, N> {
    fn get_ptr(&self) -> *const u8 {
        (self as *const Self).cast()
    }
    fn get_mut_ptr(&mut self) -> *mut u8 {
        (self as *mut Self).cast()
    }
    fn capacity(&self) -> usize {
        N
    }
}
1088
impl<T, H: Handle, const N: usize> PackedPool<T, InlineStorage<T, H, N>, H> {
    /// Creates a new, empty pool with inline storage for `N` elements.
    pub fn new() -> Self {
        if N >= H::MAX_INDEX {
            buffer_too_large_for_handle_type::<H>();
        }

        // All arrays start uninitialized; the `From` impl initializes the
        // counters and the free-list array.
        Self::from(InlineStorage {
            values: unsafe { MaybeUninit::<[MaybeUninit<T>; N]>::uninit().assume_init() },
            handles: [MaybeUninit::uninit(); N],
            counters: [MaybeUninit::uninit(); N],
            next_free_slot_or_packed_index: [MaybeUninit::uninit(); N],
        })
    }
}
1104
impl<T, H: Handle, const N: usize> Default for PackedPool<T, InlineStorage<T, H, N>, H> {
    /// Equivalent to [`PackedPool::new`].
    fn default() -> Self {
        Self::new()
    }
}
1110
1111impl<T: Clone, H: Handle, const N: usize> Clone for PackedPool<T, InlineStorage<T, H, N>, H> {
1112 fn clone(&self) -> Self {
1113 let mut result: Self = PackedPool {
1114 buf: InlineStorage {
1115 values: unsafe { MaybeUninit::<[MaybeUninit<T>; N]>::uninit().assume_init() },
1116 handles: [MaybeUninit::uninit(); N],
1117 counters: [MaybeUninit::uninit(); N],
1118 next_free_slot_or_packed_index: [MaybeUninit::uninit(); N],
1119 },
1120 len: self.len,
1121 next_free_slot: self.next_free_slot,
1122 items: PhantomData,
1123 };
1124
1125 for i in 0..self.len() {
1126 unsafe {
1127 let src = self.values_ptr().add(i).as_ref().unwrap();
1128 let dst = result.values_mut_ptr().add(i);
1129 dst.write(src.clone());
1130 }
1131 }
1132
1133 let src_handles = self.handles_ptr();
1134 let dst_handles = result.handles_mut_ptr();
1135 unsafe { core::ptr::copy(src_handles, dst_handles, self.len()) };
1136
1137 let src_counters = self.counters();
1138 let dst_counters = result.counters_mut();
1139 unsafe { core::ptr::copy(src_counters, dst_counters, self.capacity()) };
1140
1141 let src_slots = self.next_free_slot_or_packed_index_array();
1142 let dst_slots = result.next_free_slot_or_packed_index_array_mut();
1143 unsafe { core::ptr::copy(src_slots, dst_slots, self.capacity()) };
1144
1145 result
1146 }
1147}
1148
#[cfg(test)]
mod tests {
    use super::*;
    use crate::collections::pool::{DefaultHandle, Handle};
    use crate::storage::LayoutSpec;
    use crate::{fmt, arena::Arena};

    // Exercises the Debug output in three states: empty (free list fully
    // threaded), full (sentinel at the head), and partially vacated (freed
    // slots re-threaded in removal order).
    #[test]
    fn debug_impl() {
        let mut storage = [MaybeUninit::uninit(); 2048];
        let mut arena = Arena::from(&mut storage[..]);
        let mut pool: crate::collections::PackedArenaPool<&'static str, DefaultHandle> = arena.with_capacity(4);

        let empty = fmt!(arena, "{:?}", pool).unwrap();
        assert_eq!(
            &*empty,
            "PackedPool { len: 0, next_free_slot: 0, slots: [\
            Vacant { generation: 0, next_free_slot: 1 }, \
            Vacant { generation: 0, next_free_slot: 2 }, \
            Vacant { generation: 0, next_free_slot: 3 }, \
            Vacant { generation: 0, next_free_slot: 4294967295 }\
            ] }"
        );

        let h1 = pool.insert("First");
        pool.insert("Second");
        let h3 = pool.insert("Third");
        pool.insert("Fourth");

        let full = fmt!(&mut arena, "{:?}", pool).unwrap();
        assert_eq!(
            &*full,
            "PackedPool { len: 4, next_free_slot: 4294967295, slots: [\
            Occupied { generation: 1, value: \"First\" }, \
            Occupied { generation: 1, value: \"Second\" }, \
            Occupied { generation: 1, value: \"Third\" }, \
            Occupied { generation: 1, value: \"Fourth\" }\
            ] }"
        );

        pool.remove(h1);
        pool.remove(h3);

        let partial = fmt!(&mut arena, "{:?}", pool).unwrap();
        assert_eq!(
            &*partial,
            "PackedPool { len: 2, next_free_slot: 2, slots: [\
            Vacant { generation: 2, next_free_slot: 4294967295 }, \
            Occupied { generation: 1, value: \"Second\" }, \
            Vacant { generation: 2, next_free_slot: 0 }, \
            Occupied { generation: 1, value: \"Fourth\" }\
            ] }"
        );
    }

    // Verifies that `InlineStorage`'s #[repr(C)] field layout agrees with
    // the dynamically computed `PackedPoolLayout` for various element types
    // and capacities.
    #[test]
    fn inline_storage_layout() {
        fn test_layout<T, H: Handle, const N: usize>() {
            use core::alloc::Layout;
            let inline_layout = Layout::new::<InlineStorage<T, H, N>>();
            let dynamic_layout = PackedPoolLayout::<T, H>::layout_with_capacity(N).unwrap();

            assert_eq!(inline_layout, dynamic_layout);
        }

        test_layout::<[u8; 3], DefaultHandle, 10>();
        test_layout::<[u8; 25], DefaultHandle, 20>();
        test_layout::<u128, DefaultHandle, 40>();
        test_layout::<crate::collections::ArenaDeque<u8>, DefaultHandle, 80>();
    }
}
1219}