use std::{
    alloc::Layout,
    cell::UnsafeCell,
    iter::Zip,
    mem::{align_of, size_of},
    ops::{Deref, DerefMut},
    ptr::NonNull,
    rc::Rc,
    slice::Iter,
};

use super::{
    archetype::ArchetypeIndex, component::Component, next_component_version, ComponentIndex,
    ComponentMeta, ComponentSlice, ComponentSliceMut, ComponentStorage, Epoch,
    UnknownComponentStorage,
};

/// A raw allocation with room for `cap` elements of `T`.
/// Zero-sized types allocate no memory and report unbounded capacity.
#[derive(Debug)]
struct RawAlloc<T> {
    ptr: NonNull<T>,
    cap: usize,
}

impl<T> RawAlloc<T> {
    fn new(min_capacity: usize) -> Self {
        if size_of::<T>() == 0 {
            // zero-sized types occupy no memory; a dangling pointer with
            // unbounded capacity is sufficient
            Self {
                ptr: NonNull::dangling(),
                cap: !0,
            }
        } else if min_capacity == 0 {
            Self {
                ptr: NonNull::dangling(),
                cap: 0,
            }
        } else {
            let layout =
                Layout::from_size_align(size_of::<T>() * min_capacity, align_of::<T>()).unwrap();
            Self {
                ptr: NonNull::new(unsafe { std::alloc::alloc(layout) as *mut _ }).unwrap(),
                cap: min_capacity,
            }
        }
    }

    fn layout(&self) -> Layout {
        Layout::from_size_align(size_of::<T>() * self.cap, align_of::<T>()).unwrap()
    }

    fn grow(&mut self, new_capacity: usize) {
        debug_assert!(self.cap < new_capacity);

        unsafe {
            // allocate fresh memory if we have never allocated, otherwise realloc
            let dst_ptr = if self.cap == 0 {
                let layout =
                    Layout::from_size_align(size_of::<T>() * new_capacity, align_of::<T>())
                        .unwrap();
                std::alloc::alloc(layout) as *mut T
            } else {
                std::alloc::realloc(
                    self.ptr.as_ptr() as *mut u8,
                    self.layout(),
                    size_of::<T>() * new_capacity,
                ) as *mut T
            };
            if let Some(new_ptr) = NonNull::new(dst_ptr) {
                self.ptr = new_ptr;
                self.cap = new_capacity;
            } else {
                std::alloc::handle_alloc_error(Layout::from_size_align_unchecked(
                    size_of::<T>() * new_capacity,
                    align_of::<T>(),
                ));
            }
        }
    }
}

impl<T> Drop for RawAlloc<T> {
    fn drop(&mut self) {
        if size_of::<T>() != 0 && self.cap > 0 {
            unsafe {
                let layout =
                    Layout::from_size_align_unchecked(size_of::<T>() * self.cap, align_of::<T>());
                std::alloc::dealloc(self.ptr.as_ptr() as *mut _, layout);
            }
        }
    }
}

/// A contiguous run of components belonging to a single archetype.
#[derive(Debug)]
#[allow(dead_code)]
enum ComponentVec<T> {
    /// A slice inside a larger allocation shared with other archetypes.
    Packed {
        raw: Rc<RawAlloc<T>>,
        offset: usize,
        len: usize,
        cap: usize,
    },
    /// An exclusively owned allocation.
    Loose {
        raw: RawAlloc<T>,
        len: usize,
        last_written: Epoch,
    },
}

impl<T> ComponentVec<T> {
    fn new() -> Self {
        Self::Loose {
            raw: RawAlloc::new(0),
            len: 0,
            last_written: 0,
        }
    }

    /// A vec qualifies for packing if it is already packed, or if it has not
    /// been written to since the given epoch threshold.
    fn should_pack(&self, epoch_threshold: Epoch) -> bool {
        match self {
            Self::Loose { last_written, .. } => *last_written <= epoch_threshold,
            _ => true,
        }
    }

    fn as_raw_slice(&self) -> (NonNull<T>, usize) {
        match self {
            Self::Packed {
                raw, offset, len, ..
            } => (
                unsafe { NonNull::new_unchecked(raw.ptr.as_ptr().add(*offset)) },
                *len,
            ),
            Self::Loose { raw, len, .. } => {
                (unsafe { NonNull::new_unchecked(raw.ptr.as_ptr()) }, *len)
            }
        }
    }

    /// Estimates fragmentation as a value in `[0, 1]`. Loose allocations always
    /// count as fully fragmented; packed slices are scored by wasted bytes.
    fn estimate_fragmentation(&self) -> f32 {
        match self {
            Self::Loose { .. } => 1f32,
            Self::Packed { len, cap, .. } => {
                let empty = cap - len;
                f32::min(1f32, empty as f32 * size_of::<T>() as f32 / 16f32)
            }
        }
    }

    /// Copies `count` components onto the end of the vec.
    ///
    /// # Safety
    /// `ptr` must point to `count` valid components; ownership of them is
    /// logically moved into the vec.
    unsafe fn extend_memcopy(&mut self, epoch: Epoch, ptr: *const T, count: usize) {
        self.ensure_capacity(epoch, count);
        let (dst, len) = self.as_raw_slice();
        std::ptr::copy_nonoverlapping(ptr, dst.as_ptr().add(len), count);
        match self {
            Self::Packed { len, .. } => *len += count,
            Self::Loose {
                len, last_written, ..
            } => {
                *len += count;
                *last_written = epoch;
            }
        }
    }

    fn ensure_capacity(&mut self, epoch: Epoch, space: usize) {
        let (cap, len) = match self {
            Self::Packed { cap, len, .. } => (*cap, *len),
            Self::Loose { raw, len, .. } => (raw.cap, *len),
        };

        if cap - len < space {
            self.grow(epoch, len + space);
        }
    }

    /// Removes the component at `index` and returns it, swapping the last
    /// component into its place.
    fn swap_remove(&mut self, epoch: Epoch, index: usize) -> T {
        let (ptr, len) = self.as_raw_slice();
        assert!(len > index);

        unsafe {
            let item_ptr = ptr.as_ptr().add(index);
            let last_ptr = ptr.as_ptr().add(len - 1);
            if index < len - 1 {
                std::ptr::swap(item_ptr, last_ptr);
            }
            let value = std::ptr::read(last_ptr);
            match self {
                Self::Packed { len, .. } => *len -= 1,
                Self::Loose {
                    len, last_written, ..
                } => {
                    *len -= 1;
                    *last_written = epoch;
                }
            }
            value
        }
    }

    fn grow(&mut self, epoch: Epoch, new_capacity: usize) {
        debug_assert_ne!(std::mem::size_of::<T>(), 0);

        match self {
            Self::Packed {
                raw,
                offset,
                len,
                cap,
            } => {
                debug_assert!(*cap < new_capacity);
                // packed slices cannot grow in place; copy the data out into a
                // new loose allocation sized for the requested capacity
                let new_alloc = RawAlloc::new(new_capacity);
                unsafe {
                    std::ptr::copy_nonoverlapping(
                        raw.ptr.as_ptr().add(*offset),
                        new_alloc.ptr.as_ptr(),
                        *len,
                    )
                };
                *self = Self::Loose {
                    raw: new_alloc,
                    len: *len,
                    last_written: epoch,
                };
            }
            Self::Loose {
                raw, last_written, ..
            } => {
                raw.grow(new_capacity);
                *last_written = epoch;
            }
        };
    }

    /// Moves this vec's components into the shared allocation `dst` at
    /// `offset`, converting the vec into a packed slice.
    ///
    /// # Safety
    /// `dst` must have capacity for at least `offset + len` components.
    unsafe fn pack(&mut self, dst: Rc<RawAlloc<T>>, offset: usize) {
        let (ptr, len) = self.as_raw_slice();
        debug_assert_ne!(std::mem::size_of::<T>(), 0);
        debug_assert!(dst.cap >= offset + len);
        std::ptr::copy_nonoverlapping(ptr.as_ptr(), dst.ptr.as_ptr().add(offset), len);
        *self = Self::Packed {
            raw: dst,
            offset,
            len,
            cap: len,
        }
    }
}

impl<T> Deref for ComponentVec<T> {
    type Target = [T];
    fn deref(&self) -> &Self::Target {
        let (ptr, len) = self.as_raw_slice();
        unsafe { std::slice::from_raw_parts(ptr.as_ptr(), len) }
    }
}

impl<T> DerefMut for ComponentVec<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        let (ptr, len) = self.as_raw_slice();
        unsafe { std::slice::from_raw_parts_mut(ptr.as_ptr(), len) }
    }
}

impl<T> Drop for ComponentVec<T> {
    fn drop(&mut self) {
        if std::mem::needs_drop::<T>() {
            // drop the components in place; the backing memory itself is freed
            // by the RawAlloc (shared via Rc when packed)
            unsafe {
                let (ptr, len) = self.as_raw_slice();
                for i in 0..len {
                    std::ptr::drop_in_place(ptr.as_ptr().add(i));
                }
            }
        }
    }
}

/// Packed component storage. Each archetype's components are stored in a
/// contiguous slice, and slices which have not been written to recently are
/// periodically compacted into a single shared allocation to improve
/// iteration locality.
#[derive(Debug)]
pub struct PackedStorage<T: Component> {
    // Maps archetype index to the index of the allocation which stores it.
    index: Vec<usize>,
    // Cached (pointer, length) pairs for each allocation.
    slices: Vec<(NonNull<T>, usize)>,
    // The total number of components stored.
    entity_len: usize,
    // The current epoch, used to decide which allocations are cold enough to pack.
    epoch: Epoch,
    // The version of each slice, bumped on every mutable access.
    versions: Vec<UnsafeCell<u64>>,
    // The allocations backing each slice.
    allocations: Vec<ComponentVec<T>>,
}

// Safety: the storage owns its allocations, and the interior mutability of the
// version cells is only exercised through unsafe accessors which require the
// caller to guarantee exclusive access.
unsafe impl<T: Component> Send for PackedStorage<T> {}
unsafe impl<T: Component> Sync for PackedStorage<T> {}

impl<T: Component> PackedStorage<T> {
    fn swap_remove_internal(
        &mut self,
        ArchetypeIndex(archetype): ArchetypeIndex,
        ComponentIndex(index): ComponentIndex,
    ) -> T {
        let slice_index = self.index[archetype as usize];
        let allocation = &mut self.allocations[slice_index];
        let component = allocation.swap_remove(self.epoch, index as usize);
        self.update_slice(slice_index);
        self.entity_len -= 1;
        component
    }

    #[inline]
    fn update_slice(&mut self, slice_index: usize) {
        self.slices[slice_index] = self.allocations[slice_index].as_raw_slice();
    }

    fn index(&self, ArchetypeIndex(archetype): ArchetypeIndex) -> usize {
        self.index[archetype as usize]
    }
}

impl<T: Component> UnknownComponentStorage for PackedStorage<T> {
    fn move_component(
        &mut self,
        source: ArchetypeIndex,
        index: ComponentIndex,
        dst: ArchetypeIndex,
    ) {
        let src_slice_index = self.index(source);
        let dst_slice_index = self.index(dst);

        // remove the component from the source archetype
        let src_allocation = &mut self.allocations[src_slice_index];
        let value = src_allocation.swap_remove(self.epoch, index.0 as usize);

        // copy it into the destination archetype
        let dst_allocation = &mut self.allocations[dst_slice_index];
        unsafe {
            dst_allocation.extend_memcopy(self.epoch, &value as *const _, 1);
            *self.versions[dst_slice_index].get() = next_component_version();
        }

        self.update_slice(src_slice_index);
        self.update_slice(dst_slice_index);

        // ownership was transferred by the memcopy, so don't drop the value
        std::mem::forget(value);
    }

    fn insert_archetype(&mut self, archetype: ArchetypeIndex, index: Option<usize>) {
        let index = index.unwrap_or_else(|| self.slices.len());
        let arch_index = archetype.0 as usize;

        // create a new allocation for the archetype
        let allocation = ComponentVec::<T>::new();
        self.slices.insert(index, allocation.as_raw_slice());
        self.versions.insert(index, UnsafeCell::new(0));
        self.allocations.insert(index, allocation);

        // shift the indexes of archetypes which sit after the insertion point
        for i in self.index.iter_mut().filter(|i| **i != !0 && **i >= index) {
            *i += 1;
        }
        if arch_index >= self.index.len() {
            self.index.resize(arch_index + 1, !0);
        }
        self.index[arch_index] = index;
    }

    fn transfer_archetype(
        &mut self,
        src_archetype: ArchetypeIndex,
        dst_archetype: ArchetypeIndex,
        dst: &mut dyn UnknownComponentStorage,
    ) {
        let dst = dst.downcast_mut::<Self>().unwrap();
        let src_index = self.index(src_archetype);
        let dst_index = dst.index(dst_archetype);

        // the source archetype loses `count` components
        let count = self.allocations[src_index].len();
        self.entity_len -= count;

        if dst.allocations[dst_index].len() == 0 {
            // the destination is empty; swap the whole allocation across
            dst.entity_len += count;
            std::mem::swap(
                &mut self.allocations[src_index],
                &mut dst.allocations[dst_index],
            );

            unsafe { *dst.versions[dst_index].get() = next_component_version() };
        } else {
            // the destination is not empty; memcopy our components over
            // (extend_memcopy_raw updates the destination's entity count)
            let (ptr, len) = self.get_raw(src_archetype).unwrap();
            unsafe { dst.extend_memcopy_raw(dst_archetype, ptr, len) };

            // ownership of the components moved with the memcopy; zero the
            // source's length so they are not dropped twice, then drop the
            // vec normally so its backing allocation is released
            let mut swapped = ComponentVec::<T>::new();
            std::mem::swap(&mut self.allocations[src_index], &mut swapped);
            match &mut swapped {
                ComponentVec::Packed { len, .. } => *len = 0,
                ComponentVec::Loose { len, .. } => *len = 0,
            }
        }

        self.update_slice(src_index);
        dst.update_slice(dst_index);
    }

    fn transfer_component(
        &mut self,
        src_archetype: ArchetypeIndex,
        src_component: ComponentIndex,
        dst_archetype: ArchetypeIndex,
        dst: &mut dyn UnknownComponentStorage,
    ) {
        let component = self.swap_remove_internal(src_archetype, src_component);
        unsafe { dst.extend_memcopy_raw(dst_archetype, &component as *const T as *const u8, 1) };
        // ownership moved with the memcopy
        std::mem::forget(component);
    }

    fn swap_remove(&mut self, archetype: ArchetypeIndex, index: ComponentIndex) {
        self.swap_remove_internal(archetype, index);
    }

    fn pack(&mut self, age_threshold: Epoch) -> usize {
        if size_of::<T>() == 0 {
            // zero-sized types occupy no memory and are never packed
            return 0;
        }

        let epoch_threshold = self.epoch - age_threshold;

        // total length of all slices which are due to be packed
        let len = self
            .slices
            .iter()
            .zip(self.allocations.iter())
            .filter(|(_, allocation)| allocation.should_pack(epoch_threshold))
            .map(|((_, len), _)| *len)
            .sum();

        unsafe {
            // move each qualifying allocation into one shared allocation
            let packed = Rc::new(RawAlloc::new(len));

            let mut cursor = 0;
            for (alloc, slice) in self
                .allocations
                .iter_mut()
                .zip(self.slices.iter_mut())
                .filter(|(allocation, _)| allocation.should_pack(epoch_threshold))
            {
                alloc.pack(packed.clone(), cursor);
                *slice = alloc.as_raw_slice();
                cursor += slice.1;
            }

            cursor
        }
    }

    fn fragmentation(&self) -> f32 {
        self.allocations
            .iter()
            .fold(0f32, |x, y| x + y.estimate_fragmentation())
            / self.entity_len as f32
    }

    fn element_vtable(&self) -> ComponentMeta {
        ComponentMeta::of::<T>()
    }

    fn get_raw(&self, ArchetypeIndex(archetype): ArchetypeIndex) -> Option<(*const u8, usize)> {
        let slice_index = *self.index.get(archetype as usize)?;
        let (ptr, len) = self.slices.get(slice_index)?;
        Some((ptr.as_ptr() as *const u8, *len))
    }

    unsafe fn get_mut_raw(
        &self,
        ArchetypeIndex(archetype): ArchetypeIndex,
    ) -> Option<(*mut u8, usize)> {
        let slice_index = *self.index.get(archetype as usize)?;
        let (ptr, len) = self.slices.get(slice_index)?;
        // mutable access bumps the slice's version
        *self.versions.get_unchecked(slice_index).get() = next_component_version();
        Some((ptr.as_ptr() as *mut u8, *len))
    }

    unsafe fn extend_memcopy_raw(
        &mut self,
        ArchetypeIndex(archetype): ArchetypeIndex,
        ptr: *const u8,
        count: usize,
    ) {
        let slice_index = self.index[archetype as usize];
        let allocation = &mut self.allocations[slice_index];
        allocation.extend_memcopy(self.epoch, ptr as *const T, count);
        self.slices[slice_index] = allocation.as_raw_slice();
        self.entity_len += count;
        *self.versions[slice_index].get() = next_component_version();
    }

    fn increment_epoch(&mut self) {
        self.epoch += 1;
    }

    fn ensure_capacity(&mut self, ArchetypeIndex(archetype): ArchetypeIndex, capacity: usize) {
        let slice_index = self.index[archetype as usize];
        let allocation = &mut self.allocations[slice_index];
        allocation.ensure_capacity(self.epoch, capacity);
    }
}

impl<T: Component> Default for PackedStorage<T> {
    fn default() -> Self {
        Self {
            index: Vec::new(),
            slices: Vec::new(),
            versions: Vec::new(),
            allocations: Vec::new(),
            entity_len: 0,
            epoch: 0,
        }
    }
}

impl<'a, T: Component> ComponentStorage<'a, T> for PackedStorage<T> {
    type Iter = ComponentIter<'a, T>;
    type IterMut = ComponentIterMut<'a, T>;

    unsafe fn extend_memcopy(&mut self, archetype: ArchetypeIndex, ptr: *const T, count: usize) {
        self.extend_memcopy_raw(archetype, ptr as *const u8, count);
    }

    fn get(&'a self, ArchetypeIndex(archetype): ArchetypeIndex) -> Option<ComponentSlice<'a, T>> {
        let slice_index = *self.index.get(archetype as usize)?;
        let (ptr, len) = self.slices.get(slice_index)?;
        let slice = unsafe { std::slice::from_raw_parts(ptr.as_ptr(), *len) };
        let version = unsafe { &*self.versions.get_unchecked(slice_index).get() };
        Some(ComponentSlice::new(slice, version))
    }

    unsafe fn get_mut(
        &'a self,
        ArchetypeIndex(archetype): ArchetypeIndex,
    ) -> Option<ComponentSliceMut<'a, T>> {
        let slice_index = *self.index.get(archetype as usize)?;
        let (ptr, len) = self.slices.get(slice_index)?;
        let slice = std::slice::from_raw_parts_mut(ptr.as_ptr(), *len);
        let version = &mut *self.versions.get_unchecked(slice_index).get();
        Some(ComponentSliceMut::new(slice, version))
    }

    fn iter(&'a self, start_inclusive: usize, end_exclusive: usize) -> Self::Iter {
        ComponentIter {
            slices: self.slices[start_inclusive..end_exclusive]
                .iter()
                .zip(self.versions[start_inclusive..end_exclusive].iter()),
        }
    }

    unsafe fn iter_mut(&'a self, start_inclusive: usize, end_exclusive: usize) -> Self::IterMut {
        ComponentIterMut {
            slices: self.slices[start_inclusive..end_exclusive]
                .iter()
                .zip(self.versions[start_inclusive..end_exclusive].iter()),
        }
    }

    fn len(&self) -> usize {
        self.allocations.len()
    }
}

#[doc(hidden)]
pub struct ComponentIter<'a, T> {
    slices: Zip<Iter<'a, (NonNull<T>, usize)>, Iter<'a, UnsafeCell<u64>>>,
}

impl<'a, T: Component> Iterator for ComponentIter<'a, T> {
    type Item = ComponentSlice<'a, T>;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.slices.next().map(|((ptr, len), version)| {
            let slice = unsafe { std::slice::from_raw_parts(ptr.as_ptr(), *len) };
            let version = unsafe { &*version.get() };
            ComponentSlice::new(slice, version)
        })
    }
}

#[doc(hidden)]
pub struct ComponentIterMut<'a, T> {
    slices: Zip<Iter<'a, (NonNull<T>, usize)>, Iter<'a, UnsafeCell<u64>>>,
}

impl<'a, T: Component> Iterator for ComponentIterMut<'a, T> {
    type Item = ComponentSliceMut<'a, T>;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.slices.next().map(|((ptr, len), version)| {
            let slice = unsafe { std::slice::from_raw_parts_mut(ptr.as_ptr(), *len) };
            let version = unsafe { &mut *version.get() };
            ComponentSliceMut::new(slice, version)
        })
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn create_storage() {
        let _ = PackedStorage::<usize>::default();
    }

    #[test]
    fn create_zst_storage() {
        let _ = PackedStorage::<()>::default();
    }

    #[test]
    fn insert_archetype() {
        let mut storage = PackedStorage::<usize>::default();
        storage.insert_archetype(ArchetypeIndex(0), Some(0));
    }

    #[test]
    fn insert_zst_archetype() {
        let mut storage = PackedStorage::<()>::default();
        storage.insert_archetype(ArchetypeIndex(0), Some(0));
    }

    #[test]
    fn insert_components() {
        let mut storage = PackedStorage::<usize>::default();
        storage.insert_archetype(ArchetypeIndex(0), Some(0));

        unsafe {
            let components = vec![1, 2, 3];
            let ptr = components.as_ptr();
            storage.extend_memcopy(ArchetypeIndex(0), ptr, 3);
            std::mem::forget(components);

            let slice = storage.get_mut(ArchetypeIndex(0)).unwrap();
            assert_eq!(slice.into_slice(), &[1usize, 2usize, 3usize]);
        }
    }
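
    // A sketch exercising the loose-allocation growth path: the first
    // extend_memcopy sizes the allocation for exactly three elements, so the
    // second call forces ensure_capacity to grow (realloc) the allocation.
    #[test]
    fn grow_loose_allocation() {
        let mut storage = PackedStorage::<usize>::default();
        storage.insert_archetype(ArchetypeIndex(0), Some(0));

        unsafe {
            let first = vec![1, 2, 3];
            storage.extend_memcopy(ArchetypeIndex(0), first.as_ptr(), 3);
            std::mem::forget(first);

            let second = vec![4, 5, 6];
            storage.extend_memcopy(ArchetypeIndex(0), second.as_ptr(), 3);
            std::mem::forget(second);

            let slice = storage.get_mut(ArchetypeIndex(0)).unwrap();
            assert_eq!(slice.into_slice(), &[1usize, 2, 3, 4, 5, 6]);
        }
    }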

    #[test]
    fn insert_zst_components() {
        let mut storage = PackedStorage::<()>::default();
        storage.insert_archetype(ArchetypeIndex(0), Some(0));

        unsafe {
            let components = vec![(), (), ()];
            let ptr = components.as_ptr();
            storage.extend_memcopy(ArchetypeIndex(0), ptr, 3);
            std::mem::forget(components);

            let slice = storage.get_mut(ArchetypeIndex(0)).unwrap();
            assert_eq!(slice.into_slice(), &[(), (), ()]);
        }
    }
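
    // A sketch of moving a component between two archetypes within the same
    // storage: move_component swap-removes the value from the source slice and
    // memcopies it into the destination slice.
    #[test]
    fn move_component_between_archetypes() {
        let mut storage = PackedStorage::<usize>::default();
        storage.insert_archetype(ArchetypeIndex(0), Some(0));
        storage.insert_archetype(ArchetypeIndex(1), Some(1));

        unsafe {
            let components = vec![1, 2, 3];
            storage.extend_memcopy(ArchetypeIndex(0), components.as_ptr(), 3);
            std::mem::forget(components);
        }

        storage.move_component(ArchetypeIndex(0), ComponentIndex(0), ArchetypeIndex(1));

        unsafe {
            let src = storage.get_mut(ArchetypeIndex(0)).unwrap();
            assert_eq!(src.into_slice(), &[3usize, 2usize]);
            let dst = storage.get_mut(ArchetypeIndex(1)).unwrap();
            assert_eq!(dst.into_slice(), &[1usize]);
        }
    }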

    #[test]
    fn swap_remove_first() {
        let mut storage = PackedStorage::<usize>::default();
        storage.insert_archetype(ArchetypeIndex(0), Some(0));

        unsafe {
            let components = vec![1, 2, 3];
            let ptr = components.as_ptr();
            storage.extend_memcopy(ArchetypeIndex(0), ptr, 3);
            std::mem::forget(components);
        }

        storage.swap_remove(ArchetypeIndex(0), ComponentIndex(0));

        unsafe {
            let slice = storage.get_mut(ArchetypeIndex(0)).unwrap();
            assert_eq!(slice.into_slice(), &[3usize, 2usize]);
        }
    }

    #[test]
    fn swap_remove_last() {
        let mut storage = PackedStorage::<usize>::default();
        storage.insert_archetype(ArchetypeIndex(0), Some(0));

        unsafe {
            let components = vec![1, 2, 3];
            let ptr = components.as_ptr();
            storage.extend_memcopy(ArchetypeIndex(0), ptr, 3);
            std::mem::forget(components);
        }

        storage.swap_remove(ArchetypeIndex(0), ComponentIndex(2));

        unsafe {
            let slice = storage.get_mut(ArchetypeIndex(0)).unwrap();
            assert_eq!(slice.into_slice(), &[1usize, 2usize]);
        }
    }
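
    // A sketch of a pack round-trip: both allocations are last written in
    // epoch 0, so once the epoch advances past the age threshold they qualify
    // for packing and are moved into a single shared allocation; pack returns
    // the number of components packed.
    #[test]
    fn pack_storage() {
        let mut storage = PackedStorage::<usize>::default();
        storage.insert_archetype(ArchetypeIndex(0), Some(0));
        storage.insert_archetype(ArchetypeIndex(1), Some(1));

        unsafe {
            let components = vec![1, 2, 3];
            storage.extend_memcopy(ArchetypeIndex(0), components.as_ptr(), 3);
            std::mem::forget(components);

            let components = vec![4, 5];
            storage.extend_memcopy(ArchetypeIndex(1), components.as_ptr(), 2);
            std::mem::forget(components);
        }

        storage.increment_epoch();
        assert_eq!(storage.pack(1), 5);

        unsafe {
            let slice = storage.get_mut(ArchetypeIndex(0)).unwrap();
            assert_eq!(slice.into_slice(), &[1usize, 2usize, 3usize]);
        }
    }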
}