1use alloc::collections::btree_map::Entry as BTreeEntry;
4use alloc::collections::{BTreeMap, BTreeSet};
5#[cfg(not(feature = "std"))]
6use alloc::{boxed::Box, vec, vec::Vec};
7use core::cmp::Ordering;
8use core::ops::Index;
9use core::panic::{RefUnwindSafe, UnwindSafe};
10use core::ptr::NonNull;
11use core::{fmt, mem, ptr, slice};
12
13use ahash::RandomState;
14use slab::Slab;
15
16use crate::aliased_box::AliasedBox;
17use crate::assume_unchecked;
18use crate::blob_vec::BlobVec;
19use crate::component::{ComponentIdx, ComponentInfo, Components};
20use crate::entity::{Entities, EntityId, EntityLocation};
21use crate::event::{EventId, EventPtr, TargetedEventIdx};
22use crate::handler::{
23 HandlerConfig, HandlerInfo, HandlerInfoPtr, HandlerList, HandlerParam, Handlers, InitError,
24};
25use crate::map::{Entry, HashMap};
26use crate::prelude::World;
27use crate::sparse::SparseIndex;
28use crate::sparse_map::SparseMap;
29use crate::world::UnsafeWorldCell;
30
/// Stores all the archetypes in a world, plus the graph edges between them
/// (see `insert_components`/`remove_components` on [`Archetype`]).
#[derive(Debug)]
pub struct Archetypes {
    // Every live archetype, indexed by `ArchetypeIdx`. Slot 0 always holds
    // the empty archetype (established in `new`, relied on by `empty`).
    archetypes: Slab<Archetype>,
    // Maps a sorted, deduplicated list of component indices to the index of
    // the archetype with exactly those components. The boxed key's slice is
    // also referenced by the corresponding `Archetype::component_indices`
    // (it is created from `vacant_by_components.key()` in the traverse
    // methods), hence the aliasing-aware box type.
    by_components: HashMap<AliasedBox<[ComponentIdx]>, ArchetypeIdx>,
}
50
impl Archetypes {
    /// Creates the archetype container, pre-populated with the empty
    /// archetype at slab slot 0 and keyed by the empty component list.
    pub(crate) fn new() -> Self {
        let mut map = HashMap::with_hasher(RandomState::new());
        map.insert(vec![].into_boxed_slice().into(), ArchetypeIdx::EMPTY);

        Self {
            archetypes: Slab::from_iter([(0, Archetype::empty())]),
            by_components: map,
        }
    }

    /// Returns a reference to the empty archetype (the archetype with no
    /// components).
    pub fn empty(&self) -> &Archetype {
        // SAFETY: the empty archetype is inserted at slot 0 in `new` and is
        // never removed, so this lookup cannot fail.
        unsafe { self.archetypes.get(0).unwrap_unchecked() }
    }

    /// Returns a mutable reference to the empty archetype.
    pub(crate) fn empty_mut(&mut self) -> &mut Archetype {
        // SAFETY: slot 0 always holds the empty archetype (see `new`).
        unsafe { self.archetypes.get_mut(0).unwrap_unchecked() }
    }

    /// Returns the archetype with the given index, or `None` if it does not
    /// exist.
    pub fn get(&self, idx: ArchetypeIdx) -> Option<&Archetype> {
        self.archetypes.get(idx.0 as usize)
    }

    /// Returns the archetype whose component set is exactly `components`, or
    /// `None` if no such archetype exists. `components` must be sorted and
    /// deduplicated to match how keys are stored in `by_components`.
    pub fn get_by_components(&self, components: &[ComponentIdx]) -> Option<&Archetype> {
        let idx = *self.by_components.get(components)?;
        // SAFETY: `by_components` only maps to indices of live archetypes.
        Some(unsafe { self.get(idx).unwrap_unchecked() })
    }

    /// Adds the given (fresh) entity to the empty archetype and returns its
    /// new location.
    pub(crate) fn spawn(&mut self, id: EntityId) -> EntityLocation {
        let empty = self.empty_mut();

        // Whether the upcoming push will move the archetype's buffers.
        let rellocated = empty.push_would_reallocate();

        let row = ArchetypeRow(empty.entity_count());
        empty.entity_ids.push(id);

        // If the archetype just became non-empty, or its buffers may have
        // moved, refresh listeners so they re-read cached archetype data.
        if empty.entity_count() == 1 || rellocated {
            for mut ptr in empty.refresh_listeners.iter().copied() {
                unsafe { ptr.as_info_mut().handler_mut().refresh_archetype(empty) };
            }
        }

        EntityLocation {
            archetype: ArchetypeIdx::EMPTY,
            row,
        }
    }

    /// Returns an iterator over all archetypes in an unspecified order.
    pub fn iter(&self) -> impl Iterator<Item = &Archetype> {
        self.archetypes.iter().map(|(_, v)| v)
    }

    /// Returns the total number of archetypes, including the empty one.
    pub fn len(&self) -> usize {
        self.archetypes.len()
    }

    /// Offers the handler to every archetype so each can register it as a
    /// refresh and/or targeted-event listener if its filters match.
    pub(crate) fn register_handler(&mut self, info: &mut HandlerInfo) {
        for (_, arch) in &mut self.archetypes {
            arch.register_handler(info);
        }
    }

    /// Removes all references to the given handler from every archetype.
    pub(crate) fn remove_handler(&mut self, info: &HandlerInfo) {
        for (_, arch) in &mut self.archetypes {
            arch.refresh_listeners.remove(&info.ptr());

            // Targeted-event handlers are additionally tracked per event;
            // remove the handler from that list too.
            if let EventId::Targeted(id) = info.received_event() {
                if let Some(list) = arch.event_listeners.get_mut(id.index()) {
                    list.remove(info.ptr());
                }
            }
        }
    }

    /// Removes a component type entirely: destroys every archetype containing
    /// it, notifies listeners, unlinks graph edges, and invokes `f` with the
    /// id of every entity that lived in a destroyed archetype.
    pub(crate) fn remove_component<F>(
        &mut self,
        info: &mut ComponentInfo,
        components: &mut Components,
        mut f: F,
    ) where
        F: FnMut(EntityId),
    {
        let removed_component_id = info.id();

        // `member_of` lists every archetype containing this component; all
        // of them are destroyed.
        for arch_idx in info.member_of.drain(..) {
            let mut arch = self.archetypes.remove(arch_idx.0 as usize);

            // Listeners must stop referencing the destroyed archetype.
            for mut ptr in arch.refresh_listeners.iter().copied() {
                unsafe { ptr.as_info_mut().handler_mut().remove_archetype(&arch) };
            }

            // Unregister the destroyed archetype from the `member_of` sets
            // of its remaining components.
            for &comp_idx in arch.component_indices() {
                if comp_idx != removed_component_id.index() {
                    let info = unsafe { components.get_by_index_mut(comp_idx).unwrap_unchecked() };
                    info.member_of.swap_remove(&arch_idx);
                }
            }

            self.by_components.remove_entry(arch.component_indices());

            for &entity_id in arch.entity_ids() {
                f(entity_id);
            }

            // Sever the insert/remove edges linking this archetype to its
            // neighbors in the archetype graph.
            for (comp_idx, arch_idx) in mem::take(&mut arch.insert_components) {
                // SAFETY: edges to archetypes destroyed earlier in this loop
                // were already severed by those iterations, so any remaining
                // edge points at a live archetype.
                let other_arch = unsafe {
                    self.archetypes
                        .get_mut(arch_idx.0 as usize)
                        .unwrap_unchecked()
                };

                other_arch.remove_components.remove(&comp_idx);
            }

            for (comp_idx, arch_idx) in mem::take(&mut arch.remove_components) {
                // SAFETY: as above — remaining edges point at live archetypes.
                let other_arch = unsafe {
                    self.archetypes
                        .get_mut(arch_idx.0 as usize)
                        .unwrap_unchecked()
                };

                other_arch.insert_components.remove(&comp_idx);
            }
        }
    }

    /// Returns the index of the archetype obtained by adding `component_idx`
    /// to the components of the archetype at `src_arch_idx`, creating the
    /// destination archetype (and caching graph edges in both directions) if
    /// it does not exist yet. Returns `src_arch_idx` unchanged if the source
    /// already contains the component.
    ///
    /// # Safety
    ///
    /// - `src_arch_idx` must be the index of a live archetype.
    /// - `component_idx` must refer to a registered component (checked only
    ///   by `debug_assert!`).
    pub(crate) unsafe fn traverse_insert(
        &mut self,
        src_arch_idx: ArchetypeIdx,
        component_idx: ComponentIdx,
        components: &mut Components,
        handlers: &mut Handlers,
    ) -> ArchetypeIdx {
        debug_assert!(components.get_by_index(component_idx).is_some());

        // Slot the new archetype would occupy if we have to create it.
        let next_arch_idx = self.archetypes.vacant_key();

        let src_arch = unsafe {
            self.archetypes
                .get_mut(src_arch_idx.0 as usize)
                .unwrap_unchecked()
        };

        match src_arch.insert_components.entry(component_idx) {
            BTreeEntry::Vacant(vacant_insert_components) => {
                // A successful search means the source already has the
                // component, so inserting it is a no-op.
                let Err(idx) = src_arch
                    .component_indices
                    .as_ref()
                    .binary_search(&component_idx)
                else {
                    return src_arch_idx;
                };

                // Build the destination's component list with the new
                // component spliced in at its sorted position.
                let mut new_components = Vec::with_capacity(src_arch.component_indices.len() + 1);
                new_components.extend(src_arch.component_indices.as_ref().iter().copied());
                new_components.insert(idx, component_idx);

                match self
                    .by_components
                    .entry(new_components.into_boxed_slice().into())
                {
                    Entry::Vacant(vacant_by_components) => {
                        assert!(next_arch_idx < u32::MAX as usize, "too many archetypes");

                        let arch_id = ArchetypeIdx(next_arch_idx as u32);

                        // The new archetype borrows its component list from
                        // the map key it is stored under.
                        let mut new_arch = Archetype::new(
                            arch_id,
                            vacant_by_components.key().as_ref().into(),
                            components,
                        );

                        // Back edge: removing the component from the new
                        // archetype leads back to the source.
                        new_arch
                            .remove_components
                            .insert(component_idx, src_arch_idx);

                        for info in handlers.iter_mut() {
                            new_arch.register_handler(info);
                        }

                        vacant_by_components.insert(arch_id);

                        vacant_insert_components.insert(arch_id);

                        self.archetypes.insert(new_arch);

                        arch_id
                    }
                    // Destination exists but this edge didn't; cache it.
                    Entry::Occupied(o) => *vacant_insert_components.insert(*o.get()),
                }
            }
            // Cached edge: destination already known.
            BTreeEntry::Occupied(o) => *o.get(),
        }
    }

    /// Returns the index of the archetype obtained by removing
    /// `component_idx` from the components of the archetype at
    /// `src_arch_idx`, creating the destination archetype (and caching graph
    /// edges in both directions) if needed. Returns `src_arch_idx` unchanged
    /// if the source does not contain the component.
    ///
    /// # Safety
    ///
    /// `src_arch_idx` must be the index of a live archetype.
    pub(crate) unsafe fn traverse_remove(
        &mut self,
        src_arch_idx: ArchetypeIdx,
        component_idx: ComponentIdx,
        components: &mut Components,
        handlers: &mut Handlers,
    ) -> ArchetypeIdx {
        // Slot the new archetype would occupy if we have to create it.
        let next_arch_idx = self.archetypes.vacant_key();

        let src_arch = unsafe {
            self.archetypes
                .get_mut(src_arch_idx.0 as usize)
                .unwrap_unchecked()
        };

        match src_arch.remove_components.entry(component_idx) {
            BTreeEntry::Vacant(vacant_remove_components) => {
                // Source doesn't have the component: removing is a no-op.
                if src_arch
                    .component_indices
                    .as_ref()
                    .binary_search(&component_idx)
                    .is_err()
                {
                    return src_arch_idx;
                }

                // Destination component list = source list minus the
                // removed component (order preserved, so still sorted).
                let new_components: Box<[ComponentIdx]> = src_arch
                    .component_indices
                    .as_ref()
                    .iter()
                    .copied()
                    .filter(|&c| c != component_idx)
                    .collect();

                match self.by_components.entry(new_components.into()) {
                    Entry::Vacant(vacant_by_components) => {
                        assert!(next_arch_idx < u32::MAX as usize, "too many archetypes");

                        let arch_id = ArchetypeIdx(next_arch_idx as u32);

                        let mut new_arch = Archetype::new(
                            arch_id,
                            vacant_by_components.key().as_ref().into(),
                            components,
                        );

                        // Back edge: re-inserting the component leads back
                        // to the source.
                        new_arch
                            .insert_components
                            .insert(component_idx, src_arch_idx);

                        for info in handlers.iter_mut() {
                            new_arch.register_handler(info);
                        }

                        vacant_by_components.insert(arch_id);

                        vacant_remove_components.insert(arch_id);

                        self.archetypes.insert(new_arch);

                        arch_id
                    }
                    // Destination exists but this edge didn't; cache it.
                    Entry::Occupied(o) => *vacant_remove_components.insert(*o.get()),
                }
            }
            // Cached edge: destination already known.
            BTreeEntry::Occupied(o) => *o.get(),
        }
    }

    /// Moves an entity from its current archetype to the archetype at `dst`,
    /// transferring the component values both archetypes share and
    /// initializing the components only `dst` has from `new_components`.
    /// Returns the entity's row in the destination archetype.
    ///
    /// # Safety
    ///
    /// - `src` must be the valid current location of a live entity and `dst`
    ///   the index of a live archetype.
    /// - `new_components` must yield exactly the components present in `dst`
    ///   but not in the source archetype, in ascending index order (checked
    ///   only by `debug_assert!`), with pointers valid for reads of the
    ///   component's layout; ownership of the pointed-to values is
    ///   transferred to the destination.
    pub(crate) unsafe fn move_entity(
        &mut self,
        src: EntityLocation,
        dst: ArchetypeIdx,
        new_components: impl IntoIterator<Item = (ComponentIdx, *const u8)>,
        entities: &mut Entities,
    ) -> ArchetypeRow {
        let mut new_components = new_components.into_iter();

        // Same archetype: nothing moves; just overwrite the provided
        // component values in place.
        if src.archetype == dst {
            let arch = self
                .archetypes
                .get_mut(src.archetype.0 as usize)
                .unwrap_unchecked();

            for (comp_idx, comp_ptr) in new_components {
                let col = arch.column_of_mut(comp_idx).unwrap_unchecked();

                col.data.assign(src.row.0 as usize, comp_ptr);
            }

            return src.row;
        }

        let (src_arch, dst_arch) = self
            .archetypes
            .get2_mut(src.archetype.0 as usize, dst.0 as usize)
            .unwrap();

        let dst_row = ArchetypeRow(dst_arch.entity_ids.len() as u32);

        // Captured before pushing so we know whether listeners must be
        // refreshed afterwards (buffers may have moved).
        let dst_arch_reallocated = dst_arch.push_would_reallocate();

        let mut src_idx = 0;
        let mut dst_idx = 0;

        // Merge-walk the two sorted component index lists. Per component:
        // source-only => drop it; shared => transfer it; destination-only =>
        // initialize it from the next `new_components` item.
        loop {
            let src_in_bounds = src_idx < src_arch.component_indices.len();
            let dst_in_bounds = dst_idx < dst_arch.component_indices.len();

            match (src_in_bounds, dst_in_bounds) {
                (true, true) => {
                    let src_comp_idx = *src_arch.component_indices.as_ref().get_unchecked(src_idx);

                    let dst_comp_idx = *dst_arch.component_indices.as_ref().get_unchecked(dst_idx);

                    match src_comp_idx.cmp(&dst_comp_idx) {
                        Ordering::Less => {
                            // Component only in source: drop its value.
                            let src_col = &mut *src_arch.columns.as_ptr().add(src_idx);
                            src_col.data.swap_remove(src.row.0 as usize);
                            src_idx += 1;
                        }
                        Ordering::Equal => {
                            // Shared component: move the value across.
                            let src_col = &mut *src_arch.columns.as_ptr().add(src_idx);
                            let dst_col = &mut *dst_arch.columns.as_ptr().add(dst_idx);

                            src_col
                                .data
                                .transfer_elem(&mut dst_col.data, src.row.0 as usize);

                            src_idx += 1;
                            dst_idx += 1;
                        }
                        Ordering::Greater => {
                            // Component only in destination: take the next
                            // provided value and copy it into a new slot.
                            let (component_idx, component_ptr) =
                                new_components.next().unwrap_unchecked();

                            let dst_col = &mut *dst_arch.columns.as_ptr().add(dst_idx);

                            let dst_comp_idx =
                                *dst_arch.component_indices.as_ref().get_unchecked(dst_idx);

                            debug_assert_eq!(component_idx, dst_comp_idx);

                            ptr::copy_nonoverlapping(
                                component_ptr,
                                dst_col.data.push().as_ptr(),
                                dst_col.data.elem_layout().size(),
                            );

                            dst_idx += 1;
                        }
                    }
                }
                (true, false) => {
                    // Source exhausted destination: remaining source
                    // components are dropped.
                    let src_col = &mut *src_arch.columns.as_ptr().add(src_idx);
                    src_col.data.swap_remove(src.row.0 as usize);
                    src_idx += 1;
                }
                (false, true) => {
                    // Remaining destination components come from the
                    // caller-provided values.
                    let (component_idx, component_ptr) = new_components.next().unwrap_unchecked();

                    let dst_col = &mut *dst_arch.columns.as_ptr().add(dst_idx);

                    let dst_comp_idx = *dst_arch.component_indices.as_ref().get_unchecked(dst_idx);

                    debug_assert_eq!(component_idx, dst_comp_idx);

                    ptr::copy_nonoverlapping(
                        component_ptr,
                        dst_col.data.push().as_ptr(),
                        dst_col.data.elem_layout().size(),
                    );

                    dst_idx += 1;
                }
                (false, false) => break,
            }
        }

        // All provided components must have been consumed.
        debug_assert!(new_components.next().is_none());

        let entity_id = src_arch.entity_ids.swap_remove(src.row.0 as usize);
        dst_arch.entity_ids.push(entity_id);

        *unsafe { entities.get_mut(entity_id).unwrap_unchecked() } = EntityLocation {
            archetype: dst,
            row: dst_row,
        };

        // `swap_remove` may have moved another entity into the vacated row;
        // fix up its recorded location.
        if let Some(&swapped_entity_id) = src_arch.entity_ids.get(src.row.0 as usize) {
            unsafe { entities.get_mut(swapped_entity_id).unwrap_unchecked() }.row = src.row;
        }

        // Notify listeners when the source becomes empty…
        if src_arch.entity_ids.is_empty() {
            for mut ptr in src_arch.refresh_listeners.iter().copied() {
                unsafe { ptr.as_info_mut().handler_mut().remove_archetype(src_arch) };
            }
        }

        // …and when the destination's buffers moved or it just became
        // non-empty.
        if dst_arch_reallocated || dst_arch.entity_count() == 1 {
            for mut ptr in dst_arch.refresh_listeners.iter().copied() {
                unsafe { ptr.as_info_mut().handler_mut().refresh_archetype(dst_arch) };
            }
        }

        dst_row
    }

    /// Removes an entity from the world, dropping its component values and
    /// fixing up the location of the entity swapped into its row (if any).
    ///
    /// # Safety
    ///
    /// `loc` must be the valid current location of a live entity tracked by
    /// `entities`.
    pub(crate) unsafe fn remove_entity(&mut self, loc: EntityLocation, entities: &mut Entities) {
        let arch = unsafe {
            self.archetypes
                .get_mut(loc.archetype.0 as usize)
                .unwrap_unchecked()
        };

        // Swap-remove the entity's value from every column.
        for col in arch.columns_mut() {
            unsafe { col.data.swap_remove(loc.row.0 as usize) };
        }

        // Hint to the optimizer that the row is in bounds so the
        // `swap_remove` below needn't re-check.
        unsafe {
            assume_unchecked((loc.row.0 as usize) < arch.entity_ids.len());
        };

        let id = arch.entity_ids.swap_remove(loc.row.0 as usize);

        let removed_loc = unsafe { entities.remove(id).unwrap_unchecked() };

        debug_assert_eq!(loc, removed_loc);

        // If another entity was swapped into the vacated row, update its
        // recorded location.
        if (loc.row.0 as usize) < arch.entity_ids.len() {
            let displaced = *unsafe { arch.entity_ids.get_unchecked(loc.row.0 as usize) };
            unsafe { entities.get_mut(displaced).unwrap_unchecked() }.row = loc.row;
        }

        // Notify listeners when the archetype becomes empty.
        if arch.entity_count() == 0 {
            for mut ptr in arch.refresh_listeners.iter().copied() {
                unsafe { ptr.as_info_mut().handler_mut().remove_archetype(arch) };
            }
        }
    }
}
525
526impl Index<ArchetypeIdx> for Archetypes {
527 type Output = Archetype;
528
529 fn index(&self, index: ArchetypeIdx) -> &Self::Output {
531 if let Some(arch) = self.get(index) {
532 arch
533 } else {
534 panic!("no such archetype with index of {index:?} exists")
535 }
536 }
537}
538
// SAFETY(review): `&Archetypes` only hands out shared access fetched from the
// world cell and declares no component access — assumed to satisfy the
// `HandlerParam` contract; confirm against that trait's documentation.
unsafe impl HandlerParam for &'_ Archetypes {
    // No per-handler state is required for this parameter.
    type State = ();

    type This<'a> = &'a Archetypes;

    fn init(_world: &mut World, _config: &mut HandlerConfig) -> Result<Self::State, InitError> {
        Ok(())
    }

    unsafe fn get<'a>(
        _state: &'a mut Self::State,
        _info: &'a HandlerInfo,
        _event_ptr: EventPtr<'a>,
        _target_location: EntityLocation,
        world: UnsafeWorldCell<'a>,
    ) -> Self::This<'a> {
        world.archetypes()
    }

    // This parameter caches nothing per archetype, so both notifications are
    // no-ops.
    fn refresh_archetype(_state: &mut Self::State, _arch: &Archetype) {}

    fn remove_archetype(_state: &mut Self::State, _arch: &Archetype) {}
}
562
/// Lightweight identifier of an archetype: its slot in `Archetypes`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct ArchetypeIdx(pub u32);
568
impl ArchetypeIdx {
    /// Index of the empty archetype (always slot 0; see `Archetypes::new`).
    pub const EMPTY: Self = Self(0);
    /// Sentinel index that never refers to a live archetype.
    pub const NULL: Self = Self(u32::MAX);
}
575
// SAFETY(review): delegates to the `u32` implementation, so the index/
// from_index round trip is lossless for any value up to `MAX` — assumed to
// satisfy `SparseIndex`'s contract; confirm against the trait docs.
unsafe impl SparseIndex for ArchetypeIdx {
    const MAX: Self = ArchetypeIdx::NULL;

    fn index(self) -> usize {
        self.0.index()
    }

    fn from_index(idx: usize) -> Self {
        Self(u32::from_index(idx))
    }
}
587
/// Offset of an entity within an archetype's tables (its row in every column
/// and in the entity id list).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct ArchetypeRow(pub u32);
592
impl ArchetypeRow {
    /// Sentinel row that never refers to a live entity.
    pub const NULL: Self = Self(u32::MAX);
}
597
/// A set of entities that all have exactly the same component types, stored
/// in columnar (struct-of-arrays) form.
pub struct Archetype {
    // This archetype's slot in `Archetypes::archetypes`.
    index: ArchetypeIdx,
    // Sorted, deduplicated component indices of this archetype. Points into
    // the matching key of `Archetypes::by_components` (it is constructed
    // from that key in the traverse methods), hence the raw pointer.
    component_indices: NonNull<[ComponentIdx]>,
    // One type-erased column per component, in the same order as
    // `component_indices`. Allocated as a boxed slice in `new`, freed in
    // `Drop`; dangling (and zero-length) for the empty archetype.
    columns: NonNull<Column>,
    // Entity id stored at each row of this archetype.
    entity_ids: Vec<EntityId>,
    // Archetype-graph edge cache: inserting this component from here leads
    // to that archetype.
    insert_components: BTreeMap<ComponentIdx, ArchetypeIdx>,
    // Archetype-graph edge cache: removing this component from here leads
    // to that archetype.
    remove_components: BTreeMap<ComponentIdx, ArchetypeIdx>,
    // Handlers notified via `refresh_archetype`/`remove_archetype` when this
    // archetype's buffers move or it transitions (non-)empty.
    refresh_listeners: BTreeSet<HandlerInfoPtr>,
    // Handlers listening for each targeted event on this archetype.
    event_listeners: SparseMap<TargetedEventIdx, HandlerList>,
}
630
impl Archetype {
    /// Creates the empty archetype: no components, hence no columns (the
    /// `columns` pointer is dangling but the length is zero).
    fn empty() -> Self {
        Self {
            index: ArchetypeIdx::EMPTY,
            component_indices: NonNull::from(<&[_]>::default()),
            columns: NonNull::dangling(),
            entity_ids: vec![],
            insert_components: BTreeMap::new(),
            remove_components: BTreeMap::new(),
            refresh_listeners: BTreeSet::new(),
            event_listeners: SparseMap::new(),
        }
    }

    /// Creates a new archetype with the given component set, building one
    /// column per component and registering the archetype in each
    /// component's `member_of` set.
    ///
    /// # Safety
    ///
    /// `component_indices` must point to a sorted, deduplicated slice of
    /// indices of registered components, and the pointee must remain valid
    /// (and unmoved) for the archetype's lifetime — in practice it points
    /// into a `by_components` map key.
    unsafe fn new(
        arch_idx: ArchetypeIdx,
        component_indices: NonNull<[ComponentIdx]>,
        components: &mut Components,
    ) -> Self {
        let columns: Box<[Column]> = component_indices
            .as_ref()
            .iter()
            .map(|&idx| {
                // SAFETY: caller guarantees every index refers to a
                // registered component.
                let info = unsafe { components.get_by_index_mut(idx).unwrap_unchecked() };

                info.member_of.insert(arch_idx);

                Column {
                    data: unsafe { BlobVec::new(info.layout(), info.drop()) },
                }
            })
            .collect();

        // Leak the boxed slice into a raw pointer; its length is implicitly
        // `component_indices.len()` and it is reconstructed/freed in `Drop`.
        let columns_ptr = unsafe { NonNull::new_unchecked(Box::into_raw(columns) as *mut Column) };

        Self {
            index: arch_idx,
            component_indices,
            columns: columns_ptr,
            entity_ids: vec![],
            insert_components: BTreeMap::new(),
            remove_components: BTreeMap::new(),
            refresh_listeners: BTreeSet::new(),
            event_listeners: SparseMap::new(),
        }
    }

    /// Registers a handler on this archetype: as a refresh listener if its
    /// archetype filter matches, and as a targeted-event listener if its
    /// targeted-event component access matches.
    fn register_handler(&mut self, info: &mut HandlerInfo) {
        if info
            .archetype_filter()
            .matches_archetype(|idx| self.column_of(idx).is_some())
        {
            // Let the handler see the archetype immediately if it already
            // holds entities; otherwise it is refreshed on first spawn/move.
            if self.entity_count() > 0 {
                info.handler_mut().refresh_archetype(self);
            }

            self.refresh_listeners.insert(info.ptr());
        }

        if let (Some(expr), EventId::Targeted(event_id)) = (
            info.targeted_event_component_access(),
            info.received_event(),
        ) {
            if expr.matches_archetype(|idx| self.column_of(idx).is_some()) {
                // Append to the existing list for this event, or create one.
                if let Some(list) = self.event_listeners.get_mut(event_id.index()) {
                    list.insert(info.ptr(), info.priority());
                } else {
                    let mut list = HandlerList::new();
                    list.insert(info.ptr(), info.priority());

                    self.event_listeners.insert(event_id.index(), list);
                }
            }
        }
    }

    /// Returns the handlers listening for the given targeted event on this
    /// archetype, if any.
    pub(crate) fn handler_list_for(&self, idx: TargetedEventIdx) -> Option<&HandlerList> {
        self.event_listeners.get(idx)
    }

    /// Returns this archetype's index.
    pub fn index(&self) -> ArchetypeIdx {
        self.index
    }

    /// Returns the number of entities currently in this archetype.
    pub fn entity_count(&self) -> u32 {
        debug_assert!(u32::try_from(self.entity_ids.len()).is_ok());
        self.entity_ids.len() as u32
    }

    /// Returns the ids of the entities in this archetype, indexed by row.
    pub fn entity_ids(&self) -> &[EntityId] {
        &self.entity_ids
    }

    /// Returns the sorted component indices of this archetype.
    pub fn component_indices(&self) -> &[ComponentIdx] {
        // SAFETY: the pointee is kept alive for as long as this archetype
        // exists (it lives in the boxed `by_components` key, whose heap
        // address is stable across map rehashes) — see `Archetype::new`.
        unsafe { self.component_indices.as_ref() }
    }

    /// Returns this archetype's columns, in `component_indices` order.
    pub fn columns(&self) -> &[Column] {
        // SAFETY: `columns` points to exactly `component_indices.len()`
        // columns (see `new`); the length is zero for the empty archetype.
        unsafe { slice::from_raw_parts(self.columns.as_ptr(), self.component_indices.len()) }
    }

    // Mutable variant of `columns`; `&mut self` guarantees exclusivity.
    fn columns_mut(&mut self) -> &mut [Column] {
        // SAFETY: as in `columns`.
        unsafe { slice::from_raw_parts_mut(self.columns.as_ptr(), self.component_indices.len()) }
    }

    /// Returns the column storing the given component, or `None` if this
    /// archetype does not contain it.
    pub fn column_of(&self, idx: ComponentIdx) -> Option<&Column> {
        let idx = self.component_indices().binary_search(&idx).ok()?;

        // SAFETY: `binary_search` returned an in-bounds position.
        Some(unsafe { &*self.columns.as_ptr().add(idx) })
    }

    // Mutable variant of `column_of`.
    fn column_of_mut(&mut self, idx: ComponentIdx) -> Option<&mut Column> {
        let idx = self.component_indices().binary_search(&idx).ok()?;

        // SAFETY: in-bounds per `binary_search`; `&mut self` is exclusive.
        Some(unsafe { &mut *self.columns.as_ptr().add(idx) })
    }

    // Whether pushing one more entity would reallocate any of this
    // archetype's buffers. Only the first column is checked — NOTE(review):
    // presumes all columns grow in lockstep with the entity id list; confirm
    // against `BlobVec`'s growth policy.
    fn push_would_reallocate(&self) -> bool {
        self.columns()
            .first()
            .map_or(false, |col| col.data.len() == col.data.capacity())
            || self.entity_ids.capacity() == self.entity_ids.len()
    }
}
781
impl Drop for Archetype {
    fn drop(&mut self) {
        // SAFETY: `columns` came from `Box::into_raw` on a boxed slice of
        // exactly `component_indices.len()` columns (see `Archetype::new`),
        // so reconstructing the box frees it exactly once. For the empty
        // archetype the length is zero, so the dangling pointer results in a
        // zero-sized (no-op) deallocation.
        let _ = unsafe {
            Box::from_raw(slice::from_raw_parts_mut(
                self.columns.as_ptr(),
                self.component_indices.len(),
            ))
        };
    }
}
797
// SAFETY(review): `Archetype` uniquely owns the allocation behind `columns`
// (freed in `Drop`), so its raw pointers do not introduce shared mutability
// by themselves. `component_indices` aliases the owning `by_components` key —
// assumed sound because an `Archetype` only moves between threads together
// with its `Archetypes`; confirm.
unsafe impl Send for Archetype {}
// SAFETY(review): `&Archetype` exposes only read access to the pointed-to
// data; see the `Send` note above for the aliasing assumption.
unsafe impl Sync for Archetype {}

// No interior mutability is observable across an unwind boundary.
impl UnwindSafe for Archetype {}
impl RefUnwindSafe for Archetype {}
806
807impl fmt::Debug for Archetype {
808 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
809 f.debug_struct("Archetype")
810 .field("index", &self.index)
811 .field("component_indices", &self.component_indices())
812 .field("columns", &self.columns())
813 .field("entity_ids", &self.entity_ids)
814 .field("insert_components", &self.insert_components)
815 .field("remove_components", &self.remove_components)
816 .field("refresh_listeners", &self.refresh_listeners)
817 .field("event_listeners", &self.event_listeners)
818 .finish()
819 }
820}
821
/// One column of an archetype's table: the type-erased storage for a single
/// component type, holding one value per entity in the archetype.
#[derive(Debug)]
pub struct Column {
    // Type-erased, densely packed component values, indexed by row.
    data: BlobVec,
}
828
impl Column {
    /// Returns a pointer to the start of the column's component data.
    pub fn data(&self) -> NonNull<u8> {
        self.data.as_ptr()
    }
}
836
// SAFETY(review): `Column` owns its `BlobVec` storage; assumed sound to move
// and share across threads because `&Column` only exposes read access —
// confirm against `BlobVec`'s thread-safety guarantees.
unsafe impl Send for Column {}
unsafe impl Sync for Column {}

// No interior mutability is observable across an unwind boundary.
impl UnwindSafe for Column {}
impl RefUnwindSafe for Column {}
845
#[cfg(test)]
mod tests {
    use crate::prelude::*;

    #[derive(Component)]
    struct C(String);

    #[derive(GlobalEvent)]
    struct E1;

    #[derive(GlobalEvent)]
    struct E2;

    /// Inserting a component that the entity already has must replace the
    /// stored value in place rather than add a duplicate.
    #[test]
    fn insert_overwrites() {
        let mut world = World::new();
        let entity = world.spawn();

        world.insert(entity, C("hello".into()));
        assert_eq!(world.get::<C>(entity).unwrap().0, "hello");

        world.insert(entity, C("goodbye".into()));
        assert_eq!(world.get::<C>(entity).unwrap().0, "goodbye");
    }
}