// legion_core — storage.rs
1use crate::borrow::{AtomicRefCell, Ref, RefMap, RefMapMut, RefMut};
2use crate::entity::Entity;
3use crate::entity::EntityLocation;
4use crate::event::EventFilterWrapper;
5use crate::event::Subscriber;
6use crate::event::{Event, Subscribers};
7use crate::filter::ArchetypeFilterData;
8use crate::filter::ChunkFilterData;
9use crate::filter::ChunksetFilterData;
10use crate::filter::EntityFilter;
11use crate::filter::Filter;
12use crate::index::ArchetypeIndex;
13use crate::index::ChunkIndex;
14use crate::index::ComponentIndex;
15use crate::index::SetIndex;
16use crate::iterator::FissileZip;
17use crate::iterator::SliceVecIter;
18use crate::world::TagSet;
19use crate::world::WorldId;
20use derivative::Derivative;
21use fxhash::FxHashMap;
22use smallvec::SmallVec;
23use std::any::TypeId;
24use std::cell::UnsafeCell;
25use std::fmt::Debug;
26use std::fmt::{Display, Formatter};
27use std::mem::size_of;
28use std::ops::Deref;
29use std::ops::DerefMut;
30use std::ops::RangeBounds;
31use std::ptr::NonNull;
32use std::sync::atomic::AtomicU64;
33use std::sync::atomic::Ordering;
34use std::sync::Arc;
35use tracing::trace;
36
37static VERSION_COUNTER: AtomicU64 = AtomicU64::new(0);
38
39fn next_version() -> u64 {
40    VERSION_COUNTER
41        .fetch_add(1, Ordering::Relaxed)
42        .checked_add(1)
43        .unwrap()
44}
45
/// A type ID identifying a component type.
///
/// Extra fields are compile-time gated: `discriminator` exists only with the
/// `ffi` feature, and `name` is carried in debug builds purely so `Display`
/// can print a human-readable type name.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ComponentTypeId {
    type_id: TypeId,
    #[cfg(feature = "ffi")]
    discriminator: u32,
    #[cfg(debug_assertions)]
    name: &'static str,
}

impl ComponentTypeId {
    /// Gets the component type ID that represents type `T`.
    pub fn of<T: Component>() -> Self {
        Self {
            type_id: TypeId::of::<T>(),
            #[cfg(feature = "ffi")]
            discriminator: 0,
            #[cfg(debug_assertions)]
            name: std::any::type_name::<T>(),
        }
    }

    /// Returns the underlying `TypeId`.
    pub fn type_id(&self) -> TypeId { self.type_id }
}

impl Display for ComponentTypeId {
    // Debug builds have the type name available; print it.
    #[cfg(debug_assertions)]
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.name) }

    // Release builds only carry the opaque `TypeId`.
    #[cfg(not(debug_assertions))]
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self.type_id) }
}
77
#[cfg(not(feature = "ffi"))]
/// A type ID identifying a tag type.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct TagTypeId(pub TypeId);

#[cfg(not(feature = "ffi"))]
impl TagTypeId {
    /// Gets the tag type ID that represents type `T`.
    pub fn of<T: Component>() -> Self { Self(TypeId::of::<T>()) }

    /// Returns the underlying `TypeId`.
    pub fn type_id(&self) -> TypeId { self.0 }
}

#[cfg(feature = "ffi")]
/// A type ID identifying a tag type.
///
/// The second field is a discriminator used to distinguish foreign (FFI)
/// types; `of::<T>()` always constructs it as zero.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct TagTypeId(pub TypeId, pub u32);

#[cfg(feature = "ffi")]
impl TagTypeId {
    /// Gets the tag type ID that represents type `T`.
    pub fn of<T: Component>() -> Self { Self(TypeId::of::<T>(), 0) }

    /// Returns the underlying `TypeId`.
    pub fn type_id(&self) -> TypeId { self.0 }
}
103
/// A `Component` is per-entity data that can be attached to a single entity.
pub trait Component: Send + Sync + 'static {}

/// A `Tag` is shared data that can be attached to multiple entities at once.
pub trait Tag: Clone + Send + Sync + PartialEq + 'static {}

// Blanket impls: any eligible type is automatically a `Component`/`Tag`;
// users never implement these traits by hand.
impl<T: Send + Sync + 'static> Component for T {}
impl<T: Clone + Send + Sync + PartialEq + 'static> Tag for T {}
112
/// Stores slices of `ComponentTypeId`, each of which identifies the type of components
/// contained within the archetype of the same index.
#[derive(Derivative)]
#[derivative(Default(bound = ""))]
pub struct ComponentTypes(SliceVec<ComponentTypeId>);

/// Stores slices of `TagTypeId`, each of which identifies the type of tags
/// contained within the archetype of the same index.
#[derive(Derivative)]
#[derivative(Default(bound = ""))]
pub struct TagTypes(SliceVec<TagTypeId>);
124
125impl ComponentTypes {
126    /// Gets an iterator over all type ID slices.
127    pub fn iter(&self) -> SliceVecIter<ComponentTypeId> { self.0.iter() }
128
129    /// Gets the number of slices stored within the set.
130    pub fn len(&self) -> usize { self.0.len() }
131
132    /// Determines if the set is empty.
133    pub fn is_empty(&self) -> bool { self.len() < 1 }
134}
135
136impl TagTypes {
137    /// Gets an iterator over all type ID slices.
138    pub fn iter(&self) -> SliceVecIter<TagTypeId> { self.0.iter() }
139
140    /// Gets the number of slices stored within the set.
141    pub fn len(&self) -> usize { self.0.len() }
142
143    /// Determines if the set is empty.
144    pub fn is_empty(&self) -> bool { self.len() < 1 }
145}
146
/// A vector of slices.
///
/// Each slice is stored inline so as to be efficiently iterated through linearly.
#[derive(Derivative)]
#[derivative(Default(bound = ""))]
pub struct SliceVec<T> {
    // All slice elements, concatenated back-to-back.
    data: Vec<T>,
    // Element count of each stored slice, in insertion order.
    counts: Vec<usize>,
}
156
157impl<T> SliceVec<T> {
158    /// Gets the length of the vector.
159    pub fn len(&self) -> usize { self.counts.len() }
160
161    /// Determines if the vector is empty.
162    pub fn is_empty(&self) -> bool { self.len() < 1 }
163
164    /// Pushes a new slice onto the end of the vector.
165    pub fn push<I: IntoIterator<Item = T>>(&mut self, items: I) {
166        let mut count = 0;
167        for item in items.into_iter() {
168            self.data.push(item);
169            count += 1;
170        }
171        self.counts.push(count);
172    }
173
174    /// Gets an iterator over all slices in the vector.
175    pub fn iter(&self) -> SliceVecIter<T> {
176        SliceVecIter {
177            data: &self.data,
178            counts: &self.counts,
179        }
180    }
181}
182
/// Stores all entity data for a `World`.
pub struct Storage {
    // ID of the world this storage belongs to.
    world_id: WorldId,
    // Per-archetype component type slices, parallel to `archetypes`.
    component_types: ComponentTypes,
    // Per-archetype tag type slices, parallel to `archetypes`.
    tag_types: TagTypes,
    archetypes: Vec<ArchetypeData>,
    // Event subscribers; matched against each new archetype on push.
    subscribers: Subscribers,
}
191
impl Storage {
    /// Creates an empty `Storage` for the given world.
    pub fn new(world_id: WorldId) -> Self {
        Self {
            world_id,
            component_types: ComponentTypes::default(),
            tag_types: TagTypes::default(),
            archetypes: Vec::default(),
            subscribers: Subscribers::default(),
        }
    }

    /// Registers an event subscriber and additionally attaches it to every
    /// already-existing archetype that matches `filter`.
    pub(crate) fn subscribe<T: EntityFilter + Sync + 'static>(
        &mut self,
        sender: crossbeam_channel::Sender<Event>,
        filter: T,
    ) {
        let subscriber = Subscriber::new(Arc::new(EventFilterWrapper(filter.clone())), sender);
        self.subscribers.push(subscriber.clone());

        // Collect the indexes first: iterating borrows `self` immutably while
        // subscribing below requires a mutable borrow.
        for i in filter.iter_archetype_indexes(self).collect::<Vec<_>>() {
            self.archetypes_mut()[i].subscribe(subscriber.clone());
        }
    }

    /// Creates a new archetype.
    ///
    /// Returns the index of the newly created archetype and an exclusive reference to the
    /// achetype's data.
    pub(crate) fn alloc_archetype(
        &mut self,
        desc: ArchetypeDescription,
    ) -> (ArchetypeIndex, &mut ArchetypeData) {
        let index = ArchetypeIndex(self.archetypes.len());
        let id = ArchetypeId(self.world_id, index);
        let archetype = ArchetypeData::new(id, desc);

        self.push(archetype);

        // `push` appended the archetype, so `index` now refers to it.
        let archetype = &mut self.archetypes[index];
        (index, archetype)
    }

    /// Appends an archetype: records its component/tag type slices, notifies
    /// matching subscribers with `ArchetypeCreated`, and hands the matched
    /// subscribers to the archetype itself.
    pub(crate) fn push(&mut self, mut archetype: ArchetypeData) {
        let desc = archetype.description();
        // Record the type slices *before* matching so the filter data below
        // includes the new archetype at `index`.
        self.component_types
            .0
            .push(desc.components.iter().map(|&(t, _)| t));
        self.tag_types.0.push(desc.tags.iter().map(|&(t, _)| t));

        let index = ArchetypeIndex(self.archetypes.len());
        let archetype_data = ArchetypeFilterData {
            component_types: &self.component_types,
            tag_types: &self.tag_types,
        };

        let id = archetype.id();

        trace!(
            world = id.world().index(),
            archetype = *id.index(),
            components = ?desc.component_names,
            tags = ?desc.tag_names,
            "Created Archetype"
        );

        let mut subscribers = self.subscribers.matches_archetype(archetype_data, index);
        subscribers.send(Event::ArchetypeCreated(id));
        archetype.set_subscribers(subscribers);

        self.archetypes.push(archetype);
    }

    /// Gets a vector of slices of all component types for all archetypes.
    ///
    /// Each slice contains the component types for the archetype at the corresponding index.
    pub fn component_types(&self) -> &ComponentTypes { &self.component_types }

    /// Gets a vector of slices of all tag types for all archetypes.
    ///
    /// Each slice contains the tag types for the archetype at the corresponding index.
    pub fn tag_types(&self) -> &TagTypes { &self.tag_types }

    /// Gets a slice reference to all archetypes.
    pub fn archetypes(&self) -> &[ArchetypeData] { &self.archetypes }

    /// Gets a mutable slice reference to all archetypes.
    pub fn archetypes_mut(&mut self) -> &mut [ArchetypeData] { &mut self.archetypes }

    /// Removes and yields the archetypes in `range`.
    ///
    /// NOTE(review): this does not touch `component_types`/`tag_types`, which
    /// are kept parallel to `archetypes` — presumably the caller re-syncs them.
    pub(crate) fn drain<R: RangeBounds<usize>>(
        &mut self,
        range: R,
    ) -> std::vec::Drain<ArchetypeData> {
        self.archetypes.drain(range)
    }

    /// Gets the archetype at `index`, if it exists.
    pub(crate) fn archetype(
        &self,
        ArchetypeIndex(index): ArchetypeIndex,
    ) -> Option<&ArchetypeData> {
        self.archetypes().get(index)
    }

    /// Mutably gets the archetype at `index`, if it exists.
    pub(crate) fn archetype_mut(
        &mut self,
        ArchetypeIndex(index): ArchetypeIndex,
    ) -> Option<&mut ArchetypeData> {
        self.archetypes_mut().get_mut(index)
    }

    /// Gets the archetype at `index` without bounds checking.
    ///
    /// # Safety
    /// `index` must be in bounds of the archetype slice.
    pub(crate) unsafe fn archetype_unchecked(
        &self,
        ArchetypeIndex(index): ArchetypeIndex,
    ) -> &ArchetypeData {
        self.archetypes().get_unchecked(index)
    }

    /// Mutably gets the archetype at `index` without bounds checking.
    ///
    /// # Safety
    /// `index` must be in bounds of the archetype slice.
    pub(crate) unsafe fn archetype_unchecked_mut(
        &mut self,
        ArchetypeIndex(index): ArchetypeIndex,
    ) -> &mut ArchetypeData {
        self.archetypes_mut().get_unchecked_mut(index)
    }

    /// Resolves an entity location to its chunk, if every level of the
    /// location (archetype, chunkset, chunk) still exists.
    pub(crate) fn chunk(&self, loc: EntityLocation) -> Option<&ComponentStorage> {
        self.archetype(loc.archetype())
            .and_then(|atd| atd.chunkset(loc.set()))
            .and_then(|cs| cs.chunk(loc.chunk()))
    }

    /// Mutable variant of [`Storage::chunk`].
    pub(crate) fn chunk_mut(&mut self, loc: EntityLocation) -> Option<&mut ComponentStorage> {
        self.archetype_mut(loc.archetype())
            .and_then(|atd| atd.chunkset_mut(loc.set()))
            .and_then(|cs| cs.chunk_mut(loc.chunk()))
    }
}
328
/// Stores metadata decribing the type of a tag.
///
/// The function pointers are type-erased shims generated by `TagMeta::of`,
/// allowing tag values to be dropped, compared and cloned through raw
/// pointers without knowing the concrete type.
#[derive(Copy, Clone, PartialEq)]
pub struct TagMeta {
    size: usize,
    align: usize,
    // `None` when the type does not need dropping.
    drop_fn: Option<fn(*mut u8)>,
    eq_fn: fn(*const u8, *const u8) -> bool,
    clone_fn: fn(*const u8, *mut u8),
}
338
impl TagMeta {
    /// Gets the tag meta of tag type `T`.
    pub fn of<T: Tag>() -> Self {
        TagMeta {
            size: size_of::<T>(),
            align: std::mem::align_of::<T>(),
            // Only generate a drop shim when the type actually needs dropping.
            drop_fn: if std::mem::needs_drop::<T>() {
                Some(|ptr| unsafe { std::ptr::drop_in_place(ptr as *mut T) })
            } else {
                None
            },
            eq_fn: |a, b| unsafe { *(a as *const T) == *(b as *const T) },
            clone_fn: |src, dst| unsafe {
                let clone = (&*(src as *const T)).clone();
                std::ptr::write(dst as *mut T, clone);
            },
        }
    }

    /// Compares two tag values for equality.
    ///
    /// # Safety
    /// Both pointers must reference valid, initialized values of the type
    /// this meta was created for.
    pub(crate) unsafe fn equals(&self, a: *const u8, b: *const u8) -> bool { (self.eq_fn)(a, b) }

    /// Clones the value at `src` into the (uninitialized) memory at `dst`.
    ///
    /// # Safety
    /// `src` must reference a valid value of this meta's type; `dst` must be
    /// valid for writes of that type and properly aligned.
    pub(crate) unsafe fn clone(&self, src: *const u8, dst: *mut u8) { (self.clone_fn)(src, dst) }

    /// Drops the value at `val` in place (no-op for types without a drop fn).
    ///
    /// # Safety
    /// `val` must reference a valid value of this meta's type, which must not
    /// be used again afterwards.
    pub(crate) unsafe fn drop(&self, val: *mut u8) {
        if let Some(drop_fn) = self.drop_fn {
            (drop_fn)(val);
        }
    }

    /// Returns the memory layout of a single tag value.
    pub(crate) fn layout(&self) -> std::alloc::Layout {
        unsafe { std::alloc::Layout::from_size_align_unchecked(self.size, self.align) }
    }

    /// Returns `true` when the tag type occupies no memory.
    pub(crate) fn is_zero_sized(&self) -> bool { self.size == 0 }
}
374
/// Stores metadata describing the type of a component.
#[derive(Copy, Clone, PartialEq)]
pub struct ComponentMeta {
    size: usize,
    align: usize,
    // Type-erased drop shim; `None` when the type does not need dropping.
    drop_fn: Option<fn(*mut u8)>,
}
382
383impl ComponentMeta {
384    /// Gets the component meta of component type `T`.
385    pub fn of<T: Component>() -> Self {
386        ComponentMeta {
387            size: size_of::<T>(),
388            align: std::mem::align_of::<T>(),
389            drop_fn: if std::mem::needs_drop::<T>() {
390                Some(|ptr| unsafe { std::ptr::drop_in_place(ptr as *mut T) })
391            } else {
392                None
393            },
394        }
395    }
396
397    pub(crate) fn size(&self) -> usize { self.size }
398
399    pub(crate) fn align(&self) -> usize { self.align }
400}
401
/// Describes the layout of an archetype, including what components
/// and tags shall be attached to entities stored within an archetype.
#[derive(Default, Clone, PartialEq)]
pub struct ArchetypeDescription {
    tags: Vec<(TagTypeId, TagMeta)>,
    components: Vec<(ComponentTypeId, ComponentMeta)>,
    // Human-readable names parallel to `tags`/`components`; "<unknown>" for
    // entries registered via the `_raw` methods.
    tag_names: Vec<&'static str>,
    component_names: Vec<&'static str>,
}
411
412impl ArchetypeDescription {
413    /// Gets a slice of the tags in the description.
414    pub fn tags(&self) -> &[(TagTypeId, TagMeta)] { &self.tags }
415
416    /// Gets a slice of the components in the description.
417    pub fn components(&self) -> &[(ComponentTypeId, ComponentMeta)] { &self.components }
418
419    /// Adds a tag to the description.
420    pub fn register_tag_raw(&mut self, type_id: TagTypeId, type_meta: TagMeta) {
421        self.tags.push((type_id, type_meta));
422        self.tag_names.push("<unknown>");
423    }
424
425    /// Adds a tag to the description.
426    pub fn register_tag<T: Tag>(&mut self) {
427        self.tags.push((TagTypeId::of::<T>(), TagMeta::of::<T>()));
428        self.tag_names.push(std::any::type_name::<T>());
429    }
430
431    /// Adds a component to the description.
432    pub fn register_component_raw(&mut self, type_id: ComponentTypeId, type_meta: ComponentMeta) {
433        self.components.push((type_id, type_meta));
434        self.component_names.push("<unknown>");
435    }
436
437    /// Adds a component to the description.
438    pub fn register_component<T: Component>(&mut self) {
439        self.components
440            .push((ComponentTypeId::of::<T>(), ComponentMeta::of::<T>()));
441        self.component_names.push(std::any::type_name::<T>());
442    }
443}
444
impl<'a> Filter<ArchetypeFilterData<'a>> for ArchetypeDescription {
    type Iter = FissileZip<SliceVecIter<'a, TagTypeId>, SliceVecIter<'a, ComponentTypeId>>;

    fn collect(&self, source: ArchetypeFilterData<'a>) -> Self::Iter {
        FissileZip::new(source.tag_types.iter(), source.component_types.iter())
    }

    // An archetype matches only when its tag and component type sets are
    // *exactly* equal to this description's: same lengths and same
    // membership (order-insensitive).
    fn is_match(&self, (tags, components): &<Self::Iter as Iterator>::Item) -> Option<bool> {
        Some(
            tags.len() == self.tags.len()
                && self.tags.iter().all(|(t, _)| tags.contains(t))
                && components.len() == self.components.len()
                && self.components.iter().all(|(t, _)| components.contains(t)),
        )
    }
}
461
// Upper bound (in bytes) used when sizing a chunk's largest component array;
// see `ArchetypeData::new`.
const MAX_CHUNK_SIZE: usize = 16 * 1024 * 10;
// Alignment of the backing allocation for component storage.
const COMPONENT_STORAGE_ALIGNMENT: usize = 64;
464
/// Unique ID of an archetype.
///
/// Combines the owning world's ID with the archetype's index in that world.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct ArchetypeId(WorldId, ArchetypeIndex);
468
469impl ArchetypeId {
470    pub(crate) fn new(world_id: WorldId, index: ArchetypeIndex) -> Self {
471        ArchetypeId(world_id, index)
472    }
473
474    pub fn index(self) -> ArchetypeIndex { self.1 }
475
476    pub fn world(self) -> WorldId { self.0 }
477}
478
/// Contains all of the tags attached to the entities in each chunk.
pub struct Tags(pub(crate) SmallVec<[(TagTypeId, TagStorage); 3]>);

impl Tags {
    fn new(mut data: SmallVec<[(TagTypeId, TagStorage); 3]>) -> Self {
        // Keep entries sorted by type ID so `get`/`get_mut` can binary search.
        data.sort_by_key(|&(t, _)| t);
        Self(data)
    }

    // Debug-checks that every tag storage holds one value per chunkset.
    fn validate(&self, set_count: usize) {
        for (_, tags) in self.0.iter() {
            debug_assert_eq!(set_count, tags.len());
        }
    }

    /// Gets the set of tag values of the specified type attached to all chunks.
    #[inline]
    pub fn get(&self, type_id: TagTypeId) -> Option<&TagStorage> {
        self.0
            .binary_search_by_key(&type_id, |&(t, _)| t)
            .ok()
            // Index comes straight from a successful binary search, so it is
            // in bounds.
            .map(|i| unsafe { &self.0.get_unchecked(i).1 })
    }

    /// Mutably gets the set of all tag values of the specified type attached to all chunks.
    #[inline]
    pub fn get_mut(&mut self, type_id: TagTypeId) -> Option<&mut TagStorage> {
        self.0
            .binary_search_by_key(&type_id, |&(t, _)| t)
            .ok()
            .map(move |i| unsafe { &mut self.0.get_unchecked_mut(i).1 })
    }

    /// Clones the tag values of chunkset `index` into an owned `DynamicTagSet`.
    pub(crate) fn tag_set(&self, SetIndex(index): SetIndex) -> DynamicTagSet {
        let mut tags = DynamicTagSet { tags: Vec::new() };

        unsafe {
            for &(type_id, ref storage) in self.0.iter() {
                let (ptr, element_size, count) = storage.data_raw();
                debug_assert!(index < count, "set index out of bounds");
                // Offset into the storage to the value belonging to this set;
                // `push` clones it, so the returned set owns its copies.
                tags.push(
                    type_id,
                    *storage.element(),
                    NonNull::new(ptr.as_ptr().add(element_size * index)).unwrap(),
                );
            }
        }

        tags
    }
}
530
/// An owned, type-erased set of tag values.
pub(crate) struct DynamicTagSet {
    // the pointer here is to heap allocated memory owned by the tag set
    tags: Vec<(TagTypeId, TagMeta, NonNull<u8>)>,
}

// SAFETY: the raw pointers reference heap allocations exclusively owned by
// this set (cloned in `push`, freed in `Drop`), so no external aliasing can
// occur across threads. NOTE(review): this also relies on every stored tag
// type being Send + Sync, which the `Tag` trait bound above guarantees.
unsafe impl Send for DynamicTagSet {}

unsafe impl Sync for DynamicTagSet {}
539
540impl DynamicTagSet {
541    pub fn push(&mut self, type_id: TagTypeId, meta: TagMeta, value: NonNull<u8>) {
542        // we clone the value here and take ownership of the copy
543        unsafe {
544            if meta.is_zero_sized() {
545                self.tags
546                    .push((type_id, meta, NonNull::new(meta.align as *mut u8).unwrap()));
547            } else {
548                let copy = std::alloc::alloc(meta.layout());
549                meta.clone(value.as_ptr(), copy);
550                self.tags.push((type_id, meta, NonNull::new(copy).unwrap()));
551            }
552        }
553    }
554
555    pub fn remove(&mut self, type_id: TagTypeId) {
556        if let Some((i, _)) = self
557            .tags
558            .iter()
559            .enumerate()
560            .find(|(_, &(t, _, _))| t == type_id)
561        {
562            let (_, meta, ptr) = self.tags.remove(i);
563            unsafe {
564                // drop and dealloc the copy as we own this memory
565                if let Some(drop_fn) = meta.drop_fn {
566                    drop_fn(ptr.as_ptr());
567                }
568
569                if !meta.is_zero_sized() {
570                    std::alloc::dealloc(ptr.as_ptr(), meta.layout());
571                }
572            }
573        }
574    }
575}
576
577impl TagSet for DynamicTagSet {
578    fn write_tags(&self, tags: &mut Tags) {
579        for &(type_id, ref meta, ptr) in self.tags.iter() {
580            let storage = tags.get_mut(type_id).unwrap();
581            unsafe {
582                if meta.drop_fn.is_some() && !meta.is_zero_sized() {
583                    // clone the value into temp storage then move it into the chunk
584                    // we can dealloc the copy without dropping because the value
585                    // is considered moved and will be dropped by the tag storage later
586                    let copy = std::alloc::alloc(meta.layout());
587                    meta.clone(ptr.as_ptr(), copy);
588                    storage.push_raw(copy);
589                    std::alloc::dealloc(copy, meta.layout());
590                } else {
591                    // copy the value directly into the tag storage
592                    // if the value has no drop fn, then it is safe for us to make
593                    // copies of the data without explicit clones
594                    storage.push_raw(ptr.as_ptr())
595                }
596            }
597        }
598    }
599}
600
601impl Drop for DynamicTagSet {
602    fn drop(&mut self) {
603        // we own all of the vales in the set, so we need to drop and dealloc them
604        for (_, meta, ptr) in self.tags.drain(..) {
605            unsafe {
606                let layout = std::alloc::Layout::from_size_align_unchecked(meta.size, meta.align);
607                if let Some(drop_fn) = meta.drop_fn {
608                    drop_fn(ptr.as_ptr());
609                }
610                if !meta.is_zero_sized() {
611                    std::alloc::dealloc(ptr.as_ptr(), layout);
612                }
613            }
614        }
615    }
616}
617
/// Stores entity data in chunks. All entities within an archetype have the same data layout
/// (component and tag types).
pub struct ArchetypeData {
    id: ArchetypeId,
    desc: ArchetypeDescription,
    // One tag value per chunkset, per tag type.
    tags: Tags,
    // Precomputed memory layout shared by every chunk in this archetype.
    component_layout: ComponentStorageLayout,
    // One chunkset per unique combination of tag values.
    chunk_sets: Vec<Chunkset>,
    subscribers: Subscribers,
}
628
629impl ArchetypeData {
    /// Builds archetype storage for `desc`, precomputing the per-chunk
    /// component memory layout.
    fn new(id: ArchetypeId, desc: ArchetypeDescription) -> Self {
        // create tag storage
        let tags = desc
            .tags
            .iter()
            .map(|&(type_id, meta)| (type_id, TagStorage::new(meta)))
            .collect();

        // Size chunks so that the largest single component array stays within
        // MAX_CHUNK_SIZE, with a minimum capacity of one entity.
        let max_component_size = desc
            .components
            .iter()
            .map(|(_, meta)| meta.size)
            .max()
            .unwrap_or(0);
        let entity_capacity = std::cmp::max(
            1,
            MAX_CHUNK_SIZE / std::cmp::max(max_component_size, size_of::<Entity>()),
        );
        // Lay the component arrays out back-to-back, aligning each array to
        // both the storage alignment and the component's own alignment.
        let mut data_capacity = 0usize;
        let mut component_data_offsets = Vec::new();
        for &(type_id, meta) in desc.components.iter() {
            data_capacity = align_up(
                align_up(data_capacity, COMPONENT_STORAGE_ALIGNMENT),
                meta.align,
            );
            component_data_offsets.push((type_id, data_capacity, meta));
            data_capacity += meta.size * entity_capacity;
        }
        let data_alignment =
            std::alloc::Layout::from_size_align(data_capacity, COMPONENT_STORAGE_ALIGNMENT)
                .expect("invalid component data size/alignment");

        ArchetypeData {
            desc,
            id,
            tags: Tags::new(tags),
            component_layout: ComponentStorageLayout {
                capacity: entity_capacity,
                alloc_layout: data_alignment,
                data_layout: component_data_offsets,
            },
            chunk_sets: Vec::new(),
            subscribers: Subscribers::default(),
        }
    }
676
677    pub(crate) fn delete_all(&mut self) {
678        for set in &mut self.chunk_sets {
679            // Clearing the chunk will Drop all the data
680            set.chunks.clear();
681        }
682    }
683
    /// Adds a subscriber to this archetype and forwards it to every chunkset
    /// it matches.
    pub(crate) fn subscribe(&mut self, subscriber: Subscriber) {
        self.subscribers.push(subscriber.clone());

        // Index-based loop: building the filter data borrows `self` while
        // `chunk_sets[i]` must be borrowed mutably afterwards.
        for i in (0..self.chunk_sets.len()).map(SetIndex) {
            let filter = ChunksetFilterData {
                archetype_data: self,
            };

            if subscriber.filter.matches_chunkset(filter, i) {
                self.chunk_sets[i].subscribe(subscriber.clone());
            }
        }
    }

    /// Replaces this archetype's subscribers and re-derives each chunkset's
    /// matching subset.
    pub(crate) fn set_subscribers(&mut self, subscribers: Subscribers) {
        self.subscribers = subscribers;

        for i in (0..self.chunk_sets.len()).map(SetIndex) {
            let filter = ChunksetFilterData {
                archetype_data: self,
            };

            let subscribers = self.subscribers.matches_chunkset(filter, i);
            self.chunk_sets[i].set_subscribers(subscribers);
        }
    }

    /// Gets the unique ID of this archetype.
    pub fn id(&self) -> ArchetypeId { self.id }
713
    /// Finds the index of the chunkset in this archetype whose tag values all
    /// equal the values at `other_set_index` in `other_tags`.
    ///
    /// Assumes `other_tags` contains a storage for every tag type of this
    /// archetype (`unwrap` below panics otherwise).
    fn find_chunk_set_by_tags(
        &self,
        other_tags: &Tags,
        other_set_index: SetIndex,
    ) -> Option<SetIndex> {
        // search for a matching chunk set
        let mut set_match = None;
        for self_set_index in 0..self.chunk_sets.len() {
            let self_set_index = SetIndex(self_set_index);
            let mut matches = true;
            // Every tag type must compare equal for the set to match.
            for &(type_id, ref tags) in self.tags.0.iter() {
                unsafe {
                    let (self_tag_ptr, size, _) = tags.data_raw();
                    let (other_tag_ptr, _, _) = other_tags.get(type_id).unwrap().data_raw();

                    // Compare the value at our set index with the value at
                    // the other set's index via the type-erased eq shim.
                    if !tags.element().equals(
                        self_tag_ptr.as_ptr().add(self_set_index.0 * size),
                        other_tag_ptr.as_ptr().add(other_set_index.0 * size),
                    ) {
                        matches = false;
                        break;
                    }
                }
            }

            if matches {
                set_match = Some(self_set_index);
                break;
            }
        }

        set_match
    }
747
    /// Returns the chunkset whose tag values match those at
    /// `src_chunk_set_index` in `src_tags`, allocating a new chunkset with
    /// cloned tag values when no existing set matches.
    pub(crate) fn find_or_create_chunk_set_by_tags(
        &mut self,
        src_tags: &Tags,
        src_chunk_set_index: SetIndex,
    ) -> SetIndex {
        let dst_chunk_set_index = self.find_chunk_set_by_tags(src_tags, src_chunk_set_index);
        dst_chunk_set_index.unwrap_or_else(|| {
            self.alloc_chunk_set(|self_tags| {
                // Clone each source tag value into the new chunkset's storage.
                for (type_id, other_tags) in src_tags.0.iter() {
                    unsafe {
                        let (src, _, _) = other_tags.data_raw();
                        let dst = self_tags.get_mut(*type_id).unwrap().alloc_ptr();
                        other_tags.element().clone(src.as_ptr(), dst);
                    }
                }
            })
        })
    }
766
    /// Moves all chunksets out of `other` into this archetype, merging each
    /// into an existing chunkset with equal tag values where possible and
    /// otherwise moving the whole set (with cloned tags). All moved chunks
    /// are marked modified.
    pub(crate) fn move_from(&mut self, mut other: ArchetypeData) {
        let other_tags = &other.tags;
        for (other_index, mut set) in other.chunk_sets.drain(..).enumerate() {
            let other_index = SetIndex(other_index);
            let set_match = self.find_chunk_set_by_tags(&other_tags, other_index);

            if let Some(chunk_set) = set_match {
                // if we found a match, move the chunks into the set
                let target = &mut self.chunk_sets[chunk_set];
                for mut chunk in set.drain(..) {
                    chunk.mark_modified();
                    target.push(chunk);
                }
            } else {
                // if we did not find a match, clone the tags and move the set
                set.mark_modified();
                self.push(set, |self_tags| {
                    for &(type_id, ref other_tags) in other_tags.0.iter() {
                        unsafe {
                            let (src, _, _) = other_tags.data_raw();
                            let dst = self_tags.get_mut(type_id).unwrap().alloc_ptr();
                            other_tags.element().clone(src.as_ptr(), dst);
                        }
                    }
                });
            }
        }

        // Every tag storage must now hold exactly one value per chunkset.
        self.tags.validate(self.chunk_sets.len());
    }
797
798    /// Given a source world and archetype, step through all of its chunks and copy the data in it
799    /// into this archetype. The archetype index is provided so that we can produce EntityLocations
800    /// During this process, we can replace pre-existing entities. This function assumes that any
801    /// entity referenced in replace_mappings actually exists in the world. The public API in world
802    /// checks this assumption and panics if it is violated.
803    ///
804    /// See also `clone_from_single`, which copies a specific entity
805    #[allow(clippy::too_many_arguments)]
806    pub(crate) fn clone_from<
807        's,
808        CloneImplT: crate::world::CloneImpl,
809        CloneImplResultT: crate::world::CloneImplResult,
810        EntityReplacePolicyT: crate::world::EntityReplacePolicy<'s>,
811    >(
812        &mut self,
813        src_world: &crate::world::World,
814        src_archetype: &ArchetypeData,
815        dst_archetype_index: ArchetypeIndex,
816        dst_entity_allocator: &crate::entity::EntityAllocator,
817        dst_entity_locations: &mut crate::entity::Locations,
818        clone_impl: &CloneImplT,
819        clone_impl_result: &mut CloneImplResultT,
820        entity_replace_policy: &EntityReplacePolicyT,
821    ) {
822        // Iterate all the chunk sets within the source archetype
823        let src_tags = &src_archetype.tags;
824        for (src_chunk_set_index, src_chunk_set) in src_archetype.chunk_sets.iter().enumerate() {
825            let src_chunk_set_index = SetIndex(src_chunk_set_index);
826            let dst_chunk_set_index =
827                self.find_or_create_chunk_set_by_tags(src_tags, src_chunk_set_index);
828
829            // Iterate all the chunks within the source chunk set
830            for (_src_chunk_idx, src_chunk) in src_chunk_set.chunks.iter().enumerate() {
831                // Copy the data from source to destination. Continuously find or create chunks as
832                // needed until we've copied all the data
833                let mut entities_remaining = src_chunk.len();
834                while entities_remaining > 0 {
835                    // Get or allocate a chunk.. since we could be transforming to a larger component size, it's possible
836                    // that even a brand-new, empty chunk won't be large enough to hold everything in the chunk we are copying from
837                    let dst_free_chunk_index =
838                        self.get_free_chunk(dst_chunk_set_index, entities_remaining);
839                    let dst_chunk_set = &mut self.chunk_sets[dst_chunk_set_index];
840                    let dst_chunk = &mut dst_chunk_set.chunks[dst_free_chunk_index];
841
842                    // Determine how many entities we will write
843                    let entities_to_write =
844                        std::cmp::min(entities_remaining, dst_chunk.capacity() - dst_chunk.len());
845
846                    // Prepare to write to the chunk storage
847                    let mut writer = dst_chunk.writer();
848                    let (dst_entities, dst_components) = writer.get();
849
850                    // Find the region of memory we will be reading from in the source chunk
851                    let src_begin_idx = ComponentIndex(src_chunk.len() - entities_remaining);
852                    let src_end_idx = ComponentIndex(src_begin_idx.0 + entities_to_write);
853
854                    let dst_begin_idx = ComponentIndex(dst_entities.len());
855                    let dst_end_idx = ComponentIndex(dst_entities.len() + entities_to_write);
856
857                    // Copy all the entities to the destination chunk. The normal case is that we simply allocate
858                    // new entities.
859                    //
860                    // We also allow end-user to specify a HashMap<Entity, Entity>. The key is an Entity from
861                    // the source chunk and the value is an Entity from the destination chunk. Rather than appending
862                    // data to the destination chunk, we will *replace* the data, according to the mapping. This
863                    // is specifically intended for use with hot-reloading data. When some source data is changed,
864                    // we can use the mapping to respawn entities as needed using the new data.
865
866                    // We know how many entities will be appended to this list
867                    dst_entities.reserve(dst_entities.len() + entities_to_write);
868
869                    for src_entity in &src_chunk.entities[src_begin_idx.0..src_end_idx.0] {
870                        // Determine if there is an entity we will be replacing
871                        let dst_entity = entity_replace_policy.get_dst_entity(*src_entity);
872
873                        // The location of the next entity
874                        let location = EntityLocation::new(
875                            dst_archetype_index,
876                            dst_chunk_set_index,
877                            dst_free_chunk_index,
878                            ComponentIndex(dst_entities.len()),
879                        );
880
881                        // Determine the Entity to use for this element
882                        let dst_entity = if let Some(dst_entity) = dst_entity {
883                            // We are replacing data
884                            // Verify that the entity is alive.. this checks the index and version of the entity
885                            // The entity should be alive because World::clone_from verifies this
886                            debug_assert!(dst_entity_allocator.is_alive(dst_entity));
887                            dst_entity
888                        } else {
889                            // We are appending data, allocate a new entity
890                            dst_entity_allocator.create_entity()
891                        };
892
893                        dst_entity_locations.set(dst_entity, location);
894                        dst_entities.push(dst_entity);
895
896                        clone_impl_result.add_result(*src_entity, dst_entity);
897                    }
898
899                    ArchetypeData::clone_components(
900                        clone_impl,
901                        src_world,
902                        src_archetype,
903                        src_chunk,
904                        src_begin_idx..src_end_idx,
905                        &dst_entities[dst_begin_idx.0..dst_end_idx.0],
906                        dst_components,
907                        entities_to_write,
908                    );
909
910                    entities_remaining -= entities_to_write;
911                }
912            }
913        }
914    }
915
    /// Given a source world, archetype, and entity, copy it into this archetype. The archetype
    /// index is provided so that we can produce EntityLocations.
    /// During this process, we can replace a pre-existing entity. This function assumes that if
    /// replace_mapping is not none, that the entity exists. The public API in world checks this
    /// assumption and panics if it is violated.
    ///
    /// Returns the destination entity: `replace_mapping` when given, otherwise a
    /// freshly allocated entity.
    ///
    /// See also `clone_from`, which copies all data
    #[allow(clippy::too_many_arguments)]
    pub(crate) fn clone_from_single<C: crate::world::CloneImpl>(
        &mut self,
        src_world: &crate::world::World,
        src_archetype: &ArchetypeData,
        src_location: &EntityLocation,
        dst_archetype_index: ArchetypeIndex,
        dst_entity_allocator: &crate::entity::EntityAllocator,
        dst_entity_locations: &mut crate::entity::Locations,
        clone_impl: &C,
        replace_mapping: Option<Entity>,
    ) -> Entity {
        // We are reading from a specific chunk set within the source archetype
        let src_tags = &src_archetype.tags;
        let src_chunk_set_index = src_location.set();
        let src_chunk_set = &src_archetype.chunk_sets[src_chunk_set_index];

        // Find or create the chunk set that matches the source chunk set
        let dst_chunk_set_index =
            self.find_or_create_chunk_set_by_tags(src_tags, src_chunk_set_index);

        // Get the source chunk
        let src_chunk_idx = src_location.chunk();
        let src_chunk = &src_chunk_set.chunks[src_chunk_idx];

        // Get or allocate a chunk.. since we could be transforming to a larger component size, it's possible
        // that even a brand-new, empty chunk won't be large enough to hold everything in the chunk we are copying from
        let dst_free_chunk_index = self.get_free_chunk(dst_chunk_set_index, 1);
        let dst_chunk_set = &mut self.chunk_sets[dst_chunk_set_index];
        let dst_chunk = &mut dst_chunk_set.chunks[dst_free_chunk_index];

        // Determine how many entities we will write (always one here)
        let entities_to_write = 1;

        // Prepare to write to the chunk storage. Note: the writer emits
        // EntityInserted events for appended entities when it is dropped.
        let mut writer = dst_chunk.writer();
        let (dst_entities, dst_components) = writer.get();

        // Find the region of memory we will be reading from in the source chunk
        // (a single element starting at the source component index)
        let src_begin_idx = src_location.component();
        let src_end_idx = ComponentIndex(src_begin_idx.0 + 1);

        // We know how many entities will be appended to this list
        let dst_begin_idx = ComponentIndex(dst_entities.len());
        let dst_end_idx = ComponentIndex(dst_entities.len() + entities_to_write);

        // Copy the entity to the destination chunk. The normal case is that we simply allocate
        // a new entity.
        //
        // We also allow end-user to specify a Option<Entity>. The src Entity from will *replace* the
        // data of the given Entity

        // The location of the next entity
        let location = EntityLocation::new(
            dst_archetype_index,
            dst_chunk_set_index,
            dst_free_chunk_index,
            ComponentIndex(dst_entities.len()),
        );

        let dst_entity = if let Some(dst_entity) = replace_mapping {
            // We are replacing data
            // Verify that the entity is alive.. this checks the index and version of the entity
            // The entity should be alive because World::clone_from verifies this
            debug_assert!(dst_entity_allocator.is_alive(dst_entity));
            dst_entity
        } else {
            // We are appending data, allocate a new entity
            dst_entity_allocator.create_entity()
        };

        dst_entity_locations.set(dst_entity, location);
        dst_entities.push(dst_entity);

        // Copy/transform the component data for this single entity into the
        // destination chunk via the user-provided CloneImpl.
        ArchetypeData::clone_components(
            clone_impl,
            src_world,
            src_archetype,
            src_chunk,
            src_begin_idx..src_end_idx,
            &dst_entities[dst_begin_idx.0..dst_end_idx.0],
            dst_components,
            entities_to_write,
        );

        dst_entity
    }
1010
    /// Implements shared logic between `clone_from` and `clone_from_single`. For every component
    /// type in the source archetype, maps it to a destination component type via the `CloneImpl`,
    /// reserves raw space in the destination chunk, and delegates the actual copy/transform of
    /// the component data to the `CloneImpl`.
    #[allow(clippy::too_many_arguments)]
    fn clone_components<C: crate::world::CloneImpl>(
        clone_impl: &C,
        src_world: &crate::world::World,
        src_archetype: &ArchetypeData,
        src_chunk: &ComponentStorage,
        src_range: core::ops::Range<ComponentIndex>,
        dst_entities: &[Entity],
        dst_components: &UnsafeCell<Components>,
        entities_to_write: usize,
    ) {
        for (src_type, _) in src_archetype.description().components() {
            // NOTE(review): mutable access through the UnsafeCell; assumes the caller
            // holds exclusive write access to the destination chunk (it owns the
            // StorageWriter) -- confirm before refactoring.
            let dst_components = unsafe { &mut *dst_components.get() };

            // Look up what type we should transform the data into (can be the same type, meaning it should be cloned)
            let (dst_type, _) = clone_impl.map_component_type(*src_type);

            // Create a writer that will insert the data into the destination chunk
            let mut dst_component_writer = dst_components
                .get_mut(dst_type)
                .expect("ComponentResourceSet missing in clone_from")
                .writer();

            // Find the data in the source chunk
            let src_component_storage = src_chunk
                .components(*src_type)
                .expect("ComponentResourceSet missing in clone_from");

            // Now copy the data
            unsafe {
                let (src_component_chunk_data, src_element_size, _) =
                    src_component_storage.data_raw();

                // offset to the first entity we want to copy from the source chunk
                let src_data = src_component_chunk_data.add(src_element_size * src_range.start.0);

                // allocate the space we need in the destination chunk
                let dst_data = dst_component_writer.reserve_raw(entities_to_write).as_ptr();

                // Delegate the clone operation to the provided CloneImpl
                clone_impl.clone_components(
                    src_world,
                    src_chunk,
                    src_range.clone(),
                    *src_type,
                    &src_chunk.entities[src_range.start.0..src_range.end.0],
                    dst_entities,
                    src_data,
                    dst_data,
                    entities_to_write,
                );
            }
        }
    }
1067
1068    /// Iterate all entities in existence by iterating across archetypes, chunk sets, and chunks
1069    pub(crate) fn iter_entities<'a>(&'a self) -> impl Iterator<Item = Entity> + 'a {
1070        self.chunk_sets.iter().flat_map(move |set| {
1071            set.chunks
1072                .iter()
1073                .flat_map(move |chunk| chunk.entities().iter().copied())
1074        })
1075    }
1076
1077    pub(crate) fn iter_entity_locations<'a>(
1078        &'a self,
1079        archetype_index: ArchetypeIndex,
1080    ) -> impl Iterator<Item = (Entity, EntityLocation)> + 'a {
1081        self.chunk_sets
1082            .iter()
1083            .enumerate()
1084            .flat_map(move |(set_index, set)| {
1085                set.chunks
1086                    .iter()
1087                    .enumerate()
1088                    .flat_map(move |(chunk_index, chunk)| {
1089                        chunk
1090                            .entities()
1091                            .iter()
1092                            .enumerate()
1093                            .map(move |(entity_index, &entity)| {
1094                                (
1095                                    entity,
1096                                    EntityLocation::new(
1097                                        archetype_index,
1098                                        SetIndex(set_index),
1099                                        ChunkIndex(chunk_index),
1100                                        ComponentIndex(entity_index),
1101                                    ),
1102                                )
1103                            })
1104                    })
1105            })
1106    }
1107
    /// Appends a chunkset to the archetype and attaches matching subscribers to it.
    ///
    /// `initialize` must push the new chunkset's tag values onto the tag storage;
    /// it runs *before* the set is appended, and the tag storage is then validated
    /// against the new number of chunk sets.
    fn push<F: FnMut(&mut Tags)>(&mut self, set: Chunkset, mut initialize: F) {
        initialize(&mut self.tags);
        self.chunk_sets.push(set);

        // The new set is at the end of the list.
        let index = SetIndex(self.chunk_sets.len() - 1);
        let filter = ChunksetFilterData {
            archetype_data: self,
        };
        // Collect the subscribers whose filters match the new chunkset.
        let subscribers = self.subscribers.matches_chunkset(filter, index);

        self.chunk_sets[index].set_subscribers(subscribers);
        self.tags.validate(self.chunk_sets.len());
    }
1121
1122    /// Allocates a new chunk set. Returns the index of the new set.
1123    ///
1124    /// `initialize` is expected to push the new chunkset's tag values onto the tags collection.
1125    pub(crate) fn alloc_chunk_set<F: FnMut(&mut Tags)>(&mut self, initialize: F) -> SetIndex {
1126        self.push(Chunkset::default(), initialize);
1127        SetIndex(self.chunk_sets.len() - 1)
1128    }
1129
1130    /// Finds a chunk with space free for at least `minimum_space` entities, creating a chunk if needed.
1131    pub(crate) fn get_free_chunk(
1132        &mut self,
1133        set_index: SetIndex,
1134        minimum_space: usize,
1135    ) -> ChunkIndex {
1136        let count = {
1137            let chunks = &mut self.chunk_sets[set_index];
1138            let len = chunks.len();
1139            for (i, chunk) in chunks.iter_mut().enumerate() {
1140                let space_left = chunk.capacity() - chunk.len();
1141                if space_left >= minimum_space {
1142                    return ChunkIndex(i);
1143                }
1144            }
1145            ChunkIndex(len)
1146        };
1147
1148        let chunk = self
1149            .component_layout
1150            .alloc_storage(ChunkId(self.id, set_index, count));
1151        unsafe { self.chunkset_unchecked_mut(set_index).push(chunk) };
1152
1153        trace!(
1154            world = self.id.world().index(),
1155            archetype = *self.id.index(),
1156            chunkset = *set_index,
1157            chunk = *count,
1158            components = ?self.desc.component_names,
1159            tags = ?self.desc.tag_names,
1160            "Created chunk"
1161        );
1162
1163        count
1164    }
1165
1166    /// Gets the number of chunk sets stored within this archetype.
1167    pub fn len(&self) -> usize { self.chunk_sets.len() }
1168
1169    /// Determines whether this archetype has any chunks.
1170    pub fn is_empty(&self) -> bool { self.len() < 1 }
1171
1172    /// Gets the tag storage for all chunks in the archetype.
1173    pub fn tags(&self) -> &Tags { &self.tags }
1174
1175    /// Mutably gets the tag storage for all chunks in the archetype.
1176    pub fn tags_mut(&mut self) -> &mut Tags { &mut self.tags }
1177
1178    /// Gets a slice of chunksets.
1179    pub fn chunksets(&self) -> &[Chunkset] { &self.chunk_sets }
1180
1181    /// Gets a mutable slice of chunksets.
1182    pub fn chunksets_mut(&mut self) -> &mut [Chunkset] { &mut self.chunk_sets }
1183
1184    /// Gets a description of the component types in the archetype.
1185    pub fn description(&self) -> &ArchetypeDescription { &self.desc }
1186
1187    pub(crate) fn defrag<F: FnMut(Entity, EntityLocation)>(
1188        &mut self,
1189        budget: &mut usize,
1190        mut on_moved: F,
1191    ) -> bool {
1192        trace!(
1193            world = self.id().world().index(),
1194            archetype = *self.id().index(),
1195            "Defragmenting archetype"
1196        );
1197        let arch_index = self.id.index();
1198        for (i, chunkset) in self.chunk_sets.iter_mut().enumerate() {
1199            let complete = chunkset.defrag(budget, |e, chunk, component| {
1200                on_moved(
1201                    e,
1202                    EntityLocation::new(arch_index, SetIndex(i), chunk, component),
1203                );
1204            });
1205            if !complete {
1206                return false;
1207            }
1208        }
1209
1210        true
1211    }
1212
1213    pub(crate) fn chunkset(&self, SetIndex(index): SetIndex) -> Option<&Chunkset> {
1214        self.chunksets().get(index)
1215    }
1216
1217    pub(crate) fn chunkset_mut(&mut self, SetIndex(index): SetIndex) -> Option<&mut Chunkset> {
1218        self.chunksets_mut().get_mut(index)
1219    }
1220
1221    pub(crate) unsafe fn chunkset_unchecked(&self, SetIndex(index): SetIndex) -> &Chunkset {
1222        self.chunksets().get_unchecked(index)
1223    }
1224
1225    pub(crate) unsafe fn chunkset_unchecked_mut(
1226        &mut self,
1227        SetIndex(index): SetIndex,
1228    ) -> &mut Chunkset {
1229        self.chunksets_mut().get_unchecked_mut(index)
1230    }
1231
1232    pub(crate) fn iter_data_slice<'a, T: Component>(
1233        &'a self,
1234    ) -> impl Iterator<Item = RefMap<&[T]>> + 'a {
1235        self.chunk_sets.iter().flat_map(move |set| {
1236            set.chunks.iter().map(move |chunk| {
1237                let c = chunk.components(ComponentTypeId::of::<T>()).unwrap();
1238                unsafe { c.data_slice::<T>() }
1239            })
1240        })
1241    }
1242
1243    pub(crate) unsafe fn iter_data_slice_unchecked_mut<'a, T: Component>(
1244        &'a self,
1245    ) -> impl Iterator<Item = RefMapMut<&mut [T]>> + 'a {
1246        self.chunk_sets.iter().flat_map(move |set| {
1247            set.chunks.iter().map(move |chunk| {
1248                let c = chunk.components(ComponentTypeId::of::<T>()).unwrap();
1249                c.data_slice_mut::<T>()
1250            })
1251        })
1252    }
1253}
1254
1255fn align_up(addr: usize, align: usize) -> usize { (addr + (align - 1)) & align.wrapping_neg() }
1256
/// Describes the data layout for a chunk.
pub struct ComponentStorageLayout {
    // Maximum number of entities a chunk with this layout can hold.
    capacity: usize,
    // Combined allocation layout (size + alignment) for a chunk's component buffer.
    alloc_layout: std::alloc::Layout,
    // Per component: type ID, byte offset into the chunk buffer, and metadata.
    data_layout: Vec<(ComponentTypeId, usize, ComponentMeta)>,
}
1263
impl ComponentStorageLayout {
    /// The maximum number of entities that can be stored in each chunk.
    pub fn capacity(&self) -> usize { self.capacity }

    /// The components in each chunk.
    pub fn components(&self) -> &[(ComponentTypeId, usize, ComponentMeta)] { &self.data_layout }

    /// Constructs an empty `ComponentStorage` for a chunk with this layout.
    ///
    /// The storage starts unallocated (`component_data: None`). Each component
    /// set's pointer is seeded with the component's alignment value --
    /// presumably a dangling-but-aligned placeholder until real memory is
    /// allocated; confirm against `ComponentStorage`'s allocation path.
    fn alloc_storage(&self, id: ChunkId) -> ComponentStorage {
        let storage_info = self
            .data_layout
            .iter()
            .map(|&(ty, _, ref meta)| {
                (
                    ty,
                    ComponentResourceSet {
                        // Aligned placeholder pointer; no heap allocation yet.
                        ptr: AtomicRefCell::new(meta.align as *mut u8),
                        capacity: self.capacity,
                        count: UnsafeCell::new(0),
                        element_size: meta.size,
                        drop_fn: meta.drop_fn,
                        version: UnsafeCell::new(0),
                    },
                )
            })
            .collect();

        ComponentStorage {
            id,
            capacity: self.capacity,
            entities: Vec::with_capacity(self.capacity),
            // Byte offset of each component type's region within the chunk buffer.
            component_offsets: self
                .data_layout
                .iter()
                .map(|&(ty, offset, _)| (ty, offset))
                .collect(),
            component_layout: self.alloc_layout,
            component_info: UnsafeCell::new(Components::new(storage_info)),
            component_data: None,
            subscribers: Subscribers::default(),
        }
    }
}
1306
/// Contains chunks with the same layout and tag values.
#[derive(Default)]
pub struct Chunkset {
    // The chunks belonging to this set, in allocation order.
    chunks: Vec<ComponentStorage>,
    // Event subscribers interested in chunks within this set.
    subscribers: Subscribers,
}
1313
1314impl Deref for Chunkset {
1315    type Target = [ComponentStorage];
1316
1317    fn deref(&self) -> &Self::Target { self.chunks.as_slice() }
1318}
1319
1320impl DerefMut for Chunkset {
1321    fn deref_mut(&mut self) -> &mut Self::Target { self.chunks.as_mut_slice() }
1322}
1323
1324impl Chunkset {
1325    pub(crate) fn new() -> Self {
1326        Self {
1327            chunks: Vec::new(),
1328            subscribers: Subscribers::default(),
1329        }
1330    }
1331
1332    /// Pushes a new chunk into the set.
1333    pub fn push(&mut self, chunk: ComponentStorage) {
1334        let id = chunk.id();
1335        self.chunks.push(chunk);
1336
1337        let index = ChunkIndex(self.chunks.len() - 1);
1338        let filter = ChunkFilterData {
1339            chunks: &self.chunks,
1340        };
1341        let mut subscribers = self.subscribers.matches_chunk(filter, index);
1342        subscribers.send(Event::ChunkCreated(id));
1343        self.chunks[index].set_subscribers(subscribers);
1344    }
1345
1346    pub(crate) fn subscribe(&mut self, subscriber: Subscriber) {
1347        self.subscribers.push(subscriber.clone());
1348
1349        for i in (0..self.chunks.len()).map(ChunkIndex) {
1350            let filter = ChunkFilterData {
1351                chunks: &self.chunks,
1352            };
1353
1354            if subscriber.filter.matches_chunk(filter, i) {
1355                self.chunks[i].subscribe(subscriber.clone());
1356            }
1357        }
1358    }
1359
1360    pub(crate) fn set_subscribers(&mut self, subscribers: Subscribers) {
1361        self.subscribers = subscribers;
1362
1363        for i in (0..self.chunks.len()).map(ChunkIndex) {
1364            let filter = ChunkFilterData {
1365                chunks: &self.chunks,
1366            };
1367
1368            let subscribers = self.subscribers.matches_chunk(filter, i);
1369            self.chunks[i].set_subscribers(subscribers);
1370        }
1371    }
1372
1373    fn mark_modified(&mut self) {
1374        for chunk in self.chunks.iter_mut() {
1375            chunk.mark_modified();
1376        }
1377    }
1378
1379    pub(crate) fn drain<R: RangeBounds<usize>>(
1380        &mut self,
1381        range: R,
1382    ) -> std::vec::Drain<ComponentStorage> {
1383        self.chunks.drain(range)
1384    }
1385
1386    /// Gets a slice reference to occupied chunks.
1387    pub fn occupied(&self) -> &[ComponentStorage] {
1388        let mut len = self.chunks.len();
1389        while len > 0 {
1390            if unsafe { !self.chunks.get_unchecked(len - 1).is_empty() } {
1391                break;
1392            }
1393            len -= 1;
1394        }
1395        let (some, _) = self.chunks.as_slice().split_at(len);
1396        some
1397    }
1398
1399    /// Gets a mutable slice reference to occupied chunks.
1400    pub fn occupied_mut(&mut self) -> &mut [ComponentStorage] {
1401        let mut len = self.chunks.len();
1402        while len > 0 {
1403            if unsafe { !self.chunks.get_unchecked(len - 1).is_empty() } {
1404                break;
1405            }
1406            len -= 1;
1407        }
1408        let (some, _) = self.chunks.as_mut_slice().split_at_mut(len);
1409        some
1410    }
1411
1412    /// Defragments all chunks within the chunkset.
1413    ///
1414    /// This will compact entities down into lower index chunks, preferring to fill one
1415    /// chunk before moving on to the next.
1416    ///
1417    /// `budget` determines the maximum number of entities that can be moved, and is decremented
1418    /// as this function moves entities.
1419    ///
1420    /// `on_moved` is called when an entity is moved, with the entity's ID, new chunk index,
1421    /// new component index.
1422    ///
1423    /// Returns whether or not the chunkset has been fully defragmented.
1424    fn defrag<F: FnMut(Entity, ChunkIndex, ComponentIndex)>(
1425        &mut self,
1426        budget: &mut usize,
1427        mut on_moved: F,
1428    ) -> bool {
1429        let slice = self.occupied_mut();
1430
1431        if slice.is_empty() {
1432            return true;
1433        }
1434
1435        let mut first = 0;
1436        let mut last = slice.len() - 1;
1437
1438        trace!("Defragmenting chunkset");
1439
1440        loop {
1441            // find the first chunk that is not full
1442            while first < last && slice[first].is_full() {
1443                first += 1;
1444            }
1445
1446            // find the last chunk that is not empty
1447            while last > first && slice[last].is_empty() {
1448                last -= 1;
1449            }
1450
1451            // exit if the cursors meet; the chunkset is defragmented
1452            if first == last {
1453                return true;
1454            }
1455
1456            // get mut references to both chunks
1457            let (with_first, with_last) = slice.split_at_mut(last);
1458            let target = &mut with_first[first];
1459            let source = &mut with_last[0];
1460
1461            // move as many entities as we can from the last chunk into the first
1462            loop {
1463                if *budget == 0 {
1464                    return false;
1465                }
1466
1467                *budget -= 1;
1468
1469                // move the last entity
1470                let comp_index = ComponentIndex(source.len() - 1);
1471                let swapped = source.move_entity(target, comp_index);
1472                assert!(swapped.is_none());
1473
1474                // notify move
1475                on_moved(
1476                    *target.entities.last().unwrap(),
1477                    ChunkIndex(first),
1478                    comp_index,
1479                );
1480
1481                // exit if we cant move any more
1482                if target.is_full() || source.is_empty() {
1483                    break;
1484                }
1485            }
1486        }
1487    }
1488
1489    pub(crate) fn chunk(&self, ChunkIndex(index): ChunkIndex) -> Option<&ComponentStorage> {
1490        self.chunks.get(index)
1491    }
1492
1493    pub(crate) fn chunk_mut(
1494        &mut self,
1495        ChunkIndex(index): ChunkIndex,
1496    ) -> Option<&mut ComponentStorage> {
1497        self.chunks.get_mut(index)
1498    }
1499
1500    pub(crate) unsafe fn chunk_unchecked(
1501        &self,
1502        ChunkIndex(index): ChunkIndex,
1503    ) -> &ComponentStorage {
1504        self.chunks.get_unchecked(index)
1505    }
1506
1507    pub(crate) unsafe fn chunk_unchecked_mut(
1508        &mut self,
1509        ChunkIndex(index): ChunkIndex,
1510    ) -> &mut ComponentStorage {
1511        self.chunks.get_unchecked_mut(index)
1512    }
1513}
1514
/// Unique ID of a chunk: the owning archetype's ID, the index of the chunkset
/// within that archetype, and the chunk's index within the set.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct ChunkId(ArchetypeId, SetIndex, ChunkIndex);
1518
1519impl ChunkId {
1520    pub(crate) fn new(archetype: ArchetypeId, set: SetIndex, index: ChunkIndex) -> Self {
1521        ChunkId(archetype, set, index)
1522    }
1523
1524    pub fn archetype_id(&self) -> ArchetypeId { self.0 }
1525
1526    pub(crate) fn set(&self) -> SetIndex { self.1 }
1527
1528    pub(crate) fn index(&self) -> ChunkIndex { self.2 }
1529}
1530
/// A set of component slices located on a chunk.
///
/// Entries are kept sorted by component type ID (see `new`) so that lookups
/// can use binary search.
pub struct Components(SmallVec<[(ComponentTypeId, ComponentResourceSet); 5]>);
1533
1534impl Components {
1535    pub(crate) fn new(mut data: SmallVec<[(ComponentTypeId, ComponentResourceSet); 5]>) -> Self {
1536        data.sort_by_key(|&(t, _)| t);
1537        Self(data)
1538    }
1539
1540    /// Gets a component slice accessor for the specified component type.
1541    #[inline]
1542    pub fn get(&self, type_id: ComponentTypeId) -> Option<&ComponentResourceSet> {
1543        self.0
1544            .binary_search_by_key(&type_id, |&(t, _)| t)
1545            .ok()
1546            .map(|i| unsafe { &self.0.get_unchecked(i).1 })
1547    }
1548
1549    /// Gets a mutable component slice accessor for the specified component type.
1550    #[inline]
1551    pub fn get_mut(&mut self, type_id: ComponentTypeId) -> Option<&mut ComponentResourceSet> {
1552        self.0
1553            .binary_search_by_key(&type_id, |&(t, _)| t)
1554            .ok()
1555            .map(move |i| unsafe { &mut self.0.get_unchecked_mut(i).1 })
1556    }
1557
1558    fn iter(&mut self) -> impl Iterator<Item = &(ComponentTypeId, ComponentResourceSet)> + '_ {
1559        self.0.iter()
1560    }
1561
1562    fn iter_mut(
1563        &mut self,
1564    ) -> impl Iterator<Item = &mut (ComponentTypeId, ComponentResourceSet)> + '_ {
1565        self.0.iter_mut()
1566    }
1567
1568    fn drain(&mut self) -> impl Iterator<Item = (ComponentTypeId, ComponentResourceSet)> + '_ {
1569        self.0.drain(..)
1570    }
1571}
1572
/// Stores a chunk of entities and their component data of a specific data layout.
pub struct ComponentStorage {
    // Unique ID of this chunk.
    id: ChunkId,
    // Maximum number of entities this chunk can hold.
    capacity: usize,
    // IDs of the entities stored in this chunk, in slot order.
    entities: Vec<Entity>,
    // Allocation layout (size + alignment) of the chunk's component data buffer.
    component_layout: std::alloc::Layout,
    // Byte offset of each component type's region within the data buffer.
    component_offsets: FxHashMap<ComponentTypeId, usize>,
    // Per-type component accessors; interior mutability lets component writers
    // operate while the entity list is separately borrowed (see StorageWriter).
    component_info: UnsafeCell<Components>,
    // The chunk's component data buffer; `None` until allocated.
    component_data: Option<NonNull<u8>>,
    // Event subscribers interested in this chunk.
    subscribers: Subscribers,
}
1584
/// A writer granting append access to a `ComponentStorage`'s entity list and
/// component sets. On drop, it notifies subscribers of every entity inserted
/// while the writer was alive.
pub struct StorageWriter<'a> {
    // Number of entities present when the writer was created; used on drop to
    // determine which entities are newly inserted.
    initial_count: usize,
    storage: &'a mut ComponentStorage,
}
1589
1590impl<'a> StorageWriter<'a> {
1591    pub fn get(&mut self) -> (&mut Vec<Entity>, &UnsafeCell<Components>) {
1592        (&mut self.storage.entities, &self.storage.component_info)
1593    }
1594}
1595
1596impl<'a> Drop for StorageWriter<'a> {
1597    fn drop(&mut self) {
1598        self.storage.update_count_gauge();
1599        for &entity in self.storage.entities.iter().skip(self.initial_count) {
1600            self.storage
1601                .subscribers
1602                .send(Event::EntityInserted(entity, self.storage.id()));
1603        }
1604    }
1605}
1606
1607impl ComponentStorage {
1608    /// Gets the unique ID of the chunk.
1609    pub fn id(&self) -> ChunkId { self.id }
1610
1611    /// Gets the number of entities stored in the chunk.
1612    pub fn len(&self) -> usize { self.entities.len() }
1613
1614    /// Gets the maximum number of entities that can be stored in the chunk.
1615    pub fn capacity(&self) -> usize { self.capacity }
1616
1617    /// Determines if the chunk is full.
1618    pub fn is_full(&self) -> bool { self.len() >= self.capacity }
1619
1620    /// Determines if the chunk is empty.
1621    pub fn is_empty(&self) -> bool { self.entities.len() == 0 }
1622
1623    /// Determines if the internal memory for this chunk has been allocated.
1624    pub fn is_allocated(&self) -> bool { self.component_data.is_some() }
1625
1626    pub(crate) fn subscribe(&mut self, subscriber: Subscriber) {
1627        self.subscribers.push(subscriber);
1628    }
1629
1630    pub(crate) fn set_subscribers(&mut self, subscribers: Subscribers) {
1631        self.subscribers = subscribers;
1632    }
1633
1634    /// Gets a slice reference containing the IDs of all entities stored in the chunk.
1635    pub fn entities(&self) -> &[Entity] { self.entities.as_slice() }
1636
1637    /// Gets a component accessor for the specified component type.
1638    pub fn components(&self, component_type: ComponentTypeId) -> Option<&ComponentResourceSet> {
1639        unsafe { &*self.component_info.get() }.get(component_type)
1640    }
1641
    /// Increments all component versions, forcing the chunk to be seen as modified for all queries.
    fn mark_modified(&mut self) {
        unsafe {
            // `&mut self` gives exclusive access, so this mutable reference
            // into the UnsafeCell cannot alias any other live borrow.
            let components = &mut *self.component_info.get();
            for (_, component) in components.iter_mut() {
                // touch each slice mutably to increment its version
                let _ = component.data_raw_mut();
            }
        }
    }
1652
1653    /// Removes an entity from the chunk by swapping it with the last entry.
1654    ///
1655    /// Returns the ID of the entity which was swapped into the removed entity's position.
1656    pub fn swap_remove(
1657        &mut self,
1658        ComponentIndex(index): ComponentIndex,
1659        drop: bool,
1660    ) -> Option<Entity> {
1661        let removed = self.entities.swap_remove(index);
1662        for (_, component) in unsafe { &mut *self.component_info.get() }.iter_mut() {
1663            component.writer().swap_remove(index, drop);
1664        }
1665
1666        self.subscribers
1667            .send(Event::EntityRemoved(removed, self.id()));
1668        self.update_count_gauge();
1669
1670        if self.entities.len() > index {
1671            Some(*self.entities.get(index).unwrap())
1672        } else {
1673            if self.is_empty() {
1674                self.free();
1675            }
1676
1677            None
1678        }
1679    }
1680
1681    /// Moves an entity from this chunk into a target chunk, moving all compatable components into
1682    /// the target chunk. Any components left over will be dropped.
1683    ///
1684    /// Returns the ID of the entity which was swapped into the removed entity's position.
1685    pub fn move_entity(
1686        &mut self,
1687        target: &mut ComponentStorage,
1688        index: ComponentIndex,
1689    ) -> Option<Entity> {
1690        debug_assert!(*index < self.len());
1691        debug_assert!(!target.is_full());
1692        if !target.is_allocated() {
1693            target.allocate();
1694        }
1695
1696        trace!(index = *index, source = ?self.id, destination = ?target.id, "Moving entity");
1697
1698        let entity = unsafe { *self.entities.get_unchecked(*index) };
1699        target.entities.push(entity);
1700
1701        let self_components = unsafe { &mut *self.component_info.get() };
1702        let target_components = unsafe { &mut *target.component_info.get() };
1703
1704        for (comp_type, accessor) in self_components.iter_mut() {
1705            if let Some(target_accessor) = target_components.get_mut(*comp_type) {
1706                // move the component into the target chunk
1707                unsafe {
1708                    let (ptr, element_size, _) = accessor.data_raw();
1709                    let component = ptr.add(element_size * *index);
1710                    target_accessor
1711                        .writer()
1712                        .push_raw(NonNull::new_unchecked(component), 1);
1713                }
1714            } else {
1715                // drop the component rather than move it
1716                unsafe { accessor.writer().drop_in_place(index) };
1717            }
1718        }
1719
1720        // remove the entity from this chunk
1721        let removed = self.swap_remove(index, false);
1722
1723        target
1724            .subscribers
1725            .send(Event::EntityInserted(entity, target.id()));
1726        target.update_count_gauge();
1727
1728        removed
1729    }
1730
1731    /// Gets mutable references to the internal data of the chunk.
1732    pub fn writer(&mut self) -> StorageWriter {
1733        if !self.is_allocated() {
1734            self.allocate();
1735        }
1736        StorageWriter {
1737            initial_count: self.entities.len(),
1738            storage: self,
1739        }
1740    }
1741
1742    fn free(&mut self) {
1743        debug_assert!(self.is_allocated());
1744        debug_assert_eq!(0, self.len());
1745
1746        self.entities.shrink_to_fit();
1747
1748        trace!(
1749            world = self.id.archetype_id().world().index(),
1750            archetype = *self.id.archetype_id().index(),
1751            chunkset = *self.id.set(),
1752            chunk = *self.id.index(),
1753            layout = ?self.component_layout,
1754            "Freeing chunk memory"
1755        );
1756
1757        // Safety Note:
1758        // accessors are left with pointers pointing to invalid memory (although aligned properly)
1759        // the slices returned from these accessors will be empty though, so no code
1760        // should ever dereference these pointers
1761
1762        // free component memory
1763        unsafe {
1764            let ptr = self.component_data.take().unwrap();
1765
1766            if self.component_layout.size() > 0 {
1767                std::alloc::dealloc(ptr.as_ptr(), self.component_layout);
1768            }
1769        }
1770
1771        self.update_mem_gauge();
1772    }
1773
1774    fn allocate(&mut self) {
1775        debug_assert!(!self.is_allocated());
1776
1777        trace!(
1778            world = self.id.archetype_id().world().index(),
1779            archetype = *self.id.archetype_id().index(),
1780            chunkset = *self.id.set(),
1781            chunk = *self.id.index(),
1782            layout = ?self.component_layout,
1783            "Allocating chunk memory"
1784        );
1785        self.entities.reserve_exact(self.capacity);
1786
1787        unsafe {
1788            // allocating backing store
1789            if self.component_layout.size() > 0 {
1790                let ptr = std::alloc::alloc(self.component_layout);
1791                self.component_data = Some(NonNull::new_unchecked(ptr));
1792
1793                // update accessor pointers
1794                for (type_id, component) in (&mut *self.component_info.get()).iter_mut() {
1795                    let &offset = self.component_offsets.get(type_id).unwrap();
1796                    *component.ptr.get_mut() = ptr.add(offset);
1797                }
1798            } else {
1799                self.component_data =
1800                    Some(NonNull::new(self.component_layout.align() as *mut u8).unwrap());
1801            }
1802        }
1803
1804        self.update_mem_gauge();
1805    }
1806
1807    fn update_mem_gauge(&self) {
1808        #[cfg(feature = "metrics")]
1809        {
1810            use std::convert::TryInto;
1811            metrics::gauge!(
1812                "chunk_memory",
1813                if self.is_allocated() { self.component_layout.size().try_into().unwrap() } else { 0 },
1814                "world" => self.id.archetype_id().world().index().to_string(),
1815                "archetype" => self.id.archetype_id().index().to_string(),
1816                "chunkset" => self.id.set().to_string(),
1817                "chunk" => self.id.index().to_string()
1818            );
1819        }
1820    }
1821
1822    fn update_count_gauge(&self) {
1823        #[cfg(feature = "metrics")]
1824        {
1825            use std::convert::TryInto;
1826            metrics::gauge!(
1827                "entity_count",
1828                self.len().try_into().unwrap(),
1829                "world" => self.id.archetype_id().world().index().to_string(),
1830                "archetype" => self.id.archetype_id().index().to_string(),
1831                "chunkset" => self.id.set().to_string(),
1832                "chunk" => self.id.index().to_string()
1833            );
1834        }
1835    }
1836}
1837
// SAFETY: `ComponentStorage` holds raw pointers and `UnsafeCell`s, so it is not
// auto-`Sync`/`Send`. Component data access goes through `AtomicRefCell`-guarded
// accessors (see `ComponentResourceSet`), which provide runtime borrow checking.
// NOTE(review): soundness relies on every interior-mutation path using those
// checked accessors — confirm nothing bypasses them.
unsafe impl Sync for ComponentStorage {}

// SAFETY: same reasoning as the `Sync` impl above; the storage owns its
// allocation, so moving it between threads does not alias anything.
unsafe impl Send for ComponentStorage {}
1841
impl Drop for ComponentStorage {
    fn drop(&mut self) {
        // nothing to do if the chunk memory was never allocated (or was freed)
        if let Some(ptr) = self.component_data {
            // run the drop functions of all components
            for (_, info) in unsafe { &mut *self.component_info.get() }.drain() {
                if let Some(drop_fn) = info.drop_fn {
                    let ptr = info.ptr.get_mut();
                    // drop each stored element of this component column in place
                    for i in 0..self.len() {
                        unsafe {
                            drop_fn(ptr.add(info.element_size * i));
                        }
                    }
                }
            }

            // notify subscribers that every remaining entity is being removed
            for e in &self.entities {
                self.subscribers.send(Event::EntityRemoved(*e, self.id()));
            }

            self.update_count_gauge();

            // free the chunk's memory
            // (zero-sized layouts were never heap-allocated — see `allocate`)
            if self.component_layout.size() > 0 {
                unsafe {
                    std::alloc::dealloc(ptr.as_ptr(), self.component_layout);
                }
            }
        }
    }
}
1872
/// Provides raw access to component data slices.
// 64-byte alignment — presumably to keep each set on its own cache line and
// avoid false sharing between threads; TODO confirm.
#[repr(align(64))]
pub struct ComponentResourceSet {
    // Start of the component data; runtime borrow checked via `AtomicRefCell`.
    ptr: AtomicRefCell<*mut u8>,
    // Size in bytes of a single component element.
    element_size: usize,
    // Number of components currently stored.
    count: UnsafeCell<usize>,
    // Maximum number of components the backing memory can hold.
    capacity: usize,
    // Drop-in-place function for the component type, if it needs dropping.
    drop_fn: Option<fn(*mut u8)>,
    // Bumped on every mutable access; used by queries for change detection.
    version: UnsafeCell<u64>,
}
1883
impl ComponentResourceSet {
    /// Gets the version of the component slice.
    ///
    /// The version is replaced with a fresh global value on every mutable
    /// access, which is how queries detect modified chunks.
    pub fn version(&self) -> u64 { unsafe { *self.version.get() } }

    /// Gets a raw pointer to the start of the component slice.
    ///
    /// Returns a tuple containing `(pointer, element_size, count)`.
    ///
    /// # Safety
    ///
    /// Access to the component data within the slice is runtime borrow checked in debug builds.
    /// This call will panic if borrowing rules are broken in debug, and is undefined behavior in release.
    pub unsafe fn data_raw(&self) -> (Ref<*mut u8>, usize, usize) {
        (self.ptr.get(), self.element_size, *self.count.get())
    }

    /// Gets a raw pointer to the start of the component slice.
    ///
    /// Returns a tuple containing `(pointer, element_size, count)`.
    ///
    /// # Safety
    ///
    /// Access to the component data within the slice is runtime borrow checked in debug builds.
    /// This call will panic if borrowing rules are broken in debug, and is undefined behavior in release.
    ///
    /// # Panics
    ///
    /// Will panic when an internal u64 counter overflows.
    /// It will happen in 50000 years if you do 10000 mutations a millisecond.
    pub unsafe fn data_raw_mut(&self) -> (RefMut<*mut u8>, usize, usize) {
        // this version increment is not thread safe
        // - but the pointer `get_mut` ensures exclusive access at runtime
        let ptr = self.ptr.get_mut();
        *self.version.get() = next_version();
        (ptr, self.element_size, *self.count.get())
    }

    /// Gets a shared reference to the slice of components.
    ///
    /// # Safety
    ///
    /// Ensure that `T` is representative of the component data actually stored.
    ///
    /// Access to the component data within the slice is runtime borrow checked.
    /// This call will panic if borrowing rules are broken.
    pub unsafe fn data_slice<T>(&self) -> RefMap<&[T]> {
        let (ptr, _size, count) = self.data_raw();
        // the borrow guard travels with the returned slice via `map_into`
        ptr.map_into(|&ptr| std::slice::from_raw_parts(ptr as *const _ as *const T, count))
    }

    /// Gets a mutable reference to the slice of components.
    ///
    /// # Safety
    ///
    /// Ensure that `T` is representative of the component data actually stored.
    ///
    /// Access to the component data within the slice is runtime borrow checked.
    /// This call will panic if borrowing rules are broken.
    ///
    /// # Panics
    ///
    /// Will panic when an internal u64 counter overflows.
    /// It will happen in 50000 years if you do 10000 mutations a millisecond.
    pub unsafe fn data_slice_mut<T>(&self) -> RefMapMut<&mut [T]> {
        // `data_raw_mut` also bumps the version for change detection
        let (ptr, _size, count) = self.data_raw_mut();
        ptr.map_into(|&mut ptr| std::slice::from_raw_parts_mut(ptr as *mut _ as *mut T, count))
    }

    /// Creates a writer for pushing components into or removing from the vec.
    pub fn writer(&mut self) -> ComponentWriter { ComponentWriter::new(self) }
}
1955
1956impl Debug for ComponentResourceSet {
1957    fn fmt(&self, f: &mut Formatter) -> Result<(), std::fmt::Error> {
1958        write!(
1959            f,
1960            "ComponentResourceSet {{ ptr: {:?}, element_size: {}, count: {}, capacity: {}, version: {} }}",
1961            *self.ptr.get(),
1962            self.element_size,
1963            unsafe { *self.count.get() },
1964            self.capacity,
1965            self.version()
1966        )
1967    }
1968}
1969
/// Provides methods adding or removing components from a component vec.
pub struct ComponentWriter<'a> {
    // The set being written to; supplies element size, count, capacity, drop_fn.
    accessor: &'a ComponentResourceSet,
    // Exclusive (runtime borrow checked) handle to the component data pointer.
    ptr: RefMut<'a, *mut u8>,
}
1975
impl<'a> ComponentWriter<'a> {
    /// Creates a writer, taking the exclusive runtime borrow of the set's data pointer.
    fn new(accessor: &'a ComponentResourceSet) -> ComponentWriter<'a> {
        Self {
            accessor,
            ptr: accessor.ptr.get_mut(),
        }
    }

    /// Increases the length of the associated `ComponentResourceSet` by `count`
    /// and returns a pointer to the start of the memory that is reserved as a result.
    ///
    /// # Safety
    ///
    /// Ensure the memory returned by this function is properly initialized before calling
    /// any other storage function. Ensure that the data written into the returned pointer
    /// is representative of the component types stored in the associated ComponentResourceSet.
    ///
    /// # Panics
    ///
    /// Will panic when an internal u64 counter overflows.
    /// It will happen in 50000 years if you do 10000 mutations a millisecond.
    pub(crate) unsafe fn reserve_raw(&mut self, count: usize) -> NonNull<u8> {
        // capacity is only checked in debug builds; overrunning it in release is UB
        debug_assert!((*self.accessor.count.get() + count) <= self.accessor.capacity);
        let ptr = self
            .ptr
            .add(*self.accessor.count.get() * self.accessor.element_size);
        *self.accessor.count.get() += count;
        // bump the version so queries see this chunk as modified
        *self.accessor.version.get() = next_version();
        NonNull::new_unchecked(ptr)
    }

    /// Pushes new components onto the end of the vec.
    ///
    /// # Safety
    ///
    /// Ensure the components pointed to by `components` are representative
    /// of the component types stored in the vec.
    ///
    /// This function will _copy_ all elements into the chunk. If the source is not `Copy`,
    /// the caller must then `mem::forget` the source such that the destructor does not run
    /// on the original data.
    ///
    /// # Panics
    ///
    /// Will panic when an internal u64 counter overflows.
    /// It will happen in 50000 years if you do 10000 mutations a millisecond.
    pub unsafe fn push_raw(&mut self, components: NonNull<u8>, count: usize) {
        debug_assert!((*self.accessor.count.get() + count) <= self.accessor.capacity);
        // bitwise copy of `count` elements onto the end of the column
        std::ptr::copy_nonoverlapping(
            components.as_ptr(),
            self.ptr
                .add(*self.accessor.count.get() * self.accessor.element_size),
            count * self.accessor.element_size,
        );
        *self.accessor.count.get() += count;
        *self.accessor.version.get() = next_version();
    }

    /// Pushes new components onto the end of the vec.
    ///
    /// # Safety
    ///
    /// Ensure that the type `T` is representative of the component types stored in the vec.
    ///
    /// This function will _copy_ all elements of `T` into the chunk. If `T` is not `Copy`,
    /// the caller must then `mem::forget` the source such that the destructor does not run
    /// on the original data.
    pub unsafe fn push<T: Component>(&mut self, components: &[T]) {
        self.push_raw(
            NonNull::new_unchecked(components.as_ptr() as *mut u8),
            components.len(),
        );
    }

    /// Removes the component at the specified index by swapping it with the last component.
    ///
    /// If `drop` is `true` and the component type has a drop function, the
    /// removed component is dropped in place before being overwritten.
    // NOTE(review): assumes `index < count` and `count > 0`; `count - 1`
    // underflows otherwise — confirm all callers guarantee this.
    pub fn swap_remove(&mut self, index: usize, drop: bool) {
        unsafe {
            let size = self.accessor.element_size;
            let to_remove = self.ptr.add(size * index);
            if drop {
                if let Some(drop_fn) = self.accessor.drop_fn {
                    drop_fn(to_remove);
                }
            }

            let count = *self.accessor.count.get();
            if index < count - 1 {
                // fill the hole with a bitwise copy of the last element
                let swap_target = self.ptr.add(size * (count - 1));
                std::ptr::copy_nonoverlapping(swap_target, to_remove, size);
            }

            *self.accessor.count.get() -= 1;
        }
    }

    /// Drops the component stored at `index` without moving any other data or
    /// altering the number of elements.
    ///
    /// # Safety
    ///
    /// Ensure that this function is only ever called once on a given index.
    pub unsafe fn drop_in_place(&mut self, ComponentIndex(index): ComponentIndex) {
        if let Some(drop_fn) = self.accessor.drop_fn {
            let size = self.accessor.element_size;
            let to_remove = self.ptr.add(size * index);
            drop_fn(to_remove);
        }
    }
}
2085
/// A vector of tag values of a single type.
///
/// Each element in the vector represents the value of tag for
/// the chunk with the corresponding index.
pub struct TagStorage {
    // Start of the tag data; dangling (but aligned) for zero-sized tag types.
    ptr: NonNull<u8>,
    // Number of elements the buffer can hold (`usize::MAX` for zero-sized types).
    capacity: usize,
    // Number of tags currently stored.
    len: usize,
    // Size/alignment/drop metadata describing the stored tag type.
    element: TagMeta,
}
2096
impl TagStorage {
    /// Creates an empty storage for tags described by `element`.
    pub(crate) fn new(element: TagMeta) -> Self {
        // zero-sized tags occupy no memory, so capacity is effectively
        // unbounded (`!0` == usize::MAX); otherwise start with room for 4
        let capacity = if element.size == 0 { !0 } else { 4 };

        let ptr = unsafe {
            if element.size > 0 {
                let layout =
                    std::alloc::Layout::from_size_align(capacity * element.size, element.align)
                        .unwrap();
                // NOTE(review): `alloc` may return null on OOM, which
                // `new_unchecked` would turn into UB — consider
                // `std::alloc::handle_alloc_error`.
                NonNull::new_unchecked(std::alloc::alloc(layout))
            } else {
                // dangling but correctly aligned pointer; never dereferenced
                NonNull::new_unchecked(element.align as *mut u8)
            }
        };

        TagStorage {
            ptr,
            capacity,
            len: 0,
            element,
        }
    }

    /// Gets the element metadata.
    pub fn element(&self) -> &TagMeta { &self.element }

    /// Gets the number of tags contained within the vector.
    pub fn len(&self) -> usize { self.len }

    /// Determines if the vector is empty.
    pub fn is_empty(&self) -> bool { self.len() < 1 }

    /// Allocates uninitialized memory for a new element.
    ///
    /// # Safety
    ///
    /// A valid element must be written into the returned address before the
    /// tag storage is next accessed.
    pub unsafe fn alloc_ptr(&mut self) -> *mut u8 {
        if self.len == self.capacity {
            self.grow();
        }

        let ptr = if self.element.size > 0 {
            self.ptr.as_ptr().add(self.len * self.element.size)
        } else {
            // zero-sized elements have no storage; hand back the aligned dangling pointer
            self.element.align as *mut u8
        };

        self.len += 1;
        ptr
    }

    /// Pushes a new tag onto the end of the vector.
    ///
    /// # Safety
    ///
    /// Ensure the tag pointed to by `ptr` is representative
    /// of the tag types stored in the vec.
    ///
    /// `ptr` must not point to a location already within the vector.
    ///
    /// The value at `ptr` is _copied_ into the tag vector. If the value
    /// is not `Copy`, then the caller must ensure that the original value
    /// is forgotten with `mem::forget` such that the finalizer is not called
    /// twice.
    pub unsafe fn push_raw(&mut self, ptr: *const u8) {
        if self.len == self.capacity {
            self.grow();
        }

        // zero-sized elements have nothing to copy; only the length changes
        if self.element.size > 0 {
            let dst = self.ptr.as_ptr().add(self.len * self.element.size);
            std::ptr::copy_nonoverlapping(ptr, dst, self.element.size);
        }

        self.len += 1;
    }

    /// Pushes a new tag onto the end of the vector.
    ///
    /// # Safety
    ///
    /// Ensure that the type `T` is representative of the tag type stored in the vec.
    pub unsafe fn push<T: Tag>(&mut self, value: T) {
        debug_assert!(
            size_of::<T>() == self.element.size,
            "incompatible element data size"
        );
        self.push_raw(&value as *const T as *const u8);
        // the value was bitwise-copied in; forget it so its destructor
        // does not run on the original
        std::mem::forget(value);
    }

    /// Gets a raw pointer to the start of the tag slice.
    ///
    /// Returns a tuple containing `(pointer, element_size, count)`.
    ///
    /// # Safety
    /// This function returns a raw pointer with the size and length.
    /// Ensure that you do not access outside these bounds for this pointer.
    pub unsafe fn data_raw(&self) -> (NonNull<u8>, usize, usize) {
        (self.ptr, self.element.size, self.len)
    }

    /// Gets a shared reference to the slice of tags.
    ///
    /// # Safety
    ///
    /// Ensure that `T` is representative of the tag data actually stored.
    ///
    /// Unlike component access, the returned slice is not runtime borrow
    /// checked; it borrows `self` immutably for its lifetime under the
    /// ordinary Rust borrowing rules.
    pub unsafe fn data_slice<T>(&self) -> &[T] {
        debug_assert!(
            size_of::<T>() == self.element.size,
            "incompatible element data size"
        );
        std::slice::from_raw_parts(self.ptr.as_ptr() as *const T, self.len)
    }

    /// Drop the storage without dropping the tags contained in the storage
    pub(crate) fn forget_data(mut self) {
        // this is a bit of a hack, but it makes the Drop impl not drop the elements
        self.element.drop_fn = None;
    }

    /// Doubles the capacity of the backing buffer.
    fn grow(&mut self) {
        // zero-sized tags have effectively unbounded capacity and must never grow
        assert!(self.element.size != 0, "capacity overflow");
        unsafe {
            let (new_cap, ptr) = {
                let layout = std::alloc::Layout::from_size_align(
                    self.capacity * self.element.size,
                    self.element.align,
                )
                .unwrap();
                let new_cap = 2 * self.capacity;
                let ptr =
                    std::alloc::realloc(self.ptr.as_ptr(), layout, new_cap * self.element.size);

                (new_cap, ptr)
            };

            // abort the process on allocation failure
            if ptr.is_null() {
                tracing::error!("out of memory");
                std::process::abort()
            }

            self.ptr = NonNull::new_unchecked(ptr);
            self.capacity = new_cap;
        }
    }
}
2249
// SAFETY: `TagStorage` exclusively owns its buffer through `NonNull<u8>`; the
// raw pointer field is the only reason it is not auto-`Sync`/`Send`.
// NOTE(review): this also assumes the stored tag types are themselves
// `Send + Sync` — presumably guaranteed by the `Tag` trait bounds; confirm.
unsafe impl Sync for TagStorage {}

// SAFETY: see the `Sync` impl above.
unsafe impl Send for TagStorage {}
2253
impl Drop for TagStorage {
    fn drop(&mut self) {
        // zero-sized tags never allocated, so there is nothing to free.
        // NOTE(review): this also skips `drop_fn` for zero-sized tag types —
        // confirm ZSTs with a meaningful Drop impl are not expected here.
        if self.element.size > 0 {
            let ptr = self.ptr.as_ptr();

            unsafe {
                // drop each stored tag value in place
                // (`forget_data` clears `drop_fn` to skip this deliberately)
                if let Some(drop_fn) = self.element.drop_fn {
                    for i in 0..self.len {
                        drop_fn(ptr.add(i * self.element.size));
                    }
                }
                // layout must match the current capacity used at (re)allocation time
                let layout = std::alloc::Layout::from_size_align_unchecked(
                    self.element.size * self.capacity,
                    self.element.align,
                );
                std::alloc::dealloc(ptr, layout);
            }
        }
    }
}
2274
2275impl Debug for TagStorage {
2276    fn fmt(&self, f: &mut Formatter) -> Result<(), std::fmt::Error> {
2277        write!(
2278            f,
2279            "TagStorage {{ element_size: {}, count: {}, capacity: {} }}",
2280            self.element.size, self.len, self.capacity
2281        )
2282    }
2283}
2284
2285#[cfg(test)]
2286mod test {
2287    use super::*;
2288    use std::num::Wrapping;
2289
    // Zero-sized marker type used to exercise the ZST component/tag storage paths.
    #[derive(Copy, Clone, PartialEq, Debug)]
    struct ZeroSize;
2292
2293    #[test]
2294    pub fn create() {
2295        let _ = tracing_subscriber::fmt::try_init();
2296
2297        let mut archetypes = Storage::new(WorldId::default());
2298
2299        let mut desc = ArchetypeDescription::default();
2300        desc.register_tag::<usize>();
2301        desc.register_component::<isize>();
2302
2303        let (_arch_id, data) = archetypes.alloc_archetype(desc);
2304        let set = data.alloc_chunk_set(|tags| unsafe {
2305            tags.get_mut(TagTypeId::of::<usize>()).unwrap().push(1isize)
2306        });
2307
2308        let chunk_index = data.get_free_chunk(set, 1);
2309        let components = data
2310            .chunkset_mut(set)
2311            .unwrap()
2312            .chunk_mut(chunk_index)
2313            .unwrap();
2314        let mut writer = components.writer();
2315        let (chunk_entities, chunk_components) = writer.get();
2316
2317        chunk_entities.push(Entity::new(1, Wrapping(0)));
2318        unsafe {
2319            (&mut *chunk_components.get())
2320                .get_mut(ComponentTypeId::of::<isize>())
2321                .unwrap()
2322                .writer()
2323                .push(&[1usize]);
2324        }
2325    }
2326
2327    #[test]
2328    pub fn create_lazy_allocated() {
2329        let _ = tracing_subscriber::fmt::try_init();
2330
2331        let mut archetypes = Storage::new(WorldId::default());
2332
2333        let mut desc = ArchetypeDescription::default();
2334        desc.register_tag::<usize>();
2335        desc.register_component::<isize>();
2336
2337        let (_arch_id, data) = archetypes.alloc_archetype(desc);
2338        let set = data.alloc_chunk_set(|tags| unsafe {
2339            tags.get_mut(TagTypeId::of::<usize>()).unwrap().push(1isize)
2340        });
2341
2342        let chunk_index = data.get_free_chunk(set, 1);
2343        let chunk = data
2344            .chunkset_mut(set)
2345            .unwrap()
2346            .chunk_mut(chunk_index)
2347            .unwrap();
2348
2349        assert!(!chunk.is_allocated());
2350
2351        chunk.writer();
2352
2353        assert!(chunk.is_allocated());
2354    }
2355
2356    #[test]
2357    pub fn create_free_when_empty() {
2358        let _ = tracing_subscriber::fmt::try_init();
2359
2360        let mut archetypes = Storage::new(WorldId::default());
2361
2362        let mut desc = ArchetypeDescription::default();
2363        desc.register_tag::<usize>();
2364        desc.register_component::<isize>();
2365
2366        let (_arch_id, data) = archetypes.alloc_archetype(desc);
2367        let set = data.alloc_chunk_set(|tags| unsafe {
2368            tags.get_mut(TagTypeId::of::<usize>()).unwrap().push(1isize)
2369        });
2370
2371        let chunk_index = data.get_free_chunk(set, 1);
2372        let chunk = data
2373            .chunkset_mut(set)
2374            .unwrap()
2375            .chunk_mut(chunk_index)
2376            .unwrap();
2377
2378        assert!(!chunk.is_allocated());
2379
2380        {
2381            let mut writer = chunk.writer();
2382            let (chunk_entities, chunk_components) = writer.get();
2383
2384            chunk_entities.push(Entity::new(1, Wrapping(0)));
2385            unsafe {
2386                (&mut *chunk_components.get())
2387                    .get_mut(ComponentTypeId::of::<isize>())
2388                    .unwrap()
2389                    .writer()
2390                    .push(&[1usize]);
2391            }
2392        }
2393
2394        assert!(chunk.is_allocated());
2395
2396        chunk.swap_remove(ComponentIndex(0), true);
2397
2398        assert!(!chunk.is_allocated());
2399    }
2400
    /// Writes three entities with three component types (including a ZST) and
    /// verifies each component column reads back the values that were pushed.
    #[test]
    pub fn read_components() {
        let _ = tracing_subscriber::fmt::try_init();

        let mut archetypes = Storage::new(WorldId::default());

        let mut desc = ArchetypeDescription::default();
        desc.register_component::<isize>();
        desc.register_component::<usize>();
        desc.register_component::<ZeroSize>();

        let (_arch_id, data) = archetypes.alloc_archetype(desc);
        let set = data.alloc_chunk_set(|_| {});
        let chunk_index = data.get_free_chunk(set, 1);
        let components = data
            .chunkset_mut(set)
            .unwrap()
            .chunk_mut(chunk_index)
            .unwrap();

        let entities = [
            (Entity::new(1, Wrapping(0)), 1isize, 1usize, ZeroSize),
            (Entity::new(2, Wrapping(0)), 2isize, 2usize, ZeroSize),
            (Entity::new(3, Wrapping(0)), 3isize, 3usize, ZeroSize),
        ];

        let mut writer = components.writer();
        let (chunk_entities, chunk_components) = writer.get();
        for (entity, c1, c2, c3) in entities.iter() {
            chunk_entities.push(*entity);
            // push each component column individually; the pushed types match
            // the registered component types, as the writer contract requires
            unsafe {
                (&mut *chunk_components.get())
                    .get_mut(ComponentTypeId::of::<isize>())
                    .unwrap()
                    .writer()
                    .push(&[*c1]);
                (&mut *chunk_components.get())
                    .get_mut(ComponentTypeId::of::<usize>())
                    .unwrap()
                    .writer()
                    .push(&[*c2]);
                (&mut *chunk_components.get())
                    .get_mut(ComponentTypeId::of::<ZeroSize>())
                    .unwrap()
                    .writer()
                    .push(&[*c3]);
            }
        }

        // read each column back and compare against what was written
        unsafe {
            for (i, c) in (*chunk_components.get())
                .get(ComponentTypeId::of::<isize>())
                .unwrap()
                .data_slice::<isize>()
                .iter()
                .enumerate()
            {
                assert_eq!(entities[i].1, *c);
            }

            for (i, c) in (*chunk_components.get())
                .get(ComponentTypeId::of::<usize>())
                .unwrap()
                .data_slice::<usize>()
                .iter()
                .enumerate()
            {
                assert_eq!(entities[i].2, *c);
            }

            for (i, c) in (*chunk_components.get())
                .get(ComponentTypeId::of::<ZeroSize>())
                .unwrap()
                .data_slice::<ZeroSize>()
                .iter()
                .enumerate()
            {
                assert_eq!(entities[i].3, *c);
            }
        }
    }
2482
    /// Allocates one chunk set per tag value pair and verifies the tag
    /// storages (including a ZST tag) read back the pushed values in order.
    #[test]
    pub fn read_tags() {
        let _ = tracing_subscriber::fmt::try_init();

        let mut archetypes = Storage::new(WorldId::default());

        let mut desc = ArchetypeDescription::default();
        desc.register_tag::<isize>();
        desc.register_tag::<ZeroSize>();

        let (_arch_id, data) = archetypes.alloc_archetype(desc);

        let tag_values = [(0isize, ZeroSize), (1isize, ZeroSize), (2isize, ZeroSize)];

        // each chunk set stores one value per registered tag type
        for (t1, t2) in tag_values.iter() {
            data.alloc_chunk_set(|tags| {
                unsafe { tags.get_mut(TagTypeId::of::<isize>()).unwrap().push(*t1) };
                unsafe { tags.get_mut(TagTypeId::of::<ZeroSize>()).unwrap().push(*t2) };
            });
        }

        unsafe {
            let tags1 = data
                .tags()
                .get(TagTypeId::of::<isize>())
                .unwrap()
                .data_slice::<isize>();
            assert_eq!(tags1.len(), tag_values.len());
            for (i, t) in tags1.iter().enumerate() {
                assert_eq!(tag_values[i].0, *t);
            }

            let tags2 = data
                .tags()
                .get(TagTypeId::of::<ZeroSize>())
                .unwrap()
                .data_slice::<ZeroSize>();
            assert_eq!(tags2.len(), tag_values.len());
            for (i, t) in tags2.iter().enumerate() {
                assert_eq!(tag_values[i].1, *t);
            }
        }
    }
2526
2527    #[test]
2528    pub fn create_zero_size_tags() {
2529        let _ = tracing_subscriber::fmt::try_init();
2530
2531        let mut archetypes = Storage::new(WorldId::default());
2532
2533        let mut desc = ArchetypeDescription::default();
2534        desc.register_tag::<ZeroSize>();
2535        desc.register_component::<isize>();
2536
2537        let (_arch_id, data) = archetypes.alloc_archetype(desc);
2538        let set = data.alloc_chunk_set(|tags| unsafe {
2539            tags.get_mut(TagTypeId::of::<ZeroSize>())
2540                .unwrap()
2541                .push(ZeroSize);
2542        });
2543
2544        let chunk_index = data.get_free_chunk(set, 1);
2545        let components = data
2546            .chunkset_mut(set)
2547            .unwrap()
2548            .chunk_mut(chunk_index)
2549            .unwrap();
2550        let mut writer = components.writer();
2551        let (chunk_entities, chunk_components) = writer.get();
2552
2553        chunk_entities.push(Entity::new(1, Wrapping(0)));
2554        unsafe {
2555            (&mut *chunk_components.get())
2556                .get_mut(ComponentTypeId::of::<isize>())
2557                .unwrap()
2558                .writer()
2559                .push(&[1usize]);
2560        }
2561    }
2562
2563    #[test]
2564    pub fn create_zero_size_components() {
2565        let _ = tracing_subscriber::fmt::try_init();
2566
2567        let mut archetypes = Storage::new(WorldId::default());
2568
2569        let mut desc = ArchetypeDescription::default();
2570        desc.register_tag::<usize>();
2571        desc.register_component::<ZeroSize>();
2572
2573        let (_arch_id, data) = archetypes.alloc_archetype(desc);
2574        let set = data.alloc_chunk_set(|tags| unsafe {
2575            tags.get_mut(TagTypeId::of::<usize>()).unwrap().push(1isize);
2576        });
2577
2578        let chunk_index = data.get_free_chunk(set, 1);
2579        let components = data
2580            .chunkset_mut(set)
2581            .unwrap()
2582            .chunk_mut(chunk_index)
2583            .unwrap();
2584        let mut writer = components.writer();
2585        let (chunk_entities, chunk_components) = writer.get();
2586
2587        chunk_entities.push(Entity::new(1, Wrapping(0)));
2588        unsafe {
2589            (&mut *chunk_components.get())
2590                .get_mut(ComponentTypeId::of::<ZeroSize>())
2591                .unwrap()
2592                .writer()
2593                .push(&[ZeroSize]);
2594        }
2595    }
2596}