1use crate::alloc::alloc::{Layout, alloc, dealloc};
2use crate::alloc::boxed::Box;
3use crate::alloc::{vec, vec::Vec};
4use core::any::{TypeId, type_name};
5use core::fmt;
6use core::hash::{BuildHasher, BuildHasherDefault, Hasher};
7use core::ops::{Deref, DerefMut};
8use core::ptr::{self, NonNull};
9
10use hashbrown::HashMap;
11
12use crate::borrow::AtomicBorrow;
13use crate::query::Fetch;
14use crate::{Access, Component, ComponentRef, Query};
15
/// A collection of entities having the same component types
///
/// Components are stored in parallel columns (`data`), one column per entry
/// in `types`, addressed by row index.
pub struct Archetype {
    /// Metadata for each component type, sorted per `TypeInfo`'s `Ord`
    types: Vec<TypeInfo>,
    /// Cached `TypeId` of each entry in `types`, in the same order
    type_ids: Box<[TypeId]>,
    /// Maps a `TypeId` to its column index in `types`/`data`
    index: OrderedTypeIdMap<usize>,
    /// Number of live entities (rows) currently stored
    len: u32,
    /// Entity id per row; the slice's length doubles as the capacity
    entities: Box<[u32]>,
    /// One storage column plus runtime borrow flag per component type
    data: Box<[Data]>,
}
29
impl Archetype {
    /// Panics unless `types` is strictly sorted by `TypeInfo`'s ordering
    /// (i.e. no duplicates and no out-of-order entries).
    fn assert_type_info(types: &[TypeInfo]) {
        types.windows(2).for_each(|x| match x[0].cmp(&x[1]) {
            core::cmp::Ordering::Less => (),
            // Debug builds can name the offending component type.
            #[cfg(debug_assertions)]
            core::cmp::Ordering::Equal => panic!(
                "attempted to allocate entity with duplicate {} components; \
                 each type must occur at most once!",
                x[0].type_name
            ),
            #[cfg(not(debug_assertions))]
            core::cmp::Ordering::Equal => panic!(
                "attempted to allocate entity with duplicate components; \
                 each type must occur at most once!"
            ),
            core::cmp::Ordering::Greater => panic!("type info is unsorted"),
        });
    }

    /// Creates an empty archetype for the given sorted set of component types.
    pub(crate) fn new(types: Vec<TypeInfo>) -> Self {
        // `TypeInfo`'s `Ord` sorts by decreasing alignment, so the first entry
        // (if any) carries the maximum alignment of the whole set.
        let max_align = types.first().map_or(1, |ty| ty.layout.align());
        Self::assert_type_info(&types);
        let component_count = types.len();
        Self {
            index: OrderedTypeIdMap::new(types.iter().enumerate().map(|(i, ty)| (ty.id, i))),
            type_ids: types.iter().map(|ty| ty.id()).collect(),
            types,
            entities: Box::new([]),
            len: 0,
            // No capacity yet: every column gets a dangling pointer that is
            // nonetheless aligned for any component type in this archetype.
            data: (0..component_count)
                .map(|_| Data {
                    state: AtomicBorrow::new(),
                    storage: NonNull::new(max_align as *mut u8).unwrap(),
                })
                .collect(),
        }
    }

    /// Drops every stored component in place and resets the entity count.
    /// Column allocations are retained for reuse.
    pub(crate) fn clear(&mut self) {
        for (ty, data) in self.types.iter().zip(&*self.data) {
            for index in 0..self.len {
                unsafe {
                    let removed = data.storage.as_ptr().add(index as usize * ty.layout.size());
                    (ty.drop)(removed);
                }
            }
        }
        self.len = 0;
    }

    /// Whether this archetype contains components of type `T`
    pub fn has<T: Component>(&self) -> bool {
        self.has_dynamic(TypeId::of::<T>())
    }

    /// Whether this archetype contains components with the type identified by `id`
    pub fn has_dynamic(&self, id: TypeId) -> bool {
        self.index.contains_key(&id)
    }

    /// Column index ("state") for component type `T`, if present
    pub(crate) fn get_state<T: Component>(&self) -> Option<usize> {
        self.index.get(&TypeId::of::<T>()).copied()
    }

    /// Pointer to the start of the `T` column identified by `state`
    ///
    /// Safety: `state` must have been obtained from `get_state::<T>` on this
    /// same archetype.
    pub(crate) unsafe fn get_base<T: Component>(&self, state: usize) -> NonNull<T> {
        debug_assert_eq!(self.types[state].id, TypeId::of::<T>());

        unsafe {
            NonNull::new_unchecked(self.data.get_unchecked(state).storage.as_ptr().cast::<T>())
        }
    }

    /// Borrow the column for components of type `T`, if present
    pub fn get<'a, T: ComponentRef<'a>>(&'a self) -> Option<T::Column> {
        T::get_column(self)
    }

    /// Acquire a shared runtime borrow of column `state`; panics if the
    /// column is currently borrowed uniquely.
    pub(crate) fn borrow<T: Component>(&self, state: usize) {
        assert_eq!(self.types[state].id, TypeId::of::<T>());

        if !self.data[state].state.borrow() {
            panic!("{} already borrowed uniquely", type_name::<T>());
        }
    }

    /// Like `borrow`, but without checking `state` against a component type.
    pub(crate) unsafe fn borrow_raw(&self, state: usize) {
        if !self.data[state].state.borrow() {
            panic!("state index {} already borrowed uniquely", state);
        }
    }

    /// Acquire a unique runtime borrow of column `state`; panics if the
    /// column is borrowed at all.
    pub(crate) fn borrow_mut<T: Component>(&self, state: usize) {
        assert_eq!(self.types[state].id, TypeId::of::<T>());

        if !self.data[state].state.borrow_mut() {
            panic!("{} already borrowed", type_name::<T>());
        }
    }

    /// Release a shared borrow previously acquired with `borrow`.
    pub(crate) fn release<T: Component>(&self, state: usize) {
        assert_eq!(self.types[state].id, TypeId::of::<T>());
        self.data[state].state.release();
    }

    /// Release a unique borrow previously acquired with `borrow_mut`.
    pub(crate) fn release_mut<T: Component>(&self, state: usize) {
        assert_eq!(self.types[state].id, TypeId::of::<T>());
        self.data[state].state.release_mut();
    }

    /// Release a shared borrow acquired with `borrow_raw`.
    pub(crate) unsafe fn release_raw(&self, state: usize) {
        self.data[state].state.release();
    }

    /// Release a unique borrow without checking the component type.
    pub(crate) unsafe fn release_raw_mut(&self, state: usize) {
        self.data[state].state.release_mut();
    }

    /// Number of entities in this archetype
    #[inline]
    pub fn len(&self) -> u32 {
        self.len
    }

    /// Whether this archetype contains no entities
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Raw pointer to the entity-id column
    #[inline]
    pub(crate) fn entities(&self) -> NonNull<u32> {
        unsafe { NonNull::new_unchecked(self.entities.as_ptr() as *mut _) }
    }

    /// Entity id stored at row `index`
    pub(crate) fn entity_id(&self, index: u32) -> u32 {
        self.entities[index as usize]
    }

    /// Overwrite the entity id stored at row `index`.
    #[inline]
    pub(crate) fn set_entity_id(&mut self, index: usize, id: u32) {
        self.entities[index] = id;
    }

    /// Metadata for each component type, in column order
    pub(crate) fn types(&self) -> &[TypeInfo] {
        &self.types
    }

    /// `TypeId` of each component type, in column order
    pub(crate) fn type_ids(&self) -> &[TypeId] {
        &self.type_ids
    }

    /// Enumerate the types of the components stored in this archetype.
    pub fn component_types(&self) -> impl ExactSizeIterator<Item = TypeId> + '_ {
        self.types.iter().map(|typeinfo| typeinfo.id)
    }

    /// Pointer to the component of type `ty` at row `index`, or `None` if
    /// `ty` is not stored here.
    ///
    /// Safety: `size` must be `ty`'s size, and `index` must not exceed `len`
    /// (the one-past-the-end row is allowed so a fresh row can be written).
    pub(crate) unsafe fn get_dynamic(
        &self,
        ty: TypeId,
        size: usize,
        index: u32,
    ) -> Option<NonNull<u8>> {
        debug_assert!(index <= self.len);
        Some(NonNull::new_unchecked(
            self.data
                .get_unchecked(*self.index.get(&ty)?)
                .storage
                .as_ptr()
                .add(size * index as usize)
                .cast::<u8>(),
        ))
    }

    /// Reserve a row for entity `id`, growing storage if full; returns the
    /// new row's index.
    ///
    /// Safety: the caller must initialize every component column at the
    /// returned row before it is otherwise accessed.
    pub(crate) unsafe fn allocate(&mut self, id: u32) -> u32 {
        if self.len as usize == self.entities.len() {
            // Grow by at least 64 rows to amortize reallocation cost.
            self.grow(64);
        }

        self.entities[self.len as usize] = id;
        self.len += 1;
        self.len - 1
    }

    /// Set the live-entity count directly.
    ///
    /// Safety: rows `0..len` must contain initialized components and ids.
    pub(crate) unsafe fn set_len(&mut self, len: u32) {
        debug_assert!(len <= self.capacity());
        self.len = len;
    }

    /// Ensure capacity for at least `additional` more entities.
    pub(crate) fn reserve(&mut self, additional: u32) {
        if additional > (self.capacity() - self.len()) {
            let increment = additional - (self.capacity() - self.len());
            self.grow(increment.max(64));
        }
    }

    /// Number of entities this archetype can hold without reallocating
    pub(crate) fn capacity(&self) -> u32 {
        self.entities.len() as u32
    }

    /// Grow by at least `min_increment` rows; at least doubles the capacity.
    fn grow(&mut self, min_increment: u32) {
        self.grow_exact(self.capacity().max(min_increment))
    }

    /// Grow by exactly `increment` rows, reallocating every column and
    /// copying the initialized prefix across.
    fn grow_exact(&mut self, increment: u32) {
        let old_count = self.len as usize;
        let old_cap = self.entities.len();
        let new_cap = self.entities.len() + increment as usize;
        // Fresh entity-id column; unused rows are filled with !0.
        let mut new_entities = vec![!0; new_cap].into_boxed_slice();
        new_entities[0..old_count].copy_from_slice(&self.entities[0..old_count]);
        self.entities = new_entities;

        let new_data = self
            .types
            .iter()
            .zip(&*self.data)
            .map(|(info, old)| {
                let storage = if info.layout.size() == 0 {
                    // Zero-sized types never allocate; an aligned dangling
                    // pointer suffices.
                    NonNull::new(info.layout.align() as *mut u8).unwrap()
                } else {
                    let layout =
                        Layout::from_size_align(info.layout.size() * new_cap, info.layout.align())
                            .unwrap();
                    unsafe {
                        let mem = alloc(layout);
                        let mem = NonNull::new(mem)
                            .unwrap_or_else(|| alloc::alloc::handle_alloc_error(layout));
                        // Move the initialized rows into the new allocation.
                        ptr::copy_nonoverlapping(
                            old.storage.as_ptr(),
                            mem.as_ptr(),
                            info.layout.size() * old_count,
                        );
                        mem
                    }
                };
                Data {
                    state: AtomicBorrow::new(),
                    storage,
                }
            })
            .collect::<Box<[_]>>();

        // Old columns were real allocations only if capacity was nonzero.
        if old_cap > 0 {
            for (info, data) in self.types.iter().zip(&*self.data) {
                if info.layout.size() == 0 {
                    continue;
                }
                unsafe {
                    dealloc(
                        data.storage.as_ptr(),
                        Layout::from_size_align(info.layout.size() * old_cap, info.layout.align())
                            .unwrap(),
                    );
                }
            }
        }

        self.data = new_data;
    }

    /// Swap-remove row `index`, dropping its components iff `drop` is set.
    ///
    /// Returns the id of the entity moved into `index`, if any.
    ///
    /// Safety: `index` must be a live row.
    pub(crate) unsafe fn remove(&mut self, index: u32, drop: bool) -> Option<u32> {
        let last = self.len - 1;
        for (ty, data) in self.types.iter().zip(&*self.data) {
            let removed = data.storage.as_ptr().add(index as usize * ty.layout.size());
            if drop {
                (ty.drop)(removed);
            }
            if index != last {
                // Fill the hole with the last row's component.
                let moved = data.storage.as_ptr().add(last as usize * ty.layout.size());
                ptr::copy_nonoverlapping(moved, removed, ty.layout.size());
            }
        }
        self.len = last;
        if index != last {
            self.entities[index as usize] = self.entities[last as usize];
            Some(self.entities[last as usize])
        } else {
            None
        }
    }

    /// Swap-remove row `index`, handing ownership of each component to `f`
    /// as `(pointer, type id, size)` instead of dropping it.
    ///
    /// Returns the id of the entity moved into `index`, if any.
    ///
    /// Safety: `index` must be a live row, and `f` must take ownership of
    /// every component it is passed.
    pub(crate) unsafe fn move_to(
        &mut self,
        index: u32,
        mut f: impl FnMut(*mut u8, TypeId, usize),
    ) -> Option<u32> {
        let last = self.len - 1;
        for (ty, data) in self.types.iter().zip(&*self.data) {
            let moved_out = data.storage.as_ptr().add(index as usize * ty.layout.size());
            f(moved_out, ty.id(), ty.layout().size());
            if index != last {
                // Fill the hole with the last row's component.
                let moved = data.storage.as_ptr().add(last as usize * ty.layout.size());
                ptr::copy_nonoverlapping(moved, moved_out, ty.layout.size());
            }
        }
        self.len -= 1;
        if index != last {
            self.entities[index as usize] = self.entities[last as usize];
            Some(self.entities[last as usize])
        } else {
            None
        }
    }

    /// Move the component at `component` into row `index` of `ty`'s column.
    ///
    /// Safety: `ty` must be present in this archetype, `size` must be its
    /// size, and the destination slot must be uninitialized (the previous
    /// value, if any, is not dropped).
    pub(crate) unsafe fn put_dynamic(
        &mut self,
        component: *mut u8,
        ty: TypeId,
        size: usize,
        index: u32,
    ) {
        let ptr = self
            .get_dynamic(ty, size, index)
            .unwrap()
            .as_ptr()
            .cast::<u8>();
        ptr::copy_nonoverlapping(component, ptr, size);
    }

    /// How, if at all, query `Q` would access entities in this archetype
    pub fn access<Q: Query>(&self) -> Option<Access> {
        Q::Fetch::access(self)
    }

    /// Whether query `Q` can be satisfied by this archetype
    pub fn satisfies<Q: Query>(&self) -> bool {
        self.access::<Q>().is_some()
    }

    /// Append all of `other`'s components to this archetype's columns.
    ///
    /// Safety: NOTE(review): both archetypes are assumed to have identical
    /// component types in identical column order — this is not checked here
    /// and must be upheld by the caller.
    pub(crate) unsafe fn merge(&mut self, mut other: Archetype) {
        self.reserve(other.len);
        for ((info, dst), src) in self.types.iter().zip(&*self.data).zip(&*other.data) {
            dst.storage
                .as_ptr()
                .add(self.len as usize * info.layout.size())
                .copy_from_nonoverlapping(
                    src.storage.as_ptr(),
                    other.len as usize * info.layout.size(),
                )
        }
        self.len += other.len;
        // Prevent `other`'s drop from double-dropping the moved components.
        other.len = 0;
    }

    /// Ids of the entities stored in this archetype, in row order
    #[inline]
    pub fn ids(&self) -> &[u32] {
        &self.entities[0..self.len as usize]
    }
}
420
421impl Drop for Archetype {
422 fn drop(&mut self) {
423 self.clear();
424 if self.entities.is_empty() {
425 return;
426 }
427 for (info, data) in self.types.iter().zip(&*self.data) {
428 if info.layout.size() != 0 {
429 unsafe {
430 dealloc(
431 data.storage.as_ptr(),
432 Layout::from_size_align_unchecked(
433 info.layout.size() * self.entities.len(),
434 info.layout.align(),
435 ),
436 );
437 }
438 }
439 }
440 }
441}
442
/// Storage for one component type within an archetype
struct Data {
    /// Runtime borrow-tracking flag guarding access to `storage`
    state: AtomicBorrow,
    /// Start of the component column; dangling (but suitably aligned) for
    /// zero-sized types or when the archetype has zero capacity
    storage: NonNull<u8>,
}
447
448#[derive(Default)]
453pub struct TypeIdHasher {
454 hash: u64,
455}
456
457impl Hasher for TypeIdHasher {
458 fn write_u64(&mut self, n: u64) {
459 debug_assert_eq!(self.hash, 0);
461 self.hash = n;
462 }
463
464 fn write_u128(&mut self, n: u128) {
466 debug_assert_eq!(self.hash, 0);
467 self.hash = n as u64;
468 }
469
470 fn write(&mut self, bytes: &[u8]) {
471 debug_assert_eq!(self.hash, 0);
472
473 let mut hasher = foldhash::fast::FixedState::with_seed(0xb334867b740a29a5).build_hasher();
476 hasher.write(bytes);
477 self.hash = hasher.finish();
478 }
479
480 fn finish(&self) -> u64 {
481 self.hash
482 }
483}
484
485pub type TypeIdMap<V> = HashMap<TypeId, V, BuildHasherDefault<TypeIdHasher>>;
491
/// An immutable map from `TypeId` to `V`, stored as a sorted slice and
/// queried by binary search
struct OrderedTypeIdMap<V>(Box<[(TypeId, V)]>);

impl<V> OrderedTypeIdMap<V> {
    /// Collects the entries and sorts them by `TypeId` so lookups can bisect.
    fn new(iter: impl Iterator<Item = (TypeId, V)>) -> Self {
        let mut entries: Box<[_]> = iter.collect();
        entries.sort_unstable_by_key(|entry| entry.0);
        Self(entries)
    }

    /// Position of `id` within the sorted entries, if present.
    fn search(&self, id: &TypeId) -> Option<usize> {
        self.0.binary_search_by_key(id, |entry| entry.0).ok()
    }

    /// Whether an entry for `id` exists.
    fn contains_key(&self, id: &TypeId) -> bool {
        self.search(id).is_some()
    }

    /// Borrow the value stored under `id`, if any.
    fn get(&self, id: &TypeId) -> Option<&V> {
        let idx = self.search(id)?;
        Some(&self.0[idx].1)
    }
}
513
/// Metadata needed to store, locate, and drop a component type at runtime
#[derive(Debug, Copy, Clone)]
pub struct TypeInfo {
    /// Unique identifier of the component type
    id: TypeId,
    /// Size and alignment of one component value
    layout: Layout,
    /// Type-erased in-place destructor
    drop: unsafe fn(*mut u8),
    /// Human-readable name, kept only in debug builds for diagnostics
    #[cfg(debug_assertions)]
    type_name: &'static str,
}

impl TypeInfo {
    /// Construct the metadata describing component type `T`.
    pub fn of<T: 'static>() -> Self {
        // Type-erased destructor: reinterpret the pointer and drop in place.
        unsafe fn typed_drop<T>(ptr: *mut u8) {
            ptr.cast::<T>().drop_in_place()
        }

        TypeInfo {
            id: TypeId::of::<T>(),
            layout: Layout::new::<T>(),
            drop: typed_drop::<T>,
            #[cfg(debug_assertions)]
            type_name: core::any::type_name::<T>(),
        }
    }

    /// Assemble metadata from raw parts, for types not known at compile time.
    ///
    /// Debug builds record a placeholder name since none is available here.
    pub fn from_parts(id: TypeId, layout: Layout, drop: unsafe fn(*mut u8)) -> Self {
        TypeInfo {
            id,
            layout,
            drop,
            #[cfg(debug_assertions)]
            type_name: "<unknown> (TypeInfo constructed from parts)",
        }
    }

    /// `TypeId` of the described component type
    pub fn id(&self) -> TypeId {
        self.id
    }

    /// Memory layout of one component value
    pub fn layout(&self) -> Layout {
        self.layout
    }

    /// Drop the component value at `data` in place.
    ///
    /// # Safety
    /// `data` must point to an initialized value of the described type.
    pub unsafe fn drop(&self, data: *mut u8) {
        (self.drop)(data)
    }

    /// The type-erased destructor function itself
    pub fn drop_shim(&self) -> unsafe fn(*mut u8) {
        self.drop
    }
}
584
impl PartialOrd for TypeInfo {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for TypeInfo {
    /// Order by *descending* alignment, then by `TypeId`.
    ///
    /// Descending alignment guarantees that the first entry of a sorted type
    /// list has the maximum alignment of the set, which `Archetype::new`
    /// relies on when creating aligned dangling column pointers.
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.layout
            .align()
            .cmp(&other.layout.align())
            .reverse()
            .then_with(|| self.id.cmp(&other.id))
    }
}

impl PartialEq for TypeInfo {
    // Equality is by `TypeId` alone; layout and destructor are not compared.
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}

impl Eq for TypeInfo {}
609
/// A shared borrow of every `T` component in a single `Archetype`
///
/// Dereferences to a slice with one element per live entity.
pub struct ArchetypeColumn<'a, T: Component> {
    /// Kept so the runtime borrow acquired in `new`/`clone` can be released
    archetype: &'a Archetype,
    /// The dense column of `T` components
    column: &'a [T],
}

impl<'a, T: Component> ArchetypeColumn<'a, T> {
    /// Borrow the `T` column of `archetype`, or `None` if `T` is absent.
    ///
    /// Panics (via `Archetype::borrow`) if the column is already borrowed
    /// uniquely.
    pub(crate) fn new(archetype: &'a Archetype) -> Option<Self> {
        let state = archetype.get_state::<T>()?;
        let ptr = unsafe { archetype.get_base::<T>(state) };
        let column = unsafe { core::slice::from_raw_parts(ptr.as_ptr(), archetype.len() as usize) };
        // Register the shared runtime borrow; released in `Drop`.
        archetype.borrow::<T>(state);
        Some(Self { archetype, column })
    }
}

impl<T: Component> Deref for ArchetypeColumn<'_, T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        self.column
    }
}

impl<T: Component> Drop for ArchetypeColumn<'_, T> {
    fn drop(&mut self) {
        // Release the shared borrow taken when this handle was created/cloned.
        let state = self.archetype.get_state::<T>().unwrap();
        self.archetype.release::<T>(state);
    }
}

impl<T: Component> Clone for ArchetypeColumn<'_, T> {
    fn clone(&self) -> Self {
        // Each clone holds its own shared borrow so releases stay balanced.
        let state = self.archetype.get_state::<T>().unwrap();
        self.archetype.borrow::<T>(state);
        Self {
            archetype: self.archetype,
            column: self.column,
        }
    }
}

impl<T: Component + fmt::Debug> fmt::Debug for ArchetypeColumn<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.column.fmt(f)
    }
}
656
/// A unique borrow of every `T` component in a single `Archetype`
///
/// Dereferences (mutably) to a slice with one element per live entity.
pub struct ArchetypeColumnMut<'a, T: Component> {
    /// Kept so the runtime borrow acquired in `new` can be released on drop
    archetype: &'a Archetype,
    /// The dense column of `T` components
    column: &'a mut [T],
}

impl<'a, T: Component> ArchetypeColumnMut<'a, T> {
    /// Uniquely borrow the `T` column of `archetype`, or `None` if absent.
    ///
    /// Panics (via `Archetype::borrow_mut`) if the column is borrowed at all.
    pub(crate) fn new(archetype: &'a Archetype) -> Option<Self> {
        let state = archetype.get_state::<T>()?;
        let ptr = unsafe { archetype.get_base::<T>(state) };
        let column =
            unsafe { core::slice::from_raw_parts_mut(ptr.as_ptr(), archetype.len() as usize) };
        // Register the unique runtime borrow; released in `Drop`.
        archetype.borrow_mut::<T>(state);
        Some(Self { archetype, column })
    }
}

impl<T: Component> Deref for ArchetypeColumnMut<'_, T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        self.column
    }
}

impl<T: Component> DerefMut for ArchetypeColumnMut<'_, T> {
    fn deref_mut(&mut self) -> &mut [T] {
        self.column
    }
}

impl<T: Component> Drop for ArchetypeColumnMut<'_, T> {
    fn drop(&mut self) {
        // Release the unique borrow taken when this handle was created.
        let state = self.archetype.get_state::<T>().unwrap();
        self.archetype.release_mut::<T>(state);
    }
}

impl<T: Component + fmt::Debug> fmt::Debug for ArchetypeColumnMut<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.column.fmt(f)
    }
}
698}