use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::any::TypeId;
use std::cmp::Reverse;
use std::mem::{align_of, needs_drop, transmute};
use std::ptr::{copy_nonoverlapping, drop_in_place, slice_from_raw_parts_mut};

use crate::{resources::TypeIdMap, world::Entity};

/// Stores the components of all entities which share the same set of
/// component types, one densely packed array per component type.
pub struct Archetype {
    types: Box<[TypeMetadata]>,
    layout: Layout,
    len: u32,
    cap: u32,
    ptr: *mut u8,
}

unsafe impl Send for Archetype {}

impl Archetype {
    pub fn new(types: TypeMetadataSet) -> Self {
        // Component indices are handed out as `u16`, cf. `find`.
        assert!(types.0.len() <= u16::MAX as usize);

        let max_align = types
            .0
            .iter()
            .map(|ty| ty.layout.align())
            .max()
            .unwrap_or(1);

        // Start with an empty, well-aligned dangling allocation;
        // `grow` performs the first real allocation.
        let layout = Layout::from_size_align(0, max_align).unwrap();
        let ptr = invalid_ptr(max_align);

        Self {
            types: types.0.into(),
            layout,
            len: 0,
            cap: 0,
            ptr,
        }
    }
}

impl Archetype {
    pub fn len(&self) -> u32 {
        self.len
    }

    /// The caller must initialize the components of the returned slot
    /// before the archetype is dropped or cleared.
    pub unsafe fn alloc(&mut self) -> u32 {
        if self.len == self.cap {
            self.grow();
        }

        let idx = self.len;
        self.len += 1;
        idx
    }

    /// Removes the slot `idx`, returning whether the last slot was
    /// relocated into its place (swap-remove).
    #[must_use]
    pub unsafe fn free<const DROP: bool>(&mut self, idx: u32) -> bool {
        if DROP {
            for ty in &*self.types {
                if let Some(drop) = ty.drop {
                    let ptr = ty.base_pointer.add(ty.layout.size() * idx as usize);

                    drop(ptr, 1);
                }
            }
        }

        self.len -= 1;

        if idx != self.len {
            for ty in &*self.types {
                let size = ty.layout.size();

                let src_ptr = ty.base_pointer.add(size * self.len as usize);
                let dst_ptr = ty.base_pointer.add(size * idx as usize);

                copy_nonoverlapping(src_ptr, dst_ptr, size);
            }

            true
        } else {
            false
        }
    }

    pub fn clear(&mut self) {
        // Reset `len` before dropping so that a panicking `Drop` impl
        // cannot lead to double drops.
        let len = self.len;
        self.len = 0;

        for ty in &*self.types {
            if let Some(drop) = ty.drop {
                unsafe {
                    drop(ty.base_pointer, len as usize);
                }
            }
        }
    }

    #[cold]
    #[inline(never)]
    fn grow(&mut self) {
        // Grow geometrically, starting at a capacity of eight.
        let new_cap = self.cap.checked_add(self.cap.max(8)).unwrap() as usize;

        // `find` relies on `types` being sorted by `TypeId`; restore that
        // order even if a panic unwinds out of this function.
        struct SortOnDrop<'a>(&'a mut [TypeMetadata]);

        impl Drop for SortOnDrop<'_> {
            fn drop(&mut self) {
                self.0.sort_unstable_by_key(|ty| ty.id);
            }
        }

        let types = SortOnDrop(&mut self.types);

        // Place the component arrays in order of decreasing alignment so
        // that no padding is required between them.
        types
            .0
            .sort_unstable_by_key(|ty| Reverse(ty.layout.align()));

        let mut new_offsets = Vec::<usize>::with_capacity(types.0.len());
        let mut new_size = 0;

        for ty in types.0.iter() {
            new_offsets.push(new_size);

            new_size = new_size
                .checked_add(ty.layout.size().checked_mul(new_cap).unwrap())
                .unwrap();
        }

        let new_layout = Layout::from_size_align(new_size, self.layout.align()).unwrap();

        let new_ptr = alloc_ptr(new_layout);

        unsafe {
            // Copy each component array into its new position.
            for (ty, new_offset) in types.0.iter_mut().zip(&new_offsets) {
                let new_base_pointer = new_ptr.add(*new_offset);

                copy_nonoverlapping(
                    ty.base_pointer,
                    new_base_pointer,
                    ty.layout.size() * self.cap as usize,
                );

                ty.base_pointer = new_base_pointer;
            }

            if self.layout.size() != 0 {
                dealloc(self.ptr, self.layout);
            }
        }

        self.layout = new_layout;
        self.ptr = new_ptr;

        self.cap = new_cap as u32;
    }
}

impl Drop for Archetype {
    fn drop(&mut self) {
        self.clear();

        unsafe {
            if self.layout.size() != 0 {
                dealloc(self.ptr, self.layout);
            }
        }
    }
}

impl Archetype {
    pub fn types(&self) -> TypeMetadataSet {
        TypeMetadataSet(self.types.to_vec())
    }

    pub fn match_(&self, types: &TypeMetadataSet) -> bool {
        let lhs = self.types.iter().map(|ty| ty.id);
        let rhs = types.0.iter().map(|ty| ty.id);

        lhs.eq(rhs)
    }

    pub fn find<C>(&self) -> Option<u16>
    where
        C: 'static,
    {
        self.find_impl(&TypeId::of::<C>())
    }

    fn find_impl(&self, key: &TypeId) -> Option<u16> {
        self.types
            .binary_search_by_key(key, |ty| ty.id)
            .map(|idx| idx as u16)
            .ok()
    }
}

impl Archetype {
    /// Copies the components present in both archetypes from `src_idx` in
    /// `src` to `dst_idx` in `dst`.
    #[allow(clippy::needless_pass_by_ref_mut)]
    pub unsafe fn move_(src: &mut Self, dst: &mut Self, src_idx: u32, dst_idx: u32) {
        debug_assert!(src_idx < src.len);
        debug_assert!(dst_idx < dst.len);

        // Both type lists are sorted by `TypeId`, so all shared components
        // are found in a single merge-like pass.
        let mut dst_types = &*dst.types;

        for src_ty in &*src.types {
            let dst_ty = match dst_types.iter().position(|ty| ty.id == src_ty.id) {
                Some(dst_ty) => {
                    dst_types = &dst_types[dst_ty..];
                    &dst_types[0]
                }
                None => continue,
            };

            let size = src_ty.layout.size();

            let src_ptr = src_ty.base_pointer.add(size * src_idx as usize);
            let dst_ptr = dst_ty.base_pointer.add(size * dst_idx as usize);

            copy_nonoverlapping(src_ptr, dst_ptr, size);
        }
    }
}

impl Archetype {
    #[cfg(debug_assertions)]
    fn type_metadata<C>(&self, ty: u16) -> &TypeMetadata
    where
        C: 'static,
    {
        let ty = &self.types[ty as usize];
        assert_eq!(ty.id, TypeId::of::<C>());
        ty
    }

    #[cfg(not(debug_assertions))]
    unsafe fn type_metadata_unchecked(&self, ty: u16) -> &TypeMetadata {
        self.types.get_unchecked(ty as usize)
    }

    pub unsafe fn base_pointer<C>(&self, ty: u16) -> *mut C
    where
        C: 'static,
    {
        #[cfg(debug_assertions)]
        let ty = self.type_metadata::<C>(ty);
        #[cfg(not(debug_assertions))]
        let ty = self.type_metadata_unchecked(ty);

        ty.base_pointer.cast::<C>()
    }

    pub unsafe fn pointer<C>(&self, ty: u16, idx: u32) -> *mut C
    where
        C: 'static,
    {
        let ptr = self.base_pointer::<C>(ty);

        debug_assert!(idx < self.len);
        ptr.add(idx as usize)
    }
}

impl Archetype {
    pub unsafe fn get<C>(&self, idx: u32) -> *mut C
    where
        C: 'static,
    {
        let ty = self.find::<C>().unwrap();
        self.pointer::<C>(ty, idx)
    }

    pub unsafe fn drop<C>(&mut self, idx: u32)
    where
        C: 'static,
    {
        if needs_drop::<C>() {
            if let Some(ty) = self.find::<C>() {
                let ty = self.types.get_unchecked(ty as usize);

                let ptr = ty.base_pointer.add(ty.layout.size() * idx as usize);

                (ty.drop.unwrap())(ptr, 1);
            }
        }
    }
}

impl Archetype {
    pub fn clone(&mut self, cloner: &Cloner) -> Self {
        let ptr = alloc_ptr(self.layout);

        let mut types = self.types.clone();

        unsafe {
            for ty in &mut *types {
                let clone = cloner
                    .clone
                    .get(&ty.id)
                    .expect("Component type missing from cloner");

                // The clone reuses the layout of `self`, so each component
                // array keeps its byte offset from the base pointer.
                let offset = ty.base_pointer.offset_from(self.ptr);
                let new_base_pointer = ptr.offset(offset);

                clone(ty.base_pointer, new_base_pointer, self.len as usize);

                ty.base_pointer = new_base_pointer;
            }
        }

        Self {
            types,
            layout: self.layout,
            len: self.len,
            cap: self.cap,
            ptr,
        }
    }
}

#[derive(Clone, Copy)]
struct TypeMetadata {
    id: TypeId,
    layout: Layout,
    drop: Option<unsafe fn(*mut u8, usize)>,
    base_pointer: *mut u8,
}

impl TypeMetadata {
    fn new<C>() -> Self
    where
        C: 'static,
    {
        unsafe fn drop<C>(ptr: *mut u8, len: usize) {
            drop_in_place(slice_from_raw_parts_mut(ptr.cast::<C>(), len))
        }

        Self {
            id: TypeId::of::<C>(),
            layout: Layout::new::<C>(),
            drop: if needs_drop::<C>() {
                Some(drop::<C>)
            } else {
                None
            },
            base_pointer: invalid_ptr(align_of::<C>()),
        }
    }
}

#[derive(Default)]
pub struct TypeMetadataSet(Vec<TypeMetadata>);

impl TypeMetadataSet {
    pub fn ids(&self) -> impl ExactSizeIterator<Item = TypeId> + '_ {
        self.0.iter().map(|ty| ty.id)
    }

    pub fn insert<C>(&mut self)
    where
        C: 'static,
    {
        if let Err(ty) = self.0.binary_search_by_key(&TypeId::of::<C>(), |ty| ty.id) {
            self.0.insert(ty, TypeMetadata::new::<C>());
        }
    }

    #[must_use]
    pub fn remove<C>(&mut self) -> Option<()>
    where
        C: 'static,
    {
        let ty = self
            .0
            .binary_search_by_key(&TypeId::of::<C>(), |ty| ty.id)
            .ok()?;

        self.0.remove(ty);

        Some(())
    }
}

fn alloc_ptr(layout: Layout) -> *mut u8 {
    if layout.size() == 0 {
        return invalid_ptr(layout.align());
    }

    let ptr = unsafe { alloc(layout) };
    if ptr.is_null() {
        handle_alloc_error(layout);
    }
    ptr
}

/// Creates a dangling but well-aligned pointer, e.g. for zero-sized allocations.
fn invalid_ptr(addr: usize) -> *mut u8 {
    unsafe {
        #[allow(integer_to_ptr_transmutes)]
        transmute(addr)
    }
}

/// Maps component types to functions which clone their values into a new
/// archetype, cf. [`Archetype::clone`].
pub struct Cloner {
    clone: TypeIdMap<unsafe fn(*const u8, *mut u8, usize)>,
}

impl Cloner {
    pub fn new() -> Self {
        let mut _self = Self {
            clone: Default::default(),
        };

        _self.add_copyable::<Entity>();

        _self
    }
}

impl Default for Cloner {
    fn default() -> Self {
        Self::new()
    }
}

impl Cloner {
    /// Registers a component type which is cloned element by element.
    pub fn add_cloneable<C>(&mut self)
    where
        C: Clone + 'static,
    {
        unsafe fn clone<C>(src: *const u8, dst: *mut u8, len: usize)
        where
            C: Clone,
        {
            let src = src.cast::<C>();
            let dst = dst.cast::<C>();

            for idx in 0..len {
                let val = (*src.add(idx)).clone();
                dst.add(idx).write(val);
            }
        }

        self.clone.insert(TypeId::of::<C>(), clone::<C>);
    }

    /// Registers a component type which is copied bitwise.
    pub fn add_copyable<C>(&mut self)
    where
        C: Copy + 'static,
    {
        unsafe fn clone<C>(src: *const u8, dst: *mut u8, len: usize)
        where
            C: Copy,
        {
            let src = src.cast::<C>();
            let dst = dst.cast::<C>();

            copy_nonoverlapping(src, dst, len);
        }

        self.clone.insert(TypeId::of::<C>(), clone::<C>);
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    use std::cell::Cell;
    use std::rc::Rc;

    #[test]
    fn reverse_sorting_by_alignment_avoids_padding() {
        let mut types = TypeMetadataSet::default();
        types.insert::<u64>();
        types.insert::<u32>();
        types.insert::<u16>();
        types.insert::<u8>();

        let mut archetype = Archetype::new(types);
        let _ent = unsafe { archetype.alloc() };

        assert_eq!(archetype.layout.size(), 8 * (8 + 4 + 2 + 1));
        assert_eq!(archetype.layout.align(), 8);

        assert_eq!(archetype.len, 1);
        assert_eq!(archetype.cap, 8);

        assert_eq!(archetype.types.len(), 4);

        unsafe {
            let ty = archetype.find::<u64>().unwrap();
            let base_pointer = archetype.base_pointer::<u64>(ty);
            assert_eq!(base_pointer, archetype.ptr.add(0).cast());

            let ty = archetype.find::<u32>().unwrap();
            let base_pointer = archetype.base_pointer::<u32>(ty);
            assert_eq!(base_pointer, archetype.ptr.add(8 * 8).cast());

            let ty = archetype.find::<u16>().unwrap();
            let base_pointer = archetype.base_pointer::<u16>(ty);
            assert_eq!(base_pointer, archetype.ptr.add(8 * (8 + 4)).cast());

            let ty = archetype.find::<u8>().unwrap();
            let base_pointer = archetype.base_pointer::<u8>(ty);
            assert_eq!(base_pointer, archetype.ptr.add(8 * (8 + 4 + 2)).cast());
        }
    }
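
    // A minimal sketch of the swap-remove contract of `free`: freeing a
    // middle slot relocates the last entry into it (returning `true`),
    // while freeing the last slot does not (returning `false`).
    #[test]
    fn free_swaps_in_the_last_entry() {
        let mut types = TypeMetadataSet::default();
        types.insert::<u32>();

        let mut archetype = Archetype::new(types);

        unsafe {
            for val in 0..3 {
                let idx = archetype.alloc();
                archetype.get::<u32>(idx).write(val);
            }

            // Slot 2 is moved into slot 0.
            assert!(archetype.free::<true>(0));
            assert_eq!(archetype.len(), 2);
            assert_eq!(*archetype.get::<u32>(0), 2);

            // Freeing the last slot relocates nothing.
            assert!(!archetype.free::<true>(1));
            assert_eq!(archetype.len(), 1);
        }
    }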

    #[test]
    fn drops_all_component_values() {
        struct CountDrops(Rc<Cell<usize>>);

        impl Drop for CountDrops {
            fn drop(&mut self) {
                let drops = &self.0;

                drops.set(drops.get() + 1);
            }
        }

        let drops = Rc::new(Cell::new(0));

        let mut types = TypeMetadataSet::default();
        types.insert::<CountDrops>();

        let mut archetype = Archetype::new(types);

        unsafe {
            for _ in 0..32 {
                let ent = archetype.alloc();

                archetype
                    .get::<CountDrops>(ent)
                    .write(CountDrops(drops.clone()));
            }
        }

        drop(archetype);

        assert_eq!(drops.get(), 32);
    }
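
    // A minimal sketch of `move_`: components present in both archetypes
    // are copied from the source slot to the destination slot.
    #[test]
    fn move_copies_shared_components() {
        let mut types = TypeMetadataSet::default();
        types.insert::<u32>();
        types.insert::<u64>();

        let mut src = Archetype::new(types);

        let mut types = TypeMetadataSet::default();
        types.insert::<u64>();

        let mut dst = Archetype::new(types);

        unsafe {
            let src_idx = src.alloc();
            src.get::<u32>(src_idx).write(23);
            src.get::<u64>(src_idx).write(42);

            let dst_idx = dst.alloc();
            Archetype::move_(&mut src, &mut dst, src_idx, dst_idx);

            assert_eq!(*dst.get::<u64>(dst_idx), 42);

            // The source slot was moved out of, so free it without dropping.
            let _ = src.free::<false>(src_idx);
        }
    }

    // A minimal sketch of `Archetype::clone`: values of a `Clone` component
    // registered via `add_cloneable` are deep-copied into the new archetype.
    #[test]
    fn cloner_deep_copies_component_values() {
        let mut types = TypeMetadataSet::default();
        types.insert::<String>();

        let mut archetype = Archetype::new(types);

        unsafe {
            let idx = archetype.alloc();
            archetype.get::<String>(idx).write(String::from("foo"));
        }

        let mut cloner = Cloner::new();
        cloner.add_cloneable::<String>();

        let clone = archetype.clone(&cloner);

        unsafe {
            assert_eq!(*archetype.get::<String>(0), "foo");
            assert_eq!(*clone.get::<String>(0), "foo");
        }
    }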
}