1use alloc::vec::Vec;
2use core::cmp;
3use core::convert::TryFrom;
4use core::iter::ExactSizeIterator;
5use core::num::{NonZeroU32, NonZeroU64};
6use core::ops::Range;
7use core::sync::atomic::{AtomicI64, Ordering};
8use core::{fmt, mem};
9#[cfg(feature = "std")]
10use std::error::Error;
11
/// Lightweight handle to an entity: a 32-bit slot index (`id`) plus a 32-bit
/// `generation` distinguishing successive entities that reuse the same slot
/// (the generation is bumped by `Entities::free`).
#[derive(Clone, Copy, Hash, Eq, Ord, PartialEq, PartialOrd)]
pub struct Entity {
    // Nonzero so `to_bits` can produce a `NonZeroU64` and `Option<Entity>`
    // gets the niche optimization for free.
    pub(crate) generation: NonZeroU32,
    // Index into `Entities::meta`.
    pub(crate) id: u32,
}
23
24impl Entity {
25 pub fn to_bits(self) -> NonZeroU64 {
33 unsafe {
34 NonZeroU64::new_unchecked(u64::from(self.generation.get()) << 32 | u64::from(self.id))
35 }
36 }
37
38 pub fn from_bits(bits: u64) -> Option<Self> {
44 Some(Self {
45 generation: NonZeroU32::new((bits >> 32) as u32)?,
46 id: bits as u32,
47 })
48 }
49
50 pub fn id(self) -> u32 {
58 self.id
59 }
60}
61
62impl fmt::Debug for Entity {
63 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
64 write!(f, "{}v{}", self.id, self.generation)
65 }
66}
67
#[cfg(feature = "serde")]
impl serde::Serialize for Entity {
    // Entities serialize as the packed `u64` produced by `Entity::to_bits`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let bits = self.to_bits();
        bits.serialize(serializer)
    }
}
77
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for Entity {
    // Mirrors `Serialize`: entities are transported as the packed `u64`
    // from `Entity::to_bits`.
    fn deserialize<D>(deserializer: D) -> Result<Entity, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let bits = u64::deserialize(deserializer)?;

        // A zero generation (high 32 bits) is unrepresentable; reject it.
        // Fix: the "expected" message previously started with a stray
        // backtick ("`a valid `Entity` bitpattern").
        Entity::from_bits(bits).ok_or_else(|| {
            serde::de::Error::invalid_value(
                serde::de::Unexpected::Unsigned(bits),
                &"a valid `Entity` bitpattern",
            )
        })
    }
}
95
/// Iterator over the entity handles produced by `Entities::reserve_entities`.
pub struct ReserveEntitiesIterator<'a> {
    // Metadata for existing slots; used to look up the live generation of
    // recycled ids.
    meta: &'a [EntityMeta],

    // Ids drawn from the freelist (`Entities::pending`); these reuse existing
    // slots at their current generation.
    id_iter: core::slice::Iter<'a, u32>,

    // Brand-new ids past `meta.len()`; these start at generation 1.
    id_range: core::ops::Range<u32>,
}
107
108impl<'a> Iterator for ReserveEntitiesIterator<'a> {
109 type Item = Entity;
110
111 fn next(&mut self) -> Option<Self::Item> {
112 self.id_iter
113 .next()
114 .map(|&id| Entity {
115 generation: self.meta[id as usize].generation,
116 id,
117 })
118 .or_else(|| {
119 self.id_range.next().map(|id| Entity {
120 generation: NonZeroU32::new(1).unwrap(),
121 id,
122 })
123 })
124 }
125
126 fn size_hint(&self) -> (usize, Option<usize>) {
127 let len = self.id_iter.len() + self.id_range.len();
128 (len, Some(len))
129 }
130}
131
// Sound: `size_hint` above reports exact, equal lower and upper bounds.
impl<'a> ExactSizeIterator for ReserveEntitiesIterator<'a> {}
133
/// Entity id allocator: slot metadata plus a freelist of recycled ids.
#[derive(Default)]
pub(crate) struct Entities {
    // One entry per allocated slot, indexed by `Entity::id`.
    pub meta: Vec<EntityMeta>,

    // Freelist of slot ids available for reuse.
    pending: Vec<u32>,
    // Shared cursor into `pending`, decremented atomically by the `&self`
    // reservation methods. Positions > 0 index the freelist; a value of -n
    // means the freelist is exhausted and n fresh ids past `meta.len()` have
    // been promised and must be materialized by `flush`.
    free_cursor: AtomicI64,
    // Count of live entities, maintained by alloc/free/flush.
    len: u32,
}
177
178impl Entities {
179 pub fn reserve_entities(&self, count: u32) -> ReserveEntitiesIterator {
183 let range_end = self.free_cursor.fetch_sub(count as i64, Ordering::Relaxed);
187 let range_start = range_end - count as i64;
188
189 let freelist_range = range_start.max(0) as usize..range_end.max(0) as usize;
190
191 let (new_id_start, new_id_end) = if range_start >= 0 {
192 (0, 0)
194 } else {
195 let base = self.meta.len() as i64;
205
206 let new_id_end = u32::try_from(base - range_start).expect("too many entities");
207
208 let new_id_start = (base - range_end.min(0)) as u32;
210
211 (new_id_start, new_id_end)
212 };
213
214 ReserveEntitiesIterator {
215 meta: &self.meta[..],
216 id_iter: self.pending[freelist_range].iter(),
217 id_range: new_id_start..new_id_end,
218 }
219 }
220
221 pub fn reserve_entity(&self) -> Entity {
225 let n = self.free_cursor.fetch_sub(1, Ordering::Relaxed);
226 if n > 0 {
227 let id = self.pending[(n - 1) as usize];
229 Entity {
230 generation: self.meta[id as usize].generation,
231 id,
232 }
233 } else {
234 Entity {
240 generation: NonZeroU32::new(1).unwrap(),
241 id: u32::try_from(self.meta.len() as i64 - n).expect("too many entities"),
242 }
243 }
244 }
245
246 fn verify_flushed(&mut self) {
248 debug_assert!(
249 !self.needs_flush(),
250 "flush() needs to be called before this operation is legal"
251 );
252 }
253
254 pub fn alloc(&mut self) -> Entity {
258 self.verify_flushed();
259
260 self.len += 1;
261 if let Some(id) = self.pending.pop() {
262 let new_free_cursor = self.pending.len() as i64;
263 self.free_cursor.store(new_free_cursor, Ordering::Relaxed); Entity {
265 generation: self.meta[id as usize].generation,
266 id,
267 }
268 } else {
269 let id = u32::try_from(self.meta.len()).expect("too many entities");
270 self.meta.push(EntityMeta::EMPTY);
271 Entity {
272 generation: NonZeroU32::new(1).unwrap(),
273 id,
274 }
275 }
276 }
277
278 pub fn alloc_many(&mut self, n: u32, archetype: u32, mut first_index: u32) -> AllocManyState {
282 self.verify_flushed();
283
284 let fresh = (n as usize).saturating_sub(self.pending.len()) as u32;
285 assert!(
286 (self.meta.len() + fresh as usize) < u32::MAX as usize,
287 "too many entities"
288 );
289 let pending_end = self.pending.len().saturating_sub(n as usize);
290 for &id in &self.pending[pending_end..] {
291 self.meta[id as usize].location = Location {
292 archetype,
293 index: first_index,
294 };
295 first_index += 1;
296 }
297
298 let fresh_start = self.meta.len() as u32;
299 self.meta.extend(
300 (first_index..(first_index + fresh)).map(|index| EntityMeta {
301 generation: NonZeroU32::new(1).unwrap(),
302 location: Location { archetype, index },
303 }),
304 );
305
306 self.len += n;
307
308 AllocManyState {
309 fresh: fresh_start..(fresh_start + fresh),
310 pending_end,
311 }
312 }
313
314 pub fn finish_alloc_many(&mut self, pending_end: usize) {
318 self.pending.truncate(pending_end);
319 }
320
321 pub fn alloc_at(&mut self, entity: Entity) -> Option<Location> {
325 self.verify_flushed();
326
327 let loc = if entity.id as usize >= self.meta.len() {
328 self.pending.extend((self.meta.len() as u32)..entity.id);
329 let new_free_cursor = self.pending.len() as i64;
330 self.free_cursor.store(new_free_cursor, Ordering::Relaxed); self.meta.resize(entity.id as usize + 1, EntityMeta::EMPTY);
332 self.len += 1;
333 None
334 } else if let Some(index) = self.pending.iter().position(|item| *item == entity.id) {
335 self.pending.swap_remove(index);
336 let new_free_cursor = self.pending.len() as i64;
337 self.free_cursor.store(new_free_cursor, Ordering::Relaxed); self.len += 1;
339 None
340 } else {
341 Some(mem::replace(
342 &mut self.meta[entity.id as usize].location,
343 EntityMeta::EMPTY.location,
344 ))
345 };
346
347 self.meta[entity.id as usize].generation = entity.generation;
348
349 loc
350 }
351
352 pub fn free(&mut self, entity: Entity) -> Result<Location, NoSuchEntity> {
356 self.verify_flushed();
357
358 let meta = self.meta.get_mut(entity.id as usize).ok_or(NoSuchEntity)?;
359 if meta.generation != entity.generation {
360 return Err(NoSuchEntity);
361 }
362
363 meta.generation = NonZeroU32::new(u32::from(meta.generation).wrapping_add(1))
364 .unwrap_or_else(|| NonZeroU32::new(1).unwrap());
365
366 let loc = mem::replace(&mut meta.location, EntityMeta::EMPTY.location);
367
368 self.pending.push(entity.id);
369
370 let new_free_cursor = self.pending.len() as i64;
371 self.free_cursor.store(new_free_cursor, Ordering::Relaxed); self.len -= 1;
373
374 Ok(loc)
375 }
376
377 pub fn reserve(&mut self, additional: u32) {
379 self.verify_flushed();
380
381 let freelist_size = self.free_cursor.load(Ordering::Relaxed);
382 let shortfall = additional as i64 - freelist_size;
383 if shortfall > 0 {
384 self.meta.reserve(shortfall as usize);
385 }
386 }
387
388 pub fn contains(&self, entity: Entity) -> bool {
389 self.meta
392 .get(entity.id as usize)
393 .map_or(true, |meta| meta.generation == entity.generation)
394 }
395
396 pub fn clear(&mut self) {
397 self.meta.clear();
398 self.pending.clear();
399 self.free_cursor.store(0, Ordering::Relaxed); }
401
402 pub fn get_mut(&mut self, entity: Entity) -> Result<&mut Location, NoSuchEntity> {
406 let meta = self.meta.get_mut(entity.id as usize).ok_or(NoSuchEntity)?;
407 if meta.generation == entity.generation {
408 Ok(&mut meta.location)
409 } else {
410 Err(NoSuchEntity)
411 }
412 }
413
414 pub fn get(&self, entity: Entity) -> Result<Location, NoSuchEntity> {
416 if self.meta.len() <= entity.id as usize {
417 return Ok(Location {
418 archetype: 0,
419 index: u32::max_value(),
420 });
421 }
422 let meta = &self.meta[entity.id as usize];
423 if meta.generation != entity.generation {
424 return Err(NoSuchEntity);
425 }
426 Ok(meta.location)
427 }
428
429 pub unsafe fn resolve_unknown_gen(&self, id: u32) -> Entity {
434 let meta_len = self.meta.len();
435
436 if meta_len > id as usize {
437 let meta = &self.meta[id as usize];
438 Entity {
439 generation: meta.generation,
440 id,
441 }
442 } else {
443 let free_cursor = self.free_cursor.load(Ordering::Relaxed);
445 let num_pending = cmp::max(-free_cursor, 0) as usize;
446
447 if meta_len + num_pending > id as usize {
448 Entity {
450 generation: NonZeroU32::new(1).unwrap(),
451 id,
452 }
453 } else {
454 panic!("entity id is out of range");
455 }
456 }
457 }
458
459 fn needs_flush(&mut self) -> bool {
460 self.free_cursor.load(Ordering::Relaxed) != self.pending.len() as i64
462 }
463
464 pub fn flush(&mut self, mut init: impl FnMut(u32, &mut Location)) {
467 let free_cursor = self.free_cursor.load(Ordering::Relaxed);
469
470 let new_free_cursor = if free_cursor >= 0 {
471 free_cursor as usize
472 } else {
473 let old_meta_len = self.meta.len();
474 let new_meta_len = old_meta_len + -free_cursor as usize;
475 self.meta.resize(new_meta_len, EntityMeta::EMPTY);
476
477 self.len += -free_cursor as u32;
478 for (id, meta) in self.meta.iter_mut().enumerate().skip(old_meta_len) {
479 init(id as u32, &mut meta.location);
480 }
481
482 self.free_cursor.store(0, Ordering::Relaxed);
483 0
484 };
485
486 self.len += (self.pending.len() - new_free_cursor) as u32;
487 for id in self.pending.drain(new_free_cursor..) {
488 init(id, &mut self.meta[id as usize].location);
489 }
490 }
491
492 #[inline]
493 pub fn len(&self) -> u32 {
494 self.len
495 }
496}
497
/// Per-slot bookkeeping for one entity id.
#[derive(Copy, Clone)]
pub(crate) struct EntityMeta {
    // Current generation of this slot; bumped on free so stale `Entity`
    // handles can be detected.
    pub generation: NonZeroU32,
    // Where this entity's components are stored.
    pub location: Location,
}
503
impl EntityMeta {
    // State of a fresh or vacated slot: generation 1 and the sentinel
    // location (`index == u32::MAX`, also used by `Entities::get` for
    // reserved ids; presumably "no components stored yet" — TODO confirm).
    const EMPTY: EntityMeta = EntityMeta {
        // SAFETY: 1 is nonzero.
        generation: unsafe { NonZeroU32::new_unchecked(1) },
        location: Location {
            archetype: 0,
            index: u32::max_value(),
        },
    };
}
513
/// Where an entity's components live: an archetype id and the entity's row
/// index within that archetype.
#[derive(Copy, Clone)]
pub(crate) struct Location {
    pub archetype: u32,
    pub index: u32,
}
519
/// Error indicating that an `Entity` handle does not refer to a live entity,
/// typically because the handle is stale (the entity was freed).
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct NoSuchEntity;

impl fmt::Display for NoSuchEntity {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `pad` honors any width/fill/alignment flags the caller supplied.
        let message = "no such entity";
        f.pad(message)
    }
}

#[cfg(feature = "std")]
impl Error for NoSuchEntity {}
532
/// Resumable cursor for `Entities::alloc_many`, tracking which reused and
/// fresh ids of the batch have been handed out so far.
#[derive(Clone)]
pub(crate) struct AllocManyState {
    // Index into `Entities::pending`; ids at or beyond this point belong to
    // this batch. Pass to `Entities::finish_alloc_many` when done.
    pub pending_end: usize,
    // Brand-new ids (past the original `meta` length) still to be yielded.
    fresh: Range<u32>,
}
538
539impl AllocManyState {
540 pub fn next(&mut self, entities: &Entities) -> Option<u32> {
541 if self.pending_end < entities.pending.len() {
542 let id = entities.pending[self.pending_end];
543 self.pending_end += 1;
544 Some(id)
545 } else {
546 self.fresh.next()
547 }
548 }
549
550 pub fn len(&self, entities: &Entities) -> usize {
551 self.fresh.len() + (entities.pending.len() - self.pending_end)
552 }
553}
554
#[cfg(test)]
mod tests {
    use super::*;
    use hashbrown::{HashMap, HashSet};
    use rand::{rngs::StdRng, Rng, SeedableRng};

    // `to_bits`/`from_bits` must be mutually inverse for an arbitrary
    // generation/id pair.
    #[test]
    fn entity_bits_roundtrip() {
        let e = Entity {
            generation: NonZeroU32::new(0xDEADBEEF).unwrap(),
            id: 0xBAADF00D,
        };
        assert_eq!(Entity::from_bits(e.to_bits().into()).unwrap(), e);
    }

    // Randomized interleaving of alloc/free, mirrored against a simple
    // HashMap/HashSet model, checking `len` after every step.
    #[test]
    fn alloc_and_free() {
        // Fixed seed keeps the test deterministic.
        let mut rng = StdRng::seed_from_u64(0xFEEDFACEDEADF00D);

        let mut e = Entities::default();
        // Model of the allocator's high-water mark (`meta.len()`).
        let mut first_unused = 0u32;
        // Live entities: id -> generation.
        let mut id_to_gen: HashMap<u32, u32> = Default::default();
        // Ids the model believes are on the freelist.
        let mut free_set: HashSet<u32> = Default::default();
        let mut len = 0;

        for _ in 0..100 {
            let alloc = rng.gen_bool(0.7);
            if alloc || first_unused == 0 {
                let entity = e.alloc();
                len += 1;

                let id = entity.id;
                if !free_set.is_empty() {
                    // Freed ids must be recycled before fresh ids are issued.
                    assert!(free_set.remove(&id));
                } else if id >= first_unused {
                    first_unused = id + 1;
                }

                // Exercise `get_mut` on the freshly allocated entity.
                e.get_mut(entity).unwrap().index = 37;

                assert!(id_to_gen.insert(id, entity.generation.get()).is_none());
            } else {
                // Free a random previously seen id; freeing an id that is
                // already free must fail.
                let id = rng.gen_range(0..first_unused);

                let generation = id_to_gen.remove(&id);
                let entity = Entity {
                    id,
                    generation: NonZeroU32::new(
                        generation.unwrap_or_else(|| NonZeroU32::new(1).unwrap().get()),
                    )
                    .unwrap(),
                };

                assert_eq!(e.free(entity).is_ok(), generation.is_some());
                if generation.is_some() {
                    len -= 1;
                }

                free_set.insert(id);
            }
            assert_eq!(e.len(), len);
        }
    }

    // `alloc_at` must revive freed slots, evict live ones, and create (and
    // freelist) any skipped ids when targeting an id past the end.
    #[test]
    fn alloc_at() {
        let mut e = Entities::default();

        let mut old = Vec::new();

        // Allocate and free the same slot twice, collecting both generations
        // of the handle for id 0.
        for _ in 0..2 {
            let entity = e.alloc();
            old.push(entity);
            e.free(entity).unwrap();
        }

        assert_eq!(e.len(), 0);

        let id = old.first().unwrap().id();
        assert!(old.iter().all(|entity| entity.id() == id));

        let entity = *old.last().unwrap();
        assert!(!e.contains(entity));
        assert!(e.pending.contains(&entity.id()));
        // Reviving a freed slot returns no prior location...
        assert!(e.alloc_at(entity).is_none());
        assert!(e.contains(entity));
        assert!(!e.pending.contains(&entity.id()));
        assert_eq!(e.len(), 1);
        // ...while re-targeting a live slot evicts and returns its location.
        assert!(e.alloc_at(entity).is_some());
        assert!(e.contains(entity));
        assert_eq!(e.len(), 1);

        assert_eq!(e.meta.len(), 1);
        // Targeting id 3 creates slots 1..=3 and freelists the skipped 1, 2.
        assert!(e
            .alloc_at(Entity {
                id: 3,
                generation: NonZeroU32::new(2).unwrap(),
            })
            .is_none());
        assert_eq!(e.pending.len(), 2);
        assert_eq!(&e.pending, &[1, 2]);
        assert_eq!(e.meta.len(), 4);
    }

    // `contains` is true for live and reserved entities, false after free.
    #[test]
    fn contains() {
        let mut e = Entities::default();

        for _ in 0..2 {
            let entity = e.alloc();
            assert!(e.contains(entity));

            e.free(entity).unwrap();
            assert!(!e.contains(entity));
        }

        // Reserved (not yet flushed) entities must also count as contained.
        for _ in 0..3 {
            let entity = e.reserve_entity();
            assert!(e.contains(entity));
        }
    }

    // Shared body for the two reservation APIs: allocate 10, free the last
    // 4, reserve 10 more, then verify freelist reuse, fresh-id assignment,
    // and which ids `flush` reports.
    fn reserve_test_helper(reserve_n: impl FnOnce(&mut Entities, u32) -> Vec<Entity>) {
        let mut e = Entities::default();

        let mut v1: Vec<Entity> = (0..10).map(|_| e.alloc()).collect();
        assert_eq!(v1.iter().map(|e| e.id).max(), Some(9));
        for &entity in v1.iter() {
            assert!(e.contains(entity));
            e.get_mut(entity).unwrap().index = 37;
        }

        // Put the last 4 ids on the freelist.
        for entity in v1.drain(6..) {
            e.free(entity).unwrap();
        }
        assert_eq!(e.free_cursor.load(Ordering::Relaxed), 4);

        // Reserve 10: 4 recycled from the freelist + 6 fresh (ids 10..16).
        let v2 = reserve_n(&mut e, 10);
        assert_eq!(v2.iter().map(|e| e.id).max(), Some(15));

        assert!(v2.iter().all(|&entity| e.contains(entity)));

        // Live + reserved ids must cover 0..16 exactly once.
        let mut v3: Vec<Entity> = v1.iter().chain(v2.iter()).copied().collect();
        assert_eq!(v3.len(), 16);
        v3.sort_by_key(|entity| entity.id);
        for (i, entity) in v3.into_iter().enumerate() {
            assert_eq!(entity.id, i as u32);
        }

        // 6 fresh ids were promised, so the cursor went negative.
        assert_eq!(e.free_cursor.load(Ordering::Relaxed), -6);

        // `flush` must initialize exactly the 10 reserved ids (6..16).
        let mut flushed = Vec::new();
        e.flush(|id, _| flushed.push(id));
        flushed.sort_unstable();

        assert_eq!(flushed, (6..16).collect::<Vec<_>>());
    }

    #[test]
    fn reserve_entity() {
        reserve_test_helper(|e, n| (0..n).map(|_| e.reserve_entity()).collect())
    }

    #[test]
    fn reserve_entities() {
        reserve_test_helper(|e, n| e.reserve_entities(n).collect())
    }

    // Flushing a lone reservation must create its storage and count it live.
    #[test]
    fn reserve_grows() {
        let mut e = Entities::default();
        let _ = e.reserve_entity();
        e.flush(|_, _| {});
        assert_eq!(e.len(), 1);
    }

    // Same, when the reservation mixes freelist reuse with fresh ids.
    #[test]
    fn reserve_grows_mixed() {
        let mut e = Entities::default();
        let a = e.alloc();
        e.alloc();
        e.free(a).unwrap();
        let _ = e.reserve_entities(3);
        e.flush(|_, _| {});
        assert_eq!(e.len(), 4);
    }
}