mod clone_entities;
mod entity_set;
mod map_entities;
#[cfg(feature = "bevy_reflect")]
use bevy_reflect::Reflect;
#[cfg(all(feature = "bevy_reflect", feature = "serialize"))]
use bevy_reflect::{ReflectDeserialize, ReflectSerialize};
pub use clone_entities::*;
use derive_more::derive::Display;
pub use entity_set::*;
pub use map_entities::*;
mod hash;
pub use hash::*;
pub mod hash_map;
pub mod hash_set;
pub use hash_map::EntityHashMap;
pub use hash_set::EntityHashSet;
pub mod index_map;
pub mod index_set;
pub use index_map::EntityIndexMap;
pub use index_set::EntityIndexSet;
pub mod unique_array;
pub mod unique_slice;
pub mod unique_vec;
use nonmax::NonMaxU32;
pub use unique_array::{UniqueEntityArray, UniqueEntityEquivalentArray};
pub use unique_slice::{UniqueEntityEquivalentSlice, UniqueEntitySlice};
pub use unique_vec::{UniqueEntityEquivalentVec, UniqueEntityVec};
use crate::{
archetype::{ArchetypeId, ArchetypeRow},
change_detection::{CheckChangeTicks, MaybeLocation, Tick},
storage::{SparseSetIndex, TableId, TableRow},
};
use alloc::vec::Vec;
use bevy_platform::sync::atomic::{AtomicU32, AtomicUsize, Ordering};
use core::{fmt, hash::Hash, mem, num::NonZero, panic::Location};
use log::warn;
#[cfg(feature = "serialize")]
use serde::{Deserialize, Serialize};
/// The index part of an [`Entity`].
///
/// Backed by [`NonMaxU32`] so that `Option<EntityIndex>` (and types embedding
/// it, such as `Entity`) benefit from niche optimization — see the
/// `entity_niche_optimization` test below.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display)]
#[cfg_attr(feature = "bevy_reflect", derive(Reflect))]
#[cfg_attr(feature = "bevy_reflect", reflect(opaque))]
#[cfg_attr(feature = "bevy_reflect", reflect(Hash, PartialEq, Debug, Clone))]
#[repr(transparent)]
pub struct EntityIndex(NonMaxU32);
impl EntityIndex {
    /// Index used by [`Entity::PLACEHOLDER`].
    const PLACEHOLDER: Self = Self(NonMaxU32::MAX);
    /// Wraps a `NonMaxU32` as an entity index.
    pub const fn new(index: NonMaxU32) -> Self {
        Self(index)
    }
    /// Creates an index from a plain `u32`, returning `None` for `u32::MAX`
    /// (the one value `NonMaxU32` cannot represent).
    pub const fn from_raw_u32(index: u32) -> Option<Self> {
        // Manual match because `Option::map` takes a closure, which is not
        // callable in a `const fn`.
        match NonMaxU32::new(index) {
            Some(index) => Some(Self(index)),
            None => None,
        }
    }
    /// Returns the logical index as a plain `u32` (never `u32::MAX`).
    #[inline(always)]
    pub const fn index(self) -> u32 {
        self.0.get()
    }
    /// Returns the *internal* bit pattern of the index.
    ///
    /// NOTE(review): this is NOT the logical value returned by
    /// [`Self::index`]; the `nonmax` crate stores the value in inverted form
    /// (so `u32::MAX` occupies the zero niche), which is why the
    /// `entity_const` test asserts `!0x0000_00cc`. Confirm against the
    /// `nonmax` crate version in use.
    #[inline(always)]
    const fn to_bits(self) -> u32 {
        // SAFETY assumption: `NonMaxU32` is a transparent wrapper over a
        // 32-bit integer, so transmuting exposes its internal representation.
        unsafe { mem::transmute::<NonMaxU32, u32>(self.0) }
    }
    /// Inverse of [`Self::to_bits`].
    ///
    /// Panics when `bits` is not a valid internal encoding (i.e. when it is 0,
    /// the niche value).
    #[inline]
    const fn from_bits(bits: u32) -> Self {
        Self::try_from_bits(bits).expect("Attempted to initialize invalid bits as an entity index")
    }
    /// Fallible inverse of [`Self::to_bits`]: `None` when `bits == 0`.
    #[inline(always)]
    const fn try_from_bits(bits: u32) -> Option<Self> {
        match NonZero::<u32>::new(bits) {
            Some(underlying) => Some(Self(unsafe {
                // SAFETY assumption: `NonMaxU32` has the same layout and the
                // same zero niche as `NonZero<u32>`, so any non-zero bit
                // pattern is a valid internal `NonMaxU32` encoding.
                mem::transmute::<NonZero<u32>, NonMaxU32>(underlying)
            })),
            None => None,
        }
    }
}
impl SparseSetIndex for EntityIndex {
#[inline]
fn sparse_set_index(&self) -> usize {
self.index() as usize
}
#[inline]
fn get_sparse_set_index(value: usize) -> Self {
Self::from_bits(value as u32)
}
}
/// The generation counter paired with an [`EntityIndex`] to form an
/// [`Entity`]. Bumped each time an index is freed, so stale ids can be
/// detected.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Display)]
#[cfg_attr(feature = "bevy_reflect", derive(Reflect))]
#[cfg_attr(feature = "bevy_reflect", reflect(opaque))]
#[cfg_attr(feature = "bevy_reflect", reflect(Hash, PartialEq, Debug, Clone))]
#[repr(transparent)]
pub struct EntityGeneration(u32);

impl EntityGeneration {
    /// The generation a never-before-used index starts with.
    pub const FIRST: Self = Self(0);

    /// Half the `u32` range; used by [`Self::cmp_approx`] as the cutoff
    /// between "ahead" and "wrapped behind".
    const DIFF_MAX: u32 = 1u32 << 31;

    /// Returns the raw counter value.
    #[inline(always)]
    pub const fn to_bits(self) -> u32 {
        self.0
    }

    /// Builds a generation directly from a raw counter value.
    #[inline]
    pub const fn from_bits(bits: u32) -> Self {
        Self(bits)
    }

    /// Returns the generation `versions` steps later, silently wrapping on
    /// overflow.
    #[inline]
    pub const fn after_versions(self, versions: u32) -> Self {
        Self(self.0.wrapping_add(versions))
    }

    /// Like [`Self::after_versions`], but also reports whether the counter
    /// wrapped (in which case old ids may alias the new generation).
    #[inline]
    pub const fn after_versions_and_could_alias(self, versions: u32) -> (Self, bool) {
        let (bits, wrapped) = self.0.overflowing_add(versions);
        (Self(bits), wrapped)
    }

    /// Wraparound-tolerant ordering: `self` counts as newer than `other`
    /// when it is between 1 and `DIFF_MAX - 1` steps ahead (mod 2^32),
    /// otherwise as older.
    #[inline]
    pub const fn cmp_approx(&self, other: &Self) -> core::cmp::Ordering {
        use core::cmp::Ordering;
        let diff = self.0.wrapping_sub(other.0);
        if diff == 0 {
            Ordering::Equal
        } else if diff < Self::DIFF_MAX {
            Ordering::Greater
        } else {
            Ordering::Less
        }
    }
}
/// A unique entity id: an [`EntityIndex`] paired with the
/// [`EntityGeneration`] the index had when this id was handed out.
#[derive(Clone, Copy)]
#[cfg_attr(feature = "bevy_reflect", derive(Reflect))]
#[cfg_attr(feature = "bevy_reflect", reflect(opaque))]
#[cfg_attr(feature = "bevy_reflect", reflect(Hash, PartialEq, Debug, Clone))]
#[cfg_attr(
    all(feature = "bevy_reflect", feature = "serialize"),
    reflect(Serialize, Deserialize)
)]
#[repr(C, align(8))]
pub struct Entity {
    // Field order depends on endianness — presumably so the struct's
    // in-memory byte layout matches the `u64` produced by `Entity::to_bits`
    // (index in the low 32 bits, generation in the high 32).
    // TODO(review): confirm this intent.
    #[cfg(target_endian = "little")]
    index: EntityIndex,
    generation: EntityGeneration,
    #[cfg(target_endian = "big")]
    index: EntityIndex,
}
// Manual comparison/hash impls: operate on the packed `u64` from `to_bits`
// so a single 64-bit operation covers both index and generation.
impl PartialEq for Entity {
    #[inline]
    fn eq(&self, other: &Entity) -> bool {
        other.to_bits() == self.to_bits()
    }
}

impl Eq for Entity {}

impl PartialOrd for Entity {
    /// Delegates to `Ord`, so this is always `Some`.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(Ord::cmp(self, other))
    }
}

impl Ord for Entity {
    /// Orders by the packed bits: generation is in the high half, so ids are
    /// ordered by generation first, then index.
    #[inline]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        let lhs = self.to_bits();
        let rhs = other.to_bits();
        lhs.cmp(&rhs)
    }
}

impl Hash for Entity {
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        Hash::hash(&self.to_bits(), state);
    }
}
impl Entity {
    /// Reserved id standing in for "no real entity yet".
    pub const PLACEHOLDER: Self = Self::from_index(EntityIndex::PLACEHOLDER);

    /// Assembles an id out of its two halves.
    #[inline(always)]
    pub const fn from_index_and_generation(
        index: EntityIndex,
        generation: EntityGeneration,
    ) -> Entity {
        Self { index, generation }
    }

    /// Creates an id for `index` with the first generation.
    #[inline(always)]
    pub const fn from_index(index: EntityIndex) -> Entity {
        Self::from_index_and_generation(index, EntityGeneration::FIRST)
    }

    /// Creates a first-generation id from a raw `u32` index;
    /// `None` when `index == u32::MAX`.
    #[inline(always)]
    pub const fn from_raw_u32(index: u32) -> Option<Entity> {
        match EntityIndex::from_raw_u32(index) {
            Some(index) => Some(Self::from_index(index)),
            None => None,
        }
    }

    /// Packs the id into a `u64`: internal index bits in the low half,
    /// generation in the high half.
    #[inline(always)]
    pub const fn to_bits(self) -> u64 {
        let low = self.index.to_bits() as u64;
        let high = (self.generation.to_bits() as u64) << 32;
        high | low
    }

    /// Unpacks a `u64` produced by [`Self::to_bits`].
    ///
    /// # Panics
    /// Panics when the low 32 bits are not a valid index encoding.
    #[inline]
    pub const fn from_bits(bits: u64) -> Self {
        match Self::try_from_bits(bits) {
            Some(id) => id,
            None => panic!("Attempted to initialize invalid bits as an entity"),
        }
    }

    /// Fallible version of [`Self::from_bits`].
    #[inline(always)]
    pub const fn try_from_bits(bits: u64) -> Option<Self> {
        let raw_index = bits as u32;
        let raw_gen = (bits >> 32) as u32;
        match EntityIndex::try_from_bits(raw_index) {
            Some(index) => Some(Self {
                index,
                generation: EntityGeneration::from_bits(raw_gen),
            }),
            None => None,
        }
    }

    /// The index half of this id.
    #[inline]
    pub const fn index(self) -> EntityIndex {
        self.index
    }

    /// The index half as a plain `u32`.
    #[inline]
    pub const fn index_u32(self) -> u32 {
        self.index.index()
    }

    /// The generation half of this id.
    #[inline]
    pub const fn generation(self) -> EntityGeneration {
        self.generation
    }
}
#[cfg(feature = "serialize")]
impl Serialize for Entity {
    /// Serializes the entity as the packed `u64` from [`Entity::to_bits`].
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_u64(self.to_bits())
    }
}
#[cfg(feature = "serialize")]
impl<'de> Deserialize<'de> for Entity {
    /// Deserializes a `u64` and rebuilds the entity via
    /// [`Entity::try_from_bits`], rejecting invalid encodings instead of
    /// panicking.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use serde::de::Error;
        let id: u64 = Deserialize::deserialize(deserializer)?;
        Entity::try_from_bits(id)
            .ok_or_else(|| D::Error::custom("Attempting to deserialize an invalid entity."))
    }
}
impl fmt::Debug for Entity {
    /// `Debug` output is identical to `Display` output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}

impl fmt::Display for Entity {
    /// Formats as `<index>v<generation>` (e.g. `42v0`), or the literal
    /// `PLACEHOLDER` for [`Entity::PLACEHOLDER`]. Routed through
    /// `Formatter::pad` so width/alignment flags are honored.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if *self == Self::PLACEHOLDER {
            return f.pad("PLACEHOLDER");
        }
        let rendered = alloc::fmt::format(format_args!(
            "{}v{}",
            self.index(),
            self.generation()
        ));
        f.pad(&rendered)
    }
}
impl SparseSetIndex for Entity {
#[inline]
fn sparse_set_index(&self) -> usize {
self.index().sparse_set_index()
}
#[inline]
fn get_sparse_set_index(value: usize) -> Self {
Entity::from_index(EntityIndex::get_sparse_set_index(value))
}
}
/// Hands out fresh or recycled [`Entity`] ids.
///
/// `alloc`/`alloc_many` take `&self` and claim slots through atomics;
/// `free`/`restart` take `&mut self` and re-synchronize the free list.
#[derive(Default, Debug)]
pub struct EntityAllocator {
    // Entities that were freed and may be handed out again.
    free: Vec<Entity>,
    // Cursor into `free`. Allocators decrement it speculatively, so it can
    // wrap below zero or otherwise disagree with `free.len()`; readers treat
    // any out-of-range value as "nothing to reuse", and `free`/`restart`
    // (which hold `&mut self`) reconcile it.
    free_len: AtomicUsize,
    // Next never-used index to mint.
    next_index: AtomicU32,
}
impl EntityAllocator {
    /// Resets the allocator to its initial, empty state.
    pub(crate) fn restart(&mut self) {
        self.free.clear();
        *self.free_len.get_mut() = 0;
        *self.next_index.get_mut() = 0;
    }
    /// Returns `freed` to the pool for later reuse.
    ///
    /// NOTE(review): bumping the generation before reuse is presumably the
    /// caller's job (e.g. via `Entities::mark_free`) — confirm with callers.
    pub fn free(&mut self, freed: Entity) {
        // `free_len` may have been driven past the real length (wrapped below
        // zero) or below it by allocations; clamp the vector to the
        // still-unclaimed prefix before pushing.
        let expected_len = *self.free_len.get_mut();
        if expected_len > self.free.len() {
            // Wrapped: every entry was claimed.
            self.free.clear();
        } else {
            self.free.truncate(expected_len);
        }
        self.free.push(freed);
        *self.free_len.get_mut() = self.free.len();
    }
    /// Allocates one entity, preferring a freed one.
    pub fn alloc(&self) -> Entity {
        // Claim a slot by decrementing; `wrapping_sub` turns an empty-pool
        // underflow into a huge index, making the `get` below return `None`.
        let index = self
            .free_len
            .fetch_sub(1, Ordering::Relaxed)
            .wrapping_sub(1);
        self.free.get(index).copied().unwrap_or_else(|| {
            // Nothing to reuse: mint a brand-new first-generation index.
            let index = self.next_index.fetch_add(1, Ordering::Relaxed);
            // Panics once the u32 index space is exhausted.
            let index = NonMaxU32::new(index).expect("too many entities");
            Entity::from_index(EntityIndex::new(index))
        })
    }
    /// Allocates `count` entities at once: reuses as many freed entities as
    /// are available, then mints fresh indices for the remainder.
    pub fn alloc_many(&self, count: u32) -> AllocEntitiesIterator<'_> {
        // Claim up to `count` slots; `fetch_sub` returns the previous value.
        let current_len = self.free_len.fetch_sub(count as usize, Ordering::Relaxed);
        // A previous value beyond `free.len()` means the counter had wrapped:
        // treat the pool as empty.
        let current_len = if current_len <= self.free.len() {
            current_len
        } else {
            0
        };
        let start = current_len.saturating_sub(count as usize);
        let reuse = start..current_len;
        let still_need = (count as usize - reuse.len()) as u32;
        let new = if still_need > 0 {
            let start_new = self.next_index.fetch_add(still_need, Ordering::Relaxed);
            // `checked_add` caps the range end at u32::MAX, which also keeps
            // every yielded index valid for NonMaxU32 (see the iterator).
            let end_new = start_new
                .checked_add(still_need)
                .expect("too many entities");
            start_new..end_new
        } else {
            0..0
        };
        AllocEntitiesIterator {
            reuse: self.free[reuse].iter(),
            new,
        }
    }
}
/// Iterator returned by [`EntityAllocator::alloc_many`]: yields the reused
/// entities first, then brand-new first-generation indices from `new`.
pub struct AllocEntitiesIterator<'a> {
    // Freed entities claimed for reuse.
    reuse: core::slice::Iter<'a, Entity>,
    // Raw indices for never-before-used entities.
    new: core::ops::Range<u32>,
}
impl<'a> Iterator for AllocEntitiesIterator<'a> {
    type Item = Entity;
    fn next(&mut self) -> Option<Self::Item> {
        self.reuse.next().copied().or_else(|| {
            self.new.next().map(|index| {
                // SAFETY: `alloc_many` builds `new` with a `checked_add`, so
                // its end is at most `u32::MAX` and every yielded `index` is
                // strictly less than `u32::MAX` — valid for `NonMaxU32`.
                let index = unsafe { EntityIndex::new(NonMaxU32::new_unchecked(index)) };
                Entity::from_index(index)
            })
        })
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: both halves know their remaining length.
        let len = self.reuse.len() + self.new.len();
        (len, Some(len))
    }
}
impl<'a> ExactSizeIterator for AllocEntitiesIterator<'a> {}
impl<'a> core::iter::FusedIterator for AllocEntitiesIterator<'a> {}
// SAFETY assumption: `EntitySetIterator` presumably requires every yielded
// entity to be distinct. The reused slice holds distinct freed entities and
// the fresh range never overlaps previously handed-out indices — confirm
// against the trait's documented contract.
unsafe impl EntitySetIterator for AllocEntitiesIterator<'_> {}
/// Metadata store for every entity index that has ever been handed out:
/// per index, the current [`EntityGeneration`], the current
/// [`EntityLocation`] (if spawned), and when/where the index was last
/// spawned or despawned.
#[derive(Debug, Clone)]
pub struct Entities {
    // Indexed by `EntityIndex::index()`. Indices at or beyond `meta.len()`
    // implicitly behave as `EntityMeta::FRESH`.
    meta: Vec<EntityMeta>,
}
impl Entities {
    /// Creates an empty metadata store.
    pub(crate) const fn new() -> Self {
        Self { meta: Vec::new() }
    }
    /// Removes all metadata; every index reverts to the fresh state.
    pub fn clear(&mut self) {
        self.meta.clear();
    }
    /// Returns the location of `entity`.
    ///
    /// # Errors
    /// - [`EntityNotSpawnedError::Invalid`] when the id's generation does not
    ///   match the index's current generation (a stale id).
    /// - [`EntityNotSpawnedError::ValidButNotSpawned`] when the id is current
    ///   but the entity has no location.
    #[inline]
    pub fn get_spawned(&self, entity: Entity) -> Result<EntityLocation, EntityNotSpawnedError> {
        // Unknown indices behave like fresh metadata (FIRST generation, no location).
        let meta = self.meta.get(entity.index_u32() as usize);
        let meta = meta.unwrap_or(&EntityMeta::FRESH);
        if entity.generation() != meta.generation {
            return Err(EntityNotSpawnedError::Invalid(InvalidEntityError {
                entity,
                current_generation: meta.generation,
            }));
        }
        meta.location
            .ok_or(EntityNotSpawnedError::ValidButNotSpawned(
                EntityValidButNotSpawnedError {
                    entity,
                    location: meta.spawned_or_despawned.by,
                },
            ))
    }
    /// Returns `Ok(Some(location))` for a spawned entity, `Ok(None)` for a
    /// valid-but-not-spawned entity, and `Err` for a stale id.
    #[inline]
    pub fn get(&self, entity: Entity) -> Result<Option<EntityLocation>, InvalidEntityError> {
        match self.get_spawned(entity) {
            Ok(location) => Ok(Some(location)),
            Err(EntityNotSpawnedError::ValidButNotSpawned { .. }) => Ok(None),
            Err(EntityNotSpawnedError::Invalid(err)) => Err(err),
        }
    }
    /// Returns the current entity id for `index`: the index paired with its
    /// current generation (or the first generation for unknown indices).
    #[inline]
    pub fn resolve_from_index(&self, index: EntityIndex) -> Entity {
        self.meta
            .get(index.index() as usize)
            .map(|meta| Entity::from_index_and_generation(index, meta.generation))
            .unwrap_or(Entity::from_index(index))
    }
    /// Returns `true` if the entity currently occupying `index` is spawned.
    #[inline]
    pub fn is_index_spawned(&self, index: EntityIndex) -> bool {
        self.meta
            .get(index.index() as usize)
            .is_some_and(|meta| meta.location.is_some())
    }
    /// Returns `true` if `entity`'s generation matches the index's current one.
    pub fn contains(&self, entity: Entity) -> bool {
        self.resolve_from_index(entity.index()).generation() == entity.generation()
    }
    /// Returns `true` if `entity` is valid and currently spawned.
    pub fn contains_spawned(&self, entity: Entity) -> bool {
        self.get_spawned(entity).is_ok()
    }
    /// Checks whether `entity` may be spawned: its id must be current and it
    /// must not already have a location.
    #[inline]
    pub fn check_can_spawn_at(&self, entity: Entity) -> Result<(), SpawnError> {
        match self.get(entity) {
            Ok(Some(_)) => Err(SpawnError::AlreadySpawned),
            Ok(None) => Ok(()),
            Err(err) => Err(SpawnError::Invalid(err)),
        }
    }
    /// Replaces the stored location for `index`, returning the previous one.
    ///
    /// # Safety
    /// `index` must be in bounds of `self.meta` (e.g. previously passed
    /// through [`Self::ensure_index_is_valid`]); `location`, if `Some`, must
    /// accurately describe the entity's storage — presumably; confirm with
    /// callers.
    #[inline]
    pub(crate) unsafe fn update_existing_location(
        &mut self,
        index: EntityIndex,
        location: Option<EntityLocation>,
    ) -> Option<EntityLocation> {
        // SAFETY: in-bounds per this function's contract.
        let meta = unsafe { self.meta.get_unchecked_mut(index.index() as usize) };
        mem::replace(&mut meta.location, location)
    }
    /// Like [`Self::update_existing_location`], but grows the metadata table
    /// first so any index is accepted.
    ///
    /// # Safety
    /// `location`, if `Some`, must accurately describe the entity's storage —
    /// presumably; confirm with callers.
    #[inline]
    pub(crate) unsafe fn set_location(
        &mut self,
        index: EntityIndex,
        location: Option<EntityLocation>,
    ) -> Option<EntityLocation> {
        self.ensure_index_is_valid(index);
        // SAFETY: `ensure_index_is_valid` just made `index` in-bounds.
        unsafe { self.update_existing_location(index, location) }
    }
    /// Grows `self.meta` with fresh entries so that `index` is in bounds.
    /// (Renamed from `ensure_index_index_is_valid` — doubled word.)
    #[inline]
    fn ensure_index_is_valid(&mut self, index: EntityIndex) {
        // Cold path: grow to at least `len`, then fill out to the full
        // capacity so nearby future indices are already in bounds.
        #[cold]
        fn expand(meta: &mut Vec<EntityMeta>, len: usize) {
            meta.resize(len, EntityMeta::FRESH);
            meta.resize(meta.capacity(), EntityMeta::FRESH);
        }
        let index = index.index() as usize;
        if self.meta.len() <= index {
            expand(&mut self.meta, index + 1);
        }
    }
    /// Advances the generation of `index` by `generations` and returns the
    /// new current id for that index. Logs a warning when the counter wraps,
    /// since stale ids may then alias the new one.
    ///
    /// # Safety
    /// The index is grown into bounds internally; the remaining obligation is
    /// presumably that the entity at `index` is actually being freed —
    /// confirm with callers.
    pub(crate) unsafe fn mark_free(&mut self, index: EntityIndex, generations: u32) -> Entity {
        self.ensure_index_is_valid(index);
        // SAFETY: `ensure_index_is_valid` just made `index` in-bounds.
        let meta = unsafe { self.meta.get_unchecked_mut(index.index() as usize) };
        let (new_generation, aliased) = meta.generation.after_versions_and_could_alias(generations);
        meta.generation = new_generation;
        if aliased {
            warn!("EntityIndex({index}) generation wrapped on Entities::free, aliasing may occur",);
        }
        Entity::from_index_and_generation(index, meta.generation)
    }
    /// Records where and when the entity at `index` was spawned or despawned.
    ///
    /// # Safety
    /// `index` must be in bounds of `self.meta`.
    #[inline]
    pub(crate) unsafe fn mark_spawned_or_despawned(
        &mut self,
        index: EntityIndex,
        by: MaybeLocation,
        tick: Tick,
    ) {
        // SAFETY: in-bounds per this function's contract.
        let meta = unsafe { self.meta.get_unchecked_mut(index.index() as usize) };
        meta.spawned_or_despawned = SpawnedOrDespawned { by, tick };
    }
    /// Returns the source location that last spawned or despawned `entity`,
    /// when location tracking is enabled.
    pub fn entity_get_spawned_or_despawned_by(
        &self,
        entity: Entity,
    ) -> MaybeLocation<Option<&'static Location<'static>>> {
        MaybeLocation::new_with_flattened(|| {
            self.entity_get_spawned_or_despawned(entity)
                .map(|spawned_or_despawned| spawned_or_despawned.by)
        })
    }
    /// Returns the tick at which `entity` was last spawned or despawned.
    pub fn entity_get_spawn_or_despawn_tick(&self, entity: Entity) -> Option<Tick> {
        self.entity_get_spawned_or_despawned(entity)
            .map(|spawned_or_despawned| spawned_or_despawned.tick)
    }
    /// Returns the spawn/despawn record for `entity` when the id is current,
    /// or when the index was freed exactly one generation ago and not
    /// re-spawned — so despawn info stays queryable through the stale id.
    #[inline]
    fn entity_get_spawned_or_despawned(&self, entity: Entity) -> Option<SpawnedOrDespawned> {
        self.meta
            .get(entity.index_u32() as usize)
            .filter(|meta| {
                (meta.generation == entity.generation)
                    || (meta.location.is_none()
                        && meta.generation == entity.generation.after_versions(1))
            })
            .map(|meta| meta.spawned_or_despawned)
    }
    /// Returns the spawn/despawn record without bounds or validity checks.
    ///
    /// # Safety
    /// `entity.index_u32()` must be in bounds of `self.meta`.
    #[inline]
    pub(crate) unsafe fn entity_get_spawned_or_despawned_unchecked(
        &self,
        entity: Entity,
    ) -> (MaybeLocation, Tick) {
        // SAFETY: in-bounds per this function's contract.
        let meta = unsafe { self.meta.get_unchecked(entity.index_u32() as usize) };
        (meta.spawned_or_despawned.by, meta.spawned_or_despawned.tick)
    }
    /// Forwards `check` to every stored spawn/despawn tick
    /// (see [`Tick::check_tick`]).
    #[inline]
    pub(crate) fn check_change_ticks(&mut self, check: CheckChangeTicks) {
        for meta in &mut self.meta {
            meta.spawned_or_despawned.tick.check_tick(check);
        }
    }
    /// Number of indices with stored metadata — not the number of live
    /// entities (see [`Self::count_spawned`] for that).
    #[inline]
    pub fn len(&self) -> u32 {
        self.meta.len() as u32
    }
    /// Returns `true` if no metadata is stored.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Counts entities that currently have a location.
    pub fn count_spawned(&self) -> u32 {
        self.meta
            .iter()
            .filter(|meta| meta.location.is_some())
            .count() as u32
    }
    /// Returns `true` if any entity currently has a location.
    pub fn any_spawned(&self) -> bool {
        self.meta.iter().any(|meta| meta.location.is_some())
    }
}
/// Error returned by [`Entities::check_can_spawn_at`].
#[derive(thiserror::Error, Debug, Clone, Copy, PartialEq, Eq)]
pub enum SpawnError {
    /// The id is stale: its index is now at a different generation.
    #[error("Invalid id: {0}")]
    Invalid(InvalidEntityError),
    /// The id is current but already has a location.
    #[error("The entity can not be spawned as it already has a location.")]
    AlreadySpawned,
}
/// Error for a stale [`Entity`] id: the generation stored for its index no
/// longer matches the id's generation.
#[derive(thiserror::Error, Debug, Clone, Copy, PartialEq, Eq)]
#[error(
    "The entity with ID {entity} is invalid; its index now has generation {current_generation}."
)]
pub struct InvalidEntityError {
    /// The stale id that was looked up.
    pub entity: Entity,
    /// The generation the index currently has.
    pub current_generation: EntityGeneration,
}
/// Error for an [`Entity`] id whose generation is current but which has no
/// location (it has not been spawned, or was despawned).
#[derive(thiserror::Error, Debug, Clone, Copy, PartialEq, Eq)]
pub struct EntityValidButNotSpawnedError {
    /// The id that was looked up.
    pub entity: Entity,
    /// Where the index was last despawned, when location tracking is enabled.
    pub location: MaybeLocation<&'static Location<'static>>,
}

impl fmt::Display for EntityValidButNotSpawnedError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let entity = self.entity;
        if let Some(location) = self.location.into_option() {
            write!(f, "The entity with ID {entity} is not spawned; its index was last despawned by {location}.")
        } else {
            write!(
                f,
                "The entity with ID {entity} is not spawned; enable `track_location` feature for more details."
            )
        }
    }
}
#[derive(thiserror::Error, Copy, Clone, Debug, Eq, PartialEq)]
pub enum EntityNotSpawnedError {
#[error("Entity despawned: {0}\nNote that interacting with a despawned entity is the most common cause of this error but there are others")]
Invalid(#[from] InvalidEntityError),
#[error("Entity not yet spawned: {0}\nNote that interacting with a not-yet-spawned entity is the most common cause of this error but there are others")]
ValidButNotSpawned(#[from] EntityValidButNotSpawnedError),
}
impl EntityNotSpawnedError {
pub fn entity(&self) -> Entity {
match self {
EntityNotSpawnedError::Invalid(err) => err.entity,
EntityNotSpawnedError::ValidButNotSpawned(err) => err.entity,
}
}
}
/// Per-index bookkeeping stored in [`Entities`].
#[derive(Copy, Clone, Debug)]
struct EntityMeta {
    // Current generation of the index.
    generation: EntityGeneration,
    // `Some` while the entity is spawned, `None` otherwise.
    location: Option<EntityLocation>,
    // Record of the most recent spawn or despawn touching this index.
    spawned_or_despawned: SpawnedOrDespawned,
}
/// Where (source location) and when (tick) an index was last spawned or
/// despawned.
#[derive(Copy, Clone, Debug)]
struct SpawnedOrDespawned {
    by: MaybeLocation,
    tick: Tick,
}
impl EntityMeta {
    /// State of an index that has never been spawned: first generation, no
    /// location. Used both as the implicit value for out-of-range indices and
    /// as the fill value when the metadata table grows.
    const FRESH: EntityMeta = EntityMeta {
        generation: EntityGeneration::FIRST,
        location: None,
        // NOTE(review): evaluated in a const context, so this presumably
        // records this declaration site (or nothing, when location tracking
        // is disabled) rather than any runtime caller — confirm against
        // `MaybeLocation::caller`.
        spawned_or_despawned: SpawnedOrDespawned {
            by: MaybeLocation::caller(),
            tick: Tick::new(0),
        },
    };
}
/// Where a spawned entity's components are stored: its archetype and the row
/// within it, plus the table and the row within that.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct EntityLocation {
    /// The archetype the entity belongs to.
    pub archetype_id: ArchetypeId,
    /// The entity's row within that archetype.
    pub archetype_row: ArchetypeRow,
    /// The table holding the entity's table components.
    pub table_id: TableId,
    /// The entity's row within that table.
    pub table_row: TableRow,
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloc::format;

    // `NonMaxU32` backing must make `Option<Entity>` free.
    #[test]
    fn entity_niche_optimization() {
        assert_eq!(size_of::<Entity>(), size_of::<Option<Entity>>());
    }

    // `to_bits`/`from_bits` must round-trip for both the index alone and a
    // full entity id.
    #[test]
    fn entity_bits_roundtrip() {
        let r = EntityIndex::from_raw_u32(0xDEADBEEF).unwrap();
        assert_eq!(EntityIndex::from_bits(r.to_bits()), r);
        let e = Entity::from_index_and_generation(
            EntityIndex::from_raw_u32(0xDEADBEEF).unwrap(),
            EntityGeneration::from_bits(0x5AADF00D),
        );
        assert_eq!(Entity::from_bits(e.to_bits()), e);
    }

    // Constructors must be usable in const contexts. Note the `!0x0000_00cc`
    // assertion: `from_bits` consumes the *internal* (inverted) NonMaxU32
    // encoding, not the logical index value.
    #[test]
    fn entity_const() {
        const C1: Entity = Entity::from_index(EntityIndex::from_raw_u32(42).unwrap());
        assert_eq!(42, C1.index_u32());
        assert_eq!(0, C1.generation().to_bits());
        const C2: Entity = Entity::from_bits(0x0000_00ff_0000_00cc);
        assert_eq!(!0x0000_00cc, C2.index_u32());
        assert_eq!(0x0000_00ff, C2.generation().to_bits());
        const C3: u32 = Entity::from_index(EntityIndex::from_raw_u32(33).unwrap()).index_u32();
        assert_eq!(33, C3);
        const C4: u32 = Entity::from_bits(0x00dd_00ff_1111_1111)
            .generation()
            .to_bits();
        assert_eq!(0x00dd_00ff, C4);
    }

    // Ordering is by packed bits: generation (high half) dominates the index.
    #[test]
    #[expect(
        clippy::nonminimal_bool,
        reason = "This intentionally tests all possible comparison operators as separate functions; thus, we don't want to rewrite these comparisons to use different operators."
    )]
    fn entity_comparison() {
        assert_eq!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            ),
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            )
        );
        assert_ne!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(789)
            ),
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            )
        );
        assert_ne!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            ),
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(789)
            )
        );
        assert_ne!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            ),
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(456).unwrap(),
                EntityGeneration::from_bits(123)
            )
        );
        assert!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            ) >= Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            )
        );
        assert!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            ) <= Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            )
        );
        assert!(
            !(Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            ) < Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            ))
        );
        assert!(
            !(Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            ) > Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(123).unwrap(),
                EntityGeneration::from_bits(456)
            ))
        );
        // Generation dominates: higher generation outranks a higher index.
        assert!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(9).unwrap(),
                EntityGeneration::from_bits(1)
            ) < Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(1).unwrap(),
                EntityGeneration::from_bits(9)
            )
        );
        assert!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(1).unwrap(),
                EntityGeneration::from_bits(9)
            ) > Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(9).unwrap(),
                EntityGeneration::from_bits(1)
            )
        );
        // Equal generations: order follows the internal (inverted) index
        // encoding, so a smaller logical index compares greater.
        assert!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(1).unwrap(),
                EntityGeneration::from_bits(1)
            ) > Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(2).unwrap(),
                EntityGeneration::from_bits(1)
            )
        );
        assert!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(1).unwrap(),
                EntityGeneration::from_bits(1)
            ) >= Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(2).unwrap(),
                EntityGeneration::from_bits(1)
            )
        );
        assert!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(2).unwrap(),
                EntityGeneration::from_bits(2)
            ) < Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(1).unwrap(),
                EntityGeneration::from_bits(2)
            )
        );
        assert!(
            Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(2).unwrap(),
                EntityGeneration::from_bits(2)
            ) <= Entity::from_index_and_generation(
                EntityIndex::from_raw_u32(1).unwrap(),
                EntityGeneration::from_bits(2)
            )
        );
    }

    // EntityHash property: ids differing only in the low byte hash to
    // consecutive values (cache-friendly for nearby ids).
    #[test]
    fn entity_hash_keeps_similar_ids_together() {
        use core::hash::BuildHasher;
        let hash = EntityHash;
        let first_id = 0xC0FFEE << 8;
        let first_hash = hash.hash_one(Entity::from_index(
            EntityIndex::from_raw_u32(first_id).unwrap(),
        ));
        for i in 1..=255 {
            let id = first_id + i;
            let hash = hash.hash_one(Entity::from_index(EntityIndex::from_raw_u32(id).unwrap()));
            assert_eq!(first_hash.wrapping_sub(hash) as u32, i);
        }
    }

    // EntityHash property: flipping any single id bit changes the top 7 hash
    // bits (good spread for hashbrown-style metadata).
    #[test]
    fn entity_hash_id_bitflip_affects_high_7_bits() {
        use core::hash::BuildHasher;
        let hash = EntityHash;
        let first_id = 0xC0FFEE;
        let first_hash = hash.hash_one(Entity::from_index(
            EntityIndex::from_raw_u32(first_id).unwrap(),
        )) >> 57;
        for bit in 0..u32::BITS {
            let id = first_id ^ (1 << bit);
            let hash =
                hash.hash_one(Entity::from_index(EntityIndex::from_raw_u32(id).unwrap())) >> 57;
            assert_ne!(hash, first_hash);
        }
    }

    // `cmp_approx` orders nearby generations correctly and flips once the
    // distance reaches DIFF_MAX.
    #[test]
    fn entity_generation_is_approximately_ordered() {
        use core::cmp::Ordering;
        let old = EntityGeneration::FIRST;
        let middle = old.after_versions(1);
        let younger_before_ord_wrap = middle.after_versions(EntityGeneration::DIFF_MAX);
        let younger_after_ord_wrap = younger_before_ord_wrap.after_versions(1);
        assert_eq!(middle.cmp_approx(&old), Ordering::Greater);
        assert_eq!(middle.cmp_approx(&middle), Ordering::Equal);
        assert_eq!(middle.cmp_approx(&younger_before_ord_wrap), Ordering::Less);
        assert_eq!(
            middle.cmp_approx(&younger_after_ord_wrap),
            Ordering::Greater
        );
    }

    // Debug output matches Display, including the PLACEHOLDER special case.
    #[test]
    fn entity_debug() {
        let entity = Entity::from_index(EntityIndex::from_raw_u32(42).unwrap());
        let string = format!("{entity:?}");
        assert_eq!(string, "42v0");
        let entity = Entity::PLACEHOLDER;
        let string = format!("{entity:?}");
        assert_eq!(string, "PLACEHOLDER");
    }

    // Display formats as `<index>v<generation>` and honors padding flags.
    #[test]
    fn entity_display() {
        let entity = Entity::from_index(EntityIndex::from_raw_u32(42).unwrap());
        let string = format!("{entity}");
        assert_eq!(string, "42v0");
        let padded_left = format!("{entity:<5}");
        assert_eq!(padded_left, "42v0 ");
        let padded_right = format!("{entity:>6}");
        assert_eq!(padded_right, " 42v0");
        let entity = Entity::PLACEHOLDER;
        let string = format!("{entity}");
        assert_eq!(string, "PLACEHOLDER");
    }

    // Allocations (single and batched, fresh and reused) never yield
    // duplicate entities.
    #[test]
    fn allocator() {
        let mut allocator = EntityAllocator::default();
        let mut entities = allocator.alloc_many(2048).collect::<Vec<_>>();
        for _ in 0..2048 {
            entities.push(allocator.alloc());
        }
        let pre_len = entities.len();
        entities.sort();
        entities.dedup();
        assert_eq!(pre_len, entities.len());
        for e in entities.drain(..) {
            allocator.free(e);
        }
        // 5000 > 4096 freed, so this mixes reused and brand-new entities.
        entities.extend(allocator.alloc_many(5000));
        let pre_len = entities.len();
        entities.sort();
        entities.dedup();
        assert_eq!(pre_len, entities.len());
    }

    // A batch allocation after freeing must reuse all freed indices.
    #[test]
    fn alloc_many_reuses_freed_entities() {
        let mut allocator = EntityAllocator::default();
        let first_batch: Vec<_> = allocator.alloc_many(5).collect();
        assert_eq!(first_batch.len(), 5);
        let first_indices: Vec<_> = first_batch.iter().map(|e| e.index_u32()).collect();
        for e in &first_batch {
            allocator.free(*e);
        }
        let second_batch: Vec<_> = allocator.alloc_many(5).collect();
        assert_eq!(second_batch.len(), 5);
        let intersection = first_indices
            .iter()
            .filter(|idx| second_batch.iter().any(|e| e.index_u32() == **idx))
            .count();
        assert_eq!(intersection, 5);
    }
}