use std::{
iter,
num::NonZeroI32,
sync::atomic::{AtomicU32, Ordering},
u32,
};
use hibitset::{AtomicBitSet, BitSet, BitSetLike, BitSetOr};
use thiserror::Error;
use crate::join::{Index, Join};
/// Error returned when an operation targets an [`Entity`] that is no longer
/// alive — either its index has been freed or the handle's generation does
/// not match the allocator's current generation for that index.
#[derive(Debug, Error)]
#[error("Entity is no longer alive or has a mismatched generation")]
pub struct WrongGeneration;
/// A handle to an allocated entity: a slot `index` paired with the
/// generation that was (or will be, for atomically raised entities) live
/// when the handle was created. Comparing the stored generation against the
/// allocator's current one detects stale handles.
#[derive(Clone, Copy, Debug, Hash, Eq, Ord, PartialEq, PartialOrd)]
pub struct Entity {
    // Slot in the allocator's tables.
    index: Index,
    // Generation observed at creation; always a positive ("alive") generation.
    generation: AliveGeneration,
}
impl Entity {
#[inline]
pub fn index(self) -> Index {
self.index
}
#[inline]
pub fn generation(self) -> u32 {
self.generation.id() as u32
}
fn new(index: Index, generation: AliveGeneration) -> Entity {
Entity { index, generation }
}
}
/// Bitset covering every live entity: those committed into `alive` plus
/// those raised atomically via `allocate_atomic` but not yet merged.
pub type LiveBitSet<'a> = BitSetOr<&'a BitSet, &'a AtomicBitSet>;
/// Allocator for entity indices and their generations.
///
/// Supports both exclusive (`&mut self`) allocation/killing and atomic
/// (`&self`) variants; the atomic effects are committed by `merge_atomic`.
#[derive(Debug, Default)]
pub struct Allocator {
    // Committed generation per index; grown lazily by `update_generation_length`.
    generations: Vec<Generation>,
    // Indices alive via the exclusive path (or merged from the atomic one).
    alive: BitSet,
    // Indices allocated atomically but not yet merged into `alive`.
    raised_atomic: AtomicBitSet,
    // Indices killed atomically but not yet merged.
    killed_atomic: AtomicBitSet,
    // Recycled (freed) indices available for reuse.
    cache: EntityCache,
    // High-water mark: number of distinct indices handed out so far.
    index_len: AtomicIndex,
}
impl Allocator {
    /// Creates an empty allocator.
    pub fn new() -> Allocator {
        Allocator::default()
    }

    /// Synchronously kills `entity`, recycling its index.
    ///
    /// Returns [`WrongGeneration`] if the entity is not currently alive
    /// (freed index or stale generation).
    #[inline]
    pub fn kill(&mut self, entity: Entity) -> Result<(), WrongGeneration> {
        if !self.is_alive(entity) {
            return Err(WrongGeneration);
        }
        self.alive.remove(entity.index);
        // Any pending atomic kill for this index is now moot.
        self.killed_atomic.remove(entity.index);
        if self.raised_atomic.remove(entity.index) {
            // The entity was allocated atomically and never merged, so its
            // table entry may not exist yet and still holds the old (dead)
            // generation: grow the table, commit the raise, then kill it.
            self.update_generation_length();
            let generation = &mut self.generations[entity.index as usize];
            debug_assert!(!generation.is_alive());
            *generation = generation.raised().generation().killed();
        } else {
            let generation = &mut self.generations[entity.index as usize];
            debug_assert!(generation.is_alive());
            *generation = generation.killed();
        }
        // Make the index available for the next allocation.
        self.cache.push(entity.index);
        Ok(())
    }

    /// Marks `e` to be killed on the next `merge_atomic`, using only
    /// shared access. Fails if `e` is not currently alive.
    #[inline]
    pub fn kill_atomic(&self, e: Entity) -> Result<(), WrongGeneration> {
        if !self.is_alive(e) {
            return Err(WrongGeneration);
        }
        self.killed_atomic.add_atomic(e.index());
        Ok(())
    }

    /// Whether `e` is the entity currently live at its index (index alive
    /// and generation matching).
    #[inline]
    pub fn is_alive(&self, e: Entity) -> bool {
        self.entity(e.index()) == Some(e)
    }

    /// Returns the entity currently live at `index`, if any — including
    /// entities raised atomically but not yet merged.
    #[inline]
    pub fn entity(&self, index: Index) -> Option<Entity> {
        let generation = self.generation(index);
        if let Some(alive) = generation.to_alive() {
            Some(Entity::new(index, alive))
        } else if self.raised_atomic.contains(index) {
            // Raised atomically: the committed generation is still dead, so
            // report the generation the upcoming merge will assign.
            Some(Entity::new(index, generation.raised()))
        } else {
            None
        }
    }

    /// Allocates a new entity, reusing a cached index when one is free.
    ///
    /// Panics ("no entity left to allocate") if the index space is
    /// exhausted.
    #[inline]
    pub fn allocate(&mut self) -> Entity {
        let index = self.cache.pop().unwrap_or_else(|| {
            // No recycled index: mint a fresh one from the high-water mark
            // and grow the generation table to cover it.
            let index = *self.index_len.get_mut();
            let index_len = index.checked_add(1).expect("no entity left to allocate");
            *self.index_len.get_mut() = index_len;
            self.update_generation_length();
            index
        });
        self.alive.add(index);
        let generation = &mut self.generations[index as usize];
        let raised = generation.raised();
        *generation = raised.generation();
        Entity::new(index, raised)
    }

    /// Allocates an entity with only shared access; the allocation is
    /// fully committed (generation written, `alive` bit set) on the next
    /// `merge_atomic`.
    ///
    /// Panics ("no entity left to allocate") if the index space is
    /// exhausted.
    #[inline]
    pub fn allocate_atomic(&self) -> Entity {
        let index = self.cache.pop_atomic().unwrap_or_else(|| {
            atomic_increment(&self.index_len).expect("no entity left to allocate")
        });
        self.raised_atomic.add_atomic(index);
        // The generation table is untouched here (no `&mut self`); the
        // handle carries the generation the merge will write.
        Entity::new(index, self.generation(index).raised())
    }

    /// Bitset of all live entities, committed or atomically raised.
    #[inline]
    pub fn live_bitset(&self) -> LiveBitSet {
        BitSetOr(&self.alive, &self.raised_atomic)
    }

    /// Commits all pending atomic allocations and kills.
    ///
    /// `killed` is cleared and then filled with every entity killed
    /// atomically since the last merge; those indices are recycled.
    pub fn merge_atomic(&mut self, killed: &mut Vec<Entity>) {
        killed.clear();
        self.update_generation_length();
        // Commit atomic raises first: write the raised generation and mark
        // the index alive.
        for index in (&self.raised_atomic).iter() {
            let generation = &mut self.generations[index as usize];
            *generation = generation.raised().generation();
            self.alive.add(index);
        }
        self.raised_atomic.clear();
        // Then commit atomic kills. Because the raise pass ran first, an
        // index raised and killed in the same window already has an alive
        // generation here, so `to_alive().unwrap()` does not fail.
        for index in (&self.killed_atomic).iter() {
            self.alive.remove(index);
            let generation = &mut self.generations[index as usize];
            killed.push(Entity::new(index, generation.to_alive().unwrap()));
            *generation = generation.killed();
        }
        self.killed_atomic.clear();
        // Recycle the freed indices.
        self.cache.extend(killed.iter().map(|e| e.index));
    }

    /// Committed generation for `index`; indices beyond the table read as
    /// `Generation::zero()` (never allocated).
    fn generation(&self, index: Index) -> Generation {
        self.generations
            .get(index as usize)
            .copied()
            .unwrap_or(Generation::zero())
    }

    /// Grows the generation table to cover every index handed out so far.
    fn update_generation_length(&mut self) {
        let index_len = *self.index_len.get_mut() as usize;
        if self.generations.len() < index_len {
            self.generations.resize_with(index_len, Default::default);
        }
    }
}
impl<'a> Join for &'a Allocator {
    type Item = Entity;
    type Access = &'a Allocator;
    type Mask = LiveBitSet<'a>;

    /// Opens the join over every live entity (committed or atomically raised).
    fn open(self) -> (Self::Mask, Self::Access) {
        let mask = self.live_bitset();
        (mask, self)
    }

    /// Reconstructs the `Entity` handle for an index present in the mask.
    ///
    /// `raised()` is the identity for an already-alive generation, and for
    /// an atomically-raised index it yields the generation the merge will
    /// assign.
    unsafe fn get(access: &Self::Access, index: Index) -> Self::Item {
        let generation = access.generation(index).raised();
        Entity::new(index, generation)
    }
}
/// Cache of freed entity indices awaiting reuse.
#[derive(Default, Debug)]
struct EntityCache {
    // Freed indices; only entries at positions < `len` are valid, entries
    // past `len` are stale leftovers from `pop_atomic`.
    cache: Vec<Index>,
    // Atomic count of valid entries; decremented lock-free by `pop_atomic`.
    len: AtomicIndex,
}
impl EntityCache {
    /// Returns a single freed index to the cache.
    fn push(&mut self, index: Index) {
        self.extend(iter::once(index));
    }

    /// Pops an index with exclusive access, first reconciling the vector
    /// with any entries consumed concurrently by `pop_atomic`.
    fn pop(&mut self) -> Option<Index> {
        self.maintain();
        let x = self.cache.pop();
        *self.len.get_mut() = self.cache.len() as Index;
        x
    }

    /// Lock-free pop: claims a slot by atomically decrementing `len`, then
    /// reads the claimed element. The vector is not shrunk here — stale
    /// entries past `len` are dropped later by `maintain`. Relies on the
    /// invariant `len <= cache.len()`, which `pop`/`extend` re-establish.
    fn pop_atomic(&self) -> Option<Index> {
        atomic_decrement(&self.len).map(|x| self.cache[(x - 1) as usize])
    }

    /// Truncates the vector down to the atomic `len`, discarding entries
    /// already claimed by `pop_atomic`.
    fn maintain(&mut self) {
        self.cache.truncate(*self.len.get_mut() as usize);
    }
}
impl Extend<Index> for EntityCache {
    /// Appends freed indices, keeping the atomic length in sync with the
    /// backing vector.
    fn extend<T: IntoIterator<Item = Index>>(&mut self, iter: T) {
        // Drop entries already consumed by `pop_atomic` before growing.
        self.maintain();
        self.cache.extend(iter);
        let synced_len = self.cache.len() as Index;
        *self.len.get_mut() = synced_len;
    }
}
// Largest usable entity index; allocation panics once the counter saturates here.
const MAX_INDEX: Index = u32::MAX;
// Atomic counterpart of `Index`, used for the lock-free counters.
type AtomicIndex = AtomicU32;
// Signed generation id: positive = alive, zero or negative = dead.
type GenId = i32;
// Nonzero id used for generations known to be alive.
type NZGenId = NonZeroI32;
/// Per-index generation counter. A positive id denotes a live entity; zero
/// (never allocated, the `Default`) or a negative id denotes a dead slot.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default)]
struct Generation(GenId);
impl Generation {
    /// The initial generation of a never-allocated slot.
    fn zero() -> Generation {
        Generation(0)
    }

    /// Raw signed id: positive = alive, zero or negative = dead.
    fn id(self) -> GenId {
        self.0
    }

    /// Whether this generation currently denotes a live entity.
    fn is_alive(self) -> bool {
        self.0 > 0
    }

    /// Converts to an [`AliveGeneration`] if this generation is alive.
    fn to_alive(self) -> Option<AliveGeneration> {
        if self.0 > 0 {
            // A positive id is necessarily nonzero, so the safe constructor
            // cannot fail here; no `unsafe` required.
            Some(AliveGeneration(
                NZGenId::new(self.0).expect("positive generation id is nonzero"),
            ))
        } else {
            None
        }
    }

    /// Marks a live generation as dead by negating its id; already-dead
    /// generations are returned unchanged.
    fn killed(self) -> Generation {
        if self.is_alive() {
            Generation(-self.id())
        } else {
            self
        }
    }

    /// Next alive generation: the identity for an already-alive generation,
    /// otherwise `1 - id` (dead ids are <= 0, so the result is positive).
    ///
    /// Panics with "generation overflow" if the raised id would exceed
    /// `GenId::MAX`.
    fn raised(self) -> AliveGeneration {
        let id = if self.0 > 0 {
            self.0
        } else {
            (1 as GenId)
                .checked_sub(self.id())
                .expect("generation overflow")
        };
        // Both branches produce a strictly positive id, so the safe
        // constructor cannot fail.
        AliveGeneration(NZGenId::new(id).expect("raised generation id is positive"))
    }
}
/// A generation known to be alive; its nonzero id lets `Option`s of types
/// containing it use the `NonZeroI32` niche (no size overhead).
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
struct AliveGeneration(NZGenId);
impl AliveGeneration {
    /// Raw (positive) generation id.
    fn id(self) -> GenId {
        self.0.get()
    }

    /// Widens back into a plain `Generation`.
    fn generation(self) -> Generation {
        Generation(self.id())
    }
}
/// Atomically bumps `i` by one, returning the previous value, or `None`
/// when the counter is saturated at `MAX_INDEX` (no index left to mint).
fn atomic_increment(i: &AtomicIndex) -> Option<Index> {
    // `fetch_update` runs the compare-exchange retry loop for us; the
    // closure vetoes the update (yielding `None`) once the counter hits
    // MAX_INDEX, and on success the previous value is returned.
    i.fetch_update(Ordering::Relaxed, Ordering::Relaxed, |prev| {
        if prev == MAX_INDEX {
            None
        } else {
            Some(prev + 1)
        }
    })
    .ok()
}
/// Atomically lowers `i` by one, returning the previous value, or `None`
/// when the counter is already zero.
fn atomic_decrement(i: &AtomicIndex) -> Option<Index> {
    // `checked_sub(1)` is `None` exactly at zero, which makes
    // `fetch_update` leave the counter untouched and report failure;
    // otherwise the previous value is returned after the decrement.
    i.fetch_update(Ordering::Relaxed, Ordering::Relaxed, |prev| prev.checked_sub(1))
        .ok()
}