use core::{marker::PhantomData, mem::MaybeUninit, ptr, ptr::NonNull};
use std::collections::BTreeMap;
use ahash::AHashMap as HashMap;
use hibitset::BitSetLike;
use crate::{
storage::{DistinctStorage, SharedGetMutStorage, SyncUnsafeCell, UnprotectedStorage},
world::Index,
};
/// Access to a storage's backing buffer as a contiguous slice.
///
/// `Element` is `T` for storages whose buffer is fully initialized
/// (`DenseVecStorage`, `DefaultVecStorage`) and `MaybeUninit<T>` for
/// `VecStorage`, whose buffer may contain uninitialized gaps.
pub trait SliceAccess<T> {
/// Element type yielded by the slices.
type Element;
/// Returns the backing buffer as an immutable slice.
fn as_slice(&self) -> &[Self::Element];
/// Returns the backing buffer as a mutable slice.
fn as_mut_slice(&mut self) -> &mut [Self::Element];
}
pub struct BTreeStorage<T>(BTreeMap<Index, SyncUnsafeCell<T>>);
impl<T> Default for BTreeStorage<T> {
fn default() -> Self {
Self(Default::default())
}
}
impl<T> UnprotectedStorage<T> for BTreeStorage<T> {
type AccessMut<'a> = &'a mut T where T: 'a;
// Drops every stored component. The `_has` bitset is ignored because the
// map itself knows exactly which entries it holds.
unsafe fn clean<B>(&mut self, _has: B)
where
B: BitSetLike,
{
self.0.clear();
}
// Panics if no component was ever inserted for `id` (map indexing).
unsafe fn get(&self, id: Index) -> &T {
let ptr = self.0[&id].get();
// SAFETY: deferred to this `unsafe fn`'s caller contract — no `&mut T`
// to this component may be alive while the returned `&T` exists.
unsafe { &*ptr }
}
// Panics if no component was ever inserted for `id` (`unwrap`).
unsafe fn get_mut(&mut self, id: Index) -> &mut T {
self.0.get_mut(&id).unwrap().get_mut()
}
unsafe fn insert(&mut self, id: Index, v: T) {
self.0.insert(id, SyncUnsafeCell::new(v));
}
// Panics if no component was ever inserted for `id` (`unwrap`).
unsafe fn remove(&mut self, id: Index) -> T {
self.0.remove(&id).unwrap().0.into_inner()
}
}
impl<T> SharedGetMutStorage<T> for BTreeStorage<T> {
// Hands out `&mut T` from `&self` through the interior `SyncUnsafeCell`.
unsafe fn shared_get_mut(&self, id: Index) -> &mut T {
let ptr = self.0[&id].get();
// SAFETY: deferred to the caller's contract — the returned `&mut T`
// must not alias any other live reference to this component.
unsafe { &mut *ptr }
}
}
unsafe impl<T> DistinctStorage for BTreeStorage<T> {}
pub struct HashMapStorage<T>(HashMap<Index, SyncUnsafeCell<T>>);
impl<T> Default for HashMapStorage<T> {
fn default() -> Self {
Self(Default::default())
}
}
impl<T> UnprotectedStorage<T> for HashMapStorage<T> {
type AccessMut<'a> = &'a mut T where T: 'a;
// Drops every stored component. The `_has` bitset is ignored because the
// map itself knows exactly which entries it holds.
unsafe fn clean<B>(&mut self, _has: B)
where
B: BitSetLike,
{
self.0.clear();
}
// Panics if no component was ever inserted for `id` (map indexing).
unsafe fn get(&self, id: Index) -> &T {
let ptr = self.0[&id].get();
// SAFETY: deferred to this `unsafe fn`'s caller contract — no `&mut T`
// to this component may be alive while the returned `&T` exists.
unsafe { &*ptr }
}
// Panics if no component was ever inserted for `id` (`unwrap`).
unsafe fn get_mut(&mut self, id: Index) -> &mut T {
self.0.get_mut(&id).unwrap().get_mut()
}
unsafe fn insert(&mut self, id: Index, v: T) {
self.0.insert(id, SyncUnsafeCell::new(v));
}
// Panics if no component was ever inserted for `id` (`unwrap`).
unsafe fn remove(&mut self, id: Index) -> T {
self.0.remove(&id).unwrap().0.into_inner()
}
}
impl<T> SharedGetMutStorage<T> for HashMapStorage<T> {
// Hands out `&mut T` from `&self` through the interior `SyncUnsafeCell`.
unsafe fn shared_get_mut(&self, id: Index) -> &mut T {
let ptr = self.0[&id].get();
// SAFETY: deferred to the caller's contract — the returned `&mut T`
// must not alias any other live reference to this component.
unsafe { &mut *ptr }
}
}
unsafe impl<T> DistinctStorage for HashMapStorage<T> {}
/// Dense component storage: components are packed contiguously in `data`,
/// with two index tables mapping entity id -> dense slot (`data_id`) and
/// dense slot -> entity id (`entity_id`).
pub struct DenseVecStorage<T> {
// Densely packed components (order changes on `remove` via swap-remove).
data: Vec<SyncUnsafeCell<T>>,
// For each dense slot, the entity `Index` whose component lives there.
entity_id: Vec<Index>,
// For each entity `Index`, the dense slot of its component. Entries for
// ids that were never inserted are uninitialized and must not be read.
data_id: Vec<MaybeUninit<Index>>,
}
impl<T> Default for DenseVecStorage<T> {
fn default() -> Self {
Self {
data: Default::default(),
entity_id: Default::default(),
data_id: Default::default(),
}
}
}
impl<T> SliceAccess<T> for DenseVecStorage<T> {
type Element = T;
/// Exposes the dense component buffer as `&[T]`. Every element is an
/// initialized component, but slot order is storage-internal (see
/// `entity_id` for the slot -> entity mapping).
#[inline]
fn as_slice(&self) -> &[Self::Element] {
let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.data.as_slice()).get();
// SAFETY: `&self` prevents any `&mut` borrow of the storage itself;
// callers of the cell-based unsafe accessors must uphold non-aliasing.
unsafe { &*unsafe_cell_slice_ptr }
}
/// Exposes the dense component buffer as `&mut [T]`.
#[inline]
fn as_mut_slice(&mut self) -> &mut [Self::Element] {
SyncUnsafeCell::as_slice_mut(self.data.as_mut_slice())
}
}
impl<T> UnprotectedStorage<T> for DenseVecStorage<T> {
type AccessMut<'a> = &'a mut T where T: 'a;
// Drops everything. The bitset is ignored: `data` holds exactly the live
// components, and `data_id` entries are only meaningful for live ids.
unsafe fn clean<B>(&mut self, _has: B)
where
B: BitSetLike,
{
self.data_id.clear();
self.entity_id.clear();
self.data.clear();
}
unsafe fn get(&self, id: Index) -> &T {
// SAFETY (caller contract): a component was inserted for `id`, so its
// `data_id` entry is in bounds, initialized, and names a valid dense slot.
let did = unsafe { self.data_id.get_unchecked(id as usize).assume_init() };
let ptr = unsafe { self.data.get_unchecked(did as usize) }.get();
// SAFETY: caller must ensure no aliasing `&mut T` exists.
unsafe { &*ptr }
}
unsafe fn get_mut(&mut self, id: Index) -> &mut T {
// SAFETY (caller contract): same in-bounds/initialized requirement as `get`.
let did = unsafe { self.data_id.get_unchecked(id as usize).assume_init() };
unsafe { self.data.get_unchecked_mut(did as usize) }.get_mut()
}
unsafe fn insert(&mut self, id: Index, v: T) {
// Clamp so the `as usize` cast cannot silently truncate on targets where
// `usize` is narrower than `Index`.
let id = if Index::BITS > usize::BITS {
core::cmp::min(id, usize::MAX as Index) as usize
} else {
id as usize
};
if self.data_id.len() <= id {
// Grow the sparse table to cover `id`. `saturating_add` avoids overflow
// for `id == usize::MAX`; such a `reserve` request aborts before the
// `set_len` below could run with a wrapped length.
let delta = if Index::BITS >= usize::BITS {
id.saturating_add(1)
} else {
id + 1
} - self.data_id.len();
self.data_id.reserve(delta);
// New slots stay uninitialized — legal for `MaybeUninit`, and readers
// are only allowed to touch slots of inserted ids.
unsafe { self.data_id.set_len(id + 1) };
}
// Map `id` to the next dense slot, then append the component.
unsafe { self.data_id.get_unchecked_mut(id) }.write(self.data.len() as Index);
self.entity_id.push(id as Index);
self.data.push(SyncUnsafeCell::new(v));
}
unsafe fn remove(&mut self, id: Index) -> T {
// SAFETY (caller contract): `id` has a component, so its sparse entry is
// in bounds and initialized.
let did = unsafe { self.data_id.get_unchecked(id as usize).assume_init() };
// swap_remove will move the last dense element into slot `did`; repoint
// that element's owner at its new slot first.
let last = *self.entity_id.last().unwrap();
unsafe { self.data_id.get_unchecked_mut(last as usize) }.write(did);
self.entity_id.swap_remove(did as usize);
self.data.swap_remove(did as usize).0.into_inner()
}
}
impl<T> SharedGetMutStorage<T> for DenseVecStorage<T> {
// Hands out `&mut T` from `&self` through the interior `SyncUnsafeCell`.
unsafe fn shared_get_mut(&self, id: Index) -> &mut T {
// SAFETY (caller contract): `id` has a component; and the returned
// `&mut T` must not alias any other live reference to it.
let did = unsafe { self.data_id.get_unchecked(id as usize).assume_init() };
let ptr = unsafe { self.data.get_unchecked(did as usize) }.get();
unsafe { &mut *ptr }
}
}
unsafe impl<T> DistinctStorage for DenseVecStorage<T> {}
pub struct NullStorage<T>(PhantomData<T>);
impl<T> Default for NullStorage<T> {
fn default() -> Self {
use core::mem::size_of;
assert_eq!(size_of::<T>(), 0, "NullStorage can only be used with ZST");
NullStorage(PhantomData)
}
}
impl<T> UnprotectedStorage<T> for NullStorage<T> {
type AccessMut<'a> = &'a mut T where T: 'a;
// Although no bytes are stored, each logically-present component still owes
// a `Drop`: re-materialize one value per set bit and drop it.
unsafe fn clean<B>(&mut self, has: B)
where
B: BitSetLike,
{
for id in has.iter() {
unsafe { self.remove(id) };
}
}
unsafe fn get(&self, _: Index) -> &T {
// SAFETY: `T` is zero-sized (asserted in `Default::default`), so a
// dangling but aligned pointer is a valid reference.
unsafe { &*NonNull::dangling().as_ptr() }
}
unsafe fn get_mut(&mut self, id: Index) -> &mut T {
unsafe { self.shared_get_mut(id) }
}
// Ownership of `v` conceptually moves into the storage; running its
// destructor here would double-drop relative to `remove`/`clean`, so forget it.
unsafe fn insert(&mut self, _: Index, v: T) {
core::mem::forget(v)
}
unsafe fn remove(&mut self, _: Index) -> T {
// SAFETY: reading a ZST from a dangling aligned pointer is valid; this
// recreates the value whose ownership `insert` absorbed via `forget`.
unsafe { ptr::read(NonNull::dangling().as_ptr()) }
}
}
impl<T> SharedGetMutStorage<T> for NullStorage<T> {
unsafe fn shared_get_mut(&self, _: Index) -> &mut T {
// SAFETY: `T` is zero-sized (asserted in `Default::default`), so a
// dangling but aligned pointer yields a valid `&mut` to a ZST.
unsafe { &mut *NonNull::dangling().as_ptr() }
}
}
unsafe impl<T> DistinctStorage for NullStorage<T> {}
pub struct VecStorage<T>(Vec<SyncUnsafeCell<MaybeUninit<T>>>);
impl<T> Default for VecStorage<T> {
fn default() -> Self {
Self(Default::default())
}
}
impl<T> SliceAccess<T> for VecStorage<T> {
// Elements are `MaybeUninit` because slots without a component hold no
// initialized value; callers must consult the external liveness bitset.
type Element = MaybeUninit<T>;
#[inline]
fn as_slice(&self) -> &[Self::Element] {
let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.0.as_slice()).get();
// SAFETY: `&self` prevents any `&mut` borrow of the storage itself;
// callers of the cell-based unsafe accessors must uphold non-aliasing.
unsafe { &*unsafe_cell_slice_ptr }
}
#[inline]
fn as_mut_slice(&mut self) -> &mut [Self::Element] {
SyncUnsafeCell::as_slice_mut(self.0.as_mut_slice())
}
}
impl<T> UnprotectedStorage<T> for VecStorage<T> {
type AccessMut<'a> = &'a mut T where T: 'a;
// Drops exactly the components named by `has`; other slots are
// uninitialized and must not be dropped.
unsafe fn clean<B>(&mut self, has: B)
where
B: BitSetLike,
{
for (i, v) in self.0.iter_mut().enumerate() {
// Compile-time pin that `Index` is `u32`, keeping the `i as u32` cast
// below in sync with the crate's index type.
const _: Index = 0u32;
if has.contains(i as u32) {
let v_inner = v.get_mut();
// SAFETY: `has` marks this slot as holding an initialized component.
unsafe { v_inner.assume_init_drop() };
}
}
}
unsafe fn get(&self, id: Index) -> &T {
// SAFETY (caller contract): slot `id` is in bounds and was initialized by
// `insert`; no aliasing `&mut T` may be alive.
let ptr = unsafe { self.0.get_unchecked(id as usize) }.get();
let maybe_uninit = unsafe { &*ptr };
unsafe { maybe_uninit.assume_init_ref() }
}
unsafe fn get_mut(&mut self, id: Index) -> &mut T {
// SAFETY (caller contract): slot `id` is in bounds and initialized.
let maybe_uninit = unsafe { self.0.get_unchecked_mut(id as usize) }.get_mut();
unsafe { maybe_uninit.assume_init_mut() }
}
#[allow(clippy::uninit_vec)]
unsafe fn insert(&mut self, id: Index, v: T) {
// Clamp so the `as usize` cast cannot silently truncate on targets where
// `usize` is narrower than `Index`.
let id = if Index::BITS > usize::BITS {
core::cmp::min(id, usize::MAX as Index) as usize
} else {
id as usize
};
if self.0.len() <= id {
// Grow to cover `id`. `saturating_add` avoids overflow at
// `id == usize::MAX`; such a `reserve` aborts before `set_len` runs.
let delta = if Index::BITS >= usize::BITS {
id.saturating_add(1)
} else {
id + 1
} - self.0.len();
self.0.reserve(delta);
// New slots stay uninitialized — legal because elements are
// `MaybeUninit` and only inserted slots may be read.
unsafe { self.0.set_len(id + 1) };
}
// Overwrite the slot without dropping any previous contents; `write`
// treats the slot as uninitialized, which matches the storage protocol.
unsafe { self.0.get_unchecked_mut(id) }.get_mut().write(v);
}
unsafe fn remove(&mut self, id: Index) -> T {
// Move the component out by bitwise copy; the slot must afterwards be
// treated as uninitialized again by callers.
let component_ref = unsafe { self.get(id) };
unsafe { ptr::read(component_ref) }
}
}
impl<T> SharedGetMutStorage<T> for VecStorage<T> {
// Hands out `&mut T` from `&self` through the interior `SyncUnsafeCell`.
unsafe fn shared_get_mut(&self, id: Index) -> &mut T {
// SAFETY (caller contract): slot `id` is in bounds and initialized, and
// the returned `&mut T` must not alias any other live reference to it.
let ptr = unsafe { self.0.get_unchecked(id as usize) }.get();
let maybe_uninit = unsafe { &mut *ptr };
unsafe { maybe_uninit.assume_init_mut() }
}
}
unsafe impl<T> DistinctStorage for VecStorage<T> {}
pub struct DefaultVecStorage<T>(Vec<SyncUnsafeCell<T>>);
impl<T> Default for DefaultVecStorage<T> {
fn default() -> Self {
Self(Default::default())
}
}
impl<T> SliceAccess<T> for DefaultVecStorage<T> {
// Every slot is initialized (gaps hold `T::default()`), so plain `T` is fine.
type Element = T;
#[inline]
fn as_slice(&self) -> &[Self::Element] {
let unsafe_cell_slice_ptr = SyncUnsafeCell::as_cell_of_slice(self.0.as_slice()).get();
// SAFETY: `&self` prevents any `&mut` borrow of the storage itself;
// callers of the cell-based unsafe accessors must uphold non-aliasing.
unsafe { &*unsafe_cell_slice_ptr }
}
#[inline]
fn as_mut_slice(&mut self) -> &mut [Self::Element] {
SyncUnsafeCell::as_slice_mut(self.0.as_mut_slice())
}
}
impl<T> UnprotectedStorage<T> for DefaultVecStorage<T>
where
T: Default,
{
type AccessMut<'a> = &'a mut T where T: 'a;
// Drops everything (components and default-valued fillers alike); the
// bitset is unnecessary since every slot is initialized.
unsafe fn clean<B>(&mut self, _has: B)
where
B: BitSetLike,
{
self.0.clear();
}
unsafe fn get(&self, id: Index) -> &T {
// SAFETY (caller contract): `id` is within the vector's length; no
// aliasing `&mut T` may be alive.
let ptr = unsafe { self.0.get_unchecked(id as usize) }.get();
unsafe { &*ptr }
}
unsafe fn get_mut(&mut self, id: Index) -> &mut T {
// SAFETY (caller contract): `id` is within the vector's length.
unsafe { self.0.get_unchecked_mut(id as usize) }.get_mut()
}
unsafe fn insert(&mut self, id: Index, v: T) {
// Clamp so the `as usize` cast cannot silently truncate on targets where
// `usize` is narrower than `Index`.
let id = if Index::BITS > usize::BITS {
core::cmp::min(id, usize::MAX as Index) as usize
} else {
id as usize
};
if self.0.len() <= id {
// Fill the gap up to `id` with defaults, then place `v` at slot `id`
// (final length is `id + 1`).
self.0.resize_with(id, Default::default);
self.0.push(SyncUnsafeCell::new(v))
} else {
// Slot already initialized: plain assignment drops the old value.
*self.0[id].get_mut() = v;
}
}
unsafe fn remove(&mut self, id: Index) -> T {
// Swap a fresh `T::default()` into the slot and return the old value;
// the slot stays initialized, unlike `VecStorage`.
core::mem::take(unsafe { self.0.get_unchecked_mut(id as usize) }.get_mut())
}
}
impl<T> SharedGetMutStorage<T> for DefaultVecStorage<T>
where
T: Default,
{
// Hands out `&mut T` from `&self` through the interior `SyncUnsafeCell`.
unsafe fn shared_get_mut(&self, id: Index) -> &mut T {
// SAFETY (caller contract): `id` is within the vector's length, and the
// returned `&mut T` must not alias any other live reference to it.
let ptr = unsafe { self.0.get_unchecked(id as usize) }.get();
unsafe { &mut *ptr }
}
}
unsafe impl<T> DistinctStorage for DefaultVecStorage<T> {}