use core::alloc::Layout;
use core::mem::{self, align_of};
use core::ptr::{self, NonNull};
use alloc::alloc::{dealloc, handle_alloc_error, realloc};
use alloc::collections::BTreeMap;
use crate::component::{ComponentDesc, ComponentKey, ComponentValue};
use crate::format::MissingDebug;
use crate::metadata::debuggable;
use crate::{metadata, Component, Entity};
/// Byte offset into a [`BufferStorage`] allocation.
type Offset = usize;

/// Raw, type-erased bump storage backing the component buffers.
///
/// Values of arbitrary layouts are written at aligned offsets. The storage
/// only manages raw memory: it never runs destructors for the values it
/// holds — owners must drop or take them via their offsets before resetting.
pub(crate) struct BufferStorage {
    // Base of the backing allocation; dangling while `layout.size() == 0`.
    data: NonNull<u8>,
    // End of the used region, in bytes; the next allocation starts after it.
    cursor: usize,
    // Layout of the current backing allocation (size 0 before first growth).
    layout: Layout,
}
impl BufferStorage {
    /// Creates an empty storage with no backing allocation.
    ///
    /// `data` stays dangling until the first [`Self::allocate`] grows the buffer.
    fn new() -> Self {
        Self {
            data: NonNull::dangling(),
            cursor: 0,
            layout: Layout::from_size_align(0, align_of::<u8>()).unwrap(),
        }
    }

    /// Reserves space for one value of `item_layout`, growing (and
    /// re-aligning, if needed) the backing allocation, and returns the
    /// aligned byte offset at which the caller may write the value.
    ///
    /// NOTE: the offset always advances by at least one byte, even when the
    /// cursor is already aligned. This wastes up to `align` bytes per item,
    /// but it guarantees every allocation a distinct, nonzero offset — which
    /// `MultiComponentBuffer::drops` (a map keyed by offset, see its
    /// `assert!(old.is_none())`) relies on, including for zero-sized types.
    fn allocate(&mut self, item_layout: Layout) -> Offset {
        // Round the cursor up past the next multiple of `item_layout.align()`.
        let new_offset = self.cursor + (item_layout.align() - self.cursor % item_layout.align());
        let new_end = new_offset + item_layout.size();

        // Grow when the item does not fit or a stricter alignment is needed.
        // `>=` is conservative (reallocates even on an exact fit) and, given
        // the padding above, `new_end` is always >= 1, so `new_end != 0` is
        // purely defensive.
        if (new_end >= self.layout.size() && new_end != 0)
            || self.layout.align() < item_layout.align()
        {
            let new_size = new_end.next_power_of_two();
            let new_align = self.layout.align().max(item_layout.align());
            let new_layout = Layout::from_size_align(new_size, new_align).unwrap();

            let new_data = if self.layout.size() == 0 {
                // First real allocation; nothing to copy over.
                match NonNull::new(unsafe { alloc::alloc::alloc(new_layout) }) {
                    Some(v) => v,
                    None => handle_alloc_error(new_layout),
                }
            } else if new_align != self.layout.align() {
                // `realloc` cannot change alignment, so allocate a fresh
                // block, copy the initialized prefix, and free the old one.
                unsafe {
                    let old_ptr = self.data.as_ptr();
                    let new_ptr = match NonNull::new(alloc::alloc::alloc(new_layout)) {
                        Some(v) => v,
                        None => handle_alloc_error(new_layout),
                    };

                    ptr::copy_nonoverlapping(old_ptr, new_ptr.as_ptr(), self.cursor);
                    dealloc(old_ptr, self.layout);
                    new_ptr
                }
            } else {
                // Same alignment: let the allocator grow in place if it can.
                unsafe {
                    match NonNull::new(realloc(self.data.as_ptr(), self.layout, new_size)) {
                        Some(v) => v,
                        None => alloc::alloc::handle_alloc_error(self.layout),
                    }
                }
            };

            self.layout = new_layout;
            self.data = new_data;
        }

        self.cursor = new_end;
        new_offset
    }

    /// Moves the value at `offset` out of the storage.
    ///
    /// # Safety
    /// A live value of type `T` must be stored at `offset`, and it must not
    /// be read or taken again afterwards.
    pub(crate) unsafe fn take<T>(&mut self, offset: Offset) -> T {
        core::ptr::read(self.data.as_ptr().add(offset).cast::<T>())
    }

    /// Overwrites the value at `offset` and returns the previous one.
    ///
    /// # Safety
    /// A live value of type `T` must be stored at `offset`.
    pub(crate) unsafe fn replace<T>(&mut self, offset: Offset, value: T) -> T {
        let dst = self.data.as_ptr().add(offset).cast::<T>();
        mem::replace(unsafe { &mut *dst }, value)
    }

    /// Borrows the value at `offset`.
    ///
    /// # Safety
    /// A live value of type `T` must be stored at `offset`.
    pub(crate) unsafe fn read<T>(&self, offset: Offset) -> &T {
        &*self.data.as_ptr().add(offset).cast::<T>()
    }

    /// Returns a raw mutable pointer to the byte at `offset`.
    ///
    /// # Safety
    /// `offset` must lie within the allocated region.
    pub(crate) unsafe fn at_mut(&mut self, offset: Offset) -> *mut u8 {
        self.data.as_ptr().add(offset)
    }

    /// Returns a raw const pointer to the byte at `offset`.
    ///
    /// # Safety
    /// `offset` must lie within the allocated region.
    pub(crate) unsafe fn at(&self, offset: Offset) -> *const u8 {
        self.data.as_ptr().add(offset)
    }

    /// Mutably borrows the value at `offset`.
    ///
    /// # Safety
    /// A live value of type `T` must be stored at `offset`.
    pub(crate) unsafe fn read_mut<T>(&mut self, offset: Offset) -> &mut T {
        &mut *self.data.as_ptr().add(offset).cast::<T>()
    }

    /// Writes `data` at `offset` without dropping any previous contents.
    ///
    /// # Safety
    /// `offset` must come from [`Self::allocate`] called with `T`'s layout,
    /// and any previous value there must already have been taken or dropped.
    pub(crate) unsafe fn write<T>(&mut self, offset: Offset, data: T) {
        let layout = Layout::new::<T>();
        let dst = self.data.as_ptr().add(offset).cast::<T>();

        // Sanity checks: the base allocation and the destination slot must
        // both satisfy `T`'s alignment.
        assert_eq!(
            self.data.as_ptr() as usize % layout.align(),
            0,
            "Improper alignment"
        );
        assert_eq!(dst as usize % layout.align(), 0);

        core::ptr::write(dst, data);
    }

    /// Type-erased write: copies `desc.layout().size()` bytes from `data`
    /// into the slot at `offset`, taking ownership of the pointed-to value.
    ///
    /// # Safety
    /// `data` must point to a valid value of the type described by `desc`,
    /// and `offset` must come from [`Self::allocate`] with `desc.layout()`.
    pub(crate) unsafe fn write_dyn(&mut self, offset: Offset, desc: ComponentDesc, data: *mut u8) {
        let dst = self.data.as_ptr().add(offset);
        let layout = desc.layout();

        assert_eq!(
            self.data.as_ptr() as usize % layout.align(),
            0,
            "Improper alignment"
        );

        core::ptr::copy_nonoverlapping(data, dst, layout.size());
    }

    /// Rewinds the cursor so the backing allocation can be reused.
    ///
    /// Does not drop any stored values; callers must have taken or dropped
    /// them beforehand (see `MultiComponentBuffer::clear`).
    #[inline(always)]
    pub(crate) fn reset(&mut self) {
        self.cursor = 0;
    }

    /// Allocates a slot for `value`, writes it, and returns its offset.
    pub(crate) fn push<T>(&mut self, value: T) -> Offset {
        let offset = self.allocate(Layout::new::<T>());
        // SAFETY: `offset` was just allocated with `T`'s layout and is unused.
        unsafe {
            self.write(offset, value);
        }
        offset
    }
}
impl Default for BufferStorage {
fn default() -> Self {
Self::new()
}
}
impl Drop for BufferStorage {
    fn drop(&mut self) {
        // A zero-sized layout means nothing was ever allocated and `data`
        // is still dangling — there is nothing to free.
        if self.layout.size() == 0 {
            return;
        }

        // SAFETY: `data` was allocated with exactly `self.layout`.
        unsafe { dealloc(self.data.as_ptr(), self.layout) }
    }
}
/// An ordered set of components with their values, stored type-erased.
///
/// Entries are keyed by [`ComponentKey`] and point into the shared raw
/// `storage`, which holds the actual values.
#[derive(Default)]
pub struct ComponentBuffer {
    // Component key -> (descriptor, byte offset of the value in `storage`).
    entries: BTreeMap<ComponentKey, (ComponentDesc, Offset)>,
    storage: BufferStorage,
}
impl core::fmt::Debug for ComponentBuffer {
    /// Formats the buffer as a map of component name to value, using each
    /// component's `debuggable` metadata when it is registered.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut s = f.debug_map();
        for &(desc, offset) in self.entries.values() {
            let debugger = desc.meta_ref().get(debuggable());
            if let Some(debugger) = debugger {
                // SAFETY: `offset` refers to a live value of the type
                // described by `desc`.
                unsafe {
                    let ptr = self.storage.at(offset);
                    s.entry(&desc.name(), debugger.debug_ptr(&ptr));
                }
            } else {
                // No debug metadata for this component; show a placeholder.
                s.entry(&desc.name(), &MissingDebug);
            }
        }
        s.finish()
    }
}
// SAFETY: NOTE(review) — the raw pointers make this type !Send/!Sync by
// default; these impls assume every stored value is itself Send + Sync,
// presumably guaranteed by the `ComponentValue` bound on insertion — confirm.
unsafe impl Send for ComponentBuffer {}
unsafe impl Sync for ComponentBuffer {}
impl ComponentBuffer {
    /// Creates a new, empty buffer.
    pub fn new() -> Self {
        Self::default()
    }

    /// Mutably borrows the value stored for `component`, if present.
    pub fn get_mut<T: ComponentValue>(&mut self, component: Component<T>) -> Option<&mut T> {
        let &(_, offset) = self.entries.get(&component.key())?;
        // SAFETY: the offset was produced when a value of type `T` was
        // inserted under this key, and the value is still live.
        unsafe { Some(self.storage.read_mut(offset)) }
    }

    /// Borrows the value stored for `component`, if present.
    pub fn get<T: ComponentValue>(&self, component: Component<T>) -> Option<&T> {
        let &(_, offset) = self.entries.get(&component.key())?;
        // SAFETY: see `get_mut`.
        unsafe { Some(self.storage.read(offset)) }
    }

    /// Returns true if the buffer contains a value for `component`.
    pub fn has<T: ComponentValue>(&self, component: Component<T>) -> bool {
        self.entries.contains_key(&component.key())
    }

    /// Iterates over the descriptors of all stored components, in key order.
    pub fn components(&self) -> impl Iterator<Item = &ComponentDesc> {
        self.entries.values().map(|v| &v.0)
    }

    /// Removes and returns the value stored for `component`, if present.
    ///
    /// The storage space itself is not reclaimed until the buffer is reused.
    pub fn remove<T: ComponentValue>(&mut self, component: Component<T>) -> Option<T> {
        let (_, offset) = self.entries.remove(&component.key())?;
        // SAFETY: the entry was removed above, so the value is moved out
        // exactly once and will not be dropped again.
        unsafe { Some(self.storage.take(offset)) }
    }

    /// Inserts `value` for `component`, returning the previous value if any.
    ///
    /// For exclusive relations (see `metadata::exclusive`), any existing
    /// relation with the same id but a different target is dropped first.
    pub fn set<T: ComponentValue>(&mut self, component: Component<T>, value: T) -> Option<T> {
        let desc = component.desc();
        if let Some(&(_, offset)) = self.entries.get(&desc.key()) {
            // SAFETY: same key implies the slot holds a live value of `T`.
            unsafe { Some(self.storage.replace(offset, value)) }
        } else {
            if desc.key().is_relation() && desc.meta_ref().has(metadata::exclusive()) {
                self.drain_relations_like(desc.key.id());
            }
            let offset = self.storage.push(value);
            self.entries.insert(desc.key(), (desc, offset));
            None
        }
    }

    /// Removes and drops every relation whose relation id is `relation`,
    /// regardless of its target.
    pub(crate) fn drain_relations_like(&mut self, relation: Entity) {
        // All (relation, target) keys for this relation fall inside the
        // range spanned by the minimum and maximum target entities.
        let start = ComponentKey::new(relation, Some(Entity::MIN));
        let end = ComponentKey::new(relation, Some(Entity::MAX));
        while let Some((&key, _)) = self.entries.range(start..=end).next() {
            let (desc, offset) = self.entries.remove(&key).unwrap();
            // SAFETY: the entry was just removed, so the value is dropped
            // exactly once here.
            unsafe {
                let ptr = self.storage.at_mut(offset);
                desc.drop(ptr);
            }
        }
    }

    /// Type-erased insert: moves the value behind `value` into the buffer,
    /// replacing (and dropping) any existing value for the same key.
    ///
    /// # Safety
    /// `value` must point to a valid, initialized value of the type described
    /// by `desc`; ownership of that value moves into the buffer (the caller
    /// must `mem::forget` its copy).
    pub(crate) unsafe fn set_dyn(&mut self, desc: ComponentDesc, value: *mut u8) {
        if let Some(&(_, offset)) = self.entries.get(&desc.key()) {
            // Replace in place: drop the old value, then copy the new bytes.
            let old_ptr = self.storage.at_mut(offset);
            desc.drop(old_ptr);
            ptr::copy_nonoverlapping(value, old_ptr, desc.size());
        } else {
            if desc.key().is_relation() && desc.meta_ref().has(metadata::exclusive()) {
                self.drain_relations_like(desc.key.id());
            }
            let offset = self.storage.allocate(desc.layout());
            self.storage.write_dyn(offset, desc, value);
            self.entries.insert(desc.key(), (desc, offset));
        }
    }

    /// Returns an iterator that removes every entry, yielding ownership of
    /// each value through a raw pointer.
    pub(crate) fn drain(&mut self) -> ComponentBufferIter {
        ComponentBufferIter {
            entries: &mut self.entries,
            storage: &mut self.storage,
        }
    }

    /// Number of stored components.
    #[inline]
    pub fn len(&self) -> usize {
        self.entries.len()
    }

    /// Returns true if no components are stored.
    #[must_use]
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }

    /// Keeps only the entries for which `f` returns true.
    ///
    /// # Safety
    /// Entries rejected by `f` are removed WITHOUT running the value's
    /// destructor — presumably the callback takes ownership through the
    /// pointer when it returns false; confirm against callers.
    pub(crate) unsafe fn retain(&mut self, mut f: impl FnMut(ComponentDesc, *mut u8) -> bool) {
        self.entries.retain(|_, (desc, offset)| {
            let ptr = unsafe { self.storage.at_mut(*offset) };
            f(*desc, ptr)
        })
    }
}
/// Draining iterator created by `ComponentBuffer::drain`.
///
/// Each step removes an entry from the map and yields its descriptor plus a
/// raw pointer to the stored value; since the entry is gone, the buffer's
/// `Drop` will no longer drop that value — the receiver owns it.
pub(crate) struct ComponentBufferIter<'a> {
    entries: &'a mut BTreeMap<ComponentKey, (ComponentDesc, Offset)>,
    storage: &'a mut BufferStorage,
}
impl<'a> Iterator for ComponentBufferIter<'a> {
    type Item = (ComponentDesc, *mut u8);

    fn next(&mut self) -> Option<Self::Item> {
        // Remove the smallest remaining entry and hand out a pointer to its
        // value; removing it transfers ownership to the caller.
        self.entries.pop_first().map(|(_, (desc, offset))| {
            // SAFETY: the offset was handed out by `storage` when the entry
            // was inserted and the value is still live.
            (desc, unsafe { self.storage.at_mut(offset) })
        })
    }
}
impl Drop for ComponentBuffer {
    fn drop(&mut self) {
        // The raw storage never runs destructors, so drop every value that
        // is still registered in the entry map.
        let storage = &mut self.storage;
        for (desc, offset) in self.entries.values().copied() {
            // SAFETY: each entry's offset refers to a live value of the
            // type described by `desc`, dropped exactly once here.
            unsafe { desc.drop(storage.at_mut(offset)) };
        }
    }
}
/// Type-erased bump storage for heterogeneous values.
///
/// Each pushed value registers a destructor keyed by its offset so that
/// `clear` (and `Drop`) can destroy whatever was not explicitly taken.
#[derive(Default)]
pub(crate) struct MultiComponentBuffer {
    storage: BufferStorage,
    // Offset of each still-owned value -> its type-erased destructor.
    // Relies on `BufferStorage::allocate` handing out unique offsets.
    drops: BTreeMap<Offset, unsafe fn(*mut u8)>,
}
impl MultiComponentBuffer {
    /// Appends `value` to the storage, registers its destructor, and returns
    /// the offset under which it can later be taken.
    pub fn push<T: ComponentValue>(&mut self, value: T) -> Offset {
        // Type-erased destructor for a value of type `T`.
        unsafe fn drop_erased<T>(ptr: *mut u8) {
            ptr.cast::<T>().drop_in_place()
        }

        let offset = self.storage.push(value);
        let old = self.drops.insert(offset, drop_erased::<T>);
        // Offsets from the storage are unique, so no destructor may already
        // be registered here.
        assert!(old.is_none());
        offset
    }

    /// Unregisters the destructor for `offset` and returns a raw pointer to
    /// the stored value; the caller now owns it.
    ///
    /// # Safety
    /// The value must be read exactly once and not taken again.
    pub unsafe fn take_dyn(&mut self, offset: Offset) -> *mut u8 {
        // Panics if the offset was never pushed or was already taken.
        self.drops.remove(&offset).unwrap();
        self.storage.at_mut(offset)
    }

    /// Drops every value still owned by the buffer and rewinds the storage,
    /// keeping the backing allocation for reuse.
    pub fn clear(&mut self) {
        let storage = &mut self.storage;
        for (&offset, drop_fn) in self.drops.iter() {
            // SAFETY: every offset left in `drops` refers to a live value,
            // destroyed exactly once here.
            unsafe { (*drop_fn)(storage.at_mut(offset)) }
        }
        self.drops.clear();
        self.storage.reset();
    }
}
impl Drop for MultiComponentBuffer {
    /// Destroys any values that were never taken out of the buffer.
    fn drop(&mut self) {
        Self::clear(self);
    }
}
// SAFETY: NOTE(review) — assumes every pushed value is Send + Sync,
// presumably guaranteed by the `ComponentValue` bound on `push` — confirm.
unsafe impl Send for MultiComponentBuffer {}
unsafe impl Sync for MultiComponentBuffer {}
#[cfg(test)]
mod tests {
    use core::mem;

    use alloc::{string::String, sync::Arc};

    use crate::component;

    use super::*;

    component! {
        a: i32,
        b: String,
        c: i16,
        d: f32,
        e: [f64; 100],
        f: Arc<String>,
    }

    #[test]
    pub fn component_buffer() {
        let counted: Arc<String> = Arc::new("abc".into());
        let mut buf = ComponentBuffer::new();

        // Insert a mix of sizes and alignments.
        buf.set(a(), 7);
        buf.set(c(), 9);
        buf.set(b(), "Hello, World".into());
        buf.set(e(), [5.0; 100]);
        buf.set(f(), counted.clone());

        assert_eq!(buf.get(a()), Some(&7));
        assert_eq!(buf.get(c()), Some(&9));
        assert_eq!(buf.get(b()), Some(&"Hello, World".into()));
        assert_eq!(buf.get(d()), None);
        assert_eq!(buf.get(e()), Some(&[5.0; 100]));

        // Dropping the buffer must release its clone of the Arc.
        drop(buf);
        assert_eq!(Arc::strong_count(&counted), 1);
    }

    #[test]
    pub fn component_buffer_reinsert() {
        let mut buf = ComponentBuffer::new();

        let first: Arc<String> = Arc::new("abc".into());
        let second: Arc<String> = Arc::new("abc".into());

        // Re-inserting under the same component must drop the first value.
        buf.set(f(), first.clone());
        buf.set(f(), second.clone());

        assert_eq!(Arc::strong_count(&first), 1);
        assert_eq!(Arc::strong_count(&second), 2);
    }

    #[test]
    pub fn component_buffer_reinsert_dyn() {
        let mut buf = ComponentBuffer::new();

        let first: Arc<String> = Arc::new("abc".into());
        let second: Arc<String> = Arc::new("abc".into());

        // Move a clone in through the type-erased path; `forget` hands
        // ownership of the clone to the buffer.
        unsafe {
            let mut src = first.clone();
            buf.set_dyn(f().desc(), &mut src as *mut _ as *mut u8);
            mem::forget(src)
        }

        // The second type-erased insert must drop the first clone.
        unsafe {
            let mut src = second.clone();
            buf.set_dyn(f().desc(), &mut src as *mut _ as *mut u8);
            mem::forget(src)
        }

        assert_eq!(Arc::strong_count(&first), 1);
        assert_eq!(Arc::strong_count(&second), 2);
    }

    #[test]
    fn multi_component_buffer() {
        let mut buf = MultiComponentBuffer::default();
        let counted = Arc::new(4);

        let int_offset = buf.push(9i32);
        let string_offset = buf.push(String::from("Hello, there"));
        let _kept = buf.push(counted.clone());
        let arc_offset = buf.push(counted.clone());

        unsafe {
            assert_eq!(
                buf.take_dyn(string_offset).cast::<String>().read(),
                "Hello, there"
            );
            assert_eq!(buf.take_dyn(int_offset).cast::<i32>().read(), 9);
            assert_eq!(buf.take_dyn(arc_offset).cast::<Arc<i32>>().read(), counted);
        }

        // `_kept` was never taken, so dropping the buffer must drop it.
        drop(buf);
        assert_eq!(Arc::strong_count(&counted), 1);
    }
}