use std::marker::PhantomData;
use vob::Vob;
/// Contract for types usable as pool indices: cheaply copyable, losslessly
/// convertible to/from `usize`, and totally ordered (required so free lists
/// can be sorted / min-heaped during defragmentation).
pub(crate) trait PoolIndex: Copy + From<usize> + Into<usize> + Ord {}
/// Result of [`IndexPool::pop`]: tells the caller whether the returned index
/// was recycled from the free list or freshly grown past the previous end of
/// the pool (callers may need to allocate backing storage for `New` indices).
#[derive(Debug, PartialEq, Clone, Copy)]
pub(crate) enum Allocation<T> {
// A previously released index being handed out again.
Reused(T),
// A brand-new index, equal to the pool's size before this call.
New(T),
}
/// Private policy machinery: the free-list abstraction plus the two concrete
/// containers. Kept in a private module so the handler types cannot be named
/// or implemented outside this file.
mod sealed {
    use super::PoolIndex;
    use std::cmp::Reverse;
    use std::collections::BinaryHeap;

    /// A container of currently-free indices. Implementations choose the
    /// order in which `pop` hands indices back out.
    pub(crate) trait AvailableHandler<I: PoolIndex>: Clone {
        /// Creates an empty handler.
        fn new() -> Self;
        /// Removes and returns the next free index, if any.
        fn pop(&mut self) -> Option<I>;
        /// Records `index` as free again.
        fn push(&mut self, index: I);
        /// Number of free indices currently held.
        fn len(&self) -> usize;
        /// Discards every held index.
        fn clear(&mut self);
        /// Prepares the container so that subsequent calls to
        /// `pop_smallest_available` yield indices in ascending order.
        fn sort_available(&mut self);
        /// After `sort_available`, removes and returns the smallest held index.
        fn pop_smallest_available(&mut self) -> Option<I>;
    }

    /// Ties a policy marker type to its concrete handler implementation.
    pub(crate) trait IndexPolicy<I: PoolIndex> {
        type Handler: AvailableHandler<I>;
    }

    /// Stack-ordered free list: `pop` returns the most recently released index.
    #[derive(Clone)]
    pub struct LifoHandler<I: PoolIndex> {
        free: Vec<I>,
    }

    impl<I: PoolIndex> AvailableHandler<I> for LifoHandler<I> {
        #[inline(always)]
        fn new() -> Self {
            LifoHandler { free: Vec::new() }
        }

        #[inline(always)]
        fn pop(&mut self) -> Option<I> {
            self.free.pop()
        }

        #[inline(always)]
        fn push(&mut self, idx: I) {
            self.free.push(idx);
        }

        #[inline(always)]
        fn len(&self) -> usize {
            self.free.len()
        }

        #[inline(always)]
        fn clear(&mut self) {
            self.free.clear();
        }

        #[inline(always)]
        fn sort_available(&mut self) {
            // Sort descending so popping from the Vec tail yields the smallest.
            self.free.sort_unstable_by_key(|&idx| Reverse(idx));
        }

        #[inline(always)]
        fn pop_smallest_available(&mut self) -> Option<I> {
            // Valid only after `sort_available`: the tail holds the minimum.
            self.free.pop()
        }
    }

    /// Smallest-index-first free list backed by a min-heap (`Reverse` flips
    /// the max-heap ordering of `BinaryHeap`).
    #[derive(Clone)]
    pub struct SifoHandler<I: PoolIndex> {
        heap: BinaryHeap<Reverse<I>>,
    }

    impl<I: PoolIndex> AvailableHandler<I> for SifoHandler<I> {
        #[inline(always)]
        fn new() -> Self {
            SifoHandler {
                heap: BinaryHeap::new(),
            }
        }

        #[inline(always)]
        fn pop(&mut self) -> Option<I> {
            let Reverse(idx) = self.heap.pop()?;
            Some(idx)
        }

        #[inline(always)]
        fn push(&mut self, idx: I) {
            self.heap.push(Reverse(idx));
        }

        #[inline(always)]
        fn len(&self) -> usize {
            self.heap.len()
        }

        #[inline(always)]
        fn clear(&mut self) {
            self.heap.clear();
        }

        #[inline(always)]
        fn sort_available(&mut self) {
            // The heap already pops smallest-first; nothing to prepare.
        }

        #[inline(always)]
        fn pop_smallest_available(&mut self) -> Option<I> {
            self.pop()
        }
    }
}
use sealed::{AvailableHandler, IndexPolicy};
/// A recycling index allocator. `used` is a bitmap over every index ever
/// handed out (`true` = currently in use); `available` holds released
/// indices in policy-specific order. `ENABLE_UNSAFE` selects unchecked
/// bitmap writes on the hot reuse path.
#[derive(Clone)]
pub(crate) struct IndexPool<I: PoolIndex, P: IndexPolicy<I>, const ENABLE_UNSAFE: bool> {
// Bitmap of all indices in [0, total_count): true while an index is live.
used: Vob,
// Policy-ordered free list of released indices.
available: P::Handler,
// The pool stores no `I` values directly; PhantomData pins the type parameter.
_phantom: PhantomData<I>,
}
impl<I: PoolIndex, P: IndexPolicy<I>, const ENABLE_UNSAFE: bool> IndexPool<I, P, ENABLE_UNSAFE> {
/// Creates a pool whose first `already_in_use` indices are pre-marked as
/// used (slots handed out before this pool took over allocation).
pub(crate) fn new(already_in_use: u32) -> Self {
Self {
used: Vob::from_elem(true, already_in_use as usize),
available: P::Handler::new(),
_phantom: PhantomData,
}
}
/// Moves the `used` bitmap out of the pool, leaving an empty one behind.
/// NOTE(review): the pool is inconsistent (counts drop to zero) until the
/// caller rebuilds or discards it — presumably intentional; confirm at call sites.
pub(crate) fn take_used(&mut self) -> Vob {
std::mem::take(&mut self.used)
}
/// Allocates an index: reuses the free list if possible (`Reused`),
/// otherwise grows the bitmap by one (`New`, index == old length).
pub(crate) fn pop(&mut self) -> Allocation<I> {
if let Some(index) = self.available.pop() {
if ENABLE_UNSAFE {
// SAFETY(review): relies on every index in `available` being
// < used.len(). `push` only debug_asserts that bound, so an
// out-of-range release in a release build would make this
// unchecked write UB — TODO confirm callers uphold the invariant.
unsafe {
let _ = self.used.set_unchecked(index.into(), true);
}
} else {
// Checked variant; the returned value is deliberately ignored.
let _ = self.used.set(index.into(), true);
}
Allocation::Reused(index)
} else {
// No free slot: the new index is the current length, then extend.
let index = I::from(self.used.len());
self.used.push(true);
Allocation::New(index)
}
}
/// Releases `index` back to the pool. Double-release and out-of-range
/// indices are caught by debug_assert only; in release builds a
/// double-release is silently ignored (the bit is already false).
pub(crate) fn push(&mut self, index: I) {
debug_assert!(index.into() < self.used.len(), "Invalid index");
if self.used.get(index.into()).unwrap_or(false) {
let _ = self.used.set(index.into(), false);
self.available.push(index);
} else {
debug_assert!(false, "Index already released");
}
}
/// Number of released indices awaiting reuse.
/// NOTE(review): `len as u32` truncates silently past u32::MAX entries.
#[inline(always)]
pub(crate) fn available_count(&self) -> u32 {
self.available.len() as u32
}
/// Total indices ever allocated (bitmap length), live or free.
#[inline(always)]
pub(crate) fn total_count(&self) -> u32 {
self.used.len() as u32
}
/// Indices currently live: total minus free.
#[inline(always)]
pub(crate) fn active_count(&self) -> u32 {
debug_assert!(self.total_count() >= self.available_count());
self.total_count() - self.available_count()
}
/// Returns whether `index` is currently live.
/// SAFETY: no bounds check — caller must guarantee index < total_count().
#[allow(dead_code)]
#[inline(always)]
pub(crate) fn is_used(&self, index: I) -> bool {
unsafe { self.used.get_unchecked(index.into()) }
}
/// Resizes the bitmap to `new_size` (despite the name, this can also
/// grow), marks every remaining slot as used, and empties the free list.
pub(crate) fn truncate(&mut self, new_size: usize) {
self.used.resize(new_size, true);
self.used.set_all(true);
self.available.clear();
}
/// Compacts live indices into the dense range [0, active_count).
/// For each move it invokes `remap(from, to)` so the caller can relocate
/// its associated data; afterwards the pool holds exactly the compacted
/// range with an empty free list.
pub(crate) fn defragment<F: FnMut(I, I)>(&mut self, mut remap: F) {
let old_size = self.active_count();
// Order the free list so holes are consumed smallest-first.
self.available.sort_available();
// Highest slot that might still be occupied; scanned downward.
let mut move_candidate = self.used.len().saturating_sub(1);
while let Some(hole_idx) = self.available.pop_smallest_available() {
let hole_usize = hole_idx.into();
// Skip unused slots to find the highest live index above the hole.
// SAFETY(review): move_candidate < used.len() because it starts at
// len-1 and only decreases — TODO confirm used is non-empty here
// (an empty bitmap implies an empty free list, so the loop body
// should be unreachable in that case).
while move_candidate > hole_usize && !unsafe { self.used.get_unchecked(move_candidate) }
{
move_candidate -= 1;
}
// Holes and live slots have met: everything below is already dense.
if hole_usize >= move_candidate {
break;
}
// Move the highest live entry into the lowest hole. The `used` bits
// are not updated here on purpose: truncate() below resets them all.
remap(I::from(move_candidate), hole_idx);
move_candidate -= 1;
}
// Shrink to the live count; marks all remaining slots used.
self.truncate(old_size as usize);
}
}
/// Policy marker: released indices are reused in LIFO (stack) order.
#[derive(Clone)]
pub(crate) struct Lifo;
/// Policy marker: released indices are reused smallest-index-first.
#[derive(Clone)]
pub(crate) struct Sifo;
// Lifo wires the pool to the Vec-backed stack handler.
impl<I: PoolIndex> IndexPolicy<I> for Lifo {
type Handler = sealed::LifoHandler<I>;
}
// Sifo wires the pool to the min-heap handler.
impl<I: PoolIndex> IndexPolicy<I> for Sifo {
type Handler = sealed::SifoHandler<I>;
}
/// Pool that reuses the most recently released index first.
pub(crate) type LifoPool<I, const ENABLE_UNSAFE: bool> = IndexPool<I, Lifo, ENABLE_UNSAFE>;
/// Pool that always reuses the smallest released index first.
pub(crate) type SifoPool<I, const ENABLE_UNSAFE: bool> = IndexPool<I, Sifo, ENABLE_UNSAFE>;