use std::any::{Any, TypeId};
use std::sync::atomic::{AtomicU64, Ordering};
/// Counters describing allocation activity within a region heap.
///
/// `allocations`, `reclaimed`, and `bytes_allocated` are cumulative totals
/// over the heap's lifetime; `live` and `bytes_live` track the current
/// population. Byte figures are based on `size_of::<T>()` hints captured at
/// allocation time, not true heap usage.
#[derive(Debug, Default, Clone, Copy)]
pub struct HeapStats {
    /// Total number of successful `alloc` calls.
    pub allocations: u64,
    /// Total entries reclaimed via `dealloc` or `reclaim_all`.
    pub reclaimed: u64,
    /// Entries currently occupied.
    pub live: u64,
    /// Cumulative bytes requested (sum of size hints).
    pub bytes_allocated: u64,
    /// Size hints of currently-live entries.
    pub bytes_live: u64,
}
// Process-wide count of live region-heap allocations. Incremented in
// `alloc`; decremented in `dealloc`, `reclaim_all`, and `RegionHeap::drop`.
static GLOBAL_ALLOC_COUNT: AtomicU64 = AtomicU64::new(0);
/// Returns the process-wide number of live region-heap allocations.
///
/// Loaded with `Relaxed` ordering: the counter is a monitoring hint, not a
/// synchronization point.
#[must_use]
pub fn global_alloc_count() -> u64 {
    GLOBAL_ALLOC_COUNT.load(Ordering::Relaxed)
}
/// Generational handle into a region heap.
///
/// Carries the slot position, the slot's generation at allocation time (so
/// stale handles miss after the slot is reused), and the `TypeId` of the
/// stored value (so wrongly-typed lookups return `None`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct HeapIndex {
    index: u32,
    generation: u32,
    type_id: TypeId,
}
impl HeapIndex {
    /// Slot position inside the heap's slot vector.
    #[must_use]
    pub const fn index(self) -> u32 {
        self.index
    }
    /// Generation the slot had when this handle was issued.
    #[must_use]
    pub const fn generation(self) -> u32 {
        self.generation
    }
}
/// A live, type-erased value stored in an occupied slot.
struct HeapEntry {
    // Boxed value; `Send + Sync` so the heap as a whole can cross threads.
    value: Box<dyn Any + Send + Sync>,
    // Generation this entry was allocated under; must match the handle.
    generation: u32,
    // `size_of::<T>()` captured at allocation time, for byte accounting.
    size_hint: usize,
}
/// A slot either holds a live entry or is a link in the intrusive free list.
enum HeapSlot {
    Occupied(HeapEntry),
    Vacant {
        // Index of the next vacant slot, or `None` at the list's end.
        next_free: Option<u32>,
        // Generation the next occupant will be issued (previous + 1,
        // wrapping) — bumped by `dealloc` when the slot is vacated.
        generation: u32,
    },
}
/// Generational arena of type-erased values with O(1) alloc and dealloc.
///
/// Freed slots are chained into an intrusive free list rooted at
/// `free_head` and reused with a bumped generation, so stale `HeapIndex`
/// handles are rejected rather than aliasing a new value.
#[derive(Default)]
pub struct RegionHeap {
    slots: Vec<HeapSlot>,
    // Head of the vacant-slot free list.
    free_head: Option<u32>,
    // Number of currently occupied slots.
    len: usize,
    stats: HeapStats,
}
impl std::fmt::Debug for RegionHeap {
    /// Summarized output: slot contents are type-erased, so only `len` and
    /// `stats` are printed, followed by a `..` marker.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut builder = f.debug_struct("RegionHeap");
        builder.field("len", &self.len);
        builder.field("stats", &self.stats);
        builder.finish_non_exhaustive()
    }
}
impl RegionHeap {
    /// Creates an empty heap.
    #[must_use]
    pub fn new() -> Self {
        Self {
            slots: Vec::new(),
            free_head: None,
            len: 0,
            stats: HeapStats::default(),
        }
    }
    /// Creates an empty heap with room for `capacity` slots before the
    /// backing vector reallocates.
    #[must_use]
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            slots: Vec::with_capacity(capacity),
            free_head: None,
            len: 0,
            stats: HeapStats::default(),
        }
    }
    /// Number of live (occupied) entries.
    #[must_use]
    pub const fn len(&self) -> usize {
        self.len
    }
    /// `true` when no entries are live.
    #[must_use]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }
    /// Snapshot of this heap's allocation statistics.
    #[must_use]
    pub const fn stats(&self) -> HeapStats {
        self.stats
    }
    /// Stores `value` and returns a generational, type-tagged handle to it.
    ///
    /// Reuses the head of the free list when one exists; otherwise appends a
    /// fresh slot at generation 0.
    ///
    /// # Panics
    /// Panics if the slot count would exceed `u32::MAX` or if the free list
    /// is corrupted. All bookkeeping (`len`, stats, global counter) happens
    /// only after the slot write, so a panic leaves the counters untouched.
    pub fn alloc<T: Send + Sync + 'static>(&mut self, value: T) -> HeapIndex {
        let size_hint = std::mem::size_of::<T>();
        let type_id = TypeId::of::<T>();
        let entry_value: Box<dyn Any + Send + Sync> = Box::new(value);
        let heap_index = if let Some(free_index) = self.free_head {
            let Some(slot) = self.slots.get_mut(free_index as usize) else {
                unreachable!("free list pointed outside heap slots");
            };
            match slot {
                HeapSlot::Vacant {
                    next_free,
                    generation,
                } => {
                    // The vacant slot already carries the bumped generation;
                    // the new occupant simply inherits it.
                    let generation_value = *generation;
                    self.free_head = *next_free;
                    *slot = HeapSlot::Occupied(HeapEntry {
                        value: entry_value,
                        generation: generation_value,
                        size_hint,
                    });
                    HeapIndex {
                        index: free_index,
                        generation: generation_value,
                        type_id,
                    }
                }
                HeapSlot::Occupied(_) => unreachable!("free list pointed to occupied slot"),
            }
        } else {
            // No reusable slot: append. Slot indices are u32 by design.
            let index = u32::try_from(self.slots.len()).expect("region heap overflow");
            self.slots.push(HeapSlot::Occupied(HeapEntry {
                value: entry_value,
                generation: 0,
                size_hint,
            }));
            HeapIndex {
                index,
                generation: 0,
                type_id,
            }
        };
        self.len += 1;
        self.stats.allocations += 1;
        self.stats.live += 1;
        self.stats.bytes_allocated += size_hint as u64;
        self.stats.bytes_live += size_hint as u64;
        GLOBAL_ALLOC_COUNT.fetch_add(1, Ordering::Relaxed);
        heap_index
    }
    /// Returns `Some(&T)` only when `index` is type-correct, in bounds,
    /// occupied, and of the matching generation.
    #[must_use]
    pub fn get<T: 'static>(&self, index: HeapIndex) -> Option<&T> {
        if TypeId::of::<T>() != index.type_id {
            return None;
        }
        match self.slots.get(index.index as usize)? {
            HeapSlot::Occupied(entry) if entry.generation == index.generation => {
                entry.value.downcast_ref::<T>()
            }
            _ => None,
        }
    }
    /// Mutable counterpart of [`Self::get`], with the same validity checks.
    pub fn get_mut<T: 'static>(&mut self, index: HeapIndex) -> Option<&mut T> {
        if TypeId::of::<T>() != index.type_id {
            return None;
        }
        match self.slots.get_mut(index.index as usize)? {
            HeapSlot::Occupied(entry) if entry.generation == index.generation => {
                entry.value.downcast_mut::<T>()
            }
            _ => None,
        }
    }
    /// `true` if `index` names a live entry of the matching generation.
    /// Note: unlike `get`, this does not compare the stored `TypeId`.
    #[must_use]
    pub fn contains(&self, index: HeapIndex) -> bool {
        match self.slots.get(index.index as usize) {
            Some(HeapSlot::Occupied(entry)) => entry.generation == index.generation,
            _ => false,
        }
    }
    /// Frees the entry behind `index`, returning `true` on success.
    ///
    /// Stale or out-of-range handles return `false` without side effects.
    /// On success the slot joins the free list with its generation bumped
    /// (wrapping), and the stored value is dropped before this returns.
    pub fn dealloc(&mut self, index: HeapIndex) -> bool {
        let Some(slot) = self.slots.get_mut(index.index as usize) else {
            return false;
        };
        let (size_hint, new_gen) = {
            let HeapSlot::Occupied(entry) = slot else {
                return false;
            };
            if entry.generation != index.generation {
                return false;
            }
            (entry.size_hint, entry.generation.wrapping_add(1))
        };
        let old_slot = std::mem::replace(
            slot,
            HeapSlot::Vacant {
                next_free: self.free_head,
                generation: new_gen,
            },
        );
        self.free_head = Some(index.index);
        self.len -= 1;
        self.stats.reclaimed += 1;
        self.stats.live -= 1;
        self.stats.bytes_live = self.stats.bytes_live.saturating_sub(size_hint as u64);
        GLOBAL_ALLOC_COUNT.fetch_sub(1, Ordering::Relaxed);
        // Drop the displaced value last, after all bookkeeping is consistent.
        drop(old_slot);
        true
    }
    /// Frees every entry at once and resets the free list.
    ///
    /// Clearing `slots` drops the stored values front-to-back, i.e. in slot
    /// order — the drop-order tests below rely on this.
    pub fn reclaim_all(&mut self) {
        let reclaimed_count = self.len as u64;
        GLOBAL_ALLOC_COUNT.fetch_sub(reclaimed_count, Ordering::Relaxed);
        self.stats.reclaimed += reclaimed_count;
        self.stats.live = 0;
        self.stats.bytes_live = 0;
        self.free_head = None;
        self.len = 0; self.slots.clear();
    }
}
impl Drop for RegionHeap {
    /// Keeps the process-wide allocation counter consistent when a heap is
    /// dropped while entries are still live.
    fn drop(&mut self) {
        let remaining = self.len as u64;
        if remaining != 0 {
            GLOBAL_ALLOC_COUNT.fetch_sub(remaining, Ordering::Relaxed);
        }
    }
}
/// Typed, copyable handle wrapping a [`HeapIndex`].
///
/// `PhantomData<T>` records the stored type without owning a `T`, so the
/// trait impls below are written by hand: `#[derive(...)]` would add
/// spurious `T: Clone`/`T: Copy`/`T: Debug`/`T: Hash`/... bounds, making
/// e.g. `HeapRef<String>` non-`Copy` even though the handle is only an
/// index. The manual impls are unconditional and strictly more general.
pub struct HeapRef<T> {
    index: HeapIndex,
    _marker: std::marker::PhantomData<T>,
}
impl<T> Clone for HeapRef<T> {
    fn clone(&self) -> Self {
        *self
    }
}
impl<T> Copy for HeapRef<T> {}
impl<T> PartialEq for HeapRef<T> {
    // Equality is by handle only; `_marker` carries no data.
    fn eq(&self, other: &Self) -> bool {
        self.index == other.index
    }
}
impl<T> Eq for HeapRef<T> {}
impl<T> std::hash::Hash for HeapRef<T> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        std::hash::Hash::hash(&self.index, state);
    }
}
impl<T> std::fmt::Debug for HeapRef<T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("HeapRef").field("index", &self.index).finish()
    }
}
impl<T: Send + Sync + 'static> HeapRef<T> {
    /// Wraps an untyped handle, fixing the value type at compile time.
    #[must_use]
    pub const fn new(index: HeapIndex) -> Self {
        Self { index, _marker: std::marker::PhantomData }
    }
    /// The underlying untyped handle.
    #[must_use]
    pub const fn index(&self) -> HeapIndex {
        self.index
    }
    /// Borrows the referenced value from `heap`, if the handle is still valid.
    #[must_use]
    pub fn get<'heap>(&self, heap: &'heap RegionHeap) -> Option<&'heap T> {
        heap.get::<T>(self.index)
    }
    /// Mutably borrows the referenced value from `heap`, if still valid.
    pub fn get_mut<'heap>(&self, heap: &'heap mut RegionHeap) -> Option<&'heap mut T> {
        heap.get_mut::<T>(self.index)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::{Arc, Mutex};
    // Shared drop-order log. Lock poisoning is tolerated via `into_inner`
    // so a panicking test cannot hide entries already recorded.
    #[derive(Clone, Default)]
    struct DropLog(Arc<Mutex<Vec<&'static str>>>);
    impl DropLog {
        fn push(&self, label: &'static str) {
            self.0
                .lock()
                .unwrap_or_else(|poisoned| poisoned.into_inner())
                .push(label);
        }
        fn snapshot(&self) -> Vec<&'static str> {
            self.0
                .lock()
                .unwrap_or_else(|poisoned| poisoned.into_inner())
                .clone()
        }
    }
    // Probe value that appends its label to the log when dropped.
    struct DropProbe {
        label: &'static str,
        log: DropLog,
    }
    impl DropProbe {
        fn new(label: &'static str, log: DropLog) -> Self {
            Self { label, log }
        }
    }
    impl Drop for DropProbe {
        fn drop(&mut self) {
            self.log.push(self.label);
        }
    }
    // Allocates probes "a".."d", deallocates `first` then `second` (each
    // drop happens eagerly inside `dealloc`), then reclaims the heap and
    // returns only the labels dropped by `reclaim_all`.
    fn close_suffix_after_early_deallocs(
        first: &'static str,
        second: &'static str,
    ) -> Vec<&'static str> {
        let log = DropLog::default();
        let mut heap = RegionHeap::new();
        let a = heap.alloc(DropProbe::new("a", log.clone()));
        let b = heap.alloc(DropProbe::new("b", log.clone()));
        let c = heap.alloc(DropProbe::new("c", log.clone()));
        let d = heap.alloc(DropProbe::new("d", log.clone()));
        for label in [first, second] {
            let deallocated = match label {
                "a" => heap.dealloc(a),
                "b" => heap.dealloc(b),
                "c" => heap.dealloc(c),
                "d" => heap.dealloc(d),
                _ => panic!("unexpected label"),
            };
            assert!(deallocated, "{label} should deallocate successfully");
        }
        let dealloc_drops = log.snapshot();
        assert_eq!(dealloc_drops.len(), 2, "expected two eager drops");
        heap.reclaim_all();
        log.snapshot()[dealloc_drops.len()..].to_vec()
    }
    // Reclaims two independent heaps in the given order (0 = left,
    // 1 = right) and returns each heap's own drop trace.
    fn independent_heap_drop_traces(
        reclaim_order: [usize; 2],
    ) -> (Vec<&'static str>, Vec<&'static str>) {
        let left_log = DropLog::default();
        let right_log = DropLog::default();
        let mut left = RegionHeap::new();
        left.alloc(DropProbe::new("left-a", left_log.clone()));
        left.alloc(DropProbe::new("left-b", left_log.clone()));
        let mut right = RegionHeap::new();
        right.alloc(DropProbe::new("right-a", right_log.clone()));
        right.alloc(DropProbe::new("right-b", right_log.clone()));
        for which in reclaim_order {
            match which {
                0 => left.reclaim_all(),
                1 => right.reclaim_all(),
                _ => panic!("unexpected heap selector"),
            }
        }
        (left_log.snapshot(), right_log.snapshot())
    }
    #[test]
    fn alloc_and_get() {
        let mut heap = RegionHeap::new();
        let idx = heap.alloc(42u32);
        assert_eq!(heap.get::<u32>(idx), Some(&42));
        assert_eq!(heap.len(), 1);
        assert_eq!(heap.stats().allocations, 1);
        assert_eq!(heap.stats().live, 1);
    }
    #[test]
    fn multiple_types() {
        let mut heap = RegionHeap::new();
        let idx1 = heap.alloc(42u32);
        let idx2 = heap.alloc("hello".to_string());
        let idx3 = heap.alloc(vec![1, 2, 3]);
        assert_eq!(heap.get::<u32>(idx1), Some(&42));
        assert_eq!(heap.get::<String>(idx2).map(String::as_str), Some("hello"));
        assert_eq!(heap.get::<Vec<i32>>(idx3), Some(&vec![1, 2, 3]));
        // Wrong-type lookups are rejected by the TypeId check.
        assert_eq!(heap.get::<String>(idx1), None);
        assert_eq!(heap.get::<u32>(idx2), None);
    }
    #[test]
    fn dealloc_and_reuse() {
        let mut heap = RegionHeap::new();
        let idx1 = heap.alloc(1u32);
        let idx2 = heap.alloc(2u32);
        assert!(heap.dealloc(idx1));
        assert_eq!(heap.len(), 1);
        assert_eq!(heap.stats().live, 1);
        assert_eq!(heap.stats().reclaimed, 1);
        assert_eq!(heap.get::<u32>(idx1), None);
        // The freed slot is reused with a new generation.
        let idx3 = heap.alloc(3u32);
        assert_eq!(idx3.index(), idx1.index());
        assert_ne!(idx3.generation(), idx1.generation());
        assert_eq!(heap.get::<u32>(idx2), Some(&2));
        assert_eq!(heap.get::<u32>(idx3), Some(&3));
    }
    #[test]
    fn generation_prevents_aba() {
        let mut heap = RegionHeap::new();
        let idx1 = heap.alloc(1u32);
        heap.dealloc(idx1);
        let idx2 = heap.alloc(2u32);
        assert_eq!(idx1.index(), idx2.index());
        assert_ne!(idx1.generation(), idx2.generation());
        // The stale handle must not see the new occupant.
        assert_eq!(heap.get::<u32>(idx1), None);
        assert_eq!(heap.get::<u32>(idx2), Some(&2));
    }
    #[test]
    fn generation_monotonic_on_reuse() {
        let mut heap = RegionHeap::new();
        let mut idx = heap.alloc(0u32);
        for i in 1u32..16 {
            assert!(heap.dealloc(idx));
            let next = heap.alloc(i);
            assert_eq!(next.index(), idx.index());
            assert_eq!(next.generation(), idx.generation().wrapping_add(1));
            assert_eq!(heap.get::<u32>(idx), None);
            idx = next;
        }
    }
    #[test]
    fn deterministic_reuse_pattern() {
        // Runs a fixed alloc/dealloc interleaving twice and checks the
        // (index, generation) trace is identical — LIFO free-list reuse.
        fn run_pattern() -> Vec<(u32, u32)> {
            let mut heap = RegionHeap::new();
            let first = heap.alloc(1u32);
            let second = heap.alloc(2u32);
            let third = heap.alloc(3u32);
            assert!(heap.dealloc(second));
            let reuse_second = heap.alloc(4u32);
            assert!(heap.dealloc(first));
            assert!(heap.dealloc(third));
            let reuse_third = heap.alloc(5u32); let reuse_first = heap.alloc(6u32);
            vec![
                (first.index(), first.generation()),
                (second.index(), second.generation()),
                (third.index(), third.generation()),
                (reuse_second.index(), reuse_second.generation()),
                (reuse_third.index(), reuse_third.generation()),
                (reuse_first.index(), reuse_first.generation()),
            ]
        }
        let first = run_pattern();
        let second = run_pattern();
        assert_eq!(first, second, "allocation pattern should be deterministic");
    }
    #[test]
    fn mr_early_dealloc_permutation_preserves_close_drop_suffix() {
        // Whichever order b/d were freed in, reclaim_all drops the
        // survivors in slot order.
        let forward = close_suffix_after_early_deallocs("b", "d");
        let reverse = close_suffix_after_early_deallocs("d", "b");
        assert_eq!(forward, reverse);
        assert_eq!(forward, vec!["a", "c"]);
    }
    #[test]
    fn mr_independent_heap_reclaim_order_preserves_per_heap_drop_traces() {
        let forward = independent_heap_drop_traces([0, 1]);
        let reverse = independent_heap_drop_traces([1, 0]);
        assert_eq!(forward.0, reverse.0);
        assert_eq!(forward.1, reverse.1);
        assert_eq!(forward.0, vec!["left-a", "left-b"]);
        assert_eq!(forward.1, vec!["right-a", "right-b"]);
    }
    #[test]
    fn alloc_panic_does_not_mutate_len_or_stats() {
        let mut heap = RegionHeap::new();
        // Corrupt the free list (private field access is allowed from this
        // child module) so `alloc` hits its unreachable! path.
        heap.free_head = Some(1);
        let before_len = heap.len();
        let before_stats = heap.stats();
        let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
            let _ = heap.alloc(123u32);
        }));
        assert!(
            result.is_err(),
            "alloc should panic on corrupted free-list head"
        );
        // Bookkeeping happens after the slot write, so nothing changed.
        assert_eq!(heap.len(), before_len);
        let after_stats = heap.stats();
        assert_eq!(after_stats.allocations, before_stats.allocations);
        assert_eq!(after_stats.reclaimed, before_stats.reclaimed);
        assert_eq!(after_stats.live, before_stats.live);
        assert_eq!(after_stats.bytes_allocated, before_stats.bytes_allocated);
        assert_eq!(after_stats.bytes_live, before_stats.bytes_live);
    }
    #[test]
    fn reclaim_all() {
        let mut heap = RegionHeap::new();
        heap.alloc(1u32);
        heap.alloc(2u32);
        heap.alloc(3u32);
        assert_eq!(heap.len(), 3);
        assert_eq!(heap.stats().live, 3);
        heap.reclaim_all();
        assert_eq!(heap.len(), 0);
        assert!(heap.is_empty());
        assert_eq!(heap.stats().live, 0);
        assert_eq!(heap.stats().reclaimed, 3);
    }
    #[test]
    fn stats_tracking() {
        let mut heap = RegionHeap::new();
        heap.alloc(42u32);
        heap.alloc("hello".to_string());
        let stats = heap.stats();
        assert_eq!(stats.allocations, 2);
        assert_eq!(stats.live, 2);
        assert_eq!(stats.reclaimed, 0);
        // Hand-built handle targeting the first slot (fields are visible
        // here because tests is a child module).
        heap.dealloc(HeapIndex {
            index: 0,
            generation: 0,
            type_id: TypeId::of::<u32>(),
        });
        let stats = heap.stats();
        assert_eq!(stats.allocations, 2);
        assert_eq!(stats.live, 1);
        assert_eq!(stats.reclaimed, 1);
    }
    #[test]
    fn heap_ref_typed_access() {
        let mut heap = RegionHeap::new();
        let idx = heap.alloc(42u32);
        let href: HeapRef<u32> = HeapRef::new(idx);
        assert_eq!(href.get(&heap), Some(&42));
        *href.get_mut(&mut heap).unwrap() = 100;
        assert_eq!(href.get(&heap), Some(&100));
    }
    #[test]
    fn drop_reclaims_memory() {
        // Relies on RegionHeap's Drop impl to balance the global counter
        // for the 100 still-live entries when `heap` goes out of scope.
        let mut heap = RegionHeap::new();
        for i in 0u64..100 {
            heap.alloc(i);
        }
        assert_eq!(heap.len(), 100);
        assert_eq!(heap.stats().live, 100);
        assert_eq!(heap.stats().allocations, 100);
        assert_eq!(heap.stats().reclaimed, 0);
    }
    #[test]
    fn heap_stats_debug_default_clone_copy() {
        let stats = HeapStats::default();
        assert_eq!(stats.allocations, 0);
        assert_eq!(stats.reclaimed, 0);
        assert_eq!(stats.live, 0);
        assert_eq!(stats.bytes_allocated, 0);
        assert_eq!(stats.bytes_live, 0);
        let dbg = format!("{stats:?}");
        assert!(dbg.contains("HeapStats"), "{dbg}");
        let copied = stats;
        let cloned = stats;
        assert_eq!(format!("{copied:?}"), format!("{cloned:?}"));
    }
    #[test]
    fn heap_index_debug_clone_copy_eq_hash() {
        use std::collections::HashSet;
        let mut heap = RegionHeap::new();
        let idx1 = heap.alloc(42u32);
        let idx2 = heap.alloc(99u32);
        let dbg = format!("{idx1:?}");
        assert!(dbg.contains("HeapIndex"), "{dbg}");
        let copied = idx1;
        let cloned = idx1;
        assert_eq!(copied, cloned);
        assert_eq!(idx1, idx1);
        assert_ne!(idx1, idx2);
        let mut set = HashSet::new();
        set.insert(idx1);
        set.insert(idx2);
        set.insert(idx1);
        assert_eq!(set.len(), 2);
        assert_eq!(idx1.index(), 0);
        assert_eq!(idx1.generation(), 0);
    }
    #[test]
    fn heap_ref_debug_clone_copy_eq_hash() {
        use std::collections::HashSet;
        let mut heap = RegionHeap::new();
        let idx1 = heap.alloc(42u32);
        let idx2 = heap.alloc(99u32);
        let r1 = HeapRef::<u32>::new(idx1);
        let r2 = HeapRef::<u32>::new(idx2);
        let dbg = format!("{r1:?}");
        assert!(dbg.contains("HeapRef"), "{dbg}");
        let copied = r1;
        let cloned = r1;
        assert_eq!(copied, cloned);
        assert_eq!(r1, r1);
        assert_ne!(r1, r2);
        let mut set = HashSet::new();
        set.insert(r1);
        set.insert(r2);
        set.insert(r1);
        assert_eq!(set.len(), 2);
        assert_eq!(r1.get(&heap), Some(&42));
        assert_eq!(r2.get(&heap), Some(&99));
    }
    #[test]
    fn region_heap_debug_default() {
        let heap = RegionHeap::default();
        let dbg = format!("{heap:?}");
        assert!(dbg.contains("RegionHeap"), "{dbg}");
        assert_eq!(heap.len(), 0);
        assert!(heap.is_empty());
    }
}