use std::marker::PhantomData;
use std::collections::{HashMap, VecDeque};
#[allow(dead_code)]
pub struct ChainedArena {
    blocks: Vec<ArenaBlock>,
    /// Virtual base offset of each block in `blocks` (parallel vector).
    /// Needed because oversized blocks make blocks unequal in size.
    block_bases: Vec<usize>,
    block_size: usize,
    total_alloc: usize,
}
#[allow(dead_code)]
impl ChainedArena {
    /// Creates an arena whose blocks are normally `block_size` bytes each.
    pub fn new(block_size: usize) -> Self {
        let first = ArenaBlock::new(block_size);
        Self {
            blocks: vec![first],
            block_bases: vec![0],
            block_size,
            total_alloc: 0,
        }
    }
    /// Allocates `bytes` with the given alignment and returns a virtual
    /// offset that is unique across all blocks.
    ///
    /// Bug fix: offsets used to be computed as `offset + idx * block_size`,
    /// which produces colliding/overlapping offsets as soon as an oversized
    /// block (larger than `block_size`) is created. We now track each
    /// block's real base offset instead.
    pub fn alloc(&mut self, bytes: usize, align: usize) -> usize {
        let last = self.blocks.len() - 1;
        if let Some(offset) = self.blocks[last].try_alloc(bytes, align) {
            self.total_alloc += bytes;
            return self.block_bases[last] + offset;
        }
        // Current block is full: chain a new one, oversized if necessary
        // (`+ align` leaves room for worst-case alignment padding).
        let new_block_size = self.block_size.max(bytes + align);
        let mut block = ArenaBlock::new(new_block_size);
        let offset = block
            .try_alloc(bytes, align)
            .expect("arena block allocation must succeed");
        let base = self.block_bases[last] + self.blocks[last].block_size();
        self.blocks.push(block);
        self.block_bases.push(base);
        self.total_alloc += bytes;
        base + offset
    }
    /// Number of blocks allocated so far.
    pub fn num_blocks(&self) -> usize {
        self.blocks.len()
    }
    /// Total bytes handed out (excluding alignment padding).
    pub fn total_allocated(&self) -> usize {
        self.total_alloc
    }
}
#[derive(Debug, Clone)]
pub struct ArenaMap<T, V> {
    data: Vec<Option<V>>,
    _marker: PhantomData<T>,
}
impl<T, V> ArenaMap<T, V> {
    /// Creates an empty map from arena indices to values.
    pub fn new() -> Self {
        Self {
            data: Vec::new(),
            _marker: PhantomData,
        }
    }
    /// Associates `value` with `idx`, growing the dense backing store
    /// on demand.
    pub fn insert(&mut self, idx: Idx<T>, value: V) {
        let slot = idx.to_usize();
        if self.data.len() <= slot {
            self.data.resize_with(slot + 1, || None);
        }
        self.data[slot] = Some(value);
    }
    /// Shared access to the value stored at `idx`, if any.
    pub fn get(&self, idx: Idx<T>) -> Option<&V> {
        self.data.get(idx.to_usize()).and_then(|slot| slot.as_ref())
    }
    /// Mutable access to the value stored at `idx`, if any.
    pub fn get_mut(&mut self, idx: Idx<T>) -> Option<&mut V> {
        self.data
            .get_mut(idx.to_usize())
            .and_then(|slot| slot.as_mut())
    }
    /// Removes and returns the value stored at `idx`, if any.
    pub fn remove(&mut self, idx: Idx<T>) -> Option<V> {
        self.data.get_mut(idx.to_usize()).and_then(|slot| slot.take())
    }
    /// True when a value is stored at `idx`.
    pub fn contains(&self, idx: Idx<T>) -> bool {
        self.get(idx).is_some()
    }
    /// Number of occupied slots (not the backing vector's length).
    pub fn count(&self) -> usize {
        self.data.iter().flatten().count()
    }
    /// Iterates `(index, value)` pairs in index order, skipping holes.
    pub fn iter(&self) -> impl Iterator<Item = (Idx<T>, &V)> {
        self.data
            .iter()
            .enumerate()
            .filter_map(|(pos, slot)| Some((Idx::new(pos as u32), slot.as_ref()?)))
    }
}
#[allow(dead_code)]
pub struct ScopeStack {
    names: Vec<String>,
}
#[allow(dead_code)]
impl ScopeStack {
    /// Creates an empty scope stack.
    pub fn new() -> Self {
        Self { names: Vec::new() }
    }
    /// Enters a new scope named `name`.
    pub fn push(&mut self, name: impl Into<String>) {
        let owned = name.into();
        self.names.push(owned);
    }
    /// Leaves the innermost scope, returning its name if one was open.
    pub fn pop(&mut self) -> Option<String> {
        self.names.pop()
    }
    /// Name of the innermost scope, if any.
    pub fn current(&self) -> Option<&str> {
        self.names.last().map(String::as_str)
    }
    /// Number of nested scopes currently open.
    pub fn depth(&self) -> usize {
        self.names.len()
    }
    /// True when no scope is open.
    pub fn is_empty(&self) -> bool {
        self.names.is_empty()
    }
    /// Dotted path of all open scopes, outermost first (e.g. `"a.b.c"`).
    pub fn path(&self) -> String {
        self.names.join(".")
    }
}
#[allow(dead_code)]
pub struct StringInterner {
    strings: Vec<String>,
    map: HashMap<String, u32>,
}
#[allow(dead_code)]
impl StringInterner {
    /// Creates an interner with no strings.
    pub fn new() -> Self {
        Self {
            strings: Vec::new(),
            map: HashMap::new(),
        }
    }
    /// Returns a stable id for `s`, assigning a fresh one on first sight.
    pub fn intern(&mut self, s: &str) -> u32 {
        if let Some(existing) = self.map.get(s).copied() {
            return existing;
        }
        let fresh = self.strings.len() as u32;
        self.strings.push(s.to_owned());
        self.map.insert(s.to_owned(), fresh);
        fresh
    }
    /// Resolves an id back to its string, if the id was ever issued.
    pub fn get(&self, id: u32) -> Option<&str> {
        self.strings.get(id as usize).map(String::as_str)
    }
    /// Number of distinct interned strings.
    pub fn len(&self) -> usize {
        self.strings.len()
    }
    /// True when nothing has been interned.
    pub fn is_empty(&self) -> bool {
        self.strings.is_empty()
    }
}
#[allow(dead_code)]
pub struct DiagMeta {
    pub(super) entries: Vec<(String, String)>,
}
#[allow(dead_code)]
impl DiagMeta {
    /// Creates an empty metadata list.
    pub fn new() -> Self {
        Self {
            entries: Vec::new(),
        }
    }
    /// Appends a key/value pair; duplicate keys are kept in order.
    pub fn add(&mut self, key: impl Into<String>, val: impl Into<String>) {
        let pair = (key.into(), val.into());
        self.entries.push(pair);
    }
    /// Returns the value of the FIRST entry with the given key, if any.
    pub fn get(&self, key: &str) -> Option<&str> {
        for (k, v) in &self.entries {
            if k == key {
                return Some(v.as_str());
            }
        }
        None
    }
    /// Total number of entries (duplicates included).
    pub fn len(&self) -> usize {
        self.entries.len()
    }
    /// True when no metadata has been recorded.
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct StatCache<K: std::hash::Hash + Eq + Clone, V: Clone> {
    pub inner: SimpleLruCache<K, V>,
    pub hits: u64,
    pub misses: u64,
}
#[allow(dead_code)]
impl<K: std::hash::Hash + Eq + Clone, V: Clone> StatCache<K, V> {
    /// Creates a hit/miss-counting wrapper around an LRU cache.
    pub fn new(capacity: usize) -> Self {
        Self {
            inner: SimpleLruCache::new(capacity),
            hits: 0,
            misses: 0,
        }
    }
    /// Looks up `key`, counting the access as a hit or a miss.
    ///
    /// Bug fix: this previously probed the cache and updated the
    /// counters but then unconditionally returned `None`, so callers
    /// never received cached values.
    pub fn get(&mut self, key: &K) -> Option<&V> {
        // First probe only classifies the access; its borrow must end
        // before the counters can be updated.
        if self.inner.get(key).is_some() {
            self.hits += 1;
            // Re-borrow to hand the value out. The first probe already
            // moved the entry to the front, so recency is unaffected.
            self.inner.get(key)
        } else {
            self.misses += 1;
            None
        }
    }
    /// Inserts or refreshes `key`.
    pub fn put(&mut self, key: K, val: V) {
        self.inner.put(key, val);
    }
    /// Fraction of lookups that hit, in `[0, 1]`; 0.0 before any lookup.
    pub fn hit_rate(&self) -> f64 {
        let total = self.hits + self.misses;
        if total == 0 {
            return 0.0;
        }
        self.hits as f64 / total as f64
    }
}
#[allow(dead_code)]
pub struct TwoGenerationArena {
    pub(crate) nursery: GrowableArena,
    pub(crate) stable: GrowableArena,
    promotions: usize,
}
#[allow(dead_code)]
impl TwoGenerationArena {
    /// Creates a generational arena with separate nursery and stable
    /// capacities.
    pub fn new(nursery_cap: usize, stable_cap: usize) -> Self {
        let nursery = GrowableArena::new(nursery_cap);
        let stable = GrowableArena::new(stable_cap);
        Self {
            nursery,
            stable,
            promotions: 0,
        }
    }
    /// Allocates short-lived data in the nursery generation.
    pub fn alloc_nursery(&mut self, size: usize, align: usize) -> usize {
        self.nursery.alloc(size, align)
    }
    /// Allocates in the stable generation and counts the promotion.
    pub fn promote(&mut self, size: usize, align: usize) -> usize {
        self.promotions += 1;
        self.stable.alloc(size, align)
    }
    /// Discards everything in the nursery; the stable generation is kept.
    pub fn minor_gc(&mut self) {
        self.nursery.reset();
    }
    /// Number of promotions performed so far.
    pub fn num_promotions(&self) -> usize {
        self.promotions
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ArenaCheckpoint {
    pub watermark: usize,
}
#[allow(dead_code)]
impl ArenaCheckpoint {
    /// Snapshots the arena's current high-water mark.
    pub fn create(arena: &LinearArena) -> Self {
        let watermark = arena.used();
        Self { watermark }
    }
    /// Rewinds the arena to this snapshot, discarding newer allocations.
    /// Note: only the cursor is rewound; statistics are left as-is.
    pub fn restore(self, arena: &mut LinearArena) {
        arena.top = self.watermark;
    }
}
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Timestamp(u64);
#[allow(dead_code)]
impl Timestamp {
    /// Wraps a microsecond count as a timestamp.
    pub const fn from_us(us: u64) -> Self {
        Self(us)
    }
    /// The raw microsecond value.
    pub fn as_us(self) -> u64 {
        self.0
    }
    /// Microseconds between `earlier` and `self`; clamps to 0 instead
    /// of underflowing when `earlier` is actually later.
    pub fn elapsed_since(self, earlier: Timestamp) -> u64 {
        self.as_us().saturating_sub(earlier.as_us())
    }
}
#[derive(Debug)]
pub struct ScopedArena<T> {
    inner: Arena<T>,
}
impl<T> ScopedArena<T> {
    /// Creates an empty arena supporting checkpoint/rollback.
    pub fn new() -> Self {
        let inner = Arena::new();
        Self { inner }
    }
    /// Stores `value` and returns its index.
    pub fn alloc(&mut self, value: T) -> Idx<T> {
        self.inner.alloc(value)
    }
    /// Shared access to the value at `idx`.
    pub fn get(&self, idx: Idx<T>) -> &T {
        self.inner.get(idx)
    }
    /// Captures the current length; pass it to `rollback` later.
    pub fn checkpoint(&self) -> u32 {
        self.inner.len() as u32
    }
    /// Drops every value allocated after `checkpoint` was taken.
    pub fn rollback(&mut self, checkpoint: u32) {
        let keep = checkpoint as usize;
        self.inner.data.truncate(keep);
    }
    /// Number of live values.
    pub fn len(&self) -> usize {
        self.inner.len()
    }
    /// True when nothing is allocated.
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
}
#[allow(dead_code)]
pub struct LinearArena {
    pub(crate) buf: Vec<u8>,
    top: usize,
    stats: ArenaStatsExt,
}
#[allow(dead_code)]
impl LinearArena {
    /// Creates a fixed-capacity, zero-filled bump arena.
    pub fn new(capacity: usize) -> Self {
        Self {
            buf: vec![0u8; capacity],
            top: 0,
            stats: ArenaStatsExt::new(),
        }
    }
    /// Bump-allocates `size` bytes aligned to `align` (must be a power
    /// of two). Returns the offset into `buf`, or `None` when the
    /// request does not fit.
    ///
    /// Robustness fix: the alignment round-up now uses checked
    /// arithmetic so pathological `size`/`align` values return `None`
    /// instead of overflowing (debug panic / release wrap-around).
    pub fn alloc(&mut self, size: usize, align: usize) -> Option<usize> {
        debug_assert!(align.is_power_of_two(), "align must be a power of two");
        let aligned = self.top.checked_add(align - 1)? & !(align - 1);
        let end = aligned.checked_add(size)?;
        if end > self.buf.len() {
            return None;
        }
        // Alignment padding is recorded as waste for fragmentation stats.
        let waste = aligned - self.top;
        self.top = end;
        self.stats.bytes_allocated += size;
        self.stats.alloc_count += 1;
        self.stats.wasted_bytes += waste;
        Some(aligned)
    }
    /// Discards all allocations and statistics; capacity is retained.
    pub fn reset(&mut self) {
        self.top = 0;
        self.stats = ArenaStatsExt::new();
    }
    /// Bytes currently in use (the bump cursor).
    pub fn used(&self) -> usize {
        self.top
    }
    /// Total buffer size in bytes.
    pub fn capacity(&self) -> usize {
        self.buf.len()
    }
    /// Statistics accumulated since creation or the last `reset`.
    pub fn stats(&self) -> &ArenaStatsExt {
        &self.stats
    }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ArenaStats {
    pub len: usize,
    pub capacity: usize,
}
impl ArenaStats {
    /// Fraction of capacity in use, in `[0, 1]`; 0.0 for zero capacity.
    pub fn utilisation(&self) -> f64 {
        match self.capacity {
            0 => 0.0,
            cap => self.len as f64 / cap as f64,
        }
    }
    /// Remaining capacity, clamped to zero if `len` exceeds `capacity`.
    pub fn free(&self) -> usize {
        self.capacity.saturating_sub(self.len)
    }
}
#[allow(dead_code)]
pub struct AnnotationTable {
    map: HashMap<String, Vec<String>>,
}
#[allow(dead_code)]
impl AnnotationTable {
    /// Creates an empty annotation table.
    pub fn new() -> Self {
        Self {
            map: HashMap::new(),
        }
    }
    /// Appends `val` to the list of annotations under `key`.
    pub fn annotate(&mut self, key: impl Into<String>, val: impl Into<String>) {
        let bucket = self.map.entry(key.into()).or_default();
        bucket.push(val.into());
    }
    /// All annotations recorded for `key`, in insertion order; empty
    /// slice when the key is unknown.
    pub fn get_all(&self, key: &str) -> &[String] {
        match self.map.get(key) {
            Some(vals) => vals.as_slice(),
            None => &[],
        }
    }
    /// Number of distinct annotated keys.
    pub fn num_keys(&self) -> usize {
        self.map.len()
    }
    /// True when `key` has at least one annotation.
    pub fn has(&self, key: &str) -> bool {
        self.map.contains_key(key)
    }
}
#[derive(Debug)]
pub struct BumpArena {
    buf: Vec<u8>,
    pos: usize,
}
impl BumpArena {
    /// Creates a zero-filled bump arena of `cap` bytes.
    pub fn with_capacity(cap: usize) -> Self {
        Self {
            buf: vec![0u8; cap],
            pos: 0,
        }
    }
    /// Reserves `size` bytes and returns their starting offset, or
    /// `None` when the arena is exhausted.
    ///
    /// Robustness fix: `pos + size` is now a `checked_add`, so a
    /// pathological `size` returns `None` instead of overflowing.
    pub fn alloc_bytes(&mut self, size: usize) -> Option<usize> {
        let end = self.pos.checked_add(size)?;
        if end > self.buf.len() {
            return None;
        }
        let offset = self.pos;
        self.pos = end;
        Some(offset)
    }
    /// Copies `data` into the buffer at `offset`.
    ///
    /// # Panics
    /// Panics when `offset + data.len()` exceeds the buffer.
    pub fn write_at(&mut self, offset: usize, data: &[u8]) {
        self.buf[offset..offset + data.len()].copy_from_slice(data);
    }
    /// Borrows `len` bytes starting at `offset`.
    ///
    /// # Panics
    /// Panics when the range exceeds the buffer.
    pub fn read_at(&self, offset: usize, len: usize) -> &[u8] {
        &self.buf[offset..offset + len]
    }
    /// Rewinds the cursor; previously written bytes are NOT zeroed.
    pub fn reset(&mut self) {
        self.pos = 0;
    }
    /// Bytes handed out so far.
    pub fn used(&self) -> usize {
        self.pos
    }
    /// Total buffer size.
    pub fn capacity(&self) -> usize {
        self.buf.len()
    }
    /// Bytes still available.
    pub fn remaining(&self) -> usize {
        self.buf.len().saturating_sub(self.pos)
    }
}
#[derive(Debug)]
pub struct ArenaPool<T> {
    pub(super) pool: Vec<Arena<T>>,
}
impl<T> ArenaPool<T> {
    /// Creates an empty pool of reusable arenas.
    pub fn new() -> Self {
        Self { pool: Vec::new() }
    }
    /// Hands out a pooled arena, or a fresh default one when the pool
    /// is empty.
    pub fn acquire(&mut self) -> Arena<T> {
        self.pool.pop().unwrap_or_default()
    }
    /// Returns an arena to the pool after clearing its contents; the
    /// cleared arena's storage is kept for reuse.
    pub fn release(&mut self, mut arena: Arena<T>) {
        arena.clear();
        self.pool.push(arena);
    }
    /// Number of arenas currently parked in the pool.
    pub fn pool_size(&self) -> usize {
        self.pool.len()
    }
}
#[allow(dead_code)]
pub struct SimpleLruCache<K: std::hash::Hash + Eq + Clone, V: Clone> {
    capacity: usize,
    map: HashMap<K, usize>,
    keys: Vec<K>,
    vals: Vec<V>,
    order: Vec<usize>,
}
#[allow(dead_code)]
impl<K: std::hash::Hash + Eq + Clone, V: Clone> SimpleLruCache<K, V> {
    /// Creates a cache holding at most `capacity` entries.
    ///
    /// # Panics
    /// Panics when `capacity` is zero.
    pub fn new(capacity: usize) -> Self {
        assert!(capacity > 0);
        Self {
            capacity,
            map: HashMap::new(),
            keys: Vec::new(),
            vals: Vec::new(),
            order: Vec::new(),
        }
    }
    /// Moves `slot` to the front of the recency list (most recent first).
    fn touch(order: &mut Vec<usize>, slot: usize) {
        order.retain(|&s| s != slot);
        order.insert(0, slot);
    }
    /// Inserts or refreshes `key`; evicts the least-recently-used entry
    /// when the cache is full and `key` is new.
    pub fn put(&mut self, key: K, val: V) {
        if let Some(&slot) = self.map.get(&key) {
            // Key already cached: overwrite in place, mark most recent.
            self.vals[slot] = val;
            Self::touch(&mut self.order, slot);
        } else if self.keys.len() < self.capacity {
            // Room left: append a fresh slot.
            let slot = self.keys.len();
            self.keys.push(key.clone());
            self.vals.push(val);
            self.map.insert(key, slot);
            self.order.insert(0, slot);
        } else {
            // Full: recycle the least-recently-used slot in place.
            let victim = *self
                .order
                .last()
                .expect("order list must be non-empty before eviction");
            self.map.remove(&self.keys[victim]);
            self.order.pop();
            self.keys[victim] = key.clone();
            self.vals[victim] = val;
            self.map.insert(key, victim);
            self.order.insert(0, victim);
        }
    }
    /// Returns the cached value for `key`, marking it most recent.
    pub fn get(&mut self, key: &K) -> Option<&V> {
        let slot = *self.map.get(key)?;
        Self::touch(&mut self.order, slot);
        Some(&self.vals[slot])
    }
    /// Number of cached entries.
    pub fn len(&self) -> usize {
        self.keys.len()
    }
    /// True when nothing is cached.
    pub fn is_empty(&self) -> bool {
        self.keys.is_empty()
    }
}
#[allow(dead_code)]
pub struct IdDispenser<T> {
    next: u32,
    _phantom: std::marker::PhantomData<T>,
}
#[allow(dead_code)]
impl<T> IdDispenser<T> {
    /// Creates a dispenser whose first id is 0.
    pub fn new() -> Self {
        Self {
            next: 0,
            _phantom: std::marker::PhantomData,
        }
    }
    /// Issues the next sequential id.
    #[allow(clippy::should_implement_trait)]
    pub fn next(&mut self) -> TypedId<T> {
        let current = self.next;
        self.next = current + 1;
        TypedId::new(current)
    }
    /// Number of ids issued so far.
    pub fn count(&self) -> u32 {
        self.next
    }
}
#[allow(dead_code)]
pub struct GrowableArena {
    pub(crate) data: Vec<u8>,
    top: usize,
    count: usize,
}
#[allow(dead_code)]
impl GrowableArena {
    /// Creates an arena with at least 16 bytes of initial storage.
    pub fn new(initial: usize) -> Self {
        let capacity = initial.max(16);
        Self {
            data: vec![0u8; capacity],
            top: 0,
            count: 0,
        }
    }
    /// Bump-allocates `size` bytes at the given alignment, growing the
    /// backing buffer (at least doubling) whenever it runs out. Returns
    /// the allocation's offset.
    pub fn alloc(&mut self, size: usize, align: usize) -> usize {
        let mask = align - 1;
        let start = (self.top + mask) & !mask;
        let end = start + size;
        if end > self.data.len() {
            // Grow geometrically so repeated allocations stay amortized O(1).
            let grown = end.max(self.data.len() * 2);
            self.data.resize(grown, 0);
        }
        self.top = end;
        self.count += 1;
        start
    }
    /// Bytes currently in use.
    pub fn used(&self) -> usize {
        self.top
    }
    /// Number of allocations performed since the last reset.
    pub fn count(&self) -> usize {
        self.count
    }
    /// Rewinds the cursor and counter; the grown buffer is kept.
    pub fn reset(&mut self) {
        self.top = 0;
        self.count = 0;
    }
}
#[allow(dead_code)]
pub struct TypedArena<T> {
    items: Vec<T>,
}
#[allow(dead_code)]
impl<T> TypedArena<T> {
    /// Creates an empty typed arena.
    pub fn new() -> Self {
        Self { items: Vec::new() }
    }
    /// Stores `val` and returns a shared reference to it.
    pub fn alloc(&mut self, val: T) -> &T {
        self.items.push(val);
        let newest = self.items.len() - 1;
        &self.items[newest]
    }
    /// Number of stored values.
    pub fn len(&self) -> usize {
        self.items.len()
    }
    /// True when nothing is stored.
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }
    /// Drops every stored value.
    pub fn clear(&mut self) {
        self.items.clear();
    }
}
#[allow(dead_code)]
pub struct Slot<T> {
    inner: Option<T>,
}
#[allow(dead_code)]
impl<T> Slot<T> {
    /// Creates an unfilled slot.
    pub fn empty() -> Self {
        Self { inner: None }
    }
    /// Fills the slot exactly once.
    ///
    /// # Panics
    /// Panics when the slot is already filled.
    pub fn fill(&mut self, val: T) {
        assert!(self.inner.is_none(), "Slot: already filled");
        self.inner = Some(val);
    }
    /// Shared access to the stored value, if any.
    pub fn get(&self) -> Option<&T> {
        self.inner.as_ref()
    }
    /// True when a value is stored.
    pub fn is_filled(&self) -> bool {
        self.inner.is_some()
    }
    /// Moves the value out, leaving the slot empty.
    pub fn take(&mut self) -> Option<T> {
        self.inner.take()
    }
    /// Returns the stored value, computing and storing it first when
    /// the slot is empty.
    pub fn get_or_fill_with(&mut self, f: impl FnOnce() -> T) -> &T {
        self.inner.get_or_insert_with(f)
    }
}
#[derive(Clone, Copy)]
pub struct IdxRange<T> {
    pub start: Idx<T>,
    pub end: Idx<T>,
}
impl<T> IdxRange<T> {
    /// Builds the half-open range `[start, end)`.
    pub fn new(start: Idx<T>, end: Idx<T>) -> Self {
        Self { start, end }
    }
    /// An empty range positioned at `idx`.
    pub fn empty_at(idx: Idx<T>) -> Self {
        let at = idx.raw;
        Self {
            start: Idx::new(at),
            end: Idx::new(at),
        }
    }
    /// Number of indices covered; 0 when `end` precedes `start`.
    pub fn len(self) -> usize {
        (self.end.raw as usize).saturating_sub(self.start.raw as usize)
    }
    /// True when the range covers no index.
    pub fn is_empty(self) -> bool {
        self.len() == 0
    }
    /// True when `idx` lies within `[start, end)`.
    pub fn contains(self, idx: Idx<T>) -> bool {
        (self.start.raw..self.end.raw).contains(&idx.raw)
    }
    /// Iterates every index in the range, in order.
    pub fn iter(self) -> impl Iterator<Item = Idx<T>> {
        (self.start.raw..self.end.raw).map(Idx::new)
    }
}
#[allow(dead_code)]
pub struct ArenaBlock {
    data: Vec<u8>,
    used: usize,
}
#[allow(dead_code)]
impl ArenaBlock {
    /// Creates a zero-filled block of `size` bytes.
    pub fn new(size: usize) -> Self {
        Self {
            data: vec![0u8; size],
            used: 0,
        }
    }
    /// Bump-allocates `bytes` aligned to `align` (a power of two);
    /// returns the offset within this block, or `None` when the
    /// request does not fit.
    ///
    /// Robustness fix: the alignment round-up now uses checked
    /// arithmetic, so absurd sizes/alignments yield `None` instead of
    /// an arithmetic overflow.
    pub fn try_alloc(&mut self, bytes: usize, align: usize) -> Option<usize> {
        debug_assert!(align.is_power_of_two(), "align must be a power of two");
        let base = self.used.checked_add(align - 1)? & !(align - 1);
        let end = base.checked_add(bytes)?;
        if end > self.data.len() {
            return None;
        }
        self.used = end;
        Some(base)
    }
    /// Bytes still available (before any alignment padding).
    pub fn remaining(&self) -> usize {
        self.data.len() - self.used
    }
    /// Total size of this block.
    pub fn block_size(&self) -> usize {
        self.data.len()
    }
}
#[derive(Debug)]
pub struct InterningArena<T: PartialEq + Clone> {
    inner: Arena<T>,
}
impl<T: PartialEq + Clone> InterningArena<T> {
    /// Creates an empty interning arena.
    pub fn new() -> Self {
        let inner = Arena::new();
        Self { inner }
    }
    /// Returns the index of an equal existing value, allocating `value`
    /// only when no equal value is present. Linear scan: O(n) per call.
    pub fn intern(&mut self, value: T) -> Idx<T> {
        let existing = self
            .inner
            .iter_indexed()
            .find(|(_, stored)| **stored == value)
            .map(|(idx, _)| idx);
        match existing {
            Some(idx) => idx,
            None => self.inner.alloc(value),
        }
    }
    /// Shared access to the value at `idx`.
    pub fn get(&self, idx: Idx<T>) -> &T {
        self.inner.get(idx)
    }
    /// Number of distinct interned values.
    pub fn len(&self) -> usize {
        self.inner.len()
    }
    /// True when nothing has been interned.
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
    /// True when an equal value has already been interned.
    pub fn contains(&self, value: &T) -> bool {
        self.inner.iter().any(|stored| stored == value)
    }
    /// Length/capacity snapshot of the underlying arena.
    pub fn stats(&self) -> ArenaStats {
        self.inner.stats()
    }
}
#[allow(dead_code)]
pub struct LoopClock {
    start: std::time::Instant,
    iters: u64,
}
#[allow(dead_code)]
impl LoopClock {
    /// Starts timing from now, with zero iterations recorded.
    pub fn start() -> Self {
        Self {
            start: std::time::Instant::now(),
            iters: 0,
        }
    }
    /// Records one completed iteration.
    pub fn tick(&mut self) {
        self.iters += 1;
    }
    /// Wall-clock microseconds since `start` was called.
    pub fn elapsed_us(&self) -> f64 {
        let seconds = self.start.elapsed().as_secs_f64();
        seconds * 1e6
    }
    /// Mean microseconds per recorded iteration; 0.0 before any tick.
    pub fn avg_us_per_iter(&self) -> f64 {
        if self.iters == 0 {
            return 0.0;
        }
        self.elapsed_us() / self.iters as f64
    }
    /// Number of iterations recorded.
    pub fn iters(&self) -> u64 {
        self.iters
    }
}
#[allow(dead_code)]
pub struct MemoSlot<T: Clone> {
    cached: Option<T>,
}
#[allow(dead_code)]
impl<T: Clone> MemoSlot<T> {
    /// Creates an empty memo slot.
    pub fn new() -> Self {
        Self { cached: None }
    }
    /// Returns the cached value, computing and storing it on first use.
    pub fn get_or_compute(&mut self, f: impl FnOnce() -> T) -> &T {
        // Lazily fill the slot; later calls reuse the stored value and
        // never run `f` again until `invalidate` is called.
        self.cached.get_or_insert_with(f)
    }
    /// Clears the cached value; the next access recomputes it.
    pub fn invalidate(&mut self) {
        self.cached = None;
    }
    /// True when a value is currently cached.
    pub fn is_cached(&self) -> bool {
        self.cached.is_some()
    }
}
#[derive(Debug)]
pub struct Arena<T> {
    data: Vec<T>,
}
impl<T> Arena<T> {
    /// Creates an empty arena.
    #[inline]
    pub fn new() -> Self {
        Self { data: Vec::new() }
    }
    /// Creates an empty arena with room for `capacity` values.
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            data: Vec::with_capacity(capacity),
        }
    }
    /// Stores `value` and returns its index.
    ///
    /// # Panics
    /// Panics when the next index would not fit in a `u32`.
    #[inline]
    pub fn alloc(&mut self, value: T) -> Idx<T> {
        let idx = self.data.len();
        assert!(idx < u32::MAX as usize, "arena overflow");
        self.data.push(value);
        Idx::new(idx as u32)
    }
    /// Stores every value from `values` and returns the covering range.
    ///
    /// # Panics
    /// Panics when the resulting length no longer fits in `u32`.
    /// (Consistency fix: `alloc` already guards against overflow, but
    /// this method used a bare `as u32` cast that could silently
    /// truncate on 64-bit targets.)
    pub fn alloc_many(&mut self, values: impl IntoIterator<Item = T>) -> IdxRange<T> {
        let start = Idx::new(self.data.len() as u32);
        self.data.extend(values);
        let end_raw = u32::try_from(self.data.len()).expect("arena overflow");
        IdxRange::new(start, Idx::new(end_raw))
    }
    /// Shared access to the value at `idx`.
    ///
    /// # Panics
    /// Panics when `idx` is out of bounds.
    #[inline]
    pub fn get(&self, idx: Idx<T>) -> &T {
        &self.data[idx.raw as usize]
    }
    /// Mutable access to the value at `idx`.
    ///
    /// # Panics
    /// Panics when `idx` is out of bounds.
    #[inline]
    pub fn get_mut(&mut self, idx: Idx<T>) -> &mut T {
        &mut self.data[idx.raw as usize]
    }
    /// Slice covering the values in `range`.
    pub fn get_range(&self, range: IdxRange<T>) -> &[T] {
        &self.data[range.start.to_usize()..range.end.to_usize()]
    }
    /// Number of stored values.
    #[inline]
    pub fn len(&self) -> usize {
        self.data.len()
    }
    /// True when nothing is stored.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
    /// Index the next `alloc` call will return.
    pub fn next_idx(&self) -> Idx<T> {
        Idx::new(self.data.len() as u32)
    }
    /// Iterates `(index, value)` pairs in allocation order.
    pub fn iter_indexed(&self) -> impl Iterator<Item = (Idx<T>, &T)> {
        self.data
            .iter()
            .enumerate()
            .map(|(i, v)| (Idx::new(i as u32), v))
    }
    /// Iterates values in allocation order.
    pub fn iter(&self) -> impl Iterator<Item = &T> {
        self.data.iter()
    }
    /// Length/capacity snapshot.
    pub fn stats(&self) -> ArenaStats {
        ArenaStats {
            len: self.data.len(),
            capacity: self.data.capacity(),
        }
    }
    /// Releases unused backing capacity.
    pub fn shrink_to_fit(&mut self) {
        self.data.shrink_to_fit();
    }
    /// Drops every stored value; capacity is retained.
    pub fn clear(&mut self) {
        self.data.clear();
    }
}
#[allow(dead_code)]
pub struct BiMap<A: std::hash::Hash + Eq + Clone, B: std::hash::Hash + Eq + Clone> {
    forward: std::collections::HashMap<A, B>,
    backward: std::collections::HashMap<B, A>,
}
#[allow(dead_code)]
impl<A: std::hash::Hash + Eq + Clone, B: std::hash::Hash + Eq + Clone> BiMap<A, B> {
    /// Creates an empty bidirectional map.
    pub fn new() -> Self {
        Self {
            forward: std::collections::HashMap::new(),
            backward: std::collections::HashMap::new(),
        }
    }
    /// Inserts the pairing `a <-> b`, displacing any pairing that
    /// previously involved `a` or `b`.
    ///
    /// Bug fix: re-mapping an existing key or value used to leave a
    /// stale entry in the opposite map, so the two directions drifted
    /// out of sync. Stale pairings are now removed first.
    pub fn insert(&mut self, a: A, b: B) {
        if let Some(old_b) = self.forward.remove(&a) {
            self.backward.remove(&old_b);
        }
        if let Some(old_a) = self.backward.remove(&b) {
            self.forward.remove(&old_a);
        }
        self.forward.insert(a.clone(), b.clone());
        self.backward.insert(b, a);
    }
    /// Looks up the `B` paired with `a`.
    pub fn get_b(&self, a: &A) -> Option<&B> {
        self.forward.get(a)
    }
    /// Looks up the `A` paired with `b`.
    pub fn get_a(&self, b: &B) -> Option<&A> {
        self.backward.get(b)
    }
    /// Number of pairings.
    pub fn len(&self) -> usize {
        self.forward.len()
    }
    /// True when no pairing exists.
    pub fn is_empty(&self) -> bool {
        self.forward.is_empty()
    }
}
#[allow(dead_code)]
pub struct ArenaString {
    pub(crate) offset: usize,
    len: usize,
}
#[allow(dead_code)]
impl ArenaString {
    /// Copies `s` (plus a trailing NUL byte) into `arena` and returns a
    /// handle, or `None` when the arena has no room.
    pub fn store(arena: &mut LinearArena, s: &str) -> Option<Self> {
        let bytes = s.as_bytes();
        // One extra byte reserved for the NUL terminator.
        let offset = arena.alloc(bytes.len() + 1, 1)?;
        arena.buf[offset..offset + bytes.len()].copy_from_slice(bytes);
        arena.buf[offset + bytes.len()] = 0;
        Some(Self {
            offset,
            len: bytes.len(),
        })
    }
    /// Byte length of the stored string (excluding the NUL terminator).
    pub fn len(&self) -> usize {
        self.len
    }
    /// True when the stored string has zero bytes.
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
}
#[allow(dead_code)]
pub struct IntervalSet {
    // Disjoint, sorted, inclusive intervals.
    intervals: Vec<(i64, i64)>,
}
#[allow(dead_code)]
impl IntervalSet {
    /// Creates an empty interval set.
    pub fn new() -> Self {
        Self {
            intervals: Vec::new(),
        }
    }
    /// Adds the inclusive interval `[lo, hi]`, merging it with any
    /// overlapping or adjacent existing intervals. Inverted ranges
    /// (`lo > hi`) are ignored.
    ///
    /// Robustness fix: the adjacency checks used `new_lo - 1` /
    /// `new_hi + 1`, which overflow at `i64::MIN` / `i64::MAX`; they
    /// now use saturating arithmetic (identical results for all
    /// non-extreme inputs).
    pub fn add(&mut self, lo: i64, hi: i64) {
        if lo > hi {
            return;
        }
        let mut merged_lo = lo;
        let mut merged_hi = hi;
        let mut i = 0;
        while i < self.intervals.len() {
            let (il, ih) = self.intervals[i];
            // Entirely before the new interval (with a gap): keep it.
            if ih < merged_lo.saturating_sub(1) {
                i += 1;
                continue;
            }
            // Entirely after (with a gap): nothing further can merge.
            if il > merged_hi.saturating_add(1) {
                break;
            }
            // Overlapping or adjacent: absorb and remove it.
            merged_lo = merged_lo.min(il);
            merged_hi = merged_hi.max(ih);
            self.intervals.remove(i);
        }
        self.intervals.insert(i, (merged_lo, merged_hi));
    }
    /// True when `x` lies inside any stored interval.
    pub fn contains(&self, x: i64) -> bool {
        self.intervals.iter().any(|&(lo, hi)| x >= lo && x <= hi)
    }
    /// Number of disjoint intervals currently stored.
    pub fn num_intervals(&self) -> usize {
        self.intervals.len()
    }
    /// Total count of integers covered (may overflow for extreme spans).
    pub fn cardinality(&self) -> i64 {
        self.intervals.iter().map(|&(lo, hi)| hi - lo + 1).sum()
    }
}
#[allow(dead_code)]
pub struct MemoryRegionRegistry {
    regions: Vec<MemoryRegion>,
}
#[allow(dead_code)]
impl MemoryRegionRegistry {
    /// Creates an empty registry.
    pub fn new() -> Self {
        Self {
            regions: Vec::new(),
        }
    }
    /// Registers a region; regions are kept in insertion order.
    pub fn add(&mut self, region: MemoryRegion) {
        self.regions.push(region);
    }
    /// Returns the FIRST registered region containing `addr`, if any.
    pub fn find(&self, addr: usize) -> Option<&MemoryRegion> {
        self.regions.iter().find(|region| region.contains(addr))
    }
    /// Number of registered regions.
    pub fn len(&self) -> usize {
        self.regions.len()
    }
    /// True when no region is registered.
    pub fn is_empty(&self) -> bool {
        self.regions.is_empty()
    }
}
#[derive(Debug, Default)]
pub struct DoubleArena<A, B> {
    pub first: Arena<A>,
    pub second: Arena<B>,
}
impl<A, B> DoubleArena<A, B> {
    /// Creates two empty arenas, one per element type.
    pub fn new() -> Self {
        Self {
            first: Arena::new(),
            second: Arena::new(),
        }
    }
    /// Allocates `a` then `b` and returns both indices.
    pub fn alloc_pair(&mut self, a: A, b: B) -> (Idx<A>, Idx<B>) {
        (self.first.alloc(a), self.second.alloc(b))
    }
    /// Combined length of both arenas.
    pub fn total_len(&self) -> usize {
        self.first.len() + self.second.len()
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct MemoryRegion {
    pub label: String,
    pub base: usize,
    pub size: usize,
    pub active: bool,
}
#[allow(dead_code)]
impl MemoryRegion {
    /// Creates an inactive region `[base, base + size)`.
    pub fn new(label: impl Into<String>, base: usize, size: usize) -> Self {
        Self {
            label: label.into(),
            base,
            size,
            active: false,
        }
    }
    /// Marks the region active.
    pub fn activate(&mut self) {
        self.active = true;
    }
    /// Marks the region inactive.
    pub fn deactivate(&mut self) {
        self.active = false;
    }
    /// One-past-the-end address of the region.
    pub fn end(&self) -> usize {
        self.base + self.size
    }
    /// True when `addr` lies within `[base, end)`.
    pub fn contains(&self, addr: usize) -> bool {
        (self.base..self.end()).contains(&addr)
    }
}
#[allow(dead_code)]
pub struct SparseBitSet {
    words: Vec<u64>,
}
#[allow(dead_code)]
impl SparseBitSet {
    /// Creates a set for bits `0..capacity` (rounded up to a multiple
    /// of 64).
    pub fn new(capacity: usize) -> Self {
        let word_count = (capacity + 63) / 64;
        Self {
            words: vec![0u64; word_count],
        }
    }
    /// Splits a bit index into its word index and single-bit mask.
    fn locate(i: usize) -> (usize, u64) {
        (i / 64, 1u64 << (i % 64))
    }
    /// Sets bit `i`; out-of-range indices are silently ignored.
    pub fn set(&mut self, i: usize) {
        let (w, mask) = Self::locate(i);
        if let Some(word) = self.words.get_mut(w) {
            *word |= mask;
        }
    }
    /// Clears bit `i`; out-of-range indices are silently ignored.
    pub fn clear(&mut self, i: usize) {
        let (w, mask) = Self::locate(i);
        if let Some(word) = self.words.get_mut(w) {
            *word &= !mask;
        }
    }
    /// Returns bit `i`; out-of-range indices read as false.
    pub fn get(&self, i: usize) -> bool {
        let (w, mask) = Self::locate(i);
        self.words.get(w).is_some_and(|word| word & mask != 0)
    }
    /// Number of set bits.
    pub fn count_ones(&self) -> u32 {
        self.words.iter().map(|w| w.count_ones()).sum()
    }
    /// Bitwise-or of both sets, sized to the larger operand.
    pub fn union(&self, other: &SparseBitSet) -> SparseBitSet {
        let len = self.words.len().max(other.words.len());
        let mut merged = vec![0u64; len];
        for (dst, src) in merged.iter_mut().zip(&self.words) {
            *dst |= src;
        }
        for (dst, src) in merged.iter_mut().zip(&other.words) {
            *dst |= src;
        }
        SparseBitSet { words: merged }
    }
}
#[allow(dead_code)]
pub struct WorkQueue<T> {
    items: VecDeque<T>,
}
#[allow(dead_code)]
impl<T> WorkQueue<T> {
    /// Creates an empty FIFO work queue.
    pub fn new() -> Self {
        Self {
            items: VecDeque::new(),
        }
    }
    /// Appends an item at the back of the queue.
    pub fn enqueue(&mut self, item: T) {
        self.items.push_back(item);
    }
    /// Removes and returns the front item, if any.
    pub fn dequeue(&mut self) -> Option<T> {
        self.items.pop_front()
    }
    /// True when the queue holds no items.
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }
    /// Number of queued items.
    pub fn len(&self) -> usize {
        self.items.len()
    }
}
#[allow(dead_code)]
pub struct FrequencyTable<T: std::hash::Hash + Eq + Clone> {
    counts: HashMap<T, u64>,
}
#[allow(dead_code)]
impl<T: std::hash::Hash + Eq + Clone> FrequencyTable<T> {
    /// Creates an empty frequency table.
    pub fn new() -> Self {
        Self {
            counts: HashMap::new(),
        }
    }
    /// Records one occurrence of `item`.
    pub fn record(&mut self, item: T) {
        *self.counts.entry(item).or_default() += 1;
    }
    /// Number of recorded occurrences of `item` (0 if never seen).
    pub fn freq(&self, item: &T) -> u64 {
        self.counts.get(item).map_or(0, |&count| count)
    }
    /// The item with the highest count, together with that count.
    /// Ties are broken arbitrarily (hash-map iteration order).
    pub fn most_frequent(&self) -> Option<(&T, u64)> {
        self.counts
            .iter()
            .map(|(item, &count)| (item, count))
            .max_by_key(|&(_, count)| count)
    }
    /// Total number of recorded occurrences.
    pub fn total(&self) -> u64 {
        self.counts.values().sum()
    }
    /// Number of distinct items seen.
    pub fn distinct(&self) -> usize {
        self.counts.len()
    }
}
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TypedId<T> {
    pub(super) id: u32,
    _phantom: std::marker::PhantomData<T>,
}
#[allow(dead_code)]
impl<T> TypedId<T> {
    /// Wraps a raw `u32` as a type-tagged id.
    pub const fn new(id: u32) -> Self {
        Self {
            id,
            _phantom: std::marker::PhantomData,
        }
    }
    /// The untagged `u32` value.
    pub fn raw(&self) -> u32 {
        self.id
    }
}
#[allow(dead_code)]
pub struct ScopedArenaExt {
    pub(crate) inner: LinearArena,
    watermarks: Vec<usize>,
}
#[allow(dead_code)]
impl ScopedArenaExt {
    /// Creates a scoped bump arena with the given capacity.
    pub fn new(capacity: usize) -> Self {
        Self {
            inner: LinearArena::new(capacity),
            watermarks: Vec::new(),
        }
    }
    /// Opens a scope; allocations made afterwards are discarded by the
    /// matching `pop_scope`.
    pub fn push_scope(&mut self) {
        let mark = self.inner.used();
        self.watermarks.push(mark);
    }
    /// Closes the innermost scope, rewinding the arena cursor.
    ///
    /// # Panics
    /// Panics when no scope is open.
    pub fn pop_scope(&mut self) {
        let mark = self.watermarks.pop().expect("ScopedArena: no active scope");
        self.inner.top = mark;
    }
    /// Allocates from the underlying arena.
    pub fn alloc(&mut self, size: usize, align: usize) -> Option<usize> {
        self.inner.alloc(size, align)
    }
    /// Number of currently open scopes.
    pub fn scope_depth(&self) -> usize {
        self.watermarks.len()
    }
}
#[allow(dead_code)]
pub struct WorkStack<T> {
    items: Vec<T>,
}
#[allow(dead_code)]
impl<T> WorkStack<T> {
    /// Creates an empty LIFO work stack.
    pub fn new() -> Self {
        Self { items: Vec::new() }
    }
    /// Pushes an item on top of the stack.
    pub fn push(&mut self, item: T) {
        self.items.push(item);
    }
    /// Removes and returns the top item, if any.
    pub fn pop(&mut self) -> Option<T> {
        self.items.pop()
    }
    /// True when the stack holds no items.
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }
    /// Number of items on the stack.
    pub fn len(&self) -> usize {
        self.items.len()
    }
}
#[allow(dead_code)]
pub struct ArenaVec<T: Copy> {
    base: usize,
    length: usize,
    _t: std::marker::PhantomData<T>,
}
#[allow(dead_code)]
impl<T: Copy> ArenaVec<T> {
    /// Reserves space in `arena` for `capacity` elements of `T` and
    /// returns an empty handle, or `None` when the arena has no room.
    ///
    /// Robustness fix: the byte-size computation now uses
    /// `checked_mul`, so an absurd `capacity` returns `None` instead
    /// of overflowing.
    pub fn new(arena: &mut LinearArena, capacity: usize) -> Option<Self> {
        let bytes = capacity.checked_mul(std::mem::size_of::<T>())?;
        let base = arena.alloc(bytes, std::mem::align_of::<T>())?;
        Some(Self {
            base,
            length: 0,
            _t: std::marker::PhantomData,
        })
    }
    /// Number of elements currently stored.
    pub fn len(&self) -> usize {
        self.length
    }
    /// True when no element is stored.
    pub fn is_empty(&self) -> bool {
        self.length == 0
    }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct ArenaStatsExt {
    pub bytes_allocated: usize,
    pub alloc_count: usize,
    pub chunk_count: usize,
    pub wasted_bytes: usize,
}
#[allow(dead_code)]
impl ArenaStatsExt {
    /// Creates all-zero statistics.
    pub fn new() -> Self {
        Self {
            bytes_allocated: 0,
            alloc_count: 0,
            chunk_count: 0,
            wasted_bytes: 0,
        }
    }
    /// Mean bytes per allocation; 0.0 before any allocation.
    pub fn avg_alloc_size(&self) -> f64 {
        if self.alloc_count == 0 {
            0.0
        } else {
            self.bytes_allocated as f64 / self.alloc_count as f64
        }
    }
    /// Fraction of consumed bytes lost to padding, in `[0, 1]`; 0.0
    /// when nothing has been consumed.
    pub fn fragmentation(&self) -> f64 {
        let consumed = self.bytes_allocated + self.wasted_bytes;
        if consumed == 0 {
            0.0
        } else {
            self.wasted_bytes as f64 / consumed as f64
        }
    }
}
#[allow(dead_code)]
pub struct PoolArena {
    slot_size: usize,
    capacity: usize,
    free_list: Vec<usize>,
    data: Vec<u8>,
}
#[allow(dead_code)]
impl PoolArena {
    /// Creates a pool of `capacity` fixed-size slots of `slot_size`
    /// bytes each; all slots start free.
    ///
    /// # Panics
    /// Panics when `slot_size * capacity` overflows `usize`.
    pub fn new(slot_size: usize, capacity: usize) -> Self {
        let bytes = slot_size
            .checked_mul(capacity)
            .expect("pool size overflows usize");
        let data = vec![0u8; bytes];
        let free_list: Vec<usize> = (0..capacity).collect();
        Self {
            slot_size,
            capacity,
            free_list,
            data,
        }
    }
    /// Takes a free slot index, or `None` when the pool is exhausted.
    pub fn alloc_slot(&mut self) -> Option<usize> {
        self.free_list.pop()
    }
    /// Returns `idx` to the pool.
    ///
    /// Bug fix: previously a slot could be freed twice, putting it on
    /// the free list twice and letting `alloc_slot` hand the same slot
    /// to two callers. Double frees (and out-of-range indices) are now
    /// ignored. The O(free) scan is acceptable for small pools.
    pub fn free_slot(&mut self, idx: usize) {
        if idx < self.capacity && !self.free_list.contains(&idx) {
            self.free_list.push(idx);
        }
    }
    /// Number of free slots remaining.
    pub fn available(&self) -> usize {
        self.free_list.len()
    }
    /// Total number of slots.
    pub fn capacity(&self) -> usize {
        self.capacity
    }
    /// Size of each slot in bytes.
    pub fn slot_size(&self) -> usize {
        self.slot_size
    }
}
#[derive(Debug)]
pub struct SlabArena<T> {
    data: Vec<Option<T>>,
    free_list: Vec<u32>,
}
impl<T> SlabArena<T> {
    /// Creates an empty slab.
    pub fn new() -> Self {
        Self {
            data: Vec::new(),
            free_list: Vec::new(),
        }
    }
    /// Creates an empty slab with room for `cap` slots.
    pub fn with_capacity(cap: usize) -> Self {
        Self {
            data: Vec::with_capacity(cap),
            free_list: Vec::new(),
        }
    }
    /// Stores `value`, reusing a freed slot when one is available.
    ///
    /// # Panics
    /// Panics when the slot index would not fit in a `u32`.
    pub fn alloc(&mut self, value: T) -> Idx<T> {
        match self.free_list.pop() {
            Some(recycled) => {
                self.data[recycled as usize] = Some(value);
                Idx::new(recycled)
            }
            None => {
                let fresh = self.data.len() as u32;
                assert!(fresh < u32::MAX, "slab arena overflow");
                self.data.push(Some(value));
                Idx::new(fresh)
            }
        }
    }
    /// Removes and returns the value at `idx`, marking its slot
    /// available for reuse; `None` when the slot is empty or invalid.
    pub fn free(&mut self, idx: Idx<T>) -> Option<T> {
        let value = self.data.get_mut(idx.raw as usize)?.take()?;
        self.free_list.push(idx.raw);
        Some(value)
    }
    /// Shared access to the live value at `idx`, if any.
    pub fn get(&self, idx: Idx<T>) -> Option<&T> {
        self.data.get(idx.raw as usize).and_then(|slot| slot.as_ref())
    }
    /// Mutable access to the live value at `idx`, if any.
    pub fn get_mut(&mut self, idx: Idx<T>) -> Option<&mut T> {
        self.data
            .get_mut(idx.raw as usize)
            .and_then(|slot| slot.as_mut())
    }
    /// Number of live (occupied) slots; O(total slots).
    pub fn len(&self) -> usize {
        self.data.iter().flatten().count()
    }
    /// Total slots ever created, live or free.
    pub fn capacity_slots(&self) -> usize {
        self.data.len()
    }
    /// True when no slot is occupied.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Number of slots waiting for reuse.
    pub fn free_count(&self) -> usize {
        self.free_list.len()
    }
    /// Iterates `(index, value)` over live slots in slot order.
    pub fn iter_live(&self) -> impl Iterator<Item = (Idx<T>, &T)> {
        self.data
            .iter()
            .enumerate()
            .filter_map(|(pos, slot)| slot.as_ref().map(|v| (Idx::new(pos as u32), v)))
    }
}
#[derive(Clone, Copy)]
pub struct Idx<T> {
    pub(super) raw: u32,
    _marker: PhantomData<T>,
}
impl<T> Idx<T> {
    /// Wraps a raw `u32` arena index.
    #[inline]
    pub(crate) fn new(raw: u32) -> Self {
        Self {
            raw,
            _marker: PhantomData,
        }
    }
    /// The untagged `u32` value.
    #[inline]
    pub fn raw(&self) -> u32 {
        self.raw
    }
    /// Reinterprets this index as pointing into a different arena type.
    pub fn cast<U>(self) -> Idx<U> {
        Idx {
            raw: self.raw,
            _marker: PhantomData,
        }
    }
    /// True for index zero.
    pub fn is_first(self) -> bool {
        self.raw == 0
    }
    /// The immediately following index.
    pub fn next(self) -> Self {
        Self::new(self.raw + 1)
    }
    /// Widens to `usize` for slice indexing (lossless).
    pub fn to_usize(self) -> usize {
        self.raw as usize
    }
}
#[allow(dead_code)]
pub struct EventCounter {
    counts: HashMap<String, u64>,
}
#[allow(dead_code)]
impl EventCounter {
    /// Creates an empty event counter.
    pub fn new() -> Self {
        Self {
            counts: HashMap::new(),
        }
    }
    /// Records one occurrence of `event`.
    pub fn inc(&mut self, event: &str) {
        self.add(event, 1);
    }
    /// Records `n` occurrences of `event`.
    pub fn add(&mut self, event: &str, n: u64) {
        let slot = self.counts.entry(event.to_string()).or_default();
        *slot += n;
    }
    /// Number of recorded occurrences of `event` (0 if never seen).
    pub fn get(&self, event: &str) -> u64 {
        self.counts.get(event).copied().unwrap_or(0)
    }
    /// Total occurrences across all events.
    pub fn total(&self) -> u64 {
        self.counts.values().sum()
    }
    /// Forgets every recorded event.
    pub fn reset(&mut self) {
        self.counts.clear();
    }
    /// Names of all events seen so far (arbitrary order).
    pub fn event_names(&self) -> Vec<&str> {
        self.counts.keys().map(String::as_str).collect()
    }
}